use std::collections::LinkedList; use test::Bencher; #[bench] fn bench_collect_into(b: &mut Bencher) { let v = &[0; 64]; b.iter(|| { let _: LinkedList<_> = v.iter().cloned().collect(); }) } #[bench] fn bench_push_front(b: &mut Bencher) { let mut m: LinkedList<_> = LinkedList::new(); b.iter(|| { m.push_front(0); }) } #[bench] fn bench_push_back(b: &mut Bencher) { let mut m: LinkedList<_> = LinkedList::new(); b.iter(|| { m.push_back(0); }) } #[bench] fn bench_push_back_pop_back(b: &mut Bencher) { let mut m: LinkedList<_> = LinkedList::new(); b.iter(|| { m.push_back(0); m.pop_back(); }) } #[bench] fn bench_push_front_pop_front(b: &mut Bencher) { let mut m: LinkedList<_> = LinkedList::new(); b.iter(|| { m.push_front(0); m.pop_front(); }) } #[bench] fn bench_iter(b: &mut Bencher) { let v = &[0; 128]; let m: LinkedList<_> = v.iter().cloned().collect(); b.iter(|| { assert!(m.iter().count() == 128); }) } #[bench] fn bench_iter_mut(b: &mut Bencher) { let v = &[0; 128]; let mut m: LinkedList<_> = v.iter().cloned().collect(); b.iter(|| { assert!(m.iter_mut().count() == 128); }) } #[bench] fn bench_iter_rev(b: &mut Bencher) { let v = &[0; 128]; let m: LinkedList<_> = v.iter().cloned().collect(); b.iter(|| { assert!(m.iter().rev().count() == 128); }) } #[bench] fn bench_iter_mut_rev(b: &mut Bencher) { let v = &[0; 128]; let mut m: LinkedList<_> = v.iter().cloned().collect(); b.iter(|| { assert!(m.iter_mut().rev().count() == 128); }) } use test::{black_box, Bencher}; #[bench] fn char_iterator(b: &mut Bencher) { let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb"; b.iter(|| s.chars().count()); } #[bench] fn char_iterator_for(b: &mut Bencher) { let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb"; b.iter(|| { for ch in s.chars() { black_box(ch); } }); } #[bench] fn char_iterator_ascii(b: &mut Bencher) { let s = "Mary had a little lamb, Little lamb Mary had a little lamb, Little lamb Mary had a little lamb, Little lamb Mary had a little lamb, Little lamb Mary had a little lamb, Little lamb Mary had a little lamb, Little lamb"; b.iter(|| s.chars().count()); } #[bench] fn char_iterator_rev(b: &mut Bencher) { let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb"; b.iter(|| s.chars().rev().count()); } #[bench] fn char_iterator_rev_for(b: &mut Bencher) { let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb"; b.iter(|| { for ch in s.chars().rev() { black_box(ch); } }); } #[bench] fn char_indicesator(b: &mut Bencher) { let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb"; let len = s.chars().count(); b.iter(|| assert_eq!(s.char_indices().count(), len)); } #[bench] fn char_indicesator_rev(b: &mut Bencher) { let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb"; let len = s.chars().count(); b.iter(|| assert_eq!(s.char_indices().rev().count(), len)); } #[bench] fn split_unicode_ascii(b: &mut Bencher) { let s = "ประเทศไทย中华Việt Namประเทศไทย中华Việt Nam"; b.iter(|| assert_eq!(s.split('V').count(), 3)); } #[bench] fn split_ascii(b: &mut Bencher) { let s = "Mary had a little lamb, Little lamb, little-lamb."; let len = s.split(' ').count(); b.iter(|| assert_eq!(s.split(' ').count(), len)); } #[bench] fn split_extern_fn(b: &mut Bencher) { let s = "Mary had a little lamb, Little lamb, little-lamb."; let len = s.split(' ').count(); fn pred(c: char) -> bool { c == ' ' } b.iter(|| assert_eq!(s.split(pred).count(), len)); } #[bench] fn split_closure(b: &mut Bencher) { let s = "Mary had a little lamb, Little lamb, little-lamb."; let len = s.split(' ').count(); b.iter(|| 
assert_eq!(s.split(|c: char| c == ' ').count(), len)); } #[bench] fn split_slice(b: &mut Bencher) { let s = "Mary had a little lamb, Little lamb, little-lamb."; let len = s.split(' ').count(); let c: &[char] = &[' ']; b.iter(|| assert_eq!(s.split(c).count(), len)); } #[bench] fn bench_join(b: &mut Bencher) { let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb"; let sep = "→"; let v = vec![s, s, s, s, s, s, s, s, s, s]; b.iter(|| { assert_eq!(v.join(sep).len(), s.len() * 10 + sep.len() * 9); }) } #[bench] fn bench_contains_short_short(b: &mut Bencher) { let haystack = "Lorem ipsum dolor sit amet, consectetur adipiscing elit."; let needle = "sit"; b.iter(|| { assert!(haystack.contains(needle)); }) } #[bench] fn bench_contains_short_long(b: &mut Bencher) { let haystack = "\ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem sit amet dolor \ ultricies condimentum. Praesent iaculis purus elit, ac malesuada quam malesuada in. Duis sed orci \ eros. Suspendisse sit amet magna mollis, mollis nunc luctus, imperdiet mi. Integer fringilla non \ sem ut lacinia. Fusce varius tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec \ tempus vel, gravida nec quam. In est dui, tincidunt sed tempus interdum, adipiscing laoreet ante. Etiam tempor, tellus quis \ sagittis interdum, nulla purus mattis sem, quis auctor erat odio ac tellus. In nec nunc sit amet \ diam volutpat molestie at sed ipsum. Vestibulum laoreet consequat vulputate. Integer accumsan \ lorem ac dignissim placerat. Suspendisse convallis faucibus lorem. Aliquam erat volutpat. In vel \ eleifend felis. Sed suscipit nulla lorem, sed mollis est sollicitudin et. Nam fermentum egestas \ interdum. Curabitur ut nisi justo. Sed sollicitudin ipsum tellus, ut condimentum leo eleifend nec. Cras ut velit ante. Phasellus nec \ mollis odio. Mauris molestie erat in arcu mattis, at aliquet dolor vehicula. Quisque malesuada \ lectus sit amet nisi pretium, a condimentum ipsum porta. Morbi at dapibus diam. Praesent egestas \ est sed risus elementum, eu rutrum metus ultrices. Etiam fermentum consectetur magna, id rutrum \ felis accumsan a. Aliquam ut pellentesque libero. Sed mi nulla, lobortis eu tortor id, suscipit \ ultricies neque. Morbi iaculis sit amet risus at iaculis. Praesent eget ligula quis turpis \ feugiat suscipit vel non arcu. Interdum et malesuada fames ac ante ipsum primis in faucibus. \ Aliquam sit amet placerat lorem. Cras a lacus vel ante posuere elementum. Nunc est leo, bibendum ut facilisis vel, bibendum at \ mauris. Nullam adipiscing diam vel odio ornare, luctus adipiscing mi luctus. Nulla facilisi. \ Mauris adipiscing bibendum neque, quis adipiscing lectus tempus et. Sed feugiat erat et nisl \ lobortis pharetra. Donec vitae erat enim. Nullam sit amet felis et quam lacinia tincidunt. Aliquam \ suscipit dapibus urna. Sed volutpat urna in magna pulvinar volutpat. Phasellus nec tellus ac diam \ cursus accumsan. Nam lectus enim, dapibus non nisi tempor, consectetur convallis massa. Maecenas eleifend dictum \ feugiat. Etiam quis mauris vel risus luctus mattis a a nunc. Nullam orci quam, imperdiet id \ vehicula in, porttitor ut nibh. Duis sagittis adipiscing nisl vitae congue. Donec mollis risus eu \ leo suscipit, varius porttitor nulla porta. Pellentesque ut sem nec nisi euismod vehicula. 
Nulla \ malesuada sollicitudin quam eu fermentum."; let needle = "english"; b.iter(|| { assert!(!haystack.contains(needle)); }) }
#[bench] fn bench_contains_bad_naive(b: &mut Bencher) { let haystack = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"; let needle = "aaaaaaaab"; b.iter(|| { assert!(!haystack.contains(needle)); }) }
#[bench] fn bench_contains_equal(b: &mut Bencher) { let haystack = "Lorem ipsum dolor sit amet, consectetur adipiscing elit."; let needle = "Lorem ipsum dolor sit amet, consectetur adipiscing elit."; b.iter(|| { assert!(haystack.contains(needle)); }) }
macro_rules! make_test_inner { ($s:ident, $code:expr, $name:ident, $str:expr, $iters:expr) => { #[bench] fn $name(bencher: &mut Bencher) { let mut $s = $str; black_box(&mut $s); bencher.iter(|| { for _ in 0..$iters { black_box($code); } }); } }; }
macro_rules! make_test { ($name:ident, $s:ident, $code:expr) => { make_test!($name, $s, $code, 1); }; ($name:ident, $s:ident, $code:expr, $iters:expr) => { mod $name { use test::Bencher; use test::black_box;
// Short strings: 65 bytes each
make_test_inner!($s, $code, short_ascii, "Mary had a little lamb, Little lamb Mary had a littl lamb, lamb!", $iters);
make_test_inner!($s, $code, short_mixed, "ศไทย中华Việt Nam; Mary had a little lamb, Little lam!", $iters);
make_test_inner!($s, $code, short_pile_of_poo, "💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩!", $iters);
make_test_inner!($s, $code, long_lorem_ipsum,"\ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem sit amet dolor \ ultricies condimentum. Praesent iaculis purus elit, ac malesuada quam malesuada in. Duis sed orci \ eros. Suspendisse sit amet magna mollis, mollis nunc luctus, imperdiet mi. Integer fringilla non \ sem ut lacinia. Fusce varius tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec \ tempus vel, gravida nec quam. In est dui, tincidunt sed tempus interdum, adipiscing laoreet ante. Etiam tempor, tellus quis \ sagittis interdum, nulla purus mattis sem, quis auctor erat odio ac tellus. In nec nunc sit amet \ diam volutpat molestie at sed ipsum. Vestibulum laoreet consequat vulputate. Integer accumsan \ lorem ac dignissim placerat. Suspendisse convallis faucibus lorem. Aliquam erat volutpat. In vel \ eleifend felis. Sed suscipit nulla lorem, sed mollis est sollicitudin et. Nam fermentum egestas \ interdum. Curabitur ut nisi justo. Sed sollicitudin ipsum tellus, ut condimentum leo eleifend nec. Cras ut velit ante. Phasellus nec \ mollis odio. Mauris molestie erat in arcu mattis, at aliquet dolor vehicula. Quisque malesuada \ lectus sit amet nisi pretium, a condimentum ipsum porta. Morbi at dapibus diam. Praesent egestas \ est sed risus elementum, eu rutrum metus ultrices. Etiam fermentum consectetur magna, id rutrum \ felis accumsan a. Aliquam ut pellentesque libero. Sed mi nulla, lobortis eu tortor id, suscipit \ ultricies neque. Morbi iaculis sit amet risus at iaculis. Praesent eget ligula quis turpis \ feugiat suscipit vel non arcu. Interdum et malesuada fames ac ante ipsum primis in faucibus. \ Aliquam sit amet placerat lorem. Cras a lacus vel ante posuere elementum. Nunc est leo, bibendum ut facilisis vel, bibendum at \ mauris. Nullam adipiscing diam vel odio ornare, luctus adipiscing mi luctus. Nulla facilisi. \ Mauris adipiscing bibendum neque, quis adipiscing lectus tempus et. Sed feugiat erat et nisl \ lobortis pharetra. Donec vitae erat enim. Nullam sit amet felis et quam lacinia tincidunt. Aliquam \ suscipit dapibus urna.
Sed volutpat urna in magna pulvinar volutpat. Phasellus nec tellus ac diam \ cursus accumsan. Nam lectus enim, dapibus non nisi tempor, consectetur convallis massa. Maecenas eleifend dictum \ feugiat. Etiam quis mauris vel risus luctus mattis a a nunc. Nullam orci quam, imperdiet id \ vehicula in, porttitor ut nibh. Duis sagittis adipiscing nisl vitae congue. Donec mollis risus eu \ leo suscipit, varius porttitor nulla porta. Pellentesque ut sem nec nisi euismod vehicula. Nulla \ malesuada sollicitudin quam eu fermentum!", $iters); } } }
make_test!(chars_count, s, s.chars().count());
make_test!(contains_bang_str, s, s.contains("!"));
make_test!(contains_bang_char, s, s.contains('!'));
make_test!(match_indices_a_str, s, s.match_indices("a").count());
make_test!(split_a_str, s, s.split("a").count());
make_test!(trim_ascii_char, s, { s.trim_matches(|c: char| c.is_ascii()) });
make_test!(trim_start_ascii_char, s, { s.trim_start_matches(|c: char| c.is_ascii()) });
make_test!(trim_end_ascii_char, s, { s.trim_end_matches(|c: char| c.is_ascii()) });
make_test!(find_underscore_char, s, s.find('_'));
make_test!(rfind_underscore_char, s, s.rfind('_'));
make_test!(find_underscore_str, s, s.find("_"));
make_test!(find_zzz_char, s, s.find('\u{1F4A4}'));
make_test!(rfind_zzz_char, s, s.rfind('\u{1F4A4}'));
make_test!(find_zzz_str, s, s.find("\u{1F4A4}"));
make_test!(starts_with_ascii_char, s, s.starts_with('/'), 1024);
make_test!(ends_with_ascii_char, s, s.ends_with('/'), 1024);
make_test!(starts_with_unichar, s, s.starts_with('\u{1F4A4}'), 1024);
make_test!(ends_with_unichar, s, s.ends_with('\u{1F4A4}'), 1024);
make_test!(starts_with_str, s, s.starts_with("💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩"), 1024);
make_test!(ends_with_str, s, s.ends_with("💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩"), 1024);
make_test!(split_space_char, s, s.split(' ').count());
make_test!(split_terminator_space_char, s, s.split_terminator(' ').count());
make_test!(splitn_space_char, s, s.splitn(10, ' ').count());
make_test!(rsplitn_space_char, s, s.rsplitn(10, ' ').count());
make_test!(split_space_str, s, s.split(" ").count());
make_test!(split_ad_str, s, s.split("ad").count());
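// Illustration (hand-expanded sketch, not part of the original suite): an invocation
// such as `make_test!(chars_count, s, s.chars().count())` defines a module with one
// benchmark per input string. Its `short_ascii` case comes out roughly as below; the
// module name `chars_count_expanded` is hypothetical, chosen to avoid clashing with
// the macro-generated `chars_count` module.
mod chars_count_expanded {
    use test::{black_box, Bencher};

    #[bench]
    fn short_ascii(bencher: &mut Bencher) {
        // The input is bound to a mutable local and black_box'd so the optimizer
        // cannot constant-fold the measured expression.
        let mut s = "Mary had a little lamb, Little lamb Mary had a littl lamb, lamb!";
        black_box(&mut s);
        // `$iters` defaults to 1, so the inner loop runs the expression once per iteration.
        bencher.iter(|| {
            for _ in 0..1 {
                black_box(s.chars().count());
            }
        });
    }
}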
"; assert_eq!(100, s.len()); b.iter(|| { let _ = String::from_utf8_lossy(s); }); } #[bench] fn from_utf8_lossy_100_multibyte(b: &mut Bencher) { let s = "𐌀𐌖𐌋𐌄𐌑𐌉ปรدولة الكويتทศไทย中华𐍅𐌿𐌻𐍆𐌹𐌻𐌰".as_bytes(); assert_eq!(100, s.len()); b.iter(|| { let _ = String::from_utf8_lossy(s); }); } #[bench] fn from_utf8_lossy_invalid(b: &mut Bencher) { let s = b"Hello\xC0\x80 There\xE6\x83 Goodbye"; b.iter(|| { let _ = String::from_utf8_lossy(s); }); } #[bench] fn from_utf8_lossy_100_invalid(b: &mut Bencher) { let s = repeat(0xf5).take(100).collect::>(); b.iter(|| { let _ = String::from_utf8_lossy(&s); }); } #[bench] fn bench_exact_size_shrink_to_fit(b: &mut Bencher) { let s = "Hello there, the quick brown fox jumped over the lazy dog! \ Lorem ipsum dolor sit amet, consectetur. "; // ensure our operation produces an exact-size string before we benchmark it let mut r = String::with_capacity(s.len()); r.push_str(s); assert_eq!(r.len(), r.capacity()); b.iter(|| { let mut r = String::with_capacity(s.len()); r.push_str(s); r.shrink_to_fit(); r }); } #[bench] fn bench_from_str(b: &mut Bencher) { let s = "Hello there, the quick brown fox jumped over the lazy dog! \ Lorem ipsum dolor sit amet, consectetur. "; b.iter(|| String::from(s)) } #[bench] fn bench_from(b: &mut Bencher) { let s = "Hello there, the quick brown fox jumped over the lazy dog! \ Lorem ipsum dolor sit amet, consectetur. "; b.iter(|| String::from(s)) } #[bench] fn bench_to_string(b: &mut Bencher) { let s = "Hello there, the quick brown fox jumped over the lazy dog! \ Lorem ipsum dolor sit amet, consectetur. "; b.iter(|| s.to_string()) } #[bench] fn bench_insert_char_short(b: &mut Bencher) { let s = "Hello, World!"; b.iter(|| { let mut x = String::from(s); black_box(&mut x).insert(6, black_box(' ')); x }) } #[bench] fn bench_insert_char_long(b: &mut Bencher) { let s = "Hello, World!"; b.iter(|| { let mut x = String::from(s); black_box(&mut x).insert(6, black_box('❤')); x }) } #[bench] fn bench_insert_str_short(b: &mut Bencher) { let s = "Hello, World!"; b.iter(|| { let mut x = String::from(s); black_box(&mut x).insert_str(6, black_box(" ")); x }) } #[bench] fn bench_insert_str_long(b: &mut Bencher) { let s = "Hello, World!"; b.iter(|| { let mut x = String::from(s); black_box(&mut x).insert_str(6, black_box(" rustic ")); x }) } use rand::RngCore; use std::iter::{repeat, FromIterator}; use test::{black_box, Bencher}; #[bench] fn bench_new(b: &mut Bencher) { b.iter(|| Vec::::new()) } fn do_bench_with_capacity(b: &mut Bencher, src_len: usize) { b.bytes = src_len as u64; b.iter(|| Vec::::with_capacity(src_len)) } #[bench] fn bench_with_capacity_0000(b: &mut Bencher) { do_bench_with_capacity(b, 0) } #[bench] fn bench_with_capacity_0010(b: &mut Bencher) { do_bench_with_capacity(b, 10) } #[bench] fn bench_with_capacity_0100(b: &mut Bencher) { do_bench_with_capacity(b, 100) } #[bench] fn bench_with_capacity_1000(b: &mut Bencher) { do_bench_with_capacity(b, 1000) } fn do_bench_from_fn(b: &mut Bencher, src_len: usize) { b.bytes = src_len as u64; b.iter(|| (0..src_len).collect::>()) } #[bench] fn bench_from_fn_0000(b: &mut Bencher) { do_bench_from_fn(b, 0) } #[bench] fn bench_from_fn_0010(b: &mut Bencher) { do_bench_from_fn(b, 10) } #[bench] fn bench_from_fn_0100(b: &mut Bencher) { do_bench_from_fn(b, 100) } #[bench] fn bench_from_fn_1000(b: &mut Bencher) { do_bench_from_fn(b, 1000) } fn do_bench_from_elem(b: &mut Bencher, src_len: usize) { b.bytes = src_len as u64; b.iter(|| repeat(5).take(src_len).collect::>()) } #[bench] fn bench_from_elem_0000(b: &mut 
Bencher) { do_bench_from_elem(b, 0) } #[bench] fn bench_from_elem_0010(b: &mut Bencher) { do_bench_from_elem(b, 10) } #[bench] fn bench_from_elem_0100(b: &mut Bencher) { do_bench_from_elem(b, 100) } #[bench] fn bench_from_elem_1000(b: &mut Bencher) { do_bench_from_elem(b, 1000) } fn do_bench_from_slice(b: &mut Bencher, src_len: usize) { let src: Vec<_> = FromIterator::from_iter(0..src_len); b.bytes = src_len as u64; b.iter(|| src.as_slice().to_vec()); } #[bench] fn bench_from_slice_0000(b: &mut Bencher) { do_bench_from_slice(b, 0) } #[bench] fn bench_from_slice_0010(b: &mut Bencher) { do_bench_from_slice(b, 10) } #[bench] fn bench_from_slice_0100(b: &mut Bencher) { do_bench_from_slice(b, 100) } #[bench] fn bench_from_slice_1000(b: &mut Bencher) { do_bench_from_slice(b, 1000) } fn do_bench_from_iter(b: &mut Bencher, src_len: usize) { let src: Vec<_> = FromIterator::from_iter(0..src_len); b.bytes = src_len as u64; b.iter(|| { let dst: Vec<_> = FromIterator::from_iter(src.iter().cloned()); dst }); } #[bench] fn bench_from_iter_0000(b: &mut Bencher) { do_bench_from_iter(b, 0) } #[bench] fn bench_from_iter_0010(b: &mut Bencher) { do_bench_from_iter(b, 10) } #[bench] fn bench_from_iter_0100(b: &mut Bencher) { do_bench_from_iter(b, 100) } #[bench] fn bench_from_iter_1000(b: &mut Bencher) { do_bench_from_iter(b, 1000) } fn do_bench_extend(b: &mut Bencher, dst_len: usize, src_len: usize) { let dst: Vec<_> = FromIterator::from_iter(0..dst_len); let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len); b.bytes = src_len as u64; b.iter(|| { let mut dst = dst.clone(); dst.extend(src.clone()); dst }); } #[bench] fn bench_extend_0000_0000(b: &mut Bencher) { do_bench_extend(b, 0, 0) } #[bench] fn bench_extend_0000_0010(b: &mut Bencher) { do_bench_extend(b, 0, 10) } #[bench] fn bench_extend_0000_0100(b: &mut Bencher) { do_bench_extend(b, 0, 100) } #[bench] fn bench_extend_0000_1000(b: &mut Bencher) { do_bench_extend(b, 0, 1000) } #[bench] fn bench_extend_0010_0010(b: &mut Bencher) { do_bench_extend(b, 10, 10) } #[bench] fn bench_extend_0100_0100(b: &mut Bencher) { do_bench_extend(b, 100, 100) } #[bench] fn bench_extend_1000_1000(b: &mut Bencher) { do_bench_extend(b, 1000, 1000) } fn do_bench_extend_from_slice(b: &mut Bencher, dst_len: usize, src_len: usize) { let dst: Vec<_> = FromIterator::from_iter(0..dst_len); let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len); b.bytes = src_len as u64; b.iter(|| { let mut dst = dst.clone(); dst.extend_from_slice(&src); dst }); } #[bench] fn bench_extend_recycle(b: &mut Bencher) { let mut data = vec![0; 1000]; b.iter(|| { let tmp = std::mem::take(&mut data); let mut to_extend = black_box(Vec::new()); to_extend.extend(tmp.into_iter()); data = black_box(to_extend); }); black_box(data); } #[bench] fn bench_extend_from_slice_0000_0000(b: &mut Bencher) { do_bench_extend_from_slice(b, 0, 0) } #[bench] fn bench_extend_from_slice_0000_0010(b: &mut Bencher) { do_bench_extend_from_slice(b, 0, 10) } #[bench] fn bench_extend_from_slice_0000_0100(b: &mut Bencher) { do_bench_extend_from_slice(b, 0, 100) } #[bench] fn bench_extend_from_slice_0000_1000(b: &mut Bencher) { do_bench_extend_from_slice(b, 0, 1000) } #[bench] fn bench_extend_from_slice_0010_0010(b: &mut Bencher) { do_bench_extend_from_slice(b, 10, 10) } #[bench] fn bench_extend_from_slice_0100_0100(b: &mut Bencher) { do_bench_extend_from_slice(b, 100, 100) } #[bench] fn bench_extend_from_slice_1000_1000(b: &mut Bencher) { do_bench_extend_from_slice(b, 1000, 1000) } fn do_bench_clone(b: &mut 
Bencher, src_len: usize) { let src: Vec<usize> = FromIterator::from_iter(0..src_len); b.bytes = src_len as u64; b.iter(|| src.clone()); }
#[bench] fn bench_clone_0000(b: &mut Bencher) { do_bench_clone(b, 0) }
#[bench] fn bench_clone_0010(b: &mut Bencher) { do_bench_clone(b, 10) }
#[bench] fn bench_clone_0100(b: &mut Bencher) { do_bench_clone(b, 100) }
#[bench] fn bench_clone_1000(b: &mut Bencher) { do_bench_clone(b, 1000) }
fn do_bench_clone_from(b: &mut Bencher, times: usize, dst_len: usize, src_len: usize) { let dst: Vec<_> = FromIterator::from_iter(0..src_len); let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len); b.bytes = (times * src_len) as u64; b.iter(|| { let mut dst = dst.clone(); for _ in 0..times { dst.clone_from(&src); dst = black_box(dst); } dst }); }
#[bench] fn bench_clone_from_01_0000_0000(b: &mut Bencher) { do_bench_clone_from(b, 1, 0, 0) }
#[bench] fn bench_clone_from_01_0000_0010(b: &mut Bencher) { do_bench_clone_from(b, 1, 0, 10) }
#[bench] fn bench_clone_from_01_0000_0100(b: &mut Bencher) { do_bench_clone_from(b, 1, 0, 100) }
#[bench] fn bench_clone_from_01_0000_1000(b: &mut Bencher) { do_bench_clone_from(b, 1, 0, 1000) }
#[bench] fn bench_clone_from_01_0010_0010(b: &mut Bencher) { do_bench_clone_from(b, 1, 10, 10) }
#[bench] fn bench_clone_from_01_0100_0100(b: &mut Bencher) { do_bench_clone_from(b, 1, 100, 100) }
#[bench] fn bench_clone_from_01_1000_1000(b: &mut Bencher) { do_bench_clone_from(b, 1, 1000, 1000) }
#[bench] fn bench_clone_from_01_0010_0100(b: &mut Bencher) { do_bench_clone_from(b, 1, 10, 100) }
#[bench] fn bench_clone_from_01_0100_1000(b: &mut Bencher) { do_bench_clone_from(b, 1, 100, 1000) }
#[bench] fn bench_clone_from_01_0010_0000(b: &mut Bencher) { do_bench_clone_from(b, 1, 10, 0) }
#[bench] fn bench_clone_from_01_0100_0010(b: &mut Bencher) { do_bench_clone_from(b, 1, 100, 10) }
#[bench] fn bench_clone_from_01_1000_0100(b: &mut Bencher) { do_bench_clone_from(b, 1, 1000, 100) }
#[bench] fn bench_clone_from_10_0000_0000(b: &mut Bencher) { do_bench_clone_from(b, 10, 0, 0) }
#[bench] fn bench_clone_from_10_0000_0010(b: &mut Bencher) { do_bench_clone_from(b, 10, 0, 10) }
#[bench] fn bench_clone_from_10_0000_0100(b: &mut Bencher) { do_bench_clone_from(b, 10, 0, 100) }
#[bench] fn bench_clone_from_10_0000_1000(b: &mut Bencher) { do_bench_clone_from(b, 10, 0, 1000) }
#[bench] fn bench_clone_from_10_0010_0010(b: &mut Bencher) { do_bench_clone_from(b, 10, 10, 10) }
#[bench] fn bench_clone_from_10_0100_0100(b: &mut Bencher) { do_bench_clone_from(b, 10, 100, 100) }
#[bench] fn bench_clone_from_10_1000_1000(b: &mut Bencher) { do_bench_clone_from(b, 10, 1000, 1000) }
#[bench] fn bench_clone_from_10_0010_0100(b: &mut Bencher) { do_bench_clone_from(b, 10, 10, 100) }
#[bench] fn bench_clone_from_10_0100_1000(b: &mut Bencher) { do_bench_clone_from(b, 10, 100, 1000) }
#[bench] fn bench_clone_from_10_0010_0000(b: &mut Bencher) { do_bench_clone_from(b, 10, 10, 0) }
#[bench] fn bench_clone_from_10_0100_0010(b: &mut Bencher) { do_bench_clone_from(b, 10, 100, 10) }
#[bench] fn bench_clone_from_10_1000_0100(b: &mut Bencher) { do_bench_clone_from(b, 10, 1000, 100) }
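// Illustration (assumption, hypothetical test not present upstream): `clone_from` is
// benchmarked separately from `clone` because it may reuse the destination's existing
// buffer instead of allocating a fresh one; either way it must leave `dst` equal to `src`.
#[test]
fn clone_from_matches_clone() {
    let src: Vec<usize> = (0..100).collect();
    let mut dst: Vec<usize> = (0..1000).collect();
    dst.clone_from(&src);
    assert_eq!(dst, src.clone());
}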
macro_rules! bench_in_place { ($($fname:ident, $type:ty, $count:expr, $init:expr);*) => { $( #[bench] fn $fname(b: &mut Bencher) { b.iter(|| { let src: Vec<$type> = black_box(vec![$init; $count]); src.into_iter().enumerate().map(|(idx, e)| idx as $type ^ e).collect::<Vec<$type>>() }); } )+ }; }
bench_in_place![
    bench_in_place_xxu8_0010_i0, u8, 10, 0;
    bench_in_place_xxu8_0100_i0, u8, 100, 0;
    bench_in_place_xxu8_1000_i0, u8, 1000, 0;
    bench_in_place_xxu8_0010_i1, u8, 10, 1;
    bench_in_place_xxu8_0100_i1, u8, 100, 1;
    bench_in_place_xxu8_1000_i1, u8, 1000, 1;
    bench_in_place_xu32_0010_i0, u32, 10, 0;
    bench_in_place_xu32_0100_i0, u32, 100, 0;
    bench_in_place_xu32_1000_i0, u32, 1000, 0;
    bench_in_place_xu32_0010_i1, u32, 10, 1;
    bench_in_place_xu32_0100_i1, u32, 100, 1;
    bench_in_place_xu32_1000_i1, u32, 1000, 1;
    bench_in_place_u128_0010_i0, u128, 10, 0;
    bench_in_place_u128_0100_i0, u128, 100, 0;
    bench_in_place_u128_1000_i0, u128, 1000, 0;
    bench_in_place_u128_0010_i1, u128, 10, 1;
    bench_in_place_u128_0100_i1, u128, 100, 1;
    bench_in_place_u128_1000_i1, u128, 1000, 1
];
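// Illustration (hand-expanded sketch, not part of the original suite): the first
// `bench_in_place!` entry above, `bench_in_place_xxu8_0010_i0, u8, 10, 0`, expands to
// roughly the benchmark below; the name `bench_in_place_example` is hypothetical,
// chosen so it does not clash with the macro-generated function.
#[bench]
fn bench_in_place_example(b: &mut Bencher) {
    b.iter(|| {
        // The source vector is consumed by `into_iter`, so `collect` can use the
        // in-place iteration specialization and reuse the original allocation.
        let src: Vec<u8> = black_box(vec![0; 10]);
        src.into_iter().enumerate().map(|(idx, e)| idx as u8 ^ e).collect::<Vec<u8>>()
    });
}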
#[bench] fn bench_in_place_recycle(b: &mut Bencher) { let mut data = vec![0; 1000]; b.iter(|| { let tmp = std::mem::take(&mut data); data = black_box( tmp.into_iter().enumerate().map(|(idx, e)| idx.wrapping_add(e)).fuse().peekable().collect::<Vec<usize>>(), ); }); }
#[bench] fn bench_in_place_zip_recycle(b: &mut Bencher) { let mut data = vec![0u8; 1000]; let mut rng = rand::thread_rng(); let mut subst = vec![0u8; 1000]; rng.fill_bytes(&mut subst[..]); b.iter(|| { let tmp = std::mem::take(&mut data); let mangled = tmp.into_iter().zip(subst.iter().copied()).enumerate().map(|(i, (d, s))| d.wrapping_add(i as u8) ^ s).collect::<Vec<u8>>(); data = black_box(mangled); }); }
#[bench] fn bench_in_place_zip_iter_mut(b: &mut Bencher) { let mut data = vec![0u8; 256]; let mut rng = rand::thread_rng(); let mut subst = vec![0u8; 1000]; rng.fill_bytes(&mut subst[..]); b.iter(|| { data.iter_mut().enumerate().for_each(|(i, d)| { *d = d.wrapping_add(i as u8) ^ subst[i]; }); }); black_box(data); }
pub fn vec_cast<T, U>(input: Vec<T>) -> Vec<U> { input.into_iter().map(|e| unsafe { std::mem::transmute_copy(&e) }).collect() }
#[bench] fn bench_transmute(b: &mut Bencher) { let mut vec = vec![10u32; 100]; b.bytes = 800; // 2 casts x 4 bytes x 100
b.iter(|| { let v = std::mem::take(&mut vec); let v = black_box(vec_cast::<u32, i32>(v)); let v = black_box(vec_cast::<i32, u32>(v)); vec = v; }); }
#[derive(Clone)] struct Droppable(usize);
impl Drop for Droppable { fn drop(&mut self) { black_box(self); } }
#[bench] fn bench_in_place_collect_droppable(b: &mut Bencher) { let v: Vec<Droppable> = std::iter::repeat_with(|| Droppable(0)).take(1000).collect(); b.iter(|| { v.clone().into_iter().skip(100).enumerate().map(|(i, e)| Droppable(i ^ e.0)).collect::<Vec<_>>() }) }
const LEN: usize = 16384;
#[bench] fn bench_chain_collect(b: &mut Bencher) { let data = black_box([0; LEN]); b.iter(|| data.iter().cloned().chain([1].iter().cloned()).collect::<Vec<u32>>()); }
#[bench] fn bench_chain_chain_collect(b: &mut Bencher) { let data = black_box([0; LEN]); b.iter(|| { data.iter().cloned().chain([1].iter().cloned()).chain([2].iter().cloned()).collect::<Vec<u32>>() }); }
#[bench] fn bench_nest_chain_chain_collect(b: &mut Bencher) { let data = black_box([0; LEN]); b.iter(|| { data.iter().cloned().chain([1].iter().chain([2].iter()).cloned()).collect::<Vec<u32>>() }); }
#[bench] fn bench_range_map_collect(b: &mut Bencher) { b.iter(|| (0..LEN).map(|_| u32::default()).collect::<Vec<u32>>()); }
#[bench] fn bench_chain_extend_ref(b: &mut Bencher) { let data = black_box([0; LEN]); b.iter(|| { let mut v = Vec::<u32>::with_capacity(data.len() + 1); v.extend(data.iter().chain([1].iter())); v }); }
#[bench] fn bench_chain_extend_value(b: &mut Bencher) { let data = black_box([0; LEN]); b.iter(|| { let mut v = Vec::<u32>::with_capacity(data.len() + 1); v.extend(data.iter().cloned().chain(Some(1))); v }); }
#[bench] fn bench_rev_1(b: &mut Bencher) { let data = black_box([0; LEN]); b.iter(|| { let mut v = Vec::<u32>::new(); v.extend(data.iter().rev()); v }); }
#[bench] fn bench_rev_2(b: &mut Bencher) { let data = black_box([0; LEN]); b.iter(|| { let mut v = Vec::<u32>::with_capacity(data.len()); v.extend(data.iter().rev()); v }); }
#[bench] fn bench_map_regular(b: &mut Bencher) { let data = black_box([(0, 0); LEN]); b.iter(|| { let mut v = Vec::<u32>::new(); v.extend(data.iter().map(|t| t.1)); v }); }
#[bench] fn bench_map_fast(b: &mut Bencher) { let data = black_box([(0, 0); LEN]); b.iter(|| { let mut result = Vec::with_capacity(data.len()); for i in 0..data.len() { unsafe { *result.get_unchecked_mut(i) = data[i].0; result.set_len(i); } } result }); }
fn random_sorted_fill(mut seed: u32, buf: &mut [u32]) { let mask = if buf.len() < 8192 { 0xFF } else if buf.len() < 200_000 { 0xFFFF } else { 0xFFFF_FFFF }; for item in buf.iter_mut() { seed ^= seed << 13; seed ^= seed >> 17; seed ^= seed << 5; *item = seed & mask; } buf.sort(); }
fn bench_vec_dedup_old(b: &mut Bencher, sz: usize) { let mut template = vec![0u32; sz]; b.bytes = std::mem::size_of_val(template.as_slice()) as u64; random_sorted_fill(0x43, &mut template); let mut vec = template.clone(); b.iter(|| { let len = { let (dedup, _) = vec.partition_dedup(); dedup.len() }; vec.truncate(len); black_box(vec.first()); vec.clear(); vec.extend_from_slice(&template); }); }
fn bench_vec_dedup_new(b: &mut Bencher, sz: usize) { let mut template = vec![0u32; sz]; b.bytes = std::mem::size_of_val(template.as_slice()) as u64; random_sorted_fill(0x43, &mut template); let mut vec = template.clone(); b.iter(|| { vec.dedup(); black_box(vec.first()); vec.clear(); vec.extend_from_slice(&template); }); }
#[bench] fn bench_dedup_old_100(b: &mut Bencher) { bench_vec_dedup_old(b, 100); }
#[bench] fn bench_dedup_new_100(b: &mut Bencher) { bench_vec_dedup_new(b, 100); }
#[bench] fn bench_dedup_old_1000(b: &mut Bencher) { bench_vec_dedup_old(b, 1000); }
#[bench] fn bench_dedup_new_1000(b: &mut Bencher) { bench_vec_dedup_new(b, 1000); }
#[bench] fn bench_dedup_old_10000(b: &mut Bencher) { bench_vec_dedup_old(b, 10000); }
#[bench] fn bench_dedup_new_10000(b: &mut Bencher) { bench_vec_dedup_new(b, 10000); }
#[bench] fn bench_dedup_old_100000(b: &mut Bencher) { bench_vec_dedup_old(b, 100000); }
#[bench] fn bench_dedup_new_100000(b: &mut Bencher) { bench_vec_dedup_new(b, 100000); }
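// Illustration (assumption, hypothetical test not present upstream): both dedup
// strategies benchmarked above remove consecutive duplicates. `partition_dedup`
// (behind the `slice_partition_dedup` feature enabled just below) returns the
// deduplicated prefix and shuffles the duplicates to the tail, so truncating to
// that prefix matches what a single `dedup` call produces.
#[test]
fn dedup_strategies_agree() {
    let mut a = vec![1, 1, 2, 3, 3];
    let len = a.partition_dedup().0.len();
    a.truncate(len);
    let mut b = vec![1, 1, 2, 3, 3];
    b.dedup();
    assert_eq!(a, b);
}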
// Disabling on android for the time being
// See https://github.com/rust-lang/rust/issues/73535#event-3477699747
#![cfg(not(target_os = "android"))]
#![feature(btree_drain_filter)]
#![feature(map_first_last)]
#![feature(repr_simd)]
#![feature(slice_partition_dedup)]
#![feature(test)]
extern crate test;
mod binary_heap;
mod btree;
mod linked_list;
mod slice;
mod str;
mod string;
mod vec;
mod vec_deque;
use std::collections::BTreeMap;
use std::iter::Iterator;
use std::ops::RangeBounds;
use std::vec::Vec;
use rand::{seq::SliceRandom, thread_rng, Rng};
use test::{black_box, Bencher};
macro_rules! map_insert_rand_bench { ($name: ident, $n: expr, $map: ident) => { #[bench] pub fn $name(b: &mut Bencher) { let n: usize = $n; let mut map = $map::new(); // setup
let mut rng = thread_rng(); for _ in 0..n { let i = rng.gen::<usize>() % n; map.insert(i, i); } // measure
b.iter(|| { let k = rng.gen::<usize>() % n; map.insert(k, k); map.remove(&k); }); black_box(map); } }; }
macro_rules! map_insert_seq_bench { ($name: ident, $n: expr, $map: ident) => { #[bench] pub fn $name(b: &mut Bencher) { let mut map = $map::new(); let n: usize = $n; // setup
for i in 0..n { map.insert(i * 2, i * 2); } // measure
let mut i = 1; b.iter(|| { map.insert(i, i); map.remove(&i); i = (i + 2) % n; }); black_box(map); } }; }
macro_rules! map_find_rand_bench { ($name: ident, $n: expr, $map: ident) => { #[bench] pub fn $name(b: &mut Bencher) { let mut map = $map::new(); let n: usize = $n; // setup
let mut rng = thread_rng(); let mut keys: Vec<_> = (0..n).map(|_| rng.gen::<usize>() % n).collect(); for &k in &keys { map.insert(k, k); } keys.shuffle(&mut rng); // measure
let mut i = 0; b.iter(|| { let t = map.get(&keys[i]); i = (i + 1) % n; black_box(t); }) } }; }
macro_rules! map_find_seq_bench { ($name: ident, $n: expr, $map: ident) => { #[bench] pub fn $name(b: &mut Bencher) { let mut map = $map::new(); let n: usize = $n; // setup
for i in 0..n { map.insert(i, i); } // measure
let mut i = 0; b.iter(|| { let x = map.get(&i); i = (i + 1) % n; black_box(x); }) } }; }
map_insert_rand_bench! {insert_rand_100, 100, BTreeMap}
map_insert_rand_bench! {insert_rand_10_000, 10_000, BTreeMap}
map_insert_seq_bench! {insert_seq_100, 100, BTreeMap}
map_insert_seq_bench! {insert_seq_10_000, 10_000, BTreeMap}
map_find_rand_bench! {find_rand_100, 100, BTreeMap}
map_find_rand_bench! {find_rand_10_000, 10_000, BTreeMap}
map_find_seq_bench! {find_seq_100, 100, BTreeMap}
map_find_seq_bench! {find_seq_10_000, 10_000, BTreeMap}
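// Illustration (hand-expanded sketch, not part of the original file): the invocation
// `map_insert_seq_bench! {insert_seq_100, 100, BTreeMap}` above generates roughly the
// benchmark below; the name `insert_seq_100_expanded` is hypothetical, chosen to avoid
// clashing with the generated one.
#[bench]
pub fn insert_seq_100_expanded(b: &mut Bencher) {
    let mut map = BTreeMap::new();
    let n: usize = 100;
    // setup: fill the even keys so every measured insert lands between existing entries
    for i in 0..n {
        map.insert(i * 2, i * 2);
    }
    // measure: insert and remove odd keys, keeping the map size constant across iterations
    let mut i = 1;
    b.iter(|| {
        map.insert(i, i);
        map.remove(&i);
        i = (i + 2) % n;
    });
    black_box(map);
}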
fn bench_iteration(b: &mut Bencher, size: i32) { let mut map = BTreeMap::<i32, i32>::new(); let mut rng = thread_rng(); for _ in 0..size { map.insert(rng.gen(), rng.gen()); } b.iter(|| { for entry in &map { black_box(entry); } }); }
#[bench] pub fn iteration_20(b: &mut Bencher) { bench_iteration(b, 20); }
#[bench] pub fn iteration_1000(b: &mut Bencher) { bench_iteration(b, 1000); }
#[bench] pub fn iteration_100000(b: &mut Bencher) { bench_iteration(b, 100000); }
fn bench_iteration_mut(b: &mut Bencher, size: i32) { let mut map = BTreeMap::<i32, i32>::new(); let mut rng = thread_rng(); for _ in 0..size { map.insert(rng.gen(), rng.gen()); } b.iter(|| { for kv in map.iter_mut() { black_box(kv); } }); }
#[bench] pub fn iteration_mut_20(b: &mut Bencher) { bench_iteration_mut(b, 20); }
#[bench] pub fn iteration_mut_1000(b: &mut Bencher) { bench_iteration_mut(b, 1000); }
#[bench] pub fn iteration_mut_100000(b: &mut Bencher) { bench_iteration_mut(b, 100000); }
fn bench_first_and_last(b: &mut Bencher, size: i32) { let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect(); b.iter(|| { for _ in 0..10 { black_box(map.first_key_value()); black_box(map.last_key_value()); } }); }
#[bench] pub fn first_and_last_0(b: &mut Bencher) { bench_first_and_last(b, 0); }
#[bench] pub fn first_and_last_100(b: &mut Bencher) { bench_first_and_last(b, 100); }
#[bench] pub fn first_and_last_10k(b: &mut Bencher) { bench_first_and_last(b, 10_000); }
const BENCH_RANGE_SIZE: i32 = 145;
const BENCH_RANGE_COUNT: i32 = BENCH_RANGE_SIZE * (BENCH_RANGE_SIZE - 1) / 2;
fn bench_range<F, R>(b: &mut Bencher, f: F) where F: Fn(i32, i32) -> R, R: RangeBounds<i32>, { let map: BTreeMap<_, _> = (0..BENCH_RANGE_SIZE).map(|i| (i, i)).collect(); b.iter(|| { let mut c = 0; for i in 0..BENCH_RANGE_SIZE { for j in i + 1..BENCH_RANGE_SIZE { black_box(map.range(f(i, j))); c += 1; } } debug_assert_eq!(c, BENCH_RANGE_COUNT); }); }
#[bench] pub fn range_included_excluded(b: &mut Bencher) { bench_range(b, |i, j| i..j); }
#[bench] pub fn range_included_included(b: &mut Bencher) { bench_range(b, |i, j| i..=j); }
#[bench] pub fn range_included_unbounded(b: &mut Bencher) { bench_range(b, |i, _| i..); }
#[bench] pub fn range_unbounded_unbounded(b: &mut Bencher) { bench_range(b, |_, _| ..); }
fn bench_iter(b: &mut Bencher, repeats: i32, size: i32) { let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect(); b.iter(|| { for _ in 0..repeats { black_box(map.iter()); } }); }
/// Contrast range_unbounded_unbounded with `iter()`.
#[bench] pub fn range_unbounded_vs_iter(b: &mut Bencher) { bench_iter(b, BENCH_RANGE_COUNT, BENCH_RANGE_SIZE); }
#[bench] pub fn iter_0(b: &mut Bencher) { bench_iter(b, 1_000, 0); }
#[bench] pub fn iter_1(b: &mut Bencher) { bench_iter(b, 1_000, 1); }
#[bench] pub fn iter_100(b: &mut Bencher) { bench_iter(b, 1_000, 100); }
#[bench] pub fn iter_10k(b: &mut Bencher) { bench_iter(b, 1_000, 10_000); }
#[bench] pub fn iter_1m(b: &mut Bencher) { bench_iter(b, 1_000, 1_000_000); }
const FAT: usize = 256;
// The returned map has small keys and values.
// Benchmarks on it have a counterpart in set.rs with the same keys and no values at all.
fn slim_map(n: usize) -> BTreeMap<usize, usize> { (0..n).map(|i| (i, i)).collect::<BTreeMap<_, _>>() }
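// Scale note (illustrative addition, not upstream): with FAT = 256 each fat value is
// 256 * size_of::<usize>() bytes, i.e. 2 KiB on 64-bit targets, so cloning the
// 100-entry fat map below moves roughly 200 KiB of values.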
// The returned map has small keys and large values.
fn fat_val_map(n: usize) -> BTreeMap<usize, [usize; FAT]> { (0..n).map(|i| (i, [i; FAT])).collect::<BTreeMap<_, _>>() }
#[bench] pub fn clone_slim_100(b: &mut Bencher) { let src = slim_map(100); b.iter(|| src.clone()) }
#[bench] pub fn clone_slim_100_and_clear(b: &mut Bencher) { let src = slim_map(100); b.iter(|| src.clone().clear()) }
#[bench] pub fn clone_slim_100_and_drain_all(b: &mut Bencher) { let src = slim_map(100); b.iter(|| src.clone().drain_filter(|_, _| true).count()) }
#[bench] pub fn clone_slim_100_and_drain_half(b: &mut Bencher) { let src = slim_map(100); b.iter(|| { let mut map = src.clone(); assert_eq!(map.drain_filter(|i, _| i % 2 == 0).count(), 100 / 2); assert_eq!(map.len(), 100 / 2); }) }
#[bench] pub fn clone_slim_100_and_into_iter(b: &mut Bencher) { let src = slim_map(100); b.iter(|| src.clone().into_iter().count()) }
#[bench] pub fn clone_slim_100_and_pop_all(b: &mut Bencher) { let src = slim_map(100); b.iter(|| { let mut map = src.clone(); while map.pop_first().is_some() {} map }); }
#[bench] pub fn clone_slim_100_and_remove_all(b: &mut Bencher) { let src = slim_map(100); b.iter(|| { let mut map = src.clone(); while let Some(elt) = map.iter().map(|(&i, _)| i).next() { let v = map.remove(&elt); debug_assert!(v.is_some()); } map }); }
#[bench] pub fn clone_slim_100_and_remove_half(b: &mut Bencher) { let src = slim_map(100); b.iter(|| { let mut map = src.clone(); for i in (0..100).step_by(2) { let v = map.remove(&i); debug_assert!(v.is_some()); } assert_eq!(map.len(), 100 / 2); map }) }
#[bench] pub fn clone_slim_10k(b: &mut Bencher) { let src = slim_map(10_000); b.iter(|| src.clone()) }
#[bench] pub fn clone_slim_10k_and_clear(b: &mut Bencher) { let src = slim_map(10_000); b.iter(|| src.clone().clear()) }
#[bench] pub fn clone_slim_10k_and_drain_all(b: &mut Bencher) { let src = slim_map(10_000); b.iter(|| src.clone().drain_filter(|_, _| true).count()) }
#[bench] pub fn clone_slim_10k_and_drain_half(b: &mut Bencher) { let src = slim_map(10_000); b.iter(|| { let mut map = src.clone(); assert_eq!(map.drain_filter(|i, _| i % 2 == 0).count(), 10_000 / 2); assert_eq!(map.len(), 10_000 / 2); }) }
#[bench] pub fn clone_slim_10k_and_into_iter(b: &mut Bencher) { let src = slim_map(10_000); b.iter(|| src.clone().into_iter().count()) }
#[bench] pub fn clone_slim_10k_and_pop_all(b: &mut Bencher) { let src = slim_map(10_000); b.iter(|| { let mut map = src.clone(); while map.pop_first().is_some() {} map }); }
#[bench] pub fn clone_slim_10k_and_remove_all(b: &mut Bencher) { let src = slim_map(10_000); b.iter(|| { let mut map = src.clone(); while let Some(elt) = map.iter().map(|(&i, _)| i).next() { let v = map.remove(&elt); debug_assert!(v.is_some()); } map }); }
#[bench] pub fn clone_slim_10k_and_remove_half(b: &mut Bencher) { let src = slim_map(10_000); b.iter(|| { let mut map = src.clone(); for i in (0..10_000).step_by(2) { let v = map.remove(&i); debug_assert!(v.is_some()); } assert_eq!(map.len(), 10_000 / 2); map }) }
#[bench] pub fn clone_fat_val_100(b: &mut Bencher) { let src = fat_val_map(100); b.iter(|| src.clone()) }
#[bench] pub fn clone_fat_val_100_and_clear(b: &mut Bencher) { let src = fat_val_map(100); b.iter(|| src.clone().clear()) }
#[bench] pub fn clone_fat_val_100_and_drain_all(b: &mut Bencher) { let src = fat_val_map(100); b.iter(|| src.clone().drain_filter(|_, _| true).count()) }
#[bench] pub fn clone_fat_val_100_and_drain_half(b: &mut Bencher) { let src = fat_val_map(100); b.iter(|| { let mut map = src.clone(); assert_eq!(map.drain_filter(|i, _| i % 2 == 0).count(), 100 / 2); assert_eq!(map.len(), 100 / 2); }) }
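// Illustration (assumption, hypothetical test not present upstream): the half-drain
// benchmarks rely on `drain_filter` (feature `btree_drain_filter`) removing and
// yielding exactly the entries whose predicate returns true, leaving the rest intact.
#[test]
fn drain_filter_removes_matching_entries() {
    let mut map: BTreeMap<usize, usize> = (0..4).map(|i| (i, i)).collect();
    let evens: Vec<_> = map.drain_filter(|k, _| k % 2 == 0).map(|(k, _)| k).collect();
    assert_eq!(evens, [0, 2]);
    assert_eq!(map.len(), 2);
}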
#[bench] pub fn clone_fat_val_100_and_into_iter(b: &mut Bencher) { let src = fat_val_map(100); b.iter(|| src.clone().into_iter().count()) }
#[bench] pub fn clone_fat_val_100_and_pop_all(b: &mut Bencher) { let src = fat_val_map(100); b.iter(|| { let mut map = src.clone(); while map.pop_first().is_some() {} map }); }
#[bench] pub fn clone_fat_val_100_and_remove_all(b: &mut Bencher) { let src = fat_val_map(100); b.iter(|| { let mut map = src.clone(); while let Some(elt) = map.iter().map(|(&i, _)| i).next() { let v = map.remove(&elt); debug_assert!(v.is_some()); } map }); }
#[bench] pub fn clone_fat_val_100_and_remove_half(b: &mut Bencher) { let src = fat_val_map(100); b.iter(|| { let mut map = src.clone(); for i in (0..100).step_by(2) { let v = map.remove(&i); debug_assert!(v.is_some()); } assert_eq!(map.len(), 100 / 2); map }) }
use std::collections::BTreeSet;
use rand::{thread_rng, Rng};
use test::Bencher;
fn random(n: usize) -> BTreeSet<usize> { let mut rng = thread_rng(); let mut set = BTreeSet::new(); while set.len() < n { set.insert(rng.gen()); } assert_eq!(set.len(), n); set }
fn neg(n: usize) -> BTreeSet<i32> { let set: BTreeSet<i32> = (-(n as i32)..=-1).collect(); assert_eq!(set.len(), n); set }
fn pos(n: usize) -> BTreeSet<i32> { let set: BTreeSet<i32> = (1..=(n as i32)).collect(); assert_eq!(set.len(), n); set }
fn stagger(n1: usize, factor: usize) -> [BTreeSet<u32>; 2] { let n2 = n1 * factor; let mut sets = [BTreeSet::new(), BTreeSet::new()]; for i in 0..(n1 + n2) { let b = i % (factor + 1) != 0; sets[b as usize].insert(i as u32); } assert_eq!(sets[0].len(), n1); assert_eq!(sets[1].len(), n2); sets }
macro_rules! set_bench { ($name: ident, $set_func: ident, $result_func: ident, $sets: expr) => { #[bench] pub fn $name(b: &mut Bencher) { // setup
let sets = $sets; // measure
b.iter(|| sets[0].$set_func(&sets[1]).$result_func()) } }; }
fn slim_set(n: usize) -> BTreeSet<usize> { (0..n).collect::<BTreeSet<_>>() }
#[bench] pub fn clone_100(b: &mut Bencher) { let src = slim_set(100); b.iter(|| src.clone()) }
#[bench] pub fn clone_100_and_clear(b: &mut Bencher) { let src = slim_set(100); b.iter(|| src.clone().clear()) }
#[bench] pub fn clone_100_and_drain_all(b: &mut Bencher) { let src = slim_set(100); b.iter(|| src.clone().drain_filter(|_| true).count()) }
#[bench] pub fn clone_100_and_drain_half(b: &mut Bencher) { let src = slim_set(100); b.iter(|| { let mut set = src.clone(); assert_eq!(set.drain_filter(|i| i % 2 == 0).count(), 100 / 2); assert_eq!(set.len(), 100 / 2); }) }
#[bench] pub fn clone_100_and_into_iter(b: &mut Bencher) { let src = slim_set(100); b.iter(|| src.clone().into_iter().count()) }
#[bench] pub fn clone_100_and_pop_all(b: &mut Bencher) { let src = slim_set(100); b.iter(|| { let mut set = src.clone(); while set.pop_first().is_some() {} set }); }
#[bench] pub fn clone_100_and_remove_all(b: &mut Bencher) { let src = slim_set(100); b.iter(|| { let mut set = src.clone(); while let Some(elt) = set.iter().copied().next() { let ok = set.remove(&elt); debug_assert!(ok); } set }); }
#[bench] pub fn clone_100_and_remove_half(b: &mut Bencher) { let src = slim_set(100); b.iter(|| { let mut set = src.clone(); for i in (0..100).step_by(2) { let ok = set.remove(&i); debug_assert!(ok); } assert_eq!(set.len(), 100 / 2); set }) }
#[bench] pub fn clone_10k(b: &mut Bencher) { let src = slim_set(10_000); b.iter(|| src.clone()) }
#[bench] pub fn clone_10k_and_clear(b: &mut Bencher) { let src = slim_set(10_000); b.iter(|| src.clone().clear()) }
#[bench] pub fn clone_10k_and_drain_all(b: &mut Bencher) { let src =
slim_set(10_000); b.iter(|| src.clone().drain_filter(|_| true).count()) } #[bench] pub fn clone_10k_and_drain_half(b: &mut Bencher) { let src = slim_set(10_000); b.iter(|| { let mut set = src.clone(); assert_eq!(set.drain_filter(|i| i % 2 == 0).count(), 10_000 / 2); assert_eq!(set.len(), 10_000 / 2); }) } #[bench] pub fn clone_10k_and_into_iter(b: &mut Bencher) { let src = slim_set(10_000); b.iter(|| src.clone().into_iter().count()) } #[bench] pub fn clone_10k_and_pop_all(b: &mut Bencher) { let src = slim_set(10_000); b.iter(|| { let mut set = src.clone(); while set.pop_first().is_some() {} set }); } #[bench] pub fn clone_10k_and_remove_all(b: &mut Bencher) { let src = slim_set(10_000); b.iter(|| { let mut set = src.clone(); while let Some(elt) = set.iter().copied().next() { let ok = set.remove(&elt); debug_assert!(ok); } set }); } #[bench] pub fn clone_10k_and_remove_half(b: &mut Bencher) { let src = slim_set(10_000); b.iter(|| { let mut set = src.clone(); for i in (0..10_000).step_by(2) { let ok = set.remove(&i); debug_assert!(ok); } assert_eq!(set.len(), 10_000 / 2); set }) } set_bench! {intersection_100_neg_vs_100_pos, intersection, count, [neg(100), pos(100)]} set_bench! {intersection_100_neg_vs_10k_pos, intersection, count, [neg(100), pos(10_000)]} set_bench! {intersection_100_pos_vs_100_neg, intersection, count, [pos(100), neg(100)]} set_bench! {intersection_100_pos_vs_10k_neg, intersection, count, [pos(100), neg(10_000)]} set_bench! {intersection_10k_neg_vs_100_pos, intersection, count, [neg(10_000), pos(100)]} set_bench! {intersection_10k_neg_vs_10k_pos, intersection, count, [neg(10_000), pos(10_000)]} set_bench! {intersection_10k_pos_vs_100_neg, intersection, count, [pos(10_000), neg(100)]} set_bench! {intersection_10k_pos_vs_10k_neg, intersection, count, [pos(10_000), neg(10_000)]} set_bench! {intersection_random_100_vs_100, intersection, count, [random(100), random(100)]} set_bench! {intersection_random_100_vs_10k, intersection, count, [random(100), random(10_000)]} set_bench! {intersection_random_10k_vs_100, intersection, count, [random(10_000), random(100)]} set_bench! {intersection_random_10k_vs_10k, intersection, count, [random(10_000), random(10_000)]} set_bench! {intersection_staggered_100_vs_100, intersection, count, stagger(100, 1)} set_bench! {intersection_staggered_10k_vs_10k, intersection, count, stagger(10_000, 1)} set_bench! {intersection_staggered_100_vs_10k, intersection, count, stagger(100, 100)} set_bench! {difference_random_100_vs_100, difference, count, [random(100), random(100)]} set_bench! {difference_random_100_vs_10k, difference, count, [random(100), random(10_000)]} set_bench! {difference_random_10k_vs_100, difference, count, [random(10_000), random(100)]} set_bench! {difference_random_10k_vs_10k, difference, count, [random(10_000), random(10_000)]} set_bench! {difference_staggered_100_vs_100, difference, count, stagger(100, 1)} set_bench! {difference_staggered_10k_vs_10k, difference, count, stagger(10_000, 1)} set_bench! {difference_staggered_100_vs_10k, difference, count, stagger(100, 100)} set_bench! {is_subset_100_vs_100, is_subset, clone, [pos(100), pos(100)]} set_bench! {is_subset_100_vs_10k, is_subset, clone, [pos(100), pos(10_000)]} set_bench! {is_subset_10k_vs_100, is_subset, clone, [pos(10_000), pos(100)]} set_bench! 
{is_subset_10k_vs_10k, is_subset, clone, [pos(10_000), pos(10_000)]}
mod map;
mod set;
use std::{mem, ptr};
use rand::distributions::{Alphanumeric, Standard};
use rand::{thread_rng, Rng, SeedableRng};
use rand_xorshift::XorShiftRng;
use test::{black_box, Bencher};
#[bench] fn iterator(b: &mut Bencher) { // peculiar numbers to stop LLVM from optimising the summation out
let v: Vec<_> = (0..100).map(|i| i ^ (i << 1) ^ (i >> 1)).collect(); b.iter(|| { let mut sum = 0; for x in &v { sum += *x; } // sum == 11806, to stop dead code elimination
if sum == 0 { panic!() } }) }
#[bench] fn mut_iterator(b: &mut Bencher) { let mut v = vec![0; 100]; b.iter(|| { let mut i = 0; for x in &mut v { *x = i; i += 1; } }) }
#[bench] fn concat(b: &mut Bencher) { let xss: Vec<Vec<u32>> = (0..100).map(|i| (0..i).collect()).collect(); b.iter(|| { xss.concat(); }); }
#[bench] fn join(b: &mut Bencher) { let xss: Vec<Vec<u32>> = (0..100).map(|i| (0..i).collect()).collect(); b.iter(|| xss.join(&0)); }
#[bench] fn push(b: &mut Bencher) { let mut vec = Vec::<u32>::new(); b.iter(|| { vec.push(0); black_box(&vec); }); }
#[bench] fn starts_with_same_vector(b: &mut Bencher) { let vec: Vec<_> = (0..100).collect(); b.iter(|| vec.starts_with(&vec)) }
#[bench] fn starts_with_single_element(b: &mut Bencher) { let vec: Vec<_> = vec![0]; b.iter(|| vec.starts_with(&vec)) }
#[bench] fn starts_with_diff_one_element_at_end(b: &mut Bencher) { let vec: Vec<_> = (0..100).collect(); let mut match_vec: Vec<_> = (0..99).collect(); match_vec.push(0); b.iter(|| vec.starts_with(&match_vec)) }
#[bench] fn ends_with_same_vector(b: &mut Bencher) { let vec: Vec<_> = (0..100).collect(); b.iter(|| vec.ends_with(&vec)) }
#[bench] fn ends_with_single_element(b: &mut Bencher) { let vec: Vec<_> = vec![0]; b.iter(|| vec.ends_with(&vec)) }
#[bench] fn ends_with_diff_one_element_at_beginning(b: &mut Bencher) { let vec: Vec<_> = (0..100).collect(); let mut match_vec: Vec<_> = (0..100).collect(); match_vec[0] = 200; b.iter(|| vec.starts_with(&match_vec)) }
#[bench] fn contains_last_element(b: &mut Bencher) { let vec: Vec<_> = (0..100).collect(); b.iter(|| vec.contains(&99)) }
#[bench] fn zero_1kb_from_elem(b: &mut Bencher) { b.iter(|| vec![0u8; 1024]); }
#[bench] fn zero_1kb_set_memory(b: &mut Bencher) { b.iter(|| { let mut v = Vec::<u8>::with_capacity(1024); unsafe { let vp = v.as_mut_ptr(); ptr::write_bytes(vp, 0, 1024); v.set_len(1024); } v }); }
#[bench] fn zero_1kb_loop_set(b: &mut Bencher) { b.iter(|| { let mut v = Vec::<u8>::with_capacity(1024); unsafe { v.set_len(1024); } for i in 0..1024 { v[i] = 0; } }); }
#[bench] fn zero_1kb_mut_iter(b: &mut Bencher) { b.iter(|| { let mut v = Vec::<u8>::with_capacity(1024); unsafe { v.set_len(1024); } for x in &mut v { *x = 0; } v }); }
#[bench] fn random_inserts(b: &mut Bencher) { let mut rng = thread_rng(); b.iter(|| { let mut v = vec![(0, 0); 30]; for _ in 0..100 { let l = v.len(); v.insert(rng.gen::<usize>() % (l + 1), (1, 1)); } }) }
#[bench] fn random_removes(b: &mut Bencher) { let mut rng = thread_rng(); b.iter(|| { let mut v = vec![(0, 0); 130]; for _ in 0..100 { let l = v.len(); v.remove(rng.gen::<usize>() % l); } }) }
fn gen_ascending(len: usize) -> Vec<u64> { (0..len as u64).collect() }
fn gen_descending(len: usize) -> Vec<u64> { (0..len as u64).rev().collect() }
const SEED: [u8; 16] = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15];
fn gen_random(len: usize) -> Vec<u64> { let mut rng = XorShiftRng::from_seed(SEED); (&mut rng).sample_iter(&Standard).take(len).collect() }
fn gen_random_bytes(len: usize) -> Vec<u8> { let mut rng = XorShiftRng::from_seed(SEED); (&mut rng).sample_iter(&Standard).take(len).collect() }
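// Sanity check (illustrative, hypothetical test not present upstream): every generator
// reseeds its XorShiftRng from the same SEED, so the "random" benchmark inputs are
// deterministic and repeated runs sort identical data.
#[test]
fn generators_are_deterministic() {
    assert_eq!(gen_random(8), gen_random(8));
    assert_eq!(gen_random_bytes(8), gen_random_bytes(8));
}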
fn gen_mostly_ascending(len: usize) -> Vec<u64> { let mut rng = XorShiftRng::from_seed(SEED); let mut v = gen_ascending(len); for _ in (0usize..).take_while(|x| x * x <= len) { let x = rng.gen::<usize>() % len; let y = rng.gen::<usize>() % len; v.swap(x, y); } v }
fn gen_mostly_descending(len: usize) -> Vec<u64> { let mut rng = XorShiftRng::from_seed(SEED); let mut v = gen_descending(len); for _ in (0usize..).take_while(|x| x * x <= len) { let x = rng.gen::<usize>() % len; let y = rng.gen::<usize>() % len; v.swap(x, y); } v }
fn gen_strings(len: usize) -> Vec<String> { let mut rng = XorShiftRng::from_seed(SEED); let mut v = vec![]; for _ in 0..len { let n = rng.gen::<usize>() % 20 + 1; v.push((&mut rng).sample_iter(&Alphanumeric).take(n).collect()); } v }
fn gen_big_random(len: usize) -> Vec<[u64; 16]> { let mut rng = XorShiftRng::from_seed(SEED); (&mut rng).sample_iter(&Standard).map(|x| [x; 16]).take(len).collect() }
macro_rules! sort { ($f:ident, $name:ident, $gen:expr, $len:expr) => { #[bench] fn $name(b: &mut Bencher) { let v = $gen($len); b.iter(|| v.clone().$f()); b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64; } }; }
macro_rules! sort_strings { ($f:ident, $name:ident, $gen:expr, $len:expr) => { #[bench] fn $name(b: &mut Bencher) { let v = $gen($len); let v = v.iter().map(|s| &**s).collect::<Vec<&str>>(); b.iter(|| v.clone().$f()); b.bytes = $len * mem::size_of::<&str>() as u64; } }; }
macro_rules! sort_expensive { ($f:ident, $name:ident, $gen:expr, $len:expr) => { #[bench] fn $name(b: &mut Bencher) { let v = $gen($len); b.iter(|| { let mut v = v.clone(); let mut count = 0; v.$f(|a: &u64, b: &u64| { count += 1; if count % 1_000_000_000 == 0 { panic!("should not happen"); } (*a as f64).cos().partial_cmp(&(*b as f64).cos()).unwrap() }); black_box(count); }); b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64; } }; }
macro_rules!
sort_lexicographic { ($f:ident, $name:ident, $gen:expr, $len:expr) => { #[bench] fn $name(b: &mut Bencher) { let v = $gen($len); b.iter(|| v.clone().$f(|x| x.to_string())); b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64; } }; }
sort!(sort, sort_small_ascending, gen_ascending, 10);
sort!(sort, sort_small_descending, gen_descending, 10);
sort!(sort, sort_small_random, gen_random, 10);
sort!(sort, sort_small_big, gen_big_random, 10);
sort!(sort, sort_medium_random, gen_random, 100);
sort!(sort, sort_large_ascending, gen_ascending, 10000);
sort!(sort, sort_large_descending, gen_descending, 10000);
sort!(sort, sort_large_mostly_ascending, gen_mostly_ascending, 10000);
sort!(sort, sort_large_mostly_descending, gen_mostly_descending, 10000);
sort!(sort, sort_large_random, gen_random, 10000);
sort!(sort, sort_large_big, gen_big_random, 10000);
sort_strings!(sort, sort_large_strings, gen_strings, 10000);
sort_expensive!(sort_by, sort_large_expensive, gen_random, 10000);
sort!(sort_unstable, sort_unstable_small_ascending, gen_ascending, 10);
sort!(sort_unstable, sort_unstable_small_descending, gen_descending, 10);
sort!(sort_unstable, sort_unstable_small_random, gen_random, 10);
sort!(sort_unstable, sort_unstable_small_big, gen_big_random, 10);
sort!(sort_unstable, sort_unstable_medium_random, gen_random, 100);
sort!(sort_unstable, sort_unstable_large_ascending, gen_ascending, 10000);
sort!(sort_unstable, sort_unstable_large_descending, gen_descending, 10000);
sort!(sort_unstable, sort_unstable_large_mostly_ascending, gen_mostly_ascending, 10000);
sort!(sort_unstable, sort_unstable_large_mostly_descending, gen_mostly_descending, 10000);
sort!(sort_unstable, sort_unstable_large_random, gen_random, 10000);
sort!(sort_unstable, sort_unstable_large_big, gen_big_random, 10000);
sort_strings!(sort_unstable, sort_unstable_large_strings, gen_strings, 10000);
sort_expensive!(sort_unstable_by, sort_unstable_large_expensive, gen_random, 10000);
sort_lexicographic!(sort_by_key, sort_by_key_lexicographic, gen_random, 10000);
sort_lexicographic!(sort_unstable_by_key, sort_unstable_by_key_lexicographic, gen_random, 10000);
sort_lexicographic!(sort_by_cached_key, sort_by_cached_key_lexicographic, gen_random, 10000);
macro_rules! reverse { ($name:ident, $ty:ty, $f:expr) => { #[bench] fn $name(b: &mut Bencher) { // odd length and offset by 1 to be as unaligned as possible
let n = 0xFFFFF; let mut v: Vec<_> = (0..1 + (n / mem::size_of::<$ty>() as u64)).map($f).collect(); b.iter(|| black_box(&mut v[1..]).reverse()); b.bytes = n; } }; }
reverse!(reverse_u8, u8, |x| x as u8);
reverse!(reverse_u16, u16, |x| x as u16);
reverse!(reverse_u8x3, [u8; 3], |x| [x as u8, (x >> 8) as u8, (x >> 16) as u8]);
reverse!(reverse_u32, u32, |x| x as u32);
reverse!(reverse_u64, u64, |x| x as u64);
reverse!(reverse_u128, u128, |x| x as u128);
#[repr(simd)] struct F64x4(f64, f64, f64, f64);
reverse!(reverse_simd_f64x4, F64x4, |x| { let x = x as f64; F64x4(x, x, x, x) });
macro_rules!
rotate { ($name:ident, $gen:expr, $len:expr, $mid:expr) => { #[bench] fn $name(b: &mut Bencher) { let size = mem::size_of_val(&$gen(1)[0]); let mut v = $gen($len * 8 / size); b.iter(|| black_box(&mut v).rotate_left(($mid * 8 + size - 1) / size)); b.bytes = (v.len() * size) as u64; } }; }
rotate!(rotate_tiny_by1, gen_random, 16, 1);
rotate!(rotate_tiny_half, gen_random, 16, 16 / 2);
rotate!(rotate_tiny_half_plus_one, gen_random, 16, 16 / 2 + 1);
rotate!(rotate_medium_by1, gen_random, 9158, 1);
rotate!(rotate_medium_by727_u64, gen_random, 9158, 727);
rotate!(rotate_medium_by727_bytes, gen_random_bytes, 9158, 727);
rotate!(rotate_medium_by727_strings, gen_strings, 9158, 727);
rotate!(rotate_medium_half, gen_random, 9158, 9158 / 2);
rotate!(rotate_medium_half_plus_one, gen_random, 9158, 9158 / 2 + 1);
// Intended to use more RAM than the machine has cache
rotate!(rotate_huge_by1, gen_random, 5 * 1024 * 1024, 1);
rotate!(rotate_huge_by9199_u64, gen_random, 5 * 1024 * 1024, 9199);
rotate!(rotate_huge_by9199_bytes, gen_random_bytes, 5 * 1024 * 1024, 9199);
rotate!(rotate_huge_by9199_strings, gen_strings, 5 * 1024 * 1024, 9199);
rotate!(rotate_huge_by9199_big, gen_big_random, 5 * 1024 * 1024, 9199);
rotate!(rotate_huge_by1234577_u64, gen_random, 5 * 1024 * 1024, 1234577);
rotate!(rotate_huge_by1234577_bytes, gen_random_bytes, 5 * 1024 * 1024, 1234577);
rotate!(rotate_huge_by1234577_strings, gen_strings, 5 * 1024 * 1024, 1234577);
rotate!(rotate_huge_by1234577_big, gen_big_random, 5 * 1024 * 1024, 1234577);
rotate!(rotate_huge_half, gen_random, 5 * 1024 * 1024, 5 * 1024 * 1024 / 2);
rotate!(rotate_huge_half_plus_one, gen_random, 5 * 1024 * 1024, 5 * 1024 * 1024 / 2 + 1);
use std::collections::VecDeque;
use test::{black_box, Bencher};
#[bench] fn bench_new(b: &mut Bencher) { b.iter(|| { let ring: VecDeque<i32> = VecDeque::new(); black_box(ring); }) }
#[bench] fn bench_grow_1025(b: &mut Bencher) { b.iter(|| { let mut deq = VecDeque::new(); for i in 0..1025 { deq.push_front(i); } black_box(deq); }) }
#[bench] fn bench_iter_1000(b: &mut Bencher) { let ring: VecDeque<_> = (0..1000).collect(); b.iter(|| { let mut sum = 0; for &i in &ring { sum += i; } black_box(sum); }) }
#[bench] fn bench_mut_iter_1000(b: &mut Bencher) { let mut ring: VecDeque<_> = (0..1000).collect(); b.iter(|| { let mut sum = 0; for i in &mut ring { sum += *i; } black_box(sum); }) }
#[bench] fn bench_try_fold(b: &mut Bencher) { let ring: VecDeque<_> = (0..1000).collect(); b.iter(|| black_box(ring.iter().try_fold(0, |a, b| Some(a + b)))) }
use std::collections::BinaryHeap;
use rand::{seq::SliceRandom, thread_rng};
use test::{black_box, Bencher};
#[bench] fn bench_find_smallest_1000(b: &mut Bencher) { let mut rng = thread_rng(); let mut vec: Vec<u32> = (0..100_000).collect(); vec.shuffle(&mut rng); b.iter(|| { let mut iter = vec.iter().copied(); let mut heap: BinaryHeap<_> = iter.by_ref().take(1000).collect(); for x in iter { let mut max = heap.peek_mut().unwrap();
// This comparison should be true only 1% of the time.
// Unnecessary `sift_down`s will degrade performance.
if x < *max { *max = x; } } heap }) }
#[bench] fn bench_peek_mut_deref_mut(b: &mut Bencher) { let mut bheap = BinaryHeap::from(vec![42]); let vec: Vec<u32> = (0..1_000_000).collect(); b.iter(|| { let vec = black_box(&vec); let mut peek_mut = bheap.peek_mut().unwrap();
// The compiler shouldn't be able to optimize away the `sift_down`
// assignment in `PeekMut`'s `DerefMut` implementation since
// the loop may not run.
for &i in vec.iter() { *peek_mut = i; }
// Remove the already minimal overhead of the sift_down
std::mem::forget(peek_mut); }) }
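// Illustration (assumption, hypothetical test not present upstream): `peek_mut`
// hands out a `PeekMut` guard; writing through it may break the heap property,
// which is restored by a `sift_down` when the guard is dropped. `mem::forget`,
// as used above, skips exactly that restore step.
#[test]
fn peek_mut_sifts_down_on_drop() {
    let mut heap = BinaryHeap::from(vec![3, 1, 2]);
    *heap.peek_mut().unwrap() = 0; // overwrite the current maximum (3) with 0
    // dropping the guard re-heapifies, so the new maximum is 2
    assert_eq!(heap.peek(), Some(&2));
}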
#[bench] fn bench_peek_mut_deref_mut(b: &mut Bencher) { let mut bheap = BinaryHeap::from(vec![42]); let vec: Vec<u32> = (0..1_000_000).collect(); b.iter(|| { let vec = black_box(&vec); let mut peek_mut = bheap.peek_mut().unwrap();
// The compiler shouldn't be able to optimize away the `sift_down`
// assignment in `PeekMut`'s `DerefMut` implementation since
// the loop may not run.
for &i in vec.iter() { *peek_mut = i; }
// Remove the already minimal overhead of the sift_down
std::mem::forget(peek_mut); }) }
#[bench] fn bench_from_vec(b: &mut Bencher) { let mut rng = thread_rng(); let mut vec: Vec<u32> = (0..100_000).collect(); vec.shuffle(&mut rng); b.iter(|| BinaryHeap::from(vec.clone())) }
#[bench] fn bench_into_sorted_vec(b: &mut Bencher) { let bheap: BinaryHeap<i32> = (0..10_000).collect(); b.iter(|| bheap.clone().into_sorted_vec()) }
#[bench] fn bench_push(b: &mut Bencher) { let mut bheap = BinaryHeap::with_capacity(50_000); let mut rng = thread_rng(); let mut vec: Vec<u32> = (0..50_000).collect(); vec.shuffle(&mut rng); b.iter(|| { for &i in vec.iter() { bheap.push(i); } black_box(&mut bheap); bheap.clear(); }) }
#[bench] fn bench_pop(b: &mut Bencher) { let mut bheap = BinaryHeap::with_capacity(10_000); b.iter(|| { bheap.extend((0..10_000).rev()); black_box(&mut bheap); while let Some(elem) = bheap.pop() { black_box(elem); } }) }
use std::{collections::VecDeque, time::Instant};
const VECDEQUE_LEN: i32 = 100000;
const WARMUP_N: usize = 100;
const BENCH_N: usize = 1000;
fn main() { let a: VecDeque<i32> = (0..VECDEQUE_LEN).collect(); let b: VecDeque<i32> = (0..VECDEQUE_LEN).collect(); for _ in 0..WARMUP_N { let mut c = a.clone(); let mut d = b.clone(); c.append(&mut d); } let mut durations = Vec::with_capacity(BENCH_N); for _ in 0..BENCH_N { let mut c = a.clone(); let mut d = b.clone(); let before = Instant::now(); c.append(&mut d); let after = Instant::now(); durations.push(after.duration_since(before)); } let l = durations.len(); durations.sort(); assert!(BENCH_N % 2 == 0); let median = (durations[(l / 2) - 1] + durations[l / 2]) / 2; println!("\ncustom-bench vec_deque_append {:?} ns/iter\n", median.as_nanos()); }
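// NOTE (added): a minimal sketch of what the custom benchmark above measures.
// `VecDeque::append` moves every element out of `other`, leaving it empty;
// the timing loop therefore has to re-clone both deques on every iteration.
mod vec_deque_append_sketch {
    use std::collections::VecDeque;

    #[test]
    fn append_drains_the_source() {
        let mut c: VecDeque<i32> = (0..3).collect();
        let mut d: VecDeque<i32> = (3..6).collect();
        c.append(&mut d);
        assert_eq!(c, (0..6).collect::<VecDeque<i32>>());
        assert!(d.is_empty()); // `d` keeps its allocation but has no elements
    }
}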
use std::collections::LinkedList;
use std::panic::{catch_unwind, AssertUnwindSafe};
#[test] fn test_basic() { let mut m = LinkedList::<Box<_>>::new(); assert_eq!(m.pop_front(), None); assert_eq!(m.pop_back(), None); assert_eq!(m.pop_front(), None); m.push_front(box 1); assert_eq!(m.pop_front(), Some(box 1)); m.push_back(box 2); m.push_back(box 3); assert_eq!(m.len(), 2); assert_eq!(m.pop_front(), Some(box 2)); assert_eq!(m.pop_front(), Some(box 3)); assert_eq!(m.len(), 0); assert_eq!(m.pop_front(), None); m.push_back(box 1); m.push_back(box 3); m.push_back(box 5); m.push_back(box 7); assert_eq!(m.pop_front(), Some(box 1)); let mut n = LinkedList::new(); n.push_front(2); n.push_front(3); { assert_eq!(n.front().unwrap(), &3); let x = n.front_mut().unwrap(); assert_eq!(*x, 3); *x = 0; } { assert_eq!(n.back().unwrap(), &2); let y = n.back_mut().unwrap(); assert_eq!(*y, 2); *y = 1; } assert_eq!(n.pop_front(), Some(0)); assert_eq!(n.pop_front(), Some(1)); }
fn generate_test() -> LinkedList<i32> { list_from(&[0, 1, 2, 3, 4, 5, 6]) }
fn list_from<T: Clone>(v: &[T]) -> LinkedList<T> { v.iter().cloned().collect() }
#[test] fn test_split_off() {
// singleton
{ let mut m = LinkedList::new(); m.push_back(1); let p = m.split_off(0); assert_eq!(m.len(), 0); assert_eq!(p.len(), 1); assert_eq!(p.back(), Some(&1)); assert_eq!(p.front(), Some(&1)); }
// not singleton, forwards
{ let u = vec![1, 2, 3, 4, 5]; let mut m = list_from(&u); let mut n = m.split_off(2); assert_eq!(m.len(), 2); assert_eq!(n.len(), 3); for elt in 1..3 { assert_eq!(m.pop_front(), Some(elt)); } for elt in 3..6 { assert_eq!(n.pop_front(), Some(elt)); } }
// not singleton, backwards
{ let u = vec![1, 2, 3, 4, 5]; let mut m = list_from(&u); let mut n = m.split_off(4); assert_eq!(m.len(), 4); assert_eq!(n.len(), 1); for elt in 1..5 { assert_eq!(m.pop_front(), Some(elt)); } for elt in 5..6 { assert_eq!(n.pop_front(), Some(elt)); } }
// no-op on the last index
{ let mut m = LinkedList::new(); m.push_back(1); let p = m.split_off(1); assert_eq!(m.len(), 1); assert_eq!(p.len(), 0); assert_eq!(m.back(), Some(&1)); assert_eq!(m.front(), Some(&1)); } }
#[test] fn test_iterator() { let m = generate_test(); for (i, elt) in m.iter().enumerate() { assert_eq!(i as i32, *elt); } let mut n = LinkedList::new(); assert_eq!(n.iter().next(), None); n.push_front(4); let mut it = n.iter(); assert_eq!(it.size_hint(), (1, Some(1))); assert_eq!(it.next().unwrap(), &4); assert_eq!(it.size_hint(), (0, Some(0))); assert_eq!(it.next(), None); }
#[test] fn test_iterator_clone() { let mut n = LinkedList::new(); n.push_back(2); n.push_back(3); n.push_back(4); let mut it = n.iter(); it.next(); let mut jt = it.clone(); assert_eq!(it.next(), jt.next()); assert_eq!(it.next_back(), jt.next_back()); assert_eq!(it.next(), jt.next()); }
#[test] fn test_iterator_double_end() { let mut n = LinkedList::new(); assert_eq!(n.iter().next(), None); n.push_front(4); n.push_front(5); n.push_front(6); let mut it = n.iter(); assert_eq!(it.size_hint(), (3, Some(3))); assert_eq!(it.next().unwrap(), &6); assert_eq!(it.size_hint(), (2, Some(2))); assert_eq!(it.next_back().unwrap(), &4); assert_eq!(it.size_hint(), (1, Some(1))); assert_eq!(it.next_back().unwrap(), &5); assert_eq!(it.next_back(), None); assert_eq!(it.next(), None); }
#[test] fn test_rev_iter() { let m = generate_test(); for (i, elt) in m.iter().rev().enumerate() { assert_eq!((6 - i) as i32, *elt); } let mut n = LinkedList::new(); assert_eq!(n.iter().rev().next(), None); n.push_front(4); let mut it = n.iter().rev(); assert_eq!(it.size_hint(), (1, Some(1))); assert_eq!(it.next().unwrap(), &4); assert_eq!(it.size_hint(), (0, Some(0))); assert_eq!(it.next(), None); }
#[test] fn test_mut_iter() { let mut m = generate_test(); let mut len = m.len(); for (i, elt) in m.iter_mut().enumerate() { assert_eq!(i as i32, *elt); len -= 1; } assert_eq!(len, 0); let mut n = LinkedList::new(); assert!(n.iter_mut().next().is_none()); n.push_front(4); n.push_back(5); let mut it = n.iter_mut(); assert_eq!(it.size_hint(), (2, Some(2))); assert!(it.next().is_some()); assert!(it.next().is_some()); assert_eq!(it.size_hint(), (0, Some(0))); assert!(it.next().is_none()); }
#[test] fn test_iterator_mut_double_end() { let mut n = LinkedList::new(); assert!(n.iter_mut().next_back().is_none()); n.push_front(4); n.push_front(5); n.push_front(6); let mut it = n.iter_mut(); assert_eq!(it.size_hint(), (3, Some(3))); assert_eq!(*it.next().unwrap(), 6); assert_eq!(it.size_hint(), (2, Some(2))); assert_eq!(*it.next_back().unwrap(), 4); assert_eq!(it.size_hint(), (1, Some(1))); assert_eq!(*it.next_back().unwrap(), 5); assert!(it.next_back().is_none()); assert!(it.next().is_none()); }
#[test] fn test_mut_rev_iter() { let mut m = generate_test(); for (i, elt) in m.iter_mut().rev().enumerate() { assert_eq!((6 - i) as i32, *elt); } let mut n = LinkedList::new(); assert!(n.iter_mut().rev().next().is_none()); n.push_front(4); let mut it = n.iter_mut().rev(); assert!(it.next().is_some()); assert!(it.next().is_none()); }
#[test] fn test_eq() { let mut n = list_from(&[]); let mut m = list_from(&[]); assert!(n == m); n.push_front(1); assert!(n != m); m.push_back(1); assert!(n == m); let n = list_from(&[2, 3, 4]); let m = list_from(&[1, 2, 3]); assert!(n != m); }
#[test] fn test_hash() { use crate::hash; let mut x = LinkedList::new(); let mut y = LinkedList::new(); assert!(hash(&x) == hash(&y)); x.push_back(1); x.push_back(2); x.push_back(3); y.push_front(3); y.push_front(2); y.push_front(1); assert!(hash(&x) == hash(&y)); }
#[test] fn test_ord() { let n = list_from(&[]); let m = list_from(&[1, 2, 3]); assert!(n < m); assert!(m > n); assert!(n <= n); assert!(n >= n); }
#[test] fn test_ord_nan() { let nan = 0.0f64 / 0.0; let n = list_from(&[nan]); let m = list_from(&[nan]); assert!(!(n < m)); assert!(!(n > m)); assert!(!(n <= m)); assert!(!(n >= m)); let n = list_from(&[nan]); let one = list_from(&[1.0f64]); assert!(!(n < one)); assert!(!(n > one)); assert!(!(n <= one)); assert!(!(n >= one)); let u = list_from(&[1.0f64, 2.0, nan]); let v = list_from(&[1.0f64, 2.0, 3.0]); assert!(!(u < v)); assert!(!(u > v)); assert!(!(u <= v)); assert!(!(u >= v)); let s = list_from(&[1.0f64, 2.0, 4.0, 2.0]); let t = list_from(&[1.0f64, 2.0, 3.0, 2.0]); assert!(!(s < t)); assert!(s > one); assert!(!(s <= one)); assert!(s >= one); }
#[test] fn test_show() { let list: LinkedList<_> = (0..10).collect(); assert_eq!(format!("{:?}", list), "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]"); let list: LinkedList<_> = vec!["just", "one", "test", "more"].iter().cloned().collect(); assert_eq!(format!("{:?}", list), "[\"just\", \"one\", \"test\", \"more\"]"); }
#[test] fn test_extend_ref() { let mut a = LinkedList::new(); a.push_back(1); a.extend(&[2, 3, 4]); assert_eq!(a.len(), 4); assert_eq!(a, list_from(&[1, 2, 3, 4])); let mut b = LinkedList::new(); b.push_back(5); b.push_back(6); a.extend(&b); assert_eq!(a.len(), 6); assert_eq!(a, list_from(&[1, 2, 3, 4, 5, 6])); }
#[test] fn test_extend() { let mut a = LinkedList::new(); a.push_back(1); a.extend(vec![2, 3, 4]); // uses iterator
assert_eq!(a.len(), 4); assert!(a.iter().eq(&[1, 2, 3, 4])); let b: LinkedList<_> = vec![5, 6, 7].into_iter().collect(); a.extend(b); // specializes to `append`
assert_eq!(a.len(), 7); assert!(a.iter().eq(&[1, 2, 3, 4, 5, 6, 7])); }
#[test] fn test_contains() { let mut l = LinkedList::new(); l.extend(&[2, 3, 4]); assert!(l.contains(&3)); assert!(!l.contains(&1)); l.clear(); assert!(!l.contains(&3)); }
#[test] fn drain_filter_empty() { let mut list: LinkedList<i32> = LinkedList::new(); { let mut iter = list.drain_filter(|_| true); assert_eq!(iter.size_hint(), (0, Some(0))); assert_eq!(iter.next(), None); assert_eq!(iter.size_hint(), (0, Some(0))); assert_eq!(iter.next(), None); assert_eq!(iter.size_hint(), (0, Some(0))); } assert_eq!(list.len(), 0); assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![]); }
#[test] fn drain_filter_zst() { let mut list: LinkedList<_> = vec![(), (), (), (), ()].into_iter().collect(); let initial_len = list.len(); let mut count = 0; { let mut iter = list.drain_filter(|_| true); assert_eq!(iter.size_hint(), (0, Some(initial_len))); while let Some(_) = iter.next() { count += 1; assert_eq!(iter.size_hint(), (0, Some(initial_len - count))); } assert_eq!(iter.size_hint(), (0, Some(0))); assert_eq!(iter.next(), None); assert_eq!(iter.size_hint(), (0, Some(0))); } assert_eq!(count, initial_len); assert_eq!(list.len(), 0); assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![]); }
#[test] fn drain_filter_false() { let mut list: LinkedList<_> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect(); let initial_len = list.len(); let mut count = 0; { let mut iter = list.drain_filter(|_| false); assert_eq!(iter.size_hint(), (0, Some(initial_len))); for _ in iter.by_ref() { count += 1; } assert_eq!(iter.size_hint(), (0, Some(0))); assert_eq!(iter.next(), None); assert_eq!(iter.size_hint(), (0, Some(0))); } assert_eq!(count, 0); assert_eq!(list.len(), initial_len); assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); }
#[test] fn drain_filter_true() { let mut list: LinkedList<_> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect(); let initial_len = list.len(); let mut count = 0; { let mut iter = list.drain_filter(|_| true); assert_eq!(iter.size_hint(), (0, Some(initial_len))); while let Some(_) = iter.next() { count += 1; assert_eq!(iter.size_hint(), (0, Some(initial_len - count))); } assert_eq!(iter.size_hint(), (0, Some(0))); assert_eq!(iter.next(), None); assert_eq!(iter.size_hint(), (0, Some(0))); } assert_eq!(count, initial_len); assert_eq!(list.len(), 0); assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![]); }
#[test] fn drain_filter_complex() { {
// [+xxx++++++xxxxx++++x+x++]
let mut list = vec![ 1, 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, 39, ] .into_iter() .collect::<LinkedList<_>>(); let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>(); assert_eq!(removed.len(), 10); assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]); assert_eq!(list.len(), 14); assert_eq!( list.into_iter().collect::<Vec<_>>(), vec![1, 7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39] ); } {
// [xxx++++++xxxxx++++x+x++]
let mut list = vec![ 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, 39, ] .into_iter() .collect::<LinkedList<_>>(); let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>(); assert_eq!(removed.len(), 10); assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]); assert_eq!(list.len(), 13); assert_eq!( list.into_iter().collect::<Vec<_>>(), vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39] ); } {
// [xxx++++++xxxxx++++x+x]
let mut list = vec![2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36] .into_iter() .collect::<LinkedList<_>>(); let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>(); assert_eq!(removed.len(), 10); assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]); assert_eq!(list.len(), 11); assert_eq!( list.into_iter().collect::<Vec<_>>(), vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35] ); } {
// [xxxxxxxxxx+++++++++++]
let mut list = vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19] .into_iter() .collect::<LinkedList<_>>(); let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>(); assert_eq!(removed.len(), 10); assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]); assert_eq!(list.len(), 10); assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]); } {
// [+++++++++++xxxxxxxxxx]
let mut list = vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20] .into_iter() .collect::<LinkedList<_>>(); let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>(); assert_eq!(removed.len(), 10); assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]); assert_eq!(list.len(), 10); assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]); } }
#[test] fn drain_filter_drop_panic_leak() { static mut DROPS: i32 = 0; struct D(bool); impl Drop for D { fn drop(&mut self) { unsafe { DROPS += 1; } if self.0 { panic!("panic in `drop`"); } } } let mut q = LinkedList::new(); q.push_back(D(false)); q.push_back(D(false)); q.push_back(D(false)); q.push_back(D(false)); q.push_back(D(false)); q.push_front(D(false)); q.push_front(D(true)); q.push_front(D(false)); catch_unwind(AssertUnwindSafe(|| drop(q.drain_filter(|_| true)))).ok(); assert_eq!(unsafe { DROPS }, 8); assert!(q.is_empty()); }
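// NOTE (added): a minimal sketch of the API under test, assuming the same
// unstable `drain_filter` feature these tests already rely on. The returned
// iterator removes and yields exactly the elements for which the predicate
// returns true, leaving the rest in place and in order:
mod drain_filter_sketch {
    use std::collections::LinkedList;

    #[test]
    fn removes_matching_elements_in_order() {
        let mut list: LinkedList<i32> = (1..=6).collect();
        let even = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
        assert_eq!(even, vec![2, 4, 6]);
        assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![1, 3, 5]);
    }
}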
#[test] fn drain_filter_pred_panic_leak() { static mut DROPS: i32 = 0; #[derive(Debug)] struct D(u32); impl Drop for D { fn drop(&mut self) { unsafe { DROPS += 1; } } } let mut q = LinkedList::new(); q.push_back(D(3)); q.push_back(D(4)); q.push_back(D(5)); q.push_back(D(6)); q.push_back(D(7)); q.push_front(D(2)); q.push_front(D(1)); q.push_front(D(0)); catch_unwind(AssertUnwindSafe(|| { drop(q.drain_filter(|item| if item.0 >= 2 { panic!() } else { true })) })) .ok(); assert_eq!(unsafe { DROPS }, 2); // 0 and 1
assert_eq!(q.len(), 6); }
#[test] fn test_drop() { static mut DROPS: i32 = 0; struct Elem; impl Drop for Elem { fn drop(&mut self) { unsafe { DROPS += 1; } } } let mut ring = LinkedList::new(); ring.push_back(Elem); ring.push_front(Elem); ring.push_back(Elem); ring.push_front(Elem); drop(ring); assert_eq!(unsafe { DROPS }, 4); }
#[test] fn test_drop_with_pop() { static mut DROPS: i32 = 0; struct Elem; impl Drop for Elem { fn drop(&mut self) { unsafe { DROPS += 1; } } } let mut ring = LinkedList::new(); ring.push_back(Elem); ring.push_front(Elem); ring.push_back(Elem); ring.push_front(Elem); drop(ring.pop_back()); drop(ring.pop_front()); assert_eq!(unsafe { DROPS }, 2); drop(ring); assert_eq!(unsafe { DROPS }, 4); }
#[test] fn test_drop_clear() { static mut DROPS: i32 = 0; struct Elem; impl Drop for Elem { fn drop(&mut self) { unsafe { DROPS += 1; } } } let mut ring = LinkedList::new(); ring.push_back(Elem); ring.push_front(Elem); ring.push_back(Elem); ring.push_front(Elem); ring.clear(); assert_eq!(unsafe { DROPS }, 4); drop(ring); assert_eq!(unsafe { DROPS }, 4); }
#[test] fn test_drop_panic() { static mut DROPS: i32 = 0; struct D(bool); impl Drop for D { fn drop(&mut self) { unsafe { DROPS += 1; } if self.0 { panic!("panic in `drop`"); } } } let mut q = LinkedList::new(); q.push_back(D(false)); q.push_back(D(false)); q.push_back(D(false)); q.push_back(D(false)); q.push_back(D(false)); q.push_front(D(false)); q.push_front(D(false)); q.push_front(D(true)); catch_unwind(move || drop(q)).ok(); assert_eq!(unsafe { DROPS }, 8); }
use std::any::Any;
use std::cell::RefCell;
use std::cmp::PartialEq;
use std::iter::TrustedLen;
use std::mem;
use std::sync::{Arc, Weak};
#[test] fn uninhabited() { enum Void {} let mut a = Weak::<Void>::new(); a = a.clone(); assert!(a.upgrade().is_none()); let mut a: Weak<dyn Any> = a; // Unsizing
a = a.clone(); assert!(a.upgrade().is_none()); }
#[test] fn slice() { let a: Arc<[u32; 3]> = Arc::new([3, 2, 1]); let a: Arc<[u32]> = a; // Unsizing
let b: Arc<[u32]> = Arc::from(&[3, 2, 1][..]); // Conversion
assert_eq!(a, b);
// Exercise is_dangling() with a DST
let mut a = Arc::downgrade(&a); a = a.clone(); assert!(a.upgrade().is_some()); }
#[test] fn trait_object() { let a: Arc<u32> = Arc::new(4); let a: Arc<dyn Any> = a; // Unsizing
// Exercise is_dangling() with a DST
let mut a = Arc::downgrade(&a); a = a.clone(); assert!(a.upgrade().is_some()); let mut b = Weak::<u32>::new(); b = b.clone(); assert!(b.upgrade().is_none()); let mut b: Weak<dyn Any> = b; // Unsizing
b = b.clone(); assert!(b.upgrade().is_none()); }
#[test] fn float_nan_ne() { let x = Arc::new(f32::NAN); assert!(x != x); assert!(!(x == x)); }
#[test] fn partial_eq() { struct TestPEq(RefCell<usize>); impl PartialEq for TestPEq { fn eq(&self, other: &TestPEq) -> bool { *self.0.borrow_mut() += 1; *other.0.borrow_mut() += 1; true } } let x = Arc::new(TestPEq(RefCell::new(0))); assert!(x == x); assert!(!(x != x)); assert_eq!(*x.0.borrow(), 4); }
#[test] fn eq() { #[derive(Eq)] struct TestEq(RefCell<usize>); impl PartialEq for TestEq { fn eq(&self, other: &TestEq) -> bool { *self.0.borrow_mut() += 1; *other.0.borrow_mut() += 1; true } } let x = Arc::new(TestEq(RefCell::new(0))); assert!(x == x); assert!(!(x != x)); assert_eq!(*x.0.borrow(), 0); }
// The test code below is identical to that in `rc.rs`.
// For better maintainability we therefore define this type alias.
type Rc<T> = Arc<T>;
const SHARED_ITER_MAX: u16 = 100;
fn assert_trusted_len<I: TrustedLen>(_: &I) {}
#[test] fn shared_from_iter_normal() {
// Exercise the base implementation for non-`TrustedLen` iterators.
{
// `Filter` is never `TrustedLen` since we don't
// know statically how many elements will be kept:
let iter = (0..SHARED_ITER_MAX).filter(|x| x % 2 == 0).map(Box::new);
// Collecting into a `Vec` or `Rc<[T]>` should make no difference:
let vec = iter.clone().collect::<Vec<_>>(); let rc = iter.collect::<Rc<[_]>>(); assert_eq!(&*vec, &*rc);
// Clone a bit and let these get dropped.
{ let _rc_2 = rc.clone(); let _rc_3 = rc.clone(); let _rc_4 = Rc::downgrade(&_rc_3); } }
// Drop what hasn't been here.
}
#[test] fn shared_from_iter_trustedlen_normal() {
// Exercise the `TrustedLen` implementation under normal circumstances
// where `size_hint()` matches `(_, Some(exact_len))`.
{ let iter = (0..SHARED_ITER_MAX).map(Box::new); assert_trusted_len(&iter);
// Collecting into a `Vec` or `Rc<[T]>` should make no difference:
let vec = iter.clone().collect::<Vec<_>>(); let rc = iter.collect::<Rc<[_]>>(); assert_eq!(&*vec, &*rc); assert_eq!(mem::size_of::<Box<u16>>() * SHARED_ITER_MAX as usize, mem::size_of_val(&*rc));
// Clone a bit and let these get dropped.
{ let _rc_2 = rc.clone(); let _rc_3 = rc.clone(); let _rc_4 = Rc::downgrade(&_rc_3); } }
// Drop what hasn't been here.
// Try a ZST to make sure it is handled well.
{ let iter = (0..SHARED_ITER_MAX).map(drop); let vec = iter.clone().collect::<Vec<_>>(); let rc = iter.collect::<Rc<[_]>>(); assert_eq!(&*vec, &*rc); assert_eq!(0, mem::size_of_val(&*rc)); { let _rc_2 = rc.clone(); let _rc_3 = rc.clone(); let _rc_4 = Rc::downgrade(&_rc_3); } } }
#[test] #[should_panic = "I've almost got 99 problems."] fn shared_from_iter_trustedlen_panic() {
// Exercise the `TrustedLen` implementation when `size_hint()` matches
// `(_, Some(exact_len))` but where `.next()` drops before the last iteration.
let iter = (0..SHARED_ITER_MAX).map(|val| match val { 98 => panic!("I've almost got 99 problems."), _ => Box::new(val), }); assert_trusted_len(&iter); let _ = iter.collect::<Rc<[_]>>();
panic!("I am unreachable."); }
#[test] fn shared_from_iter_trustedlen_no_fuse() {
// Exercise the `TrustedLen` implementation when `size_hint()` matches
// `(_, Some(exact_len))` but where the iterator does not behave in a fused manner.
struct Iter(std::vec::IntoIter<Option<Box<u16>>>);
unsafe impl TrustedLen for Iter {}
impl Iterator for Iter { fn size_hint(&self) -> (usize, Option<usize>) { (2, Some(2)) } type Item = Box<u16>; fn next(&mut self) -> Option<Self::Item> { self.0.next().flatten() } }
let vec = vec![Some(Box::new(42)), Some(Box::new(24)), None, Some(Box::new(12))]; let iter = Iter(vec.into_iter()); assert_trusted_len(&iter); assert_eq!(&[Box::new(42), Box::new(24)], &*iter.collect::<Rc<[_]>>()); }
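// NOTE (added): a minimal illustration of the `shared_from_iter` machinery
// exercised above, using only stable API. `Rc<[T]>`/`Arc<[T]>` implement
// `FromIterator`, and a `TrustedLen` iterator lets the allocation be sized
// exactly once up front; cloning the result only bumps a reference count.
mod shared_from_iter_sketch {
    use std::sync::Arc;

    #[test]
    fn collect_into_shared_slice() {
        // `Range<u16>` is `TrustedLen`, so this takes the exact-size path.
        let shared: Arc<[u16]> = (0..4).collect();
        assert_eq!(&*shared, &[0, 1, 2, 3]);
        let alias = shared.clone(); // cheap: no elements are copied
        assert_eq!(alias, shared);
    }
}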
use std::borrow::Cow;
use std::cmp::Ordering::{Equal, Greater, Less};
use std::str::{from_utf8, from_utf8_unchecked};
#[test] fn test_le() { assert!("" <= ""); assert!("" <= "foo"); assert!("foo" <= "foo"); assert_ne!("foo", "bar"); }
#[test] fn test_find() { assert_eq!("hello".find('l'), Some(2)); assert_eq!("hello".find(|c: char| c == 'o'), Some(4)); assert!("hello".find('x').is_none()); assert!("hello".find(|c: char| c == 'x').is_none()); assert_eq!("ประเทศไทย中华Việt Nam".find('华'), Some(30)); assert_eq!("ประเทศไทย中华Việt Nam".find(|c: char| c == '华'), Some(30)); }
#[test] fn test_rfind() { assert_eq!("hello".rfind('l'), Some(3)); assert_eq!("hello".rfind(|c: char| c == 'o'), Some(4)); assert!("hello".rfind('x').is_none()); assert!("hello".rfind(|c: char| c == 'x').is_none()); assert_eq!("ประเทศไทย中华Việt Nam".rfind('华'), Some(30)); assert_eq!("ประเทศไทย中华Việt Nam".rfind(|c: char| c == '华'), Some(30)); }
#[test] fn test_collect() { let empty = ""; let s: String = empty.chars().collect(); assert_eq!(empty, s); let data = "ประเทศไทย中"; let s: String = data.chars().collect(); assert_eq!(data, s); }
#[test] fn test_into_bytes() { let data = String::from("asdf"); let buf = data.into_bytes(); assert_eq!(buf, b"asdf"); }
#[test] fn test_find_str() {
// byte positions
assert_eq!("".find(""), Some(0)); assert!("banana".find("apple pie").is_none()); let data = "abcabc"; assert_eq!(data[0..6].find("ab"), Some(0)); assert_eq!(data[2..6].find("ab"), Some(3 - 2)); assert!(data[2..4].find("ab").is_none()); let string = "ประเทศไทย中华Việt Nam"; let mut data = String::from(string); data.push_str(string); assert!(data.find("ไท华").is_none()); assert_eq!(data[0..43].find(""), Some(0)); assert_eq!(data[6..43].find(""), Some(6 - 6)); assert_eq!(data[0..43].find("ประ"), Some(0)); assert_eq!(data[0..43].find("ทศไ"), Some(12)); assert_eq!(data[0..43].find("ย中"), Some(24)); assert_eq!(data[0..43].find("iệt"), Some(34)); assert_eq!(data[0..43].find("Nam"), Some(40)); assert_eq!(data[43..86].find("ประ"), Some(43 - 43)); assert_eq!(data[43..86].find("ทศไ"), Some(55 - 43)); assert_eq!(data[43..86].find("ย中"), Some(67 - 43)); assert_eq!(data[43..86].find("iệt"), Some(77 - 43)); assert_eq!(data[43..86].find("Nam"), Some(83 - 43));
// find every substring -- assert that it finds it, or an earlier occurrence.
let string = "Việt Namacbaabcaabaaba"; for (i, ci) in string.char_indices() { let ip = i + ci.len_utf8(); for j in string[ip..].char_indices().map(|(i, _)| i).chain(Some(string.len() - ip)) { let pat = &string[i..ip + j]; assert!(match string.find(pat) { None => false, Some(x) => x <= i, }); assert!(match string.rfind(pat) { None => false, Some(x) => x >= i, }); } } }
fn s(x: &str) -> String { x.to_string() }
macro_rules! test_concat { ($expected: expr, $string: expr) => {{ let s: String = $string.concat(); assert_eq!($expected, s); }}; }
#[test] fn test_concat_for_different_types() { test_concat!("ab", vec![s("a"), s("b")]); test_concat!("ab", vec!["a", "b"]); }
#[test] fn test_concat_for_different_lengths() { let empty: &[&str] = &[]; test_concat!("", empty); test_concat!("a", ["a"]); test_concat!("ab", ["a", "b"]); test_concat!("abc", ["", "a", "bc"]); }
macro_rules! test_join { ($expected: expr, $string: expr, $delim: expr) => {{ let s = $string.join($delim); assert_eq!($expected, s); }}; }
#[test] fn test_join_for_different_types() { test_join!("a-b", ["a", "b"], "-"); let hyphen = "-".to_string(); test_join!("a-b", [s("a"), s("b")], &*hyphen); test_join!("a-b", vec!["a", "b"], &*hyphen); test_join!("a-b", &*vec!["a", "b"], "-"); test_join!("a-b", vec![s("a"), s("b")], "-"); }
#[test] fn test_join_for_different_lengths() { let empty: &[&str] = &[]; test_join!("", empty, "-"); test_join!("a", ["a"], "-"); test_join!("a-b", ["a", "b"], "-"); test_join!("-a-bc", ["", "a", "bc"], "-"); }
// join has fast paths for small separators up to 4 bytes
// this tests the slow paths.
#[test] fn test_join_for_different_lengths_with_long_separator() { assert_eq!("～～～～～".len(), 15); let empty: &[&str] = &[]; test_join!("", empty, "～～～～～"); test_join!("a", ["a"], "～～～～～"); test_join!("a～～～～～b", ["a", "b"], "～～～～～"); test_join!("～～～～～a～～～～～bc", ["", "a", "bc"], "～～～～～"); }
#[test] fn test_join_issue_80335() { use core::{borrow::Borrow, cell::Cell}; struct WeirdBorrow { state: Cell<bool>, } impl Default for WeirdBorrow { fn default() -> Self { WeirdBorrow { state: Cell::new(false) } } } impl Borrow<str> for WeirdBorrow { fn borrow(&self) -> &str { let state = self.state.get(); if state { "0" } else { self.state.set(true); "123456" } } } let arr: [WeirdBorrow; 3] = Default::default(); test_join!("0-0-0", arr, "-"); }
#[test] #[cfg_attr(miri, ignore)] // Miri is too slow
fn test_unsafe_slice() { assert_eq!("ab", unsafe { "abc".get_unchecked(0..2) }); assert_eq!("bc", unsafe { "abc".get_unchecked(1..3) }); assert_eq!("", unsafe { "abc".get_unchecked(1..1) }); fn a_million_letter_a() -> String { let mut i = 0; let mut rs = String::new(); while i < 100000 { rs.push_str("aaaaaaaaaa"); i += 1; } rs } fn half_a_million_letter_a() -> String { let mut i = 0; let mut rs = String::new(); while i < 100000 { rs.push_str("aaaaa"); i += 1; } rs } let letters = a_million_letter_a(); assert_eq!(half_a_million_letter_a(), unsafe { letters.get_unchecked(0..500000) }); }
#[test] fn test_starts_with() { assert!("".starts_with("")); assert!("abc".starts_with("")); assert!("abc".starts_with("a")); assert!(!"a".starts_with("abc")); assert!(!"".starts_with("abc")); assert!(!"ödd".starts_with("-")); assert!("ödd".starts_with("öd")); }
#[test] fn test_ends_with() { assert!("".ends_with("")); assert!("abc".ends_with("")); assert!("abc".ends_with("c")); assert!(!"a".ends_with("abc")); assert!(!"".ends_with("abc")); assert!(!"ddö".ends_with("-")); assert!("ddö".ends_with("dö")); }
#[test] fn test_is_empty() { assert!("".is_empty()); assert!(!"a".is_empty()); }
#[test] fn test_replacen() { assert_eq!("".replacen('a', "b", 5), ""); assert_eq!("acaaa".replacen("a", "b", 3), "bcbba"); assert_eq!("aaaa".replacen("a", "b", 0), "aaaa"); let test = "test"; assert_eq!(" test test ".replacen(test, "toast", 3), " toast toast "); assert_eq!(" test test ".replacen(test, "toast", 0), " test test "); assert_eq!(" test test ".replacen(test, "", 5), " ");
assert_eq!("qwer123zxc789".replacen(char::is_numeric, "", 3), "qwerzxc789"); } #[test] fn test_replace() { let a = "a"; assert_eq!("".replace(a, "b"), ""); assert_eq!("a".replace(a, "b"), "b"); assert_eq!("ab".replace(a, "b"), "bb"); let test = "test"; assert_eq!(" test test ".replace(test, "toast"), " toast toast "); assert_eq!(" test test ".replace(test, ""), " "); } #[test] fn test_replace_2a() { let data = "ประเทศไทย中华"; let repl = "دولة الكويت"; let a = "ประเ"; let a2 = "دولة الكويتทศไทย中华"; assert_eq!(data.replace(a, repl), a2); } #[test] fn test_replace_2b() { let data = "ประเทศไทย中华"; let repl = "دولة الكويت"; let b = "ะเ"; let b2 = "ปรدولة الكويتทศไทย中华"; assert_eq!(data.replace(b, repl), b2); } #[test] fn test_replace_2c() { let data = "ประเทศไทย中华"; let repl = "دولة الكويت"; let c = "中华"; let c2 = "ประเทศไทยدولة الكويت"; assert_eq!(data.replace(c, repl), c2); } #[test] fn test_replace_2d() { let data = "ประเทศไทย中华"; let repl = "دولة الكويت"; let d = "ไท华"; assert_eq!(data.replace(d, repl), data); } #[test] fn test_replace_pattern() { let data = "abcdαβγδabcdαβγδ"; assert_eq!(data.replace("dαβ", "😺😺😺"), "abc😺😺😺γδabc😺😺😺γδ"); assert_eq!(data.replace('γ', "😺😺😺"), "abcdαβ😺😺😺δabcdαβ😺😺😺δ"); assert_eq!(data.replace(&['a', 'γ'] as &[_], "😺😺😺"), "😺😺😺bcdαβ😺😺😺δ😺😺😺bcdαβ😺😺😺δ"); assert_eq!(data.replace(|c| c == 'γ', "😺😺😺"), "abcdαβ😺😺😺δabcdαβ😺😺😺δ"); } // The current implementation of SliceIndex fails to handle methods // orthogonally from range types; therefore, it is worth testing // all of the indexing operations on each input. mod slice_index { // Test a slicing operation **that should succeed,** // testing it on all of the indexing methods. // // This is not suitable for testing failure on invalid inputs. macro_rules! assert_range_eq { ($s:expr, $range:expr, $expected:expr) => { let mut s: String = $s.to_owned(); let mut expected: String = $expected.to_owned(); { let s: &str = &s; let expected: &str = &expected; assert_eq!(&s[$range], expected, "(in assertion for: index)"); assert_eq!(s.get($range), Some(expected), "(in assertion for: get)"); unsafe { assert_eq!( s.get_unchecked($range), expected, "(in assertion for: get_unchecked)", ); } } { let s: &mut str = &mut s; let expected: &mut str = &mut expected; assert_eq!(&mut s[$range], expected, "(in assertion for: index_mut)",); assert_eq!( s.get_mut($range), Some(&mut expected[..]), "(in assertion for: get_mut)", ); unsafe { assert_eq!( s.get_unchecked_mut($range), expected, "(in assertion for: get_unchecked_mut)", ); } } }; } // Make sure the macro can actually detect bugs, // because if it can't, then what are we even doing here? // // (Be aware this only demonstrates the ability to detect bugs // in the FIRST method that panics, as the macro is not designed // to be used in `should_panic`) #[test] #[should_panic(expected = "out of bounds")] fn assert_range_eq_can_fail_by_panic() { assert_range_eq!("abc", 0..5, "abc"); } // (Be aware this only demonstrates the ability to detect bugs // in the FIRST method it calls, as the macro is not designed // to be used in `should_panic`) #[test] #[should_panic(expected = "==")] fn assert_range_eq_can_fail_by_inequality() { assert_range_eq!("abc", 0..2, "abc"); } // Generates test cases for bad index operations. // // This generates `should_panic` test cases for Index/IndexMut // and `None` test cases for get/get_mut. macro_rules! panic_cases { ($( in mod $case_name:ident { data: $data:expr; // optional: // // a similar input for which DATA[input] succeeds, and the corresponding // output str. 
// This helps validate "critical points" where an input range
// straddles the boundary between valid and invalid.
// (such as the input `len..len`, which is just barely valid)
$( good: data[$good:expr] == $output:expr; )* bad: data[$bad:expr]; message: $expect_msg:expr; // must be a literal
} )*) => {$( mod $case_name { #[test] fn pass() { let mut v: String = $data.into(); $( assert_range_eq!(v, $good, $output); )* { let v: &str = &v; assert_eq!(v.get($bad), None, "(in None assertion for get)"); } { let v: &mut str = &mut v; assert_eq!(v.get_mut($bad), None, "(in None assertion for get_mut)"); } } #[test] #[should_panic(expected = $expect_msg)] fn index_fail() { let v: String = $data.into(); let v: &str = &v; let _v = &v[$bad]; } #[test] #[should_panic(expected = $expect_msg)] fn index_mut_fail() { let mut v: String = $data.into(); let v: &mut str = &mut v; let _v = &mut v[$bad]; } } )*}; }
#[test] fn simple_ascii() { assert_range_eq!("abc", .., "abc"); assert_range_eq!("abc", 0..2, "ab"); assert_range_eq!("abc", 0..=1, "ab"); assert_range_eq!("abc", ..2, "ab"); assert_range_eq!("abc", ..=1, "ab"); assert_range_eq!("abc", 1..3, "bc"); assert_range_eq!("abc", 1..=2, "bc"); assert_range_eq!("abc", 1..1, ""); assert_range_eq!("abc", 1..=0, ""); }
#[test] fn simple_unicode() {
// 日本
assert_range_eq!("\u{65e5}\u{672c}", .., "\u{65e5}\u{672c}"); assert_range_eq!("\u{65e5}\u{672c}", 0..3, "\u{65e5}"); assert_range_eq!("\u{65e5}\u{672c}", 0..=2, "\u{65e5}"); assert_range_eq!("\u{65e5}\u{672c}", ..3, "\u{65e5}"); assert_range_eq!("\u{65e5}\u{672c}", ..=2, "\u{65e5}"); assert_range_eq!("\u{65e5}\u{672c}", 3..6, "\u{672c}"); assert_range_eq!("\u{65e5}\u{672c}", 3..=5, "\u{672c}"); assert_range_eq!("\u{65e5}\u{672c}", 3.., "\u{672c}"); let data = "ประเทศไทย中华"; assert_range_eq!(data, 0..3, "ป"); assert_range_eq!(data, 3..6, "ร"); assert_range_eq!(data, 3..3, ""); assert_range_eq!(data, 30..33, "华"); /*0: 中 3: 华 6: V 7: i 8: ệ 11: t 12: 13: N 14: a 15: m */ let ss = "中华Việt Nam"; assert_range_eq!(ss, 3..6, "华"); assert_range_eq!(ss, 6..16, "Việt Nam"); assert_range_eq!(ss, 6..=15, "Việt Nam"); assert_range_eq!(ss, 6.., "Việt Nam"); assert_range_eq!(ss, 0..3, "中"); assert_range_eq!(ss, 3..7, "华V"); assert_range_eq!(ss, 3..=6, "华V"); assert_range_eq!(ss, 3..3, ""); assert_range_eq!(ss, 3..=2, ""); }
#[test] #[cfg_attr(target_os = "emscripten", ignore)] // hits an OOM
#[cfg_attr(miri, ignore)] // Miri is too slow
fn simple_big() { fn a_million_letter_x() -> String { let mut i = 0; let mut rs = String::new(); while i < 100000 { rs.push_str("华华华华华华华华华华"); i += 1; } rs } fn half_a_million_letter_x() -> String { let mut i = 0; let mut rs = String::new(); while i < 100000 { rs.push_str("华华华华华"); i += 1; } rs } let letters = a_million_letter_x(); assert_range_eq!(letters, 0..3 * 500000, half_a_million_letter_x()); }
#[test] #[should_panic] fn test_slice_fail() { &"中华Việt Nam"[0..2]; }
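// NOTE (added): a small stable-API illustration of the "critical point"
// convention used by the `panic_cases!` invocations that follow: `len..len`
// is the last valid range, and one past it flips `get` from `Some` to `None`
// while indexing flips from success to panic.
mod critical_point_sketch {
    #[test]
    fn boundary_between_valid_and_invalid() {
        let data = "abcdef"; // len() == 6
        assert_eq!(data.get(6..), Some("")); // just barely valid
        assert_eq!(data.get(7..), None); // just barely invalid
    }
}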
== ""; bad: data[7..]; message: "out of bounds"; } in mod rangeto_len { data: "abcdef"; good: data[..6] == "abcdef"; bad: data[..7]; message: "out of bounds"; } in mod rangetoinclusive_len { data: "abcdef"; good: data[..=5] == "abcdef"; bad: data[..=6]; message: "out of bounds"; } in mod rangeinclusive_len { data: "abcdef"; good: data[0..=5] == "abcdef"; bad: data[0..=6]; message: "out of bounds"; } in mod range_len_len { data: "abcdef"; good: data[6..6] == ""; bad: data[7..7]; message: "out of bounds"; } in mod rangeinclusive_len_len { data: "abcdef"; good: data[6..=5] == ""; bad: data[7..=6]; message: "out of bounds"; } } panic_cases! { in mod rangeinclusive_exhausted { data: "abcdef"; good: data[0..=5] == "abcdef"; good: data[{ let mut iter = 0..=5; iter.by_ref().count(); // exhaust it iter }] == ""; // 0..=6 is out of bounds before exhaustion, so it // stands to reason that it still would be after. bad: data[{ let mut iter = 0..=6; iter.by_ref().count(); // exhaust it iter }]; message: "out of bounds"; } } panic_cases! { in mod range_neg_width { data: "abcdef"; good: data[4..4] == ""; bad: data[4..3]; message: "begin <= end (4 <= 3)"; } in mod rangeinclusive_neg_width { data: "abcdef"; good: data[4..=3] == ""; bad: data[4..=2]; message: "begin <= end (4 <= 3)"; } } mod overflow { panic_cases! { in mod rangeinclusive { data: "hello"; // note: using 0 specifically ensures that the result of overflowing is 0..0, // so that `get` doesn't simply return None for the wrong reason. bad: data[0..=usize::MAX]; message: "maximum usize"; } in mod rangetoinclusive { data: "hello"; bad: data[..=usize::MAX]; message: "maximum usize"; } } } mod boundary { const DATA: &str = "abcαβγ"; const BAD_START: usize = 4; const GOOD_START: usize = 3; const BAD_END: usize = 6; const GOOD_END: usize = 7; const BAD_END_INCL: usize = BAD_END - 1; const GOOD_END_INCL: usize = GOOD_END - 1; // it is especially important to test all of the different range types here // because some of the logic may be duplicated as part of micro-optimizations // to dodge unicode boundary checks on half-ranges. panic_cases! { in mod range_1 { data: super::DATA; bad: data[super::BAD_START..super::GOOD_END]; message: "byte index 4 is not a char boundary; it is inside 'α' (bytes 3..5) of"; } in mod range_2 { data: super::DATA; bad: data[super::GOOD_START..super::BAD_END]; message: "byte index 6 is not a char boundary; it is inside 'β' (bytes 5..7) of"; } in mod rangefrom { data: super::DATA; bad: data[super::BAD_START..]; message: "byte index 4 is not a char boundary; it is inside 'α' (bytes 3..5) of"; } in mod rangeto { data: super::DATA; bad: data[..super::BAD_END]; message: "byte index 6 is not a char boundary; it is inside 'β' (bytes 5..7) of"; } in mod rangeinclusive_1 { data: super::DATA; bad: data[super::BAD_START..=super::GOOD_END_INCL]; message: "byte index 4 is not a char boundary; it is inside 'α' (bytes 3..5) of"; } in mod rangeinclusive_2 { data: super::DATA; bad: data[super::GOOD_START..=super::BAD_END_INCL]; message: "byte index 6 is not a char boundary; it is inside 'β' (bytes 5..7) of"; } in mod rangetoinclusive { data: super::DATA; bad: data[..=super::BAD_END_INCL]; message: "byte index 6 is not a char boundary; it is inside 'β' (bytes 5..7) of"; } } } const LOREM_PARAGRAPH: &str = "\ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem \ sit amet dolor ultricies condimentum. Praesent iaculis purus elit, ac malesuada \ quam malesuada in. Duis sed orci eros. 
const LOREM_PARAGRAPH: &str = "\
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem \
sit amet dolor ultricies condimentum. Praesent iaculis purus elit, ac malesuada \
quam malesuada in. Duis sed orci eros. Suspendisse sit amet magna mollis, mollis \
nunc luctus, imperdiet mi. Integer fringilla non sem ut lacinia. Fusce varius \
tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec tempus vel, \
gravida nec quam.";
// check the panic includes the prefix of the sliced string
#[test] #[should_panic(expected = "byte index 1024 is out of bounds of `Lorem ipsum dolor sit amet")] fn test_slice_fail_truncated_1() { &LOREM_PARAGRAPH[..1024]; }
// check the truncation in the panic message
#[test] #[should_panic(expected = "luctus, im`[...]")] fn test_slice_fail_truncated_2() { &LOREM_PARAGRAPH[..1024]; } }
#[test] fn test_str_slice_rangetoinclusive_ok() { let s = "abcαβγ"; assert_eq!(&s[..=2], "abc"); assert_eq!(&s[..=4], "abcα"); }
#[test] #[should_panic] fn test_str_slice_rangetoinclusive_notok() { let s = "abcαβγ"; &s[..=3]; }
#[test] fn test_str_slicemut_rangetoinclusive_ok() { let mut s = "abcαβγ".to_owned(); let s: &mut str = &mut s; assert_eq!(&mut s[..=2], "abc"); assert_eq!(&mut s[..=4], "abcα"); }
#[test] #[should_panic] fn test_str_slicemut_rangetoinclusive_notok() { let mut s = "abcαβγ".to_owned(); let s: &mut str = &mut s; &mut s[..=3]; }
#[test] fn test_is_char_boundary() { let s = "ศไทย中华Việt Nam β-release 🐱123"; assert!(s.is_char_boundary(0)); assert!(s.is_char_boundary(s.len())); assert!(!s.is_char_boundary(s.len() + 1)); for (i, ch) in s.char_indices() {
// ensure character locations are boundaries and continuation bytes are not
assert!(s.is_char_boundary(i), "{} is a char boundary in {:?}", i, s); for j in 1..ch.len_utf8() { assert!( !s.is_char_boundary(i + j), "{} should not be a char boundary in {:?}", i + j, s ); } } }
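// NOTE (added): a small illustration of the boundary rule the test above
// checks: byte indices inside a multi-byte character are not char
// boundaries, and `get` refuses to slice there.
mod char_boundary_sketch {
    #[test]
    fn non_boundary_indices_do_not_slice() {
        let s = "aé"; // 'a' at byte 0, 'é' occupies bytes 1..3
        assert!(s.is_char_boundary(1));
        assert!(!s.is_char_boundary(2)); // inside 'é'
        assert_eq!(s.get(0..2), None); // refuses to split 'é'
        assert_eq!(s.get(0..3), Some("aé"));
    }
}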
"blah"); assert_eq!(" \u{3000} wut".trim_start(), "wut"); assert_eq!("hey ".trim_start(), "hey "); } #[test] fn test_trim_end() { assert_eq!("".trim_end(), ""); assert_eq!("a".trim_end(), "a"); assert_eq!(" ".trim_end(), ""); assert_eq!("blah ".trim_end(), "blah"); assert_eq!("wut \u{3000} ".trim_end(), "wut"); assert_eq!(" hey".trim_end(), " hey"); } #[test] fn test_trim() { assert_eq!("".trim(), ""); assert_eq!("a".trim(), "a"); assert_eq!(" ".trim(), ""); assert_eq!(" blah ".trim(), "blah"); assert_eq!("\nwut \u{3000} ".trim(), "wut"); assert_eq!(" hey dude ".trim(), "hey dude"); } #[test] fn test_is_whitespace() { assert!("".chars().all(|c| c.is_whitespace())); assert!(" ".chars().all(|c| c.is_whitespace())); assert!("\u{2009}".chars().all(|c| c.is_whitespace())); // Thin space assert!(" \n\t ".chars().all(|c| c.is_whitespace())); assert!(!" _ ".chars().all(|c| c.is_whitespace())); } #[test] fn test_is_utf8() { // deny overlong encodings assert!(from_utf8(&[0xc0, 0x80]).is_err()); assert!(from_utf8(&[0xc0, 0xae]).is_err()); assert!(from_utf8(&[0xe0, 0x80, 0x80]).is_err()); assert!(from_utf8(&[0xe0, 0x80, 0xaf]).is_err()); assert!(from_utf8(&[0xe0, 0x81, 0x81]).is_err()); assert!(from_utf8(&[0xf0, 0x82, 0x82, 0xac]).is_err()); assert!(from_utf8(&[0xf4, 0x90, 0x80, 0x80]).is_err()); // deny surrogates assert!(from_utf8(&[0xED, 0xA0, 0x80]).is_err()); assert!(from_utf8(&[0xED, 0xBF, 0xBF]).is_err()); assert!(from_utf8(&[0xC2, 0x80]).is_ok()); assert!(from_utf8(&[0xDF, 0xBF]).is_ok()); assert!(from_utf8(&[0xE0, 0xA0, 0x80]).is_ok()); assert!(from_utf8(&[0xED, 0x9F, 0xBF]).is_ok()); assert!(from_utf8(&[0xEE, 0x80, 0x80]).is_ok()); assert!(from_utf8(&[0xEF, 0xBF, 0xBF]).is_ok()); assert!(from_utf8(&[0xF0, 0x90, 0x80, 0x80]).is_ok()); assert!(from_utf8(&[0xF4, 0x8F, 0xBF, 0xBF]).is_ok()); } #[test] fn from_utf8_mostly_ascii() { // deny invalid bytes embedded in long stretches of ascii for i in 32..64 { let mut data = [0; 128]; data[i] = 0xC0; assert!(from_utf8(&data).is_err()); data[i] = 0xC2; assert!(from_utf8(&data).is_err()); } } #[test] fn from_utf8_error() { macro_rules! test { ($input: expr, $expected_valid_up_to: expr, $expected_error_len: expr) => { let error = from_utf8($input).unwrap_err(); assert_eq!(error.valid_up_to(), $expected_valid_up_to); assert_eq!(error.error_len(), $expected_error_len); }; } test!(b"A\xC3\xA9 \xFF ", 4, Some(1)); test!(b"A\xC3\xA9 \x80 ", 4, Some(1)); test!(b"A\xC3\xA9 \xC1 ", 4, Some(1)); test!(b"A\xC3\xA9 \xC1", 4, Some(1)); test!(b"A\xC3\xA9 \xC2", 4, None); test!(b"A\xC3\xA9 \xC2 ", 4, Some(1)); test!(b"A\xC3\xA9 \xC2\xC0", 4, Some(1)); test!(b"A\xC3\xA9 \xE0", 4, None); test!(b"A\xC3\xA9 \xE0\x9F", 4, Some(1)); test!(b"A\xC3\xA9 \xE0\xA0", 4, None); test!(b"A\xC3\xA9 \xE0\xA0\xC0", 4, Some(2)); test!(b"A\xC3\xA9 \xE0\xA0 ", 4, Some(2)); test!(b"A\xC3\xA9 \xED\xA0\x80 ", 4, Some(1)); test!(b"A\xC3\xA9 \xF1", 4, None); test!(b"A\xC3\xA9 \xF1\x80", 4, None); test!(b"A\xC3\xA9 \xF1\x80\x80", 4, None); test!(b"A\xC3\xA9 \xF1 ", 4, Some(1)); test!(b"A\xC3\xA9 \xF1\x80 ", 4, Some(2)); test!(b"A\xC3\xA9 \xF1\x80\x80 ", 4, Some(3)); } #[test] fn test_as_bytes() { // no null let v = [ 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97, 109, ]; let b: &[u8] = &[]; assert_eq!("".as_bytes(), b); assert_eq!("abc".as_bytes(), b"abc"); assert_eq!("ศไทย中华Việt Nam".as_bytes(), v); } #[test] #[should_panic] fn test_as_bytes_fail() { // Don't double free. 
#[test] fn test_as_bytes() {
// no null
let v = [ 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97, 109, ]; let b: &[u8] = &[]; assert_eq!("".as_bytes(), b); assert_eq!("abc".as_bytes(), b"abc"); assert_eq!("ศไทย中华Việt Nam".as_bytes(), v); }
#[test] #[should_panic] fn test_as_bytes_fail() {
// Don't double free. (I'm not sure if this exercises the
// original problem code path anymore.)
let s = String::from(""); let _bytes = s.as_bytes(); panic!(); }
#[test] fn test_as_ptr() { let buf = "hello".as_ptr(); unsafe { assert_eq!(*buf.offset(0), b'h'); assert_eq!(*buf.offset(1), b'e'); assert_eq!(*buf.offset(2), b'l'); assert_eq!(*buf.offset(3), b'l'); assert_eq!(*buf.offset(4), b'o'); } }
#[test] fn vec_str_conversions() { let s1: String = String::from("All mimsy were the borogoves"); let v: Vec<u8> = s1.as_bytes().to_vec(); let s2: String = String::from(from_utf8(&v).unwrap()); let mut i = 0; let n1 = s1.len(); let n2 = v.len(); assert_eq!(n1, n2); while i < n1 { let a: u8 = s1.as_bytes()[i]; let b: u8 = s2.as_bytes()[i]; assert_eq!(a, b); i += 1; } }
#[test] fn test_contains() { assert!("abcde".contains("bcd")); assert!("abcde".contains("abcd")); assert!("abcde".contains("bcde")); assert!("abcde".contains("")); assert!("".contains("")); assert!(!"abcde".contains("def")); assert!(!"".contains("a")); let data = "ประเทศไทย中华Việt Nam"; assert!(data.contains("ประเ")); assert!(data.contains("ะเ")); assert!(data.contains("中华")); assert!(!data.contains("ไท华")); }
#[test] fn test_contains_char() { assert!("abc".contains('b')); assert!("a".contains('a')); assert!(!"abc".contains('d')); assert!(!"".contains('a')); }
#[test] fn test_split_at() { let s = "ศไทย中华Việt Nam"; for (index, _) in s.char_indices() { let (a, b) = s.split_at(index); assert_eq!(&s[..a.len()], a); assert_eq!(&s[a.len()..], b); } let (a, b) = s.split_at(s.len()); assert_eq!(a, s); assert_eq!(b, ""); }
#[test] fn test_split_at_mut() { let mut s = "Hello World".to_string(); { let (a, b) = s.split_at_mut(5); a.make_ascii_uppercase(); b.make_ascii_lowercase(); } assert_eq!(s, "HELLO world"); }
#[test] #[should_panic] fn test_split_at_boundscheck() { let s = "ศไทย中华Việt Nam"; s.split_at(1); }
#[test] fn test_escape_unicode() { assert_eq!("abc".escape_unicode().to_string(), "\\u{61}\\u{62}\\u{63}"); assert_eq!("a c".escape_unicode().to_string(), "\\u{61}\\u{20}\\u{63}"); assert_eq!("\r\n\t".escape_unicode().to_string(), "\\u{d}\\u{a}\\u{9}"); assert_eq!("'\"\\".escape_unicode().to_string(), "\\u{27}\\u{22}\\u{5c}"); assert_eq!("\x00\x01\u{fe}\u{ff}".escape_unicode().to_string(), "\\u{0}\\u{1}\\u{fe}\\u{ff}"); assert_eq!("\u{100}\u{ffff}".escape_unicode().to_string(), "\\u{100}\\u{ffff}"); assert_eq!("\u{10000}\u{10ffff}".escape_unicode().to_string(), "\\u{10000}\\u{10ffff}"); assert_eq!("ab\u{fb00}".escape_unicode().to_string(), "\\u{61}\\u{62}\\u{fb00}"); assert_eq!("\u{1d4ea}\r".escape_unicode().to_string(), "\\u{1d4ea}\\u{d}"); }
#[test] fn test_escape_debug() {
// Note that there are subtleties with the number of backslashes
// on the left- and right-hand sides. In particular, Unicode code points
// are usually escaped with two backslashes on the right-hand side, as
// they are escaped. However, when the character is unescaped (e.g., for
// printable characters), only a single backslash appears (as the character
// itself appears in the debug string).
assert_eq!("abc".escape_debug().to_string(), "abc"); assert_eq!("a c".escape_debug().to_string(), "a c"); assert_eq!("éèê".escape_debug().to_string(), "éèê"); assert_eq!("\r\n\t".escape_debug().to_string(), "\\r\\n\\t"); assert_eq!("'\"\\".escape_debug().to_string(), "\\'\\\"\\\\"); assert_eq!("\u{7f}\u{ff}".escape_debug().to_string(), "\\u{7f}\u{ff}"); assert_eq!("\u{100}\u{ffff}".escape_debug().to_string(), "\u{100}\\u{ffff}"); assert_eq!("\u{10000}\u{10ffff}".escape_debug().to_string(), "\u{10000}\\u{10ffff}"); assert_eq!("ab\u{200b}".escape_debug().to_string(), "ab\\u{200b}"); assert_eq!("\u{10d4ea}\r".escape_debug().to_string(), "\\u{10d4ea}\\r"); assert_eq!( "\u{301}a\u{301}bé\u{e000}".escape_debug().to_string(), "\\u{301}a\u{301}bé\\u{e000}" ); } #[test] fn test_escape_default() { assert_eq!("abc".escape_default().to_string(), "abc"); assert_eq!("a c".escape_default().to_string(), "a c"); assert_eq!("éèê".escape_default().to_string(), "\\u{e9}\\u{e8}\\u{ea}"); assert_eq!("\r\n\t".escape_default().to_string(), "\\r\\n\\t"); assert_eq!("'\"\\".escape_default().to_string(), "\\'\\\"\\\\"); assert_eq!("\u{7f}\u{ff}".escape_default().to_string(), "\\u{7f}\\u{ff}"); assert_eq!("\u{100}\u{ffff}".escape_default().to_string(), "\\u{100}\\u{ffff}"); assert_eq!("\u{10000}\u{10ffff}".escape_default().to_string(), "\\u{10000}\\u{10ffff}"); assert_eq!("ab\u{200b}".escape_default().to_string(), "ab\\u{200b}"); assert_eq!("\u{10d4ea}\r".escape_default().to_string(), "\\u{10d4ea}\\r"); } #[test] fn test_total_ord() { assert_eq!("1234".cmp("123"), Greater); assert_eq!("123".cmp("1234"), Less); assert_eq!("1234".cmp("1234"), Equal); assert_eq!("12345555".cmp("123456"), Less); assert_eq!("22".cmp("1234"), Greater); } #[test] fn test_iterator() { let s = "ศไทย中华Việt Nam"; let v = ['ศ', 'ไ', 'ท', 'ย', '中', '华', 'V', 'i', 'ệ', 't', ' ', 'N', 'a', 'm']; let mut pos = 0; let it = s.chars(); for c in it { assert_eq!(c, v[pos]); pos += 1; } assert_eq!(pos, v.len()); assert_eq!(s.chars().count(), v.len()); } #[test] fn test_rev_iterator() { let s = "ศไทย中华Việt Nam"; let v = ['m', 'a', 'N', ' ', 't', 'ệ', 'i', 'V', '华', '中', 'ย', 'ท', 'ไ', 'ศ']; let mut pos = 0; let it = s.chars().rev(); for c in it { assert_eq!(c, v[pos]); pos += 1; } assert_eq!(pos, v.len()); } #[test] #[cfg_attr(miri, ignore)] // Miri is too slow fn test_chars_decoding() { let mut bytes = [0; 4]; for c in (0..0x110000).filter_map(std::char::from_u32) { let s = c.encode_utf8(&mut bytes); if Some(c) != s.chars().next() { panic!("character {:x}={} does not decode correctly", c as u32, c); } } } #[test] #[cfg_attr(miri, ignore)] // Miri is too slow fn test_chars_rev_decoding() { let mut bytes = [0; 4]; for c in (0..0x110000).filter_map(std::char::from_u32) { let s = c.encode_utf8(&mut bytes); if Some(c) != s.chars().rev().next() { panic!("character {:x}={} does not decode correctly", c as u32, c); } } } #[test] fn test_iterator_clone() { let s = "ศไทย中华Việt Nam"; let mut it = s.chars(); it.next(); assert!(it.clone().zip(it).all(|(x, y)| x == y)); } #[test] fn test_iterator_last() { let s = "ศไทย中华Việt Nam"; let mut it = s.chars(); it.next(); assert_eq!(it.last(), Some('m')); } #[test] fn test_chars_debug() { let s = "ศไทย中华Việt Nam"; let c = s.chars(); assert_eq!( format!("{:?}", c), r#"Chars(['ศ', 'ไ', 'ท', 'ย', '中', '华', 'V', 'i', 'ệ', 't', ' ', 'N', 'a', 'm'])"# ); } #[test] fn test_bytesator() { let s = "ศไทย中华Việt Nam"; let v = [ 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142, 86, 105, 225, 187, 
135, 116, 32, 78, 97, 109, ]; let mut pos = 0; for b in s.bytes() { assert_eq!(b, v[pos]); pos += 1; } }
#[test] fn test_bytes_revator() { let s = "ศไทย中华Việt Nam"; let v = [ 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97, 109, ]; let mut pos = v.len(); for b in s.bytes().rev() { pos -= 1; assert_eq!(b, v[pos]); } }
#[test] fn test_bytesator_nth() { let s = "ศไทย中华Việt Nam"; let v = [ 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97, 109, ]; let mut b = s.bytes(); assert_eq!(b.nth(2).unwrap(), v[2]); assert_eq!(b.nth(10).unwrap(), v[10]); assert_eq!(b.nth(200), None); }
#[test] fn test_bytesator_count() { let s = "ศไทย中华Việt Nam"; let b = s.bytes(); assert_eq!(b.count(), 28) }
#[test] fn test_bytesator_last() { let s = "ศไทย中华Việt Nam"; let b = s.bytes(); assert_eq!(b.last().unwrap(), 109) }
#[test] fn test_char_indicesator() { let s = "ศไทย中华Việt Nam"; let p = [0, 3, 6, 9, 12, 15, 18, 19, 20, 23, 24, 25, 26, 27]; let v = ['ศ', 'ไ', 'ท', 'ย', '中', '华', 'V', 'i', 'ệ', 't', ' ', 'N', 'a', 'm']; let mut pos = 0; let it = s.char_indices(); for c in it { assert_eq!(c, (p[pos], v[pos])); pos += 1; } assert_eq!(pos, v.len()); assert_eq!(pos, p.len()); }
#[test] fn test_char_indices_revator() { let s = "ศไทย中华Việt Nam"; let p = [27, 26, 25, 24, 23, 20, 19, 18, 15, 12, 9, 6, 3, 0]; let v = ['m', 'a', 'N', ' ', 't', 'ệ', 'i', 'V', '华', '中', 'ย', 'ท', 'ไ', 'ศ']; let mut pos = 0; let it = s.char_indices().rev(); for c in it { assert_eq!(c, (p[pos], v[pos])); pos += 1; } assert_eq!(pos, v.len()); assert_eq!(pos, p.len()); }
#[test] fn test_char_indices_last() { let s = "ศไทย中华Việt Nam"; let mut it = s.char_indices(); it.next(); assert_eq!(it.last(), Some((27, 'm'))); }
#[test] fn test_splitn_char_iterator() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let split: Vec<&str> = data.splitn(4, ' ').collect(); assert_eq!(split, ["\nMäry", "häd", "ä", "little lämb\nLittle lämb\n"]); let split: Vec<&str> = data.splitn(4, |c: char| c == ' ').collect(); assert_eq!(split, ["\nMäry", "häd", "ä", "little lämb\nLittle lämb\n"]);
// Unicode
let split: Vec<&str> = data.splitn(4, 'ä').collect(); assert_eq!(split, ["\nM", "ry h", "d ", " little lämb\nLittle lämb\n"]); let split: Vec<&str> = data.splitn(4, |c: char| c == 'ä').collect(); assert_eq!(split, ["\nM", "ry h", "d ", " little lämb\nLittle lämb\n"]); }
#[test] fn test_split_char_iterator_no_trailing() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let split: Vec<&str> = data.split('\n').collect(); assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb", ""]); let split: Vec<&str> = data.split_terminator('\n').collect(); assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb"]); }
#[test] fn test_split_char_iterator_inclusive() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let split: Vec<&str> = data.split_inclusive('\n').collect(); assert_eq!(split, ["\n", "Märy häd ä little lämb\n", "Little lämb\n"]); let uppercase_separated = "SheePSharKTurtlECaT"; let mut first_char = true; let split: Vec<&str> = uppercase_separated .split_inclusive(|c: char| { let split = !first_char && c.is_uppercase(); first_char = split; split }) .collect(); assert_eq!(split, ["SheeP", "SharK", "TurtlE", "CaT"]); }
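// NOTE (added): `split_inclusive` keeps the matched terminator at the end of
// each piece and yields no trailing empty piece, which is what the
// stateful-predicate tests around here rely on. A minimal sketch:
mod split_inclusive_sketch {
    #[test]
    fn terminator_stays_with_its_piece() {
        let pieces: Vec<&str> = "a\nb\nc".split_inclusive('\n').collect();
        assert_eq!(pieces, ["a\n", "b\n", "c"]);
    }
}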
#[test] fn test_split_char_iterator_inclusive_rev() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let split: Vec<&str> = data.split_inclusive('\n').rev().collect(); assert_eq!(split, ["Little lämb\n", "Märy häd ä little lämb\n", "\n"]);
// Note that the predicate is stateful and thus dependent
// on the iteration order.
// (A different predicate is needed for reverse iterator vs normal iterator.)
// Not sure if anything can be done though.
let uppercase_separated = "SheePSharKTurtlECaT"; let mut term_char = true; let split: Vec<&str> = uppercase_separated .split_inclusive(|c: char| { let split = term_char && c.is_uppercase(); term_char = c.is_uppercase(); split }) .rev() .collect(); assert_eq!(split, ["CaT", "TurtlE", "SharK", "SheeP"]); }
#[test] fn test_rsplit() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let split: Vec<&str> = data.rsplit(' ').collect(); assert_eq!(split, ["lämb\n", "lämb\nLittle", "little", "ä", "häd", "\nMäry"]); let split: Vec<&str> = data.rsplit("lämb").collect(); assert_eq!(split, ["\n", "\nLittle ", "\nMäry häd ä little "]); let split: Vec<&str> = data.rsplit(|c: char| c == 'ä').collect(); assert_eq!(split, ["mb\n", "mb\nLittle l", " little l", "d ", "ry h", "\nM"]); }
#[test] fn test_rsplitn() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let split: Vec<&str> = data.rsplitn(2, ' ').collect(); assert_eq!(split, ["lämb\n", "\nMäry häd ä little lämb\nLittle"]); let split: Vec<&str> = data.rsplitn(2, "lämb").collect(); assert_eq!(split, ["\n", "\nMäry häd ä little lämb\nLittle "]); let split: Vec<&str> = data.rsplitn(2, |c: char| c == 'ä').collect(); assert_eq!(split, ["mb\n", "\nMäry häd ä little lämb\nLittle l"]); }
#[test] fn test_split_once() { assert_eq!("".split_once("->"), None); assert_eq!("-".split_once("->"), None); assert_eq!("->".split_once("->"), Some(("", ""))); assert_eq!("a->".split_once("->"), Some(("a", ""))); assert_eq!("->b".split_once("->"), Some(("", "b"))); assert_eq!("a->b".split_once("->"), Some(("a", "b"))); assert_eq!("a->b->c".split_once("->"), Some(("a", "b->c"))); assert_eq!("---".split_once("--"), Some(("", "-"))); }
#[test] fn test_rsplit_once() { assert_eq!("".rsplit_once("->"), None); assert_eq!("-".rsplit_once("->"), None); assert_eq!("->".rsplit_once("->"), Some(("", ""))); assert_eq!("a->".rsplit_once("->"), Some(("a", ""))); assert_eq!("->b".rsplit_once("->"), Some(("", "b"))); assert_eq!("a->b".rsplit_once("->"), Some(("a", "b"))); assert_eq!("a->b->c".rsplit_once("->"), Some(("a->b", "c"))); assert_eq!("---".rsplit_once("--"), Some(("-", ""))); }
#[test] fn test_split_whitespace() { let data = "\n \tMäry häd\tä little lämb\nLittle lämb\n"; let words: Vec<&str> = data.split_whitespace().collect(); assert_eq!(words, ["Märy", "häd", "ä", "little", "lämb", "Little", "lämb"]) }
#[test] fn test_lines() { let data = "\nMäry häd ä little lämb\n\r\nLittle lämb\n"; let lines: Vec<&str> = data.lines().collect(); assert_eq!(lines, ["", "Märy häd ä little lämb", "", "Little lämb"]); let data = "\r\nMäry häd ä little lämb\n\nLittle lämb"; // no trailing \n
let lines: Vec<&str> = data.lines().collect(); assert_eq!(lines, ["", "Märy häd ä little lämb", "", "Little lämb"]); }
#[test] fn test_splitator() { fn t(s: &str, sep: &str, u: &[&str]) { let v: Vec<&str> = s.split(sep).collect(); assert_eq!(v, u); } t("--1233345--", "12345", &["--1233345--"]); t("abc::hello::there", "::", &["abc", "hello", "there"]); t("::hello::there", "::", &["", "hello", "there"]); t("hello::there::", "::", &["hello", "there", ""]); t("::hello::there::", "::", &["", "hello", "there", ""]); t("ประเทศไทย中华Việt Nam", "中华", &["ประเทศไทย", "Việt Nam"]);
"zz", &["", "XXX", "YYY", ""]); t("zzXXXzYYYz", "XXX", &["zz", "zYYYz"]); t(".XXX.YYY.", ".", &["", "XXX", "YYY", ""]); t("", ".", &[""]); t("zz", "zz", &["", ""]); t("ok", "z", &["ok"]); t("zzz", "zz", &["", "z"]); t("zzzzz", "zz", &["", "", "z"]); } #[test] fn test_str_default() { use std::default::Default; fn t>() { let s: S = Default::default(); assert_eq!(s.as_ref(), ""); } t::<&str>(); t::(); t::<&mut str>(); } #[test] fn test_str_container() { fn sum_len(v: &[&str]) -> usize { v.iter().map(|x| x.len()).sum() } let s = "01234"; assert_eq!(5, sum_len(&["012", "", "34"])); assert_eq!(5, sum_len(&["01", "2", "34", ""])); assert_eq!(5, sum_len(&[s])); } #[test] fn test_str_from_utf8() { let xs = b"hello"; assert_eq!(from_utf8(xs), Ok("hello")); let xs = "ศไทย中华Việt Nam".as_bytes(); assert_eq!(from_utf8(xs), Ok("ศไทย中华Việt Nam")); let xs = b"hello\xFF"; assert!(from_utf8(xs).is_err()); } #[test] fn test_pattern_deref_forward() { let data = "aabcdaa"; assert!(data.contains("bcd")); assert!(data.contains(&"bcd")); assert!(data.contains(&"bcd".to_string())); } #[test] fn test_empty_match_indices() { let data = "aä中!"; let vec: Vec<_> = data.match_indices("").collect(); assert_eq!(vec, [(0, ""), (1, ""), (3, ""), (6, ""), (7, "")]); } #[test] fn test_bool_from_str() { assert_eq!("true".parse().ok(), Some(true)); assert_eq!("false".parse().ok(), Some(false)); assert_eq!("not even a boolean".parse::().ok(), None); } fn check_contains_all_substrings(s: &str) { assert!(s.contains("")); for i in 0..s.len() { for j in i + 1..=s.len() { assert!(s.contains(&s[i..j])); } } } #[test] #[cfg_attr(miri, ignore)] // Miri is too slow fn strslice_issue_16589() { assert!("bananas".contains("nana")); // prior to the fix for #16589, x.contains("abcdabcd") returned false // test all substrings for good measure check_contains_all_substrings("012345678901234567890123456789bcdabcdabcd"); } #[test] fn strslice_issue_16878() { assert!(!"1234567ah012345678901ah".contains("hah")); assert!(!"00abc01234567890123456789abc".contains("bcabc")); } #[test] #[cfg_attr(miri, ignore)] // Miri is too slow fn test_strslice_contains() { let x = "There are moments, Jeeves, when one asks oneself, 'Do trousers matter?'"; check_contains_all_substrings(x); } #[test] fn test_rsplitn_char_iterator() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let mut split: Vec<&str> = data.rsplitn(4, ' ').collect(); split.reverse(); assert_eq!(split, ["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]); let mut split: Vec<&str> = data.rsplitn(4, |c: char| c == ' ').collect(); split.reverse(); assert_eq!(split, ["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]); // Unicode let mut split: Vec<&str> = data.rsplitn(4, 'ä').collect(); split.reverse(); assert_eq!(split, ["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]); let mut split: Vec<&str> = data.rsplitn(4, |c: char| c == 'ä').collect(); split.reverse(); assert_eq!(split, ["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]); } #[test] fn test_split_char_iterator() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let split: Vec<&str> = data.split(' ').collect(); assert_eq!(split, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]); let mut rsplit: Vec<&str> = data.split(' ').rev().collect(); rsplit.reverse(); assert_eq!(rsplit, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]); let split: Vec<&str> = data.split(|c: char| c == ' ').collect(); assert_eq!(split, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]); let mut rsplit: Vec<&str> = data.split(|c: 
char| c == ' ').rev().collect(); rsplit.reverse(); assert_eq!(rsplit, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]); // Unicode let split: Vec<&str> = data.split('ä').collect(); assert_eq!(split, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]); let mut rsplit: Vec<&str> = data.split('ä').rev().collect(); rsplit.reverse(); assert_eq!(rsplit, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]); let split: Vec<&str> = data.split(|c: char| c == 'ä').collect(); assert_eq!(split, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]); let mut rsplit: Vec<&str> = data.split(|c: char| c == 'ä').rev().collect(); rsplit.reverse(); assert_eq!(rsplit, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]); } #[test] fn test_rev_split_char_iterator_no_trailing() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let mut split: Vec<&str> = data.split('\n').rev().collect(); split.reverse(); assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb", ""]); let mut split: Vec<&str> = data.split_terminator('\n').rev().collect(); split.reverse(); assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb"]); } #[test] fn test_utf16_code_units() { assert_eq!("é\u{1F4A9}".encode_utf16().collect::<Vec<u16>>(), [0xE9, 0xD83D, 0xDCA9]) } #[test] fn starts_with_in_unicode() { assert!(!"├── Cargo.toml".starts_with("# ")); } #[test] fn starts_short_long() { assert!(!"".starts_with("##")); assert!(!"##".starts_with("####")); assert!("####".starts_with("##")); assert!(!"##ä".starts_with("####")); assert!("####ä".starts_with("##")); assert!(!"##".starts_with("####ä")); assert!("##ä##".starts_with("##ä")); assert!("".starts_with("")); assert!("ä".starts_with("")); assert!("#ä".starts_with("")); assert!("##ä".starts_with("")); assert!("ä###".starts_with("")); assert!("#ä##".starts_with("")); assert!("##ä#".starts_with("")); } #[test] fn contains_weird_cases() { assert!("* \t".contains(' ')); assert!(!"* \t".contains('?')); assert!(!"* \t".contains('\u{1F4A9}')); } #[test] fn trim_ws() { assert_eq!(" \t a \t ".trim_start_matches(|c: char| c.is_whitespace()), "a \t "); assert_eq!(" \t a \t ".trim_end_matches(|c: char| c.is_whitespace()), " \t a"); assert_eq!(" \t a \t ".trim_start_matches(|c: char| c.is_whitespace()), "a \t "); assert_eq!(" \t a \t ".trim_end_matches(|c: char| c.is_whitespace()), " \t a"); assert_eq!(" \t a \t ".trim_matches(|c: char| c.is_whitespace()), "a"); assert_eq!(" \t \t ".trim_start_matches(|c: char| c.is_whitespace()), ""); assert_eq!(" \t \t ".trim_end_matches(|c: char| c.is_whitespace()), ""); assert_eq!(" \t \t ".trim_start_matches(|c: char| c.is_whitespace()), ""); assert_eq!(" \t \t ".trim_end_matches(|c: char| c.is_whitespace()), ""); assert_eq!(" \t \t ".trim_matches(|c: char| c.is_whitespace()), ""); } #[test] fn to_lowercase() { assert_eq!("".to_lowercase(), ""); assert_eq!("AÉDžaé ".to_lowercase(), "aédžaé "); // https://github.com/rust-lang/rust/issues/26035 assert_eq!("ΑΣ".to_lowercase(), "ας"); assert_eq!("Α'Σ".to_lowercase(), "α'ς"); assert_eq!("Α''Σ".to_lowercase(), "α''ς"); assert_eq!("ΑΣ Α".to_lowercase(), "ας α"); assert_eq!("Α'Σ Α".to_lowercase(), "α'ς α"); assert_eq!("Α''Σ Α".to_lowercase(), "α''ς α"); assert_eq!("ΑΣ' Α".to_lowercase(), "ας' α"); assert_eq!("ΑΣ'' Α".to_lowercase(), "ας'' α"); assert_eq!("Α'Σ' Α".to_lowercase(), "α'ς' α"); assert_eq!("Α''Σ'' Α".to_lowercase(), "α''ς'' α"); assert_eq!("Α Σ".to_lowercase(), "α σ"); assert_eq!("Α 'Σ".to_lowercase(), "α 'σ"); assert_eq!("Α ''Σ".to_lowercase(), "α ''σ");
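// (These cases pin down the Unicode Final_Sigma rule: Σ lowercases to word-final ς only when it follows a cased letter and is not followed by one, and case-ignorable characters such as the apostrophe are skipped when checking that context.)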
assert_eq!("Σ".to_lowercase(), "σ"); assert_eq!("'Σ".to_lowercase(), "'σ"); assert_eq!("''Σ".to_lowercase(), "''σ"); assert_eq!("ΑΣΑ".to_lowercase(), "ασα"); assert_eq!("ΑΣ'Α".to_lowercase(), "ασ'α"); assert_eq!("ΑΣ''Α".to_lowercase(), "ασ''α"); } #[test] fn to_uppercase() { assert_eq!("".to_uppercase(), ""); assert_eq!("aéDžßfiᾀ".to_uppercase(), "AÉDŽSSFIἈΙ"); } #[test] fn test_into_string() { // The only way to acquire a Box in the first place is through a String, so just // test that we can round-trip between Box and String. let string = String::from("Some text goes here"); assert_eq!(string.clone().into_boxed_str().into_string(), string); } #[test] fn test_box_slice_clone() { let data = String::from("hello HELLO hello HELLO yes YES 5 中ä华!!!"); let data2 = data.clone().into_boxed_str().clone().into_string(); assert_eq!(data, data2); } #[test] fn test_cow_from() { let borrowed = "borrowed"; let owned = String::from("owned"); match (Cow::from(owned.clone()), Cow::from(borrowed)) { (Cow::Owned(o), Cow::Borrowed(b)) => assert!(o == owned && b == borrowed), _ => panic!("invalid `Cow::from`"), } } #[test] fn test_repeat() { assert_eq!("".repeat(3), ""); assert_eq!("abc".repeat(0), ""); assert_eq!("α".repeat(3), "ααα"); } mod pattern { use std::str::pattern::SearchStep::{self, Done, Match, Reject}; use std::str::pattern::{Pattern, ReverseSearcher, Searcher}; macro_rules! make_test { ($name:ident, $p:expr, $h:expr, [$($e:expr,)*]) => { #[allow(unused_imports)] mod $name { use std::str::pattern::SearchStep::{Match, Reject}; use super::{cmp_search_to_vec}; #[test] fn fwd() { cmp_search_to_vec(false, $p, $h, vec![$($e),*]); } #[test] fn bwd() { cmp_search_to_vec(true, $p, $h, vec![$($e),*]); } } } } fn cmp_search_to_vec<'a>( rev: bool, pat: impl Pattern<'a, Searcher: ReverseSearcher<'a>>, haystack: &'a str, right: Vec, ) { let mut searcher = pat.into_searcher(haystack); let mut v = vec![]; loop { match if !rev { searcher.next() } else { searcher.next_back() } { Match(a, b) => v.push(Match(a, b)), Reject(a, b) => v.push(Reject(a, b)), Done => break, } } if rev { v.reverse(); } let mut first_index = 0; let mut err = None; for (i, e) in right.iter().enumerate() { match *e { Match(a, b) | Reject(a, b) if a <= b && a == first_index => { first_index = b; } _ => { err = Some(i); break; } } } if let Some(err) = err { panic!("Input skipped range at {}", err); } if first_index != haystack.len() { panic!("Did not cover whole input"); } assert_eq!(v, right); } make_test!( str_searcher_ascii_haystack, "bb", "abbcbbd", [Reject(0, 1), Match(1, 3), Reject(3, 4), Match(4, 6), Reject(6, 7),] ); make_test!( str_searcher_ascii_haystack_seq, "bb", "abbcbbbbd", [Reject(0, 1), Match(1, 3), Reject(3, 4), Match(4, 6), Match(6, 8), Reject(8, 9),] ); make_test!( str_searcher_empty_needle_ascii_haystack, "", "abbcbbd", [ Match(0, 0), Reject(0, 1), Match(1, 1), Reject(1, 2), Match(2, 2), Reject(2, 3), Match(3, 3), Reject(3, 4), Match(4, 4), Reject(4, 5), Match(5, 5), Reject(5, 6), Match(6, 6), Reject(6, 7), Match(7, 7), ] ); make_test!( str_searcher_multibyte_haystack, " ", "├──", [Reject(0, 3), Reject(3, 6), Reject(6, 9),] ); make_test!( str_searcher_empty_needle_multibyte_haystack, "", "├──", [ Match(0, 0), Reject(0, 3), Match(3, 3), Reject(3, 6), Match(6, 6), Reject(6, 9), Match(9, 9), ] ); make_test!(str_searcher_empty_needle_empty_haystack, "", "", [Match(0, 0),]); make_test!(str_searcher_nonempty_needle_empty_haystack, "├", "", []); make_test!( char_searcher_ascii_haystack, 'b', "abbcbbd", [ Reject(0, 1), Match(1, 2), 
Match(2, 3), Reject(3, 4), Match(4, 5), Match(5, 6), Reject(6, 7), ] ); make_test!( char_searcher_multibyte_haystack, ' ', "├──", [Reject(0, 3), Reject(3, 6), Reject(6, 9),] ); make_test!( char_searcher_short_haystack, '\u{1F4A9}', "* \t", [Reject(0, 1), Reject(1, 2), Reject(2, 3),] ); } macro_rules! generate_iterator_test { { $name:ident { $( ($($arg:expr),*) -> [$($t:tt)*]; )* } with $fwd:expr, $bwd:expr; } => { #[test] fn $name() { $( { let res = vec![$($t)*]; let fwd_vec: Vec<_> = ($fwd)($($arg),*).collect(); assert_eq!(fwd_vec, res); let mut bwd_vec: Vec<_> = ($bwd)($($arg),*).collect(); bwd_vec.reverse(); assert_eq!(bwd_vec, res); } )* } }; { $name:ident { $( ($($arg:expr),*) -> [$($t:tt)*]; )* } with $fwd:expr; } => { #[test] fn $name() { $( { let res = vec![$($t)*]; let fwd_vec: Vec<_> = ($fwd)($($arg),*).collect(); assert_eq!(fwd_vec, res); } )* } } } generate_iterator_test! { double_ended_split { ("foo.bar.baz", '.') -> ["foo", "bar", "baz"]; ("foo::bar::baz", "::") -> ["foo", "bar", "baz"]; } with str::split, str::rsplit; } generate_iterator_test! { double_ended_split_terminator { ("foo;bar;baz;", ';') -> ["foo", "bar", "baz"]; } with str::split_terminator, str::rsplit_terminator; } generate_iterator_test! { double_ended_matches { ("a1b2c3", char::is_numeric) -> ["1", "2", "3"]; } with str::matches, str::rmatches; } generate_iterator_test! { double_ended_match_indices { ("a1b2c3", char::is_numeric) -> [(1, "1"), (3, "2"), (5, "3")]; } with str::match_indices, str::rmatch_indices; } generate_iterator_test! { not_double_ended_splitn { ("foo::bar::baz", 2, "::") -> ["foo", "bar::baz"]; } with str::splitn; } generate_iterator_test! { not_double_ended_rsplitn { ("foo::bar::baz", 2, "::") -> ["baz", "foo::bar"]; } with str::rsplitn; } #[test] fn different_str_pattern_forwarding_lifetimes() { use std::str::pattern::Pattern; fn foo<'a, P>(p: P) where for<'b> &'b P: Pattern<'a>, { for _ in 0..3 { "asdf".find(&p); } } foo::<&str>("x"); } #[test] fn test_str_multiline() { let a: String = "this \
is a test"
.to_string(); let b: String = "this \
is \
another \
test"
.to_string(); assert_eq!(a, "this is a test".to_string()); assert_eq!(b, "this is another test".to_string()); } #[test] fn test_str_escapes() { let x = "\\\\\
"; assert_eq!(x, r"\\"); // extraneous whitespace stripped } #[test] fn const_str_ptr() { const A: [u8; 2] = ['h' as u8, 'i' as u8]; const B: &'static [u8; 2] = &A; const C: *const u8 = B as *const u8; // Miri does not deduplicate consts (https://github.com/rust-lang/miri/issues/131) #[cfg(not(miri))] { let foo = &A as *const u8; assert_eq!(foo, C); } unsafe { assert_eq!(from_utf8_unchecked(&A), "hi"); assert_eq!(*C, A[0]); assert_eq!(*(&B[0] as *const u8), A[0]); } } #[test] fn utf8() { let yen: char = '¥'; // 0xa5 let c_cedilla: char = 'ç'; // 0xe7 let thorn: char = 'þ'; // 0xfe let y_diaeresis: char = 'ÿ'; // 0xff let pi: char = 'Π'; // 0x3a0 assert_eq!(yen as isize, 0xa5); assert_eq!(c_cedilla as isize, 0xe7); assert_eq!(thorn as isize, 0xfe); assert_eq!(y_diaeresis as isize, 0xff); assert_eq!(pi as isize, 0x3a0); assert_eq!(pi as isize, '\u{3a0}' as isize); assert_eq!('\x0a' as isize, '\n' as isize); let bhutan: String = "འབྲུག་ཡུལ།".to_string(); let japan: String = "日本".to_string(); let uzbekistan: String = "Ўзбекистон".to_string(); let austria: String = "Österreich".to_string(); let bhutan_e: String = "\u{f60}\u{f56}\u{fb2}\u{f74}\u{f42}\u{f0b}\u{f61}\u{f74}\u{f63}\u{f0d}".to_string(); let japan_e: String = "\u{65e5}\u{672c}".to_string(); let uzbekistan_e: String =
"\u{40e}\u{437}\u{431}\u{435}\u{43a}\u{438}\u{441}\u{442}\u{43e}\u{43d}".to_string(); let austria_e: String = "\u{d6}sterreich".to_string(); let oo: char = 'Ö'; assert_eq!(oo as isize, 0xd6); fn check_str_eq(a: String, b: String) { let mut i: isize = 0; for ab in a.bytes() { println!("{}", i); println!("{}", ab); let bb: u8 = b.as_bytes()[i as usize]; println!("{}", bb); assert_eq!(ab, bb); i += 1; } } check_str_eq(bhutan, bhutan_e); check_str_eq(japan, japan_e); check_str_eq(uzbekistan, uzbekistan_e); check_str_eq(austria, austria_e); } #[test] fn utf8_chars() { // Chars of 1, 2, 3, and 4 bytes let chs: Vec = vec!['e', 'é', '€', '\u{10000}']; let s: String = chs.iter().cloned().collect(); let schs: Vec = s.chars().collect(); assert_eq!(s.len(), 10); assert_eq!(s.chars().count(), 4); assert_eq!(schs.len(), 4); assert_eq!(schs.iter().cloned().collect::(), s); assert!((from_utf8(s.as_bytes()).is_ok())); // invalid prefix assert!((!from_utf8(&[0x80]).is_ok())); // invalid 2 byte prefix assert!((!from_utf8(&[0xc0]).is_ok())); assert!((!from_utf8(&[0xc0, 0x10]).is_ok())); // invalid 3 byte prefix assert!((!from_utf8(&[0xe0]).is_ok())); assert!((!from_utf8(&[0xe0, 0x10]).is_ok())); assert!((!from_utf8(&[0xe0, 0xff, 0x10]).is_ok())); // invalid 4 byte prefix assert!((!from_utf8(&[0xf0]).is_ok())); assert!((!from_utf8(&[0xf0, 0x10]).is_ok())); assert!((!from_utf8(&[0xf0, 0xff, 0x10]).is_ok())); assert!((!from_utf8(&[0xf0, 0xff, 0xff, 0x10]).is_ok())); } use std::collections::BTreeSet; #[test] fn test_hash() { use crate::hash; let mut x = BTreeSet::new(); let mut y = BTreeSet::new(); x.insert(1); x.insert(2); x.insert(3); y.insert(3); y.insert(2); y.insert(1); assert_eq!(hash(&x), hash(&y)); } use std::borrow::Cow; use std::cell::Cell; use std::collections::TryReserveError::*; use std::ops::Bound; use std::ops::Bound::*; use std::ops::RangeBounds; use std::panic; use std::str; pub trait IntoCow<'a, B: ?Sized> where B: ToOwned, { fn into_cow(self) -> Cow<'a, B>; } impl<'a> IntoCow<'a, str> for String { fn into_cow(self) -> Cow<'a, str> { Cow::Owned(self) } } impl<'a> IntoCow<'a, str> for &'a str { fn into_cow(self) -> Cow<'a, str> { Cow::Borrowed(self) } } #[test] fn test_from_str() { let owned: Option = "string".parse().ok(); assert_eq!(owned.as_ref().map(|s| &**s), Some("string")); } #[test] fn test_from_cow_str() { assert_eq!(String::from(Cow::Borrowed("string")), "string"); assert_eq!(String::from(Cow::Owned(String::from("string"))), "string"); } #[test] fn test_unsized_to_string() { let s: &str = "abc"; let _: String = (*s).to_string(); } #[test] fn test_from_utf8() { let xs = b"hello".to_vec(); assert_eq!(String::from_utf8(xs).unwrap(), String::from("hello")); let xs = "ศไทย中华Việt Nam".as_bytes().to_vec(); assert_eq!(String::from_utf8(xs).unwrap(), String::from("ศไทย中华Việt Nam")); let xs = b"hello\xFF".to_vec(); let err = String::from_utf8(xs).unwrap_err(); assert_eq!(err.as_bytes(), b"hello\xff"); let err_clone = err.clone(); assert_eq!(err, err_clone); assert_eq!(err.into_bytes(), b"hello\xff".to_vec()); assert_eq!(err_clone.utf8_error().valid_up_to(), 5); } #[test] fn test_from_utf8_lossy() { let xs = b"hello"; let ys: Cow<'_, str> = "hello".into_cow(); assert_eq!(String::from_utf8_lossy(xs), ys); let xs = "ศไทย中华Việt Nam".as_bytes(); let ys: Cow<'_, str> = "ศไทย中华Việt Nam".into_cow(); assert_eq!(String::from_utf8_lossy(xs), ys); let xs = b"Hello\xC2 There\xFF Goodbye"; assert_eq!( String::from_utf8_lossy(xs), String::from("Hello\u{FFFD} There\u{FFFD} Goodbye").into_cow() ); let xs = 
b"Hello\xC0\x80 There\xE6\x83 Goodbye"; assert_eq!( String::from_utf8_lossy(xs), String::from("Hello\u{FFFD}\u{FFFD} There\u{FFFD} Goodbye").into_cow() ); let xs = b"\xF5foo\xF5\x80bar"; assert_eq!( String::from_utf8_lossy(xs), String::from("\u{FFFD}foo\u{FFFD}\u{FFFD}bar").into_cow() ); let xs = b"\xF1foo\xF1\x80bar\xF1\x80\x80baz"; assert_eq!( String::from_utf8_lossy(xs), String::from("\u{FFFD}foo\u{FFFD}bar\u{FFFD}baz").into_cow() ); let xs = b"\xF4foo\xF4\x80bar\xF4\xBFbaz"; assert_eq!( String::from_utf8_lossy(xs), String::from("\u{FFFD}foo\u{FFFD}bar\u{FFFD}\u{FFFD}baz").into_cow() ); let xs = b"\xF0\x80\x80\x80foo\xF0\x90\x80\x80bar"; assert_eq!( String::from_utf8_lossy(xs), String::from("\u{FFFD}\u{FFFD}\u{FFFD}\u{FFFD}foo\u{10000}bar").into_cow() ); // surrogates let xs = b"\xED\xA0\x80foo\xED\xBF\xBFbar"; assert_eq!( String::from_utf8_lossy(xs), String::from("\u{FFFD}\u{FFFD}\u{FFFD}foo\u{FFFD}\u{FFFD}\u{FFFD}bar").into_cow() ); } #[test] fn test_from_utf16() { let pairs = [ ( String::from("𐍅𐌿𐌻𐍆𐌹𐌻𐌰\n"), vec![ 0xd800, 0xdf45, 0xd800, 0xdf3f, 0xd800, 0xdf3b, 0xd800, 0xdf46, 0xd800, 0xdf39, 0xd800, 0xdf3b, 0xd800, 0xdf30, 0x000a, ], ), ( String::from("𐐒𐑉𐐮𐑀𐐲𐑋 𐐏𐐲𐑍\n"), vec![ 0xd801, 0xdc12, 0xd801, 0xdc49, 0xd801, 0xdc2e, 0xd801, 0xdc40, 0xd801, 0xdc32, 0xd801, 0xdc4b, 0x0020, 0xd801, 0xdc0f, 0xd801, 0xdc32, 0xd801, 0xdc4d, 0x000a, ], ), ( String::from("𐌀𐌖𐌋𐌄𐌑𐌉·𐌌𐌄𐌕𐌄𐌋𐌉𐌑\n"), vec![ 0xd800, 0xdf00, 0xd800, 0xdf16, 0xd800, 0xdf0b, 0xd800, 0xdf04, 0xd800, 0xdf11, 0xd800, 0xdf09, 0x00b7, 0xd800, 0xdf0c, 0xd800, 0xdf04, 0xd800, 0xdf15, 0xd800, 0xdf04, 0xd800, 0xdf0b, 0xd800, 0xdf09, 0xd800, 0xdf11, 0x000a, ], ), ( String::from("𐒋𐒘𐒈𐒑𐒛𐒒 𐒕𐒓 𐒈𐒚𐒍 𐒏𐒜𐒒𐒖𐒆 𐒕𐒆\n"), vec![ 0xd801, 0xdc8b, 0xd801, 0xdc98, 0xd801, 0xdc88, 0xd801, 0xdc91, 0xd801, 0xdc9b, 0xd801, 0xdc92, 0x0020, 0xd801, 0xdc95, 0xd801, 0xdc93, 0x0020, 0xd801, 0xdc88, 0xd801, 0xdc9a, 0xd801, 0xdc8d, 0x0020, 0xd801, 0xdc8f, 0xd801, 0xdc9c, 0xd801, 0xdc92, 0xd801, 0xdc96, 0xd801, 0xdc86, 0x0020, 0xd801, 0xdc95, 0xd801, 0xdc86, 0x000a, ], ), // Issue #12318, even-numbered non-BMP planes (String::from("\u{20000}"), vec![0xD840, 0xDC00]), ]; for p in &pairs { let (s, u) = (*p).clone(); let s_as_utf16 = s.encode_utf16().collect::>(); let u_as_string = String::from_utf16(&u).unwrap(); assert!(core::char::decode_utf16(u.iter().cloned()).all(|r| r.is_ok())); assert_eq!(s_as_utf16, u); assert_eq!(u_as_string, s); assert_eq!(String::from_utf16_lossy(&u), s); assert_eq!(String::from_utf16(&s_as_utf16).unwrap(), s); assert_eq!(u_as_string.encode_utf16().collect::>(), u); } } #[test] fn test_utf16_invalid() { // completely positive cases tested above. // lead + eof assert!(String::from_utf16(&[0xD800]).is_err()); // lead + lead assert!(String::from_utf16(&[0xD800, 0xD800]).is_err()); // isolated trail assert!(String::from_utf16(&[0x0061, 0xDC00]).is_err()); // general assert!(String::from_utf16(&[0xD800, 0xd801, 0xdc8b, 0xD800]).is_err()); } #[test] fn test_from_utf16_lossy() { // completely positive cases tested above. 
// lead + eof assert_eq!(String::from_utf16_lossy(&[0xD800]), String::from("\u{FFFD}")); // lead + lead assert_eq!(String::from_utf16_lossy(&[0xD800, 0xD800]), String::from("\u{FFFD}\u{FFFD}")); // isolated trail assert_eq!(String::from_utf16_lossy(&[0x0061, 0xDC00]), String::from("a\u{FFFD}")); // general assert_eq!( String::from_utf16_lossy(&[0xD800, 0xd801, 0xdc8b, 0xD800]), String::from("\u{FFFD}𐒋\u{FFFD}") ); } #[test] fn test_push_bytes() { let mut s = String::from("ABC"); unsafe { let mv = s.as_mut_vec(); mv.extend_from_slice(&[b'D']); } assert_eq!(s, "ABCD"); } #[test] fn test_push_str() { let mut s = String::new(); s.push_str(""); assert_eq!(&s[0..], ""); s.push_str("abc"); assert_eq!(&s[0..], "abc"); s.push_str("ประเทศไทย中华Việt Nam"); assert_eq!(&s[0..], "abcประเทศไทย中华Việt Nam"); } #[test] fn test_add_assign() { let mut s = String::new(); s += ""; assert_eq!(s.as_str(), ""); s += "abc"; assert_eq!(s.as_str(), "abc"); s += "ประเทศไทย中华Việt Nam"; assert_eq!(s.as_str(), "abcประเทศไทย中华Việt Nam"); } #[test] fn test_push() { let mut data = String::from("ประเทศไทย中"); data.push('华'); data.push('b'); // 1 byte data.push('¢'); // 2 bytes data.push('€'); // 3 bytes data.push('𤭢'); // 4 bytes assert_eq!(data, "ประเทศไทย中华b¢€𤭢"); } #[test] fn test_pop() { let mut data = String::from("ประเทศไทย中华b¢€𤭢"); assert_eq!(data.pop().unwrap(), '𤭢'); // 4 bytes assert_eq!(data.pop().unwrap(), '€'); // 3 bytes assert_eq!(data.pop().unwrap(), '¢'); // 2 bytes assert_eq!(data.pop().unwrap(), 'b'); // 1 byte assert_eq!(data.pop().unwrap(), '华'); assert_eq!(data, "ประเทศไทย中"); } #[test] fn test_split_off_empty() { let orig = "Hello, world!"; let mut split = String::from(orig); let empty: String = split.split_off(orig.len()); assert!(empty.is_empty()); } #[test] #[should_panic] fn test_split_off_past_end() { let orig = "Hello, world!"; let mut split = String::from(orig); let _ = split.split_off(orig.len() + 1); } #[test] #[should_panic] fn test_split_off_mid_char() { let mut shan = String::from("山"); let _broken_mountain = shan.split_off(1); } #[test] fn test_split_off_ascii() { let mut ab = String::from("ABCD"); let orig_capacity = ab.capacity(); let cd = ab.split_off(2); assert_eq!(ab, "AB"); assert_eq!(cd, "CD"); assert_eq!(ab.capacity(), orig_capacity); } #[test] fn test_split_off_unicode() { let mut nihon = String::from("日本語"); let orig_capacity = nihon.capacity(); let go = nihon.split_off("日本".len()); assert_eq!(nihon, "日本"); assert_eq!(go, "語"); assert_eq!(nihon.capacity(), orig_capacity); } #[test] fn test_str_truncate() { let mut s = String::from("12345"); s.truncate(5); assert_eq!(s, "12345"); s.truncate(3); assert_eq!(s, "123"); s.truncate(0); assert_eq!(s, ""); let mut s = String::from("12345"); let p = s.as_ptr(); s.truncate(3); s.push_str("6"); let p_ = s.as_ptr(); assert_eq!(p_, p); } #[test] fn test_str_truncate_invalid_len() { let mut s = String::from("12345"); s.truncate(6); assert_eq!(s, "12345"); } #[test] #[should_panic] fn test_str_truncate_split_codepoint() { let mut s = String::from("\u{FC}"); // ü s.truncate(1); } #[test] fn test_str_clear() { let mut s = String::from("12345"); s.clear(); assert_eq!(s.len(), 0); assert_eq!(s, ""); } #[test] fn test_str_add() { let a = String::from("12345"); let b = a + "2"; let b = b + "2"; assert_eq!(b.len(), 7); assert_eq!(b, "1234522"); } #[test] fn remove() { let mut s = "ศไทย中华Việt Nam; foobar".to_string(); assert_eq!(s.remove(0), 'ศ'); assert_eq!(s.len(), 33); assert_eq!(s, "ไทย中华Việt Nam; foobar"); assert_eq!(s.remove(17), 'ệ'); assert_eq!(s,
"ไทย中华Vit Nam; foobar"); } #[test] #[should_panic] fn remove_bad() { "ศ".to_string().remove(1); } #[test] fn test_remove_matches() { let mut s = "abc".to_string(); s.remove_matches('b'); assert_eq!(s, "ac"); s.remove_matches('b'); assert_eq!(s, "ac"); let mut s = "abcb".to_string(); s.remove_matches('b'); assert_eq!(s, "ac"); let mut s = "ศไทย中华Việt Nam; foobarศ".to_string(); s.remove_matches('ศ'); assert_eq!(s, "ไทย中华Việt Nam; foobar"); let mut s = "".to_string(); s.remove_matches(""); assert_eq!(s, ""); let mut s = "aaaaa".to_string(); s.remove_matches('a'); assert_eq!(s, ""); } #[test] fn test_retain() { let mut s = String::from("α_β_γ"); s.retain(|_| true); assert_eq!(s, "α_β_γ"); s.retain(|c| c != '_'); assert_eq!(s, "αβγ"); s.retain(|c| c != 'β'); assert_eq!(s, "αγ"); s.retain(|c| c == 'α'); assert_eq!(s, "α"); s.retain(|_| false); assert_eq!(s, ""); let mut s = String::from("0è0"); let _ = panic::catch_unwind(panic::AssertUnwindSafe(|| { let mut count = 0; s.retain(|_| { count += 1; match count { 1 => false, 2 => true, _ => panic!(), } }); })); assert!(std::str::from_utf8(s.as_bytes()).is_ok()); } #[test] fn insert() { let mut s = "foobar".to_string(); s.insert(0, 'ệ'); assert_eq!(s, "ệfoobar"); s.insert(6, 'ย'); assert_eq!(s, "ệfooยbar"); } #[test] #[should_panic] fn insert_bad1() { "".to_string().insert(1, 't'); } #[test] #[should_panic] fn insert_bad2() { "ệ".to_string().insert(1, 't'); } #[test] fn test_slicing() { let s = "foobar".to_string(); assert_eq!("foobar", &s[..]); assert_eq!("foo", &s[..3]); assert_eq!("bar", &s[3..]); assert_eq!("oob", &s[1..4]); } #[test] fn test_simple_types() { assert_eq!(1.to_string(), "1"); assert_eq!((-1).to_string(), "-1"); assert_eq!(200.to_string(), "200"); assert_eq!(2.to_string(), "2"); assert_eq!(true.to_string(), "true"); assert_eq!(false.to_string(), "false"); assert_eq!(("hi".to_string()).to_string(), "hi"); } #[test] fn test_vectors() { let x: Vec = vec![]; assert_eq!(format!("{:?}", x), "[]"); assert_eq!(format!("{:?}", vec![1]), "[1]"); assert_eq!(format!("{:?}", vec![1, 2, 3]), "[1, 2, 3]"); assert!(format!("{:?}", vec![vec![], vec![1], vec![1, 1]]) == "[[], [1], [1, 1]]"); } #[test] fn test_from_iterator() { let s = "ศไทย中华Việt Nam".to_string(); let t = "ศไทย中华"; let u = "Việt Nam"; let a: String = s.chars().collect(); assert_eq!(s, a); let mut b = t.to_string(); b.extend(u.chars()); assert_eq!(s, b); let c: String = vec![t, u].into_iter().collect(); assert_eq!(s, c); let mut d = t.to_string(); d.extend(vec![u]); assert_eq!(s, d); } #[test] fn test_drain() { let mut s = String::from("αβγ"); assert_eq!(s.drain(2..4).collect::(), "β"); assert_eq!(s, "αγ"); let mut t = String::from("abcd"); t.drain(..0); assert_eq!(t, "abcd"); t.drain(..1); assert_eq!(t, "bcd"); t.drain(3..); assert_eq!(t, "bcd"); t.drain(..); assert_eq!(t, ""); } #[test] #[should_panic] fn test_drain_start_overflow() { let mut s = String::from("abc"); s.drain((Excluded(usize::MAX), Included(0))); } #[test] #[should_panic] fn test_drain_end_overflow() { let mut s = String::from("abc"); s.drain((Included(0), Included(usize::MAX))); } #[test] fn test_replace_range() { let mut s = "Hello, world!".to_owned(); s.replace_range(7..12, "世界"); assert_eq!(s, "Hello, 世界!"); } #[test] #[should_panic] fn test_replace_range_char_boundary() { let mut s = "Hello, 世界!".to_owned(); s.replace_range(..8, ""); } #[test] fn test_replace_range_inclusive_range() { let mut v = String::from("12345"); v.replace_range(2..=3, "789"); assert_eq!(v, "127895"); v.replace_range(1..=2, "A"); 
assert_eq!(v, "1A895"); } #[test] #[should_panic] fn test_replace_range_out_of_bounds() { let mut s = String::from("12345"); s.replace_range(5..6, "789"); } #[test] #[should_panic] fn test_replace_range_inclusive_out_of_bounds() { let mut s = String::from("12345"); s.replace_range(5..=5, "789"); } #[test] #[should_panic] fn test_replace_range_start_overflow() { let mut s = String::from("123"); s.replace_range((Excluded(usize::MAX), Included(0)), ""); } #[test] #[should_panic] fn test_replace_range_end_overflow() { let mut s = String::from("456"); s.replace_range((Included(0), Included(usize::MAX)), ""); } #[test] fn test_replace_range_empty() { let mut s = String::from("12345"); s.replace_range(1..2, ""); assert_eq!(s, "1345"); } #[test] fn test_replace_range_unbounded() { let mut s = String::from("12345"); s.replace_range(.., ""); assert_eq!(s, ""); } #[test] fn test_replace_range_evil_start_bound() { struct EvilRange(Cell); impl RangeBounds for EvilRange { fn start_bound(&self) -> Bound<&usize> { Bound::Included(if self.0.get() { &1 } else { self.0.set(true); &0 }) } fn end_bound(&self) -> Bound<&usize> { Bound::Unbounded } } let mut s = String::from("🦀"); s.replace_range(EvilRange(Cell::new(false)), ""); assert_eq!(Ok(""), str::from_utf8(s.as_bytes())); } #[test] fn test_replace_range_evil_end_bound() { struct EvilRange(Cell); impl RangeBounds for EvilRange { fn start_bound(&self) -> Bound<&usize> { Bound::Included(&0) } fn end_bound(&self) -> Bound<&usize> { Bound::Excluded(if self.0.get() { &3 } else { self.0.set(true); &4 }) } } let mut s = String::from("🦀"); s.replace_range(EvilRange(Cell::new(false)), ""); assert_eq!(Ok(""), str::from_utf8(s.as_bytes())); } #[test] fn test_extend_ref() { let mut a = "foo".to_string(); a.extend(&['b', 'a', 'r']); assert_eq!(&a, "foobar"); } #[test] fn test_into_boxed_str() { let xs = String::from("hello my name is bob"); let ys = xs.into_boxed_str(); assert_eq!(&*ys, "hello my name is bob"); } #[test] fn test_reserve_exact() { // This is all the same as test_reserve let mut s = String::new(); assert_eq!(s.capacity(), 0); s.reserve_exact(2); assert!(s.capacity() >= 2); for _i in 0..16 { s.push('0'); } assert!(s.capacity() >= 16); s.reserve_exact(16); assert!(s.capacity() >= 32); s.push('0'); s.reserve_exact(16); assert!(s.capacity() >= 33) } #[test] #[cfg_attr(miri, ignore)] // Miri does not support signalling OOM #[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc fn test_try_reserve() { // These are the interesting cases: // * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM) // * > isize::MAX should always fail // * On 16/32-bit should CapacityOverflow // * On 64-bit should OOM // * overflow may trigger when adding `len` to `cap` (in number of elements) // * overflow may trigger when multiplying `new_cap` by size_of:: (to get bytes) const MAX_CAP: usize = isize::MAX as usize; const MAX_USIZE: usize = usize::MAX; // On 16/32-bit, we check that allocations don't exceed isize::MAX, // on 64-bit, we assume the OS will give an OOM for such a ridiculous size. // Any platform that succeeds for these requests is technically broken with // ptr::offset because LLVM is the worst. 
let guards_against_isize = usize::BITS < 64; { // Note: basic stuff is checked by test_reserve let mut empty_string: String = String::new(); // Check isize::MAX doesn't count as an overflow if let Err(CapacityOverflow) = empty_string.try_reserve(MAX_CAP) { panic!("isize::MAX shouldn't trigger an overflow!"); } // Play it again, frank! (just to be sure) if let Err(CapacityOverflow) = empty_string.try_reserve(MAX_CAP) { panic!("isize::MAX shouldn't trigger an overflow!"); } if guards_against_isize { // Check isize::MAX + 1 does count as overflow if let Err(CapacityOverflow) = empty_string.try_reserve(MAX_CAP + 1) { } else { panic!("isize::MAX + 1 should trigger an overflow!") } // Check usize::MAX does count as overflow if let Err(CapacityOverflow) = empty_string.try_reserve(MAX_USIZE) { } else { panic!("usize::MAX should trigger an overflow!") } } else { // Check isize::MAX + 1 is an OOM if let Err(AllocError { .. }) = empty_string.try_reserve(MAX_CAP + 1) { } else { panic!("isize::MAX + 1 should trigger an OOM!") } // Check usize::MAX is an OOM if let Err(AllocError { .. }) = empty_string.try_reserve(MAX_USIZE) { } else { panic!("usize::MAX should trigger an OOM!") } } } { // Same basic idea, but with non-zero len let mut ten_bytes: String = String::from("0123456789"); if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if guards_against_isize { if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 9) { } else { panic!("isize::MAX + 1 should trigger an overflow!"); } } else { if let Err(AllocError { .. }) = ten_bytes.try_reserve(MAX_CAP - 9) { } else { panic!("isize::MAX + 1 should trigger an OOM!") } } // Should always overflow in the add-to-len if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_USIZE) { } else { panic!("usize::MAX should trigger an overflow!") } } } #[test] #[cfg_attr(miri, ignore)] // Miri does not support signalling OOM #[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc fn test_try_reserve_exact() { // This is exactly the same as test_try_reserve with the method changed. // See that test for comments. const MAX_CAP: usize = isize::MAX as usize; const MAX_USIZE: usize = usize::MAX; let guards_against_isize = usize::BITS < 64; { let mut empty_string: String = String::new(); if let Err(CapacityOverflow) = empty_string.try_reserve_exact(MAX_CAP) { panic!("isize::MAX shouldn't trigger an overflow!"); } if let Err(CapacityOverflow) = empty_string.try_reserve_exact(MAX_CAP) { panic!("isize::MAX shouldn't trigger an overflow!"); } if guards_against_isize { if let Err(CapacityOverflow) = empty_string.try_reserve_exact(MAX_CAP + 1) { } else { panic!("isize::MAX + 1 should trigger an overflow!") } if let Err(CapacityOverflow) = empty_string.try_reserve_exact(MAX_USIZE) { } else { panic!("usize::MAX should trigger an overflow!") } } else { if let Err(AllocError { .. }) = empty_string.try_reserve_exact(MAX_CAP + 1) { } else { panic!("isize::MAX + 1 should trigger an OOM!") } if let Err(AllocError { .. 
}) = empty_string.try_reserve_exact(MAX_USIZE) { } else { panic!("usize::MAX should trigger an OOM!") } } } { let mut ten_bytes: String = String::from("0123456789"); if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if guards_against_isize { if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 9) { } else { panic!("isize::MAX + 1 should trigger an overflow!"); } } else { if let Err(AllocError { .. }) = ten_bytes.try_reserve_exact(MAX_CAP - 9) { } else { panic!("isize::MAX + 1 should trigger an OOM!") } } if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_USIZE) { } else { panic!("usize::MAX should trigger an overflow!") } } } #[test] fn test_from_char() { assert_eq!(String::from('a'), 'a'.to_string()); let s: String = 'x'.into(); assert_eq!(s, 'x'.to_string()); } #[test] fn test_str_concat() { let a: String = "hello".to_string(); let b: String = "world".to_string(); let s: String = format!("{}{}", a, b); assert_eq!(s.as_bytes()[9], 'd' as u8); } use std::borrow::{Cow, ToOwned}; use std::ffi::{CStr, OsStr}; use std::path::Path; use std::rc::Rc; use std::sync::Arc; macro_rules! test_from_cow { ($value:ident => $($ty:ty),+) => {$( let borrowed = <$ty>::from(Cow::Borrowed($value)); let owned = <$ty>::from(Cow::Owned($value.to_owned())); assert_eq!($value, &*borrowed); assert_eq!($value, &*owned); )+}; ($value:ident : & $ty:ty) => { test_from_cow!($value => Box<$ty>, Rc<$ty>, Arc<$ty>); } } #[test] fn test_from_cow_slice() { let slice: &[i32] = &[1, 2, 3]; test_from_cow!(slice: &[i32]); } #[test] fn test_from_cow_str() { let string = "hello"; test_from_cow!(string: &str); } #[test] fn test_from_cow_c_str() { let string = CStr::from_bytes_with_nul(b"hello\0").unwrap(); test_from_cow!(string: &CStr); } #[test] fn test_from_cow_os_str() { let string = OsStr::new("hello"); test_from_cow!(string: &OsStr); } #[test] fn test_from_cow_path() { let path = Path::new("hello"); test_from_cow!(path: &Path); } #[test] fn cow_const() { // test that the methods of `Cow` are usable in a const context const COW: Cow<'_, str> = Cow::Borrowed("moo"); const IS_BORROWED: bool = COW.is_borrowed(); assert!(IS_BORROWED); const IS_OWNED: bool = COW.is_owned(); assert!(!IS_OWNED); } use std::borrow::Cow; use std::cell::Cell; use std::collections::TryReserveError::*; use std::fmt::Debug; use std::iter::InPlaceIterable; use std::mem::{size_of, swap}; use std::ops::Bound::*; use std::panic::{catch_unwind, AssertUnwindSafe}; use std::rc::Rc; use std::sync::atomic::{AtomicU32, Ordering}; use std::vec::{Drain, IntoIter}; struct DropCounter<'a> { count: &'a mut u32, } impl Drop for DropCounter<'_> { fn drop(&mut self) { *self.count += 1; } } #[test] fn test_small_vec_struct() { assert_eq!(size_of::<Vec<u8>>(), size_of::<usize>() * 3); } #[test] fn test_double_drop() { struct TwoVec<T> { x: Vec<T>, y: Vec<T>, } let (mut count_x, mut count_y) = (0, 0); { let mut tv = TwoVec { x: Vec::new(), y: Vec::new() }; tv.x.push(DropCounter { count: &mut count_x }); tv.y.push(DropCounter { count: &mut count_y }); // If Vec had a drop flag, here is where it would be zeroed. // Instead, it should rely on its internal state to prevent // doing anything significant when dropped multiple times. drop(tv.x); // Here tv goes out of scope, tv.y should be dropped, but not tv.x.
} assert_eq!(count_x, 1); assert_eq!(count_y, 1); } #[test] fn test_reserve() { let mut v = Vec::new(); assert_eq!(v.capacity(), 0); v.reserve(2); assert!(v.capacity() >= 2); for i in 0..16 { v.push(i); } assert!(v.capacity() >= 16); v.reserve(16); assert!(v.capacity() >= 32); v.push(16); v.reserve(16); assert!(v.capacity() >= 33) } #[test] fn test_zst_capacity() { assert_eq!(Vec::<()>::new().capacity(), usize::MAX); } #[test] fn test_indexing() { let v: Vec<isize> = vec![10, 20]; assert_eq!(v[0], 10); assert_eq!(v[1], 20); let mut x: usize = 0; assert_eq!(v[x], 10); assert_eq!(v[x + 1], 20); x = x + 1; assert_eq!(v[x], 20); assert_eq!(v[x - 1], 10); } #[test] fn test_debug_fmt() { let vec1: Vec<isize> = vec![]; assert_eq!("[]", format!("{:?}", vec1)); let vec2 = vec![0, 1]; assert_eq!("[0, 1]", format!("{:?}", vec2)); let slice: &[isize] = &[4, 5]; assert_eq!("[4, 5]", format!("{:?}", slice)); } #[test] fn test_push() { let mut v = vec![]; v.push(1); assert_eq!(v, [1]); v.push(2); assert_eq!(v, [1, 2]); v.push(3); assert_eq!(v, [1, 2, 3]); } #[test] fn test_extend() { let mut v = Vec::new(); let mut w = Vec::new(); v.extend(w.clone()); assert_eq!(v, &[]); v.extend(0..3); for i in 0..3 { w.push(i) } assert_eq!(v, w); v.extend(3..10); for i in 3..10 { w.push(i) } assert_eq!(v, w); v.extend(w.clone()); // specializes to `append` assert!(v.iter().eq(w.iter().chain(w.iter()))); // Zero sized types #[derive(PartialEq, Debug)] struct Foo; let mut a = Vec::new(); let b = vec![Foo, Foo]; a.extend(b); assert_eq!(a, &[Foo, Foo]); // Double drop let mut count_x = 0; { let mut x = Vec::new(); let y = vec![DropCounter { count: &mut count_x }]; x.extend(y); } assert_eq!(count_x, 1); } #[test] fn test_extend_from_slice() { let a: Vec<isize> = vec![1, 2, 3, 4, 5]; let b: Vec<isize> = vec![6, 7, 8, 9, 0]; let mut v: Vec<isize> = a; v.extend_from_slice(&b); assert_eq!(v, [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]); } #[test] fn test_extend_ref() { let mut v = vec![1, 2]; v.extend(&[3, 4, 5]); assert_eq!(v.len(), 5); assert_eq!(v, [1, 2, 3, 4, 5]); let w = vec![6, 7]; v.extend(&w); assert_eq!(v.len(), 7); assert_eq!(v, [1, 2, 3, 4, 5, 6, 7]); } #[test] fn test_slice_from_ref() { let values = vec![1, 2, 3, 4, 5]; let slice = &values[1..3]; assert_eq!(slice, [2, 3]); } #[test] fn test_slice_from_mut() { let mut values = vec![1, 2, 3, 4, 5]; { let slice = &mut values[2..]; assert!(slice == [3, 4, 5]); for p in slice { *p += 2; } } assert!(values == [1, 2, 5, 6, 7]); } #[test] fn test_slice_to_mut() { let mut values = vec![1, 2, 3, 4, 5]; { let slice = &mut values[..2]; assert!(slice == [1, 2]); for p in slice { *p += 1; } } assert!(values == [2, 3, 3, 4, 5]); } #[test] fn test_split_at_mut() { let mut values = vec![1, 2, 3, 4, 5]; { let (left, right) = values.split_at_mut(2); { let left: &[_] = left; assert!(&left[..left.len()] == &[1, 2]); } for p in left { *p += 1; } { let right: &[_] = right; assert!(&right[..right.len()] == &[3, 4, 5]); } for p in right { *p += 2; } } assert_eq!(values, [2, 3, 5, 6, 7]); } #[test] fn test_clone() { let v: Vec<i32> = vec![]; let w = vec![1, 2, 3]; assert_eq!(v, v.clone()); let z = w.clone(); assert_eq!(w, z); // they should be disjoint in memory.
assert!(w.as_ptr() != z.as_ptr()) } #[test] fn test_clone_from() { let mut v = vec![]; let three: Vec<Box<i32>> = vec![box 1, box 2, box 3]; let two: Vec<Box<i32>> = vec![box 4, box 5]; // zero, long v.clone_from(&three); assert_eq!(v, three); // equal v.clone_from(&three); assert_eq!(v, three); // long, short v.clone_from(&two); assert_eq!(v, two); // short, long v.clone_from(&three); assert_eq!(v, three) } #[test] fn test_retain() { let mut vec = vec![1, 2, 3, 4]; vec.retain(|&x| x % 2 == 0); assert_eq!(vec, [2, 4]); } #[test] fn test_retain_pred_panic_with_hole() { let v = (0..5).map(Rc::new).collect::<Vec<_>>(); catch_unwind(AssertUnwindSafe(|| { let mut v = v.clone(); v.retain(|r| match **r { 0 => true, 1 => false, 2 => true, _ => panic!(), }); })) .unwrap_err(); // Everything is dropped when predicate panicked. assert!(v.iter().all(|r| Rc::strong_count(r) == 1)); } #[test] fn test_retain_pred_panic_no_hole() { let v = (0..5).map(Rc::new).collect::<Vec<_>>(); catch_unwind(AssertUnwindSafe(|| { let mut v = v.clone(); v.retain(|r| match **r { 0 | 1 | 2 => true, _ => panic!(), }); })) .unwrap_err(); // Everything is dropped when predicate panicked. assert!(v.iter().all(|r| Rc::strong_count(r) == 1)); } #[test] fn test_retain_drop_panic() { struct Wrap(Rc<i32>); impl Drop for Wrap { fn drop(&mut self) { if *self.0 == 3 { panic!(); } } } let v = (0..5).map(|x| Rc::new(x)).collect::<Vec<_>>(); catch_unwind(AssertUnwindSafe(|| { let mut v = v.iter().map(|r| Wrap(r.clone())).collect::<Vec<_>>(); v.retain(|w| match *w.0 { 0 => true, 1 => false, 2 => true, 3 => false, // Drop panic. _ => true, }); })) .unwrap_err(); // Other elements are dropped when `drop` of one element panicked. // The panicked wrapper also has its Rc dropped. assert!(v.iter().all(|r| Rc::strong_count(r) == 1)); } #[test] fn test_dedup() { fn case(a: Vec<i32>, b: Vec<i32>) { let mut v = a; v.dedup(); assert_eq!(v, b); } case(vec![], vec![]); case(vec![1], vec![1]); case(vec![1, 1], vec![1]); case(vec![1, 2, 3], vec![1, 2, 3]); case(vec![1, 1, 2, 3], vec![1, 2, 3]); case(vec![1, 2, 2, 3], vec![1, 2, 3]); case(vec![1, 2, 3, 3], vec![1, 2, 3]); case(vec![1, 1, 2, 2, 2, 3, 3], vec![1, 2, 3]); } #[test] fn test_dedup_by_key() { fn case(a: Vec<i32>, b: Vec<i32>) { let mut v = a; v.dedup_by_key(|i| *i / 10); assert_eq!(v, b); } case(vec![], vec![]); case(vec![10], vec![10]); case(vec![10, 11], vec![10]); case(vec![10, 20, 30], vec![10, 20, 30]); case(vec![10, 11, 20, 30], vec![10, 20, 30]); case(vec![10, 20, 21, 30], vec![10, 20, 30]); case(vec![10, 20, 30, 31], vec![10, 20, 30]); case(vec![10, 11, 20, 21, 22, 30, 31], vec![10, 20, 30]); } #[test] fn test_dedup_by() { let mut vec = vec!["foo", "bar", "Bar", "baz", "bar"]; vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b)); assert_eq!(vec, ["foo", "bar", "baz", "bar"]); let mut vec = vec![("foo", 1), ("foo", 2), ("bar", 3), ("bar", 4), ("bar", 5)]; vec.dedup_by(|a, b| { a.0 == b.0 && { b.1 += a.1; true } }); assert_eq!(vec, [("foo", 3), ("bar", 12)]); } #[test] fn test_dedup_unique() { let mut v0: Vec<Box<_>> = vec![box 1, box 1, box 2, box 3]; v0.dedup(); let mut v1: Vec<Box<_>> = vec![box 1, box 2, box 2, box 3]; v1.dedup(); let mut v2: Vec<Box<_>> = vec![box 1, box 2, box 3, box 3]; v2.dedup(); // If the boxed pointers were leaked or otherwise misused, valgrind // and/or rt should raise errors.
} #[test] fn zero_sized_values() { let mut v = Vec::new(); assert_eq!(v.len(), 0); v.push(()); assert_eq!(v.len(), 1); v.push(()); assert_eq!(v.len(), 2); assert_eq!(v.pop(), Some(())); assert_eq!(v.pop(), Some(())); assert_eq!(v.pop(), None); assert_eq!(v.iter().count(), 0); v.push(()); assert_eq!(v.iter().count(), 1); v.push(()); assert_eq!(v.iter().count(), 2); for &() in &v {} assert_eq!(v.iter_mut().count(), 2); v.push(()); assert_eq!(v.iter_mut().count(), 3); v.push(()); assert_eq!(v.iter_mut().count(), 4); for &mut () in &mut v {} unsafe { v.set_len(0); } assert_eq!(v.iter_mut().count(), 0); } #[test] fn test_partition() { assert_eq!(vec![].into_iter().partition(|x: &i32| *x < 3), (vec![], vec![])); assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 4), (vec![1, 2, 3], vec![])); assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 2), (vec![1], vec![2, 3])); assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 0), (vec![], vec![1, 2, 3])); } #[test] fn test_zip_unzip() { let z1 = vec![(1, 4), (2, 5), (3, 6)]; let (left, right): (Vec<_>, Vec<_>) = z1.iter().cloned().unzip(); assert_eq!((1, 4), (left[0], right[0])); assert_eq!((2, 5), (left[1], right[1])); assert_eq!((3, 6), (left[2], right[2])); } #[test] fn test_cmp() { let x: &[isize] = &[1, 2, 3, 4, 5]; let cmp: &[isize] = &[1, 2, 3, 4, 5]; assert_eq!(&x[..], cmp); let cmp: &[isize] = &[3, 4, 5]; assert_eq!(&x[2..], cmp); let cmp: &[isize] = &[1, 2, 3]; assert_eq!(&x[..3], cmp); let cmp: &[isize] = &[2, 3, 4]; assert_eq!(&x[1..4], cmp); let x: Vec<isize> = vec![1, 2, 3, 4, 5]; let cmp: &[isize] = &[1, 2, 3, 4, 5]; assert_eq!(&x[..], cmp); let cmp: &[isize] = &[3, 4, 5]; assert_eq!(&x[2..], cmp); let cmp: &[isize] = &[1, 2, 3]; assert_eq!(&x[..3], cmp); let cmp: &[isize] = &[2, 3, 4]; assert_eq!(&x[1..4], cmp); } #[test] fn test_vec_truncate_drop() { static mut DROPS: u32 = 0; struct Elem(i32); impl Drop for Elem { fn drop(&mut self) { unsafe { DROPS += 1; } } } let mut v = vec![Elem(1), Elem(2), Elem(3), Elem(4), Elem(5)]; assert_eq!(unsafe { DROPS }, 0); v.truncate(3); assert_eq!(unsafe { DROPS }, 2); v.truncate(0); assert_eq!(unsafe { DROPS }, 5); } #[test] #[should_panic] fn test_vec_truncate_fail() { struct BadElem(i32); impl Drop for BadElem { fn drop(&mut self) { let BadElem(ref mut x) = *self; if *x == 0xbadbeef { panic!("BadElem panic: 0xbadbeef") } } } let mut v = vec![BadElem(1), BadElem(2), BadElem(0xbadbeef), BadElem(4)]; v.truncate(0); } #[test] fn test_index() { let vec = vec![1, 2, 3]; assert!(vec[1] == 2); } #[test] #[should_panic] fn test_index_out_of_bounds() { let vec = vec![1, 2, 3]; let _ = vec[3]; } #[test] #[should_panic] fn test_slice_out_of_bounds_1() { let x = vec![1, 2, 3, 4, 5]; &x[!0..]; } #[test] #[should_panic] fn test_slice_out_of_bounds_2() { let x = vec![1, 2, 3, 4, 5]; &x[..6]; } #[test] #[should_panic] fn test_slice_out_of_bounds_3() { let x = vec![1, 2, 3, 4, 5]; &x[!0..4]; } #[test] #[should_panic] fn test_slice_out_of_bounds_4() { let x = vec![1, 2, 3, 4, 5]; &x[1..6]; } #[test] #[should_panic] fn test_slice_out_of_bounds_5() { let x = vec![1, 2, 3, 4, 5]; &x[3..2]; } #[test] #[should_panic] fn test_swap_remove_empty() { let mut vec = Vec::<i32>::new(); vec.swap_remove(0); } #[test] fn test_move_items() { let vec = vec![1, 2, 3]; let mut vec2 = vec![]; for i in vec { vec2.push(i); } assert_eq!(vec2, [1, 2, 3]); } #[test] fn test_move_items_reverse() { let vec = vec![1, 2, 3]; let mut vec2 = vec![]; for i in vec.into_iter().rev() { vec2.push(i); } assert_eq!(vec2, [3, 2, 1]); } #[test] fn
test_move_items_zero_sized() { let vec = vec![(), (), ()]; let mut vec2 = vec![]; for i in vec { vec2.push(i); } assert_eq!(vec2, [(), (), ()]); } #[test] fn test_drain_empty_vec() { let mut vec: Vec<i32> = vec![]; let mut vec2: Vec<i32> = vec![]; for i in vec.drain(..) { vec2.push(i); } assert!(vec.is_empty()); assert!(vec2.is_empty()); } #[test] fn test_drain_items() { let mut vec = vec![1, 2, 3]; let mut vec2 = vec![]; for i in vec.drain(..) { vec2.push(i); } assert_eq!(vec, []); assert_eq!(vec2, [1, 2, 3]); } #[test] fn test_drain_items_reverse() { let mut vec = vec![1, 2, 3]; let mut vec2 = vec![]; for i in vec.drain(..).rev() { vec2.push(i); } assert_eq!(vec, []); assert_eq!(vec2, [3, 2, 1]); } #[test] fn test_drain_items_zero_sized() { let mut vec = vec![(), (), ()]; let mut vec2 = vec![]; for i in vec.drain(..) { vec2.push(i); } assert_eq!(vec, []); assert_eq!(vec2, [(), (), ()]); } #[test] #[should_panic] fn test_drain_out_of_bounds() { let mut v = vec![1, 2, 3, 4, 5]; v.drain(5..6); } #[test] fn test_drain_range() { let mut v = vec![1, 2, 3, 4, 5]; for _ in v.drain(4..) {} assert_eq!(v, &[1, 2, 3, 4]); let mut v: Vec<_> = (1..6).map(|x| x.to_string()).collect(); for _ in v.drain(1..4) {} assert_eq!(v, &[1.to_string(), 5.to_string()]); let mut v: Vec<_> = (1..6).map(|x| x.to_string()).collect(); for _ in v.drain(1..4).rev() {} assert_eq!(v, &[1.to_string(), 5.to_string()]); let mut v: Vec<_> = vec![(); 5]; for _ in v.drain(1..4).rev() {} assert_eq!(v, &[(), ()]); } #[test] fn test_drain_inclusive_range() { let mut v = vec!['a', 'b', 'c', 'd', 'e']; for _ in v.drain(1..=3) {} assert_eq!(v, &['a', 'e']); let mut v: Vec<_> = (0..=5).map(|x| x.to_string()).collect(); for _ in v.drain(1..=5) {} assert_eq!(v, &["0".to_string()]); let mut v: Vec<String> = (0..=5).map(|x| x.to_string()).collect(); for _ in v.drain(0..=5) {} assert_eq!(v, Vec::<String>::new()); let mut v: Vec<_> = (0..=5).map(|x| x.to_string()).collect(); for _ in v.drain(0..=3) {} assert_eq!(v, &["4".to_string(), "5".to_string()]); let mut v: Vec<_> = (0..=1).map(|x| x.to_string()).collect(); for _ in v.drain(..=0) {} assert_eq!(v, &["1".to_string()]); } #[test] fn test_drain_max_vec_size() { let mut v = Vec::<()>::with_capacity(usize::MAX); unsafe { v.set_len(usize::MAX); } for _ in v.drain(usize::MAX - 1..)
{} assert_eq!(v.len(), usize::MAX - 1); let mut v = Vec::<()>::with_capacity(usize::MAX); unsafe { v.set_len(usize::MAX); } for _ in v.drain(usize::MAX - 1..=usize::MAX - 1) {} assert_eq!(v.len(), usize::MAX - 1); } #[test] #[should_panic] fn test_drain_index_overflow() { let mut v = Vec::<()>::with_capacity(usize::MAX); unsafe { v.set_len(usize::MAX); } v.drain(0..=usize::MAX); } #[test] #[should_panic] fn test_drain_inclusive_out_of_bounds() { let mut v = vec![1, 2, 3, 4, 5]; v.drain(5..=5); } #[test] #[should_panic] fn test_drain_start_overflow() { let mut v = vec![1, 2, 3]; v.drain((Excluded(usize::MAX), Included(0))); } #[test] #[should_panic] fn test_drain_end_overflow() { let mut v = vec![1, 2, 3]; v.drain((Included(0), Included(usize::MAX))); } #[test] fn test_drain_leak() { static mut DROPS: i32 = 0; #[derive(Debug, PartialEq)] struct D(u32, bool); impl Drop for D { fn drop(&mut self) { unsafe { DROPS += 1; } if self.1 { panic!("panic in `drop`"); } } } let mut v = vec![ D(0, false), D(1, false), D(2, false), D(3, false), D(4, true), D(5, false), D(6, false), ]; catch_unwind(AssertUnwindSafe(|| { v.drain(2..=5); })) .ok(); assert_eq!(unsafe { DROPS }, 4); assert_eq!(v, vec![D(0, false), D(1, false), D(6, false),]); } #[test] fn test_splice() { let mut v = vec![1, 2, 3, 4, 5]; let a = [10, 11, 12]; v.splice(2..4, a.iter().cloned()); assert_eq!(v, &[1, 2, 10, 11, 12, 5]); v.splice(1..3, Some(20)); assert_eq!(v, &[1, 20, 11, 12, 5]); } #[test] fn test_splice_inclusive_range() { let mut v = vec![1, 2, 3, 4, 5]; let a = [10, 11, 12]; let t1: Vec<_> = v.splice(2..=3, a.iter().cloned()).collect(); assert_eq!(v, &[1, 2, 10, 11, 12, 5]); assert_eq!(t1, &[3, 4]); let t2: Vec<_> = v.splice(1..=2, Some(20)).collect(); assert_eq!(v, &[1, 20, 11, 12, 5]); assert_eq!(t2, &[2, 10]); } #[test] #[should_panic] fn test_splice_out_of_bounds() { let mut v = vec![1, 2, 3, 4, 5]; let a = [10, 11, 12]; v.splice(5..6, a.iter().cloned()); } #[test] #[should_panic] fn test_splice_inclusive_out_of_bounds() { let mut v = vec![1, 2, 3, 4, 5]; let a = [10, 11, 12]; v.splice(5..=5, a.iter().cloned()); } #[test] fn test_splice_items_zero_sized() { let mut vec = vec![(), (), ()]; let vec2 = vec![]; let t: Vec<_> = vec.splice(1..2, vec2.iter().cloned()).collect(); assert_eq!(vec, &[(), ()]); assert_eq!(t, &[()]); } #[test] fn test_splice_unbounded() { let mut vec = vec![1, 2, 3, 4, 5]; let t: Vec<_> = vec.splice(.., None).collect(); assert_eq!(vec, &[]); assert_eq!(t, &[1, 2, 3, 4, 5]); } #[test] fn test_splice_forget() { let mut v = vec![1, 2, 3, 4, 5]; let a = [10, 11, 12]; std::mem::forget(v.splice(2..4, a.iter().cloned())); assert_eq!(v, &[1, 2]); } #[test] fn test_into_boxed_slice() { let xs = vec![1, 2, 3]; let ys = xs.into_boxed_slice(); assert_eq!(&*ys, [1, 2, 3]); } #[test] fn test_append() { let mut vec = vec![1, 2, 3]; let mut vec2 = vec![4, 5, 6]; vec.append(&mut vec2); assert_eq!(vec, [1, 2, 3, 4, 5, 6]); assert_eq!(vec2, []); } #[test] fn test_split_off() { let mut vec = vec![1, 2, 3, 4, 5, 6]; let orig_capacity = vec.capacity(); let vec2 = vec.split_off(4); assert_eq!(vec, [1, 2, 3, 4]); assert_eq!(vec2, [5, 6]); assert_eq!(vec.capacity(), orig_capacity); } #[test] fn test_split_off_take_all() { let mut vec = vec![1, 2, 3, 4, 5, 6]; let orig_ptr = vec.as_ptr(); let orig_capacity = vec.capacity(); let vec2 = vec.split_off(0); assert_eq!(vec, []); assert_eq!(vec2, [1, 2, 3, 4, 5, 6]); assert_eq!(vec.capacity(), orig_capacity); assert_eq!(vec2.as_ptr(), orig_ptr); } #[test] fn test_into_iter_as_slice() 
{ let vec = vec!['a', 'b', 'c']; let mut into_iter = vec.into_iter(); assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); let _ = into_iter.next().unwrap(); assert_eq!(into_iter.as_slice(), &['b', 'c']); let _ = into_iter.next().unwrap(); let _ = into_iter.next().unwrap(); assert_eq!(into_iter.as_slice(), &[]); } #[test] fn test_into_iter_as_mut_slice() { let vec = vec!['a', 'b', 'c']; let mut into_iter = vec.into_iter(); assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); into_iter.as_mut_slice()[0] = 'x'; into_iter.as_mut_slice()[1] = 'y'; assert_eq!(into_iter.next().unwrap(), 'x'); assert_eq!(into_iter.as_slice(), &['y', 'c']); } #[test] fn test_into_iter_debug() { let vec = vec!['a', 'b', 'c']; let into_iter = vec.into_iter(); let debug = format!("{:?}", into_iter); assert_eq!(debug, "IntoIter(['a', 'b', 'c'])"); } #[test] fn test_into_iter_count() { assert_eq!(vec![1, 2, 3].into_iter().count(), 3); } #[test] fn test_into_iter_clone() { fn iter_equal<I: Iterator<Item = i32>>(it: I, slice: &[i32]) { let v: Vec<i32> = it.collect(); assert_eq!(&v[..], slice); } let mut it = vec![1, 2, 3].into_iter(); iter_equal(it.clone(), &[1, 2, 3]); assert_eq!(it.next(), Some(1)); let mut it = it.rev(); iter_equal(it.clone(), &[3, 2]); assert_eq!(it.next(), Some(3)); iter_equal(it.clone(), &[2]); assert_eq!(it.next(), Some(2)); iter_equal(it.clone(), &[]); assert_eq!(it.next(), None); } #[test] fn test_into_iter_leak() { static mut DROPS: i32 = 0; struct D(bool); impl Drop for D { fn drop(&mut self) { unsafe { DROPS += 1; } if self.0 { panic!("panic in `drop`"); } } } let v = vec![D(false), D(true), D(false)]; catch_unwind(move || drop(v.into_iter())).ok(); assert_eq!(unsafe { DROPS }, 3); } #[test] fn test_from_iter_specialization() { let src: Vec<usize> = vec![0usize; 1]; let srcptr = src.as_ptr(); let sink = src.into_iter().collect::<Vec<_>>(); let sinkptr = sink.as_ptr(); assert_eq!(srcptr, sinkptr); } #[test] fn test_from_iter_partially_drained_in_place_specialization() { let src: Vec<usize> = vec![0usize; 10]; let srcptr = src.as_ptr(); let mut iter = src.into_iter(); iter.next(); iter.next(); let sink = iter.collect::<Vec<_>>(); let sinkptr = sink.as_ptr(); assert_eq!(srcptr, sinkptr); } #[test] fn test_from_iter_specialization_with_iterator_adapters() { fn assert_in_place_trait<T: InPlaceIterable>(_: &T) {} let src: Vec<usize> = vec![0usize; 256]; let srcptr = src.as_ptr(); let iter = src .into_iter() .enumerate() .map(|i| i.0 + i.1) .zip(std::iter::repeat(1usize)) .map(|(a, b)| a + b) .map_while(Option::Some) .peekable() .skip(1) .map(|e| if e != usize::MAX { Ok(std::num::NonZeroUsize::new(e)) } else { Err(()) }); assert_in_place_trait(&iter); let sink = iter.collect::<Result<Vec<_>, _>>().unwrap(); let sinkptr = sink.as_ptr(); assert_eq!(srcptr, sinkptr as *const usize); } #[test] fn test_from_iter_specialization_head_tail_drop() { let drop_count: Vec<_> = (0..=2).map(|_| Rc::new(())).collect(); let src: Vec<_> = drop_count.iter().cloned().collect(); let srcptr = src.as_ptr(); let iter = src.into_iter(); let sink: Vec<_> = iter.skip(1).take(1).collect(); let sinkptr = sink.as_ptr(); assert_eq!(srcptr, sinkptr, "specialization was applied"); assert_eq!(Rc::strong_count(&drop_count[0]), 1, "front was dropped"); assert_eq!(Rc::strong_count(&drop_count[1]), 2, "one element was collected"); assert_eq!(Rc::strong_count(&drop_count[2]), 1, "tail was dropped"); assert_eq!(sink.len(), 1); } #[test] fn test_from_iter_specialization_panic_during_iteration_drops() { let drop_count: Vec<_> = (0..=2).map(|_| Rc::new(())).collect(); let src: Vec<_> = drop_count.iter().cloned().collect(); let iter =
#[test] fn test_from_iter_specialization_panic_during_iteration_drops() { let drop_count: Vec<_> = (0..=2).map(|_| Rc::new(())).collect(); let src: Vec<_> = drop_count.iter().cloned().collect(); let iter = src.into_iter(); let _ = std::panic::catch_unwind(AssertUnwindSafe(|| { let _ = iter .enumerate() .filter_map(|(i, e)| { if i == 1 { std::panic!("aborting iteration"); } Some(e) }) .collect::<Vec<_>>(); })); assert!( drop_count.iter().map(Rc::strong_count).all(|count| count == 1), "all items were dropped once" ); }
#[test] fn test_from_iter_specialization_panic_during_drop_leaks() { static mut DROP_COUNTER: usize = 0; #[derive(Debug)] enum Droppable { DroppedTwice(Box<i32>), PanicOnDrop, } impl Drop for Droppable { fn drop(&mut self) { match self { Droppable::DroppedTwice(_) => { unsafe { DROP_COUNTER += 1; } println!("Dropping!") } Droppable::PanicOnDrop => { if !std::thread::panicking() { panic!(); } } } } } let mut to_free: *mut Droppable = core::ptr::null_mut(); let mut cap = 0; let _ = std::panic::catch_unwind(AssertUnwindSafe(|| { let mut v = vec![Droppable::DroppedTwice(Box::new(123)), Droppable::PanicOnDrop]; to_free = v.as_mut_ptr(); cap = v.capacity(); let _ = v.into_iter().take(0).collect::<Vec<_>>(); })); assert_eq!(unsafe { DROP_COUNTER }, 1);
// clean up the leak to keep miri happy
unsafe { drop(Vec::from_raw_parts(to_free, 0, cap)); } }
#[test] fn test_cow_from() { let borrowed: &[_] = &["borrowed", "(slice)"]; let owned = vec!["owned", "(vec)"]; match (Cow::from(owned.clone()), Cow::from(borrowed)) { (Cow::Owned(o), Cow::Borrowed(b)) => assert!(o == owned && b == borrowed), _ => panic!("invalid `Cow::from`"), } }
#[test] fn test_from_cow() { let borrowed: &[_] = &["borrowed", "(slice)"]; let owned = vec!["owned", "(vec)"]; assert_eq!(Vec::from(Cow::Borrowed(borrowed)), vec!["borrowed", "(slice)"]); assert_eq!(Vec::from(Cow::Owned(owned)), vec!["owned", "(vec)"]); }
#[allow(dead_code)] fn assert_covariance() { fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> { d } fn into_iter<'new>(i: IntoIter<&'static str>) -> IntoIter<&'new str> { i } }
#[test] fn from_into_inner() { let vec = vec![1, 2, 3]; let ptr = vec.as_ptr(); let vec = vec.into_iter().collect::<Vec<_>>(); assert_eq!(vec, [1, 2, 3]); assert_eq!(vec.as_ptr(), ptr); let ptr = &vec[1] as *const _; let mut it = vec.into_iter(); it.next().unwrap(); let vec = it.collect::<Vec<_>>(); assert_eq!(vec, [2, 3]); assert!(ptr != vec.as_ptr()); }
#[test] fn overaligned_allocations() { #[repr(align(256))] struct Foo(usize); let mut v = vec![Foo(273)]; for i in 0..0x1000 { v.reserve_exact(i); assert!(v[0].0 == 273); assert!(v.as_ptr() as usize & 0xff == 0); v.shrink_to_fit(); assert!(v[0].0 == 273); assert!(v.as_ptr() as usize & 0xff == 0); } }
#[test] fn drain_filter_empty() { let mut vec: Vec<i32> = vec![]; { let mut iter = vec.drain_filter(|_| true); assert_eq!(iter.size_hint(), (0, Some(0))); assert_eq!(iter.next(), None); assert_eq!(iter.size_hint(), (0, Some(0))); assert_eq!(iter.next(), None); assert_eq!(iter.size_hint(), (0, Some(0))); } assert_eq!(vec.len(), 0); assert_eq!(vec, vec![]); }
#[test] fn drain_filter_zst() { let mut vec = vec![(), (), (), (), ()]; let initial_len = vec.len(); let mut count = 0; { let mut iter = vec.drain_filter(|_| true); assert_eq!(iter.size_hint(), (0, Some(initial_len))); while let Some(_) = iter.next() { count += 1; assert_eq!(iter.size_hint(), (0, Some(initial_len - count))); } assert_eq!(iter.size_hint(), (0, Some(0))); assert_eq!(iter.next(), None); assert_eq!(iter.size_hint(), (0, Some(0))); } assert_eq!(count, initial_len); assert_eq!(vec.len(), 0); assert_eq!(vec, vec![]); }
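// Editor's illustrative sketch of the compile-time pattern behind
// `assert_covariance` above: a type covariant in its lifetime lets a
// longer-lived value coerce to a shorter-lived one. The body is trivial;
// the point is that the inner function type-checks at all.
#[test] fn demo_covariance_compiles() {
    fn shorten<'new>(s: &'static [&'static str]) -> &'new [&'new str] { s }
    assert_eq!(shorten(&["hi"])[0], "hi");
}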
#[test] fn drain_filter_false() { let mut vec = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; let initial_len = vec.len(); let mut count = 0; { let mut iter = vec.drain_filter(|_| false); assert_eq!(iter.size_hint(), (0, Some(initial_len))); for _ in iter.by_ref() { count += 1; } assert_eq!(iter.size_hint(), (0, Some(0))); assert_eq!(iter.next(), None); assert_eq!(iter.size_hint(), (0, Some(0))); } assert_eq!(count, 0); assert_eq!(vec.len(), initial_len); assert_eq!(vec, vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); }
#[test] fn drain_filter_true() { let mut vec = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; let initial_len = vec.len(); let mut count = 0; { let mut iter = vec.drain_filter(|_| true); assert_eq!(iter.size_hint(), (0, Some(initial_len))); while let Some(_) = iter.next() { count += 1; assert_eq!(iter.size_hint(), (0, Some(initial_len - count))); } assert_eq!(iter.size_hint(), (0, Some(0))); assert_eq!(iter.next(), None); assert_eq!(iter.size_hint(), (0, Some(0))); } assert_eq!(count, initial_len); assert_eq!(vec.len(), 0); assert_eq!(vec, vec![]); }
#[test] fn drain_filter_complex() {
{ // [+xxx++++++xxxxx++++x+x++]
let mut vec = vec![ 1, 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, 39, ]; let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>(); assert_eq!(removed.len(), 10); assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]); assert_eq!(vec.len(), 14); assert_eq!(vec, vec![1, 7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]); }
{ // [xxx++++++xxxxx++++x+x++]
let mut vec = vec![ 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, 39, ]; let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>(); assert_eq!(removed.len(), 10); assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]); assert_eq!(vec.len(), 13); assert_eq!(vec, vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]); }
{ // [xxx++++++xxxxx++++x+x]
let mut vec = vec![2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36]; let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>(); assert_eq!(removed.len(), 10); assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]); assert_eq!(vec.len(), 11); assert_eq!(vec, vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35]); }
{ // [xxxxxxxxxx+++++++++++]
let mut vec = vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19]; let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>(); assert_eq!(removed.len(), 10); assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]); assert_eq!(vec.len(), 10); assert_eq!(vec, vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]); }
{ // [+++++++++++xxxxxxxxxx]
let mut vec = vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20]; let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>(); assert_eq!(removed.len(), 10); assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]); assert_eq!(vec.len(), 10); assert_eq!(vec, vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]); } }
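// Editor's illustrative sketch (hypothetical `demo_*` test) of the
// `drain_filter` contract the tests above exercise under the nightly
// `drain_filter` feature: matching elements are yielded and removed, the
// rest stay in place, and relative order is preserved on both sides.
#[test] fn demo_drain_filter_partitions() {
    let mut v = vec![1, 2, 3, 4, 5, 6];
    let evens: Vec<_> = v.drain_filter(|x| *x % 2 == 0).collect();
    assert_eq!(evens, [2, 4, 6]);
    assert_eq!(v, [1, 3, 5]);
}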
index: {}", c.index); } // Verify that if the filter could panic again on another element // that it would not cause a double panic and all elements of the // vec would still be dropped exactly once. if c.index == 4 { panic!("panic at index: {}", c.index); } c.index < 6 }; let drain = data.drain_filter(filter); // NOTE: The DrainFilter is explicitly consumed drain.for_each(drop); }); let drop_counts = drop_counts.lock().unwrap(); assert_eq!(check_count, drop_counts.len()); for (index, count) in drop_counts.iter().cloned().enumerate() { assert_eq!(1, count, "unexpected drop count at index: {} (count: {})", index, count); } } // FIXME: Re-enable emscripten once it can catch panics #[test] #[cfg(not(target_os = "emscripten"))] fn drain_filter_unconsumed_panic() { use std::rc::Rc; use std::sync::Mutex; struct Check { index: usize, drop_counts: Rc>>, } impl Drop for Check { fn drop(&mut self) { self.drop_counts.lock().unwrap()[self.index] += 1; println!("drop: {}", self.index); } } let check_count = 10; let drop_counts = Rc::new(Mutex::new(vec![0_usize; check_count])); let mut data: Vec = (0..check_count) .map(|index| Check { index, drop_counts: Rc::clone(&drop_counts) }) .collect(); let _ = std::panic::catch_unwind(move || { let filter = |c: &mut Check| { if c.index == 2 { panic!("panic at index: {}", c.index); } // Verify that if the filter could panic again on another element // that it would not cause a double panic and all elements of the // vec would still be dropped exactly once. if c.index == 4 { panic!("panic at index: {}", c.index); } c.index < 6 }; let _drain = data.drain_filter(filter); // NOTE: The DrainFilter is dropped without being consumed }); let drop_counts = drop_counts.lock().unwrap(); assert_eq!(check_count, drop_counts.len()); for (index, count) in drop_counts.iter().cloned().enumerate() { assert_eq!(1, count, "unexpected drop count at index: {} (count: {})", index, count); } } #[test] fn drain_filter_unconsumed() { let mut vec = vec![1, 2, 3, 4]; let drain = vec.drain_filter(|&mut x| x % 2 != 0); drop(drain); assert_eq!(vec, [2, 4]); } #[test] fn test_reserve_exact() { // This is all the same as test_reserve let mut v = Vec::new(); assert_eq!(v.capacity(), 0); v.reserve_exact(2); assert!(v.capacity() >= 2); for i in 0..16 { v.push(i); } assert!(v.capacity() >= 16); v.reserve_exact(16); assert!(v.capacity() >= 32); v.push(16); v.reserve_exact(16); assert!(v.capacity() >= 33) } #[test] #[cfg_attr(miri, ignore)] // Miri does not support signalling OOM #[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc fn test_try_reserve() { // These are the interesting cases: // * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM) // * > isize::MAX should always fail // * On 16/32-bit should CapacityOverflow // * On 64-bit should OOM // * overflow may trigger when adding `len` to `cap` (in number of elements) // * overflow may trigger when multiplying `new_cap` by size_of:: (to get bytes) const MAX_CAP: usize = isize::MAX as usize; const MAX_USIZE: usize = usize::MAX; // On 16/32-bit, we check that allocations don't exceed isize::MAX, // on 64-bit, we assume the OS will give an OOM for such a ridiculous size. // Any platform that succeeds for these requests is technically broken with // ptr::offset because LLVM is the worst. 
#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve() {
// These are the interesting cases:
// * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM)
// * > isize::MAX should always fail
//   * On 16/32-bit should CapacityOverflow
//   * On 64-bit should OOM
// * overflow may trigger when adding `len` to `cap` (in number of elements)
// * overflow may trigger when multiplying `new_cap` by size_of::<T>() (to get bytes)
const MAX_CAP: usize = isize::MAX as usize; const MAX_USIZE: usize = usize::MAX;
// On 16/32-bit, we check that allocations don't exceed isize::MAX,
// on 64-bit, we assume the OS will give an OOM for such a ridiculous size.
// Any platform that succeeds for these requests is technically broken with
// ptr::offset because LLVM is the worst.
let guards_against_isize = usize::BITS < 64;
{
// Note: basic stuff is checked by test_reserve
let mut empty_bytes: Vec<u8> = Vec::new();
// Check isize::MAX doesn't count as an overflow
if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP) { panic!("isize::MAX shouldn't trigger an overflow!"); }
// Play it again, frank! (just to be sure)
if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP) { panic!("isize::MAX shouldn't trigger an overflow!"); }
if guards_against_isize {
// Check isize::MAX + 1 does count as overflow
if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP + 1) { } else { panic!("isize::MAX + 1 should trigger an overflow!") }
// Check usize::MAX does count as overflow
if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) { } else { panic!("usize::MAX should trigger an overflow!") }
} else {
// Check isize::MAX + 1 is an OOM
if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_CAP + 1) { } else { panic!("isize::MAX + 1 should trigger an OOM!") }
// Check usize::MAX is an OOM
if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_USIZE) { } else { panic!("usize::MAX should trigger an OOM!") }
} }
{
// Same basic idea, but with non-zero len
let mut ten_bytes: Vec<u8> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); }
if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); }
if guards_against_isize { if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 9) { } else { panic!("isize::MAX + 1 should trigger an overflow!"); } } else { if let Err(AllocError { .. }) = ten_bytes.try_reserve(MAX_CAP - 9) { } else { panic!("isize::MAX + 1 should trigger an OOM!") } }
// Should always overflow in the add-to-len
if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_USIZE) { } else { panic!("usize::MAX should trigger an overflow!") }
}
{
// Same basic idea, but with interesting type size
let mut ten_u32s: Vec<u32> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); }
if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); }
if guards_against_isize { if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) { } else { panic!("isize::MAX + 1 should trigger an overflow!"); } } else { if let Err(AllocError { .. }) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) { } else { panic!("isize::MAX + 1 should trigger an OOM!") } }
// Should fail in the mul-by-size
if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_USIZE - 20) { } else { panic!("usize::MAX should trigger an overflow!"); }
} }
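// Editor's illustrative sketch (hypothetical `demo_*` test) of the fallible
// allocation API stressed by the surrounding tests: a small request
// succeeds, while `usize::MAX` elements must fail with an error instead of
// aborting the process.
#[test] fn demo_try_reserve_reports_failure() {
    let mut v: Vec<u8> = Vec::new();
    assert!(v.try_reserve(10).is_ok());
    assert!(v.try_reserve(usize::MAX).is_err());
}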
#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve_exact() {
// This is exactly the same as test_try_reserve with the method changed.
// See that test for comments.
const MAX_CAP: usize = isize::MAX as usize; const MAX_USIZE: usize = usize::MAX; let guards_against_isize = size_of::<usize>() < 8;
{ let mut empty_bytes: Vec<u8> = Vec::new(); if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP) { panic!("isize::MAX shouldn't trigger an overflow!"); } if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP) { panic!("isize::MAX shouldn't trigger an overflow!"); } if guards_against_isize { if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP + 1) { } else { panic!("isize::MAX + 1 should trigger an overflow!") } if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_USIZE) { } else { panic!("usize::MAX should trigger an overflow!") } } else { if let Err(AllocError { .. }) = empty_bytes.try_reserve_exact(MAX_CAP + 1) { } else { panic!("isize::MAX + 1 should trigger an OOM!") } if let Err(AllocError { .. }) = empty_bytes.try_reserve_exact(MAX_USIZE) { } else { panic!("usize::MAX should trigger an OOM!") } } }
{ let mut ten_bytes: Vec<u8> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if guards_against_isize { if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 9) { } else { panic!("isize::MAX + 1 should trigger an overflow!"); } } else { if let Err(AllocError { .. }) = ten_bytes.try_reserve_exact(MAX_CAP - 9) { } else { panic!("isize::MAX + 1 should trigger an OOM!") } } if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_USIZE) { } else { panic!("usize::MAX should trigger an overflow!") } }
{ let mut ten_u32s: Vec<u32> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if guards_against_isize { if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) { } else { panic!("isize::MAX + 1 should trigger an overflow!"); } } else { if let Err(AllocError { .. }) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) { } else { panic!("isize::MAX + 1 should trigger an OOM!") } } if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_USIZE - 20) { } else { panic!("usize::MAX should trigger an overflow!") } } }
#[test] fn test_stable_pointers() {
/// Pull an element from the iterator, then drop it.
/// Useful to cover both the `next` and `drop` paths of an iterator.
fn next_then_drop<I: Iterator>(mut i: I) { i.next().unwrap(); drop(i); }
// Test that, if we reserved enough space, adding and removing elements does not
// invalidate references into the vector (such as `v0`). This test also
// runs in Miri, which would detect such problems.
// Note that this test does *not* constitute a stable guarantee that all these functions do not
// reallocate! Only what is explicitly documented in the "Guarantees" section of `Vec`'s
// API documentation is stably guaranteed.
let mut v = Vec::with_capacity(128); v.push(13);
// Laundering the lifetime -- we take care that `v` does not reallocate, so that's okay.
let v0 = &mut v[0]; let v0 = unsafe { &mut *(v0 as *mut _) };
// Now do a bunch of things and occasionally use `v0` again to assert it is still valid.
// Pushing/inserting and popping/removing
v.push(1); v.push(2); v.insert(1, 1); assert_eq!(*v0, 13); v.remove(1); v.pop().unwrap(); assert_eq!(*v0, 13); v.push(1); v.swap_remove(1); assert_eq!(v.len(), 2);
v.swap_remove(1); // swap_remove the last element
assert_eq!(*v0, 13);
// Appending
v.append(&mut vec![27, 19]); assert_eq!(*v0, 13);
// Extending
v.extend_from_slice(&[1, 2]);
v.extend(&[1, 2]); // `slice::Iter` (with `T: Copy`) specialization
v.extend(vec![2, 3]); // `vec::IntoIter` specialization
v.extend(std::iter::once(3)); // `TrustedLen` specialization
v.extend(std::iter::empty::<i32>()); // `TrustedLen` specialization with empty iterator
v.extend(std::iter::once(3).filter(|_| true)); // base case
v.extend(std::iter::once(&3)); // `cloned` specialization
assert_eq!(*v0, 13);
// Truncation
v.truncate(2); assert_eq!(*v0, 13);
// Resizing
v.resize_with(v.len() + 10, || 42); assert_eq!(*v0, 13); v.resize_with(2, || panic!()); assert_eq!(*v0, 13);
// No-op reservation
v.reserve(32); v.reserve_exact(32); assert_eq!(*v0, 13);
// Partial draining
v.resize_with(10, || 42); next_then_drop(v.drain(5..)); assert_eq!(*v0, 13);
// Splicing
v.resize_with(10, || 42);
next_then_drop(v.splice(5.., vec![1, 2, 3, 4, 5])); // empty tail after range
assert_eq!(*v0, 13);
next_then_drop(v.splice(5..8, vec![1])); // replacement is smaller than original range
assert_eq!(*v0, 13);
next_then_drop(v.splice(5..6, vec![1; 10].into_iter().filter(|_| true))); // lower bound not exact
assert_eq!(*v0, 13);
// spare_capacity_mut
v.spare_capacity_mut(); assert_eq!(*v0, 13);
// Smoke test that would fire even outside Miri if an actual relocation happened.
*v0 -= 13; assert_eq!(v[0], 0); }
// https://github.com/rust-lang/rust/pull/49496 introduced specialization based on:
//
// ```
// unsafe impl<T: ?Sized> IsZero for *mut T {
//     fn is_zero(&self) -> bool {
//         (*self).is_null()
//     }
// }
// ```
//
// … to call `RawVec::with_capacity_zeroed` for creating `Vec<*mut T>`,
// which is incorrect for fat pointers since `<*mut T>::is_null` only looks at the data component.
// That is, a fat pointer can be “null” without being made entirely of zero bits.
#[test] fn vec_macro_repeating_null_raw_fat_pointer() { let raw_dyn = &mut (|| ()) as &mut dyn Fn() as *mut dyn Fn(); let vtable = dbg!(ptr_metadata(raw_dyn)); let null_raw_dyn = ptr_from_raw_parts(std::ptr::null_mut(), vtable); assert!(null_raw_dyn.is_null()); let vec = vec![null_raw_dyn; 1]; dbg!(ptr_metadata(vec[0])); assert!(vec[0] == null_raw_dyn);
// Polyfill for https://github.com/rust-lang/rfcs/pull/2580
fn ptr_metadata(ptr: *mut dyn Fn()) -> *mut () { unsafe { std::mem::transmute::<*mut dyn Fn(), DynRepr>(ptr).vtable } }
fn ptr_from_raw_parts(data: *mut (), vtable: *mut ()) -> *mut dyn Fn() { unsafe { std::mem::transmute::<DynRepr, *mut dyn Fn()>(DynRepr { data, vtable }) } }
#[repr(C)] struct DynRepr { data: *mut (), vtable: *mut (), } }
// This test will likely fail if you change the capacities used in
// `RawVec::grow_amortized`.
#[test] fn test_push_growth_strategy() {
// If the element size is 1, we jump from 0 to 8, then double.
{ let mut v1: Vec<u8> = vec![]; assert_eq!(v1.capacity(), 0); for _ in 0..8 { v1.push(0); assert_eq!(v1.capacity(), 8); } for _ in 8..16 { v1.push(0); assert_eq!(v1.capacity(), 16); } for _ in 16..32 { v1.push(0); assert_eq!(v1.capacity(), 32); } for _ in 32..64 { v1.push(0); assert_eq!(v1.capacity(), 64); } }
// If the element size is 2..=1024, we jump from 0 to 4, then double.
{ let mut v2: Vec<u16> = vec![]; let mut v1024: Vec<[u8; 1024]> = vec![]; assert_eq!(v2.capacity(), 0); assert_eq!(v1024.capacity(), 0); for _ in 0..4 { v2.push(0); v1024.push([0; 1024]); assert_eq!(v2.capacity(), 4); assert_eq!(v1024.capacity(), 4); } for _ in 4..8 { v2.push(0); v1024.push([0; 1024]); assert_eq!(v2.capacity(), 8); assert_eq!(v1024.capacity(), 8); } for _ in 8..16 { v2.push(0); v1024.push([0; 1024]); assert_eq!(v2.capacity(), 16); assert_eq!(v1024.capacity(), 16); } for _ in 16..32 { v2.push(0); v1024.push([0; 1024]); assert_eq!(v2.capacity(), 32); assert_eq!(v1024.capacity(), 32); } for _ in 32..64 { v2.push(0); v1024.push([0; 1024]); assert_eq!(v2.capacity(), 64); assert_eq!(v1024.capacity(), 64); } }
// If the element size is > 1024, we jump from 0 to 1, then double.
{ let mut v1025: Vec<[u8; 1025]> = vec![]; assert_eq!(v1025.capacity(), 0); for _ in 0..1 { v1025.push([0; 1025]); assert_eq!(v1025.capacity(), 1); } for _ in 1..2 { v1025.push([0; 1025]); assert_eq!(v1025.capacity(), 2); } for _ in 2..4 { v1025.push([0; 1025]); assert_eq!(v1025.capacity(), 4); } for _ in 4..8 { v1025.push([0; 1025]); assert_eq!(v1025.capacity(), 8); } for _ in 8..16 { v1025.push([0; 1025]); assert_eq!(v1025.capacity(), 16); } for _ in 16..32 { v1025.push([0; 1025]); assert_eq!(v1025.capacity(), 32); } for _ in 32..64 { v1025.push([0; 1025]); assert_eq!(v1025.capacity(), 64); } } }
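// Editor's illustrative sketch (hypothetical `demo_*` test): the growth
// test above pins exact jump points, which are an implementation detail of
// `RawVec::grow_amortized`; the portable takeaway is only that capacity
// grows geometrically, keeping reallocations logarithmic in the length.
#[test] fn demo_capacity_grows_geometrically() {
    let mut v: Vec<u8> = Vec::new();
    let mut jumps = Vec::new();
    for i in 0..64u8 {
        let before = v.capacity();
        v.push(i);
        if v.capacity() != before {
            jumps.push(v.capacity());
        }
    }
    // Currently records [8, 16, 32, 64]; the exact sequence may change.
    assert!(jumps.len() <= 7);
    assert!(jumps.windows(2).all(|w| w[0] < w[1]));
}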
macro_rules! generate_assert_eq_vec_and_prim { ($name:ident<$B:ident>($type:ty)) => { fn $name<A: PartialEq<$B> + Debug, $B: Debug>(a: Vec<A>, b: $type) { assert!(a == b); assert_eq!(a, b); } }; }
generate_assert_eq_vec_and_prim! { assert_eq_vec_and_slice<B>(&[B]) }
generate_assert_eq_vec_and_prim! { assert_eq_vec_and_array_3<B>([B; 3]) }
#[test] fn partialeq_vec_and_prim() { assert_eq_vec_and_slice(vec![1, 2, 3], &[1, 2, 3]); assert_eq_vec_and_array_3(vec![1, 2, 3], [1, 2, 3]); }
macro_rules! assert_partial_eq_valid { ($a2:expr, $a3:expr; $b2:expr, $b3: expr) => { assert!($a2 == $b2); assert!($a2 != $b3); assert!($a3 != $b2); assert!($a3 == $b3); assert_eq!($a2, $b2); assert_ne!($a2, $b3); assert_ne!($a3, $b2); assert_eq!($a3, $b3); }; }
#[test] fn partialeq_vec_full() { let vec2: Vec<_> = vec![1, 2]; let vec3: Vec<_> = vec![1, 2, 3]; let slice2: &[_] = &[1, 2]; let slice3: &[_] = &[1, 2, 3]; let slicemut2: &[_] = &mut [1, 2]; let slicemut3: &[_] = &mut [1, 2, 3]; let array2: [_; 2] = [1, 2]; let array3: [_; 3] = [1, 2, 3]; let arrayref2: &[_; 2] = &[1, 2]; let arrayref3: &[_; 3] = &[1, 2, 3]; assert_partial_eq_valid!(vec2,vec3; vec2,vec3); assert_partial_eq_valid!(vec2,vec3; slice2,slice3); assert_partial_eq_valid!(vec2,vec3; slicemut2,slicemut3); assert_partial_eq_valid!(slice2,slice3; vec2,vec3); assert_partial_eq_valid!(slicemut2,slicemut3; vec2,vec3); assert_partial_eq_valid!(vec2,vec3; array2,array3); assert_partial_eq_valid!(vec2,vec3; arrayref2,arrayref3); assert_partial_eq_valid!(vec2,vec3; arrayref2[..],arrayref3[..]); }
#[test] fn test_vec_cycle() { #[derive(Debug)] struct C<'a> { v: Vec<Cell<Option<&'a C<'a>>>>, } impl<'a> C<'a> { fn new() -> C<'a> { C { v: Vec::new() } } } let mut c1 = C::new(); let mut c2 = C::new(); let mut c3 = C::new();
// Push
c1.v.push(Cell::new(None)); c1.v.push(Cell::new(None)); c2.v.push(Cell::new(None)); c2.v.push(Cell::new(None)); c3.v.push(Cell::new(None)); c3.v.push(Cell::new(None));
// Set
c1.v[0].set(Some(&c2)); c1.v[1].set(Some(&c3)); c2.v[0].set(Some(&c2)); c2.v[1].set(Some(&c3)); c3.v[0].set(Some(&c1)); c3.v[1].set(Some(&c2)); }
#[test] fn test_vec_cycle_wrapped() { struct Refs<'a> { v: Vec<Cell<Option<&'a C<'a>>>>, } struct C<'a> { refs: Refs<'a>, } impl<'a> Refs<'a> { fn new() -> Refs<'a> { Refs { v: Vec::new() } } } impl<'a> C<'a> { fn new() -> C<'a> { C { refs: Refs::new() } } } let mut c1 = C::new(); let mut c2 = C::new(); let mut c3 = C::new(); c1.refs.v.push(Cell::new(None)); c1.refs.v.push(Cell::new(None)); c2.refs.v.push(Cell::new(None)); c2.refs.v.push(Cell::new(None)); c3.refs.v.push(Cell::new(None)); c3.refs.v.push(Cell::new(None)); c1.refs.v[0].set(Some(&c2)); c1.refs.v[1].set(Some(&c3)); c2.refs.v[0].set(Some(&c2)); c2.refs.v[1].set(Some(&c3)); c3.refs.v[0].set(Some(&c1)); c3.refs.v[1].set(Some(&c2)); }
#[test] fn test_zero_sized_vec_push() { const N: usize = 8; for len in 0..N { let mut tester = Vec::with_capacity(len); assert_eq!(tester.len(), 0); assert!(tester.capacity() >= len); for _ in 0..len { tester.push(()); } assert_eq!(tester.len(), len); assert_eq!(tester.iter().count(), len); tester.clear(); } }
#[test] fn test_vec_macro_repeat() { assert_eq!(vec![1; 3], vec![1, 1, 1]); assert_eq!(vec![1; 2], vec![1, 1]); assert_eq!(vec![1; 1], vec![1]); assert_eq!(vec![1; 0], vec![]);
// from_elem syntax (see RFC 832)
let el = Box::new(1); let n = 3; assert_eq!(vec![el; n], vec![Box::new(1), Box::new(1), Box::new(1)]); }
#[test] fn test_vec_swap() { let mut a: Vec<i32> = vec![0, 1, 2, 3, 4, 5, 6]; a.swap(2, 4); assert_eq!(a[2], 4); assert_eq!(a[4], 2); let mut n = 42; swap(&mut n, &mut a[0]); assert_eq!(a[0], 42); assert_eq!(n, 0); }
#[test] fn test_extend_from_within_spec() { #[derive(Copy)] struct CopyOnly; impl Clone for CopyOnly { fn clone(&self) -> Self { panic!("extend_from_within must use specialization on copy"); } } vec![CopyOnly, CopyOnly].extend_from_within(..); }
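// Editor's illustrative sketch (hypothetical `demo_*` test) of the
// `extend_from_within` behavior whose `Copy` specialization the test above
// guards: the chosen range is appended at the end, cloned for `Clone` types
// and memcpy'd for `Copy` types.
#[test] fn demo_extend_from_within_appends_range() {
    let mut v = vec![10, 20, 30, 40];
    v.extend_from_within(1..3);
    assert_eq!(v, [10, 20, 30, 40, 20, 30]);
}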
"12334567890", "c", "12334567890", "c"]); } #[test] fn test_extend_from_within_complete_rande() { let mut v = vec![0, 1, 2, 3]; v.extend_from_within(..); assert_eq!(v, [0, 1, 2, 3, 0, 1, 2, 3]); } #[test] fn test_extend_from_within_empty_rande() { let mut v = vec![0, 1, 2, 3]; v.extend_from_within(1..1); assert_eq!(v, [0, 1, 2, 3]); } #[test] #[should_panic] fn test_extend_from_within_out_of_rande() { let mut v = vec![0, 1]; v.extend_from_within(..3); } #[test] fn test_extend_from_within_zst() { let mut v = vec![(); 8]; v.extend_from_within(3..7); assert_eq!(v, [(); 12]); } #[test] fn test_extend_from_within_empty_vec() { let mut v = Vec::::new(); v.extend_from_within(..); assert_eq!(v, []); } #[test] fn test_extend_from_within() { let mut v = vec![String::from("a"), String::from("b"), String::from("c")]; v.extend_from_within(1..=2); v.extend_from_within(..=1); assert_eq!(v, ["a", "b", "c", "b", "c", "a", "b"]); } #[test] fn test_vec_dedup_by() { let mut vec: Vec = vec![1, -1, 2, 3, 1, -5, 5, -2, 2]; vec.dedup_by(|a, b| a.abs() == b.abs()); assert_eq!(vec, [1, 2, 3, 1, -5, -2]); } #[test] fn test_vec_dedup_empty() { let mut vec: Vec = Vec::new(); vec.dedup(); assert_eq!(vec, []); } #[test] fn test_vec_dedup_one() { let mut vec = vec![12i32]; vec.dedup(); assert_eq!(vec, [12]); } #[test] fn test_vec_dedup_multiple_ident() { let mut vec = vec![12, 12, 12, 12, 12, 11, 11, 11, 11, 11, 11]; vec.dedup(); assert_eq!(vec, [12, 11]); } #[test] fn test_vec_dedup_partialeq() { #[derive(Debug)] struct Foo(i32, i32); impl PartialEq for Foo { fn eq(&self, other: &Foo) -> bool { self.0 == other.0 } } let mut vec = vec![Foo(0, 1), Foo(0, 5), Foo(1, 7), Foo(1, 9)]; vec.dedup(); assert_eq!(vec, [Foo(0, 1), Foo(1, 7)]); } #[test] fn test_vec_dedup() { let mut vec: Vec = Vec::with_capacity(8); let mut template = vec.clone(); for x in 0u8..255u8 { vec.clear(); template.clear(); let iter = (0..8).map(move |bit| (x >> bit) & 1 == 1); vec.extend(iter); template.extend_from_slice(&vec); let (dedup, _) = template.partition_dedup(); vec.dedup(); assert_eq!(vec, dedup); } } #[test] fn test_vec_dedup_panicking() { #[derive(Debug)] struct Panic { drop_counter: &'static AtomicU32, value: bool, index: usize, } impl PartialEq for Panic { fn eq(&self, other: &Self) -> bool { self.value == other.value } } impl Drop for Panic { fn drop(&mut self) { let x = self.drop_counter.fetch_add(1, Ordering::SeqCst); assert!(x != 4); } } static DROP_COUNTER: AtomicU32 = AtomicU32::new(0); let expected = [ Panic { drop_counter: &DROP_COUNTER, value: false, index: 0 }, Panic { drop_counter: &DROP_COUNTER, value: false, index: 5 }, Panic { drop_counter: &DROP_COUNTER, value: true, index: 6 }, Panic { drop_counter: &DROP_COUNTER, value: true, index: 7 }, ]; let mut vec = vec![ Panic { drop_counter: &DROP_COUNTER, value: false, index: 0 }, // these elements get deduplicated Panic { drop_counter: &DROP_COUNTER, value: false, index: 1 }, Panic { drop_counter: &DROP_COUNTER, value: false, index: 2 }, Panic { drop_counter: &DROP_COUNTER, value: false, index: 3 }, Panic { drop_counter: &DROP_COUNTER, value: false, index: 4 }, // here it panics Panic { drop_counter: &DROP_COUNTER, value: false, index: 5 }, Panic { drop_counter: &DROP_COUNTER, value: true, index: 6 }, Panic { drop_counter: &DROP_COUNTER, value: true, index: 7 }, ]; let _ = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| { vec.dedup(); })); let ok = vec.iter().zip(expected.iter()).all(|(x, y)| x.index == y.index); if !ok { panic!("expected: {:?}\ngot: {:?}\n", expected, 
vec); } }
// Regression test for issue #82533
#[test] fn test_extend_from_within_panicking_clone() { struct Panic<'dc> { drop_count: &'dc AtomicU32, aaaaa: bool, } impl Clone for Panic<'_> { fn clone(&self) -> Self { if self.aaaaa { panic!("panic! at the clone"); } Self { ..*self } } } impl Drop for Panic<'_> { fn drop(&mut self) { self.drop_count.fetch_add(1, Ordering::SeqCst); } } let count = core::sync::atomic::AtomicU32::new(0); let mut vec = vec![ Panic { drop_count: &count, aaaaa: false }, Panic { drop_count: &count, aaaaa: true }, Panic { drop_count: &count, aaaaa: false }, ];
// This should clone&append one Panic{..} at the end, and then panic while
// cloning second Panic{..}. This means that `Panic::drop` should be called
// 4 times (3 for items already in vector, 1 for just appended).
//
// Previously just appended item was leaked, making drop_count = 3, instead of 4.
std::panic::catch_unwind(move || vec.extend_from_within(..)).unwrap_err(); assert_eq!(count.load(Ordering::SeqCst), 4); }
#![feature(allocator_api)]
#![feature(box_syntax)]
#![feature(cow_is_borrowed)]
#![feature(const_cow_is_borrowed)]
#![feature(drain_filter)]
#![feature(exact_size_is_empty)]
#![feature(new_uninit)]
#![feature(pattern)]
#![feature(trusted_len)]
#![feature(try_reserve)]
#![feature(unboxed_closures)]
#![feature(associated_type_bounds)]
#![feature(binary_heap_into_iter_sorted)]
#![feature(binary_heap_drain_sorted)]
#![feature(slice_ptr_get)]
#![feature(binary_heap_retain)]
#![feature(binary_heap_as_slice)]
#![feature(inplace_iteration)]
#![feature(iter_map_while)]
#![feature(vecdeque_binary_search)]
#![feature(slice_group_by)]
#![feature(slice_partition_dedup)]
#![feature(vec_spare_capacity)]
#![feature(string_remove_matches)]
use std::collections::hash_map::DefaultHasher; use std::hash::{Hash, Hasher};
mod arc; mod binary_heap; mod borrow; mod boxed; mod btree_set_hash; mod cow_str; mod fmt; mod heap; mod linked_list; mod rc; mod slice; mod str; mod string; mod vec; mod vec_deque;
fn hash<T: Hash>(t: &T) -> u64 { let mut s = DefaultHasher::new(); t.hash(&mut s); s.finish() }
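// Editor's illustrative sketch (hypothetical `demo_*` test) of the `hash`
// helper just defined: `DefaultHasher::new()` is deterministic, so equal
// values hash equally, which is what the collection tests in the modules
// above rely on.
#[test] fn demo_hash_helper_is_deterministic() {
    assert_eq!(hash(&42u32), hash(&42u32));
    assert_eq!(hash(&"abc"), hash(&"abc"));
}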
// FIXME: Instantiated functions with i128 in the signature is not supported in Emscripten.
// See https://github.com/kripken/emscripten-fastcomp/issues/169
#[cfg(not(target_os = "emscripten"))] #[test] fn test_boxed_hasher() { let ordinary_hash = hash(&5u32); let mut hasher_1 = Box::new(DefaultHasher::new()); 5u32.hash(&mut hasher_1); assert_eq!(ordinary_hash, hasher_1.finish()); let mut hasher_2 = Box::new(DefaultHasher::new()) as Box<dyn Hasher>; 5u32.hash(&mut hasher_2); assert_eq!(ordinary_hash, hasher_2.finish()); }
use std::borrow::Cow;
// check that Cow<'a, str> implements addition
#[test] fn check_cow_add_cow() { let borrowed1 = Cow::Borrowed("Hello, "); let borrowed2 = Cow::Borrowed("World!"); let borrow_empty = Cow::Borrowed(""); let owned1: Cow<'_, str> = Cow::Owned(String::from("Hi, ")); let owned2: Cow<'_, str> = Cow::Owned(String::from("Rustaceans!")); let owned_empty: Cow<'_, str> = Cow::Owned(String::new()); assert_eq!("Hello, World!", borrowed1.clone() + borrowed2.clone()); assert_eq!("Hello, Rustaceans!", borrowed1.clone() + owned2.clone()); assert_eq!("Hi, World!", owned1.clone() + borrowed2.clone()); assert_eq!("Hi, Rustaceans!", owned1.clone() + owned2.clone()); if let Cow::Owned(_) = borrowed1.clone() + borrow_empty.clone() { panic!("Adding empty strings to a borrow should not allocate"); } if let Cow::Owned(_) = borrow_empty.clone() + borrowed1.clone() { panic!("Adding empty strings to a borrow should not allocate"); } if let Cow::Owned(_) = borrowed1.clone() + owned_empty.clone() { panic!("Adding empty strings to a borrow should not allocate"); } if let Cow::Owned(_) = owned_empty.clone() + borrowed1.clone() { panic!("Adding empty strings to a borrow should not allocate"); } }
#[test] fn check_cow_add_str() { let borrowed = Cow::Borrowed("Hello, "); let borrow_empty = Cow::Borrowed(""); let owned: Cow<'_, str> = Cow::Owned(String::from("Hi, ")); let owned_empty: Cow<'_, str> = Cow::Owned(String::new()); assert_eq!("Hello, World!", borrowed.clone() + "World!"); assert_eq!("Hi, World!", owned.clone() + "World!"); if let Cow::Owned(_) = borrowed.clone() + "" { panic!("Adding empty strings to a borrow should not allocate"); } if let Cow::Owned(_) = borrow_empty.clone() + "Hello, " { panic!("Adding empty strings to a borrow should not allocate"); } if let Cow::Owned(_) = owned_empty.clone() + "Hello, " { panic!("Adding empty strings to a borrow should not allocate"); } }
#[test] fn check_cow_add_assign_cow() { let mut borrowed1 = Cow::Borrowed("Hello, "); let borrowed2 = Cow::Borrowed("World!"); let borrow_empty = Cow::Borrowed(""); let mut owned1: Cow<'_, str> = Cow::Owned(String::from("Hi, ")); let owned2: Cow<'_, str> = Cow::Owned(String::from("Rustaceans!")); let owned_empty: Cow<'_, str> = Cow::Owned(String::new()); let mut s = borrowed1.clone(); s += borrow_empty.clone(); assert_eq!("Hello, ", s); if let Cow::Owned(_) = s { panic!("Adding empty strings to a borrow should not allocate"); } let mut s = borrow_empty.clone(); s += borrowed1.clone(); assert_eq!("Hello, ", s); if let Cow::Owned(_) = s { panic!("Adding empty strings to a borrow should not allocate"); } let mut s = borrowed1.clone(); s += owned_empty.clone(); assert_eq!("Hello, ", s); if let Cow::Owned(_) = s { panic!("Adding empty strings to a borrow should not allocate"); } let mut s = owned_empty.clone(); s += borrowed1.clone(); assert_eq!("Hello, ", s); if let Cow::Owned(_) = s { panic!("Adding empty strings to a borrow should not allocate"); } owned1 += borrowed2; borrowed1 += owned2; assert_eq!("Hi, World!", owned1); assert_eq!("Hello, Rustaceans!", borrowed1); }
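// Editor's illustrative sketch (hypothetical `demo_*` test) of the rule the
// Cow addition tests encode: appending a non-empty string forces an owned
// allocation, while appending an empty one may keep the borrowed variant.
#[test] fn demo_cow_add_allocates_only_when_needed() {
    let base: Cow<'_, str> = Cow::Borrowed("Hello");
    assert!(matches!(base.clone() + "", Cow::Borrowed(_)));
    assert!(matches!(base + ", World!", Cow::Owned(_)));
}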
#[test] fn check_cow_add_assign_str() { let mut borrowed = Cow::Borrowed("Hello, "); let borrow_empty = Cow::Borrowed(""); let mut owned: Cow<'_, str> = Cow::Owned(String::from("Hi, ")); let owned_empty: Cow<'_, str> = Cow::Owned(String::new()); let mut s = borrowed.clone(); s += ""; assert_eq!("Hello, ", s); if let Cow::Owned(_) = s { panic!("Adding empty strings to a borrow should not allocate"); } let mut s = borrow_empty.clone(); s += "World!"; assert_eq!("World!", s); if let Cow::Owned(_) = s { panic!("Adding empty strings to a borrow should not allocate"); } let mut s = owned_empty.clone(); s += "World!"; assert_eq!("World!", s); if let Cow::Owned(_) = s { panic!("Adding empty strings to a borrow should not allocate"); } owned += "World!"; borrowed += "World!"; assert_eq!("Hi, World!", owned); assert_eq!("Hello, World!", borrowed); }
#[test] fn check_cow_clone_from() { let mut c1: Cow<'_, str> = Cow::Owned(String::with_capacity(25)); let s: String = "hi".to_string(); assert!(s.capacity() < 25); let c2: Cow<'_, str> = Cow::Owned(s); c1.clone_from(&c2); assert!(c1.into_owned().capacity() >= 25); let mut c3: Cow<'_, str> = Cow::Borrowed("bye"); c3.clone_from(&c2); assert_eq!(c2, c3); }
use std::any::Any; use std::cell::RefCell; use std::cmp::PartialEq; use std::iter::TrustedLen; use std::mem; use std::rc::{Rc, Weak};
#[test] fn uninhabited() { enum Void {} let mut a = Weak::<Void>::new(); a = a.clone(); assert!(a.upgrade().is_none());
let mut a: Weak<dyn Any> = a; // Unsizing
a = a.clone(); assert!(a.upgrade().is_none()); }
#[test] fn slice() { let a: Rc<[u32; 3]> = Rc::new([3, 2, 1]);
let a: Rc<[u32]> = a; // Unsizing
let b: Rc<[u32]> = Rc::from(&[3, 2, 1][..]); // Conversion
assert_eq!(a, b);
// Exercise is_dangling() with a DST
let mut a = Rc::downgrade(&a); a = a.clone(); assert!(a.upgrade().is_some()); }
#[test] fn trait_object() { let a: Rc<u32> = Rc::new(4);
let a: Rc<dyn Any> = a; // Unsizing
// Exercise is_dangling() with a DST
let mut a = Rc::downgrade(&a); a = a.clone(); assert!(a.upgrade().is_some());
let mut b = Weak::<u32>::new(); b = b.clone(); assert!(b.upgrade().is_none());
let mut b: Weak<dyn Any> = b; // Unsizing
b = b.clone(); assert!(b.upgrade().is_none()); }
#[test] fn float_nan_ne() { let x = Rc::new(f32::NAN); assert!(x != x); assert!(!(x == x)); }
#[test] fn partial_eq() { struct TestPEq(RefCell<usize>); impl PartialEq for TestPEq { fn eq(&self, other: &TestPEq) -> bool { *self.0.borrow_mut() += 1; *other.0.borrow_mut() += 1; true } } let x = Rc::new(TestPEq(RefCell::new(0))); assert!(x == x); assert!(!(x != x)); assert_eq!(*x.0.borrow(), 4); }
#[test] fn eq() { #[derive(Eq)] struct TestEq(RefCell<usize>); impl PartialEq for TestEq { fn eq(&self, other: &TestEq) -> bool { *self.0.borrow_mut() += 1; *other.0.borrow_mut() += 1; true } } let x = Rc::new(TestEq(RefCell::new(0))); assert!(x == x); assert!(!(x != x)); assert_eq!(*x.0.borrow(), 0); }
const SHARED_ITER_MAX: u16 = 100;
fn assert_trusted_len<I: TrustedLen>(_: &I) {}
#[test] fn shared_from_iter_normal() {
// Exercise the base implementation for non-`TrustedLen` iterators.
{
// `Filter` is never `TrustedLen` since we don't
// know statically how many elements will be kept:
let iter = (0..SHARED_ITER_MAX).filter(|x| x % 2 == 0).map(Box::new);
// Collecting into a `Vec` or `Rc<[T]>` should make no difference:
let vec = iter.clone().collect::<Vec<_>>(); let rc = iter.collect::<Rc<[_]>>(); assert_eq!(&*vec, &*rc);
// Clone a bit and let these get dropped.
{ let _rc_2 = rc.clone(); let _rc_3 = rc.clone(); let _rc_4 = Rc::downgrade(&_rc_3); } }
// Drop what hasn't been here.
}
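// Editor's illustrative sketch (hypothetical `demo_*` test) of the
// conversion the `shared_from_iter_*` tests cover: any iterator collects
// into `Rc<[T]>`; a `TrustedLen` bound merely lets the implementation
// allocate once up front, with no observable difference in the result.
#[test] fn demo_collect_into_rc_slice() {
    let rc: Rc<[u16]> = (0..5u16).map(|x| x * x).collect();
    assert_eq!(&*rc, &[0, 1, 4, 9, 16]);
}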
#[test] fn shared_from_iter_trustedlen_normal() {
// Exercise the `TrustedLen` implementation under normal circumstances
// where `size_hint()` matches `(_, Some(exact_len))`.
{ let iter = (0..SHARED_ITER_MAX).map(Box::new); assert_trusted_len(&iter);
// Collecting into a `Vec` or `Rc<[T]>` should make no difference:
let vec = iter.clone().collect::<Vec<_>>(); let rc = iter.collect::<Rc<[_]>>(); assert_eq!(&*vec, &*rc); assert_eq!(mem::size_of::<Box<u16>>() * SHARED_ITER_MAX as usize, mem::size_of_val(&*rc));
// Clone a bit and let these get dropped.
{ let _rc_2 = rc.clone(); let _rc_3 = rc.clone(); let _rc_4 = Rc::downgrade(&_rc_3); } }
// Drop what hasn't been here.
// Try a ZST to make sure it is handled well.
{ let iter = (0..SHARED_ITER_MAX).map(drop); let vec = iter.clone().collect::<Vec<_>>(); let rc = iter.collect::<Rc<[_]>>(); assert_eq!(&*vec, &*rc); assert_eq!(0, mem::size_of_val(&*rc)); { let _rc_2 = rc.clone(); let _rc_3 = rc.clone(); let _rc_4 = Rc::downgrade(&_rc_3); } } }
#[test] #[should_panic = "I've almost got 99 problems."] fn shared_from_iter_trustedlen_panic() {
// Exercise the `TrustedLen` implementation when `size_hint()` matches
// `(_, Some(exact_len))` but where `.next()` drops before the last iteration.
let iter = (0..SHARED_ITER_MAX).map(|val| match val { 98 => panic!("I've almost got 99 problems."), _ => Box::new(val), }); assert_trusted_len(&iter); let _ = iter.collect::<Rc<[_]>>(); panic!("I am unreachable."); }
#[test] fn shared_from_iter_trustedlen_no_fuse() {
// Exercise the `TrustedLen` implementation when `size_hint()` matches
// `(_, Some(exact_len))` but where the iterator does not behave in a fused manner.
struct Iter(std::vec::IntoIter<Option<Box<u8>>>); unsafe impl TrustedLen for Iter {} impl Iterator for Iter { fn size_hint(&self) -> (usize, Option<usize>) { (2, Some(2)) } type Item = Box<u8>; fn next(&mut self) -> Option<Self::Item> { self.0.next().flatten() } } let vec = vec![Some(Box::new(42)), Some(Box::new(24)), None, Some(Box::new(12))]; let iter = Iter(vec.into_iter()); assert_trusted_len(&iter); assert_eq!(&[Box::new(42), Box::new(24)], &*iter.collect::<Rc<[_]>>()); }
use std::cell::Cell; use std::cmp::Ordering::{self, Equal, Greater, Less}; use std::convert::identity; use std::mem; use std::panic; use std::rc::Rc; use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};
use rand::distributions::Standard; use rand::seq::SliceRandom; use rand::{thread_rng, Rng, RngCore};
fn square(n: usize) -> usize { n * n }
fn is_odd(n: &usize) -> bool { *n % 2 == 1 }
#[test] fn test_from_fn() {
// Test on-stack from_fn.
let mut v: Vec<_> = (0..3).map(square).collect(); { let v = v; assert_eq!(v.len(), 3); assert_eq!(v[0], 0); assert_eq!(v[1], 1); assert_eq!(v[2], 4); }
// Test on-heap from_fn.
v = (0..5).map(square).collect(); { let v = v; assert_eq!(v.len(), 5); assert_eq!(v[0], 0); assert_eq!(v[1], 1); assert_eq!(v[2], 4); assert_eq!(v[3], 9); assert_eq!(v[4], 16); } }
#[test] fn test_from_elem() {
// Test on-stack from_elem.
let mut v = vec![10, 10]; { let v = v; assert_eq!(v.len(), 2); assert_eq!(v[0], 10); assert_eq!(v[1], 10); }
// Test on-heap from_elem.
v = vec![20; 6]; { let v = &v[..]; assert_eq!(v[0], 20); assert_eq!(v[1], 20); assert_eq!(v[2], 20); assert_eq!(v[3], 20); assert_eq!(v[4], 20); assert_eq!(v[5], 20); } }
#[test] fn test_is_empty() { let xs: [i32; 0] = []; assert!(xs.is_empty()); assert!(![0].is_empty()); }
#[test] fn test_len_divzero() { type Z = [i8; 0]; let v0: &[Z] = &[]; let v1: &[Z] = &[[]]; let v2: &[Z] = &[[], []]; assert_eq!(mem::size_of::<Z>(), 0); assert_eq!(v0.len(), 0); assert_eq!(v1.len(), 1); assert_eq!(v2.len(), 2); }
#[test] fn test_get() { let mut a = vec![11]; assert_eq!(a.get(1), None); a = vec![11, 12]; assert_eq!(a.get(1).unwrap(), &12); a = vec![11, 12, 13]; assert_eq!(a.get(1).unwrap(), &12); }
#[test] fn test_first() { let mut a = vec![]; assert_eq!(a.first(), None); a = vec![11]; assert_eq!(a.first().unwrap(), &11); a = vec![11, 12]; assert_eq!(a.first().unwrap(), &11); }
#[test] fn test_first_mut() { let mut a = vec![]; assert_eq!(a.first_mut(), None); a = vec![11]; assert_eq!(*a.first_mut().unwrap(), 11); a = vec![11, 12]; assert_eq!(*a.first_mut().unwrap(), 11); }
#[test] fn test_split_first() { let mut a = vec![11]; let b: &[i32] = &[]; assert!(b.split_first().is_none()); assert_eq!(a.split_first(), Some((&11, b))); a = vec![11, 12]; let b: &[i32] = &[12]; assert_eq!(a.split_first(), Some((&11, b))); }
#[test] fn test_split_first_mut() { let mut a = vec![11]; let b: &mut [i32] = &mut []; assert!(b.split_first_mut().is_none()); assert!(a.split_first_mut() == Some((&mut 11, b))); a = vec![11, 12]; let b: &mut [_] = &mut [12]; assert!(a.split_first_mut() == Some((&mut 11, b))); }
#[test] fn test_split_last() { let mut a = vec![11]; let b: &[i32] = &[]; assert!(b.split_last().is_none()); assert_eq!(a.split_last(), Some((&11, b))); a = vec![11, 12]; let b: &[_] = &[11]; assert_eq!(a.split_last(), Some((&12, b))); }
#[test] fn test_split_last_mut() { let mut a = vec![11]; let b: &mut [i32] = &mut []; assert!(b.split_last_mut().is_none()); assert!(a.split_last_mut() == Some((&mut 11, b))); a = vec![11, 12]; let b: &mut [_] = &mut [11]; assert!(a.split_last_mut() == Some((&mut 12, b))); }
#[test] fn test_last() { let mut a = vec![]; assert_eq!(a.last(), None); a = vec![11]; assert_eq!(a.last().unwrap(), &11); a = vec![11, 12]; assert_eq!(a.last().unwrap(), &12); }
#[test] fn test_last_mut() { let mut a = vec![]; assert_eq!(a.last_mut(), None); a = vec![11]; assert_eq!(*a.last_mut().unwrap(), 11); a = vec![11, 12]; assert_eq!(*a.last_mut().unwrap(), 12); }
#[test] fn test_slice() {
// Test fixed length vector.
let vec_fixed = [1, 2, 3, 4]; let v_a = vec_fixed[1..vec_fixed.len()].to_vec(); assert_eq!(v_a.len(), 3); assert_eq!(v_a[0], 2); assert_eq!(v_a[1], 3); assert_eq!(v_a[2], 4);
// Test on stack.
let vec_stack: &[_] = &[1, 2, 3]; let v_b = vec_stack[1..3].to_vec(); assert_eq!(v_b.len(), 2); assert_eq!(v_b[0], 2); assert_eq!(v_b[1], 3);
// Test `Box<[T]>`
let vec_unique = vec![1, 2, 3, 4, 5, 6]; let v_d = vec_unique[1..6].to_vec(); assert_eq!(v_d.len(), 5); assert_eq!(v_d[0], 2); assert_eq!(v_d[1], 3); assert_eq!(v_d[2], 4); assert_eq!(v_d[3], 5); assert_eq!(v_d[4], 6); }
#[test] fn test_slice_from() { let vec: &[_] = &[1, 2, 3, 4]; assert_eq!(&vec[..], vec); let b: &[_] = &[3, 4]; assert_eq!(&vec[2..], b); let b: &[_] = &[]; assert_eq!(&vec[4..], b); }
#[test] fn test_slice_to() { let vec: &[_] = &[1, 2, 3, 4]; assert_eq!(&vec[..4], vec); let b: &[_] = &[1, 2]; assert_eq!(&vec[..2], b); let b: &[_] = &[]; assert_eq!(&vec[..0], b); }
#[test] fn test_pop() { let mut v = vec![5]; let e = v.pop(); assert_eq!(v.len(), 0); assert_eq!(e, Some(5)); let f = v.pop(); assert_eq!(f, None); let g = v.pop(); assert_eq!(g, None); }
#[test] fn test_swap_remove() { let mut v = vec![1, 2, 3, 4, 5]; let mut e = v.swap_remove(0); assert_eq!(e, 1); assert_eq!(v, [5, 2, 3, 4]); e = v.swap_remove(3); assert_eq!(e, 4); assert_eq!(v, [5, 2, 3]); }
#[test] #[should_panic] fn test_swap_remove_fail() { let mut v = vec![1]; let _ = v.swap_remove(0); let _ = v.swap_remove(0); }
#[test] fn test_swap_remove_noncopyable() {
// Tests that we don't accidentally run destructors twice.
let mut v: Vec<Box<u8>> = Vec::new(); v.push(box 0); v.push(box 0); v.push(box 0); let mut _e = v.swap_remove(0); assert_eq!(v.len(), 2); _e = v.swap_remove(1); assert_eq!(v.len(), 1); _e = v.swap_remove(0); assert_eq!(v.len(), 0); }
#[test] fn test_push() {
// Test on-stack push().
let mut v = vec![]; v.push(1); assert_eq!(v.len(), 1); assert_eq!(v[0], 1);
// Test on-heap push().
v.push(2); assert_eq!(v.len(), 2); assert_eq!(v[0], 1); assert_eq!(v[1], 2); }
#[test] fn test_truncate() { let mut v: Vec<Box<_>> = vec![box 6, box 5, box 4]; v.truncate(1); let v = v; assert_eq!(v.len(), 1); assert_eq!(*(v[0]), 6);
// If the unsafe block didn't drop things properly, we blow up here.
}
#[test] fn test_clear() { let mut v: Vec<Box<_>> = vec![box 6, box 5, box 4]; v.clear(); assert_eq!(v.len(), 0);
// If the unsafe block didn't drop things properly, we blow up here.
}
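// Editor's illustrative sketch (hypothetical `demo_*` test) making the
// drop-exactly-once property of `truncate`/`clear` (checked indirectly via
// `Box` above) directly observable with a counter.
#[test] fn demo_truncate_drops_tail() {
    use std::sync::atomic::{AtomicUsize, Ordering};
    static DROPS: AtomicUsize = AtomicUsize::new(0);
    struct Counted;
    impl Drop for Counted {
        fn drop(&mut self) { DROPS.fetch_add(1, Ordering::SeqCst); }
    }
    let mut v = vec![Counted, Counted, Counted];
    v.truncate(1); // drops the two removed elements immediately
    assert_eq!(DROPS.load(Ordering::SeqCst), 2);
    drop(v); // drops the survivor
    assert_eq!(DROPS.load(Ordering::SeqCst), 3);
}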
#[test] fn test_retain() { let mut v = vec![1, 2, 3, 4, 5]; v.retain(is_odd); assert_eq!(v, [1, 3, 5]); }
#[test] fn test_binary_search() { assert_eq!([1, 2, 3, 4, 5].binary_search(&5).ok(), Some(4)); assert_eq!([1, 2, 3, 4, 5].binary_search(&4).ok(), Some(3)); assert_eq!([1, 2, 3, 4, 5].binary_search(&3).ok(), Some(2)); assert_eq!([1, 2, 3, 4, 5].binary_search(&2).ok(), Some(1)); assert_eq!([1, 2, 3, 4, 5].binary_search(&1).ok(), Some(0)); assert_eq!([2, 4, 6, 8, 10].binary_search(&1).ok(), None); assert_eq!([2, 4, 6, 8, 10].binary_search(&5).ok(), None); assert_eq!([2, 4, 6, 8, 10].binary_search(&4).ok(), Some(1)); assert_eq!([2, 4, 6, 8, 10].binary_search(&10).ok(), Some(4)); assert_eq!([2, 4, 6, 8].binary_search(&1).ok(), None); assert_eq!([2, 4, 6, 8].binary_search(&5).ok(), None); assert_eq!([2, 4, 6, 8].binary_search(&4).ok(), Some(1)); assert_eq!([2, 4, 6, 8].binary_search(&8).ok(), Some(3)); assert_eq!([2, 4, 6].binary_search(&1).ok(), None); assert_eq!([2, 4, 6].binary_search(&5).ok(), None); assert_eq!([2, 4, 6].binary_search(&4).ok(), Some(1)); assert_eq!([2, 4, 6].binary_search(&6).ok(), Some(2)); assert_eq!([2, 4].binary_search(&1).ok(), None); assert_eq!([2, 4].binary_search(&5).ok(), None); assert_eq!([2, 4].binary_search(&2).ok(), Some(0)); assert_eq!([2, 4].binary_search(&4).ok(), Some(1)); assert_eq!([2].binary_search(&1).ok(), None); assert_eq!([2].binary_search(&5).ok(), None); assert_eq!([2].binary_search(&2).ok(), Some(0)); assert_eq!([].binary_search(&1).ok(), None); assert_eq!([].binary_search(&5).ok(), None); assert!([1, 1, 1, 1, 1].binary_search(&1).ok() != None); assert!([1, 1, 1, 1, 2].binary_search(&1).ok() != None); assert!([1, 1, 1, 2, 2].binary_search(&1).ok() != None); assert!([1, 1, 2, 2, 2].binary_search(&1).ok() != None); assert_eq!([1, 2, 2, 2, 2].binary_search(&1).ok(), Some(0)); assert_eq!([1, 2, 3, 4, 5].binary_search(&6).ok(), None); assert_eq!([1, 2, 3, 4, 5].binary_search(&0).ok(), None); }
#[test] fn test_reverse() { let mut v = vec![10, 20]; assert_eq!(v[0], 10); assert_eq!(v[1], 20); v.reverse(); assert_eq!(v[0], 20); assert_eq!(v[1], 10); let mut v3 = Vec::<i32>::new(); v3.reverse(); assert!(v3.is_empty());
// check the 1-byte-types path
let mut v = (-50..51i8).collect::<Vec<_>>(); v.reverse(); assert_eq!(v, (-50..51i8).rev().collect::<Vec<_>>());
// check the 2-byte-types path
let mut v = (-50..51i16).collect::<Vec<_>>(); v.reverse(); assert_eq!(v, (-50..51i16).rev().collect::<Vec<_>>()); }
#[test]
#[cfg_attr(miri, ignore)] // Miri is too slow
fn test_sort() { let mut rng = thread_rng(); for len in (2..25).chain(500..510) { for &modulus in &[5, 10, 100, 1000] { for _ in 0..10 { let orig: Vec<_> = rng.sample_iter::<i32, _>(&Standard).map(|x| x % modulus).take(len).collect();
// Sort in default order.
let mut v = orig.clone(); v.sort(); assert!(v.windows(2).all(|w| w[0] <= w[1]));
// Sort in ascending order.
let mut v = orig.clone(); v.sort_by(|a, b| a.cmp(b)); assert!(v.windows(2).all(|w| w[0] <= w[1]));
// Sort in descending order.
let mut v = orig.clone(); v.sort_by(|a, b| b.cmp(a)); assert!(v.windows(2).all(|w| w[0] >= w[1]));
// Sort in lexicographic order.
let mut v1 = orig.clone(); let mut v2 = orig.clone(); v1.sort_by_key(|x| x.to_string()); v2.sort_by_cached_key(|x| x.to_string()); assert!(v1.windows(2).all(|w| w[0].to_string() <= w[1].to_string())); assert!(v1 == v2);
// Sort with many pre-sorted runs.
let mut v = orig.clone(); v.sort(); v.reverse(); for _ in 0..5 { let a = rng.gen::<usize>() % len; let b = rng.gen::<usize>() % len; if a < b { v[a..b].reverse(); } else { v.swap(a, b); } } v.sort(); assert!(v.windows(2).all(|w| w[0] <= w[1])); } } }
// Sort using a completely random comparison function.
// This will reorder the elements *somehow*, but won't panic.
let mut v = [0; 500]; for i in 0..v.len() { v[i] = i as i32; } v.sort_by(|_, _| *[Less, Equal, Greater].choose(&mut rng).unwrap()); v.sort(); for i in 0..v.len() { assert_eq!(v[i], i as i32); }
// Should not panic.
[0i32; 0].sort(); [(); 10].sort(); [(); 100].sort(); let mut v = [0xDEADBEEFu64]; v.sort(); assert!(v == [0xDEADBEEF]); }
#[test] fn test_sort_stability() {
// Miri is too slow
let large_range = if cfg!(miri) { 0..0 } else { 500..510 }; let rounds = if cfg!(miri) { 1 } else { 10 }; for len in (2..25).chain(large_range) { for _ in 0..rounds { let mut counts = [0; 10];
// create a vector like [(6, 1), (5, 1), (6, 2), ...],
// where the first item of each tuple is random, but
// the second item represents which occurrence of that
// number this element is, i.e., the second elements
// will occur in sorted order.
let orig: Vec<_> = (0..len).map(|_| { let n = thread_rng().gen::<usize>() % 10; counts[n] += 1; (n, counts[n]) }).collect(); let mut v = orig.clone();
// Only sort on the first element, so an unstable sort
// may mix up the counts.
v.sort_by(|&(a, _), &(b, _)| a.cmp(&b));
// This comparison includes the count (the second item
// of the tuple), so elements with equal first items
// will need to be ordered with increasing
// counts... i.e., exactly asserting that this sort is
// stable.
assert!(v.windows(2).all(|w| w[0] <= w[1])); let mut v = orig.clone(); v.sort_by_cached_key(|&(x, _)| x); assert!(v.windows(2).all(|w| w[0] <= w[1])); } } }
#[test] fn test_rotate_left() { let expected: Vec<_> = (0..13).collect(); let mut v = Vec::new();
// no-ops
v.clone_from(&expected); v.rotate_left(0); assert_eq!(v, expected); v.rotate_left(expected.len()); assert_eq!(v, expected); let mut zst_array = [(), (), ()]; zst_array.rotate_left(2);
// happy path
v = (5..13).chain(0..5).collect(); v.rotate_left(8); assert_eq!(v, expected); let expected: Vec<_> = (0..1000).collect();
// small rotations in large slice, uses ptr::copy
v = (2..1000).chain(0..2).collect(); v.rotate_left(998); assert_eq!(v, expected); v = (998..1000).chain(0..998).collect(); v.rotate_left(2); assert_eq!(v, expected);
// non-small prime rotation, has a few rounds of swapping
v = (389..1000).chain(0..389).collect(); v.rotate_left(1000 - 389); assert_eq!(v, expected); }
#[test] fn test_rotate_right() { let expected: Vec<_> = (0..13).collect(); let mut v = Vec::new();
// no-ops
v.clone_from(&expected); v.rotate_right(0); assert_eq!(v, expected); v.rotate_right(expected.len()); assert_eq!(v, expected); let mut zst_array = [(), (), ()]; zst_array.rotate_right(2);
// happy path
v = (5..13).chain(0..5).collect(); v.rotate_right(5); assert_eq!(v, expected); let expected: Vec<_> = (0..1000).collect();
// small rotations in large slice, uses ptr::copy
v = (2..1000).chain(0..2).collect(); v.rotate_right(2); assert_eq!(v, expected); v = (998..1000).chain(0..998).collect(); v.rotate_right(998); assert_eq!(v, expected);
// non-small prime rotation, has a few rounds of swapping
v = (389..1000).chain(0..389).collect(); v.rotate_right(389); assert_eq!(v, expected); }
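// Editor's illustrative sketch (hypothetical `demo_*` test) connecting the
// two rotation tests above: `rotate_right(k)` is exactly
// `rotate_left(len - k)`, so the two methods are mirror images.
#[test] fn demo_rotate_left_right_duality() {
    let expected: Vec<_> = (0..13).collect();
    let mut v = expected.clone();
    v.rotate_left(5);
    v.rotate_right(5);
    assert_eq!(v, expected);
    let mut a = expected.clone();
    let mut b = expected.clone();
    a.rotate_right(4);
    b.rotate_left(b.len() - 4);
    assert_eq!(a, b);
}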
#[test] fn test_concat() { let v: [Vec<i32>; 0] = []; let c = v.concat(); assert_eq!(c, []); let d = [vec![1], vec![2, 3]].concat(); assert_eq!(d, [1, 2, 3]); let v: &[&[_]] = &[&[1], &[2, 3]]; assert_eq!(v.join(&0), [1, 0, 2, 3]); let v: &[&[_]] = &[&[1], &[2], &[3]]; assert_eq!(v.join(&0), [1, 0, 2, 0, 3]); }
#[test] fn test_join() { let v: [Vec<i32>; 0] = []; assert_eq!(v.join(&0), []); assert_eq!([vec![1], vec![2, 3]].join(&0), [1, 0, 2, 3]); assert_eq!([vec![1], vec![2], vec![3]].join(&0), [1, 0, 2, 0, 3]); let v: [&[_]; 2] = [&[1], &[2, 3]]; assert_eq!(v.join(&0), [1, 0, 2, 3]); let v: [&[_]; 3] = [&[1], &[2], &[3]]; assert_eq!(v.join(&0), [1, 0, 2, 0, 3]); }
#[test] fn test_join_nocopy() { let v: [String; 0] = []; assert_eq!(v.join(","), ""); assert_eq!(["a".to_string(), "ab".into()].join(","), "a,ab"); assert_eq!(["a".to_string(), "ab".into(), "abc".into()].join(","), "a,ab,abc"); assert_eq!(["a".to_string(), "ab".into(), "".into()].join(","), "a,ab,"); }
#[test] fn test_insert() { let mut a = vec![1, 2, 4]; a.insert(2, 3); assert_eq!(a, [1, 2, 3, 4]); let mut a = vec![1, 2, 3]; a.insert(0, 0); assert_eq!(a, [0, 1, 2, 3]); let mut a = vec![1, 2, 3]; a.insert(3, 4); assert_eq!(a, [1, 2, 3, 4]); let mut a = vec![]; a.insert(0, 1); assert_eq!(a, [1]); }
#[test] #[should_panic] fn test_insert_oob() { let mut a = vec![1, 2, 3]; a.insert(4, 5); }
#[test] fn test_remove() { let mut a = vec![1, 2, 3, 4]; assert_eq!(a.remove(2), 3); assert_eq!(a, [1, 2, 4]); assert_eq!(a.remove(2), 4); assert_eq!(a, [1, 2]); assert_eq!(a.remove(0), 1); assert_eq!(a, [2]); assert_eq!(a.remove(0), 2); assert_eq!(a, []); }
#[test] #[should_panic] fn test_remove_fail() { let mut a = vec![1]; let _ = a.remove(0); let _ = a.remove(0); }
#[test] fn test_capacity() { let mut v = vec![0]; v.reserve_exact(10); assert!(v.capacity() >= 11); }
#[test] fn test_slice_2() { let v = vec![1, 2, 3, 4, 5]; let v = &v[1..3]; assert_eq!(v.len(), 2); assert_eq!(v[0], 2); assert_eq!(v[1], 3); }
assert_order { (Greater, $a:expr, $b:expr) => { assert_eq!($a.cmp($b), Greater); assert!($a > $b); }; (Less, $a:expr, $b:expr) => { assert_eq!($a.cmp($b), Less); assert!($a < $b); }; (Equal, $a:expr, $b:expr) => { assert_eq!($a.cmp($b), Equal); assert_eq!($a, $b); }; } #[test] fn test_total_ord_u8() { let c = &[1u8, 2, 3]; assert_order!(Greater, &[1u8, 2, 3, 4][..], &c[..]); let c = &[1u8, 2, 3, 4]; assert_order!(Less, &[1u8, 2, 3][..], &c[..]); let c = &[1u8, 2, 3, 6]; assert_order!(Equal, &[1u8, 2, 3, 6][..], &c[..]); let c = &[1u8, 2, 3, 4, 5, 6]; assert_order!(Less, &[1u8, 2, 3, 4, 5, 5, 5, 5][..], &c[..]); let c = &[1u8, 2, 3, 4]; assert_order!(Greater, &[2u8, 2][..], &c[..]); } #[test] fn test_total_ord_i32() { let c = &[1, 2, 3]; assert_order!(Greater, &[1, 2, 3, 4][..], &c[..]); let c = &[1, 2, 3, 4]; assert_order!(Less, &[1, 2, 3][..], &c[..]); let c = &[1, 2, 3, 6]; assert_order!(Equal, &[1, 2, 3, 6][..], &c[..]); let c = &[1, 2, 3, 4, 5, 6]; assert_order!(Less, &[1, 2, 3, 4, 5, 5, 5, 5][..], &c[..]); let c = &[1, 2, 3, 4]; assert_order!(Greater, &[2, 2][..], &c[..]); } #[test] fn test_iterator() { let xs = [1, 2, 5, 10, 11]; let mut it = xs.iter(); assert_eq!(it.size_hint(), (5, Some(5))); assert_eq!(it.next().unwrap(), &1); assert_eq!(it.size_hint(), (4, Some(4))); assert_eq!(it.next().unwrap(), &2); assert_eq!(it.size_hint(), (3, Some(3))); assert_eq!(it.next().unwrap(), &5); assert_eq!(it.size_hint(), (2, Some(2))); assert_eq!(it.next().unwrap(), &10); assert_eq!(it.size_hint(), (1, Some(1))); assert_eq!(it.next().unwrap(), &11); assert_eq!(it.size_hint(), (0, Some(0))); assert!(it.next().is_none()); } #[test] fn test_iter_size_hints() { let mut xs = [1, 2, 5, 10, 11]; assert_eq!(xs.iter().size_hint(), (5, Some(5))); assert_eq!(xs.iter_mut().size_hint(), (5, Some(5))); } #[test] fn test_iter_as_slice() { let xs = [1, 2, 5, 10, 11]; let mut iter = xs.iter(); assert_eq!(iter.as_slice(), &[1, 2, 5, 10, 11]); iter.next(); assert_eq!(iter.as_slice(), &[2, 5, 10, 11]); } #[test] fn test_iter_as_ref() { let xs = [1, 2, 5, 10, 11]; let mut iter = xs.iter(); assert_eq!(iter.as_ref(), &[1, 2, 5, 10, 11]); iter.next(); assert_eq!(iter.as_ref(), &[2, 5, 10, 11]); } #[test] fn test_iter_clone() { let xs = [1, 2, 5]; let mut it = xs.iter(); it.next(); let mut jt = it.clone(); assert_eq!(it.next(), jt.next()); assert_eq!(it.next(), jt.next()); assert_eq!(it.next(), jt.next()); } #[test] fn test_iter_is_empty() { let xs = [1, 2, 5, 10, 11]; for i in 0..xs.len() { for j in i..xs.len() { assert_eq!(xs[i..j].iter().is_empty(), xs[i..j].is_empty()); } } } #[test] fn test_mut_iterator() { let mut xs = [1, 2, 3, 4, 5]; for x in &mut xs { *x += 1; } assert!(xs == [2, 3, 4, 5, 6]) } #[test] fn test_rev_iterator() { let xs = [1, 2, 5, 10, 11]; let ys = [11, 10, 5, 2, 1]; let mut i = 0; for &x in xs.iter().rev() { assert_eq!(x, ys[i]); i += 1; } assert_eq!(i, 5); } #[test] fn test_mut_rev_iterator() { let mut xs = [1, 2, 3, 4, 5]; for (i, x) in xs.iter_mut().rev().enumerate() { *x += i; } assert!(xs == [5, 5, 5, 5, 5]) } #[test] fn test_move_iterator() { let xs = vec![1, 2, 3, 4, 5]; assert_eq!(xs.into_iter().fold(0, |a: usize, b: usize| 10 * a + b), 12345); } #[test] fn test_move_rev_iterator() { let xs = vec![1, 2, 3, 4, 5]; assert_eq!(xs.into_iter().rev().fold(0, |a: usize, b: usize| 10 * a + b), 54321); } #[test] fn test_splitator() { let xs = &[1, 2, 3, 4, 5]; let splits: &[&[_]] = &[&[1], &[3], &[5]]; assert_eq!(xs.split(|x| *x % 2 == 0).collect::>(), splits); let splits: &[&[_]] = &[&[], &[2, 
3, 4, 5]]; assert_eq!(xs.split(|x| *x == 1).collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[1, 2, 3, 4], &[]]; assert_eq!(xs.split(|x| *x == 5).collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]]; assert_eq!(xs.split(|x| *x == 10).collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[], &[], &[], &[], &[], &[]]; assert_eq!(xs.split(|_| true).collect::<Vec<_>>(), splits); let xs: &[i32] = &[]; let splits: &[&[i32]] = &[&[]]; assert_eq!(xs.split(|x| *x == 5).collect::<Vec<_>>(), splits); }
#[test] fn test_splitator_inclusive() { let xs = &[1, 2, 3, 4, 5]; let splits: &[&[_]] = &[&[1, 2], &[3, 4], &[5]]; assert_eq!(xs.split_inclusive(|x| *x % 2 == 0).collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[1], &[2, 3, 4, 5]]; assert_eq!(xs.split_inclusive(|x| *x == 1).collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]]; assert_eq!(xs.split_inclusive(|x| *x == 5).collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]]; assert_eq!(xs.split_inclusive(|x| *x == 10).collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[1], &[2], &[3], &[4], &[5]]; assert_eq!(xs.split_inclusive(|_| true).collect::<Vec<_>>(), splits); let xs: &[i32] = &[]; let splits: &[&[i32]] = &[&[]]; assert_eq!(xs.split_inclusive(|x| *x == 5).collect::<Vec<_>>(), splits); }
#[test] fn test_splitator_inclusive_reverse() { let xs = &[1, 2, 3, 4, 5]; let splits: &[&[_]] = &[&[5], &[3, 4], &[1, 2]]; assert_eq!(xs.split_inclusive(|x| *x % 2 == 0).rev().collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[2, 3, 4, 5], &[1]]; assert_eq!(xs.split_inclusive(|x| *x == 1).rev().collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]]; assert_eq!(xs.split_inclusive(|x| *x == 5).rev().collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]]; assert_eq!(xs.split_inclusive(|x| *x == 10).rev().collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[5], &[4], &[3], &[2], &[1]]; assert_eq!(xs.split_inclusive(|_| true).rev().collect::<Vec<_>>(), splits); let xs: &[i32] = &[]; let splits: &[&[i32]] = &[&[]]; assert_eq!(xs.split_inclusive(|x| *x == 5).rev().collect::<Vec<_>>(), splits); }
#[test] fn test_splitator_mut_inclusive() { let xs = &mut [1, 2, 3, 4, 5]; let splits: &[&[_]] = &[&[1, 2], &[3, 4], &[5]]; assert_eq!(xs.split_inclusive_mut(|x| *x % 2 == 0).collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[1], &[2, 3, 4, 5]]; assert_eq!(xs.split_inclusive_mut(|x| *x == 1).collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]]; assert_eq!(xs.split_inclusive_mut(|x| *x == 5).collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]]; assert_eq!(xs.split_inclusive_mut(|x| *x == 10).collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[1], &[2], &[3], &[4], &[5]]; assert_eq!(xs.split_inclusive_mut(|_| true).collect::<Vec<_>>(), splits); let xs: &mut [i32] = &mut []; let splits: &[&[i32]] = &[&[]]; assert_eq!(xs.split_inclusive_mut(|x| *x == 5).collect::<Vec<_>>(), splits); }
#[test] fn test_splitator_mut_inclusive_reverse() { let xs = &mut [1, 2, 3, 4, 5]; let splits: &[&[_]] = &[&[5], &[3, 4], &[1, 2]]; assert_eq!(xs.split_inclusive_mut(|x| *x % 2 == 0).rev().collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[2, 3, 4, 5], &[1]]; assert_eq!(xs.split_inclusive_mut(|x| *x == 1).rev().collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]]; assert_eq!(xs.split_inclusive_mut(|x| *x == 5).rev().collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]]; assert_eq!(xs.split_inclusive_mut(|x| *x == 10).rev().collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[5], &[4], &[3], &[2], &[1]]; assert_eq!(xs.split_inclusive_mut(|_| true).rev().collect::<Vec<_>>(), splits); let xs:
&mut [i32] = &mut []; let splits: &[&[i32]] = &[&[]]; assert_eq!(xs.split_inclusive_mut(|x| *x == 5).rev().collect::<Vec<_>>(), splits); }
#[test] fn test_splitnator() { let xs = &[1, 2, 3, 4, 5]; let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]]; assert_eq!(xs.splitn(1, |x| *x % 2 == 0).collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[1], &[3, 4, 5]]; assert_eq!(xs.splitn(2, |x| *x % 2 == 0).collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[], &[], &[], &[4, 5]]; assert_eq!(xs.splitn(4, |_| true).collect::<Vec<_>>(), splits); let xs: &[i32] = &[]; let splits: &[&[i32]] = &[&[]]; assert_eq!(xs.splitn(2, |x| *x == 5).collect::<Vec<_>>(), splits); }
#[test] fn test_splitnator_mut() { let xs = &mut [1, 2, 3, 4, 5]; let splits: &[&mut [_]] = &[&mut [1, 2, 3, 4, 5]]; assert_eq!(xs.splitn_mut(1, |x| *x % 2 == 0).collect::<Vec<_>>(), splits); let splits: &[&mut [_]] = &[&mut [1], &mut [3, 4, 5]]; assert_eq!(xs.splitn_mut(2, |x| *x % 2 == 0).collect::<Vec<_>>(), splits); let splits: &[&mut [_]] = &[&mut [], &mut [], &mut [], &mut [4, 5]]; assert_eq!(xs.splitn_mut(4, |_| true).collect::<Vec<_>>(), splits); let xs: &mut [i32] = &mut []; let splits: &[&mut [i32]] = &[&mut []]; assert_eq!(xs.splitn_mut(2, |x| *x == 5).collect::<Vec<_>>(), splits); }
#[test] fn test_rsplitator() { let xs = &[1, 2, 3, 4, 5]; let splits: &[&[_]] = &[&[5], &[3], &[1]]; assert_eq!(xs.split(|x| *x % 2 == 0).rev().collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[2, 3, 4, 5], &[]]; assert_eq!(xs.split(|x| *x == 1).rev().collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[], &[1, 2, 3, 4]]; assert_eq!(xs.split(|x| *x == 5).rev().collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]]; assert_eq!(xs.split(|x| *x == 10).rev().collect::<Vec<_>>(), splits); let xs: &[i32] = &[]; let splits: &[&[i32]] = &[&[]]; assert_eq!(xs.split(|x| *x == 5).rev().collect::<Vec<_>>(), splits); }
#[test] fn test_rsplitnator() { let xs = &[1, 2, 3, 4, 5]; let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]]; assert_eq!(xs.rsplitn(1, |x| *x % 2 == 0).collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[5], &[1, 2, 3]]; assert_eq!(xs.rsplitn(2, |x| *x % 2 == 0).collect::<Vec<_>>(), splits); let splits: &[&[_]] = &[&[], &[], &[], &[1, 2]]; assert_eq!(xs.rsplitn(4, |_| true).collect::<Vec<_>>(), splits); let xs: &[i32] = &[]; let splits: &[&[i32]] = &[&[]]; assert_eq!(xs.rsplitn(2, |x| *x == 5).collect::<Vec<_>>(), splits); assert!(xs.rsplitn(0, |x| *x % 2 == 0).next().is_none()); }
#[test] fn test_windowsator() { let v = &[1, 2, 3, 4]; let wins: &[&[_]] = &[&[1, 2], &[2, 3], &[3, 4]]; assert_eq!(v.windows(2).collect::<Vec<_>>(), wins); let wins: &[&[_]] = &[&[1, 2, 3], &[2, 3, 4]]; assert_eq!(v.windows(3).collect::<Vec<_>>(), wins); assert!(v.windows(6).next().is_none()); let wins: &[&[_]] = &[&[3, 4], &[2, 3], &[1, 2]]; assert_eq!(v.windows(2).rev().collect::<Vec<_>>(), wins); }
#[test] #[should_panic] fn test_windowsator_0() { let v = &[1, 2, 3, 4]; let _it = v.windows(0); }
#[test] fn test_chunksator() { let v = &[1, 2, 3, 4, 5]; assert_eq!(v.chunks(2).len(), 3); let chunks: &[&[_]] = &[&[1, 2], &[3, 4], &[5]]; assert_eq!(v.chunks(2).collect::<Vec<_>>(), chunks); let chunks: &[&[_]] = &[&[1, 2, 3], &[4, 5]]; assert_eq!(v.chunks(3).collect::<Vec<_>>(), chunks); let chunks: &[&[_]] = &[&[1, 2, 3, 4, 5]]; assert_eq!(v.chunks(6).collect::<Vec<_>>(), chunks); let chunks: &[&[_]] = &[&[5], &[3, 4], &[1, 2]]; assert_eq!(v.chunks(2).rev().collect::<Vec<_>>(), chunks); }
#[test] #[should_panic] fn test_chunksator_0() { let v = &[1, 2, 3, 4]; let _it = v.chunks(0); }
#[test] fn test_chunks_exactator() { let v = &[1, 2, 3, 4, 5]; assert_eq!(v.chunks_exact(2).len(), 2); let chunks: &[&[_]] = &[&[1, 2], &[3,
4]]; assert_eq!(v.chunks_exact(2).collect::<Vec<_>>(), chunks); let chunks: &[&[_]] = &[&[1, 2, 3]]; assert_eq!(v.chunks_exact(3).collect::<Vec<_>>(), chunks); let chunks: &[&[_]] = &[]; assert_eq!(v.chunks_exact(6).collect::<Vec<_>>(), chunks); let chunks: &[&[_]] = &[&[3, 4], &[1, 2]]; assert_eq!(v.chunks_exact(2).rev().collect::<Vec<_>>(), chunks); }
#[test] #[should_panic] fn test_chunks_exactator_0() { let v = &[1, 2, 3, 4]; let _it = v.chunks_exact(0); }
#[test] fn test_rchunksator() { let v = &[1, 2, 3, 4, 5]; assert_eq!(v.rchunks(2).len(), 3); let chunks: &[&[_]] = &[&[4, 5], &[2, 3], &[1]]; assert_eq!(v.rchunks(2).collect::<Vec<_>>(), chunks); let chunks: &[&[_]] = &[&[3, 4, 5], &[1, 2]]; assert_eq!(v.rchunks(3).collect::<Vec<_>>(), chunks); let chunks: &[&[_]] = &[&[1, 2, 3, 4, 5]]; assert_eq!(v.rchunks(6).collect::<Vec<_>>(), chunks); let chunks: &[&[_]] = &[&[1], &[2, 3], &[4, 5]]; assert_eq!(v.rchunks(2).rev().collect::<Vec<_>>(), chunks); }
#[test] #[should_panic] fn test_rchunksator_0() { let v = &[1, 2, 3, 4]; let _it = v.rchunks(0); }
#[test] fn test_rchunks_exactator() { let v = &[1, 2, 3, 4, 5]; assert_eq!(v.rchunks_exact(2).len(), 2); let chunks: &[&[_]] = &[&[4, 5], &[2, 3]]; assert_eq!(v.rchunks_exact(2).collect::<Vec<_>>(), chunks); let chunks: &[&[_]] = &[&[3, 4, 5]]; assert_eq!(v.rchunks_exact(3).collect::<Vec<_>>(), chunks); let chunks: &[&[_]] = &[]; assert_eq!(v.rchunks_exact(6).collect::<Vec<_>>(), chunks); let chunks: &[&[_]] = &[&[2, 3], &[4, 5]]; assert_eq!(v.rchunks_exact(2).rev().collect::<Vec<_>>(), chunks); }
#[test] #[should_panic] fn test_rchunks_exactator_0() { let v = &[1, 2, 3, 4]; let _it = v.rchunks_exact(0); }
#[test] fn test_reverse_part() { let mut values = [1, 2, 3, 4, 5]; values[1..4].reverse(); assert!(values == [1, 4, 3, 2, 5]); }
#[test] fn test_show() { macro_rules! test_show_vec { ($x:expr, $x_str:expr) => {{ let (x, x_str) = ($x, $x_str); assert_eq!(format!("{:?}", x), x_str); assert_eq!(format!("{:?}", x), x_str); }}; } let empty = Vec::<i32>::new(); test_show_vec!(empty, "[]"); test_show_vec!(vec![1], "[1]"); test_show_vec!(vec![1, 2, 3], "[1, 2, 3]"); test_show_vec!(vec![vec![], vec![1], vec![1, 1]], "[[], [1], [1, 1]]"); let empty_mut: &mut [i32] = &mut []; test_show_vec!(empty_mut, "[]"); let v = &mut [1]; test_show_vec!(v, "[1]"); let v = &mut [1, 2, 3]; test_show_vec!(v, "[1, 2, 3]"); let v: &mut [&mut [_]] = &mut [&mut [], &mut [1], &mut [1, 1]]; test_show_vec!(v, "[[], [1], [1, 1]]"); }
#[test] fn test_vec_default() { macro_rules!
t { ($ty:ty) => {{ let v: $ty = Default::default(); assert!(v.is_empty()); }}; } t!(&[i32]); t!(Vec<i32>); }
#[test] #[should_panic] fn test_overflow_does_not_cause_segfault() { let mut v = vec![]; v.reserve_exact(!0); v.push(1); v.push(2); }
#[test] #[should_panic] fn test_overflow_does_not_cause_segfault_managed() { let mut v = vec![Rc::new(1)]; v.reserve_exact(!0); v.push(Rc::new(2)); }
#[test] fn test_mut_split_at() { let mut values = [1, 2, 3, 4, 5]; { let (left, right) = values.split_at_mut(2); { let left: &[_] = left; assert!(left[..left.len()] == [1, 2]); } for p in left { *p += 1; } { let right: &[_] = right; assert!(right[..right.len()] == [3, 4, 5]); } for p in right { *p += 2; } } assert!(values == [2, 3, 5, 6, 7]); }
#[derive(Clone, PartialEq)] struct Foo;
#[test] fn test_iter_zero_sized() { let mut v = vec![Foo, Foo, Foo]; assert_eq!(v.len(), 3); let mut cnt = 0; for f in &v { assert!(*f == Foo); cnt += 1; } assert_eq!(cnt, 3); for f in &v[1..3] { assert!(*f == Foo); cnt += 1; } assert_eq!(cnt, 5); for f in &mut v { assert!(*f == Foo); cnt += 1; } assert_eq!(cnt, 8); for f in v { assert!(f == Foo); cnt += 1; } assert_eq!(cnt, 11); let xs: [Foo; 3] = [Foo, Foo, Foo]; cnt = 0; for f in &xs { assert!(*f == Foo); cnt += 1; } assert!(cnt == 3); }
#[test] fn test_shrink_to_fit() { let mut xs = vec![0, 1, 2, 3]; for i in 4..100 { xs.push(i) } assert_eq!(xs.capacity(), 128); xs.shrink_to_fit(); assert_eq!(xs.capacity(), 100); assert_eq!(xs, (0..100).collect::<Vec<_>>()); }
#[test] fn test_starts_with() { assert!(b"foobar".starts_with(b"foo")); assert!(!b"foobar".starts_with(b"oob")); assert!(!b"foobar".starts_with(b"bar")); assert!(!b"foo".starts_with(b"foobar")); assert!(!b"bar".starts_with(b"foobar")); assert!(b"foobar".starts_with(b"foobar")); let empty: &[u8] = &[]; assert!(empty.starts_with(empty)); assert!(!empty.starts_with(b"foo")); assert!(b"foobar".starts_with(empty)); }
#[test] fn test_ends_with() { assert!(b"foobar".ends_with(b"bar")); assert!(!b"foobar".ends_with(b"oba")); assert!(!b"foobar".ends_with(b"foo")); assert!(!b"foo".ends_with(b"foobar")); assert!(!b"bar".ends_with(b"foobar")); assert!(b"foobar".ends_with(b"foobar")); let empty: &[u8] = &[]; assert!(empty.ends_with(empty)); assert!(!empty.ends_with(b"foo")); assert!(b"foobar".ends_with(empty)); }
#[test] fn test_mut_splitator() { let mut xs = [0, 1, 0, 2, 3, 0, 0, 4, 5, 0]; assert_eq!(xs.split_mut(|x| *x == 0).count(), 6); for slice in xs.split_mut(|x| *x == 0) { slice.reverse(); } assert!(xs == [0, 1, 0, 3, 2, 0, 0, 5, 4, 0]); let mut xs = [0, 1, 0, 2, 3, 0, 0, 4, 5, 0, 6, 7]; for slice in xs.split_mut(|x| *x == 0).take(5) { slice.reverse(); } assert!(xs == [0, 1, 0, 3, 2, 0, 0, 5, 4, 0, 6, 7]); }
#[test] fn test_mut_splitator_rev() { let mut xs = [1, 2, 0, 3, 4, 0, 0, 5, 6, 0]; for slice in xs.split_mut(|x| *x == 0).rev().take(4) { slice.reverse(); } assert!(xs == [1, 2, 0, 4, 3, 0, 0, 6, 5, 0]); }
#[test] fn test_get_mut() { let mut v = [0, 1, 2]; assert_eq!(v.get_mut(3), None); v.get_mut(1).map(|e| *e = 7); assert_eq!(v[1], 7); let mut x = 2; assert_eq!(v.get_mut(2), Some(&mut x)); }
#[test] fn test_mut_chunks() { let mut v = [0, 1, 2, 3, 4, 5, 6]; assert_eq!(v.chunks_mut(3).len(), 3); for (i, chunk) in v.chunks_mut(3).enumerate() { for x in chunk { *x = i as u8; } } let result = [0, 0, 0, 1, 1, 1, 2]; assert_eq!(v, result); }
#[test] fn test_mut_chunks_rev() { let mut v = [0, 1, 2, 3, 4, 5, 6]; for (i, chunk) in v.chunks_mut(3).rev().enumerate() { for x in chunk { *x = i as u8; } } let result = [2, 2, 2, 1, 1, 1, 0];
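// Worked example of the expected layout (explanatory note): chunks_mut(3) over
// 7 elements yields [0..3], [3..6], [6..7]; .rev() hands out the short tail
// chunk first, so enumerate() gives it i == 0 and the leading chunk i == 2.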
assert_eq!(v, result); }
#[test] #[should_panic] fn test_mut_chunks_0() { let mut v = [1, 2, 3, 4]; let _it = v.chunks_mut(0); }
#[test] fn test_mut_chunks_exact() { let mut v = [0, 1, 2, 3, 4, 5, 6]; assert_eq!(v.chunks_exact_mut(3).len(), 2); for (i, chunk) in v.chunks_exact_mut(3).enumerate() { for x in chunk { *x = i as u8; } } let result = [0, 0, 0, 1, 1, 1, 6]; assert_eq!(v, result); }
#[test] fn test_mut_chunks_exact_rev() { let mut v = [0, 1, 2, 3, 4, 5, 6]; for (i, chunk) in v.chunks_exact_mut(3).rev().enumerate() { for x in chunk { *x = i as u8; } } let result = [1, 1, 1, 0, 0, 0, 6]; assert_eq!(v, result); }
#[test] #[should_panic] fn test_mut_chunks_exact_0() { let mut v = [1, 2, 3, 4]; let _it = v.chunks_exact_mut(0); }
#[test] fn test_mut_rchunks() { let mut v = [0, 1, 2, 3, 4, 5, 6]; assert_eq!(v.rchunks_mut(3).len(), 3); for (i, chunk) in v.rchunks_mut(3).enumerate() { for x in chunk { *x = i as u8; } } let result = [2, 1, 1, 1, 0, 0, 0]; assert_eq!(v, result); }
#[test] fn test_mut_rchunks_rev() { let mut v = [0, 1, 2, 3, 4, 5, 6]; for (i, chunk) in v.rchunks_mut(3).rev().enumerate() { for x in chunk { *x = i as u8; } } let result = [0, 1, 1, 1, 2, 2, 2]; assert_eq!(v, result); }
#[test] #[should_panic] fn test_mut_rchunks_0() { let mut v = [1, 2, 3, 4]; let _it = v.rchunks_mut(0); }
#[test] fn test_mut_rchunks_exact() { let mut v = [0, 1, 2, 3, 4, 5, 6]; assert_eq!(v.rchunks_exact_mut(3).len(), 2); for (i, chunk) in v.rchunks_exact_mut(3).enumerate() { for x in chunk { *x = i as u8; } } let result = [0, 1, 1, 1, 0, 0, 0]; assert_eq!(v, result); }
#[test] fn test_mut_rchunks_exact_rev() { let mut v = [0, 1, 2, 3, 4, 5, 6]; for (i, chunk) in v.rchunks_exact_mut(3).rev().enumerate() { for x in chunk { *x = i as u8; } } let result = [0, 0, 0, 0, 1, 1, 1]; assert_eq!(v, result); }
#[test] #[should_panic] fn test_mut_rchunks_exact_0() { let mut v = [1, 2, 3, 4]; let _it = v.rchunks_exact_mut(0); }
#[test] fn test_mut_last() { let mut x = [1, 2, 3, 4, 5]; let h = x.last_mut(); assert_eq!(*h.unwrap(), 5); let y: &mut [i32] = &mut []; assert!(y.last_mut().is_none()); }
#[test] fn test_to_vec() { let xs: Box<_> = box [1, 2, 3]; let ys = xs.to_vec(); assert_eq!(ys, [1, 2, 3]); }
#[test] fn test_in_place_iterator_specialization() { let src: Box<[usize]> = box [1, 2, 3]; let src_ptr = src.as_ptr(); let sink: Box<_> = src.into_vec().into_iter().map(std::convert::identity).collect(); let sink_ptr = sink.as_ptr(); assert_eq!(src_ptr, sink_ptr); }
#[test] fn test_box_slice_clone() { let data = vec![vec![0, 1], vec![0], vec![1]]; let data2 = data.clone().into_boxed_slice().clone().to_vec(); assert_eq!(data, data2); }
#[test] #[allow(unused_must_use)] // here, we care about the side effects of `.clone()`
#[cfg_attr(target_os = "emscripten", ignore)] fn test_box_slice_clone_panics() { use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; struct Canary { count: Arc<AtomicUsize>, panics: bool, } impl Drop for Canary { fn drop(&mut self) { self.count.fetch_add(1, Ordering::SeqCst); } } impl Clone for Canary { fn clone(&self) -> Self { if self.panics { panic!() } Canary { count: self.count.clone(), panics: self.panics } } } let drop_count = Arc::new(AtomicUsize::new(0)); let canary = Canary { count: drop_count.clone(), panics: false }; let panic = Canary { count: drop_count.clone(), panics: true }; std::panic::catch_unwind(move || {
// When xs is dropped, +5.
let xs = vec![canary.clone(), canary.clone(), canary.clone(), panic, canary].into_boxed_slice();
// When panic is cloned, +3.
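// Walking through the arithmetic: `xs.clone()` clones three canaries, then
// panics while cloning `panic`; unwinding drops the three fresh clones (+3)
// and afterwards the closure's `xs` itself (+5), giving the total of 8 below.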
xs.clone(); }) .unwrap_err();
// Total = 8
assert_eq!(drop_count.load(Ordering::SeqCst), 8); }
#[test] fn test_copy_from_slice() { let src = [0, 1, 2, 3, 4, 5]; let mut dst = [0; 6]; dst.copy_from_slice(&src); assert_eq!(src, dst) }
#[test] #[should_panic(expected = "source slice length (4) does not match destination slice length (5)")] fn test_copy_from_slice_dst_longer() { let src = [0, 1, 2, 3]; let mut dst = [0; 5]; dst.copy_from_slice(&src); }
#[test] #[should_panic(expected = "source slice length (4) does not match destination slice length (3)")] fn test_copy_from_slice_dst_shorter() { let src = [0, 1, 2, 3]; let mut dst = [0; 3]; dst.copy_from_slice(&src); }
const MAX_LEN: usize = 80;
static DROP_COUNTS: [AtomicUsize; MAX_LEN] = [
// FIXME(RFC 1109): AtomicUsize is not Copy.
AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), ];
static VERSIONS: AtomicUsize = AtomicUsize::new(0);
#[derive(Clone, Eq)] struct DropCounter { x: u32, id: usize, version: Cell<usize>, }
impl PartialEq for DropCounter { fn eq(&self, other: &Self) -> bool { self.partial_cmp(other) == Some(Ordering::Equal) } }
impl PartialOrd for DropCounter { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { self.version.set(self.version.get() + 1); other.version.set(other.version.get() + 1); VERSIONS.fetch_add(2, Relaxed); self.x.partial_cmp(&other.x) } }
impl Ord for DropCounter { fn cmp(&self, other: &Self) -> Ordering { self.partial_cmp(other).unwrap() } }
impl Drop for DropCounter { fn drop(&mut self) { DROP_COUNTS[self.id].fetch_add(1, Relaxed); VERSIONS.fetch_sub(self.version.get(), Relaxed); } }
macro_rules! test { ($input:ident, $func:ident) => { let len = $input.len();
// Work out the total number of comparisons required to sort this array...
let mut count = 0usize; $input.to_owned().$func(|a, b| { count += 1; a.cmp(b) });
// ... and then panic on each and every single one.
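// Panic-injection schedule (sketch of the strategy, restated): for each of the
// `count` comparisons, re-sort a fresh copy and panic on exactly that
// comparison, so every prefix of the comparison sequence becomes a panic point.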
for panic_countdown in 0..count {
// Refresh the counters.
VERSIONS.store(0, Relaxed); for i in 0..len { DROP_COUNTS[i].store(0, Relaxed); } let v = $input.to_owned(); let _ = std::panic::catch_unwind(move || { let mut v = v; let mut panic_countdown = panic_countdown; v.$func(|a, b| { if panic_countdown == 0 { SILENCE_PANIC.with(|s| s.set(true)); panic!(); } panic_countdown -= 1; a.cmp(b) }) });
// Check that the number of things dropped is exactly what we expect (i.e., the contents of `v`).
for (i, c) in DROP_COUNTS.iter().enumerate().take(len) { let count = c.load(Relaxed); assert!(count == 1, "found drop count == {} for i == {}, len == {}", count, i, len); }
// Check that the most recent versions of values were dropped.
assert_eq!(VERSIONS.load(Relaxed), 0); } }; }
thread_local!(static SILENCE_PANIC: Cell<bool> = Cell::new(false));
#[test] #[cfg_attr(target_os = "emscripten", ignore)] // no threads
fn panic_safe() { let prev = panic::take_hook(); panic::set_hook(Box::new(move |info| { if !SILENCE_PANIC.with(|s| s.get()) { prev(info); } })); let mut rng = thread_rng();
// Miri is too slow (but still need to `chain` to make the types match)
let lens = if cfg!(miri) { (1..10).chain(0..0) } else { (1..20).chain(70..MAX_LEN) }; let moduli: &[u32] = if cfg!(miri) { &[5] } else { &[5, 20, 50] }; for len in lens { for &modulus in moduli { for &has_runs in &[false, true] { let mut input = (0..len) .map(|id| DropCounter { x: rng.next_u32() % modulus, id: id, version: Cell::new(0), }) .collect::<Vec<_>>(); if has_runs { for c in &mut input { c.x = c.id as u32; } for _ in 0..5 { let a = rng.gen::<usize>() % len; let b = rng.gen::<usize>() % len; if a < b { input[a..b].reverse(); } else { input.swap(a, b); } } } test!(input, sort_by); test!(input, sort_unstable_by); } } }
// Set default panic hook again.
drop(panic::take_hook()); }
#[test] fn repeat_generic_slice() { assert_eq!([1, 2].repeat(2), vec![1, 2, 1, 2]); assert_eq!([1, 2, 3, 4].repeat(0), vec![]); assert_eq!([1, 2, 3, 4].repeat(1), vec![1, 2, 3, 4]); assert_eq!([1, 2, 3, 4].repeat(3), vec![1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4]); }
#[test] #[allow(unreachable_patterns)] fn subslice_patterns() {
// This test comprehensively checks the passing static and dynamic semantics of subslice patterns `..`, `x @ ..`, `ref x @ ..`, and `ref mut @ ..` in slice patterns `[$($pat), $(,)?]` .
#[derive(PartialEq, Debug, Clone)] struct N(u8);
macro_rules! n { ($($e:expr),* $(,)?) => { [$(N($e)),*] } }
macro_rules! c { ($inp:expr, $typ:ty, $out:expr $(,)?) => { assert_eq!($out, identity::<$typ>($inp)) }; }
macro_rules! m { ($e:expr, $p:pat => $b:expr) => { match $e { $p => $b, _ => panic!(), } }; }
// == Slices ==
// Matching slices using `ref` patterns:
let mut v = vec![N(0), N(1), N(2), N(3), N(4)]; let mut vc = (0..=4).collect::<Vec<u8>>();
let [..] = v[..]; // Always matches.
m!(v[..], [N(0), ref sub @ .., N(4)] => c!(sub, &[N], n![1, 2, 3])); m!(v[..], [N(0), ref sub @ ..] => c!(sub, &[N], n![1, 2, 3, 4])); m!(v[..], [ref sub @ .., N(4)] => c!(sub, &[N], n![0, 1, 2, 3])); m!(v[..], [ref sub @ .., _, _, _, _, _] => c!(sub, &[N], &n![] as &[N])); m!(v[..], [_, _, _, _, _, ref sub @ ..] => c!(sub, &[N], &n![] as &[N])); m!(vc[..], [x, .., y] => c!((x, y), (u8, u8), (0, 4)));
// Matching slices using `ref mut` patterns:
let [..] = v[..]; // Always matches.
m!(v[..], [N(0), ref mut sub @ .., N(4)] => c!(sub, &mut [N], n![1, 2, 3])); m!(v[..], [N(0), ref mut sub @ ..]
=> c!(sub, &mut [N], n![1, 2, 3, 4])); m!(v[..], [ref mut sub @ .., N(4)] => c!(sub, &mut [N], n![0, 1, 2, 3])); m!(v[..], [ref mut sub @ .., _, _, _, _, _] => c!(sub, &mut [N], &mut n![] as &mut [N])); m!(v[..], [_, _, _, _, _, ref mut sub @ ..] => c!(sub, &mut [N], &mut n![] as &mut [N])); m!(vc[..], [x, .., y] => c!((x, y), (u8, u8), (0, 4)));
// Matching slices using default binding modes (&):
let [..] = &v[..]; // Always matches.
m!(&v[..], [N(0), sub @ .., N(4)] => c!(sub, &[N], n![1, 2, 3])); m!(&v[..], [N(0), sub @ ..] => c!(sub, &[N], n![1, 2, 3, 4])); m!(&v[..], [sub @ .., N(4)] => c!(sub, &[N], n![0, 1, 2, 3])); m!(&v[..], [sub @ .., _, _, _, _, _] => c!(sub, &[N], &n![] as &[N])); m!(&v[..], [_, _, _, _, _, sub @ ..] => c!(sub, &[N], &n![] as &[N])); m!(&vc[..], [x, .., y] => c!((x, y), (&u8, &u8), (&0, &4)));
// Matching slices using default binding modes (&mut):
let [..] = &mut v[..]; // Always matches.
m!(&mut v[..], [N(0), sub @ .., N(4)] => c!(sub, &mut [N], n![1, 2, 3])); m!(&mut v[..], [N(0), sub @ ..] => c!(sub, &mut [N], n![1, 2, 3, 4])); m!(&mut v[..], [sub @ .., N(4)] => c!(sub, &mut [N], n![0, 1, 2, 3])); m!(&mut v[..], [sub @ .., _, _, _, _, _] => c!(sub, &mut [N], &mut n![] as &mut [N])); m!(&mut v[..], [_, _, _, _, _, sub @ ..] => c!(sub, &mut [N], &mut n![] as &mut [N])); m!(&mut vc[..], [x, .., y] => c!((x, y), (&mut u8, &mut u8), (&mut 0, &mut 4)));
// == Arrays ==
let mut v = n![0, 1, 2, 3, 4]; let vc = [0, 1, 2, 3, 4];
// Matching arrays by value:
m!(v.clone(), [N(0), sub @ .., N(4)] => c!(sub, [N; 3], n![1, 2, 3])); m!(v.clone(), [N(0), sub @ ..] => c!(sub, [N; 4], n![1, 2, 3, 4])); m!(v.clone(), [sub @ .., N(4)] => c!(sub, [N; 4], n![0, 1, 2, 3])); m!(v.clone(), [sub @ .., _, _, _, _, _] => c!(sub, [N; 0], n![] as [N; 0])); m!(v.clone(), [_, _, _, _, _, sub @ ..] => c!(sub, [N; 0], n![] as [N; 0])); m!(v.clone(), [x, .., y] => c!((x, y), (N, N), (N(0), N(4)))); m!(v.clone(), [..] => ());
// Matching arrays by ref patterns:
m!(v, [N(0), ref sub @ .., N(4)] => c!(sub, &[N; 3], &n![1, 2, 3])); m!(v, [N(0), ref sub @ ..] => c!(sub, &[N; 4], &n![1, 2, 3, 4])); m!(v, [ref sub @ .., N(4)] => c!(sub, &[N; 4], &n![0, 1, 2, 3])); m!(v, [ref sub @ .., _, _, _, _, _] => c!(sub, &[N; 0], &n![] as &[N; 0])); m!(v, [_, _, _, _, _, ref sub @ ..] => c!(sub, &[N; 0], &n![] as &[N; 0])); m!(vc, [x, .., y] => c!((x, y), (u8, u8), (0, 4)));
// Matching arrays by ref mut patterns:
m!(v, [N(0), ref mut sub @ .., N(4)] => c!(sub, &mut [N; 3], &mut n![1, 2, 3])); m!(v, [N(0), ref mut sub @ ..] => c!(sub, &mut [N; 4], &mut n![1, 2, 3, 4])); m!(v, [ref mut sub @ .., N(4)] => c!(sub, &mut [N; 4], &mut n![0, 1, 2, 3])); m!(v, [ref mut sub @ .., _, _, _, _, _] => c!(sub, &mut [N; 0], &mut n![] as &mut [N; 0])); m!(v, [_, _, _, _, _, ref mut sub @ ..] => c!(sub, &mut [N; 0], &mut n![] as &mut [N; 0]));
// Matching arrays by default binding modes (&):
m!(&v, [N(0), sub @ .., N(4)] => c!(sub, &[N; 3], &n![1, 2, 3])); m!(&v, [N(0), sub @ ..] => c!(sub, &[N; 4], &n![1, 2, 3, 4])); m!(&v, [sub @ .., N(4)] => c!(sub, &[N; 4], &n![0, 1, 2, 3])); m!(&v, [sub @ .., _, _, _, _, _] => c!(sub, &[N; 0], &n![] as &[N; 0])); m!(&v, [_, _, _, _, _, sub @ ..] => c!(sub, &[N; 0], &n![] as &[N; 0])); m!(&v, [..] => ()); m!(&v, [x, .., y] => c!((x, y), (&N, &N), (&N(0), &N(4))));
// Matching arrays by default binding modes (&mut):
m!(&mut v, [N(0), sub @ .., N(4)] => c!(sub, &mut [N; 3], &mut n![1, 2, 3])); m!(&mut v, [N(0), sub @ ..]
=> c!(sub, &mut [N; 4], &mut n![1, 2, 3, 4])); m!(&mut v, [sub @ .., N(4)] => c!(sub, &mut [N; 4], &mut n![0, 1, 2, 3])); m!(&mut v, [sub @ .., _, _, _, _, _] => c!(sub, &mut [N; 0], &mut n![] as &mut [N; 0])); m!(&mut v, [_, _, _, _, _, sub @ ..] => c!(sub, &mut [N; 0], &mut n![] as &mut [N; 0])); m!(&mut v, [..] => ()); m!(&mut v, [x, .., y] => c!((x, y), (&mut N, &mut N), (&mut N(0), &mut N(4)))); }
#[test] fn test_group_by() { let slice = &[1, 1, 1, 3, 3, 2, 2, 2, 1, 0]; let mut iter = slice.group_by(|a, b| a == b); assert_eq!(iter.next(), Some(&[1, 1, 1][..])); assert_eq!(iter.next(), Some(&[3, 3][..])); assert_eq!(iter.next(), Some(&[2, 2, 2][..])); assert_eq!(iter.next(), Some(&[1][..])); assert_eq!(iter.next(), Some(&[0][..])); assert_eq!(iter.next(), None); let mut iter = slice.group_by(|a, b| a == b); assert_eq!(iter.next_back(), Some(&[0][..])); assert_eq!(iter.next_back(), Some(&[1][..])); assert_eq!(iter.next_back(), Some(&[2, 2, 2][..])); assert_eq!(iter.next_back(), Some(&[3, 3][..])); assert_eq!(iter.next_back(), Some(&[1, 1, 1][..])); assert_eq!(iter.next_back(), None); let mut iter = slice.group_by(|a, b| a == b); assert_eq!(iter.next(), Some(&[1, 1, 1][..])); assert_eq!(iter.next_back(), Some(&[0][..])); assert_eq!(iter.next(), Some(&[3, 3][..])); assert_eq!(iter.next_back(), Some(&[1][..])); assert_eq!(iter.next(), Some(&[2, 2, 2][..])); assert_eq!(iter.next_back(), None); }
#[test] fn test_group_by_mut() { let slice = &mut [1, 1, 1, 3, 3, 2, 2, 2, 1, 0]; let mut iter = slice.group_by_mut(|a, b| a == b); assert_eq!(iter.next(), Some(&mut [1, 1, 1][..])); assert_eq!(iter.next(), Some(&mut [3, 3][..])); assert_eq!(iter.next(), Some(&mut [2, 2, 2][..])); assert_eq!(iter.next(), Some(&mut [1][..])); assert_eq!(iter.next(), Some(&mut [0][..])); assert_eq!(iter.next(), None); let mut iter = slice.group_by_mut(|a, b| a == b); assert_eq!(iter.next_back(), Some(&mut [0][..])); assert_eq!(iter.next_back(), Some(&mut [1][..])); assert_eq!(iter.next_back(), Some(&mut [2, 2, 2][..])); assert_eq!(iter.next_back(), Some(&mut [3, 3][..])); assert_eq!(iter.next_back(), Some(&mut [1, 1, 1][..])); assert_eq!(iter.next_back(), None); let mut iter = slice.group_by_mut(|a, b| a == b); assert_eq!(iter.next(), Some(&mut [1, 1, 1][..])); assert_eq!(iter.next_back(), Some(&mut [0][..])); assert_eq!(iter.next(), Some(&mut [3, 3][..])); assert_eq!(iter.next_back(), Some(&mut [1][..])); assert_eq!(iter.next(), Some(&mut [2, 2, 2][..])); assert_eq!(iter.next_back(), None); }
use std::collections::TryReserveError::*;
use std::collections::{vec_deque::Drain, VecDeque};
use std::fmt::Debug;
use std::mem::size_of;
use std::ops::Bound::*;
use std::panic::{catch_unwind, AssertUnwindSafe};
use crate::hash;
use Taggy::*;
use Taggypar::*;
#[test] fn test_simple() { let mut d = VecDeque::new(); assert_eq!(d.len(), 0); d.push_front(17); d.push_front(42); d.push_back(137); assert_eq!(d.len(), 3); d.push_back(137); assert_eq!(d.len(), 4); assert_eq!(*d.front().unwrap(), 42); assert_eq!(*d.back().unwrap(), 137); let mut i = d.pop_front(); assert_eq!(i, Some(42)); i = d.pop_back(); assert_eq!(i, Some(137)); i = d.pop_back(); assert_eq!(i, Some(137)); i = d.pop_back(); assert_eq!(i, Some(17)); assert_eq!(d.len(), 0); d.push_back(3); assert_eq!(d.len(), 1); d.push_front(2); assert_eq!(d.len(), 2); d.push_back(4); assert_eq!(d.len(), 3); d.push_front(1); assert_eq!(d.len(), 4); assert_eq!(d[0], 1); assert_eq!(d[1], 2); assert_eq!(d[2], 3); assert_eq!(d[3], 4); }
fn test_parameterized<T: Clone + PartialEq + Debug>(a: T, b: T, c: T, d: T) { let mut deq =
VecDeque::new(); assert_eq!(deq.len(), 0); deq.push_front(a.clone()); deq.push_front(b.clone()); deq.push_back(c.clone()); assert_eq!(deq.len(), 3); deq.push_back(d.clone()); assert_eq!(deq.len(), 4); assert_eq!((*deq.front().unwrap()).clone(), b.clone()); assert_eq!((*deq.back().unwrap()).clone(), d.clone()); assert_eq!(deq.pop_front().unwrap(), b.clone()); assert_eq!(deq.pop_back().unwrap(), d.clone()); assert_eq!(deq.pop_back().unwrap(), c.clone()); assert_eq!(deq.pop_back().unwrap(), a.clone()); assert_eq!(deq.len(), 0); deq.push_back(c.clone()); assert_eq!(deq.len(), 1); deq.push_front(b.clone()); assert_eq!(deq.len(), 2); deq.push_back(d.clone()); assert_eq!(deq.len(), 3); deq.push_front(a.clone()); assert_eq!(deq.len(), 4); assert_eq!(deq[0].clone(), a.clone()); assert_eq!(deq[1].clone(), b.clone()); assert_eq!(deq[2].clone(), c.clone()); assert_eq!(deq[3].clone(), d.clone()); }
#[test] fn test_push_front_grow() { let mut deq = VecDeque::new(); for i in 0..66 { deq.push_front(i); } assert_eq!(deq.len(), 66); for i in 0..66 { assert_eq!(deq[i], 65 - i); } let mut deq = VecDeque::new(); for i in 0..66 { deq.push_back(i); } for i in 0..66 { assert_eq!(deq[i], i); } }
#[test] fn test_index() { let mut deq = VecDeque::new(); for i in 1..4 { deq.push_front(i); } assert_eq!(deq[1], 2); }
#[test] #[should_panic] fn test_index_out_of_bounds() { let mut deq = VecDeque::new(); for i in 1..4 { deq.push_front(i); } deq[3]; }
#[test] #[should_panic] fn test_range_start_overflow() { let deq = VecDeque::from(vec![1, 2, 3]); deq.range((Included(0), Included(usize::MAX))); }
#[test] #[should_panic] fn test_range_end_overflow() { let deq = VecDeque::from(vec![1, 2, 3]); deq.range((Excluded(usize::MAX), Included(0))); }
#[derive(Clone, PartialEq, Debug)] enum Taggy { One(i32), Two(i32, i32), Three(i32, i32, i32), }
#[derive(Clone, PartialEq, Debug)] enum Taggypar<T> { Onepar(T), Twopar(T, T), Threepar(T, T, T), }
#[derive(Clone, PartialEq, Debug)] struct RecCy { x: i32, y: i32, t: Taggy, }
#[test] fn test_param_int() { test_parameterized::<i32>(5, 72, 64, 175); }
#[test] fn test_param_taggy() { test_parameterized::<Taggy>(One(1), Two(1, 2), Three(1, 2, 3), Two(17, 42)); }
#[test] fn test_param_taggypar() { test_parameterized::<Taggypar<i32>>( Onepar::<i32>(1), Twopar::<i32>(1, 2), Threepar::<i32>(1, 2, 3), Twopar::<i32>(17, 42), ); }
#[test] fn test_param_reccy() { let reccy1 = RecCy { x: 1, y: 2, t: One(1) }; let reccy2 = RecCy { x: 345, y: 2, t: Two(1, 2) }; let reccy3 = RecCy { x: 1, y: 777, t: Three(1, 2, 3) }; let reccy4 = RecCy { x: 19, y: 252, t: Two(17, 42) }; test_parameterized::<RecCy>(reccy1, reccy2, reccy3, reccy4); }
#[test] fn test_with_capacity() { let mut d = VecDeque::with_capacity(0); d.push_back(1); assert_eq!(d.len(), 1); let mut d = VecDeque::with_capacity(50); d.push_back(1); assert_eq!(d.len(), 1); }
#[test] fn test_with_capacity_non_power_two() { let mut d3 = VecDeque::with_capacity(3); d3.push_back(1);
// X = None, | = lo
// [|1, X, X]
assert_eq!(d3.pop_front(), Some(1));
// [X, |X, X]
assert_eq!(d3.front(), None);
// [X, |3, X]
d3.push_back(3);
// [X, |3, 6]
d3.push_back(6);
// [X, X, |6]
assert_eq!(d3.pop_front(), Some(3));
// Pushing the lo past the halfway point to trigger the 'B' scenario for growth
// [9, X, |6]
d3.push_back(9);
// [9, 12, |6]
d3.push_back(12); d3.push_back(15);
// There used to be a bug here about how the VecDeque made growth assumptions
// about the underlying Vec which didn't hold and led to corruption.
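// Reading of the diagrams below (explanatory note, not part of the original
// comment): on growth the wrapped head element (`|6`) must be moved to the end
// of the new, larger buffer; the buggy layout left it stranded mid-buffer.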
// (Vec grows to next power of two)
// good- [9, 12, 15, X, X, X, X, |6]
// bug-  [15, 12, X, X, X, |6, X, X]
assert_eq!(d3.pop_front(), Some(6));
// Which leads us to the following state which would be a failure case.
// bug-  [15, 12, X, X, X, X, |X, X]
assert_eq!(d3.front(), Some(&9)); }
#[test] fn test_reserve_exact() { let mut d = VecDeque::new(); d.push_back(0); d.reserve_exact(50); assert!(d.capacity() >= 51); }
#[test] fn test_reserve() { let mut d = VecDeque::new(); d.push_back(0); d.reserve(50); assert!(d.capacity() >= 51); }
#[test] fn test_swap() { let mut d: VecDeque<_> = (0..5).collect(); d.pop_front(); d.swap(0, 3); assert_eq!(d.iter().cloned().collect::<Vec<_>>(), [4, 2, 3, 1]); }
#[test] fn test_iter() { let mut d = VecDeque::new(); assert_eq!(d.iter().next(), None); assert_eq!(d.iter().size_hint(), (0, Some(0))); for i in 0..5 { d.push_back(i); } { let b: &[_] = &[&0, &1, &2, &3, &4]; assert_eq!(d.iter().collect::<Vec<_>>(), b); } for i in 6..9 { d.push_front(i); } { let b: &[_] = &[&8, &7, &6, &0, &1, &2, &3, &4]; assert_eq!(d.iter().collect::<Vec<_>>(), b); } let mut it = d.iter(); let mut len = d.len(); loop { match it.next() { None => break, _ => { len -= 1; assert_eq!(it.size_hint(), (len, Some(len))) } } } }
#[test] fn test_rev_iter() { let mut d = VecDeque::new(); assert_eq!(d.iter().rev().next(), None); for i in 0..5 { d.push_back(i); } { let b: &[_] = &[&4, &3, &2, &1, &0]; assert_eq!(d.iter().rev().collect::<Vec<_>>(), b); } for i in 6..9 { d.push_front(i); } let b: &[_] = &[&4, &3, &2, &1, &0, &6, &7, &8]; assert_eq!(d.iter().rev().collect::<Vec<_>>(), b); }
#[test] fn test_mut_rev_iter_wrap() { let mut d = VecDeque::with_capacity(3); assert!(d.iter_mut().rev().next().is_none()); d.push_back(1); d.push_back(2); d.push_back(3); assert_eq!(d.pop_front(), Some(1)); d.push_back(4); assert_eq!(d.iter_mut().rev().map(|x| *x).collect::<Vec<_>>(), vec![4, 3, 2]); }
#[test] fn test_mut_iter() { let mut d = VecDeque::new(); assert!(d.iter_mut().next().is_none()); for i in 0..3 { d.push_front(i); } for (i, elt) in d.iter_mut().enumerate() { assert_eq!(*elt, 2 - i); *elt = i; } { let mut it = d.iter_mut(); assert_eq!(*it.next().unwrap(), 0); assert_eq!(*it.next().unwrap(), 1); assert_eq!(*it.next().unwrap(), 2); assert!(it.next().is_none()); } }
#[test] fn test_mut_rev_iter() { let mut d = VecDeque::new(); assert!(d.iter_mut().rev().next().is_none()); for i in 0..3 { d.push_front(i); } for (i, elt) in d.iter_mut().rev().enumerate() { assert_eq!(*elt, i); *elt = i; } { let mut it = d.iter_mut().rev(); assert_eq!(*it.next().unwrap(), 0); assert_eq!(*it.next().unwrap(), 1); assert_eq!(*it.next().unwrap(), 2); assert!(it.next().is_none()); } }
#[test] fn test_into_iter() {
// Empty iter
{ let d: VecDeque<i32> = VecDeque::new(); let mut iter = d.into_iter(); assert_eq!(iter.size_hint(), (0, Some(0))); assert_eq!(iter.next(), None); assert_eq!(iter.size_hint(), (0, Some(0))); }
// simple iter
{ let mut d = VecDeque::new(); for i in 0..5 { d.push_back(i); } let b = vec![0, 1, 2, 3, 4]; assert_eq!(d.into_iter().collect::<Vec<_>>(), b); }
// wrapped iter
{ let mut d = VecDeque::new(); for i in 0..5 { d.push_back(i); } for i in 6..9 { d.push_front(i); } let b = vec![8, 7, 6, 0, 1, 2, 3, 4]; assert_eq!(d.into_iter().collect::<Vec<_>>(), b); }
// partially used
{ let mut d = VecDeque::new(); for i in 0..5 { d.push_back(i); } for i in 6..9 { d.push_front(i); } let mut it = d.into_iter(); assert_eq!(it.size_hint(), (8, Some(8))); assert_eq!(it.next(), Some(8)); assert_eq!(it.size_hint(), (7, Some(7))); assert_eq!(it.next_back(), Some(4));
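// Illustrative invariant (explanatory addition): mixing next() and next_back()
// keeps the hint exact, 8 items - 1 from the front - 1 from the back = 6.
// An equivalent check via ExactSizeIterator:
assert_eq!(it.len(), 6);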
assert_eq!(it.size_hint(), (6, Some(6))); assert_eq!(it.next(), Some(7)); assert_eq!(it.size_hint(), (5, Some(5))); } }
#[test] fn test_drain() {
// Empty iter
{ let mut d: VecDeque<i32> = VecDeque::new(); { let mut iter = d.drain(..); assert_eq!(iter.size_hint(), (0, Some(0))); assert_eq!(iter.next(), None); assert_eq!(iter.size_hint(), (0, Some(0))); } assert!(d.is_empty()); }
// simple iter
{ let mut d = VecDeque::new(); for i in 0..5 { d.push_back(i); } assert_eq!(d.drain(..).collect::<Vec<_>>(), [0, 1, 2, 3, 4]); assert!(d.is_empty()); }
// wrapped iter
{ let mut d = VecDeque::new(); for i in 0..5 { d.push_back(i); } for i in 6..9 { d.push_front(i); } assert_eq!(d.drain(..).collect::<Vec<_>>(), [8, 7, 6, 0, 1, 2, 3, 4]); assert!(d.is_empty()); }
// partially used
{ let mut d: VecDeque<_> = VecDeque::new(); for i in 0..5 { d.push_back(i); } for i in 6..9 { d.push_front(i); } { let mut it = d.drain(..); assert_eq!(it.size_hint(), (8, Some(8))); assert_eq!(it.next(), Some(8)); assert_eq!(it.size_hint(), (7, Some(7))); assert_eq!(it.next_back(), Some(4)); assert_eq!(it.size_hint(), (6, Some(6))); assert_eq!(it.next(), Some(7)); assert_eq!(it.size_hint(), (5, Some(5))); } assert!(d.is_empty()); } }
#[test] fn test_from_iter() { let v = vec![1, 2, 3, 4, 5, 6, 7]; let deq: VecDeque<_> = v.iter().cloned().collect(); let u: Vec<_> = deq.iter().cloned().collect(); assert_eq!(u, v); let seq = (0..).step_by(2).take(256); let deq: VecDeque<_> = seq.collect(); for (i, &x) in deq.iter().enumerate() { assert_eq!(2 * i, x); } assert_eq!(deq.len(), 256); }
#[test] fn test_clone() { let mut d = VecDeque::new(); d.push_front(17); d.push_front(42); d.push_back(137); d.push_back(137); assert_eq!(d.len(), 4); let mut e = d.clone(); assert_eq!(e.len(), 4); while !d.is_empty() { assert_eq!(d.pop_back(), e.pop_back()); } assert_eq!(d.len(), 0); assert_eq!(e.len(), 0); }
#[test] fn test_eq() { let mut d = VecDeque::new(); assert!(d == VecDeque::with_capacity(0)); d.push_front(137); d.push_front(17); d.push_front(42); d.push_back(137); let mut e = VecDeque::with_capacity(0); e.push_back(42); e.push_back(17); e.push_back(137); e.push_back(137); assert!(&e == &d); e.pop_back(); e.push_back(0); assert!(e != d); e.clear(); assert!(e == VecDeque::new()); }
#[test] fn test_partial_eq_array() { let d = VecDeque::<char>::new(); assert!(d == []); let mut d = VecDeque::new(); d.push_front('a'); assert!(d == ['a']); let mut d = VecDeque::new(); d.push_back('a'); assert!(d == ['a']); let mut d = VecDeque::new(); d.push_back('a'); d.push_back('b'); assert!(d == ['a', 'b']); }
#[test] fn test_hash() { let mut x = VecDeque::new(); let mut y = VecDeque::new(); x.push_back(1); x.push_back(2); x.push_back(3); y.push_back(0); y.push_back(1); y.pop_front(); y.push_back(2); y.push_back(3); assert!(hash(&x) == hash(&y)); }
#[test] fn test_hash_after_rotation() {
// test that two deques hash equal even if elements are laid out differently
let len = 28; let mut ring: VecDeque<i32> = (0..len as i32).collect(); let orig = ring.clone(); for _ in 0..ring.capacity() {
// shift values 1 step to the right by pop, sub one, push
ring.pop_front(); for elt in &mut ring { *elt -= 1; } ring.push_back(len - 1); assert_eq!(hash(&orig), hash(&ring)); assert_eq!(orig, ring); assert_eq!(ring, orig); } }
#[test] fn test_eq_after_rotation() {
// test that two deques are equal even if elements are laid out differently
let len = 28; let mut ring: VecDeque<i32> = (0..len as i32).collect(); let mut shifted = ring.clone(); for _ in 0..10 {
// shift values 1 step to the right by pop, sub one, push
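// Explanatory note: each pass renormalizes the values (pop the old front,
// subtract one from the rest, push `len - 1`), so the deque's contents stay
// fixed while its internal start offset advances by one slot per iteration.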
ring.pop_front(); for elt in &mut ring { *elt -= 1; } ring.push_back(len - 1); }
// try every shift
for _ in 0..shifted.capacity() { shifted.pop_front(); for elt in &mut shifted { *elt -= 1; } shifted.push_back(len - 1); assert_eq!(shifted, ring); assert_eq!(ring, shifted); } }
#[test] fn test_ord() { let x = VecDeque::new(); let mut y = VecDeque::new(); y.push_back(1); y.push_back(2); y.push_back(3); assert!(x < y); assert!(y > x); assert!(x <= x); assert!(x >= x); }
#[test] fn test_show() { let ringbuf: VecDeque<_> = (0..10).collect(); assert_eq!(format!("{:?}", ringbuf), "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]"); let ringbuf: VecDeque<_> = vec!["just", "one", "test", "more"].iter().cloned().collect(); assert_eq!(format!("{:?}", ringbuf), "[\"just\", \"one\", \"test\", \"more\"]"); }
#[test] fn test_drop() { static mut DROPS: i32 = 0; struct Elem; impl Drop for Elem { fn drop(&mut self) { unsafe { DROPS += 1; } } } let mut ring = VecDeque::new(); ring.push_back(Elem); ring.push_front(Elem); ring.push_back(Elem); ring.push_front(Elem); drop(ring); assert_eq!(unsafe { DROPS }, 4); }
#[test] fn test_drop_with_pop() { static mut DROPS: i32 = 0; struct Elem; impl Drop for Elem { fn drop(&mut self) { unsafe { DROPS += 1; } } } let mut ring = VecDeque::new(); ring.push_back(Elem); ring.push_front(Elem); ring.push_back(Elem); ring.push_front(Elem); drop(ring.pop_back()); drop(ring.pop_front()); assert_eq!(unsafe { DROPS }, 2); drop(ring); assert_eq!(unsafe { DROPS }, 4); }
#[test] fn test_drop_clear() { static mut DROPS: i32 = 0; struct Elem; impl Drop for Elem { fn drop(&mut self) { unsafe { DROPS += 1; } } } let mut ring = VecDeque::new(); ring.push_back(Elem); ring.push_front(Elem); ring.push_back(Elem); ring.push_front(Elem); ring.clear(); assert_eq!(unsafe { DROPS }, 4); drop(ring); assert_eq!(unsafe { DROPS }, 4); }
#[test] fn test_drop_panic() { static mut DROPS: i32 = 0; struct D(bool); impl Drop for D { fn drop(&mut self) { unsafe { DROPS += 1; } if self.0 { panic!("panic in `drop`"); } } } let mut q = VecDeque::new(); q.push_back(D(false)); q.push_back(D(false)); q.push_back(D(false)); q.push_back(D(false)); q.push_back(D(false)); q.push_front(D(false)); q.push_front(D(false)); q.push_front(D(true)); catch_unwind(move || drop(q)).ok(); assert_eq!(unsafe { DROPS }, 8); }
#[test] fn test_reserve_grow() {
// test growth path A
// [T o o H] -> [T o o H . . . . ]
let mut ring = VecDeque::with_capacity(4); for i in 0..3 { ring.push_back(i); } ring.reserve(7); for i in 0..3 { assert_eq!(ring.pop_front(), Some(i)); }
// test growth path B
// [H T o o] -> [. T o o H . . . ]
let mut ring = VecDeque::with_capacity(4); for i in 0..1 { ring.push_back(i); assert_eq!(ring.pop_front(), Some(i)); } for i in 0..3 { ring.push_back(i); } ring.reserve(7); for i in 0..3 { assert_eq!(ring.pop_front(), Some(i)); }
// test growth path C
// [o o H T] -> [o o H . . . . T ]
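// Reading of the diagram above (explanatory note): in path C the head block
// `T` sits at the buffer's physical end, so growth must relocate it to the
// end of the new allocation to keep the ring contiguous.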
let mut ring = VecDeque::with_capacity(4); for i in 0..3 { ring.push_back(i); assert_eq!(ring.pop_front(), Some(i)); } for i in 0..3 { ring.push_back(i); } ring.reserve(7); for i in 0..3 { assert_eq!(ring.pop_front(), Some(i)); } }
#[test] fn test_get() { let mut ring = VecDeque::new(); ring.push_back(0); assert_eq!(ring.get(0), Some(&0)); assert_eq!(ring.get(1), None); ring.push_back(1); assert_eq!(ring.get(0), Some(&0)); assert_eq!(ring.get(1), Some(&1)); assert_eq!(ring.get(2), None); ring.push_back(2); assert_eq!(ring.get(0), Some(&0)); assert_eq!(ring.get(1), Some(&1)); assert_eq!(ring.get(2), Some(&2)); assert_eq!(ring.get(3), None); assert_eq!(ring.pop_front(), Some(0)); assert_eq!(ring.get(0), Some(&1)); assert_eq!(ring.get(1), Some(&2)); assert_eq!(ring.get(2), None); assert_eq!(ring.pop_front(), Some(1)); assert_eq!(ring.get(0), Some(&2)); assert_eq!(ring.get(1), None); assert_eq!(ring.pop_front(), Some(2)); assert_eq!(ring.get(0), None); assert_eq!(ring.get(1), None); }
#[test] fn test_get_mut() { let mut ring = VecDeque::new(); for i in 0..3 { ring.push_back(i); } match ring.get_mut(1) { Some(x) => *x = -1, None => (), }; assert_eq!(ring.get_mut(0), Some(&mut 0)); assert_eq!(ring.get_mut(1), Some(&mut -1)); assert_eq!(ring.get_mut(2), Some(&mut 2)); assert_eq!(ring.get_mut(3), None); assert_eq!(ring.pop_front(), Some(0)); assert_eq!(ring.get_mut(0), Some(&mut -1)); assert_eq!(ring.get_mut(1), Some(&mut 2)); assert_eq!(ring.get_mut(2), None); }
#[test] fn test_front() { let mut ring = VecDeque::new(); ring.push_back(10); ring.push_back(20); assert_eq!(ring.front(), Some(&10)); ring.pop_front(); assert_eq!(ring.front(), Some(&20)); ring.pop_front(); assert_eq!(ring.front(), None); }
#[test] fn test_as_slices() { let mut ring: VecDeque<i32> = VecDeque::with_capacity(127); let cap = ring.capacity() as i32; let first = cap / 2; let last = cap - first; for i in 0..first { ring.push_back(i); let (left, right) = ring.as_slices(); let expected: Vec<_> = (0..=i).collect(); assert_eq!(left, &expected[..]); assert_eq!(right, []); } for j in -last..0 { ring.push_front(j); let (left, right) = ring.as_slices(); let expected_left: Vec<_> = (-last..=j).rev().collect(); let expected_right: Vec<_> = (0..first).collect(); assert_eq!(left, &expected_left[..]); assert_eq!(right, &expected_right[..]); } assert_eq!(ring.len() as i32, cap); assert_eq!(ring.capacity() as i32, cap); }
#[test] fn test_as_mut_slices() { let mut ring: VecDeque<i32> = VecDeque::with_capacity(127); let cap = ring.capacity() as i32; let first = cap / 2; let last = cap - first; for i in 0..first { ring.push_back(i); let (left, right) = ring.as_mut_slices(); let expected: Vec<_> = (0..=i).collect(); assert_eq!(left, &expected[..]); assert_eq!(right, []); } for j in -last..0 { ring.push_front(j); let (left, right) = ring.as_mut_slices(); let expected_left: Vec<_> = (-last..=j).rev().collect(); let expected_right: Vec<_> = (0..first).collect(); assert_eq!(left, &expected_left[..]); assert_eq!(right, &expected_right[..]); } assert_eq!(ring.len() as i32, cap); assert_eq!(ring.capacity() as i32, cap); }
#[test] fn test_append() { let mut a: VecDeque<_> = vec![1, 2, 3].into_iter().collect(); let mut b: VecDeque<_> = vec![4, 5, 6].into_iter().collect();
// normal append
a.append(&mut b); assert_eq!(a.iter().cloned().collect::<Vec<_>>(), [1, 2, 3, 4, 5, 6]); assert_eq!(b.iter().cloned().collect::<Vec<_>>(), []);
// append nothing to something
a.append(&mut b); assert_eq!(a.iter().cloned().collect::<Vec<_>>(), [1, 2, 3, 4, 5, 6]);
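// `b` was drained by the appends above; the assertion below checks it stayed
// empty before we append the other way around.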
assert_eq!(b.iter().cloned().collect::<Vec<_>>(), []);
// append something to nothing
b.append(&mut a); assert_eq!(b.iter().cloned().collect::<Vec<_>>(), [1, 2, 3, 4, 5, 6]); assert_eq!(a.iter().cloned().collect::<Vec<_>>(), []); }
#[test] fn test_append_permutations() { fn construct_vec_deque( push_back: usize, pop_back: usize, push_front: usize, pop_front: usize, ) -> VecDeque<usize> { let mut out = VecDeque::new(); for a in 0..push_back { out.push_back(a); } for b in 0..push_front { out.push_front(push_back + b); } for _ in 0..pop_back { out.pop_back(); } for _ in 0..pop_front { out.pop_front(); } out }
// Miri is too slow
let max = if cfg!(miri) { 3 } else { 5 };
// Many different permutations of both the `VecDeque` getting appended to and the one getting appended are generated to check `append`. This ensures all 6 code paths of `append` are tested.
for src_push_back in 0..max { for src_push_front in 0..max {
// doesn't pop more values than are pushed
for src_pop_back in 0..(src_push_back + src_push_front) { for src_pop_front in 0..(src_push_back + src_push_front - src_pop_back) { let src = construct_vec_deque( src_push_back, src_pop_back, src_push_front, src_pop_front, ); for dst_push_back in 0..max { for dst_push_front in 0..max { for dst_pop_back in 0..(dst_push_back + dst_push_front) { for dst_pop_front in 0..(dst_push_back + dst_push_front - dst_pop_back) { let mut dst = construct_vec_deque( dst_push_back, dst_pop_back, dst_push_front, dst_pop_front, ); let mut src = src.clone();
// Assert that appending `src` to `dst` gives the same order of values as iterating over both in sequence.
let correct = dst .iter() .chain(src.iter()) .cloned() .collect::<Vec<_>>(); dst.append(&mut src); assert_eq!(dst, correct); assert!(src.is_empty()); } } } } } } } }
struct DropCounter<'a> { count: &'a mut u32, }
impl Drop for DropCounter<'_> { fn drop(&mut self) { *self.count += 1; } }
#[test] fn test_append_double_drop() { let (mut count_a, mut count_b) = (0, 0); { let mut a = VecDeque::new(); let mut b = VecDeque::new(); a.push_back(DropCounter { count: &mut count_a }); b.push_back(DropCounter { count: &mut count_b }); a.append(&mut b); } assert_eq!(count_a, 1); assert_eq!(count_b, 1); }
#[test] fn test_retain() { let mut buf = VecDeque::new(); buf.extend(1..5); buf.retain(|&x| x % 2 == 0); let v: Vec<_> = buf.into_iter().collect(); assert_eq!(&v[..], &[2, 4]); }
#[test] fn test_extend_ref() { let mut v = VecDeque::new(); v.push_back(1); v.extend(&[2, 3, 4]); assert_eq!(v.len(), 4); assert_eq!(v[0], 1); assert_eq!(v[1], 2); assert_eq!(v[2], 3); assert_eq!(v[3], 4); let mut w = VecDeque::new(); w.push_back(5); w.push_back(6); v.extend(&w); assert_eq!(v.len(), 6); assert_eq!(v[0], 1); assert_eq!(v[1], 2); assert_eq!(v[2], 3); assert_eq!(v[3], 4); assert_eq!(v[4], 5); assert_eq!(v[5], 6); }
#[test] fn test_contains() { let mut v = VecDeque::new(); v.extend(&[2, 3, 4]); assert!(v.contains(&3)); assert!(!v.contains(&1)); v.clear(); assert!(!v.contains(&3)); }
#[allow(dead_code)] fn assert_covariance() { fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> { d } }
#[test] fn test_is_empty() { let mut v = VecDeque::<i32>::new(); assert!(v.is_empty()); assert!(v.iter().is_empty()); assert!(v.iter_mut().is_empty()); v.extend(&[2, 3, 4]); assert!(!v.is_empty()); assert!(!v.iter().is_empty()); assert!(!v.iter_mut().is_empty()); while let Some(_) = v.pop_front() { assert_eq!(v.is_empty(), v.len() == 0); assert_eq!(v.iter().is_empty(), v.iter().len() == 0); assert_eq!(v.iter_mut().is_empty(), v.iter_mut().len() == 0);
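// Explanatory note: all three emptiness views (deque, iter, iter_mut) must
// agree with `len() == 0` at every intermediate length as elements are popped.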
} assert!(v.is_empty()); assert!(v.iter().is_empty()); assert!(v.iter_mut().is_empty()); assert!(v.into_iter().is_empty()); }
#[test] fn test_reserve_exact_2() {
// This is all the same as test_reserve
let mut v = VecDeque::new(); v.reserve_exact(2); assert!(v.capacity() >= 2); for i in 0..16 { v.push_back(i); } assert!(v.capacity() >= 16); v.reserve_exact(16); assert!(v.capacity() >= 32); v.push_back(16); v.reserve_exact(16); assert!(v.capacity() >= 48) }
#[test] #[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve() {
// These are the interesting cases:
// * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM)
// * > isize::MAX should always fail
// * On 16/32-bit should CapacityOverflow
// * On 64-bit should OOM
// * overflow may trigger when adding `len` to `cap` (in number of elements)
// * overflow may trigger when multiplying `new_cap` by size_of::<T> (to get bytes)
const MAX_CAP: usize = (isize::MAX as usize + 1) / 2 - 1; const MAX_USIZE: usize = usize::MAX;
// On 16/32-bit, we check that allocations don't exceed isize::MAX; on 64-bit, we assume the OS will give an OOM for such a ridiculous size. Any platform that succeeds for these requests is technically broken with ptr::offset because LLVM is the worst.
let guards_against_isize = size_of::<usize>() < 8; {
// Note: basic stuff is checked by test_reserve
let mut empty_bytes: VecDeque<u8> = VecDeque::new();
// Check isize::MAX doesn't count as an overflow
if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP) { panic!("isize::MAX shouldn't trigger an overflow!"); }
// Play it again, frank! (just to be sure)
if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP) { panic!("isize::MAX shouldn't trigger an overflow!"); } if guards_against_isize {
// Check isize::MAX + 1 does count as overflow
if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP + 1) { } else { panic!("isize::MAX + 1 should trigger an overflow!") }
// Check usize::MAX does count as overflow
if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) { } else { panic!("usize::MAX should trigger an overflow!") } } else {
// Check isize::MAX is an OOM: VecDeque starts with capacity 7, always adds 1 to the capacity, and also rounds the number to the next power of 2, so this is the furthest we can go without triggering CapacityOverflow
if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_CAP) { } else { panic!("isize::MAX + 1 should trigger an OOM!") } } } {
// Same basic idea, but with non-zero len
let mut ten_bytes: VecDeque<u8> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect(); if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if guards_against_isize { if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 9) { } else { panic!("isize::MAX + 1 should trigger an overflow!"); } } else { if let Err(AllocError { ..
}) = ten_bytes.try_reserve(MAX_CAP - 9) { } else { panic!("isize::MAX + 1 should trigger an OOM!") } }
// Should always overflow in the add-to-len
if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_USIZE) { } else { panic!("usize::MAX should trigger an overflow!") } } {
// Same basic idea, but with interesting type size
let mut ten_u32s: VecDeque<u32> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect(); if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if guards_against_isize { if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) { } else { panic!("isize::MAX + 1 should trigger an overflow!"); } } else { if let Err(AllocError { .. }) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) { } else { panic!("isize::MAX + 1 should trigger an OOM!") } }
// Should fail in the mul-by-size
if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_USIZE - 20) { } else { panic!("usize::MAX should trigger an overflow!"); } } }
#[test] #[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve_exact() {
// This is exactly the same as test_try_reserve with the method changed. See that test for comments.
const MAX_CAP: usize = (isize::MAX as usize + 1) / 2 - 1; const MAX_USIZE: usize = usize::MAX; let guards_against_isize = size_of::<usize>() < 8; { let mut empty_bytes: VecDeque<u8> = VecDeque::new(); if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP) { panic!("isize::MAX shouldn't trigger an overflow!"); } if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP) { panic!("isize::MAX shouldn't trigger an overflow!"); } if guards_against_isize { if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP + 1) { } else { panic!("isize::MAX + 1 should trigger an overflow!") } if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_USIZE) { } else { panic!("usize::MAX should trigger an overflow!") } } else {
// Check isize::MAX is an OOM: VecDeque starts with capacity 7, always adds 1 to the capacity, and also rounds the number to the next power of 2, so this is the furthest we can go without triggering CapacityOverflow
if let Err(AllocError { .. }) = empty_bytes.try_reserve_exact(MAX_CAP) { } else { panic!("isize::MAX + 1 should trigger an OOM!") } } } { let mut ten_bytes: VecDeque<u8> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect(); if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if guards_against_isize { if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 9) { } else { panic!("isize::MAX + 1 should trigger an overflow!"); } } else { if let Err(AllocError { ..
}) = ten_bytes.try_reserve_exact(MAX_CAP - 9) { } else { panic!("isize::MAX + 1 should trigger an OOM!") } } if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_USIZE) { } else { panic!("usize::MAX should trigger an overflow!") } } { let mut ten_u32s: VecDeque = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect(); if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if guards_against_isize { if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) { } else { panic!("isize::MAX + 1 should trigger an overflow!"); } } else { if let Err(AllocError { .. }) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) { } else { panic!("isize::MAX + 1 should trigger an OOM!") } } if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_USIZE - 20) { } else { panic!("usize::MAX should trigger an overflow!") } } } #[test] fn test_rotate_nop() { let mut v: VecDeque<_> = (0..10).collect(); assert_unchanged(&v); v.rotate_left(0); assert_unchanged(&v); v.rotate_left(10); assert_unchanged(&v); v.rotate_right(0); assert_unchanged(&v); v.rotate_right(10); assert_unchanged(&v); v.rotate_left(3); v.rotate_right(3); assert_unchanged(&v); v.rotate_right(3); v.rotate_left(3); assert_unchanged(&v); v.rotate_left(6); v.rotate_right(6); assert_unchanged(&v); v.rotate_right(6); v.rotate_left(6); assert_unchanged(&v); v.rotate_left(3); v.rotate_left(7); assert_unchanged(&v); v.rotate_right(4); v.rotate_right(6); assert_unchanged(&v); v.rotate_left(1); v.rotate_left(2); v.rotate_left(3); v.rotate_left(4); assert_unchanged(&v); v.rotate_right(1); v.rotate_right(2); v.rotate_right(3); v.rotate_right(4); assert_unchanged(&v); fn assert_unchanged(v: &VecDeque) { assert_eq!(v, &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]); } } #[test] fn test_rotate_left_parts() { let mut v: VecDeque<_> = (1..=7).collect(); v.rotate_left(2); assert_eq!(v.as_slices(), (&[3, 4, 5, 6, 7, 1][..], &[2][..])); v.rotate_left(2); assert_eq!(v.as_slices(), (&[5, 6, 7, 1][..], &[2, 3, 4][..])); v.rotate_left(2); assert_eq!(v.as_slices(), (&[7, 1][..], &[2, 3, 4, 5, 6][..])); v.rotate_left(2); assert_eq!(v.as_slices(), (&[2, 3, 4, 5, 6, 7, 1][..], &[][..])); v.rotate_left(2); assert_eq!(v.as_slices(), (&[4, 5, 6, 7, 1, 2][..], &[3][..])); v.rotate_left(2); assert_eq!(v.as_slices(), (&[6, 7, 1, 2][..], &[3, 4, 5][..])); v.rotate_left(2); assert_eq!(v.as_slices(), (&[1, 2][..], &[3, 4, 5, 6, 7][..])); } #[test] fn test_rotate_right_parts() { let mut v: VecDeque<_> = (1..=7).collect(); v.rotate_right(2); assert_eq!(v.as_slices(), (&[6, 7][..], &[1, 2, 3, 4, 5][..])); v.rotate_right(2); assert_eq!(v.as_slices(), (&[4, 5, 6, 7][..], &[1, 2, 3][..])); v.rotate_right(2); assert_eq!(v.as_slices(), (&[2, 3, 4, 5, 6, 7][..], &[1][..])); v.rotate_right(2); assert_eq!(v.as_slices(), (&[7, 1, 2, 3, 4, 5, 6][..], &[][..])); v.rotate_right(2); assert_eq!(v.as_slices(), (&[5, 6][..], &[7, 1, 2, 3, 4][..])); v.rotate_right(2); assert_eq!(v.as_slices(), (&[3, 4, 5, 6][..], &[7, 1, 2][..])); v.rotate_right(2); assert_eq!(v.as_slices(), (&[1, 2, 3, 4, 5, 6][..], &[7][..])); } #[test] fn test_rotate_left_random() { let shifts = [ 6, 1, 0, 11, 12, 1, 11, 7, 9, 3, 6, 1, 4, 0, 5, 1, 3, 1, 12, 8, 3, 1, 11, 11, 9, 4, 12, 3, 12, 9, 11, 1, 7, 9, 7, 2, ]; let n = 12; let mut v: VecDeque<_> = (0..n).collect(); let mut total_shift = 0; for shift in 
shifts.iter().cloned() { v.rotate_left(shift); total_shift += shift; for i in 0..n { assert_eq!(v[i], (i + total_shift) % n); } } } #[test] fn test_rotate_right_random() { let shifts = [ 6, 1, 0, 11, 12, 1, 11, 7, 9, 3, 6, 1, 4, 0, 5, 1, 3, 1, 12, 8, 3, 1, 11, 11, 9, 4, 12, 3, 12, 9, 11, 1, 7, 9, 7, 2, ]; let n = 12; let mut v: VecDeque<_> = (0..n).collect(); let mut total_shift = 0; for shift in shifts.iter().cloned() { v.rotate_right(shift); total_shift += shift; for i in 0..n { assert_eq!(v[(i + total_shift) % n], i); } } } #[test] fn test_try_fold_empty() { assert_eq!(Some(0), VecDeque::::new().iter().try_fold(0, |_, _| None)); } #[test] fn test_try_fold_none() { let v: VecDeque = (0..12).collect(); assert_eq!(None, v.into_iter().try_fold(0, |a, b| if b < 11 { Some(a + b) } else { None })); } #[test] fn test_try_fold_ok() { let v: VecDeque = (0..12).collect(); assert_eq!(Ok::<_, ()>(66), v.into_iter().try_fold(0, |a, b| Ok(a + b))); } #[test] fn test_try_fold_unit() { let v: VecDeque<()> = std::iter::repeat(()).take(42).collect(); assert_eq!(Some(()), v.into_iter().try_fold((), |(), ()| Some(()))); } #[test] fn test_try_fold_unit_none() { let v: std::collections::VecDeque<()> = [(); 10].iter().cloned().collect(); let mut iter = v.into_iter(); assert!(iter.try_fold((), |_, _| None).is_none()); assert_eq!(iter.len(), 9); } #[test] fn test_try_fold_rotated() { let mut v: VecDeque<_> = (0..12).collect(); for n in 0..10 { if n & 1 == 0 { v.rotate_left(n); } else { v.rotate_right(n); } assert_eq!(Ok::<_, ()>(66), v.iter().try_fold(0, |a, b| Ok(a + b))); } } #[test] fn test_try_fold_moves_iter() { let v: VecDeque<_> = [10, 20, 30, 40, 100, 60, 70, 80, 90].iter().collect(); let mut iter = v.into_iter(); assert_eq!(iter.try_fold(0_i8, |acc, &x| acc.checked_add(x)), None); assert_eq!(iter.next(), Some(&60)); } #[test] fn test_try_fold_exhaust_wrap() { let mut v = VecDeque::with_capacity(7); v.push_back(1); v.push_back(1); v.push_back(1); v.pop_front(); v.pop_front(); let mut iter = v.iter(); let _ = iter.try_fold(0, |_, _| Some(1)); assert!(iter.is_empty()); } #[test] fn test_try_fold_wraparound() { let mut v = VecDeque::with_capacity(8); v.push_back(7); v.push_back(8); v.push_back(9); v.push_front(2); v.push_front(1); let mut iter = v.iter(); let _ = iter.find(|&&x| x == 2); assert_eq!(Some(&7), iter.next()); } #[test] fn test_try_rfold_rotated() { let mut v: VecDeque<_> = (0..12).collect(); for n in 0..10 { if n & 1 == 0 { v.rotate_left(n); } else { v.rotate_right(n); } assert_eq!(Ok::<_, ()>(66), v.iter().try_rfold(0, |a, b| Ok(a + b))); } } #[test] fn test_try_rfold_moves_iter() { let v: VecDeque<_> = [10, 20, 30, 40, 100, 60, 70, 80, 90].iter().collect(); let mut iter = v.into_iter(); assert_eq!(iter.try_rfold(0_i8, |acc, &x| acc.checked_add(x)), None); assert_eq!(iter.next_back(), Some(&70)); } #[test] fn truncate_leak() { static mut DROPS: i32 = 0; struct D(bool); impl Drop for D { fn drop(&mut self) { unsafe { DROPS += 1; } if self.0 { panic!("panic in `drop`"); } } } let mut q = VecDeque::new(); q.push_back(D(false)); q.push_back(D(false)); q.push_back(D(false)); q.push_back(D(false)); q.push_back(D(false)); q.push_front(D(true)); q.push_front(D(false)); q.push_front(D(false)); catch_unwind(AssertUnwindSafe(|| q.truncate(1))).ok(); assert_eq!(unsafe { DROPS }, 7); } #[test] fn test_drain_leak() { static mut DROPS: i32 = 0; #[derive(Debug, PartialEq)] struct D(u32, bool); impl Drop for D { fn drop(&mut self) { unsafe { DROPS += 1; } if self.1 { panic!("panic in `drop`"); } } } let mut v 
= VecDeque::new(); v.push_back(D(4, false)); v.push_back(D(5, false)); v.push_back(D(6, false)); v.push_front(D(3, false)); v.push_front(D(2, true)); v.push_front(D(1, false)); v.push_front(D(0, false)); catch_unwind(AssertUnwindSafe(|| { v.drain(1..=4); })) .ok(); assert_eq!(unsafe { DROPS }, 4); assert_eq!(v.len(), 3); drop(v); assert_eq!(unsafe { DROPS }, 7); } #[test] fn test_binary_search() { // Contiguous (front only) search: let deque: VecDeque<_> = vec![1, 2, 3, 5, 6].into(); assert!(deque.as_slices().1.is_empty()); assert_eq!(deque.binary_search(&3), Ok(2)); assert_eq!(deque.binary_search(&4), Err(3)); // Split search (both front & back non-empty): let mut deque: VecDeque<_> = vec![5, 6].into(); deque.push_front(3); deque.push_front(2); deque.push_front(1); deque.push_back(10); assert!(!deque.as_slices().0.is_empty()); assert!(!deque.as_slices().1.is_empty()); assert_eq!(deque.binary_search(&0), Err(0)); assert_eq!(deque.binary_search(&1), Ok(0)); assert_eq!(deque.binary_search(&5), Ok(3)); assert_eq!(deque.binary_search(&7), Err(5)); assert_eq!(deque.binary_search(&20), Err(6)); } #[test] fn test_binary_search_by() { let deque: VecDeque<_> = vec![(1,), (2,), (3,), (5,), (6,)].into(); assert_eq!(deque.binary_search_by(|&(v,)| v.cmp(&3)), Ok(2)); assert_eq!(deque.binary_search_by(|&(v,)| v.cmp(&4)), Err(3)); } #[test] fn test_binary_search_by_key() { let deque: VecDeque<_> = vec![(1,), (2,), (3,), (5,), (6,)].into(); assert_eq!(deque.binary_search_by_key(&3, |&(v,)| v), Ok(2)); assert_eq!(deque.binary_search_by_key(&4, |&(v,)| v), Err(3)); } #[test] fn test_partition_point() { // Contiguous (front only) search: let deque: VecDeque<_> = vec![1, 2, 3, 5, 6].into(); assert!(deque.as_slices().1.is_empty()); assert_eq!(deque.partition_point(|&v| v <= 3), 3); // Split search (both front & back non-empty): let mut deque: VecDeque<_> = vec![5, 6].into(); deque.push_front(3); deque.push_front(2); deque.push_front(1); deque.push_back(10); assert!(!deque.as_slices().0.is_empty()); assert!(!deque.as_slices().1.is_empty()); assert_eq!(deque.partition_point(|&v| v <= 5), 4); } #[test] fn test_zero_sized_push() { const N: usize = 8; // Zero sized type struct Zst; // Test that for all possible sequences of push_front / push_back, // we end up with a deque of the correct size for len in 0..N { let mut tester = VecDeque::with_capacity(len); assert_eq!(tester.len(), 0); assert!(tester.capacity() >= len); for case in 0..(1 << len) { assert_eq!(tester.len(), 0); for bit in 0..len { if case & (1 << bit) != 0 { tester.push_front(Zst); } else { tester.push_back(Zst); } } assert_eq!(tester.len(), len); assert_eq!(tester.iter().count(), len); tester.clear(); } } } #[test] fn test_from_zero_sized_vec() { let v = vec![(); 100]; let queue = VecDeque::from(v); assert_eq!(queue.len(), 100); } #![deny(warnings)] use std::cell::RefCell; use std::fmt::{self, Write}; #[test] fn test_format() { let s = fmt::format(format_args!("Hello, {}!", "world")); assert_eq!(s, "Hello, world!"); } struct A; struct B; struct C; struct D; impl fmt::LowerHex for A { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str("aloha") } } impl fmt::UpperHex for B { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str("adios") } } impl fmt::Display for C { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.pad_integral(true, "☃", "123") } } impl fmt::Binary for D { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str("aa")?; f.write_char('☃')?; f.write_str("bb") } } macro_rules! 
t { ($a:expr, $b:expr) => { assert_eq!($a, $b) }; } #[test] fn test_format_macro_interface() { // Various edge cases without formats t!(format!(""), ""); t!(format!("hello"), "hello"); t!(format!("hello {{"), "hello {"); // default formatters should work t!(format!("{}", 1.0f32), "1"); t!(format!("{}", 1.0f64), "1"); t!(format!("{}", "a"), "a"); t!(format!("{}", "a".to_string()), "a"); t!(format!("{}", false), "false"); t!(format!("{}", 'a'), "a"); // At least exercise all the formats t!(format!("{}", true), "true"); t!(format!("{}", '☃'), "☃"); t!(format!("{}", 10), "10"); t!(format!("{}", 10_usize), "10"); t!(format!("{:?}", '☃'), "'☃'"); t!(format!("{:?}", 10), "10"); t!(format!("{:?}", 10_usize), "10"); t!(format!("{:?}", "true"), "\"true\""); t!(format!("{:?}", "foo\nbar"), "\"foo\\nbar\""); t!(format!("{:?}", "foo\n\"bar\"\r\n\'baz\'\t\\qux\\"), r#""foo\n\"bar\"\r\n'baz'\t\\qux\\""#); t!(format!("{:?}", "foo\0bar\x01baz\u{7f}q\u{75}x"), r#""foo\u{0}bar\u{1}baz\u{7f}qux""#); t!(format!("{:o}", 10_usize), "12"); t!(format!("{:x}", 10_usize), "a"); t!(format!("{:X}", 10_usize), "A"); t!(format!("{}", "foo"), "foo"); t!(format!("{}", "foo".to_string()), "foo"); if cfg!(target_pointer_width = "32") { t!(format!("{:#p}", 0x1234 as *const isize), "0x00001234"); t!(format!("{:#p}", 0x1234 as *mut isize), "0x00001234"); } else { t!(format!("{:#p}", 0x1234 as *const isize), "0x0000000000001234"); t!(format!("{:#p}", 0x1234 as *mut isize), "0x0000000000001234"); } t!(format!("{:p}", 0x1234 as *const isize), "0x1234"); t!(format!("{:p}", 0x1234 as *mut isize), "0x1234"); t!(format!("{:x}", A), "aloha"); t!(format!("{:X}", B), "adios"); t!(format!("foo {} ☃☃☃☃☃☃", "bar"), "foo bar ☃☃☃☃☃☃"); t!(format!("{1} {0}", 0, 1), "1 0"); t!(format!("{foo} {bar}", foo = 0, bar = 1), "0 1"); t!(format!("{foo} {1} {bar} {0}", 0, 1, foo = 2, bar = 3), "2 1 3 0"); t!(format!("{} {0}", "a"), "a a"); t!(format!("{_foo}", _foo = 6usize), "6"); t!(format!("{foo_bar}", foo_bar = 1), "1"); t!(format!("{}", 5 + 5), "10"); t!(format!("{:#4}", C), "☃123"); t!(format!("{:b}", D), "aa☃bb"); let a: &dyn fmt::Debug = &1; t!(format!("{:?}", a), "1"); // Formatting strings and their arguments t!(format!("{}", "a"), "a"); t!(format!("{:4}", "a"), "a "); t!(format!("{:4}", "☃"), "☃ "); t!(format!("{:>4}", "a"), " a"); t!(format!("{:<4}", "a"), "a "); t!(format!("{:^5}", "a"), " a "); t!(format!("{:^5}", "aa"), " aa "); t!(format!("{:^4}", "a"), " a "); t!(format!("{:^4}", "aa"), " aa "); t!(format!("{:.4}", "a"), "a"); t!(format!("{:4.4}", "a"), "a "); t!(format!("{:4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa"); t!(format!("{:<4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa"); t!(format!("{:>4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa"); t!(format!("{:^4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa"); t!(format!("{:>10.4}", "aaaaaaaaaaaaaaaaaa"), " aaaa"); t!(format!("{:2.4}", "aaaaa"), "aaaa"); t!(format!("{:2.4}", "aaaa"), "aaaa"); t!(format!("{:2.4}", "aaa"), "aaa"); t!(format!("{:2.4}", "aa"), "aa"); t!(format!("{:2.4}", "a"), "a "); t!(format!("{:0>2}", "a"), "0a"); t!(format!("{:.*}", 4, "aaaaaaaaaaaaaaaaaa"), "aaaa"); t!(format!("{:.1$}", "aaaaaaaaaaaaaaaaaa", 4), "aaaa"); t!(format!("{:.a$}", "aaaaaaaaaaaaaaaaaa", a = 4), "aaaa"); t!(format!("{:._a$}", "aaaaaaaaaaaaaaaaaa", _a = 4), "aaaa"); t!(format!("{:1$}", "a", 4), "a "); t!(format!("{1:0$}", 4, "a"), "a "); t!(format!("{:a$}", "a", a = 4), "a "); t!(format!("{:-#}", "a"), "a"); t!(format!("{:+#}", "a"), "a"); t!(format!("{:/^10.8}", "1234567890"), "/12345678/"); // Some float stuff t!(format!("{:}", 
1.0f32), "1"); t!(format!("{:}", 1.0f64), "1"); t!(format!("{:.3}", 1.0f64), "1.000"); t!(format!("{:10.3}", 1.0f64), " 1.000"); t!(format!("{:+10.3}", 1.0f64), " +1.000"); t!(format!("{:+10.3}", -1.0f64), " -1.000"); t!(format!("{:e}", 1.2345e6f32), "1.2345e6"); t!(format!("{:e}", 1.2345e6f64), "1.2345e6"); t!(format!("{:E}", 1.2345e6f64), "1.2345E6"); t!(format!("{:.3e}", 1.2345e6f64), "1.234e6"); t!(format!("{:10.3e}", 1.2345e6f64), " 1.234e6"); t!(format!("{:+10.3e}", 1.2345e6f64), " +1.234e6"); t!(format!("{:+10.3e}", -1.2345e6f64), " -1.234e6"); // Float edge cases t!(format!("{}", -0.0), "-0"); t!(format!("{:?}", 0.0), "0.0"); // sign aware zero padding t!(format!("{:<3}", 1), "1 "); t!(format!("{:>3}", 1), " 1"); t!(format!("{:^3}", 1), " 1 "); t!(format!("{:03}", 1), "001"); t!(format!("{:<03}", 1), "001"); t!(format!("{:>03}", 1), "001"); t!(format!("{:^03}", 1), "001"); t!(format!("{:+03}", 1), "+01"); t!(format!("{:<+03}", 1), "+01"); t!(format!("{:>+03}", 1), "+01"); t!(format!("{:^+03}", 1), "+01"); t!(format!("{:#05x}", 1), "0x001"); t!(format!("{:<#05x}", 1), "0x001"); t!(format!("{:>#05x}", 1), "0x001"); t!(format!("{:^#05x}", 1), "0x001"); t!(format!("{:05}", 1.2), "001.2"); t!(format!("{:<05}", 1.2), "001.2"); t!(format!("{:>05}", 1.2), "001.2"); t!(format!("{:^05}", 1.2), "001.2"); t!(format!("{:05}", -1.2), "-01.2"); t!(format!("{:<05}", -1.2), "-01.2"); t!(format!("{:>05}", -1.2), "-01.2"); t!(format!("{:^05}", -1.2), "-01.2"); t!(format!("{:+05}", 1.2), "+01.2"); t!(format!("{:<+05}", 1.2), "+01.2"); t!(format!("{:>+05}", 1.2), "+01.2"); t!(format!("{:^+05}", 1.2), "+01.2"); // Ergonomic format_args! t!(format!("{0:x} {0:X}", 15), "f F"); t!(format!("{0:x} {0:X} {}", 15), "f F 15"); t!(format!("{:x}{0:X}{a:x}{:X}{1:x}{a:X}", 13, 14, a = 15), "dDfEeF"); t!(format!("{a:x} {a:X}", a = 15), "f F"); // And its edge cases t!( format!( "{a:.0$} {b:.0$} {0:.0$}\n{a:.c$} {b:.c$} {c:.c$}", 4, a = "abcdefg", b = "hijklmn", c = 3 ), "abcd hijk 4\nabc hij 3" ); t!(format!("{a:.*} {0} {:.*}", 4, 3, "efgh", a = "abcdef"), "abcd 4 efg"); t!(format!("{:.a$} {a} {a:#x}", "aaaaaa", a = 2), "aa 2 0x2"); // Test that pointers don't get truncated. { let val = usize::MAX; let exp = format!("{:#x}", val); t!(format!("{:p}", val as *const isize), exp); } // Escaping t!(format!("{{"), "{"); t!(format!("}}"), "}"); // make sure that format! doesn't move out of local variables let a = Box::new(3); format!("{}", a); format!("{}", a); // make sure that format! doesn't cause spurious unused-unsafe warnings when // it's inside of an outer unsafe block unsafe { let a: isize = ::std::mem::transmute(3_usize); format!("{}", a); } // test that trailing commas are acceptable format!("{}", "test",); format!("{foo}", foo = "test",); } // Basic test to make sure that we can invoke the `write!` macro with an // fmt::Write instance. 
#[test] fn test_write() { let mut buf = String::new(); let _ = write!(&mut buf, "{}", 3); { let w = &mut buf; let _ = write!(w, "{foo}", foo = 4); let _ = write!(w, "{}", "hello"); let _ = writeln!(w, "{}", "line"); let _ = writeln!(w, "{foo}", foo = "bar"); let _ = w.write_char('☃'); let _ = w.write_str("str"); } t!(buf, "34helloline\nbar\n☃str"); } // Just make sure that the macros are defined, there's not really a lot that we // can do with them just yet (to test the output) #[test] fn test_print() { print!("hi"); print!("{:?}", vec![0u8]); println!("hello"); println!("this is a {}", "test"); println!("{foo}", foo = "bar"); } // Just make sure that the macros are defined, there's not really a lot that we // can do with them just yet (to test the output) #[test] fn test_format_args() { let mut buf = String::new(); { let w = &mut buf; let _ = write!(w, "{}", format_args!("{}", 1)); let _ = write!(w, "{}", format_args!("test")); let _ = write!(w, "{}", format_args!("{test}", test = 3)); } let s = buf; t!(s, "1test3"); let s = fmt::format(format_args!("hello {}", "world")); t!(s, "hello world"); let s = format!("{}: {}", "args were", format_args!("hello {}", "world")); t!(s, "args were: hello world"); } #[test] fn test_order() { // Make sure format!() arguments are always evaluated in a left-to-right // ordering fn foo() -> isize { static mut FOO: isize = 0; unsafe { FOO += 1; FOO } } assert_eq!( format!("{} {} {a} {b} {} {c}", foo(), foo(), foo(), a = foo(), b = foo(), c = foo()), "1 2 4 5 3 6".to_string() ); } #[test] fn test_once() { // Make sure each argument is evaluated only once even though it may be // formatted multiple times fn foo() -> isize { static mut FOO: isize = 0; unsafe { FOO += 1; FOO } } assert_eq!(format!("{0} {0} {0} {a} {a} {a}", foo(), a = foo()), "1 1 1 2 2 2".to_string()); } #[test] fn test_refcell() { let refcell = RefCell::new(5); assert_eq!(format!("{:?}", refcell), "RefCell { value: 5 }"); let borrow = refcell.borrow_mut(); assert_eq!(format!("{:?}", refcell), "RefCell { value: <borrowed> }"); drop(borrow); assert_eq!(format!("{:?}", refcell), "RefCell { value: 5 }"); } use std::collections::binary_heap::{Drain, PeekMut}; use std::collections::BinaryHeap; use std::iter::TrustedLen; use std::panic::{catch_unwind, AssertUnwindSafe}; use std::sync::atomic::{AtomicU32, Ordering}; #[test] fn test_iterator() { let data = vec![5, 9, 3]; let iterout = [9, 5, 3]; let heap = BinaryHeap::from(data); let mut i = 0; for el in &heap { assert_eq!(*el, iterout[i]); i += 1; } } #[test] fn test_iter_rev_cloned_collect() { let data = vec![5, 9, 3]; let iterout = vec![3, 5, 9]; let pq = BinaryHeap::from(data); let v: Vec<_> = pq.iter().rev().cloned().collect(); assert_eq!(v, iterout); } #[test] fn test_into_iter_collect() { let data = vec![5, 9, 3]; let iterout = vec![9, 5, 3]; let pq = BinaryHeap::from(data); let v: Vec<_> = pq.into_iter().collect(); assert_eq!(v, iterout); } #[test] fn test_into_iter_size_hint() { let data = vec![5, 9]; let pq = BinaryHeap::from(data); let mut it = pq.into_iter(); assert_eq!(it.size_hint(), (2, Some(2))); assert_eq!(it.next(), Some(9)); assert_eq!(it.size_hint(), (1, Some(1))); assert_eq!(it.next(), Some(5)); assert_eq!(it.size_hint(), (0, Some(0))); assert_eq!(it.next(), None); } #[test] fn test_into_iter_rev_collect() { let data = vec![5, 9, 3]; let iterout = vec![3, 5, 9]; let pq = BinaryHeap::from(data); let v: Vec<_> = pq.into_iter().rev().collect(); assert_eq!(v, iterout); } #[test] fn test_into_iter_sorted_collect() { let heap =
BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]); let it = heap.into_iter_sorted(); let sorted = it.collect::<Vec<_>>(); assert_eq!(sorted, vec![10, 9, 8, 7, 6, 5, 4, 3, 2, 2, 1, 1, 0]); } #[test] fn test_drain_sorted_collect() { let mut heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]); let it = heap.drain_sorted(); let sorted = it.collect::<Vec<_>>(); assert_eq!(sorted, vec![10, 9, 8, 7, 6, 5, 4, 3, 2, 2, 1, 1, 0]); } fn check_exact_size_iterator<I: ExactSizeIterator>(len: usize, it: I) { let mut it = it; for i in 0..it.len() { let (lower, upper) = it.size_hint(); assert_eq!(Some(lower), upper); assert_eq!(lower, len - i); assert_eq!(it.len(), len - i); it.next(); } assert_eq!(it.len(), 0); assert!(it.is_empty()); } #[test] fn test_exact_size_iterator() { let heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]); check_exact_size_iterator(heap.len(), heap.iter()); check_exact_size_iterator(heap.len(), heap.clone().into_iter()); check_exact_size_iterator(heap.len(), heap.clone().into_iter_sorted()); check_exact_size_iterator(heap.len(), heap.clone().drain()); check_exact_size_iterator(heap.len(), heap.clone().drain_sorted()); } fn check_trusted_len<I: TrustedLen>(len: usize, it: I) { let mut it = it; for i in 0..len { let (lower, upper) = it.size_hint(); if upper.is_some() { assert_eq!(Some(lower), upper); assert_eq!(lower, len - i); } it.next(); } } #[test] fn test_trusted_len() { let heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]); check_trusted_len(heap.len(), heap.clone().into_iter_sorted()); check_trusted_len(heap.len(), heap.clone().drain_sorted()); } #[test] fn test_peek_and_pop() { let data = vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]; let mut sorted = data.clone(); sorted.sort(); let mut heap = BinaryHeap::from(data); while !heap.is_empty() { assert_eq!(heap.peek().unwrap(), sorted.last().unwrap()); assert_eq!(heap.pop().unwrap(), sorted.pop().unwrap()); } } #[test] fn test_peek_mut() { let data = vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]; let mut heap = BinaryHeap::from(data); assert_eq!(heap.peek(), Some(&10)); { let mut top = heap.peek_mut().unwrap(); *top -= 2; } assert_eq!(heap.peek(), Some(&9)); } #[test] fn test_peek_mut_pop() { let data = vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]; let mut heap = BinaryHeap::from(data); assert_eq!(heap.peek(), Some(&10)); { let mut top = heap.peek_mut().unwrap(); *top -= 2; assert_eq!(PeekMut::pop(top), 8); } assert_eq!(heap.peek(), Some(&9)); } #[test] fn test_push() { let mut heap = BinaryHeap::from(vec![2, 4, 9]); assert_eq!(heap.len(), 3); assert!(*heap.peek().unwrap() == 9); heap.push(11); assert_eq!(heap.len(), 4); assert!(*heap.peek().unwrap() == 11); heap.push(5); assert_eq!(heap.len(), 5); assert!(*heap.peek().unwrap() == 11); heap.push(27); assert_eq!(heap.len(), 6); assert!(*heap.peek().unwrap() == 27); heap.push(3); assert_eq!(heap.len(), 7); assert!(*heap.peek().unwrap() == 27); heap.push(103); assert_eq!(heap.len(), 8); assert!(*heap.peek().unwrap() == 103); } #[test] fn test_push_unique() { let mut heap = BinaryHeap::<Box<_>>::from(vec![box 2, box 4, box 9]); assert_eq!(heap.len(), 3); assert!(**heap.peek().unwrap() == 9); heap.push(box 11); assert_eq!(heap.len(), 4); assert!(**heap.peek().unwrap() == 11); heap.push(box 5); assert_eq!(heap.len(), 5); assert!(**heap.peek().unwrap() == 11); heap.push(box 27); assert_eq!(heap.len(), 6); assert!(**heap.peek().unwrap() == 27); heap.push(box 3); assert_eq!(heap.len(), 7); assert!(**heap.peek().unwrap() == 27); heap.push(box 103); assert_eq!(heap.len(), 8);
assert!(**heap.peek().unwrap() == 103); } fn check_to_vec(mut data: Vec<i32>) { let heap = BinaryHeap::from(data.clone()); let mut v = heap.clone().into_vec(); v.sort(); data.sort(); assert_eq!(v, data); assert_eq!(heap.into_sorted_vec(), data); } #[test] fn test_to_vec() { check_to_vec(vec![]); check_to_vec(vec![5]); check_to_vec(vec![3, 2]); check_to_vec(vec![2, 3]); check_to_vec(vec![5, 1, 2]); check_to_vec(vec![1, 100, 2, 3]); check_to_vec(vec![1, 3, 5, 7, 9, 2, 4, 6, 8, 0]); check_to_vec(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]); check_to_vec(vec![9, 11, 9, 9, 9, 9, 11, 2, 3, 4, 11, 9, 0, 0, 0, 0]); check_to_vec(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); check_to_vec(vec![10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0]); check_to_vec(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 0, 0, 1, 2]); check_to_vec(vec![5, 4, 3, 2, 1, 5, 4, 3, 2, 1, 5, 4, 3, 2, 1]); } #[test] fn test_in_place_iterator_specialization() { let src: Vec<usize> = vec![1, 2, 3]; let src_ptr = src.as_ptr(); let heap: BinaryHeap<_> = src.into_iter().map(std::convert::identity).collect(); let heap_ptr = heap.iter().next().unwrap() as *const usize; assert_eq!(src_ptr, heap_ptr); let sink: Vec<_> = heap.into_iter().map(std::convert::identity).collect(); let sink_ptr = sink.as_ptr(); assert_eq!(heap_ptr, sink_ptr); } #[test] fn test_empty_pop() { let mut heap = BinaryHeap::<i32>::new(); assert!(heap.pop().is_none()); } #[test] fn test_empty_peek() { let empty = BinaryHeap::<i32>::new(); assert!(empty.peek().is_none()); } #[test] fn test_empty_peek_mut() { let mut empty = BinaryHeap::<i32>::new(); assert!(empty.peek_mut().is_none()); } #[test] fn test_from_iter() { let xs = vec![9, 8, 7, 6, 5, 4, 3, 2, 1]; let mut q: BinaryHeap<_> = xs.iter().rev().cloned().collect(); for &x in &xs { assert_eq!(q.pop().unwrap(), x); } } #[test] fn test_drain() { let mut q: BinaryHeap<_> = [9, 8, 7, 6, 5, 4, 3, 2, 1].iter().cloned().collect(); assert_eq!(q.drain().take(5).count(), 5); assert!(q.is_empty()); } #[test] fn test_drain_sorted() { let mut q: BinaryHeap<_> = [9, 8, 7, 6, 5, 4, 3, 2, 1].iter().cloned().collect(); assert_eq!(q.drain_sorted().take(5).collect::<Vec<_>>(), vec![9, 8, 7, 6, 5]); assert!(q.is_empty()); } #[test] fn test_drain_sorted_leak() { static DROPS: AtomicU32 = AtomicU32::new(0); #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] struct D(u32, bool); impl Drop for D { fn drop(&mut self) { DROPS.fetch_add(1, Ordering::SeqCst); if self.1 { panic!("panic in `drop`"); } } } let mut q = BinaryHeap::from(vec![ D(0, false), D(1, false), D(2, false), D(3, true), D(4, false), D(5, false), ]); catch_unwind(AssertUnwindSafe(|| drop(q.drain_sorted()))).ok(); assert_eq!(DROPS.load(Ordering::SeqCst), 6); } #[test] fn test_extend_ref() { let mut a = BinaryHeap::new(); a.push(1); a.push(2); a.extend(&[3, 4, 5]); assert_eq!(a.len(), 5); assert_eq!(a.into_sorted_vec(), [1, 2, 3, 4, 5]); let mut a = BinaryHeap::new(); a.push(1); a.push(2); let mut b = BinaryHeap::new(); b.push(3); b.push(4); b.push(5); a.extend(&b); assert_eq!(a.len(), 5); assert_eq!(a.into_sorted_vec(), [1, 2, 3, 4, 5]); } #[test] fn test_append() { let mut a = BinaryHeap::from(vec![-10, 1, 2, 3, 3]); let mut b = BinaryHeap::from(vec![-20, 5, 43]); a.append(&mut b); assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]); assert!(b.is_empty()); } #[test] fn test_append_to_empty() { let mut a = BinaryHeap::new(); let mut b = BinaryHeap::from(vec![-20, 5, 43]); a.append(&mut b); assert_eq!(a.into_sorted_vec(), [-20, 5, 43]); assert!(b.is_empty()); } #[test] fn test_extend_specialization() { let mut a =
BinaryHeap::from(vec![-10, 1, 2, 3, 3]); let b = BinaryHeap::from(vec![-20, 5, 43]); a.extend(b); assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]); } #[allow(dead_code)] fn assert_covariance() { fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> { d } } #[test] fn test_retain() { let mut a = BinaryHeap::from(vec![100, 10, 50, 1, 2, 20, 30]); a.retain(|&x| x != 2); // Check that 20 moved into 10's place. assert_eq!(a.clone().into_vec(), [100, 20, 50, 1, 10, 30]); a.retain(|_| true); assert_eq!(a.clone().into_vec(), [100, 20, 50, 1, 10, 30]); a.retain(|&x| x < 50); assert_eq!(a.clone().into_vec(), [30, 20, 10, 1]); a.retain(|_| false); assert!(a.is_empty()); } // old binaryheap failed this test // // Integrity means that all elements are present after a comparison panics, // even if the order may not be correct. // // Destructors must be called exactly once per element. // FIXME: re-enable emscripten once it can unwind again #[test] #[cfg(not(target_os = "emscripten"))] fn panic_safe() { use rand::{seq::SliceRandom, thread_rng}; use std::cmp; use std::panic::{self, AssertUnwindSafe}; use std::sync::atomic::{AtomicUsize, Ordering}; static DROP_COUNTER: AtomicUsize = AtomicUsize::new(0); #[derive(Eq, PartialEq, Ord, Clone, Debug)] struct PanicOrd<T>(T, bool); impl<T> Drop for PanicOrd<T> { fn drop(&mut self) { // update global drop count DROP_COUNTER.fetch_add(1, Ordering::SeqCst); } } impl<T: PartialOrd> PartialOrd for PanicOrd<T> { fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> { if self.1 || other.1 { panic!("Panicking comparison"); } self.0.partial_cmp(&other.0) } } let mut rng = thread_rng(); const DATASZ: usize = 32; // Miri is too slow let ntest = if cfg!(miri) { 1 } else { 10 }; // don't use 0 in the data -- we want to catch the zeroed-out case. let data = (1..=DATASZ).collect::<Vec<usize>>(); // since it's a fuzzy test, run several tries. for _ in 0..ntest { for i in 1..=DATASZ { DROP_COUNTER.store(0, Ordering::SeqCst); let mut panic_ords: Vec<_> = data.iter().filter(|&&x| x != i).map(|&x| PanicOrd(x, false)).collect(); let panic_item = PanicOrd(i, true); // heapify the sane items panic_ords.shuffle(&mut rng); let mut heap = BinaryHeap::from(panic_ords); let inner_data; { // push the panicking item to the heap and catch the panic let thread_result = { let mut heap_ref = AssertUnwindSafe(&mut heap); panic::catch_unwind(move || { heap_ref.push(panic_item); }) }; assert!(thread_result.is_err()); // Assert no elements were dropped let drops = DROP_COUNTER.load(Ordering::SeqCst); assert!(drops == 0, "Must not drop items.
drops={}", drops); inner_data = heap.clone().into_vec(); drop(heap); } let drops = DROP_COUNTER.load(Ordering::SeqCst); assert_eq!(drops, DATASZ); let mut data_sorted = inner_data.into_iter().map(|p| p.0).collect::<Vec<_>>(); data_sorted.sort(); assert_eq!(data_sorted, data); } } } use std::cell::Cell; use std::mem::MaybeUninit; use std::ptr::NonNull; #[test] fn uninitialized_zero_size_box() { assert_eq!( &*Box::<()>::new_uninit() as *const _, NonNull::<MaybeUninit<()>>::dangling().as_ptr(), ); assert_eq!( Box::<[()]>::new_uninit_slice(4).as_ptr(), NonNull::<MaybeUninit<()>>::dangling().as_ptr(), ); assert_eq!( Box::<[String]>::new_uninit_slice(0).as_ptr(), NonNull::<MaybeUninit<String>>::dangling().as_ptr(), ); } #[derive(Clone, PartialEq, Eq, Debug)] struct Dummy { _data: u8, } #[test] fn box_clone_and_clone_from_equivalence() { for size in (0..8).map(|i| 2usize.pow(i)) { let control = vec![Dummy { _data: 42 }; size].into_boxed_slice(); let clone = control.clone(); let mut copy = vec![Dummy { _data: 84 }; size].into_boxed_slice(); copy.clone_from(&control); assert_eq!(control, clone); assert_eq!(control, copy); } } /// This test might give a false positive in case the box reallocates, but the allocator keeps the /// original pointer. /// /// On the other hand it won't give a false negative; if it fails, then the memory was definitely not /// reused. #[test] fn box_clone_from_ptr_stability() { for size in (0..8).map(|i| 2usize.pow(i)) { let control = vec![Dummy { _data: 42 }; size].into_boxed_slice(); let mut copy = vec![Dummy { _data: 84 }; size].into_boxed_slice(); let copy_raw = copy.as_ptr() as usize; copy.clone_from(&control); assert_eq!(copy.as_ptr() as usize, copy_raw); } } #[test] fn box_deref_lval() { let x = Box::new(Cell::new(5)); x.set(1000); assert_eq!(x.get(), 1000); } use std::alloc::{Allocator, Global, Layout, System}; /// Issue #45955 and #62251. #[test] fn alloc_system_overaligned_request() { check_overalign_requests(System) } #[test] fn std_heap_overaligned_request() { check_overalign_requests(Global) } fn check_overalign_requests<T: Allocator>(allocator: T) { for &align in &[4, 8, 16, 32] { // less than and bigger than `MIN_ALIGN` for &size in &[align / 2, align - 1] { // size less than alignment let iterations = 128; unsafe { let pointers: Vec<_> = (0..iterations) .map(|_| { allocator.allocate(Layout::from_size_align(size, align).unwrap()).unwrap() }) .collect(); for &ptr in &pointers { assert_eq!( (ptr.as_non_null_ptr().as_ptr() as usize) % align, 0, "Got a pointer less aligned than requested" ) } // Clean up for &ptr in &pointers { allocator.deallocate( ptr.as_non_null_ptr(), Layout::from_size_align(size, align).unwrap(), ) } } } } } /// Creates a [`Vec`] containing the arguments. /// /// `vec!` allows `Vec`s to be defined with the same syntax as array expressions. /// There are two forms of this macro: /// /// - Create a [`Vec`] containing a given list of elements: /// /// ``` /// let v = vec![1, 2, 3]; /// assert_eq!(v[0], 1); /// assert_eq!(v[1], 2); /// assert_eq!(v[2], 3); /// ``` /// /// - Create a [`Vec`] from a given element and size: /// /// ``` /// let v = vec![1; 3]; /// assert_eq!(v, [1, 1, 1]); /// ``` /// /// Note that unlike array expressions this syntax supports all elements /// which implement [`Clone`] and the number of elements doesn't have to be /// a constant. /// /// This will use `clone` to duplicate an expression, so one should be careful /// using this with types having a nonstandard `Clone` implementation.
For /// example, `vec![Rc::new(1); 5]` will create a vector of five references /// to the same boxed integer value, not five references pointing to independently /// boxed integers. /// /// Also, note that `vec![expr; 0]` is allowed, and produces an empty vector. /// This will still evaluate `expr`, however, and immediately drop the resulting value, so /// be mindful of side effects. /// /// [`Vec`]: crate::vec::Vec #[cfg(not(test))] #[doc(alias = "alloc")] #[doc(alias = "malloc")] #[macro_export] #[stable(feature = "rust1", since = "1.0.0")] #[allow_internal_unstable(box_syntax, liballoc_internals)] macro_rules! vec { () => ( $crate::__rust_force_expr!($crate::vec::Vec::new()) ); ($elem:expr; $n:expr) => ( $crate::__rust_force_expr!($crate::vec::from_elem($elem, $n)) ); ($($x:expr),+ $(,)?) => ( $crate::__rust_force_expr!(<[_]>::into_vec(box [$($x),+])) ); } // HACK(japaric): with cfg(test) the inherent `[T]::into_vec` method, which is // required for this macro definition, is not available. Instead use the // `slice::into_vec` function which is only available with cfg(test) // NB see the slice::hack module in slice.rs for more information #[cfg(test)] macro_rules! vec { () => ( $crate::vec::Vec::new() ); ($elem:expr; $n:expr) => ( $crate::vec::from_elem($elem, $n) ); ($($x:expr),*) => ( $crate::slice::into_vec(box [$($x),*]) ); ($($x:expr,)*) => (vec![$($x),*]) } /// Creates a `String` using interpolation of runtime expressions. /// /// The first argument `format!` receives is a format string. This must be a string /// literal. The power of the formatting string is in the `{}`s contained. /// /// Additional parameters passed to `format!` replace the `{}`s within the /// formatting string in the order given unless named or positional parameters /// are used; see [`std::fmt`] for more information. /// /// A common use for `format!` is concatenation and interpolation of strings. /// The same convention is used with [`print!`] and [`write!`] macros, /// depending on the intended destination of the string. /// /// To convert a single value to a string, use the [`to_string`] method. This /// will use the [`Display`] formatting trait. /// /// [`std::fmt`]: ../std/fmt/index.html /// [`print!`]: ../std/macro.print.html /// [`write!`]: core::write /// [`to_string`]: crate::string::ToString /// [`Display`]: core::fmt::Display /// /// # Panics /// /// `format!` panics if a formatting trait implementation returns an error. /// This indicates an incorrect implementation /// since `fmt::Write for String` never returns an error itself. /// /// # Examples /// /// ``` /// format!("test"); /// format!("hello {}", "world!"); /// format!("x = {}, y = {y}", 10, y = 30); /// ``` #[macro_export] #[stable(feature = "rust1", since = "1.0.0")] #[cfg_attr(not(test), rustc_diagnostic_item = "format_macro")] macro_rules! format { ($($arg:tt)*) => {{ let res = $crate::fmt::format($crate::__export::format_args!($($arg)*)); res }} } /// Force AST node to an expression to improve diagnostics in pattern position. #[doc(hidden)] #[macro_export] #[unstable(feature = "liballoc_internals", issue = "none", reason = "implementation detail")] macro_rules! __rust_force_expr { ($e:expr) => { $e }; } //! 
Memory allocation APIs #![stable(feature = "alloc_module", since = "1.28.0")] #[cfg(not(test))] use core::intrinsics; use core::intrinsics::{min_align_of_val, size_of_val}; use core::ptr::Unique; #[cfg(not(test))] use core::ptr::{self, NonNull}; #[stable(feature = "alloc_module", since = "1.28.0")] #[doc(inline)] pub use core::alloc::*; #[cfg(test)] mod tests; extern "Rust" { // These are the magic symbols to call the global allocator. rustc generates // them to call `__rg_alloc` etc. if there is a `#[global_allocator]` attribute // (the code expanding that attribute macro generates those functions), or to call // the default implementations in libstd (`__rdl_alloc` etc. in `library/std/src/alloc.rs`) // otherwise. // The rustc fork of LLVM also special-cases these function names to be able to optimize them // like `malloc`, `realloc`, and `free`, respectively. #[rustc_allocator] #[rustc_allocator_nounwind] fn __rust_alloc(size: usize, align: usize) -> *mut u8; #[rustc_allocator_nounwind] fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize); #[rustc_allocator_nounwind] fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8; #[rustc_allocator_nounwind] fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8; } /// The global memory allocator. /// /// This type implements the [`Allocator`] trait by forwarding calls /// to the allocator registered with the `#[global_allocator]` attribute /// if there is one, or the `std` crate’s default. /// /// Note: while this type is unstable, the functionality it provides can be /// accessed through the [free functions in `alloc`](self#functions). #[unstable(feature = "allocator_api", issue = "32838")] #[derive(Copy, Clone, Default, Debug)] #[cfg(not(test))] pub struct Global; #[cfg(test)] pub use std::alloc::Global; /// Allocate memory with the global allocator. /// /// This function forwards calls to the [`GlobalAlloc::alloc`] method /// of the allocator registered with the `#[global_allocator]` attribute /// if there is one, or the `std` crate’s default. /// /// This function is expected to be deprecated in favor of the `alloc` method /// of the [`Global`] type when it and the [`Allocator`] trait become stable. /// /// # Safety /// /// See [`GlobalAlloc::alloc`]. /// /// # Examples /// /// ``` /// use std::alloc::{alloc, dealloc, Layout}; /// /// unsafe { /// let layout = Layout::new::<u16>(); /// let ptr = alloc(layout); /// /// *(ptr as *mut u16) = 42; /// assert_eq!(*(ptr as *mut u16), 42); /// /// dealloc(ptr, layout); /// } /// ``` #[stable(feature = "global_alloc", since = "1.28.0")] #[inline] pub unsafe fn alloc(layout: Layout) -> *mut u8 { unsafe { __rust_alloc(layout.size(), layout.align()) } } /// Deallocate memory with the global allocator. /// /// This function forwards calls to the [`GlobalAlloc::dealloc`] method /// of the allocator registered with the `#[global_allocator]` attribute /// if there is one, or the `std` crate’s default. /// /// This function is expected to be deprecated in favor of the `dealloc` method /// of the [`Global`] type when it and the [`Allocator`] trait become stable. /// /// # Safety /// /// See [`GlobalAlloc::dealloc`]. #[stable(feature = "global_alloc", since = "1.28.0")] #[inline] pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) { unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) } } /// Reallocate memory with the global allocator.
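/// (That is, grow or shrink the block at `ptr` to `new_size` bytes; the old
/// contents are preserved up to the smaller of the old and new sizes.)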
/// /// This function forwards calls to the [`GlobalAlloc::realloc`] method /// of the allocator registered with the `#[global_allocator]` attribute /// if there is one, or the `std` crate’s default. /// /// This function is expected to be deprecated in favor of the `realloc` method /// of the [`Global`] type when it and the [`Allocator`] trait become stable. /// /// # Safety /// /// See [`GlobalAlloc::realloc`]. #[stable(feature = "global_alloc", since = "1.28.0")] #[inline] pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 { unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) } } /// Allocate zero-initialized memory with the global allocator. /// /// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method /// of the allocator registered with the `#[global_allocator]` attribute /// if there is one, or the `std` crate’s default. /// /// This function is expected to be deprecated in favor of the `alloc_zeroed` method /// of the [`Global`] type when it and the [`Allocator`] trait become stable. /// /// # Safety /// /// See [`GlobalAlloc::alloc_zeroed`]. /// /// # Examples /// /// ``` /// use std::alloc::{alloc_zeroed, dealloc, Layout}; /// /// unsafe { /// let layout = Layout::new::<u16>(); /// let ptr = alloc_zeroed(layout); /// /// assert_eq!(*(ptr as *mut u16), 0); /// /// dealloc(ptr, layout); /// } /// ``` #[stable(feature = "global_alloc", since = "1.28.0")] #[inline] pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 { unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) } } #[cfg(not(test))] impl Global { #[inline] fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> { match layout.size() { 0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)), // SAFETY: `layout` is non-zero in size, size => unsafe { let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) }; let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?; Ok(NonNull::slice_from_raw_parts(ptr, size)) }, } } // SAFETY: Same as `Allocator::grow` #[inline] unsafe fn grow_impl( &self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout, zeroed: bool, ) -> Result<NonNull<[u8]>, AllocError> { debug_assert!( new_layout.size() >= old_layout.size(), "`new_layout.size()` must be greater than or equal to `old_layout.size()`" ); match old_layout.size() { 0 => self.alloc_impl(new_layout, zeroed), // SAFETY: `new_size` is non-zero as it is greater than or equal to `old_size`, // which is non-zero in this arm, as required by safety conditions. // Other conditions must be upheld by the caller old_size if old_layout.align() == new_layout.align() => unsafe { let new_size = new_layout.size(); // `realloc` probably checks for `new_size >= old_layout.size()` or something similar. intrinsics::assume(new_size >= old_layout.size()); let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size); let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?; if zeroed { raw_ptr.add(old_size).write_bytes(0, new_size - old_size); } Ok(NonNull::slice_from_raw_parts(ptr, new_size)) }, // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`, // both the old and new memory allocation are valid for reads and writes for `old_size` // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract // for `dealloc` must be upheld by the caller.
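// (This final arm is the fallback for alignment changes: `realloc` cannot
// change the alignment of a block, so a fresh allocation is made and the
// old bytes are copied over before the old block is freed.)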
old_size => unsafe { let new_ptr = self.alloc_impl(new_layout, zeroed)?; ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size); self.deallocate(ptr, old_layout); Ok(new_ptr) }, } } } #[unstable(feature = "allocator_api", issue = "32838")] #[cfg(not(test))] unsafe impl Allocator for Global { #[inline] fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> { self.alloc_impl(layout, false) } #[inline] fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> { self.alloc_impl(layout, true) } #[inline] unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) { if layout.size() != 0 { // SAFETY: `layout` is non-zero in size, // other conditions must be upheld by the caller unsafe { dealloc(ptr.as_ptr(), layout) } } } #[inline] unsafe fn grow( &self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout, ) -> Result<NonNull<[u8]>, AllocError> { // SAFETY: all conditions must be upheld by the caller unsafe { self.grow_impl(ptr, old_layout, new_layout, false) } } #[inline] unsafe fn grow_zeroed( &self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout, ) -> Result<NonNull<[u8]>, AllocError> { // SAFETY: all conditions must be upheld by the caller unsafe { self.grow_impl(ptr, old_layout, new_layout, true) } } #[inline] unsafe fn shrink( &self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout, ) -> Result<NonNull<[u8]>, AllocError> { debug_assert!( new_layout.size() <= old_layout.size(), "`new_layout.size()` must be smaller than or equal to `old_layout.size()`" ); match new_layout.size() { // SAFETY: conditions must be upheld by the caller 0 => unsafe { self.deallocate(ptr, old_layout); Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0)) }, // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller new_size if old_layout.align() == new_layout.align() => unsafe { // `realloc` probably checks for `new_size <= old_layout.size()` or something similar. intrinsics::assume(new_size <= old_layout.size()); let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size); let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?; Ok(NonNull::slice_from_raw_parts(ptr, new_size)) }, // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`, // both the old and new memory allocation are valid for reads and writes for `new_size` // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract // for `dealloc` must be upheld by the caller. new_size => unsafe { let new_ptr = self.allocate(new_layout)?; ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size); self.deallocate(ptr, old_layout); Ok(new_ptr) }, } } } /// The allocator for unique pointers. // This function must not unwind. If it does, MIR codegen will fail. #[cfg(not(test))] #[lang = "exchange_malloc"] #[inline] unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { let layout = unsafe { Layout::from_size_align_unchecked(size, align) }; match Global.allocate(layout) { Ok(ptr) => ptr.as_mut_ptr(), Err(_) => handle_alloc_error(layout), } } #[cfg_attr(not(test), lang = "box_free")] #[inline] // This signature has to be the same as `Box`, otherwise an ICE will happen. // When an additional parameter to `Box` is added (like `A: Allocator`), this has to be added here as // well. // For example if `Box` is changed to `struct Box<T: ?Sized, A: Allocator>(Unique<T>, A)`, // this function has to be changed to `fn box_free<T: ?Sized, A: Allocator>(Unique<T>, A)` as well.
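// For a concrete illustration (hypothetical values, not taken from this module):
// dropping a `Box<u32>` ends in a `box_free` call whose layout, computed below
// from the pointee, is size 4 / align 4 on typical targets.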
pub(crate) unsafe fn box_free<T: ?Sized, A: Allocator>(ptr: Unique<T>, alloc: A) { unsafe { let size = size_of_val(ptr.as_ref()); let align = min_align_of_val(ptr.as_ref()); let layout = Layout::from_size_align_unchecked(size, align); alloc.deallocate(ptr.cast().into(), layout) } } // # Allocation error handler extern "Rust" { // This is the magic symbol to call the global alloc error handler. rustc generates // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the // default implementations below (`__rdl_oom`) otherwise. #[rustc_allocator_nounwind] fn __rust_alloc_error_handler(size: usize, align: usize) -> !; } /// Abort on memory allocation error or failure. /// /// Callers of memory allocation APIs wishing to abort computation /// in response to an allocation error are encouraged to call this function, /// rather than directly invoking `panic!` or similar. /// /// The default behavior of this function is to print a message to standard error /// and abort the process. /// It can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`]. /// /// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html /// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html #[stable(feature = "global_alloc", since = "1.28.0")] #[cfg(not(test))] #[rustc_allocator_nounwind] #[cold] pub fn handle_alloc_error(layout: Layout) -> ! { unsafe { __rust_alloc_error_handler(layout.size(), layout.align()); } } // For alloc test `std::alloc::handle_alloc_error` can be used directly. #[cfg(test)] pub use std::alloc::handle_alloc_error; #[cfg(not(any(target_os = "hermit", test)))] #[doc(hidden)] #[allow(unused_attributes)] #[unstable(feature = "alloc_internals", issue = "none")] pub mod __alloc_error_handler { use crate::alloc::Layout; // called via generated `__rust_alloc_error_handler` // if there is no `#[alloc_error_handler]` #[rustc_std_internal_symbol] pub unsafe extern "C" fn __rdl_oom(size: usize, _align: usize) -> ! { panic!("memory allocation of {} bytes failed", size) } // if there is a `#[alloc_error_handler]` #[rustc_std_internal_symbol] pub unsafe extern "C" fn __rg_oom(size: usize, align: usize) -> ! { let layout = unsafe { Layout::from_size_align_unchecked(size, align) }; extern "Rust" { #[lang = "oom"] fn oom_impl(layout: Layout) -> !; } unsafe { oom_impl(layout) } } } /// Specialize clones into pre-allocated, uninitialized memory. /// Used by `Box::clone` and `Rc`/`Arc::make_mut`. pub(crate) trait WriteCloneIntoRaw: Sized { unsafe fn write_clone_into_raw(&self, target: *mut Self); } impl<T: Clone> WriteCloneIntoRaw for T { #[inline] default unsafe fn write_clone_into_raw(&self, target: *mut Self) { // Having allocated *first* may allow the optimizer to create // the cloned value in-place, skipping the local and move. unsafe { target.write(self.clone()) }; } } impl<T: Copy> WriteCloneIntoRaw for T { #[inline] unsafe fn write_clone_into_raw(&self, target: *mut Self) { // We can always copy in-place, without ever involving a local value.
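// (This `Copy` impl overrides the `default` method in the blanket `Clone`
// impl above via specialization, so no `clone` call is ever emitted here.)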
unsafe { target.copy_from_nonoverlapping(self, 1) }; } } #![unstable(feature = "raw_vec_internals", reason = "implementation detail", issue = "none")] #![doc(hidden)] use core::alloc::LayoutError; use core::cmp; use core::intrinsics; use core::mem::{self, ManuallyDrop, MaybeUninit}; use core::ops::Drop; use core::ptr::{self, NonNull, Unique}; use core::slice; use crate::alloc::{handle_alloc_error, Allocator, Global, Layout}; use crate::boxed::Box; use crate::collections::TryReserveError::{self, *}; #[cfg(test)] mod tests; enum AllocInit { /// The contents of the new memory are uninitialized. Uninitialized, /// The new memory is guaranteed to be zeroed. Zeroed, } /// A low-level utility for more ergonomically allocating, reallocating, and deallocating /// a buffer of memory on the heap without having to worry about all the corner cases /// involved. This type is excellent for building your own data structures like Vec and VecDeque. /// In particular: /// /// * Produces `Unique::dangling()` on zero-sized types. /// * Produces `Unique::dangling()` on zero-length allocations. /// * Avoids freeing `Unique::dangling()`. /// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics). /// * Guards against 32-bit systems allocating more than isize::MAX bytes. /// * Guards against overflowing your length. /// * Calls `handle_alloc_error` for fallible allocations. /// * Contains a `ptr::Unique` and thus endows the user with all related benefits. /// * Uses the excess returned from the allocator to use the largest available capacity. /// /// This type does not in any way inspect the memory that it manages. When dropped it *will* /// free its memory, but it *won't* try to drop its contents. It is up to the user of `RawVec` /// to handle the actual things *stored* inside of a `RawVec`. /// /// Note that the excess of a zero-sized type is always infinite, so `capacity()` always returns /// `usize::MAX`. This means that you need to be careful when round-tripping this type with a /// `Box<[T]>`, since `capacity()` won't yield the length. #[allow(missing_debug_implementations)] pub struct RawVec<T, A: Allocator = Global> { ptr: Unique<T>, cap: usize, alloc: A, } impl<T> RawVec<T, Global> { /// HACK(Centril): This exists because stable `const fn` can only call stable `const fn`, so /// they cannot call `Self::new()`. /// /// If you change `RawVec::new` or dependencies, please take care to not introduce anything /// that would truly const-call something unstable. pub const NEW: Self = Self::new(); /// Creates the biggest possible `RawVec` (on the system heap) /// without allocating. If `T` has positive size, then this makes a /// `RawVec` with capacity `0`. If `T` is zero-sized, then it makes a /// `RawVec` with capacity `usize::MAX`. Useful for implementing /// delayed allocation. pub const fn new() -> Self { Self::new_in(Global) } /// Creates a `RawVec` (on the system heap) with exactly the /// capacity and alignment requirements for a `[T; capacity]`. This is /// equivalent to calling `RawVec::new` when `capacity` is `0` or `T` is /// zero-sized. Note that if `T` is zero-sized this means you will /// *not* get a `RawVec` with the requested capacity. /// /// # Panics /// /// Panics if the requested capacity exceeds `isize::MAX` bytes. /// /// # Aborts /// /// Aborts on OOM. #[inline] pub fn with_capacity(capacity: usize) -> Self { Self::with_capacity_in(capacity, Global) } /// Like `with_capacity`, but guarantees the buffer is zeroed.
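///
/// A minimal sketch of the zeroed guarantee; like the `reserve` example
/// further below, it is hypothetical usage that assumes the unstable
/// `raw_vec_internals` feature:
///
/// ```
/// # #![feature(raw_vec_internals)]
/// # extern crate alloc;
/// # use alloc::raw_vec::RawVec;
/// let buf: RawVec<u32> = RawVec::with_capacity_zeroed(4);
/// // Every allocated byte is zero, so reading a `u32` through the raw
/// // pointer yields 0.
/// unsafe { assert_eq!(buf.ptr().read(), 0u32) };
/// ```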
#[inline] pub fn with_capacity_zeroed(capacity: usize) -> Self { Self::with_capacity_zeroed_in(capacity, Global) } /// Reconstitutes a `RawVec` from a pointer and capacity. /// /// # Safety /// /// The `ptr` must be allocated (on the system heap), and with the given `capacity`. /// The `capacity` cannot exceed `isize::MAX` for sized types. (only a concern on 32-bit /// systems). ZST vectors may have a capacity up to `usize::MAX`. /// If the `ptr` and `capacity` come from a `RawVec`, then this is guaranteed. #[inline] pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize) -> Self { unsafe { Self::from_raw_parts_in(ptr, capacity, Global) } } } impl<T, A: Allocator> RawVec<T, A> { // Tiny Vecs are dumb. Skip to: // - 8 if the element size is 1, because any heap allocator is likely // to round up a request of less than 8 bytes to at least 8 bytes. // - 4 if elements are moderate-sized (<= 1 KiB). // - 1 otherwise, to avoid wasting too much space for very short Vecs. const MIN_NON_ZERO_CAP: usize = if mem::size_of::<T>() == 1 { 8 } else if mem::size_of::<T>() <= 1024 { 4 } else { 1 }; /// Like `new`, but parameterized over the choice of allocator for /// the returned `RawVec`. #[rustc_allow_const_fn_unstable(const_fn)] pub const fn new_in(alloc: A) -> Self { // `cap: 0` means "unallocated". zero-sized types are ignored. Self { ptr: Unique::dangling(), cap: 0, alloc } } /// Like `with_capacity`, but parameterized over the choice of /// allocator for the returned `RawVec`. #[inline] pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { Self::allocate_in(capacity, AllocInit::Uninitialized, alloc) } /// Like `with_capacity_zeroed`, but parameterized over the choice /// of allocator for the returned `RawVec`. #[inline] pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self { Self::allocate_in(capacity, AllocInit::Zeroed, alloc) } /// Converts a `Box<[T]>` into a `RawVec<T>`. pub fn from_box(slice: Box<[T], A>) -> Self { unsafe { let (slice, alloc) = Box::into_raw_with_allocator(slice); RawVec::from_raw_parts_in(slice.as_mut_ptr(), slice.len(), alloc) } } /// Converts the entire buffer into `Box<[MaybeUninit<T>]>` with the specified `len`. /// /// Note that this will correctly reconstitute any `cap` changes /// that may have been performed. (See description of type for details.) /// /// # Safety /// /// * `len` must be greater than or equal to the most recently requested capacity, and /// * `len` must be less than or equal to `self.capacity()`. /// /// Note, that the requested capacity and `self.capacity()` could differ, as /// an allocator could overallocate and return a greater memory block than requested. pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit<T>], A> { // Sanity-check one half of the safety requirement (we cannot check the other half). debug_assert!( len <= self.capacity(), "`len` must be smaller than or equal to `self.capacity()`" ); let me = ManuallyDrop::new(self); unsafe { let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len); Box::from_raw_in(slice, ptr::read(&me.alloc)) } } fn allocate_in(capacity: usize, init: AllocInit, alloc: A) -> Self { if mem::size_of::<T>() == 0 { Self::new_in(alloc) } else { // We avoid `unwrap_or_else` here because it bloats the amount of // LLVM IR generated.
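// `alloc_guard` (defined later in this module) is the check that rejects
// allocation sizes above `isize::MAX` bytes on 16/32-bit targets, where
// `ptr::offset` arithmetic could otherwise overflow.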
let layout = match Layout::array::<T>(capacity) { Ok(layout) => layout, Err(_) => capacity_overflow(), }; match alloc_guard(layout.size()) { Ok(_) => {} Err(_) => capacity_overflow(), } let result = match init { AllocInit::Uninitialized => alloc.allocate(layout), AllocInit::Zeroed => alloc.allocate_zeroed(layout), }; let ptr = match result { Ok(ptr) => ptr, Err(_) => handle_alloc_error(layout), }; Self { ptr: unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) }, cap: Self::capacity_from_bytes(ptr.len()), alloc, } } } /// Reconstitutes a `RawVec` from a pointer, capacity, and allocator. /// /// # Safety /// /// The `ptr` must be allocated (via the given allocator `alloc`), and with the given /// `capacity`. /// The `capacity` cannot exceed `isize::MAX` for sized types (only a concern on 32-bit /// systems). ZST vectors may have a capacity up to `usize::MAX`. /// If the `ptr` and `capacity` come from a `RawVec` created via `alloc`, then this is /// guaranteed. #[inline] pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self { Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap: capacity, alloc } } /// Gets a raw pointer to the start of the allocation. Note that this is /// `Unique::dangling()` if `capacity == 0` or `T` is zero-sized. In the former case, you must /// be careful. #[inline] pub fn ptr(&self) -> *mut T { self.ptr.as_ptr() } /// Gets the capacity of the allocation. /// /// This will always be `usize::MAX` if `T` is zero-sized. #[inline(always)] pub fn capacity(&self) -> usize { if mem::size_of::<T>() == 0 { usize::MAX } else { self.cap } } /// Returns a shared reference to the allocator backing this `RawVec`. pub fn allocator(&self) -> &A { &self.alloc } fn current_memory(&self) -> Option<(NonNull<u8>, Layout)> { if mem::size_of::<T>() == 0 || self.cap == 0 { None } else { // We have an allocated chunk of memory, so we can bypass runtime // checks to get our current layout. unsafe { let align = mem::align_of::<T>(); let size = mem::size_of::<T>() * self.cap; let layout = Layout::from_size_align_unchecked(size, align); Some((self.ptr.cast().into(), layout)) } } } /// Ensures that the buffer contains at least enough space to hold `len + /// additional` elements. If it doesn't already have enough capacity, will /// reallocate enough space plus comfortable slack space to get amortized /// *O*(1) behavior. Will limit this behavior if it would needlessly cause /// itself to panic. /// /// If `len` exceeds `self.capacity()`, this may fail to actually allocate /// the requested space. This is not really unsafe, but the unsafe /// code *you* write that relies on the behavior of this function may break. /// /// This is ideal for implementing a bulk-push operation like `extend`. /// /// # Panics /// /// Panics if the new capacity exceeds `isize::MAX` bytes. /// /// # Aborts /// /// Aborts on OOM. /// /// # Examples /// /// ``` /// # #![feature(raw_vec_internals)] /// # extern crate alloc; /// # use std::ptr; /// # use alloc::raw_vec::RawVec; /// struct MyVec<T> { /// buf: RawVec<T>, /// len: usize, /// } /// /// impl<T: Clone> MyVec<T> { /// pub fn push_all(&mut self, elems: &[T]) { /// self.buf.reserve(self.len, elems.len()); /// // reserve would have aborted or panicked if the len exceeded /// // `isize::MAX` so this is safe to do unchecked now.
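/// // (Note: `reserve` takes the current length and the number of extra
/// // elements to make room for, not an absolute target capacity.)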
/// for x in elems { /// unsafe { /// ptr::write(self.buf.ptr().add(self.len), x.clone()); /// } /// self.len += 1; /// } /// } /// } /// # fn main() { /// # let mut vector = MyVec { buf: RawVec::new(), len: 0 }; /// # vector.push_all(&[1, 3, 5, 7, 9]); /// # } /// ``` #[inline] pub fn reserve(&mut self, len: usize, additional: usize) { // Callers expect this function to be very cheap when there is already sufficient capacity. // Therefore, we move all the resizing and error-handling logic from grow_amortized and // handle_reserve behind a call, while making sure that this function is likely to be // inlined as just a comparison and a call if the comparison fails. #[cold] fn do_reserve_and_handle<T, A: Allocator>( slf: &mut RawVec<T, A>, len: usize, additional: usize, ) { handle_reserve(slf.grow_amortized(len, additional)); } if self.needs_to_grow(len, additional) { do_reserve_and_handle(self, len, additional); } } /// The same as `reserve`, but returns on errors instead of panicking or aborting. pub fn try_reserve(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> { if self.needs_to_grow(len, additional) { self.grow_amortized(len, additional) } else { Ok(()) } } /// Ensures that the buffer contains at least enough space to hold `len + /// additional` elements. If it doesn't already, will reallocate the /// minimum possible amount of memory necessary. Generally this will be /// exactly the amount of memory necessary, but in principle the allocator /// is free to give back more than we asked for. /// /// If `len` exceeds `self.capacity()`, this may fail to actually allocate /// the requested space. This is not really unsafe, but the unsafe code /// *you* write that relies on the behavior of this function may break. /// /// # Panics /// /// Panics if the new capacity exceeds `isize::MAX` bytes. /// /// # Aborts /// /// Aborts on OOM. pub fn reserve_exact(&mut self, len: usize, additional: usize) { handle_reserve(self.try_reserve_exact(len, additional)); } /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting. pub fn try_reserve_exact( &mut self, len: usize, additional: usize, ) -> Result<(), TryReserveError> { if self.needs_to_grow(len, additional) { self.grow_exact(len, additional) } else { Ok(()) } } /// Shrinks the allocation down to the specified amount. If the given amount /// is 0, actually completely deallocates. /// /// # Panics /// /// Panics if the given amount is *larger* than the current capacity. /// /// # Aborts /// /// Aborts on OOM. pub fn shrink_to_fit(&mut self, amount: usize) { handle_reserve(self.shrink(amount)); } } impl<T, A: Allocator> RawVec<T, A> { /// Returns if the buffer needs to grow to fulfill the needed extra capacity. /// Mainly used to make inlining reserve-calls possible without inlining `grow`. fn needs_to_grow(&self, len: usize, additional: usize) -> bool { additional > self.capacity().wrapping_sub(len) } fn capacity_from_bytes(excess: usize) -> usize { debug_assert_ne!(mem::size_of::<T>(), 0); excess / mem::size_of::<T>() } fn set_ptr(&mut self, ptr: NonNull<[u8]>) { self.ptr = unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) }; self.cap = Self::capacity_from_bytes(ptr.len()); } // This method is usually instantiated many times. So we want it to be as // small as possible, to improve compile times. But we also want as much of // its contents to be statically computable as possible, to make the // generated code run faster.
Therefore, this method is carefully written // so that all of the code that depends on `T` is within it, while as much // of the code that doesn't depend on `T` as possible is in functions that // are non-generic over `T`. fn grow_amortized(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> { // This is ensured by the calling contexts. debug_assert!(additional > 0); if mem::size_of::<T>() == 0 { // Since we return a capacity of `usize::MAX` when `elem_size` is // 0, getting to here necessarily means the `RawVec` is overfull. return Err(CapacityOverflow); } // Nothing we can really do about these checks, sadly. let required_cap = len.checked_add(additional).ok_or(CapacityOverflow)?; // This guarantees exponential growth. The doubling cannot overflow // because `cap <= isize::MAX` and the type of `cap` is `usize`. let cap = cmp::max(self.cap * 2, required_cap); let cap = cmp::max(Self::MIN_NON_ZERO_CAP, cap); let new_layout = Layout::array::<T>(cap); // `finish_grow` is non-generic over `T`. let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?; self.set_ptr(ptr); Ok(()) } // The constraints on this method are much the same as those on // `grow_amortized`, but this method is usually instantiated less often so // it's less critical. fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> { if mem::size_of::<T>() == 0 { // Since we return a capacity of `usize::MAX` when the type size is // 0, getting to here necessarily means the `RawVec` is overfull. return Err(CapacityOverflow); } let cap = len.checked_add(additional).ok_or(CapacityOverflow)?; let new_layout = Layout::array::<T>(cap); // `finish_grow` is non-generic over `T`. let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?; self.set_ptr(ptr); Ok(()) } fn shrink(&mut self, amount: usize) -> Result<(), TryReserveError> { assert!(amount <= self.capacity(), "Tried to shrink to a larger capacity"); let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) }; let new_size = amount * mem::size_of::<T>(); let ptr = unsafe { let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); self.alloc.shrink(ptr, layout, new_layout).map_err(|_| TryReserveError::AllocError { layout: new_layout, non_exhaustive: (), })? }; self.set_ptr(ptr); Ok(()) } } // This function is outside `RawVec` to minimize compile times. See the comment // above `RawVec::grow_amortized` for details. (The `A` parameter isn't // significant, because the number of different `A` types seen in practice is // much smaller than the number of `T` types.) #[inline(never)] fn finish_grow<A>( new_layout: Result<Layout, LayoutError>, current_memory: Option<(NonNull<u8>, Layout)>, alloc: &mut A, ) -> Result<NonNull<[u8]>, TryReserveError> where A: Allocator, { // Check for the error here to minimize the size of `RawVec::grow_*`. let new_layout = new_layout.map_err(|_| CapacityOverflow)?; alloc_guard(new_layout.size())?; let memory = if let Some((ptr, old_layout)) = current_memory { debug_assert_eq!(old_layout.align(), new_layout.align()); unsafe { // The allocator checks for alignment equality intrinsics::assume(old_layout.align() == new_layout.align()); alloc.grow(ptr, old_layout, new_layout) } } else { alloc.allocate(new_layout) }; memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () }) } unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec<T, A> { /// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
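/// Callers that store values with drop glue must destroy those values first
/// (for example with `ptr::drop_in_place` over the initialized elements);
/// this is what `Vec`'s own `Drop` impl does before its `RawVec` is freed.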
fn drop(&mut self) { if let Some((ptr, layout)) = self.current_memory() { unsafe { self.alloc.deallocate(ptr, layout) } } } } // Central function for reserve error handling. #[inline] fn handle_reserve(result: Result<(), TryReserveError>) { match result { Err(CapacityOverflow) => capacity_overflow(), Err(AllocError { layout, .. }) => handle_alloc_error(layout), Ok(()) => { /* yay */ } } } // We need to guarantee the following: // * We don't ever allocate `> isize::MAX` byte-size objects. // * We don't overflow `usize::MAX` and actually allocate too little. // // On 64-bit we just need to check for overflow since trying to allocate // `> isize::MAX` bytes will surely fail. On 32-bit and 16-bit we need to add // an extra guard for this in case we're running on a platform which can use // all 4GB in user-space, e.g., PAE or x32. #[inline] fn alloc_guard(alloc_size: usize) -> Result<(), TryReserveError> { if usize::BITS < 64 && alloc_size > isize::MAX as usize { Err(CapacityOverflow) } else { Ok(()) } } // One central function responsible for reporting capacity overflows. This'll // ensure that the code generation related to these panics is minimal as there's // only one location which panics rather than a bunch throughout the module. fn capacity_overflow() -> ! { panic!("capacity overflow"); } //! Unicode string slices. //! //! *[See also the `str` primitive type](str).* //! //! The `&str` type is one of the two main string types, the other being `String`. //! Unlike its `String` counterpart, its contents are borrowed. //! //! # Basic Usage //! //! A basic string declaration of `&str` type: //! //! ``` //! let hello_world = "Hello, World!"; //! ``` //! //! Here we have declared a string literal, also known as a string slice. //! String literals have a static lifetime, which means the string `hello_world` //! is guaranteed to be valid for the duration of the entire program. //! We can explicitly specify `hello_world`'s lifetime as well: //! //! ``` //! let hello_world: &'static str = "Hello, world!"; //! ``` #![stable(feature = "rust1", since = "1.0.0")] // Many of the usings in this module are only used in the test configuration. // It's cleaner to just turn off the unused_imports warning than to fix them. 
#![allow(unused_imports)] use core::borrow::{Borrow, BorrowMut}; use core::iter::FusedIterator; use core::mem; use core::ptr; use core::str::pattern::{DoubleEndedSearcher, Pattern, ReverseSearcher, Searcher}; use core::unicode::conversions; use crate::borrow::ToOwned; use crate::boxed::Box; use crate::slice::{Concat, Join, SliceIndex}; use crate::string::String; use crate::vec::Vec; #[stable(feature = "rust1", since = "1.0.0")] pub use core::str::pattern; #[stable(feature = "encode_utf16", since = "1.8.0")] pub use core::str::EncodeUtf16; #[stable(feature = "split_ascii_whitespace", since = "1.34.0")] pub use core::str::SplitAsciiWhitespace; #[stable(feature = "split_inclusive", since = "1.53.0")] pub use core::str::SplitInclusive; #[stable(feature = "rust1", since = "1.0.0")] pub use core::str::SplitWhitespace; #[stable(feature = "rust1", since = "1.0.0")] pub use core::str::{from_utf8, from_utf8_mut, Bytes, CharIndices, Chars}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::str::{from_utf8_unchecked, from_utf8_unchecked_mut, ParseBoolError}; #[stable(feature = "str_escape", since = "1.34.0")] pub use core::str::{EscapeDebug, EscapeDefault, EscapeUnicode}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::str::{FromStr, Utf8Error}; #[allow(deprecated)] #[stable(feature = "rust1", since = "1.0.0")] pub use core::str::{Lines, LinesAny}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::str::{MatchIndices, RMatchIndices}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::str::{Matches, RMatches}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::str::{RSplit, Split}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::str::{RSplitN, SplitN}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::str::{RSplitTerminator, SplitTerminator}; /// Note: `str` in `Concat<str>` is not meaningful here. /// This type parameter of the trait only exists to enable another impl. #[unstable(feature = "slice_concat_ext", issue = "27747")] impl<S: Borrow<str>> Concat<str> for [S] { type Output = String; fn concat(slice: &Self) -> String { Join::join(slice, "") } } #[unstable(feature = "slice_concat_ext", issue = "27747")] impl<S: Borrow<str>> Join<&str> for [S] { type Output = String; fn join(slice: &Self, sep: &str) -> String { unsafe { String::from_utf8_unchecked(join_generic_copy(slice, sep.as_bytes())) } } } macro_rules! specialize_for_lengths { ($separator:expr, $target:expr, $iter:expr; $($num:expr),*) => {{ let mut target = $target; let iter = $iter; let sep_bytes = $separator; match $separator.len() { $( // loops with hardcoded sizes run much faster // specialize the cases with small separator lengths $num => { for s in iter { copy_slice_and_advance!(target, sep_bytes); let content_bytes = s.borrow().as_ref(); copy_slice_and_advance!(target, content_bytes); } }, )* _ => { // arbitrary non-zero size fallback for s in iter { copy_slice_and_advance!(target, sep_bytes); let content_bytes = s.borrow().as_ref(); copy_slice_and_advance!(target, content_bytes); } } } target }} } macro_rules! copy_slice_and_advance { ($target:expr, $bytes:expr) => { let len = $bytes.len(); let (head, tail) = { $target }.split_at_mut(len); head.copy_from_slice($bytes); $target = tail; }; } // Optimized join implementation that works for both Vec<T> (T: Copy) and String's inner vec // Currently (2018-05-13) there is a bug with type inference and specialization (see issue #36262) // For this reason SliceConcat<T> is not specialized for T: Copy and SliceConcat<str> is the // only user of this function.
It is left in place for the time when that is fixed. // // the bounds for String-join are S: Borrow<str> and for Vec-join Borrow<[T]> // [T] and str both impl AsRef<[T]> for some T // => s.borrow().as_ref() and we always have slices fn join_generic_copy<B, T, S>(slice: &[S], sep: &[T]) -> Vec<T> where T: Copy, B: AsRef<[T]> + ?Sized, S: Borrow<B>, { let sep_len = sep.len(); let mut iter = slice.iter(); // the first slice is the only one without a separator preceding it let first = match iter.next() { Some(first) => first, None => return vec![], }; // compute the exact total length of the joined Vec // if the `len` calculation overflows, we'll panic // we would have run out of memory anyway and the rest of the function requires // the entire Vec pre-allocated for safety let reserved_len = sep_len .checked_mul(iter.len()) .and_then(|n| { slice.iter().map(|s| s.borrow().as_ref().len()).try_fold(n, usize::checked_add) }) .expect("attempt to join into collection with len > usize::MAX"); // prepare an uninitialized buffer let mut result = Vec::with_capacity(reserved_len); debug_assert!(result.capacity() >= reserved_len); result.extend_from_slice(first.borrow().as_ref()); unsafe { let pos = result.len(); let target = result.get_unchecked_mut(pos..reserved_len); // copy separator and slices over without bounds checks // generate loops with hardcoded offsets for small separators // massive improvements possible (~ x2) let remain = specialize_for_lengths!(sep, target, iter; 0, 1, 2, 3, 4); // A weird borrow implementation may return different // slices for the length calculation and the actual copy. // Make sure we don't expose uninitialized bytes to the caller. let result_len = reserved_len - remain.len(); result.set_len(result_len); } result } #[stable(feature = "rust1", since = "1.0.0")] impl Borrow<str> for String { #[inline] fn borrow(&self) -> &str { &self[..] } } #[stable(feature = "string_borrow_mut", since = "1.36.0")] impl BorrowMut<str> for String { #[inline] fn borrow_mut(&mut self) -> &mut str { &mut self[..] } } #[stable(feature = "rust1", since = "1.0.0")] impl ToOwned for str { type Owned = String; #[inline] fn to_owned(&self) -> String { unsafe { String::from_utf8_unchecked(self.as_bytes().to_owned()) } } fn clone_into(&self, target: &mut String) { let mut b = mem::take(target).into_bytes(); self.as_bytes().clone_into(&mut b); *target = unsafe { String::from_utf8_unchecked(b) } } } /// Methods for string slices. #[lang = "str_alloc"] #[cfg(not(test))] impl str { /// Converts a `Box<str>` into a `Box<[u8]>` without copying or allocating. /// /// # Examples /// /// Basic usage: /// /// ``` /// let s = "this is a string"; /// let boxed_str = s.to_owned().into_boxed_str(); /// let boxed_bytes = boxed_str.into_boxed_bytes(); /// assert_eq!(*boxed_bytes, *s.as_bytes()); /// ``` #[stable(feature = "str_box_extras", since = "1.20.0")] #[inline] pub fn into_boxed_bytes(self: Box<str>) -> Box<[u8]> { self.into() } /// Replaces all matches of a pattern with another string. /// /// `replace` creates a new [`String`], and copies the data from this string slice into it. /// While doing so, it attempts to find matches of a pattern. If it finds any, it /// replaces them with the replacement string slice.
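/// If the pattern never matches, the result is an identical copy of `self`,
/// still placed in a freshly created [`String`].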
/// /// # Examples /// /// Basic usage: /// /// ``` /// let s = "this is old"; /// /// assert_eq!("this is new", s.replace("old", "new")); /// ``` /// /// When the pattern doesn't match: /// /// ``` /// let s = "this is old"; /// assert_eq!(s, s.replace("cookie monster", "little lamb")); /// ``` #[must_use = "this returns the replaced string as a new allocation, \ without modifying the original"] #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn replace<'a, P: Pattern<'a>>(&'a self, from: P, to: &str) -> String { let mut result = String::new(); let mut last_end = 0; for (start, part) in self.match_indices(from) { result.push_str(unsafe { self.get_unchecked(last_end..start) }); result.push_str(to); last_end = start + part.len(); } result.push_str(unsafe { self.get_unchecked(last_end..self.len()) }); result } /// Replaces first N matches of a pattern with another string. /// /// `replacen` creates a new [`String`], and copies the data from this string slice into it. /// While doing so, it attempts to find matches of a pattern. If it finds any, it /// replaces them with the replacement string slice at most `count` times. /// /// # Examples /// /// Basic usage: /// /// ``` /// let s = "foo foo 123 foo"; /// assert_eq!("new new 123 foo", s.replacen("foo", "new", 2)); /// assert_eq!("faa fao 123 foo", s.replacen('o', "a", 3)); /// assert_eq!("foo foo new23 foo", s.replacen(char::is_numeric, "new", 1)); /// ``` /// /// When the pattern doesn't match: /// /// ``` /// let s = "this is old"; /// assert_eq!(s, s.replacen("cookie monster", "little lamb", 10)); /// ``` #[must_use = "this returns the replaced string as a new allocation, \ without modifying the original"] #[stable(feature = "str_replacen", since = "1.16.0")] pub fn replacen<'a, P: Pattern<'a>>(&'a self, pat: P, to: &str, count: usize) -> String { // Hope to reduce the times of re-allocation let mut result = String::with_capacity(32); let mut last_end = 0; for (start, part) in self.match_indices(pat).take(count) { result.push_str(unsafe { self.get_unchecked(last_end..start) }); result.push_str(to); last_end = start + part.len(); } result.push_str(unsafe { self.get_unchecked(last_end..self.len()) }); result } /// Returns the lowercase equivalent of this string slice, as a new [`String`]. /// /// 'Lowercase' is defined according to the terms of the Unicode Derived Core Property /// `Lowercase`. /// /// Since some characters can expand into multiple characters when changing /// the case, this function returns a [`String`] instead of modifying the /// parameter in-place. /// /// # Examples /// /// Basic usage: /// /// ``` /// let s = "HELLO"; /// /// assert_eq!("hello", s.to_lowercase()); /// ``` /// /// A tricky example, with sigma: /// /// ``` /// let sigma = "Σ"; /// /// assert_eq!("σ", sigma.to_lowercase()); /// /// // but at the end of a word, it's ς, not σ: /// let odysseus = "ὈΔΥΣΣΕΎΣ"; /// /// assert_eq!("ὀδυσσεύς", odysseus.to_lowercase()); /// ``` /// /// Languages without case are not changed: /// /// ``` /// let new_year = "农历新年"; /// /// assert_eq!(new_year, new_year.to_lowercase()); /// ``` #[stable(feature = "unicode_case_mapping", since = "1.2.0")] pub fn to_lowercase(&self) -> String { let mut s = String::with_capacity(self.len()); for (i, c) in self[..].char_indices() { if c == 'Σ' { // Σ maps to σ, except at the end of a word where it maps to ς. 
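// (For example, "ὈΔΥΣΣΕΎΣ" lowercases to "ὀδυσσεύς": the medial sigmas
// become σ while the word-final one becomes ς.)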
// This is the only conditional (contextual) but language-independent mapping // in `SpecialCasing.txt`, // so hard-code it rather than have a generic "condition" mechanism. // See https://github.com/rust-lang/rust/issues/26035 map_uppercase_sigma(self, i, &mut s) } else { match conversions::to_lower(c) { [a, '\0', _] => s.push(a), [a, b, '\0'] => { s.push(a); s.push(b); } [a, b, c] => { s.push(a); s.push(b); s.push(c); } } } } return s; fn map_uppercase_sigma(from: &str, i: usize, to: &mut String) { // See http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992 // for the definition of `Final_Sigma`. debug_assert!('Σ'.len_utf8() == 2); let is_word_final = case_ignorable_then_cased(from[..i].chars().rev()) && !case_ignorable_then_cased(from[i + 2..].chars()); to.push_str(if is_word_final { "ς" } else { "σ" }); } fn case_ignorable_then_cased<I: Iterator<Item = char>>(iter: I) -> bool { use core::unicode::{Case_Ignorable, Cased}; match iter.skip_while(|&c| Case_Ignorable(c)).next() { Some(c) => Cased(c), None => false, } } } /// Returns the uppercase equivalent of this string slice, as a new [`String`]. /// /// 'Uppercase' is defined according to the terms of the Unicode Derived Core Property /// `Uppercase`. /// /// Since some characters can expand into multiple characters when changing /// the case, this function returns a [`String`] instead of modifying the /// parameter in-place. /// /// # Examples /// /// Basic usage: /// /// ``` /// let s = "hello"; /// /// assert_eq!("HELLO", s.to_uppercase()); /// ``` /// /// Scripts without case are not changed: /// /// ``` /// let new_year = "农历新年"; /// /// assert_eq!(new_year, new_year.to_uppercase()); /// ``` /// /// One character can become multiple: /// ``` /// let s = "tschüß"; /// /// assert_eq!("TSCHÜSS", s.to_uppercase()); /// ``` #[stable(feature = "unicode_case_mapping", since = "1.2.0")] pub fn to_uppercase(&self) -> String { let mut s = String::with_capacity(self.len()); for c in self[..].chars() { match conversions::to_upper(c) { [a, '\0', _] => s.push(a), [a, b, '\0'] => { s.push(a); s.push(b); } [a, b, c] => { s.push(a); s.push(b); s.push(c); } } } s } /// Converts a [`Box<str>`] into a [`String`] without copying or allocating. /// /// # Examples /// /// Basic usage: /// /// ``` /// let string = String::from("birthday gift"); /// let boxed_str = string.clone().into_boxed_str(); /// /// assert_eq!(boxed_str.into_string(), string); /// ``` #[stable(feature = "box_str", since = "1.4.0")] #[inline] pub fn into_string(self: Box<str>) -> String { let slice = Box::<[u8]>::from(self); unsafe { String::from_utf8_unchecked(slice.into_vec()) } } /// Creates a new [`String`] by repeating a string `n` times. /// /// # Panics /// /// This function will panic if the capacity would overflow. /// /// # Examples /// /// Basic usage: /// /// ``` /// assert_eq!("abc".repeat(4), String::from("abcabcabcabc")); /// ``` /// /// A panic upon overflow: /// /// ```should_panic /// // this will panic at runtime /// "0123456789abcdef".repeat(usize::MAX); /// ``` #[stable(feature = "repeat_str", since = "1.16.0")] pub fn repeat(&self, n: usize) -> String { unsafe { String::from_utf8_unchecked(self.as_bytes().repeat(n)) } } /// Returns a copy of this string where each character is mapped to its /// ASCII upper case equivalent. /// /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z', /// but non-ASCII letters are unchanged. /// /// To uppercase the value in-place, use [`make_ascii_uppercase`].
/// /// To uppercase ASCII characters in addition to non-ASCII characters, use /// [`to_uppercase`]. /// /// # Examples /// /// ``` /// let s = "Grüße, Jürgen ❤"; /// /// assert_eq!("GRüßE, JüRGEN ❤", s.to_ascii_uppercase()); /// ``` /// /// [`make_ascii_uppercase`]: str::make_ascii_uppercase /// [`to_uppercase`]: #method.to_uppercase #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")] #[inline] pub fn to_ascii_uppercase(&self) -> String { let mut bytes = self.as_bytes().to_vec(); bytes.make_ascii_uppercase(); // make_ascii_uppercase() preserves the UTF-8 invariant. unsafe { String::from_utf8_unchecked(bytes) } } /// Returns a copy of this string where each character is mapped to its /// ASCII lower case equivalent. /// /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z', /// but non-ASCII letters are unchanged. /// /// To lowercase the value in-place, use [`make_ascii_lowercase`]. /// /// To lowercase ASCII characters in addition to non-ASCII characters, use /// [`to_lowercase`]. /// /// # Examples /// /// ``` /// let s = "Grüße, Jürgen ❤"; /// /// assert_eq!("grüße, jürgen ❤", s.to_ascii_lowercase()); /// ``` /// /// [`make_ascii_lowercase`]: str::make_ascii_lowercase /// [`to_lowercase`]: #method.to_lowercase #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")] #[inline] pub fn to_ascii_lowercase(&self) -> String { let mut bytes = self.as_bytes().to_vec(); bytes.make_ascii_lowercase(); // make_ascii_lowercase() preserves the UTF-8 invariant. unsafe { String::from_utf8_unchecked(bytes) } } } /// Converts a boxed slice of bytes to a boxed string slice without checking /// that the string contains valid UTF-8. /// /// # Examples /// /// Basic usage: /// /// ``` /// let smile_utf8 = Box::new([226, 152, 186]); /// let smile = unsafe { std::str::from_boxed_utf8_unchecked(smile_utf8) }; /// /// assert_eq!("☺", &*smile); /// ``` #[stable(feature = "str_box_extras", since = "1.20.0")] #[inline] pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8]>) -> Box { unsafe { Box::from_raw(Box::into_raw(v) as *mut str) } } use super::*; use std::boxed::Box; use std::clone::Clone; use std::convert::{From, TryInto}; use std::mem::drop; use std::ops::Drop; use std::option::Option::{self, None, Some}; use std::sync::atomic::{ self, Ordering::{Acquire, SeqCst}, }; use std::sync::mpsc::channel; use std::sync::Mutex; use std::thread; use crate::vec::Vec; struct Canary(*mut atomic::AtomicUsize); impl Drop for Canary { fn drop(&mut self) { unsafe { match *self { Canary(c) => { (*c).fetch_add(1, SeqCst); } } } } } #[test] #[cfg_attr(target_os = "emscripten", ignore)] fn manually_share_arc() { let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; let arc_v = Arc::new(v); let (tx, rx) = channel(); let _t = thread::spawn(move || { let arc_v: Arc> = rx.recv().unwrap(); assert_eq!((*arc_v)[3], 4); }); tx.send(arc_v.clone()).unwrap(); assert_eq!((*arc_v)[2], 3); assert_eq!((*arc_v)[4], 5); } #[test] fn test_arc_get_mut() { let mut x = Arc::new(3); *Arc::get_mut(&mut x).unwrap() = 4; assert_eq!(*x, 4); let y = x.clone(); assert!(Arc::get_mut(&mut x).is_none()); drop(y); assert!(Arc::get_mut(&mut x).is_some()); let _w = Arc::downgrade(&x); assert!(Arc::get_mut(&mut x).is_none()); } #[test] fn weak_counts() { assert_eq!(Weak::weak_count(&Weak::::new()), 0); assert_eq!(Weak::strong_count(&Weak::::new()), 0); let a = Arc::new(0); let w = Arc::downgrade(&a); assert_eq!(Weak::strong_count(&w), 1); assert_eq!(Weak::weak_count(&w), 1); let w2 = w.clone(); assert_eq!(Weak::strong_count(&w), 1); 
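// Cloning the `Weak` adds a second weak reference but no strong one: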
assert_eq!(Weak::weak_count(&w), 2); assert_eq!(Weak::strong_count(&w2), 1); assert_eq!(Weak::weak_count(&w2), 2); drop(w); assert_eq!(Weak::strong_count(&w2), 1); assert_eq!(Weak::weak_count(&w2), 1); let a2 = a.clone(); assert_eq!(Weak::strong_count(&w2), 2); assert_eq!(Weak::weak_count(&w2), 1); drop(a2); drop(a); assert_eq!(Weak::strong_count(&w2), 0); assert_eq!(Weak::weak_count(&w2), 0); drop(w2); } #[test] fn try_unwrap() { let x = Arc::new(3); assert_eq!(Arc::try_unwrap(x), Ok(3)); let x = Arc::new(4); let _y = x.clone(); assert_eq!(Arc::try_unwrap(x), Err(Arc::new(4))); let x = Arc::new(5); let _w = Arc::downgrade(&x); assert_eq!(Arc::try_unwrap(x), Ok(5)); } #[test] fn into_from_raw() { let x = Arc::new(box "hello"); let y = x.clone(); let x_ptr = Arc::into_raw(x); drop(y); unsafe { assert_eq!(**x_ptr, "hello"); let x = Arc::from_raw(x_ptr); assert_eq!(**x, "hello"); assert_eq!(Arc::try_unwrap(x).map(|x| *x), Ok("hello")); } } #[test] fn test_into_from_raw_unsized() { use std::fmt::Display; use std::string::ToString; let arc: Arc = Arc::from("foo"); let ptr = Arc::into_raw(arc.clone()); let arc2 = unsafe { Arc::from_raw(ptr) }; assert_eq!(unsafe { &*ptr }, "foo"); assert_eq!(arc, arc2); let arc: Arc = Arc::new(123); let ptr = Arc::into_raw(arc.clone()); let arc2 = unsafe { Arc::from_raw(ptr) }; assert_eq!(unsafe { &*ptr }.to_string(), "123"); assert_eq!(arc2.to_string(), "123"); } #[test] fn into_from_weak_raw() { let x = Arc::new(box "hello"); let y = Arc::downgrade(&x); let y_ptr = Weak::into_raw(y); unsafe { assert_eq!(**y_ptr, "hello"); let y = Weak::from_raw(y_ptr); let y_up = Weak::upgrade(&y).unwrap(); assert_eq!(**y_up, "hello"); drop(y_up); assert_eq!(Arc::try_unwrap(x).map(|x| *x), Ok("hello")); } } #[test] fn test_into_from_weak_raw_unsized() { use std::fmt::Display; use std::string::ToString; let arc: Arc = Arc::from("foo"); let weak: Weak = Arc::downgrade(&arc); let ptr = Weak::into_raw(weak.clone()); let weak2 = unsafe { Weak::from_raw(ptr) }; assert_eq!(unsafe { &*ptr }, "foo"); assert!(weak.ptr_eq(&weak2)); let arc: Arc = Arc::new(123); let weak: Weak = Arc::downgrade(&arc); let ptr = Weak::into_raw(weak.clone()); let weak2 = unsafe { Weak::from_raw(ptr) }; assert_eq!(unsafe { &*ptr }.to_string(), "123"); assert!(weak.ptr_eq(&weak2)); } #[test] fn test_cowarc_clone_make_mut() { let mut cow0 = Arc::new(75); let mut cow1 = cow0.clone(); let mut cow2 = cow1.clone(); assert!(75 == *Arc::make_mut(&mut cow0)); assert!(75 == *Arc::make_mut(&mut cow1)); assert!(75 == *Arc::make_mut(&mut cow2)); *Arc::make_mut(&mut cow0) += 1; *Arc::make_mut(&mut cow1) += 2; *Arc::make_mut(&mut cow2) += 3; assert!(76 == *cow0); assert!(77 == *cow1); assert!(78 == *cow2); // none should point to the same backing memory assert!(*cow0 != *cow1); assert!(*cow0 != *cow2); assert!(*cow1 != *cow2); } #[test] fn test_cowarc_clone_unique2() { let mut cow0 = Arc::new(75); let cow1 = cow0.clone(); let cow2 = cow1.clone(); assert!(75 == *cow0); assert!(75 == *cow1); assert!(75 == *cow2); *Arc::make_mut(&mut cow0) += 1; assert!(76 == *cow0); assert!(75 == *cow1); assert!(75 == *cow2); // cow1 and cow2 should share the same contents // cow0 should have a unique reference assert!(*cow0 != *cow1); assert!(*cow0 != *cow2); assert!(*cow1 == *cow2); } #[test] fn test_cowarc_clone_weak() { let mut cow0 = Arc::new(75); let cow1_weak = Arc::downgrade(&cow0); assert!(75 == *cow0); assert!(75 == *cow1_weak.upgrade().unwrap()); *Arc::make_mut(&mut cow0) += 1; assert!(76 == *cow0); 
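// `make_mut` saw the outstanding weak reference, so it moved the value into
// a fresh allocation and detached the old one; the weak can no longer upgrade: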
assert!(cow1_weak.upgrade().is_none()); } #[test] fn test_live() { let x = Arc::new(5); let y = Arc::downgrade(&x); assert!(y.upgrade().is_some()); } #[test] fn test_dead() { let x = Arc::new(5); let y = Arc::downgrade(&x); drop(x); assert!(y.upgrade().is_none()); } #[test] fn weak_self_cyclic() { struct Cycle { x: Mutex>>, } let a = Arc::new(Cycle { x: Mutex::new(None) }); let b = Arc::downgrade(&a.clone()); *a.x.lock().unwrap() = Some(b); // hopefully we don't double-free (or leak)... } #[test] fn drop_arc() { let mut canary = atomic::AtomicUsize::new(0); let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize)); drop(x); assert!(canary.load(Acquire) == 1); } #[test] fn drop_arc_weak() { let mut canary = atomic::AtomicUsize::new(0); let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize)); let arc_weak = Arc::downgrade(&arc); assert!(canary.load(Acquire) == 0); drop(arc); assert!(canary.load(Acquire) == 1); drop(arc_weak); } #[test] fn test_strong_count() { let a = Arc::new(0); assert!(Arc::strong_count(&a) == 1); let w = Arc::downgrade(&a); assert!(Arc::strong_count(&a) == 1); let b = w.upgrade().expect(""); assert!(Arc::strong_count(&b) == 2); assert!(Arc::strong_count(&a) == 2); drop(w); drop(a); assert!(Arc::strong_count(&b) == 1); let c = b.clone(); assert!(Arc::strong_count(&b) == 2); assert!(Arc::strong_count(&c) == 2); } #[test] fn test_weak_count() { let a = Arc::new(0); assert!(Arc::strong_count(&a) == 1); assert!(Arc::weak_count(&a) == 0); let w = Arc::downgrade(&a); assert!(Arc::strong_count(&a) == 1); assert!(Arc::weak_count(&a) == 1); let x = w.clone(); assert!(Arc::weak_count(&a) == 2); drop(w); drop(x); assert!(Arc::strong_count(&a) == 1); assert!(Arc::weak_count(&a) == 0); let c = a.clone(); assert!(Arc::strong_count(&a) == 2); assert!(Arc::weak_count(&a) == 0); let d = Arc::downgrade(&c); assert!(Arc::weak_count(&c) == 1); assert!(Arc::strong_count(&c) == 2); drop(a); drop(c); drop(d); } #[test] fn show_arc() { let a = Arc::new(5); assert_eq!(format!("{:?}", a), "5"); } // Make sure deriving works with Arc #[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)] struct Foo { inner: Arc, } #[test] fn test_unsized() { let x: Arc<[i32]> = Arc::new([1, 2, 3]); assert_eq!(format!("{:?}", x), "[1, 2, 3]"); let y = Arc::downgrade(&x.clone()); drop(x); assert!(y.upgrade().is_none()); } #[test] fn test_maybe_thin_unsized() { // If/when custom thin DSTs exist, this test should be updated to use one use std::ffi::{CStr, CString}; let x: Arc = Arc::from(CString::new("swordfish").unwrap().into_boxed_c_str()); assert_eq!(format!("{:?}", x), "\"swordfish\""); let y: Weak = Arc::downgrade(&x); drop(x); // At this point, the weak points to a dropped DST assert!(y.upgrade().is_none()); // But we still need to be able to get the alloc layout to drop. // CStr has no drop glue, but custom DSTs might, and need to work. 
drop(y); } #[test] fn test_from_owned() { let foo = 123; let foo_arc = Arc::from(foo); assert!(123 == *foo_arc); } #[test] fn test_new_weak() { let foo: Weak = Weak::new(); assert!(foo.upgrade().is_none()); } #[test] fn test_ptr_eq() { let five = Arc::new(5); let same_five = five.clone(); let other_five = Arc::new(5); assert!(Arc::ptr_eq(&five, &same_five)); assert!(!Arc::ptr_eq(&five, &other_five)); } #[test] #[cfg_attr(target_os = "emscripten", ignore)] fn test_weak_count_locked() { let mut a = Arc::new(atomic::AtomicBool::new(false)); let a2 = a.clone(); let t = thread::spawn(move || { // Miri is too slow let count = if cfg!(miri) { 1000 } else { 1000000 }; for _i in 0..count { Arc::get_mut(&mut a); } a.store(true, SeqCst); }); while !a2.load(SeqCst) { let n = Arc::weak_count(&a2); assert!(n < 2, "bad weak count: {}", n); #[cfg(miri)] // Miri's scheduler does not guarantee liveness, and thus needs this hint. std::hint::spin_loop(); } t.join().unwrap(); } #[test] fn test_from_str() { let r: Arc = Arc::from("foo"); assert_eq!(&r[..], "foo"); } #[test] fn test_copy_from_slice() { let s: &[u32] = &[1, 2, 3]; let r: Arc<[u32]> = Arc::from(s); assert_eq!(&r[..], [1, 2, 3]); } #[test] fn test_clone_from_slice() { #[derive(Clone, Debug, Eq, PartialEq)] struct X(u32); let s: &[X] = &[X(1), X(2), X(3)]; let r: Arc<[X]> = Arc::from(s); assert_eq!(&r[..], s); } #[test] #[should_panic] fn test_clone_from_slice_panic() { use std::string::{String, ToString}; struct Fail(u32, String); impl Clone for Fail { fn clone(&self) -> Fail { if self.0 == 2 { panic!(); } Fail(self.0, self.1.clone()) } } let s: &[Fail] = &[Fail(0, "foo".to_string()), Fail(1, "bar".to_string()), Fail(2, "baz".to_string())]; // Should panic, but not cause memory corruption let _r: Arc<[Fail]> = Arc::from(s); } #[test] fn test_from_box() { let b: Box = box 123; let r: Arc = Arc::from(b); assert_eq!(*r, 123); } #[test] fn test_from_box_str() { use std::string::String; let s = String::from("foo").into_boxed_str(); let r: Arc = Arc::from(s); assert_eq!(&r[..], "foo"); } #[test] fn test_from_box_slice() { let s = vec![1, 2, 3].into_boxed_slice(); let r: Arc<[u32]> = Arc::from(s); assert_eq!(&r[..], [1, 2, 3]); } #[test] fn test_from_box_trait() { use std::fmt::Display; use std::string::ToString; let b: Box = box 123; let r: Arc = Arc::from(b); assert_eq!(r.to_string(), "123"); } #[test] fn test_from_box_trait_zero_sized() { use std::fmt::Debug; let b: Box = box (); let r: Arc = Arc::from(b); assert_eq!(format!("{:?}", r), "()"); } #[test] fn test_from_vec() { let v = vec![1, 2, 3]; let r: Arc<[u32]> = Arc::from(v); assert_eq!(&r[..], [1, 2, 3]); } #[test] fn test_downcast() { use std::any::Any; let r1: Arc = Arc::new(i32::MAX); let r2: Arc = Arc::new("abc"); assert!(r1.clone().downcast::().is_err()); let r1i32 = r1.downcast::(); assert!(r1i32.is_ok()); assert_eq!(r1i32.unwrap(), Arc::new(i32::MAX)); assert!(r2.clone().downcast::().is_err()); let r2str = r2.downcast::<&'static str>(); assert!(r2str.is_ok()); assert_eq!(r2str.unwrap(), Arc::new("abc")); } #[test] fn test_array_from_slice() { let v = vec![1, 2, 3]; let r: Arc<[u32]> = Arc::from(v); let a: Result, _> = r.clone().try_into(); assert!(a.is_ok()); let a: Result, _> = r.clone().try_into(); assert!(a.is_err()); } #[test] fn test_arc_cyclic_with_zero_refs() { struct ZeroRefs { inner: Weak, } let zero_refs = Arc::new_cyclic(|inner| { assert_eq!(inner.strong_count(), 0); assert!(inner.upgrade().is_none()); ZeroRefs { inner: Weak::new() } }); assert_eq!(Arc::strong_count(&zero_refs), 
1); assert_eq!(Arc::weak_count(&zero_refs), 0); assert_eq!(zero_refs.inner.strong_count(), 0); assert_eq!(zero_refs.inner.weak_count(), 0); } #[test] fn test_arc_new_cyclic_one_ref() { struct OneRef { inner: Weak, } let one_ref = Arc::new_cyclic(|inner| { assert_eq!(inner.strong_count(), 0); assert!(inner.upgrade().is_none()); OneRef { inner: inner.clone() } }); assert_eq!(Arc::strong_count(&one_ref), 1); assert_eq!(Arc::weak_count(&one_ref), 1); let one_ref2 = Weak::upgrade(&one_ref.inner).unwrap(); assert!(Arc::ptr_eq(&one_ref, &one_ref2)); assert_eq!(Arc::strong_count(&one_ref), 2); assert_eq!(Arc::weak_count(&one_ref), 1); } #[test] fn test_arc_cyclic_two_refs() { struct TwoRefs { inner1: Weak, inner2: Weak, } let two_refs = Arc::new_cyclic(|inner| { assert_eq!(inner.strong_count(), 0); assert!(inner.upgrade().is_none()); let inner1 = inner.clone(); let inner2 = inner1.clone(); TwoRefs { inner1, inner2 } }); assert_eq!(Arc::strong_count(&two_refs), 1); assert_eq!(Arc::weak_count(&two_refs), 2); let two_refs1 = Weak::upgrade(&two_refs.inner1).unwrap(); assert!(Arc::ptr_eq(&two_refs, &two_refs1)); let two_refs2 = Weak::upgrade(&two_refs.inner2).unwrap(); assert!(Arc::ptr_eq(&two_refs, &two_refs2)); assert_eq!(Arc::strong_count(&two_refs), 3); assert_eq!(Arc::weak_count(&two_refs), 2); } //! A UTF-8–encoded, growable string. //! //! This module contains the [`String`] type, the [`ToString`] trait for //! converting to strings, and several error types that may result from //! working with [`String`]s. //! //! # Examples //! //! There are multiple ways to create a new [`String`] from a string literal: //! //! ``` //! let s = "Hello".to_string(); //! //! let s = String::from("world"); //! let s: String = "also this".into(); //! ``` //! //! You can create a new [`String`] from an existing one by concatenating with //! `+`: //! //! ``` //! let s = "Hello".to_string(); //! //! let message = s + " world!"; //! ``` //! //! If you have a vector of valid UTF-8 bytes, you can make a [`String`] out of //! it. You can do the reverse too. //! //! ``` //! let sparkle_heart = vec![240, 159, 146, 150]; //! //! // We know these bytes are valid, so we'll use `unwrap()`. //! let sparkle_heart = String::from_utf8(sparkle_heart).unwrap(); //! //! assert_eq!("💖", sparkle_heart); //! //! let bytes = sparkle_heart.into_bytes(); //! //! assert_eq!(bytes, [240, 159, 146, 150]); //! ``` #![stable(feature = "rust1", since = "1.0.0")] use core::char::{decode_utf16, REPLACEMENT_CHARACTER}; use core::fmt; use core::hash; use core::iter::{FromIterator, FusedIterator}; use core::ops::Bound::{Excluded, Included, Unbounded}; use core::ops::{self, Add, AddAssign, Index, IndexMut, Range, RangeBounds}; use core::ptr; use core::slice; use core::str::{lossy, pattern::Pattern}; use crate::borrow::{Cow, ToOwned}; use crate::boxed::Box; use crate::collections::TryReserveError; use crate::str::{self, from_boxed_utf8_unchecked, Chars, FromStr, Utf8Error}; use crate::vec::Vec; /// A UTF-8–encoded, growable string. /// /// The `String` type is the most common string type that has ownership over the /// contents of the string. It has a close relationship with its borrowed /// counterpart, the primitive [`str`]. 
/// /// # Examples /// /// You can create a `String` from [a literal string][`str`] with [`String::from`]: /// /// [`String::from`]: From::from /// /// ``` /// let hello = String::from("Hello, world!"); /// ``` /// /// You can append a [`char`] to a `String` with the [`push`] method, and /// append a [`&str`] with the [`push_str`] method: /// /// ``` /// let mut hello = String::from("Hello, "); /// /// hello.push('w'); /// hello.push_str("orld!"); /// ``` /// /// [`push`]: String::push /// [`push_str`]: String::push_str /// /// If you have a vector of UTF-8 bytes, you can create a `String` from it with /// the [`from_utf8`] method: /// /// ``` /// // some bytes, in a vector /// let sparkle_heart = vec![240, 159, 146, 150]; /// /// // We know these bytes are valid, so we'll use `unwrap()`. /// let sparkle_heart = String::from_utf8(sparkle_heart).unwrap(); /// /// assert_eq!("💖", sparkle_heart); /// ``` /// /// [`from_utf8`]: String::from_utf8 /// /// # UTF-8 /// /// `String`s are always valid UTF-8. This has a few implications, the first of /// which is that if you need a non-UTF-8 string, consider [`OsString`]. It is /// similar, but without the UTF-8 constraint. The second implication is that /// you cannot index into a `String`: /// /// ```compile_fail,E0277 /// let s = "hello"; /// /// println!("The first letter of s is {}", s[0]); // ERROR!!! /// ``` /// /// [`OsString`]: ../../std/ffi/struct.OsString.html /// /// Indexing is intended to be a constant-time operation, but UTF-8 encoding /// does not allow us to do this. Furthermore, it's not clear what sort of /// thing the index should return: a byte, a codepoint, or a grapheme cluster. /// The [`bytes`] and [`chars`] methods return iterators over the first /// two, respectively. /// /// [`bytes`]: str::bytes /// [`chars`]: str::chars /// /// # Deref /// /// `String`s implement [`Deref`]`<Target = str>`, and so inherit all of [`str`]'s /// methods. In addition, this means that you can pass a `String` to a /// function which takes a [`&str`] by using an ampersand (`&`): /// /// ``` /// fn takes_str(s: &str) { } /// /// let s = String::from("Hello"); /// /// takes_str(&s); /// ``` /// /// This will create a [`&str`] from the `String` and pass it in. This /// conversion is very inexpensive, and so generally, functions will accept /// [`&str`]s as arguments unless they need a `String` for some specific /// reason. /// /// In certain cases Rust doesn't have enough information to make this /// conversion, known as [`Deref`] coercion. In the following example a string /// slice [`&'a str`][`&str`] implements the trait `TraitExample`, and the function /// `example_func` takes anything that implements the trait. In this case Rust /// would need to make two implicit conversions, which Rust doesn't have the /// means to do. For that reason, the following example will not compile. /// /// ```compile_fail,E0277 /// trait TraitExample {} /// /// impl<'a> TraitExample for &'a str {} /// /// fn example_func<A: TraitExample>(example_arg: A) {} /// /// let example_string = String::from("example_string"); /// example_func(&example_string); /// ``` /// /// There are two options that would work instead. The first would be to /// change the line `example_func(&example_string);` to /// `example_func(example_string.as_str());`, using the method [`as_str()`] /// to explicitly extract the string slice containing the string. The second /// way changes `example_func(&example_string);` to /// `example_func(&*example_string);`.
In this case we are dereferencing a /// `String` to a [`str`][`&str`], then referencing the [`str`][`&str`] back to /// [`&str`]. The second way is more idiomatic, however both work to do the /// conversion explicitly rather than relying on the implicit conversion. /// /// # Representation /// /// A `String` is made up of three components: a pointer to some bytes, a /// length, and a capacity. The pointer points to an internal buffer `String` /// uses to store its data. The length is the number of bytes currently stored /// in the buffer, and the capacity is the size of the buffer in bytes. As such, /// the length will always be less than or equal to the capacity. /// /// This buffer is always stored on the heap. /// /// You can look at these with the [`as_ptr`], [`len`], and [`capacity`] /// methods: /// /// ``` /// use std::mem; /// /// let story = String::from("Once upon a time..."); /// // FIXME Update this when vec_into_raw_parts is stabilized /// // Prevent automatically dropping the String's data /// let mut story = mem::ManuallyDrop::new(story); /// /// let ptr = story.as_mut_ptr(); /// let len = story.len(); /// let capacity = story.capacity(); /// /// // story has nineteen bytes /// assert_eq!(19, len); /// /// // We can re-build a String out of ptr, len, and capacity. This is all /// // unsafe because we are responsible for making sure the components are /// // valid: /// let s = unsafe { String::from_raw_parts(ptr, len, capacity) } ; /// /// assert_eq!(String::from("Once upon a time..."), s); /// ``` /// /// [`as_ptr`]: str::as_ptr /// [`len`]: String::len /// [`capacity`]: String::capacity /// /// If a `String` has enough capacity, adding elements to it will not /// re-allocate. For example, consider this program: /// /// ``` /// let mut s = String::new(); /// /// println!("{}", s.capacity()); /// /// for _ in 0..5 { /// s.push_str("hello"); /// println!("{}", s.capacity()); /// } /// ``` /// /// This will output the following: /// /// ```text /// 0 /// 5 /// 10 /// 20 /// 20 /// 40 /// ``` /// /// At first, we have no memory allocated at all, but as we append to the /// string, it increases its capacity appropriately. If we instead use the /// [`with_capacity`] method to allocate the correct capacity initially: /// /// ``` /// let mut s = String::with_capacity(25); /// /// println!("{}", s.capacity()); /// /// for _ in 0..5 { /// s.push_str("hello"); /// println!("{}", s.capacity()); /// } /// ``` /// /// [`with_capacity`]: String::with_capacity /// /// We end up with a different output: /// /// ```text /// 25 /// 25 /// 25 /// 25 /// 25 /// 25 /// ``` /// /// Here, there's no need to allocate more memory inside the loop. /// /// [`str`]: prim@str /// [`&str`]: prim@str /// [`Deref`]: core::ops::Deref /// [`as_str()`]: String::as_str #[derive(PartialOrd, Eq, Ord)] #[cfg_attr(not(test), rustc_diagnostic_item = "string_type")] #[stable(feature = "rust1", since = "1.0.0")] pub struct String { vec: Vec, } /// A possible error value when converting a `String` from a UTF-8 byte vector. /// /// This type is the error type for the [`from_utf8`] method on [`String`]. It /// is designed in such a way to carefully avoid reallocations: the /// [`into_bytes`] method will give back the byte vector that was used in the /// conversion attempt. /// /// [`from_utf8`]: String::from_utf8 /// [`into_bytes`]: FromUtf8Error::into_bytes /// /// The [`Utf8Error`] type provided by [`std::str`] represents an error that may /// occur when converting a slice of [`u8`]s to a [`&str`]. 
In this sense, it's /// an analogue to `FromUtf8Error`, and you can get one from a `FromUtf8Error` /// through the [`utf8_error`] method. /// /// [`Utf8Error`]: core::str::Utf8Error /// [`std::str`]: core::str /// [`&str`]: prim@str /// [`utf8_error`]: Self::utf8_error /// /// # Examples /// /// Basic usage: /// /// ``` /// // some invalid bytes, in a vector /// let bytes = vec![0, 159]; /// /// let value = String::from_utf8(bytes); /// /// assert!(value.is_err()); /// assert_eq!(vec![0, 159], value.unwrap_err().into_bytes()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[derive(Debug, Clone, PartialEq, Eq)] pub struct FromUtf8Error { bytes: Vec, error: Utf8Error, } /// A possible error value when converting a `String` from a UTF-16 byte slice. /// /// This type is the error type for the [`from_utf16`] method on [`String`]. /// /// [`from_utf16`]: String::from_utf16 /// # Examples /// /// Basic usage: /// /// ``` /// // 𝄞muic /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075, /// 0xD800, 0x0069, 0x0063]; /// /// assert!(String::from_utf16(v).is_err()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[derive(Debug)] pub struct FromUtf16Error(()); impl String { /// Creates a new empty `String`. /// /// Given that the `String` is empty, this will not allocate any initial /// buffer. While that means that this initial operation is very /// inexpensive, it may cause excessive allocation later when you add /// data. If you have an idea of how much data the `String` will hold, /// consider the [`with_capacity`] method to prevent excessive /// re-allocation. /// /// [`with_capacity`]: String::with_capacity /// /// # Examples /// /// Basic usage: /// /// ``` /// let s = String::new(); /// ``` #[inline] #[rustc_const_stable(feature = "const_string_new", since = "1.39.0")] #[stable(feature = "rust1", since = "1.0.0")] pub const fn new() -> String { String { vec: Vec::new() } } /// Creates a new empty `String` with a particular capacity. /// /// `String`s have an internal buffer to hold their data. The capacity is /// the length of that buffer, and can be queried with the [`capacity`] /// method. This method creates an empty `String`, but one with an initial /// buffer that can hold `capacity` bytes. This is useful when you may be /// appending a bunch of data to the `String`, reducing the number of /// reallocations it needs to do. /// /// [`capacity`]: String::capacity /// /// If the given capacity is `0`, no allocation will occur, and this method /// is identical to the [`new`] method. /// /// [`new`]: String::new /// /// # Examples /// /// Basic usage: /// /// ``` /// let mut s = String::with_capacity(10); /// /// // The String contains no chars, even though it has capacity for more /// assert_eq!(s.len(), 0); /// /// // These are all done without reallocating... /// let cap = s.capacity(); /// for _ in 0..10 { /// s.push('a'); /// } /// /// assert_eq!(s.capacity(), cap); /// /// // ...but this may make the string reallocate /// s.push('a'); /// ``` #[inline] #[doc(alias = "alloc")] #[doc(alias = "malloc")] #[stable(feature = "rust1", since = "1.0.0")] pub fn with_capacity(capacity: usize) -> String { String { vec: Vec::with_capacity(capacity) } } // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is // required for this method definition, is not available. 
Since we don't // require this method for testing purposes, I'll just stub it // NB see the slice::hack module in slice.rs for more information #[inline] #[cfg(test)] pub fn from_str(_: &str) -> String { panic!("not available with cfg(test)"); } /// Converts a vector of bytes to a `String`. /// /// A string ([`String`]) is made of bytes ([`u8`]), and a vector of bytes /// ([`Vec`]) is made of bytes, so this function converts between the /// two. Not all byte slices are valid `String`s, however: `String` /// requires that it is valid UTF-8. `from_utf8()` checks to ensure that /// the bytes are valid UTF-8, and then does the conversion. /// /// If you are sure that the byte slice is valid UTF-8, and you don't want /// to incur the overhead of the validity check, there is an unsafe version /// of this function, [`from_utf8_unchecked`], which has the same behavior /// but skips the check. /// /// This method will take care to not copy the vector, for efficiency's /// sake. /// /// If you need a [`&str`] instead of a `String`, consider /// [`str::from_utf8`]. /// /// The inverse of this method is [`into_bytes`]. /// /// # Errors /// /// Returns [`Err`] if the slice is not UTF-8 with a description as to why the /// provided bytes are not UTF-8. The vector you moved in is also included. /// /// # Examples /// /// Basic usage: /// /// ``` /// // some bytes, in a vector /// let sparkle_heart = vec![240, 159, 146, 150]; /// /// // We know these bytes are valid, so we'll use `unwrap()`. /// let sparkle_heart = String::from_utf8(sparkle_heart).unwrap(); /// /// assert_eq!("💖", sparkle_heart); /// ``` /// /// Incorrect bytes: /// /// ``` /// // some invalid bytes, in a vector /// let sparkle_heart = vec![0, 159, 146, 150]; /// /// assert!(String::from_utf8(sparkle_heart).is_err()); /// ``` /// /// See the docs for [`FromUtf8Error`] for more details on what you can do /// with this error. /// /// [`from_utf8_unchecked`]: String::from_utf8_unchecked /// [`Vec`]: crate::vec::Vec /// [`&str`]: prim@str /// [`into_bytes`]: String::into_bytes #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn from_utf8(vec: Vec) -> Result { match str::from_utf8(&vec) { Ok(..) => Ok(String { vec }), Err(e) => Err(FromUtf8Error { bytes: vec, error: e }), } } /// Converts a slice of bytes to a string, including invalid characters. /// /// Strings are made of bytes ([`u8`]), and a slice of bytes /// ([`&[u8]`][byteslice]) is made of bytes, so this function converts /// between the two. Not all byte slices are valid strings, however: strings /// are required to be valid UTF-8. During this conversion, /// `from_utf8_lossy()` will replace any invalid UTF-8 sequences with /// [`U+FFFD REPLACEMENT CHARACTER`][U+FFFD], which looks like this: � /// /// [byteslice]: prim@slice /// [U+FFFD]: core::char::REPLACEMENT_CHARACTER /// /// If you are sure that the byte slice is valid UTF-8, and you don't want /// to incur the overhead of the conversion, there is an unsafe version /// of this function, [`from_utf8_unchecked`], which has the same behavior /// but skips the checks. /// /// [`from_utf8_unchecked`]: String::from_utf8_unchecked /// /// This function returns a [`Cow<'a, str>`]. If our byte slice is invalid /// UTF-8, then we need to insert the replacement characters, which will /// change the size of the string, and hence, require a `String`. But if /// it's already valid UTF-8, we don't need a new allocation. This return /// type allows us to handle both cases. 
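/// Matching on the returned value (`Cow::Borrowed` vs. `Cow::Owned`) thus
/// also tells you whether any replacement characters were inserted.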
/// /// [`Cow<'a, str>`]: crate::borrow::Cow /// /// # Examples /// /// Basic usage: /// /// ``` /// // some bytes, in a vector /// let sparkle_heart = vec![240, 159, 146, 150]; /// /// let sparkle_heart = String::from_utf8_lossy(&sparkle_heart); /// /// assert_eq!("💖", sparkle_heart); /// ``` /// /// Incorrect bytes: /// /// ``` /// // some invalid bytes /// let input = b"Hello \xF0\x90\x80World"; /// let output = String::from_utf8_lossy(input); /// /// assert_eq!("Hello �World", output); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn from_utf8_lossy(v: &[u8]) -> Cow<'_, str> { let mut iter = lossy::Utf8Lossy::from_bytes(v).chunks(); let (first_valid, first_broken) = if let Some(chunk) = iter.next() { let lossy::Utf8LossyChunk { valid, broken } = chunk; if valid.len() == v.len() { debug_assert!(broken.is_empty()); return Cow::Borrowed(valid); } (valid, broken) } else { return Cow::Borrowed(""); }; const REPLACEMENT: &str = "\u{FFFD}"; let mut res = String::with_capacity(v.len()); res.push_str(first_valid); if !first_broken.is_empty() { res.push_str(REPLACEMENT); } for lossy::Utf8LossyChunk { valid, broken } in iter { res.push_str(valid); if !broken.is_empty() { res.push_str(REPLACEMENT); } } Cow::Owned(res) } /// Decode a UTF-16–encoded vector `v` into a `String`, returning [`Err`] /// if `v` contains any invalid data. /// /// # Examples /// /// Basic usage: /// /// ``` /// // 𝄞music /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075, /// 0x0073, 0x0069, 0x0063]; /// assert_eq!(String::from("𝄞music"), /// String::from_utf16(v).unwrap()); /// /// // 𝄞muic /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075, /// 0xD800, 0x0069, 0x0063]; /// assert!(String::from_utf16(v).is_err()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn from_utf16(v: &[u16]) -> Result { // This isn't done via collect::>() for performance reasons. // FIXME: the function can be simplified again when #48994 is closed. let mut ret = String::with_capacity(v.len()); for c in decode_utf16(v.iter().cloned()) { if let Ok(c) = c { ret.push(c); } else { return Err(FromUtf16Error(())); } } Ok(ret) } /// Decode a UTF-16–encoded slice `v` into a `String`, replacing /// invalid data with [the replacement character (`U+FFFD`)][U+FFFD]. /// /// Unlike [`from_utf8_lossy`] which returns a [`Cow<'a, str>`], /// `from_utf16_lossy` returns a `String` since the UTF-16 to UTF-8 /// conversion requires a memory allocation. /// /// [`from_utf8_lossy`]: String::from_utf8_lossy /// [`Cow<'a, str>`]: crate::borrow::Cow /// [U+FFFD]: core::char::REPLACEMENT_CHARACTER /// /// # Examples /// /// Basic usage: /// /// ``` /// // 𝄞music /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075, /// 0x0073, 0xDD1E, 0x0069, 0x0063, /// 0xD834]; /// /// assert_eq!(String::from("𝄞mus\u{FFFD}ic\u{FFFD}"), /// String::from_utf16_lossy(v)); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn from_utf16_lossy(v: &[u16]) -> String { decode_utf16(v.iter().cloned()).map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)).collect() } /// Decomposes a `String` into its raw components. /// /// Returns the raw pointer to the underlying data, the length of /// the string (in bytes), and the allocated capacity of the data /// (in bytes). These are the same arguments in the same order as /// the arguments to [`from_raw_parts`]. /// /// After calling this function, the caller is responsible for the /// memory previously managed by the `String`. 
/// Decomposes a `String` into its raw components.
///
/// Returns the raw pointer to the underlying data, the length of
/// the string (in bytes), and the allocated capacity of the data
/// (in bytes). These are the same arguments in the same order as
/// the arguments to [`from_raw_parts`].
///
/// After calling this function, the caller is responsible for the
/// memory previously managed by the `String`. The only way to do
/// this is to convert the raw pointer, length, and capacity back
/// into a `String` with the [`from_raw_parts`] function, allowing
/// the destructor to perform the cleanup.
///
/// [`from_raw_parts`]: String::from_raw_parts
///
/// # Examples
///
/// ```
/// #![feature(vec_into_raw_parts)]
/// let s = String::from("hello");
///
/// let (ptr, len, cap) = s.into_raw_parts();
///
/// let rebuilt = unsafe { String::from_raw_parts(ptr, len, cap) };
/// assert_eq!(rebuilt, "hello");
/// ```
#[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")]
pub fn into_raw_parts(self) -> (*mut u8, usize, usize) {
    self.vec.into_raw_parts()
}

/// Creates a new `String` from a length, capacity, and pointer.
///
/// # Safety
///
/// This is highly unsafe, due to the number of invariants that aren't
/// checked:
///
/// * The memory at `buf` needs to have been previously allocated by the
///   same allocator the standard library uses, with a required alignment of exactly 1.
/// * `length` needs to be less than or equal to `capacity`.
/// * `capacity` needs to be the correct value.
/// * The first `length` bytes at `buf` need to be valid UTF-8.
///
/// Violating these may cause problems like corrupting the allocator's
/// internal data structures.
///
/// The ownership of `buf` is effectively transferred to the
/// `String` which may then deallocate, reallocate or change the
/// contents of memory pointed to by the pointer at will. Ensure
/// that nothing else uses the pointer after calling this
/// function.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// use std::mem;
///
/// unsafe {
///     let s = String::from("hello");
///
///     // FIXME Update this when vec_into_raw_parts is stabilized
///     // Prevent automatically dropping the String's data
///     let mut s = mem::ManuallyDrop::new(s);
///
///     let ptr = s.as_mut_ptr();
///     let len = s.len();
///     let capacity = s.capacity();
///
///     let s = String::from_raw_parts(ptr, len, capacity);
///
///     assert_eq!(String::from("hello"), s);
/// }
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_raw_parts(buf: *mut u8, length: usize, capacity: usize) -> String {
    unsafe { String { vec: Vec::from_raw_parts(buf, length, capacity) } }
}

/// Converts a vector of bytes to a `String` without checking that the
/// string contains valid UTF-8.
///
/// See the safe version, [`from_utf8`], for more details.
///
/// [`from_utf8`]: String::from_utf8
///
/// # Safety
///
/// This function is unsafe because it does not check that the bytes passed
/// to it are valid UTF-8. If this constraint is violated, it may cause
/// memory unsafety issues with future users of the `String`, as the rest of
/// the standard library assumes that `String`s are valid UTF-8.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// // some bytes, in a vector
/// let sparkle_heart = vec![240, 159, 146, 150];
///
/// let sparkle_heart = unsafe {
///     String::from_utf8_unchecked(sparkle_heart)
/// };
///
/// assert_eq!("💖", sparkle_heart);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_utf8_unchecked(bytes: Vec<u8>) -> String {
    String { vec: bytes }
}
/// Converts a `String` into a byte vector.
///
/// This consumes the `String`, so we do not need to copy its contents.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let s = String::from("hello");
/// let bytes = s.into_bytes();
///
/// assert_eq!(&[104, 101, 108, 108, 111][..], &bytes[..]);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn into_bytes(self) -> Vec<u8> {
    self.vec
}

/// Extracts a string slice containing the entire `String`.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let s = String::from("foo");
///
/// assert_eq!("foo", s.as_str());
/// ```
#[inline]
#[stable(feature = "string_as_str", since = "1.7.0")]
pub fn as_str(&self) -> &str {
    self
}

/// Converts a `String` into a mutable string slice.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut s = String::from("foobar");
/// let s_mut_str = s.as_mut_str();
///
/// s_mut_str.make_ascii_uppercase();
///
/// assert_eq!("FOOBAR", s_mut_str);
/// ```
#[inline]
#[stable(feature = "string_as_str", since = "1.7.0")]
pub fn as_mut_str(&mut self) -> &mut str {
    self
}

/// Appends a given string slice onto the end of this `String`.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut s = String::from("foo");
///
/// s.push_str("bar");
///
/// assert_eq!("foobar", s);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn push_str(&mut self, string: &str) {
    self.vec.extend_from_slice(string.as_bytes())
}

/// Returns this `String`'s capacity, in bytes.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let s = String::with_capacity(10);
///
/// assert!(s.capacity() >= 10);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn capacity(&self) -> usize {
    self.vec.capacity()
}

/// Ensures that this `String`'s capacity is at least `additional` bytes
/// larger than its length.
///
/// The capacity may be increased by more than `additional` bytes if it
/// chooses, to prevent frequent reallocations.
///
/// If you do not want this "at least" behavior, see the [`reserve_exact`]
/// method.
///
/// # Panics
///
/// Panics if the new capacity overflows [`usize`].
///
/// [`reserve_exact`]: String::reserve_exact
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut s = String::new();
///
/// s.reserve(10);
///
/// assert!(s.capacity() >= 10);
/// ```
///
/// This may not actually increase the capacity:
///
/// ```
/// let mut s = String::with_capacity(10);
/// s.push('a');
/// s.push('b');
///
/// // s now has a length of 2 and a capacity of 10
/// assert_eq!(2, s.len());
/// assert_eq!(10, s.capacity());
///
/// // Since we already have an extra 8 capacity, calling this...
/// s.reserve(8);
///
/// // ... doesn't actually increase.
/// assert_eq!(10, s.capacity());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve(&mut self, additional: usize) {
    self.vec.reserve(additional)
}
/// Ensures that this `String`'s capacity is `additional` bytes
/// larger than its length.
///
/// Consider using the [`reserve`] method unless you absolutely know
/// better than the allocator.
///
/// [`reserve`]: String::reserve
///
/// # Panics
///
/// Panics if the new capacity overflows `usize`.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut s = String::new();
///
/// s.reserve_exact(10);
///
/// assert!(s.capacity() >= 10);
/// ```
///
/// This may not actually increase the capacity:
///
/// ```
/// let mut s = String::with_capacity(10);
/// s.push('a');
/// s.push('b');
///
/// // s now has a length of 2 and a capacity of 10
/// assert_eq!(2, s.len());
/// assert_eq!(10, s.capacity());
///
/// // Since we already have an extra 8 capacity, calling this...
/// s.reserve_exact(8);
///
/// // ... doesn't actually increase.
/// assert_eq!(10, s.capacity());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve_exact(&mut self, additional: usize) {
    self.vec.reserve_exact(additional)
}

/// Tries to reserve capacity for at least `additional` more elements to be inserted
/// in the given `String`. The collection may reserve more space to avoid
/// frequent reallocations. After calling `reserve`, capacity will be
/// greater than or equal to `self.len() + additional`. Does nothing if
/// capacity is already sufficient.
///
/// # Errors
///
/// If the capacity overflows, or the allocator reports a failure, then an error
/// is returned.
///
/// # Examples
///
/// ```
/// #![feature(try_reserve)]
/// use std::collections::TryReserveError;
///
/// fn process_data(data: &str) -> Result<String, TryReserveError> {
///     let mut output = String::new();
///
///     // Pre-reserve the memory, exiting if we can't
///     output.try_reserve(data.len())?;
///
///     // Now we know this can't OOM in the middle of our complex work
///     output.push_str(data);
///
///     Ok(output)
/// }
/// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?");
/// ```
#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
    self.vec.try_reserve(additional)
}

/// Tries to reserve the minimum capacity for exactly `additional` more elements to
/// be inserted in the given `String`. After calling `reserve_exact`,
/// capacity will be greater than or equal to `self.len() + additional`.
/// Does nothing if the capacity is already sufficient.
///
/// Note that the allocator may give the collection more space than it
/// requests. Therefore, capacity can not be relied upon to be precisely
/// minimal. Prefer `reserve` if future insertions are expected.
///
/// # Errors
///
/// If the capacity overflows, or the allocator reports a failure, then an error
/// is returned.
///
/// # Examples
///
/// ```
/// #![feature(try_reserve)]
/// use std::collections::TryReserveError;
///
/// fn process_data(data: &str) -> Result<String, TryReserveError> {
///     let mut output = String::new();
///
///     // Pre-reserve the memory, exiting if we can't
///     output.try_reserve(data.len())?;
///
///     // Now we know this can't OOM in the middle of our complex work
///     output.push_str(data);
///
///     Ok(output)
/// }
/// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?");
/// ```
#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
    self.vec.try_reserve_exact(additional)
}
/// Shrinks the capacity of this `String` to match its length.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut s = String::from("foo");
///
/// s.reserve(100);
/// assert!(s.capacity() >= 100);
///
/// s.shrink_to_fit();
/// assert_eq!(3, s.capacity());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn shrink_to_fit(&mut self) {
    self.vec.shrink_to_fit()
}

/// Shrinks the capacity of this `String` with a lower bound.
///
/// The capacity will remain at least as large as both the length
/// and the supplied value.
///
/// If the current capacity is less than the lower limit, this is a no-op.
///
/// # Examples
///
/// ```
/// #![feature(shrink_to)]
/// let mut s = String::from("foo");
///
/// s.reserve(100);
/// assert!(s.capacity() >= 100);
///
/// s.shrink_to(10);
/// assert!(s.capacity() >= 10);
/// s.shrink_to(0);
/// assert!(s.capacity() >= 3);
/// ```
#[inline]
#[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
pub fn shrink_to(&mut self, min_capacity: usize) {
    self.vec.shrink_to(min_capacity)
}

/// Appends the given [`char`] to the end of this `String`.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut s = String::from("abc");
///
/// s.push('1');
/// s.push('2');
/// s.push('3');
///
/// assert_eq!("abc123", s);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn push(&mut self, ch: char) {
    match ch.len_utf8() {
        1 => self.vec.push(ch as u8),
        _ => self.vec.extend_from_slice(ch.encode_utf8(&mut [0; 4]).as_bytes()),
    }
}

/// Returns a byte slice of this `String`'s contents.
///
/// The inverse of this method is [`from_utf8`].
///
/// [`from_utf8`]: String::from_utf8
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let s = String::from("hello");
///
/// assert_eq!(&[104, 101, 108, 108, 111], s.as_bytes());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn as_bytes(&self) -> &[u8] {
    &self.vec
}

/// Shortens this `String` to the specified length.
///
/// If `new_len` is greater than the string's current length, this has no
/// effect.
///
/// Note that this method has no effect on the allocated capacity
/// of the string.
///
/// # Panics
///
/// Panics if `new_len` does not lie on a [`char`] boundary.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut s = String::from("hello");
///
/// s.truncate(2);
///
/// assert_eq!("he", s);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn truncate(&mut self, new_len: usize) {
    if new_len <= self.len() {
        assert!(self.is_char_boundary(new_len));
        self.vec.truncate(new_len)
    }
}

/// Removes the last character from the string buffer and returns it.
///
/// Returns [`None`] if this `String` is empty.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut s = String::from("foo");
///
/// assert_eq!(s.pop(), Some('o'));
/// assert_eq!(s.pop(), Some('o'));
/// assert_eq!(s.pop(), Some('f'));
///
/// assert_eq!(s.pop(), None);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn pop(&mut self) -> Option<char> {
    let ch = self.chars().rev().next()?;
    let newlen = self.len() - ch.len_utf8();
    unsafe {
        self.vec.set_len(newlen);
    }
    Some(ch)
}
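// Illustrative sketch (not part of the upstream source): `truncate` and `pop`
// work in bytes, not `char`s, so multi-byte characters show why the
// `is_char_boundary` assertion above exists.
#[cfg(test)]
#[allow(dead_code)]
fn example_truncate_boundaries() {
    let mut s = String::from("aé"); // 'a' is 1 byte, 'é' is 2 bytes
    assert_eq!(s.len(), 3);
    // Byte index 1 is a `char` boundary, so this succeeds; `s.truncate(2)`
    // would fall inside 'é' and panic rather than produce invalid UTF-8.
    s.truncate(1);
    assert_eq!(s, "a");
}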
/// Removes a [`char`] from this `String` at a byte position and returns it.
///
/// This is an *O*(*n*) operation, as it requires copying every element in the
/// buffer.
///
/// # Panics
///
/// Panics if `idx` is larger than or equal to the `String`'s length,
/// or if it does not lie on a [`char`] boundary.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut s = String::from("foo");
///
/// assert_eq!(s.remove(0), 'f');
/// assert_eq!(s.remove(1), 'o');
/// assert_eq!(s.remove(0), 'o');
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove(&mut self, idx: usize) -> char {
    let ch = match self[idx..].chars().next() {
        Some(ch) => ch,
        None => panic!("cannot remove a char from the end of a string"),
    };

    let next = idx + ch.len_utf8();
    let len = self.len();
    unsafe {
        ptr::copy(self.vec.as_ptr().add(next), self.vec.as_mut_ptr().add(idx), len - next);
        self.vec.set_len(len - (next - idx));
    }
    ch
}

/// Removes all matches of pattern `pat` in the `String`.
///
/// # Examples
///
/// ```
/// #![feature(string_remove_matches)]
/// let mut s = String::from("Trees are not green, the sky is not blue.");
/// s.remove_matches("not ");
/// assert_eq!("Trees are green, the sky is blue.", s);
/// ```
///
/// Matches will be detected and removed iteratively, so in cases where
/// patterns overlap, only the first pattern will be removed:
///
/// ```
/// #![feature(string_remove_matches)]
/// let mut s = String::from("banana");
/// s.remove_matches("ana");
/// assert_eq!("bna", s);
/// ```
#[unstable(feature = "string_remove_matches", reason = "new API", issue = "72826")]
pub fn remove_matches<'a, P>(&'a mut self, pat: P)
where
    P: for<'x> Pattern<'x>,
{
    use core::str::pattern::Searcher;

    let matches = {
        let mut searcher = pat.into_searcher(self);
        let mut matches = Vec::new();

        while let Some(m) = searcher.next_match() {
            matches.push(m);
        }

        matches
    };

    let len = self.len();
    let mut shrunk_by = 0;

    // SAFETY: start and end will be on utf8 byte boundaries per
    // the Searcher docs
    unsafe {
        for (start, end) in matches {
            ptr::copy(
                self.vec.as_mut_ptr().add(end - shrunk_by),
                self.vec.as_mut_ptr().add(start - shrunk_by),
                len - end,
            );
            shrunk_by += end - start;
        }
        self.vec.set_len(len - shrunk_by);
    }
}
/// Retains only the characters specified by the predicate.
///
/// In other words, remove all characters `c` such that `f(c)` returns `false`.
/// This method operates in place, visiting each character exactly once in the
/// original order, and preserves the order of the retained characters.
///
/// # Examples
///
/// ```
/// let mut s = String::from("f_o_ob_ar");
///
/// s.retain(|c| c != '_');
///
/// assert_eq!(s, "foobar");
/// ```
///
/// The exact order may be useful for tracking external state, like an index.
///
/// ```
/// let mut s = String::from("abcde");
/// let keep = [false, true, true, false, true];
/// let mut i = 0;
/// s.retain(|_| (keep[i], i += 1).0);
/// assert_eq!(s, "bce");
/// ```
#[inline]
#[stable(feature = "string_retain", since = "1.26.0")]
pub fn retain<F>(&mut self, mut f: F)
where
    F: FnMut(char) -> bool,
{
    struct SetLenOnDrop<'a> {
        s: &'a mut String,
        idx: usize,
        del_bytes: usize,
    }

    impl<'a> Drop for SetLenOnDrop<'a> {
        fn drop(&mut self) {
            let new_len = self.idx - self.del_bytes;
            debug_assert!(new_len <= self.s.len());
            unsafe { self.s.vec.set_len(new_len) };
        }
    }

    let len = self.len();
    let mut guard = SetLenOnDrop { s: self, idx: 0, del_bytes: 0 };

    while guard.idx < len {
        let ch = unsafe { guard.s.get_unchecked(guard.idx..len).chars().next().unwrap() };
        let ch_len = ch.len_utf8();

        if !f(ch) {
            guard.del_bytes += ch_len;
        } else if guard.del_bytes > 0 {
            unsafe {
                ptr::copy(
                    guard.s.vec.as_ptr().add(guard.idx),
                    guard.s.vec.as_mut_ptr().add(guard.idx - guard.del_bytes),
                    ch_len,
                );
            }
        }

        // Point idx to the next char
        guard.idx += ch_len;
    }

    drop(guard);
}

/// Inserts a character into this `String` at a byte position.
///
/// This is an *O*(*n*) operation as it requires copying every element in the
/// buffer.
///
/// # Panics
///
/// Panics if `idx` is larger than the `String`'s length, or if it does not
/// lie on a [`char`] boundary.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut s = String::with_capacity(3);
///
/// s.insert(0, 'f');
/// s.insert(1, 'o');
/// s.insert(2, 'o');
///
/// assert_eq!("foo", s);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(&mut self, idx: usize, ch: char) {
    assert!(self.is_char_boundary(idx));
    let mut bits = [0; 4];
    let bits = ch.encode_utf8(&mut bits).as_bytes();

    unsafe {
        self.insert_bytes(idx, bits);
    }
}

unsafe fn insert_bytes(&mut self, idx: usize, bytes: &[u8]) {
    let len = self.len();
    let amt = bytes.len();
    self.vec.reserve(amt);

    unsafe {
        ptr::copy(self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx + amt), len - idx);
        ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt);
        self.vec.set_len(len + amt);
    }
}

/// Inserts a string slice into this `String` at a byte position.
///
/// This is an *O*(*n*) operation as it requires copying every element in the
/// buffer.
///
/// # Panics
///
/// Panics if `idx` is larger than the `String`'s length, or if it does not
/// lie on a [`char`] boundary.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut s = String::from("bar");
///
/// s.insert_str(0, "foo");
///
/// assert_eq!("foobar", s);
/// ```
#[inline]
#[stable(feature = "insert_str", since = "1.16.0")]
pub fn insert_str(&mut self, idx: usize, string: &str) {
    assert!(self.is_char_boundary(idx));

    unsafe {
        self.insert_bytes(idx, string.as_bytes());
    }
}
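// Illustrative sketch (not part of the upstream source): the `SetLenOnDrop`
// guard in `retain` above is an instance of the general drop-guard pattern,
// shown here in miniature. `Drop` runs even during unwinding, so if the user
// closure panics the length still describes only fully-processed bytes.
#[cfg(test)]
#[allow(dead_code)]
fn example_drop_guard() {
    struct Guard<'a> {
        committed: &'a mut usize,
        progress: usize,
    }
    impl Drop for Guard<'_> {
        fn drop(&mut self) {
            // Runs on both normal exit and panic, so `committed` can never
            // be observed in a half-updated state.
            *self.committed = self.progress;
        }
    }
    let mut committed = 0;
    {
        let mut g = Guard { committed: &mut committed, progress: 0 };
        g.progress = 5;
    } // the guard's `drop` fires here
    assert_eq!(committed, 5);
}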
/// Returns a mutable reference to the contents of this `String`.
///
/// # Safety
///
/// This function is unsafe because it does not check that the bytes passed
/// to it are valid UTF-8. If this constraint is violated, it may cause
/// memory unsafety issues with future users of the `String`, as the rest of
/// the standard library assumes that `String`s are valid UTF-8.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut s = String::from("hello");
///
/// unsafe {
///     let vec = s.as_mut_vec();
///     assert_eq!(&[104, 101, 108, 108, 111][..], &vec[..]);
///
///     vec.reverse();
/// }
/// assert_eq!(s, "olleh");
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn as_mut_vec(&mut self) -> &mut Vec<u8> {
    &mut self.vec
}

/// Returns the length of this `String`, in bytes, not [`char`]s or
/// graphemes. In other words, it may not be what a human considers the
/// length of the string.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let a = String::from("foo");
/// assert_eq!(a.len(), 3);
///
/// let fancy_f = String::from("ƒoo");
/// assert_eq!(fancy_f.len(), 4);
/// assert_eq!(fancy_f.chars().count(), 3);
/// ```
#[doc(alias = "length")]
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> usize {
    self.vec.len()
}

/// Returns `true` if this `String` has a length of zero, and `false` otherwise.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut v = String::new();
/// assert!(v.is_empty());
///
/// v.push('a');
/// assert!(!v.is_empty());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_empty(&self) -> bool {
    self.len() == 0
}

/// Splits the string into two at the given byte index.
///
/// Returns a newly allocated `String`. `self` contains bytes `[0, at)`, and
/// the returned `String` contains bytes `[at, len)`. `at` must be on the
/// boundary of a UTF-8 code point.
///
/// Note that the capacity of `self` does not change.
///
/// # Panics
///
/// Panics if `at` is not on a UTF-8 code point boundary, or if it is beyond the last
/// code point of the string.
///
/// # Examples
///
/// ```
/// # fn main() {
/// let mut hello = String::from("Hello, World!");
/// let world = hello.split_off(7);
/// assert_eq!(hello, "Hello, ");
/// assert_eq!(world, "World!");
/// # }
/// ```
#[inline]
#[stable(feature = "string_split_off", since = "1.16.0")]
#[must_use = "use `.truncate()` if you don't need the other half"]
pub fn split_off(&mut self, at: usize) -> String {
    assert!(self.is_char_boundary(at));
    let other = self.vec.split_off(at);
    unsafe { String::from_utf8_unchecked(other) }
}

/// Truncates this `String`, removing all contents.
///
/// While this means the `String` will have a length of zero, it does not
/// touch its capacity.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut s = String::from("foo");
///
/// s.clear();
///
/// assert!(s.is_empty());
/// assert_eq!(0, s.len());
/// assert_eq!(3, s.capacity());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn clear(&mut self) {
    self.vec.clear()
}
/// Creates a draining iterator that removes the specified range in the `String`
/// and yields the removed `chars`.
///
/// Note: The element range is removed even if the iterator is not
/// consumed until the end.
///
/// # Panics
///
/// Panics if the starting point or end point do not lie on a [`char`]
/// boundary, or if they're out of bounds.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut s = String::from("α is alpha, β is beta");
/// let beta_offset = s.find('β').unwrap_or(s.len());
///
/// // Remove the range up until the β from the string
/// let t: String = s.drain(..beta_offset).collect();
/// assert_eq!(t, "α is alpha, ");
/// assert_eq!(s, "β is beta");
///
/// // A full range clears the string
/// s.drain(..);
/// assert_eq!(s, "");
/// ```
#[stable(feature = "drain", since = "1.6.0")]
pub fn drain<R>(&mut self, range: R) -> Drain<'_>
where
    R: RangeBounds<usize>,
{
    // Memory safety
    //
    // The String version of Drain does not have the memory safety issues
    // of the vector version. The data is just plain bytes.
    // Because the range removal happens in Drop, if the Drain iterator is leaked,
    // the removal will not happen.
    let Range { start, end } = slice::range(range, ..self.len());
    assert!(self.is_char_boundary(start));
    assert!(self.is_char_boundary(end));

    // Take out two simultaneous borrows. The &mut String won't be accessed
    // until iteration is over, in Drop.
    let self_ptr = self as *mut _;
    // SAFETY: `slice::range` and `is_char_boundary` do the appropriate bounds checks.
    let chars_iter = unsafe { self.get_unchecked(start..end) }.chars();

    Drain { start, end, iter: chars_iter, string: self_ptr }
}

/// Removes the specified range in the string,
/// and replaces it with the given string.
/// The given string doesn't need to be the same length as the range.
///
/// # Panics
///
/// Panics if the starting point or end point do not lie on a [`char`]
/// boundary, or if they're out of bounds.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut s = String::from("α is alpha, β is beta");
/// let beta_offset = s.find('β').unwrap_or(s.len());
///
/// // Replace the range up until the β from the string
/// s.replace_range(..beta_offset, "Α is capital alpha; ");
/// assert_eq!(s, "Α is capital alpha; β is beta");
/// ```
#[stable(feature = "splice", since = "1.27.0")]
pub fn replace_range<R>(&mut self, range: R, replace_with: &str)
where
    R: RangeBounds<usize>,
{
    // Memory safety
    //
    // `replace_range` does not have the memory safety issues of the vector
    // version of `Splice`. The data is just plain bytes.

    // WARNING: Inlining this variable would be unsound (#81138)
    let start = range.start_bound();
    match start {
        Included(&n) => assert!(self.is_char_boundary(n)),
        Excluded(&n) => assert!(self.is_char_boundary(n + 1)),
        Unbounded => {}
    };
    // WARNING: Inlining this variable would be unsound (#81138)
    let end = range.end_bound();
    match end {
        Included(&n) => assert!(self.is_char_boundary(n + 1)),
        Excluded(&n) => assert!(self.is_char_boundary(n)),
        Unbounded => {}
    };

    // Using `range` again would be unsound (#81138)
    // We assume the bounds reported by `range` remain the same, but
    // an adversarial implementation could change between calls
    unsafe { self.as_mut_vec() }.splice((start, end), replace_with.bytes());
}

/// Converts this `String` into a [`Box`]`<`[`str`]`>`.
///
/// This will drop any excess capacity.
///
/// [`str`]: prim@str
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let s = String::from("hello");
///
/// let b = s.into_boxed_str();
/// ```
#[stable(feature = "box_str", since = "1.4.0")]
#[inline]
pub fn into_boxed_str(self) -> Box<str> {
    let slice = self.vec.into_boxed_slice();
    unsafe { from_boxed_utf8_unchecked(slice) }
}
}
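// Illustrative sketch (not part of the upstream source): as the comment in
// `drain` notes, the removal itself happens in `Drop`, so a leaked `Drain`
// leaves the string untouched; this is safe either way, because the data is
// plain bytes.
#[cfg(test)]
mod drain_leak_example {
    #[test]
    fn leaked_drain_removes_nothing() {
        let mut s = String::from("abc");
        // Forgetting the `Drain` skips its destructor, and with it the
        // removal of the drained range.
        core::mem::forget(s.drain(..));
        assert_eq!(s, "abc");
    }
}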
impl FromUtf8Error {
    /// Returns a slice of the [`u8`] bytes that were attempted to be converted
    /// into a `String`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // some invalid bytes, in a vector
    /// let bytes = vec![0, 159];
    ///
    /// let value = String::from_utf8(bytes);
    ///
    /// assert_eq!(&[0, 159], value.unwrap_err().as_bytes());
    /// ```
    #[stable(feature = "from_utf8_error_as_bytes", since = "1.26.0")]
    pub fn as_bytes(&self) -> &[u8] {
        &self.bytes[..]
    }

    /// Returns the bytes that were attempted to convert to a `String`.
    ///
    /// This method is carefully constructed to avoid allocation. It will
    /// consume the error, moving out the bytes, so that a copy of the bytes
    /// does not need to be made.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // some invalid bytes, in a vector
    /// let bytes = vec![0, 159];
    ///
    /// let value = String::from_utf8(bytes);
    ///
    /// assert_eq!(vec![0, 159], value.unwrap_err().into_bytes());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn into_bytes(self) -> Vec<u8> {
        self.bytes
    }

    /// Fetches a `Utf8Error` to get more details about the conversion failure.
    ///
    /// The [`Utf8Error`] type provided by [`std::str`] represents an error that may
    /// occur when converting a slice of [`u8`]s to a [`&str`]. In this sense, it's
    /// an analogue to `FromUtf8Error`. See its documentation for more details
    /// on using it.
    ///
    /// [`std::str`]: core::str
    /// [`&str`]: prim@str
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // some invalid bytes, in a vector
    /// let bytes = vec![0, 159];
    ///
    /// let error = String::from_utf8(bytes).unwrap_err().utf8_error();
    ///
    /// // the first byte is invalid here
    /// assert_eq!(1, error.valid_up_to());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn utf8_error(&self) -> Utf8Error {
        self.error
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Display for FromUtf8Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&self.error, f)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Display for FromUtf16Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt("invalid utf-16: lone surrogate found", f)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl Clone for String {
    fn clone(&self) -> Self {
        String { vec: self.vec.clone() }
    }

    fn clone_from(&mut self, source: &Self) {
        self.vec.clone_from(&source.vec);
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl FromIterator<char> for String {
    fn from_iter<I: IntoIterator<Item = char>>(iter: I) -> String {
        let mut buf = String::new();
        buf.extend(iter);
        buf
    }
}

#[stable(feature = "string_from_iter_by_ref", since = "1.17.0")]
impl<'a> FromIterator<&'a char> for String {
    fn from_iter<I: IntoIterator<Item = &'a char>>(iter: I) -> String {
        let mut buf = String::new();
        buf.extend(iter);
        buf
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> FromIterator<&'a str> for String {
    fn from_iter<I: IntoIterator<Item = &'a str>>(iter: I) -> String {
        let mut buf = String::new();
        buf.extend(iter);
        buf
    }
}
#[stable(feature = "extend_string", since = "1.4.0")]
impl FromIterator<String> for String {
    fn from_iter<I: IntoIterator<Item = String>>(iter: I) -> String {
        let mut iterator = iter.into_iter();

        // Because we're iterating over `String`s, we can avoid at least
        // one allocation by getting the first string from the iterator
        // and appending to it all the subsequent strings.
        match iterator.next() {
            None => String::new(),
            Some(mut buf) => {
                buf.extend(iterator);
                buf
            }
        }
    }
}

#[stable(feature = "box_str2", since = "1.45.0")]
impl FromIterator<Box<str>> for String {
    fn from_iter<I: IntoIterator<Item = Box<str>>>(iter: I) -> String {
        let mut buf = String::new();
        buf.extend(iter);
        buf
    }
}

#[stable(feature = "herd_cows", since = "1.19.0")]
impl<'a> FromIterator<Cow<'a, str>> for String {
    fn from_iter<I: IntoIterator<Item = Cow<'a, str>>>(iter: I) -> String {
        let mut iterator = iter.into_iter();

        // Because we're iterating over CoWs, we can (potentially) avoid at least
        // one allocation by getting the first item and appending to it all the
        // subsequent items.
        match iterator.next() {
            None => String::new(),
            Some(cow) => {
                let mut buf = cow.into_owned();
                buf.extend(iterator);
                buf
            }
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl Extend<char> for String {
    fn extend<I: IntoIterator<Item = char>>(&mut self, iter: I) {
        let iterator = iter.into_iter();
        let (lower_bound, _) = iterator.size_hint();
        self.reserve(lower_bound);
        iterator.for_each(move |c| self.push(c));
    }

    #[inline]
    fn extend_one(&mut self, c: char) {
        self.push(c);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}

#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a> Extend<&'a char> for String {
    fn extend<I: IntoIterator<Item = &'a char>>(&mut self, iter: I) {
        self.extend(iter.into_iter().cloned());
    }

    #[inline]
    fn extend_one(&mut self, &c: &'a char) {
        self.push(c);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> Extend<&'a str> for String {
    fn extend<I: IntoIterator<Item = &'a str>>(&mut self, iter: I) {
        iter.into_iter().for_each(move |s| self.push_str(s));
    }

    #[inline]
    fn extend_one(&mut self, s: &'a str) {
        self.push_str(s);
    }
}

#[stable(feature = "box_str2", since = "1.45.0")]
impl Extend<Box<str>> for String {
    fn extend<I: IntoIterator<Item = Box<str>>>(&mut self, iter: I) {
        iter.into_iter().for_each(move |s| self.push_str(&s));
    }
}

#[stable(feature = "extend_string", since = "1.4.0")]
impl Extend<String> for String {
    fn extend<I: IntoIterator<Item = String>>(&mut self, iter: I) {
        iter.into_iter().for_each(move |s| self.push_str(&s));
    }

    #[inline]
    fn extend_one(&mut self, s: String) {
        self.push_str(&s);
    }
}

#[stable(feature = "herd_cows", since = "1.19.0")]
impl<'a> Extend<Cow<'a, str>> for String {
    fn extend<I: IntoIterator<Item = Cow<'a, str>>>(&mut self, iter: I) {
        iter.into_iter().for_each(move |s| self.push_str(&s));
    }

    #[inline]
    fn extend_one(&mut self, s: Cow<'a, str>) {
        self.push_str(&s);
    }
}
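// Illustrative sketch (not part of the upstream source): the impls above are
// what let `collect` and `extend` accept chars, string slices, owned strings,
// boxed strings, and copy-on-write strings uniformly.
#[cfg(test)]
mod collect_extend_example {
    #[test]
    fn collect_and_extend() {
        // FromIterator<&str> concatenates the pieces into one `String`.
        let hello: String = ["he", "ll", "o"].iter().copied().collect();
        assert_eq!(hello, "hello");

        // Extend<char> reserves from the iterator's `size_hint`, then pushes.
        let mut s = String::from("abc");
        s.extend("def".chars());
        assert_eq!(s, "abcdef");
    }
}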
/// A convenience impl that delegates to the impl for `&str`.
///
/// # Examples
///
/// ```
/// assert_eq!(String::from("Hello world").find("world"), Some(6));
/// ```
#[unstable(
    feature = "pattern",
    reason = "API not fully fleshed out and ready to be stabilized",
    issue = "27721"
)]
impl<'a, 'b> Pattern<'a> for &'b String {
    type Searcher = <&'b str as Pattern<'a>>::Searcher;

    fn into_searcher(self, haystack: &'a str) -> <&'b str as Pattern<'a>>::Searcher {
        self[..].into_searcher(haystack)
    }

    #[inline]
    fn is_contained_in(self, haystack: &'a str) -> bool {
        self[..].is_contained_in(haystack)
    }

    #[inline]
    fn is_prefix_of(self, haystack: &'a str) -> bool {
        self[..].is_prefix_of(haystack)
    }

    #[inline]
    fn strip_prefix_of(self, haystack: &'a str) -> Option<&'a str> {
        self[..].strip_prefix_of(haystack)
    }

    #[inline]
    fn is_suffix_of(self, haystack: &'a str) -> bool {
        self[..].is_suffix_of(haystack)
    }

    #[inline]
    fn strip_suffix_of(self, haystack: &'a str) -> Option<&'a str> {
        self[..].strip_suffix_of(haystack)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl PartialEq for String {
    #[inline]
    fn eq(&self, other: &String) -> bool {
        PartialEq::eq(&self[..], &other[..])
    }
    #[inline]
    fn ne(&self, other: &String) -> bool {
        PartialEq::ne(&self[..], &other[..])
    }
}

macro_rules! impl_eq {
    ($lhs:ty, $rhs: ty) => {
        #[stable(feature = "rust1", since = "1.0.0")]
        #[allow(unused_lifetimes)]
        impl<'a, 'b> PartialEq<$rhs> for $lhs {
            #[inline]
            fn eq(&self, other: &$rhs) -> bool {
                PartialEq::eq(&self[..], &other[..])
            }
            #[inline]
            fn ne(&self, other: &$rhs) -> bool {
                PartialEq::ne(&self[..], &other[..])
            }
        }

        #[stable(feature = "rust1", since = "1.0.0")]
        #[allow(unused_lifetimes)]
        impl<'a, 'b> PartialEq<$lhs> for $rhs {
            #[inline]
            fn eq(&self, other: &$lhs) -> bool {
                PartialEq::eq(&self[..], &other[..])
            }
            #[inline]
            fn ne(&self, other: &$lhs) -> bool {
                PartialEq::ne(&self[..], &other[..])
            }
        }
    };
}

impl_eq! { String, str }
impl_eq! { String, &'a str }
impl_eq! { Cow<'a, str>, str }
impl_eq! { Cow<'a, str>, &'b str }
impl_eq! { Cow<'a, str>, String }

#[stable(feature = "rust1", since = "1.0.0")]
impl Default for String {
    /// Creates an empty `String`.
    #[inline]
    fn default() -> String {
        String::new()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Display for String {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Debug for String {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl hash::Hash for String {
    #[inline]
    fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
        (**self).hash(hasher)
    }
}
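// Illustrative sketch (not part of the upstream source): the `Pattern` impl
// above lets a `&String` be passed directly wherever a pattern is expected,
// with no explicit `.as_str()` call.
#[cfg(test)]
mod string_pattern_example {
    #[test]
    fn string_ref_as_pattern() {
        let needle = String::from("world");
        // `find` and `contains` take any `P: Pattern`; `&String` delegates
        // to the `&str` searcher.
        assert_eq!("Hello world".find(&needle), Some(6));
        assert!("Hello world".contains(&needle));
    }
}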
/// Implements the `+` operator for concatenating two strings.
///
/// This consumes the `String` on the left-hand side and re-uses its buffer (growing it if
/// necessary). This is done to avoid allocating a new `String` and copying the entire contents on
/// every operation, which would lead to *O*(*n*^2) running time when building an *n*-byte string by
/// repeated concatenation.
///
/// The string on the right-hand side is only borrowed; its contents are copied into the returned
/// `String`.
///
/// # Examples
///
/// Concatenating two `String`s takes the first by value and borrows the second:
///
/// ```
/// let a = String::from("hello");
/// let b = String::from(" world");
/// let c = a + &b;
/// // `a` is moved and can no longer be used here.
/// ```
///
/// If you want to keep using the first `String`, you can clone it and append to the clone instead:
///
/// ```
/// let a = String::from("hello");
/// let b = String::from(" world");
/// let c = a.clone() + &b;
/// // `a` is still valid here.
/// ```
///
/// Concatenating `&str` slices can be done by converting the first to a `String`:
///
/// ```
/// let a = "hello";
/// let b = " world";
/// let c = a.to_string() + b;
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
impl Add<&str> for String {
    type Output = String;

    #[inline]
    fn add(mut self, other: &str) -> String {
        self.push_str(other);
        self
    }
}

/// Implements the `+=` operator for appending to a `String`.
///
/// This has the same behavior as the [`push_str`][String::push_str] method.
#[stable(feature = "stringaddassign", since = "1.12.0")]
impl AddAssign<&str> for String {
    #[inline]
    fn add_assign(&mut self, other: &str) {
        self.push_str(other);
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::Range<usize>> for String {
    type Output = str;

    #[inline]
    fn index(&self, index: ops::Range<usize>) -> &str {
        &self[..][index]
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::RangeTo<usize>> for String {
    type Output = str;

    #[inline]
    fn index(&self, index: ops::RangeTo<usize>) -> &str {
        &self[..][index]
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::RangeFrom<usize>> for String {
    type Output = str;

    #[inline]
    fn index(&self, index: ops::RangeFrom<usize>) -> &str {
        &self[..][index]
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::RangeFull> for String {
    type Output = str;

    #[inline]
    fn index(&self, _index: ops::RangeFull) -> &str {
        unsafe { str::from_utf8_unchecked(&self.vec) }
    }
}

#[stable(feature = "inclusive_range", since = "1.26.0")]
impl ops::Index<ops::RangeInclusive<usize>> for String {
    type Output = str;

    #[inline]
    fn index(&self, index: ops::RangeInclusive<usize>) -> &str {
        Index::index(&**self, index)
    }
}

#[stable(feature = "inclusive_range", since = "1.26.0")]
impl ops::Index<ops::RangeToInclusive<usize>> for String {
    type Output = str;

    #[inline]
    fn index(&self, index: ops::RangeToInclusive<usize>) -> &str {
        Index::index(&**self, index)
    }
}

#[stable(feature = "derefmut_for_string", since = "1.3.0")]
impl ops::IndexMut<ops::Range<usize>> for String {
    #[inline]
    fn index_mut(&mut self, index: ops::Range<usize>) -> &mut str {
        &mut self[..][index]
    }
}

#[stable(feature = "derefmut_for_string", since = "1.3.0")]
impl ops::IndexMut<ops::RangeTo<usize>> for String {
    #[inline]
    fn index_mut(&mut self, index: ops::RangeTo<usize>) -> &mut str {
        &mut self[..][index]
    }
}

#[stable(feature = "derefmut_for_string", since = "1.3.0")]
impl ops::IndexMut<ops::RangeFrom<usize>> for String {
    #[inline]
    fn index_mut(&mut self, index: ops::RangeFrom<usize>) -> &mut str {
        &mut self[..][index]
    }
}

#[stable(feature = "derefmut_for_string", since = "1.3.0")]
impl ops::IndexMut<ops::RangeFull> for String {
    #[inline]
    fn index_mut(&mut self, _index: ops::RangeFull) -> &mut str {
        unsafe { str::from_utf8_unchecked_mut(&mut *self.vec) }
    }
}

#[stable(feature = "inclusive_range", since = "1.26.0")]
impl ops::IndexMut<ops::RangeInclusive<usize>> for String {
    #[inline]
    fn index_mut(&mut self, index: ops::RangeInclusive<usize>) -> &mut str {
        IndexMut::index_mut(&mut **self, index)
    }
}

#[stable(feature = "inclusive_range", since = "1.26.0")]
impl ops::IndexMut<ops::RangeToInclusive<usize>> for String {
    #[inline]
    fn index_mut(&mut self, index: ops::RangeToInclusive<usize>) -> &mut str {
        IndexMut::index_mut(&mut **self, index)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Deref for String {
    type Target = str;

    #[inline]
    fn deref(&self) -> &str {
        unsafe { str::from_utf8_unchecked(&self.vec) }
    }
}
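// Illustrative sketch (not part of the upstream source): because `Add`
// consumes the left-hand `String` and appends in place, folding with `+`
// reuses one growing buffer and stays linear rather than quadratic.
#[cfg(test)]
mod add_example {
    #[test]
    fn fold_with_add_reuses_the_buffer() {
        let parts = ["a", "b", "c", "d"];
        // The accumulator is moved through each step; no per-step `String`
        // is allocated, only occasional buffer growth.
        let joined = parts.iter().fold(String::new(), |acc, &p| acc + p);
        assert_eq!(joined, "abcd");
    }
}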
#[stable(feature = "derefmut_for_string", since = "1.3.0")]
impl ops::DerefMut for String {
    #[inline]
    fn deref_mut(&mut self) -> &mut str {
        unsafe { str::from_utf8_unchecked_mut(&mut *self.vec) }
    }
}

/// A type alias for [`Infallible`].
///
/// This alias exists for backwards compatibility, and may be eventually deprecated.
///
/// [`Infallible`]: core::convert::Infallible
#[stable(feature = "str_parse_error", since = "1.5.0")]
pub type ParseError = core::convert::Infallible;

#[stable(feature = "rust1", since = "1.0.0")]
impl FromStr for String {
    type Err = core::convert::Infallible;
    #[inline]
    fn from_str(s: &str) -> Result<String, Self::Err> {
        Ok(String::from(s))
    }
}

/// A trait for converting a value to a `String`.
///
/// This trait is automatically implemented for any type which implements the
/// [`Display`] trait. As such, `ToString` shouldn't be implemented directly:
/// [`Display`] should be implemented instead, and you get the `ToString`
/// implementation for free.
///
/// [`Display`]: fmt::Display
#[cfg_attr(not(test), rustc_diagnostic_item = "ToString")]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait ToString {
    /// Converts the given value to a `String`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let i = 5;
    /// let five = String::from("5");
    ///
    /// assert_eq!(five, i.to_string());
    /// ```
    #[rustc_conversion_suggestion]
    #[stable(feature = "rust1", since = "1.0.0")]
    fn to_string(&self) -> String;
}

/// # Panics
///
/// In this implementation, the `to_string` method panics
/// if the `Display` implementation returns an error.
/// This indicates an incorrect `Display` implementation
/// since `fmt::Write for String` never returns an error itself.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Display + ?Sized> ToString for T {
    // A common guideline is to not inline generic functions. However,
    // removing `#[inline]` from this method causes non-negligible regressions.
    // See the last attempt to remove it for details.
    #[inline]
    default fn to_string(&self) -> String {
        use fmt::Write;
        let mut buf = String::new();
        buf.write_fmt(format_args!("{}", self))
            .expect("a Display implementation returned an error unexpectedly");
        buf
    }
}

#[stable(feature = "char_to_string_specialization", since = "1.46.0")]
impl ToString for char {
    #[inline]
    fn to_string(&self) -> String {
        String::from(self.encode_utf8(&mut [0; 4]))
    }
}

#[stable(feature = "str_to_string_specialization", since = "1.9.0")]
impl ToString for str {
    #[inline]
    fn to_string(&self) -> String {
        String::from(self)
    }
}

#[stable(feature = "cow_str_to_string_specialization", since = "1.17.0")]
impl ToString for Cow<'_, str> {
    #[inline]
    fn to_string(&self) -> String {
        self[..].to_owned()
    }
}

#[stable(feature = "string_to_string_specialization", since = "1.17.0")]
impl ToString for String {
    #[inline]
    fn to_string(&self) -> String {
        self.to_owned()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl AsRef<str> for String {
    #[inline]
    fn as_ref(&self) -> &str {
        self
    }
}

#[stable(feature = "string_as_mut", since = "1.43.0")]
impl AsMut<str> for String {
    #[inline]
    fn as_mut(&mut self) -> &mut str {
        self
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl AsRef<[u8]> for String {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_bytes()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl From<&str> for String {
    #[inline]
    fn from(s: &str) -> String {
        s.to_owned()
    }
}
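// Illustrative sketch (not part of the upstream source): implementing
// `Display` is all a user type needs; the blanket impl above then provides
// `to_string` for free. The `Point` type here is hypothetical.
#[cfg(test)]
mod to_string_example {
    use core::fmt;

    struct Point {
        x: i32,
        y: i32,
    }

    impl fmt::Display for Point {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "({}, {})", self.x, self.y)
        }
    }

    #[test]
    fn display_gives_to_string() {
        // No hand-written `ToString` impl; the blanket impl applies.
        assert_eq!(Point { x: 1, y: 2 }.to_string(), "(1, 2)");
    }
}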
#[stable(feature = "from_mut_str_for_string", since = "1.44.0")]
impl From<&mut str> for String {
    /// Converts a `&mut str` into a `String`.
    ///
    /// The result is allocated on the heap.
    #[inline]
    fn from(s: &mut str) -> String {
        s.to_owned()
    }
}

#[stable(feature = "from_ref_string", since = "1.35.0")]
impl From<&String> for String {
    #[inline]
    fn from(s: &String) -> String {
        s.clone()
    }
}

// note: test pulls in libstd, which causes errors here
#[cfg(not(test))]
#[stable(feature = "string_from_box", since = "1.18.0")]
impl From<Box<str>> for String {
    /// Converts the given boxed `str` slice to a `String`.
    /// It is notable that the `str` slice is owned.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s1: String = String::from("hello world");
    /// let s2: Box<str> = s1.into_boxed_str();
    /// let s3: String = String::from(s2);
    ///
    /// assert_eq!("hello world", s3)
    /// ```
    fn from(s: Box<str>) -> String {
        s.into_string()
    }
}

#[stable(feature = "box_from_str", since = "1.20.0")]
impl From<String> for Box<str> {
    /// Converts the given `String` to a boxed `str` slice that is owned.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s1: String = String::from("hello world");
    /// let s2: Box<str> = Box::from(s1);
    /// let s3: String = String::from(s2);
    ///
    /// assert_eq!("hello world", s3)
    /// ```
    fn from(s: String) -> Box<str> {
        s.into_boxed_str()
    }
}

#[stable(feature = "string_from_cow_str", since = "1.14.0")]
impl<'a> From<Cow<'a, str>> for String {
    fn from(s: Cow<'a, str>) -> String {
        s.into_owned()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> From<&'a str> for Cow<'a, str> {
    /// Converts a string slice into a Borrowed variant.
    /// No heap allocation is performed, and the string
    /// is not copied.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::borrow::Cow;
    /// assert_eq!(Cow::from("eggplant"), Cow::Borrowed("eggplant"));
    /// ```
    #[inline]
    fn from(s: &'a str) -> Cow<'a, str> {
        Cow::Borrowed(s)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> From<String> for Cow<'a, str> {
    /// Converts a `String` into an Owned variant.
    /// No heap allocation is performed, and the string
    /// is not copied.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::borrow::Cow;
    /// let s = "eggplant".to_string();
    /// let s2 = "eggplant".to_string();
    /// assert_eq!(Cow::from(s), Cow::<'static, str>::Owned(s2));
    /// ```
    #[inline]
    fn from(s: String) -> Cow<'a, str> {
        Cow::Owned(s)
    }
}

#[stable(feature = "cow_from_string_ref", since = "1.28.0")]
impl<'a> From<&'a String> for Cow<'a, str> {
    /// Converts a `String` reference into a Borrowed variant.
    /// No heap allocation is performed, and the string
    /// is not copied.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::borrow::Cow;
    /// let s = "eggplant".to_string();
    /// assert_eq!(Cow::from(&s), Cow::Borrowed("eggplant"));
    /// ```
    #[inline]
    fn from(s: &'a String) -> Cow<'a, str> {
        Cow::Borrowed(s.as_str())
    }
}

#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
impl<'a> FromIterator<char> for Cow<'a, str> {
    fn from_iter<I: IntoIterator<Item = char>>(it: I) -> Cow<'a, str> {
        Cow::Owned(FromIterator::from_iter(it))
    }
}

#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
impl<'a, 'b> FromIterator<&'b str> for Cow<'a, str> {
    fn from_iter<I: IntoIterator<Item = &'b str>>(it: I) -> Cow<'a, str> {
        Cow::Owned(FromIterator::from_iter(it))
    }
}

#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
impl<'a> FromIterator<String> for Cow<'a, str> {
    fn from_iter<I: IntoIterator<Item = String>>(it: I) -> Cow<'a, str> {
        Cow::Owned(FromIterator::from_iter(it))
    }
}
#[stable(feature = "from_string_for_vec_u8", since = "1.14.0")]
impl From<String> for Vec<u8> {
    /// Converts the given `String` to a vector `Vec` that holds values of type `u8`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s1 = String::from("hello world");
    /// let v1 = Vec::from(s1);
    ///
    /// for b in v1 {
    ///     println!("{}", b);
    /// }
    /// ```
    fn from(string: String) -> Vec<u8> {
        string.into_bytes()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Write for String {
    #[inline]
    fn write_str(&mut self, s: &str) -> fmt::Result {
        self.push_str(s);
        Ok(())
    }

    #[inline]
    fn write_char(&mut self, c: char) -> fmt::Result {
        self.push(c);
        Ok(())
    }
}

/// A draining iterator for `String`.
///
/// This struct is created by the [`drain`] method on [`String`]. See its
/// documentation for more.
///
/// [`drain`]: String::drain
#[stable(feature = "drain", since = "1.6.0")]
pub struct Drain<'a> {
    /// Will be used as &'a mut String in the destructor
    string: *mut String,
    /// Start of part to remove
    start: usize,
    /// End of part to remove
    end: usize,
    /// Current remaining range to remove
    iter: Chars<'a>,
}

#[stable(feature = "collection_debug", since = "1.17.0")]
impl fmt::Debug for Drain<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Drain").field(&self.as_str()).finish()
    }
}

#[stable(feature = "drain", since = "1.6.0")]
unsafe impl Sync for Drain<'_> {}
#[stable(feature = "drain", since = "1.6.0")]
unsafe impl Send for Drain<'_> {}

#[stable(feature = "drain", since = "1.6.0")]
impl Drop for Drain<'_> {
    fn drop(&mut self) {
        unsafe {
            // Use Vec::drain. "Reaffirm" the bounds checks to avoid
            // panic code being inserted again.
            let self_vec = (*self.string).as_mut_vec();
            if self.start <= self.end && self.end <= self_vec.len() {
                self_vec.drain(self.start..self.end);
            }
        }
    }
}

impl<'a> Drain<'a> {
    /// Returns the remaining (sub)string of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(string_drain_as_str)]
    /// let mut s = String::from("abc");
    /// let mut drain = s.drain(..);
    /// assert_eq!(drain.as_str(), "abc");
    /// let _ = drain.next().unwrap();
    /// assert_eq!(drain.as_str(), "bc");
    /// ```
    #[unstable(feature = "string_drain_as_str", issue = "76905")] // Note: uncomment AsRef impls below when stabilizing.
    pub fn as_str(&self) -> &str {
        self.iter.as_str()
    }
}

// Uncomment when stabilizing `string_drain_as_str`.
// #[unstable(feature = "string_drain_as_str", issue = "76905")]
// impl<'a> AsRef<str> for Drain<'a> {
//     fn as_ref(&self) -> &str {
//         self.as_str()
//     }
// }
//
// #[unstable(feature = "string_drain_as_str", issue = "76905")]
// impl<'a> AsRef<[u8]> for Drain<'a> {
//     fn as_ref(&self) -> &[u8] {
//         self.as_str().as_bytes()
//     }
// }

#[stable(feature = "drain", since = "1.6.0")]
impl Iterator for Drain<'_> {
    type Item = char;

    #[inline]
    fn next(&mut self) -> Option<char> {
        self.iter.next()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }

    #[inline]
    fn last(mut self) -> Option<char> {
        self.next_back()
    }
}

#[stable(feature = "drain", since = "1.6.0")]
impl DoubleEndedIterator for Drain<'_> {
    #[inline]
    fn next_back(&mut self) -> Option<char> {
        self.iter.next_back()
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl FusedIterator for Drain<'_> {}

#[stable(feature = "from_char_for_string", since = "1.46.0")]
impl From<char> for String {
    #[inline]
    fn from(c: char) -> Self {
        c.to_string()
    }
}
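// Illustrative sketch (not part of the upstream source): the `fmt::Write`
// impl above is what lets the `write!` macro format directly into a
// `String`; apart from a formatting error, this cannot fail.
#[cfg(test)]
mod fmt_write_example {
    use core::fmt::Write;

    #[test]
    fn write_into_string() {
        let mut s = String::from("pi is about ");
        // `write!` appends via `push_str`/`push`; no I/O is involved.
        write!(s, "{:.2}", core::f64::consts::PI).unwrap();
        assert_eq!(s, "pi is about 3.14");
    }
}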
//! A module for working with borrowed data.

#![stable(feature = "rust1", since = "1.0.0")]

use core::cmp::Ordering;
use core::hash::{Hash, Hasher};
use core::ops::{Add, AddAssign, Deref};

#[stable(feature = "rust1", since = "1.0.0")]
pub use core::borrow::{Borrow, BorrowMut};

use crate::fmt;
use crate::string::String;

use Cow::*;

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, B: ?Sized> Borrow<B> for Cow<'a, B>
where
    B: ToOwned,
    <B as ToOwned>::Owned: 'a,
{
    fn borrow(&self) -> &B {
        &**self
    }
}

/// A generalization of `Clone` to borrowed data.
///
/// Some types make it possible to go from borrowed to owned, usually by
/// implementing the `Clone` trait. But `Clone` works only for going from `&T`
/// to `T`. The `ToOwned` trait generalizes `Clone` to construct owned data
/// from any borrow of a given type.
#[cfg_attr(not(test), rustc_diagnostic_item = "ToOwned")]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait ToOwned {
    /// The resulting type after obtaining ownership.
    #[stable(feature = "rust1", since = "1.0.0")]
    type Owned: Borrow<Self>;

    /// Creates owned data from borrowed data, usually by cloning.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s: &str = "a";
    /// let ss: String = s.to_owned();
    ///
    /// let v: &[i32] = &[1, 2];
    /// let vv: Vec<i32> = v.to_owned();
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use = "cloning is often expensive and is not expected to have side effects"]
    fn to_owned(&self) -> Self::Owned;

    /// Uses borrowed data to replace owned data, usually by cloning.
    ///
    /// This is a borrow-generalized version of `Clone::clone_from`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// # #![feature(toowned_clone_into)]
    /// let mut s: String = String::new();
    /// "hello".clone_into(&mut s);
    ///
    /// let mut v: Vec<i32> = Vec::new();
    /// [1, 2][..].clone_into(&mut v);
    /// ```
    #[unstable(feature = "toowned_clone_into", reason = "recently added", issue = "41263")]
    fn clone_into(&self, target: &mut Self::Owned) {
        *target = self.to_owned();
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ToOwned for T
where
    T: Clone,
{
    type Owned = T;
    fn to_owned(&self) -> T {
        self.clone()
    }

    fn clone_into(&self, target: &mut T) {
        target.clone_from(self);
    }
}
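// Illustrative sketch (not part of the upstream source): `ToOwned` is the
// bound to reach for when generic code must turn any borrow into its owned
// form, where `Clone` alone could not go from `str` to `String`. The helper
// name `own` is hypothetical.
#[cfg(test)]
mod to_owned_example {
    use super::ToOwned;
    use crate::vec::Vec;

    // Works for `str -> String`, `[T] -> Vec<T>`, and any `T: Clone`
    // via the blanket impl above.
    fn own<B: ToOwned + ?Sized>(borrowed: &B) -> B::Owned {
        borrowed.to_owned()
    }

    #[test]
    fn owns_str_and_slice() {
        let s: String = own("a");
        let v: Vec<i32> = own(&[1, 2][..]);
        assert_eq!(s, "a");
        assert_eq!(v, [1, 2]);
    }
}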
/// A clone-on-write smart pointer.
///
/// The type `Cow` is a smart pointer providing clone-on-write functionality: it
/// can enclose and provide immutable access to borrowed data, and clone the
/// data lazily when mutation or ownership is required. The type is designed to
/// work with general borrowed data via the `Borrow` trait.
///
/// `Cow` implements `Deref`, which means that you can call
/// non-mutating methods directly on the data it encloses. If mutation
/// is desired, `to_mut` will obtain a mutable reference to an owned
/// value, cloning if necessary.
///
/// If you need reference-counting pointers, note that
/// [`Rc::make_mut`][crate::rc::Rc::make_mut] and
/// [`Arc::make_mut`][crate::sync::Arc::make_mut] can provide clone-on-write
/// functionality as well.
///
/// # Examples
///
/// ```
/// use std::borrow::Cow;
///
/// fn abs_all(input: &mut Cow<[i32]>) {
///     for i in 0..input.len() {
///         let v = input[i];
///         if v < 0 {
///             // Clones into a vector if not already owned.
///             input.to_mut()[i] = -v;
///         }
///     }
/// }
///
/// // No clone occurs because `input` doesn't need to be mutated.
/// let slice = [0, 1, 2];
/// let mut input = Cow::from(&slice[..]);
/// abs_all(&mut input);
///
/// // Clone occurs because `input` needs to be mutated.
/// let slice = [-1, 0, 1];
/// let mut input = Cow::from(&slice[..]);
/// abs_all(&mut input);
///
/// // No clone occurs because `input` is already owned.
/// let mut input = Cow::from(vec![-1, 0, 1]);
/// abs_all(&mut input);
/// ```
///
/// Another example showing how to keep `Cow` in a struct:
///
/// ```
/// use std::borrow::Cow;
///
/// struct Items<'a, X: 'a> where [X]: ToOwned<Owned = Vec<X>> {
///     values: Cow<'a, [X]>,
/// }
///
/// impl<'a, X: Clone + 'a> Items<'a, X> where [X]: ToOwned<Owned = Vec<X>> {
///     fn new(v: Cow<'a, [X]>) -> Self {
///         Items { values: v }
///     }
/// }
///
/// // Creates a container from borrowed values of a slice
/// let readonly = [1, 2];
/// let borrowed = Items::new((&readonly[..]).into());
/// match borrowed {
///     Items { values: Cow::Borrowed(b) } => println!("borrowed {:?}", b),
///     _ => panic!("expect borrowed value"),
/// }
///
/// let mut clone_on_write = borrowed;
/// // Mutates the data from slice into owned vec and pushes a new value on top
/// clone_on_write.values.to_mut().push(3);
/// println!("clone_on_write = {:?}", clone_on_write.values);
///
/// // The data was mutated. Let's check it out.
/// match clone_on_write {
///     Items { values: Cow::Owned(_) } => println!("clone_on_write contains owned data"),
///     _ => panic!("expect owned data"),
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Cow<'a, B: ?Sized + 'a>
where
    B: ToOwned,
{
    /// Borrowed data.
    #[stable(feature = "rust1", since = "1.0.0")]
    Borrowed(#[stable(feature = "rust1", since = "1.0.0")] &'a B),

    /// Owned data.
    #[stable(feature = "rust1", since = "1.0.0")]
    Owned(#[stable(feature = "rust1", since = "1.0.0")] <B as ToOwned>::Owned),
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<B: ?Sized + ToOwned> Clone for Cow<'_, B> {
    fn clone(&self) -> Self {
        match *self {
            Borrowed(b) => Borrowed(b),
            Owned(ref o) => {
                let b: &B = o.borrow();
                Owned(b.to_owned())
            }
        }
    }

    fn clone_from(&mut self, source: &Self) {
        match (self, source) {
            (&mut Owned(ref mut dest), &Owned(ref o)) => o.borrow().clone_into(dest),
            (t, s) => *t = s.clone(),
        }
    }
}

impl<B: ?Sized + ToOwned> Cow<'_, B> {
    /// Returns true if the data is borrowed, i.e. if `to_mut` would require additional work.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(cow_is_borrowed)]
    /// use std::borrow::Cow;
    ///
    /// let cow = Cow::Borrowed("moo");
    /// assert!(cow.is_borrowed());
    ///
    /// let bull: Cow<'_, str> = Cow::Owned("...moo?".to_string());
    /// assert!(!bull.is_borrowed());
    /// ```
    #[unstable(feature = "cow_is_borrowed", issue = "65143")]
    #[rustc_const_unstable(feature = "const_cow_is_borrowed", issue = "65143")]
    pub const fn is_borrowed(&self) -> bool {
        match *self {
            Borrowed(_) => true,
            Owned(_) => false,
        }
    }

    /// Returns true if the data is owned, i.e. if `to_mut` would be a no-op.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(cow_is_borrowed)]
    /// use std::borrow::Cow;
    ///
    /// let cow: Cow<'_, str> = Cow::Owned("moo".to_string());
    /// assert!(cow.is_owned());
    ///
    /// let bull = Cow::Borrowed("...moo?");
    /// assert!(!bull.is_owned());
    /// ```
    #[unstable(feature = "cow_is_borrowed", issue = "65143")]
    #[rustc_const_unstable(feature = "const_cow_is_borrowed", issue = "65143")]
    pub const fn is_owned(&self) -> bool {
        !self.is_borrowed()
    }
    /// Acquires a mutable reference to the owned form of the data.
    ///
    /// Clones the data if it is not already owned.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::borrow::Cow;
    ///
    /// let mut cow = Cow::Borrowed("foo");
    /// cow.to_mut().make_ascii_uppercase();
    ///
    /// assert_eq!(
    ///   cow,
    ///   Cow::Owned(String::from("FOO")) as Cow<str>
    /// );
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn to_mut(&mut self) -> &mut <B as ToOwned>::Owned {
        match *self {
            Borrowed(borrowed) => {
                *self = Owned(borrowed.to_owned());
                match *self {
                    Borrowed(..) => unreachable!(),
                    Owned(ref mut owned) => owned,
                }
            }
            Owned(ref mut owned) => owned,
        }
    }

    /// Extracts the owned data.
    ///
    /// Clones the data if it is not already owned.
    ///
    /// # Examples
    ///
    /// Calling `into_owned` on a `Cow::Borrowed` clones the underlying data
    /// and becomes a `Cow::Owned`:
    ///
    /// ```
    /// use std::borrow::Cow;
    ///
    /// let s = "Hello world!";
    /// let cow = Cow::Borrowed(s);
    ///
    /// assert_eq!(
    ///   cow.into_owned(),
    ///   String::from(s)
    /// );
    /// ```
    ///
    /// Calling `into_owned` on a `Cow::Owned` is a no-op:
    ///
    /// ```
    /// use std::borrow::Cow;
    ///
    /// let s = "Hello world!";
    /// let cow: Cow<str> = Cow::Owned(String::from(s));
    ///
    /// assert_eq!(
    ///   cow.into_owned(),
    ///   String::from(s)
    /// );
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn into_owned(self) -> <B as ToOwned>::Owned {
        match self {
            Borrowed(borrowed) => borrowed.to_owned(),
            Owned(owned) => owned,
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<B: ?Sized + ToOwned> Deref for Cow<'_, B> {
    type Target = B;

    fn deref(&self) -> &B {
        match *self {
            Borrowed(borrowed) => borrowed,
            Owned(ref owned) => owned.borrow(),
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<B: ?Sized> Eq for Cow<'_, B> where B: Eq + ToOwned {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<B: ?Sized> Ord for Cow<'_, B>
where
    B: Ord + ToOwned,
{
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        Ord::cmp(&**self, &**other)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, 'b, B: ?Sized, C: ?Sized> PartialEq<Cow<'b, C>> for Cow<'a, B>
where
    B: PartialEq<C> + ToOwned,
    C: ToOwned,
{
    #[inline]
    fn eq(&self, other: &Cow<'b, C>) -> bool {
        PartialEq::eq(&**self, &**other)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, B: ?Sized> PartialOrd for Cow<'a, B>
where
    B: PartialOrd + ToOwned,
{
    #[inline]
    fn partial_cmp(&self, other: &Cow<'a, B>) -> Option<Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<B: ?Sized> fmt::Debug for Cow<'_, B>
where
    B: fmt::Debug + ToOwned,
    <B as ToOwned>::Owned: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            Borrowed(ref b) => fmt::Debug::fmt(b, f),
            Owned(ref o) => fmt::Debug::fmt(o, f),
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<B: ?Sized> fmt::Display for Cow<'_, B>
where
    B: fmt::Display + ToOwned,
    <B as ToOwned>::Owned: fmt::Display,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            Borrowed(ref b) => fmt::Display::fmt(b, f),
            Owned(ref o) => fmt::Display::fmt(o, f),
        }
    }
}
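// Illustrative sketch (not part of the upstream source): a classic use of
// `Cow` as a return type, borrowing on the common path and allocating only
// when a change is actually needed. The function name is hypothetical.
#[cfg(test)]
mod cow_return_example {
    use super::Cow;

    // Strips ASCII spaces only if any are present; otherwise returns the
    // input unchanged and unallocated.
    fn without_spaces(input: &str) -> Cow<'_, str> {
        if input.contains(' ') {
            Cow::Owned(input.replace(' ', ""))
        } else {
            Cow::Borrowed(input)
        }
    }

    #[test]
    fn borrows_unless_needed() {
        assert!(matches!(without_spaces("abc"), Cow::Borrowed(_)));
        assert_eq!(without_spaces("a b c"), "abc");
    }
}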
    fn default() -> Self {
        Owned(<B as ToOwned>::Owned::default())
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<B: ?Sized> Hash for Cow<'_, B>
where
    B: Hash + ToOwned,
{
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        Hash::hash(&**self, state)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + ToOwned> AsRef<T> for Cow<'_, T> {
    fn as_ref(&self) -> &T {
        self
    }
}

#[stable(feature = "cow_add", since = "1.14.0")]
impl<'a> Add<&'a str> for Cow<'a, str> {
    type Output = Cow<'a, str>;

    #[inline]
    fn add(mut self, rhs: &'a str) -> Self::Output {
        self += rhs;
        self
    }
}

#[stable(feature = "cow_add", since = "1.14.0")]
impl<'a> Add<Cow<'a, str>> for Cow<'a, str> {
    type Output = Cow<'a, str>;

    #[inline]
    fn add(mut self, rhs: Cow<'a, str>) -> Self::Output {
        self += rhs;
        self
    }
}

#[stable(feature = "cow_add", since = "1.14.0")]
impl<'a> AddAssign<&'a str> for Cow<'a, str> {
    fn add_assign(&mut self, rhs: &'a str) {
        if self.is_empty() {
            *self = Cow::Borrowed(rhs)
        } else if !rhs.is_empty() {
            if let Cow::Borrowed(lhs) = *self {
                let mut s = String::with_capacity(lhs.len() + rhs.len());
                s.push_str(lhs);
                *self = Cow::Owned(s);
            }
            self.to_mut().push_str(rhs);
        }
    }
}

#[stable(feature = "cow_add", since = "1.14.0")]
impl<'a> AddAssign<Cow<'a, str>> for Cow<'a, str> {
    fn add_assign(&mut self, rhs: Cow<'a, str>) {
        if self.is_empty() {
            *self = rhs
        } else if !rhs.is_empty() {
            if let Cow::Borrowed(lhs) = *self {
                let mut s = String::with_capacity(lhs.len() + rhs.len());
                s.push_str(lhs);
                *self = Cow::Owned(s);
            }
            self.to_mut().push_str(&rhs);
        }
    }
}

//! # The Rust core allocation and collections library
//!
//! This library provides smart pointers and collections for managing
//! heap-allocated values.
//!
//! This library, like libcore, normally doesn’t need to be used directly
//! since its contents are re-exported in the [`std` crate](../std/index.html).
//! Crates that use the `#![no_std]` attribute however will typically
//! not depend on `std`, so they’d use this crate instead.
//!
//! ## Boxed values
//!
//! The [`Box`] type is a smart pointer type. There can only be one owner of a
//! [`Box`], and the owner can decide to mutate the contents, which live on the
//! heap.
//!
//! This type can be sent among threads efficiently as the size of a `Box` value
//! is the same as that of a pointer. Tree-like data structures are often built
//! with boxes because each node often has only one owner, the parent.
//!
//! ## Reference counted pointers
//!
//! The [`Rc`] type is a non-threadsafe reference-counted pointer type intended
//! for sharing memory within a thread. An [`Rc`] pointer wraps a type, `T`, and
//! only allows access to `&T`, a shared reference.
//!
//! This type is useful when inherited mutability (such as using [`Box`]) is too
//! constraining for an application, and is often paired with the [`Cell`] or
//! [`RefCell`] types in order to allow mutation.
//!
//! ## Atomically reference counted pointers
//!
//! The [`Arc`] type is the threadsafe equivalent of the [`Rc`] type. It
//! provides all the same functionality of [`Rc`], except it requires that the
//! contained type `T` is shareable. Additionally, [`Arc<T>`][`Arc`] is itself
//! sendable while [`Rc<T>`][`Rc`] is not.
//!
//! This type allows for shared access to the contained data, and is often
//! paired with synchronization primitives such as mutexes to allow mutation of
//! shared resources.
//!
//! ## Collections
//!
//! Implementations of the most common general purpose data structures are
//! defined in this library. They are re-exported through the
//! [standard collections library](../std/collections/index.html).
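//!
//! For example, the re-exported collections are used like this (a brief
//! sketch via the `std` re-exports):
//!
//! ```
//! use std::collections::BTreeMap;
//!
//! let mut map = BTreeMap::new();
//! map.insert("one", 1);
//! assert_eq!(map.get("one"), Some(&1));
//! ```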
//!
//! ## Heap interfaces
//!
//! The [`alloc`](alloc/index.html) module defines the low-level interface to the
//! default global allocator. It is not compatible with the libc allocator API.
//!
//! [`Arc`]: sync
//! [`Box`]: boxed
//! [`Cell`]: core::cell
//! [`Rc`]: rc
//! [`RefCell`]: core::cell

#![allow(unused_attributes)]
#![stable(feature = "alloc", since = "1.36.0")]
#![doc(
    html_root_url = "https://doc.rust-lang.org/nightly/",
    html_playground_url = "https://play.rust-lang.org/",
    issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
    test(no_crate_inject, attr(allow(unused_variables), deny(warnings)))
)]
#![no_std]
#![needs_allocator]
#![warn(deprecated_in_future)]
#![warn(missing_docs)]
#![warn(missing_debug_implementations)]
#![allow(explicit_outlives_requirements)]
#![deny(unsafe_op_in_unsafe_fn)]
#![feature(rustc_allow_const_fn_unstable)]
#![cfg_attr(not(test), feature(generator_trait))]
#![cfg_attr(test, feature(test))]
#![cfg_attr(test, feature(new_uninit))]
#![feature(allocator_api)]
#![feature(array_chunks)]
#![feature(array_methods)]
#![feature(array_windows)]
#![feature(allow_internal_unstable)]
#![feature(arbitrary_self_types)]
#![feature(async_stream)]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(cfg_sanitize)]
#![feature(cfg_target_has_atomic)]
#![feature(coerce_unsized)]
#![feature(const_btree_new)]
#![cfg_attr(bootstrap, feature(const_fn))]
#![cfg_attr(not(bootstrap), feature(const_fn_trait_bound))]
#![feature(cow_is_borrowed)]
#![feature(const_cow_is_borrowed)]
#![feature(destructuring_assignment)]
#![feature(dispatch_from_dyn)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(exact_size_is_empty)]
#![feature(exclusive_range_pattern)]
#![feature(extend_one)]
#![feature(fmt_internals)]
#![feature(fn_traits)]
#![feature(fundamental)]
#![feature(inplace_iteration)]
// Technically, this is a bug in rustdoc: rustdoc sees the documentation on `#[lang = slice_alloc]`
// blocks is for `&[T]`, which also has documentation using this feature in `core`, and gets mad
// that the feature-gate isn't enabled. Ideally, it wouldn't check for the feature gate for docs
// from other crates, but since this can only appear for lang items, it doesn't seem worth fixing.
#![feature(intra_doc_pointers)]
#![feature(iter_zip)]
#![feature(lang_items)]
#![feature(layout_for_ptr)]
#![feature(maybe_uninit_ref)]
#![feature(negative_impls)]
#![feature(never_type)]
#![feature(nll)]
#![feature(nonnull_slice_from_raw_parts)]
#![feature(auto_traits)]
#![feature(option_result_unwrap_unchecked)]
#![cfg_attr(bootstrap, feature(or_patterns))]
#![feature(pattern)]
#![feature(ptr_internals)]
#![feature(rustc_attrs)]
#![feature(receiver_trait)]
#![feature(min_specialization)]
#![feature(set_ptr_value)]
#![feature(slice_ptr_get)]
#![feature(slice_ptr_len)]
#![feature(slice_range)]
#![feature(staged_api)]
#![feature(str_internals)]
#![feature(trusted_len)]
#![feature(unboxed_closures)]
#![feature(unicode_internals)]
#![feature(unsize)]
#![feature(unsized_fn_params)]
#![feature(allocator_internals)]
#![feature(slice_partition_dedup)]
#![feature(maybe_uninit_extra, maybe_uninit_slice, maybe_uninit_uninit_array)]
#![feature(alloc_layout_extra)]
#![feature(trusted_random_access)]
#![feature(try_trait)]
#![feature(min_type_alias_impl_trait)]
#![feature(associated_type_bounds)]
#![feature(slice_group_by)]
#![feature(decl_macro)]

// Allow testing this library
#[cfg(test)]
#[macro_use]
extern crate std;
#[cfg(test)]
extern crate test;

// Module with internal macros used by other modules (needs to be included before other modules).
#[macro_use]
mod macros;

// Heaps provided for low-level allocation strategies
pub mod alloc;

// Primitive types using the heaps above
// Need to conditionally define the mod from `boxed.rs` to avoid
// duplicating the lang-items when building in test cfg; but also need
// to allow code to have `use boxed::Box;` declarations.
#[cfg(not(test))]
pub mod boxed;
#[cfg(test)]
mod boxed {
    pub use std::boxed::Box;
}
pub mod borrow;
pub mod collections;
pub mod fmt;
pub mod prelude;
pub mod raw_vec;
pub mod rc;
pub mod slice;
pub mod str;
pub mod string;
#[cfg(target_has_atomic = "ptr")]
pub mod sync;
#[cfg(target_has_atomic = "ptr")]
pub mod task;
#[cfg(test)]
mod tests;
pub mod vec;

#[doc(hidden)]
#[unstable(feature = "liballoc_internals", issue = "none", reason = "implementation detail")]
pub mod __export {
    pub use core::format_args;
}

//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
//! Counted'.
//!
//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
//! given allocation is destroyed, the value stored in that allocation (often
//! referred to as "inner value") is also dropped.
//!
//! Shared references in Rust disallow mutation by default, and [`Rc`]
//! is no exception: you cannot generally obtain a mutable reference to
//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
//! inside an `Rc`][mutability].
//!
//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
//! does not implement [`Send`][send]. As a result, the Rust compiler
//! will check *at compile time* that you are not sending [`Rc`]s between
//! threads. If you need multi-threaded, atomic reference counting, use
//! [`sync::Arc`][arc].
//!
//! The [`downgrade`][downgrade] method can be used to create a non-owning
//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
//! already been dropped. In other words, `Weak` pointers do not keep the value
//! inside the allocation alive; however, they *do* keep the allocation
//! (the backing store for the inner value) alive.
//!
//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
//! [`Weak`] is used to break cycles. For example, a tree could have strong
//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
//! children back to their parents.
//!
//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
//! functions, called using [fully qualified syntax]:
//!
//! ```
//! use std::rc::Rc;
//!
//! let my_rc = Rc::new(());
//! Rc::downgrade(&my_rc);
//! ```
//!
//! `Rc<T>`'s implementations of traits like `Clone` may also be called using
//! fully qualified syntax. Some people prefer to use fully qualified syntax,
//! while others prefer using method-call syntax.
//!
//! ```
//! use std::rc::Rc;
//!
//! let rc = Rc::new(());
//! // Method-call syntax
//! let rc2 = rc.clone();
//! // Fully qualified syntax
//! let rc3 = Rc::clone(&rc);
//! ```
//!
//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
//! already been dropped.
//!
//! # Cloning references
//!
//! Creating a new reference to the same allocation as an existing reference counted pointer
//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
//!
//! ```
//! use std::rc::Rc;
//!
//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
//! // The two syntaxes below are equivalent.
//! let a = foo.clone();
//! let b = Rc::clone(&foo);
//! // a and b both point to the same memory location as foo.
//! ```
//!
//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
//! the meaning of the code. In the example above, this syntax makes it easier to see that
//! this code is creating a new reference rather than copying the whole content of foo.
//!
//! # Examples
//!
//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
//! unique ownership, because more than one gadget may belong to the same
//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
//!
//! ```
//! use std::rc::Rc;
//!
//! struct Owner {
//!     name: String,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! fn main() {
//!     // Create a reference-counted `Owner`.
//!     let gadget_owner: Rc<Owner> = Rc::new(
//!         Owner {
//!             name: "Gadget Man".to_string(),
//!         }
//!     );
//!
//!     // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
//!     // gives us a new pointer to the same `Owner` allocation, incrementing
//!     // the reference count in the process.
//!     let gadget1 = Gadget {
//!         id: 1,
//!         owner: Rc::clone(&gadget_owner),
//!     };
//!     let gadget2 = Gadget {
//!         id: 2,
//!         owner: Rc::clone(&gadget_owner),
//!     };
//!
//!     // Dispose of our local variable `gadget_owner`.
//!     drop(gadget_owner);
//!
//!     // Despite dropping `gadget_owner`, we're still able to print out the name
//!     // of the `Owner` of the `Gadget`s.
//!     // This is because we've only dropped a
//!     // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
//!     // other `Rc<Owner>` pointers pointing at the same `Owner` allocation, it will remain
//!     // live. The field projection `gadget1.owner.name` works because
//!     // `Rc<Owner>` automatically dereferences to `Owner`.
//!     println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
//!     println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
//!
//!     // At the end of the function, `gadget1` and `gadget2` are destroyed, and
//!     // with them the last counted references to our `Owner`. Gadget Man now
//!     // gets destroyed as well.
//! }
//! ```
//!
//! If our requirements change, and we also need to be able to traverse from
//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
//! to `Gadget` introduces a cycle. This means that their
//! reference counts can never reach 0, and the allocation will never be destroyed:
//! a memory leak. In order to get around this, we can use [`Weak`]
//! pointers.
//!
//! Rust actually makes it somewhat difficult to produce this loop in the first
//! place. In order to end up with two values that point at each other, one of
//! them needs to be mutable. This is difficult because [`Rc`] enforces
//! memory safety by only giving out shared references to the value it wraps,
//! and these don't allow direct mutation. We need to wrap the part of the
//! value we wish to mutate in a [`RefCell`], which provides *interior
//! mutability*: a method to achieve mutability through a shared reference.
//! [`RefCell`] enforces Rust's borrowing rules at runtime.
//!
//! ```
//! use std::rc::Rc;
//! use std::rc::Weak;
//! use std::cell::RefCell;
//!
//! struct Owner {
//!     name: String,
//!     gadgets: RefCell<Vec<Weak<Gadget>>>,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! fn main() {
//!     // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
//!     // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
//!     // a shared reference.
//!     let gadget_owner: Rc<Owner> = Rc::new(
//!         Owner {
//!             name: "Gadget Man".to_string(),
//!             gadgets: RefCell::new(vec![]),
//!         }
//!     );
//!
//!     // Create `Gadget`s belonging to `gadget_owner`, as before.
//!     let gadget1 = Rc::new(
//!         Gadget {
//!             id: 1,
//!             owner: Rc::clone(&gadget_owner),
//!         }
//!     );
//!     let gadget2 = Rc::new(
//!         Gadget {
//!             id: 2,
//!             owner: Rc::clone(&gadget_owner),
//!         }
//!     );
//!
//!     // Add the `Gadget`s to their `Owner`.
//!     {
//!         let mut gadgets = gadget_owner.gadgets.borrow_mut();
//!         gadgets.push(Rc::downgrade(&gadget1));
//!         gadgets.push(Rc::downgrade(&gadget2));
//!
//!         // `RefCell` dynamic borrow ends here.
//!     }
//!
//!     // Iterate over our `Gadget`s, printing their details out.
//!     for gadget_weak in gadget_owner.gadgets.borrow().iter() {
//!
//!         // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
//!         // guarantee the allocation still exists, we need to call
//!         // `upgrade`, which returns an `Option<Rc<Gadget>>`.
//!         //
//!         // In this case we know the allocation still exists, so we simply
//!         // `unwrap` the `Option`. In a more complicated program, you might
//!         // need graceful error handling for a `None` result.
//!
//!         let gadget = gadget_weak.upgrade().unwrap();
//!         println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
//!     }
//!
//!     // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
//!     // are destroyed. There are now no strong (`Rc`) pointers to the
//!     // gadgets, so they are destroyed. This zeroes the reference count on
//!     // Gadget Man, so he gets destroyed as well.
//! }
//! ```
//!
//! [clone]: Clone::clone
//! [`Cell`]: core::cell::Cell
//! [`RefCell`]: core::cell::RefCell
//! [send]: core::marker::Send
//! [arc]: crate::sync::Arc
//! [`Deref`]: core::ops::Deref
//! [downgrade]: Rc::downgrade
//! [upgrade]: Weak::upgrade
//! [mutability]: core::cell#introducing-mutability-inside-of-something-immutable
//! [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name

#![stable(feature = "rust1", since = "1.0.0")]

#[cfg(not(test))]
use crate::boxed::Box;
#[cfg(test)]
use std::boxed::Box;

use core::any::Any;
use core::borrow;
use core::cell::Cell;
use core::cmp::Ordering;
use core::convert::{From, TryFrom};
use core::fmt;
use core::hash::{Hash, Hasher};
use core::intrinsics::abort;
use core::iter;
use core::marker::{self, PhantomData, Unpin, Unsize};
use core::mem::{self, align_of_val_raw, forget, size_of_val};
use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
use core::pin::Pin;
use core::ptr::{self, NonNull};
use core::slice::from_raw_parts_mut;

use crate::alloc::{
    box_free, handle_alloc_error, AllocError, Allocator, Global, Layout, WriteCloneIntoRaw,
};
use crate::borrow::{Cow, ToOwned};
use crate::string::String;
use crate::vec::Vec;

#[cfg(test)]
mod tests;

// This is repr(C) to future-proof against possible field-reordering, which
// would interfere with otherwise safe [into|from]_raw() of transmutable
// inner types.
#[repr(C)]
struct RcBox<T: ?Sized> {
    strong: Cell<usize>,
    weak: Cell<usize>,
    value: T,
}

/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
/// Counted'.
///
/// See the [module-level documentation](./index.html) for more details.
///
/// The inherent methods of `Rc` are all associated functions, which means
/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
/// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
///
/// [get_mut]: Rc::get_mut
#[cfg_attr(not(test), rustc_diagnostic_item = "Rc")]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Rc<T: ?Sized> {
    ptr: NonNull<RcBox<T>>,
    phantom: PhantomData<RcBox<T>>,
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> !marker::Send for Rc<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> !marker::Sync for Rc<T> {}

#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Rc<U>> for Rc<T> {}

#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}

impl<T: ?Sized> Rc<T> {
    #[inline(always)]
    fn inner(&self) -> &RcBox<T> {
        // This unsafety is ok because while this Rc is alive we're guaranteed
        // that the inner pointer is valid.
        unsafe { self.ptr.as_ref() }
    }

    fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
        Self { ptr, phantom: PhantomData }
    }

    unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
        Self::from_inner(unsafe { NonNull::new_unchecked(ptr) })
    }
}

impl<T> Rc<T> {
    /// Constructs a new `Rc<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(value: T) -> Rc<T> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
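        //
        // In other words, a freshly constructed `Rc` starts with strong = 1
        // and weak = 1; the extra weak reference is the one collectively owned
        // by all strong pointers, and it is only released once the last strong
        // pointer is gone.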
        Self::from_inner(
            Box::leak(box RcBox { strong: Cell::new(1), weak: Cell::new(1), value }).into(),
        )
    }

    /// Constructs a new `Rc<T>` using a weak reference to itself. Attempting
    /// to upgrade the weak reference before this function returns will result
    /// in a `None` value. However, the weak reference may be cloned freely and
    /// stored for use at a later time.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(arc_new_cyclic)]
    /// #![allow(dead_code)]
    /// use std::rc::{Rc, Weak};
    ///
    /// struct Gadget {
    ///     self_weak: Weak<Self>,
    ///     // ... more fields
    /// }
    /// impl Gadget {
    ///     pub fn new() -> Rc<Self> {
    ///         Rc::new_cyclic(|self_weak| {
    ///             Gadget { self_weak: self_weak.clone(), /* ... */ }
    ///         })
    ///     }
    /// }
    /// ```
    #[unstable(feature = "arc_new_cyclic", issue = "75861")]
    pub fn new_cyclic(data_fn: impl FnOnce(&Weak<T>) -> T) -> Rc<T> {
        // Construct the inner in the "uninitialized" state with a single
        // weak reference.
        let uninit_ptr: NonNull<_> = Box::leak(box RcBox {
            strong: Cell::new(0),
            weak: Cell::new(1),
            value: mem::MaybeUninit::<T>::uninit(),
        })
        .into();

        let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();

        let weak = Weak { ptr: init_ptr };

        // It's important we don't give up ownership of the weak pointer, or
        // else the memory might be freed by the time `data_fn` returns. If
        // we really wanted to pass ownership, we could create an additional
        // weak pointer for ourselves, but this would result in additional
        // updates to the weak reference count which might not be necessary
        // otherwise.
        let data = data_fn(&weak);

        unsafe {
            let inner = init_ptr.as_ptr();
            ptr::write(ptr::addr_of_mut!((*inner).value), data);

            let prev_value = (*inner).strong.get();
            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
            (*inner).strong.set(1);
        }

        let strong = Rc::from_inner(init_ptr);

        // Strong references should collectively own a shared weak reference,
        // so don't run the destructor for our old weak reference.
        mem::forget(weak);
        strong
    }

    /// Constructs a new `Rc` with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
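    ///
    /// Note that calling `assume_init` on the result is only sound if the
    /// all-zero bit pattern is actually a valid `T`, which the caller must
    /// guarantee.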
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    ///
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::new_zeroed();
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
            ))
        }
    }

    /// Constructs a new `Rc<T>`, returning an error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    ///
    /// let five = Rc::try_new(5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new(value: T) -> Result<Rc<T>, AllocError> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        Ok(Self::from_inner(
            Box::leak(Box::try_new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value })?)
                .into(),
        ))
    }

    /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::try_new_uninit()?;
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn try_new_uninit() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
            )?))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, returning an error if the allocation fails
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    ///
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::try_new_zeroed()?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    //#[unstable(feature = "new_uninit", issue = "63291")]
    pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
            )?))
        }
    }

    /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
    /// `value` will be pinned in memory and unable to be moved.
    #[stable(feature = "pin", since = "1.33.0")]
    pub fn pin(value: T) -> Pin<Rc<T>> {
        unsafe { Pin::new_unchecked(Rc::new(value)) }
    }

    /// Returns the inner value, if the `Rc` has exactly one strong reference.
    ///
    /// Otherwise, an [`Err`] is returned with the same `Rc` that was
    /// passed in.
    ///
    /// This will succeed even if there are outstanding weak references.
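    /// (Once the inner value has been moved out, any such [`Weak`] pointers
    /// can no longer be upgraded.)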
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(3);
    /// assert_eq!(Rc::try_unwrap(x), Ok(3));
    ///
    /// let x = Rc::new(4);
    /// let _y = Rc::clone(&x);
    /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
    /// ```
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn try_unwrap(this: Self) -> Result<T, Self> {
        if Rc::strong_count(&this) == 1 {
            unsafe {
                let val = ptr::read(&*this); // copy the contained object

                // Indicate to Weaks that they can't be promoted by decrementing
                // the strong count, and then remove the implicit "strong weak"
                // pointer while also handling drop logic by just crafting a
                // fake Weak.
                this.inner().dec_strong();
                let _weak = Weak { ptr: this.ptr };
                forget(this);
                Ok(val)
            }
        } else {
            Err(this)
        }
    }
}

impl<T> Rc<[T]> {
    /// Constructs a new reference-counted slice with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
    ///
    /// let values = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
    ///     Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
    ///     Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
    ///
    ///     values.assume_init()
    /// };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
        unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
    }

    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
    /// filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    ///
    /// use std::rc::Rc;
    ///
    /// let values = Rc::<[u32]>::new_zeroed_slice(3);
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [0, 0, 0])
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| Global.allocate_zeroed(layout),
                |mem| {
                    ptr::slice_from_raw_parts_mut(mem as *mut T, len)
                        as *mut RcBox<[mem::MaybeUninit<T>]>
                },
            ))
        }
    }
}

impl<T> Rc<mem::MaybeUninit<T>> {
    /// Converts to `Rc<T>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub unsafe fn assume_init(self) -> Rc<T> {
        Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
    }
}

impl<T> Rc<[mem::MaybeUninit<T>]> {
    /// Converts to `Rc<[T]>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
    ///
    /// let values = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
    ///     Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
    ///     Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
    ///
    ///     values.assume_init()
    /// };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub unsafe fn assume_init(self) -> Rc<[T]> {
        unsafe { Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
    }
}

impl<T: ?Sized> Rc<T> {
    /// Consumes the `Rc`, returning the wrapped pointer.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
    /// [`Rc::from_raw`][from_raw].
    ///
    /// [from_raw]: Rc::from_raw
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// ```
    #[stable(feature = "rc_raw", since = "1.17.0")]
    pub fn into_raw(this: Self) -> *const T {
        let ptr = Self::as_ptr(&this);
        mem::forget(this);
        ptr
    }

    /// Provides a raw pointer to the data.
    ///
    /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
    /// for as long as there are strong counts in the `Rc`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let y = Rc::clone(&x);
    /// let x_ptr = Rc::as_ptr(&x);
    /// assert_eq!(x_ptr, Rc::as_ptr(&y));
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// ```
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    pub fn as_ptr(this: &Self) -> *const T {
        let ptr: *mut RcBox<T> = NonNull::as_ptr(this.ptr);

        // SAFETY: This cannot go through Deref::deref or Rc::inner because
        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
        // write through the pointer after the Rc is recovered through `from_raw`.
        unsafe { ptr::addr_of_mut!((*ptr).value) }
    }

    /// Constructs an `Rc<T>` from a raw pointer.
    ///
    /// The raw pointer must have been previously returned by a call to
    /// [`Rc<U>::into_raw`][into_raw] where `U` must have the same size
    /// and alignment as `T`. This is trivially true if `U` is `T`.
    /// Note that if `U` is not `T` but has the same size and alignment, this is
    /// basically like transmuting references of different types. See
    /// [`mem::transmute`][transmute] for more information on what
    /// restrictions apply in this case.
    ///
    /// The user of `from_raw` has to make sure a specific value of `T` is only
    /// dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Rc<T>` is never accessed.
    ///
    /// [into_raw]: Rc::into_raw
    /// [transmute]: core::mem::transmute
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Rc` to prevent leak.
    ///     let x = Rc::from_raw(x_ptr);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
/// ``` #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { let offset = unsafe { data_offset(ptr) }; // Reverse the offset to find the original RcBox. let rc_ptr = unsafe { (ptr as *mut RcBox).set_ptr_value((ptr as *mut u8).offset(-offset)) }; unsafe { Self::from_ptr(rc_ptr) } } /// Creates a new [`Weak`] pointer to this allocation. /// /// # Examples /// /// ``` /// use std::rc::Rc; /// /// let five = Rc::new(5); /// /// let weak_five = Rc::downgrade(&five); /// ``` #[stable(feature = "rc_weak", since = "1.4.0")] pub fn downgrade(this: &Self) -> Weak { this.inner().inc_weak(); // Make sure we do not create a dangling Weak debug_assert!(!is_dangling(this.ptr.as_ptr())); Weak { ptr: this.ptr } } /// Gets the number of [`Weak`] pointers to this allocation. /// /// # Examples /// /// ``` /// use std::rc::Rc; /// /// let five = Rc::new(5); /// let _weak_five = Rc::downgrade(&five); /// /// assert_eq!(1, Rc::weak_count(&five)); /// ``` #[inline] #[stable(feature = "rc_counts", since = "1.15.0")] pub fn weak_count(this: &Self) -> usize { this.inner().weak() - 1 } /// Gets the number of strong (`Rc`) pointers to this allocation. /// /// # Examples /// /// ``` /// use std::rc::Rc; /// /// let five = Rc::new(5); /// let _also_five = Rc::clone(&five); /// /// assert_eq!(2, Rc::strong_count(&five)); /// ``` #[inline] #[stable(feature = "rc_counts", since = "1.15.0")] pub fn strong_count(this: &Self) -> usize { this.inner().strong() } /// Increments the strong reference count on the `Rc` associated with the /// provided pointer by one. /// /// # Safety /// /// The pointer must have been obtained through `Rc::into_raw`, and the /// associated `Rc` instance must be valid (i.e. the strong count must be at /// least 1) for the duration of this method. /// /// # Examples /// /// ``` /// use std::rc::Rc; /// /// let five = Rc::new(5); /// /// unsafe { /// let ptr = Rc::into_raw(five); /// Rc::increment_strong_count(ptr); /// /// let five = Rc::from_raw(ptr); /// assert_eq!(2, Rc::strong_count(&five)); /// } /// ``` #[inline] #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] pub unsafe fn increment_strong_count(ptr: *const T) { // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop let rc = unsafe { mem::ManuallyDrop::new(Rc::::from_raw(ptr)) }; // Now increase refcount, but don't drop new refcount either let _rc_clone: mem::ManuallyDrop<_> = rc.clone(); } /// Decrements the strong reference count on the `Rc` associated with the /// provided pointer by one. /// /// # Safety /// /// The pointer must have been obtained through `Rc::into_raw`, and the /// associated `Rc` instance must be valid (i.e. the strong count must be at /// least 1) when invoking this method. This method can be used to release /// the final `Rc` and backing storage, but **should not** be called after /// the final `Rc` has been released. /// /// # Examples /// /// ``` /// use std::rc::Rc; /// /// let five = Rc::new(5); /// /// unsafe { /// let ptr = Rc::into_raw(five); /// Rc::increment_strong_count(ptr); /// /// let five = Rc::from_raw(ptr); /// assert_eq!(2, Rc::strong_count(&five)); /// Rc::decrement_strong_count(ptr); /// assert_eq!(1, Rc::strong_count(&five)); /// } /// ``` #[inline] #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] pub unsafe fn decrement_strong_count(ptr: *const T) { unsafe { mem::drop(Rc::from_raw(ptr)) }; } /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to /// this allocation. 
    #[inline]
    fn is_unique(this: &Self) -> bool {
        Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
    }

    /// Returns a mutable reference into the given `Rc`, if there are
    /// no other `Rc` or [`Weak`] pointers to the same allocation.
    ///
    /// Returns [`None`] otherwise, because it is not safe to
    /// mutate a shared value.
    ///
    /// See also [`make_mut`][make_mut], which will [`clone`][clone]
    /// the inner value when there are other pointers.
    ///
    /// [make_mut]: Rc::make_mut
    /// [clone]: Clone::clone
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut x = Rc::new(3);
    /// *Rc::get_mut(&mut x).unwrap() = 4;
    /// assert_eq!(*x, 4);
    ///
    /// let _y = Rc::clone(&x);
    /// assert!(Rc::get_mut(&mut x).is_none());
    /// ```
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
        if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
    }

    /// Returns a mutable reference into the given `Rc`,
    /// without any check.
    ///
    /// See also [`get_mut`], which is safe and does appropriate checks.
    ///
    /// [`get_mut`]: Rc::get_mut
    ///
    /// # Safety
    ///
    /// Any other `Rc` or [`Weak`] pointers to the same allocation must not be dereferenced
    /// for the duration of the returned borrow.
    /// This is trivially the case if no such pointers exist,
    /// for example immediately after `Rc::new`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut x = Rc::new(String::new());
    /// unsafe {
    ///     Rc::get_mut_unchecked(&mut x).push_str("foo")
    /// }
    /// assert_eq!(*x, "foo");
    /// ```
    #[inline]
    #[unstable(feature = "get_mut_unchecked", issue = "63292")]
    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
        // We are careful to *not* create a reference covering the "count" fields, as
        // this would conflict with accesses to the reference counts (e.g. by `Weak`).
        unsafe { &mut (*this.ptr.as_ptr()).value }
    }

    #[inline]
    #[stable(feature = "ptr_eq", since = "1.17.0")]
    /// Returns `true` if the two `Rc`s point to the same allocation
    /// (in a vein similar to [`ptr::eq`]).
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let same_five = Rc::clone(&five);
    /// let other_five = Rc::new(5);
    ///
    /// assert!(Rc::ptr_eq(&five, &same_five));
    /// assert!(!Rc::ptr_eq(&five, &other_five));
    /// ```
    ///
    /// [`ptr::eq`]: core::ptr::eq
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        this.ptr.as_ptr() == other.ptr.as_ptr()
    }
}

impl<T: Clone> Rc<T> {
    /// Makes a mutable reference into the given `Rc`.
    ///
    /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
    /// [`clone`] the inner value to a new allocation to ensure unique ownership. This is also
    /// referred to as clone-on-write.
    ///
    /// If there are no other `Rc` pointers to this allocation, then [`Weak`]
    /// pointers to this allocation will be disassociated.
    ///
    /// See also [`get_mut`], which will fail rather than cloning.
    ///
    /// [`clone`]: Clone::clone
    /// [`get_mut`]: Rc::get_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut data = Rc::new(5);
    ///
    /// *Rc::make_mut(&mut data) += 1;         // Won't clone anything
    /// let mut other_data = Rc::clone(&data); // Won't clone inner data
    /// *Rc::make_mut(&mut data) += 1;         // Clones inner data
    /// *Rc::make_mut(&mut data) += 1;         // Won't clone anything
    /// *Rc::make_mut(&mut other_data) *= 2;   // Won't clone anything
    ///
    /// // Now `data` and `other_data` point to different allocations.
    /// assert_eq!(*data, 8);
    /// assert_eq!(*other_data, 12);
    /// ```
    ///
    /// [`Weak`] pointers will be disassociated:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut data = Rc::new(75);
    /// let weak = Rc::downgrade(&data);
    ///
    /// assert!(75 == *data);
    /// assert!(75 == *weak.upgrade().unwrap());
    ///
    /// *Rc::make_mut(&mut data) += 1;
    ///
    /// assert!(76 == *data);
    /// assert!(weak.upgrade().is_none());
    /// ```
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn make_mut(this: &mut Self) -> &mut T {
        if Rc::strong_count(this) != 1 {
            // Gotta clone the data, there are other Rcs.
            // Pre-allocate memory to allow writing the cloned value directly.
            let mut rc = Self::new_uninit();
            unsafe {
                let data = Rc::get_mut_unchecked(&mut rc);
                (**this).write_clone_into_raw(data.as_mut_ptr());
                *this = rc.assume_init();
            }
        } else if Rc::weak_count(this) != 0 {
            // Can just steal the data, all that's left is Weaks
            let mut rc = Self::new_uninit();
            unsafe {
                let data = Rc::get_mut_unchecked(&mut rc);
                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);

                this.inner().dec_strong();
                // Remove implicit strong-weak ref (no need to craft a fake
                // Weak here -- we know other Weaks can clean up for us)
                this.inner().dec_weak();
                ptr::write(this, rc.assume_init());
            }
        }
        // This unsafety is ok because we're guaranteed that the pointer
        // returned is the *only* pointer that will ever be returned to T. Our
        // reference count is guaranteed to be 1 at this point, and we required
        // the `Rc<T>` itself to be `mut`, so we're returning the only possible
        // reference to the allocation.
        unsafe { &mut this.ptr.as_mut().value }
    }
}

impl Rc<dyn Any> {
    #[inline]
    #[stable(feature = "rc_downcast", since = "1.29.0")]
    /// Attempt to downcast the `Rc<dyn Any>` to a concrete type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::any::Any;
    /// use std::rc::Rc;
    ///
    /// fn print_if_string(value: Rc<dyn Any>) {
    ///     if let Ok(string) = value.downcast::<String>() {
    ///         println!("String ({}): {}", string.len(), string);
    ///     }
    /// }
    ///
    /// let my_string = "Hello World".to_string();
    /// print_if_string(Rc::new(my_string));
    /// print_if_string(Rc::new(0i8));
    /// ```
    pub fn downcast<T: Any>(self) -> Result<Rc<T>, Rc<dyn Any>> {
        if (*self).is::<T>() {
            let ptr = self.ptr.cast::<RcBox<T>>();
            forget(self);
            Ok(Rc::from_inner(ptr))
        } else {
            Err(self)
        }
    }
}

impl<T: ?Sized> Rc<T> {
    /// Allocates an `RcBox<T>` with sufficient space for
    /// a possibly-unsized inner value where the value has the layout provided.
    ///
    /// The function `mem_to_rcbox` is called with the data pointer
    /// and must return back a (potentially fat)-pointer for the `RcBox<T>`.
    unsafe fn allocate_for_layout(
        value_layout: Layout,
        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
        mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
    ) -> *mut RcBox<T> {
        // Calculate layout using the given value layout.
        // Previously, layout was calculated on the expression
        // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
        // reference (see #54908).
        let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();
        unsafe {
            Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rcbox)
                .unwrap_or_else(|_| handle_alloc_error(layout))
        }
    }

    /// Allocates an `RcBox<T>` with sufficient space for
    /// a possibly-unsized inner value where the value has the layout provided,
    /// returning an error if allocation fails.
    ///
    /// The function `mem_to_rcbox` is called with the data pointer
    /// and must return back a (potentially fat)-pointer for the `RcBox<T>`.
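    ///
    /// The computed layout is `RcBox<()>` (just the two counts) extended with
    /// `value_layout` and padded, so the header and the value share a single
    /// allocation.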
    #[inline]
    unsafe fn try_allocate_for_layout(
        value_layout: Layout,
        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
        mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
    ) -> Result<*mut RcBox<T>, AllocError> {
        // Calculate layout using the given value layout.
        // Previously, layout was calculated on the expression
        // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
        // reference (see #54908).
        let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();

        // Allocate for the layout.
        let ptr = allocate(layout)?;

        // Initialize the RcBox
        let inner = mem_to_rcbox(ptr.as_non_null_ptr().as_ptr());
        unsafe {
            debug_assert_eq!(Layout::for_value(&*inner), layout);

            ptr::write(&mut (*inner).strong, Cell::new(1));
            ptr::write(&mut (*inner).weak, Cell::new(1));
        }

        Ok(inner)
    }

    /// Allocates an `RcBox<T>` with sufficient space for an unsized inner value
    unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
        // Allocate for the `RcBox<T>` using the given value.
        unsafe {
            Self::allocate_for_layout(
                Layout::for_value(&*ptr),
                |layout| Global.allocate(layout),
                |mem| (ptr as *mut RcBox<T>).set_ptr_value(mem),
            )
        }
    }

    fn from_box(v: Box<T>) -> Rc<T> {
        unsafe {
            let (box_unique, alloc) = Box::into_unique(v);
            let bptr = box_unique.as_ptr();

            let value_size = size_of_val(&*bptr);
            let ptr = Self::allocate_for_ptr(bptr);

            // Copy value as bytes
            ptr::copy_nonoverlapping(
                bptr as *const T as *const u8,
                &mut (*ptr).value as *mut _ as *mut u8,
                value_size,
            );

            // Free the allocation without dropping its contents
            box_free(box_unique, alloc);

            Self::from_ptr(ptr)
        }
    }
}

impl<T> Rc<[T]> {
    /// Allocates an `RcBox<[T]>` with the given length.
    unsafe fn allocate_for_slice(len: usize) -> *mut RcBox<[T]> {
        unsafe {
            Self::allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| Global.allocate(layout),
                |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>,
            )
        }
    }

    /// Copy elements from slice into newly allocated Rc<\[T\]>
    ///
    /// Unsafe because the caller must either take ownership or bind `T: Copy`
    unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
        unsafe {
            let ptr = Self::allocate_for_slice(v.len());
            ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len());
            Self::from_ptr(ptr)
        }
    }

    /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
    ///
    /// Behavior is undefined should the size be wrong.
    unsafe fn from_iter_exact(iter: impl iter::Iterator<Item = T>, len: usize) -> Rc<[T]> {
        // Panic guard while cloning T elements.
        // In the event of a panic, elements that have been written
        // into the new RcBox will be dropped, then the memory freed.
        struct Guard<T> {
            mem: NonNull<u8>,
            elems: *mut T,
            layout: Layout,
            n_elems: usize,
        }

        impl<T> Drop for Guard<T> {
            fn drop(&mut self) {
                unsafe {
                    let slice = from_raw_parts_mut(self.elems, self.n_elems);
                    ptr::drop_in_place(slice);

                    Global.deallocate(self.mem, self.layout);
                }
            }
        }

        unsafe {
            let ptr = Self::allocate_for_slice(len);

            let mem = ptr as *mut _ as *mut u8;
            let layout = Layout::for_value(&*ptr);

            // Pointer to first element
            let elems = &mut (*ptr).value as *mut [T] as *mut T;

            let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };

            for (i, item) in iter.enumerate() {
                ptr::write(elems.add(i), item);
                guard.n_elems += 1;
            }

            // All clear. Forget the guard so it doesn't free the new RcBox.
            forget(guard);

            Self::from_ptr(ptr)
        }
    }
}

/// Specialization trait used for `From<&[T]>`.
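///
/// The `T: Copy` specialization below copies the slice bytes directly, while
/// the general `T: Clone` case clones element by element behind a panic guard.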
trait RcFromSlice<T> {
    fn from_slice(slice: &[T]) -> Self;
}

impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
    #[inline]
    default fn from_slice(v: &[T]) -> Self {
        unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
    }
}

impl<T: Copy> RcFromSlice<T> for Rc<[T]> {
    #[inline]
    fn from_slice(v: &[T]) -> Self {
        unsafe { Rc::copy_from_slice(v) }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Deref for Rc<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &T {
        &self.inner().value
    }
}

#[unstable(feature = "receiver_trait", issue = "none")]
impl<T: ?Sized> Receiver for Rc<T> {}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
    /// Drops the `Rc`.
    ///
    /// This will decrement the strong reference count. If the strong reference
    /// count reaches zero then the only other references (if any) are
    /// [`Weak`], so we `drop` the inner value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo = Rc::new(Foo);
    /// let foo2 = Rc::clone(&foo);
    ///
    /// drop(foo);  // Doesn't print anything
    /// drop(foo2); // Prints "dropped!"
    /// ```
    fn drop(&mut self) {
        unsafe {
            self.inner().dec_strong();
            if self.inner().strong() == 0 {
                // destroy the contained object
                ptr::drop_in_place(Self::get_mut_unchecked(self));

                // remove the implicit "strong weak" pointer now that we've
                // destroyed the contents.
                self.inner().dec_weak();

                if self.inner().weak() == 0 {
                    Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
                }
            }
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Clone for Rc<T> {
    /// Makes a clone of the `Rc` pointer.
    ///
    /// This creates another pointer to the same allocation, increasing the
    /// strong reference count.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// let _ = Rc::clone(&five);
    /// ```
    #[inline]
    fn clone(&self) -> Rc<T> {
        self.inner().inc_strong();
        Self::from_inner(self.ptr)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Default> Default for Rc<T> {
    /// Creates a new `Rc<T>`, with the `Default` value for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x: Rc<i32> = Default::default();
    /// assert_eq!(*x, 0);
    /// ```
    #[inline]
    fn default() -> Rc<T> {
        Rc::new(Default::default())
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
trait RcEqIdent<T: ?Sized + PartialEq> {
    fn eq(&self, other: &Rc<T>) -> bool;
    fn ne(&self, other: &Rc<T>) -> bool;
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialEq> RcEqIdent<T> for Rc<T> {
    #[inline]
    default fn eq(&self, other: &Rc<T>) -> bool {
        **self == **other
    }

    #[inline]
    default fn ne(&self, other: &Rc<T>) -> bool {
        **self != **other
    }
}

// Hack to allow specializing on `Eq` even though `Eq` has a method.
#[rustc_unsafe_specialization_marker]
pub(crate) trait MarkerEq: PartialEq<Self> {}

impl<T: Eq> MarkerEq for T {}

/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
/// store large values, that are slow to clone, but also heavy to check for equality, causing this
/// cost to pay off more easily. It's also more likely to have two `Rc` clones, that point to
/// the same value, than two `&T`s.
///
/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
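///
/// (`f64` is the usual irreflexive example: `f64::NAN != f64::NAN`, so for an
/// `Rc<f64>` holding a NaN the pointer-equality shortcut would give the wrong
/// answer.)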
#[stable(feature = "rust1", since = "1.0.0")] impl RcEqIdent for Rc { #[inline] fn eq(&self, other: &Rc) -> bool { Rc::ptr_eq(self, other) || **self == **other } #[inline] fn ne(&self, other: &Rc) -> bool { !Rc::ptr_eq(self, other) && **self != **other } } #[stable(feature = "rust1", since = "1.0.0")] impl PartialEq for Rc { /// Equality for two `Rc`s. /// /// Two `Rc`s are equal if their inner values are equal, even if they are /// stored in different allocation. /// /// If `T` also implements `Eq` (implying reflexivity of equality), /// two `Rc`s that point to the same allocation are /// always equal. /// /// # Examples /// /// ``` /// use std::rc::Rc; /// /// let five = Rc::new(5); /// /// assert!(five == Rc::new(5)); /// ``` #[inline] fn eq(&self, other: &Rc) -> bool { RcEqIdent::eq(self, other) } /// Inequality for two `Rc`s. /// /// Two `Rc`s are unequal if their inner values are unequal. /// /// If `T` also implements `Eq` (implying reflexivity of equality), /// two `Rc`s that point to the same allocation are /// never unequal. /// /// # Examples /// /// ``` /// use std::rc::Rc; /// /// let five = Rc::new(5); /// /// assert!(five != Rc::new(6)); /// ``` #[inline] fn ne(&self, other: &Rc) -> bool { RcEqIdent::ne(self, other) } } #[stable(feature = "rust1", since = "1.0.0")] impl Eq for Rc {} #[stable(feature = "rust1", since = "1.0.0")] impl PartialOrd for Rc { /// Partial comparison for two `Rc`s. /// /// The two are compared by calling `partial_cmp()` on their inner values. /// /// # Examples /// /// ``` /// use std::rc::Rc; /// use std::cmp::Ordering; /// /// let five = Rc::new(5); /// /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6))); /// ``` #[inline(always)] fn partial_cmp(&self, other: &Rc) -> Option { (**self).partial_cmp(&**other) } /// Less-than comparison for two `Rc`s. /// /// The two are compared by calling `<` on their inner values. /// /// # Examples /// /// ``` /// use std::rc::Rc; /// /// let five = Rc::new(5); /// /// assert!(five < Rc::new(6)); /// ``` #[inline(always)] fn lt(&self, other: &Rc) -> bool { **self < **other } /// 'Less than or equal to' comparison for two `Rc`s. /// /// The two are compared by calling `<=` on their inner values. /// /// # Examples /// /// ``` /// use std::rc::Rc; /// /// let five = Rc::new(5); /// /// assert!(five <= Rc::new(5)); /// ``` #[inline(always)] fn le(&self, other: &Rc) -> bool { **self <= **other } /// Greater-than comparison for two `Rc`s. /// /// The two are compared by calling `>` on their inner values. /// /// # Examples /// /// ``` /// use std::rc::Rc; /// /// let five = Rc::new(5); /// /// assert!(five > Rc::new(4)); /// ``` #[inline(always)] fn gt(&self, other: &Rc) -> bool { **self > **other } /// 'Greater than or equal to' comparison for two `Rc`s. /// /// The two are compared by calling `>=` on their inner values. /// /// # Examples /// /// ``` /// use std::rc::Rc; /// /// let five = Rc::new(5); /// /// assert!(five >= Rc::new(5)); /// ``` #[inline(always)] fn ge(&self, other: &Rc) -> bool { **self >= **other } } #[stable(feature = "rust1", since = "1.0.0")] impl Ord for Rc { /// Comparison for two `Rc`s. /// /// The two are compared by calling `cmp()` on their inner values. 
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    /// use std::cmp::Ordering;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
    /// ```
    #[inline]
    fn cmp(&self, other: &Rc<T>) -> Ordering {
        (**self).cmp(&**other)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Hash> Hash for Rc<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        (**self).hash(state);
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + fmt::Display> fmt::Display for Rc<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> fmt::Pointer for Rc<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&(&**self as *const T), f)
    }
}

#[stable(feature = "from_for_ptrs", since = "1.6.0")]
impl<T> From<T> for Rc<T> {
    fn from(t: T) -> Self {
        Rc::new(t)
    }
}

#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T: Clone> From<&[T]> for Rc<[T]> {
    /// Allocate a reference-counted slice and fill it by cloning `v`'s items.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let original: &[i32] = &[1, 2, 3];
    /// let shared: Rc<[i32]> = Rc::from(original);
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// ```
    #[inline]
    fn from(v: &[T]) -> Rc<[T]> {
        <Self as RcFromSlice<T>>::from_slice(v)
    }
}

#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl From<&str> for Rc<str> {
    /// Allocate a reference-counted string slice and copy `v` into it.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let shared: Rc<str> = Rc::from("statue");
    /// assert_eq!("statue", &shared[..]);
    /// ```
    #[inline]
    fn from(v: &str) -> Rc<str> {
        let rc = Rc::<[u8]>::from(v.as_bytes());
        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
    }
}

#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl From<String> for Rc<str> {
    /// Allocate a reference-counted string slice and copy `v` into it.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let original: String = "statue".to_owned();
    /// let shared: Rc<str> = Rc::from(original);
    /// assert_eq!("statue", &shared[..]);
    /// ```
    #[inline]
    fn from(v: String) -> Rc<str> {
        Rc::from(&v[..])
    }
}

#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T: ?Sized> From<Box<T>> for Rc<T> {
    /// Move a boxed object to a new, reference counted, allocation.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let original: Box<i32> = Box::new(1);
    /// let shared: Rc<i32> = Rc::from(original);
    /// assert_eq!(1, *shared);
    /// ```
    #[inline]
    fn from(v: Box<T>) -> Rc<T> {
        Rc::from_box(v)
    }
}

#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T> From<Vec<T>> for Rc<[T]> {
    /// Allocate a reference-counted slice and move `v`'s items into it.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let original: Box<Vec<i32>> = Box::new(vec![1, 2, 3]);
    /// let shared: Rc<Vec<i32>> = Rc::from(original);
    /// assert_eq!(vec![1, 2, 3], *shared);
    /// ```
    #[inline]
    fn from(mut v: Vec<T>) -> Rc<[T]> {
        unsafe {
            let rc = Rc::copy_from_slice(&v);

            // Allow the Vec to free its memory, but not destroy its contents
            v.set_len(0);

            rc
        }
    }
}

#[stable(feature = "shared_from_cow", since = "1.45.0")]
impl<'a, B> From<Cow<'a, B>> for Rc<B>
where
    B: ToOwned + ?Sized,
    Rc<B>: From<&'a B> + From<B::Owned>,
{
    #[inline]
    fn from(cow: Cow<'a, B>) -> Rc<B> {
        match cow {
            Cow::Borrowed(s) => Rc::from(s),
            Cow::Owned(s) => Rc::from(s),
        }
    }
}

#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
impl<T, const N: usize> TryFrom<Rc<[T]>> for Rc<[T; N]> {
    type Error = Rc<[T]>;

    fn try_from(boxed_slice: Rc<[T]>) -> Result<Self, Self::Error> {
        if boxed_slice.len() == N {
            Ok(unsafe { Rc::from_raw(Rc::into_raw(boxed_slice) as *mut [T; N]) })
        } else {
            Err(boxed_slice)
        }
    }
}

#[stable(feature = "shared_from_iter", since = "1.37.0")]
impl<T> iter::FromIterator<T> for Rc<[T]> {
    /// Takes each element in the `Iterator` and collects it into an `Rc<[T]>`.
    ///
    /// # Performance characteristics
    ///
    /// ## The general case
    ///
    /// In the general case, collecting into `Rc<[T]>` is done by first
    /// collecting into a `Vec<T>`. That is, when writing the following:
    ///
    /// ```rust
    /// # use std::rc::Rc;
    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
    /// ```
    ///
    /// this behaves as if we wrote:
    ///
    /// ```rust
    /// # use std::rc::Rc;
    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
    ///     .collect::<Vec<_>>() // The first set of allocations happens here.
    ///     .into(); // A second allocation for `Rc<[T]>` happens here.
    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
    /// ```
    ///
    /// This will allocate as many times as needed for constructing the `Vec<T>`
    /// and then it will allocate once for turning the `Vec<T>` into the `Rc<[T]>`.
    ///
    /// ## Iterators of known length
    ///
    /// When your `Iterator` implements `TrustedLen` and is of an exact size,
    /// a single allocation will be made for the `Rc<[T]>`. For example:
    ///
    /// ```rust
    /// # use std::rc::Rc;
    /// let evens: Rc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
    /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
    /// ```
    fn from_iter<I: iter::IntoIterator<Item = T>>(iter: I) -> Self {
        ToRcSlice::to_rc_slice(iter.into_iter())
    }
}

/// Specialization trait used for collecting into `Rc<[T]>`.
trait ToRcSlice<T>: Iterator<Item = T> + Sized {
    fn to_rc_slice(self) -> Rc<[T]>;
}

impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
    default fn to_rc_slice(self) -> Rc<[T]> {
        self.collect::<Vec<T>>().into()
    }
}

impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
    fn to_rc_slice(self) -> Rc<[T]> {
        // This is the case for a `TrustedLen` iterator.
        let (low, high) = self.size_hint();
        if let Some(high) = high {
            debug_assert_eq!(
                low,
                high,
                "TrustedLen iterator's size hint is not exact: {:?}",
                (low, high)
            );

            unsafe {
                // SAFETY: We need to ensure that the iterator has an exact length and we have.
                Rc::from_iter_exact(self, low)
            }
        } else {
            // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
            // length exceeding `usize::MAX`.
            // The default implementation would collect into a vec which would panic.
            // Thus we panic here immediately without invoking `Vec` code.
            panic!("capacity overflow");
        }
    }
}

/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak`
/// pointer, which returns an [`Option`]`<`[`Rc`]`<T>>`.
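///
/// For example (a brief sketch of the upgrade round trip):
///
/// ```
/// use std::rc::Rc;
///
/// let strong = Rc::new(5);
/// let weak = Rc::downgrade(&strong);
/// assert_eq!(weak.upgrade().as_deref(), Some(&5));
///
/// // Once the last strong pointer is gone, `upgrade` returns `None`.
/// drop(strong);
/// assert!(weak.upgrade().is_none());
/// ```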
/// /// Since a `Weak` reference does not count towards ownership, it will not /// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no /// guarantees about the value still being present. Thus it may return [`None`] /// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation /// itself (the backing store) from being deallocated. /// /// A `Weak` pointer is useful for keeping a temporary reference to the allocation /// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to /// prevent circular references between [`Rc`] pointers, since mutual owning references /// would never allow either [`Rc`] to be dropped. For example, a tree could /// have strong [`Rc`] pointers from parent nodes to children, and `Weak` /// pointers from children back to their parents. /// /// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`]. /// /// [`upgrade`]: Weak::upgrade #[stable(feature = "rc_weak", since = "1.4.0")] pub struct Weak { // This is a `NonNull` to allow optimizing the size of this type in enums, // but it is not necessarily a valid pointer. // `Weak::new` sets this to `usize::MAX` so that it doesn’t need // to allocate space on the heap. That's not a value a real pointer // will ever have because RcBox has alignment at least 2. // This is only possible when `T: Sized`; unsized `T` never dangle. ptr: NonNull>, } #[stable(feature = "rc_weak", since = "1.4.0")] impl !marker::Send for Weak {} #[stable(feature = "rc_weak", since = "1.4.0")] impl !marker::Sync for Weak {} #[unstable(feature = "coerce_unsized", issue = "27732")] impl, U: ?Sized> CoerceUnsized> for Weak {} #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl, U: ?Sized> DispatchFromDyn> for Weak {} impl Weak { /// Constructs a new `Weak`, without allocating any memory. /// Calling [`upgrade`] on the return value always gives [`None`]. /// /// [`upgrade`]: Weak::upgrade /// /// # Examples /// /// ``` /// use std::rc::Weak; /// /// let empty: Weak = Weak::new(); /// assert!(empty.upgrade().is_none()); /// ``` #[stable(feature = "downgraded_weak", since = "1.10.0")] pub fn new() -> Weak { Weak { ptr: NonNull::new(usize::MAX as *mut RcBox).expect("MAX is not 0") } } } pub(crate) fn is_dangling(ptr: *mut T) -> bool { let address = ptr as *mut () as usize; address == usize::MAX } /// Helper type to allow accessing the reference counts without /// making any assertions about the data field. struct WeakInner<'a> { weak: &'a Cell, strong: &'a Cell, } impl Weak { /// Returns a raw pointer to the object `T` pointed to by this `Weak`. /// /// The pointer is valid only if there are some strong references. The pointer may be dangling, /// unaligned or even [`null`] otherwise. /// /// # Examples /// /// ``` /// use std::rc::Rc; /// use std::ptr; /// /// let strong = Rc::new("hello".to_owned()); /// let weak = Rc::downgrade(&strong); /// // Both point to the same object /// assert!(ptr::eq(&*strong, weak.as_ptr())); /// // The strong here keeps it alive, so we can still access the object. /// assert_eq!("hello", unsafe { &*weak.as_ptr() }); /// /// drop(strong); /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to /// // undefined behaviour. 
/// // assert_eq!("hello", unsafe { &*weak.as_ptr() }); /// ``` /// /// [`null`]: core::ptr::null #[stable(feature = "rc_as_ptr", since = "1.45.0")] pub fn as_ptr(&self) -> *const T { let ptr: *mut RcBox = NonNull::as_ptr(self.ptr); if is_dangling(ptr) { // If the pointer is dangling, we return the sentinel directly. This cannot be // a valid payload address, as the payload is at least as aligned as RcBox (usize). ptr as *const T } else { // SAFETY: if is_dangling returns false, then the pointer is dereferencable. // The payload may be dropped at this point, and we have to maintain provenance, // so use raw pointer manipulation. unsafe { ptr::addr_of_mut!((*ptr).value) } } } /// Consumes the `Weak` and turns it into a raw pointer. /// /// This converts the weak pointer into a raw pointer, while still preserving the ownership of /// one weak reference (the weak count is not modified by this operation). It can be turned /// back into the `Weak` with [`from_raw`]. /// /// The same restrictions of accessing the target of the pointer as with /// [`as_ptr`] apply. /// /// # Examples /// /// ``` /// use std::rc::{Rc, Weak}; /// /// let strong = Rc::new("hello".to_owned()); /// let weak = Rc::downgrade(&strong); /// let raw = weak.into_raw(); /// /// assert_eq!(1, Rc::weak_count(&strong)); /// assert_eq!("hello", unsafe { &*raw }); /// /// drop(unsafe { Weak::from_raw(raw) }); /// assert_eq!(0, Rc::weak_count(&strong)); /// ``` /// /// [`from_raw`]: Weak::from_raw /// [`as_ptr`]: Weak::as_ptr #[stable(feature = "weak_into_raw", since = "1.45.0")] pub fn into_raw(self) -> *const T { let result = self.as_ptr(); mem::forget(self); result } /// Converts a raw pointer previously created by [`into_raw`] back into `Weak`. /// /// This can be used to safely get a strong reference (by calling [`upgrade`] /// later) or to deallocate the weak count by dropping the `Weak`. /// /// It takes ownership of one weak reference (with the exception of pointers created by [`new`], /// as these don't own anything; the method still works on them). /// /// # Safety /// /// The pointer must have originated from the [`into_raw`] and must still own its potential /// weak reference. /// /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this /// takes ownership of one weak reference currently represented as a raw pointer (the weak /// count is not modified by this operation) and therefore it must be paired with a previous /// call to [`into_raw`]. /// /// # Examples /// /// ``` /// use std::rc::{Rc, Weak}; /// /// let strong = Rc::new("hello".to_owned()); /// /// let raw_1 = Rc::downgrade(&strong).into_raw(); /// let raw_2 = Rc::downgrade(&strong).into_raw(); /// /// assert_eq!(2, Rc::weak_count(&strong)); /// /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap()); /// assert_eq!(1, Rc::weak_count(&strong)); /// /// drop(strong); /// /// // Decrement the last weak count. /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none()); /// ``` /// /// [`into_raw`]: Weak::into_raw /// [`upgrade`]: Weak::upgrade /// [`new`]: Weak::new #[stable(feature = "weak_into_raw", since = "1.45.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { // See Weak::as_ptr for context on how the input pointer is derived. let ptr = if is_dangling(ptr as *mut T) { // This is a dangling Weak. ptr as *mut RcBox } else { // Otherwise, we're guaranteed the pointer came from a nondangling Weak. // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T. 
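            // (Added note: the `is_dangling` check above is what makes this sound;
            // `data_offset` reads the value's alignment via `align_of_val_raw`,
            // which would be meaningless for the `usize::MAX` sentinel of a
            // dangling `Weak`.)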
let offset = unsafe { data_offset(ptr) }; // Thus, we reverse the offset to get the whole RcBox. // SAFETY: the pointer originated from a Weak, so this offset is safe. unsafe { (ptr as *mut RcBox).set_ptr_value((ptr as *mut u8).offset(-offset)) } }; // SAFETY: we now have recovered the original Weak pointer, so can create the Weak. Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } } } /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying /// dropping of the inner value if successful. /// /// Returns [`None`] if the inner value has since been dropped. /// /// # Examples /// /// ``` /// use std::rc::Rc; /// /// let five = Rc::new(5); /// /// let weak_five = Rc::downgrade(&five); /// /// let strong_five: Option> = weak_five.upgrade(); /// assert!(strong_five.is_some()); /// /// // Destroy all strong pointers. /// drop(strong_five); /// drop(five); /// /// assert!(weak_five.upgrade().is_none()); /// ``` #[stable(feature = "rc_weak", since = "1.4.0")] pub fn upgrade(&self) -> Option> { let inner = self.inner()?; if inner.strong() == 0 { None } else { inner.inc_strong(); Some(Rc::from_inner(self.ptr)) } } /// Gets the number of strong (`Rc`) pointers pointing to this allocation. /// /// If `self` was created using [`Weak::new`], this will return 0. #[stable(feature = "weak_counts", since = "1.41.0")] pub fn strong_count(&self) -> usize { if let Some(inner) = self.inner() { inner.strong() } else { 0 } } /// Gets the number of `Weak` pointers pointing to this allocation. /// /// If no strong pointers remain, this will return zero. #[stable(feature = "weak_counts", since = "1.41.0")] pub fn weak_count(&self) -> usize { self.inner() .map(|inner| { if inner.strong() > 0 { inner.weak() - 1 // subtract the implicit weak ptr } else { 0 } }) .unwrap_or(0) } /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`, /// (i.e., when this `Weak` was created by `Weak::new`). #[inline] fn inner(&self) -> Option> { if is_dangling(self.ptr.as_ptr()) { None } else { // We are careful to *not* create a reference covering the "data" field, as // the field may be mutated concurrently (for example, if the last `Rc` // is dropped, the data field will be dropped in-place). Some(unsafe { let ptr = self.ptr.as_ptr(); WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak } }) } } /// Returns `true` if the two `Weak`s point to the same allocation (similar to /// [`ptr::eq`]), or if both don't point to any allocation /// (because they were created with `Weak::new()`). /// /// # Notes /// /// Since this compares pointers it means that `Weak::new()` will equal each /// other, even though they don't point to any allocation. /// /// # Examples /// /// ``` /// use std::rc::Rc; /// /// let first_rc = Rc::new(5); /// let first = Rc::downgrade(&first_rc); /// let second = Rc::downgrade(&first_rc); /// /// assert!(first.ptr_eq(&second)); /// /// let third_rc = Rc::new(5); /// let third = Rc::downgrade(&third_rc); /// /// assert!(!first.ptr_eq(&third)); /// ``` /// /// Comparing `Weak::new`. 
/// /// ``` /// use std::rc::{Rc, Weak}; /// /// let first = Weak::new(); /// let second = Weak::new(); /// assert!(first.ptr_eq(&second)); /// /// let third_rc = Rc::new(()); /// let third = Rc::downgrade(&third_rc); /// assert!(!first.ptr_eq(&third)); /// ``` /// /// [`ptr::eq`]: core::ptr::eq #[inline] #[stable(feature = "weak_ptr_eq", since = "1.39.0")] pub fn ptr_eq(&self, other: &Self) -> bool { self.ptr.as_ptr() == other.ptr.as_ptr() } } #[stable(feature = "rc_weak", since = "1.4.0")] impl Drop for Weak { /// Drops the `Weak` pointer. /// /// # Examples /// /// ``` /// use std::rc::{Rc, Weak}; /// /// struct Foo; /// /// impl Drop for Foo { /// fn drop(&mut self) { /// println!("dropped!"); /// } /// } /// /// let foo = Rc::new(Foo); /// let weak_foo = Rc::downgrade(&foo); /// let other_weak_foo = Weak::clone(&weak_foo); /// /// drop(weak_foo); // Doesn't print anything /// drop(foo); // Prints "dropped!" /// /// assert!(other_weak_foo.upgrade().is_none()); /// ``` fn drop(&mut self) { let inner = if let Some(inner) = self.inner() { inner } else { return }; inner.dec_weak(); // the weak count starts at 1, and will only go to zero if all // the strong pointers have disappeared. if inner.weak() == 0 { unsafe { Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())); } } } } #[stable(feature = "rc_weak", since = "1.4.0")] impl Clone for Weak { /// Makes a clone of the `Weak` pointer that points to the same allocation. /// /// # Examples /// /// ``` /// use std::rc::{Rc, Weak}; /// /// let weak_five = Rc::downgrade(&Rc::new(5)); /// /// let _ = Weak::clone(&weak_five); /// ``` #[inline] fn clone(&self) -> Weak { if let Some(inner) = self.inner() { inner.inc_weak() } Weak { ptr: self.ptr } } } #[stable(feature = "rc_weak", since = "1.4.0")] impl fmt::Debug for Weak { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "(Weak)") } } #[stable(feature = "downgraded_weak", since = "1.10.0")] impl Default for Weak { /// Constructs a new `Weak`, without allocating any memory. /// Calling [`upgrade`] on the return value always gives [`None`]. /// /// [`None`]: Option /// [`upgrade`]: Weak::upgrade /// /// # Examples /// /// ``` /// use std::rc::Weak; /// /// let empty: Weak = Default::default(); /// assert!(empty.upgrade().is_none()); /// ``` fn default() -> Weak { Weak::new() } } // NOTE: We checked_add here to deal with mem::forget safely. In particular // if you mem::forget Rcs (or Weaks), the ref-count can overflow, and then // you can free the allocation while outstanding Rcs (or Weaks) exist. // We abort because this is such a degenerate scenario that we don't care about // what happens -- no real program should ever experience this. // // This should have negligible overhead since you don't actually need to // clone these much in Rust thanks to ownership and move-semantics. #[doc(hidden)] trait RcInnerPtr { fn weak_ref(&self) -> &Cell; fn strong_ref(&self) -> &Cell; #[inline] fn strong(&self) -> usize { self.strong_ref().get() } #[inline] fn inc_strong(&self) { let strong = self.strong(); // We want to abort on overflow instead of dropping the value. // The reference count will never be zero when this is called; // nevertheless, we insert an abort here to hint LLVM at // an otherwise missed optimization. 
if strong == 0 || strong == usize::MAX { abort(); } self.strong_ref().set(strong + 1); } #[inline] fn dec_strong(&self) { self.strong_ref().set(self.strong() - 1); } #[inline] fn weak(&self) -> usize { self.weak_ref().get() } #[inline] fn inc_weak(&self) { let weak = self.weak(); // We want to abort on overflow instead of dropping the value. // The reference count will never be zero when this is called; // nevertheless, we insert an abort here to hint LLVM at // an otherwise missed optimization. if weak == 0 || weak == usize::MAX { abort(); } self.weak_ref().set(weak + 1); } #[inline] fn dec_weak(&self) { self.weak_ref().set(self.weak() - 1); } } impl RcInnerPtr for RcBox { #[inline(always)] fn weak_ref(&self) -> &Cell { &self.weak } #[inline(always)] fn strong_ref(&self) -> &Cell { &self.strong } } impl<'a> RcInnerPtr for WeakInner<'a> { #[inline(always)] fn weak_ref(&self) -> &Cell { self.weak } #[inline(always)] fn strong_ref(&self) -> &Cell { self.strong } } #[stable(feature = "rust1", since = "1.0.0")] impl borrow::Borrow for Rc { fn borrow(&self) -> &T { &**self } } #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")] impl AsRef for Rc { fn as_ref(&self) -> &T { &**self } } #[stable(feature = "pin", since = "1.33.0")] impl Unpin for Rc {} /// Get the offset within an `RcBox` for the payload behind a pointer. /// /// # Safety /// /// The pointer must point to (and have valid metadata for) a previously /// valid instance of T, but the T is allowed to be dropped. unsafe fn data_offset(ptr: *const T) -> isize { // Align the unsized value to the end of the RcBox. // Because RcBox is repr(C), it will always be the last field in memory. // SAFETY: since the only unsized types possible are slices, trait objects, // and extern types, the input safety requirement is currently enough to // satisfy the requirements of align_of_val_raw; this is an implementation // detail of the language that may not be relied upon outside of std. unsafe { data_offset_align(align_of_val_raw(ptr)) } } #[inline] fn data_offset_align(align: usize) -> isize { let layout = Layout::new::>(); (layout.size() + layout.padding_needed_for(align)) as isize } //! A dynamically-sized view into a contiguous sequence, `[T]`. //! //! *[See also the slice primitive type](slice).* //! //! Slices are a view into a block of memory represented as a pointer and a //! length. //! //! ``` //! // slicing a Vec //! let vec = vec![1, 2, 3]; //! let int_slice = &vec[..]; //! // coercing an array to a slice //! let str_slice: &[&str] = &["one", "two", "three"]; //! ``` //! //! Slices are either mutable or shared. The shared slice type is `&[T]`, //! while the mutable slice type is `&mut [T]`, where `T` represents the element //! type. For example, you can mutate the block of memory that a mutable slice //! points to: //! //! ``` //! let x = &mut [1, 2, 3]; //! x[1] = 7; //! assert_eq!(x, &[1, 7, 3]); //! ``` //! //! Here are some of the things this module contains: //! //! ## Structs //! //! There are several structs that are useful for slices, such as [`Iter`], which //! represents iteration over a slice. //! //! ## Trait Implementations //! //! There are several implementations of common traits for slices. Some examples //! include: //! //! * [`Clone`] //! * [`Eq`], [`Ord`] - for slices whose element type are [`Eq`] or [`Ord`]. //! * [`Hash`] - for slices whose element type is [`Hash`]. //! //! ## Iteration //! //! The slices implement `IntoIterator`. The iterator yields references to the //! slice elements. //! //! ``` //! 
//! let numbers = &[0, 1, 2];
//! for n in numbers {
//!     println!("{} is a number!", n);
//! }
//! ```
//!
//! The mutable slice yields mutable references to the elements:
//!
//! ```
//! let mut scores = [7, 8, 9];
//! for score in &mut scores[..] {
//!     *score += 1;
//! }
//! ```
//!
//! This iterator yields mutable references to the slice's elements, so while
//! the element type of the slice is `i32`, the element type of the iterator is
//! `&mut i32`.
//!
//! * [`.iter`] and [`.iter_mut`] are the explicit methods to return the default
//!   iterators.
//! * Further methods that return iterators are [`.split`], [`.splitn`],
//!   [`.chunks`], [`.windows`] and more.
//!
//! [`Hash`]: core::hash::Hash
//! [`.iter`]: slice::iter
//! [`.iter_mut`]: slice::iter_mut
//! [`.split`]: slice::split
//! [`.splitn`]: slice::splitn
//! [`.chunks`]: slice::chunks
//! [`.windows`]: slice::windows
#![stable(feature = "rust1", since = "1.0.0")]
// Many of the usings in this module are only used in the test configuration.
// It's cleaner to just turn off the unused_imports warning than to fix them.
#![cfg_attr(test, allow(unused_imports, dead_code))]

use core::borrow::{Borrow, BorrowMut};
use core::cmp::Ordering::{self, Less};
use core::mem::{self, size_of};
use core::ptr;

use crate::alloc::{Allocator, Global};
use crate::borrow::ToOwned;
use crate::boxed::Box;
use crate::vec::Vec;

#[unstable(feature = "slice_range", issue = "76393")]
pub use core::slice::range;
#[unstable(feature = "array_chunks", issue = "74985")]
pub use core::slice::ArrayChunks;
#[unstable(feature = "array_chunks", issue = "74985")]
pub use core::slice::ArrayChunksMut;
#[unstable(feature = "array_windows", issue = "75027")]
pub use core::slice::ArrayWindows;
#[stable(feature = "slice_get_slice", since = "1.28.0")]
pub use core::slice::SliceIndex;
#[stable(feature = "from_ref", since = "1.28.0")]
pub use core::slice::{from_mut, from_ref};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::slice::{from_raw_parts, from_raw_parts_mut};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::slice::{Chunks, Windows};
#[stable(feature = "chunks_exact", since = "1.31.0")]
pub use core::slice::{ChunksExact, ChunksExactMut};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::slice::{ChunksMut, Split, SplitMut};
#[unstable(feature = "slice_group_by", issue = "80552")]
pub use core::slice::{GroupBy, GroupByMut};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::slice::{Iter, IterMut};
#[stable(feature = "rchunks", since = "1.31.0")]
pub use core::slice::{RChunks, RChunksExact, RChunksExactMut, RChunksMut};
#[stable(feature = "slice_rsplit", since = "1.27.0")]
pub use core::slice::{RSplit, RSplitMut};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::slice::{RSplitN, RSplitNMut, SplitN, SplitNMut};

////////////////////////////////////////////////////////////////////////////////
// Basic slice extension methods
////////////////////////////////////////////////////////////////////////////////

// HACK(japaric) needed for the implementation of `vec!` macro during testing
// N.B., see the `hack` module in this file for more details.
#[cfg(test)]
pub use hack::into_vec;

// HACK(japaric) needed for the implementation of `Vec::clone` during testing
// N.B., see the `hack` module in this file for more details.
#[cfg(test)] pub use hack::to_vec; // HACK(japaric): With cfg(test) `impl [T]` is not available, these three // functions are actually methods that are in `impl [T]` but not in // `core::slice::SliceExt` - we need to supply these functions for the // `test_permutations` test mod hack { use core::alloc::Allocator; use crate::boxed::Box; use crate::vec::Vec; // We shouldn't add inline attribute to this since this is used in // `vec!` macro mostly and causes perf regression. See #71204 for // discussion and perf results. pub fn into_vec(b: Box<[T], A>) -> Vec { unsafe { let len = b.len(); let (b, alloc) = Box::into_raw_with_allocator(b); Vec::from_raw_parts_in(b as *mut T, len, len, alloc) } } #[inline] pub fn to_vec(s: &[T], alloc: A) -> Vec { T::to_vec(s, alloc) } pub trait ConvertVec { fn to_vec(s: &[Self], alloc: A) -> Vec where Self: Sized; } impl ConvertVec for T { #[inline] default fn to_vec(s: &[Self], alloc: A) -> Vec { struct DropGuard<'a, T, A: Allocator> { vec: &'a mut Vec, num_init: usize, } impl<'a, T, A: Allocator> Drop for DropGuard<'a, T, A> { #[inline] fn drop(&mut self) { // SAFETY: // items were marked initialized in the loop below unsafe { self.vec.set_len(self.num_init); } } } let mut vec = Vec::with_capacity_in(s.len(), alloc); let mut guard = DropGuard { vec: &mut vec, num_init: 0 }; let slots = guard.vec.spare_capacity_mut(); // .take(slots.len()) is necessary for LLVM to remove bounds checks // and has better codegen than zip. for (i, b) in s.iter().enumerate().take(slots.len()) { guard.num_init = i; slots[i].write(b.clone()); } core::mem::forget(guard); // SAFETY: // the vec was allocated and initialized above to at least this length. unsafe { vec.set_len(s.len()); } vec } } impl ConvertVec for T { #[inline] fn to_vec(s: &[Self], alloc: A) -> Vec { let mut v = Vec::with_capacity_in(s.len(), alloc); // SAFETY: // allocated above with the capacity of `s`, and initialize to `s.len()` in // ptr::copy_to_non_overlapping below. unsafe { s.as_ptr().copy_to_nonoverlapping(v.as_mut_ptr(), s.len()); v.set_len(s.len()); } v } } } #[lang = "slice_alloc"] #[cfg(not(test))] impl [T] { /// Sorts the slice. /// /// This sort is stable (i.e., does not reorder equal elements) and *O*(*n* \* log(*n*)) worst-case. /// /// When applicable, unstable sorting is preferred because it is generally faster than stable /// sorting and it doesn't allocate auxiliary memory. /// See [`sort_unstable`](slice::sort_unstable). /// /// # Current implementation /// /// The current algorithm is an adaptive, iterative merge sort inspired by /// [timsort](https://en.wikipedia.org/wiki/Timsort). /// It is designed to be very fast in cases where the slice is nearly sorted, or consists of /// two or more sorted sequences concatenated one after another. /// /// Also, it allocates temporary storage half the size of `self`, but for short slices a /// non-allocating insertion sort is used instead. /// /// # Examples /// /// ``` /// let mut v = [-5, 4, 1, -3, 2]; /// /// v.sort(); /// assert!(v == [-5, -3, 1, 2, 4]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn sort(&mut self) where T: Ord, { merge_sort(self, |a, b| a.lt(b)); } /// Sorts the slice with a comparator function. /// /// This sort is stable (i.e., does not reorder equal elements) and *O*(*n* \* log(*n*)) worst-case. /// /// The comparator function must define a total ordering for the elements in the slice. If /// the ordering is not total, the order of the elements is unspecified. 
An order is a /// total order if it is (for all `a`, `b` and `c`): /// /// * total and antisymmetric: exactly one of `a < b`, `a == b` or `a > b` is true, and /// * transitive, `a < b` and `b < c` implies `a < c`. The same must hold for both `==` and `>`. /// /// For example, while [`f64`] doesn't implement [`Ord`] because `NaN != NaN`, we can use /// `partial_cmp` as our sort function when we know the slice doesn't contain a `NaN`. /// /// ``` /// let mut floats = [5f64, 4.0, 1.0, 3.0, 2.0]; /// floats.sort_by(|a, b| a.partial_cmp(b).unwrap()); /// assert_eq!(floats, [1.0, 2.0, 3.0, 4.0, 5.0]); /// ``` /// /// When applicable, unstable sorting is preferred because it is generally faster than stable /// sorting and it doesn't allocate auxiliary memory. /// See [`sort_unstable_by`](slice::sort_unstable_by). /// /// # Current implementation /// /// The current algorithm is an adaptive, iterative merge sort inspired by /// [timsort](https://en.wikipedia.org/wiki/Timsort). /// It is designed to be very fast in cases where the slice is nearly sorted, or consists of /// two or more sorted sequences concatenated one after another. /// /// Also, it allocates temporary storage half the size of `self`, but for short slices a /// non-allocating insertion sort is used instead. /// /// # Examples /// /// ``` /// let mut v = [5, 4, 1, 3, 2]; /// v.sort_by(|a, b| a.cmp(b)); /// assert!(v == [1, 2, 3, 4, 5]); /// /// // reverse sorting /// v.sort_by(|a, b| b.cmp(a)); /// assert!(v == [5, 4, 3, 2, 1]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn sort_by(&mut self, mut compare: F) where F: FnMut(&T, &T) -> Ordering, { merge_sort(self, |a, b| compare(a, b) == Less); } /// Sorts the slice with a key extraction function. /// /// This sort is stable (i.e., does not reorder equal elements) and *O*(*m* \* *n* \* log(*n*)) /// worst-case, where the key function is *O*(*m*). /// /// For expensive key functions (e.g. functions that are not simple property accesses or /// basic operations), [`sort_by_cached_key`](slice::sort_by_cached_key) is likely to be /// significantly faster, as it does not recompute element keys. /// /// When applicable, unstable sorting is preferred because it is generally faster than stable /// sorting and it doesn't allocate auxiliary memory. /// See [`sort_unstable_by_key`](slice::sort_unstable_by_key). /// /// # Current implementation /// /// The current algorithm is an adaptive, iterative merge sort inspired by /// [timsort](https://en.wikipedia.org/wiki/Timsort). /// It is designed to be very fast in cases where the slice is nearly sorted, or consists of /// two or more sorted sequences concatenated one after another. /// /// Also, it allocates temporary storage half the size of `self`, but for short slices a /// non-allocating insertion sort is used instead. /// /// # Examples /// /// ``` /// let mut v = [-5i32, 4, 1, -3, 2]; /// /// v.sort_by_key(|k| k.abs()); /// assert!(v == [1, 2, -3, 4, -5]); /// ``` #[stable(feature = "slice_sort_by_key", since = "1.7.0")] #[inline] pub fn sort_by_key(&mut self, mut f: F) where F: FnMut(&T) -> K, K: Ord, { merge_sort(self, |a, b| f(a).lt(&f(b))); } /// Sorts the slice with a key extraction function. /// /// During sorting, the key function is called only once per element. /// /// This sort is stable (i.e., does not reorder equal elements) and *O*(*m* \* *n* + *n* \* log(*n*)) /// worst-case, where the key function is *O*(*m*). 
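    /// (The *m* \* *n* term comes from evaluating the key once per element, and
    /// the *n* \* log(*n*) term from sorting the cached keys.)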
/// /// For simple key functions (e.g., functions that are property accesses or /// basic operations), [`sort_by_key`](slice::sort_by_key) is likely to be /// faster. /// /// # Current implementation /// /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters, /// which combines the fast average case of randomized quicksort with the fast worst case of /// heapsort, while achieving linear time on slices with certain patterns. It uses some /// randomization to avoid degenerate cases, but with a fixed seed to always provide /// deterministic behavior. /// /// In the worst case, the algorithm allocates temporary storage in a `Vec<(K, usize)>` the /// length of the slice. /// /// # Examples /// /// ``` /// let mut v = [-5i32, 4, 32, -3, 2]; /// /// v.sort_by_cached_key(|k| k.to_string()); /// assert!(v == [-3, -5, 2, 32, 4]); /// ``` /// /// [pdqsort]: https://github.com/orlp/pdqsort #[stable(feature = "slice_sort_by_cached_key", since = "1.34.0")] #[inline] pub fn sort_by_cached_key(&mut self, f: F) where F: FnMut(&T) -> K, K: Ord, { // Helper macro for indexing our vector by the smallest possible type, to reduce allocation. macro_rules! sort_by_key { ($t:ty, $slice:ident, $f:ident) => {{ let mut indices: Vec<_> = $slice.iter().map($f).enumerate().map(|(i, k)| (k, i as $t)).collect(); // The elements of `indices` are unique, as they are indexed, so any sort will be // stable with respect to the original slice. We use `sort_unstable` here because // it requires less memory allocation. indices.sort_unstable(); for i in 0..$slice.len() { let mut index = indices[i].1; while (index as usize) < i { index = indices[index as usize].1; } indices[i].1 = index; $slice.swap(i, index as usize); } }}; } let sz_u8 = mem::size_of::<(K, u8)>(); let sz_u16 = mem::size_of::<(K, u16)>(); let sz_u32 = mem::size_of::<(K, u32)>(); let sz_usize = mem::size_of::<(K, usize)>(); let len = self.len(); if len < 2 { return; } if sz_u8 < sz_u16 && len <= (u8::MAX as usize) { return sort_by_key!(u8, self, f); } if sz_u16 < sz_u32 && len <= (u16::MAX as usize) { return sort_by_key!(u16, self, f); } if sz_u32 < sz_usize && len <= (u32::MAX as usize) { return sort_by_key!(u32, self, f); } sort_by_key!(usize, self, f) } /// Copies `self` into a new `Vec`. /// /// # Examples /// /// ``` /// let s = [10, 40, 30]; /// let x = s.to_vec(); /// // Here, `s` and `x` can be modified independently. /// ``` #[rustc_conversion_suggestion] #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn to_vec(&self) -> Vec where T: Clone, { self.to_vec_in(Global) } /// Copies `self` into a new `Vec` with an allocator. /// /// # Examples /// /// ``` /// #![feature(allocator_api)] /// /// use std::alloc::System; /// /// let s = [10, 40, 30]; /// let x = s.to_vec_in(System); /// // Here, `s` and `x` can be modified independently. /// ``` #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub fn to_vec_in(&self, alloc: A) -> Vec where T: Clone, { // N.B., see the `hack` module in this file for more details. hack::to_vec(self, alloc) } /// Converts `self` into a vector without clones or allocation. /// /// The resulting vector can be converted back into a box via /// `Vec`'s `into_boxed_slice` method. /// /// # Examples /// /// ``` /// let s: Box<[i32]> = Box::new([10, 40, 30]); /// let x = s.into_vec(); /// // `s` cannot be used anymore because it has been converted into `x`. 
/// /// assert_eq!(x, vec![10, 40, 30]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn into_vec(self: Box) -> Vec { // N.B., see the `hack` module in this file for more details. hack::into_vec(self) } /// Creates a vector by repeating a slice `n` times. /// /// # Panics /// /// This function will panic if the capacity would overflow. /// /// # Examples /// /// Basic usage: /// /// ``` /// assert_eq!([1, 2].repeat(3), vec![1, 2, 1, 2, 1, 2]); /// ``` /// /// A panic upon overflow: /// /// ```should_panic /// // this will panic at runtime /// b"0123456789abcdef".repeat(usize::MAX); /// ``` #[stable(feature = "repeat_generic_slice", since = "1.40.0")] pub fn repeat(&self, n: usize) -> Vec where T: Copy, { if n == 0 { return Vec::new(); } // If `n` is larger than zero, it can be split as // `n = 2^expn + rem (2^expn > rem, expn >= 0, rem >= 0)`. // `2^expn` is the number represented by the leftmost '1' bit of `n`, // and `rem` is the remaining part of `n`. // Using `Vec` to access `set_len()`. let capacity = self.len().checked_mul(n).expect("capacity overflow"); let mut buf = Vec::with_capacity(capacity); // `2^expn` repetition is done by doubling `buf` `expn`-times. buf.extend(self); { let mut m = n >> 1; // If `m > 0`, there are remaining bits up to the leftmost '1'. while m > 0 { // `buf.extend(buf)`: unsafe { ptr::copy_nonoverlapping( buf.as_ptr(), (buf.as_mut_ptr() as *mut T).add(buf.len()), buf.len(), ); // `buf` has capacity of `self.len() * n`. let buf_len = buf.len(); buf.set_len(buf_len * 2); } m >>= 1; } } // `rem` (`= n - 2^expn`) repetition is done by copying // first `rem` repetitions from `buf` itself. let rem_len = capacity - buf.len(); // `self.len() * rem` if rem_len > 0 { // `buf.extend(buf[0 .. rem_len])`: unsafe { // This is non-overlapping since `2^expn > rem`. ptr::copy_nonoverlapping( buf.as_ptr(), (buf.as_mut_ptr() as *mut T).add(buf.len()), rem_len, ); // `buf.len() + rem_len` equals to `buf.capacity()` (`= self.len() * n`). buf.set_len(capacity); } } buf } /// Flattens a slice of `T` into a single value `Self::Output`. /// /// # Examples /// /// ``` /// assert_eq!(["hello", "world"].concat(), "helloworld"); /// assert_eq!([[1, 2], [3, 4]].concat(), [1, 2, 3, 4]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn concat(&self) -> >::Output where Self: Concat, { Concat::concat(self) } /// Flattens a slice of `T` into a single value `Self::Output`, placing a /// given separator between each. /// /// # Examples /// /// ``` /// assert_eq!(["hello", "world"].join(" "), "hello world"); /// assert_eq!([[1, 2], [3, 4]].join(&0), [1, 2, 0, 3, 4]); /// assert_eq!([[1, 2], [3, 4]].join(&[0, 0][..]), [1, 2, 0, 0, 3, 4]); /// ``` #[stable(feature = "rename_connect_to_join", since = "1.3.0")] pub fn join(&self, sep: Separator) -> >::Output where Self: Join, { Join::join(self, sep) } /// Flattens a slice of `T` into a single value `Self::Output`, placing a /// given separator between each. 
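    /// (This is the deprecated predecessor of [`join`](slice::join); both call
    /// the same [`Join`] implementation, as the impls further below show.)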
/// /// # Examples /// /// ``` /// # #![allow(deprecated)] /// assert_eq!(["hello", "world"].connect(" "), "hello world"); /// assert_eq!([[1, 2], [3, 4]].connect(&0), [1, 2, 0, 3, 4]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_deprecated(since = "1.3.0", reason = "renamed to join")] pub fn connect(&self, sep: Separator) -> >::Output where Self: Join, { Join::join(self, sep) } } #[lang = "slice_u8_alloc"] #[cfg(not(test))] impl [u8] { /// Returns a vector containing a copy of this slice where each byte /// is mapped to its ASCII upper case equivalent. /// /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z', /// but non-ASCII letters are unchanged. /// /// To uppercase the value in-place, use [`make_ascii_uppercase`]. /// /// [`make_ascii_uppercase`]: slice::make_ascii_uppercase #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")] #[inline] pub fn to_ascii_uppercase(&self) -> Vec { let mut me = self.to_vec(); me.make_ascii_uppercase(); me } /// Returns a vector containing a copy of this slice where each byte /// is mapped to its ASCII lower case equivalent. /// /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z', /// but non-ASCII letters are unchanged. /// /// To lowercase the value in-place, use [`make_ascii_lowercase`]. /// /// [`make_ascii_lowercase`]: slice::make_ascii_lowercase #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")] #[inline] pub fn to_ascii_lowercase(&self) -> Vec { let mut me = self.to_vec(); me.make_ascii_lowercase(); me } } //////////////////////////////////////////////////////////////////////////////// // Extension traits for slices over specific kinds of data //////////////////////////////////////////////////////////////////////////////// /// Helper trait for [`[T]::concat`](slice::concat). /// /// Note: the `Item` type parameter is not used in this trait, /// but it allows impls to be more generic. 
/// Without it, we get this error: /// /// ```error /// error[E0207]: the type parameter `T` is not constrained by the impl trait, self type, or predica /// --> src/liballoc/slice.rs:608:6 /// | /// 608 | impl> Concat for [V] { /// | ^ unconstrained type parameter /// ``` /// /// This is because there could exist `V` types with multiple `Borrow<[_]>` impls, /// such that multiple `T` types would apply: /// /// ``` /// # #[allow(dead_code)] /// pub struct Foo(Vec, Vec); /// /// impl std::borrow::Borrow<[u32]> for Foo { /// fn borrow(&self) -> &[u32] { &self.0 } /// } /// /// impl std::borrow::Borrow<[String]> for Foo { /// fn borrow(&self) -> &[String] { &self.1 } /// } /// ``` #[unstable(feature = "slice_concat_trait", issue = "27747")] pub trait Concat { #[unstable(feature = "slice_concat_trait", issue = "27747")] /// The resulting type after concatenation type Output; /// Implementation of [`[T]::concat`](slice::concat) #[unstable(feature = "slice_concat_trait", issue = "27747")] fn concat(slice: &Self) -> Self::Output; } /// Helper trait for [`[T]::join`](slice::join) #[unstable(feature = "slice_concat_trait", issue = "27747")] pub trait Join { #[unstable(feature = "slice_concat_trait", issue = "27747")] /// The resulting type after concatenation type Output; /// Implementation of [`[T]::join`](slice::join) #[unstable(feature = "slice_concat_trait", issue = "27747")] fn join(slice: &Self, sep: Separator) -> Self::Output; } #[unstable(feature = "slice_concat_ext", issue = "27747")] impl> Concat for [V] { type Output = Vec; fn concat(slice: &Self) -> Vec { let size = slice.iter().map(|slice| slice.borrow().len()).sum(); let mut result = Vec::with_capacity(size); for v in slice { result.extend_from_slice(v.borrow()) } result } } #[unstable(feature = "slice_concat_ext", issue = "27747")] impl> Join<&T> for [V] { type Output = Vec; fn join(slice: &Self, sep: &T) -> Vec { let mut iter = slice.iter(); let first = match iter.next() { Some(first) => first, None => return vec![], }; let size = slice.iter().map(|v| v.borrow().len()).sum::() + slice.len() - 1; let mut result = Vec::with_capacity(size); result.extend_from_slice(first.borrow()); for v in iter { result.push(sep.clone()); result.extend_from_slice(v.borrow()) } result } } #[unstable(feature = "slice_concat_ext", issue = "27747")] impl> Join<&[T]> for [V] { type Output = Vec; fn join(slice: &Self, sep: &[T]) -> Vec { let mut iter = slice.iter(); let first = match iter.next() { Some(first) => first, None => return vec![], }; let size = slice.iter().map(|v| v.borrow().len()).sum::() + sep.len() * (slice.len() - 1); let mut result = Vec::with_capacity(size); result.extend_from_slice(first.borrow()); for v in iter { result.extend_from_slice(sep); result.extend_from_slice(v.borrow()) } result } } //////////////////////////////////////////////////////////////////////////////// // Standard trait implementations for slices //////////////////////////////////////////////////////////////////////////////// #[stable(feature = "rust1", since = "1.0.0")] impl Borrow<[T]> for Vec { fn borrow(&self) -> &[T] { &self[..] } } #[stable(feature = "rust1", since = "1.0.0")] impl BorrowMut<[T]> for Vec { fn borrow_mut(&mut self) -> &mut [T] { &mut self[..] 
} } #[stable(feature = "rust1", since = "1.0.0")] impl ToOwned for [T] { type Owned = Vec; #[cfg(not(test))] fn to_owned(&self) -> Vec { self.to_vec() } #[cfg(test)] fn to_owned(&self) -> Vec { hack::to_vec(self, Global) } fn clone_into(&self, target: &mut Vec) { // drop anything in target that will not be overwritten target.truncate(self.len()); // target.len <= self.len due to the truncate above, so the // slices here are always in-bounds. let (init, tail) = self.split_at(target.len()); // reuse the contained values' allocations/resources. target.clone_from_slice(init); target.extend_from_slice(tail); } } //////////////////////////////////////////////////////////////////////////////// // Sorting //////////////////////////////////////////////////////////////////////////////// /// Inserts `v[0]` into pre-sorted sequence `v[1..]` so that whole `v[..]` becomes sorted. /// /// This is the integral subroutine of insertion sort. fn insert_head(v: &mut [T], is_less: &mut F) where F: FnMut(&T, &T) -> bool, { if v.len() >= 2 && is_less(&v[1], &v[0]) { unsafe { // There are three ways to implement insertion here: // // 1. Swap adjacent elements until the first one gets to its final destination. // However, this way we copy data around more than is necessary. If elements are big // structures (costly to copy), this method will be slow. // // 2. Iterate until the right place for the first element is found. Then shift the // elements succeeding it to make room for it and finally place it into the // remaining hole. This is a good method. // // 3. Copy the first element into a temporary variable. Iterate until the right place // for it is found. As we go along, copy every traversed element into the slot // preceding it. Finally, copy data from the temporary variable into the remaining // hole. This method is very good. Benchmarks demonstrated slightly better // performance than with the 2nd method. // // All methods were benchmarked, and the 3rd showed best results. So we chose that one. let mut tmp = mem::ManuallyDrop::new(ptr::read(&v[0])); // Intermediate state of the insertion process is always tracked by `hole`, which // serves two purposes: // 1. Protects integrity of `v` from panics in `is_less`. // 2. Fills the remaining hole in `v` in the end. // // Panic safety: // // If `is_less` panics at any point during the process, `hole` will get dropped and // fill the hole in `v` with `tmp`, thus ensuring that `v` still holds every object it // initially held exactly once. let mut hole = InsertionHole { src: &mut *tmp, dest: &mut v[1] }; ptr::copy_nonoverlapping(&v[1], &mut v[0], 1); for i in 2..v.len() { if !is_less(&v[i], &*tmp) { break; } ptr::copy_nonoverlapping(&v[i], &mut v[i - 1], 1); hole.dest = &mut v[i]; } // `hole` gets dropped and thus copies `tmp` into the remaining hole in `v`. } } // When dropped, copies from `src` into `dest`. struct InsertionHole { src: *mut T, dest: *mut T, } impl Drop for InsertionHole { fn drop(&mut self) { unsafe { ptr::copy_nonoverlapping(self.src, self.dest, 1); } } } } /// Merges non-decreasing runs `v[..mid]` and `v[mid..]` using `buf` as temporary storage, and /// stores the result into `v[..]`. /// /// # Safety /// /// The two slices must be non-empty and `mid` must be in bounds. Buffer `buf` must be long enough /// to hold a copy of the shorter slice. Also, `T` must not be a zero-sized type. 
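///
/// (Zero-sized types are ruled out because the bookkeeping below reconstructs
/// element counts by dividing pointer distances by `mem::size_of::<T>()`; see
/// `MergeHole::drop`. The caller, `merge_sort`, returns early for ZSTs.)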
unsafe fn merge(v: &mut [T], mid: usize, buf: *mut T, is_less: &mut F) where F: FnMut(&T, &T) -> bool, { let len = v.len(); let v = v.as_mut_ptr(); let (v_mid, v_end) = unsafe { (v.add(mid), v.add(len)) }; // The merge process first copies the shorter run into `buf`. Then it traces the newly copied // run and the longer run forwards (or backwards), comparing their next unconsumed elements and // copying the lesser (or greater) one into `v`. // // As soon as the shorter run is fully consumed, the process is done. If the longer run gets // consumed first, then we must copy whatever is left of the shorter run into the remaining // hole in `v`. // // Intermediate state of the process is always tracked by `hole`, which serves two purposes: // 1. Protects integrity of `v` from panics in `is_less`. // 2. Fills the remaining hole in `v` if the longer run gets consumed first. // // Panic safety: // // If `is_less` panics at any point during the process, `hole` will get dropped and fill the // hole in `v` with the unconsumed range in `buf`, thus ensuring that `v` still holds every // object it initially held exactly once. let mut hole; if mid <= len - mid { // The left run is shorter. unsafe { ptr::copy_nonoverlapping(v, buf, mid); hole = MergeHole { start: buf, end: buf.add(mid), dest: v }; } // Initially, these pointers point to the beginnings of their arrays. let left = &mut hole.start; let mut right = v_mid; let out = &mut hole.dest; while *left < hole.end && right < v_end { // Consume the lesser side. // If equal, prefer the left run to maintain stability. unsafe { let to_copy = if is_less(&*right, &**left) { get_and_increment(&mut right) } else { get_and_increment(left) }; ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1); } } } else { // The right run is shorter. unsafe { ptr::copy_nonoverlapping(v_mid, buf, len - mid); hole = MergeHole { start: buf, end: buf.add(len - mid), dest: v_mid }; } // Initially, these pointers point past the ends of their arrays. let left = &mut hole.dest; let right = &mut hole.end; let mut out = v_end; while v < *left && buf < *right { // Consume the greater side. // If equal, prefer the right run to maintain stability. unsafe { let to_copy = if is_less(&*right.offset(-1), &*left.offset(-1)) { decrement_and_get(left) } else { decrement_and_get(right) }; ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1); } } } // Finally, `hole` gets dropped. If the shorter run was not fully consumed, whatever remains of // it will now be copied into the hole in `v`. unsafe fn get_and_increment(ptr: &mut *mut T) -> *mut T { let old = *ptr; *ptr = unsafe { ptr.offset(1) }; old } unsafe fn decrement_and_get(ptr: &mut *mut T) -> *mut T { *ptr = unsafe { ptr.offset(-1) }; *ptr } // When dropped, copies the range `start..end` into `dest..`. struct MergeHole { start: *mut T, end: *mut T, dest: *mut T, } impl Drop for MergeHole { fn drop(&mut self) { // `T` is not a zero-sized type, so it's okay to divide by its size. let len = (self.end as usize - self.start as usize) / mem::size_of::(); unsafe { ptr::copy_nonoverlapping(self.start, self.dest, len); } } } } /// This merge sort borrows some (but not all) ideas from TimSort, which is described in detail /// [here](http://svn.python.org/projects/python/trunk/Objects/listsort.txt). /// /// The algorithm identifies strictly descending and non-descending subsequences, which are called /// natural runs. There is a stack of pending runs yet to be merged. 
Each newly found run is pushed /// onto the stack, and then some pairs of adjacent runs are merged until these two invariants are /// satisfied: /// /// 1. for every `i` in `1..runs.len()`: `runs[i - 1].len > runs[i].len` /// 2. for every `i` in `2..runs.len()`: `runs[i - 2].len > runs[i - 1].len + runs[i].len` /// /// The invariants ensure that the total running time is *O*(*n* \* log(*n*)) worst-case. fn merge_sort(v: &mut [T], mut is_less: F) where F: FnMut(&T, &T) -> bool, { // Slices of up to this length get sorted using insertion sort. const MAX_INSERTION: usize = 20; // Very short runs are extended using insertion sort to span at least this many elements. const MIN_RUN: usize = 10; // Sorting has no meaningful behavior on zero-sized types. if size_of::() == 0 { return; } let len = v.len(); // Short arrays get sorted in-place via insertion sort to avoid allocations. if len <= MAX_INSERTION { if len >= 2 { for i in (0..len - 1).rev() { insert_head(&mut v[i..], &mut is_less); } } return; } // Allocate a buffer to use as scratch memory. We keep the length 0 so we can keep in it // shallow copies of the contents of `v` without risking the dtors running on copies if // `is_less` panics. When merging two sorted runs, this buffer holds a copy of the shorter run, // which will always have length at most `len / 2`. let mut buf = Vec::with_capacity(len / 2); // In order to identify natural runs in `v`, we traverse it backwards. That might seem like a // strange decision, but consider the fact that merges more often go in the opposite direction // (forwards). According to benchmarks, merging forwards is slightly faster than merging // backwards. To conclude, identifying runs by traversing backwards improves performance. let mut runs = vec![]; let mut end = len; while end > 0 { // Find the next natural run, and reverse it if it's strictly descending. let mut start = end - 1; if start > 0 { start -= 1; unsafe { if is_less(v.get_unchecked(start + 1), v.get_unchecked(start)) { while start > 0 && is_less(v.get_unchecked(start), v.get_unchecked(start - 1)) { start -= 1; } v[start..end].reverse(); } else { while start > 0 && !is_less(v.get_unchecked(start), v.get_unchecked(start - 1)) { start -= 1; } } } } // Insert some more elements into the run if it's too short. Insertion sort is faster than // merge sort on short sequences, so this significantly improves performance. while start > 0 && end - start < MIN_RUN { start -= 1; insert_head(&mut v[start..end], &mut is_less); } // Push this run onto the stack. runs.push(Run { start, len: end - start }); end = start; // Merge some pairs of adjacent runs to satisfy the invariants. while let Some(r) = collapse(&runs) { let left = runs[r + 1]; let right = runs[r]; unsafe { merge( &mut v[left.start..right.start + right.len], left.len, buf.as_mut_ptr(), &mut is_less, ); } runs[r] = Run { start: left.start, len: left.len + right.len }; runs.remove(r + 1); } } // Finally, exactly one run must remain in the stack. debug_assert!(runs.len() == 1 && runs[0].start == 0 && runs[0].len == len); // Examines the stack of runs and identifies the next pair of runs to merge. More specifically, // if `Some(r)` is returned, that means `runs[r]` and `runs[r + 1]` must be merged next. If the // algorithm should continue building a new run instead, `None` is returned. 
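    // A worked example (illustrative, not from the original comments): with run
    // lengths `[30, 20, 10]` on the stack, invariant 2 fails (30 <= 20 + 10), so
    // `collapse` returns `Some(1)`; merging leaves `[30, 30]`, which now violates
    // invariant 1 and forces one more merge into a single run of length 60.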
// // TimSort is infamous for its buggy implementations, as described here: // http://envisage-project.eu/timsort-specification-and-verification/ // // The gist of the story is: we must enforce the invariants on the top four runs on the stack. // Enforcing them on just top three is not sufficient to ensure that the invariants will still // hold for *all* runs in the stack. // // This function correctly checks invariants for the top four runs. Additionally, if the top // run starts at index 0, it will always demand a merge operation until the stack is fully // collapsed, in order to complete the sort. #[inline] fn collapse(runs: &[Run]) -> Option { let n = runs.len(); if n >= 2 && (runs[n - 1].start == 0 || runs[n - 2].len <= runs[n - 1].len || (n >= 3 && runs[n - 3].len <= runs[n - 2].len + runs[n - 1].len) || (n >= 4 && runs[n - 4].len <= runs[n - 3].len + runs[n - 2].len)) { if n >= 3 && runs[n - 3].len < runs[n - 1].len { Some(n - 3) } else { Some(n - 2) } } else { None } } #[derive(Clone, Copy)] struct Run { start: usize, len: usize, } } #![stable(feature = "wake_trait", since = "1.51.0")] //! Types and Traits for working with asynchronous tasks. use core::mem::ManuallyDrop; use core::task::{RawWaker, RawWakerVTable, Waker}; use crate::sync::Arc; /// The implementation of waking a task on an executor. /// /// This trait can be used to create a [`Waker`]. An executor can define an /// implementation of this trait, and use that to construct a Waker to pass /// to the tasks that are executed on that executor. /// /// This trait is a memory-safe and ergonomic alternative to constructing a /// [`RawWaker`]. It supports the common executor design in which the data used /// to wake up a task is stored in an [`Arc`]. Some executors (especially /// those for embedded systems) cannot use this API, which is why [`RawWaker`] /// exists as an alternative for those systems. /// /// [arc]: ../../std/sync/struct.Arc.html /// /// # Examples /// /// A basic `block_on` function that takes a future and runs it to completion on /// the current thread. /// /// **Note:** This example trades correctness for simplicity. In order to prevent /// deadlocks, production-grade implementations will also need to handle /// intermediate calls to `thread::unpark` as well as nested invocations. /// /// ```rust /// use std::future::Future; /// use std::sync::Arc; /// use std::task::{Context, Poll, Wake}; /// use std::thread::{self, Thread}; /// /// /// A waker that wakes up the current thread when called. /// struct ThreadWaker(Thread); /// /// impl Wake for ThreadWaker { /// fn wake(self: Arc) { /// self.0.unpark(); /// } /// } /// /// /// Run a future to completion on the current thread. /// fn block_on(fut: impl Future) -> T { /// // Pin the future so it can be polled. /// let mut fut = Box::pin(fut); /// /// // Create a new context to be passed to the future. /// let t = thread::current(); /// let waker = Arc::new(ThreadWaker(t)).into(); /// let mut cx = Context::from_waker(&waker); /// /// // Run the future to completion. /// loop { /// match fut.as_mut().poll(&mut cx) { /// Poll::Ready(res) => return res, /// Poll::Pending => thread::park(), /// } /// } /// } /// /// block_on(async { /// println!("Hi from inside a future!"); /// }); /// ``` #[stable(feature = "wake_trait", since = "1.51.0")] pub trait Wake { /// Wake this task. #[stable(feature = "wake_trait", since = "1.51.0")] fn wake(self: Arc); /// Wake this task without consuming the waker. 
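    /// (For wakers created from an `Arc`, this is what [`Waker::wake_by_ref`]
    /// ends up invoking through the vtable built in `raw_waker` below.)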
/// /// If an executor supports a cheaper way to wake without consuming the /// waker, it should override this method. By default, it clones the /// [`Arc`] and calls [`wake`] on the clone. /// /// [`wake`]: Wake::wake #[stable(feature = "wake_trait", since = "1.51.0")] fn wake_by_ref(self: &Arc) { self.clone().wake(); } } #[stable(feature = "wake_trait", since = "1.51.0")] impl From> for Waker { /// Use a `Wake`-able type as a `Waker`. /// /// No heap allocations or atomic operations are used for this conversion. fn from(waker: Arc) -> Waker { // SAFETY: This is safe because raw_waker safely constructs // a RawWaker from Arc. unsafe { Waker::from_raw(raw_waker(waker)) } } } #[stable(feature = "wake_trait", since = "1.51.0")] impl From> for RawWaker { /// Use a `Wake`-able type as a `RawWaker`. /// /// No heap allocations or atomic operations are used for this conversion. fn from(waker: Arc) -> RawWaker { raw_waker(waker) } } // NB: This private function for constructing a RawWaker is used, rather than // inlining this into the `From> for RawWaker` impl, to ensure that // the safety of `From> for Waker` does not depend on the correct // trait dispatch - instead both impls call this function directly and // explicitly. #[inline(always)] fn raw_waker(waker: Arc) -> RawWaker { // Increment the reference count of the arc to clone it. unsafe fn clone_waker(waker: *const ()) -> RawWaker { unsafe { Arc::increment_strong_count(waker as *const W) }; RawWaker::new( waker as *const (), &RawWakerVTable::new(clone_waker::, wake::, wake_by_ref::, drop_waker::), ) } // Wake by value, moving the Arc into the Wake::wake function unsafe fn wake(waker: *const ()) { let waker = unsafe { Arc::from_raw(waker as *const W) }; ::wake(waker); } // Wake by reference, wrap the waker in ManuallyDrop to avoid dropping it unsafe fn wake_by_ref(waker: *const ()) { let waker = unsafe { ManuallyDrop::new(Arc::from_raw(waker as *const W)) }; ::wake_by_ref(&waker); } // Decrement the reference count of the Arc on drop unsafe fn drop_waker(waker: *const ()) { unsafe { Arc::decrement_strong_count(waker as *const W) }; } RawWaker::new( Arc::into_raw(waker) as *const (), &RawWakerVTable::new(clone_waker::, wake::, wake_by_ref::, drop_waker::), ) } #![stable(feature = "rust1", since = "1.0.0")] //! Thread-safe reference-counting pointers. //! //! See the [`Arc`][Arc] documentation for more details. use core::any::Any; use core::borrow; use core::cmp::Ordering; use core::convert::{From, TryFrom}; use core::fmt; use core::hash::{Hash, Hasher}; use core::hint; use core::intrinsics::abort; use core::iter; use core::marker::{PhantomData, Unpin, Unsize}; use core::mem::{self, align_of_val_raw, size_of_val}; use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver}; use core::pin::Pin; use core::ptr::{self, NonNull}; use core::slice::from_raw_parts_mut; use core::sync::atomic; use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst}; use crate::alloc::{ box_free, handle_alloc_error, AllocError, Allocator, Global, Layout, WriteCloneIntoRaw, }; use crate::borrow::{Cow, ToOwned}; use crate::boxed::Box; use crate::rc::is_dangling; use crate::string::String; use crate::vec::Vec; #[cfg(test)] mod tests; /// A soft limit on the amount of references that may be made to an `Arc`. /// /// Going above this limit will abort your program (although not /// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references. const MAX_REFCOUNT: usize = (isize::MAX) as usize; #[cfg(not(sanitize = "thread"))] macro_rules! 
acquire { ($x:expr) => { atomic::fence(Acquire) }; } // ThreadSanitizer does not support memory fences. To avoid false positive // reports in Arc / Weak implementation use atomic loads for synchronization // instead. #[cfg(sanitize = "thread")] macro_rules! acquire { ($x:expr) => { $x.load(Acquire) }; } /// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically /// Reference Counted'. /// /// The type `Arc` provides shared ownership of a value of type `T`, /// allocated in the heap. Invoking [`clone`][clone] on `Arc` produces /// a new `Arc` instance, which points to the same allocation on the heap as the /// source `Arc`, while increasing a reference count. When the last `Arc` /// pointer to a given allocation is destroyed, the value stored in that allocation (often /// referred to as "inner value") is also dropped. /// /// Shared references in Rust disallow mutation by default, and `Arc` is no /// exception: you cannot generally obtain a mutable reference to something /// inside an `Arc`. If you need to mutate through an `Arc`, use /// [`Mutex`][mutex], [`RwLock`][rwlock], or one of the [`Atomic`][atomic] /// types. /// /// ## Thread Safety /// /// Unlike [`Rc`], `Arc` uses atomic operations for its reference /// counting. This means that it is thread-safe. The disadvantage is that /// atomic operations are more expensive than ordinary memory accesses. If you /// are not sharing reference-counted allocations between threads, consider using /// [`Rc`] for lower overhead. [`Rc`] is a safe default, because the /// compiler will catch any attempt to send an [`Rc`] between threads. /// However, a library might choose `Arc` in order to give library consumers /// more flexibility. /// /// `Arc` will implement [`Send`] and [`Sync`] as long as the `T` implements /// [`Send`] and [`Sync`]. Why can't you put a non-thread-safe type `T` in an /// `Arc` to make it thread-safe? This may be a bit counter-intuitive at /// first: after all, isn't the point of `Arc` thread safety? The key is /// this: `Arc` makes it thread safe to have multiple ownership of the same /// data, but it doesn't add thread safety to its data. Consider /// `Arc<`[`RefCell`]`>`. [`RefCell`] isn't [`Sync`], and if `Arc` was always /// [`Send`], `Arc<`[`RefCell`]`>` would be as well. But then we'd have a problem: /// [`RefCell`] is not thread safe; it keeps track of the borrowing count using /// non-atomic operations. /// /// In the end, this means that you may need to pair `Arc` with some sort of /// [`std::sync`] type, usually [`Mutex`][mutex]. /// /// ## Breaking cycles with `Weak` /// /// The [`downgrade`][downgrade] method can be used to create a non-owning /// [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d /// to an `Arc`, but this will return [`None`] if the value stored in the allocation has /// already been dropped. In other words, `Weak` pointers do not keep the value /// inside the allocation alive; however, they *do* keep the allocation /// (the backing store for the value) alive. /// /// A cycle between `Arc` pointers will never be deallocated. For this reason, /// [`Weak`] is used to break cycles. For example, a tree could have /// strong `Arc` pointers from parent nodes to children, and [`Weak`] /// pointers from children back to their parents. /// /// # Cloning references /// /// Creating a new reference from an existing reference-counted pointer is done using the /// `Clone` trait implemented for [`Arc`][Arc] and [`Weak`][Weak]. 
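///
/// One way to observe this (an added sketch, not from the original docs) is to
/// watch [`Arc::strong_count`] as clones are created and dropped:
///
/// ```
/// use std::sync::Arc;
///
/// let a = Arc::new(5);
/// assert_eq!(Arc::strong_count(&a), 1);
/// let b = Arc::clone(&a);
/// assert_eq!(Arc::strong_count(&a), 2);
/// drop(b);
/// assert_eq!(Arc::strong_count(&a), 1);
/// ```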
/// /// ``` /// use std::sync::Arc; /// let foo = Arc::new(vec![1.0, 2.0, 3.0]); /// // The two syntaxes below are equivalent. /// let a = foo.clone(); /// let b = Arc::clone(&foo); /// // a, b, and foo are all Arcs that point to the same memory location /// ``` /// /// ## `Deref` behavior /// /// `Arc` automatically dereferences to `T` (via the [`Deref`][deref] trait), /// so you can call `T`'s methods on a value of type `Arc`. To avoid name /// clashes with `T`'s methods, the methods of `Arc` itself are associated /// functions, called using [fully qualified syntax]: /// /// ``` /// use std::sync::Arc; /// /// let my_arc = Arc::new(()); /// Arc::downgrade(&my_arc); /// ``` /// /// `Arc`'s implementations of traits like `Clone` may also be called using /// fully qualified syntax. Some people prefer to use fully qualified syntax, /// while others prefer using method-call syntax. /// /// ``` /// use std::sync::Arc; /// /// let arc = Arc::new(()); /// // Method-call syntax /// let arc2 = arc.clone(); /// // Fully qualified syntax /// let arc3 = Arc::clone(&arc); /// ``` /// /// [`Weak`][Weak] does not auto-dereference to `T`, because the inner value may have /// already been dropped. /// /// [`Rc`]: crate::rc::Rc /// [clone]: Clone::clone /// [mutex]: ../../std/sync/struct.Mutex.html /// [rwlock]: ../../std/sync/struct.RwLock.html /// [atomic]: core::sync::atomic /// [`Send`]: core::marker::Send /// [`Sync`]: core::marker::Sync /// [deref]: core::ops::Deref /// [downgrade]: Arc::downgrade /// [upgrade]: Weak::upgrade /// [`RefCell`]: core::cell::RefCell /// [`std::sync`]: ../../std/sync/index.html /// [`Arc::clone(&from)`]: Arc::clone /// [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name /// /// # Examples /// /// Sharing some immutable data between threads: /// // Note that we **do not** run these tests here. The windows builders get super // unhappy if a thread outlives the main thread and then exits at the same time // (something deadlocks) so we just avoid this entirely by not running these // tests. /// ```no_run /// use std::sync::Arc; /// use std::thread; /// /// let five = Arc::new(5); /// /// for _ in 0..10 { /// let five = Arc::clone(&five); /// /// thread::spawn(move || { /// println!("{:?}", five); /// }); /// } /// ``` /// /// Sharing a mutable [`AtomicUsize`]: /// /// [`AtomicUsize`]: core::sync::atomic::AtomicUsize /// /// ```no_run /// use std::sync::Arc; /// use std::sync::atomic::{AtomicUsize, Ordering}; /// use std::thread; /// /// let val = Arc::new(AtomicUsize::new(5)); /// /// for _ in 0..10 { /// let val = Arc::clone(&val); /// /// thread::spawn(move || { /// let v = val.fetch_add(1, Ordering::SeqCst); /// println!("{:?}", v); /// }); /// } /// ``` /// /// See the [`rc` documentation][rc_examples] for more examples of reference /// counting in general. 
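The "Breaking cycles with `Weak`" discussion above can be made concrete with the parent/child tree it describes. A minimal sketch (the `Node` type is illustrative): strong pointers run from parent to children, weak pointers run back, so dropping the parent frees the whole structure instead of leaking a cycle.

```rust
use std::sync::{Arc, Mutex, Weak};

// Illustrative node: strong edges parent -> child, weak edges child -> parent.
struct Node {
    parent: Mutex<Weak<Node>>,
    children: Mutex<Vec<Arc<Node>>>,
}

fn main() {
    let parent = Arc::new(Node {
        parent: Mutex::new(Weak::new()),
        children: Mutex::new(Vec::new()),
    });
    let child = Arc::new(Node {
        parent: Mutex::new(Arc::downgrade(&parent)),
        children: Mutex::new(Vec::new()),
    });
    parent.children.lock().unwrap().push(Arc::clone(&child));

    // While the parent is alive, the child can upgrade its back-pointer.
    assert!(child.parent.lock().unwrap().upgrade().is_some());

    drop(parent);
    // The strong parent -> child edge died with `parent`, and the weak
    // child -> parent edge cannot keep the parent alive: nothing leaks.
    assert!(child.parent.lock().unwrap().upgrade().is_none());
}
```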
///
/// [rc_examples]: crate::rc#examples
#[cfg_attr(not(test), rustc_diagnostic_item = "Arc")]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Arc<T: ?Sized> {
    ptr: NonNull<ArcInner<T>>,
    phantom: PhantomData<ArcInner<T>>,
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}

#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}

#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Arc<U>> for Arc<T> {}

impl<T: ?Sized> Arc<T> {
    fn from_inner(ptr: NonNull<ArcInner<T>>) -> Self {
        Self { ptr, phantom: PhantomData }
    }

    unsafe fn from_ptr(ptr: *mut ArcInner<T>) -> Self {
        unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
    }
}

/// `Weak` is a version of [`Arc`] that holds a non-owning reference to the
/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak`
/// pointer, which returns an [`Option`]`<`[`Arc`]`<T>>`.
///
/// Since a `Weak` reference does not count towards ownership, it will not
/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
/// guarantees about the value still being present. Thus it may return [`None`]
/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
/// itself (the backing store) from being deallocated.
///
/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
/// managed by [`Arc`] without preventing its inner value from being dropped. It is also used to
/// prevent circular references between [`Arc`] pointers, since mutual owning references
/// would never allow either [`Arc`] to be dropped. For example, a tree could
/// have strong [`Arc`] pointers from parent nodes to children, and `Weak`
/// pointers from children back to their parents.
///
/// The typical way to obtain a `Weak` pointer is to call [`Arc::downgrade`].
///
/// [`upgrade`]: Weak::upgrade
#[stable(feature = "arc_weak", since = "1.4.0")]
pub struct Weak<T: ?Sized> {
    // This is a `NonNull` to allow optimizing the size of this type in enums,
    // but it is not necessarily a valid pointer.
    // `Weak::new` sets this to `usize::MAX` so that it doesn't need
    // to allocate space on the heap. That's not a value a real pointer
    // will ever have because RcBox has alignment at least 2.
    // This is only possible when `T: Sized`; unsized `T` never dangle.
    ptr: NonNull<ArcInner<T>>,
}

#[stable(feature = "arc_weak", since = "1.4.0")]
unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
#[stable(feature = "arc_weak", since = "1.4.0")]
unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}

#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}

#[stable(feature = "arc_weak", since = "1.4.0")]
impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "(Weak)")
    }
}

// This is repr(C) to future-proof against possible field-reordering, which
// would interfere with otherwise safe [into|from]_raw() of transmutable
// inner types.
#[repr(C)]
struct ArcInner<T: ?Sized> {
    strong: atomic::AtomicUsize,

    // the value usize::MAX acts as a sentinel for temporarily "locking" the
    // ability to upgrade weak pointers or downgrade strong ones; this is used
    // to avoid races in `make_mut` and `get_mut`.
    weak: atomic::AtomicUsize,

    data: T,
}

unsafe impl<T: ?Sized + Sync + Send> Send for ArcInner<T> {}
unsafe impl<T: ?Sized + Sync + Send> Sync for ArcInner<T> {}

impl<T> Arc<T> {
    /// Constructs a new `Arc<T>`.
/// /// # Examples /// /// ``` /// use std::sync::Arc; /// /// let five = Arc::new(5); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(data: T) -> Arc { // Start the weak pointer count as 1 which is the weak pointer that's // held by all the strong pointers (kinda), see std/rc.rs for more info let x: Box<_> = box ArcInner { strong: atomic::AtomicUsize::new(1), weak: atomic::AtomicUsize::new(1), data, }; Self::from_inner(Box::leak(x).into()) } /// Constructs a new `Arc` using a weak reference to itself. Attempting /// to upgrade the weak reference before this function returns will result /// in a `None` value. However, the weak reference may be cloned freely and /// stored for use at a later time. /// /// # Examples /// ``` /// #![feature(arc_new_cyclic)] /// #![allow(dead_code)] /// /// use std::sync::{Arc, Weak}; /// /// struct Foo { /// me: Weak, /// } /// /// let foo = Arc::new_cyclic(|me| Foo { /// me: me.clone(), /// }); /// ``` #[inline] #[unstable(feature = "arc_new_cyclic", issue = "75861")] pub fn new_cyclic(data_fn: impl FnOnce(&Weak) -> T) -> Arc { // Construct the inner in the "uninitialized" state with a single // weak reference. let uninit_ptr: NonNull<_> = Box::leak(box ArcInner { strong: atomic::AtomicUsize::new(0), weak: atomic::AtomicUsize::new(1), data: mem::MaybeUninit::::uninit(), }) .into(); let init_ptr: NonNull> = uninit_ptr.cast(); let weak = Weak { ptr: init_ptr }; // It's important we don't give up ownership of the weak pointer, or // else the memory might be freed by the time `data_fn` returns. If // we really wanted to pass ownership, we could create an additional // weak pointer for ourselves, but this would result in additional // updates to the weak reference count which might not be necessary // otherwise. let data = data_fn(&weak); // Now we can properly initialize the inner value and turn our weak // reference into a strong reference. unsafe { let inner = init_ptr.as_ptr(); ptr::write(ptr::addr_of_mut!((*inner).data), data); // The above write to the data field must be visible to any threads which // observe a non-zero strong count. Therefore we need at least "Release" ordering // in order to synchronize with the `compare_exchange_weak` in `Weak::upgrade`. // // "Acquire" ordering is not required. When considering the possible behaviours // of `data_fn` we only need to look at what it could do with a reference to a // non-upgradeable `Weak`: // - It can *clone* the `Weak`, increasing the weak reference count. // - It can drop those clones, decreasing the weak reference count (but never to zero). // // These side effects do not impact us in any way, and no other side effects are // possible with safe code alone. let prev_value = (*inner).strong.fetch_add(1, Release); debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); } let strong = Arc::from_inner(init_ptr); // Strong references should collectively own a shared weak reference, // so don't run the destructor for our old weak reference. mem::forget(weak); strong } /// Constructs a new `Arc` with uninitialized contents. 
/// /// # Examples /// /// ``` /// #![feature(new_uninit)] /// #![feature(get_mut_unchecked)] /// /// use std::sync::Arc; /// /// let mut five = Arc::::new_uninit(); /// /// let five = unsafe { /// // Deferred initialization: /// Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5); /// /// five.assume_init() /// }; /// /// assert_eq!(*five, 5) /// ``` #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit() -> Arc> { unsafe { Arc::from_ptr(Arc::allocate_for_layout( Layout::new::(), |layout| Global.allocate(layout), |mem| mem as *mut ArcInner>, )) } } /// Constructs a new `Arc` with uninitialized contents, with the memory /// being filled with `0` bytes. /// /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage /// of this method. /// /// # Examples /// /// ``` /// #![feature(new_uninit)] /// /// use std::sync::Arc; /// /// let zero = Arc::::new_zeroed(); /// let zero = unsafe { zero.assume_init() }; /// /// assert_eq!(*zero, 0) /// ``` /// /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed() -> Arc> { unsafe { Arc::from_ptr(Arc::allocate_for_layout( Layout::new::(), |layout| Global.allocate_zeroed(layout), |mem| mem as *mut ArcInner>, )) } } /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then /// `data` will be pinned in memory and unable to be moved. #[stable(feature = "pin", since = "1.33.0")] pub fn pin(data: T) -> Pin> { unsafe { Pin::new_unchecked(Arc::new(data)) } } /// Constructs a new `Arc`, returning an error if allocation fails. /// /// # Examples /// /// ``` /// #![feature(allocator_api)] /// use std::sync::Arc; /// /// let five = Arc::try_new(5)?; /// # Ok::<(), std::alloc::AllocError>(()) /// ``` #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn try_new(data: T) -> Result, AllocError> { // Start the weak pointer count as 1 which is the weak pointer that's // held by all the strong pointers (kinda), see std/rc.rs for more info let x: Box<_> = Box::try_new(ArcInner { strong: atomic::AtomicUsize::new(1), weak: atomic::AtomicUsize::new(1), data, })?; Ok(Self::from_inner(Box::leak(x).into())) } /// Constructs a new `Arc` with uninitialized contents, returning an error /// if allocation fails. /// /// # Examples /// /// ``` /// #![feature(new_uninit, allocator_api)] /// #![feature(get_mut_unchecked)] /// /// use std::sync::Arc; /// /// let mut five = Arc::::try_new_uninit()?; /// /// let five = unsafe { /// // Deferred initialization: /// Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5); /// /// five.assume_init() /// }; /// /// assert_eq!(*five, 5); /// # Ok::<(), std::alloc::AllocError>(()) /// ``` #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_uninit() -> Result>, AllocError> { unsafe { Ok(Arc::from_ptr(Arc::try_allocate_for_layout( Layout::new::(), |layout| Global.allocate(layout), |mem| mem as *mut ArcInner>, )?)) } } /// Constructs a new `Arc` with uninitialized contents, with the memory /// being filled with `0` bytes, returning an error if allocation fails. /// /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage /// of this method. 
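`Arc::pin`, defined a few items up, ships without an example of its own; a brief sketch of what the returned `Pin<Arc<T>>` provides:

```rust
use std::pin::Pin;
use std::sync::Arc;

fn main() {
    // Arc::pin wraps the allocation in Pin, promising the value will
    // never be moved out of its heap slot.
    let pinned: Pin<Arc<String>> = Arc::pin(String::from("pinned"));
    // Deref still works, so read-only access is unchanged.
    assert_eq!(pinned.len(), 6);
}
```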
/// /// # Examples /// /// ``` /// #![feature(new_uninit, allocator_api)] /// /// use std::sync::Arc; /// /// let zero = Arc::::try_new_zeroed()?; /// let zero = unsafe { zero.assume_init() }; /// /// assert_eq!(*zero, 0); /// # Ok::<(), std::alloc::AllocError>(()) /// ``` /// /// [zeroed]: mem::MaybeUninit::zeroed #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_zeroed() -> Result>, AllocError> { unsafe { Ok(Arc::from_ptr(Arc::try_allocate_for_layout( Layout::new::(), |layout| Global.allocate_zeroed(layout), |mem| mem as *mut ArcInner>, )?)) } } /// Returns the inner value, if the `Arc` has exactly one strong reference. /// /// Otherwise, an [`Err`] is returned with the same `Arc` that was /// passed in. /// /// This will succeed even if there are outstanding weak references. /// /// # Examples /// /// ``` /// use std::sync::Arc; /// /// let x = Arc::new(3); /// assert_eq!(Arc::try_unwrap(x), Ok(3)); /// /// let x = Arc::new(4); /// let _y = Arc::clone(&x); /// assert_eq!(*Arc::try_unwrap(x).unwrap_err(), 4); /// ``` #[inline] #[stable(feature = "arc_unique", since = "1.4.0")] pub fn try_unwrap(this: Self) -> Result { if this.inner().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() { return Err(this); } acquire!(this.inner().strong); unsafe { let elem = ptr::read(&this.ptr.as_ref().data); // Make a weak pointer to clean up the implicit strong-weak reference let _weak = Weak { ptr: this.ptr }; mem::forget(this); Ok(elem) } } } impl Arc<[T]> { /// Constructs a new atomically reference-counted slice with uninitialized contents. /// /// # Examples /// /// ``` /// #![feature(new_uninit)] /// #![feature(get_mut_unchecked)] /// /// use std::sync::Arc; /// /// let mut values = Arc::<[u32]>::new_uninit_slice(3); /// /// let values = unsafe { /// // Deferred initialization: /// Arc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1); /// Arc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2); /// Arc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3); /// /// values.assume_init() /// }; /// /// assert_eq!(*values, [1, 2, 3]) /// ``` #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit_slice(len: usize) -> Arc<[mem::MaybeUninit]> { unsafe { Arc::from_ptr(Arc::allocate_for_slice(len)) } } /// Constructs a new atomically reference-counted slice with uninitialized contents, with the memory being /// filled with `0` bytes. /// /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and /// incorrect usage of this method. /// /// # Examples /// /// ``` /// #![feature(new_uninit)] /// /// use std::sync::Arc; /// /// let values = Arc::<[u32]>::new_zeroed_slice(3); /// let values = unsafe { values.assume_init() }; /// /// assert_eq!(*values, [0, 0, 0]) /// ``` /// /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed_slice(len: usize) -> Arc<[mem::MaybeUninit]> { unsafe { Arc::from_ptr(Arc::allocate_for_layout( Layout::array::(len).unwrap(), |layout| Global.allocate_zeroed(layout), |mem| { ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[mem::MaybeUninit]> }, )) } } } impl Arc> { /// Converts to `Arc`. /// /// # Safety /// /// As with [`MaybeUninit::assume_init`], /// it is up to the caller to guarantee that the inner value /// really is in an initialized state. /// Calling this when the content is not yet fully initialized /// causes immediate undefined behavior. 
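The `try_unwrap` documentation above notes that unwrapping succeeds even with outstanding weak references, since only the strong count must be exactly one. A short sketch of that corner:

```rust
use std::sync::Arc;

fn main() {
    // Only the strong count matters: one strong reference plus any
    // number of weak references still unwraps successfully.
    let x = Arc::new(String::from("unique"));
    let w = Arc::downgrade(&x);
    let s = Arc::try_unwrap(x).expect("only one strong reference");
    assert_eq!(s, "unique");
    // The value has been moved out, so the weak pointer is now dead.
    assert!(w.upgrade().is_none());
}
```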
/// /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init /// /// # Examples /// /// ``` /// #![feature(new_uninit)] /// #![feature(get_mut_unchecked)] /// /// use std::sync::Arc; /// /// let mut five = Arc::::new_uninit(); /// /// let five = unsafe { /// // Deferred initialization: /// Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5); /// /// five.assume_init() /// }; /// /// assert_eq!(*five, 5) /// ``` #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub unsafe fn assume_init(self) -> Arc { Arc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) } } impl Arc<[mem::MaybeUninit]> { /// Converts to `Arc<[T]>`. /// /// # Safety /// /// As with [`MaybeUninit::assume_init`], /// it is up to the caller to guarantee that the inner value /// really is in an initialized state. /// Calling this when the content is not yet fully initialized /// causes immediate undefined behavior. /// /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init /// /// # Examples /// /// ``` /// #![feature(new_uninit)] /// #![feature(get_mut_unchecked)] /// /// use std::sync::Arc; /// /// let mut values = Arc::<[u32]>::new_uninit_slice(3); /// /// let values = unsafe { /// // Deferred initialization: /// Arc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1); /// Arc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2); /// Arc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3); /// /// values.assume_init() /// }; /// /// assert_eq!(*values, [1, 2, 3]) /// ``` #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub unsafe fn assume_init(self) -> Arc<[T]> { unsafe { Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) } } } impl Arc { /// Consumes the `Arc`, returning the wrapped pointer. /// /// To avoid a memory leak the pointer must be converted back to an `Arc` using /// [`Arc::from_raw`]. /// /// # Examples /// /// ``` /// use std::sync::Arc; /// /// let x = Arc::new("hello".to_owned()); /// let x_ptr = Arc::into_raw(x); /// assert_eq!(unsafe { &*x_ptr }, "hello"); /// ``` #[stable(feature = "rc_raw", since = "1.17.0")] pub fn into_raw(this: Self) -> *const T { let ptr = Self::as_ptr(&this); mem::forget(this); ptr } /// Provides a raw pointer to the data. /// /// The counts are not affected in any way and the `Arc` is not consumed. The pointer is valid for /// as long as there are strong counts in the `Arc`. /// /// # Examples /// /// ``` /// use std::sync::Arc; /// /// let x = Arc::new("hello".to_owned()); /// let y = Arc::clone(&x); /// let x_ptr = Arc::as_ptr(&x); /// assert_eq!(x_ptr, Arc::as_ptr(&y)); /// assert_eq!(unsafe { &*x_ptr }, "hello"); /// ``` #[stable(feature = "rc_as_ptr", since = "1.45.0")] pub fn as_ptr(this: &Self) -> *const T { let ptr: *mut ArcInner = NonNull::as_ptr(this.ptr); // SAFETY: This cannot go through Deref::deref or RcBoxPtr::inner because // this is required to retain raw/mut provenance such that e.g. `get_mut` can // write through the pointer after the Rc is recovered through `from_raw`. unsafe { ptr::addr_of_mut!((*ptr).data) } } /// Constructs an `Arc` from a raw pointer. /// /// The raw pointer must have been previously returned by a call to /// [`Arc::into_raw`][into_raw] where `U` must have the same size and /// alignment as `T`. This is trivially true if `U` is `T`. /// Note that if `U` is not `T` but has the same size and alignment, this is /// basically like transmuting references of different types. 
See /// [`mem::transmute`][transmute] for more information on what /// restrictions apply in this case. /// /// The user of `from_raw` has to make sure a specific value of `T` is only /// dropped once. /// /// This function is unsafe because improper use may lead to memory unsafety, /// even if the returned `Arc` is never accessed. /// /// [into_raw]: Arc::into_raw /// [transmute]: core::mem::transmute /// /// # Examples /// /// ``` /// use std::sync::Arc; /// /// let x = Arc::new("hello".to_owned()); /// let x_ptr = Arc::into_raw(x); /// /// unsafe { /// // Convert back to an `Arc` to prevent leak. /// let x = Arc::from_raw(x_ptr); /// assert_eq!(&*x, "hello"); /// /// // Further calls to `Arc::from_raw(x_ptr)` would be memory-unsafe. /// } /// /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling! /// ``` #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { unsafe { let offset = data_offset(ptr); // Reverse the offset to find the original ArcInner. let arc_ptr = (ptr as *mut ArcInner).set_ptr_value((ptr as *mut u8).offset(-offset)); Self::from_ptr(arc_ptr) } } /// Creates a new [`Weak`] pointer to this allocation. /// /// # Examples /// /// ``` /// use std::sync::Arc; /// /// let five = Arc::new(5); /// /// let weak_five = Arc::downgrade(&five); /// ``` #[stable(feature = "arc_weak", since = "1.4.0")] pub fn downgrade(this: &Self) -> Weak { // This Relaxed is OK because we're checking the value in the CAS // below. let mut cur = this.inner().weak.load(Relaxed); loop { // check if the weak counter is currently "locked"; if so, spin. if cur == usize::MAX { hint::spin_loop(); cur = this.inner().weak.load(Relaxed); continue; } // NOTE: this code currently ignores the possibility of overflow // into usize::MAX; in general both Rc and Arc need to be adjusted // to deal with overflow. // Unlike with Clone(), we need this to be an Acquire read to // synchronize with the write coming from `is_unique`, so that the // events prior to that write happen before this read. match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) { Ok(_) => { // Make sure we do not create a dangling Weak debug_assert!(!is_dangling(this.ptr.as_ptr())); return Weak { ptr: this.ptr }; } Err(old) => cur = old, } } } /// Gets the number of [`Weak`] pointers to this allocation. /// /// # Safety /// /// This method by itself is safe, but using it correctly requires extra care. /// Another thread can change the weak count at any time, /// including potentially between calling this method and acting on the result. /// /// # Examples /// /// ``` /// use std::sync::Arc; /// /// let five = Arc::new(5); /// let _weak_five = Arc::downgrade(&five); /// /// // This assertion is deterministic because we haven't shared /// // the `Arc` or `Weak` between threads. /// assert_eq!(1, Arc::weak_count(&five)); /// ``` #[inline] #[stable(feature = "arc_counts", since = "1.15.0")] pub fn weak_count(this: &Self) -> usize { let cnt = this.inner().weak.load(SeqCst); // If the weak count is currently locked, the value of the // count was 0 just before taking the lock. if cnt == usize::MAX { 0 } else { cnt - 1 } } /// Gets the number of strong (`Arc`) pointers to this allocation. /// /// # Safety /// /// This method by itself is safe, but using it correctly requires extra care. /// Another thread can change the strong count at any time, /// including potentially between calling this method and acting on the result. 
/// /// # Examples /// /// ``` /// use std::sync::Arc; /// /// let five = Arc::new(5); /// let _also_five = Arc::clone(&five); /// /// // This assertion is deterministic because we haven't shared /// // the `Arc` between threads. /// assert_eq!(2, Arc::strong_count(&five)); /// ``` #[inline] #[stable(feature = "arc_counts", since = "1.15.0")] pub fn strong_count(this: &Self) -> usize { this.inner().strong.load(SeqCst) } /// Increments the strong reference count on the `Arc` associated with the /// provided pointer by one. /// /// # Safety /// /// The pointer must have been obtained through `Arc::into_raw`, and the /// associated `Arc` instance must be valid (i.e. the strong count must be at /// least 1) for the duration of this method. /// /// # Examples /// /// ``` /// use std::sync::Arc; /// /// let five = Arc::new(5); /// /// unsafe { /// let ptr = Arc::into_raw(five); /// Arc::increment_strong_count(ptr); /// /// // This assertion is deterministic because we haven't shared /// // the `Arc` between threads. /// let five = Arc::from_raw(ptr); /// assert_eq!(2, Arc::strong_count(&five)); /// } /// ``` #[inline] #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")] pub unsafe fn increment_strong_count(ptr: *const T) { // Retain Arc, but don't touch refcount by wrapping in ManuallyDrop let arc = unsafe { mem::ManuallyDrop::new(Arc::::from_raw(ptr)) }; // Now increase refcount, but don't drop new refcount either let _arc_clone: mem::ManuallyDrop<_> = arc.clone(); } /// Decrements the strong reference count on the `Arc` associated with the /// provided pointer by one. /// /// # Safety /// /// The pointer must have been obtained through `Arc::into_raw`, and the /// associated `Arc` instance must be valid (i.e. the strong count must be at /// least 1) when invoking this method. This method can be used to release the final /// `Arc` and backing storage, but **should not** be called after the final `Arc` has been /// released. /// /// # Examples /// /// ``` /// use std::sync::Arc; /// /// let five = Arc::new(5); /// /// unsafe { /// let ptr = Arc::into_raw(five); /// Arc::increment_strong_count(ptr); /// /// // Those assertions are deterministic because we haven't shared /// // the `Arc` between threads. /// let five = Arc::from_raw(ptr); /// assert_eq!(2, Arc::strong_count(&five)); /// Arc::decrement_strong_count(ptr); /// assert_eq!(1, Arc::strong_count(&five)); /// } /// ``` #[inline] #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")] pub unsafe fn decrement_strong_count(ptr: *const T) { unsafe { mem::drop(Arc::from_raw(ptr)) }; } #[inline] fn inner(&self) -> &ArcInner { // This unsafety is ok because while this arc is alive we're guaranteed // that the inner pointer is valid. Furthermore, we know that the // `ArcInner` structure itself is `Sync` because the inner data is // `Sync` as well, so we're ok loaning out an immutable pointer to these // contents. unsafe { self.ptr.as_ref() } } // Non-inlined part of `drop`. #[inline(never)] unsafe fn drop_slow(&mut self) { // Destroy the data at this time, even though we may not free the box // allocation itself (there may still be weak pointers lying around). unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) }; // Drop the weak ref collectively held by all strong references drop(Weak { ptr: self.ptr }); } #[inline] #[stable(feature = "ptr_eq", since = "1.17.0")] /// Returns `true` if the two `Arc`s point to the same allocation /// (in a vein similar to [`ptr::eq`]). 
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
/// let same_five = Arc::clone(&five);
/// let other_five = Arc::new(5);
///
/// assert!(Arc::ptr_eq(&five, &same_five));
/// assert!(!Arc::ptr_eq(&five, &other_five));
/// ```
///
/// [`ptr::eq`]: core::ptr::eq
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
    this.ptr.as_ptr() == other.ptr.as_ptr()
}
}

impl<T: ?Sized> Arc<T> {
    /// Allocates an `ArcInner<T>` with sufficient space for
    /// a possibly-unsized inner value where the value has the layout provided.
    ///
    /// The function `mem_to_arcinner` is called with the data pointer
    /// and must return back a (potentially fat)-pointer for the `ArcInner<T>`.
    unsafe fn allocate_for_layout(
        value_layout: Layout,
        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
        mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner<T>,
    ) -> *mut ArcInner<T> {
        // Calculate layout using the given value layout.
        // Previously, layout was calculated on the expression
        // `&*(ptr as *const ArcInner<T>)`, but this created a misaligned
        // reference (see #54908).
        let layout = Layout::new::<ArcInner<()>>().extend(value_layout).unwrap().0.pad_to_align();
        unsafe {
            Arc::try_allocate_for_layout(value_layout, allocate, mem_to_arcinner)
                .unwrap_or_else(|_| handle_alloc_error(layout))
        }
    }

    /// Allocates an `ArcInner<T>` with sufficient space for
    /// a possibly-unsized inner value where the value has the layout provided,
    /// returning an error if allocation fails.
    ///
    /// The function `mem_to_arcinner` is called with the data pointer
    /// and must return back a (potentially fat)-pointer for the `ArcInner<T>`.
    unsafe fn try_allocate_for_layout(
        value_layout: Layout,
        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
        mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner<T>,
    ) -> Result<*mut ArcInner<T>, AllocError> {
        // Calculate layout using the given value layout.
        // Previously, layout was calculated on the expression
        // `&*(ptr as *const ArcInner<T>)`, but this created a misaligned
        // reference (see #54908).
        let layout = Layout::new::<ArcInner<()>>().extend(value_layout).unwrap().0.pad_to_align();

        let ptr = allocate(layout)?;

        // Initialize the ArcInner
        let inner = mem_to_arcinner(ptr.as_non_null_ptr().as_ptr());
        debug_assert_eq!(unsafe { Layout::for_value(&*inner) }, layout);

        unsafe {
            ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
            ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
        }

        Ok(inner)
    }

    /// Allocates an `ArcInner<T>` with sufficient space for an unsized inner value.
    unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
        // Allocate for the `ArcInner<T>` using the given value.
        unsafe {
            Self::allocate_for_layout(
                Layout::for_value(&*ptr),
                |layout| Global.allocate(layout),
                |mem| (ptr as *mut ArcInner<T>).set_ptr_value(mem) as *mut ArcInner<T>,
            )
        }
    }

    fn from_box(v: Box<T>) -> Arc<T> {
        unsafe {
            let (box_unique, alloc) = Box::into_unique(v);
            let bptr = box_unique.as_ptr();

            let value_size = size_of_val(&*bptr);
            let ptr = Self::allocate_for_ptr(bptr);

            // Copy value as bytes
            ptr::copy_nonoverlapping(
                bptr as *const T as *const u8,
                &mut (*ptr).data as *mut _ as *mut u8,
                value_size,
            );

            // Free the allocation without dropping its contents
            box_free(box_unique, alloc);

            Self::from_ptr(ptr)
        }
    }
}

impl<T> Arc<[T]> {
    /// Allocates an `ArcInner<[T]>` with the given length.
unsafe fn allocate_for_slice(len: usize) -> *mut ArcInner<[T]> {
    unsafe {
        Self::allocate_for_layout(
            Layout::array::<T>(len).unwrap(),
            |layout| Global.allocate(layout),
            |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>,
        )
    }
}

/// Copy elements from slice into newly allocated Arc<\[T\]>
///
/// Unsafe because the caller must either take ownership or bind `T: Copy`.
unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> {
    unsafe {
        let ptr = Self::allocate_for_slice(v.len());
        ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len());
        Self::from_ptr(ptr)
    }
}

/// Constructs an `Arc<[T]>` from an iterator known to be of a certain size.
///
/// Behavior is undefined should the size be wrong.
unsafe fn from_iter_exact(iter: impl iter::Iterator<Item = T>, len: usize) -> Arc<[T]> {
    // Panic guard while cloning T elements.
    // In the event of a panic, elements that have been written
    // into the new ArcInner will be dropped, then the memory freed.
    struct Guard<T> {
        mem: NonNull<u8>,
        elems: *mut T,
        layout: Layout,
        n_elems: usize,
    }

    impl<T> Drop for Guard<T> {
        fn drop(&mut self) {
            unsafe {
                let slice = from_raw_parts_mut(self.elems, self.n_elems);
                ptr::drop_in_place(slice);

                Global.deallocate(self.mem, self.layout);
            }
        }
    }

    unsafe {
        let ptr = Self::allocate_for_slice(len);

        let mem = ptr as *mut _ as *mut u8;
        let layout = Layout::for_value(&*ptr);

        // Pointer to first element
        let elems = &mut (*ptr).data as *mut [T] as *mut T;

        let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };

        for (i, item) in iter.enumerate() {
            ptr::write(elems.add(i), item);
            guard.n_elems += 1;
        }

        // All clear. Forget the guard so it doesn't free the new ArcInner.
        mem::forget(guard);

        Self::from_ptr(ptr)
    }
}
}

/// Specialization trait used for `From<&[T]>`.
trait ArcFromSlice<T> {
    fn from_slice(slice: &[T]) -> Self;
}

impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
    #[inline]
    default fn from_slice(v: &[T]) -> Self {
        unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
    }
}

impl<T: Copy> ArcFromSlice<T> for Arc<[T]> {
    #[inline]
    fn from_slice(v: &[T]) -> Self {
        unsafe { Arc::copy_from_slice(v) }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Clone for Arc<T> {
    /// Makes a clone of the `Arc` pointer.
    ///
    /// This creates another pointer to the same allocation, increasing the
    /// strong reference count.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// let _ = Arc::clone(&five);
    /// ```
    #[inline]
    fn clone(&self) -> Arc<T> {
        // Using a relaxed ordering is alright here, as knowledge of the
        // original reference prevents other threads from erroneously deleting
        // the object.
        //
        // As explained in the [Boost documentation][1], increasing the
        // reference counter can always be done with memory_order_relaxed: New
        // references to an object can only be formed from an existing
        // reference, and passing an existing reference from one thread to
        // another must already provide any required synchronization.
        //
        // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
        let old_size = self.inner().strong.fetch_add(1, Relaxed);

        // However we need to guard against massive refcounts in case someone
        // is `mem::forget`ing Arcs. If we don't do this the count can overflow
        // and users will use-after-free. We racily saturate to `isize::MAX` on
        // the assumption that there aren't ~2 billion threads incrementing
        // the reference count at once. This branch will never be taken in
        // any realistic program.
        //
        // We abort because such a program is incredibly degenerate, and we
        // don't care to support it.
        if old_size > MAX_REFCOUNT {
            abort();
        }

        Self::from_inner(self.ptr)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Deref for Arc<T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        &self.inner().data
    }
}

#[unstable(feature = "receiver_trait", issue = "none")]
impl<T: ?Sized> Receiver for Arc<T> {}

impl<T: Clone> Arc<T> {
    /// Makes a mutable reference into the given `Arc`.
    ///
    /// If there are other `Arc` or [`Weak`] pointers to the same allocation,
    /// then `make_mut` will create a new allocation and invoke [`clone`][clone] on the inner value
    /// to ensure unique ownership. This is also referred to as clone-on-write.
    ///
    /// Note that this differs from the behavior of [`Rc::make_mut`], which disassociates
    /// any remaining `Weak` pointers.
    ///
    /// See also [`get_mut`][get_mut], which will fail rather than cloning.
    ///
    /// [clone]: Clone::clone
    /// [get_mut]: Arc::get_mut
    /// [`Rc::make_mut`]: super::rc::Rc::make_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let mut data = Arc::new(5);
    ///
    /// *Arc::make_mut(&mut data) += 1;         // Won't clone anything
    /// let mut other_data = Arc::clone(&data); // Won't clone inner data
    /// *Arc::make_mut(&mut data) += 1;         // Clones inner data
    /// *Arc::make_mut(&mut data) += 1;         // Won't clone anything
    /// *Arc::make_mut(&mut other_data) *= 2;   // Won't clone anything
    ///
    /// // Now `data` and `other_data` point to different allocations.
    /// assert_eq!(*data, 8);
    /// assert_eq!(*other_data, 12);
    /// ```
    #[inline]
    #[stable(feature = "arc_unique", since = "1.4.0")]
    pub fn make_mut(this: &mut Self) -> &mut T {
        // Note that we hold both a strong reference and a weak reference.
        // Thus, releasing our strong reference only will not, by itself, cause
        // the memory to be deallocated.
        //
        // Use Acquire to ensure that we see any writes to `weak` that happen
        // before release writes (i.e., decrements) to `strong`. Since we hold a
        // weak count, there's no chance the ArcInner itself could be
        // deallocated.
        if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
            // Another strong pointer exists, so we must clone.
            // Pre-allocate memory to allow writing the cloned value directly.
            let mut arc = Self::new_uninit();
            unsafe {
                let data = Arc::get_mut_unchecked(&mut arc);
                (**this).write_clone_into_raw(data.as_mut_ptr());
                *this = arc.assume_init();
            }
        } else if this.inner().weak.load(Relaxed) != 1 {
            // Relaxed suffices in the above because this is fundamentally an
            // optimization: we are always racing with weak pointers being
            // dropped. Worst case, we end up allocating a new Arc unnecessarily.

            // We removed the last strong ref, but there are additional weak
            // refs remaining. We'll move the contents to a new Arc, and
            // invalidate the other weak refs.

            // Note that it is not possible for the read of `weak` to yield
            // usize::MAX (i.e., locked), since the weak count can only be
            // locked by a thread with a strong reference.

            // Materialize our own implicit weak pointer, so that it can clean
            // up the ArcInner as needed.
            let _weak = Weak { ptr: this.ptr };

            // Can just steal the data, all that's left is Weaks
            let mut arc = Self::new_uninit();
            unsafe {
                let data = Arc::get_mut_unchecked(&mut arc);
                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
                ptr::write(this, arc.assume_init());
            }
        } else {
            // We were the sole reference of either kind; bump back up the
            // strong ref count.
this.inner().strong.store(1, Release); } // As with `get_mut()`, the unsafety is ok because our reference was // either unique to begin with, or became one upon cloning the contents. unsafe { Self::get_mut_unchecked(this) } } } impl Arc { /// Returns a mutable reference into the given `Arc`, if there are /// no other `Arc` or [`Weak`] pointers to the same allocation. /// /// Returns [`None`] otherwise, because it is not safe to /// mutate a shared value. /// /// See also [`make_mut`][make_mut], which will [`clone`][clone] /// the inner value when there are other pointers. /// /// [make_mut]: Arc::make_mut /// [clone]: Clone::clone /// /// # Examples /// /// ``` /// use std::sync::Arc; /// /// let mut x = Arc::new(3); /// *Arc::get_mut(&mut x).unwrap() = 4; /// assert_eq!(*x, 4); /// /// let _y = Arc::clone(&x); /// assert!(Arc::get_mut(&mut x).is_none()); /// ``` #[inline] #[stable(feature = "arc_unique", since = "1.4.0")] pub fn get_mut(this: &mut Self) -> Option<&mut T> { if this.is_unique() { // This unsafety is ok because we're guaranteed that the pointer // returned is the *only* pointer that will ever be returned to T. Our // reference count is guaranteed to be 1 at this point, and we required // the Arc itself to be `mut`, so we're returning the only possible // reference to the inner data. unsafe { Some(Arc::get_mut_unchecked(this)) } } else { None } } /// Returns a mutable reference into the given `Arc`, /// without any check. /// /// See also [`get_mut`], which is safe and does appropriate checks. /// /// [`get_mut`]: Arc::get_mut /// /// # Safety /// /// Any other `Arc` or [`Weak`] pointers to the same allocation must not be dereferenced /// for the duration of the returned borrow. /// This is trivially the case if no such pointers exist, /// for example immediately after `Arc::new`. /// /// # Examples /// /// ``` /// #![feature(get_mut_unchecked)] /// /// use std::sync::Arc; /// /// let mut x = Arc::new(String::new()); /// unsafe { /// Arc::get_mut_unchecked(&mut x).push_str("foo") /// } /// assert_eq!(*x, "foo"); /// ``` #[inline] #[unstable(feature = "get_mut_unchecked", issue = "63292")] pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T { // We are careful to *not* create a reference covering the "count" fields, as // this would alias with concurrent access to the reference counts (e.g. by `Weak`). unsafe { &mut (*this.ptr.as_ptr()).data } } /// Determine whether this is the unique reference (including weak refs) to /// the underlying data. /// /// Note that this requires locking the weak ref count. fn is_unique(&mut self) -> bool { // lock the weak pointer count if we appear to be the sole weak pointer // holder. // // The acquire label here ensures a happens-before relationship with any // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements // of the `weak` count (via `Weak::drop`, which uses release). If the upgraded // weak ref was never dropped, the CAS here will fail so we do not care to synchronize. if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() { // This needs to be an `Acquire` to synchronize with the decrement of the `strong` // counter in `drop` -- the only access that happens when any but the last reference // is being dropped. let unique = self.inner().strong.load(Acquire) == 1; // The release write here synchronizes with a read in `downgrade`, // effectively preventing the above read of `strong` from happening // after the write. 
            self.inner().weak.store(1, Release); // release the lock
            unique
        } else {
            false
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
    /// Drops the `Arc`.
    ///
    /// This will decrement the strong reference count. If the strong reference
    /// count reaches zero then the only other references (if any) are
    /// [`Weak`], so we `drop` the inner value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo = Arc::new(Foo);
    /// let foo2 = Arc::clone(&foo);
    ///
    /// drop(foo);  // Doesn't print anything
    /// drop(foo2); // Prints "dropped!"
    /// ```
    #[inline]
    fn drop(&mut self) {
        // Because `fetch_sub` is already atomic, we do not need to synchronize
        // with other threads unless we are going to delete the object. This
        // same logic applies to the below `fetch_sub` to the `weak` count.
        if self.inner().strong.fetch_sub(1, Release) != 1 {
            return;
        }

        // This fence is needed to prevent reordering of use of the data and
        // deletion of the data. Because it is marked `Release`, the decreasing
        // of the reference count synchronizes with this `Acquire` fence. This
        // means that use of the data happens before decreasing the reference
        // count, which happens before this fence, which happens before the
        // deletion of the data.
        //
        // As explained in the [Boost documentation][1],
        //
        // > It is important to enforce any possible access to the object in one
        // > thread (through an existing reference) to *happen before* deleting
        // > the object in a different thread. This is achieved by a "release"
        // > operation after dropping a reference (any access to the object
        // > through this reference must obviously happen before), and an
        // > "acquire" operation before deleting the object.
        //
        // In particular, while the contents of an Arc are usually immutable, it's
        // possible to have interior writes to something like a Mutex<T>. Since a
        // Mutex is not acquired when it is deleted, we can't rely on its
        // synchronization logic to make writes in thread A visible to a destructor
        // running in thread B.
        //
        // Also note that the Acquire fence here could probably be replaced with an
        // Acquire load, which could improve performance in highly-contended
        // situations. See [2].
        //
        // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
        // [2]: (https://github.com/rust-lang/rust/pull/41714)
        acquire!(self.inner().strong);

        unsafe {
            self.drop_slow();
        }
    }
}

impl Arc<dyn Any + Send + Sync> {
    #[inline]
    #[stable(feature = "rc_downcast", since = "1.29.0")]
    /// Attempt to downcast the `Arc<dyn Any + Send + Sync>` to a concrete type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::any::Any;
    /// use std::sync::Arc;
    ///
    /// fn print_if_string(value: Arc<dyn Any + Send + Sync>) {
    ///     if let Ok(string) = value.downcast::<String>() {
    ///         println!("String ({}): {}", string.len(), string);
    ///     }
    /// }
    ///
    /// let my_string = "Hello World".to_string();
    /// print_if_string(Arc::new(my_string));
    /// print_if_string(Arc::new(0i8));
    /// ```
    pub fn downcast<T>(self) -> Result<Arc<T>, Self>
    where
        T: Any + Send + Sync + 'static,
    {
        if (*self).is::<T>() {
            let ptr = self.ptr.cast::<ArcInner<T>>();
            mem::forget(self);
            Ok(Arc::from_inner(ptr))
        } else {
            Err(self)
        }
    }
}

impl<T> Weak<T> {
    /// Constructs a new `Weak<T>`, without allocating any memory.
    /// Calling [`upgrade`] on the return value always gives [`None`].
/// /// [`upgrade`]: Weak::upgrade /// /// # Examples /// /// ``` /// use std::sync::Weak; /// /// let empty: Weak = Weak::new(); /// assert!(empty.upgrade().is_none()); /// ``` #[stable(feature = "downgraded_weak", since = "1.10.0")] pub fn new() -> Weak { Weak { ptr: NonNull::new(usize::MAX as *mut ArcInner).expect("MAX is not 0") } } } /// Helper type to allow accessing the reference counts without /// making any assertions about the data field. struct WeakInner<'a> { weak: &'a atomic::AtomicUsize, strong: &'a atomic::AtomicUsize, } impl Weak { /// Returns a raw pointer to the object `T` pointed to by this `Weak`. /// /// The pointer is valid only if there are some strong references. The pointer may be dangling, /// unaligned or even [`null`] otherwise. /// /// # Examples /// /// ``` /// use std::sync::Arc; /// use std::ptr; /// /// let strong = Arc::new("hello".to_owned()); /// let weak = Arc::downgrade(&strong); /// // Both point to the same object /// assert!(ptr::eq(&*strong, weak.as_ptr())); /// // The strong here keeps it alive, so we can still access the object. /// assert_eq!("hello", unsafe { &*weak.as_ptr() }); /// /// drop(strong); /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to /// // undefined behaviour. /// // assert_eq!("hello", unsafe { &*weak.as_ptr() }); /// ``` /// /// [`null`]: core::ptr::null #[stable(feature = "weak_into_raw", since = "1.45.0")] pub fn as_ptr(&self) -> *const T { let ptr: *mut ArcInner = NonNull::as_ptr(self.ptr); if is_dangling(ptr) { // If the pointer is dangling, we return the sentinel directly. This cannot be // a valid payload address, as the payload is at least as aligned as ArcInner (usize). ptr as *const T } else { // SAFETY: if is_dangling returns false, then the pointer is dereferencable. // The payload may be dropped at this point, and we have to maintain provenance, // so use raw pointer manipulation. unsafe { ptr::addr_of_mut!((*ptr).data) } } } /// Consumes the `Weak` and turns it into a raw pointer. /// /// This converts the weak pointer into a raw pointer, while still preserving the ownership of /// one weak reference (the weak count is not modified by this operation). It can be turned /// back into the `Weak` with [`from_raw`]. /// /// The same restrictions of accessing the target of the pointer as with /// [`as_ptr`] apply. /// /// # Examples /// /// ``` /// use std::sync::{Arc, Weak}; /// /// let strong = Arc::new("hello".to_owned()); /// let weak = Arc::downgrade(&strong); /// let raw = weak.into_raw(); /// /// assert_eq!(1, Arc::weak_count(&strong)); /// assert_eq!("hello", unsafe { &*raw }); /// /// drop(unsafe { Weak::from_raw(raw) }); /// assert_eq!(0, Arc::weak_count(&strong)); /// ``` /// /// [`from_raw`]: Weak::from_raw /// [`as_ptr`]: Weak::as_ptr #[stable(feature = "weak_into_raw", since = "1.45.0")] pub fn into_raw(self) -> *const T { let result = self.as_ptr(); mem::forget(self); result } /// Converts a raw pointer previously created by [`into_raw`] back into `Weak`. /// /// This can be used to safely get a strong reference (by calling [`upgrade`] /// later) or to deallocate the weak count by dropping the `Weak`. /// /// It takes ownership of one weak reference (with the exception of pointers created by [`new`], /// as these don't own anything; the method still works on them). /// /// # Safety /// /// The pointer must have originated from the [`into_raw`] and must still own its potential /// weak reference. 
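One common pattern built from the `downgrade`/`upgrade` pair documented here is a cache that keeps a value alive only while someone else still holds it. A minimal sketch, assuming a hypothetical `Cache` wrapper (not part of this module):

```rust
use std::sync::{Arc, Mutex, Weak};

// Illustrative cache: it remembers the value weakly, so the value
// lives exactly as long as outside users keep strong references.
struct Cache {
    slot: Mutex<Weak<String>>,
}

impl Cache {
    fn get(&self) -> Arc<String> {
        let mut slot = self.slot.lock().unwrap();
        if let Some(value) = slot.upgrade() {
            return value; // still alive elsewhere: reuse the allocation
        }
        let value = Arc::new(String::from("expensive value"));
        *slot = Arc::downgrade(&value);
        value
    }
}

fn main() {
    let cache = Cache { slot: Mutex::new(Weak::new()) };
    let a = cache.get();
    let b = cache.get();
    assert!(Arc::ptr_eq(&a, &b)); // the second call reused the first value

    drop(a);
    drop(b);
    let c = cache.get(); // all strong refs died, so this rebuilds the value
    assert_eq!(*c, "expensive value");
}
```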
/// /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this /// takes ownership of one weak reference currently represented as a raw pointer (the weak /// count is not modified by this operation) and therefore it must be paired with a previous /// call to [`into_raw`]. /// # Examples /// /// ``` /// use std::sync::{Arc, Weak}; /// /// let strong = Arc::new("hello".to_owned()); /// /// let raw_1 = Arc::downgrade(&strong).into_raw(); /// let raw_2 = Arc::downgrade(&strong).into_raw(); /// /// assert_eq!(2, Arc::weak_count(&strong)); /// /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap()); /// assert_eq!(1, Arc::weak_count(&strong)); /// /// drop(strong); /// /// // Decrement the last weak count. /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none()); /// ``` /// /// [`new`]: Weak::new /// [`into_raw`]: Weak::into_raw /// [`upgrade`]: Weak::upgrade /// [`forget`]: std::mem::forget #[stable(feature = "weak_into_raw", since = "1.45.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { // See Weak::as_ptr for context on how the input pointer is derived. let ptr = if is_dangling(ptr as *mut T) { // This is a dangling Weak. ptr as *mut ArcInner } else { // Otherwise, we're guaranteed the pointer came from a nondangling Weak. // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T. let offset = unsafe { data_offset(ptr) }; // Thus, we reverse the offset to get the whole RcBox. // SAFETY: the pointer originated from a Weak, so this offset is safe. unsafe { (ptr as *mut ArcInner).set_ptr_value((ptr as *mut u8).offset(-offset)) } }; // SAFETY: we now have recovered the original Weak pointer, so can create the Weak. Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } } } } impl Weak { /// Attempts to upgrade the `Weak` pointer to an [`Arc`], delaying /// dropping of the inner value if successful. /// /// Returns [`None`] if the inner value has since been dropped. /// /// # Examples /// /// ``` /// use std::sync::Arc; /// /// let five = Arc::new(5); /// /// let weak_five = Arc::downgrade(&five); /// /// let strong_five: Option> = weak_five.upgrade(); /// assert!(strong_five.is_some()); /// /// // Destroy all strong pointers. /// drop(strong_five); /// drop(five); /// /// assert!(weak_five.upgrade().is_none()); /// ``` #[stable(feature = "arc_weak", since = "1.4.0")] pub fn upgrade(&self) -> Option> { // We use a CAS loop to increment the strong count instead of a // fetch_add as this function should never take the reference count // from zero to one. let inner = self.inner()?; // Relaxed load because any write of 0 that we can observe // leaves the field in a permanently zero state (so a // "stale" read of 0 is fine), and any other value is // confirmed via the CAS below. let mut n = inner.strong.load(Relaxed); loop { if n == 0 { return None; } // See comments in `Arc::clone` for why we do this (for `mem::forget`). if n > MAX_REFCOUNT { abort(); } // Relaxed is fine for the failure case because we don't have any expectations about the new state. // Acquire is necessary for the success case to synchronise with `Arc::new_cyclic`, when the inner // value can be initialized after `Weak` references have already been created. In that case, we // expect to observe the fully initialized value. 
match inner.strong.compare_exchange_weak(n, n + 1, Acquire, Relaxed) { Ok(_) => return Some(Arc::from_inner(self.ptr)), // null checked above Err(old) => n = old, } } } /// Gets the number of strong (`Arc`) pointers pointing to this allocation. /// /// If `self` was created using [`Weak::new`], this will return 0. #[stable(feature = "weak_counts", since = "1.41.0")] pub fn strong_count(&self) -> usize { if let Some(inner) = self.inner() { inner.strong.load(SeqCst) } else { 0 } } /// Gets an approximation of the number of `Weak` pointers pointing to this /// allocation. /// /// If `self` was created using [`Weak::new`], or if there are no remaining /// strong pointers, this will return 0. /// /// # Accuracy /// /// Due to implementation details, the returned value can be off by 1 in /// either direction when other threads are manipulating any `Arc`s or /// `Weak`s pointing to the same allocation. #[stable(feature = "weak_counts", since = "1.41.0")] pub fn weak_count(&self) -> usize { self.inner() .map(|inner| { let weak = inner.weak.load(SeqCst); let strong = inner.strong.load(SeqCst); if strong == 0 { 0 } else { // Since we observed that there was at least one strong pointer // after reading the weak count, we know that the implicit weak // reference (present whenever any strong references are alive) // was still around when we observed the weak count, and can // therefore safely subtract it. weak - 1 } }) .unwrap_or(0) } /// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`, /// (i.e., when this `Weak` was created by `Weak::new`). #[inline] fn inner(&self) -> Option> { if is_dangling(self.ptr.as_ptr()) { None } else { // We are careful to *not* create a reference covering the "data" field, as // the field may be mutated concurrently (for example, if the last `Arc` // is dropped, the data field will be dropped in-place). Some(unsafe { let ptr = self.ptr.as_ptr(); WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak } }) } } /// Returns `true` if the two `Weak`s point to the same allocation (similar to /// [`ptr::eq`]), or if both don't point to any allocation /// (because they were created with `Weak::new()`). /// /// # Notes /// /// Since this compares pointers it means that `Weak::new()` will equal each /// other, even though they don't point to any allocation. /// /// # Examples /// /// ``` /// use std::sync::Arc; /// /// let first_rc = Arc::new(5); /// let first = Arc::downgrade(&first_rc); /// let second = Arc::downgrade(&first_rc); /// /// assert!(first.ptr_eq(&second)); /// /// let third_rc = Arc::new(5); /// let third = Arc::downgrade(&third_rc); /// /// assert!(!first.ptr_eq(&third)); /// ``` /// /// Comparing `Weak::new`. /// /// ``` /// use std::sync::{Arc, Weak}; /// /// let first = Weak::new(); /// let second = Weak::new(); /// assert!(first.ptr_eq(&second)); /// /// let third_rc = Arc::new(()); /// let third = Arc::downgrade(&third_rc); /// assert!(!first.ptr_eq(&third)); /// ``` /// /// [`ptr::eq`]: core::ptr::eq #[inline] #[stable(feature = "weak_ptr_eq", since = "1.39.0")] pub fn ptr_eq(&self, other: &Self) -> bool { self.ptr.as_ptr() == other.ptr.as_ptr() } } #[stable(feature = "arc_weak", since = "1.4.0")] impl Clone for Weak { /// Makes a clone of the `Weak` pointer that points to the same allocation. 
///
/// # Examples
///
/// ```
/// use std::sync::{Arc, Weak};
///
/// let weak_five = Arc::downgrade(&Arc::new(5));
///
/// let _ = Weak::clone(&weak_five);
/// ```
#[inline]
fn clone(&self) -> Weak<T> {
    let inner = if let Some(inner) = self.inner() {
        inner
    } else {
        return Weak { ptr: self.ptr };
    };
    // See comments in Arc::clone() for why this is relaxed. This can use a
    // fetch_add (ignoring the lock) because the weak count is only locked
    // when there are *no other* weak pointers in existence. (So we can't
    // be running this code in that case.)
    let old_size = inner.weak.fetch_add(1, Relaxed);

    // See comments in Arc::clone() for why we do this (for mem::forget).
    if old_size > MAX_REFCOUNT {
        abort();
    }

    Weak { ptr: self.ptr }
}
}

#[stable(feature = "downgraded_weak", since = "1.10.0")]
impl<T> Default for Weak<T> {
    /// Constructs a new `Weak<T>`, without allocating memory.
    /// Calling [`upgrade`] on the return value always
    /// gives [`None`].
    ///
    /// [`upgrade`]: Weak::upgrade
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Weak;
    ///
    /// let empty: Weak<i64> = Default::default();
    /// assert!(empty.upgrade().is_none());
    /// ```
    fn default() -> Weak<T> {
        Weak::new()
    }
}

#[stable(feature = "arc_weak", since = "1.4.0")]
impl<T: ?Sized> Drop for Weak<T> {
    /// Drops the `Weak` pointer.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::{Arc, Weak};
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo = Arc::new(Foo);
    /// let weak_foo = Arc::downgrade(&foo);
    /// let other_weak_foo = Weak::clone(&weak_foo);
    ///
    /// drop(weak_foo);   // Doesn't print anything
    /// drop(foo);        // Prints "dropped!"
    ///
    /// assert!(other_weak_foo.upgrade().is_none());
    /// ```
    fn drop(&mut self) {
        // If we find out that we were the last weak pointer, then it's time to
        // deallocate the data entirely. See the discussion in Arc::drop() about
        // the memory orderings.
        //
        // It's not necessary to check for the locked state here, because the
        // weak count can only be locked if there was precisely one weak ref,
        // meaning that drop could only subsequently run ON that remaining weak
        // ref, which can only happen after the lock is released.
        let inner = if let Some(inner) = self.inner() { inner } else { return };

        if inner.weak.fetch_sub(1, Release) == 1 {
            acquire!(inner.weak);
            unsafe { Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())) }
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
trait ArcEqIdent<T: ?Sized + PartialEq> {
    fn eq(&self, other: &Arc<T>) -> bool;
    fn ne(&self, other: &Arc<T>) -> bool;
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialEq> ArcEqIdent<T> for Arc<T> {
    #[inline]
    default fn eq(&self, other: &Arc<T>) -> bool {
        **self == **other
    }

    #[inline]
    default fn ne(&self, other: &Arc<T>) -> bool {
        **self != **other
    }
}

/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
/// would otherwise add a cost to all equality checks on refs. We assume that `Arc`s are used to
/// store large values, that are slow to clone, but also heavy to check for equality, causing this
/// cost to pay off more easily. It's also more likely to have two `Arc` clones, that point to
/// the same value, than two `&T`s.
///
/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
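The `T: Eq` restriction called out above is observable: for a merely-`PartialEq` type such as `f32`, equality must stay potentially irreflexive, so the pointer-equality shortcut in the impl below must not apply. A small sketch:

```rust
use std::sync::Arc;

fn main() {
    // `f32` is only `PartialEq` (NaN != NaN), so the fast path must not
    // apply: two clones of the same allocation still compare unequal.
    let nan = Arc::new(f32::NAN);
    assert!(nan != Arc::clone(&nan));

    // For an `Eq` type the shortcut is sound: clones of one allocation
    // are equal without even reading the inner values.
    let n = Arc::new(42u64);
    assert!(n == Arc::clone(&n));
}
```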
/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
/// would otherwise add a cost to all equality checks on refs. We assume that `Arc`s are used to
/// store large values, that are slow to clone, but also heavy to check for equality, causing this
/// cost to pay off more easily. It's also more likely to have two `Arc` clones, that point to
/// the same value, than two `&T`s.
///
/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Eq> ArcEqIdent<T> for Arc<T> {
    #[inline]
    fn eq(&self, other: &Arc<T>) -> bool {
        Arc::ptr_eq(self, other) || **self == **other
    }

    #[inline]
    fn ne(&self, other: &Arc<T>) -> bool {
        !Arc::ptr_eq(self, other) && **self != **other
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
    /// Equality for two `Arc`s.
    ///
    /// Two `Arc`s are equal if their inner values are equal, even if they are
    /// stored in different allocations.
    ///
    /// If `T` also implements `Eq` (implying reflexivity of equality),
    /// two `Arc`s that point to the same allocation are always equal.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// assert!(five == Arc::new(5));
    /// ```
    #[inline]
    fn eq(&self, other: &Arc<T>) -> bool {
        ArcEqIdent::eq(self, other)
    }

    /// Inequality for two `Arc`s.
    ///
    /// Two `Arc`s are unequal if their inner values are unequal.
    ///
    /// If `T` also implements `Eq` (implying reflexivity of equality),
    /// two `Arc`s that point to the same value are never unequal.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// assert!(five != Arc::new(6));
    /// ```
    #[inline]
    fn ne(&self, other: &Arc<T>) -> bool {
        ArcEqIdent::ne(self, other)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
    /// Partial comparison for two `Arc`s.
    ///
    /// The two are compared by calling `partial_cmp()` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    /// use std::cmp::Ordering;
    ///
    /// let five = Arc::new(5);
    ///
    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6)));
    /// ```
    fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
        (**self).partial_cmp(&**other)
    }

    /// Less-than comparison for two `Arc`s.
    ///
    /// The two are compared by calling `<` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// assert!(five < Arc::new(6));
    /// ```
    fn lt(&self, other: &Arc<T>) -> bool {
        *(*self) < *(*other)
    }

    /// 'Less than or equal to' comparison for two `Arc`s.
    ///
    /// The two are compared by calling `<=` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// assert!(five <= Arc::new(5));
    /// ```
    fn le(&self, other: &Arc<T>) -> bool {
        *(*self) <= *(*other)
    }

    /// Greater-than comparison for two `Arc`s.
    ///
    /// The two are compared by calling `>` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// assert!(five > Arc::new(4));
    /// ```
    fn gt(&self, other: &Arc<T>) -> bool {
        *(*self) > *(*other)
    }

    /// 'Greater than or equal to' comparison for two `Arc`s.
    ///
    /// The two are compared by calling `>=` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// assert!(five >= Arc::new(5));
    /// ```
    fn ge(&self, other: &Arc<T>) -> bool {
        *(*self) >= *(*other)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Ord> Ord for Arc<T> {
    /// Comparison for two `Arc`s.
    ///
    /// The two are compared by calling `cmp()` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    /// use std::cmp::Ordering;
    ///
    /// let five = Arc::new(5);
    ///
    /// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6)));
    /// ```
    fn cmp(&self, other: &Arc<T>) -> Ordering {
        (**self).cmp(&**other)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Eq> Eq for Arc<T> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + fmt::Display> fmt::Display for Arc<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> fmt::Pointer for Arc<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&(&**self as *const T), f)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Default> Default for Arc<T> {
    /// Creates a new `Arc<T>`, with the `Default` value for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let x: Arc<i32> = Default::default();
    /// assert_eq!(*x, 0);
    /// ```
    fn default() -> Arc<T> {
        Arc::new(Default::default())
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Hash> Hash for Arc<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        (**self).hash(state)
    }
}

#[stable(feature = "from_for_ptrs", since = "1.6.0")]
impl<T> From<T> for Arc<T> {
    fn from(t: T) -> Self {
        Arc::new(t)
    }
}

#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T: Clone> From<&[T]> for Arc<[T]> {
    /// Allocate a reference-counted slice and fill it by cloning `v`'s items.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::sync::Arc;
    /// let original: &[i32] = &[1, 2, 3];
    /// let shared: Arc<[i32]> = Arc::from(original);
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// ```
    #[inline]
    fn from(v: &[T]) -> Arc<[T]> {
        <Self as ArcFromSlice<T>>::from_slice(v)
    }
}

#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl From<&str> for Arc<str> {
    /// Allocate a reference-counted `str` and copy `v` into it.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::sync::Arc;
    /// let shared: Arc<str> = Arc::from("eggplant");
    /// assert_eq!("eggplant", &shared[..]);
    /// ```
    #[inline]
    fn from(v: &str) -> Arc<str> {
        let arc = Arc::<[u8]>::from(v.as_bytes());
        unsafe { Arc::from_raw(Arc::into_raw(arc) as *const str) }
    }
}

#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl From<String> for Arc<str> {
    /// Allocate a reference-counted `str` and copy `v` into it.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::sync::Arc;
    /// let unique: String = "eggplant".to_owned();
    /// let shared: Arc<str> = Arc::from(unique);
    /// assert_eq!("eggplant", &shared[..]);
    /// ```
    #[inline]
    fn from(v: String) -> Arc<str> {
        Arc::from(&v[..])
    }
}

#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T: ?Sized> From<Box<T>> for Arc<T> {
    /// Move a boxed object to a new, reference-counted allocation.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::sync::Arc;
    /// let unique: Box<str> = Box::from("eggplant");
    /// let shared: Arc<str> = Arc::from(unique);
    /// assert_eq!("eggplant", &shared[..]);
    /// ```
    #[inline]
    fn from(v: Box<T>) -> Arc<T> {
        Arc::from_box(v)
    }
}

#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T> From<Vec<T>> for Arc<[T]> {
    /// Allocate a reference-counted slice and move `v`'s items into it.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::sync::Arc;
    /// let unique: Vec<i32> = vec![1, 2, 3];
    /// let shared: Arc<[i32]> = Arc::from(unique);
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// ```
    #[inline]
    fn from(mut v: Vec<T>) -> Arc<[T]> {
        unsafe {
            let arc = Arc::copy_from_slice(&v);

            // Allow the Vec to free its memory, but not destroy its contents
            v.set_len(0);

            arc
        }
    }
}

#[stable(feature = "shared_from_cow", since = "1.45.0")]
impl<'a, B> From<Cow<'a, B>> for Arc<B>
where
    B: ToOwned + ?Sized,
    Arc<B>: From<&'a B> + From<B::Owned>,
{
    #[inline]
    fn from(cow: Cow<'a, B>) -> Arc<B> {
        match cow {
            Cow::Borrowed(s) => Arc::from(s),
            Cow::Owned(s) => Arc::from(s),
        }
    }
}

#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
impl<T, const N: usize> TryFrom<Arc<[T]>> for Arc<[T; N]> {
    type Error = Arc<[T]>;

    fn try_from(boxed_slice: Arc<[T]>) -> Result<Self, Self::Error> {
        if boxed_slice.len() == N {
            Ok(unsafe { Arc::from_raw(Arc::into_raw(boxed_slice) as *mut [T; N]) })
        } else {
            Err(boxed_slice)
        }
    }
}

#[stable(feature = "shared_from_iter", since = "1.37.0")]
impl<T> iter::FromIterator<T> for Arc<[T]> {
    /// Takes each element in the `Iterator` and collects it into an `Arc<[T]>`.
    ///
    /// # Performance characteristics
    ///
    /// ## The general case
    ///
    /// In the general case, collecting into `Arc<[T]>` is done by first
    /// collecting into a `Vec<T>`. That is, when writing the following:
    ///
    /// ```rust
    /// # use std::sync::Arc;
    /// let evens: Arc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
    /// ```
    ///
    /// this behaves as if we wrote:
    ///
    /// ```rust
    /// # use std::sync::Arc;
    /// let evens: Arc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
    ///     .collect::<Vec<_>>() // The first set of allocations happens here.
    ///     .into(); // A second allocation for `Arc<[T]>` happens here.
    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
    /// ```
    ///
    /// This will allocate as many times as needed for constructing the `Vec<T>`
    /// and then it will allocate once for turning the `Vec<T>` into the `Arc<[T]>`.
    ///
    /// ## Iterators of known length
    ///
    /// When your `Iterator` implements `TrustedLen` and is of an exact size,
    /// a single allocation will be made for the `Arc<[T]>`. For example:
    ///
    /// ```rust
    /// # use std::sync::Arc;
    /// let evens: Arc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
    /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
    /// ```
    fn from_iter<I: iter::IntoIterator<Item = T>>(iter: I) -> Self {
        ToArcSlice::to_arc_slice(iter.into_iter())
    }
}

/// Specialization trait used for collecting into `Arc<[T]>`.
trait ToArcSlice<T>: Iterator<Item = T> + Sized {
    fn to_arc_slice(self) -> Arc<[T]>;
}

impl<T, I: Iterator<Item = T>> ToArcSlice<T> for I {
    default fn to_arc_slice(self) -> Arc<[T]> {
        self.collect::<Vec<T>>().into()
    }
}

impl<T, I: iter::TrustedLen<Item = T>> ToArcSlice<T> for I {
    fn to_arc_slice(self) -> Arc<[T]> {
        // This is the case for a `TrustedLen` iterator.
        let (low, high) = self.size_hint();
        if let Some(high) = high {
            debug_assert_eq!(
                low,
                high,
                "TrustedLen iterator's size hint is not exact: {:?}",
                (low, high)
            );

            unsafe {
                // SAFETY: We need to ensure that the iterator has an exact length and we have.
                Arc::from_iter_exact(self, low)
            }
        } else {
            // The TrustedLen contract guarantees that `upper_bound == None` implies an
            // iterator length exceeding `usize::MAX`.
            // The default implementation would collect into a vec which would panic.
            // Thus we panic here immediately without invoking `Vec` code.
            panic!("capacity overflow");
        }
    }
}
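// Usage sketch (an editorial illustration) of the two collection paths
// documented above: a filtered iterator of unknown length is buffered through
// a `Vec` first, while a plain range is `TrustedLen` and allocates once.
#[test]
fn collect_into_arc_slice() {
    use std::sync::Arc;

    // General case: length unknown up front, so a `Vec` is built first.
    let odds: Arc<[u32]> = (0..10).filter(|n| n % 2 == 1).collect();
    assert_eq!(&*odds, &[1, 3, 5, 7, 9]);

    // `TrustedLen` case: a single allocation for the `Arc<[T]>`.
    let all: Arc<[u32]> = (0..4).collect();
    assert_eq!(&*all, &[0, 1, 2, 3]);
}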
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
    fn borrow(&self) -> &T {
        &**self
    }
}

#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
impl<T: ?Sized> AsRef<T> for Arc<T> {
    fn as_ref(&self) -> &T {
        &**self
    }
}

#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized> Unpin for Arc<T> {}

/// Get the offset within an `ArcInner` for the payload behind a pointer.
///
/// # Safety
///
/// The pointer must point to (and have valid metadata for) a previously
/// valid instance of T, but the T is allowed to be dropped.
unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
    // Align the unsized value to the end of the ArcInner.
    // Because ArcInner is repr(C), it will always be the last field in memory.
    // SAFETY: since the only unsized types possible are slices, trait objects,
    // and extern types, the input safety requirement is currently enough to
    // satisfy the requirements of align_of_val_raw; this is an implementation
    // detail of the language that may not be relied upon outside of std.
    unsafe { data_offset_align(align_of_val_raw(ptr)) }
}

#[inline]
fn data_offset_align(align: usize) -> isize {
    let layout = Layout::new::<ArcInner<()>>();
    (layout.size() + layout.padding_needed_for(align)) as isize
}

//! Utilities for formatting and printing `String`s.
//!
//! This module contains the runtime support for the [`format!`] syntax extension.
//! This macro is implemented in the compiler to emit calls to this module in
//! order to format arguments at runtime into strings.
//!
//! # Usage
//!
//! The [`format!`] macro is intended to be familiar to those coming from C's
//! `printf`/`fprintf` functions or Python's `str.format` function.
//!
//! Some examples of the [`format!`] extension are:
//!
//! ```
//! format!("Hello");                 // => "Hello"
//! format!("Hello, {}!", "world");   // => "Hello, world!"
//! format!("The number is {}", 1);   // => "The number is 1"
//! format!("{:?}", (3, 4));          // => "(3, 4)"
//! format!("{value}", value=4);      // => "4"
//! format!("{} {}", 1, 2);           // => "1 2"
//! format!("{:04}", 42);             // => "0042" with leading zeros
//! format!("{:#?}", (100, 200));     // => "(
//!                                   //       100,
//!                                   //       200,
//!                                   //     )"
//! ```
//!
//! From these, you can see that the first argument is a format string. It is
//! required by the compiler for this to be a string literal; it cannot be a
//! variable passed in (in order to perform validity checking). The compiler
//! will then parse the format string and determine if the list of arguments
//! provided is suitable to pass to this format string.
//!
//! To convert a single value to a string, use the [`to_string`] method. This
//! will use the [`Display`] formatting trait.
//!
//! ## Positional parameters
//!
//! Each formatting argument is allowed to specify which value argument it's
//! referencing, and if omitted it is assumed to be "the next argument". For
//! example, the format string `{} {} {}` would take three parameters, and they
//! would be formatted in the same order as they're given. The format string
//! `{2} {1} {0}`, however, would format arguments in reverse order.
//!
//! Things can get a little tricky once you start intermingling the two types of
//! positional specifiers. The "next argument" specifier can be thought of as an
//! iterator over the argument. Each time a "next argument" specifier is seen,
//! the iterator advances. This leads to behavior like this:
//!
//! ```
//! format!("{1} {} {0} {}", 1, 2); // => "2 1 1 2"
//! ```
//!
//! The internal iterator over the argument has not been advanced by the time
//! the first `{}` is seen, so it prints the first argument. Then upon reaching
//! the second `{}`, the iterator has advanced forward to the second argument.
//! Essentially, parameters that explicitly name their argument do not affect
//! parameters that do not name an argument in terms of positional specifiers.
//!
//! A format string is required to use all of its arguments, otherwise it is a
//! compile-time error. You may refer to the same argument more than once in the
//! format string.
//!
//! ## Named parameters
//!
//! Rust itself does not have a Python-like equivalent of named parameters to a
//! function, but the [`format!`] macro is a syntax extension that allows it to
//! leverage named parameters. Named parameters are listed at the end of the
//! argument list and have the syntax:
//!
//! ```text
//! identifier '=' expression
//! ```
//!
//! For example, the following [`format!`] expressions all use named arguments:
//!
//! ```
//! format!("{argument}", argument = "test");   // => "test"
//! format!("{name} {}", 1, name = 2);          // => "2 1"
//! format!("{a} {c} {b}", a="a", b='b', c=3);  // => "a 3 b"
//! ```
//!
//! It is not valid to put positional parameters (those without names) after
//! arguments that have names. Like with positional parameters, it is not
//! valid to provide named parameters that are unused by the format string.
//!
//! # Formatting Parameters
//!
//! Each argument being formatted can be transformed by a number of formatting
//! parameters (corresponding to `format_spec` in [the syntax](#syntax)). These
//! parameters affect the string representation of what's being formatted.
//!
//! ## Width
//!
//! ```
//! // All of these print "Hello x    !"
//! println!("Hello {:5}!", "x");
//! println!("Hello {:1$}!", "x", 5);
//! println!("Hello {1:0$}!", 5, "x");
//! println!("Hello {:width$}!", "x", width = 5);
//! ```
//!
//! This is a parameter for the "minimum width" that the format should take up.
//! If the value's string does not fill up this many characters, then the
//! padding specified by fill/alignment will be used to take up the required
//! space (see below).
//!
//! The value for the width can also be provided as a [`usize`] in the list of
//! parameters by adding a postfix `$`, indicating that the second argument is
//! a [`usize`] specifying the width.
//!
//! Referring to an argument with the dollar syntax does not affect the "next
//! argument" counter, so it's usually a good idea to refer to arguments by
//! position, or use named arguments.
//!
//! ## Fill/Alignment
//!
//! ```
//! assert_eq!(format!("Hello {:<5}!", "x"),  "Hello x    !");
//! assert_eq!(format!("Hello {:-<5}!", "x"), "Hello x----!");
//! assert_eq!(format!("Hello {:^5}!", "x"),  "Hello   x  !");
//! assert_eq!(format!("Hello {:>5}!", "x"),  "Hello     x!");
//! ```
//!
//! The optional fill character and alignment are provided normally in conjunction with the
//! [`width`](#width) parameter. They must be defined before `width`, right after the `:`.
//! This indicates that if the value being formatted is smaller than
//! `width` some extra characters will be printed around it.
//! Filling comes in the following variants for different alignments:
//!
//! * `[fill]<` - the argument is left-aligned in `width` columns
//! * `[fill]^` - the argument is center-aligned in `width` columns
//! * `[fill]>` - the argument is right-aligned in `width` columns
//!
//! The default [fill/alignment](#fillalignment) for non-numerics is a space and
//! left-aligned.
//! The default for numeric formatters is also a space character but with
//! right-alignment. If the `0` flag (see below) is specified for numerics,
//! then the implicit fill character is `0`.
//!
//! Note that alignment may not be implemented by some types. In particular, it
//! is not generally implemented for the `Debug` trait. A good way to ensure
//! padding is applied is to format your input, then pad this resulting string
//! to obtain your output:
//!
//! ```
//! println!("Hello {:^15}!", format!("{:?}", Some("hi"))); // => "Hello   Some("hi")   !"
//! ```
//!
//! ## Sign/`#`/`0`
//!
//! ```
//! assert_eq!(format!("Hello {:+}!", 5), "Hello +5!");
//! assert_eq!(format!("{:#x}!", 27), "0x1b!");
//! assert_eq!(format!("Hello {:05}!", 5), "Hello 00005!");
//! assert_eq!(format!("Hello {:05}!", -5), "Hello -0005!");
//! assert_eq!(format!("{:#010x}!", 27), "0x0000001b!");
//! ```
//!
//! These are all flags altering the behavior of the formatter.
//!
//! * `+` - This is intended for numeric types and indicates that the sign
//!         should always be printed. Positive signs are never printed by
//!         default, and the negative sign is only printed by default for signed values.
//!         This flag indicates that the correct sign (`+` or `-`) should always be printed.
//! * `-` - Currently not used
//! * `#` - This flag indicates that the "alternate" form of printing should
//!         be used. The alternate forms are:
//!     * `#?` - pretty-print the [`Debug`] formatting (adds linebreaks and indentation)
//!     * `#x` - precedes the argument with a `0x`
//!     * `#X` - precedes the argument with a `0x`
//!     * `#b` - precedes the argument with a `0b`
//!     * `#o` - precedes the argument with a `0o`
//! * `0` - This is used to indicate for integer formats that the padding to `width` should
//!         both be done with a `0` character as well as be sign-aware. A format
//!         like `{:08}` would yield `00000001` for the integer `1`, while the
//!         same format would yield `-0000001` for the integer `-1`. Notice that
//!         the negative version has one fewer zero than the positive version.
//!         Note that padding zeros are always placed after the sign (if any)
//!         and before the digits. When used together with the `#` flag, a similar
//!         rule applies: padding zeros are inserted after the prefix but before
//!         the digits. The prefix is included in the total width.
//!
//! ## Precision
//!
//! For non-numeric types, this can be considered a "maximum width". If the resulting string is
//! longer than this width, then it is truncated down to this many characters and that truncated
//! value is emitted with proper `fill`, `alignment` and `width` if those parameters are set.
//!
//! For integral types, this is ignored.
//!
//! For floating-point types, this indicates how many digits after the decimal point should be
//! printed.
//!
//! There are three possible ways to specify the desired `precision`:
//!
//! 1. An integer `.N`:
//!
//!    the integer `N` itself is the precision.
//!
//! 2. An integer or name followed by dollar sign `.N$`:
//!
//!    use format *argument* `N` (which must be a `usize`) as the precision.
//!
//! 3. An asterisk `.*`:
//!
//!    `.*` means that this `{...}` is associated with *two* format inputs rather than one: the
//!    first input holds the `usize` precision, and the second holds the value to print. Note that
//!    in this case, if one uses the format string `{<arg>:.*}`, then the `<arg>` part refers
//!    to the *value* to print, and the `precision` must come in the input preceding `<arg>`.
//!
//! For example, the following calls all print the same thing `Hello x is 0.01000`:
//!
//! ```
//! // Hello {arg 0 ("x")} is {arg 1 (0.01) with precision specified inline (5)}
//! println!("Hello {0} is {1:.5}", "x", 0.01);
//!
//! // Hello {arg 1 ("x")} is {arg 2 (0.01) with precision specified in arg 0 (5)}
//! println!("Hello {1} is {2:.0$}", 5, "x", 0.01);
//!
//! // Hello {arg 0 ("x")} is {arg 2 (0.01) with precision specified in arg 1 (5)}
//! println!("Hello {0} is {2:.1$}", "x", 5, 0.01);
//!
//! // Hello {next arg ("x")} is {second of next two args (0.01) with precision
//! // specified in first of next two args (5)}
//! println!("Hello {} is {:.*}", "x", 5, 0.01);
//!
//! // Hello {next arg ("x")} is {arg 2 (0.01) with precision
//! // specified in its predecessor (5)}
//! println!("Hello {} is {2:.*}", "x", 5, 0.01);
//!
//! // Hello {next arg ("x")} is {arg "number" (0.01) with precision specified
//! // in arg "prec" (5)}
//! println!("Hello {} is {number:.prec$}", "x", prec = 5, number = 0.01);
//! ```
//!
//! While these:
//!
//! ```
//! println!("{}, `{name:.*}` has 3 fractional digits", "Hello", 3, name=1234.56);
//! println!("{}, `{name:.*}` has 3 characters", "Hello", 3, name="1234.56");
//! println!("{}, `{name:>8.*}` has 3 right-aligned characters", "Hello", 3, name="1234.56");
//! ```
//!
//! print three significantly different things:
//!
//! ```text
//! Hello, `1234.560` has 3 fractional digits
//! Hello, `123` has 3 characters
//! Hello, `     123` has 3 right-aligned characters
//! ```
//!
//! ## Localization
//!
//! In some programming languages, the behavior of string formatting functions
//! depends on the operating system's locale setting. The format functions
//! provided by Rust's standard library do not have any concept of locale and
//! will produce the same results on all systems regardless of user
//! configuration.
//!
//! For example, the following code will always print `1.5` even if the system
//! locale uses a decimal separator other than a dot.
//!
//! ```
//! println!("The value is {}", 1.5);
//! ```
//!
//! # Escaping
//!
//! The literal characters `{` and `}` may be included in a string by preceding
//! them with the same character. For example, the `{` character is escaped with
//! `{{` and the `}` character is escaped with `}}`.
//!
//! ```
//! assert_eq!(format!("Hello {{}}"), "Hello {}");
//! assert_eq!(format!("{{ Hello"), "{ Hello");
//! ```
//!
//! # Syntax
//!
//! To summarize, here you can find the full grammar of format strings.
//! The syntax for the formatting language used is drawn from other languages,
//! so it should not be too alien. Arguments are formatted with Python-like
//! syntax, meaning that arguments are surrounded by `{}` instead of the C-like
//! `%`. The actual grammar for the formatting syntax is:
//!
//! ```text
//! format_string := text [ maybe_format text ] *
//! maybe_format := '{' '{' | '}' '}' | format
//! format := '{' [ argument ] [ ':' format_spec ] '}'
//! argument := integer | identifier
//!
//! format_spec := [[fill]align][sign]['#']['0'][width]['.' precision]type
//! fill := character
//! align := '<' | '^' | '>'
//! sign := '+' | '-'
//! width := count
//! precision := count | '*'
//! type := '' | '?' | 'x?' | 'X?' | identifier
//! count := parameter | integer
//! parameter := argument '$'
//! ```
//!
//! In the above grammar, `text` may not contain any `'{'` or `'}'` characters.
//!
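//! For instance, the spec in `{:~>+9.2e}` parses as fill `~`, align `>`,
//! sign `+`, width `9`, precision `2`, and type `e`:
//!
//! ```
//! assert_eq!(format!("{:~>+9.2e}", 1234.5), "~~+1.23e3");
//! ```
//!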
//! # Formatting traits
//!
//! When requesting that an argument be formatted with a particular type, you
//! are actually requesting that an argument ascribes to a particular trait.
//! This allows multiple actual types to be formatted via `{:x}` (like [`i8`] as
//! well as [`isize`]). The current mapping of types to traits is:
//!
//! * *nothing* ⇒ [`Display`]
//! * `?` ⇒ [`Debug`]
//! * `x?` ⇒ [`Debug`] with lower-case hexadecimal integers
//! * `X?` ⇒ [`Debug`] with upper-case hexadecimal integers
//! * `o` ⇒ [`Octal`]
//! * `x` ⇒ [`LowerHex`]
//! * `X` ⇒ [`UpperHex`]
//! * `p` ⇒ [`Pointer`]
//! * `b` ⇒ [`Binary`]
//! * `e` ⇒ [`LowerExp`]
//! * `E` ⇒ [`UpperExp`]
//!
//! What this means is that any type of argument which implements the
//! [`fmt::Binary`][`Binary`] trait can then be formatted with `{:b}`. Implementations
//! are provided for these traits for a number of primitive types by the
//! standard library as well. If no format is specified (as in `{}` or `{:6}`),
//! then the format trait used is the [`Display`] trait.
//!
//! When implementing a format trait for your own type, you will have to
//! implement a method of the signature:
//!
//! ```
//! # #![allow(dead_code)]
//! # use std::fmt;
//! # struct Foo; // our custom type
//! # impl fmt::Display for Foo {
//! fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
//! # write!(f, "testing, testing")
//! # } }
//! ```
//!
//! Your type will be passed as `self` by-reference, and then the function
//! should emit output into the `f.buf` stream. It is up to each format trait
//! implementation to correctly adhere to the requested formatting parameters.
//! The values of these parameters will be listed in the fields of the
//! [`Formatter`] struct. In order to help with this, the [`Formatter`] struct also
//! provides some helper methods.
//!
//! Additionally, the return value of this function is [`fmt::Result`] which is a
//! type alias of [`Result`]`<(), `[`std::fmt::Error`]`>`. Formatting implementations
//! should ensure that they propagate errors from the [`Formatter`] (e.g., when
//! calling [`write!`]). However, they should never return errors spuriously. That
//! is, a formatting implementation must and may only return an error if the
//! passed-in [`Formatter`] returns an error. This is because, contrary to what
//! the function signature might suggest, string formatting is an infallible
//! operation. This function only returns a result because writing to the
//! underlying stream might fail and it must provide a way to propagate the fact
//! that an error has occurred back up the stack.
//!
//! An example of implementing the formatting traits would look
//! like:
//!
//! ```
//! use std::fmt;
//!
//! #[derive(Debug)]
//! struct Vector2D {
//!     x: isize,
//!     y: isize,
//! }
//!
//! impl fmt::Display for Vector2D {
//!     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
//!         // The `f` value implements the `Write` trait, which is what the
//!         // write! macro is expecting. Note that this formatting ignores the
//!         // various flags provided to format strings.
//!         write!(f, "({}, {})", self.x, self.y)
//!     }
//! }
//!
//! // Different traits allow different forms of output of a type. The meaning
//! // of this format is to print the magnitude of a vector.
//! impl fmt::Binary for Vector2D {
//!     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
//!         let magnitude = (self.x * self.x + self.y * self.y) as f64;
//!         let magnitude = magnitude.sqrt();
//!
//!         // Respect the formatting flags by using the helper method
//!         // `pad_integral` on the Formatter object. See the method
//!         // documentation for details, and the function `pad` can be used
//!         // to pad strings.
//!         let decimals = f.precision().unwrap_or(3);
//!         let string = format!("{:.*}", decimals, magnitude);
//!         f.pad_integral(true, "", &string)
//!     }
//! }
//!
//! fn main() {
//!     let myvector = Vector2D { x: 3, y: 4 };
//!
//!     println!("{}", myvector);       // => "(3, 4)"
//!     println!("{:?}", myvector);     // => "Vector2D { x: 3, y: 4 }"
//!     println!("{:10.3b}", myvector); // => "     5.000"
//! }
//! ```
//!
//! ### `fmt::Display` vs `fmt::Debug`
//!
//! These two formatting traits have distinct purposes:
//!
//! - [`fmt::Display`][`Display`] implementations assert that the type can be faithfully
//!   represented as a UTF-8 string at all times. It is **not** expected that
//!   all types implement the [`Display`] trait.
//! - [`fmt::Debug`][`Debug`] implementations should be implemented for **all** public types.
//!   Output will typically represent the internal state as faithfully as possible.
//!   The purpose of the [`Debug`] trait is to facilitate debugging Rust code. In
//!   most cases, using `#[derive(Debug)]` is sufficient and recommended.
//!
//! Some examples of the output from both traits:
//!
//! ```
//! assert_eq!(format!("{} {:?}", 3, 4), "3 4");
//! assert_eq!(format!("{} {:?}", 'a', 'b'), "a 'b'");
//! assert_eq!(format!("{} {:?}", "foo\n", "bar\n"), "foo\n \"bar\\n\"");
//! ```
//!
//! # Related macros
//!
//! There are a number of related macros in the [`format!`] family. The ones that
//! are currently implemented are:
//!
//! ```ignore (only-for-syntax-highlight)
//! format!      // described above
//! write!       // first argument is a &mut io::Write, the destination
//! writeln!     // same as write but appends a newline
//! print!       // the format string is printed to the standard output
//! println!     // same as print but appends a newline
//! eprint!      // the format string is printed to the standard error
//! eprintln!    // same as eprint but appends a newline
//! format_args! // described below.
//! ```
//!
//! ### `write!`
//!
//! This and [`writeln!`] are two macros which are used to emit the format string
//! to a specified stream. This is used to prevent intermediate allocations of
//! format strings and instead directly write the output. Under the hood, this
//! function is actually invoking the [`write_fmt`] function defined on the
//! [`std::io::Write`] trait. Example usage is:
//!
//! ```
//! # #![allow(unused_must_use)]
//! use std::io::Write;
//! let mut w = Vec::new();
//! write!(&mut w, "Hello {}!", "world");
//! ```
//!
//! ### `print!`
//!
//! This and [`println!`] emit their output to stdout. Similarly to the [`write!`]
//! macro, the goal of these macros is to avoid intermediate allocations when
//! printing output. Example usage is:
//!
//! ```
//! print!("Hello {}!", "world");
//! println!("I have a newline {}", "character at the end");
//! ```
//!
//! ### `eprint!`
//!
//! The [`eprint!`] and [`eprintln!`] macros are identical to
//! [`print!`] and [`println!`], respectively, except they emit their
//! output to stderr.
//!
//! ### `format_args!`
//!
//! This is a curious macro used to safely pass around
//! an opaque object describing the format string. This object
//! does not require any heap allocations to create, and it only
//! references information on the stack. Under the hood, all of
//! the related macros are implemented in terms of this. First
//! off, some example usage is:
//!
//! ```
//! # #![allow(unused_must_use)]
//! use std::fmt;
//! use std::io::{self, Write};
//!
let mut some_writer = io::stdout(); //! write!(&mut some_writer, "{}", format_args!("print with a {}", "macro")); //! //! fn my_fmt_fn(args: fmt::Arguments) { //! write!(&mut io::stdout(), "{}", args); //! } //! my_fmt_fn(format_args!(", or a {} too", "function")); //! ``` //! //! The result of the [`format_args!`] macro is a value of type [`fmt::Arguments`]. //! This structure can then be passed to the [`write`] and [`format`] functions //! inside this module in order to process the format string. //! The goal of this macro is to even further prevent intermediate allocations //! when dealing with formatting strings. //! //! For example, a logging library could use the standard formatting syntax, but //! it would internally pass around this structure until it has been determined //! where output should go to. //! //! [`fmt::Result`]: Result //! [`Result`]: core::result::Result //! [`std::fmt::Error`]: Error //! [`write!`]: core::write //! [`write`]: core::write //! [`format!`]: crate::format //! [`to_string`]: crate::string::ToString //! [`writeln!`]: core::writeln //! [`write_fmt`]: ../../std/io/trait.Write.html#method.write_fmt //! [`std::io::Write`]: ../../std/io/trait.Write.html //! [`print!`]: ../../std/macro.print.html //! [`println!`]: ../../std/macro.println.html //! [`eprint!`]: ../../std/macro.eprint.html //! [`eprintln!`]: ../../std/macro.eprintln.html //! [`format_args!`]: core::format_args //! [`fmt::Arguments`]: Arguments //! [`format`]: crate::format #![stable(feature = "rust1", since = "1.0.0")] #[unstable(feature = "fmt_internals", issue = "none")] pub use core::fmt::rt; #[stable(feature = "fmt_flags_align", since = "1.28.0")] pub use core::fmt::Alignment; #[stable(feature = "rust1", since = "1.0.0")] pub use core::fmt::Error; #[stable(feature = "rust1", since = "1.0.0")] pub use core::fmt::{write, ArgumentV1, Arguments}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::fmt::{Binary, Octal}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::fmt::{Debug, Display}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::fmt::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::fmt::{Formatter, Result, Write}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::fmt::{LowerExp, UpperExp}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::fmt::{LowerHex, Pointer, UpperHex}; use crate::string; /// The `format` function takes an [`Arguments`] struct and returns the resulting /// formatted string. /// /// The [`Arguments`] instance can be created with the [`format_args!`] macro. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::fmt; /// /// let s = fmt::format(format_args!("Hello, {}!", "world")); /// assert_eq!(s, "Hello, world!"); /// ``` /// /// Please note that using [`format!`] might be preferable. 
/// Example: /// /// ``` /// let s = format!("Hello, {}!", "world"); /// assert_eq!(s, "Hello, world!"); /// ``` /// /// [`format_args!`]: core::format_args /// [`format!`]: crate::format #[stable(feature = "rust1", since = "1.0.0")] pub fn format(args: Arguments<'_>) -> string::String { let capacity = args.estimated_capacity(); let mut output = string::String::with_capacity(capacity); output.write_fmt(args).expect("a formatting trait implementation returned an error"); output } use super::*; use std::boxed::Box; use std::cell::RefCell; use std::clone::Clone; use std::convert::{From, TryInto}; use std::mem::drop; use std::option::Option::{self, None, Some}; use std::result::Result::{Err, Ok}; #[test] fn test_clone() { let x = Rc::new(RefCell::new(5)); let y = x.clone(); *x.borrow_mut() = 20; assert_eq!(*y.borrow(), 20); } #[test] fn test_simple() { let x = Rc::new(5); assert_eq!(*x, 5); } #[test] fn test_simple_clone() { let x = Rc::new(5); let y = x.clone(); assert_eq!(*x, 5); assert_eq!(*y, 5); } #[test] fn test_destructor() { let x: Rc> = Rc::new(box 5); assert_eq!(**x, 5); } #[test] fn test_live() { let x = Rc::new(5); let y = Rc::downgrade(&x); assert!(y.upgrade().is_some()); } #[test] fn test_dead() { let x = Rc::new(5); let y = Rc::downgrade(&x); drop(x); assert!(y.upgrade().is_none()); } #[test] fn weak_self_cyclic() { struct Cycle { x: RefCell>>, } let a = Rc::new(Cycle { x: RefCell::new(None) }); let b = Rc::downgrade(&a.clone()); *a.x.borrow_mut() = Some(b); // hopefully we don't double-free (or leak)... } #[test] fn is_unique() { let x = Rc::new(3); assert!(Rc::is_unique(&x)); let y = x.clone(); assert!(!Rc::is_unique(&x)); drop(y); assert!(Rc::is_unique(&x)); let w = Rc::downgrade(&x); assert!(!Rc::is_unique(&x)); drop(w); assert!(Rc::is_unique(&x)); } #[test] fn test_strong_count() { let a = Rc::new(0); assert!(Rc::strong_count(&a) == 1); let w = Rc::downgrade(&a); assert!(Rc::strong_count(&a) == 1); let b = w.upgrade().expect("upgrade of live rc failed"); assert!(Rc::strong_count(&b) == 2); assert!(Rc::strong_count(&a) == 2); drop(w); drop(a); assert!(Rc::strong_count(&b) == 1); let c = b.clone(); assert!(Rc::strong_count(&b) == 2); assert!(Rc::strong_count(&c) == 2); } #[test] fn test_weak_count() { let a = Rc::new(0); assert!(Rc::strong_count(&a) == 1); assert!(Rc::weak_count(&a) == 0); let w = Rc::downgrade(&a); assert!(Rc::strong_count(&a) == 1); assert!(Rc::weak_count(&a) == 1); drop(w); assert!(Rc::strong_count(&a) == 1); assert!(Rc::weak_count(&a) == 0); let c = a.clone(); assert!(Rc::strong_count(&a) == 2); assert!(Rc::weak_count(&a) == 0); drop(c); } #[test] fn weak_counts() { assert_eq!(Weak::weak_count(&Weak::::new()), 0); assert_eq!(Weak::strong_count(&Weak::::new()), 0); let a = Rc::new(0); let w = Rc::downgrade(&a); assert_eq!(Weak::strong_count(&w), 1); assert_eq!(Weak::weak_count(&w), 1); let w2 = w.clone(); assert_eq!(Weak::strong_count(&w), 1); assert_eq!(Weak::weak_count(&w), 2); assert_eq!(Weak::strong_count(&w2), 1); assert_eq!(Weak::weak_count(&w2), 2); drop(w); assert_eq!(Weak::strong_count(&w2), 1); assert_eq!(Weak::weak_count(&w2), 1); let a2 = a.clone(); assert_eq!(Weak::strong_count(&w2), 2); assert_eq!(Weak::weak_count(&w2), 1); drop(a2); drop(a); assert_eq!(Weak::strong_count(&w2), 0); assert_eq!(Weak::weak_count(&w2), 0); drop(w2); } #[test] fn try_unwrap() { let x = Rc::new(3); assert_eq!(Rc::try_unwrap(x), Ok(3)); let x = Rc::new(4); let _y = x.clone(); assert_eq!(Rc::try_unwrap(x), Err(Rc::new(4))); let x = Rc::new(5); let _w = 
Rc::downgrade(&x); assert_eq!(Rc::try_unwrap(x), Ok(5)); } #[test] fn into_from_raw() { let x = Rc::new(box "hello"); let y = x.clone(); let x_ptr = Rc::into_raw(x); drop(y); unsafe { assert_eq!(**x_ptr, "hello"); let x = Rc::from_raw(x_ptr); assert_eq!(**x, "hello"); assert_eq!(Rc::try_unwrap(x).map(|x| *x), Ok("hello")); } } #[test] fn test_into_from_raw_unsized() { use std::fmt::Display; use std::string::ToString; let rc: Rc = Rc::from("foo"); let ptr = Rc::into_raw(rc.clone()); let rc2 = unsafe { Rc::from_raw(ptr) }; assert_eq!(unsafe { &*ptr }, "foo"); assert_eq!(rc, rc2); let rc: Rc = Rc::new(123); let ptr = Rc::into_raw(rc.clone()); let rc2 = unsafe { Rc::from_raw(ptr) }; assert_eq!(unsafe { &*ptr }.to_string(), "123"); assert_eq!(rc2.to_string(), "123"); } #[test] fn into_from_weak_raw() { let x = Rc::new(box "hello"); let y = Rc::downgrade(&x); let y_ptr = Weak::into_raw(y); unsafe { assert_eq!(**y_ptr, "hello"); let y = Weak::from_raw(y_ptr); let y_up = Weak::upgrade(&y).unwrap(); assert_eq!(**y_up, "hello"); drop(y_up); assert_eq!(Rc::try_unwrap(x).map(|x| *x), Ok("hello")); } } #[test] fn test_into_from_weak_raw_unsized() { use std::fmt::Display; use std::string::ToString; let arc: Rc = Rc::from("foo"); let weak: Weak = Rc::downgrade(&arc); let ptr = Weak::into_raw(weak.clone()); let weak2 = unsafe { Weak::from_raw(ptr) }; assert_eq!(unsafe { &*ptr }, "foo"); assert!(weak.ptr_eq(&weak2)); let arc: Rc = Rc::new(123); let weak: Weak = Rc::downgrade(&arc); let ptr = Weak::into_raw(weak.clone()); let weak2 = unsafe { Weak::from_raw(ptr) }; assert_eq!(unsafe { &*ptr }.to_string(), "123"); assert!(weak.ptr_eq(&weak2)); } #[test] fn get_mut() { let mut x = Rc::new(3); *Rc::get_mut(&mut x).unwrap() = 4; assert_eq!(*x, 4); let y = x.clone(); assert!(Rc::get_mut(&mut x).is_none()); drop(y); assert!(Rc::get_mut(&mut x).is_some()); let _w = Rc::downgrade(&x); assert!(Rc::get_mut(&mut x).is_none()); } #[test] fn test_cowrc_clone_make_unique() { let mut cow0 = Rc::new(75); let mut cow1 = cow0.clone(); let mut cow2 = cow1.clone(); assert!(75 == *Rc::make_mut(&mut cow0)); assert!(75 == *Rc::make_mut(&mut cow1)); assert!(75 == *Rc::make_mut(&mut cow2)); *Rc::make_mut(&mut cow0) += 1; *Rc::make_mut(&mut cow1) += 2; *Rc::make_mut(&mut cow2) += 3; assert!(76 == *cow0); assert!(77 == *cow1); assert!(78 == *cow2); // none should point to the same backing memory assert!(*cow0 != *cow1); assert!(*cow0 != *cow2); assert!(*cow1 != *cow2); } #[test] fn test_cowrc_clone_unique2() { let mut cow0 = Rc::new(75); let cow1 = cow0.clone(); let cow2 = cow1.clone(); assert!(75 == *cow0); assert!(75 == *cow1); assert!(75 == *cow2); *Rc::make_mut(&mut cow0) += 1; assert!(76 == *cow0); assert!(75 == *cow1); assert!(75 == *cow2); // cow1 and cow2 should share the same contents // cow0 should have a unique reference assert!(*cow0 != *cow1); assert!(*cow0 != *cow2); assert!(*cow1 == *cow2); } #[test] fn test_cowrc_clone_weak() { let mut cow0 = Rc::new(75); let cow1_weak = Rc::downgrade(&cow0); assert!(75 == *cow0); assert!(75 == *cow1_weak.upgrade().unwrap()); *Rc::make_mut(&mut cow0) += 1; assert!(76 == *cow0); assert!(cow1_weak.upgrade().is_none()); } #[test] fn test_show() { let foo = Rc::new(75); assert_eq!(format!("{:?}", foo), "75"); } #[test] fn test_unsized() { let foo: Rc<[i32]> = Rc::new([1, 2, 3]); assert_eq!(foo, foo.clone()); } #[test] fn test_maybe_thin_unsized() { // If/when custom thin DSTs exist, this test should be updated to use one use std::ffi::{CStr, CString}; let x: Rc = 
Rc::from(CString::new("swordfish").unwrap().into_boxed_c_str()); assert_eq!(format!("{:?}", x), "\"swordfish\""); let y: Weak = Rc::downgrade(&x); drop(x); // At this point, the weak points to a dropped DST assert!(y.upgrade().is_none()); // But we still need to be able to get the alloc layout to drop. // CStr has no drop glue, but custom DSTs might, and need to work. drop(y); } #[test] fn test_from_owned() { let foo = 123; let foo_rc = Rc::from(foo); assert!(123 == *foo_rc); } #[test] fn test_new_weak() { let foo: Weak = Weak::new(); assert!(foo.upgrade().is_none()); } #[test] fn test_ptr_eq() { let five = Rc::new(5); let same_five = five.clone(); let other_five = Rc::new(5); assert!(Rc::ptr_eq(&five, &same_five)); assert!(!Rc::ptr_eq(&five, &other_five)); } #[test] fn test_from_str() { let r: Rc = Rc::from("foo"); assert_eq!(&r[..], "foo"); } #[test] fn test_copy_from_slice() { let s: &[u32] = &[1, 2, 3]; let r: Rc<[u32]> = Rc::from(s); assert_eq!(&r[..], [1, 2, 3]); } #[test] fn test_clone_from_slice() { #[derive(Clone, Debug, Eq, PartialEq)] struct X(u32); let s: &[X] = &[X(1), X(2), X(3)]; let r: Rc<[X]> = Rc::from(s); assert_eq!(&r[..], s); } #[test] #[should_panic] fn test_clone_from_slice_panic() { use std::string::{String, ToString}; struct Fail(u32, String); impl Clone for Fail { fn clone(&self) -> Fail { if self.0 == 2 { panic!(); } Fail(self.0, self.1.clone()) } } let s: &[Fail] = &[Fail(0, "foo".to_string()), Fail(1, "bar".to_string()), Fail(2, "baz".to_string())]; // Should panic, but not cause memory corruption let _r: Rc<[Fail]> = Rc::from(s); } #[test] fn test_from_box() { let b: Box = box 123; let r: Rc = Rc::from(b); assert_eq!(*r, 123); } #[test] fn test_from_box_str() { use std::string::String; let s = String::from("foo").into_boxed_str(); let r: Rc = Rc::from(s); assert_eq!(&r[..], "foo"); } #[test] fn test_from_box_slice() { let s = vec![1, 2, 3].into_boxed_slice(); let r: Rc<[u32]> = Rc::from(s); assert_eq!(&r[..], [1, 2, 3]); } #[test] fn test_from_box_trait() { use std::fmt::Display; use std::string::ToString; let b: Box = box 123; let r: Rc = Rc::from(b); assert_eq!(r.to_string(), "123"); } #[test] fn test_from_box_trait_zero_sized() { use std::fmt::Debug; let b: Box = box (); let r: Rc = Rc::from(b); assert_eq!(format!("{:?}", r), "()"); } #[test] fn test_from_vec() { let v = vec![1, 2, 3]; let r: Rc<[u32]> = Rc::from(v); assert_eq!(&r[..], [1, 2, 3]); } #[test] fn test_downcast() { use std::any::Any; let r1: Rc = Rc::new(i32::MAX); let r2: Rc = Rc::new("abc"); assert!(r1.clone().downcast::().is_err()); let r1i32 = r1.downcast::(); assert!(r1i32.is_ok()); assert_eq!(r1i32.unwrap(), Rc::new(i32::MAX)); assert!(r2.clone().downcast::().is_err()); let r2str = r2.downcast::<&'static str>(); assert!(r2str.is_ok()); assert_eq!(r2str.unwrap(), Rc::new("abc")); } #[test] fn test_array_from_slice() { let v = vec![1, 2, 3]; let r: Rc<[u32]> = Rc::from(v); let a: Result, _> = r.clone().try_into(); assert!(a.is_ok()); let a: Result, _> = r.clone().try_into(); assert!(a.is_err()); } #[test] fn test_rc_cyclic_with_zero_refs() { struct ZeroRefs { inner: Weak, } let zero_refs = Rc::new_cyclic(|inner| { assert_eq!(inner.strong_count(), 0); assert!(inner.upgrade().is_none()); ZeroRefs { inner: Weak::new() } }); assert_eq!(Rc::strong_count(&zero_refs), 1); assert_eq!(Rc::weak_count(&zero_refs), 0); assert_eq!(zero_refs.inner.strong_count(), 0); assert_eq!(zero_refs.inner.weak_count(), 0); } #[test] fn test_rc_cyclic_with_one_ref() { struct OneRef { inner: Weak, } let one_ref = 
Rc::new_cyclic(|inner| { assert_eq!(inner.strong_count(), 0); assert!(inner.upgrade().is_none()); OneRef { inner: inner.clone() } }); assert_eq!(Rc::strong_count(&one_ref), 1); assert_eq!(Rc::weak_count(&one_ref), 1); let one_ref2 = Weak::upgrade(&one_ref.inner).unwrap(); assert!(Rc::ptr_eq(&one_ref, &one_ref2)); assert_eq!(one_ref.inner.strong_count(), 2); assert_eq!(one_ref.inner.weak_count(), 1); } #[test] fn test_rc_cyclic_with_two_ref() { struct TwoRefs { inner: Weak, inner1: Weak, } let two_refs = Rc::new_cyclic(|inner| { assert_eq!(inner.strong_count(), 0); assert!(inner.upgrade().is_none()); TwoRefs { inner: inner.clone(), inner1: inner.clone() } }); assert_eq!(Rc::strong_count(&two_refs), 1); assert_eq!(Rc::weak_count(&two_refs), 2); let two_ref3 = Weak::upgrade(&two_refs.inner).unwrap(); assert!(Rc::ptr_eq(&two_refs, &two_ref3)); let two_ref2 = Weak::upgrade(&two_refs.inner1).unwrap(); assert!(Rc::ptr_eq(&two_refs, &two_ref2)); assert_eq!(Rc::strong_count(&two_refs), 3); assert_eq!(Rc::weak_count(&two_refs), 2); } //! A doubly-linked list with owned nodes. //! //! The `LinkedList` allows pushing and popping elements at either end //! in constant time. //! //! NOTE: It is almost always better to use [`Vec`] or [`VecDeque`] because //! array-based containers are generally faster, //! more memory efficient, and make better use of CPU cache. //! //! [`Vec`]: crate::vec::Vec //! [`VecDeque`]: super::vec_deque::VecDeque #![stable(feature = "rust1", since = "1.0.0")] use core::cmp::Ordering; use core::fmt; use core::hash::{Hash, Hasher}; use core::iter::{FromIterator, FusedIterator}; use core::marker::PhantomData; use core::mem; use core::ptr::NonNull; use super::SpecExtend; use crate::boxed::Box; #[cfg(test)] mod tests; /// A doubly-linked list with owned nodes. /// /// The `LinkedList` allows pushing and popping elements at either end /// in constant time. /// /// NOTE: It is almost always better to use `Vec` or `VecDeque` because /// array-based containers are generally faster, /// more memory efficient, and make better use of CPU cache. #[stable(feature = "rust1", since = "1.0.0")] #[cfg_attr(not(test), rustc_diagnostic_item = "LinkedList")] pub struct LinkedList { head: Option>>, tail: Option>>, len: usize, marker: PhantomData>>, } struct Node { next: Option>>, prev: Option>>, element: T, } /// An iterator over the elements of a `LinkedList`. /// /// This `struct` is created by [`LinkedList::iter()`]. See its /// documentation for more. #[stable(feature = "rust1", since = "1.0.0")] pub struct Iter<'a, T: 'a> { head: Option>>, tail: Option>>, len: usize, marker: PhantomData<&'a Node>, } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for Iter<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("Iter").field(&self.len).finish() } } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Iter<'_, T> { fn clone(&self) -> Self { Iter { ..*self } } } /// A mutable iterator over the elements of a `LinkedList`. /// /// This `struct` is created by [`LinkedList::iter_mut()`]. See its /// documentation for more. #[stable(feature = "rust1", since = "1.0.0")] pub struct IterMut<'a, T: 'a> { // We do *not* exclusively own the entire list here, references to node's `element` // have been handed out by the iterator! So be careful when using this; the methods // called must be aware that there can be aliasing pointers to `element`. 
list: &'a mut LinkedList, head: Option>>, tail: Option>>, len: usize, } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for IterMut<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("IterMut").field(&self.list).field(&self.len).finish() } } /// An owning iterator over the elements of a `LinkedList`. /// /// This `struct` is created by the [`into_iter`] method on [`LinkedList`] /// (provided by the `IntoIterator` trait). See its documentation for more. /// /// [`into_iter`]: LinkedList::into_iter #[derive(Clone)] #[stable(feature = "rust1", since = "1.0.0")] pub struct IntoIter { list: LinkedList, } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("IntoIter").field(&self.list).finish() } } impl Node { fn new(element: T) -> Self { Node { next: None, prev: None, element } } fn into_element(self: Box) -> T { self.element } } // private methods impl LinkedList { /// Adds the given node to the front of the list. #[inline] fn push_front_node(&mut self, mut node: Box>) { // This method takes care not to create mutable references to whole nodes, // to maintain validity of aliasing pointers into `element`. unsafe { node.next = self.head; node.prev = None; let node = Some(Box::leak(node).into()); match self.head { None => self.tail = node, // Not creating new mutable (unique!) references overlapping `element`. Some(head) => (*head.as_ptr()).prev = node, } self.head = node; self.len += 1; } } /// Removes and returns the node at the front of the list. #[inline] fn pop_front_node(&mut self) -> Option>> { // This method takes care not to create mutable references to whole nodes, // to maintain validity of aliasing pointers into `element`. self.head.map(|node| unsafe { let node = Box::from_raw(node.as_ptr()); self.head = node.next; match self.head { None => self.tail = None, // Not creating new mutable (unique!) references overlapping `element`. Some(head) => (*head.as_ptr()).prev = None, } self.len -= 1; node }) } /// Adds the given node to the back of the list. #[inline] fn push_back_node(&mut self, mut node: Box>) { // This method takes care not to create mutable references to whole nodes, // to maintain validity of aliasing pointers into `element`. unsafe { node.next = None; node.prev = self.tail; let node = Some(Box::leak(node).into()); match self.tail { None => self.head = node, // Not creating new mutable (unique!) references overlapping `element`. Some(tail) => (*tail.as_ptr()).next = node, } self.tail = node; self.len += 1; } } /// Removes and returns the node at the back of the list. #[inline] fn pop_back_node(&mut self) -> Option>> { // This method takes care not to create mutable references to whole nodes, // to maintain validity of aliasing pointers into `element`. self.tail.map(|node| unsafe { let node = Box::from_raw(node.as_ptr()); self.tail = node.prev; match self.tail { None => self.head = None, // Not creating new mutable (unique!) references overlapping `element`. Some(tail) => (*tail.as_ptr()).next = None, } self.len -= 1; node }) } /// Unlinks the specified node from the current list. /// /// Warning: this will not check that the provided node belongs to the current list. /// /// This method takes care not to create mutable references to `element`, to /// maintain validity of aliasing pointers. 
#[inline] unsafe fn unlink_node(&mut self, mut node: NonNull>) { let node = unsafe { node.as_mut() }; // this one is ours now, we can create an &mut. // Not creating new mutable (unique!) references overlapping `element`. match node.prev { Some(prev) => unsafe { (*prev.as_ptr()).next = node.next }, // this node is the head node None => self.head = node.next, }; match node.next { Some(next) => unsafe { (*next.as_ptr()).prev = node.prev }, // this node is the tail node None => self.tail = node.prev, }; self.len -= 1; } /// Splices a series of nodes between two existing nodes. /// /// Warning: this will not check that the provided node belongs to the two existing lists. #[inline] unsafe fn splice_nodes( &mut self, existing_prev: Option>>, existing_next: Option>>, mut splice_start: NonNull>, mut splice_end: NonNull>, splice_length: usize, ) { // This method takes care not to create multiple mutable references to whole nodes at the same time, // to maintain validity of aliasing pointers into `element`. if let Some(mut existing_prev) = existing_prev { unsafe { existing_prev.as_mut().next = Some(splice_start); } } else { self.head = Some(splice_start); } if let Some(mut existing_next) = existing_next { unsafe { existing_next.as_mut().prev = Some(splice_end); } } else { self.tail = Some(splice_end); } unsafe { splice_start.as_mut().prev = existing_prev; splice_end.as_mut().next = existing_next; } self.len += splice_length; } /// Detaches all nodes from a linked list as a series of nodes. #[inline] fn detach_all_nodes(mut self) -> Option<(NonNull>, NonNull>, usize)> { let head = self.head.take(); let tail = self.tail.take(); let len = mem::replace(&mut self.len, 0); if let Some(head) = head { let tail = tail.unwrap_or_else(|| unsafe { core::hint::unreachable_unchecked() }); Some((head, tail, len)) } else { None } } #[inline] unsafe fn split_off_before_node( &mut self, split_node: Option>>, at: usize, ) -> Self { // The split node is the new head node of the second part if let Some(mut split_node) = split_node { let first_part_head; let first_part_tail; unsafe { first_part_tail = split_node.as_mut().prev.take(); } if let Some(mut tail) = first_part_tail { unsafe { tail.as_mut().next = None; } first_part_head = self.head; } else { first_part_head = None; } let first_part = LinkedList { head: first_part_head, tail: first_part_tail, len: at, marker: PhantomData, }; // Fix the head ptr of the second part self.head = Some(split_node); self.len = self.len - at; first_part } else { mem::replace(self, LinkedList::new()) } } #[inline] unsafe fn split_off_after_node( &mut self, split_node: Option>>, at: usize, ) -> Self { // The split node is the new tail node of the first part and owns // the head of the second part. if let Some(mut split_node) = split_node { let second_part_head; let second_part_tail; unsafe { second_part_head = split_node.as_mut().next.take(); } if let Some(mut head) = second_part_head { unsafe { head.as_mut().prev = None; } second_part_tail = self.tail; } else { second_part_tail = None; } let second_part = LinkedList { head: second_part_head, tail: second_part_tail, len: self.len - at, marker: PhantomData, }; // Fix the tail ptr of the first part self.tail = Some(split_node); self.len = at; second_part } else { mem::replace(self, LinkedList::new()) } } } #[stable(feature = "rust1", since = "1.0.0")] impl Default for LinkedList { /// Creates an empty `LinkedList`. #[inline] fn default() -> Self { Self::new() } } impl LinkedList { /// Creates an empty `LinkedList`. 
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let list: LinkedList<u32> = LinkedList::new();
    /// ```
    #[inline]
    #[rustc_const_stable(feature = "const_linked_list_new", since = "1.32.0")]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const fn new() -> Self {
        LinkedList { head: None, tail: None, len: 0, marker: PhantomData }
    }

    /// Moves all elements from `other` to the end of the list.
    ///
    /// This reuses all the nodes from `other` and moves them into `self`. After
    /// this operation, `other` becomes empty.
    ///
    /// This operation should compute in *O*(1) time and *O*(1) memory.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut list1 = LinkedList::new();
    /// list1.push_back('a');
    ///
    /// let mut list2 = LinkedList::new();
    /// list2.push_back('b');
    /// list2.push_back('c');
    ///
    /// list1.append(&mut list2);
    ///
    /// let mut iter = list1.iter();
    /// assert_eq!(iter.next(), Some(&'a'));
    /// assert_eq!(iter.next(), Some(&'b'));
    /// assert_eq!(iter.next(), Some(&'c'));
    /// assert!(iter.next().is_none());
    ///
    /// assert!(list2.is_empty());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn append(&mut self, other: &mut Self) {
        match self.tail {
            None => mem::swap(self, other),
            Some(mut tail) => {
                // `as_mut` is okay here because we have exclusive access to the entirety
                // of both lists.
                if let Some(mut other_head) = other.head.take() {
                    unsafe {
                        tail.as_mut().next = Some(other_head);
                        other_head.as_mut().prev = Some(tail);
                    }

                    self.tail = other.tail.take();
                    self.len += mem::replace(&mut other.len, 0);
                }
            }
        }
    }

    /// Moves all elements from `other` to the beginning of the list.
    #[unstable(feature = "linked_list_prepend", issue = "none")]
    pub fn prepend(&mut self, other: &mut Self) {
        match self.head {
            None => mem::swap(self, other),
            Some(mut head) => {
                // `as_mut` is okay here because we have exclusive access to the entirety
                // of both lists.
                if let Some(mut other_tail) = other.tail.take() {
                    unsafe {
                        head.as_mut().prev = Some(other_tail);
                        other_tail.as_mut().next = Some(head);
                    }

                    self.head = other.head.take();
                    self.len += mem::replace(&mut other.len, 0);
                }
            }
        }
    }

    /// Provides a forward iterator.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut list: LinkedList<u32> = LinkedList::new();
    ///
    /// list.push_back(0);
    /// list.push_back(1);
    /// list.push_back(2);
    ///
    /// let mut iter = list.iter();
    /// assert_eq!(iter.next(), Some(&0));
    /// assert_eq!(iter.next(), Some(&1));
    /// assert_eq!(iter.next(), Some(&2));
    /// assert_eq!(iter.next(), None);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn iter(&self) -> Iter<'_, T> {
        Iter { head: self.head, tail: self.tail, len: self.len, marker: PhantomData }
    }

    /// Provides a forward iterator with mutable references.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut list: LinkedList<u32> = LinkedList::new();
    ///
    /// list.push_back(0);
    /// list.push_back(1);
    /// list.push_back(2);
    ///
    /// for element in list.iter_mut() {
    ///     *element += 10;
    /// }
    ///
    /// let mut iter = list.iter();
    /// assert_eq!(iter.next(), Some(&10));
    /// assert_eq!(iter.next(), Some(&11));
    /// assert_eq!(iter.next(), Some(&12));
    /// assert_eq!(iter.next(), None);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn iter_mut(&mut self) -> IterMut<'_, T> {
        IterMut { head: self.head, tail: self.tail, len: self.len, list: self }
    }

    /// Provides a cursor at the front element.
    /// Provides a forward iterator.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut list: LinkedList<u32> = LinkedList::new();
    ///
    /// list.push_back(0);
    /// list.push_back(1);
    /// list.push_back(2);
    ///
    /// let mut iter = list.iter();
    /// assert_eq!(iter.next(), Some(&0));
    /// assert_eq!(iter.next(), Some(&1));
    /// assert_eq!(iter.next(), Some(&2));
    /// assert_eq!(iter.next(), None);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn iter(&self) -> Iter<'_, T> {
        Iter { head: self.head, tail: self.tail, len: self.len, marker: PhantomData }
    }

    /// Provides a forward iterator with mutable references.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut list: LinkedList<u32> = LinkedList::new();
    ///
    /// list.push_back(0);
    /// list.push_back(1);
    /// list.push_back(2);
    ///
    /// for element in list.iter_mut() {
    ///     *element += 10;
    /// }
    ///
    /// let mut iter = list.iter();
    /// assert_eq!(iter.next(), Some(&10));
    /// assert_eq!(iter.next(), Some(&11));
    /// assert_eq!(iter.next(), Some(&12));
    /// assert_eq!(iter.next(), None);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn iter_mut(&mut self) -> IterMut<'_, T> {
        IterMut { head: self.head, tail: self.tail, len: self.len, list: self }
    }

    /// Provides a cursor at the front element.
    ///
    /// The cursor is pointing to the "ghost" non-element if the list is empty.
    #[inline]
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn cursor_front(&self) -> Cursor<'_, T> {
        Cursor { index: 0, current: self.head, list: self }
    }

    /// Provides a cursor with editing operations at the front element.
    ///
    /// The cursor is pointing to the "ghost" non-element if the list is empty.
    #[inline]
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn cursor_front_mut(&mut self) -> CursorMut<'_, T> {
        CursorMut { index: 0, current: self.head, list: self }
    }

    /// Provides a cursor at the back element.
    ///
    /// The cursor is pointing to the "ghost" non-element if the list is empty.
    #[inline]
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn cursor_back(&self) -> Cursor<'_, T> {
        Cursor { index: self.len.checked_sub(1).unwrap_or(0), current: self.tail, list: self }
    }

    /// Provides a cursor with editing operations at the back element.
    ///
    /// The cursor is pointing to the "ghost" non-element if the list is empty.
    #[inline]
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn cursor_back_mut(&mut self) -> CursorMut<'_, T> {
        CursorMut { index: self.len.checked_sub(1).unwrap_or(0), current: self.tail, list: self }
    }
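    // Illustrative sketch of cursor movement around the "ghost" non-element,
    // using only the methods above. It assumes the unstable
    // `linked_list_cursors` feature:
    //
    //     #![feature(linked_list_cursors)]
    //     use std::collections::LinkedList;
    //
    //     let list: LinkedList<u32> = (1..=3).collect();
    //     let mut cursor = list.cursor_front();
    //     assert_eq!(cursor.current(), Some(&1));
    //     assert_eq!(cursor.index(), Some(0));
    //
    //     cursor.move_prev(); // seeking is circular: before the front sits the ghost
    //     assert_eq!(cursor.current(), None);
    //     assert_eq!(cursor.index(), None);
    //
    //     cursor.move_prev(); // and before the ghost, the back element
    //     assert_eq!(cursor.current(), Some(&3));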
    /// Returns `true` if the `LinkedList` is empty.
    ///
    /// This operation should compute in *O*(1) time.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut dl = LinkedList::new();
    /// assert!(dl.is_empty());
    ///
    /// dl.push_front("foo");
    /// assert!(!dl.is_empty());
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn is_empty(&self) -> bool {
        self.head.is_none()
    }

    /// Returns the length of the `LinkedList`.
    ///
    /// This operation should compute in *O*(1) time.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut dl = LinkedList::new();
    ///
    /// dl.push_front(2);
    /// assert_eq!(dl.len(), 1);
    ///
    /// dl.push_front(1);
    /// assert_eq!(dl.len(), 2);
    ///
    /// dl.push_back(3);
    /// assert_eq!(dl.len(), 3);
    /// ```
    #[doc(alias = "length")]
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn len(&self) -> usize {
        self.len
    }

    /// Removes all elements from the `LinkedList`.
    ///
    /// This operation should compute in *O*(*n*) time.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut dl = LinkedList::new();
    ///
    /// dl.push_front(2);
    /// dl.push_front(1);
    /// assert_eq!(dl.len(), 2);
    /// assert_eq!(dl.front(), Some(&1));
    ///
    /// dl.clear();
    /// assert_eq!(dl.len(), 0);
    /// assert_eq!(dl.front(), None);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn clear(&mut self) {
        *self = Self::new();
    }

    /// Returns `true` if the `LinkedList` contains an element equal to the
    /// given value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut list: LinkedList<u32> = LinkedList::new();
    ///
    /// list.push_back(0);
    /// list.push_back(1);
    /// list.push_back(2);
    ///
    /// assert_eq!(list.contains(&0), true);
    /// assert_eq!(list.contains(&10), false);
    /// ```
    #[stable(feature = "linked_list_contains", since = "1.12.0")]
    pub fn contains(&self, x: &T) -> bool
    where
        T: PartialEq<T>,
    {
        self.iter().any(|e| e == x)
    }

    /// Provides a reference to the front element, or `None` if the list is
    /// empty.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut dl = LinkedList::new();
    /// assert_eq!(dl.front(), None);
    ///
    /// dl.push_front(1);
    /// assert_eq!(dl.front(), Some(&1));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn front(&self) -> Option<&T> {
        unsafe { self.head.as_ref().map(|node| &node.as_ref().element) }
    }

    /// Provides a mutable reference to the front element, or `None` if the list
    /// is empty.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut dl = LinkedList::new();
    /// assert_eq!(dl.front(), None);
    ///
    /// dl.push_front(1);
    /// assert_eq!(dl.front(), Some(&1));
    ///
    /// match dl.front_mut() {
    ///     None => {},
    ///     Some(x) => *x = 5,
    /// }
    /// assert_eq!(dl.front(), Some(&5));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn front_mut(&mut self) -> Option<&mut T> {
        unsafe { self.head.as_mut().map(|node| &mut node.as_mut().element) }
    }

    /// Provides a reference to the back element, or `None` if the list is
    /// empty.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut dl = LinkedList::new();
    /// assert_eq!(dl.back(), None);
    ///
    /// dl.push_back(1);
    /// assert_eq!(dl.back(), Some(&1));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn back(&self) -> Option<&T> {
        unsafe { self.tail.as_ref().map(|node| &node.as_ref().element) }
    }

    /// Provides a mutable reference to the back element, or `None` if the list
    /// is empty.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut dl = LinkedList::new();
    /// assert_eq!(dl.back(), None);
    ///
    /// dl.push_back(1);
    /// assert_eq!(dl.back(), Some(&1));
    ///
    /// match dl.back_mut() {
    ///     None => {},
    ///     Some(x) => *x = 5,
    /// }
    /// assert_eq!(dl.back(), Some(&5));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn back_mut(&mut self) -> Option<&mut T> {
        unsafe { self.tail.as_mut().map(|node| &mut node.as_mut().element) }
    }

    /// Adds an element first in the list.
    ///
    /// This operation should compute in *O*(1) time.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut dl = LinkedList::new();
    ///
    /// dl.push_front(2);
    /// assert_eq!(dl.front().unwrap(), &2);
    ///
    /// dl.push_front(1);
    /// assert_eq!(dl.front().unwrap(), &1);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn push_front(&mut self, elt: T) {
        self.push_front_node(box Node::new(elt));
    }

    /// Removes the first element and returns it, or `None` if the list is
    /// empty.
    ///
    /// This operation should compute in *O*(1) time.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut d = LinkedList::new();
    /// assert_eq!(d.pop_front(), None);
    ///
    /// d.push_front(1);
    /// d.push_front(3);
    /// assert_eq!(d.pop_front(), Some(3));
    /// assert_eq!(d.pop_front(), Some(1));
    /// assert_eq!(d.pop_front(), None);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn pop_front(&mut self) -> Option<T> {
        self.pop_front_node().map(Node::into_element)
    }
    /// Appends an element to the back of a list.
    ///
    /// This operation should compute in *O*(1) time.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut d = LinkedList::new();
    /// d.push_back(1);
    /// d.push_back(3);
    /// assert_eq!(3, *d.back().unwrap());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn push_back(&mut self, elt: T) {
        self.push_back_node(box Node::new(elt));
    }

    /// Removes the last element from a list and returns it, or `None` if
    /// it is empty.
    ///
    /// This operation should compute in *O*(1) time.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut d = LinkedList::new();
    /// assert_eq!(d.pop_back(), None);
    /// d.push_back(1);
    /// d.push_back(3);
    /// assert_eq!(d.pop_back(), Some(3));
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn pop_back(&mut self) -> Option<T> {
        self.pop_back_node().map(Node::into_element)
    }

    /// Splits the list into two at the given index. Returns everything after the given index,
    /// including the index.
    ///
    /// This operation should compute in *O*(*n*) time.
    ///
    /// # Panics
    ///
    /// Panics if `at > len`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut d = LinkedList::new();
    ///
    /// d.push_front(1);
    /// d.push_front(2);
    /// d.push_front(3);
    ///
    /// let mut split = d.split_off(2);
    ///
    /// assert_eq!(split.pop_front(), Some(1));
    /// assert_eq!(split.pop_front(), None);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn split_off(&mut self, at: usize) -> LinkedList<T> {
        let len = self.len();
        assert!(at <= len, "Cannot split off at a nonexistent index");
        if at == 0 {
            return mem::take(self);
        } else if at == len {
            return Self::new();
        }

        // Below, we iterate towards the `i-1`th node, either from the start or the end,
        // depending on which would be faster.
        let split_node = if at - 1 <= len - 1 - (at - 1) {
            let mut iter = self.iter_mut();
            // instead of skipping using .skip() (which creates a new struct),
            // we skip manually so we can access the head field without
            // depending on implementation details of Skip
            for _ in 0..at - 1 {
                iter.next();
            }
            iter.head
        } else {
            // better off starting from the end
            let mut iter = self.iter_mut();
            for _ in 0..len - 1 - (at - 1) {
                iter.next_back();
            }
            iter.tail
        };
        unsafe { self.split_off_after_node(split_node, at) }
    }

    /// Removes the element at the given index and returns it.
    ///
    /// This operation should compute in *O*(*n*) time.
    ///
    /// # Panics
    /// Panics if `at >= len`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(linked_list_remove)]
    /// use std::collections::LinkedList;
    ///
    /// let mut d = LinkedList::new();
    ///
    /// d.push_front(1);
    /// d.push_front(2);
    /// d.push_front(3);
    ///
    /// assert_eq!(d.remove(1), 2);
    /// assert_eq!(d.remove(0), 3);
    /// assert_eq!(d.remove(0), 1);
    /// ```
    #[unstable(feature = "linked_list_remove", issue = "69210")]
    pub fn remove(&mut self, at: usize) -> T {
        let len = self.len();
        assert!(at < len, "Cannot remove at an index outside of the list bounds");

        // Below, we iterate towards the node at the given index, either from
        // the start or the end, depending on which would be faster.
        let offset_from_end = len - at - 1;
        if at <= offset_from_end {
            let mut cursor = self.cursor_front_mut();
            for _ in 0..at {
                cursor.move_next();
            }
            cursor.remove_current().unwrap()
        } else {
            let mut cursor = self.cursor_back_mut();
            for _ in 0..offset_from_end {
                cursor.move_prev();
            }
            cursor.remove_current().unwrap()
        }
    }
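    // The direction choice in `split_off` and `remove` above is plain index
    // arithmetic. A sketch of the decision rule in `remove`, with concrete
    // numbers for illustration:
    //
    //     let (len, at) = (10usize, 8usize);
    //     let from_front = at;              // 8 hops via move_next
    //     let from_back = len - at - 1;     // 1 hop via move_prev
    //     assert!(from_front > from_back);  // so the walk starts from the back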
    /// Creates an iterator which uses a closure to determine if an element should be removed.
    ///
    /// If the closure returns true, then the element is removed and yielded.
    /// If the closure returns false, the element will remain in the list and will not be yielded
    /// by the iterator.
    ///
    /// Note that `drain_filter` lets you mutate every element in the filter closure, regardless of
    /// whether you choose to keep or remove it.
    ///
    /// # Examples
    ///
    /// Splitting a list into evens and odds, reusing the original list:
    ///
    /// ```
    /// #![feature(drain_filter)]
    /// use std::collections::LinkedList;
    ///
    /// let mut numbers: LinkedList<u32> = LinkedList::new();
    /// numbers.extend(&[1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15]);
    ///
    /// let evens = numbers.drain_filter(|x| *x % 2 == 0).collect::<LinkedList<_>>();
    /// let odds = numbers;
    ///
    /// assert_eq!(evens.into_iter().collect::<Vec<_>>(), vec![2, 4, 6, 8, 14]);
    /// assert_eq!(odds.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 9, 11, 13, 15]);
    /// ```
    #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
    pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, T, F>
    where
        F: FnMut(&mut T) -> bool,
    {
        // avoid borrow issues.
        let it = self.head;
        let old_len = self.len;

        DrainFilter { list: self, it, pred: filter, idx: 0, old_len }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T> Drop for LinkedList<T> {
    fn drop(&mut self) {
        struct DropGuard<'a, T>(&'a mut LinkedList<T>);

        impl<'a, T> Drop for DropGuard<'a, T> {
            fn drop(&mut self) {
                // Continue the same loop we do below. This only runs when a destructor has
                // panicked. If another one panics this will abort.
                while self.0.pop_front_node().is_some() {}
            }
        }

        while let Some(node) = self.pop_front_node() {
            let guard = DropGuard(self);
            drop(node);
            mem::forget(guard);
        }
    }
}
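// The `DropGuard` above is a panic-safety idiom: if dropping one element
// panics, the guard's own destructor resumes popping the remaining nodes, so
// they are still freed (a second panic then aborts). A minimal standalone
// sketch of the same idiom, under the simplifying assumption of a `Vec`-backed
// container instead of a linked list:
//
//     fn drop_all<T>(items: &mut Vec<T>) {
//         struct Guard<'a, T>(&'a mut Vec<T>);
//         impl<T> Drop for Guard<'_, T> {
//             fn drop(&mut self) {
//                 // Runs only if a destructor below panicked.
//                 while self.0.pop().is_some() {}
//             }
//         }
//         while let Some(item) = items.pop() {
//             let guard = Guard(&mut *items);
//             drop(item);                // may panic
//             std::mem::forget(guard);   // reached only if it didn't
//         }
//     }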
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<&'a T> {
        if self.len == 0 {
            None
        } else {
            self.head.map(|node| unsafe {
                // Need an unbound lifetime to get 'a
                let node = &*node.as_ptr();
                self.len -= 1;
                self.head = node.next;
                &node.element
            })
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.len, Some(self.len))
    }

    #[inline]
    fn last(mut self) -> Option<&'a T> {
        self.next_back()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
    #[inline]
    fn next_back(&mut self) -> Option<&'a T> {
        if self.len == 0 {
            None
        } else {
            self.tail.map(|node| unsafe {
                // Need an unbound lifetime to get 'a
                let node = &*node.as_ptr();
                self.len -= 1;
                self.tail = node.prev;
                &node.element
            })
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for Iter<'_, T> {}

#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Iter<'_, T> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;

    #[inline]
    fn next(&mut self) -> Option<&'a mut T> {
        if self.len == 0 {
            None
        } else {
            self.head.map(|node| unsafe {
                // Need an unbound lifetime to get 'a
                let node = &mut *node.as_ptr();
                self.len -= 1;
                self.head = node.next;
                &mut node.element
            })
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.len, Some(self.len))
    }

    #[inline]
    fn last(mut self) -> Option<&'a mut T> {
        self.next_back()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
    #[inline]
    fn next_back(&mut self) -> Option<&'a mut T> {
        if self.len == 0 {
            None
        } else {
            self.tail.map(|node| unsafe {
                // Need an unbound lifetime to get 'a
                let node = &mut *node.as_ptr();
                self.len -= 1;
                self.tail = node.prev;
                &mut node.element
            })
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IterMut<'_, T> {}

#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IterMut<'_, T> {}

/// A cursor over a `LinkedList`.
///
/// A `Cursor` is like an iterator, except that it can freely seek back-and-forth.
///
/// Cursors always rest between two elements in the list, and index in a logically circular way.
/// To accommodate this, there is a "ghost" non-element that yields `None` between the head and
/// tail of the list.
///
/// When created, cursors start at the front of the list, or the "ghost" non-element if the list is empty.
#[unstable(feature = "linked_list_cursors", issue = "58533")]
pub struct Cursor<'a, T: 'a> {
    index: usize,
    current: Option<NonNull<Node<T>>>,
    list: &'a LinkedList<T>,
}

#[unstable(feature = "linked_list_cursors", issue = "58533")]
impl<T> Clone for Cursor<'_, T> {
    fn clone(&self) -> Self {
        let Cursor { index, current, list } = *self;
        Cursor { index, current, list }
    }
}

#[unstable(feature = "linked_list_cursors", issue = "58533")]
impl<T: fmt::Debug> fmt::Debug for Cursor<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Cursor").field(&self.list).field(&self.index()).finish()
    }
}

/// A cursor over a `LinkedList` with editing operations.
///
/// A `Cursor` is like an iterator, except that it can freely seek back-and-forth, and can
/// safely mutate the list during iteration. This is because the lifetime of its yielded
/// references is tied to its own lifetime, instead of just the underlying list. This means
/// cursors cannot yield multiple elements at once.
///
/// Cursors always rest between two elements in the list, and index in a logically circular way.
/// To accommodate this, there is a "ghost" non-element that yields `None` between the head and
/// tail of the list.
#[unstable(feature = "linked_list_cursors", issue = "58533")]
pub struct CursorMut<'a, T: 'a> {
    index: usize,
    current: Option<NonNull<Node<T>>>,
    list: &'a mut LinkedList<T>,
}

#[unstable(feature = "linked_list_cursors", issue = "58533")]
impl<T: fmt::Debug> fmt::Debug for CursorMut<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("CursorMut").field(&self.list).field(&self.index()).finish()
    }
}

impl<'a, T> Cursor<'a, T> {
    /// Returns the cursor position index within the `LinkedList`.
    ///
    /// This returns `None` if the cursor is currently pointing to the
    /// "ghost" non-element.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn index(&self) -> Option<usize> {
        let _ = self.current?;
        Some(self.index)
    }

    /// Moves the cursor to the next element of the `LinkedList`.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this will move it to
    /// the first element of the `LinkedList`. If it is pointing to the last
    /// element of the `LinkedList` then this will move it to the "ghost" non-element.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn move_next(&mut self) {
        match self.current.take() {
            // We had no current element; the cursor was sitting at the start position
            // Next element should be the head of the list
            None => {
                self.current = self.list.head;
                self.index = 0;
            }
            // We had a previous element, so let's go to its next
            Some(current) => unsafe {
                self.current = current.as_ref().next;
                self.index += 1;
            },
        }
    }
    /// Moves the cursor to the previous element of the `LinkedList`.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this will move it to
    /// the last element of the `LinkedList`. If it is pointing to the first
    /// element of the `LinkedList` then this will move it to the "ghost" non-element.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn move_prev(&mut self) {
        match self.current.take() {
            // No current. We're at the start of the list. Yield None and jump to the end.
            None => {
                self.current = self.list.tail;
                self.index = self.list.len().checked_sub(1).unwrap_or(0);
            }
            // Have a prev. Yield it and go to the previous element.
            Some(current) => unsafe {
                self.current = current.as_ref().prev;
                self.index = self.index.checked_sub(1).unwrap_or_else(|| self.list.len());
            },
        }
    }

    /// Returns a reference to the element that the cursor is currently
    /// pointing to.
    ///
    /// This returns `None` if the cursor is currently pointing to the
    /// "ghost" non-element.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn current(&self) -> Option<&'a T> {
        unsafe { self.current.map(|current| &(*current.as_ptr()).element) }
    }

    /// Returns a reference to the next element.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this returns
    /// the first element of the `LinkedList`. If it is pointing to the last
    /// element of the `LinkedList` then this returns `None`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn peek_next(&self) -> Option<&'a T> {
        unsafe {
            let next = match self.current {
                None => self.list.head,
                Some(current) => current.as_ref().next,
            };
            next.map(|next| &(*next.as_ptr()).element)
        }
    }

    /// Returns a reference to the previous element.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this returns
    /// the last element of the `LinkedList`. If it is pointing to the first
    /// element of the `LinkedList` then this returns `None`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn peek_prev(&self) -> Option<&'a T> {
        unsafe {
            let prev = match self.current {
                None => self.list.tail,
                Some(current) => current.as_ref().prev,
            };
            prev.map(|prev| &(*prev.as_ptr()).element)
        }
    }
}

impl<'a, T> CursorMut<'a, T> {
    /// Returns the cursor position index within the `LinkedList`.
    ///
    /// This returns `None` if the cursor is currently pointing to the
    /// "ghost" non-element.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn index(&self) -> Option<usize> {
        let _ = self.current?;
        Some(self.index)
    }

    /// Moves the cursor to the next element of the `LinkedList`.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this will move it to
    /// the first element of the `LinkedList`. If it is pointing to the last
    /// element of the `LinkedList` then this will move it to the "ghost" non-element.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn move_next(&mut self) {
        match self.current.take() {
            // We had no current element; the cursor was sitting at the start position
            // Next element should be the head of the list
            None => {
                self.current = self.list.head;
                self.index = 0;
            }
            // We had a previous element, so let's go to its next
            Some(current) => unsafe {
                self.current = current.as_ref().next;
                self.index += 1;
            },
        }
    }
    /// Moves the cursor to the previous element of the `LinkedList`.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this will move it to
    /// the last element of the `LinkedList`. If it is pointing to the first
    /// element of the `LinkedList` then this will move it to the "ghost" non-element.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn move_prev(&mut self) {
        match self.current.take() {
            // No current. We're at the start of the list. Yield None and jump to the end.
            None => {
                self.current = self.list.tail;
                self.index = self.list.len().checked_sub(1).unwrap_or(0);
            }
            // Have a prev. Yield it and go to the previous element.
            Some(current) => unsafe {
                self.current = current.as_ref().prev;
                self.index = self.index.checked_sub(1).unwrap_or_else(|| self.list.len());
            },
        }
    }

    /// Returns a reference to the element that the cursor is currently
    /// pointing to.
    ///
    /// This returns `None` if the cursor is currently pointing to the
    /// "ghost" non-element.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn current(&mut self) -> Option<&mut T> {
        unsafe { self.current.map(|current| &mut (*current.as_ptr()).element) }
    }

    /// Returns a reference to the next element.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this returns
    /// the first element of the `LinkedList`. If it is pointing to the last
    /// element of the `LinkedList` then this returns `None`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn peek_next(&mut self) -> Option<&mut T> {
        unsafe {
            let next = match self.current {
                None => self.list.head,
                Some(current) => current.as_ref().next,
            };
            next.map(|next| &mut (*next.as_ptr()).element)
        }
    }

    /// Returns a reference to the previous element.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this returns
    /// the last element of the `LinkedList`. If it is pointing to the first
    /// element of the `LinkedList` then this returns `None`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn peek_prev(&mut self) -> Option<&mut T> {
        unsafe {
            let prev = match self.current {
                None => self.list.tail,
                Some(current) => current.as_ref().prev,
            };
            prev.map(|prev| &mut (*prev.as_ptr()).element)
        }
    }

    /// Returns a read-only cursor pointing to the current element.
    ///
    /// The lifetime of the returned `Cursor` is bound to that of the
    /// `CursorMut`, which means it cannot outlive the `CursorMut` and that the
    /// `CursorMut` is frozen for the lifetime of the `Cursor`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn as_cursor(&self) -> Cursor<'_, T> {
        Cursor { list: self.list, current: self.current, index: self.index }
    }
}

// Now the list editing operations

impl<'a, T> CursorMut<'a, T> {
    /// Inserts a new element into the `LinkedList` after the current one.
    ///
    /// If the cursor is pointing at the "ghost" non-element then the new element is
    /// inserted at the front of the `LinkedList`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn insert_after(&mut self, item: T) {
        unsafe {
            let spliced_node = Box::leak(Box::new(Node::new(item))).into();
            let node_next = match self.current {
                None => self.list.head,
                Some(node) => node.as_ref().next,
            };
            self.list.splice_nodes(self.current, node_next, spliced_node, spliced_node, 1);
            if self.current.is_none() {
                // The "ghost" non-element's index has changed.
                self.index = self.list.len;
            }
        }
    }

    /// Inserts a new element into the `LinkedList` before the current one.
    ///
    /// If the cursor is pointing at the "ghost" non-element then the new element is
    /// inserted at the end of the `LinkedList`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn insert_before(&mut self, item: T) {
        unsafe {
            let spliced_node = Box::leak(Box::new(Node::new(item))).into();
            let node_prev = match self.current {
                None => self.list.tail,
                Some(node) => node.as_ref().prev,
            };
            self.list.splice_nodes(node_prev, self.current, spliced_node, spliced_node, 1);
            self.index += 1;
        }
    }
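    // A small sketch of how the two insertion methods behave at the "ghost"
    // position (assumes the unstable `linked_list_cursors` feature):
    //
    //     #![feature(linked_list_cursors)]
    //     use std::collections::LinkedList;
    //
    //     let mut list: LinkedList<u32> = (1..=2).collect();
    //     let mut cursor = list.cursor_front_mut();
    //     cursor.move_prev(); // park the cursor on the ghost non-element
    //
    //     cursor.insert_after(0);   // inserting after the ghost prepends...
    //     cursor.insert_before(3);  // ...and inserting before it appends.
    //     assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![0, 1, 2, 3]);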
    /// Removes the current element from the `LinkedList`.
    ///
    /// The element that was removed is returned, and the cursor is
    /// moved to point to the next element in the `LinkedList`.
    ///
    /// If the cursor is currently pointing to the "ghost" non-element then no element
    /// is removed and `None` is returned.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn remove_current(&mut self) -> Option<T> {
        let unlinked_node = self.current?;
        unsafe {
            self.current = unlinked_node.as_ref().next;
            self.list.unlink_node(unlinked_node);
            let unlinked_node = Box::from_raw(unlinked_node.as_ptr());
            Some(unlinked_node.element)
        }
    }

    /// Removes the current element from the `LinkedList` without deallocating the list node.
    ///
    /// The node that was removed is returned as a new `LinkedList` containing only this node.
    /// The cursor is moved to point to the next element in the current `LinkedList`.
    ///
    /// If the cursor is currently pointing to the "ghost" non-element then no element
    /// is removed and `None` is returned.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn remove_current_as_list(&mut self) -> Option<LinkedList<T>> {
        let mut unlinked_node = self.current?;
        unsafe {
            self.current = unlinked_node.as_ref().next;
            self.list.unlink_node(unlinked_node);

            unlinked_node.as_mut().prev = None;
            unlinked_node.as_mut().next = None;
            Some(LinkedList {
                head: Some(unlinked_node),
                tail: Some(unlinked_node),
                len: 1,
                marker: PhantomData,
            })
        }
    }

    /// Inserts the elements from the given `LinkedList` after the current one.
    ///
    /// If the cursor is pointing at the "ghost" non-element then the new elements are
    /// inserted at the start of the `LinkedList`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn splice_after(&mut self, list: LinkedList<T>) {
        unsafe {
            let (splice_head, splice_tail, splice_len) = match list.detach_all_nodes() {
                Some(parts) => parts,
                _ => return,
            };
            let node_next = match self.current {
                None => self.list.head,
                Some(node) => node.as_ref().next,
            };
            self.list.splice_nodes(self.current, node_next, splice_head, splice_tail, splice_len);
            if self.current.is_none() {
                // The "ghost" non-element's index has changed.
                self.index = self.list.len;
            }
        }
    }

    /// Inserts the elements from the given `LinkedList` before the current one.
    ///
    /// If the cursor is pointing at the "ghost" non-element then the new elements are
    /// inserted at the end of the `LinkedList`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn splice_before(&mut self, list: LinkedList<T>) {
        unsafe {
            let (splice_head, splice_tail, splice_len) = match list.detach_all_nodes() {
                Some(parts) => parts,
                _ => return,
            };
            let node_prev = match self.current {
                None => self.list.tail,
                Some(node) => node.as_ref().prev,
            };
            self.list.splice_nodes(node_prev, self.current, splice_head, splice_tail, splice_len);
            self.index += splice_len;
        }
    }

    /// Splits the list into two after the current element. This will return a
    /// new list consisting of everything after the cursor, with the original
    /// list retaining everything before.
    ///
    /// If the cursor is pointing at the "ghost" non-element then the entire contents
    /// of the `LinkedList` are moved.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn split_after(&mut self) -> LinkedList<T> {
        let split_off_idx = if self.index == self.list.len { 0 } else { self.index + 1 };
        if self.index == self.list.len {
            // The "ghost" non-element's index has changed to 0.
            self.index = 0;
        }
        unsafe { self.list.split_off_after_node(self.current, split_off_idx) }
    }
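    // Sketch of splicing and splitting through a cursor (assumes the unstable
    // `linked_list_cursors` feature):
    //
    //     #![feature(linked_list_cursors)]
    //     use std::collections::LinkedList;
    //
    //     let mut list: LinkedList<u32> = (1..=4).collect();
    //     let mut cursor = list.cursor_front_mut();
    //     cursor.move_next(); // now at element 2
    //
    //     cursor.splice_after((10..=11).collect());
    //     // list is now [1, 2, 10, 11, 3, 4], cursor still at 2
    //
    //     let tail = cursor.split_after(); // everything after the cursor
    //     assert_eq!(tail.into_iter().collect::<Vec<_>>(), vec![10, 11, 3, 4]);
    //     assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![1, 2]);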
    /// Splits the list into two before the current element. This will return a
    /// new list consisting of everything before the cursor, with the original
    /// list retaining everything after.
    ///
    /// If the cursor is pointing at the "ghost" non-element then the entire contents
    /// of the `LinkedList` are moved.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn split_before(&mut self) -> LinkedList<T> {
        let split_off_idx = self.index;
        self.index = 0;
        unsafe { self.list.split_off_before_node(self.current, split_off_idx) }
    }
}

/// An iterator produced by calling `drain_filter` on `LinkedList`.
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
pub struct DrainFilter<'a, T: 'a, F: 'a>
where
    F: FnMut(&mut T) -> bool,
{
    list: &'a mut LinkedList<T>,
    it: Option<NonNull<Node<T>>>,
    pred: F,
    idx: usize,
    old_len: usize,
}

#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<T, F> Iterator for DrainFilter<'_, T, F>
where
    F: FnMut(&mut T) -> bool,
{
    type Item = T;

    fn next(&mut self) -> Option<T> {
        while let Some(mut node) = self.it {
            unsafe {
                self.it = node.as_ref().next;
                self.idx += 1;

                if (self.pred)(&mut node.as_mut().element) {
                    // `unlink_node` is okay with aliasing `element` references.
                    self.list.unlink_node(node);
                    return Some(Box::from_raw(node.as_ptr()).element);
                }
            }
        }

        None
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        (0, Some(self.old_len - self.idx))
    }
}

#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<T, F> Drop for DrainFilter<'_, T, F>
where
    F: FnMut(&mut T) -> bool,
{
    fn drop(&mut self) {
        struct DropGuard<'r, 'a, T, F>(&'r mut DrainFilter<'a, T, F>)
        where
            F: FnMut(&mut T) -> bool;

        impl<'r, 'a, T, F> Drop for DropGuard<'r, 'a, T, F>
        where
            F: FnMut(&mut T) -> bool,
        {
            fn drop(&mut self) {
                self.0.for_each(drop);
            }
        }

        while let Some(item) = self.next() {
            let guard = DropGuard(self);
            drop(item);
            mem::forget(guard);
        }
    }
}

#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<T: fmt::Debug, F> fmt::Debug for DrainFilter<'_, T, F>
where
    F: FnMut(&mut T) -> bool,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("DrainFilter").field(&self.list).finish()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Iterator for IntoIter<T> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        self.list.pop_front()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.list.len, Some(self.list.len))
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> DoubleEndedIterator for IntoIter<T> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        self.list.pop_back()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IntoIter<T> {}

#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IntoIter<T> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> FromIterator<T> for LinkedList<T> {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let mut list = Self::new();
        list.extend(iter);
        list
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> IntoIterator for LinkedList<T> {
    type Item = T;
    type IntoIter = IntoIter<T>;

    /// Consumes the list into an iterator yielding elements by value.
    #[inline]
    fn into_iter(self) -> IntoIter<T> {
        IntoIter { list: self }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a LinkedList<T> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;

    fn into_iter(self) -> Iter<'a, T> {
        self.iter()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a mut LinkedList<T> {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a, T>;

    fn into_iter(self) -> IterMut<'a, T> {
        self.iter_mut()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Extend<T> for LinkedList<T> {
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        <Self as SpecExtend<I>>::spec_extend(self, iter);
    }

    #[inline]
    fn extend_one(&mut self, elem: T) {
        self.push_back(elem);
    }
}

impl<I: IntoIterator> SpecExtend<I> for LinkedList<I::Item> {
    default fn spec_extend(&mut self, iter: I) {
        iter.into_iter().for_each(move |elt| self.push_back(elt));
    }
}

impl<T> SpecExtend<LinkedList<T>> for LinkedList<T> {
    fn spec_extend(&mut self, ref mut other: LinkedList<T>) {
        self.append(other);
    }
}

#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a, T: 'a + Copy> Extend<&'a T> for LinkedList<T> {
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.extend(iter.into_iter().cloned());
    }

    #[inline]
    fn extend_one(&mut self, &elem: &'a T) {
        self.push_back(elem);
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialEq> PartialEq for LinkedList<T> {
    fn eq(&self, other: &Self) -> bool {
        self.len() == other.len() && self.iter().eq(other)
    }

    fn ne(&self, other: &Self) -> bool {
        self.len() != other.len() || self.iter().ne(other)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq> Eq for LinkedList<T> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd> PartialOrd for LinkedList<T> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.iter().partial_cmp(other)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Ord for LinkedList<T> {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        self.iter().cmp(other)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone> Clone for LinkedList<T> {
    fn clone(&self) -> Self {
        self.iter().cloned().collect()
    }

    fn clone_from(&mut self, other: &Self) {
        let mut iter_other = other.iter();
        if self.len() > other.len() {
            self.split_off(other.len());
        }
        for (elem, elem_other) in self.iter_mut().zip(&mut iter_other) {
            elem.clone_from(elem_other);
        }
        if !iter_other.is_empty() {
            self.extend(iter_other.cloned());
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Debug> fmt::Debug for LinkedList<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self).finish()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Hash> Hash for LinkedList<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.len().hash(state);
        for elt in self {
            elt.hash(state);
        }
    }
}
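// `clone_from` above reuses the existing nodes of `self` where it can: it
// truncates with `split_off`, overwrites element-wise via `clone_from`, and
// only allocates for any surplus in `other`. Observationally it is equivalent
// to this sketch:
//
//     use std::collections::LinkedList;
//
//     let mut a: LinkedList<String> = ["x", "y"].iter().map(|s| s.to_string()).collect();
//     let b: LinkedList<String> = ["p", "q", "r"].iter().map(|s| s.to_string()).collect();
//     a.clone_from(&b);          // same result as `a = b.clone()`,
//     assert_eq!(a, b);          // but `a`'s two nodes were reused in place.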
// Ensure that `LinkedList` and its read-only iterators are covariant in their type parameters.
#[allow(dead_code)]
fn assert_covariance() {
    fn a<'a>(x: LinkedList<&'static str>) -> LinkedList<&'a str> {
        x
    }
    fn b<'i, 'a>(x: Iter<'i, &'static str>) -> Iter<'i, &'a str> {
        x
    }
    fn c<'a>(x: IntoIter<&'static str>) -> IntoIter<&'a str> {
        x
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send> Send for LinkedList<T> {}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for LinkedList<T> {}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Send for Iter<'_, T> {}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for Iter<'_, T> {}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send> Send for IterMut<'_, T> {}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for IterMut<'_, T> {}

#[unstable(feature = "linked_list_cursors", issue = "58533")]
unsafe impl<T: Sync> Send for Cursor<'_, T> {}

#[unstable(feature = "linked_list_cursors", issue = "58533")]
unsafe impl<T: Sync> Sync for Cursor<'_, T> {}

#[unstable(feature = "linked_list_cursors", issue = "58533")]
unsafe impl<T: Send> Send for CursorMut<'_, T> {}

#[unstable(feature = "linked_list_cursors", issue = "58533")]
unsafe impl<T: Sync> Sync for CursorMut<'_, T> {}

use super::*;

use std::thread;
use std::vec::Vec;

use rand::{thread_rng, RngCore};

fn list_from<T: Clone>(v: &[T]) -> LinkedList<T> {
    v.iter().cloned().collect()
}

pub fn check_links<T>(list: &LinkedList<T>) {
    unsafe {
        let mut len = 0;
        let mut last_ptr: Option<&Node<T>> = None;
        let mut node_ptr: &Node<T>;
        match list.head {
            None => {
                // tail node should also be None.
                assert!(list.tail.is_none());
                assert_eq!(0, list.len);
                return;
            }
            Some(node) => node_ptr = &*node.as_ptr(),
        }
        loop {
            match (last_ptr, node_ptr.prev) {
                (None, None) => {}
                (None, _) => panic!("prev link for head"),
                (Some(p), Some(pptr)) => {
                    assert_eq!(p as *const Node<T>, pptr.as_ptr() as *const Node<T>);
                }
                _ => panic!("prev link is none, not good"),
            }
            match node_ptr.next {
                Some(next) => {
                    last_ptr = Some(node_ptr);
                    node_ptr = &*next.as_ptr();
                    len += 1;
                }
                None => {
                    len += 1;
                    break;
                }
            }
        }

        // verify that the tail node points to the last node.
        let tail = list.tail.as_ref().expect("some tail node").as_ref();
        assert_eq!(tail as *const Node<T>, node_ptr as *const Node<T>);
        // check that len matches interior links.
        assert_eq!(len, list.len);
    }
}

#[test]
fn test_append() {
    // Empty to empty
    {
        let mut m = LinkedList::<i32>::new();
        let mut n = LinkedList::new();
        m.append(&mut n);
        check_links(&m);
        assert_eq!(m.len(), 0);
        assert_eq!(n.len(), 0);
    }
    // Non-empty to empty
    {
        let mut m = LinkedList::new();
        let mut n = LinkedList::new();
        n.push_back(2);
        m.append(&mut n);
        check_links(&m);
        assert_eq!(m.len(), 1);
        assert_eq!(m.pop_back(), Some(2));
        assert_eq!(n.len(), 0);
        check_links(&m);
    }
    // Empty to non-empty
    {
        let mut m = LinkedList::new();
        let mut n = LinkedList::new();
        m.push_back(2);
        m.append(&mut n);
        check_links(&m);
        assert_eq!(m.len(), 1);
        assert_eq!(m.pop_back(), Some(2));
        check_links(&m);
    }

    // Non-empty to non-empty
    let v = vec![1, 2, 3, 4, 5];
    let u = vec![9, 8, 1, 2, 3, 4, 5];
    let mut m = list_from(&v);
    let mut n = list_from(&u);
    m.append(&mut n);
    check_links(&m);
    let mut sum = v;
    sum.extend_from_slice(&u);
    assert_eq!(sum.len(), m.len());
    for elt in sum {
        assert_eq!(m.pop_front(), Some(elt))
    }
    assert_eq!(n.len(), 0);
    // Let's make sure it's working properly, since we
    // did some direct changes to private members.
    n.push_back(3);
    assert_eq!(n.len(), 1);
    assert_eq!(n.pop_front(), Some(3));
    check_links(&n);
}

#[test]
fn test_clone_from() {
    // Short cloned from long
    {
        let v = vec![1, 2, 3, 4, 5];
        let u = vec![8, 7, 6, 2, 3, 4, 5];
        let mut m = list_from(&v);
        let n = list_from(&u);
        m.clone_from(&n);
        check_links(&m);
        assert_eq!(m, n);
        for elt in u {
            assert_eq!(m.pop_front(), Some(elt))
        }
    }
    // Long cloned from short
    {
        let v = vec![1, 2, 3, 4, 5];
        let u = vec![6, 7, 8];
        let mut m = list_from(&v);
        let n = list_from(&u);
        m.clone_from(&n);
        check_links(&m);
        assert_eq!(m, n);
        for elt in u {
            assert_eq!(m.pop_front(), Some(elt))
        }
    }
    // Two equal length lists
    {
        let v = vec![1, 2, 3, 4, 5];
        let u = vec![9, 8, 1, 2, 3];
        let mut m = list_from(&v);
        let n = list_from(&u);
        m.clone_from(&n);
        check_links(&m);
        assert_eq!(m, n);
        for elt in u {
            assert_eq!(m.pop_front(), Some(elt))
        }
    }
}

#[test]
#[cfg_attr(target_os = "emscripten", ignore)]
fn test_send() {
    let n = list_from(&[1, 2, 3]);
    thread::spawn(move || {
        check_links(&n);
        let a: &[_] = &[&1, &2, &3];
        assert_eq!(a, &*n.iter().collect::<Vec<_>>());
    })
    .join()
    .ok()
    .unwrap();
}

#[test]
fn test_fuzz() {
    for _ in 0..25 {
        fuzz_test(3);
        fuzz_test(16);
        #[cfg(not(miri))] // Miri is too slow
        fuzz_test(189);
    }
}

#[test]
fn test_26021() {
    // There was a bug in split_off that failed to null out the RHS's head's prev ptr.
    // This caused the RHS's dtor to walk up into the LHS at drop and delete all of
    // its nodes.
    //
    // https://github.com/rust-lang/rust/issues/26021
    let mut v1 = LinkedList::new();
    v1.push_front(1);
    v1.push_front(1);
    v1.push_front(1);
    v1.push_front(1);
    let _ = v1.split_off(3); // Dropping this now should not cause laundry consumption
    assert_eq!(v1.len(), 3);

    assert_eq!(v1.iter().len(), 3);
    assert_eq!(v1.iter().collect::<Vec<_>>().len(), 3);
}

#[test]
fn test_split_off() {
    let mut v1 = LinkedList::new();
    v1.push_front(1);
    v1.push_front(1);
    v1.push_front(1);
    v1.push_front(1);

    // test all splits
    for ix in 0..1 + v1.len() {
        let mut a = v1.clone();
        let b = a.split_off(ix);
        check_links(&a);
        check_links(&b);
        a.extend(b);
        assert_eq!(v1, a);
    }
}

fn fuzz_test(sz: i32) {
    let mut m: LinkedList<_> = LinkedList::new();
    let mut v = vec![];
    for i in 0..sz {
        check_links(&m);
        let r: u8 = thread_rng().next_u32() as u8;
        match r % 6 {
            0 => {
                m.pop_back();
                v.pop();
            }
            1 => {
                if !v.is_empty() {
                    m.pop_front();
                    v.remove(0);
                }
            }
            2 | 4 => {
                m.push_front(-i);
                v.insert(0, -i);
            }
            3 | 5 | _ => {
                m.push_back(i);
                v.push(i);
            }
        }
    }

    check_links(&m);

    let mut i = 0;
    for (a, &b) in m.into_iter().zip(&v) {
        i += 1;
        assert_eq!(a, b);
    }
    assert_eq!(i, v.len());
}

#[test]
fn drain_filter_test() {
    let mut m: LinkedList<u32> = LinkedList::new();
    m.extend(&[1, 2, 3, 4, 5, 6]);
    let deleted = m.drain_filter(|v| *v < 4).collect::<Vec<_>>();

    check_links(&m);

    assert_eq!(deleted, &[1, 2, 3]);
    assert_eq!(m.into_iter().collect::<Vec<_>>(), &[4, 5, 6]);
}

#[test]
fn drain_to_empty_test() {
    let mut m: LinkedList<u32> = LinkedList::new();
    m.extend(&[1, 2, 3, 4, 5, 6]);
    let deleted = m.drain_filter(|_| true).collect::<Vec<_>>();

    check_links(&m);

    assert_eq!(deleted, &[1, 2, 3, 4, 5, 6]);
    assert_eq!(m.into_iter().collect::<Vec<_>>(), &[]);
}

#[test]
fn test_cursor_move_peek() {
    let mut m: LinkedList<u32> = LinkedList::new();
    m.extend(&[1, 2, 3, 4, 5, 6]);
    let mut cursor = m.cursor_front();
    assert_eq!(cursor.current(), Some(&1));
    assert_eq!(cursor.peek_next(), Some(&2));
    assert_eq!(cursor.peek_prev(), None);
    assert_eq!(cursor.index(), Some(0));
    cursor.move_prev();
    assert_eq!(cursor.current(), None);
    assert_eq!(cursor.peek_next(), Some(&1));
    assert_eq!(cursor.peek_prev(), Some(&6));
    assert_eq!(cursor.index(), None);
    cursor.move_next();
    cursor.move_next();
    assert_eq!(cursor.current(), Some(&2));
    assert_eq!(cursor.peek_next(), Some(&3));
    assert_eq!(cursor.peek_prev(), Some(&1));
    assert_eq!(cursor.index(), Some(1));

    let mut cursor = m.cursor_back();
    assert_eq!(cursor.current(), Some(&6));
    assert_eq!(cursor.peek_next(), None);
    assert_eq!(cursor.peek_prev(), Some(&5));
    assert_eq!(cursor.index(), Some(5));
    cursor.move_next();
    assert_eq!(cursor.current(), None);
    assert_eq!(cursor.peek_next(), Some(&1));
    assert_eq!(cursor.peek_prev(), Some(&6));
    assert_eq!(cursor.index(), None);
    cursor.move_prev();
    cursor.move_prev();
    assert_eq!(cursor.current(), Some(&5));
    assert_eq!(cursor.peek_next(), Some(&6));
    assert_eq!(cursor.peek_prev(), Some(&4));
    assert_eq!(cursor.index(), Some(4));

    let mut m: LinkedList<u32> = LinkedList::new();
    m.extend(&[1, 2, 3, 4, 5, 6]);
    let mut cursor = m.cursor_front_mut();
    assert_eq!(cursor.current(), Some(&mut 1));
    assert_eq!(cursor.peek_next(), Some(&mut 2));
    assert_eq!(cursor.peek_prev(), None);
    assert_eq!(cursor.index(), Some(0));
    cursor.move_prev();
    assert_eq!(cursor.current(), None);
    assert_eq!(cursor.peek_next(), Some(&mut 1));
    assert_eq!(cursor.peek_prev(), Some(&mut 6));
    assert_eq!(cursor.index(), None);
    cursor.move_next();
    cursor.move_next();
    assert_eq!(cursor.current(), Some(&mut 2));
    assert_eq!(cursor.peek_next(), Some(&mut 3));
    assert_eq!(cursor.peek_prev(), Some(&mut 1));
    assert_eq!(cursor.index(), Some(1));
    let mut cursor2 = cursor.as_cursor();
    assert_eq!(cursor2.current(), Some(&2));
    assert_eq!(cursor2.index(), Some(1));
    cursor2.move_next();
    assert_eq!(cursor2.current(), Some(&3));
    assert_eq!(cursor2.index(), Some(2));
    assert_eq!(cursor.current(), Some(&mut 2));
    assert_eq!(cursor.index(), Some(1));

    let mut m: LinkedList<u32> = LinkedList::new();
    m.extend(&[1, 2, 3, 4, 5, 6]);
    let mut cursor = m.cursor_back_mut();
    assert_eq!(cursor.current(), Some(&mut 6));
    assert_eq!(cursor.peek_next(), None);
    assert_eq!(cursor.peek_prev(), Some(&mut 5));
    assert_eq!(cursor.index(), Some(5));
    cursor.move_next();
    assert_eq!(cursor.current(), None);
    assert_eq!(cursor.peek_next(), Some(&mut 1));
    assert_eq!(cursor.peek_prev(), Some(&mut 6));
    assert_eq!(cursor.index(), None);
    cursor.move_prev();
    cursor.move_prev();
    assert_eq!(cursor.current(), Some(&mut 5));
    assert_eq!(cursor.peek_next(), Some(&mut 6));
    assert_eq!(cursor.peek_prev(), Some(&mut 4));
    assert_eq!(cursor.index(), Some(4));
    let mut cursor2 = cursor.as_cursor();
    assert_eq!(cursor2.current(), Some(&5));
    assert_eq!(cursor2.index(), Some(4));
    cursor2.move_prev();
    assert_eq!(cursor2.current(), Some(&4));
    assert_eq!(cursor2.index(), Some(3));
    assert_eq!(cursor.current(), Some(&mut 5));
    assert_eq!(cursor.index(), Some(4));
}

#[test]
fn test_cursor_mut_insert() {
    let mut m: LinkedList<u32> = LinkedList::new();
    m.extend(&[1, 2, 3, 4, 5, 6]);
    let mut cursor = m.cursor_front_mut();
    cursor.insert_before(7);
    cursor.insert_after(8);
    check_links(&m);
    assert_eq!(m.iter().cloned().collect::<Vec<_>>(), &[7, 1, 8, 2, 3, 4, 5, 6]);
    let mut cursor = m.cursor_front_mut();
    cursor.move_prev();
    cursor.insert_before(9);
    cursor.insert_after(10);
    check_links(&m);
    assert_eq!(m.iter().cloned().collect::<Vec<_>>(), &[10, 7, 1, 8, 2, 3, 4, 5, 6, 9]);
    let mut cursor = m.cursor_front_mut();
    cursor.move_prev();
    assert_eq!(cursor.remove_current(), None);
    cursor.move_next();
    cursor.move_next();
    assert_eq!(cursor.remove_current(), Some(7));
    cursor.move_prev();
    cursor.move_prev();
    cursor.move_prev();
    assert_eq!(cursor.remove_current(), Some(9));
    cursor.move_next();
    assert_eq!(cursor.remove_current(), Some(10));
    check_links(&m);
    assert_eq!(m.iter().cloned().collect::<Vec<_>>(), &[1, 8, 2, 3, 4, 5, 6]);
    let mut cursor = m.cursor_front_mut();
    let mut p: LinkedList<u32> = LinkedList::new();
    p.extend(&[100, 101, 102, 103]);
    let mut q: LinkedList<u32> = LinkedList::new();
    q.extend(&[200, 201, 202, 203]);
    cursor.splice_after(p);
    cursor.splice_before(q);
    check_links(&m);
    assert_eq!(
        m.iter().cloned().collect::<Vec<_>>(),
        &[200, 201, 202, 203, 1, 100, 101, 102, 103, 8, 2, 3, 4, 5, 6]
    );
    let mut cursor = m.cursor_front_mut();
    cursor.move_prev();
    let tmp = cursor.split_before();
    assert_eq!(m.into_iter().collect::<Vec<_>>(), &[]);
    m = tmp;
    let mut cursor = m.cursor_front_mut();
    cursor.move_next();
    cursor.move_next();
    cursor.move_next();
    cursor.move_next();
    cursor.move_next();
    cursor.move_next();
    let tmp = cursor.split_after();
    assert_eq!(tmp.into_iter().collect::<Vec<_>>(), &[102, 103, 8, 2, 3, 4, 5, 6]);
    check_links(&m);
    assert_eq!(m.iter().cloned().collect::<Vec<_>>(), &[200, 201, 202, 203, 1, 100, 101]);
}

use super::merge_iter::MergeIterInner;
use super::node::{self, Root};
use core::iter::FusedIterator;

impl<K, V> Root<K, V> {
    /// Appends all key-value pairs from the union of two ascending iterators,
    /// incrementing a `length` variable along the way. The latter makes it
    /// easier for the caller to avoid a leak when a drop handler panics.
    ///
    /// If both iterators produce the same key, this method drops the pair from
    /// the left iterator and appends the pair from the right iterator.
    ///
    /// If you want the tree to end up in a strictly ascending order, like for
    /// a `BTreeMap`, both iterators should produce keys in strictly ascending
    /// order, each greater than all keys in the tree, including any keys
    /// already in the tree upon entry.
    pub fn append_from_sorted_iters<I>(&mut self, left: I, right: I, length: &mut usize)
    where
        K: Ord,
        I: Iterator<Item = (K, V)> + FusedIterator,
    {
        // We prepare to merge `left` and `right` into a sorted sequence in linear time.
        let iter = MergeIter(MergeIterInner::new(left, right));

        // Meanwhile, we build a tree from the sorted sequence in linear time.
        self.bulk_push(iter, length)
    }
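    // The right-hand bias described above is observable through the public
    // `BTreeMap::append`, which is built on this routine: on duplicate keys,
    // the value from the appended (right) map wins. A small sketch:
    //
    //     use std::collections::BTreeMap;
    //
    //     let mut left: BTreeMap<u32, &str> = vec![(1, "a"), (2, "b")].into_iter().collect();
    //     let mut right: BTreeMap<u32, &str> = vec![(2, "B"), (3, "C")].into_iter().collect();
    //     left.append(&mut right);
    //     assert_eq!(left[&2], "B"); // the pair from `right` replaced the one from `left`
    //     assert!(right.is_empty());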
    /// Pushes all key-value pairs to the end of the tree, incrementing a
    /// `length` variable along the way. The latter makes it easier for the
    /// caller to avoid a leak when the iterator panics.
    pub fn bulk_push<I>(&mut self, iter: I, length: &mut usize)
    where
        I: Iterator<Item = (K, V)>,
    {
        let mut cur_node = self.borrow_mut().last_leaf_edge().into_node();
        // Iterate through all key-value pairs, pushing them into nodes at the right level.
        for (key, value) in iter {
            // Try to push key-value pair into the current leaf node.
            if cur_node.len() < node::CAPACITY {
                cur_node.push(key, value);
            } else {
                // No space left, go up and push there.
                let mut open_node;
                let mut test_node = cur_node.forget_type();
                loop {
                    match test_node.ascend() {
                        Ok(parent) => {
                            let parent = parent.into_node();
                            if parent.len() < node::CAPACITY {
                                // Found a node with space left, push here.
                                open_node = parent;
                                break;
                            } else {
                                // Go up again.
                                test_node = parent.forget_type();
                            }
                        }
                        Err(_) => {
                            // We are at the top, create a new root node and push there.
                            open_node = self.push_internal_level();
                            break;
                        }
                    }
                }

                // Push key-value pair and new right subtree.
                let tree_height = open_node.height() - 1;
                let mut right_tree = Root::new();
                for _ in 0..tree_height {
                    right_tree.push_internal_level();
                }
                open_node.push(key, value, right_tree);

                // Go down to the right-most leaf again.
                cur_node = open_node.forget_type().last_leaf_edge().into_node();
            }

            // Increment length every iteration, to make sure the map drops
            // the appended elements even if advancing the iterator panics.
            *length += 1;
        }
        self.fix_right_border_of_plentiful();
    }
}

// An iterator for merging two sorted sequences into one
struct MergeIter<K, V, I: Iterator<Item = (K, V)>>(MergeIterInner<I>);

impl<K: Ord, V, I> Iterator for MergeIter<K, V, I>
where
    I: Iterator<Item = (K, V)> + FusedIterator,
{
    type Item = (K, V);

    /// If two keys are equal, returns the key-value pair from the right source.
    fn next(&mut self) -> Option<(K, V)> {
        let (a_next, b_next) = self.0.nexts(|a: &(K, V), b: &(K, V)| K::cmp(&a.0, &b.0));
        b_next.or(a_next)
    }
}

use crate::fmt::Debug;
use std::cmp::Ordering;
use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};

/// A blueprint for crash test dummy instances that monitor particular events.
/// Some instances may be configured to panic at some point.
/// Events are `clone`, `drop` or some anonymous `query`.
///
/// Crash test dummies are identified and ordered by an id, so they can be used
/// as keys in a BTreeMap. The implementation intentionally does not rely on
/// anything defined in the crate, apart from the `Debug` trait.
#[derive(Debug)]
pub struct CrashTestDummy {
    id: usize,
    cloned: AtomicUsize,
    dropped: AtomicUsize,
    queried: AtomicUsize,
}

impl CrashTestDummy {
    /// Creates a crash test dummy design. The `id` determines order and equality of instances.
    pub fn new(id: usize) -> CrashTestDummy {
        CrashTestDummy {
            id,
            cloned: AtomicUsize::new(0),
            dropped: AtomicUsize::new(0),
            queried: AtomicUsize::new(0),
        }
    }

    /// Creates an instance of a crash test dummy that records what events it experiences
    /// and optionally panics.
    pub fn spawn(&self, panic: Panic) -> Instance<'_> {
        Instance { origin: self, panic }
    }

    /// Returns how many times instances of the dummy have been cloned.
    pub fn cloned(&self) -> usize {
        self.cloned.load(SeqCst)
    }

    /// Returns how many times instances of the dummy have been dropped.
    pub fn dropped(&self) -> usize {
        self.dropped.load(SeqCst)
    }

    /// Returns how many times instances of the dummy have had their `query` member invoked.
    pub fn queried(&self) -> usize {
        self.queried.load(SeqCst)
    }
}
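// A usage sketch for the harness above: one `CrashTestDummy` design is shared
// by reference among its spawned instances, so event counts outlive the
// instances themselves (illustrative only; `Panic` and `Instance` follow below):
//
//     let dummy = CrashTestDummy::new(0);
//     let a = dummy.spawn(Panic::Never);
//     let b = a.clone();
//     drop(a);
//     drop(b);
//     assert_eq!(dummy.cloned(), 1);
//     assert_eq!(dummy.dropped(), 2);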
#[derive(Debug)]
pub struct Instance<'a> {
    origin: &'a CrashTestDummy,
    panic: Panic,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Panic {
    Never,
    InClone,
    InDrop,
    InQuery,
}

impl Instance<'_> {
    pub fn id(&self) -> usize {
        self.origin.id
    }

    /// Some anonymous query, the result of which is already given.
    pub fn query<R>(&self, result: R) -> R {
        self.origin.queried.fetch_add(1, SeqCst);
        if self.panic == Panic::InQuery {
            panic!("panic in `query`");
        }
        result
    }
}

impl Clone for Instance<'_> {
    fn clone(&self) -> Self {
        self.origin.cloned.fetch_add(1, SeqCst);
        if self.panic == Panic::InClone {
            panic!("panic in `clone`");
        }
        Self { origin: self.origin, panic: Panic::Never }
    }
}

impl Drop for Instance<'_> {
    fn drop(&mut self) {
        self.origin.dropped.fetch_add(1, SeqCst);
        if self.panic == Panic::InDrop {
            panic!("panic in `drop`");
        }
    }
}

impl PartialOrd for Instance<'_> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.id().partial_cmp(&other.id())
    }
}

impl Ord for Instance<'_> {
    fn cmp(&self, other: &Self) -> Ordering {
        self.id().cmp(&other.id())
    }
}

impl PartialEq for Instance<'_> {
    fn eq(&self, other: &Self) -> bool {
        self.id().eq(&other.id())
    }
}

impl Eq for Instance<'_> {}

/// XorShiftRng
pub struct DeterministicRng {
    count: usize,
    x: u32,
    y: u32,
    z: u32,
    w: u32,
}

impl DeterministicRng {
    pub fn new() -> Self {
        DeterministicRng { count: 0, x: 0x193a6754, y: 0xa8a7d469, z: 0x97830e05, w: 0x113ba7bb }
    }

    /// Guarantees that each returned number is unique.
    pub fn next(&mut self) -> u32 {
        self.count += 1;
        assert!(self.count <= 70029);
        let x = self.x;
        let t = x ^ (x << 11);
        self.x = self.y;
        self.y = self.z;
        self.z = self.w;
        let w_ = self.w;
        self.w = w_ ^ (w_ >> 19) ^ (t ^ (t >> 8));
        self.w
    }
}

use std::cell::Cell;
use std::cmp::Ordering::{self, *};
use std::ptr;

// Minimal type with an `Ord` implementation violating transitivity.
#[derive(Debug)]
pub enum Cyclic3 {
    A,
    B,
    C,
}
use Cyclic3::*;

impl PartialOrd for Cyclic3 {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Cyclic3 {
    fn cmp(&self, other: &Self) -> Ordering {
        match (self, other) {
            (A, A) | (B, B) | (C, C) => Equal,
            (A, B) | (B, C) | (C, A) => Less,
            (A, C) | (B, A) | (C, B) => Greater,
        }
    }
}

impl PartialEq for Cyclic3 {
    fn eq(&self, other: &Self) -> bool {
        self.cmp(&other) == Equal
    }
}

impl Eq for Cyclic3 {}

// Controls the ordering of values wrapped by `Governed`.
#[derive(Debug)]
pub struct Governor {
    flipped: Cell<bool>,
}

impl Governor {
    pub fn new() -> Self {
        Governor { flipped: Cell::new(false) }
    }

    pub fn flip(&self) {
        self.flipped.set(!self.flipped.get());
    }
}

// Type with an `Ord` implementation that forms a total order at any moment
// (assuming that `T` respects total order), but can suddenly be made to invert
// that total order.
#[derive(Debug)]
pub struct Governed<'a, T>(pub T, pub &'a Governor);

impl<T: Ord> PartialOrd for Governed<'_, T> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl<T: Ord> Ord for Governed<'_, T> {
    fn cmp(&self, other: &Self) -> Ordering {
        assert!(ptr::eq(self.1, other.1));
        let ord = self.0.cmp(&other.0);
        if self.1.flipped.get() { ord.reverse() } else { ord }
    }
}

impl<T: PartialEq> PartialEq for Governed<'_, T> {
    fn eq(&self, other: &Self) -> bool {
        assert!(ptr::eq(self.1, other.1));
        self.0.eq(&other.0)
    }
}

impl<T: Eq> Eq for Governed<'_, T> {}
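// Sketch of how `Governed` values are used to stress comparison-based code:
// an order that is total at any instant, but can be inverted mid-run.
//
//     let governor = Governor::new();
//     let (a, b) = (Governed(1, &governor), Governed(2, &governor));
//     assert!(a < b);
//     governor.flip();   // from here on, every comparison is reversed
//     assert!(a > b);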
pub mod crash_test;
pub mod ord_chaos;
pub mod rng;

use core::borrow::Borrow;
use core::cmp::Ordering;
use core::ops::{Bound, RangeBounds};

use super::node::{marker, ForceResult::*, Handle, NodeRef};

use SearchBound::*;
use SearchResult::*;

pub enum SearchBound<T> {
    /// An inclusive bound to look for, just like `Bound::Included(T)`.
    Included(T),
    /// An exclusive bound to look for, just like `Bound::Excluded(T)`.
    Excluded(T),
    /// An unconditional inclusive bound, just like `Bound::Unbounded`.
    AllIncluded,
    /// An unconditional exclusive bound.
    AllExcluded,
}

impl<T> SearchBound<T> {
    pub fn from_range(range_bound: Bound<T>) -> Self {
        match range_bound {
            Bound::Included(t) => Included(t),
            Bound::Excluded(t) => Excluded(t),
            Bound::Unbounded => AllIncluded,
        }
    }
}

pub enum SearchResult<BorrowType, K, V, FoundType, GoDownType> {
    Found(Handle<NodeRef<BorrowType, K, V, FoundType>, marker::KV>),
    GoDown(Handle<NodeRef<BorrowType, K, V, GoDownType>, marker::Edge>),
}

pub enum IndexResult {
    KV(usize),
    Edge(usize),
}

impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
    /// Looks up a given key in a (sub)tree headed by the node, recursively.
    /// Returns a `Found` with the handle of the matching KV, if any. Otherwise,
    /// returns a `GoDown` with the handle of the leaf edge where the key belongs.
    ///
    /// The result is meaningful only if the tree is ordered by key, like the tree
    /// in a `BTreeMap` is.
    pub fn search_tree<Q: ?Sized>(
        mut self,
        key: &Q,
    ) -> SearchResult<BorrowType, K, V, marker::LeafOrInternal, marker::Leaf>
    where
        Q: Ord,
        K: Borrow<Q>,
    {
        loop {
            self = match self.search_node(key) {
                Found(handle) => return Found(handle),
                GoDown(handle) => match handle.force() {
                    Leaf(leaf) => return GoDown(leaf),
                    Internal(internal) => internal.descend(),
                },
            }
        }
    }

    /// Descends to the nearest node where the edge matching the lower bound
    /// of the range is different from the edge matching the upper bound, i.e.,
    /// the nearest node that has at least one key contained in the range.
    ///
    /// If found, returns an `Ok` with that node, the strictly ascending pair of
    /// edge indices in the node delimiting the range, and the corresponding
    /// pair of bounds for continuing the search in the child nodes, in case
    /// the node is internal.
    ///
    /// If not found, returns an `Err` with the leaf edge matching the entire
    /// range.
    ///
    /// As a diagnostic service, panics if the range specifies impossible bounds.
    ///
    /// The result is meaningful only if the tree is ordered by key.
    pub fn search_tree_for_bifurcation<'r, Q: ?Sized, R>(
        mut self,
        range: &'r R,
    ) -> Result<
        (
            NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
            usize,
            usize,
            SearchBound<&'r Q>,
            SearchBound<&'r Q>,
        ),
        Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
    >
    where
        Q: Ord,
        K: Borrow<Q>,
        R: RangeBounds<Q>,
    {
        // Inlining these variables should be avoided. We assume the bounds reported by `range`
        // remain the same, but an adversarial implementation could change between calls (#81138).
        let (start, end) = (range.start_bound(), range.end_bound());
        match (start, end) {
            (Bound::Excluded(s), Bound::Excluded(e)) if s == e => {
                panic!("range start and end are equal and excluded in BTreeMap")
            }
            (Bound::Included(s) | Bound::Excluded(s), Bound::Included(e) | Bound::Excluded(e))
                if s > e =>
            {
                panic!("range start is greater than range end in BTreeMap")
            }
            _ => {}
        }
        let mut lower_bound = SearchBound::from_range(start);
        let mut upper_bound = SearchBound::from_range(end);
        loop {
            let (lower_edge_idx, lower_child_bound) = self.find_lower_bound_index(lower_bound);
            let (upper_edge_idx, upper_child_bound) =
                unsafe { self.find_upper_bound_index(upper_bound, lower_edge_idx) };
            if lower_edge_idx < upper_edge_idx {
                return Ok((
                    self,
                    lower_edge_idx,
                    upper_edge_idx,
                    lower_child_bound,
                    upper_child_bound,
                ));
            }
            debug_assert_eq!(lower_edge_idx, upper_edge_idx);
            let common_edge = unsafe { Handle::new_edge(self, lower_edge_idx) };
            match common_edge.force() {
                Leaf(common_edge) => return Err(common_edge),
                Internal(common_edge) => {
                    self = common_edge.descend();
                    lower_bound = lower_child_bound;
                    upper_bound = upper_child_bound;
                }
            }
        }
    }
pub fn find_lower_bound_edge<'r, Q>( self, bound: SearchBound<&'r Q>, ) -> (Handle, SearchBound<&'r Q>) where Q: ?Sized + Ord, K: Borrow, { let (edge_idx, bound) = self.find_lower_bound_index(bound); let edge = unsafe { Handle::new_edge(self, edge_idx) }; (edge, bound) } /// Clone of `find_lower_bound_edge` for the upper bound. pub fn find_upper_bound_edge<'r, Q>( self, bound: SearchBound<&'r Q>, ) -> (Handle, SearchBound<&'r Q>) where Q: ?Sized + Ord, K: Borrow, { let (edge_idx, bound) = unsafe { self.find_upper_bound_index(bound, 0) }; let edge = unsafe { Handle::new_edge(self, edge_idx) }; (edge, bound) } } impl NodeRef { /// Looks up a given key in the node, without recursion. /// Returns a `Found` with the handle of the matching KV, if any. Otherwise, /// returns a `GoDown` with the handle of the edge where the key might be found /// (if the node is internal) or where the key can be inserted. /// /// The result is meaningful only if the tree is ordered by key, like the tree /// in a `BTreeMap` is. pub fn search_node(self, key: &Q) -> SearchResult where Q: Ord, K: Borrow, { match unsafe { self.find_key_index(key, 0) } { IndexResult::KV(idx) => Found(unsafe { Handle::new_kv(self, idx) }), IndexResult::Edge(idx) => GoDown(unsafe { Handle::new_edge(self, idx) }), } } /// Returns either the KV index in the node at which the key (or an equivalent) /// exists, or the edge index where the key belongs, starting from a particular index. /// /// The result is meaningful only if the tree is ordered by key, like the tree /// in a `BTreeMap` is. /// /// # Safety /// `start_index` must be a valid edge index for the node. unsafe fn find_key_index(&self, key: &Q, start_index: usize) -> IndexResult where Q: Ord, K: Borrow, { let node = self.reborrow(); let keys = node.keys(); debug_assert!(start_index <= keys.len()); for (offset, k) in unsafe { keys.get_unchecked(start_index..) }.iter().enumerate() { match key.cmp(k.borrow()) { Ordering::Greater => {} Ordering::Equal => return IndexResult::KV(start_index + offset), Ordering::Less => return IndexResult::Edge(start_index + offset), } } IndexResult::Edge(keys.len()) } /// Finds an edge index in the node delimiting the lower bound of a range. /// Also returns the lower bound to be used for continuing the search in /// the matching child node, if `self` is an internal node. /// /// The result is meaningful only if the tree is ordered by key. fn find_lower_bound_index<'r, Q>( &self, bound: SearchBound<&'r Q>, ) -> (usize, SearchBound<&'r Q>) where Q: ?Sized + Ord, K: Borrow, { match bound { Included(key) => match unsafe { self.find_key_index(key, 0) } { IndexResult::KV(idx) => (idx, AllExcluded), IndexResult::Edge(idx) => (idx, bound), }, Excluded(key) => match unsafe { self.find_key_index(key, 0) } { IndexResult::KV(idx) => (idx + 1, AllIncluded), IndexResult::Edge(idx) => (idx, bound), }, AllIncluded => (0, AllIncluded), AllExcluded => (self.len(), AllExcluded), } } /// Mirror image of `find_lower_bound_index` for the upper bound, /// with an additional parameter to skip part of the key array. /// /// # Safety /// `start_index` must be a valid edge index for the node. 
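// For intuition, the semantics of `find_key_index` above, sketched over a plain
// sorted slice: scan left to right from `start_index` and stop at the first key
// that is not less than the search key. `Ok(i)` plays the role of
// `IndexResult::KV(i)` and `Err(i)` the role of `IndexResult::Edge(i)`:
//
// ```
// fn find_key_index(keys: &[i32], key: i32, start_index: usize) -> Result<usize, usize> {
//     for (offset, k) in keys[start_index..].iter().enumerate() {
//         match key.cmp(k) {
//             std::cmp::Ordering::Greater => {}
//             std::cmp::Ordering::Equal => return Ok(start_index + offset),
//             std::cmp::Ordering::Less => return Err(start_index + offset),
//         }
//     }
//     Err(keys.len())
// }
// assert_eq!(find_key_index(&[2, 4, 6], 4, 0), Ok(1));  // found at KV index 1
// assert_eq!(find_key_index(&[2, 4, 6], 5, 0), Err(2)); // belongs in edge index 2
// ```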
unsafe fn find_upper_bound_index<'r, Q>( &self, bound: SearchBound<&'r Q>, start_index: usize, ) -> (usize, SearchBound<&'r Q>) where Q: ?Sized + Ord, K: Borrow, { match bound { Included(key) => match unsafe { self.find_key_index(key, start_index) } { IndexResult::KV(idx) => (idx + 1, AllExcluded), IndexResult::Edge(idx) => (idx, bound), }, Excluded(key) => match unsafe { self.find_key_index(key, start_index) } { IndexResult::KV(idx) => (idx, AllIncluded), IndexResult::Edge(idx) => (idx, bound), }, AllIncluded => (self.len(), AllIncluded), AllExcluded => (start_index, AllExcluded), } } } use core::borrow::Borrow; use core::cmp::Ordering; use core::fmt::{self, Debug}; use core::hash::{Hash, Hasher}; use core::iter::{FromIterator, FusedIterator}; use core::marker::PhantomData; use core::mem::{self, ManuallyDrop}; use core::ops::{Index, RangeBounds}; use core::ptr; use super::borrow::DormantMutRef; use super::navigate::LeafRange; use super::node::{self, marker, ForceResult::*, Handle, NodeRef, Root}; use super::search::SearchResult::*; mod entry; pub use entry::{Entry, OccupiedEntry, OccupiedError, VacantEntry}; use Entry::*; /// Minimum number of elements in nodes that are not a root. /// We might temporarily have fewer elements during methods. pub(super) const MIN_LEN: usize = node::MIN_LEN_AFTER_SPLIT; // A tree in a `BTreeMap` is a tree in the `node` module with additional invariants: // - Keys must appear in ascending order (according to the key's type). // - If the root node is internal, it must contain at least 1 element. // - Every non-root node contains at least MIN_LEN elements. // // An empty map may be represented both by the absence of a root node or by a // root node that is an empty leaf. /// A map based on a [B-Tree]. /// /// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing /// the amount of work performed in a search. In theory, a binary search tree (BST) is the optimal /// choice for a sorted map, as a perfectly balanced BST performs the theoretical minimum amount of /// comparisons necessary to find an element (log2n). However, in practice the way this /// is done is *very* inefficient for modern computer architectures. In particular, every element /// is stored in its own individually heap-allocated node. This means that every single insertion /// triggers a heap-allocation, and every single comparison should be a cache-miss. Since these /// are both notably expensive things to do in practice, we are forced to at very least reconsider /// the BST strategy. /// /// A B-Tree instead makes each node contain B-1 to 2B-1 elements in a contiguous array. By doing /// this, we reduce the number of allocations by a factor of B, and improve cache efficiency in /// searches. However, this does mean that searches will have to do *more* comparisons on average. /// The precise number of comparisons depends on the node search strategy used. For optimal cache /// efficiency, one could search the nodes linearly. For optimal comparisons, one could search /// the node using binary search. As a compromise, one could also perform a linear search /// that initially only checks every ith element for some choice of i. /// /// Currently, our implementation simply performs naive linear search. This provides excellent /// performance on *small* nodes of elements which are cheap to compare. However in the future we /// would like to further explore choosing the optimal search strategy based on the choice of B, /// and possibly other factors. 
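// To make the trade-off concrete (assuming the constants in the accompanying
// `node` module, which are not restated here): this implementation uses B = 6,
// so a node holds 5 to 11 elements and has up to 12 children. A tree of a
// million keys is then only about 6 levels deep (log_12(10^6) is roughly 5.6),
// versus roughly 20 levels for a balanced BST (log_2(10^6) is roughly 19.9),
// at the cost of up to 11 linear-search comparisons per node.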
/// Using linear search, searching for a random element is expected
/// to take O(B * log(n)) comparisons, which is generally worse than a BST. In practice,
/// however, performance is excellent.
///
/// It is a logic error for a key to be modified in such a way that the key's ordering relative to
/// any other key, as determined by the [`Ord`] trait, changes while it is in the map. This is
/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
/// The behavior resulting from such a logic error is not specified, but will not result in
/// undefined behavior. This could include panics, incorrect results, aborts, memory leaks, and
/// non-termination.
///
/// [B-Tree]: https://en.wikipedia.org/wiki/B-tree
/// [`Cell`]: core::cell::Cell
/// [`RefCell`]: core::cell::RefCell
///
/// # Examples
///
/// ```
/// use std::collections::BTreeMap;
///
/// // type inference lets us omit an explicit type signature (which
/// // would be `BTreeMap<&str, &str>` in this example).
/// let mut movie_reviews = BTreeMap::new();
///
/// // review some movies.
/// movie_reviews.insert("Office Space", "Deals with real issues in the workplace.");
/// movie_reviews.insert("Pulp Fiction", "Masterpiece.");
/// movie_reviews.insert("The Godfather", "Very enjoyable.");
/// movie_reviews.insert("The Blues Brothers", "Eye lyked it a lot.");
///
/// // check for a specific one.
/// if !movie_reviews.contains_key("Les Misérables") {
///     println!("We've got {} reviews, but Les Misérables ain't one.",
///              movie_reviews.len());
/// }
///
/// // oops, this review has a lot of spelling mistakes, let's delete it.
/// movie_reviews.remove("The Blues Brothers");
///
/// // look up the values associated with some keys.
/// let to_find = ["Up!", "Office Space"];
/// for movie in &to_find {
///     match movie_reviews.get(movie) {
///         Some(review) => println!("{}: {}", movie, review),
///         None => println!("{} is unreviewed.", movie)
///     }
/// }
///
/// // Look up the value for a key (will panic if the key is not found).
/// println!("Movie review: {}", movie_reviews["Office Space"]);
///
/// // iterate over everything.
/// for (movie, review) in &movie_reviews {
///     println!("{}: \"{}\"", movie, review);
/// }
/// ```
///
/// `BTreeMap` also implements an [`Entry API`], which allows for more complex
/// methods of getting, setting, updating and removing keys and their values:
///
/// [`Entry API`]: BTreeMap::entry
///
/// ```
/// use std::collections::BTreeMap;
///
/// // type inference lets us omit an explicit type signature (which
/// // would be `BTreeMap<&str, u8>` in this example).
/// let mut player_stats = BTreeMap::new(); /// /// fn random_stat_buff() -> u8 { /// // could actually return some random value here - let's just return /// // some fixed value for now /// 42 /// } /// /// // insert a key only if it doesn't already exist /// player_stats.entry("health").or_insert(100); /// /// // insert a key using a function that provides a new value only if it /// // doesn't already exist /// player_stats.entry("defence").or_insert_with(random_stat_buff); /// /// // update a key, guarding against the key possibly not being set /// let stat = player_stats.entry("attack").or_insert(100); /// *stat += random_stat_buff(); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[cfg_attr(not(test), rustc_diagnostic_item = "BTreeMap")] pub struct BTreeMap { root: Option>, length: usize, } #[stable(feature = "btree_drop", since = "1.7.0")] unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap { fn drop(&mut self) { if let Some(root) = self.root.take() { Dropper { front: root.into_dying().first_leaf_edge(), remaining_length: self.length }; } } } #[stable(feature = "rust1", since = "1.0.0")] impl Clone for BTreeMap { fn clone(&self) -> BTreeMap { fn clone_subtree<'a, K: Clone, V: Clone>( node: NodeRef, K, V, marker::LeafOrInternal>, ) -> BTreeMap where K: 'a, V: 'a, { match node.force() { Leaf(leaf) => { let mut out_tree = BTreeMap { root: Some(Root::new()), length: 0 }; { let root = out_tree.root.as_mut().unwrap(); // unwrap succeeds because we just wrapped let mut out_node = match root.borrow_mut().force() { Leaf(leaf) => leaf, Internal(_) => unreachable!(), }; let mut in_edge = leaf.first_edge(); while let Ok(kv) = in_edge.right_kv() { let (k, v) = kv.into_kv(); in_edge = kv.right_edge(); out_node.push(k.clone(), v.clone()); out_tree.length += 1; } } out_tree } Internal(internal) => { let mut out_tree = clone_subtree(internal.first_edge().descend()); { let out_root = BTreeMap::ensure_is_owned(&mut out_tree.root); let mut out_node = out_root.push_internal_level(); let mut in_edge = internal.first_edge(); while let Ok(kv) = in_edge.right_kv() { let (k, v) = kv.into_kv(); in_edge = kv.right_edge(); let k = (*k).clone(); let v = (*v).clone(); let subtree = clone_subtree(in_edge.descend()); // We can't destructure subtree directly // because BTreeMap implements Drop let (subroot, sublength) = unsafe { let subtree = ManuallyDrop::new(subtree); let root = ptr::read(&subtree.root); let length = subtree.length; (root, length) }; out_node.push(k, v, subroot.unwrap_or_else(Root::new)); out_tree.length += 1 + sublength; } } out_tree } } } if self.is_empty() { // Ideally we'd call `BTreeMap::new` here, but that has the `K: // Ord` constraint, which this method lacks. 
BTreeMap { root: None, length: 0 } } else { clone_subtree(self.root.as_ref().unwrap().reborrow()) // unwrap succeeds because not empty } } } impl super::Recover for BTreeMap where K: Borrow + Ord, Q: Ord, { type Key = K; fn get(&self, key: &Q) -> Option<&K> { let root_node = self.root.as_ref()?.reborrow(); match root_node.search_tree(key) { Found(handle) => Some(handle.into_kv().0), GoDown(_) => None, } } fn take(&mut self, key: &Q) -> Option { let (map, dormant_map) = DormantMutRef::new(self); let root_node = map.root.as_mut()?.borrow_mut(); match root_node.search_tree(key) { Found(handle) => { Some(OccupiedEntry { handle, dormant_map, _marker: PhantomData }.remove_kv().0) } GoDown(_) => None, } } fn replace(&mut self, key: K) -> Option { let (map, dormant_map) = DormantMutRef::new(self); let root_node = Self::ensure_is_owned(&mut map.root).borrow_mut(); match root_node.search_tree::(&key) { Found(mut kv) => Some(mem::replace(kv.key_mut(), key)), GoDown(handle) => { VacantEntry { key, handle, dormant_map, _marker: PhantomData }.insert(()); None } } } } /// An iterator over the entries of a `BTreeMap`. /// /// This `struct` is created by the [`iter`] method on [`BTreeMap`]. See its /// documentation for more. /// /// [`iter`]: BTreeMap::iter #[stable(feature = "rust1", since = "1.0.0")] pub struct Iter<'a, K: 'a, V: 'a> { range: Range<'a, K, V>, length: usize, } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for Iter<'_, K, V> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } /// A mutable iterator over the entries of a `BTreeMap`. /// /// This `struct` is created by the [`iter_mut`] method on [`BTreeMap`]. See its /// documentation for more. /// /// [`iter_mut`]: BTreeMap::iter_mut #[stable(feature = "rust1", since = "1.0.0")] #[derive(Debug)] pub struct IterMut<'a, K: 'a, V: 'a> { range: RangeMut<'a, K, V>, length: usize, } /// An owning iterator over the entries of a `BTreeMap`. /// /// This `struct` is created by the [`into_iter`] method on [`BTreeMap`] /// (provided by the `IntoIterator` trait). See its documentation for more. /// /// [`into_iter`]: IntoIterator::into_iter #[stable(feature = "rust1", since = "1.0.0")] pub struct IntoIter { range: LeafRange, length: usize, } impl IntoIter { /// Returns an iterator of references over the remaining items. #[inline] pub(super) fn iter(&self) -> Iter<'_, K, V> { let range = Range { inner: self.range.reborrow() }; Iter { range: range, length: self.length } } } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.iter()).finish() } } /// A simplified version of `IntoIter` that is not double-ended and has only one /// purpose: to drop the remainder of an `IntoIter`. Therefore it also serves to /// drop an entire tree without the need to first look up a `back` leaf edge. struct Dropper { front: Handle, marker::Edge>, remaining_length: usize, } /// An iterator over the keys of a `BTreeMap`. /// /// This `struct` is created by the [`keys`] method on [`BTreeMap`]. See its /// documentation for more. 
/// /// [`keys`]: BTreeMap::keys #[stable(feature = "rust1", since = "1.0.0")] pub struct Keys<'a, K: 'a, V: 'a> { inner: Iter<'a, K, V>, } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for Keys<'_, K, V> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } /// An iterator over the values of a `BTreeMap`. /// /// This `struct` is created by the [`values`] method on [`BTreeMap`]. See its /// documentation for more. /// /// [`values`]: BTreeMap::values #[stable(feature = "rust1", since = "1.0.0")] pub struct Values<'a, K: 'a, V: 'a> { inner: Iter<'a, K, V>, } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for Values<'_, K, V> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } /// A mutable iterator over the values of a `BTreeMap`. /// /// This `struct` is created by the [`values_mut`] method on [`BTreeMap`]. See its /// documentation for more. /// /// [`values_mut`]: BTreeMap::values_mut #[stable(feature = "map_values_mut", since = "1.10.0")] pub struct ValuesMut<'a, K: 'a, V: 'a> { inner: IterMut<'a, K, V>, } #[stable(feature = "map_values_mut", since = "1.10.0")] impl fmt::Debug for ValuesMut<'_, K, V> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.inner.iter().map(|(_, val)| val)).finish() } } /// An owning iterator over the keys of a `BTreeMap`. /// /// This `struct` is created by the [`into_keys`] method on [`BTreeMap`]. /// See its documentation for more. /// /// [`into_keys`]: BTreeMap::into_keys #[unstable(feature = "map_into_keys_values", issue = "75294")] pub struct IntoKeys { inner: IntoIter, } #[unstable(feature = "map_into_keys_values", issue = "75294")] impl fmt::Debug for IntoKeys { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.inner.iter().map(|(key, _)| key)).finish() } } /// An owning iterator over the values of a `BTreeMap`. /// /// This `struct` is created by the [`into_values`] method on [`BTreeMap`]. /// See its documentation for more. /// /// [`into_values`]: BTreeMap::into_values #[unstable(feature = "map_into_keys_values", issue = "75294")] pub struct IntoValues { inner: IntoIter, } #[unstable(feature = "map_into_keys_values", issue = "75294")] impl fmt::Debug for IntoValues { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.inner.iter().map(|(_, val)| val)).finish() } } /// An iterator over a sub-range of entries in a `BTreeMap`. /// /// This `struct` is created by the [`range`] method on [`BTreeMap`]. See its /// documentation for more. /// /// [`range`]: BTreeMap::range #[stable(feature = "btree_range", since = "1.17.0")] pub struct Range<'a, K: 'a, V: 'a> { inner: LeafRange, K, V>, } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for Range<'_, K, V> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } /// A mutable iterator over a sub-range of entries in a `BTreeMap`. /// /// This `struct` is created by the [`range_mut`] method on [`BTreeMap`]. See its /// documentation for more. 
/// /// [`range_mut`]: BTreeMap::range_mut #[stable(feature = "btree_range", since = "1.17.0")] pub struct RangeMut<'a, K: 'a, V: 'a> { inner: LeafRange, K, V>, // Be invariant in `K` and `V` _marker: PhantomData<&'a mut (K, V)>, } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for RangeMut<'_, K, V> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let range = Range { inner: self.inner.reborrow() }; f.debug_list().entries(range).finish() } } impl BTreeMap { /// Makes a new, empty `BTreeMap`. /// /// Does not allocate anything on its own. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); /// /// // entries can now be inserted into the empty map /// map.insert(1, "a"); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")] pub const fn new() -> BTreeMap where K: Ord, { BTreeMap { root: None, length: 0 } } /// Clears the map, removing all elements. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BTreeMap; /// /// let mut a = BTreeMap::new(); /// a.insert(1, "a"); /// a.clear(); /// assert!(a.is_empty()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn clear(&mut self) { *self = BTreeMap { root: None, length: 0 }; } /// Returns a reference to the value corresponding to the key. /// /// The key may be any borrowed form of the map's key type, but the ordering /// on the borrowed form *must* match the ordering on the key type. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); /// map.insert(1, "a"); /// assert_eq!(map.get(&1), Some(&"a")); /// assert_eq!(map.get(&2), None); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn get(&self, key: &Q) -> Option<&V> where K: Borrow + Ord, Q: Ord, { let root_node = self.root.as_ref()?.reborrow(); match root_node.search_tree(key) { Found(handle) => Some(handle.into_kv().1), GoDown(_) => None, } } /// Returns the key-value pair corresponding to the supplied key. /// /// The supplied key may be any borrowed form of the map's key type, but the ordering /// on the borrowed form *must* match the ordering on the key type. /// /// # Examples /// /// ``` /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); /// map.insert(1, "a"); /// assert_eq!(map.get_key_value(&1), Some((&1, &"a"))); /// assert_eq!(map.get_key_value(&2), None); /// ``` #[stable(feature = "map_get_key_value", since = "1.40.0")] pub fn get_key_value(&self, k: &Q) -> Option<(&K, &V)> where K: Borrow + Ord, Q: Ord, { let root_node = self.root.as_ref()?.reborrow(); match root_node.search_tree(k) { Found(handle) => Some(handle.into_kv()), GoDown(_) => None, } } /// Returns the first key-value pair in the map. /// The key in this pair is the minimum key in the map. 
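// The "any borrowed form" contract above is what lets a `BTreeMap<String, _>`
// be queried with a plain `&str`, since `String: Borrow<str>` and the two
// order identically. For example:
//
// ```
// use std::collections::BTreeMap;
//
// let mut map = BTreeMap::new();
// map.insert(String::from("key"), 1);
// assert_eq!(map.get("key"), Some(&1)); // &str lookup, no String allocated
// ```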
/// /// # Examples /// /// Basic usage: /// /// ``` /// #![feature(map_first_last)] /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); /// assert_eq!(map.first_key_value(), None); /// map.insert(1, "b"); /// map.insert(2, "a"); /// assert_eq!(map.first_key_value(), Some((&1, &"b"))); /// ``` #[unstable(feature = "map_first_last", issue = "62924")] pub fn first_key_value(&self) -> Option<(&K, &V)> where K: Ord, { let root_node = self.root.as_ref()?.reborrow(); root_node.first_leaf_edge().right_kv().ok().map(Handle::into_kv) } /// Returns the first entry in the map for in-place manipulation. /// The key of this entry is the minimum key in the map. /// /// # Examples /// /// ``` /// #![feature(map_first_last)] /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); /// map.insert(1, "a"); /// map.insert(2, "b"); /// if let Some(mut entry) = map.first_entry() { /// if *entry.key() > 0 { /// entry.insert("first"); /// } /// } /// assert_eq!(*map.get(&1).unwrap(), "first"); /// assert_eq!(*map.get(&2).unwrap(), "b"); /// ``` #[unstable(feature = "map_first_last", issue = "62924")] pub fn first_entry(&mut self) -> Option> where K: Ord, { let (map, dormant_map) = DormantMutRef::new(self); let root_node = map.root.as_mut()?.borrow_mut(); let kv = root_node.first_leaf_edge().right_kv().ok()?; Some(OccupiedEntry { handle: kv.forget_node_type(), dormant_map, _marker: PhantomData }) } /// Removes and returns the first element in the map. /// The key of this element is the minimum key that was in the map. /// /// # Examples /// /// Draining elements in ascending order, while keeping a usable map each iteration. /// /// ``` /// #![feature(map_first_last)] /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); /// map.insert(1, "a"); /// map.insert(2, "b"); /// while let Some((key, _val)) = map.pop_first() { /// assert!(map.iter().all(|(k, _v)| *k > key)); /// } /// assert!(map.is_empty()); /// ``` #[unstable(feature = "map_first_last", issue = "62924")] pub fn pop_first(&mut self) -> Option<(K, V)> where K: Ord, { self.first_entry().map(|entry| entry.remove_entry()) } /// Returns the last key-value pair in the map. /// The key in this pair is the maximum key in the map. /// /// # Examples /// /// Basic usage: /// /// ``` /// #![feature(map_first_last)] /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); /// map.insert(1, "b"); /// map.insert(2, "a"); /// assert_eq!(map.last_key_value(), Some((&2, &"a"))); /// ``` #[unstable(feature = "map_first_last", issue = "62924")] pub fn last_key_value(&self) -> Option<(&K, &V)> where K: Ord, { let root_node = self.root.as_ref()?.reborrow(); root_node.last_leaf_edge().left_kv().ok().map(Handle::into_kv) } /// Returns the last entry in the map for in-place manipulation. /// The key of this entry is the maximum key in the map. 
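// `pop_first`/`pop_last` let the map double as a priority queue keyed by the
// smallest (or largest) key. A sketch, assuming the unstable `map_first_last`
// feature as in the examples above:
//
// ```
// #![feature(map_first_last)]
// use std::collections::BTreeMap;
//
// let mut queue = BTreeMap::new();
// queue.insert(2, "second");
// queue.insert(1, "first");
// assert_eq!(queue.pop_first(), Some((1, "first"))); // always the smallest key
// ```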
/// /// # Examples /// /// ``` /// #![feature(map_first_last)] /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); /// map.insert(1, "a"); /// map.insert(2, "b"); /// if let Some(mut entry) = map.last_entry() { /// if *entry.key() > 0 { /// entry.insert("last"); /// } /// } /// assert_eq!(*map.get(&1).unwrap(), "a"); /// assert_eq!(*map.get(&2).unwrap(), "last"); /// ``` #[unstable(feature = "map_first_last", issue = "62924")] pub fn last_entry(&mut self) -> Option> where K: Ord, { let (map, dormant_map) = DormantMutRef::new(self); let root_node = map.root.as_mut()?.borrow_mut(); let kv = root_node.last_leaf_edge().left_kv().ok()?; Some(OccupiedEntry { handle: kv.forget_node_type(), dormant_map, _marker: PhantomData }) } /// Removes and returns the last element in the map. /// The key of this element is the maximum key that was in the map. /// /// # Examples /// /// Draining elements in descending order, while keeping a usable map each iteration. /// /// ``` /// #![feature(map_first_last)] /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); /// map.insert(1, "a"); /// map.insert(2, "b"); /// while let Some((key, _val)) = map.pop_last() { /// assert!(map.iter().all(|(k, _v)| *k < key)); /// } /// assert!(map.is_empty()); /// ``` #[unstable(feature = "map_first_last", issue = "62924")] pub fn pop_last(&mut self) -> Option<(K, V)> where K: Ord, { self.last_entry().map(|entry| entry.remove_entry()) } /// Returns `true` if the map contains a value for the specified key. /// /// The key may be any borrowed form of the map's key type, but the ordering /// on the borrowed form *must* match the ordering on the key type. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); /// map.insert(1, "a"); /// assert_eq!(map.contains_key(&1), true); /// assert_eq!(map.contains_key(&2), false); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn contains_key(&self, key: &Q) -> bool where K: Borrow + Ord, Q: Ord, { self.get(key).is_some() } /// Returns a mutable reference to the value corresponding to the key. /// /// The key may be any borrowed form of the map's key type, but the ordering /// on the borrowed form *must* match the ordering on the key type. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); /// map.insert(1, "a"); /// if let Some(x) = map.get_mut(&1) { /// *x = "b"; /// } /// assert_eq!(map[&1], "b"); /// ``` // See `get` for implementation notes, this is basically a copy-paste with mut's added #[stable(feature = "rust1", since = "1.0.0")] pub fn get_mut(&mut self, key: &Q) -> Option<&mut V> where K: Borrow + Ord, Q: Ord, { let root_node = self.root.as_mut()?.borrow_mut(); match root_node.search_tree(key) { Found(handle) => Some(handle.into_val_mut()), GoDown(_) => None, } } /// Inserts a key-value pair into the map. /// /// If the map did not have this key present, `None` is returned. /// /// If the map did have this key present, the value is updated, and the old /// value is returned. The key is not updated, though; this matters for /// types that can be `==` without being identical. See the [module-level /// documentation] for more. 
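// "The key is not updated" is observable with keys that compare equal without
// being identical. A sketch using a hypothetical `Pair` type whose `Ord` looks
// only at its first field:
//
// ```
// use std::cmp::Ordering;
// use std::collections::BTreeMap;
//
// struct Pair(u32, &'static str); // ordered by the number only
// impl PartialEq for Pair { fn eq(&self, o: &Self) -> bool { self.0 == o.0 } }
// impl Eq for Pair {}
// impl PartialOrd for Pair { fn partial_cmp(&self, o: &Self) -> Option<Ordering> { Some(self.cmp(o)) } }
// impl Ord for Pair { fn cmp(&self, o: &Self) -> Ordering { self.0.cmp(&o.0) } }
//
// let mut map = BTreeMap::new();
// map.insert(Pair(1, "original"), ());
// map.insert(Pair(1, "replacement"), ()); // value replaced, key NOT replaced
// assert_eq!(map.keys().next().unwrap().1, "original");
// ```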
/// /// [module-level documentation]: index.html#insert-and-complex-keys /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); /// assert_eq!(map.insert(37, "a"), None); /// assert_eq!(map.is_empty(), false); /// /// map.insert(37, "b"); /// assert_eq!(map.insert(37, "c"), Some("b")); /// assert_eq!(map[&37], "c"); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn insert(&mut self, key: K, value: V) -> Option where K: Ord, { match self.entry(key) { Occupied(mut entry) => Some(entry.insert(value)), Vacant(entry) => { entry.insert(value); None } } } /// Tries to insert a key-value pair into the map, and returns /// a mutable reference to the value in the entry. /// /// If the map already had this key present, nothing is updated, and /// an error containing the occupied entry and the value is returned. /// /// # Examples /// /// Basic usage: /// /// ``` /// #![feature(map_try_insert)] /// /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); /// assert_eq!(map.try_insert(37, "a").unwrap(), &"a"); /// /// let err = map.try_insert(37, "b").unwrap_err(); /// assert_eq!(err.entry.key(), &37); /// assert_eq!(err.entry.get(), &"a"); /// assert_eq!(err.value, "b"); /// ``` #[unstable(feature = "map_try_insert", issue = "82766")] pub fn try_insert(&mut self, key: K, value: V) -> Result<&mut V, OccupiedError<'_, K, V>> where K: Ord, { match self.entry(key) { Occupied(entry) => Err(OccupiedError { entry, value }), Vacant(entry) => Ok(entry.insert(value)), } } /// Removes a key from the map, returning the value at the key if the key /// was previously in the map. /// /// The key may be any borrowed form of the map's key type, but the ordering /// on the borrowed form *must* match the ordering on the key type. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); /// map.insert(1, "a"); /// assert_eq!(map.remove(&1), Some("a")); /// assert_eq!(map.remove(&1), None); /// ``` #[doc(alias = "delete")] #[stable(feature = "rust1", since = "1.0.0")] pub fn remove(&mut self, key: &Q) -> Option where K: Borrow + Ord, Q: Ord, { self.remove_entry(key).map(|(_, v)| v) } /// Removes a key from the map, returning the stored key and value if the key /// was previously in the map. /// /// The key may be any borrowed form of the map's key type, but the ordering /// on the borrowed form *must* match the ordering on the key type. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); /// map.insert(1, "a"); /// assert_eq!(map.remove_entry(&1), Some((1, "a"))); /// assert_eq!(map.remove_entry(&1), None); /// ``` #[stable(feature = "btreemap_remove_entry", since = "1.45.0")] pub fn remove_entry(&mut self, key: &Q) -> Option<(K, V)> where K: Borrow + Ord, Q: Ord, { let (map, dormant_map) = DormantMutRef::new(self); let root_node = map.root.as_mut()?.borrow_mut(); match root_node.search_tree(key) { Found(handle) => { Some(OccupiedEntry { handle, dormant_map, _marker: PhantomData }.remove_entry()) } GoDown(_) => None, } } /// Retains only the elements specified by the predicate. /// /// In other words, remove all pairs `(k, v)` such that `f(&k, &mut v)` returns `false`. /// /// # Examples /// /// ``` /// use std::collections::BTreeMap; /// /// let mut map: BTreeMap = (0..8).map(|x| (x, x*10)).collect(); /// // Keep only the elements with even-numbered keys. 
/// map.retain(|&k, _| k % 2 == 0); /// assert!(map.into_iter().eq(vec![(0, 0), (2, 20), (4, 40), (6, 60)])); /// ``` #[inline] #[stable(feature = "btree_retain", since = "1.53.0")] pub fn retain(&mut self, mut f: F) where K: Ord, F: FnMut(&K, &mut V) -> bool, { self.drain_filter(|k, v| !f(k, v)); } /// Moves all elements from `other` into `Self`, leaving `other` empty. /// /// # Examples /// /// ``` /// use std::collections::BTreeMap; /// /// let mut a = BTreeMap::new(); /// a.insert(1, "a"); /// a.insert(2, "b"); /// a.insert(3, "c"); /// /// let mut b = BTreeMap::new(); /// b.insert(3, "d"); /// b.insert(4, "e"); /// b.insert(5, "f"); /// /// a.append(&mut b); /// /// assert_eq!(a.len(), 5); /// assert_eq!(b.len(), 0); /// /// assert_eq!(a[&1], "a"); /// assert_eq!(a[&2], "b"); /// assert_eq!(a[&3], "d"); /// assert_eq!(a[&4], "e"); /// assert_eq!(a[&5], "f"); /// ``` #[stable(feature = "btree_append", since = "1.11.0")] pub fn append(&mut self, other: &mut Self) where K: Ord, { // Do we have to append anything at all? if other.is_empty() { return; } // We can just swap `self` and `other` if `self` is empty. if self.is_empty() { mem::swap(self, other); return; } let self_iter = mem::take(self).into_iter(); let other_iter = mem::take(other).into_iter(); let root = BTreeMap::ensure_is_owned(&mut self.root); root.append_from_sorted_iters(self_iter, other_iter, &mut self.length) } /// Constructs a double-ended iterator over a sub-range of elements in the map. /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will /// yield elements from min (inclusive) to max (exclusive). /// The range may also be entered as `(Bound, Bound)`, so for example /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive /// range from 4 to 10. /// /// # Panics /// /// Panics if range `start > end`. /// Panics if range `start == end` and both bounds are `Excluded`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BTreeMap; /// use std::ops::Bound::Included; /// /// let mut map = BTreeMap::new(); /// map.insert(3, "a"); /// map.insert(5, "b"); /// map.insert(8, "c"); /// for (&key, &value) in map.range((Included(&4), Included(&8))) { /// println!("{}: {}", key, value); /// } /// assert_eq!(Some((&5, &"b")), map.range(4..).next()); /// ``` #[stable(feature = "btree_range", since = "1.17.0")] pub fn range(&self, range: R) -> Range<'_, K, V> where T: Ord, K: Borrow + Ord, R: RangeBounds, { if let Some(root) = &self.root { Range { inner: root.reborrow().range_search(range) } } else { Range { inner: LeafRange::none() } } } /// Constructs a mutable double-ended iterator over a sub-range of elements in the map. /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will /// yield elements from min (inclusive) to max (exclusive). /// The range may also be entered as `(Bound, Bound)`, so for example /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive /// range from 4 to 10. /// /// # Panics /// /// Panics if range `start > end`. /// Panics if range `start == end` and both bounds are `Excluded`. 
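// As the `retain` body above shows, it is just `drain_filter` with the
// predicate negated and the resulting iterator dropped, so the two calls below
// are equivalent ways to keep the even keys (sketch assuming the unstable
// `btree_drain_filter` feature):
//
// ```
// #![feature(btree_drain_filter)]
// use std::collections::BTreeMap;
//
// let mut a: BTreeMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
// let mut b = a.clone();
// a.retain(|&k, _| k % 2 == 0);
// drop(b.drain_filter(|&k, _| k % 2 != 0)); // same effect, drained pairs discarded
// assert!(a.iter().eq(b.iter()));
// ```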
/// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BTreeMap; /// /// let mut map: BTreeMap<&str, i32> = ["Alice", "Bob", "Carol", "Cheryl"] /// .iter() /// .map(|&s| (s, 0)) /// .collect(); /// for (_, balance) in map.range_mut("B".."Cheryl") { /// *balance += 100; /// } /// for (name, balance) in &map { /// println!("{} => {}", name, balance); /// } /// ``` #[stable(feature = "btree_range", since = "1.17.0")] pub fn range_mut(&mut self, range: R) -> RangeMut<'_, K, V> where T: Ord, K: Borrow + Ord, R: RangeBounds, { if let Some(root) = &mut self.root { RangeMut { inner: root.borrow_valmut().range_search(range), _marker: PhantomData } } else { RangeMut { inner: LeafRange::none(), _marker: PhantomData } } } /// Gets the given key's corresponding entry in the map for in-place manipulation. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BTreeMap; /// /// let mut count: BTreeMap<&str, usize> = BTreeMap::new(); /// /// // count the number of occurrences of letters in the vec /// for x in vec!["a", "b", "a", "c", "a", "b"] { /// *count.entry(x).or_insert(0) += 1; /// } /// /// assert_eq!(count["a"], 3); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn entry(&mut self, key: K) -> Entry<'_, K, V> where K: Ord, { // FIXME(@porglezomp) Avoid allocating if we don't insert let (map, dormant_map) = DormantMutRef::new(self); let root_node = Self::ensure_is_owned(&mut map.root).borrow_mut(); match root_node.search_tree(&key) { Found(handle) => Occupied(OccupiedEntry { handle, dormant_map, _marker: PhantomData }), GoDown(handle) => { Vacant(VacantEntry { key, handle, dormant_map, _marker: PhantomData }) } } } /// Splits the collection into two at the given key. Returns everything after the given key, /// including the key. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BTreeMap; /// /// let mut a = BTreeMap::new(); /// a.insert(1, "a"); /// a.insert(2, "b"); /// a.insert(3, "c"); /// a.insert(17, "d"); /// a.insert(41, "e"); /// /// let b = a.split_off(&3); /// /// assert_eq!(a.len(), 2); /// assert_eq!(b.len(), 3); /// /// assert_eq!(a[&1], "a"); /// assert_eq!(a[&2], "b"); /// /// assert_eq!(b[&3], "c"); /// assert_eq!(b[&17], "d"); /// assert_eq!(b[&41], "e"); /// ``` #[stable(feature = "btree_split_off", since = "1.11.0")] pub fn split_off(&mut self, key: &Q) -> Self where K: Borrow + Ord, { if self.is_empty() { return Self::new(); } let total_num = self.len(); let left_root = self.root.as_mut().unwrap(); // unwrap succeeds because not empty let right_root = left_root.split_off(key); let (new_left_len, right_len) = Root::calc_split_length(total_num, &left_root, &right_root); self.length = new_left_len; BTreeMap { root: Some(right_root), length: right_len } } /// Creates an iterator that visits all elements (key-value pairs) in /// ascending key order and uses a closure to determine if an element should /// be removed. If the closure returns `true`, the element is removed from /// the map and yielded. If the closure returns `false`, or panics, the /// element remains in the map and will not be yielded. /// /// The iterator also lets you mutate the value of each element in the /// closure, regardless of whether you choose to keep or remove it. /// /// If the iterator is only partially consumed or not consumed at all, each /// of the remaining elements is still subjected to the closure, which may /// change its value and, by returning `true`, have the element removed and /// dropped. 
/// /// It is unspecified how many more elements will be subjected to the /// closure if a panic occurs in the closure, or a panic occurs while /// dropping an element, or if the `DrainFilter` value is leaked. /// /// # Examples /// /// Splitting a map into even and odd keys, reusing the original map: /// /// ``` /// #![feature(btree_drain_filter)] /// use std::collections::BTreeMap; /// /// let mut map: BTreeMap = (0..8).map(|x| (x, x)).collect(); /// let evens: BTreeMap<_, _> = map.drain_filter(|k, _v| k % 2 == 0).collect(); /// let odds = map; /// assert_eq!(evens.keys().copied().collect::>(), vec![0, 2, 4, 6]); /// assert_eq!(odds.keys().copied().collect::>(), vec![1, 3, 5, 7]); /// ``` #[unstable(feature = "btree_drain_filter", issue = "70530")] pub fn drain_filter(&mut self, pred: F) -> DrainFilter<'_, K, V, F> where K: Ord, F: FnMut(&K, &mut V) -> bool, { DrainFilter { pred, inner: self.drain_filter_inner() } } pub(super) fn drain_filter_inner(&mut self) -> DrainFilterInner<'_, K, V> where K: Ord, { if let Some(root) = self.root.as_mut() { let (root, dormant_root) = DormantMutRef::new(root); let front = root.borrow_mut().first_leaf_edge(); DrainFilterInner { length: &mut self.length, dormant_root: Some(dormant_root), cur_leaf_edge: Some(front), } } else { DrainFilterInner { length: &mut self.length, dormant_root: None, cur_leaf_edge: None } } } /// Creates a consuming iterator visiting all the keys, in sorted order. /// The map cannot be used after calling this. /// The iterator element type is `K`. /// /// # Examples /// /// ``` /// #![feature(map_into_keys_values)] /// use std::collections::BTreeMap; /// /// let mut a = BTreeMap::new(); /// a.insert(2, "b"); /// a.insert(1, "a"); /// /// let keys: Vec = a.into_keys().collect(); /// assert_eq!(keys, [1, 2]); /// ``` #[inline] #[unstable(feature = "map_into_keys_values", issue = "75294")] pub fn into_keys(self) -> IntoKeys { IntoKeys { inner: self.into_iter() } } /// Creates a consuming iterator visiting all the values, in order by key. /// The map cannot be used after calling this. /// The iterator element type is `V`. 
/// /// # Examples /// /// ``` /// #![feature(map_into_keys_values)] /// use std::collections::BTreeMap; /// /// let mut a = BTreeMap::new(); /// a.insert(1, "hello"); /// a.insert(2, "goodbye"); /// /// let values: Vec<&str> = a.into_values().collect(); /// assert_eq!(values, ["hello", "goodbye"]); /// ``` #[inline] #[unstable(feature = "map_into_keys_values", issue = "75294")] pub fn into_values(self) -> IntoValues { IntoValues { inner: self.into_iter() } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> IntoIterator for &'a BTreeMap { type Item = (&'a K, &'a V); type IntoIter = Iter<'a, K, V>; fn into_iter(self) -> Iter<'a, K, V> { self.iter() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K: 'a, V: 'a> Iterator for Iter<'a, K, V> { type Item = (&'a K, &'a V); fn next(&mut self) -> Option<(&'a K, &'a V)> { if self.length == 0 { None } else { self.length -= 1; Some(unsafe { self.range.next_unchecked() }) } } fn size_hint(&self) -> (usize, Option) { (self.length, Some(self.length)) } fn last(mut self) -> Option<(&'a K, &'a V)> { self.next_back() } fn min(mut self) -> Option<(&'a K, &'a V)> { self.next() } fn max(mut self) -> Option<(&'a K, &'a V)> { self.next_back() } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Iter<'_, K, V> {} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K: 'a, V: 'a> DoubleEndedIterator for Iter<'a, K, V> { fn next_back(&mut self) -> Option<(&'a K, &'a V)> { if self.length == 0 { None } else { self.length -= 1; Some(unsafe { self.range.next_back_unchecked() }) } } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for Iter<'_, K, V> { fn len(&self) -> usize { self.length } } #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Iter<'_, K, V> { fn clone(&self) -> Self { Iter { range: self.range.clone(), length: self.length } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> IntoIterator for &'a mut BTreeMap { type Item = (&'a K, &'a mut V); type IntoIter = IterMut<'a, K, V>; fn into_iter(self) -> IterMut<'a, K, V> { self.iter_mut() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K: 'a, V: 'a> Iterator for IterMut<'a, K, V> { type Item = (&'a K, &'a mut V); fn next(&mut self) -> Option<(&'a K, &'a mut V)> { if self.length == 0 { None } else { self.length -= 1; Some(unsafe { self.range.next_unchecked() }) } } fn size_hint(&self) -> (usize, Option) { (self.length, Some(self.length)) } fn last(mut self) -> Option<(&'a K, &'a mut V)> { self.next_back() } fn min(mut self) -> Option<(&'a K, &'a mut V)> { self.next() } fn max(mut self) -> Option<(&'a K, &'a mut V)> { self.next_back() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K: 'a, V: 'a> DoubleEndedIterator for IterMut<'a, K, V> { fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> { if self.length == 0 { None } else { self.length -= 1; Some(unsafe { self.range.next_back_unchecked() }) } } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for IterMut<'_, K, V> { fn len(&self) -> usize { self.length } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for IterMut<'_, K, V> {} impl<'a, K, V> IterMut<'a, K, V> { /// Returns an iterator of references over the remaining items. 
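// Because `Iter` and `IterMut` carry the map's `length` and decrement it on
// every step, `size_hint` is exact, which is what justifies the
// `ExactSizeIterator` impls above. For example:
//
// ```
// use std::collections::BTreeMap;
//
// let map: BTreeMap<i32, i32> = (0..5).map(|x| (x, x)).collect();
// let mut iter = map.iter();
// assert_eq!(iter.len(), 5); // from ExactSizeIterator
// iter.next();
// assert_eq!(iter.size_hint(), (4, Some(4)));
// ```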
#[inline] pub(super) fn iter(&self) -> Iter<'_, K, V> { Iter { range: self.range.iter(), length: self.length } } } #[stable(feature = "rust1", since = "1.0.0")] impl IntoIterator for BTreeMap { type Item = (K, V); type IntoIter = IntoIter; fn into_iter(self) -> IntoIter { let mut me = ManuallyDrop::new(self); if let Some(root) = me.root.take() { let full_range = root.into_dying().full_range(); IntoIter { range: full_range, length: me.length } } else { IntoIter { range: LeafRange::none(), length: 0 } } } } impl Drop for Dropper { fn drop(&mut self) { // Similar to advancing a non-fusing iterator. fn next_or_end(this: &mut Dropper) -> Option<(K, V)> { if this.remaining_length == 0 { unsafe { ptr::read(&this.front).deallocating_end() } None } else { this.remaining_length -= 1; Some(unsafe { this.front.deallocating_next_unchecked() }) } } struct DropGuard<'a, K, V>(&'a mut Dropper); impl<'a, K, V> Drop for DropGuard<'a, K, V> { fn drop(&mut self) { // Continue the same loop we perform below. This only runs when unwinding, so we // don't have to care about panics this time (they'll abort). while let Some(_pair) = next_or_end(&mut self.0) {} } } while let Some(pair) = next_or_end(self) { let guard = DropGuard(self); drop(pair); mem::forget(guard); } } } #[stable(feature = "btree_drop", since = "1.7.0")] impl Drop for IntoIter { fn drop(&mut self) { if let Some(front) = self.range.front.take() { Dropper { front, remaining_length: self.length }; } } } #[stable(feature = "rust1", since = "1.0.0")] impl Iterator for IntoIter { type Item = (K, V); fn next(&mut self) -> Option<(K, V)> { if self.length == 0 { None } else { self.length -= 1; Some(unsafe { self.range.front.as_mut().unwrap().deallocating_next_unchecked() }) } } fn size_hint(&self) -> (usize, Option) { (self.length, Some(self.length)) } } #[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for IntoIter { fn next_back(&mut self) -> Option<(K, V)> { if self.length == 0 { None } else { self.length -= 1; Some(unsafe { self.range.back.as_mut().unwrap().deallocating_next_back_unchecked() }) } } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for IntoIter { fn len(&self) -> usize { self.length } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for IntoIter {} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Iterator for Keys<'a, K, V> { type Item = &'a K; fn next(&mut self) -> Option<&'a K> { self.inner.next().map(|(k, _)| k) } fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } fn last(mut self) -> Option<&'a K> { self.next_back() } fn min(mut self) -> Option<&'a K> { self.next() } fn max(mut self) -> Option<&'a K> { self.next_back() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> { fn next_back(&mut self) -> Option<&'a K> { self.inner.next_back().map(|(k, _)| k) } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for Keys<'_, K, V> { fn len(&self) -> usize { self.inner.len() } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Keys<'_, K, V> {} #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Keys<'_, K, V> { fn clone(&self) -> Self { Keys { inner: self.inner.clone() } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Iterator for Values<'a, K, V> { type Item = &'a V; fn next(&mut self) -> Option<&'a V> { self.inner.next().map(|(_, v)| v) } fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } fn last(mut self) -> Option<&'a 
V> { self.next_back() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> { fn next_back(&mut self) -> Option<&'a V> { self.inner.next_back().map(|(_, v)| v) } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for Values<'_, K, V> { fn len(&self) -> usize { self.inner.len() } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Values<'_, K, V> {} #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Values<'_, K, V> { fn clone(&self) -> Self { Values { inner: self.inner.clone() } } } /// An iterator produced by calling `drain_filter` on BTreeMap. #[unstable(feature = "btree_drain_filter", issue = "70530")] pub struct DrainFilter<'a, K, V, F> where K: 'a, V: 'a, F: 'a + FnMut(&K, &mut V) -> bool, { pred: F, inner: DrainFilterInner<'a, K, V>, } /// Most of the implementation of DrainFilter are generic over the type /// of the predicate, thus also serving for BTreeSet::DrainFilter. pub(super) struct DrainFilterInner<'a, K: 'a, V: 'a> { /// Reference to the length field in the borrowed map, updated live. length: &'a mut usize, /// Buried reference to the root field in the borrowed map. /// Wrapped in `Option` to allow drop handler to `take` it. dormant_root: Option>>, /// Contains a leaf edge preceding the next element to be returned, or the last leaf edge. /// Empty if the map has no root, if iteration went beyond the last leaf edge, /// or if a panic occurred in the predicate. cur_leaf_edge: Option, K, V, marker::Leaf>, marker::Edge>>, } #[unstable(feature = "btree_drain_filter", issue = "70530")] impl Drop for DrainFilter<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool, { fn drop(&mut self) { self.for_each(drop); } } #[unstable(feature = "btree_drain_filter", issue = "70530")] impl fmt::Debug for DrainFilter<'_, K, V, F> where K: fmt::Debug, V: fmt::Debug, F: FnMut(&K, &mut V) -> bool, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("DrainFilter").field(&self.inner.peek()).finish() } } #[unstable(feature = "btree_drain_filter", issue = "70530")] impl Iterator for DrainFilter<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool, { type Item = (K, V); fn next(&mut self) -> Option<(K, V)> { self.inner.next(&mut self.pred) } fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } } impl<'a, K: 'a, V: 'a> DrainFilterInner<'a, K, V> { /// Allow Debug implementations to predict the next element. pub(super) fn peek(&self) -> Option<(&K, &V)> { let edge = self.cur_leaf_edge.as_ref()?; edge.reborrow().next_kv().ok().map(Handle::into_kv) } /// Implementation of a typical `DrainFilter::next` method, given the predicate. pub(super) fn next(&mut self, pred: &mut F) -> Option<(K, V)> where F: FnMut(&K, &mut V) -> bool, { while let Ok(mut kv) = self.cur_leaf_edge.take()?.next_kv() { let (k, v) = kv.kv_mut(); if pred(k, v) { *self.length -= 1; let (kv, pos) = kv.remove_kv_tracking(|| { // SAFETY: we will touch the root in a way that will not // invalidate the position returned. let root = unsafe { self.dormant_root.take().unwrap().awaken() }; root.pop_internal_level(); self.dormant_root = Some(DormantMutRef::new(root).1); }); self.cur_leaf_edge = Some(pos); return Some(kv); } self.cur_leaf_edge = Some(kv.next_leaf_edge()); } None } /// Implementation of a typical `DrainFilter::size_hint` method. pub(super) fn size_hint(&self) -> (usize, Option) { // In most of the btree iterators, `self.length` is the number of elements // yet to be visited. 
    // Here, it includes elements that were visited and that the predicate
    // decided not to drain. Making this upper bound more accurate requires
    // maintaining an extra field and is not worthwhile.
        (0, Some(*self.length))
    }
}

#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<K, V, F> FusedIterator for DrainFilter<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool {}

#[stable(feature = "btree_range", since = "1.17.0")]
impl<'a, K, V> Iterator for Range<'a, K, V> {
    type Item = (&'a K, &'a V);

    fn next(&mut self) -> Option<(&'a K, &'a V)> {
        if self.inner.is_empty() { None } else { Some(unsafe { self.next_unchecked() }) }
    }

    fn last(mut self) -> Option<(&'a K, &'a V)> {
        self.next_back()
    }

    fn min(mut self) -> Option<(&'a K, &'a V)> {
        self.next()
    }

    fn max(mut self) -> Option<(&'a K, &'a V)> {
        self.next_back()
    }
}

#[stable(feature = "map_values_mut", since = "1.10.0")]
impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
    type Item = &'a mut V;

    fn next(&mut self) -> Option<&'a mut V> {
        self.inner.next().map(|(_, v)| v)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }

    fn last(mut self) -> Option<&'a mut V> {
        self.next_back()
    }
}

#[stable(feature = "map_values_mut", since = "1.10.0")]
impl<'a, K, V> DoubleEndedIterator for ValuesMut<'a, K, V> {
    fn next_back(&mut self) -> Option<&'a mut V> {
        self.inner.next_back().map(|(_, v)| v)
    }
}

#[stable(feature = "map_values_mut", since = "1.10.0")]
impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
    fn len(&self) -> usize {
        self.inner.len()
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}

impl<'a, K, V> Range<'a, K, V> {
    unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
        unsafe { self.inner.front.as_mut().unwrap_unchecked().next_unchecked() }
    }
}

#[unstable(feature = "map_into_keys_values", issue = "75294")]
impl<K, V> Iterator for IntoKeys<K, V> {
    type Item = K;

    fn next(&mut self) -> Option<K> {
        self.inner.next().map(|(k, _)| k)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }

    fn last(mut self) -> Option<K> {
        self.next_back()
    }

    fn min(mut self) -> Option<K> {
        self.next()
    }

    fn max(mut self) -> Option<K> {
        self.next_back()
    }
}

#[unstable(feature = "map_into_keys_values", issue = "75294")]
impl<K, V> DoubleEndedIterator for IntoKeys<K, V> {
    fn next_back(&mut self) -> Option<K> {
        self.inner.next_back().map(|(k, _)| k)
    }
}

#[unstable(feature = "map_into_keys_values", issue = "75294")]
impl<K, V> ExactSizeIterator for IntoKeys<K, V> {
    fn len(&self) -> usize {
        self.inner.len()
    }
}

#[unstable(feature = "map_into_keys_values", issue = "75294")]
impl<K, V> FusedIterator for IntoKeys<K, V> {}

#[unstable(feature = "map_into_keys_values", issue = "75294")]
impl<K, V> Iterator for IntoValues<K, V> {
    type Item = V;

    fn next(&mut self) -> Option<V> {
        self.inner.next().map(|(_, v)| v)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }

    fn last(mut self) -> Option<V> {
        self.next_back()
    }
}

#[unstable(feature = "map_into_keys_values", issue = "75294")]
impl<K, V> DoubleEndedIterator for IntoValues<K, V> {
    fn next_back(&mut self) -> Option<V> {
        self.inner.next_back().map(|(_, v)| v)
    }
}

#[unstable(feature = "map_into_keys_values", issue = "75294")]
impl<K, V> ExactSizeIterator for IntoValues<K, V> {
    fn len(&self) -> usize {
        self.inner.len()
    }
}

#[unstable(feature = "map_into_keys_values", issue = "75294")]
impl<K, V> FusedIterator for IntoValues<K, V> {}

#[stable(feature = "btree_range", since = "1.17.0")]
impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
    fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
        if self.inner.is_empty() { None } else { Some(unsafe { self.next_back_unchecked() }) }
    }
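// A consequence of `DrainFilter`'s `Drop` impl above (`self.for_each(drop)`):
// merely dropping an unconsumed `DrainFilter` still removes every element the
// predicate accepts. A sketch, assuming the unstable feature:
//
// ```
// #![feature(btree_drain_filter)]
// use std::collections::BTreeMap;
//
// let mut map: BTreeMap<i32, i32> = (0..4).map(|x| (x, x)).collect();
// drop(map.drain_filter(|&k, _| k < 2)); // never call next()
// assert_eq!(map.keys().copied().collect::<Vec<_>>(), vec![2, 3]);
// ```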
} impl<'a, K, V> Range<'a, K, V> { unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) { unsafe { self.inner.back.as_mut().unwrap_unchecked().next_back_unchecked() } } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Range<'_, K, V> {} #[stable(feature = "btree_range", since = "1.17.0")] impl Clone for Range<'_, K, V> { fn clone(&self) -> Self { Range { inner: LeafRange { front: self.inner.front, back: self.inner.back } } } } #[stable(feature = "btree_range", since = "1.17.0")] impl<'a, K, V> Iterator for RangeMut<'a, K, V> { type Item = (&'a K, &'a mut V); fn next(&mut self) -> Option<(&'a K, &'a mut V)> { if self.inner.is_empty() { None } else { Some(unsafe { self.next_unchecked() }) } } fn last(mut self) -> Option<(&'a K, &'a mut V)> { self.next_back() } fn min(mut self) -> Option<(&'a K, &'a mut V)> { self.next() } fn max(mut self) -> Option<(&'a K, &'a mut V)> { self.next_back() } } impl<'a, K, V> RangeMut<'a, K, V> { unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) { unsafe { self.inner.front.as_mut().unwrap_unchecked().next_unchecked() } } /// Returns an iterator of references over the remaining items. #[inline] pub(super) fn iter(&self) -> Range<'_, K, V> { Range { inner: self.inner.reborrow() } } } #[stable(feature = "btree_range", since = "1.17.0")] impl<'a, K, V> DoubleEndedIterator for RangeMut<'a, K, V> { fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> { if self.inner.is_empty() { None } else { Some(unsafe { self.next_back_unchecked() }) } } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for RangeMut<'_, K, V> {} impl<'a, K, V> RangeMut<'a, K, V> { unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) { unsafe { self.inner.back.as_mut().unwrap_unchecked().next_back_unchecked() } } } #[stable(feature = "rust1", since = "1.0.0")] impl FromIterator<(K, V)> for BTreeMap { fn from_iter>(iter: T) -> BTreeMap { let mut map = BTreeMap::new(); map.extend(iter); map } } #[stable(feature = "rust1", since = "1.0.0")] impl Extend<(K, V)> for BTreeMap { #[inline] fn extend>(&mut self, iter: T) { iter.into_iter().for_each(move |(k, v)| { self.insert(k, v); }); } #[inline] fn extend_one(&mut self, (k, v): (K, V)) { self.insert(k, v); } } #[stable(feature = "extend_ref", since = "1.2.0")] impl<'a, K: Ord + Copy, V: Copy> Extend<(&'a K, &'a V)> for BTreeMap { fn extend>(&mut self, iter: I) { self.extend(iter.into_iter().map(|(&key, &value)| (key, value))); } #[inline] fn extend_one(&mut self, (&k, &v): (&'a K, &'a V)) { self.insert(k, v); } } #[stable(feature = "rust1", since = "1.0.0")] impl Hash for BTreeMap { fn hash(&self, state: &mut H) { for elt in self { elt.hash(state); } } } #[stable(feature = "rust1", since = "1.0.0")] impl Default for BTreeMap { /// Creates an empty `BTreeMap`. 
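// The `FromIterator` and `Extend` impls above insert pair by pair, so a later
// duplicate key replaces the stored value. For example:
//
// ```
// use std::collections::BTreeMap;
//
// let mut map: BTreeMap<i32, &str> = vec![(1, "a"), (2, "b")].into_iter().collect();
// map.extend(vec![(2, "B"), (3, "c")]);
// assert_eq!(map[&2], "B"); // the later pair replaced the value
// assert_eq!(map.len(), 3);
// ```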
fn default() -> BTreeMap { BTreeMap::new() } } #[stable(feature = "rust1", since = "1.0.0")] impl PartialEq for BTreeMap { fn eq(&self, other: &BTreeMap) -> bool { self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a == b) } } #[stable(feature = "rust1", since = "1.0.0")] impl Eq for BTreeMap {} #[stable(feature = "rust1", since = "1.0.0")] impl PartialOrd for BTreeMap { #[inline] fn partial_cmp(&self, other: &BTreeMap) -> Option { self.iter().partial_cmp(other.iter()) } } #[stable(feature = "rust1", since = "1.0.0")] impl Ord for BTreeMap { #[inline] fn cmp(&self, other: &BTreeMap) -> Ordering { self.iter().cmp(other.iter()) } } #[stable(feature = "rust1", since = "1.0.0")] impl Debug for BTreeMap { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_map().entries(self.iter()).finish() } } #[stable(feature = "rust1", since = "1.0.0")] impl Index<&Q> for BTreeMap where K: Borrow + Ord, Q: Ord, { type Output = V; /// Returns a reference to the value corresponding to the supplied key. /// /// # Panics /// /// Panics if the key is not present in the `BTreeMap`. #[inline] fn index(&self, key: &Q) -> &V { self.get(key).expect("no entry found for key") } } impl BTreeMap { /// Gets an iterator over the entries of the map, sorted by key. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); /// map.insert(3, "c"); /// map.insert(2, "b"); /// map.insert(1, "a"); /// /// for (key, value) in map.iter() { /// println!("{}: {}", key, value); /// } /// /// let (first_key, first_value) = map.iter().next().unwrap(); /// assert_eq!((*first_key, *first_value), (1, "a")); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn iter(&self) -> Iter<'_, K, V> { if let Some(root) = &self.root { let full_range = root.reborrow().full_range(); Iter { range: Range { inner: full_range }, length: self.length } } else { Iter { range: Range { inner: LeafRange::none() }, length: 0 } } } /// Gets a mutable iterator over the entries of the map, sorted by key. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); /// map.insert("a", 1); /// map.insert("b", 2); /// map.insert("c", 3); /// /// // add 10 to the value if the key isn't "a" /// for (key, value) in map.iter_mut() { /// if key != &"a" { /// *value += 10; /// } /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn iter_mut(&mut self) -> IterMut<'_, K, V> { if let Some(root) = &mut self.root { let full_range = root.borrow_valmut().full_range(); IterMut { range: RangeMut { inner: full_range, _marker: PhantomData }, length: self.length, } } else { IterMut { range: RangeMut { inner: LeafRange::none(), _marker: PhantomData }, length: 0, } } } /// Gets an iterator over the keys of the map, in sorted order. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BTreeMap; /// /// let mut a = BTreeMap::new(); /// a.insert(2, "b"); /// a.insert(1, "a"); /// /// let keys: Vec<_> = a.keys().cloned().collect(); /// assert_eq!(keys, [1, 2]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn keys(&self) -> Keys<'_, K, V> { Keys { inner: self.iter() } } /// Gets an iterator over the values of the map, in order by key. 
impl<K, V> BTreeMap<K, V> {
    /// Gets an iterator over the values of the map, in order by key.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// a.insert(1, "hello");
    /// a.insert(2, "goodbye");
    ///
    /// let values: Vec<&str> = a.values().cloned().collect();
    /// assert_eq!(values, ["hello", "goodbye"]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn values(&self) -> Values<'_, K, V> {
        Values { inner: self.iter() }
    }

    /// Gets a mutable iterator over the values of the map, in order by key.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// a.insert(1, String::from("hello"));
    /// a.insert(2, String::from("goodbye"));
    ///
    /// for value in a.values_mut() {
    ///     value.push_str("!");
    /// }
    ///
    /// let values: Vec<String> = a.values().cloned().collect();
    /// assert_eq!(values, [String::from("hello!"),
    ///                     String::from("goodbye!")]);
    /// ```
    #[stable(feature = "map_values_mut", since = "1.10.0")]
    pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> {
        ValuesMut { inner: self.iter_mut() }
    }

    /// Returns the number of elements in the map.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// assert_eq!(a.len(), 0);
    /// a.insert(1, "a");
    /// assert_eq!(a.len(), 1);
    /// ```
    #[doc(alias = "length")]
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
    pub const fn len(&self) -> usize {
        self.length
    }

    /// Returns `true` if the map contains no elements.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// assert!(a.is_empty());
    /// a.insert(1, "a");
    /// assert!(!a.is_empty());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// If the root node is the empty (non-allocated) root node, allocate our
    /// own node. Is an associated function to avoid borrowing the entire BTreeMap.
    fn ensure_is_owned(root: &mut Option<Root<K, V>>) -> &mut Root<K, V> {
        root.get_or_insert_with(Root::new)
    }
}

#[cfg(test)]
mod tests;

use core::marker::PhantomData;
use core::ptr::NonNull;

/// Models a reborrow of some unique reference, when you know that the reborrow
/// and all its descendants (i.e., all pointers and references derived from it)
/// will not be used any more at some point, after which you want to use the
/// original unique reference again.
///
/// The borrow checker usually handles this stacking of borrows for you, but
/// some control flows that accomplish this stacking are too complicated for
/// the compiler to follow. A `DormantMutRef` allows you to check borrowing
/// yourself, while still expressing its stacked nature, and encapsulating
/// the raw pointer code needed to do this without undefined behavior.
pub struct DormantMutRef<'a, T> {
    ptr: NonNull<T>,
    _marker: PhantomData<&'a mut T>,
}

unsafe impl<'a, T> Sync for DormantMutRef<'a, T> where &'a mut T: Sync {}
unsafe impl<'a, T> Send for DormantMutRef<'a, T> where &'a mut T: Send {}
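// Editor's sketch of the intended call pattern (`awaken` is unsafe; the caller
// must guarantee the reborrow and everything derived from it is dead first):
//
// ```
// let mut value = 0_i32;
// let (reborrowed, dormant) = DormantMutRef::new(&mut value);
// *reborrowed += 1;                           // use the short-lived reborrow,
// let original = unsafe { dormant.awaken() }; // then revive the original borrow
// *original += 1;
// assert_eq!(value, 2);
// ```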
impl<'a, T> DormantMutRef<'a, T> {
    /// Capture a unique borrow, and immediately reborrow it. For the compiler,
    /// the lifetime of the new reference is the same as the lifetime of the
    /// original reference, but you promise to use it for a shorter period.
    pub fn new(t: &'a mut T) -> (&'a mut T, Self) {
        let ptr = NonNull::from(t);
        // SAFETY: we hold the borrow throughout 'a via `_marker`, and we expose
        // only this reference, so it is unique.
        let new_ref = unsafe { &mut *ptr.as_ptr() };
        (new_ref, Self { ptr, _marker: PhantomData })
    }

    /// Revert to the unique borrow initially captured.
    ///
    /// # Safety
    ///
    /// The reborrow must have ended, i.e., the reference returned by `new` and
    /// all pointers and references derived from it, must not be used anymore.
    pub unsafe fn awaken(self) -> &'a mut T {
        // SAFETY: our own safety conditions imply this reference is again unique.
        unsafe { &mut *self.ptr.as_ptr() }
    }
}

#[cfg(test)]
mod tests;

use core::intrinsics;
use core::mem;
use core::ptr;

/// This replaces the value behind the `v` unique reference by calling the
/// relevant function.
///
/// If a panic occurs in the `change` closure, the entire process will be aborted.
#[allow(dead_code)] // keep as illustration and for future use
#[inline]
pub fn take_mut<T>(v: &mut T, change: impl FnOnce(T) -> T) {
    replace(v, |value| (change(value), ()))
}

/// This replaces the value behind the `v` unique reference by calling the
/// relevant function, and returns a result obtained along the way.
///
/// If a panic occurs in the `change` closure, the entire process will be aborted.
#[inline]
pub fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
    struct PanicGuard;
    impl Drop for PanicGuard {
        fn drop(&mut self) {
            intrinsics::abort()
        }
    }
    let guard = PanicGuard;
    let value = unsafe { ptr::read(v) };
    let (new_value, ret) = change(value);
    unsafe {
        ptr::write(v, new_value);
    }
    mem::forget(guard);
    ret
}
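// Editor's sketch of the same take-and-replace idea in safe Rust (using an
// `Option` slot instead of raw reads/writes and an abort guard; a hypothetical
// helper, not used by this module):
//
// ```
// fn take_mut_with_option<T>(slot: &mut Option<T>, change: impl FnOnce(T) -> T) {
//     let value = slot.take().expect("slot must be full");
//     *slot = Some(change(value)); // if `change` panics, the slot is simply left empty
// }
//
// let mut slot = Some(String::from("a"));
// take_mut_with_option(&mut slot, |s| s + "b");
// assert_eq!(slot.as_deref(), Some("ab"));
// ```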
use super::node::{ForceResult::*, Root};
use super::search::SearchResult::*;
use core::borrow::Borrow;

impl<K, V> Root<K, V> {
    /// Calculates the length of both trees that result from splitting up
    /// a given number of distinct key-value pairs.
    pub fn calc_split_length(
        total_num: usize,
        root_a: &Root<K, V>,
        root_b: &Root<K, V>,
    ) -> (usize, usize) {
        let (length_a, length_b);
        if root_a.height() < root_b.height() {
            length_a = root_a.reborrow().calc_length();
            length_b = total_num - length_a;
            debug_assert_eq!(length_b, root_b.reborrow().calc_length());
        } else {
            length_b = root_b.reborrow().calc_length();
            length_a = total_num - length_b;
            debug_assert_eq!(length_a, root_a.reborrow().calc_length());
        }
        (length_a, length_b)
    }

    /// Split off a tree with key-value pairs at and after the given key.
    /// The result is meaningful only if the tree is ordered by key,
    /// and if the ordering of `Q` corresponds to that of `K`.
    /// If `self` respects all `BTreeMap` tree invariants, then both
    /// `self` and the returned tree will respect those invariants.
    pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
    where
        K: Borrow<Q>,
    {
        let left_root = self;
        let mut right_root = Root::new_pillar(left_root.height());
        let mut left_node = left_root.borrow_mut();
        let mut right_node = right_root.borrow_mut();

        loop {
            let mut split_edge = match left_node.search_node(key) {
                // key is going to the right tree
                Found(kv) => kv.left_edge(),
                GoDown(edge) => edge,
            };

            split_edge.move_suffix(&mut right_node);

            match (split_edge.force(), right_node.force()) {
                (Internal(edge), Internal(node)) => {
                    left_node = edge.descend();
                    right_node = node.first_edge().descend();
                }
                (Leaf(_), Leaf(_)) => break,
                _ => unreachable!(),
            }
        }

        left_root.fix_right_border();
        right_root.fix_left_border();
        right_root
    }

    /// Creates a tree consisting of empty nodes.
    fn new_pillar(height: usize) -> Self {
        let mut root = Root::new();
        for _ in 0..height {
            root.push_internal_level();
        }
        root
    }
}

// This is pretty much entirely stolen from TreeSet, since BTreeMap has an identical interface
// to TreeMap

use core::borrow::Borrow;
use core::cmp::Ordering::{Equal, Greater, Less};
use core::cmp::{max, min};
use core::fmt::{self, Debug};
use core::iter::{FromIterator, FusedIterator, Peekable};
use core::ops::{BitAnd, BitOr, BitXor, RangeBounds, Sub};

use super::map::{BTreeMap, Keys};
use super::merge_iter::MergeIterInner;
use super::Recover;

// FIXME(conventions): implement bounded iterators

/// A set based on a B-Tree.
///
/// See [`BTreeMap`]'s documentation for a detailed discussion of this collection's performance
/// benefits and drawbacks.
///
/// It is a logic error for an item to be modified in such a way that the item's ordering relative
/// to any other item, as determined by the [`Ord`] trait, changes while it is in the set. This is
/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
/// The behavior resulting from such a logic error is not specified, but will not result in
/// undefined behavior. This could include panics, incorrect results, aborts, memory leaks, and
/// non-termination.
///
/// [`Ord`]: core::cmp::Ord
/// [`Cell`]: core::cell::Cell
/// [`RefCell`]: core::cell::RefCell
///
/// # Examples
///
/// ```
/// use std::collections::BTreeSet;
///
/// // Type inference lets us omit an explicit type signature (which
/// // would be `BTreeSet<&str>` in this example).
/// let mut books = BTreeSet::new();
///
/// // Add some books.
/// books.insert("A Dance With Dragons");
/// books.insert("To Kill a Mockingbird");
/// books.insert("The Odyssey");
/// books.insert("The Great Gatsby");
///
/// // Check for a specific one.
/// if !books.contains("The Winds of Winter") {
///     println!("We have {} books, but The Winds of Winter ain't one.",
///              books.len());
/// }
///
/// // Remove a book.
/// books.remove("The Odyssey");
///
/// // Iterate over everything.
/// for book in &books {
///     println!("{}", book);
/// }
/// ```
#[derive(Hash, PartialEq, Eq, Ord, PartialOrd)]
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "BTreeSet")]
pub struct BTreeSet<T> {
    map: BTreeMap<T, ()>,
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone> Clone for BTreeSet<T> {
    fn clone(&self) -> Self {
        BTreeSet { map: self.map.clone() }
    }

    fn clone_from(&mut self, other: &Self) {
        self.map.clone_from(&other.map);
    }
}

/// An iterator over the items of a `BTreeSet`.
///
/// This `struct` is created by the [`iter`] method on [`BTreeSet`].
/// See its documentation for more.
///
/// [`iter`]: BTreeSet::iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> {
    iter: Keys<'a, T, ()>,
}

#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Iter").field(&self.iter.clone()).finish()
    }
}

/// An owning iterator over the items of a `BTreeSet`.
///
/// This `struct` is created by the [`into_iter`] method on [`BTreeSet`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`into_iter`]: BTreeSet#method.into_iter
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct IntoIter<T> {
    iter: super::map::IntoIter<T, ()>,
}
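// Editor's illustration of the design above: a `BTreeSet<T>` is a thin wrapper
// around `BTreeMap<T, ()>`, so set operations are map operations on unit
// values. A minimal standalone analogue (hypothetical `TinySet`):
//
// ```
// use std::collections::BTreeMap;
//
// struct TinySet<T: Ord> { map: BTreeMap<T, ()> }
//
// impl<T: Ord> TinySet<T> {
//     fn new() -> Self { TinySet { map: BTreeMap::new() } }
//     fn insert(&mut self, value: T) -> bool { self.map.insert(value, ()).is_none() }
//     fn contains(&self, value: &T) -> bool { self.map.contains_key(value) }
// }
//
// let mut s = TinySet::new();
// assert!(s.insert(3) && !s.insert(3) && s.contains(&3));
// ```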
/// An iterator over a sub-range of items in a `BTreeSet`.
///
/// This `struct` is created by the [`range`] method on [`BTreeSet`].
/// See its documentation for more.
///
/// [`range`]: BTreeSet::range
#[derive(Debug)]
#[stable(feature = "btree_range", since = "1.17.0")]
pub struct Range<'a, T: 'a> {
    iter: super::map::Range<'a, T, ()>,
}

/// A lazy iterator producing elements in the difference of `BTreeSet`s.
///
/// This `struct` is created by the [`difference`] method on [`BTreeSet`].
/// See its documentation for more.
///
/// [`difference`]: BTreeSet::difference
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Difference<'a, T: 'a> {
    inner: DifferenceInner<'a, T>,
}

#[derive(Debug)]
enum DifferenceInner<'a, T: 'a> {
    Stitch {
        // iterate all of `self` and some of `other`, spotting matches along the way
        self_iter: Iter<'a, T>,
        other_iter: Peekable<Iter<'a, T>>,
    },
    Search {
        // iterate `self`, look up in `other`
        self_iter: Iter<'a, T>,
        other_set: &'a BTreeSet<T>,
    },
    Iterate(Iter<'a, T>), // simply produce all values in `self`
}

#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for Difference<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Difference").field(&self.inner).finish()
    }
}

/// A lazy iterator producing elements in the symmetric difference of `BTreeSet`s.
///
/// This `struct` is created by the [`symmetric_difference`] method on
/// [`BTreeSet`]. See its documentation for more.
///
/// [`symmetric_difference`]: BTreeSet::symmetric_difference
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SymmetricDifference<'a, T: 'a>(MergeIterInner<Iter<'a, T>>);

#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for SymmetricDifference<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("SymmetricDifference").field(&self.0).finish()
    }
}

/// A lazy iterator producing elements in the intersection of `BTreeSet`s.
///
/// This `struct` is created by the [`intersection`] method on [`BTreeSet`].
/// See its documentation for more.
///
/// [`intersection`]: BTreeSet::intersection
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Intersection<'a, T: 'a> {
    inner: IntersectionInner<'a, T>,
}

#[derive(Debug)]
enum IntersectionInner<'a, T: 'a> {
    Stitch {
        // iterate similarly sized sets jointly, spotting matches along the way
        a: Iter<'a, T>,
        b: Iter<'a, T>,
    },
    Search {
        // iterate a small set, look up in the large set
        small_iter: Iter<'a, T>,
        large_set: &'a BTreeSet<T>,
    },
    Answer(Option<&'a T>), // return a specific value or emptiness
}

#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for Intersection<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Intersection").field(&self.inner).finish()
    }
}

/// A lazy iterator producing elements in the union of `BTreeSet`s.
///
/// This `struct` is created by the [`union`] method on [`BTreeSet`].
/// See its documentation for more.
///
/// [`union`]: BTreeSet::union
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Union<'a, T: 'a>(MergeIterInner<Iter<'a, T>>);

#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for Union<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Union").field(&self.0).finish()
    }
}
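// Editor's sketch of the strategy choice driven by the constant defined below:
// with a tipping factor of 16, the `Search` variants are chosen only when one
// set is at least 16 times smaller than the other; otherwise the two sorted
// iterators are stitched together.
//
// ```
// const TIPPING: usize = 16; // mirrors ITER_PERFORMANCE_TIPPING_SIZE_DIFF
// fn prefer_search(small_len: usize, large_len: usize) -> bool {
//     small_len <= large_len / TIPPING // division, so it cannot overflow
// }
// assert!(prefer_search(4, 64));
// assert!(!prefer_search(5, 64));
// ```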
// This constant is used by functions that compare two sets.
// It estimates the relative size at which searching performs better
// than iterating, based on the benchmarks in
// https://github.com/ssomers/rust_bench_btreeset_intersection.
// It's used to divide rather than multiply sizes, to rule out overflow,
// and it's a power of two to make that division cheap.
const ITER_PERFORMANCE_TIPPING_SIZE_DIFF: usize = 16;

impl<T> BTreeSet<T> {
    /// Makes a new, empty `BTreeSet`.
    ///
    /// Does not allocate anything on its own.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![allow(unused_mut)]
    /// use std::collections::BTreeSet;
    ///
    /// let mut set: BTreeSet<i32> = BTreeSet::new();
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
    pub const fn new() -> BTreeSet<T>
    where
        T: Ord,
    {
        BTreeSet { map: BTreeMap::new() }
    }

    /// Constructs a double-ended iterator over a sub-range of elements in the set.
    /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
    /// yield elements from min (inclusive) to max (exclusive).
    /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
    /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
    /// range from 4 to 10.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    /// use std::ops::Bound::Included;
    ///
    /// let mut set = BTreeSet::new();
    /// set.insert(3);
    /// set.insert(5);
    /// set.insert(8);
    /// for &elem in set.range((Included(&4), Included(&8))) {
    ///     println!("{}", elem);
    /// }
    /// assert_eq!(Some(&5), set.range(4..).next());
    /// ```
    #[stable(feature = "btree_range", since = "1.17.0")]
    pub fn range<K: ?Sized, R>(&self, range: R) -> Range<'_, T>
    where
        K: Ord,
        T: Borrow<K> + Ord,
        R: RangeBounds<K>,
    {
        Range { iter: self.map.range(range) }
    }

    /// Visits the values representing the difference,
    /// i.e., the values that are in `self` but not in `other`,
    /// in ascending order.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let mut a = BTreeSet::new();
    /// a.insert(1);
    /// a.insert(2);
    ///
    /// let mut b = BTreeSet::new();
    /// b.insert(2);
    /// b.insert(3);
    ///
    /// let diff: Vec<_> = a.difference(&b).cloned().collect();
    /// assert_eq!(diff, [1]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn difference<'a>(&'a self, other: &'a BTreeSet<T>) -> Difference<'a, T>
    where
        T: Ord,
    {
        let (self_min, self_max) =
            if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
                (self_min, self_max)
            } else {
                return Difference { inner: DifferenceInner::Iterate(self.iter()) };
            };
        let (other_min, other_max) =
            if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
                (other_min, other_max)
            } else {
                return Difference { inner: DifferenceInner::Iterate(self.iter()) };
            };
        Difference {
            inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) {
                (Greater, _) | (_, Less) => DifferenceInner::Iterate(self.iter()),
                (Equal, _) => {
                    let mut self_iter = self.iter();
                    self_iter.next();
                    DifferenceInner::Iterate(self_iter)
                }
                (_, Equal) => {
                    let mut self_iter = self.iter();
                    self_iter.next_back();
                    DifferenceInner::Iterate(self_iter)
                }
                _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
                    DifferenceInner::Search { self_iter: self.iter(), other_set: other }
                }
                _ => DifferenceInner::Stitch {
                    self_iter: self.iter(),
                    other_iter: other.iter().peekable(),
                },
            },
        }
    }
    /// Visits the values representing the symmetric difference,
    /// i.e., the values that are in `self` or in `other` but not in both,
    /// in ascending order.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let mut a = BTreeSet::new();
    /// a.insert(1);
    /// a.insert(2);
    ///
    /// let mut b = BTreeSet::new();
    /// b.insert(2);
    /// b.insert(3);
    ///
    /// let sym_diff: Vec<_> = a.symmetric_difference(&b).cloned().collect();
    /// assert_eq!(sym_diff, [1, 3]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn symmetric_difference<'a>(&'a self, other: &'a BTreeSet<T>) -> SymmetricDifference<'a, T>
    where
        T: Ord,
    {
        SymmetricDifference(MergeIterInner::new(self.iter(), other.iter()))
    }

    /// Visits the values representing the intersection,
    /// i.e., the values that are both in `self` and `other`,
    /// in ascending order.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let mut a = BTreeSet::new();
    /// a.insert(1);
    /// a.insert(2);
    ///
    /// let mut b = BTreeSet::new();
    /// b.insert(2);
    /// b.insert(3);
    ///
    /// let intersection: Vec<_> = a.intersection(&b).cloned().collect();
    /// assert_eq!(intersection, [2]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn intersection<'a>(&'a self, other: &'a BTreeSet<T>) -> Intersection<'a, T>
    where
        T: Ord,
    {
        let (self_min, self_max) =
            if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
                (self_min, self_max)
            } else {
                return Intersection { inner: IntersectionInner::Answer(None) };
            };
        let (other_min, other_max) =
            if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
                (other_min, other_max)
            } else {
                return Intersection { inner: IntersectionInner::Answer(None) };
            };
        Intersection {
            inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) {
                (Greater, _) | (_, Less) => IntersectionInner::Answer(None),
                (Equal, _) => IntersectionInner::Answer(Some(self_min)),
                (_, Equal) => IntersectionInner::Answer(Some(self_max)),
                _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
                    IntersectionInner::Search { small_iter: self.iter(), large_set: other }
                }
                _ if other.len() <= self.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
                    IntersectionInner::Search { small_iter: other.iter(), large_set: self }
                }
                _ => IntersectionInner::Stitch { a: self.iter(), b: other.iter() },
            },
        }
    }

    /// Visits the values representing the union,
    /// i.e., all the values in `self` or `other`, without duplicates,
    /// in ascending order.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let mut a = BTreeSet::new();
    /// a.insert(1);
    ///
    /// let mut b = BTreeSet::new();
    /// b.insert(2);
    ///
    /// let union: Vec<_> = a.union(&b).cloned().collect();
    /// assert_eq!(union, [1, 2]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn union<'a>(&'a self, other: &'a BTreeSet<T>) -> Union<'a, T>
    where
        T: Ord,
    {
        Union(MergeIterInner::new(self.iter(), other.iter()))
    }

    /// Clears the set, removing all values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let mut v = BTreeSet::new();
    /// v.insert(1);
    /// v.clear();
    /// assert!(v.is_empty());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn clear(&mut self) {
        self.map.clear()
    }
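    // Editor's sketch tying the four views together (public API): the
    // symmetric difference equals the union minus the intersection.
    //
    // ```
    // use std::collections::BTreeSet;
    //
    // let a: BTreeSet<i32> = [1, 2, 3].iter().cloned().collect();
    // let b: BTreeSet<i32> = [2, 3, 4].iter().cloned().collect();
    //
    // let sym: BTreeSet<i32> = a.symmetric_difference(&b).cloned().collect();
    // let via_ops: BTreeSet<i32> = &(&a | &b) - &(&a & &b);
    // assert_eq!(sym, via_ops);
    // ```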
    /// Returns `true` if the set contains a value.
    ///
    /// The value may be any borrowed form of the set's value type,
    /// but the ordering on the borrowed form *must* match the
    /// ordering on the value type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
    /// assert_eq!(set.contains(&1), true);
    /// assert_eq!(set.contains(&4), false);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
    where
        T: Borrow<Q> + Ord,
        Q: Ord,
    {
        self.map.contains_key(value)
    }

    /// Returns a reference to the value in the set, if any, that is equal to the given value.
    ///
    /// The value may be any borrowed form of the set's value type,
    /// but the ordering on the borrowed form *must* match the
    /// ordering on the value type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
    /// assert_eq!(set.get(&2), Some(&2));
    /// assert_eq!(set.get(&4), None);
    /// ```
    #[stable(feature = "set_recovery", since = "1.9.0")]
    pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T>
    where
        T: Borrow<Q> + Ord,
        Q: Ord,
    {
        Recover::get(&self.map, value)
    }

    /// Returns `true` if `self` has no elements in common with `other`.
    /// This is equivalent to checking for an empty intersection.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let a: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
    /// let mut b = BTreeSet::new();
    ///
    /// assert_eq!(a.is_disjoint(&b), true);
    /// b.insert(4);
    /// assert_eq!(a.is_disjoint(&b), true);
    /// b.insert(1);
    /// assert_eq!(a.is_disjoint(&b), false);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn is_disjoint(&self, other: &BTreeSet<T>) -> bool
    where
        T: Ord,
    {
        self.intersection(other).next().is_none()
    }

    /// Returns `true` if the set is a subset of another,
    /// i.e., `other` contains at least all the values in `self`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let sup: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
    /// let mut set = BTreeSet::new();
    ///
    /// assert_eq!(set.is_subset(&sup), true);
    /// set.insert(2);
    /// assert_eq!(set.is_subset(&sup), true);
    /// set.insert(4);
    /// assert_eq!(set.is_subset(&sup), false);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn is_subset(&self, other: &BTreeSet<T>) -> bool
    where
        T: Ord,
    {
        // Same result as self.difference(other).next().is_none()
        // but the code below is faster (hugely in some cases).
        if self.len() > other.len() {
            return false;
        }
        let (self_min, self_max) =
            if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
                (self_min, self_max)
            } else {
                return true; // self is empty
            };
        let (other_min, other_max) =
            if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
                (other_min, other_max)
            } else {
                return false; // other is empty
            };
        let mut self_iter = self.iter();
        match self_min.cmp(other_min) {
            Less => return false,
            Equal => {
                self_iter.next();
            }
            Greater => (),
        }
        match self_max.cmp(other_max) {
            Greater => return false,
            Equal => {
                self_iter.next_back();
            }
            Less => (),
        }
        if self_iter.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF {
            for next in self_iter {
                if !other.contains(next) {
                    return false;
                }
            }
        } else {
            let mut other_iter = other.iter();
            other_iter.next();
            other_iter.next_back();
            let mut self_next = self_iter.next();
            while let Some(self1) = self_next {
                match other_iter.next().map_or(Less, |other1| self1.cmp(other1)) {
                    Less => return false,
                    Equal => self_next = self_iter.next(),
                    Greater => (),
                }
            }
        }
        true
    }
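    // Editor's sketch of the equivalence stated in the comment above:
    // `is_subset` agrees with checking that the difference is empty; the
    // dedicated method is simply faster.
    //
    // ```
    // use std::collections::BTreeSet;
    //
    // let small: BTreeSet<i32> = [2, 3].iter().cloned().collect();
    // let big: BTreeSet<i32> = (1..=10).collect();
    // assert_eq!(small.is_subset(&big), small.difference(&big).next().is_none());
    // ```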
    /// Returns `true` if the set is a superset of another,
    /// i.e., `self` contains at least all the values in `other`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let sub: BTreeSet<_> = [1, 2].iter().cloned().collect();
    /// let mut set = BTreeSet::new();
    ///
    /// assert_eq!(set.is_superset(&sub), false);
    ///
    /// set.insert(0);
    /// set.insert(1);
    /// assert_eq!(set.is_superset(&sub), false);
    ///
    /// set.insert(2);
    /// assert_eq!(set.is_superset(&sub), true);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn is_superset(&self, other: &BTreeSet<T>) -> bool
    where
        T: Ord,
    {
        other.is_subset(self)
    }

    /// Returns a reference to the first value in the set, if any.
    /// This value is always the minimum of all values in the set.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(map_first_last)]
    /// use std::collections::BTreeSet;
    ///
    /// let mut set = BTreeSet::new();
    /// assert_eq!(set.first(), None);
    /// set.insert(1);
    /// assert_eq!(set.first(), Some(&1));
    /// set.insert(2);
    /// assert_eq!(set.first(), Some(&1));
    /// ```
    #[unstable(feature = "map_first_last", issue = "62924")]
    pub fn first(&self) -> Option<&T>
    where
        T: Ord,
    {
        self.map.first_key_value().map(|(k, _)| k)
    }

    /// Returns a reference to the last value in the set, if any.
    /// This value is always the maximum of all values in the set.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(map_first_last)]
    /// use std::collections::BTreeSet;
    ///
    /// let mut set = BTreeSet::new();
    /// assert_eq!(set.last(), None);
    /// set.insert(1);
    /// assert_eq!(set.last(), Some(&1));
    /// set.insert(2);
    /// assert_eq!(set.last(), Some(&2));
    /// ```
    #[unstable(feature = "map_first_last", issue = "62924")]
    pub fn last(&self) -> Option<&T>
    where
        T: Ord,
    {
        self.map.last_key_value().map(|(k, _)| k)
    }

    /// Removes the first value from the set and returns it, if any.
    /// The first value is always the minimum value in the set.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(map_first_last)]
    /// use std::collections::BTreeSet;
    ///
    /// let mut set = BTreeSet::new();
    ///
    /// set.insert(1);
    /// while let Some(n) = set.pop_first() {
    ///     assert_eq!(n, 1);
    /// }
    /// assert!(set.is_empty());
    /// ```
    #[unstable(feature = "map_first_last", issue = "62924")]
    pub fn pop_first(&mut self) -> Option<T>
    where
        T: Ord,
    {
        self.map.pop_first().map(|kv| kv.0)
    }

    /// Removes the last value from the set and returns it, if any.
    /// The last value is always the maximum value in the set.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(map_first_last)]
    /// use std::collections::BTreeSet;
    ///
    /// let mut set = BTreeSet::new();
    ///
    /// set.insert(1);
    /// while let Some(n) = set.pop_last() {
    ///     assert_eq!(n, 1);
    /// }
    /// assert!(set.is_empty());
    /// ```
    #[unstable(feature = "map_first_last", issue = "62924")]
    pub fn pop_last(&mut self) -> Option<T>
    where
        T: Ord,
    {
        self.map.pop_last().map(|kv| kv.0)
    }
    /// Adds a value to the set.
    ///
    /// If the set did not have this value present, `true` is returned.
    ///
    /// If the set did have this value present, `false` is returned, and the
    /// entry is not updated. See the [module-level documentation] for more.
    ///
    /// [module-level documentation]: index.html#insert-and-complex-keys
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let mut set = BTreeSet::new();
    ///
    /// assert_eq!(set.insert(2), true);
    /// assert_eq!(set.insert(2), false);
    /// assert_eq!(set.len(), 1);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn insert(&mut self, value: T) -> bool
    where
        T: Ord,
    {
        self.map.insert(value, ()).is_none()
    }

    /// Adds a value to the set, replacing the existing value, if any, that is equal to the given
    /// one. Returns the replaced value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let mut set = BTreeSet::new();
    /// set.insert(Vec::<i32>::new());
    ///
    /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 0);
    /// set.replace(Vec::with_capacity(10));
    /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 10);
    /// ```
    #[stable(feature = "set_recovery", since = "1.9.0")]
    pub fn replace(&mut self, value: T) -> Option<T>
    where
        T: Ord,
    {
        Recover::replace(&mut self.map, value)
    }

    /// Removes a value from the set. Returns whether the value was
    /// present in the set.
    ///
    /// The value may be any borrowed form of the set's value type,
    /// but the ordering on the borrowed form *must* match the
    /// ordering on the value type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let mut set = BTreeSet::new();
    ///
    /// set.insert(2);
    /// assert_eq!(set.remove(&2), true);
    /// assert_eq!(set.remove(&2), false);
    /// ```
    #[doc(alias = "delete")]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
    where
        T: Borrow<Q> + Ord,
        Q: Ord,
    {
        self.map.remove(value).is_some()
    }

    /// Removes and returns the value in the set, if any, that is equal to the given one.
    ///
    /// The value may be any borrowed form of the set's value type,
    /// but the ordering on the borrowed form *must* match the
    /// ordering on the value type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let mut set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
    /// assert_eq!(set.take(&2), Some(2));
    /// assert_eq!(set.take(&2), None);
    /// ```
    #[stable(feature = "set_recovery", since = "1.9.0")]
    pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
    where
        T: Borrow<Q> + Ord,
        Q: Ord,
    {
        Recover::take(&mut self.map, value)
    }

    /// Retains only the elements specified by the predicate.
    ///
    /// In other words, remove all elements `e` such that `f(&e)` returns `false`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let xs = [1, 2, 3, 4, 5, 6];
    /// let mut set: BTreeSet<i32> = xs.iter().cloned().collect();
    /// // Keep only the even numbers.
    /// set.retain(|&k| k % 2 == 0);
    /// assert!(set.iter().eq([2, 4, 6].iter()));
    /// ```
    #[stable(feature = "btree_retain", since = "1.53.0")]
    pub fn retain<F>(&mut self, mut f: F)
    where
        T: Ord,
        F: FnMut(&T) -> bool,
    {
        self.drain_filter(|v| !f(v));
    }
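    // Editor's note on the implementation above: `retain` keeps what the
    // predicate accepts by draining what it rejects, so the call below has the
    // same effect as draining the odd values (public API):
    //
    // ```
    // use std::collections::BTreeSet;
    //
    // let mut set: BTreeSet<i32> = (0..10).collect();
    // set.retain(|&v| v % 2 == 0);
    // assert!(set.iter().all(|&v| v % 2 == 0));
    // ```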
    /// Moves all elements from `other` into `Self`, leaving `other` empty.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let mut a = BTreeSet::new();
    /// a.insert(1);
    /// a.insert(2);
    /// a.insert(3);
    ///
    /// let mut b = BTreeSet::new();
    /// b.insert(3);
    /// b.insert(4);
    /// b.insert(5);
    ///
    /// a.append(&mut b);
    ///
    /// assert_eq!(a.len(), 5);
    /// assert_eq!(b.len(), 0);
    ///
    /// assert!(a.contains(&1));
    /// assert!(a.contains(&2));
    /// assert!(a.contains(&3));
    /// assert!(a.contains(&4));
    /// assert!(a.contains(&5));
    /// ```
    #[stable(feature = "btree_append", since = "1.11.0")]
    pub fn append(&mut self, other: &mut Self)
    where
        T: Ord,
    {
        self.map.append(&mut other.map);
    }

    /// Splits the collection into two at the given key. Returns everything after the given key,
    /// including the key.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let mut a = BTreeSet::new();
    /// a.insert(1);
    /// a.insert(2);
    /// a.insert(3);
    /// a.insert(17);
    /// a.insert(41);
    ///
    /// let b = a.split_off(&3);
    ///
    /// assert_eq!(a.len(), 2);
    /// assert_eq!(b.len(), 3);
    ///
    /// assert!(a.contains(&1));
    /// assert!(a.contains(&2));
    ///
    /// assert!(b.contains(&3));
    /// assert!(b.contains(&17));
    /// assert!(b.contains(&41));
    /// ```
    #[stable(feature = "btree_split_off", since = "1.11.0")]
    pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
    where
        T: Borrow<Q> + Ord,
    {
        BTreeSet { map: self.map.split_off(key) }
    }

    /// Creates an iterator which uses a closure to determine if a value should be removed.
    ///
    /// If the closure returns true, then the value is removed and yielded.
    /// If the closure returns false, the value will remain in the list and will not be yielded
    /// by the iterator.
    ///
    /// If the iterator is only partially consumed or not consumed at all, each of the remaining
    /// values will still be subjected to the closure and removed and dropped if it returns true.
    ///
    /// It is unspecified how many more values will be subjected to the closure
    /// if a panic occurs in the closure, or if a panic occurs while dropping a value, or if the
    /// `DrainFilter` itself is leaked.
    ///
    /// # Examples
    ///
    /// Splitting a set into even and odd values, reusing the original set:
    ///
    /// ```
    /// #![feature(btree_drain_filter)]
    /// use std::collections::BTreeSet;
    ///
    /// let mut set: BTreeSet<i32> = (0..8).collect();
    /// let evens: BTreeSet<_> = set.drain_filter(|v| v % 2 == 0).collect();
    /// let odds = set;
    /// assert_eq!(evens.into_iter().collect::<Vec<_>>(), vec![0, 2, 4, 6]);
    /// assert_eq!(odds.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 7]);
    /// ```
    #[unstable(feature = "btree_drain_filter", issue = "70530")]
    pub fn drain_filter<'a, F>(&'a mut self, pred: F) -> DrainFilter<'a, T, F>
    where
        T: Ord,
        F: 'a + FnMut(&T) -> bool,
    {
        DrainFilter { pred, inner: self.map.drain_filter_inner() }
    }
    /// Gets an iterator that visits the values in the `BTreeSet` in ascending order.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let set: BTreeSet<usize> = [1, 2, 3].iter().cloned().collect();
    /// let mut set_iter = set.iter();
    /// assert_eq!(set_iter.next(), Some(&1));
    /// assert_eq!(set_iter.next(), Some(&2));
    /// assert_eq!(set_iter.next(), Some(&3));
    /// assert_eq!(set_iter.next(), None);
    /// ```
    ///
    /// Values returned by the iterator are returned in ascending order:
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let set: BTreeSet<usize> = [3, 1, 2].iter().cloned().collect();
    /// let mut set_iter = set.iter();
    /// assert_eq!(set_iter.next(), Some(&1));
    /// assert_eq!(set_iter.next(), Some(&2));
    /// assert_eq!(set_iter.next(), Some(&3));
    /// assert_eq!(set_iter.next(), None);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn iter(&self) -> Iter<'_, T> {
        Iter { iter: self.map.keys() }
    }

    /// Returns the number of elements in the set.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let mut v = BTreeSet::new();
    /// assert_eq!(v.len(), 0);
    /// v.insert(1);
    /// assert_eq!(v.len(), 1);
    /// ```
    #[doc(alias = "length")]
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
    pub const fn len(&self) -> usize {
        self.map.len()
    }

    /// Returns `true` if the set contains no elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let mut v = BTreeSet::new();
    /// assert!(v.is_empty());
    /// v.insert(1);
    /// assert!(!v.is_empty());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> FromIterator<T> for BTreeSet<T> {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BTreeSet<T> {
        let mut set = BTreeSet::new();
        set.extend(iter);
        set
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> IntoIterator for BTreeSet<T> {
    type Item = T;
    type IntoIter = IntoIter<T>;

    /// Gets an iterator for moving out the `BTreeSet`'s contents.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let set: BTreeSet<usize> = [1, 2, 3, 4].iter().cloned().collect();
    ///
    /// let v: Vec<_> = set.into_iter().collect();
    /// assert_eq!(v, [1, 2, 3, 4]);
    /// ```
    fn into_iter(self) -> IntoIter<T> {
        IntoIter { iter: self.map.into_iter() }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a BTreeSet<T> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;

    fn into_iter(self) -> Iter<'a, T> {
        self.iter()
    }
}
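// Editor's sketch of the `FromIterator`/`IntoIterator` pair above: collecting
// into a `BTreeSet` sorts and deduplicates, and iterating back out is ordered.
//
// ```
// use std::collections::BTreeSet;
//
// let set: BTreeSet<i32> = [3, 1, 2, 3, 1].iter().cloned().collect();
// let sorted: Vec<i32> = set.into_iter().collect();
// assert_eq!(sorted, [1, 2, 3]); // duplicates gone, order ascending
// ```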
#[unstable(feature = "btree_drain_filter", issue = "70530")] pub struct DrainFilter<'a, T, F> where T: 'a, F: 'a + FnMut(&T) -> bool, { pred: F, inner: super::map::DrainFilterInner<'a, T, ()>, } #[unstable(feature = "btree_drain_filter", issue = "70530")] impl Drop for DrainFilter<'_, T, F> where F: FnMut(&T) -> bool, { fn drop(&mut self) { self.for_each(drop); } } #[unstable(feature = "btree_drain_filter", issue = "70530")] impl fmt::Debug for DrainFilter<'_, T, F> where T: fmt::Debug, F: FnMut(&T) -> bool, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("DrainFilter").field(&self.inner.peek().map(|(k, _)| k)).finish() } } #[unstable(feature = "btree_drain_filter", issue = "70530")] impl<'a, T, F> Iterator for DrainFilter<'_, T, F> where F: 'a + FnMut(&T) -> bool, { type Item = T; fn next(&mut self) -> Option { let pred = &mut self.pred; let mut mapped_pred = |k: &T, _v: &mut ()| pred(k); self.inner.next(&mut mapped_pred).map(|(k, _)| k) } fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } } #[unstable(feature = "btree_drain_filter", issue = "70530")] impl FusedIterator for DrainFilter<'_, T, F> where F: FnMut(&T) -> bool {} #[stable(feature = "rust1", since = "1.0.0")] impl Extend for BTreeSet { #[inline] fn extend>(&mut self, iter: Iter) { iter.into_iter().for_each(move |elem| { self.insert(elem); }); } #[inline] fn extend_one(&mut self, elem: T) { self.insert(elem); } } #[stable(feature = "extend_ref", since = "1.2.0")] impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BTreeSet { fn extend>(&mut self, iter: I) { self.extend(iter.into_iter().cloned()); } #[inline] fn extend_one(&mut self, &elem: &'a T) { self.insert(elem); } } #[stable(feature = "rust1", since = "1.0.0")] impl Default for BTreeSet { /// Creates an empty `BTreeSet`. fn default() -> BTreeSet { BTreeSet::new() } } #[stable(feature = "rust1", since = "1.0.0")] impl Sub<&BTreeSet> for &BTreeSet { type Output = BTreeSet; /// Returns the difference of `self` and `rhs` as a new `BTreeSet`. /// /// # Examples /// /// ``` /// use std::collections::BTreeSet; /// /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect(); /// let b: BTreeSet<_> = vec![3, 4, 5].into_iter().collect(); /// /// let result = &a - &b; /// let result_vec: Vec<_> = result.into_iter().collect(); /// assert_eq!(result_vec, [1, 2]); /// ``` fn sub(self, rhs: &BTreeSet) -> BTreeSet { self.difference(rhs).cloned().collect() } } #[stable(feature = "rust1", since = "1.0.0")] impl BitXor<&BTreeSet> for &BTreeSet { type Output = BTreeSet; /// Returns the symmetric difference of `self` and `rhs` as a new `BTreeSet`. /// /// # Examples /// /// ``` /// use std::collections::BTreeSet; /// /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect(); /// let b: BTreeSet<_> = vec![2, 3, 4].into_iter().collect(); /// /// let result = &a ^ &b; /// let result_vec: Vec<_> = result.into_iter().collect(); /// assert_eq!(result_vec, [1, 4]); /// ``` fn bitxor(self, rhs: &BTreeSet) -> BTreeSet { self.symmetric_difference(rhs).cloned().collect() } } #[stable(feature = "rust1", since = "1.0.0")] impl BitAnd<&BTreeSet> for &BTreeSet { type Output = BTreeSet; /// Returns the intersection of `self` and `rhs` as a new `BTreeSet`. 
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord + Clone> BitAnd<&BTreeSet<T>> for &BTreeSet<T> {
    type Output = BTreeSet<T>;

    /// Returns the intersection of `self` and `rhs` as a new `BTreeSet<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
    /// let b: BTreeSet<_> = vec![2, 3, 4].into_iter().collect();
    ///
    /// let result = &a & &b;
    /// let result_vec: Vec<_> = result.into_iter().collect();
    /// assert_eq!(result_vec, [2, 3]);
    /// ```
    fn bitand(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
        self.intersection(rhs).cloned().collect()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord + Clone> BitOr<&BTreeSet<T>> for &BTreeSet<T> {
    type Output = BTreeSet<T>;

    /// Returns the union of `self` and `rhs` as a new `BTreeSet<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
    /// let b: BTreeSet<_> = vec![3, 4, 5].into_iter().collect();
    ///
    /// let result = &a | &b;
    /// let result_vec: Vec<_> = result.into_iter().collect();
    /// assert_eq!(result_vec, [1, 2, 3, 4, 5]);
    /// ```
    fn bitor(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
        self.union(rhs).cloned().collect()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Debug> Debug for BTreeSet<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_set().entries(self.iter()).finish()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Iter<'_, T> {
    fn clone(&self) -> Self {
        Iter { iter: self.iter.clone() }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        self.iter.next()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }

    fn last(mut self) -> Option<&'a T> {
        self.next_back()
    }

    fn min(mut self) -> Option<&'a T> {
        self.next()
    }

    fn max(mut self) -> Option<&'a T> {
        self.next_back()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
    fn next_back(&mut self) -> Option<&'a T> {
        self.iter.next_back()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for Iter<'_, T> {
    fn len(&self) -> usize {
        self.iter.len()
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Iter<'_, T> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Iterator for IntoIter<T> {
    type Item = T;

    fn next(&mut self) -> Option<T> {
        self.iter.next().map(|(k, _)| k)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> DoubleEndedIterator for IntoIter<T> {
    fn next_back(&mut self) -> Option<T> {
        self.iter.next_back().map(|(k, _)| k)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IntoIter<T> {
    fn len(&self) -> usize {
        self.iter.len()
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IntoIter<T> {}

#[stable(feature = "btree_range", since = "1.17.0")]
impl<T> Clone for Range<'_, T> {
    fn clone(&self) -> Self {
        Range { iter: self.iter.clone() }
    }
}

#[stable(feature = "btree_range", since = "1.17.0")]
impl<'a, T> Iterator for Range<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        self.iter.next().map(|(k, _)| k)
    }

    fn last(mut self) -> Option<&'a T> {
        self.next_back()
    }

    fn min(mut self) -> Option<&'a T> {
        self.next()
    }

    fn max(mut self) -> Option<&'a T> {
        self.next_back()
    }
}

#[stable(feature = "btree_range", since = "1.17.0")]
impl<'a, T> DoubleEndedIterator for Range<'a, T> {
    fn next_back(&mut self) -> Option<&'a T> {
        self.iter.next_back().map(|(k, _)| k)
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Range<'_, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Difference<'_, T> {
    fn clone(&self) -> Self {
        Difference {
            inner: match &self.inner {
                DifferenceInner::Stitch { self_iter, other_iter } => DifferenceInner::Stitch {
                    self_iter: self_iter.clone(),
                    other_iter: other_iter.clone(),
                },
                DifferenceInner::Search { self_iter, other_set } => {
                    DifferenceInner::Search { self_iter: self_iter.clone(), other_set }
                }
                DifferenceInner::Iterate(iter) => DifferenceInner::Iterate(iter.clone()),
            },
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: Ord> Iterator for Difference<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        match &mut self.inner {
            DifferenceInner::Stitch { self_iter, other_iter } => {
                let mut self_next = self_iter.next()?;
                loop {
                    match other_iter.peek().map_or(Less, |other_next| self_next.cmp(other_next)) {
                        Less => return Some(self_next),
                        Equal => {
                            self_next = self_iter.next()?;
                            other_iter.next();
                        }
                        Greater => {
                            other_iter.next();
                        }
                    }
                }
            }
            DifferenceInner::Search { self_iter, other_set } => loop {
                let self_next = self_iter.next()?;
                if !other_set.contains(&self_next) {
                    return Some(self_next);
                }
            },
            DifferenceInner::Iterate(iter) => iter.next(),
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let (self_len, other_len) = match &self.inner {
            DifferenceInner::Stitch { self_iter, other_iter } => {
                (self_iter.len(), other_iter.len())
            }
            DifferenceInner::Search { self_iter, other_set } => (self_iter.len(), other_set.len()),
            DifferenceInner::Iterate(iter) => (iter.len(), 0),
        };
        (self_len.saturating_sub(other_len), Some(self_len))
    }

    fn min(mut self) -> Option<&'a T> {
        self.next()
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T: Ord> FusedIterator for Difference<'_, T> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for SymmetricDifference<'_, T> {
    fn clone(&self) -> Self {
        SymmetricDifference(self.0.clone())
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: Ord> Iterator for SymmetricDifference<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        loop {
            let (a_next, b_next) = self.0.nexts(Self::Item::cmp);
            if a_next.and(b_next).is_none() {
                return a_next.or(b_next);
            }
        }
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        let (a_len, b_len) = self.0.lens();
        // No checked_add, because even if a and b refer to the same set,
        // and T is an empty type, the storage overhead of sets limits
        // the number of elements to less than half the range of usize.
        (0, Some(a_len + b_len))
    }

    fn min(mut self) -> Option<&'a T> {
        self.next()
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T: Ord> FusedIterator for SymmetricDifference<'_, T> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Intersection<'_, T> {
    fn clone(&self) -> Self {
        Intersection {
            inner: match &self.inner {
                IntersectionInner::Stitch { a, b } => {
                    IntersectionInner::Stitch { a: a.clone(), b: b.clone() }
                }
                IntersectionInner::Search { small_iter, large_set } => {
                    IntersectionInner::Search { small_iter: small_iter.clone(), large_set }
                }
                IntersectionInner::Answer(answer) => IntersectionInner::Answer(*answer),
            },
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: Ord> Iterator for Intersection<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        match &mut self.inner {
            IntersectionInner::Stitch { a, b } => {
                let mut a_next = a.next()?;
                let mut b_next = b.next()?;
                loop {
                    match a_next.cmp(b_next) {
                        Less => a_next = a.next()?,
                        Greater => b_next = b.next()?,
                        Equal => return Some(a_next),
                    }
                }
            }
            IntersectionInner::Search { small_iter, large_set } => loop {
                let small_next = small_iter.next()?;
                if large_set.contains(&small_next) {
                    return Some(small_next);
                }
            },
            IntersectionInner::Answer(answer) => answer.take(),
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        match &self.inner {
            IntersectionInner::Stitch { a, b } => (0, Some(min(a.len(), b.len()))),
            IntersectionInner::Search { small_iter, .. } => (0, Some(small_iter.len())),
            IntersectionInner::Answer(None) => (0, Some(0)),
            IntersectionInner::Answer(Some(_)) => (1, Some(1)),
        }
    }

    fn min(mut self) -> Option<&'a T> {
        self.next()
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T: Ord> FusedIterator for Intersection<'_, T> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Union<'_, T> {
    fn clone(&self) -> Self {
        Union(self.0.clone())
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: Ord> Iterator for Union<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        let (a_next, b_next) = self.0.nexts(Self::Item::cmp);
        a_next.or(b_next)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let (a_len, b_len) = self.0.lens();
        // No checked_add - see SymmetricDifference::size_hint.
        (max(a_len, b_len), Some(a_len + b_len))
    }

    fn min(mut self) -> Option<&'a T> {
        self.next()
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T: Ord> FusedIterator for Union<'_, T> {}

#[cfg(test)]
mod tests;

use core::borrow::Borrow;
use core::ops::RangeBounds;
use core::ptr;

use super::node::{marker, ForceResult::*, Handle, NodeRef};

pub struct LeafRange<BorrowType, K, V> {
    pub front: Option<Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>>,
    pub back: Option<Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>>,
}

impl<BorrowType, K, V> LeafRange<BorrowType, K, V> {
    pub fn none() -> Self {
        LeafRange { front: None, back: None }
    }

    pub fn is_empty(&self) -> bool {
        self.front == self.back
    }

    /// Temporarily takes out another, immutable equivalent of the same range.
    pub fn reborrow(&self) -> LeafRange<marker::Immut<'_>, K, V> {
        LeafRange {
            front: self.front.as_ref().map(|f| f.reborrow()),
            back: self.back.as_ref().map(|b| b.reborrow()),
        }
    }
}

impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
    /// Finds the distinct leaf edges delimiting a specified range in a tree.
    ///
    /// If such distinct edges exist, returns them in ascending order, meaning
    /// that a non-zero number of calls to `next_unchecked` on the `front` of
    /// the result and/or calls to `next_back_unchecked` on the `back` of the
    /// result will eventually reach the same edge.
    ///
    /// If there are no such edges, i.e., if the tree contains no key within
    /// the range, returns a pair of empty options.
    ///
    /// # Safety
    /// Unless `BorrowType` is `Immut`, do not use the handles to visit the same
    /// KV twice.
    unsafe fn find_leaf_edges_spanning_range<Q: ?Sized, R>(
        self,
        range: R,
    ) -> LeafRange<BorrowType, K, V>
    where
        Q: Ord,
        K: Borrow<Q>,
        R: RangeBounds<Q>,
    {
        match self.search_tree_for_bifurcation(&range) {
            Err(_) => LeafRange::none(),
            Ok((
                node,
                lower_edge_idx,
                upper_edge_idx,
                mut lower_child_bound,
                mut upper_child_bound,
            )) => {
                let mut lower_edge = unsafe { Handle::new_edge(ptr::read(&node), lower_edge_idx) };
                let mut upper_edge = unsafe { Handle::new_edge(node, upper_edge_idx) };
                loop {
                    match (lower_edge.force(), upper_edge.force()) {
                        (Leaf(f), Leaf(b)) => return LeafRange { front: Some(f), back: Some(b) },
                        (Internal(f), Internal(b)) => {
                            (lower_edge, lower_child_bound) =
                                f.descend().find_lower_bound_edge(lower_child_bound);
                            (upper_edge, upper_child_bound) =
                                b.descend().find_upper_bound_edge(upper_child_bound);
                        }
                        _ => unreachable!("BTreeMap has different depths"),
                    }
                }
            }
        }
    }
}

/// Equivalent to `(root1.first_leaf_edge(), root2.last_leaf_edge())` but more efficient.
fn full_range<BorrowType, K, V>(
    root1: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
    root2: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
) -> LeafRange<BorrowType, K, V> {
    let mut min_node = root1;
    let mut max_node = root2;
    loop {
        let front = min_node.first_edge();
        let back = max_node.last_edge();
        match (front.force(), back.force()) {
            (Leaf(f), Leaf(b)) => {
                return LeafRange { front: Some(f), back: Some(b) };
            }
            (Internal(min_int), Internal(max_int)) => {
                min_node = min_int.descend();
                max_node = max_int.descend();
            }
            _ => unreachable!("BTreeMap has different depths"),
        };
    }
}

impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
    /// Finds the pair of leaf edges delimiting a specific range in a tree.
    ///
    /// The result is meaningful only if the tree is ordered by key, like the tree
    /// in a `BTreeMap` is.
    pub fn range_search<Q, R>(self, range: R) -> LeafRange<marker::Immut<'a>, K, V>
    where
        Q: ?Sized + Ord,
        K: Borrow<Q>,
        R: RangeBounds<Q>,
    {
        // SAFETY: our borrow type is immutable.
        unsafe { self.find_leaf_edges_spanning_range(range) }
    }

    /// Finds the pair of leaf edges delimiting an entire tree.
    pub fn full_range(self) -> LeafRange<marker::Immut<'a>, K, V> {
        full_range(self, self)
    }
}

impl<'a, K: 'a, V: 'a> NodeRef<marker::ValMut<'a>, K, V, marker::LeafOrInternal> {
    /// Splits a unique reference into a pair of leaf edges delimiting a specified range.
    /// The results are non-unique references allowing (some) mutation, which must be used
    /// carefully.
    ///
    /// The result is meaningful only if the tree is ordered by key, like the tree
    /// in a `BTreeMap` is.
    ///
    /// # Safety
    /// Do not use the duplicate handles to visit the same KV twice.
    pub fn range_search<Q, R>(self, range: R) -> LeafRange<marker::ValMut<'a>, K, V>
    where
        Q: ?Sized + Ord,
        K: Borrow<Q>,
        R: RangeBounds<Q>,
    {
        unsafe { self.find_leaf_edges_spanning_range(range) }
    }

    /// Splits a unique reference into a pair of leaf edges delimiting the full range of the tree.
    /// The results are non-unique references allowing mutation (of values only), so must be used
    /// with care.
    pub fn full_range(self) -> LeafRange<marker::ValMut<'a>, K, V> {
        // We duplicate the root NodeRef here -- we will never visit the same KV
        // twice, and never end up with overlapping value references.
        let self2 = unsafe { ptr::read(&self) };
        full_range(self, self2)
    }
}

impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
    /// Splits a unique reference into a pair of leaf edges delimiting the full range of the tree.
    /// The results are non-unique references allowing massively destructive mutation, so must be
    /// used with the utmost care.
    pub fn full_range(self) -> LeafRange<marker::Dying, K, V> {
        // We duplicate the root NodeRef here -- we will never access it in a way
        // that overlaps references obtained from the root.
        let self2 = unsafe { ptr::read(&self) };
        full_range(self, self2)
    }
}

impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
    /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
    /// on the right side, which is either in the same leaf node or in an ancestor node.
    /// If the leaf edge is the last one in the tree, returns [`Result::Err`] with the root node.
    pub fn next_kv(
        self,
    ) -> Result<
        Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
        NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
    > {
        let mut edge = self.forget_node_type();
        loop {
            edge = match edge.right_kv() {
                Ok(kv) => return Ok(kv),
                Err(last_edge) => match last_edge.into_node().ascend() {
                    Ok(parent_edge) => parent_edge.forget_node_type(),
                    Err(root) => return Err(root),
                },
            }
        }
    }

    /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
    /// on the left side, which is either in the same leaf node or in an ancestor node.
    /// If the leaf edge is the first one in the tree, returns [`Result::Err`] with the root node.
    pub fn next_back_kv(
        self,
    ) -> Result<
        Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
        NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
    > {
        let mut edge = self.forget_node_type();
        loop {
            edge = match edge.left_kv() {
                Ok(kv) => return Ok(kv),
                Err(last_edge) => match last_edge.into_node().ascend() {
                    Ok(parent_edge) => parent_edge.forget_node_type(),
                    Err(root) => return Err(root),
                },
            }
        }
    }
}

impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
    /// Given an internal edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
    /// on the right side, which is either in the same internal node or in an ancestor node.
    /// If the internal edge is the last one in the tree, returns [`Result::Err`] with the root node.
    pub fn next_kv(
        self,
    ) -> Result<
        Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV>,
        NodeRef<BorrowType, K, V, marker::Internal>,
    > {
        let mut edge = self;
        loop {
            edge = match edge.right_kv() {
                Ok(internal_kv) => return Ok(internal_kv),
                Err(last_edge) => match last_edge.into_node().ascend() {
                    Ok(parent_edge) => parent_edge,
                    Err(root) => return Err(root),
                },
            }
        }
    }
}

impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
    /// Given a leaf edge handle into a dying tree, returns the next leaf edge
    /// on the right side, and the key-value pair in between, which is either
    /// in the same leaf node, in an ancestor node, or non-existent.
    ///
    /// This method also deallocates any node(s) it reaches the end of. This
    /// implies that if no more key-value pair exists, the entire remainder of
    /// the tree will have been deallocated and there is nothing left to return.
    ///
    /// # Safety
    /// The given edge must not have been previously returned by counterpart
    /// `deallocating_next_back`.
    unsafe fn deallocating_next(self) -> Option<(Self, (K, V))> {
        let mut edge = self.forget_node_type();
        loop {
            edge = match edge.right_kv() {
                Ok(kv) => {
                    let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
                    let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
                    return Some((kv.next_leaf_edge(), (k, v)));
                }
                Err(last_edge) => match unsafe { last_edge.into_node().deallocate_and_ascend() } {
                    Some(parent_edge) => parent_edge.forget_node_type(),
                    None => return None,
                },
            }
        }
    }

    /// Given a leaf edge handle into a dying tree, returns the next leaf edge
    /// on the left side, and the key-value pair in between, which is either
    /// in the same leaf node, in an ancestor node, or non-existent.
    ///
    /// This method also deallocates any node(s) it reaches the end of.
    /// This implies that if no more key-value pair exists, the entire remainder of
    /// the tree will have been deallocated and there is nothing left to return.
    ///
    /// # Safety
    /// The given edge must not have been previously returned by counterpart
    /// `deallocating_next`.
    unsafe fn deallocating_next_back(self) -> Option<(Self, (K, V))> {
        let mut edge = self.forget_node_type();
        loop {
            edge = match edge.left_kv() {
                Ok(kv) => {
                    let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
                    let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
                    return Some((kv.next_back_leaf_edge(), (k, v)));
                }
                Err(last_edge) => match unsafe { last_edge.into_node().deallocate_and_ascend() } {
                    Some(parent_edge) => parent_edge.forget_node_type(),
                    None => return None,
                },
            }
        }
    }

    /// Deallocates a pile of nodes from the leaf up to the root.
    /// This is the only way to deallocate the remainder of a tree after
    /// `deallocating_next` and `deallocating_next_back` have been nibbling at
    /// both sides of the tree, and have hit the same edge. As it is intended
    /// only to be called when all keys and values have been returned,
    /// no cleanup is done on any of the keys or values.
    pub fn deallocating_end(self) {
        let mut edge = self.forget_node_type();
        while let Some(parent_edge) = unsafe { edge.into_node().deallocate_and_ascend() } {
            edge = parent_edge.forget_node_type();
        }
    }
}

impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Moves the leaf edge handle to the next leaf edge and returns references to the
    /// key and value in between.
    ///
    /// # Safety
    /// There must be another KV in the direction travelled.
    pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
        super::mem::replace(self, |leaf_edge| {
            let kv = leaf_edge.next_kv();
            let kv = unsafe { kv.ok().unwrap_unchecked() };
            (kv.next_leaf_edge(), kv.into_kv())
        })
    }

    /// Moves the leaf edge handle to the previous leaf edge and returns references to the
    /// key and value in between.
    ///
    /// # Safety
    /// There must be another KV in the direction travelled.
    pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
        super::mem::replace(self, |leaf_edge| {
            let kv = leaf_edge.next_back_kv();
            let kv = unsafe { kv.ok().unwrap_unchecked() };
            (kv.next_back_leaf_edge(), kv.into_kv())
        })
    }
}

impl<'a, K, V> Handle<NodeRef<marker::ValMut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Moves the leaf edge handle to the next leaf edge and returns references to the
    /// key and value in between.
    ///
    /// # Safety
    /// There must be another KV in the direction travelled.
    pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) {
        let kv = super::mem::replace(self, |leaf_edge| {
            let kv = leaf_edge.next_kv();
            let kv = unsafe { kv.ok().unwrap_unchecked() };
            (unsafe { ptr::read(&kv) }.next_leaf_edge(), kv)
        });
        // Doing this last is faster, according to benchmarks.
        kv.into_kv_valmut()
    }

    /// Moves the leaf edge handle to the previous leaf and returns references to the
    /// key and value in between.
    ///
    /// # Safety
    /// There must be another KV in the direction travelled.
    pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) {
        let kv = super::mem::replace(self, |leaf_edge| {
            let kv = leaf_edge.next_back_kv();
            let kv = unsafe { kv.ok().unwrap_unchecked() };
            (unsafe { ptr::read(&kv) }.next_back_leaf_edge(), kv)
        });
        // Doing this last is faster, according to benchmarks.
kv.into_kv_valmut() } } impl Handle, marker::Edge> { /// Moves the leaf edge handle to the next leaf edge and returns the key and value /// in between, deallocating any node left behind while leaving the corresponding /// edge in its parent node dangling. /// /// # Safety /// - There must be another KV in the direction travelled. /// - That KV was not previously returned by counterpart `next_back_unchecked` /// on any copy of the handles being used to traverse the tree. /// /// The only safe way to proceed with the updated handle is to compare it, drop it, /// call this method again subject to its safety conditions, or call counterpart /// `next_back_unchecked` subject to its safety conditions. pub unsafe fn deallocating_next_unchecked(&mut self) -> (K, V) { super::mem::replace(self, |leaf_edge| unsafe { leaf_edge.deallocating_next().unwrap_unchecked() }) } /// Moves the leaf edge handle to the previous leaf edge and returns the key and value /// in between, deallocating any node left behind while leaving the corresponding /// edge in its parent node dangling. /// /// # Safety /// - There must be another KV in the direction travelled. /// - That leaf edge was not previously returned by counterpart `next_unchecked` /// on any copy of the handles being used to traverse the tree. /// /// The only safe way to proceed with the updated handle is to compare it, drop it, /// call this method again subject to its safety conditions, or call counterpart /// `next_unchecked` subject to its safety conditions. pub unsafe fn deallocating_next_back_unchecked(&mut self) -> (K, V) { super::mem::replace(self, |leaf_edge| unsafe { leaf_edge.deallocating_next_back().unwrap_unchecked() }) } } impl NodeRef { /// Returns the leftmost leaf edge in or underneath a node - in other words, the edge /// you need first when navigating forward (or last when navigating backward). #[inline] pub fn first_leaf_edge(self) -> Handle, marker::Edge> { let mut node = self; loop { match node.force() { Leaf(leaf) => return leaf.first_edge(), Internal(internal) => node = internal.first_edge().descend(), } } } /// Returns the rightmost leaf edge in or underneath a node - in other words, the edge /// you need last when navigating forward (or first when navigating backward). #[inline] pub fn last_leaf_edge(self) -> Handle, marker::Edge> { let mut node = self; loop { match node.force() { Leaf(leaf) => return leaf.last_edge(), Internal(internal) => node = internal.last_edge().descend(), } } } } pub enum Position { Leaf(NodeRef), Internal(NodeRef), InternalKV(Handle, marker::KV>), } impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { /// Visits leaf nodes and internal KVs in order of ascending keys, and also /// visits internal nodes as a whole in a depth first order, meaning that /// internal nodes precede their individual KVs and their child nodes. pub fn visit_nodes_in_order(self, mut visit: F) where F: FnMut(Position, K, V>), { match self.force() { Leaf(leaf) => visit(Position::Leaf(leaf)), Internal(internal) => { visit(Position::Internal(internal)); let mut edge = internal.first_edge(); loop { edge = match edge.descend().force() { Leaf(leaf) => { visit(Position::Leaf(leaf)); match edge.next_kv() { Ok(kv) => { visit(Position::InternalKV(kv)); kv.right_edge() } Err(_) => return, } } Internal(internal) => { visit(Position::Internal(internal)); internal.first_edge() } } } } } } /// Calculates the number of elements in a (sub)tree. 
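// Illustration (comments only, not part of this module's API): the two
// leaf-edge helpers above are what give the public map iterators their
// endpoints, so the observable contract is simply ordered traversal from
// either end. A minimal sketch against the public API, assuming a build where
// this module backs `std::collections::BTreeMap`:
//
//     let map: BTreeMap<i32, ()> = (0..5).map(|i| (i, ())).collect();
//     assert_eq!(map.iter().next(), Some((&0, &())));      // via first_leaf_edge
//     assert_eq!(map.iter().next_back(), Some((&4, &()))); // via last_leaf_edge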
pub enum Position<BorrowType, K, V> {
    Leaf(NodeRef<BorrowType, K, V, marker::Leaf>),
    Internal(NodeRef<BorrowType, K, V, marker::Internal>),
    InternalKV(Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV>),
}

impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
    /// Visits leaf nodes and internal KVs in order of ascending keys, and also
    /// visits internal nodes as a whole in a depth first order, meaning that
    /// internal nodes precede their individual KVs and their child nodes.
    pub fn visit_nodes_in_order<F>(self, mut visit: F)
    where
        F: FnMut(Position<marker::Immut<'a>, K, V>),
    {
        match self.force() {
            Leaf(leaf) => visit(Position::Leaf(leaf)),
            Internal(internal) => {
                visit(Position::Internal(internal));
                let mut edge = internal.first_edge();
                loop {
                    edge = match edge.descend().force() {
                        Leaf(leaf) => {
                            visit(Position::Leaf(leaf));
                            match edge.next_kv() {
                                Ok(kv) => {
                                    visit(Position::InternalKV(kv));
                                    kv.right_edge()
                                }
                                Err(_) => return,
                            }
                        }
                        Internal(internal) => {
                            visit(Position::Internal(internal));
                            internal.first_edge()
                        }
                    }
                }
            }
        }
    }

    /// Calculates the number of elements in a (sub)tree.
    pub fn calc_length(self) -> usize {
        let mut result = 0;
        self.visit_nodes_in_order(|pos| match pos {
            Position::Leaf(node) => result += node.len(),
            Position::Internal(node) => result += node.len(),
            Position::InternalKV(_) => (),
        });
        result
    }
}

impl<BorrowType: marker::BorrowType, K, V>
    Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>
{
    /// Returns the leaf edge closest to a KV for forward navigation.
    pub fn next_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
        match self.force() {
            Leaf(leaf_kv) => leaf_kv.right_edge(),
            Internal(internal_kv) => {
                let next_internal_edge = internal_kv.right_edge();
                next_internal_edge.descend().first_leaf_edge()
            }
        }
    }

    /// Returns the leaf edge closest to a KV for backward navigation.
    pub fn next_back_leaf_edge(
        self,
    ) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
        match self.force() {
            Leaf(leaf_kv) => leaf_kv.left_edge(),
            Internal(internal_kv) => {
                let next_internal_edge = internal_kv.left_edge();
                next_internal_edge.descend().last_leaf_edge()
            }
        }
    }
}

mod append;
mod borrow;
mod fix;
pub mod map;
mod mem;
mod merge_iter;
mod navigate;
mod node;
mod remove;
mod search;
pub mod set;
mod split;

#[doc(hidden)]
trait Recover<Q: ?Sized> {
    type Key;

    fn get(&self, key: &Q) -> Option<&Self::Key>;
    fn take(&mut self, key: &Q) -> Option<Self::Key>;
    fn replace(&mut self, key: Self::Key) -> Option<Self::Key>;
}

#[cfg(test)]
mod testing;

use super::super::testing::crash_test::{CrashTestDummy, Panic};
use super::super::testing::rng::DeterministicRng;
use super::*;
use crate::vec::Vec;
use std::cmp::Ordering;
use std::iter::FromIterator;
use std::panic::{catch_unwind, AssertUnwindSafe};

#[test]
fn test_clone_eq() {
    let mut m = BTreeSet::new();

    m.insert(1);
    m.insert(2);

    assert_eq!(m.clone(), m);
}

#[allow(dead_code)]
fn test_const() {
    const SET: &'static BTreeSet<()> = &BTreeSet::new();
    const LEN: usize = SET.len();
    const IS_EMPTY: bool = SET.is_empty();
}

#[test]
fn test_iter_min_max() {
    let mut a = BTreeSet::new();
    assert_eq!(a.iter().min(), None);
    assert_eq!(a.iter().max(), None);
    assert_eq!(a.range(..).min(), None);
    assert_eq!(a.range(..).max(), None);
    assert_eq!(a.difference(&BTreeSet::new()).min(), None);
    assert_eq!(a.difference(&BTreeSet::new()).max(), None);
    assert_eq!(a.intersection(&a).min(), None);
    assert_eq!(a.intersection(&a).max(), None);
    assert_eq!(a.symmetric_difference(&BTreeSet::new()).min(), None);
    assert_eq!(a.symmetric_difference(&BTreeSet::new()).max(), None);
    assert_eq!(a.union(&a).min(), None);
    assert_eq!(a.union(&a).max(), None);
    a.insert(1);
    a.insert(2);
    assert_eq!(a.iter().min(), Some(&1));
    assert_eq!(a.iter().max(), Some(&2));
    assert_eq!(a.range(..).min(), Some(&1));
    assert_eq!(a.range(..).max(), Some(&2));
    assert_eq!(a.difference(&BTreeSet::new()).min(), Some(&1));
    assert_eq!(a.difference(&BTreeSet::new()).max(), Some(&2));
    assert_eq!(a.intersection(&a).min(), Some(&1));
    assert_eq!(a.intersection(&a).max(), Some(&2));
    assert_eq!(a.symmetric_difference(&BTreeSet::new()).min(), Some(&1));
    assert_eq!(a.symmetric_difference(&BTreeSet::new()).max(), Some(&2));
    assert_eq!(a.union(&a).min(), Some(&1));
    assert_eq!(a.union(&a).max(), Some(&2));
}

fn check<F>(a: &[i32], b: &[i32], expected: &[i32], f: F)
where
    F: FnOnce(&BTreeSet<i32>, &BTreeSet<i32>, &mut dyn FnMut(&i32) -> bool) -> bool,
{
    let mut set_a = BTreeSet::new();
    let mut set_b = BTreeSet::new();

    for x in a {
        assert!(set_a.insert(*x))
    }
    for y in b {
        assert!(set_b.insert(*y))
    }

    let mut i = 0;
    f(&set_a, &set_b, &mut |&x| {
        if i < expected.len() {
            assert_eq!(x, expected[i]);
        }
        i += 1;
        true
    });
    assert_eq!(i, expected.len());
}

#[test]
fn test_intersection() {
    fn check_intersection(a: &[i32], b: &[i32], expected: &[i32]) {
        check(a, b, expected, |x, y, f| x.intersection(y).all(f))
    }

    check_intersection(&[], &[], &[]);
    check_intersection(&[1, 2, 3], &[], &[]);
    check_intersection(&[], &[1, 2, 3], &[]);
    check_intersection(&[2], &[1, 2, 3], &[2]);
    check_intersection(&[1, 2, 3], &[2], &[2]);
    check_intersection(&[11, 1, 3, 77, 103, 5, -5], &[2, 11, 77, -9, -42, 5, 3], &[3, 5, 11, 77]);

    if cfg!(miri) {
        // Miri is too slow
        return;
    }

    let large = (0..100).collect::<Vec<_>>();
    check_intersection(&[], &large, &[]);
    check_intersection(&large, &[], &[]);
    check_intersection(&[-1], &large, &[]);
    check_intersection(&large, &[-1], &[]);
    check_intersection(&[0], &large, &[0]);
    check_intersection(&large, &[0], &[0]);
    check_intersection(&[99], &large, &[99]);
    check_intersection(&large, &[99], &[99]);
    check_intersection(&[100], &large, &[]);
    check_intersection(&large, &[100], &[]);
    check_intersection(&[11, 5000, 1, 3, 77, 8924], &large, &[1, 3, 11, 77]);
}

#[test]
fn test_intersection_size_hint() {
    let x: BTreeSet<i32> = [3, 4].iter().copied().collect();
    let y: BTreeSet<i32> = [1, 2, 3].iter().copied().collect();
    let mut iter = x.intersection(&y);
    assert_eq!(iter.size_hint(), (1, Some(1)));
    assert_eq!(iter.next(), Some(&3));
    assert_eq!(iter.size_hint(), (0, Some(0)));
    assert_eq!(iter.next(), None);

    iter = y.intersection(&y);
    assert_eq!(iter.size_hint(), (0, Some(3)));
    assert_eq!(iter.next(), Some(&1));
    assert_eq!(iter.size_hint(), (0, Some(2)));
}
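// A sketch of the general contract the concrete hints above instantiate
// (public API only): whatever representation `intersection` picks, its
// `size_hint` must bracket the true number of common elements, which is what
// `collect` and friends rely on.
#[test]
fn test_intersection_size_hint_brackets_len() {
    let x: BTreeSet<i32> = (0..10).collect();
    let y: BTreeSet<i32> = (5..15).collect();
    let (lo, hi) = x.intersection(&y).size_hint();
    let n = x.intersection(&y).count();
    assert!(lo <= n);
    assert!(hi.map_or(true, |hi| n <= hi));
}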
#[test]
fn test_difference() {
    fn check_difference(a: &[i32], b: &[i32], expected: &[i32]) {
        check(a, b, expected, |x, y, f| x.difference(y).all(f))
    }

    check_difference(&[], &[], &[]);
    check_difference(&[1, 12], &[], &[1, 12]);
    check_difference(&[], &[1, 2, 3, 9], &[]);
    check_difference(&[1, 3, 5, 9, 11], &[3, 9], &[1, 5, 11]);
    check_difference(&[1, 3, 5, 9, 11], &[3, 6, 9], &[1, 5, 11]);
    check_difference(&[1, 3, 5, 9, 11], &[0, 1], &[3, 5, 9, 11]);
    check_difference(&[1, 3, 5, 9, 11], &[11, 12], &[1, 3, 5, 9]);
    check_difference(
        &[-5, 11, 22, 33, 40, 42],
        &[-12, -5, 14, 23, 34, 38, 39, 50],
        &[11, 22, 33, 40, 42],
    );

    if cfg!(miri) {
        // Miri is too slow
        return;
    }

    let large = (0..100).collect::<Vec<_>>();
    check_difference(&[], &large, &[]);
    check_difference(&[-1], &large, &[-1]);
    check_difference(&[0], &large, &[]);
    check_difference(&[99], &large, &[]);
    check_difference(&[100], &large, &[100]);
    check_difference(&[11, 5000, 1, 3, 77, 8924], &large, &[5000, 8924]);
    check_difference(&large, &[], &large);
    check_difference(&large, &[-1], &large);
    check_difference(&large, &[100], &large);
}

#[test]
fn test_difference_size_hint() {
    let s246: BTreeSet<i32> = [2, 4, 6].iter().copied().collect();
    let s23456: BTreeSet<i32> = (2..=6).collect();
    let mut iter = s246.difference(&s23456);
    assert_eq!(iter.size_hint(), (0, Some(3)));
    assert_eq!(iter.next(), None);

    let s12345: BTreeSet<i32> = (1..=5).collect();
    iter = s246.difference(&s12345);
    assert_eq!(iter.size_hint(), (0, Some(3)));
    assert_eq!(iter.next(), Some(&6));
    assert_eq!(iter.size_hint(), (0, Some(0)));
    assert_eq!(iter.next(), None);

    let s34567: BTreeSet<i32> = (3..=7).collect();
    iter = s246.difference(&s34567);
    assert_eq!(iter.size_hint(), (0, Some(3)));
    assert_eq!(iter.next(), Some(&2));
    assert_eq!(iter.size_hint(), (0, Some(2)));
    assert_eq!(iter.next(), None);

    let s1: BTreeSet<i32> = (-9..=1).collect();
    iter = s246.difference(&s1);
    assert_eq!(iter.size_hint(), (3, Some(3)));

    let s2: BTreeSet<i32> = (-9..=2).collect();
    iter = s246.difference(&s2);
    assert_eq!(iter.size_hint(), (2, Some(2)));
    assert_eq!(iter.next(), Some(&4));
    assert_eq!(iter.size_hint(), (1, Some(1)));

    let s23: BTreeSet<i32> = (2..=3).collect();
    iter = s246.difference(&s23);
    assert_eq!(iter.size_hint(), (1, Some(3)));
    assert_eq!(iter.next(), Some(&4));
    assert_eq!(iter.size_hint(), (1, Some(1)));

    let s4: BTreeSet<i32> = (4..=4).collect();
    iter = s246.difference(&s4);
    assert_eq!(iter.size_hint(), (2, Some(3)));
    assert_eq!(iter.next(), Some(&2));
    assert_eq!(iter.size_hint(), (1, Some(2)));
    assert_eq!(iter.next(), Some(&6));
    assert_eq!(iter.size_hint(), (0, Some(0)));
    assert_eq!(iter.next(), None);

    let s56: BTreeSet<i32> = (5..=6).collect();
    iter = s246.difference(&s56);
    assert_eq!(iter.size_hint(), (1, Some(3)));
    assert_eq!(iter.next(), Some(&2));
    assert_eq!(iter.size_hint(), (0, Some(2)));

    let s6: BTreeSet<i32> = (6..=19).collect();
    iter = s246.difference(&s6);
    assert_eq!(iter.size_hint(), (2, Some(2)));
    assert_eq!(iter.next(), Some(&2));
    assert_eq!(iter.size_hint(), (1, Some(1)));

    let s7: BTreeSet<i32> = (7..=19).collect();
    iter = s246.difference(&s7);
    assert_eq!(iter.size_hint(), (3, Some(3)));
}

#[test]
fn test_symmetric_difference() {
    fn check_symmetric_difference(a: &[i32], b: &[i32], expected: &[i32]) {
        check(a, b, expected, |x, y, f| x.symmetric_difference(y).all(f))
    }

    check_symmetric_difference(&[], &[], &[]);
    check_symmetric_difference(&[1, 2, 3], &[2], &[1, 3]);
    check_symmetric_difference(&[2], &[1, 2, 3], &[1, 3]);
    check_symmetric_difference(&[1, 3, 5, 9, 11], &[-2, 3, 9, 14, 22], &[-2, 1, 5, 11, 14, 22]);
}

#[test]
fn test_symmetric_difference_size_hint() {
    let x: BTreeSet<i32> = [2, 4].iter().copied().collect();
    let y: BTreeSet<i32> = [1, 2, 3].iter().copied().collect();
    let mut iter = x.symmetric_difference(&y);
    assert_eq!(iter.size_hint(), (0, Some(5)));
    assert_eq!(iter.next(), Some(&1));
    assert_eq!(iter.size_hint(), (0, Some(4)));
    assert_eq!(iter.next(), Some(&3));
    assert_eq!(iter.size_hint(), (0, Some(1)));
}

#[test]
fn test_union() {
    fn check_union(a: &[i32], b: &[i32], expected: &[i32]) {
        check(a, b, expected, |x, y, f| x.union(y).all(f))
    }

    check_union(&[], &[], &[]);
    check_union(&[1, 2, 3], &[2], &[1, 2, 3]);
    check_union(&[2], &[1, 2, 3], &[1, 2, 3]);
    check_union(
        &[1, 3, 5, 9, 11, 16, 19, 24],
        &[-2, 1, 5, 9, 13, 19],
        &[-2, 1, 3, 5, 9, 11, 13, 16, 19, 24],
    );
}

#[test]
fn test_union_size_hint() {
    let x: BTreeSet<i32> = [2, 4].iter().copied().collect();
    let y: BTreeSet<i32> = [1, 2, 3].iter().copied().collect();
    let mut iter = x.union(&y);
    assert_eq!(iter.size_hint(), (3, Some(5)));
    assert_eq!(iter.next(), Some(&1));
    assert_eq!(iter.size_hint(), (2, Some(4)));
    assert_eq!(iter.next(), Some(&2));
    assert_eq!(iter.size_hint(), (1, Some(2)));
}

#[test]
// Only tests the simple function definition with respect to intersection
fn test_is_disjoint() {
    let one = [1].iter().collect::<BTreeSet<_>>();
    let two = [2].iter().collect::<BTreeSet<_>>();
    assert!(one.is_disjoint(&two));
}

#[test]
// Also implicitly tests the trivial function definition of is_superset
fn test_is_subset() {
    fn is_subset(a: &[i32], b: &[i32]) -> bool {
        let set_a = a.iter().collect::<BTreeSet<_>>();
        let set_b = b.iter().collect::<BTreeSet<_>>();
        set_a.is_subset(&set_b)
    }

    assert_eq!(is_subset(&[], &[]), true);
    assert_eq!(is_subset(&[], &[1, 2]), true);
    assert_eq!(is_subset(&[0], &[1, 2]), false);
    assert_eq!(is_subset(&[1], &[1, 2]), true);
    assert_eq!(is_subset(&[2], &[1, 2]), true);
    assert_eq!(is_subset(&[3], &[1, 2]), false);
    assert_eq!(is_subset(&[1, 2], &[1]), false);
    assert_eq!(is_subset(&[1, 2], &[1, 2]), true);
    assert_eq!(is_subset(&[1, 2], &[2, 3]), false);
    assert_eq!(
        is_subset(&[-5, 11, 22, 33, 40, 42], &[-12, -5, 11, 14, 22, 23, 33, 34, 38, 39, 40, 42]),
        true
    );
    assert_eq!(is_subset(&[-5, 11, 22, 33, 40, 42], &[-12, -5, 11, 14, 22, 23, 34, 38]), false);

    if cfg!(miri) {
        // Miri is too slow
        return;
    }

    let large = (0..100).collect::<Vec<_>>();
    assert_eq!(is_subset(&[], &large), true);
    assert_eq!(is_subset(&large, &[]), false);
    assert_eq!(is_subset(&[-1], &large), false);
    assert_eq!(is_subset(&[0], &large), true);
    assert_eq!(is_subset(&[1, 2], &large), true);
    assert_eq!(is_subset(&[99, 100], &large), false);
}

#[test]
fn test_retain() {
    let xs = [1, 2, 3, 4, 5, 6];
    let mut set: BTreeSet<i32> = xs.iter().cloned().collect();
    set.retain(|&k| k % 2 == 0);
    assert_eq!(set.len(), 3);
    assert!(set.contains(&2));
    assert!(set.contains(&4));
    assert!(set.contains(&6));
}

#[test]
fn test_drain_filter() {
    let mut x: BTreeSet<_> = [1].iter().copied().collect();
    let mut y: BTreeSet<_> = [1].iter().copied().collect();

    x.drain_filter(|_| true);
    y.drain_filter(|_| false);
    assert_eq!(x.len(), 0);
    assert_eq!(y.len(), 1);
}

#[test]
fn test_drain_filter_drop_panic_leak() {
    let a = CrashTestDummy::new(0);
    let b = CrashTestDummy::new(1);
    let c = CrashTestDummy::new(2);
    let mut set = BTreeSet::new();
    set.insert(a.spawn(Panic::Never));
    set.insert(b.spawn(Panic::InDrop));
    set.insert(c.spawn(Panic::Never));

    catch_unwind(move || drop(set.drain_filter(|dummy| dummy.query(true)))).ok();

    assert_eq!(a.queried(), 1);
    assert_eq!(b.queried(), 1);
    assert_eq!(c.queried(), 0);
    assert_eq!(a.dropped(), 1);
    assert_eq!(b.dropped(), 1);
    assert_eq!(c.dropped(), 1);
}

#[test]
fn test_drain_filter_pred_panic_leak() {
    let a = CrashTestDummy::new(0);
    let b = CrashTestDummy::new(1);
    let c = CrashTestDummy::new(2);
    let mut set = BTreeSet::new();
    set.insert(a.spawn(Panic::Never));
    set.insert(b.spawn(Panic::InQuery));
    set.insert(c.spawn(Panic::InQuery));

    catch_unwind(AssertUnwindSafe(|| drop(set.drain_filter(|dummy| dummy.query(true))))).ok();

    assert_eq!(a.queried(), 1);
    assert_eq!(b.queried(), 1);
    assert_eq!(c.queried(), 0);
    assert_eq!(a.dropped(), 1);
    assert_eq!(b.dropped(), 0);
    assert_eq!(c.dropped(), 0);
    assert_eq!(set.len(), 2);
    assert_eq!(set.first().unwrap().id(), 1);
    assert_eq!(set.last().unwrap().id(), 2);
}
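// A sketch of the happy path of `drain_filter` (the same unstable API the
// tests above exercise): the iterator yields exactly the elements for which
// the predicate returns true, removing them from the set as it goes.
#[test]
fn test_drain_filter_collect() {
    let mut set: BTreeSet<i32> = (0..10).collect();
    let evens: Vec<i32> = set.drain_filter(|&k| k % 2 == 0).collect();
    assert_eq!(evens, vec![0, 2, 4, 6, 8]);
    assert_eq!(set.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 7, 9]);
}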
#[test]
fn test_clear() {
    let mut x = BTreeSet::new();
    x.insert(1);

    x.clear();
    assert!(x.is_empty());
}

#[test]
fn test_zip() {
    let mut x = BTreeSet::new();
    x.insert(5);
    x.insert(12);
    x.insert(11);

    let mut y = BTreeSet::new();
    y.insert("foo");
    y.insert("bar");

    let x = x;
    let y = y;
    let mut z = x.iter().zip(&y);

    assert_eq!(z.next().unwrap(), (&5, &("bar")));
    assert_eq!(z.next().unwrap(), (&11, &("foo")));
    assert!(z.next().is_none());
}

#[test]
fn test_from_iter() {
    let xs = [1, 2, 3, 4, 5, 6, 7, 8, 9];

    let set: BTreeSet<_> = xs.iter().cloned().collect();

    for x in &xs {
        assert!(set.contains(x));
    }
}

#[test]
fn test_show() {
    let mut set = BTreeSet::new();
    let empty = BTreeSet::<i32>::new();

    set.insert(1);
    set.insert(2);

    let set_str = format!("{:?}", set);

    assert_eq!(set_str, "{1, 2}");
    assert_eq!(format!("{:?}", empty), "{}");
}

#[test]
fn test_extend_ref() {
    let mut a = BTreeSet::new();
    a.insert(1);

    a.extend(&[2, 3, 4]);

    assert_eq!(a.len(), 4);
    assert!(a.contains(&1));
    assert!(a.contains(&2));
    assert!(a.contains(&3));
    assert!(a.contains(&4));

    let mut b = BTreeSet::new();
    b.insert(5);
    b.insert(6);

    a.extend(&b);

    assert_eq!(a.len(), 6);
    assert!(a.contains(&1));
    assert!(a.contains(&2));
    assert!(a.contains(&3));
    assert!(a.contains(&4));
    assert!(a.contains(&5));
    assert!(a.contains(&6));
}

#[test]
fn test_recovery() {
    #[derive(Debug)]
    struct Foo(&'static str, i32);

    impl PartialEq for Foo {
        fn eq(&self, other: &Self) -> bool {
            self.0 == other.0
        }
    }

    impl Eq for Foo {}

    impl PartialOrd for Foo {
        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
            self.0.partial_cmp(&other.0)
        }
    }

    impl Ord for Foo {
        fn cmp(&self, other: &Self) -> Ordering {
            self.0.cmp(&other.0)
        }
    }

    let mut s = BTreeSet::new();
    assert_eq!(s.replace(Foo("a", 1)), None);
    assert_eq!(s.len(), 1);
    assert_eq!(s.replace(Foo("a", 2)), Some(Foo("a", 1)));
    assert_eq!(s.len(), 1);

    {
        let mut it = s.iter();
        assert_eq!(it.next(), Some(&Foo("a", 2)));
        assert_eq!(it.next(), None);
    }

    assert_eq!(s.get(&Foo("a", 1)), Some(&Foo("a", 2)));
    assert_eq!(s.take(&Foo("a", 1)), Some(Foo("a", 2)));
    assert_eq!(s.len(), 0);

    assert_eq!(s.get(&Foo("a", 1)), None);
    assert_eq!(s.take(&Foo("a", 1)), None);

    assert_eq!(s.iter().next(), None);
}

#[allow(dead_code)]
fn test_variance() {
    fn set<'new>(v: BTreeSet<&'static str>) -> BTreeSet<&'new str> {
        v
    }
    fn iter<'a, 'new>(v: Iter<'a, &'static str>) -> Iter<'a, &'new str> {
        v
    }
    fn into_iter<'new>(v: IntoIter<&'static str>) -> IntoIter<&'new str> {
        v
    }
    fn range<'a, 'new>(v: Range<'a, &'static str>) -> Range<'a, &'new str> {
        v
    }
    // not applied to Difference, Intersection, SymmetricDifference, Union
}

#[allow(dead_code)]
fn test_sync() {
    fn set<T: Sync>(v: &BTreeSet<T>) -> impl Sync + '_ {
        v
    }

    fn iter<T: Sync>(v: &BTreeSet<T>) -> impl Sync + '_ {
        v.iter()
    }

    fn into_iter<T: Sync>(v: BTreeSet<T>) -> impl Sync {
        v.into_iter()
    }

    fn range<T: Sync + Ord>(v: &BTreeSet<T>) -> impl Sync + '_ {
        v.range(..)
    }

    fn drain_filter<T: Sync + Ord>(v: &mut BTreeSet<T>) -> impl Sync + '_ {
        v.drain_filter(|_| false)
    }

    fn difference<T: Sync + Ord>(v: &BTreeSet<T>) -> impl Sync + '_ {
        v.difference(&v)
    }

    fn intersection<T: Sync + Ord>(v: &BTreeSet<T>) -> impl Sync + '_ {
        v.intersection(&v)
    }

    fn symmetric_difference<T: Sync + Ord>(v: &BTreeSet<T>) -> impl Sync + '_ {
        v.symmetric_difference(&v)
    }

    fn union<T: Sync + Ord>(v: &BTreeSet<T>) -> impl Sync + '_ {
        v.union(&v)
    }
}

#[allow(dead_code)]
fn test_send() {
    fn set<T: Send>(v: BTreeSet<T>) -> impl Send {
        v
    }

    fn iter<T: Send + Sync>(v: &BTreeSet<T>) -> impl Send + '_ {
        v.iter()
    }

    fn into_iter<T: Send>(v: BTreeSet<T>) -> impl Send {
        v.into_iter()
    }

    fn range<T: Send + Sync + Ord>(v: &BTreeSet<T>) -> impl Send + '_ {
        v.range(..)
    }

    fn drain_filter<T: Send + Ord>(v: &mut BTreeSet<T>) -> impl Send + '_ {
        v.drain_filter(|_| false)
    }

    fn difference<T: Send + Sync + Ord>(v: &BTreeSet<T>) -> impl Send + '_ {
        v.difference(&v)
    }

    fn intersection<T: Send + Sync + Ord>(v: &BTreeSet<T>) -> impl Send + '_ {
        v.intersection(&v)
    }

    fn symmetric_difference<T: Send + Sync + Ord>(v: &BTreeSet<T>) -> impl Send + '_ {
        v.symmetric_difference(&v)
    }

    fn union<T: Send + Sync + Ord>(v: &BTreeSet<T>) -> impl Send + '_ {
        v.union(&v)
    }
}

#[allow(dead_code)]
fn test_ord_absence() {
    fn set<K>(mut set: BTreeSet<K>) {
        set.is_empty();
        set.len();
        set.clear();
        set.iter();
        set.into_iter();
    }

    fn set_debug<K: Debug>(set: BTreeSet<K>) {
        format!("{:?}", set);
        format!("{:?}", set.iter());
        format!("{:?}", set.into_iter());
    }

    fn set_clone<K: Clone>(mut set: BTreeSet<K>) {
        set.clone_from(&set.clone());
    }
}

#[test]
fn test_append() {
    let mut a = BTreeSet::new();
    a.insert(1);
    a.insert(2);
    a.insert(3);

    let mut b = BTreeSet::new();
    b.insert(3);
    b.insert(4);
    b.insert(5);

    a.append(&mut b);

    assert_eq!(a.len(), 5);
    assert_eq!(b.len(), 0);

    assert_eq!(a.contains(&1), true);
    assert_eq!(a.contains(&2), true);
    assert_eq!(a.contains(&3), true);
    assert_eq!(a.contains(&4), true);
    assert_eq!(a.contains(&5), true);
}

#[test]
fn test_first_last() {
    let mut a = BTreeSet::new();
    assert_eq!(a.first(), None);
    assert_eq!(a.last(), None);
    a.insert(1);
    assert_eq!(a.first(), Some(&1));
    assert_eq!(a.last(), Some(&1));
    a.insert(2);
    assert_eq!(a.first(), Some(&1));
    assert_eq!(a.last(), Some(&2));
    for i in 3..=12 {
        a.insert(i);
    }
    assert_eq!(a.first(), Some(&1));
    assert_eq!(a.last(), Some(&12));
    assert_eq!(a.pop_first(), Some(1));
    assert_eq!(a.pop_last(), Some(12));
    assert_eq!(a.pop_first(), Some(2));
    assert_eq!(a.pop_last(), Some(11));
    assert_eq!(a.pop_first(), Some(3));
    assert_eq!(a.pop_last(), Some(10));
    assert_eq!(a.pop_first(), Some(4));
    assert_eq!(a.pop_first(), Some(5));
    assert_eq!(a.pop_first(), Some(6));
    assert_eq!(a.pop_first(), Some(7));
    assert_eq!(a.pop_first(), Some(8));
    assert_eq!(a.clone().pop_last(), Some(9));
    assert_eq!(a.pop_first(), Some(9));
    assert_eq!(a.pop_first(), None);
    assert_eq!(a.pop_last(), None);
}

// Unlike the function with the same name in map/tests, returns no values.
// Which also means it returns different predetermined pseudo-random keys,
// and the test cases using this function explore slightly different trees.
fn rand_data(len: usize) -> Vec<u32> {
    let mut rng = DeterministicRng::new();
    Vec::from_iter((0..len).map(|_| rng.next()))
}
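// A sketch of the basic `split_off` contract (public API, with fixed data
// rather than the pseudo-random data used below): keys greater than or equal
// to the argument move to the returned set, the rest stay.
#[test]
fn test_split_off_basic() {
    let mut set: BTreeSet<i32> = (0..10).collect();
    let right = set.split_off(&7);
    assert!(set.into_iter().eq(0..7));
    assert!(right.into_iter().eq(7..10));
}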
#[test]
fn test_split_off_empty_right() {
    let mut data = rand_data(173);

    let mut set = BTreeSet::from_iter(data.clone());
    let right = set.split_off(&(data.iter().max().unwrap() + 1));

    data.sort();
    assert!(set.into_iter().eq(data));
    assert!(right.into_iter().eq(None));
}

#[test]
fn test_split_off_empty_left() {
    let mut data = rand_data(314);

    let mut set = BTreeSet::from_iter(data.clone());
    let right = set.split_off(data.iter().min().unwrap());

    data.sort();
    assert!(set.into_iter().eq(None));
    assert!(right.into_iter().eq(data));
}

#[test]
fn test_split_off_large_random_sorted() {
    // Miri is too slow
    let mut data = if cfg!(miri) { rand_data(529) } else { rand_data(1529) };
    // special case with maximum height.
    data.sort();

    let mut set = BTreeSet::from_iter(data.clone());
    let key = data[data.len() / 2];
    let right = set.split_off(&key);

    assert!(set.into_iter().eq(data.clone().into_iter().filter(|x| *x < key)));
    assert!(right.into_iter().eq(data.into_iter().filter(|x| *x >= key)));
}

use super::DormantMutRef;

#[test]
fn test_borrow() {
    let mut data = 1;
    let mut stack = vec![];
    let mut rr = &mut data;
    for factor in [2, 3, 7].iter() {
        let (r, dormant_r) = DormantMutRef::new(rr);
        rr = r;
        assert_eq!(*rr, 1);
        stack.push((factor, dormant_r));
    }
    while let Some((factor, dormant_r)) = stack.pop() {
        let r = unsafe { dormant_r.awaken() };
        *r *= factor;
    }
    assert_eq!(data, 42);
}

use core::fmt::{self, Debug};
use core::marker::PhantomData;
use core::mem;

use super::super::borrow::DormantMutRef;
use super::super::node::{marker, Handle, InsertResult::*, NodeRef};
use super::BTreeMap;

use Entry::*;

/// A view into a single entry in a map, which may either be vacant or occupied.
///
/// This `enum` is constructed from the [`entry`] method on [`BTreeMap`].
///
/// [`entry`]: BTreeMap::entry
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Entry<'a, K: 'a, V: 'a> {
    /// A vacant entry.
    #[stable(feature = "rust1", since = "1.0.0")]
    Vacant(#[stable(feature = "rust1", since = "1.0.0")] VacantEntry<'a, K, V>),

    /// An occupied entry.
    #[stable(feature = "rust1", since = "1.0.0")]
    Occupied(#[stable(feature = "rust1", since = "1.0.0")] OccupiedEntry<'a, K, V>),
}

#[stable(feature = "debug_btree_map", since = "1.12.0")]
impl<K: Debug + Ord, V: Debug> Debug for Entry<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
            Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
        }
    }
}

/// A view into a vacant entry in a `BTreeMap`.
/// It is part of the [`Entry`] enum.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct VacantEntry<'a, K: 'a, V: 'a> {
    pub(super) key: K,
    pub(super) handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
    pub(super) dormant_map: DormantMutRef<'a, BTreeMap<K, V>>,

    // Be invariant in `K` and `V`
    pub(super) _marker: PhantomData<&'a mut (K, V)>,
}

#[stable(feature = "debug_btree_map", since = "1.12.0")]
impl<K: Debug + Ord, V> Debug for VacantEntry<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("VacantEntry").field(self.key()).finish()
    }
}

/// A view into an occupied entry in a `BTreeMap`.
/// It is part of the [`Entry`] enum.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct OccupiedEntry<'a, K: 'a, V: 'a> {
    pub(super) handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>,
    pub(super) dormant_map: DormantMutRef<'a, BTreeMap<K, V>>,

    // Be invariant in `K` and `V`
    pub(super) _marker: PhantomData<&'a mut (K, V)>,
}

#[stable(feature = "debug_btree_map", since = "1.12.0")]
impl<K: Debug + Ord, V: Debug> Debug for OccupiedEntry<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("OccupiedEntry").field("key", self.key()).field("value", self.get()).finish()
    }
}

/// The error returned by [`try_insert`](BTreeMap::try_insert) when the key already exists.
///
/// Contains the occupied entry, and the value that was not inserted.
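///
/// A sketch of how this error is produced (requires the unstable
/// `map_try_insert` feature):
///
/// ```
/// #![feature(map_try_insert)]
///
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
/// map.insert("key", 1);
/// let err = map.try_insert("key", 2).unwrap_err();
/// assert_eq!(err.entry.key(), &"key");
/// assert_eq!(err.entry.get(), &1);
/// assert_eq!(err.value, 2);
/// ```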
#[unstable(feature = "map_try_insert", issue = "82766")]
pub struct OccupiedError<'a, K: 'a, V: 'a> {
    /// The entry in the map that was already occupied.
    pub entry: OccupiedEntry<'a, K, V>,
    /// The value which was not inserted, because the entry was already occupied.
    pub value: V,
}

#[unstable(feature = "map_try_insert", issue = "82766")]
impl<K: Debug + Ord, V: Debug> Debug for OccupiedError<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("OccupiedError")
            .field("key", self.entry.key())
            .field("old_value", self.entry.get())
            .field("new_value", &self.value)
            .finish()
    }
}

#[unstable(feature = "map_try_insert", issue = "82766")]
impl<'a, K: Debug + Ord, V: Debug> fmt::Display for OccupiedError<'a, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "failed to insert {:?}, key {:?} already exists with value {:?}",
            self.value,
            self.entry.key(),
            self.entry.get(),
        )
    }
}

impl<'a, K: Ord, V> Entry<'a, K, V> {
    /// Ensures a value is in the entry by inserting the default if empty, and returns
    /// a mutable reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// assert_eq!(map["poneyland"], 12);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn or_insert(self, default: V) -> &'a mut V {
        match self {
            Occupied(entry) => entry.into_mut(),
            Vacant(entry) => entry.insert(default),
        }
    }

    /// Ensures a value is in the entry by inserting the result of the default function if empty,
    /// and returns a mutable reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map: BTreeMap<&str, String> = BTreeMap::new();
    /// let s = "hoho".to_string();
    ///
    /// map.entry("poneyland").or_insert_with(|| s);
    ///
    /// assert_eq!(map["poneyland"], "hoho".to_string());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
        match self {
            Occupied(entry) => entry.into_mut(),
            Vacant(entry) => entry.insert(default()),
        }
    }

    /// Ensures a value is in the entry by inserting, if empty, the result of the default function.
    /// This method allows for generating key-derived values for insertion by providing the default
    /// function a reference to the key that was moved during the `.entry(key)` method call.
    ///
    /// The reference to the moved key is provided so that cloning or copying the key is
    /// unnecessary, unlike with `.or_insert_with(|| ... )`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    ///
    /// map.entry("poneyland").or_insert_with_key(|key| key.chars().count());
    ///
    /// assert_eq!(map["poneyland"], 9);
    /// ```
    #[inline]
    #[stable(feature = "or_insert_with_key", since = "1.50.0")]
    pub fn or_insert_with_key<F: FnOnce(&K) -> V>(self, default: F) -> &'a mut V {
        match self {
            Occupied(entry) => entry.into_mut(),
            Vacant(entry) => {
                let value = default(entry.key());
                entry.insert(value)
            }
        }
    }

    /// Returns a reference to this entry's key.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[stable(feature = "map_entry_keys", since = "1.10.0")]
    pub fn key(&self) -> &K {
        match *self {
            Occupied(ref entry) => entry.key(),
            Vacant(ref entry) => entry.key(),
        }
    }

    /// Provides in-place mutable access to an occupied entry before any
    /// potential inserts into the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    ///
    /// map.entry("poneyland")
    ///    .and_modify(|e| { *e += 1 })
    ///    .or_insert(42);
    /// assert_eq!(map["poneyland"], 42);
    ///
    /// map.entry("poneyland")
    ///    .and_modify(|e| { *e += 1 })
    ///    .or_insert(42);
    /// assert_eq!(map["poneyland"], 43);
    /// ```
    #[stable(feature = "entry_and_modify", since = "1.26.0")]
    pub fn and_modify<F>(self, f: F) -> Self
    where
        F: FnOnce(&mut V),
    {
        match self {
            Occupied(mut entry) => {
                f(entry.get_mut());
                Occupied(entry)
            }
            Vacant(entry) => Vacant(entry),
        }
    }
}

impl<'a, K: Ord, V: Default> Entry<'a, K, V> {
    #[stable(feature = "entry_or_default", since = "1.28.0")]
    /// Ensures a value is in the entry by inserting the default value if empty,
    /// and returns a mutable reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map: BTreeMap<&str, Option<usize>> = BTreeMap::new();
    /// map.entry("poneyland").or_default();
    ///
    /// assert_eq!(map["poneyland"], None);
    /// ```
    pub fn or_default(self) -> &'a mut V {
        match self {
            Occupied(entry) => entry.into_mut(),
            Vacant(entry) => entry.insert(Default::default()),
        }
    }
}

impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
    /// Gets a reference to the key that would be used when inserting a value
    /// through the VacantEntry.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[stable(feature = "map_entry_keys", since = "1.10.0")]
    pub fn key(&self) -> &K {
        &self.key
    }

    /// Take ownership of the key.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    /// use std::collections::btree_map::Entry;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    ///
    /// if let Entry::Vacant(v) = map.entry("poneyland") {
    ///     v.into_key();
    /// }
    /// ```
    #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
    pub fn into_key(self) -> K {
        self.key
    }

    /// Sets the value of the entry with the `VacantEntry`'s key,
    /// and returns a mutable reference to it.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    /// use std::collections::btree_map::Entry;
    ///
    /// let mut map: BTreeMap<&str, u32> = BTreeMap::new();
    ///
    /// if let Entry::Vacant(o) = map.entry("poneyland") {
    ///     o.insert(37);
    /// }
    /// assert_eq!(map["poneyland"], 37);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn insert(self, value: V) -> &'a mut V {
        let out_ptr = match self.handle.insert_recursing(self.key, value) {
            (Fit(_), val_ptr) => {
                // SAFETY: We have consumed self.handle and the handle returned.
                let map = unsafe { self.dormant_map.awaken() };
                map.length += 1;
                val_ptr
            }
            (Split(ins), val_ptr) => {
                drop(ins.left);
                // SAFETY: We have consumed self.handle and the reference returned.
                let map = unsafe { self.dormant_map.awaken() };
                let root = map.root.as_mut().unwrap();
                root.push_internal_level().push(ins.kv.0, ins.kv.1, ins.right);
                map.length += 1;
                val_ptr
            }
        };
        // Now that we have finished growing the tree using borrowed references,
        // dereference the pointer to a part of it, that we picked up along the way.
        unsafe { &mut *out_ptr }
    }
}
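// Commentary on `VacantEntry::insert` above (an observation, not normative):
// `handle` mutably borrows the tree while `dormant_map` keeps the outer
// `&mut BTreeMap` inactive; only once the node-level insertion is finished is
// the map reference awakened to update `length` and, after a root split, push
// a new internal level. This is the `DormantMutRef` pattern from `borrow.rs`,
// exercised directly by `test_borrow` earlier.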
impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> {
    /// Gets a reference to the key in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    /// map.entry("poneyland").or_insert(12);
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[stable(feature = "map_entry_keys", since = "1.10.0")]
    pub fn key(&self) -> &K {
        self.handle.reborrow().into_kv().0
    }

    /// Take ownership of the key and value from the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    /// use std::collections::btree_map::Entry;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     // We delete the entry from the map.
    ///     o.remove_entry();
    /// }
    ///
    /// // If we now try to get the value, it will panic:
    /// // println!("{}", map["poneyland"]);
    /// ```
    #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
    pub fn remove_entry(self) -> (K, V) {
        self.remove_kv()
    }

    /// Gets a reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    /// use std::collections::btree_map::Entry;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     assert_eq!(o.get(), &12);
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn get(&self) -> &V {
        self.handle.reborrow().into_kv().1
    }

    /// Gets a mutable reference to the value in the entry.
    ///
    /// If you need a reference to the `OccupiedEntry` that may outlive the
    /// destruction of the `Entry` value, see [`into_mut`].
    ///
    /// [`into_mut`]: OccupiedEntry::into_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    /// use std::collections::btree_map::Entry;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// assert_eq!(map["poneyland"], 12);
    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
    ///     *o.get_mut() += 10;
    ///     assert_eq!(*o.get(), 22);
    ///
    ///     // We can use the same Entry multiple times.
    ///     *o.get_mut() += 2;
    /// }
    /// assert_eq!(map["poneyland"], 24);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn get_mut(&mut self) -> &mut V {
        self.handle.kv_mut().1
    }

    /// Converts the entry into a mutable reference to its value.
    ///
    /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
    ///
    /// [`get_mut`]: OccupiedEntry::get_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    /// use std::collections::btree_map::Entry;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// assert_eq!(map["poneyland"], 12);
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     *o.into_mut() += 10;
    /// }
    /// assert_eq!(map["poneyland"], 22);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn into_mut(self) -> &'a mut V {
        self.handle.into_val_mut()
    }

    /// Sets the value of the entry with the `OccupiedEntry`'s key,
    /// and returns the entry's old value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    /// use std::collections::btree_map::Entry;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
    ///     assert_eq!(o.insert(15), 12);
    /// }
    /// assert_eq!(map["poneyland"], 15);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn insert(&mut self, value: V) -> V {
        mem::replace(self.get_mut(), value)
    }

    /// Takes the value of the entry out of the map, and returns it.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    /// use std::collections::btree_map::Entry;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     assert_eq!(o.remove(), 12);
    /// }
    /// // If we try to get "poneyland"'s value, it'll panic:
    /// // println!("{}", map["poneyland"]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn remove(self) -> V {
        self.remove_kv().1
    }

    // Body of `remove_entry`, probably separate because the name reflects the returned pair.
    pub(super) fn remove_kv(self) -> (K, V) {
        let mut emptied_internal_root = false;
        let (old_kv, _) = self.handle.remove_kv_tracking(|| emptied_internal_root = true);
        // SAFETY: we consumed the intermediate root borrow, `self.handle`.
        let map = unsafe { self.dormant_map.awaken() };
        map.length -= 1;
        if emptied_internal_root {
            let root = map.root.as_mut().unwrap();
            root.pop_internal_level();
        }
        old_kv
    }
}

use super::super::testing::crash_test::{CrashTestDummy, Panic};
use super::super::testing::ord_chaos::{Cyclic3, Governed, Governor};
use super::super::testing::rng::DeterministicRng;
use super::Entry::{Occupied, Vacant};
use super::*;
use crate::boxed::Box;
use crate::fmt::Debug;
use crate::rc::Rc;
use crate::string::{String, ToString};
use crate::vec::Vec;
use std::cmp::Ordering;
use std::convert::TryFrom;
use std::iter::{self, FromIterator};
use std::mem;
use std::ops::Bound::{self, Excluded, Included, Unbounded};
use std::ops::RangeBounds;
use std::panic::{catch_unwind, AssertUnwindSafe};
use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};

// Capacity of a tree with a single level,
// i.e., a tree whose root is a leaf node at height 0.
const NODE_CAPACITY: usize = node::CAPACITY;

// Minimum number of elements to insert, to guarantee a tree with 2 levels,
// i.e., a tree whose root is an internal node at height 1, with edges to leaf nodes.
// It's not the minimum size: removing an element from such a tree does not always reduce height.
const MIN_INSERTS_HEIGHT_1: usize = NODE_CAPACITY + 1;

// Minimum number of elements to insert in ascending order, to guarantee a tree with 3 levels,
// i.e., a tree whose root is an internal node at height 2, with edges to more internal nodes.
// It's not the minimum size: removing an element from such a tree does not always reduce height.
const MIN_INSERTS_HEIGHT_2: usize = 89;
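// A sketch of why MIN_INSERTS_HEIGHT_1 is NODE_CAPACITY + 1 (uses this file's
// `height` and `check` helpers): a root leaf holds up to NODE_CAPACITY
// elements, and exactly one more ascending insertion forces the first split.
#[test]
fn test_min_inserts_height_1_is_tight() {
    let mut map = BTreeMap::new();
    for i in 0..NODE_CAPACITY {
        map.insert(i, ());
    }
    assert_eq!(map.height(), Some(0));
    map.insert(NODE_CAPACITY, ());
    assert_eq!(map.height(), Some(1));
    map.check();
}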
// Gathers all references from a mutable iterator and makes sure Miri notices if
// using them is dangerous.
fn test_all_refs<'a, T: 'a>(dummy: &mut T, iter: impl Iterator<Item = &'a mut T>) {
    // Gather all those references.
    let mut refs: Vec<&mut T> = iter.collect();
    // Use them all. Twice, to be sure we got all interleavings.
    for r in refs.iter_mut() {
        mem::swap(dummy, r);
    }
    for r in refs {
        mem::swap(dummy, r);
    }
}

impl<K, V> BTreeMap<K, V> {
    // Panics if the map (or the code navigating it) is corrupted.
    fn check_invariants(&self) {
        if let Some(root) = &self.root {
            let root_node = root.reborrow();

            // Check the back pointers top-down, before we attempt to rely on
            // more serious navigation code.
            assert!(root_node.ascend().is_err());
            root_node.assert_back_pointers();

            // Check consistency of `length` with what navigation code encounters.
            assert_eq!(self.length, root_node.calc_length());

            // Lastly, check the invariant causing the least harm.
            root_node.assert_min_len(if root_node.height() > 0 { 1 } else { 0 });
        } else {
            assert_eq!(self.length, 0);
        }

        // Check that `assert_strictly_ascending` will encounter all keys.
        assert_eq!(self.length, self.keys().count());
    }

    // Panics if the map is corrupted or if the keys are not in strictly
    // ascending order, in the current opinion of the `Ord` implementation.
    // If the `Ord` implementation violates transitivity, this method does not
    // guarantee that all keys are unique, just that adjacent keys are unique.
    fn check(&self)
    where
        K: Debug + Ord,
    {
        self.check_invariants();
        self.assert_strictly_ascending();
    }

    // Returns the height of the root, if any.
    fn height(&self) -> Option<usize> {
        self.root.as_ref().map(node::Root::height)
    }

    fn dump_keys(&self) -> String
    where
        K: Debug,
    {
        if let Some(root) = self.root.as_ref() {
            root.reborrow().dump_keys()
        } else {
            String::from("not yet allocated")
        }
    }

    // Panics if the keys are not in strictly ascending order.
    fn assert_strictly_ascending(&self)
    where
        K: Debug + Ord,
    {
        let mut keys = self.keys();
        if let Some(mut previous) = keys.next() {
            for next in keys {
                assert!(previous < next, "{:?} >= {:?}", previous, next);
                previous = next;
            }
        }
    }

    // Transform the tree to minimize wasted space, obtaining fewer nodes that
    // are mostly filled up to their capacity. The same compact tree could have
    // been obtained by inserting keys in a shrewd order.
    fn compact(&mut self)
    where
        K: Ord,
    {
        let iter = mem::take(self).into_iter();
        let root = BTreeMap::ensure_is_owned(&mut self.root);
        root.bulk_push(iter, &mut self.length);
    }
}

impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
    fn assert_min_len(self, min_len: usize) {
        assert!(self.len() >= min_len, "node len {} < {}", self.len(), min_len);
        if let node::ForceResult::Internal(node) = self.force() {
            for idx in 0..=node.len() {
                let edge = unsafe { Handle::new_edge(node, idx) };
                edge.descend().assert_min_len(MIN_LEN);
            }
        }
    }
}

// Tests our value of MIN_INSERTS_HEIGHT_2. Failure may mean you just need to
// adapt that value to match a change in node::CAPACITY or the choices made
// during insertion, otherwise other test cases may fail or be less useful.
#[test]
fn test_levels() {
    let mut map = BTreeMap::new();
    map.check();
    assert_eq!(map.height(), None);
    assert_eq!(map.len(), 0);

    map.insert(0, ());
    while map.height() == Some(0) {
        let last_key = *map.last_key_value().unwrap().0;
        map.insert(last_key + 1, ());
    }
    map.check();
    // Structure:
    // - 1 element in internal root node with 2 children
    // - 6 elements in left leaf child
    // - 5 elements in right leaf child
    assert_eq!(map.height(), Some(1));
    assert_eq!(map.len(), MIN_INSERTS_HEIGHT_1, "{}", map.dump_keys());

    while map.height() == Some(1) {
        let last_key = *map.last_key_value().unwrap().0;
        map.insert(last_key + 1, ());
    }
    map.check();
    // Structure:
    // - 1 element in internal root node with 2 children
    // - 6 elements in left internal child with 7 grandchildren
    // - 42 elements in left child's 7 grandchildren with 6 elements each
    // - 5 elements in right internal child with 6 grandchildren
    // - 30 elements in right child's 5 first grandchildren with 6 elements each
    // - 5 elements in right child's last grandchild
    assert_eq!(map.height(), Some(2));
    assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2, "{}", map.dump_keys());
}

// Ensures the testing infrastructure usually notices order violations.
#[test]
#[should_panic]
fn test_check_ord_chaos() {
    let gov = Governor::new();
    let map: BTreeMap<_, _> = (0..2).map(|i| (Governed(i, &gov), ())).collect();
    gov.flip();
    map.check();
}

// Ensures the testing infrastructure doesn't always mind order violations.
#[test]
fn test_check_invariants_ord_chaos() {
    let gov = Governor::new();
    let map: BTreeMap<_, _> = (0..2).map(|i| (Governed(i, &gov), ())).collect();
    gov.flip();
    map.check_invariants();
}

#[test]
fn test_basic_large() {
    let mut map = BTreeMap::new();
    // Miri is too slow
    let size = if cfg!(miri) { MIN_INSERTS_HEIGHT_2 } else { 10000 };
    let size = size + (size % 2); // round up to even number
    assert_eq!(map.len(), 0);

    for i in 0..size {
        assert_eq!(map.insert(i, 10 * i), None);
        assert_eq!(map.len(), i + 1);
    }

    assert_eq!(map.first_key_value(), Some((&0, &0)));
    assert_eq!(map.last_key_value(), Some((&(size - 1), &(10 * (size - 1)))));
    assert_eq!(map.first_entry().unwrap().key(), &0);
    assert_eq!(map.last_entry().unwrap().key(), &(size - 1));

    for i in 0..size {
        assert_eq!(map.get(&i).unwrap(), &(i * 10));
    }

    for i in size..size * 2 {
        assert_eq!(map.get(&i), None);
    }

    for i in 0..size {
        assert_eq!(map.insert(i, 100 * i), Some(10 * i));
        assert_eq!(map.len(), size);
    }

    for i in 0..size {
        assert_eq!(map.get(&i).unwrap(), &(i * 100));
    }

    for i in 0..size / 2 {
        assert_eq!(map.remove(&(i * 2)), Some(i * 200));
        assert_eq!(map.len(), size - i - 1);
    }

    for i in 0..size / 2 {
        assert_eq!(map.get(&(2 * i)), None);
        assert_eq!(map.get(&(2 * i + 1)).unwrap(), &(i * 200 + 100));
    }

    for i in 0..size / 2 {
        assert_eq!(map.remove(&(2 * i)), None);
        assert_eq!(map.remove(&(2 * i + 1)), Some(i * 200 + 100));
        assert_eq!(map.len(), size / 2 - i - 1);
    }
    map.check();
}

#[test]
fn test_basic_small() {
    let mut map = BTreeMap::new();
    // Empty, root is absent (None):
    assert_eq!(map.remove(&1), None);
    assert_eq!(map.len(), 0);
    assert_eq!(map.get(&1), None);
    assert_eq!(map.get_mut(&1), None);
    assert_eq!(map.first_key_value(), None);
    assert_eq!(map.last_key_value(), None);
    assert_eq!(map.keys().count(), 0);
    assert_eq!(map.values().count(), 0);
    assert_eq!(map.range(..).next(), None);
    assert_eq!(map.range(..1).next(), None);
    assert_eq!(map.range(1..).next(), None);
    assert_eq!(map.range(1..=1).next(), None);
    assert_eq!(map.range(1..2).next(), None);
    assert_eq!(map.height(), None);
    assert_eq!(map.insert(1, 1), None);
    assert_eq!(map.height(), Some(0));
    map.check();

    // 1 key-value pair:
    assert_eq!(map.len(), 1);
    assert_eq!(map.get(&1), Some(&1));
    assert_eq!(map.get_mut(&1), Some(&mut 1));
    assert_eq!(map.first_key_value(), Some((&1, &1)));
    assert_eq!(map.last_key_value(), Some((&1, &1)));
    assert_eq!(map.keys().collect::<Vec<_>>(), vec![&1]);
    assert_eq!(map.values().collect::<Vec<_>>(), vec![&1]);
    assert_eq!(map.insert(1, 2), Some(1));
    assert_eq!(map.len(), 1);
    assert_eq!(map.get(&1), Some(&2));
    assert_eq!(map.get_mut(&1), Some(&mut 2));
    assert_eq!(map.first_key_value(), Some((&1, &2)));
    assert_eq!(map.last_key_value(), Some((&1, &2)));
    assert_eq!(map.keys().collect::<Vec<_>>(), vec![&1]);
    assert_eq!(map.values().collect::<Vec<_>>(), vec![&2]);
    assert_eq!(map.insert(2, 4), None);
    assert_eq!(map.height(), Some(0));
    map.check();

    // 2 key-value pairs:
    assert_eq!(map.len(), 2);
    assert_eq!(map.get(&2), Some(&4));
    assert_eq!(map.get_mut(&2), Some(&mut 4));
    assert_eq!(map.first_key_value(), Some((&1, &2)));
    assert_eq!(map.last_key_value(), Some((&2, &4)));
    assert_eq!(map.keys().collect::<Vec<_>>(), vec![&1, &2]);
    assert_eq!(map.values().collect::<Vec<_>>(), vec![&2, &4]);
    assert_eq!(map.remove(&1), Some(2));
    assert_eq!(map.height(), Some(0));
    map.check();

    // 1 key-value pair:
    assert_eq!(map.len(), 1);
    assert_eq!(map.get(&1), None);
    assert_eq!(map.get_mut(&1), None);
    assert_eq!(map.get(&2), Some(&4));
    assert_eq!(map.get_mut(&2), Some(&mut 4));
    assert_eq!(map.first_key_value(), Some((&2, &4)));
    assert_eq!(map.last_key_value(), Some((&2, &4)));
    assert_eq!(map.keys().collect::<Vec<_>>(), vec![&2]);
    assert_eq!(map.values().collect::<Vec<_>>(), vec![&4]);
    assert_eq!(map.remove(&2), Some(4));
    assert_eq!(map.height(), Some(0));
    map.check();

    // Empty but root is owned (Some(...)):
    assert_eq!(map.len(), 0);
    assert_eq!(map.get(&1), None);
    assert_eq!(map.get_mut(&1), None);
    assert_eq!(map.first_key_value(), None);
    assert_eq!(map.last_key_value(), None);
    assert_eq!(map.keys().count(), 0);
    assert_eq!(map.values().count(), 0);
    assert_eq!(map.range(..).next(), None);
    assert_eq!(map.range(..1).next(), None);
    assert_eq!(map.range(1..).next(), None);
    assert_eq!(map.range(1..=1).next(), None);
    assert_eq!(map.range(1..2).next(), None);
    assert_eq!(map.remove(&1), None);
    assert_eq!(map.height(), Some(0));
    map.check();
}

#[test]
fn test_iter() {
    // Miri is too slow
    let size = if cfg!(miri) { 200 } else { 10000 };
    let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();

    fn test<T>(size: usize, mut iter: T)
    where
        T: Iterator<Item = (usize, usize)>,
    {
        for i in 0..size {
            assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
            assert_eq!(iter.next().unwrap(), (i, i));
        }
        assert_eq!(iter.size_hint(), (0, Some(0)));
        assert_eq!(iter.next(), None);
    }
    test(size, map.iter().map(|(&k, &v)| (k, v)));
    test(size, map.iter_mut().map(|(&k, &mut v)| (k, v)));
    test(size, map.into_iter());
}

#[test]
fn test_iter_rev() {
    // Miri is too slow
    let size = if cfg!(miri) { 200 } else { 10000 };
    let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();

    fn test<T>(size: usize, mut iter: T)
    where
        T: Iterator<Item = (usize, usize)>,
    {
        for i in 0..size {
            assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
            assert_eq!(iter.next().unwrap(), (size - i - 1, size - i - 1));
        }
        assert_eq!(iter.size_hint(), (0, Some(0)));
        assert_eq!(iter.next(), None);
    }
    test(size, map.iter().rev().map(|(&k, &v)| (k, v)));
    test(size, map.iter_mut().rev().map(|(&k, &mut v)| (k, v)));
    test(size, map.into_iter().rev());
}
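// A small extra sketch (public API only): forward and reverse iteration must
// agree on content and be exact mirrors, which the two tests above probe via
// size hints at a larger scale.
#[test]
fn test_iter_rev_matches_iter() {
    let map: BTreeMap<i32, i32> = (0..64).map(|i| (i, i)).collect();
    let forward: Vec<_> = map.iter().collect();
    let mut backward: Vec<_> = map.iter().rev().collect();
    backward.reverse();
    assert_eq!(forward, backward);
}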
// Specifically tests iter_mut's ability to mutate the value of pairs in-line.
fn do_test_iter_mut_mutation<T>(size: usize)
where
    T: Copy + Debug + Ord + TryFrom<usize>,
    <T as TryFrom<usize>>::Error: Debug,
{
    let zero = T::try_from(0).unwrap();
    let mut map: BTreeMap<T, T> = (0..size).map(|i| (T::try_from(i).unwrap(), zero)).collect();

    // Forward and backward iteration sees enough pairs (also tested elsewhere)
    assert_eq!(map.iter_mut().count(), size);
    assert_eq!(map.iter_mut().rev().count(), size);

    // Iterate forwards, trying to mutate to unique values
    for (i, (k, v)) in map.iter_mut().enumerate() {
        assert_eq!(*k, T::try_from(i).unwrap());
        assert_eq!(*v, zero);
        *v = T::try_from(i + 1).unwrap();
    }

    // Iterate backwards, checking that mutations succeeded and trying to mutate again
    for (i, (k, v)) in map.iter_mut().rev().enumerate() {
        assert_eq!(*k, T::try_from(size - i - 1).unwrap());
        assert_eq!(*v, T::try_from(size - i).unwrap());
        *v = T::try_from(2 * size - i).unwrap();
    }

    // Check that backward mutations succeeded
    for (i, (k, v)) in map.iter_mut().enumerate() {
        assert_eq!(*k, T::try_from(i).unwrap());
        assert_eq!(*v, T::try_from(size + i + 1).unwrap());
    }
    map.check();
}

#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
#[repr(align(32))]
struct Align32(usize);

impl TryFrom<usize> for Align32 {
    type Error = ();

    fn try_from(s: usize) -> Result<Align32, ()> {
        Ok(Align32(s))
    }
}

#[test]
fn test_iter_mut_mutation() {
    // Check many alignments and trees with roots at various heights.
    do_test_iter_mut_mutation::<u8>(0);
    do_test_iter_mut_mutation::<u8>(1);
    do_test_iter_mut_mutation::<u8>(MIN_INSERTS_HEIGHT_1);
    do_test_iter_mut_mutation::<u8>(MIN_INSERTS_HEIGHT_2);
    do_test_iter_mut_mutation::<u16>(1);
    do_test_iter_mut_mutation::<u16>(MIN_INSERTS_HEIGHT_1);
    do_test_iter_mut_mutation::<u16>(MIN_INSERTS_HEIGHT_2);
    do_test_iter_mut_mutation::<u32>(1);
    do_test_iter_mut_mutation::<u32>(MIN_INSERTS_HEIGHT_1);
    do_test_iter_mut_mutation::<u32>(MIN_INSERTS_HEIGHT_2);
    do_test_iter_mut_mutation::<u64>(1);
    do_test_iter_mut_mutation::<u64>(MIN_INSERTS_HEIGHT_1);
    do_test_iter_mut_mutation::<u64>(MIN_INSERTS_HEIGHT_2);
    do_test_iter_mut_mutation::<u128>(1);
    do_test_iter_mut_mutation::<u128>(MIN_INSERTS_HEIGHT_1);
    do_test_iter_mut_mutation::<u128>(MIN_INSERTS_HEIGHT_2);
    do_test_iter_mut_mutation::<Align32>(1);
    do_test_iter_mut_mutation::<Align32>(MIN_INSERTS_HEIGHT_1);
    do_test_iter_mut_mutation::<Align32>(MIN_INSERTS_HEIGHT_2);
}

#[test]
fn test_values_mut() {
    let mut a: BTreeMap<_, _> = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)).collect();
    test_all_refs(&mut 13, a.values_mut());
    a.check();
}

#[test]
fn test_values_mut_mutation() {
    let mut a = BTreeMap::new();
    a.insert(1, String::from("hello"));
    a.insert(2, String::from("goodbye"));

    for value in a.values_mut() {
        value.push_str("!");
    }

    let values: Vec<String> = a.values().cloned().collect();
    assert_eq!(values, [String::from("hello!"), String::from("goodbye!")]);
    a.check();
}

#[test]
fn test_iter_entering_root_twice() {
    let mut map: BTreeMap<_, _> = (0..2).map(|i| (i, i)).collect();
    let mut it = map.iter_mut();
    let front = it.next().unwrap();
    let back = it.next_back().unwrap();
    assert_eq!(front, (&0, &mut 0));
    assert_eq!(back, (&1, &mut 1));
    *front.1 = 24;
    *back.1 = 42;
    assert_eq!(front, (&0, &mut 24));
    assert_eq!(back, (&1, &mut 42));
    assert_eq!(it.next(), None);
    assert_eq!(it.next_back(), None);
    map.check();
}

#[test]
fn test_iter_descending_to_same_node_twice() {
    let mut map: BTreeMap<_, _> = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i)).collect();
    let mut it = map.iter_mut();
    // Descend into first child.
    let front = it.next().unwrap();
    // Descend into first child again, after running through second child.
    while it.next_back().is_some() {}
    // Check immutable access.
    assert_eq!(front, (&0, &mut 0));
    // Perform mutable access.
    *front.1 = 42;
    map.check();
}

#[test]
fn test_iter_mixed() {
    // Miri is too slow
    let size = if cfg!(miri) { 200 } else { 10000 };
    let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();

    fn test<T>(size: usize, mut iter: T)
    where
        T: Iterator<Item = (usize, usize)> + DoubleEndedIterator,
    {
        for i in 0..size / 4 {
            assert_eq!(iter.size_hint(), (size - i * 2, Some(size - i * 2)));
            assert_eq!(iter.next().unwrap(), (i, i));
            assert_eq!(iter.next_back().unwrap(), (size - i - 1, size - i - 1));
        }
        for i in size / 4..size * 3 / 4 {
            assert_eq!(iter.size_hint(), (size * 3 / 4 - i, Some(size * 3 / 4 - i)));
            assert_eq!(iter.next().unwrap(), (i, i));
        }
        assert_eq!(iter.size_hint(), (0, Some(0)));
        assert_eq!(iter.next(), None);
    }
    test(size, map.iter().map(|(&k, &v)| (k, v)));
    test(size, map.iter_mut().map(|(&k, &mut v)| (k, v)));
    test(size, map.into_iter());
}

#[test]
fn test_iter_min_max() {
    let mut a = BTreeMap::new();
    assert_eq!(a.iter().min(), None);
    assert_eq!(a.iter().max(), None);
    assert_eq!(a.iter_mut().min(), None);
    assert_eq!(a.iter_mut().max(), None);
    assert_eq!(a.range(..).min(), None);
    assert_eq!(a.range(..).max(), None);
    assert_eq!(a.range_mut(..).min(), None);
    assert_eq!(a.range_mut(..).max(), None);
    assert_eq!(a.keys().min(), None);
    assert_eq!(a.keys().max(), None);
    assert_eq!(a.values().min(), None);
    assert_eq!(a.values().max(), None);
    assert_eq!(a.values_mut().min(), None);
    assert_eq!(a.values_mut().max(), None);
    a.insert(1, 42);
    a.insert(2, 24);
    assert_eq!(a.iter().min(), Some((&1, &42)));
    assert_eq!(a.iter().max(), Some((&2, &24)));
    assert_eq!(a.iter_mut().min(), Some((&1, &mut 42)));
    assert_eq!(a.iter_mut().max(), Some((&2, &mut 24)));
    assert_eq!(a.range(..).min(), Some((&1, &42)));
    assert_eq!(a.range(..).max(), Some((&2, &24)));
    assert_eq!(a.range_mut(..).min(), Some((&1, &mut 42)));
    assert_eq!(a.range_mut(..).max(), Some((&2, &mut 24)));
    assert_eq!(a.keys().min(), Some(&1));
    assert_eq!(a.keys().max(), Some(&2));
    assert_eq!(a.values().min(), Some(&24));
    assert_eq!(a.values().max(), Some(&42));
    assert_eq!(a.values_mut().min(), Some(&mut 24));
    assert_eq!(a.values_mut().max(), Some(&mut 42));
    a.check();
}

fn range_keys(map: &BTreeMap<i32, i32>, range: impl RangeBounds<i32>) -> Vec<i32> {
    map.range(range)
        .map(|(&k, &v)| {
            assert_eq!(k, v);
            k
        })
        .collect()
}

#[test]
fn test_range_small() {
    let size = 4;

    let map: BTreeMap<_, _> = (1..=size).map(|i| (i, i)).collect();
    let all: Vec<_> = (1..=size).collect();
    let (first, last) = (vec![all[0]], vec![all[size as usize - 1]]);

    assert_eq!(range_keys(&map, (Excluded(0), Excluded(size + 1))), all);
    assert_eq!(range_keys(&map, (Excluded(0), Included(size + 1))), all);
    assert_eq!(range_keys(&map, (Excluded(0), Included(size))), all);
    assert_eq!(range_keys(&map, (Excluded(0), Unbounded)), all);
    assert_eq!(range_keys(&map, (Included(0), Excluded(size + 1))), all);
    assert_eq!(range_keys(&map, (Included(0), Included(size + 1))), all);
    assert_eq!(range_keys(&map, (Included(0), Included(size))), all);
    assert_eq!(range_keys(&map, (Included(0), Unbounded)), all);
    assert_eq!(range_keys(&map, (Included(1), Excluded(size + 1))), all);
    assert_eq!(range_keys(&map, (Included(1), Included(size + 1))), all);
    assert_eq!(range_keys(&map, (Included(1), Included(size))), all);
    assert_eq!(range_keys(&map, (Included(1), Unbounded)), all);
    assert_eq!(range_keys(&map, (Unbounded, Excluded(size + 1))), all);
    assert_eq!(range_keys(&map, (Unbounded, Included(size + 1))), all);
    assert_eq!(range_keys(&map, (Unbounded, Included(size))), all);
    assert_eq!(range_keys(&map, ..), all);

    assert_eq!(range_keys(&map, (Excluded(0), Excluded(1))), vec![]);
    assert_eq!(range_keys(&map, (Excluded(0), Included(0))), vec![]);
    assert_eq!(range_keys(&map, (Included(0), Included(0))), vec![]);
    assert_eq!(range_keys(&map, (Included(0), Excluded(1))), vec![]);
    assert_eq!(range_keys(&map, (Unbounded, Excluded(1))), vec![]);
    assert_eq!(range_keys(&map, (Unbounded, Included(0))), vec![]);
    assert_eq!(range_keys(&map, (Excluded(0), Excluded(2))), first);
    assert_eq!(range_keys(&map, (Excluded(0), Included(1))), first);
    assert_eq!(range_keys(&map, (Included(0), Excluded(2))), first);
    assert_eq!(range_keys(&map, (Included(0), Included(1))), first);
    assert_eq!(range_keys(&map, (Included(1), Excluded(2))), first);
    assert_eq!(range_keys(&map, (Included(1), Included(1))), first);
    assert_eq!(range_keys(&map, (Unbounded, Excluded(2))), first);
    assert_eq!(range_keys(&map, (Unbounded, Included(1))), first);
    assert_eq!(range_keys(&map, (Excluded(size - 1), Excluded(size + 1))), last);
    assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size + 1))), last);
    assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size))), last);
    assert_eq!(range_keys(&map, (Excluded(size - 1), Unbounded)), last);
    assert_eq!(range_keys(&map, (Included(size), Excluded(size + 1))), last);
    assert_eq!(range_keys(&map, (Included(size), Included(size + 1))), last);
    assert_eq!(range_keys(&map, (Included(size), Included(size))), last);
    assert_eq!(range_keys(&map, (Included(size), Unbounded)), last);
    assert_eq!(range_keys(&map, (Excluded(size), Excluded(size + 1))), vec![]);
    assert_eq!(range_keys(&map, (Excluded(size), Included(size))), vec![]);
    assert_eq!(range_keys(&map, (Excluded(size), Unbounded)), vec![]);
    assert_eq!(range_keys(&map, (Included(size + 1), Excluded(size + 1))), vec![]);
    assert_eq!(range_keys(&map, (Included(size + 1), Included(size + 1))), vec![]);
    assert_eq!(range_keys(&map, (Included(size + 1), Unbounded)), vec![]);

    assert_eq!(range_keys(&map, ..3), vec![1, 2]);
    assert_eq!(range_keys(&map, 3..), vec![3, 4]);
    assert_eq!(range_keys(&map, 2..=3), vec![2, 3]);
}
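// A sketch of bound-form equivalence (public API): `range` takes any
// `RangeBounds`, so the explicit `(Bound, Bound)` pairs used above and plain
// range syntax describe the same queries.
#[test]
fn test_range_bound_forms_agree() {
    let map: BTreeMap<i32, i32> = (1..=4).map(|i| (i, i)).collect();
    let explicit: Vec<_> = map.range((Included(2), Excluded(4))).collect();
    let sugared: Vec<_> = map.range(2..4).collect();
    assert_eq!(explicit, sugared);
}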
let map: BTreeMap<_, _> = (0..MIN_INSERTS_HEIGHT_1 as i32).map(|i| (i, i)).collect(); let middle = MIN_INSERTS_HEIGHT_1 as i32 / 2; for root in middle - 2..=middle + 2 { assert_eq!(range_keys(&map, (Excluded(root), Excluded(root + 1))), vec![]); assert_eq!(range_keys(&map, (Excluded(root), Included(root + 1))), vec![root + 1]); assert_eq!(range_keys(&map, (Included(root), Excluded(root + 1))), vec![root]); assert_eq!(range_keys(&map, (Included(root), Included(root + 1))), vec![root, root + 1]); assert_eq!(range_keys(&map, (Excluded(root - 1), Excluded(root))), vec![]); assert_eq!(range_keys(&map, (Included(root - 1), Excluded(root))), vec![root - 1]); assert_eq!(range_keys(&map, (Excluded(root - 1), Included(root))), vec![root]); assert_eq!(range_keys(&map, (Included(root - 1), Included(root))), vec![root - 1, root]); } } #[test] fn test_range_large() { let size = 200; let map: BTreeMap<_, _> = (1..=size).map(|i| (i, i)).collect(); let all: Vec<_> = (1..=size).collect(); let (first, last) = (vec![all[0]], vec![all[size as usize - 1]]); assert_eq!(range_keys(&map, (Excluded(0), Excluded(size + 1))), all); assert_eq!(range_keys(&map, (Excluded(0), Included(size + 1))), all); assert_eq!(range_keys(&map, (Excluded(0), Included(size))), all); assert_eq!(range_keys(&map, (Excluded(0), Unbounded)), all); assert_eq!(range_keys(&map, (Included(0), Excluded(size + 1))), all); assert_eq!(range_keys(&map, (Included(0), Included(size + 1))), all); assert_eq!(range_keys(&map, (Included(0), Included(size))), all); assert_eq!(range_keys(&map, (Included(0), Unbounded)), all); assert_eq!(range_keys(&map, (Included(1), Excluded(size + 1))), all); assert_eq!(range_keys(&map, (Included(1), Included(size + 1))), all); assert_eq!(range_keys(&map, (Included(1), Included(size))), all); assert_eq!(range_keys(&map, (Included(1), Unbounded)), all); assert_eq!(range_keys(&map, (Unbounded, Excluded(size + 1))), all); assert_eq!(range_keys(&map, (Unbounded, Included(size + 1))), all); assert_eq!(range_keys(&map, (Unbounded, Included(size))), all); assert_eq!(range_keys(&map, ..), all); assert_eq!(range_keys(&map, (Excluded(0), Excluded(1))), vec![]); assert_eq!(range_keys(&map, (Excluded(0), Included(0))), vec![]); assert_eq!(range_keys(&map, (Included(0), Included(0))), vec![]); assert_eq!(range_keys(&map, (Included(0), Excluded(1))), vec![]); assert_eq!(range_keys(&map, (Unbounded, Excluded(1))), vec![]); assert_eq!(range_keys(&map, (Unbounded, Included(0))), vec![]); assert_eq!(range_keys(&map, (Excluded(0), Excluded(2))), first); assert_eq!(range_keys(&map, (Excluded(0), Included(1))), first); assert_eq!(range_keys(&map, (Included(0), Excluded(2))), first); assert_eq!(range_keys(&map, (Included(0), Included(1))), first); assert_eq!(range_keys(&map, (Included(1), Excluded(2))), first); assert_eq!(range_keys(&map, (Included(1), Included(1))), first); assert_eq!(range_keys(&map, (Unbounded, Excluded(2))), first); assert_eq!(range_keys(&map, (Unbounded, Included(1))), first); assert_eq!(range_keys(&map, (Excluded(size - 1), Excluded(size + 1))), last); assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size + 1))), last); assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size))), last); assert_eq!(range_keys(&map, (Excluded(size - 1), Unbounded)), last); assert_eq!(range_keys(&map, (Included(size), Excluded(size + 1))), last); assert_eq!(range_keys(&map, (Included(size), Included(size + 1))), last); assert_eq!(range_keys(&map, (Included(size), Included(size))), last); assert_eq!(range_keys(&map, 
    (Included(size), Unbounded)), last);
    assert_eq!(range_keys(&map, (Excluded(size), Excluded(size + 1))), vec![]);
    assert_eq!(range_keys(&map, (Excluded(size), Included(size))), vec![]);
    assert_eq!(range_keys(&map, (Excluded(size), Unbounded)), vec![]);
    assert_eq!(range_keys(&map, (Included(size + 1), Excluded(size + 1))), vec![]);
    assert_eq!(range_keys(&map, (Included(size + 1), Included(size + 1))), vec![]);
    assert_eq!(range_keys(&map, (Included(size + 1), Unbounded)), vec![]);

    fn check<'a, L, R>(lhs: L, rhs: R)
    where
        L: IntoIterator<Item = (&'a i32, &'a i32)>,
        R: IntoIterator<Item = (&'a i32, &'a i32)>,
    {
        let lhs: Vec<_> = lhs.into_iter().collect();
        let rhs: Vec<_> = rhs.into_iter().collect();
        assert_eq!(lhs, rhs);
    }

    check(map.range(..=100), map.range(..101));
    check(map.range(5..=8), vec![(&5, &5), (&6, &6), (&7, &7), (&8, &8)]);
    check(map.range(-1..=2), vec![(&1, &1), (&2, &2)]);
}

#[test]
fn test_range_inclusive_max_value() {
    let max = usize::MAX;
    let map: BTreeMap<_, _> = vec![(max, 0)].into_iter().collect();
    assert_eq!(map.range(max..=max).collect::<Vec<_>>(), &[(&max, &0)]);
}

#[test]
fn test_range_equal_empty_cases() {
    let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
    assert_eq!(map.range((Included(2), Excluded(2))).next(), None);
    assert_eq!(map.range((Excluded(2), Included(2))).next(), None);
}

#[test]
#[should_panic]
fn test_range_equal_excluded() {
    let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
    map.range((Excluded(2), Excluded(2)));
}

#[test]
#[should_panic]
fn test_range_backwards_1() {
    let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
    map.range((Included(3), Included(2)));
}

#[test]
#[should_panic]
fn test_range_backwards_2() {
    let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
    map.range((Included(3), Excluded(2)));
}

#[test]
#[should_panic]
fn test_range_backwards_3() {
    let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
    map.range((Excluded(3), Included(2)));
}

#[test]
#[should_panic]
fn test_range_backwards_4() {
    let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
    map.range((Excluded(3), Excluded(2)));
}

#[test]
fn test_range_finding_ill_order_in_map() {
    let mut map = BTreeMap::new();
    map.insert(Cyclic3::B, ());
    // Lacking static_assert, call `range` conditionally, to emphasise that
    // we cause a different panic than `test_range_backwards_1` does.
    // A more refined `should_panic` would be welcome.
    if Cyclic3::C < Cyclic3::A {
        map.range(Cyclic3::C..=Cyclic3::A);
    }
}

#[test]
fn test_range_finding_ill_order_in_range_ord() {
    // Has proper order the first time asked, then flips around.
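    // (Added note: after the first comparison every `cmp` answer below is
    // reversed, so the two endpoint searches of `range` observe mutually
    // inconsistent orderings; the tree only has to survive that without
    // memory unsafety.)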
    struct EvilTwin(i32);

    impl PartialOrd for EvilTwin {
        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
            Some(self.cmp(other))
        }
    }

    static COMPARES: AtomicUsize = AtomicUsize::new(0);
    impl Ord for EvilTwin {
        fn cmp(&self, other: &Self) -> Ordering {
            let ord = self.0.cmp(&other.0);
            if COMPARES.fetch_add(1, SeqCst) > 0 { ord.reverse() } else { ord }
        }
    }

    impl PartialEq for EvilTwin {
        fn eq(&self, other: &Self) -> bool {
            self.0.eq(&other.0)
        }
    }

    impl Eq for EvilTwin {}

    #[derive(PartialEq, Eq, PartialOrd, Ord)]
    struct CompositeKey(i32, EvilTwin);

    impl Borrow<EvilTwin> for CompositeKey {
        fn borrow(&self) -> &EvilTwin {
            &self.1
        }
    }

    let map = (0..12).map(|i| (CompositeKey(i, EvilTwin(i)), ())).collect::<BTreeMap<_, _>>();
    map.range(EvilTwin(5)..=EvilTwin(7));
}

#[test]
fn test_range_1000() {
    // Miri is too slow
    let size = if cfg!(miri) { MIN_INSERTS_HEIGHT_2 as u32 } else { 1000 };
    let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();

    fn test(map: &BTreeMap<u32, u32>, size: u32, min: Bound<&u32>, max: Bound<&u32>) {
        let mut kvs = map.range((min, max)).map(|(&k, &v)| (k, v));
        let mut pairs = (0..size).map(|i| (i, i));

        for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
            assert_eq!(kv, pair);
        }
        assert_eq!(kvs.next(), None);
        assert_eq!(pairs.next(), None);
    }
    test(&map, size, Included(&0), Excluded(&size));
    test(&map, size, Unbounded, Excluded(&size));
    test(&map, size, Included(&0), Included(&(size - 1)));
    test(&map, size, Unbounded, Included(&(size - 1)));
    test(&map, size, Included(&0), Unbounded);
    test(&map, size, Unbounded, Unbounded);
}

#[test]
fn test_range_borrowed_key() {
    let mut map = BTreeMap::new();
    map.insert("aardvark".to_string(), 1);
    map.insert("baboon".to_string(), 2);
    map.insert("coyote".to_string(), 3);
    map.insert("dingo".to_string(), 4);
    // NOTE: would like to use simply "b".."d" here...
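    // (Added explanation, hedged: with `map.range("b".."d")` the borrowed type is
    // inferred as `&str`, which would require `String: Borrow<&str>`; spelling out
    // `T = str` works because `(Included("b"), Excluded("d"))` implements
    // `RangeBounds<str>` through the blanket impl for pairs of `Bound<&T>`.)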
    let mut iter = map.range::<str, _>((Included("b"), Excluded("d")));
    assert_eq!(iter.next(), Some((&"baboon".to_string(), &2)));
    assert_eq!(iter.next(), Some((&"coyote".to_string(), &3)));
    assert_eq!(iter.next(), None);
}

#[test]
fn test_range() {
    let size = 200;
    // Miri is too slow
    let step = if cfg!(miri) { 66 } else { 1 };
    let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();

    for i in (0..size).step_by(step) {
        for j in (i..size).step_by(step) {
            let mut kvs = map.range((Included(&i), Included(&j))).map(|(&k, &v)| (k, v));
            let mut pairs = (i..=j).map(|i| (i, i));

            for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
                assert_eq!(kv, pair);
            }
            assert_eq!(kvs.next(), None);
            assert_eq!(pairs.next(), None);
        }
    }
}

#[test]
fn test_range_mut() {
    let size = 200;
    // Miri is too slow
    let step = if cfg!(miri) { 66 } else { 1 };
    let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();

    for i in (0..size).step_by(step) {
        for j in (i..size).step_by(step) {
            let mut kvs = map.range_mut((Included(&i), Included(&j))).map(|(&k, &mut v)| (k, v));
            let mut pairs = (i..=j).map(|i| (i, i));

            for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
                assert_eq!(kv, pair);
            }
            assert_eq!(kvs.next(), None);
            assert_eq!(pairs.next(), None);
        }
    }
    map.check();
}

#[test]
fn test_retain() {
    let mut map: BTreeMap<i32, i32> = (0..100).map(|x| (x, x * 10)).collect();

    map.retain(|&k, _| k % 2 == 0);
    assert_eq!(map.len(), 50);
    assert_eq!(map[&2], 20);
    assert_eq!(map[&4], 40);
    assert_eq!(map[&6], 60);
}

mod test_drain_filter {
    use super::*;

    #[test]
    fn empty() {
        let mut map: BTreeMap<i32, i32> = BTreeMap::new();
        map.drain_filter(|_, _| unreachable!("there's nothing to decide on"));
        assert!(map.is_empty());
        map.check();
    }

    // Explicitly consumes the iterator, where most test cases drop it instantly.
    #[test]
    fn consumed_keeping_all() {
        let pairs = (0..3).map(|i| (i, i));
        let mut map: BTreeMap<_, _> = pairs.collect();
        assert!(map.drain_filter(|_, _| false).eq(iter::empty()));
        map.check();
    }

    // Explicitly consumes the iterator, where most test cases drop it instantly.
    #[test]
    fn consumed_removing_all() {
        let pairs = (0..3).map(|i| (i, i));
        let mut map: BTreeMap<_, _> = pairs.clone().collect();
        assert!(map.drain_filter(|_, _| true).eq(pairs));
        assert!(map.is_empty());
        map.check();
    }

    // Explicitly consumes the iterator and modifies values through it.
    #[test]
    fn mutating_and_keeping() {
        let pairs = (0..3).map(|i| (i, i));
        let mut map: BTreeMap<_, _> = pairs.collect();
        assert!(
            map.drain_filter(|_, v| {
                *v += 6;
                false
            })
            .eq(iter::empty())
        );
        assert!(map.keys().copied().eq(0..3));
        assert!(map.values().copied().eq(6..9));
        map.check();
    }

    // Explicitly consumes the iterator and modifies values through it.
#[test] fn mutating_and_removing() { let pairs = (0..3).map(|i| (i, i)); let mut map: BTreeMap<_, _> = pairs.collect(); assert!( map.drain_filter(|_, v| { *v += 6; true }) .eq((0..3).map(|i| (i, i + 6))) ); assert!(map.is_empty()); map.check(); } #[test] fn underfull_keeping_all() { let pairs = (0..3).map(|i| (i, i)); let mut map: BTreeMap<_, _> = pairs.collect(); map.drain_filter(|_, _| false); assert!(map.keys().copied().eq(0..3)); map.check(); } #[test] fn underfull_removing_one() { let pairs = (0..3).map(|i| (i, i)); for doomed in 0..3 { let mut map: BTreeMap<_, _> = pairs.clone().collect(); map.drain_filter(|i, _| *i == doomed); assert_eq!(map.len(), 2); map.check(); } } #[test] fn underfull_keeping_one() { let pairs = (0..3).map(|i| (i, i)); for sacred in 0..3 { let mut map: BTreeMap<_, _> = pairs.clone().collect(); map.drain_filter(|i, _| *i != sacred); assert!(map.keys().copied().eq(sacred..=sacred)); map.check(); } } #[test] fn underfull_removing_all() { let pairs = (0..3).map(|i| (i, i)); let mut map: BTreeMap<_, _> = pairs.collect(); map.drain_filter(|_, _| true); assert!(map.is_empty()); map.check(); } #[test] fn height_0_keeping_all() { let pairs = (0..NODE_CAPACITY).map(|i| (i, i)); let mut map: BTreeMap<_, _> = pairs.collect(); map.drain_filter(|_, _| false); assert!(map.keys().copied().eq(0..NODE_CAPACITY)); map.check(); } #[test] fn height_0_removing_one() { let pairs = (0..NODE_CAPACITY).map(|i| (i, i)); for doomed in 0..NODE_CAPACITY { let mut map: BTreeMap<_, _> = pairs.clone().collect(); map.drain_filter(|i, _| *i == doomed); assert_eq!(map.len(), NODE_CAPACITY - 1); map.check(); } } #[test] fn height_0_keeping_one() { let pairs = (0..NODE_CAPACITY).map(|i| (i, i)); for sacred in 0..NODE_CAPACITY { let mut map: BTreeMap<_, _> = pairs.clone().collect(); map.drain_filter(|i, _| *i != sacred); assert!(map.keys().copied().eq(sacred..=sacred)); map.check(); } } #[test] fn height_0_removing_all() { let pairs = (0..NODE_CAPACITY).map(|i| (i, i)); let mut map: BTreeMap<_, _> = pairs.collect(); map.drain_filter(|_, _| true); assert!(map.is_empty()); map.check(); } #[test] fn height_0_keeping_half() { let mut map: BTreeMap<_, _> = (0..16).map(|i| (i, i)).collect(); assert_eq!(map.drain_filter(|i, _| *i % 2 == 0).count(), 8); assert_eq!(map.len(), 8); map.check(); } #[test] fn height_1_removing_all() { let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i)); let mut map: BTreeMap<_, _> = pairs.collect(); map.drain_filter(|_, _| true); assert!(map.is_empty()); map.check(); } #[test] fn height_1_removing_one() { let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i)); for doomed in 0..MIN_INSERTS_HEIGHT_1 { let mut map: BTreeMap<_, _> = pairs.clone().collect(); map.drain_filter(|i, _| *i == doomed); assert_eq!(map.len(), MIN_INSERTS_HEIGHT_1 - 1); map.check(); } } #[test] fn height_1_keeping_one() { let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i)); for sacred in 0..MIN_INSERTS_HEIGHT_1 { let mut map: BTreeMap<_, _> = pairs.clone().collect(); map.drain_filter(|i, _| *i != sacred); assert!(map.keys().copied().eq(sacred..=sacred)); map.check(); } } #[test] fn height_2_removing_one() { let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)); for doomed in (0..MIN_INSERTS_HEIGHT_2).step_by(12) { let mut map: BTreeMap<_, _> = pairs.clone().collect(); map.drain_filter(|i, _| *i == doomed); assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2 - 1); map.check(); } } #[test] fn height_2_keeping_one() { let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)); for sacred in 
        (0..MIN_INSERTS_HEIGHT_2).step_by(12) {
            let mut map: BTreeMap<_, _> = pairs.clone().collect();
            map.drain_filter(|i, _| *i != sacred);
            assert!(map.keys().copied().eq(sacred..=sacred));
            map.check();
        }
    }

    #[test]
    fn height_2_removing_all() {
        let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i));
        let mut map: BTreeMap<_, _> = pairs.collect();
        map.drain_filter(|_, _| true);
        assert!(map.is_empty());
        map.check();
    }

    #[test]
    fn drop_panic_leak() {
        let a = CrashTestDummy::new(0);
        let b = CrashTestDummy::new(1);
        let c = CrashTestDummy::new(2);
        let mut map = BTreeMap::new();
        map.insert(a.spawn(Panic::Never), ());
        map.insert(b.spawn(Panic::InDrop), ());
        map.insert(c.spawn(Panic::Never), ());

        catch_unwind(move || drop(map.drain_filter(|dummy, _| dummy.query(true)))).unwrap_err();

        assert_eq!(a.queried(), 1);
        assert_eq!(b.queried(), 1);
        assert_eq!(c.queried(), 0);
        assert_eq!(a.dropped(), 1);
        assert_eq!(b.dropped(), 1);
        assert_eq!(c.dropped(), 1);
    }

    #[test]
    fn pred_panic_leak() {
        let a = CrashTestDummy::new(0);
        let b = CrashTestDummy::new(1);
        let c = CrashTestDummy::new(2);
        let mut map = BTreeMap::new();
        map.insert(a.spawn(Panic::Never), ());
        map.insert(b.spawn(Panic::InQuery), ());
        map.insert(c.spawn(Panic::InQuery), ());

        catch_unwind(AssertUnwindSafe(|| drop(map.drain_filter(|dummy, _| dummy.query(true)))))
            .unwrap_err();

        assert_eq!(a.queried(), 1);
        assert_eq!(b.queried(), 1);
        assert_eq!(c.queried(), 0);
        assert_eq!(a.dropped(), 1);
        assert_eq!(b.dropped(), 0);
        assert_eq!(c.dropped(), 0);
        assert_eq!(map.len(), 2);
        assert_eq!(map.first_entry().unwrap().key().id(), 1);
        assert_eq!(map.last_entry().unwrap().key().id(), 2);
        map.check();
    }

    // Same as above, but attempt to use the iterator again after the panic in the predicate
    #[test]
    fn pred_panic_reuse() {
        let a = CrashTestDummy::new(0);
        let b = CrashTestDummy::new(1);
        let c = CrashTestDummy::new(2);
        let mut map = BTreeMap::new();
        map.insert(a.spawn(Panic::Never), ());
        map.insert(b.spawn(Panic::InQuery), ());
        map.insert(c.spawn(Panic::InQuery), ());

        {
            let mut it = map.drain_filter(|dummy, _| dummy.query(true));
            catch_unwind(AssertUnwindSafe(|| while it.next().is_some() {})).unwrap_err();
            // Iterator behaviour after a panic is explicitly unspecified,
            // so this is just the current implementation:
            let result = catch_unwind(AssertUnwindSafe(|| it.next()));
            assert!(matches!(result, Ok(None)));
        }

        assert_eq!(a.queried(), 1);
        assert_eq!(b.queried(), 1);
        assert_eq!(c.queried(), 0);
        assert_eq!(a.dropped(), 1);
        assert_eq!(b.dropped(), 0);
        assert_eq!(c.dropped(), 0);
        assert_eq!(map.len(), 2);
        assert_eq!(map.first_entry().unwrap().key().id(), 1);
        assert_eq!(map.last_entry().unwrap().key().id(), 2);
        map.check();
    }
}

#[test]
fn test_borrow() {
    // make sure these compile -- using the Borrow trait
    {
        let mut map = BTreeMap::new();
        map.insert("0".to_string(), 1);
        assert_eq!(map["0"], 1);
    }
    {
        let mut map = BTreeMap::new();
        map.insert(Box::new(0), 1);
        assert_eq!(map[&0], 1);
    }
    {
        let mut map = BTreeMap::new();
        map.insert(Box::new([0, 1]) as Box<[i32]>, 1);
        assert_eq!(map[&[0, 1][..]], 1);
    }
    {
        let mut map = BTreeMap::new();
        map.insert(Rc::new(0), 1);
        assert_eq!(map[&0], 1);
    }

    #[allow(dead_code)]
    fn get<T: Ord>(v: &BTreeMap<Box<T>, ()>, t: &T) { v.get(t); }

    #[allow(dead_code)]
    fn get_mut<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: &T) { v.get_mut(t); }

    #[allow(dead_code)]
    fn get_key_value<T: Ord>(v: &BTreeMap<Box<T>, ()>, t: &T) { v.get_key_value(t); }

    #[allow(dead_code)]
    fn contains_key<T: Ord>(v: &BTreeMap<Box<T>, ()>, t: &T) { v.contains_key(t); }

    #[allow(dead_code)]
    fn range<T: Ord>(v: &BTreeMap<Box<T>, ()>, t: T) { v.range(t..); }

    #[allow(dead_code)]
    fn range_mut<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: T) { v.range_mut(t..); }

    #[allow(dead_code)]
    fn remove<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: &T) { v.remove(t); }

    #[allow(dead_code)]
    fn remove_entry<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: &T) { v.remove_entry(t); }

    #[allow(dead_code)]
    fn split_off<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: &T) { v.split_off(t); }
}

#[test]
fn test_entry() {
    let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];

    let mut map: BTreeMap<_, _> = xs.iter().cloned().collect();

    // Existing key (insert)
    match map.entry(1) {
        Vacant(_) => unreachable!(),
        Occupied(mut view) => {
            assert_eq!(view.get(), &10);
            assert_eq!(view.insert(100), 10);
        }
    }
    assert_eq!(map.get(&1).unwrap(), &100);
    assert_eq!(map.len(), 6);

    // Existing key (update)
    match map.entry(2) {
        Vacant(_) => unreachable!(),
        Occupied(mut view) => {
            let v = view.get_mut();
            *v *= 10;
        }
    }
    assert_eq!(map.get(&2).unwrap(), &200);
    assert_eq!(map.len(), 6);
    map.check();

    // Existing key (take)
    match map.entry(3) {
        Vacant(_) => unreachable!(),
        Occupied(view) => {
            assert_eq!(view.remove(), 30);
        }
    }
    assert_eq!(map.get(&3), None);
    assert_eq!(map.len(), 5);
    map.check();

    // Inexistent key (insert)
    match map.entry(10) {
        Occupied(_) => unreachable!(),
        Vacant(view) => {
            assert_eq!(*view.insert(1000), 1000);
        }
    }
    assert_eq!(map.get(&10).unwrap(), &1000);
    assert_eq!(map.len(), 6);
    map.check();
}

#[test]
fn test_extend_ref() {
    let mut a = BTreeMap::new();
    a.insert(1, "one");
    let mut b = BTreeMap::new();
    b.insert(2, "two");
    b.insert(3, "three");

    a.extend(&b);

    assert_eq!(a.len(), 3);
    assert_eq!(a[&1], "one");
    assert_eq!(a[&2], "two");
    assert_eq!(a[&3], "three");
    a.check();
}

#[test]
fn test_zst() {
    let mut m = BTreeMap::new();
    assert_eq!(m.len(), 0);

    assert_eq!(m.insert((), ()), None);
    assert_eq!(m.len(), 1);

    assert_eq!(m.insert((), ()), Some(()));
    assert_eq!(m.len(), 1);
    assert_eq!(m.iter().count(), 1);

    m.clear();
    assert_eq!(m.len(), 0);

    for _ in 0..100 {
        m.insert((), ());
    }

    assert_eq!(m.len(), 1);
    assert_eq!(m.iter().count(), 1);
    m.check();
}

// This test's only purpose is to ensure that zero-sized keys with nonsensical orderings
// do not cause segfaults when used with zero-sized values. All other map behavior is
// undefined.
#[test]
fn test_bad_zst() {
    #[derive(Clone, Copy, Debug)]
    struct Bad;

    impl PartialEq for Bad {
        fn eq(&self, _: &Self) -> bool {
            false
        }
    }

    impl Eq for Bad {}

    impl PartialOrd for Bad {
        fn partial_cmp(&self, _: &Self) -> Option<Ordering> {
            Some(Ordering::Less)
        }
    }

    impl Ord for Bad {
        fn cmp(&self, _: &Self) -> Ordering {
            Ordering::Less
        }
    }

    let mut m = BTreeMap::new();

    for _ in 0..100 {
        m.insert(Bad, Bad);
    }
    m.check();
}

#[test]
fn test_clear() {
    let mut map = BTreeMap::new();
    for &len in &[MIN_INSERTS_HEIGHT_1, MIN_INSERTS_HEIGHT_2, 0, NODE_CAPACITY] {
        for i in 0..len {
            map.insert(i, ());
        }
        assert_eq!(map.len(), len);
        map.clear();
        map.check();
        assert!(map.is_empty());
    }
}

#[test]
fn test_clear_drop_panic_leak() {
    let a = CrashTestDummy::new(0);
    let b = CrashTestDummy::new(1);
    let c = CrashTestDummy::new(2);

    let mut map = BTreeMap::new();
    map.insert(a.spawn(Panic::Never), ());
    map.insert(b.spawn(Panic::InDrop), ());
    map.insert(c.spawn(Panic::Never), ());

    catch_unwind(AssertUnwindSafe(|| map.clear())).unwrap_err();
    assert_eq!(a.dropped(), 1);
    assert_eq!(b.dropped(), 1);
    assert_eq!(c.dropped(), 1);
    assert_eq!(map.len(), 0);

    drop(map);
    assert_eq!(a.dropped(), 1);
    assert_eq!(b.dropped(), 1);
    assert_eq!(c.dropped(), 1);
}

#[test]
fn test_clone() {
    let mut map = BTreeMap::new();
    let size = MIN_INSERTS_HEIGHT_1;
    assert_eq!(map.len(), 0);

    for i in 0..size {
        assert_eq!(map.insert(i, 10 * i), None);
        assert_eq!(map.len(), i + 1);
        map.check();
        assert_eq!(map, map.clone());
    }

    for i in 0..size {
        assert_eq!(map.insert(i, 100 * i), Some(10 * i));
        assert_eq!(map.len(), size);
        map.check();
        assert_eq!(map, map.clone());
    }

    for i in 0..size / 2 {
        assert_eq!(map.remove(&(i * 2)), Some(i * 200));
        assert_eq!(map.len(), size - i - 1);
        map.check();
        assert_eq!(map, map.clone());
    }

    for i in 0..size / 2 {
        assert_eq!(map.remove(&(2 * i)), None);
        assert_eq!(map.remove(&(2 * i + 1)), Some(i * 200 + 100));
        assert_eq!(map.len(), size / 2 - i - 1);
        map.check();
        assert_eq!(map, map.clone());
    }

    // Test a tree with 2 semi-full levels and a tree with 3 levels.
    map = (1..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)).collect();
    assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2 - 1);
    assert_eq!(map, map.clone());
    map.insert(0, 0);
    assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2);
    assert_eq!(map, map.clone());
    map.check();
}

#[test]
fn test_clone_panic_leak() {
    let a = CrashTestDummy::new(0);
    let b = CrashTestDummy::new(1);
    let c = CrashTestDummy::new(2);

    let mut map = BTreeMap::new();
    map.insert(a.spawn(Panic::Never), ());
    map.insert(b.spawn(Panic::InClone), ());
    map.insert(c.spawn(Panic::Never), ());

    catch_unwind(|| map.clone()).unwrap_err();
    assert_eq!(a.cloned(), 1);
    assert_eq!(b.cloned(), 1);
    assert_eq!(c.cloned(), 0);
    assert_eq!(a.dropped(), 1);
    assert_eq!(b.dropped(), 0);
    assert_eq!(c.dropped(), 0);
    assert_eq!(map.len(), 3);

    drop(map);
    assert_eq!(a.cloned(), 1);
    assert_eq!(b.cloned(), 1);
    assert_eq!(c.cloned(), 0);
    assert_eq!(a.dropped(), 2);
    assert_eq!(b.dropped(), 1);
    assert_eq!(c.dropped(), 1);
}

#[test]
fn test_clone_from() {
    let mut map1 = BTreeMap::new();
    let max_size = MIN_INSERTS_HEIGHT_1;

    // Range to max_size inclusive, because i is the size of map1 being tested.
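    // (Added note: each pass below clones in both directions, so `clone_from` is
    // exercised both when the destination must grow and when it must shrink.)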
    for i in 0..=max_size {
        let mut map2 = BTreeMap::new();
        for j in 0..i {
            let mut map1_copy = map2.clone();
            map1_copy.clone_from(&map1); // small cloned from large
            assert_eq!(map1_copy, map1);
            let mut map2_copy = map1.clone();
            map2_copy.clone_from(&map2); // large cloned from small
            assert_eq!(map2_copy, map2);
            map2.insert(100 * j + 1, 2 * j + 1);
        }
        map2.clone_from(&map1); // same length
        map2.check();
        assert_eq!(map2, map1);

        map1.insert(i, 10 * i);
        map1.check();
    }
}

#[allow(dead_code)]
fn test_variance() {
    fn map_key<'new>(v: BTreeMap<&'static str, ()>) -> BTreeMap<&'new str, ()> { v }
    fn map_val<'new>(v: BTreeMap<(), &'static str>) -> BTreeMap<(), &'new str> { v }

    fn iter_key<'a, 'new>(v: Iter<'a, &'static str, ()>) -> Iter<'a, &'new str, ()> { v }
    fn iter_val<'a, 'new>(v: Iter<'a, (), &'static str>) -> Iter<'a, (), &'new str> { v }

    fn into_iter_key<'new>(v: IntoIter<&'static str, ()>) -> IntoIter<&'new str, ()> { v }
    fn into_iter_val<'new>(v: IntoIter<(), &'static str>) -> IntoIter<(), &'new str> { v }

    fn into_keys_key<'new>(v: IntoKeys<&'static str, ()>) -> IntoKeys<&'new str, ()> { v }
    fn into_keys_val<'new>(v: IntoKeys<(), &'static str>) -> IntoKeys<(), &'new str> { v }

    fn into_values_key<'new>(v: IntoValues<&'static str, ()>) -> IntoValues<&'new str, ()> { v }
    fn into_values_val<'new>(v: IntoValues<(), &'static str>) -> IntoValues<(), &'new str> { v }

    fn range_key<'a, 'new>(v: Range<'a, &'static str, ()>) -> Range<'a, &'new str, ()> { v }
    fn range_val<'a, 'new>(v: Range<'a, (), &'static str>) -> Range<'a, (), &'new str> { v }

    fn keys_key<'a, 'new>(v: Keys<'a, &'static str, ()>) -> Keys<'a, &'new str, ()> { v }
    fn keys_val<'a, 'new>(v: Keys<'a, (), &'static str>) -> Keys<'a, (), &'new str> { v }

    fn values_key<'a, 'new>(v: Values<'a, &'static str, ()>) -> Values<'a, &'new str, ()> { v }
    fn values_val<'a, 'new>(v: Values<'a, (), &'static str>) -> Values<'a, (), &'new str> { v }
}

#[allow(dead_code)]
fn test_sync() {
    fn map<T: Sync>(v: &BTreeMap<T, T>) -> impl Sync + '_ { v }

    fn into_iter<T: Sync>(v: BTreeMap<T, T>) -> impl Sync { v.into_iter() }

    fn into_keys<T: Sync + Ord>(v: BTreeMap<T, T>) -> impl Sync { v.into_keys() }

    fn into_values<T: Sync + Ord>(v: BTreeMap<T, T>) -> impl Sync { v.into_values() }

    fn drain_filter<T: Sync + Ord>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
        v.drain_filter(|_, _| false)
    }

    fn iter<T: Sync>(v: &BTreeMap<T, T>) -> impl Sync + '_ { v.iter() }

    fn iter_mut<T: Sync>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ { v.iter_mut() }

    fn keys<T: Sync>(v: &BTreeMap<T, T>) -> impl Sync + '_ { v.keys() }

    fn values<T: Sync>(v: &BTreeMap<T, T>) -> impl Sync + '_ { v.values() }

    fn values_mut<T: Sync>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ { v.values_mut() }

    fn range<T: Sync + Ord>(v: &BTreeMap<T, T>) -> impl Sync + '_ { v.range(..) }

    fn range_mut<T: Sync + Ord>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ { v.range_mut(..) }

    fn entry<T: Sync + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
        v.entry(Default::default())
    }

    fn occupied_entry<T: Sync + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
        match v.entry(Default::default()) {
            Occupied(entry) => entry,
            _ => unreachable!(),
        }
    }

    fn vacant_entry<T: Sync + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
        match v.entry(Default::default()) {
            Vacant(entry) => entry,
            _ => unreachable!(),
        }
    }
}

#[allow(dead_code)]
fn test_send() {
    fn map<T: Send>(v: BTreeMap<T, T>) -> impl Send { v }

    fn into_iter<T: Send>(v: BTreeMap<T, T>) -> impl Send { v.into_iter() }

    fn into_keys<T: Send + Ord>(v: BTreeMap<T, T>) -> impl Send { v.into_keys() }

    fn into_values<T: Send + Ord>(v: BTreeMap<T, T>) -> impl Send { v.into_values() }

    fn drain_filter<T: Send + Ord>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
        v.drain_filter(|_, _| false)
    }

    fn iter<T: Send + Sync>(v: &BTreeMap<T, T>) -> impl Send + '_ { v.iter() }

    fn iter_mut<T: Send>(v: &mut BTreeMap<T, T>) -> impl Send + '_ { v.iter_mut() }

    fn keys<T: Send + Sync>(v: &BTreeMap<T, T>) -> impl Send + '_ { v.keys() }

    fn values<T: Send + Sync>(v: &BTreeMap<T, T>) -> impl Send + '_ { v.values() }

    fn values_mut<T: Send>(v: &mut BTreeMap<T, T>) -> impl Send + '_ { v.values_mut() }

    fn range<T: Send + Sync + Ord>(v: &BTreeMap<T, T>) -> impl Send + '_ { v.range(..) }

    fn range_mut<T: Send + Ord>(v: &mut BTreeMap<T, T>) -> impl Send + '_ { v.range_mut(..) }

    fn entry<T: Send + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
        v.entry(Default::default())
    }

    fn occupied_entry<T: Send + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
        match v.entry(Default::default()) {
            Occupied(entry) => entry,
            _ => unreachable!(),
        }
    }

    fn vacant_entry<T: Send + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
        match v.entry(Default::default()) {
            Vacant(entry) => entry,
            _ => unreachable!(),
        }
    }
}

#[allow(dead_code)]
fn test_ord_absence() {
    fn map<K>(mut map: BTreeMap<K, ()>) {
        map.is_empty();
        map.len();
        map.clear();
        map.iter();
        map.iter_mut();
        map.keys();
        map.values();
        map.values_mut();
        if true {
            map.into_values();
        } else if true {
            map.into_iter();
        } else {
            map.into_keys();
        }
    }

    fn map_debug<K: Debug>(mut map: BTreeMap<K, ()>) {
        format!("{:?}", map);
        format!("{:?}", map.iter());
        format!("{:?}", map.iter_mut());
        format!("{:?}", map.keys());
        format!("{:?}", map.values());
        format!("{:?}", map.values_mut());
        if true {
            format!("{:?}", map.into_iter());
        } else if true {
            format!("{:?}", map.into_keys());
        } else {
            format!("{:?}", map.into_values());
        }
    }

    fn map_clone<K: Clone>(mut map: BTreeMap<K, ()>) {
        map.clone_from(&map.clone());
    }
}

#[allow(dead_code)]
fn test_const() {
    const MAP: &'static BTreeMap<(), ()> = &BTreeMap::new();
    const LEN: usize = MAP.len();
    const IS_EMPTY: bool = MAP.is_empty();
}

#[test]
fn test_occupied_entry_key() {
    let mut a = BTreeMap::new();
    let key = "hello there";
    let value = "value goes here";
    assert!(a.is_empty());
    a.insert(key, value);
    assert_eq!(a.len(), 1);
    assert_eq!(a[key], value);

    match a.entry(key) {
        Vacant(_) => panic!(),
        Occupied(e) => assert_eq!(key, *e.key()),
    }
    assert_eq!(a.len(), 1);
    assert_eq!(a[key], value);
    a.check();
}

#[test]
fn test_vacant_entry_key() {
    let mut a = BTreeMap::new();
    let key = "hello there";
    let value = "value goes here";

    assert!(a.is_empty());
    match a.entry(key) {
        Occupied(_) => panic!(),
        Vacant(e) => {
            assert_eq!(key, *e.key());
            e.insert(value);
        }
    }
    assert_eq!(a.len(), 1);
    assert_eq!(a[key], value);
    a.check();
}

#[test]
fn test_first_last_entry() {
    let mut a = BTreeMap::new();
    assert!(a.first_entry().is_none());
    assert!(a.last_entry().is_none());
    a.insert(1, 42);
    assert_eq!(a.first_entry().unwrap().key(), &1);
    assert_eq!(a.last_entry().unwrap().key(), &1);
    a.insert(2, 24);
    assert_eq!(a.first_entry().unwrap().key(), &1);
    assert_eq!(a.last_entry().unwrap().key(), &2);
    a.insert(0, 6);
    assert_eq!(a.first_entry().unwrap().key(), &0);
    assert_eq!(a.last_entry().unwrap().key(), &2);
    let (k1, v1) =
a.first_entry().unwrap().remove_entry(); assert_eq!(k1, 0); assert_eq!(v1, 6); let (k2, v2) = a.last_entry().unwrap().remove_entry(); assert_eq!(k2, 2); assert_eq!(v2, 24); assert_eq!(a.first_entry().unwrap().key(), &1); assert_eq!(a.last_entry().unwrap().key(), &1); a.check(); } #[test] fn test_insert_into_full_height_0() { let size = NODE_CAPACITY; for pos in 0..=size { let mut map: BTreeMap<_, _> = (0..size).map(|i| (i * 2 + 1, ())).collect(); assert!(map.insert(pos * 2, ()).is_none()); map.check(); } } #[test] fn test_insert_into_full_height_1() { let size = NODE_CAPACITY + 1 + NODE_CAPACITY; for pos in 0..=size { let mut map: BTreeMap<_, _> = (0..size).map(|i| (i * 2 + 1, ())).collect(); map.compact(); let root_node = map.root.as_ref().unwrap().reborrow(); assert_eq!(root_node.len(), 1); assert_eq!(root_node.first_leaf_edge().into_node().len(), NODE_CAPACITY); assert_eq!(root_node.last_leaf_edge().into_node().len(), NODE_CAPACITY); assert!(map.insert(pos * 2, ()).is_none()); map.check(); } } macro_rules! create_append_test { ($name:ident, $len:expr) => { #[test] fn $name() { let mut a = BTreeMap::new(); for i in 0..8 { a.insert(i, i); } let mut b = BTreeMap::new(); for i in 5..$len { b.insert(i, 2 * i); } a.append(&mut b); assert_eq!(a.len(), $len); assert_eq!(b.len(), 0); for i in 0..$len { if i < 5 { assert_eq!(a[&i], i); } else { assert_eq!(a[&i], 2 * i); } } a.check(); assert_eq!(a.remove(&($len - 1)), Some(2 * ($len - 1))); assert_eq!(a.insert($len - 1, 20), None); a.check(); } }; } // These are mostly for testing the algorithm that "fixes" the right edge after insertion. // Single node. create_append_test!(test_append_9, 9); // Two leafs that don't need fixing. create_append_test!(test_append_17, 17); // Two leafs where the second one ends up underfull and needs stealing at the end. create_append_test!(test_append_14, 14); // Two leafs where the second one ends up empty because the insertion finished at the root. create_append_test!(test_append_12, 12); // Three levels; insertion finished at the root. create_append_test!(test_append_144, 144); // Three levels; insertion finished at leaf while there is an empty node on the second level. create_append_test!(test_append_145, 145); // Tests for several randomly chosen sizes. 
create_append_test!(test_append_170, 170); create_append_test!(test_append_181, 181); #[cfg(not(miri))] // Miri is too slow create_append_test!(test_append_239, 239); #[cfg(not(miri))] // Miri is too slow create_append_test!(test_append_1700, 1700); #[test] fn test_append_drop_leak() { let a = CrashTestDummy::new(0); let b = CrashTestDummy::new(1); let c = CrashTestDummy::new(2); let mut left = BTreeMap::new(); let mut right = BTreeMap::new(); left.insert(a.spawn(Panic::Never), ()); left.insert(b.spawn(Panic::InDrop), ()); // first duplicate key, dropped during append left.insert(c.spawn(Panic::Never), ()); right.insert(b.spawn(Panic::Never), ()); right.insert(c.spawn(Panic::Never), ()); catch_unwind(move || left.append(&mut right)).unwrap_err(); assert_eq!(a.dropped(), 1); assert_eq!(b.dropped(), 1); // should be 2 were it not for Rust issue #47949 assert_eq!(c.dropped(), 2); } #[test] fn test_append_ord_chaos() { let mut map1 = BTreeMap::new(); map1.insert(Cyclic3::A, ()); map1.insert(Cyclic3::B, ()); let mut map2 = BTreeMap::new(); map2.insert(Cyclic3::A, ()); map2.insert(Cyclic3::B, ()); map2.insert(Cyclic3::C, ()); // lands first, before A map2.insert(Cyclic3::B, ()); // lands first, before C map1.check(); map2.check(); // keys are not unique but still strictly ascending assert_eq!(map1.len(), 2); assert_eq!(map2.len(), 4); map1.append(&mut map2); assert_eq!(map1.len(), 5); assert_eq!(map2.len(), 0); map1.check(); map2.check(); } fn rand_data(len: usize) -> Vec<(u32, u32)> { let mut rng = DeterministicRng::new(); Vec::from_iter((0..len).map(|_| (rng.next(), rng.next()))) } #[test] fn test_split_off_empty_right() { let mut data = rand_data(173); let mut map = BTreeMap::from_iter(data.clone()); let right = map.split_off(&(data.iter().max().unwrap().0 + 1)); map.check(); right.check(); data.sort(); assert!(map.into_iter().eq(data)); assert!(right.into_iter().eq(None)); } #[test] fn test_split_off_empty_left() { let mut data = rand_data(314); let mut map = BTreeMap::from_iter(data.clone()); let right = map.split_off(&data.iter().min().unwrap().0); map.check(); right.check(); data.sort(); assert!(map.into_iter().eq(None)); assert!(right.into_iter().eq(data)); } // In a tree with 3 levels, if all but a part of the first leaf node is split off, // make sure fix_top eliminates both top levels. #[test] fn test_split_off_tiny_left_height_2() { let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)); let mut left: BTreeMap<_, _> = pairs.clone().collect(); let right = left.split_off(&1); left.check(); right.check(); assert_eq!(left.len(), 1); assert_eq!(right.len(), MIN_INSERTS_HEIGHT_2 - 1); assert_eq!(*left.first_key_value().unwrap().0, 0); assert_eq!(*right.first_key_value().unwrap().0, 1); } // In a tree with 3 levels, if only part of the last leaf node is split off, // make sure fix_top eliminates both top levels. 
#[test] fn test_split_off_tiny_right_height_2() { let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)); let last = MIN_INSERTS_HEIGHT_2 - 1; let mut left: BTreeMap<_, _> = pairs.clone().collect(); assert_eq!(*left.last_key_value().unwrap().0, last); let right = left.split_off(&last); left.check(); right.check(); assert_eq!(left.len(), MIN_INSERTS_HEIGHT_2 - 1); assert_eq!(right.len(), 1); assert_eq!(*left.last_key_value().unwrap().0, last - 1); assert_eq!(*right.last_key_value().unwrap().0, last); } #[test] fn test_split_off_halfway() { let mut rng = DeterministicRng::new(); for &len in &[NODE_CAPACITY, 25, 50, 75, 100] { let mut data = Vec::from_iter((0..len).map(|_| (rng.next(), ()))); // Insertion in non-ascending order creates some variation in node length. let mut map = BTreeMap::from_iter(data.iter().copied()); data.sort(); let small_keys = data.iter().take(len / 2).map(|kv| kv.0); let large_keys = data.iter().skip(len / 2).map(|kv| kv.0); let split_key = large_keys.clone().next().unwrap(); let right = map.split_off(&split_key); map.check(); right.check(); assert!(map.keys().copied().eq(small_keys)); assert!(right.keys().copied().eq(large_keys)); } } #[test] fn test_split_off_large_random_sorted() { // Miri is too slow let mut data = if cfg!(miri) { rand_data(529) } else { rand_data(1529) }; // special case with maximum height. data.sort(); let mut map = BTreeMap::from_iter(data.clone()); let key = data[data.len() / 2].0; let right = map.split_off(&key); map.check(); right.check(); assert!(map.into_iter().eq(data.clone().into_iter().filter(|x| x.0 < key))); assert!(right.into_iter().eq(data.into_iter().filter(|x| x.0 >= key))); } #[test] fn test_into_iter_drop_leak_height_0() { let a = CrashTestDummy::new(0); let b = CrashTestDummy::new(1); let c = CrashTestDummy::new(2); let d = CrashTestDummy::new(3); let e = CrashTestDummy::new(4); let mut map = BTreeMap::new(); map.insert("a", a.spawn(Panic::Never)); map.insert("b", b.spawn(Panic::Never)); map.insert("c", c.spawn(Panic::Never)); map.insert("d", d.spawn(Panic::InDrop)); map.insert("e", e.spawn(Panic::Never)); catch_unwind(move || drop(map.into_iter())).unwrap_err(); assert_eq!(a.dropped(), 1); assert_eq!(b.dropped(), 1); assert_eq!(c.dropped(), 1); assert_eq!(d.dropped(), 1); assert_eq!(e.dropped(), 1); } #[test] fn test_into_iter_drop_leak_height_1() { let size = MIN_INSERTS_HEIGHT_1; for panic_point in vec![0, 1, size - 2, size - 1] { let dummies: Vec<_> = (0..size).map(|i| CrashTestDummy::new(i)).collect(); let map: BTreeMap<_, _> = (0..size) .map(|i| { let panic = if i == panic_point { Panic::InDrop } else { Panic::Never }; (dummies[i].spawn(Panic::Never), dummies[i].spawn(panic)) }) .collect(); catch_unwind(move || drop(map.into_iter())).unwrap_err(); for i in 0..size { assert_eq!(dummies[i].dropped(), 2); } } } #[test] fn test_into_keys() { let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')]; let map: BTreeMap<_, _> = vec.into_iter().collect(); let keys: Vec<_> = map.into_keys().collect(); assert_eq!(keys.len(), 3); assert!(keys.contains(&1)); assert!(keys.contains(&2)); assert!(keys.contains(&3)); } #[test] fn test_into_values() { let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')]; let map: BTreeMap<_, _> = vec.into_iter().collect(); let values: Vec<_> = map.into_values().collect(); assert_eq!(values.len(), 3); assert!(values.contains(&'a')); assert!(values.contains(&'b')); assert!(values.contains(&'c')); } #[test] fn test_insert_remove_intertwined() { let loops = if cfg!(miri) { 100 } else { 1_000_000 }; let mut map = BTreeMap::new(); 
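    // (Added note: 165 is odd, so gcd(165, 256) == 1 and the update
    // `i = (i + 165) & 0xFF` below cycles through all 256 residues before
    // repeating, exercising inserts and removals at every key position.)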
    let mut i = 1;
    let offset = 165; // somewhat arbitrarily chosen to cover some code paths
    for _ in 0..loops {
        i = (i + offset) & 0xFF;
        map.insert(i, i);
        map.remove(&(0xFF - i));
    }
    map.check();
}

#[test]
fn test_insert_remove_intertwined_ord_chaos() {
    let loops = if cfg!(miri) { 100 } else { 1_000_000 };
    let gov = Governor::new();
    let mut map = BTreeMap::new();
    let mut i = 1;
    let offset = 165; // more arbitrarily copied from above
    for _ in 0..loops {
        i = (i + offset) & 0xFF;
        map.insert(Governed(i, &gov), ());
        map.remove(&Governed(0xFF - i, &gov));
        gov.flip();
    }
    map.check_invariants();
}

use super::map::MIN_LEN;
use super::node::{marker, ForceResult::*, Handle, LeftOrRight::*, NodeRef};

impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
    /// Removes a key-value pair from the tree, and returns that pair, as well as
    /// the leaf edge corresponding to that former pair. It's possible this empties
    /// a root node that is internal, which the caller should pop from the map
    /// holding the tree. The caller should also decrement the map's length.
    pub fn remove_kv_tracking<F: FnOnce()>(
        self,
        handle_emptied_internal_root: F,
    ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
        match self.force() {
            Leaf(node) => node.remove_leaf_kv(handle_emptied_internal_root),
            Internal(node) => node.remove_internal_kv(handle_emptied_internal_root),
        }
    }
}

impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
    fn remove_leaf_kv<F: FnOnce()>(
        self,
        handle_emptied_internal_root: F,
    ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
        let (old_kv, mut pos) = self.remove();
        let len = pos.reborrow().into_node().len();
        if len < MIN_LEN {
            let idx = pos.idx();
            // We have to temporarily forget the child type, because there is no
            // distinct node type for the immediate parents of a leaf.
            let new_pos = match pos.into_node().forget_type().choose_parent_kv() {
                Ok(Left(left_parent_kv)) => {
                    debug_assert!(left_parent_kv.right_child_len() == MIN_LEN - 1);
                    if left_parent_kv.can_merge() {
                        left_parent_kv.merge_tracking_child_edge(Right(idx))
                    } else {
                        debug_assert!(left_parent_kv.left_child_len() > MIN_LEN);
                        left_parent_kv.steal_left(idx)
                    }
                }
                Ok(Right(right_parent_kv)) => {
                    debug_assert!(right_parent_kv.left_child_len() == MIN_LEN - 1);
                    if right_parent_kv.can_merge() {
                        right_parent_kv.merge_tracking_child_edge(Left(idx))
                    } else {
                        debug_assert!(right_parent_kv.right_child_len() > MIN_LEN);
                        right_parent_kv.steal_right(idx)
                    }
                }
                Err(pos) => unsafe { Handle::new_edge(pos, idx) },
            };
            // SAFETY: `new_pos` is the leaf we started from or a sibling.
            pos = unsafe { new_pos.cast_to_leaf_unchecked() };

            // Only if we merged, the parent (if any) has shrunk, but skipping
            // the following step otherwise does not pay off in benchmarks.
            //
            // SAFETY: We won't destroy or rearrange the leaf where `pos` is at
            // by handling its parent recursively; at worst we will destroy or
            // rearrange the parent through the grandparent, thus change the
            // link to the parent inside the leaf.
            if let Ok(parent) = unsafe { pos.reborrow_mut() }.into_node().ascend() {
                if !parent.into_node().forget_type().fix_node_and_affected_ancestors() {
                    handle_emptied_internal_root();
                }
            }
        }
        (old_kv, pos)
    }
}

impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    fn remove_internal_kv<F: FnOnce()>(
        self,
        handle_emptied_internal_root: F,
    ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
        // Remove an adjacent KV from its leaf and then put it back in place of
        // the element we were asked to remove. Prefer the left adjacent KV,
        // for the reasons listed in `choose_parent_kv`.
        let left_leaf_kv = self.left_edge().descend().last_leaf_edge().left_kv();
        let left_leaf_kv = unsafe { left_leaf_kv.ok().unwrap_unchecked() };
        let (left_kv, left_hole) = left_leaf_kv.remove_leaf_kv(handle_emptied_internal_root);

        // The internal node may have been stolen from or merged. Go back right
        // to find where the original KV ended up.
        let mut internal = unsafe { left_hole.next_kv().ok().unwrap_unchecked() };
        let old_kv = internal.replace_kv(left_kv.0, left_kv.1);
        let pos = internal.next_leaf_edge();
        (old_kv, pos)
    }
}

use core::cmp::Ordering;
use core::fmt::{self, Debug};
use core::iter::FusedIterator;

/// Core of an iterator that merges the output of two strictly ascending iterators,
/// for instance a union or a symmetric difference.
pub struct MergeIterInner<I: Iterator> {
    a: I,
    b: I,
    peeked: Option<Peeked<I>>,
}

/// Benchmarks faster than wrapping both iterators in a Peekable,
/// probably because we can afford to impose a FusedIterator bound.
#[derive(Clone, Debug)]
enum Peeked<I: Iterator> {
    A(I::Item),
    B(I::Item),
}

impl<I: Iterator> Clone for MergeIterInner<I>
where
    I: Clone,
    I::Item: Clone,
{
    fn clone(&self) -> Self {
        Self { a: self.a.clone(), b: self.b.clone(), peeked: self.peeked.clone() }
    }
}

impl<I: Iterator> Debug for MergeIterInner<I>
where
    I: Debug,
    I::Item: Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("MergeIterInner").field(&self.a).field(&self.b).field(&self.peeked).finish()
    }
}

impl<I: Iterator> MergeIterInner<I> {
    /// Creates a new core for an iterator merging a pair of sources.
    pub fn new(a: I, b: I) -> Self {
        MergeIterInner { a, b, peeked: None }
    }

    /// Returns the next pair of items stemming from the pair of sources
    /// being merged. If both returned options contain a value, that value
    /// is equal and occurs in both sources. If one of the returned options
    /// contains a value, that value doesn't occur in the other source (or
    /// the sources are not strictly ascending). If neither returned option
    /// contains a value, iteration has finished and subsequent calls will
    /// return the same empty pair.
    pub fn nexts<Cmp: Fn(&I::Item, &I::Item) -> Ordering>(
        &mut self,
        cmp: Cmp,
    ) -> (Option<I::Item>, Option<I::Item>)
    where
        I: FusedIterator,
    {
        let mut a_next;
        let mut b_next;
        match self.peeked.take() {
            Some(Peeked::A(next)) => {
                a_next = Some(next);
                b_next = self.b.next();
            }
            Some(Peeked::B(next)) => {
                b_next = Some(next);
                a_next = self.a.next();
            }
            None => {
                a_next = self.a.next();
                b_next = self.b.next();
            }
        }
        if let (Some(ref a1), Some(ref b1)) = (&a_next, &b_next) {
            match cmp(a1, b1) {
                Ordering::Less => self.peeked = b_next.take().map(Peeked::B),
                Ordering::Greater => self.peeked = a_next.take().map(Peeked::A),
                Ordering::Equal => (),
            }
        }
        (a_next, b_next)
    }

    /// Returns a pair of upper bounds for the `size_hint` of the final iterator.
    pub fn lens(&self) -> (usize, usize)
    where
        I: ExactSizeIterator,
    {
        match self.peeked {
            Some(Peeked::A(_)) => (1 + self.a.len(), self.b.len()),
            Some(Peeked::B(_)) => (self.a.len(), 1 + self.b.len()),
            _ => (self.a.len(), self.b.len()),
        }
    }
}

// This is an attempt at an implementation following the ideal
//
// ```
// struct BTreeMap<K, V> {
//     height: usize,
//     root: Option<Box<Node<K, V, height>>>
// }
//
// struct Node<K, V, height: usize> {
//     keys: [K; 2 * B - 1],
//     vals: [V; 2 * B - 1],
//     edges: [if height > 0 { Box<Node<K, V, height - 1>> } else { () }; 2 * B],
//     parent: Option<(NonNull<Node<K, V, height + 1>>, u16)>,
//     len: u16,
// }
// ```
//
// Since Rust doesn't actually have dependent types and polymorphic recursion,
// we make do with lots of unsafety.
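// (Added note: in the concrete layout below, `B` is 6, so a node holds at most
// `CAPACITY = 2 * B - 1 = 11` keys and values, and an internal node carries
// `2 * B = 12` edges; the "ideal" sketch above describes exactly that shape.)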
// A major goal of this module is to avoid complexity by treating the tree as a generic (if
// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
// this module doesn't care whether the entries are sorted, which nodes can be underfull, or
// even what underfull means. However, we do rely on a few invariants:
//
// - Trees must have uniform depth/height. This means that every path down to a leaf from a
//   given node has exactly the same length.
// - A node of length `n` has `n` keys, `n` values, and `n + 1` edges.
//   This implies that even an empty node has at least one edge.
//   For a leaf node, "having an edge" only means we can identify a position in the node,
//   since leaf edges are empty and need no data representation. In an internal node,
//   an edge both identifies a position and contains a pointer to a child node.

use core::marker::PhantomData;
use core::mem::{self, MaybeUninit};
use core::ptr::{self, NonNull};
use core::slice::SliceIndex;

use crate::alloc::{Allocator, Global, Layout};
use crate::boxed::Box;

const B: usize = 6;
pub const CAPACITY: usize = 2 * B - 1;
pub const MIN_LEN_AFTER_SPLIT: usize = B - 1;
const KV_IDX_CENTER: usize = B - 1;
const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
const EDGE_IDX_RIGHT_OF_CENTER: usize = B;

/// The underlying representation of leaf nodes and part of the representation of internal nodes.
struct LeafNode<K, V> {
    /// We want to be covariant in `K` and `V`.
    parent: Option<NonNull<InternalNode<K, V>>>,

    /// This node's index into the parent node's `edges` array.
    /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
    /// This is only guaranteed to be initialized when `parent` is non-null.
    parent_idx: MaybeUninit<u16>,

    /// The number of keys and values this node stores.
    len: u16,

    /// The arrays storing the actual data of the node. Only the first `len` elements of each
    /// array are initialized and valid.
    keys: [MaybeUninit<K>; CAPACITY],
    vals: [MaybeUninit<V>; CAPACITY],
}

impl<K, V> LeafNode<K, V> {
    /// Initializes a new `LeafNode` in-place.
    unsafe fn init(this: *mut Self) {
        // As a general policy, we leave fields uninitialized if they can be, as this should
        // be both slightly faster and easier to track in Valgrind.
        unsafe {
            // parent_idx, keys, and vals are all MaybeUninit
            ptr::addr_of_mut!((*this).parent).write(None);
            ptr::addr_of_mut!((*this).len).write(0);
        }
    }

    /// Creates a new boxed `LeafNode`.
    fn new() -> Box<Self> {
        unsafe {
            let mut leaf = Box::new_uninit();
            LeafNode::init(leaf.as_mut_ptr());
            leaf.assume_init()
        }
    }
}

/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
/// `InternalNode` can be directly casted to a pointer to the underlying `LeafNode` portion of the
/// node, allowing code to act on leaf and internal nodes generically without having to even check
/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
#[repr(C)]
// gdb_providers.py uses this type name for introspection.
struct InternalNode<K, V> {
    data: LeafNode<K, V>,

    /// The pointers to the children of this node. `len + 1` of these are considered
    /// initialized and valid, except that near the end, while the tree is held
    /// through borrow type `Dying`, some of these pointers are dangling.
    edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
}

impl<K, V> InternalNode<K, V> {
    /// Creates a new boxed `InternalNode`.
    ///
    /// # Safety
    /// An invariant of internal nodes is that they have at least one
    /// initialized and valid edge. This function does not set up
    /// such an edge.
    unsafe fn new() -> Box<Self> {
        unsafe {
            let mut node = Box::<Self>::new_uninit();
            // We only need to initialize the data; the edges are MaybeUninit.
            LeafNode::init(ptr::addr_of_mut!((*node.as_mut_ptr()).data));
            node.assume_init()
        }
    }
}

/// A managed, non-null pointer to a node. This is either an owned pointer to
/// `LeafNode<K, V>` or an owned pointer to `InternalNode<K, V>`.
///
/// However, `BoxedNode` contains no information as to which of the two types
/// of nodes it actually contains, and, partially due to this lack of information,
/// is not a separate type and has no destructor.
type BoxedNode<K, V> = NonNull<LeafNode<K, V>>;

// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
// is `Mut`. This is technically wrong, but cannot result in any unsafety due to
// internal use of `NodeRef` because we stay completely generic over `K` and `V`.
// However, whenever a public type wraps `NodeRef`, make sure that it has the
// correct variance.
///
/// A reference to a node.
///
/// This type has a number of parameters that controls how it acts:
/// - `BorrowType`: A dummy type that describes the kind of borrow and carries a lifetime.
///    - When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`.
///    - When this is `ValMut<'a>`, the `NodeRef` acts roughly like `&'a Node`
///      with respect to keys and tree structure, but also allows many
///      mutable references to values throughout the tree to coexist.
///    - When this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
///      although insert methods allow a mutable pointer to a value to coexist.
///    - When this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`,
///      but does not have a destructor, and must be cleaned up manually.
///    - When this is `Dying`, the `NodeRef` still acts roughly like `Box<Node>`,
///      but has methods to destroy the tree bit by bit, and ordinary methods,
///      while not marked as unsafe to call, can invoke UB if called incorrectly.
///   Since any `NodeRef` allows navigating through the tree, `BorrowType`
///   effectively applies to the entire tree, not just to the node itself.
/// - `K` and `V`: These are the types of keys and values stored in the nodes.
/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
///   `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
///   `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
///   `NodeRef` could be pointing to either type of node.
///   `Type` is named `NodeType` when used outside `NodeRef`.
///
/// Both `BorrowType` and `NodeType` restrict what methods we implement, to
/// exploit static type safety. There are limitations in the way we can apply
/// such restrictions:
/// - For each type parameter, we can only define a method either generically
///   or for one particular type. For example, we cannot define a method like
///   `into_kv` generically for all `BorrowType`, or once for all types that
///   carry a lifetime, because we want it to return `&'a` references.
///   Therefore, we define it only for the least powerful type `Immut<'a>`.
/// - We cannot get implicit coercion from say `Mut<'a>` to `Immut<'a>`.
///   Therefore, we have to explicitly call `reborrow` on a more powerful
///   `NodeRef` in order to reach a method like `into_kv`.
///
/// All methods on `NodeRef` that return some kind of reference, either:
/// - Take `self` by value, and return the lifetime carried by `BorrowType`.
///   Sometimes, to invoke such a method, we need to call `reborrow_mut`.
/// - Take `self` by reference, and (implicitly) return that reference's
///   lifetime, instead of the lifetime carried by `BorrowType`. That way,
///   the borrow checker guarantees that the `NodeRef` remains borrowed as long
///   as the returned reference is used.
///   The methods supporting insert bend this rule by returning a raw pointer,
///   i.e., a reference without any lifetime.
pub struct NodeRef<BorrowType, K, V, Type> {
    /// The number of levels that the node and the level of leaves are apart, a
    /// constant of the node that cannot be entirely described by `Type`, and that
    /// the node itself does not store. We only need to store the height of the root
    /// node, and derive every other node's height from it.
    /// Must be zero if `Type` is `Leaf` and non-zero if `Type` is `Internal`.
    height: usize,
    /// The pointer to the leaf or internal node. The definition of `InternalNode`
    /// ensures that the pointer is valid either way.
    node: NonNull<LeafNode<K, V>>,
    _marker: PhantomData<(BorrowType, Type)>,
}

/// The root node of an owned tree.
///
/// Note that this does not have a destructor, and must be cleaned up manually.
pub type Root<K, V> = NodeRef<marker::Owned, K, V, marker::LeafOrInternal>;

impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> {}
impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
    fn clone(&self) -> Self {
        *self
    }
}

unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}

unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send for NodeRef<marker::Immut<'a>, K, V, Type> {}
unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::Mut<'a>, K, V, Type> {}
unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::ValMut<'a>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Dying, K, V, Type> {}

impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
    fn new_leaf() -> Self {
        Self::from_new_leaf(LeafNode::new())
    }

    fn from_new_leaf(leaf: Box<LeafNode<K, V>>) -> Self {
        NodeRef { height: 0, node: NonNull::from(Box::leak(leaf)), _marker: PhantomData }
    }
}

impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
    fn new_internal(child: Root<K, V>) -> Self {
        let mut new_node = unsafe { InternalNode::new() };
        new_node.edges[0].write(child.node);
        unsafe { NodeRef::from_new_internal(new_node, child.height + 1) }
    }

    /// # Safety
    /// `height` must not be zero.
    unsafe fn from_new_internal(internal: Box<InternalNode<K, V>>, height: usize) -> Self {
        debug_assert!(height > 0);
        let node = NonNull::from(Box::leak(internal)).cast();
        let mut this = NodeRef { height, node, _marker: PhantomData };
        this.borrow_mut().correct_all_childrens_parent_links();
        this
    }
}

impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Unpack a node reference that was packed as `NodeRef::parent`.
    fn from_internal(node: NonNull<InternalNode<K, V>>, height: usize) -> Self {
        debug_assert!(height > 0);
        NodeRef { height, node: node.cast(), _marker: PhantomData }
    }
}

impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Exposes the data of an internal node.
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_internal_ptr(this: &Self) -> *mut InternalNode<K, V> {
        // SAFETY: the static node type is `Internal`.
        this.node.as_ptr() as *mut InternalNode<K, V>
    }
}

impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Borrows exclusive access to the data of an internal node.
    fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
        let ptr = Self::as_internal_ptr(self);
        unsafe { &mut *ptr }
    }
}

impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the length of the node. This is the number of keys or values.
    /// The number of edges is `len() + 1`.
    /// Note that, despite being safe, calling this function can have the side effect
    /// of invalidating mutable references that unsafe code has created.
    pub fn len(&self) -> usize {
        // Crucially, we only access the `len` field here. If BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        unsafe { usize::from((*Self::as_leaf_ptr(self)).len) }
    }

    /// Returns the number of levels that the node and leaves are apart. Zero
    /// height means the node is a leaf itself. If you picture trees with the
    /// root on top, the number says at which elevation the node appears.
    /// If you picture trees with leaves on top, the number says how high
    /// the tree extends above the node.
    pub fn height(&self) -> usize {
        self.height
    }

    /// Temporarily takes out another, immutable reference to the same node.
    pub fn reborrow(&self) -> NodeRef<marker::Immut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Exposes the leaf portion of any leaf or internal node.
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_leaf_ptr(this: &Self) -> *mut LeafNode<K, V> {
        // The node must be valid for at least the LeafNode portion.
        // This is not a reference in the NodeRef type because we don't know if
        // it should be unique or shared.
        this.node.as_ptr()
    }
}

impl<BorrowType: marker::BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the parent of the current node. Returns `Ok(handle)` if the current
    /// node actually has a parent, where `handle` points to the edge of the parent
    /// that points to the current node. Returns `Err(self)` if the current node has
    /// no parent, giving back the original `NodeRef`.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    pub fn ascend(
        self,
    ) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
        assert!(BorrowType::PERMITS_TRAVERSAL);
        // We need to use raw pointers to nodes because, if BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        let leaf_ptr: *const _ = Self::as_leaf_ptr(&self);
        unsafe { (*leaf_ptr).parent }
            .as_ref()
            .map(|parent| Handle {
                node: NodeRef::from_internal(*parent, self.height + 1),
                idx: unsafe { usize::from((*leaf_ptr).parent_idx.assume_init()) },
                _marker: PhantomData,
            })
            .ok_or(self)
    }

    pub fn first_edge(self) -> Handle<Self, marker::Edge> {
        unsafe { Handle::new_edge(self, 0) }
    }

    pub fn last_edge(self) -> Handle<Self, marker::Edge> {
        let len = self.len();
        unsafe { Handle::new_edge(self, len) }
    }

    /// Note that `self` must be nonempty.
    pub fn first_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        unsafe { Handle::new_kv(self, 0) }
    }

    /// Note that `self` must be nonempty.
    pub fn last_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        unsafe { Handle::new_kv(self, len - 1) }
    }
}

impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Could be a public implementation of PartialEq, but only used in this module.
    fn eq(&self, other: &Self) -> bool {
        let Self { node, height, _marker } = self;
        if node.eq(&other.node) {
            debug_assert_eq!(*height, other.height);
            true
        } else {
            false
        }
    }
}

impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
    /// Exposes the leaf portion of any leaf or internal node in an immutable tree.
    fn into_leaf(self) -> &'a LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&self);
        // SAFETY: there can be no mutable references into this tree borrowed as `Immut`.
        unsafe { &*ptr }
    }

    /// Borrows a view into the keys stored in the node.
pub fn keys(&self) -> &[K] { let leaf = self.into_leaf(); unsafe { MaybeUninit::slice_assume_init_ref(leaf.keys.get_unchecked(..usize::from(leaf.len))) } } } impl NodeRef { /// Similar to `ascend`, gets a reference to a node's parent node, but also /// deallocates the current node in the process. This is unsafe because the /// current node will still be accessible despite being deallocated. pub unsafe fn deallocate_and_ascend( self, ) -> Option, marker::Edge>> { let height = self.height; let node = self.node; let ret = self.ascend().ok(); unsafe { Global.deallocate( node.cast(), if height > 0 { Layout::new::>() } else { Layout::new::>() }, ); } ret } } impl<'a, K, V, Type> NodeRef, K, V, Type> { /// Temporarily takes out another, mutable reference to the same node. Beware, as /// this method is very dangerous, doubly so since it may not immediately appear /// dangerous. /// /// Because mutable pointers can roam anywhere around the tree, the returned /// pointer can easily be used to make the original pointer dangling, out of /// bounds, or invalid under stacked borrow rules. // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` // that restricts the use of navigation methods on reborrowed pointers, // preventing this unsafety. unsafe fn reborrow_mut(&mut self) -> NodeRef, K, V, Type> { NodeRef { height: self.height, node: self.node, _marker: PhantomData } } /// Borrows exclusive access to the leaf portion of any leaf or internal node. fn as_leaf_mut(&mut self) -> &mut LeafNode { let ptr = Self::as_leaf_ptr(self); // SAFETY: we have exclusive access to the entire node. unsafe { &mut *ptr } } /// Offers exclusive access to the leaf portion of any leaf or internal node. fn into_leaf_mut(mut self) -> &'a mut LeafNode { let ptr = Self::as_leaf_ptr(&mut self); // SAFETY: we have exclusive access to the entire node. unsafe { &mut *ptr } } } impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { /// Borrows exclusive access to an element of the key storage area. /// /// # Safety /// `index` is in bounds of 0..CAPACITY unsafe fn key_area_mut(&mut self, index: I) -> &mut Output where I: SliceIndex<[MaybeUninit], Output = Output>, { // SAFETY: the caller will not be able to call further methods on self // until the key slice reference is dropped, as we have unique access // for the lifetime of the borrow. unsafe { self.as_leaf_mut().keys.as_mut_slice().get_unchecked_mut(index) } } /// Borrows exclusive access to an element or slice of the node's value storage area. /// /// # Safety /// `index` is in bounds of 0..CAPACITY unsafe fn val_area_mut(&mut self, index: I) -> &mut Output where I: SliceIndex<[MaybeUninit], Output = Output>, { // SAFETY: the caller will not be able to call further methods on self // until the value slice reference is dropped, as we have unique access // for the lifetime of the borrow. unsafe { self.as_leaf_mut().vals.as_mut_slice().get_unchecked_mut(index) } } } impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Internal> { /// Borrows exclusive access to an element or slice of the node's storage area for edge contents. /// /// # Safety /// `index` is in bounds of 0..CAPACITY + 1 unsafe fn edge_area_mut(&mut self, index: I) -> &mut Output where I: SliceIndex<[MaybeUninit>], Output = Output>, { // SAFETY: the caller will not be able to call further methods on self // until the edge slice reference is dropped, as we have unique access // for the lifetime of the borrow. 
unsafe { self.as_internal_mut().edges.as_mut_slice().get_unchecked_mut(index) } } } impl<'a, K, V, Type> NodeRef, K, V, Type> { /// # Safety /// - The node has more than `idx` initialized elements. unsafe fn into_key_val_mut_at(mut self, idx: usize) -> (&'a K, &'a mut V) { // We only create a reference to the one element we are interested in, // to avoid aliasing with outstanding references to other elements, // in particular, those returned to the caller in earlier iterations. let leaf = Self::as_leaf_ptr(&mut self); let keys = unsafe { ptr::addr_of!((*leaf).keys) }; let vals = unsafe { ptr::addr_of_mut!((*leaf).vals) }; // We must coerce to unsized array pointers because of Rust issue #74679. let keys: *const [_] = keys; let vals: *mut [_] = vals; let key = unsafe { (&*keys.get_unchecked(idx)).assume_init_ref() }; let val = unsafe { (&mut *vals.get_unchecked_mut(idx)).assume_init_mut() }; (key, val) } } impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { /// Borrows exclusive access to the length of the node. pub fn len_mut(&mut self) -> &mut u16 { &mut self.as_leaf_mut().len } } impl<'a, K, V> NodeRef, K, V, marker::Internal> { /// # Safety /// Every item returned by `range` is a valid edge index for the node. unsafe fn correct_childrens_parent_links>(&mut self, range: R) { for i in range { debug_assert!(i <= self.len()); unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link(); } } fn correct_all_childrens_parent_links(&mut self) { let len = self.len(); unsafe { self.correct_childrens_parent_links(0..=len) }; } } impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { /// Sets the node's link to its parent edge, /// without invalidating other references to the node. fn set_parent_link(&mut self, parent: NonNull>, parent_idx: usize) { let leaf = Self::as_leaf_ptr(self); unsafe { (*leaf).parent = Some(parent) }; unsafe { (*leaf).parent_idx.write(parent_idx as u16) }; } } impl NodeRef { /// Clears the root's link to its parent edge. fn clear_parent_link(&mut self) { let mut root_node = self.borrow_mut(); let leaf = root_node.as_leaf_mut(); leaf.parent = None; } } impl NodeRef { /// Returns a new owned tree, with its own root node that is initially empty. pub fn new() -> Self { NodeRef::new_leaf().forget_type() } /// Adds a new internal node with a single edge pointing to the previous root node, /// make that new node the root node, and return it. This increases the height by 1 /// and is the opposite of `pop_internal_level`. pub fn push_internal_level(&mut self) -> NodeRef, K, V, marker::Internal> { super::mem::take_mut(self, |old_root| NodeRef::new_internal(old_root).forget_type()); // `self.borrow_mut()`, except that we just forgot we're internal now: NodeRef { height: self.height, node: self.node, _marker: PhantomData } } /// Removes the internal root node, using its first child as the new root node. /// As it is intended only to be called when the root node has only one child, /// no cleanup is done on any of the keys, values and other children. /// This decreases the height by 1 and is the opposite of `push_internal_level`. /// /// Requires exclusive access to the `Root` object but not to the root node; /// it will not invalidate other handles or references to the root node. /// /// Panics if there is no internal level, i.e., if the root node is a leaf. pub fn pop_internal_level(&mut self) { assert!(self.height > 0); let top = self.node; // SAFETY: we asserted to be internal. 
let internal_self = unsafe { self.borrow_mut().cast_to_internal_unchecked() }; // SAFETY: we borrowed `self` exclusively and its borrow type is exclusive. let internal_node = unsafe { &mut *NodeRef::as_internal_ptr(&internal_self) }; // SAFETY: the first edge is always initialized. self.node = unsafe { internal_node.edges[0].assume_init_read() }; self.height -= 1; self.clear_parent_link(); unsafe { Global.deallocate(top.cast(), Layout::new::>()); } } } impl NodeRef { /// Mutably borrows the owned root node. Unlike `reborrow_mut`, this is safe /// because the return value cannot be used to destroy the root, and there /// cannot be other references to the tree. pub fn borrow_mut(&mut self) -> NodeRef, K, V, Type> { NodeRef { height: self.height, node: self.node, _marker: PhantomData } } /// Slightly mutably borrows the owned root node. pub fn borrow_valmut(&mut self) -> NodeRef, K, V, Type> { NodeRef { height: self.height, node: self.node, _marker: PhantomData } } /// Irreversibly transitions to a reference that permits traversal and offers /// destructive methods and little else. pub fn into_dying(self) -> NodeRef { NodeRef { height: self.height, node: self.node, _marker: PhantomData } } } impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Leaf> { /// Adds a key-value pair to the end of the node. pub fn push(&mut self, key: K, val: V) { let len = self.len_mut(); let idx = usize::from(*len); assert!(idx < CAPACITY); *len += 1; unsafe { self.key_area_mut(idx).write(key); self.val_area_mut(idx).write(val); } } } impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Internal> { /// Adds a key-value pair, and an edge to go to the right of that pair, /// to the end of the node. pub fn push(&mut self, key: K, val: V, edge: Root) { assert!(edge.height == self.height - 1); let len = self.len_mut(); let idx = usize::from(*len); assert!(idx < CAPACITY); *len += 1; unsafe { self.key_area_mut(idx).write(key); self.val_area_mut(idx).write(val); self.edge_area_mut(idx + 1).write(edge.node); Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link(); } } } impl NodeRef { /// Removes any static information asserting that this node is a `Leaf` node. pub fn forget_type(self) -> NodeRef { NodeRef { height: self.height, node: self.node, _marker: PhantomData } } } impl NodeRef { /// Removes any static information asserting that this node is an `Internal` node. pub fn forget_type(self) -> NodeRef { NodeRef { height: self.height, node: self.node, _marker: PhantomData } } } impl NodeRef { /// Checks whether a node is an `Internal` node or a `Leaf` node. pub fn force( self, ) -> ForceResult< NodeRef, NodeRef, > { if self.height == 0 { ForceResult::Leaf(NodeRef { height: self.height, node: self.node, _marker: PhantomData, }) } else { ForceResult::Internal(NodeRef { height: self.height, node: self.node, _marker: PhantomData, }) } } } impl<'a, K, V> NodeRef, K, V, marker::LeafOrInternal> { /// Unsafely asserts to the compiler the static information that this node is a `Leaf`. unsafe fn cast_to_leaf_unchecked(self) -> NodeRef, K, V, marker::Leaf> { debug_assert!(self.height == 0); NodeRef { height: self.height, node: self.node, _marker: PhantomData } } /// Unsafely asserts to the compiler the static information that this node is an `Internal`. unsafe fn cast_to_internal_unchecked(self) -> NodeRef, K, V, marker::Internal> { debug_assert!(self.height > 0); NodeRef { height: self.height, node: self.node, _marker: PhantomData } } } /// A reference to a specific key-value pair or edge within a node. 
The `Node` parameter /// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key-value /// pair) or `Edge` (signifying a handle on an edge). /// /// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to /// a child node, these represent the spaces where child pointers would go between the key-value /// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one /// to the left of the node, one between the two pairs, and one at the right of the node. pub struct Handle { node: Node, idx: usize, _marker: PhantomData, } impl Copy for Handle {} // We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be // `Clone`able is when it is an immutable reference and therefore `Copy`. impl Clone for Handle { fn clone(&self) -> Self { *self } } impl Handle { /// Retrieves the node that contains the edge or key-value pair this handle points to. pub fn into_node(self) -> Node { self.node } /// Returns the position of this handle in the node. pub fn idx(&self) -> usize { self.idx } } impl Handle, marker::KV> { /// Creates a new handle to a key-value pair in `node`. /// Unsafe because the caller must ensure that `idx < node.len()`. pub unsafe fn new_kv(node: NodeRef, idx: usize) -> Self { debug_assert!(idx < node.len()); Handle { node, idx, _marker: PhantomData } } pub fn left_edge(self) -> Handle, marker::Edge> { unsafe { Handle::new_edge(self.node, self.idx) } } pub fn right_edge(self) -> Handle, marker::Edge> { unsafe { Handle::new_edge(self.node, self.idx + 1) } } } impl PartialEq for Handle, HandleType> { fn eq(&self, other: &Self) -> bool { let Self { node, idx, _marker } = self; node.eq(&other.node) && *idx == other.idx } } impl Handle, HandleType> { /// Temporarily takes out another, immutable handle on the same location. pub fn reborrow(&self) -> Handle, K, V, NodeType>, HandleType> { // We can't use Handle::new_kv or Handle::new_edge because we don't know our type Handle { node: self.node.reborrow(), idx: self.idx, _marker: PhantomData } } } impl<'a, K, V, NodeType, HandleType> Handle, K, V, NodeType>, HandleType> { /// Temporarily takes out another, mutable handle on the same location. Beware, as /// this method is very dangerous, doubly so since it may not immediately appear /// dangerous. /// /// For details, see `NodeRef::reborrow_mut`. pub unsafe fn reborrow_mut( &mut self, ) -> Handle, K, V, NodeType>, HandleType> { // We can't use Handle::new_kv or Handle::new_edge because we don't know our type Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData } } } impl Handle, marker::Edge> { /// Creates a new handle to an edge in `node`. /// Unsafe because the caller must ensure that `idx <= node.len()`. pub unsafe fn new_edge(node: NodeRef, idx: usize) -> Self { debug_assert!(idx <= node.len()); Handle { node, idx, _marker: PhantomData } } pub fn left_kv(self) -> Result, marker::KV>, Self> { if self.idx > 0 { Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) }) } else { Err(self) } } pub fn right_kv(self) -> Result, marker::KV>, Self> { if self.idx < self.node.len() { Ok(unsafe { Handle::new_kv(self.node, self.idx) }) } else { Err(self) } } } pub enum LeftOrRight { Left(T), Right(T), } /// Given an edge index where we want to insert into a node filled to capacity, /// computes a sensible KV index of a split point and where to perform the insertion. 
/// The goal of the split point is for its key and value to end up in a parent node; /// the keys, values and edges to the left of the split point become the left child; /// the keys, values and edges to the right of the split point become the right child. fn splitpoint(edge_idx: usize) -> (usize, LeftOrRight) { debug_assert!(edge_idx <= CAPACITY); // Rust issue #74834 tries to explain these symmetric rules. match edge_idx { 0..EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER - 1, LeftOrRight::Left(edge_idx)), EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Left(edge_idx)), EDGE_IDX_RIGHT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Right(0)), _ => (KV_IDX_CENTER + 1, LeftOrRight::Right(edge_idx - (KV_IDX_CENTER + 1 + 1))), } } impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, marker::Edge> { /// Inserts a new key-value pair between the key-value pairs to the right and left of /// this edge. This method assumes that there is enough space in the node for the new /// pair to fit. /// /// The returned pointer points to the inserted value. fn insert_fit(&mut self, key: K, val: V) -> *mut V { debug_assert!(self.node.len() < CAPACITY); let new_len = self.node.len() + 1; unsafe { slice_insert(self.node.key_area_mut(..new_len), self.idx, key); slice_insert(self.node.val_area_mut(..new_len), self.idx, val); *self.node.len_mut() = new_len as u16; self.node.val_area_mut(self.idx).assume_init_mut() } } } impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, marker::Edge> { /// Inserts a new key-value pair between the key-value pairs to the right and left of /// this edge. This method splits the node if there isn't enough room. /// /// The returned pointer points to the inserted value. fn insert(mut self, key: K, val: V) -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) { if self.node.len() < CAPACITY { let val_ptr = self.insert_fit(key, val); let kv = unsafe { Handle::new_kv(self.node, self.idx) }; (InsertResult::Fit(kv), val_ptr) } else { let (middle_kv_idx, insertion) = splitpoint(self.idx); let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) }; let mut result = middle.split(); let mut insertion_edge = match insertion { LeftOrRight::Left(insert_idx) => unsafe { Handle::new_edge(result.left.reborrow_mut(), insert_idx) }, LeftOrRight::Right(insert_idx) => unsafe { Handle::new_edge(result.right.borrow_mut(), insert_idx) }, }; let val_ptr = insertion_edge.insert_fit(key, val); (InsertResult::Split(result), val_ptr) } } } impl<'a, K, V> Handle, K, V, marker::Internal>, marker::Edge> { /// Fixes the parent pointer and index in the child node that this edge /// links to. This is useful when the ordering of edges has been changed, fn correct_parent_link(self) { // Create backpointer without invalidating other references to the node. let ptr = unsafe { NonNull::new_unchecked(NodeRef::as_internal_ptr(&self.node)) }; let idx = self.idx; let mut child = self.descend(); child.set_parent_link(ptr, idx); } } impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, marker::Edge> { /// Inserts a new key-value pair and an edge that will go to the right of that new pair /// between this edge and the key-value pair to the right of this edge. This method assumes /// that there is enough space in the node for the new pair to fit. 
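///
/// Post-condition sketch (illustrative, not part of the original docs): for
/// a handle at edge index `i`, after `insert_fit(k, v, e)` the node holds
/// `k` and `v` at KV index `i`, holds `e` at edge index `i + 1`, and `e`'s
/// parent link points back at this node.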
fn insert_fit(&mut self, key: K, val: V, edge: Root) { debug_assert!(self.node.len() < CAPACITY); debug_assert!(edge.height == self.node.height - 1); let new_len = self.node.len() + 1; unsafe { slice_insert(self.node.key_area_mut(..new_len), self.idx, key); slice_insert(self.node.val_area_mut(..new_len), self.idx, val); slice_insert(self.node.edge_area_mut(..new_len + 1), self.idx + 1, edge.node); *self.node.len_mut() = new_len as u16; self.node.correct_childrens_parent_links(self.idx + 1..new_len + 1); } } /// Inserts a new key-value pair and an edge that will go to the right of that new pair /// between this edge and the key-value pair to the right of this edge. This method splits /// the node if there isn't enough room. fn insert( mut self, key: K, val: V, edge: Root, ) -> InsertResult<'a, K, V, marker::Internal> { assert!(edge.height == self.node.height - 1); if self.node.len() < CAPACITY { self.insert_fit(key, val, edge); let kv = unsafe { Handle::new_kv(self.node, self.idx) }; InsertResult::Fit(kv) } else { let (middle_kv_idx, insertion) = splitpoint(self.idx); let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) }; let mut result = middle.split(); let mut insertion_edge = match insertion { LeftOrRight::Left(insert_idx) => unsafe { Handle::new_edge(result.left.reborrow_mut(), insert_idx) }, LeftOrRight::Right(insert_idx) => unsafe { Handle::new_edge(result.right.borrow_mut(), insert_idx) }, }; insertion_edge.insert_fit(key, val, edge); InsertResult::Split(result) } } } impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, marker::Edge> { /// Inserts a new key-value pair between the key-value pairs to the right and left of /// this edge. This method splits the node if there isn't enough room, and tries to /// insert the split off portion into the parent node recursively, until the root is reached. /// /// If the returned result is a `Fit`, its handle's node can be this edge's node or an ancestor. /// If the returned result is a `Split`, the `left` field will be the root node. /// The returned pointer points to the inserted value. pub fn insert_recursing( self, key: K, value: V, ) -> (InsertResult<'a, K, V, marker::LeafOrInternal>, *mut V) { let (mut split, val_ptr) = match self.insert(key, value) { (InsertResult::Fit(handle), ptr) => { return (InsertResult::Fit(handle.forget_node_type()), ptr); } (InsertResult::Split(split), val_ptr) => (split.forget_node_type(), val_ptr), }; loop { split = match split.left.ascend() { Ok(parent) => match parent.insert(split.kv.0, split.kv.1, split.right) { InsertResult::Fit(handle) => { return (InsertResult::Fit(handle.forget_node_type()), val_ptr); } InsertResult::Split(split) => split.forget_node_type(), }, Err(root) => { return (InsertResult::Split(SplitResult { left: root, ..split }), val_ptr); } }; } } } impl Handle, marker::Edge> { /// Finds the node pointed to by this edge. /// /// The method name assumes you picture trees with the root node on top. /// /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should /// both, upon success, do nothing. pub fn descend(self) -> NodeRef { assert!(BorrowType::PERMITS_TRAVERSAL); // We need to use raw pointers to nodes because, if BorrowType is // marker::ValMut, there might be outstanding mutable references to // values that we must not invalidate. There's no worry accessing the // height field because that value is copied. 
Beware that, once the // node pointer is dereferenced, we access the edges array with a // reference (Rust issue #73987) and invalidate any other references // to or inside the array, should any be around. let parent_ptr = NodeRef::as_internal_ptr(&self.node); let node = unsafe { (*parent_ptr).edges.get_unchecked(self.idx).assume_init_read() }; NodeRef { node, height: self.node.height - 1, _marker: PhantomData } } } impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { pub fn into_kv(self) -> (&'a K, &'a V) { debug_assert!(self.idx < self.node.len()); let leaf = self.node.into_leaf(); let k = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() }; let v = unsafe { leaf.vals.get_unchecked(self.idx).assume_init_ref() }; (k, v) } } impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { pub fn key_mut(&mut self) -> &mut K { unsafe { self.node.key_area_mut(self.idx).assume_init_mut() } } pub fn into_val_mut(self) -> &'a mut V { debug_assert!(self.idx < self.node.len()); let leaf = self.node.into_leaf_mut(); unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() } } } impl<'a, K, V, NodeType> Handle, K, V, NodeType>, marker::KV> { pub fn into_kv_valmut(self) -> (&'a K, &'a mut V) { unsafe { self.node.into_key_val_mut_at(self.idx) } } } impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { pub fn kv_mut(&mut self) -> (&mut K, &mut V) { debug_assert!(self.idx < self.node.len()); // We cannot call separate key and value methods, because calling the second one // invalidates the reference returned by the first. unsafe { let leaf = self.node.as_leaf_mut(); let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_mut(); let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_mut(); (key, val) } } /// Replace the key and value that the KV handle refers to. pub fn replace_kv(&mut self, k: K, v: V) -> (K, V) { let (key, val) = self.kv_mut(); (mem::replace(key, k), mem::replace(val, v)) } } impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { /// Helps implementations of `split` for a particular `NodeType`, /// by taking care of leaf data. fn split_leaf_data(&mut self, new_node: &mut LeafNode) -> (K, V) { debug_assert!(self.idx < self.node.len()); let old_len = self.node.len(); let new_len = old_len - self.idx - 1; new_node.len = new_len as u16; unsafe { let k = self.node.key_area_mut(self.idx).assume_init_read(); let v = self.node.val_area_mut(self.idx).assume_init_read(); move_to_slice( self.node.key_area_mut(self.idx + 1..old_len), &mut new_node.keys[..new_len], ); move_to_slice( self.node.val_area_mut(self.idx + 1..old_len), &mut new_node.vals[..new_len], ); *self.node.len_mut() = self.idx as u16; (k, v) } } } impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, marker::KV> { /// Splits the underlying node into three parts: /// /// - The node is truncated to only contain the key-value pairs to the left of /// this handle. /// - The key and value pointed to by this handle are extracted. /// - All the key-value pairs to the right of this handle are put into a newly /// allocated node. pub fn split(mut self) -> SplitResult<'a, K, V, marker::Leaf> { let mut new_node = LeafNode::new(); let kv = self.split_leaf_data(&mut new_node); let right = NodeRef::from_new_leaf(new_node); SplitResult { left: self.node, kv, right } } /// Removes the key-value pair pointed to by this handle and returns it, along with the edge /// that the key-value pair collapsed into. 
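///
/// Sketch of the returned pair (`kv_handle` below is a hypothetical
/// `marker::KV` handle into a leaf holding `[(1, 'a'), (2, 'b')]`):
/// ```ignore
/// let ((k, v), edge) = kv_handle.remove(); // handle at KV index 0
/// assert_eq!((k, v), (1, 'a'));
/// // `edge` now addresses the gap at index 0, just left of (2, 'b').
/// ```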
pub fn remove( mut self, ) -> ((K, V), Handle, K, V, marker::Leaf>, marker::Edge>) { let old_len = self.node.len(); unsafe { let k = slice_remove(self.node.key_area_mut(..old_len), self.idx); let v = slice_remove(self.node.val_area_mut(..old_len), self.idx); *self.node.len_mut() = (old_len - 1) as u16; ((k, v), self.left_edge()) } } } impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, marker::KV> { /// Splits the underlying node into three parts: /// /// - The node is truncated to only contain the edges and key-value pairs to the /// left of this handle. /// - The key and value pointed to by this handle are extracted. /// - All the edges and key-value pairs to the right of this handle are put into /// a newly allocated node. pub fn split(mut self) -> SplitResult<'a, K, V, marker::Internal> { let old_len = self.node.len(); unsafe { let mut new_node = InternalNode::new(); let kv = self.split_leaf_data(&mut new_node.data); let new_len = usize::from(new_node.data.len); move_to_slice( self.node.edge_area_mut(self.idx + 1..old_len + 1), &mut new_node.edges[..new_len + 1], ); let height = self.node.height; let right = NodeRef::from_new_internal(new_node, height); SplitResult { left: self.node, kv, right } } } } /// Represents a session for evaluating and performing a balancing operation /// around an internal key-value pair. pub struct BalancingContext<'a, K, V> { parent: Handle, K, V, marker::Internal>, marker::KV>, left_child: NodeRef, K, V, marker::LeafOrInternal>, right_child: NodeRef, K, V, marker::LeafOrInternal>, } impl<'a, K, V> Handle, K, V, marker::Internal>, marker::KV> { pub fn consider_for_balancing(self) -> BalancingContext<'a, K, V> { let self1 = unsafe { ptr::read(&self) }; let self2 = unsafe { ptr::read(&self) }; BalancingContext { parent: self, left_child: self1.left_edge().descend(), right_child: self2.right_edge().descend(), } } } impl<'a, K, V> NodeRef, K, V, marker::LeafOrInternal> { /// Chooses a balancing context involving the node as a child, thus between /// the KV immediately to the left or to the right in the parent node. /// Returns an `Err` if there is no parent. /// Panics if the parent is empty. /// /// Prefers the left side, to be optimal if the given node is somehow /// underfull, meaning here only that it has fewer elements than its left /// sibling and than its right sibling, if they exist. In that case, /// merging with the left sibling is faster, since we only need to move /// the node's N elements, instead of shifting them to the right and moving /// more than N elements in front. Stealing from the left sibling is also /// typically faster, since we only need to shift the node's N elements to /// the right, instead of shifting at least N of the sibling's elements to /// the left. 
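///
/// Orientation sketch (illustrative):
/// ```ignore
/// match node.choose_parent_kv() {
///     Ok(LeftOrRight::Left(ctx)) => { /* `node` is `ctx.right_child` */ }
///     Ok(LeftOrRight::Right(ctx)) => { /* `node` is `ctx.left_child` */ }
///     Err(root) => { /* `node` has no parent */ }
/// }
/// ```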
pub fn choose_parent_kv(self) -> Result>, Self> { match unsafe { ptr::read(&self) }.ascend() { Ok(parent_edge) => match parent_edge.left_kv() { Ok(left_parent_kv) => Ok(LeftOrRight::Left(BalancingContext { parent: unsafe { ptr::read(&left_parent_kv) }, left_child: left_parent_kv.left_edge().descend(), right_child: self, })), Err(parent_edge) => match parent_edge.right_kv() { Ok(right_parent_kv) => Ok(LeftOrRight::Right(BalancingContext { parent: unsafe { ptr::read(&right_parent_kv) }, left_child: self, right_child: right_parent_kv.right_edge().descend(), })), Err(_) => unreachable!("empty internal node"), }, }, Err(root) => Err(root), } } } impl<'a, K, V> BalancingContext<'a, K, V> { pub fn left_child_len(&self) -> usize { self.left_child.len() } pub fn right_child_len(&self) -> usize { self.right_child.len() } pub fn into_left_child(self) -> NodeRef, K, V, marker::LeafOrInternal> { self.left_child } pub fn into_right_child(self) -> NodeRef, K, V, marker::LeafOrInternal> { self.right_child } /// Returns whether merging is possible, i.e., whether there is enough room /// in a node to combine the central KV with both adjacent child nodes. pub fn can_merge(&self) -> bool { self.left_child.len() + 1 + self.right_child.len() <= CAPACITY } } impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> { /// Performs a merge and lets a closure decide what to return. fn do_merge< F: FnOnce( NodeRef, K, V, marker::Internal>, NodeRef, K, V, marker::LeafOrInternal>, ) -> R, R, >( self, result: F, ) -> R { let Handle { node: mut parent_node, idx: parent_idx, _marker } = self.parent; let old_parent_len = parent_node.len(); let mut left_node = self.left_child; let old_left_len = left_node.len(); let mut right_node = self.right_child; let right_len = right_node.len(); let new_left_len = old_left_len + 1 + right_len; assert!(new_left_len <= CAPACITY); unsafe { *left_node.len_mut() = new_left_len as u16; let parent_key = slice_remove(parent_node.key_area_mut(..old_parent_len), parent_idx); left_node.key_area_mut(old_left_len).write(parent_key); move_to_slice( right_node.key_area_mut(..right_len), left_node.key_area_mut(old_left_len + 1..new_left_len), ); let parent_val = slice_remove(parent_node.val_area_mut(..old_parent_len), parent_idx); left_node.val_area_mut(old_left_len).write(parent_val); move_to_slice( right_node.val_area_mut(..right_len), left_node.val_area_mut(old_left_len + 1..new_left_len), ); slice_remove(&mut parent_node.edge_area_mut(..old_parent_len + 1), parent_idx + 1); parent_node.correct_childrens_parent_links(parent_idx + 1..old_parent_len); *parent_node.len_mut() -= 1; if parent_node.height > 1 { // SAFETY: the height of the nodes being merged is one below the height // of the node of this edge, thus above zero, so they are internal. let mut left_node = left_node.reborrow_mut().cast_to_internal_unchecked(); let mut right_node = right_node.cast_to_internal_unchecked(); move_to_slice( right_node.edge_area_mut(..right_len + 1), left_node.edge_area_mut(old_left_len + 1..new_left_len + 1), ); left_node.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1); Global.deallocate(right_node.node.cast(), Layout::new::>()); } else { Global.deallocate(right_node.node.cast(), Layout::new::>()); } } result(parent_node, left_node) } /// Merges the parent's key-value pair and both adjacent child nodes into /// the left child node and returns the shrunk parent node. /// /// Panics unless we `.can_merge()`. 
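///
/// Call-pattern sketch (illustrative; `fix_node_through_parent` later in
/// this file follows the same shape):
/// ```ignore
/// let ctx = parent_kv.consider_for_balancing();
/// if ctx.can_merge() {
///     // left.len() + 1 + right.len() fits into a single node
///     let shrunk_parent = ctx.merge_tracking_parent();
/// }
/// ```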
pub fn merge_tracking_parent(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> { self.do_merge(|parent, _child| parent) } /// Merges the parent's key-value pair and both adjacent child nodes into /// the left child node and returns that child node. /// /// Panics unless we `.can_merge()`. pub fn merge_tracking_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> { self.do_merge(|_parent, child| child) } /// Merges the parent's key-value pair and both adjacent child nodes into /// the left child node and returns the edge handle in that child node /// where the tracked child edge ended up. /// /// Panics unless we `.can_merge()`. pub fn merge_tracking_child_edge( self, track_edge_idx: LeftOrRight<usize>, ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> { let old_left_len = self.left_child.len(); let right_len = self.right_child.len(); assert!(match track_edge_idx { LeftOrRight::Left(idx) => idx <= old_left_len, LeftOrRight::Right(idx) => idx <= right_len, }); let child = self.merge_tracking_child(); let new_idx = match track_edge_idx { LeftOrRight::Left(idx) => idx, LeftOrRight::Right(idx) => old_left_len + 1 + idx, }; unsafe { Handle::new_edge(child, new_idx) } } /// Removes a key-value pair from the left child and places it in the key-value storage /// of the parent, while pushing the old parent key-value pair into the right child. /// Returns a handle to the edge in the right child corresponding to where the original /// edge specified by `track_right_edge_idx` ended up. pub fn steal_left( mut self, track_right_edge_idx: usize, ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> { self.bulk_steal_left(1); unsafe { Handle::new_edge(self.right_child, 1 + track_right_edge_idx) } } /// Removes a key-value pair from the right child and places it in the key-value storage /// of the parent, while pushing the old parent key-value pair onto the left child. /// Returns a handle to the edge in the left child specified by `track_left_edge_idx`, /// which didn't move. pub fn steal_right( mut self, track_left_edge_idx: usize, ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> { self.bulk_steal_right(1); unsafe { Handle::new_edge(self.left_child, track_left_edge_idx) } } /// This does stealing similar to `steal_left` but steals multiple elements at once. pub fn bulk_steal_left(&mut self, count: usize) { assert!(count > 0); unsafe { let left_node = &mut self.left_child; let old_left_len = left_node.len(); let right_node = &mut self.right_child; let old_right_len = right_node.len(); // Make sure that we may steal safely. assert!(old_right_len + count <= CAPACITY); assert!(old_left_len >= count); let new_left_len = old_left_len - count; let new_right_len = old_right_len + count; *left_node.len_mut() = new_left_len as u16; *right_node.len_mut() = new_right_len as u16; // Move leaf data. { // Make room for stolen elements in the right child. slice_shr(right_node.key_area_mut(..new_right_len), count); slice_shr(right_node.val_area_mut(..new_right_len), count); // Move elements from the left child to the right one. move_to_slice( left_node.key_area_mut(new_left_len + 1..old_left_len), right_node.key_area_mut(..count - 1), ); move_to_slice( left_node.val_area_mut(new_left_len + 1..old_left_len), right_node.val_area_mut(..count - 1), ); // Move the left-most stolen pair to the parent.
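// (Added note) The `slice_shr` calls above vacated slots `0..count` of the
// right child; slots `0..count - 1` have just been filled with the stolen
// elements, slot `count - 1` is reserved for the old parent KV, and the
// left child's pair at `new_left_len` now moves up into the parent.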
let k = left_node.key_area_mut(new_left_len).assume_init_read(); let v = left_node.val_area_mut(new_left_len).assume_init_read(); let (k, v) = self.parent.replace_kv(k, v); // Move parent's key-value pair to the right child. right_node.key_area_mut(count - 1).write(k); right_node.val_area_mut(count - 1).write(v); } match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) { (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => { // Make room for stolen edges. slice_shr(right.edge_area_mut(..new_right_len + 1), count); // Steal edges. move_to_slice( left.edge_area_mut(new_left_len + 1..old_left_len + 1), right.edge_area_mut(..count), ); right.correct_childrens_parent_links(0..new_right_len + 1); } (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {} _ => unreachable!(), } } } /// The symmetric clone of `bulk_steal_left`. pub fn bulk_steal_right(&mut self, count: usize) { assert!(count > 0); unsafe { let left_node = &mut self.left_child; let old_left_len = left_node.len(); let right_node = &mut self.right_child; let old_right_len = right_node.len(); // Make sure that we may steal safely. assert!(old_left_len + count <= CAPACITY); assert!(old_right_len >= count); let new_left_len = old_left_len + count; let new_right_len = old_right_len - count; *left_node.len_mut() = new_left_len as u16; *right_node.len_mut() = new_right_len as u16; // Move leaf data. { // Move the right-most stolen pair to the parent. let k = right_node.key_area_mut(count - 1).assume_init_read(); let v = right_node.val_area_mut(count - 1).assume_init_read(); let (k, v) = self.parent.replace_kv(k, v); // Move parent's key-value pair to the left child. left_node.key_area_mut(old_left_len).write(k); left_node.val_area_mut(old_left_len).write(v); // Move elements from the right child to the left one. move_to_slice( right_node.key_area_mut(..count - 1), left_node.key_area_mut(old_left_len + 1..new_left_len), ); move_to_slice( right_node.val_area_mut(..count - 1), left_node.val_area_mut(old_left_len + 1..new_left_len), ); // Fill gap where stolen elements used to be. slice_shl(right_node.key_area_mut(..old_right_len), count); slice_shl(right_node.val_area_mut(..old_right_len), count); } match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) { (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => { // Steal edges. move_to_slice( right.edge_area_mut(..count), left.edge_area_mut(old_left_len + 1..new_left_len + 1), ); // Fill gap where stolen edges used to be. slice_shl(right.edge_area_mut(..old_right_len + 1), count); left.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1); right.correct_childrens_parent_links(0..new_right_len + 1); } (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {} _ => unreachable!(), } } } } impl Handle, marker::Edge> { pub fn forget_node_type( self, ) -> Handle, marker::Edge> { unsafe { Handle::new_edge(self.node.forget_type(), self.idx) } } } impl Handle, marker::Edge> { pub fn forget_node_type( self, ) -> Handle, marker::Edge> { unsafe { Handle::new_edge(self.node.forget_type(), self.idx) } } } impl Handle, marker::KV> { pub fn forget_node_type( self, ) -> Handle, marker::KV> { unsafe { Handle::new_kv(self.node.forget_type(), self.idx) } } } impl Handle, marker::KV> { pub fn forget_node_type( self, ) -> Handle, marker::KV> { unsafe { Handle::new_kv(self.node.forget_type(), self.idx) } } } impl Handle, Type> { /// Checks whether the underlying node is an `Internal` node or a `Leaf` node. 
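///
/// Dispatch sketch (illustrative; the same idiom as `NodeRef::force`):
/// ```ignore
/// match handle.force() {
///     ForceResult::Leaf(leaf_handle) => { /* node height is 0 */ }
///     ForceResult::Internal(internal_handle) => { /* node height is > 0 */ }
/// }
/// ```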
pub fn force( self, ) -> ForceResult< Handle, Type>, Handle, Type>, > { match self.node.force() { ForceResult::Leaf(node) => { ForceResult::Leaf(Handle { node, idx: self.idx, _marker: PhantomData }) } ForceResult::Internal(node) => { ForceResult::Internal(Handle { node, idx: self.idx, _marker: PhantomData }) } } } } impl<'a, K, V, Type> Handle, K, V, marker::LeafOrInternal>, Type> { /// Unsafely asserts to the compiler the static information that the handle's node is a `Leaf`. pub unsafe fn cast_to_leaf_unchecked( self, ) -> Handle, K, V, marker::Leaf>, Type> { let node = unsafe { self.node.cast_to_leaf_unchecked() }; Handle { node, idx: self.idx, _marker: PhantomData } } } impl<'a, K, V> Handle, K, V, marker::LeafOrInternal>, marker::Edge> { /// Move the suffix after `self` from one node to another one. `right` must be empty. /// The first edge of `right` remains unchanged. pub fn move_suffix( &mut self, right: &mut NodeRef, K, V, marker::LeafOrInternal>, ) { unsafe { let new_left_len = self.idx; let mut left_node = self.reborrow_mut().into_node(); let old_left_len = left_node.len(); let new_right_len = old_left_len - new_left_len; let mut right_node = right.reborrow_mut(); assert!(right_node.len() == 0); assert!(left_node.height == right_node.height); if new_right_len > 0 { *left_node.len_mut() = new_left_len as u16; *right_node.len_mut() = new_right_len as u16; move_to_slice( left_node.key_area_mut(new_left_len..old_left_len), right_node.key_area_mut(..new_right_len), ); move_to_slice( left_node.val_area_mut(new_left_len..old_left_len), right_node.val_area_mut(..new_right_len), ); match (left_node.force(), right_node.force()) { (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => { move_to_slice( left.edge_area_mut(new_left_len + 1..old_left_len + 1), right.edge_area_mut(1..new_right_len + 1), ); right.correct_childrens_parent_links(1..new_right_len + 1); } (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {} _ => unreachable!(), } } } } } pub enum ForceResult { Leaf(Leaf), Internal(Internal), } /// Result of insertion, when a node needed to expand beyond its capacity. pub struct SplitResult<'a, K, V, NodeType> { // Altered node in existing tree with elements and edges that belong to the left of `kv`. pub left: NodeRef, K, V, NodeType>, // Some key and value split off, to be inserted elsewhere. pub kv: (K, V), // Owned, unattached, new node with elements and edges that belong to the right of `kv`. pub right: NodeRef, } impl<'a, K, V> SplitResult<'a, K, V, marker::Leaf> { pub fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> { SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() } } } impl<'a, K, V> SplitResult<'a, K, V, marker::Internal> { pub fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> { SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() } } } pub enum InsertResult<'a, K, V, NodeType> { Fit(Handle, K, V, NodeType>, marker::KV>), Split(SplitResult<'a, K, V, NodeType>), } pub mod marker { use core::marker::PhantomData; pub enum Leaf {} pub enum Internal {} pub enum LeafOrInternal {} pub enum Owned {} pub enum Dying {} pub struct Immut<'a>(PhantomData<&'a ()>); pub struct Mut<'a>(PhantomData<&'a mut ()>); pub struct ValMut<'a>(PhantomData<&'a mut ()>); pub trait BorrowType { // Whether node references of this borrow type allow traversing // to other nodes in the tree. 
const PERMITS_TRAVERSAL: bool = true; } impl BorrowType for Owned { // Traversal isn't needed; it happens using the result of `borrow_mut`. // By disabling traversal, and only creating new references to roots, // we know that every reference of the `Owned` type is to a root node. const PERMITS_TRAVERSAL: bool = false; } impl BorrowType for Dying {} impl<'a> BorrowType for Immut<'a> {} impl<'a> BorrowType for Mut<'a> {} impl<'a> BorrowType for ValMut<'a> {} pub enum KV {} pub enum Edge {} } /// Inserts a value into a slice of initialized elements followed by one uninitialized element. /// /// # Safety /// The slice has more than `idx` elements. unsafe fn slice_insert<T>(slice: &mut [MaybeUninit<T>], idx: usize, val: T) { unsafe { let len = slice.len(); debug_assert!(len > idx); let slice_ptr = slice.as_mut_ptr(); if len > idx + 1 { ptr::copy(slice_ptr.add(idx), slice_ptr.add(idx + 1), len - idx - 1); } (*slice_ptr.add(idx)).write(val); } } /// Removes and returns a value from a slice of all initialized elements, leaving behind one /// trailing uninitialized element. /// /// # Safety /// The slice has more than `idx` elements. unsafe fn slice_remove<T>(slice: &mut [MaybeUninit<T>], idx: usize) -> T { unsafe { let len = slice.len(); debug_assert!(idx < len); let slice_ptr = slice.as_mut_ptr(); let ret = (*slice_ptr.add(idx)).assume_init_read(); ptr::copy(slice_ptr.add(idx + 1), slice_ptr.add(idx), len - idx - 1); ret } } /// Shifts the elements in a slice `distance` positions to the left. /// /// # Safety /// The slice has at least `distance` elements. unsafe fn slice_shl<T>(slice: &mut [MaybeUninit<T>], distance: usize) { unsafe { let slice_ptr = slice.as_mut_ptr(); ptr::copy(slice_ptr.add(distance), slice_ptr, slice.len() - distance); } } /// Shifts the elements in a slice `distance` positions to the right. /// /// # Safety /// The slice has at least `distance` elements. unsafe fn slice_shr<T>(slice: &mut [MaybeUninit<T>], distance: usize) { unsafe { let slice_ptr = slice.as_mut_ptr(); ptr::copy(slice_ptr, slice_ptr.add(distance), slice.len() - distance); } } /// Moves all values from a slice of initialized elements to a slice /// of uninitialized elements, leaving behind `src` as all uninitialized. /// Works like `dst.copy_from_slice(src)` but does not require `T` to be `Copy`. fn move_to_slice<T>(src: &mut [MaybeUninit<T>], dst: &mut [MaybeUninit<T>]) { assert!(src.len() == dst.len()); unsafe { ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), src.len()); } } #[cfg(test)] mod tests; use super::super::navigate; use super::*; use crate::fmt::Debug; use crate::string::String; impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> { // Asserts that the back pointer in each reachable node points to its parent. pub fn assert_back_pointers(self) { if let ForceResult::Internal(node) = self.force() { for idx in 0..=node.len() { let edge = unsafe { Handle::new_edge(node, idx) }; let child = edge.descend(); assert!(child.ascend().ok() == Some(edge)); child.assert_back_pointers(); } } } // Renders a multi-line display of the keys in order and in tree hierarchy, // picturing the tree growing sideways from its root on the left to its // leaves on the right.
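// For example (an illustrative rendering for a root KV `2` over leaves
// `[1]` and `[3]`; exact spacing depends on the node heights):
//
//    [1]
//  2
//    [3]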
pub fn dump_keys(self) -> String where K: Debug, { let mut result = String::new(); self.visit_nodes_in_order(|pos| match pos { navigate::Position::Leaf(leaf) => { let depth = self.height(); let indent = " ".repeat(depth); result += &format!("\n{}{:?}", indent, leaf.keys()); } navigate::Position::Internal(_) => {} navigate::Position::InternalKV(kv) => { let depth = self.height() - kv.into_node().height(); let indent = " ".repeat(depth); result += &format!("\n{}{:?}", indent, kv.into_kv().0); } }); result } } #[test] fn test_splitpoint() { for idx in 0..=CAPACITY { let (middle_kv_idx, insertion) = splitpoint(idx); // Simulate performing the split: let mut left_len = middle_kv_idx; let mut right_len = CAPACITY - middle_kv_idx - 1; match insertion { LeftOrRight::Left(edge_idx) => { assert!(edge_idx <= left_len); left_len += 1; } LeftOrRight::Right(edge_idx) => { assert!(edge_idx <= right_len); right_len += 1; } } assert!(left_len >= MIN_LEN_AFTER_SPLIT); assert!(right_len >= MIN_LEN_AFTER_SPLIT); assert!(left_len + right_len == CAPACITY); } } #[test] fn test_partial_eq() { let mut root1 = NodeRef::new_leaf(); root1.borrow_mut().push(1, ()); let mut root1 = NodeRef::new_internal(root1.forget_type()).forget_type(); let root2 = Root::new(); root1.reborrow().assert_back_pointers(); root2.reborrow().assert_back_pointers(); let leaf_edge_1a = root1.reborrow().first_leaf_edge().forget_node_type(); let leaf_edge_1b = root1.reborrow().last_leaf_edge().forget_node_type(); let top_edge_1 = root1.reborrow().first_edge(); let top_edge_2 = root2.reborrow().first_edge(); assert!(leaf_edge_1a == leaf_edge_1a); assert!(leaf_edge_1a != leaf_edge_1b); assert!(leaf_edge_1a != top_edge_1); assert!(leaf_edge_1a != top_edge_2); assert!(top_edge_1 == top_edge_1); assert!(top_edge_1 != top_edge_2); root1.pop_internal_level(); unsafe { root1.into_dying().deallocate_and_ascend() }; unsafe { root2.into_dying().deallocate_and_ascend() }; } #[test] #[cfg(target_arch = "x86_64")] fn test_sizes() { assert_eq!(core::mem::size_of::<LeafNode<(), ()>>(), 16); assert_eq!(core::mem::size_of::<LeafNode<i64, i64>>(), 16 + CAPACITY * 2 * 8); assert_eq!(core::mem::size_of::<InternalNode<(), ()>>(), 16 + (CAPACITY + 1) * 8); assert_eq!(core::mem::size_of::<InternalNode<i64, i64>>(), 16 + (CAPACITY * 3 + 1) * 8); } use super::map::MIN_LEN; use super::node::{marker, ForceResult::*, Handle, LeftOrRight::*, NodeRef, Root}; impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> { /// Stocks up a possibly underfull node by merging with or stealing from a /// sibling. If successful but at the cost of shrinking the parent node, /// returns that shrunk parent node. Returns an `Err` if the node is /// an empty root. fn fix_node_through_parent( self, ) -> Result<Option<NodeRef<marker::Mut<'a>, K, V, marker::Internal>>, Self> { let len = self.len(); if len >= MIN_LEN { Ok(None) } else { match self.choose_parent_kv() { Ok(Left(mut left_parent_kv)) => { if left_parent_kv.can_merge() { let parent = left_parent_kv.merge_tracking_parent(); Ok(Some(parent)) } else { left_parent_kv.bulk_steal_left(MIN_LEN - len); Ok(None) } } Ok(Right(mut right_parent_kv)) => { if right_parent_kv.can_merge() { let parent = right_parent_kv.merge_tracking_parent(); Ok(Some(parent)) } else { right_parent_kv.bulk_steal_right(MIN_LEN - len); Ok(None) } } Err(root) => { if len > 0 { Ok(None) } else { Err(root) } } } } } } impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> { /// Stocks up a possibly underfull node, and if that causes its parent node /// to shrink, stocks up the parent, recursively.
/// Returns `true` if it fixed the tree, `false` if it couldn't because the /// root node became empty. /// /// This method does not expect ancestors to already be underfull upon entry /// and panics if it encounters an empty ancestor. pub fn fix_node_and_affected_ancestors(mut self) -> bool { loop { match self.fix_node_through_parent() { Ok(Some(parent)) => self = parent.forget_type(), Ok(None) => return true, Err(_) => return false, } } } } impl Root { /// Removes empty levels on the top, but keeps an empty leaf if the entire tree is empty. pub fn fix_top(&mut self) { while self.height() > 0 && self.len() == 0 { self.pop_internal_level(); } } /// Stocks up or merge away any underfull nodes on the right border of the /// tree. The other nodes, those that are not the root nor a rightmost edge, /// must already have at least MIN_LEN elements. pub fn fix_right_border(&mut self) { self.fix_top(); if self.len() > 0 { self.borrow_mut().last_kv().fix_right_border_of_right_edge(); self.fix_top(); } } /// The symmetric clone of `fix_right_border`. pub fn fix_left_border(&mut self) { self.fix_top(); if self.len() > 0 { self.borrow_mut().first_kv().fix_left_border_of_left_edge(); self.fix_top(); } } /// Stock up any underfull nodes on the right border of the tree. /// The other nodes, those that are not the root nor a rightmost edge, /// must be prepared to have up to MIN_LEN elements stolen. pub fn fix_right_border_of_plentiful(&mut self) { let mut cur_node = self.borrow_mut(); while let Internal(internal) = cur_node.force() { // Check if right-most child is underfull. let mut last_kv = internal.last_kv().consider_for_balancing(); debug_assert!(last_kv.left_child_len() >= MIN_LEN * 2); let right_child_len = last_kv.right_child_len(); if right_child_len < MIN_LEN { // We need to steal. last_kv.bulk_steal_left(MIN_LEN - right_child_len); } // Go further down. cur_node = last_kv.into_right_child(); } } } impl<'a, K: 'a, V: 'a> Handle, K, V, marker::LeafOrInternal>, marker::KV> { fn fix_left_border_of_left_edge(mut self) { while let Internal(internal_kv) = self.force() { self = internal_kv.fix_left_child().first_kv(); debug_assert!(self.reborrow().into_node().len() > MIN_LEN); } } fn fix_right_border_of_right_edge(mut self) { while let Internal(internal_kv) = self.force() { self = internal_kv.fix_right_child().last_kv(); debug_assert!(self.reborrow().into_node().len() > MIN_LEN); } } } impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, marker::KV> { /// Stocks up the left child, assuming the right child isn't underfull, and /// provisions an extra element to allow merging its children in turn /// without becoming underfull. /// Returns the left child. fn fix_left_child(self) -> NodeRef, K, V, marker::LeafOrInternal> { let mut internal_kv = self.consider_for_balancing(); let left_len = internal_kv.left_child_len(); debug_assert!(internal_kv.right_child_len() >= MIN_LEN); if internal_kv.can_merge() { internal_kv.merge_tracking_child() } else { // `MIN_LEN + 1` to avoid readjust if merge happens on the next level. let count = (MIN_LEN + 1).saturating_sub(left_len); if count > 0 { internal_kv.bulk_steal_right(count); } internal_kv.into_left_child() } } /// Stocks up the right child, assuming the left child isn't underfull, and /// provisions an extra element to allow merging its children in turn /// without becoming underfull. /// Returns wherever the right child ended up. 
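///
/// Worked example (illustrative; the concrete `MIN_LEN` comes from
/// `super::map`): if `MIN_LEN` is 5 and the right child holds 3 elements,
/// `count` is `(5 + 1).saturating_sub(3) == 3`, so 3 elements are stolen
/// from the left sibling, leaving the right child one above the minimum.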
fn fix_right_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> { let mut internal_kv = self.consider_for_balancing(); let right_len = internal_kv.right_child_len(); debug_assert!(internal_kv.left_child_len() >= MIN_LEN); if internal_kv.can_merge() { internal_kv.merge_tracking_child() } else { // `MIN_LEN + 1` to avoid readjust if merge happens on the next level. let count = (MIN_LEN + 1).saturating_sub(right_len); if count > 0 { internal_kv.bulk_steal_left(count); } internal_kv.into_right_child() } } } //! Collection types. #![stable(feature = "rust1", since = "1.0.0")] pub mod binary_heap; mod btree; pub mod linked_list; pub mod vec_deque; #[stable(feature = "rust1", since = "1.0.0")] pub mod btree_map { //! A map based on a B-Tree. #[stable(feature = "rust1", since = "1.0.0")] pub use super::btree::map::*; } #[stable(feature = "rust1", since = "1.0.0")] pub mod btree_set { //! A set based on a B-Tree. #[stable(feature = "rust1", since = "1.0.0")] pub use super::btree::set::*; } #[stable(feature = "rust1", since = "1.0.0")] #[doc(no_inline)] pub use binary_heap::BinaryHeap; #[stable(feature = "rust1", since = "1.0.0")] #[doc(no_inline)] pub use btree_map::BTreeMap; #[stable(feature = "rust1", since = "1.0.0")] #[doc(no_inline)] pub use btree_set::BTreeSet; #[stable(feature = "rust1", since = "1.0.0")] #[doc(no_inline)] pub use linked_list::LinkedList; #[stable(feature = "rust1", since = "1.0.0")] #[doc(no_inline)] pub use vec_deque::VecDeque; use crate::alloc::{Layout, LayoutError}; use core::fmt::Display; /// The error type for `try_reserve` methods. #[derive(Clone, PartialEq, Eq, Debug)] #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")] pub enum TryReserveError { /// Error due to the computed capacity exceeding the collection's maximum /// (usually `isize::MAX` bytes). CapacityOverflow, /// The memory allocator returned an error AllocError { /// The layout of the allocation request that failed layout: Layout, #[doc(hidden)] #[unstable( feature = "container_error_extra", issue = "none", reason = "\ Enable exposing the allocator’s custom error value \ if an associated type is added in the future: \ https://github.com/rust-lang/wg-allocators/issues/23" )] non_exhaustive: (), }, } #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")] impl From<LayoutError> for TryReserveError { #[inline] fn from(_: LayoutError) -> Self { TryReserveError::CapacityOverflow } } #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")] impl Display for TryReserveError { fn fmt( &self, fmt: &mut core::fmt::Formatter<'_>, ) -> core::result::Result<(), core::fmt::Error> { fmt.write_str("memory allocation failed")?; let reason = match &self { TryReserveError::CapacityOverflow => { " because the computed capacity exceeded the collection's maximum" } TryReserveError::AllocError { .. } => " because the memory allocator returned an error", }; fmt.write_str(reason) } } /// An intermediate trait for specialization of `Extend`. #[doc(hidden)] trait SpecExtend<I: IntoIterator> { /// Extends `self` with the contents of the given iterator. fn spec_extend(&mut self, iter: I); } use core::fmt; use core::iter::{FusedIterator, TrustedLen, TrustedRandomAccess}; use super::VecDeque; /// An owning iterator over the elements of a `VecDeque`. /// /// This `struct` is created by the [`into_iter`] method on [`VecDeque`] /// (provided by the `IntoIterator` trait). See its documentation for more.
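///
/// A short usage example (added for illustration):
/// ```
/// use std::collections::VecDeque;
///
/// let deque: VecDeque<_> = [1, 2, 3].iter().copied().collect();
/// let mut into_iter = deque.into_iter();
/// // Consumes from the front, like repeated `pop_front`.
/// assert_eq!(into_iter.next(), Some(1));
/// ```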
/// /// [`into_iter`]: VecDeque::into_iter #[derive(Clone)] #[stable(feature = "rust1", since = "1.0.0")] pub struct IntoIter { pub(crate) inner: VecDeque, } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("IntoIter").field(&self.inner).finish() } } #[stable(feature = "rust1", since = "1.0.0")] impl Iterator for IntoIter { type Item = T; #[inline] fn next(&mut self) -> Option { self.inner.pop_front() } #[inline] fn size_hint(&self) -> (usize, Option) { let len = self.inner.len(); (len, Some(len)) } #[inline] unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item where Self: TrustedRandomAccess, { // Safety: The TrustedRandomAccess contract requires that callers only pass an index // that is in bounds. // Additionally Self: TrustedRandomAccess is only implemented for T: Copy which means even // multiple repeated reads of the same index would be safe and the // values are !Drop, thus won't suffer from double drops. unsafe { let idx = self.inner.wrap_add(self.inner.tail, idx); self.inner.buffer_read(idx) } } } #[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for IntoIter { #[inline] fn next_back(&mut self) -> Option { self.inner.pop_back() } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for IntoIter { fn is_empty(&self) -> bool { self.inner.is_empty() } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for IntoIter {} #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for IntoIter {} #[doc(hidden)] #[unstable(feature = "trusted_random_access", issue = "none")] // T: Copy as approximation for !Drop since get_unchecked does not update the pointers // and thus we can't implement drop-handling unsafe impl TrustedRandomAccess for IntoIter where T: Copy, { const MAY_HAVE_SIDE_EFFECT: bool = false; } macro_rules! __impl_slice_eq1 { ([$($vars:tt)*] $lhs:ty, $rhs:ty, $($constraints:tt)*) => { #[stable(feature = "vec_deque_partial_eq_slice", since = "1.17.0")] impl PartialEq<$rhs> for $lhs where A: PartialEq, $($constraints)* { fn eq(&self, other: &$rhs) -> bool { if self.len() != other.len() { return false; } let (sa, sb) = self.as_slices(); let (oa, ob) = other[..].split_at(sa.len()); sa == oa && sb == ob } } } } use core::iter::FusedIterator; use core::ptr::{self, NonNull}; use core::{fmt, mem}; use super::{count, Iter, VecDeque}; /// A draining iterator over the elements of a `VecDeque`. /// /// This `struct` is created by the [`drain`] method on [`VecDeque`]. See its /// documentation for more. 
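///
/// A short usage example (added for illustration):
/// ```
/// use std::collections::VecDeque;
///
/// let mut deque: VecDeque<_> = (0..5).collect();
/// let drained: Vec<_> = deque.drain(1..3).collect();
/// assert_eq!(drained, [1, 2]);
/// // The drained range is gone from the deque; the rest closes the gap.
/// assert_eq!(deque, [0, 3, 4]);
/// ```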
/// /// [`drain`]: VecDeque::drain #[stable(feature = "drain", since = "1.6.0")] pub struct Drain<'a, T: 'a> { pub(crate) after_tail: usize, pub(crate) after_head: usize, pub(crate) iter: Iter<'a, T>, pub(crate) deque: NonNull>, } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for Drain<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("Drain") .field(&self.after_tail) .field(&self.after_head) .field(&self.iter) .finish() } } #[stable(feature = "drain", since = "1.6.0")] unsafe impl Sync for Drain<'_, T> {} #[stable(feature = "drain", since = "1.6.0")] unsafe impl Send for Drain<'_, T> {} #[stable(feature = "drain", since = "1.6.0")] impl Drop for Drain<'_, T> { fn drop(&mut self) { struct DropGuard<'r, 'a, T>(&'r mut Drain<'a, T>); impl<'r, 'a, T> Drop for DropGuard<'r, 'a, T> { fn drop(&mut self) { self.0.for_each(drop); let source_deque = unsafe { self.0.deque.as_mut() }; // T = source_deque_tail; H = source_deque_head; t = drain_tail; h = drain_head // // T t h H // [. . . o o x x o o . . .] // let orig_tail = source_deque.tail; let drain_tail = source_deque.head; let drain_head = self.0.after_tail; let orig_head = self.0.after_head; let tail_len = count(orig_tail, drain_tail, source_deque.cap()); let head_len = count(drain_head, orig_head, source_deque.cap()); // Restore the original head value source_deque.head = orig_head; match (tail_len, head_len) { (0, 0) => { source_deque.head = 0; source_deque.tail = 0; } (0, _) => { source_deque.tail = drain_head; } (_, 0) => { source_deque.head = drain_tail; } _ => unsafe { if tail_len <= head_len { source_deque.tail = source_deque.wrap_sub(drain_head, tail_len); source_deque.wrap_copy(source_deque.tail, orig_tail, tail_len); } else { source_deque.head = source_deque.wrap_add(drain_tail, head_len); source_deque.wrap_copy(drain_tail, drain_head, head_len); } }, } } } while let Some(item) = self.next() { let guard = DropGuard(self); drop(item); mem::forget(guard); } DropGuard(self); } } #[stable(feature = "drain", since = "1.6.0")] impl Iterator for Drain<'_, T> { type Item = T; #[inline] fn next(&mut self) -> Option { self.iter.next().map(|elt| unsafe { ptr::read(elt) }) } #[inline] fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } } #[stable(feature = "drain", since = "1.6.0")] impl DoubleEndedIterator for Drain<'_, T> { #[inline] fn next_back(&mut self) -> Option { self.iter.next_back().map(|elt| unsafe { ptr::read(elt) }) } } #[stable(feature = "drain", since = "1.6.0")] impl ExactSizeIterator for Drain<'_, T> {} #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Drain<'_, T> {} use core::ptr::{self}; /// Returns the two slices that cover the `VecDeque`'s valid range pub trait RingSlices: Sized { fn slice(self, from: usize, to: usize) -> Self; fn split_at(self, i: usize) -> (Self, Self); fn ring_slices(buf: Self, head: usize, tail: usize) -> (Self, Self) { let contiguous = tail <= head; if contiguous { let (empty, buf) = buf.split_at(0); (buf.slice(tail, head), empty) } else { let (mid, right) = buf.split_at(tail); let (left, _) = mid.split_at(head); (right, left) } } } impl RingSlices for &[T] { fn slice(self, from: usize, to: usize) -> Self { &self[from..to] } fn split_at(self, i: usize) -> (Self, Self) { (*self).split_at(i) } } impl RingSlices for &mut [T] { fn slice(self, from: usize, to: usize) -> Self { &mut self[from..to] } fn split_at(self, i: usize) -> (Self, Self) { (*self).split_at_mut(i) } } impl RingSlices for *mut [T] { fn slice(self, 
use core::ptr::{self};
/// Returns the two slices that cover the `VecDeque`'s valid range
pub trait RingSlices: Sized { fn slice(self, from: usize, to: usize) -> Self; fn split_at(self, i: usize) -> (Self, Self); fn ring_slices(buf: Self, head: usize, tail: usize) -> (Self, Self) { let contiguous = tail <= head; if contiguous { let (empty, buf) = buf.split_at(0); (buf.slice(tail, head), empty) } else { let (mid, right) = buf.split_at(tail); let (left, _) = mid.split_at(head); (right, left) } } }
impl<T> RingSlices for &[T] { fn slice(self, from: usize, to: usize) -> Self { &self[from..to] } fn split_at(self, i: usize) -> (Self, Self) { (*self).split_at(i) } }
impl<T> RingSlices for &mut [T] { fn slice(self, from: usize, to: usize) -> Self { &mut self[from..to] } fn split_at(self, i: usize) -> (Self, Self) { (*self).split_at_mut(i) } }
impl<T> RingSlices for *mut [T] { fn slice(self, from: usize, to: usize) -> Self { assert!(from <= to && to < self.len()); // Not using `get_unchecked_mut` to keep this a safe operation. let len = to - from; ptr::slice_from_raw_parts_mut(self.as_mut_ptr().wrapping_add(from), len) } fn split_at(self, mid: usize) -> (Self, Self) { let len = self.len(); let ptr = self.as_mut_ptr(); assert!(mid <= len); ( ptr::slice_from_raw_parts_mut(ptr, mid), ptr::slice_from_raw_parts_mut(ptr.wrapping_add(mid), len - mid), ) } }
use core::array; use core::cmp::{self}; use core::mem::replace; use super::VecDeque;
/// PairSlices pairs up equal length slice parts of two deques /// /// For example, given deques "A" and "B" with the following division into slices: /// /// A: [0 1 2] [3 4 5] /// B: [a b] [c d e] /// /// It produces the following sequence of matching slices: /// /// ([0 1], [a b]) /// ([2], [c]) /// ([3 4], [d e]) /// /// and the uneven remainder of either A or B is skipped.
pub struct PairSlices<'a, 'b, T> { pub(crate) a0: &'a mut [T], pub(crate) a1: &'a mut [T], pub(crate) b0: &'b [T], pub(crate) b1: &'b [T], }
impl<'a, 'b, T> PairSlices<'a, 'b, T> { pub fn from(to: &'a mut VecDeque<T>, from: &'b VecDeque<T>) -> Self { let (a0, a1) = to.as_mut_slices(); let (b0, b1) = from.as_slices(); PairSlices { a0, a1, b0, b1 } } pub fn has_remainder(&self) -> bool { !self.b0.is_empty() } pub fn remainder(self) -> impl Iterator<Item = &'b [T]> { array::IntoIter::new([self.b0, self.b1]) } }
impl<'a, 'b, T> Iterator for PairSlices<'a, 'b, T> { type Item = (&'a mut [T], &'b [T]); fn next(&mut self) -> Option<Self::Item> { // Get next part length let part = cmp::min(self.a0.len(), self.b0.len()); if part == 0 { return None; } let (p0, p1) = replace(&mut self.a0, &mut []).split_at_mut(part); let (q0, q1) = self.b0.split_at(part); // Move a1 into a0, if it's empty (and b1, b0 the same way). self.a0 = p1; self.b0 = q1; if self.a0.is_empty() { self.a0 = replace(&mut self.a1, &mut []); } if self.b0.is_empty() { self.b0 = replace(&mut self.b1, &[]); } Some((p0, q0)) } }
//! A double-ended queue implemented with a growable ring buffer. //! //! This queue has *O*(1) amortized inserts and removals from both ends of the //! container. It also has *O*(1) indexing like a vector. The contained elements //! are not required to be copyable, and the queue will be sendable if the //! contained type is sendable.
#![stable(feature = "rust1", since = "1.0.0")]
use core::cmp::{self, Ordering}; use core::fmt; use core::hash::{Hash, Hasher}; use core::iter::{repeat_with, FromIterator}; use core::marker::PhantomData; use core::mem::{self, ManuallyDrop}; use core::ops::{Index, IndexMut, Range, RangeBounds}; use core::ptr::{self, NonNull}; use core::slice;
use crate::collections::TryReserveError; use crate::raw_vec::RawVec; use crate::vec::Vec;
#[macro_use] mod macros;
#[stable(feature = "drain", since = "1.6.0")] pub use self::drain::Drain; mod drain;
#[stable(feature = "rust1", since = "1.0.0")] pub use self::iter_mut::IterMut; mod iter_mut;
#[stable(feature = "rust1", since = "1.0.0")] pub use self::into_iter::IntoIter; mod into_iter;
#[stable(feature = "rust1", since = "1.0.0")] pub use self::iter::Iter; mod iter;
use self::pair_slices::PairSlices; mod pair_slices;
use self::ring_slices::RingSlices; mod ring_slices;
#[cfg(test)] mod tests;
const INITIAL_CAPACITY: usize = 7; // 2^3 - 1
const MINIMUM_CAPACITY: usize = 1; // 2 - 1
const MAXIMUM_ZST_CAPACITY: usize = 1 << (usize::BITS - 1); // Largest possible power of two
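// Added note (not in the original source): the constants above encode the
// buffer's invariant that capacity is a power of two with one slot always
// left empty, so `head == tail` can unambiguously mean "empty". A standalone
// sketch of the observable effect:
#[cfg(test)]
fn demo_capacity_convention() {
    use std::collections::VecDeque;
    // Requesting 10 rounds 10 + 1 up to the power of two 16; one slot stays
    // reserved, so at least 10 (here, 15) slots are usable.
    let d: VecDeque<i32> = VecDeque::with_capacity(10);
    assert!(d.capacity() >= 10);
}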
/// /// The "default" usage of this type as a queue is to use [`push_back`] to add to /// the queue, and [`pop_front`] to remove from the queue. [`extend`] and [`append`] /// push onto the back in this manner, and iterating over `VecDeque` goes front /// to back. /// /// Since `VecDeque` is a ring buffer, its elements are not necessarily contiguous /// in memory. If you want to access the elements as a single slice, such as for /// efficient sorting, you can use [`make_contiguous`]. It rotates the `VecDeque` /// so that its elements do not wrap, and returns a mutable slice to the /// now-contiguous element sequence. /// /// [`push_back`]: VecDeque::push_back /// [`pop_front`]: VecDeque::pop_front /// [`extend`]: VecDeque::extend /// [`append`]: VecDeque::append /// [`make_contiguous`]: VecDeque::make_contiguous #[cfg_attr(not(test), rustc_diagnostic_item = "vecdeque_type")] #[stable(feature = "rust1", since = "1.0.0")] pub struct VecDeque { // tail and head are pointers into the buffer. Tail always points // to the first element that could be read, Head always points // to where data should be written. // If tail == head the buffer is empty. The length of the ringbuffer // is defined as the distance between the two. tail: usize, head: usize, buf: RawVec, } #[stable(feature = "rust1", since = "1.0.0")] impl Clone for VecDeque { fn clone(&self) -> VecDeque { self.iter().cloned().collect() } fn clone_from(&mut self, other: &Self) { self.truncate(other.len()); let mut iter = PairSlices::from(self, other); while let Some((dst, src)) = iter.next() { dst.clone_from_slice(&src); } if iter.has_remainder() { for remainder in iter.remainder() { self.extend(remainder.iter().cloned()); } } } } #[stable(feature = "rust1", since = "1.0.0")] unsafe impl<#[may_dangle] T> Drop for VecDeque { fn drop(&mut self) { /// Runs the destructor for all items in the slice when it gets dropped (normally or /// during unwinding). struct Dropper<'a, T>(&'a mut [T]); impl<'a, T> Drop for Dropper<'a, T> { fn drop(&mut self) { unsafe { ptr::drop_in_place(self.0); } } } let (front, back) = self.as_mut_slices(); unsafe { let _back_dropper = Dropper(back); // use drop for [T] ptr::drop_in_place(front); } // RawVec handles deallocation } } #[stable(feature = "rust1", since = "1.0.0")] impl Default for VecDeque { /// Creates an empty `VecDeque`. #[inline] fn default() -> VecDeque { VecDeque::new() } } impl VecDeque { /// Marginally more convenient #[inline] fn ptr(&self) -> *mut T { self.buf.ptr() } /// Marginally more convenient #[inline] fn cap(&self) -> usize { if mem::size_of::() == 0 { // For zero sized types, we are always at maximum capacity MAXIMUM_ZST_CAPACITY } else { self.buf.capacity() } } /// Turn ptr into a slice #[inline] unsafe fn buffer_as_slice(&self) -> &[T] { unsafe { slice::from_raw_parts(self.ptr(), self.cap()) } } /// Turn ptr into a mut slice #[inline] unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] { unsafe { slice::from_raw_parts_mut(self.ptr(), self.cap()) } } /// Moves an element out of the buffer #[inline] unsafe fn buffer_read(&mut self, off: usize) -> T { unsafe { ptr::read(self.ptr().add(off)) } } /// Writes an element into the buffer, moving it. #[inline] unsafe fn buffer_write(&mut self, off: usize, value: T) { unsafe { ptr::write(self.ptr().add(off), value); } } /// Returns `true` if the buffer is at full capacity. #[inline] fn is_full(&self) -> bool { self.cap() - self.len() == 1 } /// Returns the index in the underlying buffer for a given logical element /// index. 
#[inline] fn wrap_index(&self, idx: usize) -> usize { wrap_index(idx, self.cap()) } /// Returns the index in the underlying buffer for a given logical element /// index + addend. #[inline] fn wrap_add(&self, idx: usize, addend: usize) -> usize { wrap_index(idx.wrapping_add(addend), self.cap()) } /// Returns the index in the underlying buffer for a given logical element /// index - subtrahend. #[inline] fn wrap_sub(&self, idx: usize, subtrahend: usize) -> usize { wrap_index(idx.wrapping_sub(subtrahend), self.cap()) } /// Copies a contiguous block of memory len long from src to dst #[inline] unsafe fn copy(&self, dst: usize, src: usize, len: usize) { debug_assert!( dst + len <= self.cap(), "cpy dst={} src={} len={} cap={}", dst, src, len, self.cap() ); debug_assert!( src + len <= self.cap(), "cpy dst={} src={} len={} cap={}", dst, src, len, self.cap() ); unsafe { ptr::copy(self.ptr().add(src), self.ptr().add(dst), len); } } /// Copies a contiguous block of memory len long from src to dst #[inline] unsafe fn copy_nonoverlapping(&self, dst: usize, src: usize, len: usize) { debug_assert!( dst + len <= self.cap(), "cno dst={} src={} len={} cap={}", dst, src, len, self.cap() ); debug_assert!( src + len <= self.cap(), "cno dst={} src={} len={} cap={}", dst, src, len, self.cap() ); unsafe { ptr::copy_nonoverlapping(self.ptr().add(src), self.ptr().add(dst), len); } } /// Copies a potentially wrapping block of memory len long from src to dest. /// (abs(dst - src) + len) must be no larger than cap() (There must be at /// most one continuous overlapping region between src and dest). unsafe fn wrap_copy(&self, dst: usize, src: usize, len: usize) { #[allow(dead_code)] fn diff(a: usize, b: usize) -> usize { if a <= b { b - a } else { a - b } } debug_assert!( cmp::min(diff(dst, src), self.cap() - diff(dst, src)) + len <= self.cap(), "wrc dst={} src={} len={} cap={}", dst, src, len, self.cap() ); if src == dst || len == 0 { return; } let dst_after_src = self.wrap_sub(dst, src) < len; let src_pre_wrap_len = self.cap() - src; let dst_pre_wrap_len = self.cap() - dst; let src_wraps = src_pre_wrap_len < len; let dst_wraps = dst_pre_wrap_len < len; match (dst_after_src, src_wraps, dst_wraps) { (_, false, false) => { // src doesn't wrap, dst doesn't wrap // // S . . . // 1 [_ _ A A B B C C _] // 2 [_ _ A A A A B B _] // D . . . // unsafe { self.copy(dst, src, len); } } (false, false, true) => { // dst before src, src doesn't wrap, dst wraps // // S . . . // 1 [A A B B _ _ _ C C] // 2 [A A B B _ _ _ A A] // 3 [B B B B _ _ _ A A] // . . D . // unsafe { self.copy(dst, src, dst_pre_wrap_len); self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len); } } (true, false, true) => { // src before dst, src doesn't wrap, dst wraps // // S . . . // 1 [C C _ _ _ A A B B] // 2 [B B _ _ _ A A B B] // 3 [B B _ _ _ A A A A] // . . D . // unsafe { self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len); self.copy(dst, src, dst_pre_wrap_len); } } (false, true, false) => { // dst before src, src wraps, dst doesn't wrap // // . . S . // 1 [C C _ _ _ A A B B] // 2 [C C _ _ _ B B B B] // 3 [C C _ _ _ B B C C] // D . . . // unsafe { self.copy(dst, src, src_pre_wrap_len); self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len); } } (true, true, false) => { // src before dst, src wraps, dst doesn't wrap // // . . S . // 1 [A A B B _ _ _ C C] // 2 [A A A A _ _ _ C C] // 3 [C C A A _ _ _ C C] // D . . . 
// unsafe { self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len); self.copy(dst, src, src_pre_wrap_len); } } (false, true, true) => { // dst before src, src wraps, dst wraps // // . . . S . // 1 [A B C D _ E F G H] // 2 [A B C D _ E G H H] // 3 [A B C D _ E G H A] // 4 [B C C D _ E G H A] // . . D . . // debug_assert!(dst_pre_wrap_len > src_pre_wrap_len); let delta = dst_pre_wrap_len - src_pre_wrap_len; unsafe { self.copy(dst, src, src_pre_wrap_len); self.copy(dst + src_pre_wrap_len, 0, delta); self.copy(0, delta, len - dst_pre_wrap_len); } } (true, true, true) => { // src before dst, src wraps, dst wraps // // . . S . . // 1 [A B C D _ E F G H] // 2 [A A B D _ E F G H] // 3 [H A B D _ E F G H] // 4 [H A B D _ E F F G] // . . . D . // debug_assert!(src_pre_wrap_len > dst_pre_wrap_len); let delta = src_pre_wrap_len - dst_pre_wrap_len; unsafe { self.copy(delta, 0, len - src_pre_wrap_len); self.copy(0, self.cap() - delta, delta); self.copy(dst, src, dst_pre_wrap_len); } } } }
/// Frobs the head and tail sections around to handle the fact that we /// just reallocated. Unsafe because it trusts old_capacity. #[inline] unsafe fn handle_capacity_increase(&mut self, old_capacity: usize) { let new_capacity = self.cap(); // Move the shortest contiguous section of the ring buffer // T H // [o o o o o o o . ] // T H // A [o o o o o o o . . . . . . . . . ] // H T // [o o . o o o o o ] // T H // B [. . . o o o o o o o . . . . . . ] // H T // [o o o o o . o o ] // H T // C [o o o o o . . . . . . . . . o o ] if self.tail <= self.head { // A // Nop } else if self.head < old_capacity - self.tail { // B unsafe { self.copy_nonoverlapping(old_capacity, 0, self.head); } self.head += old_capacity; debug_assert!(self.head > self.tail); } else { // C let new_tail = new_capacity - (old_capacity - self.tail); unsafe { self.copy_nonoverlapping(new_tail, self.tail, old_capacity - self.tail); } self.tail = new_tail; debug_assert!(self.head < self.tail); } debug_assert!(self.head < self.cap()); debug_assert!(self.tail < self.cap()); debug_assert!(self.cap().count_ones() == 1); } }
impl<T> VecDeque<T> { /// Creates an empty `VecDeque`. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let vector: VecDeque<u32> = VecDeque::new(); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn new() -> VecDeque<T> { VecDeque::with_capacity(INITIAL_CAPACITY) }
/// Creates an empty `VecDeque` with space for at least `capacity` elements. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let vector: VecDeque<u32> = VecDeque::with_capacity(10); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn with_capacity(capacity: usize) -> VecDeque<T> { // +1 since the ringbuffer always leaves one space empty let cap = cmp::max(capacity + 1, MINIMUM_CAPACITY + 1).next_power_of_two(); assert!(cap > capacity, "capacity overflow"); VecDeque { tail: 0, head: 0, buf: RawVec::with_capacity(cap) } }
/// Provides a reference to the element at the given index. /// /// Element at index 0 is the front of the queue. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(3); /// buf.push_back(4); /// buf.push_back(5); /// assert_eq!(buf.get(1), Some(&4)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn get(&self, index: usize) -> Option<&T> { if index < self.len() { let idx = self.wrap_add(self.tail, index); unsafe { Some(&*self.ptr().add(idx)) } } else { None } }
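// Added illustration (not in the original source): `get` resolves a logical
// index through `wrap_add`, so index 0 always names the current front even
// after pushes and pops have moved the ring around. A standalone sketch:
#[cfg(test)]
fn demo_logical_indexing() {
    use std::collections::VecDeque;
    let mut d: VecDeque<i32> = VecDeque::new();
    d.push_back(1);
    d.push_back(2);
    d.pop_front();
    d.push_front(0); // tail moves; the backing buffer may wrap
    assert_eq!(d.get(0), Some(&0)); // still the logical front
    assert_eq!(d.get(1), Some(&2));
}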
/// Provides a mutable reference to the element at the given index. /// /// Element at index 0 is the front of the queue. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(3); /// buf.push_back(4); /// buf.push_back(5); /// if let Some(elem) = buf.get_mut(1) { /// *elem = 7; /// } /// /// assert_eq!(buf[1], 7); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn get_mut(&mut self, index: usize) -> Option<&mut T> { if index < self.len() { let idx = self.wrap_add(self.tail, index); unsafe { Some(&mut *self.ptr().add(idx)) } } else { None } }
/// Swaps elements at indices `i` and `j`. /// /// `i` and `j` may be equal. /// /// Element at index 0 is the front of the queue. /// /// # Panics /// /// Panics if either index is out of bounds. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(3); /// buf.push_back(4); /// buf.push_back(5); /// assert_eq!(buf, [3, 4, 5]); /// buf.swap(0, 2); /// assert_eq!(buf, [5, 4, 3]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn swap(&mut self, i: usize, j: usize) { assert!(i < self.len()); assert!(j < self.len()); let ri = self.wrap_add(self.tail, i); let rj = self.wrap_add(self.tail, j); unsafe { ptr::swap(self.ptr().add(ri), self.ptr().add(rj)) } }
/// Returns the number of elements the `VecDeque` can hold without /// reallocating. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let buf: VecDeque<i32> = VecDeque::with_capacity(10); /// assert!(buf.capacity() >= 10); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn capacity(&self) -> usize { self.cap() - 1 }
/// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the /// given `VecDeque`. Does nothing if the capacity is already sufficient. /// /// Note that the allocator may give the collection more space than it requests. Therefore /// capacity can not be relied upon to be precisely minimal. Prefer [`reserve`] if future /// insertions are expected. /// /// # Panics /// /// Panics if the new capacity overflows `usize`. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect(); /// buf.reserve_exact(10); /// assert!(buf.capacity() >= 11); /// ``` /// /// [`reserve`]: VecDeque::reserve #[stable(feature = "rust1", since = "1.0.0")] pub fn reserve_exact(&mut self, additional: usize) { self.reserve(additional); }
/// Reserves capacity for at least `additional` more elements to be inserted in the given /// `VecDeque`. The collection may reserve more space to avoid frequent reallocations. /// /// # Panics /// /// Panics if the new capacity overflows `usize`. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect(); /// buf.reserve(10); /// assert!(buf.capacity() >= 11); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn reserve(&mut self, additional: usize) { let old_cap = self.cap(); let used_cap = self.len() + 1; let new_cap = used_cap .checked_add(additional) .and_then(|needed_cap| needed_cap.checked_next_power_of_two()) .expect("capacity overflow"); if new_cap > old_cap { self.buf.reserve_exact(used_cap, new_cap - used_cap); unsafe { self.handle_capacity_increase(old_cap); } } }
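// Added illustration (not in the original source): `reserve` rounds
// `len + 1 + additional` up to the next power of two, so the resulting
// capacity can exceed the request; only the lower bound is guaranteed:
#[cfg(test)]
fn demo_reserve_growth() {
    use std::collections::VecDeque;
    let mut d: VecDeque<i32> = VecDeque::new();
    d.extend(0..5);
    d.reserve(20);
    // 5 + 1 + 20 = 26 rounds up to 32 internally (31 usable slots), but the
    // exact value is an implementation detail.
    assert!(d.capacity() >= 25);
}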
/// Tries to reserve the minimum capacity for exactly `additional` more elements to /// be inserted in the given `VecDeque`. After calling `try_reserve_exact`, /// capacity will be greater than or equal to `self.len() + additional`. /// Does nothing if the capacity is already sufficient. /// /// Note that the allocator may give the collection more space than it /// requests. Therefore, capacity can not be relied upon to be precisely /// minimal. Prefer `reserve` if future insertions are expected. /// /// # Errors /// /// If the capacity overflows `usize`, or the allocator reports a failure, then an error /// is returned. /// /// # Examples /// /// ``` /// #![feature(try_reserve)] /// use std::collections::TryReserveError; /// use std::collections::VecDeque; /// /// fn process_data(data: &[u32]) -> Result<VecDeque<u32>, TryReserveError> { /// let mut output = VecDeque::new(); /// /// // Pre-reserve the memory, exiting if we can't /// output.try_reserve_exact(data.len())?; /// /// // Now we know this can't OOM(Out-Of-Memory) in the middle of our complex work /// output.extend(data.iter().map(|&val| { /// val * 2 + 5 // very complicated /// })); /// /// Ok(output) /// } /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); /// ``` #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")] pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> { self.try_reserve(additional) }
/// Tries to reserve capacity for at least `additional` more elements to be inserted /// in the given `VecDeque`. The collection may reserve more space to avoid /// frequent reallocations. After calling `try_reserve`, capacity will be /// greater than or equal to `self.len() + additional`. Does nothing if /// capacity is already sufficient. /// /// # Errors /// /// If the capacity overflows `usize`, or the allocator reports a failure, then an error /// is returned. /// /// # Examples /// /// ``` /// #![feature(try_reserve)] /// use std::collections::TryReserveError; /// use std::collections::VecDeque; /// /// fn process_data(data: &[u32]) -> Result<VecDeque<u32>, TryReserveError> { /// let mut output = VecDeque::new(); /// /// // Pre-reserve the memory, exiting if we can't /// output.try_reserve(data.len())?; /// /// // Now we know this can't OOM in the middle of our complex work /// output.extend(data.iter().map(|&val| { /// val * 2 + 5 // very complicated /// })); /// /// Ok(output) /// } /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); /// ``` #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")] pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> { let old_cap = self.cap(); let used_cap = self.len() + 1; let new_cap = used_cap .checked_add(additional) .and_then(|needed_cap| needed_cap.checked_next_power_of_two()) .ok_or(TryReserveError::CapacityOverflow)?; if new_cap > old_cap { self.buf.try_reserve_exact(used_cap, new_cap - used_cap)?; unsafe { self.handle_capacity_increase(old_cap); } } Ok(()) }
/// Shrinks the capacity of the `VecDeque` as much as possible. /// /// It will drop down as close as possible to the length but the allocator may still inform the /// `VecDeque` that there is space for a few more elements.
/// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::with_capacity(15); /// buf.extend(0..4); /// assert_eq!(buf.capacity(), 15); /// buf.shrink_to_fit(); /// assert!(buf.capacity() >= 4); /// ``` #[stable(feature = "deque_extras_15", since = "1.5.0")] pub fn shrink_to_fit(&mut self) { self.shrink_to(0); } /// Shrinks the capacity of the `VecDeque` with a lower bound. /// /// The capacity will remain at least as large as both the length /// and the supplied value. /// /// If the current capacity is less than the lower limit, this is a no-op. /// /// # Examples /// /// ``` /// #![feature(shrink_to)] /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::with_capacity(15); /// buf.extend(0..4); /// assert_eq!(buf.capacity(), 15); /// buf.shrink_to(6); /// assert!(buf.capacity() >= 6); /// buf.shrink_to(0); /// assert!(buf.capacity() >= 4); /// ``` #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")] pub fn shrink_to(&mut self, min_capacity: usize) { let min_capacity = cmp::min(min_capacity, self.capacity()); // We don't have to worry about an overflow as neither `self.len()` nor `self.capacity()` // can ever be `usize::MAX`. +1 as the ringbuffer always leaves one space empty. let target_cap = cmp::max(cmp::max(min_capacity, self.len()) + 1, MINIMUM_CAPACITY + 1) .next_power_of_two(); if target_cap < self.cap() { // There are three cases of interest: // All elements are out of desired bounds // Elements are contiguous, and head is out of desired bounds // Elements are discontiguous, and tail is out of desired bounds // // At all other times, element positions are unaffected. // // Indicates that elements at the head should be moved. let head_outside = self.head == 0 || self.head >= target_cap; // Move elements from out of desired bounds (positions after target_cap) if self.tail >= target_cap && head_outside { // T H // [. . . . . . . . o o o o o o o . ] // T H // [o o o o o o o . ] unsafe { self.copy_nonoverlapping(0, self.tail, self.len()); } self.head = self.len(); self.tail = 0; } else if self.tail != 0 && self.tail < target_cap && head_outside { // T H // [. . . o o o o o o o . . . . . . ] // H T // [o o . o o o o o ] let len = self.wrap_sub(self.head, target_cap); unsafe { self.copy_nonoverlapping(0, target_cap, len); } self.head = len; debug_assert!(self.head < self.tail); } else if self.tail >= target_cap { // H T // [o o o o o . . . . . . . . . o o ] // H T // [o o o o o . o o ] debug_assert!(self.wrap_sub(self.head, 1) < target_cap); let len = self.cap() - self.tail; let new_tail = target_cap - len; unsafe { self.copy_nonoverlapping(new_tail, self.tail, len); } self.tail = new_tail; debug_assert!(self.head < self.tail); } self.buf.shrink_to_fit(target_cap); debug_assert!(self.head < self.cap()); debug_assert!(self.tail < self.cap()); debug_assert!(self.cap().count_ones() == 1); } } /// Shortens the `VecDeque`, keeping the first `len` elements and dropping /// the rest. /// /// If `len` is greater than the `VecDeque`'s current length, this has no /// effect. 
/// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(5); /// buf.push_back(10); /// buf.push_back(15); /// assert_eq!(buf, [5, 10, 15]); /// buf.truncate(1); /// assert_eq!(buf, [5]); /// ``` #[stable(feature = "deque_extras", since = "1.16.0")] pub fn truncate(&mut self, len: usize) { /// Runs the destructor for all items in the slice when it gets dropped (normally or /// during unwinding). struct Dropper<'a, T>(&'a mut [T]); impl<'a, T> Drop for Dropper<'a, T> { fn drop(&mut self) { unsafe { ptr::drop_in_place(self.0); } } } // Safe because: // // * Any slice passed to `drop_in_place` is valid; the second case has // `len <= front.len()` and returning on `len > self.len()` ensures // `begin <= back.len()` in the first case // * The head of the VecDeque is moved before calling `drop_in_place`, // so no value is dropped twice if `drop_in_place` panics unsafe { if len > self.len() { return; } let num_dropped = self.len() - len; let (front, back) = self.as_mut_slices(); if len > front.len() { let begin = len - front.len(); let drop_back = back.get_unchecked_mut(begin..) as *mut _; self.head = self.wrap_sub(self.head, num_dropped); ptr::drop_in_place(drop_back); } else { let drop_back = back as *mut _; let drop_front = front.get_unchecked_mut(len..) as *mut _; self.head = self.wrap_sub(self.head, num_dropped); // Make sure the second half is dropped even when a destructor // in the first one panics. let _back_dropper = Dropper(&mut *drop_back); ptr::drop_in_place(drop_front); } } }
/// Returns a front-to-back iterator. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(5); /// buf.push_back(3); /// buf.push_back(4); /// let b: &[_] = &[&5, &3, &4]; /// let c: Vec<&i32> = buf.iter().collect(); /// assert_eq!(&c[..], b); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn iter(&self) -> Iter<'_, T> { Iter { tail: self.tail, head: self.head, ring: unsafe { self.buffer_as_slice() } } }
/// Returns a front-to-back iterator that returns mutable references. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(5); /// buf.push_back(3); /// buf.push_back(4); /// for num in buf.iter_mut() { /// *num = *num - 2; /// } /// let b: &[_] = &[&mut 3, &mut 1, &mut 2]; /// assert_eq!(&buf.iter_mut().collect::<Vec<&mut i32>>()[..], b); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn iter_mut(&mut self) -> IterMut<'_, T> { // SAFETY: The internal `IterMut` safety invariant is established because the // `ring` we create is a dereferencable slice for lifetime '_. IterMut { tail: self.tail, head: self.head, ring: ptr::slice_from_raw_parts_mut(self.ptr(), self.cap()), phantom: PhantomData, } }
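// Added illustration (not in the original source): `iter` walks the two ring
// halves in order, so iteration always matches logical front-to-back order
// even when the buffer has wrapped. A standalone sketch:
#[cfg(test)]
fn demo_iter_crosses_wrap_point() {
    use std::collections::VecDeque;
    let mut d: VecDeque<i32> = VecDeque::new();
    d.push_back(2);
    d.push_back(3);
    d.push_front(1); // the front now lives at the end of the backing buffer
    let v: Vec<i32> = d.iter().copied().collect();
    assert_eq!(v, [1, 2, 3]);
}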
/// Returns a pair of slices which contain, in order, the contents of the /// `VecDeque`. /// /// If [`make_contiguous`] was previously called, all elements of the /// `VecDeque` will be in the first slice and the second slice will be empty. /// /// [`make_contiguous`]: VecDeque::make_contiguous /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut vector = VecDeque::new(); /// /// vector.push_back(0); /// vector.push_back(1); /// vector.push_back(2); /// /// assert_eq!(vector.as_slices(), (&[0, 1, 2][..], &[][..])); /// /// vector.push_front(10); /// vector.push_front(9); /// /// assert_eq!(vector.as_slices(), (&[9, 10][..], &[0, 1, 2][..])); /// ``` #[inline] #[stable(feature = "deque_extras_15", since = "1.5.0")] pub fn as_slices(&self) -> (&[T], &[T]) { unsafe { let buf = self.buffer_as_slice(); RingSlices::ring_slices(buf, self.head, self.tail) } }
/// Returns a pair of slices which contain, in order, the contents of the /// `VecDeque`. /// /// If [`make_contiguous`] was previously called, all elements of the /// `VecDeque` will be in the first slice and the second slice will be empty. /// /// [`make_contiguous`]: VecDeque::make_contiguous /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut vector = VecDeque::new(); /// /// vector.push_back(0); /// vector.push_back(1); /// /// vector.push_front(10); /// vector.push_front(9); /// /// vector.as_mut_slices().0[0] = 42; /// vector.as_mut_slices().1[0] = 24; /// assert_eq!(vector.as_slices(), (&[42, 10][..], &[24, 1][..])); /// ``` #[inline] #[stable(feature = "deque_extras_15", since = "1.5.0")] pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) { unsafe { let head = self.head; let tail = self.tail; let buf = self.buffer_as_mut_slice(); RingSlices::ring_slices(buf, head, tail) } }
/// Returns the number of elements in the `VecDeque`. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut v = VecDeque::new(); /// assert_eq!(v.len(), 0); /// v.push_back(1); /// assert_eq!(v.len(), 1); /// ``` #[doc(alias = "length")] #[stable(feature = "rust1", since = "1.0.0")] pub fn len(&self) -> usize { count(self.tail, self.head, self.cap()) }
/// Returns `true` if the `VecDeque` is empty. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut v = VecDeque::new(); /// assert!(v.is_empty()); /// v.push_front(1); /// assert!(!v.is_empty()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn is_empty(&self) -> bool { self.tail == self.head }
fn range_tail_head<R>(&self, range: R) -> (usize, usize) where R: RangeBounds<usize>, { let Range { start, end } = slice::range(range, ..self.len()); let tail = self.wrap_add(self.tail, start); let head = self.wrap_add(self.tail, end); (tail, head) }
/// Creates an iterator that covers the specified range in the `VecDeque`. /// /// # Panics /// /// Panics if the starting point is greater than the end point or if /// the end point is greater than the length of the vector. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let v: VecDeque<_> = vec![1, 2, 3].into_iter().collect(); /// let range = v.range(2..).copied().collect::<VecDeque<_>>(); /// assert_eq!(range, [3]); /// /// // A full range covers all contents /// let all = v.range(..); /// assert_eq!(all.len(), 3); /// ``` #[inline] #[stable(feature = "deque_range", since = "1.51.0")] pub fn range<R>(&self, range: R) -> Iter<'_, T> where R: RangeBounds<usize>, { let (tail, head) = self.range_tail_head(range); Iter { tail, head, // The shared reference we have in &self is maintained in the '_ of Iter. ring: unsafe { self.buffer_as_slice() }, } }
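// Added illustration (not in the original source): `range_tail_head` maps a
// logical range onto buffer indices, which is why `range`, `range_mut` and
// `drain` all share the same panic conditions. A standalone sketch:
#[cfg(test)]
fn demo_range_views() {
    use std::collections::VecDeque;
    let d: VecDeque<i32> = (0..5).collect();
    let mid: Vec<i32> = d.range(1..4).copied().collect();
    assert_eq!(mid, [1, 2, 3]);
    // `d.range(4..2)` or `d.range(..6)` would panic, as documented above.
}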
/// Creates an iterator that covers the specified mutable range in the `VecDeque`. /// /// # Panics /// /// Panics if the starting point is greater than the end point or if /// the end point is greater than the length of the vector. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut v: VecDeque<_> = vec![1, 2, 3].into_iter().collect(); /// for v in v.range_mut(2..) { /// *v *= 2; /// } /// assert_eq!(v, vec![1, 2, 6]); /// /// // A full range covers all contents /// for v in v.range_mut(..) { /// *v *= 2; /// } /// assert_eq!(v, vec![2, 4, 12]); /// ``` #[inline] #[stable(feature = "deque_range", since = "1.51.0")] pub fn range_mut<R>(&mut self, range: R) -> IterMut<'_, T> where R: RangeBounds<usize>, { let (tail, head) = self.range_tail_head(range); // SAFETY: The internal `IterMut` safety invariant is established because the // `ring` we create is a dereferencable slice for lifetime '_. IterMut { tail, head, ring: ptr::slice_from_raw_parts_mut(self.ptr(), self.cap()), phantom: PhantomData, } }
/// Creates a draining iterator that removes the specified range in the /// `VecDeque` and yields the removed items. /// /// Note 1: The element range is removed even if the iterator is not /// consumed until the end. /// /// Note 2: It is unspecified how many elements are removed from the deque, /// if the `Drain` value is not dropped, but the borrow it holds expires /// (e.g., due to `mem::forget`). /// /// # Panics /// /// Panics if the starting point is greater than the end point or if /// the end point is greater than the length of the vector. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut v: VecDeque<_> = vec![1, 2, 3].into_iter().collect(); /// let drained = v.drain(2..).collect::<VecDeque<_>>(); /// assert_eq!(drained, [3]); /// assert_eq!(v, [1, 2]); /// /// // A full range clears all contents /// v.drain(..); /// assert!(v.is_empty()); /// ``` #[inline] #[stable(feature = "drain", since = "1.6.0")] pub fn drain<R>(&mut self, range: R) -> Drain<'_, T> where R: RangeBounds<usize>, { // Memory safety // // When the Drain is first created, the source deque is shortened to // make sure no uninitialized or moved-from elements are accessible at // all if the Drain's destructor never gets to run. // // Drain will ptr::read out the values to remove. // When finished, the remaining data will be copied back to cover the hole, // and the head/tail values will be restored correctly. // let (drain_tail, drain_head) = self.range_tail_head(range); // The deque's elements are parted into three segments: // * self.tail -> drain_tail // * drain_tail -> drain_head // * drain_head -> self.head // // T = self.tail; H = self.head; t = drain_tail; h = drain_head // // We store drain_tail as self.head, and drain_head and self.head as // after_tail and after_head respectively on the Drain. This also // truncates the effective array such that if the Drain is leaked, we // have forgotten about the potentially moved values after the start of // the drain. // // T t h H // [. . . o o x x o o . . .] // let head = self.head; // "forget" about the values after the start of the drain until after // the drain is complete and the Drain destructor is run. self.head = drain_tail; Drain { deque: NonNull::from(&mut *self), after_tail: drain_head, after_head: head, iter: Iter { tail: drain_tail, head: drain_head, // Crucially, we only create shared references from `self` here and read from // it. We do not write to `self` nor reborrow to a mutable reference. // Hence the raw pointer we created above, for `deque`, remains valid. ring: unsafe { self.buffer_as_slice() }, }, } }
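// Added illustration (not in the original source): the "Note 2" caveat above
// in action. Leaking the `Drain` is safe, but because the deque already
// "forgot" everything from the drain start onwards, those elements are lost:
#[cfg(test)]
fn demo_leaked_drain_is_safe_but_lossy() {
    use std::collections::VecDeque;
    let mut d: VecDeque<i32> = (0..6).collect();
    std::mem::forget(d.drain(2..4));
    // No undefined behavior, but how much survives is unspecified; with this
    // implementation only the elements in front of the drained range remain.
    assert_eq!(d, [0, 1]);
}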
/// Clears the `VecDeque`, removing all values. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut v = VecDeque::new(); /// v.push_back(1); /// v.clear(); /// assert!(v.is_empty()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn clear(&mut self) { self.truncate(0); }
/// Returns `true` if the `VecDeque` contains an element equal to the /// given value. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut vector: VecDeque<u32> = VecDeque::new(); /// /// vector.push_back(0); /// vector.push_back(1); /// /// assert_eq!(vector.contains(&1), true); /// assert_eq!(vector.contains(&10), false); /// ``` #[stable(feature = "vec_deque_contains", since = "1.12.0")] pub fn contains(&self, x: &T) -> bool where T: PartialEq, { let (a, b) = self.as_slices(); a.contains(x) || b.contains(x) }
/// Provides a reference to the front element, or `None` if the `VecDeque` is /// empty. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// assert_eq!(d.front(), None); /// /// d.push_back(1); /// d.push_back(2); /// assert_eq!(d.front(), Some(&1)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn front(&self) -> Option<&T> { self.get(0) }
/// Provides a mutable reference to the front element, or `None` if the /// `VecDeque` is empty. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// assert_eq!(d.front_mut(), None); /// /// d.push_back(1); /// d.push_back(2); /// match d.front_mut() { /// Some(x) => *x = 9, /// None => (), /// } /// assert_eq!(d.front(), Some(&9)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn front_mut(&mut self) -> Option<&mut T> { self.get_mut(0) }
/// Provides a reference to the back element, or `None` if the `VecDeque` is /// empty. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// assert_eq!(d.back(), None); /// /// d.push_back(1); /// d.push_back(2); /// assert_eq!(d.back(), Some(&2)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn back(&self) -> Option<&T> { self.get(self.len().wrapping_sub(1)) }
/// Provides a mutable reference to the back element, or `None` if the /// `VecDeque` is empty. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// assert_eq!(d.back(), None); /// /// d.push_back(1); /// d.push_back(2); /// match d.back_mut() { /// Some(x) => *x = 9, /// None => (), /// } /// assert_eq!(d.back(), Some(&9)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn back_mut(&mut self) -> Option<&mut T> { self.get_mut(self.len().wrapping_sub(1)) }
/// Removes the first element and returns it, or `None` if the `VecDeque` is /// empty. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// d.push_back(1); /// d.push_back(2); /// /// assert_eq!(d.pop_front(), Some(1)); /// assert_eq!(d.pop_front(), Some(2)); /// assert_eq!(d.pop_front(), None); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn pop_front(&mut self) -> Option<T> { if self.is_empty() { None } else { let tail = self.tail; self.tail = self.wrap_add(self.tail, 1); unsafe { Some(self.buffer_read(tail)) } } }
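// Added illustration (not in the original source): `pop_front` only advances
// `tail`, so FIFO use is O(1) at both ends, unlike `Vec::remove(0)`, which
// shifts every remaining element. A standalone sketch:
#[cfg(test)]
fn demo_fifo_queue() {
    use std::collections::VecDeque;
    let mut q: VecDeque<&str> = VecDeque::new();
    q.push_back("first");
    q.push_back("second");
    assert_eq!(q.pop_front(), Some("first")); // no element shifting occurs
    assert_eq!(q.pop_front(), Some("second"));
    assert_eq!(q.pop_front(), None);
}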
/// Removes the last element from the `VecDeque` and returns it, or `None` if /// it is empty. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// assert_eq!(buf.pop_back(), None); /// buf.push_back(1); /// buf.push_back(3); /// assert_eq!(buf.pop_back(), Some(3)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn pop_back(&mut self) -> Option<T> { if self.is_empty() { None } else { self.head = self.wrap_sub(self.head, 1); let head = self.head; unsafe { Some(self.buffer_read(head)) } } }
/// Prepends an element to the `VecDeque`. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// d.push_front(1); /// d.push_front(2); /// assert_eq!(d.front(), Some(&2)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn push_front(&mut self, value: T) { if self.is_full() { self.grow(); } self.tail = self.wrap_sub(self.tail, 1); let tail = self.tail; unsafe { self.buffer_write(tail, value); } }
/// Appends an element to the back of the `VecDeque`. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(1); /// buf.push_back(3); /// assert_eq!(3, *buf.back().unwrap()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn push_back(&mut self, value: T) { if self.is_full() { self.grow(); } let head = self.head; self.head = self.wrap_add(self.head, 1); unsafe { self.buffer_write(head, value) } }
#[inline] fn is_contiguous(&self) -> bool { // FIXME: Should we consider `head == 0` to mean // that `self` is contiguous? self.tail <= self.head }
/// Removes an element from anywhere in the `VecDeque` and returns it, /// replacing it with the first element. /// /// This does not preserve ordering, but is *O*(1). /// /// Returns `None` if `index` is out of bounds. /// /// Element at index 0 is the front of the queue. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// assert_eq!(buf.swap_remove_front(0), None); /// buf.push_back(1); /// buf.push_back(2); /// buf.push_back(3); /// assert_eq!(buf, [1, 2, 3]); /// /// assert_eq!(buf.swap_remove_front(2), Some(3)); /// assert_eq!(buf, [2, 1]); /// ``` #[stable(feature = "deque_extras_15", since = "1.5.0")] pub fn swap_remove_front(&mut self, index: usize) -> Option<T> { let length = self.len(); if length > 0 && index < length && index != 0 { self.swap(index, 0); } else if index >= length { return None; } self.pop_front() }
/// Removes an element from anywhere in the `VecDeque` and returns it, replacing it with the /// last element. /// /// This does not preserve ordering, but is *O*(1). /// /// Returns `None` if `index` is out of bounds. /// /// Element at index 0 is the front of the queue. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// assert_eq!(buf.swap_remove_back(0), None); /// buf.push_back(1); /// buf.push_back(2); /// buf.push_back(3); /// assert_eq!(buf, [1, 2, 3]); /// /// assert_eq!(buf.swap_remove_back(0), Some(1)); /// assert_eq!(buf, [3, 2]); /// ``` #[stable(feature = "deque_extras_15", since = "1.5.0")] pub fn swap_remove_back(&mut self, index: usize) -> Option<T> { let length = self.len(); if length > 0 && index < length - 1 { self.swap(index, length - 1); } else if index >= length { return None; } self.pop_back() }
/// Inserts an element at `index` within the `VecDeque`, shifting all elements with indices /// greater than or equal to `index` towards the back. /// /// Element at index 0 is the front of the queue.
/// /// # Panics /// /// Panics if `index` is greater than `VecDeque`'s length /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut vec_deque = VecDeque::new(); /// vec_deque.push_back('a'); /// vec_deque.push_back('b'); /// vec_deque.push_back('c'); /// assert_eq!(vec_deque, &['a', 'b', 'c']); /// /// vec_deque.insert(1, 'd'); /// assert_eq!(vec_deque, &['a', 'd', 'b', 'c']); /// ``` #[stable(feature = "deque_extras_15", since = "1.5.0")] pub fn insert(&mut self, index: usize, value: T) { assert!(index <= self.len(), "index out of bounds"); if self.is_full() { self.grow(); } // Move the least number of elements in the ring buffer and insert // the given object // // At most len/2 - 1 elements will be moved. O(min(n, n-i)) // // There are three main cases: // Elements are contiguous // - special case when tail is 0 // Elements are discontiguous and the insert is in the tail section // Elements are discontiguous and the insert is in the head section // // For each of those there are two more cases: // Insert is closer to tail // Insert is closer to head // // Key: H - self.head // T - self.tail // o - Valid element // I - Insertion element // A - The element that should be after the insertion point // M - Indicates element was moved let idx = self.wrap_add(self.tail, index); let distance_to_tail = index; let distance_to_head = self.len() - index; let contiguous = self.is_contiguous(); match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) { (true, true, _) if index == 0 => { // push_front // // T // I H // [A o o o o o o . . . . . . . . .] // // H T // [A o o o o o o o . . . . . I] // self.tail = self.wrap_sub(self.tail, 1); } (true, true, _) => { unsafe { // contiguous, insert closer to tail: // // T I H // [. . . o o A o o o o . . . . . .] // // T H // [. . o o I A o o o o . . . . . .] // M M // // contiguous, insert closer to tail and tail is 0: // // // T I H // [o o A o o o o . . . . . . . . .] // // H T // [o I A o o o o o . . . . . . . o] // M M let new_tail = self.wrap_sub(self.tail, 1); self.copy(new_tail, self.tail, 1); // Already moved the tail, so we only copy `index - 1` elements. self.copy(self.tail, self.tail + 1, index - 1); self.tail = new_tail; } } (true, false, _) => { unsafe { // contiguous, insert closer to head: // // T I H // [. . . o o o o A o o . . . . . .] // // T H // [. . . o o o o I A o o . . . . .] // M M M self.copy(idx + 1, idx, self.head - idx); self.head = self.wrap_add(self.head, 1); } } (false, true, true) => { unsafe { // discontiguous, insert closer to tail, tail section: // // H T I // [o o o o o o . . . . . o o A o o] // // H T // [o o o o o o . . . . o o I A o o] // M M self.copy(self.tail - 1, self.tail, index); self.tail -= 1; } } (false, false, true) => { unsafe { // discontiguous, insert closer to head, tail section: // // H T I // [o o . . . . . . . o o o o o A o] // // H T // [o o o . . . . . . o o o o o I A] // M M M M // copy elements up to new head self.copy(1, 0, self.head); // copy last element into empty spot at bottom of buffer self.copy(0, self.cap() - 1, 1); // move elements from idx to end forward not including ^ element self.copy(idx + 1, idx, self.cap() - 1 - idx); self.head += 1; } } (false, true, false) if idx == 0 => { unsafe { // discontiguous, insert is closer to tail, head section, // and is at index zero in the internal buffer: // // I H T // [A o o o o o o o o o . . . o o o] // // H T // [A o o o o o o o o o . . 
o o o I] // M M M // copy elements up to new tail self.copy(self.tail - 1, self.tail, self.cap() - self.tail); // copy last element into empty spot at bottom of buffer self.copy(self.cap() - 1, 0, 1); self.tail -= 1; } } (false, true, false) => { unsafe { // discontiguous, insert closer to tail, head section: // // I H T // [o o o A o o o o o o . . . o o o] // // H T // [o o I A o o o o o o . . o o o o] // M M M M M M // copy elements up to new tail self.copy(self.tail - 1, self.tail, self.cap() - self.tail); // copy last element into empty spot at bottom of buffer self.copy(self.cap() - 1, 0, 1); // move elements from idx-1 to end forward not including ^ element self.copy(0, 1, idx - 1); self.tail -= 1; } } (false, false, false) => { unsafe { // discontiguous, insert closer to head, head section: // // I H T // [o o o o A o o . . . . . . o o o] // // H T // [o o o o I A o o . . . . . o o o] // M M M self.copy(idx + 1, idx, self.head - idx); self.head += 1; } } } // tail might've been changed so we need to recalculate let new_idx = self.wrap_add(self.tail, index); unsafe { self.buffer_write(new_idx, value); } }
/// Removes and returns the element at `index` from the `VecDeque`. /// Whichever end is closer to the removal point will be moved to make /// room, and all the affected elements will be moved to new positions. /// Returns `None` if `index` is out of bounds. /// /// Element at index 0 is the front of the queue. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(1); /// buf.push_back(2); /// buf.push_back(3); /// assert_eq!(buf, [1, 2, 3]); /// /// assert_eq!(buf.remove(1), Some(2)); /// assert_eq!(buf, [1, 3]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn remove(&mut self, index: usize) -> Option<T> { if self.is_empty() || self.len() <= index { return None; } // There are three main cases: // Elements are contiguous // Elements are discontiguous and the removal is in the tail section // Elements are discontiguous and the removal is in the head section // - special case when elements are technically contiguous, // but self.head = 0 // // For each of those there are two more cases: // Insert is closer to tail // Insert is closer to head // // Key: H - self.head // T - self.tail // o - Valid element // x - Element marked for removal // R - Indicates element that is being removed // M - Indicates element was moved let idx = self.wrap_add(self.tail, index); let elem = unsafe { Some(self.buffer_read(idx)) }; let distance_to_tail = index; let distance_to_head = self.len() - index; let contiguous = self.is_contiguous(); match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) { (true, true, _) => { unsafe { // contiguous, remove closer to tail: // // T R H // [. . . o o x o o o o . . . . . .] // // T H // [. . . . o o o o o o . . . . . .] // M M self.copy(self.tail + 1, self.tail, index); self.tail += 1; } } (true, false, _) => { unsafe { // contiguous, remove closer to head: // // T R H // [. . . o o o o x o o . . . . . .] // // T H // [. . . o o o o o o . . . . . . .] // M M self.copy(idx, idx + 1, self.head - idx - 1); self.head -= 1; } } (false, true, true) => { unsafe { // discontiguous, remove closer to tail, tail section: // // H T R // [o o o o o o . . . . . o o x o o] // // H T // [o o o o o o . . . . . .
o o o o] // M M self.copy(self.tail + 1, self.tail, index); self.tail = self.wrap_add(self.tail, 1); } } (false, false, false) => { unsafe { // discontiguous, remove closer to head, head section: // // R H T // [o o o o x o o . . . . . . o o o] // // H T // [o o o o o o . . . . . . . o o o] // M M self.copy(idx, idx + 1, self.head - idx - 1); self.head -= 1; } } (false, false, true) => { unsafe { // discontiguous, remove closer to head, tail section: // // H T R // [o o o . . . . . . o o o o o x o] // // H T // [o o . . . . . . . o o o o o o o] // M M M M // // or quasi-discontiguous, remove next to head, tail section: // // H T R // [. . . . . . . . . o o o o o x o] // // T H // [. . . . . . . . . o o o o o o .] // M // draw in elements in the tail section self.copy(idx, idx + 1, self.cap() - idx - 1); // Prevents underflow. if self.head != 0 { // copy first element into empty spot self.copy(self.cap() - 1, 0, 1); // move elements in the head section backwards self.copy(0, 1, self.head - 1); } self.head = self.wrap_sub(self.head, 1); } } (false, true, false) => { unsafe { // discontiguous, remove closer to tail, head section: // // R H T // [o o x o o o o o o o . . . o o o] // // H T // [o o o o o o o o o o . . . . o o] // M M M M M // draw in elements up to idx self.copy(1, 0, idx); // copy last element into empty spot self.copy(0, self.cap() - 1, 1); // move elements from tail to end forward, excluding the last one self.copy(self.tail + 1, self.tail, self.cap() - self.tail - 1); self.tail = self.wrap_add(self.tail, 1); } } } elem } /// Splits the `VecDeque` into two at the given index. /// /// Returns a newly allocated `VecDeque`. `self` contains elements `[0, at)`, /// and the returned `VecDeque` contains elements `[at, len)`. /// /// Note that the capacity of `self` does not change. /// /// Element at index 0 is the front of the queue. /// /// # Panics /// /// Panics if `at > len`. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf: VecDeque<_> = vec![1, 2, 3].into_iter().collect(); /// let buf2 = buf.split_off(1); /// assert_eq!(buf, [1]); /// assert_eq!(buf2, [2, 3]); /// ``` #[inline] #[must_use = "use `.truncate()` if you don't need the other half"] #[stable(feature = "split_off", since = "1.4.0")] pub fn split_off(&mut self, at: usize) -> Self { let len = self.len(); assert!(at <= len, "`at` out of bounds"); let other_len = len - at; let mut other = VecDeque::with_capacity(other_len); unsafe { let (first_half, second_half) = self.as_slices(); let first_len = first_half.len(); let second_len = second_half.len(); if at < first_len { // `at` lies in the first half. let amount_in_first = first_len - at; ptr::copy_nonoverlapping(first_half.as_ptr().add(at), other.ptr(), amount_in_first); // just take all of the second half. ptr::copy_nonoverlapping( second_half.as_ptr(), other.ptr().add(amount_in_first), second_len, ); } else { // `at` lies in the second half, need to factor in the elements we skipped // in the first half. let offset = at - first_len; let amount_in_second = second_len - offset; ptr::copy_nonoverlapping( second_half.as_ptr().add(offset), other.ptr(), amount_in_second, ); } } // Cleanup where the ends of the buffers are self.head = self.wrap_sub(self.head, other_len); other.head = other.wrap_index(other_len); other } /// Moves all the elements of `other` into `self`, leaving `other` empty. /// /// # Panics /// /// Panics if the new number of elements in self overflows a `usize`. 
/// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf: VecDeque<_> = vec![1, 2].into_iter().collect(); /// let mut buf2: VecDeque<_> = vec![3, 4].into_iter().collect(); /// buf.append(&mut buf2); /// assert_eq!(buf, [1, 2, 3, 4]); /// assert_eq!(buf2, []); /// ``` #[inline] #[stable(feature = "append", since = "1.4.0")] pub fn append(&mut self, other: &mut Self) { // naive impl self.extend(other.drain(..)); }
/// Retains only the elements specified by the predicate. /// /// In other words, remove all elements `e` such that `f(&e)` returns false. /// This method operates in place, visiting each element exactly once in the /// original order, and preserves the order of the retained elements. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.extend(1..5); /// buf.retain(|&x| x % 2 == 0); /// assert_eq!(buf, [2, 4]); /// ``` /// /// The exact order may be useful for tracking external state, like an index. /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.extend(1..6); /// /// let keep = [false, true, true, false, true]; /// let mut i = 0; /// buf.retain(|_| (keep[i], i += 1).0); /// assert_eq!(buf, [2, 3, 5]); /// ``` #[stable(feature = "vec_deque_retain", since = "1.4.0")] pub fn retain<F>(&mut self, mut f: F) where F: FnMut(&T) -> bool, { let len = self.len(); let mut del = 0; for i in 0..len { if !f(&self[i]) { del += 1; } else if del > 0 { self.swap(i - del, i); } } if del > 0 { self.truncate(len - del); } }
// This may panic or abort #[inline(never)] fn grow(&mut self) { if self.is_full() { let old_cap = self.cap(); // Double the buffer size. self.buf.reserve_exact(old_cap, old_cap); assert!(self.cap() == old_cap * 2); unsafe { self.handle_capacity_increase(old_cap); } debug_assert!(!self.is_full()); } }
/// Modifies the `VecDeque` in-place so that `len()` is equal to `new_len`, /// either by removing excess elements from the back or by appending /// elements generated by calling `generator` to the back. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(5); /// buf.push_back(10); /// buf.push_back(15); /// assert_eq!(buf, [5, 10, 15]); /// /// buf.resize_with(5, Default::default); /// assert_eq!(buf, [5, 10, 15, 0, 0]); /// /// buf.resize_with(2, || unreachable!()); /// assert_eq!(buf, [5, 10]); /// /// let mut state = 100; /// buf.resize_with(5, || { state += 1; state }); /// assert_eq!(buf, [5, 10, 101, 102, 103]); /// ``` #[stable(feature = "vec_resize_with", since = "1.33.0")] pub fn resize_with(&mut self, new_len: usize, generator: impl FnMut() -> T) { let len = self.len(); if new_len > len { self.extend(repeat_with(generator).take(new_len - len)) } else { self.truncate(new_len); } }
/// Rearranges the internal storage of this deque so it is one contiguous /// slice, which is then returned. /// /// This method does not allocate and does not change the order of the /// inserted elements. As it returns a mutable slice, this can be used to /// sort a deque. /// /// Once the internal storage is contiguous, the [`as_slices`] and /// [`as_mut_slices`] methods will return the entire contents of the /// `VecDeque` in a single slice. /// /// [`as_slices`]: VecDeque::as_slices /// [`as_mut_slices`]: VecDeque::as_mut_slices /// /// # Examples /// /// Sorting the content of a deque.
/// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::with_capacity(15); /// /// buf.push_back(2); /// buf.push_back(1); /// buf.push_front(3); /// /// // sorting the deque /// buf.make_contiguous().sort(); /// assert_eq!(buf.as_slices(), (&[1, 2, 3] as &[_], &[] as &[_])); /// /// // sorting it in reverse order /// buf.make_contiguous().sort_by(|a, b| b.cmp(a)); /// assert_eq!(buf.as_slices(), (&[3, 2, 1] as &[_], &[] as &[_])); /// ``` /// /// Getting immutable access to the contiguous slice. /// /// ```rust /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// /// buf.push_back(2); /// buf.push_back(1); /// buf.push_front(3); /// /// buf.make_contiguous(); /// if let (slice, &[]) = buf.as_slices() { /// // we can now be sure that `slice` contains all elements of the deque, /// // while still having immutable access to `buf`. /// assert_eq!(buf.len(), slice.len()); /// assert_eq!(slice, &[3, 2, 1] as &[_]); /// } /// ``` #[stable(feature = "deque_make_contiguous", since = "1.48.0")] pub fn make_contiguous(&mut self) -> &mut [T] { if self.is_contiguous() { let tail = self.tail; let head = self.head; return unsafe { RingSlices::ring_slices(self.buffer_as_mut_slice(), head, tail).0 }; } let buf = self.buf.ptr(); let cap = self.cap(); let len = self.len(); let free = self.tail - self.head; let tail_len = cap - self.tail; if free >= tail_len { // there is enough free space to copy the tail in one go, // this means that we first shift the head backwards, and then // copy the tail to the correct position. // // from: DEFGH....ABC // to: ABCDEFGH.... unsafe { ptr::copy(buf, buf.add(tail_len), self.head); // ...DEFGH.ABC ptr::copy_nonoverlapping(buf.add(self.tail), buf, tail_len); // ABCDEFGH.... self.tail = 0; self.head = len; } } else if free > self.head { // FIXME: We currently do not consider ....ABCDEFGH // to be contiguous because `head` would be `0` in this // case. While we probably want to change this it // isn't trivial as a few places expect `is_contiguous` // to mean that we can just slice using `buf[tail..head]`. // there is enough free space to copy the head in one go, // this means that we first shift the tail forwards, and then // copy the head to the correct position. // // from: FGH....ABCDE // to: ...ABCDEFGH. unsafe { ptr::copy(buf.add(self.tail), buf.add(self.head), tail_len); // FGHABCDE.... ptr::copy_nonoverlapping(buf, buf.add(self.head + tail_len), self.head); // ...ABCDEFGH. self.tail = self.head; self.head = self.wrap_add(self.tail, len); } } else { // free is smaller than both head and tail, // this means we have to slowly "swap" the tail and the head. // // from: EFGHI...ABCD or HIJK.ABCDEFG // to: ABCDEFGHI... or ABCDEFGHIJK. let mut left_edge: usize = 0; let mut right_edge: usize = self.tail; unsafe { // The general problem looks like this // GHIJKLM...ABCDEF - before any swaps // ABCDEFM...GHIJKL - after 1 pass of swaps // ABCDEFGHIJM...KL - swap until the left edge reaches the temp store // - then restart the algorithm with a new (smaller) store // Sometimes the temp store is reached when the right edge is at the end // of the buffer - this means we've hit the right order with fewer swaps! // E.g // EF..ABCD // ABCDEF.. 
- after only four swaps we've finished while left_edge < len && right_edge != cap { let mut right_offset = 0; for i in left_edge..right_edge { right_offset = (i - left_edge) % (cap - right_edge); let src: isize = (right_edge + right_offset) as isize; ptr::swap(buf.add(i), buf.offset(src)); } let n_ops = right_edge - left_edge; left_edge += n_ops; right_edge += right_offset + 1; } self.tail = 0; self.head = len; } } let tail = self.tail; let head = self.head; unsafe { RingSlices::ring_slices(self.buffer_as_mut_slice(), head, tail).0 } } /// Rotates the double-ended queue `mid` places to the left. /// /// Equivalently, /// - Rotates item `mid` into the first position. /// - Pops the first `mid` items and pushes them to the end. /// - Rotates `len() - mid` places to the right. /// /// # Panics /// /// If `mid` is greater than `len()`. Note that `mid == len()` /// does _not_ panic and is a no-op rotation. /// /// # Complexity /// /// Takes `*O*(min(mid, len() - mid))` time and no extra space. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf: VecDeque<_> = (0..10).collect(); /// /// buf.rotate_left(3); /// assert_eq!(buf, [3, 4, 5, 6, 7, 8, 9, 0, 1, 2]); /// /// for i in 1..10 { /// assert_eq!(i * 3 % 10, buf[0]); /// buf.rotate_left(3); /// } /// assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]); /// ``` #[stable(feature = "vecdeque_rotate", since = "1.36.0")] pub fn rotate_left(&mut self, mid: usize) { assert!(mid <= self.len()); let k = self.len() - mid; if mid <= k { unsafe { self.rotate_left_inner(mid) } } else { unsafe { self.rotate_right_inner(k) } } } /// Rotates the double-ended queue `k` places to the right. /// /// Equivalently, /// - Rotates the first item into position `k`. /// - Pops the last `k` items and pushes them to the front. /// - Rotates `len() - k` places to the left. /// /// # Panics /// /// If `k` is greater than `len()`. Note that `k == len()` /// does _not_ panic and is a no-op rotation. /// /// # Complexity /// /// Takes `*O*(min(k, len() - k))` time and no extra space. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf: VecDeque<_> = (0..10).collect(); /// /// buf.rotate_right(3); /// assert_eq!(buf, [7, 8, 9, 0, 1, 2, 3, 4, 5, 6]); /// /// for i in 1..10 { /// assert_eq!(0, buf[i * 3 % 10]); /// buf.rotate_right(3); /// } /// assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]); /// ``` #[stable(feature = "vecdeque_rotate", since = "1.36.0")] pub fn rotate_right(&mut self, k: usize) { assert!(k <= self.len()); let mid = self.len() - k; if k <= mid { unsafe { self.rotate_right_inner(k) } } else { unsafe { self.rotate_left_inner(mid) } } } // SAFETY: the following two methods require that the rotation amount // be less than half the length of the deque. // // `wrap_copy` requires that `min(x, cap() - x) + copy_len <= cap()`, // but that `min` is never more than half the capacity, regardless of `x`, // so it's sound to call here because we're calling with something // less than half the length, which is never above half the capacity.
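// Worked instance of the bound above (an illustrative note, not from the
// original source): with `len() == 10`, `rotate_left(7)` computes
// `k = 10 - 7 = 3` and dispatches to `rotate_right_inner(3)`. The amount
// passed down is always `min(mid, len() - mid)`, which is at most half
// the length and therefore at most half the capacity.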
unsafe fn rotate_left_inner(&mut self, mid: usize) { debug_assert!(mid * 2 <= self.len()); unsafe { self.wrap_copy(self.head, self.tail, mid); } self.head = self.wrap_add(self.head, mid); self.tail = self.wrap_add(self.tail, mid); } unsafe fn rotate_right_inner(&mut self, k: usize) { debug_assert!(k * 2 <= self.len()); self.head = self.wrap_sub(self.head, k); self.tail = self.wrap_sub(self.tail, k); unsafe { self.wrap_copy(self.tail, self.head, k); } } /// Binary searches this sorted `VecDeque` for a given element. /// /// If the value is found then [`Result::Ok`] is returned, containing the /// index of the matching element. If there are multiple matches, then any /// one of the matches could be returned. If the value is not found then /// [`Result::Err`] is returned, containing the index where a matching /// element could be inserted while maintaining sorted order. /// /// See also [`binary_search_by`], [`binary_search_by_key`], and [`partition_point`]. /// /// [`binary_search_by`]: VecDeque::binary_search_by /// [`binary_search_by_key`]: VecDeque::binary_search_by_key /// [`partition_point`]: VecDeque::partition_point /// /// # Examples /// /// Looks up a series of four elements. The first is found, with a /// uniquely determined position; the second and third are not /// found; the fourth could match any position in `[1, 4]`. /// /// ``` /// #![feature(vecdeque_binary_search)] /// use std::collections::VecDeque; /// /// let deque: VecDeque<_> = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].into(); /// /// assert_eq!(deque.binary_search(&13), Ok(9)); /// assert_eq!(deque.binary_search(&4), Err(7)); /// assert_eq!(deque.binary_search(&100), Err(13)); /// let r = deque.binary_search(&1); /// assert!(matches!(r, Ok(1..=4))); /// ``` /// /// If you want to insert an item to a sorted `VecDeque`, while maintaining /// sort order: /// /// ``` /// #![feature(vecdeque_binary_search)] /// use std::collections::VecDeque; /// /// let mut deque: VecDeque<_> = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].into(); /// let num = 42; /// let idx = deque.binary_search(&num).unwrap_or_else(|x| x); /// deque.insert(idx, num); /// assert_eq!(deque, &[0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 42, 55]); /// ``` #[unstable(feature = "vecdeque_binary_search", issue = "78021")] #[inline] pub fn binary_search(&self, x: &T) -> Result<usize, usize> where T: Ord, { self.binary_search_by(|e| e.cmp(x)) } /// Binary searches this sorted `VecDeque` with a comparator function. /// /// The comparator function should implement an order consistent /// with the sort order of the underlying `VecDeque`, returning an /// order code that indicates whether its argument is `Less`, /// `Equal` or `Greater` than the desired target. /// /// If the value is found then [`Result::Ok`] is returned, containing the /// index of the matching element. If there are multiple matches, then any /// one of the matches could be returned. If the value is not found then /// [`Result::Err`] is returned, containing the index where a matching /// element could be inserted while maintaining sorted order. /// /// See also [`binary_search`], [`binary_search_by_key`], and [`partition_point`]. /// /// [`binary_search`]: VecDeque::binary_search /// [`binary_search_by_key`]: VecDeque::binary_search_by_key /// [`partition_point`]: VecDeque::partition_point /// /// # Examples /// /// Looks up a series of four elements. The first is found, with a /// uniquely determined position; the second and third are not /// found; the fourth could match any position in `[1, 4]`.
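/// (Positions 1 through 4 all hold the value `1`, so any of them is a
/// correct result for the final lookup below.)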
/// /// ``` /// #![feature(vecdeque_binary_search)] /// use std::collections::VecDeque; /// /// let deque: VecDeque<_> = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].into(); /// /// assert_eq!(deque.binary_search_by(|x| x.cmp(&13)), Ok(9)); /// assert_eq!(deque.binary_search_by(|x| x.cmp(&4)), Err(7)); /// assert_eq!(deque.binary_search_by(|x| x.cmp(&100)), Err(13)); /// let r = deque.binary_search_by(|x| x.cmp(&1)); /// assert!(matches!(r, Ok(1..=4))); /// ``` #[unstable(feature = "vecdeque_binary_search", issue = "78021")] pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize> where F: FnMut(&'a T) -> Ordering, { let (front, back) = self.as_slices(); let cmp_back = back.first().map(|elem| f(elem)); if let Some(Ordering::Equal) = cmp_back { Ok(front.len()) } else if let Some(Ordering::Less) = cmp_back { back.binary_search_by(f).map(|idx| idx + front.len()).map_err(|idx| idx + front.len()) } else { front.binary_search_by(f) } } /// Binary searches this sorted `VecDeque` with a key extraction function. /// /// Assumes that the `VecDeque` is sorted by the key, for instance with /// [`make_contiguous().sort_by_key()`] using the same key extraction function. /// /// If the value is found then [`Result::Ok`] is returned, containing the /// index of the matching element. If there are multiple matches, then any /// one of the matches could be returned. If the value is not found then /// [`Result::Err`] is returned, containing the index where a matching /// element could be inserted while maintaining sorted order. /// /// See also [`binary_search`], [`binary_search_by`], and [`partition_point`]. /// /// [`make_contiguous().sort_by_key()`]: VecDeque::make_contiguous /// [`binary_search`]: VecDeque::binary_search /// [`binary_search_by`]: VecDeque::binary_search_by /// [`partition_point`]: VecDeque::partition_point /// /// # Examples /// /// Looks up a series of four elements in a slice of pairs sorted by /// their second elements. The first is found, with a uniquely /// determined position; the second and third are not found; the /// fourth could match any position in `[1, 4]`. /// /// ``` /// #![feature(vecdeque_binary_search)] /// use std::collections::VecDeque; /// /// let deque: VecDeque<_> = vec![(0, 0), (2, 1), (4, 1), (5, 1), /// (3, 1), (1, 2), (2, 3), (4, 5), (5, 8), (3, 13), /// (1, 21), (2, 34), (4, 55)].into(); /// /// assert_eq!(deque.binary_search_by_key(&13, |&(a, b)| b), Ok(9)); /// assert_eq!(deque.binary_search_by_key(&4, |&(a, b)| b), Err(7)); /// assert_eq!(deque.binary_search_by_key(&100, |&(a, b)| b), Err(13)); /// let r = deque.binary_search_by_key(&1, |&(a, b)| b); /// assert!(matches!(r, Ok(1..=4))); /// ``` #[unstable(feature = "vecdeque_binary_search", issue = "78021")] #[inline] pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize> where F: FnMut(&'a T) -> B, B: Ord, { self.binary_search_by(|k| f(k).cmp(b)) } /// Returns the index of the partition point according to the given predicate /// (the index of the first element of the second partition). /// /// The deque is assumed to be partitioned according to the given predicate. /// This means that all elements for which the predicate returns true are at the start of the deque /// and all elements for which the predicate returns false are at the end. /// For example, `[7, 15, 3, 5, 4, 12, 6]` is partitioned under the predicate `x % 2 != 0` /// (all odd numbers are at the start, all even at the end).
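/// That claim can be checked directly (a minimal illustrative doc test):
///
/// ```
/// #![feature(vecdeque_binary_search)]
/// use std::collections::VecDeque;
///
/// let deque: VecDeque<_> = vec![7, 15, 3, 5, 4, 12, 6].into();
/// // 7, 15, 3 and 5 are odd, so the first element of the second
/// // partition (the first even number, 4) sits at index 4.
/// assert_eq!(deque.partition_point(|&x| x % 2 != 0), 4);
/// ```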
/// /// If this deque is not partitioned, the returned result is unspecified and meaningless, /// as this method performs a kind of binary search. /// /// See also [`binary_search`], [`binary_search_by`], and [`binary_search_by_key`]. /// /// [`binary_search`]: VecDeque::binary_search /// [`binary_search_by`]: VecDeque::binary_search_by /// [`binary_search_by_key`]: VecDeque::binary_search_by_key /// /// # Examples /// /// ``` /// #![feature(vecdeque_binary_search)] /// use std::collections::VecDeque; /// /// let deque: VecDeque<_> = vec![1, 2, 3, 3, 5, 6, 7].into(); /// let i = deque.partition_point(|&x| x < 5); /// /// assert_eq!(i, 4); /// assert!(deque.iter().take(i).all(|&x| x < 5)); /// assert!(deque.iter().skip(i).all(|&x| !(x < 5))); /// ``` #[unstable(feature = "vecdeque_binary_search", issue = "78021")] pub fn partition_point<P>
(&self, mut pred: P) -> usize where P: FnMut(&T) -> bool, { let (front, back) = self.as_slices(); if let Some(true) = back.first().map(|v| pred(v)) { back.partition_point(pred) + front.len() } else { front.partition_point(pred) } } } impl<T: Clone> VecDeque<T> { /// Modifies the `VecDeque` in-place so that `len()` is equal to `new_len`, /// either by removing excess elements from the back or by appending clones of `value` /// to the back. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(5); /// buf.push_back(10); /// buf.push_back(15); /// assert_eq!(buf, [5, 10, 15]); /// /// buf.resize(2, 0); /// assert_eq!(buf, [5, 10]); /// /// buf.resize(5, 20); /// assert_eq!(buf, [5, 10, 20, 20, 20]); /// ``` #[stable(feature = "deque_extras", since = "1.16.0")] pub fn resize(&mut self, new_len: usize, value: T) { self.resize_with(new_len, || value.clone()); } } /// Returns the index in the underlying buffer for a given logical element index. #[inline] fn wrap_index(index: usize, size: usize) -> usize { // size is always a power of 2 debug_assert!(size.is_power_of_two()); index & (size - 1) } /// Calculate the number of elements left to be read in the buffer #[inline] fn count(tail: usize, head: usize, size: usize) -> usize { // size is always a power of 2 (head.wrapping_sub(tail)) & (size - 1) } #[stable(feature = "rust1", since = "1.0.0")] impl<A: PartialEq> PartialEq for VecDeque<A> { fn eq(&self, other: &VecDeque<A>) -> bool { if self.len() != other.len() { return false; } let (sa, sb) = self.as_slices(); let (oa, ob) = other.as_slices(); if sa.len() == oa.len() { sa == oa && sb == ob } else if sa.len() < oa.len() { // Always divisible in three sections, for example: // self: [a b c|d e f] // other: [0 1 2 3|4 5] // front = 3, mid = 1, // [a b c] == [0 1 2] && [d] == [3] && [e f] == [4 5] let front = sa.len(); let mid = oa.len() - front; let (oa_front, oa_mid) = oa.split_at(front); let (sb_mid, sb_back) = sb.split_at(mid); debug_assert_eq!(sa.len(), oa_front.len()); debug_assert_eq!(sb_mid.len(), oa_mid.len()); debug_assert_eq!(sb_back.len(), ob.len()); sa == oa_front && sb_mid == oa_mid && sb_back == ob } else { let front = oa.len(); let mid = sa.len() - front; let (sa_front, sa_mid) = sa.split_at(front); let (ob_mid, ob_back) = ob.split_at(mid); debug_assert_eq!(sa_front.len(), oa.len()); debug_assert_eq!(sa_mid.len(), ob_mid.len()); debug_assert_eq!(sb.len(), ob_back.len()); sa_front == oa && sa_mid == ob_mid && sb == ob_back } } } #[stable(feature = "rust1", since = "1.0.0")] impl<A: Eq> Eq for VecDeque<A> {} __impl_slice_eq1! { [] VecDeque<A>, Vec<B>, } __impl_slice_eq1! { [] VecDeque<A>, &[B], } __impl_slice_eq1! { [] VecDeque<A>, &mut [B], } __impl_slice_eq1! { [const N: usize] VecDeque<A>, [B; N], } __impl_slice_eq1! { [const N: usize] VecDeque<A>, &[B; N], } __impl_slice_eq1! { [const N: usize] VecDeque<A>, &mut [B; N], } #[stable(feature = "rust1", since = "1.0.0")] impl<A: PartialOrd> PartialOrd for VecDeque<A> { fn partial_cmp(&self, other: &VecDeque<A>) -> Option<Ordering> { self.iter().partial_cmp(other.iter()) } } #[stable(feature = "rust1", since = "1.0.0")] impl<A: Ord> Ord for VecDeque<A> { #[inline] fn cmp(&self, other: &VecDeque<A>) -> Ordering { self.iter().cmp(other.iter()) } } #[stable(feature = "rust1", since = "1.0.0")] impl<A: Hash> Hash for VecDeque<A> { fn hash<H: Hasher>(&self, state: &mut H) { self.len().hash(state); // It's not possible to use Hash::hash_slice on slices // returned by as_slices method as their length can vary // in otherwise identical deques.
// // Hasher only guarantees equivalence for the exact same // set of calls to its methods. self.iter().for_each(|elem| elem.hash(state)); } } #[stable(feature = "rust1", since = "1.0.0")] impl Index for VecDeque { type Output = A; #[inline] fn index(&self, index: usize) -> &A { self.get(index).expect("Out of bounds access") } } #[stable(feature = "rust1", since = "1.0.0")] impl IndexMut for VecDeque { #[inline] fn index_mut(&mut self, index: usize) -> &mut A { self.get_mut(index).expect("Out of bounds access") } } #[stable(feature = "rust1", since = "1.0.0")] impl FromIterator for VecDeque { fn from_iter>(iter: T) -> VecDeque { let iterator = iter.into_iter(); let (lower, _) = iterator.size_hint(); let mut deq = VecDeque::with_capacity(lower); deq.extend(iterator); deq } } #[stable(feature = "rust1", since = "1.0.0")] impl IntoIterator for VecDeque { type Item = T; type IntoIter = IntoIter; /// Consumes the `VecDeque` into a front-to-back iterator yielding elements by /// value. fn into_iter(self) -> IntoIter { IntoIter { inner: self } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> IntoIterator for &'a VecDeque { type Item = &'a T; type IntoIter = Iter<'a, T>; fn into_iter(self) -> Iter<'a, T> { self.iter() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> IntoIterator for &'a mut VecDeque { type Item = &'a mut T; type IntoIter = IterMut<'a, T>; fn into_iter(self) -> IterMut<'a, T> { self.iter_mut() } } #[stable(feature = "rust1", since = "1.0.0")] impl Extend for VecDeque { fn extend>(&mut self, iter: T) { // This function should be the moral equivalent of: // // for item in iter.into_iter() { // self.push_back(item); // } let mut iter = iter.into_iter(); while let Some(element) = iter.next() { if self.len() == self.capacity() { let (lower, _) = iter.size_hint(); self.reserve(lower.saturating_add(1)); } let head = self.head; self.head = self.wrap_add(self.head, 1); unsafe { self.buffer_write(head, element); } } } #[inline] fn extend_one(&mut self, elem: A) { self.push_back(elem); } #[inline] fn extend_reserve(&mut self, additional: usize) { self.reserve(additional); } } #[stable(feature = "extend_ref", since = "1.2.0")] impl<'a, T: 'a + Copy> Extend<&'a T> for VecDeque { fn extend>(&mut self, iter: I) { self.extend(iter.into_iter().cloned()); } #[inline] fn extend_one(&mut self, &elem: &T) { self.push_back(elem); } #[inline] fn extend_reserve(&mut self, additional: usize) { self.reserve(additional); } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for VecDeque { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self).finish() } } #[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")] impl From> for VecDeque { /// Turn a [`Vec`] into a [`VecDeque`]. /// /// [`Vec`]: crate::vec::Vec /// [`VecDeque`]: crate::collections::VecDeque /// /// This avoids reallocating where possible, but the conditions for that are /// strict, and subject to change, and so shouldn't be relied upon unless the /// `Vec` came from `From>` and hasn't been reallocated. fn from(mut other: Vec) -> Self { let len = other.len(); if mem::size_of::() == 0 { // There's no actual allocation for ZSTs to worry about capacity, // but `VecDeque` can't handle as much length as `Vec`. assert!(len < MAXIMUM_ZST_CAPACITY, "capacity overflow"); } else { // We need to resize if the capacity is not a power of two, too small or // doesn't have at least one free space. We do this while it's still in // the `Vec` so the items will drop on panic. 
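// Worked example of the rounding below (illustrative; assumes the usual
// MINIMUM_CAPACITY of 1): a Vec with len == 5 and capacity == 5 gives
// min_cap = max(1, 5) + 1 = 6 and cap = max(6, 5).next_power_of_two() = 8,
// so reserve_exact(8 - 5) grows the allocation before we take it apart.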
let min_cap = cmp::max(MINIMUM_CAPACITY, len) + 1; let cap = cmp::max(min_cap, other.capacity()).next_power_of_two(); if other.capacity() != cap { other.reserve_exact(cap - len); } } unsafe { let (other_buf, len, capacity) = other.into_raw_parts(); let buf = RawVec::from_raw_parts(other_buf, capacity); VecDeque { tail: 0, head: len, buf } } } } #[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")] impl From> for Vec { /// Turn a [`VecDeque`] into a [`Vec`]. /// /// [`Vec`]: crate::vec::Vec /// [`VecDeque`]: crate::collections::VecDeque /// /// This never needs to re-allocate, but does need to do *O*(*n*) data movement if /// the circular buffer doesn't happen to be at the beginning of the allocation. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// // This one is *O*(1). /// let deque: VecDeque<_> = (1..5).collect(); /// let ptr = deque.as_slices().0.as_ptr(); /// let vec = Vec::from(deque); /// assert_eq!(vec, [1, 2, 3, 4]); /// assert_eq!(vec.as_ptr(), ptr); /// /// // This one needs data rearranging. /// let mut deque: VecDeque<_> = (1..5).collect(); /// deque.push_front(9); /// deque.push_front(8); /// let ptr = deque.as_slices().1.as_ptr(); /// let vec = Vec::from(deque); /// assert_eq!(vec, [8, 9, 1, 2, 3, 4]); /// assert_eq!(vec.as_ptr(), ptr); /// ``` fn from(mut other: VecDeque) -> Self { other.make_contiguous(); unsafe { let other = ManuallyDrop::new(other); let buf = other.buf.ptr(); let len = other.len(); let cap = other.cap(); if other.tail != 0 { ptr::copy(buf.add(other.tail), buf, len); } Vec::from_raw_parts(buf, len, cap) } } } use core::fmt; use core::iter::{FusedIterator, TrustedLen, TrustedRandomAccess}; use core::ops::Try; use super::{count, wrap_index, RingSlices}; /// An iterator over the elements of a `VecDeque`. /// /// This `struct` is created by the [`iter`] method on [`super::VecDeque`]. See its /// documentation for more. 
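/// A short usage sketch:
///
/// ```
/// use std::collections::VecDeque;
///
/// let deque: VecDeque<_> = (1..=3).collect();
/// let mut iter = deque.iter(); // borrows the deque immutably
/// assert_eq!(iter.next(), Some(&1));
/// assert_eq!(iter.next_back(), Some(&3)); // `Iter` is double-ended
/// ```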
/// /// [`iter`]: super::VecDeque::iter #[stable(feature = "rust1", since = "1.0.0")] pub struct Iter<'a, T: 'a> { pub(crate) ring: &'a [T], pub(crate) tail: usize, pub(crate) head: usize, } #[stable(feature = "collection_debug", since = "1.17.0")] impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail); f.debug_tuple("Iter").field(&front).field(&back).finish() } } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rust1", since = "1.0.0")] impl<T> Clone for Iter<'_, T> { fn clone(&self) -> Self { Iter { ring: self.ring, tail: self.tail, head: self.head } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Iterator for Iter<'a, T> { type Item = &'a T; #[inline] fn next(&mut self) -> Option<&'a T> { if self.tail == self.head { return None; } let tail = self.tail; self.tail = wrap_index(self.tail.wrapping_add(1), self.ring.len()); unsafe { Some(self.ring.get_unchecked(tail)) } } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { let len = count(self.tail, self.head, self.ring.len()); (len, Some(len)) } fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc where F: FnMut(Acc, Self::Item) -> Acc, { let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail); accum = front.iter().fold(accum, &mut f); back.iter().fold(accum, &mut f) } fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R where Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok = B>, { let (mut iter, final_res); if self.tail <= self.head { // single slice self.ring[self.tail..self.head] iter = self.ring[self.tail..self.head].iter(); final_res = iter.try_fold(init, &mut f); } else { // two slices: self.ring[self.tail..], self.ring[..self.head] let (front, back) = self.ring.split_at(self.tail); let mut back_iter = back.iter(); let res = back_iter.try_fold(init, &mut f); let len = self.ring.len(); self.tail = (self.ring.len() - back_iter.len()) & (len - 1); iter = front[..self.head].iter(); final_res = iter.try_fold(res?, &mut f); } self.tail = self.head - iter.len(); final_res } fn nth(&mut self, n: usize) -> Option<Self::Item> { if n >= count(self.tail, self.head, self.ring.len()) { self.tail = self.head; None } else { self.tail = wrap_index(self.tail.wrapping_add(n), self.ring.len()); self.next() } } #[inline] fn last(mut self) -> Option<&'a T> { self.next_back() } #[inline] unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item where Self: TrustedRandomAccess, { // Safety: The TrustedRandomAccess contract requires that callers only pass an index // that is in bounds.
unsafe { let idx = wrap_index(self.tail.wrapping_add(idx), self.ring.len()); self.ring.get_unchecked(idx) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> DoubleEndedIterator for Iter<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a T> { if self.tail == self.head { return None; } self.head = wrap_index(self.head.wrapping_sub(1), self.ring.len()); unsafe { Some(self.ring.get_unchecked(self.head)) } } fn rfold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc where F: FnMut(Acc, Self::Item) -> Acc, { let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail); accum = back.iter().rfold(accum, &mut f); front.iter().rfold(accum, &mut f) } fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R where Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok = B>, { let (mut iter, final_res); if self.tail <= self.head { // single slice self.ring[self.tail..self.head] iter = self.ring[self.tail..self.head].iter(); final_res = iter.try_rfold(init, &mut f); } else { // two slices: self.ring[self.tail..], self.ring[..self.head] let (front, back) = self.ring.split_at(self.tail); let mut front_iter = front[..self.head].iter(); let res = front_iter.try_rfold(init, &mut f); self.head = front_iter.len(); iter = back.iter(); final_res = iter.try_rfold(res?, &mut f); } self.head = self.tail + iter.len(); final_res } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> ExactSizeIterator for Iter<'_, T> { fn is_empty(&self) -> bool { self.head == self.tail } } #[stable(feature = "fused", since = "1.26.0")] impl<T> FusedIterator for Iter<'_, T> {} #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl<T> TrustedLen for Iter<'_, T> {} #[doc(hidden)] #[unstable(feature = "trusted_random_access", issue = "none")] unsafe impl<T> TrustedRandomAccess for Iter<'_, T> { const MAY_HAVE_SIDE_EFFECT: bool = false; } use super::*; #[bench] #[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks fn bench_push_back_100(b: &mut test::Bencher) { let mut deq = VecDeque::with_capacity(101); b.iter(|| { for i in 0..100 { deq.push_back(i); } deq.head = 0; deq.tail = 0; }) } #[bench] #[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks fn bench_push_front_100(b: &mut test::Bencher) { let mut deq = VecDeque::with_capacity(101); b.iter(|| { for i in 0..100 { deq.push_front(i); } deq.head = 0; deq.tail = 0; }) } #[bench] #[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks fn bench_pop_back_100(b: &mut test::Bencher) { let mut deq = VecDeque::<i32>::with_capacity(101); b.iter(|| { deq.head = 100; deq.tail = 0; while !deq.is_empty() { test::black_box(deq.pop_back()); } }) } #[bench] #[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks fn bench_pop_front_100(b: &mut test::Bencher) { let mut deq = VecDeque::<i32>::with_capacity(101); b.iter(|| { deq.head = 100; deq.tail = 0; while !deq.is_empty() { test::black_box(deq.pop_front()); } }) } #[test] fn test_swap_front_back_remove() { fn test(back: bool) { // This test checks that every single combination of tail position and length is tested. // Capacity 15 should be large enough to cover every case.
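// (`with_capacity(15)` is the interesting choice here: the ring buffer
// rounds up to a power-of-two size of 16, leaving a usable capacity of
// exactly 15, so the loops below can exercise every tail position.)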
let mut tester = VecDeque::with_capacity(15); let usable_cap = tester.capacity(); let final_len = usable_cap / 2; for len in 0..final_len { let expected: VecDeque<_> = if back { (0..len).collect() } else { (0..len).rev().collect() }; for tail_pos in 0..usable_cap { tester.tail = tail_pos; tester.head = tail_pos; if back { for i in 0..len * 2 { tester.push_front(i); } for i in 0..len { assert_eq!(tester.swap_remove_back(i), Some(len * 2 - 1 - i)); } } else { for i in 0..len * 2 { tester.push_back(i); } for i in 0..len { let idx = tester.len() - 1 - i; assert_eq!(tester.swap_remove_front(idx), Some(len * 2 - 1 - i)); } } assert!(tester.tail < tester.cap()); assert!(tester.head < tester.cap()); assert_eq!(tester, expected); } } } test(true); test(false); } #[test] fn test_insert() { // This test checks that every single combination of tail position, length, and // insertion position is tested. Capacity 15 should be large enough to cover every case. let mut tester = VecDeque::with_capacity(15); // can't guarantee we got 15, so have to get what we got. // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else // this test isn't covering what it wants to let cap = tester.capacity(); // len is the length *after* insertion let minlen = if cfg!(miri) { cap - 1 } else { 1 }; // Miri is too slow for len in minlen..cap { // 0, 1, 2, .., len - 1 let expected = (0..).take(len).collect::>(); for tail_pos in 0..cap { for to_insert in 0..len { tester.tail = tail_pos; tester.head = tail_pos; for i in 0..len { if i != to_insert { tester.push_back(i); } } tester.insert(to_insert, to_insert); assert!(tester.tail < tester.cap()); assert!(tester.head < tester.cap()); assert_eq!(tester, expected); } } } } #[test] fn make_contiguous_big_tail() { let mut tester = VecDeque::with_capacity(15); for i in 0..3 { tester.push_back(i); } for i in 3..10 { tester.push_front(i); } // 012......9876543 assert_eq!(tester.capacity(), 15); assert_eq!((&[9, 8, 7, 6, 5, 4, 3] as &[_], &[0, 1, 2] as &[_]), tester.as_slices()); let expected_start = tester.head; tester.make_contiguous(); assert_eq!(tester.tail, expected_start); assert_eq!((&[9, 8, 7, 6, 5, 4, 3, 0, 1, 2] as &[_], &[] as &[_]), tester.as_slices()); } #[test] fn make_contiguous_big_head() { let mut tester = VecDeque::with_capacity(15); for i in 0..8 { tester.push_back(i); } for i in 8..10 { tester.push_front(i); } // 01234567......98 let expected_start = 0; tester.make_contiguous(); assert_eq!(tester.tail, expected_start); assert_eq!((&[9, 8, 0, 1, 2, 3, 4, 5, 6, 7] as &[_], &[] as &[_]), tester.as_slices()); } #[test] fn make_contiguous_small_free() { let mut tester = VecDeque::with_capacity(15); for i in 'A' as u8..'I' as u8 { tester.push_back(i as char); } for i in 'I' as u8..'N' as u8 { tester.push_front(i as char); } // ABCDEFGH...MLKJI let expected_start = 0; tester.make_contiguous(); assert_eq!(tester.tail, expected_start); assert_eq!( (&['M', 'L', 'K', 'J', 'I', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] as &[_], &[] as &[_]), tester.as_slices() ); tester.clear(); for i in 'I' as u8..'N' as u8 { tester.push_back(i as char); } for i in 'A' as u8..'I' as u8 { tester.push_front(i as char); } // IJKLM...HGFEDCBA let expected_start = 0; tester.make_contiguous(); assert_eq!(tester.tail, expected_start); assert_eq!( (&['H', 'G', 'F', 'E', 'D', 'C', 'B', 'A', 'I', 'J', 'K', 'L', 'M'] as &[_], &[] as &[_]), tester.as_slices() ); } #[test] fn make_contiguous_head_to_end() { let mut dq = VecDeque::with_capacity(3); dq.push_front('B'); dq.push_front('A'); 
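// The two front-pushes above wrap the tail to the back of the 4-slot ring
// (usable capacity 3), so once 'C' is pushed below the layout is
// [C . A B]: exactly the head-to-end case that make_contiguous must untangle.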
dq.push_back('C'); dq.make_contiguous(); let expected_tail = 0; let expected_head = 3; assert_eq!(expected_tail, dq.tail); assert_eq!(expected_head, dq.head); assert_eq!((&['A', 'B', 'C'] as &[_], &[] as &[_]), dq.as_slices()); } #[test] fn make_contiguous_head_to_end_2() { // Another test case for #79808, taken from #80293. let mut dq = VecDeque::from_iter(0..6); dq.pop_front(); dq.pop_front(); dq.push_back(6); dq.push_back(7); dq.push_back(8); dq.make_contiguous(); let collected: Vec<_> = dq.iter().copied().collect(); assert_eq!(dq.as_slices(), (&collected[..], &[] as &[_])); } #[test] fn test_remove() { // This test checks that every single combination of tail position, length, and // removal position is tested. Capacity 15 should be large enough to cover every case. let mut tester = VecDeque::with_capacity(15); // can't guarantee we got 15, so have to get what we got. // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else // this test isn't covering what it wants to let cap = tester.capacity(); // len is the length *after* removal let minlen = if cfg!(miri) { cap - 2 } else { 0 }; // Miri is too slow for len in minlen..cap - 1 { // 0, 1, 2, .., len - 1 let expected = (0..).take(len).collect::>(); for tail_pos in 0..cap { for to_remove in 0..=len { tester.tail = tail_pos; tester.head = tail_pos; for i in 0..len { if i == to_remove { tester.push_back(1234); } tester.push_back(i); } if to_remove == len { tester.push_back(1234); } tester.remove(to_remove); assert!(tester.tail < tester.cap()); assert!(tester.head < tester.cap()); assert_eq!(tester, expected); } } } } #[test] fn test_range() { let mut tester: VecDeque = VecDeque::with_capacity(7); let cap = tester.capacity(); let minlen = if cfg!(miri) { cap - 1 } else { 0 }; // Miri is too slow for len in minlen..=cap { for tail in 0..=cap { for start in 0..=len { for end in start..=len { tester.tail = tail; tester.head = tail; for i in 0..len { tester.push_back(i); } // Check that we iterate over the correct values let range: VecDeque<_> = tester.range(start..end).copied().collect(); let expected: VecDeque<_> = (start..end).collect(); assert_eq!(range, expected); } } } } } #[test] fn test_range_mut() { let mut tester: VecDeque = VecDeque::with_capacity(7); let cap = tester.capacity(); for len in 0..=cap { for tail in 0..=cap { for start in 0..=len { for end in start..=len { tester.tail = tail; tester.head = tail; for i in 0..len { tester.push_back(i); } let head_was = tester.head; let tail_was = tester.tail; // Check that we iterate over the correct values let range: VecDeque<_> = tester.range_mut(start..end).map(|v| *v).collect(); let expected: VecDeque<_> = (start..end).collect(); assert_eq!(range, expected); // We shouldn't have changed the capacity or made the // head or tail out of bounds assert_eq!(tester.capacity(), cap); assert_eq!(tester.tail, tail_was); assert_eq!(tester.head, head_was); } } } } } #[test] fn test_drain() { let mut tester: VecDeque = VecDeque::with_capacity(7); let cap = tester.capacity(); for len in 0..=cap { for tail in 0..=cap { for drain_start in 0..=len { for drain_end in drain_start..=len { tester.tail = tail; tester.head = tail; for i in 0..len { tester.push_back(i); } // Check that we drain the correct values let drained: VecDeque<_> = tester.drain(drain_start..drain_end).collect(); let drained_expected: VecDeque<_> = (drain_start..drain_end).collect(); assert_eq!(drained, drained_expected); // We shouldn't have changed the capacity or made the // head or tail out of bounds 
assert_eq!(tester.capacity(), cap); assert!(tester.tail < tester.cap()); assert!(tester.head < tester.cap()); // We should see the correct values in the VecDeque let expected: VecDeque<_> = (0..drain_start).chain(drain_end..len).collect(); assert_eq!(expected, tester); } } } } } #[test] fn test_shrink_to_fit() { // This test checks that every single combination of head and tail position, // is tested. Capacity 15 should be large enough to cover every case. let mut tester = VecDeque::with_capacity(15); // can't guarantee we got 15, so have to get what we got. // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else // this test isn't covering what it wants to let cap = tester.capacity(); tester.reserve(63); let max_cap = tester.capacity(); for len in 0..=cap { // 0, 1, 2, .., len - 1 let expected = (0..).take(len).collect::>(); for tail_pos in 0..=max_cap { tester.tail = tail_pos; tester.head = tail_pos; tester.reserve(63); for i in 0..len { tester.push_back(i); } tester.shrink_to_fit(); assert!(tester.capacity() <= cap); assert!(tester.tail < tester.cap()); assert!(tester.head < tester.cap()); assert_eq!(tester, expected); } } } #[test] fn test_split_off() { // This test checks that every single combination of tail position, length, and // split position is tested. Capacity 15 should be large enough to cover every case. let mut tester = VecDeque::with_capacity(15); // can't guarantee we got 15, so have to get what we got. // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else // this test isn't covering what it wants to let cap = tester.capacity(); // len is the length *before* splitting let minlen = if cfg!(miri) { cap - 1 } else { 0 }; // Miri is too slow for len in minlen..cap { // index to split at for at in 0..=len { // 0, 1, 2, .., at - 1 (may be empty) let expected_self = (0..).take(at).collect::>(); // at, at + 1, .., len - 1 (may be empty) let expected_other = (at..).take(len - at).collect::>(); for tail_pos in 0..cap { tester.tail = tail_pos; tester.head = tail_pos; for i in 0..len { tester.push_back(i); } let result = tester.split_off(at); assert!(tester.tail < tester.cap()); assert!(tester.head < tester.cap()); assert!(result.tail < result.cap()); assert!(result.head < result.cap()); assert_eq!(tester, expected_self); assert_eq!(result, expected_other); } } } } #[test] fn test_from_vec() { use crate::vec::Vec; for cap in 0..35 { for len in 0..=cap { let mut vec = Vec::with_capacity(cap); vec.extend(0..len); let vd = VecDeque::from(vec.clone()); assert!(vd.cap().is_power_of_two()); assert_eq!(vd.len(), vec.len()); assert!(vd.into_iter().eq(vec)); } } let vec = Vec::from([(); MAXIMUM_ZST_CAPACITY - 1]); let vd = VecDeque::from(vec.clone()); assert!(vd.cap().is_power_of_two()); assert_eq!(vd.len(), vec.len()); } #[test] #[should_panic = "capacity overflow"] fn test_from_vec_zst_overflow() { use crate::vec::Vec; let vec = Vec::from([(); MAXIMUM_ZST_CAPACITY]); let vd = VecDeque::from(vec.clone()); // no room for +1 assert!(vd.cap().is_power_of_two()); assert_eq!(vd.len(), vec.len()); } #[test] fn test_vec_from_vecdeque() { use crate::vec::Vec; fn create_vec_and_test_convert(capacity: usize, offset: usize, len: usize) { let mut vd = VecDeque::with_capacity(capacity); for _ in 0..offset { vd.push_back(0); vd.pop_front(); } vd.extend(0..len); let vec: Vec<_> = Vec::from(vd.clone()); assert_eq!(vec.len(), vd.len()); assert!(vec.into_iter().eq(vd)); } // Miri is too slow let max_pwr = if cfg!(miri) { 5 } else { 7 }; for cap_pwr in 0..max_pwr { 
// Make capacity as a (2^x)-1, so that the ring size is 2^x let cap = (2i32.pow(cap_pwr) - 1) as usize; // In these cases there is enough free space to solve it with copies for len in 0..((cap + 1) / 2) { // Test contiguous cases for offset in 0..(cap - len) { create_vec_and_test_convert(cap, offset, len) } // Test cases where block at end of buffer is bigger than block at start for offset in (cap - len)..(cap - (len / 2)) { create_vec_and_test_convert(cap, offset, len) } // Test cases where block at start of buffer is bigger than block at end for offset in (cap - (len / 2))..cap { create_vec_and_test_convert(cap, offset, len) } } // Now there's not (necessarily) space to straighten the ring with simple copies, // the ring will use swapping when: // (cap + 1 - offset) > (cap + 1 - len) && (len - (cap + 1 - offset)) > (cap + 1 - len)) // right block size > free space && left block size > free space for len in ((cap + 1) / 2)..cap { // Test contiguous cases for offset in 0..(cap - len) { create_vec_and_test_convert(cap, offset, len) } // Test cases where block at end of buffer is bigger than block at start for offset in (cap - len)..(cap - (len / 2)) { create_vec_and_test_convert(cap, offset, len) } // Test cases where block at start of buffer is bigger than block at end for offset in (cap - (len / 2))..cap { create_vec_and_test_convert(cap, offset, len) } } } } #[test] fn test_clone_from() { let m = vec![1; 8]; let n = vec![2; 12]; let limit = if cfg!(miri) { 4 } else { 8 }; // Miri is too slow for pfv in 0..limit { for pfu in 0..limit { for longer in 0..2 { let (vr, ur) = if longer == 0 { (&m, &n) } else { (&n, &m) }; let mut v = VecDeque::from(vr.clone()); for _ in 0..pfv { v.push_front(1); } let mut u = VecDeque::from(ur.clone()); for _ in 0..pfu { u.push_front(2); } v.clone_from(&u); assert_eq!(&v, &u); } } } } #[test] fn test_vec_deque_truncate_drop() { static mut DROPS: u32 = 0; #[derive(Clone)] struct Elem(i32); impl Drop for Elem { fn drop(&mut self) { unsafe { DROPS += 1; } } } let v = vec![Elem(1), Elem(2), Elem(3), Elem(4), Elem(5)]; for push_front in 0..=v.len() { let v = v.clone(); let mut tester = VecDeque::with_capacity(5); for (index, elem) in v.into_iter().enumerate() { if index < push_front { tester.push_front(elem); } else { tester.push_back(elem); } } assert_eq!(unsafe { DROPS }, 0); tester.truncate(3); assert_eq!(unsafe { DROPS }, 2); tester.truncate(0); assert_eq!(unsafe { DROPS }, 5); unsafe { DROPS = 0; } } } #[test] fn issue_53529() { use crate::boxed::Box; let mut dst = VecDeque::new(); dst.push_front(Box::new(1)); dst.push_front(Box::new(2)); assert_eq!(*dst.pop_back().unwrap(), 1); let mut src = VecDeque::new(); src.push_front(Box::new(2)); dst.append(&mut src); for a in dst { assert_eq!(*a, 2); } } #[test] fn issue_80303() { use core::iter; use core::num::Wrapping; // This is a valid, albeit rather bad hash function implementation. struct SimpleHasher(Wrapping); impl Hasher for SimpleHasher { fn finish(&self) -> u64 { self.0.0 } fn write(&mut self, bytes: &[u8]) { // This particular implementation hashes value 24 in addition to bytes. // Such an implementation is valid as Hasher only guarantees equivalence // for the exact same set of calls to its methods. 
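// For example, write(&[1]) followed by write(&[2]) feeds 24, 1, 24, 2 into
// the state, which is distinguishable from a single write(&[1, 2]) feeding
// 24, 1, 2; this sensitivity to call boundaries is what makes the hasher a
// good probe for the deque's Hash implementation.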
for &v in iter::once(&24).chain(bytes) { self.0 = Wrapping(31) * self.0 + Wrapping(u64::from(v)); } } } fn hash_code(value: impl Hash) -> u64 { let mut hasher = SimpleHasher(Wrapping(1)); value.hash(&mut hasher); hasher.finish() } // This creates two deques for which values returned by as_slices // method differ. let vda: VecDeque = (0..10).collect(); let mut vdb = VecDeque::with_capacity(10); vdb.extend(5..10); (0..5).rev().for_each(|elem| vdb.push_front(elem)); assert_ne!(vda.as_slices(), vdb.as_slices()); assert_eq!(vda, vdb); assert_eq!(hash_code(vda), hash_code(vdb)); } use core::fmt; use core::iter::{FusedIterator, TrustedLen, TrustedRandomAccess}; use core::marker::PhantomData; use super::{count, wrap_index, RingSlices}; /// A mutable iterator over the elements of a `VecDeque`. /// /// This `struct` is created by the [`iter_mut`] method on [`super::VecDeque`]. See its /// documentation for more. /// /// [`iter_mut`]: super::VecDeque::iter_mut #[stable(feature = "rust1", since = "1.0.0")] pub struct IterMut<'a, T: 'a> { // Internal safety invariant: the entire slice is dereferencable. pub(crate) ring: *mut [T], pub(crate) tail: usize, pub(crate) head: usize, pub(crate) phantom: PhantomData<&'a mut [T]>, } // SAFETY: we do nothing thread-local and there is no interior mutability, // so the usual structural `Send`/`Sync` apply. #[stable(feature = "rust1", since = "1.0.0")] unsafe impl Send for IterMut<'_, T> {} #[stable(feature = "rust1", since = "1.0.0")] unsafe impl Sync for IterMut<'_, T> {} #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for IterMut<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail); // SAFETY: these are the elements we have not handed out yet, so aliasing is fine. // The `IterMut` invariant also ensures everything is dereferencable. let (front, back) = unsafe { (&*front, &*back) }; f.debug_tuple("IterMut").field(&front).field(&back).finish() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Iterator for IterMut<'a, T> { type Item = &'a mut T; #[inline] fn next(&mut self) -> Option<&'a mut T> { if self.tail == self.head { return None; } let tail = self.tail; self.tail = wrap_index(self.tail.wrapping_add(1), self.ring.len()); unsafe { let elem = self.ring.get_unchecked_mut(tail); Some(&mut *elem) } } #[inline] fn size_hint(&self) -> (usize, Option) { let len = count(self.tail, self.head, self.ring.len()); (len, Some(len)) } fn fold(self, mut accum: Acc, mut f: F) -> Acc where F: FnMut(Acc, Self::Item) -> Acc, { let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail); // SAFETY: these are the elements we have not handed out yet, so aliasing is fine. // The `IterMut` invariant also ensures everything is dereferencable. let (front, back) = unsafe { (&mut *front, &mut *back) }; accum = front.iter_mut().fold(accum, &mut f); back.iter_mut().fold(accum, &mut f) } fn nth(&mut self, n: usize) -> Option { if n >= count(self.tail, self.head, self.ring.len()) { self.tail = self.head; None } else { self.tail = wrap_index(self.tail.wrapping_add(n), self.ring.len()); self.next() } } #[inline] fn last(mut self) -> Option<&'a mut T> { self.next_back() } #[inline] unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item where Self: TrustedRandomAccess, { // Safety: The TrustedRandomAccess contract requires that callers only pass an index // that is in bounds. 
unsafe { let idx = wrap_index(self.tail.wrapping_add(idx), self.ring.len()); &mut *self.ring.get_unchecked_mut(idx) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> DoubleEndedIterator for IterMut<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a mut T> { if self.tail == self.head { return None; } self.head = wrap_index(self.head.wrapping_sub(1), self.ring.len()); unsafe { let elem = self.ring.get_unchecked_mut(self.head); Some(&mut *elem) } } fn rfold(self, mut accum: Acc, mut f: F) -> Acc where F: FnMut(Acc, Self::Item) -> Acc, { let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail); // SAFETY: these are the elements we have not handed out yet, so aliasing is fine. // The `IterMut` invariant also ensures everything is dereferencable. let (front, back) = unsafe { (&mut *front, &mut *back) }; accum = back.iter_mut().rfold(accum, &mut f); front.iter_mut().rfold(accum, &mut f) } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for IterMut<'_, T> { fn is_empty(&self) -> bool { self.head == self.tail } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for IterMut<'_, T> {} #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for IterMut<'_, T> {} #[doc(hidden)] #[unstable(feature = "trusted_random_access", issue = "none")] unsafe impl TrustedRandomAccess for IterMut<'_, T> { const MAY_HAVE_SIDE_EFFECT: bool = false; } //! A priority queue implemented with a binary heap. //! //! Insertion and popping the largest element have *O*(log(*n*)) time complexity. //! Checking the largest element is *O*(1). Converting a vector to a binary heap //! can be done in-place, and has *O*(*n*) complexity. A binary heap can also be //! converted to a sorted vector in-place, allowing it to be used for an *O*(*n* \* log(*n*)) //! in-place heapsort. //! //! # Examples //! //! This is a larger example that implements [Dijkstra's algorithm][dijkstra] //! to solve the [shortest path problem][sssp] on a [directed graph][dir_graph]. //! It shows how to use [`BinaryHeap`] with custom types. //! //! [dijkstra]: https://en.wikipedia.org/wiki/Dijkstra%27s_algorithm //! [sssp]: https://en.wikipedia.org/wiki/Shortest_path_problem //! [dir_graph]: https://en.wikipedia.org/wiki/Directed_graph //! //! ``` //! use std::cmp::Ordering; //! use std::collections::BinaryHeap; //! //! #[derive(Copy, Clone, Eq, PartialEq)] //! struct State { //! cost: usize, //! position: usize, //! } //! //! // The priority queue depends on `Ord`. //! // Explicitly implement the trait so the queue becomes a min-heap //! // instead of a max-heap. //! impl Ord for State { //! fn cmp(&self, other: &Self) -> Ordering { //! // Notice that the we flip the ordering on costs. //! // In case of a tie we compare positions - this step is necessary //! // to make implementations of `PartialEq` and `Ord` consistent. //! other.cost.cmp(&self.cost) //! .then_with(|| self.position.cmp(&other.position)) //! } //! } //! //! // `PartialOrd` needs to be implemented as well. //! impl PartialOrd for State { //! fn partial_cmp(&self, other: &Self) -> Option { //! Some(self.cmp(other)) //! } //! } //! //! // Each node is represented as an `usize`, for a shorter implementation. //! struct Edge { //! node: usize, //! cost: usize, //! } //! //! // Dijkstra's shortest path algorithm. //! //! // Start at `start` and use `dist` to track the current shortest distance //! // to each node. This implementation isn't memory-efficient as it may leave duplicate //! 
// nodes in the queue. It also uses `usize::MAX` as a sentinel value, //! // for a simpler implementation. //! fn shortest_path(adj_list: &Vec>, start: usize, goal: usize) -> Option { //! // dist[node] = current shortest distance from `start` to `node` //! let mut dist: Vec<_> = (0..adj_list.len()).map(|_| usize::MAX).collect(); //! //! let mut heap = BinaryHeap::new(); //! //! // We're at `start`, with a zero cost //! dist[start] = 0; //! heap.push(State { cost: 0, position: start }); //! //! // Examine the frontier with lower cost nodes first (min-heap) //! while let Some(State { cost, position }) = heap.pop() { //! // Alternatively we could have continued to find all shortest paths //! if position == goal { return Some(cost); } //! //! // Important as we may have already found a better way //! if cost > dist[position] { continue; } //! //! // For each node we can reach, see if we can find a way with //! // a lower cost going through this node //! for edge in &adj_list[position] { //! let next = State { cost: cost + edge.cost, position: edge.node }; //! //! // If so, add it to the frontier and continue //! if next.cost < dist[next.position] { //! heap.push(next); //! // Relaxation, we have now found a better way //! dist[next.position] = next.cost; //! } //! } //! } //! //! // Goal not reachable //! None //! } //! //! fn main() { //! // This is the directed graph we're going to use. //! // The node numbers correspond to the different states, //! // and the edge weights symbolize the cost of moving //! // from one node to another. //! // Note that the edges are one-way. //! // //! // 7 //! // +-----------------+ //! // | | //! // v 1 2 | 2 //! // 0 -----> 1 -----> 3 ---> 4 //! // | ^ ^ ^ //! // | | 1 | | //! // | | | 3 | 1 //! // +------> 2 -------+ | //! // 10 | | //! // +---------------+ //! // //! // The graph is represented as an adjacency list where each index, //! // corresponding to a node value, has a list of outgoing edges. //! // Chosen for its efficiency. //! let graph = vec![ //! // Node 0 //! vec![Edge { node: 2, cost: 10 }, //! Edge { node: 1, cost: 1 }], //! // Node 1 //! vec![Edge { node: 3, cost: 2 }], //! // Node 2 //! vec![Edge { node: 1, cost: 1 }, //! Edge { node: 3, cost: 3 }, //! Edge { node: 4, cost: 1 }], //! // Node 3 //! vec![Edge { node: 0, cost: 7 }, //! Edge { node: 4, cost: 2 }], //! // Node 4 //! vec![]]; //! //! assert_eq!(shortest_path(&graph, 0, 1), Some(1)); //! assert_eq!(shortest_path(&graph, 0, 3), Some(3)); //! assert_eq!(shortest_path(&graph, 3, 0), Some(7)); //! assert_eq!(shortest_path(&graph, 0, 4), Some(5)); //! assert_eq!(shortest_path(&graph, 4, 0), None); //! } //! ``` #![allow(missing_docs)] #![stable(feature = "rust1", since = "1.0.0")] use core::fmt; use core::iter::{FromIterator, FusedIterator, InPlaceIterable, SourceIter, TrustedLen}; use core::mem::{self, swap, ManuallyDrop}; use core::ops::{Deref, DerefMut}; use core::ptr; use crate::slice; use crate::vec::{self, AsIntoIter, Vec}; use super::SpecExtend; /// A priority queue implemented with a binary heap. /// /// This will be a max-heap. /// /// It is a logic error for an item to be modified in such a way that the /// item's ordering relative to any other item, as determined by the `Ord` /// trait, changes while it is in the heap. This is normally only possible /// through `Cell`, `RefCell`, global state, I/O, or unsafe code. The /// behavior resulting from such a logic error is not specified, but will /// not result in undefined behavior. 
This could include panics, incorrect /// results, aborts, memory leaks, and non-termination. /// /// # Examples /// /// ``` /// use std::collections::BinaryHeap; /// /// // Type inference lets us omit an explicit type signature (which /// // would be `BinaryHeap` in this example). /// let mut heap = BinaryHeap::new(); /// /// // We can use peek to look at the next item in the heap. In this case, /// // there's no items in there yet so we get None. /// assert_eq!(heap.peek(), None); /// /// // Let's add some scores... /// heap.push(1); /// heap.push(5); /// heap.push(2); /// /// // Now peek shows the most important item in the heap. /// assert_eq!(heap.peek(), Some(&5)); /// /// // We can check the length of a heap. /// assert_eq!(heap.len(), 3); /// /// // We can iterate over the items in the heap, although they are returned in /// // a random order. /// for x in &heap { /// println!("{}", x); /// } /// /// // If we instead pop these scores, they should come back in order. /// assert_eq!(heap.pop(), Some(5)); /// assert_eq!(heap.pop(), Some(2)); /// assert_eq!(heap.pop(), Some(1)); /// assert_eq!(heap.pop(), None); /// /// // We can clear the heap of any remaining items. /// heap.clear(); /// /// // The heap should now be empty. /// assert!(heap.is_empty()) /// ``` /// /// ## Min-heap /// /// Either `std::cmp::Reverse` or a custom `Ord` implementation can be used to /// make `BinaryHeap` a min-heap. This makes `heap.pop()` return the smallest /// value instead of the greatest one. /// /// ``` /// use std::collections::BinaryHeap; /// use std::cmp::Reverse; /// /// let mut heap = BinaryHeap::new(); /// /// // Wrap values in `Reverse` /// heap.push(Reverse(1)); /// heap.push(Reverse(5)); /// heap.push(Reverse(2)); /// /// // If we pop these scores now, they should come back in the reverse order. /// assert_eq!(heap.pop(), Some(Reverse(1))); /// assert_eq!(heap.pop(), Some(Reverse(2))); /// assert_eq!(heap.pop(), Some(Reverse(5))); /// assert_eq!(heap.pop(), None); /// ``` /// /// # Time complexity /// /// | [push] | [pop] | [peek]/[peek\_mut] | /// |--------|-----------|--------------------| /// | O(1)~ | *O*(log(*n*)) | *O*(1) | /// /// The value for `push` is an expected cost; the method documentation gives a /// more detailed analysis. /// /// [push]: BinaryHeap::push /// [pop]: BinaryHeap::pop /// [peek]: BinaryHeap::peek /// [peek\_mut]: BinaryHeap::peek_mut #[stable(feature = "rust1", since = "1.0.0")] #[cfg_attr(not(test), rustc_diagnostic_item = "BinaryHeap")] pub struct BinaryHeap { data: Vec, } /// Structure wrapping a mutable reference to the greatest item on a /// `BinaryHeap`. /// /// This `struct` is created by the [`peek_mut`] method on [`BinaryHeap`]. See /// its documentation for more. /// /// [`peek_mut`]: BinaryHeap::peek_mut #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")] pub struct PeekMut<'a, T: 'a + Ord> { heap: &'a mut BinaryHeap, sift: bool, } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for PeekMut<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("PeekMut").field(&self.heap.data[0]).finish() } } #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")] impl Drop for PeekMut<'_, T> { fn drop(&mut self) { if self.sift { // SAFETY: PeekMut is only instantiated for non-empty heaps. 
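// (A non-empty heap means `0 < self.heap.len()`, which is precisely the
// `pos < self.len()` precondition that `sift_down` documents below.)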
unsafe { self.heap.sift_down(0) }; } } } #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")] impl Deref for PeekMut<'_, T> { type Target = T; fn deref(&self) -> &T { debug_assert!(!self.heap.is_empty()); // SAFE: PeekMut is only instantiated for non-empty heaps unsafe { self.heap.data.get_unchecked(0) } } } #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")] impl DerefMut for PeekMut<'_, T> { fn deref_mut(&mut self) -> &mut T { debug_assert!(!self.heap.is_empty()); self.sift = true; // SAFE: PeekMut is only instantiated for non-empty heaps unsafe { self.heap.data.get_unchecked_mut(0) } } } impl<'a, T: Ord> PeekMut<'a, T> { /// Removes the peeked value from the heap and returns it. #[stable(feature = "binary_heap_peek_mut_pop", since = "1.18.0")] pub fn pop(mut this: PeekMut<'a, T>) -> T { let value = this.heap.pop().unwrap(); this.sift = false; value } } #[stable(feature = "rust1", since = "1.0.0")] impl Clone for BinaryHeap { fn clone(&self) -> Self { BinaryHeap { data: self.data.clone() } } fn clone_from(&mut self, source: &Self) { self.data.clone_from(&source.data); } } #[stable(feature = "rust1", since = "1.0.0")] impl Default for BinaryHeap { /// Creates an empty `BinaryHeap`. #[inline] fn default() -> BinaryHeap { BinaryHeap::new() } } #[stable(feature = "binaryheap_debug", since = "1.4.0")] impl fmt::Debug for BinaryHeap { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.iter()).finish() } } impl BinaryHeap { /// Creates an empty `BinaryHeap` as a max-heap. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// let mut heap = BinaryHeap::new(); /// heap.push(4); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn new() -> BinaryHeap { BinaryHeap { data: vec![] } } /// Creates an empty `BinaryHeap` with a specific capacity. /// This preallocates enough memory for `capacity` elements, /// so that the `BinaryHeap` does not have to be reallocated /// until it contains at least that many values. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// let mut heap = BinaryHeap::with_capacity(10); /// heap.push(4); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn with_capacity(capacity: usize) -> BinaryHeap { BinaryHeap { data: Vec::with_capacity(capacity) } } /// Returns a mutable reference to the greatest item in the binary heap, or /// `None` if it is empty. /// /// Note: If the `PeekMut` value is leaked, the heap may be in an /// inconsistent state. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// let mut heap = BinaryHeap::new(); /// assert!(heap.peek_mut().is_none()); /// /// heap.push(1); /// heap.push(5); /// heap.push(2); /// { /// let mut val = heap.peek_mut().unwrap(); /// *val = 0; /// } /// assert_eq!(heap.peek(), Some(&2)); /// ``` /// /// # Time complexity /// /// If the item is modified then the worst case time complexity is *O*(log(*n*)), /// otherwise it's *O*(1). #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")] pub fn peek_mut(&mut self) -> Option> { if self.is_empty() { None } else { Some(PeekMut { heap: self, sift: false }) } } /// Removes the greatest item from the binary heap and returns it, or `None` if it /// is empty. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// let mut heap = BinaryHeap::from(vec![1, 3]); /// /// assert_eq!(heap.pop(), Some(3)); /// assert_eq!(heap.pop(), Some(1)); /// assert_eq!(heap.pop(), None); /// ``` /// /// # Time complexity /// /// The worst case cost of `pop` on a heap containing *n* elements is *O*(log(*n*)). #[stable(feature = "rust1", since = "1.0.0")] pub fn pop(&mut self) -> Option { self.data.pop().map(|mut item| { if !self.is_empty() { swap(&mut item, &mut self.data[0]); // SAFETY: !self.is_empty() means that self.len() > 0 unsafe { self.sift_down_to_bottom(0) }; } item }) } /// Pushes an item onto the binary heap. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// let mut heap = BinaryHeap::new(); /// heap.push(3); /// heap.push(5); /// heap.push(1); /// /// assert_eq!(heap.len(), 3); /// assert_eq!(heap.peek(), Some(&5)); /// ``` /// /// # Time complexity /// /// The expected cost of `push`, averaged over every possible ordering of /// the elements being pushed, and over a sufficiently large number of /// pushes, is *O*(1). This is the most meaningful cost metric when pushing /// elements that are *not* already in any sorted pattern. /// /// The time complexity degrades if elements are pushed in predominantly /// ascending order. In the worst case, elements are pushed in ascending /// sorted order and the amortized cost per push is *O*(log(*n*)) against a heap /// containing *n* elements. /// /// The worst case cost of a *single* call to `push` is *O*(*n*). The worst case /// occurs when capacity is exhausted and needs a resize. The resize cost /// has been amortized in the previous figures. #[stable(feature = "rust1", since = "1.0.0")] pub fn push(&mut self, item: T) { let old_len = self.len(); self.data.push(item); // SAFETY: Since we pushed a new item it means that // old_len = self.len() - 1 < self.len() unsafe { self.sift_up(0, old_len) }; } /// Consumes the `BinaryHeap` and returns a vector in sorted /// (ascending) order. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// /// let mut heap = BinaryHeap::from(vec![1, 2, 4, 5, 7]); /// heap.push(6); /// heap.push(3); /// /// let vec = heap.into_sorted_vec(); /// assert_eq!(vec, [1, 2, 3, 4, 5, 6, 7]); /// ``` #[stable(feature = "binary_heap_extras_15", since = "1.5.0")] pub fn into_sorted_vec(mut self) -> Vec { let mut end = self.len(); while end > 1 { end -= 1; // SAFETY: `end` goes from `self.len() - 1` to 1 (both included), // so it's always a valid index to access. // It is safe to access index 0 (i.e. `ptr`), because // 1 <= end < self.len(), which means self.len() >= 2. unsafe { let ptr = self.data.as_mut_ptr(); ptr::swap(ptr, ptr.add(end)); } // SAFETY: `end` goes from `self.len() - 1` to 1 (both included) so: // 0 < 1 <= end <= self.len() - 1 < self.len() // Which means 0 < end and end < self.len(). unsafe { self.sift_down_range(0, end) }; } self.into_vec() } // The implementations of sift_up and sift_down use unsafe blocks in // order to move an element out of the vector (leaving behind a // hole), shift along the others and move the removed element back into the // vector at the final location of the hole. // The `Hole` type is used to represent this, and make sure // the hole is filled back at the end of its scope, even on panic. // Using a hole reduces the constant factor compared to using swaps, // which involves twice as many moves. 
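// Worked example (illustrative, not from the original source): pushing 10
// onto the max-heap [9, 8, 5, 3] opens a hole at index 4; 10 > 8, so 8 moves
// down into the hole; 10 > 9, so 9 moves down as well; the hole, now at the
// root, is filled once with 10, giving [10, 9, 5, 3, 8]. Each level costs a
// single move instead of the three moves of a swap.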
/// # Safety /// /// The caller must guarantee that `pos < self.len()`. unsafe fn sift_up(&mut self, start: usize, pos: usize) -> usize { // Take out the value at `pos` and create a hole. // SAFETY: The caller guarantees that pos < self.len() let mut hole = unsafe { Hole::new(&mut self.data, pos) }; while hole.pos() > start { let parent = (hole.pos() - 1) / 2; // SAFETY: hole.pos() > start >= 0, which means hole.pos() > 0 // and so hole.pos() - 1 can't underflow. // This guarantees that parent < hole.pos() so // it's a valid index and also != hole.pos(). if hole.element() <= unsafe { hole.get(parent) } { break; } // SAFETY: Same as above unsafe { hole.move_to(parent) }; } hole.pos() } /// Take an element at `pos` and move it down the heap, /// while its children are larger. /// /// # Safety /// /// The caller must guarantee that `pos < end <= self.len()`. unsafe fn sift_down_range(&mut self, pos: usize, end: usize) { // SAFETY: The caller guarantees that pos < end <= self.len(). let mut hole = unsafe { Hole::new(&mut self.data, pos) }; let mut child = 2 * hole.pos() + 1; // Loop invariant: child == 2 * hole.pos() + 1. while child <= end.saturating_sub(2) { // compare with the greater of the two children // SAFETY: child < end - 1 < self.len() and // child + 1 < end <= self.len(), so they're valid indexes. // child == 2 * hole.pos() + 1 != hole.pos() and // child + 1 == 2 * hole.pos() + 2 != hole.pos(). // FIXME: 2 * hole.pos() + 1 or 2 * hole.pos() + 2 could overflow // if T is a ZST child += unsafe { hole.get(child) <= hole.get(child + 1) } as usize; // if we are already in order, stop. // SAFETY: child is now either the old child or the old child+1 // We already proven that both are < self.len() and != hole.pos() if hole.element() >= unsafe { hole.get(child) } { return; } // SAFETY: same as above. unsafe { hole.move_to(child) }; child = 2 * hole.pos() + 1; } // SAFETY: && short circuit, which means that in the // second condition it's already true that child == end - 1 < self.len(). if child == end - 1 && hole.element() < unsafe { hole.get(child) } { // SAFETY: child is already proven to be a valid index and // child == 2 * hole.pos() + 1 != hole.pos(). unsafe { hole.move_to(child) }; } } /// # Safety /// /// The caller must guarantee that `pos < self.len()`. unsafe fn sift_down(&mut self, pos: usize) { let len = self.len(); // SAFETY: pos < len is guaranteed by the caller and // obviously len = self.len() <= self.len(). unsafe { self.sift_down_range(pos, len) }; } /// Take an element at `pos` and move it all the way down the heap, /// then sift it up to its position. /// /// Note: This is faster when the element is known to be large / should /// be closer to the bottom. /// /// # Safety /// /// The caller must guarantee that `pos < self.len()`. unsafe fn sift_down_to_bottom(&mut self, mut pos: usize) { let end = self.len(); let start = pos; // SAFETY: The caller guarantees that pos < self.len(). let mut hole = unsafe { Hole::new(&mut self.data, pos) }; let mut child = 2 * hole.pos() + 1; // Loop invariant: child == 2 * hole.pos() + 1. while child <= end.saturating_sub(2) { // SAFETY: child < end - 1 < self.len() and // child + 1 < end <= self.len(), so they're valid indexes. // child == 2 * hole.pos() + 1 != hole.pos() and // child + 1 == 2 * hole.pos() + 2 != hole.pos(). 
// FIXME: 2 * hole.pos() + 1 or 2 * hole.pos() + 2 could overflow // if T is a ZST child += unsafe { hole.get(child) <= hole.get(child + 1) } as usize; // SAFETY: Same as above unsafe { hole.move_to(child) }; child = 2 * hole.pos() + 1; } if child == end - 1 { // SAFETY: child == end - 1 < self.len(), so it's a valid index // and child == 2 * hole.pos() + 1 != hole.pos(). unsafe { hole.move_to(child) }; } pos = hole.pos(); drop(hole); // SAFETY: pos is the position in the hole and was already proven // to be a valid index. unsafe { self.sift_up(start, pos) }; } /// Rebuild assuming data[0..start] is still a proper heap. fn rebuild_tail(&mut self, start: usize) { if start == self.len() { return; } let tail_len = self.len() - start; #[inline(always)] fn log2_fast(x: usize) -> usize { (usize::BITS - x.leading_zeros() - 1) as usize } // `rebuild` takes O(self.len()) operations // and about 2 * self.len() comparisons in the worst case // while repeating `sift_up` takes O(tail_len * log(start)) operations // and about 1 * tail_len * log_2(start) comparisons in the worst case, // assuming start >= tail_len. For larger heaps, the crossover point // no longer follows this reasoning and was determined empirically. let better_to_rebuild = if start < tail_len { true } else if self.len() <= 2048 { 2 * self.len() < tail_len * log2_fast(start) } else { 2 * self.len() < tail_len * 11 }; if better_to_rebuild { self.rebuild(); } else { for i in start..self.len() { // SAFETY: The index `i` is always less than self.len(). unsafe { self.sift_up(0, i) }; } } } fn rebuild(&mut self) { let mut n = self.len() / 2; while n > 0 { n -= 1; // SAFETY: n starts from self.len() / 2 and goes down to 0. // The only case when !(n < self.len()) is if // self.len() == 0, but it's ruled out by the loop condition. unsafe { self.sift_down(n) }; } } /// Moves all the elements of `other` into `self`, leaving `other` empty. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// /// let v = vec![-10, 1, 2, 3, 3]; /// let mut a = BinaryHeap::from(v); /// /// let v = vec![-20, 5, 43]; /// let mut b = BinaryHeap::from(v); /// /// a.append(&mut b); /// /// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]); /// assert!(b.is_empty()); /// ``` #[stable(feature = "binary_heap_append", since = "1.11.0")] pub fn append(&mut self, other: &mut Self) { if self.len() < other.len() { swap(self, other); } let start = self.data.len(); self.data.append(&mut other.data); self.rebuild_tail(start); } /// Returns an iterator which retrieves elements in heap order. /// The retrieved elements are removed from the original heap. /// The remaining elements will be removed on drop in heap order. /// /// Note: /// * `.drain_sorted()` is *O*(*n* \* log(*n*)); much slower than `.drain()`. /// You should use the latter for most cases. /// /// # Examples /// /// Basic usage: /// /// ``` /// #![feature(binary_heap_drain_sorted)] /// use std::collections::BinaryHeap; /// /// let mut heap = BinaryHeap::from(vec![1, 2, 3, 4, 5]); /// assert_eq!(heap.len(), 5); /// /// drop(heap.drain_sorted()); // removes all elements in heap order /// assert_eq!(heap.len(), 0); /// ``` #[inline] #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")] pub fn drain_sorted(&mut self) -> DrainSorted<'_, T> { DrainSorted { inner: self } } /// Retains only the elements specified by the predicate. /// /// In other words, remove all elements `e` such that `f(&e)` returns /// `false`. 
/// The elements are visited in unsorted (and unspecified) order. /// /// # Examples /// /// Basic usage: /// /// ``` /// #![feature(binary_heap_retain)] /// use std::collections::BinaryHeap; /// /// let mut heap = BinaryHeap::from(vec![-10, -5, 1, 2, 4, 13]); /// /// heap.retain(|x| x % 2 == 0); // only keep even numbers /// /// assert_eq!(heap.into_sorted_vec(), [-10, 2, 4]) /// ``` #[unstable(feature = "binary_heap_retain", issue = "71503")] pub fn retain<F>(&mut self, mut f: F) where F: FnMut(&T) -> bool, { let mut first_removed = self.len(); let mut i = 0; self.data.retain(|e| { let keep = f(e); if !keep && i < first_removed { first_removed = i; } i += 1; keep }); // data[0..first_removed] is untouched, so we only need to rebuild the tail: self.rebuild_tail(first_removed); } } impl<T> BinaryHeap<T> { /// Returns an iterator visiting all values in the underlying vector, in /// arbitrary order. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]); /// /// // Print 1, 2, 3, 4 in arbitrary order /// for x in heap.iter() { /// println!("{}", x); /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn iter(&self) -> Iter<'_, T> { Iter { iter: self.data.iter() } } /// Returns an iterator which retrieves elements in heap order. /// This method consumes the original heap. /// /// # Examples /// /// Basic usage: /// /// ``` /// #![feature(binary_heap_into_iter_sorted)] /// use std::collections::BinaryHeap; /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5]); /// /// assert_eq!(heap.into_iter_sorted().take(2).collect::<Vec<_>>(), vec![5, 4]); /// ``` #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")] pub fn into_iter_sorted(self) -> IntoIterSorted<T> { IntoIterSorted { inner: self } } /// Returns the greatest item in the binary heap, or `None` if it is empty. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// let mut heap = BinaryHeap::new(); /// assert_eq!(heap.peek(), None); /// /// heap.push(1); /// heap.push(5); /// heap.push(2); /// assert_eq!(heap.peek(), Some(&5)); /// /// ``` /// /// # Time complexity /// /// Cost is *O*(1) in the worst case. #[stable(feature = "rust1", since = "1.0.0")] pub fn peek(&self) -> Option<&T> { self.data.get(0) } /// Returns the number of elements the binary heap can hold without reallocating. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// let mut heap = BinaryHeap::with_capacity(100); /// assert!(heap.capacity() >= 100); /// heap.push(4); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn capacity(&self) -> usize { self.data.capacity() } /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the /// given `BinaryHeap`. Does nothing if the capacity is already sufficient. /// /// Note that the allocator may give the collection more space than it requests. Therefore /// capacity can not be relied upon to be precisely minimal. Prefer [`reserve`] if future /// insertions are expected. /// /// # Panics /// /// Panics if the new capacity overflows `usize`.
/// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// let mut heap = BinaryHeap::new(); /// heap.reserve_exact(100); /// assert!(heap.capacity() >= 100); /// heap.push(4); /// ``` /// /// [`reserve`]: BinaryHeap::reserve #[stable(feature = "rust1", since = "1.0.0")] pub fn reserve_exact(&mut self, additional: usize) { self.data.reserve_exact(additional); } /// Reserves capacity for at least `additional` more elements to be inserted in the /// `BinaryHeap`. The collection may reserve more space to avoid frequent reallocations. /// /// # Panics /// /// Panics if the new capacity overflows `usize`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// let mut heap = BinaryHeap::new(); /// heap.reserve(100); /// assert!(heap.capacity() >= 100); /// heap.push(4); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn reserve(&mut self, additional: usize) { self.data.reserve(additional); } /// Discards as much additional capacity as possible. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// let mut heap: BinaryHeap = BinaryHeap::with_capacity(100); /// /// assert!(heap.capacity() >= 100); /// heap.shrink_to_fit(); /// assert!(heap.capacity() == 0); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn shrink_to_fit(&mut self) { self.data.shrink_to_fit(); } /// Discards capacity with a lower bound. /// /// The capacity will remain at least as large as both the length /// and the supplied value. /// /// If the current capacity is less than the lower limit, this is a no-op. /// /// # Examples /// /// ``` /// #![feature(shrink_to)] /// use std::collections::BinaryHeap; /// let mut heap: BinaryHeap = BinaryHeap::with_capacity(100); /// /// assert!(heap.capacity() >= 100); /// heap.shrink_to(10); /// assert!(heap.capacity() >= 10); /// ``` #[inline] #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")] pub fn shrink_to(&mut self, min_capacity: usize) { self.data.shrink_to(min_capacity) } /// Returns a slice of all values in the underlying vector, in arbitrary /// order. /// /// # Examples /// /// Basic usage: /// /// ``` /// #![feature(binary_heap_as_slice)] /// use std::collections::BinaryHeap; /// use std::io::{self, Write}; /// /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5, 6, 7]); /// /// io::sink().write(heap.as_slice()).unwrap(); /// ``` #[unstable(feature = "binary_heap_as_slice", issue = "83659")] pub fn as_slice(&self) -> &[T] { self.data.as_slice() } /// Consumes the `BinaryHeap` and returns the underlying vector /// in arbitrary order. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5, 6, 7]); /// let vec = heap.into_vec(); /// /// // Will print in some order /// for x in vec { /// println!("{}", x); /// } /// ``` #[stable(feature = "binary_heap_extras_15", since = "1.5.0")] pub fn into_vec(self) -> Vec { self.into() } /// Returns the length of the binary heap. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// let heap = BinaryHeap::from(vec![1, 3]); /// /// assert_eq!(heap.len(), 2); /// ``` #[doc(alias = "length")] #[stable(feature = "rust1", since = "1.0.0")] pub fn len(&self) -> usize { self.data.len() } /// Checks if the binary heap is empty. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// let mut heap = BinaryHeap::new(); /// /// assert!(heap.is_empty()); /// /// heap.push(3); /// heap.push(5); /// heap.push(1); /// /// assert!(!heap.is_empty()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn is_empty(&self) -> bool { self.len() == 0 } /// Clears the binary heap, returning an iterator over the removed elements. /// /// The elements are removed in arbitrary order. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// let mut heap = BinaryHeap::from(vec![1, 3]); /// /// assert!(!heap.is_empty()); /// /// for x in heap.drain() { /// println!("{}", x); /// } /// /// assert!(heap.is_empty()); /// ``` #[inline] #[stable(feature = "drain", since = "1.6.0")] pub fn drain(&mut self) -> Drain<'_, T> { Drain { iter: self.data.drain(..) } } /// Drops all items from the binary heap. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// let mut heap = BinaryHeap::from(vec![1, 3]); /// /// assert!(!heap.is_empty()); /// /// heap.clear(); /// /// assert!(heap.is_empty()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn clear(&mut self) { self.drain(); } } /// Hole represents a hole in a slice i.e., an index without valid value /// (because it was moved from or duplicated). /// In drop, `Hole` will restore the slice by filling the hole /// position with the value that was originally removed. struct Hole<'a, T: 'a> { data: &'a mut [T], elt: ManuallyDrop, pos: usize, } impl<'a, T> Hole<'a, T> { /// Create a new `Hole` at index `pos`. /// /// Unsafe because pos must be within the data slice. #[inline] unsafe fn new(data: &'a mut [T], pos: usize) -> Self { debug_assert!(pos < data.len()); // SAFE: pos should be inside the slice let elt = unsafe { ptr::read(data.get_unchecked(pos)) }; Hole { data, elt: ManuallyDrop::new(elt), pos } } #[inline] fn pos(&self) -> usize { self.pos } /// Returns a reference to the element removed. #[inline] fn element(&self) -> &T { &self.elt } /// Returns a reference to the element at `index`. /// /// Unsafe because index must be within the data slice and not equal to pos. #[inline] unsafe fn get(&self, index: usize) -> &T { debug_assert!(index != self.pos); debug_assert!(index < self.data.len()); unsafe { self.data.get_unchecked(index) } } /// Move hole to new location /// /// Unsafe because index must be within the data slice and not equal to pos. #[inline] unsafe fn move_to(&mut self, index: usize) { debug_assert!(index != self.pos); debug_assert!(index < self.data.len()); unsafe { let ptr = self.data.as_mut_ptr(); let index_ptr: *const _ = ptr.add(index); let hole_ptr = ptr.add(self.pos); ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1); } self.pos = index; } } impl Drop for Hole<'_, T> { #[inline] fn drop(&mut self) { // fill the hole again unsafe { let pos = self.pos; ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1); } } } /// An iterator over the elements of a `BinaryHeap`. /// /// This `struct` is created by [`BinaryHeap::iter()`]. See its /// documentation for more. 
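///
/// # Example
///
/// A minimal way to obtain this iterator:
///
/// ```
/// use std::collections::BinaryHeap;
///
/// let heap = BinaryHeap::from(vec![1, 2, 3]);
/// let iter: std::collections::binary_heap::Iter<'_, i32> = heap.iter();
/// ```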
/// /// [`iter`]: BinaryHeap::iter #[stable(feature = "rust1", since = "1.0.0")] pub struct Iter<'a, T: 'a> { iter: slice::Iter<'a, T>, } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for Iter<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("Iter").field(&self.iter.as_slice()).finish() } } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Iter<'_, T> { fn clone(&self) -> Self { Iter { iter: self.iter.clone() } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Iterator for Iter<'a, T> { type Item = &'a T; #[inline] fn next(&mut self) -> Option<&'a T> { self.iter.next() } #[inline] fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } #[inline] fn last(self) -> Option<&'a T> { self.iter.last() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> DoubleEndedIterator for Iter<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a T> { self.iter.next_back() } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for Iter<'_, T> { fn is_empty(&self) -> bool { self.iter.is_empty() } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Iter<'_, T> {} /// An owning iterator over the elements of a `BinaryHeap`. /// /// This `struct` is created by [`BinaryHeap::into_iter()`] /// (provided by the `IntoIterator` trait). See its documentation for more. /// /// [`into_iter`]: BinaryHeap::into_iter #[stable(feature = "rust1", since = "1.0.0")] #[derive(Clone)] pub struct IntoIter { iter: vec::IntoIter, } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("IntoIter").field(&self.iter.as_slice()).finish() } } #[stable(feature = "rust1", since = "1.0.0")] impl Iterator for IntoIter { type Item = T; #[inline] fn next(&mut self) -> Option { self.iter.next() } #[inline] fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } } #[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for IntoIter { #[inline] fn next_back(&mut self) -> Option { self.iter.next_back() } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for IntoIter { fn is_empty(&self) -> bool { self.iter.is_empty() } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for IntoIter {} #[unstable(issue = "none", feature = "inplace_iteration")] unsafe impl SourceIter for IntoIter { type Source = IntoIter; #[inline] unsafe fn as_inner(&mut self) -> &mut Self::Source { self } } #[unstable(issue = "none", feature = "inplace_iteration")] unsafe impl InPlaceIterable for IntoIter {} impl AsIntoIter for IntoIter { type Item = I; fn as_into_iter(&mut self) -> &mut vec::IntoIter { &mut self.iter } } #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")] #[derive(Clone, Debug)] pub struct IntoIterSorted { inner: BinaryHeap, } #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")] impl Iterator for IntoIterSorted { type Item = T; #[inline] fn next(&mut self) -> Option { self.inner.pop() } #[inline] fn size_hint(&self) -> (usize, Option) { let exact = self.inner.len(); (exact, Some(exact)) } } #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")] impl ExactSizeIterator for IntoIterSorted {} #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")] impl FusedIterator for IntoIterSorted {} #[unstable(feature = "trusted_len", issue = 
"37572")] unsafe impl TrustedLen for IntoIterSorted {} /// A draining iterator over the elements of a `BinaryHeap`. /// /// This `struct` is created by [`BinaryHeap::drain()`]. See its /// documentation for more. /// /// [`drain`]: BinaryHeap::drain #[stable(feature = "drain", since = "1.6.0")] #[derive(Debug)] pub struct Drain<'a, T: 'a> { iter: vec::Drain<'a, T>, } #[stable(feature = "drain", since = "1.6.0")] impl Iterator for Drain<'_, T> { type Item = T; #[inline] fn next(&mut self) -> Option { self.iter.next() } #[inline] fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } } #[stable(feature = "drain", since = "1.6.0")] impl DoubleEndedIterator for Drain<'_, T> { #[inline] fn next_back(&mut self) -> Option { self.iter.next_back() } } #[stable(feature = "drain", since = "1.6.0")] impl ExactSizeIterator for Drain<'_, T> { fn is_empty(&self) -> bool { self.iter.is_empty() } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Drain<'_, T> {} /// A draining iterator over the elements of a `BinaryHeap`. /// /// This `struct` is created by [`BinaryHeap::drain_sorted()`]. See its /// documentation for more. /// /// [`drain_sorted`]: BinaryHeap::drain_sorted #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")] #[derive(Debug)] pub struct DrainSorted<'a, T: Ord> { inner: &'a mut BinaryHeap, } #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")] impl<'a, T: Ord> Drop for DrainSorted<'a, T> { /// Removes heap elements in heap order. fn drop(&mut self) { struct DropGuard<'r, 'a, T: Ord>(&'r mut DrainSorted<'a, T>); impl<'r, 'a, T: Ord> Drop for DropGuard<'r, 'a, T> { fn drop(&mut self) { while self.0.inner.pop().is_some() {} } } while let Some(item) = self.inner.pop() { let guard = DropGuard(self); drop(item); mem::forget(guard); } } } #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")] impl Iterator for DrainSorted<'_, T> { type Item = T; #[inline] fn next(&mut self) -> Option { self.inner.pop() } #[inline] fn size_hint(&self) -> (usize, Option) { let exact = self.inner.len(); (exact, Some(exact)) } } #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")] impl ExactSizeIterator for DrainSorted<'_, T> {} #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")] impl FusedIterator for DrainSorted<'_, T> {} #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for DrainSorted<'_, T> {} #[stable(feature = "binary_heap_extras_15", since = "1.5.0")] impl From> for BinaryHeap { /// Converts a `Vec` into a `BinaryHeap`. /// /// This conversion happens in-place, and has *O*(*n*) time complexity. fn from(vec: Vec) -> BinaryHeap { let mut heap = BinaryHeap { data: vec }; heap.rebuild(); heap } } #[stable(feature = "binary_heap_extras_15", since = "1.5.0")] impl From> for Vec { /// Converts a `BinaryHeap` into a `Vec`. /// /// This conversion requires no data movement or allocation, and has /// constant time complexity. fn from(heap: BinaryHeap) -> Vec { heap.data } } #[stable(feature = "rust1", since = "1.0.0")] impl FromIterator for BinaryHeap { fn from_iter>(iter: I) -> BinaryHeap { BinaryHeap::from(iter.into_iter().collect::>()) } } #[stable(feature = "rust1", since = "1.0.0")] impl IntoIterator for BinaryHeap { type Item = T; type IntoIter = IntoIter; /// Creates a consuming iterator, that is, one that moves each value out of /// the binary heap in arbitrary order. The binary heap cannot be used /// after calling this. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use std::collections::BinaryHeap; /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]); /// /// // Print 1, 2, 3, 4 in arbitrary order /// for x in heap.into_iter() { /// // x has type i32, not &i32 /// println!("{}", x); /// } /// ``` fn into_iter(self) -> IntoIter { IntoIter { iter: self.data.into_iter() } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> IntoIterator for &'a BinaryHeap { type Item = &'a T; type IntoIter = Iter<'a, T>; fn into_iter(self) -> Iter<'a, T> { self.iter() } } #[stable(feature = "rust1", since = "1.0.0")] impl Extend for BinaryHeap { #[inline] fn extend>(&mut self, iter: I) { >::spec_extend(self, iter); } #[inline] fn extend_one(&mut self, item: T) { self.push(item); } #[inline] fn extend_reserve(&mut self, additional: usize) { self.reserve(additional); } } impl> SpecExtend for BinaryHeap { default fn spec_extend(&mut self, iter: I) { self.extend_desugared(iter.into_iter()); } } impl SpecExtend> for BinaryHeap { fn spec_extend(&mut self, ref mut other: BinaryHeap) { self.append(other); } } impl BinaryHeap { fn extend_desugared>(&mut self, iter: I) { let iterator = iter.into_iter(); let (lower, _) = iterator.size_hint(); self.reserve(lower); iterator.for_each(move |elem| self.push(elem)); } } #[stable(feature = "extend_ref", since = "1.2.0")] impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BinaryHeap { fn extend>(&mut self, iter: I) { self.extend(iter.into_iter().cloned()); } #[inline] fn extend_one(&mut self, &item: &'a T) { self.push(item); } #[inline] fn extend_reserve(&mut self, additional: usize) { self.reserve(additional); } } use crate::alloc::{Allocator, Global}; use crate::raw_vec::RawVec; use core::fmt; use core::intrinsics::arith_offset; use core::iter::{FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccess}; use core::marker::PhantomData; use core::mem::{self}; use core::ptr::{self, NonNull}; use core::slice::{self}; /// An iterator that moves out of a vector. /// /// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec) /// (provided by the [`IntoIterator`] trait). /// /// # Example /// /// ``` /// let v = vec![0, 1, 2]; /// let iter: std::vec::IntoIter<_> = v.into_iter(); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub struct IntoIter< T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, > { pub(super) buf: NonNull, pub(super) phantom: PhantomData, pub(super) cap: usize, pub(super) alloc: A, pub(super) ptr: *const T, pub(super) end: *const T, } #[stable(feature = "vec_intoiter_debug", since = "1.13.0")] impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("IntoIter").field(&self.as_slice()).finish() } } impl IntoIter { /// Returns the remaining items of this iterator as a slice. /// /// # Examples /// /// ``` /// let vec = vec!['a', 'b', 'c']; /// let mut into_iter = vec.into_iter(); /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); /// let _ = into_iter.next().unwrap(); /// assert_eq!(into_iter.as_slice(), &['b', 'c']); /// ``` #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")] pub fn as_slice(&self) -> &[T] { unsafe { slice::from_raw_parts(self.ptr, self.len()) } } /// Returns the remaining items of this iterator as a mutable slice. 
/// /// # Examples /// /// ``` /// let vec = vec!['a', 'b', 'c']; /// let mut into_iter = vec.into_iter(); /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); /// into_iter.as_mut_slice()[2] = 'z'; /// assert_eq!(into_iter.next().unwrap(), 'a'); /// assert_eq!(into_iter.next().unwrap(), 'b'); /// assert_eq!(into_iter.next().unwrap(), 'z'); /// ``` #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")] pub fn as_mut_slice(&mut self) -> &mut [T] { unsafe { &mut *self.as_raw_mut_slice() } } /// Returns a reference to the underlying allocator. #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn allocator(&self) -> &A { &self.alloc } fn as_raw_mut_slice(&mut self) -> *mut [T] { ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len()) } /// Drops remaining elements and relinquishes the backing allocation. /// /// This is roughly equivalent to the following, but more efficient /// /// ``` /// # let mut into_iter = Vec::::with_capacity(10).into_iter(); /// (&mut into_iter).for_each(core::mem::drop); /// unsafe { core::ptr::write(&mut into_iter, Vec::new().into_iter()); } /// ``` pub(super) fn forget_allocation_drop_remaining(&mut self) { let remaining = self.as_raw_mut_slice(); // overwrite the individual fields instead of creating a new // struct and then overwriting &mut self. // this creates less assembly self.cap = 0; self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) }; self.ptr = self.buf.as_ptr(); self.end = self.buf.as_ptr(); unsafe { ptr::drop_in_place(remaining); } } } #[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")] impl AsRef<[T]> for IntoIter { fn as_ref(&self) -> &[T] { self.as_slice() } } #[stable(feature = "rust1", since = "1.0.0")] unsafe impl Send for IntoIter {} #[stable(feature = "rust1", since = "1.0.0")] unsafe impl Sync for IntoIter {} #[stable(feature = "rust1", since = "1.0.0")] impl Iterator for IntoIter { type Item = T; #[inline] fn next(&mut self) -> Option { if self.ptr as *const _ == self.end { None } else if mem::size_of::() == 0 { // purposefully don't use 'ptr.offset' because for // vectors with 0-size elements this would return the // same pointer. self.ptr = unsafe { arith_offset(self.ptr as *const i8, 1) as *mut T }; // Make up a value of this ZST. Some(unsafe { mem::zeroed() }) } else { let old = self.ptr; self.ptr = unsafe { self.ptr.offset(1) }; Some(unsafe { ptr::read(old) }) } } #[inline] fn size_hint(&self) -> (usize, Option) { let exact = if mem::size_of::() == 0 { (self.end as usize).wrapping_sub(self.ptr as usize) } else { unsafe { self.end.offset_from(self.ptr) as usize } }; (exact, Some(exact)) } #[inline] fn count(self) -> usize { self.len() } unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item where Self: TrustedRandomAccess, { // SAFETY: the caller must guarantee that `i` is in bounds of the // `Vec`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)` // is guaranteed to pointer to an element of the `Vec` and // thus guaranteed to be valid to dereference. // // Also note the implementation of `Self: TrustedRandomAccess` requires // that `T: Copy` so reading elements from the buffer doesn't invalidate // them for `Drop`. 
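// For zero-sized types there is no backing memory to read, so the body
// below conjures a value with `mem::zeroed()` instead of dereferencing
// `self.ptr`, mirroring the ZST branch in `next`.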
unsafe { if mem::size_of::() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) } } } } #[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for IntoIter { #[inline] fn next_back(&mut self) -> Option { if self.end == self.ptr { None } else if mem::size_of::() == 0 { // See above for why 'ptr.offset' isn't used self.end = unsafe { arith_offset(self.end as *const i8, -1) as *mut T }; // Make up a value of this ZST. Some(unsafe { mem::zeroed() }) } else { self.end = unsafe { self.end.offset(-1) }; Some(unsafe { ptr::read(self.end) }) } } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for IntoIter { fn is_empty(&self) -> bool { self.ptr == self.end } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for IntoIter {} #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for IntoIter {} #[doc(hidden)] #[unstable(issue = "none", feature = "std_internals")] // T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr // and thus we can't implement drop-handling unsafe impl TrustedRandomAccess for IntoIter where T: Copy, { const MAY_HAVE_SIDE_EFFECT: bool = false; } #[stable(feature = "vec_into_iter_clone", since = "1.8.0")] impl Clone for IntoIter { #[cfg(not(test))] fn clone(&self) -> Self { self.as_slice().to_vec_in(self.alloc.clone()).into_iter() } #[cfg(test)] fn clone(&self) -> Self { crate::slice::to_vec(self.as_slice(), self.alloc.clone()).into_iter() } } #[stable(feature = "rust1", since = "1.0.0")] unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter { fn drop(&mut self) { struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter); impl Drop for DropGuard<'_, T, A> { fn drop(&mut self) { unsafe { // `IntoIter::alloc` is not used anymore after this let alloc = ptr::read(&self.0.alloc); // RawVec handles deallocation let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc); } } } let guard = DropGuard(self); // destroy the remaining elements unsafe { ptr::drop_in_place(guard.0.as_raw_mut_slice()); } // now `guard` will be dropped and do the rest } } #[unstable(issue = "none", feature = "inplace_iteration")] unsafe impl InPlaceIterable for IntoIter {} #[unstable(issue = "none", feature = "inplace_iteration")] unsafe impl SourceIter for IntoIter { type Source = Self; #[inline] unsafe fn as_inner(&mut self) -> &mut Self::Source { self } } // internal helper trait for in-place iteration specialization. #[rustc_specialization_trait] pub(crate) trait AsIntoIter { type Item; fn as_into_iter(&mut self) -> &mut IntoIter; } impl AsIntoIter for IntoIter { type Item = T; fn as_into_iter(&mut self) -> &mut IntoIter { self } } use crate::alloc::Allocator; use core::iter::TrustedLen; use core::ptr::{self}; use core::slice::{self}; use super::{IntoIter, SetLenOnDrop, Vec}; // Specialization trait used for Vec::extend pub(super) trait SpecExtend { fn spec_extend(&mut self, iter: I); } impl SpecExtend for Vec where I: Iterator, { default fn spec_extend(&mut self, iter: I) { self.extend_desugared(iter) } } impl SpecExtend for Vec where I: TrustedLen, { default fn spec_extend(&mut self, iterator: I) { // This is the case for a TrustedLen iterator. 
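// TrustedLen guarantees that an upper bound of `Some(n)` is exact, so
// reserving `n` once up front makes the raw writes below safe without
// re-checking capacity inside the loop.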
let (low, high) = iterator.size_hint(); if let Some(additional) = high { debug_assert_eq!( low, additional, "TrustedLen iterator's size hint is not exact: {:?}", (low, high) ); self.reserve(additional); unsafe { let mut ptr = self.as_mut_ptr().add(self.len()); let mut local_len = SetLenOnDrop::new(&mut self.len); iterator.for_each(move |element| { ptr::write(ptr, element); ptr = ptr.offset(1); // NB can't overflow since we would have had to alloc the address space local_len.increment_len(1); }); } } else { // Per TrustedLen contract a `None` upper bound means that the iterator length // truly exceeds usize::MAX, which would eventually lead to a capacity overflow anyway. // Since the other branch already panics eagerly (via `reserve()`) we do the same here. // This avoids additional codegen for a fallback code path which would eventually // panic anyway. panic!("capacity overflow"); } } } impl SpecExtend> for Vec { fn spec_extend(&mut self, mut iterator: IntoIter) { unsafe { self.append_elements(iterator.as_slice() as _); } iterator.ptr = iterator.end; } } impl<'a, T: 'a, I, A: Allocator + 'a> SpecExtend<&'a T, I> for Vec where I: Iterator, T: Clone, { default fn spec_extend(&mut self, iterator: I) { self.spec_extend(iterator.cloned()) } } impl<'a, T: 'a, A: Allocator + 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec where T: Copy, { fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) { let slice = iterator.as_slice(); unsafe { self.append_elements(slice) }; } } use crate::alloc::Allocator; use crate::raw_vec::RawVec; use core::ptr::{self}; use super::{ExtendElement, IsZero, Vec}; // Specialization trait used for Vec::from_elem pub(super) trait SpecFromElem: Sized { fn from_elem(elem: Self, n: usize, alloc: A) -> Vec; } impl SpecFromElem for T { default fn from_elem(elem: Self, n: usize, alloc: A) -> Vec { let mut v = Vec::with_capacity_in(n, alloc); v.extend_with(n, ExtendElement(elem)); v } } impl SpecFromElem for i8 { #[inline] fn from_elem(elem: i8, n: usize, alloc: A) -> Vec { if elem == 0 { return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; } unsafe { let mut v = Vec::with_capacity_in(n, alloc); ptr::write_bytes(v.as_mut_ptr(), elem as u8, n); v.set_len(n); v } } } impl SpecFromElem for u8 { #[inline] fn from_elem(elem: u8, n: usize, alloc: A) -> Vec { if elem == 0 { return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; } unsafe { let mut v = Vec::with_capacity_in(n, alloc); ptr::write_bytes(v.as_mut_ptr(), elem, n); v.set_len(n); v } } } impl SpecFromElem for T { #[inline] fn from_elem(elem: T, n: usize, alloc: A) -> Vec { if elem.is_zero() { return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; } let mut v = Vec::with_capacity_in(n, alloc); v.extend_with(n, ExtendElement(elem)); v } } use crate::alloc::{Allocator, Global}; use core::fmt; use core::iter::{FusedIterator, TrustedLen}; use core::mem::{self}; use core::ptr::{self, NonNull}; use core::slice::{self}; use super::Vec; /// A draining iterator for `Vec`. /// /// This `struct` is created by [`Vec::drain`]. /// See its documentation for more. 
/// /// # Example /// /// ``` /// let mut v = vec![0, 1, 2]; /// let iter: std::vec::Drain<_> = v.drain(..); /// ``` #[stable(feature = "drain", since = "1.6.0")] pub struct Drain< 'a, T: 'a, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global, > { /// Index of tail to preserve pub(super) tail_start: usize, /// Length of tail pub(super) tail_len: usize, /// Current remaining range to remove pub(super) iter: slice::Iter<'a, T>, pub(super) vec: NonNull>, } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for Drain<'_, T, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("Drain").field(&self.iter.as_slice()).finish() } } impl<'a, T, A: Allocator> Drain<'a, T, A> { /// Returns the remaining items of this iterator as a slice. /// /// # Examples /// /// ``` /// let mut vec = vec!['a', 'b', 'c']; /// let mut drain = vec.drain(..); /// assert_eq!(drain.as_slice(), &['a', 'b', 'c']); /// let _ = drain.next().unwrap(); /// assert_eq!(drain.as_slice(), &['b', 'c']); /// ``` #[stable(feature = "vec_drain_as_slice", since = "1.46.0")] pub fn as_slice(&self) -> &[T] { self.iter.as_slice() } /// Returns a reference to the underlying allocator. #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn allocator(&self) -> &A { unsafe { self.vec.as_ref().allocator() } } } #[stable(feature = "vec_drain_as_slice", since = "1.46.0")] impl<'a, T, A: Allocator> AsRef<[T]> for Drain<'a, T, A> { fn as_ref(&self) -> &[T] { self.as_slice() } } #[stable(feature = "drain", since = "1.6.0")] unsafe impl Sync for Drain<'_, T, A> {} #[stable(feature = "drain", since = "1.6.0")] unsafe impl Send for Drain<'_, T, A> {} #[stable(feature = "drain", since = "1.6.0")] impl Iterator for Drain<'_, T, A> { type Item = T; #[inline] fn next(&mut self) -> Option { self.iter.next().map(|elt| unsafe { ptr::read(elt as *const _) }) } fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } } #[stable(feature = "drain", since = "1.6.0")] impl DoubleEndedIterator for Drain<'_, T, A> { #[inline] fn next_back(&mut self) -> Option { self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) }) } } #[stable(feature = "drain", since = "1.6.0")] impl Drop for Drain<'_, T, A> { fn drop(&mut self) { /// Continues dropping the remaining elements in the `Drain`, then moves back the /// un-`Drain`ed elements to restore the original `Vec`. struct DropGuard<'r, 'a, T, A: Allocator>(&'r mut Drain<'a, T, A>); impl<'r, 'a, T, A: Allocator> Drop for DropGuard<'r, 'a, T, A> { fn drop(&mut self) { // Continue the same loop we have below. If the loop already finished, this does // nothing. self.0.for_each(drop); if self.0.tail_len > 0 { unsafe { let source_vec = self.0.vec.as_mut(); // memmove back untouched tail, update to new length let start = source_vec.len(); let tail = self.0.tail_start; if tail != start { let src = source_vec.as_ptr().add(tail); let dst = source_vec.as_mut_ptr().add(start); ptr::copy(src, dst, self.0.tail_len); } source_vec.set_len(start + self.0.tail_len); } } } } // exhaust self first while let Some(item) = self.next() { let guard = DropGuard(self); drop(item); mem::forget(guard); } // Drop a `DropGuard` to move back the non-drained tail of `self`. 
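// The guard below is a temporary that is dropped at the end of its
// statement, so the tail-restoring logic runs exactly once on the
// normal (non-panicking) exit path.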
DropGuard(self); } } #[stable(feature = "drain", since = "1.6.0")] impl ExactSizeIterator for Drain<'_, T, A> { fn is_empty(&self) -> bool { self.iter.is_empty() } } #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for Drain<'_, T, A> {} #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Drain<'_, T, A> {} use core::iter::TrustedLen; use core::ptr::{self}; use super::{SpecExtend, Vec}; /// Another specialization trait for Vec::from_iter /// necessary to manually prioritize overlapping specializations /// see [`SpecFromIter`](super::SpecFromIter) for details. pub(super) trait SpecFromIterNested { fn from_iter(iter: I) -> Self; } impl SpecFromIterNested for Vec where I: Iterator, { default fn from_iter(mut iterator: I) -> Self { // Unroll the first iteration, as the vector is going to be // expanded on this iteration in every case when the iterable is not // empty, but the loop in extend_desugared() is not going to see the // vector being full in the few subsequent loop iterations. // So we get better branch prediction. let mut vector = match iterator.next() { None => return Vec::new(), Some(element) => { let (lower, _) = iterator.size_hint(); let mut vector = Vec::with_capacity(lower.saturating_add(1)); unsafe { ptr::write(vector.as_mut_ptr(), element); vector.set_len(1); } vector } }; // must delegate to spec_extend() since extend() itself delegates // to spec_from for empty Vecs as SpecExtend>::spec_extend(&mut vector, iterator); vector } } impl SpecFromIterNested for Vec where I: TrustedLen, { fn from_iter(iterator: I) -> Self { let mut vector = match iterator.size_hint() { (_, Some(upper)) => Vec::with_capacity(upper), // TrustedLen contract guarantees that `size_hint() == (_, None)` means that there // are more than `usize::MAX` elements. // Since the previous branch would eagerly panic if the capacity is too large // (via `with_capacity`) we do the same here. _ => panic!("capacity overflow"), }; // reuse extend specialization for TrustedLen vector.spec_extend(iterator); vector } } use crate::boxed::Box; #[rustc_specialization_trait] pub(super) unsafe trait IsZero { /// Whether this value is zero fn is_zero(&self) -> bool; } macro_rules! impl_is_zero { ($t:ty, $is_zero:expr) => { unsafe impl IsZero for $t { #[inline] fn is_zero(&self) -> bool { $is_zero(*self) } } }; } impl_is_zero!(i16, |x| x == 0); impl_is_zero!(i32, |x| x == 0); impl_is_zero!(i64, |x| x == 0); impl_is_zero!(i128, |x| x == 0); impl_is_zero!(isize, |x| x == 0); impl_is_zero!(u16, |x| x == 0); impl_is_zero!(u32, |x| x == 0); impl_is_zero!(u64, |x| x == 0); impl_is_zero!(u128, |x| x == 0); impl_is_zero!(usize, |x| x == 0); impl_is_zero!(bool, |x| x == false); impl_is_zero!(char, |x| x == '\0'); impl_is_zero!(f32, |x: f32| x.to_bits() == 0); impl_is_zero!(f64, |x: f64| x.to_bits() == 0); unsafe impl IsZero for *const T { #[inline] fn is_zero(&self) -> bool { (*self).is_null() } } unsafe impl IsZero for *mut T { #[inline] fn is_zero(&self) -> bool { (*self).is_null() } } // `Option<&T>` and `Option>` are guaranteed to represent `None` as null. // For fat pointers, the bytes that would be the pointer metadata in the `Some` // variant are padding in the `None` variant, so ignoring them and // zero-initializing instead is ok. // `Option<&mut T>` never implements `Clone`, so there's no need for an impl of // `SpecFromElem`. 
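// Sanity-check sketch of the layout guarantee relied on here (a
// hypothetical test, not part of the original file): `Option<&T>` is
// subject to the null-pointer optimization, so `None` is the all-zero
// bit pattern and the type stays pointer-sized.
#[cfg(test)]
mod option_niche_layout_sketch {
    #[test]
    fn option_ref_is_pointer_sized() {
        use core::mem::size_of;
        assert_eq!(size_of::<Option<&u8>>(), size_of::<&u8>());
    }
}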
unsafe impl IsZero for Option<&T> { #[inline] fn is_zero(&self) -> bool { self.is_none() } } unsafe impl IsZero for Option> { #[inline] fn is_zero(&self) -> bool { self.is_none() } } use core::mem::ManuallyDrop; use core::ptr::{self}; use core::slice::{self}; use super::{IntoIter, SpecExtend, SpecFromIterNested, Vec}; /// Specialization trait used for Vec::from_iter /// /// ## The delegation graph: /// /// ```text /// +-------------+ /// |FromIterator | /// +-+-----------+ /// | /// v /// +-+-------------------------------+ +---------------------+ /// |SpecFromIter +---->+SpecFromIterNested | /// |where I: | | |where I: | /// | Iterator (default)----------+ | | Iterator (default) | /// | vec::IntoIter | | | TrustedLen | /// | SourceIterMarker---fallback-+ | | | /// | slice::Iter | | | /// | Iterator | +---------------------+ /// +---------------------------------+ /// ``` pub(super) trait SpecFromIter { fn from_iter(iter: I) -> Self; } impl SpecFromIter for Vec where I: Iterator, { default fn from_iter(iterator: I) -> Self { SpecFromIterNested::from_iter(iterator) } } impl SpecFromIter> for Vec { fn from_iter(iterator: IntoIter) -> Self { // A common case is passing a vector into a function which immediately // re-collects into a vector. We can short circuit this if the IntoIter // has not been advanced at all. // When it has been advanced We can also reuse the memory and move the data to the front. // But we only do so when the resulting Vec wouldn't have more unused capacity // than creating it through the generic FromIterator implementation would. That limitation // is not strictly necessary as Vec's allocation behavior is intentionally unspecified. // But it is a conservative choice. let has_advanced = iterator.buf.as_ptr() as *const _ != iterator.ptr; if !has_advanced || iterator.len() >= iterator.cap / 2 { unsafe { let it = ManuallyDrop::new(iterator); if has_advanced { ptr::copy(it.ptr, it.buf.as_ptr(), it.len()); } return Vec::from_raw_parts(it.buf.as_ptr(), it.len(), it.cap); } } let mut vec = Vec::new(); // must delegate to spec_extend() since extend() itself delegates // to spec_from for empty Vecs vec.spec_extend(iterator); vec } } impl<'a, T: 'a, I> SpecFromIter<&'a T, I> for Vec where I: Iterator, T: Clone, { default fn from_iter(iterator: I) -> Self { SpecFromIter::from_iter(iterator.cloned()) } } // This utilizes `iterator.as_slice().to_vec()` since spec_extend // must take more steps to reason about the final capacity + length // and thus do more work. `to_vec()` directly allocates the correct amount // and fills it exactly. impl<'a, T: 'a + Clone> SpecFromIter<&'a T, slice::Iter<'a, T>> for Vec { #[cfg(not(test))] fn from_iter(iterator: slice::Iter<'a, T>) -> Self { iterator.as_slice().to_vec() } // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is // required for this method definition, is not available. Instead use the // `slice::to_vec` function which is only available with cfg(test) // NB see the slice::hack module in slice.rs for more information #[cfg(test)] fn from_iter(iterator: slice::Iter<'a, T>) -> Self { crate::slice::to_vec(iterator.as_slice(), crate::alloc::Global) } } use core::iter::{InPlaceIterable, SourceIter, TrustedRandomAccess}; use core::mem::{self, ManuallyDrop}; use core::ptr::{self}; use super::{AsIntoIter, InPlaceDrop, SpecFromIter, SpecFromIterNested, Vec}; /// Specialization marker for collecting an iterator pipeline into a Vec while reusing the /// source allocation, i.e. executing the pipeline in place. 
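///
/// A pipeline that can qualify for this specialization looks like the
/// sketch below; the final `collect` may write into the allocation of
/// the original vector instead of allocating a new one:
///
/// ```
/// let doubled: Vec<i32> = vec![1, 2, 3].into_iter().map(|x| x * 2).collect();
/// assert_eq!(doubled, [2, 4, 6]);
/// ```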
/// /// The SourceIter parent trait is necessary for the specializing function to access the allocation /// which is to be reused. But it is not sufficient for the specialization to be valid. See /// additional bounds on the impl. #[rustc_unsafe_specialization_marker] pub(super) trait SourceIterMarker: SourceIter {} // The std-internal SourceIter/InPlaceIterable traits are only implemented by chains of // Adapter>> (all owned by core/std). Additional bounds // on the adapter implementations (beyond `impl Trait for Adapter`) only depend on other // traits already marked as specialization traits (Copy, TrustedRandomAccess, FusedIterator). // I.e. the marker does not depend on lifetimes of user-supplied types. Modulo the Copy hole, which // several other specializations already depend on. impl SourceIterMarker for T where T: SourceIter + InPlaceIterable {} impl SpecFromIter for Vec where I: Iterator + SourceIterMarker, { default fn from_iter(mut iterator: I) -> Self { // Additional requirements which cannot expressed via trait bounds. We rely on const eval // instead: // a) no ZSTs as there would be no allocation to reuse and pointer arithmetic would panic // b) size match as required by Alloc contract // c) alignments match as required by Alloc contract if mem::size_of::() == 0 || mem::size_of::() != mem::size_of::<<::Source as AsIntoIter>::Item>() || mem::align_of::() != mem::align_of::<<::Source as AsIntoIter>::Item>() { // fallback to more generic implementations return SpecFromIterNested::from_iter(iterator); } let (src_buf, src_ptr, dst_buf, dst_end, cap) = unsafe { let inner = iterator.as_inner().as_into_iter(); ( inner.buf.as_ptr(), inner.ptr, inner.buf.as_ptr() as *mut T, inner.end as *const T, inner.cap, ) }; let len = SpecInPlaceCollect::collect_in_place(&mut iterator, dst_buf, dst_end); let src = unsafe { iterator.as_inner().as_into_iter() }; // check if SourceIter contract was upheld // caveat: if they weren't we may not even make it to this point debug_assert_eq!(src_buf, src.buf.as_ptr()); // check InPlaceIterable contract. This is only possible if the iterator advanced the // source pointer at all. If it uses unchecked access via TrustedRandomAccess // then the source pointer will stay in its initial position and we can't use it as reference if src.ptr != src_ptr { debug_assert!( unsafe { dst_buf.add(len) as *const _ } <= src.ptr, "InPlaceIterable contract violation, write pointer advanced beyond read pointer" ); } // drop any remaining values at the tail of the source // but prevent drop of the allocation itself once IntoIter goes out of scope // if the drop panics then we also leak any elements collected into dst_buf src.forget_allocation_drop_remaining(); let vec = unsafe { Vec::from_raw_parts(dst_buf, len, cap) }; vec } } fn write_in_place_with_drop( src_end: *const T, ) -> impl FnMut(InPlaceDrop, T) -> Result, !> { move |mut sink, item| { unsafe { // the InPlaceIterable contract cannot be verified precisely here since // try_fold has an exclusive reference to the source pointer // all we can do is check if it's still in range debug_assert!(sink.dst as *const _ <= src_end, "InPlaceIterable contract violation"); ptr::write(sink.dst, item); sink.dst = sink.dst.add(1); } Ok(sink) } } /// Helper trait to hold specialized implementations of the in-place iterate-collect loop trait SpecInPlaceCollect: Iterator { /// Collects an iterator (`self`) into the destination buffer (`dst`) and returns the number of items /// collected. 
`end` is the last writable element of the allocation and used for bounds checks. fn collect_in_place(&mut self, dst: *mut T, end: *const T) -> usize; } impl SpecInPlaceCollect for I where I: Iterator, { #[inline] default fn collect_in_place(&mut self, dst_buf: *mut T, end: *const T) -> usize { // use try-fold since // - it vectorizes better for some iterator adapters // - unlike most internal iteration methods, it only takes a &mut self // - it lets us thread the write pointer through its innards and get it back in the end let sink = InPlaceDrop { inner: dst_buf, dst: dst_buf }; let sink = self.try_fold::<_, _, Result<_, !>>(sink, write_in_place_with_drop(end)).unwrap(); // iteration succeeded, don't drop head unsafe { ManuallyDrop::new(sink).dst.offset_from(dst_buf) as usize } } } impl SpecInPlaceCollect for I where I: Iterator + TrustedRandomAccess, { #[inline] fn collect_in_place(&mut self, dst_buf: *mut T, end: *const T) -> usize { let len = self.size(); let mut drop_guard = InPlaceDrop { inner: dst_buf, dst: dst_buf }; for i in 0..len { // Safety: InplaceIterable contract guarantees that for every element we read // one slot in the underlying storage will have been freed up and we can immediately // write back the result. unsafe { let dst = dst_buf.offset(i as isize); debug_assert!(dst as *const _ <= end, "InPlaceIterable contract violation"); ptr::write(dst, self.__iterator_get_unchecked(i)); drop_guard.dst = dst.add(1); } } mem::forget(drop_guard); len } } //! A contiguous growable array type with heap-allocated contents, written //! `Vec`. //! //! Vectors have `O(1)` indexing, amortized `O(1)` push (to the end) and //! `O(1)` pop (from the end). //! //! Vectors ensure they never allocate more than `isize::MAX` bytes. //! //! # Examples //! //! You can explicitly create a [`Vec`] with [`Vec::new`]: //! //! ``` //! let v: Vec = Vec::new(); //! ``` //! //! ...or by using the [`vec!`] macro: //! //! ``` //! let v: Vec = vec![]; //! //! let v = vec![1, 2, 3, 4, 5]; //! //! let v = vec![0; 10]; // ten zeroes //! ``` //! //! You can [`push`] values onto the end of a vector (which will grow the vector //! as needed): //! //! ``` //! let mut v = vec![1, 2]; //! //! v.push(3); //! ``` //! //! Popping values works in much the same way: //! //! ``` //! let mut v = vec![1, 2]; //! //! let two = v.pop(); //! ``` //! //! Vectors also support indexing (through the [`Index`] and [`IndexMut`] traits): //! //! ``` //! let mut v = vec![1, 2, 3]; //! let three = v[2]; //! v[1] = v[1] + 5; //! ``` //! //! 
[`push`]: Vec::push #![stable(feature = "rust1", since = "1.0.0")] use core::cmp::{self, Ordering}; use core::convert::TryFrom; use core::fmt; use core::hash::{Hash, Hasher}; use core::intrinsics::{arith_offset, assume}; use core::iter::{self, FromIterator}; use core::marker::PhantomData; use core::mem::{self, ManuallyDrop, MaybeUninit}; use core::ops::{self, Index, IndexMut, Range, RangeBounds}; use core::ptr::{self, NonNull}; use core::slice::{self, SliceIndex}; use crate::alloc::{Allocator, Global}; use crate::borrow::{Cow, ToOwned}; use crate::boxed::Box; use crate::collections::TryReserveError; use crate::raw_vec::RawVec; #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] pub use self::drain_filter::DrainFilter; mod drain_filter; #[stable(feature = "vec_splice", since = "1.21.0")] pub use self::splice::Splice; mod splice; #[stable(feature = "drain", since = "1.6.0")] pub use self::drain::Drain; mod drain; mod cow; pub(crate) use self::into_iter::AsIntoIter; #[stable(feature = "rust1", since = "1.0.0")] pub use self::into_iter::IntoIter; mod into_iter; use self::is_zero::IsZero; mod is_zero; mod source_iter_marker; mod partial_eq; use self::spec_from_elem::SpecFromElem; mod spec_from_elem; use self::set_len_on_drop::SetLenOnDrop; mod set_len_on_drop; use self::in_place_drop::InPlaceDrop; mod in_place_drop; use self::spec_from_iter_nested::SpecFromIterNested; mod spec_from_iter_nested; use self::spec_from_iter::SpecFromIter; mod spec_from_iter; use self::spec_extend::SpecExtend; mod spec_extend; /// A contiguous growable array type, written as `Vec` and pronounced 'vector'. /// /// # Examples /// /// ``` /// let mut vec = Vec::new(); /// vec.push(1); /// vec.push(2); /// /// assert_eq!(vec.len(), 2); /// assert_eq!(vec[0], 1); /// /// assert_eq!(vec.pop(), Some(2)); /// assert_eq!(vec.len(), 1); /// /// vec[0] = 7; /// assert_eq!(vec[0], 7); /// /// vec.extend([1, 2, 3].iter().copied()); /// /// for x in &vec { /// println!("{}", x); /// } /// assert_eq!(vec, [7, 1, 2, 3]); /// ``` /// /// The [`vec!`] macro is provided to make initialization more convenient: /// /// ``` /// let mut vec = vec![1, 2, 3]; /// vec.push(4); /// assert_eq!(vec, [1, 2, 3, 4]); /// ``` /// /// It can also initialize each element of a `Vec` with a given value. /// This may be more efficient than performing allocation and initialization /// in separate steps, especially when initializing a vector of zeros: /// /// ``` /// let vec = vec![0; 5]; /// assert_eq!(vec, [0, 0, 0, 0, 0]); /// /// // The following is equivalent, but potentially slower: /// let mut vec = Vec::with_capacity(5); /// vec.resize(5, 0); /// assert_eq!(vec, [0, 0, 0, 0, 0]); /// ``` /// /// For more information, see /// [Capacity and Reallocation](#capacity-and-reallocation). /// /// Use a `Vec` as an efficient stack: /// /// ``` /// let mut stack = Vec::new(); /// /// stack.push(1); /// stack.push(2); /// stack.push(3); /// /// while let Some(top) = stack.pop() { /// // Prints 3, 2, 1 /// println!("{}", top); /// } /// ``` /// /// # Indexing /// /// The `Vec` type allows to access values by index, because it implements the /// [`Index`] trait. An example will be more explicit: /// /// ``` /// let v = vec![0, 2, 4, 6]; /// println!("{}", v[1]); // it will display '2' /// ``` /// /// However be careful: if you try to access an index which isn't in the `Vec`, /// your software will panic! You cannot do this: /// /// ```should_panic /// let v = vec![0, 2, 4, 6]; /// println!("{}", v[6]); // it will panic! 
/// ``` /// /// Use [`get`] and [`get_mut`] if you want to check whether the index is in /// the `Vec`. /// /// # Slicing /// /// A `Vec` can be mutable. On the other hand, slices are read-only objects. /// To get a [slice][prim@slice], use [`&`]. Example: /// /// ``` /// fn read_slice(slice: &[usize]) { /// // ... /// } /// /// let v = vec![0, 1]; /// read_slice(&v); /// /// // ... and that's all! /// // you can also do it like this: /// let u: &[usize] = &v; /// // or like this: /// let u: &[_] = &v; /// ``` /// /// In Rust, it's more common to pass slices as arguments rather than vectors /// when you just want to provide read access. The same goes for [`String`] and /// [`&str`]. /// /// # Capacity and reallocation /// /// The capacity of a vector is the amount of space allocated for any future /// elements that will be added onto the vector. This is not to be confused with /// the *length* of a vector, which specifies the number of actual elements /// within the vector. If a vector's length exceeds its capacity, its capacity /// will automatically be increased, but its elements will have to be /// reallocated. /// /// For example, a vector with capacity 10 and length 0 would be an empty vector /// with space for 10 more elements. Pushing 10 or fewer elements onto the /// vector will not change its capacity or cause reallocation to occur. However, /// if the vector's length is increased to 11, it will have to reallocate, which /// can be slow. For this reason, it is recommended to use [`Vec::with_capacity`] /// whenever possible to specify how big the vector is expected to get. /// /// # Guarantees /// /// Due to its incredibly fundamental nature, `Vec` makes a lot of guarantees /// about its design. This ensures that it's as low-overhead as possible in /// the general case, and can be correctly manipulated in primitive ways /// by unsafe code. Note that these guarantees refer to an unqualified `Vec`. /// If additional type parameters are added (e.g., to support custom allocators), /// overriding their defaults may change the behavior. /// /// Most fundamentally, `Vec` is and always will be a (pointer, capacity, length) /// triplet. No more, no less. The order of these fields is completely /// unspecified, and you should use the appropriate methods to modify these. /// The pointer will never be null, so this type is null-pointer-optimized. /// /// However, the pointer might not actually point to allocated memory. In particular, /// if you construct a `Vec` with capacity 0 via [`Vec::new`], [`vec![]`][`vec!`], /// [`Vec::with_capacity(0)`][`Vec::with_capacity`], or by calling [`shrink_to_fit`] /// on an empty Vec, it will not allocate memory. Similarly, if you store zero-sized /// types inside a `Vec`, it will not allocate space for them. *Note that in this case /// the `Vec` might not report a [`capacity`] of 0*. `Vec` will allocate if and only /// if [`mem::size_of::`]`() * capacity() > 0`. In general, `Vec`'s allocation /// details are very subtle — if you intend to allocate memory using a `Vec` /// and use it for something else (either to pass to unsafe code, or to build your /// own memory-backed collection), be sure to deallocate this memory by using /// `from_raw_parts` to recover the `Vec` and then dropping it. 
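///
/// A sketch of that reclamation pattern (simplified; real code must keep
/// the pointer, length, and capacity together):
///
/// ```
/// use std::mem::ManuallyDrop;
///
/// let v = vec![1u8, 2, 3];
/// let mut v = ManuallyDrop::new(v);
/// let (ptr, len, cap) = (v.as_mut_ptr(), v.len(), v.capacity());
/// // ... hand `ptr` to code that does not manage the allocation ...
/// // Reclaim the buffer so it is deallocated exactly once:
/// unsafe { drop(Vec::from_raw_parts(ptr, len, cap)); }
/// ```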
/// /// If a `Vec` *has* allocated memory, then the memory it points to is on the heap /// (as defined by the allocator Rust is configured to use by default), and its /// pointer points to [`len`] initialized, contiguous elements in order (what /// you would see if you coerced it to a slice), followed by [`capacity`]` - /// `[`len`] logically uninitialized, contiguous elements. /// /// A vector containing the elements `'a'` and `'b'` with capacity 4 can be /// visualized as below. The top part is the `Vec` struct, it contains a /// pointer to the head of the allocation in the heap, length and capacity. /// The bottom part is the allocation on the heap, a contiguous memory block. /// /// ```text /// ptr len capacity /// +--------+--------+--------+ /// | 0x0123 | 2 | 4 | /// +--------+--------+--------+ /// | /// v /// Heap +--------+--------+--------+--------+ /// | 'a' | 'b' | uninit | uninit | /// +--------+--------+--------+--------+ /// ``` /// /// - **uninit** represents memory that is not initialized, see [`MaybeUninit`]. /// - Note: the ABI is not stable and `Vec` makes no guarantees about its memory /// layout (including the order of fields). /// /// `Vec` will never perform a "small optimization" where elements are actually /// stored on the stack for two reasons: /// /// * It would make it more difficult for unsafe code to correctly manipulate /// a `Vec`. The contents of a `Vec` wouldn't have a stable address if it were /// only moved, and it would be more difficult to determine if a `Vec` had /// actually allocated memory. /// /// * It would penalize the general case, incurring an additional branch /// on every access. /// /// `Vec` will never automatically shrink itself, even if completely empty. This /// ensures no unnecessary allocations or deallocations occur. Emptying a `Vec` /// and then filling it back up to the same [`len`] should incur no calls to /// the allocator. If you wish to free up unused memory, use /// [`shrink_to_fit`] or [`shrink_to`]. /// /// [`push`] and [`insert`] will never (re)allocate if the reported capacity is /// sufficient. [`push`] and [`insert`] *will* (re)allocate if /// [`len`]` == `[`capacity`]. That is, the reported capacity is completely /// accurate, and can be relied on. It can even be used to manually free the memory /// allocated by a `Vec` if desired. Bulk insertion methods *may* reallocate, even /// when not necessary. /// /// `Vec` does not guarantee any particular growth strategy when reallocating /// when full, nor when [`reserve`] is called. The current strategy is basic /// and it may prove desirable to use a non-constant growth factor. Whatever /// strategy is used will of course guarantee *O*(1) amortized [`push`]. /// /// `vec![x; n]`, `vec![a, b, c, d]`, and /// [`Vec::with_capacity(n)`][`Vec::with_capacity`], will all produce a `Vec` /// with exactly the requested capacity. If [`len`]` == `[`capacity`], /// (as is the case for the [`vec!`] macro), then a `Vec` can be converted to /// and from a [`Box<[T]>`][owned slice] without reallocating or moving the elements. /// /// `Vec` will not specifically overwrite any data that is removed from it, /// but also won't specifically preserve it. Its uninitialized memory is /// scratch space that it may use however it wants. It will generally just do /// whatever is most efficient or otherwise easy to implement. Do not rely on /// removed data to be erased for security purposes. Even if you drop a `Vec`, its /// buffer may simply be reused by another `Vec`. 
Even if you zero a `Vec`'s memory
/// first, that might not actually happen because the optimizer does not consider
/// this a side-effect that must be preserved. There is one case which we will
/// not break, however: using `unsafe` code to write to the excess capacity,
/// and then increasing the length to match, is always valid.
///
/// Currently, `Vec` does not guarantee the order in which elements are dropped.
/// The order has changed in the past and may change again.
///
/// [`get`]: ../../std/vec/struct.Vec.html#method.get
/// [`get_mut`]: ../../std/vec/struct.Vec.html#method.get_mut
/// [`String`]: crate::string::String
/// [`&str`]: type@str
/// [`shrink_to_fit`]: Vec::shrink_to_fit
/// [`shrink_to`]: Vec::shrink_to
/// [`capacity`]: Vec::capacity
/// [`mem::size_of::<T>`]: core::mem::size_of
/// [`len`]: Vec::len
/// [`push`]: Vec::push
/// [`insert`]: Vec::insert
/// [`reserve`]: Vec::reserve
/// [`MaybeUninit`]: core::mem::MaybeUninit
/// [owned slice]: Box
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "vec_type")]
pub struct Vec<T, A: Allocator = Global> {
    buf: RawVec<T, A>,
    len: usize,
}

////////////////////////////////////////////////////////////////////////////////
// Inherent methods
////////////////////////////////////////////////////////////////////////////////

impl<T> Vec<T> {
/// Constructs a new, empty `Vec<T>`.
///
/// The vector will not allocate until elements are pushed onto it.
///
/// # Examples
///
/// ```
/// # #![allow(unused_mut)]
/// let mut vec: Vec<i32> = Vec::new();
/// ```
#[inline]
#[rustc_const_stable(feature = "const_vec_new", since = "1.39.0")]
#[stable(feature = "rust1", since = "1.0.0")]
pub const fn new() -> Self {
    Vec { buf: RawVec::NEW, len: 0 }
}

/// Constructs a new, empty `Vec<T>` with the specified capacity.
///
/// The vector will be able to hold exactly `capacity` elements without
/// reallocating. If `capacity` is 0, the vector will not allocate.
///
/// It is important to note that although the returned vector has the
/// *capacity* specified, the vector will have a zero *length*. For an
/// explanation of the difference between length and capacity, see
/// *[Capacity and reallocation]*.
///
/// [Capacity and reallocation]: #capacity-and-reallocation
///
/// # Panics
///
/// Panics if the new capacity exceeds `isize::MAX` bytes.
///
/// # Examples
///
/// ```
/// let mut vec = Vec::with_capacity(10);
///
/// // The vector contains no items, even though it has capacity for more
/// assert_eq!(vec.len(), 0);
/// assert_eq!(vec.capacity(), 10);
///
/// // These are all done without reallocating...
/// for i in 0..10 {
///     vec.push(i);
/// }
/// assert_eq!(vec.len(), 10);
/// assert_eq!(vec.capacity(), 10);
///
/// // ...but this may make the vector reallocate
/// vec.push(11);
/// assert_eq!(vec.len(), 11);
/// assert!(vec.capacity() >= 11);
/// ```
#[inline]
#[doc(alias = "malloc")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn with_capacity(capacity: usize) -> Self {
    Self::with_capacity_in(capacity, Global)
}

/// Creates a `Vec<T>` directly from the raw components of another vector.
///
/// # Safety
///
/// This is highly unsafe, due to the number of invariants that aren't
/// checked:
///
/// * `ptr` needs to have been previously allocated via [`String`]/`Vec<T>`
///   (at least, it's highly likely to be incorrect if it wasn't).
/// * `T` needs to have the same size and alignment as what `ptr` was allocated with.
/// (`T` having a less strict alignment is not sufficient, the alignment really /// needs to be equal to satisfy the [`dealloc`] requirement that memory must be /// allocated and deallocated with the same layout.) /// * `length` needs to be less than or equal to `capacity`. /// * `capacity` needs to be the capacity that the pointer was allocated with. /// /// Violating these may cause problems like corrupting the allocator's /// internal data structures. For example it is **not** safe /// to build a `Vec` from a pointer to a C `char` array with length `size_t`. /// It's also not safe to build one from a `Vec` and its length, because /// the allocator cares about the alignment, and these two types have different /// alignments. The buffer was allocated with alignment 2 (for `u16`), but after /// turning it into a `Vec` it'll be deallocated with alignment 1. /// /// The ownership of `ptr` is effectively transferred to the /// `Vec` which may then deallocate, reallocate or change the /// contents of memory pointed to by the pointer at will. Ensure /// that nothing else uses the pointer after calling this /// function. /// /// [`String`]: crate::string::String /// [`dealloc`]: crate::alloc::GlobalAlloc::dealloc /// /// # Examples /// /// ``` /// use std::ptr; /// use std::mem; /// /// let v = vec![1, 2, 3]; /// // FIXME Update this when vec_into_raw_parts is stabilized /// // Prevent running `v`'s destructor so we are in complete control /// // of the allocation. /// let mut v = mem::ManuallyDrop::new(v); /// /// // Pull out the various important pieces of information about `v` /// let p = v.as_mut_ptr(); /// let len = v.len(); /// let cap = v.capacity(); /// /// unsafe { /// // Overwrite memory with 4, 5, 6 /// for i in 0..len as isize { /// ptr::write(p.offset(i), 4 + i); /// } /// /// // Put everything back together into a Vec /// let rebuilt = Vec::from_raw_parts(p, len, cap); /// assert_eq!(rebuilt, [4, 5, 6]); /// } /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Self { unsafe { Self::from_raw_parts_in(ptr, length, capacity, Global) } } } impl Vec { /// Constructs a new, empty `Vec`. /// /// The vector will not allocate until elements are pushed onto it. /// /// # Examples /// /// ``` /// #![feature(allocator_api)] /// /// use std::alloc::System; /// /// # #[allow(unused_mut)] /// let mut vec: Vec = Vec::new_in(System); /// ``` #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub const fn new_in(alloc: A) -> Self { Vec { buf: RawVec::new_in(alloc), len: 0 } } /// Constructs a new, empty `Vec` with the specified capacity with the provided /// allocator. /// /// The vector will be able to hold exactly `capacity` elements without /// reallocating. If `capacity` is 0, the vector will not allocate. /// /// It is important to note that although the returned vector has the /// *capacity* specified, the vector will have a zero *length*. For an /// explanation of the difference between length and capacity, see /// *[Capacity and reallocation]*. /// /// [Capacity and reallocation]: #capacity-and-reallocation /// /// # Panics /// /// Panics if the new capacity exceeds `isize::MAX` bytes. 
/// /// # Examples /// /// ``` /// #![feature(allocator_api)] /// /// use std::alloc::System; /// /// let mut vec = Vec::with_capacity_in(10, System); /// /// // The vector contains no items, even though it has capacity for more /// assert_eq!(vec.len(), 0); /// assert_eq!(vec.capacity(), 10); /// /// // These are all done without reallocating... /// for i in 0..10 { /// vec.push(i); /// } /// assert_eq!(vec.len(), 10); /// assert_eq!(vec.capacity(), 10); /// /// // ...but this may make the vector reallocate /// vec.push(11); /// assert_eq!(vec.len(), 11); /// assert!(vec.capacity() >= 11); /// ``` #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { Vec { buf: RawVec::with_capacity_in(capacity, alloc), len: 0 } } /// Creates a `Vec` directly from the raw components of another vector. /// /// # Safety /// /// This is highly unsafe, due to the number of invariants that aren't /// checked: /// /// * `ptr` needs to have been previously allocated via [`String`]/`Vec` /// (at least, it's highly likely to be incorrect if it wasn't). /// * `T` needs to have the same size and alignment as what `ptr` was allocated with. /// (`T` having a less strict alignment is not sufficient, the alignment really /// needs to be equal to satisfy the [`dealloc`] requirement that memory must be /// allocated and deallocated with the same layout.) /// * `length` needs to be less than or equal to `capacity`. /// * `capacity` needs to be the capacity that the pointer was allocated with. /// /// Violating these may cause problems like corrupting the allocator's /// internal data structures. For example it is **not** safe /// to build a `Vec` from a pointer to a C `char` array with length `size_t`. /// It's also not safe to build one from a `Vec` and its length, because /// the allocator cares about the alignment, and these two types have different /// alignments. The buffer was allocated with alignment 2 (for `u16`), but after /// turning it into a `Vec` it'll be deallocated with alignment 1. /// /// The ownership of `ptr` is effectively transferred to the /// `Vec` which may then deallocate, reallocate or change the /// contents of memory pointed to by the pointer at will. Ensure /// that nothing else uses the pointer after calling this /// function. /// /// [`String`]: crate::string::String /// [`dealloc`]: crate::alloc::GlobalAlloc::dealloc /// /// # Examples /// /// ``` /// #![feature(allocator_api)] /// /// use std::alloc::System; /// /// use std::ptr; /// use std::mem; /// /// let mut v = Vec::with_capacity_in(3, System); /// v.push(1); /// v.push(2); /// v.push(3); /// // FIXME Update this when vec_into_raw_parts is stabilized /// // Prevent running `v`'s destructor so we are in complete control /// // of the allocation. 
/// let mut v = mem::ManuallyDrop::new(v); /// /// // Pull out the various important pieces of information about `v` /// let p = v.as_mut_ptr(); /// let len = v.len(); /// let cap = v.capacity(); /// let alloc = v.allocator(); /// /// unsafe { /// // Overwrite memory with 4, 5, 6 /// for i in 0..len as isize { /// ptr::write(p.offset(i), 4 + i); /// } /// /// // Put everything back together into a Vec /// let rebuilt = Vec::from_raw_parts_in(p, len, cap, alloc.clone()); /// assert_eq!(rebuilt, [4, 5, 6]); /// } /// ``` #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn from_raw_parts_in(ptr: *mut T, length: usize, capacity: usize, alloc: A) -> Self { unsafe { Vec { buf: RawVec::from_raw_parts_in(ptr, capacity, alloc), len: length } } } /// Decomposes a `Vec` into its raw components. /// /// Returns the raw pointer to the underlying data, the length of /// the vector (in elements), and the allocated capacity of the /// data (in elements). These are the same arguments in the same /// order as the arguments to [`from_raw_parts`]. /// /// After calling this function, the caller is responsible for the /// memory previously managed by the `Vec`. The only way to do /// this is to convert the raw pointer, length, and capacity back /// into a `Vec` with the [`from_raw_parts`] function, allowing /// the destructor to perform the cleanup. /// /// [`from_raw_parts`]: Vec::from_raw_parts /// /// # Examples /// /// ``` /// #![feature(vec_into_raw_parts)] /// let v: Vec = vec![-1, 0, 1]; /// /// let (ptr, len, cap) = v.into_raw_parts(); /// /// let rebuilt = unsafe { /// // We can now make changes to the components, such as /// // transmuting the raw pointer to a compatible type. /// let ptr = ptr as *mut u32; /// /// Vec::from_raw_parts(ptr, len, cap) /// }; /// assert_eq!(rebuilt, [4294967295, 0, 1]); /// ``` #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")] pub fn into_raw_parts(self) -> (*mut T, usize, usize) { let mut me = ManuallyDrop::new(self); (me.as_mut_ptr(), me.len(), me.capacity()) } /// Decomposes a `Vec` into its raw components. /// /// Returns the raw pointer to the underlying data, the length of the vector (in elements), /// the allocated capacity of the data (in elements), and the allocator. These are the same /// arguments in the same order as the arguments to [`from_raw_parts_in`]. /// /// After calling this function, the caller is responsible for the /// memory previously managed by the `Vec`. The only way to do /// this is to convert the raw pointer, length, and capacity back /// into a `Vec` with the [`from_raw_parts_in`] function, allowing /// the destructor to perform the cleanup. /// /// [`from_raw_parts_in`]: Vec::from_raw_parts_in /// /// # Examples /// /// ``` /// #![feature(allocator_api, vec_into_raw_parts)] /// /// use std::alloc::System; /// /// let mut v: Vec = Vec::new_in(System); /// v.push(-1); /// v.push(0); /// v.push(1); /// /// let (ptr, len, cap, alloc) = v.into_raw_parts_with_alloc(); /// /// let rebuilt = unsafe { /// // We can now make changes to the components, such as /// // transmuting the raw pointer to a compatible type. 
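/// // (`i32` and `u32` have identical size and alignment, so the layout
/// // requirements of `from_raw_parts_in` are still satisfied after the cast.)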
/// let ptr = ptr as *mut u32; /// /// Vec::from_raw_parts_in(ptr, len, cap, alloc) /// }; /// assert_eq!(rebuilt, [4294967295, 0, 1]); /// ``` #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")] pub fn into_raw_parts_with_alloc(self) -> (*mut T, usize, usize, A) { let mut me = ManuallyDrop::new(self); let len = me.len(); let capacity = me.capacity(); let ptr = me.as_mut_ptr(); let alloc = unsafe { ptr::read(me.allocator()) }; (ptr, len, capacity, alloc) } /// Returns the number of elements the vector can hold without /// reallocating. /// /// # Examples /// /// ``` /// let vec: Vec = Vec::with_capacity(10); /// assert_eq!(vec.capacity(), 10); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn capacity(&self) -> usize { self.buf.capacity() } /// Reserves capacity for at least `additional` more elements to be inserted /// in the given `Vec`. The collection may reserve more space to avoid /// frequent reallocations. After calling `reserve`, capacity will be /// greater than or equal to `self.len() + additional`. Does nothing if /// capacity is already sufficient. /// /// # Panics /// /// Panics if the new capacity exceeds `isize::MAX` bytes. /// /// # Examples /// /// ``` /// let mut vec = vec![1]; /// vec.reserve(10); /// assert!(vec.capacity() >= 11); /// ``` #[doc(alias = "realloc")] #[stable(feature = "rust1", since = "1.0.0")] pub fn reserve(&mut self, additional: usize) { self.buf.reserve(self.len, additional); } /// Reserves the minimum capacity for exactly `additional` more elements to /// be inserted in the given `Vec`. After calling `reserve_exact`, /// capacity will be greater than or equal to `self.len() + additional`. /// Does nothing if the capacity is already sufficient. /// /// Note that the allocator may give the collection more space than it /// requests. Therefore, capacity can not be relied upon to be precisely /// minimal. Prefer `reserve` if future insertions are expected. /// /// # Panics /// /// Panics if the new capacity overflows `usize`. /// /// # Examples /// /// ``` /// let mut vec = vec![1]; /// vec.reserve_exact(10); /// assert!(vec.capacity() >= 11); /// ``` #[doc(alias = "realloc")] #[stable(feature = "rust1", since = "1.0.0")] pub fn reserve_exact(&mut self, additional: usize) { self.buf.reserve_exact(self.len, additional); } /// Tries to reserve capacity for at least `additional` more elements to be inserted /// in the given `Vec`. The collection may reserve more space to avoid /// frequent reallocations. After calling `try_reserve`, capacity will be /// greater than or equal to `self.len() + additional`. Does nothing if /// capacity is already sufficient. /// /// # Errors /// /// If the capacity overflows, or the allocator reports a failure, then an error /// is returned. 
///
/// # Examples
///
/// ```
/// #![feature(try_reserve)]
/// use std::collections::TryReserveError;
///
/// fn process_data(data: &[u32]) -> Result<Vec<u32>, TryReserveError> {
///     let mut output = Vec::new();
///
///     // Pre-reserve the memory, exiting if we can't
///     output.try_reserve(data.len())?;
///
///     // Now we know this can't OOM in the middle of our complex work
///     output.extend(data.iter().map(|&val| {
///         val * 2 + 5 // very complicated
///     }));
///
///     Ok(output)
/// }
/// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
/// ```
#[doc(alias = "realloc")]
#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
    self.buf.try_reserve(self.len, additional)
}

/// Tries to reserve the minimum capacity for exactly `additional`
/// elements to be inserted in the given `Vec<T>`. After calling
/// `try_reserve_exact`, capacity will be greater than or equal to
/// `self.len() + additional` if it returns `Ok(())`.
/// Does nothing if the capacity is already sufficient.
///
/// Note that the allocator may give the collection more space than it
/// requests. Therefore, capacity cannot be relied upon to be precisely
/// minimal. Prefer `reserve` if future insertions are expected.
///
/// # Errors
///
/// If the capacity overflows, or the allocator reports a failure, then an error
/// is returned.
///
/// # Examples
///
/// ```
/// #![feature(try_reserve)]
/// use std::collections::TryReserveError;
///
/// fn process_data(data: &[u32]) -> Result<Vec<u32>, TryReserveError> {
///     let mut output = Vec::new();
///
///     // Pre-reserve the memory, exiting if we can't
///     output.try_reserve_exact(data.len())?;
///
///     // Now we know this can't OOM in the middle of our complex work
///     output.extend(data.iter().map(|&val| {
///         val * 2 + 5 // very complicated
///     }));
///
///     Ok(output)
/// }
/// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
/// ```
#[doc(alias = "realloc")]
#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
    self.buf.try_reserve_exact(self.len, additional)
}

/// Shrinks the capacity of the vector as much as possible.
///
/// It will drop down as close as possible to the length but the allocator
/// may still inform the vector that there is space for a few more elements.
///
/// # Examples
///
/// ```
/// let mut vec = Vec::with_capacity(10);
/// vec.extend([1, 2, 3].iter().cloned());
/// assert_eq!(vec.capacity(), 10);
/// vec.shrink_to_fit();
/// assert!(vec.capacity() >= 3);
/// ```
#[doc(alias = "realloc")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn shrink_to_fit(&mut self) {
    // The capacity is never less than the length, and there's nothing to do when
    // they are equal, so we can avoid the panic case in `RawVec::shrink_to_fit`
    // by only calling it with a greater capacity.
    if self.capacity() > self.len {
        self.buf.shrink_to_fit(self.len);
    }
}

/// Shrinks the capacity of the vector with a lower bound.
///
/// The capacity will remain at least as large as both the length
/// and the supplied value.
///
/// If the current capacity is less than the lower limit, this is a no-op.
/// /// # Examples /// /// ``` /// #![feature(shrink_to)] /// let mut vec = Vec::with_capacity(10); /// vec.extend([1, 2, 3].iter().cloned()); /// assert_eq!(vec.capacity(), 10); /// vec.shrink_to(4); /// assert!(vec.capacity() >= 4); /// vec.shrink_to(0); /// assert!(vec.capacity() >= 3); /// ``` #[doc(alias = "realloc")] #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")] pub fn shrink_to(&mut self, min_capacity: usize) { if self.capacity() > min_capacity { self.buf.shrink_to_fit(cmp::max(self.len, min_capacity)); } } /// Converts the vector into [`Box<[T]>`][owned slice]. /// /// Note that this will drop any excess capacity. /// /// [owned slice]: Box /// /// # Examples /// /// ``` /// let v = vec![1, 2, 3]; /// /// let slice = v.into_boxed_slice(); /// ``` /// /// Any excess capacity is removed: /// /// ``` /// let mut vec = Vec::with_capacity(10); /// vec.extend([1, 2, 3].iter().cloned()); /// /// assert_eq!(vec.capacity(), 10); /// let slice = vec.into_boxed_slice(); /// assert_eq!(slice.into_vec().capacity(), 3); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn into_boxed_slice(mut self) -> Box<[T], A> { unsafe { self.shrink_to_fit(); let me = ManuallyDrop::new(self); let buf = ptr::read(&me.buf); let len = me.len(); buf.into_box(len).assume_init() } } /// Shortens the vector, keeping the first `len` elements and dropping /// the rest. /// /// If `len` is greater than the vector's current length, this has no /// effect. /// /// The [`drain`] method can emulate `truncate`, but causes the excess /// elements to be returned instead of dropped. /// /// Note that this method has no effect on the allocated capacity /// of the vector. /// /// # Examples /// /// Truncating a five element vector to two elements: /// /// ``` /// let mut vec = vec![1, 2, 3, 4, 5]; /// vec.truncate(2); /// assert_eq!(vec, [1, 2]); /// ``` /// /// No truncation occurs when `len` is greater than the vector's current /// length: /// /// ``` /// let mut vec = vec![1, 2, 3]; /// vec.truncate(8); /// assert_eq!(vec, [1, 2, 3]); /// ``` /// /// Truncating when `len == 0` is equivalent to calling the [`clear`] /// method. /// /// ``` /// let mut vec = vec![1, 2, 3]; /// vec.truncate(0); /// assert_eq!(vec, []); /// ``` /// /// [`clear`]: Vec::clear /// [`drain`]: Vec::drain #[stable(feature = "rust1", since = "1.0.0")] pub fn truncate(&mut self, len: usize) { // This is safe because: // // * the slice passed to `drop_in_place` is valid; the `len > self.len` // case avoids creating an invalid slice, and // * the `len` of the vector is shrunk before calling `drop_in_place`, // such that no value will be dropped twice in case `drop_in_place` // were to panic once (if it panics twice, the program aborts). unsafe { // Note: It's intentional that this is `>` and not `>=`. // Changing it to `>=` has negative performance // implications in some cases. See #78884 for more. if len > self.len { return; } let remaining_len = self.len - len; let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len); self.len = len; ptr::drop_in_place(s); } } /// Extracts a slice containing the entire vector. /// /// Equivalent to `&s[..]`. /// /// # Examples /// /// ``` /// use std::io::{self, Write}; /// let buffer = vec![1, 2, 3, 5, 8]; /// io::sink().write(buffer.as_slice()).unwrap(); /// ``` #[inline] #[stable(feature = "vec_as_slice", since = "1.7.0")] pub fn as_slice(&self) -> &[T] { self } /// Extracts a mutable slice of the entire vector. /// /// Equivalent to `&mut s[..]`. 
/// /// # Examples /// /// ``` /// use std::io::{self, Read}; /// let mut buffer = vec![0; 3]; /// io::repeat(0b101).read_exact(buffer.as_mut_slice()).unwrap(); /// ``` #[inline] #[stable(feature = "vec_as_slice", since = "1.7.0")] pub fn as_mut_slice(&mut self) -> &mut [T] { self } /// Returns a raw pointer to the vector's buffer. /// /// The caller must ensure that the vector outlives the pointer this /// function returns, or else it will end up pointing to garbage. /// Modifying the vector may cause its buffer to be reallocated, /// which would also make any pointers to it invalid. /// /// The caller must also ensure that the memory the pointer (non-transitively) points to /// is never written to (except inside an `UnsafeCell`) using this pointer or any pointer /// derived from it. If you need to mutate the contents of the slice, use [`as_mut_ptr`]. /// /// # Examples /// /// ``` /// let x = vec![1, 2, 4]; /// let x_ptr = x.as_ptr(); /// /// unsafe { /// for i in 0..x.len() { /// assert_eq!(*x_ptr.add(i), 1 << i); /// } /// } /// ``` /// /// [`as_mut_ptr`]: Vec::as_mut_ptr #[stable(feature = "vec_as_ptr", since = "1.37.0")] #[inline] pub fn as_ptr(&self) -> *const T { // We shadow the slice method of the same name to avoid going through // `deref`, which creates an intermediate reference. let ptr = self.buf.ptr(); unsafe { assume(!ptr.is_null()); } ptr } /// Returns an unsafe mutable pointer to the vector's buffer. /// /// The caller must ensure that the vector outlives the pointer this /// function returns, or else it will end up pointing to garbage. /// Modifying the vector may cause its buffer to be reallocated, /// which would also make any pointers to it invalid. /// /// # Examples /// /// ``` /// // Allocate vector big enough for 4 elements. /// let size = 4; /// let mut x: Vec = Vec::with_capacity(size); /// let x_ptr = x.as_mut_ptr(); /// /// // Initialize elements via raw pointer writes, then set length. /// unsafe { /// for i in 0..size { /// *x_ptr.add(i) = i as i32; /// } /// x.set_len(size); /// } /// assert_eq!(&*x, &[0, 1, 2, 3]); /// ``` #[stable(feature = "vec_as_ptr", since = "1.37.0")] #[inline] pub fn as_mut_ptr(&mut self) -> *mut T { // We shadow the slice method of the same name to avoid going through // `deref_mut`, which creates an intermediate reference. let ptr = self.buf.ptr(); unsafe { assume(!ptr.is_null()); } ptr } /// Returns a reference to the underlying allocator. #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn allocator(&self) -> &A { self.buf.allocator() } /// Forces the length of the vector to `new_len`. /// /// This is a low-level operation that maintains none of the normal /// invariants of the type. Normally changing the length of a vector /// is done using one of the safe operations instead, such as /// [`truncate`], [`resize`], [`extend`], or [`clear`]. /// /// [`truncate`]: Vec::truncate /// [`resize`]: Vec::resize /// [`extend`]: Extend::extend /// [`clear`]: Vec::clear /// /// # Safety /// /// - `new_len` must be less than or equal to [`capacity()`]. /// - The elements at `old_len..new_len` must be initialized. /// /// [`capacity()`]: Vec::capacity /// /// # Examples /// /// This method can be useful for situations in which the vector /// is serving as a buffer for other code, particularly over FFI: /// /// ```no_run /// # #![allow(dead_code)] /// # // This is just a minimal skeleton for the doc example; /// # // don't use this as a starting point for a real library. 
/// # pub struct StreamWrapper { strm: *mut std::ffi::c_void } /// # const Z_OK: i32 = 0; /// # extern "C" { /// # fn deflateGetDictionary( /// # strm: *mut std::ffi::c_void, /// # dictionary: *mut u8, /// # dictLength: *mut usize, /// # ) -> i32; /// # } /// # impl StreamWrapper { /// pub fn get_dictionary(&self) -> Option> { /// // Per the FFI method's docs, "32768 bytes is always enough". /// let mut dict = Vec::with_capacity(32_768); /// let mut dict_length = 0; /// // SAFETY: When `deflateGetDictionary` returns `Z_OK`, it holds that: /// // 1. `dict_length` elements were initialized. /// // 2. `dict_length` <= the capacity (32_768) /// // which makes `set_len` safe to call. /// unsafe { /// // Make the FFI call... /// let r = deflateGetDictionary(self.strm, dict.as_mut_ptr(), &mut dict_length); /// if r == Z_OK { /// // ...and update the length to what was initialized. /// dict.set_len(dict_length); /// Some(dict) /// } else { /// None /// } /// } /// } /// # } /// ``` /// /// While the following example is sound, there is a memory leak since /// the inner vectors were not freed prior to the `set_len` call: /// /// ``` /// let mut vec = vec![vec![1, 0, 0], /// vec![0, 1, 0], /// vec![0, 0, 1]]; /// // SAFETY: /// // 1. `old_len..0` is empty so no elements need to be initialized. /// // 2. `0 <= capacity` always holds whatever `capacity` is. /// unsafe { /// vec.set_len(0); /// } /// ``` /// /// Normally, here, one would use [`clear`] instead to correctly drop /// the contents and thus not leak memory. #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub unsafe fn set_len(&mut self, new_len: usize) { debug_assert!(new_len <= self.capacity()); self.len = new_len; } /// Removes an element from the vector and returns it. /// /// The removed element is replaced by the last element of the vector. /// /// This does not preserve ordering, but is O(1). /// /// # Panics /// /// Panics if `index` is out of bounds. /// /// # Examples /// /// ``` /// let mut v = vec!["foo", "bar", "baz", "qux"]; /// /// assert_eq!(v.swap_remove(1), "bar"); /// assert_eq!(v, ["foo", "qux", "baz"]); /// /// assert_eq!(v.swap_remove(0), "foo"); /// assert_eq!(v, ["baz", "qux"]); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn swap_remove(&mut self, index: usize) -> T { #[cold] #[inline(never)] fn assert_failed(index: usize, len: usize) -> ! { panic!("swap_remove index (is {}) should be < len (is {})", index, len); } let len = self.len(); if index >= len { assert_failed(index, len); } unsafe { // We replace self[index] with the last element. Note that if the // bounds check above succeeds there must be a last element (which // can be self[index] itself). let last = ptr::read(self.as_ptr().add(len - 1)); let hole = self.as_mut_ptr().add(index); self.set_len(len - 1); ptr::replace(hole, last) } } /// Inserts an element at position `index` within the vector, shifting all /// elements after it to the right. /// /// # Panics /// /// Panics if `index > len`. /// /// # Examples /// /// ``` /// let mut vec = vec![1, 2, 3]; /// vec.insert(1, 4); /// assert_eq!(vec, [1, 4, 2, 3]); /// vec.insert(4, 5); /// assert_eq!(vec, [1, 4, 2, 3, 5]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn insert(&mut self, index: usize, element: T) { #[cold] #[inline(never)] fn assert_failed(index: usize, len: usize) -> ! 
{ panic!("insertion index (is {}) should be <= len (is {})", index, len); } let len = self.len(); if index > len { assert_failed(index, len); } // space for the new element if len == self.buf.capacity() { self.reserve(1); } unsafe { // infallible // The spot to put the new value { let p = self.as_mut_ptr().add(index); // Shift everything over to make space. (Duplicating the // `index`th element into two consecutive places.) ptr::copy(p, p.offset(1), len - index); // Write it in, overwriting the first copy of the `index`th // element. ptr::write(p, element); } self.set_len(len + 1); } } /// Removes and returns the element at position `index` within the vector, /// shifting all elements after it to the left. /// /// # Panics /// /// Panics if `index` is out of bounds. /// /// # Examples /// /// ``` /// let mut v = vec![1, 2, 3]; /// assert_eq!(v.remove(1), 2); /// assert_eq!(v, [1, 3]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn remove(&mut self, index: usize) -> T { #[cold] #[inline(never)] fn assert_failed(index: usize, len: usize) -> ! { panic!("removal index (is {}) should be < len (is {})", index, len); } let len = self.len(); if index >= len { assert_failed(index, len); } unsafe { // infallible let ret; { // the place we are taking from. let ptr = self.as_mut_ptr().add(index); // copy it out, unsafely having a copy of the value on // the stack and in the vector at the same time. ret = ptr::read(ptr); // Shift everything down to fill in that spot. ptr::copy(ptr.offset(1), ptr, len - index - 1); } self.set_len(len - 1); ret } } /// Retains only the elements specified by the predicate. /// /// In other words, remove all elements `e` such that `f(&e)` returns `false`. /// This method operates in place, visiting each element exactly once in the /// original order, and preserves the order of the retained elements. /// /// # Examples /// /// ``` /// let mut vec = vec![1, 2, 3, 4]; /// vec.retain(|&x| x % 2 == 0); /// assert_eq!(vec, [2, 4]); /// ``` /// /// Because the elements are visited exactly once in the original order, /// external state may be used to decide which elements to keep. /// /// ``` /// let mut vec = vec![1, 2, 3, 4, 5]; /// let keep = [false, true, true, false, true]; /// let mut iter = keep.iter(); /// vec.retain(|_| *iter.next().unwrap()); /// assert_eq!(vec, [2, 3, 5]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn retain(&mut self, mut f: F) where F: FnMut(&T) -> bool, { let original_len = self.len(); // Avoid double drop if the drop guard is not executed, // since we may make some holes during the process. unsafe { self.set_len(0) }; // Vec: [Kept, Kept, Hole, Hole, Hole, Hole, Unchecked, Unchecked] // |<- processed len ->| ^- next to check // |<- deleted cnt ->| // |<- original_len ->| // Kept: Elements which predicate returns true on. // Hole: Moved or dropped element slot. // Unchecked: Unchecked valid elements. // // This drop guard will be invoked when predicate or `drop` of element panicked. // It shifts unchecked elements to cover holes and `set_len` to the correct length. // In cases when predicate and `drop` never panick, it will be optimized out. struct BackshiftOnDrop<'a, T, A: Allocator> { v: &'a mut Vec, processed_len: usize, deleted_cnt: usize, original_len: usize, } impl Drop for BackshiftOnDrop<'_, T, A> { fn drop(&mut self) { if self.deleted_cnt > 0 { // SAFETY: Trailing unchecked items must be valid since we never touch them. 
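// Worked example (illustrative numbers): if original_len = 8,
// processed_len = 6 and deleted_cnt = 2, the two unchecked elements at
// indices 6..8 are copied back to indices 4..6, and the `set_len(8 - 2)`
// below then restores a fully initialized, contiguous prefix.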
unsafe { ptr::copy( self.v.as_ptr().add(self.processed_len), self.v.as_mut_ptr().add(self.processed_len - self.deleted_cnt), self.original_len - self.processed_len, ); } } // SAFETY: After filling holes, all items are in contiguous memory. unsafe { self.v.set_len(self.original_len - self.deleted_cnt); } } } let mut g = BackshiftOnDrop { v: self, processed_len: 0, deleted_cnt: 0, original_len }; while g.processed_len < original_len { // SAFETY: Unchecked element must be valid. let cur = unsafe { &mut *g.v.as_mut_ptr().add(g.processed_len) }; if !f(cur) { // Advance early to avoid double drop if `drop_in_place` panicked. g.processed_len += 1; g.deleted_cnt += 1; // SAFETY: We never touch this element again after dropped. unsafe { ptr::drop_in_place(cur) }; // We already advanced the counter. continue; } if g.deleted_cnt > 0 { // SAFETY: `deleted_cnt` > 0, so the hole slot must not overlap with current element. // We use copy for move, and never touch this element again. unsafe { let hole_slot = g.v.as_mut_ptr().add(g.processed_len - g.deleted_cnt); ptr::copy_nonoverlapping(cur, hole_slot, 1); } } g.processed_len += 1; } // All item are processed. This can be optimized to `set_len` by LLVM. drop(g); } /// Removes all but the first of consecutive elements in the vector that resolve to the same /// key. /// /// If the vector is sorted, this removes all duplicates. /// /// # Examples /// /// ``` /// let mut vec = vec![10, 20, 21, 30, 20]; /// /// vec.dedup_by_key(|i| *i / 10); /// /// assert_eq!(vec, [10, 20, 30, 20]); /// ``` #[stable(feature = "dedup_by", since = "1.16.0")] #[inline] pub fn dedup_by_key(&mut self, mut key: F) where F: FnMut(&mut T) -> K, K: PartialEq, { self.dedup_by(|a, b| key(a) == key(b)) } /// Removes all but the first of consecutive elements in the vector satisfying a given equality /// relation. /// /// The `same_bucket` function is passed references to two elements from the vector and /// must determine if the elements compare equal. The elements are passed in opposite order /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is removed. /// /// If the vector is sorted, this removes all duplicates. /// /// # Examples /// /// ``` /// let mut vec = vec!["foo", "bar", "Bar", "baz", "bar"]; /// /// vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b)); /// /// assert_eq!(vec, ["foo", "bar", "baz", "bar"]); /// ``` #[stable(feature = "dedup_by", since = "1.16.0")] pub fn dedup_by(&mut self, mut same_bucket: F) where F: FnMut(&mut T, &mut T) -> bool, { let len = self.len(); if len <= 1 { return; } /* INVARIANT: vec.len() > read >= write > write-1 >= 0 */ struct FillGapOnDrop<'a, T, A: core::alloc::Allocator> { /* Offset of the element we want to check if it is duplicate */ read: usize, /* Offset of the place where we want to place the non-duplicate * when we find it. */ write: usize, /* The Vec that would need correction if `same_bucket` panicked */ vec: &'a mut Vec, } impl<'a, T, A: core::alloc::Allocator> Drop for FillGapOnDrop<'a, T, A> { fn drop(&mut self) { /* This code gets executed when `same_bucket` panics */ /* SAFETY: invariant guarantees that `read - write` * and `len - read` never overflow and that the copy is always * in-bounds. */ unsafe { let ptr = self.vec.as_mut_ptr(); let len = self.vec.len(); /* How many items were left when `same_bucket` paniced. 
* Basically vec[read..].len() */ let items_left = len.wrapping_sub(self.read); /* Pointer to first item in vec[write..write+items_left] slice */ let dropped_ptr = ptr.add(self.write); /* Pointer to first item in vec[read..] slice */ let valid_ptr = ptr.add(self.read); /* Copy `vec[read..]` to `vec[write..write+items_left]`. * The slices can overlap, so `copy_nonoverlapping` cannot be used */ ptr::copy(valid_ptr, dropped_ptr, items_left); /* How many items have been already dropped * Basically vec[read..write].len() */ let dropped = self.read.wrapping_sub(self.write); self.vec.set_len(len - dropped); } } } let mut gap = FillGapOnDrop { read: 1, write: 1, vec: self }; let ptr = gap.vec.as_mut_ptr(); /* Drop items while going through Vec, it should be more efficient than * doing slice partition_dedup + truncate */ /* SAFETY: Because of the invariant, read_ptr, prev_ptr and write_ptr * are always in-bounds and read_ptr never aliases prev_ptr */ unsafe { while gap.read < len { let read_ptr = ptr.add(gap.read); let prev_ptr = ptr.add(gap.write.wrapping_sub(1)); if same_bucket(&mut *read_ptr, &mut *prev_ptr) { /* We have found duplicate, drop it in-place */ ptr::drop_in_place(read_ptr); } else { let write_ptr = ptr.add(gap.write); /* Because `read_ptr` can be equal to `write_ptr`, we either * have to use `copy` or conditional `copy_nonoverlapping`. * Looks like the first option is faster. */ ptr::copy(read_ptr, write_ptr, 1); /* We have filled that place, so go further */ gap.write += 1; } gap.read += 1; } /* Technically we could let `gap` clean up with its Drop, but * when `same_bucket` is guaranteed to not panic, this bloats a little * the codegen, so we just do it manually */ gap.vec.set_len(gap.write); mem::forget(gap); } } /// Appends an element to the back of a collection. /// /// # Panics /// /// Panics if the new capacity exceeds `isize::MAX` bytes. /// /// # Examples /// /// ``` /// let mut vec = vec![1, 2]; /// vec.push(3); /// assert_eq!(vec, [1, 2, 3]); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn push(&mut self, value: T) { // This will panic or abort if we would allocate > isize::MAX bytes // or if the length increment would overflow for zero-sized types. if self.len == self.buf.capacity() { self.reserve(1); } unsafe { let end = self.as_mut_ptr().add(self.len); ptr::write(end, value); self.len += 1; } } /// Removes the last element from a vector and returns it, or [`None`] if it /// is empty. /// /// # Examples /// /// ``` /// let mut vec = vec![1, 2, 3]; /// assert_eq!(vec.pop(), Some(3)); /// assert_eq!(vec, [1, 2]); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn pop(&mut self) -> Option { if self.len == 0 { None } else { unsafe { self.len -= 1; Some(ptr::read(self.as_ptr().add(self.len()))) } } } /// Moves all the elements of `other` into `Self`, leaving `other` empty. /// /// # Panics /// /// Panics if the number of elements in the vector overflows a `usize`. /// /// # Examples /// /// ``` /// let mut vec = vec![1, 2, 3]; /// let mut vec2 = vec![4, 5, 6]; /// vec.append(&mut vec2); /// assert_eq!(vec, [1, 2, 3, 4, 5, 6]); /// assert_eq!(vec2, []); /// ``` #[inline] #[stable(feature = "append", since = "1.4.0")] pub fn append(&mut self, other: &mut Self) { unsafe { self.append_elements(other.as_slice() as _); other.set_len(0); } } /// Appends elements to `Self` from other buffer. 
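///
/// # Safety
///
/// `other` must point to `(*other).len()` valid elements that do not overlap
/// `self`'s buffer. The elements are bitwise-copied, so the caller must ensure
/// the source values are not used or dropped again (as `append` does by
/// setting `other`'s length to zero afterwards).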
#[inline] unsafe fn append_elements(&mut self, other: *const [T]) { let count = unsafe { (*other).len() }; self.reserve(count); let len = self.len(); unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) }; self.len += count; } /// Creates a draining iterator that removes the specified range in the vector /// and yields the removed items. /// /// When the iterator **is** dropped, all elements in the range are removed /// from the vector, even if the iterator was not fully consumed. If the /// iterator **is not** dropped (with [`mem::forget`] for example), it is /// unspecified how many elements are removed. /// /// # Panics /// /// Panics if the starting point is greater than the end point or if /// the end point is greater than the length of the vector. /// /// # Examples /// /// ``` /// let mut v = vec![1, 2, 3]; /// let u: Vec<_> = v.drain(1..).collect(); /// assert_eq!(v, &[1]); /// assert_eq!(u, &[2, 3]); /// /// // A full range clears the vector /// v.drain(..); /// assert_eq!(v, &[]); /// ``` #[stable(feature = "drain", since = "1.6.0")] pub fn drain(&mut self, range: R) -> Drain<'_, T, A> where R: RangeBounds, { // Memory safety // // When the Drain is first created, it shortens the length of // the source vector to make sure no uninitialized or moved-from elements // are accessible at all if the Drain's destructor never gets to run. // // Drain will ptr::read out the values to remove. // When finished, remaining tail of the vec is copied back to cover // the hole, and the vector length is restored to the new length. // let len = self.len(); let Range { start, end } = slice::range(range, ..len); unsafe { // set self.vec length's to start, to be safe in case Drain is leaked self.set_len(start); // Use the borrow in the IterMut to indicate borrowing behavior of the // whole Drain iterator (like &mut T). let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(start), end - start); Drain { tail_start: end, tail_len: len - end, iter: range_slice.iter(), vec: NonNull::from(self), } } } /// Clears the vector, removing all values. /// /// Note that this method has no effect on the allocated capacity /// of the vector. /// /// # Examples /// /// ``` /// let mut v = vec![1, 2, 3]; /// /// v.clear(); /// /// assert!(v.is_empty()); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn clear(&mut self) { self.truncate(0) } /// Returns the number of elements in the vector, also referred to /// as its 'length'. /// /// # Examples /// /// ``` /// let a = vec![1, 2, 3]; /// assert_eq!(a.len(), 3); /// ``` #[doc(alias = "length")] #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn len(&self) -> usize { self.len } /// Returns `true` if the vector contains no elements. /// /// # Examples /// /// ``` /// let mut v = Vec::new(); /// assert!(v.is_empty()); /// /// v.push(1); /// assert!(!v.is_empty()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn is_empty(&self) -> bool { self.len() == 0 } /// Splits the collection into two at the given index. /// /// Returns a newly allocated vector containing the elements in the range /// `[at, len)`. After the call, the original vector will be left containing /// the elements `[0, at)` with its previous capacity unchanged. /// /// # Panics /// /// Panics if `at > len`. 
/// /// # Examples /// /// ``` /// let mut vec = vec![1, 2, 3]; /// let vec2 = vec.split_off(1); /// assert_eq!(vec, [1]); /// assert_eq!(vec2, [2, 3]); /// ``` #[inline] #[must_use = "use `.truncate()` if you don't need the other half"] #[stable(feature = "split_off", since = "1.4.0")] pub fn split_off(&mut self, at: usize) -> Self where A: Clone, { #[cold] #[inline(never)] fn assert_failed(at: usize, len: usize) -> ! { panic!("`at` split index (is {}) should be <= len (is {})", at, len); } if at > self.len() { assert_failed(at, self.len()); } if at == 0 { // the new vector can take over the original buffer and avoid the copy return mem::replace( self, Vec::with_capacity_in(self.capacity(), self.allocator().clone()), ); } let other_len = self.len - at; let mut other = Vec::with_capacity_in(other_len, self.allocator().clone()); // Unsafely `set_len` and copy items to `other`. unsafe { self.set_len(at); other.set_len(other_len); ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other.len()); } other } /// Resizes the `Vec` in-place so that `len` is equal to `new_len`. /// /// If `new_len` is greater than `len`, the `Vec` is extended by the /// difference, with each additional slot filled with the result of /// calling the closure `f`. The return values from `f` will end up /// in the `Vec` in the order they have been generated. /// /// If `new_len` is less than `len`, the `Vec` is simply truncated. /// /// This method uses a closure to create new values on every push. If /// you'd rather [`Clone`] a given value, use [`Vec::resize`]. If you /// want to use the [`Default`] trait to generate values, you can /// pass [`Default::default`] as the second argument. /// /// # Examples /// /// ``` /// let mut vec = vec![1, 2, 3]; /// vec.resize_with(5, Default::default); /// assert_eq!(vec, [1, 2, 3, 0, 0]); /// /// let mut vec = vec![]; /// let mut p = 1; /// vec.resize_with(4, || { p *= 2; p }); /// assert_eq!(vec, [2, 4, 8, 16]); /// ``` #[stable(feature = "vec_resize_with", since = "1.33.0")] pub fn resize_with(&mut self, new_len: usize, f: F) where F: FnMut() -> T, { let len = self.len(); if new_len > len { self.extend_with(new_len - len, ExtendFunc(f)); } else { self.truncate(new_len); } } /// Consumes and leaks the `Vec`, returning a mutable reference to the contents, /// `&'a mut [T]`. Note that the type `T` must outlive the chosen lifetime /// `'a`. If the type has only static references, or none at all, then this /// may be chosen to be `'static`. /// /// This function is similar to the [`leak`][Box::leak] function on [`Box`] /// except that there is no way to recover the leaked memory. /// /// This function is mainly useful for data that lives for the remainder of /// the program's life. Dropping the returned reference will cause a memory /// leak. /// /// # Examples /// /// Simple usage: /// /// ``` /// let x = vec![1, 2, 3]; /// let static_ref: &'static mut [usize] = x.leak(); /// static_ref[0] += 1; /// assert_eq!(static_ref, &[2, 2, 3]); /// ``` #[stable(feature = "vec_leak", since = "1.47.0")] #[inline] pub fn leak<'a>(self) -> &'a mut [T] where A: 'a, { Box::leak(self.into_boxed_slice()) } /// Returns the remaining spare capacity of the vector as a slice of /// `MaybeUninit`. /// /// The returned slice can be used to fill the vector with data (e.g. by /// reading from a file) before marking the data as initialized using the /// [`set_len`] method. 
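///
/// The returned slice begins at index `len` and has `capacity() - len`
/// elements, so it is empty when the vector is already full.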
/// /// [`set_len`]: Vec::set_len /// /// # Examples /// /// ``` /// #![feature(vec_spare_capacity, maybe_uninit_extra)] /// /// // Allocate vector big enough for 10 elements. /// let mut v = Vec::with_capacity(10); /// /// // Fill in the first 3 elements. /// let uninit = v.spare_capacity_mut(); /// uninit[0].write(0); /// uninit[1].write(1); /// uninit[2].write(2); /// /// // Mark the first 3 elements of the vector as being initialized. /// unsafe { /// v.set_len(3); /// } /// /// assert_eq!(&v, &[0, 1, 2]); /// ``` #[unstable(feature = "vec_spare_capacity", issue = "75017")] #[inline] pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit] { // Note: // This method is not implemented in terms of `split_at_spare_mut`, // to prevent invalidation of pointers to the buffer. unsafe { slice::from_raw_parts_mut( self.as_mut_ptr().add(self.len) as *mut MaybeUninit, self.buf.capacity() - self.len, ) } } /// Returns vector content as a slice of `T`, along with the remaining spare /// capacity of the vector as a slice of `MaybeUninit`. /// /// The returned spare capacity slice can be used to fill the vector with data /// (e.g. by reading from a file) before marking the data as initialized using /// the [`set_len`] method. /// /// [`set_len`]: Vec::set_len /// /// Note that this is a low-level API, which should be used with care for /// optimization purposes. If you need to append data to a `Vec` /// you can use [`push`], [`extend`], [`extend_from_slice`], /// [`extend_from_within`], [`insert`], [`append`], [`resize`] or /// [`resize_with`], depending on your exact needs. /// /// [`push`]: Vec::push /// [`extend`]: Vec::extend /// [`extend_from_slice`]: Vec::extend_from_slice /// [`extend_from_within`]: Vec::extend_from_within /// [`insert`]: Vec::insert /// [`append`]: Vec::append /// [`resize`]: Vec::resize /// [`resize_with`]: Vec::resize_with /// /// # Examples /// /// ``` /// #![feature(vec_split_at_spare, maybe_uninit_extra)] /// /// let mut v = vec![1, 1, 2]; /// /// // Reserve additional space big enough for 10 elements. /// v.reserve(10); /// /// let (init, uninit) = v.split_at_spare_mut(); /// let sum = init.iter().copied().sum::(); /// /// // Fill in the next 4 elements. /// uninit[0].write(sum); /// uninit[1].write(sum * 2); /// uninit[2].write(sum * 3); /// uninit[3].write(sum * 4); /// /// // Mark the 4 elements of the vector as being initialized. /// unsafe { /// let len = v.len(); /// v.set_len(len + 4); /// } /// /// assert_eq!(&v, &[1, 1, 2, 4, 8, 12, 16]); /// ``` #[unstable(feature = "vec_split_at_spare", issue = "81944")] #[inline] pub fn split_at_spare_mut(&mut self) -> (&mut [T], &mut [MaybeUninit]) { // SAFETY: // - len is ignored and so never changed let (init, spare, _) = unsafe { self.split_at_spare_mut_with_len() }; (init, spare) } /// Safety: changing returned .2 (&mut usize) is considered the same as calling `.set_len(_)`. /// /// This method provides unique access to all vec parts at once in `extend_from_within`. 
unsafe fn split_at_spare_mut_with_len( &mut self, ) -> (&mut [T], &mut [MaybeUninit], &mut usize) { let Range { start: ptr, end: spare_ptr } = self.as_mut_ptr_range(); let spare_ptr = spare_ptr.cast::>(); let spare_len = self.buf.capacity() - self.len; // SAFETY: // - `ptr` is guaranteed to be valid for `len` elements // - `spare_ptr` is pointing one element past the buffer, so it doesn't overlap with `initialized` unsafe { let initialized = slice::from_raw_parts_mut(ptr, self.len); let spare = slice::from_raw_parts_mut(spare_ptr, spare_len); (initialized, spare, &mut self.len) } } } impl Vec { /// Resizes the `Vec` in-place so that `len` is equal to `new_len`. /// /// If `new_len` is greater than `len`, the `Vec` is extended by the /// difference, with each additional slot filled with `value`. /// If `new_len` is less than `len`, the `Vec` is simply truncated. /// /// This method requires `T` to implement [`Clone`], /// in order to be able to clone the passed value. /// If you need more flexibility (or want to rely on [`Default`] instead of /// [`Clone`]), use [`Vec::resize_with`]. /// /// # Examples /// /// ``` /// let mut vec = vec!["hello"]; /// vec.resize(3, "world"); /// assert_eq!(vec, ["hello", "world", "world"]); /// /// let mut vec = vec![1, 2, 3, 4]; /// vec.resize(2, 0); /// assert_eq!(vec, [1, 2]); /// ``` #[stable(feature = "vec_resize", since = "1.5.0")] pub fn resize(&mut self, new_len: usize, value: T) { let len = self.len(); if new_len > len { self.extend_with(new_len - len, ExtendElement(value)) } else { self.truncate(new_len); } } /// Clones and appends all elements in a slice to the `Vec`. /// /// Iterates over the slice `other`, clones each element, and then appends /// it to this `Vec`. The `other` vector is traversed in-order. /// /// Note that this function is same as [`extend`] except that it is /// specialized to work with slices instead. If and when Rust gets /// specialization this function will likely be deprecated (but still /// available). /// /// # Examples /// /// ``` /// let mut vec = vec![1]; /// vec.extend_from_slice(&[2, 3, 4]); /// assert_eq!(vec, [1, 2, 3, 4]); /// ``` /// /// [`extend`]: Vec::extend #[stable(feature = "vec_extend_from_slice", since = "1.6.0")] pub fn extend_from_slice(&mut self, other: &[T]) { self.spec_extend(other.iter()) } /// Copies elements from `src` range to the end of the vector. /// /// ## Examples /// /// ``` /// let mut vec = vec![0, 1, 2, 3, 4]; /// /// vec.extend_from_within(2..); /// assert_eq!(vec, [0, 1, 2, 3, 4, 2, 3, 4]); /// /// vec.extend_from_within(..2); /// assert_eq!(vec, [0, 1, 2, 3, 4, 2, 3, 4, 0, 1]); /// /// vec.extend_from_within(4..8); /// assert_eq!(vec, [0, 1, 2, 3, 4, 2, 3, 4, 0, 1, 4, 2, 3, 4]); /// ``` #[stable(feature = "vec_extend_from_within", since = "1.53.0")] pub fn extend_from_within(&mut self, src: R) where R: RangeBounds, { let range = slice::range(src, ..self.len()); self.reserve(range.len()); // SAFETY: // - `slice::range` guarantees that the given range is valid for indexing self unsafe { self.spec_extend_from_within(range); } } } // This code generalizes `extend_with_{element,default}`. 
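// Splitting the interface into `next` (by reference) and `last` (by value)
// lets `extend_with` fill the first `n - 1` slots via `next` and move the
// final value into place via `last`, so `ExtendElement` avoids one
// needless clone per call.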
trait ExtendWith { fn next(&mut self) -> T; fn last(self) -> T; } struct ExtendElement(T); impl ExtendWith for ExtendElement { fn next(&mut self) -> T { self.0.clone() } fn last(self) -> T { self.0 } } struct ExtendDefault; impl ExtendWith for ExtendDefault { fn next(&mut self) -> T { Default::default() } fn last(self) -> T { Default::default() } } struct ExtendFunc(F); impl T> ExtendWith for ExtendFunc { fn next(&mut self) -> T { (self.0)() } fn last(mut self) -> T { (self.0)() } } impl Vec { /// Extend the vector by `n` values, using the given generator. fn extend_with>(&mut self, n: usize, mut value: E) { self.reserve(n); unsafe { let mut ptr = self.as_mut_ptr().add(self.len()); // Use SetLenOnDrop to work around bug where compiler // may not realize the store through `ptr` through self.set_len() // don't alias. let mut local_len = SetLenOnDrop::new(&mut self.len); // Write all elements except the last one for _ in 1..n { ptr::write(ptr, value.next()); ptr = ptr.offset(1); // Increment the length in every step in case next() panics local_len.increment_len(1); } if n > 0 { // We can write the last element directly without cloning needlessly ptr::write(ptr, value.last()); local_len.increment_len(1); } // len set by scope guard } } } impl Vec { /// Removes consecutive repeated elements in the vector according to the /// [`PartialEq`] trait implementation. /// /// If the vector is sorted, this removes all duplicates. /// /// # Examples /// /// ``` /// let mut vec = vec![1, 2, 2, 3, 2]; /// /// vec.dedup(); /// /// assert_eq!(vec, [1, 2, 3, 2]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn dedup(&mut self) { self.dedup_by(|a, b| a == b) } } //////////////////////////////////////////////////////////////////////////////// // Internal methods and functions //////////////////////////////////////////////////////////////////////////////// #[doc(hidden)] #[stable(feature = "rust1", since = "1.0.0")] pub fn from_elem(elem: T, n: usize) -> Vec { ::from_elem(elem, n, Global) } #[doc(hidden)] #[unstable(feature = "allocator_api", issue = "32838")] pub fn from_elem_in(elem: T, n: usize, alloc: A) -> Vec { ::from_elem(elem, n, alloc) } trait ExtendFromWithinSpec { /// # Safety /// /// - `src` needs to be valid index /// - `self.capacity() - self.len()` must be `>= src.len()` unsafe fn spec_extend_from_within(&mut self, src: Range); } impl ExtendFromWithinSpec for Vec { default unsafe fn spec_extend_from_within(&mut self, src: Range) { // SAFETY: // - len is increased only after initializing elements let (this, spare, len) = unsafe { self.split_at_spare_mut_with_len() }; // SAFETY: // - caller guaratees that src is a valid index let to_clone = unsafe { this.get_unchecked(src) }; iter::zip(to_clone, spare) .map(|(src, dst)| dst.write(src.clone())) // Note: // - Element was just initialized with `MaybeUninit::write`, so it's ok to increase len // - len is increased after each element to prevent leaks (see issue #82533) .for_each(|_| *len += 1); } } impl ExtendFromWithinSpec for Vec { unsafe fn spec_extend_from_within(&mut self, src: Range) { let count = src.len(); { let (init, spare) = self.split_at_spare_mut(); // SAFETY: // - caller guaratees that `src` is a valid index let source = unsafe { init.get_unchecked(src) }; // SAFETY: // - Both pointers are created from unique slice references (`&mut [_]`) // so they are valid and do not overlap. 
// - Elements are `Copy`, so it's OK to copy them without doing // anything with the original values // - `count` is equal to the len of `source`, so source is valid for // `count` reads // - `.reserve(count)` guarantees that `spare.len() >= count` so spare // is valid for `count` writes unsafe { ptr::copy_nonoverlapping(source.as_ptr(), spare.as_mut_ptr() as _, count) }; } // SAFETY: // - The elements were just initialized by `copy_nonoverlapping` self.len += count; } } //////////////////////////////////////////////////////////////////////////////// // Common trait implementations for Vec //////////////////////////////////////////////////////////////////////////////// #[stable(feature = "rust1", since = "1.0.0")] impl<T, A: Allocator> ops::Deref for Vec<T, A> { type Target = [T]; fn deref(&self) -> &[T] { unsafe { slice::from_raw_parts(self.as_ptr(), self.len) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<T, A: Allocator> ops::DerefMut for Vec<T, A> { fn deref_mut(&mut self) -> &mut [T] { unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: Clone, A: Allocator + Clone> Clone for Vec<T, A> { #[cfg(not(test))] fn clone(&self) -> Self { let alloc = self.allocator().clone(); <[T]>::to_vec_in(&**self, alloc) } // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is // required for this method definition, is not available. Instead use the // `slice::to_vec` function which is only available with cfg(test) // NB see the slice::hack module in slice.rs for more information #[cfg(test)] fn clone(&self) -> Self { let alloc = self.allocator().clone(); crate::slice::to_vec(&**self, alloc) } fn clone_from(&mut self, other: &Self) { // drop anything that will not be overwritten self.truncate(other.len()); // self.len <= other.len due to the truncate above, so the // slices here are always in-bounds. let (init, tail) = other.split_at(self.len()); // reuse the contained values' allocations/resources. self.clone_from_slice(init); self.extend_from_slice(tail); } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: Hash, A: Allocator> Hash for Vec<T, A> { #[inline] fn hash<H: Hasher>(&self, state: &mut H) { Hash::hash(&**self, state) } } #[stable(feature = "rust1", since = "1.0.0")] #[rustc_on_unimplemented( message = "vector indices are of type `usize` or ranges of `usize`", label = "vector indices are of type `usize` or ranges of `usize`" )] impl<T, I: SliceIndex<[T]>, A: Allocator> Index<I> for Vec<T, A> { type Output = I::Output; #[inline] fn index(&self, index: I) -> &Self::Output { Index::index(&**self, index) } } #[stable(feature = "rust1", since = "1.0.0")] #[rustc_on_unimplemented( message = "vector indices are of type `usize` or ranges of `usize`", label = "vector indices are of type `usize` or ranges of `usize`" )] impl<T, I: SliceIndex<[T]>, A: Allocator> IndexMut<I> for Vec<T, A> { #[inline] fn index_mut(&mut self, index: I) -> &mut Self::Output { IndexMut::index_mut(&mut **self, index) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> FromIterator<T> for Vec<T> { #[inline] fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Vec<T> { <Self as SpecFromIter<T, I::IntoIter>>::from_iter(iter.into_iter()) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T, A: Allocator> IntoIterator for Vec<T, A> { type Item = T; type IntoIter = IntoIter<T, A>; /// Creates a consuming iterator, that is, one that moves each value out of /// the vector (from start to end). The vector cannot be used after calling /// this.
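/// Any elements not yielded by the time the iterator is dropped are
/// dropped along with it.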
/// /// # Examples /// /// ``` /// let v = vec!["a".to_string(), "b".to_string()]; /// for s in v.into_iter() { /// // s has type String, not &String /// println!("{}", s); /// } /// ``` #[inline] fn into_iter(self) -> IntoIter { unsafe { let mut me = ManuallyDrop::new(self); let alloc = ptr::read(me.allocator()); let begin = me.as_mut_ptr(); let end = if mem::size_of::() == 0 { arith_offset(begin as *const i8, me.len() as isize) as *const T } else { begin.add(me.len()) as *const T }; let cap = me.buf.capacity(); IntoIter { buf: NonNull::new_unchecked(begin), phantom: PhantomData, cap, alloc, ptr: begin, end, } } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, A: Allocator> IntoIterator for &'a Vec { type Item = &'a T; type IntoIter = slice::Iter<'a, T>; fn into_iter(self) -> slice::Iter<'a, T> { self.iter() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec { type Item = &'a mut T; type IntoIter = slice::IterMut<'a, T>; fn into_iter(self) -> slice::IterMut<'a, T> { self.iter_mut() } } #[stable(feature = "rust1", since = "1.0.0")] impl Extend for Vec { #[inline] fn extend>(&mut self, iter: I) { >::spec_extend(self, iter.into_iter()) } #[inline] fn extend_one(&mut self, item: T) { self.push(item); } #[inline] fn extend_reserve(&mut self, additional: usize) { self.reserve(additional); } } impl Vec { // leaf method to which various SpecFrom/SpecExtend implementations delegate when // they have no further optimizations to apply fn extend_desugared>(&mut self, mut iterator: I) { // This is the case for a general iterator. // // This function should be the moral equivalent of: // // for item in iterator { // self.push(item); // } while let Some(element) = iterator.next() { let len = self.len(); if len == self.capacity() { let (lower, _) = iterator.size_hint(); self.reserve(lower.saturating_add(1)); } unsafe { ptr::write(self.as_mut_ptr().add(len), element); // NB can't overflow since we would have had to alloc the address space self.set_len(len + 1); } } } /// Creates a splicing iterator that replaces the specified range in the vector /// with the given `replace_with` iterator and yields the removed items. /// `replace_with` does not need to be the same length as `range`. /// /// `range` is removed even if the iterator is not consumed until the end. /// /// It is unspecified how many elements are removed from the vector /// if the `Splice` value is leaked. /// /// The input iterator `replace_with` is only consumed when the `Splice` value is dropped. /// /// This is optimal if: /// /// * The tail (elements in the vector after `range`) is empty, /// * or `replace_with` yields fewer or equal elements than `range`’s length /// * or the lower bound of its `size_hint()` is exact. /// /// Otherwise, a temporary vector is allocated and the tail is moved twice. /// /// # Panics /// /// Panics if the starting point is greater than the end point or if /// the end point is greater than the length of the vector. 
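///
/// For example, a range whose start lies beyond the end of the vector
/// panics (an illustrative sketch):
///
/// ```should_panic
/// let mut v = vec![1, 2, 3];
/// v.splice(5.., std::iter::empty::<i32>());
/// ```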
/// /// # Examples /// /// ``` /// let mut v = vec![1, 2, 3]; /// let new = [7, 8]; /// let u: Vec<_> = v.splice(..2, new.iter().cloned()).collect(); /// assert_eq!(v, &[7, 8, 3]); /// assert_eq!(u, &[1, 2]); /// ``` #[inline] #[stable(feature = "vec_splice", since = "1.21.0")] pub fn splice(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoIter, A> where R: RangeBounds, I: IntoIterator, { Splice { drain: self.drain(range), replace_with: replace_with.into_iter() } } /// Creates an iterator which uses a closure to determine if an element should be removed. /// /// If the closure returns true, then the element is removed and yielded. /// If the closure returns false, the element will remain in the vector and will not be yielded /// by the iterator. /// /// Using this method is equivalent to the following code: /// /// ``` /// # let some_predicate = |x: &mut i32| { *x == 2 || *x == 3 || *x == 6 }; /// # let mut vec = vec![1, 2, 3, 4, 5, 6]; /// let mut i = 0; /// while i < vec.len() { /// if some_predicate(&mut vec[i]) { /// let val = vec.remove(i); /// // your code here /// } else { /// i += 1; /// } /// } /// /// # assert_eq!(vec, vec![1, 4, 5]); /// ``` /// /// But `drain_filter` is easier to use. `drain_filter` is also more efficient, /// because it can backshift the elements of the array in bulk. /// /// Note that `drain_filter` also lets you mutate every element in the filter closure, /// regardless of whether you choose to keep or remove it. /// /// # Examples /// /// Splitting an array into evens and odds, reusing the original allocation: /// /// ``` /// #![feature(drain_filter)] /// let mut numbers = vec![1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15]; /// /// let evens = numbers.drain_filter(|x| *x % 2 == 0).collect::>(); /// let odds = numbers; /// /// assert_eq!(evens, vec![2, 4, 6, 8, 14]); /// assert_eq!(odds, vec![1, 3, 5, 9, 11, 13, 15]); /// ``` #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] pub fn drain_filter(&mut self, filter: F) -> DrainFilter<'_, T, F, A> where F: FnMut(&mut T) -> bool, { let old_len = self.len(); // Guard against us getting leaked (leak amplification) unsafe { self.set_len(0); } DrainFilter { vec: self, idx: 0, del: 0, old_len, pred: filter, panic_flag: false } } } /// Extend implementation that copies elements out of references before pushing them onto the Vec. /// /// This implementation is specialized for slice iterators, where it uses [`copy_from_slice`] to /// append the entire slice at once. /// /// [`copy_from_slice`]: slice::copy_from_slice #[stable(feature = "extend_ref", since = "1.2.0")] impl<'a, T: Copy + 'a, A: Allocator + 'a> Extend<&'a T> for Vec { fn extend>(&mut self, iter: I) { self.spec_extend(iter.into_iter()) } #[inline] fn extend_one(&mut self, &item: &'a T) { self.push(item); } #[inline] fn extend_reserve(&mut self, additional: usize) { self.reserve(additional); } } /// Implements comparison of vectors, [lexicographically](core::cmp::Ord#lexicographical-comparison). #[stable(feature = "rust1", since = "1.0.0")] impl PartialOrd for Vec { #[inline] fn partial_cmp(&self, other: &Self) -> Option { PartialOrd::partial_cmp(&**self, &**other) } } #[stable(feature = "rust1", since = "1.0.0")] impl Eq for Vec {} /// Implements ordering of vectors, [lexicographically](core::cmp::Ord#lexicographical-comparison). 
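///
/// For example (an illustrative sketch):
///
/// ```
/// use std::cmp::Ordering;
///
/// assert_eq!(vec![1, 2, 3].cmp(&vec![1, 2, 4]), Ordering::Less);
/// // A shorter prefix compares as less than the longer vector:
/// assert_eq!(vec![1, 2].cmp(&vec![1, 2, 0]), Ordering::Less);
/// ```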
#[stable(feature = "rust1", since = "1.0.0")] impl Ord for Vec { #[inline] fn cmp(&self, other: &Self) -> Ordering { Ord::cmp(&**self, &**other) } } #[stable(feature = "rust1", since = "1.0.0")] unsafe impl<#[may_dangle] T, A: Allocator> Drop for Vec { fn drop(&mut self) { unsafe { // use drop for [T] // use a raw slice to refer to the elements of the vector as weakest necessary type; // could avoid questions of validity in certain cases ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len)) } // RawVec handles deallocation } } #[stable(feature = "rust1", since = "1.0.0")] impl Default for Vec { /// Creates an empty `Vec`. fn default() -> Vec { Vec::new() } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for Vec { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] impl AsRef> for Vec { fn as_ref(&self) -> &Vec { self } } #[stable(feature = "vec_as_mut", since = "1.5.0")] impl AsMut> for Vec { fn as_mut(&mut self) -> &mut Vec { self } } #[stable(feature = "rust1", since = "1.0.0")] impl AsRef<[T]> for Vec { fn as_ref(&self) -> &[T] { self } } #[stable(feature = "vec_as_mut", since = "1.5.0")] impl AsMut<[T]> for Vec { fn as_mut(&mut self) -> &mut [T] { self } } #[stable(feature = "rust1", since = "1.0.0")] impl From<&[T]> for Vec { /// Allocate a `Vec` and fill it by cloning `s`'s items. /// /// # Examples /// /// ``` /// assert_eq!(Vec::from(&[1, 2, 3][..]), vec![1, 2, 3]); /// ``` #[cfg(not(test))] fn from(s: &[T]) -> Vec { s.to_vec() } #[cfg(test)] fn from(s: &[T]) -> Vec { crate::slice::to_vec(s, Global) } } #[stable(feature = "vec_from_mut", since = "1.19.0")] impl From<&mut [T]> for Vec { /// Allocate a `Vec` and fill it by cloning `s`'s items. /// /// # Examples /// /// ``` /// assert_eq!(Vec::from(&mut [1, 2, 3][..]), vec![1, 2, 3]); /// ``` #[cfg(not(test))] fn from(s: &mut [T]) -> Vec { s.to_vec() } #[cfg(test)] fn from(s: &mut [T]) -> Vec { crate::slice::to_vec(s, Global) } } #[stable(feature = "vec_from_array", since = "1.44.0")] impl From<[T; N]> for Vec { #[cfg(not(test))] fn from(s: [T; N]) -> Vec { <[T]>::into_vec(box s) } /// Allocate a `Vec` and move `s`'s items into it. /// /// # Examples /// /// ``` /// assert_eq!(Vec::from([1, 2, 3]), vec![1, 2, 3]); /// ``` #[cfg(test)] fn from(s: [T; N]) -> Vec { crate::slice::into_vec(box s) } } #[stable(feature = "vec_from_cow_slice", since = "1.14.0")] impl<'a, T> From> for Vec where [T]: ToOwned>, { /// Convert a clone-on-write slice into a vector. /// /// If `s` already owns a `Vec`, it will be returned directly. /// If `s` is borrowing a slice, a new `Vec` will be allocated and /// filled by cloning `s`'s items into it. /// /// # Examples /// /// ``` /// # use std::borrow::Cow; /// let o: Cow<[i32]> = Cow::Owned(vec![1, 2, 3]); /// let b: Cow<[i32]> = Cow::Borrowed(&[1, 2, 3]); /// assert_eq!(Vec::from(o), Vec::from(b)); /// ``` fn from(s: Cow<'a, [T]>) -> Vec { s.into_owned() } } // note: test pulls in libstd, which causes errors here #[cfg(not(test))] #[stable(feature = "vec_from_box", since = "1.18.0")] impl From> for Vec { /// Convert a boxed slice into a vector by transferring ownership of /// the existing heap allocation. 
/// /// # Examples /// /// ``` /// let b: Box<[i32]> = vec![1, 2, 3].into_boxed_slice(); /// assert_eq!(Vec::from(b), vec![1, 2, 3]); /// ``` fn from(s: Box<[T], A>) -> Self { s.into_vec() } } // note: test pulls in libstd, which causes errors here #[cfg(not(test))] #[stable(feature = "box_from_vec", since = "1.20.0")] impl From> for Box<[T], A> { /// Convert a vector into a boxed slice. /// /// If `v` has excess capacity, its items will be moved into a /// newly-allocated buffer with exactly the right capacity. /// /// # Examples /// /// ``` /// assert_eq!(Box::from(vec![1, 2, 3]), vec![1, 2, 3].into_boxed_slice()); /// ``` fn from(v: Vec) -> Self { v.into_boxed_slice() } } #[stable(feature = "rust1", since = "1.0.0")] impl From<&str> for Vec { /// Allocate a `Vec` and fill it with a UTF-8 string. /// /// # Examples /// /// ``` /// assert_eq!(Vec::from("123"), vec![b'1', b'2', b'3']); /// ``` fn from(s: &str) -> Vec { From::from(s.as_bytes()) } } #[stable(feature = "array_try_from_vec", since = "1.48.0")] impl TryFrom> for [T; N] { type Error = Vec; /// Gets the entire contents of the `Vec` as an array, /// if its size exactly matches that of the requested array. /// /// # Examples /// /// ``` /// use std::convert::TryInto; /// assert_eq!(vec![1, 2, 3].try_into(), Ok([1, 2, 3])); /// assert_eq!(>::new().try_into(), Ok([])); /// ``` /// /// If the length doesn't match, the input comes back in `Err`: /// ``` /// use std::convert::TryInto; /// let r: Result<[i32; 4], _> = (0..10).collect::>().try_into(); /// assert_eq!(r, Err(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9])); /// ``` /// /// If you're fine with just getting a prefix of the `Vec`, /// you can call [`.truncate(N)`](Vec::truncate) first. /// ``` /// use std::convert::TryInto; /// let mut v = String::from("hello world").into_bytes(); /// v.sort(); /// v.truncate(2); /// let [a, b]: [_; 2] = v.try_into().unwrap(); /// assert_eq!(a, b' '); /// assert_eq!(b, b'd'); /// ``` fn try_from(mut vec: Vec) -> Result<[T; N], Vec> { if vec.len() != N { return Err(vec); } // SAFETY: `.set_len(0)` is always sound. unsafe { vec.set_len(0) }; // SAFETY: A `Vec`'s pointer is always aligned properly, and // the alignment the array needs is the same as the items. // We checked earlier that we have sufficient items. // The items will not double-drop as the `set_len` // tells the `Vec` not to also drop them. let array = unsafe { ptr::read(vec.as_ptr() as *const [T; N]) }; Ok(array) } } // Set the length of the vec when the `SetLenOnDrop` value goes out of scope. // // The idea is: The length field in SetLenOnDrop is a local variable // that the optimizer will see does not alias with any stores through the Vec's data // pointer. This is a workaround for alias analysis issue #32155 pub(super) struct SetLenOnDrop<'a> { len: &'a mut usize, local_len: usize, } impl<'a> SetLenOnDrop<'a> { #[inline] pub(super) fn new(len: &'a mut usize) -> Self { SetLenOnDrop { local_len: *len, len } } #[inline] pub(super) fn increment_len(&mut self, increment: usize) { self.local_len += increment; } } impl Drop for SetLenOnDrop<'_> { #[inline] fn drop(&mut self) { *self.len = self.local_len; } } use crate::alloc::{Allocator, Global}; use core::ptr::{self}; use core::slice::{self}; use super::Vec; /// An iterator which uses a closure to determine if an element should be removed. /// /// This struct is created by [`Vec::drain_filter`]. /// See its documentation for more. 
/// /// # Example /// /// ``` /// #![feature(drain_filter)] /// /// let mut v = vec![0, 1, 2]; /// let iter: std::vec::DrainFilter<_, _> = v.drain_filter(|x| *x % 2 == 0); /// ``` #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] #[derive(Debug)] pub struct DrainFilter< 'a, T, F, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, > where F: FnMut(&mut T) -> bool, { pub(super) vec: &'a mut Vec, /// The index of the item that will be inspected by the next call to `next`. pub(super) idx: usize, /// The number of items that have been drained (removed) thus far. pub(super) del: usize, /// The original length of `vec` prior to draining. pub(super) old_len: usize, /// The filter test predicate. pub(super) pred: F, /// A flag that indicates a panic has occurred in the filter test predicate. /// This is used as a hint in the drop implementation to prevent consumption /// of the remainder of the `DrainFilter`. Any unprocessed items will be /// backshifted in the `vec`, but no further items will be dropped or /// tested by the filter predicate. pub(super) panic_flag: bool, } impl DrainFilter<'_, T, F, A> where F: FnMut(&mut T) -> bool, { /// Returns a reference to the underlying allocator. #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn allocator(&self) -> &A { self.vec.allocator() } } #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] impl Iterator for DrainFilter<'_, T, F, A> where F: FnMut(&mut T) -> bool, { type Item = T; fn next(&mut self) -> Option { unsafe { while self.idx < self.old_len { let i = self.idx; let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len); self.panic_flag = true; let drained = (self.pred)(&mut v[i]); self.panic_flag = false; // Update the index *after* the predicate is called. If the index // is updated prior and the predicate panics, the element at this // index would be leaked. self.idx += 1; if drained { self.del += 1; return Some(ptr::read(&v[i])); } else if self.del > 0 { let del = self.del; let src: *const T = &v[i]; let dst: *mut T = &mut v[i - del]; ptr::copy_nonoverlapping(src, dst, 1); } } None } } fn size_hint(&self) -> (usize, Option) { (0, Some(self.old_len - self.idx)) } } #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] impl Drop for DrainFilter<'_, T, F, A> where F: FnMut(&mut T) -> bool, { fn drop(&mut self) { struct BackshiftOnDrop<'a, 'b, T, F, A: Allocator> where F: FnMut(&mut T) -> bool, { drain: &'b mut DrainFilter<'a, T, F, A>, } impl<'a, 'b, T, F, A: Allocator> Drop for BackshiftOnDrop<'a, 'b, T, F, A> where F: FnMut(&mut T) -> bool, { fn drop(&mut self) { unsafe { if self.drain.idx < self.drain.old_len && self.drain.del > 0 { // This is a pretty messed up state, and there isn't really an // obviously right thing to do. We don't want to keep trying // to execute `pred`, so we just backshift all the unprocessed // elements and tell the vec that they still exist. The backshift // is required to prevent a double-drop of the last successfully // drained item prior to a panic in the predicate. let ptr = self.drain.vec.as_mut_ptr(); let src = ptr.add(self.drain.idx); let dst = src.sub(self.drain.del); let tail_len = self.drain.old_len - self.drain.idx; src.copy_to(dst, tail_len); } self.drain.vec.set_len(self.drain.old_len - self.drain.del); } } } let backshift = BackshiftOnDrop { drain: self }; // Attempt to consume any remaining elements if the filter predicate // has not yet panicked. 
We'll backshift any remaining elements // whether we've already panicked or if the consumption here panics. if !backshift.drain.panic_flag { backshift.drain.for_each(drop); } } } use crate::borrow::Cow; use core::iter::FromIterator; use super::Vec; #[stable(feature = "cow_from_vec", since = "1.8.0")] impl<'a, T: Clone> From<&'a [T]> for Cow<'a, [T]> { fn from(s: &'a [T]) -> Cow<'a, [T]> { Cow::Borrowed(s) } } #[stable(feature = "cow_from_vec", since = "1.8.0")] impl<'a, T: Clone> From> for Cow<'a, [T]> { fn from(v: Vec) -> Cow<'a, [T]> { Cow::Owned(v) } } #[stable(feature = "cow_from_vec_ref", since = "1.28.0")] impl<'a, T: Clone> From<&'a Vec> for Cow<'a, [T]> { fn from(v: &'a Vec) -> Cow<'a, [T]> { Cow::Borrowed(v.as_slice()) } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> FromIterator for Cow<'a, [T]> where T: Clone, { fn from_iter>(it: I) -> Cow<'a, [T]> { Cow::Owned(FromIterator::from_iter(it)) } } use crate::alloc::Allocator; use crate::borrow::Cow; use super::Vec; macro_rules! __impl_slice_eq1 { ([$($vars:tt)*] $lhs:ty, $rhs:ty $(where $ty:ty: $bound:ident)?, #[$stability:meta]) => { #[$stability] impl PartialEq<$rhs> for $lhs where T: PartialEq, $($ty: $bound)? { #[inline] fn eq(&self, other: &$rhs) -> bool { self[..] == other[..] } #[inline] fn ne(&self, other: &$rhs) -> bool { self[..] != other[..] } } } } __impl_slice_eq1! { [A: Allocator] Vec, Vec, #[stable(feature = "rust1", since = "1.0.0")] } __impl_slice_eq1! { [A: Allocator] Vec, &[U], #[stable(feature = "rust1", since = "1.0.0")] } __impl_slice_eq1! { [A: Allocator] Vec, &mut [U], #[stable(feature = "rust1", since = "1.0.0")] } __impl_slice_eq1! { [A: Allocator] &[T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] } __impl_slice_eq1! { [A: Allocator] &mut [T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] } __impl_slice_eq1! { [A: Allocator] Vec, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] } __impl_slice_eq1! { [A: Allocator] [T], Vec, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] } __impl_slice_eq1! { [A: Allocator] Cow<'_, [T]>, Vec where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } __impl_slice_eq1! { [] Cow<'_, [T]>, &[U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } __impl_slice_eq1! { [] Cow<'_, [T]>, &mut [U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } __impl_slice_eq1! { [A: Allocator, const N: usize] Vec, [U; N], #[stable(feature = "rust1", since = "1.0.0")] } __impl_slice_eq1! { [A: Allocator, const N: usize] Vec, &[U; N], #[stable(feature = "rust1", since = "1.0.0")] } // NOTE: some less important impls are omitted to reduce code bloat // FIXME(Centril): Reconsider this? //__impl_slice_eq1! { [const N: usize] Vec, &mut [B; N], } //__impl_slice_eq1! { [const N: usize] [A; N], Vec, } //__impl_slice_eq1! { [const N: usize] &[A; N], Vec, } //__impl_slice_eq1! { [const N: usize] &mut [A; N], Vec, } //__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, [B; N], } //__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &[B; N], } //__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &mut [B; N], } use crate::alloc::{Allocator, Global}; use core::ptr::{self}; use core::slice::{self}; use super::{Drain, Vec}; /// A splicing iterator for `Vec`. /// /// This struct is created by [`Vec::splice()`]. /// See its documentation for more. 
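///
/// The replacement elements are only spliced into the vector once the
/// `Splice` value is dropped (an illustrative sketch):
///
/// ```
/// let mut v = vec![0, 1, 2];
/// {
///     let _splice = v.splice(1.., [7, 8].iter().cloned());
///     // `v` is still mutably borrowed here; the replacement has not
///     // been written yet.
/// }
/// // Dropping `_splice` performed the replacement.
/// assert_eq!(v, [0, 7, 8]);
/// ```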
/// /// # Example /// /// ``` /// let mut v = vec![0, 1, 2]; /// let new = [7, 8]; /// let iter: std::vec::Splice<_> = v.splice(1.., new.iter().cloned()); /// ``` #[derive(Debug)] #[stable(feature = "vec_splice", since = "1.21.0")] pub struct Splice< 'a, I: Iterator + 'a, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global, > { pub(super) drain: Drain<'a, I::Item, A>, pub(super) replace_with: I, } #[stable(feature = "vec_splice", since = "1.21.0")] impl Iterator for Splice<'_, I, A> { type Item = I::Item; fn next(&mut self) -> Option { self.drain.next() } fn size_hint(&self) -> (usize, Option) { self.drain.size_hint() } } #[stable(feature = "vec_splice", since = "1.21.0")] impl DoubleEndedIterator for Splice<'_, I, A> { fn next_back(&mut self) -> Option { self.drain.next_back() } } #[stable(feature = "vec_splice", since = "1.21.0")] impl ExactSizeIterator for Splice<'_, I, A> {} #[stable(feature = "vec_splice", since = "1.21.0")] impl Drop for Splice<'_, I, A> { fn drop(&mut self) { self.drain.by_ref().for_each(drop); unsafe { if self.drain.tail_len == 0 { self.drain.vec.as_mut().extend(self.replace_with.by_ref()); return; } // First fill the range left by drain(). if !self.drain.fill(&mut self.replace_with) { return; } // There may be more elements. Use the lower bound as an estimate. // FIXME: Is the upper bound a better guess? Or something else? let (lower_bound, _upper_bound) = self.replace_with.size_hint(); if lower_bound > 0 { self.drain.move_tail(lower_bound); if !self.drain.fill(&mut self.replace_with) { return; } } // Collect any remaining elements. // This is a zero-length vector which does not allocate if `lower_bound` was exact. let mut collected = self.replace_with.by_ref().collect::>().into_iter(); // Now we have an exact count. if collected.len() > 0 { self.drain.move_tail(collected.len()); let filled = self.drain.fill(&mut collected); debug_assert!(filled); debug_assert_eq!(collected.len(), 0); } } // Let `Drain::drop` move the tail back if necessary and restore `vec.len`. } } /// Private helper methods for `Splice::drop` impl Drain<'_, T, A> { /// The range from `self.vec.len` to `self.tail_start` contains elements /// that have been moved out. /// Fill that range as much as possible with new elements from the `replace_with` iterator. /// Returns `true` if we filled the entire range. (`replace_with.next()` didn’t return `None`.) unsafe fn fill>(&mut self, replace_with: &mut I) -> bool { let vec = unsafe { self.vec.as_mut() }; let range_start = vec.len; let range_end = self.tail_start; let range_slice = unsafe { slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start) }; for place in range_slice { if let Some(new_item) = replace_with.next() { unsafe { ptr::write(place, new_item) }; vec.len += 1; } else { return false; } } true } /// Makes room for inserting more elements before the tail. unsafe fn move_tail(&mut self, additional: usize) { let vec = unsafe { self.vec.as_mut() }; let len = self.tail_start + self.tail_len; vec.buf.reserve(len, additional); let new_tail_start = self.tail_start + additional; unsafe { let src = vec.as_ptr().add(self.tail_start); let dst = vec.as_mut_ptr().add(new_tail_start); ptr::copy(src, dst, self.tail_len); } self.tail_start = new_tail_start; } } use core::ptr::{self}; use core::slice::{self}; // A helper struct for in-place iteration that drops the destination slice of iteration, // i.e. the head. The source slice (the tail) is dropped by IntoIter. 
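// If iteration panics partway through, the buffer holds a fully written
// head (`inner..dst`) followed by a gap and then the unconsumed tail;
// this guard drops the head, while the `IntoIter` drops the tail.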
pub(super) struct InPlaceDrop { pub(super) inner: *mut T, pub(super) dst: *mut T, } impl InPlaceDrop { fn len(&self) -> usize { unsafe { self.dst.offset_from(self.inner) as usize } } } impl Drop for InPlaceDrop { #[inline] fn drop(&mut self) { unsafe { ptr::drop_in_place(slice::from_raw_parts_mut(self.inner, self.len())); } } } //! The alloc Prelude //! //! The purpose of this module is to alleviate imports of commonly-used //! items of the `alloc` crate by adding a glob import to the top of modules: //! //! ``` //! # #![allow(unused_imports)] //! #![feature(alloc_prelude)] //! extern crate alloc; //! use alloc::prelude::v1::*; //! ``` #![unstable(feature = "alloc_prelude", issue = "58935")] pub mod v1; //! The first version of the prelude of `alloc` crate. //! //! See the [module-level documentation](../index.html) for more. #![unstable(feature = "alloc_prelude", issue = "58935")] #[unstable(feature = "alloc_prelude", issue = "58935")] pub use crate::borrow::ToOwned; #[unstable(feature = "alloc_prelude", issue = "58935")] pub use crate::boxed::Box; #[unstable(feature = "alloc_prelude", issue = "58935")] pub use crate::string::{String, ToString}; #[unstable(feature = "alloc_prelude", issue = "58935")] pub use crate::vec::Vec; use super::*; extern crate test; use crate::boxed::Box; use test::Bencher; #[test] fn allocate_zeroed() { unsafe { let layout = Layout::from_size_align(1024, 1).unwrap(); let ptr = Global.allocate_zeroed(layout.clone()).unwrap_or_else(|_| handle_alloc_error(layout)); let mut i = ptr.as_non_null_ptr().as_ptr(); let end = i.add(layout.size()); while i < end { assert_eq!(*i, 0); i = i.offset(1); } Global.deallocate(ptr.as_non_null_ptr(), layout); } } #[bench] #[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks fn alloc_owned_small(b: &mut Bencher) { b.iter(|| { let _: Box<_> = box 10; }) } //! A pointer type for heap allocation. //! //! [`Box`], casually referred to as a 'box', provides the simplest form of //! heap allocation in Rust. Boxes provide ownership for this allocation, and //! drop their contents when they go out of scope. Boxes also ensure that they //! never allocate more than `isize::MAX` bytes. //! //! # Examples //! //! Move a value from the stack to the heap by creating a [`Box`]: //! //! ``` //! let val: u8 = 5; //! let boxed: Box = Box::new(val); //! ``` //! //! Move a value from a [`Box`] back to the stack by [dereferencing]: //! //! ``` //! let boxed: Box = Box::new(5); //! let val: u8 = *boxed; //! ``` //! //! Creating a recursive data structure: //! //! ``` //! #[derive(Debug)] //! enum List { //! Cons(T, Box>), //! Nil, //! } //! //! let list: List = List::Cons(1, Box::new(List::Cons(2, Box::new(List::Nil)))); //! println!("{:?}", list); //! ``` //! //! This will print `Cons(1, Cons(2, Nil))`. //! //! Recursive structures must be boxed, because if the definition of `Cons` //! looked like this: //! //! ```compile_fail,E0072 //! # enum List { //! Cons(T, List), //! # } //! ``` //! //! It wouldn't work. This is because the size of a `List` depends on how many //! elements are in the list, and so we don't know how much memory to allocate //! for a `Cons`. By introducing a [`Box`], which has a defined size, we know how //! big `Cons` needs to be. //! //! # Memory layout //! //! For non-zero-sized values, a [`Box`] will use the [`Global`] allocator for //! its allocation. It is valid to convert both ways between a [`Box`] and a //! raw pointer allocated with the [`Global`] allocator, given that the //! 
[`Layout`] used with the allocator is correct for the type. More precisely, //! a `value: *mut T` that has been allocated with the [`Global`] allocator //! with `Layout::for_value(&*value)` may be converted into a box using //! [`Box::::from_raw(value)`]. Conversely, the memory backing a `value: *mut //! T` obtained from [`Box::::into_raw`] may be deallocated using the //! [`Global`] allocator with [`Layout::for_value(&*value)`]. //! //! For zero-sized values, the `Box` pointer still has to be [valid] for reads //! and writes and sufficiently aligned. In particular, casting any aligned //! non-zero integer literal to a raw pointer produces a valid pointer, but a //! pointer pointing into previously allocated memory that since got freed is //! not valid. The recommended way to build a Box to a ZST if `Box::new` cannot //! be used is to use [`ptr::NonNull::dangling`]. //! //! So long as `T: Sized`, a `Box` is guaranteed to be represented //! as a single pointer and is also ABI-compatible with C pointers //! (i.e. the C type `T*`). This means that if you have extern "C" //! Rust functions that will be called from C, you can define those //! Rust functions using `Box` types, and use `T*` as corresponding //! type on the C side. As an example, consider this C header which //! declares functions that create and destroy some kind of `Foo` //! value: //! //! ```c //! /* C header */ //! //! /* Returns ownership to the caller */ //! struct Foo* foo_new(void); //! //! /* Takes ownership from the caller; no-op when invoked with NULL */ //! void foo_delete(struct Foo*); //! ``` //! //! These two functions might be implemented in Rust as follows. Here, the //! `struct Foo*` type from C is translated to `Box`, which captures //! the ownership constraints. Note also that the nullable argument to //! `foo_delete` is represented in Rust as `Option>`, since `Box` //! cannot be null. //! //! ``` //! #[repr(C)] //! pub struct Foo; //! //! #[no_mangle] //! pub extern "C" fn foo_new() -> Box { //! Box::new(Foo) //! } //! //! #[no_mangle] //! pub extern "C" fn foo_delete(_: Option>) {} //! ``` //! //! Even though `Box` has the same representation and C ABI as a C pointer, //! this does not mean that you can convert an arbitrary `T*` into a `Box` //! and expect things to work. `Box` values will always be fully aligned, //! non-null pointers. Moreover, the destructor for `Box` will attempt to //! free the value with the global allocator. In general, the best practice //! is to only use `Box` for pointers that originated from the global //! allocator. //! //! **Important.** At least at present, you should avoid using //! `Box` types for functions that are defined in C but invoked //! from Rust. In those cases, you should directly mirror the C types //! as closely as possible. Using types like `Box` where the C //! definition is just using `T*` can lead to undefined behavior, as //! described in [rust-lang/unsafe-code-guidelines#198][ucg#198]. //! //! [ucg#198]: https://github.com/rust-lang/unsafe-code-guidelines/issues/198 //! [dereferencing]: core::ops::Deref //! [`Box::::from_raw(value)`]: Box::from_raw //! [`Global`]: crate::alloc::Global //! [`Layout`]: crate::alloc::Layout //! [`Layout::for_value(&*value)`]: crate::alloc::Layout::for_value //! 
[valid]: ptr#safety #![stable(feature = "rust1", since = "1.0.0")] use core::any::Any; use core::borrow; use core::cmp::Ordering; use core::convert::{From, TryFrom}; use core::fmt; use core::future::Future; use core::hash::{Hash, Hasher}; use core::iter::{FromIterator, FusedIterator, Iterator}; use core::marker::{Unpin, Unsize}; use core::mem; use core::ops::{ CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Generator, GeneratorState, Receiver, }; use core::pin::Pin; use core::ptr::{self, Unique}; use core::stream::Stream; use core::task::{Context, Poll}; use crate::alloc::{handle_alloc_error, AllocError, Allocator, Global, Layout, WriteCloneIntoRaw}; use crate::borrow::Cow; use crate::raw_vec::RawVec; use crate::str::from_boxed_utf8_unchecked; use crate::vec::Vec; /// A pointer type for heap allocation. /// /// See the [module-level documentation](../../std/boxed/index.html) for more. #[lang = "owned_box"] #[fundamental] #[stable(feature = "rust1", since = "1.0.0")] pub struct Box< T: ?Sized, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, >(Unique, A); impl Box { /// Allocates memory on the heap and then places `x` into it. /// /// This doesn't actually allocate if `T` is zero-sized. /// /// # Examples /// /// ``` /// let five = Box::new(5); /// ``` #[inline(always)] #[doc(alias = "alloc")] #[doc(alias = "malloc")] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(x: T) -> Self { box x } /// Constructs a new box with uninitialized contents. /// /// # Examples /// /// ``` /// #![feature(new_uninit)] /// /// let mut five = Box::::new_uninit(); /// /// let five = unsafe { /// // Deferred initialization: /// five.as_mut_ptr().write(5); /// /// five.assume_init() /// }; /// /// assert_eq!(*five, 5) /// ``` #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn new_uninit() -> Box> { Self::new_uninit_in(Global) } /// Constructs a new `Box` with uninitialized contents, with the memory /// being filled with `0` bytes. /// /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage /// of this method. /// /// # Examples /// /// ``` /// #![feature(new_uninit)] /// /// let zero = Box::::new_zeroed(); /// let zero = unsafe { zero.assume_init() }; /// /// assert_eq!(*zero, 0) /// ``` /// /// [zeroed]: mem::MaybeUninit::zeroed #[inline] #[doc(alias = "calloc")] #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed() -> Box> { Self::new_zeroed_in(Global) } /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then /// `x` will be pinned in memory and unable to be moved. #[stable(feature = "pin", since = "1.33.0")] #[inline(always)] pub fn pin(x: T) -> Pin> { (box x).into() } /// Allocates memory on the heap then places `x` into it, /// returning an error if the allocation fails /// /// This doesn't actually allocate if `T` is zero-sized. 
/// /// # Examples /// /// ``` /// #![feature(allocator_api)] /// /// let five = Box::try_new(5)?; /// # Ok::<(), std::alloc::AllocError>(()) /// ``` #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn try_new(x: T) -> Result { Self::try_new_in(x, Global) } /// Constructs a new box with uninitialized contents on the heap, /// returning an error if the allocation fails /// /// # Examples /// /// ``` /// #![feature(allocator_api, new_uninit)] /// /// let mut five = Box::::try_new_uninit()?; /// /// let five = unsafe { /// // Deferred initialization: /// five.as_mut_ptr().write(5); /// /// five.assume_init() /// }; /// /// assert_eq!(*five, 5); /// # Ok::<(), std::alloc::AllocError>(()) /// ``` #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn try_new_uninit() -> Result>, AllocError> { Box::try_new_uninit_in(Global) } /// Constructs a new `Box` with uninitialized contents, with the memory /// being filled with `0` bytes on the heap /// /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage /// of this method. /// /// # Examples /// /// ``` /// #![feature(allocator_api, new_uninit)] /// /// let zero = Box::::try_new_zeroed()?; /// let zero = unsafe { zero.assume_init() }; /// /// assert_eq!(*zero, 0); /// # Ok::<(), std::alloc::AllocError>(()) /// ``` /// /// [zeroed]: mem::MaybeUninit::zeroed #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn try_new_zeroed() -> Result>, AllocError> { Box::try_new_zeroed_in(Global) } } impl Box { /// Allocates memory in the given allocator then places `x` into it. /// /// This doesn't actually allocate if `T` is zero-sized. /// /// # Examples /// /// ``` /// #![feature(allocator_api)] /// /// use std::alloc::System; /// /// let five = Box::new_in(5, System); /// ``` #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn new_in(x: T, alloc: A) -> Self { let mut boxed = Self::new_uninit_in(alloc); unsafe { boxed.as_mut_ptr().write(x); boxed.assume_init() } } /// Allocates memory in the given allocator then places `x` into it, /// returning an error if the allocation fails /// /// This doesn't actually allocate if `T` is zero-sized. /// /// # Examples /// /// ``` /// #![feature(allocator_api)] /// /// use std::alloc::System; /// /// let five = Box::try_new_in(5, System)?; /// # Ok::<(), std::alloc::AllocError>(()) /// ``` #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn try_new_in(x: T, alloc: A) -> Result { let mut boxed = Self::try_new_uninit_in(alloc)?; unsafe { boxed.as_mut_ptr().write(x); Ok(boxed.assume_init()) } } /// Constructs a new box with uninitialized contents in the provided allocator. /// /// # Examples /// /// ``` /// #![feature(allocator_api, new_uninit)] /// /// use std::alloc::System; /// /// let mut five = Box::::new_uninit_in(System); /// /// let five = unsafe { /// // Deferred initialization: /// five.as_mut_ptr().write(5); /// /// five.assume_init() /// }; /// /// assert_eq!(*five, 5) /// ``` #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit_in(alloc: A) -> Box, A> { let layout = Layout::new::>(); // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable. // That would make code size bigger. 
match Box::try_new_uninit_in(alloc) { Ok(m) => m, Err(_) => handle_alloc_error(layout), } } /// Constructs a new box with uninitialized contents in the provided allocator, /// returning an error if the allocation fails /// /// # Examples /// /// ``` /// #![feature(allocator_api, new_uninit)] /// /// use std::alloc::System; /// /// let mut five = Box::::try_new_uninit_in(System)?; /// /// let five = unsafe { /// // Deferred initialization: /// five.as_mut_ptr().write(5); /// /// five.assume_init() /// }; /// /// assert_eq!(*five, 5); /// # Ok::<(), std::alloc::AllocError>(()) /// ``` #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_uninit_in(alloc: A) -> Result, A>, AllocError> { let layout = Layout::new::>(); let ptr = alloc.allocate(layout)?.cast(); unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) } } /// Constructs a new `Box` with uninitialized contents, with the memory /// being filled with `0` bytes in the provided allocator. /// /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage /// of this method. /// /// # Examples /// /// ``` /// #![feature(allocator_api, new_uninit)] /// /// use std::alloc::System; /// /// let zero = Box::::new_zeroed_in(System); /// let zero = unsafe { zero.assume_init() }; /// /// assert_eq!(*zero, 0) /// ``` /// /// [zeroed]: mem::MaybeUninit::zeroed #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed_in(alloc: A) -> Box, A> { let layout = Layout::new::>(); // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable. // That would make code size bigger. match Box::try_new_zeroed_in(alloc) { Ok(m) => m, Err(_) => handle_alloc_error(layout), } } /// Constructs a new `Box` with uninitialized contents, with the memory /// being filled with `0` bytes in the provided allocator, /// returning an error if the allocation fails, /// /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage /// of this method. /// /// # Examples /// /// ``` /// #![feature(allocator_api, new_uninit)] /// /// use std::alloc::System; /// /// let zero = Box::::try_new_zeroed_in(System)?; /// let zero = unsafe { zero.assume_init() }; /// /// assert_eq!(*zero, 0); /// # Ok::<(), std::alloc::AllocError>(()) /// ``` /// /// [zeroed]: mem::MaybeUninit::zeroed #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_zeroed_in(alloc: A) -> Result, A>, AllocError> { let layout = Layout::new::>(); let ptr = alloc.allocate_zeroed(layout)?.cast(); unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) } } /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then /// `x` will be pinned in memory and unable to be moved. #[unstable(feature = "allocator_api", issue = "32838")] #[inline(always)] pub fn pin_in(x: T, alloc: A) -> Pin where A: 'static, { Self::new_in(x, alloc).into() } /// Converts a `Box` into a `Box<[T]>` /// /// This conversion does not allocate on the heap and happens in place. #[unstable(feature = "box_into_boxed_slice", issue = "71582")] pub fn into_boxed_slice(boxed: Self) -> Box<[T], A> { let (raw, alloc) = Box::into_raw_with_allocator(boxed); unsafe { Box::from_raw_in(raw as *mut [T; 1], alloc) } } /// Consumes the `Box`, returning the wrapped value. 
/// /// # Examples /// /// ``` /// #![feature(box_into_inner)] /// /// let c = Box::new(5); /// /// assert_eq!(Box::into_inner(c), 5); /// ``` #[unstable(feature = "box_into_inner", issue = "80437")] #[inline] pub fn into_inner(boxed: Self) -> T { *boxed } } impl Box<[T]> { /// Constructs a new boxed slice with uninitialized contents. /// /// # Examples /// /// ``` /// #![feature(new_uninit)] /// /// let mut values = Box::<[u32]>::new_uninit_slice(3); /// /// let values = unsafe { /// // Deferred initialization: /// values[0].as_mut_ptr().write(1); /// values[1].as_mut_ptr().write(2); /// values[2].as_mut_ptr().write(3); /// /// values.assume_init() /// }; /// /// assert_eq!(*values, [1, 2, 3]) /// ``` #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit]> { unsafe { RawVec::with_capacity(len).into_box(len) } } /// Constructs a new boxed slice with uninitialized contents, with the memory /// being filled with `0` bytes. /// /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage /// of this method. /// /// # Examples /// /// ``` /// #![feature(new_uninit)] /// /// let values = Box::<[u32]>::new_zeroed_slice(3); /// let values = unsafe { values.assume_init() }; /// /// assert_eq!(*values, [0, 0, 0]) /// ``` /// /// [zeroed]: mem::MaybeUninit::zeroed #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed_slice(len: usize) -> Box<[mem::MaybeUninit]> { unsafe { RawVec::with_capacity_zeroed(len).into_box(len) } } } impl Box<[T], A> { /// Constructs a new boxed slice with uninitialized contents in the provided allocator. /// /// # Examples /// /// ``` /// #![feature(allocator_api, new_uninit)] /// /// use std::alloc::System; /// /// let mut values = Box::<[u32], _>::new_uninit_slice_in(3, System); /// /// let values = unsafe { /// // Deferred initialization: /// values[0].as_mut_ptr().write(1); /// values[1].as_mut_ptr().write(2); /// values[2].as_mut_ptr().write(3); /// /// values.assume_init() /// }; /// /// assert_eq!(*values, [1, 2, 3]) /// ``` #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit], A> { unsafe { RawVec::with_capacity_in(len, alloc).into_box(len) } } /// Constructs a new boxed slice with uninitialized contents in the provided allocator, /// with the memory being filled with `0` bytes. /// /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage /// of this method. /// /// # Examples /// /// ``` /// #![feature(allocator_api, new_uninit)] /// /// use std::alloc::System; /// /// let values = Box::<[u32], _>::new_zeroed_slice_in(3, System); /// let values = unsafe { values.assume_init() }; /// /// assert_eq!(*values, [0, 0, 0]) /// ``` /// /// [zeroed]: mem::MaybeUninit::zeroed #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit], A> { unsafe { RawVec::with_capacity_zeroed_in(len, alloc).into_box(len) } } } impl Box, A> { /// Converts to `Box`. /// /// # Safety /// /// As with [`MaybeUninit::assume_init`], /// it is up to the caller to guarantee that the value /// really is in an initialized state. /// Calling this when the content is not yet fully initialized /// causes immediate undefined behavior. 
/// /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init /// /// # Examples /// /// ``` /// #![feature(new_uninit)] /// /// let mut five = Box::::new_uninit(); /// /// let five: Box = unsafe { /// // Deferred initialization: /// five.as_mut_ptr().write(5); /// /// five.assume_init() /// }; /// /// assert_eq!(*five, 5) /// ``` #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub unsafe fn assume_init(self) -> Box { let (raw, alloc) = Box::into_raw_with_allocator(self); unsafe { Box::from_raw_in(raw as *mut T, alloc) } } } impl Box<[mem::MaybeUninit], A> { /// Converts to `Box<[T], A>`. /// /// # Safety /// /// As with [`MaybeUninit::assume_init`], /// it is up to the caller to guarantee that the values /// really are in an initialized state. /// Calling this when the content is not yet fully initialized /// causes immediate undefined behavior. /// /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init /// /// # Examples /// /// ``` /// #![feature(new_uninit)] /// /// let mut values = Box::<[u32]>::new_uninit_slice(3); /// /// let values = unsafe { /// // Deferred initialization: /// values[0].as_mut_ptr().write(1); /// values[1].as_mut_ptr().write(2); /// values[2].as_mut_ptr().write(3); /// /// values.assume_init() /// }; /// /// assert_eq!(*values, [1, 2, 3]) /// ``` #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub unsafe fn assume_init(self) -> Box<[T], A> { let (raw, alloc) = Box::into_raw_with_allocator(self); unsafe { Box::from_raw_in(raw as *mut [T], alloc) } } } impl Box { /// Constructs a box from a raw pointer. /// /// After calling this function, the raw pointer is owned by the /// resulting `Box`. Specifically, the `Box` destructor will call /// the destructor of `T` and free the allocated memory. For this /// to be safe, the memory must have been allocated in accordance /// with the [memory layout] used by `Box` . /// /// # Safety /// /// This function is unsafe because improper use may lead to /// memory problems. For example, a double-free may occur if the /// function is called twice on the same raw pointer. /// /// The safety conditions are described in the [memory layout] section. /// /// # Examples /// /// Recreate a `Box` which was previously converted to a raw pointer /// using [`Box::into_raw`]: /// ``` /// let x = Box::new(5); /// let ptr = Box::into_raw(x); /// let x = unsafe { Box::from_raw(ptr) }; /// ``` /// Manually create a `Box` from scratch by using the global allocator: /// ``` /// use std::alloc::{alloc, Layout}; /// /// unsafe { /// let ptr = alloc(Layout::new::()) as *mut i32; /// // In general .write is required to avoid attempting to destruct /// // the (uninitialized) previous contents of `ptr`, though for this /// // simple example `*ptr = 5` would have worked as well. /// ptr.write(5); /// let x = Box::from_raw(ptr); /// } /// ``` /// /// [memory layout]: self#memory-layout /// [`Layout`]: crate::Layout #[stable(feature = "box_raw", since = "1.4.0")] #[inline] pub unsafe fn from_raw(raw: *mut T) -> Self { unsafe { Self::from_raw_in(raw, Global) } } } impl Box { /// Constructs a box from a raw pointer in the given allocator. /// /// After calling this function, the raw pointer is owned by the /// resulting `Box`. Specifically, the `Box` destructor will call /// the destructor of `T` and free the allocated memory. For this /// to be safe, the memory must have been allocated in accordance /// with the [memory layout] used by `Box` . 
/// /// # Safety /// /// This function is unsafe because improper use may lead to /// memory problems. For example, a double-free may occur if the /// function is called twice on the same raw pointer. /// /// /// # Examples /// /// Recreate a `Box` which was previously converted to a raw pointer /// using [`Box::into_raw_with_allocator`]: /// ``` /// #![feature(allocator_api)] /// /// use std::alloc::System; /// /// let x = Box::new_in(5, System); /// let (ptr, alloc) = Box::into_raw_with_allocator(x); /// let x = unsafe { Box::from_raw_in(ptr, alloc) }; /// ``` /// Manually create a `Box` from scratch by using the system allocator: /// ``` /// #![feature(allocator_api, slice_ptr_get)] /// /// use std::alloc::{Allocator, Layout, System}; /// /// unsafe { /// let ptr = System.allocate(Layout::new::())?.as_mut_ptr() as *mut i32; /// // In general .write is required to avoid attempting to destruct /// // the (uninitialized) previous contents of `ptr`, though for this /// // simple example `*ptr = 5` would have worked as well. /// ptr.write(5); /// let x = Box::from_raw_in(ptr, System); /// } /// # Ok::<(), std::alloc::AllocError>(()) /// ``` /// /// [memory layout]: self#memory-layout /// [`Layout`]: crate::Layout #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub unsafe fn from_raw_in(raw: *mut T, alloc: A) -> Self { Box(unsafe { Unique::new_unchecked(raw) }, alloc) } /// Consumes the `Box`, returning a wrapped raw pointer. /// /// The pointer will be properly aligned and non-null. /// /// After calling this function, the caller is responsible for the /// memory previously managed by the `Box`. In particular, the /// caller should properly destroy `T` and release the memory, taking /// into account the [memory layout] used by `Box`. The easiest way to /// do this is to convert the raw pointer back into a `Box` with the /// [`Box::from_raw`] function, allowing the `Box` destructor to perform /// the cleanup. /// /// Note: this is an associated function, which means that you have /// to call it as `Box::into_raw(b)` instead of `b.into_raw()`. This /// is so that there is no conflict with a method on the inner type. /// /// # Examples /// Converting the raw pointer back into a `Box` with [`Box::from_raw`] /// for automatic cleanup: /// ``` /// let x = Box::new(String::from("Hello")); /// let ptr = Box::into_raw(x); /// let x = unsafe { Box::from_raw(ptr) }; /// ``` /// Manual cleanup by explicitly running the destructor and deallocating /// the memory: /// ``` /// use std::alloc::{dealloc, Layout}; /// use std::ptr; /// /// let x = Box::new(String::from("Hello")); /// let p = Box::into_raw(x); /// unsafe { /// ptr::drop_in_place(p); /// dealloc(p as *mut u8, Layout::new::()); /// } /// ``` /// /// [memory layout]: self#memory-layout #[stable(feature = "box_raw", since = "1.4.0")] #[inline] pub fn into_raw(b: Self) -> *mut T { Self::into_raw_with_allocator(b).0 } /// Consumes the `Box`, returning a wrapped raw pointer and the allocator. /// /// The pointer will be properly aligned and non-null. /// /// After calling this function, the caller is responsible for the /// memory previously managed by the `Box`. In particular, the /// caller should properly destroy `T` and release the memory, taking /// into account the [memory layout] used by `Box`. The easiest way to /// do this is to convert the raw pointer back into a `Box` with the /// [`Box::from_raw_in`] function, allowing the `Box` destructor to perform /// the cleanup. 
/// /// Note: this is an associated function, which means that you have /// to call it as `Box::into_raw_with_allocator(b)` instead of `b.into_raw_with_allocator()`. This /// is so that there is no conflict with a method on the inner type. /// /// # Examples /// Converting the raw pointer back into a `Box` with [`Box::from_raw_in`] /// for automatic cleanup: /// ``` /// #![feature(allocator_api)] /// /// use std::alloc::System; /// /// let x = Box::new_in(String::from("Hello"), System); /// let (ptr, alloc) = Box::into_raw_with_allocator(x); /// let x = unsafe { Box::from_raw_in(ptr, alloc) }; /// ``` /// Manual cleanup by explicitly running the destructor and deallocating /// the memory: /// ``` /// #![feature(allocator_api)] /// /// use std::alloc::{Allocator, Layout, System}; /// use std::ptr::{self, NonNull}; /// /// let x = Box::new_in(String::from("Hello"), System); /// let (ptr, alloc) = Box::into_raw_with_allocator(x); /// unsafe { /// ptr::drop_in_place(ptr); /// let non_null = NonNull::new_unchecked(ptr); /// alloc.deallocate(non_null.cast(), Layout::new::()); /// } /// ``` /// /// [memory layout]: self#memory-layout #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn into_raw_with_allocator(b: Self) -> (*mut T, A) { let (leaked, alloc) = Box::into_unique(b); (leaked.as_ptr(), alloc) } #[unstable( feature = "ptr_internals", issue = "none", reason = "use `Box::leak(b).into()` or `Unique::from(Box::leak(b))` instead" )] #[inline] #[doc(hidden)] pub fn into_unique(b: Self) -> (Unique, A) { // Box is recognized as a "unique pointer" by Stacked Borrows, but internally it is a // raw pointer for the type system. Turning it directly into a raw pointer would not be // recognized as "releasing" the unique pointer to permit aliased raw accesses, // so all raw pointer methods have to go through `Box::leak`. Turning *that* to a raw pointer // behaves correctly. let alloc = unsafe { ptr::read(&b.1) }; (Unique::from(Box::leak(b)), alloc) } /// Returns a reference to the underlying allocator. /// /// Note: this is an associated function, which means that you have /// to call it as `Box::allocator(&b)` instead of `b.allocator()`. This /// is so that there is no conflict with a method on the inner type. #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn allocator(b: &Self) -> &A { &b.1 } /// Consumes and leaks the `Box`, returning a mutable reference, /// `&'a mut T`. Note that the type `T` must outlive the chosen lifetime /// `'a`. If the type has only static references, or none at all, then this /// may be chosen to be `'static`. /// /// This function is mainly useful for data that lives for the remainder of /// the program's life. Dropping the returned reference will cause a memory /// leak. If this is not acceptable, the reference should first be wrapped /// with the [`Box::from_raw`] function producing a `Box`. This `Box` can /// then be dropped which will properly destroy `T` and release the /// allocated memory. /// /// Note: this is an associated function, which means that you have /// to call it as `Box::leak(b)` instead of `b.leak()`. This /// is so that there is no conflict with a method on the inner type. 
    ///
    /// # Examples
    ///
    /// Simple usage:
    ///
    /// ```
    /// let x = Box::new(41);
    /// let static_ref: &'static mut usize = Box::leak(x);
    /// *static_ref += 1;
    /// assert_eq!(*static_ref, 42);
    /// ```
    ///
    /// Unsized data:
    ///
    /// ```
    /// let x = vec![1, 2, 3].into_boxed_slice();
    /// let static_ref = Box::leak(x);
    /// static_ref[0] = 4;
    /// assert_eq!(*static_ref, [4, 2, 3]);
    /// ```
    #[stable(feature = "box_leak", since = "1.26.0")]
    #[inline]
    pub fn leak<'a>(b: Self) -> &'a mut T
    where
        A: 'a,
    {
        unsafe { &mut *mem::ManuallyDrop::new(b).0.as_ptr() }
    }

    /// Converts a `Box<T>` into a `Pin<Box<T>>`
    ///
    /// This conversion does not allocate on the heap and happens in place.
    ///
    /// This is also available via [`From`].
    #[unstable(feature = "box_into_pin", issue = "62370")]
    pub fn into_pin(boxed: Self) -> Pin<Self>
    where
        A: 'static,
    {
        // It's not possible to move or replace the insides of a `Pin<Box<T>>`
        // when `T: !Unpin`, so it's safe to pin it directly without any
        // additional requirements.
        unsafe { Pin::new_unchecked(boxed) }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Box<T, A> {
    fn drop(&mut self) {
        // FIXME: Do nothing, drop is currently performed by compiler.
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Default> Default for Box<T> {
    /// Creates a `Box<T>`, with the `Default` value for T.
    fn default() -> Self {
        box T::default()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Default for Box<[T]> {
    fn default() -> Self {
        Box::<[T; 0]>::new([])
    }
}

#[stable(feature = "default_box_extra", since = "1.17.0")]
impl Default for Box<str> {
    fn default() -> Self {
        unsafe { from_boxed_utf8_unchecked(Default::default()) }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone, A: Allocator + Clone> Clone for Box<T, A> {
    /// Returns a new box with a `clone()` of this box's contents.
    ///
    /// # Examples
    ///
    /// ```
    /// let x = Box::new(5);
    /// let y = x.clone();
    ///
    /// // The value is the same
    /// assert_eq!(x, y);
    ///
    /// // But they are unique objects
    /// assert_ne!(&*x as *const i32, &*y as *const i32);
    /// ```
    #[inline]
    fn clone(&self) -> Self {
        // Pre-allocate memory to allow writing the cloned value directly.
        let mut boxed = Self::new_uninit_in(self.1.clone());
        unsafe {
            (**self).write_clone_into_raw(boxed.as_mut_ptr());
            boxed.assume_init()
        }
    }

    /// Copies `source`'s contents into `self` without creating a new allocation.
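    ///
    /// Because the assignment is delegated to `T::clone_from`, nested
    /// buffers can be reused as well; a small sketch with a boxed `Vec`:
    ///
    /// ```
    /// let src = Box::new(vec![1, 2, 3]);
    /// let mut dst = Box::new(vec![0; 3]);
    /// dst.clone_from(&src); // reuses both the box and the Vec's buffer
    /// assert_eq!(*dst, [1, 2, 3]);
    /// ```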
    ///
    /// # Examples
    ///
    /// ```
    /// let x = Box::new(5);
    /// let mut y = Box::new(10);
    /// let yp: *const i32 = &*y;
    ///
    /// y.clone_from(&x);
    ///
    /// // The value is the same
    /// assert_eq!(x, y);
    ///
    /// // And no allocation occurred
    /// assert_eq!(yp, &*y);
    /// ```
    #[inline]
    fn clone_from(&mut self, source: &Self) {
        (**self).clone_from(&(**source));
    }
}

#[stable(feature = "box_slice_clone", since = "1.3.0")]
impl Clone for Box<str> {
    fn clone(&self) -> Self {
        // this makes a copy of the data
        let buf: Box<[u8]> = self.as_bytes().into();
        unsafe { from_boxed_utf8_unchecked(buf) }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Box<T, A> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        PartialEq::eq(&**self, &**other)
    }
    #[inline]
    fn ne(&self, other: &Self) -> bool {
        PartialEq::ne(&**self, &**other)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Box<T, A> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }
    #[inline]
    fn lt(&self, other: &Self) -> bool {
        PartialOrd::lt(&**self, &**other)
    }
    #[inline]
    fn le(&self, other: &Self) -> bool {
        PartialOrd::le(&**self, &**other)
    }
    #[inline]
    fn ge(&self, other: &Self) -> bool {
        PartialOrd::ge(&**self, &**other)
    }
    #[inline]
    fn gt(&self, other: &Self) -> bool {
        PartialOrd::gt(&**self, &**other)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Ord, A: Allocator> Ord for Box<T, A> {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        Ord::cmp(&**self, &**other)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Eq, A: Allocator> Eq for Box<T, A> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Hash, A: Allocator> Hash for Box<T, A> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        (**self).hash(state);
    }
}

#[stable(feature = "indirect_hasher_impl", since = "1.22.0")]
impl<T: ?Sized + Hasher, A: Allocator> Hasher for Box<T, A> {
    fn finish(&self) -> u64 {
        (**self).finish()
    }
    fn write(&mut self, bytes: &[u8]) {
        (**self).write(bytes)
    }
    fn write_u8(&mut self, i: u8) {
        (**self).write_u8(i)
    }
    fn write_u16(&mut self, i: u16) {
        (**self).write_u16(i)
    }
    fn write_u32(&mut self, i: u32) {
        (**self).write_u32(i)
    }
    fn write_u64(&mut self, i: u64) {
        (**self).write_u64(i)
    }
    fn write_u128(&mut self, i: u128) {
        (**self).write_u128(i)
    }
    fn write_usize(&mut self, i: usize) {
        (**self).write_usize(i)
    }
    fn write_i8(&mut self, i: i8) {
        (**self).write_i8(i)
    }
    fn write_i16(&mut self, i: i16) {
        (**self).write_i16(i)
    }
    fn write_i32(&mut self, i: i32) {
        (**self).write_i32(i)
    }
    fn write_i64(&mut self, i: i64) {
        (**self).write_i64(i)
    }
    fn write_i128(&mut self, i: i128) {
        (**self).write_i128(i)
    }
    fn write_isize(&mut self, i: isize) {
        (**self).write_isize(i)
    }
}

#[stable(feature = "from_for_ptrs", since = "1.6.0")]
impl<T> From<T> for Box<T> {
    /// Converts a generic type `T` into a `Box<T>`
    ///
    /// The conversion allocates on the heap and moves `t`
    /// from the stack into it.
    ///
    /// # Examples
    /// ```rust
    /// let x = 5;
    /// let boxed = Box::new(5);
    ///
    /// assert_eq!(Box::from(x), boxed);
    /// ```
    fn from(t: T) -> Self {
        Box::new(t)
    }
}

#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Pin<Box<T, A>>
where
    A: 'static,
{
    /// Converts a `Box<T>` into a `Pin<Box<T>>`
    ///
    /// This conversion does not allocate on the heap and happens in place.
    fn from(boxed: Box<T, A>) -> Self {
        Box::into_pin(boxed)
    }
}

#[stable(feature = "box_from_slice", since = "1.17.0")]
impl<T: Copy> From<&[T]> for Box<[T]> {
    /// Converts a `&[T]` into a `Box<[T]>`
    ///
    /// This conversion allocates on the heap
    /// and performs a copy of `slice`.
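    ///
    /// The `T: Copy` bound lets the elements be copied bitwise into the new
    /// allocation, with no per-element `Clone` calls. For non-`Copy` element
    /// types, a sketch of an equivalent conversion goes through a `Vec`:
    ///
    /// ```
    /// let strings: &[String] = &[String::from("a"), String::from("b")];
    /// let boxed: Box<[String]> = strings.to_vec().into_boxed_slice();
    /// assert_eq!(&*boxed, strings);
    /// ```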
    ///
    /// # Examples
    /// ```rust
    /// // create a &[u8] which will be used to create a Box<[u8]>
    /// let slice: &[u8] = &[104, 101, 108, 108, 111];
    /// let boxed_slice: Box<[u8]> = Box::from(slice);
    ///
    /// println!("{:?}", boxed_slice);
    /// ```
    fn from(slice: &[T]) -> Box<[T]> {
        let len = slice.len();
        let buf = RawVec::with_capacity(len);
        unsafe {
            ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len);
            buf.into_box(slice.len()).assume_init()
        }
    }
}

#[stable(feature = "box_from_cow", since = "1.45.0")]
impl<T: Copy> From<Cow<'_, [T]>> for Box<[T]> {
    #[inline]
    fn from(cow: Cow<'_, [T]>) -> Box<[T]> {
        match cow {
            Cow::Borrowed(slice) => Box::from(slice),
            Cow::Owned(slice) => Box::from(slice),
        }
    }
}

#[stable(feature = "box_from_slice", since = "1.17.0")]
impl From<&str> for Box<str> {
    /// Converts a `&str` into a `Box<str>`
    ///
    /// This conversion allocates on the heap
    /// and performs a copy of `s`.
    ///
    /// # Examples
    /// ```rust
    /// let boxed: Box<str> = Box::from("hello");
    /// println!("{}", boxed);
    /// ```
    #[inline]
    fn from(s: &str) -> Box<str> {
        unsafe { from_boxed_utf8_unchecked(Box::from(s.as_bytes())) }
    }
}

#[stable(feature = "box_from_cow", since = "1.45.0")]
impl From<Cow<'_, str>> for Box<str> {
    #[inline]
    fn from(cow: Cow<'_, str>) -> Box<str> {
        match cow {
            Cow::Borrowed(s) => Box::from(s),
            Cow::Owned(s) => Box::from(s),
        }
    }
}

#[stable(feature = "boxed_str_conv", since = "1.19.0")]
impl<A: Allocator> From<Box<str, A>> for Box<[u8], A> {
    /// Converts a `Box<str>` into a `Box<[u8]>`
    ///
    /// This conversion does not allocate on the heap and happens in place.
    ///
    /// # Examples
    /// ```rust
    /// // create a Box<str> which will be used to create a Box<[u8]>
    /// let boxed: Box<str> = Box::from("hello");
    /// let boxed_str: Box<[u8]> = Box::from(boxed);
    ///
    /// // create a &[u8] which will be used to create a Box<[u8]>
    /// let slice: &[u8] = &[104, 101, 108, 108, 111];
    /// let boxed_slice = Box::from(slice);
    ///
    /// assert_eq!(boxed_slice, boxed_str);
    /// ```
    #[inline]
    fn from(s: Box<str, A>) -> Self {
        let (raw, alloc) = Box::into_raw_with_allocator(s);
        unsafe { Box::from_raw_in(raw as *mut [u8], alloc) }
    }
}

#[stable(feature = "box_from_array", since = "1.45.0")]
impl<T, const N: usize> From<[T; N]> for Box<[T]> {
    /// Converts a `[T; N]` into a `Box<[T]>`
    ///
    /// This conversion moves the array to newly heap-allocated memory.
    ///
    /// # Examples
    /// ```rust
    /// let boxed: Box<[u8]> = Box::from([4, 2]);
    /// println!("{:?}", boxed);
    /// ```
    fn from(array: [T; N]) -> Box<[T]> {
        box array
    }
}

#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
impl<T, const N: usize> TryFrom<Box<[T]>> for Box<[T; N]> {
    type Error = Box<[T]>;

    fn try_from(boxed_slice: Box<[T]>) -> Result<Self, Self::Error> {
        if boxed_slice.len() == N {
            Ok(unsafe { Box::from_raw(Box::into_raw(boxed_slice) as *mut [T; N]) })
        } else {
            Err(boxed_slice)
        }
    }
}

impl<A: Allocator> Box<dyn Any, A> {
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    /// Attempt to downcast the box to a concrete type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::any::Any;
    ///
    /// fn print_if_string(value: Box<dyn Any>) {
    ///     if let Ok(string) = value.downcast::<String>() {
    ///         println!("String ({}): {}", string.len(), string);
    ///     }
    /// }
    ///
    /// let my_string = "Hello World".to_string();
    /// print_if_string(Box::new(my_string));
    /// print_if_string(Box::new(0i8));
    /// ```
    pub fn downcast<T: Any>(self) -> Result<Box<T, A>, Self> {
        if self.is::<T>() {
            unsafe {
                let (raw, alloc): (*mut dyn Any, _) = Box::into_raw_with_allocator(self);
                Ok(Box::from_raw_in(raw as *mut T, alloc))
            }
        } else {
            Err(self)
        }
    }
}

impl<A: Allocator> Box<dyn Any + Send, A> {
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    /// Attempt to downcast the box to a concrete type.
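    ///
    /// On failure the original box is returned unchanged and is still
    /// `Send`, so it can keep crossing thread boundaries; a minimal sketch:
    ///
    /// ```
    /// use std::any::Any;
    ///
    /// let value: Box<dyn Any + Send> = Box::new(8i32);
    /// let value = match value.downcast::<String>() {
    ///     Ok(_) => unreachable!(),
    ///     Err(original) => original, // still `Box<dyn Any + Send>`
    /// };
    /// std::thread::spawn(move || {
    ///     assert_eq!(*value.downcast::<i32>().unwrap(), 8);
    /// })
    /// .join()
    /// .unwrap();
    /// ```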
    ///
    /// # Examples
    ///
    /// ```
    /// use std::any::Any;
    ///
    /// fn print_if_string(value: Box<dyn Any + Send>) {
    ///     if let Ok(string) = value.downcast::<String>() {
    ///         println!("String ({}): {}", string.len(), string);
    ///     }
    /// }
    ///
    /// let my_string = "Hello World".to_string();
    /// print_if_string(Box::new(my_string));
    /// print_if_string(Box::new(0i8));
    /// ```
    pub fn downcast<T: Any>(self) -> Result<Box<T, A>, Self> {
        if self.is::<T>() {
            unsafe {
                let (raw, alloc): (*mut (dyn Any + Send), _) = Box::into_raw_with_allocator(self);
                Ok(Box::from_raw_in(raw as *mut T, alloc))
            }
        } else {
            Err(self)
        }
    }
}

impl<A: Allocator> Box<dyn Any + Send + Sync, A> {
    #[inline]
    #[stable(feature = "box_send_sync_any_downcast", since = "1.51.0")]
    /// Attempt to downcast the box to a concrete type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::any::Any;
    ///
    /// fn print_if_string(value: Box<dyn Any + Send + Sync>) {
    ///     if let Ok(string) = value.downcast::<String>() {
    ///         println!("String ({}): {}", string.len(), string);
    ///     }
    /// }
    ///
    /// let my_string = "Hello World".to_string();
    /// print_if_string(Box::new(my_string));
    /// print_if_string(Box::new(0i8));
    /// ```
    pub fn downcast<T: Any>(self) -> Result<Box<T, A>, Self> {
        if self.is::<T>() {
            unsafe {
                let (raw, alloc): (*mut (dyn Any + Send + Sync), _) =
                    Box::into_raw_with_allocator(self);
                Ok(Box::from_raw_in(raw as *mut T, alloc))
            }
        } else {
            Err(self)
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Display + ?Sized, A: Allocator> fmt::Display for Box<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Debug + ?Sized, A: Allocator> fmt::Debug for Box<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> fmt::Pointer for Box<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // It's not possible to extract the inner Unique directly from the Box,
        // instead we cast it to a *const which aliases the Unique
        let ptr: *const T = &**self;
        fmt::Pointer::fmt(&ptr, f)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> Deref for Box<T, A> {
    type Target = T;

    fn deref(&self) -> &T {
        &**self
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> DerefMut for Box<T, A> {
    fn deref_mut(&mut self) -> &mut T {
        &mut **self
    }
}

#[unstable(feature = "receiver_trait", issue = "none")]
impl<T: ?Sized, A: Allocator> Receiver for Box<T, A> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator + ?Sized, A: Allocator> Iterator for Box<I, A> {
    type Item = I::Item;
    fn next(&mut self) -> Option<I::Item> {
        (**self).next()
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        (**self).size_hint()
    }
    fn nth(&mut self, n: usize) -> Option<I::Item> {
        (**self).nth(n)
    }
    fn last(self) -> Option<I::Item> {
        BoxIter::last(self)
    }
}

trait BoxIter {
    type Item;
    fn last(self) -> Option<Self::Item>;
}

impl<I: Iterator + ?Sized, A: Allocator> BoxIter for Box<I, A> {
    type Item = I::Item;
    default fn last(self) -> Option<I::Item> {
        #[inline]
        fn some<T>(_: Option<T>, x: T) -> Option<T> {
            Some(x)
        }

        self.fold(None, some)
    }
}

/// Specialization for sized `I`s that uses `I`s implementation of `last()`
/// instead of the default.
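// `BoxIter` exists so that `last` can be specialized. The blanket impl above
// cannot move a possibly-unsized `I` out of the box, so it falls back to
// folding over `next`; the sized impl below unboxes with `*self` and defers
// to `I`'s own `last`, which may be much cheaper (e.g. for ranges).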
#[stable(feature = "rust1", since = "1.0.0")] impl BoxIter for Box { fn last(self) -> Option { (*self).last() } } #[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for Box { fn next_back(&mut self) -> Option { (**self).next_back() } fn nth_back(&mut self, n: usize) -> Option { (**self).nth_back(n) } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for Box { fn len(&self) -> usize { (**self).len() } fn is_empty(&self) -> bool { (**self).is_empty() } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Box {} #[stable(feature = "boxed_closure_impls", since = "1.35.0")] impl + ?Sized, A: Allocator> FnOnce for Box { type Output = >::Output; extern "rust-call" fn call_once(self, args: Args) -> Self::Output { >::call_once(*self, args) } } #[stable(feature = "boxed_closure_impls", since = "1.35.0")] impl + ?Sized, A: Allocator> FnMut for Box { extern "rust-call" fn call_mut(&mut self, args: Args) -> Self::Output { >::call_mut(self, args) } } #[stable(feature = "boxed_closure_impls", since = "1.35.0")] impl + ?Sized, A: Allocator> Fn for Box { extern "rust-call" fn call(&self, args: Args) -> Self::Output { >::call(self, args) } } #[unstable(feature = "coerce_unsized", issue = "27732")] impl, U: ?Sized, A: Allocator> CoerceUnsized> for Box {} #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl, U: ?Sized> DispatchFromDyn> for Box {} #[stable(feature = "boxed_slice_from_iter", since = "1.32.0")] impl FromIterator for Box<[I]> { fn from_iter>(iter: T) -> Self { iter.into_iter().collect::>().into_boxed_slice() } } #[stable(feature = "box_slice_clone", since = "1.3.0")] impl Clone for Box<[T], A> { fn clone(&self) -> Self { let alloc = Box::allocator(self).clone(); self.to_vec_in(alloc).into_boxed_slice() } fn clone_from(&mut self, other: &Self) { if self.len() == other.len() { self.clone_from_slice(&other); } else { *self = other.clone(); } } } #[stable(feature = "box_borrow", since = "1.1.0")] impl borrow::Borrow for Box { fn borrow(&self) -> &T { &**self } } #[stable(feature = "box_borrow", since = "1.1.0")] impl borrow::BorrowMut for Box { fn borrow_mut(&mut self) -> &mut T { &mut **self } } #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")] impl AsRef for Box { fn as_ref(&self) -> &T { &**self } } #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")] impl AsMut for Box { fn as_mut(&mut self) -> &mut T { &mut **self } } /* Nota bene * * We could have chosen not to add this impl, and instead have written a * function of Pin> to Pin. Such a function would not be sound, * because Box implements Unpin even when T does not, as a result of * this impl. * * We chose this API instead of the alternative for a few reasons: * - Logically, it is helpful to understand pinning in regard to the * memory region being pointed to. For this reason none of the * standard library pointer types support projecting through a pin * (Box is the only pointer type in std for which this would be * safe.) * - It is in practice very useful to have Box be unconditionally * Unpin because of trait objects, for which the structural auto * trait functionality does not apply (e.g., Box would * otherwise not be Unpin). * * Another type with the same semantics as Box but only a conditional * implementation of `Unpin` (where `T: Unpin`) would be valid/safe, and * could have a method to project a Pin from it. 
 */
#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized, A: Allocator> Unpin for Box<T, A> where A: 'static {}

#[unstable(feature = "generator_trait", issue = "43122")]
impl<G: ?Sized + Generator<R> + Unpin, R, A: Allocator> Generator<R> for Box<G, A>
where
    A: 'static,
{
    type Yield = G::Yield;
    type Return = G::Return;

    fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState<Self::Yield, Self::Return> {
        G::resume(Pin::new(&mut *self), arg)
    }
}

#[unstable(feature = "generator_trait", issue = "43122")]
impl<G: ?Sized + Generator<R>, R, A: Allocator> Generator<R> for Pin<Box<G, A>>
where
    A: 'static,
{
    type Yield = G::Yield;
    type Return = G::Return;

    fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState<Self::Yield, Self::Return> {
        G::resume((*self).as_mut(), arg)
    }
}

#[stable(feature = "futures_api", since = "1.36.0")]
impl<F: ?Sized + Future + Unpin, A: Allocator> Future for Box<F, A>
where
    A: 'static,
{
    type Output = F::Output;

    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        F::poll(Pin::new(&mut *self), cx)
    }
}

#[unstable(feature = "async_stream", issue = "79024")]
impl<S: ?Sized + Stream + Unpin> Stream for Box<S> {
    type Item = S::Item;

    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<S::Item>> {
        Pin::new(&mut **self).poll_next(cx)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        (**self).size_hint()
    }
}

use super::*;
use std::cell::Cell;

#[test]
fn allocator_param() {
    use crate::alloc::AllocError;

    // Writing a test of integration between third-party
    // allocators and `RawVec` is a little tricky because the `RawVec`
    // API does not expose fallible allocation methods, so we
    // cannot check what happens when allocator is exhausted
    // (beyond detecting a panic).
    //
    // Instead, this just checks that the `RawVec` methods do at
    // least go through the Allocator API when it reserves
    // storage.

    // A dumb allocator that consumes a fixed amount of fuel
    // before allocation attempts start failing.
    struct BoundedAlloc {
        fuel: Cell<usize>,
    }
    unsafe impl Allocator for BoundedAlloc {
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            let size = layout.size();
            if size > self.fuel.get() {
                return Err(AllocError);
            }
            match Global.allocate(layout) {
                ok @ Ok(_) => {
                    self.fuel.set(self.fuel.get() - size);
                    ok
                }
                err @ Err(_) => err,
            }
        }
        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            unsafe { Global.deallocate(ptr, layout) }
        }
    }

    let a = BoundedAlloc { fuel: Cell::new(500) };
    let mut v: RawVec<u8, _> = RawVec::with_capacity_in(50, a);
    assert_eq!(v.alloc.fuel.get(), 450);
    v.reserve(50, 150); // (causes a realloc, thus using 50 + 150 = 200 units of fuel)
    assert_eq!(v.alloc.fuel.get(), 250);
}

#[test]
fn reserve_does_not_overallocate() {
    {
        let mut v: RawVec<u32> = RawVec::new();
        // First, `reserve` allocates like `reserve_exact`.
        v.reserve(0, 9);
        assert_eq!(9, v.capacity());
    }

    {
        let mut v: RawVec<u32> = RawVec::new();
        v.reserve(0, 7);
        assert_eq!(7, v.capacity());
        // 97 is more than double of 7, so `reserve` should work
        // like `reserve_exact`.
        v.reserve(7, 90);
        assert_eq!(97, v.capacity());
    }

    {
        let mut v: RawVec<u32> = RawVec::new();
        v.reserve(0, 12);
        assert_eq!(12, v.capacity());
        v.reserve(12, 3);
        // 3 is less than half of 12, so `reserve` must grow
        // exponentially. At the time of writing this test grow
        // factor is 2, so new capacity is 24, however, grow factor
        // of 1.5 is OK too. Hence `>= 18` in assert.
        assert!(v.capacity() >= 12 + 12 / 2);
    }
}

//! Test for `boxed` mod.
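// A small companion sketch, not part of the original suite: the in-place
// `From<Box<str>> for Box<[u8]>` conversion documented above should preserve
// the underlying bytes exactly.
#[test]
fn box_str_into_bytes() {
    let boxed: Box<str> = Box::from("hello");
    let bytes: Box<[u8]> = Box::from(boxed);
    assert_eq!(&*bytes, b"hello");
}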
use core::any::Any;
use core::clone::Clone;
use core::convert::TryInto;
use core::ops::Deref;
use core::result::Result::{Err, Ok};

use std::boxed::Box;

#[test]
fn test_owned_clone() {
    let a = Box::new(5);
    let b: Box<i32> = a.clone();
    assert!(a == b);
}

#[derive(PartialEq, Eq)]
struct Test;

#[test]
fn any_move() {
    let a = Box::new(8) as Box<dyn Any>;
    let b = Box::new(Test) as Box<dyn Any>;

    match a.downcast::<i32>() {
        Ok(a) => {
            assert!(a == Box::new(8));
        }
        Err(..) => panic!(),
    }
    match b.downcast::<Test>() {
        Ok(a) => {
            assert!(a == Box::new(Test));
        }
        Err(..) => panic!(),
    }

    let a = Box::new(8) as Box<dyn Any>;
    let b = Box::new(Test) as Box<dyn Any>;

    assert!(a.downcast::<Box<Test>>().is_err());
    assert!(b.downcast::<Box<i32>>().is_err());
}

#[test]
fn test_show() {
    let a = Box::new(8) as Box<dyn Any>;
    let b = Box::new(Test) as Box<dyn Any>;
    let a_str = format!("{:?}", a);
    let b_str = format!("{:?}", b);
    assert_eq!(a_str, "Any { .. }");
    assert_eq!(b_str, "Any { .. }");

    static EIGHT: usize = 8;
    static TEST: Test = Test;
    let a = &EIGHT as &dyn Any;
    let b = &TEST as &dyn Any;
    let s = format!("{:?}", a);
    assert_eq!(s, "Any { .. }");
    let s = format!("{:?}", b);
    assert_eq!(s, "Any { .. }");
}

#[test]
fn deref() {
    fn homura<T: Deref<Target = i32>>(_: T) {}
    homura(Box::new(765));
}

#[test]
fn raw_sized() {
    let x = Box::new(17);
    let p = Box::into_raw(x);
    unsafe {
        assert_eq!(17, *p);
        *p = 19;
        let y = Box::from_raw(p);
        assert_eq!(19, *y);
    }
}

#[test]
fn raw_trait() {
    trait Foo {
        fn get(&self) -> u32;
        fn set(&mut self, value: u32);
    }

    struct Bar(u32);

    impl Foo for Bar {
        fn get(&self) -> u32 {
            self.0
        }

        fn set(&mut self, value: u32) {
            self.0 = value;
        }
    }

    let x: Box<dyn Foo> = Box::new(Bar(17));
    let p = Box::into_raw(x);
    unsafe {
        assert_eq!(17, (*p).get());
        (*p).set(19);
        let y: Box<dyn Foo> = Box::from_raw(p);
        assert_eq!(19, y.get());
    }
}

#[test]
fn f64_slice() {
    let slice: &[f64] = &[-1.0, 0.0, 1.0, f64::INFINITY];
    let boxed: Box<[f64]> = Box::from(slice);
    assert_eq!(&*boxed, slice)
}

#[test]
fn i64_slice() {
    let slice: &[i64] = &[i64::MIN, -2, -1, 0, 1, 2, i64::MAX];
    let boxed: Box<[i64]> = Box::from(slice);
    assert_eq!(&*boxed, slice)
}

#[test]
fn str_slice() {
    let s = "Hello, world!";
    let boxed: Box<str> = Box::from(s);
    assert_eq!(&*boxed, s)
}

#[test]
fn boxed_slice_from_iter() {
    let iter = 0..100;
    let boxed: Box<[u32]> = iter.collect();
    assert_eq!(boxed.len(), 100);
    assert_eq!(boxed[7], 7);
}

#[test]
fn test_array_from_slice() {
    let v = vec![1, 2, 3];
    let r: Box<[u32]> = v.into_boxed_slice();

    let a: Result<Box<[u32; 3]>, _> = r.clone().try_into();
    assert!(a.is_ok());

    let a: Result<Box<[u32; 2]>, _> = r.clone().try_into();
    assert!(a.is_err());
}
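// A further sketch, not in the original suite: a failed `downcast` must hand
// the original box back intact, so it can be retried with the right type.
#[test]
fn downcast_err_returns_original() {
    let a = Box::new(8) as Box<dyn Any>;
    let a = match a.downcast::<String>() {
        Ok(_) => panic!("an i32 is not a String"),
        Err(original) => original,
    };
    assert_eq!(*a.downcast::<i32>().unwrap(), 8);
}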