1use std::collections::LinkedList;
2use test::Bencher;
3
4#[bench]
5fn bench_collect_into(b: &mut Bencher) {
6    let v = &[0; 64];
7    b.iter(|| {
8        let _: LinkedList<_> = v.iter().cloned().collect();
9    })
10}
11
12#[bench]
13fn bench_push_front(b: &mut Bencher) {
14    let mut m: LinkedList<_> = LinkedList::new();
15    b.iter(|| {
16        m.push_front(0);
17    })
18}
19
20#[bench]
21fn bench_push_back(b: &mut Bencher) {
22    let mut m: LinkedList<_> = LinkedList::new();
23    b.iter(|| {
24        m.push_back(0);
25    })
26}
27
28#[bench]
29fn bench_push_back_pop_back(b: &mut Bencher) {
30    let mut m: LinkedList<_> = LinkedList::new();
31    b.iter(|| {
32        m.push_back(0);
33        m.pop_back();
34    })
35}
36
37#[bench]
38fn bench_push_front_pop_front(b: &mut Bencher) {
39    let mut m: LinkedList<_> = LinkedList::new();
40    b.iter(|| {
41        m.push_front(0);
42        m.pop_front();
43    })
44}
45
46#[bench]
47fn bench_iter(b: &mut Bencher) {
48    let v = &[0; 128];
49    let m: LinkedList<_> = v.iter().cloned().collect();
50    b.iter(|| {
51        assert!(m.iter().count() == 128);
52    })
53}
54#[bench]
55fn bench_iter_mut(b: &mut Bencher) {
56    let v = &[0; 128];
57    let mut m: LinkedList<_> = v.iter().cloned().collect();
58    b.iter(|| {
59        assert!(m.iter_mut().count() == 128);
60    })
61}
62#[bench]
63fn bench_iter_rev(b: &mut Bencher) {
64    let v = &[0; 128];
65    let m: LinkedList<_> = v.iter().cloned().collect();
66    b.iter(|| {
67        assert!(m.iter().rev().count() == 128);
68    })
69}
70#[bench]
71fn bench_iter_mut_rev(b: &mut Bencher) {
72    let v = &[0; 128];
73    let mut m: LinkedList<_> = v.iter().cloned().collect();
74    b.iter(|| {
75        assert!(m.iter_mut().rev().count() == 128);
76    })
77}
78use test::{black_box, Bencher};
79
80#[bench]
81fn char_iterator(b: &mut Bencher) {
82    let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
83
84    b.iter(|| s.chars().count());
85}
86
87#[bench]
88fn char_iterator_for(b: &mut Bencher) {
89    let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
90
91    b.iter(|| {
92        for ch in s.chars() {
93            black_box(ch);
94        }
95    });
96}
97
98#[bench]
99fn char_iterator_ascii(b: &mut Bencher) {
100    let s = "Mary had a little lamb, Little lamb
101    Mary had a little lamb, Little lamb
102    Mary had a little lamb, Little lamb
103    Mary had a little lamb, Little lamb
104    Mary had a little lamb, Little lamb
105    Mary had a little lamb, Little lamb";
106
107    b.iter(|| s.chars().count());
108}
109
110#[bench]
111fn char_iterator_rev(b: &mut Bencher) {
112    let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
113
114    b.iter(|| s.chars().rev().count());
115}
116
117#[bench]
118fn char_iterator_rev_for(b: &mut Bencher) {
119    let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
120
121    b.iter(|| {
122        for ch in s.chars().rev() {
123            black_box(ch);
124        }
125    });
126}
127
128#[bench]
129fn char_indicesator(b: &mut Bencher) {
130    let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
131    let len = s.chars().count();
132
133    b.iter(|| assert_eq!(s.char_indices().count(), len));
134}
135
136#[bench]
137fn char_indicesator_rev(b: &mut Bencher) {
138    let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
139    let len = s.chars().count();
140
141    b.iter(|| assert_eq!(s.char_indices().rev().count(), len));
142}
143
144#[bench]
145fn split_unicode_ascii(b: &mut Bencher) {
146    let s = "ประเทศไทย中华Việt Namประเทศไทย中华Việt Nam";
147
148    b.iter(|| assert_eq!(s.split('V').count(), 3));
149}
150
151#[bench]
152fn split_ascii(b: &mut Bencher) {
153    let s = "Mary had a little lamb, Little lamb, little-lamb.";
154    let len = s.split(' ').count();
155
156    b.iter(|| assert_eq!(s.split(' ').count(), len));
157}
158
159#[bench]
160fn split_extern_fn(b: &mut Bencher) {
161    let s = "Mary had a little lamb, Little lamb, little-lamb.";
162    let len = s.split(' ').count();
163    fn pred(c: char) -> bool {
164        c == ' '
165    }
166
167    b.iter(|| assert_eq!(s.split(pred).count(), len));
168}
169
170#[bench]
171fn split_closure(b: &mut Bencher) {
172    let s = "Mary had a little lamb, Little lamb, little-lamb.";
173    let len = s.split(' ').count();
174
175    b.iter(|| assert_eq!(s.split(|c: char| c == ' ').count(), len));
176}
177
178#[bench]
179fn split_slice(b: &mut Bencher) {
180    let s = "Mary had a little lamb, Little lamb, little-lamb.";
181    let len = s.split(' ').count();
182
183    let c: &[char] = &[' '];
184    b.iter(|| assert_eq!(s.split(c).count(), len));
185}
186
187#[bench]
188fn bench_join(b: &mut Bencher) {
189    let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
190    let sep = "→";
191    let v = vec![s, s, s, s, s, s, s, s, s, s];
192    b.iter(|| {
193        assert_eq!(v.join(sep).len(), s.len() * 10 + sep.len() * 9);
194    })
195}
196
197#[bench]
198fn bench_contains_short_short(b: &mut Bencher) {
199    let haystack = "Lorem ipsum dolor sit amet, consectetur adipiscing elit.";
200    let needle = "sit";
201
202    b.iter(|| {
203        assert!(haystack.contains(needle));
204    })
205}
206
/// `contains` with a short needle that is absent from a multi-paragraph
/// haystack: measures scan cost over a few KB of ASCII text.
#[bench]
fn bench_contains_short_long(b: &mut Bencher) {
    // Paragraphs are joined with `\`-continuations; blank lines stay in the literal.
    let haystack = "\
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem sit amet dolor \
ultricies condimentum. Praesent iaculis purus elit, ac malesuada quam malesuada in. Duis sed orci \
eros. Suspendisse sit amet magna mollis, mollis nunc luctus, imperdiet mi. Integer fringilla non \
sem ut lacinia. Fusce varius tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec \
tempus vel, gravida nec quam.

In est dui, tincidunt sed tempus interdum, adipiscing laoreet ante. Etiam tempor, tellus quis \
sagittis interdum, nulla purus mattis sem, quis auctor erat odio ac tellus. In nec nunc sit amet \
diam volutpat molestie at sed ipsum. Vestibulum laoreet consequat vulputate. Integer accumsan \
lorem ac dignissim placerat. Suspendisse convallis faucibus lorem. Aliquam erat volutpat. In vel \
eleifend felis. Sed suscipit nulla lorem, sed mollis est sollicitudin et. Nam fermentum egestas \
interdum. Curabitur ut nisi justo.

Sed sollicitudin ipsum tellus, ut condimentum leo eleifend nec. Cras ut velit ante. Phasellus nec \
mollis odio. Mauris molestie erat in arcu mattis, at aliquet dolor vehicula. Quisque malesuada \
lectus sit amet nisi pretium, a condimentum ipsum porta. Morbi at dapibus diam. Praesent egestas \
est sed risus elementum, eu rutrum metus ultrices. Etiam fermentum consectetur magna, id rutrum \
felis accumsan a. Aliquam ut pellentesque libero. Sed mi nulla, lobortis eu tortor id, suscipit \
ultricies neque. Morbi iaculis sit amet risus at iaculis. Praesent eget ligula quis turpis \
feugiat suscipit vel non arcu. Interdum et malesuada fames ac ante ipsum primis in faucibus. \
Aliquam sit amet placerat lorem.

Cras a lacus vel ante posuere elementum. Nunc est leo, bibendum ut facilisis vel, bibendum at \
mauris. Nullam adipiscing diam vel odio ornare, luctus adipiscing mi luctus. Nulla facilisi. \
Mauris adipiscing bibendum neque, quis adipiscing lectus tempus et. Sed feugiat erat et nisl \
lobortis pharetra. Donec vitae erat enim. Nullam sit amet felis et quam lacinia tincidunt. Aliquam \
suscipit dapibus urna. Sed volutpat urna in magna pulvinar volutpat. Phasellus nec tellus ac diam \
cursus accumsan.

Nam lectus enim, dapibus non nisi tempor, consectetur convallis massa. Maecenas eleifend dictum \
feugiat. Etiam quis mauris vel risus luctus mattis a a nunc. Nullam orci quam, imperdiet id \
vehicula in, porttitor ut nibh. Duis sagittis adipiscing nisl vitae congue. Donec mollis risus eu \
leo suscipit, varius porttitor nulla porta. Pellentesque ut sem nec nisi euismod vehicula. Nulla \
malesuada sollicitudin quam eu fermentum.";
    // "english" never occurs in the Latin text, so every search scans to the end.
    let needle = "english";

    b.iter(|| {
        assert!(!haystack.contains(needle));
    })
}
250
251#[bench]
252fn bench_contains_bad_naive(b: &mut Bencher) {
253    let haystack = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
254    let needle = "aaaaaaaab";
255
256    b.iter(|| {
257        assert!(!haystack.contains(needle));
258    })
259}
260
261#[bench]
262fn bench_contains_equal(b: &mut Bencher) {
263    let haystack = "Lorem ipsum dolor sit amet, consectetur adipiscing elit.";
264    let needle = "Lorem ipsum dolor sit amet, consectetur adipiscing elit.";
265
266    b.iter(|| {
267        assert!(haystack.contains(needle));
268    })
269}
270
// Expands to one `#[bench]` function `$name` that binds `$str` to the ident
// `$s` and evaluates `$code` `$iters` times per measured iteration.
macro_rules! make_test_inner {
    ($s:ident, $code:expr, $name:ident, $str:expr, $iters:expr) => {
        #[bench]
        fn $name(bencher: &mut Bencher) {
            let mut $s = $str;
            // Make the input opaque so the compiler can't const-fold $code on a literal.
            black_box(&mut $s);
            bencher.iter(|| {
                for _ in 0..$iters {
                    // Black-box each result so the work isn't optimized away.
                    black_box($code);
                }
            });
        }
    };
}
285
// Generates a benchmark module `$name` with one bench per canonical input
// string (short ASCII, short mixed-script, short emoji pile, long Lorem
// Ipsum), each running `$code` over the binding `$s`. The optional `$iters`
// (default 1) repeats `$code` within a single measured iteration.
macro_rules! make_test {
    ($name:ident, $s:ident, $code:expr) => {
        // Single execution of $code per iteration by default.
        make_test!($name, $s, $code, 1);
    };
    ($name:ident, $s:ident, $code:expr, $iters:expr) => {
        mod $name {
            use test::Bencher;
            use test::black_box;

            // Short strings: 65 bytes each
            make_test_inner!($s, $code, short_ascii,
                "Mary had a little lamb, Little lamb Mary had a littl lamb, lamb!", $iters);
            make_test_inner!($s, $code, short_mixed,
                "ศไทย中华Việt Nam; Mary had a little lamb, Little lam!", $iters);
            make_test_inner!($s, $code, short_pile_of_poo,
                "����������������!", $iters);
            // Long string: multi-paragraph Lorem Ipsum.
            make_test_inner!($s, $code, long_lorem_ipsum,"\
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem sit amet dolor \
ultricies condimentum. Praesent iaculis purus elit, ac malesuada quam malesuada in. Duis sed orci \
eros. Suspendisse sit amet magna mollis, mollis nunc luctus, imperdiet mi. Integer fringilla non \
sem ut lacinia. Fusce varius tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec \
tempus vel, gravida nec quam.

In est dui, tincidunt sed tempus interdum, adipiscing laoreet ante. Etiam tempor, tellus quis \
sagittis interdum, nulla purus mattis sem, quis auctor erat odio ac tellus. In nec nunc sit amet \
diam volutpat molestie at sed ipsum. Vestibulum laoreet consequat vulputate. Integer accumsan \
lorem ac dignissim placerat. Suspendisse convallis faucibus lorem. Aliquam erat volutpat. In vel \
eleifend felis. Sed suscipit nulla lorem, sed mollis est sollicitudin et. Nam fermentum egestas \
interdum. Curabitur ut nisi justo.

Sed sollicitudin ipsum tellus, ut condimentum leo eleifend nec. Cras ut velit ante. Phasellus nec \
mollis odio. Mauris molestie erat in arcu mattis, at aliquet dolor vehicula. Quisque malesuada \
lectus sit amet nisi pretium, a condimentum ipsum porta. Morbi at dapibus diam. Praesent egestas \
est sed risus elementum, eu rutrum metus ultrices. Etiam fermentum consectetur magna, id rutrum \
felis accumsan a. Aliquam ut pellentesque libero. Sed mi nulla, lobortis eu tortor id, suscipit \
ultricies neque. Morbi iaculis sit amet risus at iaculis. Praesent eget ligula quis turpis \
feugiat suscipit vel non arcu. Interdum et malesuada fames ac ante ipsum primis in faucibus. \
Aliquam sit amet placerat lorem.

Cras a lacus vel ante posuere elementum. Nunc est leo, bibendum ut facilisis vel, bibendum at \
mauris. Nullam adipiscing diam vel odio ornare, luctus adipiscing mi luctus. Nulla facilisi. \
Mauris adipiscing bibendum neque, quis adipiscing lectus tempus et. Sed feugiat erat et nisl \
lobortis pharetra. Donec vitae erat enim. Nullam sit amet felis et quam lacinia tincidunt. Aliquam \
suscipit dapibus urna. Sed volutpat urna in magna pulvinar volutpat. Phasellus nec tellus ac diam \
cursus accumsan.

Nam lectus enim, dapibus non nisi tempor, consectetur convallis massa. Maecenas eleifend dictum \
feugiat. Etiam quis mauris vel risus luctus mattis a a nunc. Nullam orci quam, imperdiet id \
vehicula in, porttitor ut nibh. Duis sagittis adipiscing nisl vitae congue. Donec mollis risus eu \
leo suscipit, varius porttitor nulla porta. Pellentesque ut sem nec nisi euismod vehicula. Nulla \
malesuada sollicitudin quam eu fermentum!", $iters);
        }
    }
}
340
// Each invocation below expands (via make_test!) into a module of four
// benchmarks: short_ascii / short_mixed / short_pile_of_poo / long_lorem_ipsum.

make_test!(chars_count, s, s.chars().count());

// `contains` with a &str pattern vs a char pattern.
make_test!(contains_bang_str, s, s.contains("!"));
make_test!(contains_bang_char, s, s.contains('!'));

make_test!(match_indices_a_str, s, s.match_indices("a").count());

make_test!(split_a_str, s, s.split("a").count());

// Trimming with a char-predicate pattern, from both ends and each end alone.
make_test!(trim_ascii_char, s, { s.trim_matches(|c: char| c.is_ascii()) });
make_test!(trim_start_ascii_char, s, { s.trim_start_matches(|c: char| c.is_ascii()) });
make_test!(trim_end_ascii_char, s, { s.trim_end_matches(|c: char| c.is_ascii()) });

// Forward/backward search for a char absent from the inputs.
make_test!(find_underscore_char, s, s.find('_'));
make_test!(rfind_underscore_char, s, s.rfind('_'));
make_test!(find_underscore_str, s, s.find("_"));

// Same, with a four-byte (supplementary-plane) char.
make_test!(find_zzz_char, s, s.find('\u{1F4A4}'));
make_test!(rfind_zzz_char, s, s.rfind('\u{1F4A4}'));
make_test!(find_zzz_str, s, s.find("\u{1F4A4}"));

// Prefix/suffix checks are cheap, so run 1024 per measured iteration.
make_test!(starts_with_ascii_char, s, s.starts_with('/'), 1024);
make_test!(ends_with_ascii_char, s, s.ends_with('/'), 1024);
make_test!(starts_with_unichar, s, s.starts_with('\u{1F4A4}'), 1024);
make_test!(ends_with_unichar, s, s.ends_with('\u{1F4A4}'), 1024);
make_test!(starts_with_str, s, s.starts_with("����������������"), 1024);
make_test!(ends_with_str, s, s.ends_with("����������������"), 1024);

make_test!(split_space_char, s, s.split(' ').count());
make_test!(split_terminator_space_char, s, s.split_terminator(' ').count());

make_test!(splitn_space_char, s, s.splitn(10, ' ').count());
make_test!(rsplitn_space_char, s, s.rsplitn(10, ' ').count());

make_test!(split_space_str, s, s.split(" ").count());
make_test!(split_ad_str, s, s.split("ad").count());
377use std::iter::repeat;
378use test::{black_box, Bencher};
379
380#[bench]
381fn bench_with_capacity(b: &mut Bencher) {
382    b.iter(|| String::with_capacity(100));
383}
384
385#[bench]
386fn bench_push_str(b: &mut Bencher) {
387    let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
388    b.iter(|| {
389        let mut r = String::new();
390        r.push_str(s);
391    });
392}
393
394const REPETITIONS: u64 = 10_000;
395
396#[bench]
397fn bench_push_str_one_byte(b: &mut Bencher) {
398    b.bytes = REPETITIONS;
399    b.iter(|| {
400        let mut r = String::new();
401        for _ in 0..REPETITIONS {
402            r.push_str("a")
403        }
404    });
405}
406
407#[bench]
408fn bench_push_char_one_byte(b: &mut Bencher) {
409    b.bytes = REPETITIONS;
410    b.iter(|| {
411        let mut r = String::new();
412        for _ in 0..REPETITIONS {
413            r.push('a')
414        }
415    });
416}
417
418#[bench]
419fn bench_push_char_two_bytes(b: &mut Bencher) {
420    b.bytes = REPETITIONS * 2;
421    b.iter(|| {
422        let mut r = String::new();
423        for _ in 0..REPETITIONS {
424            r.push('â')
425        }
426    });
427}
428
429#[bench]
430fn from_utf8_lossy_100_ascii(b: &mut Bencher) {
431    let s = b"Hello there, the quick brown fox jumped over the lazy dog! \
432              Lorem ipsum dolor sit amet, consectetur. ";
433
434    assert_eq!(100, s.len());
435    b.iter(|| {
436        let _ = String::from_utf8_lossy(s);
437    });
438}
439
#[bench]
fn from_utf8_lossy_100_multibyte(b: &mut Bencher) {
    // 100 bytes of multi-byte UTF-8 (Thai/Arabic/CJK mix).
    // NOTE(review): several characters in this literal appear mojibake'd by
    // text extraction — verify the literal (and the 100-byte assertion) against
    // the original source before relying on it.
    let s = "������ปรدولة الكويتทศไทย中华�������".as_bytes();
    assert_eq!(100, s.len());
    b.iter(|| {
        let _ = String::from_utf8_lossy(s);
    });
}
448
449#[bench]
450fn from_utf8_lossy_invalid(b: &mut Bencher) {
451    let s = b"Hello\xC0\x80 There\xE6\x83 Goodbye";
452    b.iter(|| {
453        let _ = String::from_utf8_lossy(s);
454    });
455}
456
457#[bench]
458fn from_utf8_lossy_100_invalid(b: &mut Bencher) {
459    let s = repeat(0xf5).take(100).collect::<Vec<_>>();
460    b.iter(|| {
461        let _ = String::from_utf8_lossy(&s);
462    });
463}
464
465#[bench]
466fn bench_exact_size_shrink_to_fit(b: &mut Bencher) {
467    let s = "Hello there, the quick brown fox jumped over the lazy dog! \
468             Lorem ipsum dolor sit amet, consectetur. ";
469    // ensure our operation produces an exact-size string before we benchmark it
470    let mut r = String::with_capacity(s.len());
471    r.push_str(s);
472    assert_eq!(r.len(), r.capacity());
473    b.iter(|| {
474        let mut r = String::with_capacity(s.len());
475        r.push_str(s);
476        r.shrink_to_fit();
477        r
478    });
479}
480
481#[bench]
482fn bench_from_str(b: &mut Bencher) {
483    let s = "Hello there, the quick brown fox jumped over the lazy dog! \
484             Lorem ipsum dolor sit amet, consectetur. ";
485    b.iter(|| String::from(s))
486}
487
488#[bench]
489fn bench_from(b: &mut Bencher) {
490    let s = "Hello there, the quick brown fox jumped over the lazy dog! \
491             Lorem ipsum dolor sit amet, consectetur. ";
492    b.iter(|| String::from(s))
493}
494
495#[bench]
496fn bench_to_string(b: &mut Bencher) {
497    let s = "Hello there, the quick brown fox jumped over the lazy dog! \
498             Lorem ipsum dolor sit amet, consectetur. ";
499    b.iter(|| s.to_string())
500}
501
502#[bench]
503fn bench_insert_char_short(b: &mut Bencher) {
504    let s = "Hello, World!";
505    b.iter(|| {
506        let mut x = String::from(s);
507        black_box(&mut x).insert(6, black_box(' '));
508        x
509    })
510}
511
512#[bench]
513fn bench_insert_char_long(b: &mut Bencher) {
514    let s = "Hello, World!";
515    b.iter(|| {
516        let mut x = String::from(s);
517        black_box(&mut x).insert(6, black_box('❤'));
518        x
519    })
520}
521
522#[bench]
523fn bench_insert_str_short(b: &mut Bencher) {
524    let s = "Hello, World!";
525    b.iter(|| {
526        let mut x = String::from(s);
527        black_box(&mut x).insert_str(6, black_box(" "));
528        x
529    })
530}
531
532#[bench]
533fn bench_insert_str_long(b: &mut Bencher) {
534    let s = "Hello, World!";
535    b.iter(|| {
536        let mut x = String::from(s);
537        black_box(&mut x).insert_str(6, black_box(" rustic "));
538        x
539    })
540}
541use rand::RngCore;
542use std::iter::{repeat, FromIterator};
543use test::{black_box, Bencher};
544
545#[bench]
546fn bench_new(b: &mut Bencher) {
547    b.iter(|| Vec::<u32>::new())
548}
549
550fn do_bench_with_capacity(b: &mut Bencher, src_len: usize) {
551    b.bytes = src_len as u64;
552
553    b.iter(|| Vec::<u32>::with_capacity(src_len))
554}
555
556#[bench]
557fn bench_with_capacity_0000(b: &mut Bencher) {
558    do_bench_with_capacity(b, 0)
559}
560
561#[bench]
562fn bench_with_capacity_0010(b: &mut Bencher) {
563    do_bench_with_capacity(b, 10)
564}
565
566#[bench]
567fn bench_with_capacity_0100(b: &mut Bencher) {
568    do_bench_with_capacity(b, 100)
569}
570
571#[bench]
572fn bench_with_capacity_1000(b: &mut Bencher) {
573    do_bench_with_capacity(b, 1000)
574}
575
576fn do_bench_from_fn(b: &mut Bencher, src_len: usize) {
577    b.bytes = src_len as u64;
578
579    b.iter(|| (0..src_len).collect::<Vec<_>>())
580}
581
582#[bench]
583fn bench_from_fn_0000(b: &mut Bencher) {
584    do_bench_from_fn(b, 0)
585}
586
587#[bench]
588fn bench_from_fn_0010(b: &mut Bencher) {
589    do_bench_from_fn(b, 10)
590}
591
592#[bench]
593fn bench_from_fn_0100(b: &mut Bencher) {
594    do_bench_from_fn(b, 100)
595}
596
597#[bench]
598fn bench_from_fn_1000(b: &mut Bencher) {
599    do_bench_from_fn(b, 1000)
600}
601
602fn do_bench_from_elem(b: &mut Bencher, src_len: usize) {
603    b.bytes = src_len as u64;
604
605    b.iter(|| repeat(5).take(src_len).collect::<Vec<usize>>())
606}
607
608#[bench]
609fn bench_from_elem_0000(b: &mut Bencher) {
610    do_bench_from_elem(b, 0)
611}
612
613#[bench]
614fn bench_from_elem_0010(b: &mut Bencher) {
615    do_bench_from_elem(b, 10)
616}
617
618#[bench]
619fn bench_from_elem_0100(b: &mut Bencher) {
620    do_bench_from_elem(b, 100)
621}
622
623#[bench]
624fn bench_from_elem_1000(b: &mut Bencher) {
625    do_bench_from_elem(b, 1000)
626}
627
628fn do_bench_from_slice(b: &mut Bencher, src_len: usize) {
629    let src: Vec<_> = FromIterator::from_iter(0..src_len);
630
631    b.bytes = src_len as u64;
632
633    b.iter(|| src.as_slice().to_vec());
634}
635
636#[bench]
637fn bench_from_slice_0000(b: &mut Bencher) {
638    do_bench_from_slice(b, 0)
639}
640
641#[bench]
642fn bench_from_slice_0010(b: &mut Bencher) {
643    do_bench_from_slice(b, 10)
644}
645
646#[bench]
647fn bench_from_slice_0100(b: &mut Bencher) {
648    do_bench_from_slice(b, 100)
649}
650
651#[bench]
652fn bench_from_slice_1000(b: &mut Bencher) {
653    do_bench_from_slice(b, 1000)
654}
655
656fn do_bench_from_iter(b: &mut Bencher, src_len: usize) {
657    let src: Vec<_> = FromIterator::from_iter(0..src_len);
658
659    b.bytes = src_len as u64;
660
661    b.iter(|| {
662        let dst: Vec<_> = FromIterator::from_iter(src.iter().cloned());
663        dst
664    });
665}
666
667#[bench]
668fn bench_from_iter_0000(b: &mut Bencher) {
669    do_bench_from_iter(b, 0)
670}
671
672#[bench]
673fn bench_from_iter_0010(b: &mut Bencher) {
674    do_bench_from_iter(b, 10)
675}
676
677#[bench]
678fn bench_from_iter_0100(b: &mut Bencher) {
679    do_bench_from_iter(b, 100)
680}
681
682#[bench]
683fn bench_from_iter_1000(b: &mut Bencher) {
684    do_bench_from_iter(b, 1000)
685}
686
687fn do_bench_extend(b: &mut Bencher, dst_len: usize, src_len: usize) {
688    let dst: Vec<_> = FromIterator::from_iter(0..dst_len);
689    let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
690
691    b.bytes = src_len as u64;
692
693    b.iter(|| {
694        let mut dst = dst.clone();
695        dst.extend(src.clone());
696        dst
697    });
698}
699
700#[bench]
701fn bench_extend_0000_0000(b: &mut Bencher) {
702    do_bench_extend(b, 0, 0)
703}
704
705#[bench]
706fn bench_extend_0000_0010(b: &mut Bencher) {
707    do_bench_extend(b, 0, 10)
708}
709
710#[bench]
711fn bench_extend_0000_0100(b: &mut Bencher) {
712    do_bench_extend(b, 0, 100)
713}
714
715#[bench]
716fn bench_extend_0000_1000(b: &mut Bencher) {
717    do_bench_extend(b, 0, 1000)
718}
719
720#[bench]
721fn bench_extend_0010_0010(b: &mut Bencher) {
722    do_bench_extend(b, 10, 10)
723}
724
725#[bench]
726fn bench_extend_0100_0100(b: &mut Bencher) {
727    do_bench_extend(b, 100, 100)
728}
729
730#[bench]
731fn bench_extend_1000_1000(b: &mut Bencher) {
732    do_bench_extend(b, 1000, 1000)
733}
734
735fn do_bench_extend_from_slice(b: &mut Bencher, dst_len: usize, src_len: usize) {
736    let dst: Vec<_> = FromIterator::from_iter(0..dst_len);
737    let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
738
739    b.bytes = src_len as u64;
740
741    b.iter(|| {
742        let mut dst = dst.clone();
743        dst.extend_from_slice(&src);
744        dst
745    });
746}
747
748#[bench]
749fn bench_extend_recycle(b: &mut Bencher) {
750    let mut data = vec![0; 1000];
751
752    b.iter(|| {
753        let tmp = std::mem::take(&mut data);
754        let mut to_extend = black_box(Vec::new());
755        to_extend.extend(tmp.into_iter());
756        data = black_box(to_extend);
757    });
758
759    black_box(data);
760}
761
762#[bench]
763fn bench_extend_from_slice_0000_0000(b: &mut Bencher) {
764    do_bench_extend_from_slice(b, 0, 0)
765}
766
767#[bench]
768fn bench_extend_from_slice_0000_0010(b: &mut Bencher) {
769    do_bench_extend_from_slice(b, 0, 10)
770}
771
772#[bench]
773fn bench_extend_from_slice_0000_0100(b: &mut Bencher) {
774    do_bench_extend_from_slice(b, 0, 100)
775}
776
777#[bench]
778fn bench_extend_from_slice_0000_1000(b: &mut Bencher) {
779    do_bench_extend_from_slice(b, 0, 1000)
780}
781
782#[bench]
783fn bench_extend_from_slice_0010_0010(b: &mut Bencher) {
784    do_bench_extend_from_slice(b, 10, 10)
785}
786
787#[bench]
788fn bench_extend_from_slice_0100_0100(b: &mut Bencher) {
789    do_bench_extend_from_slice(b, 100, 100)
790}
791
792#[bench]
793fn bench_extend_from_slice_1000_1000(b: &mut Bencher) {
794    do_bench_extend_from_slice(b, 1000, 1000)
795}
796
797fn do_bench_clone(b: &mut Bencher, src_len: usize) {
798    let src: Vec<usize> = FromIterator::from_iter(0..src_len);
799
800    b.bytes = src_len as u64;
801
802    b.iter(|| src.clone());
803}
804
805#[bench]
806fn bench_clone_0000(b: &mut Bencher) {
807    do_bench_clone(b, 0)
808}
809
810#[bench]
811fn bench_clone_0010(b: &mut Bencher) {
812    do_bench_clone(b, 10)
813}
814
815#[bench]
816fn bench_clone_0100(b: &mut Bencher) {
817    do_bench_clone(b, 100)
818}
819
820#[bench]
821fn bench_clone_1000(b: &mut Bencher) {
822    do_bench_clone(b, 1000)
823}
824
825fn do_bench_clone_from(b: &mut Bencher, times: usize, dst_len: usize, src_len: usize) {
826    let dst: Vec<_> = FromIterator::from_iter(0..src_len);
827    let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
828
829    b.bytes = (times * src_len) as u64;
830
831    b.iter(|| {
832        let mut dst = dst.clone();
833
834        for _ in 0..times {
835            dst.clone_from(&src);
836            dst = black_box(dst);
837        }
838        dst
839    });
840}
841
842#[bench]
843fn bench_clone_from_01_0000_0000(b: &mut Bencher) {
844    do_bench_clone_from(b, 1, 0, 0)
845}
846
847#[bench]
848fn bench_clone_from_01_0000_0010(b: &mut Bencher) {
849    do_bench_clone_from(b, 1, 0, 10)
850}
851
852#[bench]
853fn bench_clone_from_01_0000_0100(b: &mut Bencher) {
854    do_bench_clone_from(b, 1, 0, 100)
855}
856
857#[bench]
858fn bench_clone_from_01_0000_1000(b: &mut Bencher) {
859    do_bench_clone_from(b, 1, 0, 1000)
860}
861
862#[bench]
863fn bench_clone_from_01_0010_0010(b: &mut Bencher) {
864    do_bench_clone_from(b, 1, 10, 10)
865}
866
867#[bench]
868fn bench_clone_from_01_0100_0100(b: &mut Bencher) {
869    do_bench_clone_from(b, 1, 100, 100)
870}
871
872#[bench]
873fn bench_clone_from_01_1000_1000(b: &mut Bencher) {
874    do_bench_clone_from(b, 1, 1000, 1000)
875}
876
877#[bench]
878fn bench_clone_from_01_0010_0100(b: &mut Bencher) {
879    do_bench_clone_from(b, 1, 10, 100)
880}
881
882#[bench]
883fn bench_clone_from_01_0100_1000(b: &mut Bencher) {
884    do_bench_clone_from(b, 1, 100, 1000)
885}
886
887#[bench]
888fn bench_clone_from_01_0010_0000(b: &mut Bencher) {
889    do_bench_clone_from(b, 1, 10, 0)
890}
891
892#[bench]
893fn bench_clone_from_01_0100_0010(b: &mut Bencher) {
894    do_bench_clone_from(b, 1, 100, 10)
895}
896
897#[bench]
898fn bench_clone_from_01_1000_0100(b: &mut Bencher) {
899    do_bench_clone_from(b, 1, 1000, 100)
900}
901
902#[bench]
903fn bench_clone_from_10_0000_0000(b: &mut Bencher) {
904    do_bench_clone_from(b, 10, 0, 0)
905}
906
907#[bench]
908fn bench_clone_from_10_0000_0010(b: &mut Bencher) {
909    do_bench_clone_from(b, 10, 0, 10)
910}
911
912#[bench]
913fn bench_clone_from_10_0000_0100(b: &mut Bencher) {
914    do_bench_clone_from(b, 10, 0, 100)
915}
916
917#[bench]
918fn bench_clone_from_10_0000_1000(b: &mut Bencher) {
919    do_bench_clone_from(b, 10, 0, 1000)
920}
921
922#[bench]
923fn bench_clone_from_10_0010_0010(b: &mut Bencher) {
924    do_bench_clone_from(b, 10, 10, 10)
925}
926
927#[bench]
928fn bench_clone_from_10_0100_0100(b: &mut Bencher) {
929    do_bench_clone_from(b, 10, 100, 100)
930}
931
932#[bench]
933fn bench_clone_from_10_1000_1000(b: &mut Bencher) {
934    do_bench_clone_from(b, 10, 1000, 1000)
935}
936
937#[bench]
938fn bench_clone_from_10_0010_0100(b: &mut Bencher) {
939    do_bench_clone_from(b, 10, 10, 100)
940}
941
942#[bench]
943fn bench_clone_from_10_0100_1000(b: &mut Bencher) {
944    do_bench_clone_from(b, 10, 100, 1000)
945}
946
947#[bench]
948fn bench_clone_from_10_0010_0000(b: &mut Bencher) {
949    do_bench_clone_from(b, 10, 10, 0)
950}
951
952#[bench]
953fn bench_clone_from_10_0100_0010(b: &mut Bencher) {
954    do_bench_clone_from(b, 10, 100, 10)
955}
956
957#[bench]
958fn bench_clone_from_10_1000_0100(b: &mut Bencher) {
959    do_bench_clone_from(b, 10, 1000, 100)
960}
961
// Generates in-place-collect benchmarks: each one allocates an owned
// Vec<$type> of $count elements initialized to $init, then maps it into a
// new Vec of the same element type (the pattern eligible for in-place
// iteration/collection).
macro_rules! bench_in_place {
    ($($fname:ident, $type:ty, $count:expr, $init:expr);*) => {
        // Fix: the transcriber previously used `+` while the matcher used `*`;
        // mismatched Kleene operators trip the `meta_variable_misuse` lint.
        $(
            #[bench]
            fn $fname(b: &mut Bencher) {
                b.iter(|| {
                    let src: Vec<$type> = black_box(vec![$init; $count]);
                    src.into_iter()
                        .enumerate()
                        .map(|(idx, e)| idx as $type ^ e)
                        .collect::<Vec<$type>>()
                });
            }
        )*
    };
}
978
// Instantiate in-place-collect benches over element width (u8/u32/u128),
// length (10/100/1000), and initial value (0/1).
bench_in_place![
    bench_in_place_xxu8_0010_i0,   u8,   10, 0;
    bench_in_place_xxu8_0100_i0,   u8,  100, 0;
    bench_in_place_xxu8_1000_i0,   u8, 1000, 0;
    bench_in_place_xxu8_0010_i1,   u8,   10, 1;
    bench_in_place_xxu8_0100_i1,   u8,  100, 1;
    bench_in_place_xxu8_1000_i1,   u8, 1000, 1;
    bench_in_place_xu32_0010_i0,  u32,   10, 0;
    bench_in_place_xu32_0100_i0,  u32,  100, 0;
    bench_in_place_xu32_1000_i0,  u32, 1000, 0;
    bench_in_place_xu32_0010_i1,  u32,   10, 1;
    bench_in_place_xu32_0100_i1,  u32,  100, 1;
    bench_in_place_xu32_1000_i1,  u32, 1000, 1;
    bench_in_place_u128_0010_i0, u128,   10, 0;
    bench_in_place_u128_0100_i0, u128,  100, 0;
    bench_in_place_u128_1000_i0, u128, 1000, 0;
    bench_in_place_u128_0010_i1, u128,   10, 1;
    bench_in_place_u128_0100_i1, u128,  100, 1;
    bench_in_place_u128_1000_i1, u128, 1000, 1
];
999
1000#[bench]
1001fn bench_in_place_recycle(b: &mut Bencher) {
1002    let mut data = vec![0; 1000];
1003
1004    b.iter(|| {
1005        let tmp = std::mem::take(&mut data);
1006        data = black_box(
1007            tmp.into_iter()
1008                .enumerate()
1009                .map(|(idx, e)| idx.wrapping_add(e))
1010                .fuse()
1011                .peekable()
1012                .collect::<Vec<usize>>(),
1013        );
1014    });
1015}
1016
1017#[bench]
1018fn bench_in_place_zip_recycle(b: &mut Bencher) {
1019    let mut data = vec![0u8; 1000];
1020    let mut rng = rand::thread_rng();
1021    let mut subst = vec![0u8; 1000];
1022    rng.fill_bytes(&mut subst[..]);
1023
1024    b.iter(|| {
1025        let tmp = std::mem::take(&mut data);
1026        let mangled = tmp
1027            .into_iter()
1028            .zip(subst.iter().copied())
1029            .enumerate()
1030            .map(|(i, (d, s))| d.wrapping_add(i as u8) ^ s)
1031            .collect::<Vec<_>>();
1032        data = black_box(mangled);
1033    });
1034}
1035
1036#[bench]
1037fn bench_in_place_zip_iter_mut(b: &mut Bencher) {
1038    let mut data = vec![0u8; 256];
1039    let mut rng = rand::thread_rng();
1040    let mut subst = vec![0u8; 1000];
1041    rng.fill_bytes(&mut subst[..]);
1042
1043    b.iter(|| {
1044        data.iter_mut().enumerate().for_each(|(i, d)| {
1045            *d = d.wrapping_add(i as u8) ^ subst[i];
1046        });
1047    });
1048
1049    black_box(data);
1050}
1051
/// Reinterpret each element of `input` as type `U` and collect the results.
///
/// Used by the transmute benchmarks for layout-compatible pairs such as
/// `u32` <-> `i32`.
///
/// # Panics
/// Panics if `T` and `U` differ in size: `transmute_copy` performs no size
/// check of its own and reading a larger `U` out of a `T` would be undefined
/// behavior.
pub fn vec_cast<T, U>(input: Vec<T>) -> Vec<U> {
    assert_eq!(std::mem::size_of::<T>(), std::mem::size_of::<U>());
    // SAFETY: T and U have the same size (asserted above); callers use this
    // only for bit-compatible element types.
    input.into_iter().map(|e| unsafe { std::mem::transmute_copy(&e) }).collect()
}
1055
#[bench]
fn bench_transmute(b: &mut Bencher) {
    let mut vec = vec![10u32; 100];
    b.bytes = 800; // 2 casts x 4 bytes x 100
    b.iter(|| {
        // Round-trip u32 -> i32 -> u32 through vec_cast, recycling the
        // allocation across iterations via mem::take.
        let v = std::mem::take(&mut vec);
        let v = black_box(vec_cast::<u32, i32>(v));
        let v = black_box(vec_cast::<i32, u32>(v));
        vec = v;
    });
}
1067
// A type with a non-trivial destructor, used to benchmark in-place collect
// when elements must actually be dropped.
#[derive(Clone)]
struct Droppable(usize);

impl Drop for Droppable {
    fn drop(&mut self) {
        // black_box keeps the destructor from being optimized to a no-op.
        black_box(self);
    }
}
1076
#[bench]
fn bench_in_place_collect_droppable(b: &mut Bencher) {
    // `skip(100)` forces 100 source elements to be dropped mid-collect,
    // exercising the drop-handling path of in-place collection.
    let v: Vec<Droppable> = std::iter::repeat_with(|| Droppable(0)).take(1000).collect();
    b.iter(|| {
        v.clone()
            .into_iter()
            .skip(100)
            .enumerate()
            .map(|(i, e)| Droppable(i ^ e.0))
            .collect::<Vec<_>>()
    })
}
1089
// Shared input length for the chain/extend/rev benches below.
const LEN: usize = 16384;

#[bench]
fn bench_chain_collect(b: &mut Bencher) {
    let data = black_box([0; LEN]);
    // Collect a chain of a large slice iterator plus one trailing element.
    b.iter(|| data.iter().cloned().chain([1].iter().cloned()).collect::<Vec<_>>());
}
1097
1098#[bench]
1099fn bench_chain_chain_collect(b: &mut Bencher) {
1100    let data = black_box([0; LEN]);
1101    b.iter(|| {
1102        data.iter()
1103            .cloned()
1104            .chain([1].iter().cloned())
1105            .chain([2].iter().cloned())
1106            .collect::<Vec<_>>()
1107    });
1108}
1109
#[bench]
fn bench_nest_chain_chain_collect(b: &mut Bencher) {
    let data = black_box([0; LEN]);
    // Same as bench_chain_chain_collect, but the two tails are chained
    // together first (nested rather than sequential chains).
    b.iter(|| {
        data.iter().cloned().chain([1].iter().chain([2].iter()).cloned()).collect::<Vec<_>>()
    });
}

#[bench]
fn bench_range_map_collect(b: &mut Bencher) {
    // Baseline: collect a mapped range with an exact size hint.
    b.iter(|| (0..LEN).map(|_| u32::default()).collect::<Vec<_>>());
}
1122
#[bench]
fn bench_chain_extend_ref(b: &mut Bencher) {
    let data = black_box([0; LEN]);
    // Extend from an iterator of references (copied via Extend<&u32>).
    b.iter(|| {
        let mut v = Vec::<u32>::with_capacity(data.len() + 1);
        v.extend(data.iter().chain([1].iter()));
        v
    });
}

#[bench]
fn bench_chain_extend_value(b: &mut Bencher) {
    let data = black_box([0; LEN]);
    // Extend from an iterator of owned values; `Some(1)` supplies the tail.
    b.iter(|| {
        let mut v = Vec::<u32>::with_capacity(data.len() + 1);
        v.extend(data.iter().cloned().chain(Some(1)));
        v
    });
}
1142
#[bench]
fn bench_rev_1(b: &mut Bencher) {
    let data = black_box([0; LEN]);
    // Reversed extend into a Vec that must grow from empty.
    b.iter(|| {
        let mut v = Vec::<u32>::new();
        v.extend(data.iter().rev());
        v
    });
}

#[bench]
fn bench_rev_2(b: &mut Bencher) {
    let data = black_box([0; LEN]);
    // Same as bench_rev_1 but with the capacity preallocated.
    b.iter(|| {
        let mut v = Vec::<u32>::with_capacity(data.len());
        v.extend(data.iter().rev());
        v
    });
}
1162
#[bench]
fn bench_map_regular(b: &mut Bencher) {
    let data = black_box([(0, 0); LEN]);
    // Safe baseline for bench_map_fast: extend from a mapped iterator.
    b.iter(|| {
        let mut v = Vec::<u32>::new();
        v.extend(data.iter().map(|t| t.1));
        v
    });
}
1172
1173#[bench]
1174fn bench_map_fast(b: &mut Bencher) {
1175    let data = black_box([(0, 0); LEN]);
1176    b.iter(|| {
1177        let mut result = Vec::with_capacity(data.len());
1178        for i in 0..data.len() {
1179            unsafe {
1180                *result.get_unchecked_mut(i) = data[i].0;
1181                result.set_len(i);
1182            }
1183        }
1184        result
1185    });
1186}
1187
/// Fills `buf` with deterministic pseudo-random values (xorshift32 seeded by
/// `seed`) and sorts it ascending.
///
/// Values are masked to a range chosen from the buffer length so that
/// duplicates are likely, which is what the dedup benches need.
fn random_sorted_fill(mut seed: u32, buf: &mut [u32]) {
    // Smaller buffers get a narrower value range -> more duplicates.
    let mask = if buf.len() < 8192 {
        0xFF
    } else if buf.len() < 200_000 {
        0xFFFF
    } else {
        0xFFFF_FFFF
    };

    for item in buf.iter_mut() {
        // One xorshift32 step (Marsaglia).
        seed ^= seed << 13;
        seed ^= seed >> 17;
        seed ^= seed << 5;

        *item = seed & mask;
    }

    // sort_unstable avoids the stable sort's temporary allocation; for u32
    // keys the resulting order is identical.
    buf.sort_unstable();
}
1207
// "Old" dedup strategy, approximated with the (unstable) partition_dedup:
// shrink to the deduplicated prefix, then restore the buffer from `template`
// so every iteration sees identical input.
fn bench_vec_dedup_old(b: &mut Bencher, sz: usize) {
    let mut template = vec![0u32; sz];
    b.bytes = std::mem::size_of_val(template.as_slice()) as u64;
    random_sorted_fill(0x43, &mut template);

    let mut vec = template.clone();
    b.iter(|| {
        // Scope the mutable borrow from partition_dedup before truncating.
        let len = {
            let (dedup, _) = vec.partition_dedup();
            dedup.len()
        };
        vec.truncate(len);

        black_box(vec.first());
        vec.clear();
        vec.extend_from_slice(&template);
    });
}
1226
1227fn bench_vec_dedup_new(b: &mut Bencher, sz: usize) {
1228    let mut template = vec![0u32; sz];
1229    b.bytes = std::mem::size_of_val(template.as_slice()) as u64;
1230    random_sorted_fill(0x43, &mut template);
1231
1232    let mut vec = template.clone();
1233    b.iter(|| {
1234        vec.dedup();
1235        black_box(vec.first());
1236        vec.clear();
1237        vec.extend_from_slice(&template);
1238    });
1239}
1240
// Size-parameterized wrappers comparing old vs new dedup at 100..100_000
// elements.
#[bench]
fn bench_dedup_old_100(b: &mut Bencher) {
    bench_vec_dedup_old(b, 100);
}
#[bench]
fn bench_dedup_new_100(b: &mut Bencher) {
    bench_vec_dedup_new(b, 100);
}

#[bench]
fn bench_dedup_old_1000(b: &mut Bencher) {
    bench_vec_dedup_old(b, 1000);
}
#[bench]
fn bench_dedup_new_1000(b: &mut Bencher) {
    bench_vec_dedup_new(b, 1000);
}

#[bench]
fn bench_dedup_old_10000(b: &mut Bencher) {
    bench_vec_dedup_old(b, 10000);
}
#[bench]
fn bench_dedup_new_10000(b: &mut Bencher) {
    bench_vec_dedup_new(b, 10000);
}

#[bench]
fn bench_dedup_old_100000(b: &mut Bencher) {
    bench_vec_dedup_old(b, 100000);
}
#[bench]
fn bench_dedup_new_100000(b: &mut Bencher) {
    bench_vec_dedup_new(b, 100000);
}
1276// Disabling on android for the time being
1277// See https://github.com/rust-lang/rust/issues/73535#event-3477699747
1278#![cfg(not(target_os = "android"))]
1279#![feature(btree_drain_filter)]
1280#![feature(map_first_last)]
1281#![feature(repr_simd)]
1282#![feature(slice_partition_dedup)]
1283#![feature(test)]
1284
1285extern crate test;
1286
1287mod binary_heap;
1288mod btree;
1289mod linked_list;
1290mod slice;
1291mod str;
1292mod string;
1293mod vec;
1294mod vec_deque;
1295use std::collections::BTreeMap;
1296use std::iter::Iterator;
1297use std::ops::RangeBounds;
1298use std::vec::Vec;
1299
1300use rand::{seq::SliceRandom, thread_rng, Rng};
1301use test::{black_box, Bencher};
1302
// Generates a bench `$name` that inserts and removes one random key per
// iteration in a map prepopulated with up to `$n` random entries.
macro_rules! map_insert_rand_bench {
    ($name: ident, $n: expr, $map: ident) => {
        #[bench]
        pub fn $name(b: &mut Bencher) {
            let n: usize = $n;
            let mut map = $map::new();
            // setup
            let mut rng = thread_rng();

            for _ in 0..n {
                let i = rng.gen::<usize>() % n;
                map.insert(i, i);
            }

            // measure
            b.iter(|| {
                let k = rng.gen::<usize>() % n;
                map.insert(k, k);
                map.remove(&k);
            });
            black_box(map);
        }
    };
}
1327
// Generates a bench `$name` that inserts and removes sequential (odd) keys in
// a map prepopulated with the even keys 0, 2, ..., 2($n - 1).
macro_rules! map_insert_seq_bench {
    ($name: ident, $n: expr, $map: ident) => {
        #[bench]
        pub fn $name(b: &mut Bencher) {
            let mut map = $map::new();
            let n: usize = $n;
            // setup
            for i in 0..n {
                map.insert(i * 2, i * 2);
            }

            // measure
            let mut i = 1;
            b.iter(|| {
                map.insert(i, i);
                map.remove(&i);
                i = (i + 2) % n;
            });
            black_box(map);
        }
    };
}
1350
// Generates a bench `$name` that looks up keys in shuffled (random) order
// from a map built from up to `$n` random keys.
macro_rules! map_find_rand_bench {
    ($name: ident, $n: expr, $map: ident) => {
        #[bench]
        pub fn $name(b: &mut Bencher) {
            let mut map = $map::new();
            let n: usize = $n;

            // setup
            let mut rng = thread_rng();
            let mut keys: Vec<_> = (0..n).map(|_| rng.gen::<usize>() % n).collect();

            for &k in &keys {
                map.insert(k, k);
            }

            keys.shuffle(&mut rng);

            // measure
            let mut i = 0;
            b.iter(|| {
                let t = map.get(&keys[i]);
                i = (i + 1) % n;
                black_box(t);
            })
        }
    };
}
1378
// Generates a bench `$name` that looks up keys in sequential order from a map
// of the keys 0..$n.
macro_rules! map_find_seq_bench {
    ($name: ident, $n: expr, $map: ident) => {
        #[bench]
        pub fn $name(b: &mut Bencher) {
            let mut map = $map::new();
            let n: usize = $n;

            // setup
            for i in 0..n {
                map.insert(i, i);
            }

            // measure
            let mut i = 0;
            b.iter(|| {
                let x = map.get(&i);
                i = (i + 1) % n;
                black_box(x);
            })
        }
    };
}
1401
// Instantiate the map bench macros for BTreeMap at two sizes each.
map_insert_rand_bench! {insert_rand_100,    100,    BTreeMap}
map_insert_rand_bench! {insert_rand_10_000, 10_000, BTreeMap}

map_insert_seq_bench! {insert_seq_100,    100,    BTreeMap}
map_insert_seq_bench! {insert_seq_10_000, 10_000, BTreeMap}

map_find_rand_bench! {find_rand_100,    100,    BTreeMap}
map_find_rand_bench! {find_rand_10_000, 10_000, BTreeMap}

map_find_seq_bench! {find_seq_100,    100,    BTreeMap}
map_find_seq_bench! {find_seq_10_000, 10_000, BTreeMap}
1413
// Iterates (by shared reference) over a map of `size` random entries.
fn bench_iteration(b: &mut Bencher, size: i32) {
    let mut map = BTreeMap::<i32, i32>::new();
    let mut rng = thread_rng();

    // Duplicate random keys simply overwrite, so len may be slightly < size.
    for _ in 0..size {
        map.insert(rng.gen(), rng.gen());
    }

    b.iter(|| {
        for entry in &map {
            black_box(entry);
        }
    });
}
1428
// Wrappers instantiating bench_iteration at three sizes.
#[bench]
pub fn iteration_20(b: &mut Bencher) {
    bench_iteration(b, 20);
}

#[bench]
pub fn iteration_1000(b: &mut Bencher) {
    bench_iteration(b, 1000);
}

#[bench]
pub fn iteration_100000(b: &mut Bencher) {
    bench_iteration(b, 100000);
}
1443
// Like bench_iteration, but iterates with mutable references.
fn bench_iteration_mut(b: &mut Bencher, size: i32) {
    let mut map = BTreeMap::<i32, i32>::new();
    let mut rng = thread_rng();

    for _ in 0..size {
        map.insert(rng.gen(), rng.gen());
    }

    b.iter(|| {
        for kv in map.iter_mut() {
            black_box(kv);
        }
    });
}
1458
// Wrappers instantiating bench_iteration_mut at three sizes.
#[bench]
pub fn iteration_mut_20(b: &mut Bencher) {
    bench_iteration_mut(b, 20);
}

#[bench]
pub fn iteration_mut_1000(b: &mut Bencher) {
    bench_iteration_mut(b, 1000);
}

#[bench]
pub fn iteration_mut_100000(b: &mut Bencher) {
    bench_iteration_mut(b, 100000);
}
1473
1474fn bench_first_and_last(b: &mut Bencher, size: i32) {
1475    let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
1476    b.iter(|| {
1477        for _ in 0..10 {
1478            black_box(map.first_key_value());
1479            black_box(map.last_key_value());
1480        }
1481    });
1482}
1483
// Wrappers instantiating bench_first_and_last, including the empty-map case.
#[bench]
pub fn first_and_last_0(b: &mut Bencher) {
    bench_first_and_last(b, 0);
}

#[bench]
pub fn first_and_last_100(b: &mut Bencher) {
    bench_first_and_last(b, 100);
}

#[bench]
pub fn first_and_last_10k(b: &mut Bencher) {
    bench_first_and_last(b, 10_000);
}
1498
// Number of keys in the range benches; COUNT is the number of (i, j) pairs
// with i < j, i.e. how many ranges one bench iteration constructs.
const BENCH_RANGE_SIZE: i32 = 145;
const BENCH_RANGE_COUNT: i32 = BENCH_RANGE_SIZE * (BENCH_RANGE_SIZE - 1) / 2;

// Benchmarks constructing `map.range(..)` for every ordered pair of bounds
// produced by `f`; the ranges are built but never iterated.
fn bench_range<F, R>(b: &mut Bencher, f: F)
where
    F: Fn(i32, i32) -> R,
    R: RangeBounds<i32>,
{
    let map: BTreeMap<_, _> = (0..BENCH_RANGE_SIZE).map(|i| (i, i)).collect();
    b.iter(|| {
        let mut c = 0;
        for i in 0..BENCH_RANGE_SIZE {
            for j in i + 1..BENCH_RANGE_SIZE {
                black_box(map.range(f(i, j)));
                c += 1;
            }
        }
        // Sanity check (debug builds only): we built exactly COUNT ranges.
        debug_assert_eq!(c, BENCH_RANGE_COUNT);
    });
}
1519
// bench_range instantiations for the four bound-kind combinations.
#[bench]
pub fn range_included_excluded(b: &mut Bencher) {
    bench_range(b, |i, j| i..j);
}

#[bench]
pub fn range_included_included(b: &mut Bencher) {
    bench_range(b, |i, j| i..=j);
}

#[bench]
pub fn range_included_unbounded(b: &mut Bencher) {
    bench_range(b, |i, _| i..);
}

#[bench]
pub fn range_unbounded_unbounded(b: &mut Bencher) {
    bench_range(b, |_, _| ..);
}
1539
1540fn bench_iter(b: &mut Bencher, repeats: i32, size: i32) {
1541    let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
1542    b.iter(|| {
1543        for _ in 0..repeats {
1544            black_box(map.iter());
1545        }
1546    });
1547}
1548
/// Contrast range_unbounded_unbounded with `iter()`.
#[bench]
pub fn range_unbounded_vs_iter(b: &mut Bencher) {
    bench_iter(b, BENCH_RANGE_COUNT, BENCH_RANGE_SIZE);
}

// bench_iter instantiations from the empty map up to one million entries.
#[bench]
pub fn iter_0(b: &mut Bencher) {
    bench_iter(b, 1_000, 0);
}

#[bench]
pub fn iter_1(b: &mut Bencher) {
    bench_iter(b, 1_000, 1);
}

#[bench]
pub fn iter_100(b: &mut Bencher) {
    bench_iter(b, 1_000, 100);
}

#[bench]
pub fn iter_10k(b: &mut Bencher) {
    bench_iter(b, 1_000, 10_000);
}

#[bench]
pub fn iter_1m(b: &mut Bencher) {
    bench_iter(b, 1_000, 1_000_000);
}
1579
// Number of usize words per value in fat_val_map (2 KiB per value on 64-bit).
const FAT: usize = 256;
1581
// The returned map has small keys and values.
// Benchmarks on it have a counterpart in set.rs with the same keys and no values at all.
fn slim_map(n: usize) -> BTreeMap<usize, usize> {
    let mut map = BTreeMap::new();
    map.extend((0..n).map(|i| (i, i)));
    map
}
1587
1588// The returned map has small keys and large values.
1589fn fat_val_map(n: usize) -> BTreeMap<usize, [usize; FAT]> {
1590    (0..n).map(|i| (i, [i; FAT])).collect::<BTreeMap<_, _>>()
1591}
1592
// Clone-plus-teardown benches on a 100-entry slim map: each measures clone()
// followed by one way of emptying (or half-emptying) the clone.
#[bench]
pub fn clone_slim_100(b: &mut Bencher) {
    let src = slim_map(100);
    b.iter(|| src.clone())
}

#[bench]
pub fn clone_slim_100_and_clear(b: &mut Bencher) {
    let src = slim_map(100);
    b.iter(|| src.clone().clear())
}

#[bench]
pub fn clone_slim_100_and_drain_all(b: &mut Bencher) {
    let src = slim_map(100);
    b.iter(|| src.clone().drain_filter(|_, _| true).count())
}

#[bench]
pub fn clone_slim_100_and_drain_half(b: &mut Bencher) {
    let src = slim_map(100);
    b.iter(|| {
        let mut map = src.clone();
        assert_eq!(map.drain_filter(|i, _| i % 2 == 0).count(), 100 / 2);
        assert_eq!(map.len(), 100 / 2);
    })
}

#[bench]
pub fn clone_slim_100_and_into_iter(b: &mut Bencher) {
    let src = slim_map(100);
    b.iter(|| src.clone().into_iter().count())
}

#[bench]
pub fn clone_slim_100_and_pop_all(b: &mut Bencher) {
    let src = slim_map(100);
    b.iter(|| {
        let mut map = src.clone();
        while map.pop_first().is_some() {}
        map
    });
}

#[bench]
pub fn clone_slim_100_and_remove_all(b: &mut Bencher) {
    let src = slim_map(100);
    b.iter(|| {
        let mut map = src.clone();
        // Repeatedly remove the smallest remaining key.
        while let Some(elt) = map.iter().map(|(&i, _)| i).next() {
            let v = map.remove(&elt);
            debug_assert!(v.is_some());
        }
        map
    });
}

#[bench]
pub fn clone_slim_100_and_remove_half(b: &mut Bencher) {
    let src = slim_map(100);
    b.iter(|| {
        let mut map = src.clone();
        for i in (0..100).step_by(2) {
            let v = map.remove(&i);
            debug_assert!(v.is_some());
        }
        assert_eq!(map.len(), 100 / 2);
        map
    })
}
1662}
1663
// Same clone-plus-teardown benches as the 100-entry family, at 10_000 entries.
#[bench]
pub fn clone_slim_10k(b: &mut Bencher) {
    let src = slim_map(10_000);
    b.iter(|| src.clone())
}

#[bench]
pub fn clone_slim_10k_and_clear(b: &mut Bencher) {
    let src = slim_map(10_000);
    b.iter(|| src.clone().clear())
}

#[bench]
pub fn clone_slim_10k_and_drain_all(b: &mut Bencher) {
    let src = slim_map(10_000);
    b.iter(|| src.clone().drain_filter(|_, _| true).count())
}

#[bench]
pub fn clone_slim_10k_and_drain_half(b: &mut Bencher) {
    let src = slim_map(10_000);
    b.iter(|| {
        let mut map = src.clone();
        assert_eq!(map.drain_filter(|i, _| i % 2 == 0).count(), 10_000 / 2);
        assert_eq!(map.len(), 10_000 / 2);
    })
}

#[bench]
pub fn clone_slim_10k_and_into_iter(b: &mut Bencher) {
    let src = slim_map(10_000);
    b.iter(|| src.clone().into_iter().count())
}

#[bench]
pub fn clone_slim_10k_and_pop_all(b: &mut Bencher) {
    let src = slim_map(10_000);
    b.iter(|| {
        let mut map = src.clone();
        while map.pop_first().is_some() {}
        map
    });
}

#[bench]
pub fn clone_slim_10k_and_remove_all(b: &mut Bencher) {
    let src = slim_map(10_000);
    b.iter(|| {
        let mut map = src.clone();
        // Repeatedly remove the smallest remaining key.
        while let Some(elt) = map.iter().map(|(&i, _)| i).next() {
            let v = map.remove(&elt);
            debug_assert!(v.is_some());
        }
        map
    });
}

#[bench]
pub fn clone_slim_10k_and_remove_half(b: &mut Bencher) {
    let src = slim_map(10_000);
    b.iter(|| {
        let mut map = src.clone();
        for i in (0..10_000).step_by(2) {
            let v = map.remove(&i);
            debug_assert!(v.is_some());
        }
        assert_eq!(map.len(), 10_000 / 2);
        map
    })
}
1734
// Clone-plus-teardown benches on the fat-value map (large values, 100 keys).
#[bench]
pub fn clone_fat_val_100(b: &mut Bencher) {
    let src = fat_val_map(100);
    b.iter(|| src.clone())
}

#[bench]
pub fn clone_fat_val_100_and_clear(b: &mut Bencher) {
    let src = fat_val_map(100);
    b.iter(|| src.clone().clear())
}

#[bench]
pub fn clone_fat_val_100_and_drain_all(b: &mut Bencher) {
    let src = fat_val_map(100);
    b.iter(|| src.clone().drain_filter(|_, _| true).count())
}

#[bench]
pub fn clone_fat_val_100_and_drain_half(b: &mut Bencher) {
    let src = fat_val_map(100);
    b.iter(|| {
        let mut map = src.clone();
        assert_eq!(map.drain_filter(|i, _| i % 2 == 0).count(), 100 / 2);
        assert_eq!(map.len(), 100 / 2);
    })
}

#[bench]
pub fn clone_fat_val_100_and_into_iter(b: &mut Bencher) {
    let src = fat_val_map(100);
    b.iter(|| src.clone().into_iter().count())
}

#[bench]
pub fn clone_fat_val_100_and_pop_all(b: &mut Bencher) {
    let src = fat_val_map(100);
    b.iter(|| {
        let mut map = src.clone();
        while map.pop_first().is_some() {}
        map
    });
}

#[bench]
pub fn clone_fat_val_100_and_remove_all(b: &mut Bencher) {
    let src = fat_val_map(100);
    b.iter(|| {
        let mut map = src.clone();
        // Repeatedly remove the smallest remaining key.
        while let Some(elt) = map.iter().map(|(&i, _)| i).next() {
            let v = map.remove(&elt);
            debug_assert!(v.is_some());
        }
        map
    });
}

#[bench]
pub fn clone_fat_val_100_and_remove_half(b: &mut Bencher) {
    let src = fat_val_map(100);
    b.iter(|| {
        let mut map = src.clone();
        for i in (0..100).step_by(2) {
            let v = map.remove(&i);
            debug_assert!(v.is_some());
        }
        assert_eq!(map.len(), 100 / 2);
        map
    })
}
1805use std::collections::BTreeSet;
1806
1807use rand::{thread_rng, Rng};
1808use test::Bencher;
1809
// A set of exactly `n` distinct random values (inserting until len == n,
// so duplicates are simply retried).
fn random(n: usize) -> BTreeSet<usize> {
    let mut rng = thread_rng();
    let mut set = BTreeSet::new();
    while set.len() < n {
        set.insert(rng.gen());
    }
    assert_eq!(set.len(), n);
    set
}
1819
/// The set of the `n` negative integers -n..=-1.
fn neg(n: usize) -> BTreeSet<i32> {
    let lo = -(n as i32);
    let set: BTreeSet<i32> = (lo..0).collect();
    assert_eq!(set.len(), n);
    set
}
1825
/// The set of the `n` positive integers 1..=n.
fn pos(n: usize) -> BTreeSet<i32> {
    let mut set = BTreeSet::new();
    for i in 1..=(n as i32) {
        set.insert(i);
    }
    assert_eq!(set.len(), n);
    set
}
1831
/// Two interleaved sets over 0..n1+n1*factor: every (factor+1)-th value goes
/// to the first set, the rest to the second, so sets[0].len() == n1 and
/// sets[1].len() == n1 * factor.
fn stagger(n1: usize, factor: usize) -> [BTreeSet<u32>; 2] {
    let n2 = n1 * factor;
    let mut sets = [BTreeSet::new(), BTreeSet::new()];
    for i in 0..(n1 + n2) {
        let into_second = i % (factor + 1) != 0;
        sets[usize::from(into_second)].insert(i as u32);
    }
    assert_eq!(sets[0].len(), n1);
    assert_eq!(sets[1].len(), n2);
    sets
}
1843
// Generates a bench `$name` that applies `$set_func` between two prebuilt
// sets and consumes the result with `$result_func` (e.g. `count`).
macro_rules! set_bench {
    ($name: ident, $set_func: ident, $result_func: ident, $sets: expr) => {
        #[bench]
        pub fn $name(b: &mut Bencher) {
            // setup
            let sets = $sets;

            // measure
            b.iter(|| sets[0].$set_func(&sets[1]).$result_func())
        }
    };
}
1856
/// The set of the first `n` small integers, 0..n (set.rs counterpart of
/// map.rs's slim_map).
fn slim_set(n: usize) -> BTreeSet<usize> {
    let mut set = BTreeSet::new();
    set.extend(0..n);
    set
}
1860
// Clone-plus-teardown benches on a 100-element set, mirroring map.rs's
// clone_slim_100 family.
#[bench]
pub fn clone_100(b: &mut Bencher) {
    let src = slim_set(100);
    b.iter(|| src.clone())
}

#[bench]
pub fn clone_100_and_clear(b: &mut Bencher) {
    let src = slim_set(100);
    b.iter(|| src.clone().clear())
}

#[bench]
pub fn clone_100_and_drain_all(b: &mut Bencher) {
    let src = slim_set(100);
    b.iter(|| src.clone().drain_filter(|_| true).count())
}

#[bench]
pub fn clone_100_and_drain_half(b: &mut Bencher) {
    let src = slim_set(100);
    b.iter(|| {
        let mut set = src.clone();
        assert_eq!(set.drain_filter(|i| i % 2 == 0).count(), 100 / 2);
        assert_eq!(set.len(), 100 / 2);
    })
}

#[bench]
pub fn clone_100_and_into_iter(b: &mut Bencher) {
    let src = slim_set(100);
    b.iter(|| src.clone().into_iter().count())
}

#[bench]
pub fn clone_100_and_pop_all(b: &mut Bencher) {
    let src = slim_set(100);
    b.iter(|| {
        let mut set = src.clone();
        while set.pop_first().is_some() {}
        set
    });
}

#[bench]
pub fn clone_100_and_remove_all(b: &mut Bencher) {
    let src = slim_set(100);
    b.iter(|| {
        let mut set = src.clone();
        // Repeatedly remove the smallest remaining element.
        while let Some(elt) = set.iter().copied().next() {
            let ok = set.remove(&elt);
            debug_assert!(ok);
        }
        set
    });
}

#[bench]
pub fn clone_100_and_remove_half(b: &mut Bencher) {
    let src = slim_set(100);
    b.iter(|| {
        let mut set = src.clone();
        for i in (0..100).step_by(2) {
            let ok = set.remove(&i);
            debug_assert!(ok);
        }
        assert_eq!(set.len(), 100 / 2);
        set
    })
}
1931
// Same clone-plus-teardown benches at 10_000 elements.
#[bench]
pub fn clone_10k(b: &mut Bencher) {
    let src = slim_set(10_000);
    b.iter(|| src.clone())
}

#[bench]
pub fn clone_10k_and_clear(b: &mut Bencher) {
    let src = slim_set(10_000);
    b.iter(|| src.clone().clear())
}

#[bench]
pub fn clone_10k_and_drain_all(b: &mut Bencher) {
    let src = slim_set(10_000);
    b.iter(|| src.clone().drain_filter(|_| true).count())
}

#[bench]
pub fn clone_10k_and_drain_half(b: &mut Bencher) {
    let src = slim_set(10_000);
    b.iter(|| {
        let mut set = src.clone();
        assert_eq!(set.drain_filter(|i| i % 2 == 0).count(), 10_000 / 2);
        assert_eq!(set.len(), 10_000 / 2);
    })
}

#[bench]
pub fn clone_10k_and_into_iter(b: &mut Bencher) {
    let src = slim_set(10_000);
    b.iter(|| src.clone().into_iter().count())
}

#[bench]
pub fn clone_10k_and_pop_all(b: &mut Bencher) {
    let src = slim_set(10_000);
    b.iter(|| {
        let mut set = src.clone();
        while set.pop_first().is_some() {}
        set
    });
}

#[bench]
pub fn clone_10k_and_remove_all(b: &mut Bencher) {
    let src = slim_set(10_000);
    b.iter(|| {
        let mut set = src.clone();
        // Repeatedly remove the smallest remaining element.
        while let Some(elt) = set.iter().copied().next() {
            let ok = set.remove(&elt);
            debug_assert!(ok);
        }
        set
    });
}

#[bench]
pub fn clone_10k_and_remove_half(b: &mut Bencher) {
    let src = slim_set(10_000);
    b.iter(|| {
        let mut set = src.clone();
        for i in (0..10_000).step_by(2) {
            let ok = set.remove(&i);
            debug_assert!(ok);
        }
        assert_eq!(set.len(), 10_000 / 2);
        set
    })
}
2002
// set_bench instantiations: intersection/difference over disjoint (neg/pos),
// random, and staggered inputs, plus is_subset at various size ratios.
set_bench! {intersection_100_neg_vs_100_pos, intersection, count, [neg(100), pos(100)]}
set_bench! {intersection_100_neg_vs_10k_pos, intersection, count, [neg(100), pos(10_000)]}
set_bench! {intersection_100_pos_vs_100_neg, intersection, count, [pos(100), neg(100)]}
set_bench! {intersection_100_pos_vs_10k_neg, intersection, count, [pos(100), neg(10_000)]}
set_bench! {intersection_10k_neg_vs_100_pos, intersection, count, [neg(10_000), pos(100)]}
set_bench! {intersection_10k_neg_vs_10k_pos, intersection, count, [neg(10_000), pos(10_000)]}
set_bench! {intersection_10k_pos_vs_100_neg, intersection, count, [pos(10_000), neg(100)]}
set_bench! {intersection_10k_pos_vs_10k_neg, intersection, count, [pos(10_000), neg(10_000)]}
set_bench! {intersection_random_100_vs_100, intersection, count, [random(100), random(100)]}
set_bench! {intersection_random_100_vs_10k, intersection, count, [random(100), random(10_000)]}
set_bench! {intersection_random_10k_vs_100, intersection, count, [random(10_000), random(100)]}
set_bench! {intersection_random_10k_vs_10k, intersection, count, [random(10_000), random(10_000)]}
set_bench! {intersection_staggered_100_vs_100, intersection, count, stagger(100, 1)}
set_bench! {intersection_staggered_10k_vs_10k, intersection, count, stagger(10_000, 1)}
set_bench! {intersection_staggered_100_vs_10k, intersection, count, stagger(100, 100)}
set_bench! {difference_random_100_vs_100, difference, count, [random(100), random(100)]}
set_bench! {difference_random_100_vs_10k, difference, count, [random(100), random(10_000)]}
set_bench! {difference_random_10k_vs_100, difference, count, [random(10_000), random(100)]}
set_bench! {difference_random_10k_vs_10k, difference, count, [random(10_000), random(10_000)]}
set_bench! {difference_staggered_100_vs_100, difference, count, stagger(100, 1)}
set_bench! {difference_staggered_10k_vs_10k, difference, count, stagger(10_000, 1)}
set_bench! {difference_staggered_100_vs_10k, difference, count, stagger(100, 100)}
set_bench! {is_subset_100_vs_100, is_subset, clone, [pos(100), pos(100)]}
set_bench! {is_subset_100_vs_10k, is_subset, clone, [pos(100), pos(10_000)]}
set_bench! {is_subset_10k_vs_100, is_subset, clone, [pos(10_000), pos(100)]}
set_bench! {is_subset_10k_vs_10k, is_subset, clone, [pos(10_000), pos(10_000)]}
2029mod map;
2030mod set;
2031use std::{mem, ptr};
2032
2033use rand::distributions::{Alphanumeric, Standard};
2034use rand::{thread_rng, Rng, SeedableRng};
2035use rand_xorshift::XorShiftRng;
2036use test::{black_box, Bencher};
2037
2038#[bench]
2039fn iterator(b: &mut Bencher) {
2040    // peculiar numbers to stop LLVM from optimising the summation
2041    // out.
2042    let v: Vec<_> = (0..100).map(|i| i ^ (i << 1) ^ (i >> 1)).collect();
2043
2044    b.iter(|| {
2045        let mut sum = 0;
2046        for x in &v {
2047            sum += *x;
2048        }
2049        // sum == 11806, to stop dead code elimination.
2050        if sum == 0 {
2051            panic!()
2052        }
2053    })
2054}
2055
2056#[bench]
2057fn mut_iterator(b: &mut Bencher) {
2058    let mut v = vec![0; 100];
2059
2060    b.iter(|| {
2061        let mut i = 0;
2062        for x in &mut v {
2063            *x = i;
2064            i += 1;
2065        }
2066    })
2067}
2068
#[bench]
fn concat(b: &mut Bencher) {
    // 100 nested vectors of increasing length (0 to 99 elements each).
    let xss: Vec<Vec<i32>> = (0..100).map(|i| (0..i).collect()).collect();
    b.iter(|| {
        xss.concat();
    });
}

#[bench]
fn join(b: &mut Bencher) {
    let xss: Vec<Vec<i32>> = (0..100).map(|i| (0..i).collect()).collect();
    // join additionally interleaves a separator element between the parts.
    b.iter(|| xss.join(&0));
}
2082
#[bench]
fn push(b: &mut Bencher) {
    // The vector keeps growing across iterations; black_box keeps each push
    // observable so it cannot be elided.
    let mut vec = Vec::<i32>::new();
    b.iter(|| {
        vec.push(0);
        black_box(&vec);
    });
}
2091
// starts_with: full-match, single-element, and mismatch-at-the-end cases.
#[bench]
fn starts_with_same_vector(b: &mut Bencher) {
    let vec: Vec<_> = (0..100).collect();
    b.iter(|| vec.starts_with(&vec))
}

#[bench]
fn starts_with_single_element(b: &mut Bencher) {
    let vec: Vec<_> = vec![0];
    b.iter(|| vec.starts_with(&vec))
}

#[bench]
fn starts_with_diff_one_element_at_end(b: &mut Bencher) {
    let vec: Vec<_> = (0..100).collect();
    // match_vec agrees on the first 99 elements and differs only in the last.
    let mut match_vec: Vec<_> = (0..99).collect();
    match_vec.push(0);
    b.iter(|| vec.starts_with(&match_vec))
}
2111
// ends_with: full-match and single-element cases.
#[bench]
fn ends_with_same_vector(b: &mut Bencher) {
    let vec: Vec<_> = (0..100).collect();
    b.iter(|| vec.ends_with(&vec))
}

#[bench]
fn ends_with_single_element(b: &mut Bencher) {
    let vec: Vec<_> = vec![0];
    b.iter(|| vec.ends_with(&vec))
}
2123
2124#[bench]
2125fn ends_with_diff_one_element_at_beginning(b: &mut Bencher) {
2126    let vec: Vec<_> = (0..100).collect();
2127    let mut match_vec: Vec<_> = (0..100).collect();
2128    match_vec[0] = 200;
2129    b.iter(|| vec.starts_with(&match_vec))
2130}
2131
#[bench]
fn contains_last_element(b: &mut Bencher) {
    // Worst case for the linear scan: the needle is the final element.
    let vec: Vec<_> = (0..100).collect();
    b.iter(|| vec.contains(&99))
}

#[bench]
fn zero_1kb_from_elem(b: &mut Bencher) {
    // Baseline zeroed-buffer construction via the vec! macro.
    b.iter(|| vec![0u8; 1024]);
}
2142
#[bench]
fn zero_1kb_set_memory(b: &mut Bencher) {
    b.iter(|| {
        let mut v = Vec::<u8>::with_capacity(1024);
        unsafe {
            // SAFETY: 1024 bytes were just reserved; write_bytes initializes
            // all of them before set_len exposes them.
            let vp = v.as_mut_ptr();
            ptr::write_bytes(vp, 0, 1024);
            v.set_len(1024);
        }
        v
    });
}
2155
#[bench]
fn zero_1kb_loop_set(b: &mut Bencher) {
    b.iter(|| {
        let mut v = Vec::<u8>::with_capacity(1024);
        // NOTE(review): set_len before the writes exposes uninitialized
        // bytes; the loop overwrites them all, but strictly this is UB --
        // consider raw-pointer writes or resize instead.
        unsafe {
            v.set_len(1024);
        }
        for i in 0..1024 {
            v[i] = 0;
        }
    });
}

#[bench]
fn zero_1kb_mut_iter(b: &mut Bencher) {
    b.iter(|| {
        let mut v = Vec::<u8>::with_capacity(1024);
        // NOTE(review): same uninitialized-then-overwrite pattern as
        // zero_1kb_loop_set above.
        unsafe {
            v.set_len(1024);
        }
        for x in &mut v {
            *x = 0;
        }
        v
    });
}
2182
// 100 inserts at random positions into a vector that starts with 30 elements.
#[bench]
fn random_inserts(b: &mut Bencher) {
    let mut rng = thread_rng();
    b.iter(|| {
        let mut v = vec![(0, 0); 30];
        for _ in 0..100 {
            let l = v.len();
            // `% (l + 1)` permits inserting one past the end (i.e. a push).
            v.insert(rng.gen::<usize>() % (l + 1), (1, 1));
        }
    })
}
2194
// 100 removals at random in-bounds positions; the vector shrinks from 130 to 30.
#[bench]
fn random_removes(b: &mut Bencher) {
    let mut rng = thread_rng();
    b.iter(|| {
        let mut v = vec![(0, 0); 130];
        for _ in 0..100 {
            let l = v.len();
            v.remove(rng.gen::<usize>() % l);
        }
    })
}
2206
/// Fully sorted input: `[0, 1, ..., len - 1]`.
fn gen_ascending(len: usize) -> Vec<u64> {
    (0..len).map(|i| i as u64).collect()
}
2210
/// Fully reverse-sorted input: `[len - 1, ..., 1, 0]`.
fn gen_descending(len: usize) -> Vec<u64> {
    let mut v: Vec<u64> = (0..len as u64).collect();
    v.reverse();
    v
}
2214
// Fixed RNG seed so every `gen_*` helper below produces identical data on
// every run, keeping benchmark numbers comparable between builds.
const SEED: [u8; 16] = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15];
2216
// `len` random `u64`s from a deterministically seeded XorShift generator.
fn gen_random(len: usize) -> Vec<u64> {
    let mut rng = XorShiftRng::from_seed(SEED);
    (&mut rng).sample_iter(&Standard).take(len).collect()
}
2221
// `len` random bytes — same seeded generator as `gen_random`, `u8` elements.
fn gen_random_bytes(len: usize) -> Vec<u8> {
    let mut rng = XorShiftRng::from_seed(SEED);
    (&mut rng).sample_iter(&Standard).take(len).collect()
}
2226
// Sorted data perturbed by ~sqrt(len) random element swaps.
// NOTE(review): `% len` panics on len == 0; all call sites pass len > 0.
fn gen_mostly_ascending(len: usize) -> Vec<u64> {
    let mut rng = XorShiftRng::from_seed(SEED);
    let mut v = gen_ascending(len);
    // Runs while x^2 <= len, i.e. floor(sqrt(len)) + 1 iterations.
    for _ in (0usize..).take_while(|x| x * x <= len) {
        let x = rng.gen::<usize>() % len;
        let y = rng.gen::<usize>() % len;
        v.swap(x, y);
    }
    v
}
2237
// Reverse-sorted data perturbed by ~sqrt(len) random element swaps
// (mirror of `gen_mostly_ascending`).
fn gen_mostly_descending(len: usize) -> Vec<u64> {
    let mut rng = XorShiftRng::from_seed(SEED);
    let mut v = gen_descending(len);
    for _ in (0usize..).take_while(|x| x * x <= len) {
        let x = rng.gen::<usize>() % len;
        let y = rng.gen::<usize>() % len;
        v.swap(x, y);
    }
    v
}
2248
// `len` random alphanumeric strings, each between 1 and 20 characters long.
fn gen_strings(len: usize) -> Vec<String> {
    let mut rng = XorShiftRng::from_seed(SEED);
    let mut v = vec![];
    for _ in 0..len {
        let n = rng.gen::<usize>() % 20 + 1;
        v.push((&mut rng).sample_iter(&Alphanumeric).take(n).collect());
    }
    v
}
2258
// `len` "big" (128-byte) elements: each random `u64` sample is broadcast into
// a `[u64; 16]`.
fn gen_big_random(len: usize) -> Vec<[u64; 16]> {
    let mut rng = XorShiftRng::from_seed(SEED);
    (&mut rng).sample_iter(&Standard).map(|x| [x; 16]).take(len).collect()
}
2263
// Declares a benchmark `$name` that clones the pre-generated `$gen($len)`
// input each iteration and sorts it with method `$f`; `b.bytes` reports
// throughput (element size probed from a fresh one-element sample).
macro_rules! sort {
    ($f:ident, $name:ident, $gen:expr, $len:expr) => {
        #[bench]
        fn $name(b: &mut Bencher) {
            let v = $gen($len);
            b.iter(|| v.clone().$f());
            b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64;
        }
    };
}
2274
// Like `sort!`, but sorts `&str` views of pre-generated `String`s, so the
// per-iteration clone copies only fat pointers, not string contents.
macro_rules! sort_strings {
    ($f:ident, $name:ident, $gen:expr, $len:expr) => {
        #[bench]
        fn $name(b: &mut Bencher) {
            let v = $gen($len);
            let v = v.iter().map(|s| &**s).collect::<Vec<&str>>();
            b.iter(|| v.clone().$f());
            b.bytes = $len * mem::size_of::<&str>() as u64;
        }
    };
}
2286
// Sorts with a deliberately expensive comparator (`cos` on both operands) so
// comparison cost dominates. `count` plus `black_box` keep the closure's work
// observable; the panic branch is effectively unreachable and only exists to
// prevent `count` from being optimized away.
macro_rules! sort_expensive {
    ($f:ident, $name:ident, $gen:expr, $len:expr) => {
        #[bench]
        fn $name(b: &mut Bencher) {
            let v = $gen($len);
            b.iter(|| {
                let mut v = v.clone();
                let mut count = 0;
                v.$f(|a: &u64, b: &u64| {
                    count += 1;
                    if count % 1_000_000_000 == 0 {
                        panic!("should not happen");
                    }
                    (*a as f64).cos().partial_cmp(&(*b as f64).cos()).unwrap()
                });
                black_box(count);
            });
            b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64;
        }
    };
}
2308
// Sorts by an allocating key function (`to_string`), comparing numbers
// lexicographically — exercises the `*_by_key` / `by_cached_key` family.
macro_rules! sort_lexicographic {
    ($f:ident, $name:ident, $gen:expr, $len:expr) => {
        #[bench]
        fn $name(b: &mut Bencher) {
            let v = $gen($len);
            b.iter(|| v.clone().$f(|x| x.to_string()));
            b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64;
        }
    };
}
2319
// Stable sort (`slice::sort`) across input distributions and sizes.
sort!(sort, sort_small_ascending, gen_ascending, 10);
sort!(sort, sort_small_descending, gen_descending, 10);
sort!(sort, sort_small_random, gen_random, 10);
sort!(sort, sort_small_big, gen_big_random, 10);
sort!(sort, sort_medium_random, gen_random, 100);
sort!(sort, sort_large_ascending, gen_ascending, 10000);
sort!(sort, sort_large_descending, gen_descending, 10000);
sort!(sort, sort_large_mostly_ascending, gen_mostly_ascending, 10000);
sort!(sort, sort_large_mostly_descending, gen_mostly_descending, 10000);
sort!(sort, sort_large_random, gen_random, 10000);
sort!(sort, sort_large_big, gen_big_random, 10000);
sort_strings!(sort, sort_large_strings, gen_strings, 10000);
sort_expensive!(sort_by, sort_large_expensive, gen_random, 10000);

// Unstable sort (`slice::sort_unstable`) over the same inputs.
sort!(sort_unstable, sort_unstable_small_ascending, gen_ascending, 10);
sort!(sort_unstable, sort_unstable_small_descending, gen_descending, 10);
sort!(sort_unstable, sort_unstable_small_random, gen_random, 10);
sort!(sort_unstable, sort_unstable_small_big, gen_big_random, 10);
sort!(sort_unstable, sort_unstable_medium_random, gen_random, 100);
sort!(sort_unstable, sort_unstable_large_ascending, gen_ascending, 10000);
sort!(sort_unstable, sort_unstable_large_descending, gen_descending, 10000);
sort!(sort_unstable, sort_unstable_large_mostly_ascending, gen_mostly_ascending, 10000);
sort!(sort_unstable, sort_unstable_large_mostly_descending, gen_mostly_descending, 10000);
sort!(sort_unstable, sort_unstable_large_random, gen_random, 10000);
sort!(sort_unstable, sort_unstable_large_big, gen_big_random, 10000);
sort_strings!(sort_unstable, sort_unstable_large_strings, gen_strings, 10000);
sort_expensive!(sort_unstable_by, sort_unstable_large_expensive, gen_random, 10000);

// Key-based sorts with an allocating key (see `sort_lexicographic!`).
sort_lexicographic!(sort_by_key, sort_by_key_lexicographic, gen_random, 10000);
sort_lexicographic!(sort_unstable_by_key, sort_unstable_by_key_lexicographic, gen_random, 10000);
sort_lexicographic!(sort_by_cached_key, sort_by_cached_key_lexicographic, gen_random, 10000);
2351
// Declares a benchmark for `[T]::reverse` with element type `$ty`; `$f` maps
// a `u64` index to one element. `b.bytes` is the byte count reversed.
macro_rules! reverse {
    ($name:ident, $ty:ty, $f:expr) => {
        #[bench]
        fn $name(b: &mut Bencher) {
            // odd length and offset by 1 to be as unaligned as possible
            let n = 0xFFFFF;
            let mut v: Vec<_> = (0..1 + (n / mem::size_of::<$ty>() as u64)).map($f).collect();
            b.iter(|| black_box(&mut v[1..]).reverse());
            b.bytes = n;
        }
    };
}
2364
// Element widths from 1 to 32 bytes, including an unaligned 3-byte case.
reverse!(reverse_u8, u8, |x| x as u8);
reverse!(reverse_u16, u16, |x| x as u16);
reverse!(reverse_u8x3, [u8; 3], |x| [x as u8, (x >> 8) as u8, (x >> 16) as u8]);
reverse!(reverse_u32, u32, |x| x as u32);
reverse!(reverse_u64, u64, |x| x as u64);
reverse!(reverse_u128, u128, |x| x as u128);
// 4-lane f64 vector element; `#[repr(simd)]` is an unstable representation.
#[repr(simd)]
struct F64x4(f64, f64, f64, f64);
reverse!(reverse_simd_f64x4, F64x4, |x| {
    let x = x as f64;
    F64x4(x, x, x, x)
});
2377
// Declares a `rotate_left` benchmark over roughly `$len * 8` bytes of data.
// `$len` and `$mid` are given in 8-byte units and converted to element counts
// (the `$mid` conversion rounds up).
macro_rules! rotate {
    ($name:ident, $gen:expr, $len:expr, $mid:expr) => {
        #[bench]
        fn $name(b: &mut Bencher) {
            let size = mem::size_of_val(&$gen(1)[0]);
            let mut v = $gen($len * 8 / size);
            b.iter(|| black_box(&mut v).rotate_left(($mid * 8 + size - 1) / size));
            b.bytes = (v.len() * size) as u64;
        }
    };
}
2389
// Small rotations that fit comfortably in cache.
rotate!(rotate_tiny_by1, gen_random, 16, 1);
rotate!(rotate_tiny_half, gen_random, 16, 16 / 2);
rotate!(rotate_tiny_half_plus_one, gen_random, 16, 16 / 2 + 1);

// Mid-size rotations across element types (u64, bytes, Strings).
rotate!(rotate_medium_by1, gen_random, 9158, 1);
rotate!(rotate_medium_by727_u64, gen_random, 9158, 727);
rotate!(rotate_medium_by727_bytes, gen_random_bytes, 9158, 727);
rotate!(rotate_medium_by727_strings, gen_strings, 9158, 727);
rotate!(rotate_medium_half, gen_random, 9158, 9158 / 2);
rotate!(rotate_medium_half_plus_one, gen_random, 9158, 9158 / 2 + 1);

// Intended to use more RAM than the machine has cache
rotate!(rotate_huge_by1, gen_random, 5 * 1024 * 1024, 1);
rotate!(rotate_huge_by9199_u64, gen_random, 5 * 1024 * 1024, 9199);
rotate!(rotate_huge_by9199_bytes, gen_random_bytes, 5 * 1024 * 1024, 9199);
rotate!(rotate_huge_by9199_strings, gen_strings, 5 * 1024 * 1024, 9199);
rotate!(rotate_huge_by9199_big, gen_big_random, 5 * 1024 * 1024, 9199);
rotate!(rotate_huge_by1234577_u64, gen_random, 5 * 1024 * 1024, 1234577);
rotate!(rotate_huge_by1234577_bytes, gen_random_bytes, 5 * 1024 * 1024, 1234577);
rotate!(rotate_huge_by1234577_strings, gen_strings, 5 * 1024 * 1024, 1234577);
rotate!(rotate_huge_by1234577_big, gen_big_random, 5 * 1024 * 1024, 1234577);
rotate!(rotate_huge_half, gen_random, 5 * 1024 * 1024, 5 * 1024 * 1024 / 2);
rotate!(rotate_huge_half_plus_one, gen_random, 5 * 1024 * 1024, 5 * 1024 * 1024 / 2 + 1);
2413use std::collections::VecDeque;
2414use test::{black_box, Bencher};
2415
2416#[bench]
2417fn bench_new(b: &mut Bencher) {
2418    b.iter(|| {
2419        let ring: VecDeque<i32> = VecDeque::new();
2420        black_box(ring);
2421    })
2422}
2423
// Growth path: 1025 `push_front`s from empty, so the deque must reallocate at
// least once past the 1024th element.
#[bench]
fn bench_grow_1025(b: &mut Bencher) {
    b.iter(|| {
        let mut deq = VecDeque::new();
        for i in 0..1025 {
            deq.push_front(i);
        }
        black_box(deq);
    })
}
2434
// Shared-reference iteration over 1000 elements; the explicit sum loop plus
// `black_box` keeps the traversal from being optimized away.
#[bench]
fn bench_iter_1000(b: &mut Bencher) {
    let ring: VecDeque<_> = (0..1000).collect();

    b.iter(|| {
        let mut sum = 0;
        for &i in &ring {
            sum += i;
        }
        black_box(sum);
    })
}
2447
// Same traversal as `bench_iter_1000` but through the mutable iterator.
#[bench]
fn bench_mut_iter_1000(b: &mut Bencher) {
    let mut ring: VecDeque<_> = (0..1000).collect();

    b.iter(|| {
        let mut sum = 0;
        for i in &mut ring {
            sum += *i;
        }
        black_box(sum);
    })
}
2460
// `Iterator::try_fold` over both ring halves; the closure never short-circuits
// (always `Some`), so this measures the full fold.
#[bench]
fn bench_try_fold(b: &mut Bencher) {
    let ring: VecDeque<_> = (0..1000).collect();

    b.iter(|| black_box(ring.iter().try_fold(0, |a, b| Some(a + b))))
}
2467use std::collections::BinaryHeap;
2468
2469use rand::{seq::SliceRandom, thread_rng};
2470use test::{black_box, Bencher};
2471
// Tracks the 1000 smallest of 100_000 shuffled values with a bounded max-heap:
// the root is the largest candidate and is replaced through `PeekMut` whenever
// a smaller value arrives.
#[bench]
fn bench_find_smallest_1000(b: &mut Bencher) {
    let mut rng = thread_rng();
    let mut vec: Vec<u32> = (0..100_000).collect();
    vec.shuffle(&mut rng);

    b.iter(|| {
        let mut iter = vec.iter().copied();
        // Seed the heap with the first 1000 values.
        let mut heap: BinaryHeap<_> = iter.by_ref().take(1000).collect();

        for x in iter {
            let mut max = heap.peek_mut().unwrap();
            // This comparison should be true only 1% of the time.
            // Unnecessary `sift_down`s will degrade performance
            if x < *max {
                *max = x;
            }
        }

        heap
    })
}
2494
// Measures repeated writes through `PeekMut`'s `DerefMut`; `mem::forget` at
// the end skips the guard's drop and the final sift_down it would perform.
#[bench]
fn bench_peek_mut_deref_mut(b: &mut Bencher) {
    let mut bheap = BinaryHeap::from(vec![42]);
    let vec: Vec<u32> = (0..1_000_000).collect();

    b.iter(|| {
        let vec = black_box(&vec);
        let mut peek_mut = bheap.peek_mut().unwrap();
        // The compiler shouldn't be able to optimize away the `sift_down`
        // assignment in `PeekMut`'s `DerefMut` implementation since
        // the loop may not run.
        for &i in vec.iter() {
            *peek_mut = i;
        }
        // Remove the already minimal overhead of the sift_down
        std::mem::forget(peek_mut);
    })
}
2513
// Heapify: `BinaryHeap::from(Vec)` on 100_000 shuffled values. The per-
// iteration `clone` is inside the measured closure.
#[bench]
fn bench_from_vec(b: &mut Bencher) {
    let mut rng = thread_rng();
    let mut vec: Vec<u32> = (0..100_000).collect();
    vec.shuffle(&mut rng);

    b.iter(|| BinaryHeap::from(vec.clone()))
}
2522
// Heap-sort of 10_000 elements via `into_sorted_vec` (clone included in the
// measured closure, since `into_sorted_vec` consumes the heap).
#[bench]
fn bench_into_sorted_vec(b: &mut Bencher) {
    let bheap: BinaryHeap<i32> = (0..10_000).collect();

    b.iter(|| bheap.clone().into_sorted_vec())
}
2529
// 50_000 pushes of shuffled values into a pre-reserved heap; the heap is
// cleared (capacity kept) between iterations so no realloc is measured.
#[bench]
fn bench_push(b: &mut Bencher) {
    let mut bheap = BinaryHeap::with_capacity(50_000);
    let mut rng = thread_rng();
    let mut vec: Vec<u32> = (0..50_000).collect();
    vec.shuffle(&mut rng);

    b.iter(|| {
        for &i in vec.iter() {
            bheap.push(i);
        }
        black_box(&mut bheap);
        bheap.clear();
    })
}
2545
// Pop everything from a 10_000-element heap; the heap is refilled (from a
// descending range) at the start of each iteration, so refill cost is included.
#[bench]
fn bench_pop(b: &mut Bencher) {
    let mut bheap = BinaryHeap::with_capacity(10_000);

    b.iter(|| {
        bheap.extend((0..10_000).rev());
        black_box(&mut bheap);
        while let Some(elem) = bheap.pop() {
            black_box(elem);
        }
    })
}
2558use std::{collections::VecDeque, time::Instant};
2559
// Number of elements in each of the two deques being appended.
const VECDEQUE_LEN: i32 = 100000;
// Un-timed warm-up iterations run before measurement begins.
const WARMUP_N: usize = 100;
// Timed iterations; must be even for the median-of-two computation in `main`.
const BENCH_N: usize = 1000;
2563
2564fn main() {
2565    let a: VecDeque<i32> = (0..VECDEQUE_LEN).collect();
2566    let b: VecDeque<i32> = (0..VECDEQUE_LEN).collect();
2567
2568    for _ in 0..WARMUP_N {
2569        let mut c = a.clone();
2570        let mut d = b.clone();
2571        c.append(&mut d);
2572    }
2573
2574    let mut durations = Vec::with_capacity(BENCH_N);
2575
2576    for _ in 0..BENCH_N {
2577        let mut c = a.clone();
2578        let mut d = b.clone();
2579        let before = Instant::now();
2580        c.append(&mut d);
2581        let after = Instant::now();
2582        durations.push(after.duration_since(before));
2583    }
2584
2585    let l = durations.len();
2586    durations.sort();
2587
2588    assert!(BENCH_N % 2 == 0);
2589    let median = (durations[(l / 2) - 1] + durations[l / 2]) / 2;
2590    println!("\ncustom-bench vec_deque_append {:?} ns/iter\n", median.as_nanos());
2591}
2592use std::collections::LinkedList;
2593use std::panic::{catch_unwind, AssertUnwindSafe};
2594
/// Basic push/pop behavior at both ends, `len` tracking, and mutation through
/// `front_mut`/`back_mut`.
#[test]
fn test_basic() {
    let mut m = LinkedList::<Box<_>>::new();
    assert_eq!(m.pop_front(), None);
    assert_eq!(m.pop_back(), None);
    assert_eq!(m.pop_front(), None);
    // Fixed: the unstable `box expr` syntax was removed from the language;
    // `Box::new` is the stable, behaviorally identical replacement.
    m.push_front(Box::new(1));
    assert_eq!(m.pop_front(), Some(Box::new(1)));
    m.push_back(Box::new(2));
    m.push_back(Box::new(3));
    assert_eq!(m.len(), 2);
    assert_eq!(m.pop_front(), Some(Box::new(2)));
    assert_eq!(m.pop_front(), Some(Box::new(3)));
    assert_eq!(m.len(), 0);
    assert_eq!(m.pop_front(), None);
    m.push_back(Box::new(1));
    m.push_back(Box::new(3));
    m.push_back(Box::new(5));
    m.push_back(Box::new(7));
    assert_eq!(m.pop_front(), Some(Box::new(1)));

    let mut n = LinkedList::new();
    n.push_front(2);
    n.push_front(3);
    {
        // Mutate the front element in place.
        assert_eq!(n.front().unwrap(), &3);
        let x = n.front_mut().unwrap();
        assert_eq!(*x, 3);
        *x = 0;
    }
    {
        // Mutate the back element in place.
        assert_eq!(n.back().unwrap(), &2);
        let y = n.back_mut().unwrap();
        assert_eq!(*y, 2);
        *y = 1;
    }
    assert_eq!(n.pop_front(), Some(0));
    assert_eq!(n.pop_front(), Some(1));
}
2634
/// Seven-element fixture `[0, 1, 2, 3, 4, 5, 6]` shared by the iterator tests.
fn generate_test() -> LinkedList<i32> {
    (0..7).collect()
}
2638
/// Builds a `LinkedList` holding clones of the slice's elements, in order.
fn list_from<T: Clone>(v: &[T]) -> LinkedList<T> {
    let mut list = LinkedList::new();
    list.extend(v.iter().cloned());
    list
}
2642
/// `split_off`: at index 0 (everything moves to the returned list), at a
/// middle index, near the back, and at `len` (no-op: everything stays).
#[test]
fn test_split_off() {
    // singleton
    {
        let mut m = LinkedList::new();
        m.push_back(1);

        let p = m.split_off(0);
        assert_eq!(m.len(), 0);
        assert_eq!(p.len(), 1);
        assert_eq!(p.back(), Some(&1));
        assert_eq!(p.front(), Some(&1));
    }

    // not singleton, forwards
    {
        let u = vec![1, 2, 3, 4, 5];
        let mut m = list_from(&u);
        let mut n = m.split_off(2);
        assert_eq!(m.len(), 2);
        assert_eq!(n.len(), 3);
        // m keeps [1, 2], n takes [3, 4, 5].
        for elt in 1..3 {
            assert_eq!(m.pop_front(), Some(elt));
        }
        for elt in 3..6 {
            assert_eq!(n.pop_front(), Some(elt));
        }
    }
    // not singleton, backwards
    {
        let u = vec![1, 2, 3, 4, 5];
        let mut m = list_from(&u);
        let mut n = m.split_off(4);
        assert_eq!(m.len(), 4);
        assert_eq!(n.len(), 1);
        // m keeps [1..=4], n takes only [5].
        for elt in 1..5 {
            assert_eq!(m.pop_front(), Some(elt));
        }
        for elt in 5..6 {
            assert_eq!(n.pop_front(), Some(elt));
        }
    }

    // no-op on the last index
    {
        let mut m = LinkedList::new();
        m.push_back(1);

        let p = m.split_off(1);
        assert_eq!(m.len(), 1);
        assert_eq!(p.len(), 0);
        assert_eq!(m.back(), Some(&1));
        assert_eq!(m.front(), Some(&1));
    }
}
2698
/// Forward iteration yields elements in insertion order and `size_hint` is
/// exact before and after each `next`.
#[test]
fn test_iterator() {
    let m = generate_test();
    for (i, elt) in m.iter().enumerate() {
        assert_eq!(i as i32, *elt);
    }
    let mut n = LinkedList::new();
    assert_eq!(n.iter().next(), None);
    n.push_front(4);
    let mut it = n.iter();
    assert_eq!(it.size_hint(), (1, Some(1)));
    assert_eq!(it.next().unwrap(), &4);
    assert_eq!(it.size_hint(), (0, Some(0)));
    assert_eq!(it.next(), None);
}
2714
/// A cloned iterator starts at the same position as the original and then
/// advances independently — here the two are stepped in lockstep from both
/// ends and must always agree.
#[test]
fn test_iterator_clone() {
    let mut n = LinkedList::new();
    n.push_back(2);
    n.push_back(3);
    n.push_back(4);
    let mut it = n.iter();
    it.next();
    let mut jt = it.clone();
    assert_eq!(it.next(), jt.next());
    assert_eq!(it.next_back(), jt.next_back());
    assert_eq!(it.next(), jt.next());
}
2728
/// `next` and `next_back` consume from opposite ends and meet in the middle;
/// `size_hint` tracks the remaining element count throughout.
#[test]
fn test_iterator_double_end() {
    let mut n = LinkedList::new();
    assert_eq!(n.iter().next(), None);
    n.push_front(4);
    n.push_front(5);
    n.push_front(6);
    // List is now [6, 5, 4].
    let mut it = n.iter();
    assert_eq!(it.size_hint(), (3, Some(3)));
    assert_eq!(it.next().unwrap(), &6);
    assert_eq!(it.size_hint(), (2, Some(2)));
    assert_eq!(it.next_back().unwrap(), &4);
    assert_eq!(it.size_hint(), (1, Some(1)));
    assert_eq!(it.next_back().unwrap(), &5);
    assert_eq!(it.next_back(), None);
    assert_eq!(it.next(), None);
}
2746
/// Reversed shared iteration yields elements back-to-front with exact
/// `size_hint`s.
#[test]
fn test_rev_iter() {
    let m = generate_test();
    for (i, elt) in m.iter().rev().enumerate() {
        assert_eq!((6 - i) as i32, *elt);
    }
    let mut n = LinkedList::new();
    assert_eq!(n.iter().rev().next(), None);
    n.push_front(4);
    let mut it = n.iter().rev();
    assert_eq!(it.size_hint(), (1, Some(1)));
    assert_eq!(it.next().unwrap(), &4);
    assert_eq!(it.size_hint(), (0, Some(0)));
    assert_eq!(it.next(), None);
}
2762
/// Mutable iteration visits every element exactly once (counted via `len`)
/// in insertion order, with exact `size_hint`s.
#[test]
fn test_mut_iter() {
    let mut m = generate_test();
    let mut len = m.len();
    for (i, elt) in m.iter_mut().enumerate() {
        assert_eq!(i as i32, *elt);
        len -= 1;
    }
    assert_eq!(len, 0);
    let mut n = LinkedList::new();
    assert!(n.iter_mut().next().is_none());
    n.push_front(4);
    n.push_back(5);
    let mut it = n.iter_mut();
    assert_eq!(it.size_hint(), (2, Some(2)));
    assert!(it.next().is_some());
    assert!(it.next().is_some());
    assert_eq!(it.size_hint(), (0, Some(0)));
    assert!(it.next().is_none());
}
2783
/// Double-ended mutable iteration: `next`/`next_back` meet in the middle with
/// exact `size_hint`s (mirror of `test_iterator_double_end`).
#[test]
fn test_iterator_mut_double_end() {
    let mut n = LinkedList::new();
    assert!(n.iter_mut().next_back().is_none());
    n.push_front(4);
    n.push_front(5);
    n.push_front(6);
    // List is now [6, 5, 4].
    let mut it = n.iter_mut();
    assert_eq!(it.size_hint(), (3, Some(3)));
    assert_eq!(*it.next().unwrap(), 6);
    assert_eq!(it.size_hint(), (2, Some(2)));
    assert_eq!(*it.next_back().unwrap(), 4);
    assert_eq!(it.size_hint(), (1, Some(1)));
    assert_eq!(*it.next_back().unwrap(), 5);
    assert!(it.next_back().is_none());
    assert!(it.next().is_none());
}
2801
/// Reversed mutable iteration yields elements back-to-front and terminates.
#[test]
fn test_mut_rev_iter() {
    let mut m = generate_test();
    for (i, elt) in m.iter_mut().rev().enumerate() {
        assert_eq!((6 - i) as i32, *elt);
    }
    let mut n = LinkedList::new();
    assert!(n.iter_mut().rev().next().is_none());
    n.push_front(4);
    let mut it = n.iter_mut().rev();
    assert!(it.next().is_some());
    assert!(it.next().is_none());
}
2815
/// Equality is element-wise and order-sensitive; two empty lists are equal,
/// and how a list was built (`push_front` vs `push_back`) doesn't matter.
#[test]
fn test_eq() {
    let mut n = list_from(&[]);
    let mut m = list_from(&[]);
    assert_eq!(n, m);
    n.push_front(1);
    assert_ne!(n, m);
    m.push_back(1);
    assert_eq!(n, m);

    let n = list_from(&[2, 3, 4]);
    let m = list_from(&[1, 2, 3]);
    assert_ne!(n, m);
}
2830
/// Hashing depends only on the element sequence: a list built with `push_back`
/// hashes the same as one built with `push_front` in reverse order.
/// (`crate::hash` is a test helper defined elsewhere in this crate.)
#[test]
fn test_hash() {
    use crate::hash;

    let mut x = LinkedList::new();
    let mut y = LinkedList::new();

    assert!(hash(&x) == hash(&y));

    x.push_back(1);
    x.push_back(2);
    x.push_back(3);

    y.push_front(3);
    y.push_front(2);
    y.push_front(1);

    assert!(hash(&x) == hash(&y));
}
2850
/// Ordering is lexicographic: the empty list precedes any non-empty list, and
/// every list is `<=` and `>=` itself.
#[test]
fn test_ord() {
    let empty = list_from(&[]);
    let filled = list_from(&[1, 2, 3]);
    assert!(empty < filled);
    assert!(filled > empty);
    assert!(empty <= empty);
    assert!(empty >= empty);
}
2860
/// `PartialOrd` with NaN elements: a comparison that reaches a NaN element is
/// false in every direction, so such lists are unordered. Comparisons decided
/// by a non-NaN prefix (or by length) still order normally.
#[test]
fn test_ord_nan() {
    let nan = 0.0f64 / 0.0;
    let n = list_from(&[nan]);
    let m = list_from(&[nan]);
    assert!(!(n < m));
    assert!(!(n > m));
    assert!(!(n <= m));
    assert!(!(n >= m));

    let n = list_from(&[nan]);
    let one = list_from(&[1.0f64]);
    assert!(!(n < one));
    assert!(!(n > one));
    assert!(!(n <= one));
    assert!(!(n >= one));

    // NaN in the third position makes the whole comparison unordered.
    let u = list_from(&[1.0f64, 2.0, nan]);
    let v = list_from(&[1.0f64, 2.0, 3.0]);
    assert!(!(u < v));
    assert!(!(u > v));
    assert!(!(u <= v));
    assert!(!(u >= v));

    let s = list_from(&[1.0f64, 2.0, 4.0, 2.0]);
    let t = list_from(&[1.0f64, 2.0, 3.0, 2.0]);
    assert!(!(s < t));
    // `s` vs `one` is decided by length after the equal first element, before
    // any NaN could be involved: [1.0, 2.0, ...] > [1.0].
    assert!(s > one);
    assert!(!(s <= one));
    assert!(s >= one);
}
2892
/// `Debug` formatting renders like a slice: `[a, b, c]`.
#[test]
fn test_show() {
    let ints: LinkedList<_> = (0..10).collect();
    assert_eq!(format!("{:?}", ints), "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]");

    let words: LinkedList<&str> = ["just", "one", "test", "more"].iter().copied().collect();
    assert_eq!(format!("{:?}", words), "[\"just\", \"one\", \"test\", \"more\"]");
}
2901
/// `extend` accepts borrowed sources: a `&[T]` slice and a `&LinkedList<T>`
/// (elements are copied, the source list is left intact).
#[test]
fn test_extend_ref() {
    let mut a = LinkedList::new();
    a.push_back(1);

    a.extend(&[2, 3, 4]);

    assert_eq!(a.len(), 4);
    assert_eq!(a.iter().copied().collect::<Vec<_>>(), [1, 2, 3, 4]);

    let mut b = LinkedList::new();
    b.extend(&[5, 6]);
    a.extend(&b);

    assert_eq!(a.len(), 6);
    assert_eq!(a.iter().copied().collect::<Vec<_>>(), [1, 2, 3, 4, 5, 6]);
}
2920
/// `extend` from an owned iterator, and from another `LinkedList` by value
/// (the latter specializes to `append`).
#[test]
fn test_extend() {
    let mut a = LinkedList::new();
    a.push_back(1);
    a.extend(vec![2, 3, 4]); // uses iterator
    assert_eq!(a.len(), 4);
    assert_eq!(a.iter().copied().collect::<Vec<_>>(), [1, 2, 3, 4]);

    let b = (5..8).collect::<LinkedList<_>>();
    a.extend(b); // specializes to `append`
    assert_eq!(a.len(), 7);
    assert_eq!(a.iter().copied().collect::<Vec<_>>(), [1, 2, 3, 4, 5, 6, 7]);
}
2936
/// `contains` finds present elements, rejects absent ones, and finds nothing
/// after `clear`.
#[test]
fn test_contains() {
    let mut l: LinkedList<i32> = (2..5).collect();

    assert!(l.contains(&3));
    assert!(!l.contains(&1));

    l.clear();

    assert!(!l.contains(&3));
}
2949
/// `drain_filter` (unstable API) on an empty list yields nothing and keeps a
/// `(0, Some(0))` size hint, even after exhaustion.
#[test]
fn drain_filter_empty() {
    let mut list: LinkedList<i32> = LinkedList::new();

    {
        let mut iter = list.drain_filter(|_| true);
        assert_eq!(iter.size_hint(), (0, Some(0)));
        assert_eq!(iter.next(), None);
        assert_eq!(iter.size_hint(), (0, Some(0)));
        assert_eq!(iter.next(), None);
        assert_eq!(iter.size_hint(), (0, Some(0)));
    }

    assert_eq!(list.len(), 0);
    assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![]);
}
2966
/// `drain_filter` with zero-sized elements: every element drains, and the
/// upper bound of `size_hint` shrinks by one per drained item.
#[test]
fn drain_filter_zst() {
    let mut list: LinkedList<_> = vec![(), (), (), (), ()].into_iter().collect();
    let initial_len = list.len();
    let mut count = 0;

    {
        let mut iter = list.drain_filter(|_| true);
        assert_eq!(iter.size_hint(), (0, Some(initial_len)));
        while let Some(_) = iter.next() {
            count += 1;
            assert_eq!(iter.size_hint(), (0, Some(initial_len - count)));
        }
        assert_eq!(iter.size_hint(), (0, Some(0)));
        assert_eq!(iter.next(), None);
        assert_eq!(iter.size_hint(), (0, Some(0)));
    }

    assert_eq!(count, initial_len);
    assert_eq!(list.len(), 0);
    assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![]);
}
2989
/// `drain_filter` whose predicate never matches removes nothing: the list
/// keeps all elements in order and the iterator yields zero items.
#[test]
fn drain_filter_false() {
    let mut list: LinkedList<_> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect();

    let initial_len = list.len();
    let mut count = 0;

    {
        let mut iter = list.drain_filter(|_| false);
        assert_eq!(iter.size_hint(), (0, Some(initial_len)));
        for _ in iter.by_ref() {
            count += 1;
        }
        assert_eq!(iter.size_hint(), (0, Some(0)));
        assert_eq!(iter.next(), None);
        assert_eq!(iter.size_hint(), (0, Some(0)));
    }

    assert_eq!(count, 0);
    assert_eq!(list.len(), initial_len);
    assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
}
3012
/// `drain_filter` whose predicate always matches drains every element,
/// leaving the list empty; the upper size hint shrinks per drained item.
#[test]
fn drain_filter_true() {
    let mut list: LinkedList<_> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect();

    let initial_len = list.len();
    let mut count = 0;

    {
        let mut iter = list.drain_filter(|_| true);
        assert_eq!(iter.size_hint(), (0, Some(initial_len)));
        while let Some(_) = iter.next() {
            count += 1;
            assert_eq!(iter.size_hint(), (0, Some(initial_len - count)));
        }
        assert_eq!(iter.size_hint(), (0, Some(0)));
        assert_eq!(iter.next(), None);
        assert_eq!(iter.size_hint(), (0, Some(0)));
    }

    assert_eq!(count, initial_len);
    assert_eq!(list.len(), 0);
    assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![]);
}
3036
/// `drain_filter` removing even numbers from lists with mixed keep/remove
/// runs. In the bracket diagrams, `x` marks a removed (even) element and `+`
/// a kept (odd) one, covering removals at the front, back, and interior.
#[test]
fn drain_filter_complex() {
    {
        //                [+xxx++++++xxxxx++++x+x++]
        let mut list = vec![
            1, 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37,
            39,
        ]
        .into_iter()
        .collect::<LinkedList<_>>();

        let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
        assert_eq!(removed.len(), 10);
        assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);

        assert_eq!(list.len(), 14);
        assert_eq!(
            list.into_iter().collect::<Vec<_>>(),
            vec![1, 7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]
        );
    }

    {
        // [xxx++++++xxxxx++++x+x++]
        let mut list = vec![
            2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, 39,
        ]
        .into_iter()
        .collect::<LinkedList<_>>();

        let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
        assert_eq!(removed.len(), 10);
        assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);

        assert_eq!(list.len(), 13);
        assert_eq!(
            list.into_iter().collect::<Vec<_>>(),
            vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]
        );
    }

    {
        // [xxx++++++xxxxx++++x+x]
        let mut list =
            vec![2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36]
                .into_iter()
                .collect::<LinkedList<_>>();

        let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
        assert_eq!(removed.len(), 10);
        assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);

        assert_eq!(list.len(), 11);
        assert_eq!(
            list.into_iter().collect::<Vec<_>>(),
            vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35]
        );
    }

    {
        // [xxxxxxxxxx+++++++++++]
        let mut list = vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19]
            .into_iter()
            .collect::<LinkedList<_>>();

        let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
        assert_eq!(removed.len(), 10);
        assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);

        assert_eq!(list.len(), 10);
        assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]);
    }

    {
        // [+++++++++++xxxxxxxxxx]
        let mut list = vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20]
            .into_iter()
            .collect::<LinkedList<_>>();

        let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
        assert_eq!(removed.len(), 10);
        assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);

        assert_eq!(list.len(), 10);
        assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]);
    }
}
3124
/// A panicking `Drop` during `drain_filter`'s own drop must not leak the
/// remaining nodes: all 8 elements are dropped even though one panics
/// mid-drain.
#[test]
fn drain_filter_drop_panic_leak() {
    static mut DROPS: i32 = 0;

    // `D(true)` panics when dropped; `D(false)` only counts the drop.
    struct D(bool);

    impl Drop for D {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }

            if self.0 {
                panic!("panic in `drop`");
            }
        }
    }

    let mut q = LinkedList::new();
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_front(D(false));
    q.push_front(D(true));
    q.push_front(D(false));

    catch_unwind(AssertUnwindSafe(|| drop(q.drain_filter(|_| true)))).ok();

    assert_eq!(unsafe { DROPS }, 8);
    assert!(q.is_empty());
}
3158
/// A panicking predicate during `drain_filter`: elements accepted before the
/// panic (values 0 and 1) are drained and dropped; the remaining six stay in
/// the list without leaking.
#[test]
fn drain_filter_pred_panic_leak() {
    static mut DROPS: i32 = 0;

    #[derive(Debug)]
    struct D(u32);

    impl Drop for D {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }
        }
    }

    let mut q = LinkedList::new();
    q.push_back(D(3));
    q.push_back(D(4));
    q.push_back(D(5));
    q.push_back(D(6));
    q.push_back(D(7));
    q.push_front(D(2));
    q.push_front(D(1));
    q.push_front(D(0));

    catch_unwind(AssertUnwindSafe(|| {
        drop(q.drain_filter(|item| if item.0 >= 2 { panic!() } else { true }))
    }))
    .ok();

    assert_eq!(unsafe { DROPS }, 2); // 0 and 1
    assert_eq!(q.len(), 6);
}
3192
#[test]
fn test_drop() {
    // Counts destructor invocations across the whole test. Tests run
    // single-threaded, so the unsynchronized increments are fine in practice.
    static mut DROPS: i32 = 0;
    struct Elem;
    impl Drop for Elem {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }
        }
    }

    // Insert four elements, two at each end, then destroy the list and
    // verify every element's destructor ran exactly once.
    let mut list = LinkedList::new();
    for _ in 0..2 {
        list.push_back(Elem);
        list.push_front(Elem);
    }
    drop(list);

    assert_eq!(unsafe { DROPS }, 4);
}
3214
#[test]
fn test_drop_with_pop() {
    // Destructor counter; single-threaded tests make the unsynchronized
    // `static mut` acceptable in practice.
    static mut DROPS: i32 = 0;
    struct Elem;
    impl Drop for Elem {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }
        }
    }

    // Four elements, two pushed at each end.
    let mut list = LinkedList::new();
    for _ in 0..2 {
        list.push_back(Elem);
        list.push_front(Elem);
    }

    // Popping hands ownership back to the caller; dropping each returned
    // `Option<Elem>` must run exactly one destructor per pop.
    drop(list.pop_back());
    drop(list.pop_front());
    assert_eq!(unsafe { DROPS }, 2);

    // The two remaining elements are destroyed along with the list,
    // and the popped ones must not be dropped a second time.
    drop(list);
    assert_eq!(unsafe { DROPS }, 4);
}
3240
#[test]
fn test_drop_clear() {
    // Destructor counter; single-threaded tests make the unsynchronized
    // `static mut` acceptable in practice.
    static mut DROPS: i32 = 0;
    struct Elem;
    impl Drop for Elem {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }
        }
    }

    // Four elements, two pushed at each end.
    let mut list = LinkedList::new();
    for _ in 0..2 {
        list.push_back(Elem);
        list.push_front(Elem);
    }

    // `clear` must drop all four elements immediately ...
    list.clear();
    assert_eq!(unsafe { DROPS }, 4);

    // ... and destroying the now-empty list must not drop anything else.
    drop(list);
    assert_eq!(unsafe { DROPS }, 4);
}
3264
#[test]
fn test_drop_panic() {
    // Counts destructor runs; single-threaded tests make the unsynchronized
    // `static mut` acceptable in practice.
    static mut DROPS: i32 = 0;

    // Element whose destructor panics when constructed with `true`.
    struct D(bool);

    impl Drop for D {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }

            if self.0 {
                panic!("panic in `drop`");
            }
        }
    }

    // Eight elements; the panicking one is pushed last at the front, so it
    // is the very first element the list's drop glue encounters.
    let mut q = LinkedList::new();
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_front(D(false));
    q.push_front(D(false));
    q.push_front(D(true));

    // Dropping the list panics on the first element, but the remaining
    // seven must still be freed while the panic unwinds.
    catch_unwind(move || drop(q)).ok();

    assert_eq!(unsafe { DROPS }, 8);
}
3297use std::any::Any;
3298use std::cell::RefCell;
3299use std::cmp::PartialEq;
3300use std::iter::TrustedLen;
3301use std::mem;
3302use std::sync::{Arc, Weak};
3303
#[test]
fn uninhabited() {
    // A type with no values. `Weak::new` never allocates, so a weak pointer
    // to an uninhabited type is perfectly constructible.
    enum Void {}

    // A fresh `Weak` dangles: cloning must work and upgrading must fail.
    let mut w = Weak::<Void>::new();
    w = w.clone();
    assert!(w.upgrade().is_none());

    // Coerce to a trait object (unsizing) and repeat the same checks.
    let mut w: Weak<dyn Any> = w;
    w = w.clone();
    assert!(w.upgrade().is_none());
}
3315
#[test]
fn slice() {
    // Unsize an `Arc<[u32; 3]>` into `Arc<[u32]>` and compare it with a
    // slice built via the `From<&[u32]>` conversion; contents must match.
    let fixed: Arc<[u32; 3]> = Arc::new([3, 2, 1]);
    let a: Arc<[u32]> = fixed; // Unsizing
    let b: Arc<[u32]> = Arc::from(&[3, 2, 1][..]); // Conversion
    assert_eq!(a, b);

    // Exercise is_dangling() with a DST: a weak pointer to live slice data
    // must still upgrade after being cloned.
    let mut weak = Arc::downgrade(&a);
    weak = weak.clone();
    assert!(weak.upgrade().is_some());
}
3328
#[test]
fn trait_object() {
    // Unsize an `Arc<u32>` into a trait object.
    let value: Arc<u32> = Arc::new(4);
    let value: Arc<dyn Any> = value; // Unsizing

    // Exercise is_dangling() with a DST: a weak pointer to live trait-object
    // data must still upgrade after being cloned.
    let mut weak = Arc::downgrade(&value);
    weak = weak.clone();
    assert!(weak.upgrade().is_some());

    // A dangling `Weak` must stay dangling through cloning and through
    // unsizing to a trait object.
    let mut dangling = Weak::<u32>::new();
    dangling = dangling.clone();
    assert!(dangling.upgrade().is_none());
    let mut dangling: Weak<dyn Any> = dangling; // Unsizing
    dangling = dangling.clone();
    assert!(dangling.upgrade().is_none());
}
3346
#[test]
fn float_nan_ne() {
    // NaN compares unequal to everything — including itself. `Arc`'s
    // `PartialEq` must delegate to the inner value here rather than take a
    // pointer-equality shortcut (both operands are the same allocation).
    let nan = Arc::new(f32::NAN);
    assert!(nan != nan);
    assert!(!(nan == nan));
}
3353
#[test]
fn partial_eq() {
    // `eq` bumps a counter on both operands so we can count how many times
    // `Arc`'s comparison operators actually invoke it.
    struct Probe(RefCell<usize>);
    impl PartialEq for Probe {
        fn eq(&self, other: &Probe) -> bool {
            *self.0.borrow_mut() += 1;
            *other.0.borrow_mut() += 1;
            true
        }
    }
    let probe = Arc::new(Probe(RefCell::new(0)));
    // Without an `Eq` bound there is no fast path: `==` and `!=` must each
    // call `eq`, touching the (shared) value twice per comparison.
    assert!(probe == probe);
    assert!(!(probe != probe));
    assert_eq!(*probe.0.borrow(), 4);
}
3369
#[test]
fn eq() {
    // Same counting scheme as `partial_eq`, but the type also derives `Eq`.
    #[derive(Eq)]
    struct Probe(RefCell<usize>);
    impl PartialEq for Probe {
        fn eq(&self, other: &Probe) -> bool {
            *self.0.borrow_mut() += 1;
            *other.0.borrow_mut() += 1;
            true
        }
    }
    let probe = Arc::new(Probe(RefCell::new(0)));
    // With `Eq`, comparing an Arc against itself never calls `eq` at all —
    // the counter staying at 0 shows a pointer-based fast path was taken.
    assert!(probe == probe);
    assert!(!(probe != probe));
    assert_eq!(*probe.0.borrow(), 0);
}
3386
// The test code below is identical to that in `rc.rs`.
// For better maintainability we therefore define this type alias.
type Rc<T> = Arc<T>;

// Number of elements fed through the shared-from-iterator tests below.
const SHARED_ITER_MAX: u16 = 100;

// Compile-time probe: only accepts iterators implementing `TrustedLen`.
fn assert_trusted_len<I: TrustedLen>(_: &I) {}
3394
#[test]
fn shared_from_iter_normal() {
    // Exercise the base implementation for non-`TrustedLen` iterators.
    {
        // `Filter` is never `TrustedLen` since we don't
        // know statically how many elements will be kept:
        let iter = (0..SHARED_ITER_MAX).filter(|x| x % 2 == 0).map(Box::new);

        // Collecting into a `Vec<T>` or `Rc<[T]>` should make no difference:
        let as_vec = iter.clone().collect::<Vec<_>>();
        let as_rc: Rc<[_]> = iter.collect();
        assert_eq!(&*as_vec, &*as_rc);

        // Clone a bit and let these get dropped.
        {
            let _strong_a = as_rc.clone();
            let _strong_b = as_rc.clone();
            let _weak = Rc::downgrade(&_strong_b);
        }
    } // Drop what hasn't been here.
}
3416
#[test]
fn shared_from_iter_trustedlen_normal() {
    // Exercise the `TrustedLen` implementation under normal circumstances
    // where `size_hint()` matches `(_, Some(exact_len))`.
    {
        let iter = (0..SHARED_ITER_MAX).map(Box::new);
        assert_trusted_len(&iter);

        // Collecting into a `Vec<T>` or `Rc<[T]>` should make no difference:
        let vec = iter.clone().collect::<Vec<_>>();
        let rc = iter.collect::<Rc<[_]>>();
        assert_eq!(&*vec, &*rc);
        // The shared slice stores exactly SHARED_ITER_MAX `Box<u16>`s.
        assert_eq!(mem::size_of::<Box<u16>>() * SHARED_ITER_MAX as usize, mem::size_of_val(&*rc));

        // Clone a bit and let these get dropped.
        {
            let _rc_2 = rc.clone();
            let _rc_3 = rc.clone();
            let _rc_4 = Rc::downgrade(&_rc_3);
        }
    } // Drop what hasn't been here.

    // Try a ZST to make sure it is handled well.
    {
        let iter = (0..SHARED_ITER_MAX).map(drop);
        let vec = iter.clone().collect::<Vec<_>>();
        let rc = iter.collect::<Rc<[_]>>();
        assert_eq!(&*vec, &*rc);
        // A shared slice of `()` occupies no space at all.
        assert_eq!(0, mem::size_of_val(&*rc));
        {
            let _rc_2 = rc.clone();
            let _rc_3 = rc.clone();
            let _rc_4 = Rc::downgrade(&_rc_3);
        }
    }
}
3453
#[test]
#[should_panic = "I've almost got 99 problems."]
fn shared_from_iter_trustedlen_panic() {
    // Exercise the `TrustedLen` implementation when `size_hint()` matches
    // `(_, Some(exact_len))` but where `.next()` drops before the last iteration.
    let iter = (0..SHARED_ITER_MAX).map(|val| match val {
        98 => panic!("I've almost got 99 problems."),
        _ => Box::new(val),
    });
    assert_trusted_len(&iter);
    // The panic raised while producing element 98 must propagate out of
    // `collect` (satisfying the `should_panic` expectation above).
    let _ = iter.collect::<Rc<[_]>>();

    // `collect` panicked above, so this line must never execute.
    panic!("I am unreachable.");
}
3468
#[test]
fn shared_from_iter_trustedlen_no_fuse() {
    // Exercise the `TrustedLen` implementation when `size_hint()` matches
    // `(_, Some(exact_len))` but where the iterator does not behave in a fused manner.
    struct Iter(std::vec::IntoIter<Option<Box<u8>>>);

    // SAFETY: the hint below always reports exactly (2, Some(2)), and the
    // backing vector constructed in this test yields two `Some`s first, so
    // the first two `next()` calls — all a trusted consumer may rely on —
    // do produce items.
    unsafe impl TrustedLen for Iter {}

    impl Iterator for Iter {
        fn size_hint(&self) -> (usize, Option<usize>) {
            (2, Some(2))
        }

        type Item = Box<u8>;

        fn next(&mut self) -> Option<Self::Item> {
            // `flatten` reports exhaustion at the `None` sentinel, after
            // which the trailing `Some(12)` would resume — i.e. this
            // iterator is deliberately not fused.
            self.0.next().flatten()
        }
    }

    let vec = vec![Some(Box::new(42)), Some(Box::new(24)), None, Some(Box::new(12))];
    let iter = Iter(vec.into_iter());
    assert_trusted_len(&iter);
    // Only the first two items may be collected; the non-fused tail past
    // the `None` must never be observed.
    assert_eq!(&[Box::new(42), Box::new(24)], &*iter.collect::<Rc<[_]>>());
}
3494use std::borrow::Cow;
3495use std::cmp::Ordering::{Equal, Greater, Less};
3496use std::str::{from_utf8, from_utf8_unchecked};
3497
#[test]
fn test_le() {
    // `<=` on string slices is lexicographic byte order; the empty string
    // sorts before (or equal to) everything.
    assert_ne!("foo", "bar");
    assert!("foo" <= "foo");
    assert!("" <= "foo");
    assert!("" <= "");
}
3505
#[test]
fn test_find() {
    // `find` accepts both `char` patterns and closure predicates, and
    // reports the byte offset of the first match.
    let greeting = "hello";
    assert_eq!(greeting.find('l'), Some(2));
    assert_eq!(greeting.find(|c: char| c == 'o'), Some(4));
    assert!(greeting.find('x').is_none());
    assert!(greeting.find(|c: char| c == 'x').is_none());

    // Offsets are byte indices, so multi-byte characters inflate them.
    let mixed = "ประเทศไทย中华Việt Nam";
    assert_eq!(mixed.find('华'), Some(30));
    assert_eq!(mixed.find(|c: char| c == '华'), Some(30));
}
3515
#[test]
fn test_rfind() {
    // `rfind` searches from the back but still reports the byte offset of
    // the match, for both `char` and closure patterns.
    let greeting = "hello";
    assert_eq!(greeting.rfind('l'), Some(3));
    assert_eq!(greeting.rfind(|c: char| c == 'o'), Some(4));
    assert!(greeting.rfind('x').is_none());
    assert!(greeting.rfind(|c: char| c == 'x').is_none());

    // Byte offsets again account for multi-byte characters.
    let mixed = "ประเทศไทย中华Việt Nam";
    assert_eq!(mixed.rfind('华'), Some(30));
    assert_eq!(mixed.rfind(|c: char| c == '华'), Some(30));
}
3525
#[test]
fn test_collect() {
    // `String: FromIterator<char>` must round-trip arbitrary &str content,
    // including the empty string and multi-byte characters.
    for case in ["", "ประเทศไทย中"].iter() {
        let rebuilt: String = case.chars().collect();
        assert_eq!(*case, rebuilt);
    }
}
3535
#[test]
fn test_into_bytes() {
    // `into_bytes` consumes the `String` and yields its UTF-8 buffer.
    let bytes = String::from("asdf").into_bytes();
    assert_eq!(bytes, b"asdf");
}
3542
#[test]
fn test_find_str() {
    // byte positions
    // The empty needle matches at offset 0; an absent needle yields None.
    assert_eq!("".find(""), Some(0));
    assert!("banana".find("apple pie").is_none());

    // Offsets from slices are relative to the slice start, not the parent.
    let data = "abcabc";
    assert_eq!(data[0..6].find("ab"), Some(0));
    assert_eq!(data[2..6].find("ab"), Some(3 - 2));
    assert!(data[2..4].find("ab").is_none());

    // Doubled multi-byte text: all expected offsets below are byte indices
    // (Thai chars are 3 bytes, CJK 3 bytes, 'ệ' 3 bytes), hence the gaps.
    let string = "ประเทศไทย中华Việt Nam";
    let mut data = String::from(string);
    data.push_str(string);
    assert!(data.find("ไท华").is_none());
    assert_eq!(data[0..43].find(""), Some(0));
    assert_eq!(data[6..43].find(""), Some(6 - 6));

    assert_eq!(data[0..43].find("ประ"), Some(0));
    assert_eq!(data[0..43].find("ทศไ"), Some(12));
    assert_eq!(data[0..43].find("ย中"), Some(24));
    assert_eq!(data[0..43].find("iệt"), Some(34));
    assert_eq!(data[0..43].find("Nam"), Some(40));

    // Same needles against the second copy; slice-relative offsets again.
    assert_eq!(data[43..86].find("ประ"), Some(43 - 43));
    assert_eq!(data[43..86].find("ทศไ"), Some(55 - 43));
    assert_eq!(data[43..86].find("ย中"), Some(67 - 43));
    assert_eq!(data[43..86].find("iệt"), Some(77 - 43));
    assert_eq!(data[43..86].find("Nam"), Some(83 - 43));

    // find every substring -- assert that it finds it, or an earlier occurrence.
    let string = "Việt Namacbaabcaabaaba";
    for (i, ci) in string.char_indices() {
        let ip = i + ci.len_utf8();
        for j in string[ip..].char_indices().map(|(i, _)| i).chain(Some(string.len() - ip)) {
            let pat = &string[i..ip + j];
            // `find` must report the first occurrence, which can only be
            // at or before where we cut the pattern out.
            assert!(match string.find(pat) {
                None => false,
                Some(x) => x <= i,
            });
            // Symmetrically, `rfind` reports the last occurrence: at or
            // after the cut position.
            assert!(match string.rfind(pat) {
                None => false,
                Some(x) => x >= i,
            });
        }
    }
}
3590
/// Shorthand used by the concat/join tests below: owned `String` from a literal.
fn s(x: &str) -> String {
    String::from(x)
}
3594
// Asserts that calling `.concat()` on `$string` (any slice-like collection
// of string-ish items) produces exactly the `$expected` text.
macro_rules! test_concat {
    ($expected: expr, $string: expr) => {{
        let s: String = $string.concat();
        assert_eq!($expected, s);
    }};
}
3601
#[test]
fn test_concat_for_different_types() {
    // Concatenation works for owned `String`s and for `&str` slices alike.
    let owned = vec![s("a"), s("b")];
    test_concat!("ab", owned);
    let borrowed = vec!["a", "b"];
    test_concat!("ab", borrowed);
}
3607
#[test]
fn test_concat_for_different_lengths() {
    // Zero, one, two, and three fragments — including an empty fragment.
    let none: &[&str] = &[];
    test_concat!("", none);
    test_concat!("a", ["a"]);
    test_concat!("ab", ["a", "b"]);
    test_concat!("abc", ["", "a", "bc"]);
}
3616
// Asserts that joining `$string`'s items with `$delim` produces exactly
// the `$expected` text.
macro_rules! test_join {
    ($expected: expr, $string: expr, $delim: expr) => {{
        let s = $string.join($delim);
        assert_eq!($expected, s);
    }};
}
3623
#[test]
fn test_join_for_different_types() {
    // The separator may be a literal or a borrowed `String`; the items may
    // live in arrays, `Vec`s, or slices borrowed out of a `Vec`, holding
    // either `&str` or owned `String`s.
    test_join!("a-b", ["a", "b"], "-");
    let sep = "-".to_string();
    test_join!("a-b", [s("a"), s("b")], &*sep);
    test_join!("a-b", vec!["a", "b"], &*sep);
    test_join!("a-b", &*vec!["a", "b"], "-");
    test_join!("a-b", vec![s("a"), s("b")], "-");
}
3633
#[test]
fn test_join_for_different_lengths() {
    // Zero, one, two, and three fragments; separators only appear between
    // fragments, so an empty first fragment yields a leading separator.
    let none: &[&str] = &[];
    test_join!("", none, "-");
    test_join!("a", ["a"], "-");
    test_join!("a-b", ["a", "b"], "-");
    test_join!("-a-bc", ["", "a", "bc"], "-");
}
3642
3643// join has fast paths for small separators up to 4 bytes
3644// this tests the slow paths.
3645#[test]
3646fn test_join_for_different_lengths_with_long_separator() {
3647    assert_eq!("~~~~~".len(), 15);
3648
3649    let empty: &[&str] = &[];
3650    test_join!("", empty, "~~~~~");
3651    test_join!("a", ["a"], "~~~~~");
3652    test_join!("a~~~~~b", ["a", "b"], "~~~~~");
3653    test_join!("~~~~~a~~~~~bc", ["", "a", "bc"], "~~~~~");
3654}
3655
// Regression test for issue 80335: `join` must not assume that a `Borrow`
// impl returns the same slice every time it is called.
// (sic: the "isue" typo in the name is preserved to keep the test id stable.)
#[test]
fn test_join_isue_80335() {
    use core::{borrow::Borrow, cell::Cell};

    // A `Borrow<str>` impl that answers "123456" on its first call and "0"
    // on every call after that.
    struct WeirdBorrow {
        state: Cell<bool>,
    }

    impl Default for WeirdBorrow {
        fn default() -> Self {
            WeirdBorrow { state: Cell::new(false) }
        }
    }

    impl Borrow<str> for WeirdBorrow {
        fn borrow(&self) -> &str {
            let state = self.state.get();
            if state {
                "0"
            } else {
                self.state.set(true);
                "123456"
            }
        }
    }

    // The expected "0-0-0" is built from each element's *second* answer:
    // join must produce a correct (non-corrupt) result even though the
    // first borrow of each element reported a much longer string.
    let arr: [WeirdBorrow; 3] = Default::default();
    test_join!("0-0-0", arr, "-");
}
3685
#[test]
#[cfg_attr(miri, ignore)] // Miri is too slow
fn test_unsafe_slice() {
    // SAFETY: every range below is in bounds and lands on char boundaries
    // ("abc" is pure ASCII).
    assert_eq!("ab", unsafe { "abc".get_unchecked(0..2) });
    assert_eq!("bc", unsafe { "abc".get_unchecked(1..3) });
    assert_eq!("", unsafe { "abc".get_unchecked(1..1) });

    // Also slice a large allocation: a million ASCII 'a's cut down to the
    // first half million.
    let letters = "a".repeat(1_000_000);
    let expected = "a".repeat(500_000);
    // SAFETY: 0..500000 is in bounds, and ASCII chars are one byte each.
    assert_eq!(expected, unsafe { letters.get_unchecked(0..500000) });
}
3713
#[test]
fn test_starts_with() {
    // Matching cases: empty prefixes match everything (even ""), and
    // multi-byte characters are compared by byte content.
    let yes = [("", ""), ("abc", ""), ("abc", "a"), ("ödd", "öd")];
    for &(text, prefix) in yes.iter() {
        assert!(text.starts_with(prefix), "{:?} should start with {:?}", text, prefix);
    }

    // Non-matching cases: prefix longer than the text, or simply absent.
    let no = [("a", "abc"), ("", "abc"), ("ödd", "-")];
    for &(text, prefix) in no.iter() {
        assert!(!text.starts_with(prefix), "{:?} should not start with {:?}", text, prefix);
    }
}
3724
#[test]
fn test_ends_with() {
    // Matching cases: empty suffixes match everything (even ""), and
    // multi-byte characters are compared by byte content.
    let yes = [("", ""), ("abc", ""), ("abc", "c"), ("ddö", "dö")];
    for &(text, suffix) in yes.iter() {
        assert!(text.ends_with(suffix), "{:?} should end with {:?}", text, suffix);
    }

    // Non-matching cases: suffix longer than the text, or simply absent.
    let no = [("a", "abc"), ("", "abc"), ("ddö", "-")];
    for &(text, suffix) in no.iter() {
        assert!(!text.ends_with(suffix), "{:?} should not end with {:?}", text, suffix);
    }
}
3735
#[test]
fn test_is_empty() {
    // Only the zero-length string slice is empty.
    let nothing = "";
    assert!(nothing.is_empty());
    let something = "a";
    assert!(!something.is_empty());
}
3741
#[test]
fn test_replacen() {
    // `replacen` replaces at most `count` occurrences, left to right.
    assert_eq!("".replacen('a', "b", 5), "");
    assert_eq!("acaaa".replacen("a", "b", 3), "bcbba");
    // A count of zero leaves the input untouched.
    assert_eq!("aaaa".replacen("a", "b", 0), "aaaa");

    // Multi-character needles, including replacement with the empty string.
    let needle = "test";
    assert_eq!(" test test ".replacen(needle, "toast", 3), " toast toast ");
    assert_eq!(" test test ".replacen(needle, "toast", 0), " test test ");
    assert_eq!(" test test ".replacen(needle, "", 5), "   ");

    // Closure / char-class patterns are supported too.
    assert_eq!("qwer123zxc789".replacen(char::is_numeric, "", 3), "qwerzxc789");
}
3755
#[test]
fn test_replace() {
    // No match, one match, and several matches of a one-byte needle.
    let needle = "a";
    assert_eq!("".replace(needle, "b"), "");
    assert_eq!("a".replace(needle, "b"), "b");
    assert_eq!("ab".replace(needle, "b"), "bb");

    // Replacement may be longer than the needle, or empty.
    let word = "test";
    assert_eq!(" test test ".replace(word, "toast"), " toast toast ");
    assert_eq!(" test test ".replace(word, ""), "   ");
}
3766
#[test]
fn test_replace_2a() {
    // Replace a needle of multi-byte Thai characters at the very start.
    let haystack = "ประเทศไทย中华";
    let replacement = "دولة الكويت";

    let needle = "ประเ";
    let expected = "دولة الكويتทศไทย中华";
    assert_eq!(haystack.replace(needle, replacement), expected);
}
3776
#[test]
fn test_replace_2b() {
    // Replace a multi-byte needle that sits in the interior of the string.
    let haystack = "ประเทศไทย中华";
    let replacement = "دولة الكويت";

    let needle = "ะเ";
    let expected = "ปรدولة الكويتทศไทย中华";
    assert_eq!(haystack.replace(needle, replacement), expected);
}
3786
#[test]
fn test_replace_2c() {
    // Replace a multi-byte needle at the very end of the string.
    let haystack = "ประเทศไทย中华";
    let replacement = "دولة الكويت";

    let needle = "中华";
    let expected = "ประเทศไทยدولة الكويت";
    assert_eq!(haystack.replace(needle, replacement), expected);
}
3796
#[test]
fn test_replace_2d() {
    // The needle never occurs (its chars appear, but not adjacently), so
    // the input must come back unchanged.
    let haystack = "ประเทศไทย中华";
    let replacement = "دولة الكويت";

    let needle = "ไท华";
    assert_eq!(haystack.replace(needle, replacement), haystack);
}
3805
#[test]
fn test_replace_pattern() {
    // Exercises `replace` with each pattern flavor: &str, char, char-slice,
    // and closure.
    // NOTE(review): the "���" replacement literals appear to be mojibake
    // (U+FFFD replacement characters), possibly a garbled multi-byte
    // literal from the original source. Both sides of each assertion use
    // the same bytes, so the test is self-consistent either way — confirm
    // against upstream before "fixing" the literals.
    let data = "abcdαβγδabcdαβγδ";
    assert_eq!(data.replace("dαβ", "���"), "abc���γδabc���γδ");
    assert_eq!(data.replace('γ', "���"), "abcdαβ���δabcdαβ���δ");
    assert_eq!(data.replace(&['a', 'γ'] as &[_], "���"), "���bcdαβ���δ���bcdαβ���δ");
    assert_eq!(data.replace(|c| c == 'γ', "���"), "abcdαβ���δabcdαβ���δ");
}
3814
3815// The current implementation of SliceIndex fails to handle methods
3816// orthogonally from range types; therefore, it is worth testing
3817// all of the indexing operations on each input.
3818mod slice_index {
    // Test a slicing operation **that should succeed,**
    // testing it on all of the indexing methods.
    //
    // This is not suitable for testing failure on invalid inputs.
    macro_rules! assert_range_eq {
        ($s:expr, $range:expr, $expected:expr) => {
            let mut s: String = $s.to_owned();
            let mut expected: String = $expected.to_owned();
            {
                // Shared-reference entry points: Index, get, get_unchecked.
                let s: &str = &s;
                let expected: &str = &expected;

                assert_eq!(&s[$range], expected, "(in assertion for: index)");
                assert_eq!(s.get($range), Some(expected), "(in assertion for: get)");
                // SAFETY: the caller promises $range is valid for $s.
                unsafe {
                    assert_eq!(
                        s.get_unchecked($range),
                        expected,
                        "(in assertion for: get_unchecked)",
                    );
                }
            }
            {
                // Mutable entry points: IndexMut, get_mut, get_unchecked_mut.
                let s: &mut str = &mut s;
                let expected: &mut str = &mut expected;

                assert_eq!(&mut s[$range], expected, "(in assertion for: index_mut)",);
                assert_eq!(
                    s.get_mut($range),
                    Some(&mut expected[..]),
                    "(in assertion for: get_mut)",
                );
                // SAFETY: same contract as above.
                unsafe {
                    assert_eq!(
                        s.get_unchecked_mut($range),
                        expected,
                        "(in assertion for: get_unchecked_mut)",
                    );
                }
            }
        };
    }
3861
    // Make sure the macro can actually detect bugs,
    // because if it can't, then what are we even doing here?
    //
    // (Be aware this only demonstrates the ability to detect bugs
    //  in the FIRST method that panics, as the macro is not designed
    //  to be used in `should_panic`)
    #[test]
    #[should_panic(expected = "out of bounds")]
    fn assert_range_eq_can_fail_by_panic() {
        // 0..5 overruns "abc", so the very first operation (`&s[0..5]`)
        // inside the macro must panic.
        assert_range_eq!("abc", 0..5, "abc");
    }
3873
    // (Be aware this only demonstrates the ability to detect bugs
    //  in the FIRST method it calls, as the macro is not designed
    //  to be used in `should_panic`)
    #[test]
    #[should_panic(expected = "==")]
    fn assert_range_eq_can_fail_by_inequality() {
        // Slicing succeeds here ("abc"[0..2] is "ab"), so the failure is
        // the equality assertion against the wrong expected value.
        assert_range_eq!("abc", 0..2, "abc");
    }
3882
    // Generates test cases for bad index operations.
    //
    // This generates `should_panic` test cases for Index/IndexMut
    // and `None` test cases for get/get_mut.
    macro_rules! panic_cases {
        ($(
            in mod $case_name:ident {
                data: $data:expr;

                // optional:
                //
                // a similar input for which DATA[input] succeeds, and the corresponding
                // output str. This helps validate "critical points" where an input range
                // straddles the boundary between valid and invalid.
                // (such as the input `len..len`, which is just barely valid)
                $(
                    good: data[$good:expr] == $output:expr;
                )*

                bad: data[$bad:expr];
                message: $expect_msg:expr; // must be a literal
            }
        )*) => {$(
            mod $case_name {
                // Every `good` input slices successfully, while the `bad`
                // input must make the non-panicking accessors return None.
                #[test]
                fn pass() {
                    let mut v: String = $data.into();

                    $( assert_range_eq!(v, $good, $output); )*

                    {
                        let v: &str = &v;
                        assert_eq!(v.get($bad), None, "(in None assertion for get)");
                    }

                    {
                        let v: &mut str = &mut v;
                        assert_eq!(v.get_mut($bad), None, "(in None assertion for get_mut)");
                    }
                }

                // `Index` on the bad input must panic with $expect_msg.
                #[test]
                #[should_panic(expected = $expect_msg)]
                fn index_fail() {
                    let v: String = $data.into();
                    let v: &str = &v;
                    let _v = &v[$bad];
                }

                // ... and so must `IndexMut`.
                #[test]
                #[should_panic(expected = $expect_msg)]
                fn index_mut_fail() {
                    let mut v: String = $data.into();
                    let v: &mut str = &mut v;
                    let _v = &mut v[$bad];
                }
            }
        )*};
    }
3942
3943    #[test]
3944    fn simple_ascii() {
3945        assert_range_eq!("abc", .., "abc");
3946
3947        assert_range_eq!("abc", 0..2, "ab");
3948        assert_range_eq!("abc", 0..=1, "ab");
3949        assert_range_eq!("abc", ..2, "ab");
3950        assert_range_eq!("abc", ..=1, "ab");
3951
3952        assert_range_eq!("abc", 1..3, "bc");
3953        assert_range_eq!("abc", 1..=2, "bc");
3954        assert_range_eq!("abc", 1..1, "");
3955        assert_range_eq!("abc", 1..=0, "");
3956    }
3957
    #[test]
    fn simple_unicode() {
        // 日本
        // Each of the two chars below encodes to 3 UTF-8 bytes, so 0..3 and
        // 3..6 select exactly one character each.
        assert_range_eq!("\u{65e5}\u{672c}", .., "\u{65e5}\u{672c}");

        assert_range_eq!("\u{65e5}\u{672c}", 0..3, "\u{65e5}");
        assert_range_eq!("\u{65e5}\u{672c}", 0..=2, "\u{65e5}");
        assert_range_eq!("\u{65e5}\u{672c}", ..3, "\u{65e5}");
        assert_range_eq!("\u{65e5}\u{672c}", ..=2, "\u{65e5}");

        assert_range_eq!("\u{65e5}\u{672c}", 3..6, "\u{672c}");
        assert_range_eq!("\u{65e5}\u{672c}", 3..=5, "\u{672c}");
        assert_range_eq!("\u{65e5}\u{672c}", 3.., "\u{672c}");

        // Thai characters are 3 bytes each as well.
        let data = "ประเทศไทย中华";
        assert_range_eq!(data, 0..3, "ป");
        assert_range_eq!(data, 3..6, "ร");
        assert_range_eq!(data, 3..3, "");
        assert_range_eq!(data, 30..33, "华");

        // Byte offset of each character in `ss`:
        /*0: 中
         3: 华
         6: V
         7: i
         8: ệ
        11: t
        12:
        13: N
        14: a
        15: m */
        let ss = "中华Việt Nam";
        assert_range_eq!(ss, 3..6, "华");
        assert_range_eq!(ss, 6..16, "Việt Nam");
        assert_range_eq!(ss, 6..=15, "Việt Nam");
        assert_range_eq!(ss, 6.., "Việt Nam");

        assert_range_eq!(ss, 0..3, "中");
        assert_range_eq!(ss, 3..7, "华V");
        assert_range_eq!(ss, 3..=6, "华V");
        assert_range_eq!(ss, 3..3, "");
        assert_range_eq!(ss, 3..=2, "");
    }
4000
4001    #[test]
4002    #[cfg_attr(target_os = "emscripten", ignore)] // hits an OOM
4003    #[cfg_attr(miri, ignore)] // Miri is too slow
4004    fn simple_big() {
4005        fn a_million_letter_x() -> String {
4006            let mut i = 0;
4007            let mut rs = String::new();
4008            while i < 100000 {
4009                rs.push_str("华华华华华华华华华华");
4010                i += 1;
4011            }
4012            rs
4013        }
4014        fn half_a_million_letter_x() -> String {
4015            let mut i = 0;
4016            let mut rs = String::new();
4017            while i < 100000 {
4018                rs.push_str("华华华华华");
4019                i += 1;
4020            }
4021            rs
4022        }
4023        let letters = a_million_letter_x();
4024        assert_range_eq!(letters, 0..3 * 500000, half_a_million_letter_x());
4025    }
4026
    #[test]
    #[should_panic]
    fn test_slice_fail() {
        // Byte 2 falls inside the 3-byte encoding of '中', so this slice
        // must panic on the char-boundary check.
        &"中华Việt Nam"[0..2];
    }
4032
    // Ranges that start or end exactly at `len` are just barely valid;
    // one past `len` must return None from `get` and panic on `Index`.
    panic_cases! {
        in mod rangefrom_len {
            data: "abcdef";
            good: data[6..] == "";
            bad: data[7..];
            message: "out of bounds";
        }

        in mod rangeto_len {
            data: "abcdef";
            good: data[..6] == "abcdef";
            bad: data[..7];
            message: "out of bounds";
        }

        in mod rangetoinclusive_len {
            data: "abcdef";
            good: data[..=5] == "abcdef";
            bad: data[..=6];
            message: "out of bounds";
        }

        in mod rangeinclusive_len {
            data: "abcdef";
            good: data[0..=5] == "abcdef";
            bad: data[0..=6];
            message: "out of bounds";
        }

        in mod range_len_len {
            data: "abcdef";
            good: data[6..6] == "";
            bad: data[7..7];
            message: "out of bounds";
        }

        in mod rangeinclusive_len_len {
            data: "abcdef";
            good: data[6..=5] == "";
            bad: data[7..=6];
            message: "out of bounds";
        }
    }
4076
    // An exhausted `RangeInclusive` slices to the empty string — but only
    // if its bounds were valid to begin with.
    panic_cases! {
        in mod rangeinclusive_exhausted {
            data: "abcdef";

            good: data[0..=5] == "abcdef";
            good: data[{
                let mut iter = 0..=5;
                iter.by_ref().count(); // exhaust it
                iter
            }] == "";

            // 0..=6 is out of bounds before exhaustion, so it
            // stands to reason that it still would be after.
            bad: data[{
                let mut iter = 0..=6;
                iter.by_ref().count(); // exhaust it
                iter
            }];
            message: "out of bounds";
        }
    }
4098
    // Decreasing ranges (start past end) must panic; the empty-but-valid
    // forms `4..4` and `4..=3` are the critical points just before them.
    panic_cases! {
        in mod range_neg_width {
            data: "abcdef";
            good: data[4..4] == "";
            bad: data[4..3];
            message: "begin <= end (4 <= 3)";
        }

        in mod rangeinclusive_neg_width {
            data: "abcdef";
            good: data[4..=3] == "";
            bad: data[4..=2];
            message: "begin <= end (4 <= 3)";
        }
    }
4114
    // `..=usize::MAX` would need an exclusive end of `usize::MAX + 1`,
    // which overflows; both inclusive range types must panic rather than
    // wrap around.
    mod overflow {
        panic_cases! {
            in mod rangeinclusive {
                data: "hello";
                // note: using 0 specifically ensures that the result of overflowing is 0..0,
                //       so that `get` doesn't simply return None for the wrong reason.
                bad: data[0..=usize::MAX];
                message: "maximum usize";
            }

            in mod rangetoinclusive {
                data: "hello";
                bad: data[..=usize::MAX];
                message: "maximum usize";
            }
        }
    }
4132
    // Char-boundary violations: DATA is "abc" (1 byte each) followed by
    // "αβγ" (2 bytes each), so bytes 4 and 6 fall inside 'α' and 'β'.
    mod boundary {
        const DATA: &str = "abcαβγ";

        // Byte 4 is mid-'α' (bytes 3..5); byte 3 is a valid boundary.
        const BAD_START: usize = 4;
        const GOOD_START: usize = 3;
        // Byte 6 is mid-'β' (bytes 5..7); byte 7 is a valid boundary.
        const BAD_END: usize = 6;
        const GOOD_END: usize = 7;
        // Inclusive-range equivalents of the exclusive ends above.
        const BAD_END_INCL: usize = BAD_END - 1;
        const GOOD_END_INCL: usize = GOOD_END - 1;

        // it is especially important to test all of the different range types here
        // because some of the logic may be duplicated as part of micro-optimizations
        // to dodge unicode boundary checks on half-ranges.
        panic_cases! {
            in mod range_1 {
                data: super::DATA;
                bad: data[super::BAD_START..super::GOOD_END];
                message:
                    "byte index 4 is not a char boundary; it is inside 'α' (bytes 3..5) of";
            }

            in mod range_2 {
                data: super::DATA;
                bad: data[super::GOOD_START..super::BAD_END];
                message:
                    "byte index 6 is not a char boundary; it is inside 'β' (bytes 5..7) of";
            }

            in mod rangefrom {
                data: super::DATA;
                bad: data[super::BAD_START..];
                message:
                    "byte index 4 is not a char boundary; it is inside 'α' (bytes 3..5) of";
            }

            in mod rangeto {
                data: super::DATA;
                bad: data[..super::BAD_END];
                message:
                    "byte index 6 is not a char boundary; it is inside 'β' (bytes 5..7) of";
            }

            in mod rangeinclusive_1 {
                data: super::DATA;
                bad: data[super::BAD_START..=super::GOOD_END_INCL];
                message:
                    "byte index 4 is not a char boundary; it is inside 'α' (bytes 3..5) of";
            }

            in mod rangeinclusive_2 {
                data: super::DATA;
                bad: data[super::GOOD_START..=super::BAD_END_INCL];
                message:
                    "byte index 6 is not a char boundary; it is inside 'β' (bytes 5..7) of";
            }

            in mod rangetoinclusive {
                data: super::DATA;
                bad: data[..=super::BAD_END_INCL];
                message:
                    "byte index 6 is not a char boundary; it is inside 'β' (bytes 5..7) of";
            }
        }
    }
4197
    // Long ASCII-only paragraph used by the tests below to check that the
    // out-of-bounds slice panic message includes a *truncated* prefix of the
    // sliced string rather than the whole thing.
    const LOREM_PARAGRAPH: &str = "\
    Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem \
    sit amet dolor ultricies condimentum. Praesent iaculis purus elit, ac malesuada \
    quam malesuada in. Duis sed orci eros. Suspendisse sit amet magna mollis, mollis \
    nunc luctus, imperdiet mi. Integer fringilla non sem ut lacinia. Fusce varius \
    tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec tempus vel, \
    gravida nec quam.";
4205
4206    // check the panic includes the prefix of the sliced string
4207    #[test]
4208    #[should_panic(expected = "byte index 1024 is out of bounds of `Lorem ipsum dolor sit amet")]
4209    fn test_slice_fail_truncated_1() {
4210        &LOREM_PARAGRAPH[..1024];
4211    }
4212    // check the truncation in the panic message
4213    #[test]
4214    #[should_panic(expected = "luctus, im`[...]")]
4215    fn test_slice_fail_truncated_2() {
4216        &LOREM_PARAGRAPH[..1024];
4217    }
4218}
4219
#[test]
fn test_str_slice_rangetoinclusive_ok() {
    // `..=n` is accepted whenever byte n + 1 is a char boundary.
    let s = "abcαβγ";
    for (upper, expected) in [(2, "abc"), (4, "abcα")] {
        assert_eq!(&s[..=upper], expected);
    }
}
4226
#[test]
#[should_panic]
fn test_str_slice_rangetoinclusive_notok() {
    // Byte 4 is in the middle of 'α', so `..=3` must panic.
    let s = "abcαβγ";
    let _ = &s[..=3];
}
4233
#[test]
fn test_str_slicemut_rangetoinclusive_ok() {
    // Same boundary rules apply when slicing through `&mut str`.
    let mut owned = "abcαβγ".to_owned();
    let view: &mut str = &mut owned;
    assert_eq!(&mut view[..=2], "abc");
    assert_eq!(&mut view[..=4], "abcα");
}
4241
#[test]
#[should_panic]
fn test_str_slicemut_rangetoinclusive_notok() {
    // Byte 4 is mid-'α'; the mutable slice must panic just like the shared one.
    let mut owned = "abcαβγ".to_owned();
    let view: &mut str = &mut owned;
    let _ = &mut view[..=3];
}
4249
#[test]
fn test_is_char_boundary() {
    let s = "ศไทย中华Việt Nam β-release �123";
    // The start and the end of the string are always boundaries; one past the
    // end is not.
    assert!(s.is_char_boundary(0));
    assert!(s.is_char_boundary(s.len()));
    assert!(!s.is_char_boundary(s.len() + 1));
    for (i, ch) in s.char_indices() {
        // ensure character locations are boundaries and continuation bytes are not
        // (fixed: the failure message previously asserted the opposite of the
        // failing condition)
        assert!(s.is_char_boundary(i), "{} should be a char boundary in {:?}", i, s);
        for j in 1..ch.len_utf8() {
            assert!(
                !s.is_char_boundary(i + j),
                "{} should not be a char boundary in {:?}",
                i + j,
                s
            );
        }
    }
}
4269
#[test]
fn test_trim_start_matches() {
    // An empty char set strips nothing.
    let none: &[char] = &[];
    assert_eq!(" *** foo *** ".trim_start_matches(none), " *** foo *** ");

    // A char-slice pattern strips any leading run drawn from the set.
    let stars: &[char] = &['*', ' '];
    assert_eq!(" *** foo *** ".trim_start_matches(stars), "foo *** ");
    assert_eq!(" ***  *** ".trim_start_matches(stars), "");
    assert_eq!("foo *** ".trim_start_matches(stars), "foo *** ");

    // Single-char, char-slice, and closure patterns behave consistently.
    assert_eq!("11foo1bar11".trim_start_matches('1'), "foo1bar11");
    let digits: &[char] = &['1', '2'];
    assert_eq!("12foo1bar12".trim_start_matches(digits), "foo1bar12");
    assert_eq!("123foo1bar123".trim_start_matches(|c: char| c.is_numeric()), "foo1bar123");
}
4284
#[test]
fn test_trim_end_matches() {
    // An empty char set strips nothing.
    let none: &[char] = &[];
    assert_eq!(" *** foo *** ".trim_end_matches(none), " *** foo *** ");

    // A char-slice pattern strips any trailing run drawn from the set.
    let stars: &[char] = &['*', ' '];
    assert_eq!(" *** foo *** ".trim_end_matches(stars), " *** foo");
    assert_eq!(" ***  *** ".trim_end_matches(stars), "");
    assert_eq!(" *** foo".trim_end_matches(stars), " *** foo");

    // Single-char, char-slice, and closure patterns behave consistently.
    assert_eq!("11foo1bar11".trim_end_matches('1'), "11foo1bar");
    let digits: &[char] = &['1', '2'];
    assert_eq!("12foo1bar12".trim_end_matches(digits), "12foo1bar");
    assert_eq!("123foo1bar123".trim_end_matches(|c: char| c.is_numeric()), "123foo1bar");
}
4299
#[test]
fn test_trim_matches() {
    // An empty char set strips nothing from either end.
    let none: &[char] = &[];
    assert_eq!(" *** foo *** ".trim_matches(none), " *** foo *** ");

    // A char-slice pattern strips from both ends.
    let stars: &[char] = &['*', ' '];
    assert_eq!(" *** foo *** ".trim_matches(stars), "foo");
    assert_eq!(" ***  *** ".trim_matches(stars), "");
    assert_eq!("foo".trim_matches(stars), "foo");

    // Single-char, char-slice, and closure patterns behave consistently.
    assert_eq!("11foo1bar11".trim_matches('1'), "foo1bar");
    let digits: &[char] = &['1', '2'];
    assert_eq!("12foo1bar12".trim_matches(digits), "foo1bar");
    assert_eq!("123foo1bar123".trim_matches(|c: char| c.is_numeric()), "foo1bar");
}
4314
#[test]
fn test_trim_start() {
    // Only leading whitespace (including U+3000 ideographic space) is removed.
    let cases = [
        ("", ""),
        ("a", "a"),
        ("    ", ""),
        ("     blah", "blah"),
        ("   \u{3000}  wut", "wut"),
        ("hey ", "hey "),
    ];
    for (input, expected) in cases {
        assert_eq!(input.trim_start(), expected);
    }
}
4324
#[test]
fn test_trim_end() {
    // Only trailing whitespace (including U+3000 ideographic space) is removed.
    let cases = [
        ("", ""),
        ("a", "a"),
        ("    ", ""),
        ("blah     ", "blah"),
        ("wut   \u{3000}  ", "wut"),
        (" hey", " hey"),
    ];
    for (input, expected) in cases {
        assert_eq!(input.trim_end(), expected);
    }
}
4334
#[test]
fn test_trim() {
    // Whitespace is removed from both ends, interior whitespace is kept.
    let cases = [
        ("", ""),
        ("a", "a"),
        ("    ", ""),
        ("    blah     ", "blah"),
        ("\nwut   \u{3000}  ", "wut"),
        (" hey dude ", "hey dude"),
    ];
    for (input, expected) in cases {
        assert_eq!(input.trim(), expected);
    }
}
4344
#[test]
fn test_is_whitespace() {
    // All-whitespace strings pass ("" vacuously); an underscore does not.
    assert!("".chars().all(char::is_whitespace));
    assert!(" ".chars().all(char::is_whitespace));
    assert!("\u{2009}".chars().all(char::is_whitespace)); // Thin space
    assert!("  \n\t   ".chars().all(char::is_whitespace));
    assert!(!"   _   ".chars().all(char::is_whitespace));
}
4353
#[test]
fn test_is_utf8() {
    // deny overlong encodings and surrogate code points
    let invalid: &[&[u8]] = &[
        &[0xc0, 0x80],
        &[0xc0, 0xae],
        &[0xe0, 0x80, 0x80],
        &[0xe0, 0x80, 0xaf],
        &[0xe0, 0x81, 0x81],
        &[0xf0, 0x82, 0x82, 0xac],
        &[0xf4, 0x90, 0x80, 0x80],
        // surrogates
        &[0xED, 0xA0, 0x80],
        &[0xED, 0xBF, 0xBF],
    ];
    for bytes in invalid {
        assert!(from_utf8(bytes).is_err());
    }

    // accept well-formed boundary cases for 2-, 3-, and 4-byte sequences
    let valid: &[&[u8]] = &[
        &[0xC2, 0x80],
        &[0xDF, 0xBF],
        &[0xE0, 0xA0, 0x80],
        &[0xED, 0x9F, 0xBF],
        &[0xEE, 0x80, 0x80],
        &[0xEF, 0xBF, 0xBF],
        &[0xF0, 0x90, 0x80, 0x80],
        &[0xF4, 0x8F, 0xBF, 0xBF],
    ];
    for bytes in valid {
        assert!(from_utf8(bytes).is_ok());
    }
}
4378
#[test]
fn from_utf8_mostly_ascii() {
    // deny invalid bytes embedded in long stretches of ascii
    for i in 32..64 {
        let mut data = [0u8; 128];
        for &bad_byte in &[0xC0u8, 0xC2] {
            data[i] = bad_byte;
            assert!(from_utf8(&data).is_err());
        }
    }
}
4390
#[test]
fn from_utf8_error() {
    // For each malformed input, check where the valid prefix ends and how many
    // bytes the error spans (None = unexpected end of input).
    fn check(input: &[u8], expected_valid_up_to: usize, expected_error_len: Option<usize>) {
        let error = from_utf8(input).unwrap_err();
        assert_eq!(error.valid_up_to(), expected_valid_up_to);
        assert_eq!(error.error_len(), expected_error_len);
    }
    check(b"A\xC3\xA9 \xFF ", 4, Some(1));
    check(b"A\xC3\xA9 \x80 ", 4, Some(1));
    check(b"A\xC3\xA9 \xC1 ", 4, Some(1));
    check(b"A\xC3\xA9 \xC1", 4, Some(1));
    check(b"A\xC3\xA9 \xC2", 4, None);
    check(b"A\xC3\xA9 \xC2 ", 4, Some(1));
    check(b"A\xC3\xA9 \xC2\xC0", 4, Some(1));
    check(b"A\xC3\xA9 \xE0", 4, None);
    check(b"A\xC3\xA9 \xE0\x9F", 4, Some(1));
    check(b"A\xC3\xA9 \xE0\xA0", 4, None);
    check(b"A\xC3\xA9 \xE0\xA0\xC0", 4, Some(2));
    check(b"A\xC3\xA9 \xE0\xA0 ", 4, Some(2));
    check(b"A\xC3\xA9 \xED\xA0\x80 ", 4, Some(1));
    check(b"A\xC3\xA9 \xF1", 4, None);
    check(b"A\xC3\xA9 \xF1\x80", 4, None);
    check(b"A\xC3\xA9 \xF1\x80\x80", 4, None);
    check(b"A\xC3\xA9 \xF1 ", 4, Some(1));
    check(b"A\xC3\xA9 \xF1\x80 ", 4, Some(2));
    check(b"A\xC3\xA9 \xF1\x80\x80 ", 4, Some(3));
}
4420
#[test]
fn test_as_bytes() {
    // Expected UTF-8 encoding of the sample (contains no NUL byte).
    let expected = [
        224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142,
        86, 105, 225, 187, 135, 116, 32, 78, 97, 109,
    ];
    let empty: &[u8] = &[];
    assert_eq!("".as_bytes(), empty);
    assert_eq!("abc".as_bytes(), b"abc");
    assert_eq!("ศไทย中华Việt Nam".as_bytes(), expected);
}
4433
#[test]
#[should_panic]
fn test_as_bytes_fail() {
    // Regression guard: panicking while a borrow of the string's bytes is
    // live must not double-free. (It is unclear whether this still exercises
    // the original problem code path.)
    let owned = String::from("");
    let _bytes = owned.as_bytes();
    panic!();
}
4443
#[test]
fn test_as_ptr() {
    // `as_ptr` points at the string's UTF-8 bytes; verify each byte of "hello".
    // (Replaces five copy-pasted `offset` calls with a loop over `ptr::add`,
    // which takes the idiomatic `usize` offset.)
    let buf = "hello".as_ptr();
    for (i, &expected) in b"hello".iter().enumerate() {
        // SAFETY: i < "hello".len(), so `buf.add(i)` stays inside the string's
        // allocation and points at an initialized byte.
        unsafe {
            assert_eq!(*buf.add(i), expected);
        }
    }
}
4455
#[test]
fn vec_str_conversions() {
    // String -> Vec<u8> -> &str -> String must round-trip losslessly.
    let s1: String = String::from("All mimsy were the borogoves");

    let v: Vec<u8> = s1.as_bytes().to_vec();
    let s2: String = String::from(from_utf8(&v).unwrap());
    // Same length, then byte-for-byte equal (iterator zip replaces the
    // original manual while-index loop).
    assert_eq!(s1.len(), v.len());
    for (a, b) in s1.bytes().zip(s2.bytes()) {
        assert_eq!(a, b);
    }
}
4473
#[test]
fn test_contains() {
    // Substrings at the start, middle, and end all match; "" matches anything.
    for (hay, needle) in
        [("abcde", "bcd"), ("abcde", "abcd"), ("abcde", "bcde"), ("abcde", ""), ("", "")]
    {
        assert!(hay.contains(needle));
    }
    assert!(!"abcde".contains("def"));
    assert!(!"".contains("a"));

    // Multibyte needles match on byte sequences, not on char fragments.
    let data = "ประเทศไทย中华Việt Nam";
    assert!(data.contains("ประเ"));
    assert!(data.contains("ะเ"));
    assert!(data.contains("中华"));
    assert!(!data.contains("ไท华"));
}
4490
#[test]
fn test_contains_char() {
    // `contains` with a single-char pattern.
    let cases = [("abc", 'b', true), ("a", 'a', true), ("abc", 'd', false), ("", 'a', false)];
    for (hay, needle, expected) in cases {
        assert_eq!(hay.contains(needle), expected);
    }
}
4498
#[test]
fn test_split_at() {
    let s = "ศไทย中华Việt Nam";
    // Splitting at every char boundary yields halves that match plain slicing.
    for (index, _) in s.char_indices() {
        let (head, tail) = s.split_at(index);
        assert_eq!(head, &s[..index]);
        assert_eq!(tail, &s[index..]);
    }
    // Splitting at the very end yields the whole string and "".
    let (head, tail) = s.split_at(s.len());
    assert_eq!(head, s);
    assert_eq!(tail, "");
}
4511
#[test]
fn test_split_at_mut() {
    // The two halves returned by `split_at_mut` are independently mutable.
    let mut s = String::from("Hello World");
    {
        let (head, tail) = s.split_at_mut(5);
        head.make_ascii_uppercase();
        tail.make_ascii_lowercase();
    }
    assert_eq!(s, "HELLO world");
}
4522
#[test]
#[should_panic]
fn test_split_at_boundscheck() {
    // Index 1 falls inside the multibyte char 'ศ', so this must panic.
    let s = "ศไทย中华Việt Nam";
    let _ = s.split_at(1);
}
4529
#[test]
fn test_escape_unicode() {
    // `escape_unicode` escapes *every* char — printable or not — as `\u{…}`.
    assert_eq!("abc".escape_unicode().to_string(), "\\u{61}\\u{62}\\u{63}");
    assert_eq!("a c".escape_unicode().to_string(), "\\u{61}\\u{20}\\u{63}");
    assert_eq!("\r\n\t".escape_unicode().to_string(), "\\u{d}\\u{a}\\u{9}");
    assert_eq!("'\"\\".escape_unicode().to_string(), "\\u{27}\\u{22}\\u{5c}");
    // 1-byte code points, with and without leading zeros stripped
    assert_eq!("\x00\x01\u{fe}\u{ff}".escape_unicode().to_string(), "\\u{0}\\u{1}\\u{fe}\\u{ff}");
    // code points above U+FF and above U+FFFF
    assert_eq!("\u{100}\u{ffff}".escape_unicode().to_string(), "\\u{100}\\u{ffff}");
    assert_eq!("\u{10000}\u{10ffff}".escape_unicode().to_string(), "\\u{10000}\\u{10ffff}");
    assert_eq!("ab\u{fb00}".escape_unicode().to_string(), "\\u{61}\\u{62}\\u{fb00}");
    assert_eq!("\u{1d4ea}\r".escape_unicode().to_string(), "\\u{1d4ea}\\u{d}");
}
4542
#[test]
fn test_escape_debug() {
    // Note that there are subtleties with the number of backslashes
    // on the left- and right-hand sides. In particular, Unicode code points
    // are usually escaped with two backslashes on the right-hand side, as
    // they are escaped. However, when the character is unescaped (e.g., for
    // printable characters), only a single backslash appears (as the character
    // itself appears in the debug string).
    assert_eq!("abc".escape_debug().to_string(), "abc");
    assert_eq!("a c".escape_debug().to_string(), "a c");
    // printable non-ASCII passes through unescaped
    assert_eq!("éèê".escape_debug().to_string(), "éèê");
    // control chars use their short escapes; quotes/backslash are escaped
    assert_eq!("\r\n\t".escape_debug().to_string(), "\\r\\n\\t");
    assert_eq!("'\"\\".escape_debug().to_string(), "\\'\\\"\\\\");
    // per code point: non-printable chars escaped, printable ones kept
    assert_eq!("\u{7f}\u{ff}".escape_debug().to_string(), "\\u{7f}\u{ff}");
    assert_eq!("\u{100}\u{ffff}".escape_debug().to_string(), "\u{100}\\u{ffff}");
    assert_eq!("\u{10000}\u{10ffff}".escape_debug().to_string(), "\u{10000}\\u{10ffff}");
    assert_eq!("ab\u{200b}".escape_debug().to_string(), "ab\\u{200b}");
    assert_eq!("\u{10d4ea}\r".escape_debug().to_string(), "\\u{10d4ea}\\r");
    // a combining mark (U+0301) is escaped at the start of the string but
    // kept as-is after a base character
    assert_eq!(
        "\u{301}a\u{301}bé\u{e000}".escape_debug().to_string(),
        "\\u{301}a\u{301}bé\\u{e000}"
    );
}
4566
#[test]
fn test_escape_default() {
    // ASCII printables pass through; unlike `escape_debug`, printable
    // non-ASCII (e.g. "éèê") is escaped too.
    assert_eq!("abc".escape_default().to_string(), "abc");
    assert_eq!("a c".escape_default().to_string(), "a c");
    assert_eq!("éèê".escape_default().to_string(), "\\u{e9}\\u{e8}\\u{ea}");
    // control chars use short escapes; quotes/backslash are escaped
    assert_eq!("\r\n\t".escape_default().to_string(), "\\r\\n\\t");
    assert_eq!("'\"\\".escape_default().to_string(), "\\'\\\"\\\\");
    // code points of increasing width are all `\u{…}`-escaped
    assert_eq!("\u{7f}\u{ff}".escape_default().to_string(), "\\u{7f}\\u{ff}");
    assert_eq!("\u{100}\u{ffff}".escape_default().to_string(), "\\u{100}\\u{ffff}");
    assert_eq!("\u{10000}\u{10ffff}".escape_default().to_string(), "\\u{10000}\\u{10ffff}");
    assert_eq!("ab\u{200b}".escape_default().to_string(), "ab\\u{200b}");
    assert_eq!("\u{10d4ea}\r".escape_default().to_string(), "\\u{10d4ea}\\r");
}
4580
#[test]
fn test_total_ord() {
    // Lexicographic byte ordering; with a shared prefix, the longer string wins.
    let cases = [
        ("1234", "123", Greater),
        ("123", "1234", Less),
        ("1234", "1234", Equal),
        ("12345555", "123456", Less),
        ("22", "1234", Greater),
    ];
    for (a, b, expected) in cases {
        assert_eq!(a.cmp(b), expected);
    }
}
4589
#[test]
fn test_iterator() {
    let s = "ศไทย中华Việt Nam";
    let expected = ['ศ', 'ไ', 'ท', 'ย', '中', '华', 'V', 'i', 'ệ', 't', ' ', 'N', 'a', 'm'];

    // `chars` must yield exactly the expected chars, in order.
    let mut seen = 0;
    for (c, &e) in s.chars().zip(&expected) {
        assert_eq!(c, e);
        seen += 1;
    }
    assert_eq!(seen, expected.len());
    assert_eq!(s.chars().count(), expected.len());
}
4605
#[test]
fn test_rev_iterator() {
    let s = "ศไทย中华Việt Nam";
    // Expected chars in reverse order.
    let expected = ['m', 'a', 'N', ' ', 't', 'ệ', 'i', 'V', '华', '中', 'ย', 'ท', 'ไ', 'ศ'];

    let mut seen = 0;
    for (c, &e) in s.chars().rev().zip(&expected) {
        assert_eq!(c, e);
        seen += 1;
    }
    assert_eq!(seen, expected.len());
    assert_eq!(s.chars().count(), expected.len());
}
4620
#[test]
#[cfg_attr(miri, ignore)] // Miri is too slow
fn test_chars_decoding() {
    // Round-trip every scalar value: encode to UTF-8, decode the first char.
    let mut buf = [0; 4];
    for c in (0..0x110000).filter_map(std::char::from_u32) {
        let encoded = c.encode_utf8(&mut buf);
        if encoded.chars().next() != Some(c) {
            panic!("character {:x}={} does not decode correctly", c as u32, c);
        }
    }
}
4632
#[test]
#[cfg_attr(miri, ignore)] // Miri is too slow
fn test_chars_rev_decoding() {
    // Same round-trip as above, but decoding from the back of the string.
    let mut buf = [0; 4];
    for c in (0..0x110000).filter_map(std::char::from_u32) {
        let encoded = c.encode_utf8(&mut buf);
        if encoded.chars().rev().next() != Some(c) {
            panic!("character {:x}={} does not decode correctly", c as u32, c);
        }
    }
}
4644
#[test]
fn test_iterator_clone() {
    // A cloned Chars iterator continues from the same position as its source.
    let mut it = "ศไทย中华Việt Nam".chars();
    it.next();
    let snapshot = it.clone();
    assert!(snapshot.zip(it).all(|(a, b)| a == b));
}
4652
#[test]
fn test_iterator_last() {
    // `last` after advancing still returns the final char of the string.
    let mut it = "ศไทย中华Việt Nam".chars();
    it.next();
    assert_eq!(it.last(), Some('m'));
}
4660
#[test]
fn test_chars_debug() {
    // Debug formatting of `Chars` lists the remaining chars.
    let it = "ศไทย中华Việt Nam".chars();
    let rendered = format!("{:?}", it);
    assert_eq!(
        rendered,
        r#"Chars(['ศ', 'ไ', 'ท', 'ย', '中', '华', 'V', 'i', 'ệ', 't', ' ', 'N', 'a', 'm'])"#
    );
}
4670
#[test]
fn test_bytesator() {
    let s = "ศไทย中华Việt Nam";
    let expected = [
        224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142,
        86, 105, 225, 187, 135, 116, 32, 78, 97, 109,
    ];

    // `bytes` yields the raw UTF-8 encoding, in order.
    for (i, b) in s.bytes().enumerate() {
        assert_eq!(b, expected[i]);
    }
}
4685
#[test]
fn test_bytes_revator() {
    let s = "ศไทย中华Việt Nam";
    let expected = [
        224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142,
        86, 105, 225, 187, 135, 116, 32, 78, 97, 109,
    ];

    // Reverse iteration yields the same bytes back-to-front.
    for (i, b) in s.bytes().rev().enumerate() {
        assert_eq!(b, expected[expected.len() - 1 - i]);
    }
}
4700
#[test]
fn test_bytesator_nth() {
    let s = "ศไทย中华Việt Nam";
    let v = [
        224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142,
        86, 105, 225, 187, 135, 116, 32, 78, 97, 109,
    ];

    let mut bytes = s.bytes();
    // `nth` consumes up to and including the returned element.
    assert_eq!(bytes.nth(2), Some(v[2]));
    // NOTE(review): after the first `nth`, this returns the byte at overall
    // index 13, which happens to equal v[10] (both 184), so the original
    // expectation holds.
    assert_eq!(bytes.nth(10), Some(v[10]));
    // Requesting past the end yields None.
    assert_eq!(bytes.nth(200), None);
}
4714
#[test]
fn test_bytesator_count() {
    // The sample encodes to 28 UTF-8 bytes in total.
    assert_eq!("ศไทย中华Việt Nam".bytes().count(), 28)
}
4722
#[test]
fn test_bytesator_last() {
    // The final byte is b'm' (109).
    assert_eq!("ศไทย中华Việt Nam".bytes().last(), Some(109))
}
4730
#[test]
fn test_char_indicesator() {
    let s = "ศไทย中华Việt Nam";
    // Expected (byte offset, char) pairs.
    let offsets = [0, 3, 6, 9, 12, 15, 18, 19, 20, 23, 24, 25, 26, 27];
    let chars = ['ศ', 'ไ', 'ท', 'ย', '中', '华', 'V', 'i', 'ệ', 't', ' ', 'N', 'a', 'm'];

    let mut seen = 0;
    for ((idx, ch), (&eo, &ec)) in s.char_indices().zip(offsets.iter().zip(&chars)) {
        assert_eq!((idx, ch), (eo, ec));
        seen += 1;
    }
    assert_eq!(seen, chars.len());
    assert_eq!(seen, offsets.len());
}
4747
#[test]
fn test_char_indices_revator() {
    let s = "ศไทย中华Việt Nam";
    // Expected (byte offset, char) pairs, last char first.
    let offsets = [27, 26, 25, 24, 23, 20, 19, 18, 15, 12, 9, 6, 3, 0];
    let chars = ['m', 'a', 'N', ' ', 't', 'ệ', 'i', 'V', '华', '中', 'ย', 'ท', 'ไ', 'ศ'];

    let mut seen = 0;
    for ((idx, ch), (&eo, &ec)) in s.char_indices().rev().zip(offsets.iter().zip(&chars)) {
        assert_eq!((idx, ch), (eo, ec));
        seen += 1;
    }
    assert_eq!(seen, chars.len());
    assert_eq!(seen, offsets.len());
}
4764
#[test]
fn test_char_indices_last() {
    // `last` after advancing returns the final char and its byte offset (27).
    let mut it = "ศไทย中华Việt Nam".char_indices();
    it.next();
    assert_eq!(it.last(), Some((27, 'm')));
}
4772
#[test]
fn test_splitn_char_iterator() {
    let data = "\nMäry häd ä little lämb\nLittle lämb\n";

    // ASCII separator: a literal char and an equivalent closure must agree.
    let expected = ["\nMäry", "häd", "ä", "little lämb\nLittle lämb\n"];
    let by_char: Vec<&str> = data.splitn(4, ' ').collect();
    let by_pred: Vec<&str> = data.splitn(4, |c: char| c == ' ').collect();
    assert_eq!(by_char, expected);
    assert_eq!(by_pred, expected);

    // Unicode separator
    let expected = ["\nM", "ry h", "d ", " little lämb\nLittle lämb\n"];
    let by_char: Vec<&str> = data.splitn(4, 'ä').collect();
    let by_pred: Vec<&str> = data.splitn(4, |c: char| c == 'ä').collect();
    assert_eq!(by_char, expected);
    assert_eq!(by_pred, expected);
}
4790
#[test]
fn test_split_char_iterator_no_trailing() {
    let data = "\nMäry häd ä little lämb\nLittle lämb\n";

    // `split` keeps the trailing empty piece; `split_terminator` drops it.
    let with_tail: Vec<&str> = data.split('\n').collect();
    assert_eq!(with_tail, ["", "Märy häd ä little lämb", "Little lämb", ""]);

    let without_tail: Vec<&str> = data.split_terminator('\n').collect();
    assert_eq!(without_tail, ["", "Märy häd ä little lämb", "Little lämb"]);
}
4801
#[test]
fn test_split_char_iterator_inclusive() {
    // `split_inclusive` keeps the matched separator at the end of each piece.
    let data = "\nMäry häd ä little lämb\nLittle lämb\n";

    let split: Vec<&str> = data.split_inclusive('\n').collect();
    assert_eq!(split, ["\n", "Märy häd ä little lämb\n", "Little lämb\n"]);

    // Stateful predicate: match the trailing capital of each "word" while
    // `first_char` keeps the capital that *starts* a piece from matching.
    let uppercase_separated = "SheePSharKTurtlECaT";
    let mut first_char = true;
    let split: Vec<&str> = uppercase_separated
        .split_inclusive(|c: char| {
            let split = !first_char && c.is_uppercase();
            first_char = split;
            split
        })
        .collect();
    assert_eq!(split, ["SheeP", "SharK", "TurtlE", "CaT"]);
}
4820
#[test]
fn test_split_char_iterator_inclusive_rev() {
    // Reverse iteration over `split_inclusive` yields the same pieces, last first.
    let data = "\nMäry häd ä little lämb\nLittle lämb\n";

    let split: Vec<&str> = data.split_inclusive('\n').rev().collect();
    assert_eq!(split, ["Little lämb\n", "Märy häd ä little lämb\n", "\n"]);

    // Note that the predicate is stateful and thus dependent
    // on the iteration order.
    // (A different predicate is needed for reverse iterator vs normal iterator.)
    // Not sure if anything can be done though.
    let uppercase_separated = "SheePSharKTurtlECaT";
    let mut term_char = true;
    let split: Vec<&str> = uppercase_separated
        .split_inclusive(|c: char| {
            // `term_char` records whether the previously-visited char (the one
            // to the right in string order, given reverse iteration) was
            // uppercase — the reverse-order analogue of `first_char` above.
            let split = term_char && c.is_uppercase();
            term_char = c.is_uppercase();
            split
        })
        .rev()
        .collect();
    assert_eq!(split, ["CaT", "TurtlE", "SharK", "SheeP"]);
}
4844
#[test]
fn test_rsplit() {
    let data = "\nMäry häd ä little lämb\nLittle lämb\n";

    // `rsplit` yields pieces in reverse order for every pattern kind.
    let by_char: Vec<&str> = data.rsplit(' ').collect();
    assert_eq!(by_char, ["lämb\n", "lämb\nLittle", "little", "ä", "häd", "\nMäry"]);

    let by_str: Vec<&str> = data.rsplit("lämb").collect();
    assert_eq!(by_str, ["\n", "\nLittle ", "\nMäry häd ä little "]);

    let by_pred: Vec<&str> = data.rsplit(|c: char| c == 'ä').collect();
    assert_eq!(by_pred, ["mb\n", "mb\nLittle l", " little l", "d ", "ry h", "\nM"]);
}
4858
#[test]
fn test_rsplitn() {
    let data = "\nMäry häd ä little lämb\nLittle lämb\n";

    // `rsplitn(2, …)` splits only at the last occurrence.
    let by_char: Vec<&str> = data.rsplitn(2, ' ').collect();
    assert_eq!(by_char, ["lämb\n", "\nMäry häd ä little lämb\nLittle"]);

    let by_str: Vec<&str> = data.rsplitn(2, "lämb").collect();
    assert_eq!(by_str, ["\n", "\nMäry häd ä little lämb\nLittle "]);

    let by_pred: Vec<&str> = data.rsplitn(2, |c: char| c == 'ä').collect();
    assert_eq!(by_pred, ["mb\n", "\nMäry häd ä little lämb\nLittle l"]);
}
4872
#[test]
fn test_split_once() {
    // Splits on the first occurrence only; None when the delimiter is absent.
    let cases = [
        ("", None),
        ("-", None),
        ("->", Some(("", ""))),
        ("a->", Some(("a", ""))),
        ("->b", Some(("", "b"))),
        ("a->b", Some(("a", "b"))),
        ("a->b->c", Some(("a", "b->c"))),
    ];
    for (input, expected) in cases {
        assert_eq!(input.split_once("->"), expected);
    }
    // Overlapping candidates: the leftmost match wins.
    assert_eq!("---".split_once("--"), Some(("", "-")));
}
4884
#[test]
fn test_rsplit_once() {
    // Splits on the last occurrence only; None when the delimiter is absent.
    let cases = [
        ("", None),
        ("-", None),
        ("->", Some(("", ""))),
        ("a->", Some(("a", ""))),
        ("->b", Some(("", "b"))),
        ("a->b", Some(("a", "b"))),
        ("a->b->c", Some(("a->b", "c"))),
    ];
    for (input, expected) in cases {
        assert_eq!(input.rsplit_once("->"), expected);
    }
    // Overlapping candidates: the rightmost match wins.
    assert_eq!("---".rsplit_once("--"), Some(("-", "")));
}
4896
#[test]
fn test_split_whitespace() {
    // Runs of mixed whitespace (spaces, tabs, newlines) collapse; no empty items.
    let data = "\n \tMäry   häd\tä  little lämb\nLittle lämb\n";
    assert!(data.split_whitespace().eq(["Märy", "häd", "ä", "little", "lämb", "Little", "lämb"]));
}
4903
#[test]
fn test_lines() {
    // `lines` splits on "\n" and "\r\n" alike, and drops one trailing newline.
    let expected = ["", "Märy häd ä little lämb", "", "Little lämb"];

    let with_trailing = "\nMäry häd ä little lämb\n\r\nLittle lämb\n";
    assert_eq!(with_trailing.lines().collect::<Vec<&str>>(), expected);

    let without_trailing = "\r\nMäry häd ä little lämb\n\nLittle lämb"; // no trailing \n
    assert_eq!(without_trailing.lines().collect::<Vec<&str>>(), expected);
}
4914
#[test]
fn test_splitator() {
    // (haystack, separator, expected pieces) — covers leading/trailing/adjacent
    // separators and non-overlapping matching ("zzz" -> ["", "z"]).
    let cases: &[(&str, &str, &[&str])] = &[
        ("--1233345--", "12345", &["--1233345--"]),
        ("abc::hello::there", "::", &["abc", "hello", "there"]),
        ("::hello::there", "::", &["", "hello", "there"]),
        ("hello::there::", "::", &["hello", "there", ""]),
        ("::hello::there::", "::", &["", "hello", "there", ""]),
        ("ประเทศไทย中华Việt Nam", "中华", &["ประเทศไทย", "Việt Nam"]),
        ("zzXXXzzYYYzz", "zz", &["", "XXX", "YYY", ""]),
        ("zzXXXzYYYz", "XXX", &["zz", "zYYYz"]),
        (".XXX.YYY.", ".", &["", "XXX", "YYY", ""]),
        ("", ".", &[""]),
        ("zz", "zz", &["", ""]),
        ("ok", "z", &["ok"]),
        ("zzz", "zz", &["", "z"]),
        ("zzzzz", "zz", &["", "", "z"]),
    ];
    for &(s, sep, expected) in cases {
        let got: Vec<&str> = s.split(sep).collect();
        assert_eq!(got, expected);
    }
}
4936
#[test]
fn test_str_default() {
    // The default value of every string-like type is empty.
    fn check<S: Default + AsRef<str>>() {
        let value: S = Default::default();
        assert_eq!(value.as_ref(), "");
    }

    check::<&str>();
    check::<String>();
    check::<&mut str>();
}
4950
#[test]
fn test_str_container() {
    // Summed slice lengths are independent of how a string is partitioned.
    fn total_len(parts: &[&str]) -> usize {
        parts.iter().map(|p| p.len()).sum()
    }

    let s = "01234";
    assert_eq!(total_len(&["012", "", "34"]), 5);
    assert_eq!(total_len(&["01", "2", "34", ""]), 5);
    assert_eq!(total_len(&[s]), 5);
}
4962
#[test]
fn test_str_from_utf8() {
    // ASCII round-trips.
    assert_eq!(from_utf8(b"hello"), Ok("hello"));

    // Multibyte UTF-8 round-trips.
    let sample = "ศไทย中华Việt Nam";
    assert_eq!(from_utf8(sample.as_bytes()), Ok(sample));

    // A stray 0xFF continuation byte is rejected.
    assert!(from_utf8(b"hello\xFF").is_err());
}
4974
#[test]
fn test_pattern_deref_forward() {
    // &str, &&str, and &String all work as `contains` patterns.
    let data = "aabcdaa";
    let owned = "bcd".to_string();
    assert!(data.contains("bcd"));
    assert!(data.contains(&"bcd"));
    assert!(data.contains(&owned));
}
4982
#[test]
fn test_empty_match_indices() {
    // The empty pattern matches at every char boundary (not every byte).
    let data = "aä中!";
    let matches: Vec<_> = data.match_indices("").collect();
    assert_eq!(matches, [(0, ""), (1, ""), (3, ""), (6, ""), (7, "")]);
}
4989
#[test]
fn test_bool_from_str() {
    // Only the exact strings "true"/"false" parse successfully.
    let cases = [("true", Some(true)), ("false", Some(false)), ("not even a boolean", None)];
    for (input, expected) in cases {
        assert_eq!(input.parse::<bool>().ok(), expected);
    }
}
4996
// Assert that `s` contains every one of its own substrings, including "".
// NOTE(review): indexes every byte range, so callers should pass ASCII-only
// strings to avoid char-boundary panics.
fn check_contains_all_substrings(s: &str) {
    assert!(s.contains(""));
    for len in 1..=s.len() {
        for start in 0..=(s.len() - len) {
            assert!(s.contains(&s[start..start + len]));
        }
    }
}
5005
#[test]
#[cfg_attr(miri, ignore)] // Miri is too slow
fn strslice_issue_16589() {
    assert!("bananas".contains("nana"));

    // Prior to the fix for #16589, x.contains("abcdabcd") returned false;
    // exhaustively test every substring for good measure.
    check_contains_all_substrings("012345678901234567890123456789bcdabcdabcd");
}
5015
#[test]
fn strslice_issue_16878() {
    // Regression for #16878: needles with several near-matches in the haystack
    // must still be rejected.
    assert!(!"1234567ah012345678901ah".contains("hah"));
    assert!(!"00abc01234567890123456789abc".contains("bcabc"));
}
5021
#[test]
#[cfg_attr(miri, ignore)] // Miri is too slow
fn test_strslice_contains() {
    // Every substring of a realistic sentence must be found by `contains`.
    let sample = "There are moments, Jeeves, when one asks oneself, 'Do trousers matter?'";
    check_contains_all_substrings(sample);
}
5028
#[test]
fn test_rsplitn_char_iterator() {
    let data = "\nMäry häd ä little lämb\nLittle lämb\n";

    // Collect in reverse, then flip, so the expectations read left-to-right.
    fn collect_flipped<'a>(parts: impl Iterator<Item = &'a str>) -> Vec<&'a str> {
        let mut v: Vec<&str> = parts.collect();
        v.reverse();
        v
    }

    let expected = ["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"];
    assert_eq!(collect_flipped(data.rsplitn(4, ' ')), expected);
    assert_eq!(collect_flipped(data.rsplitn(4, |c: char| c == ' ')), expected);

    // Unicode
    let expected = ["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"];
    assert_eq!(collect_flipped(data.rsplitn(4, 'ä')), expected);
    assert_eq!(collect_flipped(data.rsplitn(4, |c: char| c == 'ä')), expected);
}
5050
#[test]
fn test_split_char_iterator() {
    let data = "\nMäry häd ä little lämb\nLittle lämb\n";

    // Forward iteration and reversed-then-flipped iteration must agree.
    fn check<'a, I>(parts: I, expected: &[&str])
    where
        I: DoubleEndedIterator<Item = &'a str> + Clone,
    {
        let forward: Vec<&str> = parts.clone().collect();
        assert_eq!(forward, expected);

        let mut backward: Vec<&str> = parts.rev().collect();
        backward.reverse();
        assert_eq!(backward, expected);
    }

    check(data.split(' '), &["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
    check(data.split(|c: char| c == ' '), &["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);

    // Unicode
    check(data.split('ä'), &["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
    check(data.split(|c: char| c == 'ä'), &["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
}
5084
#[test]
fn test_rev_split_char_iterator_no_trailing() {
    // `split` keeps the empty piece after a trailing separator, while
    // `split_terminator` drops it; both are exercised via reverse iteration.
    let data = "\nMäry häd ä little lämb\nLittle lämb\n";

    let mut pieces: Vec<&str> = data.split('\n').rev().collect();
    pieces.reverse();
    assert_eq!(pieces, ["", "Märy häd ä little lämb", "Little lämb", ""]);

    let mut pieces: Vec<&str> = data.split_terminator('\n').rev().collect();
    pieces.reverse();
    assert_eq!(pieces, ["", "Märy häd ä little lämb", "Little lämb"]);
}
5097
#[test]
fn test_utf16_code_units() {
    // 'é' is BMP (one code unit); U+1F4A9 requires a surrogate pair.
    let units: Vec<u16> = "é\u{1F4A9}".encode_utf16().collect();
    assert_eq!(units, [0xE9, 0xD83D, 0xDCA9]);
}
5102
#[test]
fn starts_with_in_unicode() {
    // A multi-byte leading char must not spuriously match an ASCII prefix.
    assert!(!"├── Cargo.toml".starts_with("# "));
}

#[test]
fn starts_short_long() {
    // A prefix longer than the haystack can never match.
    for &(hay, prefix) in &[("", "##"), ("##", "####"), ("##ä", "####"), ("##", "####ä")] {
        assert!(!hay.starts_with(prefix));
    }
    // Genuine prefixes match, including around multi-byte chars.
    for &(hay, prefix) in &[("####", "##"), ("####ä", "##"), ("##ä##", "##ä")] {
        assert!(hay.starts_with(prefix));
    }
    // The empty string is a prefix of every string, including itself.
    for &hay in &["", "ä", "#ä", "##ä", "ä###", "#ä##", "##ä#"] {
        assert!(hay.starts_with(""));
    }
}
5126
#[test]
fn contains_weird_cases() {
    // Single-char needles against a short mixed haystack, including a
    // needle whose UTF-8 encoding is wider than any haystack char.
    let haystack = "* \t";
    assert!(haystack.contains(' '));
    assert!(!haystack.contains('?'));
    assert!(!haystack.contains('\u{1F4A9}'));
}
5133
#[test]
fn trim_ws() {
    // Trim with a whitespace predicate from the start, the end, and both
    // ends — on a padded string and on an all-whitespace string. Each
    // directional call is repeated to match the original coverage.
    let ws = |c: char| c.is_whitespace();

    let padded = " \t  a \t  ";
    assert_eq!(padded.trim_start_matches(ws), "a \t  ");
    assert_eq!(padded.trim_end_matches(ws), " \t  a");
    assert_eq!(padded.trim_start_matches(ws), "a \t  ");
    assert_eq!(padded.trim_end_matches(ws), " \t  a");
    assert_eq!(padded.trim_matches(ws), "a");

    let blank = " \t   \t  ";
    assert_eq!(blank.trim_start_matches(ws), "");
    assert_eq!(blank.trim_end_matches(ws), "");
    assert_eq!(blank.trim_start_matches(ws), "");
    assert_eq!(blank.trim_end_matches(ws), "");
    assert_eq!(blank.trim_matches(ws), "");
}
5147
#[test]
fn to_lowercase() {
    assert_eq!("".to_lowercase(), "");
    assert_eq!("AÉDžaé ".to_lowercase(), "aédžaé ");

    // Greek capital sigma has two lowercase forms: final 'ς' at the end of
    // a word, medial 'σ' elsewhere. Apostrophes are not letters but do not
    // end a word (https://github.com/rust-lang/rust/issues/26035).
    let sigma_cases = [
        ("ΑΣ", "ας"),
        ("Α'Σ", "α'ς"),
        ("Α''Σ", "α''ς"),
        ("ΑΣ Α", "ας α"),
        ("Α'Σ Α", "α'ς α"),
        ("Α''Σ Α", "α''ς α"),
        ("ΑΣ' Α", "ας' α"),
        ("ΑΣ'' Α", "ας'' α"),
        ("Α'Σ' Α", "α'ς' α"),
        ("Α''Σ'' Α", "α''ς'' α"),
        ("Α Σ", "α σ"),
        ("Α 'Σ", "α 'σ"),
        ("Α ''Σ", "α ''σ"),
        ("Σ", "σ"),
        ("'Σ", "'σ"),
        ("''Σ", "''σ"),
        ("ΑΣΑ", "ασα"),
        ("ΑΣ'Α", "ασ'α"),
        ("ΑΣ''Α", "ασ''α"),
    ];
    for &(upper, lower) in &sigma_cases {
        assert_eq!(upper.to_lowercase(), lower);
    }
}

#[test]
fn to_uppercase() {
    // Some chars expand to multiple chars under uppercasing (e.g. 'ß' -> "SS").
    assert_eq!("".to_uppercase(), "");
    assert_eq!("aéDžßfiᾀ".to_uppercase(), "AÉDŽSSFIἈΙ");
}
5186
#[test]
fn test_into_string() {
    // The only way to acquire a Box<str> in the first place is through a
    // String, so just test the round-trip between Box<str> and String.
    let original = String::from("Some text goes here");
    let round_tripped = original.clone().into_boxed_str().into_string();
    assert_eq!(round_tripped, original);
}

#[test]
fn test_box_slice_clone() {
    // Cloning a Box<str> must preserve its contents exactly.
    let text = String::from("hello HELLO hello HELLO yes YES 5 中ä华!!!");
    let copy = text.clone().into_boxed_str().clone().into_string();
    assert_eq!(text, copy);
}

#[test]
fn test_cow_from() {
    // `Cow::from` picks Owned for a String and Borrowed for a &str.
    let borrowed = "borrowed";
    let owned = String::from("owned");
    let (o, b) = match (Cow::from(owned.clone()), Cow::from(borrowed)) {
        (Cow::Owned(o), Cow::Borrowed(b)) => (o, b),
        _ => panic!("invalid `Cow::from`"),
    };
    assert_eq!(o, owned);
    assert_eq!(b, borrowed);
}

#[test]
fn test_repeat() {
    // Empty input and zero repetitions both yield the empty string.
    assert_eq!("".repeat(3), "");
    assert_eq!("abc".repeat(0), "");
    assert_eq!("α".repeat(3), "ααα");
}
5219
// Exercises the unstable `Pattern`/`Searcher` API directly: each case checks
// that a searcher partitions its haystack into contiguous Match/Reject spans
// when stepped forwards and backwards.
mod pattern {
    use std::str::pattern::SearchStep::{self, Done, Match, Reject};
    use std::str::pattern::{Pattern, ReverseSearcher, Searcher};

    // Expands into a module with `fwd` and `bwd` tests that compare a
    // searcher's step sequence against the expected list of SearchSteps.
    macro_rules! make_test {
        ($name:ident, $p:expr, $h:expr, [$($e:expr,)*]) => {
            #[allow(unused_imports)]
            mod $name {
                use std::str::pattern::SearchStep::{Match, Reject};
                use super::{cmp_search_to_vec};
                #[test]
                fn fwd() {
                    cmp_search_to_vec(false, $p, $h, vec![$($e),*]);
                }
                #[test]
                fn bwd() {
                    cmp_search_to_vec(true, $p, $h, vec![$($e),*]);
                }
            }
        }
    }

    // Runs `pat` over `haystack` (backwards when `rev` is true), collects
    // every SearchStep, verifies the expected steps tile the haystack with
    // no gaps or overlaps, then compares actual vs. expected.
    fn cmp_search_to_vec<'a>(
        rev: bool,
        pat: impl Pattern<'a, Searcher: ReverseSearcher<'a>>,
        haystack: &'a str,
        right: Vec<SearchStep>,
    ) {
        let mut searcher = pat.into_searcher(haystack);
        let mut v = vec![];
        loop {
            match if !rev { searcher.next() } else { searcher.next_back() } {
                Match(a, b) => v.push(Match(a, b)),
                Reject(a, b) => v.push(Reject(a, b)),
                Done => break,
            }
        }
        // Backward runs collect steps in reverse order; flip for comparison.
        if rev {
            v.reverse();
        }

        let mut first_index = 0;
        let mut err = None;

        // Each expected step must start exactly where the previous one ended.
        for (i, e) in right.iter().enumerate() {
            match *e {
                Match(a, b) | Reject(a, b) if a <= b && a == first_index => {
                    first_index = b;
                }
                _ => {
                    err = Some(i);
                    break;
                }
            }
        }

        if let Some(err) = err {
            panic!("Input skipped range at {}", err);
        }

        // The steps must extend all the way to the end of the haystack.
        if first_index != haystack.len() {
            panic!("Did not cover whole input");
        }

        assert_eq!(v, right);
    }

    // &str patterns over ASCII and multi-byte haystacks, including empty
    // needles (which match at every char boundary) and empty haystacks.
    make_test!(
        str_searcher_ascii_haystack,
        "bb",
        "abbcbbd",
        [Reject(0, 1), Match(1, 3), Reject(3, 4), Match(4, 6), Reject(6, 7),]
    );
    make_test!(
        str_searcher_ascii_haystack_seq,
        "bb",
        "abbcbbbbd",
        [Reject(0, 1), Match(1, 3), Reject(3, 4), Match(4, 6), Match(6, 8), Reject(8, 9),]
    );
    make_test!(
        str_searcher_empty_needle_ascii_haystack,
        "",
        "abbcbbd",
        [
            Match(0, 0),
            Reject(0, 1),
            Match(1, 1),
            Reject(1, 2),
            Match(2, 2),
            Reject(2, 3),
            Match(3, 3),
            Reject(3, 4),
            Match(4, 4),
            Reject(4, 5),
            Match(5, 5),
            Reject(5, 6),
            Match(6, 6),
            Reject(6, 7),
            Match(7, 7),
        ]
    );
    make_test!(
        str_searcher_multibyte_haystack,
        " ",
        "├──",
        [Reject(0, 3), Reject(3, 6), Reject(6, 9),]
    );
    make_test!(
        str_searcher_empty_needle_multibyte_haystack,
        "",
        "├──",
        [
            Match(0, 0),
            Reject(0, 3),
            Match(3, 3),
            Reject(3, 6),
            Match(6, 6),
            Reject(6, 9),
            Match(9, 9),
        ]
    );
    make_test!(str_searcher_empty_needle_empty_haystack, "", "", [Match(0, 0),]);
    make_test!(str_searcher_nonempty_needle_empty_haystack, "├", "", []);
    // char patterns: adjacent matches, multi-byte rejects, and a needle
    // wider than any haystack char.
    make_test!(
        char_searcher_ascii_haystack,
        'b',
        "abbcbbd",
        [
            Reject(0, 1),
            Match(1, 2),
            Match(2, 3),
            Reject(3, 4),
            Match(4, 5),
            Match(5, 6),
            Reject(6, 7),
        ]
    );
    make_test!(
        char_searcher_multibyte_haystack,
        ' ',
        "├──",
        [Reject(0, 3), Reject(3, 6), Reject(6, 9),]
    );
    make_test!(
        char_searcher_short_haystack,
        '\u{1F4A9}',
        "* \t",
        [Reject(0, 1), Reject(1, 2), Reject(2, 3),]
    );
}
5370
// Expands into a test that feeds each `(args) -> [expected]` case through an
// iterator constructor. The first arm takes a forward and a backward
// constructor and additionally checks that the backward iterator, reversed,
// agrees with the forward result; the second arm is forward-only.
macro_rules! generate_iterator_test {
    {
        $name:ident {
            $(
                ($($arg:expr),*) -> [$($t:tt)*];
            )*
        }
        with $fwd:expr, $bwd:expr;
    } => {
        #[test]
        fn $name() {
            $(
                {
                    let res = vec![$($t)*];

                    // Forward construction must yield the expected sequence.
                    let fwd_vec: Vec<_> = ($fwd)($($arg),*).collect();
                    assert_eq!(fwd_vec, res);

                    // Backward construction, reversed, must agree with it.
                    let mut bwd_vec: Vec<_> = ($bwd)($($arg),*).collect();
                    bwd_vec.reverse();
                    assert_eq!(bwd_vec, res);
                }
            )*
        }
    };
    {
        $name:ident {
            $(
                ($($arg:expr),*) -> [$($t:tt)*];
            )*
        }
        with $fwd:expr;
    } => {
        #[test]
        fn $name() {
            $(
                {
                    let res = vec![$($t)*];

                    let fwd_vec: Vec<_> = ($fwd)($($arg),*).collect();
                    assert_eq!(fwd_vec, res);
                }
            )*
        }
    }
}
5417
// Double-ended splitting: the r* constructor reversed must match the
// forward constructor, for both char and &str separators.
generate_iterator_test! {
    double_ended_split {
        ("foo.bar.baz", '.') -> ["foo", "bar", "baz"];
        ("foo::bar::baz", "::") -> ["foo", "bar", "baz"];
    }
    with str::split, str::rsplit;
}

// split_terminator drops the empty trailing piece in both directions.
generate_iterator_test! {
    double_ended_split_terminator {
        ("foo;bar;baz;", ';') -> ["foo", "bar", "baz"];
    }
    with str::split_terminator, str::rsplit_terminator;
}

// matches/rmatches with a function pattern.
generate_iterator_test! {
    double_ended_matches {
        ("a1b2c3", char::is_numeric) -> ["1", "2", "3"];
    }
    with str::matches, str::rmatches;
}

// match_indices/rmatch_indices report (byte offset, matched slice) pairs.
generate_iterator_test! {
    double_ended_match_indices {
        ("a1b2c3", char::is_numeric) -> [(1, "1"), (3, "2"), (5, "3")];
    }
    with str::match_indices, str::rmatch_indices;
}

// splitn is forward-only: the final piece absorbs the remaining separators.
generate_iterator_test! {
    not_double_ended_splitn {
        ("foo::bar::baz", 2, "::") -> ["foo", "bar::baz"];
    }
    with str::splitn;
}

// rsplitn is likewise not double-ended; it splits from the back.
generate_iterator_test! {
    not_double_ended_rsplitn {
        ("foo::bar::baz", 2, "::") -> ["baz", "foo::bar"];
    }
    with str::rsplitn;
}
5460
#[test]
fn different_str_pattern_forwarding_lifetimes() {
    use std::str::pattern::Pattern;

    // `&P` must implement Pattern for every borrow lifetime 'b independently
    // of the haystack lifetime 'a, so the same pattern value can be
    // re-borrowed and reused across multiple searches in a loop.
    fn foo<'a, P>(p: P)
    where
        for<'b> &'b P: Pattern<'a>,
    {
        for _ in 0..3 {
            "asdf".find(&p);
        }
    }

    foo::<&str>("x");
}
5476
#[test]
fn test_str_multiline() {
    // A trailing backslash inside a string literal consumes the newline and
    // all leading whitespace on the following line, so both literals below
    // collapse to single-line strings.
    let a: String = "this \
is a test"
        .to_string();
    let b: String = "this \
              is \
              another \
              test"
        .to_string();
    assert_eq!(a, "this is a test".to_string());
    assert_eq!(b, "this is another test".to_string());
}
5490
#[test]
fn test_str_escapes() {
    // The line-continuation backslash strips the newline and the indentation
    // before the closing quote, leaving only the two escaped backslashes.
    let x = "\\\\\
    ";
    assert_eq!(x, r"\\"); // extraneous whitespace stripped
}
5497
#[test]
fn const_str_ptr() {
    // Three const views of the same byte data — the array, a reference to
    // it, and a raw pointer — must agree both as addresses and as values.
    // Byte literals (b'h') replace the old `'h' as u8` casts
    // (clippy::char_lit_as_u8): same values, clearer intent.
    const A: [u8; 2] = [b'h', b'i'];
    const B: &'static [u8; 2] = &A;
    const C: *const u8 = B as *const u8;

    // Miri does not deduplicate consts (https://github.com/rust-lang/miri/issues/131)
    #[cfg(not(miri))]
    {
        let foo = &A as *const u8;
        assert_eq!(foo, C);
    }

    unsafe {
        // SAFETY: A is valid UTF-8 ("hi"); C and &B[0] point to A's
        // initialized first byte, so the reads are in-bounds.
        assert_eq!(from_utf8_unchecked(&A), "hi");
        assert_eq!(*C, A[0]);
        assert_eq!(*(&B[0] as *const u8), A[0]);
    }
}
5517
#[test]
fn utf8() {
    // Spot-check char code point values across one- and two-byte ranges.
    let yen: char = '¥'; // 0xa5
    let c_cedilla: char = 'ç'; // 0xe7
    let thorn: char = 'þ'; // 0xfe
    let y_diaeresis: char = 'ÿ'; // 0xff
    let pi: char = 'Π'; // 0x3a0

    assert_eq!(yen as isize, 0xa5);
    assert_eq!(c_cedilla as isize, 0xe7);
    assert_eq!(thorn as isize, 0xfe);
    assert_eq!(y_diaeresis as isize, 0xff);
    assert_eq!(pi as isize, 0x3a0);

    assert_eq!(pi as isize, '\u{3a0}' as isize);
    assert_eq!('\x0a' as isize, '\n' as isize);

    // Literal strings must agree byte-for-byte with their \u{...} spellings.
    let bhutan: String = "འབྲུག་ཡུལ།".to_string();
    let japan: String = "日本".to_string();
    let uzbekistan: String = "Ўзбекистон".to_string();
    let austria: String = "Österreich".to_string();

    let bhutan_e: String =
        "\u{f60}\u{f56}\u{fb2}\u{f74}\u{f42}\u{f0b}\u{f61}\u{f74}\u{f63}\u{f0d}".to_string();
    let japan_e: String = "\u{65e5}\u{672c}".to_string();
    let uzbekistan_e: String =
        "\u{40e}\u{437}\u{431}\u{435}\u{43a}\u{438}\u{441}\u{442}\u{43e}\u{43d}".to_string();
    let austria_e: String = "\u{d6}sterreich".to_string();

    let oo: char = 'Ö';
    assert_eq!(oo as isize, 0xd6);

    /// Asserts that `a` and `b` contain identical UTF-8 bytes.
    fn check_str_eq(a: String, b: String) {
        // Compare lengths first: the old index-based loop only walked `a`'s
        // bytes, so extra trailing bytes in `b` went undetected, and a
        // shorter `b` caused an opaque index panic rather than a clear
        // assertion failure.
        assert_eq!(a.len(), b.len());
        for (i, (ab, bb)) in a.bytes().zip(b.bytes()).enumerate() {
            assert_eq!(ab, bb, "byte {} differs", i);
        }
    }

    check_str_eq(bhutan, bhutan_e);
    check_str_eq(japan, japan_e);
    check_str_eq(uzbekistan, uzbekistan_e);
    check_str_eq(austria, austria_e);
}
5567
#[test]
fn utf8_chars() {
    // Chars of 1, 2, 3, and 4 bytes
    let chs: Vec<char> = vec!['e', 'é', '€', '\u{10000}'];
    let s: String = chs.iter().cloned().collect();
    let schs: Vec<char> = s.chars().collect();

    // 1 + 2 + 3 + 4 bytes in total, but only 4 scalar values.
    assert_eq!(s.len(), 10);
    assert_eq!(s.chars().count(), 4);
    assert_eq!(schs.len(), 4);
    assert_eq!(schs.iter().cloned().collect::<String>(), s);

    // A round-trip through the raw bytes must validate.
    assert!(from_utf8(s.as_bytes()).is_ok());

    // Truncated or malformed sequences must be rejected.
    // Uses `is_err()` in place of the old double-negative `!…is_ok()`.
    // invalid prefix
    assert!(from_utf8(&[0x80]).is_err());
    // invalid 2 byte prefix
    assert!(from_utf8(&[0xc0]).is_err());
    assert!(from_utf8(&[0xc0, 0x10]).is_err());
    // invalid 3 byte prefix
    assert!(from_utf8(&[0xe0]).is_err());
    assert!(from_utf8(&[0xe0, 0x10]).is_err());
    assert!(from_utf8(&[0xe0, 0xff, 0x10]).is_err());
    // invalid 4 byte prefix
    assert!(from_utf8(&[0xf0]).is_err());
    assert!(from_utf8(&[0xf0, 0x10]).is_err());
    assert!(from_utf8(&[0xf0, 0xff, 0x10]).is_err());
    assert!(from_utf8(&[0xf0, 0xff, 0xff, 0x10]).is_err());
}
5596use std::collections::BTreeSet;
5597
#[test]
fn test_hash() {
    use crate::hash;

    // A BTreeSet's hash must not depend on insertion order — both sets hold
    // {1, 2, 3} after inserting in opposite orders.
    let mut first = BTreeSet::new();
    let mut second = BTreeSet::new();

    first.extend([1, 2, 3].iter().copied());
    second.extend([3, 2, 1].iter().copied());

    assert_eq!(hash(&first), hash(&second));
}
5615use std::borrow::Cow;
5616use std::cell::Cell;
5617use std::collections::TryReserveError::*;
5618use std::ops::Bound;
5619use std::ops::Bound::*;
5620use std::ops::RangeBounds;
5621use std::panic;
5622use std::str;
5623
/// Local stand-in for the long-removed `IntoCow` conversion trait, kept so
/// the lossy-decoding tests in this file can build expected `Cow` values.
pub trait IntoCow<'a, B: ?Sized>
where
    B: ToOwned,
{
    /// Converts `self` into a `Cow`, borrowed or owned as appropriate.
    fn into_cow(self) -> Cow<'a, B>;
}

// An owned String becomes `Cow::Owned` without copying.
impl<'a> IntoCow<'a, str> for String {
    fn into_cow(self) -> Cow<'a, str> {
        Cow::Owned(self)
    }
}

// A borrowed &str becomes `Cow::Borrowed` for its own lifetime.
impl<'a> IntoCow<'a, str> for &'a str {
    fn into_cow(self) -> Cow<'a, str> {
        Cow::Borrowed(self)
    }
}
5642
#[test]
fn test_from_str() {
    // `str::parse::<String>` is infallible and yields an owned copy.
    let parsed: Option<String> = "string".parse().ok();
    assert_eq!(parsed.as_deref(), Some("string"));
}

#[test]
fn test_from_cow_str() {
    // Both Cow variants convert into the same String contents.
    for cow in vec![Cow::Borrowed("string"), Cow::Owned(String::from("string"))] {
        assert_eq!(String::from(cow), "string");
    }
}

#[test]
fn test_unsized_to_string() {
    // `to_string` is callable on the unsized `str` pointee itself.
    let slice: &str = "abc";
    let _owned: String = (*slice).to_string();
}
5660
#[test]
fn test_from_utf8() {
    // Valid ASCII and multi-byte input round-trips losslessly.
    assert_eq!(String::from_utf8(b"hello".to_vec()).unwrap(), "hello");
    let thai = "ศไทย中华Việt Nam";
    assert_eq!(String::from_utf8(thai.as_bytes().to_vec()).unwrap(), thai);

    // An invalid byte produces an error that still exposes the raw input,
    // clones/compares equal, and reports where validity ended.
    let err = String::from_utf8(b"hello\xFF".to_vec()).unwrap_err();
    assert_eq!(err.as_bytes(), b"hello\xff");
    let err_clone = err.clone();
    assert_eq!(err, err_clone);
    assert_eq!(err.into_bytes(), b"hello\xff".to_vec());
    // "hello" (5 bytes) was valid before the 0xFF.
    assert_eq!(err_clone.utf8_error().valid_up_to(), 5);
}
5677
#[test]
fn test_from_utf8_lossy() {
    // Valid input converts unchanged.
    assert_eq!(String::from_utf8_lossy(b"hello"), Cow::Borrowed("hello"));
    let thai = "ศไทย中华Việt Nam";
    assert_eq!(String::from_utf8_lossy(thai.as_bytes()), Cow::Borrowed(thai));

    // Each maximal invalid sequence is replaced per the Unicode
    // "substitution of maximal subparts" policy; surrogates and overlong
    // encodings are never valid UTF-8.
    let cases: &[(&[u8], &str)] = &[
        (b"Hello\xC2 There\xFF Goodbye", "Hello\u{FFFD} There\u{FFFD} Goodbye"),
        (b"Hello\xC0\x80 There\xE6\x83 Goodbye", "Hello\u{FFFD}\u{FFFD} There\u{FFFD} Goodbye"),
        (b"\xF5foo\xF5\x80bar", "\u{FFFD}foo\u{FFFD}\u{FFFD}bar"),
        (b"\xF1foo\xF1\x80bar\xF1\x80\x80baz", "\u{FFFD}foo\u{FFFD}bar\u{FFFD}baz"),
        (b"\xF4foo\xF4\x80bar\xF4\xBFbaz", "\u{FFFD}foo\u{FFFD}bar\u{FFFD}\u{FFFD}baz"),
        (
            b"\xF0\x80\x80\x80foo\xF0\x90\x80\x80bar",
            "\u{FFFD}\u{FFFD}\u{FFFD}\u{FFFD}foo\u{10000}bar",
        ),
        // surrogates
        (
            b"\xED\xA0\x80foo\xED\xBF\xBFbar",
            "\u{FFFD}\u{FFFD}\u{FFFD}foo\u{FFFD}\u{FFFD}\u{FFFD}bar",
        ),
    ];
    for &(input, expected) in cases {
        assert_eq!(String::from_utf8_lossy(input), expected);
    }
}
5731
#[test]
fn test_from_utf16() {
    // Pairs of (expected String, UTF-16 code units); every unit vector is
    // well-formed (valid surrogate pairs plus BMP scalars).
    // NOTE(review): the string literals below render as U+FFFD replacement
    // characters — that looks like mojibake from an encoding round-trip.
    // Verify against upstream that they are the supplementary-plane
    // characters actually encoded by the adjacent unit vectors.
    let pairs = [
        (
            String::from("�������\n"),
            vec![
                0xd800, 0xdf45, 0xd800, 0xdf3f, 0xd800, 0xdf3b, 0xd800, 0xdf46, 0xd800, 0xdf39,
                0xd800, 0xdf3b, 0xd800, 0xdf30, 0x000a,
            ],
        ),
        (
            String::from("������ ���\n"),
            vec![
                0xd801, 0xdc12, 0xd801, 0xdc49, 0xd801, 0xdc2e, 0xd801, 0xdc40, 0xd801, 0xdc32,
                0xd801, 0xdc4b, 0x0020, 0xd801, 0xdc0f, 0xd801, 0xdc32, 0xd801, 0xdc4d, 0x000a,
            ],
        ),
        (
            String::from("������·�������\n"),
            vec![
                0xd800, 0xdf00, 0xd800, 0xdf16, 0xd800, 0xdf0b, 0xd800, 0xdf04, 0xd800, 0xdf11,
                0xd800, 0xdf09, 0x00b7, 0xd800, 0xdf0c, 0xd800, 0xdf04, 0xd800, 0xdf15, 0xd800,
                0xdf04, 0xd800, 0xdf0b, 0xd800, 0xdf09, 0xd800, 0xdf11, 0x000a,
            ],
        ),
        (
            String::from("������ �� ��� ����� ��\n"),
            vec![
                0xd801, 0xdc8b, 0xd801, 0xdc98, 0xd801, 0xdc88, 0xd801, 0xdc91, 0xd801, 0xdc9b,
                0xd801, 0xdc92, 0x0020, 0xd801, 0xdc95, 0xd801, 0xdc93, 0x0020, 0xd801, 0xdc88,
                0xd801, 0xdc9a, 0xd801, 0xdc8d, 0x0020, 0xd801, 0xdc8f, 0xd801, 0xdc9c, 0xd801,
                0xdc92, 0xd801, 0xdc96, 0xd801, 0xdc86, 0x0020, 0xd801, 0xdc95, 0xd801, 0xdc86,
                0x000a,
            ],
        ),
        // Issue #12318, even-numbered non-BMP planes
        (String::from("\u{20000}"), vec![0xD840, 0xDC00]),
    ];

    // Each pair must round-trip in both directions, with the strict and
    // lossy decoders agreeing on valid input.
    for p in &pairs {
        let (s, u) = (*p).clone();
        let s_as_utf16 = s.encode_utf16().collect::<Vec<u16>>();
        let u_as_string = String::from_utf16(&u).unwrap();

        assert!(core::char::decode_utf16(u.iter().cloned()).all(|r| r.is_ok()));
        assert_eq!(s_as_utf16, u);

        assert_eq!(u_as_string, s);
        assert_eq!(String::from_utf16_lossy(&u), s);

        assert_eq!(String::from_utf16(&s_as_utf16).unwrap(), s);
        assert_eq!(u_as_string.encode_utf16().collect::<Vec<u16>>(), u);
    }
}
5786
#[test]
fn test_utf16_invalid() {
    // completely positive cases tested above.
    // Unpaired or misordered surrogate halves must all be rejected.
    let bad: &[&[u16]] = &[
        // lead + eof
        &[0xD800],
        // lead + lead
        &[0xD800, 0xD800],
        // isolated trail
        &[0x0061, 0xDC00],
        // general
        &[0xD800, 0xd801, 0xdc8b, 0xD800],
    ];
    for &units in bad {
        assert!(String::from_utf16(units).is_err());
    }
}
5801
#[test]
fn test_from_utf16_lossy() {
    // completely positive cases tested above.
    // Each invalid surrogate half must become exactly one U+FFFD.
    // lead + eof
    assert_eq!(String::from_utf16_lossy(&[0xD800]), String::from("\u{FFFD}"));
    // lead + lead
    assert_eq!(String::from_utf16_lossy(&[0xD800, 0xD800]), String::from("\u{FFFD}\u{FFFD}"));

    // isolated trail
    assert_eq!(String::from_utf16_lossy(&[0x0061, 0xDC00]), String::from("a\u{FFFD}"));

    // general
    // NOTE(review): the middle character of the expected literal renders as
    // U+FFFD; it should be the scalar decoded from the valid pair
    // 0xd801,0xdc8b — confirm the literal was not mangled in transit.
    assert_eq!(
        String::from_utf16_lossy(&[0xD800, 0xd801, 0xdc8b, 0xD800]),
        String::from("\u{FFFD}�\u{FFFD}")
    );
}
5819
#[test]
fn test_push_bytes() {
    let mut s = String::from("ABC");
    // SAFETY: b"D" is valid single-byte UTF-8, so appending it to the
    // backing Vec keeps the String valid.
    unsafe {
        s.as_mut_vec().extend_from_slice(b"D");
    }
    assert_eq!(s, "ABCD");
}
5829
#[test]
fn test_push_str() {
    // Appending empty, ASCII, and multi-byte chunks in sequence.
    let mut s = String::new();
    for &(chunk, expected) in
        &[("", ""), ("abc", "abc"), ("ประเทศไทย中华Việt Nam", "abcประเทศไทย中华Việt Nam")]
    {
        s.push_str(chunk);
        assert_eq!(&s[0..], expected);
    }
}

#[test]
fn test_add_assign() {
    // `+=` on String behaves exactly like push_str.
    let mut s = String::new();
    for &(chunk, expected) in
        &[("", ""), ("abc", "abc"), ("ประเทศไทย中华Việt Nam", "abcประเทศไทย中华Việt Nam")]
    {
        s += chunk;
        assert_eq!(s.as_str(), expected);
    }
}
5851
#[test]
fn test_push() {
    // Pushing chars of each UTF-8 width onto an existing string.
    let mut data = String::from("ประเทศไทย中");
    data.push('华');
    data.push('b'); // 1 byte
    data.push('¢'); // 2 byte
    data.push('€'); // 3 byte
    // NOTE(review): this literal renders as U+FFFD (itself 3 bytes) while
    // the comment says 4 bytes — the original 4-byte char appears mangled;
    // verify against upstream.
    data.push('�'); // 4 byte
    assert_eq!(data, "ประเทศไทย中华b¢€�");
}

#[test]
fn test_pop() {
    // `pop` removes and returns scalar values from the end, widest first
    // here; mirrors the pushes in test_push above.
    // NOTE(review): as in test_push, the "4 bytes" char renders as U+FFFD —
    // likely mojibake; verify against upstream.
    let mut data = String::from("ประเทศไทย中华b¢€�");
    assert_eq!(data.pop().unwrap(), '�'); // 4 bytes
    assert_eq!(data.pop().unwrap(), '€'); // 3 bytes
    assert_eq!(data.pop().unwrap(), '¢'); // 2 bytes
    assert_eq!(data.pop().unwrap(), 'b'); // 1 bytes
    assert_eq!(data.pop().unwrap(), '华');
    assert_eq!(data, "ประเทศไทย中");
}
5873
#[test]
fn test_split_off_empty() {
    // Splitting at the very end leaves an empty tail.
    let original = "Hello, world!";
    let mut head = String::from(original);
    let tail = head.split_off(original.len());
    assert!(tail.is_empty());
}

#[test]
#[should_panic]
fn test_split_off_past_end() {
    // Any index beyond the end must panic.
    let original = "Hello, world!";
    let mut head = String::from(original);
    let _ = head.split_off(original.len() + 1);
}

#[test]
#[should_panic]
fn test_split_off_mid_char() {
    // '山' is three bytes, so byte index 1 is not a char boundary.
    let mut shan = String::from("山");
    let _broken_mountain = shan.split_off(1);
}

#[test]
fn test_split_off_ascii() {
    // The head retains its original allocation.
    let mut head = String::from("ABCD");
    let capacity_before = head.capacity();
    let tail = head.split_off(2);
    assert_eq!(head, "AB");
    assert_eq!(tail, "CD");
    assert_eq!(head.capacity(), capacity_before);
}

#[test]
fn test_split_off_unicode() {
    // Splitting on a multi-byte char boundary works like the ASCII case.
    let mut nihon = String::from("日本語");
    let capacity_before = nihon.capacity();
    let go = nihon.split_off("日本".len());
    assert_eq!(nihon, "日本");
    assert_eq!(go, "語");
    assert_eq!(nihon.capacity(), capacity_before);
}
5916
#[test]
fn test_str_truncate() {
    // Truncating to the current length is a no-op; shorter lengths shrink.
    let mut s = String::from("12345");
    s.truncate(5);
    assert_eq!(s, "12345");
    s.truncate(3);
    assert_eq!(s, "123");
    s.truncate(0);
    assert_eq!(s, "");

    // Truncation keeps the buffer: pushing back within the old capacity
    // must not reallocate, so the data pointer stays stable.
    let mut s = String::from("12345");
    let p = s.as_ptr();
    s.truncate(3);
    s.push('6'); // single char: `push`, not `push_str("6")` (clippy idiom)
    let p_ = s.as_ptr();
    assert_eq!(p_, p);
}
5934
#[test]
fn test_str_truncate_invalid_len() {
    // A length beyond the end leaves the string untouched (no panic).
    let mut s = String::from("12345");
    s.truncate(6);
    assert_eq!(s, "12345");
}

#[test]
#[should_panic]
fn test_str_truncate_split_codepoint() {
    // 'ü' occupies two bytes; truncating inside it must panic.
    let mut s = String::from("\u{FC}"); // ü
    s.truncate(1);
}
5948
#[test]
fn test_str_clear() {
    // Clearing empties the string without any other observable effect.
    let mut s = String::from("12345");
    s.clear();
    assert_eq!(s.len(), 0);
    assert_eq!(s, "");
}

#[test]
fn test_str_add() {
    // `+` consumes the left-hand String and appends the &str operand.
    let a = String::from("12345");
    let b = (a + "2") + "2";
    assert_eq!(b.len(), 7);
    assert_eq!(b, "1234522");
}
5965
#[test]
fn remove() {
    // `remove` returns the char at the byte index and closes the gap left
    // by its UTF-8 encoding ('ศ' is 3 bytes, 'ệ' is 3 bytes).
    let mut s = "ศไทย中华Việt Nam; foobar".to_string();
    assert_eq!(s.remove(0), 'ศ');
    assert_eq!(s.len(), 33);
    assert_eq!(s, "ไทย中华Việt Nam; foobar");
    assert_eq!(s.remove(17), 'ệ');
    assert_eq!(s, "ไทย中华Vit Nam; foobar");
}

#[test]
#[should_panic]
fn remove_bad() {
    // Byte index 1 falls inside the 3-byte 'ศ', so this must panic.
    "ศ".to_string().remove(1);
}
5981
#[test]
fn test_remove_matches() {
    // `remove_matches` (unstable) strips every occurrence of the pattern
    // in place.
    let mut s = "abc".to_string();

    s.remove_matches('b');
    assert_eq!(s, "ac");
    // Removing an absent pattern is a no-op.
    s.remove_matches('b');
    assert_eq!(s, "ac");

    let mut s = "abcb".to_string();

    // Multiple, non-adjacent occurrences.
    s.remove_matches('b');
    assert_eq!(s, "ac");

    // Multi-byte pattern appearing at both ends of the string.
    let mut s = "ศไทย中华Việt Nam; foobarศ".to_string();
    s.remove_matches('ศ');
    assert_eq!(s, "ไทย中华Việt Nam; foobar");

    // Empty pattern on an empty string.
    let mut s = "".to_string();
    s.remove_matches("");
    assert_eq!(s, "");

    // Removing every char leaves the empty string.
    let mut s = "aaaaa".to_string();
    s.remove_matches('a');
    assert_eq!(s, "");
}
6008
#[test]
fn test_retain() {
    let mut s = String::from("α_β_γ");

    // A tautological predicate keeps everything.
    s.retain(|_| true);
    assert_eq!(s, "α_β_γ");
    // Drop the separators, then individual letters.
    s.retain(|c| c != '_');
    assert_eq!(s, "αβγ");
    s.retain(|c| c != 'β');
    assert_eq!(s, "αγ");
    s.retain(|c| c == 'α');
    assert_eq!(s, "α");
    // A contradictory predicate clears the string.
    s.retain(|_| false);
    assert_eq!(s, "");

    // If the predicate panics part-way through, the String must still hold
    // valid UTF-8 afterwards.
    let mut s = String::from("0è0");
    let _ = panic::catch_unwind(panic::AssertUnwindSafe(|| {
        let mut calls = 0;
        s.retain(|_| {
            calls += 1;
            match calls {
                1 => false,
                2 => true,
                _ => panic!(),
            }
        });
    }));
    assert!(std::str::from_utf8(s.as_bytes()).is_ok());
}
6042
#[test]
fn insert() {
    // Inserting multi-byte chars at byte offsets that are char boundaries.
    let mut s = "foobar".to_string();
    s.insert(0, 'ệ'); // 'ệ' occupies bytes 0..3 afterwards
    assert_eq!(s, "ệfoobar");
    s.insert(6, 'ย');
    assert_eq!(s, "ệfooยbar");
}

#[test]
#[should_panic]
fn insert_bad1() {
    // Index past the end of an empty string must panic.
    "".to_string().insert(1, 't');
}
#[test]
#[should_panic]
fn insert_bad2() {
    // Byte index 1 is inside the 3-byte 'ệ', so this must panic.
    "ệ".to_string().insert(1, 't');
}
6062
#[test]
fn test_slicing() {
    // Full, prefix, suffix, and interior byte-range slices of a String.
    let s = "foobar".to_string();
    assert_eq!(&s[..], "foobar");
    assert_eq!(&s[..3], "foo");
    assert_eq!(&s[3..], "bar");
    assert_eq!(&s[1..4], "oob");
}

#[test]
fn test_simple_types() {
    // `ToString` for primitives routes through their Display impls.
    assert_eq!(1.to_string(), "1");
    assert_eq!((-1).to_string(), "-1");
    assert_eq!(200.to_string(), "200");
    assert_eq!(2.to_string(), "2");
    assert_eq!(true.to_string(), "true");
    assert_eq!(false.to_string(), "false");
    // For a String, `to_string` is the identity (modulo a copy).
    assert_eq!(("hi".to_string()).to_string(), "hi");
}
6082
#[test]
fn test_vectors() {
    // Debug formatting of vectors: empty, flat, and nested.
    let x: Vec<i32> = vec![];
    assert_eq!(format!("{:?}", x), "[]");
    assert_eq!(format!("{:?}", vec![1]), "[1]");
    assert_eq!(format!("{:?}", vec![1, 2, 3]), "[1, 2, 3]");
    // assert_eq! (not assert!(a == b)) so a failure shows both sides.
    assert_eq!(format!("{:?}", vec![vec![], vec![1], vec![1, 1]]), "[[], [1], [1, 1]]");
}
6091
#[test]
fn test_from_iterator() {
    // Strings can be collected from char iterators and from &str iterators,
    // and extended by both.
    let whole = "ศไทย中华Việt Nam".to_string();
    let head = "ศไทย中华";
    let tail = "Việt Nam";

    let from_chars: String = whole.chars().collect();
    assert_eq!(from_chars, whole);

    let mut char_extended = head.to_string();
    char_extended.extend(tail.chars());
    assert_eq!(char_extended, whole);

    let from_strs: String = vec![head, tail].into_iter().collect();
    assert_eq!(from_strs, whole);

    let mut str_extended = head.to_string();
    str_extended.extend(vec![tail]);
    assert_eq!(str_extended, whole);
}
6112
6113#[test]
6114fn test_drain() {
6115    let mut s = String::from("αβγ");
6116    assert_eq!(s.drain(2..4).collect::<String>(), "β");
6117    assert_eq!(s, "αγ");
6118
6119    let mut t = String::from("abcd");
6120    t.drain(..0);
6121    assert_eq!(t, "abcd");
6122    t.drain(..1);
6123    assert_eq!(t, "bcd");
6124    t.drain(3..);
6125    assert_eq!(t, "bcd");
6126    t.drain(..);
6127    assert_eq!(t, "");
6128}
6129
// Excluded(usize::MAX) as a start bound overflows when converted to an
// inclusive index; drain must panic rather than wrap around.
#[test]
#[should_panic]
fn test_drain_start_overflow() {
    let mut s = String::from("abc");
    s.drain((Excluded(usize::MAX), Included(0)));
}

// Included(usize::MAX) as an end bound overflows likewise and must panic.
#[test]
#[should_panic]
fn test_drain_end_overflow() {
    let mut s = String::from("abc");
    s.drain((Included(0), Included(usize::MAX)));
}
6143
// replace_range swaps a byte range for a replacement of a different length.
#[test]
fn test_replace_range() {
    let mut s = "Hello, world!".to_owned();
    s.replace_range(7..12, "世界");
    assert_eq!(s, "Hello, 世界!");
}

// Range ending inside a multi-byte char (byte 8 is mid-'世') must panic.
#[test]
#[should_panic]
fn test_replace_range_char_boundary() {
    let mut s = "Hello, 世界!".to_owned();
    s.replace_range(..8, "");
}

// Inclusive ranges are supported and may grow or shrink the string.
#[test]
fn test_replace_range_inclusive_range() {
    let mut v = String::from("12345");
    v.replace_range(2..=3, "789");
    assert_eq!(v, "127895");
    v.replace_range(1..=2, "A");
    assert_eq!(v, "1A895");
}

// End index past the string length must panic.
#[test]
#[should_panic]
fn test_replace_range_out_of_bounds() {
    let mut s = String::from("12345");
    s.replace_range(5..6, "789");
}

// Inclusive end index past the last valid index must panic.
#[test]
#[should_panic]
fn test_replace_range_inclusive_out_of_bounds() {
    let mut s = String::from("12345");
    s.replace_range(5..=5, "789");
}

// Start bound Excluded(usize::MAX) overflows on conversion and must panic.
#[test]
#[should_panic]
fn test_replace_range_start_overflow() {
    let mut s = String::from("123");
    s.replace_range((Excluded(usize::MAX), Included(0)), "");
}

// End bound Included(usize::MAX) overflows on conversion and must panic.
#[test]
#[should_panic]
fn test_replace_range_end_overflow() {
    let mut s = String::from("456");
    s.replace_range((Included(0), Included(usize::MAX)), "");
}

// An empty replacement string simply deletes the range.
#[test]
fn test_replace_range_empty() {
    let mut s = String::from("12345");
    s.replace_range(1..2, "");
    assert_eq!(s, "1345");
}

// An unbounded range replaces the entire string.
#[test]
fn test_replace_range_unbounded() {
    let mut s = String::from("12345");
    s.replace_range(.., "");
    assert_eq!(s, "");
}
6208
// A malicious RangeBounds whose start_bound() returns a different value on
// each call: replace_range must not end up producing invalid UTF-8 (the
// final check re-validates the buffer).
#[test]
fn test_replace_range_evil_start_bound() {
    struct EvilRange(Cell<bool>);

    impl RangeBounds<usize> for EvilRange {
        fn start_bound(&self) -> Bound<&usize> {
            // First call reports 0, every later call reports 1.
            Bound::Included(if self.0.get() {
                &1
            } else {
                self.0.set(true);
                &0
            })
        }
        fn end_bound(&self) -> Bound<&usize> {
            Bound::Unbounded
        }
    }

    let mut s = String::from("�");
    s.replace_range(EvilRange(Cell::new(false)), "");
    assert_eq!(Ok(""), str::from_utf8(s.as_bytes()));
}

// Same idea for a shifting end_bound(): the first call claims an
// out-of-range end, later calls an in-range one.
#[test]
fn test_replace_range_evil_end_bound() {
    struct EvilRange(Cell<bool>);

    impl RangeBounds<usize> for EvilRange {
        fn start_bound(&self) -> Bound<&usize> {
            Bound::Included(&0)
        }
        fn end_bound(&self) -> Bound<&usize> {
            // First call reports 4 (past the end), later calls report 3.
            Bound::Excluded(if self.0.get() {
                &3
            } else {
                self.0.set(true);
                &4
            })
        }
    }

    let mut s = String::from("�");
    s.replace_range(EvilRange(Cell::new(false)), "");
    assert_eq!(Ok(""), str::from_utf8(s.as_bytes()));
}
6254
// String::extend also accepts an iterator of &char (here from an array ref).
#[test]
fn test_extend_ref() {
    let mut a = "foo".to_string();
    a.extend(&['b', 'a', 'r']);

    assert_eq!(&a, "foobar");
}

// into_boxed_str converts a String to Box<str> preserving its contents.
#[test]
fn test_into_boxed_str() {
    let xs = String::from("hello my name is bob");
    let ys = xs.into_boxed_str();
    assert_eq!(&*ys, "hello my name is bob");
}
6269
// reserve_exact grows capacity by at least the requested amount; mirrors
// test_reserve but with the exact variant.
#[test]
fn test_reserve_exact() {
    // This is all the same as test_reserve

    let mut s = String::new();
    assert_eq!(s.capacity(), 0);

    s.reserve_exact(2);
    assert!(s.capacity() >= 2);

    for _i in 0..16 {
        s.push('0');
    }

    assert!(s.capacity() >= 16);
    s.reserve_exact(16);
    // 16 bytes used + 16 reserved.
    assert!(s.capacity() >= 32);

    s.push('0');

    s.reserve_exact(16);
    // 17 bytes used + 16 reserved.
    assert!(s.capacity() >= 33)
}
6293
// try_reserve error classification: isize::MAX is allowed (may OOM), while
// anything above must fail — CapacityOverflow on 16/32-bit, AllocError on
// 64-bit where the allocator is expected to refuse.
#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve() {
    // These are the interesting cases:
    // * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM)
    // * > isize::MAX should always fail
    //    * On 16/32-bit should CapacityOverflow
    //    * On 64-bit should OOM
    // * overflow may trigger when adding `len` to `cap` (in number of elements)
    // * overflow may trigger when multiplying `new_cap` by size_of::<T> (to get bytes)

    const MAX_CAP: usize = isize::MAX as usize;
    const MAX_USIZE: usize = usize::MAX;

    // On 16/32-bit, we check that allocations don't exceed isize::MAX,
    // on 64-bit, we assume the OS will give an OOM for such a ridiculous size.
    // Any platform that succeeds for these requests is technically broken with
    // ptr::offset because LLVM is the worst.
    let guards_against_isize = usize::BITS < 64;

    {
        // Note: basic stuff is checked by test_reserve
        let mut empty_string: String = String::new();

        // Check isize::MAX doesn't count as an overflow
        if let Err(CapacityOverflow) = empty_string.try_reserve(MAX_CAP) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        // Play it again, frank! (just to be sure)
        if let Err(CapacityOverflow) = empty_string.try_reserve(MAX_CAP) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }

        if guards_against_isize {
            // Check isize::MAX + 1 does count as overflow
            if let Err(CapacityOverflow) = empty_string.try_reserve(MAX_CAP + 1) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!")
            }

            // Check usize::MAX does count as overflow
            if let Err(CapacityOverflow) = empty_string.try_reserve(MAX_USIZE) {
            } else {
                panic!("usize::MAX should trigger an overflow!")
            }
        } else {
            // Check isize::MAX + 1 is an OOM
            if let Err(AllocError { .. }) = empty_string.try_reserve(MAX_CAP + 1) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }

            // Check usize::MAX is an OOM
            if let Err(AllocError { .. }) = empty_string.try_reserve(MAX_USIZE) {
            } else {
                panic!("usize::MAX should trigger an OOM!")
            }
        }
    }

    {
        // Same basic idea, but with non-zero len
        let mut ten_bytes: String = String::from("0123456789");

        if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if guards_against_isize {
            if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!");
            }
        } else {
            if let Err(AllocError { .. }) = ten_bytes.try_reserve(MAX_CAP - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
        // Should always overflow in the add-to-len
        if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_USIZE) {
        } else {
            panic!("usize::MAX should trigger an overflow!")
        }
    }
}
6383
// Same error-classification checks as test_try_reserve, using the
// try_reserve_exact variant.
#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve_exact() {
    // This is exactly the same as test_try_reserve with the method changed.
    // See that test for comments.

    const MAX_CAP: usize = isize::MAX as usize;
    const MAX_USIZE: usize = usize::MAX;

    let guards_against_isize = usize::BITS < 64;

    {
        let mut empty_string: String = String::new();

        if let Err(CapacityOverflow) = empty_string.try_reserve_exact(MAX_CAP) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = empty_string.try_reserve_exact(MAX_CAP) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }

        if guards_against_isize {
            if let Err(CapacityOverflow) = empty_string.try_reserve_exact(MAX_CAP + 1) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!")
            }

            if let Err(CapacityOverflow) = empty_string.try_reserve_exact(MAX_USIZE) {
            } else {
                panic!("usize::MAX should trigger an overflow!")
            }
        } else {
            if let Err(AllocError { .. }) = empty_string.try_reserve_exact(MAX_CAP + 1) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }

            if let Err(AllocError { .. }) = empty_string.try_reserve_exact(MAX_USIZE) {
            } else {
                panic!("usize::MAX should trigger an OOM!")
            }
        }
    }

    {
        let mut ten_bytes: String = String::from("0123456789");

        if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if guards_against_isize {
            if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!");
            }
        } else {
            if let Err(AllocError { .. }) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
        // Adding MAX_USIZE to len always overflows the addition itself.
        if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_USIZE) {
        } else {
            panic!("usize::MAX should trigger an overflow!")
        }
    }
}
6455
// String::from(char) and char's Into<String> agree with to_string().
#[test]
fn test_from_char() {
    assert_eq!(String::from('a'), 'a'.to_string());
    let s: String = 'x'.into();
    assert_eq!(s, 'x'.to_string());
}
6462
// Concatenating two Strings via format!; byte 9 of "helloworld" is b'd'.
#[test]
fn test_str_concat() {
    let a: String = "hello".to_string();
    let b: String = "world".to_string();
    let s: String = format!("{}{}", a, b);
    // Byte literal instead of the non-idiomatic `'d' as u8` cast
    // (clippy: char_lit_as_u8).
    assert_eq!(s.as_bytes()[9], b'd');
}
6470use std::borrow::{Cow, ToOwned};
6471use std::ffi::{CStr, OsStr};
6472use std::path::Path;
6473use std::rc::Rc;
6474use std::sync::Arc;
6475
// For each target pointer type `$ty`, convert both the Borrowed and Owned
// variants of a Cow wrapping `$value` and check both round-trip to a value
// equal to the input.
macro_rules! test_from_cow {
    ($value:ident => $($ty:ty),+) => {$(
        let borrowed = <$ty>::from(Cow::Borrowed($value));
        let owned = <$ty>::from(Cow::Owned($value.to_owned()));
        assert_eq!($value, &*borrowed);
        assert_eq!($value, &*owned);
    )+};
    // Shorthand: expand to the three standard owning pointers to `$ty`.
    ($value:ident : & $ty:ty) => {
        test_from_cow!($value => Box<$ty>, Rc<$ty>, Arc<$ty>);
    }
}
6487
// Cow -> Box/Rc/Arc conversions for each unsized target type below.
#[test]
fn test_from_cow_slice() {
    let slice: &[i32] = &[1, 2, 3];
    test_from_cow!(slice: &[i32]);
}

#[test]
fn test_from_cow_str() {
    let string = "hello";
    test_from_cow!(string: &str);
}

#[test]
fn test_from_cow_c_str() {
    let string = CStr::from_bytes_with_nul(b"hello\0").unwrap();
    test_from_cow!(string: &CStr);
}

#[test]
fn test_from_cow_os_str() {
    let string = OsStr::new("hello");
    test_from_cow!(string: &OsStr);
}

#[test]
fn test_from_cow_path() {
    let path = Path::new("hello");
    test_from_cow!(path: &Path);
}

#[test]
fn cow_const() {
    // test that the methods of `Cow` are usable in a const context

    const COW: Cow<'_, str> = Cow::Borrowed("moo");

    const IS_BORROWED: bool = COW.is_borrowed();
    assert!(IS_BORROWED);

    const IS_OWNED: bool = COW.is_owned();
    assert!(!IS_OWNED);
}
6530use std::borrow::Cow;
6531use std::cell::Cell;
6532use std::collections::TryReserveError::*;
6533use std::fmt::Debug;
6534use std::iter::InPlaceIterable;
6535use std::mem::{size_of, swap};
6536use std::ops::Bound::*;
6537use std::panic::{catch_unwind, AssertUnwindSafe};
6538use std::rc::Rc;
6539use std::sync::atomic::{AtomicU32, Ordering};
6540use std::vec::{Drain, IntoIter};
6541
// Test helper: bumps the referenced counter each time it is dropped, so
// tests can assert exactly how many drops occurred.
struct DropCounter<'a> {
    count: &'a mut u32,
}

impl Drop for DropCounter<'_> {
    fn drop(&mut self) {
        *self.count += 1;
    }
}
6551
// Vec<u8> is exactly three words (ptr, cap, len).
#[test]
fn test_small_vec_struct() {
    assert_eq!(size_of::<Vec<u8>>(), size_of::<usize>() * 3);
}

// Dropping a moved-out field must not cause a second drop of its contents.
#[test]
fn test_double_drop() {
    struct TwoVec<T> {
        x: Vec<T>,
        y: Vec<T>,
    }

    let (mut count_x, mut count_y) = (0, 0);
    {
        let mut tv = TwoVec { x: Vec::new(), y: Vec::new() };
        tv.x.push(DropCounter { count: &mut count_x });
        tv.y.push(DropCounter { count: &mut count_y });

        // If Vec had a drop flag, here is where it would be zeroed.
        // Instead, it should rely on its internal state to prevent
        // doing anything significant when dropped multiple times.
        drop(tv.x);

        // Here tv goes out of scope, tv.y should be dropped, but not tv.x.
    }

    assert_eq!(count_x, 1);
    assert_eq!(count_y, 1);
}

// reserve grows capacity by at least the requested additional amount.
#[test]
fn test_reserve() {
    let mut v = Vec::new();
    assert_eq!(v.capacity(), 0);

    v.reserve(2);
    assert!(v.capacity() >= 2);

    for i in 0..16 {
        v.push(i);
    }

    assert!(v.capacity() >= 16);
    v.reserve(16);
    assert!(v.capacity() >= 32);

    v.push(16);

    v.reserve(16);
    assert!(v.capacity() >= 33)
}

// Zero-sized element types report usize::MAX capacity (no allocation needed).
#[test]
fn test_zst_capacity() {
    assert_eq!(Vec::<()>::new().capacity(), usize::MAX);
}
6608
// Index operator on Vec works with literals and with usize expressions.
#[test]
fn test_indexing() {
    let items: Vec<isize> = vec![10, 20];
    assert_eq!(items[0], 10);
    assert_eq!(items[1], 20);

    let mut idx: usize = 0;
    assert_eq!(items[idx], 10);
    assert_eq!(items[idx + 1], 20);

    idx += 1;
    assert_eq!(items[idx], 20);
    assert_eq!(items[idx - 1], 10);
}
6621
// {:?} formatting agrees between Vec and plain slices, including empties.
#[test]
fn test_debug_fmt() {
    let vec1: Vec<isize> = vec![];
    assert_eq!("[]", format!("{:?}", vec1));

    let vec2 = vec![0, 1];
    assert_eq!("[0, 1]", format!("{:?}", vec2));

    let slice: &[isize] = &[4, 5];
    assert_eq!("[4, 5]", format!("{:?}", slice));
}
6633
// Each push appends one element while keeping the earlier ones intact.
#[test]
fn test_push() {
    let expected = [1, 2, 3];
    let mut xs = vec![];
    for (idx, &n) in expected.iter().enumerate() {
        xs.push(n);
        assert_eq!(&xs[..], &expected[..=idx]);
    }
}
6644
// Vec::extend from ranges, from another Vec (the `append` specialization),
// with zero-sized types, and drop-count correctness.
#[test]
fn test_extend() {
    let mut v = Vec::new();
    let mut w = Vec::new();

    // Extending with an empty iterator is a no-op.
    v.extend(w.clone());
    assert_eq!(v, &[]);

    v.extend(0..3);
    for i in 0..3 {
        w.push(i)
    }

    assert_eq!(v, w);

    v.extend(3..10);
    for i in 3..10 {
        w.push(i)
    }

    assert_eq!(v, w);

    v.extend(w.clone()); // specializes to `append`
    assert!(v.iter().eq(w.iter().chain(w.iter())));

    // Zero sized types
    #[derive(PartialEq, Debug)]
    struct Foo;

    let mut a = Vec::new();
    let b = vec![Foo, Foo];

    a.extend(b);
    assert_eq!(a, &[Foo, Foo]);

    // Double drop
    let mut count_x = 0;
    {
        let mut x = Vec::new();
        let y = vec![DropCounter { count: &mut count_x }];
        x.extend(y);
    }
    // The moved element must be dropped exactly once.
    assert_eq!(count_x, 1);
}
6689
// extend_from_slice appends a copy of the slice's elements in order.
#[test]
fn test_extend_from_slice() {
    let head: Vec<isize> = vec![1, 2, 3, 4, 5];
    let tail: Vec<isize> = vec![6, 7, 8, 9, 0];

    let mut combined: Vec<isize> = head;
    combined.extend_from_slice(&tail);

    assert_eq!(combined, [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]);
}
6701
// Vec::extend also accepts iterators of references (&[T] and &Vec<T>),
// copying the referenced elements.
#[test]
fn test_extend_ref() {
    let mut v = vec![1, 2];
    v.extend(&[3, 4, 5]);

    assert_eq!(v.len(), 5);
    assert_eq!(v, [1, 2, 3, 4, 5]);

    let w = vec![6, 7];
    v.extend(&w);

    assert_eq!(v.len(), 7);
    assert_eq!(v, [1, 2, 3, 4, 5, 6, 7]);
}
6716
// Slicing a Vec with an interior range borrows the expected sub-slice.
#[test]
fn test_slice_from_ref() {
    let data = vec![1, 2, 3, 4, 5];
    let middle = &data[1..3];
    assert_eq!(middle, [2, 3]);
}
6724
// Mutable suffix slice: writes through &mut values[2..] are visible in the Vec.
#[test]
fn test_slice_from_mut() {
    let mut values = vec![1, 2, 3, 4, 5];
    {
        let slice = &mut values[2..];
        assert!(slice == [3, 4, 5]);
        for p in slice {
            *p += 2;
        }
    }

    assert!(values == [1, 2, 5, 6, 7]);
}

// Mutable prefix slice: writes through &mut values[..2] are visible too.
#[test]
fn test_slice_to_mut() {
    let mut values = vec![1, 2, 3, 4, 5];
    {
        let slice = &mut values[..2];
        assert!(slice == [1, 2]);
        for p in slice {
            *p += 1;
        }
    }

    assert!(values == [2, 3, 3, 4, 5]);
}

// split_at_mut yields two disjoint mutable halves that can be written
// independently.
#[test]
fn test_split_at_mut() {
    let mut values = vec![1, 2, 3, 4, 5];
    {
        let (left, right) = values.split_at_mut(2);
        {
            let left: &[_] = left;
            assert!(&left[..left.len()] == &[1, 2]);
        }
        for p in left {
            *p += 1;
        }

        {
            let right: &[_] = right;
            assert!(&right[..right.len()] == &[3, 4, 5]);
        }
        for p in right {
            *p += 2;
        }
    }

    assert_eq!(values, [2, 3, 5, 6, 7]);
}
6777
// Cloning a Vec yields an equal vector backed by separate memory.
#[test]
fn test_clone() {
    let v: Vec<i32> = vec![];
    let w = vec![1, 2, 3];

    assert_eq!(v, v.clone());

    let z = w.clone();
    assert_eq!(w, z);
    // they should be disjoint in memory.
    assert!(w.as_ptr() != z.as_ptr())
}

// clone_from across all length relationships (uses nightly `box` syntax so
// leaks would be caught by the allocator checks).
#[test]
fn test_clone_from() {
    let mut v = vec![];
    let three: Vec<Box<_>> = vec![box 1, box 2, box 3];
    let two: Vec<Box<_>> = vec![box 4, box 5];
    // zero, long
    v.clone_from(&three);
    assert_eq!(v, three);

    // equal
    v.clone_from(&three);
    assert_eq!(v, three);

    // long, short
    v.clone_from(&two);
    assert_eq!(v, two);

    // short, long
    v.clone_from(&three);
    assert_eq!(v, three)
}
6812
// retain keeps only the elements matching the predicate, preserving order.
#[test]
fn test_retain() {
    let mut numbers = vec![1, 2, 3, 4];
    numbers.retain(|&n| n % 2 == 0);
    assert_eq!(numbers, [2, 4]);
}
6819
// If the retain predicate panics after some elements were already removed
// (leaving a "hole"), every element must still be dropped exactly once.
#[test]
fn test_retain_pred_panic_with_hole() {
    let v = (0..5).map(Rc::new).collect::<Vec<_>>();
    catch_unwind(AssertUnwindSafe(|| {
        let mut v = v.clone();
        v.retain(|r| match **r {
            0 => true,
            1 => false, // removed: creates a hole before the panic
            2 => true,
            _ => panic!(),
        });
    }))
    .unwrap_err();
    // Everything is dropped when predicate panicked.
    assert!(v.iter().all(|r| Rc::strong_count(r) == 1));
}

// Same check when the predicate panics before removing anything (no hole).
#[test]
fn test_retain_pred_panic_no_hole() {
    let v = (0..5).map(Rc::new).collect::<Vec<_>>();
    catch_unwind(AssertUnwindSafe(|| {
        let mut v = v.clone();
        v.retain(|r| match **r {
            0 | 1 | 2 => true,
            _ => panic!(),
        });
    }))
    .unwrap_err();
    // Everything is dropped when predicate panicked.
    assert!(v.iter().all(|r| Rc::strong_count(r) == 1));
}

// If an element's Drop panics during retain, the remaining elements must
// still be dropped (no leaks).
#[test]
fn test_retain_drop_panic() {
    struct Wrap(Rc<i32>);

    impl Drop for Wrap {
        fn drop(&mut self) {
            if *self.0 == 3 {
                panic!();
            }
        }
    }

    let v = (0..5).map(|x| Rc::new(x)).collect::<Vec<_>>();
    catch_unwind(AssertUnwindSafe(|| {
        let mut v = v.iter().map(|r| Wrap(r.clone())).collect::<Vec<_>>();
        v.retain(|w| match *w.0 {
            0 => true,
            1 => false,
            2 => true,
            3 => false, // Drop panic.
            _ => true,
        });
    }))
    .unwrap_err();
    // Other elements are dropped when `drop` of one element panicked.
    // The panicked wrapper also has its Rc dropped.
    assert!(v.iter().all(|r| Rc::strong_count(r) == 1));
}
6880
// dedup removes consecutive equal elements; duplicates at the start, middle,
// and end are all covered.
#[test]
fn test_dedup() {
    fn case(a: Vec<i32>, b: Vec<i32>) {
        let mut v = a;
        v.dedup();
        assert_eq!(v, b);
    }
    case(vec![], vec![]);
    case(vec![1], vec![1]);
    case(vec![1, 1], vec![1]);
    case(vec![1, 2, 3], vec![1, 2, 3]);
    case(vec![1, 1, 2, 3], vec![1, 2, 3]);
    case(vec![1, 2, 2, 3], vec![1, 2, 3]);
    case(vec![1, 2, 3, 3], vec![1, 2, 3]);
    case(vec![1, 1, 2, 2, 2, 3, 3], vec![1, 2, 3]);
}

// dedup_by_key compares derived keys (here the tens digit) instead of the
// elements themselves; the first of each run survives.
#[test]
fn test_dedup_by_key() {
    fn case(a: Vec<i32>, b: Vec<i32>) {
        let mut v = a;
        v.dedup_by_key(|i| *i / 10);
        assert_eq!(v, b);
    }
    case(vec![], vec![]);
    case(vec![10], vec![10]);
    case(vec![10, 11], vec![10]);
    case(vec![10, 20, 30], vec![10, 20, 30]);
    case(vec![10, 11, 20, 30], vec![10, 20, 30]);
    case(vec![10, 20, 21, 30], vec![10, 20, 30]);
    case(vec![10, 20, 30, 31], vec![10, 20, 30]);
    case(vec![10, 11, 20, 21, 22, 30, 31], vec![10, 20, 30]);
}

// dedup_by with a custom equivalence; the closure may also mutate the kept
// element (here it accumulates the counts of merged pairs).
#[test]
fn test_dedup_by() {
    let mut vec = vec!["foo", "bar", "Bar", "baz", "bar"];
    vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b));

    assert_eq!(vec, ["foo", "bar", "baz", "bar"]);

    let mut vec = vec![("foo", 1), ("foo", 2), ("bar", 3), ("bar", 4), ("bar", 5)];
    vec.dedup_by(|a, b| {
        a.0 == b.0 && {
            b.1 += a.1;
            true
        }
    });

    assert_eq!(vec, [("foo", 3), ("bar", 12)]);
}

// dedup over owned boxes: checks the removed elements are freed correctly.
#[test]
fn test_dedup_unique() {
    let mut v0: Vec<Box<_>> = vec![box 1, box 1, box 2, box 3];
    v0.dedup();
    let mut v1: Vec<Box<_>> = vec![box 1, box 2, box 2, box 3];
    v1.dedup();
    let mut v2: Vec<Box<_>> = vec![box 1, box 2, box 3, box 3];
    v2.dedup();
    // If the boxed pointers were leaked or otherwise misused, valgrind
    // and/or rt should raise errors.
}
6944
// Vec of zero-sized values: push/pop, iteration counts, and set_len all
// behave like a normal Vec despite allocating nothing.
#[test]
fn zero_sized_values() {
    let mut v = Vec::new();
    assert_eq!(v.len(), 0);
    v.push(());
    assert_eq!(v.len(), 1);
    v.push(());
    assert_eq!(v.len(), 2);
    assert_eq!(v.pop(), Some(()));
    assert_eq!(v.pop(), Some(()));
    assert_eq!(v.pop(), None);

    assert_eq!(v.iter().count(), 0);
    v.push(());
    assert_eq!(v.iter().count(), 1);
    v.push(());
    assert_eq!(v.iter().count(), 2);

    for &() in &v {}

    assert_eq!(v.iter_mut().count(), 2);
    v.push(());
    assert_eq!(v.iter_mut().count(), 3);
    v.push(());
    assert_eq!(v.iter_mut().count(), 4);

    for &mut () in &mut v {}
    // Shrinking the length of a ZST Vec is trivially in-bounds.
    unsafe {
        v.set_len(0);
    }
    assert_eq!(v.iter_mut().count(), 0);
}

// Iterator::partition splits into (matching, non-matching) Vecs.
#[test]
fn test_partition() {
    assert_eq!(vec![].into_iter().partition(|x: &i32| *x < 3), (vec![], vec![]));
    assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 4), (vec![1, 2, 3], vec![]));
    assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 2), (vec![1], vec![2, 3]));
    assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 0), (vec![], vec![1, 2, 3]));
}
6985
// unzip splits an iterator of pairs into two parallel Vecs.
#[test]
fn test_zip_unzip() {
    let pairs = vec![(1, 4), (2, 5), (3, 6)];

    let (firsts, seconds): (Vec<_>, Vec<_>) = pairs.iter().copied().unzip();

    for (i, &(a, b)) in pairs.iter().enumerate() {
        assert_eq!((a, b), (firsts[i], seconds[i]));
    }
}
6996
// Slice equality through range indexing, both for plain slices and Vec.
#[test]
fn test_cmp() {
    let x: &[isize] = &[1, 2, 3, 4, 5];
    let cmp: &[isize] = &[1, 2, 3, 4, 5];
    assert_eq!(&x[..], cmp);
    let cmp: &[isize] = &[3, 4, 5];
    assert_eq!(&x[2..], cmp);
    let cmp: &[isize] = &[1, 2, 3];
    assert_eq!(&x[..3], cmp);
    let cmp: &[isize] = &[2, 3, 4];
    assert_eq!(&x[1..4], cmp);

    // Same checks with a Vec as the indexed container.
    let x: Vec<isize> = vec![1, 2, 3, 4, 5];
    let cmp: &[isize] = &[1, 2, 3, 4, 5];
    assert_eq!(&x[..], cmp);
    let cmp: &[isize] = &[3, 4, 5];
    assert_eq!(&x[2..], cmp);
    let cmp: &[isize] = &[1, 2, 3];
    assert_eq!(&x[..3], cmp);
    let cmp: &[isize] = &[2, 3, 4];
    assert_eq!(&x[1..4], cmp);
}
7019
// truncate drops exactly the removed elements (counted via a static).
#[test]
fn test_vec_truncate_drop() {
    static mut DROPS: u32 = 0;
    struct Elem(i32);
    impl Drop for Elem {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }
        }
    }

    let mut v = vec![Elem(1), Elem(2), Elem(3), Elem(4), Elem(5)];
    assert_eq!(unsafe { DROPS }, 0);
    v.truncate(3); // drops the last two
    assert_eq!(unsafe { DROPS }, 2);
    v.truncate(0); // drops the remaining three
    assert_eq!(unsafe { DROPS }, 5);
}

// A panicking Drop during truncate propagates (the test expects the panic).
#[test]
#[should_panic]
fn test_vec_truncate_fail() {
    struct BadElem(i32);
    impl Drop for BadElem {
        fn drop(&mut self) {
            let BadElem(ref mut x) = *self;
            if *x == 0xbadbeef {
                panic!("BadElem panic: 0xbadbeef")
            }
        }
    }

    let mut v = vec![BadElem(1), BadElem(2), BadElem(0xbadbeef), BadElem(4)];
    v.truncate(0);
}
7056
// Basic element indexing.
#[test]
fn test_index() {
    let vec = vec![1, 2, 3];
    assert!(vec[1] == 2);
}

// Indexing one past the end must panic.
#[test]
#[should_panic]
fn test_index_out_of_bounds() {
    let vec = vec![1, 2, 3];
    let _ = vec[3];
}

// `!0` is usize::MAX: a start index far past the end must panic.
#[test]
#[should_panic]
fn test_slice_out_of_bounds_1() {
    let x = vec![1, 2, 3, 4, 5];
    &x[!0..];
}

// End index past the length must panic.
#[test]
#[should_panic]
fn test_slice_out_of_bounds_2() {
    let x = vec![1, 2, 3, 4, 5];
    &x[..6];
}

// usize::MAX start with a valid end must panic (start > end).
#[test]
#[should_panic]
fn test_slice_out_of_bounds_3() {
    let x = vec![1, 2, 3, 4, 5];
    &x[!0..4];
}

// In-bounds start with an out-of-bounds end must panic.
#[test]
#[should_panic]
fn test_slice_out_of_bounds_4() {
    let x = vec![1, 2, 3, 4, 5];
    &x[1..6];
}

// A reversed range (start > end) must panic even when both are in bounds.
#[test]
#[should_panic]
fn test_slice_out_of_bounds_5() {
    let x = vec![1, 2, 3, 4, 5];
    &x[3..2];
}

// swap_remove on an empty Vec must panic.
#[test]
#[should_panic]
fn test_swap_remove_empty() {
    let mut vec = Vec::<i32>::new();
    vec.swap_remove(0);
}
7111
// Consuming iteration (for-loop by value) moves elements out in order.
#[test]
fn test_move_items() {
    let vec = vec![1, 2, 3];
    let mut vec2 = vec![];
    for i in vec {
        vec2.push(i);
    }
    assert_eq!(vec2, [1, 2, 3]);
}

// Consuming iteration in reverse via into_iter().rev().
#[test]
fn test_move_items_reverse() {
    let vec = vec![1, 2, 3];
    let mut vec2 = vec![];
    for i in vec.into_iter().rev() {
        vec2.push(i);
    }
    assert_eq!(vec2, [3, 2, 1]);
}

// Consuming iteration works for zero-sized element types too.
#[test]
fn test_move_items_zero_sized() {
    let vec = vec![(), (), ()];
    let mut vec2 = vec![];
    for i in vec {
        vec2.push(i);
    }
    assert_eq!(vec2, [(), (), ()]);
}
7141
// drain(..) on an empty Vec yields nothing and leaves it empty.
#[test]
fn test_drain_empty_vec() {
    let mut vec: Vec<i32> = vec![];
    let mut vec2: Vec<i32> = vec![];
    for i in vec.drain(..) {
        vec2.push(i);
    }
    assert!(vec.is_empty());
    assert!(vec2.is_empty());
}

// drain(..) yields every element by value and empties the source.
#[test]
fn test_drain_items() {
    let mut vec = vec![1, 2, 3];
    let mut vec2 = vec![];
    for i in vec.drain(..) {
        vec2.push(i);
    }
    assert_eq!(vec, []);
    assert_eq!(vec2, [1, 2, 3]);
}

// Drain is double-ended: .rev() yields the elements back-to-front.
#[test]
fn test_drain_items_reverse() {
    let mut vec = vec![1, 2, 3];
    let mut vec2 = vec![];
    for i in vec.drain(..).rev() {
        vec2.push(i);
    }
    assert_eq!(vec, []);
    assert_eq!(vec2, [3, 2, 1]);
}

// Draining zero-sized elements works the same way.
#[test]
fn test_drain_items_zero_sized() {
    let mut vec = vec![(), (), ()];
    let mut vec2 = vec![];
    for i in vec.drain(..) {
        vec2.push(i);
    }
    assert_eq!(vec, []);
    assert_eq!(vec2, [(), (), ()]);
}

// Drain range starting past the length must panic.
#[test]
#[should_panic]
fn test_drain_out_of_bounds() {
    let mut v = vec![1, 2, 3, 4, 5];
    v.drain(5..6);
}
7192
// Partial drains keep the surrounding elements, for Copy types, owned
// Strings (forward and reverse consumption), and zero-sized types.
#[test]
fn test_drain_range() {
    let mut v = vec![1, 2, 3, 4, 5];
    for _ in v.drain(4..) {}
    assert_eq!(v, &[1, 2, 3, 4]);

    let mut v: Vec<_> = (1..6).map(|x| x.to_string()).collect();
    for _ in v.drain(1..4) {}
    assert_eq!(v, &[1.to_string(), 5.to_string()]);

    let mut v: Vec<_> = (1..6).map(|x| x.to_string()).collect();
    for _ in v.drain(1..4).rev() {}
    assert_eq!(v, &[1.to_string(), 5.to_string()]);

    let mut v: Vec<_> = vec![(); 5];
    for _ in v.drain(1..4).rev() {}
    assert_eq!(v, &[(), ()]);
}

// Inclusive drain ranges, including ranges touching either end.
#[test]
fn test_drain_inclusive_range() {
    let mut v = vec!['a', 'b', 'c', 'd', 'e'];
    for _ in v.drain(1..=3) {}
    assert_eq!(v, &['a', 'e']);

    let mut v: Vec<_> = (0..=5).map(|x| x.to_string()).collect();
    for _ in v.drain(1..=5) {}
    assert_eq!(v, &["0".to_string()]);

    let mut v: Vec<String> = (0..=5).map(|x| x.to_string()).collect();
    for _ in v.drain(0..=5) {}
    assert_eq!(v, Vec::<String>::new());

    let mut v: Vec<_> = (0..=5).map(|x| x.to_string()).collect();
    for _ in v.drain(0..=3) {}
    assert_eq!(v, &["4".to_string(), "5".to_string()]);

    let mut v: Vec<_> = (0..=1).map(|x| x.to_string()).collect();
    for _ in v.drain(..=0) {}
    assert_eq!(v, &["1".to_string()]);
}
7234
#[test]
fn test_drain_max_vec_size() {
    // A zero-sized element type lets us build a vector of maximum length
    // without allocating; drains near usize::MAX must not overflow their
    // index arithmetic.
    let mut units = Vec::<()>::with_capacity(usize::MAX);
    // SAFETY: `()` is zero-sized, so no element memory needs initializing.
    unsafe { units.set_len(usize::MAX) };
    units.drain(usize::MAX - 1..).for_each(drop);
    assert_eq!(units.len(), usize::MAX - 1);

    // The same via an inclusive range ending at the last valid index.
    let mut units = Vec::<()>::with_capacity(usize::MAX);
    // SAFETY: as above, zero-sized elements need no initialization.
    unsafe { units.set_len(usize::MAX) };
    units.drain(usize::MAX - 1..=usize::MAX - 1).for_each(drop);
    assert_eq!(units.len(), usize::MAX - 1);
}
7251
#[test]
#[should_panic]
fn test_drain_index_overflow() {
    // `0..=usize::MAX` describes usize::MAX + 1 elements; that end index
    // overflows and must panic rather than wrap.
    let mut units = Vec::<()>::with_capacity(usize::MAX);
    // SAFETY: `()` is zero-sized, so no element memory needs initializing.
    unsafe { units.set_len(usize::MAX) };
    units.drain(0..=usize::MAX);
}
7261
#[test]
#[should_panic]
fn test_drain_inclusive_out_of_bounds() {
    // An inclusive range starting at the vector's length is out of bounds
    // and must panic.
    let mut items = vec![1, 2, 3, 4, 5];
    items.drain(5..=5);
}
7268
#[test]
#[should_panic]
fn test_drain_start_overflow() {
    // An excluded start bound of usize::MAX means a start index of
    // usize::MAX + 1, which must be rejected as an overflow, not wrapped.
    let mut items = vec![1, 2, 3];
    items.drain((Excluded(usize::MAX), Included(0)));
}
7275
#[test]
#[should_panic]
fn test_drain_end_overflow() {
    // An included end bound of usize::MAX means an end index of
    // usize::MAX + 1, which must be rejected as an overflow.
    let mut items = vec![1, 2, 3];
    items.drain((Included(0), Included(usize::MAX)));
}
7282
#[test]
fn test_drain_leak() {
    // Counts every `D::drop` call; the second field of `D` selects whether
    // that drop also panics.
    static mut DROPS: i32 = 0;

    #[derive(Debug, PartialEq)]
    struct D(u32, bool);

    impl Drop for D {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }

            if self.1 {
                panic!("panic in `drop`");
            }
        }
    }

    let mut v = vec![
        D(0, false),
        D(1, false),
        D(2, false),
        D(3, false),
        D(4, true), // this element panics while being dropped
        D(5, false),
        D(6, false),
    ];

    // Drain indices 2..=5; dropping `D(4, true)` panics mid-drain. Catch the
    // panic so the aftermath can be inspected.
    catch_unwind(AssertUnwindSafe(|| {
        v.drain(2..=5);
    }))
    .ok();

    // All four drained elements (indices 2..=5) were dropped exactly once
    // despite the panic, and the undrained head and tail stay in the vector.
    assert_eq!(unsafe { DROPS }, 4);
    assert_eq!(v, vec![D(0, false), D(1, false), D(6, false),]);
}
7320
#[test]
fn test_splice() {
    // `splice` replaces the given range with the replacement iterator's
    // items, growing or shrinking the vector as needed. Dropping the
    // returned iterator (unused here) performs the splice.
    let mut v = vec![1, 2, 3, 4, 5];
    let replacement = [10, 11, 12];
    v.splice(2..4, replacement.iter().cloned());
    assert_eq!(v, &[1, 2, 10, 11, 12, 5]);
    // A single-element replacement via `Option`'s IntoIterator.
    v.splice(1..3, Some(20));
    assert_eq!(v, &[1, 20, 11, 12, 5]);
}
7330
#[test]
fn test_splice_inclusive_range() {
    // Inclusive ranges splice out the end bound too; the returned iterator
    // yields the removed elements.
    let mut v = vec![1, 2, 3, 4, 5];
    let replacement = [10, 11, 12];
    let removed: Vec<_> = v.splice(2..=3, replacement.iter().cloned()).collect();
    assert_eq!(v, &[1, 2, 10, 11, 12, 5]);
    assert_eq!(removed, &[3, 4]);
    let removed: Vec<_> = v.splice(1..=2, Some(20)).collect();
    assert_eq!(v, &[1, 20, 11, 12, 5]);
    assert_eq!(removed, &[2, 10]);
}
7342
#[test]
#[should_panic]
fn test_splice_out_of_bounds() {
    // A splice range starting at the vector's length is out of bounds and
    // must panic.
    let mut v = vec![1, 2, 3, 4, 5];
    let replacement = [10, 11, 12];
    v.splice(5..6, replacement.iter().cloned());
}
7350
#[test]
#[should_panic]
fn test_splice_inclusive_out_of_bounds() {
    // An inclusive splice range starting at the vector's length is out of
    // bounds and must panic.
    let mut v = vec![1, 2, 3, 4, 5];
    let replacement = [10, 11, 12];
    v.splice(5..=5, replacement.iter().cloned());
}
7358
#[test]
fn test_splice_items_zero_sized() {
    // Splicing zero-sized elements: remove one `()`, insert nothing.
    let mut units = vec![(), (), ()];
    let nothing = vec![];
    let removed: Vec<_> = units.splice(1..2, nothing.iter().cloned()).collect();
    assert_eq!(units, &[(), ()]);
    assert_eq!(removed, &[()]);
}
7367
#[test]
fn test_splice_unbounded() {
    // A full-range splice with no replacement drains the entire vector.
    let mut values = vec![1, 2, 3, 4, 5];
    let removed: Vec<_> = values.splice(.., None).collect();
    assert_eq!(values, &[]);
    assert_eq!(removed, &[1, 2, 3, 4, 5]);
}
7375
#[test]
fn test_splice_forget() {
    // Leaking the `Splice` iterator (its destructor never runs) must leave
    // the vector in a valid state: the tail is simply lost, never exposed
    // uninitialized.
    let mut v = vec![1, 2, 3, 4, 5];
    let replacement = [10, 11, 12];
    std::mem::forget(v.splice(2..4, replacement.iter().cloned()));
    assert_eq!(v, &[1, 2]);
}
7383
#[test]
fn test_into_boxed_slice() {
    // Converting into a boxed slice preserves length and contents.
    let source = vec![1, 2, 3];
    let boxed = source.into_boxed_slice();
    assert_eq!(&*boxed, [1, 2, 3]);
}
7390
#[test]
fn test_append() {
    // `append` moves every element out of the donor, leaving it empty.
    let mut target = vec![1, 2, 3];
    let mut donor = vec![4, 5, 6];
    target.append(&mut donor);
    assert_eq!(target, [1, 2, 3, 4, 5, 6]);
    assert_eq!(donor, []);
}
7399
#[test]
fn test_split_off() {
    // `split_off` keeps the head (with its original capacity) and returns
    // the tail as a new vector.
    let mut head = vec![1, 2, 3, 4, 5, 6];
    let capacity_before = head.capacity();
    let tail = head.split_off(4);
    assert_eq!(head, [1, 2, 3, 4]);
    assert_eq!(tail, [5, 6]);
    assert_eq!(head.capacity(), capacity_before);
}
7409
7410#[test]
7411fn test_split_off_take_all() {
7412    let mut vec = vec![1, 2, 3, 4, 5, 6];
7413    let orig_ptr = vec.as_ptr();
7414    let orig_capacity = vec.capacity();
7415    let vec2 = vec.split_off(0);
7416    assert_eq!(vec, []);
7417    assert_eq!(vec2, [1, 2, 3, 4, 5, 6]);
7418    assert_eq!(vec.capacity(), orig_capacity);
7419    assert_eq!(vec2.as_ptr(), orig_ptr);
7420}
7421
#[test]
fn test_into_iter_as_slice() {
    // `as_slice` always views exactly the not-yet-yielded elements.
    let mut into_iter = vec!['a', 'b', 'c'].into_iter();
    assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    into_iter.next().unwrap();
    assert_eq!(into_iter.as_slice(), &['b', 'c']);
    into_iter.next().unwrap();
    into_iter.next().unwrap();
    assert_eq!(into_iter.as_slice(), &[]);
}
7433
#[test]
fn test_into_iter_as_mut_slice() {
    // Mutations made through `as_mut_slice` are observed by later `next`
    // calls and by `as_slice`.
    let mut into_iter = vec!['a', 'b', 'c'].into_iter();
    assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    let remaining = into_iter.as_mut_slice();
    remaining[0] = 'x';
    remaining[1] = 'y';
    assert_eq!(into_iter.next().unwrap(), 'x');
    assert_eq!(into_iter.as_slice(), &['y', 'c']);
}
7444
#[test]
fn test_into_iter_debug() {
    // The Debug impl prints the remaining elements wrapped in `IntoIter(..)`.
    let into_iter = vec!['a', 'b', 'c'].into_iter();
    assert_eq!(format!("{:?}", into_iter), "IntoIter(['a', 'b', 'c'])");
}
7452
#[test]
fn test_into_iter_count() {
    // `count` consumes the iterator and reports the original length.
    let n = vec![1, 2, 3].into_iter().count();
    assert_eq!(n, 3);
}
7457
#[test]
fn test_into_iter_clone() {
    // Cloning an `IntoIter` at any point must yield an independent iterator
    // over the elements remaining at that moment, in the current direction.
    fn assert_yields<I: Iterator<Item = i32>>(iter: I, expected: &[i32]) {
        let collected: Vec<i32> = iter.collect();
        assert_eq!(&collected[..], expected);
    }
    let mut iter = vec![1, 2, 3].into_iter();
    assert_yields(iter.clone(), &[1, 2, 3]);
    assert_eq!(iter.next(), Some(1));
    // Clones of the reversed adaptor behave the same way.
    let mut iter = iter.rev();
    assert_yields(iter.clone(), &[3, 2]);
    assert_eq!(iter.next(), Some(3));
    assert_yields(iter.clone(), &[2]);
    assert_eq!(iter.next(), Some(2));
    assert_yields(iter.clone(), &[]);
    assert_eq!(iter.next(), None);
}
7475
#[test]
fn test_into_iter_leak() {
    // Counts every `D::drop` call; `D(true)` panics while being dropped.
    static mut DROPS: i32 = 0;

    struct D(bool);

    impl Drop for D {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }

            if self.0 {
                panic!("panic in `drop`");
            }
        }
    }

    let v = vec![D(false), D(true), D(false)];

    // Dropping the IntoIter panics while dropping the second element; the
    // third element must still be dropped rather than leaked.
    catch_unwind(move || drop(v.into_iter())).ok();

    // All three elements were dropped exactly once despite the panic.
    assert_eq!(unsafe { DROPS }, 3);
}
7500
#[test]
fn test_from_iter_specialization() {
    // Collecting a Vec's own IntoIter straight back into a Vec must reuse
    // the original allocation (in-place collection specialization), which we
    // observe via pointer equality.
    let src: Vec<usize> = vec![0usize; 1];
    let original_ptr = src.as_ptr();
    let round_tripped = src.into_iter().collect::<Vec<_>>();
    assert_eq!(original_ptr, round_tripped.as_ptr());
}
7509
#[test]
fn test_from_iter_partially_drained_in_place_specialization() {
    // Even after some elements have been consumed from the IntoIter,
    // collecting the remainder must still reuse the source allocation.
    let src: Vec<usize> = vec![0usize; 10];
    let original_ptr = src.as_ptr();
    let mut iter = src.into_iter();
    iter.next();
    iter.next();
    let collected = iter.collect::<Vec<_>>();
    assert_eq!(original_ptr, collected.as_ptr());
}
7521
#[test]
fn test_from_iter_specialization_with_iterator_adapters() {
    // The in-place collect specialization must survive a long chain of
    // adapters, all of which implement the internal `InPlaceIterable` trait.
    fn assert_in_place_trait<T: InPlaceIterable>(_: &T) {}
    let src: Vec<usize> = vec![0usize; 256];
    let srcptr = src.as_ptr();
    let iter = src
        .into_iter()
        .enumerate()
        .map(|i| i.0 + i.1)
        .zip(std::iter::repeat(1usize))
        .map(|(a, b)| a + b)
        .map_while(Option::Some)
        .peekable()
        .skip(1)
        .map(|e| if e != usize::MAX { Ok(std::num::NonZeroUsize::new(e)) } else { Err(()) });
    // Compile-time check that the chain still advertises in-place collection.
    assert_in_place_trait(&iter);
    let sink = iter.collect::<Result<Vec<_>, _>>().unwrap();
    let sinkptr = sink.as_ptr();
    // The source allocation was reused even through the fallible collect.
    assert_eq!(srcptr, sinkptr as *const usize);
}
7542
#[test]
fn test_from_iter_specialization_head_tail_drop() {
    // Three `Rc`s track which elements the in-place collect drops:
    // `skip(1)` discards the head, `take(1)` cuts off the tail.
    let drop_count: Vec<_> = (0..=2).map(|_| Rc::new(())).collect();
    let src: Vec<_> = drop_count.iter().cloned().collect();
    let srcptr = src.as_ptr();
    let iter = src.into_iter();
    let sink: Vec<_> = iter.skip(1).take(1).collect();
    let sinkptr = sink.as_ptr();
    assert_eq!(srcptr, sinkptr, "specialization was applied");
    // strong_count == 1 means only `drop_count` still holds the Rc (the
    // clone in `src` was dropped); == 2 means the clone survived into `sink`.
    assert_eq!(Rc::strong_count(&drop_count[0]), 1, "front was dropped");
    assert_eq!(Rc::strong_count(&drop_count[1]), 2, "one element was collected");
    assert_eq!(Rc::strong_count(&drop_count[2]), 1, "tail was dropped");
    assert_eq!(sink.len(), 1);
}
7557
#[test]
fn test_from_iter_specialization_panic_during_iteration_drops() {
    // If the iterator panics mid-collect, every element — already collected,
    // in flight, or not yet reached — must still be dropped exactly once.
    let drop_count: Vec<_> = (0..=2).map(|_| Rc::new(())).collect();
    let src: Vec<_> = drop_count.iter().cloned().collect();
    let iter = src.into_iter();

    let _ = std::panic::catch_unwind(AssertUnwindSafe(|| {
        let _ = iter
            .enumerate()
            .filter_map(|(i, e)| {
                // Panic once the second element is reached.
                if i == 1 {
                    std::panic!("aborting iteration");
                }
                Some(e)
            })
            .collect::<Vec<_>>();
    }));

    // strong_count == 1: only the handle in `drop_count` remains alive.
    assert!(
        drop_count.iter().map(Rc::strong_count).all(|count| count == 1),
        "all items were dropped once"
    );
}
7581
#[test]
fn test_from_iter_specialization_panic_during_drop_leaks() {
    // Counts drops of the `DroppedTwice` variant; a count other than 1 would
    // indicate a double drop or a leak of that element.
    static mut DROP_COUNTER: usize = 0;

    #[derive(Debug)]
    enum Droppable {
        DroppedTwice(Box<i32>),
        PanicOnDrop,
    }

    impl Drop for Droppable {
        fn drop(&mut self) {
            match self {
                Droppable::DroppedTwice(_) => {
                    unsafe {
                        DROP_COUNTER += 1;
                    }
                    println!("Dropping!")
                }
                Droppable::PanicOnDrop => {
                    // Only panic on the first (non-unwinding) drop so a
                    // second drop cannot abort the process via double panic.
                    if !std::thread::panicking() {
                        panic!();
                    }
                }
            }
        }
    }

    // Buffer pointer and capacity, captured before the panic so the
    // intentionally leaked allocation can be freed afterwards.
    let mut to_free: *mut Droppable = core::ptr::null_mut();
    let mut cap = 0;

    let _ = std::panic::catch_unwind(AssertUnwindSafe(|| {
        let mut v = vec![Droppable::DroppedTwice(Box::new(123)), Droppable::PanicOnDrop];
        to_free = v.as_mut_ptr();
        cap = v.capacity();
        // `take(0)` makes the in-place collect drop all source elements;
        // `PanicOnDrop` panics there, which must not re-drop the first one.
        let _ = v.into_iter().take(0).collect::<Vec<_>>();
    }));

    // `DroppedTwice` was dropped exactly once despite the panic.
    assert_eq!(unsafe { DROP_COUNTER }, 1);
    // clean up the leak to keep miri happy
    unsafe {
        drop(Vec::from_raw_parts(to_free, 0, cap));
    }
}
7626
#[test]
fn test_cow_from() {
    // `Cow::from` wraps a Vec as `Owned` and a slice as `Borrowed`.
    let borrowed: &[_] = &["borrowed", "(slice)"];
    let owned = vec!["owned", "(vec)"];
    let (from_vec, from_slice) = (Cow::from(owned.clone()), Cow::from(borrowed));
    match (from_vec, from_slice) {
        (Cow::Owned(o), Cow::Borrowed(b)) => assert!(o == owned && b == borrowed),
        _ => panic!("invalid `Cow::from`"),
    }
}
7636
#[test]
fn test_from_cow() {
    // `Vec::from(Cow)` clones the borrowed case and moves the owned case.
    let borrowed: &[_] = &["borrowed", "(slice)"];
    let owned = vec!["owned", "(vec)"];
    let from_borrowed = Vec::from(Cow::Borrowed(borrowed));
    assert_eq!(from_borrowed, vec!["borrowed", "(slice)"]);
    let from_owned = Vec::from(Cow::Owned(owned));
    assert_eq!(from_owned, vec!["owned", "(vec)"]);
}
7644
// Compile-time-only check that `Drain` and `IntoIter` are covariant in their
// element type: values holding `'static` strings must coerce to ones holding
// shorter-lived strings. Never called at runtime.
#[allow(dead_code)]
fn assert_covariance() {
    fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> {
        d
    }
    fn into_iter<'new>(i: IntoIter<&'static str>) -> IntoIter<&'new str> {
        i
    }
}
7654
#[test]
fn from_into_inner() {
    // An untouched IntoIter collects back into the very same allocation.
    let source = vec![1, 2, 3];
    let original_ptr = source.as_ptr();
    let collected = source.into_iter().collect::<Vec<_>>();
    assert_eq!(collected, [1, 2, 3]);
    assert_eq!(collected.as_ptr(), original_ptr);

    // Once the iterator has been advanced, the collected elements no longer
    // start where the second element used to live.
    let second_elem_ptr = &collected[1] as *const _;
    let mut iter = collected.into_iter();
    iter.next().unwrap();
    let rest = iter.collect::<Vec<_>>();
    assert_eq!(rest, [2, 3]);
    assert!(second_elem_ptr != rest.as_ptr());
}
7670
#[test]
fn overaligned_allocations() {
    // Growing and shrinking must keep the buffer correctly aligned for an
    // over-aligned element type (256-byte alignment here), and must not
    // corrupt the stored value.
    #[repr(align(256))]
    struct Foo(usize);
    let mut buf = vec![Foo(273)];
    for extra in 0..0x1000 {
        buf.reserve_exact(extra);
        assert_eq!(buf[0].0, 273);
        assert_eq!(buf.as_ptr() as usize & 0xff, 0);
        buf.shrink_to_fit();
        assert_eq!(buf[0].0, 273);
        assert_eq!(buf.as_ptr() as usize & 0xff, 0);
    }
}
7685
#[test]
fn drain_filter_empty() {
    // On an empty vector the predicate never runs; the iterator is exhausted
    // from the start and its size hint stays at (0, Some(0)).
    let mut empty: Vec<i32> = vec![];

    {
        let mut filtered = empty.drain_filter(|_| true);
        for _ in 0..2 {
            assert_eq!(filtered.size_hint(), (0, Some(0)));
            assert_eq!(filtered.next(), None);
        }
        assert_eq!(filtered.size_hint(), (0, Some(0)));
    }
    assert_eq!(empty.len(), 0);
    assert_eq!(empty, vec![]);
}
7701
#[test]
fn drain_filter_zst() {
    // Zero-sized elements: an always-true filter removes everything, and the
    // size hint's upper bound tracks how many elements remain to visit.
    let mut units = vec![(), (), (), (), ()];
    let original_len = units.len();
    let mut removed = 0;
    {
        let mut filtered = units.drain_filter(|_| true);
        assert_eq!(filtered.size_hint(), (0, Some(original_len)));
        while filtered.next().is_some() {
            removed += 1;
            assert_eq!(filtered.size_hint(), (0, Some(original_len - removed)));
        }
        assert_eq!(filtered.size_hint(), (0, Some(0)));
        assert_eq!(filtered.next(), None);
        assert_eq!(filtered.size_hint(), (0, Some(0)));
    }

    assert_eq!(removed, original_len);
    assert_eq!(units.len(), 0);
    assert_eq!(units, vec![]);
}
7723
#[test]
fn drain_filter_false() {
    // An always-false filter yields nothing and leaves the vector intact.
    let mut values = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

    let original_len = values.len();
    let mut yielded = 0;
    {
        let mut filtered = values.drain_filter(|_| false);
        assert_eq!(filtered.size_hint(), (0, Some(original_len)));
        while filtered.next().is_some() {
            yielded += 1;
        }
        assert_eq!(filtered.size_hint(), (0, Some(0)));
        assert_eq!(filtered.next(), None);
        assert_eq!(filtered.size_hint(), (0, Some(0)));
    }

    assert_eq!(yielded, 0);
    assert_eq!(values.len(), original_len);
    assert_eq!(values, vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
}
7745
#[test]
fn drain_filter_true() {
    // An always-true filter removes every element, one per `next` call, with
    // the size hint's upper bound shrinking as elements are taken.
    let mut values = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

    let original_len = values.len();
    let mut removed = 0;
    {
        let mut filtered = values.drain_filter(|_| true);
        assert_eq!(filtered.size_hint(), (0, Some(original_len)));
        while filtered.next().is_some() {
            removed += 1;
            assert_eq!(filtered.size_hint(), (0, Some(original_len - removed)));
        }
        assert_eq!(filtered.size_hint(), (0, Some(0)));
        assert_eq!(filtered.next(), None);
        assert_eq!(filtered.size_hint(), (0, Some(0)));
    }

    assert_eq!(removed, original_len);
    assert_eq!(values.len(), 0);
    assert_eq!(values, vec![]);
}
7768
#[test]
fn drain_filter_complex() {
    // Each case drains the even numbers out of a mix of kept (+) and removed
    // (x) elements; the legend above each vector marks the removed positions.
    {
        //                [+xxx++++++xxxxx++++x+x++]
        let mut vec = vec![
            1, 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37,
            39,
        ];

        let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
        assert_eq!(removed.len(), 10);
        assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);

        assert_eq!(vec.len(), 14);
        assert_eq!(vec, vec![1, 7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]);
    }

    {
        // The very first element is removed.
        //                [xxx++++++xxxxx++++x+x++]
        let mut vec = vec![
            2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, 39,
        ];

        let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
        assert_eq!(removed.len(), 10);
        assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);

        assert_eq!(vec.len(), 13);
        assert_eq!(vec, vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]);
    }

    {
        // Both the first and the last element are removed.
        //                [xxx++++++xxxxx++++x+x]
        let mut vec =
            vec![2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36];

        let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
        assert_eq!(removed.len(), 10);
        assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);

        assert_eq!(vec.len(), 11);
        assert_eq!(vec, vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35]);
    }

    {
        // All removals form a contiguous prefix.
        //                [xxxxxxxxxx+++++++++++]
        let mut vec = vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19];

        let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
        assert_eq!(removed.len(), 10);
        assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);

        assert_eq!(vec.len(), 10);
        assert_eq!(vec, vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]);
    }

    {
        // All removals form a contiguous suffix.
        //                [+++++++++++xxxxxxxxxx]
        let mut vec = vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20];

        let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
        assert_eq!(removed.len(), 10);
        assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);

        assert_eq!(vec.len(), 10);
        assert_eq!(vec, vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]);
    }
}
7837
// FIXME: re-enable emscripten once it can unwind again
#[test]
#[cfg(not(target_os = "emscripten"))]
fn drain_filter_consumed_panic() {
    use std::rc::Rc;
    use std::sync::Mutex;

    // Records one drop per element so double drops and leaks are detectable.
    struct Check {
        index: usize,
        drop_counts: Rc<Mutex<Vec<usize>>>,
    }

    impl Drop for Check {
        fn drop(&mut self) {
            self.drop_counts.lock().unwrap()[self.index] += 1;
            println!("drop: {}", self.index);
        }
    }

    let check_count = 10;
    let drop_counts = Rc::new(Mutex::new(vec![0_usize; check_count]));
    let mut data: Vec<Check> = (0..check_count)
        .map(|index| Check { index, drop_counts: Rc::clone(&drop_counts) })
        .collect();

    // The filter panics at index 2 while the DrainFilter is being consumed.
    let _ = std::panic::catch_unwind(move || {
        let filter = |c: &mut Check| {
            if c.index == 2 {
                panic!("panic at index: {}", c.index);
            }
            // Verify that if the filter could panic again on another element
            // that it would not cause a double panic and all elements of the
            // vec would still be dropped exactly once.
            if c.index == 4 {
                panic!("panic at index: {}", c.index);
            }
            c.index < 6
        };
        let drain = data.drain_filter(filter);

        // NOTE: The DrainFilter is explicitly consumed
        drain.for_each(drop);
    });

    let drop_counts = drop_counts.lock().unwrap();
    assert_eq!(check_count, drop_counts.len());

    // Every element must have been dropped exactly once despite the panic.
    for (index, count) in drop_counts.iter().cloned().enumerate() {
        assert_eq!(1, count, "unexpected drop count at index: {} (count: {})", index, count);
    }
}
7889
// FIXME: Re-enable emscripten once it can catch panics
#[test]
#[cfg(not(target_os = "emscripten"))]
fn drain_filter_unconsumed_panic() {
    use std::rc::Rc;
    use std::sync::Mutex;

    // Records one drop per element so double drops and leaks are detectable.
    struct Check {
        index: usize,
        drop_counts: Rc<Mutex<Vec<usize>>>,
    }

    impl Drop for Check {
        fn drop(&mut self) {
            self.drop_counts.lock().unwrap()[self.index] += 1;
            println!("drop: {}", self.index);
        }
    }

    let check_count = 10;
    let drop_counts = Rc::new(Mutex::new(vec![0_usize; check_count]));
    let mut data: Vec<Check> = (0..check_count)
        .map(|index| Check { index, drop_counts: Rc::clone(&drop_counts) })
        .collect();

    // Unlike drain_filter_consumed_panic, the panic here fires from the
    // DrainFilter's own destructor rather than from an explicit `next`.
    let _ = std::panic::catch_unwind(move || {
        let filter = |c: &mut Check| {
            if c.index == 2 {
                panic!("panic at index: {}", c.index);
            }
            // Verify that if the filter could panic again on another element
            // that it would not cause a double panic and all elements of the
            // vec would still be dropped exactly once.
            if c.index == 4 {
                panic!("panic at index: {}", c.index);
            }
            c.index < 6
        };
        let _drain = data.drain_filter(filter);

        // NOTE: The DrainFilter is dropped without being consumed
    });

    let drop_counts = drop_counts.lock().unwrap();
    assert_eq!(check_count, drop_counts.len());

    // Every element must have been dropped exactly once despite the panic.
    for (index, count) in drop_counts.iter().cloned().enumerate() {
        assert_eq!(1, count, "unexpected drop count at index: {} (count: {})", index, count);
    }
}
7940
#[test]
fn drain_filter_unconsumed() {
    // Dropping an unconsumed DrainFilter still applies the filter to the
    // whole vector: the odd elements are removed, the evens remain.
    let mut values = vec![1, 2, 3, 4];
    let pending = values.drain_filter(|&mut x| x % 2 != 0);
    drop(pending);
    assert_eq!(values, [2, 4]);
}
7948
#[test]
fn test_reserve_exact() {
    // Same scenarios as test_reserve, exercised through `reserve_exact`.
    // Capacity assertions are lower bounds only: the allocator may round up.
    let mut buf = Vec::new();
    assert_eq!(buf.capacity(), 0);

    buf.reserve_exact(2);
    assert!(buf.capacity() >= 2);

    buf.extend(0..16);
    assert!(buf.capacity() >= 16);

    buf.reserve_exact(16);
    assert!(buf.capacity() >= 32);

    buf.push(16);

    buf.reserve_exact(16);
    assert!(buf.capacity() >= 33)
}
7972
#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve() {
    // These are the interesting cases:
    // * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM)
    // * > isize::MAX should always fail
    //    * On 16/32-bit should CapacityOverflow
    //    * On 64-bit should OOM
    // * overflow may trigger when adding `len` to `cap` (in number of elements)
    // * overflow may trigger when multiplying `new_cap` by size_of::<T> (to get bytes)

    const MAX_CAP: usize = isize::MAX as usize;
    const MAX_USIZE: usize = usize::MAX;

    // On 16/32-bit, we check that allocations don't exceed isize::MAX,
    // on 64-bit, we assume the OS will give an OOM for such a ridiculous size.
    // Any platform that succeeds for these requests is technically broken with
    // ptr::offset because LLVM is the worst.
    let guards_against_isize = usize::BITS < 64;

    {
        // Note: basic stuff is checked by test_reserve
        let mut empty_bytes: Vec<u8> = Vec::new();

        // Check isize::MAX doesn't count as an overflow
        if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        // Play it again, frank! (just to be sure)
        if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }

        if guards_against_isize {
            // Check isize::MAX + 1 does count as overflow
            if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP + 1) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!")
            }

            // Check usize::MAX does count as overflow
            if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) {
            } else {
                panic!("usize::MAX should trigger an overflow!")
            }
        } else {
            // Check isize::MAX + 1 is an OOM
            if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_CAP + 1) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }

            // Check usize::MAX is an OOM
            if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_USIZE) {
            } else {
                panic!("usize::MAX should trigger an OOM!")
            }
        }
    }

    {
        // Same basic idea, but with non-zero len
        let mut ten_bytes: Vec<u8> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

        // len (10) + (MAX_CAP - 10) == isize::MAX exactly: no overflow.
        if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        // len + (MAX_CAP - 9) == isize::MAX + 1: overflow on 16/32-bit,
        // an OOM on 64-bit.
        if guards_against_isize {
            if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!");
            }
        } else {
            if let Err(AllocError { .. }) = ten_bytes.try_reserve(MAX_CAP - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
        // Should always overflow in the add-to-len
        if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_USIZE) {
        } else {
            panic!("usize::MAX should trigger an overflow!")
        }
    }

    {
        // Same basic idea, but with interesting type size
        // (u32 is 4 bytes, so byte counts are capacity * 4).
        let mut ten_u32s: Vec<u32> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

        if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if guards_against_isize {
            if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!");
            }
        } else {
            if let Err(AllocError { .. }) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
        // Should fail in the mul-by-size
        if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_USIZE - 20) {
        } else {
            panic!("usize::MAX should trigger an overflow!");
        }
    }
}
8090
#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve_exact() {
    // This is exactly the same as test_try_reserve with the method changed.
    // See that test for comments.

    const MAX_CAP: usize = isize::MAX as usize;
    const MAX_USIZE: usize = usize::MAX;

    // Consistent with test_try_reserve: 16/32-bit targets report
    // CapacityOverflow past isize::MAX, 64-bit targets report OOM.
    // (Previously written as `size_of::<usize>() < 8`, which is the same
    // predicate expressed less directly.)
    let guards_against_isize = usize::BITS < 64;

    {
        let mut empty_bytes: Vec<u8> = Vec::new();

        // isize::MAX itself must not count as an overflow.
        if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }

        if guards_against_isize {
            if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP + 1) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!")
            }

            if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_USIZE) {
            } else {
                panic!("usize::MAX should trigger an overflow!")
            }
        } else {
            if let Err(AllocError { .. }) = empty_bytes.try_reserve_exact(MAX_CAP + 1) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }

            if let Err(AllocError { .. }) = empty_bytes.try_reserve_exact(MAX_USIZE) {
            } else {
                panic!("usize::MAX should trigger an OOM!")
            }
        }
    }

    {
        // Non-zero len: `len + additional` is what must not overflow.
        let mut ten_bytes: Vec<u8> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

        if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if guards_against_isize {
            if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!");
            }
        } else {
            if let Err(AllocError { .. }) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
        // Always overflows in the add-to-len step.
        if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_USIZE) {
        } else {
            panic!("usize::MAX should trigger an overflow!")
        }
    }

    {
        // Interesting element size: u32 is 4 bytes, so the byte count is
        // capacity * 4 and can overflow in the mul-by-size step.
        let mut ten_u32s: Vec<u32> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

        if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if guards_against_isize {
            if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!");
            }
        } else {
            if let Err(AllocError { .. }) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
        // Fails in the mul-by-size step.
        if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_USIZE - 20) {
        } else {
            panic!("usize::MAX should trigger an overflow!")
        }
    }
}
8188
#[test]
fn test_stable_pointers() {
    /// Pull an element from the iterator, then drop it.
    /// Useful to cover both the `next` and `drop` paths of an iterator.
    fn next_then_drop<I: Iterator>(mut i: I) {
        i.next().unwrap();
        drop(i);
    }

    // Test that, if we reserved enough space, adding and removing elements does not
    // invalidate references into the vector (such as `v0`).  This test also
    // runs in Miri, which would detect such problems.
    // Note that this test does *not* constitute a stable guarantee that all these functions do not
    // reallocate! Only what is explicitly documented at
    // <https://doc.rust-lang.org/nightly/std/vec/struct.Vec.html#guarantees> is stably guaranteed.
    let mut v = Vec::with_capacity(128);
    v.push(13);

    // Laundering the lifetime -- we take care that `v` does not reallocate, so that's okay.
    let v0 = &mut v[0];
    let v0 = unsafe { &mut *(v0 as *mut _) };
    // Now do a bunch of things and occasionally use `v0` again to assert it is still valid.
    // Every operation below stays within the reserved capacity of 128, so the
    // buffer is never moved and `v0` keeps pointing at element 0.

    // Pushing/inserting and popping/removing
    v.push(1);
    v.push(2);
    v.insert(1, 1);
    assert_eq!(*v0, 13);
    v.remove(1);
    v.pop().unwrap();
    assert_eq!(*v0, 13);
    v.push(1);
    v.swap_remove(1);
    assert_eq!(v.len(), 2);
    v.swap_remove(1); // swap_remove the last element
    assert_eq!(*v0, 13);

    // Appending
    v.append(&mut vec![27, 19]);
    assert_eq!(*v0, 13);

    // Extending -- each line drives a different `Extend`/`SpecExtend` code path.
    v.extend_from_slice(&[1, 2]);
    v.extend(&[1, 2]); // `slice::Iter` (with `T: Copy`) specialization
    v.extend(vec![2, 3]); // `vec::IntoIter` specialization
    v.extend(std::iter::once(3)); // `TrustedLen` specialization
    v.extend(std::iter::empty::<i32>()); // `TrustedLen` specialization with empty iterator
    v.extend(std::iter::once(3).filter(|_| true)); // base case
    v.extend(std::iter::once(&3)); // `cloned` specialization
    assert_eq!(*v0, 13);

    // Truncation
    v.truncate(2);
    assert_eq!(*v0, 13);

    // Resizing
    v.resize_with(v.len() + 10, || 42);
    assert_eq!(*v0, 13);
    // Shrinking resize never calls the closure, hence the `panic!` body is safe.
    v.resize_with(2, || panic!());
    assert_eq!(*v0, 13);

    // No-op reservation
    v.reserve(32);
    v.reserve_exact(32);
    assert_eq!(*v0, 13);

    // Partial draining
    v.resize_with(10, || 42);
    next_then_drop(v.drain(5..));
    assert_eq!(*v0, 13);

    // Splicing
    v.resize_with(10, || 42);
    next_then_drop(v.splice(5.., vec![1, 2, 3, 4, 5])); // empty tail after range
    assert_eq!(*v0, 13);
    next_then_drop(v.splice(5..8, vec![1])); // replacement is smaller than original range
    assert_eq!(*v0, 13);
    next_then_drop(v.splice(5..6, vec![1; 10].into_iter().filter(|_| true))); // lower bound not exact
    assert_eq!(*v0, 13);

    // spare_capacity_mut
    v.spare_capacity_mut();
    assert_eq!(*v0, 13);

    // Smoke test that would fire even outside Miri if an actual relocation happened.
    *v0 -= 13;
    assert_eq!(v[0], 0);
}
8277
8278// https://github.com/rust-lang/rust/pull/49496 introduced specialization based on:
8279//
8280// ```
8281// unsafe impl<T: ?Sized> IsZero for *mut T {
8282//     fn is_zero(&self) -> bool {
8283//         (*self).is_null()
8284//     }
8285// }
8286// ```
8287//
8288// … to call `RawVec::with_capacity_zeroed` for creating `Vec<*mut T>`,
8289// which is incorrect for fat pointers since `<*mut T>::is_null` only looks at the data component.
8290// That is, a fat pointer can be “null” without being made entirely of zero bits.
#[test]
fn vec_macro_repeating_null_raw_fat_pointer() {
    // Build a fat raw pointer with a *real* vtable but a null data component.
    let raw_dyn = &mut (|| ()) as &mut dyn Fn() as *mut dyn Fn();
    let vtable = dbg!(ptr_metadata(raw_dyn));
    let null_raw_dyn = ptr_from_raw_parts(std::ptr::null_mut(), vtable);
    assert!(null_raw_dyn.is_null());

    // `vec![ptr; 1]` must preserve the vtable; the buggy `is_zero`
    // specialization would have zero-filled the allocation and lost it.
    let vec = vec![null_raw_dyn; 1];
    dbg!(ptr_metadata(vec[0]));
    assert!(vec[0] == null_raw_dyn);

    // Polyfill for https://github.com/rust-lang/rfcs/pull/2580

    // Extracts the vtable component of a fat `*mut dyn Fn()`.
    fn ptr_metadata(ptr: *mut dyn Fn()) -> *mut () {
        unsafe { std::mem::transmute::<*mut dyn Fn(), DynRepr>(ptr).vtable }
    }

    // Reassembles a fat pointer from its (data, vtable) parts.
    fn ptr_from_raw_parts(data: *mut (), vtable: *mut ()) -> *mut dyn Fn() {
        unsafe { std::mem::transmute::<DynRepr, *mut dyn Fn()>(DynRepr { data, vtable }) }
    }

    // Mirror of the compiler's fat-pointer layout as (data, vtable) —
    // NOTE(review): this relies on an unspecified internal representation;
    // confirm it still matches the current compiler before relying on it.
    #[repr(C)]
    struct DynRepr {
        data: *mut (),
        vtable: *mut (),
    }
}
8318
// This test will likely fail if you change the capacities used in
// `RawVec::grow_amortized`.
#[test]
fn test_push_growth_strategy() {
    // Each `(start, end, expected)` step says: pushes number `start..end`
    // must all leave the vector's capacity at exactly `expected`.

    // If the element size is 1, we jump from 0 to 8, then double.
    {
        let mut v1: Vec<u8> = vec![];
        assert_eq!(v1.capacity(), 0);

        for (start, end, expected) in [(0, 8, 8usize), (8, 16, 16), (16, 32, 32), (32, 64, 64)] {
            for _ in start..end {
                v1.push(0);
                assert_eq!(v1.capacity(), expected);
            }
        }
    }

    // If the element size is 2..=1024, we jump from 0 to 4, then double.
    {
        let mut v2: Vec<u16> = vec![];
        let mut v1024: Vec<[u8; 1024]> = vec![];
        assert_eq!(v2.capacity(), 0);
        assert_eq!(v1024.capacity(), 0);

        for (start, end, expected) in
            [(0, 4, 4usize), (4, 8, 8), (8, 16, 16), (16, 32, 32), (32, 64, 64)]
        {
            for _ in start..end {
                v2.push(0);
                v1024.push([0; 1024]);
                assert_eq!(v2.capacity(), expected);
                assert_eq!(v1024.capacity(), expected);
            }
        }
    }

    // If the element size is > 1024, we jump from 0 to 1, then double.
    {
        let mut v1025: Vec<[u8; 1025]> = vec![];
        assert_eq!(v1025.capacity(), 0);

        for (start, end, expected) in
            [(0, 1, 1usize), (1, 2, 2), (2, 4, 4), (4, 8, 8), (8, 16, 16), (16, 32, 32), (32, 64, 64)]
        {
            for _ in start..end {
                v1025.push([0; 1025]);
                assert_eq!(v1025.capacity(), expected);
            }
        }
    }
}
8433
// Generates a helper `fn $name(a: Vec<A>, b: $type)` that exercises
// `Vec<A>`'s `PartialEq<$type>` impl both via the `==` operator and via
// `assert_eq!` (which also requires `Debug`).
macro_rules! generate_assert_eq_vec_and_prim {
    ($name:ident<$B:ident>($type:ty)) => {
        fn $name<A: PartialEq<$B> + Debug, $B: Debug>(a: Vec<A>, b: $type) {
            assert!(a == b);
            assert_eq!(a, b);
        }
    };
}

// Instantiate the helper for `Vec<A>` vs. shared slice and vs. 3-element array.
generate_assert_eq_vec_and_prim! { assert_eq_vec_and_slice  <B>(&[B])   }
generate_assert_eq_vec_and_prim! { assert_eq_vec_and_array_3<B>([B; 3]) }
8445
#[test]
fn partialeq_vec_and_prim() {
    // Drive the macro-generated helpers with a concrete Vec/slice/array triple.
    assert_eq_vec_and_slice(Vec::from([1, 2, 3]), &[1, 2, 3]);
    assert_eq_vec_and_array_3(Vec::from([1, 2, 3]), [1, 2, 3]);
}
8451
// Given a 2-element and a 3-element value on each side, asserts the full
// cross-type `PartialEq` contract: same-length pairs compare equal,
// different-length pairs compare unequal — via both the raw operators and
// the `assert_eq!`/`assert_ne!` macros.
macro_rules! assert_partial_eq_valid {
    ($a2:expr, $a3:expr; $b2:expr, $b3: expr) => {
        assert!($a2 == $b2);
        assert!($a2 != $b3);
        assert!($a3 != $b2);
        assert!($a3 == $b3);
        assert_eq!($a2, $b2);
        assert_ne!($a2, $b3);
        assert_ne!($a3, $b2);
        assert_eq!($a3, $b3);
    };
}
8464
#[test]
fn partialeq_vec_full() {
    // Checks `Vec`'s `PartialEq` impls against every comparable counterpart:
    // Vec, shared slice, (coerced) mutable slice, array, and array reference.
    let vec2: Vec<_> = vec![1, 2];
    let vec3: Vec<_> = vec![1, 2, 3];
    let slice2: &[_] = &[1, 2];
    let slice3: &[_] = &[1, 2, 3];
    let slicemut2: &[_] = &mut [1, 2];
    let slicemut3: &[_] = &mut [1, 2, 3];
    let array2: [_; 2] = [1, 2];
    let array3: [_; 3] = [1, 2, 3];
    let arrayref2: &[_; 2] = &[1, 2];
    let arrayref3: &[_; 3] = &[1, 2, 3];

    // Each invocation asserts ==/!= in every combination for the given pair.
    assert_partial_eq_valid!(vec2,vec3; vec2,vec3);
    assert_partial_eq_valid!(vec2,vec3; slice2,slice3);
    assert_partial_eq_valid!(vec2,vec3; slicemut2,slicemut3);
    assert_partial_eq_valid!(slice2,slice3; vec2,vec3);
    assert_partial_eq_valid!(slicemut2,slicemut3; vec2,vec3);
    assert_partial_eq_valid!(vec2,vec3; array2,array3);
    assert_partial_eq_valid!(vec2,vec3; arrayref2,arrayref3);
    assert_partial_eq_valid!(vec2,vec3; arrayref2[..],arrayref3[..]);
}
8487
#[test]
fn test_vec_cycle() {
    // A node whose vector may hold references to other nodes, allowing
    // reference cycles; this must compile and drop cleanly.
    #[derive(Debug)]
    struct C<'a> {
        v: Vec<Cell<Option<&'a C<'a>>>>,
    }

    impl<'a> C<'a> {
        fn new() -> C<'a> {
            C { v: Vec::new() }
        }
    }

    let mut c1 = C::new();
    let mut c2 = C::new();
    let mut c3 = C::new();

    // Give every node two empty slots.
    for node in [&mut c1, &mut c2, &mut c3] {
        for _ in 0..2 {
            node.v.push(Cell::new(None));
        }
    }

    // Wire up a cycle (including a self-reference in c2).
    c1.v[0].set(Some(&c2));
    c1.v[1].set(Some(&c3));

    c2.v[0].set(Some(&c2));
    c2.v[1].set(Some(&c3));

    c3.v[0].set(Some(&c1));
    c3.v[1].set(Some(&c2));
}
8525
#[test]
fn test_vec_cycle_wrapped() {
    // Same cycle shape as a plain node vector, but with the vector wrapped
    // inside a second struct layer.
    struct Refs<'a> {
        v: Vec<Cell<Option<&'a C<'a>>>>,
    }

    struct C<'a> {
        refs: Refs<'a>,
    }

    impl<'a> Refs<'a> {
        fn new() -> Refs<'a> {
            Refs { v: Vec::new() }
        }
    }

    impl<'a> C<'a> {
        fn new() -> C<'a> {
            C { refs: Refs::new() }
        }
    }

    let mut c1 = C::new();
    let mut c2 = C::new();
    let mut c3 = C::new();

    // Give every node two empty slots.
    for node in [&mut c1, &mut c2, &mut c3] {
        for _ in 0..2 {
            node.refs.v.push(Cell::new(None));
        }
    }

    // Wire up a cycle (including a self-reference in c2).
    c1.refs.v[0].set(Some(&c2));
    c1.refs.v[1].set(Some(&c3));
    c2.refs.v[0].set(Some(&c2));
    c2.refs.v[1].set(Some(&c3));
    c3.refs.v[0].set(Some(&c1));
    c3.refs.v[1].set(Some(&c2));
}
8566
#[test]
fn test_zero_sized_vec_push() {
    // Pushing zero-sized elements must track `len` correctly even though
    // no storage is actually required.
    const N: usize = 8;

    for len in 0..N {
        let mut units = Vec::with_capacity(len);
        assert_eq!(units.len(), 0);
        assert!(units.capacity() >= len);
        (0..len).for_each(|_| units.push(()));
        assert_eq!(units.len(), len);
        assert_eq!(units.iter().count(), len);
        units.clear();
    }
}
8583
#[test]
fn test_vec_macro_repeat() {
    // `vec![elem; n]` must equal `n` copies of `elem`, including `n == 0`.
    for n in (0..=3).rev() {
        assert_eq!(vec![1; n], vec![1].repeat(n));
    }

    // from_elem syntax (see RFC 832): the element expression is evaluated
    // once and then cloned, so a non-Copy element works too.
    let el = Box::new(1);
    let n = 3;
    let expected: Vec<_> = (0..n).map(|_| Box::new(1)).collect();
    assert_eq!(vec![el; n], expected);
}
8596
#[test]
fn test_vec_swap() {
    // `Vec::swap` exchanges exactly the two addressed elements; `mem::swap`
    // can exchange a vector slot with an outside value.
    let mut values: Vec<isize> = (0..=6).collect();
    values.swap(2, 4);
    assert_eq!(values[2], 4);
    assert_eq!(values[4], 2);
    let mut n = 42;
    swap(&mut n, &mut values[0]);
    assert_eq!(values[0], 42);
    assert_eq!(n, 0);
}
8608
#[test]
fn test_extend_from_within_spec() {
    // A type that is `Copy` but whose `Clone::clone` panics: the copy
    // specialization of `extend_from_within` must never invoke `clone`.
    #[derive(Copy)]
    struct CopyOnly;

    impl Clone for CopyOnly {
        fn clone(&self) -> Self {
            panic!("extend_from_within must use specialization on copy");
        }
    }

    let mut v = vec![CopyOnly, CopyOnly];
    v.extend_from_within(..);
}
8622
#[test]
fn test_extend_from_within_clone() {
    // Non-Copy elements are cloned and appended in source order.
    let mut v: Vec<String> =
        ["sssss", "12334567890", "c"].iter().map(|s| s.to_string()).collect();
    v.extend_from_within(1..);

    assert_eq!(v, ["sssss", "12334567890", "c", "12334567890", "c"]);
}
8630
#[test]
fn test_extend_from_within_complete_rande() {
    // A full range (`..`) duplicates the whole vector.
    // (NB: "rande" is a historical typo for "range"; the name is kept.)
    let mut v: Vec<i32> = (0..4).collect();
    v.extend_from_within(..);

    assert_eq!(v, [0, 1, 2, 3, 0, 1, 2, 3]);
}
8638
#[test]
fn test_extend_from_within_empty_rande() {
    // An empty source range appends nothing.
    let mut v: Vec<i32> = (0..4).collect();
    v.extend_from_within(1..1);

    assert_eq!(v, [0, 1, 2, 3]);
}
8646
#[test]
#[should_panic]
fn test_extend_from_within_out_of_rande() {
    // The source range end (3) exceeds `len` (2), which must panic.
    let mut values = vec![0, 1];
    values.extend_from_within(..3);
}
8653
#[test]
fn test_extend_from_within_zst() {
    // Zero-sized elements: only the length changes (8 + 4 copied = 12).
    let mut units = vec![(); 8];
    units.extend_from_within(3..7);

    assert_eq!(units, [(); 12]);
}
8661
#[test]
fn test_extend_from_within_empty_vec() {
    // A full-range copy of an empty vector is a no-op.
    let mut empty = Vec::<i32>::new();
    empty.extend_from_within(..);

    assert_eq!(empty, []);
}
8669
#[test]
fn test_extend_from_within() {
    // Inclusive ranges; the second call sees the elements the first appended.
    let mut v: Vec<String> = ["a", "b", "c"].iter().map(|s| s.to_string()).collect();
    v.extend_from_within(1..=2);
    v.extend_from_within(..=1);

    assert_eq!(v, ["a", "b", "c", "b", "c", "a", "b"]);
}
8678
#[test]
fn test_vec_dedup_by() {
    // With a custom predicate (equal absolute values), the *first* element of
    // each run is the one kept.
    let mut values: Vec<i32> = vec![1, -1, 2, 3, 1, -5, 5, -2, 2];

    values.dedup_by(|a, b| a.abs() == b.abs());

    assert_eq!(values, [1, 2, 3, 1, -5, -2]);
}
8687
#[test]
fn test_vec_dedup_empty() {
    // `dedup` on an empty vector is a no-op.
    let mut values: Vec<i32> = Vec::new();

    values.dedup();

    assert_eq!(values, []);
}
8696
#[test]
fn test_vec_dedup_one() {
    // A single element has no neighbor to compare with and is kept as-is.
    let mut values = vec![12i32];

    values.dedup();

    assert_eq!(values, [12]);
}
8705
#[test]
fn test_vec_dedup_multiple_ident() {
    // Two long runs collapse to one element each.
    let mut values: Vec<i32> = [12; 5].iter().chain([11; 6].iter()).copied().collect();

    values.dedup();

    assert_eq!(values, [12, 11]);
}
8714
#[test]
fn test_vec_dedup_partialeq() {
    // Equality inspects only the first field, so `dedup` keeps the *first*
    // element of each equal run — second fields 1 and 7 survive, 5 and 9 don't.
    #[derive(Debug)]
    struct Foo(i32, i32);

    impl PartialEq for Foo {
        fn eq(&self, other: &Foo) -> bool {
            self.0 == other.0
        }
    }

    let mut values = vec![Foo(0, 1), Foo(0, 5), Foo(1, 7), Foo(1, 9)];

    values.dedup();
    assert_eq!(values, [Foo(0, 1), Foo(1, 7)]);
}
8731
#[test]
fn test_vec_dedup() {
    // Cross-checks `Vec::dedup` against `slice::partition_dedup` for every
    // 8-element boolean pattern.
    let mut vec: Vec<bool> = Vec::with_capacity(8);
    let mut template = vec.clone();

    // Was `0u8..255u8`, which silently skipped the all-ones bitmask; use an
    // inclusive range so all 256 patterns are exercised.
    for x in 0u8..=u8::MAX {
        vec.clear();
        template.clear();

        // Decode bitmask `x` into 8 booleans (bit `bit` -> element `bit`).
        let iter = (0..8).map(move |bit| (x >> bit) & 1 == 1);
        vec.extend(iter);
        template.extend_from_slice(&vec);

        // `partition_dedup` yields the deduplicated prefix as reference output.
        let (dedup, _) = template.partition_dedup();
        vec.dedup();

        assert_eq!(vec, dedup);
    }
}
8751
#[test]
fn test_vec_dedup_panicking() {
    // Regression test: when a `Drop` during `dedup` panics mid-way, the
    // vector must be left with the surviving elements and no double drops.
    #[derive(Debug)]
    struct Panic {
        // Shared count of how many `Panic` values have been dropped so far.
        drop_counter: &'static AtomicU32,
        value: bool,
        index: usize,
    }

    impl PartialEq for Panic {
        fn eq(&self, other: &Self) -> bool {
            self.value == other.value
        }
    }

    impl Drop for Panic {
        fn drop(&mut self) {
            // Panics on the fifth drop (previous count == 4), i.e. while
            // `dedup` is still removing duplicates.
            let x = self.drop_counter.fetch_add(1, Ordering::SeqCst);
            assert!(x != 4);
        }
    }

    static DROP_COUNTER: AtomicU32 = AtomicU32::new(0);
    // Elements (by `index`) expected to remain in `vec` after the panic.
    let expected = [
        Panic { drop_counter: &DROP_COUNTER, value: false, index: 0 },
        Panic { drop_counter: &DROP_COUNTER, value: false, index: 5 },
        Panic { drop_counter: &DROP_COUNTER, value: true, index: 6 },
        Panic { drop_counter: &DROP_COUNTER, value: true, index: 7 },
    ];
    let mut vec = vec![
        Panic { drop_counter: &DROP_COUNTER, value: false, index: 0 },
        // these elements get deduplicated
        Panic { drop_counter: &DROP_COUNTER, value: false, index: 1 },
        Panic { drop_counter: &DROP_COUNTER, value: false, index: 2 },
        Panic { drop_counter: &DROP_COUNTER, value: false, index: 3 },
        Panic { drop_counter: &DROP_COUNTER, value: false, index: 4 },
        // here it panics
        Panic { drop_counter: &DROP_COUNTER, value: false, index: 5 },
        Panic { drop_counter: &DROP_COUNTER, value: true, index: 6 },
        Panic { drop_counter: &DROP_COUNTER, value: true, index: 7 },
    ];

    // Swallow the panic; we only care about the state `vec` is left in.
    let _ = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
        vec.dedup();
    }));

    // Compare by `index` only — `PartialEq` above looks at `value`.
    let ok = vec.iter().zip(expected.iter()).all(|(x, y)| x.index == y.index);

    if !ok {
        panic!("expected: {:?}\ngot: {:?}\n", expected, vec);
    }
}
8804
8805// Regression test for issue #82533
#[test]
fn test_extend_from_within_panicing_clone() {
    // Element whose second instance panics on clone; drops are counted so we
    // can detect leaked (never-dropped) values.
    struct Panic<'dc> {
        drop_count: &'dc AtomicU32,
        aaaaa: bool, // when true, `clone` panics
    }

    impl Clone for Panic<'_> {
        fn clone(&self) -> Self {
            if self.aaaaa {
                panic!("panic! at the clone");
            }

            Self { ..*self }
        }
    }

    impl Drop for Panic<'_> {
        fn drop(&mut self) {
            self.drop_count.fetch_add(1, Ordering::SeqCst);
        }
    }

    let count = core::sync::atomic::AtomicU32::new(0);
    let mut vec = vec![
        Panic { drop_count: &count, aaaaa: false },
        Panic { drop_count: &count, aaaaa: true },
        Panic { drop_count: &count, aaaaa: false },
    ];

    // This should clone&append one Panic{..} at the end, and then panic while
    // cloning second Panic{..}. This means that `Panic::drop` should be called
    // 4 times (3 for items already in vector, 1 for just appended).
    //
    // Previously just appended item was leaked, making drop_count = 3, instead of 4.
    std::panic::catch_unwind(move || vec.extend_from_within(..)).unwrap_err();

    assert_eq!(count.load(Ordering::SeqCst), 4);
}
8845#![feature(allocator_api)]
8846#![feature(box_syntax)]
8847#![feature(cow_is_borrowed)]
8848#![feature(const_cow_is_borrowed)]
8849#![feature(drain_filter)]
8850#![feature(exact_size_is_empty)]
8851#![feature(new_uninit)]
8852#![feature(pattern)]
8853#![feature(trusted_len)]
8854#![feature(try_reserve)]
8855#![feature(unboxed_closures)]
8856#![feature(associated_type_bounds)]
8857#![feature(binary_heap_into_iter_sorted)]
8858#![feature(binary_heap_drain_sorted)]
8859#![feature(slice_ptr_get)]
8860#![feature(binary_heap_retain)]
8861#![feature(binary_heap_as_slice)]
8862#![feature(inplace_iteration)]
8863#![feature(iter_map_while)]
8864#![feature(vecdeque_binary_search)]
8865#![feature(slice_group_by)]
8866#![feature(slice_partition_dedup)]
8867#![feature(vec_spare_capacity)]
8868#![feature(string_remove_matches)]
8869
8870use std::collections::hash_map::DefaultHasher;
8871use std::hash::{Hash, Hasher};
8872
8873mod arc;
8874mod binary_heap;
8875mod borrow;
8876mod boxed;
8877mod btree_set_hash;
8878mod cow_str;
8879mod fmt;
8880mod heap;
8881mod linked_list;
8882mod rc;
8883mod slice;
8884mod str;
8885mod string;
8886mod vec;
8887mod vec_deque;
8888
// Convenience helper: feed `t` through a fresh `DefaultHasher` and return
// the resulting 64-bit digest.
fn hash<T: Hash>(t: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    t.hash(&mut hasher);
    hasher.finish()
}
8894
// FIXME: Instantiated functions with i128 in the signature is not supported in Emscripten.
// See https://github.com/kripken/emscripten-fastcomp/issues/169
#[cfg(not(target_os = "emscripten"))]
#[test]
fn test_boxed_hasher() {
    // Hashing through a `Box<DefaultHasher>` — concrete or as a `dyn Hasher`
    // trait object — must produce the same digest as hashing directly.
    let ordinary_hash = hash(&5u32);

    let mut boxed_concrete = Box::new(DefaultHasher::new());
    5u32.hash(&mut boxed_concrete);
    assert_eq!(ordinary_hash, boxed_concrete.finish());

    let mut boxed_dyn: Box<dyn Hasher> = Box::new(DefaultHasher::new());
    5u32.hash(&mut boxed_dyn);
    assert_eq!(ordinary_hash, boxed_dyn.finish());
}
8910use std::borrow::Cow;
8911
8912// check that Cow<'a, str> implements addition
#[test]
fn check_cow_add_cow() {
    // `Cow<str> + Cow<str>` concatenates; when one operand is empty, the
    // result must keep/become `Cow::Borrowed` (i.e. no allocation).
    let borrowed1 = Cow::Borrowed("Hello, ");
    let borrowed2 = Cow::Borrowed("World!");
    let borrow_empty = Cow::Borrowed("");

    let owned1: Cow<'_, str> = Cow::Owned(String::from("Hi, "));
    let owned2: Cow<'_, str> = Cow::Owned(String::from("Rustaceans!"));
    let owned_empty: Cow<'_, str> = Cow::Owned(String::new());

    assert_eq!("Hello, World!", borrowed1.clone() + borrowed2.clone());
    assert_eq!("Hello, Rustaceans!", borrowed1.clone() + owned2.clone());

    assert_eq!("Hi, World!", owned1.clone() + borrowed2.clone());
    assert_eq!("Hi, Rustaceans!", owned1.clone() + owned2.clone());

    // (panic messages below had a typo: "note" -> "not")
    if let Cow::Owned(_) = borrowed1.clone() + borrow_empty.clone() {
        panic!("Adding empty strings to a borrow should not allocate");
    }
    if let Cow::Owned(_) = borrow_empty.clone() + borrowed1.clone() {
        panic!("Adding empty strings to a borrow should not allocate");
    }
    if let Cow::Owned(_) = borrowed1.clone() + owned_empty.clone() {
        panic!("Adding empty strings to a borrow should not allocate");
    }
    if let Cow::Owned(_) = owned_empty.clone() + borrowed1.clone() {
        panic!("Adding empty strings to a borrow should not allocate");
    }
}
8942
#[test]
fn check_cow_add_str() {
    // `Cow<str> + &str` concatenates; when either side is empty, the result
    // must stay/become `Cow::Borrowed` (no allocation).
    let borrowed = Cow::Borrowed("Hello, ");
    let borrow_empty = Cow::Borrowed("");

    let owned: Cow<'_, str> = Cow::Owned(String::from("Hi, "));
    let owned_empty: Cow<'_, str> = Cow::Owned(String::new());

    assert_eq!("Hello, World!", borrowed.clone() + "World!");

    assert_eq!("Hi, World!", owned.clone() + "World!");

    // (panic messages below had a typo: "note" -> "not")
    if let Cow::Owned(_) = borrowed.clone() + "" {
        panic!("Adding empty strings to a borrow should not allocate");
    }
    if let Cow::Owned(_) = borrow_empty.clone() + "Hello, " {
        panic!("Adding empty strings to a borrow should not allocate");
    }
    if let Cow::Owned(_) = owned_empty.clone() + "Hello, " {
        panic!("Adding empty strings to a borrow should not allocate");
    }
}
8965
#[test]
fn check_cow_add_assign_cow() {
    // `Cow<str> += Cow<str>` appends in place; appending/prepending an empty
    // string must leave the result `Cow::Borrowed` (no allocation).
    let mut borrowed1 = Cow::Borrowed("Hello, ");
    let borrowed2 = Cow::Borrowed("World!");
    let borrow_empty = Cow::Borrowed("");

    let mut owned1: Cow<'_, str> = Cow::Owned(String::from("Hi, "));
    let owned2: Cow<'_, str> = Cow::Owned(String::from("Rustaceans!"));
    let owned_empty: Cow<'_, str> = Cow::Owned(String::new());

    // (panic messages below had a typo: "note" -> "not")
    let mut s = borrowed1.clone();
    s += borrow_empty.clone();
    assert_eq!("Hello, ", s);
    if let Cow::Owned(_) = s {
        panic!("Adding empty strings to a borrow should not allocate");
    }
    let mut s = borrow_empty.clone();
    s += borrowed1.clone();
    assert_eq!("Hello, ", s);
    if let Cow::Owned(_) = s {
        panic!("Adding empty strings to a borrow should not allocate");
    }
    let mut s = borrowed1.clone();
    s += owned_empty.clone();
    assert_eq!("Hello, ", s);
    if let Cow::Owned(_) = s {
        panic!("Adding empty strings to a borrow should not allocate");
    }
    let mut s = owned_empty.clone();
    s += borrowed1.clone();
    assert_eq!("Hello, ", s);
    if let Cow::Owned(_) = s {
        panic!("Adding empty strings to a borrow should not allocate");
    }

    owned1 += borrowed2;
    borrowed1 += owned2;

    assert_eq!("Hi, World!", owned1);
    assert_eq!("Hello, Rustaceans!", borrowed1);
}
9007
#[test]
fn check_cow_add_assign_str() {
    // `Cow<str> += &str` appends in place; when either side is empty the
    // result must stay/become `Cow::Borrowed` (no allocation).
    let mut borrowed = Cow::Borrowed("Hello, ");
    let borrow_empty = Cow::Borrowed("");

    let mut owned: Cow<'_, str> = Cow::Owned(String::from("Hi, "));
    let owned_empty: Cow<'_, str> = Cow::Owned(String::new());

    // (panic messages below had a typo: "note" -> "not")
    let mut s = borrowed.clone();
    s += "";
    assert_eq!("Hello, ", s);
    if let Cow::Owned(_) = s {
        panic!("Adding empty strings to a borrow should not allocate");
    }
    let mut s = borrow_empty.clone();
    s += "World!";
    assert_eq!("World!", s);
    if let Cow::Owned(_) = s {
        panic!("Adding empty strings to a borrow should not allocate");
    }
    let mut s = owned_empty.clone();
    s += "World!";
    assert_eq!("World!", s);
    if let Cow::Owned(_) = s {
        panic!("Adding empty strings to a borrow should not allocate");
    }

    owned += "World!";
    borrowed += "World!";

    assert_eq!("Hi, World!", owned);
    assert_eq!("Hello, World!", borrowed);
}
9041
#[test]
fn check_cow_clone_from() {
    // `clone_from` into an owned `Cow` should reuse the target's existing
    // (larger) allocation instead of allocating afresh.
    let mut target: Cow<'_, str> = Cow::Owned(String::with_capacity(25));
    let small = String::from("hi");
    assert!(small.capacity() < 25);
    let source: Cow<'_, str> = Cow::Owned(small);
    target.clone_from(&source);
    assert!(target.into_owned().capacity() >= 25);
    // `clone_from` into a borrowed `Cow` still produces an equal value.
    let mut borrowed_target: Cow<'_, str> = Cow::Borrowed("bye");
    borrowed_target.clone_from(&source);
    assert_eq!(source, borrowed_target);
}
9054use std::any::Any;
9055use std::cell::RefCell;
9056use std::cmp::PartialEq;
9057use std::iter::TrustedLen;
9058use std::mem;
9059use std::rc::{Rc, Weak};
9060
#[test]
fn uninhabited() {
    // A `Weak` to an uninhabited type can be created, cloned, and unsized,
    // but can never upgrade.
    enum Void {}
    let mut w = Weak::<Void>::new();
    w = w.clone();
    assert!(w.upgrade().is_none());

    let mut w: Weak<dyn Any> = w; // Unsizing
    w = w.clone();
    assert!(w.upgrade().is_none());
}
9072
#[test]
fn slice() {
    // `Rc<[u32; 3]>` unsizes to `Rc<[u32]>` and compares equal to an
    // `Rc<[u32]>` built via `Rc::from` on a slice.
    let fixed: Rc<[u32; 3]> = Rc::new([3, 2, 1]);
    let unsized_rc: Rc<[u32]> = fixed; // Unsizing
    let converted: Rc<[u32]> = Rc::from(&[3, 2, 1][..]); // Conversion
    assert_eq!(unsized_rc, converted);

    // Exercise is_dangling() with a DST
    let mut weak = Rc::downgrade(&unsized_rc);
    weak = weak.clone();
    assert!(weak.upgrade().is_some());
}
9085
#[test]
fn trait_object() {
    // Weak references to trait objects: a live one upgrades, a dangling
    // `Weak::new()` never does — before or after unsizing.
    let concrete: Rc<u32> = Rc::new(4);
    let erased: Rc<dyn Any> = concrete; // Unsizing

    // Exercise is_dangling() with a DST
    let mut weak = Rc::downgrade(&erased);
    weak = weak.clone();
    assert!(weak.upgrade().is_some());

    let mut dangling = Weak::<u32>::new();
    dangling = dangling.clone();
    assert!(dangling.upgrade().is_none());
    let mut dangling: Weak<dyn Any> = dangling; // Unsizing
    dangling = dangling.clone();
    assert!(dangling.upgrade().is_none());
}
9103
#[test]
fn float_nan_ne() {
    // NaN is not equal to itself; `Rc`'s comparison must preserve that
    // rather than short-circuiting to `true` for identical pointers.
    let nan = Rc::new(f32::NAN);
    assert!(nan != nan);
    assert!(!(nan == nan));
}
9110
#[test]
fn partial_eq() {
    // Counts how often `eq` touches each operand through the RefCell.
    struct TestPEq(RefCell<usize>);
    impl PartialEq for TestPEq {
        fn eq(&self, other: &TestPEq) -> bool {
            *self.0.borrow_mut() += 1;
            *other.0.borrow_mut() += 1;
            true
        }
    }
    let wrapped = Rc::new(TestPEq(RefCell::new(0)));
    assert!(wrapped == wrapped);
    assert!(!(wrapped != wrapped));
    // `==` and `!=` each ran `eq` once; both operands are the same cell,
    // so each comparison bumped the counter twice: 2 * 2 = 4.
    assert_eq!(*wrapped.0.borrow(), 4);
}
9126
#[test]
fn eq() {
    #[derive(Eq)]
    struct TestEq(RefCell<usize>);
    impl PartialEq for TestEq {
        fn eq(&self, other: &TestEq) -> bool {
            // Count every invocation on both operands.
            *self.0.borrow_mut() += 1;
            *other.0.borrow_mut() += 1;
            true
        }
    }
    let x = Rc::new(TestEq(RefCell::new(0)));
    assert!(x == x);
    assert!(!(x != x));
    // NOTE(review): the counter stays 0, i.e. the user `eq` above never ran —
    // presumably because `Rc<T: Eq>` short-circuits comparisons of identical
    // pointers; confirm against `Rc`'s `PartialEq` implementation.
    assert_eq!(*x.0.borrow(), 0);
}
9143
9144const SHARED_ITER_MAX: u16 = 100;
9145
// Compile-time check only: type-checks iff `I` implements `TrustedLen`; no-op at runtime.
fn assert_trusted_len<I: TrustedLen>(_: &I) {}
9147
#[test]
fn shared_from_iter_normal() {
    // Exercise the base implementation for non-`TrustedLen` iterators.
    {
        // `Filter` is never `TrustedLen` since we don't
        // know statically how many elements will be kept:
        let boxed_evens = (0..SHARED_ITER_MAX).filter(|x| x % 2 == 0).map(Box::new);

        // Collecting into a `Vec<T>` or `Rc<[T]>` should make no difference:
        let as_vec = boxed_evens.clone().collect::<Vec<_>>();
        let as_rc: Rc<[_]> = boxed_evens.collect();
        assert_eq!(&*as_vec, &*as_rc);

        // Clone a bit and let these get dropped.
        {
            let _rc_b = as_rc.clone();
            let _rc_c = as_rc.clone();
            let _weak = Rc::downgrade(&_rc_c);
        }
    } // Drop what hasn't been here.
}
9169
#[test]
fn shared_from_iter_trustedlen_normal() {
    // Exercise the `TrustedLen` implementation under normal circumstances
    // where `size_hint()` matches `(_, Some(exact_len))`.
    {
        let boxed_iter = (0..SHARED_ITER_MAX).map(Box::new);
        assert_trusted_len(&boxed_iter);

        // Collecting into a `Vec<T>` or `Rc<[T]>` should make no difference:
        let as_vec = boxed_iter.clone().collect::<Vec<_>>();
        let as_rc: Rc<[_]> = boxed_iter.collect();
        assert_eq!(&*as_vec, &*as_rc);
        assert_eq!(mem::size_of::<Box<u16>>() * SHARED_ITER_MAX as usize, mem::size_of_val(&*as_rc));

        // Clone a bit and let these get dropped.
        {
            let _rc_b = as_rc.clone();
            let _rc_c = as_rc.clone();
            let _weak = Rc::downgrade(&_rc_c);
        }
    } // Drop what hasn't been here.

    // Try a ZST to make sure it is handled well.
    {
        let unit_iter = (0..SHARED_ITER_MAX).map(drop);
        let as_vec = unit_iter.clone().collect::<Vec<_>>();
        let as_rc: Rc<[_]> = unit_iter.collect();
        assert_eq!(&*as_vec, &*as_rc);
        assert_eq!(0, mem::size_of_val(&*as_rc));
        {
            let _rc_b = as_rc.clone();
            let _rc_c = as_rc.clone();
            let _weak = Rc::downgrade(&_rc_c);
        }
    }
}
9206
#[test]
#[should_panic = "I've almost got 99 problems."]
fn shared_from_iter_trustedlen_panic() {
    // Exercise the `TrustedLen` implementation when `size_hint()` matches
    // `(_, Some(exact_len))` but where `.next()` drops before the last iteration.
    // The collection is expected to unwind cleanly (no leak/UB checkable under
    // sanitizers) when the mapping closure panics on the 99th element.
    let iter = (0..SHARED_ITER_MAX).map(|val| match val {
        98 => panic!("I've almost got 99 problems."),
        _ => Box::new(val),
    });
    assert_trusted_len(&iter);
    let _ = iter.collect::<Rc<[_]>>();

    // `collect` must have panicked above; reaching this line would fail the
    // `should_panic` expectation with a different message.
    panic!("I am unreachable.");
}
9221
#[test]
fn shared_from_iter_trustedlen_no_fuse() {
    // Exercise the `TrustedLen` implementation when `size_hint()` matches
    // `(_, Some(exact_len))` but where the iterator does not behave in a fused manner.
    struct Iter(std::vec::IntoIter<Option<Box<u8>>>);

    // SAFETY (test-local): the exact hint of 2 below is accurate — `next`
    // yields exactly two `Some`s before its first `None` — and a `TrustedLen`
    // consumer is expected to stop after `exact_len` items, never observing
    // the non-fused tail.
    unsafe impl TrustedLen for Iter {}

    impl Iterator for Iter {
        fn size_hint(&self) -> (usize, Option<usize>) {
            (2, Some(2))
        }

        type Item = Box<u8>;

        // Yields Some(42), Some(24), then None (the embedded `None` flattens
        // away), then Some(12) again — deliberately not fused.
        fn next(&mut self) -> Option<Self::Item> {
            self.0.next().flatten()
        }
    }

    let vec = vec![Some(Box::new(42)), Some(Box::new(24)), None, Some(Box::new(12))];
    let iter = Iter(vec.into_iter());
    assert_trusted_len(&iter);
    // Only the first two elements should make it into the shared slice.
    assert_eq!(&[Box::new(42), Box::new(24)], &*iter.collect::<Rc<[_]>>());
}
9247use std::cell::Cell;
9248use std::cmp::Ordering::{self, Equal, Greater, Less};
9249use std::convert::identity;
9250use std::mem;
9251use std::panic;
9252use std::rc::Rc;
9253use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};
9254
9255use rand::distributions::Standard;
9256use rand::seq::SliceRandom;
9257use rand::{thread_rng, Rng, RngCore};
9258
/// Returns `n` squared; used as a mapping function by the tests below.
fn square(n: usize) -> usize {
    n.pow(2)
}
9262
/// Predicate form (for `retain` and friends): true iff `n` is odd.
fn is_odd(n: &usize) -> bool {
    n & 1 == 1
}
9266
#[test]
fn test_from_fn() {
    // Collect a small (three-element) mapped range.
    let mut built: Vec<_> = (0..3).map(square).collect();
    assert_eq!(built.len(), 3);
    assert_eq!(built, [0, 1, 4]);

    // Collect a larger (five-element) mapped range into the same binding.
    built = (0..5).map(square).collect();
    assert_eq!(built.len(), 5);
    assert_eq!(built, [0, 1, 4, 9, 16]);
}
9291
#[test]
fn test_from_elem() {
    // Explicit two-element literal.
    let mut v = vec![10, 10];
    assert_eq!(v.len(), 2);
    assert_eq!(v, [10, 10]);

    // `vec![elem; n]` repetition form.
    v = vec![20; 6];
    assert_eq!(&v[..], [20; 6]);
}
9315
#[test]
fn test_is_empty() {
    // An empty array reports empty; a one-element array does not.
    let none: [i32; 0] = [];
    assert!(none.is_empty());
    let one = [0];
    assert!(!one.is_empty());
}
9322
#[test]
fn test_len_divzero() {
    // `len` on slices of zero-sized elements must not divide by the
    // (zero) element size.
    type Z = [i8; 0];
    assert_eq!(mem::size_of::<Z>(), 0);
    let slices: [&[Z]; 3] = [&[], &[[]], &[[], []]];
    for (want_len, s) in slices.iter().enumerate() {
        assert_eq!(s.len(), want_len);
    }
}
9334
#[test]
fn test_get() {
    // Out-of-range index returns `None`; in-range returns a reference.
    let mut a = vec![11];
    assert!(a.get(1).is_none());
    a = vec![11, 12];
    assert_eq!(a.get(1), Some(&12));
    a = vec![11, 12, 13];
    assert_eq!(a.get(1), Some(&12));
}
9344
#[test]
fn test_first() {
    // Empty → `None`; otherwise the first element.
    let mut a: Vec<i32> = vec![];
    assert!(a.first().is_none());
    a = vec![11];
    assert_eq!(a.first(), Some(&11));
    a = vec![11, 12];
    assert_eq!(a.first(), Some(&11));
}
9354
#[test]
fn test_first_mut() {
    // Mutable variant of `first`: empty → `None`, else first element.
    let mut a: Vec<i32> = vec![];
    assert!(a.first_mut().is_none());
    a = vec![11];
    assert_eq!(a.first_mut(), Some(&mut 11));
    a = vec![11, 12];
    assert_eq!(a.first_mut(), Some(&mut 11));
}
9364
#[test]
fn test_split_first() {
    // Empty slice has no head to split off.
    let none: &[i32] = &[];
    assert!(none.split_first().is_none());

    // Single element: head plus empty tail.
    let a = vec![11];
    let tail: &[i32] = &[];
    assert_eq!(a.split_first(), Some((&11, tail)));

    // Two elements: head plus one-element tail.
    let a = vec![11, 12];
    let tail: &[i32] = &[12];
    assert_eq!(a.split_first(), Some((&11, tail)));
}
9375
#[test]
fn test_split_first_mut() {
    // Mutable variant: empty slice has nothing to split.
    let none: &mut [i32] = &mut [];
    assert!(none.split_first_mut().is_none());

    let mut a = vec![11];
    let tail: &mut [i32] = &mut [];
    assert!(a.split_first_mut() == Some((&mut 11, tail)));

    let mut a = vec![11, 12];
    let tail: &mut [i32] = &mut [12];
    assert!(a.split_first_mut() == Some((&mut 11, tail)));
}
9386
#[test]
fn test_split_last() {
    // Empty slice has no tail element to split off.
    let none: &[i32] = &[];
    assert!(none.split_last().is_none());

    let a = vec![11];
    let init: &[i32] = &[];
    assert_eq!(a.split_last(), Some((&11, init)));

    let a = vec![11, 12];
    let init: &[i32] = &[11];
    assert_eq!(a.split_last(), Some((&12, init)));
}
9397
#[test]
fn test_split_last_mut() {
    // Mutable variant of `split_last`.
    let none: &mut [i32] = &mut [];
    assert!(none.split_last_mut().is_none());

    let mut a = vec![11];
    let init: &mut [i32] = &mut [];
    assert!(a.split_last_mut() == Some((&mut 11, init)));

    let mut a = vec![11, 12];
    let init: &mut [i32] = &mut [11];
    assert!(a.split_last_mut() == Some((&mut 12, init)));
}
9409
#[test]
fn test_last() {
    // Empty → `None`; otherwise the final element.
    let mut a: Vec<i32> = vec![];
    assert!(a.last().is_none());
    a = vec![11];
    assert_eq!(a.last(), Some(&11));
    a = vec![11, 12];
    assert_eq!(a.last(), Some(&12));
}
9419
#[test]
fn test_last_mut() {
    // Mutable variant of `last`.
    let mut a: Vec<i32> = vec![];
    assert!(a.last_mut().is_none());
    a = vec![11];
    assert_eq!(a.last_mut(), Some(&mut 11));
    a = vec![11, 12];
    assert_eq!(a.last_mut(), Some(&mut 12));
}
9429
#[test]
fn test_slice() {
    // Range-slice of a fixed-size array, copied out with `to_vec`.
    let fixed = [1, 2, 3, 4];
    let tail = fixed[1..fixed.len()].to_vec();
    assert_eq!(tail.len(), 3);
    assert_eq!(tail, [2, 3, 4]);

    // Range-slice of a borrowed slice.
    let borrowed: &[_] = &[1, 2, 3];
    let mid = borrowed[1..3].to_vec();
    assert_eq!(mid.len(), 2);
    assert_eq!(mid, [2, 3]);

    // Range-slice of a heap-allocated vector.
    let owned = vec![1, 2, 3, 4, 5, 6];
    let rest = owned[1..6].to_vec();
    assert_eq!(rest.len(), 5);
    assert_eq!(rest, [2, 3, 4, 5, 6]);
}
9460
#[test]
fn test_slice_from() {
    // Open-ended `from` ranges: full, mid, and past-the-end (empty).
    let v: &[_] = &[1, 2, 3, 4];
    assert_eq!(&v[..], v);
    let want: &[_] = &[3, 4];
    assert_eq!(&v[2..], want);
    let empty: &[i32] = &[];
    assert_eq!(&v[4..], empty);
}
9470
#[test]
fn test_slice_to() {
    // Open-start `to` ranges: full, mid, and zero-length prefix.
    let v: &[_] = &[1, 2, 3, 4];
    assert_eq!(&v[..4], v);
    let want: &[_] = &[1, 2];
    assert_eq!(&v[..2], want);
    let empty: &[i32] = &[];
    assert_eq!(&v[..0], empty);
}
9480
#[test]
fn test_pop() {
    let mut v = vec![5];
    // Popping the only element empties the vector.
    assert_eq!(v.pop(), Some(5));
    assert_eq!(v.len(), 0);
    // Further pops on the empty vector keep returning `None`.
    assert_eq!(v.pop(), None);
    assert_eq!(v.pop(), None);
}
9492
#[test]
fn test_swap_remove() {
    let mut v = vec![1, 2, 3, 4, 5];
    // Removing the head swaps the last element into its slot.
    assert_eq!(v.swap_remove(0), 1);
    assert_eq!(v, [5, 2, 3, 4]);
    // Removing the final element needs no swap.
    assert_eq!(v.swap_remove(3), 4);
    assert_eq!(v, [5, 2, 3]);
}
9503
#[test]
#[should_panic]
fn test_swap_remove_fail() {
    let mut v = vec![1];
    let _ = v.swap_remove(0);
    // The vector is now empty, so index 0 is out of bounds and must panic.
    let _ = v.swap_remove(0);
}
9511
#[test]
fn test_swap_remove_noncopyable() {
    // Tests that we don't accidentally run destructors twice.
    // NOTE: the removed `box expr` nightly syntax is replaced with the
    // stable, behavior-identical `Box::new`.
    let mut v: Vec<Box<_>> = vec![Box::new(0), Box::new(0), Box::new(0)];
    let mut _e = v.swap_remove(0);
    assert_eq!(v.len(), 2);
    _e = v.swap_remove(1);
    assert_eq!(v.len(), 1);
    _e = v.swap_remove(0);
    assert_eq!(v.len(), 0);
}
9526
#[test]
fn test_push() {
    // Pushes onto an initially empty vector, checking contents after each.
    let mut v = vec![];
    v.push(1);
    assert_eq!(v, [1]);
    v.push(2);
    assert_eq!(v, [1, 2]);
}
9541
#[test]
fn test_truncate() {
    // NOTE: the removed `box expr` nightly syntax is replaced with the
    // stable, behavior-identical `Box::new`.
    let mut v: Vec<Box<_>> = vec![Box::new(6), Box::new(5), Box::new(4)];
    v.truncate(1);
    let v = v;
    assert_eq!(v.len(), 1);
    assert_eq!(*(v[0]), 6);
    // If truncation didn't drop the cut-off boxes exactly once, this would
    // blow up (double free / leak detectable under sanitizers).
}
9551
#[test]
fn test_clear() {
    // NOTE: the removed `box expr` nightly syntax is replaced with the
    // stable, behavior-identical `Box::new`.
    let mut v: Vec<Box<_>> = vec![Box::new(6), Box::new(5), Box::new(4)];
    v.clear();
    assert_eq!(v.len(), 0);
    // If `clear` didn't drop the contents exactly once, we blow up here.
}
9559
#[test]
fn test_retain() {
    // `retain` with a plain `fn` predicate keeps only the odd values.
    let mut v: Vec<usize> = (1..=5).collect();
    v.retain(is_odd);
    assert_eq!(v, [1, 3, 5]);
}
9566
#[test]
fn test_binary_search() {
    // Asserts only on hit/miss and the hit index; the insertion point
    // reported for misses is deliberately ignored (as in `.ok()`).
    fn check(haystack: &[i32], needle: i32, want: Option<usize>) {
        assert_eq!(haystack.binary_search(&needle).ok(), want);
    }

    // Every element of a 5-element sorted slice is findable.
    check(&[1, 2, 3, 4, 5], 5, Some(4));
    check(&[1, 2, 3, 4, 5], 4, Some(3));
    check(&[1, 2, 3, 4, 5], 3, Some(2));
    check(&[1, 2, 3, 4, 5], 2, Some(1));
    check(&[1, 2, 3, 4, 5], 1, Some(0));

    // Odd/even lengths with hits and misses.
    check(&[2, 4, 6, 8, 10], 1, None);
    check(&[2, 4, 6, 8, 10], 5, None);
    check(&[2, 4, 6, 8, 10], 4, Some(1));
    check(&[2, 4, 6, 8, 10], 10, Some(4));

    check(&[2, 4, 6, 8], 1, None);
    check(&[2, 4, 6, 8], 5, None);
    check(&[2, 4, 6, 8], 4, Some(1));
    check(&[2, 4, 6, 8], 8, Some(3));

    check(&[2, 4, 6], 1, None);
    check(&[2, 4, 6], 5, None);
    check(&[2, 4, 6], 4, Some(1));
    check(&[2, 4, 6], 6, Some(2));

    check(&[2, 4], 1, None);
    check(&[2, 4], 5, None);
    check(&[2, 4], 2, Some(0));
    check(&[2, 4], 4, Some(1));

    check(&[2], 1, None);
    check(&[2], 5, None);
    check(&[2], 2, Some(0));

    check(&[], 1, None);
    check(&[], 5, None);

    // With duplicates, any matching index is acceptable.
    assert!([1, 1, 1, 1, 1].binary_search(&1).is_ok());
    assert!([1, 1, 1, 1, 2].binary_search(&1).is_ok());
    assert!([1, 1, 1, 2, 2].binary_search(&1).is_ok());
    assert!([1, 1, 2, 2, 2].binary_search(&1).is_ok());
    check(&[1, 2, 2, 2, 2], 1, Some(0));

    // Needles outside the value range always miss.
    check(&[1, 2, 3, 4, 5], 6, None);
    check(&[1, 2, 3, 4, 5], 0, None);
}
9611
#[test]
fn test_reverse() {
    let mut v = vec![10, 20];
    assert_eq!(v, [10, 20]);
    v.reverse();
    assert_eq!(v, [20, 10]);

    // Reversing an empty vector is a no-op.
    let mut empty = Vec::<i32>::new();
    empty.reverse();
    assert!(empty.is_empty());

    // check the 1-byte-types path
    let mut bytes: Vec<i8> = (-50..51).collect();
    bytes.reverse();
    assert_eq!(bytes, (-50..51).rev().collect::<Vec<i8>>());

    // check the 2-byte-types path
    let mut shorts: Vec<i16> = (-50..51).collect();
    shorts.reverse();
    assert_eq!(shorts, (-50..51).rev().collect::<Vec<i16>>());
}
9635
#[test]
#[cfg_attr(miri, ignore)] // Miri is too slow
fn test_sort() {
    let mut rng = thread_rng();

    // Cross product of slice lengths (a band of small sizes plus a band of
    // larger ones) and moduli (many vs. few duplicate values).
    for len in (2..25).chain(500..510) {
        for &modulus in &[5, 10, 100, 1000] {
            for _ in 0..10 {
                let orig: Vec<_> =
                    rng.sample_iter::<i32, _>(&Standard).map(|x| x % modulus).take(len).collect();

                // Sort in default order.
                let mut v = orig.clone();
                v.sort();
                assert!(v.windows(2).all(|w| w[0] <= w[1]));

                // Sort in ascending order.
                let mut v = orig.clone();
                v.sort_by(|a, b| a.cmp(b));
                assert!(v.windows(2).all(|w| w[0] <= w[1]));

                // Sort in descending order.
                let mut v = orig.clone();
                v.sort_by(|a, b| b.cmp(a));
                assert!(v.windows(2).all(|w| w[0] >= w[1]));

                // Sort in lexicographic order; `sort_by_key` and
                // `sort_by_cached_key` must produce identical results.
                let mut v1 = orig.clone();
                let mut v2 = orig.clone();
                v1.sort_by_key(|x| x.to_string());
                v2.sort_by_cached_key(|x| x.to_string());
                assert!(v1.windows(2).all(|w| w[0].to_string() <= w[1].to_string()));
                assert!(v1 == v2);

                // Sort with many pre-sorted runs: perturb descending data
                // with a handful of random sub-reversals/swaps.
                let mut v = orig.clone();
                v.sort();
                v.reverse();
                for _ in 0..5 {
                    let a = rng.gen::<usize>() % len;
                    let b = rng.gen::<usize>() % len;
                    if a < b {
                        v[a..b].reverse();
                    } else {
                        v.swap(a, b);
                    }
                }
                v.sort();
                assert!(v.windows(2).all(|w| w[0] <= w[1]));
            }
        }
    }

    // Sort using a completely random comparison function.
    // This will reorder the elements *somehow*, but won't panic.
    let mut v = [0; 500];
    for i in 0..v.len() {
        v[i] = i as i32;
    }
    v.sort_by(|_, _| *[Less, Equal, Greater].choose(&mut rng).unwrap());
    // A follow-up well-behaved sort must still restore the exact sequence.
    v.sort();
    for i in 0..v.len() {
        assert_eq!(v[i], i as i32);
    }

    // Should not panic.
    [0i32; 0].sort();
    [(); 10].sort();
    [(); 100].sort();

    let mut v = [0xDEADBEEFu64];
    v.sort();
    assert!(v == [0xDEADBEEF]);
}
9710
#[test]
fn test_sort_stability() {
    // Verifies that `sort` and `sort_by_cached_key` are stable: elements
    // that compare equal keep their original relative order.
    // Miri is too slow
    let large_range = if cfg!(miri) { 0..0 } else { 500..510 };
    let rounds = if cfg!(miri) { 1 } else { 10 };

    for len in (2..25).chain(large_range) {
        for _ in 0..rounds {
            let mut counts = [0; 10];

            // create a vector like [(6, 1), (5, 1), (6, 2), ...],
            // where the first item of each tuple is random, but
            // the second item represents which occurrence of that
            // number this element is, i.e., the second elements
            // will occur in sorted order.
            let orig: Vec<_> = (0..len)
                .map(|_| {
                    let n = thread_rng().gen::<usize>() % 10;
                    counts[n] += 1;
                    (n, counts[n])
                })
                .collect();

            let mut v = orig.clone();
            // Only sort on the first element, so an unstable sort
            // may mix up the counts.
            v.sort_by(|&(a, _), &(b, _)| a.cmp(&b));

            // This comparison includes the count (the second item
            // of the tuple), so elements with equal first items
            // will need to be ordered with increasing
            // counts... i.e., exactly asserting that this sort is
            // stable.
            assert!(v.windows(2).all(|w| w[0] <= w[1]));

            // `sort_by_cached_key` must be stable under the same criterion.
            let mut v = orig.clone();
            v.sort_by_cached_key(|&(x, _)| x);
            assert!(v.windows(2).all(|w| w[0] <= w[1]));
        }
    }
}
9752
#[test]
fn test_rotate_left() {
    let want: Vec<_> = (0..13).collect();
    let mut buf = Vec::new();

    // Rotating by 0 or by the full length leaves the slice untouched.
    buf.clone_from(&want);
    buf.rotate_left(0);
    assert_eq!(buf, want);
    buf.rotate_left(want.len());
    assert_eq!(buf, want);
    // Zero-sized element types must also be handled.
    let mut zsts = [(), (), ()];
    zsts.rotate_left(2);

    // Basic rotation of a pre-rotated sequence back into order.
    buf = (5..13).chain(0..5).collect();
    buf.rotate_left(8);
    assert_eq!(buf, want);

    let want: Vec<_> = (0..1000).collect();

    // Rotations close to either end of a large slice.
    buf = (2..1000).chain(0..2).collect();
    buf.rotate_left(998);
    assert_eq!(buf, want);
    buf = (998..1000).chain(0..998).collect();
    buf.rotate_left(2);
    assert_eq!(buf, want);

    // Rotation by a non-small prime amount.
    buf = (389..1000).chain(0..389).collect();
    buf.rotate_left(1000 - 389);
    assert_eq!(buf, want);
}
9787
#[test]
fn test_rotate_right() {
    let want: Vec<_> = (0..13).collect();
    let mut buf = Vec::new();

    // Rotating by 0 or by the full length leaves the slice untouched.
    buf.clone_from(&want);
    buf.rotate_right(0);
    assert_eq!(buf, want);
    buf.rotate_right(want.len());
    assert_eq!(buf, want);
    // Zero-sized element types must also be handled.
    let mut zsts = [(), (), ()];
    zsts.rotate_right(2);

    // Basic rotation of a pre-rotated sequence back into order.
    buf = (5..13).chain(0..5).collect();
    buf.rotate_right(5);
    assert_eq!(buf, want);

    let want: Vec<_> = (0..1000).collect();

    // Rotations close to either end of a large slice.
    buf = (2..1000).chain(0..2).collect();
    buf.rotate_right(2);
    assert_eq!(buf, want);
    buf = (998..1000).chain(0..998).collect();
    buf.rotate_right(998);
    assert_eq!(buf, want);

    // Rotation by a non-small prime amount.
    buf = (389..1000).chain(0..389).collect();
    buf.rotate_right(389);
    assert_eq!(buf, want);
}
9822
#[test]
fn test_concat() {
    // Concatenating no vectors gives an empty vector.
    let none: [Vec<i32>; 0] = [];
    assert!(none.concat().is_empty());
    assert_eq!([vec![1], vec![2, 3]].concat(), [1, 2, 3]);

    // `join` places the separator between (not around) the pieces.
    let pieces: &[&[_]] = &[&[1], &[2, 3]];
    assert_eq!(pieces.join(&0), [1, 0, 2, 3]);
    let pieces: &[&[_]] = &[&[1], &[2], &[3]];
    assert_eq!(pieces.join(&0), [1, 0, 2, 0, 3]);
}
9836
#[test]
fn test_join() {
    // Joining zero pieces yields an empty vector.
    let none: [Vec<i32>; 0] = [];
    assert_eq!(none.join(&0), []);
    assert_eq!([vec![1], vec![2, 3]].join(&0), [1, 0, 2, 3]);
    assert_eq!([vec![1], vec![2], vec![3]].join(&0), [1, 0, 2, 0, 3]);

    // Same behavior when the pieces are borrowed slices.
    let pair: [&[_]; 2] = [&[1], &[2, 3]];
    assert_eq!(pair.join(&0), [1, 0, 2, 3]);
    let triple: [&[_]; 3] = [&[1], &[2], &[3]];
    assert_eq!(triple.join(&0), [1, 0, 2, 0, 3]);
}
9849
#[test]
fn test_join_nocopy() {
    // `join` over owned (non-`Copy`) strings.
    let none: [String; 0] = [];
    assert_eq!(none.join(","), "");
    let pair = ["a".to_string(), "ab".to_string()];
    assert_eq!(pair.join(","), "a,ab");
    let triple = ["a".to_string(), "ab".to_string(), "abc".to_string()];
    assert_eq!(triple.join(","), "a,ab,abc");
    // A trailing empty string still gets its separator.
    let with_empty = ["a".to_string(), "ab".to_string(), String::new()];
    assert_eq!(with_empty.join(","), "a,ab,");
}
9858
#[test]
fn test_insert() {
    // (initial vector, insertion index, element, expected result):
    // middle, front, back, and into-empty insertions.
    let cases: [(Vec<i32>, usize, i32, &[i32]); 4] = [
        (vec![1, 2, 4], 2, 3, &[1, 2, 3, 4]),
        (vec![1, 2, 3], 0, 0, &[0, 1, 2, 3]),
        (vec![1, 2, 3], 3, 4, &[1, 2, 3, 4]),
        (vec![], 0, 1, &[1]),
    ];
    for (mut v, idx, elem, want) in cases {
        v.insert(idx, elem);
        assert_eq!(v, want);
    }
}
9877
#[test]
#[should_panic]
fn test_insert_oob() {
    // Inserting past `len` (index 4 into a 3-element vector) must panic.
    let mut v = vec![1, 2, 3];
    v.insert(4, 5);
}
9884
#[test]
fn test_remove() {
    let mut a = vec![1, 2, 3, 4];
    // Each step removes one element and shifts the remainder left.
    let steps: [(usize, i32, &[i32]); 4] =
        [(2, 3, &[1, 2, 4]), (2, 4, &[1, 2]), (0, 1, &[2]), (0, 2, &[])];
    for (idx, removed, rest) in steps {
        assert_eq!(a.remove(idx), removed);
        assert_eq!(a, rest);
    }
}
9901
#[test]
#[should_panic]
fn test_remove_fail() {
    let mut v = vec![1];
    let _ = v.remove(0);
    // The vector is empty now, so a second `remove(0)` must panic.
    let _ = v.remove(0);
}
9909
#[test]
fn test_capacity() {
    let mut v = Vec::from([0]);
    v.reserve_exact(10);
    // Room for the existing element plus at least ten more.
    assert!(v.capacity() >= 11);
}
9916
#[test]
fn test_slice_2() {
    // A mid-range borrow sees exactly the selected window.
    let v = vec![1, 2, 3, 4, 5];
    let window = &v[1..3];
    assert_eq!(window.len(), 2);
    assert_eq!(window, [2, 3]);
}
9925
/// Asserts that `$a.cmp($b)` yields the named `Ordering` AND that the
/// corresponding comparison operator (`>`, `<`, `==`) agrees with it, so
/// `Ord` and `PartialOrd`/`PartialEq` are checked for consistency.
macro_rules! assert_order {
    (Greater, $a:expr, $b:expr) => {
        assert_eq!($a.cmp($b), Greater);
        assert!($a > $b);
    };
    (Less, $a:expr, $b:expr) => {
        assert_eq!($a.cmp($b), Less);
        assert!($a < $b);
    };
    (Equal, $a:expr, $b:expr) => {
        assert_eq!($a.cmp($b), Equal);
        assert_eq!($a, $b);
    };
}
9940
#[test]
fn test_total_ord_u8() {
    // Slice ordering for `u8`: element-wise comparison dominates, and when
    // one slice is a prefix of the other, the shorter one is the lesser.
    let c = &[1u8, 2, 3];
    assert_order!(Greater, &[1u8, 2, 3, 4][..], &c[..]);
    let c = &[1u8, 2, 3, 4];
    assert_order!(Less, &[1u8, 2, 3][..], &c[..]);
    let c = &[1u8, 2, 3, 6];
    assert_order!(Equal, &[1u8, 2, 3, 6][..], &c[..]);
    let c = &[1u8, 2, 3, 4, 5, 6];
    assert_order!(Less, &[1u8, 2, 3, 4, 5, 5, 5, 5][..], &c[..]);
    // The first differing element decides, regardless of lengths.
    let c = &[1u8, 2, 3, 4];
    assert_order!(Greater, &[2u8, 2][..], &c[..]);
}
9954
#[test]
fn test_total_ord_i32() {
    // Same cases as `test_total_ord_u8`, exercising a multi-byte element
    // type: element-wise comparison first, then length as tie-breaker.
    let c = &[1, 2, 3];
    assert_order!(Greater, &[1, 2, 3, 4][..], &c[..]);
    let c = &[1, 2, 3, 4];
    assert_order!(Less, &[1, 2, 3][..], &c[..]);
    let c = &[1, 2, 3, 6];
    assert_order!(Equal, &[1, 2, 3, 6][..], &c[..]);
    let c = &[1, 2, 3, 4, 5, 6];
    assert_order!(Less, &[1, 2, 3, 4, 5, 5, 5, 5][..], &c[..]);
    // The first differing element decides, regardless of lengths.
    let c = &[1, 2, 3, 4];
    assert_order!(Greater, &[2, 2][..], &c[..]);
}
9968
#[test]
fn test_iterator() {
    let xs = [1, 2, 5, 10, 11];
    let mut it = xs.iter();
    // The exact size hint shrinks in lockstep with each element yielded.
    let mut remaining = xs.len();
    for expected in &xs {
        assert_eq!(it.size_hint(), (remaining, Some(remaining)));
        assert_eq!(it.next(), Some(expected));
        remaining -= 1;
    }
    assert_eq!(it.size_hint(), (0, Some(0)));
    assert!(it.next().is_none());
}
9986
#[test]
fn test_iter_size_hints() {
    // Both the shared and the mutable iterator report an exact length.
    let mut xs = [1, 2, 5, 10, 11];
    let n = xs.len();
    assert_eq!(xs.iter().size_hint(), (n, Some(n)));
    assert_eq!(xs.iter_mut().size_hint(), (n, Some(n)));
}
9993
#[test]
fn test_iter_as_slice() {
    // `as_slice` views exactly the not-yet-yielded tail.
    let xs = [1, 2, 5, 10, 11];
    let mut it = xs.iter();
    assert_eq!(it.as_slice(), xs);
    it.next();
    assert_eq!(it.as_slice(), &xs[1..]);
}
10002
#[test]
fn test_iter_as_ref() {
    // `AsRef<[T]>` on the iterator mirrors `as_slice`: the remaining tail.
    let xs = [1, 2, 5, 10, 11];
    let mut it = xs.iter();
    assert_eq!(it.as_ref(), xs);
    it.next();
    assert_eq!(it.as_ref(), &xs[1..]);
}
10011
#[test]
fn test_iter_clone() {
    let xs = [1, 2, 5];
    let mut original = xs.iter();
    original.next();
    // A cloned iterator continues from the same position and stays in
    // lockstep with the original through exhaustion.
    let mut duplicate = original.clone();
    for _ in 0..3 {
        assert_eq!(original.next(), duplicate.next());
    }
}
10022
#[test]
fn test_iter_is_empty() {
    let xs = [1, 2, 5, 10, 11];
    // For every sub-slice (including the empty ones where i == j), the
    // iterator's `is_empty` must agree with the slice's own `is_empty`.
    // Note: `ExactSizeIterator::is_empty` is an unstable API.
    for i in 0..xs.len() {
        for j in i..xs.len() {
            assert_eq!(xs[i..j].iter().is_empty(), xs[i..j].is_empty());
        }
    }
}
10032
#[test]
fn test_mut_iterator() {
    // Increment every element through the mutable iterator.
    let mut xs = [1, 2, 3, 4, 5];
    xs.iter_mut().for_each(|slot| *slot += 1);
    assert_eq!(xs, [2, 3, 4, 5, 6]);
}
10041
#[test]
fn test_rev_iterator() {
    let xs = [1, 2, 5, 10, 11];
    let ys = [11, 10, 5, 2, 1];
    // The reversed iterator yields `ys` element-for-element, all 5 of them.
    let mut seen = 0;
    for (&got, &want) in xs.iter().rev().zip(ys.iter()) {
        assert_eq!(got, want);
        seen += 1;
    }
    assert_eq!(seen, 5);
}
10053
#[test]
fn test_mut_rev_iterator() {
    let mut xs = [1, 2, 3, 4, 5];
    // Walking backwards, add 0 to the last element, 1 to the one before it,
    // and so on — flattening the array to all fives.
    let mut offset = 0;
    for slot in xs.iter_mut().rev() {
        *slot += offset;
        offset += 1;
    }
    assert_eq!(xs, [5, 5, 5, 5, 5]);
}
10062
#[test]
fn test_move_iterator() {
    // Folding the owning iterator builds the decimal number 12345,
    // proving elements arrive in order.
    let xs = vec![1usize, 2, 3, 4, 5];
    let digits = xs.into_iter().fold(0usize, |acc, d| acc * 10 + d);
    assert_eq!(digits, 12345);
}
10068
#[test]
fn test_move_rev_iterator() {
    // Same digit-folding trick over the reversed owning iterator.
    let xs = vec![1usize, 2, 3, 4, 5];
    let digits = xs.into_iter().rev().fold(0usize, |acc, d| acc * 10 + d);
    assert_eq!(digits, 54321);
}
10074
#[test]
fn test_splitator() {
    let xs = &[1, 2, 3, 4, 5];

    // Separators (even elements) are removed from the output.
    let got: Vec<&[i32]> = xs.split(|x| *x % 2 == 0).collect();
    assert_eq!(got, [&[1][..], &[3], &[5]]);
    // A separator at the front yields a leading empty chunk.
    let got: Vec<&[i32]> = xs.split(|x| *x == 1).collect();
    assert_eq!(got, [&[][..], &[2, 3, 4, 5]]);
    // A separator at the back yields a trailing empty chunk.
    let got: Vec<&[i32]> = xs.split(|x| *x == 5).collect();
    assert_eq!(got, [&[1, 2, 3, 4][..], &[]]);
    // No separator present: the whole slice comes back as one chunk.
    let got: Vec<&[i32]> = xs.split(|x| *x == 10).collect();
    assert_eq!(got, [&[1, 2, 3, 4, 5][..]]);
    // Everything is a separator: n + 1 empty chunks.
    let got: Vec<&[i32]> = xs.split(|_| true).collect();
    assert_eq!(got, [&[][..], &[], &[], &[], &[], &[]]);

    // An empty slice splits into a single empty chunk.
    let empty: &[i32] = &[];
    let got: Vec<&[i32]> = empty.split(|x| *x == 5).collect();
    assert_eq!(got, [&[][..]]);
}
10094
#[test]
fn test_splitator_inclusive() {
    // `split_inclusive` keeps each matched separator at the end of the
    // preceding chunk instead of discarding it.
    let xs = &[1, 2, 3, 4, 5];

    let splits: &[&[_]] = &[&[1, 2], &[3, 4], &[5]];
    assert_eq!(xs.split_inclusive(|x| *x % 2 == 0).collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[1], &[2, 3, 4, 5]];
    assert_eq!(xs.split_inclusive(|x| *x == 1).collect::<Vec<_>>(), splits);
    // A separator in final position does not add a trailing empty chunk.
    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
    assert_eq!(xs.split_inclusive(|x| *x == 5).collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
    assert_eq!(xs.split_inclusive(|x| *x == 10).collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[1], &[2], &[3], &[4], &[5]];
    assert_eq!(xs.split_inclusive(|_| true).collect::<Vec<&[i32]>>(), splits);

    // NOTE(review): this expects one empty chunk for an empty slice; later
    // std releases changed `split_inclusive` on an empty slice to yield no
    // chunks — confirm against the toolchain this file targets.
    let xs: &[i32] = &[];
    let splits: &[&[i32]] = &[&[]];
    assert_eq!(xs.split_inclusive(|x| *x == 5).collect::<Vec<&[i32]>>(), splits);
}
10114
#[test]
fn test_splitator_inclusive_reverse() {
    // Same chunks as `test_splitator_inclusive`, yielded back-to-front.
    let xs = &[1, 2, 3, 4, 5];

    let splits: &[&[_]] = &[&[5], &[3, 4], &[1, 2]];
    assert_eq!(xs.split_inclusive(|x| *x % 2 == 0).rev().collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[2, 3, 4, 5], &[1]];
    assert_eq!(xs.split_inclusive(|x| *x == 1).rev().collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
    assert_eq!(xs.split_inclusive(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
    assert_eq!(xs.split_inclusive(|x| *x == 10).rev().collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[5], &[4], &[3], &[2], &[1]];
    assert_eq!(xs.split_inclusive(|_| true).rev().collect::<Vec<_>>(), splits);

    // NOTE(review): expects one empty chunk for an empty slice; later std
    // releases yield no chunks here — confirm against the target toolchain.
    let xs: &[i32] = &[];
    let splits: &[&[i32]] = &[&[]];
    assert_eq!(xs.split_inclusive(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
}
10134
#[test]
fn test_splitator_mut_inclusive() {
    // Mutable counterpart of `split_inclusive`: separators stay at the end
    // of their chunk. Each collected result is a temporary, so successive
    // calls can re-borrow `xs` mutably.
    let xs = &mut [1, 2, 3, 4, 5];

    let splits: &[&[_]] = &[&[1, 2], &[3, 4], &[5]];
    assert_eq!(xs.split_inclusive_mut(|x| *x % 2 == 0).collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[1], &[2, 3, 4, 5]];
    assert_eq!(xs.split_inclusive_mut(|x| *x == 1).collect::<Vec<_>>(), splits);
    // A separator in final position does not add a trailing empty chunk.
    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
    assert_eq!(xs.split_inclusive_mut(|x| *x == 5).collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
    assert_eq!(xs.split_inclusive_mut(|x| *x == 10).collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[1], &[2], &[3], &[4], &[5]];
    assert_eq!(xs.split_inclusive_mut(|_| true).collect::<Vec<_>>(), splits);

    // NOTE(review): expects one empty chunk for an empty slice; later std
    // releases yield no chunks here — confirm against the target toolchain.
    let xs: &mut [i32] = &mut [];
    let splits: &[&[i32]] = &[&[]];
    assert_eq!(xs.split_inclusive_mut(|x| *x == 5).collect::<Vec<_>>(), splits);
}
10154
#[test]
fn test_splitator_mut_inclusive_reverse() {
    // Same chunks as `test_splitator_mut_inclusive`, yielded back-to-front.
    let xs = &mut [1, 2, 3, 4, 5];

    let splits: &[&[_]] = &[&[5], &[3, 4], &[1, 2]];
    assert_eq!(xs.split_inclusive_mut(|x| *x % 2 == 0).rev().collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[2, 3, 4, 5], &[1]];
    assert_eq!(xs.split_inclusive_mut(|x| *x == 1).rev().collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
    assert_eq!(xs.split_inclusive_mut(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
    assert_eq!(xs.split_inclusive_mut(|x| *x == 10).rev().collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[5], &[4], &[3], &[2], &[1]];
    assert_eq!(xs.split_inclusive_mut(|_| true).rev().collect::<Vec<_>>(), splits);

    // NOTE(review): expects one empty chunk for an empty slice; later std
    // releases yield no chunks here — confirm against the target toolchain.
    let xs: &mut [i32] = &mut [];
    let splits: &[&[i32]] = &[&[]];
    assert_eq!(xs.split_inclusive_mut(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
}
10174
// `splitn(n, pred)` splits at most `n - 1` times; the final chunk keeps the
// rest of the slice unsplit (separators inside it are ignored).
#[test]
fn test_splitnator() {
    let xs = &[1, 2, 3, 4, 5];

    // n == 1: no splitting at all.
    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
    assert_eq!(xs.splitn(1, |x| *x % 2 == 0).collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[1], &[3, 4, 5]];
    assert_eq!(xs.splitn(2, |x| *x % 2 == 0).collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[], &[], &[], &[4, 5]];
    assert_eq!(xs.splitn(4, |_| true).collect::<Vec<_>>(), splits);

    let xs: &[i32] = &[];
    let splits: &[&[i32]] = &[&[]];
    assert_eq!(xs.splitn(2, |x| *x == 5).collect::<Vec<_>>(), splits);
}
10190
// Mutable twin of `test_splitnator`: `splitn_mut` produces the same chunk
// boundaries as `splitn`.
#[test]
fn test_splitnator_mut() {
    let xs = &mut [1, 2, 3, 4, 5];

    let splits: &[&mut [_]] = &[&mut [1, 2, 3, 4, 5]];
    assert_eq!(xs.splitn_mut(1, |x| *x % 2 == 0).collect::<Vec<_>>(), splits);
    let splits: &[&mut [_]] = &[&mut [1], &mut [3, 4, 5]];
    assert_eq!(xs.splitn_mut(2, |x| *x % 2 == 0).collect::<Vec<_>>(), splits);
    let splits: &[&mut [_]] = &[&mut [], &mut [], &mut [], &mut [4, 5]];
    assert_eq!(xs.splitn_mut(4, |_| true).collect::<Vec<_>>(), splits);

    let xs: &mut [i32] = &mut [];
    let splits: &[&mut [i32]] = &[&mut []];
    assert_eq!(xs.splitn_mut(2, |x| *x == 5).collect::<Vec<_>>(), splits);
}
10206
// `split(..).rev()` iterates the chunks of a forward split from the back;
// empty chunks at the boundaries are preserved.
#[test]
fn test_rsplitator() {
    let xs = &[1, 2, 3, 4, 5];

    let splits: &[&[_]] = &[&[5], &[3], &[1]];
    assert_eq!(xs.split(|x| *x % 2 == 0).rev().collect::<Vec<_>>(), splits);
    // Separator at the front produces a trailing (last-yielded) empty chunk.
    let splits: &[&[_]] = &[&[2, 3, 4, 5], &[]];
    assert_eq!(xs.split(|x| *x == 1).rev().collect::<Vec<_>>(), splits);
    // Separator at the back produces a leading (first-yielded) empty chunk.
    let splits: &[&[_]] = &[&[], &[1, 2, 3, 4]];
    assert_eq!(xs.split(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
    assert_eq!(xs.split(|x| *x == 10).rev().collect::<Vec<_>>(), splits);

    let xs: &[i32] = &[];
    let splits: &[&[i32]] = &[&[]];
    assert_eq!(xs.split(|x| *x == 5).rev().collect::<Vec<&[i32]>>(), splits);
}
10224
// `rsplitn(n, pred)` splits from the back at most `n - 1` times; the final
// chunk (the front remainder) is yielded last. `rsplitn(0, ..)` yields nothing.
#[test]
fn test_rsplitnator() {
    let xs = &[1, 2, 3, 4, 5];

    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
    assert_eq!(xs.rsplitn(1, |x| *x % 2 == 0).collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[5], &[1, 2, 3]];
    assert_eq!(xs.rsplitn(2, |x| *x % 2 == 0).collect::<Vec<_>>(), splits);
    let splits: &[&[_]] = &[&[], &[], &[], &[1, 2]];
    assert_eq!(xs.rsplitn(4, |_| true).collect::<Vec<_>>(), splits);

    let xs: &[i32] = &[];
    let splits: &[&[i32]] = &[&[]];
    assert_eq!(xs.rsplitn(2, |x| *x == 5).collect::<Vec<&[i32]>>(), splits);
    assert!(xs.rsplitn(0, |x| *x % 2 == 0).next().is_none());
}
10241
// `windows(n)` yields overlapping length-`n` views; when `n > len` the
// iterator is empty, and `rev()` walks the same windows backwards.
#[test]
fn test_windowsator() {
    let v = &[1, 2, 3, 4];

    let wins: &[&[_]] = &[&[1, 2], &[2, 3], &[3, 4]];
    assert_eq!(v.windows(2).collect::<Vec<_>>(), wins);

    let wins: &[&[_]] = &[&[1, 2, 3], &[2, 3, 4]];
    assert_eq!(v.windows(3).collect::<Vec<_>>(), wins);
    assert!(v.windows(6).next().is_none());

    let wins: &[&[_]] = &[&[3, 4], &[2, 3], &[1, 2]];
    assert_eq!(v.windows(2).rev().collect::<Vec<&[_]>>(), wins);
}
10256
// A window size of 0 must panic already at iterator construction.
#[test]
#[should_panic]
fn test_windowsator_0() {
    let v = &[1, 2, 3, 4];
    let _it = v.windows(0);
}
10263
// `chunks(n)` yields non-overlapping chunks from the front; the last chunk
// may be shorter, and oversize `n` yields the whole slice as one chunk.
#[test]
fn test_chunksator() {
    let v = &[1, 2, 3, 4, 5];

    assert_eq!(v.chunks(2).len(), 3);

    let chunks: &[&[_]] = &[&[1, 2], &[3, 4], &[5]];
    assert_eq!(v.chunks(2).collect::<Vec<_>>(), chunks);
    let chunks: &[&[_]] = &[&[1, 2, 3], &[4, 5]];
    assert_eq!(v.chunks(3).collect::<Vec<_>>(), chunks);
    let chunks: &[&[_]] = &[&[1, 2, 3, 4, 5]];
    assert_eq!(v.chunks(6).collect::<Vec<_>>(), chunks);

    // In reverse, the short remainder chunk comes first.
    let chunks: &[&[_]] = &[&[5], &[3, 4], &[1, 2]];
    assert_eq!(v.chunks(2).rev().collect::<Vec<_>>(), chunks);
}
10280
// A chunk size of 0 must panic already at iterator construction.
#[test]
#[should_panic]
fn test_chunksator_0() {
    let v = &[1, 2, 3, 4];
    let _it = v.chunks(0);
}
10287
// `chunks_exact(n)`: every yielded chunk has exactly length `n`; any short
// remainder is dropped from iteration (available via `remainder()`).
#[test]
fn test_chunks_exactator() {
    let v = &[1, 2, 3, 4, 5];

    assert_eq!(v.chunks_exact(2).len(), 2);

    let chunks: &[&[_]] = &[&[1, 2], &[3, 4]];
    assert_eq!(v.chunks_exact(2).collect::<Vec<_>>(), chunks);
    let chunks: &[&[_]] = &[&[1, 2, 3]];
    assert_eq!(v.chunks_exact(3).collect::<Vec<_>>(), chunks);
    // n > len: no full chunk fits, so the iterator is empty.
    let chunks: &[&[_]] = &[];
    assert_eq!(v.chunks_exact(6).collect::<Vec<_>>(), chunks);

    let chunks: &[&[_]] = &[&[3, 4], &[1, 2]];
    assert_eq!(v.chunks_exact(2).rev().collect::<Vec<_>>(), chunks);
}
10304
// A chunk size of 0 must panic already at iterator construction.
#[test]
#[should_panic]
fn test_chunks_exactator_0() {
    let v = &[1, 2, 3, 4];
    let _it = v.chunks_exact(0);
}
10311
// `rchunks(n)` chunks from the BACK of the slice, so the short remainder
// (if any) is the front-most elements.
#[test]
fn test_rchunksator() {
    let v = &[1, 2, 3, 4, 5];

    assert_eq!(v.rchunks(2).len(), 3);

    let chunks: &[&[_]] = &[&[4, 5], &[2, 3], &[1]];
    assert_eq!(v.rchunks(2).collect::<Vec<_>>(), chunks);
    let chunks: &[&[_]] = &[&[3, 4, 5], &[1, 2]];
    assert_eq!(v.rchunks(3).collect::<Vec<_>>(), chunks);
    let chunks: &[&[_]] = &[&[1, 2, 3, 4, 5]];
    assert_eq!(v.rchunks(6).collect::<Vec<_>>(), chunks);

    let chunks: &[&[_]] = &[&[1], &[2, 3], &[4, 5]];
    assert_eq!(v.rchunks(2).rev().collect::<Vec<_>>(), chunks);
}
10328
// A chunk size of 0 must panic already at iterator construction.
#[test]
#[should_panic]
fn test_rchunksator_0() {
    let v = &[1, 2, 3, 4];
    let _it = v.rchunks(0);
}
10335
// `rchunks_exact(n)`: exact-size chunks from the back; the short front
// remainder is dropped from iteration.
#[test]
fn test_rchunks_exactator() {
    let v = &[1, 2, 3, 4, 5];

    assert_eq!(v.rchunks_exact(2).len(), 2);

    let chunks: &[&[_]] = &[&[4, 5], &[2, 3]];
    assert_eq!(v.rchunks_exact(2).collect::<Vec<_>>(), chunks);
    let chunks: &[&[_]] = &[&[3, 4, 5]];
    assert_eq!(v.rchunks_exact(3).collect::<Vec<_>>(), chunks);
    let chunks: &[&[_]] = &[];
    assert_eq!(v.rchunks_exact(6).collect::<Vec<_>>(), chunks);

    let chunks: &[&[_]] = &[&[2, 3], &[4, 5]];
    assert_eq!(v.rchunks_exact(2).rev().collect::<Vec<_>>(), chunks);
}
10352
// A chunk size of 0 must panic already at iterator construction.
#[test]
#[should_panic]
fn test_rchunks_exactator_0() {
    let v = &[1, 2, 3, 4];
    let _it = v.rchunks_exact(0);
}
10359
// Reversing a sub-range must leave the elements outside that range in place.
#[test]
fn test_reverse_part() {
    let mut data = [1, 2, 3, 4, 5];
    let middle = &mut data[1..4];
    middle.reverse();
    assert_eq!(data, [1, 4, 3, 2, 5]);
}
10366
// `Debug` formatting of Vec, nested Vec, and `&mut` slices.
#[test]
fn test_show() {
    macro_rules! test_show_vec {
        ($x:expr, $x_str:expr) => {{
            let (x, x_str) = ($x, $x_str);
            // Formatted twice: checks that formatting does not consume or
            // alter the value (NOTE(review): the two asserts are identical —
            // presumably intentional repeat-formatting check; confirm).
            assert_eq!(format!("{:?}", x), x_str);
            assert_eq!(format!("{:?}", x), x_str);
        }};
    }
    let empty = Vec::<i32>::new();
    test_show_vec!(empty, "[]");
    test_show_vec!(vec![1], "[1]");
    test_show_vec!(vec![1, 2, 3], "[1, 2, 3]");
    test_show_vec!(vec![vec![], vec![1], vec![1, 1]], "[[], [1], [1, 1]]");

    // Same expectations for mutable slices.
    let empty_mut: &mut [i32] = &mut [];
    test_show_vec!(empty_mut, "[]");
    let v = &mut [1];
    test_show_vec!(v, "[1]");
    let v = &mut [1, 2, 3];
    test_show_vec!(v, "[1, 2, 3]");
    let v: &mut [&mut [_]] = &mut [&mut [], &mut [1], &mut [1, 1]];
    test_show_vec!(v, "[[], [1], [1, 1]]");
}
10391
// `Default` for both `&[T]` and `Vec<T>` yields an empty container.
#[test]
fn test_vec_default() {
    macro_rules! t {
        ($ty:ty) => {{
            let v: $ty = Default::default();
            assert!(v.is_empty());
        }};
    }

    t!(&[i32]);
    t!(Vec<i32>);
}
10404
// `reserve_exact(usize::MAX)` must fail with a clean panic (capacity
// overflow), not wrap around and later write out of bounds.
#[test]
#[should_panic]
fn test_overflow_does_not_cause_segfault() {
    let mut v = vec![];
    v.reserve_exact(!0); // !0 == usize::MAX
    v.push(1);
    v.push(2);
}
10413
// Same capacity-overflow check as above, but with a non-trivially-dropped
// element type (`Rc`) already in the vector.
#[test]
#[should_panic]
fn test_overflow_does_not_cause_segfault_managed() {
    let mut v = vec![Rc::new(1)];
    v.reserve_exact(!0);
    v.push(Rc::new(2));
}
10421
// `split_at_mut` yields two disjoint mutable halves that can be read and
// written independently; mutations land in the original array.
#[test]
fn test_mut_split_at() {
    let mut values = [1, 2, 3, 4, 5];
    {
        let (left, right) = values.split_at_mut(2);
        {
            let left: &[_] = left;
            assert!(left[..left.len()] == [1, 2]);
        }
        for p in left {
            *p += 1;
        }

        {
            let right: &[_] = right;
            assert!(right[..right.len()] == [3, 4, 5]);
        }
        for p in right {
            *p += 2;
        }
    }

    // left half +1, right half +2.
    assert!(values == [2, 3, 5, 6, 7]);
}
10446
// Zero-sized unit struct used to exercise iteration over ZST elements.
#[derive(Clone, PartialEq)]
struct Foo;
10449
// Iteration over zero-sized elements (`Foo`) must still visit each logical
// element exactly once, for `&`, sliced, `&mut`, by-value, and array forms.
#[test]
fn test_iter_zero_sized() {
    let mut v = vec![Foo, Foo, Foo];
    assert_eq!(v.len(), 3);
    let mut cnt = 0;

    for f in &v {
        assert!(*f == Foo);
        cnt += 1;
    }
    assert_eq!(cnt, 3);

    for f in &v[1..3] {
        assert!(*f == Foo);
        cnt += 1;
    }
    assert_eq!(cnt, 5);

    for f in &mut v {
        assert!(*f == Foo);
        cnt += 1;
    }
    assert_eq!(cnt, 8);

    // By-value iteration consumes `v`.
    for f in v {
        assert!(f == Foo);
        cnt += 1;
    }
    assert_eq!(cnt, 11);

    let xs: [Foo; 3] = [Foo, Foo, Foo];
    cnt = 0;
    for f in &xs {
        assert!(*f == Foo);
        cnt += 1;
    }
    assert!(cnt == 3);
}
10488
// `shrink_to_fit` reduces capacity to exactly `len` without changing
// contents. (Relies on the doubling growth strategy reaching capacity 128.)
#[test]
fn test_shrink_to_fit() {
    let mut xs = vec![0, 1, 2, 3];
    for i in 4..100 {
        xs.push(i)
    }
    assert_eq!(xs.capacity(), 128);
    xs.shrink_to_fit();
    assert_eq!(xs.capacity(), 100);
    assert_eq!(xs, (0..100).collect::<Vec<_>>());
}
10500
// `starts_with` truth table, including empty haystack/needle combinations
// (an empty needle is a prefix of everything).
#[test]
fn test_starts_with() {
    let empty: &[u8] = &[];
    let cases: &[(&[u8], &[u8], bool)] = &[
        (b"foobar", b"foo", true),
        (b"foobar", b"oob", false),
        (b"foobar", b"bar", false),
        (b"foo", b"foobar", false),
        (b"bar", b"foobar", false),
        (b"foobar", b"foobar", true),
        (empty, empty, true),
        (empty, b"foo", false),
        (b"foobar", empty, true),
    ];
    for &(haystack, needle, expected) in cases {
        assert_eq!(haystack.starts_with(needle), expected);
    }
}
10514
// `ends_with` truth table, including empty haystack/needle combinations
// (an empty needle is a suffix of everything).
#[test]
fn test_ends_with() {
    let empty: &[u8] = &[];
    let cases: &[(&[u8], &[u8], bool)] = &[
        (b"foobar", b"bar", true),
        (b"foobar", b"oba", false),
        (b"foobar", b"foo", false),
        (b"foo", b"foobar", false),
        (b"bar", b"foobar", false),
        (b"foobar", b"foobar", true),
        (empty, empty, true),
        (empty, b"foo", false),
        (b"foobar", empty, true),
    ];
    for &(haystack, needle, expected) in cases {
        assert_eq!(haystack.ends_with(needle), expected);
    }
}
10528
// `split_mut` yields disjoint mutable chunks; mutating each chunk in place
// must not touch the separator elements.
#[test]
fn test_mut_splitator() {
    let mut xs = [0, 1, 0, 2, 3, 0, 0, 4, 5, 0];
    // 5 separators => 6 chunks (including the empty boundary chunks).
    assert_eq!(xs.split_mut(|x| *x == 0).count(), 6);
    for slice in xs.split_mut(|x| *x == 0) {
        slice.reverse();
    }
    assert!(xs == [0, 1, 0, 3, 2, 0, 0, 5, 4, 0]);

    // `take(5)` leaves the final chunk ([6, 7]) untouched.
    let mut xs = [0, 1, 0, 2, 3, 0, 0, 4, 5, 0, 6, 7];
    for slice in xs.split_mut(|x| *x == 0).take(5) {
        slice.reverse();
    }
    assert!(xs == [0, 1, 0, 3, 2, 0, 0, 5, 4, 0, 6, 7]);
}
10544
// Reverse mutable split: `rev().take(4)` skips the front-most chunk
// ([1, 2]), which therefore stays in its original order.
#[test]
fn test_mut_splitator_rev() {
    let mut xs = [1, 2, 0, 3, 4, 0, 0, 5, 6, 0];
    for slice in xs.split_mut(|x| *x == 0).rev().take(4) {
        slice.reverse();
    }
    assert!(xs == [1, 2, 0, 4, 3, 0, 0, 6, 5, 0]);
}
10553
// `get_mut`: out-of-range index gives `None`; in-range gives a mutable
// reference whose writes land in the slice.
#[test]
fn test_get_mut() {
    let mut v = [0, 1, 2];
    assert!(v.get_mut(3).is_none());
    if let Some(slot) = v.get_mut(1) {
        *slot = 7;
    }
    assert_eq!(v[1], 7);
    let mut expected = 2;
    assert_eq!(v.get_mut(2), Some(&mut expected));
}
10563
// `chunks_mut(3)` over 7 elements: chunks [0..3], [3..6], [6..7]; writing
// the chunk index into every element shows the chunk boundaries.
#[test]
fn test_mut_chunks() {
    let mut v = [0, 1, 2, 3, 4, 5, 6];
    assert_eq!(v.chunks_mut(3).len(), 3);
    for (i, chunk) in v.chunks_mut(3).enumerate() {
        for x in chunk {
            *x = i as u8;
        }
    }
    let result = [0, 0, 0, 1, 1, 1, 2];
    assert_eq!(v, result);
}
10576
// Same as `test_mut_chunks`, but enumerated in reverse: the short trailing
// chunk gets index 0.
#[test]
fn test_mut_chunks_rev() {
    let mut v = [0, 1, 2, 3, 4, 5, 6];
    for (i, chunk) in v.chunks_mut(3).rev().enumerate() {
        for x in chunk {
            *x = i as u8;
        }
    }
    let result = [2, 2, 2, 1, 1, 1, 0];
    assert_eq!(v, result);
}
10588
// A chunk size of 0 must panic already at iterator construction.
#[test]
#[should_panic]
fn test_mut_chunks_0() {
    let mut v = [1, 2, 3, 4];
    let _it = v.chunks_mut(0);
}
10595
// `chunks_exact_mut(3)` over 7 elements: only the two full chunks are
// visited; the remainder element (6) is left untouched.
#[test]
fn test_mut_chunks_exact() {
    let mut v = [0, 1, 2, 3, 4, 5, 6];
    assert_eq!(v.chunks_exact_mut(3).len(), 2);
    for (i, chunk) in v.chunks_exact_mut(3).enumerate() {
        for x in chunk {
            *x = i as u8;
        }
    }
    let result = [0, 0, 0, 1, 1, 1, 6];
    assert_eq!(v, result);
}
10608
// Reverse enumeration of `chunks_exact_mut`: chunk indices flip, remainder
// element (6) still untouched.
#[test]
fn test_mut_chunks_exact_rev() {
    let mut v = [0, 1, 2, 3, 4, 5, 6];
    for (i, chunk) in v.chunks_exact_mut(3).rev().enumerate() {
        for x in chunk {
            *x = i as u8;
        }
    }
    let result = [1, 1, 1, 0, 0, 0, 6];
    assert_eq!(v, result);
}
10620
// A chunk size of 0 must panic already at iterator construction.
#[test]
#[should_panic]
fn test_mut_chunks_exact_0() {
    let mut v = [1, 2, 3, 4];
    let _it = v.chunks_exact_mut(0);
}
10627
// `rchunks_mut(3)` chunks from the back, so the short remainder is the
// single front element (written last, with index 2).
#[test]
fn test_mut_rchunks() {
    let mut v = [0, 1, 2, 3, 4, 5, 6];
    assert_eq!(v.rchunks_mut(3).len(), 3);
    for (i, chunk) in v.rchunks_mut(3).enumerate() {
        for x in chunk {
            *x = i as u8;
        }
    }
    let result = [2, 1, 1, 1, 0, 0, 0];
    assert_eq!(v, result);
}
10640
// Reverse enumeration of `rchunks_mut`: the front remainder gets index 0.
#[test]
fn test_mut_rchunks_rev() {
    let mut v = [0, 1, 2, 3, 4, 5, 6];
    for (i, chunk) in v.rchunks_mut(3).rev().enumerate() {
        for x in chunk {
            *x = i as u8;
        }
    }
    let result = [0, 1, 1, 1, 2, 2, 2];
    assert_eq!(v, result);
}
10652
// A chunk size of 0 must panic already at iterator construction.
#[test]
#[should_panic]
fn test_mut_rchunks_0() {
    let mut v = [1, 2, 3, 4];
    let _it = v.rchunks_mut(0);
}
10659
// `rchunks_exact_mut(3)`: only full chunks from the back are visited; the
// front remainder element (0) is left untouched.
#[test]
fn test_mut_rchunks_exact() {
    let mut v = [0, 1, 2, 3, 4, 5, 6];
    assert_eq!(v.rchunks_exact_mut(3).len(), 2);
    for (i, chunk) in v.rchunks_exact_mut(3).enumerate() {
        for x in chunk {
            *x = i as u8;
        }
    }
    let result = [0, 1, 1, 1, 0, 0, 0];
    assert_eq!(v, result);
}
10672
// Reverse enumeration of `rchunks_exact_mut`: indices flip, front
// remainder element (0) still untouched.
#[test]
fn test_mut_rchunks_exact_rev() {
    let mut v = [0, 1, 2, 3, 4, 5, 6];
    for (i, chunk) in v.rchunks_exact_mut(3).rev().enumerate() {
        for x in chunk {
            *x = i as u8;
        }
    }
    let result = [0, 0, 0, 0, 1, 1, 1];
    assert_eq!(v, result);
}
10684
// A chunk size of 0 must panic already at iterator construction.
#[test]
#[should_panic]
fn test_mut_rchunks_exact_0() {
    let mut v = [1, 2, 3, 4];
    let _it = v.rchunks_exact_mut(0);
}
10691
// `last_mut`: mutable reference to the final element, or `None` for an
// empty slice.
#[test]
fn test_mut_last() {
    let mut xs = [1, 2, 3, 4, 5];
    match xs.last_mut() {
        Some(last) => assert_eq!(*last, 5),
        None => panic!("non-empty slice must have a last element"),
    }

    let empty: &mut [i32] = &mut [];
    assert!(empty.last_mut().is_none());
}
10701
// `to_vec` on a boxed slice copies its elements into a fresh Vec.
#[test]
fn test_to_vec() {
    let xs: Box<_> = box [1, 2, 3];
    let ys = xs.to_vec();
    assert_eq!(ys, [1, 2, 3]);
}
10708
// The in-place iterator specialization should reuse the source allocation
// for an identity `map().collect()`; pointer equality proves no realloc.
#[test]
fn test_in_place_iterator_specialization() {
    let src: Box<[usize]> = box [1, 2, 3];
    let src_ptr = src.as_ptr();
    let sink: Box<_> = src.into_vec().into_iter().map(std::convert::identity).collect();
    let sink_ptr = sink.as_ptr();
    assert_eq!(src_ptr, sink_ptr);
}
10717
// Round-trip Vec -> boxed slice -> clone -> Vec preserves the contents.
#[test]
fn test_box_slice_clone() {
    let data = vec![vec![0, 1], vec![0], vec![1]];
    let data2 = data.clone().into_boxed_slice().clone().to_vec();

    assert_eq!(data, data2);
}
10725
// Panic safety of `Box<[T]>::clone`: if cloning an element panics midway,
// both the partially-built clone and the original must still drop all of
// their elements exactly once (verified via a shared drop counter).
#[test]
#[allow(unused_must_use)] // here, we care about the side effects of `.clone()`
#[cfg_attr(target_os = "emscripten", ignore)]
fn test_box_slice_clone_panics() {
    use std::sync::atomic::{AtomicUsize, Ordering};
    use std::sync::Arc;

    // Increments a shared counter on drop; optionally panics when cloned.
    struct Canary {
        count: Arc<AtomicUsize>,
        panics: bool,
    }

    impl Drop for Canary {
        fn drop(&mut self) {
            self.count.fetch_add(1, Ordering::SeqCst);
        }
    }

    impl Clone for Canary {
        fn clone(&self) -> Self {
            if self.panics {
                panic!()
            }

            Canary { count: self.count.clone(), panics: self.panics }
        }
    }

    let drop_count = Arc::new(AtomicUsize::new(0));
    let canary = Canary { count: drop_count.clone(), panics: false };
    let panic = Canary { count: drop_count.clone(), panics: true };

    std::panic::catch_unwind(move || {
        // When xs is dropped, +5.
        let xs =
            vec![canary.clone(), canary.clone(), canary.clone(), panic, canary].into_boxed_slice();

        // When panic is cloned, +3.
        xs.clone();
    })
    .unwrap_err();

    // Total = 8
    assert_eq!(drop_count.load(Ordering::SeqCst), 8);
}
10771
// `copy_from_slice` with equal lengths reproduces the source exactly.
#[test]
fn test_copy_from_slice() {
    let source = [0, 1, 2, 3, 4, 5];
    let mut destination = [0; 6];
    destination.copy_from_slice(&source);
    assert_eq!(destination, source);
}
10779
// Length mismatch (dst longer than src) must panic with the exact message.
#[test]
#[should_panic(expected = "source slice length (4) does not match destination slice length (5)")]
fn test_copy_from_slice_dst_longer() {
    let src = [0, 1, 2, 3];
    let mut dst = [0; 5];
    dst.copy_from_slice(&src);
}
10787
// Length mismatch (dst shorter than src) must panic with the exact message.
#[test]
#[should_panic(expected = "source slice length (4) does not match destination slice length (3)")]
fn test_copy_from_slice_dst_shorter() {
    let src = [0, 1, 2, 3];
    let mut dst = [0; 3];
    dst.copy_from_slice(&src);
}
10795
const MAX_LEN: usize = 80;

// One drop counter per element id used by the panic-safety sort tests below.
//
// `AtomicUsize` is not `Copy`, so `[AtomicUsize::new(0); MAX_LEN]` is
// rejected — but array-repeat of a *const item* instantiates the constant
// once per element, which replaces the old 80-line hand-written initializer
// (FIXME(RFC 1109)). Each array element is an independent atomic.
#[allow(clippy::declare_interior_mutable_const)]
const DROP_COUNT_ZERO: AtomicUsize = AtomicUsize::new(0);

static DROP_COUNTS: [AtomicUsize; MAX_LEN] = [DROP_COUNT_ZERO; MAX_LEN];
10881
// Net `version` bumps held by live `DropCounter`s; must return to zero once
// every counter (in its most recent version) has been dropped.
static VERSIONS: AtomicUsize = AtomicUsize::new(0);
10883
// Element type for the panic-safety sort tests: every comparison bumps the
// `version` counters, and dropping records into `DROP_COUNTS[id]`.
#[derive(Clone, Eq)]
struct DropCounter {
    x: u32,               // sort key
    id: usize,            // index into DROP_COUNTS
    version: Cell<usize>, // bumped on every comparison involving this value
}
10890
impl PartialEq for DropCounter {
    // Routed through `partial_cmp` so that equality checks are also counted
    // as comparisons (version bumps happen there).
    fn eq(&self, other: &Self) -> bool {
        self.partial_cmp(other) == Some(Ordering::Equal)
    }
}
10896
impl PartialOrd for DropCounter {
    // Side-effecting comparison: bumps both operands' versions and the
    // global tally, then compares by the `x` key.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.version.set(self.version.get() + 1);
        other.version.set(other.version.get() + 1);
        VERSIONS.fetch_add(2, Relaxed);
        self.x.partial_cmp(&other.x)
    }
}
10905
impl Ord for DropCounter {
    // Delegates to `partial_cmp`; `u32` ordering is total, so the unwrap
    // cannot fail.
    fn cmp(&self, other: &Self) -> Ordering {
        self.partial_cmp(other).unwrap()
    }
}
10911
impl Drop for DropCounter {
    // Record the drop and retire this value's version bumps from the global
    // tally; VERSIONS reaching zero means the latest versions were dropped.
    fn drop(&mut self) {
        DROP_COUNTS[self.id].fetch_add(1, Relaxed);
        VERSIONS.fetch_sub(self.version.get(), Relaxed);
    }
}
10918
// Panic-safety harness: runs the `sort_by`-style method `$func` on `$input`
// once to count comparisons, then re-runs it with a comparator that panics
// at every possible comparison index, asserting that each element is
// dropped exactly once and that the most recent versions were dropped.
macro_rules! test {
    ($input:ident, $func:ident) => {
        let len = $input.len();

        // Work out the total number of comparisons required to sort
        // this array...
        let mut count = 0usize;
        $input.to_owned().$func(|a, b| {
            count += 1;
            a.cmp(b)
        });

        // ... and then panic on each and every single one.
        for panic_countdown in 0..count {
            // Refresh the counters.
            VERSIONS.store(0, Relaxed);
            for i in 0..len {
                DROP_COUNTS[i].store(0, Relaxed);
            }

            let v = $input.to_owned();
            let _ = std::panic::catch_unwind(move || {
                let mut v = v;
                let mut panic_countdown = panic_countdown;
                v.$func(|a, b| {
                    if panic_countdown == 0 {
                        SILENCE_PANIC.with(|s| s.set(true));
                        panic!();
                    }
                    panic_countdown -= 1;
                    a.cmp(b)
                })
            });

            // Check that the number of things dropped is exactly
            // what we expect (i.e., the contents of `v`).
            for (i, c) in DROP_COUNTS.iter().enumerate().take(len) {
                let count = c.load(Relaxed);
                assert!(count == 1, "found drop count == {} for i == {}, len == {}", count, i, len);
            }

            // Check that the most recent versions of values were dropped.
            assert_eq!(VERSIONS.load(Relaxed), 0);
        }
    };
}
10965
// When set, the custom hook in `panic_safe` suppresses the default panic
// output for the deliberately-triggered comparator panics.
thread_local!(static SILENCE_PANIC: Cell<bool> = Cell::new(false));
10967
// Stress-tests panic safety of both stable and unstable sort over many
// lengths, key moduli, and run-heavy inputs, via the `test!` macro above.
#[test]
#[cfg_attr(target_os = "emscripten", ignore)] // no threads
fn panic_safe() {
    // Install a hook that silences only the panics we trigger on purpose.
    let prev = panic::take_hook();
    panic::set_hook(Box::new(move |info| {
        if !SILENCE_PANIC.with(|s| s.get()) {
            prev(info);
        }
    }));

    let mut rng = thread_rng();

    // Miri is too slow (but still need to `chain` to make the types match)
    let lens = if cfg!(miri) { (1..10).chain(0..0) } else { (1..20).chain(70..MAX_LEN) };
    let moduli: &[u32] = if cfg!(miri) { &[5] } else { &[5, 20, 50] };

    for len in lens {
        for &modulus in moduli {
            for &has_runs in &[false, true] {
                let mut input = (0..len)
                    .map(|id| DropCounter {
                        x: rng.next_u32() % modulus,
                        id: id,
                        version: Cell::new(0),
                    })
                    .collect::<Vec<_>>();

                if has_runs {
                    // Start from sorted data, then disturb it with a few
                    // random reversals/swaps to create ascending runs.
                    for c in &mut input {
                        c.x = c.id as u32;
                    }

                    for _ in 0..5 {
                        let a = rng.gen::<usize>() % len;
                        let b = rng.gen::<usize>() % len;
                        if a < b {
                            input[a..b].reverse();
                        } else {
                            input.swap(a, b);
                        }
                    }
                }

                test!(input, sort_by);
                test!(input, sort_unstable_by);
            }
        }
    }

    // Set default panic hook again.
    drop(panic::take_hook());
}
11020
// `[T]::repeat(n)` concatenates `n` copies of the slice; `n == 0` yields an
// empty Vec and `n == 1` yields a plain copy.
#[test]
fn repeat_generic_slice() {
    let base = [1, 2, 3, 4];

    assert_eq!([1, 2].repeat(2), [1, 2, 1, 2]);
    assert_eq!(base.repeat(0), Vec::<i32>::new());
    assert_eq!(base.repeat(1), base.to_vec());

    let mut expected_triple = Vec::new();
    for _ in 0..3 {
        expected_triple.extend_from_slice(&base);
    }
    assert_eq!(base.repeat(3), expected_triple);
}
11028
#[test]
#[allow(unreachable_patterns)]
fn subslice_patterns() {
    // This test comprehensively checks the passing static and dynamic semantics
    // of subslice patterns `..`, `x @ ..`, `ref x @ ..`, and `ref mut x @ ..`
    // in slice patterns `[$($pat),* $(,)?]`.

    #[derive(PartialEq, Debug, Clone)]
    struct N(u8);

    // n![a, b, ...] builds an array of `N` values from the given u8 literals.
    macro_rules! n {
        ($($e:expr),* $(,)?) => {
            [$(N($e)),*]
        }
    }

    // c!(inp, T, out) asserts that `inp`, coerced to type `T` via
    // `identity::<T>`, equals `out` — checking both the binding's type and
    // its value.
    macro_rules! m {
        ($e:expr, $p:pat => $b:expr) => {
            match $e {
                $p => $b,
                _ => panic!(),
            }
        };
    }

    // == Slices ==

    // Matching slices using `ref` patterns:
    let mut v = vec![N(0), N(1), N(2), N(3), N(4)];
    let mut vc = (0..=4).collect::<Vec<u8>>();

    let [..] = v[..]; // Always matches.
    m!(v[..], [N(0), ref sub @ .., N(4)] => c!(sub, &[N], n![1, 2, 3]));
    m!(v[..], [N(0), ref sub @ ..] => c!(sub, &[N], n![1, 2, 3, 4]));
    m!(v[..], [ref sub @ .., N(4)] => c!(sub, &[N], n![0, 1, 2, 3]));
    m!(v[..], [ref sub @ .., _, _, _, _, _] => c!(sub, &[N], &n![] as &[N]));
    m!(v[..], [_, _, _, _, _, ref sub @ ..] => c!(sub, &[N], &n![] as &[N]));
    m!(vc[..], [x, .., y] => c!((x, y), (u8, u8), (0, 4)));

    // Matching slices using `ref mut` patterns:
    let [..] = v[..]; // Always matches.
    m!(v[..], [N(0), ref mut sub @ .., N(4)] => c!(sub, &mut [N], n![1, 2, 3]));
    m!(v[..], [N(0), ref mut sub @ ..] => c!(sub, &mut [N], n![1, 2, 3, 4]));
    m!(v[..], [ref mut sub @ .., N(4)] => c!(sub, &mut [N], n![0, 1, 2, 3]));
    m!(v[..], [ref mut sub @ .., _, _, _, _, _] => c!(sub, &mut [N], &mut n![] as &mut [N]));
    m!(v[..], [_, _, _, _, _, ref mut sub @ ..] => c!(sub, &mut [N], &mut n![] as &mut [N]));
    m!(vc[..], [x, .., y] => c!((x, y), (u8, u8), (0, 4)));

    // Matching slices using default binding modes (&):
    let [..] = &v[..]; // Always matches.
    m!(&v[..], [N(0), sub @ .., N(4)] => c!(sub, &[N], n![1, 2, 3]));
    m!(&v[..], [N(0), sub @ ..] => c!(sub, &[N], n![1, 2, 3, 4]));
    m!(&v[..], [sub @ .., N(4)] => c!(sub, &[N], n![0, 1, 2, 3]));
    m!(&v[..], [sub @ .., _, _, _, _, _] => c!(sub, &[N], &n![] as &[N]));
    m!(&v[..], [_, _, _, _, _, sub @ ..] => c!(sub, &[N], &n![] as &[N]));
    m!(&vc[..], [x, .., y] => c!((x, y), (&u8, &u8), (&0, &4)));

    // Matching slices using default binding modes (&mut):
    let [..] = &mut v[..]; // Always matches.
    m!(&mut v[..], [N(0), sub @ .., N(4)] => c!(sub, &mut [N], n![1, 2, 3]));
    m!(&mut v[..], [N(0), sub @ ..] => c!(sub, &mut [N], n![1, 2, 3, 4]));
    m!(&mut v[..], [sub @ .., N(4)] => c!(sub, &mut [N], n![0, 1, 2, 3]));
    m!(&mut v[..], [sub @ .., _, _, _, _, _] => c!(sub, &mut [N], &mut n![] as &mut [N]));
    m!(&mut v[..], [_, _, _, _, _, sub @ ..] => c!(sub, &mut [N], &mut n![] as &mut [N]));
    m!(&mut vc[..], [x, .., y] => c!((x, y), (&mut u8, &mut u8), (&mut 0, &mut 4)));

    // == Arrays ==
    let mut v = n![0, 1, 2, 3, 4];
    let vc = [0, 1, 2, 3, 4];

    // Matching arrays by value:
    m!(v.clone(), [N(0), sub @ .., N(4)] => c!(sub, [N; 3], n![1, 2, 3]));
    m!(v.clone(), [N(0), sub @ ..] => c!(sub, [N; 4], n![1, 2, 3, 4]));
    m!(v.clone(), [sub @ .., N(4)] => c!(sub, [N; 4], n![0, 1, 2, 3]));
    m!(v.clone(), [sub @ .., _, _, _, _, _] => c!(sub, [N; 0], n![] as [N; 0]));
    m!(v.clone(), [_, _, _, _, _, sub @ ..] => c!(sub, [N; 0], n![] as [N; 0]));
    m!(v.clone(), [x, .., y] => c!((x, y), (N, N), (N(0), N(4))));
    m!(v.clone(), [..] => ());

    // Matching arrays by ref patterns:
    m!(v, [N(0), ref sub @ .., N(4)] => c!(sub, &[N; 3], &n![1, 2, 3]));
    m!(v, [N(0), ref sub @ ..] => c!(sub, &[N; 4], &n![1, 2, 3, 4]));
    m!(v, [ref sub @ .., N(4)] => c!(sub, &[N; 4], &n![0, 1, 2, 3]));
    m!(v, [ref sub @ .., _, _, _, _, _] => c!(sub, &[N; 0], &n![] as &[N; 0]));
    m!(v, [_, _, _, _, _, ref sub @ ..] => c!(sub, &[N; 0], &n![] as &[N; 0]));
    m!(vc, [x, .., y] => c!((x, y), (u8, u8), (0, 4)));

    // Matching arrays by ref mut patterns:
    m!(v, [N(0), ref mut sub @ .., N(4)] => c!(sub, &mut [N; 3], &mut n![1, 2, 3]));
    m!(v, [N(0), ref mut sub @ ..] => c!(sub, &mut [N; 4], &mut n![1, 2, 3, 4]));
    m!(v, [ref mut sub @ .., N(4)] => c!(sub, &mut [N; 4], &mut n![0, 1, 2, 3]));
    m!(v, [ref mut sub @ .., _, _, _, _, _] => c!(sub, &mut [N; 0], &mut n![] as &mut [N; 0]));
    m!(v, [_, _, _, _, _, ref mut sub @ ..] => c!(sub, &mut [N; 0], &mut n![] as &mut [N; 0]));

    // Matching arrays by default binding modes (&):
    m!(&v, [N(0), sub @ .., N(4)] => c!(sub, &[N; 3], &n![1, 2, 3]));
    m!(&v, [N(0), sub @ ..] => c!(sub, &[N; 4], &n![1, 2, 3, 4]));
    m!(&v, [sub @ .., N(4)] => c!(sub, &[N; 4], &n![0, 1, 2, 3]));
    m!(&v, [sub @ .., _, _, _, _, _] => c!(sub, &[N; 0], &n![] as &[N; 0]));
    m!(&v, [_, _, _, _, _, sub @ ..] => c!(sub, &[N; 0], &n![] as &[N; 0]));
    m!(&v, [..] => ());
    m!(&v, [x, .., y] => c!((x, y), (&N, &N), (&N(0), &N(4))));

    // Matching arrays by default binding modes (&mut):
    m!(&mut v, [N(0), sub @ .., N(4)] => c!(sub, &mut [N; 3], &mut n![1, 2, 3]));
    m!(&mut v, [N(0), sub @ ..] => c!(sub, &mut [N; 4], &mut n![1, 2, 3, 4]));
    m!(&mut v, [sub @ .., N(4)] => c!(sub, &mut [N; 4], &mut n![0, 1, 2, 3]));
    m!(&mut v, [sub @ .., _, _, _, _, _] => c!(sub, &mut [N; 0], &mut n![] as &[N; 0]));
    m!(&mut v, [_, _, _, _, _, sub @ ..] => c!(sub, &mut [N; 0], &mut n![] as &[N; 0]));
    m!(&mut v, [..] => ());
    m!(&mut v, [x, .., y] => c!((x, y), (&mut N, &mut N), (&mut N(0), &mut N(4))));
}
11147
#[test]
fn test_group_by() {
    // Runs of equal adjacent elements: [1,1,1] [3,3] [2,2,2] [1] [0].
    let slice = &[1, 1, 1, 3, 3, 2, 2, 2, 1, 0];

    // Forward iteration yields the runs left to right.
    let mut groups = slice.group_by(|a, b| a == b);
    assert_eq!(groups.next(), Some(&[1, 1, 1][..]));
    assert_eq!(groups.next(), Some(&[3, 3][..]));
    assert_eq!(groups.next(), Some(&[2, 2, 2][..]));
    assert_eq!(groups.next(), Some(&[1][..]));
    assert_eq!(groups.next(), Some(&[0][..]));
    assert_eq!(groups.next(), None);

    // Backward iteration yields the same runs right to left.
    let mut groups = slice.group_by(|a, b| a == b);
    assert_eq!(groups.next_back(), Some(&[0][..]));
    assert_eq!(groups.next_back(), Some(&[1][..]));
    assert_eq!(groups.next_back(), Some(&[2, 2, 2][..]));
    assert_eq!(groups.next_back(), Some(&[3, 3][..]));
    assert_eq!(groups.next_back(), Some(&[1, 1, 1][..]));
    assert_eq!(groups.next_back(), None);

    // Alternating ends meet in the middle without overlapping.
    let mut groups = slice.group_by(|a, b| a == b);
    assert_eq!(groups.next(), Some(&[1, 1, 1][..]));
    assert_eq!(groups.next_back(), Some(&[0][..]));
    assert_eq!(groups.next(), Some(&[3, 3][..]));
    assert_eq!(groups.next_back(), Some(&[1][..]));
    assert_eq!(groups.next(), Some(&[2, 2, 2][..]));
    assert_eq!(groups.next_back(), None);
}
11176
#[test]
fn test_group_by_mut() {
    // Same runs as `test_group_by`, but yielded as mutable subslices.
    let slice = &mut [1, 1, 1, 3, 3, 2, 2, 2, 1, 0];

    // Forward iteration yields the runs left to right.
    let mut groups = slice.group_by_mut(|a, b| a == b);
    assert_eq!(groups.next(), Some(&mut [1, 1, 1][..]));
    assert_eq!(groups.next(), Some(&mut [3, 3][..]));
    assert_eq!(groups.next(), Some(&mut [2, 2, 2][..]));
    assert_eq!(groups.next(), Some(&mut [1][..]));
    assert_eq!(groups.next(), Some(&mut [0][..]));
    assert_eq!(groups.next(), None);

    // Backward iteration yields the same runs right to left.
    let mut groups = slice.group_by_mut(|a, b| a == b);
    assert_eq!(groups.next_back(), Some(&mut [0][..]));
    assert_eq!(groups.next_back(), Some(&mut [1][..]));
    assert_eq!(groups.next_back(), Some(&mut [2, 2, 2][..]));
    assert_eq!(groups.next_back(), Some(&mut [3, 3][..]));
    assert_eq!(groups.next_back(), Some(&mut [1, 1, 1][..]));
    assert_eq!(groups.next_back(), None);

    // Alternating ends meet in the middle without overlapping.
    let mut groups = slice.group_by_mut(|a, b| a == b);
    assert_eq!(groups.next(), Some(&mut [1, 1, 1][..]));
    assert_eq!(groups.next_back(), Some(&mut [0][..]));
    assert_eq!(groups.next(), Some(&mut [3, 3][..]));
    assert_eq!(groups.next_back(), Some(&mut [1][..]));
    assert_eq!(groups.next(), Some(&mut [2, 2, 2][..]));
    assert_eq!(groups.next_back(), None);
}
11205use std::collections::TryReserveError::*;
11206use std::collections::{vec_deque::Drain, VecDeque};
11207use std::fmt::Debug;
11208use std::mem::size_of;
11209use std::ops::Bound::*;
11210use std::panic::{catch_unwind, AssertUnwindSafe};
11211
11212use crate::hash;
11213
11214use Taggy::*;
11215use Taggypar::*;
11216
#[test]
fn test_simple() {
    // Mixed pushes from both ends, then drain everything and refill.
    let mut deque = VecDeque::new();
    assert_eq!(deque.len(), 0);
    deque.push_front(17);
    deque.push_front(42);
    deque.push_back(137);
    assert_eq!(deque.len(), 3);
    deque.push_back(137);
    assert_eq!(deque.len(), 4);
    assert_eq!(*deque.front().unwrap(), 42);
    assert_eq!(*deque.back().unwrap(), 137);
    // Pop from both ends; the values come back in deque order.
    assert_eq!(deque.pop_front(), Some(42));
    assert_eq!(deque.pop_back(), Some(137));
    assert_eq!(deque.pop_back(), Some(137));
    assert_eq!(deque.pop_back(), Some(17));
    assert_eq!(deque.len(), 0);
    // Refill with interleaved front/back pushes so the result is [1, 2, 3, 4].
    deque.push_back(3);
    assert_eq!(deque.len(), 1);
    deque.push_front(2);
    assert_eq!(deque.len(), 2);
    deque.push_back(4);
    assert_eq!(deque.len(), 3);
    deque.push_front(1);
    assert_eq!(deque.len(), 4);
    for (idx, expected) in (1..=4).enumerate() {
        assert_eq!(deque[idx], expected);
    }
}
11251
/// Drives the same push/pop/index scenario as `test_simple`, but for an
/// arbitrary element type; callers supply four distinct values a..d.
fn test_parameterized<T: Clone + PartialEq + Debug>(a: T, b: T, c: T, d: T) {
    let mut deque = VecDeque::new();
    assert_eq!(deque.len(), 0);
    deque.push_front(a.clone());
    deque.push_front(b.clone());
    deque.push_back(c.clone());
    assert_eq!(deque.len(), 3);
    deque.push_back(d.clone());
    assert_eq!(deque.len(), 4);
    // Front is the last front-push, back is the last back-push.
    assert_eq!((*deque.front().unwrap()).clone(), b.clone());
    assert_eq!((*deque.back().unwrap()).clone(), d.clone());
    // Drain from both ends.
    assert_eq!(deque.pop_front().unwrap(), b.clone());
    assert_eq!(deque.pop_back().unwrap(), d.clone());
    assert_eq!(deque.pop_back().unwrap(), c.clone());
    assert_eq!(deque.pop_back().unwrap(), a.clone());
    assert_eq!(deque.len(), 0);
    // Refill so the deque reads a, b, c, d front to back.
    deque.push_back(c.clone());
    assert_eq!(deque.len(), 1);
    deque.push_front(b.clone());
    assert_eq!(deque.len(), 2);
    deque.push_back(d.clone());
    assert_eq!(deque.len(), 3);
    deque.push_front(a.clone());
    assert_eq!(deque.len(), 4);
    assert_eq!(deque[0].clone(), a.clone());
    assert_eq!(deque[1].clone(), b.clone());
    assert_eq!(deque[2].clone(), c.clone());
    assert_eq!(deque[3].clone(), d.clone());
}
11281
#[test]
fn test_push_front_grow() {
    // Push through the front often enough to force reallocation.
    let mut deque = VecDeque::new();
    for i in 0..66 {
        deque.push_front(i);
    }
    assert_eq!(deque.len(), 66);

    // Indexing sees the reverse of insertion order.
    for i in 0..66 {
        assert_eq!(deque[i], 65 - i);
    }

    // Pushing the same count through the back preserves insertion order.
    let mut deque = VecDeque::new();
    deque.extend(0..66);

    for i in 0..66 {
        assert_eq!(deque[i], i);
    }
}
11303
#[test]
fn test_index() {
    // Front-pushing 1, 2, 3 leaves the deque reading [3, 2, 1].
    let mut deque = VecDeque::new();
    (1..4).for_each(|i| deque.push_front(i));
    assert_eq!(deque[1], 2);
}
11312
#[test]
#[should_panic]
fn test_index_out_of_bounds() {
    // The deque holds three elements, so index 3 is one past the end
    // and indexing must panic.
    let mut deque = VecDeque::new();
    (1..4).for_each(|i| deque.push_front(i));
    deque[3];
}
11322
#[test]
#[should_panic]
fn test_range_start_overflow() {
    // An inclusive end of usize::MAX cannot be converted to an exclusive
    // end without overflowing, so `range` must panic.
    let deque: VecDeque<_> = (1..=3).collect();
    deque.range((Included(0), Included(usize::MAX)));
}
11329
#[test]
#[should_panic]
fn test_range_end_overflow() {
    // An exclusive start of usize::MAX overflows when bumped to the first
    // included index, so `range` must panic.
    let deque: VecDeque<_> = (1..=3).collect();
    deque.range((Excluded(usize::MAX), Included(0)));
}
11336
/// Plain enum payload for the parameterized VecDeque tests; derives the
/// traits `test_parameterized` requires (Clone + PartialEq + Debug).
#[derive(Clone, PartialEq, Debug)]
enum Taggy {
    One(i32),
    Two(i32, i32),
    Three(i32, i32, i32),
}
11343
/// Generic counterpart of `Taggy`, used to exercise VecDeque with a
/// type-parameterized enum payload.
#[derive(Clone, PartialEq, Debug)]
enum Taggypar<T> {
    Onepar(T),
    Twopar(T, T),
    Threepar(T, T, T),
}
11350
/// Struct payload mixing plain fields with an enum field, used to
/// exercise VecDeque with a compound element type.
#[derive(Clone, PartialEq, Debug)]
struct RecCy {
    x: i32,
    y: i32,
    t: Taggy,
}
11357
// `test_parameterized` with a plain Copy scalar element type.
#[test]
fn test_param_int() {
    test_parameterized::<i32>(5, 72, 64, 175);
}
11362
// `test_parameterized` with a non-generic enum element type.
#[test]
fn test_param_taggy() {
    test_parameterized::<Taggy>(One(1), Two(1, 2), Three(1, 2, 3), Two(17, 42));
}
11367
// `test_parameterized` with a generic enum element type instantiated at i32.
#[test]
fn test_param_taggypar() {
    test_parameterized::<Taggypar<i32>>(
        Onepar::<i32>(1),
        Twopar::<i32>(1, 2),
        Threepar::<i32>(1, 2, 3),
        Twopar::<i32>(17, 42),
    );
}
11377
// `test_parameterized` with a struct element containing an enum field.
#[test]
fn test_param_reccy() {
    let reccy1 = RecCy { x: 1, y: 2, t: One(1) };
    let reccy2 = RecCy { x: 345, y: 2, t: Two(1, 2) };
    let reccy3 = RecCy { x: 1, y: 777, t: Three(1, 2, 3) };
    let reccy4 = RecCy { x: 19, y: 252, t: Two(17, 42) };
    test_parameterized::<RecCy>(reccy1, reccy2, reccy3, reccy4);
}
11386
#[test]
fn test_with_capacity() {
    // A zero-capacity deque must still accept pushes.
    let mut deque = VecDeque::with_capacity(0);
    deque.push_back(1);
    assert_eq!(deque.len(), 1);

    // A generous preallocation behaves identically.
    let mut deque = VecDeque::with_capacity(50);
    deque.push_back(1);
    assert_eq!(deque.len(), 1);
}
11396
#[test]
fn test_with_capacity_non_power_two() {
    // Regression test: capacities that are not powers of two used to
    // expose incorrect growth assumptions (detailed below). The exact
    // sequence of pushes and pops matters — do not reorder.
    let mut d3 = VecDeque::with_capacity(3);
    d3.push_back(1);

    // X = None, | = lo
    // [|1, X, X]
    assert_eq!(d3.pop_front(), Some(1));
    // [X, |X, X]
    assert_eq!(d3.front(), None);

    // [X, |3, X]
    d3.push_back(3);
    // [X, |3, 6]
    d3.push_back(6);
    // [X, X, |6]
    assert_eq!(d3.pop_front(), Some(3));

    // Pushing the lo past half way point to trigger
    // the 'B' scenario for growth
    // [9, X, |6]
    d3.push_back(9);
    // [9, 12, |6]
    d3.push_back(12);

    d3.push_back(15);
    // There used to be a bug here about how the
    // VecDeque made growth assumptions about the
    // underlying Vec which didn't hold and lead
    // to corruption.
    // (Vec grows to next power of two)
    // good- [9, 12, 15, X, X, X, X, |6]
    // bug-  [15, 12, X, X, X, |6, X, X]
    assert_eq!(d3.pop_front(), Some(6));

    // Which leads us to the following state which
    // would be a failure case.
    // bug-  [15, 12, X, X, X, X, |X, X]
    assert_eq!(d3.front(), Some(&9));
}
11437
#[test]
fn test_reserve_exact() {
    // One element plus an exact reservation of 50 more must yield
    // capacity for at least 51.
    let mut deque = VecDeque::new();
    deque.push_back(0);
    deque.reserve_exact(50);
    assert!(deque.capacity() >= 51);
}
11445
#[test]
fn test_reserve() {
    // One element plus a reservation of 50 more must yield capacity
    // for at least 51.
    let mut deque = VecDeque::new();
    deque.push_back(0);
    deque.reserve(50);
    assert!(deque.capacity() >= 51);
}
11453
#[test]
fn test_swap() {
    // Drop the leading 0 so the deque reads [1, 2, 3, 4], then swap
    // the two ends by logical index.
    let mut deque: VecDeque<_> = (0..5).collect();
    deque.pop_front();
    deque.swap(0, 3);
    let contents: Vec<_> = deque.into_iter().collect();
    assert_eq!(contents, [4, 2, 3, 1]);
}
11461
#[test]
fn test_iter() {
    let mut deque = VecDeque::new();
    assert_eq!(deque.iter().next(), None);
    assert_eq!(deque.iter().size_hint(), (0, Some(0)));

    // Contiguous contents iterate front to back.
    deque.extend(0..5);
    let expected: &[_] = &[&0, &1, &2, &3, &4];
    assert_eq!(deque.iter().collect::<Vec<_>>(), expected);

    // Front pushes wrap the buffer; iteration order is still logical.
    for i in 6..9 {
        deque.push_front(i);
    }
    let expected: &[_] = &[&8, &7, &6, &0, &1, &2, &3, &4];
    assert_eq!(deque.iter().collect::<Vec<_>>(), expected);

    // size_hint stays exact while the iterator is consumed.
    let mut iter = deque.iter();
    let mut remaining = deque.len();
    while iter.next().is_some() {
        remaining -= 1;
        assert_eq!(iter.size_hint(), (remaining, Some(remaining)));
    }
}
11496
#[test]
fn test_rev_iter() {
    let mut deque = VecDeque::new();
    assert!(deque.iter().rev().next().is_none());

    // Contiguous contents reverse-iterate back to front.
    deque.extend(0..5);
    let expected: &[_] = &[&4, &3, &2, &1, &0];
    assert_eq!(deque.iter().rev().collect::<Vec<_>>(), expected);

    // After front pushes, reverse order is back half first, then 6, 7, 8.
    for i in 6..9 {
        deque.push_front(i);
    }
    let expected: &[_] = &[&4, &3, &2, &1, &0, &6, &7, &8];
    assert_eq!(deque.iter().rev().collect::<Vec<_>>(), expected);
}
11516
#[test]
fn test_mut_rev_iter_wrap() {
    let mut deque = VecDeque::with_capacity(3);
    assert!(deque.iter_mut().rev().next().is_none());

    // Fill, pop one from the front, push again so the buffer wraps.
    deque.extend(1..4);
    assert_eq!(deque.pop_front(), Some(1));
    deque.push_back(4);

    let reversed: Vec<_> = deque.iter_mut().rev().map(|x| *x).collect();
    assert_eq!(reversed, vec![4, 3, 2]);
}
11530
#[test]
fn test_mut_iter() {
    let mut deque = VecDeque::new();
    assert!(deque.iter_mut().next().is_none());

    for i in 0..3 {
        deque.push_front(i);
    }

    // The deque reads [2, 1, 0]; overwrite each slot with its position.
    for (i, slot) in deque.iter_mut().enumerate() {
        assert_eq!(*slot, 2 - i);
        *slot = i;
    }

    // The deque now reads [0, 1, 2].
    let mut iter = deque.iter_mut();
    assert_eq!(iter.next().map(|x| *x), Some(0));
    assert_eq!(iter.next().map(|x| *x), Some(1));
    assert_eq!(iter.next().map(|x| *x), Some(2));
    assert!(iter.next().is_none());
}
11553
#[test]
fn test_mut_rev_iter() {
    let mut deque = VecDeque::new();
    assert!(deque.iter_mut().rev().next().is_none());

    for i in 0..3 {
        deque.push_front(i);
    }

    // Reversed, the deque reads [0, 1, 2]; store each position back.
    for (i, slot) in deque.iter_mut().rev().enumerate() {
        assert_eq!(*slot, i);
        *slot = i;
    }

    // Reversed again, the stored positions come back in order.
    let mut iter = deque.iter_mut().rev();
    assert_eq!(iter.next().map(|x| *x), Some(0));
    assert_eq!(iter.next().map(|x| *x), Some(1));
    assert_eq!(iter.next().map(|x| *x), Some(2));
    assert!(iter.next().is_none());
}
11576
#[test]
fn test_into_iter() {
    // An empty deque's iterator is immediately exhausted.
    {
        let deque: VecDeque<i32> = VecDeque::new();
        let mut iter = deque.into_iter();

        assert_eq!(iter.size_hint(), (0, Some(0)));
        assert_eq!(iter.next(), None);
        assert_eq!(iter.size_hint(), (0, Some(0)));
    }

    // Contiguous contents iterate front to back.
    {
        let mut deque = VecDeque::new();
        deque.extend(0..5);
        assert_eq!(deque.into_iter().collect::<Vec<_>>(), vec![0, 1, 2, 3, 4]);
    }

    // Wrapped contents still iterate in logical order.
    {
        let mut deque = VecDeque::new();
        deque.extend(0..5);
        for i in 6..9 {
            deque.push_front(i);
        }
        assert_eq!(deque.into_iter().collect::<Vec<_>>(), vec![8, 7, 6, 0, 1, 2, 3, 4]);
    }

    // Consuming from both ends keeps size_hint exact.
    {
        let mut deque = VecDeque::new();
        deque.extend(0..5);
        for i in 6..9 {
            deque.push_front(i);
        }

        let mut iter = deque.into_iter();
        assert_eq!(iter.size_hint(), (8, Some(8)));
        assert_eq!(iter.next(), Some(8));
        assert_eq!(iter.size_hint(), (7, Some(7)));
        assert_eq!(iter.next_back(), Some(4));
        assert_eq!(iter.size_hint(), (6, Some(6)));
        assert_eq!(iter.next(), Some(7));
        assert_eq!(iter.size_hint(), (5, Some(5)));
    }
}
11634
#[test]
fn test_drain() {
    // Draining an empty deque yields nothing and leaves it empty.
    {
        let mut deque: VecDeque<i32> = VecDeque::new();

        {
            let mut iter = deque.drain(..);

            assert_eq!(iter.size_hint(), (0, Some(0)));
            assert_eq!(iter.next(), None);
            assert_eq!(iter.size_hint(), (0, Some(0)));
        }

        assert!(deque.is_empty());
    }

    // Contiguous contents drain front to back.
    {
        let mut deque = VecDeque::new();
        deque.extend(0..5);

        assert_eq!(deque.drain(..).collect::<Vec<_>>(), [0, 1, 2, 3, 4]);
        assert!(deque.is_empty());
    }

    // Wrapped contents drain in logical order as well.
    {
        let mut deque = VecDeque::new();
        deque.extend(0..5);
        for i in 6..9 {
            deque.push_front(i);
        }

        assert_eq!(deque.drain(..).collect::<Vec<_>>(), [8, 7, 6, 0, 1, 2, 3, 4]);
        assert!(deque.is_empty());
    }

    // A drain consumed from both ends still empties the deque when dropped.
    {
        let mut deque: VecDeque<_> = VecDeque::new();
        deque.extend(0..5);
        for i in 6..9 {
            deque.push_front(i);
        }

        {
            let mut iter = deque.drain(..);
            assert_eq!(iter.size_hint(), (8, Some(8)));
            assert_eq!(iter.next(), Some(8));
            assert_eq!(iter.size_hint(), (7, Some(7)));
            assert_eq!(iter.next_back(), Some(4));
            assert_eq!(iter.size_hint(), (6, Some(6)));
            assert_eq!(iter.next(), Some(7));
            assert_eq!(iter.size_hint(), (5, Some(5)));
        }
        assert!(deque.is_empty());
    }
}
11700
#[test]
fn test_from_iter() {
    // Round-tripping Vec -> VecDeque -> Vec preserves contents and order.
    let v = vec![1, 2, 3, 4, 5, 6, 7];
    let deque: VecDeque<_> = v.iter().cloned().collect();
    let round_trip: Vec<_> = deque.iter().cloned().collect();
    assert_eq!(round_trip, v);

    // Collecting a longer lazy sequence also preserves order.
    let deque: VecDeque<_> = (0..).step_by(2).take(256).collect();
    for (i, &x) in deque.iter().enumerate() {
        assert_eq!(2 * i, x);
    }
    assert_eq!(deque.len(), 256);
}
11715
#[test]
fn test_clone() {
    let mut original = VecDeque::new();
    original.push_front(17);
    original.push_front(42);
    original.push_back(137);
    original.push_back(137);
    assert_eq!(original.len(), 4);

    // The clone must pop the same values in the same order.
    let mut copy = original.clone();
    assert_eq!(copy.len(), 4);
    while let Some(value) = original.pop_back() {
        assert_eq!(Some(value), copy.pop_back());
    }
    assert_eq!(original.len(), 0);
    assert_eq!(copy.len(), 0);
}
11732
#[test]
fn test_eq() {
    // Empty deques compare equal regardless of capacity.
    let mut lhs = VecDeque::new();
    assert!(lhs == VecDeque::with_capacity(0));

    // Build [42, 17, 137, 137] via a mix of front and back pushes...
    lhs.push_front(137);
    lhs.push_front(17);
    lhs.push_front(42);
    lhs.push_back(137);

    // ...and the same contents via back pushes only; layout must not matter.
    let mut rhs = VecDeque::with_capacity(0);
    for &x in &[42, 17, 137, 137] {
        rhs.push_back(x);
    }
    assert!(&rhs == &lhs);

    // Replacing one element breaks equality.
    rhs.pop_back();
    rhs.push_back(0);
    assert!(rhs != lhs);

    rhs.clear();
    assert!(rhs == VecDeque::new());
}
11753
#[test]
fn test_partial_eq_array() {
    // Empty deque compares equal to an empty array.
    let deque = VecDeque::<char>::new();
    assert!(deque == []);

    // One element via push_front.
    let mut deque = VecDeque::new();
    deque.push_front('a');
    assert!(deque == ['a']);

    // One element via push_back.
    let mut deque = VecDeque::new();
    deque.push_back('a');
    assert!(deque == ['a']);

    // Two elements in order.
    let mut deque = VecDeque::new();
    deque.push_back('a');
    deque.push_back('b');
    assert!(deque == ['a', 'b']);
}
11772
#[test]
fn test_hash() {
    // Two deques with equal contents must hash equally, even though `y`
    // reached [1, 2, 3] via a different operation sequence (push, pop,
    // push), so its internal buffer offset may differ from `x`'s.
    let mut x = VecDeque::new();
    let mut y = VecDeque::new();

    x.push_back(1);
    x.push_back(2);
    x.push_back(3);

    y.push_back(0);
    y.push_back(1);
    y.pop_front();
    y.push_back(2);
    y.push_back(3);

    assert!(hash(&x) == hash(&y));
}
11790
#[test]
fn test_hash_after_rotation() {
    // test that two deques hash equal even if elements are laid out differently
    let len = 28;
    let mut ring: VecDeque<i32> = (0..len as i32).collect();
    let orig = ring.clone();
    // Rotate through every possible buffer offset; after each rotation the
    // logical contents are restored to 0..len, only the layout differs.
    for _ in 0..ring.capacity() {
        // shift values 1 step to the right by pop, sub one, push
        ring.pop_front();
        for elt in &mut ring {
            *elt -= 1;
        }
        ring.push_back(len - 1);
        // Hash and equality must agree in both directions at every offset.
        assert_eq!(hash(&orig), hash(&ring));
        assert_eq!(orig, ring);
        assert_eq!(ring, orig);
    }
}
11809
#[test]
fn test_eq_after_rotation() {
    // Two deques must compare equal even when their elements sit at
    // different physical offsets within the ring buffer.
    let len = 28;
    let mut ring: VecDeque<i32> = (0..len as i32).collect();
    let mut shifted = ring.clone();

    // Rotate one slot: pop the front, decrement every element, then push
    // the highest value back on — the logical contents stay 0..len.
    fn rotate(deque: &mut VecDeque<i32>, len: i32) {
        deque.pop_front();
        for elt in deque.iter_mut() {
            *elt -= 1;
        }
        deque.push_back(len - 1);
    }

    // Put `ring` at a fixed, non-trivial offset.
    for _ in 0..10 {
        rotate(&mut ring, len);
    }

    // Walk `shifted` through every possible offset; equality must hold
    // in both directions each time.
    for _ in 0..shifted.capacity() {
        rotate(&mut shifted, len);
        assert_eq!(shifted, ring);
        assert_eq!(ring, shifted);
    }
}
11836
#[test]
fn test_ord() {
    // Lexicographic ordering: empty sorts before non-empty, and a deque
    // compares equal to itself.
    let empty = VecDeque::new();
    let mut filled = VecDeque::new();
    filled.extend(1..4);
    assert!(empty < filled);
    assert!(filled > empty);
    assert!(empty <= empty);
    assert!(empty >= empty);
}
11849
#[test]
fn test_show() {
    // Debug output uses slice-style formatting.
    let ringbuf: VecDeque<_> = (0..10).collect();
    assert_eq!(format!("{:?}", ringbuf), "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]");

    // String elements are quoted in the Debug output.
    let ringbuf: VecDeque<_> = ["just", "one", "test", "more"].iter().cloned().collect();
    assert_eq!(format!("{:?}", ringbuf), "[\"just\", \"one\", \"test\", \"more\"]");
}
11858
#[test]
fn test_drop() {
    // Drop counter; the static is declared inside this function, so only
    // this test touches it.
    static mut DROPS: i32 = 0;
    // Zero-sized element whose only job is to bump the counter on drop.
    struct Elem;
    impl Drop for Elem {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }
        }
    }

    // Four elements pushed on both ends; dropping the deque must drop
    // every one of them exactly once.
    let mut ring = VecDeque::new();
    ring.push_back(Elem);
    ring.push_front(Elem);
    ring.push_back(Elem);
    ring.push_front(Elem);
    drop(ring);

    assert_eq!(unsafe { DROPS }, 4);
}
11880
#[test]
fn test_drop_with_pop() {
    // Drop counter; the static is declared inside this function, so only
    // this test touches it.
    static mut DROPS: i32 = 0;
    // Zero-sized element whose only job is to bump the counter on drop.
    struct Elem;
    impl Drop for Elem {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }
        }
    }

    let mut ring = VecDeque::new();
    ring.push_back(Elem);
    ring.push_front(Elem);
    ring.push_back(Elem);
    ring.push_front(Elem);

    // Popping hands ownership to the caller; dropping the popped values
    // accounts for exactly two drops.
    drop(ring.pop_back());
    drop(ring.pop_front());
    assert_eq!(unsafe { DROPS }, 2);

    // Dropping the deque drops the remaining two — no double drops.
    drop(ring);
    assert_eq!(unsafe { DROPS }, 4);
}
11906
#[test]
fn test_drop_clear() {
    // Drop counter; the static is declared inside this function, so only
    // this test touches it.
    static mut DROPS: i32 = 0;
    // Zero-sized element whose only job is to bump the counter on drop.
    struct Elem;
    impl Drop for Elem {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }
        }
    }

    let mut ring = VecDeque::new();
    ring.push_back(Elem);
    ring.push_front(Elem);
    ring.push_back(Elem);
    ring.push_front(Elem);
    // clear() must drop all four elements immediately...
    ring.clear();
    assert_eq!(unsafe { DROPS }, 4);

    // ...and dropping the now-empty deque must not drop anything again.
    drop(ring);
    assert_eq!(unsafe { DROPS }, 4);
}
11930
#[test]
fn test_drop_panic() {
    // Drop counter; the static is declared inside this function, so only
    // this test touches it.
    static mut DROPS: i32 = 0;

    // Element that counts its drop and, when constructed with `true`,
    // panics from inside `drop`.
    struct D(bool);

    impl Drop for D {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }

            if self.0 {
                panic!("panic in `drop`");
            }
        }
    }

    // Eight elements; the one that panics sits at the front, so its panic
    // fires first while the deque's Drop is still unwinding.
    let mut q = VecDeque::new();
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_front(D(false));
    q.push_front(D(false));
    q.push_front(D(true));

    catch_unwind(move || drop(q)).ok();

    // Even with a panicking destructor, every element must still be
    // dropped exactly once.
    assert_eq!(unsafe { DROPS }, 8);
}
11963
#[test]
fn test_reserve_grow() {
    // Exercises the three distinct relocation paths `reserve` can take
    // depending on where head (H) and tail (T) sit in the ring buffer.
    // The push/pop sequences below position H and T deliberately — do
    // not reorder them.

    // test growth path A
    // [T o o H] -> [T o o H . . . . ]
    let mut ring = VecDeque::with_capacity(4);
    for i in 0..3 {
        ring.push_back(i);
    }
    ring.reserve(7);
    for i in 0..3 {
        assert_eq!(ring.pop_front(), Some(i));
    }

    // test growth path B
    // [H T o o] -> [. T o o H . . . ]
    let mut ring = VecDeque::with_capacity(4);
    for i in 0..1 {
        ring.push_back(i);
        assert_eq!(ring.pop_front(), Some(i));
    }
    for i in 0..3 {
        ring.push_back(i);
    }
    ring.reserve(7);
    for i in 0..3 {
        assert_eq!(ring.pop_front(), Some(i));
    }

    // test growth path C
    // [o o H T] -> [o o H . . . . T ]
    let mut ring = VecDeque::with_capacity(4);
    for i in 0..3 {
        ring.push_back(i);
        assert_eq!(ring.pop_front(), Some(i));
    }
    for i in 0..3 {
        ring.push_back(i);
    }
    ring.reserve(7);
    for i in 0..3 {
        assert_eq!(ring.pop_front(), Some(i));
    }
}
12007
#[test]
fn test_get() {
    // get() is checked indexing: in-range yields Some, out-of-range None.
    let mut deque = VecDeque::new();
    deque.push_back(0);
    assert_eq!(deque.get(0), Some(&0));
    assert_eq!(deque.get(1), None);

    deque.push_back(1);
    assert_eq!(deque.get(0), Some(&0));
    assert_eq!(deque.get(1), Some(&1));
    assert_eq!(deque.get(2), None);

    deque.push_back(2);
    assert_eq!(deque.get(0), Some(&0));
    assert_eq!(deque.get(1), Some(&1));
    assert_eq!(deque.get(2), Some(&2));
    assert_eq!(deque.get(3), None);

    // Each pop_front shifts every logical index down by one.
    assert_eq!(deque.pop_front(), Some(0));
    assert_eq!(deque.get(0), Some(&1));
    assert_eq!(deque.get(1), Some(&2));
    assert_eq!(deque.get(2), None);

    assert_eq!(deque.pop_front(), Some(1));
    assert_eq!(deque.get(0), Some(&2));
    assert_eq!(deque.get(1), None);

    assert_eq!(deque.pop_front(), Some(2));
    assert_eq!(deque.get(0), None);
    assert_eq!(deque.get(1), None);
}
12039
#[test]
fn test_get_mut() {
    let mut deque = VecDeque::new();
    for i in 0..3 {
        deque.push_back(i);
    }

    // Overwrite the middle element through the mutable accessor.
    if let Some(x) = deque.get_mut(1) {
        *x = -1;
    }

    assert_eq!(deque.get_mut(0), Some(&mut 0));
    assert_eq!(deque.get_mut(1), Some(&mut -1));
    assert_eq!(deque.get_mut(2), Some(&mut 2));
    assert_eq!(deque.get_mut(3), None);

    // Popping the front shifts every logical index down by one.
    assert_eq!(deque.pop_front(), Some(0));
    assert_eq!(deque.get_mut(0), Some(&mut -1));
    assert_eq!(deque.get_mut(1), Some(&mut 2));
    assert_eq!(deque.get_mut(2), None);
}
12062
#[test]
fn test_front() {
    // front() peeks without removing; pop_front advances it.
    let mut deque = VecDeque::new();
    deque.push_back(10);
    deque.push_back(20);
    assert_eq!(deque.front(), Some(&10));
    deque.pop_front();
    assert_eq!(deque.front(), Some(&20));
    deque.pop_front();
    assert_eq!(deque.front(), None);
}
12074
#[test]
fn test_as_slices() {
    let mut ring: VecDeque<i32> = VecDeque::with_capacity(127);
    let cap = ring.capacity() as i32;
    let first = cap / 2;
    let last = cap - first;
    // While only push_back has been used the deque is contiguous: all
    // elements land in the left slice and the right slice stays empty.
    for i in 0..first {
        ring.push_back(i);

        let (left, right) = ring.as_slices();
        let expected: Vec<_> = (0..=i).collect();
        assert_eq!(left, &expected[..]);
        assert_eq!(right, []);
    }

    // push_front wraps around the buffer: the front-pushed values form
    // the left slice (newest first), the original run forms the right.
    for j in -last..0 {
        ring.push_front(j);
        let (left, right) = ring.as_slices();
        let expected_left: Vec<_> = (-last..=j).rev().collect();
        let expected_right: Vec<_> = (0..first).collect();
        assert_eq!(left, &expected_left[..]);
        assert_eq!(right, &expected_right[..]);
    }

    // The deque was filled exactly to capacity without reallocating.
    assert_eq!(ring.len() as i32, cap);
    assert_eq!(ring.capacity() as i32, cap);
}
12102
#[test]
fn test_as_mut_slices() {
    // Mirror of `test_as_slices` for the mutable accessor.
    let mut ring: VecDeque<i32> = VecDeque::with_capacity(127);
    let cap = ring.capacity() as i32;
    let first = cap / 2;
    let last = cap - first;
    // Contiguous phase: everything in the left slice, right slice empty.
    for i in 0..first {
        ring.push_back(i);

        let (left, right) = ring.as_mut_slices();
        let expected: Vec<_> = (0..=i).collect();
        assert_eq!(left, &expected[..]);
        assert_eq!(right, []);
    }

    // Wrapped phase: front-pushed values (newest first) on the left,
    // the original run on the right.
    for j in -last..0 {
        ring.push_front(j);
        let (left, right) = ring.as_mut_slices();
        let expected_left: Vec<_> = (-last..=j).rev().collect();
        let expected_right: Vec<_> = (0..first).collect();
        assert_eq!(left, &expected_left[..]);
        assert_eq!(right, &expected_right[..]);
    }

    // Filled exactly to capacity without reallocating.
    assert_eq!(ring.len() as i32, cap);
    assert_eq!(ring.capacity() as i32, cap);
}
12130
#[test]
fn test_append() {
    let mut a: VecDeque<_> = (1..=3).collect();
    let mut b: VecDeque<_> = (4..=6).collect();

    // Appending moves everything out of `b` and onto the back of `a`.
    a.append(&mut b);
    assert_eq!(a.iter().copied().collect::<Vec<_>>(), [1, 2, 3, 4, 5, 6]);
    assert!(b.is_empty());

    // Appending an empty deque is a no-op for both sides.
    a.append(&mut b);
    assert_eq!(a.iter().copied().collect::<Vec<_>>(), [1, 2, 3, 4, 5, 6]);
    assert!(b.is_empty());

    // Appending into an empty deque transfers everything.
    b.append(&mut a);
    assert_eq!(b.iter().copied().collect::<Vec<_>>(), [1, 2, 3, 4, 5, 6]);
    assert!(a.is_empty());
}
12151
#[test]
fn test_append_permutations() {
    // Builds a deque by applying the four basic operations the given number
    // of times, in the order: push_back, push_front, pop_back, pop_front.
    // (Note the parameter order differs from the application order.)
    fn construct_vec_deque(
        push_back: usize,
        pop_back: usize,
        push_front: usize,
        pop_front: usize,
    ) -> VecDeque<usize> {
        let mut out = VecDeque::new();
        for a in 0..push_back {
            out.push_back(a);
        }
        for b in 0..push_front {
            out.push_front(push_back + b);
        }
        for _ in 0..pop_back {
            out.pop_back();
        }
        for _ in 0..pop_front {
            out.pop_front();
        }
        out
    }

    // Miri is too slow
    let max = if cfg!(miri) { 3 } else { 5 };

    // Many different permutations of both the `VecDeque` getting appended to
    // and the one getting appended are generated to check `append`.
    // This ensures all 6 code paths of `append` are tested.
    for src_push_back in 0..max {
        for src_push_front in 0..max {
            // doesn't pop more values than are pushed
            for src_pop_back in 0..(src_push_back + src_push_front) {
                for src_pop_front in 0..(src_push_back + src_push_front - src_pop_back) {
                    let src = construct_vec_deque(
                        src_push_back,
                        src_pop_back,
                        src_push_front,
                        src_pop_front,
                    );

                    for dst_push_back in 0..max {
                        for dst_push_front in 0..max {
                            for dst_pop_back in 0..(dst_push_back + dst_push_front) {
                                for dst_pop_front in
                                    0..(dst_push_back + dst_push_front - dst_pop_back)
                                {
                                    let mut dst = construct_vec_deque(
                                        dst_push_back,
                                        dst_pop_back,
                                        dst_push_front,
                                        dst_pop_front,
                                    );
                                    let mut src = src.clone();

                                    // Assert that appending `src` to `dst` gives the same order
                                    // of values as iterating over both in sequence.
                                    let correct = dst
                                        .iter()
                                        .chain(src.iter())
                                        .cloned()
                                        .collect::<Vec<usize>>();
                                    dst.append(&mut src);
                                    assert_eq!(dst, correct);
                                    assert!(src.is_empty());
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
12227
// Test helper: bumps the borrowed counter once every time an instance is
// dropped, letting the tests below assert exact drop counts.
struct DropCounter<'a> {
    count: &'a mut u32,
}

impl Drop for DropCounter<'_> {
    fn drop(&mut self) {
        *self.count += 1;
    }
}
12237
#[test]
fn test_append_double_drop() {
    // After `append`, every element must be dropped exactly once — even
    // though both the source and destination deques are dropped afterwards.
    let mut count_a = 0;
    let mut count_b = 0;
    {
        let mut a = VecDeque::new();
        let mut b = VecDeque::new();
        a.push_back(DropCounter { count: &mut count_a });
        b.push_back(DropCounter { count: &mut count_b });
        a.append(&mut b);
    }
    assert_eq!(count_a, 1);
    assert_eq!(count_b, 1);
}
12252
#[test]
fn test_retain() {
    // `retain` keeps only the elements matching the predicate, in order.
    let mut buf: VecDeque<i32> = (1..5).collect();
    buf.retain(|&x| x % 2 == 0);
    assert_eq!(Vec::from(buf), [2, 4]);
}
12261
#[test]
fn test_extend_ref() {
    // Extending from a slice of references copies the referenced values.
    let mut v = VecDeque::new();
    v.push_back(1);
    v.extend(&[2, 3, 4]);

    assert_eq!(v.len(), 4);
    assert_eq!(Vec::from(v.clone()), [1, 2, 3, 4]);

    // Extending from another deque by reference works the same way.
    let mut w = VecDeque::new();
    w.push_back(5);
    w.push_back(6);
    v.extend(&w);

    assert_eq!(v.len(), 6);
    assert_eq!(Vec::from(v), [1, 2, 3, 4, 5, 6]);
}
12287
#[test]
fn test_contains() {
    // `contains` performs a linear scan over the deque's elements.
    let mut v: VecDeque<i32> = [2, 3, 4].iter().copied().collect();

    assert!(v.contains(&3));
    assert!(!v.contains(&1));

    // After clearing, nothing is contained.
    v.clear();
    assert!(!v.contains(&3));
}
12300
// Compile-time check (never called) that `Drain` is covariant in both its
// lifetime and its element type: a `'static` drain must coerce to any
// shorter-lived one. Fails to compile if variance regresses.
#[allow(dead_code)]
fn assert_covariance() {
    fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> {
        d
    }
}
12307
#[test]
fn test_is_empty() {
    // `is_empty` must agree with `len() == 0` for the deque itself and for
    // every iterator flavor, throughout a fill-then-drain cycle.
    let mut v = VecDeque::<i32>::new();
    assert!(v.is_empty());
    assert!(v.iter().is_empty());
    assert!(v.iter_mut().is_empty());

    v.extend(&[2, 3, 4]);
    assert!(!v.is_empty());
    assert!(!v.iter().is_empty());
    assert!(!v.iter_mut().is_empty());

    // Drain one element at a time, re-checking the invariant at each step.
    while v.pop_front().is_some() {
        assert_eq!(v.is_empty(), v.len() == 0);
        assert_eq!(v.iter().is_empty(), v.iter().len() == 0);
        assert_eq!(v.iter_mut().is_empty(), v.iter_mut().len() == 0);
    }

    assert!(v.is_empty());
    assert!(v.iter().is_empty());
    assert!(v.iter_mut().is_empty());
    assert!(v.into_iter().is_empty());
}
12328
12329#[test]
12330fn test_reserve_exact_2() {
12331    // This is all the same as test_reserve
12332
12333    let mut v = VecDeque::new();
12334
12335    v.reserve_exact(2);
12336    assert!(v.capacity() >= 2);
12337
12338    for i in 0..16 {
12339        v.push_back(i);
12340    }
12341
12342    assert!(v.capacity() >= 16);
12343    v.reserve_exact(16);
12344    assert!(v.capacity() >= 32);
12345
12346    v.push_back(16);
12347
12348    v.reserve_exact(16);
12349    assert!(v.capacity() >= 48)
12350}
12351
#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve() {
    // Verifies that `VecDeque::try_reserve` distinguishes capacity overflow
    // from allocator failure at the isize::MAX / usize::MAX boundaries, for
    // an empty deque, a non-empty deque, and a multi-byte element type.
    //
    // NOTE(review): `Err(CapacityOverflow)` / `Err(AllocError { .. })` act as
    // variant patterns only if those names are imported earlier in the file;
    // otherwise they would be irrefutable catch-all bindings — TODO confirm
    // the glob import of the error variants is present at the top of the file.
    //
    // These are the interesting cases:
    // * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM)
    // * > isize::MAX should always fail
    //    * On 16/32-bit should CapacityOverflow
    //    * On 64-bit should OOM
    // * overflow may trigger when adding `len` to `cap` (in number of elements)
    // * overflow may trigger when multiplying `new_cap` by size_of::<T> (to get bytes)

    const MAX_CAP: usize = (isize::MAX as usize + 1) / 2 - 1;
    const MAX_USIZE: usize = usize::MAX;

    // On 16/32-bit, we check that allocations don't exceed isize::MAX,
    // on 64-bit, we assume the OS will give an OOM for such a ridiculous size.
    // Any platform that succeeds for these requests is technically broken with
    // ptr::offset because LLVM is the worst.
    let guards_against_isize = size_of::<usize>() < 8;

    {
        // Note: basic stuff is checked by test_reserve
        let mut empty_bytes: VecDeque<u8> = VecDeque::new();

        // Check isize::MAX doesn't count as an overflow
        if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        // Play it again, frank! (just to be sure)
        if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }

        if guards_against_isize {
            // Check isize::MAX + 1 does count as overflow
            if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP + 1) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!")
            }

            // Check usize::MAX does count as overflow
            if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) {
            } else {
                panic!("usize::MAX should trigger an overflow!")
            }
        } else {
            // Check isize::MAX is an OOM
            // VecDeque starts with capacity 7, always adds 1 to the capacity
            // and also rounds the number to next power of 2 so this is the
            // furthest we can go without triggering CapacityOverflow
            if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_CAP) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
    }

    {
        // Same basic idea, but with non-zero len
        let mut ten_bytes: VecDeque<u8> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect();

        if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if guards_against_isize {
            if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!");
            }
        } else {
            if let Err(AllocError { .. }) = ten_bytes.try_reserve(MAX_CAP - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
        // Should always overflow in the add-to-len
        if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_USIZE) {
        } else {
            panic!("usize::MAX should trigger an overflow!")
        }
    }

    {
        // Same basic idea, but with interesting type size
        let mut ten_u32s: VecDeque<u32> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect();

        if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if guards_against_isize {
            if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!");
            }
        } else {
            if let Err(AllocError { .. }) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
        // Should fail in the mul-by-size
        if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_USIZE - 20) {
        } else {
            panic!("usize::MAX should trigger an overflow!");
        }
    }
}
12466
#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve_exact() {
    // This is exactly the same as test_try_reserve with the method changed.
    // See that test for comments.
    //
    // NOTE(review): as in test_try_reserve, the `Err(CapacityOverflow)` /
    // `Err(AllocError { .. })` patterns rely on those variant names being
    // imported earlier in the file — TODO confirm the import is present.

    const MAX_CAP: usize = (isize::MAX as usize + 1) / 2 - 1;
    const MAX_USIZE: usize = usize::MAX;

    let guards_against_isize = size_of::<usize>() < 8;

    {
        let mut empty_bytes: VecDeque<u8> = VecDeque::new();

        if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }

        if guards_against_isize {
            if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP + 1) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!")
            }

            if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_USIZE) {
            } else {
                panic!("usize::MAX should trigger an overflow!")
            }
        } else {
            // Check isize::MAX is an OOM
            // VecDeque starts with capacity 7, always adds 1 to the capacity
            // and also rounds the number to next power of 2 so this is the
            // furthest we can go without triggering CapacityOverflow
            if let Err(AllocError { .. }) = empty_bytes.try_reserve_exact(MAX_CAP) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
    }

    {
        let mut ten_bytes: VecDeque<u8> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect();

        if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if guards_against_isize {
            if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!");
            }
        } else {
            if let Err(AllocError { .. }) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
        // Adding `len` to the request overflows usize.
        if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_USIZE) {
        } else {
            panic!("usize::MAX should trigger an overflow!")
        }
    }

    {
        let mut ten_u32s: VecDeque<u32> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect();

        if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if guards_against_isize {
            if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!");
            }
        } else {
            if let Err(AllocError { .. }) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
        // Multiplying by size_of::<u32>() overflows usize.
        if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_USIZE - 20) {
        } else {
            panic!("usize::MAX should trigger an overflow!")
        }
    }
}
12563
#[test]
fn test_rotate_nop() {
    // Rotation sequences whose shifts cancel out (or sum to a multiple of
    // the length, 10) must leave the deque exactly as it started.
    let mut v: VecDeque<_> = (0..10).collect();

    fn assert_unchanged(v: &VecDeque<i32>) {
        assert_eq!(v, &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    assert_unchanged(&v);

    // Rotating by 0 or by the full length is a no-op on its own.
    for &amt in &[0, 10] {
        v.rotate_left(amt);
        assert_unchanged(&v);
        v.rotate_right(amt);
        assert_unchanged(&v);
    }

    // A left rotation is undone by an equal right rotation, and vice versa.
    for &amt in &[3, 6] {
        v.rotate_left(amt);
        v.rotate_right(amt);
        assert_unchanged(&v);
        v.rotate_right(amt);
        v.rotate_left(amt);
        assert_unchanged(&v);
    }

    // Same-direction rotations summing to the length are no-ops.
    v.rotate_left(3);
    v.rotate_left(7);
    assert_unchanged(&v);
    v.rotate_right(4);
    v.rotate_right(6);
    assert_unchanged(&v);

    // 1 + 2 + 3 + 4 == 10, so each cascade is a net no-op as well.
    for amt in 1..=4 {
        v.rotate_left(amt);
    }
    assert_unchanged(&v);
    for amt in 1..=4 {
        v.rotate_right(amt);
    }
    assert_unchanged(&v);
}
12621
#[test]
fn test_rotate_left_parts() {
    // Walk a 7-element deque through successive rotate_left(2) calls and
    // check the exact (front, back) split reported by `as_slices` each time.
    // NOTE(review): the expected splits encode this std implementation's
    // internal ring-buffer layout (head position and capacity), not just the
    // logical element order — they may not hold for a different VecDeque
    // implementation.
    let mut v: VecDeque<_> = (1..=7).collect();
    v.rotate_left(2);
    assert_eq!(v.as_slices(), (&[3, 4, 5, 6, 7, 1][..], &[2][..]));
    v.rotate_left(2);
    assert_eq!(v.as_slices(), (&[5, 6, 7, 1][..], &[2, 3, 4][..]));
    v.rotate_left(2);
    assert_eq!(v.as_slices(), (&[7, 1][..], &[2, 3, 4, 5, 6][..]));
    v.rotate_left(2);
    assert_eq!(v.as_slices(), (&[2, 3, 4, 5, 6, 7, 1][..], &[][..]));
    v.rotate_left(2);
    assert_eq!(v.as_slices(), (&[4, 5, 6, 7, 1, 2][..], &[3][..]));
    v.rotate_left(2);
    assert_eq!(v.as_slices(), (&[6, 7, 1, 2][..], &[3, 4, 5][..]));
    v.rotate_left(2);
    assert_eq!(v.as_slices(), (&[1, 2][..], &[3, 4, 5, 6, 7][..]));
}
12640
#[test]
fn test_rotate_right_parts() {
    // Mirror of test_rotate_left_parts for rotate_right(2).
    // NOTE(review): as above, the expected (front, back) splits encode the
    // internal buffer layout of this specific implementation.
    let mut v: VecDeque<_> = (1..=7).collect();
    v.rotate_right(2);
    assert_eq!(v.as_slices(), (&[6, 7][..], &[1, 2, 3, 4, 5][..]));
    v.rotate_right(2);
    assert_eq!(v.as_slices(), (&[4, 5, 6, 7][..], &[1, 2, 3][..]));
    v.rotate_right(2);
    assert_eq!(v.as_slices(), (&[2, 3, 4, 5, 6, 7][..], &[1][..]));
    v.rotate_right(2);
    assert_eq!(v.as_slices(), (&[7, 1, 2, 3, 4, 5, 6][..], &[][..]));
    v.rotate_right(2);
    assert_eq!(v.as_slices(), (&[5, 6][..], &[7, 1, 2, 3, 4][..]));
    v.rotate_right(2);
    assert_eq!(v.as_slices(), (&[3, 4, 5, 6][..], &[7, 1, 2][..]));
    v.rotate_right(2);
    assert_eq!(v.as_slices(), (&[1, 2, 3, 4, 5, 6][..], &[7][..]));
}
12659
#[test]
fn test_rotate_left_random() {
    // Fixed pseudo-random shift amounts; track the cumulative rotation and
    // verify every element sits where the total predicts after each step.
    let shifts = [
        6, 1, 0, 11, 12, 1, 11, 7, 9, 3, 6, 1, 4, 0, 5, 1, 3, 1, 12, 8, 3, 1, 11, 11, 9, 4, 12, 3,
        12, 9, 11, 1, 7, 9, 7, 2,
    ];
    let n = 12;
    let mut v: VecDeque<_> = (0..n).collect();
    let mut total = 0;
    for &amount in shifts.iter() {
        v.rotate_left(amount);
        total += amount;
        for (i, &val) in v.iter().enumerate() {
            assert_eq!(val, (i + total) % n);
        }
    }
}
12677
#[test]
fn test_rotate_right_random() {
    // Mirror of test_rotate_left_random: after rotating right by a running
    // total, element `i` must sit at index `(i + total) % n`.
    let shifts = [
        6, 1, 0, 11, 12, 1, 11, 7, 9, 3, 6, 1, 4, 0, 5, 1, 3, 1, 12, 8, 3, 1, 11, 11, 9, 4, 12, 3,
        12, 9, 11, 1, 7, 9, 7, 2,
    ];
    let n = 12;
    let mut v: VecDeque<_> = (0..n).collect();
    let mut total = 0;
    for &amount in shifts.iter() {
        v.rotate_right(amount);
        total += amount;
        for i in 0..n {
            assert_eq!(v[(i + total) % n], i);
        }
    }
}
12695
#[test]
fn test_try_fold_empty() {
    // On an empty iterator the closure is never called; init is returned.
    let empty = VecDeque::<u32>::new();
    let folded = empty.iter().try_fold(0, |_, _| None);
    assert_eq!(folded, Some(0));
}
12700
#[test]
fn test_try_fold_none() {
    // Short-circuits to None when the closure rejects the last element (11).
    let v: VecDeque<u32> = (0..12).collect();
    let result = v.into_iter().try_fold(0, |acc, x| if x < 11 { Some(acc + x) } else { None });
    assert_eq!(result, None);
}
12706
#[test]
fn test_try_fold_ok() {
    // When the closure never fails, try_fold behaves like a plain fold:
    // 0 + 1 + ... + 11 == 66.
    let v: VecDeque<u32> = (0..12).collect();
    let total: Result<u32, ()> = v.into_iter().try_fold(0, |acc, x| Ok(acc + x));
    assert_eq!(total, Ok(66));
}
12712
#[test]
fn test_try_fold_unit() {
    // Zero-sized elements: folding 42 units succeeds and yields unit.
    let units: VecDeque<()> = (0..42).map(|_| ()).collect();
    assert_eq!(units.into_iter().try_fold((), |(), ()| Some(())), Some(()));
}
12718
#[test]
fn test_try_fold_unit_none() {
    // A fold that fails immediately consumes exactly one element.
    let v: VecDeque<()> = vec![(); 10].into_iter().collect();
    let mut it = v.into_iter();
    assert!(it.try_fold((), |_, _| None).is_none());
    assert_eq!(it.len(), 9);
}
12726
#[test]
fn test_try_fold_rotated() {
    // The fold result must be stable no matter where the internal ring
    // buffer happens to be split after a rotation.
    let mut v: VecDeque<_> = (0..12).collect();
    for n in 0..10 {
        match n % 2 {
            0 => v.rotate_left(n),
            _ => v.rotate_right(n),
        }
        let sum: Result<i32, ()> = v.iter().try_fold(0, |acc, &x| Ok(acc + x));
        assert_eq!(sum, Ok(66));
    }
}
12739
#[test]
fn test_try_fold_moves_iter() {
    // After a short-circuit, the iterator must resume right after the
    // element that caused the failure (100 overflows the i8 accumulator).
    let v: VecDeque<_> = [10, 20, 30, 40, 100, 60, 70, 80, 90].iter().collect();
    let mut it = v.into_iter();
    let overflowed = it.try_fold(0_i8, |acc, &x| acc.checked_add(x));
    assert!(overflowed.is_none());
    assert_eq!(it.next(), Some(&60));
}
12747
#[test]
fn test_try_fold_exhaust_wrap() {
    // Push then pop so the live elements sit away from the buffer start,
    // then verify a fully-successful try_fold leaves the iterator exhausted.
    let mut v = VecDeque::with_capacity(7);
    for _ in 0..3 {
        v.push_back(1);
    }
    v.pop_front();
    v.pop_front();
    let mut it = v.iter();
    let _ = it.try_fold(0, |_, _| Some(1));
    assert_eq!(it.len(), 0);
}
12760
#[test]
fn test_try_fold_wraparound() {
    // `find` is built on try_fold; after a partial search, iteration must
    // continue correctly across the internal buffer boundary.
    let mut v = VecDeque::with_capacity(8);
    for x in 7..10 {
        v.push_back(x);
    }
    v.push_front(2);
    v.push_front(1);
    // Deque is now [1, 2, 7, 8, 9]; stop the search at 2.
    let mut it = v.iter();
    let _ = it.find(|&&x| x == 2);
    assert_eq!(Some(&7), it.next());
}
12773
#[test]
fn test_try_rfold_rotated() {
    // Same as test_try_fold_rotated, but folding from the back.
    let mut v: VecDeque<_> = (0..12).collect();
    for n in 0..10 {
        match n % 2 {
            0 => v.rotate_left(n),
            _ => v.rotate_right(n),
        }
        let sum: Result<i32, ()> = v.iter().try_rfold(0, |acc, &x| Ok(acc + x));
        assert_eq!(sum, Ok(66));
    }
}
12786
#[test]
fn test_try_rfold_moves_iter() {
    // Folding from the back overflows at 80 (90 + 80 > i8::MAX); the
    // iterator must resume just before it, at 70.
    let v: VecDeque<_> = [10, 20, 30, 40, 100, 60, 70, 80, 90].iter().collect();
    let mut it = v.into_iter();
    assert!(it.try_rfold(0_i8, |acc, &x| acc.checked_add(x)).is_none());
    assert_eq!(it.next_back(), Some(&70));
}
12794
#[test]
fn truncate_leak() {
    // Ensure `truncate` drops every removed element exactly once, even when
    // one of those drops panics partway through (no leaks, no double drops).
    static mut DROPS: i32 = 0;

    // Drop bookkeeping: counts drops; panics on drop when the flag is true.
    struct D(bool);

    impl Drop for D {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }

            if self.0 {
                panic!("panic in `drop`");
            }
        }
    }

    let mut q = VecDeque::new();
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_front(D(true));
    q.push_front(D(false));
    q.push_front(D(false));

    // Truncating to length 1 removes seven elements; one of them (the
    // D(true)) panics in its drop, and the panic is caught here. The
    // remaining elements must still be dropped.
    catch_unwind(AssertUnwindSafe(|| q.truncate(1))).ok();

    // All 7 removed elements were dropped despite the mid-truncate panic.
    assert_eq!(unsafe { DROPS }, 7);
}
12827
#[test]
fn test_drain_leak() {
    // Ensure `drain` neither leaks nor double-drops when an element's drop
    // panics mid-drain, and that the deque is left in a consistent state.
    static mut DROPS: i32 = 0;

    #[derive(Debug, PartialEq)]
    // Drop bookkeeping: field 0 identifies the element, field 1 makes the
    // drop panic when true.
    struct D(u32, bool);

    impl Drop for D {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }

            if self.1 {
                panic!("panic in `drop`");
            }
        }
    }

    let mut v = VecDeque::new();
    v.push_back(D(4, false));
    v.push_back(D(5, false));
    v.push_back(D(6, false));
    v.push_front(D(3, false));
    v.push_front(D(2, true));
    v.push_front(D(1, false));
    v.push_front(D(0, false));

    // Drain elements 1..=4 (values D(1), D(2), D(3), D(4)); D(2) panics in
    // its drop and the panic is caught here.
    catch_unwind(AssertUnwindSafe(|| {
        v.drain(1..=4);
    }))
    .ok();

    // All four drained elements were dropped despite the panic, and the
    // three survivors remain in the deque.
    assert_eq!(unsafe { DROPS }, 4);
    assert_eq!(v.len(), 3);
    drop(v);
    assert_eq!(unsafe { DROPS }, 7);
}
12866
#[test]
fn test_binary_search() {
    // Contiguous case: every element lives in the front slice.
    let deque = VecDeque::from(vec![1, 2, 3, 5, 6]);
    assert!(deque.as_slices().1.is_empty());
    assert_eq!(deque.binary_search(&3), Ok(2));
    assert_eq!(deque.binary_search(&4), Err(3));

    // Wrapped case: elements split between the front and back slices.
    let mut deque = VecDeque::from(vec![5, 6]);
    for &x in &[3, 2, 1] {
        deque.push_front(x);
    }
    deque.push_back(10);
    let (front, back) = deque.as_slices();
    assert!(!front.is_empty());
    assert!(!back.is_empty());
    assert_eq!(deque.binary_search(&0), Err(0));
    assert_eq!(deque.binary_search(&1), Ok(0));
    assert_eq!(deque.binary_search(&5), Ok(3));
    assert_eq!(deque.binary_search(&7), Err(5));
    assert_eq!(deque.binary_search(&20), Err(6));
}
12889
#[test]
fn test_binary_search_by() {
    // Same lookups as test_binary_search, driven through a comparator.
    let deque: VecDeque<(i32,)> = vec![(1,), (2,), (3,), (5,), (6,)].into();

    assert_eq!(deque.binary_search_by(|probe| probe.0.cmp(&3)), Ok(2));
    assert_eq!(deque.binary_search_by(|probe| probe.0.cmp(&4)), Err(3));
}
12897
#[test]
fn test_binary_search_by_key() {
    // Same lookups again, driven through a key-extraction closure.
    let deque: VecDeque<(i32,)> = vec![(1,), (2,), (3,), (5,), (6,)].into();

    assert_eq!(deque.binary_search_by_key(&3, |probe| probe.0), Ok(2));
    assert_eq!(deque.binary_search_by_key(&4, |probe| probe.0), Err(3));
}
12905
#[test]
fn test_partition_point() {
    // Contiguous case: everything lives in the front slice.
    let deque = VecDeque::from(vec![1, 2, 3, 5, 6]);
    assert!(deque.as_slices().1.is_empty());
    assert_eq!(deque.partition_point(|&v| v <= 3), 3);

    // Wrapped case: elements split across both internal slices.
    let mut deque = VecDeque::from(vec![5, 6]);
    for &x in &[3, 2, 1] {
        deque.push_front(x);
    }
    deque.push_back(10);
    let (front, back) = deque.as_slices();
    assert!(!front.is_empty());
    assert!(!back.is_empty());
    assert_eq!(deque.partition_point(|&v| v <= 5), 4);
}
12923
#[test]
fn test_zero_sized_push() {
    const N: usize = 8;

    // Zero sized type
    struct Zst;

    // Exhaustively check every push_front / push_back interleaving (encoded
    // as a bitmask) and verify the deque ends up with the right length.
    for len in 0..N {
        let mut tester = VecDeque::with_capacity(len);
        assert_eq!(tester.len(), 0);
        assert!(tester.capacity() >= len);
        for mask in 0usize..(1 << len) {
            assert!(tester.is_empty());
            for bit in 0..len {
                match (mask >> bit) & 1 {
                    1 => tester.push_front(Zst),
                    _ => tester.push_back(Zst),
                }
            }
            assert_eq!(tester.len(), len);
            assert_eq!(tester.iter().count(), len);
            tester.clear();
        }
    }
}
12953
#[test]
fn test_from_zero_sized_vec() {
    // Converting a Vec of zero-sized elements must preserve the count.
    let queue: VecDeque<()> = VecDeque::from(vec![(); 100]);
    assert_eq!(queue.len(), 100);
}
12960#![deny(warnings)]
12961
12962use std::cell::RefCell;
12963use std::fmt::{self, Write};
12964
#[test]
fn test_format() {
    // `fmt::format` is the function backing the `format!` macro.
    let formatted = fmt::format(format_args!("Hello, {}!", "world"));
    assert_eq!(formatted, "Hello, world!");
}
12970
// Probe types, each implementing exactly one formatting trait so the tests
// below can drive that trait through its format specifier.
struct A; // LowerHex ({:x})
struct B; // UpperHex ({:X})
struct C; // Display, exercising pad_integral with a multibyte fill char
struct D; // Binary, exercising interleaved write_str / write_char

impl fmt::LowerHex for A {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("aloha")
    }
}
impl fmt::UpperHex for B {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("adios")
    }
}
impl fmt::Display for C {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.pad_integral(true, "☃", "123")
    }
}
impl fmt::Binary for D {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("aa")?;
        f.write_char('☃')?;
        f.write_str("bb")
    }
}
12998
// Shorthand used throughout the formatting tests: asserts that the actual
// formatted value ($a) equals the expected string ($b).
macro_rules! t {
    ($a:expr, $b:expr) => {
        assert_eq!($a, $b)
    };
}
13004
13005#[test]
13006fn test_format_macro_interface() {
13007    // Various edge cases without formats
13008    t!(format!(""), "");
13009    t!(format!("hello"), "hello");
13010    t!(format!("hello {{"), "hello {");
13011
13012    // default formatters should work
13013    t!(format!("{}", 1.0f32), "1");
13014    t!(format!("{}", 1.0f64), "1");
13015    t!(format!("{}", "a"), "a");
13016    t!(format!("{}", "a".to_string()), "a");
13017    t!(format!("{}", false), "false");
13018    t!(format!("{}", 'a'), "a");
13019
13020    // At least exercise all the formats
13021    t!(format!("{}", true), "true");
13022    t!(format!("{}", '☃'), "☃");
13023    t!(format!("{}", 10), "10");
13024    t!(format!("{}", 10_usize), "10");
13025    t!(format!("{:?}", '☃'), "'☃'");
13026    t!(format!("{:?}", 10), "10");
13027    t!(format!("{:?}", 10_usize), "10");
13028    t!(format!("{:?}", "true"), "\"true\"");
13029    t!(format!("{:?}", "foo\nbar"), "\"foo\\nbar\"");
13030    t!(format!("{:?}", "foo\n\"bar\"\r\n\'baz\'\t\\qux\\"), r#""foo\n\"bar\"\r\n'baz'\t\\qux\\""#);
13031    t!(format!("{:?}", "foo\0bar\x01baz\u{7f}q\u{75}x"), r#""foo\u{0}bar\u{1}baz\u{7f}qux""#);
13032    t!(format!("{:o}", 10_usize), "12");
13033    t!(format!("{:x}", 10_usize), "a");
13034    t!(format!("{:X}", 10_usize), "A");
13035    t!(format!("{}", "foo"), "foo");
13036    t!(format!("{}", "foo".to_string()), "foo");
13037    if cfg!(target_pointer_width = "32") {
13038        t!(format!("{:#p}", 0x1234 as *const isize), "0x00001234");
13039        t!(format!("{:#p}", 0x1234 as *mut isize), "0x00001234");
13040    } else {
13041        t!(format!("{:#p}", 0x1234 as *const isize), "0x0000000000001234");
13042        t!(format!("{:#p}", 0x1234 as *mut isize), "0x0000000000001234");
13043    }
13044    t!(format!("{:p}", 0x1234 as *const isize), "0x1234");
13045    t!(format!("{:p}", 0x1234 as *mut isize), "0x1234");
13046    t!(format!("{:x}", A), "aloha");
13047    t!(format!("{:X}", B), "adios");
13048    t!(format!("foo {} ☃☃☃☃☃☃", "bar"), "foo bar ☃☃☃☃☃☃");
13049    t!(format!("{1} {0}", 0, 1), "1 0");
13050    t!(format!("{foo} {bar}", foo = 0, bar = 1), "0 1");
13051    t!(format!("{foo} {1} {bar} {0}", 0, 1, foo = 2, bar = 3), "2 1 3 0");
13052    t!(format!("{} {0}", "a"), "a a");
13053    t!(format!("{_foo}", _foo = 6usize), "6");
13054    t!(format!("{foo_bar}", foo_bar = 1), "1");
13055    t!(format!("{}", 5 + 5), "10");
13056    t!(format!("{:#4}", C), "☃123");
13057    t!(format!("{:b}", D), "aa☃bb");
13058
13059    let a: &dyn fmt::Debug = &1;
13060    t!(format!("{:?}", a), "1");
13061
13062    // Formatting strings and their arguments
13063    t!(format!("{}", "a"), "a");
13064    t!(format!("{:4}", "a"), "a   ");
13065    t!(format!("{:4}", "☃"), "☃   ");
13066    t!(format!("{:>4}", "a"), "   a");
13067    t!(format!("{:<4}", "a"), "a   ");
13068    t!(format!("{:^5}", "a"), "  a  ");
13069    t!(format!("{:^5}", "aa"), " aa  ");
13070    t!(format!("{:^4}", "a"), " a  ");
13071    t!(format!("{:^4}", "aa"), " aa ");
13072    t!(format!("{:.4}", "a"), "a");
13073    t!(format!("{:4.4}", "a"), "a   ");
13074    t!(format!("{:4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
13075    t!(format!("{:<4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
13076    t!(format!("{:>4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
13077    t!(format!("{:^4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
13078    t!(format!("{:>10.4}", "aaaaaaaaaaaaaaaaaa"), "      aaaa");
13079    t!(format!("{:2.4}", "aaaaa"), "aaaa");
13080    t!(format!("{:2.4}", "aaaa"), "aaaa");
13081    t!(format!("{:2.4}", "aaa"), "aaa");
13082    t!(format!("{:2.4}", "aa"), "aa");
13083    t!(format!("{:2.4}", "a"), "a ");
13084    t!(format!("{:0>2}", "a"), "0a");
13085    t!(format!("{:.*}", 4, "aaaaaaaaaaaaaaaaaa"), "aaaa");
13086    t!(format!("{:.1$}", "aaaaaaaaaaaaaaaaaa", 4), "aaaa");
13087    t!(format!("{:.a$}", "aaaaaaaaaaaaaaaaaa", a = 4), "aaaa");
13088    t!(format!("{:._a$}", "aaaaaaaaaaaaaaaaaa", _a = 4), "aaaa");
13089    t!(format!("{:1$}", "a", 4), "a   ");
13090    t!(format!("{1:0$}", 4, "a"), "a   ");
13091    t!(format!("{:a$}", "a", a = 4), "a   ");
13092    t!(format!("{:-#}", "a"), "a");
13093    t!(format!("{:+#}", "a"), "a");
13094    t!(format!("{:/^10.8}", "1234567890"), "/12345678/");
13095
13096    // Some float stuff
13097    t!(format!("{:}", 1.0f32), "1");
13098    t!(format!("{:}", 1.0f64), "1");
13099    t!(format!("{:.3}", 1.0f64), "1.000");
13100    t!(format!("{:10.3}", 1.0f64), "     1.000");
13101    t!(format!("{:+10.3}", 1.0f64), "    +1.000");
13102    t!(format!("{:+10.3}", -1.0f64), "    -1.000");
13103
13104    t!(format!("{:e}", 1.2345e6f32), "1.2345e6");
13105    t!(format!("{:e}", 1.2345e6f64), "1.2345e6");
13106    t!(format!("{:E}", 1.2345e6f64), "1.2345E6");
13107    t!(format!("{:.3e}", 1.2345e6f64), "1.234e6");
13108    t!(format!("{:10.3e}", 1.2345e6f64), "   1.234e6");
13109    t!(format!("{:+10.3e}", 1.2345e6f64), "  +1.234e6");
13110    t!(format!("{:+10.3e}", -1.2345e6f64), "  -1.234e6");
13111
13112    // Float edge cases
13113    t!(format!("{}", -0.0), "-0");
13114    t!(format!("{:?}", 0.0), "0.0");
13115
13116    // sign aware zero padding
13117    t!(format!("{:<3}", 1), "1  ");
13118    t!(format!("{:>3}", 1), "  1");
13119    t!(format!("{:^3}", 1), " 1 ");
13120    t!(format!("{:03}", 1), "001");
13121    t!(format!("{:<03}", 1), "001");
13122    t!(format!("{:>03}", 1), "001");
13123    t!(format!("{:^03}", 1), "001");
13124    t!(format!("{:+03}", 1), "+01");
13125    t!(format!("{:<+03}", 1), "+01");
13126    t!(format!("{:>+03}", 1), "+01");
13127    t!(format!("{:^+03}", 1), "+01");
13128    t!(format!("{:#05x}", 1), "0x001");
13129    t!(format!("{:<#05x}", 1), "0x001");
13130    t!(format!("{:>#05x}", 1), "0x001");
13131    t!(format!("{:^#05x}", 1), "0x001");
13132    t!(format!("{:05}", 1.2), "001.2");
13133    t!(format!("{:<05}", 1.2), "001.2");
13134    t!(format!("{:>05}", 1.2), "001.2");
13135    t!(format!("{:^05}", 1.2), "001.2");
13136    t!(format!("{:05}", -1.2), "-01.2");
13137    t!(format!("{:<05}", -1.2), "-01.2");
13138    t!(format!("{:>05}", -1.2), "-01.2");
13139    t!(format!("{:^05}", -1.2), "-01.2");
13140    t!(format!("{:+05}", 1.2), "+01.2");
13141    t!(format!("{:<+05}", 1.2), "+01.2");
13142    t!(format!("{:>+05}", 1.2), "+01.2");
13143    t!(format!("{:^+05}", 1.2), "+01.2");
13144
13145    // Ergonomic format_args!
13146    t!(format!("{0:x} {0:X}", 15), "f F");
13147    t!(format!("{0:x} {0:X} {}", 15), "f F 15");
13148    t!(format!("{:x}{0:X}{a:x}{:X}{1:x}{a:X}", 13, 14, a = 15), "dDfEeF");
13149    t!(format!("{a:x} {a:X}", a = 15), "f F");
13150
13151    // And its edge cases
13152    t!(
13153        format!(
13154            "{a:.0$} {b:.0$} {0:.0$}\n{a:.c$} {b:.c$} {c:.c$}",
13155            4,
13156            a = "abcdefg",
13157            b = "hijklmn",
13158            c = 3
13159        ),
13160        "abcd hijk 4\nabc hij 3"
13161    );
13162    t!(format!("{a:.*} {0} {:.*}", 4, 3, "efgh", a = "abcdef"), "abcd 4 efg");
13163    t!(format!("{:.a$} {a} {a:#x}", "aaaaaa", a = 2), "aa 2 0x2");
13164
13165    // Test that pointers don't get truncated.
13166    {
13167        let val = usize::MAX;
13168        let exp = format!("{:#x}", val);
13169        t!(format!("{:p}", val as *const isize), exp);
13170    }
13171
13172    // Escaping
13173    t!(format!("{{"), "{");
13174    t!(format!("}}"), "}");
13175
13176    // make sure that format! doesn't move out of local variables
13177    let a = Box::new(3);
13178    format!("{}", a);
13179    format!("{}", a);
13180
13181    // make sure that format! doesn't cause spurious unused-unsafe warnings when
13182    // it's inside of an outer unsafe block
13183    unsafe {
13184        let a: isize = ::std::mem::transmute(3_usize);
13185        format!("{}", a);
13186    }
13187
13188    // test that trailing commas are acceptable
13189    format!("{}", "test",);
13190    format!("{foo}", foo = "test",);
13191}
13192
13193// Basic test to make sure that we can invoke the `write!` macro with an
13194// fmt::Write instance.
#[test]
fn test_write() {
    // Exercise `write!`/`writeln!` against a `String` sink, both directly and
    // through a reborrowed `&mut` reference, plus the raw `fmt::Write` methods.
    let mut buf = String::new();
    let _ = write!(&mut buf, "{}", 3);
    {
        let sink = &mut buf;
        let _ = write!(sink, "{foo}", foo = 4);
        let _ = write!(sink, "{}", "hello");
        let _ = writeln!(sink, "{}", "line");
        let _ = writeln!(sink, "{foo}", foo = "bar");
        let _ = sink.write_char('☃');
        let _ = sink.write_str("str");
    }

    t!(buf, "34helloline\nbar\n☃str");
}
13211
13212// Just make sure that the macros are defined, there's not really a lot that we
13213// can do with them just yet (to test the output)
#[test]
fn test_print() {
    // Output is not captured here; this only checks that the print macros
    // expand and run without panicking for several argument forms
    // (plain literal, Debug formatting, positional and named parameters).
    print!("hi");
    print!("{:?}", vec![0u8]);
    println!("hello");
    println!("this is a {}", "test");
    println!("{foo}", foo = "bar");
}
13222
13223// Just make sure that the macros are defined, there's not really a lot that we
13224// can do with them just yet (to test the output)
#[test]
fn test_format_args() {
    // `format_args!` yields a value usable anywhere `fmt::Arguments` is
    // accepted; route it through `write!`, `fmt::format`, and `format!`.
    let mut buf = String::new();
    {
        let out = &mut buf;
        let _ = write!(out, "{}", format_args!("{}", 1));
        let _ = write!(out, "{}", format_args!("test"));
        let _ = write!(out, "{}", format_args!("{test}", test = 3));
    }
    let s = buf;
    t!(s, "1test3");

    let s = fmt::format(format_args!("hello {}", "world"));
    t!(s, "hello world");
    let s = format!("{}: {}", "args were", format_args!("hello {}", "world"));
    t!(s, "args were: hello world");
}
13242
#[test]
fn test_order() {
    // Make sure format!() arguments are always evaluated in a left-to-right
    // ordering, regardless of how positional/named parameters reorder them
    // in the output.
    use std::cell::Cell;

    // A local `Cell` counter replaces the original `static mut` + `unsafe`
    // (which was unsound under concurrent test runners and leaked state
    // across invocations); this also makes the test idempotent.
    let counter = Cell::new(0isize);
    let foo = || {
        counter.set(counter.get() + 1);
        counter.get()
    };
    assert_eq!(
        format!("{} {} {a} {b} {} {c}", foo(), foo(), foo(), a = foo(), b = foo(), c = foo()),
        "1 2 4 5 3 6".to_string()
    );
}
13259
#[test]
fn test_once() {
    // Make sure each argument is evaluated only once even though it may be
    // formatted multiple times.
    use std::cell::Cell;

    // A local `Cell` counter replaces the original `static mut` + `unsafe`
    // (which was unsound under concurrent test runners and leaked state
    // across invocations); this also makes the test idempotent.
    let counter = Cell::new(0isize);
    let foo = || {
        counter.set(counter.get() + 1);
        counter.get()
    };
    assert_eq!(format!("{0} {0} {0} {a} {a} {a}", foo(), a = foo()), "1 1 1 2 2 2".to_string());
}
13273
#[test]
fn test_refcell() {
    // The Debug output of a RefCell reflects its borrow state: the inner
    // value while unborrowed, "<borrowed>" while a mutable borrow is live.
    let cell = RefCell::new(5);
    assert_eq!(format!("{:?}", cell), "RefCell { value: 5 }");
    {
        let _guard = cell.borrow_mut();
        assert_eq!(format!("{:?}", cell), "RefCell { value: <borrowed> }");
    }
    assert_eq!(format!("{:?}", cell), "RefCell { value: 5 }");
}
13283use std::collections::binary_heap::{Drain, PeekMut};
13284use std::collections::BinaryHeap;
13285use std::iter::TrustedLen;
13286use std::panic::{catch_unwind, AssertUnwindSafe};
13287use std::sync::atomic::{AtomicU32, Ordering};
13288
#[test]
fn test_iterator() {
    // Borrowing iteration yields elements in the heap's internal (unsorted)
    // storage order; for this input that layout happens to be [9, 5, 3].
    let heap = BinaryHeap::from(vec![5, 9, 3]);
    let expected = [9, 5, 3];
    for (idx, item) in heap.iter().enumerate() {
        assert_eq!(*item, expected[idx]);
    }
}
13300
#[test]
fn test_iter_rev_cloned_collect() {
    // Reversing the borrowing iterator over internal layout [9, 5, 3]
    // yields [3, 5, 9].
    let heap = BinaryHeap::from(vec![5, 9, 3]);
    let reversed: Vec<_> = heap.iter().rev().cloned().collect();
    assert_eq!(reversed, vec![3, 5, 9]);
}
13310
#[test]
fn test_into_iter_collect() {
    // Consuming iteration drains the heap in internal storage order,
    // which for this input is [9, 5, 3].
    let heap = BinaryHeap::from(vec![5, 9, 3]);
    let drained: Vec<_> = heap.into_iter().collect();
    assert_eq!(drained, vec![9, 5, 3]);
}
13320
#[test]
fn test_into_iter_size_hint() {
    // IntoIter reports an exact size hint that shrinks by one per item,
    // ending at (0, Some(0)) alongside the final None.
    let mut it = BinaryHeap::from(vec![5, 9]).into_iter();
    for &(remaining, next) in &[(2, Some(9)), (1, Some(5)), (0, None)] {
        assert_eq!(it.size_hint(), (remaining, Some(remaining)));
        assert_eq!(it.next(), next);
    }
}
13337
#[test]
fn test_into_iter_rev_collect() {
    // Reverse consuming iteration over internal layout [9, 5, 3]
    // yields [3, 5, 9].
    let heap = BinaryHeap::from(vec![5, 9, 3]);
    let reversed: Vec<_> = heap.into_iter().rev().collect();
    assert_eq!(reversed, vec![3, 5, 9]);
}
13347
#[test]
fn test_into_iter_sorted_collect() {
    // `into_iter_sorted` (unstable) consumes the heap, yielding elements in
    // descending order.
    let heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]);
    let sorted: Vec<_> = heap.into_iter_sorted().collect();
    assert_eq!(sorted, vec![10, 9, 8, 7, 6, 5, 4, 3, 2, 2, 1, 1, 0]);
}
13355
#[test]
fn test_drain_sorted_collect() {
    // `drain_sorted` (unstable) removes all elements in descending order
    // while leaving the heap usable (and empty).
    let mut heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]);
    let drained: Vec<_> = heap.drain_sorted().collect();
    assert_eq!(drained, vec![10, 9, 8, 7, 6, 5, 4, 3, 2, 2, 1, 1, 0]);
}
13363
13364fn check_exact_size_iterator<I: ExactSizeIterator>(len: usize, it: I) {
13365    let mut it = it;
13366
13367    for i in 0..it.len() {
13368        let (lower, upper) = it.size_hint();
13369        assert_eq!(Some(lower), upper);
13370        assert_eq!(lower, len - i);
13371        assert_eq!(it.len(), len - i);
13372        it.next();
13373    }
13374    assert_eq!(it.len(), 0);
13375    assert!(it.is_empty());
13376}
13377
#[test]
fn test_exact_size_iterator() {
    // All five iterator flavors of BinaryHeap claim ExactSizeIterator;
    // check each against the helper above.
    let heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]);
    let n = heap.len();
    check_exact_size_iterator(n, heap.iter());
    check_exact_size_iterator(n, heap.clone().into_iter());
    check_exact_size_iterator(n, heap.clone().into_iter_sorted());
    check_exact_size_iterator(n, heap.clone().drain());
    check_exact_size_iterator(n, heap.clone().drain_sorted());
}
13387
13388fn check_trusted_len<I: TrustedLen>(len: usize, it: I) {
13389    let mut it = it;
13390    for i in 0..len {
13391        let (lower, upper) = it.size_hint();
13392        if upper.is_some() {
13393            assert_eq!(Some(lower), upper);
13394            assert_eq!(lower, len - i);
13395        }
13396        it.next();
13397    }
13398}
13399
#[test]
fn test_trusted_len() {
    // The sorted iterators implement TrustedLen (unstable trait).
    let heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]);
    let n = heap.len();
    check_trusted_len(n, heap.clone().into_iter_sorted());
    check_trusted_len(n, heap.clone().drain_sorted());
}
13406
#[test]
fn test_peek_and_pop() {
    // peek() must always agree with the next pop(), and pop() drains the
    // heap in descending order (compared against a sorted reference vec).
    let data = vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1];
    let mut expected = data.clone();
    expected.sort();
    let mut heap = BinaryHeap::from(data);
    while let Some(&top) = heap.peek() {
        assert_eq!(top, *expected.last().unwrap());
        assert_eq!(heap.pop().unwrap(), expected.pop().unwrap());
    }
}
13418
#[test]
fn test_peek_mut() {
    // Mutating the maximum through PeekMut restores the heap property when
    // the guard is dropped.
    let mut heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]);
    assert_eq!(heap.peek(), Some(&10));
    {
        let mut top = heap.peek_mut().unwrap();
        *top -= 2;
    }
    // 10 was lowered to 8, so 9 is now the maximum.
    assert_eq!(heap.peek(), Some(&9));
}
13430
#[test]
fn test_peek_mut_pop() {
    // PeekMut::pop removes and returns the (possibly just-mutated) maximum.
    let mut heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]);
    assert_eq!(heap.peek(), Some(&10));
    {
        let mut top = heap.peek_mut().unwrap();
        *top -= 2;
        // The mutated value (10 - 2) is what gets popped.
        assert_eq!(PeekMut::pop(top), 8);
    }
    assert_eq!(heap.peek(), Some(&9));
}
13443
#[test]
fn test_push() {
    // Pushing onto the heap keeps the current maximum visible through peek().
    // The original repeated the push/len/peek triple five times verbatim;
    // a table-driven loop states the same expectations without the copy-paste.
    let mut heap = BinaryHeap::from(vec![2, 4, 9]);
    assert_eq!(heap.len(), 3);
    assert_eq!(*heap.peek().unwrap(), 9);
    // (value to push, expected new length, expected new maximum)
    for &(value, len, max) in &[(11, 4, 11), (5, 5, 11), (27, 6, 27), (3, 7, 27), (103, 8, 103)] {
        heap.push(value);
        assert_eq!(heap.len(), len);
        assert_eq!(*heap.peek().unwrap(), max);
    }
}
13465
#[test]
fn test_push_unique() {
    // Same as `test_push`, but with boxed (owning, non-Copy) elements.
    // `Box::new` replaces the old unstable `box` placement syntax, which has
    // been removed from the language; behavior is identical.
    let mut heap = BinaryHeap::<Box<_>>::from(vec![Box::new(2), Box::new(4), Box::new(9)]);
    assert_eq!(heap.len(), 3);
    assert_eq!(**heap.peek().unwrap(), 9);
    // (value to push, expected new length, expected new maximum)
    for &(value, len, max) in &[(11, 4, 11), (5, 5, 11), (27, 6, 27), (3, 7, 27), (103, 8, 103)] {
        heap.push(Box::new(value));
        assert_eq!(heap.len(), len);
        assert_eq!(**heap.peek().unwrap(), max);
    }
}
13487
// `into_vec` must preserve the multiset of elements (checked after sorting
// both sides), and `into_sorted_vec` must return them fully sorted.
fn check_to_vec(mut data: Vec<i32>) {
    let heap = BinaryHeap::from(data.clone());
    let mut unsorted = heap.clone().into_vec();
    unsorted.sort();
    data.sort();

    assert_eq!(unsorted, data);
    assert_eq!(heap.into_sorted_vec(), data);
}
13497
#[test]
fn test_to_vec() {
    // Run the round-trip check over empty, tiny, ascending, descending, and
    // duplicate-heavy inputs.
    let cases: &[&[i32]] = &[
        &[],
        &[5],
        &[3, 2],
        &[2, 3],
        &[5, 1, 2],
        &[1, 100, 2, 3],
        &[1, 3, 5, 7, 9, 2, 4, 6, 8, 0],
        &[2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1],
        &[9, 11, 9, 9, 9, 9, 11, 2, 3, 4, 11, 9, 0, 0, 0, 0],
        &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
        &[10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0],
        &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 0, 0, 1, 2],
        &[5, 4, 3, 2, 1, 5, 4, 3, 2, 1, 5, 4, 3, 2, 1],
    ];
    for case in cases {
        check_to_vec(case.to_vec());
    }
}
13514
#[test]
fn test_in_place_iterator_specialization() {
    // NOTE(review): these pointer-identity assertions rely on the in-place
    // collect specialization reusing the source allocation — an internal
    // optimization of this library, not a documented guarantee.
    let src: Vec<usize> = vec![1, 2, 3];
    let src_ptr = src.as_ptr();
    // Vec -> BinaryHeap through an identity map should reuse the buffer.
    let heap: BinaryHeap<_> = src.into_iter().map(std::convert::identity).collect();
    let heap_ptr = heap.iter().next().unwrap() as *const usize;
    assert_eq!(src_ptr, heap_ptr);
    // BinaryHeap -> Vec likewise.
    let sink: Vec<_> = heap.into_iter().map(std::convert::identity).collect();
    let sink_ptr = sink.as_ptr();
    assert_eq!(heap_ptr, sink_ptr);
}
13526
#[test]
fn test_empty_pop() {
    // Popping from an empty heap yields None rather than panicking.
    assert_eq!(BinaryHeap::<i32>::new().pop(), None);
}
13532
#[test]
fn test_empty_peek() {
    // peek() on an empty heap is None.
    assert_eq!(BinaryHeap::<i32>::new().peek(), None);
}
13538
#[test]
fn test_empty_peek_mut() {
    // peek_mut() on an empty heap is None.
    assert!(BinaryHeap::<i32>::new().peek_mut().is_none());
}
13544
#[test]
fn test_from_iter() {
    // A heap collected from an ascending iterator still pops in descending
    // order, i.e. in the original (pre-rev) order of `xs`.
    let xs = vec![9, 8, 7, 6, 5, 4, 3, 2, 1];

    let mut heap: BinaryHeap<_> = xs.iter().rev().cloned().collect();

    for &expected in &xs {
        assert_eq!(heap.pop(), Some(expected));
    }
}
13555
#[test]
fn test_drain() {
    // Dropping a partially-consumed Drain iterator still removes all
    // remaining elements from the heap.
    let mut heap: BinaryHeap<_> = [9, 8, 7, 6, 5, 4, 3, 2, 1].iter().cloned().collect();

    assert_eq!(heap.drain().take(5).count(), 5);

    assert!(heap.is_empty());
}
13564
#[test]
fn test_drain_sorted() {
    // `drain_sorted` (unstable) yields descending order, and dropping the
    // partially-consumed iterator removes the remaining elements too.
    let mut heap: BinaryHeap<_> = [9, 8, 7, 6, 5, 4, 3, 2, 1].iter().cloned().collect();

    assert_eq!(heap.drain_sorted().take(5).collect::<Vec<_>>(), vec![9, 8, 7, 6, 5]);

    assert!(heap.is_empty());
}
13573
#[test]
fn test_drain_sorted_leak() {
    // Even if an element's destructor panics while the DrainSorted iterator
    // is being dropped, every element must still be dropped exactly once.
    static DROPS: AtomicU32 = AtomicU32::new(0);

    #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
    struct D(u32, bool);

    impl Drop for D {
        fn drop(&mut self) {
            DROPS.fetch_add(1, Ordering::SeqCst);

            // The second field marks the element whose drop panics.
            if self.1 {
                panic!("panic in `drop`");
            }
        }
    }

    let mut q = BinaryHeap::from(vec![
        D(0, false),
        D(1, false),
        D(2, false),
        D(3, true), // this element panics when dropped
        D(4, false),
        D(5, false),
    ]);

    // Dropping the iterator drains the heap; the panic from D(3) is caught.
    catch_unwind(AssertUnwindSafe(|| drop(q.drain_sorted()))).ok();

    // All 6 elements must have been dropped despite the mid-drain panic.
    assert_eq!(DROPS.load(Ordering::SeqCst), 6);
}
13604
#[test]
fn test_extend_ref() {
    // `Extend<&T>` copies elements out of the referenced collection; check
    // both a slice source and another BinaryHeap source.
    let mut heap = BinaryHeap::new();
    heap.push(1);
    heap.push(2);
    heap.extend(&[3, 4, 5]);
    assert_eq!(heap.len(), 5);
    assert_eq!(heap.into_sorted_vec(), [1, 2, 3, 4, 5]);

    let mut heap = BinaryHeap::new();
    heap.push(1);
    heap.push(2);
    let mut other = BinaryHeap::new();
    other.push(3);
    other.push(4);
    other.push(5);
    heap.extend(&other);
    assert_eq!(heap.len(), 5);
    assert_eq!(heap.into_sorted_vec(), [1, 2, 3, 4, 5]);
}
13629
#[test]
fn test_append() {
    // append() moves every element of `b` into `a`, leaving `b` empty.
    let mut a = BinaryHeap::from(vec![-10, 1, 2, 3, 3]);
    let mut b = BinaryHeap::from(vec![-20, 5, 43]);

    a.append(&mut b);

    assert!(b.is_empty());
    assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
}
13640
#[test]
fn test_append_to_empty() {
    // Appending into an empty heap takes over the other heap's contents.
    let mut a = BinaryHeap::new();
    let mut b = BinaryHeap::from(vec![-20, 5, 43]);

    a.append(&mut b);

    assert!(b.is_empty());
    assert_eq!(a.into_sorted_vec(), [-20, 5, 43]);
}
13651
#[test]
fn test_extend_specialization() {
    // Extending with an owned BinaryHeap moves its elements in.
    let mut a = BinaryHeap::from(vec![-10, 1, 2, 3, 3]);
    a.extend(BinaryHeap::from(vec![-20, 5, 43]));

    assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
}
13661
// Compile-time check that `Drain` is covariant in its lifetime and element
// type: the inner function only typechecks if the coercion from the
// `'static` instantiation to an arbitrary shorter lifetime is permitted.
// It is never called, hence `dead_code` is allowed.
#[allow(dead_code)]
fn assert_covariance() {
    fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> {
        d
    }
}
13668
#[test]
fn test_retain() {
    // NOTE(review): the `into_vec` assertions below pin the heap's internal
    // layout after `retain`, which is an implementation detail of this
    // library rather than a documented guarantee.
    let mut a = BinaryHeap::from(vec![100, 10, 50, 1, 2, 20, 30]);
    a.retain(|&x| x != 2);

    // Check that 20 moved into 10's place.
    assert_eq!(a.clone().into_vec(), [100, 20, 50, 1, 10, 30]);

    // Retaining everything must leave the layout untouched.
    a.retain(|_| true);

    assert_eq!(a.clone().into_vec(), [100, 20, 50, 1, 10, 30]);

    a.retain(|&x| x < 50);

    assert_eq!(a.clone().into_vec(), [30, 20, 10, 1]);

    // Retaining nothing empties the heap.
    a.retain(|_| false);

    assert!(a.is_empty());
}
13689
13690// old binaryheap failed this test
13691//
13692// Integrity means that all elements are present after a comparison panics,
13693// even if the order may not be correct.
13694//
13695// Destructors must be called exactly once per element.
13696// FIXME: re-enable emscripten once it can unwind again
#[test]
#[cfg(not(target_os = "emscripten"))]
fn panic_safe() {
    use rand::{seq::SliceRandom, thread_rng};
    use std::cmp;
    use std::panic::{self, AssertUnwindSafe};
    use std::sync::atomic::{AtomicUsize, Ordering};

    // Counts every `PanicOrd` drop so we can verify exactly-once destruction.
    static DROP_COUNTER: AtomicUsize = AtomicUsize::new(0);

    // Element whose comparison panics when the `bool` flag is set.
    #[derive(Eq, PartialEq, Ord, Clone, Debug)]
    struct PanicOrd<T>(T, bool);

    impl<T> Drop for PanicOrd<T> {
        fn drop(&mut self) {
            // update global drop count
            DROP_COUNTER.fetch_add(1, Ordering::SeqCst);
        }
    }

    impl<T: PartialOrd> PartialOrd for PanicOrd<T> {
        fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
            // Any comparison involving a flagged element panics, simulating a
            // user-supplied Ord implementation that fails mid-sift.
            if self.1 || other.1 {
                panic!("Panicking comparison");
            }
            self.0.partial_cmp(&other.0)
        }
    }
    let mut rng = thread_rng();
    const DATASZ: usize = 32;
    // Miri is too slow
    let ntest = if cfg!(miri) { 1 } else { 10 };

    // don't use 0 in the data -- we want to catch the zeroed-out case.
    let data = (1..=DATASZ).collect::<Vec<_>>();

    // since it's a fuzzy test, run several tries.
    for _ in 0..ntest {
        // Try making each value in turn the panicking element.
        for i in 1..=DATASZ {
            DROP_COUNTER.store(0, Ordering::SeqCst);

            let mut panic_ords: Vec<_> =
                data.iter().filter(|&&x| x != i).map(|&x| PanicOrd(x, false)).collect();
            let panic_item = PanicOrd(i, true);

            // heapify the sane items
            panic_ords.shuffle(&mut rng);
            let mut heap = BinaryHeap::from(panic_ords);
            let inner_data;

            {
                // push the panicking item to the heap and catch the panic
                let thread_result = {
                    let mut heap_ref = AssertUnwindSafe(&mut heap);
                    panic::catch_unwind(move || {
                        heap_ref.push(panic_item);
                    })
                };
                assert!(thread_result.is_err());

                // Assert no elements were dropped
                let drops = DROP_COUNTER.load(Ordering::SeqCst);
                assert!(drops == 0, "Must not drop items. drops={}", drops);
                // Snapshot the surviving elements, then destroy the heap.
                inner_data = heap.clone().into_vec();
                drop(heap);
            }
            // Dropping the heap must have dropped every element exactly once.
            let drops = DROP_COUNTER.load(Ordering::SeqCst);
            assert_eq!(drops, DATASZ);

            // Integrity: every original value is still present (order aside).
            let mut data_sorted = inner_data.into_iter().map(|p| p.0).collect::<Vec<_>>();
            data_sorted.sort();
            assert_eq!(data_sorted, data);
        }
    }
}
13772use std::cell::Cell;
13773use std::mem::MaybeUninit;
13774use std::ptr::NonNull;
13775
#[test]
fn unitialized_zero_size_box() {
    // Zero-sized (or zero-length) uninitialized boxes must not allocate:
    // they all point at the element type's dangling address.
    // (NOTE(review): the fn name's "unitialized" typo is kept — it is the
    // test's public identifier.)
    let dangling = NonNull::<MaybeUninit<()>>::dangling().as_ptr();
    assert_eq!(&*Box::<()>::new_uninit() as *const _, dangling);
    assert_eq!(Box::<[()]>::new_uninit_slice(4).as_ptr(), dangling);
    assert_eq!(
        Box::<[String]>::new_uninit_slice(0).as_ptr(),
        NonNull::<MaybeUninit<String>>::dangling().as_ptr(),
    );
}
13791
// Minimal cloneable, non-Copy element type used by the box-clone tests below.
#[derive(Clone, PartialEq, Eq, Debug)]
struct Dummy {
    // single payload byte; the value only matters for equality comparisons
    _data: u8,
}
13796
#[test]
fn box_clone_and_clone_from_equivalence() {
    // For boxed slices of several power-of-two lengths, `clone_from` must
    // produce exactly the same contents as a plain `clone`.
    for size in (0..8).map(|i| 2usize.pow(i)) {
        let control = vec![Dummy { _data: 42 }; size].into_boxed_slice();
        let cloned = control.clone();
        let mut target = vec![Dummy { _data: 84 }; size].into_boxed_slice();
        target.clone_from(&control);
        assert_eq!(control, cloned);
        assert_eq!(control, target);
    }
}
13808
/// This test might give a false positive in case the box reallocates, but the allocator keeps the
/// original pointer.
///
/// On the other hand it won't give a false negative: if it fails, then the memory was definitely
/// not reused.
#[test]
fn box_clone_from_ptr_stability() {
    for size in (0..8).map(|i| 2usize.pow(i)) {
        let control = vec![Dummy { _data: 42 }; size].into_boxed_slice();
        let mut target = vec![Dummy { _data: 84 }; size].into_boxed_slice();
        let addr_before = target.as_ptr() as usize;
        target.clone_from(&control);
        // Equal-sized clone_from is expected to reuse the existing buffer.
        assert_eq!(target.as_ptr() as usize, addr_before);
    }
}
13824
#[test]
fn box_deref_lval() {
    // `&self` methods reach the inner Cell through the Box's Deref impl.
    let boxed = Box::new(Cell::new(5));
    boxed.set(1000);
    assert_eq!(boxed.get(), 1000);
}
13831use std::alloc::{Allocator, Global, Layout, System};
13832
13833/// Issue #45955 and #62251.
#[test]
fn alloc_system_overaligned_request() {
    // Run the over-alignment checks below against the `System` allocator.
    check_overalign_requests(System)
}
13838
#[test]
fn std_heap_overaligned_request() {
    // Run the same over-alignment checks against the `Global` allocator.
    check_overalign_requests(Global)
}
13843
// Allocate many small blocks at each requested alignment and verify every
// returned pointer honors that alignment (issues #45955 / #62251 were about
// over-aligned requests with sizes smaller than the alignment).
fn check_overalign_requests<T: Allocator>(allocator: T) {
    for &align in &[4, 8, 16, 32] {
        // less than and bigger than `MIN_ALIGN`
        for &size in &[align / 2, align - 1] {
            // size less than alignment
            let iterations = 128;
            unsafe {
                // Keep all allocations live simultaneously so the allocator
                // cannot simply hand back the same block each time.
                let pointers: Vec<_> = (0..iterations)
                    .map(|_| {
                        allocator.allocate(Layout::from_size_align(size, align).unwrap()).unwrap()
                    })
                    .collect();
                for &ptr in &pointers {
                    assert_eq!(
                        (ptr.as_non_null_ptr().as_ptr() as usize) % align,
                        0,
                        "Got a pointer less aligned than requested"
                    )
                }

                // Clean up
                for &ptr in &pointers {
                    // Deallocation must use the same layout as the allocation.
                    allocator.deallocate(
                        ptr.as_non_null_ptr(),
                        Layout::from_size_align(size, align).unwrap(),
                    )
                }
            }
        }
    }
}
13875/// Creates a [`Vec`] containing the arguments.
13876///
13877/// `vec!` allows `Vec`s to be defined with the same syntax as array expressions.
13878/// There are two forms of this macro:
13879///
13880/// - Create a [`Vec`] containing a given list of elements:
13881///
13882/// ```
13883/// let v = vec![1, 2, 3];
13884/// assert_eq!(v[0], 1);
13885/// assert_eq!(v[1], 2);
13886/// assert_eq!(v[2], 3);
13887/// ```
13888///
13889/// - Create a [`Vec`] from a given element and size:
13890///
13891/// ```
13892/// let v = vec![1; 3];
13893/// assert_eq!(v, [1, 1, 1]);
13894/// ```
13895///
13896/// Note that unlike array expressions this syntax supports all elements
13897/// which implement [`Clone`] and the number of elements doesn't have to be
13898/// a constant.
13899///
13900/// This will use `clone` to duplicate an expression, so one should be careful
13901/// using this with types having a nonstandard `Clone` implementation. For
13902/// example, `vec![Rc::new(1); 5]` will create a vector of five references
13903/// to the same boxed integer value, not five references pointing to independently
13904/// boxed integers.
13905///
13906/// Also, note that `vec![expr; 0]` is allowed, and produces an empty vector.
13907/// This will still evaluate `expr`, however, and immediately drop the resulting value, so
13908/// be mindful of side effects.
13909///
13910/// [`Vec`]: crate::vec::Vec
#[cfg(not(test))]
#[doc(alias = "alloc")]
#[doc(alias = "malloc")]
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow_internal_unstable(box_syntax, liballoc_internals)]
macro_rules! vec {
    // `vec![]`: an empty vector.
    () => (
        $crate::__rust_force_expr!($crate::vec::Vec::new())
    );
    // `vec![elem; n]`: `n` clones of `elem`.
    ($elem:expr; $n:expr) => (
        $crate::__rust_force_expr!($crate::vec::from_elem($elem, $n))
    );
    // `vec![a, b, c]` (optional trailing comma): a boxed array converted
    // into a vector via `<[_]>::into_vec`.
    ($($x:expr),+ $(,)?) => (
        $crate::__rust_force_expr!(<[_]>::into_vec(box [$($x),+]))
    );
}
13928
13929// HACK(japaric): with cfg(test) the inherent `[T]::into_vec` method, which is
13930// required for this macro definition, is not available. Instead use the
13931// `slice::into_vec`  function which is only available with cfg(test)
13932// NB see the slice::hack module in slice.rs for more information
#[cfg(test)]
macro_rules! vec {
    // `vec![]`: an empty vector.
    () => (
        $crate::vec::Vec::new()
    );
    // `vec![elem; n]`: `n` clones of `elem`.
    ($elem:expr; $n:expr) => (
        $crate::vec::from_elem($elem, $n)
    );
    // `vec![a, b, c]`: uses the test-only `slice::into_vec` free function
    // because the inherent `[T]::into_vec` is unavailable under cfg(test).
    ($($x:expr),*) => (
        $crate::slice::into_vec(box [$($x),*])
    );
    // Trailing-comma form delegates to the arm above.
    ($($x:expr,)*) => (vec![$($x),*])
}
13946
13947/// Creates a `String` using interpolation of runtime expressions.
13948///
13949/// The first argument `format!` receives is a format string. This must be a string
13950/// literal. The power of the formatting string is in the `{}`s contained.
13951///
13952/// Additional parameters passed to `format!` replace the `{}`s within the
13953/// formatting string in the order given unless named or positional parameters
13954/// are used; see [`std::fmt`] for more information.
13955///
13956/// A common use for `format!` is concatenation and interpolation of strings.
13957/// The same convention is used with [`print!`] and [`write!`] macros,
13958/// depending on the intended destination of the string.
13959///
13960/// To convert a single value to a string, use the [`to_string`] method. This
13961/// will use the [`Display`] formatting trait.
13962///
13963/// [`std::fmt`]: ../std/fmt/index.html
13964/// [`print!`]: ../std/macro.print.html
13965/// [`write!`]: core::write
13966/// [`to_string`]: crate::string::ToString
13967/// [`Display`]: core::fmt::Display
13968///
13969/// # Panics
13970///
13971/// `format!` panics if a formatting trait implementation returns an error.
13972/// This indicates an incorrect implementation
13973/// since `fmt::Write for String` never returns an error itself.
13974///
13975/// # Examples
13976///
13977/// ```
13978/// format!("test");
13979/// format!("hello {}", "world!");
13980/// format!("x = {}, y = {y}", 10, y = 30);
13981/// ```
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "format_macro")]
macro_rules! format {
    ($($arg:tt)*) => {{
        // NOTE(review): the intermediate `res` binding looks deliberate
        // (presumably to end borrows of argument temporaries before the
        // block's value is produced) — keep it when touching this macro.
        let res = $crate::fmt::format($crate::__export::format_args!($($arg)*));
        res
    }}
}
13991
13992/// Force AST node to an expression to improve diagnostics in pattern position.
#[doc(hidden)]
#[macro_export]
#[unstable(feature = "liballoc_internals", issue = "none", reason = "implementation detail")]
macro_rules! __rust_force_expr {
    // Matching the input as an `expr` fragment forces it to be parsed as an
    // expression, which produces better diagnostics when the macro result is
    // used in pattern position.
    ($e:expr) => {
        $e
    };
}
14001//! Memory allocation APIs
14002
14003#![stable(feature = "alloc_module", since = "1.28.0")]
14004
14005#[cfg(not(test))]
14006use core::intrinsics;
14007use core::intrinsics::{min_align_of_val, size_of_val};
14008
14009use core::ptr::Unique;
14010#[cfg(not(test))]
14011use core::ptr::{self, NonNull};
14012
14013#[stable(feature = "alloc_module", since = "1.28.0")]
14014#[doc(inline)]
14015pub use core::alloc::*;
14016
14017#[cfg(test)]
14018mod tests;
14019
extern "Rust" {
    // These are the magic symbols to call the global allocator.  rustc generates
    // them to call `__rg_alloc` etc. if there is a `#[global_allocator]` attribute
    // (the code expanding that attribute macro generates those functions), or to call
    // the default implementations in libstd (`__rdl_alloc` etc. in `library/std/src/alloc.rs`)
    // otherwise.
    // The rustc fork of LLVM also special-cases these function names to be able to optimize them
    // like `malloc`, `realloc`, and `free`, respectively.
    // `#[rustc_allocator_nounwind]` marks each symbol as never unwinding.
    #[rustc_allocator]
    #[rustc_allocator_nounwind]
    fn __rust_alloc(size: usize, align: usize) -> *mut u8;
    #[rustc_allocator_nounwind]
    fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
    #[rustc_allocator_nounwind]
    fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
    #[rustc_allocator_nounwind]
    fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;
}
14038
/// The global memory allocator.
///
/// This type implements the [`Allocator`] trait by forwarding calls
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](self#functions).
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Copy, Clone, Default, Debug)]
#[cfg(not(test))]
// Zero-sized: all state lives in the process-wide registered allocator.
pub struct Global;
14051
14052#[cfg(test)]
14053pub use std::alloc::Global;
14054
14055/// Allocate memory with the global allocator.
14056///
14057/// This function forwards calls to the [`GlobalAlloc::alloc`] method
14058/// of the allocator registered with the `#[global_allocator]` attribute
14059/// if there is one, or the `std` crate’s default.
14060///
14061/// This function is expected to be deprecated in favor of the `alloc` method
14062/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
14063///
14064/// # Safety
14065///
14066/// See [`GlobalAlloc::alloc`].
14067///
14068/// # Examples
14069///
14070/// ```
14071/// use std::alloc::{alloc, dealloc, Layout};
14072///
14073/// unsafe {
14074///     let layout = Layout::new::<u16>();
14075///     let ptr = alloc(layout);
14076///
14077///     *(ptr as *mut u16) = 42;
14078///     assert_eq!(*(ptr as *mut u16), 42);
14079///
14080///     dealloc(ptr, layout);
14081/// }
14082/// ```
14083#[stable(feature = "global_alloc", since = "1.28.0")]
14084#[inline]
14085pub unsafe fn alloc(layout: Layout) -> *mut u8 {
14086    unsafe { __rust_alloc(layout.size(), layout.align()) }
14087}
14088
14089/// Deallocate memory with the global allocator.
14090///
14091/// This function forwards calls to the [`GlobalAlloc::dealloc`] method
14092/// of the allocator registered with the `#[global_allocator]` attribute
14093/// if there is one, or the `std` crate’s default.
14094///
14095/// This function is expected to be deprecated in favor of the `dealloc` method
14096/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
14097///
14098/// # Safety
14099///
14100/// See [`GlobalAlloc::dealloc`].
14101#[stable(feature = "global_alloc", since = "1.28.0")]
14102#[inline]
14103pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
14104    unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
14105}
14106
14107/// Reallocate memory with the global allocator.
14108///
14109/// This function forwards calls to the [`GlobalAlloc::realloc`] method
14110/// of the allocator registered with the `#[global_allocator]` attribute
14111/// if there is one, or the `std` crate’s default.
14112///
14113/// This function is expected to be deprecated in favor of the `realloc` method
14114/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
14115///
14116/// # Safety
14117///
14118/// See [`GlobalAlloc::realloc`].
14119#[stable(feature = "global_alloc", since = "1.28.0")]
14120#[inline]
14121pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
14122    unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
14123}
14124
14125/// Allocate zero-initialized memory with the global allocator.
14126///
14127/// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method
14128/// of the allocator registered with the `#[global_allocator]` attribute
14129/// if there is one, or the `std` crate’s default.
14130///
14131/// This function is expected to be deprecated in favor of the `alloc_zeroed` method
14132/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
14133///
14134/// # Safety
14135///
14136/// See [`GlobalAlloc::alloc_zeroed`].
14137///
14138/// # Examples
14139///
14140/// ```
14141/// use std::alloc::{alloc_zeroed, dealloc, Layout};
14142///
14143/// unsafe {
14144///     let layout = Layout::new::<u16>();
14145///     let ptr = alloc_zeroed(layout);
14146///
14147///     assert_eq!(*(ptr as *mut u16), 0);
14148///
14149///     dealloc(ptr, layout);
14150/// }
14151/// ```
14152#[stable(feature = "global_alloc", since = "1.28.0")]
14153#[inline]
14154pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
14155    unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) }
14156}
14157
#[cfg(not(test))]
impl Global {
    /// Allocates a block for `layout`, zero-initialized when `zeroed` is true.
    ///
    /// Zero-sized layouts are served with a dangling (but correctly aligned)
    /// pointer and no actual allocation.
    #[inline]
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) };
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    /// Shared implementation of `grow` and `grow_zeroed`.
    // SAFETY: Same as `Allocator::grow`
    #[inline]
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            // Nothing was actually allocated for a zero-sized block, so a grow
            // from it is just a fresh allocation.
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero because `new_size` is greater than or equal to
            // `old_size` (required by the safety conditions) and `old_size` is non-zero in
            // this arm (the zero case was handled above). Other conditions must be upheld
            // by the caller
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                intrinsics::assume(new_size >= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    // Only the newly added tail needs zeroing; the old prefix
                    // keeps its bytes.
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
14220
#[unstable(feature = "allocator_api", issue = "32838")]
#[cfg(not(test))]
unsafe impl Allocator for Global {
    #[inline]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // Zero-sized "allocations" are dangling pointers produced by
        // `alloc_impl`; nothing was allocated, so there is nothing to free.
        if layout.size() != 0 {
            // SAFETY: `layout` is non-zero in size,
            // other conditions must be upheld by the caller
            unsafe { dealloc(ptr.as_ptr(), layout) }
        }
    }

    #[inline]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                // Shrinking to zero frees the block and returns a dangling
                // (aligned) pointer, mirroring `alloc_impl`'s zero-size case.
                self.deallocate(ptr, old_layout);
                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                intrinsics::assume(new_size <= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = self.allocate(new_layout)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
14308
/// The allocator for unique pointers.
// This function must not unwind. If it does, MIR codegen will fail.
#[cfg(not(test))]
#[lang = "exchange_malloc"]
#[inline]
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
    // SAFETY: `size`/`align` are not validated here — NOTE(review): presumably
    // the compiler only emits calls with a valid layout for the boxed type;
    // confirm against codegen.
    let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
    match Global.allocate(layout) {
        Ok(ptr) => ptr.as_mut_ptr(),
        // `handle_alloc_error` diverges (`-> !`), so this arm never returns,
        // which preserves the no-unwind requirement above.
        Err(_) => handle_alloc_error(layout),
    }
}
14321
/// Frees the memory behind a `Box` without running the pointee's destructor.
///
/// # Safety
///
/// `ptr` must point to memory allocated by `alloc` with the pointee's layout.
/// NOTE(review): callers (the `Box` drop glue) are presumably expected to have
/// already dropped the value — confirm before reusing elsewhere.
#[cfg_attr(not(test), lang = "box_free")]
#[inline]
// This signature has to be the same as `Box`, otherwise an ICE will happen.
// When an additional parameter to `Box` is added (like `A: Allocator`), this has to be added here as
// well.
// For example if `Box` is changed to  `struct Box<T: ?Sized, A: Allocator>(Unique<T>, A)`,
// this function has to be changed to `fn box_free<T: ?Sized, A: Allocator>(Unique<T>, A)` as well.
pub(crate) unsafe fn box_free<T: ?Sized, A: Allocator>(ptr: Unique<T>, alloc: A) {
    unsafe {
        // `size_of_val`/`min_align_of_val` read the *dynamic* size and
        // alignment, which handles unsized `T` (slices, trait objects).
        let size = size_of_val(ptr.as_ref());
        let align = min_align_of_val(ptr.as_ref());
        let layout = Layout::from_size_align_unchecked(size, align);
        alloc.deallocate(ptr.cast().into(), layout)
    }
}
14337
// # Allocation error handler

extern "Rust" {
    // This is the magic symbol to call the global alloc error handler.  rustc generates
    // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the
    // default implementations below (`__rdl_oom`) otherwise.
    // Declared diverging (`-> !`): the handler never returns to the caller.
    #[rustc_allocator_nounwind]
    fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
}
14347
14348/// Abort on memory allocation error or failure.
14349///
14350/// Callers of memory allocation APIs wishing to abort computation
14351/// in response to an allocation error are encouraged to call this function,
14352/// rather than directly invoking `panic!` or similar.
14353///
14354/// The default behavior of this function is to print a message to standard error
14355/// and abort the process.
14356/// It can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
14357///
14358/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
14359/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
14360#[stable(feature = "global_alloc", since = "1.28.0")]
14361#[cfg(not(test))]
14362#[rustc_allocator_nounwind]
14363#[cold]
14364pub fn handle_alloc_error(layout: Layout) -> ! {
14365    unsafe {
14366        __rust_alloc_error_handler(layout.size(), layout.align());
14367    }
14368}
14369
14370// For alloc test `std::alloc::handle_alloc_error` can be used directly.
14371#[cfg(test)]
14372pub use std::alloc::handle_alloc_error;
14373
#[cfg(not(any(target_os = "hermit", test)))]
#[doc(hidden)]
#[allow(unused_attributes)]
#[unstable(feature = "alloc_internals", issue = "none")]
pub mod __alloc_error_handler {
    use crate::alloc::Layout;

    // called via generated `__rust_alloc_error_handler`

    // if there is no `#[alloc_error_handler]`
    #[rustc_std_internal_symbol]
    pub unsafe extern "C" fn __rdl_oom(size: usize, _align: usize) -> ! {
        panic!("memory allocation of {} bytes failed", size)
    }

    // if there is a `#[alloc_error_handler]`
    #[rustc_std_internal_symbol]
    pub unsafe extern "C" fn __rg_oom(size: usize, align: usize) -> ! {
        // SAFETY: no validation here — presumably the generated caller always
        // passes the size/align of a valid layout (NOTE(review): confirm).
        let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
        extern "Rust" {
            // The `#[alloc_error_handler]` attribute expands to this lang item.
            #[lang = "oom"]
            fn oom_impl(layout: Layout) -> !;
        }
        // SAFETY: `oom_impl` is the user-provided handler and diverges.
        unsafe { oom_impl(layout) }
    }
}
14400
/// Specialize clones into pre-allocated, uninitialized memory.
/// Used by `Box::clone` and `Rc`/`Arc::make_mut`.
pub(crate) trait WriteCloneIntoRaw: Sized {
    /// Writes a clone of `self` into `target` without reading `*target`.
    ///
    /// # Safety
    ///
    /// `target` must be valid for writes of `size_of::<Self>()` bytes and
    /// properly aligned; it may point to uninitialized memory.
    unsafe fn write_clone_into_raw(&self, target: *mut Self);
}
14406
impl<T: Clone> WriteCloneIntoRaw for T {
    #[inline]
    // `default` marks this as the specializable base impl; the `T: Copy`
    // specialization below overrides it.
    default unsafe fn write_clone_into_raw(&self, target: *mut Self) {
        // Having allocated *first* may allow the optimizer to create
        // the cloned value in-place, skipping the local and move.
        unsafe { target.write(self.clone()) };
    }
}
14415
impl<T: Copy> WriteCloneIntoRaw for T {
    #[inline]
    // Specialization of the `T: Clone` impl above for `Copy` types.
    unsafe fn write_clone_into_raw(&self, target: *mut Self) {
        // We can always copy in-place, without ever involving a local value.
        unsafe { target.copy_from_nonoverlapping(self, 1) };
    }
}
14423#![unstable(feature = "raw_vec_internals", reason = "implementation detail", issue = "none")]
14424#![doc(hidden)]
14425
14426use core::alloc::LayoutError;
14427use core::cmp;
14428use core::intrinsics;
14429use core::mem::{self, ManuallyDrop, MaybeUninit};
14430use core::ops::Drop;
14431use core::ptr::{self, NonNull, Unique};
14432use core::slice;
14433
14434use crate::alloc::{handle_alloc_error, Allocator, Global, Layout};
14435use crate::boxed::Box;
14436use crate::collections::TryReserveError::{self, *};
14437
14438#[cfg(test)]
14439mod tests;
14440
/// Selects, for `RawVec::allocate_in`, whether the new buffer is requested
/// uninitialized or zeroed.
enum AllocInit {
    /// The contents of the new memory are uninitialized.
    Uninitialized,
    /// The new memory is guaranteed to be zeroed.
    Zeroed,
}
14447
/// A low-level utility for more ergonomically allocating, reallocating, and deallocating
/// a buffer of memory on the heap without having to worry about all the corner cases
/// involved. This type is excellent for building your own data structures like Vec and VecDeque.
/// In particular:
///
/// * Produces `Unique::dangling()` on zero-sized types.
/// * Produces `Unique::dangling()` on zero-length allocations.
/// * Avoids freeing `Unique::dangling()`.
/// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics).
/// * Guards against 32-bit systems allocating more than isize::MAX bytes.
/// * Guards against overflowing your length.
/// * Calls `handle_alloc_error` for fallible allocations.
/// * Contains a `ptr::Unique` and thus endows the user with all related benefits.
/// * Uses the excess returned from the allocator to use the largest available capacity.
///
/// This type does not in any way inspect the memory that it manages. When dropped it *will*
/// free its memory, but it *won't* try to drop its contents. It is up to the user of `RawVec`
/// to handle the actual things *stored* inside of a `RawVec`.
///
/// Note that the excess of a zero-sized types is always infinite, so `capacity()` always returns
/// `usize::MAX`. This means that you need to be careful when round-tripping this type with a
/// `Box<[T]>`, since `capacity()` won't yield the length.
#[allow(missing_debug_implementations)]
pub struct RawVec<T, A: Allocator = Global> {
    // Start of the buffer; dangling when `cap == 0` or `T` is zero-sized.
    ptr: Unique<T>,
    // Capacity in *elements*; `0` means "unallocated" (see `new_in`).
    cap: usize,
    // The allocator used for the buffer.
    alloc: A,
}
14476
impl<T> RawVec<T, Global> {
    /// HACK(Centril): This exists because stable `const fn` can only call stable `const fn`, so
    /// they cannot call `Self::new()`.
    ///
    /// If you change `RawVec<T>::new` or dependencies, please take care to not introduce anything
    /// that would truly const-call something unstable.
    pub const NEW: Self = Self::new();

    /// Creates the biggest possible `RawVec` (on the system heap)
    /// without allocating. If `T` has positive size, then this makes a
    /// `RawVec` with capacity `0`. If `T` is zero-sized, then it makes a
    /// `RawVec` with capacity `usize::MAX`. Useful for implementing
    /// delayed allocation.
    pub const fn new() -> Self {
        // Delegates to the allocator-parameterized constructor with `Global`.
        Self::new_in(Global)
    }

    /// Creates a `RawVec` (on the system heap) with exactly the
    /// capacity and alignment requirements for a `[T; capacity]`. This is
    /// equivalent to calling `RawVec::new` when `capacity` is `0` or `T` is
    /// zero-sized. Note that if `T` is zero-sized this means you will
    /// *not* get a `RawVec` with the requested capacity.
    ///
    /// # Panics
    ///
    /// Panics if the requested capacity exceeds `isize::MAX` bytes.
    ///
    /// # Aborts
    ///
    /// Aborts on OOM.
    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        Self::with_capacity_in(capacity, Global)
    }

    /// Like `with_capacity`, but guarantees the buffer is zeroed.
    #[inline]
    pub fn with_capacity_zeroed(capacity: usize) -> Self {
        Self::with_capacity_zeroed_in(capacity, Global)
    }

    /// Reconstitutes a `RawVec` from a pointer and capacity.
    ///
    /// # Safety
    ///
    /// The `ptr` must be allocated (on the system heap), and with the given `capacity`.
    /// The `capacity` cannot exceed `isize::MAX` for sized types. (only a concern on 32-bit
    /// systems). ZST vectors may have a capacity up to `usize::MAX`.
    /// If the `ptr` and `capacity` come from a `RawVec`, then this is guaranteed.
    #[inline]
    pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize) -> Self {
        // SAFETY: same contract as `from_raw_parts_in`, with the `Global` allocator.
        unsafe { Self::from_raw_parts_in(ptr, capacity, Global) }
    }
}
14531
14532impl<T, A: Allocator> RawVec<T, A> {
14533    // Tiny Vecs are dumb. Skip to:
14534    // - 8 if the element size is 1, because any heap allocators is likely
14535    //   to round up a request of less than 8 bytes to at least 8 bytes.
14536    // - 4 if elements are moderate-sized (<= 1 KiB).
14537    // - 1 otherwise, to avoid wasting too much space for very short Vecs.
14538    const MIN_NON_ZERO_CAP: usize = if mem::size_of::<T>() == 1 {
14539        8
14540    } else if mem::size_of::<T>() <= 1024 {
14541        4
14542    } else {
14543        1
14544    };
14545
14546    /// Like `new`, but parameterized over the choice of allocator for
14547    /// the returned `RawVec`.
14548    #[rustc_allow_const_fn_unstable(const_fn)]
14549    pub const fn new_in(alloc: A) -> Self {
14550        // `cap: 0` means "unallocated". zero-sized types are ignored.
14551        Self { ptr: Unique::dangling(), cap: 0, alloc }
14552    }
14553
14554    /// Like `with_capacity`, but parameterized over the choice of
14555    /// allocator for the returned `RawVec`.
14556    #[inline]
14557    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
14558        Self::allocate_in(capacity, AllocInit::Uninitialized, alloc)
14559    }
14560
14561    /// Like `with_capacity_zeroed`, but parameterized over the choice
14562    /// of allocator for the returned `RawVec`.
14563    #[inline]
14564    pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self {
14565        Self::allocate_in(capacity, AllocInit::Zeroed, alloc)
14566    }
14567
14568    /// Converts a `Box<[T]>` into a `RawVec<T>`.
14569    pub fn from_box(slice: Box<[T], A>) -> Self {
14570        unsafe {
14571            let (slice, alloc) = Box::into_raw_with_allocator(slice);
14572            RawVec::from_raw_parts_in(slice.as_mut_ptr(), slice.len(), alloc)
14573        }
14574    }
14575
14576    /// Converts the entire buffer into `Box<[MaybeUninit<T>]>` with the specified `len`.
14577    ///
14578    /// Note that this will correctly reconstitute any `cap` changes
14579    /// that may have been performed. (See description of type for details.)
14580    ///
14581    /// # Safety
14582    ///
14583    /// * `len` must be greater than or equal to the most recently requested capacity, and
14584    /// * `len` must be less than or equal to `self.capacity()`.
14585    ///
14586    /// Note, that the requested capacity and `self.capacity()` could differ, as
14587    /// an allocator could overallocate and return a greater memory block than requested.
14588    pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit<T>], A> {
14589        // Sanity-check one half of the safety requirement (we cannot check the other half).
14590        debug_assert!(
14591            len <= self.capacity(),
14592            "`len` must be smaller than or equal to `self.capacity()`"
14593        );
14594
14595        let me = ManuallyDrop::new(self);
14596        unsafe {
14597            let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
14598            Box::from_raw_in(slice, ptr::read(&me.alloc))
14599        }
14600    }
14601
14602    fn allocate_in(capacity: usize, init: AllocInit, alloc: A) -> Self {
14603        if mem::size_of::<T>() == 0 {
14604            Self::new_in(alloc)
14605        } else {
14606            // We avoid `unwrap_or_else` here because it bloats the amount of
14607            // LLVM IR generated.
14608            let layout = match Layout::array::<T>(capacity) {
14609                Ok(layout) => layout,
14610                Err(_) => capacity_overflow(),
14611            };
14612            match alloc_guard(layout.size()) {
14613                Ok(_) => {}
14614                Err(_) => capacity_overflow(),
14615            }
14616            let result = match init {
14617                AllocInit::Uninitialized => alloc.allocate(layout),
14618                AllocInit::Zeroed => alloc.allocate_zeroed(layout),
14619            };
14620            let ptr = match result {
14621                Ok(ptr) => ptr,
14622                Err(_) => handle_alloc_error(layout),
14623            };
14624
14625            Self {
14626                ptr: unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) },
14627                cap: Self::capacity_from_bytes(ptr.len()),
14628                alloc,
14629            }
14630        }
14631    }
14632
14633    /// Reconstitutes a `RawVec` from a pointer, capacity, and allocator.
14634    ///
14635    /// # Safety
14636    ///
14637    /// The `ptr` must be allocated (via the given allocator `alloc`), and with the given
14638    /// `capacity`.
14639    /// The `capacity` cannot exceed `isize::MAX` for sized types. (only a concern on 32-bit
14640    /// systems). ZST vectors may have a capacity up to `usize::MAX`.
14641    /// If the `ptr` and `capacity` come from a `RawVec` created via `alloc`, then this is
14642    /// guaranteed.
14643    #[inline]
14644    pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self {
14645        Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap: capacity, alloc }
14646    }
14647
14648    /// Gets a raw pointer to the start of the allocation. Note that this is
14649    /// `Unique::dangling()` if `capacity == 0` or `T` is zero-sized. In the former case, you must
14650    /// be careful.
14651    #[inline]
14652    pub fn ptr(&self) -> *mut T {
14653        self.ptr.as_ptr()
14654    }
14655
14656    /// Gets the capacity of the allocation.
14657    ///
14658    /// This will always be `usize::MAX` if `T` is zero-sized.
14659    #[inline(always)]
14660    pub fn capacity(&self) -> usize {
14661        if mem::size_of::<T>() == 0 { usize::MAX } else { self.cap }
14662    }
14663
14664    /// Returns a shared reference to the allocator backing this `RawVec`.
14665    pub fn allocator(&self) -> &A {
14666        &self.alloc
14667    }
14668
14669    fn current_memory(&self) -> Option<(NonNull<u8>, Layout)> {
14670        if mem::size_of::<T>() == 0 || self.cap == 0 {
14671            None
14672        } else {
14673            // We have an allocated chunk of memory, so we can bypass runtime
14674            // checks to get our current layout.
14675            unsafe {
14676                let align = mem::align_of::<T>();
14677                let size = mem::size_of::<T>() * self.cap;
14678                let layout = Layout::from_size_align_unchecked(size, align);
14679                Some((self.ptr.cast().into(), layout))
14680            }
14681        }
14682    }
14683
14684    /// Ensures that the buffer contains at least enough space to hold `len +
14685    /// additional` elements. If it doesn't already have enough capacity, will
14686    /// reallocate enough space plus comfortable slack space to get amortized
14687    /// *O*(1) behavior. Will limit this behavior if it would needlessly cause
14688    /// itself to panic.
14689    ///
14690    /// If `len` exceeds `self.capacity()`, this may fail to actually allocate
14691    /// the requested space. This is not really unsafe, but the unsafe
14692    /// code *you* write that relies on the behavior of this function may break.
14693    ///
14694    /// This is ideal for implementing a bulk-push operation like `extend`.
14695    ///
14696    /// # Panics
14697    ///
14698    /// Panics if the new capacity exceeds `isize::MAX` bytes.
14699    ///
14700    /// # Aborts
14701    ///
14702    /// Aborts on OOM.
14703    ///
14704    /// # Examples
14705    ///
14706    /// ```
14707    /// # #![feature(raw_vec_internals)]
14708    /// # extern crate alloc;
14709    /// # use std::ptr;
14710    /// # use alloc::raw_vec::RawVec;
14711    /// struct MyVec<T> {
14712    ///     buf: RawVec<T>,
14713    ///     len: usize,
14714    /// }
14715    ///
14716    /// impl<T: Clone> MyVec<T> {
14717    ///     pub fn push_all(&mut self, elems: &[T]) {
14718    ///         self.buf.reserve(self.len, elems.len());
14719    ///         // reserve would have aborted or panicked if the len exceeded
14720    ///         // `isize::MAX` so this is safe to do unchecked now.
14721    ///         for x in elems {
14722    ///             unsafe {
14723    ///                 ptr::write(self.buf.ptr().add(self.len), x.clone());
14724    ///             }
14725    ///             self.len += 1;
14726    ///         }
14727    ///     }
14728    /// }
14729    /// # fn main() {
14730    /// #   let mut vector = MyVec { buf: RawVec::new(), len: 0 };
14731    /// #   vector.push_all(&[1, 3, 5, 7, 9]);
14732    /// # }
14733    /// ```
14734    #[inline]
14735    pub fn reserve(&mut self, len: usize, additional: usize) {
14736        // Callers expect this function to be very cheap when there is already sufficient capacity.
14737        // Therefore, we move all the resizing and error-handling logic from grow_amortized and
14738        // handle_reserve behind a call, while making sure that the this function is likely to be
14739        // inlined as just a comparison and a call if the comparison fails.
14740        #[cold]
14741        fn do_reserve_and_handle<T, A: Allocator>(
14742            slf: &mut RawVec<T, A>,
14743            len: usize,
14744            additional: usize,
14745        ) {
14746            handle_reserve(slf.grow_amortized(len, additional));
14747        }
14748
14749        if self.needs_to_grow(len, additional) {
14750            do_reserve_and_handle(self, len, additional);
14751        }
14752    }
14753
14754    /// The same as `reserve`, but returns on errors instead of panicking or aborting.
14755    pub fn try_reserve(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
14756        if self.needs_to_grow(len, additional) {
14757            self.grow_amortized(len, additional)
14758        } else {
14759            Ok(())
14760        }
14761    }
14762
    /// Ensures that the buffer contains at least enough space to hold `len +
    /// additional` elements. If it doesn't already, will reallocate the
    /// minimum possible amount of memory necessary. Generally this will be
    /// exactly the amount of memory necessary, but in principle the allocator
    /// is free to give back more than we asked for.
    ///
    /// If `len` exceeds `self.capacity()`, this may fail to actually allocate
    /// the requested space. This is not really unsafe, but the unsafe code
    /// *you* write that relies on the behavior of this function may break.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity exceeds `isize::MAX` bytes.
    ///
    /// # Aborts
    ///
    /// Aborts on OOM.
    pub fn reserve_exact(&mut self, len: usize, additional: usize) {
        // Delegate to the fallible variant and convert errors into the
        // documented panic (capacity overflow) or abort (OOM).
        handle_reserve(self.try_reserve_exact(len, additional));
    }
14783
14784    /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting.
14785    pub fn try_reserve_exact(
14786        &mut self,
14787        len: usize,
14788        additional: usize,
14789    ) -> Result<(), TryReserveError> {
14790        if self.needs_to_grow(len, additional) { self.grow_exact(len, additional) } else { Ok(()) }
14791    }
14792
    /// Shrinks the allocation down to the specified amount. If the given amount
    /// is 0, actually completely deallocates.
    ///
    /// # Panics
    ///
    /// Panics if the given amount is *larger* than the current capacity.
    ///
    /// # Aborts
    ///
    /// Aborts on OOM.
    pub fn shrink_to_fit(&mut self, amount: usize) {
        // `shrink` reports failure via `TryReserveError`; translate that into
        // the documented panic/abort behavior.
        handle_reserve(self.shrink(amount));
    }
14806}
14807
impl<T, A: Allocator> RawVec<T, A> {
    /// Returns if the buffer needs to grow to fulfill the needed extra capacity.
    /// Mainly used to make inlining reserve-calls possible without inlining `grow`.
    fn needs_to_grow(&self, len: usize, additional: usize) -> bool {
        // NOTE(review): `wrapping_sub` keeps this hot path branch-free, but it
        // assumes `len <= self.capacity()` — confirm callers uphold that.
        additional > self.capacity().wrapping_sub(len)
    }

    /// Converts an allocation size in bytes into a capacity in elements of `T`.
    fn capacity_from_bytes(excess: usize) -> usize {
        // A zero divisor is unreachable: zero-sized types never take this path.
        debug_assert_ne!(mem::size_of::<T>(), 0);
        excess / mem::size_of::<T>()
    }

    /// Records a fresh allocation, deriving the capacity from the byte length
    /// the allocator actually returned (which may exceed what was requested).
    fn set_ptr(&mut self, ptr: NonNull<[u8]>) {
        // SAFETY: `ptr` comes from a successful allocator call, so it is non-null.
        self.ptr = unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) };
        self.cap = Self::capacity_from_bytes(ptr.len());
    }

    // This method is usually instantiated many times. So we want it to be as
    // small as possible, to improve compile times. But we also want as much of
    // its contents to be statically computable as possible, to make the
    // generated code run faster. Therefore, this method is carefully written
    // so that all of the code that depends on `T` is within it, while as much
    // of the code that doesn't depend on `T` as possible is in functions that
    // are non-generic over `T`.
    fn grow_amortized(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
        // This is ensured by the calling contexts.
        debug_assert!(additional > 0);

        if mem::size_of::<T>() == 0 {
            // Since we return a capacity of `usize::MAX` when `elem_size` is
            // 0, getting to here necessarily means the `RawVec` is overfull.
            return Err(CapacityOverflow);
        }

        // Nothing we can really do about these checks, sadly.
        let required_cap = len.checked_add(additional).ok_or(CapacityOverflow)?;

        // This guarantees exponential growth. The doubling cannot overflow
        // because `cap <= isize::MAX` and the type of `cap` is `usize`.
        let cap = cmp::max(self.cap * 2, required_cap);
        let cap = cmp::max(Self::MIN_NON_ZERO_CAP, cap);

        // A layout error is mapped to `CapacityOverflow` inside `finish_grow`.
        let new_layout = Layout::array::<T>(cap);

        // `finish_grow` is non-generic over `T`.
        let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
        self.set_ptr(ptr);
        Ok(())
    }

    // The constraints on this method are much the same as those on
    // `grow_amortized`, but this method is usually instantiated less often so
    // it's less critical.
    fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
        if mem::size_of::<T>() == 0 {
            // Since we return a capacity of `usize::MAX` when the type size is
            // 0, getting to here necessarily means the `RawVec` is overfull.
            return Err(CapacityOverflow);
        }

        // Unlike `grow_amortized`, request exactly the needed capacity.
        let cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
        let new_layout = Layout::array::<T>(cap);

        // `finish_grow` is non-generic over `T`.
        let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
        self.set_ptr(ptr);
        Ok(())
    }

    /// Shrinks the backing allocation down to hold exactly `amount` elements.
    fn shrink(&mut self, amount: usize) -> Result<(), TryReserveError> {
        assert!(amount <= self.capacity(), "Tried to shrink to a larger capacity");

        // Nothing to shrink when there is no live heap allocation.
        let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
        let new_size = amount * mem::size_of::<T>();

        let ptr = unsafe {
            // SAFETY: `new_size` is no larger than the current allocation's
            // size (guarded by the assert above), so the existing alignment
            // stays valid for the smaller layout.
            let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
            self.alloc.shrink(ptr, layout, new_layout).map_err(|_| TryReserveError::AllocError {
                layout: new_layout,
                non_exhaustive: (),
            })?
        };
        self.set_ptr(ptr);
        Ok(())
    }
}
14894
// This function is outside `RawVec` to minimize compile times. See the comment
// above `RawVec::grow_amortized` for details. (The `A` parameter isn't
// significant, because the number of different `A` types seen in practice is
// much smaller than the number of `T` types.)
#[inline(never)]
fn finish_grow<A>(
    new_layout: Result<Layout, LayoutError>,
    current_memory: Option<(NonNull<u8>, Layout)>,
    alloc: &mut A,
) -> Result<NonNull<[u8]>, TryReserveError>
where
    A: Allocator,
{
    // Check for the error here to minimize the size of `RawVec::grow_*`.
    let new_layout = new_layout.map_err(|_| CapacityOverflow)?;

    // Reject requests over `isize::MAX` bytes before touching the allocator.
    alloc_guard(new_layout.size())?;

    let memory = if let Some((ptr, old_layout)) = current_memory {
        debug_assert_eq!(old_layout.align(), new_layout.align());
        unsafe {
            // The allocator checks for alignment equality
            intrinsics::assume(old_layout.align() == new_layout.align());
            alloc.grow(ptr, old_layout, new_layout)
        }
    } else {
        // No existing allocation: this is the initial allocation.
        alloc.allocate(new_layout)
    };

    // Surface allocator failure as a `TryReserveError` carrying the layout.
    memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })
}
14926
14927unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec<T, A> {
14928    /// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
14929    fn drop(&mut self) {
14930        if let Some((ptr, layout)) = self.current_memory() {
14931            unsafe { self.alloc.deallocate(ptr, layout) }
14932        }
14933    }
14934}
14935
14936// Central function for reserve error handling.
14937#[inline]
14938fn handle_reserve(result: Result<(), TryReserveError>) {
14939    match result {
14940        Err(CapacityOverflow) => capacity_overflow(),
14941        Err(AllocError { layout, .. }) => handle_alloc_error(layout),
14942        Ok(()) => { /* yay */ }
14943    }
14944}
14945
14946// We need to guarantee the following:
14947// * We don't ever allocate `> isize::MAX` byte-size objects.
14948// * We don't overflow `usize::MAX` and actually allocate too little.
14949//
14950// On 64-bit we just need to check for overflow since trying to allocate
14951// `> isize::MAX` bytes will surely fail. On 32-bit and 16-bit we need to add
14952// an extra guard for this in case we're running on a platform which can use
14953// all 4GB in user-space, e.g., PAE or x32.
14954
14955#[inline]
14956fn alloc_guard(alloc_size: usize) -> Result<(), TryReserveError> {
14957    if usize::BITS < 64 && alloc_size > isize::MAX as usize {
14958        Err(CapacityOverflow)
14959    } else {
14960        Ok(())
14961    }
14962}
14963
// Single, shared panic site for capacity overflow. Funneling every overflow
// report through one function keeps the panic-related code generation in one
// place instead of duplicating it at every call site in the module.
fn capacity_overflow() -> ! {
    panic!("capacity overflow")
}
14970//! Unicode string slices.
14971//!
14972//! *[See also the `str` primitive type](str).*
14973//!
14974//! The `&str` type is one of the two main string types, the other being `String`.
14975//! Unlike its `String` counterpart, its contents are borrowed.
14976//!
14977//! # Basic Usage
14978//!
14979//! A basic string declaration of `&str` type:
14980//!
14981//! ```
14982//! let hello_world = "Hello, World!";
14983//! ```
14984//!
14985//! Here we have declared a string literal, also known as a string slice.
14986//! String literals have a static lifetime, which means the string `hello_world`
14987//! is guaranteed to be valid for the duration of the entire program.
14988//! We can explicitly specify `hello_world`'s lifetime as well:
14989//!
14990//! ```
14991//! let hello_world: &'static str = "Hello, world!";
14992//! ```
14993
14994#![stable(feature = "rust1", since = "1.0.0")]
14995// Many of the usings in this module are only used in the test configuration.
14996// It's cleaner to just turn off the unused_imports warning than to fix them.
14997#![allow(unused_imports)]
14998
14999use core::borrow::{Borrow, BorrowMut};
15000use core::iter::FusedIterator;
15001use core::mem;
15002use core::ptr;
15003use core::str::pattern::{DoubleEndedSearcher, Pattern, ReverseSearcher, Searcher};
15004use core::unicode::conversions;
15005
15006use crate::borrow::ToOwned;
15007use crate::boxed::Box;
15008use crate::slice::{Concat, Join, SliceIndex};
15009use crate::string::String;
15010use crate::vec::Vec;
15011
15012#[stable(feature = "rust1", since = "1.0.0")]
15013pub use core::str::pattern;
15014#[stable(feature = "encode_utf16", since = "1.8.0")]
15015pub use core::str::EncodeUtf16;
15016#[stable(feature = "split_ascii_whitespace", since = "1.34.0")]
15017pub use core::str::SplitAsciiWhitespace;
15018#[stable(feature = "split_inclusive", since = "1.53.0")]
15019pub use core::str::SplitInclusive;
15020#[stable(feature = "rust1", since = "1.0.0")]
15021pub use core::str::SplitWhitespace;
15022#[stable(feature = "rust1", since = "1.0.0")]
15023pub use core::str::{from_utf8, from_utf8_mut, Bytes, CharIndices, Chars};
15024#[stable(feature = "rust1", since = "1.0.0")]
15025pub use core::str::{from_utf8_unchecked, from_utf8_unchecked_mut, ParseBoolError};
15026#[stable(feature = "str_escape", since = "1.34.0")]
15027pub use core::str::{EscapeDebug, EscapeDefault, EscapeUnicode};
15028#[stable(feature = "rust1", since = "1.0.0")]
15029pub use core::str::{FromStr, Utf8Error};
15030#[allow(deprecated)]
15031#[stable(feature = "rust1", since = "1.0.0")]
15032pub use core::str::{Lines, LinesAny};
15033#[stable(feature = "rust1", since = "1.0.0")]
15034pub use core::str::{MatchIndices, RMatchIndices};
15035#[stable(feature = "rust1", since = "1.0.0")]
15036pub use core::str::{Matches, RMatches};
15037#[stable(feature = "rust1", since = "1.0.0")]
15038pub use core::str::{RSplit, Split};
15039#[stable(feature = "rust1", since = "1.0.0")]
15040pub use core::str::{RSplitN, SplitN};
15041#[stable(feature = "rust1", since = "1.0.0")]
15042pub use core::str::{RSplitTerminator, SplitTerminator};
15043
/// Note: `str` in `Concat<str>` is not meaningful here.
/// This type parameter of the trait only exists to enable another impl.
#[unstable(feature = "slice_concat_ext", issue = "27747")]
impl<S: Borrow<str>> Concat<str> for [S] {
    type Output = String;

    fn concat(slice: &Self) -> String {
        // Concatenation is simply joining with the empty separator.
        Join::join(slice, "")
    }
}
15054
15055#[unstable(feature = "slice_concat_ext", issue = "27747")]
15056impl<S: Borrow<str>> Join<&str> for [S] {
15057    type Output = String;
15058
15059    fn join(slice: &Self, sep: &str) -> String {
15060        unsafe { String::from_utf8_unchecked(join_generic_copy(slice, sep.as_bytes())) }
15061    }
15062}
15063
// Expands to a `match` on the separator length with one copy loop per listed
// `$num` (so the loop body sees a compile-time-constant separator length),
// plus a generic fallback arm. The expression evaluates to the still-unwritten
// tail of `$target`, letting the caller compute how many bytes were filled.
macro_rules! specialize_for_lengths {
    ($separator:expr, $target:expr, $iter:expr; $($num:expr),*) => {{
        let mut target = $target;
        let iter = $iter;
        let sep_bytes = $separator;
        match $separator.len() {
            $(
                // loops with hardcoded sizes run much faster
                // specialize the cases with small separator lengths
                $num => {
                    for s in iter {
                        copy_slice_and_advance!(target, sep_bytes);
                        let content_bytes = s.borrow().as_ref();
                        copy_slice_and_advance!(target, content_bytes);
                    }
                },
            )*
            _ => {
                // arbitrary non-zero size fallback
                for s in iter {
                    copy_slice_and_advance!(target, sep_bytes);
                    let content_bytes = s.borrow().as_ref();
                    copy_slice_and_advance!(target, content_bytes);
                }
            }
        }
        // the unfilled remainder of the target buffer
        target
    }}
}
15093
// Copies `$bytes` into the front of the mutable byte slice `$target`, then
// rebinds `$target` to the remaining tail so successive invocations append.
macro_rules! copy_slice_and_advance {
    ($target:expr, $bytes:expr) => {
        let len = $bytes.len();
        let (head, tail) = { $target }.split_at_mut(len);
        head.copy_from_slice($bytes);
        $target = tail;
    };
}
15102
// Optimized join implementation that works for both Vec<T> (T: Copy) and String's inner vec
// Currently (2018-05-13) there is a bug with type inference and specialization (see issue #36262)
// For this reason SliceConcat<T> is not specialized for T: Copy and SliceConcat<str> is the
// only user of this function. It is left in place for the time when that is fixed.
//
// the bounds for String-join are S: Borrow<str> and for Vec-join Borrow<[T]>
// [T] and str both impl AsRef<[T]> for some T
// => s.borrow().as_ref() and we always have slices
fn join_generic_copy<B, T, S>(slice: &[S], sep: &[T]) -> Vec<T>
where
    T: Copy,
    B: AsRef<[T]> + ?Sized,
    S: Borrow<B>,
{
    let sep_len = sep.len();
    let mut iter = slice.iter();

    // the first slice is the only one without a separator preceding it
    let first = match iter.next() {
        Some(first) => first,
        None => return vec![],
    };

    // compute the exact total length of the joined Vec
    // if the `len` calculation overflows, we'll panic
    // we would have run out of memory anyway and the rest of the function requires
    // the entire Vec pre-allocated for safety
    let reserved_len = sep_len
        .checked_mul(iter.len())
        .and_then(|n| {
            slice.iter().map(|s| s.borrow().as_ref().len()).try_fold(n, usize::checked_add)
        })
        .expect("attempt to join into collection with len > usize::MAX");

    // prepare an uninitialized buffer
    let mut result = Vec::with_capacity(reserved_len);
    debug_assert!(result.capacity() >= reserved_len);

    result.extend_from_slice(first.borrow().as_ref());

    unsafe {
        // SAFETY: `reserved_len` was computed from the same borrows above and
        // the buffer was allocated with at least that capacity; the tail
        // `pos..reserved_len` is the uninitialized spare region to fill.
        let pos = result.len();
        let target = result.get_unchecked_mut(pos..reserved_len);

        // copy separator and slices over without bounds checks
        // generate loops with hardcoded offsets for small separators
        // massive improvements possible (~ x2)
        let remain = specialize_for_lengths!(sep, target, iter; 0, 1, 2, 3, 4);

        // A weird borrow implementation may return different
        // slices for the length calculation and the actual copy.
        // Make sure we don't expose uninitialized bytes to the caller.
        let result_len = reserved_len - remain.len();
        result.set_len(result_len);
    }
    result
}
15160
15161#[stable(feature = "rust1", since = "1.0.0")]
15162impl Borrow<str> for String {
15163    #[inline]
15164    fn borrow(&self) -> &str {
15165        &self[..]
15166    }
15167}
15168
15169#[stable(feature = "string_borrow_mut", since = "1.36.0")]
15170impl BorrowMut<str> for String {
15171    #[inline]
15172    fn borrow_mut(&mut self) -> &mut str {
15173        &mut self[..]
15174    }
15175}
15176
15177#[stable(feature = "rust1", since = "1.0.0")]
15178impl ToOwned for str {
15179    type Owned = String;
15180    #[inline]
15181    fn to_owned(&self) -> String {
15182        unsafe { String::from_utf8_unchecked(self.as_bytes().to_owned()) }
15183    }
15184
15185    fn clone_into(&self, target: &mut String) {
15186        let mut b = mem::take(target).into_bytes();
15187        self.as_bytes().clone_into(&mut b);
15188        *target = unsafe { String::from_utf8_unchecked(b) }
15189    }
15190}
15191
15192/// Methods for string slices.
15193#[lang = "str_alloc"]
15194#[cfg(not(test))]
15195impl str {
15196    /// Converts a `Box<str>` into a `Box<[u8]>` without copying or allocating.
15197    ///
15198    /// # Examples
15199    ///
15200    /// Basic usage:
15201    ///
15202    /// ```
15203    /// let s = "this is a string";
15204    /// let boxed_str = s.to_owned().into_boxed_str();
15205    /// let boxed_bytes = boxed_str.into_boxed_bytes();
15206    /// assert_eq!(*boxed_bytes, *s.as_bytes());
15207    /// ```
15208    #[stable(feature = "str_box_extras", since = "1.20.0")]
15209    #[inline]
15210    pub fn into_boxed_bytes(self: Box<str>) -> Box<[u8]> {
15211        self.into()
15212    }
15213
15214    /// Replaces all matches of a pattern with another string.
15215    ///
15216    /// `replace` creates a new [`String`], and copies the data from this string slice into it.
15217    /// While doing so, it attempts to find matches of a pattern. If it finds any, it
15218    /// replaces them with the replacement string slice.
15219    ///
15220    /// # Examples
15221    ///
15222    /// Basic usage:
15223    ///
15224    /// ```
15225    /// let s = "this is old";
15226    ///
15227    /// assert_eq!("this is new", s.replace("old", "new"));
15228    /// ```
15229    ///
15230    /// When the pattern doesn't match:
15231    ///
15232    /// ```
15233    /// let s = "this is old";
15234    /// assert_eq!(s, s.replace("cookie monster", "little lamb"));
15235    /// ```
15236    #[must_use = "this returns the replaced string as a new allocation, \
15237                  without modifying the original"]
15238    #[stable(feature = "rust1", since = "1.0.0")]
15239    #[inline]
15240    pub fn replace<'a, P: Pattern<'a>>(&'a self, from: P, to: &str) -> String {
15241        let mut result = String::new();
15242        let mut last_end = 0;
15243        for (start, part) in self.match_indices(from) {
15244            result.push_str(unsafe { self.get_unchecked(last_end..start) });
15245            result.push_str(to);
15246            last_end = start + part.len();
15247        }
15248        result.push_str(unsafe { self.get_unchecked(last_end..self.len()) });
15249        result
15250    }
15251
15252    /// Replaces first N matches of a pattern with another string.
15253    ///
15254    /// `replacen` creates a new [`String`], and copies the data from this string slice into it.
15255    /// While doing so, it attempts to find matches of a pattern. If it finds any, it
15256    /// replaces them with the replacement string slice at most `count` times.
15257    ///
15258    /// # Examples
15259    ///
15260    /// Basic usage:
15261    ///
15262    /// ```
15263    /// let s = "foo foo 123 foo";
15264    /// assert_eq!("new new 123 foo", s.replacen("foo", "new", 2));
15265    /// assert_eq!("faa fao 123 foo", s.replacen('o', "a", 3));
15266    /// assert_eq!("foo foo new23 foo", s.replacen(char::is_numeric, "new", 1));
15267    /// ```
15268    ///
15269    /// When the pattern doesn't match:
15270    ///
15271    /// ```
15272    /// let s = "this is old";
15273    /// assert_eq!(s, s.replacen("cookie monster", "little lamb", 10));
15274    /// ```
15275    #[must_use = "this returns the replaced string as a new allocation, \
15276                  without modifying the original"]
15277    #[stable(feature = "str_replacen", since = "1.16.0")]
15278    pub fn replacen<'a, P: Pattern<'a>>(&'a self, pat: P, to: &str, count: usize) -> String {
15279        // Hope to reduce the times of re-allocation
15280        let mut result = String::with_capacity(32);
15281        let mut last_end = 0;
15282        for (start, part) in self.match_indices(pat).take(count) {
15283            result.push_str(unsafe { self.get_unchecked(last_end..start) });
15284            result.push_str(to);
15285            last_end = start + part.len();
15286        }
15287        result.push_str(unsafe { self.get_unchecked(last_end..self.len()) });
15288        result
15289    }
15290
    /// Returns the lowercase equivalent of this string slice, as a new [`String`].
    ///
    /// 'Lowercase' is defined according to the terms of the Unicode Derived Core Property
    /// `Lowercase`.
    ///
    /// Since some characters can expand into multiple characters when changing
    /// the case, this function returns a [`String`] instead of modifying the
    /// parameter in-place.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s = "HELLO";
    ///
    /// assert_eq!("hello", s.to_lowercase());
    /// ```
    ///
    /// A tricky example, with sigma:
    ///
    /// ```
    /// let sigma = "Σ";
    ///
    /// assert_eq!("σ", sigma.to_lowercase());
    ///
    /// // but at the end of a word, it's ς, not σ:
    /// let odysseus = "ὈΔΥΣΣΕΎΣ";
    ///
    /// assert_eq!("ὀδυσσεύς", odysseus.to_lowercase());
    /// ```
    ///
    /// Languages without case are not changed:
    ///
    /// ```
    /// let new_year = "农历新年";
    ///
    /// assert_eq!(new_year, new_year.to_lowercase());
    /// ```
    #[stable(feature = "unicode_case_mapping", since = "1.2.0")]
    pub fn to_lowercase(&self) -> String {
        let mut s = String::with_capacity(self.len());
        for (i, c) in self[..].char_indices() {
            if c == 'Σ' {
                // Σ maps to σ, except at the end of a word where it maps to ς.
                // This is the only conditional (contextual) but language-independent mapping
                // in `SpecialCasing.txt`,
                // so hard-code it rather than have a generic "condition" mechanism.
                // See https://github.com/rust-lang/rust/issues/26035
                map_uppercase_sigma(self, i, &mut s)
            } else {
                // `to_lower` returns a fixed `[char; 3]` with unused trailing
                // slots filled by the NUL sentinel '\0'.
                match conversions::to_lower(c) {
                    [a, '\0', _] => s.push(a),
                    [a, b, '\0'] => {
                        s.push(a);
                        s.push(b);
                    }
                    [a, b, c] => {
                        s.push(a);
                        s.push(b);
                        s.push(c);
                    }
                }
            }
        }
        return s;

        // Pushes the context-sensitive lowercase form of an uppercase sigma
        // found at byte index `i` of `from`.
        fn map_uppercase_sigma(from: &str, i: usize, to: &mut String) {
            // See http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992
            // for the definition of `Final_Sigma`.
            // 'Σ' is 2 bytes in UTF-8, so `i + 2` is the index just past it.
            debug_assert!('Σ'.len_utf8() == 2);
            let is_word_final = case_ignoreable_then_cased(from[..i].chars().rev())
                && !case_ignoreable_then_cased(from[i + 2..].chars());
            to.push_str(if is_word_final { "ς" } else { "σ" });
        }

        // Skips Case_Ignorable chars, then reports whether the next char is Cased.
        fn case_ignoreable_then_cased<I: Iterator<Item = char>>(iter: I) -> bool {
            use core::unicode::{Case_Ignorable, Cased};
            match iter.skip_while(|&c| Case_Ignorable(c)).next() {
                Some(c) => Cased(c),
                None => false,
            }
        }
    }
15375
15376    /// Returns the uppercase equivalent of this string slice, as a new [`String`].
15377    ///
15378    /// 'Uppercase' is defined according to the terms of the Unicode Derived Core Property
15379    /// `Uppercase`.
15380    ///
15381    /// Since some characters can expand into multiple characters when changing
15382    /// the case, this function returns a [`String`] instead of modifying the
15383    /// parameter in-place.
15384    ///
15385    /// # Examples
15386    ///
15387    /// Basic usage:
15388    ///
15389    /// ```
15390    /// let s = "hello";
15391    ///
15392    /// assert_eq!("HELLO", s.to_uppercase());
15393    /// ```
15394    ///
15395    /// Scripts without case are not changed:
15396    ///
15397    /// ```
15398    /// let new_year = "农历新年";
15399    ///
15400    /// assert_eq!(new_year, new_year.to_uppercase());
15401    /// ```
15402    ///
15403    /// One character can become multiple:
15404    /// ```
15405    /// let s = "tschüß";
15406    ///
15407    /// assert_eq!("TSCHÜSS", s.to_uppercase());
15408    /// ```
15409    #[stable(feature = "unicode_case_mapping", since = "1.2.0")]
15410    pub fn to_uppercase(&self) -> String {
15411        let mut s = String::with_capacity(self.len());
15412        for c in self[..].chars() {
15413            match conversions::to_upper(c) {
15414                [a, '\0', _] => s.push(a),
15415                [a, b, '\0'] => {
15416                    s.push(a);
15417                    s.push(b);
15418                }
15419                [a, b, c] => {
15420                    s.push(a);
15421                    s.push(b);
15422                    s.push(c);
15423                }
15424            }
15425        }
15426        s
15427    }
15428
15429    /// Converts a [`Box<str>`] into a [`String`] without copying or allocating.
15430    ///
15431    /// # Examples
15432    ///
15433    /// Basic usage:
15434    ///
15435    /// ```
15436    /// let string = String::from("birthday gift");
15437    /// let boxed_str = string.clone().into_boxed_str();
15438    ///
15439    /// assert_eq!(boxed_str.into_string(), string);
15440    /// ```
15441    #[stable(feature = "box_str", since = "1.4.0")]
15442    #[inline]
15443    pub fn into_string(self: Box<str>) -> String {
15444        let slice = Box::<[u8]>::from(self);
15445        unsafe { String::from_utf8_unchecked(slice.into_vec()) }
15446    }
15447
15448    /// Creates a new [`String`] by repeating a string `n` times.
15449    ///
15450    /// # Panics
15451    ///
15452    /// This function will panic if the capacity would overflow.
15453    ///
15454    /// # Examples
15455    ///
15456    /// Basic usage:
15457    ///
15458    /// ```
15459    /// assert_eq!("abc".repeat(4), String::from("abcabcabcabc"));
15460    /// ```
15461    ///
15462    /// A panic upon overflow:
15463    ///
15464    /// ```should_panic
15465    /// // this will panic at runtime
15466    /// "0123456789abcdef".repeat(usize::MAX);
15467    /// ```
15468    #[stable(feature = "repeat_str", since = "1.16.0")]
15469    pub fn repeat(&self, n: usize) -> String {
15470        unsafe { String::from_utf8_unchecked(self.as_bytes().repeat(n)) }
15471    }
15472
15473    /// Returns a copy of this string where each character is mapped to its
15474    /// ASCII upper case equivalent.
15475    ///
15476    /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
15477    /// but non-ASCII letters are unchanged.
15478    ///
15479    /// To uppercase the value in-place, use [`make_ascii_uppercase`].
15480    ///
15481    /// To uppercase ASCII characters in addition to non-ASCII characters, use
15482    /// [`to_uppercase`].
15483    ///
15484    /// # Examples
15485    ///
15486    /// ```
15487    /// let s = "Grüße, Jürgen ❤";
15488    ///
15489    /// assert_eq!("GRüßE, JüRGEN ❤", s.to_ascii_uppercase());
15490    /// ```
15491    ///
15492    /// [`make_ascii_uppercase`]: str::make_ascii_uppercase
15493    /// [`to_uppercase`]: #method.to_uppercase
15494    #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
15495    #[inline]
15496    pub fn to_ascii_uppercase(&self) -> String {
15497        let mut bytes = self.as_bytes().to_vec();
15498        bytes.make_ascii_uppercase();
15499        // make_ascii_uppercase() preserves the UTF-8 invariant.
15500        unsafe { String::from_utf8_unchecked(bytes) }
15501    }
15502
15503    /// Returns a copy of this string where each character is mapped to its
15504    /// ASCII lower case equivalent.
15505    ///
15506    /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
15507    /// but non-ASCII letters are unchanged.
15508    ///
15509    /// To lowercase the value in-place, use [`make_ascii_lowercase`].
15510    ///
15511    /// To lowercase ASCII characters in addition to non-ASCII characters, use
15512    /// [`to_lowercase`].
15513    ///
15514    /// # Examples
15515    ///
15516    /// ```
15517    /// let s = "Grüße, Jürgen ❤";
15518    ///
15519    /// assert_eq!("grüße, jürgen ❤", s.to_ascii_lowercase());
15520    /// ```
15521    ///
15522    /// [`make_ascii_lowercase`]: str::make_ascii_lowercase
15523    /// [`to_lowercase`]: #method.to_lowercase
15524    #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
15525    #[inline]
15526    pub fn to_ascii_lowercase(&self) -> String {
15527        let mut bytes = self.as_bytes().to_vec();
15528        bytes.make_ascii_lowercase();
15529        // make_ascii_lowercase() preserves the UTF-8 invariant.
15530        unsafe { String::from_utf8_unchecked(bytes) }
15531    }
15532}
15533
/// Converts a boxed slice of bytes to a boxed string slice without checking
/// that the string contains valid UTF-8.
///
/// # Safety
///
/// The bytes passed in must be valid UTF-8; no check is performed.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let smile_utf8 = Box::new([226, 152, 186]);
/// let smile = unsafe { std::str::from_boxed_utf8_unchecked(smile_utf8) };
///
/// assert_eq!("☺", &*smile);
/// ```
#[stable(feature = "str_box_extras", since = "1.20.0")]
#[inline]
pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8]>) -> Box<str> {
    // SAFETY: `str` has the same layout as `[u8]`, and the caller guarantees
    // the bytes are valid UTF-8, so reinterpreting the raw pointer is sound.
    unsafe { Box::from_raw(Box::into_raw(v) as *mut str) }
}
15552use super::*;
15553
15554use std::boxed::Box;
15555use std::clone::Clone;
15556use std::convert::{From, TryInto};
15557use std::mem::drop;
15558use std::ops::Drop;
15559use std::option::Option::{self, None, Some};
15560use std::sync::atomic::{
15561    self,
15562    Ordering::{Acquire, SeqCst},
15563};
15564use std::sync::mpsc::channel;
15565use std::sync::Mutex;
15566use std::thread;
15567
15568use crate::vec::Vec;
15569
// A drop-counter: incrementing the pointed-to counter lets tests observe
// exactly how many times the payload destructor ran.
struct Canary(*mut atomic::AtomicUsize);

impl Drop for Canary {
    fn drop(&mut self) {
        // SAFETY: tests only construct `Canary` from a pointer to an
        // `AtomicUsize` that outlives the `Canary` value itself.
        unsafe {
            (*self.0).fetch_add(1, SeqCst);
        }
    }
}
15583
#[test]
#[cfg_attr(target_os = "emscripten", ignore)]
fn manually_share_arc() {
    // Build an Arc, ship a clone to another thread over a channel, and make
    // sure both sides can read the shared vector.
    let numbers: Vec<i32> = (1..=10).collect();
    let shared = Arc::new(numbers);

    let (tx, rx) = channel();

    let _t = thread::spawn(move || {
        let received: Arc<Vec<i32>> = rx.recv().unwrap();
        assert_eq!(received[3], 4);
    });

    tx.send(shared.clone()).unwrap();

    assert_eq!(shared[2], 3);
    assert_eq!(shared[4], 5);
}
15602
#[test]
fn test_arc_get_mut() {
    // get_mut succeeds only while the Arc is unique: any other strong OR
    // weak reference makes it return None.
    let mut unique = Arc::new(3);
    *Arc::get_mut(&mut unique).unwrap() = 4;
    assert_eq!(*unique, 4);

    let strong = unique.clone();
    assert!(Arc::get_mut(&mut unique).is_none());
    drop(strong);
    assert!(Arc::get_mut(&mut unique).is_some());

    let _weak = Arc::downgrade(&unique);
    assert!(Arc::get_mut(&mut unique).is_none());
}
15615
#[test]
fn weak_counts() {
    // A dangling Weak (from Weak::new) reports zero for both counts.
    assert_eq!(Weak::weak_count(&Weak::<u64>::new()), 0);
    assert_eq!(Weak::strong_count(&Weak::<u64>::new()), 0);

    let a = Arc::new(0);
    let w = Arc::downgrade(&a);
    assert_eq!(Weak::strong_count(&w), 1);
    assert_eq!(Weak::weak_count(&w), 1);
    // Cloning the Weak bumps only the weak count, visible through both handles.
    let w2 = w.clone();
    assert_eq!(Weak::strong_count(&w), 1);
    assert_eq!(Weak::weak_count(&w), 2);
    assert_eq!(Weak::strong_count(&w2), 1);
    assert_eq!(Weak::weak_count(&w2), 2);
    drop(w);
    assert_eq!(Weak::strong_count(&w2), 1);
    assert_eq!(Weak::weak_count(&w2), 1);
    // Cloning the Arc bumps only the strong count.
    let a2 = a.clone();
    assert_eq!(Weak::strong_count(&w2), 2);
    assert_eq!(Weak::weak_count(&w2), 1);
    drop(a2);
    drop(a);
    // With every strong reference gone, the surviving Weak reports zero for
    // both counts.
    assert_eq!(Weak::strong_count(&w2), 0);
    assert_eq!(Weak::weak_count(&w2), 0);
    drop(w2);
}
15642
#[test]
fn try_unwrap() {
    // A unique Arc unwraps to its value.
    let unique = Arc::new(3);
    assert_eq!(Arc::try_unwrap(unique), Ok(3));

    // A second strong reference forces try_unwrap to fail and hand back
    // an equivalent Arc in the error.
    let shared = Arc::new(4);
    let _other = shared.clone();
    assert_eq!(Arc::try_unwrap(shared), Err(Arc::new(4)));

    // Weak references do not block unwrapping.
    let weakly_held = Arc::new(5);
    let _w = Arc::downgrade(&weakly_held);
    assert_eq!(Arc::try_unwrap(weakly_held), Ok(5));
}
15654
#[test]
fn into_from_raw() {
    let x = Arc::new(Box::new("hello"));
    let y = x.clone();

    // Turning an Arc into a raw pointer must not drop the payload even
    // after the other strong reference goes away...
    let x_ptr = Arc::into_raw(x);
    drop(y);
    unsafe {
        assert_eq!(**x_ptr, "hello");

        // ...and from_raw must restore full ownership.
        let x = Arc::from_raw(x_ptr);
        assert_eq!(**x, "hello");

        assert_eq!(Arc::try_unwrap(x).map(|x| *x), Ok("hello"));
    }
}
15671
#[test]
fn test_into_from_raw_unsized() {
    use std::fmt::Display;
    use std::string::ToString;

    // Round-trip a fat pointer (slice metadata) through raw form.
    let arc: Arc<str> = Arc::from("foo");

    let ptr = Arc::into_raw(arc.clone());
    let roundtripped = unsafe { Arc::from_raw(ptr) };

    assert_eq!(unsafe { &*ptr }, "foo");
    assert_eq!(arc, roundtripped);

    // Same round trip for a trait object, which carries vtable metadata.
    let arc: Arc<dyn Display> = Arc::new(123);

    let ptr = Arc::into_raw(arc.clone());
    let roundtripped = unsafe { Arc::from_raw(ptr) };

    assert_eq!(unsafe { &*ptr }.to_string(), "123");
    assert_eq!(roundtripped.to_string(), "123");
}
15693
#[test]
fn into_from_weak_raw() {
    let strong = Arc::new(Box::new("hello"));
    let weak = Arc::downgrade(&strong);

    // While a strong reference is alive, the pointer produced by
    // Weak::into_raw stays readable.
    let weak_ptr = Weak::into_raw(weak);
    unsafe {
        assert_eq!(**weak_ptr, "hello");

        // from_raw restores the Weak, which can still upgrade.
        let weak = Weak::from_raw(weak_ptr);
        let upgraded = Weak::upgrade(&weak).unwrap();
        assert_eq!(**upgraded, "hello");
        drop(upgraded);

        assert_eq!(Arc::try_unwrap(strong).map(|x| *x), Ok("hello"));
    }
}
15711
#[test]
fn test_into_from_weak_raw_unsized() {
    use std::fmt::Display;
    use std::string::ToString;

    // Round-trip a Weak<str> (fat pointer with length metadata) through raw.
    let arc: Arc<str> = Arc::from("foo");
    let weak: Weak<str> = Arc::downgrade(&arc);

    let ptr = Weak::into_raw(weak.clone());
    let restored = unsafe { Weak::from_raw(ptr) };

    assert_eq!(unsafe { &*ptr }, "foo");
    assert!(weak.ptr_eq(&restored));

    // Same round trip for a trait-object Weak, which carries a vtable.
    let arc: Arc<dyn Display> = Arc::new(123);
    let weak: Weak<dyn Display> = Arc::downgrade(&arc);

    let ptr = Weak::into_raw(weak.clone());
    let restored = unsafe { Weak::from_raw(ptr) };

    assert_eq!(unsafe { &*ptr }.to_string(), "123");
    assert!(weak.ptr_eq(&restored));
}
15735
#[test]
fn test_cowarc_clone_make_mut() {
    let mut c0 = Arc::new(75);
    let mut c1 = c0.clone();
    let mut c2 = c1.clone();

    // make_mut unshares each handle on first use (clone-on-write); the
    // observed value is unchanged by the unsharing itself.
    assert!(75 == *Arc::make_mut(&mut c0));
    assert!(75 == *Arc::make_mut(&mut c1));
    assert!(75 == *Arc::make_mut(&mut c2));

    *Arc::make_mut(&mut c0) += 1;
    *Arc::make_mut(&mut c1) += 2;
    *Arc::make_mut(&mut c2) += 3;

    assert!(76 == *c0);
    assert!(77 == *c1);
    assert!(78 == *c2);

    // After the writes, no two handles share backing memory.
    assert!(*c0 != *c1);
    assert!(*c0 != *c2);
    assert!(*c1 != *c2);
}
15759
#[test]
fn test_cowarc_clone_unique2() {
    let mut unique = Arc::new(75);
    let shared1 = unique.clone();
    let shared2 = shared1.clone();

    assert!(75 == *unique);
    assert!(75 == *shared1);
    assert!(75 == *shared2);

    // Writing through make_mut splits `unique` off onto its own allocation.
    *Arc::make_mut(&mut unique) += 1;
    assert!(76 == *unique);
    assert!(75 == *shared1);
    assert!(75 == *shared2);

    // shared1 and shared2 still agree with each other, while `unique`
    // holds the modified copy.
    assert!(*unique != *shared1);
    assert!(*unique != *shared2);
    assert!(*shared1 == *shared2);
}
15781
#[test]
fn test_cowarc_clone_weak() {
    let mut strong = Arc::new(75);
    let weak = Arc::downgrade(&strong);

    assert!(75 == *strong);
    assert!(75 == *weak.upgrade().unwrap());

    // make_mut in the presence of weak references moves the value to a new
    // allocation, disassociating the old weak pointer.
    *Arc::make_mut(&mut strong) += 1;

    assert!(76 == *strong);
    assert!(weak.upgrade().is_none());
}
15795
#[test]
fn test_live() {
    // A weak pointer upgrades while its strong reference is still alive.
    let strong = Arc::new(5);
    let weak = Arc::downgrade(&strong);
    assert!(weak.upgrade().is_some());
}
15802
#[test]
fn test_dead() {
    // Once the last strong reference is dropped, upgrade must fail.
    let strong = Arc::new(5);
    let weak = Arc::downgrade(&strong);
    drop(strong);
    assert!(weak.upgrade().is_none());
}
15810
#[test]
fn weak_self_cyclic() {
    struct Cycle {
        x: Mutex<Option<Weak<Cycle>>>,
    }

    // Close the loop: the Arc ends up owning a Weak back to itself.
    let cycle = Arc::new(Cycle { x: Mutex::new(None) });
    let back_edge = Arc::downgrade(&cycle.clone());
    *cycle.x.lock().unwrap() = Some(back_edge);

    // hopefully we don't double-free (or leak)...
}
15823
#[test]
fn drop_arc() {
    let mut canary = atomic::AtomicUsize::new(0);
    let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
    // Dropping the only strong reference must run the payload's destructor
    // exactly once.
    drop(arc);
    assert!(canary.load(Acquire) == 1);
}
15831
#[test]
fn drop_arc_weak() {
    let mut canary = atomic::AtomicUsize::new(0);
    let strong = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
    let weak = Arc::downgrade(&strong);
    assert!(canary.load(Acquire) == 0);
    // The payload dies with the last strong reference...
    drop(strong);
    assert!(canary.load(Acquire) == 1);
    // ...and dropping the surviving Weak afterwards must not run the
    // destructor again.
    drop(weak);
}
15842
#[test]
fn test_strong_count() {
    let a = Arc::new(0);
    assert!(Arc::strong_count(&a) == 1);
    // Downgrading adds only a weak reference; the strong count is unchanged.
    let w = Arc::downgrade(&a);
    assert!(Arc::strong_count(&a) == 1);
    // Upgrading mints a fresh strong reference.
    let b = w.upgrade().expect("");
    assert!(Arc::strong_count(&b) == 2);
    assert!(Arc::strong_count(&a) == 2);
    drop(w);
    drop(a);
    // `b` is now the sole strong reference.
    assert!(Arc::strong_count(&b) == 1);
    let c = b.clone();
    assert!(Arc::strong_count(&b) == 2);
    assert!(Arc::strong_count(&c) == 2);
}
15859
#[test]
fn test_weak_count() {
    let a = Arc::new(0);
    assert!(Arc::strong_count(&a) == 1);
    assert!(Arc::weak_count(&a) == 0);
    // Downgrade bumps only the weak count.
    let w = Arc::downgrade(&a);
    assert!(Arc::strong_count(&a) == 1);
    assert!(Arc::weak_count(&a) == 1);
    let x = w.clone();
    assert!(Arc::weak_count(&a) == 2);
    drop(w);
    drop(x);
    // All weak references gone; the strong count was untouched throughout.
    assert!(Arc::strong_count(&a) == 1);
    assert!(Arc::weak_count(&a) == 0);
    // Cloning the Arc bumps only the strong count.
    let c = a.clone();
    assert!(Arc::strong_count(&a) == 2);
    assert!(Arc::weak_count(&a) == 0);
    let d = Arc::downgrade(&c);
    assert!(Arc::weak_count(&c) == 1);
    assert!(Arc::strong_count(&c) == 2);

    drop(a);
    drop(c);
    drop(d);
}
15885
#[test]
fn show_arc() {
    // Debug formatting of an Arc forwards to the inner value.
    let shared = Arc::new(5);
    assert_eq!(format!("{:?}", shared), "5");
}
15891
// Make sure deriving works with Arc<T>: all of the standard comparison,
// clone, debug, and default derives must compile for a struct holding an
// Arc field.
#[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
struct Foo {
    inner: Arc<i32>,
}
15897
#[test]
fn test_unsized() {
    // Arc<[i32]> is a fat pointer; formatting and weak handling must work
    // the same as for sized payloads.
    let slice: Arc<[i32]> = Arc::new([1, 2, 3]);
    assert_eq!(format!("{:?}", slice), "[1, 2, 3]");
    let weak = Arc::downgrade(&slice.clone());
    drop(slice);
    assert!(weak.upgrade().is_none());
}
15906
#[test]
fn test_maybe_thin_unsized() {
    // If/when custom thin DSTs exist, this test should be updated to use one
    use std::ffi::{CStr, CString};

    let shared: Arc<CStr> = Arc::from(CString::new("swordfish").unwrap().into_boxed_c_str());
    assert_eq!(format!("{:?}", shared), "\"swordfish\"");
    let weak: Weak<CStr> = Arc::downgrade(&shared);
    drop(shared);

    // The weak pointer now refers to a dropped value, so upgrading fails...
    assert!(weak.upgrade().is_none());
    // ...but dropping the Weak must still be able to compute the allocation
    // layout in order to free the memory. CStr has no drop glue, but custom
    // DSTs might, and need to work.
    drop(weak);
}
15923
#[test]
fn test_from_owned() {
    // Arc::from moves an owned value into a new shared allocation.
    let owned = 123;
    let shared = Arc::from(owned);
    assert!(123 == *shared);
}
15930
#[test]
fn test_new_weak() {
    // Weak::new never points at an allocation, so upgrade always fails.
    let dangling: Weak<usize> = Weak::new();
    assert!(dangling.upgrade().is_none());
}
15936
#[test]
fn test_ptr_eq() {
    // ptr_eq compares allocations, not values: two Arcs holding equal
    // values in distinct allocations are not ptr_eq.
    let five = Arc::new(5);
    let alias_of_five = five.clone();
    let different_five = Arc::new(5);

    assert!(Arc::ptr_eq(&five, &alias_of_five));
    assert!(!Arc::ptr_eq(&five, &different_five));
}
15946
#[test]
#[cfg_attr(target_os = "emscripten", ignore)]
fn test_weak_count_locked() {
    // One thread hammers Arc::get_mut while this thread polls weak_count:
    // a concurrent reader must never observe an inflated weak count while
    // get_mut is running (asserted as n < 2 below).
    let mut a = Arc::new(atomic::AtomicBool::new(false));
    let a2 = a.clone();
    let t = thread::spawn(move || {
        // Miri is too slow
        let count = if cfg!(miri) { 1000 } else { 1000000 };
        for _i in 0..count {
            Arc::get_mut(&mut a);
        }
        // Signal the polling loop below that the writer thread is done.
        a.store(true, SeqCst);
    });

    while !a2.load(SeqCst) {
        let n = Arc::weak_count(&a2);
        assert!(n < 2, "bad weak count: {}", n);
        #[cfg(miri)] // Miri's scheduler does not guarantee liveness, and thus needs this hint.
        std::hint::spin_loop();
    }
    t.join().unwrap();
}
15969
#[test]
fn test_from_str() {
    // &str -> Arc<str> copies the bytes into a shared allocation.
    let shared: Arc<str> = Arc::from("foo");

    assert_eq!(&shared[..], "foo");
}
15976
#[test]
fn test_copy_from_slice() {
    // &[T] with T: Copy converts into Arc<[T]> element by element.
    let source: &[u32] = &[1, 2, 3];
    let shared: Arc<[u32]> = Arc::from(source);

    assert_eq!(&shared[..], [1, 2, 3]);
}
15984
#[test]
fn test_clone_from_slice() {
    #[derive(Clone, Debug, Eq, PartialEq)]
    struct X(u32);

    // Non-Copy elements are cloned one by one into the Arc allocation.
    let source: &[X] = &[X(1), X(2), X(3)];
    let shared: Arc<[X]> = Arc::from(source);

    assert_eq!(&shared[..], source);
}
15995
#[test]
#[should_panic]
fn test_clone_from_slice_panic() {
    use std::string::{String, ToString};

    struct Fail(u32, String);

    impl Clone for Fail {
        // Panic on the third element to exercise the partially-cloned
        // cleanup path inside Arc::from(&[T]).
        fn clone(&self) -> Fail {
            if self.0 == 2 {
                panic!();
            }
            Fail(self.0, self.1.clone())
        }
    }

    let elems: &[Fail] =
        &[Fail(0, "foo".to_string()), Fail(1, "bar".to_string()), Fail(2, "baz".to_string())];

    // Should panic, but not cause memory corruption
    let _r: Arc<[Fail]> = Arc::from(elems);
}
16018
#[test]
fn test_from_box() {
    // Arc::from(Box<T>) takes over the value without changing it.
    let boxed: Box<u32> = Box::new(123);
    let shared: Arc<u32> = Arc::from(boxed);

    assert_eq!(*shared, 123);
}
16026
#[test]
fn test_from_box_str() {
    use std::string::String;

    // Box<str> -> Arc<str> keeps the unsized payload intact.
    let boxed = String::from("foo").into_boxed_str();
    let shared: Arc<str> = Arc::from(boxed);

    assert_eq!(&shared[..], "foo");
}
16036
#[test]
fn test_from_box_slice() {
    // Box<[T]> -> Arc<[T]> preserves all elements.
    let boxed = vec![1, 2, 3].into_boxed_slice();
    let shared: Arc<[u32]> = Arc::from(boxed);

    assert_eq!(&shared[..], [1, 2, 3]);
}
16044
#[test]
fn test_from_box_trait() {
    use std::fmt::Display;
    use std::string::ToString;

    // A boxed trait object converts to an Arc trait object, vtable included.
    let boxed: Box<dyn Display> = Box::new(123);
    let shared: Arc<dyn Display> = Arc::from(boxed);

    assert_eq!(shared.to_string(), "123");
}
16055
#[test]
fn test_from_box_trait_zero_sized() {
    use std::fmt::Debug;

    // A zero-sized payload behind a trait object still round-trips.
    let boxed: Box<dyn Debug> = Box::new(());
    let shared: Arc<dyn Debug> = Arc::from(boxed);

    assert_eq!(format!("{:?}", shared), "()");
}
16065
#[test]
fn test_from_vec() {
    // Vec<T> -> Arc<[T]> moves the elements into a shared slice allocation.
    let source = vec![1, 2, 3];
    let shared: Arc<[u32]> = Arc::from(source);

    assert_eq!(&shared[..], [1, 2, 3]);
}
16073
#[test]
fn test_downcast() {
    use std::any::Any;

    let int_any: Arc<dyn Any + Send + Sync> = Arc::new(i32::MAX);
    let str_any: Arc<dyn Any + Send + Sync> = Arc::new("abc");

    // Downcasting to the wrong type hands the original Arc back as Err.
    assert!(int_any.clone().downcast::<u32>().is_err());

    let as_i32 = int_any.downcast::<i32>();
    assert!(as_i32.is_ok());
    assert_eq!(as_i32.unwrap(), Arc::new(i32::MAX));

    assert!(str_any.clone().downcast::<i32>().is_err());

    let as_str = str_any.downcast::<&'static str>();
    assert!(as_str.is_ok());
    assert_eq!(as_str.unwrap(), Arc::new("abc"));
}
16093
#[test]
fn test_array_from_slice() {
    // Arc<[T]> -> Arc<[T; N]> succeeds only when the lengths match exactly.
    let shared: Arc<[u32]> = Arc::from(vec![1, 2, 3]);

    let exact: Result<Arc<[u32; 3]>, _> = shared.clone().try_into();
    assert!(exact.is_ok());

    let too_short: Result<Arc<[u32; 2]>, _> = shared.clone().try_into();
    assert!(too_short.is_err());
}
16105
#[test]
fn test_arc_cyclic_with_zero_refs() {
    struct ZeroRefs {
        inner: Weak<ZeroRefs>,
    }
    let zero = Arc::new_cyclic(|weak_self| {
        // Inside the constructor the allocation is not yet live.
        assert_eq!(weak_self.strong_count(), 0);
        assert!(weak_self.upgrade().is_none());
        // Discard the provided self-reference and store a dangling Weak.
        ZeroRefs { inner: Weak::new() }
    });

    assert_eq!(Arc::strong_count(&zero), 1);
    assert_eq!(Arc::weak_count(&zero), 0);
    assert_eq!(zero.inner.strong_count(), 0);
    assert_eq!(zero.inner.weak_count(), 0);
}
16122
#[test]
fn test_arc_new_cyclic_one_ref() {
    struct OneRef {
        inner: Weak<OneRef>,
    }
    let one_ref = Arc::new_cyclic(|weak_self| {
        // Inside the constructor the allocation is not yet live.
        assert_eq!(weak_self.strong_count(), 0);
        assert!(weak_self.upgrade().is_none());
        OneRef { inner: weak_self.clone() }
    });

    // The stored self-reference counts as one weak reference.
    assert_eq!(Arc::strong_count(&one_ref), 1);
    assert_eq!(Arc::weak_count(&one_ref), 1);

    // After construction the self-Weak upgrades to the same allocation.
    let upgraded = Weak::upgrade(&one_ref.inner).unwrap();
    assert!(Arc::ptr_eq(&one_ref, &upgraded));

    assert_eq!(Arc::strong_count(&one_ref), 2);
    assert_eq!(Arc::weak_count(&one_ref), 1);
}
16143
#[test]
fn test_arc_cyclic_two_refs() {
    struct TwoRefs {
        inner1: Weak<TwoRefs>,
        inner2: Weak<TwoRefs>,
    }
    let two_refs = Arc::new_cyclic(|weak_self| {
        assert_eq!(weak_self.strong_count(), 0);
        assert!(weak_self.upgrade().is_none());

        // Store two independent clones of the self-reference.
        let inner1 = weak_self.clone();
        let inner2 = inner1.clone();

        TwoRefs { inner1, inner2 }
    });

    assert_eq!(Arc::strong_count(&two_refs), 1);
    assert_eq!(Arc::weak_count(&two_refs), 2);

    let via_inner1 = Weak::upgrade(&two_refs.inner1).unwrap();
    assert!(Arc::ptr_eq(&two_refs, &via_inner1));

    let via_inner2 = Weak::upgrade(&two_refs.inner2).unwrap();
    assert!(Arc::ptr_eq(&two_refs, &via_inner2));

    // Both upgrades added strong references; the weak count is unchanged.
    assert_eq!(Arc::strong_count(&two_refs), 3);
    assert_eq!(Arc::weak_count(&two_refs), 2);
}
16172//! A UTF-8–encoded, growable string.
16173//!
16174//! This module contains the [`String`] type, the [`ToString`] trait for
16175//! converting to strings, and several error types that may result from
16176//! working with [`String`]s.
16177//!
16178//! # Examples
16179//!
16180//! There are multiple ways to create a new [`String`] from a string literal:
16181//!
16182//! ```
16183//! let s = "Hello".to_string();
16184//!
16185//! let s = String::from("world");
16186//! let s: String = "also this".into();
16187//! ```
16188//!
16189//! You can create a new [`String`] from an existing one by concatenating with
16190//! `+`:
16191//!
16192//! ```
16193//! let s = "Hello".to_string();
16194//!
16195//! let message = s + " world!";
16196//! ```
16197//!
16198//! If you have a vector of valid UTF-8 bytes, you can make a [`String`] out of
16199//! it. You can do the reverse too.
16200//!
16201//! ```
16202//! let sparkle_heart = vec![240, 159, 146, 150];
16203//!
16204//! // We know these bytes are valid, so we'll use `unwrap()`.
16205//! let sparkle_heart = String::from_utf8(sparkle_heart).unwrap();
16206//!
//! assert_eq!("💖", sparkle_heart);
16208//!
16209//! let bytes = sparkle_heart.into_bytes();
16210//!
16211//! assert_eq!(bytes, [240, 159, 146, 150]);
16212//! ```
16213
16214#![stable(feature = "rust1", since = "1.0.0")]
16215
16216use core::char::{decode_utf16, REPLACEMENT_CHARACTER};
16217use core::fmt;
16218use core::hash;
16219use core::iter::{FromIterator, FusedIterator};
16220use core::ops::Bound::{Excluded, Included, Unbounded};
16221use core::ops::{self, Add, AddAssign, Index, IndexMut, Range, RangeBounds};
16222use core::ptr;
16223use core::slice;
16224use core::str::{lossy, pattern::Pattern};
16225
16226use crate::borrow::{Cow, ToOwned};
16227use crate::boxed::Box;
16228use crate::collections::TryReserveError;
16229use crate::str::{self, from_boxed_utf8_unchecked, Chars, FromStr, Utf8Error};
16230use crate::vec::Vec;
16231
16232/// A UTF-8–encoded, growable string.
16233///
16234/// The `String` type is the most common string type that has ownership over the
16235/// contents of the string. It has a close relationship with its borrowed
16236/// counterpart, the primitive [`str`].
16237///
16238/// # Examples
16239///
16240/// You can create a `String` from [a literal string][`str`] with [`String::from`]:
16241///
16242/// [`String::from`]: From::from
16243///
16244/// ```
16245/// let hello = String::from("Hello, world!");
16246/// ```
16247///
16248/// You can append a [`char`] to a `String` with the [`push`] method, and
16249/// append a [`&str`] with the [`push_str`] method:
16250///
16251/// ```
16252/// let mut hello = String::from("Hello, ");
16253///
16254/// hello.push('w');
16255/// hello.push_str("orld!");
16256/// ```
16257///
16258/// [`push`]: String::push
16259/// [`push_str`]: String::push_str
16260///
16261/// If you have a vector of UTF-8 bytes, you can create a `String` from it with
16262/// the [`from_utf8`] method:
16263///
16264/// ```
16265/// // some bytes, in a vector
16266/// let sparkle_heart = vec![240, 159, 146, 150];
16267///
16268/// // We know these bytes are valid, so we'll use `unwrap()`.
16269/// let sparkle_heart = String::from_utf8(sparkle_heart).unwrap();
16270///
/// assert_eq!("💖", sparkle_heart);
16272/// ```
16273///
16274/// [`from_utf8`]: String::from_utf8
16275///
16276/// # UTF-8
16277///
16278/// `String`s are always valid UTF-8. This has a few implications, the first of
16279/// which is that if you need a non-UTF-8 string, consider [`OsString`]. It is
16280/// similar, but without the UTF-8 constraint. The second implication is that
16281/// you cannot index into a `String`:
16282///
16283/// ```compile_fail,E0277
16284/// let s = "hello";
16285///
16286/// println!("The first letter of s is {}", s[0]); // ERROR!!!
16287/// ```
16288///
16289/// [`OsString`]: ../../std/ffi/struct.OsString.html
16290///
16291/// Indexing is intended to be a constant-time operation, but UTF-8 encoding
16292/// does not allow us to do this. Furthermore, it's not clear what sort of
16293/// thing the index should return: a byte, a codepoint, or a grapheme cluster.
16294/// The [`bytes`] and [`chars`] methods return iterators over the first
16295/// two, respectively.
16296///
16297/// [`bytes`]: str::bytes
16298/// [`chars`]: str::chars
16299///
16300/// # Deref
16301///
16302/// `String`s implement [`Deref`]`<Target=str>`, and so inherit all of [`str`]'s
16303/// methods. In addition, this means that you can pass a `String` to a
16304/// function which takes a [`&str`] by using an ampersand (`&`):
16305///
16306/// ```
16307/// fn takes_str(s: &str) { }
16308///
16309/// let s = String::from("Hello");
16310///
16311/// takes_str(&s);
16312/// ```
16313///
16314/// This will create a [`&str`] from the `String` and pass it in. This
16315/// conversion is very inexpensive, and so generally, functions will accept
16316/// [`&str`]s as arguments unless they need a `String` for some specific
16317/// reason.
16318///
16319/// In certain cases Rust doesn't have enough information to make this
16320/// conversion, known as [`Deref`] coercion. In the following example a string
16321/// slice [`&'a str`][`&str`] implements the trait `TraitExample`, and the function
16322/// `example_func` takes anything that implements the trait. In this case Rust
16323/// would need to make two implicit conversions, which Rust doesn't have the
16324/// means to do. For that reason, the following example will not compile.
16325///
16326/// ```compile_fail,E0277
16327/// trait TraitExample {}
16328///
16329/// impl<'a> TraitExample for &'a str {}
16330///
16331/// fn example_func<A: TraitExample>(example_arg: A) {}
16332///
16333/// let example_string = String::from("example_string");
16334/// example_func(&example_string);
16335/// ```
16336///
16337/// There are two options that would work instead. The first would be to
16338/// change the line `example_func(&example_string);` to
16339/// `example_func(example_string.as_str());`, using the method [`as_str()`]
16340/// to explicitly extract the string slice containing the string. The second
16341/// way changes `example_func(&example_string);` to
16342/// `example_func(&*example_string);`. In this case we are dereferencing a
16343/// `String` to a [`str`][`&str`], then referencing the [`str`][`&str`] back to
16344/// [`&str`]. The second way is more idiomatic, however both work to do the
16345/// conversion explicitly rather than relying on the implicit conversion.
16346///
16347/// # Representation
16348///
16349/// A `String` is made up of three components: a pointer to some bytes, a
16350/// length, and a capacity. The pointer points to an internal buffer `String`
16351/// uses to store its data. The length is the number of bytes currently stored
16352/// in the buffer, and the capacity is the size of the buffer in bytes. As such,
16353/// the length will always be less than or equal to the capacity.
16354///
16355/// This buffer is always stored on the heap.
16356///
16357/// You can look at these with the [`as_ptr`], [`len`], and [`capacity`]
16358/// methods:
16359///
16360/// ```
16361/// use std::mem;
16362///
16363/// let story = String::from("Once upon a time...");
16364///
16365// FIXME Update this when vec_into_raw_parts is stabilized
16366/// // Prevent automatically dropping the String's data
16367/// let mut story = mem::ManuallyDrop::new(story);
16368///
16369/// let ptr = story.as_mut_ptr();
16370/// let len = story.len();
16371/// let capacity = story.capacity();
16372///
16373/// // story has nineteen bytes
16374/// assert_eq!(19, len);
16375///
16376/// // We can re-build a String out of ptr, len, and capacity. This is all
16377/// // unsafe because we are responsible for making sure the components are
16378/// // valid:
16379/// let s = unsafe { String::from_raw_parts(ptr, len, capacity) } ;
16380///
16381/// assert_eq!(String::from("Once upon a time..."), s);
16382/// ```
16383///
16384/// [`as_ptr`]: str::as_ptr
16385/// [`len`]: String::len
16386/// [`capacity`]: String::capacity
16387///
16388/// If a `String` has enough capacity, adding elements to it will not
16389/// re-allocate. For example, consider this program:
16390///
16391/// ```
16392/// let mut s = String::new();
16393///
16394/// println!("{}", s.capacity());
16395///
16396/// for _ in 0..5 {
16397///     s.push_str("hello");
16398///     println!("{}", s.capacity());
16399/// }
16400/// ```
16401///
16402/// This will output the following:
16403///
16404/// ```text
16405/// 0
16406/// 5
16407/// 10
16408/// 20
16409/// 20
16410/// 40
16411/// ```
16412///
16413/// At first, we have no memory allocated at all, but as we append to the
16414/// string, it increases its capacity appropriately. If we instead use the
16415/// [`with_capacity`] method to allocate the correct capacity initially:
16416///
16417/// ```
16418/// let mut s = String::with_capacity(25);
16419///
16420/// println!("{}", s.capacity());
16421///
16422/// for _ in 0..5 {
16423///     s.push_str("hello");
16424///     println!("{}", s.capacity());
16425/// }
16426/// ```
16427///
16428/// [`with_capacity`]: String::with_capacity
16429///
16430/// We end up with a different output:
16431///
16432/// ```text
16433/// 25
16434/// 25
16435/// 25
16436/// 25
16437/// 25
16438/// 25
16439/// ```
16440///
16441/// Here, there's no need to allocate more memory inside the loop.
16442///
16443/// [`str`]: prim@str
16444/// [`&str`]: prim@str
16445/// [`Deref`]: core::ops::Deref
16446/// [`as_str()`]: String::as_str
#[derive(PartialOrd, Eq, Ord)]
#[cfg_attr(not(test), rustc_diagnostic_item = "string_type")]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct String {
    // INVARIANT: `vec` always holds valid UTF-8. Unsafe constructors such as
    // `from_utf8_unchecked` rely on the caller to uphold this.
    vec: Vec<u8>,
}
16453
16454/// A possible error value when converting a `String` from a UTF-8 byte vector.
16455///
16456/// This type is the error type for the [`from_utf8`] method on [`String`]. It
16457/// is designed in such a way to carefully avoid reallocations: the
16458/// [`into_bytes`] method will give back the byte vector that was used in the
16459/// conversion attempt.
16460///
16461/// [`from_utf8`]: String::from_utf8
16462/// [`into_bytes`]: FromUtf8Error::into_bytes
16463///
16464/// The [`Utf8Error`] type provided by [`std::str`] represents an error that may
16465/// occur when converting a slice of [`u8`]s to a [`&str`]. In this sense, it's
16466/// an analogue to `FromUtf8Error`, and you can get one from a `FromUtf8Error`
16467/// through the [`utf8_error`] method.
16468///
16469/// [`Utf8Error`]: core::str::Utf8Error
16470/// [`std::str`]: core::str
16471/// [`&str`]: prim@str
16472/// [`utf8_error`]: Self::utf8_error
16473///
16474/// # Examples
16475///
16476/// Basic usage:
16477///
16478/// ```
16479/// // some invalid bytes, in a vector
16480/// let bytes = vec![0, 159];
16481///
16482/// let value = String::from_utf8(bytes);
16483///
16484/// assert!(value.is_err());
16485/// assert_eq!(vec![0, 159], value.unwrap_err().into_bytes());
16486/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FromUtf8Error {
    // The original byte vector, returned to the caller via `into_bytes`
    // so the failed conversion does not cost the allocation.
    bytes: Vec<u8>,
    // The underlying validation failure, exposed via `utf8_error`.
    error: Utf8Error,
}
16493
16494/// A possible error value when converting a `String` from a UTF-16 byte slice.
16495///
16496/// This type is the error type for the [`from_utf16`] method on [`String`].
16497///
16498/// [`from_utf16`]: String::from_utf16
16499/// # Examples
16500///
16501/// Basic usage:
16502///
16503/// ```
/// // 𝄞mu<invalid>ic
16505/// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075,
16506///           0xD800, 0x0069, 0x0063];
16507///
16508/// assert!(String::from_utf16(v).is_err());
16509/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
// Deliberately opaque: a UTF-16 decoding failure carries no payload.
pub struct FromUtf16Error(());
16513
16514impl String {
16515    /// Creates a new empty `String`.
16516    ///
16517    /// Given that the `String` is empty, this will not allocate any initial
16518    /// buffer. While that means that this initial operation is very
16519    /// inexpensive, it may cause excessive allocation later when you add
16520    /// data. If you have an idea of how much data the `String` will hold,
16521    /// consider the [`with_capacity`] method to prevent excessive
16522    /// re-allocation.
16523    ///
16524    /// [`with_capacity`]: String::with_capacity
16525    ///
16526    /// # Examples
16527    ///
16528    /// Basic usage:
16529    ///
16530    /// ```
16531    /// let s = String::new();
16532    /// ```
    #[inline]
    #[rustc_const_stable(feature = "const_string_new", since = "1.39.0")]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const fn new() -> String {
        // `Vec::new` does not allocate, so creating an empty String is free.
        String { vec: Vec::new() }
    }
16539
16540    /// Creates a new empty `String` with a particular capacity.
16541    ///
16542    /// `String`s have an internal buffer to hold their data. The capacity is
16543    /// the length of that buffer, and can be queried with the [`capacity`]
16544    /// method. This method creates an empty `String`, but one with an initial
16545    /// buffer that can hold `capacity` bytes. This is useful when you may be
16546    /// appending a bunch of data to the `String`, reducing the number of
16547    /// reallocations it needs to do.
16548    ///
16549    /// [`capacity`]: String::capacity
16550    ///
16551    /// If the given capacity is `0`, no allocation will occur, and this method
16552    /// is identical to the [`new`] method.
16553    ///
16554    /// [`new`]: String::new
16555    ///
16556    /// # Examples
16557    ///
16558    /// Basic usage:
16559    ///
16560    /// ```
16561    /// let mut s = String::with_capacity(10);
16562    ///
16563    /// // The String contains no chars, even though it has capacity for more
16564    /// assert_eq!(s.len(), 0);
16565    ///
16566    /// // These are all done without reallocating...
16567    /// let cap = s.capacity();
16568    /// for _ in 0..10 {
16569    ///     s.push('a');
16570    /// }
16571    ///
16572    /// assert_eq!(s.capacity(), cap);
16573    ///
16574    /// // ...but this may make the string reallocate
16575    /// s.push('a');
16576    /// ```
    #[inline]
    #[doc(alias = "alloc")]
    #[doc(alias = "malloc")]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn with_capacity(capacity: usize) -> String {
        // Delegate to Vec, which sizes the initial buffer to `capacity` bytes.
        String { vec: Vec::with_capacity(capacity) }
    }
16584
    // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
    // required for this method definition, is not available. Since we don't
    // require this method for testing purposes, I'll just stub it
    // NB see the slice::hack module in slice.rs for more information
    //
    // Calling this stub always panics; it exists only so that code compiled
    // under cfg(test) still type-checks against the `String::from_str` name.
    #[inline]
    #[cfg(test)]
    pub fn from_str(_: &str) -> String {
        panic!("not available with cfg(test)");
    }
16594
    /// Converts a vector of bytes to a `String`.
    ///
    /// A string ([`String`]) is made of bytes ([`u8`]), and a vector of bytes
    /// ([`Vec<u8>`]) is made of bytes, so this function converts between the
    /// two. Not all byte slices are valid `String`s, however: `String`
    /// requires that it is valid UTF-8. `from_utf8()` checks to ensure that
    /// the bytes are valid UTF-8, and then does the conversion.
    ///
    /// If you are sure that the byte slice is valid UTF-8, and you don't want
    /// to incur the overhead of the validity check, there is an unsafe version
    /// of this function, [`from_utf8_unchecked`], which has the same behavior
    /// but skips the check.
    ///
    /// This method will take care to not copy the vector, for efficiency's
    /// sake.
    ///
    /// If you need a [`&str`] instead of a `String`, consider
    /// [`str::from_utf8`].
    ///
    /// The inverse of this method is [`into_bytes`].
    ///
    /// # Errors
    ///
    /// Returns [`Err`] if the slice is not UTF-8 with a description as to why the
    /// provided bytes are not UTF-8. The vector you moved in is also included.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // some bytes, in a vector
    /// let sparkle_heart = vec![240, 159, 146, 150];
    ///
    /// // We know these bytes are valid, so we'll use `unwrap()`.
    /// let sparkle_heart = String::from_utf8(sparkle_heart).unwrap();
    ///
    /// assert_eq!("💖", sparkle_heart);
    /// ```
    ///
    /// Incorrect bytes:
    ///
    /// ```
    /// // some invalid bytes, in a vector
    /// let sparkle_heart = vec![0, 159, 146, 150];
    ///
    /// assert!(String::from_utf8(sparkle_heart).is_err());
    /// ```
    ///
    /// See the docs for [`FromUtf8Error`] for more details on what you can do
    /// with this error.
    ///
    /// [`from_utf8_unchecked`]: String::from_utf8_unchecked
    /// [`Vec<u8>`]: crate::vec::Vec
    /// [`&str`]: prim@str
    /// [`into_bytes`]: String::into_bytes
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn from_utf8(vec: Vec<u8>) -> Result<String, FromUtf8Error> {
        // Validate once up front; on success the vector is adopted as-is
        // (no copy), and on failure it is handed back inside the error.
        match str::from_utf8(&vec) {
            Ok(..) => Ok(String { vec }),
            Err(e) => Err(FromUtf8Error { bytes: vec, error: e }),
        }
    }
16659
    /// Converts a slice of bytes to a string, including invalid characters.
    ///
    /// Strings are made of bytes ([`u8`]), and a slice of bytes
    /// ([`&[u8]`][byteslice]) is made of bytes, so this function converts
    /// between the two. Not all byte slices are valid strings, however: strings
    /// are required to be valid UTF-8. During this conversion,
    /// `from_utf8_lossy()` will replace any invalid UTF-8 sequences with
    /// [`U+FFFD REPLACEMENT CHARACTER`][U+FFFD], which looks like this: �
    ///
    /// [byteslice]: prim@slice
    /// [U+FFFD]: core::char::REPLACEMENT_CHARACTER
    ///
    /// If you are sure that the byte slice is valid UTF-8, and you don't want
    /// to incur the overhead of the conversion, there is an unsafe version
    /// of this function, [`from_utf8_unchecked`], which has the same behavior
    /// but skips the checks.
    ///
    /// [`from_utf8_unchecked`]: String::from_utf8_unchecked
    ///
    /// This function returns a [`Cow<'a, str>`]. If our byte slice is invalid
    /// UTF-8, then we need to insert the replacement characters, which will
    /// change the size of the string, and hence, require a `String`. But if
    /// it's already valid UTF-8, we don't need a new allocation. This return
    /// type allows us to handle both cases.
    ///
    /// [`Cow<'a, str>`]: crate::borrow::Cow
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // some bytes, in a vector
    /// let sparkle_heart = vec![240, 159, 146, 150];
    ///
    /// let sparkle_heart = String::from_utf8_lossy(&sparkle_heart);
    ///
    /// assert_eq!("💖", sparkle_heart);
    /// ```
    ///
    /// Incorrect bytes:
    ///
    /// ```
    /// // some invalid bytes
    /// let input = b"Hello \xF0\x90\x80World";
    /// let output = String::from_utf8_lossy(input);
    ///
    /// assert_eq!("Hello �World", output);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn from_utf8_lossy(v: &[u8]) -> Cow<'_, str> {
        let mut iter = lossy::Utf8Lossy::from_bytes(v).chunks();

        // Fast path: if the first chunk's valid portion covers the whole
        // input, the bytes were entirely valid UTF-8 and we can borrow
        // instead of allocating.
        let (first_valid, first_broken) = if let Some(chunk) = iter.next() {
            let lossy::Utf8LossyChunk { valid, broken } = chunk;
            if valid.len() == v.len() {
                debug_assert!(broken.is_empty());
                return Cow::Borrowed(valid);
            }
            (valid, broken)
        } else {
            return Cow::Borrowed("");
        };

        const REPLACEMENT: &str = "\u{FFFD}";

        // Slow path: rebuild the string chunk by chunk, substituting U+FFFD
        // for each broken byte sequence.
        let mut res = String::with_capacity(v.len());
        res.push_str(first_valid);
        if !first_broken.is_empty() {
            res.push_str(REPLACEMENT);
        }

        for lossy::Utf8LossyChunk { valid, broken } in iter {
            res.push_str(valid);
            if !broken.is_empty() {
                res.push_str(REPLACEMENT);
            }
        }

        Cow::Owned(res)
    }
16741
    /// Decode a UTF-16–encoded vector `v` into a `String`, returning [`Err`]
    /// if `v` contains any invalid data.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // 𝄞music
    /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075,
    ///           0x0073, 0x0069, 0x0063];
    /// assert_eq!(String::from("𝄞music"),
    ///            String::from_utf16(v).unwrap());
    ///
    /// // 𝄞mu<invalid>ic
    /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075,
    ///           0xD800, 0x0069, 0x0063];
    /// assert!(String::from_utf16(v).is_err());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn from_utf16(v: &[u16]) -> Result<String, FromUtf16Error> {
        // This isn't done via collect::<Result<_, _>>() for performance reasons.
        // FIXME: the function can be simplified again when #48994 is closed.
        let mut ret = String::with_capacity(v.len());
        for c in decode_utf16(v.iter().cloned()) {
            if let Ok(c) = c {
                ret.push(c);
            } else {
                // Bail out on the first unpaired surrogate.
                return Err(FromUtf16Error(()));
            }
        }
        Ok(ret)
    }
16775
    /// Decode a UTF-16–encoded slice `v` into a `String`, replacing
    /// invalid data with [the replacement character (`U+FFFD`)][U+FFFD].
    ///
    /// Unlike [`from_utf8_lossy`] which returns a [`Cow<'a, str>`],
    /// `from_utf16_lossy` returns a `String` since the UTF-16 to UTF-8
    /// conversion requires a memory allocation.
    ///
    /// [`from_utf8_lossy`]: String::from_utf8_lossy
    /// [`Cow<'a, str>`]: crate::borrow::Cow
    /// [U+FFFD]: core::char::REPLACEMENT_CHARACTER
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // 𝄞mus<invalid>ic<invalid>
    /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075,
    ///           0x0073, 0xDD1E, 0x0069, 0x0063,
    ///           0xD834];
    ///
    /// assert_eq!(String::from("𝄞mus\u{FFFD}ic\u{FFFD}"),
    ///            String::from_utf16_lossy(v));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn from_utf16_lossy(v: &[u16]) -> String {
        // Every unpaired surrogate decodes to an `Err`, which is mapped to
        // U+FFFD before collecting into the result.
        decode_utf16(v.iter().cloned()).map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)).collect()
    }
16805
16806    /// Decomposes a `String` into its raw components.
16807    ///
16808    /// Returns the raw pointer to the underlying data, the length of
16809    /// the string (in bytes), and the allocated capacity of the data
16810    /// (in bytes). These are the same arguments in the same order as
16811    /// the arguments to [`from_raw_parts`].
16812    ///
16813    /// After calling this function, the caller is responsible for the
16814    /// memory previously managed by the `String`. The only way to do
16815    /// this is to convert the raw pointer, length, and capacity back
16816    /// into a `String` with the [`from_raw_parts`] function, allowing
16817    /// the destructor to perform the cleanup.
16818    ///
16819    /// [`from_raw_parts`]: String::from_raw_parts
16820    ///
16821    /// # Examples
16822    ///
16823    /// ```
16824    /// #![feature(vec_into_raw_parts)]
16825    /// let s = String::from("hello");
16826    ///
16827    /// let (ptr, len, cap) = s.into_raw_parts();
16828    ///
16829    /// let rebuilt = unsafe { String::from_raw_parts(ptr, len, cap) };
16830    /// assert_eq!(rebuilt, "hello");
16831    /// ```
16832    #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")]
16833    pub fn into_raw_parts(self) -> (*mut u8, usize, usize) {
16834        self.vec.into_raw_parts()
16835    }
16836
    /// Creates a new `String` from a length, capacity, and pointer.
    ///
    /// # Safety
    ///
    /// This is highly unsafe, due to the number of invariants that aren't
    /// checked:
    ///
    /// * The memory at `buf` needs to have been previously allocated by the
    ///   same allocator the standard library uses, with a required alignment of exactly 1.
    /// * `length` needs to be less than or equal to `capacity`.
    /// * `capacity` needs to be the correct value.
    /// * The first `length` bytes at `buf` need to be valid UTF-8.
    ///
    /// Violating these may cause problems like corrupting the allocator's
    /// internal data structures.
    ///
    /// The ownership of `buf` is effectively transferred to the
    /// `String` which may then deallocate, reallocate or change the
    /// contents of memory pointed to by the pointer at will. Ensure
    /// that nothing else uses the pointer after calling this
    /// function.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::mem;
    ///
    /// unsafe {
    ///     let s = String::from("hello");
    ///
    // FIXME Update this when vec_into_raw_parts is stabilized
    ///     // Prevent automatically dropping the String's data
    ///     let mut s = mem::ManuallyDrop::new(s);
    ///
    ///     let ptr = s.as_mut_ptr();
    ///     let len = s.len();
    ///     let capacity = s.capacity();
    ///
    ///     let s = String::from_raw_parts(ptr, len, capacity);
    ///
    ///     assert_eq!(String::from("hello"), s);
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub unsafe fn from_raw_parts(buf: *mut u8, length: usize, capacity: usize) -> String {
        // SAFETY: the caller upholds the pointer/length/capacity invariants
        // required by `Vec::from_raw_parts`, plus UTF-8 validity of the first
        // `length` bytes (see the `# Safety` section above).
        unsafe { String { vec: Vec::from_raw_parts(buf, length, capacity) } }
    }
16887
    /// Converts a vector of bytes to a `String` without checking that the
    /// string contains valid UTF-8.
    ///
    /// See the safe version, [`from_utf8`], for more details.
    ///
    /// [`from_utf8`]: String::from_utf8
    ///
    /// # Safety
    ///
    /// This function is unsafe because it does not check that the bytes passed
    /// to it are valid UTF-8. If this constraint is violated, it may cause
    /// memory unsafety issues with future users of the `String`, as the rest of
    /// the standard library assumes that `String`s are valid UTF-8.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // some bytes, in a vector
    /// let sparkle_heart = vec![240, 159, 146, 150];
    ///
    /// let sparkle_heart = unsafe {
    ///     String::from_utf8_unchecked(sparkle_heart)
    /// };
    ///
    /// assert_eq!("💖", sparkle_heart);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub unsafe fn from_utf8_unchecked(bytes: Vec<u8>) -> String {
        // No validation: the caller promises `bytes` is valid UTF-8.
        String { vec: bytes }
    }
16921
16922    /// Converts a `String` into a byte vector.
16923    ///
16924    /// This consumes the `String`, so we do not need to copy its contents.
16925    ///
16926    /// # Examples
16927    ///
16928    /// Basic usage:
16929    ///
16930    /// ```
16931    /// let s = String::from("hello");
16932    /// let bytes = s.into_bytes();
16933    ///
16934    /// assert_eq!(&[104, 101, 108, 108, 111][..], &bytes[..]);
16935    /// ```
16936    #[inline]
16937    #[stable(feature = "rust1", since = "1.0.0")]
16938    pub fn into_bytes(self) -> Vec<u8> {
16939        self.vec
16940    }
16941
    /// Extracts a string slice containing the entire `String`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s = String::from("foo");
    ///
    /// assert_eq!("foo", s.as_str());
    /// ```
    #[inline]
    #[stable(feature = "string_as_str", since = "1.7.0")]
    pub fn as_str(&self) -> &str {
        // `&String` coerces to `&str` via the `Deref` impl.
        self
    }
16958
    /// Converts a `String` into a mutable string slice.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let mut s = String::from("foobar");
    /// let s_mut_str = s.as_mut_str();
    ///
    /// s_mut_str.make_ascii_uppercase();
    ///
    /// assert_eq!("FOOBAR", s_mut_str);
    /// ```
    #[inline]
    #[stable(feature = "string_as_str", since = "1.7.0")]
    pub fn as_mut_str(&mut self) -> &mut str {
        // `&mut String` coerces to `&mut str` via the `DerefMut` impl.
        self
    }
16978
16979    /// Appends a given string slice onto the end of this `String`.
16980    ///
16981    /// # Examples
16982    ///
16983    /// Basic usage:
16984    ///
16985    /// ```
16986    /// let mut s = String::from("foo");
16987    ///
16988    /// s.push_str("bar");
16989    ///
16990    /// assert_eq!("foobar", s);
16991    /// ```
16992    #[inline]
16993    #[stable(feature = "rust1", since = "1.0.0")]
16994    pub fn push_str(&mut self, string: &str) {
16995        self.vec.extend_from_slice(string.as_bytes())
16996    }
16997
    /// Returns this `String`'s capacity, in bytes.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s = String::with_capacity(10);
    ///
    /// assert!(s.capacity() >= 10);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn capacity(&self) -> usize {
        // Capacity is tracked entirely by the backing byte vector.
        self.vec.capacity()
    }
17014
    /// Ensures that this `String`'s capacity is at least `additional` bytes
    /// larger than its length.
    ///
    /// The capacity may be increased by more than `additional` bytes if it
    /// chooses, to prevent frequent reallocations.
    ///
    /// If you do not want this "at least" behavior, see the [`reserve_exact`]
    /// method.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows [`usize`].
    ///
    /// [`reserve_exact`]: String::reserve_exact
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let mut s = String::new();
    ///
    /// s.reserve(10);
    ///
    /// assert!(s.capacity() >= 10);
    /// ```
    ///
    /// This may not actually increase the capacity:
    ///
    /// ```
    /// let mut s = String::with_capacity(10);
    /// s.push('a');
    /// s.push('b');
    ///
    /// // s now has a length of 2 and a capacity of 10
    /// assert_eq!(2, s.len());
    /// assert_eq!(10, s.capacity());
    ///
    /// // Since we already have an extra 8 capacity, calling this...
    /// s.reserve(8);
    ///
    /// // ... doesn't actually increase.
    /// assert_eq!(10, s.capacity());
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn reserve(&mut self, additional: usize) {
        // Delegates to `Vec::reserve` on the backing byte buffer.
        self.vec.reserve(additional)
    }
17064
    /// Ensures that this `String`'s capacity is `additional` bytes
    /// larger than its length.
    ///
    /// Consider using the [`reserve`] method unless you absolutely know
    /// better than the allocator.
    ///
    /// [`reserve`]: String::reserve
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows `usize`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let mut s = String::new();
    ///
    /// s.reserve_exact(10);
    ///
    /// assert!(s.capacity() >= 10);
    /// ```
    ///
    /// This may not actually increase the capacity:
    ///
    /// ```
    /// let mut s = String::with_capacity(10);
    /// s.push('a');
    /// s.push('b');
    ///
    /// // s now has a length of 2 and a capacity of 10
    /// assert_eq!(2, s.len());
    /// assert_eq!(10, s.capacity());
    ///
    /// // Since we already have an extra 8 capacity, calling this...
    /// s.reserve_exact(8);
    ///
    /// // ... doesn't actually increase.
    /// assert_eq!(10, s.capacity());
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn reserve_exact(&mut self, additional: usize) {
        // Delegates to `Vec::reserve_exact` on the backing byte buffer.
        self.vec.reserve_exact(additional)
    }
17111
    /// Tries to reserve capacity for at least `additional` more elements to be inserted
    /// in the given `String`. The collection may reserve more space to avoid
    /// frequent reallocations. After calling `reserve`, capacity will be
    /// greater than or equal to `self.len() + additional`. Does nothing if
    /// capacity is already sufficient.
    ///
    /// # Errors
    ///
    /// If the capacity overflows, or the allocator reports a failure, then an error
    /// is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(try_reserve)]
    /// use std::collections::TryReserveError;
    ///
    /// fn process_data(data: &str) -> Result<String, TryReserveError> {
    ///     let mut output = String::new();
    ///
    ///     // Pre-reserve the memory, exiting if we can't
    ///     output.try_reserve(data.len())?;
    ///
    ///     // Now we know this can't OOM in the middle of our complex work
    ///     output.push_str(data);
    ///
    ///     Ok(output)
    /// }
    /// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?");
    /// ```
    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
        // Fallible counterpart of `reserve`; delegates to the backing `Vec`.
        self.vec.try_reserve(additional)
    }
17146
    /// Tries to reserve the minimum capacity for exactly `additional` more elements to
    /// be inserted in the given `String`. After calling `reserve_exact`,
    /// capacity will be greater than or equal to `self.len() + additional`.
    /// Does nothing if the capacity is already sufficient.
    ///
    /// Note that the allocator may give the collection more space than it
    /// requests. Therefore, capacity can not be relied upon to be precisely
    /// minimal. Prefer `reserve` if future insertions are expected.
    ///
    /// # Errors
    ///
    /// If the capacity overflows, or the allocator reports a failure, then an error
    /// is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(try_reserve)]
    /// use std::collections::TryReserveError;
    ///
    /// fn process_data(data: &str) -> Result<String, TryReserveError> {
    ///     let mut output = String::new();
    ///
    ///     // Pre-reserve the memory, exiting if we can't
    ///     output.try_reserve_exact(data.len())?;
    ///
    ///     // Now we know this can't OOM in the middle of our complex work
    ///     output.push_str(data);
    ///
    ///     Ok(output)
    /// }
    /// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?");
    /// ```
    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
        // Fallible counterpart of `reserve_exact`; delegates to the backing `Vec`.
        self.vec.try_reserve_exact(additional)
    }
17184
    /// Shrinks the capacity of this `String` to match its length.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let mut s = String::from("foo");
    ///
    /// s.reserve(100);
    /// assert!(s.capacity() >= 100);
    ///
    /// s.shrink_to_fit();
    /// assert_eq!(3, s.capacity());
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn shrink_to_fit(&mut self) {
        // Delegates to `Vec::shrink_to_fit` on the backing byte buffer.
        self.vec.shrink_to_fit()
    }
17205
    /// Shrinks the capacity of this `String` with a lower bound.
    ///
    /// The capacity will remain at least as large as both the length
    /// and the supplied value.
    ///
    /// If the current capacity is less than the lower limit, this is a no-op.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(shrink_to)]
    /// let mut s = String::from("foo");
    ///
    /// s.reserve(100);
    /// assert!(s.capacity() >= 100);
    ///
    /// s.shrink_to(10);
    /// assert!(s.capacity() >= 10);
    /// s.shrink_to(0);
    /// assert!(s.capacity() >= 3);
    /// ```
    #[inline]
    #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
    pub fn shrink_to(&mut self, min_capacity: usize) {
        // Delegates to `Vec::shrink_to` on the backing byte buffer.
        self.vec.shrink_to(min_capacity)
    }
17232
17233    /// Appends the given [`char`] to the end of this `String`.
17234    ///
17235    /// # Examples
17236    ///
17237    /// Basic usage:
17238    ///
17239    /// ```
17240    /// let mut s = String::from("abc");
17241    ///
17242    /// s.push('1');
17243    /// s.push('2');
17244    /// s.push('3');
17245    ///
17246    /// assert_eq!("abc123", s);
17247    /// ```
17248    #[inline]
17249    #[stable(feature = "rust1", since = "1.0.0")]
17250    pub fn push(&mut self, ch: char) {
17251        match ch.len_utf8() {
17252            1 => self.vec.push(ch as u8),
17253            _ => self.vec.extend_from_slice(ch.encode_utf8(&mut [0; 4]).as_bytes()),
17254        }
17255    }
17256
17257    /// Returns a byte slice of this `String`'s contents.
17258    ///
17259    /// The inverse of this method is [`from_utf8`].
17260    ///
17261    /// [`from_utf8`]: String::from_utf8
17262    ///
17263    /// # Examples
17264    ///
17265    /// Basic usage:
17266    ///
17267    /// ```
17268    /// let s = String::from("hello");
17269    ///
17270    /// assert_eq!(&[104, 101, 108, 108, 111], s.as_bytes());
17271    /// ```
17272    #[inline]
17273    #[stable(feature = "rust1", since = "1.0.0")]
17274    pub fn as_bytes(&self) -> &[u8] {
17275        &self.vec
17276    }
17277
17278    /// Shortens this `String` to the specified length.
17279    ///
17280    /// If `new_len` is greater than the string's current length, this has no
17281    /// effect.
17282    ///
17283    /// Note that this method has no effect on the allocated capacity
17284    /// of the string
17285    ///
17286    /// # Panics
17287    ///
17288    /// Panics if `new_len` does not lie on a [`char`] boundary.
17289    ///
17290    /// # Examples
17291    ///
17292    /// Basic usage:
17293    ///
17294    /// ```
17295    /// let mut s = String::from("hello");
17296    ///
17297    /// s.truncate(2);
17298    ///
17299    /// assert_eq!("he", s);
17300    /// ```
17301    #[inline]
17302    #[stable(feature = "rust1", since = "1.0.0")]
17303    pub fn truncate(&mut self, new_len: usize) {
17304        if new_len <= self.len() {
17305            assert!(self.is_char_boundary(new_len));
17306            self.vec.truncate(new_len)
17307        }
17308    }
17309
17310    /// Removes the last character from the string buffer and returns it.
17311    ///
17312    /// Returns [`None`] if this `String` is empty.
17313    ///
17314    /// # Examples
17315    ///
17316    /// Basic usage:
17317    ///
17318    /// ```
17319    /// let mut s = String::from("foo");
17320    ///
17321    /// assert_eq!(s.pop(), Some('o'));
17322    /// assert_eq!(s.pop(), Some('o'));
17323    /// assert_eq!(s.pop(), Some('f'));
17324    ///
17325    /// assert_eq!(s.pop(), None);
17326    /// ```
17327    #[inline]
17328    #[stable(feature = "rust1", since = "1.0.0")]
17329    pub fn pop(&mut self) -> Option<char> {
17330        let ch = self.chars().rev().next()?;
17331        let newlen = self.len() - ch.len_utf8();
17332        unsafe {
17333            self.vec.set_len(newlen);
17334        }
17335        Some(ch)
17336    }
17337
    /// Removes a [`char`] from this `String` at a byte position and returns it.
    ///
    /// This is an *O*(*n*) operation, as it requires copying every element in the
    /// buffer.
    ///
    /// # Panics
    ///
    /// Panics if `idx` is larger than or equal to the `String`'s length,
    /// or if it does not lie on a [`char`] boundary.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let mut s = String::from("foo");
    ///
    /// assert_eq!(s.remove(0), 'f');
    /// assert_eq!(s.remove(1), 'o');
    /// assert_eq!(s.remove(0), 'o');
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn remove(&mut self, idx: usize) -> char {
        // Indexing with `self[idx..]` also panics if `idx` is out of bounds
        // or not on a char boundary, covering the documented panic cases.
        let ch = match self[idx..].chars().next() {
            Some(ch) => ch,
            None => panic!("cannot remove a char from the end of a string"),
        };

        let next = idx + ch.len_utf8();
        let len = self.len();
        // SAFETY: `idx..next` spans exactly one char, so shifting the tail
        // (`next..len`) down to `idx` keeps the bytes valid UTF-8; the new
        // length `len - (next - idx)` covers exactly the remaining bytes.
        // `ptr::copy` permits the overlapping ranges involved here.
        unsafe {
            ptr::copy(self.vec.as_ptr().add(next), self.vec.as_mut_ptr().add(idx), len - next);
            self.vec.set_len(len - (next - idx));
        }
        ch
    }
17375
    /// Remove all matches of pattern `pat` in the `String`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(string_remove_matches)]
    /// let mut s = String::from("Trees are not green, the sky is not blue.");
    /// s.remove_matches("not ");
    /// assert_eq!("Trees are green, the sky is blue.", s);
    /// ```
    ///
    /// Matches will be detected and removed iteratively, so in cases where
    /// patterns overlap, only the first pattern will be removed:
    ///
    /// ```
    /// #![feature(string_remove_matches)]
    /// let mut s = String::from("banana");
    /// s.remove_matches("ana");
    /// assert_eq!("bna", s);
    /// ```
    #[unstable(feature = "string_remove_matches", reason = "new API", issue = "72826")]
    pub fn remove_matches<'a, P>(&'a mut self, pat: P)
    where
        P: for<'x> Pattern<'x>,
    {
        use core::str::pattern::Searcher;

        // Phase 1: collect every (start, end) match range up front, because
        // the searcher borrows `self` and we mutate the buffer afterwards.
        let matches = {
            let mut searcher = pat.into_searcher(self);
            let mut matches = Vec::new();

            while let Some(m) = searcher.next_match() {
                matches.push(m);
            }

            matches
        };

        let len = self.len();
        // Running total of bytes already removed; earlier shifts move every
        // later match range left by this amount.
        let mut shrunk_by = 0;

        // Phase 2: compact the buffer in place, shifting each kept tail down
        // over the removed match, then fix up the length once at the end.
        // SAFETY: start and end will be on utf8 byte boundaries per
        // the Searcher docs
        unsafe {
            for (start, end) in matches {
                ptr::copy(
                    self.vec.as_mut_ptr().add(end - shrunk_by),
                    self.vec.as_mut_ptr().add(start - shrunk_by),
                    len - end,
                );
                shrunk_by += end - start;
            }
            self.vec.set_len(len - shrunk_by);
        }
    }
17431
    /// Retains only the characters specified by the predicate.
    ///
    /// In other words, remove all characters `c` such that `f(c)` returns `false`.
    /// This method operates in place, visiting each character exactly once in the
    /// original order, and preserves the order of the retained characters.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = String::from("f_o_ob_ar");
    ///
    /// s.retain(|c| c != '_');
    ///
    /// assert_eq!(s, "foobar");
    /// ```
    ///
    /// The exact order may be useful for tracking external state, like an index.
    ///
    /// ```
    /// let mut s = String::from("abcde");
    /// let keep = [false, true, true, false, true];
    /// let mut i = 0;
    /// s.retain(|_| (keep[i], i += 1).0);
    /// assert_eq!(s, "bce");
    /// ```
    #[inline]
    #[stable(feature = "string_retain", since = "1.26.0")]
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(char) -> bool,
    {
        // Panic-safety guard: if `f` panics mid-scan, `drop` still runs and
        // sets the length to cover only the bytes already compacted, leaving
        // the `String` valid UTF-8 (possibly shorter) instead of exposing
        // stale bytes past the compaction point.
        struct SetLenOnDrop<'a> {
            s: &'a mut String,
            idx: usize,       // byte index of the next char to examine
            del_bytes: usize, // total bytes deleted so far
        }

        impl<'a> Drop for SetLenOnDrop<'a> {
            fn drop(&mut self) {
                let new_len = self.idx - self.del_bytes;
                debug_assert!(new_len <= self.s.len());
                // SAFETY: bytes `0..new_len` were compacted from whole chars,
                // so they are valid UTF-8 and within the old length.
                unsafe { self.s.vec.set_len(new_len) };
            }
        }

        let len = self.len();
        let mut guard = SetLenOnDrop { s: self, idx: 0, del_bytes: 0 };

        while guard.idx < len {
            // SAFETY: `idx` always sits on a char boundary (it only ever
            // advances by whole-char lengths), so the slice is valid UTF-8
            // and non-empty while `idx < len`.
            let ch = unsafe { guard.s.get_unchecked(guard.idx..len).chars().next().unwrap() };
            let ch_len = ch.len_utf8();

            if !f(ch) {
                guard.del_bytes += ch_len;
            } else if guard.del_bytes > 0 {
                // Shift this kept char left over the gap of deleted bytes.
                // SAFETY: source and destination both lie within `0..len`,
                // and `ptr::copy` permits overlapping ranges.
                unsafe {
                    ptr::copy(
                        guard.s.vec.as_ptr().add(guard.idx),
                        guard.s.vec.as_mut_ptr().add(guard.idx - guard.del_bytes),
                        ch_len,
                    );
                }
            }

            // Point idx to the next char
            guard.idx += ch_len;
        }

        drop(guard);
    }
17502
17503    /// Inserts a character into this `String` at a byte position.
17504    ///
17505    /// This is an *O*(*n*) operation as it requires copying every element in the
17506    /// buffer.
17507    ///
17508    /// # Panics
17509    ///
17510    /// Panics if `idx` is larger than the `String`'s length, or if it does not
17511    /// lie on a [`char`] boundary.
17512    ///
17513    /// # Examples
17514    ///
17515    /// Basic usage:
17516    ///
17517    /// ```
17518    /// let mut s = String::with_capacity(3);
17519    ///
17520    /// s.insert(0, 'f');
17521    /// s.insert(1, 'o');
17522    /// s.insert(2, 'o');
17523    ///
17524    /// assert_eq!("foo", s);
17525    /// ```
17526    #[inline]
17527    #[stable(feature = "rust1", since = "1.0.0")]
17528    pub fn insert(&mut self, idx: usize, ch: char) {
17529        assert!(self.is_char_boundary(idx));
17530        let mut bits = [0; 4];
17531        let bits = ch.encode_utf8(&mut bits).as_bytes();
17532
17533        unsafe {
17534            self.insert_bytes(idx, bits);
17535        }
17536    }
17537
    // Shifts the tail of the buffer right and splices `bytes` in at `idx`.
    //
    // # Safety
    //
    // Caller must ensure `idx <= self.len()`, that `idx` lies on a char
    // boundary, and that `bytes` is valid UTF-8 (both public callers assert
    // the boundary and pass encoded chars / `str` bytes).
    unsafe fn insert_bytes(&mut self, idx: usize, bytes: &[u8]) {
        let len = self.len();
        let amt = bytes.len();
        self.vec.reserve(amt);

        unsafe {
            // Open a gap: move `[idx, len)` right by `amt` (regions overlap,
            // hence `ptr::copy`), fill the gap, then publish the new length.
            ptr::copy(self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx + amt), len - idx);
            ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt);
            self.vec.set_len(len + amt);
        }
    }
17549
17550    /// Inserts a string slice into this `String` at a byte position.
17551    ///
17552    /// This is an *O*(*n*) operation as it requires copying every element in the
17553    /// buffer.
17554    ///
17555    /// # Panics
17556    ///
17557    /// Panics if `idx` is larger than the `String`'s length, or if it does not
17558    /// lie on a [`char`] boundary.
17559    ///
17560    /// # Examples
17561    ///
17562    /// Basic usage:
17563    ///
17564    /// ```
17565    /// let mut s = String::from("bar");
17566    ///
17567    /// s.insert_str(0, "foo");
17568    ///
17569    /// assert_eq!("foobar", s);
17570    /// ```
17571    #[inline]
17572    #[stable(feature = "insert_str", since = "1.16.0")]
17573    pub fn insert_str(&mut self, idx: usize, string: &str) {
17574        assert!(self.is_char_boundary(idx));
17575
17576        unsafe {
17577            self.insert_bytes(idx, string.as_bytes());
17578        }
17579    }
17580
    /// Returns a mutable reference to the contents of this `String`.
    ///
    /// # Safety
    ///
    /// This function is unsafe because it does not check that the bytes passed
    /// to it are valid UTF-8. If this constraint is violated, it may cause
    /// memory unsafety issues with future users of the `String`, as the rest of
    /// the standard library assumes that `String`s are valid UTF-8.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let mut s = String::from("hello");
    ///
    /// unsafe {
    ///     let vec = s.as_mut_vec();
    ///     assert_eq!(&[104, 101, 108, 108, 111][..], &vec[..]);
    ///
    ///     vec.reverse();
    /// }
    /// assert_eq!(s, "olleh");
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub unsafe fn as_mut_vec(&mut self) -> &mut Vec<u8> {
        // Hands out the backing buffer directly; the caller becomes
        // responsible for upholding the UTF-8 invariant.
        &mut self.vec
    }
17610
    /// Returns the length of this `String`, in bytes, not [`char`]s or
    /// graphemes. In other words, it may not be what a human considers the
    /// length of the string.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let a = String::from("foo");
    /// assert_eq!(a.len(), 3);
    ///
    /// let fancy_f = String::from("ƒoo");
    /// assert_eq!(fancy_f.len(), 4);
    /// assert_eq!(fancy_f.chars().count(), 3);
    /// ```
    #[doc(alias = "length")]
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn len(&self) -> usize {
        // A `String` is a `Vec<u8>` of UTF-8 bytes, so the byte length is
        // exactly the vector's length.
        self.vec.len()
    }
17633
    /// Returns `true` if this `String` has a length of zero, and `false` otherwise.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let mut v = String::new();
    /// assert!(v.is_empty());
    ///
    /// v.push('a');
    /// assert!(!v.is_empty());
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn is_empty(&self) -> bool {
        // Defined in terms of `len` so the two can never disagree.
        self.len() == 0
    }
17652
17653    /// Splits the string into two at the given byte index.
17654    ///
17655    /// Returns a newly allocated `String`. `self` contains bytes `[0, at)`, and
17656    /// the returned `String` contains bytes `[at, len)`. `at` must be on the
17657    /// boundary of a UTF-8 code point.
17658    ///
17659    /// Note that the capacity of `self` does not change.
17660    ///
17661    /// # Panics
17662    ///
17663    /// Panics if `at` is not on a `UTF-8` code point boundary, or if it is beyond the last
17664    /// code point of the string.
17665    ///
17666    /// # Examples
17667    ///
17668    /// ```
17669    /// # fn main() {
17670    /// let mut hello = String::from("Hello, World!");
17671    /// let world = hello.split_off(7);
17672    /// assert_eq!(hello, "Hello, ");
17673    /// assert_eq!(world, "World!");
17674    /// # }
17675    /// ```
17676    #[inline]
17677    #[stable(feature = "string_split_off", since = "1.16.0")]
17678    #[must_use = "use `.truncate()` if you don't need the other half"]
17679    pub fn split_off(&mut self, at: usize) -> String {
17680        assert!(self.is_char_boundary(at));
17681        let other = self.vec.split_off(at);
17682        unsafe { String::from_utf8_unchecked(other) }
17683    }
17684
    /// Truncates this `String`, removing all contents.
    ///
    /// While this means the `String` will have a length of zero, it does not
    /// touch its capacity.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let mut s = String::from("foo");
    ///
    /// s.clear();
    ///
    /// assert!(s.is_empty());
    /// assert_eq!(0, s.len());
    /// assert_eq!(3, s.capacity());
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn clear(&mut self) {
        // `Vec::clear` drops the elements (a no-op for `u8`) and keeps the
        // allocation, which is what preserves the capacity.
        self.vec.clear()
    }
17708
    /// Creates a draining iterator that removes the specified range in the `String`
    /// and yields the removed `chars`.
    ///
    /// Note: The element range is removed even if the iterator is not
    /// consumed until the end.
    ///
    /// # Panics
    ///
    /// Panics if the starting point or end point do not lie on a [`char`]
    /// boundary, or if they're out of bounds.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let mut s = String::from("α is alpha, β is beta");
    /// let beta_offset = s.find('β').unwrap_or(s.len());
    ///
    /// // Remove the range up until the β from the string
    /// let t: String = s.drain(..beta_offset).collect();
    /// assert_eq!(t, "α is alpha, ");
    /// assert_eq!(s, "β is beta");
    ///
    /// // A full range clears the string
    /// s.drain(..);
    /// assert_eq!(s, "");
    /// ```
    #[stable(feature = "drain", since = "1.6.0")]
    pub fn drain<R>(&mut self, range: R) -> Drain<'_>
    where
        R: RangeBounds<usize>,
    {
        // Memory safety
        //
        // The String version of Drain does not have the memory safety issues
        // of the vector version. The data is just plain bytes.
        // Because the range removal happens in Drop, if the Drain iterator is leaked,
        // the removal will not happen.
        let Range { start, end } = slice::range(range, ..self.len());
        assert!(self.is_char_boundary(start));
        assert!(self.is_char_boundary(end));

        // Take out two simultaneous borrows. The &mut String won't be accessed
        // until iteration is over, in Drop.
        // (The raw pointer is only dereferenced in `Drain`'s `Drop`, after the
        // `chars` borrow below has ended, so the two never alias live.)
        let self_ptr = self as *mut _;
        // SAFETY: `slice::range` and `is_char_boundary` do the appropriate bounds checks.
        let chars_iter = unsafe { self.get_unchecked(start..end) }.chars();

        Drain { start, end, iter: chars_iter, string: self_ptr }
    }
17760
    /// Removes the specified range in the string,
    /// and replaces it with the given string.
    /// The given string doesn't need to be the same length as the range.
    ///
    /// # Panics
    ///
    /// Panics if the starting point or end point do not lie on a [`char`]
    /// boundary, or if they're out of bounds.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let mut s = String::from("α is alpha, β is beta");
    /// let beta_offset = s.find('β').unwrap_or(s.len());
    ///
    /// // Replace the range up until the β from the string
    /// s.replace_range(..beta_offset, "Α is capital alpha; ");
    /// assert_eq!(s, "Α is capital alpha; β is beta");
    /// ```
    #[stable(feature = "splice", since = "1.27.0")]
    pub fn replace_range<R>(&mut self, range: R, replace_with: &str)
    where
        R: RangeBounds<usize>,
    {
        // Memory safety
        //
        // `replace_range` does not have the memory safety issues of the
        // vector version of `splice`: the data here is just plain bytes.

        // WARNING: Inlining this variable would be unsound (#81138)
        let start = range.start_bound();
        match start {
            Included(&n) => assert!(self.is_char_boundary(n)),
            Excluded(&n) => assert!(self.is_char_boundary(n + 1)),
            Unbounded => {}
        };
        // WARNING: Inlining this variable would be unsound (#81138)
        let end = range.end_bound();
        match end {
            Included(&n) => assert!(self.is_char_boundary(n + 1)),
            Excluded(&n) => assert!(self.is_char_boundary(n)),
            Unbounded => {}
        };

        // Using `range` again would be unsound (#81138)
        // We assume the bounds reported by `range` remain the same, but
        // an adversarial implementation could change between calls
        unsafe { self.as_mut_vec() }.splice((start, end), replace_with.bytes());
    }
17812
17813    /// Converts this `String` into a [`Box`]`<`[`str`]`>`.
17814    ///
17815    /// This will drop any excess capacity.
17816    ///
17817    /// [`str`]: prim@str
17818    ///
17819    /// # Examples
17820    ///
17821    /// Basic usage:
17822    ///
17823    /// ```
17824    /// let s = String::from("hello");
17825    ///
17826    /// let b = s.into_boxed_str();
17827    /// ```
17828    #[stable(feature = "box_str", since = "1.4.0")]
17829    #[inline]
17830    pub fn into_boxed_str(self) -> Box<str> {
17831        let slice = self.vec.into_boxed_slice();
17832        unsafe { from_boxed_utf8_unchecked(slice) }
17833    }
17834}
17835
impl FromUtf8Error {
    /// Returns a slice of [`u8`]s bytes that were attempted to convert to a `String`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // some invalid bytes, in a vector
    /// let bytes = vec![0, 159];
    ///
    /// let value = String::from_utf8(bytes);
    ///
    /// assert_eq!(&[0, 159], value.unwrap_err().as_bytes());
    /// ```
    #[stable(feature = "from_utf8_error_as_bytes", since = "1.26.0")]
    pub fn as_bytes(&self) -> &[u8] {
        // Borrowing accessor; `into_bytes` is the consuming counterpart.
        &self.bytes[..]
    }

    /// Returns the bytes that were attempted to convert to a `String`.
    ///
    /// This method is carefully constructed to avoid allocation. It will
    /// consume the error, moving out the bytes, so that a copy of the bytes
    /// does not need to be made.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // some invalid bytes, in a vector
    /// let bytes = vec![0, 159];
    ///
    /// let value = String::from_utf8(bytes);
    ///
    /// assert_eq!(vec![0, 159], value.unwrap_err().into_bytes());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn into_bytes(self) -> Vec<u8> {
        // Moves the stored vector out without copying.
        self.bytes
    }

    /// Fetch a `Utf8Error` to get more details about the conversion failure.
    ///
    /// The [`Utf8Error`] type provided by [`std::str`] represents an error that may
    /// occur when converting a slice of [`u8`]s to a [`&str`]. In this sense, it's
    /// an analogue to `FromUtf8Error`. See its documentation for more details
    /// on using it.
    ///
    /// [`std::str`]: core::str
    /// [`&str`]: prim@str
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // some invalid bytes, in a vector
    /// let bytes = vec![0, 159];
    ///
    /// let error = String::from_utf8(bytes).unwrap_err().utf8_error();
    ///
    /// // the first byte is invalid here
    /// assert_eq!(1, error.valid_up_to());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn utf8_error(&self) -> Utf8Error {
        // `Utf8Error` is returned by value (it is a small copyable error).
        self.error
    }
}
17907
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Display for FromUtf8Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate to the wrapped `Utf8Error`, preserving formatter flags.
        fmt::Display::fmt(&self.error, f)
    }
}
17914
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Display for FromUtf16Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // A fixed message: UTF-16 decoding here only fails on lone surrogates.
        // Going through `Display::fmt` (rather than `write_str`) keeps
        // formatter flags such as width respected.
        fmt::Display::fmt("invalid utf-16: lone surrogate found", f)
    }
}
17921
#[stable(feature = "rust1", since = "1.0.0")]
impl Clone for String {
    fn clone(&self) -> Self {
        String { vec: self.vec.clone() }
    }

    // Overridden so `Vec::clone_from` can reuse `self`'s existing allocation
    // instead of always allocating afresh.
    fn clone_from(&mut self, source: &Self) {
        self.vec.clone_from(&source.vec);
    }
}
17932
17933#[stable(feature = "rust1", since = "1.0.0")]
17934impl FromIterator<char> for String {
17935    fn from_iter<I: IntoIterator<Item = char>>(iter: I) -> String {
17936        let mut buf = String::new();
17937        buf.extend(iter);
17938        buf
17939    }
17940}
17941
17942#[stable(feature = "string_from_iter_by_ref", since = "1.17.0")]
17943impl<'a> FromIterator<&'a char> for String {
17944    fn from_iter<I: IntoIterator<Item = &'a char>>(iter: I) -> String {
17945        let mut buf = String::new();
17946        buf.extend(iter);
17947        buf
17948    }
17949}
17950
17951#[stable(feature = "rust1", since = "1.0.0")]
17952impl<'a> FromIterator<&'a str> for String {
17953    fn from_iter<I: IntoIterator<Item = &'a str>>(iter: I) -> String {
17954        let mut buf = String::new();
17955        buf.extend(iter);
17956        buf
17957    }
17958}
17959
17960#[stable(feature = "extend_string", since = "1.4.0")]
17961impl FromIterator<String> for String {
17962    fn from_iter<I: IntoIterator<Item = String>>(iter: I) -> String {
17963        let mut iterator = iter.into_iter();
17964
17965        // Because we're iterating over `String`s, we can avoid at least
17966        // one allocation by getting the first string from the iterator
17967        // and appending to it all the subsequent strings.
17968        match iterator.next() {
17969            None => String::new(),
17970            Some(mut buf) => {
17971                buf.extend(iterator);
17972                buf
17973            }
17974        }
17975    }
17976}
17977
17978#[stable(feature = "box_str2", since = "1.45.0")]
17979impl FromIterator<Box<str>> for String {
17980    fn from_iter<I: IntoIterator<Item = Box<str>>>(iter: I) -> String {
17981        let mut buf = String::new();
17982        buf.extend(iter);
17983        buf
17984    }
17985}
17986
17987#[stable(feature = "herd_cows", since = "1.19.0")]
17988impl<'a> FromIterator<Cow<'a, str>> for String {
17989    fn from_iter<I: IntoIterator<Item = Cow<'a, str>>>(iter: I) -> String {
17990        let mut iterator = iter.into_iter();
17991
17992        // Because we're iterating over CoWs, we can (potentially) avoid at least
17993        // one allocation by getting the first item and appending to it all the
17994        // subsequent items.
17995        match iterator.next() {
17996            None => String::new(),
17997            Some(cow) => {
17998                let mut buf = cow.into_owned();
17999                buf.extend(iterator);
18000                buf
18001            }
18002        }
18003    }
18004}
18005
18006#[stable(feature = "rust1", since = "1.0.0")]
18007impl Extend<char> for String {
18008    fn extend<I: IntoIterator<Item = char>>(&mut self, iter: I) {
18009        let iterator = iter.into_iter();
18010        let (lower_bound, _) = iterator.size_hint();
18011        self.reserve(lower_bound);
18012        iterator.for_each(move |c| self.push(c));
18013    }
18014
18015    #[inline]
18016    fn extend_one(&mut self, c: char) {
18017        self.push(c);
18018    }
18019
18020    #[inline]
18021    fn extend_reserve(&mut self, additional: usize) {
18022        self.reserve(additional);
18023    }
18024}
18025
18026#[stable(feature = "extend_ref", since = "1.2.0")]
18027impl<'a> Extend<&'a char> for String {
18028    fn extend<I: IntoIterator<Item = &'a char>>(&mut self, iter: I) {
18029        self.extend(iter.into_iter().cloned());
18030    }
18031
18032    #[inline]
18033    fn extend_one(&mut self, &c: &'a char) {
18034        self.push(c);
18035    }
18036
18037    #[inline]
18038    fn extend_reserve(&mut self, additional: usize) {
18039        self.reserve(additional);
18040    }
18041}
18042
18043#[stable(feature = "rust1", since = "1.0.0")]
18044impl<'a> Extend<&'a str> for String {
18045    fn extend<I: IntoIterator<Item = &'a str>>(&mut self, iter: I) {
18046        iter.into_iter().for_each(move |s| self.push_str(s));
18047    }
18048
18049    #[inline]
18050    fn extend_one(&mut self, s: &'a str) {
18051        self.push_str(s);
18052    }
18053}
18054
18055#[stable(feature = "box_str2", since = "1.45.0")]
18056impl Extend<Box<str>> for String {
18057    fn extend<I: IntoIterator<Item = Box<str>>>(&mut self, iter: I) {
18058        iter.into_iter().for_each(move |s| self.push_str(&s));
18059    }
18060}
18061
18062#[stable(feature = "extend_string", since = "1.4.0")]
18063impl Extend<String> for String {
18064    fn extend<I: IntoIterator<Item = String>>(&mut self, iter: I) {
18065        iter.into_iter().for_each(move |s| self.push_str(&s));
18066    }
18067
18068    #[inline]
18069    fn extend_one(&mut self, s: String) {
18070        self.push_str(&s);
18071    }
18072}
18073
18074#[stable(feature = "herd_cows", since = "1.19.0")]
18075impl<'a> Extend<Cow<'a, str>> for String {
18076    fn extend<I: IntoIterator<Item = Cow<'a, str>>>(&mut self, iter: I) {
18077        iter.into_iter().for_each(move |s| self.push_str(&s));
18078    }
18079
18080    #[inline]
18081    fn extend_one(&mut self, s: Cow<'a, str>) {
18082        self.push_str(&s);
18083    }
18084}
18085
/// A convenience impl that delegates to the impl for `&str`.
///
/// # Examples
///
/// ```
/// assert_eq!(String::from("Hello world").find("world"), Some(6));
/// ```
#[unstable(
    feature = "pattern",
    reason = "API not fully fleshed out and ready to be stabilized",
    issue = "27721"
)]
impl<'a, 'b> Pattern<'a> for &'b String {
    type Searcher = <&'b str as Pattern<'a>>::Searcher;

    // Every method below reborrows the `String` as `&'b str` via `self[..]`
    // and forwards to the corresponding `&str` Pattern method.
    fn into_searcher(self, haystack: &'a str) -> <&'b str as Pattern<'a>>::Searcher {
        self[..].into_searcher(haystack)
    }

    #[inline]
    fn is_contained_in(self, haystack: &'a str) -> bool {
        self[..].is_contained_in(haystack)
    }

    #[inline]
    fn is_prefix_of(self, haystack: &'a str) -> bool {
        self[..].is_prefix_of(haystack)
    }

    #[inline]
    fn strip_prefix_of(self, haystack: &'a str) -> Option<&'a str> {
        self[..].strip_prefix_of(haystack)
    }

    #[inline]
    fn is_suffix_of(self, haystack: &'a str) -> bool {
        self[..].is_suffix_of(haystack)
    }

    #[inline]
    fn strip_suffix_of(self, haystack: &'a str) -> Option<&'a str> {
        self[..].strip_suffix_of(haystack)
    }
}
18130
18131#[stable(feature = "rust1", since = "1.0.0")]
18132impl PartialEq for String {
18133    #[inline]
18134    fn eq(&self, other: &String) -> bool {
18135        PartialEq::eq(&self[..], &other[..])
18136    }
18137    #[inline]
18138    fn ne(&self, other: &String) -> bool {
18139        PartialEq::ne(&self[..], &other[..])
18140    }
18141}
18142
// Generates a symmetric pair of `PartialEq` impls so that `$lhs == $rhs`
// and `$rhs == $lhs` both work, each comparing the values as `str` slices.
macro_rules! impl_eq {
    ($lhs:ty, $rhs: ty) => {
        #[stable(feature = "rust1", since = "1.0.0")]
        #[allow(unused_lifetimes)]
        impl<'a, 'b> PartialEq<$rhs> for $lhs {
            #[inline]
            fn eq(&self, other: &$rhs) -> bool {
                PartialEq::eq(&self[..], &other[..])
            }
            #[inline]
            fn ne(&self, other: &$rhs) -> bool {
                PartialEq::ne(&self[..], &other[..])
            }
        }

        // Mirror impl with the operand types swapped.
        #[stable(feature = "rust1", since = "1.0.0")]
        #[allow(unused_lifetimes)]
        impl<'a, 'b> PartialEq<$lhs> for $rhs {
            #[inline]
            fn eq(&self, other: &$lhs) -> bool {
                PartialEq::eq(&self[..], &other[..])
            }
            #[inline]
            fn ne(&self, other: &$lhs) -> bool {
                PartialEq::ne(&self[..], &other[..])
            }
        }
    };
}
18172
// Cross-type equality between the owned, borrowed and copy-on-write
// string types; each line expands to a symmetric pair of impls.
impl_eq! { String, str }
impl_eq! { String, &'a str }
impl_eq! { Cow<'a, str>, str }
impl_eq! { Cow<'a, str>, &'b str }
impl_eq! { Cow<'a, str>, String }
18178
#[stable(feature = "rust1", since = "1.0.0")]
impl Default for String {
    /// Creates an empty `String`.
    #[inline]
    fn default() -> String {
        String::new()
    }
}
18187
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Display for String {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `&**self` derefs to `str`; delegate to its `Display`.
        fmt::Display::fmt(&**self, f)
    }
}
18195
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Debug for String {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegates to `str`'s `Debug` (quoted, escaped output).
        fmt::Debug::fmt(&**self, f)
    }
}
18203
#[stable(feature = "rust1", since = "1.0.0")]
impl hash::Hash for String {
    #[inline]
    fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
        // Hash via the `str` deref so a `String` and an equal `&str`
        // produce the same hash (required for map lookups by `&str`).
        (**self).hash(hasher)
    }
}
18211
/// Implements the `+` operator for concatenating two strings.
///
/// This consumes the `String` on the left-hand side and re-uses its buffer (growing it if
/// necessary). This is done to avoid allocating a new `String` and copying the entire contents on
/// every operation, which would lead to *O*(*n*^2) running time when building an *n*-byte string by
/// repeated concatenation.
///
/// The string on the right-hand side is only borrowed; its contents are copied into the returned
/// `String`.
///
/// # Examples
///
/// Concatenating two `String`s takes the first by value and borrows the second:
///
/// ```
/// let a = String::from("hello");
/// let b = String::from(" world");
/// let c = a + &b;
/// // `a` is moved and can no longer be used here.
/// ```
///
/// If you want to keep using the first `String`, you can clone it and append to the clone instead:
///
/// ```
/// let a = String::from("hello");
/// let b = String::from(" world");
/// let c = a.clone() + &b;
/// // `a` is still valid here.
/// ```
///
/// Concatenating `&str` slices can be done by converting the first to a `String`:
///
/// ```
/// let a = "hello";
/// let b = " world";
/// let c = a.to_string() + b;
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
impl Add<&str> for String {
    type Output = String;

    #[inline]
    fn add(mut self, other: &str) -> String {
        // Append in place and hand back the (possibly re-allocated) buffer.
        self.push_str(other);
        self
    }
}
18259
/// Implements the `+=` operator for appending to a `String`.
///
/// This has the same behavior as the [`push_str`][String::push_str] method.
#[stable(feature = "stringaddassign", since = "1.12.0")]
impl AddAssign<&str> for String {
    #[inline]
    fn add_assign(&mut self, other: &str) {
        self.push_str(other);
    }
}
18270
// Indexing a `String` by byte ranges yields `&str` sub-slices. All variants
// delegate to `str` indexing through the `RangeFull` impl / deref, which is
// where slicing panics on non-boundary indices originate.
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::Range<usize>> for String {
    type Output = str;

    #[inline]
    fn index(&self, index: ops::Range<usize>) -> &str {
        &self[..][index]
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::RangeTo<usize>> for String {
    type Output = str;

    #[inline]
    fn index(&self, index: ops::RangeTo<usize>) -> &str {
        &self[..][index]
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::RangeFrom<usize>> for String {
    type Output = str;

    #[inline]
    fn index(&self, index: ops::RangeFrom<usize>) -> &str {
        &self[..][index]
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::RangeFull> for String {
    type Output = str;

    #[inline]
    fn index(&self, _index: ops::RangeFull) -> &str {
        // SAFETY: `self.vec` holds valid UTF-8 by `String`'s invariant.
        unsafe { str::from_utf8_unchecked(&self.vec) }
    }
}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl ops::Index<ops::RangeInclusive<usize>> for String {
    type Output = str;

    #[inline]
    fn index(&self, index: ops::RangeInclusive<usize>) -> &str {
        Index::index(&**self, index)
    }
}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl ops::Index<ops::RangeToInclusive<usize>> for String {
    type Output = str;

    #[inline]
    fn index(&self, index: ops::RangeToInclusive<usize>) -> &str {
        Index::index(&**self, index)
    }
}
18325
// Mutable counterparts of the `Index` impls above: byte-range indexing
// yields `&mut str`, again funnelled through the `RangeFull` impl.
#[stable(feature = "derefmut_for_string", since = "1.3.0")]
impl ops::IndexMut<ops::Range<usize>> for String {
    #[inline]
    fn index_mut(&mut self, index: ops::Range<usize>) -> &mut str {
        &mut self[..][index]
    }
}
#[stable(feature = "derefmut_for_string", since = "1.3.0")]
impl ops::IndexMut<ops::RangeTo<usize>> for String {
    #[inline]
    fn index_mut(&mut self, index: ops::RangeTo<usize>) -> &mut str {
        &mut self[..][index]
    }
}
#[stable(feature = "derefmut_for_string", since = "1.3.0")]
impl ops::IndexMut<ops::RangeFrom<usize>> for String {
    #[inline]
    fn index_mut(&mut self, index: ops::RangeFrom<usize>) -> &mut str {
        &mut self[..][index]
    }
}
#[stable(feature = "derefmut_for_string", since = "1.3.0")]
impl ops::IndexMut<ops::RangeFull> for String {
    #[inline]
    fn index_mut(&mut self, _index: ops::RangeFull) -> &mut str {
        // SAFETY: the bytes are valid UTF-8; `&mut str` only permits
        // UTF-8-preserving mutation, so the invariant is upheld.
        unsafe { str::from_utf8_unchecked_mut(&mut *self.vec) }
    }
}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl ops::IndexMut<ops::RangeInclusive<usize>> for String {
    #[inline]
    fn index_mut(&mut self, index: ops::RangeInclusive<usize>) -> &mut str {
        IndexMut::index_mut(&mut **self, index)
    }
}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl ops::IndexMut<ops::RangeToInclusive<usize>> for String {
    #[inline]
    fn index_mut(&mut self, index: ops::RangeToInclusive<usize>) -> &mut str {
        IndexMut::index_mut(&mut **self, index)
    }
}
18368
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Deref for String {
    type Target = str;

    #[inline]
    fn deref(&self) -> &str {
        // SAFETY: `self.vec` holds valid UTF-8 by `String`'s invariant.
        unsafe { str::from_utf8_unchecked(&self.vec) }
    }
}
18378
#[stable(feature = "derefmut_for_string", since = "1.3.0")]
impl ops::DerefMut for String {
    #[inline]
    fn deref_mut(&mut self) -> &mut str {
        // SAFETY: the bytes are valid UTF-8, and `&mut str` only allows
        // UTF-8-preserving mutation, so the invariant is upheld.
        unsafe { str::from_utf8_unchecked_mut(&mut *self.vec) }
    }
}
18386
/// A type alias for [`Infallible`].
///
/// This alias exists for backwards compatibility, and may be eventually deprecated.
///
/// [`Infallible`]: core::convert::Infallible
#[stable(feature = "str_parse_error", since = "1.5.0")]
pub type ParseError = core::convert::Infallible;

#[stable(feature = "rust1", since = "1.0.0")]
impl FromStr for String {
    // Infallible: copying a `&str` into an owned `String` cannot fail.
    type Err = core::convert::Infallible;
    #[inline]
    fn from_str(s: &str) -> Result<String, Self::Err> {
        Ok(String::from(s))
    }
}
18403
/// A trait for converting a value to a `String`.
///
/// This trait is automatically implemented for any type which implements the
/// [`Display`] trait. As such, `ToString` shouldn't be implemented directly:
/// [`Display`] should be implemented instead, and you get the `ToString`
/// implementation for free.
///
/// [`Display`]: fmt::Display
#[cfg_attr(not(test), rustc_diagnostic_item = "ToString")]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait ToString {
    /// Converts the given value to a `String`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let i = 5;
    /// let five = String::from("5");
    ///
    /// assert_eq!(five, i.to_string());
    /// ```
    // NOTE(review): this attribute appears to let the compiler suggest
    // `.to_string()` in diagnostics — internal rustc machinery.
    #[rustc_conversion_suggestion]
    #[stable(feature = "rust1", since = "1.0.0")]
    fn to_string(&self) -> String;
}
18431
/// # Panics
///
/// In this implementation, the `to_string` method panics
/// if the `Display` implementation returns an error.
/// This indicates an incorrect `Display` implementation
/// since `fmt::Write for String` never returns an error itself.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Display + ?Sized> ToString for T {
    // A common guideline is to not inline generic functions. However,
    // removing `#[inline]` from this method causes non-negligible regressions.
    // See <https://github.com/rust-lang/rust/pull/74852>, the last attempt
    // to try to remove it.
    #[inline]
    // `default` makes this method specializable: the `char`, `str`,
    // `Cow<'_, str>`, and `String` impls below override it with cheaper code.
    default fn to_string(&self) -> String {
        use fmt::Write;
        let mut buf = String::new();
        buf.write_fmt(format_args!("{}", self))
            .expect("a Display implementation returned an error unexpectedly");
        buf
    }
}
18453
18454#[stable(feature = "char_to_string_specialization", since = "1.46.0")]
18455impl ToString for char {
18456    #[inline]
18457    fn to_string(&self) -> String {
18458        String::from(self.encode_utf8(&mut [0; 4]))
18459    }
18460}
18461
18462#[stable(feature = "str_to_string_specialization", since = "1.9.0")]
18463impl ToString for str {
18464    #[inline]
18465    fn to_string(&self) -> String {
18466        String::from(self)
18467    }
18468}
18469
18470#[stable(feature = "cow_str_to_string_specialization", since = "1.17.0")]
18471impl ToString for Cow<'_, str> {
18472    #[inline]
18473    fn to_string(&self) -> String {
18474        self[..].to_owned()
18475    }
18476}
18477
18478#[stable(feature = "string_to_string_specialization", since = "1.17.0")]
18479impl ToString for String {
18480    #[inline]
18481    fn to_string(&self) -> String {
18482        self.to_owned()
18483    }
18484}
18485
18486#[stable(feature = "rust1", since = "1.0.0")]
18487impl AsRef<str> for String {
18488    #[inline]
18489    fn as_ref(&self) -> &str {
18490        self
18491    }
18492}
18493
18494#[stable(feature = "string_as_mut", since = "1.43.0")]
18495impl AsMut<str> for String {
18496    #[inline]
18497    fn as_mut(&mut self) -> &mut str {
18498        self
18499    }
18500}
18501
18502#[stable(feature = "rust1", since = "1.0.0")]
18503impl AsRef<[u8]> for String {
18504    #[inline]
18505    fn as_ref(&self) -> &[u8] {
18506        self.as_bytes()
18507    }
18508}
18509
18510#[stable(feature = "rust1", since = "1.0.0")]
18511impl From<&str> for String {
18512    #[inline]
18513    fn from(s: &str) -> String {
18514        s.to_owned()
18515    }
18516}
18517
18518#[stable(feature = "from_mut_str_for_string", since = "1.44.0")]
18519impl From<&mut str> for String {
18520    /// Converts a `&mut str` into a `String`.
18521    ///
18522    /// The result is allocated on the heap.
18523    #[inline]
18524    fn from(s: &mut str) -> String {
18525        s.to_owned()
18526    }
18527}
18528
18529#[stable(feature = "from_ref_string", since = "1.35.0")]
18530impl From<&String> for String {
18531    #[inline]
18532    fn from(s: &String) -> String {
18533        s.clone()
18534    }
18535}
18536
// note: test pulls in libstd, which causes errors here
#[cfg(not(test))]
#[stable(feature = "string_from_box", since = "1.18.0")]
impl From<Box<str>> for String {
    /// Converts the given boxed `str` slice to a `String`.
    /// It is notable that the `str` slice is owned.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s1: String = String::from("hello world");
    /// let s2: Box<str> = s1.into_boxed_str();
    /// let s3: String = String::from(s2);
    ///
    /// assert_eq!("hello world", s3)
    /// ```
    fn from(s: Box<str>) -> String {
        // NOTE: `Box<str>::into_string` converts in place, reusing the
        // existing allocation rather than copying the bytes.
        s.into_string()
    }
}

#[stable(feature = "box_from_str", since = "1.20.0")]
impl From<String> for Box<str> {
    /// Converts the given `String` to a boxed `str` slice that is owned.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s1: String = String::from("hello world");
    /// let s2: Box<str> = Box::from(s1);
    /// let s3: String = String::from(s2);
    ///
    /// assert_eq!("hello world", s3)
    /// ```
    fn from(s: String) -> Box<str> {
        // NOTE(review): `into_boxed_str` presumably shrinks excess capacity
        // before boxing — confirm against `String::into_boxed_str` docs.
        s.into_boxed_str()
    }
}
18579
18580#[stable(feature = "string_from_cow_str", since = "1.14.0")]
18581impl<'a> From<Cow<'a, str>> for String {
18582    fn from(s: Cow<'a, str>) -> String {
18583        s.into_owned()
18584    }
18585}
18586
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> From<&'a str> for Cow<'a, str> {
    /// Converts a string slice into a Borrowed variant.
    /// No heap allocation is performed, and the string
    /// is not copied.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::borrow::Cow;
    /// assert_eq!(Cow::from("eggplant"), Cow::Borrowed("eggplant"));
    /// ```
    #[inline]
    fn from(s: &'a str) -> Cow<'a, str> {
        // The `Cow` merely wraps the incoming reference.
        Cow::Borrowed(s)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> From<String> for Cow<'a, str> {
    /// Converts a String into an Owned variant.
    /// No heap allocation is performed, and the string
    /// is not copied.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::borrow::Cow;
    /// let s = "eggplant".to_string();
    /// let s2 = "eggplant".to_string();
    /// assert_eq!(Cow::from(s), Cow::<'static, str>::Owned(s2));
    /// ```
    #[inline]
    fn from(s: String) -> Cow<'a, str> {
        // The `Cow` takes ownership of the `String`; the buffer moves as-is.
        Cow::Owned(s)
    }
}

#[stable(feature = "cow_from_string_ref", since = "1.28.0")]
impl<'a> From<&'a String> for Cow<'a, str> {
    /// Converts a String reference into a Borrowed variant.
    /// No heap allocation is performed, and the string
    /// is not copied.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::borrow::Cow;
    /// let s = "eggplant".to_string();
    /// assert_eq!(Cow::from(&s), Cow::Borrowed("eggplant"));
    /// ```
    #[inline]
    fn from(s: &'a String) -> Cow<'a, str> {
        // Borrow the string's contents; the `String` itself stays put.
        Cow::Borrowed(s.as_str())
    }
}
18643
18644#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
18645impl<'a> FromIterator<char> for Cow<'a, str> {
18646    fn from_iter<I: IntoIterator<Item = char>>(it: I) -> Cow<'a, str> {
18647        Cow::Owned(FromIterator::from_iter(it))
18648    }
18649}
18650
18651#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
18652impl<'a, 'b> FromIterator<&'b str> for Cow<'a, str> {
18653    fn from_iter<I: IntoIterator<Item = &'b str>>(it: I) -> Cow<'a, str> {
18654        Cow::Owned(FromIterator::from_iter(it))
18655    }
18656}
18657
18658#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
18659impl<'a> FromIterator<String> for Cow<'a, str> {
18660    fn from_iter<I: IntoIterator<Item = String>>(it: I) -> Cow<'a, str> {
18661        Cow::Owned(FromIterator::from_iter(it))
18662    }
18663}
18664
#[stable(feature = "from_string_for_vec_u8", since = "1.14.0")]
impl From<String> for Vec<u8> {
    /// Converts the given `String` to a vector `Vec` that holds values of type `u8`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s1 = String::from("hello world");
    /// let v1 = Vec::from(s1);
    ///
    /// for b in v1 {
    ///     println!("{}", b);
    /// }
    /// ```
    fn from(string: String) -> Vec<u8> {
        // NOTE: `into_bytes` hands back the `String`'s internal buffer
        // without copying (see `String::into_bytes`).
        string.into_bytes()
    }
}
18685
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Write for String {
    #[inline]
    fn write_str(&mut self, s: &str) -> fmt::Result {
        // Appending to a `String` cannot fail, so always report success.
        self.push_str(s);
        Ok(())
    }

    #[inline]
    fn write_char(&mut self, c: char) -> fmt::Result {
        // Likewise infallible for a single char.
        self.push(c);
        Ok(())
    }
}
18700
/// A draining iterator for `String`.
///
/// This struct is created by the [`drain`] method on [`String`]. See its
/// documentation for more.
///
/// [`drain`]: String::drain
#[stable(feature = "drain", since = "1.6.0")]
pub struct Drain<'a> {
    /// Will be used as &'a mut String in the destructor
    // NOTE(review): stored as a raw pointer rather than `&'a mut String`,
    // presumably so it does not alias the borrow held by `iter` — confirm.
    string: *mut String,
    /// Start of part to remove
    start: usize,
    /// End of part to remove
    end: usize,
    /// Current remaining range to remove
    iter: Chars<'a>,
}
18718
#[stable(feature = "collection_debug", since = "1.17.0")]
impl fmt::Debug for Drain<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Shows the not-yet-drained remainder, e.g. `Drain("abc")`.
        f.debug_tuple("Drain").field(&self.as_str()).finish()
    }
}

#[stable(feature = "drain", since = "1.6.0")]
// SAFETY: `Drain` is morally a `&mut String` plus a `Chars` view into it
// (see the field docs above); the raw pointer — which is what suppresses the
// auto traits — is only dereferenced in `drop`, where access is exclusive.
unsafe impl Sync for Drain<'_> {}
#[stable(feature = "drain", since = "1.6.0")]
// SAFETY: same reasoning as the `Sync` impl above.
unsafe impl Send for Drain<'_> {}
18730
#[stable(feature = "drain", since = "1.6.0")]
impl Drop for Drain<'_> {
    fn drop(&mut self) {
        // The actual removal of the drained range happens here, once
        // iteration is finished (or the `Drain` is dropped early).
        // SAFETY: `self.string` originates from a live `&'a mut String`
        // (see the field doc) and is only dereferenced here, in the destructor.
        unsafe {
            // Use Vec::drain. "Reaffirm" the bounds checks to avoid
            // panic code being inserted again.
            let self_vec = (*self.string).as_mut_vec();
            if self.start <= self.end && self.end <= self_vec.len() {
                self_vec.drain(self.start..self.end);
            }
        }
    }
}
18744
impl<'a> Drain<'a> {
    /// Returns the remaining (sub)string of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(string_drain_as_str)]
    /// let mut s = String::from("abc");
    /// let mut drain = s.drain(..);
    /// assert_eq!(drain.as_str(), "abc");
    /// let _ = drain.next().unwrap();
    /// assert_eq!(drain.as_str(), "bc");
    /// ```
    #[unstable(feature = "string_drain_as_str", issue = "76905")] // Note: uncomment AsRef impls below when stabilizing.
    pub fn as_str(&self) -> &str {
        // `iter` always views exactly the not-yet-yielded part of the
        // drained range (see the field doc on `Drain`).
        self.iter.as_str()
    }
}
18763
18764// Uncomment when stabilizing `string_drain_as_str`.
18765// #[unstable(feature = "string_drain_as_str", issue = "76905")]
18766// impl<'a> AsRef<str> for Drain<'a> {
18767//     fn as_ref(&self) -> &str {
18768//         self.as_str()
18769//     }
18770// }
18771//
18772// #[unstable(feature = "string_drain_as_str", issue = "76905")]
18773// impl<'a> AsRef<[u8]> for Drain<'a> {
18774//     fn as_ref(&self) -> &[u8] {
18775//         self.as_str().as_bytes()
18776//     }
18777// }
18778
18779#[stable(feature = "drain", since = "1.6.0")]
18780impl Iterator for Drain<'_> {
18781    type Item = char;
18782
18783    #[inline]
18784    fn next(&mut self) -> Option<char> {
18785        self.iter.next()
18786    }
18787
18788    fn size_hint(&self) -> (usize, Option<usize>) {
18789        self.iter.size_hint()
18790    }
18791
18792    #[inline]
18793    fn last(mut self) -> Option<char> {
18794        self.next_back()
18795    }
18796}
18797
#[stable(feature = "drain", since = "1.6.0")]
impl DoubleEndedIterator for Drain<'_> {
    #[inline]
    fn next_back(&mut self) -> Option<char> {
        // Delegate to the inner `Chars`, which is double-ended.
        self.iter.next_back()
    }
}

#[stable(feature = "fused", since = "1.26.0")]
// `Drain` delegates to `Chars`, which is itself a fused iterator.
impl FusedIterator for Drain<'_> {}
18808
18809#[stable(feature = "from_char_for_string", since = "1.46.0")]
18810impl From<char> for String {
18811    #[inline]
18812    fn from(c: char) -> Self {
18813        c.to_string()
18814    }
18815}
18816//! A module for working with borrowed data.
18817
18818#![stable(feature = "rust1", since = "1.0.0")]
18819
18820use core::cmp::Ordering;
18821use core::hash::{Hash, Hasher};
18822use core::ops::{Add, AddAssign, Deref};
18823
18824#[stable(feature = "rust1", since = "1.0.0")]
18825pub use core::borrow::{Borrow, BorrowMut};
18826
18827use crate::fmt;
18828use crate::string::String;
18829
18830use Cow::*;
18831
18832#[stable(feature = "rust1", since = "1.0.0")]
18833impl<'a, B: ?Sized> Borrow<B> for Cow<'a, B>
18834where
18835    B: ToOwned,
18836    <B as ToOwned>::Owned: 'a,
18837{
18838    fn borrow(&self) -> &B {
18839        &**self
18840    }
18841}
18842
/// A generalization of `Clone` to borrowed data.
///
/// Some types make it possible to go from borrowed to owned, usually by
/// implementing the `Clone` trait. But `Clone` works only for going from `&T`
/// to `T`. The `ToOwned` trait generalizes `Clone` to construct owned data
/// from any borrow of a given type.
#[cfg_attr(not(test), rustc_diagnostic_item = "ToOwned")]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait ToOwned {
    /// The resulting type after obtaining ownership.
    #[stable(feature = "rust1", since = "1.0.0")]
    type Owned: Borrow<Self>;

    /// Creates owned data from borrowed data, usually by cloning.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s: &str = "a";
    /// let ss: String = s.to_owned();
    ///
    /// let v: &[i32] = &[1, 2];
    /// let vv: Vec<i32> = v.to_owned();
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use = "cloning is often expensive and is not expected to have side effects"]
    fn to_owned(&self) -> Self::Owned;

    /// Uses borrowed data to replace owned data, usually by cloning.
    ///
    /// This is a borrow-generalized version of `Clone::clone_from`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// # #![feature(toowned_clone_into)]
    /// let mut s: String = String::new();
    /// "hello".clone_into(&mut s);
    ///
    /// let mut v: Vec<i32> = Vec::new();
    /// [1, 2][..].clone_into(&mut v);
    /// ```
    #[unstable(feature = "toowned_clone_into", reason = "recently added", issue = "41263")]
    // Default: overwrite the target wholesale. Implementations may override
    // this to reuse `target`'s existing resources.
    fn clone_into(&self, target: &mut Self::Owned) {
        *target = self.to_owned();
    }
}
18894
#[stable(feature = "rust1", since = "1.0.0")]
// Blanket impl: any `Clone` type owns itself, so `to_owned` is just `clone`.
impl<T> ToOwned for T
where
    T: Clone,
{
    type Owned = T;
    fn to_owned(&self) -> T {
        self.clone()
    }

    fn clone_into(&self, target: &mut T) {
        // `clone_from` may reuse `target`'s existing resources.
        target.clone_from(self);
    }
}
18909
18910/// A clone-on-write smart pointer.
18911///
18912/// The type `Cow` is a smart pointer providing clone-on-write functionality: it
18913/// can enclose and provide immutable access to borrowed data, and clone the
18914/// data lazily when mutation or ownership is required. The type is designed to
18915/// work with general borrowed data via the `Borrow` trait.
18916///
18917/// `Cow` implements `Deref`, which means that you can call
18918/// non-mutating methods directly on the data it encloses. If mutation
18919/// is desired, `to_mut` will obtain a mutable reference to an owned
18920/// value, cloning if necessary.
18921///
18922/// If you need reference-counting pointers, note that
18923/// [`Rc::make_mut`][crate::rc::Rc::make_mut] and
18924/// [`Arc::make_mut`][crate::sync::Arc::make_mut] can provide clone-on-write
18925/// functionality as well.
18926///
18927/// # Examples
18928///
18929/// ```
18930/// use std::borrow::Cow;
18931///
18932/// fn abs_all(input: &mut Cow<[i32]>) {
18933///     for i in 0..input.len() {
18934///         let v = input[i];
18935///         if v < 0 {
18936///             // Clones into a vector if not already owned.
18937///             input.to_mut()[i] = -v;
18938///         }
18939///     }
18940/// }
18941///
18942/// // No clone occurs because `input` doesn't need to be mutated.
18943/// let slice = [0, 1, 2];
18944/// let mut input = Cow::from(&slice[..]);
18945/// abs_all(&mut input);
18946///
18947/// // Clone occurs because `input` needs to be mutated.
18948/// let slice = [-1, 0, 1];
18949/// let mut input = Cow::from(&slice[..]);
18950/// abs_all(&mut input);
18951///
18952/// // No clone occurs because `input` is already owned.
18953/// let mut input = Cow::from(vec![-1, 0, 1]);
18954/// abs_all(&mut input);
18955/// ```
18956///
18957/// Another example showing how to keep `Cow` in a struct:
18958///
18959/// ```
18960/// use std::borrow::Cow;
18961///
18962/// struct Items<'a, X: 'a> where [X]: ToOwned<Owned = Vec<X>> {
18963///     values: Cow<'a, [X]>,
18964/// }
18965///
18966/// impl<'a, X: Clone + 'a> Items<'a, X> where [X]: ToOwned<Owned = Vec<X>> {
18967///     fn new(v: Cow<'a, [X]>) -> Self {
18968///         Items { values: v }
18969///     }
18970/// }
18971///
18972/// // Creates a container from borrowed values of a slice
18973/// let readonly = [1, 2];
18974/// let borrowed = Items::new((&readonly[..]).into());
18975/// match borrowed {
18976///     Items { values: Cow::Borrowed(b) } => println!("borrowed {:?}", b),
18977///     _ => panic!("expect borrowed value"),
18978/// }
18979///
18980/// let mut clone_on_write = borrowed;
18981/// // Mutates the data from slice into owned vec and pushes a new value on top
18982/// clone_on_write.values.to_mut().push(3);
18983/// println!("clone_on_write = {:?}", clone_on_write.values);
18984///
/// // The data was mutated. Let's check it out.
18986/// match clone_on_write {
18987///     Items { values: Cow::Owned(_) } => println!("clone_on_write contains owned data"),
18988///     _ => panic!("expect owned data"),
18989/// }
18990/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Cow<'a, B: ?Sized + 'a>
where
    B: ToOwned,
{
    /// Borrowed data. Mutation (via `to_mut`) first requires a clone.
    #[stable(feature = "rust1", since = "1.0.0")]
    Borrowed(#[stable(feature = "rust1", since = "1.0.0")] &'a B),

    /// Owned data, of the `ToOwned::Owned` form of `B`.
    #[stable(feature = "rust1", since = "1.0.0")]
    Owned(#[stable(feature = "rust1", since = "1.0.0")] <B as ToOwned>::Owned),
}
19004
19005#[stable(feature = "rust1", since = "1.0.0")]
19006impl<B: ?Sized + ToOwned> Clone for Cow<'_, B> {
19007    fn clone(&self) -> Self {
19008        match *self {
19009            Borrowed(b) => Borrowed(b),
19010            Owned(ref o) => {
19011                let b: &B = o.borrow();
19012                Owned(b.to_owned())
19013            }
19014        }
19015    }
19016
19017    fn clone_from(&mut self, source: &Self) {
19018        match (self, source) {
19019            (&mut Owned(ref mut dest), &Owned(ref o)) => o.borrow().clone_into(dest),
19020            (t, s) => *t = s.clone(),
19021        }
19022    }
19023}
19024
impl<B: ?Sized + ToOwned> Cow<'_, B> {
    /// Returns true if the data is borrowed, i.e. if `to_mut` would require additional work.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(cow_is_borrowed)]
    /// use std::borrow::Cow;
    ///
    /// let cow = Cow::Borrowed("moo");
    /// assert!(cow.is_borrowed());
    ///
    /// let bull: Cow<'_, str> = Cow::Owned("...moo?".to_string());
    /// assert!(!bull.is_borrowed());
    /// ```
    #[unstable(feature = "cow_is_borrowed", issue = "65143")]
    #[rustc_const_unstable(feature = "const_cow_is_borrowed", issue = "65143")]
    // A plain `match` (rather than `matches!`) keeps this usable as a const fn.
    pub const fn is_borrowed(&self) -> bool {
        match *self {
            Borrowed(_) => true,
            Owned(_) => false,
        }
    }

    /// Returns true if the data is owned, i.e. if `to_mut` would be a no-op.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(cow_is_borrowed)]
    /// use std::borrow::Cow;
    ///
    /// let cow: Cow<'_, str> = Cow::Owned("moo".to_string());
    /// assert!(cow.is_owned());
    ///
    /// let bull = Cow::Borrowed("...moo?");
    /// assert!(!bull.is_owned());
    /// ```
    #[unstable(feature = "cow_is_borrowed", issue = "65143")]
    #[rustc_const_unstable(feature = "const_cow_is_borrowed", issue = "65143")]
    pub const fn is_owned(&self) -> bool {
        !self.is_borrowed()
    }

    /// Acquires a mutable reference to the owned form of the data.
    ///
    /// Clones the data if it is not already owned.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::borrow::Cow;
    ///
    /// let mut cow = Cow::Borrowed("foo");
    /// cow.to_mut().make_ascii_uppercase();
    ///
    /// assert_eq!(
    ///   cow,
    ///   Cow::Owned(String::from("FOO")) as Cow<str>
    /// );
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn to_mut(&mut self) -> &mut <B as ToOwned>::Owned {
        match *self {
            Borrowed(borrowed) => {
                // Promote to the owned form, then hand out a reference to it.
                *self = Owned(borrowed.to_owned());
                match *self {
                    // We just stored `Owned`, so `Borrowed` is impossible here.
                    Borrowed(..) => unreachable!(),
                    Owned(ref mut owned) => owned,
                }
            }
            Owned(ref mut owned) => owned,
        }
    }

    /// Extracts the owned data.
    ///
    /// Clones the data if it is not already owned.
    ///
    /// # Examples
    ///
    /// Calling `into_owned` on a `Cow::Borrowed` clones the underlying data
    /// and becomes a `Cow::Owned`:
    ///
    /// ```
    /// use std::borrow::Cow;
    ///
    /// let s = "Hello world!";
    /// let cow = Cow::Borrowed(s);
    ///
    /// assert_eq!(
    ///   cow.into_owned(),
    ///   String::from(s)
    /// );
    /// ```
    ///
    /// Calling `into_owned` on a `Cow::Owned` is a no-op:
    ///
    /// ```
    /// use std::borrow::Cow;
    ///
    /// let s = "Hello world!";
    /// let cow: Cow<str> = Cow::Owned(String::from(s));
    ///
    /// assert_eq!(
    ///   cow.into_owned(),
    ///   String::from(s)
    /// );
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn into_owned(self) -> <B as ToOwned>::Owned {
        match self {
            Borrowed(borrowed) => borrowed.to_owned(),
            // Already owned: move the data out without cloning.
            Owned(owned) => owned,
        }
    }
}
19142
19143#[stable(feature = "rust1", since = "1.0.0")]
19144impl<B: ?Sized + ToOwned> Deref for Cow<'_, B> {
19145    type Target = B;
19146
19147    fn deref(&self) -> &B {
19148        match *self {
19149            Borrowed(borrowed) => borrowed,
19150            Owned(ref owned) => owned.borrow(),
19151        }
19152    }
19153}
19154
19155#[stable(feature = "rust1", since = "1.0.0")]
19156impl<B: ?Sized> Eq for Cow<'_, B> where B: Eq + ToOwned {}
19157
19158#[stable(feature = "rust1", since = "1.0.0")]
19159impl<B: ?Sized> Ord for Cow<'_, B>
19160where
19161    B: Ord + ToOwned,
19162{
19163    #[inline]
19164    fn cmp(&self, other: &Self) -> Ordering {
19165        Ord::cmp(&**self, &**other)
19166    }
19167}
19168
19169#[stable(feature = "rust1", since = "1.0.0")]
19170impl<'a, 'b, B: ?Sized, C: ?Sized> PartialEq<Cow<'b, C>> for Cow<'a, B>
19171where
19172    B: PartialEq<C> + ToOwned,
19173    C: ToOwned,
19174{
19175    #[inline]
19176    fn eq(&self, other: &Cow<'b, C>) -> bool {
19177        PartialEq::eq(&**self, &**other)
19178    }
19179}
19180
19181#[stable(feature = "rust1", since = "1.0.0")]
19182impl<'a, B: ?Sized> PartialOrd for Cow<'a, B>
19183where
19184    B: PartialOrd + ToOwned,
19185{
19186    #[inline]
19187    fn partial_cmp(&self, other: &Cow<'a, B>) -> Option<Ordering> {
19188        PartialOrd::partial_cmp(&**self, &**other)
19189    }
19190}
19191
19192#[stable(feature = "rust1", since = "1.0.0")]
19193impl<B: ?Sized> fmt::Debug for Cow<'_, B>
19194where
19195    B: fmt::Debug + ToOwned<Owned: fmt::Debug>,
19196{
19197    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
19198        match *self {
19199            Borrowed(ref b) => fmt::Debug::fmt(b, f),
19200            Owned(ref o) => fmt::Debug::fmt(o, f),
19201        }
19202    }
19203}
19204
19205#[stable(feature = "rust1", since = "1.0.0")]
19206impl<B: ?Sized> fmt::Display for Cow<'_, B>
19207where
19208    B: fmt::Display + ToOwned<Owned: fmt::Display>,
19209{
19210    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
19211        match *self {
19212            Borrowed(ref b) => fmt::Display::fmt(b, f),
19213            Owned(ref o) => fmt::Display::fmt(o, f),
19214        }
19215    }
19216}
19217
19218#[stable(feature = "default", since = "1.11.0")]
19219impl<B: ?Sized> Default for Cow<'_, B>
19220where
19221    B: ToOwned<Owned: Default>,
19222{
19223    /// Creates an owned Cow<'a, B> with the default value for the contained owned value.
19224    fn default() -> Self {
19225        Owned(<B as ToOwned>::Owned::default())
19226    }
19227}
19228
19229#[stable(feature = "rust1", since = "1.0.0")]
19230impl<B: ?Sized> Hash for Cow<'_, B>
19231where
19232    B: Hash + ToOwned,
19233{
19234    #[inline]
19235    fn hash<H: Hasher>(&self, state: &mut H) {
19236        Hash::hash(&**self, state)
19237    }
19238}
19239
19240#[stable(feature = "rust1", since = "1.0.0")]
19241impl<T: ?Sized + ToOwned> AsRef<T> for Cow<'_, T> {
19242    fn as_ref(&self) -> &T {
19243        self
19244    }
19245}
19246
19247#[stable(feature = "cow_add", since = "1.14.0")]
19248impl<'a> Add<&'a str> for Cow<'a, str> {
19249    type Output = Cow<'a, str>;
19250
19251    #[inline]
19252    fn add(mut self, rhs: &'a str) -> Self::Output {
19253        self += rhs;
19254        self
19255    }
19256}
19257
19258#[stable(feature = "cow_add", since = "1.14.0")]
19259impl<'a> Add<Cow<'a, str>> for Cow<'a, str> {
19260    type Output = Cow<'a, str>;
19261
19262    #[inline]
19263    fn add(mut self, rhs: Cow<'a, str>) -> Self::Output {
19264        self += rhs;
19265        self
19266    }
19267}
19268
#[stable(feature = "cow_add", since = "1.14.0")]
impl<'a> AddAssign<&'a str> for Cow<'a, str> {
    fn add_assign(&mut self, rhs: &'a str) {
        if self.is_empty() {
            // Empty lhs: just point at `rhs`; no allocation needed.
            *self = Cow::Borrowed(rhs)
        } else if !rhs.is_empty() {
            if let Cow::Borrowed(lhs) = *self {
                // Promote to owned, reserving exactly the final length up front.
                let mut s = String::with_capacity(lhs.len() + rhs.len());
                s.push_str(lhs);
                *self = Cow::Owned(s);
            }
            // Now guaranteed owned; append in place.
            self.to_mut().push_str(rhs);
        }
    }
}

#[stable(feature = "cow_add", since = "1.14.0")]
impl<'a> AddAssign<Cow<'a, str>> for Cow<'a, str> {
    fn add_assign(&mut self, rhs: Cow<'a, str>) {
        if self.is_empty() {
            // Empty lhs: take `rhs` wholesale, preserving its borrowed/owned state.
            *self = rhs
        } else if !rhs.is_empty() {
            if let Cow::Borrowed(lhs) = *self {
                // Promote to owned, reserving exactly the final length up front.
                let mut s = String::with_capacity(lhs.len() + rhs.len());
                s.push_str(lhs);
                *self = Cow::Owned(s);
            }
            self.to_mut().push_str(&rhs);
        }
    }
}
19300//! # The Rust core allocation and collections library
19301//!
19302//! This library provides smart pointers and collections for managing
19303//! heap-allocated values.
19304//!
19305//! This library, like libcore, normally doesn’t need to be used directly
19306//! since its contents are re-exported in the [`std` crate](../std/index.html).
19307//! Crates that use the `#![no_std]` attribute however will typically
19308//! not depend on `std`, so they’d use this crate instead.
19309//!
19310//! ## Boxed values
19311//!
19312//! The [`Box`] type is a smart pointer type. There can only be one owner of a
19313//! [`Box`], and the owner can decide to mutate the contents, which live on the
19314//! heap.
19315//!
19316//! This type can be sent among threads efficiently as the size of a `Box` value
19317//! is the same as that of a pointer. Tree-like data structures are often built
19318//! with boxes because each node often has only one owner, the parent.
19319//!
19320//! ## Reference counted pointers
19321//!
19322//! The [`Rc`] type is a non-threadsafe reference-counted pointer type intended
19323//! for sharing memory within a thread. An [`Rc`] pointer wraps a type, `T`, and
19324//! only allows access to `&T`, a shared reference.
19325//!
19326//! This type is useful when inherited mutability (such as using [`Box`]) is too
19327//! constraining for an application, and is often paired with the [`Cell`] or
19328//! [`RefCell`] types in order to allow mutation.
19329//!
19330//! ## Atomically reference counted pointers
19331//!
19332//! The [`Arc`] type is the threadsafe equivalent of the [`Rc`] type. It
19333//! provides all the same functionality of [`Rc`], except it requires that the
19334//! contained type `T` is shareable. Additionally, [`Arc<T>`][`Arc`] is itself
19335//! sendable while [`Rc<T>`][`Rc`] is not.
19336//!
19337//! This type allows for shared access to the contained data, and is often
19338//! paired with synchronization primitives such as mutexes to allow mutation of
19339//! shared resources.
19340//!
19341//! ## Collections
19342//!
19343//! Implementations of the most common general purpose data structures are
19344//! defined in this library. They are re-exported through the
19345//! [standard collections library](../std/collections/index.html).
19346//!
19347//! ## Heap interfaces
19348//!
19349//! The [`alloc`](alloc/index.html) module defines the low-level interface to the
19350//! default global allocator. It is not compatible with the libc allocator API.
19351//!
19352//! [`Arc`]: sync
19353//! [`Box`]: boxed
19354//! [`Cell`]: core::cell
19355//! [`Rc`]: rc
19356//! [`RefCell`]: core::cell
19357
19358#![allow(unused_attributes)]
19359#![stable(feature = "alloc", since = "1.36.0")]
19360#![doc(
19361    html_root_url = "https://doc.rust-lang.org/nightly/",
19362    html_playground_url = "https://play.rust-lang.org/",
19363    issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
19364    test(no_crate_inject, attr(allow(unused_variables), deny(warnings)))
19365)]
19366#![no_std]
19367#![needs_allocator]
19368#![warn(deprecated_in_future)]
19369#![warn(missing_docs)]
19370#![warn(missing_debug_implementations)]
19371#![allow(explicit_outlives_requirements)]
19372#![deny(unsafe_op_in_unsafe_fn)]
19373#![feature(rustc_allow_const_fn_unstable)]
19374#![cfg_attr(not(test), feature(generator_trait))]
19375#![cfg_attr(test, feature(test))]
19376#![cfg_attr(test, feature(new_uninit))]
19377#![feature(allocator_api)]
19378#![feature(array_chunks)]
19379#![feature(array_methods)]
19380#![feature(array_windows)]
19381#![feature(allow_internal_unstable)]
19382#![feature(arbitrary_self_types)]
19383#![feature(async_stream)]
19384#![feature(box_patterns)]
19385#![feature(box_syntax)]
19386#![feature(cfg_sanitize)]
19387#![feature(cfg_target_has_atomic)]
19388#![feature(coerce_unsized)]
19389#![feature(const_btree_new)]
19390#![cfg_attr(bootstrap, feature(const_fn))]
19391#![cfg_attr(not(bootstrap), feature(const_fn_trait_bound))]
19392#![feature(cow_is_borrowed)]
19393#![feature(const_cow_is_borrowed)]
19394#![feature(destructuring_assignment)]
19395#![feature(dispatch_from_dyn)]
19396#![feature(core_intrinsics)]
19397#![feature(dropck_eyepatch)]
19398#![feature(exact_size_is_empty)]
19399#![feature(exclusive_range_pattern)]
19400#![feature(extend_one)]
19401#![feature(fmt_internals)]
19402#![feature(fn_traits)]
19403#![feature(fundamental)]
19404#![feature(inplace_iteration)]
19405// Technically, this is a bug in rustdoc: rustdoc sees the documentation on `#[lang = slice_alloc]`
19406// blocks is for `&[T]`, which also has documentation using this feature in `core`, and gets mad
19407// that the feature-gate isn't enabled. Ideally, it wouldn't check for the feature gate for docs
19408// from other crates, but since this can only appear for lang items, it doesn't seem worth fixing.
19409#![feature(intra_doc_pointers)]
19410#![feature(iter_zip)]
19411#![feature(lang_items)]
19412#![feature(layout_for_ptr)]
19413#![feature(maybe_uninit_ref)]
19414#![feature(negative_impls)]
19415#![feature(never_type)]
19416#![feature(nll)]
19417#![feature(nonnull_slice_from_raw_parts)]
19418#![feature(auto_traits)]
19419#![feature(option_result_unwrap_unchecked)]
19420#![cfg_attr(bootstrap, feature(or_patterns))]
19421#![feature(pattern)]
19422#![feature(ptr_internals)]
19423#![feature(rustc_attrs)]
19424#![feature(receiver_trait)]
19425#![feature(min_specialization)]
19426#![feature(set_ptr_value)]
19427#![feature(slice_ptr_get)]
19428#![feature(slice_ptr_len)]
19429#![feature(slice_range)]
19430#![feature(staged_api)]
19431#![feature(str_internals)]
19432#![feature(trusted_len)]
19433#![feature(unboxed_closures)]
19434#![feature(unicode_internals)]
19435#![feature(unsize)]
19436#![feature(unsized_fn_params)]
19437#![feature(allocator_internals)]
19438#![feature(slice_partition_dedup)]
19439#![feature(maybe_uninit_extra, maybe_uninit_slice, maybe_uninit_uninit_array)]
19440#![feature(alloc_layout_extra)]
19441#![feature(trusted_random_access)]
19442#![feature(try_trait)]
19443#![feature(min_type_alias_impl_trait)]
19444#![feature(associated_type_bounds)]
19445#![feature(slice_group_by)]
19446#![feature(decl_macro)]
19447// Allow testing this library
19448
19449#[cfg(test)]
19450#[macro_use]
19451extern crate std;
19452#[cfg(test)]
19453extern crate test;
19454
19455// Module with internal macros used by other modules (needs to be included before other modules).
19456#[macro_use]
19457mod macros;
19458
19459// Heaps provided for low-level allocation strategies
19460
19461pub mod alloc;
19462
19463// Primitive types using the heaps above
19464
// Need to conditionally define the mod from `boxed.rs` to avoid
// duplicating the lang-items when building in test cfg; but also need
// to allow code to have `use boxed::Box;` declarations.
#[cfg(not(test))]
pub mod boxed;
#[cfg(test)]
mod boxed {
    // Under `cfg(test)` this crate links against `std` (see the
    // `extern crate std` above), so simply re-use std's `Box`.
    pub use std::boxed::Box;
}
19474pub mod borrow;
19475pub mod collections;
19476pub mod fmt;
19477pub mod prelude;
19478pub mod raw_vec;
19479pub mod rc;
19480pub mod slice;
19481pub mod str;
19482pub mod string;
19483#[cfg(target_has_atomic = "ptr")]
19484pub mod sync;
19485#[cfg(target_has_atomic = "ptr")]
19486pub mod task;
19487#[cfg(test)]
19488mod tests;
19489pub mod vec;
19490
#[doc(hidden)]
#[unstable(feature = "liballoc_internals", issue = "none", reason = "implementation detail")]
pub mod __export {
    // Hidden re-export; presumably referenced as `$crate::__export::format_args`
    // by this crate's macros — confirm against the `macros` module.
    pub use core::format_args;
}
19496//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
19497//! Counted'.
19498//!
19499//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
19500//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
19501//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
19502//! given allocation is destroyed, the value stored in that allocation (often
19503//! referred to as "inner value") is also dropped.
19504//!
19505//! Shared references in Rust disallow mutation by default, and [`Rc`]
19506//! is no exception: you cannot generally obtain a mutable reference to
19507//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
19508//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
19509//! inside an `Rc`][mutability].
19510//!
19511//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
19512//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
19513//! does not implement [`Send`][send]. As a result, the Rust compiler
19514//! will check *at compile time* that you are not sending [`Rc`]s between
19515//! threads. If you need multi-threaded, atomic reference counting, use
19516//! [`sync::Arc`][arc].
19517//!
19518//! The [`downgrade`][downgrade] method can be used to create a non-owning
19519//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
19520//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
19521//! already been dropped. In other words, `Weak` pointers do not keep the value
19522//! inside the allocation alive; however, they *do* keep the allocation
19523//! (the backing store for the inner value) alive.
19524//!
19525//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
19526//! [`Weak`] is used to break cycles. For example, a tree could have strong
19527//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
19528//! children back to their parents.
19529//!
19530//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
19531//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
19532//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
19533//! functions, called using [fully qualified syntax]:
19534//!
19535//! ```
19536//! use std::rc::Rc;
19537//!
19538//! let my_rc = Rc::new(());
19539//! Rc::downgrade(&my_rc);
19540//! ```
19541//!
19542//! `Rc<T>`'s implementations of traits like `Clone` may also be called using
19543//! fully qualified syntax. Some people prefer to use fully qualified syntax,
19544//! while others prefer using method-call syntax.
19545//!
19546//! ```
19547//! use std::rc::Rc;
19548//!
19549//! let rc = Rc::new(());
19550//! // Method-call syntax
19551//! let rc2 = rc.clone();
19552//! // Fully qualified syntax
19553//! let rc3 = Rc::clone(&rc);
19554//! ```
19555//!
19556//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
19557//! already been dropped.
19558//!
19559//! # Cloning references
19560//!
19561//! Creating a new reference to the same allocation as an existing reference counted pointer
19562//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
19563//!
19564//! ```
19565//! use std::rc::Rc;
19566//!
19567//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
19568//! // The two syntaxes below are equivalent.
19569//! let a = foo.clone();
19570//! let b = Rc::clone(&foo);
19571//! // a and b both point to the same memory location as foo.
19572//! ```
19573//!
19574//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
19575//! the meaning of the code. In the example above, this syntax makes it easier to see that
19576//! this code is creating a new reference rather than copying the whole content of foo.
19577//!
19578//! # Examples
19579//!
19580//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
19581//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
19582//! unique ownership, because more than one gadget may belong to the same
19583//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
19584//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
19585//!
19586//! ```
19587//! use std::rc::Rc;
19588//!
19589//! struct Owner {
19590//!     name: String,
19591//!     // ...other fields
19592//! }
19593//!
19594//! struct Gadget {
19595//!     id: i32,
19596//!     owner: Rc<Owner>,
19597//!     // ...other fields
19598//! }
19599//!
19600//! fn main() {
19601//!     // Create a reference-counted `Owner`.
19602//!     let gadget_owner: Rc<Owner> = Rc::new(
19603//!         Owner {
19604//!             name: "Gadget Man".to_string(),
19605//!         }
19606//!     );
19607//!
19608//!     // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
19609//!     // gives us a new pointer to the same `Owner` allocation, incrementing
19610//!     // the reference count in the process.
19611//!     let gadget1 = Gadget {
19612//!         id: 1,
19613//!         owner: Rc::clone(&gadget_owner),
19614//!     };
19615//!     let gadget2 = Gadget {
19616//!         id: 2,
19617//!         owner: Rc::clone(&gadget_owner),
19618//!     };
19619//!
19620//!     // Dispose of our local variable `gadget_owner`.
19621//!     drop(gadget_owner);
19622//!
19623//!     // Despite dropping `gadget_owner`, we're still able to print out the name
19624//!     // of the `Owner` of the `Gadget`s. This is because we've only dropped a
19625//!     // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
19626//!     // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
19627//!     // live. The field projection `gadget1.owner.name` works because
19628//!     // `Rc<Owner>` automatically dereferences to `Owner`.
19629//!     println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
19630//!     println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
19631//!
19632//!     // At the end of the function, `gadget1` and `gadget2` are destroyed, and
19633//!     // with them the last counted references to our `Owner`. Gadget Man now
19634//!     // gets destroyed as well.
19635//! }
19636//! ```
19637//!
19638//! If our requirements change, and we also need to be able to traverse from
19639//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
19640//! to `Gadget` introduces a cycle. This means that their
19641//! reference counts can never reach 0, and the allocation will never be destroyed:
19642//! a memory leak. In order to get around this, we can use [`Weak`]
19643//! pointers.
19644//!
19645//! Rust actually makes it somewhat difficult to produce this loop in the first
19646//! place. In order to end up with two values that point at each other, one of
19647//! them needs to be mutable. This is difficult because [`Rc`] enforces
19648//! memory safety by only giving out shared references to the value it wraps,
19649//! and these don't allow direct mutation. We need to wrap the part of the
19650//! value we wish to mutate in a [`RefCell`], which provides *interior
19651//! mutability*: a method to achieve mutability through a shared reference.
19652//! [`RefCell`] enforces Rust's borrowing rules at runtime.
19653//!
19654//! ```
19655//! use std::rc::Rc;
19656//! use std::rc::Weak;
19657//! use std::cell::RefCell;
19658//!
19659//! struct Owner {
19660//!     name: String,
19661//!     gadgets: RefCell<Vec<Weak<Gadget>>>,
19662//!     // ...other fields
19663//! }
19664//!
19665//! struct Gadget {
19666//!     id: i32,
19667//!     owner: Rc<Owner>,
19668//!     // ...other fields
19669//! }
19670//!
19671//! fn main() {
19672//!     // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
19673//!     // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
19674//!     // a shared reference.
19675//!     let gadget_owner: Rc<Owner> = Rc::new(
19676//!         Owner {
19677//!             name: "Gadget Man".to_string(),
19678//!             gadgets: RefCell::new(vec![]),
19679//!         }
19680//!     );
19681//!
19682//!     // Create `Gadget`s belonging to `gadget_owner`, as before.
19683//!     let gadget1 = Rc::new(
19684//!         Gadget {
19685//!             id: 1,
19686//!             owner: Rc::clone(&gadget_owner),
19687//!         }
19688//!     );
19689//!     let gadget2 = Rc::new(
19690//!         Gadget {
19691//!             id: 2,
19692//!             owner: Rc::clone(&gadget_owner),
19693//!         }
19694//!     );
19695//!
19696//!     // Add the `Gadget`s to their `Owner`.
19697//!     {
19698//!         let mut gadgets = gadget_owner.gadgets.borrow_mut();
19699//!         gadgets.push(Rc::downgrade(&gadget1));
19700//!         gadgets.push(Rc::downgrade(&gadget2));
19701//!
19702//!         // `RefCell` dynamic borrow ends here.
19703//!     }
19704//!
19705//!     // Iterate over our `Gadget`s, printing their details out.
19706//!     for gadget_weak in gadget_owner.gadgets.borrow().iter() {
19707//!
19708//!         // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
19709//!         // guarantee the allocation still exists, we need to call
19710//!         // `upgrade`, which returns an `Option<Rc<Gadget>>`.
19711//!         //
19712//!         // In this case we know the allocation still exists, so we simply
19713//!         // `unwrap` the `Option`. In a more complicated program, you might
19714//!         // need graceful error handling for a `None` result.
19715//!
19716//!         let gadget = gadget_weak.upgrade().unwrap();
19717//!         println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
19718//!     }
19719//!
19720//!     // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
19721//!     // are destroyed. There are now no strong (`Rc`) pointers to the
19722//!     // gadgets, so they are destroyed. This zeroes the reference count on
19723//!     // Gadget Man, so he gets destroyed as well.
19724//! }
19725//! ```
19726//!
19727//! [clone]: Clone::clone
19728//! [`Cell`]: core::cell::Cell
19729//! [`RefCell`]: core::cell::RefCell
19730//! [send]: core::marker::Send
19731//! [arc]: crate::sync::Arc
19732//! [`Deref`]: core::ops::Deref
19733//! [downgrade]: Rc::downgrade
19734//! [upgrade]: Weak::upgrade
19735//! [mutability]: core::cell#introducing-mutability-inside-of-something-immutable
19736//! [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name
19737
19738#![stable(feature = "rust1", since = "1.0.0")]
19739
19740#[cfg(not(test))]
19741use crate::boxed::Box;
19742#[cfg(test)]
19743use std::boxed::Box;
19744
19745use core::any::Any;
19746use core::borrow;
19747use core::cell::Cell;
19748use core::cmp::Ordering;
19749use core::convert::{From, TryFrom};
19750use core::fmt;
19751use core::hash::{Hash, Hasher};
19752use core::intrinsics::abort;
19753use core::iter;
19754use core::marker::{self, PhantomData, Unpin, Unsize};
19755use core::mem::{self, align_of_val_raw, forget, size_of_val};
19756use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
19757use core::pin::Pin;
19758use core::ptr::{self, NonNull};
19759use core::slice::from_raw_parts_mut;
19760
19761use crate::alloc::{
19762    box_free, handle_alloc_error, AllocError, Allocator, Global, Layout, WriteCloneIntoRaw,
19763};
19764use crate::borrow::{Cow, ToOwned};
19765use crate::string::String;
19766use crate::vec::Vec;
19767
19768#[cfg(test)]
19769mod tests;
19770
// This is repr(C) to future-proof against possible field-reordering, which
// would interfere with otherwise safe [into|from]_raw() of transmutable
// inner types.
//
// The heap allocation shared by all `Rc`/`Weak` pointers to one value:
// the two reference counts followed by the value itself.
#[repr(C)]
struct RcBox<T: ?Sized> {
    // Number of `Rc`s pointing at this allocation. All strong pointers
    // collectively own one implicit weak reference (see `Rc::new`).
    strong: Cell<usize>,
    // Number of `Weak`s, plus the one implicit weak owned by the strong
    // pointers.
    weak: Cell<usize>,
    // The shared value. Kept last so `T` may be unsized (`T: ?Sized`).
    value: T,
}
19780
/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
/// Counted'.
///
/// See the [module-level documentation](./index.html) for more details.
///
/// The inherent methods of `Rc` are all associated functions, which means
/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
/// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
///
/// [get_mut]: Rc::get_mut
#[cfg_attr(not(test), rustc_diagnostic_item = "Rc")]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Rc<T: ?Sized> {
    // Non-null pointer to the shared, reference-counted allocation.
    ptr: NonNull<RcBox<T>>,
    // Zero-sized marker telling the compiler that `Rc` logically owns an
    // `RcBox<T>` (for variance and drop-check purposes).
    phantom: PhantomData<RcBox<T>>,
}
19797
// `Rc` uses non-atomic reference counts (see the module docs), so it must
// never be sent or shared across threads.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> !marker::Send for Rc<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> !marker::Sync for Rc<T> {}

// Permit unsizing coercions such as `Rc<[u8; 4]>` -> `Rc<[u8]>` and
// `Rc<Concrete>` -> `Rc<dyn Trait>`.
#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Rc<U>> for Rc<T> {}

// Allows `Rc<Self>` to be used as a method receiver for trait objects.
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}
19808
impl<T: ?Sized> Rc<T> {
    /// Shared access to the reference counts and value.
    #[inline(always)]
    fn inner(&self) -> &RcBox<T> {
        // This unsafety is ok because while this Rc is alive we're guaranteed
        // that the inner pointer is valid.
        unsafe { self.ptr.as_ref() }
    }

    /// Wraps an existing `RcBox` pointer as an `Rc` without touching the
    /// reference counts; the caller must have already accounted for the
    /// strong reference this `Rc` represents.
    fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
        Self { ptr, phantom: PhantomData }
    }

    /// Like `from_inner`, but from a raw pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null (required by `NonNull::new_unchecked`); callers
    /// also rely on it pointing at a live `RcBox<T>`.
    unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
        Self::from_inner(unsafe { NonNull::new_unchecked(ptr) })
    }
}
19825
impl<T> Rc<T> {
    /// Constructs a new `Rc<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(value: T) -> Rc<T> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        Self::from_inner(
            Box::leak(box RcBox { strong: Cell::new(1), weak: Cell::new(1), value }).into(),
        )
    }

    /// Constructs a new `Rc<T>` using a weak reference to itself. Attempting
    /// to upgrade the weak reference before this function returns will result
    /// in a `None` value. However, the weak reference may be cloned freely and
    /// stored for use at a later time.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(arc_new_cyclic)]
    /// #![allow(dead_code)]
    /// use std::rc::{Rc, Weak};
    ///
    /// struct Gadget {
    ///     self_weak: Weak<Self>,
    ///     // ... more fields
    /// }
    /// impl Gadget {
    ///     pub fn new() -> Rc<Self> {
    ///         Rc::new_cyclic(|self_weak| {
    ///             Gadget { self_weak: self_weak.clone(), /* ... */ }
    ///         })
    ///     }
    /// }
    /// ```
    #[unstable(feature = "arc_new_cyclic", issue = "75861")]
    pub fn new_cyclic(data_fn: impl FnOnce(&Weak<T>) -> T) -> Rc<T> {
        // Construct the inner in the "uninitialized" state with a single
        // weak reference.
        let uninit_ptr: NonNull<_> = Box::leak(box RcBox {
            strong: Cell::new(0),
            weak: Cell::new(1),
            value: mem::MaybeUninit::<T>::uninit(),
        })
        .into();

        // Cast `RcBox<MaybeUninit<T>>` to `RcBox<T>`: the layouts match
        // because `RcBox` is `repr(C)` and `MaybeUninit<T>` has the same
        // layout as `T`.
        let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();

        let weak = Weak { ptr: init_ptr };

        // It's important we don't give up ownership of the weak pointer, or
        // else the memory might be freed by the time `data_fn` returns. If
        // we really wanted to pass ownership, we could create an additional
        // weak pointer for ourselves, but this would result in additional
        // updates to the weak reference count which might not be necessary
        // otherwise.
        let data = data_fn(&weak);

        // SAFETY: `init_ptr` points at the box leaked above, which stays
        // alive because `weak` holds its (only) weak reference. Writing the
        // value and then setting `strong` to 1 completes initialization;
        // `strong == 0` until now means no `Weak::upgrade` could have
        // produced a strong reference to the uninitialized value.
        unsafe {
            let inner = init_ptr.as_ptr();
            ptr::write(ptr::addr_of_mut!((*inner).value), data);

            let prev_value = (*inner).strong.get();
            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
            (*inner).strong.set(1);
        }

        let strong = Rc::from_inner(init_ptr);

        // Strong references should collectively own a shared weak reference,
        // so don't run the destructor for our old weak reference.
        mem::forget(weak);
        strong
    }

    /// Constructs a new `Rc` with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                // Cast the raw allocation to the concrete `RcBox` type.
                |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    ///
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::new_zeroed();
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
            ))
        }
    }

    /// Constructs a new `Rc<T>`, returning an error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    ///
    /// let five = Rc::try_new(5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new(value: T) -> Result<Rc<T>, AllocError> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        Ok(Self::from_inner(
            Box::leak(Box::try_new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value })?)
                .into(),
        ))
    }

    /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::try_new_uninit()?;
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn try_new_uninit() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
            )?))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, returning an error if the allocation fails
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    ///
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::try_new_zeroed()?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    //#[unstable(feature = "new_uninit", issue = "63291")]
    pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
            )?))
        }
    }
    /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
    /// `value` will be pinned in memory and unable to be moved.
    #[stable(feature = "pin", since = "1.33.0")]
    pub fn pin(value: T) -> Pin<Rc<T>> {
        // SAFETY: the value is heap-allocated behind the `Rc` and `Rc`
        // exposes no API that moves it out while pinned.
        unsafe { Pin::new_unchecked(Rc::new(value)) }
    }

    /// Returns the inner value, if the `Rc` has exactly one strong reference.
    ///
    /// Otherwise, an [`Err`] is returned with the same `Rc` that was
    /// passed in.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(3);
    /// assert_eq!(Rc::try_unwrap(x), Ok(3));
    ///
    /// let x = Rc::new(4);
    /// let _y = Rc::clone(&x);
    /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
    /// ```
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn try_unwrap(this: Self) -> Result<T, Self> {
        if Rc::strong_count(&this) == 1 {
            unsafe {
                let val = ptr::read(&*this); // copy the contained object

                // Indicate to Weaks that they can't be promoted by decrementing
                // the strong count, and then remove the implicit "strong weak"
                // pointer while also handling drop logic by just crafting a
                // fake Weak.
                this.inner().dec_strong();
                let _weak = Weak { ptr: this.ptr };
                forget(this);
                Ok(val)
            }
        } else {
            Err(this)
        }
    }
}
20110
impl<T> Rc<[T]> {
    /// Constructs a new reference-counted slice with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
    ///
    /// let values = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
    ///     Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
    ///     Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
    ///
    ///     values.assume_init()
    /// };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
        // `allocate_for_slice` (defined elsewhere in this file) presumably
        // initializes the reference counts — confirm there.
        unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
    }

    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
    /// filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    ///
    /// use std::rc::Rc;
    ///
    /// let values = Rc::<[u32]>::new_zeroed_slice(3);
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [0, 0, 0])
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| Global.allocate_zeroed(layout),
                |mem| {
                    // Build a fat pointer: the slice length `len` becomes the
                    // pointer metadata, then cast to the unsized `RcBox`.
                    ptr::slice_from_raw_parts_mut(mem as *mut T, len)
                        as *mut RcBox<[mem::MaybeUninit<T>]>
                },
            ))
        }
    }
}
20174
impl<T> Rc<mem::MaybeUninit<T>> {
    /// Converts to `Rc<T>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub unsafe fn assume_init(self) -> Rc<T> {
        // `ManuallyDrop` stops `self`'s destructor from decrementing the
        // strong count being transferred to the returned `Rc`; the cast
        // reinterprets `RcBox<MaybeUninit<T>>` as `RcBox<T>` (same layout).
        Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
    }
}
20213
impl<T> Rc<[mem::MaybeUninit<T>]> {
    /// Converts to `Rc<[T]>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
    ///
    /// let values = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
    ///     Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
    ///     Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
    ///
    ///     values.assume_init()
    /// };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub unsafe fn assume_init(self) -> Rc<[T]> {
        // `ManuallyDrop` keeps the refcounts untouched while the fat pointer is
        // re-tagged from `[MaybeUninit<T>]` to `[T]` (same layout, same length metadata).
        unsafe { Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
    }
}
20254
impl<T: ?Sized> Rc<T> {
    /// Consumes the `Rc`, returning the wrapped pointer.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
    /// [`Rc::from_raw`][from_raw].
    ///
    /// [from_raw]: Rc::from_raw
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// ```
    #[stable(feature = "rc_raw", since = "1.17.0")]
    pub fn into_raw(this: Self) -> *const T {
        let ptr = Self::as_ptr(&this);
        // `forget` skips the destructor: the strong count stays as-is, keeping the
        // allocation alive until `from_raw` reconstitutes an `Rc` from the pointer.
        mem::forget(this);
        ptr
    }

    /// Provides a raw pointer to the data.
    ///
    /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
    /// for as long there are strong counts in the `Rc`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let y = Rc::clone(&x);
    /// let x_ptr = Rc::as_ptr(&x);
    /// assert_eq!(x_ptr, Rc::as_ptr(&y));
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// ```
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    pub fn as_ptr(this: &Self) -> *const T {
        let ptr: *mut RcBox<T> = NonNull::as_ptr(this.ptr);

        // SAFETY: This cannot go through Deref::deref or Rc::inner because
        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
        // write through the pointer after the Rc is recovered through `from_raw`.
        unsafe { ptr::addr_of_mut!((*ptr).value) }
    }

    /// Constructs an `Rc<T>` from a raw pointer.
    ///
    /// The raw pointer must have been previously returned by a call to
    /// [`Rc<U>::into_raw`][into_raw] where `U` must have the same size
    /// and alignment as `T`. This is trivially true if `U` is `T`.
    /// Note that if `U` is not `T` but has the same size and alignment, this is
    /// basically like transmuting references of different types. See
    /// [`mem::transmute`][transmute] for more information on what
    /// restrictions apply in this case.
    ///
    /// The user of `from_raw` has to make sure a specific value of `T` is only
    /// dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Rc<T>` is never accessed.
    ///
    /// [into_raw]: Rc::into_raw
    /// [transmute]: core::mem::transmute
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Rc` to prevent leak.
    ///     let x = Rc::from_raw(x_ptr);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    #[stable(feature = "rc_raw", since = "1.17.0")]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        // Offset of the `value` field within `RcBox<T>` (depends on the value's
        // alignment for unsized `T`).
        let offset = unsafe { data_offset(ptr) };

        // Reverse the offset to find the original RcBox.
        // `set_ptr_value` preserves `ptr`'s metadata (slice length / vtable) while
        // substituting the adjusted address.
        let rc_ptr =
            unsafe { (ptr as *mut RcBox<T>).set_ptr_value((ptr as *mut u8).offset(-offset)) };

        unsafe { Self::from_ptr(rc_ptr) }
    }

    /// Creates a new [`Weak`] pointer to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// let weak_five = Rc::downgrade(&five);
    /// ```
    #[stable(feature = "rc_weak", since = "1.4.0")]
    pub fn downgrade(this: &Self) -> Weak<T> {
        this.inner().inc_weak();
        // Make sure we do not create a dangling Weak
        debug_assert!(!is_dangling(this.ptr.as_ptr()));
        Weak { ptr: this.ptr }
    }

    /// Gets the number of [`Weak`] pointers to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let _weak_five = Rc::downgrade(&five);
    ///
    /// assert_eq!(1, Rc::weak_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "rc_counts", since = "1.15.0")]
    pub fn weak_count(this: &Self) -> usize {
        // Subtract the implicit "strong weak" reference that all strong pointers
        // collectively hold (see `Drop for Rc`).
        this.inner().weak() - 1
    }

    /// Gets the number of strong (`Rc`) pointers to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let _also_five = Rc::clone(&five);
    ///
    /// assert_eq!(2, Rc::strong_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "rc_counts", since = "1.15.0")]
    pub fn strong_count(this: &Self) -> usize {
        this.inner().strong()
    }

    /// Increments the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw`, and the
    /// associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) for the duration of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count(ptr);
    ///
    ///     let five = Rc::from_raw(ptr);
    ///     assert_eq!(2, Rc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
    pub unsafe fn increment_strong_count(ptr: *const T) {
        // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
        let rc = unsafe { mem::ManuallyDrop::new(Rc::<T>::from_raw(ptr)) };
        // Now increase refcount, but don't drop new refcount either
        let _rc_clone: mem::ManuallyDrop<_> = rc.clone();
    }

    /// Decrements the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw`, and the
    /// associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) when invoking this method. This method can be used to release
    /// the final `Rc` and backing storage, but **should not** be called after
    /// the final `Rc` has been released.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count(ptr);
    ///
    ///     let five = Rc::from_raw(ptr);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///     Rc::decrement_strong_count(ptr);
    ///     assert_eq!(1, Rc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
    pub unsafe fn decrement_strong_count(ptr: *const T) {
        // Reconstituting the Rc and dropping it performs exactly one decrement
        // (and runs the usual cleanup if that was the last strong reference).
        unsafe { mem::drop(Rc::from_raw(ptr)) };
    }

    /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
    /// this allocation.
    #[inline]
    fn is_unique(this: &Self) -> bool {
        Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
    }

    /// Returns a mutable reference into the given `Rc`, if there are
    /// no other `Rc` or [`Weak`] pointers to the same allocation.
    ///
    /// Returns [`None`] otherwise, because it is not safe to
    /// mutate a shared value.
    ///
    /// See also [`make_mut`][make_mut], which will [`clone`][clone]
    /// the inner value when there are other pointers.
    ///
    /// [make_mut]: Rc::make_mut
    /// [clone]: Clone::clone
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut x = Rc::new(3);
    /// *Rc::get_mut(&mut x).unwrap() = 4;
    /// assert_eq!(*x, 4);
    ///
    /// let _y = Rc::clone(&x);
    /// assert!(Rc::get_mut(&mut x).is_none());
    /// ```
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
        // SAFETY: uniqueness was just checked, so no other pointer can observe the borrow.
        if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
    }

    /// Returns a mutable reference into the given `Rc`,
    /// without any check.
    ///
    /// See also [`get_mut`], which is safe and does appropriate checks.
    ///
    /// [`get_mut`]: Rc::get_mut
    ///
    /// # Safety
    ///
    /// Any other `Rc` or [`Weak`] pointers to the same allocation must not be dereferenced
    /// for the duration of the returned borrow.
    /// This is trivially the case if no such pointers exist,
    /// for example immediately after `Rc::new`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut x = Rc::new(String::new());
    /// unsafe {
    ///     Rc::get_mut_unchecked(&mut x).push_str("foo")
    /// }
    /// assert_eq!(*x, "foo");
    /// ```
    #[inline]
    #[unstable(feature = "get_mut_unchecked", issue = "63292")]
    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
        // We are careful to *not* create a reference covering the "count" fields, as
        // this would conflict with accesses to the reference counts (e.g. by `Weak`).
        unsafe { &mut (*this.ptr.as_ptr()).value }
    }

    #[inline]
    #[stable(feature = "ptr_eq", since = "1.17.0")]
    /// Returns `true` if the two `Rc`s point to the same allocation
    /// (in a vein similar to [`ptr::eq`]).
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let same_five = Rc::clone(&five);
    /// let other_five = Rc::new(5);
    ///
    /// assert!(Rc::ptr_eq(&five, &same_five));
    /// assert!(!Rc::ptr_eq(&five, &other_five));
    /// ```
    ///
    /// [`ptr::eq`]: core::ptr::eq
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        // NOTE(review): for unsized `T` these are fat pointers, so `==` also compares
        // the metadata component (e.g. the vtable for trait objects), not only the address.
        this.ptr.as_ptr() == other.ptr.as_ptr()
    }
}
20570
impl<T: Clone> Rc<T> {
    /// Makes a mutable reference into the given `Rc`.
    ///
    /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
    /// [`clone`] the inner value to a new allocation to ensure unique ownership.  This is also
    /// referred to as clone-on-write.
    ///
    /// If there are no other `Rc` pointers to this allocation, then [`Weak`]
    /// pointers to this allocation will be disassociated.
    ///
    /// See also [`get_mut`], which will fail rather than cloning.
    ///
    /// [`clone`]: Clone::clone
    /// [`get_mut`]: Rc::get_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut data = Rc::new(5);
    ///
    /// *Rc::make_mut(&mut data) += 1;        // Won't clone anything
    /// let mut other_data = Rc::clone(&data);    // Won't clone inner data
    /// *Rc::make_mut(&mut data) += 1;        // Clones inner data
    /// *Rc::make_mut(&mut data) += 1;        // Won't clone anything
    /// *Rc::make_mut(&mut other_data) *= 2;  // Won't clone anything
    ///
    /// // Now `data` and `other_data` point to different allocations.
    /// assert_eq!(*data, 8);
    /// assert_eq!(*other_data, 12);
    /// ```
    ///
    /// [`Weak`] pointers will be disassociated:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut data = Rc::new(75);
    /// let weak = Rc::downgrade(&data);
    ///
    /// assert!(75 == *data);
    /// assert!(75 == *weak.upgrade().unwrap());
    ///
    /// *Rc::make_mut(&mut data) += 1;
    ///
    /// assert!(76 == *data);
    /// assert!(weak.upgrade().is_none());
    /// ```
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn make_mut(this: &mut Self) -> &mut T {
        if Rc::strong_count(this) != 1 {
            // Gotta clone the data, there are other Rcs.
            // Pre-allocate memory to allow writing the cloned value directly.
            let mut rc = Self::new_uninit();
            unsafe {
                let data = Rc::get_mut_unchecked(&mut rc);
                (**this).write_clone_into_raw(data.as_mut_ptr());
                // Assigning drops the old `Rc` handle (decrementing its strong count)
                // and installs the freshly cloned, uniquely owned allocation.
                *this = rc.assume_init();
            }
        } else if Rc::weak_count(this) != 0 {
            // Can just steal the data, all that's left is Weaks
            let mut rc = Self::new_uninit();
            unsafe {
                let data = Rc::get_mut_unchecked(&mut rc);
                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);

                this.inner().dec_strong();
                // Remove implicit strong-weak ref (no need to craft a fake
                // Weak here -- we know other Weaks can clean up for us)
                this.inner().dec_weak();
                // `ptr::write` overwrites `this` without running its destructor;
                // the old allocation's counts were already adjusted above.
                ptr::write(this, rc.assume_init());
            }
        }
        // This unsafety is ok because we're guaranteed that the pointer
        // returned is the *only* pointer that will ever be returned to T. Our
        // reference count is guaranteed to be 1 at this point, and we required
        // the `Rc<T>` itself to be `mut`, so we're returning the only possible
        // reference to the allocation.
        unsafe { &mut this.ptr.as_mut().value }
    }
}
20654
impl Rc<dyn Any> {
    #[inline]
    #[stable(feature = "rc_downcast", since = "1.29.0")]
    /// Attempt to downcast the `Rc<dyn Any>` to a concrete type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::any::Any;
    /// use std::rc::Rc;
    ///
    /// fn print_if_string(value: Rc<dyn Any>) {
    ///     if let Ok(string) = value.downcast::<String>() {
    ///         println!("String ({}): {}", string.len(), string);
    ///     }
    /// }
    ///
    /// let my_string = "Hello World".to_string();
    /// print_if_string(Rc::new(my_string));
    /// print_if_string(Rc::new(0i8));
    /// ```
    pub fn downcast<T: Any>(self) -> Result<Rc<T>, Rc<dyn Any>> {
        if (*self).is::<T>() {
            // The type matches, so the fat pointer can be shrunk to a thin
            // `RcBox<T>` pointer. The cast handle takes over the existing
            // reference count, so `self`'s destructor must be skipped.
            let ptr = self.ptr.cast::<RcBox<T>>();
            forget(self);
            Ok(Rc::from_inner(ptr))
        } else {
            Err(self)
        }
    }
}
20686
impl<T: ?Sized> Rc<T> {
    /// Allocates an `RcBox<T>` with sufficient space for
    /// a possibly-unsized inner value where the value has the layout provided.
    ///
    /// The function `mem_to_rcbox` is called with the data pointer
    /// and must return back a (potentially fat)-pointer for the `RcBox<T>`.
    unsafe fn allocate_for_layout(
        value_layout: Layout,
        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
        mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
    ) -> *mut RcBox<T> {
        // Calculate layout using the given value layout.
        // Previously, layout was calculated on the expression
        // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
        // reference (see #54908).
        let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();
        unsafe {
            // Infallible wrapper: abort via the global alloc-error handler on failure.
            Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rcbox)
                .unwrap_or_else(|_| handle_alloc_error(layout))
        }
    }

    /// Allocates an `RcBox<T>` with sufficient space for
    /// a possibly-unsized inner value where the value has the layout provided,
    /// returning an error if allocation fails.
    ///
    /// The function `mem_to_rcbox` is called with the data pointer
    /// and must return back a (potentially fat)-pointer for the `RcBox<T>`.
    #[inline]
    unsafe fn try_allocate_for_layout(
        value_layout: Layout,
        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
        mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
    ) -> Result<*mut RcBox<T>, AllocError> {
        // Calculate layout using the given value layout.
        // Previously, layout was calculated on the expression
        // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
        // reference (see #54908).
        let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();

        // Allocate for the layout.
        let ptr = allocate(layout)?;

        // Initialize the RcBox
        let inner = mem_to_rcbox(ptr.as_non_null_ptr().as_ptr());
        unsafe {
            debug_assert_eq!(Layout::for_value(&*inner), layout);

            // Both counts start at 1: the weak count includes the implicit
            // "strong weak" reference held collectively by all strong pointers.
            ptr::write(&mut (*inner).strong, Cell::new(1));
            ptr::write(&mut (*inner).weak, Cell::new(1));
        }

        Ok(inner)
    }

    /// Allocates an `RcBox<T>` with sufficient space for an unsized inner value
    unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
        // Allocate for the `RcBox<T>` using the given value.
        unsafe {
            Self::allocate_for_layout(
                Layout::for_value(&*ptr),
                |layout| Global.allocate(layout),
                // `set_ptr_value` carries over `ptr`'s metadata (slice length /
                // vtable) while pointing at the freshly allocated memory.
                |mem| (ptr as *mut RcBox<T>).set_ptr_value(mem),
            )
        }
    }

    /// Moves the boxed value into a new reference-counted allocation.
    fn from_box(v: Box<T>) -> Rc<T> {
        unsafe {
            let (box_unique, alloc) = Box::into_unique(v);
            let bptr = box_unique.as_ptr();

            let value_size = size_of_val(&*bptr);
            let ptr = Self::allocate_for_ptr(bptr);

            // Copy value as bytes
            ptr::copy_nonoverlapping(
                bptr as *const T as *const u8,
                &mut (*ptr).value as *mut _ as *mut u8,
                value_size,
            );

            // Free the allocation without dropping its contents
            // (ownership of the value was transferred by the byte copy above)
            box_free(box_unique, alloc);

            Self::from_ptr(ptr)
        }
    }
}
20776
impl<T> Rc<[T]> {
    /// Allocates an `RcBox<[T]>` with the given length.
    unsafe fn allocate_for_slice(len: usize) -> *mut RcBox<[T]> {
        unsafe {
            Self::allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| Global.allocate(layout),
                // Turn the thin data pointer into a fat pointer carrying `len`.
                |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>,
            )
        }
    }

    /// Copy elements from slice into newly allocated Rc<\[T\]>
    ///
    /// Unsafe because the caller must either take ownership or bind `T: Copy`
    unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
        unsafe {
            let ptr = Self::allocate_for_slice(v.len());
            ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len());
            Self::from_ptr(ptr)
        }
    }

    /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
    ///
    /// Behavior is undefined should the size be wrong.
    unsafe fn from_iter_exact(iter: impl iter::Iterator<Item = T>, len: usize) -> Rc<[T]> {
        // Panic guard while cloning T elements.
        // In the event of a panic, elements that have been written
        // into the new RcBox will be dropped, then the memory freed.
        struct Guard<T> {
            mem: NonNull<u8>,
            elems: *mut T,
            layout: Layout,
            // Number of elements fully written so far; only these are dropped on unwind.
            n_elems: usize,
        }

        impl<T> Drop for Guard<T> {
            fn drop(&mut self) {
                unsafe {
                    let slice = from_raw_parts_mut(self.elems, self.n_elems);
                    ptr::drop_in_place(slice);

                    Global.deallocate(self.mem, self.layout);
                }
            }
        }

        unsafe {
            let ptr = Self::allocate_for_slice(len);

            let mem = ptr as *mut _ as *mut u8;
            let layout = Layout::for_value(&*ptr);

            // Pointer to first element
            let elems = &mut (*ptr).value as *mut [T] as *mut T;

            let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };

            for (i, item) in iter.enumerate() {
                ptr::write(elems.add(i), item);
                // Count only after the write succeeds, so a panicking iterator
                // never causes the guard to drop an unwritten slot.
                guard.n_elems += 1;
            }

            // All clear. Forget the guard so it doesn't free the new RcBox.
            forget(guard);

            Self::from_ptr(ptr)
        }
    }
}
20848
/// Specialization trait used for `From<&[T]>`.
///
/// Allows a faster `Copy`-based implementation to be selected when available,
/// with a `Clone`-based fallback otherwise.
trait RcFromSlice<T> {
    fn from_slice(slice: &[T]) -> Self;
}
20853
20854impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
20855    #[inline]
20856    default fn from_slice(v: &[T]) -> Self {
20857        unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
20858    }
20859}
20860
20861impl<T: Copy> RcFromSlice<T> for Rc<[T]> {
20862    #[inline]
20863    fn from_slice(v: &[T]) -> Self {
20864        unsafe { Rc::copy_from_slice(v) }
20865    }
20866}
20867
20868#[stable(feature = "rust1", since = "1.0.0")]
20869impl<T: ?Sized> Deref for Rc<T> {
20870    type Target = T;
20871
20872    #[inline(always)]
20873    fn deref(&self) -> &T {
20874        &self.inner().value
20875    }
20876}
20877
// Marker impl allowing `Rc<Self>` to be used as a method receiver type
// (e.g. `fn method(self: Rc<Self>)`).
#[unstable(feature = "receiver_trait", issue = "none")]
impl<T: ?Sized> Receiver for Rc<T> {}
20880
#[stable(feature = "rust1", since = "1.0.0")]
// `#[may_dangle]` (the "dropck eyepatch") asserts this destructor only drops `T`,
// never otherwise inspects it, relaxing drop-check requirements on borrows in `T`.
unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
    /// Drops the `Rc`.
    ///
    /// This will decrement the strong reference count. If the strong reference
    /// count reaches zero then the only other references (if any) are
    /// [`Weak`], so we `drop` the inner value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo  = Rc::new(Foo);
    /// let foo2 = Rc::clone(&foo);
    ///
    /// drop(foo);    // Doesn't print anything
    /// drop(foo2);   // Prints "dropped!"
    /// ```
    fn drop(&mut self) {
        unsafe {
            self.inner().dec_strong();
            if self.inner().strong() == 0 {
                // destroy the contained object
                ptr::drop_in_place(Self::get_mut_unchecked(self));

                // remove the implicit "strong weak" pointer now that we've
                // destroyed the contents.
                self.inner().dec_weak();

                // Only free the RcBox itself once no Weak handles remain either.
                if self.inner().weak() == 0 {
                    Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
                }
            }
        }
    }
}
20926
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Clone for Rc<T> {
    /// Makes a clone of the `Rc` pointer.
    ///
    /// This creates another pointer to the same allocation, increasing the
    /// strong reference count.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// let _ = Rc::clone(&five);
    /// ```
    #[inline]
    fn clone(&self) -> Rc<T> {
        // Bump the strong count, then hand out a second handle to the same RcBox.
        self.inner().inc_strong();
        Self::from_inner(self.ptr)
    }
}
20949
20950#[stable(feature = "rust1", since = "1.0.0")]
20951impl<T: Default> Default for Rc<T> {
20952    /// Creates a new `Rc<T>`, with the `Default` value for `T`.
20953    ///
20954    /// # Examples
20955    ///
20956    /// ```
20957    /// use std::rc::Rc;
20958    ///
20959    /// let x: Rc<i32> = Default::default();
20960    /// assert_eq!(*x, 0);
20961    /// ```
20962    #[inline]
20963    fn default() -> Rc<T> {
20964        Rc::new(Default::default())
20965    }
20966}
20967
#[stable(feature = "rust1", since = "1.0.0")]
// Internal helper trait that lets `PartialEq for Rc<T>` be specialized when
// `T: Eq`, enabling the pointer-equality shortcut.
trait RcEqIdent<T: ?Sized + PartialEq> {
    fn eq(&self, other: &Rc<T>) -> bool;
    fn ne(&self, other: &Rc<T>) -> bool;
}
20973
20974#[stable(feature = "rust1", since = "1.0.0")]
20975impl<T: ?Sized + PartialEq> RcEqIdent<T> for Rc<T> {
20976    #[inline]
20977    default fn eq(&self, other: &Rc<T>) -> bool {
20978        **self == **other
20979    }
20980
20981    #[inline]
20982    default fn ne(&self, other: &Rc<T>) -> bool {
20983        **self != **other
20984    }
20985}
20986
// Hack to allow specializing on `Eq` even though `Eq` has a method.
#[rustc_unsafe_specialization_marker]
pub(crate) trait MarkerEq: PartialEq<Self> {}

// Blanket impl: every `Eq` type has reflexive equality, which the specialized
// `RcEqIdent` impl relies on for its pointer-equality shortcut.
impl<T: Eq> MarkerEq for T {}
20992
/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
/// store large values, that are slow to clone, but also heavy to check for equality, causing this
/// cost to pay off more easily. It's also more likely to have two `Rc` clones, that point to
/// the same value, than two `&T`s.
///
/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + MarkerEq> RcEqIdent<T> for Rc<T> {
    #[inline]
    fn eq(&self, other: &Rc<T>) -> bool {
        // Same allocation implies equal values because `Eq` guarantees reflexivity.
        Rc::ptr_eq(self, other) || **self == **other
    }

    #[inline]
    fn ne(&self, other: &Rc<T>) -> bool {
        // Same allocation can never be unequal, again by reflexivity.
        !Rc::ptr_eq(self, other) && **self != **other
    }
}
21012
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
    /// Equality for two `Rc`s.
    ///
    /// Two `Rc`s are equal if their inner values are equal, even if they are
    /// stored in different allocation.
    ///
    /// If `T` also implements `Eq` (implying reflexivity of equality),
    /// two `Rc`s that point to the same allocation are
    /// always equal.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert!(five == Rc::new(5));
    /// ```
    #[inline]
    fn eq(&self, other: &Rc<T>) -> bool {
        // Delegates to `RcEqIdent` so the `T: Eq` case can take the
        // pointer-equality fast path via specialization.
        RcEqIdent::eq(self, other)
    }

    /// Inequality for two `Rc`s.
    ///
    /// Two `Rc`s are unequal if their inner values are unequal.
    ///
    /// If `T` also implements `Eq` (implying reflexivity of equality),
    /// two `Rc`s that point to the same allocation are
    /// never unequal.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert!(five != Rc::new(6));
    /// ```
    #[inline]
    fn ne(&self, other: &Rc<T>) -> bool {
        // Delegates to `RcEqIdent` (see `eq` above).
        RcEqIdent::ne(self, other)
    }
}
21060
// `Rc<T>`'s equality is total whenever `T`'s is.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Eq> Eq for Rc<T> {}
21063
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
    // Every comparison forwards directly to the inner values. Unlike
    // `eq`/`ne` there is no pointer-equality fast path here: `PartialOrd`
    // offers no reflexivity guarantee to exploit.
    /// Partial comparison for two `Rc`s.
    ///
    /// The two are compared by calling `partial_cmp()` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    /// use std::cmp::Ordering;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
    /// ```
    #[inline(always)]
    fn partial_cmp(&self, other: &Rc<T>) -> Option<Ordering> {
        (**self).partial_cmp(&**other)
    }

    /// Less-than comparison for two `Rc`s.
    ///
    /// The two are compared by calling `<` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert!(five < Rc::new(6));
    /// ```
    #[inline(always)]
    fn lt(&self, other: &Rc<T>) -> bool {
        **self < **other
    }

    /// 'Less than or equal to' comparison for two `Rc`s.
    ///
    /// The two are compared by calling `<=` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert!(five <= Rc::new(5));
    /// ```
    #[inline(always)]
    fn le(&self, other: &Rc<T>) -> bool {
        **self <= **other
    }

    /// Greater-than comparison for two `Rc`s.
    ///
    /// The two are compared by calling `>` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert!(five > Rc::new(4));
    /// ```
    #[inline(always)]
    fn gt(&self, other: &Rc<T>) -> bool {
        **self > **other
    }

    /// 'Greater than or equal to' comparison for two `Rc`s.
    ///
    /// The two are compared by calling `>=` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert!(five >= Rc::new(5));
    /// ```
    #[inline(always)]
    fn ge(&self, other: &Rc<T>) -> bool {
        **self >= **other
    }
}
21157
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Ord> Ord for Rc<T> {
    /// Comparison for two `Rc`s.
    ///
    /// The two are compared by calling `cmp()` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    /// use std::cmp::Ordering;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
    /// ```
    #[inline]
    fn cmp(&self, other: &Rc<T>) -> Ordering {
        // Forward to the inner values; allocation identity plays no role here.
        (**self).cmp(&**other)
    }
}
21179
21180#[stable(feature = "rust1", since = "1.0.0")]
21181impl<T: ?Sized + Hash> Hash for Rc<T> {
21182    fn hash<H: Hasher>(&self, state: &mut H) {
21183        (**self).hash(state);
21184    }
21185}
21186
21187#[stable(feature = "rust1", since = "1.0.0")]
21188impl<T: ?Sized + fmt::Display> fmt::Display for Rc<T> {
21189    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
21190        fmt::Display::fmt(&**self, f)
21191    }
21192}
21193
21194#[stable(feature = "rust1", since = "1.0.0")]
21195impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
21196    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
21197        fmt::Debug::fmt(&**self, f)
21198    }
21199}
21200
21201#[stable(feature = "rust1", since = "1.0.0")]
21202impl<T: ?Sized> fmt::Pointer for Rc<T> {
21203    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
21204        fmt::Pointer::fmt(&(&**self as *const T), f)
21205    }
21206}
21207
21208#[stable(feature = "from_for_ptrs", since = "1.6.0")]
21209impl<T> From<T> for Rc<T> {
21210    fn from(t: T) -> Self {
21211        Rc::new(t)
21212    }
21213}
21214
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T: Clone> From<&[T]> for Rc<[T]> {
    /// Allocate a reference-counted slice and fill it by cloning `v`'s items.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let original: &[i32] = &[1, 2, 3];
    /// let shared: Rc<[i32]> = Rc::from(original);
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// ```
    #[inline]
    fn from(v: &[T]) -> Rc<[T]> {
        // Delegates to the private `RcFromSlice` helper (defined elsewhere in
        // this file), which presumably specializes on `T: Copy` — confirm there.
        <Self as RcFromSlice<T>>::from_slice(v)
    }
}
21232
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl From<&str> for Rc<str> {
    /// Allocate a reference-counted string slice and copy `v` into it.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let shared: Rc<str> = Rc::from("statue");
    /// assert_eq!("statue", &shared[..]);
    /// ```
    #[inline]
    fn from(v: &str) -> Rc<str> {
        // Share the machinery of `From<&[u8]>`, then cast the fat pointer
        // from `[u8]` to `str`.
        let rc = Rc::<[u8]>::from(v.as_bytes());
        // SAFETY: the bytes came from a valid `&str`, so they are guaranteed
        // UTF-8, and `str` has the same layout as `[u8]`.
        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
    }
}
21250
21251#[stable(feature = "shared_from_slice", since = "1.21.0")]
21252impl From<String> for Rc<str> {
21253    /// Allocate a reference-counted string slice and copy `v` into it.
21254    ///
21255    /// # Example
21256    ///
21257    /// ```
21258    /// # use std::rc::Rc;
21259    /// let original: String = "statue".to_owned();
21260    /// let shared: Rc<str> = Rc::from(original);
21261    /// assert_eq!("statue", &shared[..]);
21262    /// ```
21263    #[inline]
21264    fn from(v: String) -> Rc<str> {
21265        Rc::from(&v[..])
21266    }
21267}
21268
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T: ?Sized> From<Box<T>> for Rc<T> {
    /// Move a boxed object to a new, reference counted, allocation.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let original: Box<i32> = Box::new(1);
    /// let shared: Rc<i32> = Rc::from(original);
    /// assert_eq!(1, *shared);
    /// ```
    #[inline]
    fn from(v: Box<T>) -> Rc<T> {
        // `Rc::from_box` (defined elsewhere in this file) reallocates, since
        // the `RcBox` header must precede the value.
        Rc::from_box(v)
    }
}
21286
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T> From<Vec<T>> for Rc<[T]> {
    /// Allocate a reference-counted slice and move `v`'s items into it.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let original: Vec<i32> = vec![1, 2, 3];
    /// let shared: Rc<[i32]> = Rc::from(original);
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// ```
    #[inline]
    fn from(mut v: Vec<T>) -> Rc<[T]> {
        unsafe {
            // SAFETY: `copy_from_slice` bitwise-copies the elements into the
            // new allocation; ownership of those elements transfers to the
            // `Rc`, so the `Vec` must not drop them (handled below).
            let rc = Rc::copy_from_slice(&v);

            // Allow the Vec to free its memory, but not destroy its contents
            v.set_len(0);

            rc
        }
    }
}
21311
#[stable(feature = "shared_from_cow", since = "1.45.0")]
impl<'a, B> From<Cow<'a, B>> for Rc<B>
where
    B: ToOwned + ?Sized,
    Rc<B>: From<&'a B> + From<B::Owned>,
{
    /// Converts a `Cow` into an `Rc` of its contents: a borrowed `Cow`
    /// goes through `From<&B>`, an owned one through `From<B::Owned>`.
    #[inline]
    fn from(cow: Cow<'a, B>) -> Rc<B> {
        match cow {
            Cow::Borrowed(s) => Rc::from(s),
            Cow::Owned(s) => Rc::from(s),
        }
    }
}
21326
#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
impl<T, const N: usize> TryFrom<Rc<[T]>> for Rc<[T; N]> {
    type Error = Rc<[T]>;

    /// Converts `Rc<[T]>` into `Rc<[T; N]>` when the slice length is exactly
    /// `N`; otherwise returns the original `Rc` unchanged as the error.
    fn try_from(boxed_slice: Rc<[T]>) -> Result<Self, Self::Error> {
        if boxed_slice.len() == N {
            // SAFETY: we just checked that the slice has exactly N elements,
            // so reinterpreting the allocation as `[T; N]` (which only drops
            // the length metadata from the fat pointer) is valid.
            Ok(unsafe { Rc::from_raw(Rc::into_raw(boxed_slice) as *mut [T; N]) })
        } else {
            Err(boxed_slice)
        }
    }
}
21339
#[stable(feature = "shared_from_iter", since = "1.37.0")]
impl<T> iter::FromIterator<T> for Rc<[T]> {
    /// Takes each element in the `Iterator` and collects it into an `Rc<[T]>`.
    ///
    /// # Performance characteristics
    ///
    /// ## The general case
    ///
    /// In the general case, collecting into `Rc<[T]>` is done by first
    /// collecting into a `Vec<T>`. That is, when writing the following:
    ///
    /// ```rust
    /// # use std::rc::Rc;
    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
    /// ```
    ///
    /// this behaves as if we wrote:
    ///
    /// ```rust
    /// # use std::rc::Rc;
    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
    ///     .collect::<Vec<_>>() // The first set of allocations happens here.
    ///     .into(); // A second allocation for `Rc<[T]>` happens here.
    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
    /// ```
    ///
    /// This will allocate as many times as needed for constructing the `Vec<T>`
    /// and then it will allocate once for turning the `Vec<T>` into the `Rc<[T]>`.
    ///
    /// ## Iterators of known length
    ///
    /// When your `Iterator` implements `TrustedLen` and is of an exact size,
    /// a single allocation will be made for the `Rc<[T]>`. For example:
    ///
    /// ```rust
    /// # use std::rc::Rc;
    /// let evens: Rc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
    /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
    /// ```
    fn from_iter<I: iter::IntoIterator<Item = T>>(iter: I) -> Self {
        // Dispatch through the private `ToRcSlice` specialization trait so
        // that `TrustedLen` iterators can take the single-allocation path.
        ToRcSlice::to_rc_slice(iter.into_iter())
    }
}
21384
/// Specialization trait used for collecting into `Rc<[T]>`.
///
/// The default impl buffers through a `Vec<T>`; a specialized impl for
/// `TrustedLen` iterators builds the `Rc<[T]>` with a single allocation.
trait ToRcSlice<T>: Iterator<Item = T> + Sized {
    fn to_rc_slice(self) -> Rc<[T]>;
}
21389
impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
    // Fallback path: buffer everything in a `Vec` first, then copy into the
    // reference-counted allocation (two allocation phases).
    default fn to_rc_slice(self) -> Rc<[T]> {
        self.collect::<Vec<T>>().into()
    }
}
21395
impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
    // Single-allocation path: `TrustedLen` guarantees the size hint is exact,
    // so the `Rc<[T]>` can be allocated up front and filled in place.
    fn to_rc_slice(self) -> Rc<[T]> {
        // This is the case for a `TrustedLen` iterator.
        let (low, high) = self.size_hint();
        if let Some(high) = high {
            debug_assert_eq!(
                low,
                high,
                "TrustedLen iterator's size hint is not exact: {:?}",
                (low, high)
            );

            unsafe {
                // SAFETY: We need to ensure that the iterator has an exact length and we have.
                Rc::from_iter_exact(self, low)
            }
        } else {
            // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
            // length exceeding `usize::MAX`.
            // The default implementation would collect into a vec which would panic.
            // Thus we panic here immediately without invoking `Vec` code.
            panic!("capacity overflow");
        }
    }
}
21421
/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak`
/// pointer, which returns an [`Option`]`<`[`Rc`]`<T>>`.
///
/// Since a `Weak` reference does not count towards ownership, it will not
/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
/// guarantees about the value still being present. Thus it may return [`None`]
/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
/// itself (the backing store) from being deallocated.
///
/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
/// prevent circular references between [`Rc`] pointers, since mutual owning references
/// would never allow either [`Rc`] to be dropped. For example, a tree could
/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
/// pointers from children back to their parents.
///
/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
///
/// [`upgrade`]: Weak::upgrade
#[stable(feature = "rc_weak", since = "1.4.0")]
pub struct Weak<T: ?Sized> {
    // This is a `NonNull` to allow optimizing the size of this type in enums,
    // but it is not necessarily a valid pointer.
    // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
    // to allocate space on the heap.  That's not a value a real pointer
    // will ever have because RcBox has alignment at least 2.
    // This is only possible when `T: Sized`; unsized `T` never dangle.
    // (The free function `is_dangling` below checks for this sentinel.)
    ptr: NonNull<RcBox<T>>,
}
21452
// Like `Rc`, `Weak` manipulates non-atomic (`Cell`-based) reference counts,
// so it must be neither sent nor shared across threads.
#[stable(feature = "rc_weak", since = "1.4.0")]
impl<T: ?Sized> !marker::Send for Weak<T> {}
#[stable(feature = "rc_weak", since = "1.4.0")]
impl<T: ?Sized> !marker::Sync for Weak<T> {}
21457
// Allow unsizing coercions such as `Weak<[T; N]> -> Weak<[T]>` and
// `Weak<Concrete> -> Weak<dyn Trait>`, mirroring the impls for `Rc`.
#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}

#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
21463
impl<T> Weak<T> {
    /// Constructs a new `Weak<T>`, without allocating any memory.
    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: Weak::upgrade
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Weak;
    ///
    /// let empty: Weak<i64> = Weak::new();
    /// assert!(empty.upgrade().is_none());
    /// ```
    #[stable(feature = "downgraded_weak", since = "1.10.0")]
    pub fn new() -> Weak<T> {
        // Store the sentinel address `usize::MAX` instead of allocating an
        // `RcBox`; `is_dangling` recognizes this value everywhere else.
        Weak { ptr: NonNull::new(usize::MAX as *mut RcBox<T>).expect("MAX is not 0") }
    }
}
21483
/// Returns `true` if `ptr` is the `usize::MAX` sentinel used by `Weak::new`
/// for a dangling (never-allocated) weak pointer.
pub(crate) fn is_dangling<T: ?Sized>(ptr: *mut T) -> bool {
    (ptr as *mut ()) as usize == usize::MAX
}
21488
/// Helper type to allow accessing the reference counts without
/// making any assertions about the data field.
struct WeakInner<'a> {
    // Borrow of the weak count inside a (possibly value-dropped) `RcBox`.
    weak: &'a Cell<usize>,
    // Borrow of the strong count inside the same `RcBox`.
    strong: &'a Cell<usize>,
}
21495
impl<T: ?Sized> Weak<T> {
    /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
    ///
    /// The pointer is valid only if there are some strong references. The pointer may be dangling,
    /// unaligned or even [`null`] otherwise.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    /// use std::ptr;
    ///
    /// let strong = Rc::new("hello".to_owned());
    /// let weak = Rc::downgrade(&strong);
    /// // Both point to the same object
    /// assert!(ptr::eq(&*strong, weak.as_ptr()));
    /// // The strong here keeps it alive, so we can still access the object.
    /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
    ///
    /// drop(strong);
    /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
    /// // undefined behaviour.
    /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
    /// ```
    ///
    /// [`null`]: core::ptr::null
    #[stable(feature = "rc_as_ptr", since = "1.45.0")]
    pub fn as_ptr(&self) -> *const T {
        let ptr: *mut RcBox<T> = NonNull::as_ptr(self.ptr);

        if is_dangling(ptr) {
            // If the pointer is dangling, we return the sentinel directly. This cannot be
            // a valid payload address, as the payload is at least as aligned as RcBox (usize).
            ptr as *const T
        } else {
            // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
            // The payload may be dropped at this point, and we have to maintain provenance,
            // so use raw pointer manipulation.
            unsafe { ptr::addr_of_mut!((*ptr).value) }
        }
    }

    /// Consumes the `Weak<T>` and turns it into a raw pointer.
    ///
    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
    /// one weak reference (the weak count is not modified by this operation). It can be turned
    /// back into the `Weak<T>` with [`from_raw`].
    ///
    /// The same restrictions of accessing the target of the pointer as with
    /// [`as_ptr`] apply.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let strong = Rc::new("hello".to_owned());
    /// let weak = Rc::downgrade(&strong);
    /// let raw = weak.into_raw();
    ///
    /// assert_eq!(1, Rc::weak_count(&strong));
    /// assert_eq!("hello", unsafe { &*raw });
    ///
    /// drop(unsafe { Weak::from_raw(raw) });
    /// assert_eq!(0, Rc::weak_count(&strong));
    /// ```
    ///
    /// [`from_raw`]: Weak::from_raw
    /// [`as_ptr`]: Weak::as_ptr
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    pub fn into_raw(self) -> *const T {
        let result = self.as_ptr();
        // Skip `Drop`: the weak count stays incremented and is now owned by
        // the returned raw pointer (reclaimed by `from_raw`).
        mem::forget(self);
        result
    }

    /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
    ///
    /// This can be used to safely get a strong reference (by calling [`upgrade`]
    /// later) or to deallocate the weak count by dropping the `Weak<T>`.
    ///
    /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
    /// as these don't own anything; the method still works on them).
    ///
    /// # Safety
    ///
    /// The pointer must have originated from the [`into_raw`] and must still own its potential
    /// weak reference.
    ///
    /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
    /// takes ownership of one weak reference currently represented as a raw pointer (the weak
    /// count is not modified by this operation) and therefore it must be paired with a previous
    /// call to [`into_raw`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let strong = Rc::new("hello".to_owned());
    ///
    /// let raw_1 = Rc::downgrade(&strong).into_raw();
    /// let raw_2 = Rc::downgrade(&strong).into_raw();
    ///
    /// assert_eq!(2, Rc::weak_count(&strong));
    ///
    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
    /// assert_eq!(1, Rc::weak_count(&strong));
    ///
    /// drop(strong);
    ///
    /// // Decrement the last weak count.
    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
    /// ```
    ///
    /// [`into_raw`]: Weak::into_raw
    /// [`upgrade`]: Weak::upgrade
    /// [`new`]: Weak::new
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        // See Weak::as_ptr for context on how the input pointer is derived.

        let ptr = if is_dangling(ptr as *mut T) {
            // This is a dangling Weak.
            ptr as *mut RcBox<T>
        } else {
            // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
            // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
            let offset = unsafe { data_offset(ptr) };
            // Thus, we reverse the offset to get the whole RcBox.
            // SAFETY: the pointer originated from a Weak, so this offset is safe.
            unsafe { (ptr as *mut RcBox<T>).set_ptr_value((ptr as *mut u8).offset(-offset)) }
        };

        // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
        Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } }
    }

    /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
    /// dropping of the inner value if successful.
    ///
    /// Returns [`None`] if the inner value has since been dropped.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// let weak_five = Rc::downgrade(&five);
    ///
    /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
    /// assert!(strong_five.is_some());
    ///
    /// // Destroy all strong pointers.
    /// drop(strong_five);
    /// drop(five);
    ///
    /// assert!(weak_five.upgrade().is_none());
    /// ```
    #[stable(feature = "rc_weak", since = "1.4.0")]
    pub fn upgrade(&self) -> Option<Rc<T>> {
        let inner = self.inner()?;
        if inner.strong() == 0 {
            // The value has already been dropped; only the allocation remains.
            None
        } else {
            // The value is still alive: mint a new strong reference for the
            // returned `Rc` before constructing it.
            inner.inc_strong();
            Some(Rc::from_inner(self.ptr))
        }
    }

    /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
    ///
    /// If `self` was created using [`Weak::new`], this will return 0.
    #[stable(feature = "weak_counts", since = "1.41.0")]
    pub fn strong_count(&self) -> usize {
        if let Some(inner) = self.inner() { inner.strong() } else { 0 }
    }

    /// Gets the number of `Weak` pointers pointing to this allocation.
    ///
    /// If no strong pointers remain, this will return zero.
    #[stable(feature = "weak_counts", since = "1.41.0")]
    pub fn weak_count(&self) -> usize {
        self.inner()
            .map(|inner| {
                if inner.strong() > 0 {
                    inner.weak() - 1 // subtract the implicit weak ptr
                } else {
                    0
                }
            })
            .unwrap_or(0)
    }

    /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`,
    /// (i.e., when this `Weak` was created by `Weak::new`).
    #[inline]
    fn inner(&self) -> Option<WeakInner<'_>> {
        if is_dangling(self.ptr.as_ptr()) {
            None
        } else {
            // We are careful to *not* create a reference covering the "data" field, as
            // the field may be mutated concurrently (for example, if the last `Rc`
            // is dropped, the data field will be dropped in-place).
            Some(unsafe {
                let ptr = self.ptr.as_ptr();
                WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
            })
        }
    }

    /// Returns `true` if the two `Weak`s point to the same allocation (similar to
    /// [`ptr::eq`]), or if both don't point to any allocation
    /// (because they were created with `Weak::new()`).
    ///
    /// # Notes
    ///
    /// Since this compares pointers it means that `Weak::new()` will equal each
    /// other, even though they don't point to any allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let first_rc = Rc::new(5);
    /// let first = Rc::downgrade(&first_rc);
    /// let second = Rc::downgrade(&first_rc);
    ///
    /// assert!(first.ptr_eq(&second));
    ///
    /// let third_rc = Rc::new(5);
    /// let third = Rc::downgrade(&third_rc);
    ///
    /// assert!(!first.ptr_eq(&third));
    /// ```
    ///
    /// Comparing `Weak::new`.
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let first = Weak::new();
    /// let second = Weak::new();
    /// assert!(first.ptr_eq(&second));
    ///
    /// let third_rc = Rc::new(());
    /// let third = Rc::downgrade(&third_rc);
    /// assert!(!first.ptr_eq(&third));
    /// ```
    ///
    /// [`ptr::eq`]: core::ptr::eq
    #[inline]
    #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
    pub fn ptr_eq(&self, other: &Self) -> bool {
        self.ptr.as_ptr() == other.ptr.as_ptr()
    }
}
21756
#[stable(feature = "rc_weak", since = "1.4.0")]
impl<T: ?Sized> Drop for Weak<T> {
    /// Drops the `Weak` pointer.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo = Rc::new(Foo);
    /// let weak_foo = Rc::downgrade(&foo);
    /// let other_weak_foo = Weak::clone(&weak_foo);
    ///
    /// drop(weak_foo);   // Doesn't print anything
    /// drop(foo);        // Prints "dropped!"
    ///
    /// assert!(other_weak_foo.upgrade().is_none());
    /// ```
    fn drop(&mut self) {
        // A dangling `Weak` (from `Weak::new`) owns nothing to release.
        let inner = if let Some(inner) = self.inner() { inner } else { return };

        inner.dec_weak();
        // the weak count starts at 1, and will only go to zero if all
        // the strong pointers have disappeared.
        if inner.weak() == 0 {
            // SAFETY: the weak count reached zero, so no `Weak` references the
            // allocation any longer, and (since the strong pointers' implicit
            // weak is also gone) no `Rc` does either: it is safe to free it.
            unsafe {
                Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
            }
        }
    }
}
21796
#[stable(feature = "rc_weak", since = "1.4.0")]
impl<T: ?Sized> Clone for Weak<T> {
    /// Makes a clone of the `Weak` pointer that points to the same allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let weak_five = Rc::downgrade(&Rc::new(5));
    ///
    /// let _ = Weak::clone(&weak_five);
    /// ```
    #[inline]
    fn clone(&self) -> Weak<T> {
        // A dangling `Weak` (from `Weak::new`) has no counts to bump;
        // otherwise the weak count must account for the new handle.
        if let Some(inner) = self.inner() {
            inner.inc_weak()
        }
        Weak { ptr: self.ptr }
    }
}
21818
21819#[stable(feature = "rc_weak", since = "1.4.0")]
21820impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
21821    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
21822        write!(f, "(Weak)")
21823    }
21824}
21825
#[stable(feature = "downgraded_weak", since = "1.10.0")]
impl<T> Default for Weak<T> {
    /// Constructs a new `Weak<T>`, without allocating any memory.
    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`None`]: Option
    /// [`upgrade`]: Weak::upgrade
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Weak;
    ///
    /// let empty: Weak<i64> = Default::default();
    /// assert!(empty.upgrade().is_none());
    /// ```
    fn default() -> Weak<T> {
        // Delegates to `Weak::new`, which stores a sentinel and does not allocate.
        Weak::new()
    }
}
21846
// NOTE: `inc_strong`/`inc_weak` below abort on ref-count overflow to deal
// with mem::forget safely. In particular, if you mem::forget Rcs (or Weaks),
// the ref-count could otherwise overflow, and then you could free the
// allocation while outstanding Rcs (or Weaks) exist.
// We abort because this is such a degenerate scenario that we don't care about
// what happens -- no real program should ever experience this.
21852//
21853// This should have negligible overhead since you don't actually need to
21854// clone these much in Rust thanks to ownership and move-semantics.
21855
#[doc(hidden)]
/// Internal interface shared by `RcBox` and `WeakInner` for manipulating the
/// strong and weak reference counts.
trait RcInnerPtr {
    fn weak_ref(&self) -> &Cell<usize>;
    fn strong_ref(&self) -> &Cell<usize>;

    /// Current strong count.
    #[inline]
    fn strong(&self) -> usize {
        self.strong_ref().get()
    }

    /// Increments the strong count, aborting on overflow.
    #[inline]
    fn inc_strong(&self) {
        let strong = self.strong();

        // We want to abort on overflow instead of dropping the value.
        // The reference count will never be zero when this is called;
        // nevertheless, we insert an abort here to hint LLVM at
        // an otherwise missed optimization.
        if strong == 0 || strong == usize::MAX {
            abort();
        }
        self.strong_ref().set(strong + 1);
    }

    /// Decrements the strong count (caller guarantees it is non-zero).
    #[inline]
    fn dec_strong(&self) {
        self.strong_ref().set(self.strong() - 1);
    }

    /// Current weak count (includes the implicit weak held by strong pointers).
    #[inline]
    fn weak(&self) -> usize {
        self.weak_ref().get()
    }

    /// Increments the weak count, aborting on overflow.
    #[inline]
    fn inc_weak(&self) {
        let weak = self.weak();

        // We want to abort on overflow instead of dropping the value.
        // The reference count will never be zero when this is called;
        // nevertheless, we insert an abort here to hint LLVM at
        // an otherwise missed optimization.
        if weak == 0 || weak == usize::MAX {
            abort();
        }
        self.weak_ref().set(weak + 1);
    }

    /// Decrements the weak count (caller guarantees it is non-zero).
    #[inline]
    fn dec_weak(&self) {
        self.weak_ref().set(self.weak() - 1);
    }
}
21909
impl<T: ?Sized> RcInnerPtr for RcBox<T> {
    // Trivial projections: an `RcBox` stores its counts inline.
    #[inline(always)]
    fn weak_ref(&self) -> &Cell<usize> {
        &self.weak
    }

    #[inline(always)]
    fn strong_ref(&self) -> &Cell<usize> {
        &self.strong
    }
}
21921
impl<'a> RcInnerPtr for WeakInner<'a> {
    // A `WeakInner` already borrows the counts directly, so just hand them out.
    #[inline(always)]
    fn weak_ref(&self) -> &Cell<usize> {
        self.weak
    }

    #[inline(always)]
    fn strong_ref(&self) -> &Cell<usize> {
        self.strong
    }
}
21933
21934#[stable(feature = "rust1", since = "1.0.0")]
21935impl<T: ?Sized> borrow::Borrow<T> for Rc<T> {
21936    fn borrow(&self) -> &T {
21937        &**self
21938    }
21939}
21940
21941#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
21942impl<T: ?Sized> AsRef<T> for Rc<T> {
21943    fn as_ref(&self) -> &T {
21944        &**self
21945    }
21946}
21947
// `Rc<T>` is a pointer to a heap allocation: moving the handle never moves
// the pointee, so it is unconditionally `Unpin`.
#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized> Unpin for Rc<T> {}
21950
/// Get the offset within an `RcBox` for the payload behind a pointer.
///
/// Returns the number of bytes from the start of the allocation to the
/// `value` field (the header size padded up to the payload's alignment;
/// see `data_offset_align`).
///
/// # Safety
///
/// The pointer must point to (and have valid metadata for) a previously
/// valid instance of T, but the T is allowed to be dropped.
unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
    // Align the unsized value to the end of the RcBox.
    // Because RcBox is repr(C), it will always be the last field in memory.
    // SAFETY: since the only unsized types possible are slices, trait objects,
    // and extern types, the input safety requirement is currently enough to
    // satisfy the requirements of align_of_val_raw; this is an implementation
    // detail of the language that may not be relied upon outside of std.
    unsafe { data_offset_align(align_of_val_raw(ptr)) }
}
21966
21967#[inline]
21968fn data_offset_align(align: usize) -> isize {
21969    let layout = Layout::new::<RcBox<()>>();
21970    (layout.size() + layout.padding_needed_for(align)) as isize
21971}
21972//! A dynamically-sized view into a contiguous sequence, `[T]`.
21973//!
21974//! *[See also the slice primitive type](slice).*
21975//!
21976//! Slices are a view into a block of memory represented as a pointer and a
21977//! length.
21978//!
21979//! ```
21980//! // slicing a Vec
21981//! let vec = vec![1, 2, 3];
21982//! let int_slice = &vec[..];
21983//! // coercing an array to a slice
21984//! let str_slice: &[&str] = &["one", "two", "three"];
21985//! ```
21986//!
21987//! Slices are either mutable or shared. The shared slice type is `&[T]`,
21988//! while the mutable slice type is `&mut [T]`, where `T` represents the element
21989//! type. For example, you can mutate the block of memory that a mutable slice
21990//! points to:
21991//!
21992//! ```
21993//! let x = &mut [1, 2, 3];
21994//! x[1] = 7;
21995//! assert_eq!(x, &[1, 7, 3]);
21996//! ```
21997//!
21998//! Here are some of the things this module contains:
21999//!
22000//! ## Structs
22001//!
22002//! There are several structs that are useful for slices, such as [`Iter`], which
22003//! represents iteration over a slice.
22004//!
22005//! ## Trait Implementations
22006//!
22007//! There are several implementations of common traits for slices. Some examples
22008//! include:
22009//!
22010//! * [`Clone`]
22011//! * [`Eq`], [`Ord`] - for slices whose element type are [`Eq`] or [`Ord`].
22012//! * [`Hash`] - for slices whose element type is [`Hash`].
22013//!
22014//! ## Iteration
22015//!
22016//! The slices implement `IntoIterator`. The iterator yields references to the
22017//! slice elements.
22018//!
22019//! ```
22020//! let numbers = &[0, 1, 2];
22021//! for n in numbers {
22022//!     println!("{} is a number!", n);
22023//! }
22024//! ```
22025//!
22026//! The mutable slice yields mutable references to the elements:
22027//!
22028//! ```
22029//! let mut scores = [7, 8, 9];
22030//! for score in &mut scores[..] {
22031//!     *score += 1;
22032//! }
22033//! ```
22034//!
22035//! This iterator yields mutable references to the slice's elements, so while
22036//! the element type of the slice is `i32`, the element type of the iterator is
22037//! `&mut i32`.
22038//!
22039//! * [`.iter`] and [`.iter_mut`] are the explicit methods to return the default
22040//!   iterators.
22041//! * Further methods that return iterators are [`.split`], [`.splitn`],
22042//!   [`.chunks`], [`.windows`] and more.
22043//!
22044//! [`Hash`]: core::hash::Hash
22045//! [`.iter`]: slice::iter
22046//! [`.iter_mut`]: slice::iter_mut
22047//! [`.split`]: slice::split
22048//! [`.splitn`]: slice::splitn
22049//! [`.chunks`]: slice::chunks
22050//! [`.windows`]: slice::windows
22051#![stable(feature = "rust1", since = "1.0.0")]
22052// Many of the usings in this module are only used in the test configuration.
22053// It's cleaner to just turn off the unused_imports warning than to fix them.
22054#![cfg_attr(test, allow(unused_imports, dead_code))]
22055
22056use core::borrow::{Borrow, BorrowMut};
22057use core::cmp::Ordering::{self, Less};
22058use core::mem::{self, size_of};
22059use core::ptr;
22060
22061use crate::alloc::{Allocator, Global};
22062use crate::borrow::ToOwned;
22063use crate::boxed::Box;
22064use crate::vec::Vec;
22065
22066#[unstable(feature = "slice_range", issue = "76393")]
22067pub use core::slice::range;
22068#[unstable(feature = "array_chunks", issue = "74985")]
22069pub use core::slice::ArrayChunks;
22070#[unstable(feature = "array_chunks", issue = "74985")]
22071pub use core::slice::ArrayChunksMut;
22072#[unstable(feature = "array_windows", issue = "75027")]
22073pub use core::slice::ArrayWindows;
22074#[stable(feature = "slice_get_slice", since = "1.28.0")]
22075pub use core::slice::SliceIndex;
22076#[stable(feature = "from_ref", since = "1.28.0")]
22077pub use core::slice::{from_mut, from_ref};
22078#[stable(feature = "rust1", since = "1.0.0")]
22079pub use core::slice::{from_raw_parts, from_raw_parts_mut};
22080#[stable(feature = "rust1", since = "1.0.0")]
22081pub use core::slice::{Chunks, Windows};
22082#[stable(feature = "chunks_exact", since = "1.31.0")]
22083pub use core::slice::{ChunksExact, ChunksExactMut};
22084#[stable(feature = "rust1", since = "1.0.0")]
22085pub use core::slice::{ChunksMut, Split, SplitMut};
22086#[unstable(feature = "slice_group_by", issue = "80552")]
22087pub use core::slice::{GroupBy, GroupByMut};
22088#[stable(feature = "rust1", since = "1.0.0")]
22089pub use core::slice::{Iter, IterMut};
22090#[stable(feature = "rchunks", since = "1.31.0")]
22091pub use core::slice::{RChunks, RChunksExact, RChunksExactMut, RChunksMut};
22092#[stable(feature = "slice_rsplit", since = "1.27.0")]
22093pub use core::slice::{RSplit, RSplitMut};
22094#[stable(feature = "rust1", since = "1.0.0")]
22095pub use core::slice::{RSplitN, RSplitNMut, SplitN, SplitNMut};
22096
22097////////////////////////////////////////////////////////////////////////////////
22098// Basic slice extension methods
22099////////////////////////////////////////////////////////////////////////////////
22100
22101// HACK(japaric) needed for the implementation of `vec!` macro during testing
22102// N.B., see the `hack` module in this file for more details.
22103#[cfg(test)]
22104pub use hack::into_vec;
22105
22106// HACK(japaric) needed for the implementation of `Vec::clone` during testing
22107// N.B., see the `hack` module in this file for more details.
22108#[cfg(test)]
22109pub use hack::to_vec;
22110
22111// HACK(japaric): With cfg(test) `impl [T]` is not available, these three
22112// functions are actually methods that are in `impl [T]` but not in
22113// `core::slice::SliceExt` - we need to supply these functions for the
22114// `test_permutations` test
mod hack {
    use core::alloc::Allocator;

    use crate::boxed::Box;
    use crate::vec::Vec;

    // We shouldn't add inline attribute to this since this is used in
    // `vec!` macro mostly and causes perf regression. See #71204 for
    // discussion and perf results.
    /// Converts a boxed slice into a `Vec`, reusing the existing allocation.
    pub fn into_vec<T, A: Allocator>(b: Box<[T], A>) -> Vec<T, A> {
        // SAFETY: the box owns `len` initialized elements in an allocation
        // of capacity `len` made with `alloc`, which is exactly the contract
        // `Vec::from_raw_parts_in` requires.
        unsafe {
            let len = b.len();
            let (b, alloc) = Box::into_raw_with_allocator(b);
            Vec::from_raw_parts_in(b as *mut T, len, len, alloc)
        }
    }

    /// Copies `s` into a new `Vec` allocated with `alloc`, dispatching via
    /// `ConvertVec` (bulk memcpy for `Copy` types, per-element clone otherwise).
    #[inline]
    pub fn to_vec<T: ConvertVec, A: Allocator>(s: &[T], alloc: A) -> Vec<T, A> {
        T::to_vec(s, alloc)
    }

    /// Specialization helper for `to_vec`: the blanket `Clone` impl clones
    /// element by element, and the `Copy` impl overrides it with a memcpy.
    pub trait ConvertVec {
        fn to_vec<A: Allocator>(s: &[Self], alloc: A) -> Vec<Self, A>
        where
            Self: Sized;
    }

    impl<T: Clone> ConvertVec for T {
        #[inline]
        default fn to_vec<A: Allocator>(s: &[Self], alloc: A) -> Vec<Self, A> {
            // Panic guard: if a `clone` below panics, `Drop` shrinks the
            // vector's length to the initialized prefix so only valid
            // elements are dropped.
            struct DropGuard<'a, T, A: Allocator> {
                vec: &'a mut Vec<T, A>,
                num_init: usize,
            }
            impl<'a, T, A: Allocator> Drop for DropGuard<'a, T, A> {
                #[inline]
                fn drop(&mut self) {
                    // SAFETY:
                    // items were marked initialized in the loop below
                    unsafe {
                        self.vec.set_len(self.num_init);
                    }
                }
            }
            let mut vec = Vec::with_capacity_in(s.len(), alloc);
            let mut guard = DropGuard { vec: &mut vec, num_init: 0 };
            let slots = guard.vec.spare_capacity_mut();
            // .take(slots.len()) is necessary for LLVM to remove bounds checks
            // and has better codegen than zip.
            for (i, b) in s.iter().enumerate().take(slots.len()) {
                // Record `i` before writing slot `i`: if `clone` panics,
                // slot `i` does not yet hold an initialized value.
                guard.num_init = i;
                slots[i].write(b.clone());
            }
            core::mem::forget(guard);
            // SAFETY:
            // the vec was allocated and initialized above to at least this length.
            unsafe {
                vec.set_len(s.len());
            }
            vec
        }
    }

    impl<T: Copy> ConvertVec for T {
        #[inline]
        fn to_vec<A: Allocator>(s: &[Self], alloc: A) -> Vec<Self, A> {
            let mut v = Vec::with_capacity_in(s.len(), alloc);
            // SAFETY:
            // allocated above with the capacity of `s`, and initialized to `s.len()` by
            // `copy_to_nonoverlapping` below.
            unsafe {
                s.as_ptr().copy_to_nonoverlapping(v.as_mut_ptr(), s.len());
                v.set_len(s.len());
            }
            v
        }
    }
}
22194
#[lang = "slice_alloc"]
#[cfg(not(test))]
impl<T> [T] {
    /// Sorts the slice.
    ///
    /// This sort is stable (i.e., does not reorder equal elements) and *O*(*n* \* log(*n*)) worst-case.
    ///
    /// When applicable, unstable sorting is preferred because it is generally faster than stable
    /// sorting and it doesn't allocate auxiliary memory.
    /// See [`sort_unstable`](slice::sort_unstable).
    ///
    /// # Current implementation
    ///
    /// The current algorithm is an adaptive, iterative merge sort inspired by
    /// [timsort](https://en.wikipedia.org/wiki/Timsort).
    /// It is designed to be very fast in cases where the slice is nearly sorted, or consists of
    /// two or more sorted sequences concatenated one after another.
    ///
    /// Also, it allocates temporary storage half the size of `self`, but for short slices a
    /// non-allocating insertion sort is used instead.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut v = [-5, 4, 1, -3, 2];
    ///
    /// v.sort();
    /// assert!(v == [-5, -3, 1, 2, 4]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[inline]
    pub fn sort(&mut self)
    where
        T: Ord,
    {
        merge_sort(self, |a, b| a.lt(b));
    }

    /// Sorts the slice with a comparator function.
    ///
    /// This sort is stable (i.e., does not reorder equal elements) and *O*(*n* \* log(*n*)) worst-case.
    ///
    /// The comparator function must define a total ordering for the elements in the slice. If
    /// the ordering is not total, the order of the elements is unspecified. An order is a
    /// total order if it is (for all `a`, `b` and `c`):
    ///
    /// * total and antisymmetric: exactly one of `a < b`, `a == b` or `a > b` is true, and
    /// * transitive, `a < b` and `b < c` implies `a < c`. The same must hold for both `==` and `>`.
    ///
    /// For example, while [`f64`] doesn't implement [`Ord`] because `NaN != NaN`, we can use
    /// `partial_cmp` as our sort function when we know the slice doesn't contain a `NaN`.
    ///
    /// ```
    /// let mut floats = [5f64, 4.0, 1.0, 3.0, 2.0];
    /// floats.sort_by(|a, b| a.partial_cmp(b).unwrap());
    /// assert_eq!(floats, [1.0, 2.0, 3.0, 4.0, 5.0]);
    /// ```
    ///
    /// When applicable, unstable sorting is preferred because it is generally faster than stable
    /// sorting and it doesn't allocate auxiliary memory.
    /// See [`sort_unstable_by`](slice::sort_unstable_by).
    ///
    /// # Current implementation
    ///
    /// The current algorithm is an adaptive, iterative merge sort inspired by
    /// [timsort](https://en.wikipedia.org/wiki/Timsort).
    /// It is designed to be very fast in cases where the slice is nearly sorted, or consists of
    /// two or more sorted sequences concatenated one after another.
    ///
    /// Also, it allocates temporary storage half the size of `self`, but for short slices a
    /// non-allocating insertion sort is used instead.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut v = [5, 4, 1, 3, 2];
    /// v.sort_by(|a, b| a.cmp(b));
    /// assert!(v == [1, 2, 3, 4, 5]);
    ///
    /// // reverse sorting
    /// v.sort_by(|a, b| b.cmp(a));
    /// assert!(v == [5, 4, 3, 2, 1]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[inline]
    pub fn sort_by<F>(&mut self, mut compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        // `merge_sort` only needs a strict "less than" predicate.
        merge_sort(self, |a, b| compare(a, b) == Less);
    }

    /// Sorts the slice with a key extraction function.
    ///
    /// This sort is stable (i.e., does not reorder equal elements) and *O*(*m* \* *n* \* log(*n*))
    /// worst-case, where the key function is *O*(*m*).
    ///
    /// For expensive key functions (e.g. functions that are not simple property accesses or
    /// basic operations), [`sort_by_cached_key`](slice::sort_by_cached_key) is likely to be
    /// significantly faster, as it does not recompute element keys.
    ///
    /// When applicable, unstable sorting is preferred because it is generally faster than stable
    /// sorting and it doesn't allocate auxiliary memory.
    /// See [`sort_unstable_by_key`](slice::sort_unstable_by_key).
    ///
    /// # Current implementation
    ///
    /// The current algorithm is an adaptive, iterative merge sort inspired by
    /// [timsort](https://en.wikipedia.org/wiki/Timsort).
    /// It is designed to be very fast in cases where the slice is nearly sorted, or consists of
    /// two or more sorted sequences concatenated one after another.
    ///
    /// Also, it allocates temporary storage half the size of `self`, but for short slices a
    /// non-allocating insertion sort is used instead.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut v = [-5i32, 4, 1, -3, 2];
    ///
    /// v.sort_by_key(|k| k.abs());
    /// assert!(v == [1, 2, -3, 4, -5]);
    /// ```
    #[stable(feature = "slice_sort_by_key", since = "1.7.0")]
    #[inline]
    pub fn sort_by_key<K, F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        // Note: `f` is invoked on both operands of every comparison; use
        // `sort_by_cached_key` when the key function is expensive.
        merge_sort(self, |a, b| f(a).lt(&f(b)));
    }

    /// Sorts the slice with a key extraction function.
    ///
    /// During sorting, the key function is called only once per element.
    ///
    /// This sort is stable (i.e., does not reorder equal elements) and *O*(*m* \* *n* + *n* \* log(*n*))
    /// worst-case, where the key function is *O*(*m*).
    ///
    /// For simple key functions (e.g., functions that are property accesses or
    /// basic operations), [`sort_by_key`](slice::sort_by_key) is likely to be
    /// faster.
    ///
    /// # Current implementation
    ///
    /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
    /// which combines the fast average case of randomized quicksort with the fast worst case of
    /// heapsort, while achieving linear time on slices with certain patterns. It uses some
    /// randomization to avoid degenerate cases, but with a fixed seed to always provide
    /// deterministic behavior.
    ///
    /// In the worst case, the algorithm allocates temporary storage in a `Vec<(K, usize)>` the
    /// length of the slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut v = [-5i32, 4, 32, -3, 2];
    ///
    /// v.sort_by_cached_key(|k| k.to_string());
    /// assert!(v == [-3, -5, 2, 32, 4]);
    /// ```
    ///
    /// [pdqsort]: https://github.com/orlp/pdqsort
    #[stable(feature = "slice_sort_by_cached_key", since = "1.34.0")]
    #[inline]
    pub fn sort_by_cached_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        // Helper macro for indexing our vector by the smallest possible type, to reduce allocation.
        macro_rules! sort_by_key {
            ($t:ty, $slice:ident, $f:ident) => {{
                let mut indices: Vec<_> =
                    $slice.iter().map($f).enumerate().map(|(i, k)| (k, i as $t)).collect();
                // The elements of `indices` are unique, as they are indexed, so any sort will be
                // stable with respect to the original slice. We use `sort_unstable` here because
                // it requires less memory allocation.
                indices.sort_unstable();
                for i in 0..$slice.len() {
                    let mut index = indices[i].1;
                    // Positions before `i` have already been swapped into place;
                    // follow the chain to find where that element now lives.
                    while (index as usize) < i {
                        index = indices[index as usize].1;
                    }
                    indices[i].1 = index;
                    $slice.swap(i, index as usize);
                }
            }};
        }

        let sz_u8 = mem::size_of::<(K, u8)>();
        let sz_u16 = mem::size_of::<(K, u16)>();
        let sz_u32 = mem::size_of::<(K, u32)>();
        let sz_usize = mem::size_of::<(K, usize)>();

        let len = self.len();
        if len < 2 {
            return;
        }
        // Pick the narrowest index type that both fits `len` and actually
        // shrinks the `(K, index)` pair.
        if sz_u8 < sz_u16 && len <= (u8::MAX as usize) {
            return sort_by_key!(u8, self, f);
        }
        if sz_u16 < sz_u32 && len <= (u16::MAX as usize) {
            return sort_by_key!(u16, self, f);
        }
        if sz_u32 < sz_usize && len <= (u32::MAX as usize) {
            return sort_by_key!(u32, self, f);
        }
        sort_by_key!(usize, self, f)
    }

    /// Copies `self` into a new `Vec`.
    ///
    /// # Examples
    ///
    /// ```
    /// let s = [10, 40, 30];
    /// let x = s.to_vec();
    /// // Here, `s` and `x` can be modified independently.
    /// ```
    #[rustc_conversion_suggestion]
    #[stable(feature = "rust1", since = "1.0.0")]
    #[inline]
    pub fn to_vec(&self) -> Vec<T>
    where
        T: Clone,
    {
        self.to_vec_in(Global)
    }

    /// Copies `self` into a new `Vec` with an allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::System;
    ///
    /// let s = [10, 40, 30];
    /// let x = s.to_vec_in(System);
    /// // Here, `s` and `x` can be modified independently.
    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn to_vec_in<A: Allocator>(&self, alloc: A) -> Vec<T, A>
    where
        T: Clone,
    {
        // N.B., see the `hack` module in this file for more details.
        hack::to_vec(self, alloc)
    }

    /// Converts `self` into a vector without clones or allocation.
    ///
    /// The resulting vector can be converted back into a box via
    /// `Vec<T>`'s `into_boxed_slice` method.
    ///
    /// # Examples
    ///
    /// ```
    /// let s: Box<[i32]> = Box::new([10, 40, 30]);
    /// let x = s.into_vec();
    /// // `s` cannot be used anymore because it has been converted into `x`.
    ///
    /// assert_eq!(x, vec![10, 40, 30]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[inline]
    pub fn into_vec<A: Allocator>(self: Box<Self, A>) -> Vec<T, A> {
        // N.B., see the `hack` module in this file for more details.
        hack::into_vec(self)
    }

    /// Creates a vector by repeating a slice `n` times.
    ///
    /// # Panics
    ///
    /// This function will panic if the capacity would overflow.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// assert_eq!([1, 2].repeat(3), vec![1, 2, 1, 2, 1, 2]);
    /// ```
    ///
    /// A panic upon overflow:
    ///
    /// ```should_panic
    /// // this will panic at runtime
    /// b"0123456789abcdef".repeat(usize::MAX);
    /// ```
    #[stable(feature = "repeat_generic_slice", since = "1.40.0")]
    pub fn repeat(&self, n: usize) -> Vec<T>
    where
        T: Copy,
    {
        if n == 0 {
            return Vec::new();
        }

        // If `n` is larger than zero, it can be split as
        // `n = 2^expn + rem (2^expn > rem, expn >= 0, rem >= 0)`.
        // `2^expn` is the number represented by the leftmost '1' bit of `n`,
        // and `rem` is the remaining part of `n`.

        // Using `Vec` to access `set_len()`.
        let capacity = self.len().checked_mul(n).expect("capacity overflow");
        let mut buf = Vec::with_capacity(capacity);

        // `2^expn` repetition is done by doubling `buf` `expn`-times.
        buf.extend(self);
        {
            let mut m = n >> 1;
            // If `m > 0`, there are remaining bits up to the leftmost '1'.
            while m > 0 {
                // `buf.extend(buf)`:
                unsafe {
                    ptr::copy_nonoverlapping(
                        buf.as_ptr(),
                        (buf.as_mut_ptr() as *mut T).add(buf.len()),
                        buf.len(),
                    );
                    // `buf` has capacity of `self.len() * n`.
                    let buf_len = buf.len();
                    buf.set_len(buf_len * 2);
                }

                m >>= 1;
            }
        }

        // `rem` (`= n - 2^expn`) repetition is done by copying
        // first `rem` repetitions from `buf` itself.
        let rem_len = capacity - buf.len(); // `self.len() * rem`
        if rem_len > 0 {
            // `buf.extend(buf[0 .. rem_len])`:
            unsafe {
                // This is non-overlapping since `2^expn > rem`.
                ptr::copy_nonoverlapping(
                    buf.as_ptr(),
                    (buf.as_mut_ptr() as *mut T).add(buf.len()),
                    rem_len,
                );
                // `buf.len() + rem_len` equals to `buf.capacity()` (`= self.len() * n`).
                buf.set_len(capacity);
            }
        }
        buf
    }

    /// Flattens a slice of `T` into a single value `Self::Output`.
    ///
    /// # Examples
    ///
    /// ```
    /// assert_eq!(["hello", "world"].concat(), "helloworld");
    /// assert_eq!([[1, 2], [3, 4]].concat(), [1, 2, 3, 4]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn concat<Item: ?Sized>(&self) -> <Self as Concat<Item>>::Output
    where
        Self: Concat<Item>,
    {
        Concat::concat(self)
    }

    /// Flattens a slice of `T` into a single value `Self::Output`, placing a
    /// given separator between each.
    ///
    /// # Examples
    ///
    /// ```
    /// assert_eq!(["hello", "world"].join(" "), "hello world");
    /// assert_eq!([[1, 2], [3, 4]].join(&0), [1, 2, 0, 3, 4]);
    /// assert_eq!([[1, 2], [3, 4]].join(&[0, 0][..]), [1, 2, 0, 0, 3, 4]);
    /// ```
    #[stable(feature = "rename_connect_to_join", since = "1.3.0")]
    pub fn join<Separator>(&self, sep: Separator) -> <Self as Join<Separator>>::Output
    where
        Self: Join<Separator>,
    {
        Join::join(self, sep)
    }

    /// Flattens a slice of `T` into a single value `Self::Output`, placing a
    /// given separator between each.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![allow(deprecated)]
    /// assert_eq!(["hello", "world"].connect(" "), "hello world");
    /// assert_eq!([[1, 2], [3, 4]].connect(&0), [1, 2, 0, 3, 4]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_deprecated(since = "1.3.0", reason = "renamed to join")]
    pub fn connect<Separator>(&self, sep: Separator) -> <Self as Join<Separator>>::Output
    where
        Self: Join<Separator>,
    {
        Join::join(self, sep)
    }
}
22603
22604#[lang = "slice_u8_alloc"]
22605#[cfg(not(test))]
22606impl [u8] {
22607    /// Returns a vector containing a copy of this slice where each byte
22608    /// is mapped to its ASCII upper case equivalent.
22609    ///
22610    /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
22611    /// but non-ASCII letters are unchanged.
22612    ///
22613    /// To uppercase the value in-place, use [`make_ascii_uppercase`].
22614    ///
22615    /// [`make_ascii_uppercase`]: slice::make_ascii_uppercase
22616    #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
22617    #[inline]
22618    pub fn to_ascii_uppercase(&self) -> Vec<u8> {
22619        let mut me = self.to_vec();
22620        me.make_ascii_uppercase();
22621        me
22622    }
22623
22624    /// Returns a vector containing a copy of this slice where each byte
22625    /// is mapped to its ASCII lower case equivalent.
22626    ///
22627    /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
22628    /// but non-ASCII letters are unchanged.
22629    ///
22630    /// To lowercase the value in-place, use [`make_ascii_lowercase`].
22631    ///
22632    /// [`make_ascii_lowercase`]: slice::make_ascii_lowercase
22633    #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
22634    #[inline]
22635    pub fn to_ascii_lowercase(&self) -> Vec<u8> {
22636        let mut me = self.to_vec();
22637        me.make_ascii_lowercase();
22638        me
22639    }
22640}
22641
22642////////////////////////////////////////////////////////////////////////////////
22643// Extension traits for slices over specific kinds of data
22644////////////////////////////////////////////////////////////////////////////////
22645
22646/// Helper trait for [`[T]::concat`](slice::concat).
22647///
22648/// Note: the `Item` type parameter is not used in this trait,
22649/// but it allows impls to be more generic.
22650/// Without it, we get this error:
22651///
22652/// ```error
/// error[E0207]: the type parameter `T` is not constrained by the impl trait, self type, or predicates
22654///    --> src/liballoc/slice.rs:608:6
22655///     |
22656/// 608 | impl<T: Clone, V: Borrow<[T]>> Concat for [V] {
22657///     |      ^ unconstrained type parameter
22658/// ```
22659///
22660/// This is because there could exist `V` types with multiple `Borrow<[_]>` impls,
22661/// such that multiple `T` types would apply:
22662///
22663/// ```
22664/// # #[allow(dead_code)]
22665/// pub struct Foo(Vec<u32>, Vec<String>);
22666///
22667/// impl std::borrow::Borrow<[u32]> for Foo {
22668///     fn borrow(&self) -> &[u32] { &self.0 }
22669/// }
22670///
22671/// impl std::borrow::Borrow<[String]> for Foo {
22672///     fn borrow(&self) -> &[String] { &self.1 }
22673/// }
22674/// ```
#[unstable(feature = "slice_concat_trait", issue = "27747")]
pub trait Concat<Item: ?Sized> {
    #[unstable(feature = "slice_concat_trait", issue = "27747")]
    /// The resulting type after concatenation.
    type Output;

    /// Implementation of [`[T]::concat`](slice::concat)
    #[unstable(feature = "slice_concat_trait", issue = "27747")]
    fn concat(slice: &Self) -> Self::Output;
}
22685
/// Helper trait for [`[T]::join`](slice::join)
#[unstable(feature = "slice_concat_trait", issue = "27747")]
pub trait Join<Separator> {
    #[unstable(feature = "slice_concat_trait", issue = "27747")]
    /// The resulting type after concatenation.
    type Output;

    /// Implementation of [`[T]::join`](slice::join)
    #[unstable(feature = "slice_concat_trait", issue = "27747")]
    fn join(slice: &Self, sep: Separator) -> Self::Output;
}
22697
22698#[unstable(feature = "slice_concat_ext", issue = "27747")]
22699impl<T: Clone, V: Borrow<[T]>> Concat<T> for [V] {
22700    type Output = Vec<T>;
22701
22702    fn concat(slice: &Self) -> Vec<T> {
22703        let size = slice.iter().map(|slice| slice.borrow().len()).sum();
22704        let mut result = Vec::with_capacity(size);
22705        for v in slice {
22706            result.extend_from_slice(v.borrow())
22707        }
22708        result
22709    }
22710}
22711
22712#[unstable(feature = "slice_concat_ext", issue = "27747")]
22713impl<T: Clone, V: Borrow<[T]>> Join<&T> for [V] {
22714    type Output = Vec<T>;
22715
22716    fn join(slice: &Self, sep: &T) -> Vec<T> {
22717        let mut iter = slice.iter();
22718        let first = match iter.next() {
22719            Some(first) => first,
22720            None => return vec![],
22721        };
22722        let size = slice.iter().map(|v| v.borrow().len()).sum::<usize>() + slice.len() - 1;
22723        let mut result = Vec::with_capacity(size);
22724        result.extend_from_slice(first.borrow());
22725
22726        for v in iter {
22727            result.push(sep.clone());
22728            result.extend_from_slice(v.borrow())
22729        }
22730        result
22731    }
22732}
22733
22734#[unstable(feature = "slice_concat_ext", issue = "27747")]
22735impl<T: Clone, V: Borrow<[T]>> Join<&[T]> for [V] {
22736    type Output = Vec<T>;
22737
22738    fn join(slice: &Self, sep: &[T]) -> Vec<T> {
22739        let mut iter = slice.iter();
22740        let first = match iter.next() {
22741            Some(first) => first,
22742            None => return vec![],
22743        };
22744        let size =
22745            slice.iter().map(|v| v.borrow().len()).sum::<usize>() + sep.len() * (slice.len() - 1);
22746        let mut result = Vec::with_capacity(size);
22747        result.extend_from_slice(first.borrow());
22748
22749        for v in iter {
22750            result.extend_from_slice(sep);
22751            result.extend_from_slice(v.borrow())
22752        }
22753        result
22754    }
22755}
22756
22757////////////////////////////////////////////////////////////////////////////////
22758// Standard trait implementations for slices
22759////////////////////////////////////////////////////////////////////////////////
22760
22761#[stable(feature = "rust1", since = "1.0.0")]
22762impl<T> Borrow<[T]> for Vec<T> {
22763    fn borrow(&self) -> &[T] {
22764        &self[..]
22765    }
22766}
22767
22768#[stable(feature = "rust1", since = "1.0.0")]
22769impl<T> BorrowMut<[T]> for Vec<T> {
22770    fn borrow_mut(&mut self) -> &mut [T] {
22771        &mut self[..]
22772    }
22773}
22774
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone> ToOwned for [T] {
    type Owned = Vec<T>;
    // Normal builds: delegate to the inherent `to_vec`.
    #[cfg(not(test))]
    fn to_owned(&self) -> Vec<T> {
        self.to_vec()
    }

    // Test builds: the inherent method is unavailable, so route through the
    // crate's `hack` module with the global allocator.
    // NOTE(review): assumes `hack::to_vec(self, Global)` is semantically
    // identical to `self.to_vec()` — confirm against the `hack` module.
    #[cfg(test)]
    fn to_owned(&self) -> Vec<T> {
        hack::to_vec(self, Global)
    }

    // Clone `self` into `target`, reusing `target`'s existing allocation and
    // the resources of any elements it already holds.
    fn clone_into(&self, target: &mut Vec<T>) {
        // drop anything in target that will not be overwritten
        target.truncate(self.len());

        // target.len <= self.len due to the truncate above, so the
        // slices here are always in-bounds.
        let (init, tail) = self.split_at(target.len());

        // reuse the contained values' allocations/resources.
        target.clone_from_slice(init);
        target.extend_from_slice(tail);
    }
}
22801
22802////////////////////////////////////////////////////////////////////////////////
22803// Sorting
22804////////////////////////////////////////////////////////////////////////////////
22805
/// Inserts `v[0]` into pre-sorted sequence `v[1..]` so that whole `v[..]` becomes sorted.
///
/// This is the integral subroutine of insertion sort.
///
/// `is_less` is the "less than" comparison; the insertion position is chosen
/// so that equal elements keep their original relative order (stability).
fn insert_head<T, F>(v: &mut [T], is_less: &mut F)
where
    F: FnMut(&T, &T) -> bool,
{
    // Nothing to do unless `v[0]` is actually out of place.
    if v.len() >= 2 && is_less(&v[1], &v[0]) {
        unsafe {
            // There are three ways to implement insertion here:
            //
            // 1. Swap adjacent elements until the first one gets to its final destination.
            //    However, this way we copy data around more than is necessary. If elements are big
            //    structures (costly to copy), this method will be slow.
            //
            // 2. Iterate until the right place for the first element is found. Then shift the
            //    elements succeeding it to make room for it and finally place it into the
            //    remaining hole. This is a good method.
            //
            // 3. Copy the first element into a temporary variable. Iterate until the right place
            //    for it is found. As we go along, copy every traversed element into the slot
            //    preceding it. Finally, copy data from the temporary variable into the remaining
            //    hole. This method is very good. Benchmarks demonstrated slightly better
            //    performance than with the 2nd method.
            //
            // All methods were benchmarked, and the 3rd showed best results. So we chose that one.
            //
            // `tmp` is a shallow bitwise copy of `v[0]`; `ManuallyDrop` ensures it is never
            // dropped here, so the value is not duplicated — `hole`'s drop copies it back.
            let mut tmp = mem::ManuallyDrop::new(ptr::read(&v[0]));

            // Intermediate state of the insertion process is always tracked by `hole`, which
            // serves two purposes:
            // 1. Protects integrity of `v` from panics in `is_less`.
            // 2. Fills the remaining hole in `v` in the end.
            //
            // Panic safety:
            //
            // If `is_less` panics at any point during the process, `hole` will get dropped and
            // fill the hole in `v` with `tmp`, thus ensuring that `v` still holds every object it
            // initially held exactly once.
            let mut hole = InsertionHole { src: &mut *tmp, dest: &mut v[1] };
            // `v[1]` moves down to slot 0; the hole is now at index 1.
            ptr::copy_nonoverlapping(&v[1], &mut v[0], 1);

            for i in 2..v.len() {
                if !is_less(&v[i], &*tmp) {
                    break;
                }
                // Shift `v[i]` down one slot and advance the hole to index `i`.
                ptr::copy_nonoverlapping(&v[i], &mut v[i - 1], 1);
                hole.dest = &mut v[i];
            }
            // `hole` gets dropped and thus copies `tmp` into the remaining hole in `v`.
        }
    }

    // When dropped, copies from `src` into `dest`.
    struct InsertionHole<T> {
        src: *mut T,
        dest: *mut T,
    }

    impl<T> Drop for InsertionHole<T> {
        fn drop(&mut self) {
            unsafe {
                ptr::copy_nonoverlapping(self.src, self.dest, 1);
            }
        }
    }
}
22872
/// Merges non-decreasing runs `v[..mid]` and `v[mid..]` using `buf` as temporary storage, and
/// stores the result into `v[..]`.
///
/// The merge is stable: on ties the element from the left run is emitted first.
///
/// # Safety
///
/// The two slices must be non-empty and `mid` must be in bounds. Buffer `buf` must be long enough
/// to hold a copy of the shorter slice. Also, `T` must not be a zero-sized type.
unsafe fn merge<T, F>(v: &mut [T], mid: usize, buf: *mut T, is_less: &mut F)
where
    F: FnMut(&T, &T) -> bool,
{
    let len = v.len();
    let v = v.as_mut_ptr();
    let (v_mid, v_end) = unsafe { (v.add(mid), v.add(len)) };

    // The merge process first copies the shorter run into `buf`. Then it traces the newly copied
    // run and the longer run forwards (or backwards), comparing their next unconsumed elements and
    // copying the lesser (or greater) one into `v`.
    //
    // As soon as the shorter run is fully consumed, the process is done. If the longer run gets
    // consumed first, then we must copy whatever is left of the shorter run into the remaining
    // hole in `v`.
    //
    // Intermediate state of the process is always tracked by `hole`, which serves two purposes:
    // 1. Protects integrity of `v` from panics in `is_less`.
    // 2. Fills the remaining hole in `v` if the longer run gets consumed first.
    //
    // Panic safety:
    //
    // If `is_less` panics at any point during the process, `hole` will get dropped and fill the
    // hole in `v` with the unconsumed range in `buf`, thus ensuring that `v` still holds every
    // object it initially held exactly once.
    let mut hole;

    if mid <= len - mid {
        // The left run is shorter.
        unsafe {
            ptr::copy_nonoverlapping(v, buf, mid);
            hole = MergeHole { start: buf, end: buf.add(mid), dest: v };
        }

        // Initially, these pointers point to the beginnings of their arrays.
        // `left` and `out` alias fields of `hole` so the hole's extent stays
        // accurate even if `is_less` panics mid-merge.
        let left = &mut hole.start;
        let mut right = v_mid;
        let out = &mut hole.dest;

        while *left < hole.end && right < v_end {
            // Consume the lesser side.
            // If equal, prefer the left run to maintain stability.
            unsafe {
                let to_copy = if is_less(&*right, &**left) {
                    get_and_increment(&mut right)
                } else {
                    get_and_increment(left)
                };
                ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1);
            }
        }
    } else {
        // The right run is shorter.
        unsafe {
            ptr::copy_nonoverlapping(v_mid, buf, len - mid);
            hole = MergeHole { start: buf, end: buf.add(len - mid), dest: v_mid };
        }

        // Initially, these pointers point past the ends of their arrays.
        // `left` and `right` alias fields of `hole` for the same panic-safety
        // reason as in the forward branch above.
        let left = &mut hole.dest;
        let right = &mut hole.end;
        let mut out = v_end;

        while v < *left && buf < *right {
            // Consume the greater side.
            // If equal, prefer the right run to maintain stability.
            unsafe {
                let to_copy = if is_less(&*right.offset(-1), &*left.offset(-1)) {
                    decrement_and_get(left)
                } else {
                    decrement_and_get(right)
                };
                ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1);
            }
        }
    }
    // Finally, `hole` gets dropped. If the shorter run was not fully consumed, whatever remains of
    // it will now be copied into the hole in `v`.

    // Returns the current pointer, then advances it by one element
    // (post-increment). Caller must ensure the new pointer stays in bounds
    // of (or one past) its allocation.
    unsafe fn get_and_increment<T>(ptr: &mut *mut T) -> *mut T {
        let old = *ptr;
        *ptr = unsafe { ptr.offset(1) };
        old
    }

    // Moves the pointer back by one element, then returns it
    // (pre-decrement). Caller must ensure the new pointer stays in bounds.
    unsafe fn decrement_and_get<T>(ptr: &mut *mut T) -> *mut T {
        *ptr = unsafe { ptr.offset(-1) };
        *ptr
    }

    // When dropped, copies the range `start..end` into `dest..`.
    struct MergeHole<T> {
        start: *mut T,
        end: *mut T,
        dest: *mut T,
    }

    impl<T> Drop for MergeHole<T> {
        fn drop(&mut self) {
            // `T` is not a zero-sized type, so it's okay to divide by its size.
            let len = (self.end as usize - self.start as usize) / mem::size_of::<T>();
            unsafe {
                ptr::copy_nonoverlapping(self.start, self.dest, len);
            }
        }
    }
}
22987
22988/// This merge sort borrows some (but not all) ideas from TimSort, which is described in detail
22989/// [here](http://svn.python.org/projects/python/trunk/Objects/listsort.txt).
22990///
22991/// The algorithm identifies strictly descending and non-descending subsequences, which are called
22992/// natural runs. There is a stack of pending runs yet to be merged. Each newly found run is pushed
22993/// onto the stack, and then some pairs of adjacent runs are merged until these two invariants are
22994/// satisfied:
22995///
22996/// 1. for every `i` in `1..runs.len()`: `runs[i - 1].len > runs[i].len`
22997/// 2. for every `i` in `2..runs.len()`: `runs[i - 2].len > runs[i - 1].len + runs[i].len`
22998///
22999/// The invariants ensure that the total running time is *O*(*n* \* log(*n*)) worst-case.
23000fn merge_sort<T, F>(v: &mut [T], mut is_less: F)
23001where
23002    F: FnMut(&T, &T) -> bool,
23003{
23004    // Slices of up to this length get sorted using insertion sort.
23005    const MAX_INSERTION: usize = 20;
23006    // Very short runs are extended using insertion sort to span at least this many elements.
23007    const MIN_RUN: usize = 10;
23008
23009    // Sorting has no meaningful behavior on zero-sized types.
23010    if size_of::<T>() == 0 {
23011        return;
23012    }
23013
23014    let len = v.len();
23015
23016    // Short arrays get sorted in-place via insertion sort to avoid allocations.
23017    if len <= MAX_INSERTION {
23018        if len >= 2 {
23019            for i in (0..len - 1).rev() {
23020                insert_head(&mut v[i..], &mut is_less);
23021            }
23022        }
23023        return;
23024    }
23025
23026    // Allocate a buffer to use as scratch memory. We keep the length 0 so we can keep in it
23027    // shallow copies of the contents of `v` without risking the dtors running on copies if
23028    // `is_less` panics. When merging two sorted runs, this buffer holds a copy of the shorter run,
23029    // which will always have length at most `len / 2`.
23030    let mut buf = Vec::with_capacity(len / 2);
23031
23032    // In order to identify natural runs in `v`, we traverse it backwards. That might seem like a
23033    // strange decision, but consider the fact that merges more often go in the opposite direction
23034    // (forwards). According to benchmarks, merging forwards is slightly faster than merging
23035    // backwards. To conclude, identifying runs by traversing backwards improves performance.
23036    let mut runs = vec![];
23037    let mut end = len;
23038    while end > 0 {
23039        // Find the next natural run, and reverse it if it's strictly descending.
23040        let mut start = end - 1;
23041        if start > 0 {
23042            start -= 1;
23043            unsafe {
23044                if is_less(v.get_unchecked(start + 1), v.get_unchecked(start)) {
23045                    while start > 0 && is_less(v.get_unchecked(start), v.get_unchecked(start - 1)) {
23046                        start -= 1;
23047                    }
23048                    v[start..end].reverse();
23049                } else {
23050                    while start > 0 && !is_less(v.get_unchecked(start), v.get_unchecked(start - 1))
23051                    {
23052                        start -= 1;
23053                    }
23054                }
23055            }
23056        }
23057
23058        // Insert some more elements into the run if it's too short. Insertion sort is faster than
23059        // merge sort on short sequences, so this significantly improves performance.
23060        while start > 0 && end - start < MIN_RUN {
23061            start -= 1;
23062            insert_head(&mut v[start..end], &mut is_less);
23063        }
23064
23065        // Push this run onto the stack.
23066        runs.push(Run { start, len: end - start });
23067        end = start;
23068
23069        // Merge some pairs of adjacent runs to satisfy the invariants.
23070        while let Some(r) = collapse(&runs) {
23071            let left = runs[r + 1];
23072            let right = runs[r];
23073            unsafe {
23074                merge(
23075                    &mut v[left.start..right.start + right.len],
23076                    left.len,
23077                    buf.as_mut_ptr(),
23078                    &mut is_less,
23079                );
23080            }
23081            runs[r] = Run { start: left.start, len: left.len + right.len };
23082            runs.remove(r + 1);
23083        }
23084    }
23085
23086    // Finally, exactly one run must remain in the stack.
23087    debug_assert!(runs.len() == 1 && runs[0].start == 0 && runs[0].len == len);
23088
23089    // Examines the stack of runs and identifies the next pair of runs to merge. More specifically,
23090    // if `Some(r)` is returned, that means `runs[r]` and `runs[r + 1]` must be merged next. If the
23091    // algorithm should continue building a new run instead, `None` is returned.
23092    //
23093    // TimSort is infamous for its buggy implementations, as described here:
23094    // http://envisage-project.eu/timsort-specification-and-verification/
23095    //
23096    // The gist of the story is: we must enforce the invariants on the top four runs on the stack.
23097    // Enforcing them on just top three is not sufficient to ensure that the invariants will still
23098    // hold for *all* runs in the stack.
23099    //
23100    // This function correctly checks invariants for the top four runs. Additionally, if the top
23101    // run starts at index 0, it will always demand a merge operation until the stack is fully
23102    // collapsed, in order to complete the sort.
23103    #[inline]
23104    fn collapse(runs: &[Run]) -> Option<usize> {
23105        let n = runs.len();
23106        if n >= 2
23107            && (runs[n - 1].start == 0
23108                || runs[n - 2].len <= runs[n - 1].len
23109                || (n >= 3 && runs[n - 3].len <= runs[n - 2].len + runs[n - 1].len)
23110                || (n >= 4 && runs[n - 4].len <= runs[n - 3].len + runs[n - 2].len))
23111        {
23112            if n >= 3 && runs[n - 3].len < runs[n - 1].len { Some(n - 3) } else { Some(n - 2) }
23113        } else {
23114            None
23115        }
23116    }
23117
23118    #[derive(Clone, Copy)]
23119    struct Run {
23120        start: usize,
23121        len: usize,
23122    }
23123}
23124#![stable(feature = "wake_trait", since = "1.51.0")]
23125//! Types and Traits for working with asynchronous tasks.
23126use core::mem::ManuallyDrop;
23127use core::task::{RawWaker, RawWakerVTable, Waker};
23128
23129use crate::sync::Arc;
23130
23131/// The implementation of waking a task on an executor.
23132///
23133/// This trait can be used to create a [`Waker`]. An executor can define an
23134/// implementation of this trait, and use that to construct a Waker to pass
23135/// to the tasks that are executed on that executor.
23136///
23137/// This trait is a memory-safe and ergonomic alternative to constructing a
23138/// [`RawWaker`]. It supports the common executor design in which the data used
23139/// to wake up a task is stored in an [`Arc`]. Some executors (especially
23140/// those for embedded systems) cannot use this API, which is why [`RawWaker`]
23141/// exists as an alternative for those systems.
23142///
23143/// [arc]: ../../std/sync/struct.Arc.html
23144///
23145/// # Examples
23146///
23147/// A basic `block_on` function that takes a future and runs it to completion on
23148/// the current thread.
23149///
23150/// **Note:** This example trades correctness for simplicity. In order to prevent
23151/// deadlocks, production-grade implementations will also need to handle
23152/// intermediate calls to `thread::unpark` as well as nested invocations.
23153///
23154/// ```rust
23155/// use std::future::Future;
23156/// use std::sync::Arc;
23157/// use std::task::{Context, Poll, Wake};
23158/// use std::thread::{self, Thread};
23159///
23160/// /// A waker that wakes up the current thread when called.
23161/// struct ThreadWaker(Thread);
23162///
23163/// impl Wake for ThreadWaker {
23164///     fn wake(self: Arc<Self>) {
23165///         self.0.unpark();
23166///     }
23167/// }
23168///
23169/// /// Run a future to completion on the current thread.
23170/// fn block_on<T>(fut: impl Future<Output = T>) -> T {
23171///     // Pin the future so it can be polled.
23172///     let mut fut = Box::pin(fut);
23173///
23174///     // Create a new context to be passed to the future.
23175///     let t = thread::current();
23176///     let waker = Arc::new(ThreadWaker(t)).into();
23177///     let mut cx = Context::from_waker(&waker);
23178///
23179///     // Run the future to completion.
23180///     loop {
23181///         match fut.as_mut().poll(&mut cx) {
23182///             Poll::Ready(res) => return res,
23183///             Poll::Pending => thread::park(),
23184///         }
23185///     }
23186/// }
23187///
23188/// block_on(async {
23189///     println!("Hi from inside a future!");
23190/// });
23191/// ```
23192#[stable(feature = "wake_trait", since = "1.51.0")]
23193pub trait Wake {
23194    /// Wake this task.
23195    #[stable(feature = "wake_trait", since = "1.51.0")]
23196    fn wake(self: Arc<Self>);
23197
23198    /// Wake this task without consuming the waker.
23199    ///
23200    /// If an executor supports a cheaper way to wake without consuming the
23201    /// waker, it should override this method. By default, it clones the
23202    /// [`Arc`] and calls [`wake`] on the clone.
23203    ///
23204    /// [`wake`]: Wake::wake
23205    #[stable(feature = "wake_trait", since = "1.51.0")]
23206    fn wake_by_ref(self: &Arc<Self>) {
23207        self.clone().wake();
23208    }
23209}
23210
23211#[stable(feature = "wake_trait", since = "1.51.0")]
23212impl<W: Wake + Send + Sync + 'static> From<Arc<W>> for Waker {
23213    /// Use a `Wake`-able type as a `Waker`.
23214    ///
23215    /// No heap allocations or atomic operations are used for this conversion.
23216    fn from(waker: Arc<W>) -> Waker {
23217        // SAFETY: This is safe because raw_waker safely constructs
23218        // a RawWaker from Arc<W>.
23219        unsafe { Waker::from_raw(raw_waker(waker)) }
23220    }
23221}
23222
23223#[stable(feature = "wake_trait", since = "1.51.0")]
23224impl<W: Wake + Send + Sync + 'static> From<Arc<W>> for RawWaker {
23225    /// Use a `Wake`-able type as a `RawWaker`.
23226    ///
23227    /// No heap allocations or atomic operations are used for this conversion.
23228    fn from(waker: Arc<W>) -> RawWaker {
23229        raw_waker(waker)
23230    }
23231}
23232
23233// NB: This private function for constructing a RawWaker is used, rather than
23234// inlining this into the `From<Arc<W>> for RawWaker` impl, to ensure that
23235// the safety of `From<Arc<W>> for Waker` does not depend on the correct
23236// trait dispatch - instead both impls call this function directly and
23237// explicitly.
#[inline(always)]
// Builds a RawWaker whose data pointer is `Arc::into_raw(waker)` and whose
// vtable functions translate the raw-waker operations into `Arc` refcount
// updates and `Wake` trait calls.
fn raw_waker<W: Wake + Send + Sync + 'static>(waker: Arc<W>) -> RawWaker {
    // Increment the reference count of the arc to clone it.
    unsafe fn clone_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) -> RawWaker {
        // SAFETY (per vtable contract): `waker` came from `Arc::into_raw`
        // below, so it is a valid `*const W` with a live strong count.
        unsafe { Arc::increment_strong_count(waker as *const W) };
        RawWaker::new(
            waker as *const (),
            &RawWakerVTable::new(clone_waker::<W>, wake::<W>, wake_by_ref::<W>, drop_waker::<W>),
        )
    }

    // Wake by value, moving the Arc into the Wake::wake function
    unsafe fn wake<W: Wake + Send + Sync + 'static>(waker: *const ()) {
        // SAFETY: reconstitutes the Arc from the raw pointer, consuming this
        // handle's strong reference.
        let waker = unsafe { Arc::from_raw(waker as *const W) };
        <W as Wake>::wake(waker);
    }

    // Wake by reference, wrap the waker in ManuallyDrop to avoid dropping it
    unsafe fn wake_by_ref<W: Wake + Send + Sync + 'static>(waker: *const ()) {
        // SAFETY: ManuallyDrop prevents the reconstructed Arc from releasing
        // the strong reference that the RawWaker still logically owns.
        let waker = unsafe { ManuallyDrop::new(Arc::from_raw(waker as *const W)) };
        <W as Wake>::wake_by_ref(&waker);
    }

    // Decrement the reference count of the Arc on drop
    unsafe fn drop_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) {
        // SAFETY: releases the strong reference owned by this RawWaker.
        unsafe { Arc::decrement_strong_count(waker as *const W) };
    }

    RawWaker::new(
        Arc::into_raw(waker) as *const (),
        &RawWakerVTable::new(clone_waker::<W>, wake::<W>, wake_by_ref::<W>, drop_waker::<W>),
    )
}
23271#![stable(feature = "rust1", since = "1.0.0")]
23272
23273//! Thread-safe reference-counting pointers.
23274//!
23275//! See the [`Arc<T>`][Arc] documentation for more details.
23276
23277use core::any::Any;
23278use core::borrow;
23279use core::cmp::Ordering;
23280use core::convert::{From, TryFrom};
23281use core::fmt;
23282use core::hash::{Hash, Hasher};
23283use core::hint;
23284use core::intrinsics::abort;
23285use core::iter;
23286use core::marker::{PhantomData, Unpin, Unsize};
23287use core::mem::{self, align_of_val_raw, size_of_val};
23288use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
23289use core::pin::Pin;
23290use core::ptr::{self, NonNull};
23291use core::slice::from_raw_parts_mut;
23292use core::sync::atomic;
23293use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
23294
23295use crate::alloc::{
23296    box_free, handle_alloc_error, AllocError, Allocator, Global, Layout, WriteCloneIntoRaw,
23297};
23298use crate::borrow::{Cow, ToOwned};
23299use crate::boxed::Box;
23300use crate::rc::is_dangling;
23301use crate::string::String;
23302use crate::vec::Vec;
23303
23304#[cfg(test)]
23305mod tests;
23306
/// A soft limit on the amount of references that may be made to an `Arc`.
///
/// Going above this limit will abort your program (although not
/// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
// Capping the count at `isize::MAX` leaves headroom so an overshoot can be
// detected before the counter wraps.
const MAX_REFCOUNT: usize = (isize::MAX) as usize;
23312
#[cfg(not(sanitize = "thread"))]
// Default build: a standalone Acquire fence; the `$x` atomic expression is
// evaluated for tokens only, not loaded.
// NOTE(review): presumably pairs with Release-ordered refcount updates
// elsewhere in this module — confirm at the use sites.
macro_rules! acquire {
    ($x:expr) => {
        atomic::fence(Acquire)
    };
}
23319
// ThreadSanitizer does not support memory fences. To avoid false positive
// reports in Arc / Weak implementation use atomic loads for synchronization
// instead.
#[cfg(sanitize = "thread")]
// TSan build: perform an Acquire load of `$x` (the load result is discarded);
// this gives TSan a visible synchronization edge where a fence would not.
macro_rules! acquire {
    ($x:expr) => {
        $x.load(Acquire)
    };
}
23329
23330/// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
23331/// Reference Counted'.
23332///
23333/// The type `Arc<T>` provides shared ownership of a value of type `T`,
23334/// allocated in the heap. Invoking [`clone`][clone] on `Arc` produces
23335/// a new `Arc` instance, which points to the same allocation on the heap as the
23336/// source `Arc`, while increasing a reference count. When the last `Arc`
23337/// pointer to a given allocation is destroyed, the value stored in that allocation (often
23338/// referred to as "inner value") is also dropped.
23339///
23340/// Shared references in Rust disallow mutation by default, and `Arc` is no
23341/// exception: you cannot generally obtain a mutable reference to something
23342/// inside an `Arc`. If you need to mutate through an `Arc`, use
23343/// [`Mutex`][mutex], [`RwLock`][rwlock], or one of the [`Atomic`][atomic]
23344/// types.
23345///
23346/// ## Thread Safety
23347///
23348/// Unlike [`Rc<T>`], `Arc<T>` uses atomic operations for its reference
23349/// counting. This means that it is thread-safe. The disadvantage is that
23350/// atomic operations are more expensive than ordinary memory accesses. If you
23351/// are not sharing reference-counted allocations between threads, consider using
23352/// [`Rc<T>`] for lower overhead. [`Rc<T>`] is a safe default, because the
23353/// compiler will catch any attempt to send an [`Rc<T>`] between threads.
23354/// However, a library might choose `Arc<T>` in order to give library consumers
23355/// more flexibility.
23356///
23357/// `Arc<T>` will implement [`Send`] and [`Sync`] as long as the `T` implements
23358/// [`Send`] and [`Sync`]. Why can't you put a non-thread-safe type `T` in an
23359/// `Arc<T>` to make it thread-safe? This may be a bit counter-intuitive at
23360/// first: after all, isn't the point of `Arc<T>` thread safety? The key is
23361/// this: `Arc<T>` makes it thread safe to have multiple ownership of the same
/// data, but it doesn't add thread safety to its data. Consider
23363/// `Arc<`[`RefCell<T>`]`>`. [`RefCell<T>`] isn't [`Sync`], and if `Arc<T>` was always
23364/// [`Send`], `Arc<`[`RefCell<T>`]`>` would be as well. But then we'd have a problem:
23365/// [`RefCell<T>`] is not thread safe; it keeps track of the borrowing count using
23366/// non-atomic operations.
23367///
23368/// In the end, this means that you may need to pair `Arc<T>` with some sort of
23369/// [`std::sync`] type, usually [`Mutex<T>`][mutex].
23370///
23371/// ## Breaking cycles with `Weak`
23372///
23373/// The [`downgrade`][downgrade] method can be used to create a non-owning
23374/// [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
23375/// to an `Arc`, but this will return [`None`] if the value stored in the allocation has
23376/// already been dropped. In other words, `Weak` pointers do not keep the value
23377/// inside the allocation alive; however, they *do* keep the allocation
23378/// (the backing store for the value) alive.
23379///
23380/// A cycle between `Arc` pointers will never be deallocated. For this reason,
23381/// [`Weak`] is used to break cycles. For example, a tree could have
23382/// strong `Arc` pointers from parent nodes to children, and [`Weak`]
23383/// pointers from children back to their parents.
23384///
23385/// # Cloning references
23386///
23387/// Creating a new reference from an existing reference-counted pointer is done using the
23388/// `Clone` trait implemented for [`Arc<T>`][Arc] and [`Weak<T>`][Weak].
23389///
23390/// ```
23391/// use std::sync::Arc;
23392/// let foo = Arc::new(vec![1.0, 2.0, 3.0]);
23393/// // The two syntaxes below are equivalent.
23394/// let a = foo.clone();
23395/// let b = Arc::clone(&foo);
23396/// // a, b, and foo are all Arcs that point to the same memory location
23397/// ```
23398///
23399/// ## `Deref` behavior
23400///
23401/// `Arc<T>` automatically dereferences to `T` (via the [`Deref`][deref] trait),
23402/// so you can call `T`'s methods on a value of type `Arc<T>`. To avoid name
23403/// clashes with `T`'s methods, the methods of `Arc<T>` itself are associated
23404/// functions, called using [fully qualified syntax]:
23405///
23406/// ```
23407/// use std::sync::Arc;
23408///
23409/// let my_arc = Arc::new(());
23410/// Arc::downgrade(&my_arc);
23411/// ```
23412///
23413/// `Arc<T>`'s implementations of traits like `Clone` may also be called using
23414/// fully qualified syntax. Some people prefer to use fully qualified syntax,
23415/// while others prefer using method-call syntax.
23416///
23417/// ```
23418/// use std::sync::Arc;
23419///
23420/// let arc = Arc::new(());
23421/// // Method-call syntax
23422/// let arc2 = arc.clone();
23423/// // Fully qualified syntax
23424/// let arc3 = Arc::clone(&arc);
23425/// ```
23426///
23427/// [`Weak<T>`][Weak] does not auto-dereference to `T`, because the inner value may have
23428/// already been dropped.
23429///
23430/// [`Rc<T>`]: crate::rc::Rc
23431/// [clone]: Clone::clone
23432/// [mutex]: ../../std/sync/struct.Mutex.html
23433/// [rwlock]: ../../std/sync/struct.RwLock.html
23434/// [atomic]: core::sync::atomic
23435/// [`Send`]: core::marker::Send
23436/// [`Sync`]: core::marker::Sync
23437/// [deref]: core::ops::Deref
23438/// [downgrade]: Arc::downgrade
23439/// [upgrade]: Weak::upgrade
23440/// [`RefCell<T>`]: core::cell::RefCell
23441/// [`std::sync`]: ../../std/sync/index.html
23442/// [`Arc::clone(&from)`]: Arc::clone
23443/// [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name
23444///
23445/// # Examples
23446///
23447/// Sharing some immutable data between threads:
23448///
23449// Note that we **do not** run these tests here. The windows builders get super
23450// unhappy if a thread outlives the main thread and then exits at the same time
23451// (something deadlocks) so we just avoid this entirely by not running these
23452// tests.
23453/// ```no_run
23454/// use std::sync::Arc;
23455/// use std::thread;
23456///
23457/// let five = Arc::new(5);
23458///
23459/// for _ in 0..10 {
23460///     let five = Arc::clone(&five);
23461///
23462///     thread::spawn(move || {
23463///         println!("{:?}", five);
23464///     });
23465/// }
23466/// ```
23467///
23468/// Sharing a mutable [`AtomicUsize`]:
23469///
23470/// [`AtomicUsize`]: core::sync::atomic::AtomicUsize
23471///
23472/// ```no_run
23473/// use std::sync::Arc;
23474/// use std::sync::atomic::{AtomicUsize, Ordering};
23475/// use std::thread;
23476///
23477/// let val = Arc::new(AtomicUsize::new(5));
23478///
23479/// for _ in 0..10 {
23480///     let val = Arc::clone(&val);
23481///
23482///     thread::spawn(move || {
23483///         let v = val.fetch_add(1, Ordering::SeqCst);
23484///         println!("{:?}", v);
23485///     });
23486/// }
23487/// ```
23488///
23489/// See the [`rc` documentation][rc_examples] for more examples of reference
23490/// counting in general.
23491///
23492/// [rc_examples]: crate::rc#examples
#[cfg_attr(not(test), rustc_diagnostic_item = "Arc")]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Arc<T: ?Sized> {
    // Pointer to the shared `ArcInner<T>` allocation. `NonNull` enables the
    // niche optimization (`Option<Arc<T>>` is pointer-sized).
    ptr: NonNull<ArcInner<T>>,
    // Marks this type as logically owning an `ArcInner<T>`, so drop check
    // accounts for the `T` reachable through `ptr`.
    phantom: PhantomData<ArcInner<T>>,
}
23499
#[stable(feature = "rust1", since = "1.0.0")]
// Sending an `Arc<T>` both shares `T` with the target thread (needs `Sync`)
// and may hand it the final drop of `T` (needs `Send`), so both bounds are
// required.
unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
// Same bounds as `Send`: a shared `&Arc<T>` can be cloned into an owned
// `Arc<T>`, which reduces `Sync` to the `Send` case above.
unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}

#[unstable(feature = "coerce_unsized", issue = "27732")]
// Permits unsizing coercions such as `Arc<[u8; 4]> -> Arc<[u8]>`.
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}

#[unstable(feature = "dispatch_from_dyn", issue = "none")]
// Allows `self: Arc<Self>` receivers to be used in dynamic dispatch.
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Arc<U>> for Arc<T> {}
23510
impl<T: ?Sized> Arc<T> {
    /// Wraps a pointer to an existing `ArcInner<T>` in an `Arc`.
    ///
    /// Does not touch the reference counts; the caller is responsible for the
    /// counts already accounting for the returned handle.
    fn from_inner(ptr: NonNull<ArcInner<T>>) -> Self {
        Self { ptr, phantom: PhantomData }
    }

    /// Like `from_inner`, but takes a raw pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null and point to a valid `ArcInner<T>`.
    unsafe fn from_ptr(ptr: *mut ArcInner<T>) -> Self {
        // SAFETY: the caller guarantees that `ptr` is non-null.
        unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
    }
}
23520
23521/// `Weak` is a version of [`Arc`] that holds a non-owning reference to the
23522/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak`
23523/// pointer, which returns an [`Option`]`<`[`Arc`]`<T>>`.
23524///
23525/// Since a `Weak` reference does not count towards ownership, it will not
23526/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
23527/// guarantees about the value still being present. Thus it may return [`None`]
23528/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
23529/// itself (the backing store) from being deallocated.
23530///
23531/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
23532/// managed by [`Arc`] without preventing its inner value from being dropped. It is also used to
23533/// prevent circular references between [`Arc`] pointers, since mutual owning references
23534/// would never allow either [`Arc`] to be dropped. For example, a tree could
23535/// have strong [`Arc`] pointers from parent nodes to children, and `Weak`
23536/// pointers from children back to their parents.
23537///
23538/// The typical way to obtain a `Weak` pointer is to call [`Arc::downgrade`].
23539///
23540/// [`upgrade`]: Weak::upgrade
#[stable(feature = "arc_weak", since = "1.4.0")]
pub struct Weak<T: ?Sized> {
    // This is a `NonNull` to allow optimizing the size of this type in enums,
    // but it is not necessarily a valid pointer.
    // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
    // to allocate space on the heap.  That's not a value a real pointer
    // will ever have because ArcInner has alignment at least 2.
    // This is only possible when `T: Sized`; unsized `T` never dangle.
    ptr: NonNull<ArcInner<T>>,
}
23551
// SAFETY: `Weak` can be upgraded to an `Arc<T>` on another thread, so it has
// the same `T: Sync + Send` requirements as `Arc` itself.
#[stable(feature = "arc_weak", since = "1.4.0")]
unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
// SAFETY: `&Weak<T>` can be cloned into an owned `Weak<T>`, so sharing has
// the same requirements as sending.
#[stable(feature = "arc_weak", since = "1.4.0")]
unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}
23556
// Allows `Weak<T>` to be coerced to `Weak<U>` when `T` unsizes to `U`,
// mirroring the corresponding impls for `Arc`.
#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
// Needed so that methods taking `self: Weak<Self>` work with trait objects.
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
23561
23562#[stable(feature = "arc_weak", since = "1.4.0")]
23563impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
23564    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
23565        write!(f, "(Weak)")
23566    }
23567}
23568
// This is repr(C) to future-proof against possible field-reordering, which
// would interfere with otherwise safe [into|from]_raw() of transmutable
// inner types.
#[repr(C)]
struct ArcInner<T: ?Sized> {
    // Count of strong (`Arc`) handles; `data` is dropped when it reaches zero.
    strong: atomic::AtomicUsize,

    // Count of `Weak` handles, plus one weak reference collectively held by
    // all strong handles (see `Arc::new`); the allocation is freed when it
    // reaches zero.
    //
    // the value usize::MAX acts as a sentinel for temporarily "locking" the
    // ability to upgrade weak pointers or downgrade strong ones; this is used
    // to avoid races in `make_mut` and `get_mut`.
    weak: atomic::AtomicUsize,

    // The shared value itself; last so that `T` may be unsized.
    data: T,
}
23583
// SAFETY: the atomic counters are always thread-safe; the `data` field
// carries the `T: Sync + Send` requirements, matching the impls on `Arc`.
unsafe impl<T: ?Sized + Sync + Send> Send for ArcInner<T> {}
unsafe impl<T: ?Sized + Sync + Send> Sync for ArcInner<T> {}
23586
impl<T> Arc<T> {
    /// Constructs a new `Arc<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(data: T) -> Arc<T> {
        // Start the weak pointer count as 1 which is the weak pointer that's
        // held by all the strong pointers (kinda), see std/rc.rs for more info
        let x: Box<_> = box ArcInner {
            strong: atomic::AtomicUsize::new(1),
            weak: atomic::AtomicUsize::new(1),
            data,
        };
        // Leak the box: from here on the allocation is owned by the reference
        // counts rather than by a `Box`.
        Self::from_inner(Box::leak(x).into())
    }

    /// Constructs a new `Arc<T>` using a weak reference to itself. Attempting
    /// to upgrade the weak reference before this function returns will result
    /// in a `None` value. However, the weak reference may be cloned freely and
    /// stored for use at a later time.
    ///
    /// # Examples
    /// ```
    /// #![feature(arc_new_cyclic)]
    /// #![allow(dead_code)]
    ///
    /// use std::sync::{Arc, Weak};
    ///
    /// struct Foo {
    ///     me: Weak<Foo>,
    /// }
    ///
    /// let foo = Arc::new_cyclic(|me| Foo {
    ///     me: me.clone(),
    /// });
    /// ```
    #[inline]
    #[unstable(feature = "arc_new_cyclic", issue = "75861")]
    pub fn new_cyclic(data_fn: impl FnOnce(&Weak<T>) -> T) -> Arc<T> {
        // Construct the inner in the "uninitialized" state with a single
        // weak reference. The strong count of 0 is what makes the `Weak`
        // non-upgradeable until we finish initialization below.
        let uninit_ptr: NonNull<_> = Box::leak(box ArcInner {
            strong: atomic::AtomicUsize::new(0),
            weak: atomic::AtomicUsize::new(1),
            data: mem::MaybeUninit::<T>::uninit(),
        })
        .into();
        // `MaybeUninit<T>` has the same layout as `T`, so the cast is sound.
        let init_ptr: NonNull<ArcInner<T>> = uninit_ptr.cast();

        let weak = Weak { ptr: init_ptr };

        // It's important we don't give up ownership of the weak pointer, or
        // else the memory might be freed by the time `data_fn` returns. If
        // we really wanted to pass ownership, we could create an additional
        // weak pointer for ourselves, but this would result in additional
        // updates to the weak reference count which might not be necessary
        // otherwise.
        let data = data_fn(&weak);

        // Now we can properly initialize the inner value and turn our weak
        // reference into a strong reference.
        unsafe {
            let inner = init_ptr.as_ptr();
            ptr::write(ptr::addr_of_mut!((*inner).data), data);

            // The above write to the data field must be visible to any threads which
            // observe a non-zero strong count. Therefore we need at least "Release" ordering
            // in order to synchronize with the `compare_exchange_weak` in `Weak::upgrade`.
            //
            // "Acquire" ordering is not required. When considering the possible behaviours
            // of `data_fn` we only need to look at what it could do with a reference to a
            // non-upgradeable `Weak`:
            // - It can *clone* the `Weak`, increasing the weak reference count.
            // - It can drop those clones, decreasing the weak reference count (but never to zero).
            //
            // These side effects do not impact us in any way, and no other side effects are
            // possible with safe code alone.
            let prev_value = (*inner).strong.fetch_add(1, Release);
            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
        }

        let strong = Arc::from_inner(init_ptr);

        // Strong references should collectively own a shared weak reference,
        // so don't run the destructor for our old weak reference.
        mem::forget(weak);
        strong
    }

    /// Constructs a new `Arc` with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::sync::Arc;
    ///
    /// let mut five = Arc::<u32>::new_uninit();
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_uninit() -> Arc<mem::MaybeUninit<T>> {
        unsafe {
            // SAFETY: the allocation is laid out for an `ArcInner` whose data
            // field is a `T`; leaving the data uninitialized is fine because
            // it is exposed only as `MaybeUninit<T>`.
            Arc::from_ptr(Arc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                |mem| mem as *mut ArcInner<mem::MaybeUninit<T>>,
            ))
        }
    }

    /// Constructs a new `Arc` with uninitialized contents, with the memory
    /// being filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
    /// of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    ///
    /// use std::sync::Arc;
    ///
    /// let zero = Arc::<u32>::new_zeroed();
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_zeroed() -> Arc<mem::MaybeUninit<T>> {
        unsafe {
            // SAFETY: same as `new_uninit`, except the allocator zeroes the
            // memory, which is a valid bit pattern behind `MaybeUninit`.
            Arc::from_ptr(Arc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                |mem| mem as *mut ArcInner<mem::MaybeUninit<T>>,
            ))
        }
    }

    /// Constructs a new `Pin<Arc<T>>`. If `T` does not implement `Unpin`, then
    /// `data` will be pinned in memory and unable to be moved.
    #[stable(feature = "pin", since = "1.33.0")]
    pub fn pin(data: T) -> Pin<Arc<T>> {
        // SAFETY: the value lives in the `Arc`'s heap allocation, which is
        // never moved, so pinning it is sound.
        unsafe { Pin::new_unchecked(Arc::new(data)) }
    }

    /// Constructs a new `Arc<T>`, returning an error if allocation fails.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::sync::Arc;
    ///
    /// let five = Arc::try_new(5)?;
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn try_new(data: T) -> Result<Arc<T>, AllocError> {
        // Start the weak pointer count as 1 which is the weak pointer that's
        // held by all the strong pointers (kinda), see std/rc.rs for more info
        let x: Box<_> = Box::try_new(ArcInner {
            strong: atomic::AtomicUsize::new(1),
            weak: atomic::AtomicUsize::new(1),
            data,
        })?;
        Ok(Self::from_inner(Box::leak(x).into()))
    }

    /// Constructs a new `Arc` with uninitialized contents, returning an error
    /// if allocation fails.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit, allocator_api)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::sync::Arc;
    ///
    /// let mut five = Arc::<u32>::try_new_uninit()?;
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn try_new_uninit() -> Result<Arc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            // SAFETY: same layout argument as `new_uninit`; allocation
            // failure is propagated instead of aborting.
            Ok(Arc::from_ptr(Arc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                |mem| mem as *mut ArcInner<mem::MaybeUninit<T>>,
            )?))
        }
    }

    /// Constructs a new `Arc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, returning an error if allocation fails.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
    /// of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit, allocator_api)]
    ///
    /// use std::sync::Arc;
    ///
    /// let zero = Arc::<u32>::try_new_zeroed()?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn try_new_zeroed() -> Result<Arc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            // SAFETY: same as `try_new_uninit`, with zeroed memory.
            Ok(Arc::from_ptr(Arc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                |mem| mem as *mut ArcInner<mem::MaybeUninit<T>>,
            )?))
        }
    }
    /// Returns the inner value, if the `Arc` has exactly one strong reference.
    ///
    /// Otherwise, an [`Err`] is returned with the same `Arc` that was
    /// passed in.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let x = Arc::new(3);
    /// assert_eq!(Arc::try_unwrap(x), Ok(3));
    ///
    /// let x = Arc::new(4);
    /// let _y = Arc::clone(&x);
    /// assert_eq!(*Arc::try_unwrap(x).unwrap_err(), 4);
    /// ```
    #[inline]
    #[stable(feature = "arc_unique", since = "1.4.0")]
    pub fn try_unwrap(this: Self) -> Result<T, Self> {
        // Claim the sole strong reference by moving the count from 1 to 0;
        // any other value means other `Arc`s exist and we must fail.
        if this.inner().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() {
            return Err(this);
        }

        // Synchronize with releases of strong references on other threads so
        // that all their accesses to the data happen-before our read below.
        acquire!(this.inner().strong);

        unsafe {
            let elem = ptr::read(&this.ptr.as_ref().data);

            // Make a weak pointer to clean up the implicit strong-weak reference
            let _weak = Weak { ptr: this.ptr };
            // Don't run `Drop` for `this`: the strong count is already 0 and
            // the data has been moved out.
            mem::forget(this);

            Ok(elem)
        }
    }
}
23883
impl<T> Arc<[T]> {
    /// Constructs a new atomically reference-counted slice with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::sync::Arc;
    ///
    /// let mut values = Arc::<[u32]>::new_uninit_slice(3);
    ///
    /// let values = unsafe {
    ///     // Deferred initialization:
    ///     Arc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
    ///     Arc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
    ///     Arc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
    ///
    ///     values.assume_init()
    /// };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_uninit_slice(len: usize) -> Arc<[mem::MaybeUninit<T>]> {
        // SAFETY: `allocate_for_slice` allocates space for `len` elements;
        // the uninitialized contents are exposed only as `MaybeUninit<T>`.
        unsafe { Arc::from_ptr(Arc::allocate_for_slice(len)) }
    }

    /// Constructs a new atomically reference-counted slice with uninitialized contents, with the memory being
    /// filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    ///
    /// use std::sync::Arc;
    ///
    /// let values = Arc::<[u32]>::new_zeroed_slice(3);
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [0, 0, 0])
    /// ```
    ///
    /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_zeroed_slice(len: usize) -> Arc<[mem::MaybeUninit<T>]> {
        unsafe {
            // SAFETY: the allocation is zeroed and sized via `Layout::array`;
            // the closure builds a fat pointer carrying the slice length.
            Arc::from_ptr(Arc::allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| Global.allocate_zeroed(layout),
                |mem| {
                    ptr::slice_from_raw_parts_mut(mem as *mut T, len)
                        as *mut ArcInner<[mem::MaybeUninit<T>]>
                },
            ))
        }
    }
}
23947
impl<T> Arc<mem::MaybeUninit<T>> {
    /// Converts to `Arc<T>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::sync::Arc;
    ///
    /// let mut five = Arc::<u32>::new_uninit();
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub unsafe fn assume_init(self) -> Arc<T> {
        // `ManuallyDrop` keeps `self`'s strong count from being decremented;
        // the cast reinterprets the allocation as `ArcInner<T>`, which the
        // caller has promised is now initialized.
        Arc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
    }
}
23986
impl<T> Arc<[mem::MaybeUninit<T>]> {
    /// Converts to `Arc<[T]>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::sync::Arc;
    ///
    /// let mut values = Arc::<[u32]>::new_uninit_slice(3);
    ///
    /// let values = unsafe {
    ///     // Deferred initialization:
    ///     Arc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
    ///     Arc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
    ///     Arc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
    ///
    ///     values.assume_init()
    /// };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub unsafe fn assume_init(self) -> Arc<[T]> {
        // SAFETY: `ManuallyDrop` prevents decrementing the strong count; the
        // pointer cast preserves the slice length while dropping the
        // `MaybeUninit` wrapper, per the caller's initialization promise.
        unsafe { Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
    }
}
24027
impl<T: ?Sized> Arc<T> {
    /// Consumes the `Arc`, returning the wrapped pointer.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Arc` using
    /// [`Arc::from_raw`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let x = Arc::new("hello".to_owned());
    /// let x_ptr = Arc::into_raw(x);
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// ```
    #[stable(feature = "rc_raw", since = "1.17.0")]
    pub fn into_raw(this: Self) -> *const T {
        let ptr = Self::as_ptr(&this);
        // Don't run `Drop`: the strong reference is transferred to the raw
        // pointer and is reclaimed by `Arc::from_raw`.
        mem::forget(this);
        ptr
    }

    /// Provides a raw pointer to the data.
    ///
    /// The counts are not affected in any way and the `Arc` is not consumed. The pointer is valid for
    /// as long as there are strong counts in the `Arc`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let x = Arc::new("hello".to_owned());
    /// let y = Arc::clone(&x);
    /// let x_ptr = Arc::as_ptr(&x);
    /// assert_eq!(x_ptr, Arc::as_ptr(&y));
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// ```
    #[stable(feature = "rc_as_ptr", since = "1.45.0")]
    pub fn as_ptr(this: &Self) -> *const T {
        let ptr: *mut ArcInner<T> = NonNull::as_ptr(this.ptr);

        // SAFETY: This cannot go through Deref::deref or RcBoxPtr::inner because
        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
        // write through the pointer after the Rc is recovered through `from_raw`.
        unsafe { ptr::addr_of_mut!((*ptr).data) }
    }

    /// Constructs an `Arc<T>` from a raw pointer.
    ///
    /// The raw pointer must have been previously returned by a call to
    /// [`Arc<U>::into_raw`][into_raw] where `U` must have the same size and
    /// alignment as `T`. This is trivially true if `U` is `T`.
    /// Note that if `U` is not `T` but has the same size and alignment, this is
    /// basically like transmuting references of different types. See
    /// [`mem::transmute`][transmute] for more information on what
    /// restrictions apply in this case.
    ///
    /// The user of `from_raw` has to make sure a specific value of `T` is only
    /// dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Arc<T>` is never accessed.
    ///
    /// [into_raw]: Arc::into_raw
    /// [transmute]: core::mem::transmute
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let x = Arc::new("hello".to_owned());
    /// let x_ptr = Arc::into_raw(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Arc` to prevent leak.
    ///     let x = Arc::from_raw(x_ptr);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Arc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    #[stable(feature = "rc_raw", since = "1.17.0")]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        unsafe {
            // Byte offset of the `data` field within `ArcInner<T>` for this
            // particular value (accounts for unsized tails).
            let offset = data_offset(ptr);

            // Reverse the offset to find the original ArcInner.
            let arc_ptr = (ptr as *mut ArcInner<T>).set_ptr_value((ptr as *mut u8).offset(-offset));

            Self::from_ptr(arc_ptr)
        }
    }

    /// Creates a new [`Weak`] pointer to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// let weak_five = Arc::downgrade(&five);
    /// ```
    #[stable(feature = "arc_weak", since = "1.4.0")]
    pub fn downgrade(this: &Self) -> Weak<T> {
        // This Relaxed is OK because we're checking the value in the CAS
        // below.
        let mut cur = this.inner().weak.load(Relaxed);

        loop {
            // check if the weak counter is currently "locked"; if so, spin.
            if cur == usize::MAX {
                hint::spin_loop();
                cur = this.inner().weak.load(Relaxed);
                continue;
            }

            // NOTE: this code currently ignores the possibility of overflow
            // into usize::MAX; in general both Rc and Arc need to be adjusted
            // to deal with overflow.

            // Unlike with Clone(), we need this to be an Acquire read to
            // synchronize with the write coming from `is_unique`, so that the
            // events prior to that write happen before this read.
            match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
                Ok(_) => {
                    // Make sure we do not create a dangling Weak
                    debug_assert!(!is_dangling(this.ptr.as_ptr()));
                    return Weak { ptr: this.ptr };
                }
                // Spurious failure or concurrent update: retry with the
                // freshly observed count.
                Err(old) => cur = old,
            }
        }
    }

    /// Gets the number of [`Weak`] pointers to this allocation.
    ///
    /// # Safety
    ///
    /// This method by itself is safe, but using it correctly requires extra care.
    /// Another thread can change the weak count at any time,
    /// including potentially between calling this method and acting on the result.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// let _weak_five = Arc::downgrade(&five);
    ///
    /// // This assertion is deterministic because we haven't shared
    /// // the `Arc` or `Weak` between threads.
    /// assert_eq!(1, Arc::weak_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "arc_counts", since = "1.15.0")]
    pub fn weak_count(this: &Self) -> usize {
        let cnt = this.inner().weak.load(SeqCst);
        // If the weak count is currently locked, the value of the
        // count was 0 just before taking the lock.
        // Otherwise subtract the implicit weak reference collectively held
        // by the strong pointers (see `Arc::new`).
        if cnt == usize::MAX { 0 } else { cnt - 1 }
    }

    /// Gets the number of strong (`Arc`) pointers to this allocation.
    ///
    /// # Safety
    ///
    /// This method by itself is safe, but using it correctly requires extra care.
    /// Another thread can change the strong count at any time,
    /// including potentially between calling this method and acting on the result.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// let _also_five = Arc::clone(&five);
    ///
    /// // This assertion is deterministic because we haven't shared
    /// // the `Arc` between threads.
    /// assert_eq!(2, Arc::strong_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "arc_counts", since = "1.15.0")]
    pub fn strong_count(this: &Self) -> usize {
        this.inner().strong.load(SeqCst)
    }

    /// Increments the strong reference count on the `Arc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Arc::into_raw`, and the
    /// associated `Arc` instance must be valid (i.e. the strong count must be at
    /// least 1) for the duration of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Arc::into_raw(five);
    ///     Arc::increment_strong_count(ptr);
    ///
    ///     // This assertion is deterministic because we haven't shared
    ///     // the `Arc` between threads.
    ///     let five = Arc::from_raw(ptr);
    ///     assert_eq!(2, Arc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")]
    pub unsafe fn increment_strong_count(ptr: *const T) {
        // Retain Arc, but don't touch refcount by wrapping in ManuallyDrop
        let arc = unsafe { mem::ManuallyDrop::new(Arc::<T>::from_raw(ptr)) };
        // Now increase refcount, but don't drop new refcount either
        let _arc_clone: mem::ManuallyDrop<_> = arc.clone();
    }

    /// Decrements the strong reference count on the `Arc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Arc::into_raw`, and the
    /// associated `Arc` instance must be valid (i.e. the strong count must be at
    /// least 1) when invoking this method. This method can be used to release the final
    /// `Arc` and backing storage, but **should not** be called after the final `Arc` has been
    /// released.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Arc::into_raw(five);
    ///     Arc::increment_strong_count(ptr);
    ///
    ///     // Those assertions are deterministic because we haven't shared
    ///     // the `Arc` between threads.
    ///     let five = Arc::from_raw(ptr);
    ///     assert_eq!(2, Arc::strong_count(&five));
    ///     Arc::decrement_strong_count(ptr);
    ///     assert_eq!(1, Arc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")]
    pub unsafe fn decrement_strong_count(ptr: *const T) {
        // Re-materialize an `Arc` from the raw pointer and drop it, which
        // decrements the strong count (and may free the value).
        unsafe { mem::drop(Arc::from_raw(ptr)) };
    }

    #[inline]
    fn inner(&self) -> &ArcInner<T> {
        // This unsafety is ok because while this arc is alive we're guaranteed
        // that the inner pointer is valid. Furthermore, we know that the
        // `ArcInner` structure itself is `Sync` because the inner data is
        // `Sync` as well, so we're ok loaning out an immutable pointer to these
        // contents.
        unsafe { self.ptr.as_ref() }
    }

    // Non-inlined part of `drop`.
    #[inline(never)]
    unsafe fn drop_slow(&mut self) {
        // Destroy the data at this time, even though we may not free the box
        // allocation itself (there may still be weak pointers lying around).
        unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) };

        // Drop the weak ref collectively held by all strong references
        drop(Weak { ptr: self.ptr });
    }

    #[inline]
    #[stable(feature = "ptr_eq", since = "1.17.0")]
    /// Returns `true` if the two `Arc`s point to the same allocation
    /// (in a vein similar to [`ptr::eq`]).
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// let same_five = Arc::clone(&five);
    /// let other_five = Arc::new(5);
    ///
    /// assert!(Arc::ptr_eq(&five, &same_five));
    /// assert!(!Arc::ptr_eq(&five, &other_five));
    /// ```
    ///
    /// [`ptr::eq`]: core::ptr::eq
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        // NOTE(review): for unsized `T` this compares fat pointers, i.e.
        // metadata (such as vtable pointers) as well as the address.
        this.ptr.as_ptr() == other.ptr.as_ptr()
    }
}
24338
impl<T: ?Sized> Arc<T> {
    /// Allocates an `ArcInner<T>` with sufficient space for
    /// a possibly-unsized inner value where the value has the layout provided.
    ///
    /// The function `mem_to_arcinner` is called with the data pointer
    /// and must return back a (potentially fat)-pointer for the `ArcInner<T>`.
    unsafe fn allocate_for_layout(
        value_layout: Layout,
        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
        mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner<T>,
    ) -> *mut ArcInner<T> {
        // Calculate layout using the given value layout.
        // Previously, layout was calculated on the expression
        // `&*(ptr as *const ArcInner<T>)`, but this created a misaligned
        // reference (see #54908).
        let layout = Layout::new::<ArcInner<()>>().extend(value_layout).unwrap().0.pad_to_align();
        // Infallible variant: on allocation failure, divert to the global
        // allocation-error handler instead of surfacing an `AllocError`.
        unsafe {
            Arc::try_allocate_for_layout(value_layout, allocate, mem_to_arcinner)
                .unwrap_or_else(|_| handle_alloc_error(layout))
        }
    }

    /// Allocates an `ArcInner<T>` with sufficient space for
    /// a possibly-unsized inner value where the value has the layout provided,
    /// returning an error if allocation fails.
    ///
    /// The function `mem_to_arcinner` is called with the data pointer
    /// and must return back a (potentially fat)-pointer for the `ArcInner<T>`.
    unsafe fn try_allocate_for_layout(
        value_layout: Layout,
        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
        mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner<T>,
    ) -> Result<*mut ArcInner<T>, AllocError> {
        // Calculate layout using the given value layout.
        // Previously, layout was calculated on the expression
        // `&*(ptr as *const ArcInner<T>)`, but this created a misaligned
        // reference (see #54908).
        let layout = Layout::new::<ArcInner<()>>().extend(value_layout).unwrap().0.pad_to_align();

        let ptr = allocate(layout)?;

        // Initialize the ArcInner
        let inner = mem_to_arcinner(ptr.as_non_null_ptr().as_ptr());
        debug_assert_eq!(unsafe { Layout::for_value(&*inner) }, layout);

        // Both counts start at 1: the strong count for the `Arc` being created,
        // and the weak count for the implicit weak reference collectively held
        // by all strong references.
        unsafe {
            ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
            ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
        }

        Ok(inner)
    }

    /// Allocates an `ArcInner<T>` with sufficient space for an unsized inner value.
    unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
        // Allocate for the `ArcInner<T>` using the given value.
        unsafe {
            Self::allocate_for_layout(
                Layout::for_value(&*ptr),
                |layout| Global.allocate(layout),
                // `set_ptr_value` swaps in the new allocation's address while
                // keeping `ptr`'s pointer metadata (slice length / vtable).
                |mem| (ptr as *mut ArcInner<T>).set_ptr_value(mem) as *mut ArcInner<T>,
            )
        }
    }

    /// Moves the contents of a `Box<T>` into a freshly allocated `Arc<T>`,
    /// freeing the box's allocation without running `T`'s destructor.
    fn from_box(v: Box<T>) -> Arc<T> {
        unsafe {
            let (box_unique, alloc) = Box::into_unique(v);
            let bptr = box_unique.as_ptr();

            let value_size = size_of_val(&*bptr);
            let ptr = Self::allocate_for_ptr(bptr);

            // Copy value as bytes
            ptr::copy_nonoverlapping(
                bptr as *const T as *const u8,
                &mut (*ptr).data as *mut _ as *mut u8,
                value_size,
            );

            // Free the allocation without dropping its contents
            // (ownership of the value was just moved into the ArcInner).
            box_free(box_unique, alloc);

            Self::from_ptr(ptr)
        }
    }
}
24426
impl<T> Arc<[T]> {
    /// Allocates an `ArcInner<[T]>` with the given length.
    unsafe fn allocate_for_slice(len: usize) -> *mut ArcInner<[T]> {
        unsafe {
            Self::allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| Global.allocate(layout),
                // Attach the slice length as pointer metadata to form the fat pointer.
                |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>,
            )
        }
    }

    /// Copy elements from slice into newly allocated Arc<\[T\]>
    ///
    /// Unsafe because the caller must either take ownership or bind `T: Copy`.
    unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> {
        unsafe {
            let ptr = Self::allocate_for_slice(v.len());

            // Bitwise-copy all elements in one shot; the caller guarantees this
            // does not duplicate ownership (see the safety note above).
            ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len());

            Self::from_ptr(ptr)
        }
    }

    /// Constructs an `Arc<[T]>` from an iterator known to be of a certain size.
    ///
    /// Behavior is undefined should the size be wrong.
    unsafe fn from_iter_exact(iter: impl iter::Iterator<Item = T>, len: usize) -> Arc<[T]> {
        // Panic guard while cloning T elements.
        // In the event of a panic, elements that have been written
        // into the new ArcInner will be dropped, then the memory freed.
        struct Guard<T> {
            mem: NonNull<u8>, // base address of the ArcInner allocation
            elems: *mut T,    // first element slot inside the data field
            layout: Layout,   // layout used for the allocation
            n_elems: usize,   // number of elements initialized so far
        }

        impl<T> Drop for Guard<T> {
            fn drop(&mut self) {
                unsafe {
                    // Drop exactly the initialized prefix, then free the
                    // allocation itself.
                    let slice = from_raw_parts_mut(self.elems, self.n_elems);
                    ptr::drop_in_place(slice);

                    Global.deallocate(self.mem, self.layout);
                }
            }
        }

        unsafe {
            let ptr = Self::allocate_for_slice(len);

            let mem = ptr as *mut _ as *mut u8;
            let layout = Layout::for_value(&*ptr);

            // Pointer to first element
            let elems = &mut (*ptr).data as *mut [T] as *mut T;

            let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };

            for (i, item) in iter.enumerate() {
                ptr::write(elems.add(i), item);
                // Count each successful write so the guard cleans up only the
                // initialized prefix if `iter` panics part-way through.
                guard.n_elems += 1;
            }

            // All clear. Forget the guard so it doesn't free the new ArcInner.
            mem::forget(guard);

            Self::from_ptr(ptr)
        }
    }
}
24500
/// Specialization trait used for `From<&[T]>`.
trait ArcFromSlice<T> {
    /// Builds an `Arc<[T]>` containing a copy of `slice`'s elements.
    fn from_slice(slice: &[T]) -> Self;
}
24505
impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
    #[inline]
    default fn from_slice(v: &[T]) -> Self {
        // Generic fallback for any `T: Clone`: clone each element into the new
        // allocation. SAFETY: the iterator yields exactly `v.len()` items, as
        // `from_iter_exact` requires.
        unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
    }
}
24512
impl<T: Copy> ArcFromSlice<T> for Arc<[T]> {
    #[inline]
    fn from_slice(v: &[T]) -> Self {
        // Specialization for `T: Copy`: a single bulk byte copy suffices, since
        // `Copy` types have no drop glue and need no per-element `clone` call.
        unsafe { Arc::copy_from_slice(v) }
    }
}
24519
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Clone for Arc<T> {
    /// Makes a clone of the `Arc` pointer.
    ///
    /// This creates another pointer to the same allocation, increasing the
    /// strong reference count.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// let _ = Arc::clone(&five);
    /// ```
    #[inline]
    fn clone(&self) -> Arc<T> {
        // Using a relaxed ordering is alright here, as knowledge of the
        // original reference prevents other threads from erroneously deleting
        // the object.
        //
        // As explained in the [Boost documentation][1], Increasing the
        // reference counter can always be done with memory_order_relaxed: New
        // references to an object can only be formed from an existing
        // reference, and passing an existing reference from one thread to
        // another must already provide any required synchronization.
        //
        // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
        let old_size = self.inner().strong.fetch_add(1, Relaxed);

        // However we need to guard against massive refcounts in case someone
        // is `mem::forget`ing Arcs. If we don't do this the count can overflow
        // and users will use-after free. We racily saturate to `isize::MAX` on
        // the assumption that there aren't ~2 billion threads incrementing
        // the reference count at once. This branch will never be taken in
        // any realistic program.
        //
        // We abort because such a program is incredibly degenerate, and we
        // don't care to support it.
        if old_size > MAX_REFCOUNT {
            abort();
        }

        // The count is already bumped; just duplicate the pointer.
        Self::from_inner(self.ptr)
    }
}
24567
24568#[stable(feature = "rust1", since = "1.0.0")]
24569impl<T: ?Sized> Deref for Arc<T> {
24570    type Target = T;
24571
24572    #[inline]
24573    fn deref(&self) -> &T {
24574        &self.inner().data
24575    }
24576}
24577
// Marker impl: allows `Arc<T>` to act as a method receiver (e.g. `self: Arc<Self>`).
#[unstable(feature = "receiver_trait", issue = "none")]
impl<T: ?Sized> Receiver for Arc<T> {}
24580
impl<T: Clone> Arc<T> {
    /// Makes a mutable reference into the given `Arc`.
    ///
    /// If there are other `Arc` or [`Weak`] pointers to the same allocation,
    /// then `make_mut` will create a new allocation and invoke [`clone`][clone] on the inner value
    /// to ensure unique ownership. This is also referred to as clone-on-write.
    ///
    /// Note that this differs from the behavior of [`Rc::make_mut`] which disassociates
    /// any remaining `Weak` pointers.
    ///
    /// See also [`get_mut`][get_mut], which will fail rather than cloning.
    ///
    /// [clone]: Clone::clone
    /// [get_mut]: Arc::get_mut
    /// [`Rc::make_mut`]: super::rc::Rc::make_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let mut data = Arc::new(5);
    ///
    /// *Arc::make_mut(&mut data) += 1;         // Won't clone anything
    /// let mut other_data = Arc::clone(&data); // Won't clone inner data
    /// *Arc::make_mut(&mut data) += 1;         // Clones inner data
    /// *Arc::make_mut(&mut data) += 1;         // Won't clone anything
    /// *Arc::make_mut(&mut other_data) *= 2;   // Won't clone anything
    ///
    /// // Now `data` and `other_data` point to different allocations.
    /// assert_eq!(*data, 8);
    /// assert_eq!(*other_data, 12);
    /// ```
    #[inline]
    #[stable(feature = "arc_unique", since = "1.4.0")]
    pub fn make_mut(this: &mut Self) -> &mut T {
        // Note that we hold both a strong reference and a weak reference.
        // Thus, releasing our strong reference only will not, by itself, cause
        // the memory to be deallocated.
        //
        // Use Acquire to ensure that we see any writes to `weak` that happen
        // before release writes (i.e., decrements) to `strong`. Since we hold a
        // weak count, there's no chance the ArcInner itself could be
        // deallocated.
        if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
            // Another strong pointer exists, so we must clone.
            // Pre-allocate memory to allow writing the cloned value directly.
            let mut arc = Self::new_uninit();
            unsafe {
                // SAFETY: `arc` was just created and is uniquely owned, so
                // writing through `get_mut_unchecked` cannot race with anyone.
                let data = Arc::get_mut_unchecked(&mut arc);
                (**this).write_clone_into_raw(data.as_mut_ptr());
                *this = arc.assume_init();
            }
        } else if this.inner().weak.load(Relaxed) != 1 {
            // Relaxed suffices in the above because this is fundamentally an
            // optimization: we are always racing with weak pointers being
            // dropped. Worst case, we end up allocating a new Arc unnecessarily.

            // We removed the last strong ref, but there are additional weak
            // refs remaining. We'll move the contents to a new Arc, and
            // invalidate the other weak refs.

            // Note that it is not possible for the read of `weak` to yield
            // usize::MAX (i.e., locked), since the weak count can only be
            // locked by a thread with a strong reference.

            // Materialize our own implicit weak pointer, so that it can clean
            // up the ArcInner as needed.
            let _weak = Weak { ptr: this.ptr };

            // Can just steal the data, all that's left is Weaks
            let mut arc = Self::new_uninit();
            unsafe {
                // SAFETY: the strong count was set to 0 above, so no strong
                // reference can read the old payload while we move it out.
                let data = Arc::get_mut_unchecked(&mut arc);
                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
                // `ptr::write` avoids dropping the old `Arc`; `_weak` above
                // is responsible for releasing the old allocation.
                ptr::write(this, arc.assume_init());
            }
        } else {
            // We were the sole reference of either kind; bump back up the
            // strong ref count.
            this.inner().strong.store(1, Release);
        }

        // As with `get_mut()`, the unsafety is ok because our reference was
        // either unique to begin with, or became one upon cloning the contents.
        unsafe { Self::get_mut_unchecked(this) }
    }
}
24669
impl<T: ?Sized> Arc<T> {
    /// Returns a mutable reference into the given `Arc`, if there are
    /// no other `Arc` or [`Weak`] pointers to the same allocation.
    ///
    /// Returns [`None`] otherwise, because it is not safe to
    /// mutate a shared value.
    ///
    /// See also [`make_mut`][make_mut], which will [`clone`][clone]
    /// the inner value when there are other pointers.
    ///
    /// [make_mut]: Arc::make_mut
    /// [clone]: Clone::clone
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let mut x = Arc::new(3);
    /// *Arc::get_mut(&mut x).unwrap() = 4;
    /// assert_eq!(*x, 4);
    ///
    /// let _y = Arc::clone(&x);
    /// assert!(Arc::get_mut(&mut x).is_none());
    /// ```
    #[inline]
    #[stable(feature = "arc_unique", since = "1.4.0")]
    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
        if this.is_unique() {
            // This unsafety is ok because we're guaranteed that the pointer
            // returned is the *only* pointer that will ever be returned to T. Our
            // reference count is guaranteed to be 1 at this point, and we required
            // the Arc itself to be `mut`, so we're returning the only possible
            // reference to the inner data.
            unsafe { Some(Arc::get_mut_unchecked(this)) }
        } else {
            None
        }
    }

    /// Returns a mutable reference into the given `Arc`,
    /// without any check.
    ///
    /// See also [`get_mut`], which is safe and does appropriate checks.
    ///
    /// [`get_mut`]: Arc::get_mut
    ///
    /// # Safety
    ///
    /// Any other `Arc` or [`Weak`] pointers to the same allocation must not be dereferenced
    /// for the duration of the returned borrow.
    /// This is trivially the case if no such pointers exist,
    /// for example immediately after `Arc::new`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::sync::Arc;
    ///
    /// let mut x = Arc::new(String::new());
    /// unsafe {
    ///     Arc::get_mut_unchecked(&mut x).push_str("foo")
    /// }
    /// assert_eq!(*x, "foo");
    /// ```
    #[inline]
    #[unstable(feature = "get_mut_unchecked", issue = "63292")]
    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
        // We are careful to *not* create a reference covering the "count" fields, as
        // this would alias with concurrent access to the reference counts (e.g. by `Weak`).
        unsafe { &mut (*this.ptr.as_ptr()).data }
    }

    /// Determine whether this is the unique reference (including weak refs) to
    /// the underlying data.
    ///
    /// Note that this requires locking the weak ref count.
    fn is_unique(&mut self) -> bool {
        // lock the weak pointer count if we appear to be the sole weak pointer
        // holder. (`usize::MAX` serves as the "locked" sentinel value.)
        //
        // The acquire label here ensures a happens-before relationship with any
        // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements
        // of the `weak` count (via `Weak::drop`, which uses release).  If the upgraded
        // weak ref was never dropped, the CAS here will fail so we do not care to synchronize.
        if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
            // This needs to be an `Acquire` to synchronize with the decrement of the `strong`
            // counter in `drop` -- the only access that happens when any but the last reference
            // is being dropped.
            let unique = self.inner().strong.load(Acquire) == 1;

            // The release write here synchronizes with a read in `downgrade`,
            // effectively preventing the above read of `strong` from happening
            // after the write.
            self.inner().weak.store(1, Release); // release the lock
            unique
        } else {
            // Another `Weak` exists (or the count is locked), so we are not unique.
            false
        }
    }
}
24773
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
    /// Drops the `Arc`.
    ///
    /// This will decrement the strong reference count. If the strong reference
    /// count reaches zero then the only other references (if any) are
    /// [`Weak`], so we `drop` the inner value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo  = Arc::new(Foo);
    /// let foo2 = Arc::clone(&foo);
    ///
    /// drop(foo);    // Doesn't print anything
    /// drop(foo2);   // Prints "dropped!"
    /// ```
    #[inline]
    fn drop(&mut self) {
        // Because `fetch_sub` is already atomic, we do not need to synchronize
        // with other threads unless we are going to delete the object. This
        // same logic applies to the below `fetch_sub` to the `weak` count.
        if self.inner().strong.fetch_sub(1, Release) != 1 {
            // Other strong references remain; the payload stays alive.
            return;
        }

        // This fence is needed to prevent reordering of use of the data and
        // deletion of the data.  Because it is marked `Release`, the decreasing
        // of the reference count synchronizes with this `Acquire` fence. This
        // means that use of the data happens before decreasing the reference
        // count, which happens before this fence, which happens before the
        // deletion of the data.
        //
        // As explained in the [Boost documentation][1],
        //
        // > It is important to enforce any possible access to the object in one
        // > thread (through an existing reference) to *happen before* deleting
        // > the object in a different thread. This is achieved by a "release"
        // > operation after dropping a reference (any access to the object
        // > through this reference must obviously happened before), and an
        // > "acquire" operation before deleting the object.
        //
        // In particular, while the contents of an Arc are usually immutable, it's
        // possible to have interior writes to something like a Mutex<T>. Since a
        // Mutex is not acquired when it is deleted, we can't rely on its
        // synchronization logic to make writes in thread A visible to a destructor
        // running in thread B.
        //
        // Also note that the Acquire fence here could probably be replaced with an
        // Acquire load, which could improve performance in highly-contended
        // situations. See [2].
        //
        // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
        // [2]: (https://github.com/rust-lang/rust/pull/41714)
        acquire!(self.inner().strong);

        // SAFETY: the strong count just reached zero, so we hold the last
        // strong reference and may destroy the payload.
        unsafe {
            self.drop_slow();
        }
    }
}
24845
impl Arc<dyn Any + Send + Sync> {
    #[inline]
    #[stable(feature = "rc_downcast", since = "1.29.0")]
    /// Attempt to downcast the `Arc<dyn Any + Send + Sync>` to a concrete type.
    ///
    /// # Errors
    ///
    /// Returns the original `Arc` unchanged in `Err` if the inner value is not
    /// of type `T`, so the caller keeps its reference.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::any::Any;
    /// use std::sync::Arc;
    ///
    /// fn print_if_string(value: Arc<dyn Any + Send + Sync>) {
    ///     if let Ok(string) = value.downcast::<String>() {
    ///         println!("String ({}): {}", string.len(), string);
    ///     }
    /// }
    ///
    /// let my_string = "Hello World".to_string();
    /// print_if_string(Arc::new(my_string));
    /// print_if_string(Arc::new(0i8));
    /// ```
    pub fn downcast<T>(self) -> Result<Arc<T>, Self>
    where
        T: Any + Send + Sync + 'static,
    {
        if (*self).is::<T>() {
            // The type matches, so reinterpret the pointer at the concrete type.
            // `mem::forget` keeps the strong count intact: ownership of this
            // reference transfers to the returned `Arc<T>`.
            let ptr = self.ptr.cast::<ArcInner<T>>();
            mem::forget(self);
            Ok(Arc::from_inner(ptr))
        } else {
            Err(self)
        }
    }
}
24880
impl<T> Weak<T> {
    /// Constructs a new `Weak<T>`, without allocating any memory.
    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: Weak::upgrade
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Weak;
    ///
    /// let empty: Weak<i64> = Weak::new();
    /// assert!(empty.upgrade().is_none());
    /// ```
    #[stable(feature = "downgraded_weak", since = "1.10.0")]
    pub fn new() -> Weak<T> {
        // `usize::MAX` is the sentinel address for a dangling `Weak` with no
        // backing allocation (recognized via `is_dangling`). The `expect`
        // cannot fail: the address is non-zero by construction.
        Weak { ptr: NonNull::new(usize::MAX as *mut ArcInner<T>).expect("MAX is not 0") }
    }
}
24900
/// Helper type to allow accessing the reference counts without
/// making any assertions about the data field.
struct WeakInner<'a> {
    /// Borrow of the allocation's weak reference count.
    weak: &'a atomic::AtomicUsize,
    /// Borrow of the allocation's strong reference count.
    strong: &'a atomic::AtomicUsize,
}
24907
impl<T: ?Sized> Weak<T> {
    /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
    ///
    /// The pointer is valid only if there are some strong references. The pointer may be dangling,
    /// unaligned or even [`null`] otherwise.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    /// use std::ptr;
    ///
    /// let strong = Arc::new("hello".to_owned());
    /// let weak = Arc::downgrade(&strong);
    /// // Both point to the same object
    /// assert!(ptr::eq(&*strong, weak.as_ptr()));
    /// // The strong here keeps it alive, so we can still access the object.
    /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
    ///
    /// drop(strong);
    /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
    /// // undefined behaviour.
    /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
    /// ```
    ///
    /// [`null`]: core::ptr::null
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    pub fn as_ptr(&self) -> *const T {
        let ptr: *mut ArcInner<T> = NonNull::as_ptr(self.ptr);

        if is_dangling(ptr) {
            // If the pointer is dangling, we return the sentinel directly. This cannot be
            // a valid payload address, as the payload is at least as aligned as ArcInner (usize).
            ptr as *const T
        } else {
            // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
            // The payload may be dropped at this point, and we have to maintain provenance,
            // so use raw pointer manipulation.
            unsafe { ptr::addr_of_mut!((*ptr).data) }
        }
    }

    /// Consumes the `Weak<T>` and turns it into a raw pointer.
    ///
    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
    /// one weak reference (the weak count is not modified by this operation). It can be turned
    /// back into the `Weak<T>` with [`from_raw`].
    ///
    /// The same restrictions of accessing the target of the pointer as with
    /// [`as_ptr`] apply.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::{Arc, Weak};
    ///
    /// let strong = Arc::new("hello".to_owned());
    /// let weak = Arc::downgrade(&strong);
    /// let raw = weak.into_raw();
    ///
    /// assert_eq!(1, Arc::weak_count(&strong));
    /// assert_eq!("hello", unsafe { &*raw });
    ///
    /// drop(unsafe { Weak::from_raw(raw) });
    /// assert_eq!(0, Arc::weak_count(&strong));
    /// ```
    ///
    /// [`from_raw`]: Weak::from_raw
    /// [`as_ptr`]: Weak::as_ptr
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    pub fn into_raw(self) -> *const T {
        let result = self.as_ptr();
        // Skip `Weak::drop` so the weak count stays owned by the raw pointer.
        mem::forget(self);
        result
    }

    /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
    ///
    /// This can be used to safely get a strong reference (by calling [`upgrade`]
    /// later) or to deallocate the weak count by dropping the `Weak<T>`.
    ///
    /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
    /// as these don't own anything; the method still works on them).
    ///
    /// # Safety
    ///
    /// The pointer must have originated from the [`into_raw`] and must still own its potential
    /// weak reference.
    ///
    /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
    /// takes ownership of one weak reference currently represented as a raw pointer (the weak
    /// count is not modified by this operation) and therefore it must be paired with a previous
    /// call to [`into_raw`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::{Arc, Weak};
    ///
    /// let strong = Arc::new("hello".to_owned());
    ///
    /// let raw_1 = Arc::downgrade(&strong).into_raw();
    /// let raw_2 = Arc::downgrade(&strong).into_raw();
    ///
    /// assert_eq!(2, Arc::weak_count(&strong));
    ///
    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
    /// assert_eq!(1, Arc::weak_count(&strong));
    ///
    /// drop(strong);
    ///
    /// // Decrement the last weak count.
    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
    /// ```
    ///
    /// [`new`]: Weak::new
    /// [`into_raw`]: Weak::into_raw
    /// [`upgrade`]: Weak::upgrade
    /// [`forget`]: std::mem::forget
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        // See Weak::as_ptr for context on how the input pointer is derived.

        let ptr = if is_dangling(ptr as *mut T) {
            // This is a dangling Weak.
            ptr as *mut ArcInner<T>
        } else {
            // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
            // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
            let offset = unsafe { data_offset(ptr) };
            // Thus, we reverse the offset to get the whole ArcInner.
            // SAFETY: the pointer originated from a Weak, so this offset is safe.
            unsafe { (ptr as *mut ArcInner<T>).set_ptr_value((ptr as *mut u8).offset(-offset)) }
        };

        // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
        Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } }
    }
}
25046
25047impl<T: ?Sized> Weak<T> {
    /// Attempts to upgrade the `Weak` pointer to an [`Arc`], delaying
    /// dropping of the inner value if successful.
    ///
    /// Returns [`None`] if the inner value has since been dropped.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// let weak_five = Arc::downgrade(&five);
    ///
    /// let strong_five: Option<Arc<_>> = weak_five.upgrade();
    /// assert!(strong_five.is_some());
    ///
    /// // Destroy all strong pointers.
    /// drop(strong_five);
    /// drop(five);
    ///
    /// assert!(weak_five.upgrade().is_none());
    /// ```
    #[stable(feature = "arc_weak", since = "1.4.0")]
    pub fn upgrade(&self) -> Option<Arc<T>> {
        // We use a CAS loop to increment the strong count instead of a
        // fetch_add as this function should never take the reference count
        // from zero to one.
        //
        // `inner()` is `None` for a dangling `Weak` (e.g. one from
        // `Weak::new`), in which case there is nothing to upgrade.
        let inner = self.inner()?;

        // Relaxed load because any write of 0 that we can observe
        // leaves the field in a permanently zero state (so a
        // "stale" read of 0 is fine), and any other value is
        // confirmed via the CAS below.
        let mut n = inner.strong.load(Relaxed);

        loop {
            if n == 0 {
                return None;
            }

            // See comments in `Arc::clone` for why we do this (for `mem::forget`).
            if n > MAX_REFCOUNT {
                abort();
            }

            // Relaxed is fine for the failure case because we don't have any expectations about the new state.
            // Acquire is necessary for the success case to synchronise with `Arc::new_cyclic`, when the inner
            // value can be initialized after `Weak` references have already been created. In that case, we
            // expect to observe the fully initialized value.
            match inner.strong.compare_exchange_weak(n, n + 1, Acquire, Relaxed) {
                Ok(_) => return Some(Arc::from_inner(self.ptr)), // null checked above
                Err(old) => n = old,
            }
        }
    }
25104
25105    /// Gets the number of strong (`Arc`) pointers pointing to this allocation.
25106    ///
25107    /// If `self` was created using [`Weak::new`], this will return 0.
25108    #[stable(feature = "weak_counts", since = "1.41.0")]
25109    pub fn strong_count(&self) -> usize {
25110        if let Some(inner) = self.inner() { inner.strong.load(SeqCst) } else { 0 }
25111    }
25112
25113    /// Gets an approximation of the number of `Weak` pointers pointing to this
25114    /// allocation.
25115    ///
25116    /// If `self` was created using [`Weak::new`], or if there are no remaining
25117    /// strong pointers, this will return 0.
25118    ///
25119    /// # Accuracy
25120    ///
25121    /// Due to implementation details, the returned value can be off by 1 in
25122    /// either direction when other threads are manipulating any `Arc`s or
25123    /// `Weak`s pointing to the same allocation.
25124    #[stable(feature = "weak_counts", since = "1.41.0")]
25125    pub fn weak_count(&self) -> usize {
25126        self.inner()
25127            .map(|inner| {
25128                let weak = inner.weak.load(SeqCst);
25129                let strong = inner.strong.load(SeqCst);
25130                if strong == 0 {
25131                    0
25132                } else {
25133                    // Since we observed that there was at least one strong pointer
25134                    // after reading the weak count, we know that the implicit weak
25135                    // reference (present whenever any strong references are alive)
25136                    // was still around when we observed the weak count, and can
25137                    // therefore safely subtract it.
25138                    weak - 1
25139                }
25140            })
25141            .unwrap_or(0)
25142    }
25143
25144    /// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`,
25145    /// (i.e., when this `Weak` was created by `Weak::new`).
25146    #[inline]
25147    fn inner(&self) -> Option<WeakInner<'_>> {
25148        if is_dangling(self.ptr.as_ptr()) {
25149            None
25150        } else {
25151            // We are careful to *not* create a reference covering the "data" field, as
25152            // the field may be mutated concurrently (for example, if the last `Arc`
25153            // is dropped, the data field will be dropped in-place).
25154            Some(unsafe {
25155                let ptr = self.ptr.as_ptr();
25156                WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
25157            })
25158        }
25159    }
25160
25161    /// Returns `true` if the two `Weak`s point to the same allocation (similar to
25162    /// [`ptr::eq`]), or if both don't point to any allocation
25163    /// (because they were created with `Weak::new()`).
25164    ///
25165    /// # Notes
25166    ///
25167    /// Since this compares pointers it means that `Weak::new()` will equal each
25168    /// other, even though they don't point to any allocation.
25169    ///
25170    /// # Examples
25171    ///
25172    /// ```
25173    /// use std::sync::Arc;
25174    ///
25175    /// let first_rc = Arc::new(5);
25176    /// let first = Arc::downgrade(&first_rc);
25177    /// let second = Arc::downgrade(&first_rc);
25178    ///
25179    /// assert!(first.ptr_eq(&second));
25180    ///
25181    /// let third_rc = Arc::new(5);
25182    /// let third = Arc::downgrade(&third_rc);
25183    ///
25184    /// assert!(!first.ptr_eq(&third));
25185    /// ```
25186    ///
25187    /// Comparing `Weak::new`.
25188    ///
25189    /// ```
25190    /// use std::sync::{Arc, Weak};
25191    ///
25192    /// let first = Weak::new();
25193    /// let second = Weak::new();
25194    /// assert!(first.ptr_eq(&second));
25195    ///
25196    /// let third_rc = Arc::new(());
25197    /// let third = Arc::downgrade(&third_rc);
25198    /// assert!(!first.ptr_eq(&third));
25199    /// ```
25200    ///
25201    /// [`ptr::eq`]: core::ptr::eq
25202    #[inline]
25203    #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
25204    pub fn ptr_eq(&self, other: &Self) -> bool {
25205        self.ptr.as_ptr() == other.ptr.as_ptr()
25206    }
25207}
25208
#[stable(feature = "arc_weak", since = "1.4.0")]
impl<T: ?Sized> Clone for Weak<T> {
    /// Makes a clone of the `Weak` pointer that points to the same allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::{Arc, Weak};
    ///
    /// let weak_five = Arc::downgrade(&Arc::new(5));
    ///
    /// let _ = Weak::clone(&weak_five);
    /// ```
    #[inline]
    fn clone(&self) -> Weak<T> {
        // A dangling `Weak` (from `Weak::new`) has no counts to bump; just copy
        // the sentinel pointer.
        let inner = if let Some(inner) = self.inner() {
            inner
        } else {
            return Weak { ptr: self.ptr };
        };
        // See comments in Arc::clone() for why this is relaxed.  This can use a
        // fetch_add (ignoring the lock) because the weak count is only locked
        // when there are *no other* weak pointers in existence. (So we can't be
        // running this code in that case).
        let old_size = inner.weak.fetch_add(1, Relaxed);

        // See comments in Arc::clone() for why we do this (for mem::forget).
        if old_size > MAX_REFCOUNT {
            abort();
        }

        Weak { ptr: self.ptr }
    }
}
25243
25244#[stable(feature = "downgraded_weak", since = "1.10.0")]
25245impl<T> Default for Weak<T> {
25246    /// Constructs a new `Weak<T>`, without allocating memory.
25247    /// Calling [`upgrade`] on the return value always
25248    /// gives [`None`].
25249    ///
25250    /// [`upgrade`]: Weak::upgrade
25251    ///
25252    /// # Examples
25253    ///
25254    /// ```
25255    /// use std::sync::Weak;
25256    ///
25257    /// let empty: Weak<i64> = Default::default();
25258    /// assert!(empty.upgrade().is_none());
25259    /// ```
25260    fn default() -> Weak<T> {
25261        Weak::new()
25262    }
25263}
25264
#[stable(feature = "arc_weak", since = "1.4.0")]
impl<T: ?Sized> Drop for Weak<T> {
    /// Drops the `Weak` pointer.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::{Arc, Weak};
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo = Arc::new(Foo);
    /// let weak_foo = Arc::downgrade(&foo);
    /// let other_weak_foo = Weak::clone(&weak_foo);
    ///
    /// drop(weak_foo);   // Doesn't print anything
    /// drop(foo);        // Prints "dropped!"
    ///
    /// assert!(other_weak_foo.upgrade().is_none());
    /// ```
    fn drop(&mut self) {
        // If we find out that we were the last weak pointer, then its time to
        // deallocate the data entirely. See the discussion in Arc::drop() about
        // the memory orderings
        //
        // It's not necessary to check for the locked state here, because the
        // weak count can only be locked if there was precisely one weak ref,
        // meaning that drop could only subsequently run ON that remaining weak
        // ref, which can only happen after the lock is released.
        //
        // A dangling `Weak` (from `Weak::new`) owns nothing, so there is
        // nothing to decrement or free.
        let inner = if let Some(inner) = self.inner() { inner } else { return };

        if inner.weak.fetch_sub(1, Release) == 1 {
            // The `acquire!` fence pairs with the `Release` decrement above so
            // the deallocation below happens-after all prior uses.
            acquire!(inner.weak);
            // SAFETY: the weak count just reached zero, which means no `Arc` or
            // `Weak` referencing this allocation remains (the implicit weak held
            // by strong refs is included in the count), so freeing it is sound.
            unsafe { Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())) }
        }
    }
}
25308
#[stable(feature = "rust1", since = "1.0.0")]
/// Specialization helper for `PartialEq for Arc<T>`: allows an identity-aware
/// fast path (see the `MarkerEq` impl below) while keeping a generic fallback.
trait ArcEqIdent<T: ?Sized + PartialEq> {
    fn eq(&self, other: &Arc<T>) -> bool;
    fn ne(&self, other: &Arc<T>) -> bool;
}
25314
25315#[stable(feature = "rust1", since = "1.0.0")]
25316impl<T: ?Sized + PartialEq> ArcEqIdent<T> for Arc<T> {
25317    #[inline]
25318    default fn eq(&self, other: &Arc<T>) -> bool {
25319        **self == **other
25320    }
25321    #[inline]
25322    default fn ne(&self, other: &Arc<T>) -> bool {
25323        **self != **other
25324    }
25325}
25326
25327/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
25328/// would otherwise add a cost to all equality checks on refs. We assume that `Arc`s are used to
25329/// store large values, that are slow to clone, but also heavy to check for equality, causing this
25330/// cost to pay off more easily. It's also more likely to have two `Arc` clones, that point to
25331/// the same value, than two `&T`s.
25332///
25333/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
25334#[stable(feature = "rust1", since = "1.0.0")]
25335impl<T: ?Sized + crate::rc::MarkerEq> ArcEqIdent<T> for Arc<T> {
25336    #[inline]
25337    fn eq(&self, other: &Arc<T>) -> bool {
25338        Arc::ptr_eq(self, other) || **self == **other
25339    }
25340
25341    #[inline]
25342    fn ne(&self, other: &Arc<T>) -> bool {
25343        !Arc::ptr_eq(self, other) && **self != **other
25344    }
25345}
25346
25347#[stable(feature = "rust1", since = "1.0.0")]
25348impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
25349    /// Equality for two `Arc`s.
25350    ///
25351    /// Two `Arc`s are equal if their inner values are equal, even if they are
25352    /// stored in different allocation.
25353    ///
25354    /// If `T` also implements `Eq` (implying reflexivity of equality),
25355    /// two `Arc`s that point to the same allocation are always equal.
25356    ///
25357    /// # Examples
25358    ///
25359    /// ```
25360    /// use std::sync::Arc;
25361    ///
25362    /// let five = Arc::new(5);
25363    ///
25364    /// assert!(five == Arc::new(5));
25365    /// ```
25366    #[inline]
25367    fn eq(&self, other: &Arc<T>) -> bool {
25368        ArcEqIdent::eq(self, other)
25369    }
25370
25371    /// Inequality for two `Arc`s.
25372    ///
25373    /// Two `Arc`s are unequal if their inner values are unequal.
25374    ///
25375    /// If `T` also implements `Eq` (implying reflexivity of equality),
25376    /// two `Arc`s that point to the same value are never unequal.
25377    ///
25378    /// # Examples
25379    ///
25380    /// ```
25381    /// use std::sync::Arc;
25382    ///
25383    /// let five = Arc::new(5);
25384    ///
25385    /// assert!(five != Arc::new(6));
25386    /// ```
25387    #[inline]
25388    fn ne(&self, other: &Arc<T>) -> bool {
25389        ArcEqIdent::ne(self, other)
25390    }
25391}
25392
25393#[stable(feature = "rust1", since = "1.0.0")]
25394impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
25395    /// Partial comparison for two `Arc`s.
25396    ///
25397    /// The two are compared by calling `partial_cmp()` on their inner values.
25398    ///
25399    /// # Examples
25400    ///
25401    /// ```
25402    /// use std::sync::Arc;
25403    /// use std::cmp::Ordering;
25404    ///
25405    /// let five = Arc::new(5);
25406    ///
25407    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6)));
25408    /// ```
25409    fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
25410        (**self).partial_cmp(&**other)
25411    }
25412
25413    /// Less-than comparison for two `Arc`s.
25414    ///
25415    /// The two are compared by calling `<` on their inner values.
25416    ///
25417    /// # Examples
25418    ///
25419    /// ```
25420    /// use std::sync::Arc;
25421    ///
25422    /// let five = Arc::new(5);
25423    ///
25424    /// assert!(five < Arc::new(6));
25425    /// ```
25426    fn lt(&self, other: &Arc<T>) -> bool {
25427        *(*self) < *(*other)
25428    }
25429
25430    /// 'Less than or equal to' comparison for two `Arc`s.
25431    ///
25432    /// The two are compared by calling `<=` on their inner values.
25433    ///
25434    /// # Examples
25435    ///
25436    /// ```
25437    /// use std::sync::Arc;
25438    ///
25439    /// let five = Arc::new(5);
25440    ///
25441    /// assert!(five <= Arc::new(5));
25442    /// ```
25443    fn le(&self, other: &Arc<T>) -> bool {
25444        *(*self) <= *(*other)
25445    }
25446
25447    /// Greater-than comparison for two `Arc`s.
25448    ///
25449    /// The two are compared by calling `>` on their inner values.
25450    ///
25451    /// # Examples
25452    ///
25453    /// ```
25454    /// use std::sync::Arc;
25455    ///
25456    /// let five = Arc::new(5);
25457    ///
25458    /// assert!(five > Arc::new(4));
25459    /// ```
25460    fn gt(&self, other: &Arc<T>) -> bool {
25461        *(*self) > *(*other)
25462    }
25463
25464    /// 'Greater than or equal to' comparison for two `Arc`s.
25465    ///
25466    /// The two are compared by calling `>=` on their inner values.
25467    ///
25468    /// # Examples
25469    ///
25470    /// ```
25471    /// use std::sync::Arc;
25472    ///
25473    /// let five = Arc::new(5);
25474    ///
25475    /// assert!(five >= Arc::new(5));
25476    /// ```
25477    fn ge(&self, other: &Arc<T>) -> bool {
25478        *(*self) >= *(*other)
25479    }
25480}
25481#[stable(feature = "rust1", since = "1.0.0")]
25482impl<T: ?Sized + Ord> Ord for Arc<T> {
25483    /// Comparison for two `Arc`s.
25484    ///
25485    /// The two are compared by calling `cmp()` on their inner values.
25486    ///
25487    /// # Examples
25488    ///
25489    /// ```
25490    /// use std::sync::Arc;
25491    /// use std::cmp::Ordering;
25492    ///
25493    /// let five = Arc::new(5);
25494    ///
25495    /// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6)));
25496    /// ```
25497    fn cmp(&self, other: &Arc<T>) -> Ordering {
25498        (**self).cmp(&**other)
25499    }
25500}
#[stable(feature = "rust1", since = "1.0.0")]
// `Arc`'s `PartialEq` delegates to `T`, so equality is total whenever `T: Eq`.
impl<T: ?Sized + Eq> Eq for Arc<T> {}
25503
25504#[stable(feature = "rust1", since = "1.0.0")]
25505impl<T: ?Sized + fmt::Display> fmt::Display for Arc<T> {
25506    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
25507        fmt::Display::fmt(&**self, f)
25508    }
25509}
25510
25511#[stable(feature = "rust1", since = "1.0.0")]
25512impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
25513    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
25514        fmt::Debug::fmt(&**self, f)
25515    }
25516}
25517
25518#[stable(feature = "rust1", since = "1.0.0")]
25519impl<T: ?Sized> fmt::Pointer for Arc<T> {
25520    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
25521        fmt::Pointer::fmt(&(&**self as *const T), f)
25522    }
25523}
25524
25525#[stable(feature = "rust1", since = "1.0.0")]
25526impl<T: Default> Default for Arc<T> {
25527    /// Creates a new `Arc<T>`, with the `Default` value for `T`.
25528    ///
25529    /// # Examples
25530    ///
25531    /// ```
25532    /// use std::sync::Arc;
25533    ///
25534    /// let x: Arc<i32> = Default::default();
25535    /// assert_eq!(*x, 0);
25536    /// ```
25537    fn default() -> Arc<T> {
25538        Arc::new(Default::default())
25539    }
25540}
25541
25542#[stable(feature = "rust1", since = "1.0.0")]
25543impl<T: ?Sized + Hash> Hash for Arc<T> {
25544    fn hash<H: Hasher>(&self, state: &mut H) {
25545        (**self).hash(state)
25546    }
25547}
25548
25549#[stable(feature = "from_for_ptrs", since = "1.6.0")]
25550impl<T> From<T> for Arc<T> {
25551    fn from(t: T) -> Self {
25552        Arc::new(t)
25553    }
25554}
25555
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T: Clone> From<&[T]> for Arc<[T]> {
    /// Allocate a reference-counted slice and fill it by cloning `v`'s items.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::sync::Arc;
    /// let original: &[i32] = &[1, 2, 3];
    /// let shared: Arc<[i32]> = Arc::from(original);
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// ```
    #[inline]
    fn from(v: &[T]) -> Arc<[T]> {
        // Dispatch through the `ArcFromSlice` specialization helper; presumably
        // this selects a faster memcpy path for `T: Copy` — see its impls.
        <Self as ArcFromSlice<T>>::from_slice(v)
    }
}
25573
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl From<&str> for Arc<str> {
    /// Allocate a reference-counted `str` and copy `v` into it.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::sync::Arc;
    /// let shared: Arc<str> = Arc::from("eggplant");
    /// assert_eq!("eggplant", &shared[..]);
    /// ```
    #[inline]
    fn from(v: &str) -> Arc<str> {
        let arc = Arc::<[u8]>::from(v.as_bytes());
        // SAFETY: `str` has the same layout as `[u8]`, and the bytes came from a
        // valid `&str`, so they are guaranteed to be valid UTF-8. The
        // `into_raw`/`from_raw` round trip keeps the reference counts balanced.
        unsafe { Arc::from_raw(Arc::into_raw(arc) as *const str) }
    }
}
25591
25592#[stable(feature = "shared_from_slice", since = "1.21.0")]
25593impl From<String> for Arc<str> {
25594    /// Allocate a reference-counted `str` and copy `v` into it.
25595    ///
25596    /// # Example
25597    ///
25598    /// ```
25599    /// # use std::sync::Arc;
25600    /// let unique: String = "eggplant".to_owned();
25601    /// let shared: Arc<str> = Arc::from(unique);
25602    /// assert_eq!("eggplant", &shared[..]);
25603    /// ```
25604    #[inline]
25605    fn from(v: String) -> Arc<str> {
25606        Arc::from(&v[..])
25607    }
25608}
25609
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T: ?Sized> From<Box<T>> for Arc<T> {
    /// Move a boxed object to a new, reference-counted allocation.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::sync::Arc;
    /// let unique: Box<str> = Box::from("eggplant");
    /// let shared: Arc<str> = Arc::from(unique);
    /// assert_eq!("eggplant", &shared[..]);
    /// ```
    #[inline]
    fn from(v: Box<T>) -> Arc<T> {
        // Delegates to the internal `Arc::from_box` helper defined elsewhere in
        // this file.
        Arc::from_box(v)
    }
}
25627
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T> From<Vec<T>> for Arc<[T]> {
    /// Allocate a reference-counted slice and move `v`'s items into it.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::sync::Arc;
    /// let unique: Vec<i32> = vec![1, 2, 3];
    /// let shared: Arc<[i32]> = Arc::from(unique);
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// ```
    #[inline]
    fn from(mut v: Vec<T>) -> Arc<[T]> {
        unsafe {
            // SAFETY: `copy_from_slice` copies the elements into the new Arc
            // allocation; setting the Vec's length to 0 below ensures the Vec
            // never drops them, so each element is owned exactly once (by the Arc).
            let arc = Arc::copy_from_slice(&v);

            // Allow the Vec to free its memory, but not destroy its contents
            v.set_len(0);

            arc
        }
    }
}
25652
25653#[stable(feature = "shared_from_cow", since = "1.45.0")]
25654impl<'a, B> From<Cow<'a, B>> for Arc<B>
25655where
25656    B: ToOwned + ?Sized,
25657    Arc<B>: From<&'a B> + From<B::Owned>,
25658{
25659    #[inline]
25660    fn from(cow: Cow<'a, B>) -> Arc<B> {
25661        match cow {
25662            Cow::Borrowed(s) => Arc::from(s),
25663            Cow::Owned(s) => Arc::from(s),
25664        }
25665    }
25666}
25667
#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
impl<T, const N: usize> TryFrom<Arc<[T]>> for Arc<[T; N]> {
    type Error = Arc<[T]>;

    /// Converts an `Arc<[T]>` into an `Arc<[T; N]>` without copying, returning
    /// the original `Arc` unchanged as the error when the length is not `N`.
    fn try_from(boxed_slice: Arc<[T]>) -> Result<Self, Self::Error> {
        if boxed_slice.len() == N {
            // SAFETY: the length was just checked to be exactly `N`, and `[T; N]`
            // has the same memory layout as a `[T]` of length `N`, so the pointer
            // cast is valid. `into_raw`/`from_raw` keeps the counts balanced.
            Ok(unsafe { Arc::from_raw(Arc::into_raw(boxed_slice) as *mut [T; N]) })
        } else {
            Err(boxed_slice)
        }
    }
}
25680
25681#[stable(feature = "shared_from_iter", since = "1.37.0")]
25682impl<T> iter::FromIterator<T> for Arc<[T]> {
25683    /// Takes each element in the `Iterator` and collects it into an `Arc<[T]>`.
25684    ///
25685    /// # Performance characteristics
25686    ///
25687    /// ## The general case
25688    ///
25689    /// In the general case, collecting into `Arc<[T]>` is done by first
25690    /// collecting into a `Vec<T>`. That is, when writing the following:
25691    ///
25692    /// ```rust
25693    /// # use std::sync::Arc;
25694    /// let evens: Arc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
25695    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
25696    /// ```
25697    ///
25698    /// this behaves as if we wrote:
25699    ///
25700    /// ```rust
25701    /// # use std::sync::Arc;
25702    /// let evens: Arc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
25703    ///     .collect::<Vec<_>>() // The first set of allocations happens here.
25704    ///     .into(); // A second allocation for `Arc<[T]>` happens here.
25705    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
25706    /// ```
25707    ///
25708    /// This will allocate as many times as needed for constructing the `Vec<T>`
25709    /// and then it will allocate once for turning the `Vec<T>` into the `Arc<[T]>`.
25710    ///
25711    /// ## Iterators of known length
25712    ///
25713    /// When your `Iterator` implements `TrustedLen` and is of an exact size,
25714    /// a single allocation will be made for the `Arc<[T]>`. For example:
25715    ///
25716    /// ```rust
25717    /// # use std::sync::Arc;
25718    /// let evens: Arc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
25719    /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
25720    /// ```
25721    fn from_iter<I: iter::IntoIterator<Item = T>>(iter: I) -> Self {
25722        ToArcSlice::to_arc_slice(iter.into_iter())
25723    }
25724}
25725
/// Specialization trait used for collecting into `Arc<[T]>`.
trait ToArcSlice<T>: Iterator<Item = T> + Sized {
    /// Consumes the iterator and collects its items into an `Arc<[T]>`.
    fn to_arc_slice(self) -> Arc<[T]>;
}
25730
25731impl<T, I: Iterator<Item = T>> ToArcSlice<T> for I {
25732    default fn to_arc_slice(self) -> Arc<[T]> {
25733        self.collect::<Vec<T>>().into()
25734    }
25735}
25736
impl<T, I: iter::TrustedLen<Item = T>> ToArcSlice<T> for I {
    /// Single-allocation path for iterators whose length is known exactly.
    fn to_arc_slice(self) -> Arc<[T]> {
        // This is the case for a `TrustedLen` iterator.
        let (low, high) = self.size_hint();
        if let Some(high) = high {
            // `TrustedLen` guarantees the size hint is exact when the upper
            // bound exists; this only double-checks in debug builds.
            debug_assert_eq!(
                low,
                high,
                "TrustedLen iterator's size hint is not exact: {:?}",
                (low, high)
            );

            unsafe {
                // SAFETY: We need to ensure that the iterator has an exact length and we have.
                Arc::from_iter_exact(self, low)
            }
        } else {
            // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
            // length exceeding `usize::MAX`.
            // The default implementation would collect into a vec which would panic.
            // Thus we panic here immediately without invoking `Vec` code.
            panic!("capacity overflow");
        }
    }
}
25762
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
    /// Borrows the inner value by dereferencing the `Arc`.
    fn borrow(&self) -> &T {
        &**self
    }
}
25769
#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
impl<T: ?Sized> AsRef<T> for Arc<T> {
    /// Returns a reference to the inner value by dereferencing the `Arc`.
    fn as_ref(&self) -> &T {
        &**self
    }
}
25776
#[stable(feature = "pin", since = "1.33.0")]
// `Arc` is a pointer to a heap allocation: moving the `Arc` handle never moves
// the pointed-to `T`, so `Unpin` is unconditionally sound here.
impl<T: ?Sized> Unpin for Arc<T> {}
25779
/// Get the offset within an `ArcInner` for the payload behind a pointer.
///
/// # Safety
///
/// The pointer must point to (and have valid metadata for) a previously
/// valid instance of T, but the T is allowed to be dropped.
unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
    // Align the unsized value to the end of the ArcInner.
    // Because ArcInner is repr(C), it will always be the last field in memory.
    // SAFETY: since the only unsized types possible are slices, trait objects,
    // and extern types, the input safety requirement is currently enough to
    // satisfy the requirements of align_of_val_raw; this is an implementation
    // detail of the language that may not be relied upon outside of std.
    unsafe { data_offset_align(align_of_val_raw(ptr)) }
}
25795
25796#[inline]
25797fn data_offset_align(align: usize) -> isize {
25798    let layout = Layout::new::<ArcInner<()>>();
25799    (layout.size() + layout.padding_needed_for(align)) as isize
25800}
25801//! Utilities for formatting and printing `String`s.
25802//!
25803//! This module contains the runtime support for the [`format!`] syntax extension.
25804//! This macro is implemented in the compiler to emit calls to this module in
25805//! order to format arguments at runtime into strings.
25806//!
25807//! # Usage
25808//!
25809//! The [`format!`] macro is intended to be familiar to those coming from C's
25810//! `printf`/`fprintf` functions or Python's `str.format` function.
25811//!
25812//! Some examples of the [`format!`] extension are:
25813//!
25814//! ```
25815//! format!("Hello");                 // => "Hello"
25816//! format!("Hello, {}!", "world");   // => "Hello, world!"
25817//! format!("The number is {}", 1);   // => "The number is 1"
25818//! format!("{:?}", (3, 4));          // => "(3, 4)"
25819//! format!("{value}", value=4);      // => "4"
25820//! format!("{} {}", 1, 2);           // => "1 2"
25821//! format!("{:04}", 42);             // => "0042" with leading zeros
25822//! format!("{:#?}", (100, 200));     // => "(
25823//!                                   //       100,
25824//!                                   //       200,
25825//!                                   //     )"
25826//! ```
25827//!
25828//! From these, you can see that the first argument is a format string. It is
25829//! required by the compiler for this to be a string literal; it cannot be a
25830//! variable passed in (in order to perform validity checking). The compiler
25831//! will then parse the format string and determine if the list of arguments
25832//! provided is suitable to pass to this format string.
25833//!
25834//! To convert a single value to a string, use the [`to_string`] method. This
25835//! will use the [`Display`] formatting trait.
25836//!
25837//! ## Positional parameters
25838//!
25839//! Each formatting argument is allowed to specify which value argument it's
25840//! referencing, and if omitted it is assumed to be "the next argument". For
25841//! example, the format string `{} {} {}` would take three parameters, and they
25842//! would be formatted in the same order as they're given. The format string
25843//! `{2} {1} {0}`, however, would format arguments in reverse order.
25844//!
25845//! Things can get a little tricky once you start intermingling the two types of
25846//! positional specifiers. The "next argument" specifier can be thought of as an
//! iterator over the arguments. Each time a "next argument" specifier is seen,
25848//! the iterator advances. This leads to behavior like this:
25849//!
25850//! ```
25851//! format!("{1} {} {0} {}", 1, 2); // => "2 1 1 2"
25852//! ```
25853//!
//! The internal iterator over the arguments has not been advanced by the time
25855//! the first `{}` is seen, so it prints the first argument. Then upon reaching
25856//! the second `{}`, the iterator has advanced forward to the second argument.
25857//! Essentially, parameters that explicitly name their argument do not affect
25858//! parameters that do not name an argument in terms of positional specifiers.
25859//!
25860//! A format string is required to use all of its arguments, otherwise it is a
25861//! compile-time error. You may refer to the same argument more than once in the
25862//! format string.
25863//!
25864//! ## Named parameters
25865//!
25866//! Rust itself does not have a Python-like equivalent of named parameters to a
25867//! function, but the [`format!`] macro is a syntax extension that allows it to
25868//! leverage named parameters. Named parameters are listed at the end of the
25869//! argument list and have the syntax:
25870//!
25871//! ```text
25872//! identifier '=' expression
25873//! ```
25874//!
//! For example, the following [`format!`] expressions all use named arguments:
25876//!
25877//! ```
25878//! format!("{argument}", argument = "test");   // => "test"
25879//! format!("{name} {}", 1, name = 2);          // => "2 1"
25880//! format!("{a} {c} {b}", a="a", b='b', c=3);  // => "a 3 b"
25881//! ```
25882//!
25883//! It is not valid to put positional parameters (those without names) after
25884//! arguments that have names. Like with positional parameters, it is not
25885//! valid to provide named parameters that are unused by the format string.
25886//!
25887//! # Formatting Parameters
25888//!
25889//! Each argument being formatted can be transformed by a number of formatting
25890//! parameters (corresponding to `format_spec` in [the syntax](#syntax)). These
25891//! parameters affect the string representation of what's being formatted.
25892//!
25893//! ## Width
25894//!
25895//! ```
25896//! // All of these print "Hello x    !"
25897//! println!("Hello {:5}!", "x");
25898//! println!("Hello {:1$}!", "x", 5);
25899//! println!("Hello {1:0$}!", 5, "x");
25900//! println!("Hello {:width$}!", "x", width = 5);
25901//! ```
25902//!
25903//! This is a parameter for the "minimum width" that the format should take up.
25904//! If the value's string does not fill up this many characters, then the
25905//! padding specified by fill/alignment will be used to take up the required
25906//! space (see below).
25907//!
25908//! The value for the width can also be provided as a [`usize`] in the list of
25909//! parameters by adding a postfix `$`, indicating that the second argument is
25910//! a [`usize`] specifying the width.
25911//!
25912//! Referring to an argument with the dollar syntax does not affect the "next
25913//! argument" counter, so it's usually a good idea to refer to arguments by
25914//! position, or use named arguments.
25915//!
25916//! ## Fill/Alignment
25917//!
25918//! ```
25919//! assert_eq!(format!("Hello {:<5}!", "x"),  "Hello x    !");
25920//! assert_eq!(format!("Hello {:-<5}!", "x"), "Hello x----!");
25921//! assert_eq!(format!("Hello {:^5}!", "x"),  "Hello   x  !");
25922//! assert_eq!(format!("Hello {:>5}!", "x"),  "Hello     x!");
25923//! ```
25924//!
25925//! The optional fill character and alignment is provided normally in conjunction with the
25926//! [`width`](#width) parameter. It must be defined before `width`, right after the `:`.
25927//! This indicates that if the value being formatted is smaller than
25928//! `width` some extra characters will be printed around it.
25929//! Filling comes in the following variants for different alignments:
25930//!
25931//! * `[fill]<` - the argument is left-aligned in `width` columns
25932//! * `[fill]^` - the argument is center-aligned in `width` columns
25933//! * `[fill]>` - the argument is right-aligned in `width` columns
25934//!
25935//! The default [fill/alignment](#fillalignment) for non-numerics is a space and
25936//! left-aligned. The
25937//! default for numeric formatters is also a space character but with right-alignment. If
25938//! the `0` flag (see below) is specified for numerics, then the implicit fill character is
25939//! `0`.
25940//!
25941//! Note that alignment may not be implemented by some types. In particular, it
25942//! is not generally implemented for the `Debug` trait.  A good way to ensure
25943//! padding is applied is to format your input, then pad this resulting string
25944//! to obtain your output:
25945//!
25946//! ```
25947//! println!("Hello {:^15}!", format!("{:?}", Some("hi"))); // => "Hello   Some("hi")   !"
25948//! ```
25949//!
25950//! ## Sign/`#`/`0`
25951//!
25952//! ```
25953//! assert_eq!(format!("Hello {:+}!", 5), "Hello +5!");
25954//! assert_eq!(format!("{:#x}!", 27), "0x1b!");
25955//! assert_eq!(format!("Hello {:05}!", 5),  "Hello 00005!");
25956//! assert_eq!(format!("Hello {:05}!", -5), "Hello -0005!");
25957//! assert_eq!(format!("{:#010x}!", 27), "0x0000001b!");
25958//! ```
25959//!
25960//! These are all flags altering the behavior of the formatter.
25961//!
25962//! * `+` - This is intended for numeric types and indicates that the sign
25963//!         should always be printed. Positive signs are never printed by
25964//!         default, and the negative sign is only printed by default for signed values.
25965//!         This flag indicates that the correct sign (`+` or `-`) should always be printed.
25966//! * `-` - Currently not used
25967//! * `#` - This flag indicates that the "alternate" form of printing should
25968//!         be used. The alternate forms are:
25969//!     * `#?` - pretty-print the [`Debug`] formatting (adds linebreaks and indentation)
25970//!     * `#x` - precedes the argument with a `0x`
25971//!     * `#X` - precedes the argument with a `0x`
25972//!     * `#b` - precedes the argument with a `0b`
25973//!     * `#o` - precedes the argument with a `0o`
25974//! * `0` - This is used to indicate for integer formats that the padding to `width` should
25975//!         both be done with a `0` character as well as be sign-aware. A format
25976//!         like `{:08}` would yield `00000001` for the integer `1`, while the
25977//!         same format would yield `-0000001` for the integer `-1`. Notice that
25978//!         the negative version has one fewer zero than the positive version.
25979//!         Note that padding zeros are always placed after the sign (if any)
25980//!         and before the digits. When used together with the `#` flag, a similar
25981//!         rule applies: padding zeros are inserted after the prefix but before
25982//!         the digits. The prefix is included in the total width.
25983//!
25984//! ## Precision
25985//!
25986//! For non-numeric types, this can be considered a "maximum width". If the resulting string is
25987//! longer than this width, then it is truncated down to this many characters and that truncated
25988//! value is emitted with proper `fill`, `alignment` and `width` if those parameters are set.
25989//!
25990//! For integral types, this is ignored.
25991//!
25992//! For floating-point types, this indicates how many digits after the decimal point should be
25993//! printed.
25994//!
25995//! There are three possible ways to specify the desired `precision`:
25996//!
25997//! 1. An integer `.N`:
25998//!
25999//!    the integer `N` itself is the precision.
26000//!
26001//! 2. An integer or name followed by dollar sign `.N$`:
26002//!
26003//!    use format *argument* `N` (which must be a `usize`) as the precision.
26004//!
26005//! 3. An asterisk `.*`:
26006//!
26007//!    `.*` means that this `{...}` is associated with *two* format inputs rather than one: the
26008//!    first input holds the `usize` precision, and the second holds the value to print. Note that
26009//!    in this case, if one uses the format string `{<arg>:<spec>.*}`, then the `<arg>` part refers
26010//!    to the *value* to print, and the `precision` must come in the input preceding `<arg>`.
26011//!
26012//! For example, the following calls all print the same thing `Hello x is 0.01000`:
26013//!
26014//! ```
26015//! // Hello {arg 0 ("x")} is {arg 1 (0.01) with precision specified inline (5)}
26016//! println!("Hello {0} is {1:.5}", "x", 0.01);
26017//!
26018//! // Hello {arg 1 ("x")} is {arg 2 (0.01) with precision specified in arg 0 (5)}
26019//! println!("Hello {1} is {2:.0$}", 5, "x", 0.01);
26020//!
26021//! // Hello {arg 0 ("x")} is {arg 2 (0.01) with precision specified in arg 1 (5)}
26022//! println!("Hello {0} is {2:.1$}", "x", 5, 0.01);
26023//!
26024//! // Hello {next arg ("x")} is {second of next two args (0.01) with precision
26025//! //                          specified in first of next two args (5)}
26026//! println!("Hello {} is {:.*}",    "x", 5, 0.01);
26027//!
26028//! // Hello {next arg ("x")} is {arg 2 (0.01) with precision
26029//! //                          specified in its predecessor (5)}
26030//! println!("Hello {} is {2:.*}",   "x", 5, 0.01);
26031//!
26032//! // Hello {next arg ("x")} is {arg "number" (0.01) with precision specified
26033//! //                          in arg "prec" (5)}
26034//! println!("Hello {} is {number:.prec$}", "x", prec = 5, number = 0.01);
26035//! ```
26036//!
26037//! While these:
26038//!
26039//! ```
26040//! println!("{}, `{name:.*}` has 3 fractional digits", "Hello", 3, name=1234.56);
26041//! println!("{}, `{name:.*}` has 3 characters", "Hello", 3, name="1234.56");
26042//! println!("{}, `{name:>8.*}` has 3 right-aligned characters", "Hello", 3, name="1234.56");
26043//! ```
26044//!
26045//! print three significantly different things:
26046//!
26047//! ```text
26048//! Hello, `1234.560` has 3 fractional digits
26049//! Hello, `123` has 3 characters
26050//! Hello, `     123` has 3 right-aligned characters
26051//! ```
26052//!
26053//! ## Localization
26054//!
26055//! In some programming languages, the behavior of string formatting functions
26056//! depends on the operating system's locale setting. The format functions
26057//! provided by Rust's standard library do not have any concept of locale and
26058//! will produce the same results on all systems regardless of user
26059//! configuration.
26060//!
26061//! For example, the following code will always print `1.5` even if the system
26062//! locale uses a decimal separator other than a dot.
26063//!
26064//! ```
26065//! println!("The value is {}", 1.5);
26066//! ```
26067//!
26068//! # Escaping
26069//!
26070//! The literal characters `{` and `}` may be included in a string by preceding
26071//! them with the same character. For example, the `{` character is escaped with
26072//! `{{` and the `}` character is escaped with `}}`.
26073//!
26074//! ```
26075//! assert_eq!(format!("Hello {{}}"), "Hello {}");
26076//! assert_eq!(format!("{{ Hello"), "{ Hello");
26077//! ```
26078//!
26079//! # Syntax
26080//!
26081//! To summarize, here you can find the full grammar of format strings.
26082//! The syntax for the formatting language used is drawn from other languages,
26083//! so it should not be too alien. Arguments are formatted with Python-like
26084//! syntax, meaning that arguments are surrounded by `{}` instead of the C-like
26085//! `%`. The actual grammar for the formatting syntax is:
26086//!
26087//! ```text
26088//! format_string := text [ maybe_format text ] *
26089//! maybe_format := '{' '{' | '}' '}' | format
26090//! format := '{' [ argument ] [ ':' format_spec ] '}'
26091//! argument := integer | identifier
26092//!
26093//! format_spec := [[fill]align][sign]['#']['0'][width]['.' precision]type
26094//! fill := character
26095//! align := '<' | '^' | '>'
26096//! sign := '+' | '-'
26097//! width := count
26098//! precision := count | '*'
26099//! type := '' | '?' | 'x?' | 'X?' | identifier
26100//! count := parameter | integer
26101//! parameter := argument '$'
26102//! ```
26103//! In the above grammar, `text` may not contain any `'{'` or `'}'` characters.
26104//!
26105//! # Formatting traits
26106//!
26107//! When requesting that an argument be formatted with a particular type, you
26108//! are actually requesting that an argument ascribes to a particular trait.
26109//! This allows multiple actual types to be formatted via `{:x}` (like [`i8`] as
26110//! well as [`isize`]). The current mapping of types to traits is:
26111//!
26112//! * *nothing* ⇒ [`Display`]
26113//! * `?` ⇒ [`Debug`]
26114//! * `x?` ⇒ [`Debug`] with lower-case hexadecimal integers
26115//! * `X?` ⇒ [`Debug`] with upper-case hexadecimal integers
26116//! * `o` ⇒ [`Octal`]
26117//! * `x` ⇒ [`LowerHex`]
26118//! * `X` ⇒ [`UpperHex`]
26119//! * `p` ⇒ [`Pointer`]
26120//! * `b` ⇒ [`Binary`]
26121//! * `e` ⇒ [`LowerExp`]
26122//! * `E` ⇒ [`UpperExp`]
26123//!
26124//! What this means is that any type of argument which implements the
26125//! [`fmt::Binary`][`Binary`] trait can then be formatted with `{:b}`. Implementations
26126//! are provided for these traits for a number of primitive types by the
26127//! standard library as well. If no format is specified (as in `{}` or `{:6}`),
26128//! then the format trait used is the [`Display`] trait.
26129//!
26130//! When implementing a format trait for your own type, you will have to
26131//! implement a method of the signature:
26132//!
26133//! ```
26134//! # #![allow(dead_code)]
26135//! # use std::fmt;
26136//! # struct Foo; // our custom type
26137//! # impl fmt::Display for Foo {
26138//! fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
26139//! # write!(f, "testing, testing")
26140//! # } }
26141//! ```
26142//!
26143//! Your type will be passed as `self` by-reference, and then the function
26144//! should emit output into the `f.buf` stream. It is up to each format trait
26145//! implementation to correctly adhere to the requested formatting parameters.
26146//! The values of these parameters will be listed in the fields of the
26147//! [`Formatter`] struct. In order to help with this, the [`Formatter`] struct also
26148//! provides some helper methods.
26149//!
26150//! Additionally, the return value of this function is [`fmt::Result`] which is a
26151//! type alias of [`Result`]`<(), `[`std::fmt::Error`]`>`. Formatting implementations
26152//! should ensure that they propagate errors from the [`Formatter`] (e.g., when
26153//! calling [`write!`]). However, they should never return errors spuriously. That
26154//! is, a formatting implementation must and may only return an error if the
26155//! passed-in [`Formatter`] returns an error. This is because, contrary to what
26156//! the function signature might suggest, string formatting is an infallible
26157//! operation. This function only returns a result because writing to the
26158//! underlying stream might fail and it must provide a way to propagate the fact
26159//! that an error has occurred back up the stack.
26160//!
26161//! An example of implementing the formatting traits would look
26162//! like:
26163//!
26164//! ```
26165//! use std::fmt;
26166//!
26167//! #[derive(Debug)]
26168//! struct Vector2D {
26169//!     x: isize,
26170//!     y: isize,
26171//! }
26172//!
26173//! impl fmt::Display for Vector2D {
26174//!     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
26175//!         // The `f` value implements the `Write` trait, which is what the
26176//!         // write! macro is expecting. Note that this formatting ignores the
26177//!         // various flags provided to format strings.
26178//!         write!(f, "({}, {})", self.x, self.y)
26179//!     }
26180//! }
26181//!
26182//! // Different traits allow different forms of output of a type. The meaning
26183//! // of this format is to print the magnitude of a vector.
26184//! impl fmt::Binary for Vector2D {
26185//!     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
26186//!         let magnitude = (self.x * self.x + self.y * self.y) as f64;
26187//!         let magnitude = magnitude.sqrt();
26188//!
26189//!         // Respect the formatting flags by using the helper method
26190//!         // `pad_integral` on the Formatter object. See the method
26191//!         // documentation for details, and the function `pad` can be used
26192//!         // to pad strings.
26193//!         let decimals = f.precision().unwrap_or(3);
26194//!         let string = format!("{:.*}", decimals, magnitude);
26195//!         f.pad_integral(true, "", &string)
26196//!     }
26197//! }
26198//!
26199//! fn main() {
26200//!     let myvector = Vector2D { x: 3, y: 4 };
26201//!
26202//!     println!("{}", myvector);       // => "(3, 4)"
26203//!     println!("{:?}", myvector);     // => "Vector2D {x: 3, y:4}"
26204//!     println!("{:10.3b}", myvector); // => "     5.000"
26205//! }
26206//! ```
26207//!
26208//! ### `fmt::Display` vs `fmt::Debug`
26209//!
26210//! These two formatting traits have distinct purposes:
26211//!
26212//! - [`fmt::Display`][`Display`] implementations assert that the type can be faithfully
26213//!   represented as a UTF-8 string at all times. It is **not** expected that
26214//!   all types implement the [`Display`] trait.
26215//! - [`fmt::Debug`][`Debug`] implementations should be implemented for **all** public types.
26216//!   Output will typically represent the internal state as faithfully as possible.
26217//!   The purpose of the [`Debug`] trait is to facilitate debugging Rust code. In
26218//!   most cases, using `#[derive(Debug)]` is sufficient and recommended.
26219//!
26220//! Some examples of the output from both traits:
26221//!
26222//! ```
26223//! assert_eq!(format!("{} {:?}", 3, 4), "3 4");
26224//! assert_eq!(format!("{} {:?}", 'a', 'b'), "a 'b'");
26225//! assert_eq!(format!("{} {:?}", "foo\n", "bar\n"), "foo\n \"bar\\n\"");
26226//! ```
26227//!
26228//! # Related macros
26229//!
26230//! There are a number of related macros in the [`format!`] family. The ones that
26231//! are currently implemented are:
26232//!
26233//! ```ignore (only-for-syntax-highlight)
26234//! format!      // described above
//! write!       // first argument is either a &mut fmt::Write or a &mut io::Write, the destination
26236//! writeln!     // same as write but appends a newline
26237//! print!       // the format string is printed to the standard output
26238//! println!     // same as print but appends a newline
26239//! eprint!      // the format string is printed to the standard error
26240//! eprintln!    // same as eprint but appends a newline
26241//! format_args! // described below.
26242//! ```
26243//!
26244//! ### `write!`
26245//!
26246//! This and [`writeln!`] are two macros which are used to emit the format string
26247//! to a specified stream. This is used to prevent intermediate allocations of
26248//! format strings and instead directly write the output. Under the hood, this
26249//! function is actually invoking the [`write_fmt`] function defined on the
26250//! [`std::io::Write`] trait. Example usage is:
26251//!
26252//! ```
26253//! # #![allow(unused_must_use)]
26254//! use std::io::Write;
26255//! let mut w = Vec::new();
26256//! write!(&mut w, "Hello {}!", "world");
26257//! ```
26258//!
26259//! ### `print!`
26260//!
26261//! This and [`println!`] emit their output to stdout. Similarly to the [`write!`]
26262//! macro, the goal of these macros is to avoid intermediate allocations when
26263//! printing output. Example usage is:
26264//!
26265//! ```
26266//! print!("Hello {}!", "world");
26267//! println!("I have a newline {}", "character at the end");
26268//! ```
26269//! ### `eprint!`
26270//!
26271//! The [`eprint!`] and [`eprintln!`] macros are identical to
26272//! [`print!`] and [`println!`], respectively, except they emit their
26273//! output to stderr.
26274//!
26275//! ### `format_args!`
26276//!
26277//! This is a curious macro used to safely pass around
26278//! an opaque object describing the format string. This object
26279//! does not require any heap allocations to create, and it only
26280//! references information on the stack. Under the hood, all of
26281//! the related macros are implemented in terms of this. First
26282//! off, some example usage is:
26283//!
26284//! ```
26285//! # #![allow(unused_must_use)]
26286//! use std::fmt;
26287//! use std::io::{self, Write};
26288//!
26289//! let mut some_writer = io::stdout();
26290//! write!(&mut some_writer, "{}", format_args!("print with a {}", "macro"));
26291//!
26292//! fn my_fmt_fn(args: fmt::Arguments) {
26293//!     write!(&mut io::stdout(), "{}", args);
26294//! }
26295//! my_fmt_fn(format_args!(", or a {} too", "function"));
26296//! ```
26297//!
26298//! The result of the [`format_args!`] macro is a value of type [`fmt::Arguments`].
26299//! This structure can then be passed to the [`write`] and [`format`] functions
26300//! inside this module in order to process the format string.
26301//! The goal of this macro is to even further prevent intermediate allocations
26302//! when dealing with formatting strings.
26303//!
26304//! For example, a logging library could use the standard formatting syntax, but
26305//! it would internally pass around this structure until it has been determined
26306//! where output should go to.
26307//!
26308//! [`fmt::Result`]: Result
26309//! [`Result`]: core::result::Result
26310//! [`std::fmt::Error`]: Error
26311//! [`write!`]: core::write
26312//! [`write`]: core::write
26313//! [`format!`]: crate::format
26314//! [`to_string`]: crate::string::ToString
26315//! [`writeln!`]: core::writeln
26316//! [`write_fmt`]: ../../std/io/trait.Write.html#method.write_fmt
26317//! [`std::io::Write`]: ../../std/io/trait.Write.html
26318//! [`print!`]: ../../std/macro.print.html
26319//! [`println!`]: ../../std/macro.println.html
26320//! [`eprint!`]: ../../std/macro.eprint.html
26321//! [`eprintln!`]: ../../std/macro.eprintln.html
26322//! [`format_args!`]: core::format_args
26323//! [`fmt::Arguments`]: Arguments
26324//! [`format`]: crate::format
26325
26326#![stable(feature = "rust1", since = "1.0.0")]
26327
26328#[unstable(feature = "fmt_internals", issue = "none")]
26329pub use core::fmt::rt;
26330#[stable(feature = "fmt_flags_align", since = "1.28.0")]
26331pub use core::fmt::Alignment;
26332#[stable(feature = "rust1", since = "1.0.0")]
26333pub use core::fmt::Error;
26334#[stable(feature = "rust1", since = "1.0.0")]
26335pub use core::fmt::{write, ArgumentV1, Arguments};
26336#[stable(feature = "rust1", since = "1.0.0")]
26337pub use core::fmt::{Binary, Octal};
26338#[stable(feature = "rust1", since = "1.0.0")]
26339pub use core::fmt::{Debug, Display};
26340#[stable(feature = "rust1", since = "1.0.0")]
26341pub use core::fmt::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple};
26342#[stable(feature = "rust1", since = "1.0.0")]
26343pub use core::fmt::{Formatter, Result, Write};
26344#[stable(feature = "rust1", since = "1.0.0")]
26345pub use core::fmt::{LowerExp, UpperExp};
26346#[stable(feature = "rust1", since = "1.0.0")]
26347pub use core::fmt::{LowerHex, Pointer, UpperHex};
26348
26349use crate::string;
26350
26351/// The `format` function takes an [`Arguments`] struct and returns the resulting
26352/// formatted string.
26353///
26354/// The [`Arguments`] instance can be created with the [`format_args!`] macro.
26355///
26356/// # Examples
26357///
26358/// Basic usage:
26359///
26360/// ```
26361/// use std::fmt;
26362///
26363/// let s = fmt::format(format_args!("Hello, {}!", "world"));
26364/// assert_eq!(s, "Hello, world!");
26365/// ```
26366///
26367/// Please note that using [`format!`] might be preferable.
26368/// Example:
26369///
26370/// ```
26371/// let s = format!("Hello, {}!", "world");
26372/// assert_eq!(s, "Hello, world!");
26373/// ```
26374///
26375/// [`format_args!`]: core::format_args
26376/// [`format!`]: crate::format
26377#[stable(feature = "rust1", since = "1.0.0")]
26378pub fn format(args: Arguments<'_>) -> string::String {
26379    let capacity = args.estimated_capacity();
26380    let mut output = string::String::with_capacity(capacity);
26381    output.write_fmt(args).expect("a formatting trait implementation returned an error");
26382    output
26383}
26384use super::*;
26385
26386use std::boxed::Box;
26387use std::cell::RefCell;
26388use std::clone::Clone;
26389use std::convert::{From, TryInto};
26390use std::mem::drop;
26391use std::option::Option::{self, None, Some};
26392use std::result::Result::{Err, Ok};
26393
#[test]
fn test_clone() {
    // Two Rc handles to the same RefCell: a write through one handle
    // must be observable through the other.
    let original = Rc::new(RefCell::new(5));
    let shared = Rc::clone(&original);
    *original.borrow_mut() = 20;
    assert_eq!(*shared.borrow(), 20);
}
26401
#[test]
fn test_simple() {
    // Deref on a fresh Rc yields the wrapped value.
    let rc = Rc::new(5);
    assert_eq!(*rc, 5);
}
26407
#[test]
fn test_simple_clone() {
    // Cloning leaves both handles dereferencing to the same value.
    let original = Rc::new(5);
    let duplicate = Rc::clone(&original);
    assert_eq!(*original, 5);
    assert_eq!(*duplicate, 5);
}
26415
#[test]
fn test_destructor() {
    // `Box::new` replaces the removed nightly-only `box` expression syntax,
    // so this test also builds on stable toolchains. Behavior is unchanged.
    let x: Rc<Box<_>> = Rc::new(Box::new(5));
    assert_eq!(**x, 5);
}
26421
#[test]
fn test_live() {
    let strong = Rc::new(5);
    let weak = Rc::downgrade(&strong);
    // The strong handle is still alive, so upgrading must succeed.
    assert!(weak.upgrade().is_some());
}
26428
#[test]
fn test_dead() {
    let strong = Rc::new(5);
    let weak = Rc::downgrade(&strong);
    drop(strong);
    // No strong handles remain, so upgrading must fail.
    assert!(weak.upgrade().is_none());
}
26436
#[test]
fn weak_self_cyclic() {
    // A node that holds a weak reference back to itself; the weak edge
    // must not keep the allocation alive or cause a double free.
    struct Cycle {
        x: RefCell<Option<Weak<Cycle>>>,
    }

    let node = Rc::new(Cycle { x: RefCell::new(None) });
    let weak_self = Rc::downgrade(&node.clone());
    *node.x.borrow_mut() = Some(weak_self);

    // hopefully we don't double-free (or leak)...
}
26449
#[test]
fn is_unique() {
    // A freshly created Rc is unique.
    let rc = Rc::new(3);
    assert!(Rc::is_unique(&rc));
    // A second strong handle disqualifies uniqueness...
    let extra = rc.clone();
    assert!(!Rc::is_unique(&rc));
    drop(extra);
    assert!(Rc::is_unique(&rc));
    // ...and so does an outstanding weak handle.
    let weak = Rc::downgrade(&rc);
    assert!(!Rc::is_unique(&rc));
    drop(weak);
    assert!(Rc::is_unique(&rc));
}
26463
#[test]
fn test_strong_count() {
    let first = Rc::new(0);
    assert_eq!(Rc::strong_count(&first), 1);
    // Downgrading does not touch the strong count.
    let weak = Rc::downgrade(&first);
    assert_eq!(Rc::strong_count(&first), 1);
    // Upgrading mints a new strong handle.
    let second = weak.upgrade().expect("upgrade of live rc failed");
    assert_eq!(Rc::strong_count(&second), 2);
    assert_eq!(Rc::strong_count(&first), 2);
    drop(weak);
    drop(first);
    assert_eq!(Rc::strong_count(&second), 1);
    let third = second.clone();
    assert_eq!(Rc::strong_count(&second), 2);
    assert_eq!(Rc::strong_count(&third), 2);
}
26480
#[test]
fn test_weak_count() {
    let strong = Rc::new(0);
    assert_eq!(Rc::strong_count(&strong), 1);
    assert_eq!(Rc::weak_count(&strong), 0);
    // A downgrade bumps only the weak count.
    let weak = Rc::downgrade(&strong);
    assert_eq!(Rc::strong_count(&strong), 1);
    assert_eq!(Rc::weak_count(&strong), 1);
    drop(weak);
    assert_eq!(Rc::strong_count(&strong), 1);
    assert_eq!(Rc::weak_count(&strong), 0);
    // A clone bumps only the strong count.
    let second = strong.clone();
    assert_eq!(Rc::strong_count(&strong), 2);
    assert_eq!(Rc::weak_count(&strong), 0);
    drop(second);
}
26497
#[test]
fn weak_counts() {
    // A Weak made via Weak::new has no allocation, so both counts read 0.
    assert_eq!(Weak::weak_count(&Weak::<u64>::new()), 0);
    assert_eq!(Weak::strong_count(&Weak::<u64>::new()), 0);

    let strong = Rc::new(0);
    let weak1 = Rc::downgrade(&strong);
    assert_eq!(Weak::strong_count(&weak1), 1);
    assert_eq!(Weak::weak_count(&weak1), 1);
    // Cloning a weak handle raises the weak count seen by every weak.
    let weak2 = weak1.clone();
    assert_eq!(Weak::strong_count(&weak1), 1);
    assert_eq!(Weak::weak_count(&weak1), 2);
    assert_eq!(Weak::strong_count(&weak2), 1);
    assert_eq!(Weak::weak_count(&weak2), 2);
    drop(weak1);
    assert_eq!(Weak::strong_count(&weak2), 1);
    assert_eq!(Weak::weak_count(&weak2), 1);
    let strong2 = strong.clone();
    assert_eq!(Weak::strong_count(&weak2), 2);
    assert_eq!(Weak::weak_count(&weak2), 1);
    drop(strong2);
    drop(strong);
    // With no strong handles left, both counts observed via a weak are 0.
    assert_eq!(Weak::strong_count(&weak2), 0);
    assert_eq!(Weak::weak_count(&weak2), 0);
    drop(weak2);
}
26524
#[test]
fn try_unwrap() {
    // Unique ownership: unwrapping succeeds.
    let unique = Rc::new(3);
    assert_eq!(Rc::try_unwrap(unique), Ok(3));
    // A second strong handle blocks unwrapping and hands the Rc back.
    let shared = Rc::new(4);
    let _other = shared.clone();
    assert_eq!(Rc::try_unwrap(shared), Err(Rc::new(4)));
    // Outstanding weak handles do not block unwrapping.
    let with_weak = Rc::new(5);
    let _weak = Rc::downgrade(&with_weak);
    assert_eq!(Rc::try_unwrap(with_weak), Ok(5));
}
26536
#[test]
fn into_from_raw() {
    // `Box::new` replaces the removed nightly-only `box` expression syntax,
    // so this test also builds on stable toolchains. Behavior is unchanged.
    let x = Rc::new(Box::new("hello"));
    let y = x.clone();

    // into_raw leaks one strong reference; the allocation stays alive even
    // after `y` is dropped, so the raw pointer remains dereferenceable.
    let x_ptr = Rc::into_raw(x);
    drop(y);
    unsafe {
        assert_eq!(**x_ptr, "hello");

        // from_raw reclaims the leaked reference.
        let x = Rc::from_raw(x_ptr);
        assert_eq!(**x, "hello");

        assert_eq!(Rc::try_unwrap(x).map(|x| *x), Ok("hello"));
    }
}
26553
#[test]
fn test_into_from_raw_unsized() {
    use std::fmt::Display;
    use std::string::ToString;

    // Round-trip a str-slice DST through a raw pointer.
    let original: Rc<str> = Rc::from("foo");
    let raw = Rc::into_raw(original.clone());
    let restored = unsafe { Rc::from_raw(raw) };
    assert_eq!(unsafe { &*raw }, "foo");
    assert_eq!(original, restored);

    // Round-trip a trait-object DST through a raw pointer.
    let original: Rc<dyn Display> = Rc::new(123);
    let raw = Rc::into_raw(original.clone());
    let restored = unsafe { Rc::from_raw(raw) };
    assert_eq!(unsafe { &*raw }.to_string(), "123");
    assert_eq!(restored.to_string(), "123");
}
26575
#[test]
fn into_from_weak_raw() {
    // `Box::new` replaces the removed nightly-only `box` expression syntax,
    // so this test also builds on stable toolchains. Behavior is unchanged.
    let x = Rc::new(Box::new("hello"));
    let y = Rc::downgrade(&x);

    // Weak::into_raw leaks the weak count; `x` keeps the value alive, so the
    // raw pointer may be dereferenced.
    let y_ptr = Weak::into_raw(y);
    unsafe {
        assert_eq!(**y_ptr, "hello");

        // Weak::from_raw reclaims the leaked weak reference.
        let y = Weak::from_raw(y_ptr);
        let y_up = Weak::upgrade(&y).unwrap();
        assert_eq!(**y_up, "hello");
        drop(y_up);

        assert_eq!(Rc::try_unwrap(x).map(|x| *x), Ok("hello"));
    }
}
26593
#[test]
fn test_into_from_weak_raw_unsized() {
    use std::fmt::Display;
    use std::string::ToString;

    // Round-trip a weak pointer to a str-slice DST through a raw pointer.
    let strong: Rc<str> = Rc::from("foo");
    let weak: Weak<str> = Rc::downgrade(&strong);
    let raw = Weak::into_raw(weak.clone());
    let restored = unsafe { Weak::from_raw(raw) };
    assert_eq!(unsafe { &*raw }, "foo");
    assert!(weak.ptr_eq(&restored));

    // Round-trip a weak pointer to a trait-object DST.
    let strong: Rc<dyn Display> = Rc::new(123);
    let weak: Weak<dyn Display> = Rc::downgrade(&strong);
    let raw = Weak::into_raw(weak.clone());
    let restored = unsafe { Weak::from_raw(raw) };
    assert_eq!(unsafe { &*raw }.to_string(), "123");
    assert!(weak.ptr_eq(&restored));
}
26617
#[test]
fn get_mut() {
    // Unique ownership allows mutable access.
    let mut rc = Rc::new(3);
    *Rc::get_mut(&mut rc).unwrap() = 4;
    assert_eq!(*rc, 4);
    // A second strong handle blocks mutable access...
    let other = rc.clone();
    assert!(Rc::get_mut(&mut rc).is_none());
    drop(other);
    assert!(Rc::get_mut(&mut rc).is_some());
    // ...and so does an outstanding weak handle.
    let _weak = Rc::downgrade(&rc);
    assert!(Rc::get_mut(&mut rc).is_none());
}
26630
#[test]
fn test_cowrc_clone_make_unique() {
    let mut first = Rc::new(75);
    let mut second = first.clone();
    let mut third = second.clone();

    // make_mut on shared data copies the value, so each handle still sees 75.
    assert_eq!(75, *Rc::make_mut(&mut first));
    assert_eq!(75, *Rc::make_mut(&mut second));
    assert_eq!(75, *Rc::make_mut(&mut third));

    *Rc::make_mut(&mut first) += 1;
    *Rc::make_mut(&mut second) += 2;
    *Rc::make_mut(&mut third) += 3;

    assert_eq!(76, *first);
    assert_eq!(77, *second);
    assert_eq!(78, *third);

    // None of the handles should point to the same backing memory.
    assert_ne!(*first, *second);
    assert_ne!(*first, *third);
    assert_ne!(*second, *third);
}
26654
#[test]
fn test_cowrc_clone_unique2() {
    let mut first = Rc::new(75);
    let second = first.clone();
    let third = second.clone();

    assert_eq!(75, *first);
    assert_eq!(75, *second);
    assert_eq!(75, *third);

    // make_mut clones the shared value, leaving the other handles untouched.
    *Rc::make_mut(&mut first) += 1;

    assert_eq!(76, *first);
    assert_eq!(75, *second);
    assert_eq!(75, *third);

    // `second` and `third` still share contents; `first` is now unique.
    assert_ne!(*first, *second);
    assert_ne!(*first, *third);
    assert_eq!(*second, *third);
}
26677
#[test]
fn test_cowrc_clone_weak() {
    let mut strong = Rc::new(75);
    let weak = Rc::downgrade(&strong);

    assert_eq!(75, *strong);
    assert_eq!(75, *weak.upgrade().unwrap());

    // make_mut with an outstanding weak handle moves the value into a fresh
    // allocation, disassociating the weak reference.
    *Rc::make_mut(&mut strong) += 1;

    assert_eq!(76, *strong);
    assert!(weak.upgrade().is_none());
}
26691
#[test]
fn test_show() {
    // Debug formatting of an Rc delegates to the inner value.
    let value = Rc::new(75);
    assert_eq!(format!("{:?}", value), "75");
}
26697
#[test]
fn test_unsized() {
    // An Rc of a slice DST compares equal to its clone.
    let slice: Rc<[i32]> = Rc::new([1, 2, 3]);
    assert_eq!(slice, slice.clone());
}
26703
#[test]
fn test_maybe_thin_unsized() {
    // If/when custom thin DSTs exist, this test should be updated to use one
    use std::ffi::{CStr, CString};

    let strong: Rc<CStr> = Rc::from(CString::new("swordfish").unwrap().into_boxed_c_str());
    assert_eq!(format!("{:?}", strong), "\"swordfish\"");
    let weak: Weak<CStr> = Rc::downgrade(&strong);
    drop(strong);

    // At this point, the weak points to a dropped DST
    assert!(weak.upgrade().is_none());
    // But we still need to be able to get the alloc layout to drop.
    // CStr has no drop glue, but custom DSTs might, and need to work.
    drop(weak);
}
26720
#[test]
fn test_from_owned() {
    // From<T> moves the owned value into the Rc.
    let value = 123;
    let wrapped = Rc::from(value);
    assert_eq!(123, *wrapped);
}
26727
#[test]
fn test_new_weak() {
    // A weak handle created without any Rc can never upgrade.
    let dangling: Weak<usize> = Weak::new();
    assert!(dangling.upgrade().is_none());
}
26733
#[test]
fn test_ptr_eq() {
    let five = Rc::new(5);
    let alias = Rc::clone(&five);
    let distinct = Rc::new(5);

    // ptr_eq distinguishes a shared allocation from a merely equal value.
    assert!(Rc::ptr_eq(&five, &alias));
    assert!(!Rc::ptr_eq(&five, &distinct));
}
26743
#[test]
fn test_from_str() {
    // From<&str> copies the string data into the Rc allocation.
    let shared: Rc<str> = Rc::from("foo");
    assert_eq!(&shared[..], "foo");
}
26750
#[test]
fn test_copy_from_slice() {
    // From<&[T]> for Copy elements copies the slice into the Rc.
    let source: &[u32] = &[1, 2, 3];
    let shared: Rc<[u32]> = Rc::from(source);
    assert_eq!(&shared[..], [1, 2, 3]);
}
26758
#[test]
fn test_clone_from_slice() {
    // From<&[T]> for non-Copy elements clones each element into the Rc.
    #[derive(Clone, Debug, Eq, PartialEq)]
    struct X(u32);

    let source: &[X] = &[X(1), X(2), X(3)];
    let shared: Rc<[X]> = Rc::from(source);
    assert_eq!(&shared[..], source);
}
26769
#[test]
#[should_panic]
fn test_clone_from_slice_panic() {
    use std::string::{String, ToString};

    struct Fail(u32, String);

    impl Clone for Fail {
        fn clone(&self) -> Fail {
            // The element tagged 2 blows up mid-copy.
            if self.0 == 2 {
                panic!();
            }
            Fail(self.0, self.1.clone())
        }
    }

    let source: &[Fail] =
        &[Fail(0, "foo".to_string()), Fail(1, "bar".to_string()), Fail(2, "baz".to_string())];

    // Should panic, but not cause memory corruption
    let _shared: Rc<[Fail]> = Rc::from(source);
}
26792
#[test]
fn test_from_box() {
    // A Box converts into an Rc without changing the stored value.
    let boxed: Box<u32> = Box::new(123);
    let shared: Rc<u32> = Rc::from(boxed);

    assert_eq!(*shared, 123);
}
26800
#[test]
fn test_from_box_str() {
    use std::string::String;

    // A boxed str (an unsized DST) converts into `Rc<str>`.
    let boxed = String::from("foo").into_boxed_str();
    let shared: Rc<str> = Rc::from(boxed);

    assert_eq!(&shared[..], "foo");
}
26810
#[test]
fn test_from_box_slice() {
    // A boxed slice converts into `Rc<[T]>`.
    let boxed = vec![1, 2, 3].into_boxed_slice();
    let shared: Rc<[u32]> = Rc::from(boxed);

    assert_eq!(&shared[..], [1, 2, 3]);
}
26818
#[test]
fn test_from_box_trait() {
    use std::fmt::Display;
    use std::string::ToString;

    // The trait-object conversion must carry the vtable across.
    let boxed: Box<dyn Display> = Box::new(123);
    let shared: Rc<dyn Display> = Rc::from(boxed);

    assert_eq!(shared.to_string(), "123");
}
26829
#[test]
fn test_from_box_trait_zero_sized() {
    use std::fmt::Debug;

    // A zero-sized value behind a trait object still round-trips into Rc.
    let boxed: Box<dyn Debug> = Box::new(());
    let shared: Rc<dyn Debug> = Rc::from(boxed);

    assert_eq!(format!("{:?}", shared), "()");
}
26839
#[test]
fn test_from_vec() {
    // A Vec converts into `Rc<[T]>`, preserving its contents.
    let source = vec![1, 2, 3];
    let shared: Rc<[u32]> = Rc::from(source);

    assert_eq!(&shared[..], [1, 2, 3]);
}
26847
#[test]
fn test_downcast() {
    use std::any::Any;

    let any_int: Rc<dyn Any> = Rc::new(i32::MAX);
    let any_str: Rc<dyn Any> = Rc::new("abc");

    // Downcasting to the wrong type hands the Rc back as the error value.
    assert!(any_int.clone().downcast::<u32>().is_err());

    let as_int = any_int.downcast::<i32>();
    assert!(as_int.is_ok());
    assert_eq!(as_int.unwrap(), Rc::new(i32::MAX));

    assert!(any_str.clone().downcast::<i32>().is_err());

    let as_str = any_str.downcast::<&'static str>();
    assert!(as_str.is_ok());
    assert_eq!(as_str.unwrap(), Rc::new("abc"));
}
26867
#[test]
fn test_array_from_slice() {
    let source = vec![1, 2, 3];
    let shared: Rc<[u32]> = Rc::from(source);

    // Converting to an array Rc succeeds only when the lengths match.
    let exact: Result<Rc<[u32; 3]>, _> = shared.clone().try_into();
    assert!(exact.is_ok());

    let wrong: Result<Rc<[u32; 2]>, _> = shared.clone().try_into();
    assert!(wrong.is_err());
}
26879
#[test]
fn test_rc_cyclic_with_zero_refs() {
    struct ZeroRefs {
        inner: Weak<ZeroRefs>,
    }

    // The closure is given a Weak that cannot be upgraded yet; here it is
    // discarded, so the finished Rc has no self-references at all.
    let zero_refs = Rc::new_cyclic(|weak| {
        assert_eq!(weak.strong_count(), 0);
        assert!(weak.upgrade().is_none());
        ZeroRefs { inner: Weak::new() }
    });

    assert_eq!(Rc::strong_count(&zero_refs), 1);
    assert_eq!(Rc::weak_count(&zero_refs), 0);
    assert_eq!(zero_refs.inner.strong_count(), 0);
    assert_eq!(zero_refs.inner.weak_count(), 0);
}
26897
#[test]
fn test_rc_cyclic_with_one_ref() {
    struct OneRef {
        inner: Weak<OneRef>,
    }

    // Keep one clone of the closure's Weak: a single self-reference.
    let one_ref = Rc::new_cyclic(|weak| {
        assert_eq!(weak.strong_count(), 0);
        assert!(weak.upgrade().is_none());
        OneRef { inner: weak.clone() }
    });

    assert_eq!(Rc::strong_count(&one_ref), 1);
    assert_eq!(Rc::weak_count(&one_ref), 1);

    let upgraded = Weak::upgrade(&one_ref.inner).unwrap();
    assert!(Rc::ptr_eq(&one_ref, &upgraded));

    assert_eq!(one_ref.inner.strong_count(), 2);
    assert_eq!(one_ref.inner.weak_count(), 1);
}
26919
#[test]
fn test_rc_cyclic_with_two_ref() {
    struct TwoRefs {
        inner: Weak<TwoRefs>,
        inner1: Weak<TwoRefs>,
    }

    // Keep two clones of the closure's Weak: two self-references.
    let two_refs = Rc::new_cyclic(|weak| {
        assert_eq!(weak.strong_count(), 0);
        assert!(weak.upgrade().is_none());
        TwoRefs { inner: weak.clone(), inner1: weak.clone() }
    });

    assert_eq!(Rc::strong_count(&two_refs), 1);
    assert_eq!(Rc::weak_count(&two_refs), 2);

    let via_inner = Weak::upgrade(&two_refs.inner).unwrap();
    assert!(Rc::ptr_eq(&two_refs, &via_inner));

    let via_inner1 = Weak::upgrade(&two_refs.inner1).unwrap();
    assert!(Rc::ptr_eq(&two_refs, &via_inner1));

    assert_eq!(Rc::strong_count(&two_refs), 3);
    assert_eq!(Rc::weak_count(&two_refs), 2);
}
26945//! A doubly-linked list with owned nodes.
26946//!
26947//! The `LinkedList` allows pushing and popping elements at either end
26948//! in constant time.
26949//!
26950//! NOTE: It is almost always better to use [`Vec`] or [`VecDeque`] because
26951//! array-based containers are generally faster,
26952//! more memory efficient, and make better use of CPU cache.
26953//!
26954//! [`Vec`]: crate::vec::Vec
26955//! [`VecDeque`]: super::vec_deque::VecDeque
26956
26957#![stable(feature = "rust1", since = "1.0.0")]
26958
26959use core::cmp::Ordering;
26960use core::fmt;
26961use core::hash::{Hash, Hasher};
26962use core::iter::{FromIterator, FusedIterator};
26963use core::marker::PhantomData;
26964use core::mem;
26965use core::ptr::NonNull;
26966
26967use super::SpecExtend;
26968use crate::boxed::Box;
26969
26970#[cfg(test)]
26971mod tests;
26972
/// A doubly-linked list with owned nodes.
///
/// The `LinkedList` allows pushing and popping elements at either end
/// in constant time.
///
/// NOTE: It is almost always better to use `Vec` or `VecDeque` because
/// array-based containers are generally faster,
/// more memory efficient, and make better use of CPU cache.
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "LinkedList")]
pub struct LinkedList<T> {
    // First node of the list; `None` when the list is empty.
    head: Option<NonNull<Node<T>>>,
    // Last node of the list; `None` when the list is empty.
    tail: Option<NonNull<Node<T>>>,
    // Number of elements; every insert/remove in this module keeps it in sync,
    // so `len == 0` exactly when `head`/`tail` are `None`.
    len: usize,
    // Tells dropck that this list logically owns `Box<Node<T>>` values.
    marker: PhantomData<Box<Node<T>>>,
}
26989
// A single heap-allocated list node. The `next`/`prev` links are raw
// pointers that this module manages manually via `Box::leak`/`Box::from_raw`.
struct Node<T> {
    next: Option<NonNull<Node<T>>>,
    prev: Option<NonNull<Node<T>>>,
    element: T,
}
26995
/// An iterator over the elements of a `LinkedList`.
///
/// This `struct` is created by [`LinkedList::iter()`]. See its
/// documentation for more.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> {
    // Next node to yield when iterating from the front.
    head: Option<NonNull<Node<T>>>,
    // Next node to yield when iterating from the back.
    tail: Option<NonNull<Node<T>>>,
    // Number of elements remaining between `head` and `tail` (inclusive).
    len: usize,
    // Ties the iterator to a shared borrow of the list's nodes.
    marker: PhantomData<&'a Node<T>>,
}
27007
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
    // Only the remaining length is printed; the nodes are not traversed.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Iter").field(&self.len).finish()
    }
}
27014
27015// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
27016#[stable(feature = "rust1", since = "1.0.0")]
27017impl<T> Clone for Iter<'_, T> {
27018    fn clone(&self) -> Self {
27019        Iter { ..*self }
27020    }
27021}
27022
/// A mutable iterator over the elements of a `LinkedList`.
///
/// This `struct` is created by [`LinkedList::iter_mut()`]. See its
/// documentation for more.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IterMut<'a, T: 'a> {
    // We do *not* exclusively own the entire list here, references to node's `element`
    // have been handed out by the iterator! So be careful when using this; the methods
    // called must be aware that there can be aliasing pointers to `element`.
    list: &'a mut LinkedList<T>,
    // Next node to yield when iterating from the front.
    head: Option<NonNull<Node<T>>>,
    // Next node to yield when iterating from the back.
    tail: Option<NonNull<Node<T>>>,
    // Number of elements remaining between `head` and `tail` (inclusive).
    len: usize,
}
27037
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
    // Prints the whole underlying list followed by the remaining length.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IterMut").field(&self.list).field(&self.len).finish()
    }
}
27044
/// An owning iterator over the elements of a `LinkedList`.
///
/// This `struct` is created by the [`into_iter`] method on [`LinkedList`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`into_iter`]: LinkedList::into_iter
#[derive(Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<T> {
    // The remaining elements, owned outright by the iterator.
    list: LinkedList<T>,
}
27056
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
    // Delegates to the remaining list's `Debug` output.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.list).finish()
    }
}
27063
impl<T> Node<T> {
    /// Creates an unlinked node holding `element`.
    fn new(element: T) -> Self {
        Node { next: None, prev: None, element }
    }

    /// Consumes the boxed node, freeing it, and returns just its element.
    fn into_element(self: Box<Self>) -> T {
        self.element
    }
}
27073
// private methods
impl<T> LinkedList<T> {
    /// Adds the given node to the front of the list.
    #[inline]
    fn push_front_node(&mut self, mut node: Box<Node<T>>) {
        // This method takes care not to create mutable references to whole nodes,
        // to maintain validity of aliasing pointers into `element`.
        unsafe {
            node.next = self.head;
            node.prev = None;
            // Ownership of the node is handed to the list as a raw pointer.
            let node = Some(Box::leak(node).into());

            match self.head {
                None => self.tail = node,
                // Not creating new mutable (unique!) references overlapping `element`.
                Some(head) => (*head.as_ptr()).prev = node,
            }

            self.head = node;
            self.len += 1;
        }
    }

    /// Removes and returns the node at the front of the list.
    #[inline]
    fn pop_front_node(&mut self) -> Option<Box<Node<T>>> {
        // This method takes care not to create mutable references to whole nodes,
        // to maintain validity of aliasing pointers into `element`.
        self.head.map(|node| unsafe {
            // Re-box the node to take ownership back from the list.
            let node = Box::from_raw(node.as_ptr());
            self.head = node.next;

            match self.head {
                None => self.tail = None,
                // Not creating new mutable (unique!) references overlapping `element`.
                Some(head) => (*head.as_ptr()).prev = None,
            }

            self.len -= 1;
            node
        })
    }

    /// Adds the given node to the back of the list.
    #[inline]
    fn push_back_node(&mut self, mut node: Box<Node<T>>) {
        // This method takes care not to create mutable references to whole nodes,
        // to maintain validity of aliasing pointers into `element`.
        unsafe {
            node.next = None;
            node.prev = self.tail;
            // Ownership of the node is handed to the list as a raw pointer.
            let node = Some(Box::leak(node).into());

            match self.tail {
                None => self.head = node,
                // Not creating new mutable (unique!) references overlapping `element`.
                Some(tail) => (*tail.as_ptr()).next = node,
            }

            self.tail = node;
            self.len += 1;
        }
    }

    /// Removes and returns the node at the back of the list.
    #[inline]
    fn pop_back_node(&mut self) -> Option<Box<Node<T>>> {
        // This method takes care not to create mutable references to whole nodes,
        // to maintain validity of aliasing pointers into `element`.
        self.tail.map(|node| unsafe {
            // Re-box the node to take ownership back from the list.
            let node = Box::from_raw(node.as_ptr());
            self.tail = node.prev;

            match self.tail {
                None => self.head = None,
                // Not creating new mutable (unique!) references overlapping `element`.
                Some(tail) => (*tail.as_ptr()).next = None,
            }

            self.len -= 1;
            node
        })
    }

    /// Unlinks the specified node from the current list.
    ///
    /// Warning: this will not check that the provided node belongs to the current list.
    ///
    /// This method takes care not to create mutable references to `element`, to
    /// maintain validity of aliasing pointers.
    ///
    /// # Safety
    ///
    /// `node` must point to a live node that belongs to `self`.
    #[inline]
    unsafe fn unlink_node(&mut self, mut node: NonNull<Node<T>>) {
        let node = unsafe { node.as_mut() }; // this one is ours now, we can create an &mut.

        // Not creating new mutable (unique!) references overlapping `element`.
        match node.prev {
            Some(prev) => unsafe { (*prev.as_ptr()).next = node.next },
            // this node is the head node
            None => self.head = node.next,
        };

        match node.next {
            Some(next) => unsafe { (*next.as_ptr()).prev = node.prev },
            // this node is the tail node
            None => self.tail = node.prev,
        };

        self.len -= 1;
    }

    /// Splices a series of nodes between two existing nodes.
    ///
    /// Warning: this will not check that the provided node belongs to the two existing lists.
    ///
    /// # Safety
    ///
    /// `existing_prev` and `existing_next` must be nodes of `self` (or `None`
    /// for the respective list end), and `splice_start`..`splice_end` must be
    /// a valid chain of `splice_length` nodes not already linked into `self`.
    #[inline]
    unsafe fn splice_nodes(
        &mut self,
        existing_prev: Option<NonNull<Node<T>>>,
        existing_next: Option<NonNull<Node<T>>>,
        mut splice_start: NonNull<Node<T>>,
        mut splice_end: NonNull<Node<T>>,
        splice_length: usize,
    ) {
        // This method takes care not to create multiple mutable references to whole nodes at the same time,
        // to maintain validity of aliasing pointers into `element`.
        if let Some(mut existing_prev) = existing_prev {
            unsafe {
                existing_prev.as_mut().next = Some(splice_start);
            }
        } else {
            // Splicing at the front: the spliced chain becomes the new head.
            self.head = Some(splice_start);
        }
        if let Some(mut existing_next) = existing_next {
            unsafe {
                existing_next.as_mut().prev = Some(splice_end);
            }
        } else {
            // Splicing at the back: the spliced chain becomes the new tail.
            self.tail = Some(splice_end);
        }
        unsafe {
            splice_start.as_mut().prev = existing_prev;
            splice_end.as_mut().next = existing_next;
        }

        self.len += splice_length;
    }

    /// Detaches all nodes from a linked list as a series of nodes.
    #[inline]
    fn detach_all_nodes(mut self) -> Option<(NonNull<Node<T>>, NonNull<Node<T>>, usize)> {
        let head = self.head.take();
        let tail = self.tail.take();
        let len = mem::replace(&mut self.len, 0);
        if let Some(head) = head {
            // By the list invariant, a non-empty list always has a tail, so
            // reaching `None` here is impossible.
            let tail = tail.unwrap_or_else(|| unsafe { core::hint::unreachable_unchecked() });
            Some((head, tail, len))
        } else {
            None
        }
    }

    /// Splits off and returns the part of the list before `split_node`;
    /// `split_node` becomes the head of the remainder kept in `self`.
    ///
    /// # Safety
    ///
    /// `split_node`, if `Some`, must be a node of `self`, and `at` must equal
    /// the number of elements that precede it.
    #[inline]
    unsafe fn split_off_before_node(
        &mut self,
        split_node: Option<NonNull<Node<T>>>,
        at: usize,
    ) -> Self {
        // The split node is the new head node of the second part
        if let Some(mut split_node) = split_node {
            let first_part_head;
            let first_part_tail;
            unsafe {
                first_part_tail = split_node.as_mut().prev.take();
            }
            if let Some(mut tail) = first_part_tail {
                unsafe {
                    tail.as_mut().next = None;
                }
                first_part_head = self.head;
            } else {
                // Splitting at the very front: the first part is empty.
                first_part_head = None;
            }

            let first_part = LinkedList {
                head: first_part_head,
                tail: first_part_tail,
                len: at,
                marker: PhantomData,
            };

            // Fix the head ptr of the second part
            self.head = Some(split_node);
            self.len = self.len - at;

            first_part
        } else {
            // No split node: the entire list moves into the returned part.
            mem::replace(self, LinkedList::new())
        }
    }

    /// Splits off and returns the part of the list after `split_node`;
    /// `split_node` becomes the tail of the part kept in `self`.
    ///
    /// # Safety
    ///
    /// `split_node`, if `Some`, must be a node of `self`, and `at` must equal
    /// the number of elements up to and including it.
    #[inline]
    unsafe fn split_off_after_node(
        &mut self,
        split_node: Option<NonNull<Node<T>>>,
        at: usize,
    ) -> Self {
        // The split node is the new tail node of the first part and owns
        // the head of the second part.
        if let Some(mut split_node) = split_node {
            let second_part_head;
            let second_part_tail;
            unsafe {
                second_part_head = split_node.as_mut().next.take();
            }
            if let Some(mut head) = second_part_head {
                unsafe {
                    head.as_mut().prev = None;
                }
                second_part_tail = self.tail;
            } else {
                // Splitting at the very back: the second part is empty.
                second_part_tail = None;
            }

            let second_part = LinkedList {
                head: second_part_head,
                tail: second_part_tail,
                len: self.len - at,
                marker: PhantomData,
            };

            // Fix the tail ptr of the first part
            self.tail = Some(split_node);
            self.len = at;

            second_part
        } else {
            // No split node: the entire list moves into the returned part.
            mem::replace(self, LinkedList::new())
        }
    }
}
27313
27314#[stable(feature = "rust1", since = "1.0.0")]
27315impl<T> Default for LinkedList<T> {
27316    /// Creates an empty `LinkedList<T>`.
27317    #[inline]
27318    fn default() -> Self {
27319        Self::new()
27320    }
27321}
27322
27323impl<T> LinkedList<T> {
    /// Creates an empty `LinkedList`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let list: LinkedList<u32> = LinkedList::new();
    /// ```
    #[inline]
    #[rustc_const_stable(feature = "const_linked_list_new", since = "1.32.0")]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const fn new() -> Self {
        // An empty list: no nodes and a length of zero. No allocation occurs.
        LinkedList { head: None, tail: None, len: 0, marker: PhantomData }
    }
27339
    /// Moves all elements from `other` to the end of the list.
    ///
    /// This reuses all the nodes from `other` and moves them into `self`. After
    /// this operation, `other` becomes empty.
    ///
    /// This operation should compute in *O*(1) time and *O*(1) memory.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut list1 = LinkedList::new();
    /// list1.push_back('a');
    ///
    /// let mut list2 = LinkedList::new();
    /// list2.push_back('b');
    /// list2.push_back('c');
    ///
    /// list1.append(&mut list2);
    ///
    /// let mut iter = list1.iter();
    /// assert_eq!(iter.next(), Some(&'a'));
    /// assert_eq!(iter.next(), Some(&'b'));
    /// assert_eq!(iter.next(), Some(&'c'));
    /// assert!(iter.next().is_none());
    ///
    /// assert!(list2.is_empty());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn append(&mut self, other: &mut Self) {
        match self.tail {
            // `self` is empty: just swap the two lists wholesale.
            None => mem::swap(self, other),
            Some(mut tail) => {
                // `as_mut` is okay here because we have exclusive access to the entirety
                // of both lists.
                if let Some(mut other_head) = other.head.take() {
                    unsafe {
                        tail.as_mut().next = Some(other_head);
                        other_head.as_mut().prev = Some(tail);
                    }

                    self.tail = other.tail.take();
                    // Transfer `other`'s length, leaving it logically empty.
                    self.len += mem::replace(&mut other.len, 0);
                }
            }
        }
    }
27388
    /// Moves all elements from `other` to the beginning of the list.
    ///
    /// This is the mirror image of [`LinkedList::append`]: it reuses all the
    /// nodes from `other` and moves them into `self`, after which `other`
    /// becomes empty.
    ///
    /// This operation should compute in *O*(1) time and *O*(1) memory.
    #[unstable(feature = "linked_list_prepend", issue = "none")]
    pub fn prepend(&mut self, other: &mut Self) {
        match self.head {
            // `self` is empty: just swap the two lists wholesale.
            None => mem::swap(self, other),
            Some(mut head) => {
                // `as_mut` is okay here because we have exclusive access to the entirety
                // of both lists.
                if let Some(mut other_tail) = other.tail.take() {
                    unsafe {
                        head.as_mut().prev = Some(other_tail);
                        other_tail.as_mut().next = Some(head);
                    }

                    self.head = other.head.take();
                    // Transfer `other`'s length, leaving it logically empty.
                    self.len += mem::replace(&mut other.len, 0);
                }
            }
        }
    }
27409
    /// Provides a forward iterator.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut list: LinkedList<u32> = LinkedList::new();
    ///
    /// list.push_back(0);
    /// list.push_back(1);
    /// list.push_back(2);
    ///
    /// let mut iter = list.iter();
    /// assert_eq!(iter.next(), Some(&0));
    /// assert_eq!(iter.next(), Some(&1));
    /// assert_eq!(iter.next(), Some(&2));
    /// assert_eq!(iter.next(), None);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn iter(&self) -> Iter<'_, T> {
        // Snapshot both ends and the length; the iterator walks between them.
        Iter { head: self.head, tail: self.tail, len: self.len, marker: PhantomData }
    }
27434
    /// Provides a forward iterator with mutable references.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut list: LinkedList<u32> = LinkedList::new();
    ///
    /// list.push_back(0);
    /// list.push_back(1);
    /// list.push_back(2);
    ///
    /// for element in list.iter_mut() {
    ///     *element += 10;
    /// }
    ///
    /// let mut iter = list.iter();
    /// assert_eq!(iter.next(), Some(&10));
    /// assert_eq!(iter.next(), Some(&11));
    /// assert_eq!(iter.next(), Some(&12));
    /// assert_eq!(iter.next(), None);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn iter_mut(&mut self) -> IterMut<'_, T> {
        // The iterator holds the exclusive borrow of the list alongside raw
        // cursors for both ends.
        IterMut { head: self.head, tail: self.tail, len: self.len, list: self }
    }
27463
    /// Provides a cursor at the front element.
    ///
    /// The cursor is pointing to the "ghost" non-element if the list is empty.
    #[inline]
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn cursor_front(&self) -> Cursor<'_, T> {
        // The front element (when present) has index 0.
        Cursor { index: 0, current: self.head, list: self }
    }
27472
    /// Provides a cursor with editing operations at the front element.
    ///
    /// The cursor is pointing to the "ghost" non-element if the list is empty.
    #[inline]
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn cursor_front_mut(&mut self) -> CursorMut<'_, T> {
        // The front element (when present) has index 0.
        CursorMut { index: 0, current: self.head, list: self }
    }
27481
27482    /// Provides a cursor at the back element.
27483    ///
27484    /// The cursor is pointing to the "ghost" non-element if the list is empty.
27485    #[inline]
27486    #[unstable(feature = "linked_list_cursors", issue = "58533")]
27487    pub fn cursor_back(&self) -> Cursor<'_, T> {
27488        Cursor { index: self.len.checked_sub(1).unwrap_or(0), current: self.tail, list: self }
27489    }
27490
27491    /// Provides a cursor with editing operations at the back element.
27492    ///
27493    /// The cursor is pointing to the "ghost" non-element if the list is empty.
27494    #[inline]
27495    #[unstable(feature = "linked_list_cursors", issue = "58533")]
27496    pub fn cursor_back_mut(&mut self) -> CursorMut<'_, T> {
27497        CursorMut { index: self.len.checked_sub(1).unwrap_or(0), current: self.tail, list: self }
27498    }
27499
27500    /// Returns `true` if the `LinkedList` is empty.
27501    ///
27502    /// This operation should compute in *O*(1) time.
27503    ///
27504    /// # Examples
27505    ///
27506    /// ```
27507    /// use std::collections::LinkedList;
27508    ///
27509    /// let mut dl = LinkedList::new();
27510    /// assert!(dl.is_empty());
27511    ///
27512    /// dl.push_front("foo");
27513    /// assert!(!dl.is_empty());
27514    /// ```
27515    #[inline]
27516    #[stable(feature = "rust1", since = "1.0.0")]
27517    pub fn is_empty(&self) -> bool {
27518        self.head.is_none()
27519    }
27520
    /// Returns the length of the `LinkedList`.
    ///
    /// This operation should compute in *O*(1) time.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut dl = LinkedList::new();
    ///
    /// dl.push_front(2);
    /// assert_eq!(dl.len(), 1);
    ///
    /// dl.push_front(1);
    /// assert_eq!(dl.len(), 2);
    ///
    /// dl.push_back(3);
    /// assert_eq!(dl.len(), 3);
    /// ```
    #[doc(alias = "length")]
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn len(&self) -> usize {
        // The length is cached and maintained by every mutation, so no
        // traversal is needed.
        self.len
    }
27547
27548    /// Removes all elements from the `LinkedList`.
27549    ///
27550    /// This operation should compute in *O*(*n*) time.
27551    ///
27552    /// # Examples
27553    ///
27554    /// ```
27555    /// use std::collections::LinkedList;
27556    ///
27557    /// let mut dl = LinkedList::new();
27558    ///
27559    /// dl.push_front(2);
27560    /// dl.push_front(1);
27561    /// assert_eq!(dl.len(), 2);
27562    /// assert_eq!(dl.front(), Some(&1));
27563    ///
27564    /// dl.clear();
27565    /// assert_eq!(dl.len(), 0);
27566    /// assert_eq!(dl.front(), None);
27567    /// ```
27568    #[inline]
27569    #[stable(feature = "rust1", since = "1.0.0")]
27570    pub fn clear(&mut self) {
27571        *self = Self::new();
27572    }
27573
27574    /// Returns `true` if the `LinkedList` contains an element equal to the
27575    /// given value.
27576    ///
27577    /// # Examples
27578    ///
27579    /// ```
27580    /// use std::collections::LinkedList;
27581    ///
27582    /// let mut list: LinkedList<u32> = LinkedList::new();
27583    ///
27584    /// list.push_back(0);
27585    /// list.push_back(1);
27586    /// list.push_back(2);
27587    ///
27588    /// assert_eq!(list.contains(&0), true);
27589    /// assert_eq!(list.contains(&10), false);
27590    /// ```
27591    #[stable(feature = "linked_list_contains", since = "1.12.0")]
27592    pub fn contains(&self, x: &T) -> bool
27593    where
27594        T: PartialEq<T>,
27595    {
27596        self.iter().any(|e| e == x)
27597    }
27598
    /// Provides a reference to the front element, or `None` if the list is
    /// empty.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut dl = LinkedList::new();
    /// assert_eq!(dl.front(), None);
    ///
    /// dl.push_front(1);
    /// assert_eq!(dl.front(), Some(&1));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn front(&self) -> Option<&T> {
        // SAFETY: `head` is either `None` or points to a node owned by this
        // list, which `&self` borrows (shared) for the returned lifetime.
        unsafe { self.head.as_ref().map(|node| &node.as_ref().element) }
    }
27618
    /// Provides a mutable reference to the front element, or `None` if the list
    /// is empty.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut dl = LinkedList::new();
    /// assert_eq!(dl.front(), None);
    ///
    /// dl.push_front(1);
    /// assert_eq!(dl.front(), Some(&1));
    ///
    /// match dl.front_mut() {
    ///     None => {},
    ///     Some(x) => *x = 5,
    /// }
    /// assert_eq!(dl.front(), Some(&5));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn front_mut(&mut self) -> Option<&mut T> {
        // SAFETY: `head` is either `None` or points to a node owned by this
        // list, which `&mut self` borrows exclusively for the returned lifetime.
        unsafe { self.head.as_mut().map(|node| &mut node.as_mut().element) }
    }
27644
    /// Provides a reference to the back element, or `None` if the list is
    /// empty.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut dl = LinkedList::new();
    /// assert_eq!(dl.back(), None);
    ///
    /// dl.push_back(1);
    /// assert_eq!(dl.back(), Some(&1));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn back(&self) -> Option<&T> {
        // SAFETY: `tail` is either `None` or points to a node owned by this
        // list, which `&self` borrows (shared) for the returned lifetime.
        unsafe { self.tail.as_ref().map(|node| &node.as_ref().element) }
    }
27664
    /// Provides a mutable reference to the back element, or `None` if the list
    /// is empty.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut dl = LinkedList::new();
    /// assert_eq!(dl.back(), None);
    ///
    /// dl.push_back(1);
    /// assert_eq!(dl.back(), Some(&1));
    ///
    /// match dl.back_mut() {
    ///     None => {},
    ///     Some(x) => *x = 5,
    /// }
    /// assert_eq!(dl.back(), Some(&5));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn back_mut(&mut self) -> Option<&mut T> {
        // SAFETY: `tail` is either `None` or points to a node owned by this
        // list, which `&mut self` borrows exclusively for the returned lifetime.
        unsafe { self.tail.as_mut().map(|node| &mut node.as_mut().element) }
    }
27690
27691    /// Adds an element first in the list.
27692    ///
27693    /// This operation should compute in *O*(1) time.
27694    ///
27695    /// # Examples
27696    ///
27697    /// ```
27698    /// use std::collections::LinkedList;
27699    ///
27700    /// let mut dl = LinkedList::new();
27701    ///
27702    /// dl.push_front(2);
27703    /// assert_eq!(dl.front().unwrap(), &2);
27704    ///
27705    /// dl.push_front(1);
27706    /// assert_eq!(dl.front().unwrap(), &1);
27707    /// ```
27708    #[stable(feature = "rust1", since = "1.0.0")]
27709    pub fn push_front(&mut self, elt: T) {
27710        self.push_front_node(box Node::new(elt));
27711    }
27712
27713    /// Removes the first element and returns it, or `None` if the list is
27714    /// empty.
27715    ///
27716    /// This operation should compute in *O*(1) time.
27717    ///
27718    /// # Examples
27719    ///
27720    /// ```
27721    /// use std::collections::LinkedList;
27722    ///
27723    /// let mut d = LinkedList::new();
27724    /// assert_eq!(d.pop_front(), None);
27725    ///
27726    /// d.push_front(1);
27727    /// d.push_front(3);
27728    /// assert_eq!(d.pop_front(), Some(3));
27729    /// assert_eq!(d.pop_front(), Some(1));
27730    /// assert_eq!(d.pop_front(), None);
27731    /// ```
27732    #[stable(feature = "rust1", since = "1.0.0")]
27733    pub fn pop_front(&mut self) -> Option<T> {
27734        self.pop_front_node().map(Node::into_element)
27735    }
27736
27737    /// Appends an element to the back of a list.
27738    ///
27739    /// This operation should compute in *O*(1) time.
27740    ///
27741    /// # Examples
27742    ///
27743    /// ```
27744    /// use std::collections::LinkedList;
27745    ///
27746    /// let mut d = LinkedList::new();
27747    /// d.push_back(1);
27748    /// d.push_back(3);
27749    /// assert_eq!(3, *d.back().unwrap());
27750    /// ```
27751    #[stable(feature = "rust1", since = "1.0.0")]
27752    pub fn push_back(&mut self, elt: T) {
27753        self.push_back_node(box Node::new(elt));
27754    }
27755
27756    /// Removes the last element from a list and returns it, or `None` if
27757    /// it is empty.
27758    ///
27759    /// This operation should compute in *O*(1) time.
27760    ///
27761    /// # Examples
27762    ///
27763    /// ```
27764    /// use std::collections::LinkedList;
27765    ///
27766    /// let mut d = LinkedList::new();
27767    /// assert_eq!(d.pop_back(), None);
27768    /// d.push_back(1);
27769    /// d.push_back(3);
27770    /// assert_eq!(d.pop_back(), Some(3));
27771    /// ```
27772    #[stable(feature = "rust1", since = "1.0.0")]
27773    pub fn pop_back(&mut self) -> Option<T> {
27774        self.pop_back_node().map(Node::into_element)
27775    }
27776
    /// Splits the list into two at the given index. Returns everything after the given index,
    /// including the index.
    ///
    /// This operation should compute in *O*(*n*) time.
    ///
    /// # Panics
    ///
    /// Panics if `at > len`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::LinkedList;
    ///
    /// let mut d = LinkedList::new();
    ///
    /// d.push_front(1);
    /// d.push_front(2);
    /// d.push_front(3);
    ///
    /// let mut split = d.split_off(2);
    ///
    /// assert_eq!(split.pop_front(), Some(1));
    /// assert_eq!(split.pop_front(), None);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn split_off(&mut self, at: usize) -> LinkedList<T> {
        let len = self.len();
        assert!(at <= len, "Cannot split off at a nonexistent index");
        // Trivial cases: splitting at the front hands over the whole list,
        // splitting at the end hands over nothing.
        if at == 0 {
            return mem::take(self);
        } else if at == len {
            return Self::new();
        }

        // Below, we iterate towards the `i-1`th node, either from the start or the end,
        // depending on which would be faster.
        // (`at - 1` is the index of the last node that remains in `self`;
        // `len - 1 - (at - 1)` is its distance from the back.)
        let split_node = if at - 1 <= len - 1 - (at - 1) {
            let mut iter = self.iter_mut();
            // instead of skipping using .skip() (which creates a new struct),
            // we skip manually so we can access the head field without
            // depending on implementation details of Skip
            for _ in 0..at - 1 {
                iter.next();
            }
            // After `at - 1` advances, `iter.head` points at the `at - 1`th node.
            iter.head
        } else {
            // better off starting from the end
            let mut iter = self.iter_mut();
            for _ in 0..len - 1 - (at - 1) {
                iter.next_back();
            }
            // After these backward steps, `iter.tail` points at the `at - 1`th node.
            iter.tail
        };
        // SAFETY: `split_node` points at the `at - 1`th node of this list (found
        // by the walk above), and `at` is in-bounds (asserted) and non-trivial.
        unsafe { self.split_off_after_node(split_node, at) }
    }
27833
    /// Removes the element at the given index and returns it.
    ///
    /// This operation should compute in *O*(*n*) time.
    ///
    /// # Panics
    /// Panics if at >= len
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(linked_list_remove)]
    /// use std::collections::LinkedList;
    ///
    /// let mut d = LinkedList::new();
    ///
    /// d.push_front(1);
    /// d.push_front(2);
    /// d.push_front(3);
    ///
    /// assert_eq!(d.remove(1), 2);
    /// assert_eq!(d.remove(0), 3);
    /// assert_eq!(d.remove(0), 1);
    /// ```
    #[unstable(feature = "linked_list_remove", issue = "69210")]
    pub fn remove(&mut self, at: usize) -> T {
        let len = self.len();
        assert!(at < len, "Cannot remove at an index outside of the list bounds");

        // Below, we iterate towards the node at the given index, either from
        // the start or the end, depending on which would be faster.
        // `offset_from_end` is the number of steps needed from the back cursor.
        let offset_from_end = len - at - 1;
        if at <= offset_from_end {
            // Walking forward from the front is at least as short.
            let mut cursor = self.cursor_front_mut();
            for _ in 0..at {
                cursor.move_next();
            }
            // The cursor now sits on a real element (`at < len` was asserted),
            // so `remove_current` cannot return `None`.
            cursor.remove_current().unwrap()
        } else {
            // Walking backward from the back is strictly shorter.
            let mut cursor = self.cursor_back_mut();
            for _ in 0..offset_from_end {
                cursor.move_prev();
            }
            // Same invariant as above: the cursor is on a real element.
            cursor.remove_current().unwrap()
        }
    }
27879
    /// Creates an iterator which uses a closure to determine if an element should be removed.
    ///
    /// If the closure returns true, then the element is removed and yielded.
    /// If the closure returns false, the element will remain in the list and will not be yielded
    /// by the iterator.
    ///
    /// Note that `drain_filter` lets you mutate every element in the filter closure, regardless of
    /// whether you choose to keep or remove it.
    ///
    /// # Examples
    ///
    /// Splitting a list into evens and odds, reusing the original list:
    ///
    /// ```
    /// #![feature(drain_filter)]
    /// use std::collections::LinkedList;
    ///
    /// let mut numbers: LinkedList<u32> = LinkedList::new();
    /// numbers.extend(&[1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15]);
    ///
    /// let evens = numbers.drain_filter(|x| *x % 2 == 0).collect::<LinkedList<_>>();
    /// let odds = numbers;
    ///
    /// assert_eq!(evens.into_iter().collect::<Vec<_>>(), vec![2, 4, 6, 8, 14]);
    /// assert_eq!(odds.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 9, 11, 13, 15]);
    /// ```
    #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
    pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, T, F>
    where
        F: FnMut(&mut T) -> bool,
    {
        // Copy the head pointer and length out of `self` first: the struct
        // expression below moves the `&mut self` borrow into the `list` field,
        // after which `self.head` / `self.len` could no longer be read
        // (avoid borrow issues).
        let it = self.head;
        let old_len = self.len;

        DrainFilter { list: self, it, pred: filter, idx: 0, old_len }
    }
27917}
27918
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T> Drop for LinkedList<T> {
    fn drop(&mut self) {
        // Guard that keeps freeing the remaining nodes if an element's
        // destructor panics, so no allocation is leaked.
        struct DropGuard<'a, T>(&'a mut LinkedList<T>);

        impl<'a, T> Drop for DropGuard<'a, T> {
            fn drop(&mut self) {
                // Continue the same loop we do below. This only runs when a destructor has
                // panicked. If another one panics this will abort.
                while self.0.pop_front_node().is_some() {}
            }
        }

        // Pop and drop nodes one at a time. The guard is armed around each
        // element drop and disarmed (forgotten) once that drop returns normally.
        while let Some(node) = self.pop_front_node() {
            let guard = DropGuard(self);
            drop(node);
            mem::forget(guard);
        }
    }
}
27939
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<&'a T> {
        // `len` reaching zero is the stopping condition: it prevents the front
        // and back halves of a double-ended traversal from overlapping.
        if self.len == 0 {
            None
        } else {
            self.head.map(|node| unsafe {
                // Need an unbound lifetime to get 'a
                // SAFETY: the node is owned by the list borrowed for `'a`, so
                // the reference stays valid for `'a`.
                let node = &*node.as_ptr();
                self.len -= 1;
                self.head = node.next;
                &node.element
            })
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // The remaining length is tracked exactly in `self.len`.
        (self.len, Some(self.len))
    }

    #[inline]
    fn last(mut self) -> Option<&'a T> {
        // The tail pointer makes the last element reachable in O(1); no need
        // to walk the whole list like the default implementation would.
        self.next_back()
    }
}
27969
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
    #[inline]
    fn next_back(&mut self) -> Option<&'a T> {
        // Mirror of `next`: the shared `len` counter stops the two ends from
        // crossing each other.
        if self.len == 0 {
            None
        } else {
            self.tail.map(|node| unsafe {
                // Need an unbound lifetime to get 'a
                // SAFETY: the node is owned by the list borrowed for `'a`.
                let node = &*node.as_ptr();
                self.len -= 1;
                self.tail = node.prev;
                &node.element
            })
        }
    }
}
27987
// `Iter::size_hint` returns `(len, Some(len))`, i.e. an exact length.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for Iter<'_, T> {}
27990
// Once `len` hits zero, `Iter::next` keeps returning `None` forever.
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Iter<'_, T> {}
27993
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;

    #[inline]
    fn next(&mut self) -> Option<&'a mut T> {
        // `len` reaching zero is the stopping condition: it prevents the front
        // and back halves of a double-ended traversal from overlapping.
        if self.len == 0 {
            None
        } else {
            self.head.map(|node| unsafe {
                // Need an unbound lifetime to get 'a
                // SAFETY: each node is yielded at most once (the `len` guard
                // keeps `head` and `tail` from crossing), so the returned
                // `&mut` borrows never alias.
                let node = &mut *node.as_ptr();
                self.len -= 1;
                self.head = node.next;
                &mut node.element
            })
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // The remaining length is tracked exactly in `self.len`.
        (self.len, Some(self.len))
    }

    #[inline]
    fn last(mut self) -> Option<&'a mut T> {
        // The tail pointer makes the last element reachable in O(1).
        self.next_back()
    }
}
28023
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
    #[inline]
    fn next_back(&mut self) -> Option<&'a mut T> {
        // Mirror of `next`: the shared `len` counter stops the two ends from
        // crossing each other.
        if self.len == 0 {
            None
        } else {
            self.tail.map(|node| unsafe {
                // Need an unbound lifetime to get 'a
                // SAFETY: each node is yielded at most once, so the returned
                // `&mut` borrows never alias.
                let node = &mut *node.as_ptr();
                self.len -= 1;
                self.tail = node.prev;
                &mut node.element
            })
        }
    }
}
28041
// `IterMut::size_hint` returns `(len, Some(len))`, i.e. an exact length.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IterMut<'_, T> {}
28044
// Once `len` hits zero, `IterMut::next` keeps returning `None` forever.
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IterMut<'_, T> {}
28047
/// A cursor over a `LinkedList`.
///
/// A `Cursor` is like an iterator, except that it can freely seek back-and-forth.
///
/// Cursors always rest between two elements in the list, and index in a logically circular way.
/// To accommodate this, there is a "ghost" non-element that yields `None` between the head and
/// tail of the list.
///
/// When created, cursors start at the front of the list, or the "ghost" non-element if the list is empty.
#[unstable(feature = "linked_list_cursors", issue = "58533")]
pub struct Cursor<'a, T: 'a> {
    // Logical position of `current`; only meaningful when `current` is `Some`.
    index: usize,
    // Node the cursor points at; `None` represents the "ghost" non-element.
    current: Option<NonNull<Node<T>>>,
    // Shared borrow of the list, so a `Cursor` can never mutate it.
    list: &'a LinkedList<T>,
}
28063
28064#[unstable(feature = "linked_list_cursors", issue = "58533")]
28065impl<T> Clone for Cursor<'_, T> {
28066    fn clone(&self) -> Self {
28067        let Cursor { index, current, list } = *self;
28068        Cursor { index, current, list }
28069    }
28070}
28071
28072#[unstable(feature = "linked_list_cursors", issue = "58533")]
28073impl<T: fmt::Debug> fmt::Debug for Cursor<'_, T> {
28074    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
28075        f.debug_tuple("Cursor").field(&self.list).field(&self.index()).finish()
28076    }
28077}
28078
/// A cursor over a `LinkedList` with editing operations.
///
/// A `Cursor` is like an iterator, except that it can freely seek back-and-forth, and can
/// safely mutate the list during iteration. This is because the lifetime of its yielded
/// references is tied to its own lifetime, instead of just the underlying list. This means
/// cursors cannot yield multiple elements at once.
///
/// Cursors always rest between two elements in the list, and index in a logically circular way.
/// To accommodate this, there is a "ghost" non-element that yields `None` between the head and
/// tail of the list.
#[unstable(feature = "linked_list_cursors", issue = "58533")]
pub struct CursorMut<'a, T: 'a> {
    // Logical position of `current`; only meaningful when `current` is `Some`.
    index: usize,
    // Node the cursor points at; `None` represents the "ghost" non-element.
    current: Option<NonNull<Node<T>>>,
    // Exclusive borrow of the list, enabling the editing operations below.
    list: &'a mut LinkedList<T>,
}
28095
28096#[unstable(feature = "linked_list_cursors", issue = "58533")]
28097impl<T: fmt::Debug> fmt::Debug for CursorMut<'_, T> {
28098    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
28099        f.debug_tuple("CursorMut").field(&self.list).field(&self.index()).finish()
28100    }
28101}
28102
impl<'a, T> Cursor<'a, T> {
    /// Returns the cursor position index within the `LinkedList`.
    ///
    /// This returns `None` if the cursor is currently pointing to the
    /// "ghost" non-element.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn index(&self) -> Option<usize> {
        // `?` maps the ghost position (no current node) to `None`.
        let _ = self.current?;
        Some(self.index)
    }

    /// Moves the cursor to the next element of the `LinkedList`.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this will move it to
    /// the first element of the `LinkedList`. If it is pointing to the last
    /// element of the `LinkedList` then this will move it to the "ghost" non-element.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn move_next(&mut self) {
        match self.current.take() {
            // We had no current element; the cursor was sitting at the start position
            // Next element should be the head of the list
            None => {
                self.current = self.list.head;
                self.index = 0;
            }
            // We had a previous element, so let's go to its next
            Some(current) => unsafe {
                // SAFETY: `current` points to a node owned by `self.list`, so
                // reading it is valid while the list is borrowed.
                self.current = current.as_ref().next;
                self.index += 1;
            },
        }
    }

    /// Moves the cursor to the previous element of the `LinkedList`.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this will move it to
    /// the last element of the `LinkedList`. If it is pointing to the first
    /// element of the `LinkedList` then this will move it to the "ghost" non-element.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn move_prev(&mut self) {
        match self.current.take() {
            // No current. We're at the start of the list. Yield None and jump to the end.
            None => {
                self.current = self.list.tail;
                // `checked_sub` keeps the index at 0 when the list is empty.
                self.index = self.list.len().checked_sub(1).unwrap_or(0);
            }
            // Have a prev. Yield it and go to the previous element.
            Some(current) => unsafe {
                // SAFETY: `current` points to a node owned by `self.list`.
                self.current = current.as_ref().prev;
                // Stepping back off the front lands on the ghost, whose
                // logical index is `len`.
                self.index = self.index.checked_sub(1).unwrap_or_else(|| self.list.len());
            },
        }
    }

    /// Returns a reference to the element that the cursor is currently
    /// pointing to.
    ///
    /// This returns `None` if the cursor is currently pointing to the
    /// "ghost" non-element.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn current(&self) -> Option<&'a T> {
        // SAFETY: when `Some`, `current` points to a node owned by `self.list`,
        // which is borrowed for `'a`, so the reference is valid for `'a`.
        unsafe { self.current.map(|current| &(*current.as_ptr()).element) }
    }

    /// Returns a reference to the next element.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this returns
    /// the first element of the `LinkedList`. If it is pointing to the last
    /// element of the `LinkedList` then this returns `None`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn peek_next(&self) -> Option<&'a T> {
        unsafe {
            // From the ghost, the "next" element is the head of the list.
            let next = match self.current {
                None => self.list.head,
                Some(current) => current.as_ref().next,
            };
            // SAFETY: `next`, when `Some`, points to a node owned by `self.list`.
            next.map(|next| &(*next.as_ptr()).element)
        }
    }

    /// Returns a reference to the previous element.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this returns
    /// the last element of the `LinkedList`. If it is pointing to the first
    /// element of the `LinkedList` then this returns `None`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn peek_prev(&self) -> Option<&'a T> {
        unsafe {
            // From the ghost, the "previous" element is the tail of the list.
            let prev = match self.current {
                None => self.list.tail,
                Some(current) => current.as_ref().prev,
            };
            // SAFETY: `prev`, when `Some`, points to a node owned by `self.list`.
            prev.map(|prev| &(*prev.as_ptr()).element)
        }
    }
}
28199
impl<'a, T> CursorMut<'a, T> {
    /// Returns the cursor position index within the `LinkedList`.
    ///
    /// This returns `None` if the cursor is currently pointing to the
    /// "ghost" non-element.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn index(&self) -> Option<usize> {
        // `?` maps the ghost position (no current node) to `None`.
        let _ = self.current?;
        Some(self.index)
    }

    /// Moves the cursor to the next element of the `LinkedList`.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this will move it to
    /// the first element of the `LinkedList`. If it is pointing to the last
    /// element of the `LinkedList` then this will move it to the "ghost" non-element.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn move_next(&mut self) {
        match self.current.take() {
            // We had no current element; the cursor was sitting at the start position
            // Next element should be the head of the list
            None => {
                self.current = self.list.head;
                self.index = 0;
            }
            // We had a previous element, so let's go to its next
            Some(current) => unsafe {
                // SAFETY: `current` points to a node owned by `self.list`.
                self.current = current.as_ref().next;
                self.index += 1;
            },
        }
    }

    /// Moves the cursor to the previous element of the `LinkedList`.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this will move it to
    /// the last element of the `LinkedList`. If it is pointing to the first
    /// element of the `LinkedList` then this will move it to the "ghost" non-element.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn move_prev(&mut self) {
        match self.current.take() {
            // No current. We're at the start of the list. Yield None and jump to the end.
            None => {
                self.current = self.list.tail;
                // `checked_sub` keeps the index at 0 when the list is empty.
                self.index = self.list.len().checked_sub(1).unwrap_or(0);
            }
            // Have a prev. Yield it and go to the previous element.
            Some(current) => unsafe {
                // SAFETY: `current` points to a node owned by `self.list`.
                self.current = current.as_ref().prev;
                // Stepping back off the front lands on the ghost, whose
                // logical index is `len`.
                self.index = self.index.checked_sub(1).unwrap_or_else(|| self.list.len());
            },
        }
    }

    /// Returns a reference to the element that the cursor is currently
    /// pointing to.
    ///
    /// This returns `None` if the cursor is currently pointing to the
    /// "ghost" non-element.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn current(&mut self) -> Option<&mut T> {
        // SAFETY: the node is owned by `self.list`; the returned `&mut` borrow
        // is tied to `&mut self`, so the cursor is frozen while it lives.
        unsafe { self.current.map(|current| &mut (*current.as_ptr()).element) }
    }

    /// Returns a reference to the next element.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this returns
    /// the first element of the `LinkedList`. If it is pointing to the last
    /// element of the `LinkedList` then this returns `None`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn peek_next(&mut self) -> Option<&mut T> {
        unsafe {
            // From the ghost, the "next" element is the head of the list.
            let next = match self.current {
                None => self.list.head,
                Some(current) => current.as_ref().next,
            };
            // SAFETY: the node is owned by `self.list`; the `&mut` borrow is
            // tied to `&mut self`.
            next.map(|next| &mut (*next.as_ptr()).element)
        }
    }

    /// Returns a reference to the previous element.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this returns
    /// the last element of the `LinkedList`. If it is pointing to the first
    /// element of the `LinkedList` then this returns `None`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn peek_prev(&mut self) -> Option<&mut T> {
        unsafe {
            // From the ghost, the "previous" element is the tail of the list.
            let prev = match self.current {
                None => self.list.tail,
                Some(current) => current.as_ref().prev,
            };
            // SAFETY: the node is owned by `self.list`; the `&mut` borrow is
            // tied to `&mut self`.
            prev.map(|prev| &mut (*prev.as_ptr()).element)
        }
    }

    /// Returns a read-only cursor pointing to the current element.
    ///
    /// The lifetime of the returned `Cursor` is bound to that of the
    /// `CursorMut`, which means it cannot outlive the `CursorMut` and that the
    /// `CursorMut` is frozen for the lifetime of the `Cursor`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn as_cursor(&self) -> Cursor<'_, T> {
        // Field-by-field copy; the shared borrow of `self` keeps the list
        // immutable while the returned `Cursor` exists.
        Cursor { list: self.list, current: self.current, index: self.index }
    }
}
28306
28307// Now the list editing operations
28308
impl<'a, T> CursorMut<'a, T> {
    /// Inserts a new element into the `LinkedList` after the current one.
    ///
    /// If the cursor is pointing at the "ghost" non-element then the new element is
    /// inserted at the front of the `LinkedList`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn insert_after(&mut self, item: T) {
        unsafe {
            // Leak the freshly boxed node; the list takes ownership of it.
            let spliced_node = Box::leak(Box::new(Node::new(item))).into();
            let node_next = match self.current {
                None => self.list.head,
                Some(node) => node.as_ref().next,
            };
            // Splice the single node in between `current` and its successor.
            self.list.splice_nodes(self.current, node_next, spliced_node, spliced_node, 1);
            if self.current.is_none() {
                // The "ghost" non-element's index has changed.
                self.index = self.list.len;
            }
        }
    }

    /// Inserts a new element into the `LinkedList` before the current one.
    ///
    /// If the cursor is pointing at the "ghost" non-element then the new element is
    /// inserted at the end of the `LinkedList`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn insert_before(&mut self, item: T) {
        unsafe {
            // Leak the freshly boxed node; the list takes ownership of it.
            let spliced_node = Box::leak(Box::new(Node::new(item))).into();
            let node_prev = match self.current {
                None => self.list.tail,
                Some(node) => node.as_ref().prev,
            };
            // Splice the single node in between `current`'s predecessor and `current`.
            self.list.splice_nodes(node_prev, self.current, spliced_node, spliced_node, 1);
            // One element now precedes the cursor, so its index shifts by one.
            self.index += 1;
        }
    }

    /// Removes the current element from the `LinkedList`.
    ///
    /// The element that was removed is returned, and the cursor is
    /// moved to point to the next element in the `LinkedList`.
    ///
    /// If the cursor is currently pointing to the "ghost" non-element then no element
    /// is removed and `None` is returned.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn remove_current(&mut self) -> Option<T> {
        let unlinked_node = self.current?;
        unsafe {
            // Advance the cursor before unlinking; its index is unchanged
            // because everything after shifts down by one.
            self.current = unlinked_node.as_ref().next;
            self.list.unlink_node(unlinked_node);
            // SAFETY: the node was heap-allocated by the list and is now
            // unlinked, so we can reclaim ownership of the allocation.
            let unlinked_node = Box::from_raw(unlinked_node.as_ptr());
            Some(unlinked_node.element)
        }
    }

    /// Removes the current element from the `LinkedList` without deallocating the list node.
    ///
    /// The node that was removed is returned as a new `LinkedList` containing only this node.
    /// The cursor is moved to point to the next element in the current `LinkedList`.
    ///
    /// If the cursor is currently pointing to the "ghost" non-element then no element
    /// is removed and `None` is returned.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn remove_current_as_list(&mut self) -> Option<LinkedList<T>> {
        let mut unlinked_node = self.current?;
        unsafe {
            // Advance the cursor before unlinking, as in `remove_current`.
            self.current = unlinked_node.as_ref().next;
            self.list.unlink_node(unlinked_node);

            // Clear the stale links so the node forms a valid one-element list.
            unlinked_node.as_mut().prev = None;
            unlinked_node.as_mut().next = None;
            Some(LinkedList {
                head: Some(unlinked_node),
                tail: Some(unlinked_node),
                len: 1,
                marker: PhantomData,
            })
        }
    }

    /// Inserts the elements from the given `LinkedList` after the current one.
    ///
    /// If the cursor is pointing at the "ghost" non-element then the new elements are
    /// inserted at the start of the `LinkedList`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn splice_after(&mut self, list: LinkedList<T>) {
        unsafe {
            // An empty donor list detaches to `None`; nothing to do then.
            let (splice_head, splice_tail, splice_len) = match list.detach_all_nodes() {
                Some(parts) => parts,
                _ => return,
            };
            let node_next = match self.current {
                None => self.list.head,
                Some(node) => node.as_ref().next,
            };
            // Splice the donated chain in between `current` and its successor.
            self.list.splice_nodes(self.current, node_next, splice_head, splice_tail, splice_len);
            if self.current.is_none() {
                // The "ghost" non-element's index has changed.
                self.index = self.list.len;
            }
        }
    }

    /// Inserts the elements from the given `LinkedList` before the current one.
    ///
    /// If the cursor is pointing at the "ghost" non-element then the new elements are
    /// inserted at the end of the `LinkedList`.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn splice_before(&mut self, list: LinkedList<T>) {
        unsafe {
            // An empty donor list detaches to `None`; nothing to do then.
            let (splice_head, splice_tail, splice_len) = match list.detach_all_nodes() {
                Some(parts) => parts,
                _ => return,
            };
            let node_prev = match self.current {
                None => self.list.tail,
                Some(node) => node.as_ref().prev,
            };
            // Splice the donated chain in between `current`'s predecessor and `current`.
            self.list.splice_nodes(node_prev, self.current, splice_head, splice_tail, splice_len);
            // `splice_len` elements now precede the cursor.
            self.index += splice_len;
        }
    }

    /// Splits the list into two after the current element. This will return a
    /// new list consisting of everything after the cursor, with the original
    /// list retaining everything before.
    ///
    /// If the cursor is pointing at the "ghost" non-element then the entire contents
    /// of the `LinkedList` are moved.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn split_after(&mut self) -> LinkedList<T> {
        // On the ghost, everything is split off and the cursor stays on the
        // ghost of the now-empty list, so the split index wraps to 0.
        let split_off_idx = if self.index == self.list.len { 0 } else { self.index + 1 };
        if self.index == self.list.len {
            // The "ghost" non-element's index has changed to 0.
            self.index = 0;
        }
        unsafe { self.list.split_off_after_node(self.current, split_off_idx) }
    }

    /// Splits the list into two before the current element. This will return a
    /// new list consisting of everything before the cursor, with the original
    /// list retaining everything after.
    ///
    /// If the cursor is pointing at the "ghost" non-element then the entire contents
    /// of the `LinkedList` are moved.
    #[unstable(feature = "linked_list_cursors", issue = "58533")]
    pub fn split_before(&mut self) -> LinkedList<T> {
        let split_off_idx = self.index;
        // Everything before the cursor leaves, so the cursor is now at index 0.
        self.index = 0;
        unsafe { self.list.split_off_before_node(self.current, split_off_idx) }
    }
}
28462
/// An iterator produced by calling `drain_filter` on LinkedList.
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
pub struct DrainFilter<'a, T: 'a, F: 'a>
where
    F: FnMut(&mut T) -> bool,
{
    // The list being drained.
    list: &'a mut LinkedList<T>,
    // Next node to test; `None` once the walk reaches the end of the list.
    it: Option<NonNull<Node<T>>>,
    // Predicate deciding which elements are removed and yielded.
    pred: F,
    // Number of nodes visited so far.
    idx: usize,
    // Length of the list when the drain started (used by `size_hint`).
    old_len: usize,
}
28475
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<T, F> Iterator for DrainFilter<'_, T, F>
where
    F: FnMut(&mut T) -> bool,
{
    type Item = T;

    fn next(&mut self) -> Option<T> {
        // Loop until an element matches the predicate or the walk ends.
        while let Some(mut node) = self.it {
            unsafe {
                // Advance before possibly unlinking `node`, so `it` never
                // points at a removed node.
                self.it = node.as_ref().next;
                self.idx += 1;

                if (self.pred)(&mut node.as_mut().element) {
                    // `unlink_node` is okay with aliasing `element` references.
                    self.list.unlink_node(node);
                    // SAFETY: the node was heap-allocated by the list and is
                    // now unlinked, so we can take back the allocation.
                    return Some(Box::from_raw(node.as_ptr()).element);
                }
            }
        }

        None
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Anywhere from none to all of the unvisited elements may match.
        (0, Some(self.old_len - self.idx))
    }
}
28504
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<T, F> Drop for DrainFilter<'_, T, F>
where
    F: FnMut(&mut T) -> bool,
{
    fn drop(&mut self) {
        // Guard that finishes draining the remaining matching elements if
        // dropping one of the yielded items panics, so the list is left in a
        // consistent state rather than half-drained.
        struct DropGuard<'r, 'a, T, F>(&'r mut DrainFilter<'a, T, F>)
        where
            F: FnMut(&mut T) -> bool;

        impl<'r, 'a, T, F> Drop for DropGuard<'r, 'a, T, F>
        where
            F: FnMut(&mut T) -> bool,
        {
            fn drop(&mut self) {
                self.0.for_each(drop);
            }
        }

        // Drain everything the caller didn't consume. The guard is forgotten
        // on the non-panicking path, so its `drop` only runs if `drop(item)`
        // unwinds.
        while let Some(item) = self.next() {
            let guard = DropGuard(self);
            drop(item);
            mem::forget(guard);
        }
    }
}
28531
28532#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
28533impl<T: fmt::Debug, F> fmt::Debug for DrainFilter<'_, T, F>
28534where
28535    F: FnMut(&mut T) -> bool,
28536{
28537    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
28538        f.debug_tuple("DrainFilter").field(&self.list).finish()
28539    }
28540}
28541
28542#[stable(feature = "rust1", since = "1.0.0")]
28543impl<T> Iterator for IntoIter<T> {
28544    type Item = T;
28545
28546    #[inline]
28547    fn next(&mut self) -> Option<T> {
28548        self.list.pop_front()
28549    }
28550
28551    #[inline]
28552    fn size_hint(&self) -> (usize, Option<usize>) {
28553        (self.list.len, Some(self.list.len))
28554    }
28555}
28556
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> DoubleEndedIterator for IntoIter<T> {
    /// Takes the next element from the back of the underlying list.
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        self.list.pop_back()
    }
}

// `size_hint` returns identical bounds (`list.len`), so the iterator can
// promise `ExactSizeIterator`.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IntoIter<T> {}

// Once `pop_front`/`pop_back` yield `None` the list is empty and stays so;
// the iterator therefore keeps returning `None`, satisfying `FusedIterator`.
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IntoIter<T> {}
28570
28571#[stable(feature = "rust1", since = "1.0.0")]
28572impl<T> FromIterator<T> for LinkedList<T> {
28573    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
28574        let mut list = Self::new();
28575        list.extend(iter);
28576        list
28577    }
28578}
28579
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> IntoIterator for LinkedList<T> {
    type Item = T;
    type IntoIter = IntoIter<T>;

    /// Consumes the list into an iterator yielding elements by value.
    #[inline]
    fn into_iter(self) -> IntoIter<T> {
        IntoIter { list: self }
    }
}

// Borrowing iteration: `for x in &list` yields `&T`.
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a LinkedList<T> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;

    fn into_iter(self) -> Iter<'a, T> {
        self.iter()
    }
}

// Mutable borrowing iteration: `for x in &mut list` yields `&mut T`.
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a mut LinkedList<T> {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a, T>;

    fn into_iter(self) -> IterMut<'a, T> {
        self.iter_mut()
    }
}
28611
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Extend<T> for LinkedList<T> {
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        // Dispatch through `SpecExtend` so extending from another
        // `LinkedList` can use the cheaper `append` specialization below.
        <Self as SpecExtend<I>>::spec_extend(self, iter);
    }

    #[inline]
    fn extend_one(&mut self, elem: T) {
        self.push_back(elem);
    }
}
28623
28624impl<I: IntoIterator> SpecExtend<I> for LinkedList<I::Item> {
28625    default fn spec_extend(&mut self, iter: I) {
28626        iter.into_iter().for_each(move |elt| self.push_back(elt));
28627    }
28628}
28629
28630impl<T> SpecExtend<LinkedList<T>> for LinkedList<T> {
28631    fn spec_extend(&mut self, ref mut other: LinkedList<T>) {
28632        self.append(other);
28633    }
28634}
28635
#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a, T: 'a + Copy> Extend<&'a T> for LinkedList<T> {
    // Extending from references requires `T: Copy` so elements can be
    // duplicated into the list without taking ownership of the source.
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.extend(iter.into_iter().cloned());
    }

    #[inline]
    fn extend_one(&mut self, &elem: &'a T) {
        self.push_back(elem);
    }
}
28647
28648#[stable(feature = "rust1", since = "1.0.0")]
28649impl<T: PartialEq> PartialEq for LinkedList<T> {
28650    fn eq(&self, other: &Self) -> bool {
28651        self.len() == other.len() && self.iter().eq(other)
28652    }
28653
28654    fn ne(&self, other: &Self) -> bool {
28655        self.len() != other.len() || self.iter().ne(other)
28656    }
28657}
28658
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq> Eq for LinkedList<T> {}

// Ordering is lexicographic over the elements, delegated to the standard
// iterator comparison adaptors.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd> PartialOrd for LinkedList<T> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.iter().partial_cmp(other)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Ord for LinkedList<T> {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        self.iter().cmp(other)
    }
}
28676
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone> Clone for LinkedList<T> {
    fn clone(&self) -> Self {
        self.iter().cloned().collect()
    }

    /// Clones `other` into `self`, reusing `self`'s existing nodes where
    /// possible instead of deallocating and reallocating them.
    fn clone_from(&mut self, other: &Self) {
        let mut iter_other = other.iter();
        // Drop any surplus nodes so `self` is no longer than `other`.
        if self.len() > other.len() {
            self.split_off(other.len());
        }
        // Overwrite the shared prefix in place.
        for (elem, elem_other) in self.iter_mut().zip(&mut iter_other) {
            elem.clone_from(elem_other);
        }
        // Append whatever `other` still has beyond `self`'s length.
        if !iter_other.is_empty() {
            self.extend(iter_other.cloned());
        }
    }
}
28696
28697#[stable(feature = "rust1", since = "1.0.0")]
28698impl<T: fmt::Debug> fmt::Debug for LinkedList<T> {
28699    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
28700        f.debug_list().entries(self).finish()
28701    }
28702}
28703
28704#[stable(feature = "rust1", since = "1.0.0")]
28705impl<T: Hash> Hash for LinkedList<T> {
28706    fn hash<H: Hasher>(&self, state: &mut H) {
28707        self.len().hash(state);
28708        for elt in self {
28709            elt.hash(state);
28710        }
28711    }
28712}
28713
// Ensure that `LinkedList` and its read-only iterators are covariant in their type parameters.
#[allow(dead_code)]
fn assert_covariance() {
    // Each inner function only compiles if the corresponding type can shorten
    // its lifetime parameter, i.e. is covariant. Never called at runtime.
    fn a<'a>(x: LinkedList<&'static str>) -> LinkedList<&'a str> {
        x
    }
    fn b<'i, 'a>(x: Iter<'i, &'static str>) -> Iter<'i, &'a str> {
        x
    }
    fn c<'a>(x: IntoIter<&'static str>) -> IntoIter<&'a str> {
        x
    }
}
28727
// SAFETY notes for the impls below: a `LinkedList<T>` owns its elements, so
// sending/sharing a list amounts to sending/sharing the `T`s.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send> Send for LinkedList<T> {}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for LinkedList<T> {}

// Shared iterators only ever hand out `&T`; `T: Sync` is required even for
// `Send` because the references point back into the original list.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Send for Iter<'_, T> {}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for Iter<'_, T> {}

// Mutable iterators behave like `&mut T` for thread-safety purposes.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send> Send for IterMut<'_, T> {}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for IterMut<'_, T> {}

// Cursors mirror the corresponding iterator impls above.
#[unstable(feature = "linked_list_cursors", issue = "58533")]
unsafe impl<T: Sync> Send for Cursor<'_, T> {}

#[unstable(feature = "linked_list_cursors", issue = "58533")]
unsafe impl<T: Sync> Sync for Cursor<'_, T> {}

#[unstable(feature = "linked_list_cursors", issue = "58533")]
unsafe impl<T: Send> Send for CursorMut<'_, T> {}

#[unstable(feature = "linked_list_cursors", issue = "58533")]
unsafe impl<T: Sync> Sync for CursorMut<'_, T> {}
28757use super::*;
28758
28759use std::thread;
28760use std::vec::Vec;
28761
28762use rand::{thread_rng, RngCore};
28763
// Test helper: builds a list holding clones of the slice's elements, in order.
fn list_from<T: Clone>(v: &[T]) -> LinkedList<T> {
    let mut list = LinkedList::new();
    for x in v {
        list.push_back(x.clone());
    }
    list
}
28767
// Walks the list front to back, asserting that every `prev`/`next` pair is
// consistent, that `tail` really points at the last node, and that `len`
// matches the number of reachable nodes. Panics on any inconsistency.
pub fn check_links<T>(list: &LinkedList<T>) {
    unsafe {
        let mut len = 0;
        let mut last_ptr: Option<&Node<T>> = None;
        let mut node_ptr: &Node<T>;
        match list.head {
            None => {
                // tail node should also be None.
                assert!(list.tail.is_none());
                assert_eq!(0, list.len);
                return;
            }
            Some(node) => node_ptr = &*node.as_ptr(),
        }
        loop {
            // Each node's `prev` must point at the node we just came from,
            // and the head's `prev` must be `None`.
            match (last_ptr, node_ptr.prev) {
                (None, None) => {}
                (None, _) => panic!("prev link for head"),
                (Some(p), Some(pptr)) => {
                    assert_eq!(p as *const Node<T>, pptr.as_ptr() as *const Node<T>);
                }
                _ => panic!("prev link is none, not good"),
            }
            match node_ptr.next {
                Some(next) => {
                    last_ptr = Some(node_ptr);
                    node_ptr = &*next.as_ptr();
                    len += 1;
                }
                None => {
                    len += 1;
                    break;
                }
            }
        }

        // verify that the tail node points to the last node.
        let tail = list.tail.as_ref().expect("some tail node").as_ref();
        assert_eq!(tail as *const Node<T>, node_ptr as *const Node<T>);
        // check that len matches interior links.
        assert_eq!(len, list.len);
    }
}
28811
#[test]
fn test_append() {
    // Covers all four empty/non-empty combinations of `append`, checking
    // link integrity and that the appended-from list is left empty.
    // Empty to empty
    {
        let mut m = LinkedList::<i32>::new();
        let mut n = LinkedList::new();
        m.append(&mut n);
        check_links(&m);
        assert_eq!(m.len(), 0);
        assert_eq!(n.len(), 0);
    }
    // Non-empty to empty
    {
        let mut m = LinkedList::new();
        let mut n = LinkedList::new();
        n.push_back(2);
        m.append(&mut n);
        check_links(&m);
        assert_eq!(m.len(), 1);
        assert_eq!(m.pop_back(), Some(2));
        assert_eq!(n.len(), 0);
        check_links(&m);
    }
    // Empty to non-empty
    {
        let mut m = LinkedList::new();
        let mut n = LinkedList::new();
        m.push_back(2);
        m.append(&mut n);
        check_links(&m);
        assert_eq!(m.len(), 1);
        assert_eq!(m.pop_back(), Some(2));
        check_links(&m);
    }

    // Non-empty to non-empty
    let v = vec![1, 2, 3, 4, 5];
    let u = vec![9, 8, 1, 2, 3, 4, 5];
    let mut m = list_from(&v);
    let mut n = list_from(&u);
    m.append(&mut n);
    check_links(&m);
    // The result must be the concatenation of the two source vectors.
    let mut sum = v;
    sum.extend_from_slice(&u);
    assert_eq!(sum.len(), m.len());
    for elt in sum {
        assert_eq!(m.pop_front(), Some(elt))
    }
    assert_eq!(n.len(), 0);
    // Let's make sure it's working properly, since we
    // did some direct changes to private members.
    n.push_back(3);
    assert_eq!(n.len(), 1);
    assert_eq!(n.pop_front(), Some(3));
    check_links(&n);
}
28868
#[test]
fn test_clone_from() {
    // Each case: (initial contents of the destination, contents of the
    // clone source). Covers shrinking, growing, and equal-length clones.
    let cases = vec![
        // Short cloned from long
        (vec![1, 2, 3, 4, 5], vec![8, 7, 6, 2, 3, 4, 5]),
        // Long cloned from short
        (vec![1, 2, 3, 4, 5], vec![6, 7, 8]),
        // Two equal length lists
        (vec![1, 2, 3, 4, 5], vec![9, 8, 1, 2, 3]),
    ];
    for (v, u) in cases {
        let mut m = list_from(&v);
        let n = list_from(&u);
        m.clone_from(&n);
        check_links(&m);
        assert_eq!(m, n);
        for elt in u {
            assert_eq!(m.pop_front(), Some(elt))
        }
    }
}
28911
#[test]
#[cfg_attr(target_os = "emscripten", ignore)]
fn test_send() {
    // A list of `Send` elements must itself be `Send`: move one into another
    // thread and verify it is still intact there.
    let n = list_from(&[1, 2, 3]);
    thread::spawn(move || {
        check_links(&n);
        let a: &[_] = &[&1, &2, &3];
        assert_eq!(a, &*n.iter().collect::<Vec<_>>());
    })
    .join()
    .ok()
    .unwrap();
}
28925
#[test]
fn test_fuzz() {
    // Several rounds of randomized differential testing at increasing sizes.
    for _ in 0..25 {
        fuzz_test(3);
        fuzz_test(16);
        #[cfg(not(miri))] // Miri is too slow
        fuzz_test(189);
    }
}
28935
#[test]
fn test_26021() {
    // There was a bug in split_off that failed to null out the RHS's head's prev ptr.
    // This caused the RHS's dtor to walk up into the LHS at drop and delete all of
    // its nodes.
    //
    // https://github.com/rust-lang/rust/issues/26021
    let mut v1 = LinkedList::new();
    v1.push_front(1);
    v1.push_front(1);
    v1.push_front(1);
    v1.push_front(1);
    let _ = v1.split_off(3); // Dropping this now should not cause laundry consumption
    assert_eq!(v1.len(), 3);

    // Count the survivors in two independent ways.
    assert_eq!(v1.iter().len(), 3);
    assert_eq!(v1.iter().collect::<Vec<_>>().len(), 3);
}
28954
#[test]
fn test_split_off() {
    let mut list = LinkedList::new();
    for _ in 0..4 {
        list.push_front(1);
    }

    // Split at every possible index, including 0 and len, and verify that
    // re-joining the two halves reproduces the original list.
    for ix in 0..=list.len() {
        let mut left = list.clone();
        let right = left.split_off(ix);
        check_links(&left);
        check_links(&right);
        left.extend(right);
        assert_eq!(list, left);
    }
}
28973
// Randomized differential test: applies the same random sequence of push/pop
// operations to a `LinkedList` and a `Vec`, then checks that both containers
// end up with identical contents (links are validated at every step).
fn fuzz_test(sz: i32) {
    let mut m: LinkedList<_> = LinkedList::new();
    let mut v = vec![];
    for i in 0..sz {
        check_links(&m);
        let r: u8 = thread_rng().next_u32() as u8;
        match r % 6 {
            0 => {
                // Pop from the back of both; a no-op on both when empty.
                m.pop_back();
                v.pop();
            }
            1 => {
                if !v.is_empty() {
                    m.pop_front();
                    v.remove(0);
                }
            }
            2 | 4 => {
                // Front pushes are negated so they are distinguishable from
                // back pushes when comparing contents at the end.
                m.push_front(-i);
                v.insert(0, -i);
            }
            3 | 5 | _ => {
                m.push_back(i);
                v.push(i);
            }
        }
    }

    check_links(&m);

    let mut i = 0;
    for (a, &b) in m.into_iter().zip(&v) {
        i += 1;
        assert_eq!(a, b);
    }
    // `zip` stops at the shorter sequence; this confirms equal lengths.
    assert_eq!(i, v.len());
}
29011
#[test]
fn drain_filter_test() {
    let mut m: LinkedList<u32> = LinkedList::new();
    m.extend(&[1, 2, 3, 4, 5, 6]);
    // Drain everything below 4; the rest must remain, in order and well-linked.
    let deleted = m.drain_filter(|v| *v < 4).collect::<Vec<_>>();

    check_links(&m);

    assert_eq!(deleted, &[1, 2, 3]);
    assert_eq!(m.into_iter().collect::<Vec<_>>(), &[4, 5, 6]);
}
29023
#[test]
fn drain_to_empty_test() {
    let mut m: LinkedList<u32> = LinkedList::new();
    m.extend(&[1, 2, 3, 4, 5, 6]);
    // Draining with an always-true predicate must empty the list completely.
    let deleted = m.drain_filter(|_| true).collect::<Vec<_>>();

    check_links(&m);

    assert_eq!(deleted, &[1, 2, 3, 4, 5, 6]);
    assert_eq!(m.into_iter().collect::<Vec<_>>(), &[]);
}
29035
#[test]
fn test_cursor_move_peek() {
    // Immutable cursor over the front: stepping before the first element
    // parks the cursor on the "ghost" non-element (current/index == None).
    let mut m: LinkedList<u32> = LinkedList::new();
    m.extend(&[1, 2, 3, 4, 5, 6]);
    let mut cursor = m.cursor_front();
    assert_eq!(cursor.current(), Some(&1));
    assert_eq!(cursor.peek_next(), Some(&2));
    assert_eq!(cursor.peek_prev(), None);
    assert_eq!(cursor.index(), Some(0));
    cursor.move_prev();
    assert_eq!(cursor.current(), None);
    assert_eq!(cursor.peek_next(), Some(&1));
    assert_eq!(cursor.peek_prev(), Some(&6));
    assert_eq!(cursor.index(), None);
    cursor.move_next();
    cursor.move_next();
    assert_eq!(cursor.current(), Some(&2));
    assert_eq!(cursor.peek_next(), Some(&3));
    assert_eq!(cursor.peek_prev(), Some(&1));
    assert_eq!(cursor.index(), Some(1));

    // Immutable cursor over the back: stepping past the last element also
    // lands on the ghost, from which peeks wrap around to both ends.
    let mut cursor = m.cursor_back();
    assert_eq!(cursor.current(), Some(&6));
    assert_eq!(cursor.peek_next(), None);
    assert_eq!(cursor.peek_prev(), Some(&5));
    assert_eq!(cursor.index(), Some(5));
    cursor.move_next();
    assert_eq!(cursor.current(), None);
    assert_eq!(cursor.peek_next(), Some(&1));
    assert_eq!(cursor.peek_prev(), Some(&6));
    assert_eq!(cursor.index(), None);
    cursor.move_prev();
    cursor.move_prev();
    assert_eq!(cursor.current(), Some(&5));
    assert_eq!(cursor.peek_next(), Some(&6));
    assert_eq!(cursor.peek_prev(), Some(&4));
    assert_eq!(cursor.index(), Some(4));

    // Mutable front cursor, plus a read-only `as_cursor` view that moves
    // independently without disturbing the parent cursor.
    let mut m: LinkedList<u32> = LinkedList::new();
    m.extend(&[1, 2, 3, 4, 5, 6]);
    let mut cursor = m.cursor_front_mut();
    assert_eq!(cursor.current(), Some(&mut 1));
    assert_eq!(cursor.peek_next(), Some(&mut 2));
    assert_eq!(cursor.peek_prev(), None);
    assert_eq!(cursor.index(), Some(0));
    cursor.move_prev();
    assert_eq!(cursor.current(), None);
    assert_eq!(cursor.peek_next(), Some(&mut 1));
    assert_eq!(cursor.peek_prev(), Some(&mut 6));
    assert_eq!(cursor.index(), None);
    cursor.move_next();
    cursor.move_next();
    assert_eq!(cursor.current(), Some(&mut 2));
    assert_eq!(cursor.peek_next(), Some(&mut 3));
    assert_eq!(cursor.peek_prev(), Some(&mut 1));
    assert_eq!(cursor.index(), Some(1));
    let mut cursor2 = cursor.as_cursor();
    assert_eq!(cursor2.current(), Some(&2));
    assert_eq!(cursor2.index(), Some(1));
    cursor2.move_next();
    assert_eq!(cursor2.current(), Some(&3));
    assert_eq!(cursor2.index(), Some(2));
    assert_eq!(cursor.current(), Some(&mut 2));
    assert_eq!(cursor.index(), Some(1));

    // Same checks for the mutable back cursor.
    let mut m: LinkedList<u32> = LinkedList::new();
    m.extend(&[1, 2, 3, 4, 5, 6]);
    let mut cursor = m.cursor_back_mut();
    assert_eq!(cursor.current(), Some(&mut 6));
    assert_eq!(cursor.peek_next(), None);
    assert_eq!(cursor.peek_prev(), Some(&mut 5));
    assert_eq!(cursor.index(), Some(5));
    cursor.move_next();
    assert_eq!(cursor.current(), None);
    assert_eq!(cursor.peek_next(), Some(&mut 1));
    assert_eq!(cursor.peek_prev(), Some(&mut 6));
    assert_eq!(cursor.index(), None);
    cursor.move_prev();
    cursor.move_prev();
    assert_eq!(cursor.current(), Some(&mut 5));
    assert_eq!(cursor.peek_next(), Some(&mut 6));
    assert_eq!(cursor.peek_prev(), Some(&mut 4));
    assert_eq!(cursor.index(), Some(4));
    let mut cursor2 = cursor.as_cursor();
    assert_eq!(cursor2.current(), Some(&5));
    assert_eq!(cursor2.index(), Some(4));
    cursor2.move_prev();
    assert_eq!(cursor2.current(), Some(&4));
    assert_eq!(cursor2.index(), Some(3));
    assert_eq!(cursor.current(), Some(&mut 5));
    assert_eq!(cursor.index(), Some(4));
}
29128
#[test]
fn test_cursor_mut_insert() {
    let mut m: LinkedList<u32> = LinkedList::new();
    m.extend(&[1, 2, 3, 4, 5, 6]);
    let mut cursor = m.cursor_front_mut();
    // Insert on both sides of the current element (1).
    cursor.insert_before(7);
    cursor.insert_after(8);
    check_links(&m);
    assert_eq!(m.iter().cloned().collect::<Vec<_>>(), &[7, 1, 8, 2, 3, 4, 5, 6]);
    // Inserting around the ghost element appends/prepends to the list.
    let mut cursor = m.cursor_front_mut();
    cursor.move_prev();
    cursor.insert_before(9);
    cursor.insert_after(10);
    check_links(&m);
    assert_eq!(m.iter().cloned().collect::<Vec<_>>(), &[10, 7, 1, 8, 2, 3, 4, 5, 6, 9]);
    // `remove_current` on the ghost is a no-op returning `None`.
    let mut cursor = m.cursor_front_mut();
    cursor.move_prev();
    assert_eq!(cursor.remove_current(), None);
    cursor.move_next();
    cursor.move_next();
    assert_eq!(cursor.remove_current(), Some(7));
    cursor.move_prev();
    cursor.move_prev();
    cursor.move_prev();
    assert_eq!(cursor.remove_current(), Some(9));
    cursor.move_next();
    assert_eq!(cursor.remove_current(), Some(10));
    check_links(&m);
    assert_eq!(m.iter().cloned().collect::<Vec<_>>(), &[1, 8, 2, 3, 4, 5, 6]);
    // Splice whole lists in after/before the current element.
    let mut cursor = m.cursor_front_mut();
    let mut p: LinkedList<u32> = LinkedList::new();
    p.extend(&[100, 101, 102, 103]);
    let mut q: LinkedList<u32> = LinkedList::new();
    q.extend(&[200, 201, 202, 203]);
    cursor.splice_after(p);
    cursor.splice_before(q);
    check_links(&m);
    assert_eq!(
        m.iter().cloned().collect::<Vec<_>>(),
        &[200, 201, 202, 203, 1, 100, 101, 102, 103, 8, 2, 3, 4, 5, 6]
    );
    // Splitting before the ghost moves the entire list into the result.
    let mut cursor = m.cursor_front_mut();
    cursor.move_prev();
    let tmp = cursor.split_before();
    assert_eq!(m.into_iter().collect::<Vec<_>>(), &[]);
    m = tmp;
    let mut cursor = m.cursor_front_mut();
    cursor.move_next();
    cursor.move_next();
    cursor.move_next();
    cursor.move_next();
    cursor.move_next();
    cursor.move_next();
    let tmp = cursor.split_after();
    assert_eq!(tmp.into_iter().collect::<Vec<_>>(), &[102, 103, 8, 2, 3, 4, 5, 6]);
    check_links(&m);
    assert_eq!(m.iter().cloned().collect::<Vec<_>>(), &[200, 201, 202, 203, 1, 100, 101]);
}
29187use super::merge_iter::MergeIterInner;
29188use super::node::{self, Root};
29189use core::iter::FusedIterator;
29190
impl<K, V> Root<K, V> {
    /// Appends all key-value pairs from the union of two ascending iterators,
    /// incrementing a `length` variable along the way. The latter makes it
    /// easier for the caller to avoid a leak when a drop handler panicks.
    ///
    /// If both iterators produce the same key, this method drops the pair from
    /// the left iterator and appends the pair from the right iterator.
    ///
    /// If you want the tree to end up in a strictly ascending order, like for
    /// a `BTreeMap`, both iterators should produce keys in strictly ascending
    /// order, each greater than all keys in the tree, including any keys
    /// already in the tree upon entry.
    pub fn append_from_sorted_iters<I>(&mut self, left: I, right: I, length: &mut usize)
    where
        K: Ord,
        I: Iterator<Item = (K, V)> + FusedIterator,
    {
        // We prepare to merge `left` and `right` into a sorted sequence in linear time.
        let iter = MergeIter(MergeIterInner::new(left, right));

        // Meanwhile, we build a tree from the sorted sequence in linear time.
        self.bulk_push(iter, length)
    }

    /// Pushes all key-value pairs to the end of the tree, incrementing a
    /// `length` variable along the way. The latter makes it easier for the
    /// caller to avoid a leak when the iterator panicks.
    pub fn bulk_push<I>(&mut self, iter: I, length: &mut usize)
    where
        I: Iterator<Item = (K, V)>,
    {
        // Start at the right-most leaf, where new maximal keys belong.
        let mut cur_node = self.borrow_mut().last_leaf_edge().into_node();
        // Iterate through all key-value pairs, pushing them into nodes at the right level.
        for (key, value) in iter {
            // Try to push key-value pair into the current leaf node.
            if cur_node.len() < node::CAPACITY {
                cur_node.push(key, value);
            } else {
                // No space left, go up and push there.
                let mut open_node;
                let mut test_node = cur_node.forget_type();
                loop {
                    match test_node.ascend() {
                        Ok(parent) => {
                            let parent = parent.into_node();
                            if parent.len() < node::CAPACITY {
                                // Found a node with space left, push here.
                                open_node = parent;
                                break;
                            } else {
                                // Go up again.
                                test_node = parent.forget_type();
                            }
                        }
                        Err(_) => {
                            // We are at the top, create a new root node and push there.
                            open_node = self.push_internal_level();
                            break;
                        }
                    }
                }

                // Push key-value pair and new right subtree.
                // The subtree pushed next to the pair must sit one level below
                // `open_node`, so give it `height - 1` internal levels.
                let tree_height = open_node.height() - 1;
                let mut right_tree = Root::new();
                for _ in 0..tree_height {
                    right_tree.push_internal_level();
                }
                open_node.push(key, value, right_tree);

                // Go down to the right-most leaf again.
                cur_node = open_node.forget_type().last_leaf_edge().into_node();
            }

            // Increment length every iteration, to make sure the map drops
            // the appended elements even if advancing the iterator panicks.
            *length += 1;
        }
        // NOTE(review): presumably repairs underfull nodes along the right
        // border created by the pushes above — confirm in its definition.
        self.fix_right_border_of_plentiful();
    }
}
29272
// An iterator for merging two sorted sequences into one.
// Thin wrapper around `MergeIterInner` that supplies the key comparison.
struct MergeIter<K, V, I: Iterator<Item = (K, V)>>(MergeIterInner<I>);
29275
29276impl<K: Ord, V, I> Iterator for MergeIter<K, V, I>
29277where
29278    I: Iterator<Item = (K, V)> + FusedIterator,
29279{
29280    type Item = (K, V);
29281
29282    /// If two keys are equal, returns the key-value pair from the right source.
29283    fn next(&mut self) -> Option<(K, V)> {
29284        let (a_next, b_next) = self.0.nexts(|a: &(K, V), b: &(K, V)| K::cmp(&a.0, &b.0));
29285        b_next.or(a_next)
29286    }
29287}
29288use crate::fmt::Debug;
29289use std::cmp::Ordering;
29290use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
29291
/// A blueprint for crash test dummy instances that monitor particular events.
/// Some instances may be configured to panic at some point.
/// Events are `clone`, `drop` or some anonymous `query`.
///
/// Crash test dummies are identified and ordered by an id, so they can be used
/// as keys in a BTreeMap. The implementation intentionally does not rely
/// on anything defined in the crate, apart from the `Debug` trait.
#[derive(Debug)]
pub struct CrashTestDummy {
    // Identity: determines the ordering and equality of spawned instances.
    id: usize,
    // Event counters; atomics so instances can record events through `&self`.
    cloned: AtomicUsize,
    dropped: AtomicUsize,
    queried: AtomicUsize,
}
29306
impl CrashTestDummy {
    /// Creates a crash test dummy design. The `id` determines order and equality of instances.
    pub fn new(id: usize) -> CrashTestDummy {
        // All counters start at zero; they are bumped by spawned instances.
        CrashTestDummy {
            id,
            cloned: AtomicUsize::new(0),
            dropped: AtomicUsize::new(0),
            queried: AtomicUsize::new(0),
        }
    }

    /// Creates an instance of a crash test dummy that records what events it experiences
    /// and optionally panics.
    pub fn spawn(&self, panic: Panic) -> Instance<'_> {
        Instance { origin: self, panic }
    }

    /// Returns how many times instances of the dummy have been cloned.
    pub fn cloned(&self) -> usize {
        self.cloned.load(SeqCst)
    }

    /// Returns how many times instances of the dummy have been dropped.
    pub fn dropped(&self) -> usize {
        self.dropped.load(SeqCst)
    }

    /// Returns how many times instances of the dummy have had their `query` member invoked.
    pub fn queried(&self) -> usize {
        self.queried.load(SeqCst)
    }
}
29339
#[derive(Debug)]
pub struct Instance<'a> {
    // The shared blueprint whose event counters this instance updates.
    origin: &'a CrashTestDummy,
    // The single event, if any, during which this instance panics.
    panic: Panic,
}

// Selects which event (if any) makes an instance panic.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Panic {
    Never,
    InClone,
    InDrop,
    InQuery,
}
29353
29354impl Instance<'_> {
29355    pub fn id(&self) -> usize {
29356        self.origin.id
29357    }
29358
29359    /// Some anonymous query, the result of which is already given.
29360    pub fn query<R>(&self, result: R) -> R {
29361        self.origin.queried.fetch_add(1, SeqCst);
29362        if self.panic == Panic::InQuery {
29363            panic!("panic in `query`");
29364        }
29365        result
29366    }
29367}
29368
29369impl Clone for Instance<'_> {
29370    fn clone(&self) -> Self {
29371        self.origin.cloned.fetch_add(1, SeqCst);
29372        if self.panic == Panic::InClone {
29373            panic!("panic in `clone`");
29374        }
29375        Self { origin: self.origin, panic: Panic::Never }
29376    }
29377}
29378
29379impl Drop for Instance<'_> {
29380    fn drop(&mut self) {
29381        self.origin.dropped.fetch_add(1, SeqCst);
29382        if self.panic == Panic::InDrop {
29383            panic!("panic in `drop`");
29384        }
29385    }
29386}
29387
// Instances compare solely by their blueprint id, so they can be used as
// keys in ordered collections regardless of their panic configuration.
impl PartialOrd for Instance<'_> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.id().partial_cmp(&other.id())
    }
}

impl Ord for Instance<'_> {
    fn cmp(&self, other: &Self) -> Ordering {
        self.id().cmp(&other.id())
    }
}

impl PartialEq for Instance<'_> {
    fn eq(&self, other: &Self) -> bool {
        self.id().eq(&other.id())
    }
}

impl Eq for Instance<'_> {}
/// XorShiftRng
pub struct DeterministicRng {
    // Number of values handed out; guards the uniqueness limit in `next`.
    count: usize,
    // The four words of xorshift state.
    x: u32,
    y: u32,
    z: u32,
    w: u32,
}
29415
29416impl DeterministicRng {
29417    pub fn new() -> Self {
29418        DeterministicRng { count: 0, x: 0x193a6754, y: 0xa8a7d469, z: 0x97830e05, w: 0x113ba7bb }
29419    }
29420
29421    /// Guarantees that each returned number is unique.
29422    pub fn next(&mut self) -> u32 {
29423        self.count += 1;
29424        assert!(self.count <= 70029);
29425        let x = self.x;
29426        let t = x ^ (x << 11);
29427        self.x = self.y;
29428        self.y = self.z;
29429        self.z = self.w;
29430        let w_ = self.w;
29431        self.w = w_ ^ (w_ >> 19) ^ (t ^ (t >> 8));
29432        self.w
29433    }
29434}
29435use std::cell::Cell;
29436use std::cmp::Ordering::{self, *};
29437use std::ptr;
29438
// Minimal type whose `Ord` implementation deliberately violates transitivity.
#[derive(Debug)]
pub enum Cyclic3 {
    A,
    B,
    C,
}
use Cyclic3::*;

impl PartialOrd for Cyclic3 {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Cyclic3 {
    /// Intentionally cyclic: A < B, B < C, yet C < A.
    fn cmp(&self, other: &Self) -> Ordering {
        match (self, other) {
            (A, B) | (B, C) | (C, A) => Less,
            (B, A) | (C, B) | (A, C) => Greater,
            (A, A) | (B, B) | (C, C) => Equal,
        }
    }
}

impl PartialEq for Cyclic3 {
    fn eq(&self, other: &Self) -> bool {
        matches!(self.cmp(other), Equal)
    }
}

impl Eq for Cyclic3 {}
29471
// Controls the ordering of values wrapped by `Governed`.
#[derive(Debug)]
pub struct Governor {
    flipped: Cell<bool>,
}

impl Governor {
    /// Starts out in the non-flipped (natural order) state.
    pub fn new() -> Self {
        Self { flipped: Cell::new(false) }
    }

    /// Toggles between natural and reversed ordering.
    pub fn flip(&self) {
        let current = self.flipped.get();
        self.flipped.set(!current);
    }
}
29487
29488// Type with an `Ord` implementation that forms a total order at any moment
29489// (assuming that `T` respects total order), but can suddenly be made to invert
29490// that total order.
29491#[derive(Debug)]
29492pub struct Governed<'a, T>(pub T, pub &'a Governor);
29493
29494impl<T: Ord> PartialOrd for Governed<'_, T> {
29495    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
29496        Some(self.cmp(other))
29497    }
29498}
29499
29500impl<T: Ord> Ord for Governed<'_, T> {
29501    fn cmp(&self, other: &Self) -> Ordering {
29502        assert!(ptr::eq(self.1, other.1));
29503        let ord = self.0.cmp(&other.0);
29504        if self.1.flipped.get() { ord.reverse() } else { ord }
29505    }
29506}
29507
29508impl<T: PartialEq> PartialEq for Governed<'_, T> {
29509    fn eq(&self, other: &Self) -> bool {
29510        assert!(ptr::eq(self.1, other.1));
29511        self.0.eq(&other.0)
29512    }
29513}
29514
29515impl<T: Eq> Eq for Governed<'_, T> {}
29516pub mod crash_test;
29517pub mod ord_chaos;
29518pub mod rng;
29519use core::borrow::Borrow;
29520use core::cmp::Ordering;
29521use core::ops::{Bound, RangeBounds};
29522
29523use super::node::{marker, ForceResult::*, Handle, NodeRef};
29524
29525use SearchBound::*;
29526use SearchResult::*;
29527
pub enum SearchBound<T> {
    /// An inclusive bound to look for, just like `Bound::Included(T)`.
    Included(T),
    /// An exclusive bound to look for, just like `Bound::Excluded(T)`.
    Excluded(T),
    /// An unconditional inclusive bound, just like `Bound::Unbounded`.
    AllIncluded,
    /// An unconditional exclusive bound.
    AllExcluded,
}

impl<T> SearchBound<T> {
    /// Converts one end of a range into the equivalent `SearchBound`.
    pub fn from_range(range_bound: Bound<T>) -> Self {
        match range_bound {
            Bound::Included(t) => Self::Included(t),
            Bound::Excluded(t) => Self::Excluded(t),
            Bound::Unbounded => Self::AllIncluded,
        }
    }
}
29548
/// The outcome of searching for a key: either the KV where it lives, or the
/// edge to descend into / insert at when the key is absent.
pub enum SearchResult<BorrowType, K, V, FoundType, GoDownType> {
    /// The key was found at this KV handle.
    Found(Handle<NodeRef<BorrowType, K, V, FoundType>, marker::KV>),
    /// The key was not found; this is the edge where it would belong.
    GoDown(Handle<NodeRef<BorrowType, K, V, GoDownType>, marker::Edge>),
}
29553
/// The outcome of `find_key_index`: either the index of an existing KV,
/// or the index of the edge where the key would belong.
pub enum IndexResult {
    /// Index of a KV whose key equals the searched key.
    KV(usize),
    /// Index of the edge behind which the searched key belongs.
    Edge(usize),
}
29558
impl<BorrowType: marker::BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
    /// Looks up a given key in a (sub)tree headed by the node, recursively.
    /// Returns a `Found` with the handle of the matching KV, if any. Otherwise,
    /// returns a `GoDown` with the handle of the leaf edge where the key belongs.
    ///
    /// The result is meaningful only if the tree is ordered by key, like the tree
    /// in a `BTreeMap` is.
    pub fn search_tree<Q: ?Sized>(
        mut self,
        key: &Q,
    ) -> SearchResult<BorrowType, K, V, marker::LeafOrInternal, marker::Leaf>
    where
        Q: Ord,
        K: Borrow<Q>,
    {
        // Descend one level per iteration; reaching a leaf edge proves absence.
        loop {
            self = match self.search_node(key) {
                Found(handle) => return Found(handle),
                GoDown(handle) => match handle.force() {
                    Leaf(leaf) => return GoDown(leaf),
                    Internal(internal) => internal.descend(),
                },
            }
        }
    }

    /// Descends to the nearest node where the edge matching the lower bound
    /// of the range is different from the edge matching the upper bound, i.e.,
    /// the nearest node that has at least one key contained in the range.
    ///
    /// If found, returns an `Ok` with that node, the strictly ascending pair of
    /// edge indices in the node delimiting the range, and the corresponding
    /// pair of bounds for continuing the search in the child nodes, in case
    /// the node is internal.
    ///
    /// If not found, returns an `Err` with the leaf edge matching the entire
    /// range.
    ///
    /// As a diagnostic service, panics if the range specifies impossible bounds.
    ///
    /// The result is meaningful only if the tree is ordered by key.
    pub fn search_tree_for_bifurcation<'r, Q: ?Sized, R>(
        mut self,
        range: &'r R,
    ) -> Result<
        (
            NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
            usize,
            usize,
            SearchBound<&'r Q>,
            SearchBound<&'r Q>,
        ),
        Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
    >
    where
        Q: Ord,
        K: Borrow<Q>,
        R: RangeBounds<Q>,
    {
        // Inlining these variables should be avoided. We assume the bounds reported by `range`
        // remain the same, but an adversarial implementation could change between calls (#81138).
        let (start, end) = (range.start_bound(), range.end_bound());
        // Reject ranges no ordered tree could satisfy.
        match (start, end) {
            (Bound::Excluded(s), Bound::Excluded(e)) if s == e => {
                panic!("range start and end are equal and excluded in BTreeMap")
            }
            (Bound::Included(s) | Bound::Excluded(s), Bound::Included(e) | Bound::Excluded(e))
                if s > e =>
            {
                panic!("range start is greater than range end in BTreeMap")
            }
            _ => {}
        }
        let mut lower_bound = SearchBound::from_range(start);
        let mut upper_bound = SearchBound::from_range(end);
        loop {
            let (lower_edge_idx, lower_child_bound) = self.find_lower_bound_index(lower_bound);
            // SAFETY: `lower_edge_idx` is a valid edge index for this node,
            // as just computed by `find_lower_bound_index` on this node.
            let (upper_edge_idx, upper_child_bound) =
                unsafe { self.find_upper_bound_index(upper_bound, lower_edge_idx) };
            if lower_edge_idx < upper_edge_idx {
                // The two bounds straddle at least one key in this node.
                return Ok((
                    self,
                    lower_edge_idx,
                    upper_edge_idx,
                    lower_child_bound,
                    upper_child_bound,
                ));
            }
            debug_assert_eq!(lower_edge_idx, upper_edge_idx);
            // Both bounds map to the same edge: the entire range lies within
            // the subtree behind that edge (or matches an empty leaf edge).
            let common_edge = unsafe { Handle::new_edge(self, lower_edge_idx) };
            match common_edge.force() {
                Leaf(common_edge) => return Err(common_edge),
                Internal(common_edge) => {
                    self = common_edge.descend();
                    lower_bound = lower_child_bound;
                    upper_bound = upper_child_bound;
                }
            }
        }
    }

    /// Finds an edge in the node delimiting the lower bound of a range.
    /// Also returns the lower bound to be used for continuing the search in
    /// the matching child node, if `self` is an internal node.
    ///
    /// The result is meaningful only if the tree is ordered by key.
    pub fn find_lower_bound_edge<'r, Q>(
        self,
        bound: SearchBound<&'r Q>,
    ) -> (Handle<Self, marker::Edge>, SearchBound<&'r Q>)
    where
        Q: ?Sized + Ord,
        K: Borrow<Q>,
    {
        let (edge_idx, bound) = self.find_lower_bound_index(bound);
        // SAFETY: `find_lower_bound_index` returns a valid edge index for `self`.
        let edge = unsafe { Handle::new_edge(self, edge_idx) };
        (edge, bound)
    }

    /// Clone of `find_lower_bound_edge` for the upper bound.
    pub fn find_upper_bound_edge<'r, Q>(
        self,
        bound: SearchBound<&'r Q>,
    ) -> (Handle<Self, marker::Edge>, SearchBound<&'r Q>)
    where
        Q: ?Sized + Ord,
        K: Borrow<Q>,
    {
        // SAFETY: 0 is a valid edge index for any node.
        let (edge_idx, bound) = unsafe { self.find_upper_bound_index(bound, 0) };
        // SAFETY: `find_upper_bound_index` returns a valid edge index for `self`.
        let edge = unsafe { Handle::new_edge(self, edge_idx) };
        (edge, bound)
    }
}
29692
impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Looks up a given key in the node, without recursion.
    /// Returns a `Found` with the handle of the matching KV, if any. Otherwise,
    /// returns a `GoDown` with the handle of the edge where the key might be found
    /// (if the node is internal) or where the key can be inserted.
    ///
    /// The result is meaningful only if the tree is ordered by key, like the tree
    /// in a `BTreeMap` is.
    pub fn search_node<Q: ?Sized>(self, key: &Q) -> SearchResult<BorrowType, K, V, Type, Type>
    where
        Q: Ord,
        K: Borrow<Q>,
    {
        // SAFETY: 0 is a valid edge index for any node.
        match unsafe { self.find_key_index(key, 0) } {
            // SAFETY: `find_key_index` only returns indices within this node.
            IndexResult::KV(idx) => Found(unsafe { Handle::new_kv(self, idx) }),
            IndexResult::Edge(idx) => GoDown(unsafe { Handle::new_edge(self, idx) }),
        }
    }

    /// Returns either the KV index in the node at which the key (or an equivalent)
    /// exists, or the edge index where the key belongs, starting from a particular index.
    ///
    /// The result is meaningful only if the tree is ordered by key, like the tree
    /// in a `BTreeMap` is.
    ///
    /// # Safety
    /// `start_index` must be a valid edge index for the node.
    unsafe fn find_key_index<Q: ?Sized>(&self, key: &Q, start_index: usize) -> IndexResult
    where
        Q: Ord,
        K: Borrow<Q>,
    {
        let node = self.reborrow();
        let keys = node.keys();
        debug_assert!(start_index <= keys.len());
        // Linear scan: stop at the first key that is not less than `key`.
        // SAFETY: `start_index <= keys.len()` by the caller's contract
        // (debug-asserted above).
        for (offset, k) in unsafe { keys.get_unchecked(start_index..) }.iter().enumerate() {
            match key.cmp(k.borrow()) {
                Ordering::Greater => {}
                Ordering::Equal => return IndexResult::KV(start_index + offset),
                Ordering::Less => return IndexResult::Edge(start_index + offset),
            }
        }
        // `key` is greater than every key scanned: it belongs behind the last edge.
        IndexResult::Edge(keys.len())
    }

    /// Finds an edge index in the node delimiting the lower bound of a range.
    /// Also returns the lower bound to be used for continuing the search in
    /// the matching child node, if `self` is an internal node.
    ///
    /// The result is meaningful only if the tree is ordered by key.
    fn find_lower_bound_index<'r, Q>(
        &self,
        bound: SearchBound<&'r Q>,
    ) -> (usize, SearchBound<&'r Q>)
    where
        Q: ?Sized + Ord,
        K: Borrow<Q>,
    {
        match bound {
            // SAFETY: 0 is a valid edge index for any node.
            Included(key) => match unsafe { self.find_key_index(key, 0) } {
                // The bound's key sits in this node; nothing in the child
                // to the left of it is in range.
                IndexResult::KV(idx) => (idx, AllExcluded),
                IndexResult::Edge(idx) => (idx, bound),
            },
            // SAFETY: 0 is a valid edge index for any node.
            Excluded(key) => match unsafe { self.find_key_index(key, 0) } {
                // The excluded key sits in this node; everything in the child
                // to the right of it is in range.
                IndexResult::KV(idx) => (idx + 1, AllIncluded),
                IndexResult::Edge(idx) => (idx, bound),
            },
            AllIncluded => (0, AllIncluded),
            AllExcluded => (self.len(), AllExcluded),
        }
    }

    /// Mirror image of `find_lower_bound_index` for the upper bound,
    /// with an additional parameter to skip part of the key array.
    ///
    /// # Safety
    /// `start_index` must be a valid edge index for the node.
    unsafe fn find_upper_bound_index<'r, Q>(
        &self,
        bound: SearchBound<&'r Q>,
        start_index: usize,
    ) -> (usize, SearchBound<&'r Q>)
    where
        Q: ?Sized + Ord,
        K: Borrow<Q>,
    {
        match bound {
            // SAFETY: `start_index` is valid per this function's own contract.
            Included(key) => match unsafe { self.find_key_index(key, start_index) } {
                IndexResult::KV(idx) => (idx + 1, AllExcluded),
                IndexResult::Edge(idx) => (idx, bound),
            },
            // SAFETY: `start_index` is valid per this function's own contract.
            Excluded(key) => match unsafe { self.find_key_index(key, start_index) } {
                IndexResult::KV(idx) => (idx, AllIncluded),
                IndexResult::Edge(idx) => (idx, bound),
            },
            AllIncluded => (self.len(), AllIncluded),
            AllExcluded => (start_index, AllExcluded),
        }
    }
}
29793use core::borrow::Borrow;
29794use core::cmp::Ordering;
29795use core::fmt::{self, Debug};
29796use core::hash::{Hash, Hasher};
29797use core::iter::{FromIterator, FusedIterator};
29798use core::marker::PhantomData;
29799use core::mem::{self, ManuallyDrop};
29800use core::ops::{Index, RangeBounds};
29801use core::ptr;
29802
29803use super::borrow::DormantMutRef;
29804use super::navigate::LeafRange;
29805use super::node::{self, marker, ForceResult::*, Handle, NodeRef, Root};
29806use super::search::SearchResult::*;
29807
29808mod entry;
29809pub use entry::{Entry, OccupiedEntry, OccupiedError, VacantEntry};
29810use Entry::*;
29811
/// Minimum number of elements in nodes that are not a root.
/// We might temporarily have fewer elements during methods.
//
// The concrete value is dictated by the node layout; see
// `node::MIN_LEN_AFTER_SPLIT`.
pub(super) const MIN_LEN: usize = node::MIN_LEN_AFTER_SPLIT;
29815
29816// A tree in a `BTreeMap` is a tree in the `node` module with additional invariants:
29817// - Keys must appear in ascending order (according to the key's type).
29818// - If the root node is internal, it must contain at least 1 element.
29819// - Every non-root node contains at least MIN_LEN elements.
29820//
29821// An empty map may be represented both by the absence of a root node or by a
29822// root node that is an empty leaf.
29823
29824/// A map based on a [B-Tree].
29825///
29826/// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing
29827/// the amount of work performed in a search. In theory, a binary search tree (BST) is the optimal
29828/// choice for a sorted map, as a perfectly balanced BST performs the theoretical minimum amount of
29829/// comparisons necessary to find an element (log<sub>2</sub>n). However, in practice the way this
29830/// is done is *very* inefficient for modern computer architectures. In particular, every element
29831/// is stored in its own individually heap-allocated node. This means that every single insertion
29832/// triggers a heap-allocation, and every single comparison should be a cache-miss. Since these
29833/// are both notably expensive things to do in practice, we are forced to at very least reconsider
29834/// the BST strategy.
29835///
29836/// A B-Tree instead makes each node contain B-1 to 2B-1 elements in a contiguous array. By doing
29837/// this, we reduce the number of allocations by a factor of B, and improve cache efficiency in
29838/// searches. However, this does mean that searches will have to do *more* comparisons on average.
29839/// The precise number of comparisons depends on the node search strategy used. For optimal cache
29840/// efficiency, one could search the nodes linearly. For optimal comparisons, one could search
29841/// the node using binary search. As a compromise, one could also perform a linear search
29842/// that initially only checks every i<sup>th</sup> element for some choice of i.
29843///
29844/// Currently, our implementation simply performs naive linear search. This provides excellent
29845/// performance on *small* nodes of elements which are cheap to compare. However in the future we
29846/// would like to further explore choosing the optimal search strategy based on the choice of B,
29847/// and possibly other factors. Using linear search, searching for a random element is expected
29848/// to take O(B * log(n)) comparisons, which is generally worse than a BST. In practice,
29849/// however, performance is excellent.
29850///
29851/// It is a logic error for a key to be modified in such a way that the key's ordering relative to
29852/// any other key, as determined by the [`Ord`] trait, changes while it is in the map. This is
29853/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
29854/// The behavior resulting from such a logic error is not specified, but will not result in
29855/// undefined behavior. This could include panics, incorrect results, aborts, memory leaks, and
29856/// non-termination.
29857///
29858/// [B-Tree]: https://en.wikipedia.org/wiki/B-tree
29859/// [`Cell`]: core::cell::Cell
29860/// [`RefCell`]: core::cell::RefCell
29861///
29862/// # Examples
29863///
29864/// ```
29865/// use std::collections::BTreeMap;
29866///
29867/// // type inference lets us omit an explicit type signature (which
29868/// // would be `BTreeMap<&str, &str>` in this example).
29869/// let mut movie_reviews = BTreeMap::new();
29870///
29871/// // review some movies.
29872/// movie_reviews.insert("Office Space",       "Deals with real issues in the workplace.");
29873/// movie_reviews.insert("Pulp Fiction",       "Masterpiece.");
29874/// movie_reviews.insert("The Godfather",      "Very enjoyable.");
29875/// movie_reviews.insert("The Blues Brothers", "Eye lyked it a lot.");
29876///
29877/// // check for a specific one.
29878/// if !movie_reviews.contains_key("Les Misérables") {
29879///     println!("We've got {} reviews, but Les Misérables ain't one.",
29880///              movie_reviews.len());
29881/// }
29882///
29883/// // oops, this review has a lot of spelling mistakes, let's delete it.
29884/// movie_reviews.remove("The Blues Brothers");
29885///
29886/// // look up the values associated with some keys.
29887/// let to_find = ["Up!", "Office Space"];
29888/// for movie in &to_find {
29889///     match movie_reviews.get(movie) {
29890///        Some(review) => println!("{}: {}", movie, review),
29891///        None => println!("{} is unreviewed.", movie)
29892///     }
29893/// }
29894///
29895/// // Look up the value for a key (will panic if the key is not found).
29896/// println!("Movie review: {}", movie_reviews["Office Space"]);
29897///
29898/// // iterate over everything.
29899/// for (movie, review) in &movie_reviews {
29900///     println!("{}: \"{}\"", movie, review);
29901/// }
29902/// ```
29903///
29904/// `BTreeMap` also implements an [`Entry API`], which allows for more complex
29905/// methods of getting, setting, updating and removing keys and their values:
29906///
29907/// [`Entry API`]: BTreeMap::entry
29908///
29909/// ```
29910/// use std::collections::BTreeMap;
29911///
29912/// // type inference lets us omit an explicit type signature (which
29913/// // would be `BTreeMap<&str, u8>` in this example).
29914/// let mut player_stats = BTreeMap::new();
29915///
29916/// fn random_stat_buff() -> u8 {
29917///     // could actually return some random value here - let's just return
29918///     // some fixed value for now
29919///     42
29920/// }
29921///
29922/// // insert a key only if it doesn't already exist
29923/// player_stats.entry("health").or_insert(100);
29924///
29925/// // insert a key using a function that provides a new value only if it
29926/// // doesn't already exist
29927/// player_stats.entry("defence").or_insert_with(random_stat_buff);
29928///
29929/// // update a key, guarding against the key possibly not being set
29930/// let stat = player_stats.entry("attack").or_insert(100);
29931/// *stat += random_stat_buff();
29932/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "BTreeMap")]
pub struct BTreeMap<K, V> {
    /// `None` for an empty map; note that an empty map may also be
    /// represented by a root that is an empty leaf (see the invariants above).
    root: Option<Root<K, V>>,
    /// Number of key-value pairs stored in the map.
    length: usize,
}
29939
#[stable(feature = "btree_drop", since = "1.7.0")]
unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap<K, V> {
    fn drop(&mut self) {
        if let Some(root) = self.root.take() {
            // Build a `Dropper` over the whole tree and let the temporary fall
            // out of scope immediately: its own `Drop` impl (not shown here)
            // is responsible for freeing the remaining elements and nodes.
            Dropper { front: root.into_dying().first_leaf_edge(), remaining_length: self.length };
        }
    }
}
29948
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
    fn clone(&self) -> BTreeMap<K, V> {
        // Recursively clones the subtree headed by `node` into a fresh map.
        fn clone_subtree<'a, K: Clone, V: Clone>(
            node: NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>,
        ) -> BTreeMap<K, V>
        where
            K: 'a,
            V: 'a,
        {
            match node.force() {
                Leaf(leaf) => {
                    let mut out_tree = BTreeMap { root: Some(Root::new()), length: 0 };

                    {
                        let root = out_tree.root.as_mut().unwrap(); // unwrap succeeds because we just wrapped
                        let mut out_node = match root.borrow_mut().force() {
                            Leaf(leaf) => leaf,
                            Internal(_) => unreachable!(),
                        };

                        // Copy the leaf's KVs, in order, into the fresh leaf root.
                        let mut in_edge = leaf.first_edge();
                        while let Ok(kv) = in_edge.right_kv() {
                            let (k, v) = kv.into_kv();
                            in_edge = kv.right_edge();

                            out_node.push(k.clone(), v.clone());
                            out_tree.length += 1;
                        }
                    }

                    out_tree
                }
                Internal(internal) => {
                    // Clone the leftmost child subtree first, then grow the
                    // output tree by one level and append (key, value, subtree)
                    // triples for the remaining children.
                    let mut out_tree = clone_subtree(internal.first_edge().descend());

                    {
                        let out_root = BTreeMap::ensure_is_owned(&mut out_tree.root);
                        let mut out_node = out_root.push_internal_level();
                        let mut in_edge = internal.first_edge();
                        while let Ok(kv) = in_edge.right_kv() {
                            let (k, v) = kv.into_kv();
                            in_edge = kv.right_edge();

                            let k = (*k).clone();
                            let v = (*v).clone();
                            let subtree = clone_subtree(in_edge.descend());

                            // We can't destructure subtree directly
                            // because BTreeMap implements Drop
                            let (subroot, sublength) = unsafe {
                                // SAFETY: `subtree` is wrapped in `ManuallyDrop`,
                                // so moving `root` out with `ptr::read` cannot
                                // lead to a double drop.
                                let subtree = ManuallyDrop::new(subtree);
                                let root = ptr::read(&subtree.root);
                                let length = subtree.length;
                                (root, length)
                            };

                            out_node.push(k, v, subroot.unwrap_or_else(Root::new));
                            out_tree.length += 1 + sublength;
                        }
                    }

                    out_tree
                }
            }
        }

        if self.is_empty() {
            // Ideally we'd call `BTreeMap::new` here, but that has the `K:
            // Ord` constraint, which this method lacks.
            BTreeMap { root: None, length: 0 }
        } else {
            clone_subtree(self.root.as_ref().unwrap().reborrow()) // unwrap succeeds because not empty
        }
    }
}
30025
impl<K, Q: ?Sized> super::Recover<Q> for BTreeMap<K, ()>
where
    K: Borrow<Q> + Ord,
    Q: Ord,
{
    type Key = K;

    /// Returns a reference to the stored key equal to `key`, if any.
    fn get(&self, key: &Q) -> Option<&K> {
        let root_node = self.root.as_ref()?.reborrow();
        match root_node.search_tree(key) {
            Found(handle) => Some(handle.into_kv().0),
            GoDown(_) => None,
        }
    }

    /// Removes and returns the stored key equal to `key`, if any.
    fn take(&mut self, key: &Q) -> Option<K> {
        let (map, dormant_map) = DormantMutRef::new(self);
        let root_node = map.root.as_mut()?.borrow_mut();
        match root_node.search_tree(key) {
            Found(handle) => {
                // Reuse the entry machinery so the removal also fixes up the tree.
                Some(OccupiedEntry { handle, dormant_map, _marker: PhantomData }.remove_kv().0)
            }
            GoDown(_) => None,
        }
    }

    /// Inserts `key` (with unit value), returning the previously stored
    /// equal key, if any.
    fn replace(&mut self, key: K) -> Option<K> {
        let (map, dormant_map) = DormantMutRef::new(self);
        let root_node = Self::ensure_is_owned(&mut map.root).borrow_mut();
        match root_node.search_tree::<K>(&key) {
            Found(mut kv) => Some(mem::replace(kv.key_mut(), key)),
            GoDown(handle) => {
                VacantEntry { key, handle, dormant_map, _marker: PhantomData }.insert(());
                None
            }
        }
    }
}
30064
/// An iterator over the entries of a `BTreeMap`.
///
/// This `struct` is created by the [`iter`] method on [`BTreeMap`]. See its
/// documentation for more.
///
/// [`iter`]: BTreeMap::iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, K: 'a, V: 'a> {
    /// Underlying range over the map's entries.
    range: Range<'a, K, V>,
    /// Number of entries remaining in `range`.
    length: usize,
}
30076
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for Iter<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Shows the remaining entries as a debug list, iterating a clone
        // so the original iterator is not consumed.
        f.debug_list().entries(self.clone()).finish()
    }
}
30083
/// A mutable iterator over the entries of a `BTreeMap`.
///
/// This `struct` is created by the [`iter_mut`] method on [`BTreeMap`]. See its
/// documentation for more.
///
/// [`iter_mut`]: BTreeMap::iter_mut
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct IterMut<'a, K: 'a, V: 'a> {
    /// Underlying mutable range over the map's entries.
    range: RangeMut<'a, K, V>,
    /// Number of entries remaining in `range`.
    length: usize,
}
30096
/// An owning iterator over the entries of a `BTreeMap`.
///
/// This `struct` is created by the [`into_iter`] method on [`BTreeMap`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`into_iter`]: IntoIterator::into_iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<K, V> {
    /// Range over the tree being consumed (`marker::Dying`).
    range: LeafRange<marker::Dying, K, V>,
    /// Number of entries remaining in `range`.
    length: usize,
}
30108
30109impl<K, V> IntoIter<K, V> {
30110    /// Returns an iterator of references over the remaining items.
30111    #[inline]
30112    pub(super) fn iter(&self) -> Iter<'_, K, V> {
30113        let range = Range { inner: self.range.reborrow() };
30114        Iter { range: range, length: self.length }
30115    }
30116}
30117
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IntoIter<K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Borrow the remaining entries (via `iter`) instead of consuming them.
        f.debug_list().entries(self.iter()).finish()
    }
}
30124
/// A simplified version of `IntoIter` that is not double-ended and has only one
/// purpose: to drop the remainder of an `IntoIter`. Therefore it also serves to
/// drop an entire tree without the need to first look up a `back` leaf edge.
struct Dropper<K, V> {
    /// Leaf edge just before the first element that still needs dropping.
    front: Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge>,
    /// Number of elements still to be dropped.
    remaining_length: usize,
}
30132
/// An iterator over the keys of a `BTreeMap`.
///
/// This `struct` is created by the [`keys`] method on [`BTreeMap`]. See its
/// documentation for more.
///
/// [`keys`]: BTreeMap::keys
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Keys<'a, K: 'a, V: 'a> {
    /// Underlying full-entry iterator; only the keys are exposed.
    inner: Iter<'a, K, V>,
}

#[stable(feature = "collection_debug", since = "1.17.0")]
impl<K: fmt::Debug, V> fmt::Debug for Keys<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Shows the remaining keys as a debug list, iterating a clone.
        f.debug_list().entries(self.clone()).finish()
    }
}
30150
/// An iterator over the values of a `BTreeMap`.
///
/// This `struct` is created by the [`values`] method on [`BTreeMap`]. See its
/// documentation for more.
///
/// [`values`]: BTreeMap::values
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Values<'a, K: 'a, V: 'a> {
    /// Underlying full-entry iterator; only the values are exposed.
    inner: Iter<'a, K, V>,
}

#[stable(feature = "collection_debug", since = "1.17.0")]
impl<K, V: fmt::Debug> fmt::Debug for Values<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Shows the remaining values as a debug list, iterating a clone.
        f.debug_list().entries(self.clone()).finish()
    }
}
30168
/// A mutable iterator over the values of a `BTreeMap`.
///
/// This `struct` is created by the [`values_mut`] method on [`BTreeMap`]. See its
/// documentation for more.
///
/// [`values_mut`]: BTreeMap::values_mut
#[stable(feature = "map_values_mut", since = "1.10.0")]
pub struct ValuesMut<'a, K: 'a, V: 'a> {
    /// Underlying mutable full-entry iterator; only the values are exposed.
    inner: IterMut<'a, K, V>,
}

#[stable(feature = "map_values_mut", since = "1.10.0")]
impl<K, V: fmt::Debug> fmt::Debug for ValuesMut<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Re-borrows the remaining entries and shows only the values.
        f.debug_list().entries(self.inner.iter().map(|(_, val)| val)).finish()
    }
}
30186
/// An owning iterator over the keys of a `BTreeMap`.
///
/// This `struct` is created by the [`into_keys`] method on [`BTreeMap`].
/// See its documentation for more.
///
/// [`into_keys`]: BTreeMap::into_keys
#[unstable(feature = "map_into_keys_values", issue = "75294")]
pub struct IntoKeys<K, V> {
    /// Underlying owning full-entry iterator; only the keys are exposed.
    inner: IntoIter<K, V>,
}

#[unstable(feature = "map_into_keys_values", issue = "75294")]
impl<K: fmt::Debug, V> fmt::Debug for IntoKeys<K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Borrows the remaining entries (via `IntoIter::iter`) and shows only the keys.
        f.debug_list().entries(self.inner.iter().map(|(key, _)| key)).finish()
    }
}
30204
/// An owning iterator over the values of a `BTreeMap`.
///
/// This `struct` is created by the [`into_values`] method on [`BTreeMap`].
/// See its documentation for more.
///
/// [`into_values`]: BTreeMap::into_values
#[unstable(feature = "map_into_keys_values", issue = "75294")]
pub struct IntoValues<K, V> {
    // Owning key-value iterator; keys are consumed and dropped, values yielded.
    inner: IntoIter<K, V>,
}
30215
30216#[unstable(feature = "map_into_keys_values", issue = "75294")]
30217impl<K, V: fmt::Debug> fmt::Debug for IntoValues<K, V> {
30218    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
30219        f.debug_list().entries(self.inner.iter().map(|(_, val)| val)).finish()
30220    }
30221}
30222
/// An iterator over a sub-range of entries in a `BTreeMap`.
///
/// This `struct` is created by the [`range`] method on [`BTreeMap`]. See its
/// documentation for more.
///
/// [`range`]: BTreeMap::range
#[stable(feature = "btree_range", since = "1.17.0")]
pub struct Range<'a, K: 'a, V: 'a> {
    // The underlying range of leaf edges, borrowed immutably.
    inner: LeafRange<marker::Immut<'a>, K, V>,
}
30233
30234#[stable(feature = "collection_debug", since = "1.17.0")]
30235impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for Range<'_, K, V> {
30236    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
30237        f.debug_list().entries(self.clone()).finish()
30238    }
30239}
30240
/// A mutable iterator over a sub-range of entries in a `BTreeMap`.
///
/// This `struct` is created by the [`range_mut`] method on [`BTreeMap`]. See its
/// documentation for more.
///
/// [`range_mut`]: BTreeMap::range_mut
#[stable(feature = "btree_range", since = "1.17.0")]
pub struct RangeMut<'a, K: 'a, V: 'a> {
    // The underlying range of leaf edges, with mutable access to values.
    inner: LeafRange<marker::ValMut<'a>, K, V>,

    // Be invariant in `K` and `V`
    _marker: PhantomData<&'a mut (K, V)>,
}
30254
30255#[stable(feature = "collection_debug", since = "1.17.0")]
30256impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for RangeMut<'_, K, V> {
30257    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
30258        let range = Range { inner: self.inner.reborrow() };
30259        f.debug_list().entries(range).finish()
30260    }
30261}
30262
impl<K, V> BTreeMap<K, V> {
    /// Makes a new, empty `BTreeMap`.
    ///
    /// Does not allocate anything on its own.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    ///
    /// // entries can now be inserted into the empty map
    /// map.insert(1, "a");
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
    pub const fn new() -> BTreeMap<K, V>
    where
        K: Ord,
    {
        // An empty map has no root node at all; nodes are allocated on insert.
        BTreeMap { root: None, length: 0 }
    }

    /// Clears the map, removing all elements.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// a.insert(1, "a");
    /// a.clear();
    /// assert!(a.is_empty());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn clear(&mut self) {
        // Overwriting `*self` drops the old map, and with it the whole tree.
        *self = BTreeMap { root: None, length: 0 };
    }

    /// Returns a reference to the value corresponding to the key.
    ///
    /// The key may be any borrowed form of the map's key type, but the ordering
    /// on the borrowed form *must* match the ordering on the key type.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.get(&1), Some(&"a"));
    /// assert_eq!(map.get(&2), None);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q> + Ord,
        Q: Ord,
    {
        // `?` returns `None` when the map is empty (no root node).
        let root_node = self.root.as_ref()?.reborrow();
        match root_node.search_tree(key) {
            Found(handle) => Some(handle.into_kv().1),
            GoDown(_) => None,
        }
    }

    /// Returns the key-value pair corresponding to the supplied key.
    ///
    /// The supplied key may be any borrowed form of the map's key type, but the ordering
    /// on the borrowed form *must* match the ordering on the key type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
    /// assert_eq!(map.get_key_value(&2), None);
    /// ```
    #[stable(feature = "map_get_key_value", since = "1.40.0")]
    pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
    where
        K: Borrow<Q> + Ord,
        Q: Ord,
    {
        let root_node = self.root.as_ref()?.reborrow();
        match root_node.search_tree(k) {
            Found(handle) => Some(handle.into_kv()),
            GoDown(_) => None,
        }
    }

    /// Returns the first key-value pair in the map.
    /// The key in this pair is the minimum key in the map.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(map_first_last)]
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// assert_eq!(map.first_key_value(), None);
    /// map.insert(1, "b");
    /// map.insert(2, "a");
    /// assert_eq!(map.first_key_value(), Some((&1, &"b")));
    /// ```
    #[unstable(feature = "map_first_last", issue = "62924")]
    pub fn first_key_value(&self) -> Option<(&K, &V)>
    where
        K: Ord,
    {
        let root_node = self.root.as_ref()?.reborrow();
        // The KV to the right of the leftmost leaf edge is the minimum entry.
        root_node.first_leaf_edge().right_kv().ok().map(Handle::into_kv)
    }

    /// Returns the first entry in the map for in-place manipulation.
    /// The key of this entry is the minimum key in the map.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(map_first_last)]
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// map.insert(1, "a");
    /// map.insert(2, "b");
    /// if let Some(mut entry) = map.first_entry() {
    ///     if *entry.key() > 0 {
    ///         entry.insert("first");
    ///     }
    /// }
    /// assert_eq!(*map.get(&1).unwrap(), "first");
    /// assert_eq!(*map.get(&2).unwrap(), "b");
    /// ```
    #[unstable(feature = "map_first_last", issue = "62924")]
    pub fn first_entry(&mut self) -> Option<OccupiedEntry<'_, K, V>>
    where
        K: Ord,
    {
        // Split the borrow: `map` is used now to reach the first KV, while
        // `dormant_map` is stored in the entry for later access to the map.
        let (map, dormant_map) = DormantMutRef::new(self);
        let root_node = map.root.as_mut()?.borrow_mut();
        let kv = root_node.first_leaf_edge().right_kv().ok()?;
        Some(OccupiedEntry { handle: kv.forget_node_type(), dormant_map, _marker: PhantomData })
    }

    /// Removes and returns the first element in the map.
    /// The key of this element is the minimum key that was in the map.
    ///
    /// # Examples
    ///
    /// Draining elements in ascending order, while keeping a usable map each iteration.
    ///
    /// ```
    /// #![feature(map_first_last)]
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// map.insert(1, "a");
    /// map.insert(2, "b");
    /// while let Some((key, _val)) = map.pop_first() {
    ///     assert!(map.iter().all(|(k, _v)| *k > key));
    /// }
    /// assert!(map.is_empty());
    /// ```
    #[unstable(feature = "map_first_last", issue = "62924")]
    pub fn pop_first(&mut self) -> Option<(K, V)>
    where
        K: Ord,
    {
        self.first_entry().map(|entry| entry.remove_entry())
    }

    /// Returns the last key-value pair in the map.
    /// The key in this pair is the maximum key in the map.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(map_first_last)]
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// map.insert(1, "b");
    /// map.insert(2, "a");
    /// assert_eq!(map.last_key_value(), Some((&2, &"a")));
    /// ```
    #[unstable(feature = "map_first_last", issue = "62924")]
    pub fn last_key_value(&self) -> Option<(&K, &V)>
    where
        K: Ord,
    {
        let root_node = self.root.as_ref()?.reborrow();
        // The KV to the left of the rightmost leaf edge is the maximum entry.
        root_node.last_leaf_edge().left_kv().ok().map(Handle::into_kv)
    }

    /// Returns the last entry in the map for in-place manipulation.
    /// The key of this entry is the maximum key in the map.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(map_first_last)]
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// map.insert(1, "a");
    /// map.insert(2, "b");
    /// if let Some(mut entry) = map.last_entry() {
    ///     if *entry.key() > 0 {
    ///         entry.insert("last");
    ///     }
    /// }
    /// assert_eq!(*map.get(&1).unwrap(), "a");
    /// assert_eq!(*map.get(&2).unwrap(), "last");
    /// ```
    #[unstable(feature = "map_first_last", issue = "62924")]
    pub fn last_entry(&mut self) -> Option<OccupiedEntry<'_, K, V>>
    where
        K: Ord,
    {
        // Mirror image of `first_entry`; see the comments there.
        let (map, dormant_map) = DormantMutRef::new(self);
        let root_node = map.root.as_mut()?.borrow_mut();
        let kv = root_node.last_leaf_edge().left_kv().ok()?;
        Some(OccupiedEntry { handle: kv.forget_node_type(), dormant_map, _marker: PhantomData })
    }

    /// Removes and returns the last element in the map.
    /// The key of this element is the maximum key that was in the map.
    ///
    /// # Examples
    ///
    /// Draining elements in descending order, while keeping a usable map each iteration.
    ///
    /// ```
    /// #![feature(map_first_last)]
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// map.insert(1, "a");
    /// map.insert(2, "b");
    /// while let Some((key, _val)) = map.pop_last() {
    ///     assert!(map.iter().all(|(k, _v)| *k < key));
    /// }
    /// assert!(map.is_empty());
    /// ```
    #[unstable(feature = "map_first_last", issue = "62924")]
    pub fn pop_last(&mut self) -> Option<(K, V)>
    where
        K: Ord,
    {
        self.last_entry().map(|entry| entry.remove_entry())
    }

    /// Returns `true` if the map contains a value for the specified key.
    ///
    /// The key may be any borrowed form of the map's key type, but the ordering
    /// on the borrowed form *must* match the ordering on the key type.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.contains_key(&1), true);
    /// assert_eq!(map.contains_key(&2), false);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn contains_key<Q: ?Sized>(&self, key: &Q) -> bool
    where
        K: Borrow<Q> + Ord,
        Q: Ord,
    {
        self.get(key).is_some()
    }

    /// Returns a mutable reference to the value corresponding to the key.
    ///
    /// The key may be any borrowed form of the map's key type, but the ordering
    /// on the borrowed form *must* match the ordering on the key type.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// map.insert(1, "a");
    /// if let Some(x) = map.get_mut(&1) {
    ///     *x = "b";
    /// }
    /// assert_eq!(map[&1], "b");
    /// ```
    // See `get` for implementation notes, this is basically a copy-paste with mut's added
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn get_mut<Q: ?Sized>(&mut self, key: &Q) -> Option<&mut V>
    where
        K: Borrow<Q> + Ord,
        Q: Ord,
    {
        let root_node = self.root.as_mut()?.borrow_mut();
        match root_node.search_tree(key) {
            Found(handle) => Some(handle.into_val_mut()),
            GoDown(_) => None,
        }
    }

    /// Inserts a key-value pair into the map.
    ///
    /// If the map did not have this key present, `None` is returned.
    ///
    /// If the map did have this key present, the value is updated, and the old
    /// value is returned. The key is not updated, though; this matters for
    /// types that can be `==` without being identical. See the [module-level
    /// documentation] for more.
    ///
    /// [module-level documentation]: index.html#insert-and-complex-keys
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// assert_eq!(map.insert(37, "a"), None);
    /// assert_eq!(map.is_empty(), false);
    ///
    /// map.insert(37, "b");
    /// assert_eq!(map.insert(37, "c"), Some("b"));
    /// assert_eq!(map[&37], "c");
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn insert(&mut self, key: K, value: V) -> Option<V>
    where
        K: Ord,
    {
        // Delegate to the entry API: replace in place if present, insert otherwise.
        match self.entry(key) {
            Occupied(mut entry) => Some(entry.insert(value)),
            Vacant(entry) => {
                entry.insert(value);
                None
            }
        }
    }

    /// Tries to insert a key-value pair into the map, and returns
    /// a mutable reference to the value in the entry.
    ///
    /// If the map already had this key present, nothing is updated, and
    /// an error containing the occupied entry and the value is returned.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(map_try_insert)]
    ///
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// assert_eq!(map.try_insert(37, "a").unwrap(), &"a");
    ///
    /// let err = map.try_insert(37, "b").unwrap_err();
    /// assert_eq!(err.entry.key(), &37);
    /// assert_eq!(err.entry.get(), &"a");
    /// assert_eq!(err.value, "b");
    /// ```
    #[unstable(feature = "map_try_insert", issue = "82766")]
    pub fn try_insert(&mut self, key: K, value: V) -> Result<&mut V, OccupiedError<'_, K, V>>
    where
        K: Ord,
    {
        // On collision, hand both the occupied entry and the rejected value back.
        match self.entry(key) {
            Occupied(entry) => Err(OccupiedError { entry, value }),
            Vacant(entry) => Ok(entry.insert(value)),
        }
    }

    /// Removes a key from the map, returning the value at the key if the key
    /// was previously in the map.
    ///
    /// The key may be any borrowed form of the map's key type, but the ordering
    /// on the borrowed form *must* match the ordering on the key type.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.remove(&1), Some("a"));
    /// assert_eq!(map.remove(&1), None);
    /// ```
    #[doc(alias = "delete")]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn remove<Q: ?Sized>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q> + Ord,
        Q: Ord,
    {
        // Drop the key, keep the value.
        self.remove_entry(key).map(|(_, v)| v)
    }

    /// Removes a key from the map, returning the stored key and value if the key
    /// was previously in the map.
    ///
    /// The key may be any borrowed form of the map's key type, but the ordering
    /// on the borrowed form *must* match the ordering on the key type.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.remove_entry(&1), Some((1, "a")));
    /// assert_eq!(map.remove_entry(&1), None);
    /// ```
    #[stable(feature = "btreemap_remove_entry", since = "1.45.0")]
    pub fn remove_entry<Q: ?Sized>(&mut self, key: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q> + Ord,
        Q: Ord,
    {
        // Build a transient `OccupiedEntry` so removal shares the entry API's
        // rebalancing logic; `dormant_map` lets it reach the map again later.
        let (map, dormant_map) = DormantMutRef::new(self);
        let root_node = map.root.as_mut()?.borrow_mut();
        match root_node.search_tree(key) {
            Found(handle) => {
                Some(OccupiedEntry { handle, dormant_map, _marker: PhantomData }.remove_entry())
            }
            GoDown(_) => None,
        }
    }

    /// Retains only the elements specified by the predicate.
    ///
    /// In other words, remove all pairs `(k, v)` such that `f(&k, &mut v)` returns `false`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map: BTreeMap<i32, i32> = (0..8).map(|x| (x, x*10)).collect();
    /// // Keep only the elements with even-numbered keys.
    /// map.retain(|&k, _| k % 2 == 0);
    /// assert!(map.into_iter().eq(vec![(0, 0), (2, 20), (4, 40), (6, 60)]));
    /// ```
    #[inline]
    #[stable(feature = "btree_retain", since = "1.53.0")]
    pub fn retain<F>(&mut self, mut f: F)
    where
        K: Ord,
        F: FnMut(&K, &mut V) -> bool,
    {
        // `drain_filter` removes where the closure is true, so invert `f`.
        self.drain_filter(|k, v| !f(k, v));
    }

    /// Moves all elements from `other` into `Self`, leaving `other` empty.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// a.insert(1, "a");
    /// a.insert(2, "b");
    /// a.insert(3, "c");
    ///
    /// let mut b = BTreeMap::new();
    /// b.insert(3, "d");
    /// b.insert(4, "e");
    /// b.insert(5, "f");
    ///
    /// a.append(&mut b);
    ///
    /// assert_eq!(a.len(), 5);
    /// assert_eq!(b.len(), 0);
    ///
    /// assert_eq!(a[&1], "a");
    /// assert_eq!(a[&2], "b");
    /// assert_eq!(a[&3], "d");
    /// assert_eq!(a[&4], "e");
    /// assert_eq!(a[&5], "f");
    /// ```
    #[stable(feature = "btree_append", since = "1.11.0")]
    pub fn append(&mut self, other: &mut Self)
    where
        K: Ord,
    {
        // Do we have to append anything at all?
        if other.is_empty() {
            return;
        }

        // We can just swap `self` and `other` if `self` is empty.
        if self.is_empty() {
            mem::swap(self, other);
            return;
        }

        // Take both maps apart into sorted streams and bulk-build a fresh
        // tree from them, leaving both `self` and `other` empty meanwhile.
        let self_iter = mem::take(self).into_iter();
        let other_iter = mem::take(other).into_iter();
        let root = BTreeMap::ensure_is_owned(&mut self.root);
        root.append_from_sorted_iters(self_iter, other_iter, &mut self.length)
    }

    /// Constructs a double-ended iterator over a sub-range of elements in the map.
    /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
    /// yield elements from min (inclusive) to max (exclusive).
    /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
    /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
    /// range from 4 to 10.
    ///
    /// # Panics
    ///
    /// Panics if range `start > end`.
    /// Panics if range `start == end` and both bounds are `Excluded`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    /// use std::ops::Bound::Included;
    ///
    /// let mut map = BTreeMap::new();
    /// map.insert(3, "a");
    /// map.insert(5, "b");
    /// map.insert(8, "c");
    /// for (&key, &value) in map.range((Included(&4), Included(&8))) {
    ///     println!("{}: {}", key, value);
    /// }
    /// assert_eq!(Some((&5, &"b")), map.range(4..).next());
    /// ```
    #[stable(feature = "btree_range", since = "1.17.0")]
    pub fn range<T: ?Sized, R>(&self, range: R) -> Range<'_, K, V>
    where
        T: Ord,
        K: Borrow<T> + Ord,
        R: RangeBounds<T>,
    {
        // An empty map yields an empty (none) leaf range instead of searching.
        if let Some(root) = &self.root {
            Range { inner: root.reborrow().range_search(range) }
        } else {
            Range { inner: LeafRange::none() }
        }
    }

    /// Constructs a mutable double-ended iterator over a sub-range of elements in the map.
    /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
    /// yield elements from min (inclusive) to max (exclusive).
    /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
    /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
    /// range from 4 to 10.
    ///
    /// # Panics
    ///
    /// Panics if range `start > end`.
    /// Panics if range `start == end` and both bounds are `Excluded`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map: BTreeMap<&str, i32> = ["Alice", "Bob", "Carol", "Cheryl"]
    ///     .iter()
    ///     .map(|&s| (s, 0))
    ///     .collect();
    /// for (_, balance) in map.range_mut("B".."Cheryl") {
    ///     *balance += 100;
    /// }
    /// for (name, balance) in &map {
    ///     println!("{} => {}", name, balance);
    /// }
    /// ```
    #[stable(feature = "btree_range", since = "1.17.0")]
    pub fn range_mut<T: ?Sized, R>(&mut self, range: R) -> RangeMut<'_, K, V>
    where
        T: Ord,
        K: Borrow<T> + Ord,
        R: RangeBounds<T>,
    {
        // Mutable analogue of `range`; see the comments there.
        if let Some(root) = &mut self.root {
            RangeMut { inner: root.borrow_valmut().range_search(range), _marker: PhantomData }
        } else {
            RangeMut { inner: LeafRange::none(), _marker: PhantomData }
        }
    }

    /// Gets the given key's corresponding entry in the map for in-place manipulation.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut count: BTreeMap<&str, usize> = BTreeMap::new();
    ///
    /// // count the number of occurrences of letters in the vec
    /// for x in vec!["a", "b", "a", "c", "a", "b"] {
    ///     *count.entry(x).or_insert(0) += 1;
    /// }
    ///
    /// assert_eq!(count["a"], 3);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn entry(&mut self, key: K) -> Entry<'_, K, V>
    where
        K: Ord,
    {
        // FIXME(@porglezomp) Avoid allocating if we don't insert
        let (map, dormant_map) = DormantMutRef::new(self);
        let root_node = Self::ensure_is_owned(&mut map.root).borrow_mut();
        match root_node.search_tree(&key) {
            Found(handle) => Occupied(OccupiedEntry { handle, dormant_map, _marker: PhantomData }),
            GoDown(handle) => {
                // No matching key: `handle` marks where it would be inserted.
                Vacant(VacantEntry { key, handle, dormant_map, _marker: PhantomData })
            }
        }
    }

    /// Splits the collection into two at the given key. Returns everything after the given key,
    /// including the key.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// a.insert(1, "a");
    /// a.insert(2, "b");
    /// a.insert(3, "c");
    /// a.insert(17, "d");
    /// a.insert(41, "e");
    ///
    /// let b = a.split_off(&3);
    ///
    /// assert_eq!(a.len(), 2);
    /// assert_eq!(b.len(), 3);
    ///
    /// assert_eq!(a[&1], "a");
    /// assert_eq!(a[&2], "b");
    ///
    /// assert_eq!(b[&3], "c");
    /// assert_eq!(b[&17], "d");
    /// assert_eq!(b[&41], "e");
    /// ```
    #[stable(feature = "btree_split_off", since = "1.11.0")]
    pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
    where
        K: Borrow<Q> + Ord,
    {
        if self.is_empty() {
            return Self::new();
        }

        let total_num = self.len();
        let left_root = self.root.as_mut().unwrap(); // unwrap succeeds because not empty

        let right_root = left_root.split_off(key);

        // Splitting the root doesn't track element counts, so recompute how
        // the original `total_num` is distributed over the two trees.
        let (new_left_len, right_len) = Root::calc_split_length(total_num, &left_root, &right_root);
        self.length = new_left_len;

        BTreeMap { root: Some(right_root), length: right_len }
    }

    /// Creates an iterator that visits all elements (key-value pairs) in
    /// ascending key order and uses a closure to determine if an element should
    /// be removed. If the closure returns `true`, the element is removed from
    /// the map and yielded. If the closure returns `false`, or panics, the
    /// element remains in the map and will not be yielded.
    ///
    /// The iterator also lets you mutate the value of each element in the
    /// closure, regardless of whether you choose to keep or remove it.
    ///
    /// If the iterator is only partially consumed or not consumed at all, each
    /// of the remaining elements is still subjected to the closure, which may
    /// change its value and, by returning `true`, have the element removed and
    /// dropped.
    ///
    /// It is unspecified how many more elements will be subjected to the
    /// closure if a panic occurs in the closure, or a panic occurs while
    /// dropping an element, or if the `DrainFilter` value is leaked.
    ///
    /// # Examples
    ///
    /// Splitting a map into even and odd keys, reusing the original map:
    ///
    /// ```
    /// #![feature(btree_drain_filter)]
    /// use std::collections::BTreeMap;
    ///
    /// let mut map: BTreeMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
    /// let evens: BTreeMap<_, _> = map.drain_filter(|k, _v| k % 2 == 0).collect();
    /// let odds = map;
    /// assert_eq!(evens.keys().copied().collect::<Vec<_>>(), vec![0, 2, 4, 6]);
    /// assert_eq!(odds.keys().copied().collect::<Vec<_>>(), vec![1, 3, 5, 7]);
    /// ```
    #[unstable(feature = "btree_drain_filter", issue = "70530")]
    pub fn drain_filter<F>(&mut self, pred: F) -> DrainFilter<'_, K, V, F>
    where
        K: Ord,
        F: FnMut(&K, &mut V) -> bool,
    {
        DrainFilter { pred, inner: self.drain_filter_inner() }
    }

    // Builds the predicate-independent part of `DrainFilter`, positioned at
    // the first leaf edge (ascending traversal starts there).
    pub(super) fn drain_filter_inner(&mut self) -> DrainFilterInner<'_, K, V>
    where
        K: Ord,
    {
        if let Some(root) = self.root.as_mut() {
            // Keep a dormant reference to the root so the iterator can access
            // it again later, alongside the live `length` borrow.
            let (root, dormant_root) = DormantMutRef::new(root);
            let front = root.borrow_mut().first_leaf_edge();
            DrainFilterInner {
                length: &mut self.length,
                dormant_root: Some(dormant_root),
                cur_leaf_edge: Some(front),
            }
        } else {
            DrainFilterInner { length: &mut self.length, dormant_root: None, cur_leaf_edge: None }
        }
    }

    /// Creates a consuming iterator visiting all the keys, in sorted order.
    /// The map cannot be used after calling this.
    /// The iterator element type is `K`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(map_into_keys_values)]
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// a.insert(2, "b");
    /// a.insert(1, "a");
    ///
    /// let keys: Vec<i32> = a.into_keys().collect();
    /// assert_eq!(keys, [1, 2]);
    /// ```
    #[inline]
    #[unstable(feature = "map_into_keys_values", issue = "75294")]
    pub fn into_keys(self) -> IntoKeys<K, V> {
        IntoKeys { inner: self.into_iter() }
    }

    /// Creates a consuming iterator visiting all the values, in order by key.
    /// The map cannot be used after calling this.
    /// The iterator element type is `V`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(map_into_keys_values)]
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// a.insert(1, "hello");
    /// a.insert(2, "goodbye");
    ///
    /// let values: Vec<&str> = a.into_values().collect();
    /// assert_eq!(values, ["hello", "goodbye"]);
    /// ```
    #[inline]
    #[unstable(feature = "map_into_keys_values", issue = "75294")]
    pub fn into_values(self) -> IntoValues<K, V> {
        IntoValues { inner: self.into_iter() }
    }
}
31076
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V> IntoIterator for &'a BTreeMap<K, V> {
    type Item = (&'a K, &'a V);
    type IntoIter = Iter<'a, K, V>;

    // Lets `for (k, v) in &map` iterate shared references via `BTreeMap::iter`.
    fn into_iter(self) -> Iter<'a, K, V> {
        self.iter()
    }
}
31086
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K: 'a, V: 'a> Iterator for Iter<'a, K, V> {
    type Item = (&'a K, &'a V);

    fn next(&mut self) -> Option<(&'a K, &'a V)> {
        if self.length == 0 {
            None
        } else {
            self.length -= 1;
            // SAFETY: `length` counts the elements remaining in `range` and was
            // just checked to be nonzero, so the range is not exhausted.
            Some(unsafe { self.range.next_unchecked() })
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // `length` is exact, so both bounds are tight.
        (self.length, Some(self.length))
    }

    fn last(mut self) -> Option<(&'a K, &'a V)> {
        // O(log n): jump straight to the back instead of walking forward.
        self.next_back()
    }

    fn min(mut self) -> Option<(&'a K, &'a V)> {
        // Entries are yielded in ascending key order, so the first is minimal.
        self.next()
    }

    fn max(mut self) -> Option<(&'a K, &'a V)> {
        self.next_back()
    }
}
31116
// Fused: once `length` reaches 0, `next` always returns `None`.
#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for Iter<'_, K, V> {}
31119
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K: 'a, V: 'a> DoubleEndedIterator for Iter<'a, K, V> {
    fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
        if self.length == 0 {
            None
        } else {
            self.length -= 1;
            // SAFETY: `length` was nonzero, so there is a previous element.
            Some(unsafe { self.range.next_back_unchecked() })
        }
    }
}
31131
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
    // `length` is kept exact by `next`/`next_back`, so `len` is precise.
    fn len(&self) -> usize {
        self.length
    }
}
31138
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> Clone for Iter<'_, K, V> {
    fn clone(&self) -> Self {
        // Written by hand so `Iter` is `Clone` without the `K: Clone`/`V: Clone`
        // bounds a derive would add (the iterator only holds references).
        Iter { range: self.range.clone(), length: self.length }
    }
}
31145
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V> IntoIterator for &'a mut BTreeMap<K, V> {
    type Item = (&'a K, &'a mut V);
    type IntoIter = IterMut<'a, K, V>;

    // Lets `for (k, v) in &mut map` iterate with mutable value references.
    fn into_iter(self) -> IterMut<'a, K, V> {
        self.iter_mut()
    }
}
31155
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K: 'a, V: 'a> Iterator for IterMut<'a, K, V> {
    type Item = (&'a K, &'a mut V);

    fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
        if self.length == 0 {
            None
        } else {
            self.length -= 1;
            // SAFETY: `length` was nonzero, so the range is not exhausted.
            Some(unsafe { self.range.next_unchecked() })
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // `length` is exact, so both bounds are tight.
        (self.length, Some(self.length))
    }

    fn last(mut self) -> Option<(&'a K, &'a mut V)> {
        self.next_back()
    }

    fn min(mut self) -> Option<(&'a K, &'a mut V)> {
        // Ascending key order: the first entry is the minimum.
        self.next()
    }

    fn max(mut self) -> Option<(&'a K, &'a mut V)> {
        self.next_back()
    }
}
31185
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K: 'a, V: 'a> DoubleEndedIterator for IterMut<'a, K, V> {
    fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
        if self.length == 0 {
            None
        } else {
            self.length -= 1;
            // SAFETY: `length` was nonzero, so there is a previous element.
            Some(unsafe { self.range.next_back_unchecked() })
        }
    }
}
31197
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
    // `length` is kept exact by `next`/`next_back`, so `len` is precise.
    fn len(&self) -> usize {
        self.length
    }
}
31204
// Fused: once `length` reaches 0, `next` always returns `None`.
#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for IterMut<'_, K, V> {}
31207
impl<'a, K, V> IterMut<'a, K, V> {
    /// Returns an iterator of references over the remaining items.
    ///
    /// Gives a shared-reference view of the rest of this mutable iterator
    /// without consuming or advancing it.
    #[inline]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        Iter { range: self.range.iter(), length: self.length }
    }
}
31215
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> IntoIterator for BTreeMap<K, V> {
    type Item = (K, V);
    type IntoIter = IntoIter<K, V>;

    fn into_iter(self) -> IntoIter<K, V> {
        // Suppress the map's own destructor: ownership of the tree moves into
        // the returned iterator, whose `Drop` impl deallocates the nodes.
        let mut me = ManuallyDrop::new(self);
        if let Some(root) = me.root.take() {
            let full_range = root.into_dying().full_range();

            IntoIter { range: full_range, length: me.length }
        } else {
            // Empty map: an iterator that is immediately exhausted.
            IntoIter { range: LeafRange::none(), length: 0 }
        }
    }
}
31232
impl<K, V> Drop for Dropper<K, V> {
    fn drop(&mut self) {
        // Similar to advancing a non-fusing iterator.
        fn next_or_end<K, V>(this: &mut Dropper<K, V>) -> Option<(K, V)> {
            if this.remaining_length == 0 {
                // All pairs dropped: now free the tree's node structure itself.
                // NOTE(review): `front` is moved out bitwise via `ptr::read`;
                // after `deallocating_end` both loops below stop, so the handle
                // is never touched again.
                unsafe { ptr::read(&this.front).deallocating_end() }
                None
            } else {
                this.remaining_length -= 1;
                Some(unsafe { this.front.deallocating_next_unchecked() })
            }
        }

        struct DropGuard<'a, K, V>(&'a mut Dropper<K, V>);

        impl<'a, K, V> Drop for DropGuard<'a, K, V> {
            fn drop(&mut self) {
                // Continue the same loop we perform below. This only runs when unwinding, so we
                // don't have to care about panics this time (they'll abort).
                while let Some(_pair) = next_or_end(&mut self.0) {}
            }
        }

        // Drop every remaining pair. The guard ensures that if dropping a pair
        // panics, the rest of the tree is still deallocated while unwinding.
        while let Some(pair) = next_or_end(self) {
            let guard = DropGuard(self);
            drop(pair);
            mem::forget(guard);
        }
    }
}
31263
#[stable(feature = "btree_drop", since = "1.7.0")]
impl<K, V> Drop for IntoIter<K, V> {
    fn drop(&mut self) {
        if let Some(front) = self.range.front.take() {
            // The temporary `Dropper` is dropped at the end of this statement;
            // its `Drop` impl frees the remaining pairs and the tree's nodes.
            Dropper { front, remaining_length: self.length };
        }
    }
}
31272
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> Iterator for IntoIter<K, V> {
    type Item = (K, V);

    fn next(&mut self) -> Option<(K, V)> {
        if self.length == 0 {
            None
        } else {
            self.length -= 1;
            // SAFETY: `length` was nonzero, so the front edge exists and the
            // range still contains elements.
            Some(unsafe { self.range.front.as_mut().unwrap().deallocating_next_unchecked() })
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // `length` is exact, so both bounds are tight.
        (self.length, Some(self.length))
    }
}
31290
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
    fn next_back(&mut self) -> Option<(K, V)> {
        if self.length == 0 {
            None
        } else {
            self.length -= 1;
            // SAFETY: `length` was nonzero, so the back edge exists and the
            // range still contains elements.
            Some(unsafe { self.range.back.as_mut().unwrap().deallocating_next_back_unchecked() })
        }
    }
}
31302
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> ExactSizeIterator for IntoIter<K, V> {
    // `length` is kept exact by `next`/`next_back`, so `len` is precise.
    fn len(&self) -> usize {
        self.length
    }
}
31309
// Fused: once `length` reaches 0, `next` always returns `None`.
#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for IntoIter<K, V> {}
31312
31313#[stable(feature = "rust1", since = "1.0.0")]
31314impl<'a, K, V> Iterator for Keys<'a, K, V> {
31315    type Item = &'a K;
31316
31317    fn next(&mut self) -> Option<&'a K> {
31318        self.inner.next().map(|(k, _)| k)
31319    }
31320
31321    fn size_hint(&self) -> (usize, Option<usize>) {
31322        self.inner.size_hint()
31323    }
31324
31325    fn last(mut self) -> Option<&'a K> {
31326        self.next_back()
31327    }
31328
31329    fn min(mut self) -> Option<&'a K> {
31330        self.next()
31331    }
31332
31333    fn max(mut self) -> Option<&'a K> {
31334        self.next_back()
31335    }
31336}
31337
31338#[stable(feature = "rust1", since = "1.0.0")]
31339impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> {
31340    fn next_back(&mut self) -> Option<&'a K> {
31341        self.inner.next_back().map(|(k, _)| k)
31342    }
31343}
31344
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> ExactSizeIterator for Keys<'_, K, V> {
    // Exact because the wrapped entry iterator is exact-sized.
    fn len(&self) -> usize {
        self.inner.len()
    }
}
31351
// Fused because the wrapped entry iterator (`Iter`) is fused.
#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for Keys<'_, K, V> {}
31354
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> Clone for Keys<'_, K, V> {
    // Hand-written so `Keys` is `Clone` regardless of `K: Clone`/`V: Clone`.
    fn clone(&self) -> Self {
        Keys { inner: self.inner.clone() }
    }
}
31361
31362#[stable(feature = "rust1", since = "1.0.0")]
31363impl<'a, K, V> Iterator for Values<'a, K, V> {
31364    type Item = &'a V;
31365
31366    fn next(&mut self) -> Option<&'a V> {
31367        self.inner.next().map(|(_, v)| v)
31368    }
31369
31370    fn size_hint(&self) -> (usize, Option<usize>) {
31371        self.inner.size_hint()
31372    }
31373
31374    fn last(mut self) -> Option<&'a V> {
31375        self.next_back()
31376    }
31377}
31378
31379#[stable(feature = "rust1", since = "1.0.0")]
31380impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> {
31381    fn next_back(&mut self) -> Option<&'a V> {
31382        self.inner.next_back().map(|(_, v)| v)
31383    }
31384}
31385
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> ExactSizeIterator for Values<'_, K, V> {
    // Exact because the wrapped entry iterator is exact-sized.
    fn len(&self) -> usize {
        self.inner.len()
    }
}
31392
// Fused because the wrapped entry iterator (`Iter`) is fused.
#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for Values<'_, K, V> {}
31395
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> Clone for Values<'_, K, V> {
    // Hand-written so `Values` is `Clone` regardless of `K: Clone`/`V: Clone`.
    fn clone(&self) -> Self {
        Values { inner: self.inner.clone() }
    }
}
31402
/// An iterator produced by calling `drain_filter` on BTreeMap.
#[unstable(feature = "btree_drain_filter", issue = "70530")]
pub struct DrainFilter<'a, K, V, F>
where
    K: 'a,
    V: 'a,
    F: 'a + FnMut(&K, &mut V) -> bool,
{
    /// Predicate deciding, per element, whether it is drained.
    pred: F,
    /// Predicate-agnostic iteration state, shared with `BTreeSet`'s drain filter.
    inner: DrainFilterInner<'a, K, V>,
}
/// Most of the implementation of DrainFilter is generic over the type
/// of the predicate, thus also serving for BTreeSet::DrainFilter.
pub(super) struct DrainFilterInner<'a, K: 'a, V: 'a> {
    /// Reference to the length field in the borrowed map, updated live.
    length: &'a mut usize,
    /// Buried reference to the root field in the borrowed map.
    /// Wrapped in `Option` to allow drop handler to `take` it.
    dormant_root: Option<DormantMutRef<'a, Root<K, V>>>,
    /// Contains a leaf edge preceding the next element to be returned, or the last leaf edge.
    /// Empty if the map has no root, if iteration went beyond the last leaf edge,
    /// or if a panic occurred in the predicate.
    cur_leaf_edge: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>>,
}
31427
31428#[unstable(feature = "btree_drain_filter", issue = "70530")]
31429impl<K, V, F> Drop for DrainFilter<'_, K, V, F>
31430where
31431    F: FnMut(&K, &mut V) -> bool,
31432{
31433    fn drop(&mut self) {
31434        self.for_each(drop);
31435    }
31436}
31437
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<K, V, F> fmt::Debug for DrainFilter<'_, K, V, F>
where
    K: fmt::Debug,
    V: fmt::Debug,
    F: FnMut(&K, &mut V) -> bool,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Shows the next element (if any) without advancing the iterator.
        f.debug_tuple("DrainFilter").field(&self.inner.peek()).finish()
    }
}
31449
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<K, V, F> Iterator for DrainFilter<'_, K, V, F>
where
    F: FnMut(&K, &mut V) -> bool,
{
    type Item = (K, V);

    fn next(&mut self) -> Option<(K, V)> {
        // Delegates to the predicate-generic core, supplying our predicate.
        self.inner.next(&mut self.pred)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
31465
impl<'a, K: 'a, V: 'a> DrainFilterInner<'a, K, V> {
    /// Allow Debug implementations to predict the next element.
    pub(super) fn peek(&self) -> Option<(&K, &V)> {
        let edge = self.cur_leaf_edge.as_ref()?;
        edge.reborrow().next_kv().ok().map(Handle::into_kv)
    }

    /// Implementation of a typical `DrainFilter::next` method, given the predicate.
    pub(super) fn next<F>(&mut self, pred: &mut F) -> Option<(K, V)>
    where
        F: FnMut(&K, &mut V) -> bool,
    {
        // Taking `cur_leaf_edge` means that if `pred` panics, the field stays
        // `None` and iteration stops permanently.
        while let Ok(mut kv) = self.cur_leaf_edge.take()?.next_kv() {
            let (k, v) = kv.kv_mut();
            if pred(k, v) {
                // Keep the borrowed map's length in sync with the removal.
                *self.length -= 1;
                let (kv, pos) = kv.remove_kv_tracking(|| {
                    // SAFETY: we will touch the root in a way that will not
                    // invalidate the position returned.
                    let root = unsafe { self.dormant_root.take().unwrap().awaken() };
                    root.pop_internal_level();
                    self.dormant_root = Some(DormantMutRef::new(root).1);
                });
                self.cur_leaf_edge = Some(pos);
                return Some(kv);
            }
            // Not drained: skip past this element and keep scanning.
            self.cur_leaf_edge = Some(kv.next_leaf_edge());
        }
        None
    }

    /// Implementation of a typical `DrainFilter::size_hint` method.
    pub(super) fn size_hint(&self) -> (usize, Option<usize>) {
        // In most of the btree iterators, `self.length` is the number of elements
        // yet to be visited. Here, it includes elements that were visited and that
        // the predicate decided not to drain. Making this upper bound more accurate
        // requires maintaining an extra field and is not worth while.
        (0, Some(*self.length))
    }
}
31506
// Fused: once `cur_leaf_edge` is `None`, `next` keeps returning `None`.
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<K, V, F> FusedIterator for DrainFilter<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool {}
31509
#[stable(feature = "btree_range", since = "1.17.0")]
impl<'a, K, V> Iterator for Range<'a, K, V> {
    type Item = (&'a K, &'a V);

    fn next(&mut self) -> Option<(&'a K, &'a V)> {
        // SAFETY: the range was just checked to be non-empty.
        if self.inner.is_empty() { None } else { Some(unsafe { self.next_unchecked() }) }
    }

    fn last(mut self) -> Option<(&'a K, &'a V)> {
        // Jump to the back instead of walking the whole range forward.
        self.next_back()
    }

    fn min(mut self) -> Option<(&'a K, &'a V)> {
        // Ranges iterate in ascending key order.
        self.next()
    }

    fn max(mut self) -> Option<(&'a K, &'a V)> {
        self.next_back()
    }
}
31530
31531#[stable(feature = "map_values_mut", since = "1.10.0")]
31532impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
31533    type Item = &'a mut V;
31534
31535    fn next(&mut self) -> Option<&'a mut V> {
31536        self.inner.next().map(|(_, v)| v)
31537    }
31538
31539    fn size_hint(&self) -> (usize, Option<usize>) {
31540        self.inner.size_hint()
31541    }
31542
31543    fn last(mut self) -> Option<&'a mut V> {
31544        self.next_back()
31545    }
31546}
31547
31548#[stable(feature = "map_values_mut", since = "1.10.0")]
31549impl<'a, K, V> DoubleEndedIterator for ValuesMut<'a, K, V> {
31550    fn next_back(&mut self) -> Option<&'a mut V> {
31551        self.inner.next_back().map(|(_, v)| v)
31552    }
31553}
31554
#[stable(feature = "map_values_mut", since = "1.10.0")]
impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
    // Exact because the wrapped entry iterator is exact-sized.
    fn len(&self) -> usize {
        self.inner.len()
    }
}
31561
// Fused because the wrapped entry iterator (`IterMut`) is fused.
#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}
31564
impl<'a, K, V> Range<'a, K, V> {
    /// Advances the front edge without checking for emptiness.
    ///
    /// # Safety
    /// The range must be non-empty, as guaranteed by the `is_empty` guards at
    /// the call sites; otherwise `front` may be `None` or exhausted.
    unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
        unsafe { self.inner.front.as_mut().unwrap_unchecked().next_unchecked() }
    }
}
31570
31571#[unstable(feature = "map_into_keys_values", issue = "75294")]
31572impl<K, V> Iterator for IntoKeys<K, V> {
31573    type Item = K;
31574
31575    fn next(&mut self) -> Option<K> {
31576        self.inner.next().map(|(k, _)| k)
31577    }
31578
31579    fn size_hint(&self) -> (usize, Option<usize>) {
31580        self.inner.size_hint()
31581    }
31582
31583    fn last(mut self) -> Option<K> {
31584        self.next_back()
31585    }
31586
31587    fn min(mut self) -> Option<K> {
31588        self.next()
31589    }
31590
31591    fn max(mut self) -> Option<K> {
31592        self.next_back()
31593    }
31594}
31595
31596#[unstable(feature = "map_into_keys_values", issue = "75294")]
31597impl<K, V> DoubleEndedIterator for IntoKeys<K, V> {
31598    fn next_back(&mut self) -> Option<K> {
31599        self.inner.next_back().map(|(k, _)| k)
31600    }
31601}
31602
#[unstable(feature = "map_into_keys_values", issue = "75294")]
impl<K, V> ExactSizeIterator for IntoKeys<K, V> {
    // Exact because the wrapped entry iterator is exact-sized.
    fn len(&self) -> usize {
        self.inner.len()
    }
}
31609
// Fused because the wrapped entry iterator (`IntoIter`) is fused.
#[unstable(feature = "map_into_keys_values", issue = "75294")]
impl<K, V> FusedIterator for IntoKeys<K, V> {}
31612
31613#[unstable(feature = "map_into_keys_values", issue = "75294")]
31614impl<K, V> Iterator for IntoValues<K, V> {
31615    type Item = V;
31616
31617    fn next(&mut self) -> Option<V> {
31618        self.inner.next().map(|(_, v)| v)
31619    }
31620
31621    fn size_hint(&self) -> (usize, Option<usize>) {
31622        self.inner.size_hint()
31623    }
31624
31625    fn last(mut self) -> Option<V> {
31626        self.next_back()
31627    }
31628}
31629
31630#[unstable(feature = "map_into_keys_values", issue = "75294")]
31631impl<K, V> DoubleEndedIterator for IntoValues<K, V> {
31632    fn next_back(&mut self) -> Option<V> {
31633        self.inner.next_back().map(|(_, v)| v)
31634    }
31635}
31636
#[unstable(feature = "map_into_keys_values", issue = "75294")]
impl<K, V> ExactSizeIterator for IntoValues<K, V> {
    // Exact because the wrapped entry iterator is exact-sized.
    fn len(&self) -> usize {
        self.inner.len()
    }
}
31643
// Fused because the wrapped entry iterator (`IntoIter`) is fused.
#[unstable(feature = "map_into_keys_values", issue = "75294")]
impl<K, V> FusedIterator for IntoValues<K, V> {}
31646
#[stable(feature = "btree_range", since = "1.17.0")]
impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
    fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
        // SAFETY: the range was just checked to be non-empty.
        if self.inner.is_empty() { None } else { Some(unsafe { self.next_back_unchecked() }) }
    }
}
31653
impl<'a, K, V> Range<'a, K, V> {
    /// Steps the back edge without checking for emptiness.
    ///
    /// # Safety
    /// The range must be non-empty, as guaranteed by the `is_empty` guard at
    /// the call site; otherwise `back` may be `None` or exhausted.
    unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
        unsafe { self.inner.back.as_mut().unwrap_unchecked().next_back_unchecked() }
    }
}
31659
// Fused: `next` keeps returning `None` once the range is empty.
#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for Range<'_, K, V> {}
31662
#[stable(feature = "btree_range", since = "1.17.0")]
impl<K, V> Clone for Range<'_, K, V> {
    fn clone(&self) -> Self {
        // Copies the two edge handles field by field, so `Range` is `Clone`
        // without the `K: Clone`/`V: Clone` bounds a derive would add.
        Range { inner: LeafRange { front: self.inner.front, back: self.inner.back } }
    }
}
31669
#[stable(feature = "btree_range", since = "1.17.0")]
impl<'a, K, V> Iterator for RangeMut<'a, K, V> {
    type Item = (&'a K, &'a mut V);

    fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
        // SAFETY: the range was just checked to be non-empty.
        if self.inner.is_empty() { None } else { Some(unsafe { self.next_unchecked() }) }
    }

    fn last(mut self) -> Option<(&'a K, &'a mut V)> {
        self.next_back()
    }

    fn min(mut self) -> Option<(&'a K, &'a mut V)> {
        // Ranges iterate in ascending key order.
        self.next()
    }

    fn max(mut self) -> Option<(&'a K, &'a mut V)> {
        self.next_back()
    }
}
31690
impl<'a, K, V> RangeMut<'a, K, V> {
    /// Advances the front edge without checking for emptiness.
    ///
    /// # Safety
    /// The range must be non-empty, as guaranteed by the `is_empty` guards at
    /// the call sites; otherwise `front` may be `None` or exhausted.
    unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) {
        unsafe { self.inner.front.as_mut().unwrap_unchecked().next_unchecked() }
    }

    /// Returns an iterator of references over the remaining items.
    #[inline]
    pub(super) fn iter(&self) -> Range<'_, K, V> {
        Range { inner: self.inner.reborrow() }
    }
}
31702
#[stable(feature = "btree_range", since = "1.17.0")]
impl<'a, K, V> DoubleEndedIterator for RangeMut<'a, K, V> {
    fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
        // SAFETY: the range was just checked to be non-empty.
        if self.inner.is_empty() { None } else { Some(unsafe { self.next_back_unchecked() }) }
    }
}
31709
// Fused: `next` keeps returning `None` once the range is empty.
#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for RangeMut<'_, K, V> {}
31712
impl<'a, K, V> RangeMut<'a, K, V> {
    /// Steps the back edge without checking for emptiness.
    ///
    /// # Safety
    /// The range must be non-empty, as guaranteed by the `is_empty` guard at
    /// the call site; otherwise `back` may be `None` or exhausted.
    unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) {
        unsafe { self.inner.back.as_mut().unwrap_unchecked().next_back_unchecked() }
    }
}
31718
31719#[stable(feature = "rust1", since = "1.0.0")]
31720impl<K: Ord, V> FromIterator<(K, V)> for BTreeMap<K, V> {
31721    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> BTreeMap<K, V> {
31722        let mut map = BTreeMap::new();
31723        map.extend(iter);
31724        map
31725    }
31726}
31727
31728#[stable(feature = "rust1", since = "1.0.0")]
31729impl<K: Ord, V> Extend<(K, V)> for BTreeMap<K, V> {
31730    #[inline]
31731    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
31732        iter.into_iter().for_each(move |(k, v)| {
31733            self.insert(k, v);
31734        });
31735    }
31736
31737    #[inline]
31738    fn extend_one(&mut self, (k, v): (K, V)) {
31739        self.insert(k, v);
31740    }
31741}
31742
31743#[stable(feature = "extend_ref", since = "1.2.0")]
31744impl<'a, K: Ord + Copy, V: Copy> Extend<(&'a K, &'a V)> for BTreeMap<K, V> {
31745    fn extend<I: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: I) {
31746        self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
31747    }
31748
31749    #[inline]
31750    fn extend_one(&mut self, (&k, &v): (&'a K, &'a V)) {
31751        self.insert(k, v);
31752    }
31753}
31754
31755#[stable(feature = "rust1", since = "1.0.0")]
31756impl<K: Hash, V: Hash> Hash for BTreeMap<K, V> {
31757    fn hash<H: Hasher>(&self, state: &mut H) {
31758        for elt in self {
31759            elt.hash(state);
31760        }
31761    }
31762}
31763
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Ord, V> Default for BTreeMap<K, V> {
    /// Creates an empty `BTreeMap`.
    fn default() -> BTreeMap<K, V> {
        BTreeMap::new()
    }
}
31771
31772#[stable(feature = "rust1", since = "1.0.0")]
31773impl<K: PartialEq, V: PartialEq> PartialEq for BTreeMap<K, V> {
31774    fn eq(&self, other: &BTreeMap<K, V>) -> bool {
31775        self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a == b)
31776    }
31777}
31778
// Equality is reflexive whenever both `K` and `V` are `Eq`.
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Eq, V: Eq> Eq for BTreeMap<K, V> {}
31781
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: PartialOrd, V: PartialOrd> PartialOrd for BTreeMap<K, V> {
    // Lexicographic comparison of the sorted entry sequences.
    #[inline]
    fn partial_cmp(&self, other: &BTreeMap<K, V>) -> Option<Ordering> {
        self.iter().partial_cmp(other.iter())
    }
}
31789
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Ord, V: Ord> Ord for BTreeMap<K, V> {
    // Lexicographic comparison of the sorted entry sequences.
    #[inline]
    fn cmp(&self, other: &BTreeMap<K, V>) -> Ordering {
        self.iter().cmp(other.iter())
    }
}
31797
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Debug, V: Debug> Debug for BTreeMap<K, V> {
    // Formats as `{key: value, ...}` in ascending key order.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_map().entries(self.iter()).finish()
    }
}
31804
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, Q: ?Sized, V> Index<&Q> for BTreeMap<K, V>
where
    K: Borrow<Q> + Ord,
    Q: Ord,
{
    type Output = V;

    /// Returns a reference to the value corresponding to the supplied key.
    ///
    /// # Panics
    ///
    /// Panics if the key is not present in the `BTreeMap`.
    #[inline]
    fn index(&self, key: &Q) -> &V {
        // `get` accepts any borrowed form of the key type via `Borrow<Q>`.
        self.get(key).expect("no entry found for key")
    }
}
31823
impl<K, V> BTreeMap<K, V> {
    /// Gets an iterator over the entries of the map, sorted by key.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// map.insert(3, "c");
    /// map.insert(2, "b");
    /// map.insert(1, "a");
    ///
    /// for (key, value) in map.iter() {
    ///     println!("{}: {}", key, value);
    /// }
    ///
    /// let (first_key, first_value) = map.iter().next().unwrap();
    /// assert_eq!((*first_key, *first_value), (1, "a"));
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn iter(&self) -> Iter<'_, K, V> {
        if let Some(root) = &self.root {
            let full_range = root.reborrow().full_range();

            Iter { range: Range { inner: full_range }, length: self.length }
        } else {
            // No root means the map is empty: an immediately-exhausted iterator.
            Iter { range: Range { inner: LeafRange::none() }, length: 0 }
        }
    }

    /// Gets a mutable iterator over the entries of the map, sorted by key.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// // add 10 to the value if the key isn't "a"
    /// for (key, value) in map.iter_mut() {
    ///     if key != &"a" {
    ///         *value += 10;
    ///     }
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
        if let Some(root) = &mut self.root {
            // `borrow_valmut` permits mutating values but not keys (key order
            // must stay intact while iterating).
            let full_range = root.borrow_valmut().full_range();

            IterMut {
                range: RangeMut { inner: full_range, _marker: PhantomData },
                length: self.length,
            }
        } else {
            IterMut {
                range: RangeMut { inner: LeafRange::none(), _marker: PhantomData },
                length: 0,
            }
        }
    }

    /// Gets an iterator over the keys of the map, in sorted order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// a.insert(2, "b");
    /// a.insert(1, "a");
    ///
    /// let keys: Vec<_> = a.keys().cloned().collect();
    /// assert_eq!(keys, [1, 2]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn keys(&self) -> Keys<'_, K, V> {
        Keys { inner: self.iter() }
    }

    /// Gets an iterator over the values of the map, in order by key.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// a.insert(1, "hello");
    /// a.insert(2, "goodbye");
    ///
    /// let values: Vec<&str> = a.values().cloned().collect();
    /// assert_eq!(values, ["hello", "goodbye"]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn values(&self) -> Values<'_, K, V> {
        Values { inner: self.iter() }
    }

    /// Gets a mutable iterator over the values of the map, in order by key.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// a.insert(1, String::from("hello"));
    /// a.insert(2, String::from("goodbye"));
    ///
    /// for value in a.values_mut() {
    ///     value.push_str("!");
    /// }
    ///
    /// let values: Vec<String> = a.values().cloned().collect();
    /// assert_eq!(values, [String::from("hello!"),
    ///                     String::from("goodbye!")]);
    /// ```
    #[stable(feature = "map_values_mut", since = "1.10.0")]
    pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> {
        ValuesMut { inner: self.iter_mut() }
    }

    /// Returns the number of elements in the map.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// assert_eq!(a.len(), 0);
    /// a.insert(1, "a");
    /// assert_eq!(a.len(), 1);
    /// ```
    #[doc(alias = "length")]
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
    pub const fn len(&self) -> usize {
        self.length
    }

    /// Returns `true` if the map contains no elements.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// assert!(a.is_empty());
    /// a.insert(1, "a");
    /// assert!(!a.is_empty());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// If the root node is the empty (non-allocated) root node, allocate our
    /// own node. Is an associated function to avoid borrowing the entire BTreeMap.
    fn ensure_is_owned(root: &mut Option<Root<K, V>>) -> &mut Root<K, V> {
        // Lazily allocate: empty maps carry `None` until first use.
        root.get_or_insert_with(Root::new)
    }
}
32010
32011#[cfg(test)]
32012mod tests;
32013use core::marker::PhantomData;
32014use core::ptr::NonNull;
32015
32016/// Models a reborrow of some unique reference, when you know that the reborrow
32017/// and all its descendants (i.e., all pointers and references derived from it)
32018/// will not be used any more at some point, after which you want to use the
32019/// original unique reference again.
32020///
32021/// The borrow checker usually handles this stacking of borrows for you, but
32022/// some control flows that accomplish this stacking are too complicated for
32023/// the compiler to follow. A `DormantMutRef` allows you to check borrowing
32024/// yourself, while still expressing its stacked nature, and encapsulating
32025/// the raw pointer code needed to do this without undefined behavior.
pub struct DormantMutRef<'a, T> {
    // Raw (never-null) pointer to the referent, kept raw so the compiler does
    // not treat it as a live borrow while the reborrow is in use.
    ptr: NonNull<T>,
    // Makes this type behave, for borrow checking and variance, as if it
    // still held the original `&'a mut T`.
    _marker: PhantomData<&'a mut T>,
}
32030
// SAFETY: a `DormantMutRef` is semantically a `&'a mut T` (see `_marker`), so
// it is sound to be `Sync`/`Send` exactly when `&'a mut T` is, which the
// `where` bounds enforce.
unsafe impl<'a, T> Sync for DormantMutRef<'a, T> where &'a mut T: Sync {}
unsafe impl<'a, T> Send for DormantMutRef<'a, T> where &'a mut T: Send {}
32033
impl<'a, T> DormantMutRef<'a, T> {
    /// Capture a unique borrow, and immediately reborrow it. For the compiler,
    /// the lifetime of the new reference is the same as the lifetime of the
    /// original reference, but you promise to use it for a shorter period.
    pub fn new(t: &'a mut T) -> (&'a mut T, Self) {
        // `NonNull::from` on a reference can never yield a null pointer.
        let ptr = NonNull::from(t);
        // SAFETY: we hold the borrow throughout 'a via `_marker`, and we expose
        // only this reference, so it is unique.
        let new_ref = unsafe { &mut *ptr.as_ptr() };
        (new_ref, Self { ptr, _marker: PhantomData })
    }

    /// Revert to the unique borrow initially captured.
    ///
    /// # Safety
    ///
    /// The reborrow must have ended, i.e., the reference returned by `new` and
    /// all pointers and references derived from it, must not be used anymore.
    pub unsafe fn awaken(self) -> &'a mut T {
        // SAFETY: our own safety conditions imply this reference is again unique.
        unsafe { &mut *self.ptr.as_ptr() }
    }
}
32057
32058#[cfg(test)]
32059mod tests;
32060use core::intrinsics;
32061use core::mem;
32062use core::ptr;
32063
32064/// This replaces the value behind the `v` unique reference by calling the
32065/// relevant function.
32066///
32067/// If a panic occurs in the `change` closure, the entire process will be aborted.
32068#[allow(dead_code)] // keep as illustration and for future use
32069#[inline]
32070pub fn take_mut<T>(v: &mut T, change: impl FnOnce(T) -> T) {
32071    replace(v, |value| (change(value), ()))
32072}
32073
32074/// This replaces the value behind the `v` unique reference by calling the
32075/// relevant function, and returns a result obtained along the way.
32076///
32077/// If a panic occurs in the `change` closure, the entire process will be aborted.
#[inline]
pub fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
    // Aborts the process when dropped, i.e. when `change` panics below. This
    // is required for soundness: at that point `*v` has been moved out of,
    // and unwinding would let the caller observe (and later drop) the
    // bitwise-duplicated value.
    struct PanicGuard;
    impl Drop for PanicGuard {
        fn drop(&mut self) {
            intrinsics::abort()
        }
    }
    let guard = PanicGuard;
    // SAFETY: `v` is a valid unique reference; the bitwise copy made here is
    // either consumed by `change` or the process aborts, and `*v` is
    // overwritten below before anyone can observe the moved-out slot.
    let value = unsafe { ptr::read(v) };
    let (new_value, ret) = change(value);
    unsafe {
        // SAFETY: `v` is valid for writes; this restores the moved-out slot.
        ptr::write(v, new_value);
    }
    // Success: defuse the abort-on-panic guard without running its `Drop`.
    mem::forget(guard);
    ret
}
32095use super::node::{ForceResult::*, Root};
32096use super::search::SearchResult::*;
32097use core::borrow::Borrow;
32098
impl<K, V> Root<K, V> {
    /// Calculates the length of both trees that result from splitting up
    /// a given number of distinct key-value pairs.
    pub fn calc_split_length(
        total_num: usize,
        root_a: &Root<K, V>,
        root_b: &Root<K, V>,
    ) -> (usize, usize) {
        let (length_a, length_b);
        // Count the elements of only one tree (the one with lower height) and
        // derive the other count by subtraction from the known total.
        if root_a.height() < root_b.height() {
            length_a = root_a.reborrow().calc_length();
            length_b = total_num - length_a;
            debug_assert_eq!(length_b, root_b.reborrow().calc_length());
        } else {
            length_b = root_b.reborrow().calc_length();
            length_a = total_num - length_b;
            debug_assert_eq!(length_a, root_a.reborrow().calc_length());
        }
        (length_a, length_b)
    }

    /// Split off a tree with key-value pairs at and after the given key.
    /// The result is meaningful only if the tree is ordered by key,
    /// and if the ordering of `Q` corresponds to that of `K`.
    /// If `self` respects all `BTreeMap` tree invariants, then both
    /// `self` and the returned tree will respect those invariants.
    pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
    where
        K: Borrow<Q>,
    {
        let left_root = self;
        // Build an empty right-hand tree of the same height, so the two trees
        // can be descended in lockstep, level by level.
        let mut right_root = Root::new_pillar(left_root.height());
        let mut left_node = left_root.borrow_mut();
        let mut right_node = right_root.borrow_mut();

        loop {
            let mut split_edge = match left_node.search_node(key) {
                // key is going to the right tree
                Found(kv) => kv.left_edge(),
                GoDown(edge) => edge,
            };

            // Move everything at or after the split point into the right node
            // at this level.
            split_edge.move_suffix(&mut right_node);

            match (split_edge.force(), right_node.force()) {
                (Internal(edge), Internal(node)) => {
                    left_node = edge.descend();
                    right_node = node.first_edge().descend();
                }
                // Both sides reached leaf level at the same time.
                (Leaf(_), Leaf(_)) => break,
                _ => unreachable!(),
            }
        }

        // Repair underfull nodes created along the cut on each border.
        left_root.fix_right_border();
        right_root.fix_left_border();
        right_root
    }

    /// Creates a tree consisting of empty nodes.
    fn new_pillar(height: usize) -> Self {
        let mut root = Root::new();
        for _ in 0..height {
            root.push_internal_level();
        }
        root
    }
}
32167// This is pretty much entirely stolen from TreeSet, since BTreeMap has an identical interface
32168// to TreeMap
32169
32170use core::borrow::Borrow;
32171use core::cmp::Ordering::{Equal, Greater, Less};
32172use core::cmp::{max, min};
32173use core::fmt::{self, Debug};
32174use core::iter::{FromIterator, FusedIterator, Peekable};
32175use core::ops::{BitAnd, BitOr, BitXor, RangeBounds, Sub};
32176
32177use super::map::{BTreeMap, Keys};
32178use super::merge_iter::MergeIterInner;
32179use super::Recover;
32180
32181// FIXME(conventions): implement bounded iterators
32182
32183/// A set based on a B-Tree.
32184///
32185/// See [`BTreeMap`]'s documentation for a detailed discussion of this collection's performance
32186/// benefits and drawbacks.
32187///
32188/// It is a logic error for an item to be modified in such a way that the item's ordering relative
32189/// to any other item, as determined by the [`Ord`] trait, changes while it is in the set. This is
32190/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
32191/// The behavior resulting from such a logic error is not specified, but will not result in
32192/// undefined behavior. This could include panics, incorrect results, aborts, memory leaks, and
32193/// non-termination.
32194///
32195/// [`Ord`]: core::cmp::Ord
32196/// [`Cell`]: core::cell::Cell
32197/// [`RefCell`]: core::cell::RefCell
32198///
32199/// # Examples
32200///
32201/// ```
32202/// use std::collections::BTreeSet;
32203///
32204/// // Type inference lets us omit an explicit type signature (which
32205/// // would be `BTreeSet<&str>` in this example).
32206/// let mut books = BTreeSet::new();
32207///
32208/// // Add some books.
32209/// books.insert("A Dance With Dragons");
32210/// books.insert("To Kill a Mockingbird");
32211/// books.insert("The Odyssey");
32212/// books.insert("The Great Gatsby");
32213///
32214/// // Check for a specific one.
32215/// if !books.contains("The Winds of Winter") {
32216///     println!("We have {} books, but The Winds of Winter ain't one.",
32217///              books.len());
32218/// }
32219///
32220/// // Remove a book.
32221/// books.remove("The Odyssey");
32222///
32223/// // Iterate over everything.
32224/// for book in &books {
32225///     println!("{}", book);
32226/// }
32227/// ```
#[derive(Hash, PartialEq, Eq, Ord, PartialOrd)]
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "BTreeSet")]
pub struct BTreeSet<T> {
    // The set is a thin wrapper over a map whose values carry no information.
    map: BTreeMap<T, ()>,
}
32234
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone> Clone for BTreeSet<T> {
    fn clone(&self) -> Self {
        BTreeSet { map: self.map.clone() }
    }

    // Delegate so the underlying map can reuse its existing allocations.
    fn clone_from(&mut self, other: &Self) {
        self.map.clone_from(&other.map);
    }
}
32245
32246/// An iterator over the items of a `BTreeSet`.
32247///
32248/// This `struct` is created by the [`iter`] method on [`BTreeSet`].
32249/// See its documentation for more.
32250///
32251/// [`iter`]: BTreeSet::iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> {
    // The set's elements are exactly the keys of the underlying map.
    iter: Keys<'a, T, ()>,
}
32256
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // NOTE(review): the `.clone()` looks avoidable if `Keys` implements
        // `Debug` for references — confirm before changing; kept as-is here.
        f.debug_tuple("Iter").field(&self.iter.clone()).finish()
    }
}
32263
32264/// An owning iterator over the items of a `BTreeSet`.
32265///
32266/// This `struct` is created by the [`into_iter`] method on [`BTreeSet`]
32267/// (provided by the `IntoIterator` trait). See its documentation for more.
32268///
32269/// [`into_iter`]: BTreeSet#method.into_iter
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct IntoIter<T> {
    // Consumes the underlying map's entries, yielding only the keys.
    iter: super::map::IntoIter<T, ()>,
}
32275
32276/// An iterator over a sub-range of items in a `BTreeSet`.
32277///
32278/// This `struct` is created by the [`range`] method on [`BTreeSet`].
32279/// See its documentation for more.
32280///
32281/// [`range`]: BTreeSet::range
#[derive(Debug)]
#[stable(feature = "btree_range", since = "1.17.0")]
pub struct Range<'a, T: 'a> {
    // Wraps the map's range iterator; only the keys are exposed.
    iter: super::map::Range<'a, T, ()>,
}
32287
32288/// A lazy iterator producing elements in the difference of `BTreeSet`s.
32289///
32290/// This `struct` is created by the [`difference`] method on [`BTreeSet`].
32291/// See its documentation for more.
32292///
32293/// [`difference`]: BTreeSet::difference
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Difference<'a, T: 'a> {
    inner: DifferenceInner<'a, T>,
}
// Strategy chosen by `BTreeSet::difference` based on the sets' relative
// sizes and key ranges.
#[derive(Debug)]
enum DifferenceInner<'a, T: 'a> {
    Stitch {
        // iterate all of `self` and some of `other`, spotting matches along the way
        self_iter: Iter<'a, T>,
        other_iter: Peekable<Iter<'a, T>>,
    },
    Search {
        // iterate `self`, look up in `other`
        self_iter: Iter<'a, T>,
        other_set: &'a BTreeSet<T>,
    },
    Iterate(Iter<'a, T>), // simply produce all values in `self`
}
32312
32313#[stable(feature = "collection_debug", since = "1.17.0")]
32314impl<T: fmt::Debug> fmt::Debug for Difference<'_, T> {
32315    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
32316        f.debug_tuple("Difference").field(&self.inner).finish()
32317    }
32318}
32319
32320/// A lazy iterator producing elements in the symmetric difference of `BTreeSet`s.
32321///
32322/// This `struct` is created by the [`symmetric_difference`] method on
32323/// [`BTreeSet`]. See its documentation for more.
32324///
32325/// [`symmetric_difference`]: BTreeSet::symmetric_difference
#[stable(feature = "rust1", since = "1.0.0")]
// Merges the two sets' iterators; elements found in both are dropped.
pub struct SymmetricDifference<'a, T: 'a>(MergeIterInner<Iter<'a, T>>);
32328
32329#[stable(feature = "collection_debug", since = "1.17.0")]
32330impl<T: fmt::Debug> fmt::Debug for SymmetricDifference<'_, T> {
32331    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
32332        f.debug_tuple("SymmetricDifference").field(&self.0).finish()
32333    }
32334}
32335
32336/// A lazy iterator producing elements in the intersection of `BTreeSet`s.
32337///
32338/// This `struct` is created by the [`intersection`] method on [`BTreeSet`].
32339/// See its documentation for more.
32340///
32341/// [`intersection`]: BTreeSet::intersection
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Intersection<'a, T: 'a> {
    inner: IntersectionInner<'a, T>,
}
// Strategy chosen by `BTreeSet::intersection` based on the sets' relative
// sizes and key ranges.
#[derive(Debug)]
enum IntersectionInner<'a, T: 'a> {
    Stitch {
        // iterate similarly sized sets jointly, spotting matches along the way
        a: Iter<'a, T>,
        b: Iter<'a, T>,
    },
    Search {
        // iterate a small set, look up in the large set
        small_iter: Iter<'a, T>,
        large_set: &'a BTreeSet<T>,
    },
    Answer(Option<&'a T>), // return a specific value or emptiness
}
32360
32361#[stable(feature = "collection_debug", since = "1.17.0")]
32362impl<T: fmt::Debug> fmt::Debug for Intersection<'_, T> {
32363    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
32364        f.debug_tuple("Intersection").field(&self.inner).finish()
32365    }
32366}
32367
32368/// A lazy iterator producing elements in the union of `BTreeSet`s.
32369///
32370/// This `struct` is created by the [`union`] method on [`BTreeSet`].
32371/// See its documentation for more.
32372///
32373/// [`union`]: BTreeSet::union
#[stable(feature = "rust1", since = "1.0.0")]
// Merges the two sets' iterators; elements found in both are yielded once.
pub struct Union<'a, T: 'a>(MergeIterInner<Iter<'a, T>>);
32376
32377#[stable(feature = "collection_debug", since = "1.17.0")]
32378impl<T: fmt::Debug> fmt::Debug for Union<'_, T> {
32379    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
32380        f.debug_tuple("Union").field(&self.0).finish()
32381    }
32382}
32383
// This constant is used by functions that compare two sets.
// It estimates the relative size at which searching performs better
// than iterating, based on the benchmarks in
// https://github.com/ssomers/rust_bench_btreeset_intersection.
// It's used to divide rather than multiply sizes, to rule out overflow,
// and it's a power of two to make that division cheap.
// In other words: search is used when one set is >= 16x larger than the other.
const ITER_PERFORMANCE_TIPPING_SIZE_DIFF: usize = 16;
32391
32392impl<T> BTreeSet<T> {
32393    /// Makes a new, empty `BTreeSet`.
32394    ///
32395    /// Does not allocate anything on its own.
32396    ///
32397    /// # Examples
32398    ///
32399    /// ```
32400    /// # #![allow(unused_mut)]
32401    /// use std::collections::BTreeSet;
32402    ///
32403    /// let mut set: BTreeSet<i32> = BTreeSet::new();
32404    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
    pub const fn new() -> BTreeSet<T>
    where
        T: Ord,
    {
        // No allocation happens here; the map allocates lazily on first insert.
        BTreeSet { map: BTreeMap::new() }
    }
32413
32414    /// Constructs a double-ended iterator over a sub-range of elements in the set.
32415    /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
32416    /// yield elements from min (inclusive) to max (exclusive).
32417    /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
32418    /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
32419    /// range from 4 to 10.
32420    ///
32421    /// # Examples
32422    ///
32423    /// ```
32424    /// use std::collections::BTreeSet;
32425    /// use std::ops::Bound::Included;
32426    ///
32427    /// let mut set = BTreeSet::new();
32428    /// set.insert(3);
32429    /// set.insert(5);
32430    /// set.insert(8);
32431    /// for &elem in set.range((Included(&4), Included(&8))) {
32432    ///     println!("{}", elem);
32433    /// }
32434    /// assert_eq!(Some(&5), set.range(4..).next());
32435    /// ```
    #[stable(feature = "btree_range", since = "1.17.0")]
    pub fn range<K: ?Sized, R>(&self, range: R) -> Range<'_, T>
    where
        K: Ord,
        T: Borrow<K> + Ord,
        R: RangeBounds<K>,
    {
        // Delegates directly to the map's range iterator over keys.
        Range { iter: self.map.range(range) }
    }
32445
32446    /// Visits the values representing the difference,
32447    /// i.e., the values that are in `self` but not in `other`,
32448    /// in ascending order.
32449    ///
32450    /// # Examples
32451    ///
32452    /// ```
32453    /// use std::collections::BTreeSet;
32454    ///
32455    /// let mut a = BTreeSet::new();
32456    /// a.insert(1);
32457    /// a.insert(2);
32458    ///
32459    /// let mut b = BTreeSet::new();
32460    /// b.insert(2);
32461    /// b.insert(3);
32462    ///
32463    /// let diff: Vec<_> = a.difference(&b).cloned().collect();
32464    /// assert_eq!(diff, [1]);
32465    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn difference<'a>(&'a self, other: &'a BTreeSet<T>) -> Difference<'a, T>
    where
        T: Ord,
    {
        // If either set is empty, the difference is simply all of `self`.
        let (self_min, self_max) =
            if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
                (self_min, self_max)
            } else {
                return Difference { inner: DifferenceInner::Iterate(self.iter()) };
            };
        let (other_min, other_max) =
            if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
                (other_min, other_max)
            } else {
                return Difference { inner: DifferenceInner::Iterate(self.iter()) };
            };
        // Pick a strategy by comparing the key ranges spanned by the two sets.
        Difference {
            inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) {
                // Ranges are disjoint: nothing of `other` can occur in `self`.
                (Greater, _) | (_, Less) => DifferenceInner::Iterate(self.iter()),
                // Ranges touch in exactly one element: skip it and iterate.
                (Equal, _) => {
                    let mut self_iter = self.iter();
                    self_iter.next();
                    DifferenceInner::Iterate(self_iter)
                }
                (_, Equal) => {
                    let mut self_iter = self.iter();
                    self_iter.next_back();
                    DifferenceInner::Iterate(self_iter)
                }
                // `self` is much smaller: look each element up in `other`.
                _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
                    DifferenceInner::Search { self_iter: self.iter(), other_set: other }
                }
                // Comparable sizes: walk both sets in lockstep.
                _ => DifferenceInner::Stitch {
                    self_iter: self.iter(),
                    other_iter: other.iter().peekable(),
                },
            },
        }
    }
32506
32507    /// Visits the values representing the symmetric difference,
32508    /// i.e., the values that are in `self` or in `other` but not in both,
32509    /// in ascending order.
32510    ///
32511    /// # Examples
32512    ///
32513    /// ```
32514    /// use std::collections::BTreeSet;
32515    ///
32516    /// let mut a = BTreeSet::new();
32517    /// a.insert(1);
32518    /// a.insert(2);
32519    ///
32520    /// let mut b = BTreeSet::new();
32521    /// b.insert(2);
32522    /// b.insert(3);
32523    ///
32524    /// let sym_diff: Vec<_> = a.symmetric_difference(&b).cloned().collect();
32525    /// assert_eq!(sym_diff, [1, 3]);
32526    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn symmetric_difference<'a>(&'a self, other: &'a BTreeSet<T>) -> SymmetricDifference<'a, T>
    where
        T: Ord,
    {
        // The merge iterator walks both sets in order; the `SymmetricDifference`
        // adapter drops elements produced by both sides.
        SymmetricDifference(MergeIterInner::new(self.iter(), other.iter()))
    }
32534
32535    /// Visits the values representing the intersection,
32536    /// i.e., the values that are both in `self` and `other`,
32537    /// in ascending order.
32538    ///
32539    /// # Examples
32540    ///
32541    /// ```
32542    /// use std::collections::BTreeSet;
32543    ///
32544    /// let mut a = BTreeSet::new();
32545    /// a.insert(1);
32546    /// a.insert(2);
32547    ///
32548    /// let mut b = BTreeSet::new();
32549    /// b.insert(2);
32550    /// b.insert(3);
32551    ///
32552    /// let intersection: Vec<_> = a.intersection(&b).cloned().collect();
32553    /// assert_eq!(intersection, [2]);
32554    /// ```
32555    #[stable(feature = "rust1", since = "1.0.0")]
32556    pub fn intersection<'a>(&'a self, other: &'a BTreeSet<T>) -> Intersection<'a, T>
32557    where
32558        T: Ord,
32559    {
32560        let (self_min, self_max) =
32561            if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
32562                (self_min, self_max)
32563            } else {
32564                return Intersection { inner: IntersectionInner::Answer(None) };
32565            };
32566        let (other_min, other_max) =
32567            if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
32568                (other_min, other_max)
32569            } else {
32570                return Intersection { inner: IntersectionInner::Answer(None) };
32571            };
32572        Intersection {
32573            inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) {
32574                (Greater, _) | (_, Less) => IntersectionInner::Answer(None),
32575                (Equal, _) => IntersectionInner::Answer(Some(self_min)),
32576                (_, Equal) => IntersectionInner::Answer(Some(self_max)),
32577                _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
32578                    IntersectionInner::Search { small_iter: self.iter(), large_set: other }
32579                }
32580                _ if other.len() <= self.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
32581                    IntersectionInner::Search { small_iter: other.iter(), large_set: self }
32582                }
32583                _ => IntersectionInner::Stitch { a: self.iter(), b: other.iter() },
32584            },
32585        }
32586    }
32587
32588    /// Visits the values representing the union,
32589    /// i.e., all the values in `self` or `other`, without duplicates,
32590    /// in ascending order.
32591    ///
32592    /// # Examples
32593    ///
32594    /// ```
32595    /// use std::collections::BTreeSet;
32596    ///
32597    /// let mut a = BTreeSet::new();
32598    /// a.insert(1);
32599    ///
32600    /// let mut b = BTreeSet::new();
32601    /// b.insert(2);
32602    ///
32603    /// let union: Vec<_> = a.union(&b).cloned().collect();
32604    /// assert_eq!(union, [1, 2]);
32605    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn union<'a>(&'a self, other: &'a BTreeSet<T>) -> Union<'a, T>
    where
        T: Ord,
    {
        // The merge iterator walks both sets in order; the `Union` adapter
        // yields elements produced by both sides only once.
        Union(MergeIterInner::new(self.iter(), other.iter()))
    }
32613
32614    /// Clears the set, removing all values.
32615    ///
32616    /// # Examples
32617    ///
32618    /// ```
32619    /// use std::collections::BTreeSet;
32620    ///
32621    /// let mut v = BTreeSet::new();
32622    /// v.insert(1);
32623    /// v.clear();
32624    /// assert!(v.is_empty());
32625    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn clear(&mut self) {
        // Drops all entries of the underlying map.
        self.map.clear()
    }
32630
32631    /// Returns `true` if the set contains a value.
32632    ///
32633    /// The value may be any borrowed form of the set's value type,
32634    /// but the ordering on the borrowed form *must* match the
32635    /// ordering on the value type.
32636    ///
32637    /// # Examples
32638    ///
32639    /// ```
32640    /// use std::collections::BTreeSet;
32641    ///
32642    /// let set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
32643    /// assert_eq!(set.contains(&1), true);
32644    /// assert_eq!(set.contains(&4), false);
32645    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
    where
        T: Borrow<Q> + Ord,
        Q: Ord,
    {
        // Set membership is key membership in the underlying map.
        self.map.contains_key(value)
    }
32654
32655    /// Returns a reference to the value in the set, if any, that is equal to the given value.
32656    ///
32657    /// The value may be any borrowed form of the set's value type,
32658    /// but the ordering on the borrowed form *must* match the
32659    /// ordering on the value type.
32660    ///
32661    /// # Examples
32662    ///
32663    /// ```
32664    /// use std::collections::BTreeSet;
32665    ///
32666    /// let set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
32667    /// assert_eq!(set.get(&2), Some(&2));
32668    /// assert_eq!(set.get(&4), None);
32669    /// ```
    #[stable(feature = "set_recovery", since = "1.9.0")]
    pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T>
    where
        T: Borrow<Q> + Ord,
        Q: Ord,
    {
        // `Recover` exposes the stored key itself (not just membership),
        // which may differ from `value` in ways `Ord` does not observe.
        Recover::get(&self.map, value)
    }
32678
32679    /// Returns `true` if `self` has no elements in common with `other`.
32680    /// This is equivalent to checking for an empty intersection.
32681    ///
32682    /// # Examples
32683    ///
32684    /// ```
32685    /// use std::collections::BTreeSet;
32686    ///
32687    /// let a: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
32688    /// let mut b = BTreeSet::new();
32689    ///
32690    /// assert_eq!(a.is_disjoint(&b), true);
32691    /// b.insert(4);
32692    /// assert_eq!(a.is_disjoint(&b), true);
32693    /// b.insert(1);
32694    /// assert_eq!(a.is_disjoint(&b), false);
32695    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn is_disjoint(&self, other: &BTreeSet<T>) -> bool
    where
        T: Ord,
    {
        // Lazy: stops at the first common element, if any.
        self.intersection(other).next().is_none()
    }
32703
32704    /// Returns `true` if the set is a subset of another,
32705    /// i.e., `other` contains at least all the values in `self`.
32706    ///
32707    /// # Examples
32708    ///
32709    /// ```
32710    /// use std::collections::BTreeSet;
32711    ///
32712    /// let sup: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
32713    /// let mut set = BTreeSet::new();
32714    ///
32715    /// assert_eq!(set.is_subset(&sup), true);
32716    /// set.insert(2);
32717    /// assert_eq!(set.is_subset(&sup), true);
32718    /// set.insert(4);
32719    /// assert_eq!(set.is_subset(&sup), false);
32720    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn is_subset(&self, other: &BTreeSet<T>) -> bool
    where
        T: Ord,
    {
        // Same result as self.difference(other).next().is_none()
        // but the code below is faster (hugely in some cases).
        if self.len() > other.len() {
            return false;
        }
        let (self_min, self_max) =
            if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
                (self_min, self_max)
            } else {
                return true; // self is empty
            };
        let (other_min, other_max) =
            if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
                (other_min, other_max)
            } else {
                return false; // other is empty
            };
        let mut self_iter = self.iter();
        // `self`'s extremes must lie within `other`'s range; when an extreme
        // coincides with one of `other`'s, it is already proven a member and
        // can be trimmed from the remaining work.
        match self_min.cmp(other_min) {
            Less => return false,
            Equal => {
                self_iter.next();
            }
            Greater => (),
        }
        match self_max.cmp(other_max) {
            Greater => return false,
            Equal => {
                self_iter.next_back();
            }
            Less => (),
        }
        if self_iter.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF {
            // `self` is much smaller: look each remaining element up in `other`.
            for next in self_iter {
                if !other.contains(next) {
                    return false;
                }
            }
        } else {
            // Comparable sizes: walk both sets in lockstep; every element of
            // `self` must be matched by an equal element of `other`.
            let mut other_iter = other.iter();
            // The extremes were already handled above, so skip them here too.
            other_iter.next();
            other_iter.next_back();
            let mut self_next = self_iter.next();
            while let Some(self1) = self_next {
                match other_iter.next().map_or(Less, |other1| self1.cmp(other1)) {
                    // `other` is exhausted or has skipped past `self1`.
                    Less => return false,
                    Equal => self_next = self_iter.next(),
                    Greater => (),
                }
            }
        }
        true
    }
32779
32780    /// Returns `true` if the set is a superset of another,
32781    /// i.e., `self` contains at least all the values in `other`.
32782    ///
32783    /// # Examples
32784    ///
32785    /// ```
32786    /// use std::collections::BTreeSet;
32787    ///
32788    /// let sub: BTreeSet<_> = [1, 2].iter().cloned().collect();
32789    /// let mut set = BTreeSet::new();
32790    ///
32791    /// assert_eq!(set.is_superset(&sub), false);
32792    ///
32793    /// set.insert(0);
32794    /// set.insert(1);
32795    /// assert_eq!(set.is_superset(&sub), false);
32796    ///
32797    /// set.insert(2);
32798    /// assert_eq!(set.is_superset(&sub), true);
32799    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn is_superset(&self, other: &BTreeSet<T>) -> bool
    where
        T: Ord,
    {
        // Superset is subset with the arguments swapped.
        other.is_subset(self)
    }
32807
32808    /// Returns a reference to the first value in the set, if any.
32809    /// This value is always the minimum of all values in the set.
32810    ///
32811    /// # Examples
32812    ///
32813    /// Basic usage:
32814    ///
32815    /// ```
32816    /// #![feature(map_first_last)]
32817    /// use std::collections::BTreeSet;
32818    ///
32819    /// let mut set = BTreeSet::new();
32820    /// assert_eq!(set.first(), None);
32821    /// set.insert(1);
32822    /// assert_eq!(set.first(), Some(&1));
32823    /// set.insert(2);
32824    /// assert_eq!(set.first(), Some(&1));
32825    /// ```
    #[unstable(feature = "map_first_last", issue = "62924")]
    pub fn first(&self) -> Option<&T>
    where
        T: Ord,
    {
        // The set's minimum is the underlying map's first key.
        self.map.first_key_value().map(|(k, _)| k)
    }
32833
32834    /// Returns a reference to the last value in the set, if any.
32835    /// This value is always the maximum of all values in the set.
32836    ///
32837    /// # Examples
32838    ///
32839    /// Basic usage:
32840    ///
32841    /// ```
32842    /// #![feature(map_first_last)]
32843    /// use std::collections::BTreeSet;
32844    ///
32845    /// let mut set = BTreeSet::new();
32846    /// assert_eq!(set.last(), None);
32847    /// set.insert(1);
32848    /// assert_eq!(set.last(), Some(&1));
32849    /// set.insert(2);
32850    /// assert_eq!(set.last(), Some(&2));
32851    /// ```
    #[unstable(feature = "map_first_last", issue = "62924")]
    pub fn last(&self) -> Option<&T>
    where
        T: Ord,
    {
        // The set's maximum is the underlying map's last key.
        self.map.last_key_value().map(|(k, _)| k)
    }
32859
32860    /// Removes the first value from the set and returns it, if any.
32861    /// The first value is always the minimum value in the set.
32862    ///
32863    /// # Examples
32864    ///
32865    /// ```
32866    /// #![feature(map_first_last)]
32867    /// use std::collections::BTreeSet;
32868    ///
32869    /// let mut set = BTreeSet::new();
32870    ///
32871    /// set.insert(1);
32872    /// while let Some(n) = set.pop_first() {
32873    ///     assert_eq!(n, 1);
32874    /// }
32875    /// assert!(set.is_empty());
32876    /// ```
32877    #[unstable(feature = "map_first_last", issue = "62924")]
32878    pub fn pop_first(&mut self) -> Option<T>
32879    where
32880        T: Ord,
32881    {
32882        self.map.pop_first().map(|kv| kv.0)
32883    }
32884
32885    /// Removes the last value from the set and returns it, if any.
32886    /// The last value is always the maximum value in the set.
32887    ///
32888    /// # Examples
32889    ///
32890    /// ```
32891    /// #![feature(map_first_last)]
32892    /// use std::collections::BTreeSet;
32893    ///
32894    /// let mut set = BTreeSet::new();
32895    ///
32896    /// set.insert(1);
32897    /// while let Some(n) = set.pop_last() {
32898    ///     assert_eq!(n, 1);
32899    /// }
32900    /// assert!(set.is_empty());
32901    /// ```
32902    #[unstable(feature = "map_first_last", issue = "62924")]
32903    pub fn pop_last(&mut self) -> Option<T>
32904    where
32905        T: Ord,
32906    {
32907        self.map.pop_last().map(|kv| kv.0)
32908    }
32909
32910    /// Adds a value to the set.
32911    ///
32912    /// If the set did not have this value present, `true` is returned.
32913    ///
32914    /// If the set did have this value present, `false` is returned, and the
32915    /// entry is not updated. See the [module-level documentation] for more.
32916    ///
32917    /// [module-level documentation]: index.html#insert-and-complex-keys
32918    ///
32919    /// # Examples
32920    ///
32921    /// ```
32922    /// use std::collections::BTreeSet;
32923    ///
32924    /// let mut set = BTreeSet::new();
32925    ///
32926    /// assert_eq!(set.insert(2), true);
32927    /// assert_eq!(set.insert(2), false);
32928    /// assert_eq!(set.len(), 1);
32929    /// ```
32930    #[stable(feature = "rust1", since = "1.0.0")]
32931    pub fn insert(&mut self, value: T) -> bool
32932    where
32933        T: Ord,
32934    {
32935        self.map.insert(value, ()).is_none()
32936    }
32937
32938    /// Adds a value to the set, replacing the existing value, if any, that is equal to the given
32939    /// one. Returns the replaced value.
32940    ///
32941    /// # Examples
32942    ///
32943    /// ```
32944    /// use std::collections::BTreeSet;
32945    ///
32946    /// let mut set = BTreeSet::new();
32947    /// set.insert(Vec::<i32>::new());
32948    ///
32949    /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 0);
32950    /// set.replace(Vec::with_capacity(10));
32951    /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 10);
32952    /// ```
32953    #[stable(feature = "set_recovery", since = "1.9.0")]
32954    pub fn replace(&mut self, value: T) -> Option<T>
32955    where
32956        T: Ord,
32957    {
32958        Recover::replace(&mut self.map, value)
32959    }
32960
32961    /// Removes a value from the set. Returns whether the value was
32962    /// present in the set.
32963    ///
32964    /// The value may be any borrowed form of the set's value type,
32965    /// but the ordering on the borrowed form *must* match the
32966    /// ordering on the value type.
32967    ///
32968    /// # Examples
32969    ///
32970    /// ```
32971    /// use std::collections::BTreeSet;
32972    ///
32973    /// let mut set = BTreeSet::new();
32974    ///
32975    /// set.insert(2);
32976    /// assert_eq!(set.remove(&2), true);
32977    /// assert_eq!(set.remove(&2), false);
32978    /// ```
32979    #[doc(alias = "delete")]
32980    #[stable(feature = "rust1", since = "1.0.0")]
32981    pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
32982    where
32983        T: Borrow<Q> + Ord,
32984        Q: Ord,
32985    {
32986        self.map.remove(value).is_some()
32987    }
32988
32989    /// Removes and returns the value in the set, if any, that is equal to the given one.
32990    ///
32991    /// The value may be any borrowed form of the set's value type,
32992    /// but the ordering on the borrowed form *must* match the
32993    /// ordering on the value type.
32994    ///
32995    /// # Examples
32996    ///
32997    /// ```
32998    /// use std::collections::BTreeSet;
32999    ///
33000    /// let mut set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
33001    /// assert_eq!(set.take(&2), Some(2));
33002    /// assert_eq!(set.take(&2), None);
33003    /// ```
33004    #[stable(feature = "set_recovery", since = "1.9.0")]
33005    pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
33006    where
33007        T: Borrow<Q> + Ord,
33008        Q: Ord,
33009    {
33010        Recover::take(&mut self.map, value)
33011    }
33012
33013    /// Retains only the elements specified by the predicate.
33014    ///
33015    /// In other words, remove all elements `e` such that `f(&e)` returns `false`.
33016    ///
33017    /// # Examples
33018    ///
33019    /// ```
33020    /// use std::collections::BTreeSet;
33021    ///
33022    /// let xs = [1, 2, 3, 4, 5, 6];
33023    /// let mut set: BTreeSet<i32> = xs.iter().cloned().collect();
33024    /// // Keep only the even numbers.
33025    /// set.retain(|&k| k % 2 == 0);
33026    /// assert!(set.iter().eq([2, 4, 6].iter()));
33027    /// ```
33028    #[stable(feature = "btree_retain", since = "1.53.0")]
33029    pub fn retain<F>(&mut self, mut f: F)
33030    where
33031        T: Ord,
33032        F: FnMut(&T) -> bool,
33033    {
33034        self.drain_filter(|v| !f(v));
33035    }
33036
33037    /// Moves all elements from `other` into `Self`, leaving `other` empty.
33038    ///
33039    /// # Examples
33040    ///
33041    /// ```
33042    /// use std::collections::BTreeSet;
33043    ///
33044    /// let mut a = BTreeSet::new();
33045    /// a.insert(1);
33046    /// a.insert(2);
33047    /// a.insert(3);
33048    ///
33049    /// let mut b = BTreeSet::new();
33050    /// b.insert(3);
33051    /// b.insert(4);
33052    /// b.insert(5);
33053    ///
33054    /// a.append(&mut b);
33055    ///
33056    /// assert_eq!(a.len(), 5);
33057    /// assert_eq!(b.len(), 0);
33058    ///
33059    /// assert!(a.contains(&1));
33060    /// assert!(a.contains(&2));
33061    /// assert!(a.contains(&3));
33062    /// assert!(a.contains(&4));
33063    /// assert!(a.contains(&5));
33064    /// ```
33065    #[stable(feature = "btree_append", since = "1.11.0")]
33066    pub fn append(&mut self, other: &mut Self)
33067    where
33068        T: Ord,
33069    {
33070        self.map.append(&mut other.map);
33071    }
33072
33073    /// Splits the collection into two at the given key. Returns everything after the given key,
33074    /// including the key.
33075    ///
33076    /// # Examples
33077    ///
33078    /// Basic usage:
33079    ///
33080    /// ```
33081    /// use std::collections::BTreeSet;
33082    ///
33083    /// let mut a = BTreeSet::new();
33084    /// a.insert(1);
33085    /// a.insert(2);
33086    /// a.insert(3);
33087    /// a.insert(17);
33088    /// a.insert(41);
33089    ///
33090    /// let b = a.split_off(&3);
33091    ///
33092    /// assert_eq!(a.len(), 2);
33093    /// assert_eq!(b.len(), 3);
33094    ///
33095    /// assert!(a.contains(&1));
33096    /// assert!(a.contains(&2));
33097    ///
33098    /// assert!(b.contains(&3));
33099    /// assert!(b.contains(&17));
33100    /// assert!(b.contains(&41));
33101    /// ```
33102    #[stable(feature = "btree_split_off", since = "1.11.0")]
33103    pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
33104    where
33105        T: Borrow<Q> + Ord,
33106    {
33107        BTreeSet { map: self.map.split_off(key) }
33108    }
33109
33110    /// Creates an iterator which uses a closure to determine if a value should be removed.
33111    ///
33112    /// If the closure returns true, then the value is removed and yielded.
33113    /// If the closure returns false, the value will remain in the list and will not be yielded
33114    /// by the iterator.
33115    ///
33116    /// If the iterator is only partially consumed or not consumed at all, each of the remaining
33117    /// values will still be subjected to the closure and removed and dropped if it returns true.
33118    ///
33119    /// It is unspecified how many more values will be subjected to the closure
33120    /// if a panic occurs in the closure, or if a panic occurs while dropping a value, or if the
33121    /// `DrainFilter` itself is leaked.
33122    ///
33123    /// # Examples
33124    ///
33125    /// Splitting a set into even and odd values, reusing the original set:
33126    ///
33127    /// ```
33128    /// #![feature(btree_drain_filter)]
33129    /// use std::collections::BTreeSet;
33130    ///
33131    /// let mut set: BTreeSet<i32> = (0..8).collect();
33132    /// let evens: BTreeSet<_> = set.drain_filter(|v| v % 2 == 0).collect();
33133    /// let odds = set;
33134    /// assert_eq!(evens.into_iter().collect::<Vec<_>>(), vec![0, 2, 4, 6]);
33135    /// assert_eq!(odds.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 7]);
33136    /// ```
33137    #[unstable(feature = "btree_drain_filter", issue = "70530")]
33138    pub fn drain_filter<'a, F>(&'a mut self, pred: F) -> DrainFilter<'a, T, F>
33139    where
33140        T: Ord,
33141        F: 'a + FnMut(&T) -> bool,
33142    {
33143        DrainFilter { pred, inner: self.map.drain_filter_inner() }
33144    }
33145
33146    /// Gets an iterator that visits the values in the `BTreeSet` in ascending order.
33147    ///
33148    /// # Examples
33149    ///
33150    /// ```
33151    /// use std::collections::BTreeSet;
33152    ///
33153    /// let set: BTreeSet<usize> = [1, 2, 3].iter().cloned().collect();
33154    /// let mut set_iter = set.iter();
33155    /// assert_eq!(set_iter.next(), Some(&1));
33156    /// assert_eq!(set_iter.next(), Some(&2));
33157    /// assert_eq!(set_iter.next(), Some(&3));
33158    /// assert_eq!(set_iter.next(), None);
33159    /// ```
33160    ///
33161    /// Values returned by the iterator are returned in ascending order:
33162    ///
33163    /// ```
33164    /// use std::collections::BTreeSet;
33165    ///
33166    /// let set: BTreeSet<usize> = [3, 1, 2].iter().cloned().collect();
33167    /// let mut set_iter = set.iter();
33168    /// assert_eq!(set_iter.next(), Some(&1));
33169    /// assert_eq!(set_iter.next(), Some(&2));
33170    /// assert_eq!(set_iter.next(), Some(&3));
33171    /// assert_eq!(set_iter.next(), None);
33172    /// ```
33173    #[stable(feature = "rust1", since = "1.0.0")]
33174    pub fn iter(&self) -> Iter<'_, T> {
33175        Iter { iter: self.map.keys() }
33176    }
33177
33178    /// Returns the number of elements in the set.
33179    ///
33180    /// # Examples
33181    ///
33182    /// ```
33183    /// use std::collections::BTreeSet;
33184    ///
33185    /// let mut v = BTreeSet::new();
33186    /// assert_eq!(v.len(), 0);
33187    /// v.insert(1);
33188    /// assert_eq!(v.len(), 1);
33189    /// ```
    #[doc(alias = "length")]
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
    pub const fn len(&self) -> usize {
        // One map entry per set element, so the lengths coincide.
        self.map.len()
    }
33196
33197    /// Returns `true` if the set contains no elements.
33198    ///
33199    /// # Examples
33200    ///
33201    /// ```
33202    /// use std::collections::BTreeSet;
33203    ///
33204    /// let mut v = BTreeSet::new();
33205    /// assert!(v.is_empty());
33206    /// v.insert(1);
33207    /// assert!(!v.is_empty());
33208    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
    pub const fn is_empty(&self) -> bool {
        // Defined in terms of `len` so both stay consistent.
        self.len() == 0
    }
33214}
33215
33216#[stable(feature = "rust1", since = "1.0.0")]
33217impl<T: Ord> FromIterator<T> for BTreeSet<T> {
33218    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BTreeSet<T> {
33219        let mut set = BTreeSet::new();
33220        set.extend(iter);
33221        set
33222    }
33223}
33224
33225#[stable(feature = "rust1", since = "1.0.0")]
33226impl<T> IntoIterator for BTreeSet<T> {
33227    type Item = T;
33228    type IntoIter = IntoIter<T>;
33229
33230    /// Gets an iterator for moving out the `BTreeSet`'s contents.
33231    ///
33232    /// # Examples
33233    ///
33234    /// ```
33235    /// use std::collections::BTreeSet;
33236    ///
33237    /// let set: BTreeSet<usize> = [1, 2, 3, 4].iter().cloned().collect();
33238    ///
33239    /// let v: Vec<_> = set.into_iter().collect();
33240    /// assert_eq!(v, [1, 2, 3, 4]);
33241    /// ```
33242    fn into_iter(self) -> IntoIter<T> {
33243        IntoIter { iter: self.map.into_iter() }
33244    }
33245}
33246
33247#[stable(feature = "rust1", since = "1.0.0")]
33248impl<'a, T> IntoIterator for &'a BTreeSet<T> {
33249    type Item = &'a T;
33250    type IntoIter = Iter<'a, T>;
33251
33252    fn into_iter(self) -> Iter<'a, T> {
33253        self.iter()
33254    }
33255}
33256
/// An iterator produced by calling `drain_filter` on BTreeSet.
#[unstable(feature = "btree_drain_filter", issue = "70530")]
pub struct DrainFilter<'a, T, F>
where
    T: 'a,
    F: 'a + FnMut(&T) -> bool,
{
    // Caller-supplied predicate deciding which elements are drained.
    pred: F,
    // Map-level drain state; the `()` values are stripped before yielding.
    inner: super::map::DrainFilterInner<'a, T, ()>,
}
33267
33268#[unstable(feature = "btree_drain_filter", issue = "70530")]
33269impl<T, F> Drop for DrainFilter<'_, T, F>
33270where
33271    F: FnMut(&T) -> bool,
33272{
33273    fn drop(&mut self) {
33274        self.for_each(drop);
33275    }
33276}
33277
33278#[unstable(feature = "btree_drain_filter", issue = "70530")]
33279impl<T, F> fmt::Debug for DrainFilter<'_, T, F>
33280where
33281    T: fmt::Debug,
33282    F: FnMut(&T) -> bool,
33283{
33284    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
33285        f.debug_tuple("DrainFilter").field(&self.inner.peek().map(|(k, _)| k)).finish()
33286    }
33287}
33288
33289#[unstable(feature = "btree_drain_filter", issue = "70530")]
33290impl<'a, T, F> Iterator for DrainFilter<'_, T, F>
33291where
33292    F: 'a + FnMut(&T) -> bool,
33293{
33294    type Item = T;
33295
33296    fn next(&mut self) -> Option<T> {
33297        let pred = &mut self.pred;
33298        let mut mapped_pred = |k: &T, _v: &mut ()| pred(k);
33299        self.inner.next(&mut mapped_pred).map(|(k, _)| k)
33300    }
33301
33302    fn size_hint(&self) -> (usize, Option<usize>) {
33303        self.inner.size_hint()
33304    }
33305}
33306
#[unstable(feature = "btree_drain_filter", issue = "70530")]
// Marker impl: once `next` returns `None`, it keeps returning `None`.
impl<T, F> FusedIterator for DrainFilter<'_, T, F> where F: FnMut(&T) -> bool {}
33309
33310#[stable(feature = "rust1", since = "1.0.0")]
33311impl<T: Ord> Extend<T> for BTreeSet<T> {
33312    #[inline]
33313    fn extend<Iter: IntoIterator<Item = T>>(&mut self, iter: Iter) {
33314        iter.into_iter().for_each(move |elem| {
33315            self.insert(elem);
33316        });
33317    }
33318
33319    #[inline]
33320    fn extend_one(&mut self, elem: T) {
33321        self.insert(elem);
33322    }
33323}
33324
33325#[stable(feature = "extend_ref", since = "1.2.0")]
33326impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BTreeSet<T> {
33327    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
33328        self.extend(iter.into_iter().cloned());
33329    }
33330
33331    #[inline]
33332    fn extend_one(&mut self, &elem: &'a T) {
33333        self.insert(elem);
33334    }
33335}
33336
33337#[stable(feature = "rust1", since = "1.0.0")]
33338impl<T: Ord> Default for BTreeSet<T> {
33339    /// Creates an empty `BTreeSet`.
33340    fn default() -> BTreeSet<T> {
33341        BTreeSet::new()
33342    }
33343}
33344
33345#[stable(feature = "rust1", since = "1.0.0")]
33346impl<T: Ord + Clone> Sub<&BTreeSet<T>> for &BTreeSet<T> {
33347    type Output = BTreeSet<T>;
33348
33349    /// Returns the difference of `self` and `rhs` as a new `BTreeSet<T>`.
33350    ///
33351    /// # Examples
33352    ///
33353    /// ```
33354    /// use std::collections::BTreeSet;
33355    ///
33356    /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
33357    /// let b: BTreeSet<_> = vec![3, 4, 5].into_iter().collect();
33358    ///
33359    /// let result = &a - &b;
33360    /// let result_vec: Vec<_> = result.into_iter().collect();
33361    /// assert_eq!(result_vec, [1, 2]);
33362    /// ```
33363    fn sub(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
33364        self.difference(rhs).cloned().collect()
33365    }
33366}
33367
33368#[stable(feature = "rust1", since = "1.0.0")]
33369impl<T: Ord + Clone> BitXor<&BTreeSet<T>> for &BTreeSet<T> {
33370    type Output = BTreeSet<T>;
33371
33372    /// Returns the symmetric difference of `self` and `rhs` as a new `BTreeSet<T>`.
33373    ///
33374    /// # Examples
33375    ///
33376    /// ```
33377    /// use std::collections::BTreeSet;
33378    ///
33379    /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
33380    /// let b: BTreeSet<_> = vec![2, 3, 4].into_iter().collect();
33381    ///
33382    /// let result = &a ^ &b;
33383    /// let result_vec: Vec<_> = result.into_iter().collect();
33384    /// assert_eq!(result_vec, [1, 4]);
33385    /// ```
33386    fn bitxor(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
33387        self.symmetric_difference(rhs).cloned().collect()
33388    }
33389}
33390
33391#[stable(feature = "rust1", since = "1.0.0")]
33392impl<T: Ord + Clone> BitAnd<&BTreeSet<T>> for &BTreeSet<T> {
33393    type Output = BTreeSet<T>;
33394
33395    /// Returns the intersection of `self` and `rhs` as a new `BTreeSet<T>`.
33396    ///
33397    /// # Examples
33398    ///
33399    /// ```
33400    /// use std::collections::BTreeSet;
33401    ///
33402    /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
33403    /// let b: BTreeSet<_> = vec![2, 3, 4].into_iter().collect();
33404    ///
33405    /// let result = &a & &b;
33406    /// let result_vec: Vec<_> = result.into_iter().collect();
33407    /// assert_eq!(result_vec, [2, 3]);
33408    /// ```
33409    fn bitand(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
33410        self.intersection(rhs).cloned().collect()
33411    }
33412}
33413
33414#[stable(feature = "rust1", since = "1.0.0")]
33415impl<T: Ord + Clone> BitOr<&BTreeSet<T>> for &BTreeSet<T> {
33416    type Output = BTreeSet<T>;
33417
33418    /// Returns the union of `self` and `rhs` as a new `BTreeSet<T>`.
33419    ///
33420    /// # Examples
33421    ///
33422    /// ```
33423    /// use std::collections::BTreeSet;
33424    ///
33425    /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
33426    /// let b: BTreeSet<_> = vec![3, 4, 5].into_iter().collect();
33427    ///
33428    /// let result = &a | &b;
33429    /// let result_vec: Vec<_> = result.into_iter().collect();
33430    /// assert_eq!(result_vec, [1, 2, 3, 4, 5]);
33431    /// ```
33432    fn bitor(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
33433        self.union(rhs).cloned().collect()
33434    }
33435}
33436
33437#[stable(feature = "rust1", since = "1.0.0")]
33438impl<T: Debug> Debug for BTreeSet<T> {
33439    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
33440        f.debug_set().entries(self.iter()).finish()
33441    }
33442}
33443
33444#[stable(feature = "rust1", since = "1.0.0")]
33445impl<T> Clone for Iter<'_, T> {
33446    fn clone(&self) -> Self {
33447        Iter { iter: self.iter.clone() }
33448    }
33449}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        self.iter.next()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }

    // Double-ended: the last element is the one at the back.
    fn last(mut self) -> Option<&'a T> {
        self.next_back()
    }

    // Iteration is ascending (see `BTreeSet::iter`), so the front element
    // is the minimum ...
    fn min(mut self) -> Option<&'a T> {
        self.next()
    }

    // ... and the back element is the maximum.
    fn max(mut self) -> Option<&'a T> {
        self.next_back()
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
    // Delegates to the map's key iterator, walking from the back.
    fn next_back(&mut self) -> Option<&'a T> {
        self.iter.next_back()
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for Iter<'_, T> {
    // The underlying key iterator knows exactly how many elements remain.
    fn len(&self) -> usize {
        self.iter.len()
    }
}
33486
#[stable(feature = "fused", since = "1.26.0")]
// Marker impl: an exhausted `Iter` keeps returning `None`.
impl<T> FusedIterator for Iter<'_, T> {}
33489
33490#[stable(feature = "rust1", since = "1.0.0")]
33491impl<T> Iterator for IntoIter<T> {
33492    type Item = T;
33493
33494    fn next(&mut self) -> Option<T> {
33495        self.iter.next().map(|(k, _)| k)
33496    }
33497
33498    fn size_hint(&self) -> (usize, Option<usize>) {
33499        self.iter.size_hint()
33500    }
33501}
33502#[stable(feature = "rust1", since = "1.0.0")]
33503impl<T> DoubleEndedIterator for IntoIter<T> {
33504    fn next_back(&mut self) -> Option<T> {
33505        self.iter.next_back().map(|(k, _)| k)
33506    }
33507}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IntoIter<T> {
    // The underlying map iterator knows exactly how many entries remain.
    fn len(&self) -> usize {
        self.iter.len()
    }
}
33514
#[stable(feature = "fused", since = "1.26.0")]
// Marker impl: an exhausted `IntoIter` keeps returning `None`.
impl<T> FusedIterator for IntoIter<T> {}
33517
33518#[stable(feature = "btree_range", since = "1.17.0")]
33519impl<T> Clone for Range<'_, T> {
33520    fn clone(&self) -> Self {
33521        Range { iter: self.iter.clone() }
33522    }
33523}
33524
33525#[stable(feature = "btree_range", since = "1.17.0")]
33526impl<'a, T> Iterator for Range<'a, T> {
33527    type Item = &'a T;
33528
33529    fn next(&mut self) -> Option<&'a T> {
33530        self.iter.next().map(|(k, _)| k)
33531    }
33532
33533    fn last(mut self) -> Option<&'a T> {
33534        self.next_back()
33535    }
33536
33537    fn min(mut self) -> Option<&'a T> {
33538        self.next()
33539    }
33540
33541    fn max(mut self) -> Option<&'a T> {
33542        self.next_back()
33543    }
33544}
33545
33546#[stable(feature = "btree_range", since = "1.17.0")]
33547impl<'a, T> DoubleEndedIterator for Range<'a, T> {
33548    fn next_back(&mut self) -> Option<&'a T> {
33549        self.iter.next_back().map(|(k, _)| k)
33550    }
33551}
33552
#[stable(feature = "fused", since = "1.26.0")]
// Marker impl: an exhausted `Range` keeps returning `None`.
impl<T> FusedIterator for Range<'_, T> {}
33555
33556#[stable(feature = "rust1", since = "1.0.0")]
33557impl<T> Clone for Difference<'_, T> {
33558    fn clone(&self) -> Self {
33559        Difference {
33560            inner: match &self.inner {
33561                DifferenceInner::Stitch { self_iter, other_iter } => DifferenceInner::Stitch {
33562                    self_iter: self_iter.clone(),
33563                    other_iter: other_iter.clone(),
33564                },
33565                DifferenceInner::Search { self_iter, other_set } => {
33566                    DifferenceInner::Search { self_iter: self_iter.clone(), other_set }
33567                }
33568                DifferenceInner::Iterate(iter) => DifferenceInner::Iterate(iter.clone()),
33569            },
33570        }
33571    }
33572}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: Ord> Iterator for Difference<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        match &mut self.inner {
            // Walk both sorted iterators in lockstep, dropping elements of
            // `self` that also appear in `other`.
            DifferenceInner::Stitch { self_iter, other_iter } => {
                let mut self_next = self_iter.next()?;
                loop {
                    // An exhausted `other_iter` compares as `Less`, so the
                    // remaining elements of `self` are all kept.
                    match other_iter.peek().map_or(Less, |other_next| self_next.cmp(other_next)) {
                        Less => return Some(self_next),
                        Equal => {
                            self_next = self_iter.next()?;
                            other_iter.next();
                        }
                        Greater => {
                            other_iter.next();
                        }
                    }
                }
            }
            // Probe the other set once per element of `self`.
            DifferenceInner::Search { self_iter, other_set } => loop {
                let self_next = self_iter.next()?;
                if !other_set.contains(&self_next) {
                    return Some(self_next);
                }
            },
            // Pass-through: no elements are filtered out in this strategy
            // (its `size_hint` below treats the subtrahend as empty).
            DifferenceInner::Iterate(iter) => iter.next(),
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let (self_len, other_len) = match &self.inner {
            DifferenceInner::Stitch { self_iter, other_iter } => {
                (self_iter.len(), other_iter.len())
            }
            DifferenceInner::Search { self_iter, other_set } => (self_iter.len(), other_set.len()),
            DifferenceInner::Iterate(iter) => (iter.len(), 0),
        };
        // Each element of `other` can cancel at most one element of `self`.
        (self_len.saturating_sub(other_len), Some(self_len))
    }

    fn min(mut self) -> Option<&'a T> {
        self.next()
    }
}
33619
#[stable(feature = "fused", since = "1.26.0")]
// Marker impl: an exhausted `Difference` keeps returning `None`.
impl<T: Ord> FusedIterator for Difference<'_, T> {}
33622
33623#[stable(feature = "rust1", since = "1.0.0")]
33624impl<T> Clone for SymmetricDifference<'_, T> {
33625    fn clone(&self) -> Self {
33626        SymmetricDifference(self.0.clone())
33627    }
33628}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: Ord> Iterator for SymmetricDifference<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        loop {
            // NOTE(review): `nexts` appears to advance both sides, pairing up
            // equal elements — confirm against the merge-iterator helper.
            let (a_next, b_next) = self.0.nexts(Self::Item::cmp);
            if a_next.and(b_next).is_none() {
                // At most one side yielded: the element (if any) is unique
                // to one set, so it belongs to the symmetric difference.
                return a_next.or(b_next);
            }
            // Both sides yielded, i.e. the element is shared: skip it.
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let (a_len, b_len) = self.0.lens();
        // No checked_add, because even if a and b refer to the same set,
        // and T is an empty type, the storage overhead of sets limits
        // the number of elements to less than half the range of usize.
        (0, Some(a_len + b_len))
    }

    fn min(mut self) -> Option<&'a T> {
        self.next()
    }
}
33654
#[stable(feature = "fused", since = "1.26.0")]
// Marker impl: an exhausted `SymmetricDifference` keeps returning `None`.
impl<T: Ord> FusedIterator for SymmetricDifference<'_, T> {}
33657
33658#[stable(feature = "rust1", since = "1.0.0")]
33659impl<T> Clone for Intersection<'_, T> {
33660    fn clone(&self) -> Self {
33661        Intersection {
33662            inner: match &self.inner {
33663                IntersectionInner::Stitch { a, b } => {
33664                    IntersectionInner::Stitch { a: a.clone(), b: b.clone() }
33665                }
33666                IntersectionInner::Search { small_iter, large_set } => {
33667                    IntersectionInner::Search { small_iter: small_iter.clone(), large_set }
33668                }
33669                IntersectionInner::Answer(answer) => IntersectionInner::Answer(*answer),
33670            },
33671        }
33672    }
33673}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: Ord> Iterator for Intersection<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        match &mut self.inner {
            // Advance whichever sorted iterator lags; an equal pair marks
            // an element shared by both sets.
            IntersectionInner::Stitch { a, b } => {
                let mut a_next = a.next()?;
                let mut b_next = b.next()?;
                loop {
                    match a_next.cmp(b_next) {
                        Less => a_next = a.next()?,
                        Greater => b_next = b.next()?,
                        Equal => return Some(a_next),
                    }
                }
            }
            // Probe the larger set for each element of the smaller iterator.
            IntersectionInner::Search { small_iter, large_set } => loop {
                let small_next = small_iter.next()?;
                if large_set.contains(&small_next) {
                    return Some(small_next);
                }
            },
            // Precomputed answer of at most one element; `take` yields it once.
            IntersectionInner::Answer(answer) => answer.take(),
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        match &self.inner {
            // The intersection can be no larger than the smaller input.
            IntersectionInner::Stitch { a, b } => (0, Some(min(a.len(), b.len()))),
            IntersectionInner::Search { small_iter, .. } => (0, Some(small_iter.len())),
            IntersectionInner::Answer(None) => (0, Some(0)),
            IntersectionInner::Answer(Some(_)) => (1, Some(1)),
        }
    }

    fn min(mut self) -> Option<&'a T> {
        self.next()
    }
}
33714
#[stable(feature = "fused", since = "1.26.0")]
// Marker impl: an exhausted `Intersection` keeps returning `None`.
impl<T: Ord> FusedIterator for Intersection<'_, T> {}
33717
33718#[stable(feature = "rust1", since = "1.0.0")]
33719impl<T> Clone for Union<'_, T> {
33720    fn clone(&self) -> Self {
33721        Union(self.0.clone())
33722    }
33723}
33724#[stable(feature = "rust1", since = "1.0.0")]
33725impl<'a, T: Ord> Iterator for Union<'a, T> {
33726    type Item = &'a T;
33727
33728    fn next(&mut self) -> Option<&'a T> {
33729        let (a_next, b_next) = self.0.nexts(Self::Item::cmp);
33730        a_next.or(b_next)
33731    }
33732
33733    fn size_hint(&self) -> (usize, Option<usize>) {
33734        let (a_len, b_len) = self.0.lens();
33735        // No checked_add - see SymmetricDifference::size_hint.
33736        (max(a_len, b_len), Some(a_len + b_len))
33737    }
33738
33739    fn min(mut self) -> Option<&'a T> {
33740        self.next()
33741    }
33742}
33743
#[stable(feature = "fused", since = "1.26.0")]
// Marker impl: an exhausted `Union` keeps returning `None`.
impl<T: Ord> FusedIterator for Union<'_, T> {}
33746
33747#[cfg(test)]
33748mod tests;
33749use core::borrow::Borrow;
33750use core::ops::RangeBounds;
33751use core::ptr;
33752
33753use super::node::{marker, ForceResult::*, Handle, NodeRef};
33754
/// A pair of leaf edges delimiting a range of key-value pairs in a tree.
pub struct LeafRange<BorrowType, K, V> {
    // Leaf edge bounding the range at the front; `None` when produced by
    // `none()` — presumably meaning an empty range (confirm with callers).
    pub front: Option<Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>>,
    // Leaf edge bounding the range at the back; see note on `front`.
    pub back: Option<Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>>,
}
33759
33760impl<BorrowType, K, V> LeafRange<BorrowType, K, V> {
33761    pub fn none() -> Self {
33762        LeafRange { front: None, back: None }
33763    }
33764
33765    pub fn is_empty(&self) -> bool {
33766        self.front == self.back
33767    }
33768
33769    /// Temporarily takes out another, immutable equivalent of the same range.
33770    pub fn reborrow(&self) -> LeafRange<marker::Immut<'_>, K, V> {
33771        LeafRange {
33772            front: self.front.as_ref().map(|f| f.reborrow()),
33773            back: self.back.as_ref().map(|b| b.reborrow()),
33774        }
33775    }
33776}
33777
impl<BorrowType: marker::BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
    /// Finds the distinct leaf edges delimiting a specified range in a tree.
    ///
    /// If such distinct edges exist, returns them in ascending order, meaning
    /// that a non-zero number of calls to `next_unchecked` on the `front` of
    /// the result and/or calls to `next_back_unchecked` on the `back` of the
    /// result will eventually reach the same edge.
    ///
    /// If there are no such edges, i.e., if the tree contains no key within
    /// the range, returns a pair of empty options.
    ///
    /// # Safety
    /// Unless `BorrowType` is `Immut`, do not use the handles to visit the same
    /// KV twice.
    unsafe fn find_leaf_edges_spanning_range<Q: ?Sized, R>(
        self,
        range: R,
    ) -> LeafRange<BorrowType, K, V>
    where
        Q: Ord,
        K: Borrow<Q>,
        R: RangeBounds<Q>,
    {
        match self.search_tree_for_bifurcation(&range) {
            // No distinct edges delimit the range: it contains no key.
            Err(_) => LeafRange::none(),
            Ok((
                node,
                lower_edge_idx,
                upper_edge_idx,
                mut lower_child_bound,
                mut upper_child_bound,
            )) => {
                // `ptr::read` duplicates `node` so both edge handles can start
                // at the bifurcation node even though `NodeRef` is moved below.
                let mut lower_edge = unsafe { Handle::new_edge(ptr::read(&node), lower_edge_idx) };
                let mut upper_edge = unsafe { Handle::new_edge(node, upper_edge_idx) };
                // Descend both bounds level by level until both reach a leaf.
                loop {
                    match (lower_edge.force(), upper_edge.force()) {
                        (Leaf(f), Leaf(b)) => return LeafRange { front: Some(f), back: Some(b) },
                        (Internal(f), Internal(b)) => {
                            // Destructuring assignment: replace each edge with
                            // the matching bound edge one level down.
                            (lower_edge, lower_child_bound) =
                                f.descend().find_lower_bound_edge(lower_child_bound);
                            (upper_edge, upper_child_bound) =
                                b.descend().find_upper_bound_edge(upper_child_bound);
                        }
                        // Both descents started at the same node, so they
                        // always sit at the same depth.
                        _ => unreachable!("BTreeMap has different depths"),
                    }
                }
            }
        }
    }
}
33828
33829/// Equivalent to `(root1.first_leaf_edge(), root2.last_leaf_edge())` but more efficient.
33830fn full_range<BorrowType: marker::BorrowType, K, V>(
33831    root1: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
33832    root2: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
33833) -> LeafRange<BorrowType, K, V> {
33834    let mut min_node = root1;
33835    let mut max_node = root2;
33836    loop {
33837        let front = min_node.first_edge();
33838        let back = max_node.last_edge();
33839        match (front.force(), back.force()) {
33840            (Leaf(f), Leaf(b)) => {
33841                return LeafRange { front: Some(f), back: Some(b) };
33842            }
33843            (Internal(min_int), Internal(max_int)) => {
33844                min_node = min_int.descend();
33845                max_node = max_int.descend();
33846            }
33847            _ => unreachable!("BTreeMap has different depths"),
33848        };
33849    }
33850}
33851
impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
    /// Finds the pair of leaf edges delimiting a specific range in a tree.
    ///
    /// The result is meaningful only if the tree is ordered by key, like the tree
    /// in a `BTreeMap` is.
    pub fn range_search<Q, R>(self, range: R) -> LeafRange<marker::Immut<'a>, K, V>
    where
        Q: ?Sized + Ord,
        K: Borrow<Q>,
        R: RangeBounds<Q>,
    {
        // SAFETY: our borrow type is immutable.
        // (Visiting the same KV twice through shared references is harmless.)
        unsafe { self.find_leaf_edges_spanning_range(range) }
    }

    /// Finds the pair of leaf edges delimiting an entire tree.
    pub fn full_range(self) -> LeafRange<marker::Immut<'a>, K, V> {
        // Passing `self` twice implies immutable NodeRefs are freely copyable,
        // so no `ptr::read` duplication is needed here, unlike the other
        // borrow types below.
        full_range(self, self)
    }
}
33872
impl<'a, K: 'a, V: 'a> NodeRef<marker::ValMut<'a>, K, V, marker::LeafOrInternal> {
    /// Splits a unique reference into a pair of leaf edges delimiting a specified range.
    /// The results are non-unique references allowing (some) mutation, which must be used
    /// carefully.
    ///
    /// The result is meaningful only if the tree is ordered by key, like the tree
    /// in a `BTreeMap` is.
    ///
    /// # Safety
    /// Do not use the duplicate handles to visit the same KV twice.
    pub fn range_search<Q, R>(self, range: R) -> LeafRange<marker::ValMut<'a>, K, V>
    where
        Q: ?Sized + Ord,
        K: Borrow<Q>,
        R: RangeBounds<Q>,
    {
        // SAFETY: the caller upholds this function's own # Safety contract,
        // which is exactly what the callee requires for non-Immut borrows.
        unsafe { self.find_leaf_edges_spanning_range(range) }
    }

    /// Splits a unique reference into a pair of leaf edges delimiting the full range of the tree.
    /// The results are non-unique references allowing mutation (of values only), so must be used
    /// with care.
    pub fn full_range(self) -> LeafRange<marker::ValMut<'a>, K, V> {
        // We duplicate the root NodeRef here -- we will never visit the same KV
        // twice, and never end up with overlapping value references.
        let self2 = unsafe { ptr::read(&self) };
        full_range(self, self2)
    }
}
33902
impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
    /// Splits a unique reference into a pair of leaf edges delimiting the full range of the tree.
    /// The results are non-unique references allowing massively destructive mutation, so must be
    /// used with the utmost care.
    pub fn full_range(self) -> LeafRange<marker::Dying, K, V> {
        // We duplicate the root NodeRef here -- we will never access it in a way
        // that overlaps references obtained from the root.
        // (Used when tearing a tree down, e.g. by a draining or dropping iterator.)
        let self2 = unsafe { ptr::read(&self) };
        full_range(self, self2)
    }
}
33914
impl<BorrowType: marker::BorrowType, K, V>
    Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>
{
    /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
    /// on the right side, which is either in the same leaf node or in an ancestor node.
    /// If the leaf edge is the last one in the tree, returns [`Result::Err`] with the root node.
    pub fn next_kv(
        self,
    ) -> Result<
        Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
        NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
    > {
        let mut edge = self.forget_node_type();
        // Climb toward the root as long as the edge is the rightmost one in
        // its node; the first node where there is a KV to the right wins.
        loop {
            edge = match edge.right_kv() {
                Ok(kv) => return Ok(kv),
                Err(last_edge) => match last_edge.into_node().ascend() {
                    Ok(parent_edge) => parent_edge.forget_node_type(),
                    // Ran off the top: this was the last edge of the tree.
                    Err(root) => return Err(root),
                },
            }
        }
    }

    /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
    /// on the left side, which is either in the same leaf node or in an ancestor node.
    /// If the leaf edge is the first one in the tree, returns [`Result::Err`] with the root node.
    pub fn next_back_kv(
        self,
    ) -> Result<
        Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
        NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
    > {
        let mut edge = self.forget_node_type();
        // Mirror image of `next_kv`: climb while the edge is the leftmost one.
        loop {
            edge = match edge.left_kv() {
                Ok(kv) => return Ok(kv),
                // NOTE: despite the name, in this direction `last_edge` is the
                // *first* edge of its node (the one `left_kv` failed on).
                Err(last_edge) => match last_edge.into_node().ascend() {
                    Ok(parent_edge) => parent_edge.forget_node_type(),
                    Err(root) => return Err(root),
                },
            }
        }
    }
}
33960
impl<BorrowType: marker::BorrowType, K, V>
    Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>
{
    /// Given an internal edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
    /// on the right side, which is either in the same internal node or in an ancestor node.
    /// If the internal edge is the last one in the tree, returns [`Result::Err`] with the root node.
    pub fn next_kv(
        self,
    ) -> Result<
        Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV>,
        NodeRef<BorrowType, K, V, marker::Internal>,
    > {
        let mut edge = self;
        // Same upward walk as the leaf-edge version above, but it stays at
        // internal level, so no node-type erasure is needed.
        loop {
            edge = match edge.right_kv() {
                Ok(internal_kv) => return Ok(internal_kv),
                Err(last_edge) => match last_edge.into_node().ascend() {
                    Ok(parent_edge) => parent_edge,
                    Err(root) => return Err(root),
                },
            }
        }
    }
}
33985
impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
    /// Given a leaf edge handle into a dying tree, returns the next leaf edge
    /// on the right side, and the key-value pair in between, which is either
    /// in the same leaf node, in an ancestor node, or non-existent.
    ///
    /// This method also deallocates any node(s) it reaches the end of. This
    /// implies that if no more key-value pair exists, the entire remainder of
    /// the tree will have been deallocated and there is nothing left to return.
    ///
    /// # Safety
    /// The given edge must not have been previously returned by counterpart
    /// `deallocating_next_back`.
    unsafe fn deallocating_next(self) -> Option<(Self, (K, V))> {
        let mut edge = self.forget_node_type();
        loop {
            edge = match edge.right_kv() {
                Ok(kv) => {
                    // Move the key and value out of the node. The slots they
                    // occupied are left logically uninitialized and must never
                    // be read again -- acceptable only because the tree is dying.
                    let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
                    let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
                    return Some((kv.next_leaf_edge(), (k, v)));
                }
                // Reached the end of a node: free it and continue in the parent.
                Err(last_edge) => match unsafe { last_edge.into_node().deallocate_and_ascend() } {
                    Some(parent_edge) => parent_edge.forget_node_type(),
                    None => return None,
                },
            }
        }
    }

    /// Given a leaf edge handle into a dying tree, returns the next leaf edge
    /// on the left side, and the key-value pair in between, which is either
    /// in the same leaf node, in an ancestor node, or non-existent.
    ///
    /// This method also deallocates any node(s) it reaches the end of. This
    /// implies that if no more key-value pair exists, the entire remainder of
    /// the tree will have been deallocated and there is nothing left to return.
    ///
    /// # Safety
    /// The given edge must not have been previously returned by counterpart
    /// `deallocating_next`.
    unsafe fn deallocating_next_back(self) -> Option<(Self, (K, V))> {
        let mut edge = self.forget_node_type();
        // Mirror image of `deallocating_next`, walking leftward instead.
        loop {
            edge = match edge.left_kv() {
                Ok(kv) => {
                    let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
                    let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
                    return Some((kv.next_back_leaf_edge(), (k, v)));
                }
                Err(last_edge) => match unsafe { last_edge.into_node().deallocate_and_ascend() } {
                    Some(parent_edge) => parent_edge.forget_node_type(),
                    None => return None,
                },
            }
        }
    }

    /// Deallocates a pile of nodes from the leaf up to the root.
    /// This is the only way to deallocate the remainder of a tree after
    /// `deallocating_next` and `deallocating_next_back` have been nibbling at
    /// both sides of the tree, and have hit the same edge. As it is intended
    /// only to be called when all keys and values have been returned,
    /// no cleanup is done on any of the keys or values.
    pub fn deallocating_end(self) {
        let mut edge = self.forget_node_type();
        // Free each node on the path from this leaf up to (and including) the root.
        while let Some(parent_edge) = unsafe { edge.into_node().deallocate_and_ascend() } {
            edge = parent_edge.forget_node_type();
        }
    }
}
34056
impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Moves the leaf edge handle to the next leaf edge and returns references to the
    /// key and value in between.
    ///
    /// # Safety
    /// There must be another KV in the direction travelled.
    pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
        // `super::mem::replace` temporarily takes the handle out of `self`,
        // lets the closure produce (new handle, return value), and stores the
        // new handle back.
        super::mem::replace(self, |leaf_edge| {
            let kv = leaf_edge.next_kv();
            // SAFETY: the caller guarantees another KV exists to the right,
            // so `next_kv` cannot have returned `Err(root)`.
            let kv = unsafe { kv.ok().unwrap_unchecked() };
            (kv.next_leaf_edge(), kv.into_kv())
        })
    }

    /// Moves the leaf edge handle to the previous leaf edge and returns references to the
    /// key and value in between.
    ///
    /// # Safety
    /// There must be another KV in the direction travelled.
    pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
        super::mem::replace(self, |leaf_edge| {
            let kv = leaf_edge.next_back_kv();
            // SAFETY: the caller guarantees another KV exists to the left.
            let kv = unsafe { kv.ok().unwrap_unchecked() };
            (kv.next_back_leaf_edge(), kv.into_kv())
        })
    }
}
34084
impl<'a, K, V> Handle<NodeRef<marker::ValMut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Moves the leaf edge handle to the next leaf edge and returns references to the
    /// key and value in between.
    ///
    /// # Safety
    /// There must be another KV in the direction travelled.
    pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) {
        let kv = super::mem::replace(self, |leaf_edge| {
            let kv = leaf_edge.next_kv();
            // SAFETY: the caller guarantees another KV exists to the right.
            let kv = unsafe { kv.ok().unwrap_unchecked() };
            // Duplicate the KV handle: one copy computes the successor edge,
            // the other is returned to yield the KV itself. Sound because the
            // ValMut contract forbids visiting the same KV twice.
            (unsafe { ptr::read(&kv) }.next_leaf_edge(), kv)
        });
        // Doing this last is faster, according to benchmarks.
        kv.into_kv_valmut()
    }

    /// Moves the leaf edge handle to the previous leaf and returns references to the
    /// key and value in between.
    ///
    /// # Safety
    /// There must be another KV in the direction travelled.
    pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) {
        let kv = super::mem::replace(self, |leaf_edge| {
            let kv = leaf_edge.next_back_kv();
            // SAFETY: the caller guarantees another KV exists to the left.
            let kv = unsafe { kv.ok().unwrap_unchecked() };
            (unsafe { ptr::read(&kv) }.next_back_leaf_edge(), kv)
        });
        // Doing this last is faster, according to benchmarks.
        kv.into_kv_valmut()
    }
}
34116
impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
    /// Moves the leaf edge handle to the next leaf edge and returns the key and value
    /// in between, deallocating any node left behind while leaving the corresponding
    /// edge in its parent node dangling.
    ///
    /// # Safety
    /// - There must be another KV in the direction travelled.
    /// - That KV was not previously returned by counterpart `next_back_unchecked`
    ///   on any copy of the handles being used to traverse the tree.
    ///
    /// The only safe way to proceed with the updated handle is to compare it, drop it,
    /// call this method again subject to its safety conditions, or call counterpart
    /// `next_back_unchecked` subject to its safety conditions.
    pub unsafe fn deallocating_next_unchecked(&mut self) -> (K, V) {
        // SAFETY: the first caller obligation above means `deallocating_next`
        // finds a KV, so `unwrap_unchecked` never observes `None`; the second
        // obligation satisfies `deallocating_next`'s own contract.
        super::mem::replace(self, |leaf_edge| unsafe {
            leaf_edge.deallocating_next().unwrap_unchecked()
        })
    }

    /// Moves the leaf edge handle to the previous leaf edge and returns the key and value
    /// in between, deallocating any node left behind while leaving the corresponding
    /// edge in its parent node dangling.
    ///
    /// # Safety
    /// - There must be another KV in the direction travelled.
    /// - That leaf edge was not previously returned by counterpart `next_unchecked`
    ///   on any copy of the handles being used to traverse the tree.
    ///
    /// The only safe way to proceed with the updated handle is to compare it, drop it,
    /// call this method again subject to its safety conditions, or call counterpart
    /// `next_unchecked` subject to its safety conditions.
    pub unsafe fn deallocating_next_back_unchecked(&mut self) -> (K, V) {
        // SAFETY: mirror of `deallocating_next_unchecked` above.
        super::mem::replace(self, |leaf_edge| unsafe {
            leaf_edge.deallocating_next_back().unwrap_unchecked()
        })
    }
}
34154
impl<BorrowType: marker::BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
    /// Returns the leftmost leaf edge in or underneath a node - in other words, the edge
    /// you need first when navigating forward (or last when navigating backward).
    #[inline]
    pub fn first_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
        // Follow the leftmost spine down until a leaf is reached.
        let mut node = self;
        loop {
            match node.force() {
                Leaf(leaf) => return leaf.first_edge(),
                Internal(internal) => node = internal.first_edge().descend(),
            }
        }
    }

    /// Returns the rightmost leaf edge in or underneath a node - in other words, the edge
    /// you need last when navigating forward (or first when navigating backward).
    #[inline]
    pub fn last_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
        // Mirror image: follow the rightmost spine down to a leaf.
        let mut node = self;
        loop {
            match node.force() {
                Leaf(leaf) => return leaf.last_edge(),
                Internal(internal) => node = internal.last_edge().descend(),
            }
        }
    }
}
34182
/// A step reported by `visit_nodes_in_order`: either a whole node or a single
/// KV inside an internal node.
pub enum Position<BorrowType, K, V> {
    /// A whole leaf node.
    Leaf(NodeRef<BorrowType, K, V, marker::Leaf>),
    /// A whole internal node, visited before its individual KVs and children.
    Internal(NodeRef<BorrowType, K, V, marker::Internal>),
    /// One KV inside an internal node, visited between its two child subtrees.
    InternalKV(Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV>),
}
34188
impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
    /// Visits leaf nodes and internal KVs in order of ascending keys, and also
    /// visits internal nodes as a whole in a depth first order, meaning that
    /// internal nodes precede their individual KVs and their child nodes.
    pub fn visit_nodes_in_order<F>(self, mut visit: F)
    where
        F: FnMut(Position<marker::Immut<'a>, K, V>),
    {
        match self.force() {
            // Height 0: the root is the only node.
            Leaf(leaf) => visit(Position::Leaf(leaf)),
            Internal(internal) => {
                visit(Position::Internal(internal));
                // `edge` is always an internal-node edge whose subtree is
                // visited next.
                let mut edge = internal.first_edge();
                loop {
                    edge = match edge.descend().force() {
                        Leaf(leaf) => {
                            visit(Position::Leaf(leaf));
                            // After a leaf, the next item in key order is an
                            // internal KV, possibly several levels up; if none
                            // remains, the traversal is complete.
                            match edge.next_kv() {
                                Ok(kv) => {
                                    visit(Position::InternalKV(kv));
                                    kv.right_edge()
                                }
                                Err(_) => return,
                            }
                        }
                        Internal(internal) => {
                            visit(Position::Internal(internal));
                            internal.first_edge()
                        }
                    }
                }
            }
        }
    }

    /// Calculates the number of elements in a (sub)tree.
    pub fn calc_length(self) -> usize {
        let mut result = 0;
        // Each node's `len` counts the KVs stored directly in that node, so
        // summing over all nodes counts every KV exactly once; individual
        // `InternalKV` visits would double-count and are ignored.
        self.visit_nodes_in_order(|pos| match pos {
            Position::Leaf(node) => result += node.len(),
            Position::Internal(node) => result += node.len(),
            Position::InternalKV(_) => (),
        });
        result
    }
}
34235
impl<BorrowType: marker::BorrowType, K, V>
    Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>
{
    /// Returns the leaf edge closest to a KV for forward navigation.
    pub fn next_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
        match self.force() {
            // In a leaf, the successor edge is simply the one to the right.
            Leaf(leaf_kv) => leaf_kv.right_edge(),
            // In an internal node, descend into the right subtree and take
            // its leftmost leaf edge.
            Internal(internal_kv) => {
                let next_internal_edge = internal_kv.right_edge();
                next_internal_edge.descend().first_leaf_edge()
            }
        }
    }

    /// Returns the leaf edge closest to a KV for backward navigation.
    pub fn next_back_leaf_edge(
        self,
    ) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
        match self.force() {
            Leaf(leaf_kv) => leaf_kv.left_edge(),
            // Mirror image: rightmost leaf edge of the left subtree.
            Internal(internal_kv) => {
                let next_internal_edge = internal_kv.left_edge();
                next_internal_edge.descend().last_leaf_edge()
            }
        }
    }
}
34263mod append;
34264mod borrow;
34265mod fix;
34266pub mod map;
34267mod mem;
34268mod merge_iter;
34269mod navigate;
34270mod node;
34271mod remove;
34272mod search;
34273pub mod set;
34274mod split;
34275
/// Map-like access to the owned key stored in a set-like collection, looked up
/// through a borrowed form `Q` of the key.
/// NOTE(review): semantics inferred from the signatures — confirm against the impls.
#[doc(hidden)]
trait Recover<Q: ?Sized> {
    /// The owned key type stored in the collection.
    type Key;

    /// Returns a reference to the stored key equal to `key`, if present.
    fn get(&self, key: &Q) -> Option<&Self::Key>;
    /// Removes and returns the stored key equal to `key`, if present.
    fn take(&mut self, key: &Q) -> Option<Self::Key>;
    /// Inserts `key`, returning the equal key it displaced, if any.
    fn replace(&mut self, key: Self::Key) -> Option<Self::Key>;
}
34284
34285#[cfg(test)]
34286mod testing;
34287use super::super::testing::crash_test::{CrashTestDummy, Panic};
34288use super::super::testing::rng::DeterministicRng;
34289use super::*;
34290use crate::vec::Vec;
34291use std::cmp::Ordering;
34292use std::iter::FromIterator;
34293use std::panic::{catch_unwind, AssertUnwindSafe};
34294
#[test]
fn test_clone_eq() {
    // Cloning a populated set must produce a set that compares equal.
    let mut original = BTreeSet::new();
    for value in [1, 2] {
        original.insert(value);
    }
    assert_eq!(original.clone(), original);
}
34304
#[allow(dead_code)]
// Never executed: this function only has to compile, proving that
// `BTreeSet::new`, `len` and `is_empty` are usable in `const` contexts.
fn test_const() {
    const SET: &'static BTreeSet<()> = &BTreeSet::new();
    const LEN: usize = SET.len();
    const IS_EMPTY: bool = SET.is_empty();
}
34311
#[test]
fn test_iter_min_max() {
    let empty = BTreeSet::new();
    let mut a = BTreeSet::new();
    // An empty set yields no extremes, whichever way it is iterated or combined.
    assert_eq!(a.iter().min(), None);
    assert_eq!(a.iter().max(), None);
    assert_eq!(a.range(..).min(), None);
    assert_eq!(a.range(..).max(), None);
    assert_eq!(a.difference(&empty).min(), None);
    assert_eq!(a.difference(&empty).max(), None);
    assert_eq!(a.intersection(&a).min(), None);
    assert_eq!(a.intersection(&a).max(), None);
    assert_eq!(a.symmetric_difference(&empty).min(), None);
    assert_eq!(a.symmetric_difference(&empty).max(), None);
    assert_eq!(a.union(&a).min(), None);
    assert_eq!(a.union(&a).max(), None);
    a.extend([1, 2]);
    // With {1, 2} stored, every iterator flavour agrees that 1 is the
    // minimum and 2 the maximum.
    assert_eq!(a.iter().min(), Some(&1));
    assert_eq!(a.iter().max(), Some(&2));
    assert_eq!(a.range(..).min(), Some(&1));
    assert_eq!(a.range(..).max(), Some(&2));
    assert_eq!(a.difference(&empty).min(), Some(&1));
    assert_eq!(a.difference(&empty).max(), Some(&2));
    assert_eq!(a.intersection(&a).min(), Some(&1));
    assert_eq!(a.intersection(&a).max(), Some(&2));
    assert_eq!(a.symmetric_difference(&empty).min(), Some(&1));
    assert_eq!(a.symmetric_difference(&empty).max(), Some(&2));
    assert_eq!(a.union(&a).min(), Some(&1));
    assert_eq!(a.union(&a).max(), Some(&2));
}
34342
/// Builds two sets from `a` and `b`, feeds them to `f` together with a
/// callback, and verifies the callback saw exactly the elements of
/// `expected`, in order.
fn check<F>(a: &[i32], b: &[i32], expected: &[i32], f: F)
where
    F: FnOnce(&BTreeSet<i32>, &BTreeSet<i32>, &mut dyn FnMut(&i32) -> bool) -> bool,
{
    let mut set_a = BTreeSet::new();
    let mut set_b = BTreeSet::new();

    // The slices must be duplicate-free: `insert` returning false would
    // indicate a repeated element.
    for &x in a {
        assert!(set_a.insert(x));
    }
    for &y in b {
        assert!(set_b.insert(y));
    }

    // Check each produced element against `expected`, keep iterating (return
    // true), and finally make sure exactly `expected.len()` were produced.
    let mut idx = 0;
    f(&set_a, &set_b, &mut |&value| {
        if idx < expected.len() {
            assert_eq!(value, expected[idx]);
        }
        idx += 1;
        true
    });
    assert_eq!(idx, expected.len());
}
34367
#[test]
fn test_intersection() {
    // Each case: the elements common to `a` and `b` must be exactly `expected`.
    let check_intersection = |a: &[i32], b: &[i32], expected: &[i32]| {
        check(a, b, expected, |x, y, f| x.intersection(y).all(f))
    };

    check_intersection(&[], &[], &[]);
    check_intersection(&[1, 2, 3], &[], &[]);
    check_intersection(&[], &[1, 2, 3], &[]);
    check_intersection(&[2], &[1, 2, 3], &[2]);
    check_intersection(&[1, 2, 3], &[2], &[2]);
    check_intersection(&[11, 1, 3, 77, 103, 5, -5], &[2, 11, 77, -9, -42, 5, 3], &[3, 5, 11, 77]);

    // The remaining cases use larger inputs, which Miri executes too slowly.
    if cfg!(miri) {
        return;
    }

    let large: Vec<_> = (0..100).collect();
    check_intersection(&[], &large, &[]);
    check_intersection(&large, &[], &[]);
    check_intersection(&[-1], &large, &[]);
    check_intersection(&large, &[-1], &[]);
    check_intersection(&[0], &large, &[0]);
    check_intersection(&large, &[0], &[0]);
    check_intersection(&[99], &large, &[99]);
    check_intersection(&large, &[99], &[99]);
    check_intersection(&[100], &large, &[]);
    check_intersection(&large, &[100], &[]);
    check_intersection(&[11, 5000, 1, 3, 77, 8924], &large, &[1, 3, 11, 77]);
}
34399
#[test]
fn test_intersection_size_hint() {
    let x: BTreeSet<i32> = [3, 4].iter().copied().collect();
    let y: BTreeSet<i32> = [1, 2, 3].iter().copied().collect();
    let mut iter = x.intersection(&y);
    // {3, 4} vs {1, 2, 3}: the hint is exact (one shared element) up front,
    // and drops to zero once that element has been produced.
    assert_eq!(iter.size_hint(), (1, Some(1)));
    assert_eq!(iter.next(), Some(&3));
    assert_eq!(iter.size_hint(), (0, Some(0)));
    assert_eq!(iter.next(), None);

    // Intersecting a set with itself: only an upper bound is promised, which
    // shrinks as elements are consumed.
    iter = y.intersection(&y);
    assert_eq!(iter.size_hint(), (0, Some(3)));
    assert_eq!(iter.next(), Some(&1));
    assert_eq!(iter.size_hint(), (0, Some(2)));
}
34415
#[test]
fn test_difference() {
    // Each case: the elements of `a` absent from `b` must be exactly `expected`.
    let check_difference = |a: &[i32], b: &[i32], expected: &[i32]| {
        check(a, b, expected, |x, y, f| x.difference(y).all(f))
    };

    check_difference(&[], &[], &[]);
    check_difference(&[1, 12], &[], &[1, 12]);
    check_difference(&[], &[1, 2, 3, 9], &[]);
    check_difference(&[1, 3, 5, 9, 11], &[3, 9], &[1, 5, 11]);
    check_difference(&[1, 3, 5, 9, 11], &[3, 6, 9], &[1, 5, 11]);
    check_difference(&[1, 3, 5, 9, 11], &[0, 1], &[3, 5, 9, 11]);
    check_difference(&[1, 3, 5, 9, 11], &[11, 12], &[1, 3, 5, 9]);
    check_difference(
        &[-5, 11, 22, 33, 40, 42],
        &[-12, -5, 14, 23, 34, 38, 39, 50],
        &[11, 22, 33, 40, 42],
    );

    // The remaining cases use larger inputs, which Miri executes too slowly.
    if cfg!(miri) {
        return;
    }

    let large: Vec<_> = (0..100).collect();
    check_difference(&[], &large, &[]);
    check_difference(&[-1], &large, &[-1]);
    check_difference(&[0], &large, &[]);
    check_difference(&[99], &large, &[]);
    check_difference(&[100], &large, &[100]);
    check_difference(&[11, 5000, 1, 3, 77, 8924], &large, &[5000, 8924]);
    check_difference(&large, &[], &large);
    check_difference(&large, &[-1], &large);
    check_difference(&large, &[100], &large);
}
34451
#[test]
fn test_difference_size_hint() {
    // `Difference`'s size_hint depends on how the two sets' key ranges
    // overlap; each pairing below pins the initial hint and how it evolves
    // as elements are consumed. The minuend is always {2, 4, 6}.
    let s246: BTreeSet<i32> = [2, 4, 6].iter().copied().collect();
    // Subtrahend covers all of {2, 4, 6}: no element is guaranteed to survive.
    let s23456: BTreeSet<i32> = (2..=6).collect();
    let mut iter = s246.difference(&s23456);
    assert_eq!(iter.size_hint(), (0, Some(3)));
    assert_eq!(iter.next(), None);

    // Subtrahend covers 2 and 4 but not 6.
    let s12345: BTreeSet<i32> = (1..=5).collect();
    iter = s246.difference(&s12345);
    assert_eq!(iter.size_hint(), (0, Some(3)));
    assert_eq!(iter.next(), Some(&6));
    assert_eq!(iter.size_hint(), (0, Some(0)));
    assert_eq!(iter.next(), None);

    // Subtrahend covers 4 and 6 but not 2.
    let s34567: BTreeSet<i32> = (3..=7).collect();
    iter = s246.difference(&s34567);
    assert_eq!(iter.size_hint(), (0, Some(3)));
    assert_eq!(iter.next(), Some(&2));
    assert_eq!(iter.size_hint(), (0, Some(2)));
    assert_eq!(iter.next(), None);

    // Subtrahend lies entirely below {2, 4, 6}: all three must survive.
    let s1: BTreeSet<i32> = (-9..=1).collect();
    iter = s246.difference(&s1);
    assert_eq!(iter.size_hint(), (3, Some(3)));

    // Subtrahend's range touches only the 2.
    let s2: BTreeSet<i32> = (-9..=2).collect();
    iter = s246.difference(&s2);
    assert_eq!(iter.size_hint(), (2, Some(2)));
    assert_eq!(iter.next(), Some(&4));
    assert_eq!(iter.size_hint(), (1, Some(1)));

    // Subtrahend overlaps the low end only.
    let s23: BTreeSet<i32> = (2..=3).collect();
    iter = s246.difference(&s23);
    assert_eq!(iter.size_hint(), (1, Some(3)));
    assert_eq!(iter.next(), Some(&4));
    assert_eq!(iter.size_hint(), (1, Some(1)));

    // Subtrahend sits strictly inside {2, 4, 6}'s range.
    let s4: BTreeSet<i32> = (4..=4).collect();
    iter = s246.difference(&s4);
    assert_eq!(iter.size_hint(), (2, Some(3)));
    assert_eq!(iter.next(), Some(&2));
    assert_eq!(iter.size_hint(), (1, Some(2)));
    assert_eq!(iter.next(), Some(&6));
    assert_eq!(iter.size_hint(), (0, Some(0)));
    assert_eq!(iter.next(), None);

    // Subtrahend overlaps the high end only.
    let s56: BTreeSet<i32> = (5..=6).collect();
    iter = s246.difference(&s56);
    assert_eq!(iter.size_hint(), (1, Some(3)));
    assert_eq!(iter.next(), Some(&2));
    assert_eq!(iter.size_hint(), (0, Some(2)));

    // Subtrahend starts at 6: 2 and 4 are guaranteed to survive.
    let s6: BTreeSet<i32> = (6..=19).collect();
    iter = s246.difference(&s6);
    assert_eq!(iter.size_hint(), (2, Some(2)));
    assert_eq!(iter.next(), Some(&2));
    assert_eq!(iter.size_hint(), (1, Some(1)));

    // Subtrahend lies entirely above {2, 4, 6}: all three must survive.
    let s7: BTreeSet<i32> = (7..=19).collect();
    iter = s246.difference(&s7);
    assert_eq!(iter.size_hint(), (3, Some(3)));
}
34515
#[test]
fn test_symmetric_difference() {
    // Each case: the elements in exactly one of `a` and `b` must be `expected`.
    let check_symmetric_difference = |a: &[i32], b: &[i32], expected: &[i32]| {
        check(a, b, expected, |x, y, f| x.symmetric_difference(y).all(f))
    };

    check_symmetric_difference(&[], &[], &[]);
    check_symmetric_difference(&[1, 2, 3], &[2], &[1, 3]);
    check_symmetric_difference(&[2], &[1, 2, 3], &[1, 3]);
    check_symmetric_difference(&[1, 3, 5, 9, 11], &[-2, 3, 9, 14, 22], &[-2, 1, 5, 11, 14, 22]);
}
34527
#[test]
fn test_symmetric_difference_size_hint() {
    let x: BTreeSet<i32> = [2, 4].iter().copied().collect();
    let y: BTreeSet<i32> = [1, 2, 3].iter().copied().collect();
    let mut iter = x.symmetric_difference(&y);
    // Before iterating, anywhere from 0 to len(x) + len(y) elements may be
    // produced; the upper bound shrinks as elements are consumed.
    assert_eq!(iter.size_hint(), (0, Some(5)));
    assert_eq!(iter.next(), Some(&1));
    assert_eq!(iter.size_hint(), (0, Some(4)));
    assert_eq!(iter.next(), Some(&3));
    assert_eq!(iter.size_hint(), (0, Some(1)));
}
34539
#[test]
fn test_union() {
    // Each case: the union of `a` and `b` must yield exactly `expected`.
    let check_union = |a: &[i32], b: &[i32], expected: &[i32]| {
        check(a, b, expected, |x, y, f| x.union(y).all(f))
    };

    check_union(&[], &[], &[]);
    check_union(&[1, 2, 3], &[2], &[1, 2, 3]);
    check_union(&[2], &[1, 2, 3], &[1, 2, 3]);
    check_union(
        &[1, 3, 5, 9, 11, 16, 19, 24],
        &[-2, 1, 5, 9, 13, 19],
        &[-2, 1, 3, 5, 9, 11, 13, 16, 19, 24],
    );
}
34555
#[test]
fn test_union_size_hint() {
    let x: BTreeSet<i32> = (1..=2).map(|n| n * 2).collect(); // {2, 4}
    let y: BTreeSet<i32> = (1..=3).collect(); // {1, 2, 3}
    let mut iter = x.union(&y);
    // The union contains at least as many elements as the larger remaining
    // side and at most the sum of both remaining sides.
    assert_eq!(iter.size_hint(), (3, Some(5)));
    assert_eq!(iter.next(), Some(&1));
    assert_eq!(iter.size_hint(), (2, Some(4)));
    assert_eq!(iter.next(), Some(&2));
    assert_eq!(iter.size_hint(), (1, Some(2)));
}
34567
#[test]
// Only tests the simple function definition with respect to intersection
fn test_is_disjoint() {
    // Two singleton sets with different elements share nothing.
    let one: BTreeSet<_> = [1].iter().collect();
    let two: BTreeSet<_> = [2].iter().collect();
    assert!(one.is_disjoint(&two));
}
34575
#[test]
// Also implicitly tests the trivial function definition of is_superset
fn test_is_subset() {
    // Collects both slices into sets and asks whether `a` is a subset of `b`.
    fn is_subset(a: &[i32], b: &[i32]) -> bool {
        let sa: BTreeSet<_> = a.iter().collect();
        let sb: BTreeSet<_> = b.iter().collect();
        sa.is_subset(&sb)
    }

    assert!(is_subset(&[], &[]));
    assert!(is_subset(&[], &[1, 2]));
    assert!(!is_subset(&[0], &[1, 2]));
    assert!(is_subset(&[1], &[1, 2]));
    assert!(is_subset(&[2], &[1, 2]));
    assert!(!is_subset(&[3], &[1, 2]));
    assert!(!is_subset(&[1, 2], &[1]));
    assert!(is_subset(&[1, 2], &[1, 2]));
    assert!(!is_subset(&[1, 2], &[2, 3]));
    assert!(is_subset(
        &[-5, 11, 22, 33, 40, 42],
        &[-12, -5, 11, 14, 22, 23, 33, 34, 38, 39, 40, 42]
    ));
    assert!(!is_subset(&[-5, 11, 22, 33, 40, 42], &[-12, -5, 11, 14, 22, 23, 34, 38]));

    if cfg!(miri) {
        // Miri is too slow
        return;
    }

    // A larger superset exercises the search-based subset path.
    let large: Vec<_> = (0..100).collect();
    assert!(is_subset(&[], &large));
    assert!(!is_subset(&large, &[]));
    assert!(!is_subset(&[-1], &large));
    assert!(is_subset(&[0], &large));
    assert!(is_subset(&[1, 2], &large));
    assert!(!is_subset(&[99, 100], &large));
}
34613
#[test]
fn test_retain() {
    let mut set: BTreeSet<i32> = (1..=6).collect();
    // Keep only the even elements.
    set.retain(|&k| k % 2 == 0);
    assert_eq!(set.len(), 3);
    for kept in [2, 4, 6].iter() {
        assert!(set.contains(kept));
    }
}
34624
#[test]
fn test_drain_filter() {
    let mut x: BTreeSet<_> = [1].iter().copied().collect();
    let mut y: BTreeSet<_> = [1].iter().copied().collect();

    // A predicate that always matches drains every element ...
    x.drain_filter(|_| true);
    // ... while one that never matches leaves the set untouched.
    y.drain_filter(|_| false);
    assert_eq!(x.len(), 0);
    assert_eq!(y.len(), 1);
}
34635
#[test]
fn test_drain_filter_drop_panic_leak() {
    // Checks that when an element's destructor panics during `drain_filter`,
    // every element is still dropped exactly once (no leaks, no double drops).
    let a = CrashTestDummy::new(0);
    let b = CrashTestDummy::new(1);
    let c = CrashTestDummy::new(2);
    let mut set = BTreeSet::new();
    set.insert(a.spawn(Panic::Never));
    // `b` is configured to panic in its destructor.
    set.insert(b.spawn(Panic::InDrop));
    set.insert(c.spawn(Panic::Never));

    // Dropping the drain iterator triggers `b`'s panicking drop; swallow the unwind.
    catch_unwind(move || drop(set.drain_filter(|dummy| dummy.query(true)))).ok();

    // `a` and `b` were queried before the panic; `c` was never reached.
    assert_eq!(a.queried(), 1);
    assert_eq!(b.queried(), 1);
    assert_eq!(c.queried(), 0);
    // Despite the panic, all three elements were dropped exactly once.
    assert_eq!(a.dropped(), 1);
    assert_eq!(b.dropped(), 1);
    assert_eq!(c.dropped(), 1);
}
34655
#[test]
fn test_drain_filter_pred_panic_leak() {
    // Checks that a panic inside the drain predicate does not leak or drop
    // elements that are still owned by the set.
    let a = CrashTestDummy::new(0);
    let b = CrashTestDummy::new(1);
    let c = CrashTestDummy::new(2);
    let mut set = BTreeSet::new();
    set.insert(a.spawn(Panic::Never));
    // `b` and `c` are configured to panic when queried by the predicate.
    set.insert(b.spawn(Panic::InQuery));
    set.insert(c.spawn(Panic::InQuery));

    // The predicate panics on `b`; swallow the unwind and inspect the aftermath.
    catch_unwind(AssertUnwindSafe(|| drop(set.drain_filter(|dummy| dummy.query(true))))).ok();

    // `a` was queried and drained; the panic on `b` stopped iteration before `c`.
    assert_eq!(a.queried(), 1);
    assert_eq!(b.queried(), 1);
    assert_eq!(c.queried(), 0);
    // Only the drained `a` was dropped; `b` and `c` remain alive in the set.
    assert_eq!(a.dropped(), 1);
    assert_eq!(b.dropped(), 0);
    assert_eq!(c.dropped(), 0);
    assert_eq!(set.len(), 2);
    assert_eq!(set.first().unwrap().id(), 1);
    assert_eq!(set.last().unwrap().id(), 2);
}
34678
#[test]
fn test_clear() {
    // A non-empty set becomes empty after `clear`.
    let mut x: BTreeSet<_> = Some(1).into_iter().collect();
    x.clear();
    assert!(x.is_empty());
}
34687
#[test]
fn test_zip() {
    let x: BTreeSet<_> = [5, 12, 11].iter().copied().collect();
    let y: BTreeSet<_> = ["foo", "bar"].iter().copied().collect();

    // Both sets iterate in ascending order; zip stops at the shorter side.
    let mut z = x.iter().zip(&y);
    assert_eq!(z.next().unwrap(), (&5, &"bar"));
    assert_eq!(z.next().unwrap(), (&11, &"foo"));
    assert!(z.next().is_none());
}
34707
#[test]
fn test_from_iter() {
    let xs = [1, 2, 3, 4, 5, 6, 7, 8, 9];
    let set: BTreeSet<_> = xs.iter().cloned().collect();

    // Every element of the source must be present in the collected set.
    assert!(xs.iter().all(|x| set.contains(x)));
}
34718
#[test]
fn test_show() {
    let set: BTreeSet<_> = [1, 2].iter().copied().collect();
    let empty = BTreeSet::<i32>::new();

    // Debug output is brace-delimited with comma-separated elements.
    assert_eq!(format!("{:?}", set), "{1, 2}");
    assert_eq!(format!("{:?}", empty), "{}");
}
34732
#[test]
fn test_extend_ref() {
    let mut a = BTreeSet::new();
    a.insert(1);

    // Extending from a slice of references copies the elements in.
    a.extend(&[2, 3, 4]);
    assert_eq!(a.len(), 4);
    for i in 1..=4 {
        assert!(a.contains(&i));
    }

    // Extending from another set by reference also copies.
    let mut b = BTreeSet::new();
    b.insert(5);
    b.insert(6);
    a.extend(&b);

    assert_eq!(a.len(), 6);
    for i in 1..=6 {
        assert!(a.contains(&i));
    }
}
34760
#[test]
fn test_recovery() {
    // Key type whose equality and ordering look only at the first field, so
    // the test can observe which concrete value the set stores and returns.
    #[derive(Debug)]
    struct Foo(&'static str, i32);

    impl PartialEq for Foo {
        fn eq(&self, other: &Self) -> bool {
            self.0 == other.0
        }
    }

    impl Eq for Foo {}

    impl PartialOrd for Foo {
        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
            Some(self.cmp(other))
        }
    }

    impl Ord for Foo {
        fn cmp(&self, other: &Self) -> Ordering {
            self.0.cmp(&other.0)
        }
    }

    let mut s = BTreeSet::new();
    assert_eq!(s.replace(Foo("a", 1)), None);
    assert_eq!(s.len(), 1);
    // `replace` swaps in the new value and hands back the previously stored one.
    assert_eq!(s.replace(Foo("a", 2)), Some(Foo("a", 1)));
    assert_eq!(s.len(), 1);

    {
        let mut it = s.iter();
        assert_eq!(it.next(), Some(&Foo("a", 2)));
        assert_eq!(it.next(), None);
    }

    // `get` and `take` locate the stored value through any equal key.
    assert_eq!(s.get(&Foo("a", 1)), Some(&Foo("a", 2)));
    assert_eq!(s.take(&Foo("a", 1)), Some(Foo("a", 2)));
    assert_eq!(s.len(), 0);

    assert_eq!(s.get(&Foo("a", 1)), None);
    assert_eq!(s.take(&Foo("a", 1)), None);

    assert_eq!(s.iter().next(), None);
}
34807
// Compile-time check that `BTreeSet` and its borrowing/owning iterators are
// covariant in the element type: each function only compiles if a value over
// `&'static str` coerces to the same type over a shorter `&'new str`.
#[allow(dead_code)]
fn test_variance() {
    fn set<'new>(v: BTreeSet<&'static str>) -> BTreeSet<&'new str> {
        v
    }
    fn iter<'a, 'new>(v: Iter<'a, &'static str>) -> Iter<'a, &'new str> {
        v
    }
    fn into_iter<'new>(v: IntoIter<&'static str>) -> IntoIter<&'new str> {
        v
    }
    fn range<'a, 'new>(v: Range<'a, &'static str>) -> Range<'a, &'new str> {
        v
    }
    // not applied to Difference, Intersection, SymmetricDifference, Union
}
34824
// Compile-time check that the set and every iterator type it exposes are
// `Sync` when the element type is `Sync`: the `impl Sync` return types force
// the compiler to prove it for each case.
#[allow(dead_code)]
fn test_sync() {
    fn set<T: Sync>(v: &BTreeSet<T>) -> impl Sync + '_ {
        v
    }

    fn iter<T: Sync>(v: &BTreeSet<T>) -> impl Sync + '_ {
        v.iter()
    }

    fn into_iter<T: Sync>(v: BTreeSet<T>) -> impl Sync {
        v.into_iter()
    }

    fn range<T: Sync + Ord>(v: &BTreeSet<T>) -> impl Sync + '_ {
        v.range(..)
    }

    fn drain_filter<T: Sync + Ord>(v: &mut BTreeSet<T>) -> impl Sync + '_ {
        v.drain_filter(|_| false)
    }

    fn difference<T: Sync + Ord>(v: &BTreeSet<T>) -> impl Sync + '_ {
        v.difference(&v)
    }

    fn intersection<T: Sync + Ord>(v: &BTreeSet<T>) -> impl Sync + '_ {
        v.intersection(&v)
    }

    fn symmetric_difference<T: Sync + Ord>(v: &BTreeSet<T>) -> impl Sync + '_ {
        v.symmetric_difference(&v)
    }

    fn union<T: Sync + Ord>(v: &BTreeSet<T>) -> impl Sync + '_ {
        v.union(&v)
    }
}
34863
// Compile-time check that the set and every iterator type it exposes are
// `Send` under the listed bounds (borrowing iterators additionally need
// `T: Sync` because they hold shared references into the set).
#[allow(dead_code)]
fn test_send() {
    fn set<T: Send>(v: BTreeSet<T>) -> impl Send {
        v
    }

    fn iter<T: Send + Sync>(v: &BTreeSet<T>) -> impl Send + '_ {
        v.iter()
    }

    fn into_iter<T: Send>(v: BTreeSet<T>) -> impl Send {
        v.into_iter()
    }

    fn range<T: Send + Sync + Ord>(v: &BTreeSet<T>) -> impl Send + '_ {
        v.range(..)
    }

    fn drain_filter<T: Send + Ord>(v: &mut BTreeSet<T>) -> impl Send + '_ {
        v.drain_filter(|_| false)
    }

    fn difference<T: Send + Sync + Ord>(v: &BTreeSet<T>) -> impl Send + '_ {
        v.difference(&v)
    }

    fn intersection<T: Send + Sync + Ord>(v: &BTreeSet<T>) -> impl Send + '_ {
        v.intersection(&v)
    }

    fn symmetric_difference<T: Send + Sync + Ord>(v: &BTreeSet<T>) -> impl Send + '_ {
        v.symmetric_difference(&v)
    }

    fn union<T: Send + Sync + Ord>(v: &BTreeSet<T>) -> impl Send + '_ {
        v.union(&v)
    }
}
34902
// Compile-time check that these operations are usable without an `Ord` bound
// on the element type (only `Debug`/`Clone` where those are actually used).
#[allow(dead_code)]
fn test_ord_absence() {
    fn set<K>(mut set: BTreeSet<K>) {
        set.is_empty();
        set.len();
        set.clear();
        set.iter();
        set.into_iter();
    }

    fn set_debug<K: Debug>(set: BTreeSet<K>) {
        format!("{:?}", set);
        format!("{:?}", set.iter());
        format!("{:?}", set.into_iter());
    }

    fn set_clone<K: Clone>(mut set: BTreeSet<K>) {
        set.clone_from(&set.clone());
    }
}
34923
#[test]
fn test_append() {
    let mut a: BTreeSet<_> = (1..=3).collect();
    let mut b: BTreeSet<_> = (3..=5).collect();

    a.append(&mut b);

    // `a` absorbed everything (the shared 3 deduplicates); `b` is drained.
    assert_eq!(a.len(), 5);
    assert_eq!(b.len(), 0);
    for i in 1..=5 {
        assert!(a.contains(&i));
    }
}
34947
#[test]
fn test_first_last() {
    let mut a = BTreeSet::new();
    // An empty set has no first or last element.
    assert_eq!(a.first(), None);
    assert_eq!(a.last(), None);
    a.insert(1);
    assert_eq!(a.first(), Some(&1));
    assert_eq!(a.last(), Some(&1));
    a.insert(2);
    assert_eq!(a.first(), Some(&1));
    assert_eq!(a.last(), Some(&2));
    a.extend(3..=12);
    assert_eq!(a.first(), Some(&1));
    assert_eq!(a.last(), Some(&12));
    // Pop from both ends until the set runs dry.
    assert_eq!(a.pop_first(), Some(1));
    assert_eq!(a.pop_last(), Some(12));
    assert_eq!(a.pop_first(), Some(2));
    assert_eq!(a.pop_last(), Some(11));
    assert_eq!(a.pop_first(), Some(3));
    assert_eq!(a.pop_last(), Some(10));
    assert_eq!(a.pop_first(), Some(4));
    assert_eq!(a.pop_first(), Some(5));
    assert_eq!(a.pop_first(), Some(6));
    assert_eq!(a.pop_first(), Some(7));
    assert_eq!(a.pop_first(), Some(8));
    // Popping from a clone must not disturb the original.
    assert_eq!(a.clone().pop_last(), Some(9));
    assert_eq!(a.pop_first(), Some(9));
    assert_eq!(a.pop_first(), None);
    assert_eq!(a.pop_last(), None);
}
34980
34981// Unlike the function with the same name in map/tests, returns no values.
34982// Which also means it returns different predetermined pseudo-random keys,
34983// and the test cases using this function explore slightly different trees.
34984fn rand_data(len: usize) -> Vec<u32> {
34985    let mut rng = DeterministicRng::new();
34986    Vec::from_iter((0..len).map(|_| rng.next()))
34987}
34988
#[test]
fn test_split_off_empty_right() {
    let mut data = rand_data(173);

    let mut set = BTreeSet::from_iter(data.clone());
    // Splitting at a key beyond the maximum leaves everything on the left.
    let beyond_max = data.iter().max().unwrap() + 1;
    let right = set.split_off(&beyond_max);

    data.sort();
    assert!(set.into_iter().eq(data));
    assert!(right.into_iter().eq(None));
}
35000
#[test]
fn test_split_off_empty_left() {
    let mut data = rand_data(314);

    let mut set = BTreeSet::from_iter(data.clone());
    // Splitting at the minimum key moves everything to the right half.
    let right = set.split_off(data.iter().min().unwrap());

    data.sort();
    assert!(set.into_iter().eq(None));
    assert!(right.into_iter().eq(data));
}
35012
#[test]
fn test_split_off_large_random_sorted() {
    // Miri is too slow
    let mut data = rand_data(if cfg!(miri) { 529 } else { 1529 });
    // special case with maximum height.
    data.sort();

    let mut set = BTreeSet::from_iter(data.clone());
    let pivot = data[data.len() / 2];
    let right = set.split_off(&pivot);

    // The left half keeps keys below the pivot; the right half gets the rest.
    assert!(set.into_iter().eq(data.clone().into_iter().filter(|x| *x < pivot)));
    assert!(right.into_iter().eq(data.into_iter().filter(|x| *x >= pivot)));
}
35027use super::DormantMutRef;
35028
#[test]
fn test_borrow() {
    // Builds a chain of three nested `DormantMutRef`s over `data`, then
    // awakens them in reverse order, multiplying by each stored factor:
    // 1 * 2 * 3 * 7 == 42.
    let mut data = 1;
    let mut stack = vec![];
    let mut rr = &mut data;
    for factor in [2, 3, 7].iter() {
        // `new` splits the borrow: `r` is usable immediately, `dormant_r`
        // only after `r` (and everything derived from it) is gone.
        let (r, dormant_r) = DormantMutRef::new(rr);
        rr = r;
        assert_eq!(*rr, 1);
        stack.push((factor, dormant_r));
    }
    while let Some((factor, dormant_r)) = stack.pop() {
        // Relies on `DormantMutRef`'s contract: all references derived after
        // this dormant ref was created have been popped off and dropped,
        // so reviving the borrow here is sound.
        let r = unsafe { dormant_r.awaken() };
        *r *= factor;
    }
    assert_eq!(data, 42);
}
35046use core::fmt::{self, Debug};
35047use core::marker::PhantomData;
35048use core::mem;
35049
35050use super::super::borrow::DormantMutRef;
35051use super::super::node::{marker, Handle, InsertResult::*, NodeRef};
35052use super::BTreeMap;
35053
35054use Entry::*;
35055
/// A view into a single entry in a map, which may either be vacant or occupied.
///
/// This `enum` is constructed from the [`entry`] method on [`BTreeMap`].
///
/// Matching on the variants lets a caller either fill a missing slot or
/// inspect/modify an existing one without performing a second lookup.
///
/// [`entry`]: BTreeMap::entry
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Entry<'a, K: 'a, V: 'a> {
    /// A vacant entry: the key is not present in the map.
    #[stable(feature = "rust1", since = "1.0.0")]
    Vacant(#[stable(feature = "rust1", since = "1.0.0")] VacantEntry<'a, K, V>),

    /// An occupied entry: the key already maps to a value.
    #[stable(feature = "rust1", since = "1.0.0")]
    Occupied(#[stable(feature = "rust1", since = "1.0.0")] OccupiedEntry<'a, K, V>),
}
35071
35072#[stable(feature = "debug_btree_map", since = "1.12.0")]
35073impl<K: Debug + Ord, V: Debug> Debug for Entry<'_, K, V> {
35074    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
35075        match *self {
35076            Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
35077            Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
35078        }
35079    }
35080}
35081
/// A view into a vacant entry in a `BTreeMap`.
/// It is part of the [`Entry`] enum.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct VacantEntry<'a, K: 'a, V: 'a> {
    // The key that will be moved into the map when the entry is filled.
    pub(super) key: K,
    // Handle to the leaf edge where the new key-value pair belongs.
    pub(super) handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
    // Borrow of the whole map, kept dormant while `handle` borrows into it;
    // awakened in `insert` once the handle has been consumed.
    pub(super) dormant_map: DormantMutRef<'a, BTreeMap<K, V>>,

    // Be invariant in `K` and `V`
    pub(super) _marker: PhantomData<&'a mut (K, V)>,
}
35093
35094#[stable(feature = "debug_btree_map", since = "1.12.0")]
35095impl<K: Debug + Ord, V> Debug for VacantEntry<'_, K, V> {
35096    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
35097        f.debug_tuple("VacantEntry").field(self.key()).finish()
35098    }
35099}
35100
/// A view into an occupied entry in a `BTreeMap`.
/// It is part of the [`Entry`] enum.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct OccupiedEntry<'a, K: 'a, V: 'a> {
    // Handle to the key-value pair this entry refers to.
    pub(super) handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>,
    // Borrow of the whole map, kept dormant while `handle` borrows into it.
    pub(super) dormant_map: DormantMutRef<'a, BTreeMap<K, V>>,

    // Be invariant in `K` and `V`
    pub(super) _marker: PhantomData<&'a mut (K, V)>,
}
35111
35112#[stable(feature = "debug_btree_map", since = "1.12.0")]
35113impl<K: Debug + Ord, V: Debug> Debug for OccupiedEntry<'_, K, V> {
35114    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
35115        f.debug_struct("OccupiedEntry").field("key", self.key()).field("value", self.get()).finish()
35116    }
35117}
35118
/// The error returned by [`try_insert`](BTreeMap::try_insert) when the key already exists.
///
/// Contains the occupied entry, and the value that was not inserted.
/// Both are returned so the caller can recover the rejected value and still
/// inspect or overwrite the existing one.
#[unstable(feature = "map_try_insert", issue = "82766")]
pub struct OccupiedError<'a, K: 'a, V: 'a> {
    /// The entry in the map that was already occupied.
    pub entry: OccupiedEntry<'a, K, V>,
    /// The value which was not inserted, because the entry was already occupied.
    pub value: V,
}
35129
35130#[unstable(feature = "map_try_insert", issue = "82766")]
35131impl<K: Debug + Ord, V: Debug> Debug for OccupiedError<'_, K, V> {
35132    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
35133        f.debug_struct("OccupiedError")
35134            .field("key", self.entry.key())
35135            .field("old_value", self.entry.get())
35136            .field("new_value", &self.value)
35137            .finish()
35138    }
35139}
35140
35141#[unstable(feature = "map_try_insert", issue = "82766")]
35142impl<'a, K: Debug + Ord, V: Debug> fmt::Display for OccupiedError<'a, K, V> {
35143    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
35144        write!(
35145            f,
35146            "failed to insert {:?}, key {:?} already exists with value {:?}",
35147            self.value,
35148            self.entry.key(),
35149            self.entry.get(),
35150        )
35151    }
35152}
35153
35154impl<'a, K: Ord, V> Entry<'a, K, V> {
35155    /// Ensures a value is in the entry by inserting the default if empty, and returns
35156    /// a mutable reference to the value in the entry.
35157    ///
35158    /// # Examples
35159    ///
35160    /// ```
35161    /// use std::collections::BTreeMap;
35162    ///
35163    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
35164    /// map.entry("poneyland").or_insert(12);
35165    ///
35166    /// assert_eq!(map["poneyland"], 12);
35167    /// ```
35168    #[stable(feature = "rust1", since = "1.0.0")]
35169    pub fn or_insert(self, default: V) -> &'a mut V {
35170        match self {
35171            Occupied(entry) => entry.into_mut(),
35172            Vacant(entry) => entry.insert(default),
35173        }
35174    }
35175
35176    /// Ensures a value is in the entry by inserting the result of the default function if empty,
35177    /// and returns a mutable reference to the value in the entry.
35178    ///
35179    /// # Examples
35180    ///
35181    /// ```
35182    /// use std::collections::BTreeMap;
35183    ///
35184    /// let mut map: BTreeMap<&str, String> = BTreeMap::new();
35185    /// let s = "hoho".to_string();
35186    ///
35187    /// map.entry("poneyland").or_insert_with(|| s);
35188    ///
35189    /// assert_eq!(map["poneyland"], "hoho".to_string());
35190    /// ```
35191    #[stable(feature = "rust1", since = "1.0.0")]
35192    pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
35193        match self {
35194            Occupied(entry) => entry.into_mut(),
35195            Vacant(entry) => entry.insert(default()),
35196        }
35197    }
35198
35199    /// Ensures a value is in the entry by inserting, if empty, the result of the default function.
35200    /// This method allows for generating key-derived values for insertion by providing the default
35201    /// function a reference to the key that was moved during the `.entry(key)` method call.
35202    ///
35203    /// The reference to the moved key is provided so that cloning or copying the key is
35204    /// unnecessary, unlike with `.or_insert_with(|| ... )`.
35205    ///
35206    /// # Examples
35207    ///
35208    /// ```
35209    /// use std::collections::BTreeMap;
35210    ///
35211    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
35212    ///
35213    /// map.entry("poneyland").or_insert_with_key(|key| key.chars().count());
35214    ///
35215    /// assert_eq!(map["poneyland"], 9);
35216    /// ```
35217    #[inline]
35218    #[stable(feature = "or_insert_with_key", since = "1.50.0")]
35219    pub fn or_insert_with_key<F: FnOnce(&K) -> V>(self, default: F) -> &'a mut V {
35220        match self {
35221            Occupied(entry) => entry.into_mut(),
35222            Vacant(entry) => {
35223                let value = default(entry.key());
35224                entry.insert(value)
35225            }
35226        }
35227    }
35228
35229    /// Returns a reference to this entry's key.
35230    ///
35231    /// # Examples
35232    ///
35233    /// ```
35234    /// use std::collections::BTreeMap;
35235    ///
35236    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
35237    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
35238    /// ```
35239    #[stable(feature = "map_entry_keys", since = "1.10.0")]
35240    pub fn key(&self) -> &K {
35241        match *self {
35242            Occupied(ref entry) => entry.key(),
35243            Vacant(ref entry) => entry.key(),
35244        }
35245    }
35246
35247    /// Provides in-place mutable access to an occupied entry before any
35248    /// potential inserts into the map.
35249    ///
35250    /// # Examples
35251    ///
35252    /// ```
35253    /// use std::collections::BTreeMap;
35254    ///
35255    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
35256    ///
35257    /// map.entry("poneyland")
35258    ///    .and_modify(|e| { *e += 1 })
35259    ///    .or_insert(42);
35260    /// assert_eq!(map["poneyland"], 42);
35261    ///
35262    /// map.entry("poneyland")
35263    ///    .and_modify(|e| { *e += 1 })
35264    ///    .or_insert(42);
35265    /// assert_eq!(map["poneyland"], 43);
35266    /// ```
35267    #[stable(feature = "entry_and_modify", since = "1.26.0")]
35268    pub fn and_modify<F>(self, f: F) -> Self
35269    where
35270        F: FnOnce(&mut V),
35271    {
35272        match self {
35273            Occupied(mut entry) => {
35274                f(entry.get_mut());
35275                Occupied(entry)
35276            }
35277            Vacant(entry) => Vacant(entry),
35278        }
35279    }
35280}
35281
35282impl<'a, K: Ord, V: Default> Entry<'a, K, V> {
35283    #[stable(feature = "entry_or_default", since = "1.28.0")]
35284    /// Ensures a value is in the entry by inserting the default value if empty,
35285    /// and returns a mutable reference to the value in the entry.
35286    ///
35287    /// # Examples
35288    ///
35289    /// ```
35290    /// use std::collections::BTreeMap;
35291    ///
35292    /// let mut map: BTreeMap<&str, Option<usize>> = BTreeMap::new();
35293    /// map.entry("poneyland").or_default();
35294    ///
35295    /// assert_eq!(map["poneyland"], None);
35296    /// ```
35297    pub fn or_default(self) -> &'a mut V {
35298        match self {
35299            Occupied(entry) => entry.into_mut(),
35300            Vacant(entry) => entry.insert(Default::default()),
35301        }
35302    }
35303}
35304
impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
    /// Gets a reference to the key that would be used when inserting a value
    /// through the VacantEntry.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[stable(feature = "map_entry_keys", since = "1.10.0")]
    pub fn key(&self) -> &K {
        &self.key
    }

    /// Take ownership of the key.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    /// use std::collections::btree_map::Entry;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    ///
    /// if let Entry::Vacant(v) = map.entry("poneyland") {
    ///     v.into_key();
    /// }
    /// ```
    #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
    pub fn into_key(self) -> K {
        self.key
    }

    /// Sets the value of the entry with the `VacantEntry`'s key,
    /// and returns a mutable reference to it.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    /// use std::collections::btree_map::Entry;
    ///
    /// let mut map: BTreeMap<&str, u32> = BTreeMap::new();
    ///
    /// if let Entry::Vacant(o) = map.entry("poneyland") {
    ///     o.insert(37);
    /// }
    /// assert_eq!(map["poneyland"], 37);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn insert(self, value: V) -> &'a mut V {
        let out_ptr = match self.handle.insert_recursing(self.key, value) {
            // The new pair fit into an existing node: only bump the length.
            (Fit(_), val_ptr) => {
                // SAFETY: We have consumed self.handle and the handle returned.
                let map = unsafe { self.dormant_map.awaken() };
                map.length += 1;
                val_ptr
            }
            // A split propagated all the way up: grow the tree by one level
            // and push the separating KV plus the new right subtree into the
            // fresh root.
            (Split(ins), val_ptr) => {
                drop(ins.left);
                // SAFETY: We have consumed self.handle and the reference returned.
                let map = unsafe { self.dormant_map.awaken() };
                let root = map.root.as_mut().unwrap();
                root.push_internal_level().push(ins.kv.0, ins.kv.1, ins.right);
                map.length += 1;
                val_ptr
            }
        };
        // Now that we have finished growing the tree using borrowed references,
        // dereference the pointer to a part of it, that we picked up along the way.
        unsafe { &mut *out_ptr }
    }
}
35381
35382impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> {
35383    /// Gets a reference to the key in the entry.
35384    ///
35385    /// # Examples
35386    ///
35387    /// ```
35388    /// use std::collections::BTreeMap;
35389    ///
35390    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
35391    /// map.entry("poneyland").or_insert(12);
35392    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
35393    /// ```
35394    #[stable(feature = "map_entry_keys", since = "1.10.0")]
35395    pub fn key(&self) -> &K {
35396        self.handle.reborrow().into_kv().0
35397    }
35398
    /// Take ownership of the key and value from the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    /// use std::collections::btree_map::Entry;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     // We delete the entry from the map.
    ///     o.remove_entry();
    /// }
    ///
    /// // If now try to get the value, it will panic:
    /// // println!("{}", map["poneyland"]);
    /// ```
    #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
    pub fn remove_entry(self) -> (K, V) {
        // Delegates to the private removal helper shared with `remove`.
        self.remove_kv()
    }
35422
35423    /// Gets a reference to the value in the entry.
35424    ///
35425    /// # Examples
35426    ///
35427    /// ```
35428    /// use std::collections::BTreeMap;
35429    /// use std::collections::btree_map::Entry;
35430    ///
35431    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
35432    /// map.entry("poneyland").or_insert(12);
35433    ///
35434    /// if let Entry::Occupied(o) = map.entry("poneyland") {
35435    ///     assert_eq!(o.get(), &12);
35436    /// }
35437    /// ```
35438    #[stable(feature = "rust1", since = "1.0.0")]
35439    pub fn get(&self) -> &V {
35440        self.handle.reborrow().into_kv().1
35441    }
35442
35443    /// Gets a mutable reference to the value in the entry.
35444    ///
35445    /// If you need a reference to the `OccupiedEntry` that may outlive the
35446    /// destruction of the `Entry` value, see [`into_mut`].
35447    ///
35448    /// [`into_mut`]: OccupiedEntry::into_mut
35449    ///
35450    /// # Examples
35451    ///
35452    /// ```
35453    /// use std::collections::BTreeMap;
35454    /// use std::collections::btree_map::Entry;
35455    ///
35456    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
35457    /// map.entry("poneyland").or_insert(12);
35458    ///
35459    /// assert_eq!(map["poneyland"], 12);
35460    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
35461    ///     *o.get_mut() += 10;
35462    ///     assert_eq!(*o.get(), 22);
35463    ///
35464    ///     // We can use the same Entry multiple times.
35465    ///     *o.get_mut() += 2;
35466    /// }
35467    /// assert_eq!(map["poneyland"], 24);
35468    /// ```
35469    #[stable(feature = "rust1", since = "1.0.0")]
35470    pub fn get_mut(&mut self) -> &mut V {
35471        self.handle.kv_mut().1
35472    }
35473
    /// Converts the entry into a mutable reference to its value.
    ///
    /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
    ///
    /// [`get_mut`]: OccupiedEntry::get_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    /// use std::collections::btree_map::Entry;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// assert_eq!(map["poneyland"], 12);
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     *o.into_mut() += 10;
    /// }
    /// assert_eq!(map["poneyland"], 22);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn into_mut(self) -> &'a mut V {
        // Consumes the entry, so the returned borrow is tied to the map
        // borrow ('a) rather than to the entry itself.
        self.handle.into_val_mut()
    }
35499
35500    /// Sets the value of the entry with the `OccupiedEntry`'s key,
35501    /// and returns the entry's old value.
35502    ///
35503    /// # Examples
35504    ///
35505    /// ```
35506    /// use std::collections::BTreeMap;
35507    /// use std::collections::btree_map::Entry;
35508    ///
35509    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
35510    /// map.entry("poneyland").or_insert(12);
35511    ///
35512    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
35513    ///     assert_eq!(o.insert(15), 12);
35514    /// }
35515    /// assert_eq!(map["poneyland"], 15);
35516    /// ```
35517    #[stable(feature = "rust1", since = "1.0.0")]
35518    pub fn insert(&mut self, value: V) -> V {
35519        mem::replace(self.get_mut(), value)
35520    }
35521
35522    /// Takes the value of the entry out of the map, and returns it.
35523    ///
35524    /// # Examples
35525    ///
35526    /// ```
35527    /// use std::collections::BTreeMap;
35528    /// use std::collections::btree_map::Entry;
35529    ///
35530    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
35531    /// map.entry("poneyland").or_insert(12);
35532    ///
35533    /// if let Entry::Occupied(o) = map.entry("poneyland") {
35534    ///     assert_eq!(o.remove(), 12);
35535    /// }
35536    /// // If we try to get "poneyland"'s value, it'll panic:
35537    /// // println!("{}", map["poneyland"]);
35538    /// ```
35539    #[stable(feature = "rust1", since = "1.0.0")]
35540    pub fn remove(self) -> V {
35541        self.remove_kv().1
35542    }
35543
    // Body of `remove_entry`, probably separate because the name reflects the returned pair.
    pub(super) fn remove_kv(self) -> (K, V) {
        let mut emptied_internal_root = false;
        // The closure is invoked by `remove_kv_tracking` when removal leaves
        // an internal root node empty.
        let (old_kv, _) = self.handle.remove_kv_tracking(|| emptied_internal_root = true);
        // SAFETY: we consumed the intermediate root borrow, `self.handle`.
        let map = unsafe { self.dormant_map.awaken() };
        map.length -= 1;
        if emptied_internal_root {
            // Drop the now-empty internal root level; a root must exist since
            // we just removed an element from the tree.
            let root = map.root.as_mut().unwrap();
            root.pop_internal_level();
        }
        old_kv
    }
35557}
35558use super::super::testing::crash_test::{CrashTestDummy, Panic};
35559use super::super::testing::ord_chaos::{Cyclic3, Governed, Governor};
35560use super::super::testing::rng::DeterministicRng;
35561use super::Entry::{Occupied, Vacant};
35562use super::*;
35563use crate::boxed::Box;
35564use crate::fmt::Debug;
35565use crate::rc::Rc;
35566use crate::string::{String, ToString};
35567use crate::vec::Vec;
35568use std::cmp::Ordering;
35569use std::convert::TryFrom;
35570use std::iter::{self, FromIterator};
35571use std::mem;
35572use std::ops::Bound::{self, Excluded, Included, Unbounded};
35573use std::ops::RangeBounds;
35574use std::panic::{catch_unwind, AssertUnwindSafe};
35575use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
35576
// Capacity of a tree with a single level,
// i.e., a tree whose root is a leaf node at height 0.
const NODE_CAPACITY: usize = node::CAPACITY;

// Minimum number of elements to insert, to guarantee a tree with 2 levels,
// i.e., a tree whose root is an internal node at height 1, with edges to leaf nodes.
// It's not the minimum size: removing an element from such a tree does not always reduce height.
const MIN_INSERTS_HEIGHT_1: usize = NODE_CAPACITY + 1;

// Minimum number of elements to insert in ascending order, to guarantee a tree with 3 levels,
// i.e., a tree whose root is an internal node at height 2, with edges to more internal nodes.
// It's not the minimum size: removing an element from such a tree does not always reduce height.
const MIN_INSERTS_HEIGHT_2: usize = 89;
35590
// Gathers all references from a mutable iterator and makes sure Miri notices
// if using them is dangerous.
fn test_all_refs<'a, T: 'a>(dummy: &mut T, iter: impl Iterator<Item = &'a mut T>) {
    // Materialize every reference the iterator yields.
    let mut collected: Vec<&mut T> = iter.collect();
    // Use them all. Twice, to be sure we got all interleavings.
    collected.iter_mut().for_each(|r| mem::swap(dummy, r));
    collected.into_iter().for_each(|r| mem::swap(dummy, r));
}
35604
impl<K, V> BTreeMap<K, V> {
    // Panics if the map (or the code navigating it) is corrupted.
    fn check_invariants(&self) {
        if let Some(root) = &self.root {
            let root_node = root.reborrow();

            // Check the back pointers top-down, before we attempt to rely on
            // more serious navigation code.
            assert!(root_node.ascend().is_err());
            root_node.assert_back_pointers();

            // Check consistency of `length` with what navigation code encounters.
            assert_eq!(self.length, root_node.calc_length());

            // Lastly, check the invariant causing the least harm.
            // An internal root must hold at least 1 element; a leaf root may be empty.
            root_node.assert_min_len(if root_node.height() > 0 { 1 } else { 0 });
        } else {
            // No root allocated: the map must be empty.
            assert_eq!(self.length, 0);
        }

        // Check that `assert_strictly_ascending` will encounter all keys.
        assert_eq!(self.length, self.keys().count());
    }

    // Panics if the map is corrupted or if the keys are not in strictly
    // ascending order, in the current opinion of the `Ord` implementation.
    // If the `Ord` implementation violates transitivity, this method does not
    // guarantee that all keys are unique, just that adjacent keys are unique.
    fn check(&self)
    where
        K: Debug + Ord,
    {
        self.check_invariants();
        self.assert_strictly_ascending();
    }

    // Returns the height of the root, if any.
    fn height(&self) -> Option<usize> {
        self.root.as_ref().map(node::Root::height)
    }

    // Renders the tree's keys as a string, used in assertion messages.
    fn dump_keys(&self) -> String
    where
        K: Debug,
    {
        if let Some(root) = self.root.as_ref() {
            root.reborrow().dump_keys()
        } else {
            String::from("not yet allocated")
        }
    }

    // Panics if the keys are not in strictly ascending order.
    fn assert_strictly_ascending(&self)
    where
        K: Debug + Ord,
    {
        // Compare each adjacent pair of keys in iteration order.
        let mut keys = self.keys();
        if let Some(mut previous) = keys.next() {
            for next in keys {
                assert!(previous < next, "{:?} >= {:?}", previous, next);
                previous = next;
            }
        }
    }

    // Transform the tree to minimize wasted space, obtaining fewer nodes that
    // are mostly filled up to their capacity. The same compact tree could have
    // been obtained by inserting keys in a shrewd order.
    fn compact(&mut self)
    where
        K: Ord,
    {
        // Take the tree out of `self` (leaving it empty), then rebuild it by
        // bulk-pushing the already-sorted pairs back into a fresh root.
        let iter = mem::take(self).into_iter();
        let root = BTreeMap::ensure_is_owned(&mut self.root);
        root.bulk_push(iter, &mut self.length);
    }
}
35683
impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
    // Asserts that this node holds at least `min_len` elements, then
    // recursively checks all descendants against MIN_LEN. The parameter
    // exists because the root alone is allowed a smaller length (see
    // `check_invariants`).
    fn assert_min_len(self, min_len: usize) {
        assert!(self.len() >= min_len, "node len {} < {}", self.len(), min_len);
        if let node::ForceResult::Internal(node) = self.force() {
            // An internal node with len() elements has len() + 1 edges.
            for idx in 0..=node.len() {
                // SAFETY: idx ranges over 0..=len, the valid edge indices of `node`.
                let edge = unsafe { Handle::new_edge(node, idx) };
                edge.descend().assert_min_len(MIN_LEN);
            }
        }
    }
}
35695
35696// Tests our value of MIN_INSERTS_HEIGHT_2. Failure may mean you just need to
35697// adapt that value to match a change in node::CAPACITY or the choices made
35698// during insertion, otherwise other test cases may fail or be less useful.
#[test]
fn test_levels() {
    // Keeps inserting ascending keys for as long as the tree stays at the
    // given height, i.e. until the height grows by one.
    fn grow_past_height(map: &mut BTreeMap<i32, ()>, height: usize) {
        while map.height() == Some(height) {
            let next_key = *map.last_key_value().unwrap().0 + 1;
            map.insert(next_key, ());
        }
    }

    let mut map = BTreeMap::new();
    map.check();
    assert_eq!(map.height(), None);
    assert_eq!(map.len(), 0);

    map.insert(0, ());
    grow_past_height(&mut map, 0);
    map.check();
    // Structure:
    // - 1 element in internal root node with 2 children
    // - 6 elements in left leaf child
    // - 5 elements in right leaf child
    assert_eq!(map.height(), Some(1));
    assert_eq!(map.len(), MIN_INSERTS_HEIGHT_1, "{}", map.dump_keys());

    grow_past_height(&mut map, 1);
    map.check();
    // Structure:
    // - 1 element in internal root node with 2 children
    // - 6 elements in left internal child with 7 grandchildren
    // - 42 elements in left child's 7 grandchildren with 6 elements each
    // - 5 elements in right internal child with 6 grandchildren
    // - 30 elements in right child's 5 first grandchildren with 6 elements each
    // - 5 elements in right child's last grandchild
    assert_eq!(map.height(), Some(2));
    assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2, "{}", map.dump_keys());
}
35734
35735// Ensures the testing infrastructure usually notices order violations.
#[test]
#[should_panic]
fn test_check_ord_chaos() {
    // Build the map while the governor keeps comparisons consistent, then
    // flip the ordering so `check` should notice the violation and panic.
    let gov = Governor::new();
    let mut map = BTreeMap::new();
    for i in 0..2 {
        map.insert(Governed(i, &gov), ());
    }
    gov.flip();
    map.check();
}
35744
35745// Ensures the testing infrastructure doesn't always mind order violations.
#[test]
fn test_check_invariants_ord_chaos() {
    // Unlike `check`, `check_invariants` never compares keys, so it must
    // tolerate a flipped ordering without panicking.
    let gov = Governor::new();
    let mut map = BTreeMap::new();
    for i in 0..2 {
        map.insert(Governed(i, &gov), ());
    }
    gov.flip();
    map.check_invariants();
}
35753
#[test]
fn test_basic_large() {
    let mut map = BTreeMap::new();
    // Miri is too slow
    let size = if cfg!(miri) { MIN_INSERTS_HEIGHT_2 } else { 10000 };
    let size = size + (size % 2); // round up to even number
    assert_eq!(map.len(), 0);

    // Insert fresh keys: each insertion returns None and bumps the length.
    for i in 0..size {
        assert_eq!(map.insert(i, 10 * i), None);
        assert_eq!(map.len(), i + 1);
    }

    assert_eq!(map.first_key_value(), Some((&0, &0)));
    assert_eq!(map.last_key_value(), Some((&(size - 1), &(10 * (size - 1)))));
    assert_eq!(map.first_entry().unwrap().key(), &0);
    assert_eq!(map.last_entry().unwrap().key(), &(size - 1));

    // Every inserted key is retrievable with its value.
    for i in 0..size {
        assert_eq!(map.get(&i).unwrap(), &(i * 10));
    }

    // Keys beyond the inserted range are absent.
    for i in size..size * 2 {
        assert_eq!(map.get(&i), None);
    }

    // Overwriting existing keys returns the old value and keeps the length.
    for i in 0..size {
        assert_eq!(map.insert(i, 100 * i), Some(10 * i));
        assert_eq!(map.len(), size);
    }

    for i in 0..size {
        assert_eq!(map.get(&i).unwrap(), &(i * 100));
    }

    // Remove every even key; the length shrinks by one each time.
    for i in 0..size / 2 {
        assert_eq!(map.remove(&(i * 2)), Some(i * 200));
        assert_eq!(map.len(), size - i - 1);
    }

    // Even keys are gone; odd keys retain their overwritten values.
    for i in 0..size / 2 {
        assert_eq!(map.get(&(2 * i)), None);
        assert_eq!(map.get(&(2 * i + 1)).unwrap(), &(i * 200 + 100));
    }

    // Removing the remaining odd keys empties the map; removing already
    // absent even keys is a no-op returning None.
    for i in 0..size / 2 {
        assert_eq!(map.remove(&(2 * i)), None);
        assert_eq!(map.remove(&(2 * i + 1)), Some(i * 200 + 100));
        assert_eq!(map.len(), size / 2 - i - 1);
    }
    map.check();
}
35806
#[test]
fn test_basic_small() {
    // Exercises the basic map API on 0, 1 and 2 elements, covering both the
    // never-allocated (root is None) and allocated-but-empty root states.
    let mut map = BTreeMap::new();
    // Empty, root is absent (None):
    assert_eq!(map.remove(&1), None);
    assert_eq!(map.len(), 0);
    assert_eq!(map.get(&1), None);
    assert_eq!(map.get_mut(&1), None);
    assert_eq!(map.first_key_value(), None);
    assert_eq!(map.last_key_value(), None);
    assert_eq!(map.keys().count(), 0);
    assert_eq!(map.values().count(), 0);
    assert_eq!(map.range(..).next(), None);
    assert_eq!(map.range(..1).next(), None);
    assert_eq!(map.range(1..).next(), None);
    assert_eq!(map.range(1..=1).next(), None);
    assert_eq!(map.range(1..2).next(), None);
    assert_eq!(map.height(), None);
    assert_eq!(map.insert(1, 1), None);
    assert_eq!(map.height(), Some(0));
    map.check();

    // 1 key-value pair:
    assert_eq!(map.len(), 1);
    assert_eq!(map.get(&1), Some(&1));
    assert_eq!(map.get_mut(&1), Some(&mut 1));
    assert_eq!(map.first_key_value(), Some((&1, &1)));
    assert_eq!(map.last_key_value(), Some((&1, &1)));
    assert_eq!(map.keys().collect::<Vec<_>>(), vec![&1]);
    assert_eq!(map.values().collect::<Vec<_>>(), vec![&1]);
    // Overwriting the single key returns the old value, keeps len at 1:
    assert_eq!(map.insert(1, 2), Some(1));
    assert_eq!(map.len(), 1);
    assert_eq!(map.get(&1), Some(&2));
    assert_eq!(map.get_mut(&1), Some(&mut 2));
    assert_eq!(map.first_key_value(), Some((&1, &2)));
    assert_eq!(map.last_key_value(), Some((&1, &2)));
    assert_eq!(map.keys().collect::<Vec<_>>(), vec![&1]);
    assert_eq!(map.values().collect::<Vec<_>>(), vec![&2]);
    assert_eq!(map.insert(2, 4), None);
    assert_eq!(map.height(), Some(0));
    map.check();

    // 2 key-value pairs:
    assert_eq!(map.len(), 2);
    assert_eq!(map.get(&2), Some(&4));
    assert_eq!(map.get_mut(&2), Some(&mut 4));
    assert_eq!(map.first_key_value(), Some((&1, &2)));
    assert_eq!(map.last_key_value(), Some((&2, &4)));
    assert_eq!(map.keys().collect::<Vec<_>>(), vec![&1, &2]);
    assert_eq!(map.values().collect::<Vec<_>>(), vec![&2, &4]);
    assert_eq!(map.remove(&1), Some(2));
    assert_eq!(map.height(), Some(0));
    map.check();

    // 1 key-value pair:
    assert_eq!(map.len(), 1);
    assert_eq!(map.get(&1), None);
    assert_eq!(map.get_mut(&1), None);
    assert_eq!(map.get(&2), Some(&4));
    assert_eq!(map.get_mut(&2), Some(&mut 4));
    assert_eq!(map.first_key_value(), Some((&2, &4)));
    assert_eq!(map.last_key_value(), Some((&2, &4)));
    assert_eq!(map.keys().collect::<Vec<_>>(), vec![&2]);
    assert_eq!(map.values().collect::<Vec<_>>(), vec![&4]);
    assert_eq!(map.remove(&2), Some(4));
    assert_eq!(map.height(), Some(0));
    map.check();

    // Empty but root is owned (Some(...)):
    assert_eq!(map.len(), 0);
    assert_eq!(map.get(&1), None);
    assert_eq!(map.get_mut(&1), None);
    assert_eq!(map.first_key_value(), None);
    assert_eq!(map.last_key_value(), None);
    assert_eq!(map.keys().count(), 0);
    assert_eq!(map.values().count(), 0);
    assert_eq!(map.range(..).next(), None);
    assert_eq!(map.range(..1).next(), None);
    assert_eq!(map.range(1..).next(), None);
    assert_eq!(map.range(1..=1).next(), None);
    assert_eq!(map.range(1..2).next(), None);
    assert_eq!(map.remove(&1), None);
    assert_eq!(map.height(), Some(0));
    map.check();
}
35892
#[test]
fn test_iter() {
    // Miri is too slow
    let size = if cfg!(miri) { 200 } else { 10000 };

    let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();

    // Drains `iter` from the front, checking the size hint and the yielded
    // pair at every step, then verifies the iterator is fused-empty.
    fn check_forward<I>(size: usize, mut iter: I)
    where
        I: Iterator<Item = (usize, usize)>,
    {
        for expected in 0..size {
            let remaining = size - expected;
            assert_eq!(iter.size_hint(), (remaining, Some(remaining)));
            assert_eq!(iter.next(), Some((expected, expected)));
        }
        assert_eq!(iter.size_hint(), (0, Some(0)));
        assert_eq!(iter.next(), None);
    }
    check_forward(size, map.iter().map(|(&k, &v)| (k, v)));
    check_forward(size, map.iter_mut().map(|(&k, &mut v)| (k, v)));
    check_forward(size, map.into_iter());
}
35915
#[test]
fn test_iter_rev() {
    // Miri is too slow
    let size = if cfg!(miri) { 200 } else { 10000 };

    let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();

    // Drains `iter` fully, expecting pairs in descending key order and a
    // precise size hint at every step.
    fn check_backward<I>(size: usize, mut iter: I)
    where
        I: Iterator<Item = (usize, usize)>,
    {
        for taken in 0..size {
            let remaining = size - taken;
            assert_eq!(iter.size_hint(), (remaining, Some(remaining)));
            assert_eq!(iter.next(), Some((remaining - 1, remaining - 1)));
        }
        assert_eq!(iter.size_hint(), (0, Some(0)));
        assert_eq!(iter.next(), None);
    }
    check_backward(size, map.iter().rev().map(|(&k, &v)| (k, v)));
    check_backward(size, map.iter_mut().rev().map(|(&k, &mut v)| (k, v)));
    check_backward(size, map.into_iter().rev());
}
35938
// Specifically tests iter_mut's ability to mutate the value of pairs in-line.
fn do_test_iter_mut_mutation<T>(size: usize)
where
    T: Copy + Debug + Ord + TryFrom<usize>,
    <T as TryFrom<usize>>::Error: Debug,
{
    let zero = T::try_from(0).unwrap();
    // Keys are 0..size; every value starts out as zero.
    let mut map: BTreeMap<T, T> = (0..size).map(|i| (T::try_from(i).unwrap(), zero)).collect();

    // Forward and backward iteration sees enough pairs (also tested elsewhere)
    assert_eq!(map.iter_mut().count(), size);
    assert_eq!(map.iter_mut().rev().count(), size);

    // Iterate forwards, trying to mutate to unique values
    for (i, (k, v)) in map.iter_mut().enumerate() {
        assert_eq!(*k, T::try_from(i).unwrap());
        assert_eq!(*v, zero);
        // Value for key i becomes i + 1.
        *v = T::try_from(i + 1).unwrap();
    }

    // Iterate backwards, checking that mutations succeeded and trying to mutate again
    for (i, (k, v)) in map.iter_mut().rev().enumerate() {
        assert_eq!(*k, T::try_from(size - i - 1).unwrap());
        assert_eq!(*v, T::try_from(size - i).unwrap());
        // Value for key (size - i - 1) becomes 2 * size - i.
        *v = T::try_from(2 * size - i).unwrap();
    }

    // Check that backward mutations succeeded
    for (i, (k, v)) in map.iter_mut().enumerate() {
        assert_eq!(*k, T::try_from(i).unwrap());
        assert_eq!(*v, T::try_from(size + i + 1).unwrap());
    }
    map.check();
}
35973
// Key/value type with an unusually large alignment, used to exercise node
// layout and padding in `do_test_iter_mut_mutation`.
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
#[repr(align(32))]
struct Align32(usize);
35977
35978impl TryFrom<usize> for Align32 {
35979    type Error = ();
35980
35981    fn try_from(s: usize) -> Result<Align32, ()> {
35982        Ok(Align32(s))
35983    }
35984}
35985
#[test]
fn test_iter_mut_mutation() {
    // Check many alignments and trees with roots at various heights.
    // Each type is exercised at the standard set of tree sizes.
    macro_rules! check_type {
        ($t:ty) => {
            do_test_iter_mut_mutation::<$t>(1);
            do_test_iter_mut_mutation::<$t>(MIN_INSERTS_HEIGHT_1);
            do_test_iter_mut_mutation::<$t>(MIN_INSERTS_HEIGHT_2);
        };
    }
    // The empty tree only needs checking once.
    do_test_iter_mut_mutation::<u8>(0);
    check_type!(u8);
    check_type!(u16);
    check_type!(u32);
    check_type!(u64);
    check_type!(u128);
    check_type!(Align32);
}
36009
#[test]
fn test_values_mut() {
    // A 3-level tree, so `values_mut` traverses internal nodes too.
    let mut map: BTreeMap<_, _> = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)).collect();
    test_all_refs(&mut 13, map.values_mut());
    map.check();
}
36016
#[test]
fn test_values_mut_mutation() {
    let mut map = BTreeMap::new();
    map.insert(1, String::from("hello"));
    map.insert(2, String::from("goodbye"));

    // Append to every value in place through the mutable iterator.
    map.values_mut().for_each(|value| value.push_str("!"));

    let collected: Vec<String> = map.values().cloned().collect();
    assert_eq!(collected, [String::from("hello!"), String::from("goodbye!")]);
    map.check();
}
36031
#[test]
fn test_iter_entering_root_twice() {
    // Both ends of the iterator visit the same (root) node; the two returned
    // references must remain valid and independently mutable.
    let mut map: BTreeMap<_, _> = (0..2).map(|i| (i, i)).collect();
    let mut iter = map.iter_mut();
    let first = iter.next().unwrap();
    let last = iter.next_back().unwrap();
    assert_eq!(first, (&0, &mut 0));
    assert_eq!(last, (&1, &mut 1));
    *first.1 = 24;
    *last.1 = 42;
    assert_eq!(first, (&0, &mut 24));
    assert_eq!(last, (&1, &mut 42));
    assert_eq!(iter.next(), None);
    assert_eq!(iter.next_back(), None);
    map.check();
}
36048
#[test]
fn test_iter_descending_to_same_node_twice() {
    let mut map: BTreeMap<_, _> = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i)).collect();
    let mut it = map.iter_mut();
    // Descend into the first child and keep hold of its first element.
    let front = it.next().unwrap();
    // Exhaust the iterator from the back, which descends into the first
    // child a second time after running through the second child.
    while let Some(_back) = it.next_back() {}
    // Check immutable access.
    assert_eq!(front, (&0, &mut 0));
    // Perform mutable access.
    *front.1 = 42;
    map.check();
}
36063
#[test]
fn test_iter_mixed() {
    // Miri is too slow
    let size = if cfg!(miri) { 200 } else { 10000 };

    let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();

    // Alternates front/back consumption for the first quarter, then drains
    // the remaining middle half from the front, checking size hints
    // throughout.
    fn check_mixed<I>(size: usize, mut iter: I)
    where
        I: Iterator<Item = (usize, usize)> + DoubleEndedIterator,
    {
        for i in 0..size / 4 {
            let remaining = size - i * 2;
            assert_eq!(iter.size_hint(), (remaining, Some(remaining)));
            assert_eq!(iter.next(), Some((i, i)));
            assert_eq!(iter.next_back(), Some((size - i - 1, size - i - 1)));
        }
        for i in size / 4..size * 3 / 4 {
            let remaining = size * 3 / 4 - i;
            assert_eq!(iter.size_hint(), (remaining, Some(remaining)));
            assert_eq!(iter.next(), Some((i, i)));
        }
        assert_eq!(iter.size_hint(), (0, Some(0)));
        assert_eq!(iter.next(), None);
    }
    check_mixed(size, map.iter().map(|(&k, &v)| (k, v)));
    check_mixed(size, map.iter_mut().map(|(&k, &mut v)| (k, v)));
    check_mixed(size, map.into_iter());
}
36091
#[test]
fn test_iter_min_max() {
    let mut a = BTreeMap::new();
    // All iterators over an empty map report no extremes.
    assert_eq!(a.iter().min(), None);
    assert_eq!(a.iter().max(), None);
    assert_eq!(a.iter_mut().min(), None);
    assert_eq!(a.iter_mut().max(), None);
    assert_eq!(a.range(..).min(), None);
    assert_eq!(a.range(..).max(), None);
    assert_eq!(a.range_mut(..).min(), None);
    assert_eq!(a.range_mut(..).max(), None);
    assert_eq!(a.keys().min(), None);
    assert_eq!(a.keys().max(), None);
    assert_eq!(a.values().min(), None);
    assert_eq!(a.values().max(), None);
    assert_eq!(a.values_mut().min(), None);
    assert_eq!(a.values_mut().max(), None);
    // The values (42 > 24) deliberately invert key order, so value extremes
    // differ from the extremes of the pair iterators (which compare by key).
    a.insert(1, 42);
    a.insert(2, 24);
    assert_eq!(a.iter().min(), Some((&1, &42)));
    assert_eq!(a.iter().max(), Some((&2, &24)));
    assert_eq!(a.iter_mut().min(), Some((&1, &mut 42)));
    assert_eq!(a.iter_mut().max(), Some((&2, &mut 24)));
    assert_eq!(a.range(..).min(), Some((&1, &42)));
    assert_eq!(a.range(..).max(), Some((&2, &24)));
    assert_eq!(a.range_mut(..).min(), Some((&1, &mut 42)));
    assert_eq!(a.range_mut(..).max(), Some((&2, &mut 24)));
    assert_eq!(a.keys().min(), Some(&1));
    assert_eq!(a.keys().max(), Some(&2));
    assert_eq!(a.values().min(), Some(&24));
    assert_eq!(a.values().max(), Some(&42));
    assert_eq!(a.values_mut().min(), Some(&mut 24));
    assert_eq!(a.values_mut().max(), Some(&mut 42));
    a.check();
}
36127
// Collects the keys found in `range`, asserting along the way that every
// pair in the test maps has the form (k, k).
fn range_keys(map: &BTreeMap<i32, i32>, range: impl RangeBounds<i32>) -> Vec<i32> {
    let mut keys = Vec::new();
    for (&k, &v) in map.range(range) {
        assert_eq!(k, v);
        keys.push(k);
    }
    keys
}
36136
#[test]
fn test_range_small() {
    let size = 4;

    let map: BTreeMap<_, _> = (1..=size).map(|i| (i, i)).collect();
    let all: Vec<_> = (1..=size).collect();
    let (first, last) = (vec![all[0]], vec![all[size as usize - 1]]);

    // Every bound combination that covers the full key range 1..=size:
    assert_eq!(range_keys(&map, (Excluded(0), Excluded(size + 1))), all);
    assert_eq!(range_keys(&map, (Excluded(0), Included(size + 1))), all);
    assert_eq!(range_keys(&map, (Excluded(0), Included(size))), all);
    assert_eq!(range_keys(&map, (Excluded(0), Unbounded)), all);
    assert_eq!(range_keys(&map, (Included(0), Excluded(size + 1))), all);
    assert_eq!(range_keys(&map, (Included(0), Included(size + 1))), all);
    assert_eq!(range_keys(&map, (Included(0), Included(size))), all);
    assert_eq!(range_keys(&map, (Included(0), Unbounded)), all);
    assert_eq!(range_keys(&map, (Included(1), Excluded(size + 1))), all);
    assert_eq!(range_keys(&map, (Included(1), Included(size + 1))), all);
    assert_eq!(range_keys(&map, (Included(1), Included(size))), all);
    assert_eq!(range_keys(&map, (Included(1), Unbounded)), all);
    assert_eq!(range_keys(&map, (Unbounded, Excluded(size + 1))), all);
    assert_eq!(range_keys(&map, (Unbounded, Included(size + 1))), all);
    assert_eq!(range_keys(&map, (Unbounded, Included(size))), all);
    assert_eq!(range_keys(&map, ..), all);

    // Empty ranges entirely below the smallest key:
    assert_eq!(range_keys(&map, (Excluded(0), Excluded(1))), vec![]);
    assert_eq!(range_keys(&map, (Excluded(0), Included(0))), vec![]);
    assert_eq!(range_keys(&map, (Included(0), Included(0))), vec![]);
    assert_eq!(range_keys(&map, (Included(0), Excluded(1))), vec![]);
    assert_eq!(range_keys(&map, (Unbounded, Excluded(1))), vec![]);
    assert_eq!(range_keys(&map, (Unbounded, Included(0))), vec![]);
    // Ranges selecting only the first key:
    assert_eq!(range_keys(&map, (Excluded(0), Excluded(2))), first);
    assert_eq!(range_keys(&map, (Excluded(0), Included(1))), first);
    assert_eq!(range_keys(&map, (Included(0), Excluded(2))), first);
    assert_eq!(range_keys(&map, (Included(0), Included(1))), first);
    assert_eq!(range_keys(&map, (Included(1), Excluded(2))), first);
    assert_eq!(range_keys(&map, (Included(1), Included(1))), first);
    assert_eq!(range_keys(&map, (Unbounded, Excluded(2))), first);
    assert_eq!(range_keys(&map, (Unbounded, Included(1))), first);
    // Ranges selecting only the last key:
    assert_eq!(range_keys(&map, (Excluded(size - 1), Excluded(size + 1))), last);
    assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size + 1))), last);
    assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size))), last);
    assert_eq!(range_keys(&map, (Excluded(size - 1), Unbounded)), last);
    assert_eq!(range_keys(&map, (Included(size), Excluded(size + 1))), last);
    assert_eq!(range_keys(&map, (Included(size), Included(size + 1))), last);
    assert_eq!(range_keys(&map, (Included(size), Included(size))), last);
    assert_eq!(range_keys(&map, (Included(size), Unbounded)), last);
    // Empty ranges entirely above the largest key:
    assert_eq!(range_keys(&map, (Excluded(size), Excluded(size + 1))), vec![]);
    assert_eq!(range_keys(&map, (Excluded(size), Included(size))), vec![]);
    assert_eq!(range_keys(&map, (Excluded(size), Unbounded)), vec![]);
    assert_eq!(range_keys(&map, (Included(size + 1), Excluded(size + 1))), vec![]);
    assert_eq!(range_keys(&map, (Included(size + 1), Included(size + 1))), vec![]);
    assert_eq!(range_keys(&map, (Included(size + 1), Unbounded)), vec![]);

    // Sugared range syntax:
    assert_eq!(range_keys(&map, ..3), vec![1, 2]);
    assert_eq!(range_keys(&map, 3..), vec![3, 4]);
    assert_eq!(range_keys(&map, 2..=3), vec![2, 3]);
}
36195
#[test]
fn test_range_height_1() {
    // Tests tree with a root and 2 leaves. The single key in the root node is
    // close to the middle among the keys.

    let map: BTreeMap<_, _> = (0..MIN_INSERTS_HEIGHT_1 as i32).map(|i| (i, i)).collect();
    let middle = MIN_INSERTS_HEIGHT_1 as i32 / 2;
    for pivot in middle - 2..=middle + 2 {
        // Two-key window [pivot, pivot + 1] with every bound combination.
        assert_eq!(range_keys(&map, (Excluded(pivot), Excluded(pivot + 1))), vec![]);
        assert_eq!(range_keys(&map, (Excluded(pivot), Included(pivot + 1))), vec![pivot + 1]);
        assert_eq!(range_keys(&map, (Included(pivot), Excluded(pivot + 1))), vec![pivot]);
        assert_eq!(range_keys(&map, (Included(pivot), Included(pivot + 1))), vec![pivot, pivot + 1]);

        // Two-key window [pivot - 1, pivot] with every bound combination.
        assert_eq!(range_keys(&map, (Excluded(pivot - 1), Excluded(pivot))), vec![]);
        assert_eq!(range_keys(&map, (Included(pivot - 1), Excluded(pivot))), vec![pivot - 1]);
        assert_eq!(range_keys(&map, (Excluded(pivot - 1), Included(pivot))), vec![pivot]);
        assert_eq!(range_keys(&map, (Included(pivot - 1), Included(pivot))), vec![pivot - 1, pivot]);
    }
}
36215
#[test]
fn test_range_large() {
    // Keys 1..=200 — large enough for a multi-level tree. `range_keys`
    // (a helper defined earlier in this test suite) collects the keys
    // selected by the given pair of bounds.
    let size = 200;

    let map: BTreeMap<_, _> = (1..=size).map(|i| (i, i)).collect();
    let all: Vec<_> = (1..=size).collect();
    let (first, last) = (vec![all[0]], vec![all[size as usize - 1]]);

    // Any combination of bounds at or beyond both ends selects every key.
    assert_eq!(range_keys(&map, (Excluded(0), Excluded(size + 1))), all);
    assert_eq!(range_keys(&map, (Excluded(0), Included(size + 1))), all);
    assert_eq!(range_keys(&map, (Excluded(0), Included(size))), all);
    assert_eq!(range_keys(&map, (Excluded(0), Unbounded)), all);
    assert_eq!(range_keys(&map, (Included(0), Excluded(size + 1))), all);
    assert_eq!(range_keys(&map, (Included(0), Included(size + 1))), all);
    assert_eq!(range_keys(&map, (Included(0), Included(size))), all);
    assert_eq!(range_keys(&map, (Included(0), Unbounded)), all);
    assert_eq!(range_keys(&map, (Included(1), Excluded(size + 1))), all);
    assert_eq!(range_keys(&map, (Included(1), Included(size + 1))), all);
    assert_eq!(range_keys(&map, (Included(1), Included(size))), all);
    assert_eq!(range_keys(&map, (Included(1), Unbounded)), all);
    assert_eq!(range_keys(&map, (Unbounded, Excluded(size + 1))), all);
    assert_eq!(range_keys(&map, (Unbounded, Included(size + 1))), all);
    assert_eq!(range_keys(&map, (Unbounded, Included(size))), all);
    assert_eq!(range_keys(&map, ..), all);

    // Bounds pinned below the smallest key (1) select nothing...
    assert_eq!(range_keys(&map, (Excluded(0), Excluded(1))), vec![]);
    assert_eq!(range_keys(&map, (Excluded(0), Included(0))), vec![]);
    assert_eq!(range_keys(&map, (Included(0), Included(0))), vec![]);
    assert_eq!(range_keys(&map, (Included(0), Excluded(1))), vec![]);
    assert_eq!(range_keys(&map, (Unbounded, Excluded(1))), vec![]);
    assert_eq!(range_keys(&map, (Unbounded, Included(0))), vec![]);
    // ...while bounds straddling only the smallest key select exactly it.
    assert_eq!(range_keys(&map, (Excluded(0), Excluded(2))), first);
    assert_eq!(range_keys(&map, (Excluded(0), Included(1))), first);
    assert_eq!(range_keys(&map, (Included(0), Excluded(2))), first);
    assert_eq!(range_keys(&map, (Included(0), Included(1))), first);
    assert_eq!(range_keys(&map, (Included(1), Excluded(2))), first);
    assert_eq!(range_keys(&map, (Included(1), Included(1))), first);
    assert_eq!(range_keys(&map, (Unbounded, Excluded(2))), first);
    assert_eq!(range_keys(&map, (Unbounded, Included(1))), first);
    // Bounds straddling only the greatest key (size) select exactly it.
    assert_eq!(range_keys(&map, (Excluded(size - 1), Excluded(size + 1))), last);
    assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size + 1))), last);
    assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size))), last);
    assert_eq!(range_keys(&map, (Excluded(size - 1), Unbounded)), last);
    assert_eq!(range_keys(&map, (Included(size), Excluded(size + 1))), last);
    assert_eq!(range_keys(&map, (Included(size), Included(size + 1))), last);
    assert_eq!(range_keys(&map, (Included(size), Included(size))), last);
    assert_eq!(range_keys(&map, (Included(size), Unbounded)), last);
    // Bounds pinned above the greatest key select nothing.
    assert_eq!(range_keys(&map, (Excluded(size), Excluded(size + 1))), vec![]);
    assert_eq!(range_keys(&map, (Excluded(size), Included(size))), vec![]);
    assert_eq!(range_keys(&map, (Excluded(size), Unbounded)), vec![]);
    assert_eq!(range_keys(&map, (Included(size + 1), Excluded(size + 1))), vec![]);
    assert_eq!(range_keys(&map, (Included(size + 1), Included(size + 1))), vec![]);
    assert_eq!(range_keys(&map, (Included(size + 1), Unbounded)), vec![]);

    // Asserts that two iterators of (&key, &value) pairs yield the same
    // sequence; used to compare equivalent spellings of the same range.
    fn check<'a, L, R>(lhs: L, rhs: R)
    where
        L: IntoIterator<Item = (&'a i32, &'a i32)>,
        R: IntoIterator<Item = (&'a i32, &'a i32)>,
    {
        let lhs: Vec<_> = lhs.into_iter().collect();
        let rhs: Vec<_> = rhs.into_iter().collect();
        assert_eq!(lhs, rhs);
    }

    // `..=n` must behave like `..n+1`, and inclusive ranges must clip to the
    // map's actual key span.
    check(map.range(..=100), map.range(..101));
    check(map.range(5..=8), vec![(&5, &5), (&6, &6), (&7, &7), (&8, &8)]);
    check(map.range(-1..=2), vec![(&1, &1), (&2, &2)]);
}
36284
#[test]
fn test_range_inclusive_max_value() {
    // A range that both starts and ends at the greatest possible key must
    // still yield that key; `max..=max` must not overflow internally.
    let max = usize::MAX;
    let mut map = BTreeMap::new();
    map.insert(max, 0);

    let found: Vec<_> = map.range(max..=max).collect();
    assert_eq!(found, &[(&max, &0)]);
}
36292
#[test]
fn test_range_equal_empty_cases() {
    // A range whose bounds meet at the same key, with one side exclusive,
    // selects nothing.
    let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
    for bounds in [(Included(2), Excluded(2)), (Excluded(2), Included(2))] {
        assert_eq!(map.range(bounds).next(), None);
    }
}
36299
#[test]
#[should_panic]
fn test_range_equal_excluded() {
    // Both bounds exclusive and equal form an invalid range: `range` panics.
    let mut map = BTreeMap::new();
    for i in 0..5 {
        map.insert(i, i);
    }
    let _ = map.range((Excluded(2), Excluded(2)));
}
36306
#[test]
#[should_panic]
fn test_range_backwards_1() {
    // Inverted bounds, both inclusive: start key above end key must panic.
    let mut map = BTreeMap::new();
    for i in 0..5 {
        map.insert(i, i);
    }
    let _ = map.range((Included(3), Included(2)));
}
36313
#[test]
#[should_panic]
fn test_range_backwards_2() {
    // Inverted bounds, inclusive start / exclusive end: must panic.
    let mut map = BTreeMap::new();
    for i in 0..5 {
        map.insert(i, i);
    }
    let _ = map.range((Included(3), Excluded(2)));
}
36320
#[test]
#[should_panic]
fn test_range_backwards_3() {
    // Inverted bounds, exclusive start / inclusive end: must panic.
    let mut map = BTreeMap::new();
    for i in 0..5 {
        map.insert(i, i);
    }
    let _ = map.range((Excluded(3), Included(2)));
}
36327
#[test]
#[should_panic]
fn test_range_backwards_4() {
    // Inverted bounds, both exclusive: must panic.
    let mut map = BTreeMap::new();
    for i in 0..5 {
        map.insert(i, i);
    }
    let _ = map.range((Excluded(3), Excluded(2)));
}
36334
#[test]
#[should_panic]
fn test_range_finding_ill_order_in_map() {
    // `Cyclic3` (defined elsewhere in this suite) has a deliberately cyclic,
    // inconsistent ordering — presumably `C < A` holds, so the `range` call
    // below does run; confirm against its `Ord` impl. The comments below make
    // clear the call is expected to panic when the search detects the ill
    // order inside the map, hence the `should_panic` above (previously
    // missing, which would have turned the intended panic into a test
    // failure).
    let mut map = BTreeMap::new();
    map.insert(Cyclic3::B, ());
    // Lacking static_assert, call `range` conditionally, to emphasise that
    // we cause a different panic than `test_range_backwards_1` does.
    // A more refined `should_panic` would be welcome.
    if Cyclic3::C < Cyclic3::A {
        map.range(Cyclic3::C..=Cyclic3::A);
    }
}
36346
36347#[test]
36348fn test_range_finding_ill_order_in_range_ord() {
36349    // Has proper order the first time asked, then flips around.
36350    struct EvilTwin(i32);
36351
36352    impl PartialOrd for EvilTwin {
36353        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
36354            Some(self.cmp(other))
36355        }
36356    }
36357
36358    static COMPARES: AtomicUsize = AtomicUsize::new(0);
36359    impl Ord for EvilTwin {
36360        fn cmp(&self, other: &Self) -> Ordering {
36361            let ord = self.0.cmp(&other.0);
36362            if COMPARES.fetch_add(1, SeqCst) > 0 { ord.reverse() } else { ord }
36363        }
36364    }
36365
36366    impl PartialEq for EvilTwin {
36367        fn eq(&self, other: &Self) -> bool {
36368            self.0.eq(&other.0)
36369        }
36370    }
36371
36372    impl Eq for EvilTwin {}
36373
36374    #[derive(PartialEq, Eq, PartialOrd, Ord)]
36375    struct CompositeKey(i32, EvilTwin);
36376
36377    impl Borrow<EvilTwin> for CompositeKey {
36378        fn borrow(&self) -> &EvilTwin {
36379            &self.1
36380        }
36381    }
36382
36383    let map = (0..12).map(|i| (CompositeKey(i, EvilTwin(i)), ())).collect::<BTreeMap<_, _>>();
36384    map.range(EvilTwin(5)..=EvilTwin(7));
36385}
36386
#[test]
fn test_range_1000() {
    // Miri is too slow
    let size = if cfg!(miri) { MIN_INSERTS_HEIGHT_2 as u32 } else { 1000 };
    let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();

    // Asserts that the given bounds select every pair (0,0)..(size,size),
    // in ascending key order, with nothing extra.
    fn expect_all(map: &BTreeMap<u32, u32>, size: u32, min: Bound<&u32>, max: Bound<&u32>) {
        let selected: Vec<_> = map.range((min, max)).map(|(&k, &v)| (k, v)).collect();
        let expected: Vec<_> = (0..size).map(|i| (i, i)).collect();
        assert_eq!(selected, expected);
    }

    // Every spelling of "the whole key span" must behave identically.
    expect_all(&map, size, Included(&0), Excluded(&size));
    expect_all(&map, size, Unbounded, Excluded(&size));
    expect_all(&map, size, Included(&0), Included(&(size - 1)));
    expect_all(&map, size, Unbounded, Included(&(size - 1)));
    expect_all(&map, size, Included(&0), Unbounded);
    expect_all(&map, size, Unbounded, Unbounded);
}
36410
#[test]
fn test_range_borrowed_key() {
    // Keys are owned `String`s, but the range is queried with `&str` bounds,
    // exercising the `K: Borrow<Q>` path of `range`.
    let map: BTreeMap<String, i32> = [("aardvark", 1), ("baboon", 2), ("coyote", 3), ("dingo", 4)]
        .iter()
        .map(|&(name, id)| (name.to_string(), id))
        .collect();
    // NOTE: would like to use simply "b".."d" here...
    let mut iter = map.range::<str, _>((Included("b"), Excluded("d")));
    assert_eq!(iter.next(), Some((&"baboon".to_string(), &2)));
    assert_eq!(iter.next(), Some((&"coyote".to_string(), &3)));
    assert_eq!(iter.next(), None);
}
36424
#[test]
fn test_range() {
    // Exhaustively (or with a coarse step under Miri) check that every
    // inclusive sub-range [lo, hi] yields exactly the pairs (lo,lo)..=(hi,hi).
    let size = 200;
    // Miri is too slow
    let step = if cfg!(miri) { 66 } else { 1 };
    let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();

    for lo in (0..size).step_by(step) {
        for hi in (lo..size).step_by(step) {
            let found: Vec<_> =
                map.range((Included(&lo), Included(&hi))).map(|(&k, &v)| (k, v)).collect();
            let expected: Vec<_> = (lo..=hi).map(|x| (x, x)).collect();
            assert_eq!(found, expected);
        }
    }
}
36445
#[test]
fn test_range_mut() {
    // Same coverage as `test_range`, but through the mutable iterator.
    let size = 200;
    // Miri is too slow
    let step = if cfg!(miri) { 66 } else { 1 };
    let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();

    for lo in (0..size).step_by(step) {
        for hi in (lo..size).step_by(step) {
            let found: Vec<_> =
                map.range_mut((Included(&lo), Included(&hi))).map(|(&k, &mut v)| (k, v)).collect();
            let expected: Vec<_> = (lo..=hi).map(|x| (x, x)).collect();
            assert_eq!(found, expected);
        }
    }
    map.check();
}
36467
#[test]
fn test_retain() {
    // Keep only even keys: exactly half of 0..100 survives, values untouched.
    let mut map: BTreeMap<i32, i32> = (0..100).map(|x| (x, x * 10)).collect();
    map.retain(|&k, _| k % 2 == 0);

    assert_eq!(map.len(), 50);
    for k in [2, 4, 6] {
        assert_eq!(map[&k], k * 10);
    }
}
36478
// Tests for the (unstable) `drain_filter` iterator: it visits entries in
// ascending key order, removes those for which the closure returns true,
// and lets the closure mutate values. `map.check()` is a test-suite helper
// that verifies the tree's internal invariants; `CrashTestDummy`/`Panic`
// (also suite helpers) count queries/clones/drops and can panic on demand.
mod test_drain_filter {
    use super::*;

    // Draining an empty map must not invoke the predicate at all.
    #[test]
    fn empty() {
        let mut map: BTreeMap<i32, i32> = BTreeMap::new();
        map.drain_filter(|_, _| unreachable!("there's nothing to decide on"));
        assert!(map.is_empty());
        map.check();
    }

    // Explicitly consumes the iterator, where most test cases drop it instantly.
    #[test]
    fn consumed_keeping_all() {
        let pairs = (0..3).map(|i| (i, i));
        let mut map: BTreeMap<_, _> = pairs.collect();
        assert!(map.drain_filter(|_, _| false).eq(iter::empty()));
        map.check();
    }

    // Explicitly consumes the iterator, where most test cases drop it instantly.
    #[test]
    fn consumed_removing_all() {
        let pairs = (0..3).map(|i| (i, i));
        let mut map: BTreeMap<_, _> = pairs.clone().collect();
        assert!(map.drain_filter(|_, _| true).eq(pairs));
        assert!(map.is_empty());
        map.check();
    }

    // Explicitly consumes the iterator and modifies values through it.
    #[test]
    fn mutating_and_keeping() {
        let pairs = (0..3).map(|i| (i, i));
        let mut map: BTreeMap<_, _> = pairs.collect();
        assert!(
            map.drain_filter(|_, v| {
                *v += 6;
                false
            })
            .eq(iter::empty())
        );
        // Values were mutated even though nothing was removed.
        assert!(map.keys().copied().eq(0..3));
        assert!(map.values().copied().eq(6..9));
        map.check();
    }

    // Explicitly consumes the iterator and modifies values through it.
    #[test]
    fn mutating_and_removing() {
        let pairs = (0..3).map(|i| (i, i));
        let mut map: BTreeMap<_, _> = pairs.collect();
        assert!(
            map.drain_filter(|_, v| {
                *v += 6;
                true
            })
            .eq((0..3).map(|i| (i, i + 6)))
        );
        assert!(map.is_empty());
        map.check();
    }

    // The "underfull" cases use 3 entries: fewer than a full node, so the
    // root is the only node involved.
    #[test]
    fn underfull_keeping_all() {
        let pairs = (0..3).map(|i| (i, i));
        let mut map: BTreeMap<_, _> = pairs.collect();
        map.drain_filter(|_, _| false);
        assert!(map.keys().copied().eq(0..3));
        map.check();
    }

    #[test]
    fn underfull_removing_one() {
        let pairs = (0..3).map(|i| (i, i));
        for doomed in 0..3 {
            let mut map: BTreeMap<_, _> = pairs.clone().collect();
            map.drain_filter(|i, _| *i == doomed);
            assert_eq!(map.len(), 2);
            map.check();
        }
    }

    #[test]
    fn underfull_keeping_one() {
        let pairs = (0..3).map(|i| (i, i));
        for sacred in 0..3 {
            let mut map: BTreeMap<_, _> = pairs.clone().collect();
            map.drain_filter(|i, _| *i != sacred);
            assert!(map.keys().copied().eq(sacred..=sacred));
            map.check();
        }
    }

    #[test]
    fn underfull_removing_all() {
        let pairs = (0..3).map(|i| (i, i));
        let mut map: BTreeMap<_, _> = pairs.collect();
        map.drain_filter(|_, _| true);
        assert!(map.is_empty());
        map.check();
    }

    // The "height 0" cases fill a single node to capacity (NODE_CAPACITY is
    // a suite constant), exercising removal from a full root leaf.
    #[test]
    fn height_0_keeping_all() {
        let pairs = (0..NODE_CAPACITY).map(|i| (i, i));
        let mut map: BTreeMap<_, _> = pairs.collect();
        map.drain_filter(|_, _| false);
        assert!(map.keys().copied().eq(0..NODE_CAPACITY));
        map.check();
    }

    #[test]
    fn height_0_removing_one() {
        let pairs = (0..NODE_CAPACITY).map(|i| (i, i));
        for doomed in 0..NODE_CAPACITY {
            let mut map: BTreeMap<_, _> = pairs.clone().collect();
            map.drain_filter(|i, _| *i == doomed);
            assert_eq!(map.len(), NODE_CAPACITY - 1);
            map.check();
        }
    }

    #[test]
    fn height_0_keeping_one() {
        let pairs = (0..NODE_CAPACITY).map(|i| (i, i));
        for sacred in 0..NODE_CAPACITY {
            let mut map: BTreeMap<_, _> = pairs.clone().collect();
            map.drain_filter(|i, _| *i != sacred);
            assert!(map.keys().copied().eq(sacred..=sacred));
            map.check();
        }
    }

    #[test]
    fn height_0_removing_all() {
        let pairs = (0..NODE_CAPACITY).map(|i| (i, i));
        let mut map: BTreeMap<_, _> = pairs.collect();
        map.drain_filter(|_, _| true);
        assert!(map.is_empty());
        map.check();
    }

    #[test]
    fn height_0_keeping_half() {
        let mut map: BTreeMap<_, _> = (0..16).map(|i| (i, i)).collect();
        assert_eq!(map.drain_filter(|i, _| *i % 2 == 0).count(), 8);
        assert_eq!(map.len(), 8);
        map.check();
    }

    // The "height 1"/"height 2" cases use the minimal sizes (suite constants)
    // that force a tree of that height, exercising node merges during drain.
    #[test]
    fn height_1_removing_all() {
        let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i));
        let mut map: BTreeMap<_, _> = pairs.collect();
        map.drain_filter(|_, _| true);
        assert!(map.is_empty());
        map.check();
    }

    #[test]
    fn height_1_removing_one() {
        let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i));
        for doomed in 0..MIN_INSERTS_HEIGHT_1 {
            let mut map: BTreeMap<_, _> = pairs.clone().collect();
            map.drain_filter(|i, _| *i == doomed);
            assert_eq!(map.len(), MIN_INSERTS_HEIGHT_1 - 1);
            map.check();
        }
    }

    #[test]
    fn height_1_keeping_one() {
        let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i));
        for sacred in 0..MIN_INSERTS_HEIGHT_1 {
            let mut map: BTreeMap<_, _> = pairs.clone().collect();
            map.drain_filter(|i, _| *i != sacred);
            assert!(map.keys().copied().eq(sacred..=sacred));
            map.check();
        }
    }

    // step_by(12) samples the key space rather than trying every key, to keep
    // the height-2 cases fast.
    #[test]
    fn height_2_removing_one() {
        let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i));
        for doomed in (0..MIN_INSERTS_HEIGHT_2).step_by(12) {
            let mut map: BTreeMap<_, _> = pairs.clone().collect();
            map.drain_filter(|i, _| *i == doomed);
            assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2 - 1);
            map.check();
        }
    }

    #[test]
    fn height_2_keeping_one() {
        let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i));
        for sacred in (0..MIN_INSERTS_HEIGHT_2).step_by(12) {
            let mut map: BTreeMap<_, _> = pairs.clone().collect();
            map.drain_filter(|i, _| *i != sacred);
            assert!(map.keys().copied().eq(sacred..=sacred));
            map.check();
        }
    }

    #[test]
    fn height_2_removing_all() {
        let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i));
        let mut map: BTreeMap<_, _> = pairs.collect();
        map.drain_filter(|_, _| true);
        assert!(map.is_empty());
        map.check();
    }

    // A key's destructor panics while the drain is dropping removed entries.
    // No element may leak or be dropped twice, and all three keys must still
    // have been queried/dropped exactly as counted below.
    #[test]
    fn drop_panic_leak() {
        let a = CrashTestDummy::new(0);
        let b = CrashTestDummy::new(1);
        let c = CrashTestDummy::new(2);
        let mut map = BTreeMap::new();
        map.insert(a.spawn(Panic::Never), ());
        map.insert(b.spawn(Panic::InDrop), ());
        map.insert(c.spawn(Panic::Never), ());

        catch_unwind(move || drop(map.drain_filter(|dummy, _| dummy.query(true)))).unwrap_err();

        assert_eq!(a.queried(), 1);
        assert_eq!(b.queried(), 1);
        assert_eq!(c.queried(), 0);
        assert_eq!(a.dropped(), 1);
        assert_eq!(b.dropped(), 1);
        assert_eq!(c.dropped(), 1);
    }

    // The predicate itself panics on the second key: the first (removed) key
    // is dropped, the rest stay in the map untouched.
    #[test]
    fn pred_panic_leak() {
        let a = CrashTestDummy::new(0);
        let b = CrashTestDummy::new(1);
        let c = CrashTestDummy::new(2);
        let mut map = BTreeMap::new();
        map.insert(a.spawn(Panic::Never), ());
        map.insert(b.spawn(Panic::InQuery), ());
        map.insert(c.spawn(Panic::InQuery), ());

        catch_unwind(AssertUnwindSafe(|| drop(map.drain_filter(|dummy, _| dummy.query(true)))))
            .unwrap_err();

        assert_eq!(a.queried(), 1);
        assert_eq!(b.queried(), 1);
        assert_eq!(c.queried(), 0);
        assert_eq!(a.dropped(), 1);
        assert_eq!(b.dropped(), 0);
        assert_eq!(c.dropped(), 0);
        assert_eq!(map.len(), 2);
        assert_eq!(map.first_entry().unwrap().key().id(), 1);
        assert_eq!(map.last_entry().unwrap().key().id(), 2);
        map.check();
    }

    // Same as above, but attempt to use the iterator again after the panic in the predicate
    #[test]
    fn pred_panic_reuse() {
        let a = CrashTestDummy::new(0);
        let b = CrashTestDummy::new(1);
        let c = CrashTestDummy::new(2);
        let mut map = BTreeMap::new();
        map.insert(a.spawn(Panic::Never), ());
        map.insert(b.spawn(Panic::InQuery), ());
        map.insert(c.spawn(Panic::InQuery), ());

        {
            let mut it = map.drain_filter(|dummy, _| dummy.query(true));
            catch_unwind(AssertUnwindSafe(|| while it.next().is_some() {})).unwrap_err();
            // Iterator behaviour after a panic is explicitly unspecified,
            // so this is just the current implementation:
            let result = catch_unwind(AssertUnwindSafe(|| it.next()));
            assert!(matches!(result, Ok(None)));
        }

        assert_eq!(a.queried(), 1);
        assert_eq!(b.queried(), 1);
        assert_eq!(c.queried(), 0);
        assert_eq!(a.dropped(), 1);
        assert_eq!(b.dropped(), 0);
        assert_eq!(c.dropped(), 0);
        assert_eq!(map.len(), 2);
        assert_eq!(map.first_entry().unwrap().key().id(), 1);
        assert_eq!(map.last_entry().unwrap().key().id(), 2);
        map.check();
    }
}
36769
#[test]
fn test_borrow() {
    // make sure these compile -- using the Borrow trait
    {
        let mut map = BTreeMap::new();
        map.insert("0".to_string(), 1);
        assert_eq!(map["0"], 1);
    }

    {
        let mut map = BTreeMap::new();
        map.insert(Box::new(0), 1);
        assert_eq!(map[&0], 1);
    }

    {
        let mut map = BTreeMap::new();
        map.insert(Box::new([0, 1]) as Box<[i32]>, 1);
        assert_eq!(map[&[0, 1][..]], 1);
    }

    {
        let mut map = BTreeMap::new();
        map.insert(Rc::new(0), 1);
        assert_eq!(map[&0], 1);
    }

    // Each helper below only compiles if the `Box<T>`-keyed method accepts a
    // plain `&T` (or a `T` bound, for the range methods) via `Borrow`.
    #[allow(dead_code)]
    fn get<T: Ord>(map: &BTreeMap<Box<T>, ()>, key: &T) {
        map.get(key);
    }

    #[allow(dead_code)]
    fn get_mut<T: Ord>(map: &mut BTreeMap<Box<T>, ()>, key: &T) {
        map.get_mut(key);
    }

    #[allow(dead_code)]
    fn get_key_value<T: Ord>(map: &BTreeMap<Box<T>, ()>, key: &T) {
        map.get_key_value(key);
    }

    #[allow(dead_code)]
    fn contains_key<T: Ord>(map: &BTreeMap<Box<T>, ()>, key: &T) {
        map.contains_key(key);
    }

    #[allow(dead_code)]
    fn range<T: Ord>(map: &BTreeMap<Box<T>, ()>, start: T) {
        map.range(start..);
    }

    #[allow(dead_code)]
    fn range_mut<T: Ord>(map: &mut BTreeMap<Box<T>, ()>, start: T) {
        map.range_mut(start..);
    }

    #[allow(dead_code)]
    fn remove<T: Ord>(map: &mut BTreeMap<Box<T>, ()>, key: &T) {
        map.remove(key);
    }

    #[allow(dead_code)]
    fn remove_entry<T: Ord>(map: &mut BTreeMap<Box<T>, ()>, key: &T) {
        map.remove_entry(key);
    }

    #[allow(dead_code)]
    fn split_off<T: Ord>(map: &mut BTreeMap<Box<T>, ()>, key: &T) {
        map.split_off(key);
    }
}
36842
#[test]
fn test_entry() {
    // Exercise all four entry-API paths against a pre-filled map.
    let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
    let mut map: BTreeMap<_, _> = xs.iter().cloned().collect();

    // Existing key (insert): `insert` swaps in the new value, returning the old.
    match map.entry(1) {
        Occupied(mut entry) => {
            assert_eq!(entry.get(), &10);
            assert_eq!(entry.insert(100), 10);
        }
        Vacant(_) => unreachable!(),
    }
    assert_eq!(map.get(&1).unwrap(), &100);
    assert_eq!(map.len(), 6);

    // Existing key (update): mutate the value in place via `get_mut`.
    match map.entry(2) {
        Occupied(mut entry) => {
            let value = entry.get_mut();
            *value *= 10;
        }
        Vacant(_) => unreachable!(),
    }
    assert_eq!(map.get(&2).unwrap(), &200);
    assert_eq!(map.len(), 6);
    map.check();

    // Existing key (take): `remove` yields the stored value and the entry.
    match map.entry(3) {
        Occupied(entry) => {
            assert_eq!(entry.remove(), 30);
        }
        Vacant(_) => unreachable!(),
    }
    assert_eq!(map.get(&3), None);
    assert_eq!(map.len(), 5);
    map.check();

    // Inexistent key (insert): `VacantEntry::insert` returns a reference to
    // the freshly stored value.
    match map.entry(10) {
        Vacant(entry) => {
            assert_eq!(*entry.insert(1000), 1000);
        }
        Occupied(_) => unreachable!(),
    }
    assert_eq!(map.get(&10).unwrap(), &1000);
    assert_eq!(map.len(), 6);
    map.check();
}
36894
#[test]
fn test_extend_ref() {
    // Extending from `&BTreeMap` copies the (Copy) keys and values over.
    let mut a = BTreeMap::new();
    a.insert(1, "one");

    let b: BTreeMap<_, _> = [(2, "two"), (3, "three")].iter().copied().collect();
    a.extend(&b);

    assert_eq!(a.len(), 3);
    for (key, value) in [(1, "one"), (2, "two"), (3, "three")] {
        assert_eq!(a[&key], value);
    }
    a.check();
}
36911
#[test]
fn test_zst() {
    // Zero-sized key and value: the map stores no actual data but must still
    // track length correctly and deduplicate the single possible key.
    fn assert_one_entry(m: &BTreeMap<(), ()>) {
        assert_eq!(m.len(), 1);
        assert_eq!(m.iter().count(), 1);
    }

    let mut m = BTreeMap::new();
    assert_eq!(m.len(), 0);

    // First insert of the unit key is new...
    assert_eq!(m.insert((), ()), None);
    assert_eq!(m.len(), 1);

    // ...and every later insert replaces it.
    assert_eq!(m.insert((), ()), Some(()));
    assert_one_entry(&m);

    m.clear();
    assert_eq!(m.len(), 0);

    for _ in 0..100 {
        m.insert((), ());
    }
    assert_one_entry(&m);
    m.check();
}
36935
36936// This test's only purpose is to ensure that zero-sized keys with nonsensical orderings
36937// do not cause segfaults when used with zero-sized values. All other map behavior is
36938// undefined.
#[test]
fn test_bad_zst() {
    // A zero-sized key whose comparisons are nonsensical: never equal,
    // always `Less`. Repeated inserts must not crash (behavior is otherwise
    // undefined, per the comment above).
    #[derive(Clone, Copy, Debug)]
    struct Bad;

    impl PartialEq for Bad {
        fn eq(&self, _: &Self) -> bool {
            false
        }
    }

    impl Eq for Bad {}

    impl PartialOrd for Bad {
        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
            Some(self.cmp(other))
        }
    }

    impl Ord for Bad {
        fn cmp(&self, _: &Self) -> Ordering {
            Ordering::Less
        }
    }

    let mut m = BTreeMap::new();
    for _ in 0..100 {
        m.insert(Bad, Bad);
    }
    m.check();
}
36971
#[test]
fn test_clear() {
    // Reuse one map across several tree shapes (including the empty one) so
    // `clear` is also exercised on an already-cleared map.
    let mut map = BTreeMap::new();
    for &len in &[MIN_INSERTS_HEIGHT_1, MIN_INSERTS_HEIGHT_2, 0, NODE_CAPACITY] {
        map.extend((0..len).map(|i| (i, ())));
        assert_eq!(map.len(), len);
        map.clear();
        map.check();
        assert!(map.is_empty());
    }
}
36985
#[test]
fn test_clear_drop_panic_leak() {
    // `CrashTestDummy` (test-suite helper) counts drops and can be told to
    // panic at a chosen moment; here `b`'s key panics while being dropped.
    let a = CrashTestDummy::new(0);
    let b = CrashTestDummy::new(1);
    let c = CrashTestDummy::new(2);

    let mut map = BTreeMap::new();
    map.insert(a.spawn(Panic::Never), ());
    map.insert(b.spawn(Panic::InDrop), ());
    map.insert(c.spawn(Panic::Never), ());

    // Even though `b`'s drop panics mid-`clear`, every element must still be
    // dropped exactly once and the map must end up empty — no leaks, no
    // double drops.
    catch_unwind(AssertUnwindSafe(|| map.clear())).unwrap_err();
    assert_eq!(a.dropped(), 1);
    assert_eq!(b.dropped(), 1);
    assert_eq!(c.dropped(), 1);
    assert_eq!(map.len(), 0);

    // Dropping the now-empty map must not drop anything again.
    drop(map);
    assert_eq!(a.dropped(), 1);
    assert_eq!(b.dropped(), 1);
    assert_eq!(c.dropped(), 1);
}
37008
#[test]
fn test_clone() {
    // Grow, overwrite, and shrink a map, cloning after every step; each clone
    // must compare equal to the original (`check` is a test-suite helper that
    // verifies tree invariants).
    let mut map = BTreeMap::new();
    let size = MIN_INSERTS_HEIGHT_1;
    assert_eq!(map.len(), 0);

    // Fresh inserts: key i -> 10*i.
    for i in 0..size {
        assert_eq!(map.insert(i, 10 * i), None);
        assert_eq!(map.len(), i + 1);
        map.check();
        assert_eq!(map, map.clone());
    }

    // Overwrites: key i -> 100*i, each returning the previous 10*i.
    for i in 0..size {
        assert_eq!(map.insert(i, 100 * i), Some(10 * i));
        assert_eq!(map.len(), size);
        map.check();
        assert_eq!(map, map.clone());
    }

    // Remove the even keys; key 2*i currently holds 100*(2*i) = 200*i.
    for i in 0..size / 2 {
        assert_eq!(map.remove(&(i * 2)), Some(i * 200));
        assert_eq!(map.len(), size - i - 1);
        map.check();
        assert_eq!(map, map.clone());
    }

    // Even keys are already gone (None); odd key 2*i+1 holds 200*i + 100.
    for i in 0..size / 2 {
        assert_eq!(map.remove(&(2 * i)), None);
        assert_eq!(map.remove(&(2 * i + 1)), Some(i * 200 + 100));
        assert_eq!(map.len(), size / 2 - i - 1);
        map.check();
        assert_eq!(map, map.clone());
    }

    // Test a tree with 2 semi-full levels and a tree with 3 levels.
    map = (1..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)).collect();
    assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2 - 1);
    assert_eq!(map, map.clone());
    map.insert(0, 0);
    assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2);
    assert_eq!(map, map.clone());
    map.check();
}
37053
#[test]
fn test_clone_panic_leak() {
    // `b`'s key panics when cloned, aborting `map.clone()` partway through.
    let a = CrashTestDummy::new(0);
    let b = CrashTestDummy::new(1);
    let c = CrashTestDummy::new(2);

    let mut map = BTreeMap::new();
    map.insert(a.spawn(Panic::Never), ());
    map.insert(b.spawn(Panic::InClone), ());
    map.insert(c.spawn(Panic::Never), ());

    // The failed clone must not touch the source map, and the partial clone
    // must be unwound: `a`'s copy is dropped (dropped == 1), `c` was never
    // reached. (`b.cloned() == 1` suggests the helper counts the clone
    // attempt before panicking — confirm against CrashTestDummy's impl.)
    catch_unwind(|| map.clone()).unwrap_err();
    assert_eq!(a.cloned(), 1);
    assert_eq!(b.cloned(), 1);
    assert_eq!(c.cloned(), 0);
    assert_eq!(a.dropped(), 1);
    assert_eq!(b.dropped(), 0);
    assert_eq!(c.dropped(), 0);
    assert_eq!(map.len(), 3);

    // Dropping the intact source now drops each original exactly once
    // (hence `a.dropped() == 2`: its clone plus its original).
    drop(map);
    assert_eq!(a.cloned(), 1);
    assert_eq!(b.cloned(), 1);
    assert_eq!(c.cloned(), 0);
    assert_eq!(a.dropped(), 2);
    assert_eq!(b.dropped(), 1);
    assert_eq!(c.dropped(), 1);
}
37082
#[test]
fn test_clone_from() {
    // Exercise `clone_from` for every size pair 0..=max_size: a smaller map
    // cloned from a larger one, a larger from a smaller, and equal lengths.
    let mut map1 = BTreeMap::new();
    let max_size = MIN_INSERTS_HEIGHT_1;

    // Range to max_size inclusive, because i is the size of map1 being tested.
    for i in 0..=max_size {
        // map2 grows to size j inside the inner loop; its keys (100*j + 1)
        // never collide with map1's keys (0..i).
        let mut map2 = BTreeMap::new();
        for j in 0..i {
            let mut map1_copy = map2.clone();
            map1_copy.clone_from(&map1); // small cloned from large
            assert_eq!(map1_copy, map1);
            let mut map2_copy = map1.clone();
            map2_copy.clone_from(&map2); // large cloned from small
            assert_eq!(map2_copy, map2);
            map2.insert(100 * j + 1, 2 * j + 1);
        }
        map2.clone_from(&map1); // same length
        map2.check();
        assert_eq!(map2, map1);
        map1.insert(i, 10 * i);
        map1.check();
    }
}
37107
#[allow(dead_code)]
fn test_variance() {
    // Compile-only checks: each function below type-checks only if the map
    // (or iterator) type is covariant in the key or value parameter — the
    // body merely coerces `&'static str` down to the shorter `'new` lifetime.
    fn map_key<'new>(map: BTreeMap<&'static str, ()>) -> BTreeMap<&'new str, ()> {
        map
    }
    fn map_val<'new>(map: BTreeMap<(), &'static str>) -> BTreeMap<(), &'new str> {
        map
    }

    fn iter_key<'a, 'new>(it: Iter<'a, &'static str, ()>) -> Iter<'a, &'new str, ()> {
        it
    }
    fn iter_val<'a, 'new>(it: Iter<'a, (), &'static str>) -> Iter<'a, (), &'new str> {
        it
    }

    fn into_iter_key<'new>(it: IntoIter<&'static str, ()>) -> IntoIter<&'new str, ()> {
        it
    }
    fn into_iter_val<'new>(it: IntoIter<(), &'static str>) -> IntoIter<(), &'new str> {
        it
    }

    fn into_keys_key<'new>(it: IntoKeys<&'static str, ()>) -> IntoKeys<&'new str, ()> {
        it
    }
    fn into_keys_val<'new>(it: IntoKeys<(), &'static str>) -> IntoKeys<(), &'new str> {
        it
    }

    fn into_values_key<'new>(it: IntoValues<&'static str, ()>) -> IntoValues<&'new str, ()> {
        it
    }
    fn into_values_val<'new>(it: IntoValues<(), &'static str>) -> IntoValues<(), &'new str> {
        it
    }

    fn range_key<'a, 'new>(it: Range<'a, &'static str, ()>) -> Range<'a, &'new str, ()> {
        it
    }
    fn range_val<'a, 'new>(it: Range<'a, (), &'static str>) -> Range<'a, (), &'new str> {
        it
    }

    fn keys_key<'a, 'new>(it: Keys<'a, &'static str, ()>) -> Keys<'a, &'new str, ()> {
        it
    }
    fn keys_val<'a, 'new>(it: Keys<'a, (), &'static str>) -> Keys<'a, (), &'new str> {
        it
    }

    fn values_key<'a, 'new>(it: Values<'a, &'static str, ()>) -> Values<'a, &'new str, ()> {
        it
    }
    fn values_val<'a, 'new>(it: Values<'a, (), &'static str>) -> Values<'a, (), &'new str> {
        it
    }
}
37166
// Compile-time check: a `BTreeMap` and everything borrowed from or produced
// by it must be `Sync` whenever the element type is `Sync`. These helpers
// only need to type-check; nothing here is ever executed.
#[allow(dead_code)]
fn test_sync() {
    fn map<T: Sync>(v: &BTreeMap<T, T>) -> impl Sync + '_ {
        v
    }

    fn into_iter<T: Sync>(v: BTreeMap<T, T>) -> impl Sync {
        v.into_iter()
    }

    fn into_keys<T: Sync + Ord>(v: BTreeMap<T, T>) -> impl Sync {
        v.into_keys()
    }

    fn into_values<T: Sync + Ord>(v: BTreeMap<T, T>) -> impl Sync {
        v.into_values()
    }

    fn drain_filter<T: Sync + Ord>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
        v.drain_filter(|_, _| false)
    }

    fn iter<T: Sync>(v: &BTreeMap<T, T>) -> impl Sync + '_ {
        v.iter()
    }

    fn iter_mut<T: Sync>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
        v.iter_mut()
    }

    fn keys<T: Sync>(v: &BTreeMap<T, T>) -> impl Sync + '_ {
        v.keys()
    }

    fn values<T: Sync>(v: &BTreeMap<T, T>) -> impl Sync + '_ {
        v.values()
    }

    fn values_mut<T: Sync>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
        v.values_mut()
    }

    fn range<T: Sync + Ord>(v: &BTreeMap<T, T>) -> impl Sync + '_ {
        v.range(..)
    }

    fn range_mut<T: Sync + Ord>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
        v.range_mut(..)
    }

    fn entry<T: Sync + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
        v.entry(Default::default())
    }

    fn occupied_entry<T: Sync + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
        match v.entry(Default::default()) {
            Occupied(entry) => entry,
            _ => unreachable!(),
        }
    }

    fn vacant_entry<T: Sync + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
        match v.entry(Default::default()) {
            Vacant(entry) => entry,
            _ => unreachable!(),
        }
    }
}
37235
// Compile-time check: the map and its iterators must be `Send` when the
// element type allows it. The shared-reference iterators (`iter`, `keys`,
// `values`, `range`) additionally require `T: Sync`, because sending a
// `&T` to another thread is only sound if `T` is `Sync`. Never executed.
#[allow(dead_code)]
fn test_send() {
    fn map<T: Send>(v: BTreeMap<T, T>) -> impl Send {
        v
    }

    fn into_iter<T: Send>(v: BTreeMap<T, T>) -> impl Send {
        v.into_iter()
    }

    fn into_keys<T: Send + Ord>(v: BTreeMap<T, T>) -> impl Send {
        v.into_keys()
    }

    fn into_values<T: Send + Ord>(v: BTreeMap<T, T>) -> impl Send {
        v.into_values()
    }

    fn drain_filter<T: Send + Ord>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
        v.drain_filter(|_, _| false)
    }

    fn iter<T: Send + Sync>(v: &BTreeMap<T, T>) -> impl Send + '_ {
        v.iter()
    }

    fn iter_mut<T: Send>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
        v.iter_mut()
    }

    fn keys<T: Send + Sync>(v: &BTreeMap<T, T>) -> impl Send + '_ {
        v.keys()
    }

    fn values<T: Send + Sync>(v: &BTreeMap<T, T>) -> impl Send + '_ {
        v.values()
    }

    fn values_mut<T: Send>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
        v.values_mut()
    }

    fn range<T: Send + Sync + Ord>(v: &BTreeMap<T, T>) -> impl Send + '_ {
        v.range(..)
    }

    fn range_mut<T: Send + Ord>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
        v.range_mut(..)
    }

    fn entry<T: Send + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
        v.entry(Default::default())
    }

    fn occupied_entry<T: Send + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
        match v.entry(Default::default()) {
            Occupied(entry) => entry,
            _ => unreachable!(),
        }
    }

    fn vacant_entry<T: Send + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
        match v.entry(Default::default()) {
            Vacant(entry) => entry,
            _ => unreachable!(),
        }
    }
}
37304
// Compile-time check: these methods must be callable WITHOUT a `K: Ord`
// bound (plus only `Debug`/`Clone` where the method itself demands it).
// The `if`/`else if`/`else` ladders exist because each `into_*` call
// consumes the map by value, so only one of them may appear on any given
// control-flow path. Never executed.
#[allow(dead_code)]
fn test_ord_absence() {
    fn map<K>(mut map: BTreeMap<K, ()>) {
        map.is_empty();
        map.len();
        map.clear();
        map.iter();
        map.iter_mut();
        map.keys();
        map.values();
        map.values_mut();
        if true {
            map.into_values();
        } else if true {
            map.into_iter();
        } else {
            map.into_keys();
        }
    }

    fn map_debug<K: Debug>(mut map: BTreeMap<K, ()>) {
        format!("{:?}", map);
        format!("{:?}", map.iter());
        format!("{:?}", map.iter_mut());
        format!("{:?}", map.keys());
        format!("{:?}", map.values());
        format!("{:?}", map.values_mut());
        if true {
            format!("{:?}", map.into_iter());
        } else if true {
            format!("{:?}", map.into_keys());
        } else {
            format!("{:?}", map.into_values());
        }
    }

    fn map_clone<K: Clone>(mut map: BTreeMap<K, ()>) {
        map.clone_from(&map.clone());
    }
}
37345
// Compile-time check: `BTreeMap::new`, `len` and `is_empty` must be usable
// in constant contexts. Never executed.
#[allow(dead_code)]
fn test_const() {
    const MAP: &'static BTreeMap<(), ()> = &BTreeMap::new();
    const LEN: usize = MAP.len();
    const IS_EMPTY: bool = MAP.is_empty();
}
37352
#[test]
fn test_occupied_entry_key() {
    // An entry looked up for a present key must be `Occupied`, expose the
    // key, and leave the map untouched.
    let mut map = BTreeMap::new();
    let (key, value) = ("hello there", "value goes here");

    assert!(map.is_empty());
    map.insert(key, value);
    assert_eq!(map.len(), 1);
    assert_eq!(map[key], value);

    if let Occupied(entry) = map.entry(key) {
        assert_eq!(key, *entry.key());
    } else {
        panic!();
    }

    // The entry lookup must not have disturbed the contents.
    assert_eq!(map.len(), 1);
    assert_eq!(map[key], value);
    map.check();
}
37371
#[test]
fn test_vacant_entry_key() {
    // An entry looked up for an absent key must be `Vacant`, expose the
    // key, and allow insertion through it.
    let mut map = BTreeMap::new();
    let (key, value) = ("hello there", "value goes here");

    assert!(map.is_empty());
    if let Vacant(entry) = map.entry(key) {
        assert_eq!(key, *entry.key());
        entry.insert(value);
    } else {
        panic!();
    }
    assert_eq!(map.len(), 1);
    assert_eq!(map[key], value);
    map.check();
}
37390
#[test]
fn test_first_last_entry() {
    let mut map = BTreeMap::new();
    // Empty map: no first or last entry.
    assert!(map.first_entry().is_none());
    assert!(map.last_entry().is_none());

    // A single element is both the first and the last entry.
    map.insert(1, 42);
    assert_eq!(map.first_entry().unwrap().key(), &1);
    assert_eq!(map.last_entry().unwrap().key(), &1);

    // Inserting a larger key moves only the last entry.
    map.insert(2, 24);
    assert_eq!(map.first_entry().unwrap().key(), &1);
    assert_eq!(map.last_entry().unwrap().key(), &2);

    // Inserting a smaller key moves only the first entry.
    map.insert(0, 6);
    assert_eq!(map.first_entry().unwrap().key(), &0);
    assert_eq!(map.last_entry().unwrap().key(), &2);

    // Removing through the entries pops the extreme pairs.
    let (min_key, min_val) = map.first_entry().unwrap().remove_entry();
    assert_eq!((min_key, min_val), (0, 6));
    let (max_key, max_val) = map.last_entry().unwrap().remove_entry();
    assert_eq!((max_key, max_val), (2, 24));

    // The middle element is all that remains.
    assert_eq!(map.first_entry().unwrap().key(), &1);
    assert_eq!(map.last_entry().unwrap().key(), &1);
    map.check();
}
37415
#[test]
fn test_insert_into_full_height_0() {
    // Fill a single root leaf to capacity with odd keys, then verify that a
    // fresh even key can be inserted at every possible gap position.
    let size = NODE_CAPACITY;
    for pos in 0..=size {
        let mut map = (0..size).map(|i| (2 * i + 1, ())).collect::<BTreeMap<_, _>>();
        let previous = map.insert(2 * pos, ());
        assert!(previous.is_none()); // the even key is always new
        map.check();
    }
}
37425
#[test]
fn test_insert_into_full_height_1() {
    // Build the smallest tree of height 1 whose two leaves are both filled
    // to capacity, then try inserting a new (even) key at every possible
    // position among the existing (odd) keys.
    let size = NODE_CAPACITY + 1 + NODE_CAPACITY;
    for pos in 0..=size {
        let mut map: BTreeMap<_, _> = (0..size).map(|i| (i * 2 + 1, ())).collect();
        map.compact();
        let root_node = map.root.as_ref().unwrap().reborrow();
        // After compaction: a root with one KV and two completely full leaves.
        assert_eq!(root_node.len(), 1);
        assert_eq!(root_node.first_leaf_edge().into_node().len(), NODE_CAPACITY);
        assert_eq!(root_node.last_leaf_edge().into_node().len(), NODE_CAPACITY);

        // The even key is always new, so this insertion must grow the tree
        // rather than overwrite an existing entry.
        assert!(map.insert(pos * 2, ()).is_none());
        map.check();
    }
}
37441
// Generates a test that appends a map with keys `5..$len` onto a map with
// keys `0..8` (so keys 5..8 are duplicates), then verifies the merged
// contents and that the resulting tree still supports removal/insertion.
macro_rules! create_append_test {
    ($name:ident, $len:expr) => {
        #[test]
        fn $name() {
            let mut a = BTreeMap::new();
            for i in 0..8 {
                a.insert(i, i);
            }

            let mut b = BTreeMap::new();
            for i in 5..$len {
                b.insert(i, 2 * i);
            }

            a.append(&mut b);

            // `append` drains `b` entirely into `a`.
            assert_eq!(a.len(), $len);
            assert_eq!(b.len(), 0);

            for i in 0..$len {
                if i < 5 {
                    assert_eq!(a[&i], i);
                } else {
                    // For duplicate keys 5..8, `b`'s value (2 * i) wins.
                    assert_eq!(a[&i], 2 * i);
                }
            }

            a.check();
            // The appended map must remain fully functional at its right edge.
            assert_eq!(a.remove(&($len - 1)), Some(2 * ($len - 1)));
            assert_eq!(a.insert($len - 1, 20), None);
            a.check();
        }
    };
}
37476
// These are mostly for testing the algorithm that "fixes" the right edge after insertion;
// each size is chosen to produce a particular tree shape at the end of `append`.
// Single node.
create_append_test!(test_append_9, 9);
// Two leafs that don't need fixing.
create_append_test!(test_append_17, 17);
// Two leafs where the second one ends up underfull and needs stealing at the end.
create_append_test!(test_append_14, 14);
// Two leafs where the second one ends up empty because the insertion finished at the root.
create_append_test!(test_append_12, 12);
// Three levels; insertion finished at the root.
create_append_test!(test_append_144, 144);
// Three levels; insertion finished at leaf while there is an empty node on the second level.
create_append_test!(test_append_145, 145);
// Tests for several randomly chosen sizes.
create_append_test!(test_append_170, 170);
create_append_test!(test_append_181, 181);
#[cfg(not(miri))] // Miri is too slow
create_append_test!(test_append_239, 239);
#[cfg(not(miri))] // Miri is too slow
create_append_test!(test_append_1700, 1700);
37497
#[test]
fn test_append_drop_leak() {
    // A panic raised while `append` drops the old value of a duplicate key
    // must not leak the remaining elements of either map.
    let a = CrashTestDummy::new(0);
    let b = CrashTestDummy::new(1);
    let c = CrashTestDummy::new(2);
    let mut left = BTreeMap::new();
    let mut right = BTreeMap::new();
    left.insert(a.spawn(Panic::Never), ());
    left.insert(b.spawn(Panic::InDrop), ()); // first duplicate key, dropped during append
    left.insert(c.spawn(Panic::Never), ());
    right.insert(b.spawn(Panic::Never), ());
    right.insert(c.spawn(Panic::Never), ());

    // The append panics (via b's destructor) but must still clean up.
    catch_unwind(move || left.append(&mut right)).unwrap_err();
    assert_eq!(a.dropped(), 1);
    assert_eq!(b.dropped(), 1); // should be 2 were it not for Rust issue #47949
    assert_eq!(c.dropped(), 2);
}
37516
#[test]
fn test_append_ord_chaos() {
    // `append` relies on keys being ordered. `Cyclic3` presumably has a
    // cyclic (inconsistent) `Ord` — see its definition; the resulting map
    // may contain duplicates but must never violate structural invariants.
    let mut map1 = BTreeMap::new();
    map1.insert(Cyclic3::A, ());
    map1.insert(Cyclic3::B, ());
    let mut map2 = BTreeMap::new();
    map2.insert(Cyclic3::A, ());
    map2.insert(Cyclic3::B, ());
    map2.insert(Cyclic3::C, ()); // lands first, before A
    map2.insert(Cyclic3::B, ()); // lands first, before C
    map1.check();
    map2.check(); // keys are not unique but still strictly ascending
    assert_eq!(map1.len(), 2);
    assert_eq!(map2.len(), 4);
    map1.append(&mut map2);
    // One of the 6 incoming keys collides with an existing one.
    assert_eq!(map1.len(), 5);
    assert_eq!(map2.len(), 0);
    map1.check();
    map2.check();
}
37537
37538fn rand_data(len: usize) -> Vec<(u32, u32)> {
37539    let mut rng = DeterministicRng::new();
37540    Vec::from_iter((0..len).map(|_| (rng.next(), rng.next())))
37541}
37542
#[test]
fn test_split_off_empty_right() {
    let mut data = rand_data(173);

    let mut map = BTreeMap::from_iter(data.clone());
    // Split at a key strictly greater than every present key: everything
    // stays on the left and the returned half is empty.
    let split_key = data.iter().max().unwrap().0 + 1;
    let right = map.split_off(&split_key);
    map.check();
    right.check();

    data.sort();
    assert!(map.into_iter().eq(data));
    assert!(right.into_iter().eq(None));
}
37556
#[test]
fn test_split_off_empty_left() {
    let mut data = rand_data(314);

    let mut map = BTreeMap::from_iter(data.clone());
    // Split at the smallest present key: every entry moves to the returned
    // half, leaving the original map empty.
    let split_key = data.iter().min().unwrap().0;
    let right = map.split_off(&split_key);
    map.check();
    right.check();

    data.sort();
    assert!(map.into_iter().eq(None));
    assert!(right.into_iter().eq(data));
}
37570
// In a tree with 3 levels, if all but a part of the first leaf node is split off,
// make sure fix_top eliminates both top levels.
#[test]
fn test_split_off_tiny_left_height_2() {
    let mut left: BTreeMap<_, _> = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)).collect();
    // Keep only key 0 on the left; everything from key 1 upward moves right.
    let right = left.split_off(&1);
    left.check();
    right.check();
    assert_eq!(left.len(), 1);
    assert_eq!(right.len(), MIN_INSERTS_HEIGHT_2 - 1);
    assert_eq!(*left.first_key_value().unwrap().0, 0);
    assert_eq!(*right.first_key_value().unwrap().0, 1);
}
37585
// In a tree with 3 levels, if only part of the last leaf node is split off,
// make sure fix_top eliminates both top levels.
#[test]
fn test_split_off_tiny_right_height_2() {
    let last = MIN_INSERTS_HEIGHT_2 - 1;
    let mut left: BTreeMap<_, _> = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)).collect();
    assert_eq!(*left.last_key_value().unwrap().0, last);
    // Split at the maximum key: only that single pair moves to the right.
    let right = left.split_off(&last);
    left.check();
    right.check();
    assert_eq!(left.len(), MIN_INSERTS_HEIGHT_2 - 1);
    assert_eq!(right.len(), 1);
    assert_eq!(*left.last_key_value().unwrap().0, last - 1);
    assert_eq!(*right.last_key_value().unwrap().0, last);
}
37602
#[test]
fn test_split_off_halfway() {
    // Split maps of several sizes at their median key and verify that the
    // two halves partition the key set exactly.
    let mut rng = DeterministicRng::new();
    for &len in &[NODE_CAPACITY, 25, 50, 75, 100] {
        let mut data: Vec<_> = (0..len).map(|_| (rng.next(), ())).collect();
        // Insertion in non-ascending order creates some variation in node length.
        let mut map: BTreeMap<_, _> = data.iter().copied().collect();
        data.sort();
        let small_keys = data.iter().take(len / 2).map(|kv| kv.0);
        let large_keys = data.iter().skip(len / 2).map(|kv| kv.0);
        // The split key is the smallest of the large keys, so it lands on the right.
        let split_key = large_keys.clone().next().unwrap();
        let right = map.split_off(&split_key);
        map.check();
        right.check();
        assert!(map.keys().copied().eq(small_keys));
        assert!(right.keys().copied().eq(large_keys));
    }
}
37621
#[test]
fn test_split_off_large_random_sorted() {
    // Miri is too slow
    let mut data = if cfg!(miri) { rand_data(529) } else { rand_data(1529) };
    // special case with maximum height.
    data.sort();

    let mut map: BTreeMap<_, _> = data.iter().copied().collect();
    let median_key = data[data.len() / 2].0;
    let right = map.split_off(&median_key);
    map.check();
    right.check();

    // `partition` is order-preserving, so each half must match exactly.
    let (small, large): (Vec<_>, Vec<_>) = data.into_iter().partition(|x| x.0 < median_key);
    assert!(map.into_iter().eq(small));
    assert!(right.into_iter().eq(large));
}
37638
#[test]
fn test_into_iter_drop_leak_height_0() {
    // Dropping an IntoIter must drop every element exactly once, even when
    // one element's destructor panics partway through.
    let dummies: Vec<_> = (0..5).map(CrashTestDummy::new).collect();
    let mut map = BTreeMap::new();
    for (name, dummy) in ["a", "b", "c", "d", "e"].iter().zip(&dummies) {
        // "d" is the value whose destructor panics during the drop.
        let panic = if *name == "d" { Panic::InDrop } else { Panic::Never };
        map.insert(*name, dummy.spawn(panic));
    }

    catch_unwind(move || drop(map.into_iter())).unwrap_err();

    for dummy in &dummies {
        assert_eq!(dummy.dropped(), 1);
    }
}
37661
#[test]
fn test_into_iter_drop_leak_height_1() {
    // Like the height-0 variant, but on a tree tall enough to have a root
    // with children; the panicking destructor is placed near either end.
    let size = MIN_INSERTS_HEIGHT_1;
    for panic_point in vec![0, 1, size - 2, size - 1] {
        let dummies: Vec<_> = (0..size).map(|i| CrashTestDummy::new(i)).collect();
        let map: BTreeMap<_, _> = (0..size)
            .map(|i| {
                // Only the value at the panic point panics; keys never do.
                let panic = if i == panic_point { Panic::InDrop } else { Panic::Never };
                (dummies[i].spawn(Panic::Never), dummies[i].spawn(panic))
            })
            .collect();
        catch_unwind(move || drop(map.into_iter())).unwrap_err();
        // Each dummy was spawned twice (key and value), so must drop twice.
        for i in 0..size {
            assert_eq!(dummies[i].dropped(), 2);
        }
    }
}
37679
#[test]
fn test_into_keys() {
    // `into_keys` must yield every key exactly once. The original check
    // (len + contains) pinned the key set but not the order; BTreeMap
    // iteration is guaranteed to be in ascending key order, so the exact
    // sequence can and should be asserted as well.
    let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
    let map: BTreeMap<_, _> = vec.into_iter().collect();
    let keys: Vec<_> = map.into_keys().collect();

    assert_eq!(keys.len(), 3);
    assert!(keys.contains(&1));
    assert!(keys.contains(&2));
    assert!(keys.contains(&3));
    // Strengthened check: keys come out in ascending order.
    assert_eq!(keys, [1, 2, 3]);
}
37691
#[test]
fn test_into_values() {
    // `into_values` must yield every value exactly once. The original check
    // (len + contains) pinned the value set but not the order; BTreeMap
    // iterates in ascending key order, so the values' exact sequence is
    // known and should be asserted as well.
    let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
    let map: BTreeMap<_, _> = vec.into_iter().collect();
    let values: Vec<_> = map.into_values().collect();

    assert_eq!(values.len(), 3);
    assert!(values.contains(&'a'));
    assert!(values.contains(&'b'));
    assert!(values.contains(&'c'));
    // Strengthened check: values come out in key order.
    assert_eq!(values, ['a', 'b', 'c']);
}
37703
#[test]
fn test_insert_remove_intertwined() {
    // Hammer the tree with alternating inserts and removes over a small key
    // space (0..=255), forcing nodes to repeatedly split and merge.
    let loops = if cfg!(miri) { 100 } else { 1_000_000 };
    let offset = 165; // somewhat arbitrarily chosen to cover some code paths
    let mut map = BTreeMap::new();
    let mut key = 1;
    for _ in 0..loops {
        key = (key + offset) & 0xFF;
        map.insert(key, key);
        map.remove(&(0xFF - key));
    }
    map.check();
}
37717
#[test]
fn test_insert_remove_intertwined_ord_chaos() {
    let loops = if cfg!(miri) { 100 } else { 1_000_000 };
    // `gov.flip()` presumably inverts the governed comparison each round,
    // making `Ord` deliberately inconsistent over time — TODO confirm
    // against Governor's definition. Only structural invariants can be
    // verified at the end, hence `check_invariants` instead of `check`.
    let gov = Governor::new();
    let mut map = BTreeMap::new();
    let mut i = 1;
    let offset = 165; // more arbitrarily copied from above
    for _ in 0..loops {
        i = (i + offset) & 0xFF;
        map.insert(Governed(i, &gov), ());
        map.remove(&Governed(0xFF - i, &gov));
        gov.flip();
    }
    map.check_invariants();
}
37733use super::map::MIN_LEN;
37734use super::node::{marker, ForceResult::*, Handle, LeftOrRight::*, NodeRef};
37735
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
    /// Removes a key-value pair from the tree, and returns that pair, as well as
    /// the leaf edge corresponding to that former pair. It's possible this empties
    /// a root node that is internal, which the caller should pop from the map
    /// holding the tree. The caller should also decrement the map's length.
    pub fn remove_kv_tracking<F: FnOnce()>(
        self,
        handle_emptied_internal_root: F,
    ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
        // Dispatch on the node's concrete kind: removal from a leaf and
        // removal from an internal node need different strategies.
        match self.force() {
            Leaf(node) => node.remove_leaf_kv(handle_emptied_internal_root),
            Internal(node) => node.remove_internal_kv(handle_emptied_internal_root),
        }
    }
}
37751
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
    /// Removes this KV from its leaf, then restores tree invariants: if the
    /// leaf became underfull, merge with or steal from a sibling, and let
    /// ancestors fix themselves up. Returns the removed pair and the leaf
    /// edge where it used to be.
    fn remove_leaf_kv<F: FnOnce()>(
        self,
        handle_emptied_internal_root: F,
    ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
        let (old_kv, mut pos) = self.remove();
        let len = pos.reborrow().into_node().len();
        // Underfull leaf: rebalance with a sibling through the parent KV.
        if len < MIN_LEN {
            let idx = pos.idx();
            // We have to temporarily forget the child type, because there is no
            // distinct node type for the immediate parents of a leaf.
            let new_pos = match pos.into_node().forget_type().choose_parent_kv() {
                Ok(Left(left_parent_kv)) => {
                    debug_assert!(left_parent_kv.right_child_len() == MIN_LEN - 1);
                    if left_parent_kv.can_merge() {
                        left_parent_kv.merge_tracking_child_edge(Right(idx))
                    } else {
                        // Can't merge: the left sibling has spares to steal.
                        debug_assert!(left_parent_kv.left_child_len() > MIN_LEN);
                        left_parent_kv.steal_left(idx)
                    }
                }
                Ok(Right(right_parent_kv)) => {
                    debug_assert!(right_parent_kv.left_child_len() == MIN_LEN - 1);
                    if right_parent_kv.can_merge() {
                        right_parent_kv.merge_tracking_child_edge(Left(idx))
                    } else {
                        debug_assert!(right_parent_kv.right_child_len() > MIN_LEN);
                        right_parent_kv.steal_right(idx)
                    }
                }
                // No parent KV to rebalance through (presumably the root
                // leaf case); an underfull root is fine, keep the position.
                Err(pos) => unsafe { Handle::new_edge(pos, idx) },
            };
            // SAFETY: `new_pos` is the leaf we started from or a sibling.
            pos = unsafe { new_pos.cast_to_leaf_unchecked() };

            // Only if we merged, the parent (if any) has shrunk, but skipping
            // the following step otherwise does not pay off in benchmarks.
            //
            // SAFETY: We won't destroy or rearrange the leaf where `pos` is at
            // by handling its parent recursively; at worst we will destroy or
            // rearrange the parent through the grandparent, thus change the
            // link to the parent inside the leaf.
            if let Ok(parent) = unsafe { pos.reborrow_mut() }.into_node().ascend() {
                if !parent.into_node().forget_type().fix_node_and_affected_ancestors() {
                    handle_emptied_internal_root();
                }
            }
        }
        (old_kv, pos)
    }
}
37803
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Removes this KV from an internal node by replacing it with its
    /// in-order predecessor, which is itself removed from a leaf via
    /// `remove_leaf_kv`. Returns the removed pair and the leaf edge that
    /// follows the former pair's position.
    fn remove_internal_kv<F: FnOnce()>(
        self,
        handle_emptied_internal_root: F,
    ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
        // Remove an adjacent KV from its leaf and then put it back in place of
        // the element we were asked to remove. Prefer the left adjacent KV,
        // for the reasons listed in `choose_parent_kv`.
        let left_leaf_kv = self.left_edge().descend().last_leaf_edge().left_kv();
        // SAFETY: an internal KV always has a left subtree, whose last leaf
        // edge has a KV on its left.
        let left_leaf_kv = unsafe { left_leaf_kv.ok().unwrap_unchecked() };
        let (left_kv, left_hole) = left_leaf_kv.remove_leaf_kv(handle_emptied_internal_root);

        // The internal node may have been stolen from or merged. Go back right
        // to find where the original KV ended up.
        let mut internal = unsafe { left_hole.next_kv().ok().unwrap_unchecked() };
        let old_kv = internal.replace_kv(left_kv.0, left_kv.1);
        let pos = internal.next_leaf_edge();
        (old_kv, pos)
    }
}
37824use core::cmp::Ordering;
37825use core::fmt::{self, Debug};
37826use core::iter::FusedIterator;
37827
/// Core of an iterator that merges the output of two strictly ascending iterators,
/// for instance a union or a symmetric difference.
pub struct MergeIterInner<I: Iterator> {
    /// First source iterator.
    a: I,
    /// Second source iterator.
    b: I,
    /// An item already pulled from one source but not yet yielded, held back
    /// because the other source's current item sorted ahead of it.
    peeked: Option<Peeked<I>>,
}
37835
/// Benchmarks faster than wrapping both iterators in a Peekable,
/// probably because we can afford to impose a FusedIterator bound.
#[derive(Clone, Debug)]
enum Peeked<I: Iterator> {
    /// An item pulled from source `a` and held back.
    A(I::Item),
    /// An item pulled from source `b` and held back.
    B(I::Item),
}
37843
37844impl<I: Iterator> Clone for MergeIterInner<I>
37845where
37846    I: Clone,
37847    I::Item: Clone,
37848{
37849    fn clone(&self) -> Self {
37850        Self { a: self.a.clone(), b: self.b.clone(), peeked: self.peeked.clone() }
37851    }
37852}
37853
impl<I: Iterator> Debug for MergeIterInner<I>
where
    I: Debug,
    I::Item: Debug,
{
    // Manual impl: a derived one would not carry the `I::Item: Debug` bound
    // that formatting `peeked` requires.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("MergeIterInner").field(&self.a).field(&self.b).field(&self.peeked).finish()
    }
}
37863
impl<I: Iterator> MergeIterInner<I> {
    /// Creates a new core for an iterator merging a pair of sources.
    pub fn new(a: I, b: I) -> Self {
        MergeIterInner { a, b, peeked: None }
    }

    /// Returns the next pair of items stemming from the pair of sources
    /// being merged. If both returned options contain a value, that value
    /// is equal and occurs in both sources. If one of the returned options
    /// contains a value, that value doesn't occur in the other source (or
    /// the sources are not strictly ascending). If neither returned option
    /// contains a value, iteration has finished and subsequent calls will
    /// return the same empty pair.
    pub fn nexts<Cmp: Fn(&I::Item, &I::Item) -> Ordering>(
        &mut self,
        cmp: Cmp,
    ) -> (Option<I::Item>, Option<I::Item>)
    where
        I: FusedIterator,
    {
        let mut a_next;
        let mut b_next;
        match self.peeked.take() {
            // A previous call held back an item from one source; resume with
            // it and advance only the other source.
            Some(Peeked::A(next)) => {
                a_next = Some(next);
                b_next = self.b.next();
            }
            Some(Peeked::B(next)) => {
                b_next = Some(next);
                a_next = self.a.next();
            }
            // Nothing held back: advance both sources.
            None => {
                a_next = self.a.next();
                b_next = self.b.next();
            }
        }
        if let (Some(ref a1), Some(ref b1)) = (&a_next, &b_next) {
            match cmp(a1, b1) {
                // The smaller item is yielded now; the larger one is held
                // back for the next call. Equal items are yielded together.
                Ordering::Less => self.peeked = b_next.take().map(Peeked::B),
                Ordering::Greater => self.peeked = a_next.take().map(Peeked::A),
                Ordering::Equal => (),
            }
        }
        (a_next, b_next)
    }

    /// Returns a pair of upper bounds for the `size_hint` of the final iterator.
    pub fn lens(&self) -> (usize, usize)
    where
        I: ExactSizeIterator,
    {
        // A held-back item counts toward the source it came from.
        match self.peeked {
            Some(Peeked::A(_)) => (1 + self.a.len(), self.b.len()),
            Some(Peeked::B(_)) => (self.a.len(), 1 + self.b.len()),
            _ => (self.a.len(), self.b.len()),
        }
    }
}
37922// This is an attempt at an implementation following the ideal
37923//
37924// ```
37925// struct BTreeMap<K, V> {
37926//     height: usize,
37927//     root: Option<Box<Node<K, V, height>>>
37928// }
37929//
37930// struct Node<K, V, height: usize> {
37931//     keys: [K; 2 * B - 1],
37932//     vals: [V; 2 * B - 1],
37933//     edges: [if height > 0 { Box<Node<K, V, height - 1>> } else { () }; 2 * B],
37934//     parent: Option<(NonNull<Node<K, V, height + 1>>, u16)>,
37935//     len: u16,
37936// }
37937// ```
37938//
37939// Since Rust doesn't actually have dependent types and polymorphic recursion,
37940// we make do with lots of unsafety.
37941
37942// A major goal of this module is to avoid complexity by treating the tree as a generic (if
37943// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
37944// this module doesn't care whether the entries are sorted, which nodes can be underfull, or
37945// even what underfull means. However, we do rely on a few invariants:
37946//
37947// - Trees must have uniform depth/height. This means that every path down to a leaf from a
37948//   given node has exactly the same length.
37949// - A node of length `n` has `n` keys, `n` values, and `n + 1` edges.
37950//   This implies that even an empty node has at least one edge.
37951//   For a leaf node, "having an edge" only means we can identify a position in the node,
37952//   since leaf edges are empty and need no data representation. In an internal node,
37953//   an edge both identifies a position and contains a pointer to a child node.
37954
37955use core::marker::PhantomData;
37956use core::mem::{self, MaybeUninit};
37957use core::ptr::{self, NonNull};
37958use core::slice::SliceIndex;
37959
37960use crate::alloc::{Allocator, Global, Layout};
37961use crate::boxed::Box;
37962
// The branching factor: a node holds up to `2 * B - 1` keys/values and, if
// internal, up to `2 * B` child edges.
const B: usize = 6;
pub const CAPACITY: usize = 2 * B - 1;
pub const MIN_LEN_AFTER_SPLIT: usize = B - 1;
// Index of the KV in the exact middle of a full node, and the edge indices
// immediately to its left and right — used when choosing a split point.
const KV_IDX_CENTER: usize = B - 1;
const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
const EDGE_IDX_RIGHT_OF_CENTER: usize = B;
37969
/// The underlying representation of leaf nodes and part of the representation of internal nodes.
struct LeafNode<K, V> {
    /// We want to be covariant in `K` and `V`.
    /// `None` iff this node is the root of the tree.
    parent: Option<NonNull<InternalNode<K, V>>>,

    /// This node's index into the parent node's `edges` array.
    /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
    /// This is only guaranteed to be initialized when `parent` is non-null.
    parent_idx: MaybeUninit<u16>,

    /// The number of keys and values this node stores.
    len: u16,

    /// The arrays storing the actual data of the node. Only the first `len` elements of each
    /// array are initialized and valid.
    keys: [MaybeUninit<K>; CAPACITY],
    vals: [MaybeUninit<V>; CAPACITY],
}
37988
impl<K, V> LeafNode<K, V> {
    /// Initializes a new `LeafNode` in-place.
    ///
    /// # Safety
    /// `this` must point to writable memory laid out as a `LeafNode<K, V>`.
    /// The `MaybeUninit` fields (`parent_idx`, `keys`, `vals`) are left
    /// uninitialized.
    unsafe fn init(this: *mut Self) {
        // As a general policy, we leave fields uninitialized if they can be, as this should
        // be both slightly faster and easier to track in Valgrind.
        unsafe {
            // parent_idx, keys, and vals are all MaybeUninit
            ptr::addr_of_mut!((*this).parent).write(None);
            ptr::addr_of_mut!((*this).len).write(0);
        }
    }

    /// Creates a new boxed `LeafNode`.
    fn new() -> Box<Self> {
        unsafe {
            // SAFETY: `init` writes every non-`MaybeUninit` field, after
            // which the allocation is a valid `LeafNode`.
            let mut leaf = Box::new_uninit();
            LeafNode::init(leaf.as_mut_ptr());
            leaf.assume_init()
        }
    }
}
38010
/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
/// `InternalNode` can be directly casted to a pointer to the underlying `LeafNode` portion of the
/// node, allowing code to act on leaf and internal nodes generically without having to even check
/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
#[repr(C)]
// gdb_providers.py uses this type name for introspection.
struct InternalNode<K, V> {
    /// The `LeafNode` prefix; must stay the first field for the cast described above.
    data: LeafNode<K, V>,

    /// The pointers to the children of this node. `len + 1` of these are considered
    /// initialized and valid, except that near the end, while the tree is held
    /// through borrow type `Dying`, some of these pointers are dangling.
    edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
}
38026
impl<K, V> InternalNode<K, V> {
    /// Creates a new boxed `InternalNode`.
    ///
    /// # Safety
    /// An invariant of internal nodes is that they have at least one
    /// initialized and valid edge. This function does not set up
    /// such an edge.
    unsafe fn new() -> Box<Self> {
        unsafe {
            let mut node = Box::<Self>::new_uninit();
            // We only need to initialize the data; the edges are MaybeUninit.
            LeafNode::init(ptr::addr_of_mut!((*node.as_mut_ptr()).data));
            // SAFETY: `data` is initialized above and `edges` is all `MaybeUninit`.
            node.assume_init()
        }
    }
}
38043
/// A managed, non-null pointer to a node. This is either an owned pointer to
/// `LeafNode<K, V>` or an owned pointer to `InternalNode<K, V>`.
///
/// However, `BoxedNode` contains no information as to which of the two types
/// of nodes it actually contains, and, partially due to this lack of information,
/// is not a separate type and has no destructor.
/// The node's `height` (tracked externally, see `NodeRef`) disambiguates the two.
type BoxedNode<K, V> = NonNull<LeafNode<K, V>>;
38051
// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
// is `Mut`. This is technically wrong, but cannot result in any unsafety due to
// internal use of `NodeRef` because we stay completely generic over `K` and `V`.
// However, whenever a public type wraps `NodeRef`, make sure that it has the
// correct variance.
///
/// A reference to a node.
///
/// This type has a number of parameters that control how it acts:
/// - `BorrowType`: A dummy type that describes the kind of borrow and carries a lifetime.
///    - When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`.
///    - When this is `ValMut<'a>`, the `NodeRef` acts roughly like `&'a Node`
///      with respect to keys and tree structure, but also allows many
///      mutable references to values throughout the tree to coexist.
///    - When this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
///      although insert methods allow a mutable pointer to a value to coexist.
///    - When this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`,
///      but does not have a destructor, and must be cleaned up manually.
///    - When this is `Dying`, the `NodeRef` still acts roughly like `Box<Node>`,
///      but has methods to destroy the tree bit by bit, and ordinary methods,
///      while not marked as unsafe to call, can invoke UB if called incorrectly.
///   Since any `NodeRef` allows navigating through the tree, `BorrowType`
///   effectively applies to the entire tree, not just to the node itself.
/// - `K` and `V`: These are the types of keys and values stored in the nodes.
/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
///   `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
///   `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
///   `NodeRef` could be pointing to either type of node.
///   `Type` is named `NodeType` when used outside `NodeRef`.
///
/// Both `BorrowType` and `NodeType` restrict what methods we implement, to
/// exploit static type safety. There are limitations in the way we can apply
/// such restrictions:
/// - For each type parameter, we can only define a method either generically
///   or for one particular type. For example, we cannot define a method like
///   `into_kv` generically for all `BorrowType`, or once for all types that
///   carry a lifetime, because we want it to return `&'a` references.
///   Therefore, we define it only for the least powerful type `Immut<'a>`.
/// - We cannot get implicit coercion from say `Mut<'a>` to `Immut<'a>`.
///   Therefore, we have to explicitly call `reborrow` on a more powerful
///   `NodeRef` in order to reach a method like `into_kv`.
///
/// All methods on `NodeRef` that return some kind of reference, either:
/// - Take `self` by value, and return the lifetime carried by `BorrowType`.
///   Sometimes, to invoke such a method, we need to call `reborrow_mut`.
/// - Take `self` by reference, and (implicitly) return that reference's
///   lifetime, instead of the lifetime carried by `BorrowType`. That way,
///   the borrow checker guarantees that the `NodeRef` remains borrowed as long
///   as the returned reference is used.
///   The methods supporting insert bend this rule by returning a raw pointer,
///   i.e., a reference without any lifetime.
pub struct NodeRef<BorrowType, K, V, Type> {
    /// The number of levels that the node and the level of leaves are apart, a
    /// constant of the node that cannot be entirely described by `Type`, and that
    /// the node itself does not store. We only need to store the height of the root
    /// node, and derive every other node's height from it.
    /// Must be zero if `Type` is `Leaf` and non-zero if `Type` is `Internal`.
    height: usize,
    /// The pointer to the leaf or internal node. The definition of `InternalNode`
    /// ensures that the pointer is valid either way.
    node: NonNull<LeafNode<K, V>>,
    _marker: PhantomData<(BorrowType, Type)>,
}
38115
/// The root node of an owned tree.
///
/// Note that this does not have a destructor, and must be cleaned up manually.
pub type Root<K, V> = NodeRef<marker::Owned, K, V, marker::LeafOrInternal>;

// Immutable node references are plain copyable pointers.
impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> {}
impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
    fn clone(&self) -> Self {
        *self
    }
}
38127
// SAFETY: a `NodeRef` is only a pointer into the tree, so sharing one between
// threads amounts to sharing references to the keys and values, hence `K: Sync, V: Sync`.
unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}

// SAFETY: an immutable borrow only ever hands out shared references, so sending
// one to another thread requires no more than `Sync` data.
unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send for NodeRef<marker::Immut<'a>, K, V, Type> {}
// SAFETY: mutable, value-mutable, owned and dying borrows can mutate or move
// keys and values from the receiving thread, hence they require `K: Send, V: Send`.
unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::Mut<'a>, K, V, Type> {}
unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::ValMut<'a>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Dying, K, V, Type> {}
38135
impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
    /// Creates a new owned, empty leaf node allocated on the heap.
    fn new_leaf() -> Self {
        Self::from_new_leaf(LeafNode::new())
    }

    /// Takes ownership of a freshly allocated leaf node; leaves have height 0.
    fn from_new_leaf(leaf: Box<LeafNode<K, V>>) -> Self {
        // `Box::leak` transfers ownership of the allocation to the tree, which
        // must deallocate it manually later (see `deallocate_and_ascend`).
        NodeRef { height: 0, node: NonNull::from(Box::leak(leaf)), _marker: PhantomData }
    }
}
38145
impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
    /// Creates a new internal node whose single edge points to `child`,
    /// placing the new node one level above the child.
    fn new_internal(child: Root<K, V>) -> Self {
        let mut new_node = unsafe { InternalNode::new() };
        // Establish the invariant (left open by `InternalNode::new`) that an
        // internal node has at least one initialized edge.
        new_node.edges[0].write(child.node);
        unsafe { NodeRef::from_new_internal(new_node, child.height + 1) }
    }

    /// # Safety
    /// `height` must not be zero.
    unsafe fn from_new_internal(internal: Box<InternalNode<K, V>>, height: usize) -> Self {
        debug_assert!(height > 0);
        let node = NonNull::from(Box::leak(internal)).cast();
        let mut this = NodeRef { height, node, _marker: PhantomData };
        // Make the adopted children's parent pointers refer to the new node.
        this.borrow_mut().correct_all_childrens_parent_links();
        this
    }
}
38163
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Unpack a node reference that was packed as `NodeRef::parent`.
    fn from_internal(node: NonNull<InternalNode<K, V>>, height: usize) -> Self {
        debug_assert!(height > 0);
        // Casting to the `LeafNode` prefix is valid because `InternalNode` is `repr(C)`.
        NodeRef { height, node: node.cast(), _marker: PhantomData }
    }
}
38171
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Exposes the data of an internal node.
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_internal_ptr(this: &Self) -> *mut InternalNode<K, V> {
        // SAFETY: the static node type is `Internal`.
        // This is the inverse of `from_internal`'s cast, again relying on `repr(C)`.
        this.node.as_ptr() as *mut InternalNode<K, V>
    }
}
38181
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Borrows exclusive access to the data of an internal node.
    fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
        let ptr = Self::as_internal_ptr(self);
        // SAFETY: the `Mut` borrow type grants exclusive access to the whole node.
        unsafe { &mut *ptr }
    }
}
38189
impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the length of the node. This is the number of keys or values.
    /// The number of edges is `len() + 1`.
    /// Note that, despite being safe, calling this function can have the side effect
    /// of invalidating mutable references that unsafe code has created.
    pub fn len(&self) -> usize {
        // Crucially, we only access the `len` field here. If BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        unsafe { usize::from((*Self::as_leaf_ptr(self)).len) }
    }

    /// Returns the number of levels that the node and leaves are apart. Zero
    /// height means the node is a leaf itself. If you picture trees with the
    /// root on top, the number says at which elevation the node appears.
    /// If you picture trees with leaves on top, the number says how high
    /// the tree extends above the node.
    pub fn height(&self) -> usize {
        self.height
    }

    /// Temporarily takes out another, immutable reference to the same node.
    /// The result borrows `self`, so it cannot outlive the original reference.
    pub fn reborrow(&self) -> NodeRef<marker::Immut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Exposes the leaf portion of any leaf or internal node.
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_leaf_ptr(this: &Self) -> *mut LeafNode<K, V> {
        // The node must be valid for at least the LeafNode portion.
        // This is not a reference in the NodeRef type because we don't know if
        // it should be unique or shared.
        this.node.as_ptr()
    }
}
38225
impl<BorrowType: marker::BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the parent of the current node. Returns `Ok(handle)` if the current
    /// node actually has a parent, where `handle` points to the edge of the parent
    /// that points to the current node. Returns `Err(self)` if the current node has
    /// no parent, giving back the original `NodeRef`.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    pub fn ascend(
        self,
    ) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
        // NOTE(review): `PERMITS_TRAVERSAL` is presumably an associated constant
        // of `marker::BorrowType`, so this assert should compile away — confirm.
        assert!(BorrowType::PERMITS_TRAVERSAL);
        // We need to use raw pointers to nodes because, if BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        let leaf_ptr: *const _ = Self::as_leaf_ptr(&self);
        unsafe { (*leaf_ptr).parent }
            .as_ref()
            .map(|parent| Handle {
                // The parent sits one level further from the leaves.
                node: NodeRef::from_internal(*parent, self.height + 1),
                // SAFETY: `parent_idx` is initialized whenever `parent` is `Some`.
                idx: unsafe { usize::from((*leaf_ptr).parent_idx.assume_init()) },
                _marker: PhantomData,
            })
            .ok_or(self)
    }

    /// Returns a handle to the leftmost edge of the node.
    pub fn first_edge(self) -> Handle<Self, marker::Edge> {
        unsafe { Handle::new_edge(self, 0) }
    }

    /// Returns a handle to the rightmost edge of the node (index `len`).
    pub fn last_edge(self) -> Handle<Self, marker::Edge> {
        let len = self.len();
        unsafe { Handle::new_edge(self, len) }
    }

    /// Note that `self` must be nonempty.
    pub fn first_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        unsafe { Handle::new_kv(self, 0) }
    }

    /// Note that `self` must be nonempty.
    pub fn last_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        unsafe { Handle::new_kv(self, len - 1) }
    }
}
38276
38277impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
38278    /// Could be a public implementation of PartialEq, but only used in this module.
38279    fn eq(&self, other: &Self) -> bool {
38280        let Self { node, height, _marker } = self;
38281        if node.eq(&other.node) {
38282            debug_assert_eq!(*height, other.height);
38283            true
38284        } else {
38285            false
38286        }
38287    }
38288}
38289
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
    /// Exposes the leaf portion of any leaf or internal node in an immutable tree.
    fn into_leaf(self) -> &'a LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&self);
        // SAFETY: there can be no mutable references into this tree borrowed as `Immut`.
        unsafe { &*ptr }
    }

    /// Borrows a view into the keys stored in the node.
    pub fn keys(&self) -> &[K] {
        let leaf = self.into_leaf();
        // SAFETY: per the `LeafNode` invariant, the first `len` keys are initialized.
        unsafe {
            MaybeUninit::slice_assume_init_ref(leaf.keys.get_unchecked(..usize::from(leaf.len)))
        }
    }
}
38306
impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
    /// Similar to `ascend`, gets a reference to a node's parent node, but also
    /// deallocates the current node in the process. This is unsafe because the
    /// current node will still be accessible despite being deallocated.
    pub unsafe fn deallocate_and_ascend(
        self,
    ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Internal>, marker::Edge>> {
        // Capture what we need before `self` is consumed by `ascend`.
        let height = self.height;
        let node = self.node;
        let ret = self.ascend().ok();
        unsafe {
            // The layout must match the original allocation: internal nodes
            // (height > 0) were allocated as `InternalNode`, leaves as `LeafNode`.
            Global.deallocate(
                node.cast(),
                if height > 0 {
                    Layout::new::<InternalNode<K, V>>()
                } else {
                    Layout::new::<LeafNode<K, V>>()
                },
            );
        }
        ret
    }
}
38330
impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Temporarily takes out another, mutable reference to the same node. Beware, as
    /// this method is very dangerous, doubly so since it may not immediately appear
    /// dangerous.
    ///
    /// Because mutable pointers can roam anywhere around the tree, the returned
    /// pointer can easily be used to make the original pointer dangling, out of
    /// bounds, or invalid under stacked borrow rules.
    // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef`
    // that restricts the use of navigation methods on reborrowed pointers,
    // preventing this unsafety.
    unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Borrows exclusive access to the leaf portion of any leaf or internal node.
    /// The result borrows `self`, unlike `into_leaf_mut` below.
    fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }

    /// Offers exclusive access to the leaf portion of any leaf or internal node,
    /// consuming `self` and returning the full `'a` lifetime.
    fn into_leaf_mut(mut self) -> &'a mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&mut self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }
}
38360
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Borrows exclusive access to an element of the key storage area.
    /// Depending on `I`, the result is a single slot or a subslice of slots,
    /// possibly uninitialized.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn key_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<K>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the key slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_leaf_mut().keys.as_mut_slice().get_unchecked_mut(index) }
    }

    /// Borrows exclusive access to an element or slice of the node's value storage area.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn val_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<V>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the value slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_leaf_mut().vals.as_mut_slice().get_unchecked_mut(index) }
    }
}
38390
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Borrows exclusive access to an element or slice of the node's storage area for edge contents.
    /// Note the larger bound than for keys/values: a node has one more edge than KVs.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY + 1
    unsafe fn edge_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<BoxedNode<K, V>>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the edge slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_internal_mut().edges.as_mut_slice().get_unchecked_mut(index) }
    }
}
38406
impl<'a, K, V, Type> NodeRef<marker::ValMut<'a>, K, V, Type> {
    /// Returns a shared reference to the key and an exclusive reference to the
    /// value at position `idx`, both with the full `'a` lifetime.
    ///
    /// # Safety
    /// - The node has more than `idx` initialized elements.
    unsafe fn into_key_val_mut_at(mut self, idx: usize) -> (&'a K, &'a mut V) {
        // We only create a reference to the one element we are interested in,
        // to avoid aliasing with outstanding references to other elements,
        // in particular, those returned to the caller in earlier iterations.
        let leaf = Self::as_leaf_ptr(&mut self);
        let keys = unsafe { ptr::addr_of!((*leaf).keys) };
        let vals = unsafe { ptr::addr_of_mut!((*leaf).vals) };
        // We must coerce to unsized array pointers because of Rust issue #74679.
        let keys: *const [_] = keys;
        let vals: *mut [_] = vals;
        let key = unsafe { (&*keys.get_unchecked(idx)).assume_init_ref() };
        let val = unsafe { (&mut *vals.get_unchecked_mut(idx)).assume_init_mut() };
        (key, val)
    }
}
38425
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Borrows exclusive access to the length of the node.
    /// Callers must uphold the invariant that only the first `len` keys/values
    /// (and `len + 1` edges) are initialized.
    pub fn len_mut(&mut self) -> &mut u16 {
        &mut self.as_leaf_mut().len
    }
}
38432
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Rewrites the `parent`/`parent_idx` fields of the children at the given
    /// edge indices so they point back at this node.
    ///
    /// # Safety
    /// Every item returned by `range` is a valid edge index for the node.
    unsafe fn correct_childrens_parent_links<R: Iterator<Item = usize>>(&mut self, range: R) {
        for i in range {
            debug_assert!(i <= self.len());
            unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link();
        }
    }

    fn correct_all_childrens_parent_links(&mut self) {
        let len = self.len();
        // `0..=len` is inclusive because a node with `len` KVs has `len + 1` edges.
        unsafe { self.correct_childrens_parent_links(0..=len) };
    }
}
38448
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Sets the node's link to its parent edge,
    /// without invalidating other references to the node.
    fn set_parent_link(&mut self, parent: NonNull<InternalNode<K, V>>, parent_idx: usize) {
        let leaf = Self::as_leaf_ptr(self);
        // Raw-pointer writes keep other outstanding references to this node valid.
        unsafe { (*leaf).parent = Some(parent) };
        unsafe { (*leaf).parent_idx.write(parent_idx as u16) };
    }
}
38458
impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
    /// Clears the root's link to its parent edge.
    /// A root node has no parent, so this restores that invariant after a
    /// former child becomes the new root (see `pop_internal_level`).
    fn clear_parent_link(&mut self) {
        let mut root_node = self.borrow_mut();
        let leaf = root_node.as_leaf_mut();
        leaf.parent = None;
    }
}
38467
impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
    /// Returns a new owned tree, with its own root node that is initially empty.
    pub fn new() -> Self {
        NodeRef::new_leaf().forget_type()
    }

    /// Adds a new internal node with a single edge pointing to the previous root node,
    /// make that new node the root node, and return it. This increases the height by 1
    /// and is the opposite of `pop_internal_level`.
    pub fn push_internal_level(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
        // Swap the old root out of `self` and wrap it in a fresh internal node.
        super::mem::take_mut(self, |old_root| NodeRef::new_internal(old_root).forget_type());

        // `self.borrow_mut()`, except that we just forgot we're internal now:
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Removes the internal root node, using its first child as the new root node.
    /// As it is intended only to be called when the root node has only one child,
    /// no cleanup is done on any of the keys, values and other children.
    /// This decreases the height by 1 and is the opposite of `push_internal_level`.
    ///
    /// Requires exclusive access to the `Root` object but not to the root node;
    /// it will not invalidate other handles or references to the root node.
    ///
    /// Panics if there is no internal level, i.e., if the root node is a leaf.
    pub fn pop_internal_level(&mut self) {
        assert!(self.height > 0);

        // Remember the old root allocation so it can be freed after its first
        // child has been read out of it.
        let top = self.node;

        // SAFETY: we asserted to be internal.
        let internal_self = unsafe { self.borrow_mut().cast_to_internal_unchecked() };
        // SAFETY: we borrowed `self` exclusively and its borrow type is exclusive.
        let internal_node = unsafe { &mut *NodeRef::as_internal_ptr(&internal_self) };
        // SAFETY: the first edge is always initialized.
        self.node = unsafe { internal_node.edges[0].assume_init_read() };
        self.height -= 1;
        self.clear_parent_link();

        unsafe {
            // The discarded root was an internal node, so free it with that layout.
            Global.deallocate(top.cast(), Layout::new::<InternalNode<K, V>>());
        }
    }
}
38512
impl<K, V, Type> NodeRef<marker::Owned, K, V, Type> {
    /// Mutably borrows the owned root node. Unlike `reborrow_mut`, this is safe
    /// because the return value cannot be used to destroy the root, and there
    /// cannot be other references to the tree.
    pub fn borrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Slightly mutably borrows the owned root node: values may be mutated,
    /// but keys and tree structure may not (see `ValMut` in the `NodeRef` docs).
    pub fn borrow_valmut(&mut self) -> NodeRef<marker::ValMut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Irreversibly transitions to a reference that permits traversal and offers
    /// destructive methods and little else.
    pub fn into_dying(self) -> NodeRef<marker::Dying, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
38532
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
    /// Adds a key-value pair to the end of the node.
    /// Panics if the node is already at `CAPACITY`.
    pub fn push(&mut self, key: K, val: V) {
        let len = self.len_mut();
        let idx = usize::from(*len);
        assert!(idx < CAPACITY);
        *len += 1;
        unsafe {
            // SAFETY: `idx < CAPACITY` was just asserted, and slot `idx` was
            // outside the initialized prefix before `len` was bumped.
            self.key_area_mut(idx).write(key);
            self.val_area_mut(idx).write(val);
        }
    }
}
38546
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Adds a key-value pair, and an edge to go to the right of that pair,
    /// to the end of the node.
    /// Panics if the node is full or if `edge` is not exactly one level below.
    pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
        assert!(edge.height == self.height - 1);

        let len = self.len_mut();
        let idx = usize::from(*len);
        assert!(idx < CAPACITY);
        *len += 1;
        unsafe {
            // SAFETY: `idx < CAPACITY` was just asserted; the new KV goes in
            // slot `idx` and the new edge to its right, in slot `idx + 1`.
            self.key_area_mut(idx).write(key);
            self.val_area_mut(idx).write(val);
            self.edge_area_mut(idx + 1).write(edge.node);
            // The adopted child must point back at this node.
            Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
        }
    }
}
38565
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Leaf> {
    /// Removes any static information asserting that this node is a `Leaf` node.
    /// Only the compile-time type changes; pointer and height are preserved.
    pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}

impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Removes any static information asserting that this node is an `Internal` node.
    /// Only the compile-time type changes; pointer and height are preserved.
    pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
38579
38580impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
38581    /// Checks whether a node is an `Internal` node or a `Leaf` node.
38582    pub fn force(
38583        self,
38584    ) -> ForceResult<
38585        NodeRef<BorrowType, K, V, marker::Leaf>,
38586        NodeRef<BorrowType, K, V, marker::Internal>,
38587    > {
38588        if self.height == 0 {
38589            ForceResult::Leaf(NodeRef {
38590                height: self.height,
38591                node: self.node,
38592                _marker: PhantomData,
38593            })
38594        } else {
38595            ForceResult::Internal(NodeRef {
38596                height: self.height,
38597                node: self.node,
38598                _marker: PhantomData,
38599            })
38600        }
38601    }
38602}
38603
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Unsafely asserts to the compiler the static information that this node is a `Leaf`.
    ///
    /// # Safety
    /// The node must actually be a leaf, i.e. `height == 0`.
    unsafe fn cast_to_leaf_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
        debug_assert!(self.height == 0);
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Unsafely asserts to the compiler the static information that this node is an `Internal`.
    ///
    /// # Safety
    /// The node must actually be internal, i.e. `height > 0`.
    unsafe fn cast_to_internal_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
        debug_assert!(self.height > 0);
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
38617
/// A reference to a specific key-value pair or edge within a node. The `Node` parameter
/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key-value
/// pair) or `Edge` (signifying a handle on an edge).
///
/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
/// a child node, these represent the spaces where child pointers would go between the key-value
/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
/// to the left of the node, one between the two pairs, and one at the right of the node.
pub struct Handle<Node, Type> {
    node: Node,
    // Position within the node: `idx < node.len()` for a `KV` handle,
    // `idx <= node.len()` for an `Edge` handle.
    idx: usize,
    _marker: PhantomData<Type>,
}
38631
impl<Node: Copy, Type> Copy for Handle<Node, Type> {}
// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
// `Clone`able is when it is an immutable reference and therefore `Copy`.
impl<Node: Copy, Type> Clone for Handle<Node, Type> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<Node, Type> Handle<Node, Type> {
    /// Retrieves the node that contains the edge or key-value pair this handle points to.
    pub fn into_node(self) -> Node {
        self.node
    }

    /// Returns the position of this handle in the node.
    pub fn idx(&self) -> usize {
        self.idx
    }
}
38652
impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
    /// Creates a new handle to a key-value pair in `node`.
    /// Unsafe because the caller must ensure that `idx < node.len()`.
    pub unsafe fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
        debug_assert!(idx < node.len());

        Handle { node, idx, _marker: PhantomData }
    }

    /// Returns a handle to the edge immediately left of this key-value pair.
    pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
        unsafe { Handle::new_edge(self.node, self.idx) }
    }

    /// Returns a handle to the edge immediately right of this key-value pair.
    pub fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
        unsafe { Handle::new_edge(self.node, self.idx + 1) }
    }
}
38670
impl<BorrowType, K, V, NodeType, HandleType> PartialEq
    for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
{
    fn eq(&self, other: &Self) -> bool {
        // Destructuring (instead of plain field access) makes this fail to
        // compile if a field is ever added to `Handle` and forgotten here.
        let Self { node, idx, _marker } = self;
        node.eq(&other.node) && *idx == other.idx
    }
}
38679
impl<BorrowType, K, V, NodeType, HandleType>
    Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
{
    /// Temporarily takes out another, immutable handle on the same location.
    /// The result borrows from `self`, so the original handle is unusable
    /// (but intact) for as long as the reborrow lives.
    pub fn reborrow(&self) -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
        Handle { node: self.node.reborrow(), idx: self.idx, _marker: PhantomData }
    }
}
38689
impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
    /// Temporarily takes out another, mutable handle on the same location. Beware, as
    /// this method is very dangerous, doubly so since it may not immediately appear
    /// dangerous.
    ///
    /// For details, see `NodeRef::reborrow_mut`.
    pub unsafe fn reborrow_mut(
        &mut self,
    ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
        // SAFETY: deferred to the caller, per this method's own contract
        // (see `NodeRef::reborrow_mut` for the exact obligations).
        Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
    }
}
38703
38704impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
38705    /// Creates a new handle to an edge in `node`.
38706    /// Unsafe because the caller must ensure that `idx <= node.len()`.
38707    pub unsafe fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
38708        debug_assert!(idx <= node.len());
38709
38710        Handle { node, idx, _marker: PhantomData }
38711    }
38712
38713    pub fn left_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
38714        if self.idx > 0 {
38715            Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) })
38716        } else {
38717            Err(self)
38718        }
38719    }
38720
38721    pub fn right_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
38722        if self.idx < self.node.len() {
38723            Ok(unsafe { Handle::new_kv(self.node, self.idx) })
38724        } else {
38725            Err(self)
38726        }
38727    }
38728}
38729
/// Discriminates between the left and the right half of a split (or, more
/// generally, between two sides), carrying a payload such as an index.
pub enum LeftOrRight<T> {
    Left(T),
    Right(T),
}
38734
38735/// Given an edge index where we want to insert into a node filled to capacity,
38736/// computes a sensible KV index of a split point and where to perform the insertion.
38737/// The goal of the split point is for its key and value to end up in a parent node;
38738/// the keys, values and edges to the left of the split point become the left child;
38739/// the keys, values and edges to the right of the split point become the right child.
38740fn splitpoint(edge_idx: usize) -> (usize, LeftOrRight<usize>) {
38741    debug_assert!(edge_idx <= CAPACITY);
38742    // Rust issue #74834 tries to explain these symmetric rules.
38743    match edge_idx {
38744        0..EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER - 1, LeftOrRight::Left(edge_idx)),
38745        EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Left(edge_idx)),
38746        EDGE_IDX_RIGHT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Right(0)),
38747        _ => (KV_IDX_CENTER + 1, LeftOrRight::Right(edge_idx - (KV_IDX_CENTER + 1 + 1))),
38748    }
38749}
38750
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method assumes that there is enough space in the node for the new
    /// pair to fit.
    ///
    /// The returned pointer points to the inserted value.
    fn insert_fit(&mut self, key: K, val: V) -> *mut V {
        debug_assert!(self.node.len() < CAPACITY);
        let new_len = self.node.len() + 1;

        unsafe {
            // Shift the tail of each parallel array right by one and write the
            // new element at edge position `idx`.
            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
            // Only bump the length once both arrays are consistent again.
            *self.node.len_mut() = new_len as u16;

            self.node.val_area_mut(self.idx).assume_init_mut()
        }
    }
}
38770
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room.
    ///
    /// The returned pointer points to the inserted value.
    fn insert(mut self, key: K, val: V) -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) {
        if self.node.len() < CAPACITY {
            // Fast path: the pair fits into this node as-is.
            let val_ptr = self.insert_fit(key, val);
            let kv = unsafe { Handle::new_kv(self.node, self.idx) };
            (InsertResult::Fit(kv), val_ptr)
        } else {
            // Full node: split around a chosen middle KV, then insert into
            // whichever half the original edge index ended up in.
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split();
            let mut insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            let val_ptr = insertion_edge.insert_fit(key, val);
            (InsertResult::Split(result), val_ptr)
        }
    }
}
38798
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
    /// Fixes the parent pointer and index in the child node that this edge
    /// links to. This is useful when the ordering of edges has been changed,
    /// e.g. after edges were shifted by an insertion or removal.
    fn correct_parent_link(self) {
        // Create backpointer without invalidating other references to the node.
        let ptr = unsafe { NonNull::new_unchecked(NodeRef::as_internal_ptr(&self.node)) };
        let idx = self.idx;
        let mut child = self.descend();
        child.set_parent_link(ptr, idx);
    }
}
38810
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method assumes
    /// that there is enough space in the node for the new pair to fit.
    fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
        debug_assert!(self.node.len() < CAPACITY);
        // The inserted subtree must hang exactly one level below this node.
        debug_assert!(edge.height == self.node.height - 1);
        let new_len = self.node.len() + 1;

        unsafe {
            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
            // The new edge goes right of the new KV, hence `idx + 1`; an
            // internal node stores one more edge than key-value pairs.
            slice_insert(self.node.edge_area_mut(..new_len + 1), self.idx + 1, edge.node);
            *self.node.len_mut() = new_len as u16;

            // Children from the inserted edge onwards shifted right by one,
            // so their parent back-links must be rewritten.
            self.node.correct_childrens_parent_links(self.idx + 1..new_len + 1);
        }
    }

    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method splits
    /// the node if there isn't enough room.
    fn insert(
        mut self,
        key: K,
        val: V,
        edge: Root<K, V>,
    ) -> InsertResult<'a, K, V, marker::Internal> {
        assert!(edge.height == self.node.height - 1);

        if self.node.len() < CAPACITY {
            self.insert_fit(key, val, edge);
            let kv = unsafe { Handle::new_kv(self.node, self.idx) };
            InsertResult::Fit(kv)
        } else {
            // Full node: split around a middle KV and insert into the proper
            // half, mirroring the leaf version of `insert` above.
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split();
            let mut insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            insertion_edge.insert_fit(key, val, edge);
            InsertResult::Split(result)
        }
    }
}
38862
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room, and tries to
    /// insert the split off portion into the parent node recursively, until the root is reached.
    ///
    /// If the returned result is a `Fit`, its handle's node can be this edge's node or an ancestor.
    /// If the returned result is a `Split`, the `left` field will be the root node.
    /// The returned pointer points to the inserted value.
    pub fn insert_recursing(
        self,
        key: K,
        value: V,
    ) -> (InsertResult<'a, K, V, marker::LeafOrInternal>, *mut V) {
        // Insert into the leaf; if it fits there, we are done immediately.
        let (mut split, val_ptr) = match self.insert(key, value) {
            (InsertResult::Fit(handle), ptr) => {
                return (InsertResult::Fit(handle.forget_node_type()), ptr);
            }
            (InsertResult::Split(split), val_ptr) => (split.forget_node_type(), val_ptr),
        };

        // Propagate the split upwards: each iteration pushes the split-off KV
        // and right subtree into the parent, until some parent has room or the
        // root itself splits.
        loop {
            split = match split.left.ascend() {
                Ok(parent) => match parent.insert(split.kv.0, split.kv.1, split.right) {
                    InsertResult::Fit(handle) => {
                        return (InsertResult::Fit(handle.forget_node_type()), val_ptr);
                    }
                    InsertResult::Split(split) => split.forget_node_type(),
                },
                Err(root) => {
                    // Reached the root; report the split with the root on the left.
                    return (InsertResult::Split(SplitResult { left: root, ..split }), val_ptr);
                }
            };
        }
    }
}
38898
impl<BorrowType: marker::BorrowType, K, V>
    Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>
{
    /// Finds the node pointed to by this edge.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    ///
    /// # Panics
    /// Panics if `BorrowType::PERMITS_TRAVERSAL` is false.
    pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
        assert!(BorrowType::PERMITS_TRAVERSAL);
        // We need to use raw pointers to nodes because, if BorrowType is
        // marker::ValMut, there might be outstanding mutable references to
        // values that we must not invalidate. There's no worry accessing the
        // height field because that value is copied. Beware that, once the
        // node pointer is dereferenced, we access the edges array with a
        // reference (Rust issue #73987) and invalidate any other references
        // to or inside the array, should any be around.
        let parent_ptr = NodeRef::as_internal_ptr(&self.node);
        let node = unsafe { (*parent_ptr).edges.get_unchecked(self.idx).assume_init_read() };
        // The child lives one level below its parent.
        NodeRef { node, height: self.node.height - 1, _marker: PhantomData }
    }
}
38922
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
    /// Consumes the handle, returning shared references to the key and value
    /// that live as long as the shared borrow of the tree.
    pub fn into_kv(self) -> (&'a K, &'a V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf();
        // SAFETY: a KV handle guarantees `idx < len` (see `new_kv`), so both
        // slots at `idx` are initialized.
        let k = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() };
        let v = unsafe { leaf.vals.get_unchecked(self.idx).assume_init_ref() };
        (k, v)
    }
}
38932
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Returns a mutable reference to the key at this handle's position.
    pub fn key_mut(&mut self) -> &mut K {
        unsafe { self.node.key_area_mut(self.idx).assume_init_mut() }
    }

    /// Consumes the handle, returning a mutable reference to the value that
    /// lives as long as the mutable borrow of the tree.
    pub fn into_val_mut(self) -> &'a mut V {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf_mut();
        // SAFETY: a KV handle guarantees `idx < len`, so the slot is initialized.
        unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() }
    }
}
38944
impl<'a, K, V, NodeType> Handle<NodeRef<marker::ValMut<'a>, K, V, NodeType>, marker::KV> {
    /// Consumes the handle, returning a shared reference to the key and a
    /// mutable reference to the value at this position.
    pub fn into_kv_valmut(self) -> (&'a K, &'a mut V) {
        unsafe { self.node.into_key_val_mut_at(self.idx) }
    }
}
38950
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Returns mutable references to both the key and the value at this position.
    pub fn kv_mut(&mut self) -> (&mut K, &mut V) {
        debug_assert!(self.idx < self.node.len());
        // We cannot call separate key and value methods, because calling the second one
        // invalidates the reference returned by the first.
        unsafe {
            let leaf = self.node.as_leaf_mut();
            // SAFETY: a KV handle guarantees `idx < len`, so both slots are initialized.
            let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_mut();
            let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_mut();
            (key, val)
        }
    }

    /// Replace the key and value that the KV handle refers to.
    pub fn replace_kv(&mut self, k: K, v: V) -> (K, V) {
        let (key, val) = self.kv_mut();
        (mem::replace(key, k), mem::replace(val, v))
    }
}
38970
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Helps implementations of `split` for a particular `NodeType`,
    /// by taking care of leaf data.
    ///
    /// Moves the KV at `idx` out and returns it, moves every pair to its right
    /// into `new_node`, and truncates `self.node` to the `idx` pairs on the left.
    fn split_leaf_data(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V) {
        debug_assert!(self.idx < self.node.len());
        let old_len = self.node.len();
        // Number of pairs that end up in `new_node` (everything right of `idx`).
        let new_len = old_len - self.idx - 1;
        new_node.len = new_len as u16;
        unsafe {
            // Read out the middle pair; its slot is logically vacated by the
            // length truncation below.
            let k = self.node.key_area_mut(self.idx).assume_init_read();
            let v = self.node.val_area_mut(self.idx).assume_init_read();

            move_to_slice(
                self.node.key_area_mut(self.idx + 1..old_len),
                &mut new_node.keys[..new_len],
            );
            move_to_slice(
                self.node.val_area_mut(self.idx + 1..old_len),
                &mut new_node.vals[..new_len],
            );

            // Truncate the original node to the pairs left of `idx`.
            *self.node.len_mut() = self.idx as u16;
            (k, v)
        }
    }
}
38997
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the key-value pairs to the left of
    ///   this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the key-value pairs to the right of this handle are put into a newly
    ///   allocated node.
    pub fn split(mut self) -> SplitResult<'a, K, V, marker::Leaf> {
        let mut new_node = LeafNode::new();

        let kv = self.split_leaf_data(&mut new_node);

        let right = NodeRef::from_new_leaf(new_node);
        SplitResult { left: self.node, kv, right }
    }

    /// Removes the key-value pair pointed to by this handle and returns it, along with the edge
    /// that the key-value pair collapsed into.
    pub fn remove(
        mut self,
    ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
        let old_len = self.node.len();
        unsafe {
            // Close the gap in both parallel arrays, then shrink the length.
            let k = slice_remove(self.node.key_area_mut(..old_len), self.idx);
            let v = slice_remove(self.node.val_area_mut(..old_len), self.idx);
            *self.node.len_mut() = (old_len - 1) as u16;
            ((k, v), self.left_edge())
        }
    }
}
39029
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the edges and key-value pairs to the
    ///   left of this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the edges and key-value pairs to the right of this handle are put into
    ///   a newly allocated node.
    pub fn split(mut self) -> SplitResult<'a, K, V, marker::Internal> {
        let old_len = self.node.len();
        unsafe {
            let mut new_node = InternalNode::new();
            // Split keys and values exactly like the leaf case...
            let kv = self.split_leaf_data(&mut new_node.data);
            let new_len = usize::from(new_node.data.len);
            // ...then also move the edges right of this KV; a node with
            // `new_len` pairs carries `new_len + 1` edges.
            move_to_slice(
                self.node.edge_area_mut(self.idx + 1..old_len + 1),
                &mut new_node.edges[..new_len + 1],
            );

            // The right sibling sits at the same height as the split node.
            let height = self.node.height;
            let right = NodeRef::from_new_internal(new_node, height);

            SplitResult { left: self.node, kv, right }
        }
    }
}
39056
/// Represents a session for evaluating and performing a balancing operation
/// around an internal key-value pair.
pub struct BalancingContext<'a, K, V> {
    // Handle on the KV in the parent around which balancing happens.
    parent: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV>,
    // Child hanging directly left of the parent KV.
    left_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
    // Child hanging directly right of the parent KV.
    right_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
}
39064
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Opens a balancing session around this KV: the parent handle plus both
    /// adjacent child nodes.
    pub fn consider_for_balancing(self) -> BalancingContext<'a, K, V> {
        // NOTE(review): `ptr::read` duplicates this unique handle twice. This
        // appears to rely on the three resulting values being used for
        // disjoint purposes (the parent's KV area vs. two distinct children)
        // — confirm against the aliasing rules before touching this.
        let self1 = unsafe { ptr::read(&self) };
        let self2 = unsafe { ptr::read(&self) };
        BalancingContext {
            parent: self,
            left_child: self1.left_edge().descend(),
            right_child: self2.right_edge().descend(),
        }
    }
}
39076
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Chooses a balancing context involving the node as a child, thus between
    /// the KV immediately to the left or to the right in the parent node.
    /// Returns an `Err` if there is no parent.
    /// Panics if the parent is empty.
    ///
    /// Prefers the left side, to be optimal if the given node is somehow
    /// underfull, meaning here only that it has fewer elements than its left
    /// sibling and than its right sibling, if they exist. In that case,
    /// merging with the left sibling is faster, since we only need to move
    /// the node's N elements, instead of shifting them to the right and moving
    /// more than N elements in front. Stealing from the left sibling is also
    /// typically faster, since we only need to shift the node's N elements to
    /// the right, instead of shifting at least N of the sibling's elements to
    /// the left.
    pub fn choose_parent_kv(self) -> Result<LeftOrRight<BalancingContext<'a, K, V>>, Self> {
        // NOTE(review): the `ptr::read` calls below duplicate unique
        // references (self / the parent KV handle); each duplicate is used for
        // a different part of the tree — verify against the aliasing rules.
        match unsafe { ptr::read(&self) }.ascend() {
            Ok(parent_edge) => match parent_edge.left_kv() {
                // There is a KV to the left: balance with the left sibling.
                Ok(left_parent_kv) => Ok(LeftOrRight::Left(BalancingContext {
                    parent: unsafe { ptr::read(&left_parent_kv) },
                    left_child: left_parent_kv.left_edge().descend(),
                    right_child: self,
                })),
                // Leftmost edge: fall back to the KV on the right.
                Err(parent_edge) => match parent_edge.right_kv() {
                    Ok(right_parent_kv) => Ok(LeftOrRight::Right(BalancingContext {
                        parent: unsafe { ptr::read(&right_parent_kv) },
                        left_child: self,
                        right_child: right_parent_kv.right_edge().descend(),
                    })),
                    // An internal parent always has at least one KV.
                    Err(_) => unreachable!("empty internal node"),
                },
            },
            Err(root) => Err(root),
        }
    }
}
39113
impl<'a, K, V> BalancingContext<'a, K, V> {
    /// Returns the number of KVs in the left child.
    pub fn left_child_len(&self) -> usize {
        self.left_child.len()
    }

    /// Returns the number of KVs in the right child.
    pub fn right_child_len(&self) -> usize {
        self.right_child.len()
    }

    /// Consumes the context, keeping only the left child node.
    pub fn into_left_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.left_child
    }

    /// Consumes the context, keeping only the right child node.
    pub fn into_right_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.right_child
    }

    /// Returns whether merging is possible, i.e., whether there is enough room
    /// in a node to combine the central KV with both adjacent child nodes.
    pub fn can_merge(&self) -> bool {
        self.left_child.len() + 1 + self.right_child.len() <= CAPACITY
    }
}
39137
impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
    /// Performs a merge and lets a closure decide what to return.
    ///
    /// Pulls the parent KV down into the left child, appends the right child's
    /// contents after it, removes the right child's edge from the parent and
    /// frees the right child's allocation. Panics unless the combined contents
    /// fit in one node (see `can_merge`).
    fn do_merge<
        F: FnOnce(
            NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
            NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
        ) -> R,
        R,
    >(
        self,
        result: F,
    ) -> R {
        let Handle { node: mut parent_node, idx: parent_idx, _marker } = self.parent;
        let old_parent_len = parent_node.len();
        let mut left_node = self.left_child;
        let old_left_len = left_node.len();
        let mut right_node = self.right_child;
        let right_len = right_node.len();
        // Left child ends up with: its own pairs + the parent KV + the right child's pairs.
        let new_left_len = old_left_len + 1 + right_len;

        assert!(new_left_len <= CAPACITY);

        unsafe {
            *left_node.len_mut() = new_left_len as u16;

            // The parent KV drops into the slot right after the left child's
            // old contents; the right child's pairs follow it.
            let parent_key = slice_remove(parent_node.key_area_mut(..old_parent_len), parent_idx);
            left_node.key_area_mut(old_left_len).write(parent_key);
            move_to_slice(
                right_node.key_area_mut(..right_len),
                left_node.key_area_mut(old_left_len + 1..new_left_len),
            );

            let parent_val = slice_remove(parent_node.val_area_mut(..old_parent_len), parent_idx);
            left_node.val_area_mut(old_left_len).write(parent_val);
            move_to_slice(
                right_node.val_area_mut(..right_len),
                left_node.val_area_mut(old_left_len + 1..new_left_len),
            );

            // Drop the parent's edge to the (now empty) right child and fix
            // the back-links of the children that shifted left.
            slice_remove(&mut parent_node.edge_area_mut(..old_parent_len + 1), parent_idx + 1);
            parent_node.correct_childrens_parent_links(parent_idx + 1..old_parent_len);
            *parent_node.len_mut() -= 1;

            if parent_node.height > 1 {
                // SAFETY: the height of the nodes being merged is one below the height
                // of the node of this edge, thus above zero, so they are internal.
                let mut left_node = left_node.reborrow_mut().cast_to_internal_unchecked();
                let mut right_node = right_node.cast_to_internal_unchecked();
                // Internal children also carry edges: move them over and
                // repoint the adopted grandchildren at the left child.
                move_to_slice(
                    right_node.edge_area_mut(..right_len + 1),
                    left_node.edge_area_mut(old_left_len + 1..new_left_len + 1),
                );

                left_node.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);

                Global.deallocate(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
            } else {
                Global.deallocate(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
            }
        }
        result(parent_node, left_node)
    }
39200
    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns the shrunk parent node.
    ///
    /// Panics unless we `.can_merge()`.
    pub fn merge_tracking_parent(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
        self.do_merge(|parent, _child| parent)
    }

    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns that child node.
    ///
    /// Panics unless we `.can_merge()`.
    pub fn merge_tracking_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.do_merge(|_parent, child| child)
    }

    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns the edge handle in that child node
    /// where the tracked child edge ended up.
    ///
    /// Panics unless we `.can_merge()`.
    pub fn merge_tracking_child_edge(
        self,
        track_edge_idx: LeftOrRight<usize>,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        let old_left_len = self.left_child.len();
        let right_len = self.right_child.len();
        // The tracked edge must exist in the child it claims to be in.
        assert!(match track_edge_idx {
            LeftOrRight::Left(idx) => idx <= old_left_len,
            LeftOrRight::Right(idx) => idx <= right_len,
        });
        let child = self.merge_tracking_child();
        // Left-child edges keep their index; right-child edges are shifted by
        // the left child's old contents plus the pulled-down parent KV.
        let new_idx = match track_edge_idx {
            LeftOrRight::Left(idx) => idx,
            LeftOrRight::Right(idx) => old_left_len + 1 + idx,
        };
        unsafe { Handle::new_edge(child, new_idx) }
    }
39239
    /// Removes a key-value pair from the left child and places it in the key-value storage
    /// of the parent, while pushing the old parent key-value pair into the right child.
    /// Returns a handle to the edge in the right child corresponding to where the original
    /// edge specified by `track_right_edge_idx` ended up.
    pub fn steal_left(
        mut self,
        track_right_edge_idx: usize,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        self.bulk_steal_left(1);
        // The stolen pair shifted the right child's contents one to the right.
        unsafe { Handle::new_edge(self.right_child, 1 + track_right_edge_idx) }
    }

    /// Removes a key-value pair from the right child and places it in the key-value storage
    /// of the parent, while pushing the old parent key-value pair onto the left child.
    /// Returns a handle to the edge in the left child specified by `track_left_edge_idx`,
    /// which didn't move.
    pub fn steal_right(
        mut self,
        track_left_edge_idx: usize,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        self.bulk_steal_right(1);
        unsafe { Handle::new_edge(self.left_child, track_left_edge_idx) }
    }
39263
    /// This does stealing similar to `steal_left` but steals multiple elements at once.
    ///
    /// Panics if `count == 0`, if the left child does not hold `count` pairs,
    /// or if the right child cannot accommodate them.
    pub fn bulk_steal_left(&mut self, count: usize) {
        assert!(count > 0);
        unsafe {
            let left_node = &mut self.left_child;
            let old_left_len = left_node.len();
            let right_node = &mut self.right_child;
            let old_right_len = right_node.len();

            // Make sure that we may steal safely.
            assert!(old_right_len + count <= CAPACITY);
            assert!(old_left_len >= count);

            let new_left_len = old_left_len - count;
            let new_right_len = old_right_len + count;
            *left_node.len_mut() = new_left_len as u16;
            *right_node.len_mut() = new_right_len as u16;

            // Move leaf data.
            {
                // Make room for stolen elements in the right child.
                slice_shr(right_node.key_area_mut(..new_right_len), count);
                slice_shr(right_node.val_area_mut(..new_right_len), count);

                // Move elements from the left child to the right one.
                // (`count - 1` of them; the remaining one goes through the parent.)
                move_to_slice(
                    left_node.key_area_mut(new_left_len + 1..old_left_len),
                    right_node.key_area_mut(..count - 1),
                );
                move_to_slice(
                    left_node.val_area_mut(new_left_len + 1..old_left_len),
                    right_node.val_area_mut(..count - 1),
                );

                // Move the left-most stolen pair to the parent.
                let k = left_node.key_area_mut(new_left_len).assume_init_read();
                let v = left_node.val_area_mut(new_left_len).assume_init_read();
                let (k, v) = self.parent.replace_kv(k, v);

                // Move parent's key-value pair to the right child.
                right_node.key_area_mut(count - 1).write(k);
                right_node.val_area_mut(count - 1).write(v);
            }

            match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
                (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                    // Make room for stolen edges.
                    slice_shr(right.edge_area_mut(..new_right_len + 1), count);

                    // Steal edges.
                    move_to_slice(
                        left.edge_area_mut(new_left_len + 1..old_left_len + 1),
                        right.edge_area_mut(..count),
                    );

                    // Every edge in the right child either moved or was adopted:
                    // rewrite all parent back-links.
                    right.correct_childrens_parent_links(0..new_right_len + 1);
                }
                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                // Siblings always sit at the same height, so a mixed pair is impossible.
                _ => unreachable!(),
            }
        }
    }
39326
    /// The symmetric clone of `bulk_steal_left`: moves the first `count`
    /// key-value pairs (and, for internal nodes, the corresponding child
    /// edges) of the right child into the left child, rotating one pair
    /// through the parent KV.
    pub fn bulk_steal_right(&mut self, count: usize) {
        assert!(count > 0);
        unsafe {
            let left_node = &mut self.left_child;
            let old_left_len = left_node.len();
            let right_node = &mut self.right_child;
            let old_right_len = right_node.len();

            // Make sure that we may steal safely.
            assert!(old_left_len + count <= CAPACITY);
            assert!(old_right_len >= count);

            let new_left_len = old_left_len + count;
            let new_right_len = old_right_len - count;
            *left_node.len_mut() = new_left_len as u16;
            *right_node.len_mut() = new_right_len as u16;

            // Move leaf data.
            {
                // Move the right-most stolen pair to the parent.
                let k = right_node.key_area_mut(count - 1).assume_init_read();
                let v = right_node.val_area_mut(count - 1).assume_init_read();
                let (k, v) = self.parent.replace_kv(k, v);

                // Move parent's key-value pair to the left child.
                left_node.key_area_mut(old_left_len).write(k);
                left_node.val_area_mut(old_left_len).write(v);

                // Move elements from the right child to the left one
                // (`count - 1` of them; the last stolen pair went through the parent).
                move_to_slice(
                    right_node.key_area_mut(..count - 1),
                    left_node.key_area_mut(old_left_len + 1..new_left_len),
                );
                move_to_slice(
                    right_node.val_area_mut(..count - 1),
                    left_node.val_area_mut(old_left_len + 1..new_left_len),
                );

                // Fill gap where stolen elements used to be.
                slice_shl(right_node.key_area_mut(..old_right_len), count);
                slice_shl(right_node.val_area_mut(..old_right_len), count);
            }

            match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
                (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                    // Steal edges.
                    move_to_slice(
                        right.edge_area_mut(..count),
                        left.edge_area_mut(old_left_len + 1..new_left_len + 1),
                    );

                    // Fill gap where stolen edges used to be.
                    slice_shl(right.edge_area_mut(..old_right_len + 1), count);

                    // Moved children now live under a new parent node;
                    // repair their back pointers.
                    left.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);
                    right.correct_childrens_parent_links(0..new_right_len + 1);
                }
                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                // Siblings are always at the same height: both leaves or both internal.
                _ => unreachable!(),
            }
        }
    }
39390}
39391
39392impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
39393    pub fn forget_node_type(
39394        self,
39395    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
39396        unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
39397    }
39398}
39399
39400impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
39401    pub fn forget_node_type(
39402        self,
39403    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
39404        unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
39405    }
39406}
39407
39408impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
39409    pub fn forget_node_type(
39410        self,
39411    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
39412        unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
39413    }
39414}
39415
39416impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV> {
39417    pub fn forget_node_type(
39418        self,
39419    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
39420        unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
39421    }
39422}
39423
39424impl<BorrowType, K, V, Type> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, Type> {
39425    /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
39426    pub fn force(
39427        self,
39428    ) -> ForceResult<
39429        Handle<NodeRef<BorrowType, K, V, marker::Leaf>, Type>,
39430        Handle<NodeRef<BorrowType, K, V, marker::Internal>, Type>,
39431    > {
39432        match self.node.force() {
39433            ForceResult::Leaf(node) => {
39434                ForceResult::Leaf(Handle { node, idx: self.idx, _marker: PhantomData })
39435            }
39436            ForceResult::Internal(node) => {
39437                ForceResult::Internal(Handle { node, idx: self.idx, _marker: PhantomData })
39438            }
39439        }
39440    }
39441}
39442
39443impl<'a, K, V, Type> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, Type> {
39444    /// Unsafely asserts to the compiler the static information that the handle's node is a `Leaf`.
39445    pub unsafe fn cast_to_leaf_unchecked(
39446        self,
39447    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, Type> {
39448        let node = unsafe { self.node.cast_to_leaf_unchecked() };
39449        Handle { node, idx: self.idx, _marker: PhantomData }
39450    }
39451}
39452
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
    /// Move the suffix after `self` from one node to another one. `right` must be empty.
    /// The first edge of `right` remains unchanged.
    pub fn move_suffix(
        &mut self,
        right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
    ) {
        unsafe {
            // Everything at KV index `self.idx` and beyond moves to `right`.
            let new_left_len = self.idx;
            let mut left_node = self.reborrow_mut().into_node();
            let old_left_len = left_node.len();

            let new_right_len = old_left_len - new_left_len;
            let mut right_node = right.reborrow_mut();

            assert!(right_node.len() == 0);
            assert!(left_node.height == right_node.height);

            if new_right_len > 0 {
                *left_node.len_mut() = new_left_len as u16;
                *right_node.len_mut() = new_right_len as u16;

                // Move the suffix's key-value pairs into the front of `right`.
                move_to_slice(
                    left_node.key_area_mut(new_left_len..old_left_len),
                    right_node.key_area_mut(..new_right_len),
                );
                move_to_slice(
                    left_node.val_area_mut(new_left_len..old_left_len),
                    right_node.val_area_mut(..new_right_len),
                );
                match (left_node.force(), right_node.force()) {
                    (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                        // Move the suffix's edges, starting at `right`'s edge 1,
                        // because edge 0 must remain unchanged (see the doc above).
                        move_to_slice(
                            left.edge_area_mut(new_left_len + 1..old_left_len + 1),
                            right.edge_area_mut(1..new_right_len + 1),
                        );
                        // Moved children got a new parent; repair their back pointers.
                        right.correct_childrens_parent_links(1..new_right_len + 1);
                    }
                    (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                    // Equal heights were asserted above: both leaves or both internal.
                    _ => unreachable!(),
                }
            }
        }
    }
}
39498
/// The result of a `force` call: distinguishes a leaf node (or handle into
/// one) from an internal node (or handle into one).
pub enum ForceResult<Leaf, Internal> {
    Leaf(Leaf),
    Internal(Internal),
}
39503
/// Result of insertion, when a node needed to expand beyond its capacity.
pub struct SplitResult<'a, K, V, NodeType> {
    /// Altered node in existing tree with elements and edges that belong to the left of `kv`.
    pub left: NodeRef<marker::Mut<'a>, K, V, NodeType>,
    /// Some key and value split off, to be inserted elsewhere.
    pub kv: (K, V),
    /// Owned, unattached, new node with elements and edges that belong to the right of `kv`.
    pub right: NodeRef<marker::Owned, K, V, NodeType>,
}
39513
39514impl<'a, K, V> SplitResult<'a, K, V, marker::Leaf> {
39515    pub fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
39516        SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
39517    }
39518}
39519
39520impl<'a, K, V> SplitResult<'a, K, V, marker::Internal> {
39521    pub fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
39522        SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
39523    }
39524}
39525
/// Result of insertion into a node: either the KV fit (`Fit`), or the node
/// had to be split and the caller must place the `SplitResult` contents.
pub enum InsertResult<'a, K, V, NodeType> {
    Fit(Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV>),
    Split(SplitResult<'a, K, V, NodeType>),
}
39530
pub mod marker {
    use core::marker::PhantomData;

    // Node-type parameters: the statically known kind of a node.
    pub enum Leaf {}
    pub enum Internal {}
    pub enum LeafOrInternal {}

    // Borrow-type parameters: how a `NodeRef` may access the tree.
    pub enum Owned {}
    pub enum Dying {}
    pub struct Immut<'a>(PhantomData<&'a ()>);
    pub struct Mut<'a>(PhantomData<&'a mut ()>);
    pub struct ValMut<'a>(PhantomData<&'a mut ()>);

    pub trait BorrowType {
        // Whether node references of this borrow type allow traversing
        // to other nodes in the tree.
        const PERMITS_TRAVERSAL: bool = true;
    }
    impl BorrowType for Owned {
        // Traversal isn't needed, it happens using the result of `borrow_mut`.
        // By disabling traversal, and only creating new references to roots,
        // we know that every reference of the `Owned` type is to a root node.
        const PERMITS_TRAVERSAL: bool = false;
    }
    impl BorrowType for Dying {}
    impl<'a> BorrowType for Immut<'a> {}
    impl<'a> BorrowType for Mut<'a> {}
    impl<'a> BorrowType for ValMut<'a> {}

    // Handle-type parameters: what a `Handle` points at within a node.
    pub enum KV {}
    pub enum Edge {}
}
39563
/// Inserts a value into a slice of initialized elements followed by one uninitialized element.
///
/// # Safety
/// The slice has more than `idx` elements.
unsafe fn slice_insert<T>(slice: &mut [MaybeUninit<T>], idx: usize, val: T) {
    unsafe {
        let len = slice.len();
        debug_assert!(len > idx);
        let base = slice.as_mut_ptr();
        // Shift the tail one position to the right to open a gap at `idx`.
        let tail = len - idx - 1;
        if tail > 0 {
            ptr::copy(base.add(idx), base.add(idx + 1), tail);
        }
        (*base.add(idx)).write(val);
    }
}
39579
/// Removes and returns a value from a slice of all initialized elements, leaving behind one
/// trailing uninitialized element.
///
/// # Safety
/// The slice has more than `idx` elements.
unsafe fn slice_remove<T>(slice: &mut [MaybeUninit<T>], idx: usize) -> T {
    unsafe {
        let len = slice.len();
        debug_assert!(idx < len);
        let base = slice.as_mut_ptr();
        // Read the value out before its slot is overwritten by the shift below.
        let val = (*base.add(idx)).assume_init_read();
        // Close the gap: shift everything after `idx` one position to the left.
        ptr::copy(base.add(idx + 1), base.add(idx), len - idx - 1);
        val
    }
}
39595
/// Shifts the elements in a slice `distance` positions to the left.
///
/// # Safety
/// The slice has at least `distance` elements.
unsafe fn slice_shl<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
    unsafe {
        let count = slice.len() - distance;
        let base = slice.as_mut_ptr();
        // Overlapping copy of the tail onto the head.
        ptr::copy(base.add(distance), base, count);
    }
}
39606
/// Shifts the elements in a slice `distance` positions to the right.
///
/// # Safety
/// The slice has at least `distance` elements.
unsafe fn slice_shr<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
    unsafe {
        let count = slice.len() - distance;
        let base = slice.as_mut_ptr();
        // Overlapping copy of the head onto the tail.
        ptr::copy(base, base.add(distance), count);
    }
}
39617
/// Moves all values from a slice of initialized elements to a slice
/// of uninitialized elements, leaving behind `src` as all uninitialized.
/// Works like `dst.copy_from_slice(src)` but does not require `T` to be `Copy`.
fn move_to_slice<T>(src: &mut [MaybeUninit<T>], dst: &mut [MaybeUninit<T>]) {
    let count = src.len();
    assert!(count == dst.len());
    // SAFETY: both slices hold `count` elements, and two `&mut` borrows can
    // never alias, so the regions cannot overlap.
    unsafe {
        ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), count);
    }
}
39627
39628#[cfg(test)]
39629mod tests;
39630use super::super::navigate;
39631use super::*;
39632use crate::fmt::Debug;
39633use crate::string::String;
39634
impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
    // Asserts that the back pointer in each reachable node points to its parent.
    pub fn assert_back_pointers(self) {
        if let ForceResult::Internal(node) = self.force() {
            // Visit every edge (there are `len + 1` of them) and recurse.
            for idx in 0..=node.len() {
                let edge = unsafe { Handle::new_edge(node, idx) };
                let child = edge.descend();
                // Ascending from the child must land back on exactly this edge.
                assert!(child.ascend().ok() == Some(edge));
                child.assert_back_pointers();
            }
        }
    }

    // Renders a multi-line display of the keys in order and in tree hierarchy,
    // picturing the tree growing sideways from its root on the left to its
    // leaves on the right.
    pub fn dump_keys(self) -> String
    where
        K: Debug,
    {
        let mut result = String::new();
        self.visit_nodes_in_order(|pos| match pos {
            navigate::Position::Leaf(leaf) => {
                // Leaves are the deepest level; indent by the tree's height.
                let depth = self.height();
                let indent = "  ".repeat(depth);
                result += &format!("\n{}{:?}", indent, leaf.keys());
            }
            navigate::Position::Internal(_) => {}
            navigate::Position::InternalKV(kv) => {
                // Indent internal keys by their distance from the root.
                let depth = self.height() - kv.into_node().height();
                let indent = "  ".repeat(depth);
                result += &format!("\n{}{:?}", indent, kv.into_kv().0);
            }
        });
        result
    }
}
39672
#[test]
fn test_splitpoint() {
    for idx in 0..=CAPACITY {
        let (middle_kv_idx, insertion) = splitpoint(idx);

        // Simulate performing the split: compute each half's length,
        // counting the pending insertion towards the side it lands on.
        let (left_len, right_len) = match insertion {
            LeftOrRight::Left(edge_idx) => {
                assert!(edge_idx <= middle_kv_idx);
                (middle_kv_idx + 1, CAPACITY - middle_kv_idx - 1)
            }
            LeftOrRight::Right(edge_idx) => {
                assert!(edge_idx <= CAPACITY - middle_kv_idx - 1);
                (middle_kv_idx, CAPACITY - middle_kv_idx)
            }
        };
        assert!(left_len >= MIN_LEN_AFTER_SPLIT);
        assert!(right_len >= MIN_LEN_AFTER_SPLIT);
        assert!(left_len + right_len == CAPACITY);
    }
}
39696
#[test]
fn test_partial_eq() {
    // Build a two-level tree: an internal root over one leaf holding a single KV.
    let mut root1 = NodeRef::new_leaf();
    root1.borrow_mut().push(1, ());
    let mut root1 = NodeRef::new_internal(root1.forget_type()).forget_type();
    let root2 = Root::new();
    root1.reborrow().assert_back_pointers();
    root2.reborrow().assert_back_pointers();

    let leaf_edge_1a = root1.reborrow().first_leaf_edge().forget_node_type();
    let leaf_edge_1b = root1.reborrow().last_leaf_edge().forget_node_type();
    let top_edge_1 = root1.reborrow().first_edge();
    let top_edge_2 = root2.reborrow().first_edge();

    // Handles compare equal only when they denote the same position in the same node.
    assert!(leaf_edge_1a == leaf_edge_1a);
    assert!(leaf_edge_1a != leaf_edge_1b);
    assert!(leaf_edge_1a != top_edge_1);
    assert!(leaf_edge_1a != top_edge_2);
    assert!(top_edge_1 == top_edge_1);
    assert!(top_edge_1 != top_edge_2);

    // Tear down the manually built trees: strip the internal level of `root1`,
    // then explicitly deallocate both remaining nodes.
    root1.pop_internal_level();
    unsafe { root1.into_dying().deallocate_and_ascend() };
    unsafe { root2.into_dying().deallocate_and_ascend() };
}
39722
#[test]
#[cfg(target_arch = "x86_64")]
fn test_sizes() {
    // 16 bytes of fixed per-node bookkeeping (as the zero-sized case shows),
    // plus 8 bytes per stored key, value, and edge pointer on x86_64.
    const HEADER: usize = 16;
    const WORD: usize = 8;
    assert_eq!(core::mem::size_of::<LeafNode<(), ()>>(), HEADER);
    assert_eq!(core::mem::size_of::<LeafNode<i64, i64>>(), HEADER + CAPACITY * 2 * WORD);
    assert_eq!(core::mem::size_of::<InternalNode<(), ()>>(), HEADER + (CAPACITY + 1) * WORD);
    assert_eq!(
        core::mem::size_of::<InternalNode<i64, i64>>(),
        HEADER + (CAPACITY * 3 + 1) * WORD
    );
}
39731use super::map::MIN_LEN;
39732use super::node::{marker, ForceResult::*, Handle, LeftOrRight::*, NodeRef, Root};
39733
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Stocks up a possibly underfull node by merging with or stealing from a
    /// sibling. If successful but at the cost of shrinking the parent node,
    /// returns that shrunk parent node. Returns an `Err` if the node is
    /// an empty root.
    fn fix_node_through_parent(
        self,
    ) -> Result<Option<NodeRef<marker::Mut<'a>, K, V, marker::Internal>>, Self> {
        let len = self.len();
        if len >= MIN_LEN {
            // Not underfull: nothing to fix.
            Ok(None)
        } else {
            match self.choose_parent_kv() {
                Ok(Left(mut left_parent_kv)) => {
                    if left_parent_kv.can_merge() {
                        // Merging pulls a KV out of the parent, shrinking it.
                        let parent = left_parent_kv.merge_tracking_parent();
                        Ok(Some(parent))
                    } else {
                        // Steal just enough from the left sibling to reach MIN_LEN.
                        left_parent_kv.bulk_steal_left(MIN_LEN - len);
                        Ok(None)
                    }
                }
                Ok(Right(mut right_parent_kv)) => {
                    if right_parent_kv.can_merge() {
                        // Merging pulls a KV out of the parent, shrinking it.
                        let parent = right_parent_kv.merge_tracking_parent();
                        Ok(Some(parent))
                    } else {
                        // Steal just enough from the right sibling to reach MIN_LEN.
                        right_parent_kv.bulk_steal_right(MIN_LEN - len);
                        Ok(None)
                    }
                }
                Err(root) => {
                    // No parent: `self` is the root, which is allowed to go
                    // below MIN_LEN but must not be empty.
                    if len > 0 {
                        Ok(None)
                    } else {
                        Err(root)
                    }
                }
            }
        }
    }
}
39776
39777impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
39778    /// Stocks up a possibly underfull node, and if that causes its parent node
39779    /// to shrink, stocks up the parent, recursively.
39780    /// Returns `true` if it fixed the tree, `false` if it couldn't because the
39781    /// root node became empty.
39782    ///
39783    /// This method does not expect ancestors to already be underfull upon entry
39784    /// and panics if it encounters an empty ancestor.
39785    pub fn fix_node_and_affected_ancestors(mut self) -> bool {
39786        loop {
39787            match self.fix_node_through_parent() {
39788                Ok(Some(parent)) => self = parent.forget_type(),
39789                Ok(None) => return true,
39790                Err(_) => return false,
39791            }
39792        }
39793    }
39794}
39795
39796impl<K, V> Root<K, V> {
39797    /// Removes empty levels on the top, but keeps an empty leaf if the entire tree is empty.
39798    pub fn fix_top(&mut self) {
39799        while self.height() > 0 && self.len() == 0 {
39800            self.pop_internal_level();
39801        }
39802    }
39803
39804    /// Stocks up or merge away any underfull nodes on the right border of the
39805    /// tree. The other nodes, those that are not the root nor a rightmost edge,
39806    /// must already have at least MIN_LEN elements.
39807    pub fn fix_right_border(&mut self) {
39808        self.fix_top();
39809        if self.len() > 0 {
39810            self.borrow_mut().last_kv().fix_right_border_of_right_edge();
39811            self.fix_top();
39812        }
39813    }
39814
39815    /// The symmetric clone of `fix_right_border`.
39816    pub fn fix_left_border(&mut self) {
39817        self.fix_top();
39818        if self.len() > 0 {
39819            self.borrow_mut().first_kv().fix_left_border_of_left_edge();
39820            self.fix_top();
39821        }
39822    }
39823
39824    /// Stock up any underfull nodes on the right border of the tree.
39825    /// The other nodes, those that are not the root nor a rightmost edge,
39826    /// must be prepared to have up to MIN_LEN elements stolen.
39827    pub fn fix_right_border_of_plentiful(&mut self) {
39828        let mut cur_node = self.borrow_mut();
39829        while let Internal(internal) = cur_node.force() {
39830            // Check if right-most child is underfull.
39831            let mut last_kv = internal.last_kv().consider_for_balancing();
39832            debug_assert!(last_kv.left_child_len() >= MIN_LEN * 2);
39833            let right_child_len = last_kv.right_child_len();
39834            if right_child_len < MIN_LEN {
39835                // We need to steal.
39836                last_kv.bulk_steal_left(MIN_LEN - right_child_len);
39837            }
39838
39839            // Go further down.
39840            cur_node = last_kv.into_right_child();
39841        }
39842    }
39843}
39844
39845impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
39846    fn fix_left_border_of_left_edge(mut self) {
39847        while let Internal(internal_kv) = self.force() {
39848            self = internal_kv.fix_left_child().first_kv();
39849            debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
39850        }
39851    }
39852
39853    fn fix_right_border_of_right_edge(mut self) {
39854        while let Internal(internal_kv) = self.force() {
39855            self = internal_kv.fix_right_child().last_kv();
39856            debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
39857        }
39858    }
39859}
39860
39861impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
39862    /// Stocks up the left child, assuming the right child isn't underfull, and
39863    /// provisions an extra element to allow merging its children in turn
39864    /// without becoming underfull.
39865    /// Returns the left child.
39866    fn fix_left_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
39867        let mut internal_kv = self.consider_for_balancing();
39868        let left_len = internal_kv.left_child_len();
39869        debug_assert!(internal_kv.right_child_len() >= MIN_LEN);
39870        if internal_kv.can_merge() {
39871            internal_kv.merge_tracking_child()
39872        } else {
39873            // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
39874            let count = (MIN_LEN + 1).saturating_sub(left_len);
39875            if count > 0 {
39876                internal_kv.bulk_steal_right(count);
39877            }
39878            internal_kv.into_left_child()
39879        }
39880    }
39881
39882    /// Stocks up the right child, assuming the left child isn't underfull, and
39883    /// provisions an extra element to allow merging its children in turn
39884    /// without becoming underfull.
39885    /// Returns wherever the right child ended up.
39886    fn fix_right_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
39887        let mut internal_kv = self.consider_for_balancing();
39888        let right_len = internal_kv.right_child_len();
39889        debug_assert!(internal_kv.left_child_len() >= MIN_LEN);
39890        if internal_kv.can_merge() {
39891            internal_kv.merge_tracking_child()
39892        } else {
39893            // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
39894            let count = (MIN_LEN + 1).saturating_sub(right_len);
39895            if count > 0 {
39896                internal_kv.bulk_steal_left(count);
39897            }
39898            internal_kv.into_right_child()
39899        }
39900    }
39901}
39902//! Collection types.
39903
39904#![stable(feature = "rust1", since = "1.0.0")]
39905
39906pub mod binary_heap;
39907mod btree;
39908pub mod linked_list;
39909pub mod vec_deque;
39910
39911#[stable(feature = "rust1", since = "1.0.0")]
39912pub mod btree_map {
39913    //! A map based on a B-Tree.
39914    #[stable(feature = "rust1", since = "1.0.0")]
39915    pub use super::btree::map::*;
39916}
39917
39918#[stable(feature = "rust1", since = "1.0.0")]
39919pub mod btree_set {
39920    //! A set based on a B-Tree.
39921    #[stable(feature = "rust1", since = "1.0.0")]
39922    pub use super::btree::set::*;
39923}
39924
39925#[stable(feature = "rust1", since = "1.0.0")]
39926#[doc(no_inline)]
39927pub use binary_heap::BinaryHeap;
39928
39929#[stable(feature = "rust1", since = "1.0.0")]
39930#[doc(no_inline)]
39931pub use btree_map::BTreeMap;
39932
39933#[stable(feature = "rust1", since = "1.0.0")]
39934#[doc(no_inline)]
39935pub use btree_set::BTreeSet;
39936
39937#[stable(feature = "rust1", since = "1.0.0")]
39938#[doc(no_inline)]
39939pub use linked_list::LinkedList;
39940
39941#[stable(feature = "rust1", since = "1.0.0")]
39942#[doc(no_inline)]
39943pub use vec_deque::VecDeque;
39944
39945use crate::alloc::{Layout, LayoutError};
39946use core::fmt::Display;
39947
/// The error type for `try_reserve` methods.
#[derive(Clone, PartialEq, Eq, Debug)]
#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
pub enum TryReserveError {
    /// Error due to the computed capacity exceeding the collection's maximum
    /// (usually `isize::MAX` bytes).
    CapacityOverflow,

    /// The memory allocator returned an error.
    AllocError {
        /// The layout of the allocation request that failed.
        layout: Layout,

        #[doc(hidden)]
        #[unstable(
            feature = "container_error_extra",
            issue = "none",
            reason = "\
            Enable exposing the allocator’s custom error value \
            if an associated type is added in the future: \
            https://github.com/rust-lang/wg-allocators/issues/23"
        )]
        non_exhaustive: (),
    },
}
39973
39974#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
39975impl From<LayoutError> for TryReserveError {
39976    #[inline]
39977    fn from(_: LayoutError) -> Self {
39978        TryReserveError::CapacityOverflow
39979    }
39980}
39981
39982#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
39983impl Display for TryReserveError {
39984    fn fmt(
39985        &self,
39986        fmt: &mut core::fmt::Formatter<'_>,
39987    ) -> core::result::Result<(), core::fmt::Error> {
39988        fmt.write_str("memory allocation failed")?;
39989        let reason = match &self {
39990            TryReserveError::CapacityOverflow => {
39991                " because the computed capacity exceeded the collection's maximum"
39992            }
39993            TryReserveError::AllocError { .. } => " because the memory allocator returned a error",
39994        };
39995        fmt.write_str(reason)
39996    }
39997}
39998
39999/// An intermediate trait for specialization of `Extend`.
40000#[doc(hidden)]
40001trait SpecExtend<I: IntoIterator> {
40002    /// Extends `self` with the contents of the given iterator.
40003    fn spec_extend(&mut self, iter: I);
40004}
40005use core::fmt;
40006use core::iter::{FusedIterator, TrustedLen, TrustedRandomAccess};
40007
40008use super::VecDeque;
40009
/// An owning iterator over the elements of a `VecDeque`.
///
/// This `struct` is created by the [`into_iter`] method on [`VecDeque`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`into_iter`]: VecDeque::into_iter
#[derive(Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<T> {
    // The remaining elements; iteration pops from the front of this deque.
    pub(crate) inner: VecDeque<T>,
}
40021
40022#[stable(feature = "collection_debug", since = "1.17.0")]
40023impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
40024    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
40025        f.debug_tuple("IntoIter").field(&self.inner).finish()
40026    }
40027}
40028
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Iterator for IntoIter<T> {
    type Item = T;

    // Yields elements front-to-back by popping the underlying deque.
    #[inline]
    fn next(&mut self) -> Option<T> {
        self.inner.pop_front()
    }

    // Exact bounds: the deque knows precisely how many elements remain.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.inner.len();
        (len, Some(len))
    }

    #[inline]
    unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item
    where
        Self: TrustedRandomAccess,
    {
        // Safety: The TrustedRandomAccess contract requires that callers only pass an index
        // that is in bounds.
        // Additionally Self: TrustedRandomAccess is only implemented for T: Copy which means even
        // multiple repeated reads of the same index would be safe and the
        // values are !Drop, thus won't suffer from double drops.
        unsafe {
            // Translate the logical index into a physical ring-buffer index.
            let idx = self.inner.wrap_add(self.inner.tail, idx);
            self.inner.buffer_read(idx)
        }
    }
}
40060
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> DoubleEndedIterator for IntoIter<T> {
    // Yields elements back-to-front by popping the underlying deque's tail.
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        self.inner.pop_back()
    }
}
40068
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IntoIter<T> {
    // Answered directly by the underlying deque rather than via `size_hint`.
    fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }
}
40075
// Once `next` returns `None`, it keeps returning `None` (the deque stays empty).
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IntoIter<T> {}

// SAFETY: `size_hint` reports the exact remaining length (see `Iterator` impl).
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for IntoIter<T> {}

#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
// T: Copy as approximation for !Drop since get_unchecked does not update the pointers
// and thus we can't implement drop-handling
unsafe impl<T> TrustedRandomAccess for IntoIter<T>
where
    T: Copy,
{
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}
// Generates a `PartialEq<$rhs>` impl for `$lhs` (a `VecDeque`-like type) by
// comparing the deque's two contiguous slices against the matching halves of
// the other collection, without element-by-element indexing.
macro_rules! __impl_slice_eq1 {
    ([$($vars:tt)*] $lhs:ty, $rhs:ty, $($constraints:tt)*) => {
        #[stable(feature = "vec_deque_partial_eq_slice", since = "1.17.0")]
        impl<A, B, $($vars)*> PartialEq<$rhs> for $lhs
        where
            A: PartialEq<B>,
            $($constraints)*
        {
            fn eq(&self, other: &$rhs) -> bool {
                if self.len() != other.len() {
                    return false;
                }
                // `as_slices` exposes the deque's contents as two contiguous
                // runs; compare each against the corresponding part of `other`.
                let (sa, sb) = self.as_slices();
                let (oa, ob) = other[..].split_at(sa.len());
                sa == oa && sb == ob
            }
        }
    }
}
40111use core::iter::FusedIterator;
40112use core::ptr::{self, NonNull};
40113use core::{fmt, mem};
40114
40115use super::{count, Iter, VecDeque};
40116
/// A draining iterator over the elements of a `VecDeque`.
///
/// This `struct` is created by the [`drain`] method on [`VecDeque`]. See its
/// documentation for more.
///
/// [`drain`]: VecDeque::drain
#[stable(feature = "drain", since = "1.6.0")]
pub struct Drain<'a, T: 'a> {
    // Buffer index just past the drained range; used as the drain's head when
    // the `Drop` impl stitches the deque back together.
    pub(crate) after_tail: usize,
    // The deque's original head index, restored by the `Drop` impl.
    pub(crate) after_head: usize,
    // Borrowing iterator over the elements being drained.
    pub(crate) iter: Iter<'a, T>,
    // Back-pointer to the source deque, used for repair on drop.
    pub(crate) deque: NonNull<VecDeque<T>>,
}
40130
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for Drain<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Render as `Drain(after_tail, after_head, iter)`; the raw deque
        // pointer carries no useful Debug information.
        f.debug_tuple("Drain")
            .field(&self.after_tail)
            .field(&self.after_head)
            .field(&self.iter)
            .finish()
    }
}
40141
#[stable(feature = "drain", since = "1.6.0")]
// SAFETY: the `NonNull<VecDeque<T>>` field suppresses the auto traits, but
// `Drain` behaves as an exclusive borrow of the deque, so `T: Sync` / `T: Send`
// are sufficient bounds.
unsafe impl<T: Sync> Sync for Drain<'_, T> {}
#[stable(feature = "drain", since = "1.6.0")]
unsafe impl<T: Send> Send for Drain<'_, T> {}
40146
#[stable(feature = "drain", since = "1.6.0")]
impl<T> Drop for Drain<'_, T> {
    fn drop(&mut self) {
        // Finishes the drain and repairs the source deque when dropped
        // (normally or during unwinding).
        struct DropGuard<'r, 'a, T>(&'r mut Drain<'a, T>);

        impl<'r, 'a, T> Drop for DropGuard<'r, 'a, T> {
            fn drop(&mut self) {
                // Drop any elements that were not yet consumed.
                self.0.for_each(drop);

                let source_deque = unsafe { self.0.deque.as_mut() };

                // T = source_deque_tail; H = source_deque_head; t = drain_tail; h = drain_head
                //
                //        T   t   h   H
                // [. . . o o x x o o . . .]
                //
                let orig_tail = source_deque.tail;
                let drain_tail = source_deque.head;
                let drain_head = self.0.after_tail;
                let orig_head = self.0.after_head;

                // Lengths of the live sections before and after the drained hole.
                let tail_len = count(orig_tail, drain_tail, source_deque.cap());
                let head_len = count(drain_head, orig_head, source_deque.cap());

                // Restore the original head value
                source_deque.head = orig_head;

                // Close the hole by moving whichever surviving section is
                // shorter; the empty cases just adjust the indices.
                match (tail_len, head_len) {
                    (0, 0) => {
                        source_deque.head = 0;
                        source_deque.tail = 0;
                    }
                    (0, _) => {
                        source_deque.tail = drain_head;
                    }
                    (_, 0) => {
                        source_deque.head = drain_tail;
                    }
                    _ => unsafe {
                        if tail_len <= head_len {
                            source_deque.tail = source_deque.wrap_sub(drain_head, tail_len);
                            source_deque.wrap_copy(source_deque.tail, orig_tail, tail_len);
                        } else {
                            source_deque.head = source_deque.wrap_add(drain_tail, head_len);
                            source_deque.wrap_copy(drain_tail, drain_head, head_len);
                        }
                    },
                }
            }
        }

        // Drop the remaining elements one by one. If an element's destructor
        // panics, the armed `DropGuard` still finishes the drain and repairs
        // the deque; on the success path the guard is disarmed via `forget`.
        while let Some(item) = self.next() {
            let guard = DropGuard(self);
            drop(item);
            mem::forget(guard);
        }

        // Everything consumed without panicking: run the repair logic once.
        DropGuard(self);
    }
}
40207
#[stable(feature = "drain", since = "1.6.0")]
impl<T> Iterator for Drain<'_, T> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        // SAFETY: each slot is yielded at most once (the borrowed `iter`
        // advances past it), and the `Drop` impl excises the whole drained
        // range from the deque, so the value is never dropped twice.
        self.iter.next().map(|elt| unsafe { ptr::read(elt) })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}
40222
#[stable(feature = "drain", since = "1.6.0")]
impl<T> DoubleEndedIterator for Drain<'_, T> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        // SAFETY: mirrors `next` — each slot is read out at most once and the
        // `Drop` impl removes the drained range from the deque.
        self.iter.next_back().map(|elt| unsafe { ptr::read(elt) })
    }
}
40230
#[stable(feature = "drain", since = "1.6.0")]
// `len` comes from the default impl via `size_hint`, which `Drain` forwards
// exactly from its inner (exact-size) iterator.
impl<T> ExactSizeIterator for Drain<'_, T> {}
40233
#[stable(feature = "fused", since = "1.26.0")]
// The inner iterator stays exhausted once exhausted, so `Drain` is fused.
impl<T> FusedIterator for Drain<'_, T> {}
40236use core::ptr::{self};
40237
/// Returns the two slices that cover the `VecDeque`'s valid range
pub trait RingSlices: Sized {
    // Sub-range `from..to` of the buffer.
    fn slice(self, from: usize, to: usize) -> Self;
    // Split into `..i` and `i..`.
    fn split_at(self, i: usize) -> (Self, Self);

    fn ring_slices(buf: Self, head: usize, tail: usize) -> (Self, Self) {
        // When `tail <= head` the live elements form one contiguous run.
        let contiguous = tail <= head;
        if contiguous {
            // Split an empty piece off first so both returned values are
            // derived from `buf`; the second slice is always empty here.
            let (empty, buf) = buf.split_at(0);
            (buf.slice(tail, head), empty)
        } else {
            // Wrapped case: `tail..` is the logical front, `..head` the back.
            let (mid, right) = buf.split_at(tail);
            let (left, _) = mid.split_at(head);
            (right, left)
        }
    }
}
40255
impl<T> RingSlices for &[T] {
    fn slice(self, from: usize, to: usize) -> Self {
        // Plain slice indexing; panics if `from..to` is out of bounds.
        &self[from..to]
    }
    fn split_at(self, i: usize) -> (Self, Self) {
        (*self).split_at(i)
    }
}
40264
impl<T> RingSlices for &mut [T] {
    fn slice(self, from: usize, to: usize) -> Self {
        // Mutable slice indexing; panics if `from..to` is out of bounds.
        &mut self[from..to]
    }
    fn split_at(self, i: usize) -> (Self, Self) {
        (*self).split_at_mut(i)
    }
}
40273
40274impl<T> RingSlices for *mut [T] {
40275    fn slice(self, from: usize, to: usize) -> Self {
40276        assert!(from <= to && to < self.len());
40277        // Not using `get_unchecked_mut` to keep this a safe operation.
40278        let len = to - from;
40279        ptr::slice_from_raw_parts_mut(self.as_mut_ptr().wrapping_add(from), len)
40280    }
40281
40282    fn split_at(self, mid: usize) -> (Self, Self) {
40283        let len = self.len();
40284        let ptr = self.as_mut_ptr();
40285        assert!(mid <= len);
40286        (
40287            ptr::slice_from_raw_parts_mut(ptr, mid),
40288            ptr::slice_from_raw_parts_mut(ptr.wrapping_add(mid), len - mid),
40289        )
40290    }
40291}
40292use core::array;
40293use core::cmp::{self};
40294use core::mem::replace;
40295
40296use super::VecDeque;
40297
/// PairSlices pairs up equal length slice parts of two deques
///
/// For example, given deques "A" and "B" with the following division into slices:
///
/// A: [0 1 2] [3 4 5]
/// B: [a b] [c d e]
///
/// It produces the following sequence of matching slices:
///
/// ([0 1], [a b])
/// (\[2\], \[c\])
/// ([3 4], [d e])
///
/// and the uneven remainder of either A or B is skipped.
pub struct PairSlices<'a, 'b, T> {
    // Destination deque's front and back halves (mutable).
    pub(crate) a0: &'a mut [T],
    pub(crate) a1: &'a mut [T],
    // Source deque's front and back halves.
    pub(crate) b0: &'b [T],
    pub(crate) b1: &'b [T],
}
40318
impl<'a, 'b, T> PairSlices<'a, 'b, T> {
    // Pair up `to` (destination, mutable) with `from` (source).
    pub fn from(to: &'a mut VecDeque<T>, from: &'b VecDeque<T>) -> Self {
        let (a0, a1) = to.as_mut_slices();
        let (b0, b1) = from.as_slices();
        PairSlices { a0, a1, b0, b1 }
    }

    // True if the source still has unpaired elements. Iteration promotes `b1`
    // into `b0` whenever `b0` empties, so checking `b0` alone suffices.
    pub fn has_remainder(&self) -> bool {
        !self.b0.is_empty()
    }

    // Whatever is left of the source, as at most two slices.
    pub fn remainder(self) -> impl Iterator<Item = &'b [T]> {
        array::IntoIter::new([self.b0, self.b1])
    }
}
40334
impl<'a, 'b, T> Iterator for PairSlices<'a, 'b, T> {
    type Item = (&'a mut [T], &'b [T]);
    fn next(&mut self) -> Option<Self::Item> {
        // Get next part length
        let part = cmp::min(self.a0.len(), self.b0.len());
        if part == 0 {
            return None;
        }
        // `replace` moves `a0` out by value so we can split it without
        // borrowing the field mutably across the call.
        let (p0, p1) = replace(&mut self.a0, &mut []).split_at_mut(part);
        let (q0, q1) = self.b0.split_at(part);

        // Move a1 into a0, if it's empty (and b1, b0 the same way).
        self.a0 = p1;
        self.b0 = q1;
        if self.a0.is_empty() {
            self.a0 = replace(&mut self.a1, &mut []);
        }
        if self.b0.is_empty() {
            self.b0 = replace(&mut self.b1, &[]);
        }
        Some((p0, q0))
    }
}
40358//! A double-ended queue implemented with a growable ring buffer.
40359//!
40360//! This queue has *O*(1) amortized inserts and removals from both ends of the
40361//! container. It also has *O*(1) indexing like a vector. The contained elements
40362//! are not required to be copyable, and the queue will be sendable if the
40363//! contained type is sendable.
40364
40365#![stable(feature = "rust1", since = "1.0.0")]
40366
40367use core::cmp::{self, Ordering};
40368use core::fmt;
40369use core::hash::{Hash, Hasher};
40370use core::iter::{repeat_with, FromIterator};
40371use core::marker::PhantomData;
40372use core::mem::{self, ManuallyDrop};
40373use core::ops::{Index, IndexMut, Range, RangeBounds};
40374use core::ptr::{self, NonNull};
40375use core::slice;
40376
40377use crate::collections::TryReserveError;
40378use crate::raw_vec::RawVec;
40379use crate::vec::Vec;
40380
40381#[macro_use]
40382mod macros;
40383
40384#[stable(feature = "drain", since = "1.6.0")]
40385pub use self::drain::Drain;
40386
40387mod drain;
40388
40389#[stable(feature = "rust1", since = "1.0.0")]
40390pub use self::iter_mut::IterMut;
40391
40392mod iter_mut;
40393
40394#[stable(feature = "rust1", since = "1.0.0")]
40395pub use self::into_iter::IntoIter;
40396
40397mod into_iter;
40398
40399#[stable(feature = "rust1", since = "1.0.0")]
40400pub use self::iter::Iter;
40401
40402mod iter;
40403
40404use self::pair_slices::PairSlices;
40405
40406mod pair_slices;
40407
40408use self::ring_slices::RingSlices;
40409
40410mod ring_slices;
40411
40412#[cfg(test)]
40413mod tests;
40414
// Capacity used by `VecDeque::new`: an 8-slot buffer, one slot kept empty.
const INITIAL_CAPACITY: usize = 7; // 2^3 - 1
// Smallest buffer ever allocated: 2 slots, one kept empty.
const MINIMUM_CAPACITY: usize = 1; // 2 - 1

// For zero-sized types no storage is needed, so capacity is pinned to the
// largest power of two representable in `usize`.
const MAXIMUM_ZST_CAPACITY: usize = 1 << (usize::BITS - 1); // Largest possible power of two
40419
/// A double-ended queue implemented with a growable ring buffer.
///
/// The "default" usage of this type as a queue is to use [`push_back`] to add to
/// the queue, and [`pop_front`] to remove from the queue. [`extend`] and [`append`]
/// push onto the back in this manner, and iterating over `VecDeque` goes front
/// to back.
///
/// Since `VecDeque` is a ring buffer, its elements are not necessarily contiguous
/// in memory. If you want to access the elements as a single slice, such as for
/// efficient sorting, you can use [`make_contiguous`]. It rotates the `VecDeque`
/// so that its elements do not wrap, and returns a mutable slice to the
/// now-contiguous element sequence.
///
/// [`push_back`]: VecDeque::push_back
/// [`pop_front`]: VecDeque::pop_front
/// [`extend`]: VecDeque::extend
/// [`append`]: VecDeque::append
/// [`make_contiguous`]: VecDeque::make_contiguous
#[cfg_attr(not(test), rustc_diagnostic_item = "vecdeque_type")]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct VecDeque<T> {
    // tail and head are pointers into the buffer. Tail always points
    // to the first element that could be read, Head always points
    // to where data should be written.
    // If tail == head the buffer is empty. The length of the ringbuffer
    // is defined as the distance between the two.
    //
    // Invariant: the buffer capacity is always a power of two (debug-asserted
    // in `handle_capacity_increase`).
    tail: usize,
    head: usize,
    buf: RawVec<T>,
}
40450
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone> Clone for VecDeque<T> {
    fn clone(&self) -> VecDeque<T> {
        self.iter().cloned().collect()
    }

    fn clone_from(&mut self, other: &Self) {
        // Reuse existing elements/allocation: drop any surplus first, then
        // clone over the overlapping parts slice by slice.
        self.truncate(other.len());

        // `while let` (not a `for` loop) keeps `iter` alive so that
        // `has_remainder`/`remainder` can still be called afterwards.
        let mut iter = PairSlices::from(self, other);
        while let Some((dst, src)) = iter.next() {
            dst.clone_from_slice(&src);
        }

        // `other` was longer than `self`: clone the excess onto the back.
        if iter.has_remainder() {
            for remainder in iter.remainder() {
                self.extend(remainder.iter().cloned());
            }
        }
    }
}
40472
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T> Drop for VecDeque<T> {
    fn drop(&mut self) {
        /// Runs the destructor for all items in the slice when it gets dropped (normally or
        /// during unwinding).
        struct Dropper<'a, T>(&'a mut [T]);

        impl<'a, T> Drop for Dropper<'a, T> {
            fn drop(&mut self) {
                unsafe {
                    ptr::drop_in_place(self.0);
                }
            }
        }

        // The guard ensures the back half is dropped even if dropping an
        // element of the front half panics.
        let (front, back) = self.as_mut_slices();
        unsafe {
            let _back_dropper = Dropper(back);
            // use drop for [T]
            ptr::drop_in_place(front);
        }
        // RawVec handles deallocation
    }
}
40497
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Default for VecDeque<T> {
    /// Creates an empty `VecDeque<T>`.
    #[inline]
    fn default() -> VecDeque<T> {
        // Same as `VecDeque::new()`: empty, with a small initial buffer.
        VecDeque::new()
    }
}
40506
impl<T> VecDeque<T> {
    /// Marginally more convenient
    #[inline]
    fn ptr(&self) -> *mut T {
        self.buf.ptr()
    }

    /// Marginally more convenient
    #[inline]
    fn cap(&self) -> usize {
        if mem::size_of::<T>() == 0 {
            // For zero sized types, we are always at maximum capacity
            MAXIMUM_ZST_CAPACITY
        } else {
            self.buf.capacity()
        }
    }

    /// Turn ptr into a slice
    ///
    /// NOTE(review): the slice spans the entire capacity; slots outside the
    /// occupied `tail..head` region are vacant and must not be read.
    #[inline]
    unsafe fn buffer_as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.ptr(), self.cap()) }
    }

    /// Turn ptr into a mut slice
    ///
    /// NOTE(review): same caveat as `buffer_as_slice` — only the occupied
    /// region holds initialized elements.
    #[inline]
    unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] {
        unsafe { slice::from_raw_parts_mut(self.ptr(), self.cap()) }
    }

    /// Moves an element out of the buffer
    ///
    /// Caller must guarantee `off` holds an initialized element that will not
    /// be read again: the slot is logically vacated by this call.
    #[inline]
    unsafe fn buffer_read(&mut self, off: usize) -> T {
        unsafe { ptr::read(self.ptr().add(off)) }
    }

    /// Writes an element into the buffer, moving it.
    ///
    /// Caller must guarantee `off` is in-bounds and that the slot's previous
    /// occupant (if any) has already been moved out or dropped.
    #[inline]
    unsafe fn buffer_write(&mut self, off: usize, value: T) {
        unsafe {
            ptr::write(self.ptr().add(off), value);
        }
    }

    /// Returns `true` if the buffer is at full capacity.
    #[inline]
    fn is_full(&self) -> bool {
        // One slot always stays empty, so "full" means len == cap - 1.
        self.cap() - self.len() == 1
    }

    /// Returns the index in the underlying buffer for a given logical element
    /// index.
    #[inline]
    fn wrap_index(&self, idx: usize) -> usize {
        wrap_index(idx, self.cap())
    }

    /// Returns the index in the underlying buffer for a given logical element
    /// index + addend.
    #[inline]
    fn wrap_add(&self, idx: usize, addend: usize) -> usize {
        wrap_index(idx.wrapping_add(addend), self.cap())
    }

    /// Returns the index in the underlying buffer for a given logical element
    /// index - subtrahend.
    #[inline]
    fn wrap_sub(&self, idx: usize, subtrahend: usize) -> usize {
        // `wrapping_sub` may underflow; the free `wrap_index` maps the result
        // back into `0..cap`.
        wrap_index(idx.wrapping_sub(subtrahend), self.cap())
    }

    /// Copies a contiguous block of memory len long from src to dst
    ///
    /// Overlapping ranges are allowed (`ptr::copy` has memmove semantics).
    #[inline]
    unsafe fn copy(&self, dst: usize, src: usize, len: usize) {
        debug_assert!(
            dst + len <= self.cap(),
            "cpy dst={} src={} len={} cap={}",
            dst,
            src,
            len,
            self.cap()
        );
        debug_assert!(
            src + len <= self.cap(),
            "cpy dst={} src={} len={} cap={}",
            dst,
            src,
            len,
            self.cap()
        );
        unsafe {
            ptr::copy(self.ptr().add(src), self.ptr().add(dst), len);
        }
    }

    /// Copies a contiguous block of memory len long from src to dst
    ///
    /// Unlike `copy`, the source and destination ranges must not overlap.
    #[inline]
    unsafe fn copy_nonoverlapping(&self, dst: usize, src: usize, len: usize) {
        debug_assert!(
            dst + len <= self.cap(),
            "cno dst={} src={} len={} cap={}",
            dst,
            src,
            len,
            self.cap()
        );
        debug_assert!(
            src + len <= self.cap(),
            "cno dst={} src={} len={} cap={}",
            dst,
            src,
            len,
            self.cap()
        );
        unsafe {
            ptr::copy_nonoverlapping(self.ptr().add(src), self.ptr().add(dst), len);
        }
    }

    /// Copies a potentially wrapping block of memory len long from src to dest.
    /// (abs(dst - src) + len) must be no larger than cap() (There must be at
    /// most one continuous overlapping region between src and dest).
    unsafe fn wrap_copy(&self, dst: usize, src: usize, len: usize) {
        #[allow(dead_code)]
        fn diff(a: usize, b: usize) -> usize {
            if a <= b { b - a } else { a - b }
        }
        debug_assert!(
            cmp::min(diff(dst, src), self.cap() - diff(dst, src)) + len <= self.cap(),
            "wrc dst={} src={} len={} cap={}",
            dst,
            src,
            len,
            self.cap()
        );

        if src == dst || len == 0 {
            return;
        }

        // Whether `dst` falls within `len` slots after `src` (ring distance);
        // this determines which piece must be copied first so that source
        // data is never clobbered before it has been copied.
        let dst_after_src = self.wrap_sub(dst, src) < len;

        let src_pre_wrap_len = self.cap() - src;
        let dst_pre_wrap_len = self.cap() - dst;
        let src_wraps = src_pre_wrap_len < len;
        let dst_wraps = dst_pre_wrap_len < len;

        // The diagrams show buffer states; `S`/`D` mark src/dst starts.
        match (dst_after_src, src_wraps, dst_wraps) {
            (_, false, false) => {
                // src doesn't wrap, dst doesn't wrap
                //
                //        S . . .
                // 1 [_ _ A A B B C C _]
                // 2 [_ _ A A A A B B _]
                //            D . . .
                //
                unsafe {
                    self.copy(dst, src, len);
                }
            }
            (false, false, true) => {
                // dst before src, src doesn't wrap, dst wraps
                //
                //    S . . .
                // 1 [A A B B _ _ _ C C]
                // 2 [A A B B _ _ _ A A]
                // 3 [B B B B _ _ _ A A]
                //    . .           D .
                //
                unsafe {
                    self.copy(dst, src, dst_pre_wrap_len);
                    self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
                }
            }
            (true, false, true) => {
                // src before dst, src doesn't wrap, dst wraps
                //
                //              S . . .
                // 1 [C C _ _ _ A A B B]
                // 2 [B B _ _ _ A A B B]
                // 3 [B B _ _ _ A A A A]
                //    . .           D .
                //
                unsafe {
                    self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
                    self.copy(dst, src, dst_pre_wrap_len);
                }
            }
            (false, true, false) => {
                // dst before src, src wraps, dst doesn't wrap
                //
                //    . .           S .
                // 1 [C C _ _ _ A A B B]
                // 2 [C C _ _ _ B B B B]
                // 3 [C C _ _ _ B B C C]
                //              D . . .
                //
                unsafe {
                    self.copy(dst, src, src_pre_wrap_len);
                    self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
                }
            }
            (true, true, false) => {
                // src before dst, src wraps, dst doesn't wrap
                //
                //    . .           S .
                // 1 [A A B B _ _ _ C C]
                // 2 [A A A A _ _ _ C C]
                // 3 [C C A A _ _ _ C C]
                //    D . . .
                //
                unsafe {
                    self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
                    self.copy(dst, src, src_pre_wrap_len);
                }
            }
            (false, true, true) => {
                // dst before src, src wraps, dst wraps
                //
                //    . . .         S .
                // 1 [A B C D _ E F G H]
                // 2 [A B C D _ E G H H]
                // 3 [A B C D _ E G H A]
                // 4 [B C C D _ E G H A]
                //    . .         D . .
                //
                debug_assert!(dst_pre_wrap_len > src_pre_wrap_len);
                let delta = dst_pre_wrap_len - src_pre_wrap_len;
                unsafe {
                    self.copy(dst, src, src_pre_wrap_len);
                    self.copy(dst + src_pre_wrap_len, 0, delta);
                    self.copy(0, delta, len - dst_pre_wrap_len);
                }
            }
            (true, true, true) => {
                // src before dst, src wraps, dst wraps
                //
                //    . .         S . .
                // 1 [A B C D _ E F G H]
                // 2 [A A B D _ E F G H]
                // 3 [H A B D _ E F G H]
                // 4 [H A B D _ E F F G]
                //    . . .         D .
                //
                debug_assert!(src_pre_wrap_len > dst_pre_wrap_len);
                let delta = src_pre_wrap_len - dst_pre_wrap_len;
                unsafe {
                    self.copy(delta, 0, len - src_pre_wrap_len);
                    self.copy(0, self.cap() - delta, delta);
                    self.copy(dst, src, dst_pre_wrap_len);
                }
            }
        }
    }

    /// Frobs the head and tail sections around to handle the fact that we
    /// just reallocated. Unsafe because it trusts old_capacity.
    ///
    /// Caller must guarantee `old_capacity` is the capacity the buffer had
    /// before the reallocation that just happened.
    #[inline]
    unsafe fn handle_capacity_increase(&mut self, old_capacity: usize) {
        let new_capacity = self.cap();

        // Move the shortest contiguous section of the ring buffer
        //    T             H
        //   [o o o o o o o . ]
        //    T             H
        // A [o o o o o o o . . . . . . . . . ]
        //        H T
        //   [o o . o o o o o ]
        //          T             H
        // B [. . . o o o o o o o . . . . . . ]
        //              H T
        //   [o o o o o . o o ]
        //              H                 T
        // C [o o o o o . . . . . . . . . o o ]

        if self.tail <= self.head {
            // A
            // Nop
        } else if self.head < old_capacity - self.tail {
            // B
            unsafe {
                self.copy_nonoverlapping(old_capacity, 0, self.head);
            }
            self.head += old_capacity;
            debug_assert!(self.head > self.tail);
        } else {
            // C
            let new_tail = new_capacity - (old_capacity - self.tail);
            unsafe {
                self.copy_nonoverlapping(new_tail, self.tail, old_capacity - self.tail);
            }
            self.tail = new_tail;
            debug_assert!(self.head < self.tail);
        }
        debug_assert!(self.head < self.cap());
        debug_assert!(self.tail < self.cap());
        debug_assert!(self.cap().count_ones() == 1);
    }
}
40806
40807impl<T> VecDeque<T> {
    /// Creates an empty `VecDeque`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let vector: VecDeque<u32> = VecDeque::new();
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new() -> VecDeque<T> {
        // Note: allocates eagerly with room for `INITIAL_CAPACITY` elements.
        VecDeque::with_capacity(INITIAL_CAPACITY)
    }
40821
    /// Creates an empty `VecDeque` with space for at least `capacity` elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let vector: VecDeque<u32> = VecDeque::with_capacity(10);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn with_capacity(capacity: usize) -> VecDeque<T> {
        // +1 since the ringbuffer always leaves one space empty
        let cap = cmp::max(capacity + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
        // `next_power_of_two` wraps to 0 on overflow in release builds, and
        // `capacity + 1` may wrap too; either way `cap > capacity` fails and
        // this assert turns it into an explicit panic.
        assert!(cap > capacity, "capacity overflow");

        VecDeque { tail: 0, head: 0, buf: RawVec::with_capacity(cap) }
    }
40839
40840    /// Provides a reference to the element at the given index.
40841    ///
40842    /// Element at index 0 is the front of the queue.
40843    ///
40844    /// # Examples
40845    ///
40846    /// ```
40847    /// use std::collections::VecDeque;
40848    ///
40849    /// let mut buf = VecDeque::new();
40850    /// buf.push_back(3);
40851    /// buf.push_back(4);
40852    /// buf.push_back(5);
40853    /// assert_eq!(buf.get(1), Some(&4));
40854    /// ```
40855    #[stable(feature = "rust1", since = "1.0.0")]
40856    pub fn get(&self, index: usize) -> Option<&T> {
40857        if index < self.len() {
40858            let idx = self.wrap_add(self.tail, index);
40859            unsafe { Some(&*self.ptr().add(idx)) }
40860        } else {
40861            None
40862        }
40863    }
40864
40865    /// Provides a mutable reference to the element at the given index.
40866    ///
40867    /// Element at index 0 is the front of the queue.
40868    ///
40869    /// # Examples
40870    ///
40871    /// ```
40872    /// use std::collections::VecDeque;
40873    ///
40874    /// let mut buf = VecDeque::new();
40875    /// buf.push_back(3);
40876    /// buf.push_back(4);
40877    /// buf.push_back(5);
40878    /// if let Some(elem) = buf.get_mut(1) {
40879    ///     *elem = 7;
40880    /// }
40881    ///
40882    /// assert_eq!(buf[1], 7);
40883    /// ```
40884    #[stable(feature = "rust1", since = "1.0.0")]
40885    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
40886        if index < self.len() {
40887            let idx = self.wrap_add(self.tail, index);
40888            unsafe { Some(&mut *self.ptr().add(idx)) }
40889        } else {
40890            None
40891        }
40892    }
40893
    /// Swaps elements at indices `i` and `j`.
    ///
    /// `i` and `j` may be equal.
    ///
    /// Element at index 0 is the front of the queue.
    ///
    /// # Panics
    ///
    /// Panics if either index is out of bounds.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::new();
    /// buf.push_back(3);
    /// buf.push_back(4);
    /// buf.push_back(5);
    /// assert_eq!(buf, [3, 4, 5]);
    /// buf.swap(0, 2);
    /// assert_eq!(buf, [5, 4, 3]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn swap(&mut self, i: usize, j: usize) {
        assert!(i < self.len());
        assert!(j < self.len());
        let ri = self.wrap_add(self.tail, i);
        let rj = self.wrap_add(self.tail, j);
        // SAFETY: both indices were bounds-checked above, so `ri`/`rj` address
        // initialized elements inside the buffer. `ptr::swap` permits ri == rj.
        unsafe { ptr::swap(self.ptr().add(ri), self.ptr().add(rj)) }
    }
40925
    /// Returns the number of elements the `VecDeque` can hold without
    /// reallocating.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let buf: VecDeque<i32> = VecDeque::with_capacity(10);
    /// assert!(buf.capacity() >= 10);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn capacity(&self) -> usize {
        // One buffer slot is always kept empty to distinguish "full" from
        // "empty", so the usable capacity is one less than the buffer size.
        self.cap() - 1
    }
40942
    /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
    /// given `VecDeque`. Does nothing if the capacity is already sufficient.
    ///
    /// Note that the allocator may give the collection more space than it requests. Therefore
    /// capacity can not be relied upon to be precisely minimal. Prefer [`reserve`] if future
    /// insertions are expected.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows `usize`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect();
    /// buf.reserve_exact(10);
    /// assert!(buf.capacity() >= 11);
    /// ```
    ///
    /// [`reserve`]: VecDeque::reserve
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn reserve_exact(&mut self, additional: usize) {
        // The buffer is kept at power-of-two sizes, so an "exact" reservation
        // cannot do any better than `reserve`.
        self.reserve(additional);
    }
40969
    /// Reserves capacity for at least `additional` more elements to be inserted in the given
    /// `VecDeque`. The collection may reserve more space to avoid frequent reallocations.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows `usize`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect();
    /// buf.reserve(10);
    /// assert!(buf.capacity() >= 11);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn reserve(&mut self, additional: usize) {
        let old_cap = self.cap();
        // `+ 1` accounts for the slot that is always kept empty.
        let used_cap = self.len() + 1;
        // Checked arithmetic: both the addition and the round-up to a power of
        // two can overflow, and either turns into the "capacity overflow" panic.
        let new_cap = used_cap
            .checked_add(additional)
            .and_then(|needed_cap| needed_cap.checked_next_power_of_two())
            .expect("capacity overflow");

        if new_cap > old_cap {
            self.buf.reserve_exact(used_cap, new_cap - used_cap);
            // SAFETY: `old_cap` is the capacity the buffer really had before
            // the grow above, as `handle_capacity_increase` requires.
            unsafe {
                self.handle_capacity_increase(old_cap);
            }
        }
    }
41002
    /// Tries to reserve the minimum capacity for exactly `additional` more elements to
    /// be inserted in the given `VecDeque<T>`. After calling `try_reserve_exact`,
    /// capacity will be greater than or equal to `self.len() + additional`.
    /// Does nothing if the capacity is already sufficient.
    ///
    /// Note that the allocator may give the collection more space than it
    /// requests. Therefore, capacity can not be relied upon to be precisely
    /// minimal. Prefer `reserve` if future insertions are expected.
    ///
    /// # Errors
    ///
    /// If the capacity overflows `usize`, or the allocator reports a failure, then an error
    /// is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(try_reserve)]
    /// use std::collections::TryReserveError;
    /// use std::collections::VecDeque;
    ///
    /// fn process_data(data: &[u32]) -> Result<VecDeque<u32>, TryReserveError> {
    ///     let mut output = VecDeque::new();
    ///
    ///     // Pre-reserve the memory, exiting if we can't
    ///     output.try_reserve_exact(data.len())?;
    ///
    ///     // Now we know this can't OOM(Out-Of-Memory) in the middle of our complex work
    ///     output.extend(data.iter().map(|&val| {
    ///         val * 2 + 5 // very complicated
    ///     }));
    ///
    ///     Ok(output)
    /// }
    /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
    /// ```
    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
        // Capacity is always a power of two, so an "exact" reservation cannot
        // do better than the ordinary one; delegate (mirrors `reserve_exact`).
        self.try_reserve(additional)
    }
41043
    /// Tries to reserve capacity for at least `additional` more elements to be inserted
    /// in the given `VecDeque<T>`. The collection may reserve more space to avoid
    /// frequent reallocations. After calling `try_reserve`, capacity will be
    /// greater than or equal to `self.len() + additional`. Does nothing if
    /// capacity is already sufficient.
    ///
    /// # Errors
    ///
    /// If the capacity overflows `usize`, or the allocator reports a failure, then an error
    /// is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(try_reserve)]
    /// use std::collections::TryReserveError;
    /// use std::collections::VecDeque;
    ///
    /// fn process_data(data: &[u32]) -> Result<VecDeque<u32>, TryReserveError> {
    ///     let mut output = VecDeque::new();
    ///
    ///     // Pre-reserve the memory, exiting if we can't
    ///     output.try_reserve(data.len())?;
    ///
    ///     // Now we know this can't OOM in the middle of our complex work
    ///     output.extend(data.iter().map(|&val| {
    ///         val * 2 + 5 // very complicated
    ///     }));
    ///
    ///     Ok(output)
    /// }
    /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
    /// ```
    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
        // Fallible twin of `reserve`: same capacity computation, but overflow
        // and allocation failure surface as `Err` instead of a panic/abort.
        let old_cap = self.cap();
        // +1 for the always-empty sentinel slot of the ring buffer.
        let used_cap = self.len() + 1;
        let new_cap = used_cap
            .checked_add(additional)
            .and_then(|needed_cap| needed_cap.checked_next_power_of_two())
            .ok_or(TryReserveError::CapacityOverflow)?;

        if new_cap > old_cap {
            self.buf.try_reserve_exact(used_cap, new_cap - used_cap)?;
            // SAFETY: the buffer has just grown from `old_cap`, which is the
            // precondition of `handle_capacity_increase`.
            unsafe {
                self.handle_capacity_increase(old_cap);
            }
        }
        Ok(())
    }
41094
    /// Shrinks the capacity of the `VecDeque` as much as possible.
    ///
    /// It will drop down as close as possible to the length but the allocator may still inform the
    /// `VecDeque` that there is space for a few more elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::with_capacity(15);
    /// buf.extend(0..4);
    /// assert_eq!(buf.capacity(), 15);
    /// buf.shrink_to_fit();
    /// assert!(buf.capacity() >= 4);
    /// ```
    #[stable(feature = "deque_extras_15", since = "1.5.0")]
    pub fn shrink_to_fit(&mut self) {
        // A lower bound of 0 lets `shrink_to` clamp the target to the
        // current length (plus the ring buffer's invariants).
        self.shrink_to(0);
    }
41115
    /// Shrinks the capacity of the `VecDeque` with a lower bound.
    ///
    /// The capacity will remain at least as large as both the length
    /// and the supplied value.
    ///
    /// If the current capacity is less than the lower limit, this is a no-op.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(shrink_to)]
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::with_capacity(15);
    /// buf.extend(0..4);
    /// assert_eq!(buf.capacity(), 15);
    /// buf.shrink_to(6);
    /// assert!(buf.capacity() >= 6);
    /// buf.shrink_to(0);
    /// assert!(buf.capacity() >= 4);
    /// ```
    #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
    pub fn shrink_to(&mut self, min_capacity: usize) {
        // Asking for more capacity than we have is a no-op; clamp down.
        let min_capacity = cmp::min(min_capacity, self.capacity());
        // We don't have to worry about an overflow as neither `self.len()` nor `self.capacity()`
        // can ever be `usize::MAX`. +1 as the ringbuffer always leaves one space empty.
        // The target is rounded up to a power of two to preserve the buffer's
        // capacity invariant (asserted at the bottom of this function).
        let target_cap = cmp::max(cmp::max(min_capacity, self.len()) + 1, MINIMUM_CAPACITY + 1)
            .next_power_of_two();

        if target_cap < self.cap() {
            // There are three cases of interest:
            //   All elements are out of desired bounds
            //   Elements are contiguous, and head is out of desired bounds
            //   Elements are discontiguous, and tail is out of desired bounds
            //
            // At all other times, element positions are unaffected.
            //
            // Indicates that elements at the head should be moved.
            // NOTE(review): `head == 0` is treated as "outside" because the head
            // cursor has wrapped past the buffer end — presumably so the wrapped
            // run is relocated before the wrap point moves; confirm against the
            // diagrams below.
            let head_outside = self.head == 0 || self.head >= target_cap;
            // Move elements from out of desired bounds (positions after target_cap)
            if self.tail >= target_cap && head_outside {
                //                    T             H
                //   [. . . . . . . . o o o o o o o . ]
                //    T             H
                //   [o o o o o o o . ]
                unsafe {
                    self.copy_nonoverlapping(0, self.tail, self.len());
                }
                self.head = self.len();
                self.tail = 0;
            } else if self.tail != 0 && self.tail < target_cap && head_outside {
                //          T             H
                //   [. . . o o o o o o o . . . . . . ]
                //        H T
                //   [o o . o o o o o ]
                let len = self.wrap_sub(self.head, target_cap);
                unsafe {
                    self.copy_nonoverlapping(0, target_cap, len);
                }
                self.head = len;
                debug_assert!(self.head < self.tail);
            } else if self.tail >= target_cap {
                //              H                 T
                //   [o o o o o . . . . . . . . . o o ]
                //              H T
                //   [o o o o o . o o ]
                debug_assert!(self.wrap_sub(self.head, 1) < target_cap);
                let len = self.cap() - self.tail;
                let new_tail = target_cap - len;
                unsafe {
                    self.copy_nonoverlapping(new_tail, self.tail, len);
                }
                self.tail = new_tail;
                debug_assert!(self.head < self.tail);
            }

            // All live elements now sit below `target_cap`; safe to release
            // the excess allocation.
            self.buf.shrink_to_fit(target_cap);

            debug_assert!(self.head < self.cap());
            debug_assert!(self.tail < self.cap());
            debug_assert!(self.cap().count_ones() == 1);
        }
    }
41199
    /// Shortens the `VecDeque`, keeping the first `len` elements and dropping
    /// the rest.
    ///
    /// If `len` is greater than the `VecDeque`'s current length, this has no
    /// effect.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::new();
    /// buf.push_back(5);
    /// buf.push_back(10);
    /// buf.push_back(15);
    /// assert_eq!(buf, [5, 10, 15]);
    /// buf.truncate(1);
    /// assert_eq!(buf, [5]);
    /// ```
    #[stable(feature = "deque_extras", since = "1.16.0")]
    pub fn truncate(&mut self, len: usize) {
        /// Runs the destructor for all items in the slice when it gets dropped (normally or
        /// during unwinding).
        struct Dropper<'a, T>(&'a mut [T]);

        impl<'a, T> Drop for Dropper<'a, T> {
            fn drop(&mut self) {
                unsafe {
                    ptr::drop_in_place(self.0);
                }
            }
        }

        // Safe because:
        //
        // * Any slice passed to `drop_in_place` is valid; the second case has
        //   `len <= front.len()` and returning on `len > self.len()` ensures
        //   `begin <= back.len()` in the first case
        // * The head of the VecDeque is moved before calling `drop_in_place`,
        //   so no value is dropped twice if `drop_in_place` panics
        unsafe {
            // Nothing to drop when the requested length covers everything.
            if len > self.len() {
                return;
            }
            let num_dropped = self.len() - len;
            // The elements to drop live in the tail end of the (up to two)
            // physical slices.
            let (front, back) = self.as_mut_slices();
            if len > front.len() {
                // Only part of `back` is dropped; `front` survives intact.
                let begin = len - front.len();
                let drop_back = back.get_unchecked_mut(begin..) as *mut _;
                self.head = self.wrap_sub(self.head, num_dropped);
                ptr::drop_in_place(drop_back);
            } else {
                // All of `back` plus the end of `front` are dropped.
                let drop_back = back as *mut _;
                let drop_front = front.get_unchecked_mut(len..) as *mut _;
                self.head = self.wrap_sub(self.head, num_dropped);

                // Make sure the second half is dropped even when a destructor
                // in the first one panics.
                let _back_dropper = Dropper(&mut *drop_back);
                ptr::drop_in_place(drop_front);
            }
        }
    }
41263
41264    /// Returns a front-to-back iterator.
41265    ///
41266    /// # Examples
41267    ///
41268    /// ```
41269    /// use std::collections::VecDeque;
41270    ///
41271    /// let mut buf = VecDeque::new();
41272    /// buf.push_back(5);
41273    /// buf.push_back(3);
41274    /// buf.push_back(4);
41275    /// let b: &[_] = &[&5, &3, &4];
41276    /// let c: Vec<&i32> = buf.iter().collect();
41277    /// assert_eq!(&c[..], b);
41278    /// ```
41279    #[stable(feature = "rust1", since = "1.0.0")]
41280    pub fn iter(&self) -> Iter<'_, T> {
41281        Iter { tail: self.tail, head: self.head, ring: unsafe { self.buffer_as_slice() } }
41282    }
41283
41284    /// Returns a front-to-back iterator that returns mutable references.
41285    ///
41286    /// # Examples
41287    ///
41288    /// ```
41289    /// use std::collections::VecDeque;
41290    ///
41291    /// let mut buf = VecDeque::new();
41292    /// buf.push_back(5);
41293    /// buf.push_back(3);
41294    /// buf.push_back(4);
41295    /// for num in buf.iter_mut() {
41296    ///     *num = *num - 2;
41297    /// }
41298    /// let b: &[_] = &[&mut 3, &mut 1, &mut 2];
41299    /// assert_eq!(&buf.iter_mut().collect::<Vec<&mut i32>>()[..], b);
41300    /// ```
41301    #[stable(feature = "rust1", since = "1.0.0")]
41302    pub fn iter_mut(&mut self) -> IterMut<'_, T> {
41303        // SAFETY: The internal `IterMut` safety invariant is established because the
41304        // `ring` we create is a dereferencable slice for lifetime '_.
41305        IterMut {
41306            tail: self.tail,
41307            head: self.head,
41308            ring: ptr::slice_from_raw_parts_mut(self.ptr(), self.cap()),
41309            phantom: PhantomData,
41310        }
41311    }
41312
41313    /// Returns a pair of slices which contain, in order, the contents of the
41314    /// `VecDeque`.
41315    ///
41316    /// If [`make_contiguous`] was previously called, all elements of the
41317    /// `VecDeque` will be in the first slice and the second slice will be empty.
41318    ///
41319    /// [`make_contiguous`]: VecDeque::make_contiguous
41320    ///
41321    /// # Examples
41322    ///
41323    /// ```
41324    /// use std::collections::VecDeque;
41325    ///
41326    /// let mut vector = VecDeque::new();
41327    ///
41328    /// vector.push_back(0);
41329    /// vector.push_back(1);
41330    /// vector.push_back(2);
41331    ///
41332    /// assert_eq!(vector.as_slices(), (&[0, 1, 2][..], &[][..]));
41333    ///
41334    /// vector.push_front(10);
41335    /// vector.push_front(9);
41336    ///
41337    /// assert_eq!(vector.as_slices(), (&[9, 10][..], &[0, 1, 2][..]));
41338    /// ```
41339    #[inline]
41340    #[stable(feature = "deque_extras_15", since = "1.5.0")]
41341    pub fn as_slices(&self) -> (&[T], &[T]) {
41342        unsafe {
41343            let buf = self.buffer_as_slice();
41344            RingSlices::ring_slices(buf, self.head, self.tail)
41345        }
41346    }
41347
41348    /// Returns a pair of slices which contain, in order, the contents of the
41349    /// `VecDeque`.
41350    ///
41351    /// If [`make_contiguous`] was previously called, all elements of the
41352    /// `VecDeque` will be in the first slice and the second slice will be empty.
41353    ///
41354    /// [`make_contiguous`]: VecDeque::make_contiguous
41355    ///
41356    /// # Examples
41357    ///
41358    /// ```
41359    /// use std::collections::VecDeque;
41360    ///
41361    /// let mut vector = VecDeque::new();
41362    ///
41363    /// vector.push_back(0);
41364    /// vector.push_back(1);
41365    ///
41366    /// vector.push_front(10);
41367    /// vector.push_front(9);
41368    ///
41369    /// vector.as_mut_slices().0[0] = 42;
41370    /// vector.as_mut_slices().1[0] = 24;
41371    /// assert_eq!(vector.as_slices(), (&[42, 10][..], &[24, 1][..]));
41372    /// ```
41373    #[inline]
41374    #[stable(feature = "deque_extras_15", since = "1.5.0")]
41375    pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) {
41376        unsafe {
41377            let head = self.head;
41378            let tail = self.tail;
41379            let buf = self.buffer_as_mut_slice();
41380            RingSlices::ring_slices(buf, head, tail)
41381        }
41382    }
41383
41384    /// Returns the number of elements in the `VecDeque`.
41385    ///
41386    /// # Examples
41387    ///
41388    /// ```
41389    /// use std::collections::VecDeque;
41390    ///
41391    /// let mut v = VecDeque::new();
41392    /// assert_eq!(v.len(), 0);
41393    /// v.push_back(1);
41394    /// assert_eq!(v.len(), 1);
41395    /// ```
41396    #[doc(alias = "length")]
41397    #[stable(feature = "rust1", since = "1.0.0")]
41398    pub fn len(&self) -> usize {
41399        count(self.tail, self.head, self.cap())
41400    }
41401
41402    /// Returns `true` if the `VecDeque` is empty.
41403    ///
41404    /// # Examples
41405    ///
41406    /// ```
41407    /// use std::collections::VecDeque;
41408    ///
41409    /// let mut v = VecDeque::new();
41410    /// assert!(v.is_empty());
41411    /// v.push_front(1);
41412    /// assert!(!v.is_empty());
41413    /// ```
41414    #[stable(feature = "rust1", since = "1.0.0")]
41415    pub fn is_empty(&self) -> bool {
41416        self.tail == self.head
41417    }
41418
41419    fn range_tail_head<R>(&self, range: R) -> (usize, usize)
41420    where
41421        R: RangeBounds<usize>,
41422    {
41423        let Range { start, end } = slice::range(range, ..self.len());
41424        let tail = self.wrap_add(self.tail, start);
41425        let head = self.wrap_add(self.tail, end);
41426        (tail, head)
41427    }
41428
41429    /// Creates an iterator that covers the specified range in the `VecDeque`.
41430    ///
41431    /// # Panics
41432    ///
41433    /// Panics if the starting point is greater than the end point or if
41434    /// the end point is greater than the length of the vector.
41435    ///
41436    /// # Examples
41437    ///
41438    /// ```
41439    /// use std::collections::VecDeque;
41440    ///
41441    /// let v: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
41442    /// let range = v.range(2..).copied().collect::<VecDeque<_>>();
41443    /// assert_eq!(range, [3]);
41444    ///
41445    /// // A full range covers all contents
41446    /// let all = v.range(..);
41447    /// assert_eq!(all.len(), 3);
41448    /// ```
41449    #[inline]
41450    #[stable(feature = "deque_range", since = "1.51.0")]
41451    pub fn range<R>(&self, range: R) -> Iter<'_, T>
41452    where
41453        R: RangeBounds<usize>,
41454    {
41455        let (tail, head) = self.range_tail_head(range);
41456        Iter {
41457            tail,
41458            head,
41459            // The shared reference we have in &self is maintained in the '_ of Iter.
41460            ring: unsafe { self.buffer_as_slice() },
41461        }
41462    }
41463
41464    /// Creates an iterator that covers the specified mutable range in the `VecDeque`.
41465    ///
41466    /// # Panics
41467    ///
41468    /// Panics if the starting point is greater than the end point or if
41469    /// the end point is greater than the length of the vector.
41470    ///
41471    /// # Examples
41472    ///
41473    /// ```
41474    /// use std::collections::VecDeque;
41475    ///
41476    /// let mut v: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
41477    /// for v in v.range_mut(2..) {
41478    ///   *v *= 2;
41479    /// }
41480    /// assert_eq!(v, vec![1, 2, 6]);
41481    ///
41482    /// // A full range covers all contents
41483    /// for v in v.range_mut(..) {
41484    ///   *v *= 2;
41485    /// }
41486    /// assert_eq!(v, vec![2, 4, 12]);
41487    /// ```
41488    #[inline]
41489    #[stable(feature = "deque_range", since = "1.51.0")]
41490    pub fn range_mut<R>(&mut self, range: R) -> IterMut<'_, T>
41491    where
41492        R: RangeBounds<usize>,
41493    {
41494        let (tail, head) = self.range_tail_head(range);
41495
41496        // SAFETY: The internal `IterMut` safety invariant is established because the
41497        // `ring` we create is a dereferencable slice for lifetime '_.
41498        IterMut {
41499            tail,
41500            head,
41501            ring: ptr::slice_from_raw_parts_mut(self.ptr(), self.cap()),
41502            phantom: PhantomData,
41503        }
41504    }
41505
    /// Creates a draining iterator that removes the specified range in the
    /// `VecDeque` and yields the removed items.
    ///
    /// Note 1: The element range is removed even if the iterator is not
    /// consumed until the end.
    ///
    /// Note 2: It is unspecified how many elements are removed from the deque,
    /// if the `Drain` value is not dropped, but the borrow it holds expires
    /// (e.g., due to `mem::forget`).
    ///
    /// # Panics
    ///
    /// Panics if the starting point is greater than the end point or if
    /// the end point is greater than the length of the vector.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut v: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
    /// let drained = v.drain(2..).collect::<VecDeque<_>>();
    /// assert_eq!(drained, [3]);
    /// assert_eq!(v, [1, 2]);
    ///
    /// // A full range clears all contents
    /// v.drain(..);
    /// assert!(v.is_empty());
    /// ```
    #[inline]
    #[stable(feature = "drain", since = "1.6.0")]
    pub fn drain<R>(&mut self, range: R) -> Drain<'_, T>
    where
        R: RangeBounds<usize>,
    {
        // Memory safety
        //
        // When the Drain is first created, the source deque is shortened to
        // make sure no uninitialized or moved-from elements are accessible at
        // all if the Drain's destructor never gets to run.
        //
        // Drain will ptr::read out the values to remove.
        // When finished, the remaining data will be copied back to cover the hole,
        // and the head/tail values will be restored correctly.
        //
        let (drain_tail, drain_head) = self.range_tail_head(range);

        // The deque's elements are parted into three segments:
        // * self.tail  -> drain_tail
        // * drain_tail -> drain_head
        // * drain_head -> self.head
        //
        // T = self.tail; H = self.head; t = drain_tail; h = drain_head
        //
        // We store drain_tail as self.head, and drain_head and self.head as
        // after_tail and after_head respectively on the Drain. This also
        // truncates the effective array such that if the Drain is leaked, we
        // have forgotten about the potentially moved values after the start of
        // the drain.
        //
        //        T   t   h   H
        // [. . . o o x x o o . . .]
        //
        let head = self.head;

        // "forget" about the values after the start of the drain until after
        // the drain is complete and the Drain destructor is run.
        self.head = drain_tail;

        // `after_tail`/`after_head` record the third segment so the Drain
        // destructor can stitch it back onto the deque.
        Drain {
            deque: NonNull::from(&mut *self),
            after_tail: drain_head,
            after_head: head,
            iter: Iter {
                tail: drain_tail,
                head: drain_head,
                // Crucially, we only create shared references from `self` here and read from
                // it.  We do not write to `self` nor reborrow to a mutable reference.
                // Hence the raw pointer we created above, for `deque`, remains valid.
                ring: unsafe { self.buffer_as_slice() },
            },
        }
    }
41589
    /// Clears the `VecDeque`, removing all values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut v = VecDeque::new();
    /// v.push_back(1);
    /// v.clear();
    /// assert!(v.is_empty());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[inline]
    pub fn clear(&mut self) {
        // `truncate(0)` drops every element (panic-safely) while keeping the
        // existing allocation for reuse.
        self.truncate(0);
    }
41607
41608    /// Returns `true` if the `VecDeque` contains an element equal to the
41609    /// given value.
41610    ///
41611    /// # Examples
41612    ///
41613    /// ```
41614    /// use std::collections::VecDeque;
41615    ///
41616    /// let mut vector: VecDeque<u32> = VecDeque::new();
41617    ///
41618    /// vector.push_back(0);
41619    /// vector.push_back(1);
41620    ///
41621    /// assert_eq!(vector.contains(&1), true);
41622    /// assert_eq!(vector.contains(&10), false);
41623    /// ```
41624    #[stable(feature = "vec_deque_contains", since = "1.12.0")]
41625    pub fn contains(&self, x: &T) -> bool
41626    where
41627        T: PartialEq<T>,
41628    {
41629        let (a, b) = self.as_slices();
41630        a.contains(x) || b.contains(x)
41631    }
41632
    /// Provides a reference to the front element, or `None` if the `VecDeque` is
    /// empty.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut d = VecDeque::new();
    /// assert_eq!(d.front(), None);
    ///
    /// d.push_back(1);
    /// d.push_back(2);
    /// assert_eq!(d.front(), Some(&1));
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn front(&self) -> Option<&T> {
        // The front element is at logical index 0; `get` yields `None` when
        // the deque is empty.
        self.get(0)
    }
41652
    /// Provides a mutable reference to the front element, or `None` if the
    /// `VecDeque` is empty.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut d = VecDeque::new();
    /// assert_eq!(d.front_mut(), None);
    ///
    /// d.push_back(1);
    /// d.push_back(2);
    /// match d.front_mut() {
    ///     Some(x) => *x = 9,
    ///     None => (),
    /// }
    /// assert_eq!(d.front(), Some(&9));
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn front_mut(&mut self) -> Option<&mut T> {
        // Mutable twin of `front`: logical index 0, `None` when empty.
        self.get_mut(0)
    }
41676
    /// Provides a reference to the back element, or `None` if the `VecDeque` is
    /// empty.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut d = VecDeque::new();
    /// assert_eq!(d.back(), None);
    ///
    /// d.push_back(1);
    /// d.push_back(2);
    /// assert_eq!(d.back(), Some(&2));
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn back(&self) -> Option<&T> {
        // When empty, `0usize.wrapping_sub(1)` is `usize::MAX`, which is out
        // of bounds, so `get` correctly returns `None` without a branch here.
        self.get(self.len().wrapping_sub(1))
    }
41696
    /// Provides a mutable reference to the back element, or `None` if the
    /// `VecDeque` is empty.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut d = VecDeque::new();
    /// assert_eq!(d.back(), None);
    ///
    /// d.push_back(1);
    /// d.push_back(2);
    /// match d.back_mut() {
    ///     Some(x) => *x = 9,
    ///     None => (),
    /// }
    /// assert_eq!(d.back(), Some(&9));
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn back_mut(&mut self) -> Option<&mut T> {
        // Mutable twin of `back`; the wrapping subtraction makes the empty
        // case fall out of `get_mut`'s bounds check as `None`.
        self.get_mut(self.len().wrapping_sub(1))
    }
41720
41721    /// Removes the first element and returns it, or `None` if the `VecDeque` is
41722    /// empty.
41723    ///
41724    /// # Examples
41725    ///
41726    /// ```
41727    /// use std::collections::VecDeque;
41728    ///
41729    /// let mut d = VecDeque::new();
41730    /// d.push_back(1);
41731    /// d.push_back(2);
41732    ///
41733    /// assert_eq!(d.pop_front(), Some(1));
41734    /// assert_eq!(d.pop_front(), Some(2));
41735    /// assert_eq!(d.pop_front(), None);
41736    /// ```
41737    #[stable(feature = "rust1", since = "1.0.0")]
41738    pub fn pop_front(&mut self) -> Option<T> {
41739        if self.is_empty() {
41740            None
41741        } else {
41742            let tail = self.tail;
41743            self.tail = self.wrap_add(self.tail, 1);
41744            unsafe { Some(self.buffer_read(tail)) }
41745        }
41746    }
41747
41748    /// Removes the last element from the `VecDeque` and returns it, or `None` if
41749    /// it is empty.
41750    ///
41751    /// # Examples
41752    ///
41753    /// ```
41754    /// use std::collections::VecDeque;
41755    ///
41756    /// let mut buf = VecDeque::new();
41757    /// assert_eq!(buf.pop_back(), None);
41758    /// buf.push_back(1);
41759    /// buf.push_back(3);
41760    /// assert_eq!(buf.pop_back(), Some(3));
41761    /// ```
41762    #[stable(feature = "rust1", since = "1.0.0")]
41763    pub fn pop_back(&mut self) -> Option<T> {
41764        if self.is_empty() {
41765            None
41766        } else {
41767            self.head = self.wrap_sub(self.head, 1);
41768            let head = self.head;
41769            unsafe { Some(self.buffer_read(head)) }
41770        }
41771    }
41772
41773    /// Prepends an element to the `VecDeque`.
41774    ///
41775    /// # Examples
41776    ///
41777    /// ```
41778    /// use std::collections::VecDeque;
41779    ///
41780    /// let mut d = VecDeque::new();
41781    /// d.push_front(1);
41782    /// d.push_front(2);
41783    /// assert_eq!(d.front(), Some(&2));
41784    /// ```
41785    #[stable(feature = "rust1", since = "1.0.0")]
41786    pub fn push_front(&mut self, value: T) {
41787        if self.is_full() {
41788            self.grow();
41789        }
41790
41791        self.tail = self.wrap_sub(self.tail, 1);
41792        let tail = self.tail;
41793        unsafe {
41794            self.buffer_write(tail, value);
41795        }
41796    }
41797
41798    /// Appends an element to the back of the `VecDeque`.
41799    ///
41800    /// # Examples
41801    ///
41802    /// ```
41803    /// use std::collections::VecDeque;
41804    ///
41805    /// let mut buf = VecDeque::new();
41806    /// buf.push_back(1);
41807    /// buf.push_back(3);
41808    /// assert_eq!(3, *buf.back().unwrap());
41809    /// ```
41810    #[stable(feature = "rust1", since = "1.0.0")]
41811    pub fn push_back(&mut self, value: T) {
41812        if self.is_full() {
41813            self.grow();
41814        }
41815
41816        let head = self.head;
41817        self.head = self.wrap_add(self.head, 1);
41818        unsafe { self.buffer_write(head, value) }
41819    }
41820
    #[inline]
    // Returns `true` when the live elements form one unbroken run
    // (`tail..head`) with no wrap-around in the physical buffer.
    fn is_contiguous(&self) -> bool {
        // FIXME: Should we consider `head == 0` to mean
        // that `self` is contiguous?
        self.tail <= self.head
    }
41827
41828    /// Removes an element from anywhere in the `VecDeque` and returns it,
41829    /// replacing it with the first element.
41830    ///
41831    /// This does not preserve ordering, but is *O*(1).
41832    ///
41833    /// Returns `None` if `index` is out of bounds.
41834    ///
41835    /// Element at index 0 is the front of the queue.
41836    ///
41837    /// # Examples
41838    ///
41839    /// ```
41840    /// use std::collections::VecDeque;
41841    ///
41842    /// let mut buf = VecDeque::new();
41843    /// assert_eq!(buf.swap_remove_front(0), None);
41844    /// buf.push_back(1);
41845    /// buf.push_back(2);
41846    /// buf.push_back(3);
41847    /// assert_eq!(buf, [1, 2, 3]);
41848    ///
41849    /// assert_eq!(buf.swap_remove_front(2), Some(3));
41850    /// assert_eq!(buf, [2, 1]);
41851    /// ```
41852    #[stable(feature = "deque_extras_15", since = "1.5.0")]
41853    pub fn swap_remove_front(&mut self, index: usize) -> Option<T> {
41854        let length = self.len();
41855        if length > 0 && index < length && index != 0 {
41856            self.swap(index, 0);
41857        } else if index >= length {
41858            return None;
41859        }
41860        self.pop_front()
41861    }
41862
41863    /// Removes an element from anywhere in the `VecDeque` and returns it, replacing it with the
41864    /// last element.
41865    ///
41866    /// This does not preserve ordering, but is *O*(1).
41867    ///
41868    /// Returns `None` if `index` is out of bounds.
41869    ///
41870    /// Element at index 0 is the front of the queue.
41871    ///
41872    /// # Examples
41873    ///
41874    /// ```
41875    /// use std::collections::VecDeque;
41876    ///
41877    /// let mut buf = VecDeque::new();
41878    /// assert_eq!(buf.swap_remove_back(0), None);
41879    /// buf.push_back(1);
41880    /// buf.push_back(2);
41881    /// buf.push_back(3);
41882    /// assert_eq!(buf, [1, 2, 3]);
41883    ///
41884    /// assert_eq!(buf.swap_remove_back(0), Some(1));
41885    /// assert_eq!(buf, [3, 2]);
41886    /// ```
41887    #[stable(feature = "deque_extras_15", since = "1.5.0")]
41888    pub fn swap_remove_back(&mut self, index: usize) -> Option<T> {
41889        let length = self.len();
41890        if length > 0 && index < length - 1 {
41891            self.swap(index, length - 1);
41892        } else if index >= length {
41893            return None;
41894        }
41895        self.pop_back()
41896    }
41897
    /// Inserts an element at `index` within the `VecDeque`, shifting all elements with indices
    /// greater than or equal to `index` towards the back.
    ///
    /// Element at index 0 is the front of the queue.
    ///
    /// # Panics
    ///
    /// Panics if `index` is greater than `VecDeque`'s length
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut vec_deque = VecDeque::new();
    /// vec_deque.push_back('a');
    /// vec_deque.push_back('b');
    /// vec_deque.push_back('c');
    /// assert_eq!(vec_deque, &['a', 'b', 'c']);
    ///
    /// vec_deque.insert(1, 'd');
    /// assert_eq!(vec_deque, &['a', 'd', 'b', 'c']);
    /// ```
    #[stable(feature = "deque_extras_15", since = "1.5.0")]
    pub fn insert(&mut self, index: usize, value: T) {
        assert!(index <= self.len(), "index out of bounds");
        // Ensure there is at least one free slot for the new element.
        if self.is_full() {
            self.grow();
        }

        // Move the least number of elements in the ring buffer and insert
        // the given object
        //
        // At most len/2 - 1 elements will be moved. O(min(n, n-i))
        //
        // There are three main cases:
        //  Elements are contiguous
        //      - special case when tail is 0
        //  Elements are discontiguous and the insert is in the tail section
        //  Elements are discontiguous and the insert is in the head section
        //
        // For each of those there are two more cases:
        //  Insert is closer to tail
        //  Insert is closer to head
        //
        // Key: H - self.head
        //      T - self.tail
        //      o - Valid element
        //      I - Insertion element
        //      A - The element that should be after the insertion point
        //      M - Indicates element was moved

        // Physical buffer index of the logical insertion point.
        let idx = self.wrap_add(self.tail, index);

        let distance_to_tail = index;
        let distance_to_head = self.len() - index;

        let contiguous = self.is_contiguous();

        // Each arm shifts whichever side of the insertion point is cheaper to
        // move, opening a one-slot hole at logical `index`; the hole is filled
        // with `value` after the match.
        match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) {
            (true, true, _) if index == 0 => {
                // push_front
                //
                //       T
                //       I             H
                //      [A o o o o o o . . . . . . . . .]
                //
                //                       H         T
                //      [A o o o o o o o . . . . . I]
                //

                self.tail = self.wrap_sub(self.tail, 1);
            }
            (true, true, _) => {
                unsafe {
                    // contiguous, insert closer to tail:
                    //
                    //             T   I         H
                    //      [. . . o o A o o o o . . . . . .]
                    //
                    //           T               H
                    //      [. . o o I A o o o o . . . . . .]
                    //           M M
                    //
                    // contiguous, insert closer to tail and tail is 0:
                    //
                    //
                    //       T   I         H
                    //      [o o A o o o o . . . . . . . . .]
                    //
                    //                       H             T
                    //      [o I A o o o o o . . . . . . . o]
                    //       M                             M

                    let new_tail = self.wrap_sub(self.tail, 1);

                    self.copy(new_tail, self.tail, 1);
                    // Already moved the tail, so we only copy `index - 1` elements.
                    self.copy(self.tail, self.tail + 1, index - 1);

                    self.tail = new_tail;
                }
            }
            (true, false, _) => {
                unsafe {
                    //  contiguous, insert closer to head:
                    //
                    //             T       I     H
                    //      [. . . o o o o A o o . . . . . .]
                    //
                    //             T               H
                    //      [. . . o o o o I A o o . . . . .]
                    //                       M M M

                    self.copy(idx + 1, idx, self.head - idx);
                    self.head = self.wrap_add(self.head, 1);
                }
            }
            (false, true, true) => {
                unsafe {
                    // discontiguous, insert closer to tail, tail section:
                    //
                    //                   H         T   I
                    //      [o o o o o o . . . . . o o A o o]
                    //
                    //                   H       T
                    //      [o o o o o o . . . . o o I A o o]
                    //                           M M

                    self.copy(self.tail - 1, self.tail, index);
                    self.tail -= 1;
                }
            }
            (false, false, true) => {
                unsafe {
                    // discontiguous, insert closer to head, tail section:
                    //
                    //           H             T         I
                    //      [o o . . . . . . . o o o o o A o]
                    //
                    //             H           T
                    //      [o o o . . . . . . o o o o o I A]
                    //       M M M                         M

                    // copy elements up to new head
                    self.copy(1, 0, self.head);

                    // copy last element into empty spot at bottom of buffer
                    self.copy(0, self.cap() - 1, 1);

                    // move elements from idx to end forward not including ^ element
                    self.copy(idx + 1, idx, self.cap() - 1 - idx);

                    self.head += 1;
                }
            }
            (false, true, false) if idx == 0 => {
                unsafe {
                    // discontiguous, insert is closer to tail, head section,
                    // and is at index zero in the internal buffer:
                    //
                    //       I                   H     T
                    //      [A o o o o o o o o o . . . o o o]
                    //
                    //                           H   T
                    //      [A o o o o o o o o o . . o o o I]
                    //                               M M M

                    // copy elements up to new tail
                    self.copy(self.tail - 1, self.tail, self.cap() - self.tail);

                    // copy last element into empty spot at bottom of buffer
                    self.copy(self.cap() - 1, 0, 1);

                    self.tail -= 1;
                }
            }
            (false, true, false) => {
                unsafe {
                    // discontiguous, insert closer to tail, head section:
                    //
                    //             I             H     T
                    //      [o o o A o o o o o o . . . o o o]
                    //
                    //                           H   T
                    //      [o o I A o o o o o o . . o o o o]
                    //       M M                     M M M M

                    // copy elements up to new tail
                    self.copy(self.tail - 1, self.tail, self.cap() - self.tail);

                    // copy last element into empty spot at bottom of buffer
                    self.copy(self.cap() - 1, 0, 1);

                    // move elements from idx-1 to end forward not including ^ element
                    self.copy(0, 1, idx - 1);

                    self.tail -= 1;
                }
            }
            (false, false, false) => {
                unsafe {
                    // discontiguous, insert closer to head, head section:
                    //
                    //               I     H           T
                    //      [o o o o A o o . . . . . . o o o]
                    //
                    //                     H           T
                    //      [o o o o I A o o . . . . . o o o]
                    //                 M M M

                    self.copy(idx + 1, idx, self.head - idx);
                    self.head += 1;
                }
            }
        }

        // tail might've been changed so we need to recalculate
        let new_idx = self.wrap_add(self.tail, index);
        // SAFETY: the arm above shifted elements so the slot at logical
        // `index` (physical `new_idx`) is now unoccupied and in-bounds.
        unsafe {
            self.buffer_write(new_idx, value);
        }
    }
42121
    /// Removes and returns the element at `index` from the `VecDeque`.
    /// Whichever end is closer to the removal point will be moved to make
    /// room, and all the affected elements will be moved to new positions.
    /// Returns `None` if `index` is out of bounds.
    ///
    /// Element at index 0 is the front of the queue.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::new();
    /// buf.push_back(1);
    /// buf.push_back(2);
    /// buf.push_back(3);
    /// assert_eq!(buf, [1, 2, 3]);
    ///
    /// assert_eq!(buf.remove(1), Some(2));
    /// assert_eq!(buf, [1, 3]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn remove(&mut self, index: usize) -> Option<T> {
        if self.is_empty() || self.len() <= index {
            return None;
        }

        // There are three main cases:
        //  Elements are contiguous
        //  Elements are discontiguous and the removal is in the tail section
        //  Elements are discontiguous and the removal is in the head section
        //      - special case when elements are technically contiguous,
        //        but self.head = 0
        //
        // For each of those there are two more cases:
        //  Insert is closer to tail
        //  Insert is closer to head
        //
        // Key: H - self.head
        //      T - self.tail
        //      o - Valid element
        //      x - Element marked for removal
        //      R - Indicates element that is being removed
        //      M - Indicates element was moved

        // Physical buffer index of the element being removed.
        let idx = self.wrap_add(self.tail, index);

        // SAFETY: `index < len` was checked above, so `idx` points at a live
        // element. The value is read out here; the vacated slot is closed up
        // by the copies in the match below, so it is never read again.
        let elem = unsafe { Some(self.buffer_read(idx)) };

        let distance_to_tail = index;
        let distance_to_head = self.len() - index;

        let contiguous = self.is_contiguous();

        // Each arm closes the gap by shifting whichever side of the removal
        // point is cheaper to move.
        match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) {
            (true, true, _) => {
                unsafe {
                    // contiguous, remove closer to tail:
                    //
                    //             T   R         H
                    //      [. . . o o x o o o o . . . . . .]
                    //
                    //               T           H
                    //      [. . . . o o o o o o . . . . . .]
                    //               M M

                    self.copy(self.tail + 1, self.tail, index);
                    self.tail += 1;
                }
            }
            (true, false, _) => {
                unsafe {
                    // contiguous, remove closer to head:
                    //
                    //             T       R     H
                    //      [. . . o o o o x o o . . . . . .]
                    //
                    //             T           H
                    //      [. . . o o o o o o . . . . . . .]
                    //                     M M

                    self.copy(idx, idx + 1, self.head - idx - 1);
                    self.head -= 1;
                }
            }
            (false, true, true) => {
                unsafe {
                    // discontiguous, remove closer to tail, tail section:
                    //
                    //                   H         T   R
                    //      [o o o o o o . . . . . o o x o o]
                    //
                    //                   H           T
                    //      [o o o o o o . . . . . . o o o o]
                    //                               M M

                    self.copy(self.tail + 1, self.tail, index);
                    self.tail = self.wrap_add(self.tail, 1);
                }
            }
            (false, false, false) => {
                unsafe {
                    // discontiguous, remove closer to head, head section:
                    //
                    //               R     H           T
                    //      [o o o o x o o . . . . . . o o o]
                    //
                    //                   H             T
                    //      [o o o o o o . . . . . . . o o o]
                    //               M M

                    self.copy(idx, idx + 1, self.head - idx - 1);
                    self.head -= 1;
                }
            }
            (false, false, true) => {
                unsafe {
                    // discontiguous, remove closer to head, tail section:
                    //
                    //             H           T         R
                    //      [o o o . . . . . . o o o o o x o]
                    //
                    //           H             T
                    //      [o o . . . . . . . o o o o o o o]
                    //       M M                         M M
                    //
                    // or quasi-discontiguous, remove next to head, tail section:
                    //
                    //       H                 T         R
                    //      [. . . . . . . . . o o o o o x o]
                    //
                    //                         T           H
                    //      [. . . . . . . . . o o o o o o .]
                    //                                   M

                    // draw in elements in the tail section
                    self.copy(idx, idx + 1, self.cap() - idx - 1);

                    // Prevents underflow.
                    if self.head != 0 {
                        // copy first element into empty spot
                        self.copy(self.cap() - 1, 0, 1);

                        // move elements in the head section backwards
                        self.copy(0, 1, self.head - 1);
                    }

                    self.head = self.wrap_sub(self.head, 1);
                }
            }
            (false, true, false) => {
                unsafe {
                    // discontiguous, remove closer to tail, head section:
                    //
                    //           R               H     T
                    //      [o o x o o o o o o o . . . o o o]
                    //
                    //                           H       T
                    //      [o o o o o o o o o o . . . . o o]
                    //       M M M                       M M

                    // draw in elements up to idx
                    self.copy(1, 0, idx);

                    // copy last element into empty spot
                    self.copy(0, self.cap() - 1, 1);

                    // move elements from tail to end forward, excluding the last one
                    self.copy(self.tail + 1, self.tail, self.cap() - self.tail - 1);

                    self.tail = self.wrap_add(self.tail, 1);
                }
            }
        }

        elem
    }
42299
    /// Splits the `VecDeque` into two at the given index.
    ///
    /// Returns a newly allocated `VecDeque`. `self` contains elements `[0, at)`,
    /// and the returned `VecDeque` contains elements `[at, len)`.
    ///
    /// Note that the capacity of `self` does not change.
    ///
    /// Element at index 0 is the front of the queue.
    ///
    /// # Panics
    ///
    /// Panics if `at > len`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut buf: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
    /// let buf2 = buf.split_off(1);
    /// assert_eq!(buf, [1]);
    /// assert_eq!(buf2, [2, 3]);
    /// ```
    #[inline]
    #[must_use = "use `.truncate()` if you don't need the other half"]
    #[stable(feature = "split_off", since = "1.4.0")]
    pub fn split_off(&mut self, at: usize) -> Self {
        let len = self.len();
        assert!(at <= len, "`at` out of bounds");

        let other_len = len - at;
        let mut other = VecDeque::with_capacity(other_len);

        // SAFETY: `other` was allocated with capacity for `other_len`
        // elements, so the destination writes below are in-bounds; the
        // sources are live slices of `self`. The copied-out elements are
        // logically removed from `self` afterwards by shrinking `head`
        // (below), so no element is dropped twice.
        unsafe {
            let (first_half, second_half) = self.as_slices();

            let first_len = first_half.len();
            let second_len = second_half.len();
            if at < first_len {
                // `at` lies in the first half.
                let amount_in_first = first_len - at;

                ptr::copy_nonoverlapping(first_half.as_ptr().add(at), other.ptr(), amount_in_first);

                // just take all of the second half.
                ptr::copy_nonoverlapping(
                    second_half.as_ptr(),
                    other.ptr().add(amount_in_first),
                    second_len,
                );
            } else {
                // `at` lies in the second half, need to factor in the elements we skipped
                // in the first half.
                let offset = at - first_len;
                let amount_in_second = second_len - offset;
                ptr::copy_nonoverlapping(
                    second_half.as_ptr().add(offset),
                    other.ptr(),
                    amount_in_second,
                );
            }
        }

        // Cleanup where the ends of the buffers are
        self.head = self.wrap_sub(self.head, other_len);
        other.head = other.wrap_index(other_len);

        other
    }
42369
42370    /// Moves all the elements of `other` into `self`, leaving `other` empty.
42371    ///
42372    /// # Panics
42373    ///
42374    /// Panics if the new number of elements in self overflows a `usize`.
42375    ///
42376    /// # Examples
42377    ///
42378    /// ```
42379    /// use std::collections::VecDeque;
42380    ///
42381    /// let mut buf: VecDeque<_> = vec![1, 2].into_iter().collect();
42382    /// let mut buf2: VecDeque<_> = vec![3, 4].into_iter().collect();
42383    /// buf.append(&mut buf2);
42384    /// assert_eq!(buf, [1, 2, 3, 4]);
42385    /// assert_eq!(buf2, []);
42386    /// ```
42387    #[inline]
42388    #[stable(feature = "append", since = "1.4.0")]
42389    pub fn append(&mut self, other: &mut Self) {
42390        // naive impl
42391        self.extend(other.drain(..));
42392    }
42393
42394    /// Retains only the elements specified by the predicate.
42395    ///
42396    /// In other words, remove all elements `e` such that `f(&e)` returns false.
42397    /// This method operates in place, visiting each element exactly once in the
42398    /// original order, and preserves the order of the retained elements.
42399    ///
42400    /// # Examples
42401    ///
42402    /// ```
42403    /// use std::collections::VecDeque;
42404    ///
42405    /// let mut buf = VecDeque::new();
42406    /// buf.extend(1..5);
42407    /// buf.retain(|&x| x % 2 == 0);
42408    /// assert_eq!(buf, [2, 4]);
42409    /// ```
42410    ///
42411    /// The exact order may be useful for tracking external state, like an index.
42412    ///
42413    /// ```
42414    /// use std::collections::VecDeque;
42415    ///
42416    /// let mut buf = VecDeque::new();
42417    /// buf.extend(1..6);
42418    ///
42419    /// let keep = [false, true, true, false, true];
42420    /// let mut i = 0;
42421    /// buf.retain(|_| (keep[i], i += 1).0);
42422    /// assert_eq!(buf, [2, 3, 5]);
42423    /// ```
42424    #[stable(feature = "vec_deque_retain", since = "1.4.0")]
42425    pub fn retain<F>(&mut self, mut f: F)
42426    where
42427        F: FnMut(&T) -> bool,
42428    {
42429        let len = self.len();
42430        let mut del = 0;
42431        for i in 0..len {
42432            if !f(&self[i]) {
42433                del += 1;
42434            } else if del > 0 {
42435                self.swap(i - del, i);
42436            }
42437        }
42438        if del > 0 {
42439            self.truncate(len - del);
42440        }
42441    }
42442
42443    // This may panic or abort
42444    #[inline(never)]
42445    fn grow(&mut self) {
42446        if self.is_full() {
42447            let old_cap = self.cap();
42448            // Double the buffer size.
42449            self.buf.reserve_exact(old_cap, old_cap);
42450            assert!(self.cap() == old_cap * 2);
42451            unsafe {
42452                self.handle_capacity_increase(old_cap);
42453            }
42454            debug_assert!(!self.is_full());
42455        }
42456    }
42457
42458    /// Modifies the `VecDeque` in-place so that `len()` is equal to `new_len`,
42459    /// either by removing excess elements from the back or by appending
42460    /// elements generated by calling `generator` to the back.
42461    ///
42462    /// # Examples
42463    ///
42464    /// ```
42465    /// use std::collections::VecDeque;
42466    ///
42467    /// let mut buf = VecDeque::new();
42468    /// buf.push_back(5);
42469    /// buf.push_back(10);
42470    /// buf.push_back(15);
42471    /// assert_eq!(buf, [5, 10, 15]);
42472    ///
42473    /// buf.resize_with(5, Default::default);
42474    /// assert_eq!(buf, [5, 10, 15, 0, 0]);
42475    ///
42476    /// buf.resize_with(2, || unreachable!());
42477    /// assert_eq!(buf, [5, 10]);
42478    ///
42479    /// let mut state = 100;
42480    /// buf.resize_with(5, || { state += 1; state });
42481    /// assert_eq!(buf, [5, 10, 101, 102, 103]);
42482    /// ```
42483    #[stable(feature = "vec_resize_with", since = "1.33.0")]
42484    pub fn resize_with(&mut self, new_len: usize, generator: impl FnMut() -> T) {
42485        let len = self.len();
42486
42487        if new_len > len {
42488            self.extend(repeat_with(generator).take(new_len - len))
42489        } else {
42490            self.truncate(new_len);
42491        }
42492    }
42493
    /// Rearranges the internal storage of this deque so it is one contiguous
    /// slice, which is then returned.
    ///
    /// This method does not allocate and does not change the order of the
    /// inserted elements. As it returns a mutable slice, this can be used to
    /// sort a deque.
    ///
    /// Once the internal storage is contiguous, the [`as_slices`] and
    /// [`as_mut_slices`] methods will return the entire contents of the
    /// `VecDeque` in a single slice.
    ///
    /// [`as_slices`]: VecDeque::as_slices
    /// [`as_mut_slices`]: VecDeque::as_mut_slices
    ///
    /// # Examples
    ///
    /// Sorting the content of a deque.
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::with_capacity(15);
    ///
    /// buf.push_back(2);
    /// buf.push_back(1);
    /// buf.push_front(3);
    ///
    /// // sorting the deque
    /// buf.make_contiguous().sort();
    /// assert_eq!(buf.as_slices(), (&[1, 2, 3] as &[_], &[] as &[_]));
    ///
    /// // sorting it in reverse order
    /// buf.make_contiguous().sort_by(|a, b| b.cmp(a));
    /// assert_eq!(buf.as_slices(), (&[3, 2, 1] as &[_], &[] as &[_]));
    /// ```
    ///
    /// Getting immutable access to the contiguous slice.
    ///
    /// ```rust
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::new();
    ///
    /// buf.push_back(2);
    /// buf.push_back(1);
    /// buf.push_front(3);
    ///
    /// buf.make_contiguous();
    /// if let (slice, &[]) = buf.as_slices() {
    ///     // we can now be sure that `slice` contains all elements of the deque,
    ///     // while still having immutable access to `buf`.
    ///     assert_eq!(buf.len(), slice.len());
    ///     assert_eq!(slice, &[3, 2, 1] as &[_]);
    /// }
    /// ```
    #[stable(feature = "deque_make_contiguous", since = "1.48.0")]
    pub fn make_contiguous(&mut self) -> &mut [T] {
        // Fast path: already contiguous, so the first ring slice is the
        // whole content.
        if self.is_contiguous() {
            let tail = self.tail;
            let head = self.head;
            return unsafe { RingSlices::ring_slices(self.buffer_as_mut_slice(), head, tail).0 };
        }

        let buf = self.buf.ptr();
        let cap = self.cap();
        let len = self.len();

        // Non-contiguous here, so the free region sits between `head` and
        // `tail` and `tail > head`; `free` cannot underflow.
        let free = self.tail - self.head;
        let tail_len = cap - self.tail;

        if free >= tail_len {
            // there is enough free space to copy the tail in one go,
            // this means that we first shift the head backwards, and then
            // copy the tail to the correct position.
            //
            // from: DEFGH....ABC
            // to:   ABCDEFGH....
            //
            // SAFETY: `free >= tail_len` guarantees the destination ranges of
            // both copies are in-bounds and the second copy's source and
            // destination do not overlap.
            unsafe {
                ptr::copy(buf, buf.add(tail_len), self.head);
                // ...DEFGH.ABC
                ptr::copy_nonoverlapping(buf.add(self.tail), buf, tail_len);
                // ABCDEFGH....

                self.tail = 0;
                self.head = len;
            }
        } else if free > self.head {
            // FIXME: We currently do not consider ....ABCDEFGH
            // to be contiguous because `head` would be `0` in this
            // case. While we probably want to change this it
            // isn't trivial as a few places expect `is_contiguous`
            // to mean that we can just slice using `buf[tail..head]`.

            // there is enough free space to copy the head in one go,
            // this means that we first shift the tail forwards, and then
            // copy the head to the correct position.
            //
            // from: FGH....ABCDE
            // to:   ...ABCDEFGH.
            //
            // SAFETY: `free > self.head` guarantees the head section fits
            // into the gap left after shifting the tail forwards, so both
            // copies stay in-bounds and the second does not overlap.
            unsafe {
                ptr::copy(buf.add(self.tail), buf.add(self.head), tail_len);
                // FGHABCDE....
                ptr::copy_nonoverlapping(buf, buf.add(self.head + tail_len), self.head);
                // ...ABCDEFGH.

                self.tail = self.head;
                self.head = self.wrap_add(self.tail, len);
            }
        } else {
            // free is smaller than both head and tail,
            // this means we have to slowly "swap" the tail and the head.
            //
            // from: EFGHI...ABCD or HIJK.ABCDEFG
            // to:   ABCDEFGHI... or ABCDEFGHIJK.
            let mut left_edge: usize = 0;
            let mut right_edge: usize = self.tail;
            // SAFETY: all indices used below stay within `0..cap`; only
            // element-wise swaps are performed, so every slot always holds
            // exactly one initialized value.
            unsafe {
                // The general problem looks like this
                // GHIJKLM...ABCDEF - before any swaps
                // ABCDEFM...GHIJKL - after 1 pass of swaps
                // ABCDEFGHIJM...KL - swap until the left edge reaches the temp store
                //                  - then restart the algorithm with a new (smaller) store
                // Sometimes the temp store is reached when the right edge is at the end
                // of the buffer - this means we've hit the right order with fewer swaps!
                // E.g
                // EF..ABCD
                // ABCDEF.. - after four only swaps we've finished
                while left_edge < len && right_edge != cap {
                    let mut right_offset = 0;
                    for i in left_edge..right_edge {
                        right_offset = (i - left_edge) % (cap - right_edge);
                        let src: isize = (right_edge + right_offset) as isize;
                        ptr::swap(buf.add(i), buf.offset(src));
                    }
                    let n_ops = right_edge - left_edge;
                    left_edge += n_ops;
                    right_edge += right_offset + 1;
                }

                self.tail = 0;
                self.head = len;
            }
        }

        let tail = self.tail;
        let head = self.head;
        unsafe { RingSlices::ring_slices(self.buffer_as_mut_slice(), head, tail).0 }
    }
42642
42643    /// Rotates the double-ended queue `mid` places to the left.
42644    ///
42645    /// Equivalently,
42646    /// - Rotates item `mid` into the first position.
42647    /// - Pops the first `mid` items and pushes them to the end.
42648    /// - Rotates `len() - mid` places to the right.
42649    ///
42650    /// # Panics
42651    ///
42652    /// If `mid` is greater than `len()`. Note that `mid == len()`
42653    /// does _not_ panic and is a no-op rotation.
42654    ///
42655    /// # Complexity
42656    ///
42657    /// Takes `*O*(min(mid, len() - mid))` time and no extra space.
42658    ///
42659    /// # Examples
42660    ///
42661    /// ```
42662    /// use std::collections::VecDeque;
42663    ///
42664    /// let mut buf: VecDeque<_> = (0..10).collect();
42665    ///
42666    /// buf.rotate_left(3);
42667    /// assert_eq!(buf, [3, 4, 5, 6, 7, 8, 9, 0, 1, 2]);
42668    ///
42669    /// for i in 1..10 {
42670    ///     assert_eq!(i * 3 % 10, buf[0]);
42671    ///     buf.rotate_left(3);
42672    /// }
42673    /// assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
42674    /// ```
42675    #[stable(feature = "vecdeque_rotate", since = "1.36.0")]
42676    pub fn rotate_left(&mut self, mid: usize) {
42677        assert!(mid <= self.len());
42678        let k = self.len() - mid;
42679        if mid <= k {
42680            unsafe { self.rotate_left_inner(mid) }
42681        } else {
42682            unsafe { self.rotate_right_inner(k) }
42683        }
42684    }
42685
42686    /// Rotates the double-ended queue `k` places to the right.
42687    ///
42688    /// Equivalently,
42689    /// - Rotates the first item into position `k`.
42690    /// - Pops the last `k` items and pushes them to the front.
42691    /// - Rotates `len() - k` places to the left.
42692    ///
42693    /// # Panics
42694    ///
42695    /// If `k` is greater than `len()`. Note that `k == len()`
42696    /// does _not_ panic and is a no-op rotation.
42697    ///
42698    /// # Complexity
42699    ///
42700    /// Takes `*O*(min(k, len() - k))` time and no extra space.
42701    ///
42702    /// # Examples
42703    ///
42704    /// ```
42705    /// use std::collections::VecDeque;
42706    ///
42707    /// let mut buf: VecDeque<_> = (0..10).collect();
42708    ///
42709    /// buf.rotate_right(3);
42710    /// assert_eq!(buf, [7, 8, 9, 0, 1, 2, 3, 4, 5, 6]);
42711    ///
42712    /// for i in 1..10 {
42713    ///     assert_eq!(0, buf[i * 3 % 10]);
42714    ///     buf.rotate_right(3);
42715    /// }
42716    /// assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
42717    /// ```
42718    #[stable(feature = "vecdeque_rotate", since = "1.36.0")]
42719    pub fn rotate_right(&mut self, k: usize) {
42720        assert!(k <= self.len());
42721        let mid = self.len() - k;
42722        if k <= mid {
42723            unsafe { self.rotate_right_inner(k) }
42724        } else {
42725            unsafe { self.rotate_left_inner(mid) }
42726        }
42727    }
42728
    // SAFETY: the following two methods require that the rotation amount
    // be less than half the length of the deque.
    //
    // `wrap_copy` requires that `min(x, cap() - x) + copy_len <= cap()`,
    // but then `min` is never more than half the capacity, regardless of x,
    // so it's sound to call here because we're calling with something
    // less than half the length, which is never above half the capacity.

    // Rotate left by copying the first `mid` elements past the current head,
    // then advancing both cursors by `mid` (with wrap-around).
    unsafe fn rotate_left_inner(&mut self, mid: usize) {
        debug_assert!(mid * 2 <= self.len());
        // SAFETY: `mid` is at most half the length, satisfying `wrap_copy`'s
        // requirement (see the comment above).
        unsafe {
            self.wrap_copy(self.head, self.tail, mid);
        }
        self.head = self.wrap_add(self.head, mid);
        self.tail = self.wrap_add(self.tail, mid);
    }
42745
    // Rotate right by first stepping both cursors back by `k` (with
    // wrap-around), then copying the former last `k` elements into the
    // newly exposed front region.
    unsafe fn rotate_right_inner(&mut self, k: usize) {
        debug_assert!(k * 2 <= self.len());
        self.head = self.wrap_sub(self.head, k);
        self.tail = self.wrap_sub(self.tail, k);
        // SAFETY: `k` is at most half the length, satisfying `wrap_copy`'s
        // requirement (see the comment above `rotate_left_inner`).
        unsafe {
            self.wrap_copy(self.tail, self.head, k);
        }
    }
42754
42755    /// Binary searches this sorted `VecDeque` for a given element.
42756    ///
42757    /// If the value is found then [`Result::Ok`] is returned, containing the
42758    /// index of the matching element. If there are multiple matches, then any
42759    /// one of the matches could be returned. If the value is not found then
42760    /// [`Result::Err`] is returned, containing the index where a matching
42761    /// element could be inserted while maintaining sorted order.
42762    ///
42763    /// See also [`binary_search_by`], [`binary_search_by_key`], and [`partition_point`].
42764    ///
42765    /// [`binary_search_by`]: VecDeque::binary_search_by
42766    /// [`binary_search_by_key`]: VecDeque::binary_search_by_key
42767    /// [`partition_point`]: VecDeque::partition_point
42768    ///
42769    /// # Examples
42770    ///
42771    /// Looks up a series of four elements. The first is found, with a
42772    /// uniquely determined position; the second and third are not
42773    /// found; the fourth could match any position in `[1, 4]`.
42774    ///
42775    /// ```
42776    /// #![feature(vecdeque_binary_search)]
42777    /// use std::collections::VecDeque;
42778    ///
42779    /// let deque: VecDeque<_> = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].into();
42780    ///
42781    /// assert_eq!(deque.binary_search(&13),  Ok(9));
42782    /// assert_eq!(deque.binary_search(&4),   Err(7));
42783    /// assert_eq!(deque.binary_search(&100), Err(13));
42784    /// let r = deque.binary_search(&1);
42785    /// assert!(matches!(r, Ok(1..=4)));
42786    /// ```
42787    ///
42788    /// If you want to insert an item to a sorted `VecDeque`, while maintaining
42789    /// sort order:
42790    ///
42791    /// ```
42792    /// #![feature(vecdeque_binary_search)]
42793    /// use std::collections::VecDeque;
42794    ///
42795    /// let mut deque: VecDeque<_> = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].into();
42796    /// let num = 42;
42797    /// let idx = deque.binary_search(&num).unwrap_or_else(|x| x);
42798    /// deque.insert(idx, num);
42799    /// assert_eq!(deque, &[0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 42, 55]);
42800    /// ```
42801    #[unstable(feature = "vecdeque_binary_search", issue = "78021")]
42802    #[inline]
42803    pub fn binary_search(&self, x: &T) -> Result<usize, usize>
42804    where
42805        T: Ord,
42806    {
42807        self.binary_search_by(|e| e.cmp(x))
42808    }
42809
42810    /// Binary searches this sorted `VecDeque` with a comparator function.
42811    ///
42812    /// The comparator function should implement an order consistent
42813    /// with the sort order of the underlying `VecDeque`, returning an
42814    /// order code that indicates whether its argument is `Less`,
42815    /// `Equal` or `Greater` than the desired target.
42816    ///
42817    /// If the value is found then [`Result::Ok`] is returned, containing the
42818    /// index of the matching element. If there are multiple matches, then any
42819    /// one of the matches could be returned. If the value is not found then
42820    /// [`Result::Err`] is returned, containing the index where a matching
42821    /// element could be inserted while maintaining sorted order.
42822    ///
42823    /// See also [`binary_search`], [`binary_search_by_key`], and [`partition_point`].
42824    ///
42825    /// [`binary_search`]: VecDeque::binary_search
42826    /// [`binary_search_by_key`]: VecDeque::binary_search_by_key
42827    /// [`partition_point`]: VecDeque::partition_point
42828    ///
42829    /// # Examples
42830    ///
42831    /// Looks up a series of four elements. The first is found, with a
42832    /// uniquely determined position; the second and third are not
42833    /// found; the fourth could match any position in `[1, 4]`.
42834    ///
42835    /// ```
42836    /// #![feature(vecdeque_binary_search)]
42837    /// use std::collections::VecDeque;
42838    ///
42839    /// let deque: VecDeque<_> = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].into();
42840    ///
42841    /// assert_eq!(deque.binary_search_by(|x| x.cmp(&13)),  Ok(9));
42842    /// assert_eq!(deque.binary_search_by(|x| x.cmp(&4)),   Err(7));
42843    /// assert_eq!(deque.binary_search_by(|x| x.cmp(&100)), Err(13));
42844    /// let r = deque.binary_search_by(|x| x.cmp(&1));
42845    /// assert!(matches!(r, Ok(1..=4)));
42846    /// ```
42847    #[unstable(feature = "vecdeque_binary_search", issue = "78021")]
42848    pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
42849    where
42850        F: FnMut(&'a T) -> Ordering,
42851    {
42852        let (front, back) = self.as_slices();
42853        let cmp_back = back.first().map(|elem| f(elem));
42854
42855        if let Some(Ordering::Equal) = cmp_back {
42856            Ok(front.len())
42857        } else if let Some(Ordering::Less) = cmp_back {
42858            back.binary_search_by(f).map(|idx| idx + front.len()).map_err(|idx| idx + front.len())
42859        } else {
42860            front.binary_search_by(f)
42861        }
42862    }
42863
42864    /// Binary searches this sorted `VecDeque` with a key extraction function.
42865    ///
42866    /// Assumes that the `VecDeque` is sorted by the key, for instance with
42867    /// [`make_contiguous().sort_by_key()`] using the same key extraction function.
42868    ///
42869    /// If the value is found then [`Result::Ok`] is returned, containing the
42870    /// index of the matching element. If there are multiple matches, then any
42871    /// one of the matches could be returned. If the value is not found then
42872    /// [`Result::Err`] is returned, containing the index where a matching
42873    /// element could be inserted while maintaining sorted order.
42874    ///
42875    /// See also [`binary_search`], [`binary_search_by`], and [`partition_point`].
42876    ///
42877    /// [`make_contiguous().sort_by_key()`]: VecDeque::make_contiguous
42878    /// [`binary_search`]: VecDeque::binary_search
42879    /// [`binary_search_by`]: VecDeque::binary_search_by
42880    /// [`partition_point`]: VecDeque::partition_point
42881    ///
42882    /// # Examples
42883    ///
42884    /// Looks up a series of four elements in a slice of pairs sorted by
42885    /// their second elements. The first is found, with a uniquely
42886    /// determined position; the second and third are not found; the
42887    /// fourth could match any position in `[1, 4]`.
42888    ///
42889    /// ```
42890    /// #![feature(vecdeque_binary_search)]
42891    /// use std::collections::VecDeque;
42892    ///
42893    /// let deque: VecDeque<_> = vec![(0, 0), (2, 1), (4, 1), (5, 1),
42894    ///          (3, 1), (1, 2), (2, 3), (4, 5), (5, 8), (3, 13),
42895    ///          (1, 21), (2, 34), (4, 55)].into();
42896    ///
42897    /// assert_eq!(deque.binary_search_by_key(&13, |&(a, b)| b),  Ok(9));
42898    /// assert_eq!(deque.binary_search_by_key(&4, |&(a, b)| b),   Err(7));
42899    /// assert_eq!(deque.binary_search_by_key(&100, |&(a, b)| b), Err(13));
42900    /// let r = deque.binary_search_by_key(&1, |&(a, b)| b);
42901    /// assert!(matches!(r, Ok(1..=4)));
42902    /// ```
42903    #[unstable(feature = "vecdeque_binary_search", issue = "78021")]
42904    #[inline]
42905    pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
42906    where
42907        F: FnMut(&'a T) -> B,
42908        B: Ord,
42909    {
42910        self.binary_search_by(|k| f(k).cmp(b))
42911    }
42912
42913    /// Returns the index of the partition point according to the given predicate
42914    /// (the index of the first element of the second partition).
42915    ///
42916    /// The deque is assumed to be partitioned according to the given predicate.
42917    /// This means that all elements for which the predicate returns true are at the start of the deque
42918    /// and all elements for which the predicate returns false are at the end.
42919    /// For example, [7, 15, 3, 5, 4, 12, 6] is a partitioned under the predicate x % 2 != 0
42920    /// (all odd numbers are at the start, all even at the end).
42921    ///
42922    /// If this deque is not partitioned, the returned result is unspecified and meaningless,
42923    /// as this method performs a kind of binary search.
42924    ///
42925    /// See also [`binary_search`], [`binary_search_by`], and [`binary_search_by_key`].
42926    ///
42927    /// [`binary_search`]: VecDeque::binary_search
42928    /// [`binary_search_by`]: VecDeque::binary_search_by
42929    /// [`binary_search_by_key`]: VecDeque::binary_search_by_key
42930    ///
42931    /// # Examples
42932    ///
42933    /// ```
42934    /// #![feature(vecdeque_binary_search)]
42935    /// use std::collections::VecDeque;
42936    ///
42937    /// let deque: VecDeque<_> = vec![1, 2, 3, 3, 5, 6, 7].into();
42938    /// let i = deque.partition_point(|&x| x < 5);
42939    ///
42940    /// assert_eq!(i, 4);
42941    /// assert!(deque.iter().take(i).all(|&x| x < 5));
42942    /// assert!(deque.iter().skip(i).all(|&x| !(x < 5)));
42943    /// ```
42944    #[unstable(feature = "vecdeque_binary_search", issue = "78021")]
42945    pub fn partition_point<P>(&self, mut pred: P) -> usize
42946    where
42947        P: FnMut(&T) -> bool,
42948    {
42949        let (front, back) = self.as_slices();
42950
42951        if let Some(true) = back.first().map(|v| pred(v)) {
42952            back.partition_point(pred) + front.len()
42953        } else {
42954            front.partition_point(pred)
42955        }
42956    }
42957}
42958
impl<T: Clone> VecDeque<T> {
    /// Modifies the `VecDeque` in-place so that `len()` is equal to new_len,
    /// either by removing excess elements from the back or by appending clones of `value`
    /// to the back.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::new();
    /// buf.push_back(5);
    /// buf.push_back(10);
    /// buf.push_back(15);
    /// assert_eq!(buf, [5, 10, 15]);
    ///
    /// buf.resize(2, 0);
    /// assert_eq!(buf, [5, 10]);
    ///
    /// buf.resize(5, 20);
    /// assert_eq!(buf, [5, 10, 20, 20, 20]);
    /// ```
    #[stable(feature = "deque_extras", since = "1.16.0")]
    pub fn resize(&mut self, new_len: usize, value: T) {
        // `resize_with` truncates when shrinking and calls the closure once
        // per newly added slot, cloning `value` only as many times as needed.
        self.resize_with(new_len, || value.clone());
    }
}
42986
/// Returns the index in the underlying buffer for a given logical element index.
#[inline]
fn wrap_index(index: usize, size: usize) -> usize {
    // `size` is always a power of two, so masking with `size - 1` is
    // equivalent to `index % size` but cheaper.
    debug_assert!(size.is_power_of_two());
    let mask = size - 1;
    index & mask
}
42994
/// Calculate the number of elements left to be read in the buffer
#[inline]
fn count(tail: usize, head: usize, size: usize) -> usize {
    // size is always a power of 2; assert it in debug builds, for consistency
    // with `wrap_index`, since the masking arithmetic below relies on it.
    debug_assert!(size.is_power_of_two());
    // Wrapping subtraction handles the case where the live region wraps
    // around the end of the buffer (head < tail).
    (head.wrapping_sub(tail)) & (size - 1)
}
43001
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: PartialEq> PartialEq for VecDeque<A> {
    fn eq(&self, other: &VecDeque<A>) -> bool {
        if self.len() != other.len() {
            return false;
        }
        // Each deque exposes its elements as (up to) two contiguous slices;
        // compare them piecewise without materializing a contiguous copy.
        let (sa, sb) = self.as_slices();
        let (oa, ob) = other.as_slices();
        if sa.len() == oa.len() {
            // The split points line up, so the slices pair off directly.
            sa == oa && sb == ob
        } else if sa.len() < oa.len() {
            // Always divisible in three sections, for example:
            // self:  [a b c|d e f]
            // other: [0 1 2 3|4 5]
            // front = 3, mid = 1,
            // [a b c] == [0 1 2] && [d] == [3] && [e f] == [4 5]
            let front = sa.len();
            let mid = oa.len() - front;

            let (oa_front, oa_mid) = oa.split_at(front);
            let (sb_mid, sb_back) = sb.split_at(mid);
            debug_assert_eq!(sa.len(), oa_front.len());
            debug_assert_eq!(sb_mid.len(), oa_mid.len());
            debug_assert_eq!(sb_back.len(), ob.len());
            sa == oa_front && sb_mid == oa_mid && sb_back == ob
        } else {
            // Mirror image of the branch above: `self`'s front slice is the
            // longer one, so split `self`'s slices against `other`'s instead.
            let front = oa.len();
            let mid = sa.len() - front;

            let (sa_front, sa_mid) = sa.split_at(front);
            let (ob_mid, ob_back) = ob.split_at(mid);
            debug_assert_eq!(sa_front.len(), oa.len());
            debug_assert_eq!(sa_mid.len(), ob_mid.len());
            debug_assert_eq!(sb.len(), ob_back.len());
            sa_front == oa && sa_mid == ob_mid && sb == ob_back
        }
    }
}
43040
// Marker impl: equality is a full equivalence relation whenever `A: Eq`.
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: Eq> Eq for VecDeque<A> {}
43043
// Generate `PartialEq` impls comparing `VecDeque<A>` against the slice-like
// types below (vectors, slices, and fixed-size arrays, by value and by ref).
__impl_slice_eq1! { [] VecDeque<A>, Vec<B>, }
__impl_slice_eq1! { [] VecDeque<A>, &[B], }
__impl_slice_eq1! { [] VecDeque<A>, &mut [B], }
__impl_slice_eq1! { [const N: usize] VecDeque<A>, [B; N], }
__impl_slice_eq1! { [const N: usize] VecDeque<A>, &[B; N], }
__impl_slice_eq1! { [const N: usize] VecDeque<A>, &mut [B; N], }
43050
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: PartialOrd> PartialOrd for VecDeque<A> {
    fn partial_cmp(&self, other: &VecDeque<A>) -> Option<Ordering> {
        // Lexicographical comparison, delegated to the front-to-back iterators.
        self.iter().partial_cmp(other.iter())
    }
}
43057
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: Ord> Ord for VecDeque<A> {
    #[inline]
    fn cmp(&self, other: &VecDeque<A>) -> Ordering {
        // Lexicographical comparison, delegated to the front-to-back iterators.
        self.iter().cmp(other.iter())
    }
}
43065
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: Hash> Hash for VecDeque<A> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash the length first, then every element in logical order.
        self.len().hash(state);
        // It's not possible to use Hash::hash_slice on slices
        // returned by as_slices method as their length can vary
        // in otherwise identical deques.
        //
        // Hasher only guarantees equivalence for the exact same
        // set of calls to its methods.
        self.iter().for_each(|elem| elem.hash(state));
    }
}
43079
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> Index<usize> for VecDeque<A> {
    type Output = A;

    // Panics with "Out of bounds access" when `get` returns `None`.
    #[inline]
    fn index(&self, index: usize) -> &A {
        self.get(index).expect("Out of bounds access")
    }
}
43089
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> IndexMut<usize> for VecDeque<A> {
    // Panics with "Out of bounds access" when `get_mut` returns `None`.
    #[inline]
    fn index_mut(&mut self, index: usize) -> &mut A {
        self.get_mut(index).expect("Out of bounds access")
    }
}
43097
43098#[stable(feature = "rust1", since = "1.0.0")]
43099impl<A> FromIterator<A> for VecDeque<A> {
43100    fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> VecDeque<A> {
43101        let iterator = iter.into_iter();
43102        let (lower, _) = iterator.size_hint();
43103        let mut deq = VecDeque::with_capacity(lower);
43104        deq.extend(iterator);
43105        deq
43106    }
43107}
43108
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> IntoIterator for VecDeque<T> {
    type Item = T;
    type IntoIter = IntoIter<T>;

    /// Consumes the `VecDeque` into a front-to-back iterator yielding elements by
    /// value.
    fn into_iter(self) -> IntoIter<T> {
        // `IntoIter` simply wraps the deque and pops from the front.
        IntoIter { inner: self }
    }
}
43120
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a VecDeque<T> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;

    // Borrowing iteration: `for x in &deque` delegates to `iter()`.
    fn into_iter(self) -> Iter<'a, T> {
        self.iter()
    }
}
43130
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a mut VecDeque<T> {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a, T>;

    // Mutable borrowing iteration: `for x in &mut deque` delegates to `iter_mut()`.
    fn into_iter(self) -> IterMut<'a, T> {
        self.iter_mut()
    }
}
43140
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> Extend<A> for VecDeque<A> {
    fn extend<T: IntoIterator<Item = A>>(&mut self, iter: T) {
        // This function should be the moral equivalent of:
        //
        //      for item in iter.into_iter() {
        //          self.push_back(item);
        //      }
        //
        // It writes directly into the buffer instead of going through
        // `push_back`, reserving in bulk (guided by the size hint) whenever
        // the buffer fills up.
        let mut iter = iter.into_iter();
        while let Some(element) = iter.next() {
            if self.len() == self.capacity() {
                let (lower, _) = iter.size_hint();
                // +1 accounts for `element` itself, which is not part of
                // the remaining iterator's size hint.
                self.reserve(lower.saturating_add(1));
            }

            let head = self.head;
            self.head = self.wrap_add(self.head, 1);
            // SAFETY: the reserve above guarantees at least one free slot,
            // so the old `head` indexes unoccupied capacity.
            unsafe {
                self.buffer_write(head, element);
            }
        }
    }

    #[inline]
    fn extend_one(&mut self, elem: A) {
        self.push_back(elem);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}
43174
43175#[stable(feature = "extend_ref", since = "1.2.0")]
43176impl<'a, T: 'a + Copy> Extend<&'a T> for VecDeque<T> {
43177    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
43178        self.extend(iter.into_iter().cloned());
43179    }
43180
43181    #[inline]
43182    fn extend_one(&mut self, &elem: &T) {
43183        self.push_back(elem);
43184    }
43185
43186    #[inline]
43187    fn extend_reserve(&mut self, additional: usize) {
43188        self.reserve(additional);
43189    }
43190}
43191
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Debug> fmt::Debug for VecDeque<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Render like a list (`[a, b, c]`) in front-to-back order.
        f.debug_list().entries(self).finish()
    }
}
43198
#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")]
impl<T> From<Vec<T>> for VecDeque<T> {
    /// Turn a [`Vec<T>`] into a [`VecDeque<T>`].
    ///
    /// [`Vec<T>`]: crate::vec::Vec
    /// [`VecDeque<T>`]: crate::collections::VecDeque
    ///
    /// This avoids reallocating where possible, but the conditions for that are
    /// strict, and subject to change, and so shouldn't be relied upon unless the
    /// `Vec<T>` came from `From<VecDeque<T>>` and hasn't been reallocated.
    fn from(mut other: Vec<T>) -> Self {
        let len = other.len();
        if mem::size_of::<T>() == 0 {
            // There's no actual allocation for ZSTs to worry about capacity,
            // but `VecDeque` can't handle as much length as `Vec`.
            assert!(len < MAXIMUM_ZST_CAPACITY, "capacity overflow");
        } else {
            // We need to resize if the capacity is not a power of two, too small or
            // doesn't have at least one free space. We do this while it's still in
            // the `Vec` so the items will drop on panic.
            let min_cap = cmp::max(MINIMUM_CAPACITY, len) + 1;
            let cap = cmp::max(min_cap, other.capacity()).next_power_of_two();
            if other.capacity() != cap {
                other.reserve_exact(cap - len);
            }
        }

        // SAFETY: `into_raw_parts` transfers ownership of the allocation to us;
        // the branch above established the power-of-two / at-least-one-free-slot
        // capacity shape, and `tail: 0, head: len` describes exactly the
        // initialized prefix of that buffer.
        unsafe {
            let (other_buf, len, capacity) = other.into_raw_parts();
            let buf = RawVec::from_raw_parts(other_buf, capacity);
            VecDeque { tail: 0, head: len, buf }
        }
    }
}
43233
#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")]
impl<T> From<VecDeque<T>> for Vec<T> {
    /// Turn a [`VecDeque<T>`] into a [`Vec<T>`].
    ///
    /// [`Vec<T>`]: crate::vec::Vec
    /// [`VecDeque<T>`]: crate::collections::VecDeque
    ///
    /// This never needs to re-allocate, but does need to do *O*(*n*) data movement if
    /// the circular buffer doesn't happen to be at the beginning of the allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// // This one is *O*(1).
    /// let deque: VecDeque<_> = (1..5).collect();
    /// let ptr = deque.as_slices().0.as_ptr();
    /// let vec = Vec::from(deque);
    /// assert_eq!(vec, [1, 2, 3, 4]);
    /// assert_eq!(vec.as_ptr(), ptr);
    ///
    /// // This one needs data rearranging.
    /// let mut deque: VecDeque<_> = (1..5).collect();
    /// deque.push_front(9);
    /// deque.push_front(8);
    /// let ptr = deque.as_slices().1.as_ptr();
    /// let vec = Vec::from(deque);
    /// assert_eq!(vec, [8, 9, 1, 2, 3, 4]);
    /// assert_eq!(vec.as_ptr(), ptr);
    /// ```
    fn from(mut other: VecDeque<T>) -> Self {
        // Rearrange so the elements form a single contiguous run in the buffer.
        other.make_contiguous();

        unsafe {
            // Prevent `other`'s destructor from freeing the buffer we are
            // about to hand over to the `Vec`.
            let other = ManuallyDrop::new(other);
            let buf = other.buf.ptr();
            let len = other.len();
            let cap = other.cap();

            // The contiguous run may still start mid-buffer; slide it down
            // to the allocation's start if so (ranges may overlap: `copy`).
            if other.tail != 0 {
                ptr::copy(buf.add(other.tail), buf, len);
            }
            // SAFETY: we exclusively own the allocation, and it now holds
            // `len` initialized elements at its start with capacity `cap`.
            Vec::from_raw_parts(buf, len, cap)
        }
    }
}
43281use core::fmt;
43282use core::iter::{FusedIterator, TrustedLen, TrustedRandomAccess};
43283use core::ops::Try;
43284
43285use super::{count, wrap_index, RingSlices};
43286
/// An iterator over the elements of a `VecDeque`.
///
/// This `struct` is created by the [`iter`] method on [`super::VecDeque`]. See its
/// documentation for more.
///
/// [`iter`]: super::VecDeque::iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> {
    // The deque's backing buffer, viewed as a slice of its full capacity.
    pub(crate) ring: &'a [T],
    // Index of the next element to yield from the front (see `next`).
    pub(crate) tail: usize,
    // One past the index of the next element to yield from the back
    // (see `next_back`); `tail == head` means the iterator is exhausted.
    pub(crate) head: usize,
}
43299
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Show the remaining elements as the two contiguous slices that
        // make up the logical front-to-back order.
        let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
        f.debug_tuple("Iter").field(&front).field(&back).finish()
    }
}
43307
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
// (manual impl avoids the unnecessary `T: Clone` bound a derive would add).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Iter<'_, T> {
    fn clone(&self) -> Self {
        Iter { ring: self.ring, tail: self.tail, head: self.head }
    }
}
43315
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<&'a T> {
        // `tail == head` marks exhaustion.
        if self.tail == self.head {
            return None;
        }
        let tail = self.tail;
        // Advance the front cursor with wrap-around (ring length is a power of two).
        self.tail = wrap_index(self.tail.wrapping_add(1), self.ring.len());
        // SAFETY: `tail` lies between the live cursors, hence in bounds.
        unsafe { Some(self.ring.get_unchecked(tail)) }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // The remaining length is known exactly from the cursor positions.
        let len = count(self.tail, self.head, self.ring.len());
        (len, Some(len))
    }

    fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
    where
        F: FnMut(Acc, Self::Item) -> Acc,
    {
        // Fold over the two contiguous halves in logical front-to-back order.
        let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
        accum = front.iter().fold(accum, &mut f);
        back.iter().fold(accum, &mut f)
    }

    fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R
    where
        Self: Sized,
        F: FnMut(B, Self::Item) -> R,
        R: Try<Ok = B>,
    {
        let (mut iter, final_res);
        if self.tail <= self.head {
            // single slice self.ring[self.tail..self.head]
            iter = self.ring[self.tail..self.head].iter();
            final_res = iter.try_fold(init, &mut f);
        } else {
            // two slices: self.ring[self.tail..], self.ring[..self.head]
            let (front, back) = self.ring.split_at(self.tail);
            let mut back_iter = back.iter();
            let res = back_iter.try_fold(init, &mut f);
            let len = self.ring.len();
            // Record how far the wrapped half was consumed *before* the `?`
            // below, so `self` is left consistent if `f` short-circuits.
            self.tail = (self.ring.len() - back_iter.len()) & (len - 1);
            iter = front[..self.head].iter();
            final_res = iter.try_fold(res?, &mut f);
        }
        // `iter` covers the final (unwrapped) stretch ending at `head`;
        // its remaining length tells us where the front cursor stopped.
        self.tail = self.head - iter.len();
        final_res
    }

    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        if n >= count(self.tail, self.head, self.ring.len()) {
            // Skipping past the end exhausts the iterator.
            self.tail = self.head;
            None
        } else {
            // Jump the front cursor forward by `n`, then yield normally.
            self.tail = wrap_index(self.tail.wrapping_add(n), self.ring.len());
            self.next()
        }
    }

    #[inline]
    fn last(mut self) -> Option<&'a T> {
        // The last element from the front is the first from the back.
        self.next_back()
    }

    #[inline]
    unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item
    where
        Self: TrustedRandomAccess,
    {
        // Safety: The TrustedRandomAccess contract requires that callers only pass an index
        // that is in bounds.
        unsafe {
            let idx = wrap_index(self.tail.wrapping_add(idx), self.ring.len());
            self.ring.get_unchecked(idx)
        }
    }
}
43398
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
    #[inline]
    fn next_back(&mut self) -> Option<&'a T> {
        if self.tail == self.head {
            return None;
        }
        // Step the back cursor down first: `head` is one past the last element.
        self.head = wrap_index(self.head.wrapping_sub(1), self.ring.len());
        // SAFETY: after the decrement, `head` indexes a live element.
        unsafe { Some(self.ring.get_unchecked(self.head)) }
    }

    fn rfold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
    where
        F: FnMut(Acc, Self::Item) -> Acc,
    {
        // Fold over the two contiguous halves in reverse logical order.
        let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
        accum = back.iter().rfold(accum, &mut f);
        front.iter().rfold(accum, &mut f)
    }

    fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R
    where
        Self: Sized,
        F: FnMut(B, Self::Item) -> R,
        R: Try<Ok = B>,
    {
        let (mut iter, final_res);
        if self.tail <= self.head {
            // single slice self.ring[self.tail..self.head]
            iter = self.ring[self.tail..self.head].iter();
            final_res = iter.try_rfold(init, &mut f);
        } else {
            // two slices: self.ring[self.tail..], self.ring[..self.head]
            let (front, back) = self.ring.split_at(self.tail);
            let mut front_iter = front[..self.head].iter();
            let res = front_iter.try_rfold(init, &mut f);
            // Record progress through the unwrapped half *before* the `?`
            // below, so `self` is left consistent if `f` short-circuits.
            self.head = front_iter.len();
            iter = back.iter();
            final_res = iter.try_rfold(res?, &mut f);
        }
        // `iter` covers the final stretch starting at `tail`; its remaining
        // length tells us where the back cursor stopped.
        self.head = self.tail + iter.len();
        final_res
    }
}
43443
43444#[stable(feature = "rust1", since = "1.0.0")]
43445impl<T> ExactSizeIterator for Iter<'_, T> {
43446    fn is_empty(&self) -> bool {
43447        self.head == self.tail
43448    }
43449}
43450
// Once exhausted, the cursors are equal and stay equal, so `next` keeps
// returning `None`: the iterator is fused.
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Iter<'_, T> {}
43453
// SAFETY: `size_hint` reports the exact remaining length computed from the
// two cursors, so the advertised length is always accurate.
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for Iter<'_, T> {}
43456
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<T> TrustedRandomAccess for Iter<'_, T> {
    // `__iterator_get_unchecked` only forms a shared reference into the ring;
    // it runs no user code and mutates no state.
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}
43462use super::*;
43463
43464#[bench]
43465#[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks
43466fn bench_push_back_100(b: &mut test::Bencher) {
43467    let mut deq = VecDeque::with_capacity(101);
43468    b.iter(|| {
43469        for i in 0..100 {
43470            deq.push_back(i);
43471        }
43472        deq.head = 0;
43473        deq.tail = 0;
43474    })
43475}
43476
43477#[bench]
43478#[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks
43479fn bench_push_front_100(b: &mut test::Bencher) {
43480    let mut deq = VecDeque::with_capacity(101);
43481    b.iter(|| {
43482        for i in 0..100 {
43483            deq.push_front(i);
43484        }
43485        deq.head = 0;
43486        deq.tail = 0;
43487    })
43488}
43489
43490#[bench]
43491#[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks
43492fn bench_pop_back_100(b: &mut test::Bencher) {
43493    let mut deq = VecDeque::<i32>::with_capacity(101);
43494
43495    b.iter(|| {
43496        deq.head = 100;
43497        deq.tail = 0;
43498        while !deq.is_empty() {
43499            test::black_box(deq.pop_back());
43500        }
43501    })
43502}
43503
43504#[bench]
43505#[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks
43506fn bench_pop_front_100(b: &mut test::Bencher) {
43507    let mut deq = VecDeque::<i32>::with_capacity(101);
43508
43509    b.iter(|| {
43510        deq.head = 100;
43511        deq.tail = 0;
43512        while !deq.is_empty() {
43513            test::black_box(deq.pop_front());
43514        }
43515    })
43516}
43517
#[test]
fn test_swap_front_back_remove() {
    fn test(back: bool) {
        // This test checks that every single combination of tail position and length is tested.
        // Capacity 15 should be large enough to cover every case.
        let mut tester = VecDeque::with_capacity(15);
        let usable_cap = tester.capacity();
        let final_len = usable_cap / 2;

        for len in 0..final_len {
            // Elements expected to remain after the swap-removals, in order.
            let expected: VecDeque<_> =
                if back { (0..len).collect() } else { (0..len).rev().collect() };
            for tail_pos in 0..usable_cap {
                // Start the contents at an arbitrary ring position (test-only field poke).
                tester.tail = tail_pos;
                tester.head = tail_pos;
                if back {
                    for i in 0..len * 2 {
                        tester.push_front(i);
                    }
                    for i in 0..len {
                        assert_eq!(tester.swap_remove_back(i), Some(len * 2 - 1 - i));
                    }
                } else {
                    for i in 0..len * 2 {
                        tester.push_back(i);
                    }
                    for i in 0..len {
                        let idx = tester.len() - 1 - i;
                        assert_eq!(tester.swap_remove_front(idx), Some(len * 2 - 1 - i));
                    }
                }
                // The cursors must still be valid indices into the ring buffer.
                assert!(tester.tail < tester.cap());
                assert!(tester.head < tester.cap());
                assert_eq!(tester, expected);
            }
        }
    }
    test(true);
    test(false);
}
43558
#[test]
fn test_insert() {
    // This test checks that every single combination of tail position, length, and
    // insertion position is tested. Capacity 15 should be large enough to cover every case.

    let mut tester = VecDeque::with_capacity(15);
    // can't guarantee we got 15, so have to get what we got.
    // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
    // this test isn't covering what it wants to
    let cap = tester.capacity();

    // len is the length *after* insertion
    let minlen = if cfg!(miri) { cap - 1 } else { 1 }; // Miri is too slow
    for len in minlen..cap {
        // 0, 1, 2, .., len - 1
        let expected = (0..).take(len).collect::<VecDeque<_>>();
        for tail_pos in 0..cap {
            for to_insert in 0..len {
                // Start the contents at an arbitrary ring position (test-only field poke).
                tester.tail = tail_pos;
                tester.head = tail_pos;
                // Push every value except the one that will be inserted later.
                for i in 0..len {
                    if i != to_insert {
                        tester.push_back(i);
                    }
                }
                // Inserting `to_insert` at its own index restores 0..len in order.
                tester.insert(to_insert, to_insert);
                // The cursors must still be valid indices into the ring buffer.
                assert!(tester.tail < tester.cap());
                assert!(tester.head < tester.cap());
                assert_eq!(tester, expected);
            }
        }
    }
}
43592
#[test]
fn make_contiguous_big_tail() {
    let mut tester = VecDeque::with_capacity(15);

    // Lay out the buffer as `012......9876543`: three values at the start of
    // the allocation and seven wrapped around at the end.
    (0..3).for_each(|i| tester.push_back(i));
    (3..10).for_each(|i| tester.push_front(i));

    assert_eq!(tester.capacity(), 15);
    assert_eq!((&[9, 8, 7, 6, 5, 4, 3] as &[_], &[0, 1, 2] as &[_]), tester.as_slices());

    // `make_contiguous` should leave the contents starting at the old head.
    let expected_start = tester.head;
    tester.make_contiguous();
    assert_eq!(tester.tail, expected_start);
    assert_eq!((&[9, 8, 7, 6, 5, 4, 3, 0, 1, 2] as &[_], &[] as &[_]), tester.as_slices());
}
43614
#[test]
fn make_contiguous_big_head() {
    let mut tester = VecDeque::with_capacity(15);

    // Lay out the buffer as `01234567......98`: eight values at the start of
    // the allocation and two wrapped around at the end.
    (0..8).for_each(|i| tester.push_back(i));
    (8..10).for_each(|i| tester.push_front(i));

    // `make_contiguous` should move everything to the start of the buffer.
    let expected_start = 0;
    tester.make_contiguous();
    assert_eq!(tester.tail, expected_start);
    assert_eq!((&[9, 8, 0, 1, 2, 3, 4, 5, 6, 7] as &[_], &[] as &[_]), tester.as_slices());
}
43633
#[test]
fn make_contiguous_small_free() {
    let mut tester = VecDeque::with_capacity(15);

    // Use byte literals instead of `'X' as u8` casts — clearer, and what
    // clippy's `char_lit_as_u8` lint recommends. Behavior is unchanged.
    for i in b'A'..b'I' {
        tester.push_back(i as char);
    }

    for i in b'I'..b'N' {
        tester.push_front(i as char);
    }

    // ABCDEFGH...MLKJI
    let expected_start = 0;
    tester.make_contiguous();
    assert_eq!(tester.tail, expected_start);
    assert_eq!(
        (&['M', 'L', 'K', 'J', 'I', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] as &[_], &[] as &[_]),
        tester.as_slices()
    );

    // Repeat with the push directions swapped so the free gap sits on the
    // other side of the ring.
    tester.clear();
    for i in b'I'..b'N' {
        tester.push_back(i as char);
    }

    for i in b'A'..b'I' {
        tester.push_front(i as char);
    }

    // IJKLM...HGFEDCBA
    let expected_start = 0;
    tester.make_contiguous();
    assert_eq!(tester.tail, expected_start);
    assert_eq!(
        (&['H', 'G', 'F', 'E', 'D', 'C', 'B', 'A', 'I', 'J', 'K', 'L', 'M'] as &[_], &[] as &[_]),
        tester.as_slices()
    );
}
43673
#[test]
fn make_contiguous_head_to_end() {
    // Fill a capacity-3 deque completely so `make_contiguous` has no spare
    // slot to work with (regression shape for #79808).
    let mut dq = VecDeque::with_capacity(3);
    dq.push_front('B');
    dq.push_front('A');
    dq.push_back('C');
    dq.make_contiguous();

    // After straightening, the contents occupy the whole buffer from index 0.
    assert_eq!(dq.tail, 0);
    assert_eq!(dq.head, 3);
    assert_eq!((&['A', 'B', 'C'] as &[_], &[] as &[_]), dq.as_slices());
}
43687
#[test]
fn make_contiguous_head_to_end_2() {
    // Another test case for #79808, taken from #80293.
    let mut dq = VecDeque::from_iter(0..6);
    dq.pop_front();
    dq.pop_front();
    for v in 6..9 {
        dq.push_back(v);
    }
    dq.make_contiguous();
    // Everything must end up in the first of the two slices.
    let collected: Vec<_> = dq.iter().copied().collect();
    assert_eq!(dq.as_slices(), (&collected[..], &[] as &[_]));
}
43702
#[test]
fn test_remove() {
    // This test checks that every single combination of tail position, length, and
    // removal position is tested. Capacity 15 should be large enough to cover every case.

    let mut tester = VecDeque::with_capacity(15);
    // can't guarantee we got 15, so have to get what we got.
    // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
    // this test isn't covering what it wants to
    let cap = tester.capacity();

    // len is the length *after* removal
    let minlen = if cfg!(miri) { cap - 2 } else { 0 }; // Miri is too slow
    for len in minlen..cap - 1 {
        // 0, 1, 2, .., len - 1
        let expected = (0..).take(len).collect::<VecDeque<_>>();
        for tail_pos in 0..cap {
            for to_remove in 0..=len {
                // Start the contents at an arbitrary ring position (test-only field poke).
                tester.tail = tail_pos;
                tester.head = tail_pos;
                // Push 0..len with a sentinel value 1234 spliced in at `to_remove`.
                for i in 0..len {
                    if i == to_remove {
                        tester.push_back(1234);
                    }
                    tester.push_back(i);
                }
                if to_remove == len {
                    tester.push_back(1234);
                }
                // Removing the sentinel must restore 0..len in order.
                tester.remove(to_remove);
                // The cursors must still be valid indices into the ring buffer.
                assert!(tester.tail < tester.cap());
                assert!(tester.head < tester.cap());
                assert_eq!(tester, expected);
            }
        }
    }
}
43740
#[test]
fn test_range() {
    let mut tester: VecDeque<usize> = VecDeque::with_capacity(7);

    let cap = tester.capacity();
    let minlen = if cfg!(miri) { cap - 1 } else { 0 }; // Miri is too slow
    // Exercise every combination of length, tail position, and range bounds.
    for len in minlen..=cap {
        for tail in 0..=cap {
            for start in 0..=len {
                for end in start..=len {
                    // Start the contents at an arbitrary ring position
                    // (test-only field poke), then fill with 0..len.
                    tester.tail = tail;
                    tester.head = tail;
                    tester.extend(0..len);

                    // The range iterator must yield exactly the values start..end.
                    let observed: VecDeque<_> = tester.range(start..end).copied().collect();
                    let wanted: VecDeque<_> = (start..end).collect();
                    assert_eq!(observed, wanted);
                }
            }
        }
    }
}
43766
#[test]
fn test_range_mut() {
    let mut tester: VecDeque<usize> = VecDeque::with_capacity(7);

    let cap = tester.capacity();
    // Exercise every combination of length, tail position, and range bounds.
    for len in 0..=cap {
        for tail in 0..=cap {
            for start in 0..=len {
                for end in start..=len {
                    // Start the contents at an arbitrary ring position (test-only field poke).
                    tester.tail = tail;
                    tester.head = tail;
                    for i in 0..len {
                        tester.push_back(i);
                    }

                    let head_was = tester.head;
                    let tail_was = tester.tail;

                    // Check that we iterate over the correct values
                    let range: VecDeque<_> = tester.range_mut(start..end).map(|v| *v).collect();
                    let expected: VecDeque<_> = (start..end).collect();
                    assert_eq!(range, expected);

                    // We shouldn't have changed the capacity or made the
                    // head or tail out of bounds
                    assert_eq!(tester.capacity(), cap);
                    assert_eq!(tester.tail, tail_was);
                    assert_eq!(tester.head, head_was);
                }
            }
        }
    }
}
43800
#[test]
fn test_drain() {
    let mut tester: VecDeque<usize> = VecDeque::with_capacity(7);

    let cap = tester.capacity();
    // Exercise every combination of length, tail position, and drain bounds.
    for len in 0..=cap {
        for tail in 0..=cap {
            for drain_start in 0..=len {
                for drain_end in drain_start..=len {
                    // Start the contents at an arbitrary ring position (test-only field poke).
                    tester.tail = tail;
                    tester.head = tail;
                    for i in 0..len {
                        tester.push_back(i);
                    }

                    // Check that we drain the correct values
                    let drained: VecDeque<_> = tester.drain(drain_start..drain_end).collect();
                    let drained_expected: VecDeque<_> = (drain_start..drain_end).collect();
                    assert_eq!(drained, drained_expected);

                    // We shouldn't have changed the capacity or made the
                    // head or tail out of bounds
                    assert_eq!(tester.capacity(), cap);
                    assert!(tester.tail < tester.cap());
                    assert!(tester.head < tester.cap());

                    // We should see the correct values in the VecDeque
                    let expected: VecDeque<_> = (0..drain_start).chain(drain_end..len).collect();
                    assert_eq!(expected, tester);
                }
            }
        }
    }
}
43835
#[test]
fn test_shrink_to_fit() {
    // This test checks that every single combination of head and tail position,
    // is tested. Capacity 15 should be large enough to cover every case.

    let mut tester = VecDeque::with_capacity(15);
    // can't guarantee we got 15, so have to get what we got.
    // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
    // this test isn't covering what it wants to
    let cap = tester.capacity();
    tester.reserve(63);
    let max_cap = tester.capacity();

    for len in 0..=cap {
        // 0, 1, 2, .., len - 1
        let expected = (0..).take(len).collect::<VecDeque<_>>();
        for tail_pos in 0..=max_cap {
            // Start the contents at an arbitrary ring position (test-only field poke).
            tester.tail = tail_pos;
            tester.head = tail_pos;
            // Re-grow the buffer before filling so shrinking has work to do.
            tester.reserve(63);
            for i in 0..len {
                tester.push_back(i);
            }
            tester.shrink_to_fit();
            // Shrinking must not leave more than the original minimal capacity...
            assert!(tester.capacity() <= cap);
            // ...and the cursors must still be valid indices into the ring buffer.
            assert!(tester.tail < tester.cap());
            assert!(tester.head < tester.cap());
            assert_eq!(tester, expected);
        }
    }
}
43867
#[test]
fn test_split_off() {
    // This test checks that every single combination of tail position, length, and
    // split position is tested. Capacity 15 should be large enough to cover every case.

    let mut tester = VecDeque::with_capacity(15);
    // can't guarantee we got 15, so have to get what we got.
    // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
    // this test isn't covering what it wants to
    let cap = tester.capacity();

    // len is the length *before* splitting
    let minlen = if cfg!(miri) { cap - 1 } else { 0 }; // Miri is too slow
    for len in minlen..cap {
        // index to split at
        for at in 0..=len {
            // 0, 1, 2, .., at - 1 (may be empty)
            let expected_self = (0..).take(at).collect::<VecDeque<_>>();
            // at, at + 1, .., len - 1 (may be empty)
            let expected_other = (at..).take(len - at).collect::<VecDeque<_>>();

            for tail_pos in 0..cap {
                // Start the contents at an arbitrary ring position (test-only field poke).
                tester.tail = tail_pos;
                tester.head = tail_pos;
                for i in 0..len {
                    tester.push_back(i);
                }
                let result = tester.split_off(at);
                // Both halves must keep their cursors inside their ring buffers.
                assert!(tester.tail < tester.cap());
                assert!(tester.head < tester.cap());
                assert!(result.tail < result.cap());
                assert!(result.head < result.cap());
                assert_eq!(tester, expected_self);
                assert_eq!(result, expected_other);
            }
        }
    }
}
43906
#[test]
fn test_from_vec() {
    use crate::vec::Vec;
    // Every (capacity, length) combination must round-trip through the conversion.
    for cap in 0..35 {
        for len in 0..=cap {
            let mut src = Vec::with_capacity(cap);
            src.extend(0..len);

            let deque = VecDeque::from(src.clone());
            // The ring buffer always keeps a power-of-two allocation.
            assert!(deque.cap().is_power_of_two());
            assert_eq!(deque.len(), src.len());
            assert!(deque.into_iter().eq(src));
        }
    }

    // A zero-sized-type vector one below the limit must still convert.
    let src = Vec::from([(); MAXIMUM_ZST_CAPACITY - 1]);
    let deque = VecDeque::from(src.clone());
    assert!(deque.cap().is_power_of_two());
    assert_eq!(deque.len(), src.len());
}
43927
#[test]
#[should_panic = "capacity overflow"]
fn test_from_vec_zst_overflow() {
    use crate::vec::Vec;
    // The ring buffer needs one spare slot, so converting a ZST vector already
    // at the maximum capacity must panic with a capacity overflow.
    let src = Vec::from([(); MAXIMUM_ZST_CAPACITY]);
    let deque = VecDeque::from(src.clone()); // no room for +1
    assert!(deque.cap().is_power_of_two());
    assert_eq!(deque.len(), src.len());
}
43937
#[test]
fn test_vec_from_vecdeque() {
    use crate::vec::Vec;

    // Builds a deque whose contents start `offset` slots into the ring (rotated
    // via throwaway push/pop pairs), then checks `Vec::from` preserves order.
    fn create_vec_and_test_convert(capacity: usize, offset: usize, len: usize) {
        let mut vd = VecDeque::with_capacity(capacity);
        for _ in 0..offset {
            vd.push_back(0);
            vd.pop_front();
        }
        vd.extend(0..len);

        let vec: Vec<_> = Vec::from(vd.clone());
        assert_eq!(vec.len(), vd.len());
        assert!(vec.into_iter().eq(vd));
    }

    // Miri is too slow
    let max_pwr = if cfg!(miri) { 5 } else { 7 };

    for cap_pwr in 0..max_pwr {
        // Make capacity as a (2^x)-1, so that the ring size is 2^x
        let cap = (2i32.pow(cap_pwr) - 1) as usize;

        // In these cases there is enough free space to solve it with copies
        for len in 0..((cap + 1) / 2) {
            // Test contiguous cases
            for offset in 0..(cap - len) {
                create_vec_and_test_convert(cap, offset, len)
            }

            // Test cases where block at end of buffer is bigger than block at start
            for offset in (cap - len)..(cap - (len / 2)) {
                create_vec_and_test_convert(cap, offset, len)
            }

            // Test cases where block at start of buffer is bigger than block at end
            for offset in (cap - (len / 2))..cap {
                create_vec_and_test_convert(cap, offset, len)
            }
        }

        // Now there's not (necessarily) space to straighten the ring with simple copies,
        // the ring will use swapping when:
        // (cap + 1 - offset) > (cap + 1 - len) && (len - (cap + 1 - offset)) > (cap + 1 - len))
        //  right block size  >   free space    &&      left block size       >    free space
        for len in ((cap + 1) / 2)..cap {
            // Test contiguous cases
            for offset in 0..(cap - len) {
                create_vec_and_test_convert(cap, offset, len)
            }

            // Test cases where block at end of buffer is bigger than block at start
            for offset in (cap - len)..(cap - (len / 2)) {
                create_vec_and_test_convert(cap, offset, len)
            }

            // Test cases where block at start of buffer is bigger than block at end
            for offset in (cap - (len / 2))..cap {
                create_vec_and_test_convert(cap, offset, len)
            }
        }
    }
}
44002
#[test]
fn test_clone_from() {
    let m = vec![1; 8];
    let n = vec![2; 12];
    let limit = if cfg!(miri) { 4 } else { 8 }; // Miri is too slow
    // Try clone_from between deques of different lengths, with the contents
    // rotated to various ring positions by extra push_front calls.
    for pfv in 0..limit {
        for pfu in 0..limit {
            for longer in 0..2 {
                // Alternate which vector is the destination and which the source.
                let (vr, ur) = match longer {
                    0 => (&m, &n),
                    _ => (&n, &m),
                };
                let mut v = VecDeque::from(vr.clone());
                (0..pfv).for_each(|_| v.push_front(1));
                let mut u = VecDeque::from(ur.clone());
                (0..pfu).for_each(|_| u.push_front(2));
                v.clone_from(&u);
                assert_eq!(&v, &u);
            }
        }
    }
}
44026
#[test]
fn test_vec_deque_truncate_drop() {
    // Counts every `Elem` drop. NOTE(review): the unsynchronized `static mut`
    // accesses assume this test body is not run concurrently with itself.
    static mut DROPS: u32 = 0;
    #[derive(Clone)]
    struct Elem(i32);
    impl Drop for Elem {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }
        }
    }

    let v = vec![Elem(1), Elem(2), Elem(3), Elem(4), Elem(5)];
    // Vary how many elements enter via push_front vs push_back so the contents
    // sit at different ring positions before truncation.
    for push_front in 0..=v.len() {
        let v = v.clone();
        let mut tester = VecDeque::with_capacity(5);
        for (index, elem) in v.into_iter().enumerate() {
            if index < push_front {
                tester.push_front(elem);
            } else {
                tester.push_back(elem);
            }
        }
        assert_eq!(unsafe { DROPS }, 0);
        // Truncating from 5 to 3 must drop exactly the 2 removed elements.
        tester.truncate(3);
        assert_eq!(unsafe { DROPS }, 2);
        // Truncating to 0 drops the remaining 3.
        tester.truncate(0);
        assert_eq!(unsafe { DROPS }, 5);
        unsafe {
            DROPS = 0;
        }
    }
}
44061
#[test]
fn issue_53529() {
    use crate::boxed::Box;

    // Put the destination deque into a wrapped state: two push_fronts and a
    // pop_back leave a single boxed 2 at a non-zero ring position.
    let mut dst = VecDeque::new();
    dst.push_front(Box::new(1));
    dst.push_front(Box::new(2));
    assert_eq!(*dst.pop_back().unwrap(), 1);

    // Appending must not corrupt either element.
    let mut src = VecDeque::new();
    src.push_front(Box::new(2));
    dst.append(&mut src);
    for boxed in dst {
        assert_eq!(*boxed, 2);
    }
}
44078
#[test]
fn issue_80303() {
    use core::iter;
    use core::num::Wrapping;

    // Regression test: equal deques must hash equally even when their internal
    // ring layouts (and therefore `as_slices` results) differ.

    // This is a valid, albeit rather bad hash function implementation.
    struct SimpleHasher(Wrapping<u64>);

    impl Hasher for SimpleHasher {
        fn finish(&self) -> u64 {
            self.0.0
        }

        fn write(&mut self, bytes: &[u8]) {
            // This particular implementation hashes value 24 in addition to bytes.
            // Such an implementation is valid as Hasher only guarantees equivalence
            // for the exact same set of calls to its methods.
            for &v in iter::once(&24).chain(bytes) {
                self.0 = Wrapping(31) * self.0 + Wrapping(u64::from(v));
            }
        }
    }

    fn hash_code(value: impl Hash) -> u64 {
        let mut hasher = SimpleHasher(Wrapping(1));
        value.hash(&mut hasher);
        hasher.finish()
    }

    // This creates two deques for which values returned by as_slices
    // method differ.
    let vda: VecDeque<u8> = (0..10).collect();
    let mut vdb = VecDeque::with_capacity(10);
    vdb.extend(5..10);
    (0..5).rev().for_each(|elem| vdb.push_front(elem));
    assert_ne!(vda.as_slices(), vdb.as_slices());
    assert_eq!(vda, vdb);
    assert_eq!(hash_code(vda), hash_code(vdb));
}
44118use core::fmt;
44119use core::iter::{FusedIterator, TrustedLen, TrustedRandomAccess};
44120use core::marker::PhantomData;
44121
44122use super::{count, wrap_index, RingSlices};
44123
/// A mutable iterator over the elements of a `VecDeque`.
///
/// This `struct` is created by the [`iter_mut`] method on [`super::VecDeque`]. See its
/// documentation for more.
///
/// [`iter_mut`]: super::VecDeque::iter_mut
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IterMut<'a, T: 'a> {
    // Internal safety invariant: the entire slice is dereferencable.
    pub(crate) ring: *mut [T],
    // Front cursor: index of the next element yielded by `next`.
    pub(crate) tail: usize,
    // Back cursor: one past the last element; `next_back` yields from here.
    pub(crate) head: usize,
    // Ties the raw `ring` pointer to the lifetime of the mutable borrow it came from.
    pub(crate) phantom: PhantomData<&'a mut [T]>,
}
44138
// SAFETY: we do nothing thread-local and there is no interior mutability,
// so the usual structural `Send`/`Sync` apply — the same bounds the compiler
// would infer if this stored `&'a mut [T]` instead of a raw pointer.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send> Send for IterMut<'_, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for IterMut<'_, T> {}
44145
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Split the remaining elements into the two contiguous ring slices.
        let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
        // SAFETY: these are the elements we have not handed out yet, so aliasing is fine.
        // The `IterMut` invariant also ensures everything is dereferencable.
        let (front, back) = unsafe { (&*front, &*back) };
        f.debug_tuple("IterMut").field(&front).field(&back).finish()
    }
}
44156
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;

    #[inline]
    fn next(&mut self) -> Option<&'a mut T> {
        // The iterator is exhausted once the two cursors meet.
        if self.tail == self.head {
            return None;
        }
        let tail = self.tail;
        // Advance the front cursor, wrapping around the ring.
        self.tail = wrap_index(self.tail.wrapping_add(1), self.ring.len());

        // SAFETY: `tail` indexes an element that has not been handed out yet,
        // so creating a unique reference to it cannot alias a previous item.
        unsafe {
            let elem = self.ring.get_unchecked_mut(tail);
            Some(&mut *elem)
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // `count` yields the exact number of elements between the cursors.
        let len = count(self.tail, self.head, self.ring.len());
        (len, Some(len))
    }

    fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
    where
        F: FnMut(Acc, Self::Item) -> Acc,
    {
        let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
        // SAFETY: these are the elements we have not handed out yet, so aliasing is fine.
        // The `IterMut` invariant also ensures everything is dereferencable.
        let (front, back) = unsafe { (&mut *front, &mut *back) };
        accum = front.iter_mut().fold(accum, &mut f);
        back.iter_mut().fold(accum, &mut f)
    }

    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        if n >= count(self.tail, self.head, self.ring.len()) {
            // Skipping past the end exhausts the iterator.
            self.tail = self.head;
            None
        } else {
            // Jump the front cursor forward by `n`, then yield the next element.
            self.tail = wrap_index(self.tail.wrapping_add(n), self.ring.len());
            self.next()
        }
    }

    #[inline]
    fn last(mut self) -> Option<&'a mut T> {
        // The last element from the front is the first element from the back.
        self.next_back()
    }

    #[inline]
    unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item
    where
        Self: TrustedRandomAccess,
    {
        // Safety: The TrustedRandomAccess contract requires that callers only pass an index
        // that is in bounds, so after wrapping relative to `tail` it names a valid,
        // initialized slot of the ring buffer.
        unsafe {
            let idx = wrap_index(self.tail.wrapping_add(idx), self.ring.len());
            &mut *self.ring.get_unchecked_mut(idx)
        }
    }
}
44221
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
    #[inline]
    fn next_back(&mut self) -> Option<&'a mut T> {
        // The iterator is exhausted once the two cursors meet.
        if self.tail == self.head {
            return None;
        }
        // Step the back cursor one slot backwards, wrapping around the ring.
        self.head = wrap_index(self.head.wrapping_sub(1), self.ring.len());

        // SAFETY: after the step, `head` indexes an element that has not been
        // handed out yet, so the unique reference cannot alias a previous item.
        unsafe {
            let elem = self.ring.get_unchecked_mut(self.head);
            Some(&mut *elem)
        }
    }

    // Fold from the back: the `back` slice holds the logically-last elements,
    // so it is rfolded first, then the `front` slice.
    fn rfold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
    where
        F: FnMut(Acc, Self::Item) -> Acc,
    {
        let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
        // SAFETY: these are the elements we have not handed out yet, so aliasing is fine.
        // The `IterMut` invariant also ensures everything is dereferencable.
        let (front, back) = unsafe { (&mut *front, &mut *back) };
        accum = back.iter_mut().rfold(accum, &mut f);
        front.iter_mut().rfold(accum, &mut f)
    }
}
44249
44250#[stable(feature = "rust1", since = "1.0.0")]
44251impl<T> ExactSizeIterator for IterMut<'_, T> {
44252    fn is_empty(&self) -> bool {
44253        self.head == self.tail
44254    }
44255}
44256
// Once exhausted, the cursors are equal and stay equal, so `next` keeps
// returning `None`: the iterator is fused.
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IterMut<'_, T> {}
44259
// SAFETY: `size_hint` reports the exact remaining length computed from the
// two cursors, so the advertised length is always accurate.
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for IterMut<'_, T> {}
44262
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<T> TrustedRandomAccess for IterMut<'_, T> {
    // `__iterator_get_unchecked` only forms a reference into the ring;
    // it runs no user code and mutates no iterator state.
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}
44268//! A priority queue implemented with a binary heap.
44269//!
44270//! Insertion and popping the largest element have *O*(log(*n*)) time complexity.
44271//! Checking the largest element is *O*(1). Converting a vector to a binary heap
44272//! can be done in-place, and has *O*(*n*) complexity. A binary heap can also be
44273//! converted to a sorted vector in-place, allowing it to be used for an *O*(*n* \* log(*n*))
44274//! in-place heapsort.
44275//!
44276//! # Examples
44277//!
44278//! This is a larger example that implements [Dijkstra's algorithm][dijkstra]
44279//! to solve the [shortest path problem][sssp] on a [directed graph][dir_graph].
44280//! It shows how to use [`BinaryHeap`] with custom types.
44281//!
44282//! [dijkstra]: https://en.wikipedia.org/wiki/Dijkstra%27s_algorithm
44283//! [sssp]: https://en.wikipedia.org/wiki/Shortest_path_problem
44284//! [dir_graph]: https://en.wikipedia.org/wiki/Directed_graph
44285//!
44286//! ```
44287//! use std::cmp::Ordering;
44288//! use std::collections::BinaryHeap;
44289//!
44290//! #[derive(Copy, Clone, Eq, PartialEq)]
44291//! struct State {
44292//!     cost: usize,
44293//!     position: usize,
44294//! }
44295//!
44296//! // The priority queue depends on `Ord`.
44297//! // Explicitly implement the trait so the queue becomes a min-heap
44298//! // instead of a max-heap.
44299//! impl Ord for State {
44300//!     fn cmp(&self, other: &Self) -> Ordering {
//!         // Notice that we flip the ordering on costs.
44302//!         // In case of a tie we compare positions - this step is necessary
44303//!         // to make implementations of `PartialEq` and `Ord` consistent.
44304//!         other.cost.cmp(&self.cost)
44305//!             .then_with(|| self.position.cmp(&other.position))
44306//!     }
44307//! }
44308//!
44309//! // `PartialOrd` needs to be implemented as well.
44310//! impl PartialOrd for State {
44311//!     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
44312//!         Some(self.cmp(other))
44313//!     }
44314//! }
44315//!
//! // Each node is represented as a `usize`, for a shorter implementation.
44317//! struct Edge {
44318//!     node: usize,
44319//!     cost: usize,
44320//! }
44321//!
44322//! // Dijkstra's shortest path algorithm.
44323//!
44324//! // Start at `start` and use `dist` to track the current shortest distance
44325//! // to each node. This implementation isn't memory-efficient as it may leave duplicate
44326//! // nodes in the queue. It also uses `usize::MAX` as a sentinel value,
44327//! // for a simpler implementation.
44328//! fn shortest_path(adj_list: &Vec<Vec<Edge>>, start: usize, goal: usize) -> Option<usize> {
44329//!     // dist[node] = current shortest distance from `start` to `node`
44330//!     let mut dist: Vec<_> = (0..adj_list.len()).map(|_| usize::MAX).collect();
44331//!
44332//!     let mut heap = BinaryHeap::new();
44333//!
44334//!     // We're at `start`, with a zero cost
44335//!     dist[start] = 0;
44336//!     heap.push(State { cost: 0, position: start });
44337//!
44338//!     // Examine the frontier with lower cost nodes first (min-heap)
44339//!     while let Some(State { cost, position }) = heap.pop() {
44340//!         // Alternatively we could have continued to find all shortest paths
44341//!         if position == goal { return Some(cost); }
44342//!
44343//!         // Important as we may have already found a better way
44344//!         if cost > dist[position] { continue; }
44345//!
44346//!         // For each node we can reach, see if we can find a way with
44347//!         // a lower cost going through this node
44348//!         for edge in &adj_list[position] {
44349//!             let next = State { cost: cost + edge.cost, position: edge.node };
44350//!
44351//!             // If so, add it to the frontier and continue
44352//!             if next.cost < dist[next.position] {
44353//!                 heap.push(next);
44354//!                 // Relaxation, we have now found a better way
44355//!                 dist[next.position] = next.cost;
44356//!             }
44357//!         }
44358//!     }
44359//!
44360//!     // Goal not reachable
44361//!     None
44362//! }
44363//!
44364//! fn main() {
44365//!     // This is the directed graph we're going to use.
44366//!     // The node numbers correspond to the different states,
44367//!     // and the edge weights symbolize the cost of moving
44368//!     // from one node to another.
44369//!     // Note that the edges are one-way.
44370//!     //
44371//!     //                  7
44372//!     //          +-----------------+
44373//!     //          |                 |
44374//!     //          v   1        2    |  2
44375//!     //          0 -----> 1 -----> 3 ---> 4
44376//!     //          |        ^        ^      ^
44377//!     //          |        | 1      |      |
44378//!     //          |        |        | 3    | 1
44379//!     //          +------> 2 -------+      |
44380//!     //           10      |               |
44381//!     //                   +---------------+
44382//!     //
44383//!     // The graph is represented as an adjacency list where each index,
44384//!     // corresponding to a node value, has a list of outgoing edges.
44385//!     // Chosen for its efficiency.
44386//!     let graph = vec![
44387//!         // Node 0
44388//!         vec![Edge { node: 2, cost: 10 },
44389//!              Edge { node: 1, cost: 1 }],
44390//!         // Node 1
44391//!         vec![Edge { node: 3, cost: 2 }],
44392//!         // Node 2
44393//!         vec![Edge { node: 1, cost: 1 },
44394//!              Edge { node: 3, cost: 3 },
44395//!              Edge { node: 4, cost: 1 }],
44396//!         // Node 3
44397//!         vec![Edge { node: 0, cost: 7 },
44398//!              Edge { node: 4, cost: 2 }],
44399//!         // Node 4
44400//!         vec![]];
44401//!
44402//!     assert_eq!(shortest_path(&graph, 0, 1), Some(1));
44403//!     assert_eq!(shortest_path(&graph, 0, 3), Some(3));
44404//!     assert_eq!(shortest_path(&graph, 3, 0), Some(7));
44405//!     assert_eq!(shortest_path(&graph, 0, 4), Some(5));
44406//!     assert_eq!(shortest_path(&graph, 4, 0), None);
44407//! }
44408//! ```
44409
44410#![allow(missing_docs)]
44411#![stable(feature = "rust1", since = "1.0.0")]
44412
44413use core::fmt;
44414use core::iter::{FromIterator, FusedIterator, InPlaceIterable, SourceIter, TrustedLen};
44415use core::mem::{self, swap, ManuallyDrop};
44416use core::ops::{Deref, DerefMut};
44417use core::ptr;
44418
44419use crate::slice;
44420use crate::vec::{self, AsIntoIter, Vec};
44421
44422use super::SpecExtend;
44423
44424/// A priority queue implemented with a binary heap.
44425///
44426/// This will be a max-heap.
44427///
44428/// It is a logic error for an item to be modified in such a way that the
44429/// item's ordering relative to any other item, as determined by the `Ord`
44430/// trait, changes while it is in the heap. This is normally only possible
44431/// through `Cell`, `RefCell`, global state, I/O, or unsafe code. The
44432/// behavior resulting from such a logic error is not specified, but will
44433/// not result in undefined behavior. This could include panics, incorrect
44434/// results, aborts, memory leaks, and non-termination.
44435///
44436/// # Examples
44437///
44438/// ```
44439/// use std::collections::BinaryHeap;
44440///
44441/// // Type inference lets us omit an explicit type signature (which
44442/// // would be `BinaryHeap<i32>` in this example).
44443/// let mut heap = BinaryHeap::new();
44444///
44445/// // We can use peek to look at the next item in the heap. In this case,
44446/// // there's no items in there yet so we get None.
44447/// assert_eq!(heap.peek(), None);
44448///
44449/// // Let's add some scores...
44450/// heap.push(1);
44451/// heap.push(5);
44452/// heap.push(2);
44453///
44454/// // Now peek shows the most important item in the heap.
44455/// assert_eq!(heap.peek(), Some(&5));
44456///
44457/// // We can check the length of a heap.
44458/// assert_eq!(heap.len(), 3);
44459///
44460/// // We can iterate over the items in the heap, although they are returned in
44461/// // a random order.
44462/// for x in &heap {
44463///     println!("{}", x);
44464/// }
44465///
44466/// // If we instead pop these scores, they should come back in order.
44467/// assert_eq!(heap.pop(), Some(5));
44468/// assert_eq!(heap.pop(), Some(2));
44469/// assert_eq!(heap.pop(), Some(1));
44470/// assert_eq!(heap.pop(), None);
44471///
44472/// // We can clear the heap of any remaining items.
44473/// heap.clear();
44474///
44475/// // The heap should now be empty.
44476/// assert!(heap.is_empty())
44477/// ```
44478///
44479/// ## Min-heap
44480///
44481/// Either `std::cmp::Reverse` or a custom `Ord` implementation can be used to
44482/// make `BinaryHeap` a min-heap. This makes `heap.pop()` return the smallest
44483/// value instead of the greatest one.
44484///
44485/// ```
44486/// use std::collections::BinaryHeap;
44487/// use std::cmp::Reverse;
44488///
44489/// let mut heap = BinaryHeap::new();
44490///
44491/// // Wrap values in `Reverse`
44492/// heap.push(Reverse(1));
44493/// heap.push(Reverse(5));
44494/// heap.push(Reverse(2));
44495///
44496/// // If we pop these scores now, they should come back in the reverse order.
44497/// assert_eq!(heap.pop(), Some(Reverse(1)));
44498/// assert_eq!(heap.pop(), Some(Reverse(2)));
44499/// assert_eq!(heap.pop(), Some(Reverse(5)));
44500/// assert_eq!(heap.pop(), None);
44501/// ```
44502///
44503/// # Time complexity
44504///
44505/// | [push] | [pop]     | [peek]/[peek\_mut] |
44506/// |--------|-----------|--------------------|
/// | *O*(1)~ | *O*(log(*n*)) | *O*(1)             |
44508///
44509/// The value for `push` is an expected cost; the method documentation gives a
44510/// more detailed analysis.
44511///
44512/// [push]: BinaryHeap::push
44513/// [pop]: BinaryHeap::pop
44514/// [peek]: BinaryHeap::peek
44515/// [peek\_mut]: BinaryHeap::peek_mut
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "BinaryHeap")]
pub struct BinaryHeap<T> {
    // Invariant: `data` forms an implicit max-heap — for every index `i`,
    // `data[i] >= data[2*i + 1]` and `data[i] >= data[2*i + 2]` whenever
    // those children exist. Maintained by the sift_up/sift_down routines.
    data: Vec<T>,
}
44521
44522/// Structure wrapping a mutable reference to the greatest item on a
44523/// `BinaryHeap`.
44524///
44525/// This `struct` is created by the [`peek_mut`] method on [`BinaryHeap`]. See
44526/// its documentation for more.
44527///
44528/// [`peek_mut`]: BinaryHeap::peek_mut
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
pub struct PeekMut<'a, T: 'a + Ord> {
    // The peeked heap; guaranteed non-empty while this wrapper exists.
    heap: &'a mut BinaryHeap<T>,
    // Set to `true` on mutable access (`DerefMut`): the root may have been
    // modified, so `Drop` must sift it down to restore the heap invariant.
    sift: bool,
}
44534
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: Ord + fmt::Debug> fmt::Debug for PeekMut<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `data[0]` is the peeked (greatest) element. `PeekMut` is only
        // created for non-empty heaps, so this index cannot panic.
        f.debug_tuple("PeekMut").field(&self.heap.data[0]).finish()
    }
}
44541
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
impl<T: Ord> Drop for PeekMut<'_, T> {
    fn drop(&mut self) {
        // Restore the heap invariant only if the root was mutably accessed
        // (`DerefMut` sets `self.sift`); a shared peek cannot break order.
        if self.sift {
            // SAFETY: PeekMut is only instantiated for non-empty heaps.
            unsafe { self.heap.sift_down(0) };
        }
    }
}
44551
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
impl<T: Ord> Deref for PeekMut<'_, T> {
    type Target = T;
    fn deref(&self) -> &T {
        debug_assert!(!self.heap.is_empty());
        // Shared access cannot change the root, so no drop-time sift is
        // scheduled here (contrast with `DerefMut`).
        // SAFE: PeekMut is only instantiated for non-empty heaps
        unsafe { self.heap.data.get_unchecked(0) }
    }
}
44561
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
impl<T: Ord> DerefMut for PeekMut<'_, T> {
    fn deref_mut(&mut self) -> &mut T {
        debug_assert!(!self.heap.is_empty());
        // Conservatively assume the caller mutates the root: schedule a
        // sift-down for when this guard is dropped.
        self.sift = true;
        // SAFE: PeekMut is only instantiated for non-empty heaps
        unsafe { self.heap.data.get_unchecked_mut(0) }
    }
}
44571
44572impl<'a, T: Ord> PeekMut<'a, T> {
44573    /// Removes the peeked value from the heap and returns it.
44574    #[stable(feature = "binary_heap_peek_mut_pop", since = "1.18.0")]
44575    pub fn pop(mut this: PeekMut<'a, T>) -> T {
44576        let value = this.heap.pop().unwrap();
44577        this.sift = false;
44578        value
44579    }
44580}
44581
44582#[stable(feature = "rust1", since = "1.0.0")]
44583impl<T: Clone> Clone for BinaryHeap<T> {
44584    fn clone(&self) -> Self {
44585        BinaryHeap { data: self.data.clone() }
44586    }
44587
44588    fn clone_from(&mut self, source: &Self) {
44589        self.data.clone_from(&source.data);
44590    }
44591}
44592
44593#[stable(feature = "rust1", since = "1.0.0")]
44594impl<T: Ord> Default for BinaryHeap<T> {
44595    /// Creates an empty `BinaryHeap<T>`.
44596    #[inline]
44597    fn default() -> BinaryHeap<T> {
44598        BinaryHeap::new()
44599    }
44600}
44601
#[stable(feature = "binaryheap_debug", since = "1.4.0")]
impl<T: fmt::Debug> fmt::Debug for BinaryHeap<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Prints elements in underlying-vector (arbitrary) order, not sorted.
        f.debug_list().entries(self.iter()).finish()
    }
}
44608
44609impl<T: Ord> BinaryHeap<T> {
44610    /// Creates an empty `BinaryHeap` as a max-heap.
44611    ///
44612    /// # Examples
44613    ///
44614    /// Basic usage:
44615    ///
44616    /// ```
44617    /// use std::collections::BinaryHeap;
44618    /// let mut heap = BinaryHeap::new();
44619    /// heap.push(4);
44620    /// ```
44621    #[stable(feature = "rust1", since = "1.0.0")]
44622    pub fn new() -> BinaryHeap<T> {
44623        BinaryHeap { data: vec![] }
44624    }
44625
44626    /// Creates an empty `BinaryHeap` with a specific capacity.
44627    /// This preallocates enough memory for `capacity` elements,
44628    /// so that the `BinaryHeap` does not have to be reallocated
44629    /// until it contains at least that many values.
44630    ///
44631    /// # Examples
44632    ///
44633    /// Basic usage:
44634    ///
44635    /// ```
44636    /// use std::collections::BinaryHeap;
44637    /// let mut heap = BinaryHeap::with_capacity(10);
44638    /// heap.push(4);
44639    /// ```
44640    #[stable(feature = "rust1", since = "1.0.0")]
44641    pub fn with_capacity(capacity: usize) -> BinaryHeap<T> {
44642        BinaryHeap { data: Vec::with_capacity(capacity) }
44643    }
44644
44645    /// Returns a mutable reference to the greatest item in the binary heap, or
44646    /// `None` if it is empty.
44647    ///
44648    /// Note: If the `PeekMut` value is leaked, the heap may be in an
44649    /// inconsistent state.
44650    ///
44651    /// # Examples
44652    ///
44653    /// Basic usage:
44654    ///
44655    /// ```
44656    /// use std::collections::BinaryHeap;
44657    /// let mut heap = BinaryHeap::new();
44658    /// assert!(heap.peek_mut().is_none());
44659    ///
44660    /// heap.push(1);
44661    /// heap.push(5);
44662    /// heap.push(2);
44663    /// {
44664    ///     let mut val = heap.peek_mut().unwrap();
44665    ///     *val = 0;
44666    /// }
44667    /// assert_eq!(heap.peek(), Some(&2));
44668    /// ```
44669    ///
44670    /// # Time complexity
44671    ///
44672    /// If the item is modified then the worst case time complexity is *O*(log(*n*)),
44673    /// otherwise it's *O*(1).
44674    #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
44675    pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T>> {
44676        if self.is_empty() { None } else { Some(PeekMut { heap: self, sift: false }) }
44677    }
44678
    /// Removes the greatest item from the binary heap and returns it, or `None` if it
    /// is empty.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::from(vec![1, 3]);
    ///
    /// assert_eq!(heap.pop(), Some(3));
    /// assert_eq!(heap.pop(), Some(1));
    /// assert_eq!(heap.pop(), None);
    /// ```
    ///
    /// # Time complexity
    ///
    /// The worst case cost of `pop` on a heap containing *n* elements is *O*(log(*n*)).
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn pop(&mut self) -> Option<T> {
        // Detach the last element; if the heap is still non-empty, swap it
        // with the root (the maximum) and sift the new root back down to
        // restore the heap invariant.
        self.data.pop().map(|mut item| {
            if !self.is_empty() {
                swap(&mut item, &mut self.data[0]);
                // SAFETY: !self.is_empty() means that self.len() > 0
                unsafe { self.sift_down_to_bottom(0) };
            }
            item
        })
    }
44709
    /// Pushes an item onto the binary heap.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    /// heap.push(3);
    /// heap.push(5);
    /// heap.push(1);
    ///
    /// assert_eq!(heap.len(), 3);
    /// assert_eq!(heap.peek(), Some(&5));
    /// ```
    ///
    /// # Time complexity
    ///
    /// The expected cost of `push`, averaged over every possible ordering of
    /// the elements being pushed, and over a sufficiently large number of
    /// pushes, is *O*(1). This is the most meaningful cost metric when pushing
    /// elements that are *not* already in any sorted pattern.
    ///
    /// The time complexity degrades if elements are pushed in predominantly
    /// ascending order. In the worst case, elements are pushed in ascending
    /// sorted order and the amortized cost per push is *O*(log(*n*)) against a heap
    /// containing *n* elements.
    ///
    /// The worst case cost of a *single* call to `push` is *O*(*n*). The worst case
    /// occurs when capacity is exhausted and needs a resize. The resize cost
    /// has been amortized in the previous figures.
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn push(&mut self, item: T) {
        // The new item lands at index `old_len` (the end of the vector) and
        // is then sifted up to its final heap position.
        let old_len = self.len();
        self.data.push(item);
        // SAFETY: Since we pushed a new item it means that
        //  old_len = self.len() - 1 < self.len()
        unsafe { self.sift_up(0, old_len) };
    }
44750
    /// Consumes the `BinaryHeap` and returns a vector in sorted
    /// (ascending) order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// let mut heap = BinaryHeap::from(vec![1, 2, 4, 5, 7]);
    /// heap.push(6);
    /// heap.push(3);
    ///
    /// let vec = heap.into_sorted_vec();
    /// assert_eq!(vec, [1, 2, 3, 4, 5, 6, 7]);
    /// ```
    #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
    pub fn into_sorted_vec(mut self) -> Vec<T> {
        // Classic in-place heapsort: repeatedly swap the current maximum
        // (the root) into the growing sorted tail, then repair the heap
        // over the shrinking prefix `[0, end)`.
        let mut end = self.len();
        while end > 1 {
            end -= 1;
            // SAFETY: `end` goes from `self.len() - 1` to 1 (both included),
            //  so it's always a valid index to access.
            //  It is safe to access index 0 (i.e. `ptr`), because
            //  1 <= end < self.len(), which means self.len() >= 2.
            unsafe {
                let ptr = self.data.as_mut_ptr();
                ptr::swap(ptr, ptr.add(end));
            }
            // SAFETY: `end` goes from `self.len() - 1` to 1 (both included) so:
            //  0 < 1 <= end <= self.len() - 1 < self.len()
            //  Which means 0 < end and end < self.len().
            unsafe { self.sift_down_range(0, end) };
        }
        self.into_vec()
    }
44788
44789    // The implementations of sift_up and sift_down use unsafe blocks in
44790    // order to move an element out of the vector (leaving behind a
44791    // hole), shift along the others and move the removed element back into the
44792    // vector at the final location of the hole.
44793    // The `Hole` type is used to represent this, and make sure
44794    // the hole is filled back at the end of its scope, even on panic.
44795    // Using a hole reduces the constant factor compared to using swaps,
44796    // which involves twice as many moves.
44797
    /// Moves the element at `pos` up the heap (toward the root) while it is
    /// greater than its parent, never rising above `start`. Returns the
    /// element's final position.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `pos < self.len()`.
    unsafe fn sift_up(&mut self, start: usize, pos: usize) -> usize {
        // Take out the value at `pos` and create a hole.
        // SAFETY: The caller guarantees that pos < self.len()
        let mut hole = unsafe { Hole::new(&mut self.data, pos) };

        while hole.pos() > start {
            let parent = (hole.pos() - 1) / 2;

            // SAFETY: hole.pos() > start >= 0, which means hole.pos() > 0
            //  and so hole.pos() - 1 can't underflow.
            //  This guarantees that parent < hole.pos() so
            //  it's a valid index and also != hole.pos().
            if hole.element() <= unsafe { hole.get(parent) } {
                break;
            }

            // SAFETY: Same as above
            unsafe { hole.move_to(parent) };
        }

        // `Hole`'s destructor moves the element back into its final position.
        hole.pos()
    }
44823
    /// Take an element at `pos` and move it down the heap,
    /// while its children are larger. Only indices below `end` are touched,
    /// so the (sorted) suffix `[end, len)` is left intact.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `pos < end <= self.len()`.
    unsafe fn sift_down_range(&mut self, pos: usize, end: usize) {
        // SAFETY: The caller guarantees that pos < end <= self.len().
        let mut hole = unsafe { Hole::new(&mut self.data, pos) };
        let mut child = 2 * hole.pos() + 1;

        // Loop invariant: child == 2 * hole.pos() + 1.
        // The condition means both children of the hole are below `end`.
        while child <= end.saturating_sub(2) {
            // compare with the greater of the two children
            // SAFETY: child < end - 1 < self.len() and
            //  child + 1 < end <= self.len(), so they're valid indexes.
            //  child == 2 * hole.pos() + 1 != hole.pos() and
            //  child + 1 == 2 * hole.pos() + 2 != hole.pos().
            // FIXME: 2 * hole.pos() + 1 or 2 * hole.pos() + 2 could overflow
            //  if T is a ZST
            child += unsafe { hole.get(child) <= hole.get(child + 1) } as usize;

            // if we are already in order, stop.
            // SAFETY: child is now either the old child or the old child+1
            //  We already proven that both are < self.len() and != hole.pos()
            if hole.element() >= unsafe { hole.get(child) } {
                return;
            }

            // SAFETY: same as above.
            unsafe { hole.move_to(child) };
            child = 2 * hole.pos() + 1;
        }

        // The hole may still have a single (left) child at index end - 1.
        // SAFETY: && short circuit, which means that in the
        //  second condition it's already true that child == end - 1 < self.len().
        if child == end - 1 && hole.element() < unsafe { hole.get(child) } {
            // SAFETY: child is already proven to be a valid index and
            //  child == 2 * hole.pos() + 1 != hole.pos().
            unsafe { hole.move_to(child) };
        }
    }
44866
    /// Take an element at `pos` and move it down the heap while its children
    /// are larger, over the full length of the data.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `pos < self.len()`.
    unsafe fn sift_down(&mut self, pos: usize) {
        let len = self.len();
        // SAFETY: pos < len is guaranteed by the caller and
        //  obviously len = self.len() <= self.len().
        unsafe { self.sift_down_range(pos, len) };
    }
44876
    /// Take an element at `pos` and move it all the way down the heap,
    /// then sift it up to its position.
    ///
    /// Note: This is faster when the element is known to be large / should
    /// be closer to the bottom.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `pos < self.len()`.
    unsafe fn sift_down_to_bottom(&mut self, mut pos: usize) {
        let end = self.len();
        let start = pos;

        // First pass: walk the hole all the way to a leaf, always following
        // the greater child, without comparing against the moved element.
        // SAFETY: The caller guarantees that pos < self.len().
        let mut hole = unsafe { Hole::new(&mut self.data, pos) };
        let mut child = 2 * hole.pos() + 1;

        // Loop invariant: child == 2 * hole.pos() + 1.
        while child <= end.saturating_sub(2) {
            // SAFETY: child < end - 1 < self.len() and
            //  child + 1 < end <= self.len(), so they're valid indexes.
            //  child == 2 * hole.pos() + 1 != hole.pos() and
            //  child + 1 == 2 * hole.pos() + 2 != hole.pos().
            // FIXME: 2 * hole.pos() + 1 or 2 * hole.pos() + 2 could overflow
            //  if T is a ZST
            child += unsafe { hole.get(child) <= hole.get(child + 1) } as usize;

            // SAFETY: Same as above
            unsafe { hole.move_to(child) };
            child = 2 * hole.pos() + 1;
        }

        if child == end - 1 {
            // SAFETY: child == end - 1 < self.len(), so it's a valid index
            //  and child == 2 * hole.pos() + 1 != hole.pos().
            unsafe { hole.move_to(child) };
        }
        pos = hole.pos();
        drop(hole);

        // Second pass: sift the element back up from the bottom to where it
        // actually belongs.
        // SAFETY: pos is the position in the hole and was already proven
        //  to be a valid index.
        unsafe { self.sift_up(start, pos) };
    }
44921
    /// Rebuild assuming data[0..start] is still a proper heap.
    ///
    /// Chooses between a full *O*(*n*) rebuild and sifting up each of the
    /// `len - start` tail elements, whichever is estimated to be cheaper.
    fn rebuild_tail(&mut self, start: usize) {
        if start == self.len() {
            return;
        }

        let tail_len = self.len() - start;

        #[inline(always)]
        fn log2_fast(x: usize) -> usize {
            // floor(log2(x)); requires x > 0 or the subtraction underflows.
            // Only called below with x = start >= tail_len >= 1.
            (usize::BITS - x.leading_zeros() - 1) as usize
        }

        // `rebuild` takes O(self.len()) operations
        // and about 2 * self.len() comparisons in the worst case
        // while repeating `sift_up` takes O(tail_len * log(start)) operations
        // and about 1 * tail_len * log_2(start) comparisons in the worst case,
        // assuming start >= tail_len. For larger heaps, the crossover point
        // no longer follows this reasoning and was determined empirically.
        let better_to_rebuild = if start < tail_len {
            true
        } else if self.len() <= 2048 {
            2 * self.len() < tail_len * log2_fast(start)
        } else {
            2 * self.len() < tail_len * 11
        };

        if better_to_rebuild {
            self.rebuild();
        } else {
            for i in start..self.len() {
                // SAFETY: The index `i` is always less than self.len().
                unsafe { self.sift_up(0, i) };
            }
        }
    }
44958
44959    fn rebuild(&mut self) {
44960        let mut n = self.len() / 2;
44961        while n > 0 {
44962            n -= 1;
44963            // SAFETY: n starts from self.len() / 2 and goes down to 0.
44964            //  The only case when !(n < self.len()) is if
44965            //  self.len() == 0, but it's ruled out by the loop condition.
44966            unsafe { self.sift_down(n) };
44967        }
44968    }
44969
44970    /// Moves all the elements of `other` into `self`, leaving `other` empty.
44971    ///
44972    /// # Examples
44973    ///
44974    /// Basic usage:
44975    ///
44976    /// ```
44977    /// use std::collections::BinaryHeap;
44978    ///
44979    /// let v = vec![-10, 1, 2, 3, 3];
44980    /// let mut a = BinaryHeap::from(v);
44981    ///
44982    /// let v = vec![-20, 5, 43];
44983    /// let mut b = BinaryHeap::from(v);
44984    ///
44985    /// a.append(&mut b);
44986    ///
44987    /// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
44988    /// assert!(b.is_empty());
44989    /// ```
44990    #[stable(feature = "binary_heap_append", since = "1.11.0")]
44991    pub fn append(&mut self, other: &mut Self) {
44992        if self.len() < other.len() {
44993            swap(self, other);
44994        }
44995
44996        let start = self.data.len();
44997
44998        self.data.append(&mut other.data);
44999
45000        self.rebuild_tail(start);
45001    }
45002
    /// Returns an iterator which retrieves elements in heap order.
    /// The retrieved elements are removed from the original heap.
    /// The remaining elements will be removed on drop in heap order.
    ///
    /// Note:
    /// * `.drain_sorted()` is *O*(*n* \* log(*n*)); much slower than `.drain()`.
    ///   You should use the latter for most cases.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(binary_heap_drain_sorted)]
    /// use std::collections::BinaryHeap;
    ///
    /// let mut heap = BinaryHeap::from(vec![1, 2, 3, 4, 5]);
    /// assert_eq!(heap.len(), 5);
    ///
    /// drop(heap.drain_sorted()); // removes all elements in heap order
    /// assert_eq!(heap.len(), 0);
    /// ```
    #[inline]
    #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
    pub fn drain_sorted(&mut self) -> DrainSorted<'_, T> {
        // The iterator borrows the heap and removes elements lazily; per the
        // doc above, anything not yet yielded is removed when it is dropped.
        DrainSorted { inner: self }
    }
45030
45031    /// Retains only the elements specified by the predicate.
45032    ///
45033    /// In other words, remove all elements `e` such that `f(&e)` returns
45034    /// `false`. The elements are visited in unsorted (and unspecified) order.
45035    ///
45036    /// # Examples
45037    ///
45038    /// Basic usage:
45039    ///
45040    /// ```
45041    /// #![feature(binary_heap_retain)]
45042    /// use std::collections::BinaryHeap;
45043    ///
45044    /// let mut heap = BinaryHeap::from(vec![-10, -5, 1, 2, 4, 13]);
45045    ///
45046    /// heap.retain(|x| x % 2 == 0); // only keep even numbers
45047    ///
45048    /// assert_eq!(heap.into_sorted_vec(), [-10, 2, 4])
45049    /// ```
45050    #[unstable(feature = "binary_heap_retain", issue = "71503")]
45051    pub fn retain<F>(&mut self, mut f: F)
45052    where
45053        F: FnMut(&T) -> bool,
45054    {
45055        let mut first_removed = self.len();
45056        let mut i = 0;
45057        self.data.retain(|e| {
45058            let keep = f(e);
45059            if !keep && i < first_removed {
45060                first_removed = i;
45061            }
45062            i += 1;
45063            keep
45064        });
45065        // data[0..first_removed] is untouched, so we only need to rebuild the tail:
45066        self.rebuild_tail(first_removed);
45067    }
45068}
45069
45070impl<T> BinaryHeap<T> {
    /// Returns an iterator visiting all values in the underlying vector, in
    /// arbitrary order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]);
    ///
    /// // Print 1, 2, 3, 4 in arbitrary order
    /// for x in heap.iter() {
    ///     println!("{}", x);
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn iter(&self) -> Iter<'_, T> {
        // Delegates to the backing vector's iterator: storage order,
        // not heap (sorted) order.
        Iter { iter: self.data.iter() }
    }
45091
    /// Returns an iterator which retrieves elements in heap order.
    /// This method consumes the original heap.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(binary_heap_into_iter_sorted)]
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5]);
    ///
    /// assert_eq!(heap.into_iter_sorted().take(2).collect::<Vec<_>>(), vec![5, 4]);
    /// ```
    #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
    pub fn into_iter_sorted(self) -> IntoIterSorted<T> {
        // The wrapper owns the heap and yields elements greatest-first;
        // see `IntoIterSorted`, defined elsewhere in this file.
        IntoIterSorted { inner: self }
    }
45110
45111    /// Returns the greatest item in the binary heap, or `None` if it is empty.
45112    ///
45113    /// # Examples
45114    ///
45115    /// Basic usage:
45116    ///
45117    /// ```
45118    /// use std::collections::BinaryHeap;
45119    /// let mut heap = BinaryHeap::new();
45120    /// assert_eq!(heap.peek(), None);
45121    ///
45122    /// heap.push(1);
45123    /// heap.push(5);
45124    /// heap.push(2);
45125    /// assert_eq!(heap.peek(), Some(&5));
45126    ///
45127    /// ```
45128    ///
45129    /// # Time complexity
45130    ///
45131    /// Cost is *O*(1) in the worst case.
45132    #[stable(feature = "rust1", since = "1.0.0")]
45133    pub fn peek(&self) -> Option<&T> {
45134        self.data.get(0)
45135    }
45136
45137    /// Returns the number of elements the binary heap can hold without reallocating.
45138    ///
45139    /// # Examples
45140    ///
45141    /// Basic usage:
45142    ///
45143    /// ```
45144    /// use std::collections::BinaryHeap;
45145    /// let mut heap = BinaryHeap::with_capacity(100);
45146    /// assert!(heap.capacity() >= 100);
45147    /// heap.push(4);
45148    /// ```
45149    #[stable(feature = "rust1", since = "1.0.0")]
45150    pub fn capacity(&self) -> usize {
45151        self.data.capacity()
45152    }
45153
45154    /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
45155    /// given `BinaryHeap`. Does nothing if the capacity is already sufficient.
45156    ///
45157    /// Note that the allocator may give the collection more space than it requests. Therefore
45158    /// capacity can not be relied upon to be precisely minimal. Prefer [`reserve`] if future
45159    /// insertions are expected.
45160    ///
45161    /// # Panics
45162    ///
45163    /// Panics if the new capacity overflows `usize`.
45164    ///
45165    /// # Examples
45166    ///
45167    /// Basic usage:
45168    ///
45169    /// ```
45170    /// use std::collections::BinaryHeap;
45171    /// let mut heap = BinaryHeap::new();
45172    /// heap.reserve_exact(100);
45173    /// assert!(heap.capacity() >= 100);
45174    /// heap.push(4);
45175    /// ```
45176    ///
45177    /// [`reserve`]: BinaryHeap::reserve
45178    #[stable(feature = "rust1", since = "1.0.0")]
45179    pub fn reserve_exact(&mut self, additional: usize) {
45180        self.data.reserve_exact(additional);
45181    }
45182
45183    /// Reserves capacity for at least `additional` more elements to be inserted in the
45184    /// `BinaryHeap`. The collection may reserve more space to avoid frequent reallocations.
45185    ///
45186    /// # Panics
45187    ///
45188    /// Panics if the new capacity overflows `usize`.
45189    ///
45190    /// # Examples
45191    ///
45192    /// Basic usage:
45193    ///
45194    /// ```
45195    /// use std::collections::BinaryHeap;
45196    /// let mut heap = BinaryHeap::new();
45197    /// heap.reserve(100);
45198    /// assert!(heap.capacity() >= 100);
45199    /// heap.push(4);
45200    /// ```
45201    #[stable(feature = "rust1", since = "1.0.0")]
45202    pub fn reserve(&mut self, additional: usize) {
45203        self.data.reserve(additional);
45204    }
45205
45206    /// Discards as much additional capacity as possible.
45207    ///
45208    /// # Examples
45209    ///
45210    /// Basic usage:
45211    ///
45212    /// ```
45213    /// use std::collections::BinaryHeap;
45214    /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
45215    ///
45216    /// assert!(heap.capacity() >= 100);
45217    /// heap.shrink_to_fit();
45218    /// assert!(heap.capacity() == 0);
45219    /// ```
45220    #[stable(feature = "rust1", since = "1.0.0")]
45221    pub fn shrink_to_fit(&mut self) {
45222        self.data.shrink_to_fit();
45223    }
45224
45225    /// Discards capacity with a lower bound.
45226    ///
45227    /// The capacity will remain at least as large as both the length
45228    /// and the supplied value.
45229    ///
45230    /// If the current capacity is less than the lower limit, this is a no-op.
45231    ///
45232    /// # Examples
45233    ///
45234    /// ```
45235    /// #![feature(shrink_to)]
45236    /// use std::collections::BinaryHeap;
45237    /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
45238    ///
45239    /// assert!(heap.capacity() >= 100);
45240    /// heap.shrink_to(10);
45241    /// assert!(heap.capacity() >= 10);
45242    /// ```
45243    #[inline]
45244    #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
45245    pub fn shrink_to(&mut self, min_capacity: usize) {
45246        self.data.shrink_to(min_capacity)
45247    }
45248
45249    /// Returns a slice of all values in the underlying vector, in arbitrary
45250    /// order.
45251    ///
45252    /// # Examples
45253    ///
45254    /// Basic usage:
45255    ///
45256    /// ```
45257    /// #![feature(binary_heap_as_slice)]
45258    /// use std::collections::BinaryHeap;
45259    /// use std::io::{self, Write};
45260    ///
45261    /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5, 6, 7]);
45262    ///
45263    /// io::sink().write(heap.as_slice()).unwrap();
45264    /// ```
45265    #[unstable(feature = "binary_heap_as_slice", issue = "83659")]
45266    pub fn as_slice(&self) -> &[T] {
45267        self.data.as_slice()
45268    }
45269
45270    /// Consumes the `BinaryHeap` and returns the underlying vector
45271    /// in arbitrary order.
45272    ///
45273    /// # Examples
45274    ///
45275    /// Basic usage:
45276    ///
45277    /// ```
45278    /// use std::collections::BinaryHeap;
45279    /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5, 6, 7]);
45280    /// let vec = heap.into_vec();
45281    ///
45282    /// // Will print in some order
45283    /// for x in vec {
45284    ///     println!("{}", x);
45285    /// }
45286    /// ```
45287    #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
45288    pub fn into_vec(self) -> Vec<T> {
45289        self.into()
45290    }
45291
45292    /// Returns the length of the binary heap.
45293    ///
45294    /// # Examples
45295    ///
45296    /// Basic usage:
45297    ///
45298    /// ```
45299    /// use std::collections::BinaryHeap;
45300    /// let heap = BinaryHeap::from(vec![1, 3]);
45301    ///
45302    /// assert_eq!(heap.len(), 2);
45303    /// ```
45304    #[doc(alias = "length")]
45305    #[stable(feature = "rust1", since = "1.0.0")]
45306    pub fn len(&self) -> usize {
45307        self.data.len()
45308    }
45309
45310    /// Checks if the binary heap is empty.
45311    ///
45312    /// # Examples
45313    ///
45314    /// Basic usage:
45315    ///
45316    /// ```
45317    /// use std::collections::BinaryHeap;
45318    /// let mut heap = BinaryHeap::new();
45319    ///
45320    /// assert!(heap.is_empty());
45321    ///
45322    /// heap.push(3);
45323    /// heap.push(5);
45324    /// heap.push(1);
45325    ///
45326    /// assert!(!heap.is_empty());
45327    /// ```
45328    #[stable(feature = "rust1", since = "1.0.0")]
45329    pub fn is_empty(&self) -> bool {
45330        self.len() == 0
45331    }
45332
45333    /// Clears the binary heap, returning an iterator over the removed elements.
45334    ///
45335    /// The elements are removed in arbitrary order.
45336    ///
45337    /// # Examples
45338    ///
45339    /// Basic usage:
45340    ///
45341    /// ```
45342    /// use std::collections::BinaryHeap;
45343    /// let mut heap = BinaryHeap::from(vec![1, 3]);
45344    ///
45345    /// assert!(!heap.is_empty());
45346    ///
45347    /// for x in heap.drain() {
45348    ///     println!("{}", x);
45349    /// }
45350    ///
45351    /// assert!(heap.is_empty());
45352    /// ```
45353    #[inline]
45354    #[stable(feature = "drain", since = "1.6.0")]
45355    pub fn drain(&mut self) -> Drain<'_, T> {
45356        Drain { iter: self.data.drain(..) }
45357    }
45358
45359    /// Drops all items from the binary heap.
45360    ///
45361    /// # Examples
45362    ///
45363    /// Basic usage:
45364    ///
45365    /// ```
45366    /// use std::collections::BinaryHeap;
45367    /// let mut heap = BinaryHeap::from(vec![1, 3]);
45368    ///
45369    /// assert!(!heap.is_empty());
45370    ///
45371    /// heap.clear();
45372    ///
45373    /// assert!(heap.is_empty());
45374    /// ```
45375    #[stable(feature = "rust1", since = "1.0.0")]
45376    pub fn clear(&mut self) {
45377        self.drain();
45378    }
45379}
45380
/// Hole represents a hole in a slice i.e., an index without valid value
/// (because it was moved from or duplicated).
/// In drop, `Hole` will restore the slice by filling the hole
/// position with the value that was originally removed.
struct Hole<'a, T: 'a> {
    // The slice being repaired; `data[pos]` is logically uninitialized.
    data: &'a mut [T],
    // The value moved out of `data[pos]`; `ManuallyDrop` ensures it is
    // written back (not dropped) when the hole is filled in `Drop`.
    elt: ManuallyDrop<T>,
    // Current index of the hole within `data`.
    pos: usize,
}
45390
impl<'a, T> Hole<'a, T> {
    /// Create a new `Hole` at index `pos`.
    ///
    /// Unsafe because pos must be within the data slice.
    #[inline]
    unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
        debug_assert!(pos < data.len());
        // SAFETY: the caller guarantees `pos` is inside the slice
        // (debug-asserted above); the duplicated value is kept alive in
        // `elt` and written back exactly once in `Drop`.
        let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
        Hole { data, elt: ManuallyDrop::new(elt), pos }
    }

    /// Current index of the hole.
    #[inline]
    fn pos(&self) -> usize {
        self.pos
    }

    /// Returns a reference to the element removed.
    #[inline]
    fn element(&self) -> &T {
        &self.elt
    }

    /// Returns a reference to the element at `index`.
    ///
    /// Unsafe because index must be within the data slice and not equal to pos.
    #[inline]
    unsafe fn get(&self, index: usize) -> &T {
        debug_assert!(index != self.pos);
        debug_assert!(index < self.data.len());
        // SAFETY: the caller guarantees `index` is in bounds and is not the
        // hole position, so the slot holds a valid value.
        unsafe { self.data.get_unchecked(index) }
    }

    /// Move hole to new location
    ///
    /// Unsafe because index must be within the data slice and not equal to pos.
    #[inline]
    unsafe fn move_to(&mut self, index: usize) {
        debug_assert!(index != self.pos);
        debug_assert!(index < self.data.len());
        // SAFETY: the caller guarantees `index` is in bounds and distinct
        // from `pos`, so the one-element copy cannot overlap. The value at
        // `index` is duplicated into the hole, and `index` becomes the new hole.
        unsafe {
            let ptr = self.data.as_mut_ptr();
            let index_ptr: *const _ = ptr.add(index);
            let hole_ptr = ptr.add(self.pos);
            ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
        }
        self.pos = index;
    }
}
45440
impl<T> Drop for Hole<'_, T> {
    #[inline]
    fn drop(&mut self) {
        // fill the hole again
        unsafe {
            // SAFETY: `pos` is kept in bounds by `new`/`move_to`; writing the
            // saved element back leaves the slice fully initialized again.
            let pos = self.pos;
            ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1);
        }
    }
}
45451
/// An iterator over the elements of a `BinaryHeap`.
///
/// This `struct` is created by [`BinaryHeap::iter()`]. See its
/// documentation for more.
///
/// [`iter`]: BinaryHeap::iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> {
    // Iterates over the heap's backing vector, so values come out in
    // arbitrary (heap, not sorted) order.
    iter: slice::Iter<'a, T>,
}
45462
45463#[stable(feature = "collection_debug", since = "1.17.0")]
45464impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
45465    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
45466        f.debug_tuple("Iter").field(&self.iter.as_slice()).finish()
45467    }
45468}
45469
45470// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
45471#[stable(feature = "rust1", since = "1.0.0")]
45472impl<T> Clone for Iter<'_, T> {
45473    fn clone(&self) -> Self {
45474        Iter { iter: self.iter.clone() }
45475    }
45476}
45477
45478#[stable(feature = "rust1", since = "1.0.0")]
45479impl<'a, T> Iterator for Iter<'a, T> {
45480    type Item = &'a T;
45481
45482    #[inline]
45483    fn next(&mut self) -> Option<&'a T> {
45484        self.iter.next()
45485    }
45486
45487    #[inline]
45488    fn size_hint(&self) -> (usize, Option<usize>) {
45489        self.iter.size_hint()
45490    }
45491
45492    #[inline]
45493    fn last(self) -> Option<&'a T> {
45494        self.iter.last()
45495    }
45496}
45497
45498#[stable(feature = "rust1", since = "1.0.0")]
45499impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
45500    #[inline]
45501    fn next_back(&mut self) -> Option<&'a T> {
45502        self.iter.next_back()
45503    }
45504}
45505
45506#[stable(feature = "rust1", since = "1.0.0")]
45507impl<T> ExactSizeIterator for Iter<'_, T> {
45508    fn is_empty(&self) -> bool {
45509        self.iter.is_empty()
45510    }
45511}
45512
45513#[stable(feature = "fused", since = "1.26.0")]
45514impl<T> FusedIterator for Iter<'_, T> {}
45515
/// An owning iterator over the elements of a `BinaryHeap`.
///
/// This `struct` is created by [`BinaryHeap::into_iter()`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`into_iter`]: BinaryHeap::into_iter
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
pub struct IntoIter<T> {
    // Owning iterator over the heap's backing vector; yields values in
    // arbitrary (heap, not sorted) order.
    iter: vec::IntoIter<T>,
}
45527
45528#[stable(feature = "collection_debug", since = "1.17.0")]
45529impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
45530    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
45531        f.debug_tuple("IntoIter").field(&self.iter.as_slice()).finish()
45532    }
45533}
45534
45535#[stable(feature = "rust1", since = "1.0.0")]
45536impl<T> Iterator for IntoIter<T> {
45537    type Item = T;
45538
45539    #[inline]
45540    fn next(&mut self) -> Option<T> {
45541        self.iter.next()
45542    }
45543
45544    #[inline]
45545    fn size_hint(&self) -> (usize, Option<usize>) {
45546        self.iter.size_hint()
45547    }
45548}
45549
45550#[stable(feature = "rust1", since = "1.0.0")]
45551impl<T> DoubleEndedIterator for IntoIter<T> {
45552    #[inline]
45553    fn next_back(&mut self) -> Option<T> {
45554        self.iter.next_back()
45555    }
45556}
45557
45558#[stable(feature = "rust1", since = "1.0.0")]
45559impl<T> ExactSizeIterator for IntoIter<T> {
45560    fn is_empty(&self) -> bool {
45561        self.iter.is_empty()
45562    }
45563}
45564
45565#[stable(feature = "fused", since = "1.26.0")]
45566impl<T> FusedIterator for IntoIter<T> {}
45567
45568#[unstable(issue = "none", feature = "inplace_iteration")]
45569unsafe impl<T> SourceIter for IntoIter<T> {
45570    type Source = IntoIter<T>;
45571
45572    #[inline]
45573    unsafe fn as_inner(&mut self) -> &mut Self::Source {
45574        self
45575    }
45576}
45577
45578#[unstable(issue = "none", feature = "inplace_iteration")]
45579unsafe impl<I> InPlaceIterable for IntoIter<I> {}
45580
45581impl<I> AsIntoIter for IntoIter<I> {
45582    type Item = I;
45583
45584    fn as_into_iter(&mut self) -> &mut vec::IntoIter<Self::Item> {
45585        &mut self.iter
45586    }
45587}
45588
45589#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
45590#[derive(Clone, Debug)]
45591pub struct IntoIterSorted<T> {
45592    inner: BinaryHeap<T>,
45593}
45594
45595#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
45596impl<T: Ord> Iterator for IntoIterSorted<T> {
45597    type Item = T;
45598
45599    #[inline]
45600    fn next(&mut self) -> Option<T> {
45601        self.inner.pop()
45602    }
45603
45604    #[inline]
45605    fn size_hint(&self) -> (usize, Option<usize>) {
45606        let exact = self.inner.len();
45607        (exact, Some(exact))
45608    }
45609}
45610
45611#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
45612impl<T: Ord> ExactSizeIterator for IntoIterSorted<T> {}
45613
45614#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
45615impl<T: Ord> FusedIterator for IntoIterSorted<T> {}
45616
45617#[unstable(feature = "trusted_len", issue = "37572")]
45618unsafe impl<T: Ord> TrustedLen for IntoIterSorted<T> {}
45619
/// A draining iterator over the elements of a `BinaryHeap`.
///
/// This `struct` is created by [`BinaryHeap::drain()`]. See its
/// documentation for more.
///
/// [`drain`]: BinaryHeap::drain
#[stable(feature = "drain", since = "1.6.0")]
#[derive(Debug)]
pub struct Drain<'a, T: 'a> {
    // Drains the heap's backing vector, so values come out in arbitrary
    // (heap, not sorted) order.
    iter: vec::Drain<'a, T>,
}
45631
45632#[stable(feature = "drain", since = "1.6.0")]
45633impl<T> Iterator for Drain<'_, T> {
45634    type Item = T;
45635
45636    #[inline]
45637    fn next(&mut self) -> Option<T> {
45638        self.iter.next()
45639    }
45640
45641    #[inline]
45642    fn size_hint(&self) -> (usize, Option<usize>) {
45643        self.iter.size_hint()
45644    }
45645}
45646
45647#[stable(feature = "drain", since = "1.6.0")]
45648impl<T> DoubleEndedIterator for Drain<'_, T> {
45649    #[inline]
45650    fn next_back(&mut self) -> Option<T> {
45651        self.iter.next_back()
45652    }
45653}
45654
45655#[stable(feature = "drain", since = "1.6.0")]
45656impl<T> ExactSizeIterator for Drain<'_, T> {
45657    fn is_empty(&self) -> bool {
45658        self.iter.is_empty()
45659    }
45660}
45661
45662#[stable(feature = "fused", since = "1.26.0")]
45663impl<T> FusedIterator for Drain<'_, T> {}
45664
/// A draining iterator over the elements of a `BinaryHeap`, yielded in heap
/// (largest-first) order.
///
/// This `struct` is created by [`BinaryHeap::drain_sorted()`]. See its
/// documentation for more.
///
/// [`drain_sorted`]: BinaryHeap::drain_sorted
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
#[derive(Debug)]
pub struct DrainSorted<'a, T: Ord> {
    // The heap being drained; elements are removed via repeated `pop`,
    // and `Drop` empties whatever remains.
    inner: &'a mut BinaryHeap<T>,
}
45676
45677#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
45678impl<'a, T: Ord> Drop for DrainSorted<'a, T> {
45679    /// Removes heap elements in heap order.
45680    fn drop(&mut self) {
45681        struct DropGuard<'r, 'a, T: Ord>(&'r mut DrainSorted<'a, T>);
45682
45683        impl<'r, 'a, T: Ord> Drop for DropGuard<'r, 'a, T> {
45684            fn drop(&mut self) {
45685                while self.0.inner.pop().is_some() {}
45686            }
45687        }
45688
45689        while let Some(item) = self.inner.pop() {
45690            let guard = DropGuard(self);
45691            drop(item);
45692            mem::forget(guard);
45693        }
45694    }
45695}
45696
45697#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
45698impl<T: Ord> Iterator for DrainSorted<'_, T> {
45699    type Item = T;
45700
45701    #[inline]
45702    fn next(&mut self) -> Option<T> {
45703        self.inner.pop()
45704    }
45705
45706    #[inline]
45707    fn size_hint(&self) -> (usize, Option<usize>) {
45708        let exact = self.inner.len();
45709        (exact, Some(exact))
45710    }
45711}
45712
45713#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
45714impl<T: Ord> ExactSizeIterator for DrainSorted<'_, T> {}
45715
45716#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
45717impl<T: Ord> FusedIterator for DrainSorted<'_, T> {}
45718
45719#[unstable(feature = "trusted_len", issue = "37572")]
45720unsafe impl<T: Ord> TrustedLen for DrainSorted<'_, T> {}
45721
45722#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
45723impl<T: Ord> From<Vec<T>> for BinaryHeap<T> {
45724    /// Converts a `Vec<T>` into a `BinaryHeap<T>`.
45725    ///
45726    /// This conversion happens in-place, and has *O*(*n*) time complexity.
45727    fn from(vec: Vec<T>) -> BinaryHeap<T> {
45728        let mut heap = BinaryHeap { data: vec };
45729        heap.rebuild();
45730        heap
45731    }
45732}
45733
45734#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
45735impl<T> From<BinaryHeap<T>> for Vec<T> {
45736    /// Converts a `BinaryHeap<T>` into a `Vec<T>`.
45737    ///
45738    /// This conversion requires no data movement or allocation, and has
45739    /// constant time complexity.
45740    fn from(heap: BinaryHeap<T>) -> Vec<T> {
45741        heap.data
45742    }
45743}
45744
45745#[stable(feature = "rust1", since = "1.0.0")]
45746impl<T: Ord> FromIterator<T> for BinaryHeap<T> {
45747    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BinaryHeap<T> {
45748        BinaryHeap::from(iter.into_iter().collect::<Vec<_>>())
45749    }
45750}
45751
45752#[stable(feature = "rust1", since = "1.0.0")]
45753impl<T> IntoIterator for BinaryHeap<T> {
45754    type Item = T;
45755    type IntoIter = IntoIter<T>;
45756
45757    /// Creates a consuming iterator, that is, one that moves each value out of
45758    /// the binary heap in arbitrary order. The binary heap cannot be used
45759    /// after calling this.
45760    ///
45761    /// # Examples
45762    ///
45763    /// Basic usage:
45764    ///
45765    /// ```
45766    /// use std::collections::BinaryHeap;
45767    /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]);
45768    ///
45769    /// // Print 1, 2, 3, 4 in arbitrary order
45770    /// for x in heap.into_iter() {
45771    ///     // x has type i32, not &i32
45772    ///     println!("{}", x);
45773    /// }
45774    /// ```
45775    fn into_iter(self) -> IntoIter<T> {
45776        IntoIter { iter: self.data.into_iter() }
45777    }
45778}
45779
45780#[stable(feature = "rust1", since = "1.0.0")]
45781impl<'a, T> IntoIterator for &'a BinaryHeap<T> {
45782    type Item = &'a T;
45783    type IntoIter = Iter<'a, T>;
45784
45785    fn into_iter(self) -> Iter<'a, T> {
45786        self.iter()
45787    }
45788}
45789
45790#[stable(feature = "rust1", since = "1.0.0")]
45791impl<T: Ord> Extend<T> for BinaryHeap<T> {
45792    #[inline]
45793    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
45794        <Self as SpecExtend<I>>::spec_extend(self, iter);
45795    }
45796
45797    #[inline]
45798    fn extend_one(&mut self, item: T) {
45799        self.push(item);
45800    }
45801
45802    #[inline]
45803    fn extend_reserve(&mut self, additional: usize) {
45804        self.reserve(additional);
45805    }
45806}
45807
// Fallback for arbitrary iterators: push elements one at a time.
impl<T: Ord, I: IntoIterator<Item = T>> SpecExtend<I> for BinaryHeap<T> {
    default fn spec_extend(&mut self, iter: I) {
        self.extend_desugared(iter.into_iter());
    }
}
45813
// Specialization: merging in a whole `BinaryHeap` uses `append`, which can
// rebuild in bulk instead of pushing element by element.
impl<T: Ord> SpecExtend<BinaryHeap<T>> for BinaryHeap<T> {
    fn spec_extend(&mut self, ref mut other: BinaryHeap<T>) {
        self.append(other);
    }
}
45819
45820impl<T: Ord> BinaryHeap<T> {
45821    fn extend_desugared<I: IntoIterator<Item = T>>(&mut self, iter: I) {
45822        let iterator = iter.into_iter();
45823        let (lower, _) = iterator.size_hint();
45824
45825        self.reserve(lower);
45826
45827        iterator.for_each(move |elem| self.push(elem));
45828    }
45829}
45830
45831#[stable(feature = "extend_ref", since = "1.2.0")]
45832impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BinaryHeap<T> {
45833    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
45834        self.extend(iter.into_iter().cloned());
45835    }
45836
45837    #[inline]
45838    fn extend_one(&mut self, &item: &'a T) {
45839        self.push(item);
45840    }
45841
45842    #[inline]
45843    fn extend_reserve(&mut self, additional: usize) {
45844        self.reserve(additional);
45845    }
45846}
45847use crate::alloc::{Allocator, Global};
45848use crate::raw_vec::RawVec;
45849use core::fmt;
45850use core::intrinsics::arith_offset;
45851use core::iter::{FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccess};
45852use core::marker::PhantomData;
45853use core::mem::{self};
45854use core::ptr::{self, NonNull};
45855use core::slice::{self};
45856
/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // Start of the backing allocation; kept so `Drop` can deallocate it.
    pub(super) buf: NonNull<T>,
    // Marks logical ownership of `T` (for variance / drop-check purposes).
    pub(super) phantom: PhantomData<T>,
    // Capacity of the allocation in elements, needed for deallocation.
    pub(super) cap: usize,
    // Allocator that owns the buffer; used to free it on drop.
    pub(super) alloc: A,
    // First element not yet yielded by `next`. For ZSTs this is a raw
    // address advanced by 1 per item rather than a real pointer offset.
    pub(super) ptr: *const T,
    // One past the last remaining element; `next_back` walks this down.
    // For ZSTs, `end - ptr` (as addresses) encodes the remaining length.
    pub(super) end: *const T,
}
45880
45881#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
45882impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
45883    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
45884        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
45885    }
45886}
45887
45888impl<T, A: Allocator> IntoIter<T, A> {
45889    /// Returns the remaining items of this iterator as a slice.
45890    ///
45891    /// # Examples
45892    ///
45893    /// ```
45894    /// let vec = vec!['a', 'b', 'c'];
45895    /// let mut into_iter = vec.into_iter();
45896    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
45897    /// let _ = into_iter.next().unwrap();
45898    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
45899    /// ```
45900    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
45901    pub fn as_slice(&self) -> &[T] {
45902        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
45903    }
45904
45905    /// Returns the remaining items of this iterator as a mutable slice.
45906    ///
45907    /// # Examples
45908    ///
45909    /// ```
45910    /// let vec = vec!['a', 'b', 'c'];
45911    /// let mut into_iter = vec.into_iter();
45912    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
45913    /// into_iter.as_mut_slice()[2] = 'z';
45914    /// assert_eq!(into_iter.next().unwrap(), 'a');
45915    /// assert_eq!(into_iter.next().unwrap(), 'b');
45916    /// assert_eq!(into_iter.next().unwrap(), 'z');
45917    /// ```
45918    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
45919    pub fn as_mut_slice(&mut self) -> &mut [T] {
45920        unsafe { &mut *self.as_raw_mut_slice() }
45921    }
45922
45923    /// Returns a reference to the underlying allocator.
45924    #[unstable(feature = "allocator_api", issue = "32838")]
45925    #[inline]
45926    pub fn allocator(&self) -> &A {
45927        &self.alloc
45928    }
45929
45930    fn as_raw_mut_slice(&mut self) -> *mut [T] {
45931        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
45932    }
45933
45934    /// Drops remaining elements and relinquishes the backing allocation.
45935    ///
45936    /// This is roughly equivalent to the following, but more efficient
45937    ///
45938    /// ```
45939    /// # let mut into_iter = Vec::<u8>::with_capacity(10).into_iter();
45940    /// (&mut into_iter).for_each(core::mem::drop);
45941    /// unsafe { core::ptr::write(&mut into_iter, Vec::new().into_iter()); }
45942    /// ```
45943    pub(super) fn forget_allocation_drop_remaining(&mut self) {
45944        let remaining = self.as_raw_mut_slice();
45945
45946        // overwrite the individual fields instead of creating a new
45947        // struct and then overwriting &mut self.
45948        // this creates less assembly
45949        self.cap = 0;
45950        self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
45951        self.ptr = self.buf.as_ptr();
45952        self.end = self.buf.as_ptr();
45953
45954        unsafe {
45955            ptr::drop_in_place(remaining);
45956        }
45957    }
45958}
45959
45960#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
45961impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
45962    fn as_ref(&self) -> &[T] {
45963        self.as_slice()
45964    }
45965}
45966
45967#[stable(feature = "rust1", since = "1.0.0")]
45968unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
45969#[stable(feature = "rust1", since = "1.0.0")]
45970unsafe impl<T: Sync, A: Allocator> Sync for IntoIter<T, A> {}
45971
45972#[stable(feature = "rust1", since = "1.0.0")]
45973impl<T, A: Allocator> Iterator for IntoIter<T, A> {
45974    type Item = T;
45975
45976    #[inline]
45977    fn next(&mut self) -> Option<T> {
45978        if self.ptr as *const _ == self.end {
45979            None
45980        } else if mem::size_of::<T>() == 0 {
45981            // purposefully don't use 'ptr.offset' because for
45982            // vectors with 0-size elements this would return the
45983            // same pointer.
45984            self.ptr = unsafe { arith_offset(self.ptr as *const i8, 1) as *mut T };
45985
45986            // Make up a value of this ZST.
45987            Some(unsafe { mem::zeroed() })
45988        } else {
45989            let old = self.ptr;
45990            self.ptr = unsafe { self.ptr.offset(1) };
45991
45992            Some(unsafe { ptr::read(old) })
45993        }
45994    }
45995
45996    #[inline]
45997    fn size_hint(&self) -> (usize, Option<usize>) {
45998        let exact = if mem::size_of::<T>() == 0 {
45999            (self.end as usize).wrapping_sub(self.ptr as usize)
46000        } else {
46001            unsafe { self.end.offset_from(self.ptr) as usize }
46002        };
46003        (exact, Some(exact))
46004    }
46005
46006    #[inline]
46007    fn count(self) -> usize {
46008        self.len()
46009    }
46010
46011    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
46012    where
46013        Self: TrustedRandomAccess,
46014    {
46015        // SAFETY: the caller must guarantee that `i` is in bounds of the
46016        // `Vec<T>`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)`
46017        // is guaranteed to pointer to an element of the `Vec<T>` and
46018        // thus guaranteed to be valid to dereference.
46019        //
46020        // Also note the implementation of `Self: TrustedRandomAccess` requires
46021        // that `T: Copy` so reading elements from the buffer doesn't invalidate
46022        // them for `Drop`.
46023        unsafe {
46024            if mem::size_of::<T>() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
46025        }
46026    }
46027}
46028
46029#[stable(feature = "rust1", since = "1.0.0")]
46030impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
46031    #[inline]
46032    fn next_back(&mut self) -> Option<T> {
46033        if self.end == self.ptr {
46034            None
46035        } else if mem::size_of::<T>() == 0 {
46036            // See above for why 'ptr.offset' isn't used
46037            self.end = unsafe { arith_offset(self.end as *const i8, -1) as *mut T };
46038
46039            // Make up a value of this ZST.
46040            Some(unsafe { mem::zeroed() })
46041        } else {
46042            self.end = unsafe { self.end.offset(-1) };
46043
46044            Some(unsafe { ptr::read(self.end) })
46045        }
46046    }
46047}
46048
46049#[stable(feature = "rust1", since = "1.0.0")]
46050impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
46051    fn is_empty(&self) -> bool {
46052        self.ptr == self.end
46053    }
46054}
46055
46056#[stable(feature = "fused", since = "1.26.0")]
46057impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
46058
46059#[unstable(feature = "trusted_len", issue = "37572")]
46060unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}
46061
#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
// and thus we can't implement drop-handling
unsafe impl<T, A: Allocator> TrustedRandomAccess for IntoIter<T, A>
where
    T: Copy,
{
    // Reads via `__iterator_get_unchecked` are pure `ptr::read`s of `Copy`
    // data, so skipping elements has no observable effect.
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}
46072
#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    // Cloning copies only the *remaining* elements (`as_slice`) into a fresh
    // allocation; the clone starts at the current iteration position.
    #[cfg(not(test))]
    fn clone(&self) -> Self {
        self.as_slice().to_vec_in(self.alloc.clone()).into_iter()
    }
    // Under cfg(test) the inherent `[T]::to_vec_in` is unavailable in this
    // crate; use the free-function equivalent instead (see slice::hack).
    #[cfg(test)]
    fn clone(&self) -> Self {
        crate::slice::to_vec(self.as_slice(), self.alloc.clone()).into_iter()
    }
}
46084
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        // Guard that frees the backing allocation. Running it from a guard
        // (rather than inline) ensures the memory is released even if dropping
        // one of the remaining elements panics.
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // `IntoIter::alloc` is not used anymore after this
                    let alloc = ptr::read(&self.0.alloc);
                    // RawVec handles deallocation
                    let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        // destroy the remaining elements
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // now `guard` will be dropped and do the rest
    }
}
46109
#[unstable(issue = "none", feature = "inplace_iteration")]
// Marker: consuming this iterator may reuse its own allocation as the
// collect destination (see source_iter_marker below).
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {}

#[unstable(issue = "none", feature = "inplace_iteration")]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    type Source = Self;

    // The iterator *is* its own source — it sits at the bottom of any
    // adapter pipeline.
    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}
46122
// internal helper trait for in-place iteration specialization.
// Gives specialized code typed access to the underlying `IntoIter` so it can
// inspect/reuse its buffer.
#[rustc_specialization_trait]
pub(crate) trait AsIntoIter {
    /// Element type of the underlying `IntoIter`.
    type Item;
    /// Returns the underlying `vec::IntoIter` at the bottom of the pipeline.
    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item>;
}
46129
// Base case: an `IntoIter` is trivially its own underlying `IntoIter`.
impl<T> AsIntoIter for IntoIter<T> {
    type Item = T;

    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
        self
    }
}
46137use crate::alloc::Allocator;
46138use core::iter::TrustedLen;
46139use core::ptr::{self};
46140use core::slice::{self};
46141
46142use super::{IntoIter, SetLenOnDrop, Vec};
46143
// Specialization trait used for Vec::extend
// Implementations pick progressively cheaper strategies based on what the
// iterator type statically guarantees (see impls below).
pub(super) trait SpecExtend<T, I> {
    /// Appends every item yielded by `iter` to `self`.
    fn spec_extend(&mut self, iter: I);
}
46148
// Fallback for arbitrary iterators: push one element at a time, growing as
// needed (`extend_desugared` is defined elsewhere on `Vec`).
impl<T, I, A: Allocator> SpecExtend<T, I> for Vec<T, A>
where
    I: Iterator<Item = T>,
{
    default fn spec_extend(&mut self, iter: I) {
        self.extend_desugared(iter)
    }
}
46157
// TrustedLen iterators report an exact length, so we can reserve once and
// write elements straight into the spare capacity without per-item checks.
impl<T, I, A: Allocator> SpecExtend<T, I> for Vec<T, A>
where
    I: TrustedLen<Item = T>,
{
    default fn spec_extend(&mut self, iterator: I) {
        // This is the case for a TrustedLen iterator.
        let (low, high) = iterator.size_hint();
        if let Some(additional) = high {
            debug_assert_eq!(
                low,
                additional,
                "TrustedLen iterator's size hint is not exact: {:?}",
                (low, high)
            );
            self.reserve(additional);
            unsafe {
                // SAFETY: `reserve` guaranteed room for `additional` more
                // elements; TrustedLen guarantees the iterator yields exactly
                // that many, so every write is in-bounds.
                let mut ptr = self.as_mut_ptr().add(self.len());
                // SetLenOnDrop commits the final length even if the iterator
                // panics mid-way, so already-written elements aren't leaked
                // (nor uninitialized ones exposed).
                let mut local_len = SetLenOnDrop::new(&mut self.len);
                iterator.for_each(move |element| {
                    ptr::write(ptr, element);
                    ptr = ptr.offset(1);
                    // NB can't overflow since we would have had to alloc the address space
                    local_len.increment_len(1);
                });
            }
        } else {
            // Per TrustedLen contract a `None` upper bound means that the iterator length
            // truly exceeds usize::MAX, which would eventually lead to a capacity overflow anyway.
            // Since the other branch already panics eagerly (via `reserve()`) we do the same here.
            // This avoids additional codegen for a fallback code path which would eventually
            // panic anyway.
            panic!("capacity overflow");
        }
    }
}
46193
// Extending from a `vec::IntoIter`: the remaining elements form a contiguous
// slice, so they can be appended with a single bulk copy.
impl<T, A: Allocator> SpecExtend<T, IntoIter<T>> for Vec<T, A> {
    fn spec_extend(&mut self, mut iterator: IntoIter<T>) {
        unsafe {
            // SAFETY: `as_slice` covers exactly the not-yet-yielded elements.
            self.append_elements(iterator.as_slice() as _);
        }
        // Mark the iterator as exhausted so its Drop doesn't double-drop the
        // elements we just moved out.
        iterator.ptr = iterator.end;
    }
}
46202
// Reference iterators: clone each element, then re-dispatch so the cloned
// (by-value) iterator can still hit the TrustedLen fast path above.
impl<'a, T: 'a, I, A: Allocator + 'a> SpecExtend<&'a T, I> for Vec<T, A>
where
    I: Iterator<Item = &'a T>,
    T: Clone,
{
    default fn spec_extend(&mut self, iterator: I) {
        self.spec_extend(iterator.cloned())
    }
}
46212
// Slice iterators over `Copy` elements: the remaining items are one
// contiguous slice, so append them with a single memcpy-style bulk copy.
impl<'a, T: 'a, A: Allocator + 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T, A>
where
    T: Copy,
{
    fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) {
        let slice = iterator.as_slice();
        // SAFETY: `T: Copy`, so a bitwise copy is a valid clone and the
        // source slice needs no drop handling afterwards.
        unsafe { self.append_elements(slice) };
    }
}
46222use crate::alloc::Allocator;
46223use crate::raw_vec::RawVec;
46224use core::ptr::{self};
46225
46226use super::{ExtendElement, IsZero, Vec};
46227
// Specialization trait used for Vec::from_elem
// Lets `vec![x; n]` pick a bulk-fill strategy based on the element type
// (see the zero / byte specializations below).
pub(super) trait SpecFromElem: Sized {
    /// Builds a vector of `n` copies of `elem` in the given allocator.
    fn from_elem<A: Allocator>(elem: Self, n: usize, alloc: A) -> Vec<Self, A>;
}
46232
// Generic fallback: allocate once, then clone `elem` into each slot
// (`extend_with` is defined elsewhere on `Vec`).
impl<T: Clone> SpecFromElem for T {
    default fn from_elem<A: Allocator>(elem: Self, n: usize, alloc: A) -> Vec<Self, A> {
        let mut v = Vec::with_capacity_in(n, alloc);
        v.extend_with(n, ExtendElement(elem));
        v
    }
}
46240
// Byte-sized elements: fill the whole buffer with `write_bytes` (memset)
// instead of a per-element loop.
impl SpecFromElem for i8 {
    #[inline]
    fn from_elem<A: Allocator>(elem: i8, n: usize, alloc: A) -> Vec<i8, A> {
        if elem == 0 {
            // Zero-fill is delegated to the allocator, which may get
            // pre-zeroed pages from the OS for free.
            return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n };
        }
        unsafe {
            // SAFETY: capacity is exactly `n` and `write_bytes` initializes
            // all `n` bytes before `set_len(n)` exposes them.
            let mut v = Vec::with_capacity_in(n, alloc);
            ptr::write_bytes(v.as_mut_ptr(), elem as u8, n);
            v.set_len(n);
            v
        }
    }
}
46255
// Same memset strategy as the `i8` impl above, for unsigned bytes.
impl SpecFromElem for u8 {
    #[inline]
    fn from_elem<A: Allocator>(elem: u8, n: usize, alloc: A) -> Vec<u8, A> {
        if elem == 0 {
            // Zero-fill is delegated to the allocator (possibly free via
            // pre-zeroed OS pages).
            return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n };
        }
        unsafe {
            // SAFETY: capacity is exactly `n` and `write_bytes` initializes
            // all `n` bytes before `set_len(n)` exposes them.
            let mut v = Vec::with_capacity_in(n, alloc);
            ptr::write_bytes(v.as_mut_ptr(), elem, n);
            v.set_len(n);
            v
        }
    }
}
46270
// Types that can detect an all-zero value at runtime (see `IsZero`): if the
// element is zero, request zeroed memory instead of cloning in a loop.
impl<T: Clone + IsZero> SpecFromElem for T {
    #[inline]
    fn from_elem<A: Allocator>(elem: T, n: usize, alloc: A) -> Vec<T, A> {
        if elem.is_zero() {
            // SAFETY-relevant: `IsZero` guarantees the all-zero bit pattern
            // is a valid value of `T`, so zeroed memory is initialized.
            return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n };
        }
        // Non-zero element: fall back to cloning into each slot.
        let mut v = Vec::with_capacity_in(n, alloc);
        v.extend_with(n, ExtendElement(elem));
        v
    }
}
46282use crate::alloc::{Allocator, Global};
46283use core::fmt;
46284use core::iter::{FusedIterator, TrustedLen};
46285use core::mem::{self};
46286use core::ptr::{self, NonNull};
46287use core::slice::{self};
46288
46289use super::Vec;
46290
/// A draining iterator for `Vec<T>`.
///
/// This `struct` is created by [`Vec::drain`].
/// See its documentation for more.
///
/// # Example
///
/// ```
/// let mut v = vec![0, 1, 2];
/// let iter: std::vec::Drain<_> = v.drain(..);
/// ```
#[stable(feature = "drain", since = "1.6.0")]
pub struct Drain<
    'a,
    T: 'a,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global,
> {
    /// Index of tail to preserve
    pub(super) tail_start: usize,
    /// Length of tail
    pub(super) tail_len: usize,
    /// Current remaining range to remove
    pub(super) iter: slice::Iter<'a, T>,
    /// Raw pointer back to the source vector; used on drop to move the
    /// preserved tail down and restore the vector's length.
    pub(super) vec: NonNull<Vec<T, A>>,
}
46316
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for Drain<'_, T, A> {
    // Renders as `Drain([remaining, elements])` — only the not-yet-drained
    // items, not the preserved tail.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
    }
}
46323
impl<'a, T, A: Allocator> Drain<'a, T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut vec = vec!['a', 'b', 'c'];
    /// let mut drain = vec.drain(..);
    /// assert_eq!(drain.as_slice(), &['a', 'b', 'c']);
    /// let _ = drain.next().unwrap();
    /// assert_eq!(drain.as_slice(), &['b', 'c']);
    /// ```
    #[stable(feature = "vec_drain_as_slice", since = "1.46.0")]
    pub fn as_slice(&self) -> &[T] {
        self.iter.as_slice()
    }

    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        // SAFETY: `vec` points to the source vector, which outlives `'a` and
        // is not moved while this `Drain` borrows it.
        unsafe { self.vec.as_ref().allocator() }
    }
}
46348
#[stable(feature = "vec_drain_as_slice", since = "1.46.0")]
// Convenience: lets a `Drain` be passed where `&[T]` is expected, delegating
// to `as_slice` (the remaining, not-yet-drained elements).
impl<'a, T, A: Allocator> AsRef<[T]> for Drain<'a, T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}
46355
#[stable(feature = "drain", since = "1.6.0")]
// SAFETY: `Drain` is only non-auto-Sync/Send because of the raw `NonNull<Vec>`;
// it behaves like `&mut Vec<T, A>`, so the same bounds as for references apply.
unsafe impl<T: Sync, A: Sync + Allocator> Sync for Drain<'_, T, A> {}
#[stable(feature = "drain", since = "1.6.0")]
unsafe impl<T: Send, A: Send + Allocator> Send for Drain<'_, T, A> {}
46360
#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> Iterator for Drain<'_, T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        // SAFETY: each element in the drain range is read exactly once and
        // then considered moved out; `Drop` only handles elements `next`
        // never reached.
        self.iter.next().map(|elt| unsafe { ptr::read(elt as *const _) })
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: delegated to the underlying slice iterator.
        self.iter.size_hint()
    }
}
46374
#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> DoubleEndedIterator for Drain<'_, T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        // SAFETY: same as `next` — each element is moved out at most once.
        self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) })
    }
}
46382
#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> Drop for Drain<'_, T, A> {
    fn drop(&mut self) {
        /// Continues dropping the remaining elements in the `Drain`, then moves back the
        /// un-`Drain`ed elements to restore the original `Vec`.
        struct DropGuard<'r, 'a, T, A: Allocator>(&'r mut Drain<'a, T, A>);

        impl<'r, 'a, T, A: Allocator> Drop for DropGuard<'r, 'a, T, A> {
            fn drop(&mut self) {
                // Continue the same loop we have below. If the loop already finished, this does
                // nothing.
                self.0.for_each(drop);

                if self.0.tail_len > 0 {
                    unsafe {
                        let source_vec = self.0.vec.as_mut();
                        // memmove back untouched tail, update to new length
                        let start = source_vec.len();
                        let tail = self.0.tail_start;
                        if tail != start {
                            // `ptr::copy` (not `copy_nonoverlapping`) because
                            // the drained gap may be shorter than the tail,
                            // making source and destination overlap.
                            let src = source_vec.as_ptr().add(tail);
                            let dst = source_vec.as_mut_ptr().add(start);
                            ptr::copy(src, dst, self.0.tail_len);
                        }
                        source_vec.set_len(start + self.0.tail_len);
                    }
                }
            }
        }

        // exhaust self first
        // Each element is dropped under a fresh guard: if that drop panics,
        // the guard's Drop still exhausts the rest and restores the tail,
        // keeping the Vec in a valid state during unwinding.
        while let Some(item) = self.next() {
            let guard = DropGuard(self);
            drop(item);
            mem::forget(guard);
        }

        // Drop a `DropGuard` to move back the non-drained tail of `self`.
        DropGuard(self);
    }
}
46424
#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> ExactSizeIterator for Drain<'_, T, A> {
    // Delegates to the inner slice iterator, whose length is always exact.
    fn is_empty(&self) -> bool {
        self.iter.is_empty()
    }
}
46431
#[unstable(feature = "trusted_len", issue = "37572")]
// SAFETY: length is delegated to the inner `slice::Iter`, which is exact.
unsafe impl<T, A: Allocator> TrustedLen for Drain<'_, T, A> {}

#[stable(feature = "fused", since = "1.26.0")]
// Fused because the inner `slice::Iter` is fused.
impl<T, A: Allocator> FusedIterator for Drain<'_, T, A> {}
46437use core::iter::TrustedLen;
46438use core::ptr::{self};
46439
46440use super::{SpecExtend, Vec};
46441
/// Another specialization trait for Vec::from_iter
/// necessary to manually prioritize overlapping specializations
/// see [`SpecFromIter`](super::SpecFromIter) for details.
pub(super) trait SpecFromIterNested<T, I> {
    /// Builds a `Vec` by consuming `iter`.
    fn from_iter(iter: I) -> Self;
}
46448
impl<T, I> SpecFromIterNested<T, I> for Vec<T>
where
    I: Iterator<Item = T>,
{
    default fn from_iter(mut iterator: I) -> Self {
        // Unroll the first iteration, as the vector is going to be
        // expanded on this iteration in every case when the iterable is not
        // empty, but the loop in extend_desugared() is not going to see the
        // vector being full in the few subsequent loop iterations.
        // So we get better branch prediction.
        let mut vector = match iterator.next() {
            None => return Vec::new(),
            Some(element) => {
                // `+1` accounts for the element already pulled off;
                // `saturating_add` guards against a lower bound of usize::MAX.
                let (lower, _) = iterator.size_hint();
                let mut vector = Vec::with_capacity(lower.saturating_add(1));
                unsafe {
                    // SAFETY: capacity is at least 1, so writing the first
                    // element and setting len to 1 is in-bounds and fully
                    // initialized.
                    ptr::write(vector.as_mut_ptr(), element);
                    vector.set_len(1);
                }
                vector
            }
        };
        // must delegate to spec_extend() since extend() itself delegates
        // to spec_from for empty Vecs
        <Vec<T> as SpecExtend<T, I>>::spec_extend(&mut vector, iterator);
        vector
    }
}
46477
// TrustedLen lets us allocate the exact final capacity up front and then
// reuse the bulk-write extend specialization.
impl<T, I> SpecFromIterNested<T, I> for Vec<T>
where
    I: TrustedLen<Item = T>,
{
    fn from_iter(iterator: I) -> Self {
        let mut vector = match iterator.size_hint() {
            (_, Some(upper)) => Vec::with_capacity(upper),
            // TrustedLen contract guarantees that `size_hint() == (_, None)` means that there
            // are more than `usize::MAX` elements.
            // Since the previous branch would eagerly panic if the capacity is too large
            // (via `with_capacity`) we do the same here.
            _ => panic!("capacity overflow"),
        };
        // reuse extend specialization for TrustedLen
        vector.spec_extend(iterator);
        vector
    }
}
46496use crate::boxed::Box;
46497
#[rustc_specialization_trait]
// SAFETY contract for implementors: `is_zero() == true` must imply the value
// is represented by the all-zero bit pattern, so `vec![x; n]` may use zeroed
// allocation (see spec_from_elem).
pub(super) unsafe trait IsZero {
    /// Whether this value is zero
    fn is_zero(&self) -> bool;
}
46503
// Generates an `IsZero` impl for `$t` using `$is_zero` (a closure taking the
// value by copy) as the zero test. Used for the primitive impls below.
macro_rules! impl_is_zero {
    ($t:ty, $is_zero:expr) => {
        unsafe impl IsZero for $t {
            #[inline]
            fn is_zero(&self) -> bool {
                $is_zero(*self)
            }
        }
    };
}
46514
// Signed integers. (`i8`/`u8` are absent: they already have dedicated
// memset-based `SpecFromElem` impls, see spec_from_elem.)
impl_is_zero!(i16, |x| x == 0);
impl_is_zero!(i32, |x| x == 0);
impl_is_zero!(i64, |x| x == 0);
impl_is_zero!(i128, |x| x == 0);
impl_is_zero!(isize, |x| x == 0);

// Unsigned integers.
impl_is_zero!(u16, |x| x == 0);
impl_is_zero!(u32, |x| x == 0);
impl_is_zero!(u64, |x| x == 0);
impl_is_zero!(u128, |x| x == 0);
impl_is_zero!(usize, |x| x == 0);

impl_is_zero!(bool, |x| x == false);
impl_is_zero!(char, |x| x == '\0');

// Floats compare bit patterns, not values: `-0.0 == 0.0` numerically but its
// representation is not all-zero bits, so it must NOT count as zero here.
impl_is_zero!(f32, |x: f32| x.to_bits() == 0);
impl_is_zero!(f64, |x: f64| x.to_bits() == 0);
46532
// SAFETY: a null raw pointer is represented by the all-zero bit pattern
// (for thin pointers; `T: Sized` here, so pointers are thin).
unsafe impl<T> IsZero for *const T {
    #[inline]
    fn is_zero(&self) -> bool {
        (*self).is_null()
    }
}

unsafe impl<T> IsZero for *mut T {
    #[inline]
    fn is_zero(&self) -> bool {
        (*self).is_null()
    }
}
46546
// `Option<&T>` and `Option<Box<T>>` are guaranteed to represent `None` as null.
// For fat pointers, the bytes that would be the pointer metadata in the `Some`
// variant are padding in the `None` variant, so ignoring them and
// zero-initializing instead is ok.
// `Option<&mut T>` never implements `Clone`, so there's no need for an impl of
// `SpecFromElem`.

// SAFETY: niche optimization guarantees `None` is the null (all-zero) pointer.
unsafe impl<T: ?Sized> IsZero for Option<&T> {
    #[inline]
    fn is_zero(&self) -> bool {
        self.is_none()
    }
}

// SAFETY: same null-pointer-optimization guarantee as `Option<&T>` above.
unsafe impl<T: ?Sized> IsZero for Option<Box<T>> {
    #[inline]
    fn is_zero(&self) -> bool {
        self.is_none()
    }
}
46567use core::mem::ManuallyDrop;
46568use core::ptr::{self};
46569use core::slice::{self};
46570
46571use super::{IntoIter, SpecExtend, SpecFromIterNested, Vec};
46572
/// Specialization trait used for Vec::from_iter
///
/// ## The delegation graph:
///
/// ```text
/// +-------------+
/// |FromIterator |
/// +-+-----------+
///   |
///   v
/// +-+-------------------------------+  +---------------------+
/// |SpecFromIter                  +---->+SpecFromIterNested   |
/// |where I:                      |  |  |where I:             |
/// |  Iterator (default)----------+  |  |  Iterator (default) |
/// |  vec::IntoIter               |  |  |  TrustedLen         |
/// |  SourceIterMarker---fallback-+  |  |                     |
/// |  slice::Iter                    |  |                     |
/// |  Iterator<Item = &Clone>        |  +---------------------+
/// +---------------------------------+
/// ```
pub(super) trait SpecFromIter<T, I> {
    /// Builds a `Vec` by consuming `iter`, choosing the cheapest strategy
    /// the iterator type statically allows (see graph above).
    fn from_iter(iter: I) -> Self;
}
46596
// Generic fallback: defer to the nested specialization layer, which further
// splits on `TrustedLen` (see SpecFromIterNested).
impl<T, I> SpecFromIter<T, I> for Vec<T>
where
    I: Iterator<Item = T>,
{
    default fn from_iter(iterator: I) -> Self {
        SpecFromIterNested::from_iter(iterator)
    }
}
46605
impl<T> SpecFromIter<T, IntoIter<T>> for Vec<T> {
    fn from_iter(iterator: IntoIter<T>) -> Self {
        // A common case is passing a vector into a function which immediately
        // re-collects into a vector. We can short circuit this if the IntoIter
        // has not been advanced at all.
        // When it has been advanced, we can also reuse the memory and move the data to the front.
        // But we only do so when the resulting Vec wouldn't have more unused capacity
        // than creating it through the generic FromIterator implementation would. That limitation
        // is not strictly necessary as Vec's allocation behavior is intentionally unspecified.
        // But it is a conservative choice.
        let has_advanced = iterator.buf.as_ptr() as *const _ != iterator.ptr;
        if !has_advanced || iterator.len() >= iterator.cap / 2 {
            unsafe {
                // SAFETY: `ManuallyDrop` prevents the IntoIter's Drop from
                // freeing the buffer we are about to hand to the new Vec.
                let it = ManuallyDrop::new(iterator);
                if has_advanced {
                    // Slide remaining elements to the buffer start;
                    // `ptr::copy` handles the (possible) overlap.
                    ptr::copy(it.ptr, it.buf.as_ptr(), it.len());
                }
                return Vec::from_raw_parts(it.buf.as_ptr(), it.len(), it.cap);
            }
        }

        let mut vec = Vec::new();
        // must delegate to spec_extend() since extend() itself delegates
        // to spec_from for empty Vecs
        vec.spec_extend(iterator);
        vec
    }
}
46634
// Reference iterators: clone each element, then re-dispatch so the by-value
// iterator can still hit a more specific specialization.
impl<'a, T: 'a, I> SpecFromIter<&'a T, I> for Vec<T>
where
    I: Iterator<Item = &'a T>,
    T: Clone,
{
    default fn from_iter(iterator: I) -> Self {
        SpecFromIter::from_iter(iterator.cloned())
    }
}
46644
// This utilizes `iterator.as_slice().to_vec()` since spec_extend
// must take more steps to reason about the final capacity + length
// and thus do more work. `to_vec()` directly allocates the correct amount
// and fills it exactly.
impl<'a, T: 'a + Clone> SpecFromIter<&'a T, slice::Iter<'a, T>> for Vec<T> {
    #[cfg(not(test))]
    fn from_iter(iterator: slice::Iter<'a, T>) -> Self {
        iterator.as_slice().to_vec()
    }

    // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
    // required for this method definition, is not available. Instead use the
    // `slice::to_vec`  function which is only available with cfg(test)
    // NB see the slice::hack module in slice.rs for more information
    #[cfg(test)]
    fn from_iter(iterator: slice::Iter<'a, T>) -> Self {
        crate::slice::to_vec(iterator.as_slice(), crate::alloc::Global)
    }
}
46664use core::iter::{InPlaceIterable, SourceIter, TrustedRandomAccess};
46665use core::mem::{self, ManuallyDrop};
46666use core::ptr::{self};
46667
46668use super::{AsIntoIter, InPlaceDrop, SpecFromIter, SpecFromIterNested, Vec};
46669
/// Specialization marker for collecting an iterator pipeline into a Vec while reusing the
/// source allocation, i.e. executing the pipeline in place.
///
/// The SourceIter parent trait is necessary for the specializing function to access the allocation
/// which is to be reused. But it is not sufficient for the specialization to be valid. See
/// additional bounds on the impl.
#[rustc_unsafe_specialization_marker]
pub(super) trait SourceIterMarker: SourceIter<Source: AsIntoIter> {}

// The std-internal SourceIter/InPlaceIterable traits are only implemented by chains of
// Adapter<Adapter<Adapter<IntoIter>>> (all owned by core/std). Additional bounds
// on the adapter implementations (beyond `impl<I: Trait> Trait for Adapter<I>`) only depend on other
// traits already marked as specialization traits (Copy, TrustedRandomAccess, FusedIterator).
// I.e. the marker does not depend on lifetimes of user-supplied types. Modulo the Copy hole, which
// several other specializations already depend on.
impl<T> SourceIterMarker for T where T: SourceIter<Source: AsIntoIter> + InPlaceIterable {}
46686
// In-place collect: run the whole adapter pipeline writing results directly
// into the source `IntoIter`'s own buffer, avoiding a new allocation.
impl<T, I> SpecFromIter<T, I> for Vec<T>
where
    I: Iterator<Item = T> + SourceIterMarker,
{
    default fn from_iter(mut iterator: I) -> Self {
        // Additional requirements which cannot be expressed via trait bounds. We rely on const eval
        // instead:
        // a) no ZSTs as there would be no allocation to reuse and pointer arithmetic would panic
        // b) size match as required by Alloc contract
        // c) alignments match as required by Alloc contract
        if mem::size_of::<T>() == 0
            || mem::size_of::<T>()
                != mem::size_of::<<<I as SourceIter>::Source as AsIntoIter>::Item>()
            || mem::align_of::<T>()
                != mem::align_of::<<<I as SourceIter>::Source as AsIntoIter>::Item>()
        {
            // fallback to more generic implementations
            return SpecFromIterNested::from_iter(iterator);
        }

        // Snapshot the source buffer's pointers/capacity before iterating; the
        // destination is the same allocation reinterpreted as `*mut T`.
        let (src_buf, src_ptr, dst_buf, dst_end, cap) = unsafe {
            let inner = iterator.as_inner().as_into_iter();
            (
                inner.buf.as_ptr(),
                inner.ptr,
                inner.buf.as_ptr() as *mut T,
                inner.end as *const T,
                inner.cap,
            )
        };

        let len = SpecInPlaceCollect::collect_in_place(&mut iterator, dst_buf, dst_end);

        let src = unsafe { iterator.as_inner().as_into_iter() };
        // check if SourceIter contract was upheld
        // caveat: if they weren't we may not even make it to this point
        debug_assert_eq!(src_buf, src.buf.as_ptr());
        // check InPlaceIterable contract. This is only possible if the iterator advanced the
        // source pointer at all. If it uses unchecked access via TrustedRandomAccess
        // then the source pointer will stay in its initial position and we can't use it as reference
        if src.ptr != src_ptr {
            debug_assert!(
                unsafe { dst_buf.add(len) as *const _ } <= src.ptr,
                "InPlaceIterable contract violation, write pointer advanced beyond read pointer"
            );
        }

        // drop any remaining values at the tail of the source
        // but prevent drop of the allocation itself once IntoIter goes out of scope
        // if the drop panics then we also leak any elements collected into dst_buf
        src.forget_allocation_drop_remaining();

        // SAFETY: `dst_buf..dst_buf+len` holds the collected elements and the
        // allocation (capacity `cap`) was taken over from the source above.
        let vec = unsafe { Vec::from_raw_parts(dst_buf, len, cap) };

        vec
    }
}
46744
/// Returns the `try_fold` step function for the in-place collect loop: writes
/// each item through the `InPlaceDrop` sink and advances its write pointer.
/// The `!` error type makes the fold infallible; the sink doubles as a panic
/// guard for already-written elements.
fn write_in_place_with_drop<T>(
    src_end: *const T,
) -> impl FnMut(InPlaceDrop<T>, T) -> Result<InPlaceDrop<T>, !> {
    move |mut sink, item| {
        unsafe {
            // the InPlaceIterable contract cannot be verified precisely here since
            // try_fold has an exclusive reference to the source pointer
            // all we can do is check if it's still in range
            debug_assert!(sink.dst as *const _ <= src_end, "InPlaceIterable contract violation");
            ptr::write(sink.dst, item);
            sink.dst = sink.dst.add(1);
        }
        Ok(sink)
    }
}
46760
/// Helper trait to hold specialized implementations of the in-place iterate-collect loop
trait SpecInPlaceCollect<T, I>: Iterator<Item = T> {
    /// Collects an iterator (`self`) into the destination buffer (`dst`) and returns the number of items
    /// collected. `end` is the last writable element of the allocation and used for bounds checks.
    fn collect_in_place(&mut self, dst: *mut T, end: *const T) -> usize;
}
46767
impl<T, I> SpecInPlaceCollect<T, I> for I
where
    I: Iterator<Item = T>,
{
    #[inline]
    default fn collect_in_place(&mut self, dst_buf: *mut T, end: *const T) -> usize {
        // use try-fold since
        // - it vectorizes better for some iterator adapters
        // - unlike most internal iteration methods, it only takes a &mut self
        // - it lets us thread the write pointer through its innards and get it back in the end
        let sink = InPlaceDrop { inner: dst_buf, dst: dst_buf };
        // `unwrap` cannot fail: the error type is `!` (uninhabited).
        let sink =
            self.try_fold::<_, _, Result<_, !>>(sink, write_in_place_with_drop(end)).unwrap();
        // iteration succeeded, don't drop head
        // (ManuallyDrop suppresses the sink's panic-guard Drop; the element
        // count is the distance the write pointer advanced.)
        unsafe { ManuallyDrop::new(sink).dst.offset_from(dst_buf) as usize }
    }
}
46785
46786impl<T, I> SpecInPlaceCollect<T, I> for I
46787where
46788    I: Iterator<Item = T> + TrustedRandomAccess,
46789{
46790    #[inline]
46791    fn collect_in_place(&mut self, dst_buf: *mut T, end: *const T) -> usize {
46792        let len = self.size();
46793        let mut drop_guard = InPlaceDrop { inner: dst_buf, dst: dst_buf };
46794        for i in 0..len {
46795            // Safety: InplaceIterable contract guarantees that for every element we read
46796            // one slot in the underlying storage will have been freed up and we can immediately
46797            // write back the result.
46798            unsafe {
46799                let dst = dst_buf.offset(i as isize);
46800                debug_assert!(dst as *const _ <= end, "InPlaceIterable contract violation");
46801                ptr::write(dst, self.__iterator_get_unchecked(i));
46802                drop_guard.dst = dst.add(1);
46803            }
46804        }
46805        mem::forget(drop_guard);
46806        len
46807    }
46808}
46809//! A contiguous growable array type with heap-allocated contents, written
46810//! `Vec<T>`.
46811//!
46812//! Vectors have `O(1)` indexing, amortized `O(1)` push (to the end) and
46813//! `O(1)` pop (from the end).
46814//!
46815//! Vectors ensure they never allocate more than `isize::MAX` bytes.
46816//!
46817//! # Examples
46818//!
46819//! You can explicitly create a [`Vec`] with [`Vec::new`]:
46820//!
46821//! ```
46822//! let v: Vec<i32> = Vec::new();
46823//! ```
46824//!
46825//! ...or by using the [`vec!`] macro:
46826//!
46827//! ```
46828//! let v: Vec<i32> = vec![];
46829//!
46830//! let v = vec![1, 2, 3, 4, 5];
46831//!
46832//! let v = vec![0; 10]; // ten zeroes
46833//! ```
46834//!
46835//! You can [`push`] values onto the end of a vector (which will grow the vector
46836//! as needed):
46837//!
46838//! ```
46839//! let mut v = vec![1, 2];
46840//!
46841//! v.push(3);
46842//! ```
46843//!
46844//! Popping values works in much the same way:
46845//!
46846//! ```
46847//! let mut v = vec![1, 2];
46848//!
46849//! let two = v.pop();
46850//! ```
46851//!
46852//! Vectors also support indexing (through the [`Index`] and [`IndexMut`] traits):
46853//!
46854//! ```
46855//! let mut v = vec![1, 2, 3];
46856//! let three = v[2];
46857//! v[1] = v[1] + 5;
46858//! ```
46859//!
46860//! [`push`]: Vec::push
46861
46862#![stable(feature = "rust1", since = "1.0.0")]
46863
46864use core::cmp::{self, Ordering};
46865use core::convert::TryFrom;
46866use core::fmt;
46867use core::hash::{Hash, Hasher};
46868use core::intrinsics::{arith_offset, assume};
46869use core::iter::{self, FromIterator};
46870use core::marker::PhantomData;
46871use core::mem::{self, ManuallyDrop, MaybeUninit};
46872use core::ops::{self, Index, IndexMut, Range, RangeBounds};
46873use core::ptr::{self, NonNull};
46874use core::slice::{self, SliceIndex};
46875
46876use crate::alloc::{Allocator, Global};
46877use crate::borrow::{Cow, ToOwned};
46878use crate::boxed::Box;
46879use crate::collections::TryReserveError;
46880use crate::raw_vec::RawVec;
46881
46882#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
46883pub use self::drain_filter::DrainFilter;
46884
46885mod drain_filter;
46886
46887#[stable(feature = "vec_splice", since = "1.21.0")]
46888pub use self::splice::Splice;
46889
46890mod splice;
46891
46892#[stable(feature = "drain", since = "1.6.0")]
46893pub use self::drain::Drain;
46894
46895mod drain;
46896
46897mod cow;
46898
46899pub(crate) use self::into_iter::AsIntoIter;
46900#[stable(feature = "rust1", since = "1.0.0")]
46901pub use self::into_iter::IntoIter;
46902
46903mod into_iter;
46904
46905use self::is_zero::IsZero;
46906
46907mod is_zero;
46908
46909mod source_iter_marker;
46910
46911mod partial_eq;
46912
46913use self::spec_from_elem::SpecFromElem;
46914
46915mod spec_from_elem;
46916
46917use self::set_len_on_drop::SetLenOnDrop;
46918
46919mod set_len_on_drop;
46920
46921use self::in_place_drop::InPlaceDrop;
46922
46923mod in_place_drop;
46924
46925use self::spec_from_iter_nested::SpecFromIterNested;
46926
46927mod spec_from_iter_nested;
46928
46929use self::spec_from_iter::SpecFromIter;
46930
46931mod spec_from_iter;
46932
46933use self::spec_extend::SpecExtend;
46934
46935mod spec_extend;
46936
46937/// A contiguous growable array type, written as `Vec<T>` and pronounced 'vector'.
46938///
46939/// # Examples
46940///
46941/// ```
46942/// let mut vec = Vec::new();
46943/// vec.push(1);
46944/// vec.push(2);
46945///
46946/// assert_eq!(vec.len(), 2);
46947/// assert_eq!(vec[0], 1);
46948///
46949/// assert_eq!(vec.pop(), Some(2));
46950/// assert_eq!(vec.len(), 1);
46951///
46952/// vec[0] = 7;
46953/// assert_eq!(vec[0], 7);
46954///
46955/// vec.extend([1, 2, 3].iter().copied());
46956///
46957/// for x in &vec {
46958///     println!("{}", x);
46959/// }
46960/// assert_eq!(vec, [7, 1, 2, 3]);
46961/// ```
46962///
46963/// The [`vec!`] macro is provided to make initialization more convenient:
46964///
46965/// ```
46966/// let mut vec = vec![1, 2, 3];
46967/// vec.push(4);
46968/// assert_eq!(vec, [1, 2, 3, 4]);
46969/// ```
46970///
46971/// It can also initialize each element of a `Vec<T>` with a given value.
46972/// This may be more efficient than performing allocation and initialization
46973/// in separate steps, especially when initializing a vector of zeros:
46974///
46975/// ```
46976/// let vec = vec![0; 5];
46977/// assert_eq!(vec, [0, 0, 0, 0, 0]);
46978///
46979/// // The following is equivalent, but potentially slower:
46980/// let mut vec = Vec::with_capacity(5);
46981/// vec.resize(5, 0);
46982/// assert_eq!(vec, [0, 0, 0, 0, 0]);
46983/// ```
46984///
46985/// For more information, see
46986/// [Capacity and Reallocation](#capacity-and-reallocation).
46987///
46988/// Use a `Vec<T>` as an efficient stack:
46989///
46990/// ```
46991/// let mut stack = Vec::new();
46992///
46993/// stack.push(1);
46994/// stack.push(2);
46995/// stack.push(3);
46996///
46997/// while let Some(top) = stack.pop() {
46998///     // Prints 3, 2, 1
46999///     println!("{}", top);
47000/// }
47001/// ```
47002///
47003/// # Indexing
47004///
/// The `Vec` type allows access to values by index, because it implements the
/// [`Index`] trait. An example will be more explicit:
47007///
47008/// ```
47009/// let v = vec![0, 2, 4, 6];
47010/// println!("{}", v[1]); // it will display '2'
47011/// ```
47012///
47013/// However be careful: if you try to access an index which isn't in the `Vec`,
47014/// your software will panic! You cannot do this:
47015///
47016/// ```should_panic
47017/// let v = vec![0, 2, 4, 6];
47018/// println!("{}", v[6]); // it will panic!
47019/// ```
47020///
47021/// Use [`get`] and [`get_mut`] if you want to check whether the index is in
47022/// the `Vec`.
47023///
47024/// # Slicing
47025///
47026/// A `Vec` can be mutable. On the other hand, slices are read-only objects.
47027/// To get a [slice][prim@slice], use [`&`]. Example:
47028///
47029/// ```
47030/// fn read_slice(slice: &[usize]) {
47031///     // ...
47032/// }
47033///
47034/// let v = vec![0, 1];
47035/// read_slice(&v);
47036///
47037/// // ... and that's all!
47038/// // you can also do it like this:
47039/// let u: &[usize] = &v;
47040/// // or like this:
47041/// let u: &[_] = &v;
47042/// ```
47043///
47044/// In Rust, it's more common to pass slices as arguments rather than vectors
47045/// when you just want to provide read access. The same goes for [`String`] and
47046/// [`&str`].
47047///
47048/// # Capacity and reallocation
47049///
47050/// The capacity of a vector is the amount of space allocated for any future
47051/// elements that will be added onto the vector. This is not to be confused with
47052/// the *length* of a vector, which specifies the number of actual elements
47053/// within the vector. If a vector's length exceeds its capacity, its capacity
47054/// will automatically be increased, but its elements will have to be
47055/// reallocated.
47056///
47057/// For example, a vector with capacity 10 and length 0 would be an empty vector
47058/// with space for 10 more elements. Pushing 10 or fewer elements onto the
47059/// vector will not change its capacity or cause reallocation to occur. However,
47060/// if the vector's length is increased to 11, it will have to reallocate, which
47061/// can be slow. For this reason, it is recommended to use [`Vec::with_capacity`]
47062/// whenever possible to specify how big the vector is expected to get.
47063///
47064/// # Guarantees
47065///
47066/// Due to its incredibly fundamental nature, `Vec` makes a lot of guarantees
47067/// about its design. This ensures that it's as low-overhead as possible in
47068/// the general case, and can be correctly manipulated in primitive ways
47069/// by unsafe code. Note that these guarantees refer to an unqualified `Vec<T>`.
47070/// If additional type parameters are added (e.g., to support custom allocators),
47071/// overriding their defaults may change the behavior.
47072///
47073/// Most fundamentally, `Vec` is and always will be a (pointer, capacity, length)
47074/// triplet. No more, no less. The order of these fields is completely
47075/// unspecified, and you should use the appropriate methods to modify these.
47076/// The pointer will never be null, so this type is null-pointer-optimized.
47077///
47078/// However, the pointer might not actually point to allocated memory. In particular,
47079/// if you construct a `Vec` with capacity 0 via [`Vec::new`], [`vec![]`][`vec!`],
47080/// [`Vec::with_capacity(0)`][`Vec::with_capacity`], or by calling [`shrink_to_fit`]
47081/// on an empty Vec, it will not allocate memory. Similarly, if you store zero-sized
47082/// types inside a `Vec`, it will not allocate space for them. *Note that in this case
47083/// the `Vec` might not report a [`capacity`] of 0*. `Vec` will allocate if and only
47084/// if [`mem::size_of::<T>`]`() * capacity() > 0`. In general, `Vec`'s allocation
47085/// details are very subtle &mdash; if you intend to allocate memory using a `Vec`
47086/// and use it for something else (either to pass to unsafe code, or to build your
47087/// own memory-backed collection), be sure to deallocate this memory by using
47088/// `from_raw_parts` to recover the `Vec` and then dropping it.
47089///
47090/// If a `Vec` *has* allocated memory, then the memory it points to is on the heap
47091/// (as defined by the allocator Rust is configured to use by default), and its
47092/// pointer points to [`len`] initialized, contiguous elements in order (what
47093/// you would see if you coerced it to a slice), followed by [`capacity`]` -
47094/// `[`len`] logically uninitialized, contiguous elements.
47095///
47096/// A vector containing the elements `'a'` and `'b'` with capacity 4 can be
47097/// visualized as below. The top part is the `Vec` struct, it contains a
47098/// pointer to the head of the allocation in the heap, length and capacity.
47099/// The bottom part is the allocation on the heap, a contiguous memory block.
47100///
47101/// ```text
47102///             ptr      len  capacity
47103///        +--------+--------+--------+
47104///        | 0x0123 |      2 |      4 |
47105///        +--------+--------+--------+
47106///             |
47107///             v
47108/// Heap   +--------+--------+--------+--------+
47109///        |    'a' |    'b' | uninit | uninit |
47110///        +--------+--------+--------+--------+
47111/// ```
47112///
47113/// - **uninit** represents memory that is not initialized, see [`MaybeUninit`].
47114/// - Note: the ABI is not stable and `Vec` makes no guarantees about its memory
47115///   layout (including the order of fields).
47116///
47117/// `Vec` will never perform a "small optimization" where elements are actually
47118/// stored on the stack for two reasons:
47119///
47120/// * It would make it more difficult for unsafe code to correctly manipulate
47121///   a `Vec`. The contents of a `Vec` wouldn't have a stable address if it were
47122///   only moved, and it would be more difficult to determine if a `Vec` had
47123///   actually allocated memory.
47124///
47125/// * It would penalize the general case, incurring an additional branch
47126///   on every access.
47127///
47128/// `Vec` will never automatically shrink itself, even if completely empty. This
47129/// ensures no unnecessary allocations or deallocations occur. Emptying a `Vec`
47130/// and then filling it back up to the same [`len`] should incur no calls to
47131/// the allocator. If you wish to free up unused memory, use
47132/// [`shrink_to_fit`] or [`shrink_to`].
47133///
47134/// [`push`] and [`insert`] will never (re)allocate if the reported capacity is
47135/// sufficient. [`push`] and [`insert`] *will* (re)allocate if
47136/// [`len`]` == `[`capacity`]. That is, the reported capacity is completely
47137/// accurate, and can be relied on. It can even be used to manually free the memory
47138/// allocated by a `Vec` if desired. Bulk insertion methods *may* reallocate, even
47139/// when not necessary.
47140///
47141/// `Vec` does not guarantee any particular growth strategy when reallocating
47142/// when full, nor when [`reserve`] is called. The current strategy is basic
47143/// and it may prove desirable to use a non-constant growth factor. Whatever
47144/// strategy is used will of course guarantee *O*(1) amortized [`push`].
47145///
47146/// `vec![x; n]`, `vec![a, b, c, d]`, and
47147/// [`Vec::with_capacity(n)`][`Vec::with_capacity`], will all produce a `Vec`
47148/// with exactly the requested capacity. If [`len`]` == `[`capacity`],
47149/// (as is the case for the [`vec!`] macro), then a `Vec<T>` can be converted to
47150/// and from a [`Box<[T]>`][owned slice] without reallocating or moving the elements.
47151///
47152/// `Vec` will not specifically overwrite any data that is removed from it,
47153/// but also won't specifically preserve it. Its uninitialized memory is
47154/// scratch space that it may use however it wants. It will generally just do
47155/// whatever is most efficient or otherwise easy to implement. Do not rely on
47156/// removed data to be erased for security purposes. Even if you drop a `Vec`, its
47157/// buffer may simply be reused by another `Vec`. Even if you zero a `Vec`'s memory
47158/// first, that might not actually happen because the optimizer does not consider
47159/// this a side-effect that must be preserved. There is one case which we will
47160/// not break, however: using `unsafe` code to write to the excess capacity,
47161/// and then increasing the length to match, is always valid.
47162///
47163/// Currently, `Vec` does not guarantee the order in which elements are dropped.
47164/// The order has changed in the past and may change again.
47165///
47166/// [`get`]: ../../std/vec/struct.Vec.html#method.get
47167/// [`get_mut`]: ../../std/vec/struct.Vec.html#method.get_mut
47168/// [`String`]: crate::string::String
47169/// [`&str`]: type@str
47170/// [`shrink_to_fit`]: Vec::shrink_to_fit
47171/// [`shrink_to`]: Vec::shrink_to
47172/// [`capacity`]: Vec::capacity
47173/// [`mem::size_of::<T>`]: core::mem::size_of
47174/// [`len`]: Vec::len
47175/// [`push`]: Vec::push
47176/// [`insert`]: Vec::insert
47177/// [`reserve`]: Vec::reserve
47178/// [`MaybeUninit`]: core::mem::MaybeUninit
47179/// [owned slice]: Box
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "vec_type")]
pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global> {
    // Owns the allocation: pointer, capacity, and the allocator itself.
    buf: RawVec<T, A>,
    // Number of initialized elements; invariant: `len <= buf.capacity()`.
    len: usize,
}
47186
47187////////////////////////////////////////////////////////////////////////////////
47188// Inherent methods
47189////////////////////////////////////////////////////////////////////////////////
47190
47191impl<T> Vec<T> {
47192    /// Constructs a new, empty `Vec<T>`.
47193    ///
47194    /// The vector will not allocate until elements are pushed onto it.
47195    ///
47196    /// # Examples
47197    ///
47198    /// ```
47199    /// # #![allow(unused_mut)]
47200    /// let mut vec: Vec<i32> = Vec::new();
47201    /// ```
    #[inline]
    #[rustc_const_stable(feature = "const_vec_new", since = "1.39.0")]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const fn new() -> Self {
        // `RawVec::NEW` is a zero-capacity buffer, so no allocation happens
        // until the first element is pushed (see the doc comment above).
        Vec { buf: RawVec::NEW, len: 0 }
    }
47208
47209    /// Constructs a new, empty `Vec<T>` with the specified capacity.
47210    ///
47211    /// The vector will be able to hold exactly `capacity` elements without
47212    /// reallocating. If `capacity` is 0, the vector will not allocate.
47213    ///
47214    /// It is important to note that although the returned vector has the
47215    /// *capacity* specified, the vector will have a zero *length*. For an
47216    /// explanation of the difference between length and capacity, see
47217    /// *[Capacity and reallocation]*.
47218    ///
47219    /// [Capacity and reallocation]: #capacity-and-reallocation
47220    ///
47221    /// # Panics
47222    ///
47223    /// Panics if the new capacity exceeds `isize::MAX` bytes.
47224    ///
47225    /// # Examples
47226    ///
47227    /// ```
47228    /// let mut vec = Vec::with_capacity(10);
47229    ///
47230    /// // The vector contains no items, even though it has capacity for more
47231    /// assert_eq!(vec.len(), 0);
47232    /// assert_eq!(vec.capacity(), 10);
47233    ///
47234    /// // These are all done without reallocating...
47235    /// for i in 0..10 {
47236    ///     vec.push(i);
47237    /// }
47238    /// assert_eq!(vec.len(), 10);
47239    /// assert_eq!(vec.capacity(), 10);
47240    ///
47241    /// // ...but this may make the vector reallocate
47242    /// vec.push(11);
47243    /// assert_eq!(vec.len(), 11);
47244    /// assert!(vec.capacity() >= 11);
47245    /// ```
    #[inline]
    #[doc(alias = "malloc")]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn with_capacity(capacity: usize) -> Self {
        // Delegate to the allocator-parameterized constructor with the
        // global allocator.
        Self::with_capacity_in(capacity, Global)
    }
47252
47253    /// Creates a `Vec<T>` directly from the raw components of another vector.
47254    ///
47255    /// # Safety
47256    ///
47257    /// This is highly unsafe, due to the number of invariants that aren't
47258    /// checked:
47259    ///
47260    /// * `ptr` needs to have been previously allocated via [`String`]/`Vec<T>`
47261    ///   (at least, it's highly likely to be incorrect if it wasn't).
47262    /// * `T` needs to have the same size and alignment as what `ptr` was allocated with.
47263    ///   (`T` having a less strict alignment is not sufficient, the alignment really
47264    ///   needs to be equal to satisfy the [`dealloc`] requirement that memory must be
47265    ///   allocated and deallocated with the same layout.)
47266    /// * `length` needs to be less than or equal to `capacity`.
47267    /// * `capacity` needs to be the capacity that the pointer was allocated with.
47268    ///
47269    /// Violating these may cause problems like corrupting the allocator's
47270    /// internal data structures. For example it is **not** safe
47271    /// to build a `Vec<u8>` from a pointer to a C `char` array with length `size_t`.
47272    /// It's also not safe to build one from a `Vec<u16>` and its length, because
47273    /// the allocator cares about the alignment, and these two types have different
47274    /// alignments. The buffer was allocated with alignment 2 (for `u16`), but after
47275    /// turning it into a `Vec<u8>` it'll be deallocated with alignment 1.
47276    ///
47277    /// The ownership of `ptr` is effectively transferred to the
47278    /// `Vec<T>` which may then deallocate, reallocate or change the
47279    /// contents of memory pointed to by the pointer at will. Ensure
47280    /// that nothing else uses the pointer after calling this
47281    /// function.
47282    ///
47283    /// [`String`]: crate::string::String
47284    /// [`dealloc`]: crate::alloc::GlobalAlloc::dealloc
47285    ///
47286    /// # Examples
47287    ///
47288    /// ```
47289    /// use std::ptr;
47290    /// use std::mem;
47291    ///
47292    /// let v = vec![1, 2, 3];
47293    ///
47294    // FIXME Update this when vec_into_raw_parts is stabilized
47295    /// // Prevent running `v`'s destructor so we are in complete control
47296    /// // of the allocation.
47297    /// let mut v = mem::ManuallyDrop::new(v);
47298    ///
47299    /// // Pull out the various important pieces of information about `v`
47300    /// let p = v.as_mut_ptr();
47301    /// let len = v.len();
47302    /// let cap = v.capacity();
47303    ///
47304    /// unsafe {
47305    ///     // Overwrite memory with 4, 5, 6
47306    ///     for i in 0..len as isize {
47307    ///         ptr::write(p.offset(i), 4 + i);
47308    ///     }
47309    ///
47310    ///     // Put everything back together into a Vec
47311    ///     let rebuilt = Vec::from_raw_parts(p, len, cap);
47312    ///     assert_eq!(rebuilt, [4, 5, 6]);
47313    /// }
47314    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Self {
        // SAFETY: the caller upholds the invariants documented above. `Global`
        // is the allocator a plain `Vec<T>` allocates with, so ownership is
        // reconstructed against the correct allocator.
        unsafe { Self::from_raw_parts_in(ptr, length, capacity, Global) }
    }
47320}
47321
47322impl<T, A: Allocator> Vec<T, A> {
47323    /// Constructs a new, empty `Vec<T, A>`.
47324    ///
47325    /// The vector will not allocate until elements are pushed onto it.
47326    ///
47327    /// # Examples
47328    ///
47329    /// ```
47330    /// #![feature(allocator_api)]
47331    ///
47332    /// use std::alloc::System;
47333    ///
47334    /// # #[allow(unused_mut)]
47335    /// let mut vec: Vec<i32, _> = Vec::new_in(System);
47336    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub const fn new_in(alloc: A) -> Self {
        // Zero-capacity buffer that stores `alloc`; nothing is allocated
        // until elements are pushed (see the doc comment above).
        Vec { buf: RawVec::new_in(alloc), len: 0 }
    }
47342
47343    /// Constructs a new, empty `Vec<T, A>` with the specified capacity with the provided
47344    /// allocator.
47345    ///
47346    /// The vector will be able to hold exactly `capacity` elements without
47347    /// reallocating. If `capacity` is 0, the vector will not allocate.
47348    ///
47349    /// It is important to note that although the returned vector has the
47350    /// *capacity* specified, the vector will have a zero *length*. For an
47351    /// explanation of the difference between length and capacity, see
47352    /// *[Capacity and reallocation]*.
47353    ///
47354    /// [Capacity and reallocation]: #capacity-and-reallocation
47355    ///
47356    /// # Panics
47357    ///
47358    /// Panics if the new capacity exceeds `isize::MAX` bytes.
47359    ///
47360    /// # Examples
47361    ///
47362    /// ```
47363    /// #![feature(allocator_api)]
47364    ///
47365    /// use std::alloc::System;
47366    ///
47367    /// let mut vec = Vec::with_capacity_in(10, System);
47368    ///
47369    /// // The vector contains no items, even though it has capacity for more
47370    /// assert_eq!(vec.len(), 0);
47371    /// assert_eq!(vec.capacity(), 10);
47372    ///
47373    /// // These are all done without reallocating...
47374    /// for i in 0..10 {
47375    ///     vec.push(i);
47376    /// }
47377    /// assert_eq!(vec.len(), 10);
47378    /// assert_eq!(vec.capacity(), 10);
47379    ///
47380    /// // ...but this may make the vector reallocate
47381    /// vec.push(11);
47382    /// assert_eq!(vec.len(), 11);
47383    /// assert!(vec.capacity() >= 11);
47384    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
        // `RawVec` performs the allocation (and the capacity-overflow panic
        // documented above); the vector starts with length 0 regardless.
        Vec { buf: RawVec::with_capacity_in(capacity, alloc), len: 0 }
    }
47390
47391    /// Creates a `Vec<T, A>` directly from the raw components of another vector.
47392    ///
47393    /// # Safety
47394    ///
47395    /// This is highly unsafe, due to the number of invariants that aren't
47396    /// checked:
47397    ///
47398    /// * `ptr` needs to have been previously allocated via [`String`]/`Vec<T>`
47399    ///   (at least, it's highly likely to be incorrect if it wasn't).
47400    /// * `T` needs to have the same size and alignment as what `ptr` was allocated with.
47401    ///   (`T` having a less strict alignment is not sufficient, the alignment really
47402    ///   needs to be equal to satisfy the [`dealloc`] requirement that memory must be
47403    ///   allocated and deallocated with the same layout.)
47404    /// * `length` needs to be less than or equal to `capacity`.
47405    /// * `capacity` needs to be the capacity that the pointer was allocated with.
47406    ///
47407    /// Violating these may cause problems like corrupting the allocator's
47408    /// internal data structures. For example it is **not** safe
47409    /// to build a `Vec<u8>` from a pointer to a C `char` array with length `size_t`.
47410    /// It's also not safe to build one from a `Vec<u16>` and its length, because
47411    /// the allocator cares about the alignment, and these two types have different
47412    /// alignments. The buffer was allocated with alignment 2 (for `u16`), but after
47413    /// turning it into a `Vec<u8>` it'll be deallocated with alignment 1.
47414    ///
47415    /// The ownership of `ptr` is effectively transferred to the
47416    /// `Vec<T>` which may then deallocate, reallocate or change the
47417    /// contents of memory pointed to by the pointer at will. Ensure
47418    /// that nothing else uses the pointer after calling this
47419    /// function.
47420    ///
47421    /// [`String`]: crate::string::String
47422    /// [`dealloc`]: crate::alloc::GlobalAlloc::dealloc
47423    ///
47424    /// # Examples
47425    ///
47426    /// ```
47427    /// #![feature(allocator_api)]
47428    ///
47429    /// use std::alloc::System;
47430    ///
47431    /// use std::ptr;
47432    /// use std::mem;
47433    ///
47434    /// let mut v = Vec::with_capacity_in(3, System);
47435    /// v.push(1);
47436    /// v.push(2);
47437    /// v.push(3);
47438    ///
47439    // FIXME Update this when vec_into_raw_parts is stabilized
47440    /// // Prevent running `v`'s destructor so we are in complete control
47441    /// // of the allocation.
47442    /// let mut v = mem::ManuallyDrop::new(v);
47443    ///
47444    /// // Pull out the various important pieces of information about `v`
47445    /// let p = v.as_mut_ptr();
47446    /// let len = v.len();
47447    /// let cap = v.capacity();
47448    /// let alloc = v.allocator();
47449    ///
47450    /// unsafe {
47451    ///     // Overwrite memory with 4, 5, 6
47452    ///     for i in 0..len as isize {
47453    ///         ptr::write(p.offset(i), 4 + i);
47454    ///     }
47455    ///
47456    ///     // Put everything back together into a Vec
47457    ///     let rebuilt = Vec::from_raw_parts_in(p, len, cap, alloc.clone());
47458    ///     assert_eq!(rebuilt, [4, 5, 6]);
47459    /// }
47460    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn from_raw_parts_in(ptr: *mut T, length: usize, capacity: usize, alloc: A) -> Self {
        // SAFETY: the caller guarantees (per the safety section above) that
        // `ptr`/`capacity` describe an allocation made by `alloc` and that the
        // first `length` elements are initialized.
        unsafe { Vec { buf: RawVec::from_raw_parts_in(ptr, capacity, alloc), len: length } }
    }
47466
47467    /// Decomposes a `Vec<T>` into its raw components.
47468    ///
47469    /// Returns the raw pointer to the underlying data, the length of
47470    /// the vector (in elements), and the allocated capacity of the
47471    /// data (in elements). These are the same arguments in the same
47472    /// order as the arguments to [`from_raw_parts`].
47473    ///
47474    /// After calling this function, the caller is responsible for the
47475    /// memory previously managed by the `Vec`. The only way to do
47476    /// this is to convert the raw pointer, length, and capacity back
47477    /// into a `Vec` with the [`from_raw_parts`] function, allowing
47478    /// the destructor to perform the cleanup.
47479    ///
47480    /// [`from_raw_parts`]: Vec::from_raw_parts
47481    ///
47482    /// # Examples
47483    ///
47484    /// ```
47485    /// #![feature(vec_into_raw_parts)]
47486    /// let v: Vec<i32> = vec![-1, 0, 1];
47487    ///
47488    /// let (ptr, len, cap) = v.into_raw_parts();
47489    ///
47490    /// let rebuilt = unsafe {
47491    ///     // We can now make changes to the components, such as
47492    ///     // transmuting the raw pointer to a compatible type.
47493    ///     let ptr = ptr as *mut u32;
47494    ///
47495    ///     Vec::from_raw_parts(ptr, len, cap)
47496    /// };
47497    /// assert_eq!(rebuilt, [4294967295, 0, 1]);
47498    /// ```
    #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")]
    pub fn into_raw_parts(self) -> (*mut T, usize, usize) {
        // Wrap in `ManuallyDrop` so the destructor does not free the buffer;
        // ownership of the allocation passes to the caller via the raw parts.
        let mut me = ManuallyDrop::new(self);
        (me.as_mut_ptr(), me.len(), me.capacity())
    }
47504
47505    /// Decomposes a `Vec<T>` into its raw components.
47506    ///
47507    /// Returns the raw pointer to the underlying data, the length of the vector (in elements),
47508    /// the allocated capacity of the data (in elements), and the allocator. These are the same
47509    /// arguments in the same order as the arguments to [`from_raw_parts_in`].
47510    ///
47511    /// After calling this function, the caller is responsible for the
47512    /// memory previously managed by the `Vec`. The only way to do
47513    /// this is to convert the raw pointer, length, and capacity back
47514    /// into a `Vec` with the [`from_raw_parts_in`] function, allowing
47515    /// the destructor to perform the cleanup.
47516    ///
47517    /// [`from_raw_parts_in`]: Vec::from_raw_parts_in
47518    ///
47519    /// # Examples
47520    ///
47521    /// ```
47522    /// #![feature(allocator_api, vec_into_raw_parts)]
47523    ///
47524    /// use std::alloc::System;
47525    ///
47526    /// let mut v: Vec<i32, System> = Vec::new_in(System);
47527    /// v.push(-1);
47528    /// v.push(0);
47529    /// v.push(1);
47530    ///
47531    /// let (ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();
47532    ///
47533    /// let rebuilt = unsafe {
47534    ///     // We can now make changes to the components, such as
47535    ///     // transmuting the raw pointer to a compatible type.
47536    ///     let ptr = ptr as *mut u32;
47537    ///
47538    ///     Vec::from_raw_parts_in(ptr, len, cap, alloc)
47539    /// };
47540    /// assert_eq!(rebuilt, [4294967295, 0, 1]);
47541    /// ```
47542    #[unstable(feature = "allocator_api", issue = "32838")]
47543    // #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")]
47544    pub fn into_raw_parts_with_alloc(self) -> (*mut T, usize, usize, A) {
47545        let mut me = ManuallyDrop::new(self);
47546        let len = me.len();
47547        let capacity = me.capacity();
47548        let ptr = me.as_mut_ptr();
47549        let alloc = unsafe { ptr::read(me.allocator()) };
47550        (ptr, len, capacity, alloc)
47551    }
47552
47553    /// Returns the number of elements the vector can hold without
47554    /// reallocating.
47555    ///
47556    /// # Examples
47557    ///
47558    /// ```
47559    /// let vec: Vec<i32> = Vec::with_capacity(10);
47560    /// assert_eq!(vec.capacity(), 10);
47561    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn capacity(&self) -> usize {
        // The buffer, not the `Vec`, is the source of truth for capacity.
        self.buf.capacity()
    }
47567
47568    /// Reserves capacity for at least `additional` more elements to be inserted
47569    /// in the given `Vec<T>`. The collection may reserve more space to avoid
47570    /// frequent reallocations. After calling `reserve`, capacity will be
47571    /// greater than or equal to `self.len() + additional`. Does nothing if
47572    /// capacity is already sufficient.
47573    ///
47574    /// # Panics
47575    ///
47576    /// Panics if the new capacity exceeds `isize::MAX` bytes.
47577    ///
47578    /// # Examples
47579    ///
47580    /// ```
47581    /// let mut vec = vec![1];
47582    /// vec.reserve(10);
47583    /// assert!(vec.capacity() >= 11);
47584    /// ```
    #[doc(alias = "realloc")]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn reserve(&mut self, additional: usize) {
        // `RawVec::reserve` needs the current length to compute the required
        // capacity (`len + additional`); it may over-allocate, per the doc
        // comment above.
        self.buf.reserve(self.len, additional);
    }
47590
47591    /// Reserves the minimum capacity for exactly `additional` more elements to
47592    /// be inserted in the given `Vec<T>`. After calling `reserve_exact`,
47593    /// capacity will be greater than or equal to `self.len() + additional`.
47594    /// Does nothing if the capacity is already sufficient.
47595    ///
47596    /// Note that the allocator may give the collection more space than it
47597    /// requests. Therefore, capacity can not be relied upon to be precisely
47598    /// minimal. Prefer `reserve` if future insertions are expected.
47599    ///
47600    /// # Panics
47601    ///
47602    /// Panics if the new capacity overflows `usize`.
47603    ///
47604    /// # Examples
47605    ///
47606    /// ```
47607    /// let mut vec = vec![1];
47608    /// vec.reserve_exact(10);
47609    /// assert!(vec.capacity() >= 11);
47610    /// ```
    #[doc(alias = "realloc")]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn reserve_exact(&mut self, additional: usize) {
        // Unlike `reserve`, this requests exactly `len + additional`; the
        // allocator may still round up (see the doc comment above).
        self.buf.reserve_exact(self.len, additional);
    }
47616
47617    /// Tries to reserve capacity for at least `additional` more elements to be inserted
47618    /// in the given `Vec<T>`. The collection may reserve more space to avoid
47619    /// frequent reallocations. After calling `try_reserve`, capacity will be
47620    /// greater than or equal to `self.len() + additional`. Does nothing if
47621    /// capacity is already sufficient.
47622    ///
47623    /// # Errors
47624    ///
47625    /// If the capacity overflows, or the allocator reports a failure, then an error
47626    /// is returned.
47627    ///
47628    /// # Examples
47629    ///
47630    /// ```
47631    /// #![feature(try_reserve)]
47632    /// use std::collections::TryReserveError;
47633    ///
47634    /// fn process_data(data: &[u32]) -> Result<Vec<u32>, TryReserveError> {
47635    ///     let mut output = Vec::new();
47636    ///
47637    ///     // Pre-reserve the memory, exiting if we can't
47638    ///     output.try_reserve(data.len())?;
47639    ///
47640    ///     // Now we know this can't OOM in the middle of our complex work
47641    ///     output.extend(data.iter().map(|&val| {
47642    ///         val * 2 + 5 // very complicated
47643    ///     }));
47644    ///
47645    ///     Ok(output)
47646    /// }
47647    /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
47648    /// ```
    #[doc(alias = "realloc")]
    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
        // Fallible counterpart of `reserve`: capacity overflow and allocation
        // failure are reported as `Err` instead of panicking/aborting.
        self.buf.try_reserve(self.len, additional)
    }
47654
    /// Tries to reserve the minimum capacity for exactly `additional`
    /// elements to be inserted in the given `Vec<T>`. After calling
    /// `try_reserve_exact`, capacity will be greater than or equal to
    /// `self.len() + additional` if it returns `Ok(())`.
    /// Does nothing if the capacity is already sufficient.
    ///
    /// Note that the allocator may give the collection more space than it
    /// requests. Therefore, capacity can not be relied upon to be precisely
    /// minimal. Prefer `reserve` if future insertions are expected.
    ///
    /// # Errors
    ///
    /// If the capacity overflows, or the allocator reports a failure, then an error
    /// is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(try_reserve)]
    /// use std::collections::TryReserveError;
    ///
    /// fn process_data(data: &[u32]) -> Result<Vec<u32>, TryReserveError> {
    ///     let mut output = Vec::new();
    ///
    ///     // Pre-reserve the memory, exiting if we can't
    ///     output.try_reserve_exact(data.len())?;
    ///
    ///     // Now we know this can't OOM in the middle of our complex work
    ///     output.extend(data.iter().map(|&val| {
    ///         val * 2 + 5 // very complicated
    ///     }));
    ///
    ///     Ok(output)
    /// }
    /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
    /// ```
    #[doc(alias = "realloc")]
    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
        // Delegate to `RawVec`; unlike `try_reserve`, this requests exactly
        // `additional` extra slots with no growth-policy over-allocation.
        self.buf.try_reserve_exact(self.len, additional)
    }
47696
47697    /// Shrinks the capacity of the vector as much as possible.
47698    ///
47699    /// It will drop down as close as possible to the length but the allocator
47700    /// may still inform the vector that there is space for a few more elements.
47701    ///
47702    /// # Examples
47703    ///
47704    /// ```
47705    /// let mut vec = Vec::with_capacity(10);
47706    /// vec.extend([1, 2, 3].iter().cloned());
47707    /// assert_eq!(vec.capacity(), 10);
47708    /// vec.shrink_to_fit();
47709    /// assert!(vec.capacity() >= 3);
47710    /// ```
47711    #[doc(alias = "realloc")]
47712    #[stable(feature = "rust1", since = "1.0.0")]
47713    pub fn shrink_to_fit(&mut self) {
47714        // The capacity is never less than the length, and there's nothing to do when
47715        // they are equal, so we can avoid the panic case in `RawVec::shrink_to_fit`
47716        // by only calling it with a greater capacity.
47717        if self.capacity() > self.len {
47718            self.buf.shrink_to_fit(self.len);
47719        }
47720    }
47721
47722    /// Shrinks the capacity of the vector with a lower bound.
47723    ///
47724    /// The capacity will remain at least as large as both the length
47725    /// and the supplied value.
47726    ///
47727    /// If the current capacity is less than the lower limit, this is a no-op.
47728    ///
47729    /// # Examples
47730    ///
47731    /// ```
47732    /// #![feature(shrink_to)]
47733    /// let mut vec = Vec::with_capacity(10);
47734    /// vec.extend([1, 2, 3].iter().cloned());
47735    /// assert_eq!(vec.capacity(), 10);
47736    /// vec.shrink_to(4);
47737    /// assert!(vec.capacity() >= 4);
47738    /// vec.shrink_to(0);
47739    /// assert!(vec.capacity() >= 3);
47740    /// ```
47741    #[doc(alias = "realloc")]
47742    #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
47743    pub fn shrink_to(&mut self, min_capacity: usize) {
47744        if self.capacity() > min_capacity {
47745            self.buf.shrink_to_fit(cmp::max(self.len, min_capacity));
47746        }
47747    }
47748
    /// Converts the vector into [`Box<[T]>`][owned slice].
    ///
    /// Note that this will drop any excess capacity.
    ///
    /// [owned slice]: Box
    ///
    /// # Examples
    ///
    /// ```
    /// let v = vec![1, 2, 3];
    ///
    /// let slice = v.into_boxed_slice();
    /// ```
    ///
    /// Any excess capacity is removed:
    ///
    /// ```
    /// let mut vec = Vec::with_capacity(10);
    /// vec.extend([1, 2, 3].iter().cloned());
    ///
    /// assert_eq!(vec.capacity(), 10);
    /// let slice = vec.into_boxed_slice();
    /// assert_eq!(slice.into_vec().capacity(), 3);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn into_boxed_slice(mut self) -> Box<[T], A> {
        unsafe {
            // Shrink first so the resulting box covers exactly `len` elements.
            self.shrink_to_fit();
            // Suppress `self`'s destructor: ownership of the buffer is moved
            // out via `ptr::read` below, so dropping `self` afterwards would
            // free the allocation twice.
            let me = ManuallyDrop::new(self);
            let buf = ptr::read(&me.buf);
            let len = me.len();
            // SAFETY: the first `len` elements are initialized, which is what
            // `assume_init` requires.
            buf.into_box(len).assume_init()
        }
    }
47783
    /// Shortens the vector, keeping the first `len` elements and dropping
    /// the rest.
    ///
    /// If `len` is greater than the vector's current length, this has no
    /// effect.
    ///
    /// The [`drain`] method can emulate `truncate`, but causes the excess
    /// elements to be returned instead of dropped.
    ///
    /// Note that this method has no effect on the allocated capacity
    /// of the vector.
    ///
    /// # Examples
    ///
    /// Truncating a five element vector to two elements:
    ///
    /// ```
    /// let mut vec = vec![1, 2, 3, 4, 5];
    /// vec.truncate(2);
    /// assert_eq!(vec, [1, 2]);
    /// ```
    ///
    /// No truncation occurs when `len` is greater than the vector's current
    /// length:
    ///
    /// ```
    /// let mut vec = vec![1, 2, 3];
    /// vec.truncate(8);
    /// assert_eq!(vec, [1, 2, 3]);
    /// ```
    ///
    /// Truncating when `len == 0` is equivalent to calling the [`clear`]
    /// method.
    ///
    /// ```
    /// let mut vec = vec![1, 2, 3];
    /// vec.truncate(0);
    /// assert_eq!(vec, []);
    /// ```
    ///
    /// [`clear`]: Vec::clear
    /// [`drain`]: Vec::drain
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn truncate(&mut self, len: usize) {
        // This is safe because:
        //
        // * the slice passed to `drop_in_place` is valid; the `len > self.len`
        //   case avoids creating an invalid slice, and
        // * the `len` of the vector is shrunk before calling `drop_in_place`,
        //   such that no value will be dropped twice in case `drop_in_place`
        //   were to panic once (if it panics twice, the program aborts).
        unsafe {
            // Note: It's intentional that this is `>` and not `>=`.
            //       Changing it to `>=` has negative performance
            //       implications in some cases. See #78884 for more.
            if len > self.len {
                return;
            }
            let remaining_len = self.len - len;
            // Raw-slice covering exactly the tail elements to be dropped.
            let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len);
            // Shrink the length *before* dropping (see the panic-safety note
            // above).
            self.len = len;
            ptr::drop_in_place(s);
        }
    }
47848
    /// Extracts a slice containing the entire vector.
    ///
    /// Equivalent to `&s[..]`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::io::{self, Write};
    /// let buffer = vec![1, 2, 3, 5, 8];
    /// io::sink().write(buffer.as_slice()).unwrap();
    /// ```
    #[inline]
    #[stable(feature = "vec_as_slice", since = "1.7.0")]
    pub fn as_slice(&self) -> &[T] {
        // Coerces through the `Deref<Target = [T]>` impl.
        self
    }
47865
    /// Extracts a mutable slice of the entire vector.
    ///
    /// Equivalent to `&mut s[..]`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::io::{self, Read};
    /// let mut buffer = vec![0; 3];
    /// io::repeat(0b101).read_exact(buffer.as_mut_slice()).unwrap();
    /// ```
    #[inline]
    #[stable(feature = "vec_as_slice", since = "1.7.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // Coerces through the `DerefMut<Target = [T]>` impl.
        self
    }
47882
    /// Returns a raw pointer to the vector's buffer.
    ///
    /// The caller must ensure that the vector outlives the pointer this
    /// function returns, or else it will end up pointing to garbage.
    /// Modifying the vector may cause its buffer to be reallocated,
    /// which would also make any pointers to it invalid.
    ///
    /// The caller must also ensure that the memory the pointer (non-transitively) points to
    /// is never written to (except inside an `UnsafeCell`) using this pointer or any pointer
    /// derived from it. If you need to mutate the contents of the slice, use [`as_mut_ptr`].
    ///
    /// # Examples
    ///
    /// ```
    /// let x = vec![1, 2, 4];
    /// let x_ptr = x.as_ptr();
    ///
    /// unsafe {
    ///     for i in 0..x.len() {
    ///         assert_eq!(*x_ptr.add(i), 1 << i);
    ///     }
    /// }
    /// ```
    ///
    /// [`as_mut_ptr`]: Vec::as_mut_ptr
    #[stable(feature = "vec_as_ptr", since = "1.37.0")]
    #[inline]
    pub fn as_ptr(&self) -> *const T {
        // We shadow the slice method of the same name to avoid going through
        // `deref`, which creates an intermediate reference.
        let ptr = self.buf.ptr();
        unsafe {
            // `RawVec`'s pointer is never null (it is dangling-but-aligned for
            // empty allocations), so this hint lets the optimizer drop null
            // checks downstream.
            assume(!ptr.is_null());
        }
        ptr
    }
47919
    /// Returns an unsafe mutable pointer to the vector's buffer.
    ///
    /// The caller must ensure that the vector outlives the pointer this
    /// function returns, or else it will end up pointing to garbage.
    /// Modifying the vector may cause its buffer to be reallocated,
    /// which would also make any pointers to it invalid.
    ///
    /// # Examples
    ///
    /// ```
    /// // Allocate vector big enough for 4 elements.
    /// let size = 4;
    /// let mut x: Vec<i32> = Vec::with_capacity(size);
    /// let x_ptr = x.as_mut_ptr();
    ///
    /// // Initialize elements via raw pointer writes, then set length.
    /// unsafe {
    ///     for i in 0..size {
    ///         *x_ptr.add(i) = i as i32;
    ///     }
    ///     x.set_len(size);
    /// }
    /// assert_eq!(&*x, &[0, 1, 2, 3]);
    /// ```
    #[stable(feature = "vec_as_ptr", since = "1.37.0")]
    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        // We shadow the slice method of the same name to avoid going through
        // `deref_mut`, which creates an intermediate reference.
        let ptr = self.buf.ptr();
        unsafe {
            // `RawVec`'s pointer is never null (it is dangling-but-aligned for
            // empty allocations), so this hint lets the optimizer drop null
            // checks downstream.
            assume(!ptr.is_null());
        }
        ptr
    }
47955
    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        // The allocator is stored inside the backing `RawVec`.
        self.buf.allocator()
    }
47962
    /// Forces the length of the vector to `new_len`.
    ///
    /// This is a low-level operation that maintains none of the normal
    /// invariants of the type. Normally changing the length of a vector
    /// is done using one of the safe operations instead, such as
    /// [`truncate`], [`resize`], [`extend`], or [`clear`].
    ///
    /// [`truncate`]: Vec::truncate
    /// [`resize`]: Vec::resize
    /// [`extend`]: Extend::extend
    /// [`clear`]: Vec::clear
    ///
    /// # Safety
    ///
    /// - `new_len` must be less than or equal to [`capacity()`].
    /// - The elements at `old_len..new_len` must be initialized.
    ///
    /// [`capacity()`]: Vec::capacity
    ///
    /// # Examples
    ///
    /// This method can be useful for situations in which the vector
    /// is serving as a buffer for other code, particularly over FFI:
    ///
    /// ```no_run
    /// # #![allow(dead_code)]
    /// # // This is just a minimal skeleton for the doc example;
    /// # // don't use this as a starting point for a real library.
    /// # pub struct StreamWrapper { strm: *mut std::ffi::c_void }
    /// # const Z_OK: i32 = 0;
    /// # extern "C" {
    /// #     fn deflateGetDictionary(
    /// #         strm: *mut std::ffi::c_void,
    /// #         dictionary: *mut u8,
    /// #         dictLength: *mut usize,
    /// #     ) -> i32;
    /// # }
    /// # impl StreamWrapper {
    /// pub fn get_dictionary(&self) -> Option<Vec<u8>> {
    ///     // Per the FFI method's docs, "32768 bytes is always enough".
    ///     let mut dict = Vec::with_capacity(32_768);
    ///     let mut dict_length = 0;
    ///     // SAFETY: When `deflateGetDictionary` returns `Z_OK`, it holds that:
    ///     // 1. `dict_length` elements were initialized.
    ///     // 2. `dict_length` <= the capacity (32_768)
    ///     // which makes `set_len` safe to call.
    ///     unsafe {
    ///         // Make the FFI call...
    ///         let r = deflateGetDictionary(self.strm, dict.as_mut_ptr(), &mut dict_length);
    ///         if r == Z_OK {
    ///             // ...and update the length to what was initialized.
    ///             dict.set_len(dict_length);
    ///             Some(dict)
    ///         } else {
    ///             None
    ///         }
    ///     }
    /// }
    /// # }
    /// ```
    ///
    /// While the following example is sound, there is a memory leak since
    /// the inner vectors were not freed prior to the `set_len` call:
    ///
    /// ```
    /// let mut vec = vec![vec![1, 0, 0],
    ///                    vec![0, 1, 0],
    ///                    vec![0, 0, 1]];
    /// // SAFETY:
    /// // 1. `old_len..0` is empty so no elements need to be initialized.
    /// // 2. `0 <= capacity` always holds whatever `capacity` is.
    /// unsafe {
    ///     vec.set_len(0);
    /// }
    /// ```
    ///
    /// Normally, here, one would use [`clear`] instead to correctly drop
    /// the contents and thus not leak memory.
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub unsafe fn set_len(&mut self, new_len: usize) {
        // Debug-only sanity check; release builds rely entirely on the caller
        // upholding the safety contract above.
        debug_assert!(new_len <= self.capacity());

        self.len = new_len;
    }
48048
    /// Removes an element from the vector and returns it.
    ///
    /// The removed element is replaced by the last element of the vector.
    ///
    /// This does not preserve ordering, but is O(1).
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of bounds.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut v = vec!["foo", "bar", "baz", "qux"];
    ///
    /// assert_eq!(v.swap_remove(1), "bar");
    /// assert_eq!(v, ["foo", "qux", "baz"]);
    ///
    /// assert_eq!(v.swap_remove(0), "foo");
    /// assert_eq!(v, ["baz", "qux"]);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn swap_remove(&mut self, index: usize) -> T {
        // Out-of-line panic path keeps the happy path small and inlinable.
        #[cold]
        #[inline(never)]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!("swap_remove index (is {}) should be < len (is {})", index, len);
        }

        let len = self.len();
        if index >= len {
            assert_failed(index, len);
        }
        unsafe {
            // We replace self[index] with the last element. Note that if the
            // bounds check above succeeds there must be a last element (which
            // can be self[index] itself).
            let last = ptr::read(self.as_ptr().add(len - 1));
            let hole = self.as_mut_ptr().add(index);
            // Shrink the length before the write so the duplicated last
            // element is never observable as part of the vector.
            self.set_len(len - 1);
            ptr::replace(hole, last)
        }
    }
48093
    /// Inserts an element at position `index` within the vector, shifting all
    /// elements after it to the right.
    ///
    /// # Panics
    ///
    /// Panics if `index > len`.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut vec = vec![1, 2, 3];
    /// vec.insert(1, 4);
    /// assert_eq!(vec, [1, 4, 2, 3]);
    /// vec.insert(4, 5);
    /// assert_eq!(vec, [1, 4, 2, 3, 5]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn insert(&mut self, index: usize, element: T) {
        // Out-of-line panic path keeps the happy path small and inlinable.
        #[cold]
        #[inline(never)]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!("insertion index (is {}) should be <= len (is {})", index, len);
        }

        let len = self.len();
        if index > len {
            assert_failed(index, len);
        }

        // space for the new element
        if len == self.buf.capacity() {
            self.reserve(1);
        }

        unsafe {
            // infallible
            // The spot to put the new value
            {
                let p = self.as_mut_ptr().add(index);
                // Shift everything over to make space. (Duplicating the
                // `index`th element into two consecutive places.)
                // `ptr::copy` handles the overlapping ranges.
                ptr::copy(p, p.offset(1), len - index);
                // Write it in, overwriting the first copy of the `index`th
                // element.
                ptr::write(p, element);
            }
            // Update the length only after the (infallible) writes above.
            self.set_len(len + 1);
        }
    }
48143
    /// Removes and returns the element at position `index` within the vector,
    /// shifting all elements after it to the left.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of bounds.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut v = vec![1, 2, 3];
    /// assert_eq!(v.remove(1), 2);
    /// assert_eq!(v, [1, 3]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn remove(&mut self, index: usize) -> T {
        // Out-of-line panic path keeps the happy path small and inlinable.
        #[cold]
        #[inline(never)]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!("removal index (is {}) should be < len (is {})", index, len);
        }

        let len = self.len();
        if index >= len {
            assert_failed(index, len);
        }
        unsafe {
            // infallible
            let ret;
            {
                // the place we are taking from.
                let ptr = self.as_mut_ptr().add(index);
                // copy it out, unsafely having a copy of the value on
                // the stack and in the vector at the same time.
                ret = ptr::read(ptr);

                // Shift everything down to fill in that spot.
                // `ptr::copy` handles the overlapping ranges.
                ptr::copy(ptr.offset(1), ptr, len - index - 1);
            }
            // Update the length only after the (infallible) copies above.
            self.set_len(len - 1);
            ret
        }
    }
48187
    /// Retains only the elements specified by the predicate.
    ///
    /// In other words, remove all elements `e` such that `f(&e)` returns `false`.
    /// This method operates in place, visiting each element exactly once in the
    /// original order, and preserves the order of the retained elements.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut vec = vec![1, 2, 3, 4];
    /// vec.retain(|&x| x % 2 == 0);
    /// assert_eq!(vec, [2, 4]);
    /// ```
    ///
    /// Because the elements are visited exactly once in the original order,
    /// external state may be used to decide which elements to keep.
    ///
    /// ```
    /// let mut vec = vec![1, 2, 3, 4, 5];
    /// let keep = [false, true, true, false, true];
    /// let mut iter = keep.iter();
    /// vec.retain(|_| *iter.next().unwrap());
    /// assert_eq!(vec, [2, 3, 5]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        let original_len = self.len();
        // Avoid double drop if the drop guard is not executed,
        // since we may make some holes during the process.
        unsafe { self.set_len(0) };

        // Vec: [Kept, Kept, Hole, Hole, Hole, Hole, Unchecked, Unchecked]
        //      |<-              processed len   ->| ^- next to check
        //                  |<-  deleted cnt     ->|
        //      |<-              original_len                          ->|
        // Kept: Elements which predicate returns true on.
        // Hole: Moved or dropped element slot.
        // Unchecked: Unchecked valid elements.
        //
        // This drop guard will be invoked when predicate or `drop` of element panicked.
        // It shifts unchecked elements to cover holes and `set_len` to the correct length.
        // In cases when predicate and `drop` never panic, it will be optimized out.
        struct BackshiftOnDrop<'a, T, A: Allocator> {
            v: &'a mut Vec<T, A>,
            processed_len: usize,
            deleted_cnt: usize,
            original_len: usize,
        }

        impl<T, A: Allocator> Drop for BackshiftOnDrop<'_, T, A> {
            fn drop(&mut self) {
                if self.deleted_cnt > 0 {
                    // SAFETY: Trailing unchecked items must be valid since we never touch them.
                    unsafe {
                        ptr::copy(
                            self.v.as_ptr().add(self.processed_len),
                            self.v.as_mut_ptr().add(self.processed_len - self.deleted_cnt),
                            self.original_len - self.processed_len,
                        );
                    }
                }
                // SAFETY: After filling holes, all items are in contiguous memory.
                unsafe {
                    self.v.set_len(self.original_len - self.deleted_cnt);
                }
            }
        }

        let mut g = BackshiftOnDrop { v: self, processed_len: 0, deleted_cnt: 0, original_len };

        while g.processed_len < original_len {
            // SAFETY: Unchecked element must be valid.
            let cur = unsafe { &mut *g.v.as_mut_ptr().add(g.processed_len) };
            if !f(cur) {
                // Advance early to avoid double drop if `drop_in_place` panicked.
                g.processed_len += 1;
                g.deleted_cnt += 1;
                // SAFETY: We never touch this element again after dropped.
                unsafe { ptr::drop_in_place(cur) };
                // We already advanced the counter.
                continue;
            }
            if g.deleted_cnt > 0 {
                // SAFETY: `deleted_cnt` > 0, so the hole slot must not overlap with current element.
                // We use copy for move, and never touch this element again.
                unsafe {
                    let hole_slot = g.v.as_mut_ptr().add(g.processed_len - g.deleted_cnt);
                    ptr::copy_nonoverlapping(cur, hole_slot, 1);
                }
            }
            g.processed_len += 1;
        }

        // All items are processed. This can be optimized to `set_len` by LLVM.
        drop(g);
    }
48287
    /// Removes all but the first of consecutive elements in the vector that resolve to the same
    /// key.
    ///
    /// If the vector is sorted, this removes all duplicates.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut vec = vec![10, 20, 21, 30, 20];
    ///
    /// vec.dedup_by_key(|i| *i / 10);
    ///
    /// assert_eq!(vec, [10, 20, 30, 20]);
    /// ```
    #[stable(feature = "dedup_by", since = "1.16.0")]
    #[inline]
    pub fn dedup_by_key<F, K>(&mut self, mut key: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq,
    {
        // Two consecutive elements are duplicates iff their derived keys
        // compare equal; delegate the removal logic to `dedup_by`.
        self.dedup_by(|a, b| key(a) == key(b))
    }
48311
    /// Removes all but the first of consecutive elements in the vector satisfying a given equality
    /// relation.
    ///
    /// The `same_bucket` function is passed references to two elements from the vector and
    /// must determine if the elements compare equal. The elements are passed in opposite order
    /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is removed.
    ///
    /// If the vector is sorted, this removes all duplicates.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut vec = vec!["foo", "bar", "Bar", "baz", "bar"];
    ///
    /// vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b));
    ///
    /// assert_eq!(vec, ["foo", "bar", "baz", "bar"]);
    /// ```
    #[stable(feature = "dedup_by", since = "1.16.0")]
    pub fn dedup_by<F>(&mut self, mut same_bucket: F)
    where
        F: FnMut(&mut T, &mut T) -> bool,
    {
        let len = self.len();
        // Zero or one element can contain no consecutive duplicates.
        if len <= 1 {
            return;
        }

        /* INVARIANT: vec.len() > read >= write > write-1 >= 0 */
        struct FillGapOnDrop<'a, T, A: core::alloc::Allocator> {
            /* Offset of the element we want to check if it is duplicate */
            read: usize,

            /* Offset of the place where we want to place the non-duplicate
             * when we find it. */
            write: usize,

            /* The Vec that would need correction if `same_bucket` panicked */
            vec: &'a mut Vec<T, A>,
        }

        impl<'a, T, A: core::alloc::Allocator> Drop for FillGapOnDrop<'a, T, A> {
            fn drop(&mut self) {
                /* This code gets executed when `same_bucket` panics */

                /* SAFETY: invariant guarantees that `read - write`
                 * and `len - read` never overflow and that the copy is always
                 * in-bounds. */
                unsafe {
                    let ptr = self.vec.as_mut_ptr();
                    let len = self.vec.len();

                    /* How many items were left when `same_bucket` panicked.
                     * Basically vec[read..].len() */
                    let items_left = len.wrapping_sub(self.read);

                    /* Pointer to first item in vec[write..write+items_left] slice */
                    let dropped_ptr = ptr.add(self.write);
                    /* Pointer to first item in vec[read..] slice */
                    let valid_ptr = ptr.add(self.read);

                    /* Copy `vec[read..]` to `vec[write..write+items_left]`.
                     * The slices can overlap, so `copy_nonoverlapping` cannot be used */
                    ptr::copy(valid_ptr, dropped_ptr, items_left);

                    /* How many items have been already dropped
                     * Basically vec[read..write].len() */
                    let dropped = self.read.wrapping_sub(self.write);

                    self.vec.set_len(len - dropped);
                }
            }
        }

        let mut gap = FillGapOnDrop { read: 1, write: 1, vec: self };
        let ptr = gap.vec.as_mut_ptr();

        /* Drop items while going through Vec, it should be more efficient than
         * doing slice partition_dedup + truncate */

        /* SAFETY: Because of the invariant, read_ptr, prev_ptr and write_ptr
         * are always in-bounds and read_ptr never aliases prev_ptr */
        unsafe {
            while gap.read < len {
                let read_ptr = ptr.add(gap.read);
                let prev_ptr = ptr.add(gap.write.wrapping_sub(1));

                if same_bucket(&mut *read_ptr, &mut *prev_ptr) {
                    /* We have found duplicate, drop it in-place */
                    ptr::drop_in_place(read_ptr);
                } else {
                    let write_ptr = ptr.add(gap.write);

                    /* Because `read_ptr` can be equal to `write_ptr`, we either
                     * have to use `copy` or conditional `copy_nonoverlapping`.
                     * Looks like the first option is faster. */
                    ptr::copy(read_ptr, write_ptr, 1);

                    /* We have filled that place, so go further */
                    gap.write += 1;
                }

                gap.read += 1;
            }

            /* Technically we could let `gap` clean up with its Drop, but
             * when `same_bucket` is guaranteed to not panic, this bloats a little
             * the codegen, so we just do it manually */
            gap.vec.set_len(gap.write);
            mem::forget(gap);
        }
    }
48424
48425    /// Appends an element to the back of a collection.
48426    ///
48427    /// # Panics
48428    ///
48429    /// Panics if the new capacity exceeds `isize::MAX` bytes.
48430    ///
48431    /// # Examples
48432    ///
48433    /// ```
48434    /// let mut vec = vec![1, 2];
48435    /// vec.push(3);
48436    /// assert_eq!(vec, [1, 2, 3]);
48437    /// ```
48438    #[inline]
48439    #[stable(feature = "rust1", since = "1.0.0")]
48440    pub fn push(&mut self, value: T) {
48441        // This will panic or abort if we would allocate > isize::MAX bytes
48442        // or if the length increment would overflow for zero-sized types.
48443        if self.len == self.buf.capacity() {
48444            self.reserve(1);
48445        }
48446        unsafe {
48447            let end = self.as_mut_ptr().add(self.len);
48448            ptr::write(end, value);
48449            self.len += 1;
48450        }
48451    }
48452
48453    /// Removes the last element from a vector and returns it, or [`None`] if it
48454    /// is empty.
48455    ///
48456    /// # Examples
48457    ///
48458    /// ```
48459    /// let mut vec = vec![1, 2, 3];
48460    /// assert_eq!(vec.pop(), Some(3));
48461    /// assert_eq!(vec, [1, 2]);
48462    /// ```
48463    #[inline]
48464    #[stable(feature = "rust1", since = "1.0.0")]
48465    pub fn pop(&mut self) -> Option<T> {
48466        if self.len == 0 {
48467            None
48468        } else {
48469            unsafe {
48470                self.len -= 1;
48471                Some(ptr::read(self.as_ptr().add(self.len())))
48472            }
48473        }
48474    }
48475
    /// Moves all the elements of `other` into `Self`, leaving `other` empty.
    ///
    /// # Panics
    ///
    /// Panics if the number of elements in the vector overflows a `usize`.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut vec = vec![1, 2, 3];
    /// let mut vec2 = vec![4, 5, 6];
    /// vec.append(&mut vec2);
    /// assert_eq!(vec, [1, 2, 3, 4, 5, 6]);
    /// assert_eq!(vec2, []);
    /// ```
    #[inline]
    #[stable(feature = "append", since = "1.4.0")]
    pub fn append(&mut self, other: &mut Self) {
        unsafe {
            // Bitwise-copy `other`'s elements into `self`, then zero `other`'s
            // length so the moved-out values are never dropped through
            // `other` (which would be a double drop).
            self.append_elements(other.as_slice() as _);
            other.set_len(0);
        }
    }
48499
    /// Appends elements to `Self` from other buffer.
    ///
    /// # Safety
    ///
    /// `other` must point to a valid slice, and the caller must treat its
    /// elements as moved out (not dropping them again) unless `T: Copy`.
    #[inline]
    unsafe fn append_elements(&mut self, other: *const [T]) {
        // SAFETY: caller guarantees `other` points to a valid slice.
        let count = unsafe { (*other).len() };
        self.reserve(count);
        let len = self.len();
        // SAFETY: `reserve` made room for `count` additional elements past
        // `len`; the source buffer is distinct from `self`'s spare capacity,
        // so the non-overlapping copy is valid.
        unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) };
        self.len += count;
    }
48509
    /// Creates a draining iterator that removes the specified range in the vector
    /// and yields the removed items.
    ///
    /// When the iterator **is** dropped, all elements in the range are removed
    /// from the vector, even if the iterator was not fully consumed. If the
    /// iterator **is not** dropped (with [`mem::forget`] for example), it is
    /// unspecified how many elements are removed.
    ///
    /// # Panics
    ///
    /// Panics if the starting point is greater than the end point or if
    /// the end point is greater than the length of the vector.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut v = vec![1, 2, 3];
    /// let u: Vec<_> = v.drain(1..).collect();
    /// assert_eq!(v, &[1]);
    /// assert_eq!(u, &[2, 3]);
    ///
    /// // A full range clears the vector
    /// v.drain(..);
    /// assert_eq!(v, &[]);
    /// ```
    #[stable(feature = "drain", since = "1.6.0")]
    pub fn drain<R>(&mut self, range: R) -> Drain<'_, T, A>
    where
        R: RangeBounds<usize>,
    {
        // Memory safety
        //
        // When the Drain is first created, it shortens the length of
        // the source vector to make sure no uninitialized or moved-from elements
        // are accessible at all if the Drain's destructor never gets to run.
        //
        // Drain will ptr::read out the values to remove.
        // When finished, remaining tail of the vec is copied back to cover
        // the hole, and the vector length is restored to the new length.
        //
        let len = self.len();
        let Range { start, end } = slice::range(range, ..len);

        unsafe {
            // set self.vec length's to start, to be safe in case Drain is leaked
            self.set_len(start);
            // Use the borrow in the IterMut to indicate borrowing behavior of the
            // whole Drain iterator (like &mut T).
            let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(start), end - start);
            // `tail_start`/`tail_len` describe the elements after the drained
            // range; they stay in place until the Drain is dropped.
            Drain {
                tail_start: end,
                tail_len: len - end,
                iter: range_slice.iter(),
                vec: NonNull::from(self),
            }
        }
    }
48567
    /// Clears the vector, removing all values.
    ///
    /// Note that this method has no effect on the allocated capacity
    /// of the vector.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut v = vec![1, 2, 3];
    ///
    /// v.clear();
    ///
    /// assert!(v.is_empty());
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn clear(&mut self) {
        // Truncating to zero drops every element in place while leaving the
        // allocation (capacity) untouched.
        self.truncate(0)
    }
48587
    /// Returns the number of elements in the vector, also referred to
    /// as its 'length'.
    ///
    /// # Examples
    ///
    /// ```
    /// let a = vec![1, 2, 3];
    /// assert_eq!(a.len(), 3);
    /// ```
    #[doc(alias = "length")]
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn len(&self) -> usize {
        // The length is stored directly; capacity lives in `self.buf`.
        self.len
    }
48603
    /// Returns `true` if the vector contains no elements.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut v = Vec::new();
    /// assert!(v.is_empty());
    ///
    /// v.push(1);
    /// assert!(!v.is_empty());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
48619
    /// Splits the collection into two at the given index.
    ///
    /// Returns a newly allocated vector containing the elements in the range
    /// `[at, len)`. After the call, the original vector will be left containing
    /// the elements `[0, at)` with its previous capacity unchanged.
    ///
    /// # Panics
    ///
    /// Panics if `at > len`.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut vec = vec![1, 2, 3];
    /// let vec2 = vec.split_off(1);
    /// assert_eq!(vec, [1]);
    /// assert_eq!(vec2, [2, 3]);
    /// ```
    #[inline]
    #[must_use = "use `.truncate()` if you don't need the other half"]
    #[stable(feature = "split_off", since = "1.4.0")]
    pub fn split_off(&mut self, at: usize) -> Self
    where
        A: Clone,
    {
        // Out-of-line panic path keeps the hot path small.
        #[cold]
        #[inline(never)]
        fn assert_failed(at: usize, len: usize) -> ! {
            panic!("`at` split index (is {}) should be <= len (is {})", at, len);
        }

        if at > self.len() {
            assert_failed(at, self.len());
        }

        if at == 0 {
            // the new vector can take over the original buffer and avoid the copy
            return mem::replace(
                self,
                Vec::with_capacity_in(self.capacity(), self.allocator().clone()),
            );
        }

        let other_len = self.len - at;
        let mut other = Vec::with_capacity_in(other_len, self.allocator().clone());

        // Unsafely `set_len` and copy items to `other`.
        unsafe {
            // SAFETY: `at <= len` was checked above, so the copied tail
            // `[at, len)` is initialized; `other` has capacity for `other_len`
            // elements and is a distinct allocation, so the buffers don't overlap.
            self.set_len(at);
            other.set_len(other_len);

            ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other.len());
        }
        other
    }
48675
48676    /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
48677    ///
48678    /// If `new_len` is greater than `len`, the `Vec` is extended by the
48679    /// difference, with each additional slot filled with the result of
48680    /// calling the closure `f`. The return values from `f` will end up
48681    /// in the `Vec` in the order they have been generated.
48682    ///
48683    /// If `new_len` is less than `len`, the `Vec` is simply truncated.
48684    ///
48685    /// This method uses a closure to create new values on every push. If
48686    /// you'd rather [`Clone`] a given value, use [`Vec::resize`]. If you
48687    /// want to use the [`Default`] trait to generate values, you can
48688    /// pass [`Default::default`] as the second argument.
48689    ///
48690    /// # Examples
48691    ///
48692    /// ```
48693    /// let mut vec = vec![1, 2, 3];
48694    /// vec.resize_with(5, Default::default);
48695    /// assert_eq!(vec, [1, 2, 3, 0, 0]);
48696    ///
48697    /// let mut vec = vec![];
48698    /// let mut p = 1;
48699    /// vec.resize_with(4, || { p *= 2; p });
48700    /// assert_eq!(vec, [2, 4, 8, 16]);
48701    /// ```
48702    #[stable(feature = "vec_resize_with", since = "1.33.0")]
48703    pub fn resize_with<F>(&mut self, new_len: usize, f: F)
48704    where
48705        F: FnMut() -> T,
48706    {
48707        let len = self.len();
48708        if new_len > len {
48709            self.extend_with(new_len - len, ExtendFunc(f));
48710        } else {
48711            self.truncate(new_len);
48712        }
48713    }
48714
    /// Consumes and leaks the `Vec`, returning a mutable reference to the contents,
    /// `&'a mut [T]`. Note that the type `T` must outlive the chosen lifetime
    /// `'a`. If the type has only static references, or none at all, then this
    /// may be chosen to be `'static`.
    ///
    /// This function is similar to the [`leak`][Box::leak] function on [`Box`]
    /// except that there is no way to recover the leaked memory.
    ///
    /// This function is mainly useful for data that lives for the remainder of
    /// the program's life. Dropping the returned reference will cause a memory
    /// leak.
    ///
    /// # Examples
    ///
    /// Simple usage:
    ///
    /// ```
    /// let x = vec![1, 2, 3];
    /// let static_ref: &'static mut [usize] = x.leak();
    /// static_ref[0] += 1;
    /// assert_eq!(static_ref, &[2, 2, 3]);
    /// ```
    #[stable(feature = "vec_leak", since = "1.47.0")]
    #[inline]
    pub fn leak<'a>(self) -> &'a mut [T]
    where
        A: 'a,
    {
        // `into_boxed_slice` converts to an exactly-sized allocation first;
        // `Box::leak` then releases ownership without freeing it.
        Box::leak(self.into_boxed_slice())
    }
48745
    /// Returns the remaining spare capacity of the vector as a slice of
    /// `MaybeUninit<T>`.
    ///
    /// The returned slice can be used to fill the vector with data (e.g. by
    /// reading from a file) before marking the data as initialized using the
    /// [`set_len`] method.
    ///
    /// [`set_len`]: Vec::set_len
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(vec_spare_capacity, maybe_uninit_extra)]
    ///
    /// // Allocate vector big enough for 10 elements.
    /// let mut v = Vec::with_capacity(10);
    ///
    /// // Fill in the first 3 elements.
    /// let uninit = v.spare_capacity_mut();
    /// uninit[0].write(0);
    /// uninit[1].write(1);
    /// uninit[2].write(2);
    ///
    /// // Mark the first 3 elements of the vector as being initialized.
    /// unsafe {
    ///     v.set_len(3);
    /// }
    ///
    /// assert_eq!(&v, &[0, 1, 2]);
    /// ```
    #[unstable(feature = "vec_spare_capacity", issue = "75017")]
    #[inline]
    pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] {
        // Note:
        // This method is not implemented in terms of `split_at_spare_mut`,
        // to prevent invalidation of pointers to the buffer.
        // SAFETY: the region `[len, capacity)` lies inside the allocation;
        // it may be uninitialized, which is exactly what a `MaybeUninit<T>`
        // slice is allowed to cover.
        unsafe {
            slice::from_raw_parts_mut(
                self.as_mut_ptr().add(self.len) as *mut MaybeUninit<T>,
                self.buf.capacity() - self.len,
            )
        }
    }
48789
    /// Returns vector content as a slice of `T`, along with the remaining spare
    /// capacity of the vector as a slice of `MaybeUninit<T>`.
    ///
    /// The returned spare capacity slice can be used to fill the vector with data
    /// (e.g. by reading from a file) before marking the data as initialized using
    /// the [`set_len`] method.
    ///
    /// [`set_len`]: Vec::set_len
    ///
    /// Note that this is a low-level API, which should be used with care for
    /// optimization purposes. If you need to append data to a `Vec`
    /// you can use [`push`], [`extend`], [`extend_from_slice`],
    /// [`extend_from_within`], [`insert`], [`append`], [`resize`] or
    /// [`resize_with`], depending on your exact needs.
    ///
    /// [`push`]: Vec::push
    /// [`extend`]: Vec::extend
    /// [`extend_from_slice`]: Vec::extend_from_slice
    /// [`extend_from_within`]: Vec::extend_from_within
    /// [`insert`]: Vec::insert
    /// [`append`]: Vec::append
    /// [`resize`]: Vec::resize
    /// [`resize_with`]: Vec::resize_with
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(vec_split_at_spare, maybe_uninit_extra)]
    ///
    /// let mut v = vec![1, 1, 2];
    ///
    /// // Reserve additional space big enough for 10 elements.
    /// v.reserve(10);
    ///
    /// let (init, uninit) = v.split_at_spare_mut();
    /// let sum = init.iter().copied().sum::<u32>();
    ///
    /// // Fill in the next 4 elements.
    /// uninit[0].write(sum);
    /// uninit[1].write(sum * 2);
    /// uninit[2].write(sum * 3);
    /// uninit[3].write(sum * 4);
    ///
    /// // Mark the 4 elements of the vector as being initialized.
    /// unsafe {
    ///     let len = v.len();
    ///     v.set_len(len + 4);
    /// }
    ///
    /// assert_eq!(&v, &[1, 1, 2, 4, 8, 12, 16]);
    /// ```
    #[unstable(feature = "vec_split_at_spare", issue = "81944")]
    #[inline]
    pub fn split_at_spare_mut(&mut self) -> (&mut [T], &mut [MaybeUninit<T>]) {
        // SAFETY:
        // - len is ignored and so never changed
        //   (the `&mut usize` returned by the helper is dropped immediately)
        let (init, spare, _) = unsafe { self.split_at_spare_mut_with_len() };
        (init, spare)
    }
48849
    /// Safety: changing returned .2 (&mut usize) is considered the same as calling `.set_len(_)`.
    ///
    /// This method provides unique access to all vec parts at once in `extend_from_within`.
    unsafe fn split_at_spare_mut_with_len(
        &mut self,
    ) -> (&mut [T], &mut [MaybeUninit<T>], &mut usize) {
        // `as_mut_ptr_range` yields [start of buffer, one-past-last-initialized).
        let Range { start: ptr, end: spare_ptr } = self.as_mut_ptr_range();
        let spare_ptr = spare_ptr.cast::<MaybeUninit<T>>();
        let spare_len = self.buf.capacity() - self.len;

        // SAFETY:
        // - `ptr` is guaranteed to be valid for `len` elements
        // - `spare_ptr` points one element past the last initialized element,
        //   so the spare slice doesn't overlap with `initialized`
        unsafe {
            let initialized = slice::from_raw_parts_mut(ptr, self.len);
            let spare = slice::from_raw_parts_mut(spare_ptr, spare_len);

            (initialized, spare, &mut self.len)
        }
    }
48870}
48871
impl<T: Clone, A: Allocator> Vec<T, A> {
    /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
    ///
    /// If `new_len` is greater than `len`, the `Vec` is extended by the
    /// difference, with each additional slot filled with `value`.
    /// If `new_len` is less than `len`, the `Vec` is simply truncated.
    ///
    /// This method requires `T` to implement [`Clone`],
    /// in order to be able to clone the passed value.
    /// If you need more flexibility (or want to rely on [`Default`] instead of
    /// [`Clone`]), use [`Vec::resize_with`].
    ///
    /// # Examples
    ///
    /// ```
    /// let mut vec = vec!["hello"];
    /// vec.resize(3, "world");
    /// assert_eq!(vec, ["hello", "world", "world"]);
    ///
    /// let mut vec = vec![1, 2, 3, 4];
    /// vec.resize(2, 0);
    /// assert_eq!(vec, [1, 2]);
    /// ```
    #[stable(feature = "vec_resize", since = "1.5.0")]
    pub fn resize(&mut self, new_len: usize, value: T) {
        let len = self.len();

        if new_len > len {
            // `ExtendElement` clones `value` for each new slot (and moves it
            // into the last one).
            self.extend_with(new_len - len, ExtendElement(value))
        } else {
            self.truncate(new_len);
        }
    }

    /// Clones and appends all elements in a slice to the `Vec`.
    ///
    /// Iterates over the slice `other`, clones each element, and then appends
    /// it to this `Vec`. The `other` vector is traversed in-order.
    ///
    /// Note that this function is same as [`extend`] except that it is
    /// specialized to work with slices instead. If and when Rust gets
    /// specialization this function will likely be deprecated (but still
    /// available).
    ///
    /// # Examples
    ///
    /// ```
    /// let mut vec = vec![1];
    /// vec.extend_from_slice(&[2, 3, 4]);
    /// assert_eq!(vec, [1, 2, 3, 4]);
    /// ```
    ///
    /// [`extend`]: Vec::extend
    #[stable(feature = "vec_extend_from_slice", since = "1.6.0")]
    pub fn extend_from_slice(&mut self, other: &[T]) {
        // Specialization picks a memcpy-style path when available.
        self.spec_extend(other.iter())
    }

    /// Copies elements from `src` range to the end of the vector.
    ///
    /// ## Examples
    ///
    /// ```
    /// let mut vec = vec![0, 1, 2, 3, 4];
    ///
    /// vec.extend_from_within(2..);
    /// assert_eq!(vec, [0, 1, 2, 3, 4, 2, 3, 4]);
    ///
    /// vec.extend_from_within(..2);
    /// assert_eq!(vec, [0, 1, 2, 3, 4, 2, 3, 4, 0, 1]);
    ///
    /// vec.extend_from_within(4..8);
    /// assert_eq!(vec, [0, 1, 2, 3, 4, 2, 3, 4, 0, 1, 4, 2, 3, 4]);
    /// ```
    #[stable(feature = "vec_extend_from_within", since = "1.53.0")]
    pub fn extend_from_within<R>(&mut self, src: R)
    where
        R: RangeBounds<usize>,
    {
        let range = slice::range(src, ..self.len());
        self.reserve(range.len());

        // SAFETY:
        // - `slice::range` guarantees that the given range is valid for indexing self
        // - the `reserve` above satisfies the spare-capacity requirement
        unsafe {
            self.spec_extend_from_within(range);
        }
    }
}
48961
// This code generalizes `extend_with_{element,default}`.
trait ExtendWith<T> {
    /// Produces the next value; called once per slot except the last.
    fn next(&mut self) -> T;
    /// Produces the final value, consuming the generator so the last slot can
    /// reuse the source instead of cloning it needlessly.
    fn last(self) -> T;
}
48967
// Generator that yields clones of a single element (moving it out at the end).
struct ExtendElement<T>(T);
impl<T: Clone> ExtendWith<T> for ExtendElement<T> {
    fn next(&mut self) -> T {
        self.0.clone()
    }
    fn last(self) -> T {
        // The final slot takes ownership, saving one clone.
        self.0
    }
}
48977
// Generator that yields `T::default()` for every slot.
struct ExtendDefault;
impl<T: Default> ExtendWith<T> for ExtendDefault {
    fn next(&mut self) -> T {
        Default::default()
    }
    fn last(self) -> T {
        Default::default()
    }
}
48987
// Generator that yields values produced by a closure (used by `resize_with`).
struct ExtendFunc<F>(F);
impl<T, F: FnMut() -> T> ExtendWith<T> for ExtendFunc<F> {
    fn next(&mut self) -> T {
        (self.0)()
    }
    fn last(mut self) -> T {
        (self.0)()
    }
}
48997
impl<T, A: Allocator> Vec<T, A> {
    /// Extend the vector by `n` values, using the given generator.
    fn extend_with<E: ExtendWith<T>>(&mut self, n: usize, mut value: E) {
        self.reserve(n);

        unsafe {
            let mut ptr = self.as_mut_ptr().add(self.len());
            // Use SetLenOnDrop to work around an aliasing issue: the compiler
            // may not realize that the stores through `ptr` and the updates of
            // the length via `self.set_len()` don't alias, so the length is
            // tracked locally and written back once when the guard drops.
            let mut local_len = SetLenOnDrop::new(&mut self.len);

            // Write all elements except the last one
            for _ in 1..n {
                ptr::write(ptr, value.next());
                ptr = ptr.offset(1);
                // Increment the length in every step in case next() panics
                local_len.increment_len(1);
            }

            if n > 0 {
                // We can write the last element directly without cloning needlessly
                ptr::write(ptr, value.last());
                local_len.increment_len(1);
            }

            // len set by scope guard
        }
    }
}
49028
49029impl<T: PartialEq, A: Allocator> Vec<T, A> {
49030    /// Removes consecutive repeated elements in the vector according to the
49031    /// [`PartialEq`] trait implementation.
49032    ///
49033    /// If the vector is sorted, this removes all duplicates.
49034    ///
49035    /// # Examples
49036    ///
49037    /// ```
49038    /// let mut vec = vec![1, 2, 2, 3, 2];
49039    ///
49040    /// vec.dedup();
49041    ///
49042    /// assert_eq!(vec, [1, 2, 3, 2]);
49043    /// ```
49044    #[stable(feature = "rust1", since = "1.0.0")]
49045    #[inline]
49046    pub fn dedup(&mut self) {
49047        self.dedup_by(|a, b| a == b)
49048    }
49049}
49050
49051////////////////////////////////////////////////////////////////////////////////
49052// Internal methods and functions
49053////////////////////////////////////////////////////////////////////////////////
49054
#[doc(hidden)]
#[stable(feature = "rust1", since = "1.0.0")]
// Backing function for the `vec![elem; n]` macro; delegates to a
// `SpecFromElem` specialization using the global allocator.
pub fn from_elem<T: Clone>(elem: T, n: usize) -> Vec<T> {
    <T as SpecFromElem>::from_elem(elem, n, Global)
}
49060
#[doc(hidden)]
#[unstable(feature = "allocator_api", issue = "32838")]
// Allocator-parameterized variant of `from_elem`.
pub fn from_elem_in<T: Clone, A: Allocator>(elem: T, n: usize, alloc: A) -> Vec<T, A> {
    <T as SpecFromElem>::from_elem(elem, n, alloc)
}
49066
// Specialization trait for `Vec::extend_from_within`: a generic clone-based
// path, overridden with a memcpy path for `T: Copy`.
trait ExtendFromWithinSpec {
    /// # Safety
    ///
    /// - `src` must be a valid index range into `self`
    /// - `self.capacity() - self.len()` must be `>= src.len()`
    unsafe fn spec_extend_from_within(&mut self, src: Range<usize>);
}
49074
impl<T: Clone, A: Allocator> ExtendFromWithinSpec for Vec<T, A> {
    default unsafe fn spec_extend_from_within(&mut self, src: Range<usize>) {
        // SAFETY:
        // - len is increased only after initializing elements
        let (this, spare, len) = unsafe { self.split_at_spare_mut_with_len() };

        // SAFETY:
        // - caller guarantees that src is a valid index
        let to_clone = unsafe { this.get_unchecked(src) };

        iter::zip(to_clone, spare)
            .map(|(src, dst)| dst.write(src.clone()))
            // Note:
            // - Element was just initialized with `MaybeUninit::write`, so it's ok to increase len
            // - len is increased after each element to prevent leaks (see issue #82533)
            .for_each(|_| *len += 1);
    }
}
49093
impl<T: Copy, A: Allocator> ExtendFromWithinSpec for Vec<T, A> {
    unsafe fn spec_extend_from_within(&mut self, src: Range<usize>) {
        let count = src.len();
        {
            let (init, spare) = self.split_at_spare_mut();

            // SAFETY:
            // - caller guarantees that `src` is a valid index
            let source = unsafe { init.get_unchecked(src) };

            // SAFETY:
            // - Both pointers are created from unique slice references (`&mut [_]`)
            //   so they are valid and do not overlap.
            // - Elements are :Copy so it's OK to copy them, without doing
            //   anything with the original values
            // - `count` is equal to the len of `source`, so source is valid for
            //   `count` reads
            // - `.reserve(count)` guarantees that `spare.len() >= count` so spare
            //   is valid for `count` writes
            unsafe { ptr::copy_nonoverlapping(source.as_ptr(), spare.as_mut_ptr() as _, count) };
        }

        // SAFETY:
        // - The elements were just initialized by `copy_nonoverlapping`
        self.len += count;
    }
}
49121
49122////////////////////////////////////////////////////////////////////////////////
49123// Common trait implementations for Vec
49124////////////////////////////////////////////////////////////////////////////////
49125
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ops::Deref for Vec<T, A> {
    type Target = [T];

    fn deref(&self) -> &[T] {
        // SAFETY: the first `len` elements of the buffer are initialized.
        unsafe { slice::from_raw_parts(self.as_ptr(), self.len) }
    }
}
49134
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ops::DerefMut for Vec<T, A> {
    fn deref_mut(&mut self) -> &mut [T] {
        // SAFETY: the first `len` elements are initialized and `&mut self`
        // guarantees unique access.
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) }
    }
}
49141
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone, A: Allocator + Clone> Clone for Vec<T, A> {
    #[cfg(not(test))]
    fn clone(&self) -> Self {
        let alloc = self.allocator().clone();
        <[T]>::to_vec_in(&**self, alloc)
    }

    // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
    // required for this method definition, is not available. Instead use the
    // `slice::to_vec`  function which is only available with cfg(test)
    // NB see the slice::hack module in slice.rs for more information
    #[cfg(test)]
    fn clone(&self) -> Self {
        let alloc = self.allocator().clone();
        crate::slice::to_vec(&**self, alloc)
    }

    // Clones `other` into `self`, reusing `self`'s allocation and as many of
    // its existing elements' resources as possible.
    fn clone_from(&mut self, other: &Self) {
        // drop anything that will not be overwritten
        self.truncate(other.len());

        // self.len <= other.len due to the truncate above, so the
        // slices here are always in-bounds.
        let (init, tail) = other.split_at(self.len());

        // reuse the contained values' allocations/resources.
        self.clone_from_slice(init);
        self.extend_from_slice(tail);
    }
}
49173
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Hash, A: Allocator> Hash for Vec<T, A> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Delegate to the slice implementation via Deref.
        Hash::hash(&**self, state)
    }
}
49181
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(
    message = "vector indices are of type `usize` or ranges of `usize`",
    label = "vector indices are of type `usize` or ranges of `usize`"
)]
impl<T, I: SliceIndex<[T]>, A: Allocator> Index<I> for Vec<T, A> {
    type Output = I::Output;

    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        // Delegate to the slice implementation via Deref.
        Index::index(&**self, index)
    }
}
49195
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(
    message = "vector indices are of type `usize` or ranges of `usize`",
    label = "vector indices are of type `usize` or ranges of `usize`"
)]
impl<T, I: SliceIndex<[T]>, A: Allocator> IndexMut<I> for Vec<T, A> {
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        // Delegate to the slice implementation via DerefMut.
        IndexMut::index_mut(&mut **self, index)
    }
}
49207
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> FromIterator<T> for Vec<T> {
    #[inline]
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Vec<T> {
        // Dispatch through the `SpecFromIter` specialization trait so that
        // optimized paths can be chosen based on the iterator type.
        <Self as SpecFromIter<T, I::IntoIter>>::from_iter(iter.into_iter())
    }
}
49215
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> IntoIterator for Vec<T, A> {
    type Item = T;
    type IntoIter = IntoIter<T, A>;

    /// Creates a consuming iterator, that is, one that moves each value out of
    /// the vector (from start to end). The vector cannot be used after calling
    /// this.
    ///
    /// # Examples
    ///
    /// ```
    /// let v = vec!["a".to_string(), "b".to_string()];
    /// for s in v.into_iter() {
    ///     // s has type String, not &String
    ///     println!("{}", s);
    /// }
    /// ```
    #[inline]
    fn into_iter(self) -> IntoIter<T, A> {
        unsafe {
            // Prevent `self`'s destructor from running: the allocation and
            // the allocator are handed over to the IntoIter.
            let mut me = ManuallyDrop::new(self);
            let alloc = ptr::read(me.allocator());
            let begin = me.as_mut_ptr();
            let end = if mem::size_of::<T>() == 0 {
                // For zero-sized types the "end" pointer is the start address
                // offset by `len` bytes, encoding the element count.
                arith_offset(begin as *const i8, me.len() as isize) as *const T
            } else {
                begin.add(me.len()) as *const T
            };
            let cap = me.buf.capacity();
            IntoIter {
                buf: NonNull::new_unchecked(begin),
                phantom: PhantomData,
                cap,
                alloc,
                ptr: begin,
                end,
            }
        }
    }
}
49257
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, A: Allocator> IntoIterator for &'a Vec<T, A> {
    type Item = &'a T;
    type IntoIter = slice::Iter<'a, T>;

    // Lets `for x in &vec` yield shared references without consuming the Vec.
    fn into_iter(self) -> slice::Iter<'a, T> {
        self.iter()
    }
}
49267
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec<T, A> {
    type Item = &'a mut T;
    type IntoIter = slice::IterMut<'a, T>;

    // Lets `for x in &mut vec` yield mutable references without consuming the Vec.
    fn into_iter(self) -> slice::IterMut<'a, T> {
        self.iter_mut()
    }
}
49277
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Extend<T> for Vec<T, A> {
    #[inline]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        // Dispatch through `SpecExtend` so optimized paths (e.g. for slices
        // or trusted-length iterators) can be selected.
        <Self as SpecExtend<T, I::IntoIter>>::spec_extend(self, iter.into_iter())
    }

    #[inline]
    fn extend_one(&mut self, item: T) {
        self.push(item);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}
49295
49296impl<T, A: Allocator> Vec<T, A> {
    // leaf method to which various SpecFrom/SpecExtend implementations delegate when
    // they have no further optimizations to apply
    fn extend_desugared<I: Iterator<Item = T>>(&mut self, mut iterator: I) {
        // This is the case for a general iterator.
        //
        // This function should be the moral equivalent of:
        //
        //      for item in iterator {
        //          self.push(item);
        //      }
        while let Some(element) = iterator.next() {
            let len = self.len();
            if len == self.capacity() {
                // Reserve at least the iterator's remaining lower bound plus
                // the element already pulled, to amortize reallocations.
                let (lower, _) = iterator.size_hint();
                self.reserve(lower.saturating_add(1));
            }
            unsafe {
                // SAFETY: the reserve above guarantees capacity > len, so the
                // write targets in-bounds spare capacity.
                ptr::write(self.as_mut_ptr().add(len), element);
                // NB can't overflow since we would have had to alloc the address space
                self.set_len(len + 1);
            }
        }
    }
49320
    /// Creates a splicing iterator that replaces the specified range in the vector
    /// with the given `replace_with` iterator and yields the removed items.
    /// `replace_with` does not need to be the same length as `range`.
    ///
    /// `range` is removed even if the iterator is not consumed until the end.
    ///
    /// It is unspecified how many elements are removed from the vector
    /// if the `Splice` value is leaked.
    ///
    /// The input iterator `replace_with` is only consumed when the `Splice` value is dropped.
    ///
    /// This is optimal if:
    ///
    /// * The tail (elements in the vector after `range`) is empty,
    /// * or `replace_with` yields fewer or equal elements than `range`’s length
    /// * or the lower bound of its `size_hint()` is exact.
    ///
    /// Otherwise, a temporary vector is allocated and the tail is moved twice.
    ///
    /// # Panics
    ///
    /// Panics if the starting point is greater than the end point or if
    /// the end point is greater than the length of the vector.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut v = vec![1, 2, 3];
    /// let new = [7, 8];
    /// let u: Vec<_> = v.splice(..2, new.iter().cloned()).collect();
    /// assert_eq!(v, &[7, 8, 3]);
    /// assert_eq!(u, &[1, 2]);
    /// ```
    #[inline]
    #[stable(feature = "vec_splice", since = "1.21.0")]
    pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoIter, A>
    where
        R: RangeBounds<usize>,
        I: IntoIterator<Item = T>,
    {
        // `drain` shortens the vector up front; the Splice value performs the
        // replacement from `replace_with` when it is dropped (see docs above).
        Splice { drain: self.drain(range), replace_with: replace_with.into_iter() }
    }
49363
49364    /// Creates an iterator which uses a closure to determine if an element should be removed.
49365    ///
49366    /// If the closure returns true, then the element is removed and yielded.
49367    /// If the closure returns false, the element will remain in the vector and will not be yielded
49368    /// by the iterator.
49369    ///
49370    /// Using this method is equivalent to the following code:
49371    ///
49372    /// ```
49373    /// # let some_predicate = |x: &mut i32| { *x == 2 || *x == 3 || *x == 6 };
49374    /// # let mut vec = vec![1, 2, 3, 4, 5, 6];
49375    /// let mut i = 0;
49376    /// while i < vec.len() {
49377    ///     if some_predicate(&mut vec[i]) {
49378    ///         let val = vec.remove(i);
49379    ///         // your code here
49380    ///     } else {
49381    ///         i += 1;
49382    ///     }
49383    /// }
49384    ///
49385    /// # assert_eq!(vec, vec![1, 4, 5]);
49386    /// ```
49387    ///
49388    /// But `drain_filter` is easier to use. `drain_filter` is also more efficient,
49389    /// because it can backshift the elements of the array in bulk.
49390    ///
49391    /// Note that `drain_filter` also lets you mutate every element in the filter closure,
49392    /// regardless of whether you choose to keep or remove it.
49393    ///
49394    /// # Examples
49395    ///
49396    /// Splitting an array into evens and odds, reusing the original allocation:
49397    ///
49398    /// ```
49399    /// #![feature(drain_filter)]
49400    /// let mut numbers = vec![1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15];
49401    ///
49402    /// let evens = numbers.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
49403    /// let odds = numbers;
49404    ///
49405    /// assert_eq!(evens, vec![2, 4, 6, 8, 14]);
49406    /// assert_eq!(odds, vec![1, 3, 5, 9, 11, 13, 15]);
49407    /// ```
49408    #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
49409    pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, T, F, A>
49410    where
49411        F: FnMut(&mut T) -> bool,
49412    {
49413        let old_len = self.len();
49414
49415        // Guard against us getting leaked (leak amplification)
49416        unsafe {
49417            self.set_len(0);
49418        }
49419
49420        DrainFilter { vec: self, idx: 0, del: 0, old_len, pred: filter, panic_flag: false }
49421    }
49422}
49423
49424/// Extend implementation that copies elements out of references before pushing them onto the Vec.
49425///
49426/// This implementation is specialized for slice iterators, where it uses [`copy_from_slice`] to
49427/// append the entire slice at once.
49428///
49429/// [`copy_from_slice`]: slice::copy_from_slice
49430#[stable(feature = "extend_ref", since = "1.2.0")]
49431impl<'a, T: Copy + 'a, A: Allocator + 'a> Extend<&'a T> for Vec<T, A> {
49432    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
49433        self.spec_extend(iter.into_iter())
49434    }
49435
49436    #[inline]
49437    fn extend_one(&mut self, &item: &'a T) {
49438        self.push(item);
49439    }
49440
49441    #[inline]
49442    fn extend_reserve(&mut self, additional: usize) {
49443        self.reserve(additional);
49444    }
49445}
49446
49447/// Implements comparison of vectors, [lexicographically](core::cmp::Ord#lexicographical-comparison).
49448#[stable(feature = "rust1", since = "1.0.0")]
49449impl<T: PartialOrd, A: Allocator> PartialOrd for Vec<T, A> {
49450    #[inline]
49451    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
49452        PartialOrd::partial_cmp(&**self, &**other)
49453    }
49454}
49455
#[stable(feature = "rust1", since = "1.0.0")]
// Equality on `Vec` is a full equivalence relation whenever `T`'s is.
impl<T: Eq, A: Allocator> Eq for Vec<T, A> {}
49458
49459/// Implements ordering of vectors, [lexicographically](core::cmp::Ord#lexicographical-comparison).
49460#[stable(feature = "rust1", since = "1.0.0")]
49461impl<T: Ord, A: Allocator> Ord for Vec<T, A> {
49462    #[inline]
49463    fn cmp(&self, other: &Self) -> Ordering {
49464        Ord::cmp(&**self, &**other)
49465    }
49466}
49467
#[stable(feature = "rust1", since = "1.0.0")]
// NOTE: `#[may_dangle]` is the dropck "eyepatch": this destructor promises to
// only drop the `T`s, never otherwise inspect them, relaxing drop-check.
unsafe impl<#[may_dangle] T, A: Allocator> Drop for Vec<T, A> {
    fn drop(&mut self) {
        unsafe {
            // use drop for [T]
            // use a raw slice to refer to the elements of the vector as weakest necessary type;
            // could avoid questions of validity in certain cases
            ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len))
        }
        // RawVec handles deallocation
    }
}
49480
49481#[stable(feature = "rust1", since = "1.0.0")]
49482impl<T> Default for Vec<T> {
49483    /// Creates an empty `Vec<T>`.
49484    fn default() -> Vec<T> {
49485        Vec::new()
49486    }
49487}
49488
49489#[stable(feature = "rust1", since = "1.0.0")]
49490impl<T: fmt::Debug, A: Allocator> fmt::Debug for Vec<T, A> {
49491    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
49492        fmt::Debug::fmt(&**self, f)
49493    }
49494}
49495
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> AsRef<Vec<T, A>> for Vec<T, A> {
    /// Identity conversion, so generic code can accept `impl AsRef<Vec<T, A>>`.
    fn as_ref(&self) -> &Vec<T, A> {
        self
    }
}
49502
#[stable(feature = "vec_as_mut", since = "1.5.0")]
impl<T, A: Allocator> AsMut<Vec<T, A>> for Vec<T, A> {
    /// Identity conversion, so generic code can accept `impl AsMut<Vec<T, A>>`.
    fn as_mut(&mut self) -> &mut Vec<T, A> {
        self
    }
}
49509
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> AsRef<[T]> for Vec<T, A> {
    /// Borrows the vector's contents as a slice (deref coercion does the work).
    fn as_ref(&self) -> &[T] {
        self
    }
}
49516
#[stable(feature = "vec_as_mut", since = "1.5.0")]
impl<T, A: Allocator> AsMut<[T]> for Vec<T, A> {
    /// Mutably borrows the vector's contents as a slice (deref coercion does the work).
    fn as_mut(&mut self) -> &mut [T] {
        self
    }
}
49523
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone> From<&[T]> for Vec<T> {
    /// Allocate a `Vec<T>` and fill it by cloning `s`'s items.
    ///
    /// # Examples
    ///
    /// ```
    /// assert_eq!(Vec::from(&[1, 2, 3][..]), vec![1, 2, 3]);
    /// ```
    #[cfg(not(test))]
    fn from(s: &[T]) -> Vec<T> {
        s.to_vec()
    }
    // When this crate is built for its own tests, call the internal helper
    // directly instead of going through `to_vec`.
    #[cfg(test)]
    fn from(s: &[T]) -> Vec<T> {
        crate::slice::to_vec(s, Global)
    }
}
49542
#[stable(feature = "vec_from_mut", since = "1.19.0")]
impl<T: Clone> From<&mut [T]> for Vec<T> {
    /// Allocate a `Vec<T>` and fill it by cloning `s`'s items.
    ///
    /// # Examples
    ///
    /// ```
    /// assert_eq!(Vec::from(&mut [1, 2, 3][..]), vec![1, 2, 3]);
    /// ```
    #[cfg(not(test))]
    fn from(s: &mut [T]) -> Vec<T> {
        s.to_vec()
    }
    // When this crate is built for its own tests, call the internal helper
    // directly instead of going through `to_vec`.
    #[cfg(test)]
    fn from(s: &mut [T]) -> Vec<T> {
        crate::slice::to_vec(s, Global)
    }
}
49561
#[stable(feature = "vec_from_array", since = "1.44.0")]
impl<T, const N: usize> From<[T; N]> for Vec<T> {
    /// Allocate a `Vec<T>` and move `s`'s items into it.
    ///
    /// # Examples
    ///
    /// ```
    /// assert_eq!(Vec::from([1, 2, 3]), vec![1, 2, 3]);
    /// ```
    #[cfg(not(test))]
    fn from(s: [T; N]) -> Vec<T> {
        <[T]>::into_vec(box s)
    }
    /// Allocate a `Vec<T>` and move `s`'s items into it.
    ///
    /// # Examples
    ///
    /// ```
    /// assert_eq!(Vec::from([1, 2, 3]), vec![1, 2, 3]);
    /// ```
    #[cfg(test)]
    fn from(s: [T; N]) -> Vec<T> {
        crate::slice::into_vec(box s)
    }
}
49580
#[stable(feature = "vec_from_cow_slice", since = "1.14.0")]
impl<'a, T> From<Cow<'a, [T]>> for Vec<T>
where
    [T]: ToOwned<Owned = Vec<T>>,
{
    /// Convert a clone-on-write slice into a vector.
    ///
    /// If `s` already owns a `Vec<T>`, it will be returned directly.
    /// If `s` is borrowing a slice, a new `Vec<T>` will be allocated and
    /// filled by cloning `s`'s items into it.
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::borrow::Cow;
    /// let o: Cow<[i32]> = Cow::Owned(vec![1, 2, 3]);
    /// let b: Cow<[i32]> = Cow::Borrowed(&[1, 2, 3]);
    /// assert_eq!(Vec::from(o), Vec::from(b));
    /// ```
    fn from(s: Cow<'a, [T]>) -> Vec<T> {
        // `into_owned` hands back the owned vector or clones the borrowed slice.
        s.into_owned()
    }
}
49604
// note: test pulls in libstd, which causes errors here
#[cfg(not(test))]
#[stable(feature = "vec_from_box", since = "1.18.0")]
impl<T, A: Allocator> From<Box<[T], A>> for Vec<T, A> {
    /// Convert a boxed slice into a vector by transferring ownership of
    /// the existing heap allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// let b: Box<[i32]> = vec![1, 2, 3].into_boxed_slice();
    /// assert_eq!(Vec::from(b), vec![1, 2, 3]);
    /// ```
    fn from(s: Box<[T], A>) -> Self {
        // No copy: the box's buffer becomes the vector's buffer.
        s.into_vec()
    }
}
49622
// note: test pulls in libstd, which causes errors here
#[cfg(not(test))]
#[stable(feature = "box_from_vec", since = "1.20.0")]
impl<T, A: Allocator> From<Vec<T, A>> for Box<[T], A> {
    /// Convert a vector into a boxed slice.
    ///
    /// If `v` has excess capacity, its items will be moved into a
    /// newly-allocated buffer with exactly the right capacity.
    ///
    /// # Examples
    ///
    /// ```
    /// assert_eq!(Box::from(vec![1, 2, 3]), vec![1, 2, 3].into_boxed_slice());
    /// ```
    fn from(v: Vec<T, A>) -> Self {
        v.into_boxed_slice()
    }
}
49641
49642#[stable(feature = "rust1", since = "1.0.0")]
49643impl From<&str> for Vec<u8> {
49644    /// Allocate a `Vec<u8>` and fill it with a UTF-8 string.
49645    ///
49646    /// # Examples
49647    ///
49648    /// ```
49649    /// assert_eq!(Vec::from("123"), vec![b'1', b'2', b'3']);
49650    /// ```
49651    fn from(s: &str) -> Vec<u8> {
49652        From::from(s.as_bytes())
49653    }
49654}
49655
#[stable(feature = "array_try_from_vec", since = "1.48.0")]
impl<T, A: Allocator, const N: usize> TryFrom<Vec<T, A>> for [T; N] {
    type Error = Vec<T, A>;

    /// Gets the entire contents of the `Vec<T>` as an array,
    /// if its size exactly matches that of the requested array.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::convert::TryInto;
    /// assert_eq!(vec![1, 2, 3].try_into(), Ok([1, 2, 3]));
    /// assert_eq!(<Vec<i32>>::new().try_into(), Ok([]));
    /// ```
    ///
    /// If the length doesn't match, the input comes back in `Err`:
    /// ```
    /// use std::convert::TryInto;
    /// let r: Result<[i32; 4], _> = (0..10).collect::<Vec<_>>().try_into();
    /// assert_eq!(r, Err(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]));
    /// ```
    ///
    /// If you're fine with just getting a prefix of the `Vec<T>`,
    /// you can call [`.truncate(N)`](Vec::truncate) first.
    /// ```
    /// use std::convert::TryInto;
    /// let mut v = String::from("hello world").into_bytes();
    /// v.sort();
    /// v.truncate(2);
    /// let [a, b]: [_; 2] = v.try_into().unwrap();
    /// assert_eq!(a, b' ');
    /// assert_eq!(b, b'd');
    /// ```
    fn try_from(mut vec: Vec<T, A>) -> Result<[T; N], Vec<T, A>> {
        // Length mismatch: hand the vector back unchanged.
        if vec.len() != N {
            return Err(vec);
        }

        // SAFETY: `.set_len(0)` is always sound.
        unsafe { vec.set_len(0) };

        // SAFETY: A `Vec`'s pointer is always aligned properly, and
        // the alignment the array needs is the same as the items.
        // We checked earlier that we have sufficient items.
        // The items will not double-drop as the `set_len`
        // tells the `Vec` not to also drop them.
        let array = unsafe { ptr::read(vec.as_ptr() as *const [T; N]) };
        Ok(array)
    }
}
// Set the length of the vec when the `SetLenOnDrop` value goes out of scope.
//
// The idea is: The length field in SetLenOnDrop is a local variable
// that the optimizer will see does not alias with any stores through the Vec's data
// pointer. This is a workaround for alias analysis issue #32155
pub(super) struct SetLenOnDrop<'a> {
    // Destination: the `Vec`'s length field, written exactly once, on drop.
    len: &'a mut usize,
    // Working copy of the length, advanced via `increment_len`.
    local_len: usize,
}
49715
impl<'a> SetLenOnDrop<'a> {
    /// Captures `*len` as the starting working length.
    #[inline]
    pub(super) fn new(len: &'a mut usize) -> Self {
        SetLenOnDrop { local_len: *len, len }
    }

    /// Bumps only the local working copy; the real length is stored on drop.
    #[inline]
    pub(super) fn increment_len(&mut self, increment: usize) {
        self.local_len += increment;
    }
}
49727
impl Drop for SetLenOnDrop<'_> {
    #[inline]
    fn drop(&mut self) {
        // Commit the accumulated working length back to the `Vec`.
        *self.len = self.local_len;
    }
}
49734use crate::alloc::{Allocator, Global};
49735use core::ptr::{self};
49736use core::slice::{self};
49737
49738use super::Vec;
49739
/// An iterator which uses a closure to determine if an element should be removed.
///
/// This struct is created by [`Vec::drain_filter`].
/// See its documentation for more.
///
/// # Example
///
/// ```
/// #![feature(drain_filter)]
///
/// let mut v = vec![0, 1, 2];
/// let iter: std::vec::DrainFilter<_, _> = v.drain_filter(|x| *x % 2 == 0);
/// ```
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
#[derive(Debug)]
pub struct DrainFilter<
    'a,
    T,
    F,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> where
    F: FnMut(&mut T) -> bool,
{
    /// The vector being drained. Its length is set to 0 for the duration of
    /// the drain (leak-amplification guard); `Drop` restores the final length.
    pub(super) vec: &'a mut Vec<T, A>,
    /// The index of the item that will be inspected by the next call to `next`.
    pub(super) idx: usize,
    /// The number of items that have been drained (removed) thus far.
    pub(super) del: usize,
    /// The original length of `vec` prior to draining.
    pub(super) old_len: usize,
    /// The filter test predicate.
    pub(super) pred: F,
    /// A flag that indicates a panic has occurred in the filter test predicate.
    /// This is used as a hint in the drop implementation to prevent consumption
    /// of the remainder of the `DrainFilter`. Any unprocessed items will be
    /// backshifted in the `vec`, but no further items will be dropped or
    /// tested by the filter predicate.
    pub(super) panic_flag: bool,
}
49779
impl<T, F, A: Allocator> DrainFilter<'_, T, F, A>
where
    F: FnMut(&mut T) -> bool,
{
    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        // Simply forwards to the borrowed vector's allocator accessor.
        self.vec.allocator()
    }
}
49791
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<T, F, A: Allocator> Iterator for DrainFilter<'_, T, F, A>
where
    F: FnMut(&mut T) -> bool,
{
    type Item = T;

    fn next(&mut self) -> Option<T> {
        unsafe {
            while self.idx < self.old_len {
                let i = self.idx;
                // View the whole original buffer via raw parts: `vec.len` is 0
                // for the duration of the drain (set by `drain_filter`).
                let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
                // Set while the predicate runs so `Drop` knows a panic came
                // from the predicate and must not invoke it again.
                self.panic_flag = true;
                let drained = (self.pred)(&mut v[i]);
                self.panic_flag = false;
                // Update the index *after* the predicate is called. If the index
                // is updated prior and the predicate panics, the element at this
                // index would be leaked.
                self.idx += 1;
                if drained {
                    self.del += 1;
                    // Move the element out; later kept elements backshift over the gap.
                    return Some(ptr::read(&v[i]));
                } else if self.del > 0 {
                    // Kept element: shift it left over the `del` removed slots.
                    let del = self.del;
                    let src: *const T = &v[i];
                    let dst: *mut T = &mut v[i - del];
                    ptr::copy_nonoverlapping(src, dst, 1);
                }
            }
            None
        }
    }

    /// Upper bound is the untested remainder; lower is 0 since the predicate
    /// may reject every remaining element.
    fn size_hint(&self) -> (usize, Option<usize>) {
        (0, Some(self.old_len - self.idx))
    }
}
49829
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<T, F, A: Allocator> Drop for DrainFilter<'_, T, F, A>
where
    F: FnMut(&mut T) -> bool,
{
    fn drop(&mut self) {
        // Drop guard: runs even if consuming the remaining elements below
        // panics, so the vector is always left in a coherent state.
        struct BackshiftOnDrop<'a, 'b, T, F, A: Allocator>
        where
            F: FnMut(&mut T) -> bool,
        {
            drain: &'b mut DrainFilter<'a, T, F, A>,
        }

        impl<'a, 'b, T, F, A: Allocator> Drop for BackshiftOnDrop<'a, 'b, T, F, A>
        where
            F: FnMut(&mut T) -> bool,
        {
            fn drop(&mut self) {
                unsafe {
                    if self.drain.idx < self.drain.old_len && self.drain.del > 0 {
                        // This is a pretty messed up state, and there isn't really an
                        // obviously right thing to do. We don't want to keep trying
                        // to execute `pred`, so we just backshift all the unprocessed
                        // elements and tell the vec that they still exist. The backshift
                        // is required to prevent a double-drop of the last successfully
                        // drained item prior to a panic in the predicate.
                        let ptr = self.drain.vec.as_mut_ptr();
                        let src = ptr.add(self.drain.idx);
                        let dst = src.sub(self.drain.del);
                        let tail_len = self.drain.old_len - self.drain.idx;
                        src.copy_to(dst, tail_len);
                    }
                    // Restore the vector's length: original minus removed items.
                    self.drain.vec.set_len(self.drain.old_len - self.drain.del);
                }
            }
        }

        let backshift = BackshiftOnDrop { drain: self };

        // Attempt to consume any remaining elements if the filter predicate
        // has not yet panicked. We'll backshift any remaining elements
        // whether we've already panicked or if the consumption here panics.
        if !backshift.drain.panic_flag {
            backshift.drain.for_each(drop);
        }
    }
}
49877use crate::borrow::Cow;
49878use core::iter::FromIterator;
49879
49880use super::Vec;
49881
#[stable(feature = "cow_from_vec", since = "1.8.0")]
impl<'a, T: Clone> From<&'a [T]> for Cow<'a, [T]> {
    /// Borrows the slice; no allocation or cloning occurs here.
    fn from(s: &'a [T]) -> Cow<'a, [T]> {
        Cow::Borrowed(s)
    }
}
49888
#[stable(feature = "cow_from_vec", since = "1.8.0")]
impl<'a, T: Clone> From<Vec<T>> for Cow<'a, [T]> {
    /// Takes ownership of the vector and wraps it as the owned variant.
    fn from(v: Vec<T>) -> Cow<'a, [T]> {
        Cow::Owned(v)
    }
}
49895
#[stable(feature = "cow_from_vec_ref", since = "1.28.0")]
impl<'a, T: Clone> From<&'a Vec<T>> for Cow<'a, [T]> {
    /// Borrows the vector's contents as a slice; no allocation or cloning.
    fn from(v: &'a Vec<T>) -> Cow<'a, [T]> {
        Cow::Borrowed(v.as_slice())
    }
}
49902
49903#[stable(feature = "rust1", since = "1.0.0")]
49904impl<'a, T> FromIterator<T> for Cow<'a, [T]>
49905where
49906    T: Clone,
49907{
49908    fn from_iter<I: IntoIterator<Item = T>>(it: I) -> Cow<'a, [T]> {
49909        Cow::Owned(FromIterator::from_iter(it))
49910    }
49911}
49912use crate::alloc::Allocator;
49913use crate::borrow::Cow;
49914
49915use super::Vec;
49916
// Generates a `PartialEq` impl that compares the two container types as slices.
//
// `[$($vars)*]`  — extra generic parameters for the impl (allocator, `const N`);
// `$lhs`/`$rhs`  — the compared types;
// optional `where $ty: $bound` — an extra bound (e.g. `T: Clone` for `Cow`);
// `#[$stability]` — the stability attribute attached to the generated impl.
macro_rules! __impl_slice_eq1 {
    ([$($vars:tt)*] $lhs:ty, $rhs:ty $(where $ty:ty: $bound:ident)?, #[$stability:meta]) => {
        #[$stability]
        impl<T, U, $($vars)*> PartialEq<$rhs> for $lhs
        where
            T: PartialEq<U>,
            $($ty: $bound)?
        {
            #[inline]
            fn eq(&self, other: &$rhs) -> bool { self[..] == other[..] }
            #[inline]
            fn ne(&self, other: &$rhs) -> bool { self[..] != other[..] }
        }
    }
}
49932
// `PartialEq` between `Vec` and the other slice-like types (slices, arrays,
// `Cow`), in both operand orders where coherence allows.
__impl_slice_eq1! { [A: Allocator] Vec<T, A>, Vec<U, A>, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator] Vec<T, A>, &[U], #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator] Vec<T, A>, &mut [U], #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator] &[T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
__impl_slice_eq1! { [A: Allocator] &mut [T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
__impl_slice_eq1! { [A: Allocator] Vec<T, A>, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")]  }
__impl_slice_eq1! { [A: Allocator] [T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")]  }
__impl_slice_eq1! { [A: Allocator] Cow<'_, [T]>, Vec<U, A> where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [] Cow<'_, [T]>, &[U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [] Cow<'_, [T]>, &mut [U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator, const N: usize] Vec<T, A>, [U; N], #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator, const N: usize] Vec<T, A>, &[U; N], #[stable(feature = "rust1", since = "1.0.0")] }
49945
49946// NOTE: some less important impls are omitted to reduce code bloat
49947// FIXME(Centril): Reconsider this?
49948//__impl_slice_eq1! { [const N: usize] Vec<A>, &mut [B; N], }
49949//__impl_slice_eq1! { [const N: usize] [A; N], Vec<B>, }
49950//__impl_slice_eq1! { [const N: usize] &[A; N], Vec<B>, }
49951//__impl_slice_eq1! { [const N: usize] &mut [A; N], Vec<B>, }
49952//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, [B; N], }
49953//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &[B; N], }
49954//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &mut [B; N], }
49955use crate::alloc::{Allocator, Global};
49956use core::ptr::{self};
49957use core::slice::{self};
49958
49959use super::{Drain, Vec};
49960
/// A splicing iterator for `Vec`.
///
/// This struct is created by [`Vec::splice()`].
/// See its documentation for more.
///
/// # Example
///
/// ```
/// let mut v = vec![0, 1, 2];
/// let new = [7, 8];
/// let iter: std::vec::Splice<_> = v.splice(1.., new.iter().cloned());
/// ```
#[derive(Debug)]
#[stable(feature = "vec_splice", since = "1.21.0")]
pub struct Splice<
    'a,
    I: Iterator + 'a,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global,
> {
    /// The drain of the target range; `Splice::drop` refills it from `replace_with`.
    pub(super) drain: Drain<'a, I::Item, A>,
    /// The iterator supplying replacement elements, consumed only on drop.
    pub(super) replace_with: I,
}
49983
#[stable(feature = "vec_splice", since = "1.21.0")]
impl<I: Iterator, A: Allocator> Iterator for Splice<'_, I, A> {
    type Item = I::Item;

    /// Yields the elements removed from the spliced range, in order
    /// (delegates to the inner `Drain`).
    fn next(&mut self) -> Option<Self::Item> {
        self.drain.next()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.drain.size_hint()
    }
}
49996
#[stable(feature = "vec_splice", since = "1.21.0")]
impl<I: Iterator, A: Allocator> DoubleEndedIterator for Splice<'_, I, A> {
    /// Yields removed elements from the back (delegates to the inner `Drain`).
    fn next_back(&mut self) -> Option<Self::Item> {
        self.drain.next_back()
    }
}
50003
#[stable(feature = "vec_splice", since = "1.21.0")]
// The length comes from the inner `Drain` via the delegated `size_hint`.
impl<I: Iterator, A: Allocator> ExactSizeIterator for Splice<'_, I, A> {}
50006
#[stable(feature = "vec_splice", since = "1.21.0")]
impl<I: Iterator, A: Allocator> Drop for Splice<'_, I, A> {
    fn drop(&mut self) {
        // Drop any drained elements the caller did not consume.
        self.drain.by_ref().for_each(drop);

        unsafe {
            if self.drain.tail_len == 0 {
                // No tail after the range: replacements can simply be appended.
                self.drain.vec.as_mut().extend(self.replace_with.by_ref());
                return;
            }

            // First fill the range left by drain().
            if !self.drain.fill(&mut self.replace_with) {
                return;
            }

            // There may be more elements. Use the lower bound as an estimate.
            // FIXME: Is the upper bound a better guess? Or something else?
            let (lower_bound, _upper_bound) = self.replace_with.size_hint();
            if lower_bound > 0 {
                self.drain.move_tail(lower_bound);
                if !self.drain.fill(&mut self.replace_with) {
                    return;
                }
            }

            // Collect any remaining elements.
            // This is a zero-length vector which does not allocate if `lower_bound` was exact.
            let mut collected = self.replace_with.by_ref().collect::<Vec<I::Item>>().into_iter();
            // Now we have an exact count.
            if collected.len() > 0 {
                self.drain.move_tail(collected.len());
                let filled = self.drain.fill(&mut collected);
                debug_assert!(filled);
                debug_assert_eq!(collected.len(), 0);
            }
        }
        // Let `Drain::drop` move the tail back if necessary and restore `vec.len`.
    }
}
50047
/// Private helper methods for `Splice::drop`
impl<T, A: Allocator> Drain<'_, T, A> {
    /// The range from `self.vec.len` to `self.tail_start` contains elements
    /// that have been moved out.
    /// Fill that range as much as possible with new elements from the `replace_with` iterator.
    /// Returns `true` if we filled the entire range. (`replace_with.next()` didn’t return `None`.)
    unsafe fn fill<I: Iterator<Item = T>>(&mut self, replace_with: &mut I) -> bool {
        let vec = unsafe { self.vec.as_mut() };
        let range_start = vec.len;
        let range_end = self.tail_start;
        // SAFETY: the `range_start..range_end` slots are inside the allocation
        // but currently vacated (their elements were moved out by the drain).
        let range_slice = unsafe {
            slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start)
        };

        for place in range_slice {
            if let Some(new_item) = replace_with.next() {
                unsafe { ptr::write(place, new_item) };
                // Grow the length one element at a time so a panic in
                // `replace_with.next()` leaves only initialized elements visible.
                vec.len += 1;
            } else {
                return false;
            }
        }
        true
    }

    /// Makes room for inserting more elements before the tail.
    unsafe fn move_tail(&mut self, additional: usize) {
        let vec = unsafe { self.vec.as_mut() };
        let len = self.tail_start + self.tail_len;
        vec.buf.reserve(len, additional);

        let new_tail_start = self.tail_start + additional;
        unsafe {
            // `ptr::copy` (memmove) because the old and new tail ranges may overlap.
            let src = vec.as_ptr().add(self.tail_start);
            let dst = vec.as_mut_ptr().add(new_tail_start);
            ptr::copy(src, dst, self.tail_len);
        }
        self.tail_start = new_tail_start;
    }
}
50088use core::ptr::{self};
50089use core::slice::{self};
50090
// A helper struct for in-place iteration that drops the destination slice of iteration,
// i.e. the head. The source slice (the tail) is dropped by IntoIter.
pub(super) struct InPlaceDrop<T> {
    // Start of the head region (first written element).
    pub(super) inner: *mut T,
    // Write cursor: one past the last element written so far.
    pub(super) dst: *mut T,
}
50097
impl<T> InPlaceDrop<T> {
    // Number of elements written so far, i.e. the distance from `inner` to `dst`.
    fn len(&self) -> usize {
        unsafe { self.dst.offset_from(self.inner) as usize }
    }
}
50103
impl<T> Drop for InPlaceDrop<T> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: relies on the invariant that `inner..dst` holds exactly
        // `len()` initialized elements owned by this guard.
        unsafe {
            ptr::drop_in_place(slice::from_raw_parts_mut(self.inner, self.len()));
        }
    }
}
50112//! The alloc Prelude
50113//!
50114//! The purpose of this module is to alleviate imports of commonly-used
50115//! items of the `alloc` crate by adding a glob import to the top of modules:
50116//!
50117//! ```
50118//! # #![allow(unused_imports)]
50119//! #![feature(alloc_prelude)]
50120//! extern crate alloc;
50121//! use alloc::prelude::v1::*;
50122//! ```
50123
50124#![unstable(feature = "alloc_prelude", issue = "58935")]
50125
50126pub mod v1;
50127//! The first version of the prelude of `alloc` crate.
50128//!
50129//! See the [module-level documentation](../index.html) for more.
50130
50131#![unstable(feature = "alloc_prelude", issue = "58935")]
50132
50133#[unstable(feature = "alloc_prelude", issue = "58935")]
50134pub use crate::borrow::ToOwned;
50135#[unstable(feature = "alloc_prelude", issue = "58935")]
50136pub use crate::boxed::Box;
50137#[unstable(feature = "alloc_prelude", issue = "58935")]
50138pub use crate::string::{String, ToString};
50139#[unstable(feature = "alloc_prelude", issue = "58935")]
50140pub use crate::vec::Vec;
50141use super::*;
50142
50143extern crate test;
50144use crate::boxed::Box;
50145use test::Bencher;
50146
#[test]
fn allocate_zeroed() {
    // Allocate a 1 KiB block via `allocate_zeroed`, verify every byte is zero,
    // then release the allocation.
    unsafe {
        let layout = Layout::from_size_align(1024, 1).unwrap();
        // `Layout` is `Copy`, so no `.clone()` is needed to reuse it
        // for the error path and the deallocation below.
        let ptr = Global.allocate_zeroed(layout).unwrap_or_else(|_| handle_alloc_error(layout));

        let mut i = ptr.as_non_null_ptr().as_ptr();
        let end = i.add(layout.size());
        while i < end {
            assert_eq!(*i, 0);
            // `add(1)` is the idiomatic unsigned step (was `offset(1)`).
            i = i.add(1);
        }
        Global.deallocate(ptr.as_non_null_ptr(), layout);
    }
}
50163
#[bench]
#[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks
fn alloc_owned_small(b: &mut Bencher) {
    // Measures boxing a small value; the `box` expression itself is the
    // operation under test, so it must not be rewritten or hoisted.
    b.iter(|| {
        let _: Box<_> = box 10;
    })
}
50171//! A pointer type for heap allocation.
50172//!
50173//! [`Box<T>`], casually referred to as a 'box', provides the simplest form of
50174//! heap allocation in Rust. Boxes provide ownership for this allocation, and
50175//! drop their contents when they go out of scope. Boxes also ensure that they
50176//! never allocate more than `isize::MAX` bytes.
50177//!
50178//! # Examples
50179//!
50180//! Move a value from the stack to the heap by creating a [`Box`]:
50181//!
50182//! ```
50183//! let val: u8 = 5;
50184//! let boxed: Box<u8> = Box::new(val);
50185//! ```
50186//!
50187//! Move a value from a [`Box`] back to the stack by [dereferencing]:
50188//!
50189//! ```
50190//! let boxed: Box<u8> = Box::new(5);
50191//! let val: u8 = *boxed;
50192//! ```
50193//!
50194//! Creating a recursive data structure:
50195//!
50196//! ```
50197//! #[derive(Debug)]
50198//! enum List<T> {
50199//!     Cons(T, Box<List<T>>),
50200//!     Nil,
50201//! }
50202//!
50203//! let list: List<i32> = List::Cons(1, Box::new(List::Cons(2, Box::new(List::Nil))));
50204//! println!("{:?}", list);
50205//! ```
50206//!
50207//! This will print `Cons(1, Cons(2, Nil))`.
50208//!
50209//! Recursive structures must be boxed, because if the definition of `Cons`
50210//! looked like this:
50211//!
50212//! ```compile_fail,E0072
50213//! # enum List<T> {
50214//! Cons(T, List<T>),
50215//! # }
50216//! ```
50217//!
50218//! It wouldn't work. This is because the size of a `List` depends on how many
50219//! elements are in the list, and so we don't know how much memory to allocate
50220//! for a `Cons`. By introducing a [`Box<T>`], which has a defined size, we know how
50221//! big `Cons` needs to be.
50222//!
50223//! # Memory layout
50224//!
50225//! For non-zero-sized values, a [`Box`] will use the [`Global`] allocator for
50226//! its allocation. It is valid to convert both ways between a [`Box`] and a
50227//! raw pointer allocated with the [`Global`] allocator, given that the
50228//! [`Layout`] used with the allocator is correct for the type. More precisely,
50229//! a `value: *mut T` that has been allocated with the [`Global`] allocator
50230//! with `Layout::for_value(&*value)` may be converted into a box using
50231//! [`Box::<T>::from_raw(value)`]. Conversely, the memory backing a `value: *mut
50232//! T` obtained from [`Box::<T>::into_raw`] may be deallocated using the
50233//! [`Global`] allocator with [`Layout::for_value(&*value)`].
50234//!
50235//! For zero-sized values, the `Box` pointer still has to be [valid] for reads
50236//! and writes and sufficiently aligned. In particular, casting any aligned
50237//! non-zero integer literal to a raw pointer produces a valid pointer, but a
50238//! pointer pointing into previously allocated memory that since got freed is
50239//! not valid. The recommended way to build a Box to a ZST if `Box::new` cannot
50240//! be used is to use [`ptr::NonNull::dangling`].
50241//!
50242//! So long as `T: Sized`, a `Box<T>` is guaranteed to be represented
50243//! as a single pointer and is also ABI-compatible with C pointers
50244//! (i.e. the C type `T*`). This means that if you have extern "C"
50245//! Rust functions that will be called from C, you can define those
50246//! Rust functions using `Box<T>` types, and use `T*` as corresponding
50247//! type on the C side. As an example, consider this C header which
50248//! declares functions that create and destroy some kind of `Foo`
50249//! value:
50250//!
50251//! ```c
50252//! /* C header */
50253//!
50254//! /* Returns ownership to the caller */
50255//! struct Foo* foo_new(void);
50256//!
50257//! /* Takes ownership from the caller; no-op when invoked with NULL */
50258//! void foo_delete(struct Foo*);
50259//! ```
50260//!
50261//! These two functions might be implemented in Rust as follows. Here, the
50262//! `struct Foo*` type from C is translated to `Box<Foo>`, which captures
50263//! the ownership constraints. Note also that the nullable argument to
50264//! `foo_delete` is represented in Rust as `Option<Box<Foo>>`, since `Box<Foo>`
50265//! cannot be null.
50266//!
50267//! ```
50268//! #[repr(C)]
50269//! pub struct Foo;
50270//!
50271//! #[no_mangle]
50272//! pub extern "C" fn foo_new() -> Box<Foo> {
50273//!     Box::new(Foo)
50274//! }
50275//!
50276//! #[no_mangle]
50277//! pub extern "C" fn foo_delete(_: Option<Box<Foo>>) {}
50278//! ```
50279//!
50280//! Even though `Box<T>` has the same representation and C ABI as a C pointer,
50281//! this does not mean that you can convert an arbitrary `T*` into a `Box<T>`
50282//! and expect things to work. `Box<T>` values will always be fully aligned,
50283//! non-null pointers. Moreover, the destructor for `Box<T>` will attempt to
50284//! free the value with the global allocator. In general, the best practice
50285//! is to only use `Box<T>` for pointers that originated from the global
50286//! allocator.
50287//!
50288//! **Important.** At least at present, you should avoid using
50289//! `Box<T>` types for functions that are defined in C but invoked
50290//! from Rust. In those cases, you should directly mirror the C types
50291//! as closely as possible. Using types like `Box<T>` where the C
50292//! definition is just using `T*` can lead to undefined behavior, as
50293//! described in [rust-lang/unsafe-code-guidelines#198][ucg#198].
50294//!
50295//! [ucg#198]: https://github.com/rust-lang/unsafe-code-guidelines/issues/198
50296//! [dereferencing]: core::ops::Deref
50297//! [`Box::<T>::from_raw(value)`]: Box::from_raw
50298//! [`Global`]: crate::alloc::Global
50299//! [`Layout`]: crate::alloc::Layout
50300//! [`Layout::for_value(&*value)`]: crate::alloc::Layout::for_value
50301//! [valid]: ptr#safety
50302
50303#![stable(feature = "rust1", since = "1.0.0")]
50304
50305use core::any::Any;
50306use core::borrow;
50307use core::cmp::Ordering;
50308use core::convert::{From, TryFrom};
50309use core::fmt;
50310use core::future::Future;
50311use core::hash::{Hash, Hasher};
50312use core::iter::{FromIterator, FusedIterator, Iterator};
50313use core::marker::{Unpin, Unsize};
50314use core::mem;
50315use core::ops::{
50316    CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Generator, GeneratorState, Receiver,
50317};
50318use core::pin::Pin;
50319use core::ptr::{self, Unique};
50320use core::stream::Stream;
50321use core::task::{Context, Poll};
50322
50323use crate::alloc::{handle_alloc_error, AllocError, Allocator, Global, Layout, WriteCloneIntoRaw};
50324use crate::borrow::Cow;
50325use crate::raw_vec::RawVec;
50326use crate::str::from_boxed_utf8_unchecked;
50327use crate::vec::Vec;
50328
/// A pointer type for heap allocation.
///
/// See the [module-level documentation](../../std/boxed/index.html) for more.
// `#[lang = "owned_box"]` ties this type to the compiler: `box` expressions
// and several built-in coercions are defined in terms of it.
// `#[fundamental]` lets downstream crates implement traits for `Box<LocalType>`.
#[lang = "owned_box"]
#[fundamental]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Box<
    T: ?Sized,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
>(Unique<T>, A); // fields: owning non-null pointer to the value, plus its allocator
50339
// Constructors for `Box<T>` in the default `Global` allocator. The fallible
// (`try_`) and uninitialized (`_uninit`/`_zeroed`) variants all delegate to
// the allocator-parameterized constructors in the `Box<T, A>` impl below.
impl<T> Box<T> {
    /// Allocates memory on the heap and then places `x` into it.
    ///
    /// This doesn't actually allocate if `T` is zero-sized.
    ///
    /// # Examples
    ///
    /// ```
    /// let five = Box::new(5);
    /// ```
    #[inline(always)]
    #[doc(alias = "alloc")]
    #[doc(alias = "malloc")]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(x: T) -> Self {
        // Nightly `box` expression: allocates in the global allocator and
        // moves `x` into the allocation.
        box x
    }

    /// Constructs a new box with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    ///
    /// let mut five = Box::<u32>::new_uninit();
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     five.as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn new_uninit() -> Box<mem::MaybeUninit<T>> {
        // Delegate to the allocator-parameterized constructor with `Global`.
        Self::new_uninit_in(Global)
    }

    /// Constructs a new `Box` with uninitialized contents, with the memory
    /// being filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
    /// of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    ///
    /// let zero = Box::<u32>::new_zeroed();
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[inline]
    #[doc(alias = "calloc")]
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_zeroed() -> Box<mem::MaybeUninit<T>> {
        Self::new_zeroed_in(Global)
    }

    /// Constructs a new `Pin<Box<T>>`. If `T` does not implement `Unpin`, then
    /// `x` will be pinned in memory and unable to be moved.
    #[stable(feature = "pin", since = "1.33.0")]
    #[inline(always)]
    pub fn pin(x: T) -> Pin<Box<T>> {
        // `Box<T> -> Pin<Box<T>>` via the `From` impl; pinning a box is safe
        // because the heap allocation never moves.
        (box x).into()
    }

    /// Allocates memory on the heap then places `x` into it,
    /// returning an error if the allocation fails
    ///
    /// This doesn't actually allocate if `T` is zero-sized.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// let five = Box::try_new(5)?;
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn try_new(x: T) -> Result<Self, AllocError> {
        Self::try_new_in(x, Global)
    }

    /// Constructs a new box with uninitialized contents on the heap,
    /// returning an error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    ///
    /// let mut five = Box::<u32>::try_new_uninit()?;
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     five.as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn try_new_uninit() -> Result<Box<mem::MaybeUninit<T>>, AllocError> {
        Box::try_new_uninit_in(Global)
    }

    /// Constructs a new `Box` with uninitialized contents, with the memory
    /// being filled with `0` bytes on the heap
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
    /// of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    ///
    /// let zero = Box::<u32>::try_new_zeroed()?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn try_new_zeroed() -> Result<Box<mem::MaybeUninit<T>>, AllocError> {
        Box::try_new_zeroed_in(Global)
    }
}
50487
// Constructors for `Box<T>` in a caller-supplied allocator `A`. These are the
// primitives the `Global`-allocator constructors above delegate to.
impl<T, A: Allocator> Box<T, A> {
    /// Allocates memory in the given allocator then places `x` into it.
    ///
    /// This doesn't actually allocate if `T` is zero-sized.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::System;
    ///
    /// let five = Box::new_in(5, System);
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_in(x: T, alloc: A) -> Self {
        let mut boxed = Self::new_uninit_in(alloc);
        // SAFETY: `boxed` owns freshly allocated, writable memory; after the
        // `write` it is fully initialized, so `assume_init` is sound.
        unsafe {
            boxed.as_mut_ptr().write(x);
            boxed.assume_init()
        }
    }

    /// Allocates memory in the given allocator then places `x` into it,
    /// returning an error if the allocation fails
    ///
    /// This doesn't actually allocate if `T` is zero-sized.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::System;
    ///
    /// let five = Box::try_new_in(5, System)?;
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn try_new_in(x: T, alloc: A) -> Result<Self, AllocError> {
        let mut boxed = Self::try_new_uninit_in(alloc)?;
        // SAFETY: same argument as `new_in`: the write fully initializes the
        // allocation before `assume_init`.
        unsafe {
            boxed.as_mut_ptr().write(x);
            Ok(boxed.assume_init())
        }
    }

    /// Constructs a new box with uninitialized contents in the provided allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    ///
    /// use std::alloc::System;
    ///
    /// let mut five = Box::<u32, _>::new_uninit_in(System);
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     five.as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_uninit_in(alloc: A) -> Box<mem::MaybeUninit<T>, A> {
        let layout = Layout::new::<mem::MaybeUninit<T>>();
        // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
        // That would make code size bigger.
        match Box::try_new_uninit_in(alloc) {
            Ok(m) => m,
            Err(_) => handle_alloc_error(layout),
        }
    }

    /// Constructs a new box with uninitialized contents in the provided allocator,
    /// returning an error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    ///
    /// use std::alloc::System;
    ///
    /// let mut five = Box::<u32, _>::try_new_uninit_in(System)?;
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     five.as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn try_new_uninit_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocError> {
        let layout = Layout::new::<mem::MaybeUninit<T>>();
        // `?` propagates allocation failure; `cast` narrows the raw byte
        // pointer to `NonNull<MaybeUninit<T>>`.
        let ptr = alloc.allocate(layout)?.cast();
        // SAFETY: `ptr` was just allocated by `alloc` with the layout of
        // `MaybeUninit<T>`, so the (pointer, allocator) pair forms a valid box.
        unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
    }

    /// Constructs a new `Box` with uninitialized contents, with the memory
    /// being filled with `0` bytes in the provided allocator.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
    /// of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    ///
    /// use std::alloc::System;
    ///
    /// let zero = Box::<u32, _>::new_zeroed_in(System);
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_zeroed_in(alloc: A) -> Box<mem::MaybeUninit<T>, A> {
        let layout = Layout::new::<mem::MaybeUninit<T>>();
        // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
        // That would make code size bigger.
        match Box::try_new_zeroed_in(alloc) {
            Ok(m) => m,
            Err(_) => handle_alloc_error(layout),
        }
    }

    /// Constructs a new `Box` with uninitialized contents, with the memory
    /// being filled with `0` bytes in the provided allocator,
    /// returning an error if the allocation fails,
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
    /// of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    ///
    /// use std::alloc::System;
    ///
    /// let zero = Box::<u32, _>::try_new_zeroed_in(System)?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn try_new_zeroed_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocError> {
        let layout = Layout::new::<mem::MaybeUninit<T>>();
        // Same as `try_new_uninit_in`, but the allocator zero-fills the block.
        let ptr = alloc.allocate_zeroed(layout)?.cast();
        // SAFETY: `ptr` was just allocated by `alloc` with this layout.
        unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
    }

    /// Constructs a new `Pin<Box<T, A>>`. If `T` does not implement `Unpin`, then
    /// `x` will be pinned in memory and unable to be moved.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline(always)]
    pub fn pin_in(x: T, alloc: A) -> Pin<Self>
    where
        A: 'static,
    {
        Self::new_in(x, alloc).into()
    }

    /// Converts a `Box<T>` into a `Box<[T]>`
    ///
    /// This conversion does not allocate on the heap and happens in place.
    #[unstable(feature = "box_into_boxed_slice", issue = "71582")]
    pub fn into_boxed_slice(boxed: Self) -> Box<[T], A> {
        let (raw, alloc) = Box::into_raw_with_allocator(boxed);
        // SAFETY: a single `T` has the same layout as `[T; 1]`; the resulting
        // `Box<[T; 1], A>` then unsizes to `Box<[T], A>` at the return.
        unsafe { Box::from_raw_in(raw as *mut [T; 1], alloc) }
    }

    /// Consumes the `Box`, returning the wrapped value.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(box_into_inner)]
    ///
    /// let c = Box::new(5);
    ///
    /// assert_eq!(Box::into_inner(c), 5);
    /// ```
    #[unstable(feature = "box_into_inner", issue = "80437")]
    #[inline]
    pub fn into_inner(boxed: Self) -> T {
        // Compiler-special box deref: moves the value out and frees the
        // allocation without running `T`'s destructor twice.
        *boxed
    }
}
50698
// Constructors for boxed slices in the `Global` allocator. Both delegate to
// `RawVec`, which handles the layout computation for `[T]` allocations.
impl<T> Box<[T]> {
    /// Constructs a new boxed slice with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    ///
    /// let mut values = Box::<[u32]>::new_uninit_slice(3);
    ///
    /// let values = unsafe {
    ///     // Deferred initialization:
    ///     values[0].as_mut_ptr().write(1);
    ///     values[1].as_mut_ptr().write(2);
    ///     values[2].as_mut_ptr().write(3);
    ///
    ///     values.assume_init()
    /// };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
        // SAFETY: the `RawVec` was created with capacity exactly `len`, which
        // `into_box` requires.
        unsafe { RawVec::with_capacity(len).into_box(len) }
    }

    /// Constructs a new boxed slice with uninitialized contents, with the memory
    /// being filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
    /// of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    ///
    /// let values = Box::<[u32]>::new_zeroed_slice(3);
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [0, 0, 0])
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_zeroed_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
        // SAFETY: as above; the backing memory is additionally zero-filled.
        unsafe { RawVec::with_capacity_zeroed(len).into_box(len) }
    }
}
50748
// Allocator-parameterized versions of the boxed-slice constructors above.
impl<T, A: Allocator> Box<[T], A> {
    /// Constructs a new boxed slice with uninitialized contents in the provided allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    ///
    /// use std::alloc::System;
    ///
    /// let mut values = Box::<[u32], _>::new_uninit_slice_in(3, System);
    ///
    /// let values = unsafe {
    ///     // Deferred initialization:
    ///     values[0].as_mut_ptr().write(1);
    ///     values[1].as_mut_ptr().write(2);
    ///     values[2].as_mut_ptr().write(3);
    ///
    ///     values.assume_init()
    /// };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_uninit_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit<T>], A> {
        // SAFETY: the `RawVec` has capacity exactly `len`, as `into_box` requires.
        unsafe { RawVec::with_capacity_in(len, alloc).into_box(len) }
    }

    /// Constructs a new boxed slice with uninitialized contents in the provided allocator,
    /// with the memory being filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
    /// of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    ///
    /// use std::alloc::System;
    ///
    /// let values = Box::<[u32], _>::new_zeroed_slice_in(3, System);
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [0, 0, 0])
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit<T>], A> {
        // SAFETY: as above; the backing memory is additionally zero-filled.
        unsafe { RawVec::with_capacity_zeroed_in(len, alloc).into_box(len) }
    }
}
50804
50805impl<T, A: Allocator> Box<mem::MaybeUninit<T>, A> {
50806    /// Converts to `Box<T, A>`.
50807    ///
50808    /// # Safety
50809    ///
50810    /// As with [`MaybeUninit::assume_init`],
50811    /// it is up to the caller to guarantee that the value
50812    /// really is in an initialized state.
50813    /// Calling this when the content is not yet fully initialized
50814    /// causes immediate undefined behavior.
50815    ///
50816    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
50817    ///
50818    /// # Examples
50819    ///
50820    /// ```
50821    /// #![feature(new_uninit)]
50822    ///
50823    /// let mut five = Box::<u32>::new_uninit();
50824    ///
50825    /// let five: Box<u32> = unsafe {
50826    ///     // Deferred initialization:
50827    ///     five.as_mut_ptr().write(5);
50828    ///
50829    ///     five.assume_init()
50830    /// };
50831    ///
50832    /// assert_eq!(*five, 5)
50833    /// ```
50834    #[unstable(feature = "new_uninit", issue = "63291")]
50835    #[inline]
50836    pub unsafe fn assume_init(self) -> Box<T, A> {
50837        let (raw, alloc) = Box::into_raw_with_allocator(self);
50838        unsafe { Box::from_raw_in(raw as *mut T, alloc) }
50839    }
50840}
50841
impl<T, A: Allocator> Box<[mem::MaybeUninit<T>], A> {
    /// Converts to `Box<[T], A>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the values
    /// really are in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    ///
    /// let mut values = Box::<[u32]>::new_uninit_slice(3);
    ///
    /// let values = unsafe {
    ///     // Deferred initialization:
    ///     values[0].as_mut_ptr().write(1);
    ///     values[1].as_mut_ptr().write(2);
    ///     values[2].as_mut_ptr().write(3);
    ///
    ///     values.assume_init()
    /// };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub unsafe fn assume_init(self) -> Box<[T], A> {
        // Take the box apart without dropping, then rebuild at the
        // initialized element type (an `as` cast is required here because
        // `[T]` is unsized).
        let (raw, alloc) = Box::into_raw_with_allocator(self);
        // SAFETY: caller guarantees every element is initialized, and
        // `[MaybeUninit<T>]` has the same layout as `[T]` of equal length.
        unsafe { Box::from_raw_in(raw as *mut [T], alloc) }
    }
}
50880
impl<T: ?Sized> Box<T> {
    /// Constructs a box from a raw pointer.
    ///
    /// After calling this function, the raw pointer is owned by the
    /// resulting `Box`. Specifically, the `Box` destructor will call
    /// the destructor of `T` and free the allocated memory. For this
    /// to be safe, the memory must have been allocated in accordance
    /// with the [memory layout] used by `Box` .
    ///
    /// # Safety
    ///
    /// This function is unsafe because improper use may lead to
    /// memory problems. For example, a double-free may occur if the
    /// function is called twice on the same raw pointer.
    ///
    /// The safety conditions are described in the [memory layout] section.
    ///
    /// # Examples
    ///
    /// Recreate a `Box` which was previously converted to a raw pointer
    /// using [`Box::into_raw`]:
    /// ```
    /// let x = Box::new(5);
    /// let ptr = Box::into_raw(x);
    /// let x = unsafe { Box::from_raw(ptr) };
    /// ```
    /// Manually create a `Box` from scratch by using the global allocator:
    /// ```
    /// use std::alloc::{alloc, Layout};
    ///
    /// unsafe {
    ///     let ptr = alloc(Layout::new::<i32>()) as *mut i32;
    ///     // In general .write is required to avoid attempting to destruct
    ///     // the (uninitialized) previous contents of `ptr`, though for this
    ///     // simple example `*ptr = 5` would have worked as well.
    ///     ptr.write(5);
    ///     let x = Box::from_raw(ptr);
    /// }
    /// ```
    ///
    /// [memory layout]: self#memory-layout
    /// [`Layout`]: crate::Layout
    #[stable(feature = "box_raw", since = "1.4.0")]
    #[inline]
    pub unsafe fn from_raw(raw: *mut T) -> Self {
        // SAFETY: forwarded to the caller; a `Box<T>` is defined to own
        // memory from the `Global` allocator, so delegate with `Global`.
        unsafe { Self::from_raw_in(raw, Global) }
    }
}
50929
50930impl<T: ?Sized, A: Allocator> Box<T, A> {
50931    /// Constructs a box from a raw pointer in the given allocator.
50932    ///
50933    /// After calling this function, the raw pointer is owned by the
50934    /// resulting `Box`. Specifically, the `Box` destructor will call
50935    /// the destructor of `T` and free the allocated memory. For this
50936    /// to be safe, the memory must have been allocated in accordance
50937    /// with the [memory layout] used by `Box` .
50938    ///
50939    /// # Safety
50940    ///
50941    /// This function is unsafe because improper use may lead to
50942    /// memory problems. For example, a double-free may occur if the
50943    /// function is called twice on the same raw pointer.
50944    ///
50945    ///
50946    /// # Examples
50947    ///
50948    /// Recreate a `Box` which was previously converted to a raw pointer
50949    /// using [`Box::into_raw_with_allocator`]:
50950    /// ```
50951    /// #![feature(allocator_api)]
50952    ///
50953    /// use std::alloc::System;
50954    ///
50955    /// let x = Box::new_in(5, System);
50956    /// let (ptr, alloc) = Box::into_raw_with_allocator(x);
50957    /// let x = unsafe { Box::from_raw_in(ptr, alloc) };
50958    /// ```
50959    /// Manually create a `Box` from scratch by using the system allocator:
50960    /// ```
50961    /// #![feature(allocator_api, slice_ptr_get)]
50962    ///
50963    /// use std::alloc::{Allocator, Layout, System};
50964    ///
50965    /// unsafe {
50966    ///     let ptr = System.allocate(Layout::new::<i32>())?.as_mut_ptr() as *mut i32;
50967    ///     // In general .write is required to avoid attempting to destruct
50968    ///     // the (uninitialized) previous contents of `ptr`, though for this
50969    ///     // simple example `*ptr = 5` would have worked as well.
50970    ///     ptr.write(5);
50971    ///     let x = Box::from_raw_in(ptr, System);
50972    /// }
50973    /// # Ok::<(), std::alloc::AllocError>(())
50974    /// ```
50975    ///
50976    /// [memory layout]: self#memory-layout
50977    /// [`Layout`]: crate::Layout
50978    #[unstable(feature = "allocator_api", issue = "32838")]
50979    #[inline]
50980    pub unsafe fn from_raw_in(raw: *mut T, alloc: A) -> Self {
50981        Box(unsafe { Unique::new_unchecked(raw) }, alloc)
50982    }
50983
    /// Consumes the `Box`, returning a wrapped raw pointer.
    ///
    /// The pointer will be properly aligned and non-null.
    ///
    /// After calling this function, the caller is responsible for the
    /// memory previously managed by the `Box`. In particular, the
    /// caller should properly destroy `T` and release the memory, taking
    /// into account the [memory layout] used by `Box`. The easiest way to
    /// do this is to convert the raw pointer back into a `Box` with the
    /// [`Box::from_raw`] function, allowing the `Box` destructor to perform
    /// the cleanup.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `Box::into_raw(b)` instead of `b.into_raw()`. This
    /// is so that there is no conflict with a method on the inner type.
    ///
    /// # Examples
    /// Converting the raw pointer back into a `Box` with [`Box::from_raw`]
    /// for automatic cleanup:
    /// ```
    /// let x = Box::new(String::from("Hello"));
    /// let ptr = Box::into_raw(x);
    /// let x = unsafe { Box::from_raw(ptr) };
    /// ```
    /// Manual cleanup by explicitly running the destructor and deallocating
    /// the memory:
    /// ```
    /// use std::alloc::{dealloc, Layout};
    /// use std::ptr;
    ///
    /// let x = Box::new(String::from("Hello"));
    /// let p = Box::into_raw(x);
    /// unsafe {
    ///     ptr::drop_in_place(p);
    ///     dealloc(p as *mut u8, Layout::new::<String>());
    /// }
    /// ```
    ///
    /// [memory layout]: self#memory-layout
    #[stable(feature = "box_raw", since = "1.4.0")]
    #[inline]
    pub fn into_raw(b: Self) -> *mut T {
        // Delegate to the allocator-aware variant and discard the returned
        // allocator (dropping it here, instead of leaking the box's memory).
        Self::into_raw_with_allocator(b).0
    }
51028
    /// Consumes the `Box`, returning a wrapped raw pointer and the allocator.
    ///
    /// The pointer will be properly aligned and non-null.
    ///
    /// After calling this function, the caller is responsible for the
    /// memory previously managed by the `Box`. In particular, the
    /// caller should properly destroy `T` and release the memory, taking
    /// into account the [memory layout] used by `Box`. The easiest way to
    /// do this is to convert the raw pointer back into a `Box` with the
    /// [`Box::from_raw_in`] function, allowing the `Box` destructor to perform
    /// the cleanup.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `Box::into_raw_with_allocator(b)` instead of `b.into_raw_with_allocator()`. This
    /// is so that there is no conflict with a method on the inner type.
    ///
    /// # Examples
    /// Converting the raw pointer back into a `Box` with [`Box::from_raw_in`]
    /// for automatic cleanup:
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::System;
    ///
    /// let x = Box::new_in(String::from("Hello"), System);
    /// let (ptr, alloc) = Box::into_raw_with_allocator(x);
    /// let x = unsafe { Box::from_raw_in(ptr, alloc) };
    /// ```
    /// Manual cleanup by explicitly running the destructor and deallocating
    /// the memory:
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::{Allocator, Layout, System};
    /// use std::ptr::{self, NonNull};
    ///
    /// let x = Box::new_in(String::from("Hello"), System);
    /// let (ptr, alloc) = Box::into_raw_with_allocator(x);
    /// unsafe {
    ///     ptr::drop_in_place(ptr);
    ///     let non_null = NonNull::new_unchecked(ptr);
    ///     alloc.deallocate(non_null.cast(), Layout::new::<String>());
    /// }
    /// ```
    ///
    /// [memory layout]: self#memory-layout
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn into_raw_with_allocator(b: Self) -> (*mut T, A) {
        // `into_unique` leaks the box, so no destructor runs for the pointee;
        // ownership of both the allocation and the allocator moves to the caller.
        let (leaked, alloc) = Box::into_unique(b);
        (leaked.as_ptr(), alloc)
    }
51081
    #[unstable(
        feature = "ptr_internals",
        issue = "none",
        reason = "use `Box::leak(b).into()` or `Unique::from(Box::leak(b))` instead"
    )]
    #[inline]
    #[doc(hidden)]
    pub fn into_unique(b: Self) -> (Unique<T>, A) {
        // Box is recognized as a "unique pointer" by Stacked Borrows, but internally it is a
        // raw pointer for the type system. Turning it directly into a raw pointer would not be
        // recognized as "releasing" the unique pointer to permit aliased raw accesses,
        // so all raw pointer methods have to go through `Box::leak`. Turning *that* to a raw pointer
        // behaves correctly.
        // SAFETY: `b` is leaked by `Box::leak` below, so its destructor never
        // runs and the allocator moved out here by `ptr::read` is the only copy
        // that will ever be dropped.
        let alloc = unsafe { ptr::read(&b.1) };
        (Unique::from(Box::leak(b)), alloc)
    }
51098
    /// Returns a reference to the underlying allocator.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `Box::allocator(&b)` instead of `b.allocator()`. This
    /// is so that there is no conflict with a method on the inner type.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(b: &Self) -> &A {
        // The allocator is stored as the second field of the `Box` tuple struct.
        &b.1
    }
51109
    /// Consumes and leaks the `Box`, returning a mutable reference,
    /// `&'a mut T`. Note that the type `T` must outlive the chosen lifetime
    /// `'a`. If the type has only static references, or none at all, then this
    /// may be chosen to be `'static`.
    ///
    /// This function is mainly useful for data that lives for the remainder of
    /// the program's life. Dropping the returned reference will cause a memory
    /// leak. If this is not acceptable, the reference should first be wrapped
    /// with the [`Box::from_raw`] function producing a `Box`. This `Box` can
    /// then be dropped which will properly destroy `T` and release the
    /// allocated memory.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `Box::leak(b)` instead of `b.leak()`. This
    /// is so that there is no conflict with a method on the inner type.
    ///
    /// # Examples
    ///
    /// Simple usage:
    ///
    /// ```
    /// let x = Box::new(41);
    /// let static_ref: &'static mut usize = Box::leak(x);
    /// *static_ref += 1;
    /// assert_eq!(*static_ref, 42);
    /// ```
    ///
    /// Unsized data:
    ///
    /// ```
    /// let x = vec![1, 2, 3].into_boxed_slice();
    /// let static_ref = Box::leak(x);
    /// static_ref[0] = 4;
    /// assert_eq!(*static_ref, [4, 2, 3]);
    /// ```
    #[stable(feature = "box_leak", since = "1.26.0")]
    #[inline]
    pub fn leak<'a>(b: Self) -> &'a mut T
    where
        A: 'a,
    {
        // SAFETY: `ManuallyDrop` suppresses the `Box` destructor, so the
        // allocation is never freed and the returned reference remains valid
        // for the caller-chosen lifetime `'a`.
        unsafe { &mut *mem::ManuallyDrop::new(b).0.as_ptr() }
    }
51153
    /// Converts a `Box<T>` into a `Pin<Box<T>>`
    ///
    /// This conversion does not allocate on the heap and happens in place.
    ///
    /// This is also available via [`From`].
    #[unstable(feature = "box_into_pin", issue = "62370")]
    pub fn into_pin(boxed: Self) -> Pin<Self>
    where
        // NOTE(review): the `A: 'static` bound mirrors the one on the
        // `From<Box<T, A>> for Pin<Box<T, A>>` impl below.
        A: 'static,
    {
        // It's not possible to move or replace the insides of a `Pin<Box<T>>`
        // when `T: !Unpin`,  so it's safe to pin it directly without any
        // additional requirements.
        unsafe { Pin::new_unchecked(boxed) }
    }
51169}
51170
#[stable(feature = "rust1", since = "1.0.0")]
// `#[may_dangle]` (the "dropck eyepatch") asserts to the drop checker that
// this destructor does not access the `T` behind the pointer except to drop it.
unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Box<T, A> {
    fn drop(&mut self) {
        // FIXME: Do nothing, drop is currently performed by compiler.
    }
}
51177
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Default> Default for Box<T> {
    /// Creates a `Box<T>`, with the `Default` value for T.
    fn default() -> Self {
        // Uses the unstable `box` placement syntax, available inside the
        // standard library.
        box T::default()
    }
}
51185
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Default for Box<[T]> {
    /// Creates an empty boxed slice by unsize-coercing a `Box<[T; 0]>`.
    fn default() -> Self {
        Box::<[T; 0]>::new([])
    }
}
51192
#[stable(feature = "default_box_extra", since = "1.17.0")]
impl Default for Box<str> {
    /// Creates an empty boxed `str`.
    fn default() -> Self {
        // SAFETY: the default `Box<[u8]>` is empty, and the empty byte
        // sequence is trivially valid UTF-8.
        unsafe { from_boxed_utf8_unchecked(Default::default()) }
    }
}
51199
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone, A: Allocator + Clone> Clone for Box<T, A> {
    /// Returns a new box with a `clone()` of this box's contents.
    ///
    /// # Examples
    ///
    /// ```
    /// let x = Box::new(5);
    /// let y = x.clone();
    ///
    /// // The value is the same
    /// assert_eq!(x, y);
    ///
    /// // But they are unique objects
    /// assert_ne!(&*x as *const i32, &*y as *const i32);
    /// ```
    #[inline]
    fn clone(&self) -> Self {
        // Pre-allocate memory to allow writing the cloned value directly.
        let mut boxed = Self::new_uninit_in(self.1.clone());
        unsafe {
            // SAFETY: `boxed` was freshly allocated as uninitialized storage
            // for a `T` and is initialized by `write_clone_into_raw` before
            // `assume_init` is called.
            (**self).write_clone_into_raw(boxed.as_mut_ptr());
            boxed.assume_init()
        }
    }

    /// Copies `source`'s contents into `self` without creating a new allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// let x = Box::new(5);
    /// let mut y = Box::new(10);
    /// let yp: *const i32 = &*y;
    ///
    /// y.clone_from(&x);
    ///
    /// // The value is the same
    /// assert_eq!(x, y);
    ///
    /// // And no allocation occurred
    /// assert_eq!(yp, &*y);
    /// ```
    #[inline]
    fn clone_from(&mut self, source: &Self) {
        // Delegates to `T::clone_from` on the pointees, reusing `self`'s
        // existing allocation.
        (**self).clone_from(&(**source));
    }
}
51248
#[stable(feature = "box_slice_clone", since = "1.3.0")]
impl Clone for Box<str> {
    fn clone(&self) -> Self {
        // this makes a copy of the data
        let buf: Box<[u8]> = self.as_bytes().into();
        // SAFETY: `buf` is a byte-for-byte copy of `self`, which is valid UTF-8.
        unsafe { from_boxed_utf8_unchecked(buf) }
    }
}
51257
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Box<T, A> {
    // Equality delegates to the pointed-to values; the UFCS form avoids
    // picking up any inherent method on `T`.
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        PartialEq::eq(&**self, &**other)
    }
    #[inline]
    fn ne(&self, other: &Self) -> bool {
        PartialEq::ne(&**self, &**other)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Box<T, A> {
    // All comparison operators delegate to the pointed-to values.
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }
    #[inline]
    fn lt(&self, other: &Self) -> bool {
        PartialOrd::lt(&**self, &**other)
    }
    #[inline]
    fn le(&self, other: &Self) -> bool {
        PartialOrd::le(&**self, &**other)
    }
    #[inline]
    fn ge(&self, other: &Self) -> bool {
        PartialOrd::ge(&**self, &**other)
    }
    #[inline]
    fn gt(&self, other: &Self) -> bool {
        PartialOrd::gt(&**self, &**other)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Ord, A: Allocator> Ord for Box<T, A> {
    // Total ordering delegates to the pointed-to values.
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        Ord::cmp(&**self, &**other)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
// Marker impl: `Box<T>` is `Eq` whenever the pointee is.
impl<T: ?Sized + Eq, A: Allocator> Eq for Box<T, A> {}
51301
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Hash, A: Allocator> Hash for Box<T, A> {
    // Hashes the pointed-to value, so a `Box<T>` hashes identically to `T`.
    fn hash<H: Hasher>(&self, state: &mut H) {
        (**self).hash(state);
    }
}
51308
#[stable(feature = "indirect_hasher_impl", since = "1.22.0")]
impl<T: ?Sized + Hasher, A: Allocator> Hasher for Box<T, A> {
    // Every `Hasher` method forwards to the boxed hasher so that `Box<H>`
    // can be used wherever an `H: Hasher` is expected.
    fn finish(&self) -> u64 {
        (**self).finish()
    }
    fn write(&mut self, bytes: &[u8]) {
        (**self).write(bytes)
    }
    fn write_u8(&mut self, i: u8) {
        (**self).write_u8(i)
    }
    fn write_u16(&mut self, i: u16) {
        (**self).write_u16(i)
    }
    fn write_u32(&mut self, i: u32) {
        (**self).write_u32(i)
    }
    fn write_u64(&mut self, i: u64) {
        (**self).write_u64(i)
    }
    fn write_u128(&mut self, i: u128) {
        (**self).write_u128(i)
    }
    fn write_usize(&mut self, i: usize) {
        (**self).write_usize(i)
    }
    fn write_i8(&mut self, i: i8) {
        (**self).write_i8(i)
    }
    fn write_i16(&mut self, i: i16) {
        (**self).write_i16(i)
    }
    fn write_i32(&mut self, i: i32) {
        (**self).write_i32(i)
    }
    fn write_i64(&mut self, i: i64) {
        (**self).write_i64(i)
    }
    fn write_i128(&mut self, i: i128) {
        (**self).write_i128(i)
    }
    fn write_isize(&mut self, i: isize) {
        (**self).write_isize(i)
    }
}
51354
#[stable(feature = "from_for_ptrs", since = "1.6.0")]
impl<T> From<T> for Box<T> {
    /// Converts a generic type `T` into a `Box<T>`
    ///
    /// The conversion allocates on the heap and moves `t`
    /// from the stack into it.
    ///
    /// # Examples
    /// ```rust
    /// let x = 5;
    /// let boxed = Box::new(5);
    ///
    /// assert_eq!(Box::from(x), boxed);
    /// ```
    fn from(t: T) -> Self {
        // Identical to calling `Box::new(t)` directly.
        Box::new(t)
    }
}
51373
#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Pin<Box<T, A>>
where
    // NOTE(review): same `A: 'static` bound as `Box::into_pin`, which this
    // impl delegates to.
    A: 'static,
{
    /// Converts a `Box<T>` into a `Pin<Box<T>>`
    ///
    /// This conversion does not allocate on the heap and happens in place.
    fn from(boxed: Box<T, A>) -> Self {
        Box::into_pin(boxed)
    }
}
51386
#[stable(feature = "box_from_slice", since = "1.17.0")]
impl<T: Copy> From<&[T]> for Box<[T]> {
    /// Converts a `&[T]` into a `Box<[T]>`
    ///
    /// This conversion allocates on the heap
    /// and performs a copy of `slice`.
    ///
    /// # Examples
    /// ```rust
    /// // create a &[u8] which will be used to create a Box<[u8]>
    /// let slice: &[u8] = &[104, 101, 108, 108, 111];
    /// let boxed_slice: Box<[u8]> = Box::from(slice);
    ///
    /// println!("{:?}", boxed_slice);
    /// ```
    fn from(slice: &[T]) -> Box<[T]> {
        let len = slice.len();
        let buf = RawVec::with_capacity(len);
        unsafe {
            // SAFETY: `buf` was just allocated with capacity for `len`
            // elements and cannot overlap `slice`; `T: Copy` makes the
            // bitwise copy a valid duplication.
            ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len);
            buf.into_box(slice.len()).assume_init()
        }
    }
}
51411
#[stable(feature = "box_from_cow", since = "1.45.0")]
impl<T: Copy> From<Cow<'_, [T]>> for Box<[T]> {
    #[inline]
    fn from(cow: Cow<'_, [T]>) -> Box<[T]> {
        // Dispatch to the `From<&[T]>` or `From<Vec<T>>` impl depending on
        // whether the `Cow` is borrowed or owned.
        match cow {
            Cow::Borrowed(slice) => Box::from(slice),
            Cow::Owned(slice) => Box::from(slice),
        }
    }
}
51422
#[stable(feature = "box_from_slice", since = "1.17.0")]
impl From<&str> for Box<str> {
    /// Converts a `&str` into a `Box<str>`
    ///
    /// This conversion allocates on the heap
    /// and performs a copy of `s`.
    ///
    /// # Examples
    /// ```rust
    /// let boxed: Box<str> = Box::from("hello");
    /// println!("{}", boxed);
    /// ```
    #[inline]
    fn from(s: &str) -> Box<str> {
        // SAFETY: the boxed bytes were copied from `s`, which is valid UTF-8.
        unsafe { from_boxed_utf8_unchecked(Box::from(s.as_bytes())) }
    }
}
51440
#[stable(feature = "box_from_cow", since = "1.45.0")]
impl From<Cow<'_, str>> for Box<str> {
    #[inline]
    fn from(cow: Cow<'_, str>) -> Box<str> {
        // Dispatch to the `From<&str>` or `From<String>` impl depending on
        // whether the `Cow` is borrowed or owned.
        match cow {
            Cow::Borrowed(s) => Box::from(s),
            Cow::Owned(s) => Box::from(s),
        }
    }
}
51451
#[stable(feature = "boxed_str_conv", since = "1.19.0")]
impl<A: Allocator> From<Box<str, A>> for Box<[u8], A> {
    /// Converts a `Box<str>` into a `Box<[u8]>`
    ///
    /// This conversion does not allocate on the heap and happens in place.
    ///
    /// # Examples
    /// ```rust
    /// // create a Box<str> which will be used to create a Box<[u8]>
    /// let boxed: Box<str> = Box::from("hello");
    /// let boxed_str: Box<[u8]> = Box::from(boxed);
    ///
    /// // create a &[u8] which will be used to create a Box<[u8]>
    /// let slice: &[u8] = &[104, 101, 108, 108, 111];
    /// let boxed_slice = Box::from(slice);
    ///
    /// assert_eq!(boxed_slice, boxed_str);
    /// ```
    #[inline]
    fn from(s: Box<str, A>) -> Self {
        let (raw, alloc) = Box::into_raw_with_allocator(s);
        // SAFETY: `str` has the same layout as `[u8]`, so the cast reuses the
        // same allocation (with the same allocator) in place.
        unsafe { Box::from_raw_in(raw as *mut [u8], alloc) }
    }
}
51476
#[stable(feature = "box_from_array", since = "1.45.0")]
impl<T, const N: usize> From<[T; N]> for Box<[T]> {
    /// Converts a `[T; N]` into a `Box<[T]>`
    ///
    /// This conversion moves the array to newly heap-allocated memory.
    ///
    /// # Examples
    /// ```rust
    /// let boxed: Box<[u8]> = Box::from([4, 2]);
    /// println!("{:?}", boxed);
    /// ```
    fn from(array: [T; N]) -> Box<[T]> {
        // `box` placement allocates `Box<[T; N]>`, which the return type then
        // unsize-coerces to `Box<[T]>`.
        box array
    }
}
51492
#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
impl<T, const N: usize> TryFrom<Box<[T]>> for Box<[T; N]> {
    // On length mismatch the original boxed slice is handed back unchanged.
    type Error = Box<[T]>;

    fn try_from(boxed_slice: Box<[T]>) -> Result<Self, Self::Error> {
        if boxed_slice.len() == N {
            // SAFETY: the length was just checked to be exactly `N`, and a
            // slice of `N` elements has the same layout as `[T; N]`.
            Ok(unsafe { Box::from_raw(Box::into_raw(boxed_slice) as *mut [T; N]) })
        } else {
            Err(boxed_slice)
        }
    }
}
51505
impl<A: Allocator> Box<dyn Any, A> {
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    /// Attempt to downcast the box to a concrete type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::any::Any;
    ///
    /// fn print_if_string(value: Box<dyn Any>) {
    ///     if let Ok(string) = value.downcast::<String>() {
    ///         println!("String ({}): {}", string.len(), string);
    ///     }
    /// }
    ///
    /// let my_string = "Hello World".to_string();
    /// print_if_string(Box::new(my_string));
    /// print_if_string(Box::new(0i8));
    /// ```
    pub fn downcast<T: Any>(self) -> Result<Box<T, A>, Self> {
        if self.is::<T>() {
            unsafe {
                // SAFETY: `is::<T>()` just confirmed the erased value is a `T`,
                // so casting the raw pointer to `*mut T` and rebuilding the box
                // with the same allocator is sound.
                let (raw, alloc): (*mut dyn Any, _) = Box::into_raw_with_allocator(self);
                Ok(Box::from_raw_in(raw as *mut T, alloc))
            }
        } else {
            // Wrong type: give the caller back ownership of the original box.
            Err(self)
        }
    }
}
51537
impl<A: Allocator> Box<dyn Any + Send, A> {
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    /// Attempt to downcast the box to a concrete type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::any::Any;
    ///
    /// fn print_if_string(value: Box<dyn Any + Send>) {
    ///     if let Ok(string) = value.downcast::<String>() {
    ///         println!("String ({}): {}", string.len(), string);
    ///     }
    /// }
    ///
    /// let my_string = "Hello World".to_string();
    /// print_if_string(Box::new(my_string));
    /// print_if_string(Box::new(0i8));
    /// ```
    pub fn downcast<T: Any>(self) -> Result<Box<T, A>, Self> {
        if self.is::<T>() {
            unsafe {
                // SAFETY: `is::<T>()` just confirmed the erased value is a `T`,
                // so casting the raw pointer to `*mut T` and rebuilding the box
                // with the same allocator is sound.
                let (raw, alloc): (*mut (dyn Any + Send), _) = Box::into_raw_with_allocator(self);
                Ok(Box::from_raw_in(raw as *mut T, alloc))
            }
        } else {
            // Wrong type: give the caller back ownership of the original box.
            Err(self)
        }
    }
}
51569
impl<A: Allocator> Box<dyn Any + Send + Sync, A> {
    #[inline]
    #[stable(feature = "box_send_sync_any_downcast", since = "1.51.0")]
    /// Attempt to downcast the box to a concrete type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::any::Any;
    ///
    /// fn print_if_string(value: Box<dyn Any + Send + Sync>) {
    ///     if let Ok(string) = value.downcast::<String>() {
    ///         println!("String ({}): {}", string.len(), string);
    ///     }
    /// }
    ///
    /// let my_string = "Hello World".to_string();
    /// print_if_string(Box::new(my_string));
    /// print_if_string(Box::new(0i8));
    /// ```
    pub fn downcast<T: Any>(self) -> Result<Box<T, A>, Self> {
        if self.is::<T>() {
            unsafe {
                // SAFETY: `is::<T>()` just confirmed the erased value is a `T`,
                // so casting the raw pointer to `*mut T` and rebuilding the box
                // with the same allocator is sound.
                let (raw, alloc): (*mut (dyn Any + Send + Sync), _) =
                    Box::into_raw_with_allocator(self);
                Ok(Box::from_raw_in(raw as *mut T, alloc))
            }
        } else {
            // Wrong type: give the caller back ownership of the original box.
            Err(self)
        }
    }
}
51602
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Display + ?Sized, A: Allocator> fmt::Display for Box<T, A> {
    // Formats the pointed-to value, so a boxed value displays like `T`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}
51609
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Debug + ?Sized, A: Allocator> fmt::Debug for Box<T, A> {
    // Debug-formats the pointed-to value, so a boxed value debugs like `T`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}
51616
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> fmt::Pointer for Box<T, A> {
    // Prints the address of the heap allocation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // It's not possible to extract the inner Uniq directly from the Box,
        // instead we cast it to a *const which aliases the Unique
        let ptr: *const T = &**self;
        fmt::Pointer::fmt(&ptr, f)
    }
}
51626
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> Deref for Box<T, A> {
    type Target = T;

    fn deref(&self) -> &T {
        // `Box` is a compiler-known type: the double deref here is the
        // built-in deref of the box itself, not a recursive `Deref` call.
        &**self
    }
}
51635
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> DerefMut for Box<T, A> {
    fn deref_mut(&mut self) -> &mut T {
        // Built-in deref of the box (see the `Deref` impl above for why this
        // is not recursive).
        &mut **self
    }
}
51642
#[unstable(feature = "receiver_trait", issue = "none")]
// Marker impl: allows `self: Box<Self>` method receivers.
impl<T: ?Sized, A: Allocator> Receiver for Box<T, A> {}
51645
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator + ?Sized, A: Allocator> Iterator for Box<I, A> {
    type Item = I::Item;
    fn next(&mut self) -> Option<I::Item> {
        (**self).next()
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        (**self).size_hint()
    }
    fn nth(&mut self, n: usize) -> Option<I::Item> {
        (**self).nth(n)
    }
    fn last(self) -> Option<I::Item> {
        // `last` cannot simply forward because `I` may be unsized; dispatch
        // through the `BoxIter` helper trait, which specializes on sized `I`.
        BoxIter::last(self)
    }
}
51662
/// Helper trait used to specialize `Iterator::last` for `Box<I, A>`
/// depending on whether `I` is sized.
trait BoxIter {
    type Item;
    fn last(self) -> Option<Self::Item>;
}
51667
impl<I: Iterator + ?Sized, A: Allocator> BoxIter for Box<I, A> {
    // Default (possibly-unsized) case: the unsized `I` cannot be moved out of
    // the box, so drive the box's own `Iterator` impl to completion instead.
    default fn last(self) -> Option<I::Item> {
        #[inline]
        fn some<T>(_: Option<T>, x: T) -> Option<T> {
            Some(x)
        }

        // Fold keeps only the most recent item, yielding the last element.
        self.fold(None, some)
    }
}
51679
/// Specialization for sized `I`s that uses `I`s implementation of `last()`
/// instead of the default.
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator, A: Allocator> BoxIter for Box<I, A> {
    fn last(self) -> Option<I::Item> {
        // `I: Sized` here, so the iterator can be moved out of the box.
        (*self).last()
    }
}
51688
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: DoubleEndedIterator + ?Sized, A: Allocator> DoubleEndedIterator for Box<I, A> {
    // Forwarded to the boxed iterator.
    fn next_back(&mut self) -> Option<I::Item> {
        (**self).next_back()
    }
    fn nth_back(&mut self, n: usize) -> Option<I::Item> {
        (**self).nth_back(n)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: ExactSizeIterator + ?Sized, A: Allocator> ExactSizeIterator for Box<I, A> {
    // Forwarded to the boxed iterator.
    fn len(&self) -> usize {
        (**self).len()
    }
    fn is_empty(&self) -> bool {
        (**self).is_empty()
    }
}
51707
#[stable(feature = "fused", since = "1.26.0")]
// Marker impl: a boxed fused iterator is itself fused.
impl<I: FusedIterator + ?Sized, A: Allocator> FusedIterator for Box<I, A> {}
51710
#[stable(feature = "boxed_closure_impls", since = "1.35.0")]
impl<Args, F: FnOnce<Args> + ?Sized, A: Allocator> FnOnce<Args> for Box<F, A> {
    type Output = <F as FnOnce<Args>>::Output;

    // `extern "rust-call"` is the ABI required by the unstable `Fn*` traits.
    extern "rust-call" fn call_once(self, args: Args) -> Self::Output {
        // `*self` moves the callable out of the box, consuming it.
        <F as FnOnce<Args>>::call_once(*self, args)
    }
}
51719
#[stable(feature = "boxed_closure_impls", since = "1.35.0")]
impl<Args, F: FnMut<Args> + ?Sized, A: Allocator> FnMut<Args> for Box<F, A> {
    // Calls the boxed callable through a mutable reference.
    extern "rust-call" fn call_mut(&mut self, args: Args) -> Self::Output {
        <F as FnMut<Args>>::call_mut(self, args)
    }
}
51726
#[stable(feature = "boxed_closure_impls", since = "1.35.0")]
impl<Args, F: Fn<Args> + ?Sized, A: Allocator> Fn<Args> for Box<F, A> {
    // Calls the boxed callable through a shared reference.
    extern "rust-call" fn call(&self, args: Args) -> Self::Output {
        <F as Fn<Args>>::call(self, args)
    }
}
51733
#[unstable(feature = "coerce_unsized", issue = "27732")]
// Allows e.g. `Box<[T; N]> -> Box<[T]>` and `Box<T> -> Box<dyn Trait>` coercions.
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Box<U, A>> for Box<T, A> {}
51736
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
// Allows `Box<Self>` to be used as a dyn-dispatchable method receiver.
// Note this is restricted to the `Global` allocator.
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Box<U>> for Box<T, Global> {}
51739
#[stable(feature = "boxed_slice_from_iter", since = "1.32.0")]
impl<I> FromIterator<I> for Box<[I]> {
    // Collects into a `Vec` first, then shrinks it into an exact-size boxed slice.
    fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
        iter.into_iter().collect::<Vec<_>>().into_boxed_slice()
    }
}
51746
#[stable(feature = "box_slice_clone", since = "1.3.0")]
impl<T: Clone, A: Allocator + Clone> Clone for Box<[T], A> {
    fn clone(&self) -> Self {
        // Clone the allocator too, so the new slice lives in a matching allocator.
        let alloc = Box::allocator(self).clone();
        self.to_vec_in(alloc).into_boxed_slice()
    }

    fn clone_from(&mut self, other: &Self) {
        if self.len() == other.len() {
            // Same length: clone element-wise into the existing allocation.
            self.clone_from_slice(&other);
        } else {
            // Length differs: fall back to a full reallocation via `clone`.
            *self = other.clone();
        }
    }
}
51762
#[stable(feature = "box_borrow", since = "1.1.0")]
impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Box<T, A> {
    // A `Box<T>` borrows as a `&T` of its pointee.
    fn borrow(&self) -> &T {
        &**self
    }
}
51769
#[stable(feature = "box_borrow", since = "1.1.0")]
impl<T: ?Sized, A: Allocator> borrow::BorrowMut<T> for Box<T, A> {
    // A `Box<T>` borrows mutably as a `&mut T` of its pointee.
    fn borrow_mut(&mut self) -> &mut T {
        &mut **self
    }
}
51776
#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
impl<T: ?Sized, A: Allocator> AsRef<T> for Box<T, A> {
    // Cheap reference conversion to the pointee.
    fn as_ref(&self) -> &T {
        &**self
    }
}
51783
#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
impl<T: ?Sized, A: Allocator> AsMut<T> for Box<T, A> {
    // Cheap mutable reference conversion to the pointee.
    fn as_mut(&mut self) -> &mut T {
        &mut **self
    }
}
51790
/* Nota bene
 *
 *  We could have chosen not to add this impl, and instead have written a
 *  function of Pin<Box<T>> to Pin<T>. Such a function would not be sound,
 *  because Box<T> implements Unpin even when T does not, as a result of
 *  this impl.
 *
 *  We chose this API instead of the alternative for a few reasons:
 *      - Logically, it is helpful to understand pinning in regard to the
 *        memory region being pointed to. For this reason none of the
 *        standard library pointer types support projecting through a pin
 *        (Box<T> is the only pointer type in std for which this would be
 *        safe.)
 *      - It is in practice very useful to have Box<T> be unconditionally
 *        Unpin because of trait objects, for which the structural auto
 *        trait functionality does not apply (e.g., Box<dyn Foo> would
 *        otherwise not be Unpin).
 *
 *  Another type with the same semantics as Box but only a conditional
 *  implementation of `Unpin` (where `T: Unpin`) would be valid/safe, and
 *  could have a method to project a Pin<T> from it.
 */
#[stable(feature = "pin", since = "1.33.0")]
// `Box<T>` is `Unpin` regardless of `T` — see the nota bene above.
impl<T: ?Sized, A: Allocator> Unpin for Box<T, A> where A: 'static {}
51815
#[unstable(feature = "generator_trait", issue = "43122")]
impl<G: ?Sized + Generator<R> + Unpin, R, A: Allocator> Generator<R> for Box<G, A>
where
    A: 'static,
{
    type Yield = G::Yield;
    type Return = G::Return;

    fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState<Self::Yield, Self::Return> {
        // `G: Unpin` is what makes the safe `Pin::new` constructor usable here.
        G::resume(Pin::new(&mut *self), arg)
    }
}
51828
#[unstable(feature = "generator_trait", issue = "43122")]
impl<G: ?Sized + Generator<R>, R, A: Allocator> Generator<R> for Pin<Box<G, A>>
where
    A: 'static,
{
    type Yield = G::Yield;
    type Return = G::Return;

    fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState<Self::Yield, Self::Return> {
        // The generator is already pinned behind the box; `as_mut` reborrows
        // the inner pin, so no `Unpin` bound on `G` is needed.
        G::resume((*self).as_mut(), arg)
    }
}
51841
#[stable(feature = "futures_api", since = "1.36.0")]
impl<F: ?Sized + Future + Unpin, A: Allocator> Future for Box<F, A>
where
    A: 'static,
{
    type Output = F::Output;

    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        // `F: Unpin` is what makes the safe `Pin::new` constructor usable here.
        F::poll(Pin::new(&mut *self), cx)
    }
}
51853
#[unstable(feature = "async_stream", issue = "79024")]
impl<S: ?Sized + Stream + Unpin> Stream for Box<S> {
    type Item = S::Item;

    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        // `S: Unpin` is what makes the safe `Pin::new` constructor usable here.
        Pin::new(&mut **self).poll_next(cx)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        (**self).size_hint()
    }
}
51866use super::*;
51867use std::cell::Cell;
51868
#[test]
fn allocator_param() {
    use crate::alloc::AllocError;

    // Writing a test of integration between third-party
    // allocators and `RawVec` is a little tricky because the `RawVec`
    // API does not expose fallible allocation methods, so we
    // cannot check what happens when allocator is exhausted
    // (beyond detecting a panic).
    //
    // Instead, this just checks that the `RawVec` methods do at
    // least go through the Allocator API when it reserves
    // storage.

    // A dumb allocator that consumes a fixed amount of fuel
    // before allocation attempts start failing.
    struct BoundedAlloc {
        fuel: Cell<usize>,
    }
    unsafe impl Allocator for BoundedAlloc {
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            let size = layout.size();
            // Refuse the request outright if it exceeds the remaining fuel.
            if size > self.fuel.get() {
                return Err(AllocError);
            }
            match Global.allocate(layout) {
                ok @ Ok(_) => {
                    // Only successful allocations consume fuel.
                    self.fuel.set(self.fuel.get() - size);
                    ok
                }
                err @ Err(_) => err,
            }
        }
        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            // Deallocation simply forwards to `Global`; fuel is never refunded.
            unsafe { Global.deallocate(ptr, layout) }
        }
    }

    let a = BoundedAlloc { fuel: Cell::new(500) };
    let mut v: RawVec<u8, _> = RawVec::with_capacity_in(50, a);
    // The initial 50-byte allocation must have gone through `BoundedAlloc`.
    assert_eq!(v.alloc.fuel.get(), 450);
    v.reserve(50, 150); // (causes a realloc, thus using 50 + 150 = 200 units of fuel)
    assert_eq!(v.alloc.fuel.get(), 250);
}
51913
#[test]
fn reserve_does_not_overallocate() {
    // Starting from empty, `reserve` allocates exactly the requested amount,
    // just like `reserve_exact`.
    let mut fresh: RawVec<u32> = RawVec::new();
    fresh.reserve(0, 9);
    assert_eq!(9, fresh.capacity());

    // A request far beyond double the current capacity is also exact:
    // 97 is more than twice 7, so `reserve` acts like `reserve_exact`.
    let mut exact: RawVec<u32> = RawVec::new();
    exact.reserve(0, 7);
    assert_eq!(7, exact.capacity());
    exact.reserve(7, 90);
    assert_eq!(97, exact.capacity());

    // A small request (3 is less than half of 12) must instead grow
    // exponentially. At the time of writing the growth factor is 2, making
    // the new capacity 24, but a factor of 1.5 is acceptable too — hence the
    // `>= 18` lower bound.
    let mut amortized: RawVec<u32> = RawVec::new();
    amortized.reserve(0, 12);
    assert_eq!(12, amortized.capacity());
    amortized.reserve(12, 3);
    assert!(amortized.capacity() >= 12 + 12 / 2);
}
//! Tests for the `boxed` module.
51946
51947use core::any::Any;
51948use core::clone::Clone;
51949use core::convert::TryInto;
51950use core::ops::Deref;
51951use core::result::Result::{Err, Ok};
51952
51953use std::boxed::Box;
51954
#[test]
fn test_owned_clone() {
    // Cloning a Box clones the heap value, yielding an equal, independent box.
    let original = Box::new(5);
    let duplicate: Box<i32> = original.clone();
    assert!(original == duplicate);
}
51961
// Zero-sized marker type used by the `dyn Any` downcast and Debug tests below.
#[derive(PartialEq, Eq)]
struct Test;
51964
#[test]
fn any_move() {
    // Downcasting to the actual concrete type recovers the boxed value.
    let a = Box::new(8) as Box<dyn Any>;
    let b = Box::new(Test) as Box<dyn Any>;

    match a.downcast::<i32>() {
        Ok(eight) => assert!(eight == Box::new(8)),
        Err(..) => panic!(),
    }
    match b.downcast::<Test>() {
        Ok(unit) => assert!(unit == Box::new(Test)),
        Err(..) => panic!(),
    }

    // Downcasting to the wrong type must fail and return the box intact.
    let a = Box::new(8) as Box<dyn Any>;
    let b = Box::new(Test) as Box<dyn Any>;

    assert!(a.downcast::<Box<Test>>().is_err());
    assert!(b.downcast::<Box<i32>>().is_err());
}
51989
#[test]
fn test_show() {
    // `Debug` for `dyn Any` deliberately hides the concrete value behind
    // the opaque placeholder "Any { .. }".
    let a = Box::new(8) as Box<dyn Any>;
    let b = Box::new(Test) as Box<dyn Any>;
    assert_eq!(format!("{:?}", a), "Any { .. }");
    assert_eq!(format!("{:?}", b), "Any { .. }");

    // The same holds for borrowed trait objects pointing at statics.
    static EIGHT: usize = 8;
    static TEST: Test = Test;
    let a = &EIGHT as &dyn Any;
    let b = &TEST as &dyn Any;
    assert_eq!(format!("{:?}", a), "Any { .. }");
    assert_eq!(format!("{:?}", b), "Any { .. }");
}
52008
#[test]
fn deref() {
    // Box<i32> must be usable wherever `Deref<Target = i32>` is required.
    fn accepts<D: Deref<Target = i32>>(_: D) {}
    accepts(Box::new(765));
}
52014
#[test]
fn raw_sized() {
    // Round-trip a sized Box through into_raw/from_raw, mutating in between.
    let boxed = Box::new(17);
    let raw = Box::into_raw(boxed);
    unsafe {
        // The raw pointer still refers to the live allocation.
        assert_eq!(17, *raw);
        *raw = 19;
        // Reconstituting the Box observes the write and frees the memory.
        let restored = Box::from_raw(raw);
        assert_eq!(19, *restored);
    }
}
52026
#[test]
fn raw_trait() {
    // Round-trip a *trait object* Box through into_raw/from_raw, calling
    // methods through the raw fat pointer in between.
    trait Foo {
        fn get(&self) -> u32;
        fn set(&mut self, value: u32);
    }

    struct Bar(u32);

    impl Foo for Bar {
        fn get(&self) -> u32 {
            self.0
        }
        fn set(&mut self, value: u32) {
            self.0 = value;
        }
    }

    let boxed: Box<dyn Foo> = Box::new(Bar(17));
    let raw = Box::into_raw(boxed);
    unsafe {
        assert_eq!(17, (*raw).get());
        (*raw).set(19);
        let restored: Box<dyn Foo> = Box::from_raw(raw);
        assert_eq!(19, restored.get());
    }
}
52055
#[test]
fn f64_slice() {
    // Box<[f64]> built via From<&[f64]> must hold the same elements.
    let values: &[f64] = &[-1.0, 0.0, 1.0, f64::INFINITY];
    let boxed = Box::<[f64]>::from(values);
    assert_eq!(values, &*boxed)
}
52062
#[test]
fn i64_slice() {
    // Box<[i64]> built via From<&[i64]> must hold the same elements,
    // including the extreme values of the type.
    let values: &[i64] = &[i64::MIN, -2, -1, 0, 1, 2, i64::MAX];
    let boxed = Box::<[i64]>::from(values);
    assert_eq!(values, &*boxed)
}
52069
#[test]
fn str_slice() {
    // Converting &str to Box<str> must preserve the contents exactly.
    let text = "Hello, world!";
    let owned: Box<str> = text.into();
    assert_eq!(text, &*owned)
}
52076
#[test]
fn boxed_slice_from_iter() {
    // A range collects directly into a boxed slice via FromIterator.
    let boxed = (0..100).collect::<Box<[u32]>>();
    assert_eq!(100, boxed.len());
    assert_eq!(7, boxed[7]);
}
52084
#[test]
fn test_array_from_slice() {
    // A boxed slice converts to a boxed array only when the lengths match.
    let v = vec![1, 2, 3];
    let r: Box<[u32]> = v.into_boxed_slice();

    // Matching length: the conversion succeeds.
    let a: Result<Box<[u32; 3]>, _> = r.clone().try_into();
    assert!(a.is_ok());

    // Mismatched length: the conversion fails. This is the last use of `r`,
    // so move it instead of cloning (the clone was redundant).
    let a: Result<Box<[u32; 2]>, _> = r.try_into();
    assert!(a.is_err());
}
52096