@@ -767,6 +767,7 @@ impl<T, A: Allocator> Vec<T, A> {
767
767
/// assert!(vec.capacity() >= 11);
768
768
/// ```
769
769
#[ doc( alias = "realloc" ) ]
770
+ #[ track_caller]
770
771
#[ stable( feature = "rust1" , since = "1.0.0" ) ]
771
772
pub fn reserve ( & mut self , additional : usize ) {
772
773
self . buf . reserve ( self . len , additional) ;
@@ -793,6 +794,7 @@ impl<T, A: Allocator> Vec<T, A> {
793
794
/// assert!(vec.capacity() >= 11);
794
795
/// ```
795
796
#[ doc( alias = "realloc" ) ]
797
+ #[ track_caller]
796
798
#[ stable( feature = "rust1" , since = "1.0.0" ) ]
797
799
pub fn reserve_exact ( & mut self , additional : usize ) {
798
800
self . buf . reserve_exact ( self . len , additional) ;
@@ -893,6 +895,7 @@ impl<T, A: Allocator> Vec<T, A> {
893
895
/// assert!(vec.capacity() >= 3);
894
896
/// ```
895
897
#[ doc( alias = "realloc" ) ]
898
+ #[ track_caller]
896
899
#[ stable( feature = "rust1" , since = "1.0.0" ) ]
897
900
pub fn shrink_to_fit ( & mut self ) {
898
901
// The capacity is never less than the length, and there's nothing to do when
@@ -923,6 +926,7 @@ impl<T, A: Allocator> Vec<T, A> {
923
926
/// assert!(vec.capacity() >= 3);
924
927
/// ```
925
928
#[ doc( alias = "realloc" ) ]
929
+ #[ track_caller]
926
930
#[ unstable( feature = "shrink_to" , reason = "new API" , issue = "56431" ) ]
927
931
pub fn shrink_to ( & mut self , min_capacity : usize ) {
928
932
if self . capacity ( ) > min_capacity {
@@ -954,6 +958,7 @@ impl<T, A: Allocator> Vec<T, A> {
954
958
/// let slice = vec.into_boxed_slice();
955
959
/// assert_eq!(slice.into_vec().capacity(), 3);
956
960
/// ```
961
+ #[ track_caller]
957
962
#[ stable( feature = "rust1" , since = "1.0.0" ) ]
958
963
pub fn into_boxed_slice ( mut self ) -> Box < [ T ] , A > {
959
964
unsafe {
@@ -1620,6 +1625,7 @@ impl<T, A: Allocator> Vec<T, A> {
1620
1625
/// assert_eq!(vec, [1, 2, 3]);
1621
1626
/// ```
1622
1627
#[ inline]
1628
+ #[ track_caller]
1623
1629
#[ stable( feature = "rust1" , since = "1.0.0" ) ]
1624
1630
pub fn push ( & mut self , value : T ) {
1625
1631
// This will panic or abort if we would allocate > isize::MAX bytes
@@ -1673,6 +1679,7 @@ impl<T, A: Allocator> Vec<T, A> {
1673
1679
/// assert_eq!(vec2, []);
1674
1680
/// ```
1675
1681
#[ inline]
1682
+ #[ track_caller]
1676
1683
#[ stable( feature = "append" , since = "1.4.0" ) ]
1677
1684
pub fn append ( & mut self , other : & mut Self ) {
1678
1685
unsafe {
@@ -1683,6 +1690,7 @@ impl<T, A: Allocator> Vec<T, A> {
1683
1690
1684
1691
/// Appends elements to `Self` from other buffer.
1685
1692
#[ inline]
1693
+ #[ track_caller]
1686
1694
unsafe fn append_elements ( & mut self , other : * const [ T ] ) {
1687
1695
let count = unsafe { ( * other) . len ( ) } ;
1688
1696
self . reserve ( count) ;
@@ -2106,6 +2114,7 @@ impl<T: Clone, A: Allocator> Vec<T, A> {
2106
2114
/// ```
2107
2115
///
2108
2116
/// [`extend`]: Vec::extend
2117
+ #[ track_caller]
2109
2118
#[ stable( feature = "vec_extend_from_slice" , since = "1.6.0" ) ]
2110
2119
pub fn extend_from_slice ( & mut self , other : & [ T ] ) {
2111
2120
self . spec_extend ( other. iter ( ) )
@@ -2183,6 +2192,7 @@ impl<T, F: FnMut() -> T> ExtendWith<T> for ExtendFunc<F> {
2183
2192
2184
2193
impl < T , A : Allocator > Vec < T , A > {
2185
2194
/// Extend the vector by `n` values, using the given generator.
2195
+ #[ track_caller]
2186
2196
fn extend_with < E : ExtendWith < T > > ( & mut self , n : usize , mut value : E ) {
2187
2197
self . reserve ( n) ;
2188
2198
@@ -2397,6 +2407,7 @@ impl<T, I: SliceIndex<[T]>, A: Allocator> IndexMut<I> for Vec<T, A> {
2397
2407
#[ stable( feature = "rust1" , since = "1.0.0" ) ]
2398
2408
impl < T > FromIterator < T > for Vec < T > {
2399
2409
#[ inline]
2410
+ #[ track_caller]
2400
2411
fn from_iter < I : IntoIterator < Item = T > > ( iter : I ) -> Vec < T > {
2401
2412
<Self as SpecFromIter < T , I :: IntoIter > >:: from_iter ( iter. into_iter ( ) )
2402
2413
}
@@ -2485,6 +2496,7 @@ impl<T, A: Allocator> Extend<T> for Vec<T, A> {
2485
2496
impl < T , A : Allocator > Vec < T , A > {
2486
2497
// leaf method to which various SpecFrom/SpecExtend implementations delegate when
2487
2498
// they have no further optimizations to apply
2499
+ #[ track_caller]
2488
2500
fn extend_desugared < I : Iterator < Item = T > > ( & mut self , mut iterator : I ) {
2489
2501
// This is the case for a general iterator.
2490
2502
//
@@ -2616,18 +2628,21 @@ impl<T, A: Allocator> Vec<T, A> {
2616
2628
/// append the entire slice at once.
2617
2629
///
2618
2630
/// [`copy_from_slice`]: slice::copy_from_slice
2631
+ #[ track_caller]
2619
2632
#[ stable( feature = "extend_ref" , since = "1.2.0" ) ]
2620
2633
impl < ' a , T : Copy + ' a , A : Allocator + ' a > Extend < & ' a T > for Vec < T , A > {
2621
2634
fn extend < I : IntoIterator < Item = & ' a T > > ( & mut self , iter : I ) {
2622
2635
self . spec_extend ( iter. into_iter ( ) )
2623
2636
}
2624
2637
2625
2638
#[ inline]
2639
+ #[ track_caller]
2626
2640
fn extend_one ( & mut self , & item: & ' a T ) {
2627
2641
self . push ( item) ;
2628
2642
}
2629
2643
2630
2644
#[ inline]
2645
+ #[ track_caller]
2631
2646
fn extend_reserve ( & mut self , additional : usize ) {
2632
2647
self . reserve ( additional) ;
2633
2648
}
@@ -2792,7 +2807,7 @@ impl<T, A: Allocator, const N: usize> TryFrom<Vec<T, A>> for [T; N] {
2792
2807
/// # Examples
2793
2808
///
2794
2809
/// ```
2795
- /// use std::convert::TryInto;
2810
+ /// use std::convert::TryInto;
2796
2811
/// assert_eq!(vec![1, 2, 3].try_into(), Ok([1, 2, 3]));
2797
2812
/// assert_eq!(<Vec<i32>>::new().try_into(), Ok([]));
2798
2813
/// ```
0 commit comments