@@ -131,11 +131,12 @@ pub struct Arc<T: ?Sized> {
 }

 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}

-#[cfg(not(stage0))] // remove cfg after new snapshot
+// remove cfg after new snapshot
+#[cfg(not(stage0))]
 #[unstable(feature = "coerce_unsized", issue = "27732")]
 impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}

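For context on the two unsafe impls above: Arc<T> only claims to be Send and Sync when T is itself both Send and Sync, because any handle can hand out &T on any thread and the last handle, on whatever thread it lives, runs T's destructor. A minimal sketch of what those bounds permit, not part of this patch, just ordinary std usage:

use std::sync::Arc;
use std::thread;

fn main() {
    // String is Send + Sync, so Arc<String> is Send and may move into the thread.
    let shared = Arc::new(String::from("hello"));
    let cloned = shared.clone();
    let handle = thread::spawn(move || {
        // The spawned thread reads the same allocation through its own handle.
        println!("from thread: {}", cloned);
    });
    handle.join().unwrap();
    println!("from main: {}", shared);
    // By contrast, Arc<Rc<u8>> would not compile here: Rc is neither Send nor
    // Sync, so the bounds on the unsafe impls above reject it.
}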
@@ -152,11 +153,12 @@ pub struct Weak<T: ?Sized> {
 }

 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}

-#[cfg(not(stage0))] // remove cfg after new snapshot
+// remove cfg after new snapshot
+#[cfg(not(stage0))]
 #[unstable(feature = "coerce_unsized", issue = "27732")]
 impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}

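The CoerceUnsized impls behind the cfg(not(stage0)) gate are what let a smart pointer to a sized value coerce into a pointer to an unsized one, such as a trait object. A small illustration, assuming current stable Rust syntax rather than the syntax of the era of this patch:

use std::fmt::Display;
use std::sync::Arc;

fn main() {
    // Unsized coercion: Arc<i32> coerces to Arc<dyn Display> thanks to the
    // CoerceUnsized impl on Arc<T>.
    let concrete: Arc<i32> = Arc::new(5);
    let as_trait_object: Arc<dyn Display> = concrete;
    println!("{}", as_trait_object);
}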
@@ -226,7 +228,7 @@ impl<T> Arc<T> {
     pub fn try_unwrap(this: Self) -> Result<T, Self> {
         // See `drop` for why all these atomics are like this
         if this.inner().strong.compare_and_swap(1, 0, Release) != 1 {
-            return Err(this)
+            return Err(this);
         }

         atomic::fence(Acquire);
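As a reminder of what this function does: Arc::try_unwrap only hands back the inner value when the strong count is exactly 1, which is what the compare_and_swap above checks. A hedged usage sketch, not part of the patch:

use std::sync::Arc;

fn main() {
    let unique = Arc::new(3);
    // Only one strong reference exists, so try_unwrap returns Ok with the value.
    assert_eq!(Arc::try_unwrap(unique), Ok(3));

    let first = Arc::new(4);
    let _second = first.clone();
    // Two strong references exist, so the Arc is handed back in the Err variant.
    assert_eq!(*Arc::try_unwrap(first).unwrap_err(), 4);
}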
@@ -265,7 +267,7 @@ impl<T: ?Sized> Arc<T> {

             // check if the weak counter is currently "locked"; if so, spin.
             if cur == usize::MAX {
-                continue
+                continue;
             }

             // NOTE: this code currently ignores the possibility of overflow
@@ -276,7 +278,7 @@ impl<T: ?Sized> Arc<T> {
             // synchronize with the write coming from `is_unique`, so that the
             // events prior to that write happen before this read.
             if this.inner().weak.compare_and_swap(cur, cur + 1, Acquire) == cur {
-                return Weak { _ptr: this._ptr }
+                return Weak { _ptr: this._ptr };
             }
         }
     }
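The loop in the two hunks above implements Arc::downgrade, which bumps the weak count and returns a Weak handle that does not keep the value alive. A small usage sketch of that public API, not part of the patch:

use std::sync::Arc;

fn main() {
    let strong = Arc::new(42);
    // downgrade bumps the weak count (the CAS loop above) and returns a Weak.
    let weak = Arc::downgrade(&strong);

    // While a strong reference is alive, upgrade succeeds.
    assert_eq!(weak.upgrade().map(|a| *a), Some(42));

    // Once the last strong reference is gone, upgrade returns None.
    drop(strong);
    assert!(weak.upgrade().is_none());
}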
@@ -568,14 +570,14 @@ impl<T: ?Sized> Drop for Arc<T> {
         let ptr = *self._ptr;
         // if ptr.is_null() { return }
         if ptr as *mut u8 as usize == 0 || ptr as *mut u8 as usize == mem::POST_DROP_USIZE {
-            return
+            return;
         }

         // Because `fetch_sub` is already atomic, we do not need to synchronize
         // with other threads unless we are going to delete the object. This
         // same logic applies to the below `fetch_sub` to the `weak` count.
         if self.inner().strong.fetch_sub(1, Release) != 1 {
-            return
+            return;
         }

         // This fence is needed to prevent reordering of use of the data and
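The surrounding comments explain the ordering choice: every owner decrements with Release, and only the thread that sees the count hit zero pairs that with an Acquire fence before tearing the data down. A deliberately simplified, hypothetical sketch of that pattern; ToyCount is invented for illustration and is not Arc's real layout:

use std::sync::atomic::{fence, AtomicUsize, Ordering};

// Hypothetical refcount that only models the strong count and the ordering
// pattern; real Arc also tracks weak counts and owns a raw allocation.
struct ToyCount {
    strong: AtomicUsize,
}

impl ToyCount {
    fn release(&self) -> bool {
        // Release on the decrement publishes all prior uses of the data to
        // whichever thread ends up performing the deallocation.
        if self.strong.fetch_sub(1, Ordering::Release) != 1 {
            return false; // other owners remain; nothing to free
        }
        // The last owner pairs an Acquire fence with every earlier Release
        // decrement before it is allowed to free the data.
        fence(Ordering::Acquire);
        true
    }
}

fn main() {
    let count = ToyCount { strong: AtomicUsize::new(2) };
    assert!(!count.release()); // first drop: an owner is still alive
    assert!(count.release()); // second drop: safe to free
}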
@@ -634,13 +636,13 @@ impl<T: ?Sized> Weak<T> {
             // confirmed via the CAS below.
             let n = inner.strong.load(Relaxed);
             if n == 0 {
-                return None
+                return None;
             }

             // Relaxed is valid for the same reason it is on Arc's Clone impl
             let old = inner.strong.compare_and_swap(n, n + 1, Relaxed);
             if old == n {
-                return Some(Arc { _ptr: self._ptr })
+                return Some(Arc { _ptr: self._ptr });
             }
         }
     }
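Weak::upgrade uses a load-then-CAS loop rather than a plain fetch_add so the strong count can never be revived once it reaches zero. A hypothetical standalone version of that loop, using compare_exchange_weak in place of the compare_and_swap shown here:

use std::sync::atomic::{AtomicUsize, Ordering};

// Invented helper mirroring the shape of the loop above: increment a counter
// only if it has not already dropped to zero.
fn try_increment(strong: &AtomicUsize) -> bool {
    loop {
        let n = strong.load(Ordering::Relaxed);
        if n == 0 {
            return false; // the value is already gone; do not resurrect it
        }
        // On success the count went from n to n + 1 with no zero in between.
        if strong
            .compare_exchange_weak(n, n + 1, Ordering::Relaxed, Ordering::Relaxed)
            .is_ok()
        {
            return true;
        }
    }
}

fn main() {
    let live = AtomicUsize::new(1);
    assert!(try_increment(&live));
    assert_eq!(live.load(Ordering::Relaxed), 2);

    let dead = AtomicUsize::new(0);
    assert!(!try_increment(&dead));
}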
@@ -682,7 +684,7 @@ impl<T: ?Sized> Clone for Weak<T> {
             }
         }

-        return Weak { _ptr: self._ptr }
+        return Weak { _ptr: self._ptr };
     }
 }

@@ -718,7 +720,7 @@ impl<T: ?Sized> Drop for Weak<T> {

         // see comments above for why this check is here
         if ptr as *mut u8 as usize == 0 || ptr as *mut u8 as usize == mem::POST_DROP_USIZE {
-            return
+            return;
         }

         // If we find out that we were the last weak pointer, then its time to
@@ -928,8 +930,7 @@ mod tests {

     struct Canary(*mut atomic::AtomicUsize);

-    impl Drop for Canary
-    {
+    impl Drop for Canary {
         fn drop(&mut self) {
             unsafe {
                 match *self {
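The Canary type gives these tests an observable destructor: dropping it bumps an atomic counter. A self-contained sketch of the same drop-counting idea; the borrow-based Canary below is an invention for illustration, not the test's exact type:

use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;

// Drop-counting canary: each drop bumps a shared counter so a test can
// assert how many times the destructor ran.
struct Canary<'a>(&'a AtomicUsize);

impl<'a> Drop for Canary<'a> {
    fn drop(&mut self) {
        self.0.fetch_add(1, Ordering::SeqCst);
    }
}

fn main() {
    let drops = AtomicUsize::new(0);
    {
        let shared = Arc::new(Canary(&drops));
        let _clone = shared.clone();
        // Two strong handles, one value: nothing dropped yet.
        assert_eq!(drops.load(Ordering::SeqCst), 0);
    }
    // Both handles are gone, so the single Canary was dropped exactly once.
    assert_eq!(drops.load(Ordering::SeqCst), 1);
}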
@@ -943,7 +944,7 @@ mod tests {

     #[test]
     fn manually_share_arc() {
-        let v = vec!(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
+        let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
         let arc_v = Arc::new(v);

         let (tx, rx) = channel();
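This test sends an Arc<Vec<i32>> through a channel and reads the vector from both threads. Roughly the same pattern as a standalone program, an assumed equivalent rather than the test verbatim:

use std::sync::mpsc::channel;
use std::sync::Arc;
use std::thread;

fn main() {
    // Move one Arc handle to another thread through a channel and read the
    // same Vec from both sides.
    let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
    let arc_v = Arc::new(v);

    let (tx, rx) = channel();
    let handle = thread::spawn(move || {
        let received: Arc<Vec<i32>> = rx.recv().unwrap();
        assert_eq!((*received)[3], 4);
    });

    tx.send(arc_v.clone()).unwrap();
    assert_eq!((*arc_v)[2], 3);
    handle.join().unwrap();
}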