@@ -131,11 +131,12 @@ pub struct Arc<T: ?Sized> {
 }

 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
+unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}
+unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}

-#[cfg(not(stage0))] // remove cfg after new snapshot
+// remove cfg after new snapshot
+#[cfg(not(stage0))]
 #[unstable(feature = "coerce_unsized", issue = "27732")]
 impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}

@@ -152,11 +153,12 @@ pub struct Weak<T: ?Sized> {
 }

 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
+unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}
+unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}

-#[cfg(not(stage0))] // remove cfg after new snapshot
+// remove cfg after new snapshot
+#[cfg(not(stage0))]
 #[unstable(feature = "coerce_unsized", issue = "27732")]
 impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}

@@ -226,7 +228,7 @@ impl<T> Arc<T> {
     pub fn try_unwrap(this: Self) -> Result<T, Self> {
         // See `drop` for why all these atomics are like this
         if this.inner().strong.compare_and_swap(1, 0, Release) != 1 {
-            return Err(this)
+            return Err(this);
         }

         atomic::fence(Acquire);
@@ -265,7 +267,7 @@ impl<T: ?Sized> Arc<T> {

             // check if the weak counter is currently "locked"; if so, spin.
             if cur == usize::MAX {
-                continue
+                continue;
             }

             // NOTE: this code currently ignores the possibility of overflow
@@ -276,7 +278,7 @@ impl<T: ?Sized> Arc<T> {
             // synchronize with the write coming from `is_unique`, so that the
             // events prior to that write happen before this read.
             if this.inner().weak.compare_and_swap(cur, cur + 1, Acquire) == cur {
-                return Weak { _ptr: this._ptr }
+                return Weak { _ptr: this._ptr };
             }
         }
     }
@@ -568,14 +570,14 @@ impl<T: ?Sized> Drop for Arc<T> {
         let ptr = *self._ptr;
         // if ptr.is_null() { return }
         if ptr as *mut u8 as usize == 0 || ptr as *mut u8 as usize == mem::POST_DROP_USIZE {
-            return
+            return;
         }

         // Because `fetch_sub` is already atomic, we do not need to synchronize
         // with other threads unless we are going to delete the object. This
         // same logic applies to the below `fetch_sub` to the `weak` count.
         if self.inner().strong.fetch_sub(1, Release) != 1 {
-            return
+            return;
         }

         // This fence is needed to prevent reordering of use of the data and
@@ -634,7 +636,7 @@ impl<T: ?Sized> Weak<T> {
            // confirmed via the CAS below.
            let n = inner.strong.load(Relaxed);
            if n == 0 {
-                return None
+                return None;
            }

            // See comments in `Arc::clone` for why we do this (for `mem::forget`).
@@ -645,7 +647,7 @@ impl<T: ?Sized> Weak<T> {
            // Relaxed is valid for the same reason it is on Arc's Clone impl
            let old = inner.strong.compare_and_swap(n, n + 1, Relaxed);
            if old == n {
-                return Some(Arc { _ptr: self._ptr })
+                return Some(Arc { _ptr: self._ptr });
            }
        }
    }
@@ -687,7 +689,7 @@ impl<T: ?Sized> Clone for Weak<T> {
            }
        }

-        return Weak { _ptr: self._ptr }
+        return Weak { _ptr: self._ptr };
    }
 }

@@ -723,7 +725,7 @@ impl<T: ?Sized> Drop for Weak<T> {

        // see comments above for why this check is here
        if ptr as *mut u8 as usize == 0 || ptr as *mut u8 as usize == mem::POST_DROP_USIZE {
-            return
+            return;
        }

        // If we find out that we were the last weak pointer, then its time to
@@ -933,8 +935,7 @@ mod tests {

    struct Canary(*mut atomic::AtomicUsize);

-    impl Drop for Canary
-    {
+    impl Drop for Canary {
        fn drop(&mut self) {
            unsafe {
                match *self {
@@ -948,7 +949,7 @@ mod tests {

    #[test]
    fn manually_share_arc() {
-        let v = vec!(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
+        let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
        let arc_v = Arc::new(v);

        let (tx, rx) = channel();