@@ -210,6 +210,21 @@ impl<T> Arc<T> {
         // contents.
         unsafe { &**self._ptr }
     }
+
+    // Non-inlined part of `drop`.
+    #[inline(never)]
+    unsafe fn drop_slow(&mut self) {
+        let ptr = *self._ptr;
+
+        // Destroy the data at this time, even though we may not free the box allocation itself
+        // (there may still be weak pointers lying around).
+        drop(ptr::read(&self.inner().data));
+
+        if self.inner().weak.fetch_sub(1, Release) == 1 {
+            atomic::fence(Acquire);
+            deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(), min_align_of::<ArcInner<T>>())
+        }
+    }
 }
 
 /// Get the number of weak references to this value.
@@ -325,6 +340,7 @@ impl<T: Sync + Send> Drop for Arc<T> {
     ///
     /// } // implicit drop
     /// ```
+    #[inline]
     fn drop(&mut self) {
         // This structure has #[unsafe_no_drop_flag], so this drop glue may run more than once (but
         // it is guaranteed to be zeroed after the first if it's run more than once)
@@ -353,14 +369,8 @@ impl<T: Sync + Send> Drop for Arc<T> {
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
         atomic::fence(Acquire);
 
-        // Destroy the data at this time, even though we may not free the box allocation itself
-        // (there may still be weak pointers lying around).
-        unsafe { drop(ptr::read(&self.inner().data)); }
-
-        if self.inner().weak.fetch_sub(1, Release) == 1 {
-            atomic::fence(Acquire);
-            unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
-                                min_align_of::<ArcInner<T>>()) }
+        unsafe {
+            self.drop_slow()
         }
     }
 }
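For context, here is a minimal, self-contained sketch (not the stdlib code) of the pattern this commit introduces: keep `Drop::drop` small and `#[inline]`, and push the destroy-and-deallocate work into an `#[inline(never)]` slow path. The `MiniArc`, `Inner`, and `get` names are hypothetical; the sketch omits weak references and `#[unsafe_no_drop_flag]`, and uses `Box` for allocation instead of the manual `deallocate` call above.

```rust
use std::sync::atomic::{fence, AtomicUsize, Ordering::{Acquire, Relaxed, Release}};

struct Inner<T> {
    strong: AtomicUsize,
    data: T,
}

struct MiniArc<T> {
    ptr: *mut Inner<T>,
}

impl<T> MiniArc<T> {
    fn new(data: T) -> Self {
        let inner = Box::new(Inner { strong: AtomicUsize::new(1), data });
        MiniArc { ptr: Box::into_raw(inner) }
    }

    fn get(&self) -> &T {
        // Fine for this sketch: the allocation outlives every handle.
        unsafe { &(*self.ptr).data }
    }

    // Cold path: run the destructor and free the allocation. Keeping this out
    // of line keeps the inlined `drop` below down to a fetch_sub and a branch.
    #[inline(never)]
    unsafe fn drop_slow(&mut self) {
        drop(Box::from_raw(self.ptr));
    }
}

impl<T> Clone for MiniArc<T> {
    fn clone(&self) -> Self {
        // A relaxed increment is enough; creating a new handle needs no
        // synchronization with other operations on `data`.
        unsafe { (*self.ptr).strong.fetch_add(1, Relaxed) };
        MiniArc { ptr: self.ptr }
    }
}

impl<T> Drop for MiniArc<T> {
    #[inline]
    fn drop(&mut self) {
        // Release on the decrement publishes this handle's uses of `data`;
        // the Acquire fence in the zero case makes them visible before the
        // data is destroyed (the Boost-documented pattern referenced above).
        if unsafe { (*self.ptr).strong.fetch_sub(1, Release) } == 1 {
            fence(Acquire);
            unsafe { self.drop_slow() };
        }
    }
}

fn main() {
    let a = MiniArc::new(String::from("shared"));
    let b = a.clone();
    println!("{}", a.get());
    drop(a); // 2 -> 1: only the cheap, inlined fast path runs
    drop(b); // 1 -> 0: `drop_slow` destroys the String and frees the box
}
```

The point of the split is that the common case, a nonzero count after the decrement, compiles to a single atomic `fetch_sub` plus a branch, so inlining it at every call site stays cheap, while the rarely taken destruction path no longer bloats callers.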