@@ -253,7 +253,7 @@ use core::hash::{Hash, Hasher};
253
253
use core:: intrinsics:: abort;
254
254
use core:: marker;
255
255
use core:: marker:: { Unsize , PhantomData } ;
256
- use core:: mem:: { self , align_of_val, forget, size_of_val, uninitialized } ;
256
+ use core:: mem:: { self , align_of_val, forget, size_of_val} ;
257
257
use core:: ops:: Deref ;
258
258
use core:: ops:: CoerceUnsized ;
259
259
use core:: ptr:: { self , NonNull } ;
@@ -1153,6 +1153,10 @@ impl<T> From<Vec<T>> for Rc<[T]> {
1153
1153
/// [`None`]: ../../std/option/enum.Option.html#variant.None
1154
1154
#[ stable( feature = "rc_weak" , since = "1.4.0" ) ]
1155
1155
pub struct Weak < T : ?Sized > {
1156
+ // This is a `NonNull` to allow optimizing the size of this type in enums,
1157
+ // but it is not necessarily a valid pointer.
1158
+ // `Weak::new` sets this to a dangling pointer so that it doesn’t need
1159
+ // to allocate space on the heap.
1156
1160
ptr : NonNull < RcBox < T > > ,
1157
1161
}
1158
1162
@@ -1165,8 +1169,8 @@ impl<T: ?Sized> !marker::Sync for Weak<T> {}
1165
1169
impl < T : ?Sized + Unsize < U > , U : ?Sized > CoerceUnsized < Weak < U > > for Weak < T > { }
1166
1170
1167
1171
impl < T > Weak < T > {
1168
- /// Constructs a new `Weak<T>`, allocating memory for `T` without initializing
1169
- /// it. Calling [`upgrade`] on the return value always gives [`None`].
1172
+ /// Constructs a new `Weak<T>`, without allocating any memory.
1173
+ /// Calling [`upgrade`] on the return value always gives [`None`].
1170
1174
///
1171
1175
/// [`upgrade`]: struct.Weak.html#method.upgrade
1172
1176
/// [`None`]: ../../std/option/enum.Option.html
@@ -1181,18 +1185,18 @@ impl<T> Weak<T> {
1181
1185
/// ```
1182
1186
#[ stable( feature = "downgraded_weak" , since = "1.10.0" ) ]
1183
1187
pub fn new ( ) -> Weak < T > {
1184
- unsafe {
1185
- Weak {
1186
- ptr : Box :: into_raw_non_null ( box RcBox {
1187
- strong : Cell :: new ( 0 ) ,
1188
- weak : Cell :: new ( 1 ) ,
1189
- value : uninitialized ( ) ,
1190
- } ) ,
1191
- }
1188
+ Weak {
1189
+ ptr : NonNull :: dangling ( ) ,
1192
1190
}
1193
1191
}
1194
1192
}
1195
1193
1194
/// Whether `ptr` is the sentinel dangling pointer used by `Weak::new`:
/// its address equals the pointee's alignment, which is exactly what
/// `NonNull::dangling` yields, i.e. no `RcBox` was ever allocated behind it.
///
/// NOTE(review): this materializes a `&T` from a possibly dangling pointer
/// just to ask `align_of_val` for the alignment — confirm this is accepted
/// by the current validity rules for references.
pub(crate) fn is_dangling<T: ?Sized>(ptr: NonNull<T>) -> bool {
    let addr = ptr.as_ptr() as *mut () as usize;
    let align = align_of_val(unsafe { ptr.as_ref() });
    addr == align
}
1199
+
1196
1200
impl < T : ?Sized > Weak < T > {
1197
1201
/// Attempts to upgrade the `Weak` pointer to an [`Rc`], extending
1198
1202
/// the lifetime of the value if successful.
@@ -1222,13 +1226,25 @@ impl<T: ?Sized> Weak<T> {
1222
1226
/// ```
1223
1227
#[ stable( feature = "rc_weak" , since = "1.4.0" ) ]
1224
1228
pub fn upgrade ( & self ) -> Option < Rc < T > > {
1225
- if self . strong ( ) == 0 {
1229
+ let inner = self . inner ( ) ?;
1230
+ if inner. strong ( ) == 0 {
1226
1231
None
1227
1232
} else {
1228
- self . inc_strong ( ) ;
1233
+ inner . inc_strong ( ) ;
1229
1234
Some ( Rc { ptr : self . ptr , phantom : PhantomData } )
1230
1235
}
1231
1236
}
1237
+
1238
+ /// Return `None` when the pointer is dangling and there is no allocated `RcBox`,
1239
+ /// i.e. this `Weak` was created by `Weak::new`
1240
+ #[ inline]
1241
+ fn inner ( & self ) -> Option < & RcBox < T > > {
1242
+ if is_dangling ( self . ptr ) {
1243
+ None
1244
+ } else {
1245
+ Some ( unsafe { self . ptr . as_ref ( ) } )
1246
+ }
1247
+ }
1232
1248
}
1233
1249
1234
1250
#[ stable( feature = "rc_weak" , since = "1.4.0" ) ]
@@ -1258,12 +1274,14 @@ impl<T: ?Sized> Drop for Weak<T> {
1258
1274
/// assert!(other_weak_foo.upgrade().is_none());
1259
1275
/// ```
1260
1276
fn drop ( & mut self ) {
1261
- unsafe {
1262
- self . dec_weak ( ) ;
1277
+ if let Some ( inner ) = self . inner ( ) {
1278
+ inner . dec_weak ( ) ;
1263
1279
// the weak count starts at 1, and will only go to zero if all
1264
1280
// the strong pointers have disappeared.
1265
- if self . weak ( ) == 0 {
1266
- Global . dealloc ( self . ptr . cast ( ) , Layout :: for_value ( self . ptr . as_ref ( ) ) ) ;
1281
+ if inner. weak ( ) == 0 {
1282
+ unsafe {
1283
+ Global . dealloc ( self . ptr . cast ( ) , Layout :: for_value ( self . ptr . as_ref ( ) ) ) ;
1284
+ }
1267
1285
}
1268
1286
}
1269
1287
}
@@ -1284,7 +1302,9 @@ impl<T: ?Sized> Clone for Weak<T> {
1284
1302
/// ```
1285
1303
#[ inline]
1286
1304
fn clone ( & self ) -> Weak < T > {
1287
- self . inc_weak ( ) ;
1305
+ if let Some ( inner) = self . inner ( ) {
1306
+ inner. inc_weak ( )
1307
+ }
1288
1308
Weak { ptr : self . ptr }
1289
1309
}
1290
1310
}
@@ -1317,7 +1337,7 @@ impl<T> Default for Weak<T> {
1317
1337
}
1318
1338
}
1319
1339
1320
- // NOTE: We checked_add here to deal with mem::forget safety . In particular
1340
+ // NOTE: We checked_add here to deal with mem::forget safely . In particular
1321
1341
// if you mem::forget Rcs (or Weaks), the ref-count can overflow, and then
1322
1342
// you can free the allocation while outstanding Rcs (or Weaks) exist.
1323
1343
// We abort because this is such a degenerate scenario that we don't care about
@@ -1370,12 +1390,10 @@ impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
1370
1390
}
1371
1391
}
1372
1392
1373
- impl < T : ?Sized > RcBoxPtr < T > for Weak < T > {
1393
+ impl < T : ?Sized > RcBoxPtr < T > for RcBox < T > {
1374
1394
#[ inline( always) ]
1375
1395
fn inner ( & self ) -> & RcBox < T > {
1376
- unsafe {
1377
- self . ptr . as_ref ( )
1378
- }
1396
+ self
1379
1397
}
1380
1398
}
1381
1399
0 commit comments