
Commit 4f7612a

Auto merge of #78594 - m-ou-se:rollup-h5c8frs, r=m-ou-se

Rollup of 7 pull requests

Successful merges:

- #74622 (Add std::panic::panic_any.)
- #77099 (make exp_m1 and ln_1p examples more representative of use)
- #78526 (Strip tokens from trait and impl items before printing AST JSON)
- #78550 (x.py setup: Create config.toml in the current directory, not the top-level directory)
- #78577 (validator: Extend aliasing check to a call terminator)
- #78581 (Constantify more BTreeMap and BTreeSet functions)
- #78587 (parser: Cleanup `LazyTokenStream` and avoid some clones)

Failed merges:

r? `@ghost`
2 parents 3478d7c + 1873ca5
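Two of the rolled-up changes touch user-facing std APIs and are easiest to see from the caller's side. A minimal sketch of both follows; the values and assertions are illustrative, not taken from the PRs:

```rust
use std::panic;

fn main() {
    // #77099: for x near zero, exp_m1 and ln_1p compute e^x - 1 and
    // ln(1 + x) without the catastrophic rounding of x.exp() - 1.0
    // and (1.0 + x).ln().
    let x = 1e-10_f64;
    assert!((x.exp_m1() - x).abs() < 1e-18);
    assert!((x.ln_1p() - x).abs() < 1e-18);

    // #74622: panic_any panics with an arbitrary (non-string) payload.
    // The default panic hook still prints a message; the payload itself
    // comes back as Box<dyn Any + Send> and can be downcast.
    let payload = panic::catch_unwind(|| panic::panic_any(42_i32)).unwrap_err();
    assert_eq!(*payload.downcast::<i32>().unwrap(), 42);
}
```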

File tree: 17 files changed, +219 -113 lines changed

compiler/rustc_ast/src/tokenstream.rs (+22 -43)

```diff
@@ -22,7 +22,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
 use rustc_span::{Span, DUMMY_SP};
 use smallvec::{smallvec, SmallVec};
 
-use std::{iter, mem};
+use std::{fmt, iter, mem};
 
 /// When the main rust parser encounters a syntax-extension invocation, it
 /// parses the arguments to the invocation as a token-tree. This is a very
@@ -120,72 +120,51 @@ where
     }
 }
 
-// A cloneable callback which produces a `TokenStream`. Each clone
-// of this should produce the same `TokenStream`
-pub trait CreateTokenStream: sync::Send + sync::Sync + FnOnce() -> TokenStream {
-    // Workaround for the fact that `Clone` is not object-safe
-    fn clone_it(&self) -> Box<dyn CreateTokenStream>;
+pub trait CreateTokenStream: sync::Send + sync::Sync {
+    fn create_token_stream(&self) -> TokenStream;
 }
 
-impl<F: 'static + Clone + sync::Send + sync::Sync + FnOnce() -> TokenStream> CreateTokenStream
-    for F
-{
-    fn clone_it(&self) -> Box<dyn CreateTokenStream> {
-        Box::new(self.clone())
-    }
-}
-
-impl Clone for Box<dyn CreateTokenStream> {
-    fn clone(&self) -> Self {
-        let val: &(dyn CreateTokenStream) = &**self;
-        val.clone_it()
+impl CreateTokenStream for TokenStream {
+    fn create_token_stream(&self) -> TokenStream {
+        self.clone()
     }
 }
 
-/// A lazy version of `TokenStream`, which may defer creation
+/// A lazy version of `TokenStream`, which defers creation
 /// of an actual `TokenStream` until it is needed.
-pub type LazyTokenStream = Lrc<LazyTokenStreamInner>;
-
+/// `Box` is here only to reduce the structure size.
 #[derive(Clone)]
-pub enum LazyTokenStreamInner {
-    Lazy(Box<dyn CreateTokenStream>),
-    Ready(TokenStream),
-}
+pub struct LazyTokenStream(Lrc<Box<dyn CreateTokenStream>>);
 
-impl std::fmt::Debug for LazyTokenStreamInner {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            LazyTokenStreamInner::Lazy(..) => f.debug_struct("LazyTokenStream::Lazy").finish(),
-            LazyTokenStreamInner::Ready(..) => f.debug_struct("LazyTokenStream::Ready").finish(),
-        }
+impl LazyTokenStream {
+    pub fn new(inner: impl CreateTokenStream + 'static) -> LazyTokenStream {
+        LazyTokenStream(Lrc::new(Box::new(inner)))
+    }
+
+    pub fn create_token_stream(&self) -> TokenStream {
+        self.0.create_token_stream()
     }
 }
 
-impl LazyTokenStreamInner {
-    pub fn into_token_stream(&self) -> TokenStream {
-        match self {
-            // Note that we do not cache this. If this ever becomes a performance
-            // problem, we should investigate wrapping `LazyTokenStreamInner`
-            // in a lock
-            LazyTokenStreamInner::Lazy(cb) => (cb.clone())(),
-            LazyTokenStreamInner::Ready(stream) => stream.clone(),
-        }
+impl fmt::Debug for LazyTokenStream {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Debug::fmt("LazyTokenStream", f)
     }
 }
 
-impl<S: Encoder> Encodable<S> for LazyTokenStreamInner {
+impl<S: Encoder> Encodable<S> for LazyTokenStream {
     fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
         panic!("Attempted to encode LazyTokenStream");
     }
 }
 
-impl<D: Decoder> Decodable<D> for LazyTokenStreamInner {
+impl<D: Decoder> Decodable<D> for LazyTokenStream {
     fn decode(_d: &mut D) -> Result<Self, D::Error> {
         panic!("Attempted to decode LazyTokenStream");
    }
 }
 
-impl<CTX> HashStable<CTX> for LazyTokenStreamInner {
+impl<CTX> HashStable<CTX> for LazyTokenStream {
     fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) {
         panic!("Attempted to compute stable hash for LazyTokenStream");
     }
```

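The net effect of this diff: the old design needed `clone_it` because the producer was an `FnOnce` closure and `Clone` is not object-safe, so producing a stream meant cloning the boxed callback first. The new `CreateTokenStream` takes `&self` and can be called repeatedly, so the trait object never needs `Clone` at all. A minimal standalone sketch of the new shape, using `std::sync::Arc` and a `Vec<String>` stand-in where rustc uses `Lrc<Box<dyn CreateTokenStream>>` and a real `TokenStream`:

```rust
use std::sync::Arc;

// Stand-in for rustc's TokenStream.
type TokenStream = Vec<String>;

// Object-safe producer trait: a &self method instead of FnOnce(),
// so no Clone bound (and no clone_it workaround) is required.
trait CreateTokenStream: Send + Sync {
    fn create_token_stream(&self) -> TokenStream;
}

// A ready stream implements the trait by cloning itself, replacing
// the old LazyTokenStreamInner::Ready variant.
impl CreateTokenStream for TokenStream {
    fn create_token_stream(&self) -> TokenStream {
        self.clone()
    }
}

// The handle: cloning it only bumps a refcount, never the producer.
#[derive(Clone)]
struct LazyTokenStream(Arc<dyn CreateTokenStream>);

impl LazyTokenStream {
    fn new(inner: impl CreateTokenStream + 'static) -> Self {
        LazyTokenStream(Arc::new(inner))
    }
    fn create_token_stream(&self) -> TokenStream {
        self.0.create_token_stream()
    }
}

fn main() {
    let lazy = LazyTokenStream::new(vec!["fn".to_string(), "main".to_string()]);
    let copy = lazy.clone(); // cheap: shares the same producer
    assert_eq!(copy.create_token_stream(), lazy.create_token_stream());
}
```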
compiler/rustc_expand/src/config.rs (+6 -7)

```diff
@@ -4,12 +4,11 @@ use rustc_ast::attr::HasAttrs;
 use rustc_ast::mut_visit::*;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{DelimToken, Token, TokenKind};
-use rustc_ast::tokenstream::{DelimSpan, LazyTokenStreamInner, Spacing, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{DelimSpan, LazyTokenStream, Spacing, TokenStream, TokenTree};
 use rustc_ast::{self as ast, AttrItem, Attribute, MetaItem};
 use rustc_attr as attr;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::map_in_place::MapInPlace;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::{error_code, struct_span_err, Applicability, Handler};
 use rustc_feature::{Feature, Features, State as FeatureState};
 use rustc_feature::{
@@ -303,7 +302,7 @@ impl<'a> StripUnconfigured<'a> {
 
             // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
             // for `attr` when we expand it to `#[attr]`
-            let pound_token = orig_tokens.into_token_stream().trees().next().unwrap();
+            let pound_token = orig_tokens.create_token_stream().trees().next().unwrap();
             if !matches!(pound_token, TokenTree::Token(Token { kind: TokenKind::Pound, .. })) {
                 panic!("Bad tokens for attribute {:?}", attr);
             }
@@ -313,16 +312,16 @@ impl<'a> StripUnconfigured<'a> {
                 DelimSpan::from_single(pound_token.span()),
                 DelimToken::Bracket,
                 item.tokens
-                    .clone()
+                    .as_ref()
                     .unwrap_or_else(|| panic!("Missing tokens for {:?}", item))
-                    .into_token_stream(),
+                    .create_token_stream(),
             );
 
             let mut attr = attr::mk_attr_from_item(attr.style, item, span);
-            attr.tokens = Some(Lrc::new(LazyTokenStreamInner::Ready(TokenStream::new(vec![
+            attr.tokens = Some(LazyTokenStream::new(TokenStream::new(vec![
                 (pound_token, Spacing::Alone),
                 (bracket_group, Spacing::Alone),
-            ]))));
+            ])));
             self.process_cfg_attr(attr)
         })
         .collect()
```

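The comments in this hunk concern `cfg_attr` expansion, which is clearer from the user's side: the `#` token (and its span) of the original `#[cfg_attr(...)]` is reused for each attribute it expands to. A small self-contained example of the behavior this code implements (my own example, not from the PR):

```rust
// When the predicate holds, the attribute below expands to
// `#[derive(Clone)]`, and the synthesized attribute reuses the `#`
// token from the original `#[cfg_attr(...)]` token stream.
#[cfg_attr(all(), derive(Clone))] // `all()` with no arguments is always true
struct Point {
    x: i32,
    y: i32,
}

fn main() {
    let p = Point { x: 1, y: 2 };
    let q = p.clone(); // Clone was derived through the cfg_attr expansion
    assert_eq!((q.x, q.y), (1, 2));
}
```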
compiler/rustc_interface/src/passes.rs (+11 -0)

```diff
@@ -70,6 +70,17 @@ impl mut_visit::MutVisitor for TokenStripper {
         i.tokens = None;
         mut_visit::noop_flat_map_foreign_item(i, self)
     }
+    fn flat_map_trait_item(
+        &mut self,
+        mut i: P<ast::AssocItem>,
+    ) -> SmallVec<[P<ast::AssocItem>; 1]> {
+        i.tokens = None;
+        mut_visit::noop_flat_map_assoc_item(i, self)
+    }
+    fn flat_map_impl_item(&mut self, mut i: P<ast::AssocItem>) -> SmallVec<[P<ast::AssocItem>; 1]> {
+        i.tokens = None;
+        mut_visit::noop_flat_map_assoc_item(i, self)
+    }
     fn visit_block(&mut self, b: &mut P<ast::Block>) {
         b.tokens = None;
         mut_visit::noop_visit_block(b, self);
```

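`TokenStripper` must clear cached tokens from every AST node kind before the AST is printed as JSON, because `LazyTokenStream`'s `Encodable` impl panics (see the tokenstream.rs diff above); #78526 adds the two `flat_map_*` overrides for trait and impl items, which were previously missed. A toy model of the invariant being maintained, with hypothetical types in place of rustc's AST:

```rust
// Hypothetical stand-ins, not rustc's types: each item may carry a
// cached token stream that must be cleared before serialization.
struct AssocItem {
    name: &'static str,
    tokens: Option<&'static str>, // stands in for Option<LazyTokenStream>
}

// Same effect as the pass's `i.tokens = None` in each flat_map_* override.
fn strip_tokens(items: &mut [AssocItem]) {
    for item in items.iter_mut() {
        item.tokens = None;
    }
}

fn main() {
    let mut trait_items = vec![AssocItem { name: "f", tokens: Some("fn f();") }];
    strip_tokens(&mut trait_items);
    assert_eq!(trait_items[0].name, "f");
    assert!(trait_items.iter().all(|i| i.tokens.is_none()));
}
```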
compiler/rustc_mir/src/transform/validate.rs (+40 -5)

```diff
@@ -10,8 +10,8 @@ use rustc_middle::mir::{
     visit::{PlaceContext, Visitor},
 };
 use rustc_middle::mir::{
-    AggregateKind, BasicBlock, Body, BorrowKind, Local, Location, MirPhase, Operand, Rvalue,
-    SourceScope, Statement, StatementKind, Terminator, TerminatorKind, VarDebugInfo,
+    AggregateKind, BasicBlock, Body, BorrowKind, Local, Location, MirPhase, Operand, PlaceRef,
+    Rvalue, SourceScope, Statement, StatementKind, Terminator, TerminatorKind, VarDebugInfo,
 };
 use rustc_middle::ty::relate::{Relate, RelateResult, TypeRelation};
 use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt};
@@ -46,8 +46,16 @@ impl<'tcx> MirPass<'tcx> for Validator {
             .iterate_to_fixpoint()
             .into_results_cursor(body);
 
-        TypeChecker { when: &self.when, body, tcx, param_env, mir_phase, storage_liveness }
-            .visit_body(body);
+        TypeChecker {
+            when: &self.when,
+            body,
+            tcx,
+            param_env,
+            mir_phase,
+            storage_liveness,
+            place_cache: Vec::new(),
+        }
+        .visit_body(body);
     }
 }
 
@@ -150,6 +158,7 @@ struct TypeChecker<'a, 'tcx> {
     param_env: ParamEnv<'tcx>,
     mir_phase: MirPhase,
     storage_liveness: ResultsCursor<'a, 'tcx, MaybeStorageLive>,
+    place_cache: Vec<PlaceRef<'tcx>>,
 }
 
 impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
@@ -391,7 +400,7 @@
                     self.check_edge(location, *unwind, EdgeKind::Unwind);
                 }
             }
-            TerminatorKind::Call { func, destination, cleanup, .. } => {
+            TerminatorKind::Call { func, args, destination, cleanup, .. } => {
                 let func_ty = func.ty(&self.body.local_decls, self.tcx);
                 match func_ty.kind() {
                     ty::FnPtr(..) | ty::FnDef(..) => {}
@@ -406,6 +415,32 @@
                 if let Some(cleanup) = cleanup {
                     self.check_edge(location, *cleanup, EdgeKind::Unwind);
                 }
+
+                // The call destination place and Operand::Move place used as an argument might be
+                // passed by a reference to the callee. Consequently they must be non-overlapping.
+                // Currently this simply checks for duplicate places.
+                self.place_cache.clear();
+                if let Some((destination, _)) = destination {
+                    self.place_cache.push(destination.as_ref());
+                }
+                for arg in args {
+                    if let Operand::Move(place) = arg {
+                        self.place_cache.push(place.as_ref());
+                    }
+                }
+                let all_len = self.place_cache.len();
+                self.place_cache.sort_unstable();
+                self.place_cache.dedup();
+                let has_duplicates = all_len != self.place_cache.len();
+                if has_duplicates {
+                    self.fail(
+                        location,
+                        format!(
+                            "encountered overlapping memory in `Call` terminator: {:?}",
+                            terminator.kind,
+                        ),
+                    );
+                }
             }
             TerminatorKind::Assert { cond, target, cleanup, .. } => {
                 let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
```

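The new aliasing check reduces to a simple property: a sorted list shrinks under `dedup` exactly when it contained duplicates. A standalone sketch of that test, with integer pairs standing in for rustc's `PlaceRef`:

```rust
// Collect candidate places, sort, dedup, and compare lengths: any
// shrinkage means two candidates were the same place (an overlap).
fn has_duplicates(mut places: Vec<(usize, usize)>) -> bool {
    let all_len = places.len();
    places.sort_unstable();
    places.dedup();
    all_len != places.len()
}

fn main() {
    // Destination (0, 0) also passed as a moved argument: overlap.
    assert!(has_duplicates(vec![(0, 0), (1, 0), (0, 0)]));
    assert!(!has_duplicates(vec![(0, 0), (1, 0)]));
}
```

`sort_unstable` is the right choice here: equal elements are about to be merged by `dedup`, so stability buys nothing, and the unstable sort works in place without allocating.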
compiler/rustc_parse/src/lib.rs (+12 -11)

```diff
@@ -249,29 +249,30 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> TokenStream
     // came from. Here we attempt to extract these lossless token streams
     // before we fall back to the stringification.
 
-    let convert_tokens = |tokens: Option<LazyTokenStream>| tokens.map(|t| t.into_token_stream());
+    let convert_tokens =
+        |tokens: &Option<LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());
 
     let tokens = match *nt {
         Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
-        Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.clone()),
+        Nonterminal::NtBlock(ref block) => convert_tokens(&block.tokens),
         Nonterminal::NtStmt(ref stmt) => {
             // FIXME: We currently only collect tokens for `:stmt`
             // matchers in `macro_rules!` macros. When we start collecting
             // tokens for attributes on statements, we will need to prepend
             // attributes here
-            convert_tokens(stmt.tokens.clone())
+            convert_tokens(&stmt.tokens)
         }
-        Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.clone()),
-        Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.clone()),
+        Nonterminal::NtPat(ref pat) => convert_tokens(&pat.tokens),
+        Nonterminal::NtTy(ref ty) => convert_tokens(&ty.tokens),
         Nonterminal::NtIdent(ident, is_raw) => {
             Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into())
         }
         Nonterminal::NtLifetime(ident) => {
             Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into())
         }
-        Nonterminal::NtMeta(ref attr) => convert_tokens(attr.tokens.clone()),
-        Nonterminal::NtPath(ref path) => convert_tokens(path.tokens.clone()),
-        Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.clone()),
+        Nonterminal::NtMeta(ref attr) => convert_tokens(&attr.tokens),
+        Nonterminal::NtPath(ref path) => convert_tokens(&path.tokens),
+        Nonterminal::NtVis(ref vis) => convert_tokens(&vis.tokens),
         Nonterminal::NtTT(ref tt) => Some(tt.clone().into()),
         Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => {
             if expr.tokens.is_none() {
@@ -604,7 +605,7 @@ fn prepend_attrs(
     attrs: &[ast::Attribute],
     tokens: Option<&tokenstream::LazyTokenStream>,
 ) -> Option<tokenstream::TokenStream> {
-    let tokens = tokens?.clone().into_token_stream();
+    let tokens = tokens?.create_token_stream();
     if attrs.is_empty() {
         return Some(tokens);
     }
@@ -617,9 +618,9 @@
         );
         builder.push(
             attr.tokens
-                .clone()
+                .as_ref()
                 .unwrap_or_else(|| panic!("Attribute {:?} is missing tokens!", attr))
-                .into_token_stream(),
+                .create_token_stream(),
         );
     }
     builder.push(tokens);
```

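Every call site in this file follows the same pattern: instead of cloning the `Option<LazyTokenStream>` container just to call a consuming method, borrow it with `as_ref()` and let the new `&self` method produce the stream. A minimal sketch of the difference, with a hypothetical `Lazy` type:

```rust
// Hypothetical producer type; create() is the &self method, like
// LazyTokenStream::create_token_stream.
#[derive(Clone)]
struct Lazy(String);

impl Lazy {
    fn create(&self) -> String {
        self.0.clone()
    }
}

// Before: tokens.clone().map(|t| t.into_stream()) cloned the Lazy itself.
// After: borrow the Option and call the &self method on its contents.
fn convert(tokens: &Option<Lazy>) -> Option<String> {
    tokens.as_ref().map(|t| t.create())
}

fn main() {
    assert_eq!(convert(&Some(Lazy("tok".into()))), Some("tok".to_string()));
    assert_eq!(convert(&None), None);
}
```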
compiler/rustc_parse/src/parser/mod.rs (+37 -26)

```diff
@@ -16,8 +16,8 @@ pub use path::PathStyle;
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, DelimToken, Token, TokenKind};
-use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, LazyTokenStreamInner, Spacing};
-use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, Spacing};
+use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree};
 use rustc_ast::DUMMY_NODE_ID;
 use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe};
 use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit};
@@ -1199,15 +1199,12 @@
         f: impl FnOnce(&mut Self) -> PResult<'a, R>,
     ) -> PResult<'a, (R, Option<LazyTokenStream>)> {
         let start_token = (self.token.clone(), self.token_spacing);
-        let mut cursor_snapshot = self.token_cursor.clone();
+        let cursor_snapshot = self.token_cursor.clone();
 
         let ret = f(self)?;
 
-        let new_calls = self.token_cursor.num_next_calls;
-        let num_calls = new_calls - cursor_snapshot.num_next_calls;
-        let desugar_doc_comments = self.desugar_doc_comments;
-
         // We didn't capture any tokens
+        let num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
         if num_calls == 0 {
             return Ok((ret, None));
         }
@@ -1220,27 +1217,41 @@
         //
         // This also makes `Parser` very cheap to clone, since
         // there is no intermediate collection buffer to clone.
-        let lazy_cb = move || {
-            // The token produced by the final call to `next` or `next_desugared`
-            // was not actually consumed by the callback. The combination
-            // of chaining the initial token and using `take` produces the desired
-            // result - we produce an empty `TokenStream` if no calls were made,
-            // and omit the final token otherwise.
-            let tokens = std::iter::once(start_token)
-                .chain((0..num_calls).map(|_| {
-                    if desugar_doc_comments {
-                        cursor_snapshot.next_desugared()
-                    } else {
-                        cursor_snapshot.next()
-                    }
-                }))
-                .take(num_calls);
+        struct LazyTokenStreamImpl {
+            start_token: (Token, Spacing),
+            cursor_snapshot: TokenCursor,
+            num_calls: usize,
+            desugar_doc_comments: bool,
+        }
+        impl CreateTokenStream for LazyTokenStreamImpl {
+            fn create_token_stream(&self) -> TokenStream {
+                // The token produced by the final call to `next` or `next_desugared`
+                // was not actually consumed by the callback. The combination
+                // of chaining the initial token and using `take` produces the desired
+                // result - we produce an empty `TokenStream` if no calls were made,
+                // and omit the final token otherwise.
+                let mut cursor_snapshot = self.cursor_snapshot.clone();
+                let tokens = std::iter::once(self.start_token.clone())
+                    .chain((0..self.num_calls).map(|_| {
+                        if self.desugar_doc_comments {
+                            cursor_snapshot.next_desugared()
+                        } else {
+                            cursor_snapshot.next()
+                        }
+                    }))
+                    .take(self.num_calls);
 
-            make_token_stream(tokens)
-        };
-        let stream = LazyTokenStream::new(LazyTokenStreamInner::Lazy(Box::new(lazy_cb)));
+                make_token_stream(tokens)
+            }
+        }
 
-        Ok((ret, Some(stream)))
+        let lazy_impl = LazyTokenStreamImpl {
+            start_token,
+            cursor_snapshot,
+            num_calls,
+            desugar_doc_comments: self.desugar_doc_comments,
+        };
+        Ok((ret, Some(LazyTokenStream::new(lazy_impl))))
     }
 
     /// `::{` or `::*`
```

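This is the other half of the trait refactor: everything the old closure captured (`start_token`, `cursor_snapshot`, `num_calls`, `desugar_doc_comments`) becomes explicit fields of `LazyTokenStreamImpl`, and each call to `create_token_stream` replays the parse against a local clone of the cursor snapshot, so the method can take `&self` and run any number of times. A reduced sketch of that closure-to-struct move, with toy types in place of `TokenCursor` and `TokenStream`:

```rust
// Object-safe producer trait, as in the tokenstream.rs diff.
trait CreateStream {
    fn create(&self) -> Vec<u32>;
}

// The captured environment of the old closure, now explicit fields.
struct ReplayImpl {
    start: u32,       // stands in for the (Token, Spacing) snapshot
    num_calls: usize, // how many tokens were consumed while parsing
}

impl CreateStream for ReplayImpl {
    fn create(&self) -> Vec<u32> {
        // Re-derive the stream from the captured snapshot on every call,
        // the way LazyTokenStreamImpl replays its cloned TokenCursor.
        (self.start..).take(self.num_calls).collect()
    }
}

fn main() {
    let lazy = ReplayImpl { start: 10, num_calls: 3 };
    assert_eq!(lazy.create(), vec![10, 11, 12]);
    assert_eq!(lazy.create(), vec![10, 11, 12]); // repeatable: &self, not FnOnce
}
```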