Skip to content

Commit a52085d

Browse files
committed
Auto merge of rust-lang#134470 - jieyouxu:rollup-kld7kmk, r=jieyouxu
Rollup of 11 pull requests Successful merges: - rust-lang#130786 ( mir-opt: a sub-BB of a cleanup BB must also be a cleanup BB in `EarlyOtherwiseBranch`) - rust-lang#133926 (Fix const conditions for RPITITs) - rust-lang#134161 (Overhaul token cursors) - rust-lang#134253 (Overhaul keyword handling) - rust-lang#134394 (Clarify the match ergonomics 2024 migration lint's output) - rust-lang#134399 (Do not do if ! else, use unnegated cond and swap the branches instead) - rust-lang#134420 (refactor: replace &PathBuf with &Path to enhance generality) - rust-lang#134436 (tests/assembly/asm: Remove uses of rustc_attrs and lang_items features by using minicore) - rust-lang#134444 (Fix `x build --stage 1 std` when using cg_cranelift as the default backend) - rust-lang#134452 (fix(LazyCell): documentation of get[_mut] was wrong) - rust-lang#134460 (Merge some patterns together) r? `@ghost` `@rustbot` modify labels: rollup
2 parents 057bdb3 + 29d201a commit a52085d

File tree

87 files changed

+1715
-1191
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

87 files changed

+1715
-1191
lines changed

compiler/rustc_ast/src/attr/mod.rs

+30-39
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
//! Functions dealing with attributes and meta items.
22
33
use std::fmt::Debug;
4-
use std::iter;
54
use std::sync::atomic::{AtomicU32, Ordering};
65

76
use rustc_index::bit_set::GrowableBitSet;
@@ -16,7 +15,9 @@ use crate::ast::{
1615
};
1716
use crate::ptr::P;
1817
use crate::token::{self, CommentKind, Delimiter, Token};
19-
use crate::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, TokenStream, TokenTree};
18+
use crate::tokenstream::{
19+
DelimSpan, LazyAttrTokenStream, Spacing, TokenStream, TokenStreamIter, TokenTree,
20+
};
2021
use crate::util::comments;
2122
use crate::util::literal::escape_string_symbol;
2223

@@ -365,22 +366,19 @@ impl MetaItem {
365366
}
366367
}
367368

368-
fn from_tokens<'a, I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
369-
where
370-
I: Iterator<Item = &'a TokenTree>,
371-
{
369+
fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItem> {
372370
// FIXME: Share code with `parse_path`.
373-
let tt = tokens.next().map(|tt| TokenTree::uninterpolate(tt));
371+
let tt = iter.next().map(|tt| TokenTree::uninterpolate(tt));
374372
let path = match tt.as_deref() {
375373
Some(&TokenTree::Token(
376374
Token { kind: ref kind @ (token::Ident(..) | token::PathSep), span },
377375
_,
378376
)) => 'arm: {
379377
let mut segments = if let &token::Ident(name, _) = kind {
380378
if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) =
381-
tokens.peek()
379+
iter.peek()
382380
{
383-
tokens.next();
381+
iter.next();
384382
thin_vec![PathSegment::from_ident(Ident::new(name, span))]
385383
} else {
386384
break 'arm Path::from_ident(Ident::new(name, span));
@@ -390,16 +388,16 @@ impl MetaItem {
390388
};
391389
loop {
392390
if let Some(&TokenTree::Token(Token { kind: token::Ident(name, _), span }, _)) =
393-
tokens.next().map(|tt| TokenTree::uninterpolate(tt)).as_deref()
391+
iter.next().map(|tt| TokenTree::uninterpolate(tt)).as_deref()
394392
{
395393
segments.push(PathSegment::from_ident(Ident::new(name, span)));
396394
} else {
397395
return None;
398396
}
399397
if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) =
400-
tokens.peek()
398+
iter.peek()
401399
{
402-
tokens.next();
400+
iter.next();
403401
} else {
404402
break;
405403
}
@@ -420,8 +418,8 @@ impl MetaItem {
420418
}
421419
_ => return None,
422420
};
423-
let list_closing_paren_pos = tokens.peek().map(|tt| tt.span().hi());
424-
let kind = MetaItemKind::from_tokens(tokens)?;
421+
let list_closing_paren_pos = iter.peek().map(|tt| tt.span().hi());
422+
let kind = MetaItemKind::from_tokens(iter)?;
425423
let hi = match &kind {
426424
MetaItemKind::NameValue(lit) => lit.span.hi(),
427425
MetaItemKind::List(..) => list_closing_paren_pos.unwrap_or(path.span.hi()),
@@ -438,25 +436,23 @@ impl MetaItem {
438436
impl MetaItemKind {
439437
// public because it can be called in the hir
440438
pub fn list_from_tokens(tokens: TokenStream) -> Option<ThinVec<MetaItemInner>> {
441-
let mut tokens = tokens.trees().peekable();
439+
let mut iter = tokens.iter();
442440
let mut result = ThinVec::new();
443-
while tokens.peek().is_some() {
444-
let item = MetaItemInner::from_tokens(&mut tokens)?;
441+
while iter.peek().is_some() {
442+
let item = MetaItemInner::from_tokens(&mut iter)?;
445443
result.push(item);
446-
match tokens.next() {
444+
match iter.next() {
447445
None | Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) => {}
448446
_ => return None,
449447
}
450448
}
451449
Some(result)
452450
}
453451

454-
fn name_value_from_tokens<'a>(
455-
tokens: &mut impl Iterator<Item = &'a TokenTree>,
456-
) -> Option<MetaItemKind> {
457-
match tokens.next() {
452+
fn name_value_from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItemKind> {
453+
match iter.next() {
458454
Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) => {
459-
MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees())
455+
MetaItemKind::name_value_from_tokens(&mut inner_tokens.iter())
460456
}
461457
Some(TokenTree::Token(token, _)) => {
462458
MetaItemLit::from_token(token).map(MetaItemKind::NameValue)
@@ -465,19 +461,17 @@ impl MetaItemKind {
465461
}
466462
}
467463

468-
fn from_tokens<'a>(
469-
tokens: &mut iter::Peekable<impl Iterator<Item = &'a TokenTree>>,
470-
) -> Option<MetaItemKind> {
471-
match tokens.peek() {
464+
fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItemKind> {
465+
match iter.peek() {
472466
Some(TokenTree::Delimited(.., Delimiter::Parenthesis, inner_tokens)) => {
473467
let inner_tokens = inner_tokens.clone();
474-
tokens.next();
468+
iter.next();
475469
MetaItemKind::list_from_tokens(inner_tokens).map(MetaItemKind::List)
476470
}
477471
Some(TokenTree::Delimited(..)) => None,
478472
Some(TokenTree::Token(Token { kind: token::Eq, .. }, _)) => {
479-
tokens.next();
480-
MetaItemKind::name_value_from_tokens(tokens)
473+
iter.next();
474+
MetaItemKind::name_value_from_tokens(iter)
481475
}
482476
_ => Some(MetaItemKind::Word),
483477
}
@@ -593,22 +587,19 @@ impl MetaItemInner {
593587
self.meta_item().is_some()
594588
}
595589

596-
fn from_tokens<'a, I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItemInner>
597-
where
598-
I: Iterator<Item = &'a TokenTree>,
599-
{
600-
match tokens.peek() {
590+
fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItemInner> {
591+
match iter.peek() {
601592
Some(TokenTree::Token(token, _)) if let Some(lit) = MetaItemLit::from_token(token) => {
602-
tokens.next();
593+
iter.next();
603594
return Some(MetaItemInner::Lit(lit));
604595
}
605596
Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) => {
606-
tokens.next();
607-
return MetaItemInner::from_tokens(&mut inner_tokens.trees().peekable());
597+
iter.next();
598+
return MetaItemInner::from_tokens(&mut inner_tokens.iter());
608599
}
609600
_ => {}
610601
}
611-
MetaItem::from_tokens(tokens).map(MetaItemInner::MetaItem)
602+
MetaItem::from_tokens(iter).map(MetaItemInner::MetaItem)
612603
}
613604
}
614605

compiler/rustc_ast/src/token.rs

+7-1
Original file line numberDiff line numberDiff line change
@@ -903,7 +903,8 @@ impl Token {
903903
self.is_non_raw_ident_where(|id| id.name == kw)
904904
}
905905

906-
/// Returns `true` if the token is a given keyword, `kw` or if `case` is `Insensitive` and this token is an identifier equal to `kw` ignoring the case.
906+
/// Returns `true` if the token is a given keyword, `kw` or if `case` is `Insensitive` and this
907+
/// token is an identifier equal to `kw` ignoring the case.
907908
pub fn is_keyword_case(&self, kw: Symbol, case: Case) -> bool {
908909
self.is_keyword(kw)
909910
|| (case == Case::Insensitive
@@ -916,6 +917,11 @@ impl Token {
916917
self.is_non_raw_ident_where(Ident::is_path_segment_keyword)
917918
}
918919

920+
/// Don't use this unless you're doing something very loose and heuristic-y.
921+
pub fn is_any_keyword(&self) -> bool {
922+
self.is_non_raw_ident_where(Ident::is_any_keyword)
923+
}
924+
919925
/// Returns true for reserved identifiers used internally for elided lifetimes,
920926
/// unnamed method parameters, crate root module, error recovery etc.
921927
pub fn is_special_ident(&self) -> bool {

compiler/rustc_ast/src/tokenstream.rs

+22-54
Original file line numberDiff line numberDiff line change
@@ -99,7 +99,7 @@ where
9999
CTX: crate::HashStableContext,
100100
{
101101
fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
102-
for sub_tt in self.trees() {
102+
for sub_tt in self.iter() {
103103
sub_tt.hash_stable(hcx, hasher);
104104
}
105105
}
@@ -406,7 +406,7 @@ impl Eq for TokenStream {}
406406

407407
impl PartialEq<TokenStream> for TokenStream {
408408
fn eq(&self, other: &TokenStream) -> bool {
409-
self.trees().eq(other.trees())
409+
self.iter().eq(other.iter())
410410
}
411411
}
412412

@@ -423,24 +423,24 @@ impl TokenStream {
423423
self.0.len()
424424
}
425425

426-
pub fn trees(&self) -> RefTokenTreeCursor<'_> {
427-
RefTokenTreeCursor::new(self)
426+
pub fn get(&self, index: usize) -> Option<&TokenTree> {
427+
self.0.get(index)
428428
}
429429

430-
pub fn into_trees(self) -> TokenTreeCursor {
431-
TokenTreeCursor::new(self)
430+
pub fn iter(&self) -> TokenStreamIter<'_> {
431+
TokenStreamIter::new(self)
432432
}
433433

434434
/// Compares two `TokenStream`s, checking equality without regarding span information.
435435
pub fn eq_unspanned(&self, other: &TokenStream) -> bool {
436-
let mut t1 = self.trees();
437-
let mut t2 = other.trees();
438-
for (t1, t2) in iter::zip(&mut t1, &mut t2) {
439-
if !t1.eq_unspanned(t2) {
436+
let mut iter1 = self.iter();
437+
let mut iter2 = other.iter();
438+
for (tt1, tt2) in iter::zip(&mut iter1, &mut iter2) {
439+
if !tt1.eq_unspanned(tt2) {
440440
return false;
441441
}
442442
}
443-
t1.next().is_none() && t2.next().is_none()
443+
iter1.next().is_none() && iter2.next().is_none()
444444
}
445445

446446
/// Create a token stream containing a single token with alone spacing. The
@@ -509,7 +509,7 @@ impl TokenStream {
509509
#[must_use]
510510
pub fn flattened(&self) -> TokenStream {
511511
fn can_skip(stream: &TokenStream) -> bool {
512-
stream.trees().all(|tree| match tree {
512+
stream.iter().all(|tree| match tree {
513513
TokenTree::Token(token, _) => !matches!(
514514
token.kind,
515515
token::NtIdent(..) | token::NtLifetime(..) | token::Interpolated(..)
@@ -522,7 +522,7 @@ impl TokenStream {
522522
return self.clone();
523523
}
524524

525-
self.trees().map(|tree| TokenStream::flatten_token_tree(tree)).collect()
525+
self.iter().map(|tree| TokenStream::flatten_token_tree(tree)).collect()
526526
}
527527

528528
// If `vec` is not empty, try to glue `tt` onto its last token. The return
@@ -665,25 +665,26 @@ impl TokenStream {
665665
}
666666
}
667667

668-
/// By-reference iterator over a [`TokenStream`], that produces `&TokenTree`
669-
/// items.
670668
#[derive(Clone)]
671-
pub struct RefTokenTreeCursor<'t> {
669+
pub struct TokenStreamIter<'t> {
672670
stream: &'t TokenStream,
673671
index: usize,
674672
}
675673

676-
impl<'t> RefTokenTreeCursor<'t> {
674+
impl<'t> TokenStreamIter<'t> {
677675
fn new(stream: &'t TokenStream) -> Self {
678-
RefTokenTreeCursor { stream, index: 0 }
676+
TokenStreamIter { stream, index: 0 }
679677
}
680678

681-
pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
682-
self.stream.0.get(self.index + n)
679+
// Peeking could be done via `Peekable`, but most iterators need peeking,
680+
// and this is simple and avoids the need to use `peekable` and `Peekable`
681+
// at all the use sites.
682+
pub fn peek(&self) -> Option<&'t TokenTree> {
683+
self.stream.0.get(self.index)
683684
}
684685
}
685686

686-
impl<'t> Iterator for RefTokenTreeCursor<'t> {
687+
impl<'t> Iterator for TokenStreamIter<'t> {
687688
type Item = &'t TokenTree;
688689

689690
fn next(&mut self) -> Option<&'t TokenTree> {
@@ -694,39 +695,6 @@ impl<'t> Iterator for RefTokenTreeCursor<'t> {
694695
}
695696
}
696697

697-
/// Owning by-value iterator over a [`TokenStream`], that produces `&TokenTree`
698-
/// items.
699-
///
700-
/// Doesn't impl `Iterator` because Rust doesn't permit an owning iterator to
701-
/// return `&T` from `next`; the need for an explicit lifetime in the `Item`
702-
/// associated type gets in the way. Instead, use `next_ref` (which doesn't
703-
/// involve associated types) for getting individual elements, or
704-
/// `RefTokenTreeCursor` if you really want an `Iterator`, e.g. in a `for`
705-
/// loop.
706-
#[derive(Clone, Debug)]
707-
pub struct TokenTreeCursor {
708-
pub stream: TokenStream,
709-
index: usize,
710-
}
711-
712-
impl TokenTreeCursor {
713-
fn new(stream: TokenStream) -> Self {
714-
TokenTreeCursor { stream, index: 0 }
715-
}
716-
717-
#[inline]
718-
pub fn next_ref(&mut self) -> Option<&TokenTree> {
719-
self.stream.0.get(self.index).map(|tree| {
720-
self.index += 1;
721-
tree
722-
})
723-
}
724-
725-
pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
726-
self.stream.0.get(self.index + n)
727-
}
728-
}
729-
730698
#[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)]
731699
pub struct DelimSpan {
732700
pub open: Span,

compiler/rustc_ast_pretty/src/pprust/state.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -725,7 +725,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
725725
// E.g. we have seen cases where a proc macro can handle `a :: b` but not
726726
// `a::b`. See #117433 for some examples.
727727
fn print_tts(&mut self, tts: &TokenStream, convert_dollar_crate: bool) {
728-
let mut iter = tts.trees().peekable();
728+
let mut iter = tts.iter().peekable();
729729
while let Some(tt) = iter.next() {
730730
let spacing = self.print_tt(tt, convert_dollar_crate);
731731
if let Some(next) = iter.peek() {

compiler/rustc_builtin_macros/src/concat_idents.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ pub(crate) fn expand_concat_idents<'cx>(
1818
}
1919

2020
let mut res_str = String::new();
21-
for (i, e) in tts.trees().enumerate() {
21+
for (i, e) in tts.iter().enumerate() {
2222
if i & 1 == 1 {
2323
match e {
2424
TokenTree::Token(Token { kind: token::Comma, .. }, _) => {}

compiler/rustc_builtin_macros/src/trace_macros.rs

+3-3
Original file line numberDiff line numberDiff line change
@@ -9,17 +9,17 @@ pub(crate) fn expand_trace_macros(
99
sp: Span,
1010
tt: TokenStream,
1111
) -> MacroExpanderResult<'static> {
12-
let mut cursor = tt.trees();
12+
let mut iter = tt.iter();
1313
let mut err = false;
14-
let value = match &cursor.next() {
14+
let value = match iter.next() {
1515
Some(TokenTree::Token(token, _)) if token.is_keyword(kw::True) => true,
1616
Some(TokenTree::Token(token, _)) if token.is_keyword(kw::False) => false,
1717
_ => {
1818
err = true;
1919
false
2020
}
2121
};
22-
err |= cursor.next().is_some();
22+
err |= iter.next().is_some();
2323
if err {
2424
cx.dcx().emit_err(errors::TraceMacros { span: sp });
2525
} else {

compiler/rustc_driver_impl/src/lib.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ use std::fmt::Write as _;
2626
use std::fs::{self, File};
2727
use std::io::{self, IsTerminal, Read, Write};
2828
use std::panic::{self, PanicHookInfo, catch_unwind};
29-
use std::path::PathBuf;
29+
use std::path::{Path, PathBuf};
3030
use std::process::{self, Command, Stdio};
3131
use std::sync::atomic::{AtomicBool, Ordering};
3232
use std::sync::{Arc, OnceLock};
@@ -460,7 +460,7 @@ fn run_compiler(
460460
})
461461
}
462462

463-
fn dump_feature_usage_metrics(tcxt: TyCtxt<'_>, metrics_dir: &PathBuf) {
463+
fn dump_feature_usage_metrics(tcxt: TyCtxt<'_>, metrics_dir: &Path) {
464464
let output_filenames = tcxt.output_filenames(());
465465
let mut metrics_file_name = std::ffi::OsString::from("unstable_feature_usage_metrics-");
466466
let mut metrics_path = output_filenames.with_directory_and_extension(metrics_dir, "json");

0 commit comments

Comments (0)