diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs
index e933eb434a1557c0664f431edececf22686c8686..ff89982a4efd8ccbf8f243562ccaf7f5f448f98b 100644
--- a/compiler/rustc_ast/src/ast.rs
+++ b/compiler/rustc_ast/src/ast.rs
@@ -925,16 +925,6 @@ pub fn tokens_mut(&mut self) -> Option<&mut LazyTokenStream> {
         }
     }
 
-    pub fn set_tokens(&mut self, tokens: Option<LazyTokenStream>) {
-        match self.kind {
-            StmtKind::Local(ref mut local) => local.tokens = tokens,
-            StmtKind::Item(ref mut item) => item.tokens = tokens,
-            StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => expr.tokens = tokens,
-            StmtKind::Empty => {}
-            StmtKind::MacCall(ref mut mac) => mac.tokens = tokens,
-        }
-    }
-
     pub fn has_trailing_semicolon(&self) -> bool {
         match &self.kind {
             StmtKind::Semi(_) => true,
@@ -2890,3 +2880,69 @@ fn try_from(item_kind: ItemKind) -> Result<ForeignItemKind, ItemKind> {
 }
 
 pub type ForeignItem = Item<ForeignItemKind>;
+
+pub trait HasTokens {
+    /// Called by `Parser::collect_tokens` to store the collected
+    /// tokens inside an AST node
+    fn finalize_tokens(&mut self, tokens: LazyTokenStream);
+}
+
+impl<T: HasTokens> HasTokens for P<T> {
+    fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
+        (**self).finalize_tokens(tokens);
+    }
+}
+
+impl<T: HasTokens> HasTokens for Option<T> {
+    fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
+        if let Some(inner) = self {
+            inner.finalize_tokens(tokens);
+        }
+    }
+}
+
+impl HasTokens for Attribute {
+    fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
+        match &mut self.kind {
+            AttrKind::Normal(_, attr_tokens) => {
+                if attr_tokens.is_none() {
+                    *attr_tokens = Some(tokens);
+                }
+            }
+            AttrKind::DocComment(..) => {
+                panic!("Called finalize_tokens on doc comment attr {:?}", self)
+            }
+        }
+    }
+}
+
+impl HasTokens for Stmt {
+    fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
+        let stmt_tokens = match self.kind {
+            StmtKind::Local(ref mut local) => &mut local.tokens,
+            StmtKind::Item(ref mut item) => &mut item.tokens,
+            StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => &mut expr.tokens,
+            StmtKind::Empty => return,
+            StmtKind::MacCall(ref mut mac) => &mut mac.tokens,
+        };
+        if stmt_tokens.is_none() {
+            *stmt_tokens = Some(tokens);
+        }
+    }
+}
+
+macro_rules! derive_has_tokens {
+    ($($ty:path),*) => { $(
+        impl HasTokens for $ty {
+            fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
+                if self.tokens.is_none() {
+                    self.tokens = Some(tokens);
+                }
+            }
+        }
+    )* }
+}
+
+derive_has_tokens! {
+    Item, Expr, Ty, AttrItem, Visibility, Path, Block, Pat
+}
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index fae09fa6fec65bcb43c283ab2088749427530925..1b26fb3337043dc29845035817c11b4aa10bd12c 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -89,7 +89,7 @@ pub fn parse_attribute(
             inner_parse_policy, self.token
         );
         let lo = self.token.span;
-        let ((item, style, span), tokens) = self.collect_tokens(|this| {
+        self.collect_tokens(|this| {
             if this.eat(&token::Pound) {
                 let style = if this.eat(&token::Not) {
                     ast::AttrStyle::Inner
@@ -107,15 +107,13 @@ pub fn parse_attribute(
                     this.error_on_forbidden_inner_attr(attr_sp, inner_parse_policy);
                 }
 
-                Ok((item, style, attr_sp))
+                Ok(attr::mk_attr_from_item(item, None, style, attr_sp))
             } else {
                 let token_str = pprust::token_to_string(&this.token);
                 let msg = &format!("expected `#`, found `{}`", token_str);
                 Err(this.struct_span_err(this.token.span, msg))
             }
-        })?;
-
-        Ok(attr::mk_attr_from_item(item, tokens, style, span))
+        })
     }
 
     pub(super) fn error_on_forbidden_inner_attr(&self, attr_sp: Span, policy: InnerAttrPolicy<'_>) {
@@ -165,13 +163,7 @@ pub fn parse_attr_item(&mut self, capture_tokens: bool) -> PResult<'a, ast::Attr
             let args = this.parse_attr_args()?;
             Ok(ast::AttrItem { path, args, tokens: None })
         };
-        if capture_tokens {
-            let (mut item, tokens) = self.collect_tokens(do_parse)?;
-            item.tokens = tokens;
-            item
-        } else {
-            do_parse(self)?
-        }
+        if capture_tokens { self.collect_tokens(do_parse) } else { do_parse(self) }?
     })
 }
 
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index f4332e4548a1930688626878476bf2c6c1484bdf..6db415ead415c22ca34d1556bde48f5c211cf294 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -472,7 +472,8 @@ fn parse_prefix_range_expr(&mut self, attrs: Option<AttrVec>) -> PResult<'a, P<
 
     /// Parses a prefix-unary-operator expr.
     fn parse_prefix_expr(&mut self, attrs: Option<AttrVec>) -> PResult<'a, P<Expr>> {
         let attrs = self.parse_or_use_outer_attributes(attrs)?;
-        self.maybe_collect_tokens(super::attr::maybe_needs_tokens(&attrs), |this| {
+        let needs_tokens = super::attr::maybe_needs_tokens(&attrs);
+        let do_parse = |this: &mut Parser<'a>| {
             let lo = this.token.span;
             // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
             let (hi, ex) = match this.token.uninterpolate().kind {
@@ -488,7 +489,8 @@ fn parse_prefix_expr(&mut self, attrs: Option<AttrVec>) -> PResult<'a, P<Expr>>
                 _ => return this.parse_dot_or_call_expr(Some(attrs)),
             }?;
             Ok(this.mk_expr(lo.to(hi), ex, attrs))
-        })
+        };
+        if needs_tokens { self.collect_tokens(do_parse) } else { do_parse(self) }
     }
 
     fn parse_prefix_expr_common(&mut self, lo: Span) -> PResult<'a, (Span, P<Expr>)> {
@@ -1125,20 +1127,6 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
         }
     }
 
-    fn maybe_collect_tokens(
-        &mut self,
-        needs_tokens: bool,
-        f: impl FnOnce(&mut Self) -> PResult<'a, P<Expr>>,
-    ) -> PResult<'a, P<Expr>> {
-        if needs_tokens {
-            let (mut expr, tokens) = self.collect_tokens(f)?;
-            expr.tokens = tokens;
-            Ok(expr)
-        } else {
-            f(self)
-        }
-    }
-
     fn parse_lit_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
         match self.parse_opt_lit() {
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 4fcc9edb7d91bbf962de491b82fd6a91bd4dc0b2..810ae61307c1950ec590a9e917a26509c3189369 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -125,19 +125,7 @@ pub(super) fn parse_item_common(
             item
         };
 
-        let (mut item, tokens) = if needs_tokens {
-            let (item, tokens) = self.collect_tokens(parse_item)?;
-            (item, tokens)
-        } else {
-            (parse_item(self)?, None)
-        };
-        if let Some(item) = &mut item {
-            // If we captured tokens during parsing (due to encountering an `NtItem`),
-            // use those instead
-            if item.tokens.is_none() {
-                item.tokens = tokens;
-            }
-        }
+        let item = if needs_tokens { self.collect_tokens(parse_item) } else { parse_item(self) }?;
 
         self.unclosed_delims.append(&mut unclosed_delims);
         Ok(item)
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 45964b1c988edc14d6ab02c99059467ce7dcdec2..5d7ea5b8d578e3733bd5fd4021817691c487f1ca 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -19,8 +19,8 @@ use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, Spacing};
 use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree, TreeAndSpacing};
 use rustc_ast::DUMMY_NODE_ID;
-use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe};
-use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit};
+use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, HasTokens};
+use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit, Unsafe};
 use rustc_ast::{Visibility, VisibilityKind};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
@@ -1234,10 +1234,10 @@ fn parse_abi(&mut self) -> Option<StrLit> {
     /// This restriction shouldn't be an issue in practice,
     /// since this function is used to record the tokens for
     /// a parsed AST item, which always has matching delimiters.
-    pub fn collect_tokens<R>(
+    pub fn collect_tokens<R: HasTokens>(
         &mut self,
         f: impl FnOnce(&mut Self) -> PResult<'a, R>,
-    ) -> PResult<'a, (R, Option<LazyTokenStream>)> {
+    ) -> PResult<'a, R> {
         let start_token = (self.token.clone(), self.token_spacing);
         let cursor_snapshot = TokenCursor {
             frame: self.token_cursor.frame.clone(),
@@ -1249,7 +1249,7 @@ pub fn collect_tokens<R: HasTokens>(
             append_unglued_token: self.token_cursor.append_unglued_token.clone(),
         };
 
-        let ret = f(self)?;
+        let mut ret = f(self)?;
 
         // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
         // and `num_calls`, we can reconstruct the `TokenStream` seen
@@ -1319,7 +1319,8 @@ fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream> {
             trailing_semi: false,
             append_unglued_token: self.token_cursor.append_unglued_token.clone(),
         };
-        Ok((ret, Some(LazyTokenStream::new(lazy_impl))))
+        ret.finalize_tokens(LazyTokenStream::new(lazy_impl));
+        Ok(ret)
     }
 
     /// `::{` or `::*`
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index eb5d7075f0081e6c8f75facd8db5b52041858aa3..97d0c0d8745832805718068cf40358ea34408298 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -99,80 +99,34 @@ pub fn parse_nonterminal(&mut self, kind: NonterminalKind) -> PResult<'a, Nonter
         // we always capture tokens for any `Nonterminal` which needs them.
         Ok(match kind {
             NonterminalKind::Item => match self.collect_tokens(|this| this.parse_item())? {
-                (Some(mut item), tokens) => {
-                    // If we captured tokens during parsing (due to outer attributes),
-                    // use those.
-                    if item.tokens.is_none() {
-                        item.tokens = tokens;
-                    }
-                    token::NtItem(item)
-                }
-                (None, _) => {
+                Some(item) => token::NtItem(item),
+                None => {
                     return Err(self.struct_span_err(self.token.span, "expected an item keyword"));
                 }
             },
             NonterminalKind::Block => {
-                let (mut block, tokens) = self.collect_tokens(|this| this.parse_block())?;
-                // We have have eaten an NtBlock, which could already have tokens
-                if block.tokens.is_none() {
-                    block.tokens = tokens;
-                }
-                token::NtBlock(block)
+                token::NtBlock(self.collect_tokens(|this| this.parse_block())?)
             }
-            NonterminalKind::Stmt => {
-                let (stmt, tokens) = self.collect_tokens(|this| this.parse_stmt())?;
-                match stmt {
-                    Some(mut s) => {
-                        if s.tokens().is_none() {
-                            s.set_tokens(tokens);
-                        }
-                        token::NtStmt(s)
-                    }
-                    None => {
-                        return Err(self.struct_span_err(self.token.span, "expected a statement"));
-                    }
+            NonterminalKind::Stmt => match self.collect_tokens(|this| this.parse_stmt())? {
+                Some(s) => token::NtStmt(s),
+                None => {
+                    return Err(self.struct_span_err(self.token.span, "expected a statement"));
                 }
-            }
+            },
             NonterminalKind::Pat2018 { .. } | NonterminalKind::Pat2021 { .. } => {
-                let (mut pat, tokens) = self.collect_tokens(|this| match kind {
+                token::NtPat(self.collect_tokens(|this| match kind {
                     NonterminalKind::Pat2018 { .. } => this.parse_pat(None),
                     NonterminalKind::Pat2021 { .. } => {
                         this.parse_top_pat(GateOr::Yes, RecoverComma::No)
                     }
                     _ => unreachable!(),
-                })?;
-                // We have have eaten an NtPat, which could already have tokens
-                if pat.tokens.is_none() {
-                    pat.tokens = tokens;
-                }
-                token::NtPat(pat)
-            }
-            NonterminalKind::Expr => {
-                let (mut expr, tokens) = self.collect_tokens(|this| this.parse_expr())?;
-                // If we captured tokens during parsing (due to outer attributes),
-                // use those.
-                if expr.tokens.is_none() {
-                    expr.tokens = tokens;
-                }
-                token::NtExpr(expr)
+                })?)
             }
+            NonterminalKind::Expr => token::NtExpr(self.collect_tokens(|this| this.parse_expr())?),
             NonterminalKind::Literal => {
-                let (mut lit, tokens) =
-                    self.collect_tokens(|this| this.parse_literal_maybe_minus())?;
-                // We have have eaten a nonterminal, which could already have tokens
-                if lit.tokens.is_none() {
-                    lit.tokens = tokens;
-                }
-                token::NtLiteral(lit)
-            }
-            NonterminalKind::Ty => {
-                let (mut ty, tokens) = self.collect_tokens(|this| this.parse_ty())?;
-                // We have an eaten an NtTy, which could already have tokens
-                if ty.tokens.is_none() {
-                    ty.tokens = tokens;
-                }
-                token::NtTy(ty)
+                token::NtLiteral(self.collect_tokens(|this| this.parse_literal_maybe_minus())?)
             }
+            NonterminalKind::Ty => token::NtTy(self.collect_tokens(|this| this.parse_ty())?),
             // this could be handled like a token, since it is one
             NonterminalKind::Ident => {
                 if let Some((ident, is_raw)) = get_macro_ident(&self.token) {
@@ -185,32 +139,15 @@ pub fn parse_nonterminal(&mut self, kind: NonterminalKind) -> PResult<'a, Nonter
                 }
             }
             NonterminalKind::Path => {
-                let (mut path, tokens) =
-                    self.collect_tokens(|this| this.parse_path(PathStyle::Type))?;
-                // We have have eaten an NtPath, which could already have tokens
-                if path.tokens.is_none() {
-                    path.tokens = tokens;
-                }
-                token::NtPath(path)
+                token::NtPath(self.collect_tokens(|this| this.parse_path(PathStyle::Type))?)
             }
             NonterminalKind::Meta => {
-                let (mut attr, tokens) = self.collect_tokens(|this| this.parse_attr_item(false))?;
-                // We may have eaten a nonterminal, which could already have tokens
-                if attr.tokens.is_none() {
-                    attr.tokens = tokens;
-                }
-                token::NtMeta(P(attr))
+                token::NtMeta(P(self.collect_tokens(|this| this.parse_attr_item(false))?))
             }
             NonterminalKind::TT => token::NtTT(self.parse_token_tree()),
-            NonterminalKind::Vis => {
-                let (mut vis, tokens) =
-                    self.collect_tokens(|this| this.parse_visibility(FollowedByType::Yes))?;
-                // We may have etan an `NtVis`, which could already have tokens
-                if vis.tokens.is_none() {
-                    vis.tokens = tokens;
-                }
-                token::NtVis(vis)
-            }
+            NonterminalKind::Vis => token::NtVis(
+                self.collect_tokens(|this| this.parse_visibility(FollowedByType::Yes))?,
+            ),
             NonterminalKind::Lifetime => {
                 if self.check_lifetime() {
                     token::NtLifetime(self.expect_lifetime().ident)
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 2942747991a1db30da9059dca7977ed84060231c..641b29227db98d246adb8b6a481e2fed2c9490f1 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -89,15 +89,7 @@ fn parse_stmt_without_recovery(&mut self) -> PResult<'a, Option<Stmt>> {
         };
 
         let stmt = if has_attrs {
-            let (mut stmt, tokens) = self.collect_tokens(parse_stmt_inner)?;
-            if let Some(stmt) = &mut stmt {
-                // If we already have tokens (e.g. due to encounting an `NtStmt`),
-                // use those instead.
-                if stmt.tokens().is_none() {
-                    stmt.set_tokens(tokens);
-                }
-            }
-            stmt
+            self.collect_tokens(parse_stmt_inner)?
         } else {
             parse_stmt_inner(self)?
         };