diff --git a/src/librustc_ast_lowering/lib.rs b/src/librustc_ast_lowering/lib.rs
index 0eabb726b88c96fe64917415dbf7a4de1c2c940f..5d98fdeeaf9a5f9693ee3cd343d026e36bf54dc4 100644
--- a/src/librustc_ast_lowering/lib.rs
+++ b/src/librustc_ast_lowering/lib.rs
@@ -688,7 +688,7 @@ fn mark_span_with_reason(
     ) -> Span {
         span.fresh_expansion(ExpnData {
             allow_internal_unstable,
-            ..ExpnData::default(ExpnKind::Desugaring(reason), span, self.sess.edition())
+            ..ExpnData::default(ExpnKind::Desugaring(reason), span, self.sess.edition(), None)
         })
     }
diff --git a/src/librustc_expand/base.rs b/src/librustc_expand/base.rs
index fe5bf6f82c6d38b44a9cf9177f30079feba1a06a..0137080938fdd13637313c96a26c112ac319d576 100644
--- a/src/librustc_expand/base.rs
+++ b/src/librustc_expand/base.rs
@@ -13,6 +13,7 @@
 use rustc_errors::{DiagnosticBuilder, ErrorReported};
 use rustc_parse::{self, parser, MACRO_ARGUMENTS};
 use rustc_session::parse::ParseSess;
+use rustc_span::def_id::DefId;
 use rustc_span::edition::Edition;
 use rustc_span::hygiene::{AstPass, ExpnData, ExpnId, ExpnKind};
 use rustc_span::source_map::SourceMap;
@@ -857,7 +858,13 @@ pub fn non_macro_attr(mark_used: bool, edition: Edition) -> SyntaxExtension {
         SyntaxExtension::default(SyntaxExtensionKind::NonMacroAttr { mark_used }, edition)
     }
 
-    pub fn expn_data(&self, parent: ExpnId, call_site: Span, descr: Symbol) -> ExpnData {
+    pub fn expn_data(
+        &self,
+        parent: ExpnId,
+        call_site: Span,
+        descr: Symbol,
+        macro_def_id: Option<DefId>,
+    ) -> ExpnData {
         ExpnData {
             kind: ExpnKind::Macro(self.macro_kind(), descr),
             parent,
@@ -867,6 +874,7 @@ pub fn expn_data(&self, parent: ExpnId, call_site: Span, descr: Symbol) -> ExpnD
             allow_internal_unsafe: self.allow_internal_unsafe,
             local_inner_macros: self.local_inner_macros,
             edition: self.edition,
+            macro_def_id,
         }
     }
 }
diff --git a/src/librustc_expand/expand.rs b/src/librustc_expand/expand.rs
index 485c5147d2c26c89b963df4069cf1677ecd2738e..b505302f62501add8da868130e4401f384afa16b 100644
--- a/src/librustc_expand/expand.rs
+++ b/src/librustc_expand/expand.rs
@@ -988,6 +988,7 @@ fn collect(&mut self, fragment_kind: AstFragmentKind, kind: InvocationKind) -> A
                     ExpnKind::Macro(MacroKind::Attr, sym::derive),
                     item.span(),
                     self.cx.parse_sess.edition,
+                    None,
                 )
             }),
             _ => None,
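The three hunks above all thread one new piece of data through expansion bookkeeping: `ExpnData` now carries a `macro_def_id: Option<DefId>`, and construction sites with no macro definition behind them (desugarings, the synthetic derive expansion) pass `None`. A minimal, self-contained sketch of that pattern follows; the types are simplified stand-ins, not rustc's real definitions.

```rust
// Sketch with stand-in types (not rustc's `ExpnData`/`DefId`): an optional
// macro `DefId` is threaded through the constructor, and functional-update
// syntax lets callers override individual fields on top of the defaults.
#[derive(Debug, Clone, Copy)]
struct DefId(u32);

#[derive(Debug, Clone)]
struct ExpnData {
    allow_internal_unstable: bool,
    edition: u32,
    /// `Some` only when the expansion comes from invoking a named macro.
    macro_def_id: Option<DefId>,
}

impl ExpnData {
    fn default(edition: u32, macro_def_id: Option<DefId>) -> ExpnData {
        ExpnData { allow_internal_unstable: false, edition, macro_def_id }
    }
}

fn main() {
    // A compiler-generated desugaring has no macro behind it: pass `None`,
    // while still overriding other fields via `..` update syntax.
    let desugaring =
        ExpnData { allow_internal_unstable: true, ..ExpnData::default(2018, None) };
    // A macro invocation records the definition it resolved to.
    let invocation = ExpnData::default(2018, Some(DefId(42)));
    println!("{:?} {:?}", desugaring, invocation);
}
```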
diff --git a/src/librustc_parse/parser/mod.rs b/src/librustc_parse/parser/mod.rs
index bdb4d7c9df6be3c494ef64622aefcbd85594dd62..b21524cb9bdd2f2258df3ecf5c4d24ac96be4b16 100644
--- a/src/librustc_parse/parser/mod.rs
+++ b/src/librustc_parse/parser/mod.rs
@@ -118,6 +118,8 @@ fn drop(&mut self) {
 struct TokenCursor {
     frame: TokenCursorFrame,
     stack: Vec<TokenCursorFrame>,
+    cur_token: Option<TreeAndJoint>,
+    collecting: Option<Collecting>,
 }
 
 #[derive(Clone)]
@@ -127,30 +129,24 @@ struct TokenCursorFrame {
     open_delim: bool,
     tree_cursor: tokenstream::Cursor,
     close_delim: bool,
-    last_token: LastToken,
 }
 
-/// This is used in `TokenCursorFrame` above to track tokens that are consumed
-/// by the parser, and then that's transitively used to record the tokens that
-/// each parse AST item is created with.
-///
-/// Right now this has two states, either collecting tokens or not collecting
-/// tokens. If we're collecting tokens we just save everything off into a local
-/// `Vec`. This should eventually though likely save tokens from the original
-/// token stream and just use slicing of token streams to avoid creation of a
-/// whole new vector.
-///
-/// The second state is where we're passively not recording tokens, but the last
-/// token is still tracked for when we want to start recording tokens. This
-/// "last token" means that when we start recording tokens we'll want to ensure
-/// that this, the first token, is included in the output.
-///
-/// You can find some more example usage of this in the `collect_tokens` method
-/// on the parser.
-#[derive(Clone)]
-enum LastToken {
-    Collecting(Vec<TreeAndJoint>),
-    Was(Option<TreeAndJoint>),
+/// Used to track additional state needed by `collect_tokens`
+#[derive(Clone, Debug)]
+struct Collecting {
+    /// Holds the current tokens captured during the most
+    /// recent call to `collect_tokens`
+    buf: Vec<TreeAndJoint>,
+    /// The depth of the `TokenCursor` stack at the time
+    /// collection was started. When we encounter a `TokenTree::Delimited`,
+    /// we want to record the `TokenTree::Delimited` itself,
+    /// but *not* any of the inner tokens while we are inside
+    /// the new frame (this would cause us to record duplicate tokens).
+    ///
+    /// This `depth` field tracks the stack depth at which we are recording tokens.
+    /// Only tokens encountered at this depth will be recorded. See
+    /// `TokenCursor::next` for more details.
+    depth: usize,
 }
 
 impl TokenCursorFrame {
@@ -161,7 +157,6 @@ fn new(span: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
             open_delim: delim == token::NoDelim,
             tree_cursor: tts.clone().into_trees(),
             close_delim: delim == token::NoDelim,
-            last_token: LastToken::Was(None),
         }
     }
 }
@@ -171,12 +166,12 @@ fn next(&mut self) -> Token {
         loop {
            let tree = if !self.frame.open_delim {
                 self.frame.open_delim = true;
-                TokenTree::open_tt(self.frame.span, self.frame.delim)
-            } else if let Some(tree) = self.frame.tree_cursor.next() {
+                TokenTree::open_tt(self.frame.span, self.frame.delim).into()
+            } else if let Some(tree) = self.frame.tree_cursor.next_with_joint() {
                 tree
             } else if !self.frame.close_delim {
                 self.frame.close_delim = true;
-                TokenTree::close_tt(self.frame.span, self.frame.delim)
+                TokenTree::close_tt(self.frame.span, self.frame.delim).into()
             } else if let Some(frame) = self.stack.pop() {
                 self.frame = frame;
                 continue;
@@ -184,12 +179,25 @@ fn next(&mut self) -> Token {
                 return Token::new(token::Eof, DUMMY_SP);
             };
 
-            match self.frame.last_token {
-                LastToken::Collecting(ref mut v) => v.push(tree.clone().into()),
-                LastToken::Was(ref mut t) => *t = Some(tree.clone().into()),
+            // Don't set an open delimiter as our current token - we want
+            // to leave it as the full `TokenTree::Delimited` from the previous
+            // iteration of this loop.
+            if !matches!(tree.0, TokenTree::Token(Token { kind: TokenKind::OpenDelim(_), .. })) {
+                self.cur_token = Some(tree.clone());
+            }
+
+            if let Some(collecting) = &mut self.collecting {
+                if collecting.depth == self.stack.len() {
+                    debug!(
+                        "TokenCursor::next(): collected {:?} at depth {:?}",
+                        tree,
+                        self.stack.len()
+                    );
+                    collecting.buf.push(tree.clone().into())
+                }
             }
 
-            match tree {
+            match tree.0 {
                 TokenTree::Token(token) => return token,
                 TokenTree::Delimited(sp, delim, tts) => {
                     let frame = TokenCursorFrame::new(sp, delim, &tts);
@@ -350,6 +358,8 @@ pub fn new(
             token_cursor: TokenCursor {
                 frame: TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, &tokens),
                 stack: Vec::new(),
+                cur_token: None,
+                collecting: None,
             },
             desugar_doc_comments,
             unmatched_angle_bracket_count: 0,
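The cursor changes above replace the per-frame `LastToken` bookkeeping with cursor-global state: `cur_token` remembers the most recent tree (kept as the whole `TokenTree::Delimited` rather than its open delimiter), and `Collecting::depth` means tokens are recorded only while the cursor sits at the stack depth where collection began, so a delimited group is captured once, as a unit. A toy model of that depth rule, using hypothetical types rather than rustc's:

```rust
// Toy model of the depth check in `TokenCursor::next`: a delimited group is
// recorded once as a single tree, and the tokens inside it are skipped
// because the walker is then below the recording depth.
#[derive(Debug, Clone)]
enum Tree {
    Token(char),
    Delimited(Vec<Tree>),
}

fn walk(trees: &[Tree], depth: usize, record_depth: usize, buf: &mut Vec<Tree>) {
    for tree in trees {
        // Mirror of `collecting.depth == self.stack.len()`: only record
        // while we sit at the depth where collection started.
        if depth == record_depth {
            buf.push(tree.clone());
        }
        if let Tree::Delimited(inner) = tree {
            // Descending bumps the depth, so inner tokens are not recorded
            // again; they already live inside the pushed `Delimited`.
            walk(inner, depth + 1, record_depth, buf);
        }
    }
}

fn main() {
    let stream = vec![
        Tree::Token('a'),
        Tree::Delimited(vec![Tree::Token('b'), Tree::Token('c')]),
        Tree::Token('d'),
    ];
    let mut buf = Vec::new();
    walk(&stream, 0, 0, &mut buf);
    // Prints `a`, the whole `(b c)` group, then `d` - with no duplicates.
    println!("{:?}", buf);
}
```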
@@ -1105,65 +1115,95 @@ fn parse_abi(&mut self) -> Option<StrLit> {
         }
     }
 
+    /// Records all tokens consumed by the provided callback,
+    /// including the current token. These tokens are collected
+    /// into a `TokenStream`, and returned along with the result
+    /// of the callback.
+    ///
+    /// Note: If your callback consumes an opening delimiter
+    /// (including the case where you call `collect_tokens`
+    /// when the current token is an opening delimiter),
+    /// you must also consume the corresponding closing delimiter.
+    ///
+    /// That is, you can consume
+    /// `something ([{ }])` or `([{}])`, but not `([{}]`
+    ///
+    /// This restriction shouldn't be an issue in practice,
+    /// since this function is used to record the tokens for
+    /// a parsed AST item, which always has matching delimiters.
     fn collect_tokens<R>(
         &mut self,
         f: impl FnOnce(&mut Self) -> PResult<'a, R>,
     ) -> PResult<'a, (R, TokenStream)> {
         // Record all tokens we parse when parsing this item.
-        let mut tokens = Vec::new();
-        let prev_collecting = match self.token_cursor.frame.last_token {
-            LastToken::Collecting(ref mut list) => Some(mem::take(list)),
-            LastToken::Was(ref mut last) => {
-                tokens.extend(last.take());
-                None
-            }
-        };
-        self.token_cursor.frame.last_token = LastToken::Collecting(tokens);
-        let prev = self.token_cursor.stack.len();
+        let tokens: Vec<TreeAndJoint> = self.token_cursor.cur_token.clone().into_iter().collect();
+        debug!("collect_tokens: starting with {:?}", tokens);
+
+        // We need special handling for the case where `collect_tokens` is called
+        // on an opening delimiter (e.g. '('). At this point, we have already pushed
+        // a new frame - however, we want to record the original `TokenTree::Delimited`,
+        // for consistency with the case where we start recording one token earlier.
+        // See `TokenCursor::next` to see how `cur_token` is set up.
+        let prev_depth =
+            if matches!(self.token_cursor.cur_token, Some((TokenTree::Delimited(..), _))) {
+                if self.token_cursor.stack.is_empty() {
+                    // There is nothing below us in the stack that
+                    // the function could consume, so the only thing it can legally
+                    // capture is the entire contents of the current frame.
+                    return Ok((f(self)?, TokenStream::new(tokens)));
+                }
+                // We have already recorded the full `TokenTree::Delimited` when we created
+                // our `tokens` vector at the start of this function. We are now inside
+                // a new frame corresponding to the `TokenTree::Delimited` we already recorded.
+                // We don't want to record any of the tokens inside this frame, since they
+                // will be duplicates of the tokens nested inside the `TokenTree::Delimited`.
+                // Therefore, we set our recording depth to the *previous* frame. This allows
+                // us to record a sequence like `(foo).bar()`: the `(foo)` will be recorded
+                // as our initial `cur_token`, while the `.bar()` will be recorded after we
+                // pop the `(foo)` frame.
+                self.token_cursor.stack.len() - 1
+            } else {
+                self.token_cursor.stack.len()
+            };
+        let prev_collecting =
+            self.token_cursor.collecting.replace(Collecting { buf: tokens, depth: prev_depth });
+
         let ret = f(self);
-        let last_token = if self.token_cursor.stack.len() == prev {
-            &mut self.token_cursor.frame.last_token
-        } else if self.token_cursor.stack.get(prev).is_none() {
-            // This can happen due to a bad interaction of two unrelated recovery mechanisms with
-            // mismatched delimiters *and* recovery lookahead on the likely typo `pub ident(`
-            // (#62881).
-            return Ok((ret?, TokenStream::default()));
+
+        let mut collected_tokens = if let Some(collecting) = self.token_cursor.collecting.take() {
+            collecting.buf
         } else {
-            &mut self.token_cursor.stack[prev].last_token
+            let msg = format!("our vector went away?");
+            debug!("collect_tokens: {}", msg);
+            self.sess.span_diagnostic.delay_span_bug(self.token.span, &msg);
+            // This can happen due to a bad interaction of two unrelated recovery mechanisms
+            // with mismatched delimiters *and* recovery lookahead on the likely typo
+            // `pub ident(` (#62895, different but similar to the case above).
+            return Ok((ret?, TokenStream::default()));
         };
 
-        // Pull out the tokens that we've collected from the call to `f` above.
-        let mut collected_tokens = match *last_token {
-            LastToken::Collecting(ref mut v) => mem::take(v),
-            LastToken::Was(ref was) => {
-                let msg = format!("our vector went away? - found Was({:?})", was);
-                debug!("collect_tokens: {}", msg);
-                self.sess.span_diagnostic.delay_span_bug(self.token.span, &msg);
-                // This can happen due to a bad interaction of two unrelated recovery mechanisms
-                // with mismatched delimiters *and* recovery lookahead on the likely typo
-                // `pub ident(` (#62895, different but similar to the case above).
-                return Ok((ret?, TokenStream::default()));
-            }
-        };
+        debug!("collect_tokens: got raw tokens {:?}", collected_tokens);
 
         // If we're not at EOF our current token wasn't actually consumed by
         // `f`, but it'll still be in our list that we pulled out. In that case
         // put it back.
         let extra_token = if self.token != token::Eof { collected_tokens.pop() } else { None };
 
-        // If we were previously collecting tokens, then this was a recursive
-        // call. In that case we need to record all the tokens we collected in
-        // our parent list as well. To do that we push a clone of our stream
-        // onto the previous list.
-        match prev_collecting {
-            Some(mut list) => {
-                list.extend(collected_tokens.iter().cloned());
-                list.extend(extra_token);
-                *last_token = LastToken::Collecting(list);
-            }
-            None => {
-                *last_token = LastToken::Was(extra_token);
+        if let Some(mut collecting) = prev_collecting {
+            // If we were previously collecting at the same depth,
+            // then the previous call to `collect_tokens` needs to see
+            // the tokens we just recorded.
+            //
+            // If we were previously recording at a lower `depth`,
+            // then the previous `collect_tokens` call already recorded
+            // this entire frame in the form of a `TokenTree::Delimited`,
+            // so there is nothing else for us to do.
+            if collecting.depth == prev_depth {
+                collecting.buf.extend(collected_tokens.iter().cloned());
+                collecting.buf.extend(extra_token);
+                debug!("collect_tokens: updating previous buf to {:?}", collecting);
            }
+            self.token_cursor.collecting = Some(collecting)
         }
 
         Ok((ret?, TokenStream::new(collected_tokens)))
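The doc comment added above obliges callers of `collect_tokens` to keep delimiters balanced across the callback. A standalone illustration of that invariant, using a hypothetical checker that is not part of the parser:

```rust
// Sketch of the invariant the new doc comment asks callers to uphold:
// whatever the callback consumes must leave delimiters balanced.
fn delimiters_balanced(consumed: &str) -> bool {
    let mut stack = Vec::new();
    for c in consumed.chars() {
        match c {
            '(' | '[' | '{' => stack.push(c),
            ')' => if stack.pop() != Some('(') { return false; },
            ']' => if stack.pop() != Some('[') { return false; },
            '}' => if stack.pop() != Some('{') { return false; },
            _ => {}
        }
    }
    stack.is_empty()
}

fn main() {
    // The two shapes the doc comment allows...
    assert!(delimiters_balanced("something ([{ }])"));
    assert!(delimiters_balanced("([{}])"));
    // ...and the one it forbids: an opening `(` without its closing `)`.
    assert!(!delimiters_balanced("([{}]"));
}
```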
diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs
index 988ec3d4374e0f375765d8bbc05e2741c09f3a1d..c32b823fe73b20a1f808653622c98ad2f7f92491 100644
--- a/src/librustc_resolve/build_reduced_graph.rs
+++ b/src/librustc_resolve/build_reduced_graph.rs
@@ -126,8 +126,8 @@ impl<'a> Resolver<'a> {
     }
 
     crate fn macro_def_scope(&mut self, expn_id: ExpnId) -> Module<'a> {
-        let def_id = match self.macro_defs.get(&expn_id) {
-            Some(def_id) => *def_id,
+        let def_id = match expn_id.expn_data().macro_def_id {
+            Some(def_id) => def_id,
             None => return self.ast_transform_scopes.get(&expn_id).unwrap_or(&self.graph_root),
         };
         if let Some(id) = self.definitions.as_local_node_id(def_id) {
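This hunk swaps a lookup in the resolver's `macro_defs` side table for a read of the `macro_def_id` field that now lives on the expansion data itself, so the fact can no longer fall out of sync with the expansion it describes. A simplified before/after sketch of the two lookup shapes, with stand-in types rather than rustc's:

```rust
// Before: a map owned by the resolver, updated separately.
// After: the expansion data carries the answer itself.
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct ExpnId(u32);
#[derive(Clone, Copy, Debug, PartialEq)]
struct DefId(u32);

struct ExpnData {
    macro_def_id: Option<DefId>,
}

fn macro_def_before(map: &HashMap<ExpnId, DefId>, id: ExpnId) -> Option<DefId> {
    map.get(&id).copied()
}

fn macro_def_after(data: &ExpnData) -> Option<DefId> {
    data.macro_def_id
}

fn main() {
    let mut macro_defs = HashMap::new();
    macro_defs.insert(ExpnId(0), DefId(7));
    let expn_data = ExpnData { macro_def_id: Some(DefId(7)) };
    assert_eq!(macro_def_before(&macro_defs, ExpnId(0)), macro_def_after(&expn_data));
}
```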
diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs
index bfb7f081fc33336ebb9d12c5e0cd8539dae21b70..34368a07071fe14275a0c0101b8bf969069d28bf 100644
--- a/src/librustc_resolve/lib.rs
+++ b/src/librustc_resolve/lib.rs
@@ -922,7 +922,6 @@ pub struct Resolver<'a> {
     dummy_ext_bang: Lrc<SyntaxExtension>,
     dummy_ext_derive: Lrc<SyntaxExtension>,
     non_macro_attrs: [Lrc<SyntaxExtension>; 2],
-    macro_defs: FxHashMap<ExpnId, DefId>,
     local_macro_def_scopes: FxHashMap<NodeId, Module<'a>>,
     ast_transform_scopes: FxHashMap<ExpnId, Module<'a>>,
     unused_macros: NodeMap<Span>,
@@ -1152,9 +1151,6 @@ pub fn new(
         let mut invocation_parent_scopes = FxHashMap::default();
         invocation_parent_scopes.insert(ExpnId::root(), ParentScope::module(graph_root));
 
-        let mut macro_defs = FxHashMap::default();
-        macro_defs.insert(ExpnId::root(), root_def_id);
-
         let features = session.features_untracked();
         let non_macro_attr =
             |mark_used| Lrc::new(SyntaxExtension::non_macro_attr(mark_used, session.edition()));
@@ -1229,7 +1225,6 @@ pub fn new(
             invocation_parent_scopes,
             output_macro_rules_scopes: Default::default(),
             helper_attrs: Default::default(),
-            macro_defs,
             local_macro_def_scopes: FxHashMap::default(),
             name_already_seen: FxHashMap::default(),
             potentially_unused_imports: Vec::new(),
@@ -1335,8 +1330,8 @@ fn is_builtin_macro(&mut self, res: Res) -> bool {
 
     fn macro_def(&self, mut ctxt: SyntaxContext) -> DefId {
         loop {
-            match self.macro_defs.get(&ctxt.outer_expn()) {
-                Some(&def_id) => return def_id,
+            match ctxt.outer_expn().expn_data().macro_def_id {
+                Some(def_id) => return def_id,
                 None => ctxt.remove_mark(),
             };
         }
@@ -1820,7 +1815,7 @@ fn hygienic_lexical_parent_with_compatibility_fallback(
             && module.expansion.is_descendant_of(parent.expansion)
         {
             // The macro is a proc macro derive
-            if let Some(&def_id) = self.macro_defs.get(&module.expansion) {
+            if let Some(def_id) = module.expansion.expn_data().macro_def_id {
                 if let Some(ext) = self.get_macro_by_def_id(def_id) {
                     if !ext.is_builtin && ext.macro_kind() == MacroKind::Derive {
                         if parent.expansion.outer_expn_is_descendant_of(span.ctxt()) {
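With the side table gone, `macro_def` walks outward through syntax contexts until it reaches an expansion that recorded a macro `DefId`; the hygiene change below guarantees the root expansion always carries one, so the loop terminates. A toy version of that walk, with stand-in types rather than rustc's:

```rust
// Peel one hygiene mark at a time until an expansion that recorded a macro
// `DefId` is reached. The root always carries `Some(...)`, mirroring the
// `HygieneData::new` change below, so the loop cannot run forever.
#[derive(Clone, Copy, Debug, PartialEq)]
struct DefId(u32);

struct Expn {
    macro_def_id: Option<DefId>,
    parent: usize, // index of the next-outer expansion; the root points to itself
}

fn macro_def(expns: &[Expn], mut ctxt: usize) -> DefId {
    loop {
        match expns[ctxt].macro_def_id {
            Some(def_id) => return def_id,
            // Analogue of `ctxt.remove_mark()`: step to the outer context.
            None => ctxt = expns[ctxt].parent,
        }
    }
}

fn main() {
    let expns = [
        Expn { macro_def_id: Some(DefId(0)), parent: 0 },  // root: the crate
        Expn { macro_def_id: None, parent: 0 },            // a desugaring
        Expn { macro_def_id: Some(DefId(42)), parent: 1 }, // a macro call
    ];
    assert_eq!(macro_def(&expns, 2), DefId(42)); // found immediately
    assert_eq!(macro_def(&expns, 1), DefId(0));  // falls back to the root
}
```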
diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs
index 1b6268dc8cbcfbb6b3a84190fca9d4bf9568971b..7027c8262678774fca3d3c5f707e5de79f7a15dd 100644
--- a/src/librustc_resolve/macros.rs
+++ b/src/librustc_resolve/macros.rs
@@ -186,6 +186,7 @@ fn expansion_for_ast_pass(
             call_site,
             self.session.edition(),
             features.into(),
+            None,
         )));
 
         let parent_scope = if let Some(module_id) = parent_module_id {
@@ -290,13 +291,17 @@ fn resolve_macro_invocation(
         let (ext, res) = self.smart_resolve_macro_path(path, kind, parent_scope, force)?;
 
         let span = invoc.span();
-        invoc_id.set_expn_data(ext.expn_data(parent_scope.expansion, span, fast_print_path(path)));
-
-        if let Res::Def(_, def_id) = res {
+        invoc_id.set_expn_data(ext.expn_data(
+            parent_scope.expansion,
+            span,
+            fast_print_path(path),
+            res.opt_def_id(),
+        ));
+
+        if let Res::Def(_, _) = res {
             if after_derive {
                 self.session.span_err(span, "macro attributes must be placed before `#[derive]`");
             }
-            self.macro_defs.insert(invoc_id, def_id);
             let normal_module_def_id = self.macro_def_scope(invoc_id).normal_ancestor_id;
             self.definitions.add_parent_module_of_macro_def(invoc_id, normal_module_def_id);
         }
diff --git a/src/librustc_span/hygiene.rs b/src/librustc_span/hygiene.rs
index 23c3dccb130f6ddd5339293b3a06b0a256b4bf89..c0fb84e741f4a9f56111f003678ad26512b7580a 100644
--- a/src/librustc_span/hygiene.rs
+++ b/src/librustc_span/hygiene.rs
@@ -25,6 +25,7 @@
 // because getting it wrong can lead to nested `HygieneData::with` calls that
 // trigger runtime aborts. (Fortunately these are obvious and easy to fix.)
 
+use crate::def_id::{DefId, CRATE_DEF_INDEX};
 use crate::edition::Edition;
 use crate::symbol::{kw, sym, Symbol};
 use crate::GLOBALS;
@@ -155,7 +156,12 @@ pub fn expansion_cause(mut self) -> Option<Span> {
 impl HygieneData {
     crate fn new(edition: Edition) -> Self {
         HygieneData {
-            expn_data: vec![Some(ExpnData::default(ExpnKind::Root, DUMMY_SP, edition))],
+            expn_data: vec![Some(ExpnData::default(
+                ExpnKind::Root,
+                DUMMY_SP,
+                edition,
+                Some(DefId::local(CRATE_DEF_INDEX)),
+            ))],
             syntax_context_data: vec![SyntaxContextData {
                 outer_expn: ExpnId::root(),
                 outer_transparency: Transparency::Opaque,
@@ -673,11 +679,19 @@ pub struct ExpnData {
     pub local_inner_macros: bool,
     /// Edition of the crate in which the macro is defined.
     pub edition: Edition,
+    /// The `DefId` of the macro being invoked,
+    /// if this `ExpnData` corresponds to a macro invocation.
+    pub macro_def_id: Option<DefId>,
 }
 
 impl ExpnData {
     /// Constructs expansion data with default properties.
-    pub fn default(kind: ExpnKind, call_site: Span, edition: Edition) -> ExpnData {
+    pub fn default(
+        kind: ExpnKind,
+        call_site: Span,
+        edition: Edition,
+        macro_def_id: Option<DefId>,
+    ) -> ExpnData {
         ExpnData {
             kind,
             parent: ExpnId::root(),
@@ -687,6 +701,7 @@ pub fn default(kind: ExpnKind, call_site: Span, edition: Edition) -> ExpnData {
             allow_internal_unsafe: false,
             local_inner_macros: false,
             edition,
+            macro_def_id,
         }
     }
 
@@ -695,10 +710,11 @@ pub fn allow_unstable(
         call_site: Span,
         edition: Edition,
         allow_internal_unstable: Lrc<[Symbol]>,
+        macro_def_id: Option<DefId>,
     ) -> ExpnData {
         ExpnData {
             allow_internal_unstable: Some(allow_internal_unstable),
-            ..ExpnData::default(kind, call_site, edition)
+            ..ExpnData::default(kind, call_site, edition, macro_def_id)
         }
     }
diff --git a/src/libstd/sync/once.rs b/src/libstd/sync/once.rs
index a3ee14e85d22286df13e0dfcffef2f14ea9f3e27..7dc822db3d0271fe7266a275e3854cf6391f5186 100644
--- a/src/libstd/sync/once.rs
+++ b/src/libstd/sync/once.rs
@@ -272,7 +272,7 @@ pub fn call_once<F>(&self, f: F)
     /// result in an immediate panic. If `f` panics, the `Once` will remain
     /// in a poison state. If `f` does _not_ panic, the `Once` will no
     /// longer be in a poison state and all future calls to `call_once` or
-    /// `call_one_force` will be no-ops.
+    /// `call_once_force` will be no-ops.
     ///
     /// The closure `f` is yielded a [`OnceState`] structure which can be used
     /// to query the poison status of the `Once`.
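The `libstd` hunk only corrects a method name in the `Once` documentation. For reference, this is the real `std::sync::Once::call_once_force` API in action (unstable behind the `once_poison` feature when this patch was written; stabilized later, in Rust 1.51):

```rust
// Unlike `call_once`, `call_once_force` runs even on a poisoned `Once`
// and clears the poison on success.
use std::sync::Once;
use std::thread;

fn main() {
    static INIT: Once = Once::new();

    // Poison the `Once` by panicking inside the initializer.
    let _ = thread::spawn(|| {
        INIT.call_once(|| panic!("initialization failed"));
    })
    .join();

    INIT.call_once_force(|state| {
        // The `OnceState` argument reports the earlier panic.
        assert!(state.is_poisoned());
        println!("recovering from a poisoned Once");
    });

    // No longer poisoned: this closure is never run.
    INIT.call_once(|| unreachable!());
}
```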
diff --git a/src/test/ui/ast-json/ast-json-noexpand-output.stdout b/src/test/ui/ast-json/ast-json-noexpand-output.stdout
index 1a07968bdf162cb8e4c273b4e4e79ad6e41d7d20..f60b6a00be1298f3afb7fc3ab5fbf1d460084dd6 100644
--- a/src/test/ui/ast-json/ast-json-noexpand-output.stdout
+++ b/src/test/ui/ast-json/ast-json-noexpand-output.stdout
@@ -1 +1 @@
-{"module":{"inner":{"lo":0,"hi":0},"items":[{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":{"_field0":[[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["extern",false]},"span":{"lo":0,"hi":0}}]},"NonJoint"],[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["crate",false]},"span":{"lo":0,"hi":0}}]},"NonJoint"],[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["core",false]},"span":{"lo":0,"hi":0}}]},"NonJoint"],[{"variant":"Token","fields":[{"kind":"Semi","span":{"lo":0,"hi":0}}]},"NonJoint"]]}}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"_field0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"NonJoint"]]}]}}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"span":{"lo":0,"hi":0},"proc_macros":[]}
+{"module":{"inner":{"lo":0,"hi":0},"items":[{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":{"_field0":[[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["extern",false]},"span":{"lo":0,"hi":0}}]},"NonJoint"],[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["crate",false]},"span":{"lo":0,"hi":0}}]},"NonJoint"],[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["core",false]},"span":{"lo":0,"hi":0}}]},"Joint"],[{"variant":"Token","fields":[{"kind":"Semi","span":{"lo":0,"hi":0}}]},"NonJoint"]]}}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"_field0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"NonJoint"]]}]}}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"span":{"lo":0,"hi":0},"proc_macros":[]}
diff --git a/src/test/ui/ast-json/ast-json-output.stdout b/src/test/ui/ast-json/ast-json-output.stdout
index 0b3704e8e0045864c9d50851fa37f9cdc0ad2b74..42e7e789980634d3eb17b435e965b80785a858bf 100644
--- a/src/test/ui/ast-json/ast-json-output.stdout
+++ b/src/test/ui/ast-json/ast-json-output.stdout
@@ -1 +1 @@
-{"module":{"inner":{"lo":0,"hi":0},"items":[{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"prelude_import","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":"Empty"}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"","span":{"lo":0,"hi":0}},"kind":{"variant":"Use","fields":[{"prefix":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"{{root}}","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"std","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"prelude","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"v1","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"kind":"Glob","span":{"lo":0,"hi":0}}]},"tokens":null},{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"macro_use","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":"Empty"}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"std","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null},{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":{"_field0":[[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["extern",false]},"span":{"lo":0,"hi":0}}]},"NonJoint"],[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["crate",false]},"span":{"lo":0,"hi":0}}]},"NonJoint"],[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["core",false]},"span":{"lo":0,"hi":0}}]},"NonJoint"],[{"variant":"Token","fields":[{"kind":"Semi","span":{"lo":0,"hi":0}}]},"NonJoint"]]}}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"_field0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"NonJoint"]]}]}}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"span":{"lo":0,"hi":0},"proc_macros":[]} 
+{"module":{"inner":{"lo":0,"hi":0},"items":[{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"prelude_import","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":"Empty"}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"","span":{"lo":0,"hi":0}},"kind":{"variant":"Use","fields":[{"prefix":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"{{root}}","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"std","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"prelude","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"v1","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"kind":"Glob","span":{"lo":0,"hi":0}}]},"tokens":null},{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"macro_use","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":"Empty"}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"std","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null},{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":{"_field0":[[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["extern",false]},"span":{"lo":0,"hi":0}}]},"NonJoint"],[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["crate",false]},"span":{"lo":0,"hi":0}}]},"NonJoint"],[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["core",false]},"span":{"lo":0,"hi":0}}]},"Joint"],[{"variant":"Token","fields":[{"kind":"Semi","span":{"lo":0,"hi":0}}]},"NonJoint"]]}}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"_field0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"NonJoint"]]}]}}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"span":{"lo":0,"hi":0},"proc_macros":[]}