diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs
index 7ca4703a2e1859e298a9ba486156f196758526c6..2f0ae540fbf07c7d1d4a2db33155ac6d253c95d9 100644
--- a/src/librustdoc/html/highlight.rs
+++ b/src/librustdoc/html/highlight.rs
@@ -147,7 +147,7 @@ fn write_source(sess: &parse::ParseSess,
            }

            // keywords are also included in the identifier set
-            token::Ident(ident, _is_mod_sep) => {
+            token::Ident(ident) => {
                match &*ident.name.as_str() {
                    "ref" | "mut" => "kw-2",

diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index df9f935446d71a4dde4293b7023a513dc4f0dc01..0e825b8c2fe3c9f4c06e015778a938fb80975d51 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -1206,8 +1206,7 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
                TokenTree::Delimited(sp, Rc::new(Delimited {
                    delim: token::Bracket,
                    open_span: sp,
-                    tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"),
-                                                                token::Plain)),
+                    tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
                              TokenTree::Token(sp, token::Eq),
                              TokenTree::Token(sp, token::Literal(
                                  token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
@@ -1225,14 +1224,13 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
            }
            (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => {
                let v = [TokenTree::Token(sp, token::Dollar),
-                         TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str()),
-                                                           token::Plain))];
+                         TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))];
                v[index].clone()
            }
-            (&TokenTree::Token(sp, token::MatchNt(name, kind, name_st, kind_st)), _) => {
-                let v = [TokenTree::Token(sp, token::SubstNt(name, name_st)),
+            (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
+                let v = [TokenTree::Token(sp, token::SubstNt(name)),
                         TokenTree::Token(sp, token::Colon),
-                         TokenTree::Token(sp, token::Ident(kind, kind_st))];
+                         TokenTree::Token(sp, token::Ident(kind))];
                v[index].clone()
            }
            (&TokenTree::Sequence(_, ref seq), _) => {
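[Editor's illustration, not part of the patch: the `get_tt` hunk above is the
doc-comment desugaring. A doc comment `/// text` is represented as the tokens
of `#[doc = r"text"]`, and the `doc` identifier is now a one-field token.
A minimal sketch of the resulting shape, reusing the names from the hunk:]

    // Token trees produced for a doc comment (sp is the comment's span):
    vec![TokenTree::Token(sp, token::Pound),
         TokenTree::Delimited(sp, Rc::new(Delimited {
             delim: token::Bracket,
             open_span: sp,
             tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
                       TokenTree::Token(sp, token::Eq),
                       TokenTree::Token(sp, token::Literal(
                           token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
             close_span: sp,
         }))]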
diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs
index 43b4a201afc184fb3c401d35513e5dab137cdc9c..26088b1242e2a326d3eac0ee476148a754d5ee5d 100644
--- a/src/libsyntax/diagnostics/plugin.rs
+++ b/src/libsyntax/diagnostics/plugin.rs
@@ -54,7 +54,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
                                   token_tree: &[TokenTree])
                                   -> Box<MacResult+'cx> {
    let code = match (token_tree.len(), token_tree.get(0)) {
-        (1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code,
+        (1, Some(&TokenTree::Token(_, token::Ident(code)))) => code,
        _ => unreachable!()
    };

@@ -92,10 +92,10 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
        token_tree.get(1),
        token_tree.get(2)
    ) {
-        (1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => {
+        (1, Some(&TokenTree::Token(_, token::Ident(ref code))), None, None) => {
            (code, None)
        },
-        (3, Some(&TokenTree::Token(_, token::Ident(ref code, _))),
+        (3, Some(&TokenTree::Token(_, token::Ident(ref code))),
           Some(&TokenTree::Token(_, token::Comma)),
           Some(&TokenTree::Token(_, token::Literal(token::StrRaw(description, _), None)))) => {
            (code, Some(description))
@@ -160,9 +160,9 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
    let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
        (
            // Crate name.
-            &TokenTree::Token(_, token::Ident(ref crate_name, _)),
+            &TokenTree::Token(_, token::Ident(ref crate_name)),
            // DIAGNOSTICS ident.
-            &TokenTree::Token(_, token::Ident(ref name, _))
+            &TokenTree::Token(_, token::Ident(ref name))
        ) => (*&crate_name, name),
        _ => unreachable!()
    };
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index 77aeaf8459aec8b046d2fa95176aa098faa9d27b..9734b49ba7ce29f653539e0851a3583414c0d86b 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -72,7 +72,7 @@ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {

 impl ToTokens for ast::Ident {
    fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-        vec![TokenTree::Token(DUMMY_SP, token::Ident(*self, token::Plain))]
+        vec![TokenTree::Token(DUMMY_SP, token::Ident(*self))]
    }
 }

@@ -646,14 +646,10 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
                            cx.expr_usize(sp, n))
        }

-        token::Ident(ident, style) => {
+        token::Ident(ident) => {
            return cx.expr_call(sp,
                                mk_token_path(cx, sp, "Ident"),
-                                vec![mk_ident(cx, sp, ident),
-                                     match style {
-                                         ModName => mk_token_path(cx, sp, "ModName"),
-                                         Plain => mk_token_path(cx, sp, "Plain"),
-                                     }]);
+                                vec![mk_ident(cx, sp, ident)]);
        }

        token::Lifetime(ident) => {
@@ -668,19 +664,10 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
                            vec!(mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))));
        }

-        token::MatchNt(name, kind, namep, kindp) => {
+        token::MatchNt(name, kind) => {
            return cx.expr_call(sp,
                                mk_token_path(cx, sp, "MatchNt"),
-                                vec!(mk_ident(cx, sp, name),
-                                     mk_ident(cx, sp, kind),
-                                     match namep {
-                                         ModName => mk_token_path(cx, sp, "ModName"),
-                                         Plain => mk_token_path(cx, sp, "Plain"),
-                                     },
-                                     match kindp {
-                                         ModName => mk_token_path(cx, sp, "ModName"),
-                                         Plain => mk_token_path(cx, sp, "Plain"),
-                                     }));
+                                vec![mk_ident(cx, sp, name), mk_ident(cx, sp, kind)]);
        }

        token::Interpolated(_) => panic!("quote! with interpolated token"),

@@ -722,7 +709,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {

 fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stmt> {
    match *tt {
-        TokenTree::Token(sp, SubstNt(ident, _)) => {
+        TokenTree::Token(sp, SubstNt(ident)) => {
            // tt.extend($ident.to_tokens(ext_cx))

            let e_to_toks =
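[Editor's illustration, not part of the patch: `expr_mk_token` builds the
expression that a `quote_*!` macro emits to re-create a token at expansion
time. With the style argument gone, the generated constructor call for an
identifier takes a single argument, built with the file's own
`mk_token_path`/`mk_ident` helpers:]

    // Generated expression for an identifier token:
    //   before: token::Ident(<ident>, token::Plain)
    //   after:  token::Ident(<ident>)
    cx.expr_call(sp, mk_token_path(cx, sp, "Ident"), vec![mk_ident(cx, sp, ident)])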
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 4e4c644776a5163200273ee26a93415a77384d72..8b33bbd37000d0af32cc9aac23dee967475a6417 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -216,7 +216,7 @@ fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
                n_rec(p_s, next_m, res, ret_val, idx)?;
            }
        }
-        TokenTree::Token(sp, MatchNt(bind_name, _, _, _)) => {
+        TokenTree::Token(sp, MatchNt(bind_name, _)) => {
            match ret_val.entry(bind_name.name) {
                Vacant(spot) => {
                    spot.insert(res[*idx].clone());
@@ -263,7 +263,7 @@ pub enum ParseResult<T> {
 /// unhygienic comparison)
 pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
    match (t1,t2) {
-        (&token::Ident(id1,_),&token::Ident(id2,_))
+        (&token::Ident(id1),&token::Ident(id2))
        | (&token::Lifetime(id1),&token::Lifetime(id2)) =>
            id1.name == id2.name,
        _ => *t1 == *t2
@@ -451,7 +451,7 @@ pub fn parse(sess: &ParseSess,
            if (!bb_eis.is_empty() && !next_eis.is_empty())
                || bb_eis.len() > 1 {
                let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) {
-                    TokenTree::Token(_, MatchNt(bind, name, _, _)) => {
+                    TokenTree::Token(_, MatchNt(bind, name)) => {
                        format!("{} ('{}')", name, bind)
                    }
                    _ => panic!()
@@ -479,7 +479,7 @@ pub fn parse(sess: &ParseSess,

                let mut ei = bb_eis.pop().unwrap();
                match ei.top_elts.get_tt(ei.idx) {
-                    TokenTree::Token(span, MatchNt(_, ident, _, _)) => {
+                    TokenTree::Token(span, MatchNt(_, ident)) => {
                        let match_cur = ei.match_cur;
                        (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
                            parse_nt(&mut rust_parser, span, &ident.name.as_str()))));
@@ -534,9 +534,9 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
      "ty" => token::NtTy(panictry!(p.parse_ty())),
      // this could be handled like a token, since it is one
      "ident" => match p.token {
-        token::Ident(sn,b) => {
+        token::Ident(sn) => {
            p.bump();
-            token::NtIdent(Box::new(Spanned::<Ident>{node: sn, span: p.span}),b)
+            token::NtIdent(Box::new(Spanned::<Ident>{node: sn, span: p.span}))
        }
        _ => {
            let token_str = pprust::token_to_string(&p.token);
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 87ab3dad50c7082e6374c554f3d776e692a9c8e8..abb17de47eae43c489b9c67982b721059de75869 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -244,8 +244,8 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
    // $( $lhs:tt => $rhs:tt );+
    // ...quasiquoting this would be nice.
    // These spans won't matter, anyways
-    let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt, token::Plain, token::Plain);
-    let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain);
+    let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt);
+    let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt);
    let argument_gram = vec!(
        TokenTree::Sequence(DUMMY_SP,
                            Rc::new(ast::SequenceRepetition {
@@ -415,7 +415,7 @@ fn check_matcher_old<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token, on_fai
    let mut tokens = matcher.peekable();
    while let Some(token) = tokens.next() {
        last = match *token {
-            TokenTree::Token(sp, MatchNt(ref name, ref frag_spec, _, _)) => {
+            TokenTree::Token(sp, MatchNt(ref name, ref frag_spec)) => {
                // ii. If T is a simple NT, look ahead to the next token T' in
                // M. If T' is in the set FOLLOW(NT), continue. Else; reject.
                if can_be_followed_by_any(&frag_spec.name.as_str()) {
@@ -881,7 +881,7 @@ fn check_matcher_core(cx: &mut ExtCtxt,
        // Now `last` holds the complete set of NT tokens that could
        // end the sequence before SUFFIX. Check that every one works with `suffix`.
        'each_last: for &(_sp, ref t) in &last.tokens {
-            if let MatchNt(ref name, ref frag_spec, _, _) = *t {
+            if let MatchNt(ref name, ref frag_spec) = *t {
                for &(sp, ref next_token) in &suffix_first.tokens {
                    match is_in_follow(cx, next_token, &frag_spec.name.as_str()) {
                        Err(msg) => {
@@ -917,9 +917,8 @@ fn check_matcher_core(cx: &mut ExtCtxt,
    last
 }

-
 fn token_can_be_followed_by_any(tok: &Token) -> bool {
-    if let &MatchNt(_, ref frag_spec, _, _) = tok {
+    if let &MatchNt(_, ref frag_spec) = tok {
        frag_can_be_followed_by_any(&frag_spec.name.as_str())
    } else {
        // (Non NT's can always be followed by anthing in matchers.)
@@ -1005,8 +1004,8 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
        "pat" => {
            match *tok {
                FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true),
-                Ident(i, _) if (i.name.as_str() == "if" ||
-                                i.name.as_str() == "in") => Ok(true),
+                Ident(i) if (i.name.as_str() == "if" ||
+                             i.name.as_str() == "in") => Ok(true),
                _ => Ok(false)
            }
        },
@@ -1014,9 +1013,8 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
            match *tok {
                OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) |
                Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true),
-                MatchNt(_, ref frag, _, _) if frag.name.as_str() == "block" => Ok(true),
-                Ident(i, _) if (i.name.as_str() == "as" ||
-                                i.name.as_str() == "where") => Ok(true),
+                MatchNt(_, ref frag) if frag.name.as_str() == "block" => Ok(true),
+                Ident(i) if i.name.as_str() == "as" || i.name.as_str() == "where" => Ok(true),
                _ => Ok(false)
            }
        },
@@ -1036,7 +1034,7 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {

 fn has_legal_fragment_specifier(tok: &Token) -> Result<(), String> {
    debug!("has_legal_fragment_specifier({:?})", tok);
-    if let &MatchNt(_, ref frag_spec, _, _) = tok {
+    if let &MatchNt(_, ref frag_spec) = tok {
        let s = &frag_spec.name.as_str();
        if !is_legal_fragment_specifier(s) {
            return Err(s.to_string());
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index ae99fe817395f53a8b414344b27b486e347819cd..7f53d0f412cca268cccabc3382e56e64fc0aef82 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -161,7 +161,7 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
                size + lockstep_iter_size(tt, r)
            })
        },
-        TokenTree::Token(_, SubstNt(name, _)) | TokenTree::Token(_, MatchNt(name, _, _, _)) =>
+        TokenTree::Token(_, SubstNt(name)) | TokenTree::Token(_, MatchNt(name, _)) =>
            match lookup_cur_matched(r, name) {
                Some(matched) => match *matched {
                    MatchedNonterminal(_) => LisUnconstrained,
@@ -186,7 +186,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                    None => (),
                    Some(sp) => {
                        r.cur_span = sp;
-                        r.cur_tok = token::Ident(r.imported_from.unwrap(), token::Plain);
+                        r.cur_tok = token::Ident(r.imported_from.unwrap());
                        return ret_val;
                    },
                }
@@ -278,12 +278,12 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                }
            }
            // FIXME #2887: think about span stuff here
-            TokenTree::Token(sp, SubstNt(ident, namep)) => {
+            TokenTree::Token(sp, SubstNt(ident)) => {
                r.stack.last_mut().unwrap().idx += 1;
                match lookup_cur_matched(r, ident) {
                    None => {
                        r.cur_span = sp;
-                        r.cur_tok = SubstNt(ident, namep);
+                        r.cur_tok = SubstNt(ident);
                        return ret_val;
                        // this can't be 0 length, just like TokenTree::Delimited
                    }
@@ -292,9 +292,9 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                    // sidestep the interpolation tricks for ident because
                    // (a) idents can be in lots of places, so it'd be a pain
                    // (b) we actually can, since it's a token.
-                    MatchedNonterminal(NtIdent(ref sn, b)) => {
+                    MatchedNonterminal(NtIdent(ref sn)) => {
                        r.cur_span = sn.span;
-                        r.cur_tok = token::Ident(sn.node, b);
+                        r.cur_tok = token::Ident(sn.node);
                        return ret_val;
                    }
                    MatchedNonterminal(ref other_whole_nt) => {
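[Editor's illustration, not part of the patch: after this change a matcher
fragment `$e:expr` is the two-field `MatchNt(name, kind)` and a transcriber
variable `$e` is the one-field `SubstNt(name)`. For a rule
`($e:expr) => ($e)`, roughly (with `sp` standing in for a real span):]

    // `$e:expr` in the macro LHS: name to bind plus fragment kind.
    let lhs = TokenTree::Token(sp, token::MatchNt(str_to_ident("e"), str_to_ident("expr")));
    // `$e` in the macro RHS: only the variable to substitute.
    let rhs = TokenTree::Token(sp, token::SubstNt(str_to_ident("e")));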
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index 89451e795503f0c272b562f74af7201d95deca0f..69c420902c8a0724ad97ae1cd91edfa7e8f953ba 100644
--- a/src/libsyntax/fold.rs
+++ b/src/libsyntax/fold.rs
@@ -610,17 +610,11 @@ pub fn noop_fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree> {
 // apply ident folder if it's an ident, apply other folds to interpolated nodes
 pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token {
    match t {
-        token::Ident(id, followed_by_colons) => {
-            token::Ident(fld.fold_ident(id), followed_by_colons)
-        }
+        token::Ident(id) => token::Ident(fld.fold_ident(id)),
        token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)),
        token::Interpolated(nt) => token::Interpolated(fld.fold_interpolated(nt)),
-        token::SubstNt(ident, namep) => {
-            token::SubstNt(fld.fold_ident(ident), namep)
-        }
-        token::MatchNt(name, kind, namep, kindp) => {
-            token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind), namep, kindp)
-        }
+        token::SubstNt(ident) => token::SubstNt(fld.fold_ident(ident)),
+        token::MatchNt(name, kind) => token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind)),
        _ => t
    }
 }
@@ -664,9 +658,8 @@ pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
        token::NtPat(pat) => token::NtPat(fld.fold_pat(pat)),
        token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)),
        token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)),
-        token::NtIdent(id, is_mod_name) =>
-            token::NtIdent(Box::new(Spanned::<Ident>{node: fld.fold_ident(id.node), .. *id}),
-                           is_mod_name),
+        token::NtIdent(id) =>
+            token::NtIdent(Box::new(Spanned::<Ident>{node: fld.fold_ident(id.node), ..*id})),
        token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
        token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))),
        token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&tt))),
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index a5cb5c7117e212d64bd82e32fc9a2d426161de74..265a432ae8267536062dc56309e3c96b5d79b6c0 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1039,11 +1039,7 @@ fn next_token_inner(&mut self) -> token::Token {
                    token::Underscore
                } else {
                    // FIXME: perform NFKC normalization here. (Issue #2253)
-                    if self.curr_is(':') && self.nextch_is(':') {
-                        token::Ident(str_to_ident(string), token::ModName)
-                    } else {
-                        token::Ident(str_to_ident(string), token::Plain)
-                    }
+                    token::Ident(str_to_ident(string))
                }
            });
        }
@@ -1231,8 +1227,7 @@ fn next_token_inner(&mut self) -> token::Token {
            let keyword_checking_ident = self.with_str_from(start, |lifetime_name| {
                str_to_ident(lifetime_name)
            });
-            let keyword_checking_token = &token::Ident(keyword_checking_ident,
-                                                       token::Plain);
+            let keyword_checking_token = &token::Ident(keyword_checking_ident);
            let last_bpos = self.last_pos;
            if keyword_checking_token.is_keyword(token::keywords::SelfValue) {
                self.err_span_(start,
@@ -1687,7 +1682,7 @@ fn t1() {
        assert_eq!(string_reader.next_token().tok, token::Whitespace);
        let tok1 = string_reader.next_token();
        let tok2 = TokenAndSpan {
-            tok: token::Ident(id, token::Plain),
+            tok: token::Ident(id),
            sp: Span {
                lo: BytePos(21),
                hi: BytePos(23),
@@ -1701,7 +1696,7 @@ fn t1() {
        // read another token:
        let tok3 = string_reader.next_token();
        let tok4 = TokenAndSpan {
-            tok: token::Ident(str_to_ident("main"), token::Plain),
+            tok: token::Ident(str_to_ident("main")),
            sp: Span {
                lo: BytePos(24),
                hi: BytePos(28),
@@ -1722,8 +1717,8 @@ fn check_tokenization(mut string_reader: StringReader, expected: Vec<token::Token>) {
        }
    }

-    fn mk_ident(id: &str, style: token::IdentStyle) -> token::Token {
-        token::Ident(str_to_ident(id), style)
+    fn mk_ident(id: &str) -> token::Token {
+        token::Ident(str_to_ident(id))
    }

    #[test]
@@ -1731,9 +1726,7 @@ fn doublecolonparsing() {
        let cm = Rc::new(CodeMap::new());
        let sh = mk_sh(cm.clone());
        check_tokenization(setup(&cm, &sh, "a b".to_string()),
-                           vec![mk_ident("a", token::Plain),
-                                token::Whitespace,
-                                mk_ident("b", token::Plain)]);
+                           vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
    }

    #[test]
@@ -1741,9 +1734,7 @@ fn dcparsing_2() {
        let cm = Rc::new(CodeMap::new());
        let sh = mk_sh(cm.clone());
        check_tokenization(setup(&cm, &sh, "a::b".to_string()),
-                           vec![mk_ident("a", token::ModName),
-                                token::ModSep,
-                                mk_ident("b", token::Plain)]);
+                           vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
    }

    #[test]
@@ -1751,10 +1742,7 @@ fn dcparsing_3() {
        let cm = Rc::new(CodeMap::new());
        let sh = mk_sh(cm.clone());
        check_tokenization(setup(&cm, &sh, "a ::b".to_string()),
-                           vec![mk_ident("a", token::Plain),
-                                token::Whitespace,
-                                token::ModSep,
-                                mk_ident("b", token::Plain)]);
+                           vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
    }

    #[test]
@@ -1762,10 +1750,7 @@ fn dcparsing_4() {
        let cm = Rc::new(CodeMap::new());
        let sh = mk_sh(cm.clone());
        check_tokenization(setup(&cm, &sh, "a:: b".to_string()),
-                           vec![mk_ident("a", token::ModName),
-                                token::ModSep,
-                                token::Whitespace,
-                                mk_ident("b", token::Plain)]);
+                           vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
    }

    #[test]
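[Editor's illustration, not part of the patch: as the updated `dcparsing_*`
tests show, the lexer no longer looks ahead for `::` to classify an
identifier, so `a::b`, `a ::b`, and `a:: b` all yield the same identifier
tokens. With a hypothetical `tokens_of` helper over `StringReader`:]

    // Non-whitespace tokens for "a::b"; the identifiers carry no style flag.
    assert_eq!(tokens_of("a::b"),
               vec![token::Ident(str_to_ident("a")),
                    token::ModSep,
                    token::Ident(str_to_ident("b"))]);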
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 29b1d5b9aff0631159027de605e0bc1170e1b1f8..7534683a206ffb63e7354fd057efb404ad5b1f36 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -734,9 +734,9 @@ fn string_to_tts_macro () {
        match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
            (
                4,
-                Some(&TokenTree::Token(_, token::Ident(name_macro_rules, token::Plain))),
+                Some(&TokenTree::Token(_, token::Ident(name_macro_rules))),
                Some(&TokenTree::Token(_, token::Not)),
-                Some(&TokenTree::Token(_, token::Ident(name_zip, token::Plain))),
+                Some(&TokenTree::Token(_, token::Ident(name_zip))),
                Some(&TokenTree::Delimited(_, ref macro_delimed)),
            )
            if name_macro_rules.name.as_str() == "macro_rules"
@@ -755,7 +755,7 @@ fn string_to_tts_macro () {
                    (
                        2,
                        Some(&TokenTree::Token(_, token::Dollar)),
-                        Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))),
+                        Some(&TokenTree::Token(_, token::Ident(ident))),
                    )
                    if first_delimed.delim == token::Paren
                    && ident.name.as_str() == "a" => {},
@@ -766,7 +766,7 @@ fn string_to_tts_macro () {
                    (
                        2,
                        Some(&TokenTree::Token(_, token::Dollar)),
-                        Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))),
+                        Some(&TokenTree::Token(_, token::Ident(ident))),
                    )
                    if second_delimed.delim == token::Paren
                    && ident.name.as_str() == "a" => {},
@@ -785,26 +785,17 @@ fn string_to_tts_1() {
        let tts = string_to_tts("fn a (b : i32) { b; }".to_string());

        let expected = vec![
-            TokenTree::Token(sp(0, 2),
-                             token::Ident(str_to_ident("fn"),
-                             token::IdentStyle::Plain)),
-            TokenTree::Token(sp(3, 4),
-                             token::Ident(str_to_ident("a"),
-                             token::IdentStyle::Plain)),
+            TokenTree::Token(sp(0, 2), token::Ident(str_to_ident("fn"))),
+            TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"))),
            TokenTree::Delimited(
                sp(5, 14),
                Rc::new(ast::Delimited {
                    delim: token::DelimToken::Paren,
                    open_span: sp(5, 6),
                    tts: vec![
-                        TokenTree::Token(sp(6, 7),
-                                         token::Ident(str_to_ident("b"),
-                                         token::IdentStyle::Plain)),
-                        TokenTree::Token(sp(8, 9),
-                                         token::Colon),
-                        TokenTree::Token(sp(10, 13),
-                                         token::Ident(str_to_ident("i32"),
-                                         token::IdentStyle::Plain)),
+                        TokenTree::Token(sp(6, 7), token::Ident(str_to_ident("b"))),
+                        TokenTree::Token(sp(8, 9), token::Colon),
+                        TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"))),
                    ],
                    close_span: sp(13, 14),
                })),
@@ -814,11 +805,8 @@ fn string_to_tts_1() {
                    delim: token::DelimToken::Brace,
                    open_span: sp(15, 16),
                    tts: vec![
-                        TokenTree::Token(sp(17, 18),
-                                         token::Ident(str_to_ident("b"),
-                                         token::IdentStyle::Plain)),
-                        TokenTree::Token(sp(18, 19),
-                                         token::Semi)
+                        TokenTree::Token(sp(17, 18), token::Ident(str_to_ident("b"))),
+                        TokenTree::Token(sp(18, 19), token::Semi),
                    ],
                    close_span: sp(20, 21),
                }))
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index b81ee67c2141c5a0965eaf74fbe25c33ac07bf5c..71f059de0414557bf1d53f735c6e32453ecb1760 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -567,7 +567,7 @@ pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> {
        }
        self.check_reserved_keywords();
        match self.token {
-            token::Ident(i, _) => {
+            token::Ident(i) => {
                self.bump();
                Ok(i)
            }
@@ -629,9 +629,8 @@ pub fn eat_keyword_noexpect(&mut self, kw: keywords::Keyword) -> bool {
    }

    pub fn check_contextual_keyword(&mut self, ident: Ident) -> bool {
-        let tok = token::Ident(ident, token::Plain);
-        self.expected_tokens.push(TokenType::Token(tok));
-        if let token::Ident(ref cur_ident, _) = self.token {
+        self.expected_tokens.push(TokenType::Token(token::Ident(ident)));
+        if let token::Ident(ref cur_ident) = self.token {
            cur_ident.name == ident.name
        } else {
            false
@@ -1699,7 +1698,7 @@ pub fn parse_pat_literal_maybe_minus(&mut self) -> PResult<'a, P<ast::Expr>> {

    pub fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
        match self.token {
-            token::Ident(sid, _) if self.token.is_path_segment_keyword() => {
+            token::Ident(sid) if self.token.is_path_segment_keyword() => {
                self.bump();
                Ok(sid)
            }
@@ -2564,7 +2563,7 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P<ast::Expr>, lo: BytePos) -> PResult<
        // expr.f
        if self.eat(&token::Dot) {
            match self.token {
-                token::Ident(i, _) => {
+                token::Ident(i) => {
                    let dot_pos = self.last_span.hi;
                    hi = self.span.hi;
                    self.bump();
@@ -2661,7 +2660,7 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P<ast::Expr>, lo: BytePos) -> PResult<
    // Parse unquoted tokens after a `$` in a token tree
    fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> {
        let mut sp = self.span;
-        let (name, namep) = match self.token {
+        let name = match self.token {
            token::Dollar => {
                self.bump();

@@ -2686,14 +2685,12 @@ fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> {
                    return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar)));
                } else {
                    sp = mk_sp(sp.lo, self.span.hi);
-                    let namep = match self.token { token::Ident(_, p) => p, _ => token::Plain };
-                    let name = self.parse_ident()?;
-                    (name, namep)
+                    self.parse_ident()?
                }
            }
-            token::SubstNt(name, namep) => {
+            token::SubstNt(name) => {
                self.bump();
-                (name, namep)
+                name
            }
            _ => unreachable!()
        };
@@ -2703,18 +2700,17 @@ fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> {
                                      !t.is_reserved_keyword()) {
            self.bump();
            sp = mk_sp(sp.lo, self.span.hi);
-            let kindp = match self.token { token::Ident(_, p) => p, _ => token::Plain };
            let nt_kind = self.parse_ident()?;
-            Ok(TokenTree::Token(sp, MatchNt(name, nt_kind, namep, kindp)))
+            Ok(TokenTree::Token(sp, MatchNt(name, nt_kind)))
        } else {
-            Ok(TokenTree::Token(sp, SubstNt(name, namep)))
+            Ok(TokenTree::Token(sp, SubstNt(name)))
        }
    }

    pub fn check_unknown_macro_variable(&mut self) {
        if self.quote_depth == 0 {
            match self.token {
-                token::SubstNt(name, _) =>
+                token::SubstNt(name) =>
                    self.fatal(&format!("unknown macro variable `{}`", name)).emit(),
                _ => {}
            }
@@ -4614,7 +4610,7 @@ pub fn parse_fn_decl(&mut self, allow_variadic: bool) -> PResult<'a, P<FnDecl>>

    fn expect_self_ident(&mut self) -> PResult<'a, ast::Ident> {
        match self.token {
-            token::Ident(id, _) if id.name == special_idents::self_.name => {
+            token::Ident(id) if id.name == special_idents::self_.name => {
                self.bump();
                Ok(id)
            },
@@ -4927,7 +4923,7 @@ fn complain_if_pub_macro(&mut self, visa: &Visibility, span: Span) {
            Visibility::Inherited => (),
            _ => {
                let is_macro_rules: bool = match self.token {
-                    token::Ident(sid, _) => sid.name == intern("macro_rules"),
+                    token::Ident(sid) => sid.name == intern("macro_rules"),
                    _ => false,
                };
                if is_macro_rules {
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 46cf79ba336ef2739885963dece0196f94985368..76bd0f66cd8f4390fda0b9950c85381233261450 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -11,7 +11,6 @@
 pub use self::BinOpToken::*;
 pub use self::Nonterminal::*;
 pub use self::DelimToken::*;
-pub use self::IdentStyle::*;
 pub use self::Lit::*;
 pub use self::Token::*;

@@ -51,13 +50,6 @@ pub enum DelimToken {
    Brace,
 }

-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
-pub enum IdentStyle {
-    /// `::` follows the identifier with no whitespace in-between.
-    ModName,
-    Plain,
-}
-
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
 pub enum SpecialMacroVar {
    /// `$crate` will be filled in with the name of the crate a macro was
@@ -139,7 +131,7 @@ pub enum Token {
    Literal(Lit, Option<ast::Name>),

    /* Name components */
-    Ident(ast::Ident, IdentStyle),
+    Ident(ast::Ident),
    Underscore,
    Lifetime(ast::Ident),

@@ -150,10 +142,10 @@ pub enum Token {
    DocComment(ast::Name),

    // In left-hand-sides of MBE macros:
-    /// Parse a nonterminal (name to bind, name of NT, styles of their idents)
-    MatchNt(ast::Ident, ast::Ident, IdentStyle, IdentStyle),
+    /// Parse a nonterminal (name to bind, name of NT)
+    MatchNt(ast::Ident, ast::Ident),
    // In right-hand-sides of MBE macros:
    /// A syntactic variable that will be filled in by macro expansion.
-    SubstNt(ast::Ident, IdentStyle),
+    SubstNt(ast::Ident),
    /// A macro variable with special meaning.
    SpecialVarNt(SpecialMacroVar),

@@ -279,16 +271,16 @@ pub fn to_binop(&self) -> Option<BinOpToken> {
    /// Returns `true` if the token is a given keyword, `kw`.
    pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
        match *self {
-            Ident(id, _) => id.name == kw.to_name(),
+            Ident(id) => id.name == kw.to_name(),
            _ => false,
        }
    }

    pub fn is_path_segment_keyword(&self) -> bool {
        match *self {
-            Ident(id, _) => id.name == SUPER_KEYWORD_NAME ||
-                            id.name == SELF_KEYWORD_NAME ||
-                            id.name == SELF_TYPE_KEYWORD_NAME,
+            Ident(id) => id.name == SUPER_KEYWORD_NAME ||
+                         id.name == SELF_KEYWORD_NAME ||
+                         id.name == SELF_TYPE_KEYWORD_NAME,
            _ => false,
        }
    }
@@ -296,12 +288,12 @@ pub fn is_path_segment_keyword(&self) -> bool {
    /// Returns `true` if the token is either a strict or reserved keyword.
    pub fn is_any_keyword(&self) -> bool {
        match *self {
-            Ident(id, _) => id.name == SELF_KEYWORD_NAME ||
-                            id.name == STATIC_KEYWORD_NAME ||
-                            id.name == SUPER_KEYWORD_NAME ||
-                            id.name == SELF_TYPE_KEYWORD_NAME ||
-                            id.name >= STRICT_KEYWORD_START &&
-                            id.name <= RESERVED_KEYWORD_FINAL,
+            Ident(id) => id.name == SELF_KEYWORD_NAME ||
+                         id.name == STATIC_KEYWORD_NAME ||
+                         id.name == SUPER_KEYWORD_NAME ||
+                         id.name == SELF_TYPE_KEYWORD_NAME ||
+                         id.name >= STRICT_KEYWORD_START &&
+                         id.name <= RESERVED_KEYWORD_FINAL,
            _ => false
        }
    }
@@ -309,12 +301,12 @@ pub fn is_any_keyword(&self) -> bool {
    /// Returns `true` if the token is either a strict keyword.
    pub fn is_strict_keyword(&self) -> bool {
        match *self {
-            Ident(id, _) => id.name == SELF_KEYWORD_NAME ||
-                            id.name == STATIC_KEYWORD_NAME ||
-                            id.name == SUPER_KEYWORD_NAME ||
-                            id.name == SELF_TYPE_KEYWORD_NAME ||
-                            id.name >= STRICT_KEYWORD_START &&
-                            id.name <= STRICT_KEYWORD_FINAL,
+            Ident(id) => id.name == SELF_KEYWORD_NAME ||
+                         id.name == STATIC_KEYWORD_NAME ||
+                         id.name == SUPER_KEYWORD_NAME ||
+                         id.name == SELF_TYPE_KEYWORD_NAME ||
+                         id.name >= STRICT_KEYWORD_START &&
+                         id.name <= STRICT_KEYWORD_FINAL,
            _ => false,
        }
    }
@@ -322,8 +314,8 @@ pub fn is_strict_keyword(&self) -> bool {
    /// Returns `true` if the token is either a keyword reserved for possible future use.
    pub fn is_reserved_keyword(&self) -> bool {
        match *self {
-            Ident(id, _) => id.name >= RESERVED_KEYWORD_START &&
-                            id.name <= RESERVED_KEYWORD_FINAL,
+            Ident(id) => id.name >= RESERVED_KEYWORD_START &&
+                         id.name <= RESERVED_KEYWORD_FINAL,
            _ => false,
        }
    }
@@ -333,7 +325,7 @@ pub fn is_reserved_keyword(&self) -> bool {
    /// See `styntax::ext::mtwt`.
    pub fn mtwt_eq(&self, other : &Token) -> bool {
        match (self, other) {
-            (&Ident(id1,_), &Ident(id2,_)) | (&Lifetime(id1), &Lifetime(id2)) =>
+            (&Ident(id1), &Ident(id2)) | (&Lifetime(id1), &Lifetime(id2)) =>
                mtwt::resolve(id1) == mtwt::resolve(id2),
            _ => *self == *other
        }
@@ -349,7 +341,7 @@ pub enum Nonterminal {
    NtPat(P<ast::Pat>),
    NtExpr(P<ast::Expr>),
    NtTy(P<ast::Ty>),
-    NtIdent(Box<ast::SpannedIdent>, IdentStyle),
+    NtIdent(Box<ast::SpannedIdent>),
    /// Stuff inside brackets for attributes
    NtMeta(P<ast::MetaItem>),
    NtPath(Box<ast::Path>),
@@ -743,6 +735,6 @@ fn mark_ident(id : ast::Ident, m : ast::Mrk) -> ast::Ident {
        assert!(Gt.mtwt_eq(&Gt));
        let a = str_to_ident("bac");
        let a1 = mark_ident(a,92);
-        assert!(Ident(a, ModName).mtwt_eq(&Ident(a1, Plain)));
+        assert!(Ident(a).mtwt_eq(&Ident(a1)));
    }
 }
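[Editor's illustration, not part of the patch: `Token::Ident` is now a
one-field variant, and the keyword predicates above still key off the
identifier's name alone. A minimal sketch:]

    // Constructing and classifying the slimmed-down identifier token.
    let tok = token::Ident(str_to_ident("super"));
    assert!(tok.is_path_segment_keyword());
    assert!(!token::Ident(str_to_ident("x")).is_any_keyword());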
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 95f1b63168b479f7db4b4249def9158810e7e04d..4fe076b3a7b517ef8b6331a8b3bcda53d775a4b6 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -270,14 +270,14 @@ pub fn token_to_string(tok: &Token) -> String {
    }

    /* Name components */
-        token::Ident(s, _) => s.to_string(),
+        token::Ident(s) => s.to_string(),
        token::Lifetime(s) => s.to_string(),
        token::Underscore => "_".to_string(),

        /* Other */
        token::DocComment(s) => s.to_string(),
-        token::SubstNt(s, _) => format!("${}", s),
-        token::MatchNt(s, t, _, _) => format!("${}:{}", s, t),
+        token::SubstNt(s) => format!("${}", s),
+        token::MatchNt(s, t) => format!("${}:{}", s, t),
        token::Eof => "<eof>".to_string(),
        token::Whitespace => " ".to_string(),
        token::Comment => "/* */".to_string(),
@@ -294,7 +294,7 @@ pub fn token_to_string(tok: &Token) -> String {
        token::NtBlock(ref e) => block_to_string(&e),
        token::NtStmt(ref e) => stmt_to_string(&e),
        token::NtPat(ref e) => pat_to_string(&e),
-        token::NtIdent(ref e, _) => ident_to_string(e.node),
+        token::NtIdent(ref e) => ident_to_string(e.node),
        token::NtTT(ref e) => tt_to_string(&e),
        token::NtArm(ref e) => arm_to_string(&e),
        token::NtImplItem(ref e) => impl_item_to_string(&e),
@@ -1488,20 +1488,11 @@ pub fn print_tt(&mut self, tt: &ast::TokenTree) -> io::Result<()> {

    pub fn print_tts(&mut self, tts: &[ast::TokenTree]) -> io::Result<()> {
        self.ibox(0)?;
-        let mut suppress_space = false;
        for (i, tt) in tts.iter().enumerate() {
-            if i != 0 && !suppress_space {
+            if i != 0 {
                space(&mut self.s)?;
            }
            self.print_tt(tt)?;
-            // There should be no space between the module name and the following `::` in paths,
-            // otherwise imported macros get re-parsed from crate metadata incorrectly (#20701)
-            suppress_space = match *tt {
-                TokenTree::Token(_, token::Ident(_, token::ModName)) |
-                TokenTree::Token(_, token::MatchNt(_, _, _, token::ModName)) |
-                TokenTree::Token(_, token::SubstNt(_, token::ModName)) => true,
-                _ => false
-            }
        }
        self.end()
    }
diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs
index 85453f6dfcbc8baceb1fecbc32530465e909c61a..dce808756cf6a0a2ee0a89cb425a00b2ba75d981 100644
--- a/src/libsyntax_ext/concat_idents.rs
+++ b/src/libsyntax_ext/concat_idents.rs
@@ -40,7 +40,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])
            }
        } else {
            match *e {
-                TokenTree::Token(_, token::Ident(ident, _)) => {
+                TokenTree::Token(_, token::Ident(ident)) => {
                    res_str.push_str(&ident.name.as_str())
                },
                _ => {
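[Editor's illustration, not part of the patch: with `suppress_space` removed,
`print_tts` puts exactly one space between adjacent token trees, so a path in
a pretty-printed macro body renders as `a :: b` instead of `a::b`. That is
presumably acceptable now because, without `ModName`, both spellings re-parse
to the same tokens. A sketch of the expected behavior:]

    // Hypothetical expectation using pprust::tts_to_string and the
    // string_to_tts test helper from src/libsyntax/parse/mod.rs:
    assert_eq!(tts_to_string(&string_to_tts("a::b".to_string())), "a :: b");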
diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs
index fd68ba7342798bc9e16613f4877591aa5f8cbfdc..c8341a057a1c9a7c5890000e3c75f2665146ecfc 100644
--- a/src/libsyntax_ext/format.rs
+++ b/src/libsyntax_ext/format.rs
@@ -106,7 +106,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
        if named || (p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq)) {
            named = true;
            let ident = match p.token {
-                token::Ident(i, _) => {
+                token::Ident(i) => {
                    p.bump();
                    i
                }
diff --git a/src/test/auxiliary/roman_numerals.rs b/src/test/auxiliary/roman_numerals.rs
index c262b0dba2553c8b0ef566e5e955ed2e9f038f1d..839ece49c3eb5d4c90cf2fea1321e2c98af91890 100644
--- a/src/test/auxiliary/roman_numerals.rs
+++ b/src/test/auxiliary/roman_numerals.rs
@@ -48,7 +48,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
    }

    let text = match args[0] {
-        TokenTree::Token(_, token::Ident(s, _)) => s.to_string(),
+        TokenTree::Token(_, token::Ident(s)) => s.to_string(),
        _ => {
            cx.span_err(sp, "argument should be a single identifier");
            return DummyResult::any(sp);