提交 d8b34e9a 编写于 作者: J Jeffrey Seyfried

Add `syntax::ext::tt::quoted::{TokenTree, ..}` and remove `tokenstream::TokenTree::Sequence`.

上级 24718880
...@@ -173,8 +173,7 @@ fn from_str(src: &str) -> Result<TokenStream, LexError> { ...@@ -173,8 +173,7 @@ fn from_str(src: &str) -> Result<TokenStream, LexError> {
__internal::with_parse_sess(|sess| { __internal::with_parse_sess(|sess| {
let src = src.to_string(); let src = src.to_string();
let name = "<proc-macro source code>".to_string(); let name = "<proc-macro source code>".to_string();
let tts = try!(parse::parse_tts_from_source_str(name, src, sess) let tts = parse::parse_tts_from_source_str(name, src, sess);
.map_err(parse_to_lex_err));
Ok(__internal::token_stream_wrap(tts.into_iter().collect())) Ok(__internal::token_stream_wrap(tts.into_iter().collect()))
}) })
......
...@@ -119,7 +119,6 @@ fn quote(&self) -> TokenStream { ...@@ -119,7 +119,6 @@ fn quote(&self) -> TokenStream {
::syntax::tokenstream::TokenTree::Delimited(::syntax::ext::quote::rt::DUMMY_SP, ::syntax::tokenstream::TokenTree::Delimited(::syntax::ext::quote::rt::DUMMY_SP,
(quote delimited)) (quote delimited))
}, },
_ => panic!("unexpected `TokenTree::Sequence` in `qquote`"),
} }
} }
} }
......
...@@ -1044,26 +1044,6 @@ fn hash_token_tree(&mut self, tt: &tokenstream::TokenTree) { ...@@ -1044,26 +1044,6 @@ fn hash_token_tree(&mut self, tt: &tokenstream::TokenTree) {
self.hash_token_tree(sub_tt); self.hash_token_tree(sub_tt);
} }
} }
tokenstream::TokenTree::Sequence(span, ref sequence_repetition) => {
hash_span!(self, span);
let tokenstream::SequenceRepetition {
ref tts,
ref separator,
op,
num_captures,
} = **sequence_repetition;
tts.len().hash(self.st);
for sub_tt in tts {
self.hash_token_tree(sub_tt);
}
self.hash_discriminant(separator);
if let Some(ref separator) = *separator {
self.hash_token(separator, span);
}
op.hash(self.st);
num_captures.hash(self.st);
}
} }
} }
......
...@@ -287,7 +287,7 @@ pub fn signature_string_for_span(&self, span: Span) -> String { ...@@ -287,7 +287,7 @@ pub fn signature_string_for_span(&self, span: Span) -> String {
let mut toks = toks.parse_all_token_trees().unwrap().into_iter(); let mut toks = toks.parse_all_token_trees().unwrap().into_iter();
let mut prev = toks.next().unwrap(); let mut prev = toks.next().unwrap();
let first_span = prev.get_span(); let first_span = prev.span();
let mut angle_count = 0; let mut angle_count = 0;
for tok in toks { for tok in toks {
if let TokenTree::Token(_, ref tok) = prev { if let TokenTree::Token(_, ref tok) = prev {
...@@ -305,10 +305,10 @@ pub fn signature_string_for_span(&self, span: Span) -> String { ...@@ -305,10 +305,10 @@ pub fn signature_string_for_span(&self, span: Span) -> String {
continue; continue;
} }
if let TokenTree::Token(_, token::Semi) = tok { if let TokenTree::Token(_, token::Semi) = tok {
return self.snippet(mk_sp(first_span.lo, prev.get_span().hi)); return self.snippet(mk_sp(first_span.lo, prev.span().hi));
} else if let TokenTree::Delimited(_, ref d) = tok { } else if let TokenTree::Delimited(_, ref d) = tok {
if d.delim == token::Brace { if d.delim == token::Brace {
return self.snippet(mk_sp(first_span.lo, prev.get_span().hi)); return self.snippet(mk_sp(first_span.lo, prev.span().hi));
} }
} }
prev = tok; prev = tok;
......
...@@ -211,7 +211,7 @@ pub fn visit_mod_contents(&mut self, span: Span, attrs: hir::HirVec<ast::Attribu ...@@ -211,7 +211,7 @@ pub fn visit_mod_contents(&mut self, span: Span, attrs: hir::HirVec<ast::Attribu
}; };
// FIXME(jseyfried) merge with `self.visit_macro()` // FIXME(jseyfried) merge with `self.visit_macro()`
let matchers = def.body.chunks(4).map(|arm| arm[0].get_span()).collect(); let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
om.macros.push(Macro { om.macros.push(Macro {
def_id: def_id, def_id: def_id,
attrs: def.attrs.clone().into(), attrs: def.attrs.clone().into(),
...@@ -521,7 +521,7 @@ pub fn visit_item(&mut self, item: &hir::Item, ...@@ -521,7 +521,7 @@ pub fn visit_item(&mut self, item: &hir::Item,
// convert each exported_macro into a doc item // convert each exported_macro into a doc item
fn visit_local_macro(&self, def: &hir::MacroDef) -> Macro { fn visit_local_macro(&self, def: &hir::MacroDef) -> Macro {
// Extract the spans of all matchers. They represent the "interface" of the macro. // Extract the spans of all matchers. They represent the "interface" of the macro.
let matchers = def.body.chunks(4).map(|arm| arm[0].get_span()).collect(); let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
Macro { Macro {
def_id: self.cx.tcx.hir.local_def_id(def.id), def_id: self.cx.tcx.hir.local_def_id(def.id),
......
...@@ -14,10 +14,9 @@ ...@@ -14,10 +14,9 @@
use ext::base; use ext::base;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use parse::parser::{Parser, PathStyle}; use parse::parser::{Parser, PathStyle};
use parse::token::*;
use parse::token; use parse::token;
use ptr::P; use ptr::P;
use tokenstream::{self, TokenTree}; use tokenstream::TokenTree;
/// Quasiquoting works via token trees. /// Quasiquoting works via token trees.
...@@ -356,14 +355,35 @@ fn parse_expr(&self, s: String) -> P<ast::Expr> { ...@@ -356,14 +355,35 @@ fn parse_expr(&self, s: String) -> P<ast::Expr> {
} }
fn parse_tts(&self, s: String) -> Vec<TokenTree> { fn parse_tts(&self, s: String) -> Vec<TokenTree> {
panictry!(parse::parse_tts_from_source_str( parse::parse_tts_from_source_str("<quote expansion>".to_string(), s, self.parse_sess())
"<quote expansion>".to_string(),
s,
self.parse_sess()))
} }
} }
} }
// Replaces `Token::OpenDelim .. Token::CloseDelim` with `TokenTree::Delimited(..)`.
pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
use std::rc::Rc;
use tokenstream::Delimited;
let mut results = Vec::new();
let mut result = Vec::new();
for tree in tts {
match tree {
TokenTree::Token(_, token::OpenDelim(..)) => {
results.push(::std::mem::replace(&mut result, Vec::new()));
}
TokenTree::Token(span, token::CloseDelim(delim)) => {
let tree =
TokenTree::Delimited(span, Rc::new(Delimited { delim: delim, tts: result }));
result = results.pop().unwrap();
result.push(tree);
}
tree @ _ => result.push(tree),
}
}
result
}
// These panicking parsing functions are used by the quote_*!() syntax extensions, // These panicking parsing functions are used by the quote_*!() syntax extensions,
// but shouldn't be used otherwise. // but shouldn't be used otherwise.
pub fn parse_expr_panic(parser: &mut Parser) -> P<Expr> { pub fn parse_expr_panic(parser: &mut Parser) -> P<Expr> {
...@@ -510,20 +530,6 @@ pub fn expand_quote_path(cx: &mut ExtCtxt, ...@@ -510,20 +530,6 @@ pub fn expand_quote_path(cx: &mut ExtCtxt,
base::MacEager::expr(expanded) base::MacEager::expr(expanded)
} }
pub fn expand_quote_matcher(cx: &mut ExtCtxt,
sp: Span,
tts: &[TokenTree])
-> Box<base::MacResult+'static> {
let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
let mut vector = mk_stmts_let(cx, sp);
vector.extend(statements_mk_tts(cx, &tts[..], true));
vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
let block = cx.expr_block(cx.block(sp, vector));
let expanded = expand_wrapper(cx, sp, cx_expr, block, &[&["syntax", "ext", "quote", "rt"]]);
base::MacEager::expr(expanded)
}
fn ids_ext(strs: Vec<String>) -> Vec<ast::Ident> { fn ids_ext(strs: Vec<String>) -> Vec<ast::Ident> {
strs.iter().map(|s| ast::Ident::from_str(s)).collect() strs.iter().map(|s| ast::Ident::from_str(s)).collect()
} }
...@@ -669,12 +675,6 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> { ...@@ -669,12 +675,6 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
vec![mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))]); vec![mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))]);
} }
token::MatchNt(name, kind) => {
return cx.expr_call(sp,
mk_token_path(cx, sp, "MatchNt"),
vec![mk_ident(cx, sp, name), mk_ident(cx, sp, kind)]);
}
token::Interpolated(_) => panic!("quote! with interpolated token"), token::Interpolated(_) => panic!("quote! with interpolated token"),
_ => () _ => ()
...@@ -712,9 +712,9 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> { ...@@ -712,9 +712,9 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
mk_token_path(cx, sp, name) mk_token_path(cx, sp, name)
} }
fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stmt> { fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt> {
match *tt { match *tt {
TokenTree::Token(sp, SubstNt(ident)) => { TokenTree::Token(sp, token::Ident(ident)) if quoted => {
// tt.extend($ident.to_tokens(ext_cx)) // tt.extend($ident.to_tokens(ext_cx))
let e_to_toks = let e_to_toks =
...@@ -733,13 +733,6 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm ...@@ -733,13 +733,6 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm
vec![cx.stmt_expr(e_push)] vec![cx.stmt_expr(e_push)]
} }
ref tt @ TokenTree::Token(_, MatchNt(..)) if !matcher => {
let mut seq = vec![];
for i in 0..tt.len() {
seq.push(tt.get_tt(i));
}
statements_mk_tts(cx, &seq[..], matcher)
}
TokenTree::Token(sp, ref tok) => { TokenTree::Token(sp, ref tok) => {
let e_sp = cx.expr_ident(sp, id_ext("_sp")); let e_sp = cx.expr_ident(sp, id_ext("_sp"));
let e_tok = cx.expr_call(sp, let e_tok = cx.expr_call(sp,
...@@ -753,77 +746,17 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm ...@@ -753,77 +746,17 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm
vec![cx.stmt_expr(e_push)] vec![cx.stmt_expr(e_push)]
}, },
TokenTree::Delimited(span, ref delimed) => { TokenTree::Delimited(span, ref delimed) => {
statements_mk_tt(cx, &delimed.open_tt(span), matcher).into_iter() let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span), false);
.chain(delimed.tts.iter() stmts.extend(statements_mk_tts(cx, &delimed.tts));
.flat_map(|tt| statements_mk_tt(cx, tt, matcher))) stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span), false));
.chain(statements_mk_tt(cx, &delimed.close_tt(span), matcher)) stmts
.collect()
},
TokenTree::Sequence(sp, ref seq) => {
if !matcher {
panic!("TokenTree::Sequence in quote!");
}
let e_sp = cx.expr_ident(sp, id_ext("_sp"));
let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
let mut tts_stmts = vec![stmt_let_tt];
tts_stmts.extend(statements_mk_tts(cx, &seq.tts[..], matcher));
tts_stmts.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
let e_tts = cx.expr_block(cx.block(sp, tts_stmts));
let e_separator = match seq.separator {
Some(ref sep) => cx.expr_some(sp, expr_mk_token(cx, sp, sep)),
None => cx.expr_none(sp),
};
let e_op = match seq.op {
tokenstream::KleeneOp::ZeroOrMore => "ZeroOrMore",
tokenstream::KleeneOp::OneOrMore => "OneOrMore",
};
let e_op_idents = vec![
id_ext("syntax"),
id_ext("tokenstream"),
id_ext("KleeneOp"),
id_ext(e_op),
];
let e_op = cx.expr_path(cx.path_global(sp, e_op_idents));
let fields = vec![cx.field_imm(sp, id_ext("tts"), e_tts),
cx.field_imm(sp, id_ext("separator"), e_separator),
cx.field_imm(sp, id_ext("op"), e_op),
cx.field_imm(sp, id_ext("num_captures"),
cx.expr_usize(sp, seq.num_captures))];
let seq_path = vec![id_ext("syntax"),
id_ext("tokenstream"),
id_ext("SequenceRepetition")];
let e_seq_struct = cx.expr_struct(sp, cx.path_global(sp, seq_path), fields);
let e_rc_new = cx.expr_call_global(sp, vec![id_ext("std"),
id_ext("rc"),
id_ext("Rc"),
id_ext("new")],
vec![e_seq_struct]);
let e_tok = cx.expr_call(sp,
mk_tt_path(cx, sp, "Sequence"),
vec![e_sp, e_rc_new]);
let e_push =
cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext("tt")),
id_ext("push"),
vec![e_tok]);
vec![cx.stmt_expr(e_push)]
} }
} }
} }
fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[TokenTree]) fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[TokenTree])
-> (P<ast::Expr>, Vec<TokenTree>) { -> (P<ast::Expr>, Vec<TokenTree>) {
// NB: It appears that the main parser loses its mind if we consider
// $foo as a SubstNt during the main parse, so we have to re-parse
// under quote_depth > 0. This is silly and should go away; the _guess_ is
// it has to do with transition away from supporting old-style macros, so
// try removing it when enough of them are gone.
let mut p = cx.new_parser_from_tts(tts); let mut p = cx.new_parser_from_tts(tts);
p.quote_depth += 1;
let cx_expr = panictry!(p.parse_expr()); let cx_expr = panictry!(p.parse_expr());
if !p.eat(&token::Comma) { if !p.eat(&token::Comma) {
...@@ -877,24 +810,31 @@ fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<ast::Stmt> { ...@@ -877,24 +810,31 @@ fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<ast::Stmt> {
vec![stmt_let_sp, stmt_let_tt] vec![stmt_let_sp, stmt_let_tt]
} }
fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree], matcher: bool) -> Vec<ast::Stmt> { fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree]) -> Vec<ast::Stmt> {
let mut ss = Vec::new(); let mut ss = Vec::new();
let mut quoted = false;
for tt in tts { for tt in tts {
ss.extend(statements_mk_tt(cx, tt, matcher)); quoted = match *tt {
TokenTree::Token(_, token::Dollar) if !quoted => true,
_ => {
ss.extend(statements_mk_tt(cx, tt, quoted));
false
}
}
} }
ss ss
} }
fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) -> (P<ast::Expr>, P<ast::Expr>) {
-> (P<ast::Expr>, P<ast::Expr>) {
let (cx_expr, tts) = parse_arguments_to_quote(cx, tts); let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
let mut vector = mk_stmts_let(cx, sp); let mut vector = mk_stmts_let(cx, sp);
vector.extend(statements_mk_tts(cx, &tts[..], false)); vector.extend(statements_mk_tts(cx, &tts[..]));
vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt")))); vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
let block = cx.expr_block(cx.block(sp, vector)); let block = cx.expr_block(cx.block(sp, vector));
let unflatten = vec![id_ext("syntax"), id_ext("ext"), id_ext("quote"), id_ext("unflatten")];
(cx_expr, block) (cx_expr, cx.expr_call_global(sp, unflatten, vec![block]))
} }
fn expand_wrapper(cx: &ExtCtxt, fn expand_wrapper(cx: &ExtCtxt,
......
...@@ -82,13 +82,14 @@ ...@@ -82,13 +82,14 @@
use syntax_pos::{self, BytePos, mk_sp, Span}; use syntax_pos::{self, BytePos, mk_sp, Span};
use codemap::Spanned; use codemap::Spanned;
use errors::FatalError; use errors::FatalError;
use ext::tt::quoted;
use parse::{Directory, ParseSess}; use parse::{Directory, ParseSess};
use parse::parser::{PathStyle, Parser}; use parse::parser::{PathStyle, Parser};
use parse::token::{DocComment, MatchNt, SubstNt}; use parse::token::{DocComment, MatchNt};
use parse::token::{Token, Nonterminal}; use parse::token::{Token, Nonterminal};
use parse::token; use parse::token;
use print::pprust; use print::pprust;
use tokenstream::{self, TokenTree}; use tokenstream::TokenTree;
use util::small_vector::SmallVector; use util::small_vector::SmallVector;
use std::mem; use std::mem;
...@@ -101,8 +102,8 @@ ...@@ -101,8 +102,8 @@
#[derive(Clone)] #[derive(Clone)]
enum TokenTreeOrTokenTreeVec { enum TokenTreeOrTokenTreeVec {
Tt(tokenstream::TokenTree), Tt(quoted::TokenTree),
TtSeq(Vec<tokenstream::TokenTree>), TtSeq(Vec<quoted::TokenTree>),
} }
impl TokenTreeOrTokenTreeVec { impl TokenTreeOrTokenTreeVec {
...@@ -113,7 +114,7 @@ fn len(&self) -> usize { ...@@ -113,7 +114,7 @@ fn len(&self) -> usize {
} }
} }
fn get_tt(&self, index: usize) -> TokenTree { fn get_tt(&self, index: usize) -> quoted::TokenTree {
match *self { match *self {
TtSeq(ref v) => v[index].clone(), TtSeq(ref v) => v[index].clone(),
Tt(ref tt) => tt.get_tt(index), Tt(ref tt) => tt.get_tt(index),
...@@ -144,7 +145,9 @@ struct MatcherPos { ...@@ -144,7 +145,9 @@ struct MatcherPos {
pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>; pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;
pub fn count_names(ms: &[TokenTree]) -> usize { pub fn count_names(ms: &[quoted::TokenTree]) -> usize {
use self::quoted::TokenTree;
ms.iter().fold(0, |count, elt| { ms.iter().fold(0, |count, elt| {
count + match *elt { count + match *elt {
TokenTree::Sequence(_, ref seq) => { TokenTree::Sequence(_, ref seq) => {
...@@ -161,7 +164,7 @@ pub fn count_names(ms: &[TokenTree]) -> usize { ...@@ -161,7 +164,7 @@ pub fn count_names(ms: &[TokenTree]) -> usize {
}) })
} }
fn initial_matcher_pos(ms: Vec<TokenTree>, lo: BytePos) -> Box<MatcherPos> { fn initial_matcher_pos(ms: Vec<quoted::TokenTree>, lo: BytePos) -> Box<MatcherPos> {
let match_idx_hi = count_names(&ms[..]); let match_idx_hi = count_names(&ms[..]);
let matches = create_matches(match_idx_hi); let matches = create_matches(match_idx_hi);
Box::new(MatcherPos { Box::new(MatcherPos {
...@@ -200,7 +203,10 @@ pub enum NamedMatch { ...@@ -200,7 +203,10 @@ pub enum NamedMatch {
MatchedNonterminal(Rc<Nonterminal>) MatchedNonterminal(Rc<Nonterminal>)
} }
fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(ms: &[TokenTree], mut res: I) -> NamedParseResult { fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(ms: &[quoted::TokenTree], mut res: I)
-> NamedParseResult {
use self::quoted::TokenTree;
fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(m: &TokenTree, mut res: &mut I, fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(m: &TokenTree, mut res: &mut I,
ret_val: &mut HashMap<Ident, Rc<NamedMatch>>) ret_val: &mut HashMap<Ident, Rc<NamedMatch>>)
-> Result<(), (syntax_pos::Span, String)> { -> Result<(), (syntax_pos::Span, String)> {
...@@ -225,9 +231,6 @@ fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(m: &TokenTree, mut res: &mut I, ...@@ -225,9 +231,6 @@ fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(m: &TokenTree, mut res: &mut I,
} }
} }
} }
TokenTree::Token(sp, SubstNt(..)) => {
return Err((sp, "missing fragment specifier".to_string()))
}
TokenTree::Token(..) => (), TokenTree::Token(..) => (),
} }
...@@ -281,6 +284,8 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>, ...@@ -281,6 +284,8 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
eof_eis: &mut SmallVector<Box<MatcherPos>>, eof_eis: &mut SmallVector<Box<MatcherPos>>,
bb_eis: &mut SmallVector<Box<MatcherPos>>, bb_eis: &mut SmallVector<Box<MatcherPos>>,
token: &Token, span: &syntax_pos::Span) -> ParseResult<()> { token: &Token, span: &syntax_pos::Span) -> ParseResult<()> {
use self::quoted::TokenTree;
while let Some(mut ei) = cur_eis.pop() { while let Some(mut ei) = cur_eis.pop() {
// When unzipped trees end, remove them // When unzipped trees end, remove them
while ei.idx >= ei.top_elts.len() { while ei.idx >= ei.top_elts.len() {
...@@ -346,7 +351,7 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>, ...@@ -346,7 +351,7 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
match ei.top_elts.get_tt(idx) { match ei.top_elts.get_tt(idx) {
/* need to descend into sequence */ /* need to descend into sequence */
TokenTree::Sequence(sp, seq) => { TokenTree::Sequence(sp, seq) => {
if seq.op == tokenstream::KleeneOp::ZeroOrMore { if seq.op == quoted::KleeneOp::ZeroOrMore {
// Examine the case where there are 0 matches of this sequence // Examine the case where there are 0 matches of this sequence
let mut new_ei = ei.clone(); let mut new_ei = ei.clone();
new_ei.match_cur += seq.num_captures; new_ei.match_cur += seq.num_captures;
...@@ -380,9 +385,6 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>, ...@@ -380,9 +385,6 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
_ => bb_eis.push(ei), _ => bb_eis.push(ei),
} }
} }
TokenTree::Token(sp, SubstNt(..)) => {
return Error(sp, "missing fragment specifier".to_string())
}
seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => { seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => {
let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq)); let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq));
let idx = ei.idx; let idx = ei.idx;
...@@ -406,8 +408,13 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>, ...@@ -406,8 +408,13 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
Success(()) Success(())
} }
pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory: Option<Directory>) pub fn parse(sess: &ParseSess,
tts: Vec<TokenTree>,
ms: &[quoted::TokenTree],
directory: Option<Directory>)
-> NamedParseResult { -> NamedParseResult {
use self::quoted::TokenTree;
let mut parser = Parser::new(sess, tts, directory, true); let mut parser = Parser::new(sess, tts, directory, true);
let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo)); let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo));
let mut next_eis = Vec::new(); // or proceed normally let mut next_eis = Vec::new(); // or proceed normally
...@@ -479,10 +486,7 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory: ...@@ -479,10 +486,7 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory:
fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal { fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
match name { match name {
"tt" => { "tt" => {
p.quote_depth += 1; //but in theory, non-quoted tts might be useful return token::NtTT(panictry!(p.parse_token_tree()));
let tt = panictry!(p.parse_token_tree());
p.quote_depth -= 1;
return token::NtTT(tt);
} }
_ => {} _ => {}
} }
......
...@@ -16,6 +16,7 @@ ...@@ -16,6 +16,7 @@
use ext::tt::macro_parser::{Success, Error, Failure}; use ext::tt::macro_parser::{Success, Error, Failure};
use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal}; use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
use ext::tt::macro_parser::{parse, parse_failure_msg}; use ext::tt::macro_parser::{parse, parse_failure_msg};
use ext::tt::quoted;
use ext::tt::transcribe::transcribe; use ext::tt::transcribe::transcribe;
use parse::{Directory, ParseSess}; use parse::{Directory, ParseSess};
use parse::parser::Parser; use parse::parser::Parser;
...@@ -23,7 +24,7 @@ ...@@ -23,7 +24,7 @@
use parse::token::Token::*; use parse::token::Token::*;
use print; use print;
use symbol::Symbol; use symbol::Symbol;
use tokenstream::{self, TokenTree}; use tokenstream::TokenTree;
use std::collections::{HashMap}; use std::collections::{HashMap};
use std::collections::hash_map::{Entry}; use std::collections::hash_map::{Entry};
...@@ -58,8 +59,8 @@ pub fn make(mut self: Box<ParserAnyMacro<'a>>, kind: ExpansionKind) -> Expansion ...@@ -58,8 +59,8 @@ pub fn make(mut self: Box<ParserAnyMacro<'a>>, kind: ExpansionKind) -> Expansion
struct MacroRulesMacroExpander { struct MacroRulesMacroExpander {
name: ast::Ident, name: ast::Ident,
lhses: Vec<TokenTree>, lhses: Vec<quoted::TokenTree>,
rhses: Vec<TokenTree>, rhses: Vec<quoted::TokenTree>,
valid: bool, valid: bool,
} }
...@@ -86,8 +87,8 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, ...@@ -86,8 +87,8 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
sp: Span, sp: Span,
name: ast::Ident, name: ast::Ident,
arg: &[TokenTree], arg: &[TokenTree],
lhses: &[TokenTree], lhses: &[quoted::TokenTree],
rhses: &[TokenTree]) rhses: &[quoted::TokenTree])
-> Box<MacResult+'cx> { -> Box<MacResult+'cx> {
if cx.trace_macros() { if cx.trace_macros() {
println!("{}! {{ {} }}", println!("{}! {{ {} }}",
...@@ -101,7 +102,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, ...@@ -101,7 +102,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
let lhs_tt = match *lhs { let lhs_tt = match *lhs {
TokenTree::Delimited(_, ref delim) => &delim.tts[..], quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
_ => cx.span_bug(sp, "malformed macro lhs") _ => cx.span_bug(sp, "malformed macro lhs")
}; };
...@@ -109,7 +110,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, ...@@ -109,7 +110,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
Success(named_matches) => { Success(named_matches) => {
let rhs = match rhses[i] { let rhs = match rhses[i] {
// ignore delimiters // ignore delimiters
TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(), quoted::TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
_ => cx.span_bug(sp, "malformed macro rhs"), _ => cx.span_bug(sp, "malformed macro rhs"),
}; };
// rhs has holes ( `$id` and `$(...)` that need filled) // rhs has holes ( `$id` and `$(...)` that need filled)
...@@ -167,21 +168,21 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension { ...@@ -167,21 +168,21 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
let match_lhs_tok = MatchNt(lhs_nm, ast::Ident::from_str("tt")); let match_lhs_tok = MatchNt(lhs_nm, ast::Ident::from_str("tt"));
let match_rhs_tok = MatchNt(rhs_nm, ast::Ident::from_str("tt")); let match_rhs_tok = MatchNt(rhs_nm, ast::Ident::from_str("tt"));
let argument_gram = vec![ let argument_gram = vec![
TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition { quoted::TokenTree::Sequence(DUMMY_SP, Rc::new(quoted::SequenceRepetition {
tts: vec![ tts: vec![
TokenTree::Token(DUMMY_SP, match_lhs_tok), quoted::TokenTree::Token(DUMMY_SP, match_lhs_tok),
TokenTree::Token(DUMMY_SP, token::FatArrow), quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
TokenTree::Token(DUMMY_SP, match_rhs_tok), quoted::TokenTree::Token(DUMMY_SP, match_rhs_tok),
], ],
separator: Some(token::Semi), separator: Some(token::Semi),
op: tokenstream::KleeneOp::OneOrMore, op: quoted::KleeneOp::OneOrMore,
num_captures: 2, num_captures: 2,
})), })),
// to phase into semicolon-termination instead of semicolon-separation // to phase into semicolon-termination instead of semicolon-separation
TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition { quoted::TokenTree::Sequence(DUMMY_SP, Rc::new(quoted::SequenceRepetition {
tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)], tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)],
separator: None, separator: None,
op: tokenstream::KleeneOp::ZeroOrMore, op: quoted::KleeneOp::ZeroOrMore,
num_captures: 0 num_captures: 0
})), })),
]; ];
...@@ -206,12 +207,13 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension { ...@@ -206,12 +207,13 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
s.iter().map(|m| { s.iter().map(|m| {
if let MatchedNonterminal(ref nt) = **m { if let MatchedNonterminal(ref nt) = **m {
if let NtTT(ref tt) = **nt { if let NtTT(ref tt) = **nt {
valid &= check_lhs_nt_follows(sess, tt); let tt = quoted::parse(&[tt.clone()], true, sess).pop().unwrap();
return (*tt).clone(); valid &= check_lhs_nt_follows(sess, &tt);
return tt;
} }
} }
sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
}).collect::<Vec<TokenTree>>() }).collect::<Vec<quoted::TokenTree>>()
} }
_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
}; };
...@@ -221,11 +223,11 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension { ...@@ -221,11 +223,11 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
s.iter().map(|m| { s.iter().map(|m| {
if let MatchedNonterminal(ref nt) = **m { if let MatchedNonterminal(ref nt) = **m {
if let NtTT(ref tt) = **nt { if let NtTT(ref tt) = **nt {
return (*tt).clone(); return quoted::parse(&[tt.clone()], false, sess).pop().unwrap();
} }
} }
sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
}).collect() }).collect::<Vec<quoted::TokenTree>>()
} }
_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs") _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
}; };
...@@ -249,14 +251,14 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension { ...@@ -249,14 +251,14 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
NormalTT(exp, Some(def.span), attr::contains_name(&def.attrs, "allow_internal_unstable")) NormalTT(exp, Some(def.span), attr::contains_name(&def.attrs, "allow_internal_unstable"))
} }
fn check_lhs_nt_follows(sess: &ParseSess, lhs: &TokenTree) -> bool { fn check_lhs_nt_follows(sess: &ParseSess, lhs: &quoted::TokenTree) -> bool {
// lhs is going to be like TokenTree::Delimited(...), where the // lhs is going to be like TokenTree::Delimited(...), where the
// entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens. // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
match lhs { match lhs {
&TokenTree::Delimited(_, ref tts) => check_matcher(sess, &tts.tts), &quoted::TokenTree::Delimited(_, ref tts) => check_matcher(sess, &tts.tts),
_ => { _ => {
let msg = "invalid macro matcher; matchers must be contained in balanced delimiters"; let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
sess.span_diagnostic.span_err(lhs.get_span(), msg); sess.span_diagnostic.span_err(lhs.span(), msg);
false false
} }
} }
...@@ -266,7 +268,8 @@ fn check_lhs_nt_follows(sess: &ParseSess, lhs: &TokenTree) -> bool { ...@@ -266,7 +268,8 @@ fn check_lhs_nt_follows(sess: &ParseSess, lhs: &TokenTree) -> bool {
/// Check that the lhs contains no repetition which could match an empty token /// Check that the lhs contains no repetition which could match an empty token
/// tree, because then the matcher would hang indefinitely. /// tree, because then the matcher would hang indefinitely.
fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool { fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
use self::quoted::TokenTree;
for tt in tts { for tt in tts {
match *tt { match *tt {
TokenTree::Token(_, _) => (), TokenTree::Token(_, _) => (),
...@@ -278,7 +281,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool { ...@@ -278,7 +281,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool {
if seq.tts.iter().all(|seq_tt| { if seq.tts.iter().all(|seq_tt| {
match *seq_tt { match *seq_tt {
TokenTree::Sequence(_, ref sub_seq) => TokenTree::Sequence(_, ref sub_seq) =>
sub_seq.op == tokenstream::KleeneOp::ZeroOrMore, sub_seq.op == quoted::KleeneOp::ZeroOrMore,
_ => false, _ => false,
} }
}) { }) {
...@@ -296,15 +299,15 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool { ...@@ -296,15 +299,15 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool {
true true
} }
fn check_rhs(sess: &ParseSess, rhs: &TokenTree) -> bool { fn check_rhs(sess: &ParseSess, rhs: &quoted::TokenTree) -> bool {
match *rhs { match *rhs {
TokenTree::Delimited(..) => return true, quoted::TokenTree::Delimited(..) => return true,
_ => sess.span_diagnostic.span_err(rhs.get_span(), "macro rhs must be delimited") _ => sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited")
} }
false false
} }
fn check_matcher(sess: &ParseSess, matcher: &[TokenTree]) -> bool { fn check_matcher(sess: &ParseSess, matcher: &[quoted::TokenTree]) -> bool {
let first_sets = FirstSets::new(matcher); let first_sets = FirstSets::new(matcher);
let empty_suffix = TokenSet::empty(); let empty_suffix = TokenSet::empty();
let err = sess.span_diagnostic.err_count(); let err = sess.span_diagnostic.err_count();
...@@ -335,7 +338,9 @@ struct FirstSets { ...@@ -335,7 +338,9 @@ struct FirstSets {
} }
impl FirstSets { impl FirstSets {
fn new(tts: &[TokenTree]) -> FirstSets { fn new(tts: &[quoted::TokenTree]) -> FirstSets {
use self::quoted::TokenTree;
let mut sets = FirstSets { first: HashMap::new() }; let mut sets = FirstSets { first: HashMap::new() };
build_recur(&mut sets, tts); build_recur(&mut sets, tts);
return sets; return sets;
...@@ -382,7 +387,7 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet { ...@@ -382,7 +387,7 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
} }
// Reverse scan: Sequence comes before `first`. // Reverse scan: Sequence comes before `first`.
if subfirst.maybe_empty || seq_rep.op == tokenstream::KleeneOp::ZeroOrMore { if subfirst.maybe_empty || seq_rep.op == quoted::KleeneOp::ZeroOrMore {
// If sequence is potentially empty, then // If sequence is potentially empty, then
// union them (preserving first emptiness). // union them (preserving first emptiness).
first.add_all(&TokenSet { maybe_empty: true, ..subfirst }); first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
...@@ -401,7 +406,9 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet { ...@@ -401,7 +406,9 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
// walks forward over `tts` until all potential FIRST tokens are // walks forward over `tts` until all potential FIRST tokens are
// identified. // identified.
fn first(&self, tts: &[TokenTree]) -> TokenSet { fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet {
use self::quoted::TokenTree;
let mut first = TokenSet::empty(); let mut first = TokenSet::empty();
for tt in tts.iter() { for tt in tts.iter() {
assert!(first.maybe_empty); assert!(first.maybe_empty);
...@@ -430,7 +437,7 @@ fn first(&self, tts: &[TokenTree]) -> TokenSet { ...@@ -430,7 +437,7 @@ fn first(&self, tts: &[TokenTree]) -> TokenSet {
assert!(first.maybe_empty); assert!(first.maybe_empty);
first.add_all(subfirst); first.add_all(subfirst);
if subfirst.maybe_empty || if subfirst.maybe_empty ||
seq_rep.op == tokenstream::KleeneOp::ZeroOrMore { seq_rep.op == quoted::KleeneOp::ZeroOrMore {
// continue scanning for more first // continue scanning for more first
// tokens, but also make sure we // tokens, but also make sure we
// restore empty-tracking state // restore empty-tracking state
...@@ -549,9 +556,10 @@ fn add_all(&mut self, other: &Self) { ...@@ -549,9 +556,10 @@ fn add_all(&mut self, other: &Self) {
// see `FirstSets::new`. // see `FirstSets::new`.
fn check_matcher_core(sess: &ParseSess, fn check_matcher_core(sess: &ParseSess,
first_sets: &FirstSets, first_sets: &FirstSets,
matcher: &[TokenTree], matcher: &[quoted::TokenTree],
follow: &TokenSet) -> TokenSet { follow: &TokenSet) -> TokenSet {
use print::pprust::token_to_string; use print::pprust::token_to_string;
use self::quoted::TokenTree;
let mut last = TokenSet::empty(); let mut last = TokenSet::empty();
......
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast;
use ext::tt::macro_parser;
use parse::{ParseSess, token};
use print::pprust;
use symbol::{keywords, Symbol};
use syntax_pos::{DUMMY_SP, Span, BytePos};
use tokenstream;
use std::rc::Rc;
/// A delimited sequence of token trees, e.g. the `(a, b)` in `foo!((a, b))`.
/// Mirrors `tokenstream::Delimited`, except that its `tts` are
/// `quoted::TokenTree`s, so `$name`, `$name:kind`, and `$(...)` appear as
/// first-class nodes rather than raw token runs.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Delimited {
    /// The kind of delimiter (paren, bracket, brace, or none).
    pub delim: token::DelimToken,
    /// The token trees between the opening and closing delimiters.
    pub tts: Vec<TokenTree>,
}
impl Delimited {
pub fn open_token(&self) -> token::Token {
token::OpenDelim(self.delim)
}
pub fn close_token(&self) -> token::Token {
token::CloseDelim(self.delim)
}
pub fn open_tt(&self, span: Span) -> TokenTree {
let open_span = match span {
DUMMY_SP => DUMMY_SP,
_ => Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span },
};
TokenTree::Token(open_span, self.open_token())
}
pub fn close_tt(&self, span: Span) -> TokenTree {
let close_span = match span {
DUMMY_SP => DUMMY_SP,
_ => Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span },
};
TokenTree::Token(close_span, self.close_token())
}
}
/// A `$(...)` repetition found in a macro pattern or body.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct SequenceRepetition {
    /// The sequence of token trees being repeated
    pub tts: Vec<TokenTree>,
    /// The optional separator token between repetitions (e.g. the `,` in `$(...),*`)
    pub separator: Option<token::Token>,
    /// Whether the sequence can be repeated zero (*), or one or more times (+)
    pub op: KleeneOp,
    /// The number of `MatchNt`s that appear in the sequence (and subsequences)
    pub num_captures: usize,
}
/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
/// for token sequences.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum KleeneOp {
    /// `*`: repeat the sequence zero or more times
    ZeroOrMore,
    /// `+`: repeat the sequence one or more times
    OneOrMore,
}
/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
/// are "first-class" token trees.
#[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub enum TokenTree {
    /// A single token
    Token(Span, token::Token),
    /// A delimited sequence of token trees, e.g. `( ... )`
    Delimited(Span, Rc<Delimited>),
    /// A kleene-style repetition sequence with a span
    Sequence(Span, Rc<SequenceRepetition>),
}
impl TokenTree {
    /// Returns the number of constituent token trees that `get_tt` can index.
    /// Delimited trees (other than `NoDelim`) count their two delimiter
    /// tokens; bare tokens have no constituents.
    pub fn len(&self) -> usize {
        match *self {
            TokenTree::Token(..) => 0,
            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
            TokenTree::Delimited(_, ref delimed) => {
                if delimed.delim == token::NoDelim {
                    delimed.tts.len()
                } else {
                    delimed.tts.len() + 2
                }
            }
        }
    }

    /// Returns the `index`-th constituent token tree. For a delimited tree,
    /// index 0 and `len() - 1` are synthesized open/close delimiter tokens.
    /// Panics when `index` is out of range or `self` has no constituents.
    pub fn get_tt(&self, index: usize) -> TokenTree {
        match *self {
            TokenTree::Delimited(_, ref delimed) if delimed.delim == token::NoDelim => {
                delimed.tts[index].clone()
            }
            TokenTree::Delimited(span, ref delimed) => {
                if index == 0 {
                    delimed.open_tt(span)
                } else if index == delimed.tts.len() + 1 {
                    delimed.close_tt(span)
                } else {
                    delimed.tts[index - 1].clone()
                }
            }
            TokenTree::Sequence(_, ref seq) => seq.tts[index].clone(),
            _ => panic!("Cannot expand a token tree"),
        }
    }

    /// Retrieve the TokenTree's span.
    pub fn span(&self) -> Span {
        match *self {
            TokenTree::Token(sp, _) |
            TokenTree::Delimited(sp, _) |
            TokenTree::Sequence(sp, _) => sp,
        }
    }
}
/// Converts a flat stream of `tokenstream::TokenTree`s into `quoted::TokenTree`s,
/// recognizing `$(...)`, `$ident`, and — when `expect_matchers` is true (i.e. on
/// the LHS of a `macro_rules!` rule) — `$ident:kind` fragment matchers.
/// Errors are reported through `sess`; a best-effort result is still returned.
pub fn parse(input: &[tokenstream::TokenTree], expect_matchers: bool, sess: &ParseSess)
             -> Vec<TokenTree> {
    let mut result = Vec::new();
    let mut trees = input.iter().cloned();
    while let Some(tree) = trees.next() {
        // `parse_tree` may itself consume more trees (for `$(...)` and `$ident`).
        let tree = parse_tree(tree, &mut trees, expect_matchers, sess);
        match tree {
            // On a matcher LHS, a `$ident` must be followed by `:kind`;
            // peek ahead (consuming from the shared iterator) to find it.
            TokenTree::Token(start_sp, token::SubstNt(ident)) if expect_matchers => {
                let span = match trees.next() {
                    Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
                        Some(tokenstream::TokenTree::Token(end_sp, token::Ident(kind))) => {
                            // Full `$ident:kind` matcher: span covers `$` through `kind`.
                            let span = Span { lo: start_sp.lo, ..end_sp };
                            result.push(TokenTree::Token(span, token::MatchNt(ident, kind)));
                            continue
                        }
                        // `$ident:` with no kind: point the error at whatever followed.
                        tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
                    },
                    // `$ident` with no `:` at all.
                    tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
                };
                sess.span_diagnostic.span_err(span, "missing fragment specifier");
            }
            _ => result.push(tree),
        }
    }
    result
}
/// Converts one `tokenstream::TokenTree` into a `quoted::TokenTree`,
/// pulling further trees from `trees` when `tree` is a `$` that introduces
/// a repetition `$(...)` or a variable `$ident`. Delimited groups recurse
/// through `parse`; errors go to `sess` and a placeholder is returned.
fn parse_tree<I>(tree: tokenstream::TokenTree,
                 trees: &mut I,
                 expect_matchers: bool,
                 sess: &ParseSess)
                 -> TokenTree
    where I: Iterator<Item = tokenstream::TokenTree>,
{
    match tree {
        // A `$` starts either `$( ... ) sep? op` or `$ident`.
        tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
            Some(tokenstream::TokenTree::Delimited(span, ref delimited)) => {
                // Only parens are legal after `$`; report but keep going so
                // the rest of the macro can still be checked.
                if delimited.delim != token::Paren {
                    let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim));
                    let msg = format!("expected `(`, found `{}`", tok);
                    sess.span_diagnostic.span_err(span, &msg);
                }
                let sequence = parse(&delimited.tts, expect_matchers, sess);
                // The separator (if any) and `*`/`+` follow the closing paren.
                let (separator, op) = parse_sep_and_kleene_op(trees, span, sess);
                let name_captures = macro_parser::count_names(&sequence);
                TokenTree::Sequence(span, Rc::new(SequenceRepetition {
                    tts: sequence,
                    separator: separator,
                    op: op,
                    num_captures: name_captures,
                }))
            }
            Some(tokenstream::TokenTree::Token(ident_span, token::Ident(ident))) => {
                let span = Span { lo: span.lo, ..ident_span };
                if ident.name == keywords::Crate.name() {
                    // `$crate` is special: it stays a plain ident token so it
                    // survives expansion, rather than becoming a SubstNt.
                    let ident = ast::Ident { name: Symbol::intern("$crate"), ..ident };
                    TokenTree::Token(span, token::Ident(ident))
                } else {
                    TokenTree::Token(span, token::SubstNt(ident))
                }
            }
            Some(tokenstream::TokenTree::Token(span, tok)) => {
                let msg = format!("expected identifier, found `{}`", pprust::token_to_string(&tok));
                sess.span_diagnostic.span_err(span, &msg);
                // Recover with a placeholder variable so parsing can continue.
                TokenTree::Token(span, token::SubstNt(keywords::Invalid.ident()))
            }
            // A trailing lone `$` is kept as a literal dollar token.
            None => TokenTree::Token(span, token::Dollar),
        },
        tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok),
        tokenstream::TokenTree::Delimited(span, delimited) => {
            TokenTree::Delimited(span, Rc::new(Delimited {
                delim: delimited.delim,
                tts: parse(&delimited.tts, expect_matchers, sess),
            }))
        }
    }
}
/// Parses the `sep? op` suffix of a `$( ... )` repetition from `input`:
/// either `*`/`+` directly, or one separator token followed by `*`/`+`.
/// On malformed input, reports "expected `*` or `+`" at the closest useful
/// span (falling back to `span`, the repetition's span) and recovers with
/// `(None, ZeroOrMore)`.
fn parse_sep_and_kleene_op<I>(input: &mut I, span: Span, sess: &ParseSess)
                              -> (Option<token::Token>, KleeneOp)
    where I: Iterator<Item = tokenstream::TokenTree>,
{
    // Maps `*`/`+` tokens to their Kleene operator; `None` for anything else.
    fn kleene_op(token: &token::Token) -> Option<KleeneOp> {
        match *token {
            token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
            token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
            _ => None,
        }
    }

    // Each arm that cannot `return` evaluates to the span used for the error.
    let span = match input.next() {
        Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) {
            // `$(...)*` / `$(...)+`: no separator.
            Some(op) => return (None, op),
            // First token was not an op, so it must be a separator; the op
            // itself has to come next.
            None => match input.next() {
                Some(tokenstream::TokenTree::Token(span, tok2)) => match kleene_op(&tok2) {
                    Some(op) => return (Some(tok), op),
                    None => span,
                },
                tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
            }
        },
        tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
    };

    sess.span_diagnostic.span_err(span, "expected `*` or `+`");
    (None, KleeneOp::ZeroOrMore)
}
...@@ -11,9 +11,10 @@ ...@@ -11,9 +11,10 @@
use ast::Ident; use ast::Ident;
use errors::Handler; use errors::Handler;
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
use ext::tt::quoted;
use parse::token::{self, MatchNt, SubstNt, Token, NtIdent, NtTT}; use parse::token::{self, MatchNt, SubstNt, Token, NtIdent, NtTT};
use syntax_pos::{Span, DUMMY_SP}; use syntax_pos::{Span, DUMMY_SP};
use tokenstream::{self, TokenTree, Delimited, SequenceRepetition}; use tokenstream::{TokenTree, Delimited};
use util::small_vector::SmallVector; use util::small_vector::SmallVector;
use std::rc::Rc; use std::rc::Rc;
...@@ -24,34 +25,28 @@ ...@@ -24,34 +25,28 @@
// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`). // An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
enum Frame { enum Frame {
Delimited { Delimited {
forest: Rc<Delimited>, forest: Rc<quoted::Delimited>,
idx: usize,
span: Span,
},
MatchNt {
name: Ident,
kind: Ident,
idx: usize, idx: usize,
span: Span, span: Span,
}, },
Sequence { Sequence {
forest: Rc<SequenceRepetition>, forest: Rc<quoted::SequenceRepetition>,
idx: usize, idx: usize,
sep: Option<Token>, sep: Option<Token>,
}, },
} }
impl Frame { impl Frame {
fn new(tts: Vec<TokenTree>) -> Frame { fn new(tts: Vec<quoted::TokenTree>) -> Frame {
let forest = Rc::new(tokenstream::Delimited { delim: token::NoDelim, tts: tts }); let forest = Rc::new(quoted::Delimited { delim: token::NoDelim, tts: tts });
Frame::Delimited { forest: forest, idx: 0, span: DUMMY_SP } Frame::Delimited { forest: forest, idx: 0, span: DUMMY_SP }
} }
} }
impl Iterator for Frame { impl Iterator for Frame {
type Item = TokenTree; type Item = quoted::TokenTree;
fn next(&mut self) -> Option<TokenTree> { fn next(&mut self) -> Option<quoted::TokenTree> {
match *self { match *self {
Frame::Delimited { ref forest, ref mut idx, .. } => { Frame::Delimited { ref forest, ref mut idx, .. } => {
*idx += 1; *idx += 1;
...@@ -61,15 +56,6 @@ fn next(&mut self) -> Option<TokenTree> { ...@@ -61,15 +56,6 @@ fn next(&mut self) -> Option<TokenTree> {
*idx += 1; *idx += 1;
forest.tts.get(*idx - 1).cloned() forest.tts.get(*idx - 1).cloned()
} }
Frame::MatchNt { ref mut idx, name, kind, span } => {
*idx += 1;
match *idx {
1 => Some(TokenTree::Token(span, token::SubstNt(name))),
2 => Some(TokenTree::Token(span, token::Colon)),
3 => Some(TokenTree::Token(span, token::Ident(kind))),
_ => None,
}
}
} }
} }
} }
...@@ -79,7 +65,7 @@ fn next(&mut self) -> Option<TokenTree> { ...@@ -79,7 +65,7 @@ fn next(&mut self) -> Option<TokenTree> {
/// (and should) be None. /// (and should) be None.
pub fn transcribe(sp_diag: &Handler, pub fn transcribe(sp_diag: &Handler,
interp: Option<HashMap<Ident, Rc<NamedMatch>>>, interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
src: Vec<tokenstream::TokenTree>) src: Vec<quoted::TokenTree>)
-> Vec<TokenTree> { -> Vec<TokenTree> {
let mut stack = SmallVector::one(Frame::new(src)); let mut stack = SmallVector::one(Frame::new(src));
let interpolations = interp.unwrap_or_else(HashMap::new); /* just a convenience */ let interpolations = interp.unwrap_or_else(HashMap::new); /* just a convenience */
...@@ -121,15 +107,14 @@ pub fn transcribe(sp_diag: &Handler, ...@@ -121,15 +107,14 @@ pub fn transcribe(sp_diag: &Handler,
result = result_stack.pop().unwrap(); result = result_stack.pop().unwrap();
result.push(tree); result.push(tree);
} }
_ => {}
} }
continue continue
}; };
match tree { match tree {
TokenTree::Sequence(sp, seq) => { quoted::TokenTree::Sequence(sp, seq) => {
// FIXME(pcwalton): Bad copy. // FIXME(pcwalton): Bad copy.
match lockstep_iter_size(&TokenTree::Sequence(sp, seq.clone()), match lockstep_iter_size(&quoted::TokenTree::Sequence(sp, seq.clone()),
&interpolations, &interpolations,
&repeat_idx) { &repeat_idx) {
LockstepIterSize::Unconstrained => { LockstepIterSize::Unconstrained => {
...@@ -145,7 +130,7 @@ pub fn transcribe(sp_diag: &Handler, ...@@ -145,7 +130,7 @@ pub fn transcribe(sp_diag: &Handler,
} }
LockstepIterSize::Constraint(len, _) => { LockstepIterSize::Constraint(len, _) => {
if len == 0 { if len == 0 {
if seq.op == tokenstream::KleeneOp::OneOrMore { if seq.op == quoted::KleeneOp::OneOrMore {
// FIXME #2887 blame invoker // FIXME #2887 blame invoker
panic!(sp_diag.span_fatal(sp.clone(), panic!(sp_diag.span_fatal(sp.clone(),
"this must repeat at least once")); "this must repeat at least once"));
...@@ -163,7 +148,7 @@ pub fn transcribe(sp_diag: &Handler, ...@@ -163,7 +148,7 @@ pub fn transcribe(sp_diag: &Handler,
} }
} }
// FIXME #2887: think about span stuff here // FIXME #2887: think about span stuff here
TokenTree::Token(sp, SubstNt(ident)) => { quoted::TokenTree::Token(sp, SubstNt(ident)) => {
match lookup_cur_matched(ident, &interpolations, &repeat_idx) { match lookup_cur_matched(ident, &interpolations, &repeat_idx) {
None => result.push(TokenTree::Token(sp, SubstNt(ident))), None => result.push(TokenTree::Token(sp, SubstNt(ident))),
Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched { Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
...@@ -187,14 +172,11 @@ pub fn transcribe(sp_diag: &Handler, ...@@ -187,14 +172,11 @@ pub fn transcribe(sp_diag: &Handler,
} }
} }
} }
TokenTree::Delimited(span, delimited) => { quoted::TokenTree::Delimited(span, delimited) => {
stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span }); stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
result_stack.push(mem::replace(&mut result, Vec::new())); result_stack.push(mem::replace(&mut result, Vec::new()));
} }
TokenTree::Token(span, MatchNt(name, kind)) => { quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok)),
stack.push(Frame::MatchNt { name: name, kind: kind, idx: 0, span: span });
}
tt @ TokenTree::Token(..) => result.push(tt),
} }
} }
} }
...@@ -245,10 +227,11 @@ fn add(self, other: LockstepIterSize) -> LockstepIterSize { ...@@ -245,10 +227,11 @@ fn add(self, other: LockstepIterSize) -> LockstepIterSize {
} }
} }
fn lockstep_iter_size(tree: &TokenTree, fn lockstep_iter_size(tree: &quoted::TokenTree,
interpolations: &HashMap<Ident, Rc<NamedMatch>>, interpolations: &HashMap<Ident, Rc<NamedMatch>>,
repeat_idx: &[usize]) repeat_idx: &[usize])
-> LockstepIterSize { -> LockstepIterSize {
use self::quoted::TokenTree;
match *tree { match *tree {
TokenTree::Delimited(_, ref delimed) => { TokenTree::Delimited(_, ref delimed) => {
delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| { delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
......
...@@ -551,13 +551,6 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree { ...@@ -551,13 +551,6 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
} }
)) ))
}, },
TokenTree::Sequence(span, ref seq) =>
TokenTree::Sequence(fld.new_span(span),
Rc::new(SequenceRepetition {
tts: fld.fold_tts(&seq.tts),
separator: seq.separator.clone().map(|tok| fld.fold_token(tok)),
..**seq
})),
} }
} }
......
...@@ -139,6 +139,7 @@ pub mod tt { ...@@ -139,6 +139,7 @@ pub mod tt {
pub mod transcribe; pub mod transcribe;
pub mod macro_parser; pub mod macro_parser;
pub mod macro_rules; pub mod macro_rules;
pub mod quoted;
} }
} }
......
...@@ -139,13 +139,9 @@ pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a Pa ...@@ -139,13 +139,9 @@ pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a Pa
new_parser_from_source_str(sess, name, source).parse_stmt() new_parser_from_source_str(sess, name, source).parse_stmt()
} }
// Warning: This parses with quote_depth > 0, which is not the default.
pub fn parse_tts_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) pub fn parse_tts_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-> PResult<'a, Vec<tokenstream::TokenTree>> { -> Vec<tokenstream::TokenTree> {
let mut p = new_parser_from_source_str(sess, name, source); filemap_to_tts(sess, sess.codemap().new_filemap(name, None, source))
p.quote_depth += 1;
// right now this is re-creating the token trees from ... token trees.
p.parse_all_token_trees()
} }
// Create a new parser from a source string // Create a new parser from a source string
...@@ -986,7 +982,7 @@ fn ttdelim_span() { ...@@ -986,7 +982,7 @@ fn ttdelim_span() {
_ => panic!("not a macro"), _ => panic!("not a macro"),
}; };
let span = tts.iter().rev().next().unwrap().get_span(); let span = tts.iter().rev().next().unwrap().span();
match sess.codemap().span_to_snippet(span) { match sess.codemap().span_to_snippet(span) {
Ok(s) => assert_eq!(&s[..], "{ body }"), Ok(s) => assert_eq!(&s[..], "{ body }"),
......
...@@ -43,19 +43,16 @@ ...@@ -43,19 +43,16 @@
use codemap::{self, CodeMap, Spanned, spanned, respan}; use codemap::{self, CodeMap, Spanned, spanned, respan};
use syntax_pos::{self, Span, Pos, BytePos, mk_sp}; use syntax_pos::{self, Span, Pos, BytePos, mk_sp};
use errors::{self, DiagnosticBuilder}; use errors::{self, DiagnosticBuilder};
use ext::tt::macro_parser; use parse::{self, classify, token};
use parse;
use parse::classify;
use parse::common::SeqSep; use parse::common::SeqSep;
use parse::lexer::TokenAndSpan; use parse::lexer::TokenAndSpan;
use parse::obsolete::ObsoleteSyntax; use parse::obsolete::ObsoleteSyntax;
use parse::token::{self, MatchNt, SubstNt};
use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership}; use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
use util::parser::{AssocOp, Fixity}; use util::parser::{AssocOp, Fixity};
use print::pprust; use print::pprust;
use ptr::P; use ptr::P;
use parse::PResult; use parse::PResult;
use tokenstream::{self, Delimited, SequenceRepetition, TokenTree}; use tokenstream::{Delimited, TokenTree};
use symbol::{Symbol, keywords}; use symbol::{Symbol, keywords};
use util::ThinVec; use util::ThinVec;
...@@ -168,8 +165,6 @@ pub struct Parser<'a> { ...@@ -168,8 +165,6 @@ pub struct Parser<'a> {
/// the previous token kind /// the previous token kind
prev_token_kind: PrevTokenKind, prev_token_kind: PrevTokenKind,
pub restrictions: Restrictions, pub restrictions: Restrictions,
pub quote_depth: usize, // not (yet) related to the quasiquoter
parsing_token_tree: bool,
/// The set of seen errors about obsolete syntax. Used to suppress /// The set of seen errors about obsolete syntax. Used to suppress
/// extra detail when the same error is seen twice /// extra detail when the same error is seen twice
pub obsolete_set: HashSet<ObsoleteSyntax>, pub obsolete_set: HashSet<ObsoleteSyntax>,
...@@ -329,8 +324,6 @@ pub fn new(sess: &'a ParseSess, ...@@ -329,8 +324,6 @@ pub fn new(sess: &'a ParseSess,
prev_span: syntax_pos::DUMMY_SP, prev_span: syntax_pos::DUMMY_SP,
prev_token_kind: PrevTokenKind::Other, prev_token_kind: PrevTokenKind::Other,
restrictions: Restrictions::empty(), restrictions: Restrictions::empty(),
quote_depth: 0,
parsing_token_tree: false,
obsolete_set: HashSet::new(), obsolete_set: HashSet::new(),
directory: Directory { path: PathBuf::new(), ownership: DirectoryOwnership::Owned }, directory: Directory { path: PathBuf::new(), ownership: DirectoryOwnership::Owned },
root_module_name: None, root_module_name: None,
...@@ -359,20 +352,11 @@ fn next_tok(&mut self) -> TokenAndSpan { ...@@ -359,20 +352,11 @@ fn next_tok(&mut self) -> TokenAndSpan {
if i + 1 < tts.len() { if i + 1 < tts.len() {
self.tts.push((tts, i + 1)); self.tts.push((tts, i + 1));
} }
// FIXME(jseyfried): remove after fixing #39390 in #39419. if let TokenTree::Token(sp, tok) = tt {
if self.quote_depth > 0 { TokenAndSpan { tok: tok, sp: sp }
if let TokenTree::Sequence(sp, _) = tt { } else {
self.span_err(sp, "attempted to repeat an expression containing no \ self.tts.push((tt, 0));
syntax variables matched as repeating at this depth"); continue
}
}
match tt {
TokenTree::Token(sp, tok) => TokenAndSpan { tok: tok, sp: sp },
_ if tt.len() > 0 => {
self.tts.push((tt, 0));
continue
}
_ => continue,
} }
} else { } else {
TokenAndSpan { tok: token::Eof, sp: self.span } TokenAndSpan { tok: token::Eof, sp: self.span }
...@@ -997,7 +981,6 @@ pub fn look_ahead<R, F>(&mut self, dist: usize, f: F) -> R where ...@@ -997,7 +981,6 @@ pub fn look_ahead<R, F>(&mut self, dist: usize, f: F) -> R where
tok = match tts.get_tt(i) { tok = match tts.get_tt(i) {
TokenTree::Token(_, tok) => tok, TokenTree::Token(_, tok) => tok,
TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim), TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim),
TokenTree::Sequence(..) => token::Dollar,
}; };
} }
} }
...@@ -2586,139 +2569,21 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: BytePos) -> PResult< ...@@ -2586,139 +2569,21 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: BytePos) -> PResult<
return Ok(e); return Ok(e);
} }
// Parse unquoted tokens after a `$` in a token tree
fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> {
let mut sp = self.span;
let name = match self.token {
token::Dollar => {
self.bump();
if self.token == token::OpenDelim(token::Paren) {
let Spanned { node: seq, span: seq_span } = self.parse_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
SeqSep::none(),
|p| p.parse_token_tree()
)?;
let (sep, repeat) = self.parse_sep_and_kleene_op()?;
let name_num = macro_parser::count_names(&seq);
return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi),
Rc::new(SequenceRepetition {
tts: seq,
separator: sep,
op: repeat,
num_captures: name_num
})));
} else if self.token.is_keyword(keywords::Crate) {
let ident = match self.token {
token::Ident(id) => ast::Ident { name: Symbol::intern("$crate"), ..id },
_ => unreachable!(),
};
self.bump();
return Ok(TokenTree::Token(sp, token::Ident(ident)));
} else {
sp = mk_sp(sp.lo, self.span.hi);
self.parse_ident().unwrap_or_else(|mut e| {
e.emit();
keywords::Invalid.ident()
})
}
}
token::SubstNt(name) => {
self.bump();
name
}
_ => unreachable!()
};
// continue by trying to parse the `:ident` after `$name`
if self.token == token::Colon &&
self.look_ahead(1, |t| t.is_ident() && !t.is_any_keyword()) {
self.bump();
sp = mk_sp(sp.lo, self.span.hi);
let nt_kind = self.parse_ident()?;
Ok(TokenTree::Token(sp, MatchNt(name, nt_kind)))
} else {
Ok(TokenTree::Token(sp, SubstNt(name)))
}
}
pub fn check_unknown_macro_variable(&mut self) { pub fn check_unknown_macro_variable(&mut self) {
if self.quote_depth == 0 && !self.parsing_token_tree { if let token::SubstNt(name) = self.token {
match self.token { self.fatal(&format!("unknown macro variable `{}`", name)).emit()
token::SubstNt(name) =>
self.fatal(&format!("unknown macro variable `{}`", name)).emit(),
_ => {}
}
}
}
/// Parse an optional separator followed by a Kleene-style
/// repetition token (+ or *).
pub fn parse_sep_and_kleene_op(&mut self)
-> PResult<'a, (Option<token::Token>, tokenstream::KleeneOp)> {
fn parse_kleene_op<'a>(parser: &mut Parser<'a>) ->
PResult<'a, Option<tokenstream::KleeneOp>> {
match parser.token {
token::BinOp(token::Star) => {
parser.bump();
Ok(Some(tokenstream::KleeneOp::ZeroOrMore))
},
token::BinOp(token::Plus) => {
parser.bump();
Ok(Some(tokenstream::KleeneOp::OneOrMore))
},
_ => Ok(None)
}
};
if let Some(kleene_op) = parse_kleene_op(self)? {
return Ok((None, kleene_op));
}
let separator = match self.token {
token::CloseDelim(..) => None,
_ => Some(self.bump_and_get()),
};
match parse_kleene_op(self)? {
Some(zerok) => Ok((separator, zerok)),
None => return Err(self.fatal("expected `*` or `+`"))
} }
} }
/// parse a single token tree from the input. /// parse a single token tree from the input.
pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> { pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
// FIXME #6994: currently, this is too eager. It
// parses token trees but also identifies TokenType::Sequence's
// and token::SubstNt's; it's too early to know yet
// whether something will be a nonterminal or a seq
// yet.
match self.token { match self.token {
token::OpenDelim(delim) => { token::OpenDelim(..) => {
if self.quote_depth == 0 { let tt = self.tts.pop().unwrap().0;
let tt = self.tts.pop().unwrap().0;
self.bump();
return Ok(tt);
}
let parsing_token_tree = ::std::mem::replace(&mut self.parsing_token_tree, true);
let lo = self.span.lo;
self.bump();
let tts = self.parse_seq_to_before_tokens(&[&token::CloseDelim(token::Brace),
&token::CloseDelim(token::Paren),
&token::CloseDelim(token::Bracket)],
SeqSep::none(),
|p| p.parse_token_tree(),
|mut e| e.emit());
self.parsing_token_tree = parsing_token_tree;
self.bump(); self.bump();
return Ok(tt);
Ok(TokenTree::Delimited(Span { lo: lo, ..self.prev_span }, Rc::new(Delimited {
delim: delim,
tts: tts,
})))
}, },
token::CloseDelim(..) | token::Eof => Ok(TokenTree::Token(self.span, token::Eof)), token::CloseDelim(_) | token::Eof => unreachable!(),
token::Dollar | token::SubstNt(..) if self.quote_depth > 0 => self.parse_unquoted(),
_ => Ok(TokenTree::Token(self.span, self.bump_and_get())), _ => Ok(TokenTree::Token(self.span, self.bump_and_get())),
} }
} }
......
...@@ -1475,20 +1475,6 @@ pub fn print_tt(&mut self, tt: &tokenstream::TokenTree) -> io::Result<()> { ...@@ -1475,20 +1475,6 @@ pub fn print_tt(&mut self, tt: &tokenstream::TokenTree) -> io::Result<()> {
space(&mut self.s)?; space(&mut self.s)?;
word(&mut self.s, &token_to_string(&delimed.close_token())) word(&mut self.s, &token_to_string(&delimed.close_token()))
}, },
TokenTree::Sequence(_, ref seq) => {
word(&mut self.s, "$(")?;
for tt_elt in &seq.tts {
self.print_tt(tt_elt)?;
}
word(&mut self.s, ")")?;
if let Some(ref tk) = seq.separator {
word(&mut self.s, &token_to_string(tk))?;
}
match seq.op {
tokenstream::KleeneOp::ZeroOrMore => word(&mut self.s, "*"),
tokenstream::KleeneOp::OneOrMore => word(&mut self.s, "+"),
}
}
} }
} }
......
...@@ -12,9 +12,7 @@ ...@@ -12,9 +12,7 @@
//! //!
//! TokenStreams represent syntactic objects before they are converted into ASTs. //! TokenStreams represent syntactic objects before they are converted into ASTs.
//! A `TokenStream` is, roughly speaking, a sequence (eg stream) of `TokenTree`s, //! A `TokenStream` is, roughly speaking, a sequence (eg stream) of `TokenTree`s,
//! which are themselves either a single Token, a Delimited subsequence of tokens, //! which are themselves a single `Token` or a `Delimited` subsequence of tokens.
//! or a SequenceRepetition specifier (for the purpose of sequence generation during macro
//! expansion).
//! //!
//! ## Ownership //! ## Ownership
//! TokenStreams are persistent data structures constructed as ropes with reference //! TokenStreams are persistent data structures constructed as ropes with reference
...@@ -28,10 +26,10 @@ ...@@ -28,10 +26,10 @@
use syntax_pos::{BytePos, Span, DUMMY_SP}; use syntax_pos::{BytePos, Span, DUMMY_SP};
use codemap::Spanned; use codemap::Spanned;
use ext::base; use ext::base;
use ext::tt::macro_parser; use ext::tt::{macro_parser, quoted};
use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use parse::{self, Directory}; use parse::{self, Directory};
use parse::token::{self, Token, Lit, Nonterminal}; use parse::token::{self, Token, Lit};
use print::pprust; use print::pprust;
use serialize::{Decoder, Decodable, Encoder, Encodable}; use serialize::{Decoder, Decodable, Encoder, Encodable};
use symbol::Symbol; use symbol::Symbol;
...@@ -84,27 +82,6 @@ pub fn subtrees(&self) -> &[TokenTree] { ...@@ -84,27 +82,6 @@ pub fn subtrees(&self) -> &[TokenTree] {
} }
} }
/// A sequence of token trees
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct SequenceRepetition {
/// The sequence of token trees
pub tts: Vec<TokenTree>,
/// The optional separator
pub separator: Option<token::Token>,
/// Whether the sequence can be repeated zero (*), or one or more times (+)
pub op: KleeneOp,
/// The number of `MatchNt`s that appear in the sequence (and subsequences)
pub num_captures: usize,
}
/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
/// for token sequences.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum KleeneOp {
ZeroOrMore,
OneOrMore,
}
/// When the main rust parser encounters a syntax-extension invocation, it /// When the main rust parser encounters a syntax-extension invocation, it
/// parses the arguments to the invocation as a token-tree. This is a very /// parses the arguments to the invocation as a token-tree. This is a very
/// loose structure, such that all sorts of different AST-fragments can /// loose structure, such that all sorts of different AST-fragments can
...@@ -123,10 +100,6 @@ pub enum TokenTree { ...@@ -123,10 +100,6 @@ pub enum TokenTree {
Token(Span, token::Token), Token(Span, token::Token),
/// A delimited sequence of token trees /// A delimited sequence of token trees
Delimited(Span, Rc<Delimited>), Delimited(Span, Rc<Delimited>),
// This only makes sense in MBE macros.
/// A kleene-style repetition sequence with a span
Sequence(Span, Rc<SequenceRepetition>),
} }
impl TokenTree { impl TokenTree {
...@@ -138,15 +111,10 @@ pub fn len(&self) -> usize { ...@@ -138,15 +111,10 @@ pub fn len(&self) -> usize {
AttrStyle::Inner => 3, AttrStyle::Inner => 3,
} }
} }
TokenTree::Token(_, token::Interpolated(ref nt)) => {
if let Nonterminal::NtTT(..) = **nt { 1 } else { 0 }
},
TokenTree::Token(_, token::MatchNt(..)) => 3,
TokenTree::Delimited(_, ref delimed) => match delimed.delim { TokenTree::Delimited(_, ref delimed) => match delimed.delim {
token::NoDelim => delimed.tts.len(), token::NoDelim => delimed.tts.len(),
_ => delimed.tts.len() + 2, _ => delimed.tts.len() + 2,
}, },
TokenTree::Sequence(_, ref seq) => seq.tts.len(),
TokenTree::Token(..) => 0, TokenTree::Token(..) => 0,
} }
} }
...@@ -197,30 +165,12 @@ pub fn get_tt(&self, index: usize) -> TokenTree { ...@@ -197,30 +165,12 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
} }
delimed.tts[index - 1].clone() delimed.tts[index - 1].clone()
} }
(&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
let v = [TokenTree::Token(sp, token::SubstNt(name)),
TokenTree::Token(sp, token::Colon),
TokenTree::Token(sp, token::Ident(kind))];
v[index].clone()
}
(&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
_ => panic!("Cannot expand a token tree"), _ => panic!("Cannot expand a token tree"),
} }
} }
/// Returns the `Span` corresponding to this token tree.
pub fn get_span(&self) -> Span {
match *self {
TokenTree::Token(span, _) => span,
TokenTree::Delimited(span, _) => span,
TokenTree::Sequence(span, _) => span,
}
}
/// Use this token tree as a matcher to parse given tts. /// Use this token tree as a matcher to parse given tts.
pub fn parse(cx: &base::ExtCtxt, pub fn parse(cx: &base::ExtCtxt, mtch: &[quoted::TokenTree], tts: &[TokenTree])
mtch: &[TokenTree],
tts: &[TokenTree])
-> macro_parser::NamedParseResult { -> macro_parser::NamedParseResult {
// `None` is because we're not interpolating // `None` is because we're not interpolating
let directory = Directory { let directory = Directory {
...@@ -252,9 +202,7 @@ pub fn eq_unspanned(&self, other: &TokenTree) -> bool { ...@@ -252,9 +202,7 @@ pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
/// Retrieve the TokenTree's span. /// Retrieve the TokenTree's span.
pub fn span(&self) -> Span { pub fn span(&self) -> Span {
match *self { match *self {
TokenTree::Token(sp, _) | TokenTree::Token(sp, _) | TokenTree::Delimited(sp, _) => sp,
TokenTree::Delimited(sp, _) |
TokenTree::Sequence(sp, _) => sp,
} }
} }
......
...@@ -79,7 +79,6 @@ pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver, ...@@ -79,7 +79,6 @@ pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver,
quote_pat: expand_quote_pat, quote_pat: expand_quote_pat,
quote_arm: expand_quote_arm, quote_arm: expand_quote_arm,
quote_stmt: expand_quote_stmt, quote_stmt: expand_quote_stmt,
quote_matcher: expand_quote_matcher,
quote_attr: expand_quote_attr, quote_attr: expand_quote_attr,
quote_arg: expand_quote_arg, quote_arg: expand_quote_arg,
quote_block: expand_quote_block, quote_block: expand_quote_block,
......
...@@ -54,8 +54,6 @@ pub fn main() { ...@@ -54,8 +54,6 @@ pub fn main() {
//~^ ERROR cannot find macro `quote_arm!` in this scope //~^ ERROR cannot find macro `quote_arm!` in this scope
let x = quote_stmt!(ecx, 3); let x = quote_stmt!(ecx, 3);
//~^ ERROR cannot find macro `quote_stmt!` in this scope //~^ ERROR cannot find macro `quote_stmt!` in this scope
let x = quote_matcher!(ecx, 3);
//~^ ERROR cannot find macro `quote_matcher!` in this scope
let x = quote_attr!(ecx, 3); let x = quote_attr!(ecx, 3);
//~^ ERROR cannot find macro `quote_attr!` in this scope //~^ ERROR cannot find macro `quote_attr!` in this scope
let x = quote_arg!(ecx, 3); let x = quote_arg!(ecx, 3);
......
...@@ -8,9 +8,8 @@ ...@@ -8,9 +8,8 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
macro_rules! m { ($t:tt) => { $t } } macro_rules! m { ($($t:tt)*) => { $($t)* } }
fn main() { fn main() {
m!($t); //~ ERROR unknown macro variable m!($t); //~ ERROR expected expression
//~| ERROR expected expression
} }
...@@ -9,7 +9,7 @@ ...@@ -9,7 +9,7 @@
// except according to those terms. // except according to those terms.
macro_rules! foo { macro_rules! foo {
($a:expr) => $a; //~ ERROR macro rhs must be delimited ($a:expr) => a; //~ ERROR macro rhs must be delimited
} }
fn main() { fn main() {
......
...@@ -8,7 +8,8 @@ ...@@ -8,7 +8,8 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
macro_rules! test { ($a, $b) => (()); } //~ ERROR missing fragment macro_rules! test { ($a, //~ ERROR missing fragment
$b) => (()); } //~ ERROR missing fragment
fn main() { fn main() {
test!() test!()
......
...@@ -17,16 +17,5 @@ ...@@ -17,16 +17,5 @@
foo!(Box); foo!(Box);
macro_rules! bar {
($x:tt) => {
macro_rules! baz {
($x:tt, $y:tt) => { ($x, $y) }
}
}
}
#[rustc_error] #[rustc_error]
fn main() { //~ ERROR compilation successful fn main() {} //~ ERROR compilation successful
bar!($y);
let _: (i8, i16) = baz!(0i8, 0i16);
}
...@@ -9,7 +9,7 @@ ...@@ -9,7 +9,7 @@
// except according to those terms. // except according to those terms.
macro_rules! my_precioooous { macro_rules! my_precioooous {
$($t:tt)* => (1); //~ ERROR invalid macro matcher t => (1); //~ ERROR invalid macro matcher
} }
fn main() { fn main() {
......
...@@ -12,7 +12,7 @@ ...@@ -12,7 +12,7 @@
macro_rules! foo { macro_rules! foo {
{ $+ } => { //~ ERROR expected identifier, found `+` { $+ } => { //~ ERROR expected identifier, found `+`
//~^ ERROR missing fragment specifier
$(x)(y) //~ ERROR expected `*` or `+` $(x)(y) //~ ERROR expected `*` or `+`
//~^ ERROR no rules expected the token `)`
} }
} }
...@@ -23,6 +23,7 @@ ...@@ -23,6 +23,7 @@
use syntax::tokenstream::{TokenTree}; use syntax::tokenstream::{TokenTree};
use syntax::ext::base::{ExtCtxt, MacResult, MacEager}; use syntax::ext::base::{ExtCtxt, MacResult, MacEager};
use syntax::ext::build::AstBuilder; use syntax::ext::build::AstBuilder;
use syntax::ext::tt::quoted;
use syntax::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal}; use syntax::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
use syntax::ext::tt::macro_parser::{Success, Failure, Error}; use syntax::ext::tt::macro_parser::{Success, Failure, Error};
use syntax::ext::tt::macro_parser::parse_failure_msg; use syntax::ext::tt::macro_parser::parse_failure_msg;
...@@ -33,7 +34,8 @@ ...@@ -33,7 +34,8 @@
fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree]) fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree])
-> Box<MacResult + 'static> { -> Box<MacResult + 'static> {
let mbe_matcher = quote_matcher!(cx, $matched:expr, $($pat:pat)|+); let mbe_matcher = quote_tokens!(cx, $$matched:expr, $$($$pat:pat)|+);
let mbe_matcher = quoted::parse(&mbe_matcher, true, cx.parse_sess);
let map = match TokenTree::parse(cx, &mbe_matcher, args) { let map = match TokenTree::parse(cx, &mbe_matcher, args) {
Success(map) => map, Success(map) => map,
Failure(_, tok) => { Failure(_, tok) => {
......
...@@ -14,11 +14,7 @@ ...@@ -14,11 +14,7 @@
#![feature(plugin)] #![feature(plugin)]
#![plugin(procedural_mbe_matching)] #![plugin(procedural_mbe_matching)]
#[no_link]
extern crate procedural_mbe_matching;
pub fn main() { pub fn main() {
let abc = 123u32;
assert_eq!(matches!(Some(123), None | Some(0)), false); assert_eq!(matches!(Some(123), None | Some(0)), false);
assert_eq!(matches!(Some(123), None | Some(123)), true); assert_eq!(matches!(Some(123), None | Some(123)), true);
assert_eq!(matches!(true, true), true); assert_eq!(matches!(true, true), true);
......
...@@ -37,7 +37,6 @@ fn syntax_extension(cx: &ExtCtxt) { ...@@ -37,7 +37,6 @@ fn syntax_extension(cx: &ExtCtxt) {
let _l: P<syntax::ast::Ty> = quote_ty!(cx, &isize); let _l: P<syntax::ast::Ty> = quote_ty!(cx, &isize);
let _m: Vec<syntax::tokenstream::TokenTree> = quote_matcher!(cx, $($foo:tt,)* bar);
let _n: syntax::ast::Attribute = quote_attr!(cx, #![cfg(foo, bar = "baz")]); let _n: syntax::ast::Attribute = quote_attr!(cx, #![cfg(foo, bar = "baz")]);
let _o: Option<P<syntax::ast::Item>> = quote_item!(cx, fn foo<T: ?Sized>() {}); let _o: Option<P<syntax::ast::Item>> = quote_item!(cx, fn foo<T: ?Sized>() {});
......
...@@ -9,7 +9,6 @@ ...@@ -9,7 +9,6 @@
// except according to those terms. // except according to those terms.
fn main() { fn main() {
println!("{}", { macro_rules! x { ($()*) => {} } 33 }); println!("{}", { macro_rules! x { ($(t:tt)*) => {} } 33 });
//~^ ERROR no syntax variables matched as repeating at this depth
} }
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册