Commit 473b1ec0 authored by Paul Stansifer

Make token trees parseable.

Parent b6ed1de2
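Orientation before the diff: this commit adds a token_tree type to the AST (tt_delim / tt_flat below) and a parse_token_tree routine, so a macro invocation's argument can be kept as raw, nested tokens and parsed later by the macro itself. The following is a minimal sketch of that data shape in present-day Rust; the names Token, TokenTree, OpenDelim, CloseDelim, and Other are illustrative stand-ins, not identifiers from this commit.

// Illustrative sketch only -- present-day Rust, hypothetical names.
#[derive(Debug, Clone, PartialEq)]
enum Token {
    OpenDelim(char),   // '(', '[' or '{'
    CloseDelim(char),  // ')', ']' or '}'
    Other(String),     // any other single token, kept as text here
}

#[derive(Debug, Clone, PartialEq)]
enum TokenTree {
    // a delimited group: the open delimiter, the nested trees, the close delimiter
    Delim(Vec<TokenTree>),
    // a single token that is not itself a delimited group
    Flat(Token),
}

fn main() {
    // the argument of a macro invocation would be stored roughly as:
    let arg = TokenTree::Delim(vec![
        TokenTree::Flat(Token::OpenDelim('(')),
        TokenTree::Flat(Token::Other("a".to_string())),
        TokenTree::Flat(Token::Other(",".to_string())),
        TokenTree::Flat(Token::Other("b".to_string())),
        TokenTree::Flat(Token::CloseDelim(')')),
    ]);
    println!("{:?}", arg);
}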
@@ -17,6 +17,8 @@
                            deserialize_str,
                            serialize_bool,
                            deserialize_bool};
import parse::token;
/* Note #1972 -- spans are serialized but not deserialized */
fn serialize_span<S>(_s: S, _v: span) {
@@ -371,6 +373,16 @@ enum blk_sort {
}
*/
#[auto_serialize]
type token_tree = spanned<token_tree_>;

#[auto_serialize]
enum token_tree_ {
    /* for macro invocations; parsing is the macro's job */
    tt_delim(token::token, [token_tree]),
    tt_flat(token::token)
}

#[auto_serialize]
type mac = spanned<mac_>;
@@ -386,6 +398,7 @@ enum blk_sort {
#[auto_serialize]
enum mac_ {
    mac_invoc(@path, mac_arg, mac_body),
    mac_invoc_tt(@path, token_tree), // will kill mac_invoc and steal its name
    mac_embed_type(@ty),
    mac_embed_block(blk),
    mac_ellipsis,
......
@@ -549,6 +549,7 @@ fn no_des(cx: ext_ctxt, sp: span, syn: str) -> ! {
    alt mac.node {
      ast::mac_ellipsis { cx.span_fatal(mac.span, "misused `...`"); }
      ast::mac_invoc(_, _, _) { no_des(cx, mac.span, "macro calls"); }
      ast::mac_invoc_tt(_, _) { no_des(cx, mac.span, "macro calls"); }
      ast::mac_embed_type(ty) {
        alt ty.node {
          ast::ty_path(pth, _) {
......
@@ -119,6 +119,7 @@ fn fold_mac_(m: mac, fld: ast_fold) -> mac {
        mac_invoc(fld.fold_path(pth),
                  option::map(arg, fld.fold_expr), body)
      }
      mac_invoc_tt(pth, tt) { m.node }
      mac_embed_type(ty) { mac_embed_type(fld.fold_ty(ty)) }
      mac_embed_block(blk) { mac_embed_block(fld.fold_block(blk)) }
      mac_ellipsis { mac_ellipsis }
......
@@ -24,6 +24,7 @@ fn parse_outer_attrs_or_ext(first_item_attrs: [ast::attribute])
        ret some(left([first_attr] + self.parse_outer_attributes()));
    } else if !(self.look_ahead(1u) == token::LT
                || self.look_ahead(1u) == token::LBRACKET
                || self.look_ahead(1u) == token::POUND
                || expect_item_next) {
        self.bump();
        ret some(right(self.parse_syntax_ext_naked(lo)));
......
@@ -4,7 +4,7 @@
import token::{can_begin_expr, is_ident, is_plain_ident};
import codemap::{span,fss_none};
import util::interner;
import ast_util::{spanned, mk_sp, ident_to_path, operator_prec};
import ast_util::{spanned, respan, mk_sp, ident_to_path, operator_prec};
import ast::*;
import lexer::reader;
import prec::{as_prec, token_to_binop};
@@ -801,10 +801,16 @@ fn parse_bottom_expr() -> pexpr {
                                       {|p| p.parse_expr()});
            hi = self.span.hi;
            ex = expr_vec(es, mutbl);
        } else if self.token == token::POUND
            && self.look_ahead(1u) == token::POUND {
            self.bump(); self.bump();
            let macname = self.parse_path_without_tps();
            let macbody = self.parse_token_tree();
            ret pexpr(self.mk_mac_expr(lo, self.span.hi,
                                       mac_invoc_tt(macname, macbody)));
        } else if self.token == token::POUND
            && self.look_ahead(1u) == token::LT {
            self.bump();
            self.bump();
            self.bump(); self.bump();
            let ty = self.parse_ty(false);
            self.expect(token::GT);
@@ -813,8 +819,7 @@ fn parse_bottom_expr() -> pexpr {
                                       mac_embed_type(ty)));
        } else if self.token == token::POUND
            && self.look_ahead(1u) == token::LBRACE {
            self.bump();
            self.bump();
            self.bump(); self.bump();
            let blk = mac_embed_block(
                self.parse_block_tail(lo, default_blk));
            ret pexpr(self.mk_mac_expr(lo, self.span.hi, blk));
@@ -1053,6 +1058,47 @@ fn parse_dot_or_call_expr_with(e0: pexpr) -> pexpr {
        ret e;
    }
    fn parse_token_tree() -> token_tree {
        #[doc="what's the opposite delimiter?"]
        fn flip(t: token::token) -> token::token {
            alt t {
              token::LPAREN { token::RPAREN }
              token::LBRACE { token::RBRACE }
              token::LBRACKET { token::RBRACKET }
              _ { fail }
            }
        }

        fn parse_tt_flat(p: parser, delim_ok: bool) -> token_tree {
            alt p.token {
              token::RPAREN | token::RBRACE | token::RBRACKET
              if !delim_ok {
                p.fatal("incorrect close delimiter: `"
                        + token_to_str(p.reader, p.token) + "`");
              }
              token::EOF {
                p.fatal("file ended in the middle of a macro invocation");
              }
              _ { /* ok */ }
            }
            let res = tt_flat(p.span.lo, p.token);
            p.bump();
            ret res;
        }

        ret alt self.token {
          token::LPAREN | token::LBRACE | token::LBRACKET {
            let ket = flip(self.token);
            tt_delim([parse_tt_flat(self, true)] +
                     self.parse_seq_to_before_end(ket, seq_sep_none(),
                                                  {|p| p.parse_token_tree()})
                     + [parse_tt_flat(self, true)])
          }
          _ { parse_tt_flat(self, false) }
        };
    }

    fn parse_prefix_expr() -> pexpr {
        let lo = self.span.lo;
        let mut hi;
......
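The parse_token_tree routine added above is the core of the change: a token tree is either a single token (tt_flat), or an open delimiter, a run of recursively parsed token trees, and the matching close delimiter (tt_delim); a stray close delimiter or end-of-file is reported as an error. Below is a self-contained sketch of the same recursion in present-day Rust, with hypothetical names (Tok, Tt, parse_tt, flip) standing in for the commit's token, token_tree, and parse_token_tree.

// Illustrative sketch only -- present-day Rust, hypothetical names.
#[derive(Debug, Clone)]
enum Tok { Open(char), Close(char), Other(String) }

#[derive(Debug)]
enum Tt { Flat(Tok), Delim(Vec<Tt>) }

// matching close delimiter for an open delimiter
fn flip(open: char) -> char {
    match open { '(' => ')', '[' => ']', '{' => '}', _ => panic!("not an open delimiter") }
}

fn parse_tt(toks: &[Tok], pos: &mut usize) -> Result<Tt, String> {
    match toks.get(*pos) {
        // running out of tokens mid-tree is an error, as in the parser above
        None => Err("file ended in the middle of a macro invocation".to_string()),
        // a close delimiter with no matching open is also an error
        Some(Tok::Close(c)) => Err(format!("incorrect close delimiter: `{}`", c)),
        // an open delimiter starts a nested tree; keep the delimiter tokens
        // themselves, mirroring how tt_delim keeps its bracketing tokens
        Some(Tok::Open(o)) => {
            let ket = flip(*o);
            let mut inner = vec![Tt::Flat(toks[*pos].clone())];
            *pos += 1;
            loop {
                match toks.get(*pos) {
                    Some(Tok::Close(c)) if *c == ket => {
                        inner.push(Tt::Flat(toks[*pos].clone()));
                        *pos += 1;
                        return Ok(Tt::Delim(inner));
                    }
                    Some(_) => inner.push(parse_tt(toks, pos)?),
                    None => return Err("unbalanced delimiters".to_string()),
                }
            }
        }
        // any other single token becomes a flat leaf
        Some(tok) => {
            let leaf = Tt::Flat(tok.clone());
            *pos += 1;
            Ok(leaf)
        }
    }
}

fn main() {
    // roughly the token stream of `(a, b)`
    let toks = vec![
        Tok::Open('('),
        Tok::Other("a".to_string()),
        Tok::Other(",".to_string()),
        Tok::Other("b".to_string()),
        Tok::Close(')'),
    ];
    let mut pos = 0;
    println!("{:?}", parse_tt(&toks, &mut pos));
}

Like the committed code, the sketch keeps the opening and closing delimiter tokens inside the delimited node, so whatever later consumes the tree sees exactly the tokens that were written.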
import util::interner;
import util::interner::interner;
import std::map::{hashmap, str_hash};
import std::serialization::{serializer,
                            deserializer,
                            serialize_uint,
                            deserialize_uint,
                            serialize_i64,
                            deserialize_i64,
                            serialize_u64,
                            deserialize_u64,
                            serialize_bool,
                            deserialize_bool};
#[auto_serialize]
type str_num = uint;

#[auto_serialize]
enum binop {
    PLUS,
    MINUS,
@@ -17,6 +29,7 @@ enum binop {
    SHR,
}

#[auto_serialize]
enum token {
    /* Expression-operator symbols. */
    EQ,
......
@@ -30,6 +30,7 @@ mod util {
mod parse {
    export parser;
    export lexer;
    export token;
    export comments;
    export prec;
    export classify;
......
@@ -351,6 +351,7 @@ fn visit_mac<E>(m: mac, e: E, v: vt<E>) {
    alt m.node {
      ast::mac_invoc(pth, arg, body) {
        option::map(arg) {|arg| v.visit_expr(arg, e, v)}; }
      ast::mac_invoc_tt(pth, tt) { /* no user-serviceable parts inside */ }
      ast::mac_embed_type(ty) { v.visit_ty(ty, e, v); }
      ast::mac_embed_block(blk) { v.visit_block(blk, e, v); }
      ast::mac_ellipsis { }
......