Commit 7f629445 authored by: M Marijn Haverbeke

Convert the objects used in the lexer and parser to records + impls

Parent 0616cba6
......@@ -80,8 +80,8 @@ fn file_exists(path: str) -> bool {
let inner_attrs = parse_inner_attrs_and_next(p0);
let first_item_outer_attrs = inner_attrs.next;
let m0 = parse_mod_items(p0, token::EOF, first_item_outer_attrs);
cx.chpos = p0.get_chpos();
cx.byte_pos = p0.get_byte_pos();
cx.chpos = p0.reader.chpos;
cx.byte_pos = p0.reader.pos;
ret (m0.view_items, m0.items, inner_attrs.inner);
} else {
ret ([], [], []);
......@@ -119,8 +119,8 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: str,
syntax::parse::parser::mk_item(p0, cdir.span.lo, cdir.span.hi, id,
ast::item_mod(m0), mod_attrs);
// Thread defids, chpos and byte_pos through the parsers
cx.chpos = p0.get_chpos();
cx.byte_pos = p0.get_byte_pos();
cx.chpos = p0.reader.chpos;
cx.byte_pos = p0.reader.pos;
items += [i];
}
ast::cdir_dir_mod(id, cdirs, attrs) {
......
......@@ -7,81 +7,63 @@
import util::interner::intern;
import codemap;
// Pre-refactor representation: the lexer's input source expressed as an
// `obj` interface type.  (This commit replaces it with the boxed record
// + impl that follow; all state access becomes direct field reads.)
type reader =
obj {
// True once the input is exhausted.
fn is_eof() -> bool;
// Current character / peek at the following character.
fn curr() -> char;
fn next() -> char;
fn init();
// Advance one character.
fn bump();
// Slice of the source text from `start` to the current position.
fn get_str_from(uint) -> str;
fn get_interner() -> @interner::interner<str>;
// Character and byte cursor positions.
fn get_chpos() -> uint;
fn get_byte_pos() -> uint;
fn get_col() -> uint;
fn get_filemap() -> codemap::filemap;
// Report an error at the current position.
fn err(str);
};
// Post-refactor representation: the reader as a boxed record with
// mutable cursor state; the behavior lives in the `impl` below.
type reader = @{
// Codemap used for error reporting.
cm: codemap::codemap,
// Entire source text being lexed.
src: str,
// Byte length of `src`.
len: uint,
// Column of the current character (reset to 0 after each newline).
mutable col: uint,
// Byte offset in `src` just past the current character.
mutable pos: uint,
// Current character; -1 as char is the EOF sentinel.
mutable curr: char,
// Character (not byte) position; starts at the filemap's start_pos.ch.
mutable chpos: uint,
// NOTE(review): `strs` appears unused in the visible code -- confirm.
mutable strs: [str],
// Filemap recording line starts for this file.
filemap: codemap::filemap,
// String interner shared with the parser.
interner: @interner::interner<str>
};
// Methods on the record-based reader.  These mirror the old `obj`
// methods; `curr`, `chpos`, `col`, etc. are now read directly as fields
// by callers.
impl reader for reader {
// EOF is signalled by the -1 sentinel character.
fn is_eof() -> bool { self.curr == -1 as char }
// Return the source text from `start` up to the current position.
fn get_str_from(start: uint) -> str {
// I'm pretty skeptical about this subtraction. What if there's a
// multi-byte character before the mark?
ret str::slice(self.src, start - 1u, self.pos - 1u);
}
// Peek at the character after the current one without advancing.
fn next() -> char {
if self.pos < self.len {
ret str::char_at(self.src, self.pos);
} else { ret -1 as char; }
}
// Advance one character, maintaining col/chpos and registering each
// newly started line with the filemap.
fn bump() {
if self.pos < self.len {
self.col += 1u;
self.chpos += 1u;
if self.curr == '\n' {
codemap::next_line(self.filemap, self.chpos, self.pos +
self.filemap.start_pos.byte);
self.col = 0u;
}
// char_range_at handles multi-byte UTF-8: `next.next` is the
// byte index after the decoded character.
let next = str::char_range_at(self.src, self.pos);
self.pos = next.next;
self.curr = next.ch;
} else { self.curr = -1 as char; }
}
// Emit an error at the current character position (zero-width span).
fn err(m: str) {
codemap::emit_error(some(ast_util::mk_sp(self.chpos, self.chpos)),
m, self.cm);
}
}
fn new_reader(cm: codemap::codemap, src: str, filemap: codemap::filemap,
itr: @interner::interner<str>) -> reader {
obj reader(cm: codemap::codemap,
src: str,
len: uint,
mutable col: uint,
mutable pos: uint,
mutable ch: char,
mutable chpos: uint,
mutable strs: [str],
fm: codemap::filemap,
itr: @interner::interner<str>) {
fn is_eof() -> bool { ret ch == -1 as char; }
fn get_str_from(start: uint) -> str {
// I'm pretty skeptical about this subtraction. What if there's a
// multi-byte character before the mark?
ret str::slice(src, start - 1u, pos - 1u);
}
fn get_chpos() -> uint { ret chpos; }
fn get_byte_pos() -> uint { ret pos; }
fn curr() -> char { ret ch; }
fn next() -> char {
if pos < len {
ret str::char_at(src, pos);
} else { ret -1 as char; }
}
fn init() {
if pos < len {
let next = str::char_range_at(src, pos);
pos = next.next;
ch = next.ch;
}
}
fn bump() {
if pos < len {
col += 1u;
chpos += 1u;
if ch == '\n' {
codemap::next_line(fm, chpos, pos + fm.start_pos.byte);
col = 0u;
}
let next = str::char_range_at(src, pos);
pos = next.next;
ch = next.ch;
} else { ch = -1 as char; }
}
fn get_interner() -> @interner::interner<str> { ret itr; }
fn get_col() -> uint { ret col; }
fn get_filemap() -> codemap::filemap { ret fm; }
fn err(m: str) {
codemap::emit_error(some(ast_util::mk_sp(chpos, chpos)), m, cm);
}
let r = @{cm: cm, src: src, len: str::byte_len(src),
mutable col: 0u, mutable pos: 0u, mutable curr: -1 as char,
mutable chpos: filemap.start_pos.ch, mutable strs: [],
filemap: filemap, interner: itr};
if r.pos < r.len {
let next = str::char_range_at(r.src, r.pos);
r.pos = next.next;
r.curr = next.ch;
}
let strs: [str] = [];
let rd =
reader(cm, src, str::byte_len(src), 0u, 0u, -1 as char,
filemap.start_pos.ch, strs, filemap, itr);
rd.init();
ret rd;
ret r;
}
fn dec_digit_val(c: char) -> int { ret (c as int) - ('0' as int); }
......@@ -119,15 +101,15 @@ fn is_hex_digit(c: char) -> bool {
fn is_bin_digit(c: char) -> bool { ret c == '0' || c == '1'; }
fn consume_whitespace_and_comments(rdr: reader) {
while is_whitespace(rdr.curr()) { rdr.bump(); }
while is_whitespace(rdr.curr) { rdr.bump(); }
be consume_any_line_comment(rdr);
}
fn consume_any_line_comment(rdr: reader) {
if rdr.curr() == '/' {
if rdr.curr == '/' {
alt rdr.next() {
'/' {
while rdr.curr() != '\n' && !rdr.is_eof() { rdr.bump(); }
while rdr.curr != '\n' && !rdr.is_eof() { rdr.bump(); }
// Restart whitespace munch.
be consume_whitespace_and_comments(rdr);
......@@ -142,12 +124,12 @@ fn consume_block_comment(rdr: reader) {
let level: int = 1;
while level > 0 {
if rdr.is_eof() { rdr.err("unterminated block comment"); fail; }
if rdr.curr() == '/' && rdr.next() == '*' {
if rdr.curr == '/' && rdr.next() == '*' {
rdr.bump();
rdr.bump();
level += 1;
} else {
if rdr.curr() == '*' && rdr.next() == '/' {
if rdr.curr == '*' && rdr.next() == '/' {
rdr.bump();
rdr.bump();
level -= 1;
......@@ -160,12 +142,12 @@ fn consume_block_comment(rdr: reader) {
}
fn scan_exponent(rdr: reader) -> option::t<str> {
let c = rdr.curr();
let c = rdr.curr;
let rslt = "";
if c == 'e' || c == 'E' {
str::push_byte(rslt, c as u8);
rdr.bump();
c = rdr.curr();
c = rdr.curr;
if c == '-' || c == '+' {
str::push_byte(rslt, c as u8);
rdr.bump();
......@@ -180,7 +162,7 @@ fn scan_exponent(rdr: reader) -> option::t<str> {
fn scan_digits(rdr: reader, radix: uint) -> str {
let rslt = "";
while true {
let c = rdr.curr();
let c = rdr.curr;
if c == '_' { rdr.bump(); cont; }
alt char::maybe_digit(c) {
some(d) if (d as uint) < radix {
......@@ -205,13 +187,13 @@ fn scan_number(c: char, rdr: reader) -> token::token {
base = 2u;
}
num_str = scan_digits(rdr, base);
c = rdr.curr();
c = rdr.curr;
n = rdr.next();
if c == 'u' || c == 'i' {
let signed = c == 'i', tp = signed ? either::left(ast::ty_i)
: either::right(ast::ty_u);
rdr.bump();
c = rdr.curr();
c = rdr.curr;
if c == '8' {
rdr.bump();
tp = signed ? either::left(ast::ty_i8)
......@@ -241,7 +223,7 @@ fn scan_number(c: char, rdr: reader) -> token::token {
}
}
let is_float = false;
if rdr.curr() == '.' && !(is_alpha(rdr.next()) || rdr.next() == '_') {
if rdr.curr == '.' && !(is_alpha(rdr.next()) || rdr.next() == '_') {
is_float = true;
rdr.bump();
let dec_part = scan_digits(rdr, 10u);
......@@ -254,19 +236,19 @@ fn scan_number(c: char, rdr: reader) -> token::token {
}
none. {}
}
if rdr.curr() == 'f' {
if rdr.curr == 'f' {
rdr.bump();
c = rdr.curr();
c = rdr.curr;
n = rdr.next();
if c == '3' && n == '2' {
rdr.bump();
rdr.bump();
ret token::LIT_FLOAT(intern(*rdr.get_interner(), num_str),
ret token::LIT_FLOAT(intern(*rdr.interner, num_str),
ast::ty_f32);
} else if c == '6' && n == '4' {
rdr.bump();
rdr.bump();
ret token::LIT_FLOAT(intern(*rdr.get_interner(), num_str),
ret token::LIT_FLOAT(intern(*rdr.interner, num_str),
ast::ty_f64);
/* FIXME: if this is out of range for either a 32-bit or
64-bit float, it won't be noticed till the back-end */
......@@ -275,7 +257,7 @@ fn scan_number(c: char, rdr: reader) -> token::token {
}
}
if is_float {
ret token::LIT_FLOAT(interner::intern(*rdr.get_interner(), num_str),
ret token::LIT_FLOAT(interner::intern(*rdr.interner, num_str),
ast::ty_f);
} else {
let parsed = u64::from_str(num_str, base as u64);
......@@ -286,7 +268,7 @@ fn scan_number(c: char, rdr: reader) -> token::token {
fn scan_numeric_escape(rdr: reader, n_hex_digits: uint) -> char {
let accum_int = 0, i = n_hex_digits;
while i != 0u {
let n = rdr.curr();
let n = rdr.curr;
rdr.bump();
if !is_hex_digit(n) {
rdr.err(#fmt["illegal numeric character escape: %d", n as int]);
......@@ -301,26 +283,26 @@ fn scan_numeric_escape(rdr: reader, n_hex_digits: uint) -> char {
fn next_token(rdr: reader) -> {tok: token::token, chpos: uint, bpos: uint} {
consume_whitespace_and_comments(rdr);
let start_chpos = rdr.get_chpos();
let start_bpos = rdr.get_byte_pos();
let start_chpos = rdr.chpos;
let start_bpos = rdr.pos;
let tok = if rdr.is_eof() { token::EOF } else { next_token_inner(rdr) };
ret {tok: tok, chpos: start_chpos, bpos: start_bpos};
}
fn next_token_inner(rdr: reader) -> token::token {
let accum_str = "";
let c = rdr.curr();
let c = rdr.curr;
if char::is_XID_start(c) || c == '_' {
while char::is_XID_continue(c) {
str::push_char(accum_str, c);
rdr.bump();
c = rdr.curr();
c = rdr.curr;
}
if str::eq(accum_str, "_") { ret token::UNDERSCORE; }
let is_mod_name = c == ':' && rdr.next() == ':';
// FIXME: perform NFKC normalization here.
ret token::IDENT(interner::intern::<str>(*rdr.get_interner(),
ret token::IDENT(interner::intern::<str>(*rdr.interner,
accum_str), is_mod_name);
}
if is_dec_digit(c) {
......@@ -328,7 +310,7 @@ fn next_token_inner(rdr: reader) -> token::token {
}
fn binop(rdr: reader, op: token::binop) -> token::token {
rdr.bump();
if rdr.curr() == '=' {
if rdr.curr == '=' {
rdr.bump();
ret token::BINOPEQ(op);
} else { ret token::BINOP(op); }
......@@ -348,7 +330,7 @@ fn binop(rdr: reader, op: token::binop) -> token::token {
',' { rdr.bump(); ret token::COMMA; }
'.' {
rdr.bump();
if rdr.curr() == '.' && rdr.next() == '.' {
if rdr.curr == '.' && rdr.next() == '.' {
rdr.bump();
rdr.bump();
ret token::ELLIPSIS;
......@@ -364,14 +346,14 @@ fn binop(rdr: reader, op: token::binop) -> token::token {
'@' { rdr.bump(); ret token::AT; }
'#' {
rdr.bump();
if rdr.curr() == '<' { rdr.bump(); ret token::POUND_LT; }
if rdr.curr() == '{' { rdr.bump(); ret token::POUND_LBRACE; }
if rdr.curr == '<' { rdr.bump(); ret token::POUND_LT; }
if rdr.curr == '{' { rdr.bump(); ret token::POUND_LBRACE; }
ret token::POUND;
}
'~' { rdr.bump(); ret token::TILDE; }
':' {
rdr.bump();
if rdr.curr() == ':' {
if rdr.curr == ':' {
rdr.bump();
ret token::MOD_SEP;
} else { ret token::COLON; }
......@@ -384,26 +366,26 @@ fn binop(rdr: reader, op: token::binop) -> token::token {
// Multi-byte tokens.
'=' {
rdr.bump();
if rdr.curr() == '=' {
if rdr.curr == '=' {
rdr.bump();
ret token::EQEQ;
} else { ret token::EQ; }
}
'!' {
rdr.bump();
if rdr.curr() == '=' {
if rdr.curr == '=' {
rdr.bump();
ret token::NE;
} else { ret token::NOT; }
}
'<' {
rdr.bump();
alt rdr.curr() {
alt rdr.curr {
'=' { rdr.bump(); ret token::LE; }
'<' { ret binop(rdr, token::LSL); }
'-' {
rdr.bump();
alt rdr.curr() {
alt rdr.curr {
'>' { rdr.bump(); ret token::DARROW; }
_ { ret token::LARROW; }
}
......@@ -413,7 +395,7 @@ fn binop(rdr: reader, op: token::binop) -> token::token {
}
'>' {
rdr.bump();
alt rdr.curr() {
alt rdr.curr {
'=' { rdr.bump(); ret token::GE; }
'>' {
if rdr.next() == '>' {
......@@ -426,10 +408,10 @@ fn binop(rdr: reader, op: token::binop) -> token::token {
}
'\'' {
rdr.bump();
let c2 = rdr.curr();
let c2 = rdr.curr;
rdr.bump();
if c2 == '\\' {
let escaped = rdr.curr();
let escaped = rdr.curr;
rdr.bump();
alt escaped {
'n' { c2 = '\n'; }
......@@ -446,7 +428,7 @@ fn binop(rdr: reader, op: token::binop) -> token::token {
}
}
}
if rdr.curr() != '\'' {
if rdr.curr != '\'' {
rdr.err("unterminated character constant");
fail;
}
......@@ -454,20 +436,20 @@ fn binop(rdr: reader, op: token::binop) -> token::token {
ret token::LIT_INT(c2 as i64, ast::ty_char);
}
'"' {
let n = rdr.get_chpos();
let n = rdr.chpos;
rdr.bump();
while rdr.curr() != '"' {
while rdr.curr != '"' {
if rdr.is_eof() {
rdr.err(#fmt["unterminated double quote string: %s",
rdr.get_str_from(n)]);
fail;
}
let ch = rdr.curr();
let ch = rdr.curr;
rdr.bump();
alt ch {
'\\' {
let escaped = rdr.curr();
let escaped = rdr.curr;
rdr.bump();
alt escaped {
'n' { str::push_byte(accum_str, '\n' as u8); }
......@@ -495,7 +477,7 @@ fn binop(rdr: reader, op: token::binop) -> token::token {
}
}
rdr.bump();
ret token::LIT_STR(interner::intern::<str>(*rdr.get_interner(),
ret token::LIT_STR(interner::intern::<str>(*rdr.interner,
accum_str));
}
'-' {
......@@ -538,11 +520,11 @@ fn binop(rdr: reader, op: token::binop) -> token::token {
fn read_to_eol(rdr: reader) -> str {
let val = "";
while rdr.curr() != '\n' && !rdr.is_eof() {
str::push_char(val, rdr.curr());
while rdr.curr != '\n' && !rdr.is_eof() {
str::push_char(val, rdr.curr);
rdr.bump();
}
if rdr.curr() == '\n' { rdr.bump(); }
if rdr.curr == '\n' { rdr.bump(); }
ret val;
}
......@@ -553,11 +535,11 @@ fn read_one_line_comment(rdr: reader) -> str {
}
fn consume_whitespace(rdr: reader) {
while is_whitespace(rdr.curr()) && !rdr.is_eof() { rdr.bump(); }
while is_whitespace(rdr.curr) && !rdr.is_eof() { rdr.bump(); }
}
fn consume_non_eol_whitespace(rdr: reader) {
while is_whitespace(rdr.curr()) && rdr.curr() != '\n' && !rdr.is_eof() {
while is_whitespace(rdr.curr) && rdr.curr != '\n' && !rdr.is_eof() {
rdr.bump();
}
}
......@@ -565,12 +547,12 @@ fn consume_non_eol_whitespace(rdr: reader) {
fn push_blank_line_comment(rdr: reader, &comments: [cmnt]) {
#debug(">>> blank-line comment");
let v: [str] = [];
comments += [{style: blank_line, lines: v, pos: rdr.get_chpos()}];
comments += [{style: blank_line, lines: v, pos: rdr.chpos}];
}
fn consume_whitespace_counting_blank_lines(rdr: reader, &comments: [cmnt]) {
while is_whitespace(rdr.curr()) && !rdr.is_eof() {
if rdr.get_col() == 0u && rdr.curr() == '\n' {
while is_whitespace(rdr.curr) && !rdr.is_eof() {
if rdr.col == 0u && rdr.curr == '\n' {
push_blank_line_comment(rdr, comments);
}
rdr.bump();
......@@ -579,9 +561,9 @@ fn consume_whitespace_counting_blank_lines(rdr: reader, &comments: [cmnt]) {
fn read_line_comments(rdr: reader, code_to_the_left: bool) -> cmnt {
#debug(">>> line comments");
let p = rdr.get_chpos();
let p = rdr.chpos;
let lines: [str] = [];
while rdr.curr() == '/' && rdr.next() == '/' {
while rdr.curr == '/' && rdr.next() == '/' {
let line = read_one_line_comment(rdr);
log(debug, line);
lines += [line];
......@@ -612,9 +594,9 @@ fn trim_whitespace_prefix_and_push_line(&lines: [str], s: str, col: uint) {
fn read_block_comment(rdr: reader, code_to_the_left: bool) -> cmnt {
#debug(">>> block comment");
let p = rdr.get_chpos();
let p = rdr.chpos;
let lines: [str] = [];
let col: uint = rdr.get_col();
let col: uint = rdr.col;
rdr.bump();
rdr.bump();
let curr_line = "/*";
......@@ -622,19 +604,19 @@ fn read_block_comment(rdr: reader, code_to_the_left: bool) -> cmnt {
while level > 0 {
#debug("=== block comment level %d", level);
if rdr.is_eof() { rdr.err("unterminated block comment"); fail; }
if rdr.curr() == '\n' {
if rdr.curr == '\n' {
trim_whitespace_prefix_and_push_line(lines, curr_line, col);
curr_line = "";
rdr.bump();
} else {
str::push_char(curr_line, rdr.curr());
if rdr.curr() == '/' && rdr.next() == '*' {
str::push_char(curr_line, rdr.curr);
if rdr.curr == '/' && rdr.next() == '*' {
rdr.bump();
rdr.bump();
curr_line += "*";
level += 1;
} else {
if rdr.curr() == '*' && rdr.next() == '/' {
if rdr.curr == '*' && rdr.next() == '/' {
rdr.bump();
rdr.bump();
curr_line += "/";
......@@ -648,7 +630,7 @@ fn read_block_comment(rdr: reader, code_to_the_left: bool) -> cmnt {
}
let style = if code_to_the_left { trailing } else { isolated };
consume_non_eol_whitespace(rdr);
if !rdr.is_eof() && rdr.curr() != '\n' && vec::len(lines) == 1u {
if !rdr.is_eof() && rdr.curr != '\n' && vec::len(lines) == 1u {
style = mixed;
}
#debug("<<< block comment");
......@@ -656,15 +638,15 @@ fn read_block_comment(rdr: reader, code_to_the_left: bool) -> cmnt {
}
fn peeking_at_comment(rdr: reader) -> bool {
ret rdr.curr() == '/' && rdr.next() == '/' ||
rdr.curr() == '/' && rdr.next() == '*';
ret rdr.curr == '/' && rdr.next() == '/' ||
rdr.curr == '/' && rdr.next() == '*';
}
fn consume_comment(rdr: reader, code_to_the_left: bool, &comments: [cmnt]) {
#debug(">>> consume comment");
if rdr.curr() == '/' && rdr.next() == '/' {
if rdr.curr == '/' && rdr.next() == '/' {
comments += [read_line_comments(rdr, code_to_the_left)];
} else if rdr.curr() == '/' && rdr.next() == '*' {
} else if rdr.curr == '/' && rdr.next() == '*' {
comments += [read_block_comment(rdr, code_to_the_left)];
} else { fail; }
#debug("<<< consume comment");
......@@ -696,7 +678,7 @@ fn gather_comments_and_literals(cm: codemap::codemap, path: str,
while true {
let code_to_the_left = !first_read;
consume_non_eol_whitespace(rdr);
if rdr.curr() == '\n' {
if rdr.curr == '\n' {
code_to_the_left = false;
consume_whitespace_counting_blank_lines(rdr, comments);
}
......
......@@ -9,6 +9,7 @@
import util::interner;
import ast::{node_id, spanned};
import front::attr;
import lexer::reader;
tag restriction {
UNRESTRICTED;
......@@ -27,34 +28,60 @@ fn next_node_id(sess: parse_sess) -> node_id {
ret rv;
}
// Pre-refactor representation: the parser expressed as an `obj`
// interface type.  (Replaced by the boxed record + impl that follow;
// most getters become direct field reads at call sites.)
type parser =
obj {
// Current token without consuming it.
fn peek() -> token::token;
// Advance to the next token.
fn bump();
// Replace the current token and its span in place.
fn swap(token::token, uint, uint);
// Peek a number of tokens ahead without consuming.
fn look_ahead(uint) -> token::token;
// Error reporting.
fn fatal(str) -> ! ;
fn span_fatal(span, str) -> ! ;
fn warn(str);
// Expression-parsing restriction state.
fn restrict(restriction);
fn get_restriction() -> restriction;
fn get_file_type() -> file_type;
fn get_cfg() -> ast::crate_cfg;
// Span accessors for the current and previous token.
fn get_span() -> span;
fn get_lo_pos() -> uint;
fn get_hi_pos() -> uint;
fn get_last_lo_pos() -> uint;
fn get_last_hi_pos() -> uint;
fn get_prec_table() -> @[op_spec];
// Resolve an interned string number to its text.
fn get_str(token::str_num) -> str;
fn get_reader() -> lexer::reader;
fn get_filemap() -> codemap::filemap;
fn get_bad_expr_words() -> hashmap<str, ()>;
fn get_chpos() -> uint;
fn get_byte_pos() -> uint;
// Allocate a fresh AST node id.
fn get_id() -> node_id;
fn get_sess() -> parse_sess;
};
// Post-refactor representation: parser state as a boxed record with
// mutable cursor fields; the behavior lives in the `impl` below.
type parser = @{
sess: parse_sess,
cfg: ast::crate_cfg,
file_type: file_type,
// Current token.
mutable token: token::token,
// Span of the current token.
mutable span: span,
// Span of the previously consumed token (saved by `bump`).
mutable last_span: span,
// Lookahead buffer; filled by `look_ahead`, drained by `bump`.
mutable buffer: [{tok: token::token, span: span}],
mutable restriction: restriction,
// Underlying lexer reader.
reader: reader,
// Operator precedence table (built by prec_table()).
precs: @[op_spec],
// Identifiers disallowed in expression position.
bad_expr_words: hashmap<str, ()>
};
// Methods on the record-based parser.  Token/span state that was behind
// getters on the old `obj` is now read directly as fields by callers.
impl parser for parser {
// Advance to the next token, preferring buffered lookahead tokens
// over lexing fresh ones.
fn bump() {
self.last_span = self.span;
if vec::len(self.buffer) == 0u {
let next = lexer::next_token(self.reader);
self.token = next.tok;
// Span runs from the token's start to the reader's current
// char position.
self.span = ast_util::mk_sp(next.chpos, self.reader.chpos);
} else {
// New lookahead tokens are prepended in look_ahead, so the
// nearest (first-lexed) token sits at the end and is popped.
let next = vec::pop(self.buffer);
self.token = next.tok;
self.span = next.span;
}
}
// Replace the current token and span in place (used e.g. when
// splitting a `>>` token into two `>`s).
fn swap(next: token::token, lo: uint, hi: uint) {
self.token = next;
self.span = ast_util::mk_sp(lo, hi);
}
// Peek `distance` tokens ahead (1-based) without consuming; tokens
// lexed here are buffered and later served back by `bump`.
// NOTE(review): new tokens are prepended while the result indexes
// buffer[distance - 1u] from the front -- for distance > 1 the
// order/index interaction looks suspect; confirm (identical in the
// pre-refactor code).
fn look_ahead(distance: uint) -> token::token {
while vec::len(self.buffer) < distance {
let next = lexer::next_token(self.reader);
let sp = ast_util::mk_sp(next.chpos, self.reader.chpos);
self.buffer = [{tok: next.tok, span: sp}] + self.buffer;
}
ret self.buffer[distance - 1u].tok;
}
// Fatal error at the current token's span.
fn fatal(m: str) -> ! {
self.span_fatal(self.span, m);
}
// Fatal error at an explicit span; emits and aborts.
fn span_fatal(sp: span, m: str) -> ! {
codemap::emit_error(some(sp), m, self.sess.cm);
fail;
}
// Non-fatal warning at the current token's span.
fn warn(m: str) {
codemap::emit_warning(some(self.span), m, self.sess.cm);
}
// Resolve an interned string number back to its text.
fn get_str(i: token::str_num) -> str {
interner::get(*self.reader.interner, i)
}
// Allocate a fresh AST node id from the parse session.
fn get_id() -> node_id { next_node_id(self.sess) }
}
fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, path: str,
chpos: uint, byte_pos: uint, ftype: file_type) ->
......@@ -86,79 +113,21 @@ fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
ret new_parser(sess, cfg, rdr, ftype);
}
fn new_parser(sess: parse_sess, cfg: ast::crate_cfg, rdr: lexer::reader,
fn new_parser(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader,
ftype: file_type) -> parser {
obj stdio_parser(sess: parse_sess,
cfg: ast::crate_cfg,
ftype: file_type,
mutable tok: token::token,
mutable tok_span: span,
mutable last_tok_span: span,
mutable buffer: [{tok: token::token, span: span}],
mutable restr: restriction,
rdr: lexer::reader,
precs: @[op_spec],
bad_words: hashmap<str, ()>) {
fn peek() -> token::token { ret tok; }
fn bump() {
last_tok_span = tok_span;
if vec::len(buffer) == 0u {
let next = lexer::next_token(rdr);
tok = next.tok;
tok_span = ast_util::mk_sp(next.chpos, rdr.get_chpos());
} else {
let next = vec::pop(buffer);
tok = next.tok;
tok_span = next.span;
}
}
fn swap(next: token::token, lo: uint, hi: uint) {
tok = next;
tok_span = ast_util::mk_sp(lo, hi);
}
fn look_ahead(distance: uint) -> token::token {
while vec::len(buffer) < distance {
let next = lexer::next_token(rdr);
let sp = ast_util::mk_sp(next.chpos, rdr.get_chpos());
buffer = [{tok: next.tok, span: sp}] + buffer;
}
ret buffer[distance - 1u].tok;
}
fn fatal(m: str) -> ! {
self.span_fatal(self.get_span(), m);
}
fn span_fatal(sp: span, m: str) -> ! {
codemap::emit_error(some(sp), m, sess.cm);
fail;
}
fn warn(m: str) {
codemap::emit_warning(some(self.get_span()), m, sess.cm);
}
fn restrict(r: restriction) { restr = r; }
fn get_restriction() -> restriction { ret restr; }
fn get_span() -> span { ret tok_span; }
fn get_lo_pos() -> uint { ret tok_span.lo; }
fn get_hi_pos() -> uint { ret tok_span.hi; }
fn get_last_lo_pos() -> uint { ret last_tok_span.lo; }
fn get_last_hi_pos() -> uint { ret last_tok_span.hi; }
fn get_file_type() -> file_type { ret ftype; }
fn get_cfg() -> ast::crate_cfg { ret cfg; }
fn get_prec_table() -> @[op_spec] { ret precs; }
fn get_str(i: token::str_num) -> str {
ret interner::get(*rdr.get_interner(), i);
}
fn get_reader() -> lexer::reader { ret rdr; }
fn get_filemap() -> codemap::filemap { ret rdr.get_filemap(); }
fn get_bad_expr_words() -> hashmap<str, ()> { ret bad_words; }
fn get_chpos() -> uint { ret rdr.get_chpos(); }
fn get_byte_pos() -> uint { ret rdr.get_byte_pos(); }
fn get_id() -> node_id { ret next_node_id(sess); }
fn get_sess() -> parse_sess { ret sess; }
}
let tok0 = lexer::next_token(rdr);
let span0 = ast_util::mk_sp(tok0.chpos, rdr.get_chpos());
ret stdio_parser(sess, cfg, ftype, tok0.tok, span0, span0, [],
UNRESTRICTED, rdr, prec_table(), bad_expr_word_table());
let span0 = ast_util::mk_sp(tok0.chpos, rdr.chpos);
@{sess: sess,
cfg: cfg,
file_type: ftype,
mutable token: tok0.tok,
mutable span: span0,
mutable last_span: span0,
mutable buffer: [],
mutable restriction: UNRESTRICTED,
reader: rdr,
precs: prec_table(),
bad_expr_words: bad_expr_word_table()}
}
// These are the words that shouldn't be allowed as value identifiers,
......@@ -178,35 +147,35 @@ fn bad_expr_word_table() -> hashmap<str, ()> {
}
fn unexpected(p: parser, t: token::token) -> ! {
let s: str = "unexpected token: '" + token::to_str(p.get_reader(), t) +
let s: str = "unexpected token: '" + token::to_str(p.reader, t) +
"'";
p.fatal(s);
}
fn expect(p: parser, t: token::token) {
if p.peek() == t {
if p.token == t {
p.bump();
} else {
let s: str = "expecting '";
s += token::to_str(p.get_reader(), t);
s += token::to_str(p.reader, t);
s += "' but found '";
s += token::to_str(p.get_reader(), p.peek());
s += token::to_str(p.reader, p.token);
p.fatal(s + "'");
}
}
fn expect_gt(p: parser) {
if p.peek() == token::GT {
if p.token == token::GT {
p.bump();
} else if p.peek() == token::BINOP(token::LSR) {
p.swap(token::GT, p.get_lo_pos() + 1u, p.get_hi_pos());
} else if p.peek() == token::BINOP(token::ASR) {
p.swap(token::BINOP(token::LSR), p.get_lo_pos() + 1u, p.get_hi_pos());
} else if p.token == token::BINOP(token::LSR) {
p.swap(token::GT, p.span.lo + 1u, p.span.hi);
} else if p.token == token::BINOP(token::ASR) {
p.swap(token::BINOP(token::LSR), p.span.lo + 1u, p.span.hi);
} else {
let s: str = "expecting ";
s += token::to_str(p.get_reader(), token::GT);
s += token::to_str(p.reader, token::GT);
s += ", found ";
s += token::to_str(p.get_reader(), p.peek());
s += token::to_str(p.reader, p.token);
p.fatal(s);
}
}
......@@ -216,7 +185,7 @@ fn spanned<T: copy>(lo: uint, hi: uint, node: T) -> spanned<T> {
}
fn parse_ident(p: parser) -> ast::ident {
alt p.peek() {
alt p.token {
token::IDENT(i, _) { p.bump(); ret p.get_str(i); }
_ { p.fatal("expecting ident"); }
}
......@@ -228,18 +197,18 @@ fn parse_value_ident(p: parser) -> ast::ident {
}
fn eat(p: parser, tok: token::token) -> bool {
ret if p.peek() == tok { p.bump(); true } else { false };
ret if p.token == tok { p.bump(); true } else { false };
}
fn is_word(p: parser, word: str) -> bool {
ret alt p.peek() {
ret alt p.token {
token::IDENT(sid, false) { str::eq(word, p.get_str(sid)) }
_ { false }
};
}
fn eat_word(p: parser, word: str) -> bool {
alt p.peek() {
alt p.token {
token::IDENT(sid, false) {
if str::eq(word, p.get_str(sid)) {
p.bump();
......@@ -253,15 +222,15 @@ fn eat_word(p: parser, word: str) -> bool {
fn expect_word(p: parser, word: str) {
if !eat_word(p, word) {
p.fatal("expecting " + word + ", found " +
token::to_str(p.get_reader(), p.peek()));
token::to_str(p.reader, p.token));
}
}
fn check_bad_word(p: parser) {
alt p.peek() {
alt p.token {
token::IDENT(sid, false) {
let w = p.get_str(sid);
if p.get_bad_expr_words().contains_key(w) {
if p.bad_expr_words.contains_key(w) {
p.fatal("found " + w + " in expression position");
}
}
......@@ -293,11 +262,11 @@ fn parse_fn_input_ty(p: parser) -> ast::arg {
fn parse_ty_methods(p: parser, allow_tps: bool) -> [ast::ty_method] {
parse_seq(token::LBRACE, token::RBRACE, seq_sep_none(), {|p|
let flo = p.get_lo_pos();
let flo = p.span.lo;
expect_word(p, "fn");
let ident = parse_value_ident(p);
let tps = allow_tps ? parse_ty_params(p) : [];
let f = parse_ty_fn(ast::proto_bare, p), fhi = p.get_last_hi_pos();
let f = parse_ty_fn(ast::proto_bare, p), fhi = p.last_span.hi;
expect(p, token::SEMI);
alt f {
ast::ty_fn(_, d) {
......@@ -315,7 +284,7 @@ fn parse_mt(p: parser) -> ast::mt {
}
fn parse_ty_field(p: parser) -> ast::ty_field {
let lo = p.get_lo_pos();
let lo = p.span.lo;
let mut = parse_mutability(p);
let id = parse_ident(p);
expect(p, token::COLON);
......@@ -332,10 +301,10 @@ fn ident_index(p: parser, args: [ast::arg], i: ast::ident) -> uint {
}
fn parse_type_constr_arg(p: parser) -> @ast::ty_constr_arg {
let sp = p.get_span();
let sp = p.span;
let carg = ast::carg_base;
expect(p, token::BINOP(token::STAR));
if p.peek() == token::DOT {
if p.token == token::DOT {
// "*..." notation for record fields
p.bump();
let pth = parse_path(p);
......@@ -346,9 +315,9 @@ fn parse_type_constr_arg(p: parser) -> @ast::ty_constr_arg {
}
fn parse_constr_arg(args: [ast::arg], p: parser) -> @ast::constr_arg {
let sp = p.get_span();
let sp = p.span;
let carg = ast::carg_base;
if p.peek() == token::BINOP(token::STAR) {
if p.token == token::BINOP(token::STAR) {
p.bump();
} else {
let i: ast::ident = parse_value_ident(p);
......@@ -358,7 +327,7 @@ fn parse_constr_arg(args: [ast::arg], p: parser) -> @ast::constr_arg {
}
fn parse_ty_constr(fn_args: [ast::arg], p: parser) -> @ast::constr {
let lo = p.get_lo_pos();
let lo = p.span.lo;
let path = parse_path(p);
let args: {node: [@ast::constr_arg], span: span} =
parse_seq(token::LPAREN, token::RPAREN, seq_sep(token::COMMA),
......@@ -368,12 +337,12 @@ fn parse_ty_constr(fn_args: [ast::arg], p: parser) -> @ast::constr {
}
fn parse_constr_in_type(p: parser) -> @ast::ty_constr {
let lo = p.get_lo_pos();
let lo = p.span.lo;
let path = parse_path(p);
let args: [@ast::ty_constr_arg] =
parse_seq(token::LPAREN, token::RPAREN, seq_sep(token::COMMA),
parse_type_constr_arg, p).node;
let hi = p.get_lo_pos();
let hi = p.span.lo;
let tc: ast::ty_constr_ = {path: path, args: args, id: p.get_id()};
ret @spanned(lo, hi, tc);
}
......@@ -386,7 +355,7 @@ fn parse_constrs<T: copy>(pser: block(parser) -> @ast::constr_general<T>,
while true {
let constr = pser(p);
constrs += [constr];
if p.peek() == token::COMMA { p.bump(); } else { break; }
if p.token == token::COMMA { p.bump(); } else { break; }
}
constrs
}
......@@ -397,12 +366,12 @@ fn parse_type_constraints(p: parser) -> [@ast::ty_constr] {
fn parse_ty_postfix(orig_t: ast::ty_, p: parser, colons_before_params: bool,
lo: uint) -> @ast::ty {
if colons_before_params && p.peek() == token::MOD_SEP {
if colons_before_params && p.token == token::MOD_SEP {
p.bump();
expect(p, token::LT);
} else if !colons_before_params && p.peek() == token::LT {
} else if !colons_before_params && p.token == token::LT {
p.bump();
} else { ret @spanned(lo, p.get_last_hi_pos(), orig_t); }
} else { ret @spanned(lo, p.last_span.hi, orig_t); }
// If we're here, we have explicit type parameter instantiation.
let seq = parse_seq_to_gt(some(token::COMMA), {|p| parse_ty(p, false)},
......@@ -410,8 +379,8 @@ fn parse_ty_postfix(orig_t: ast::ty_, p: parser, colons_before_params: bool,
alt orig_t {
ast::ty_path(pth, ann) {
ret @spanned(lo, p.get_last_hi_pos(),
ast::ty_path(@spanned(lo, p.get_last_hi_pos(),
ret @spanned(lo, p.last_span.hi,
ast::ty_path(@spanned(lo, p.last_span.hi,
{global: pth.node.global,
idents: pth.node.idents,
types: seq}), ann));
......@@ -422,18 +391,18 @@ fn parse_ty_postfix(orig_t: ast::ty_, p: parser, colons_before_params: bool,
fn parse_ret_ty(p: parser) -> (ast::ret_style, @ast::ty) {
ret if eat(p, token::RARROW) {
let lo = p.get_lo_pos();
let lo = p.span.lo;
if eat(p, token::NOT) {
(ast::noreturn, @spanned(lo, p.get_last_hi_pos(), ast::ty_bot))
(ast::noreturn, @spanned(lo, p.last_span.hi, ast::ty_bot))
} else { (ast::return_val, parse_ty(p, false)) }
} else {
let pos = p.get_lo_pos();
let pos = p.span.lo;
(ast::return_val, @spanned(pos, pos, ast::ty_nil))
}
}
fn parse_ty(p: parser, colons_before_params: bool) -> @ast::ty {
let lo = p.get_lo_pos();
let lo = p.span.lo;
let t: ast::ty_;
// FIXME: do something with this
......@@ -469,14 +438,14 @@ fn parse_ty(p: parser, colons_before_params: bool) -> @ast::ty {
t = ast::ty_float(ast::ty_f32);
} else if eat_word(p, "f64") {
t = ast::ty_float(ast::ty_f64);
} else if p.peek() == token::LPAREN {
} else if p.token == token::LPAREN {
p.bump();
if p.peek() == token::RPAREN {
if p.token == token::RPAREN {
p.bump();
t = ast::ty_nil;
} else {
let ts = [parse_ty(p, false)];
while p.peek() == token::COMMA {
while p.token == token::COMMA {
p.bump();
ts += [parse_ty(p, false)];
}
......@@ -485,28 +454,28 @@ fn parse_ty(p: parser, colons_before_params: bool) -> @ast::ty {
} else { t = ast::ty_tup(ts); }
expect(p, token::RPAREN);
}
} else if p.peek() == token::AT {
} else if p.token == token::AT {
p.bump();
t = ast::ty_box(parse_mt(p));
} else if p.peek() == token::TILDE {
} else if p.token == token::TILDE {
p.bump();
t = ast::ty_uniq(parse_mt(p));
} else if p.peek() == token::BINOP(token::STAR) {
} else if p.token == token::BINOP(token::STAR) {
p.bump();
t = ast::ty_ptr(parse_mt(p));
} else if p.peek() == token::LBRACE {
} else if p.token == token::LBRACE {
let elems =
parse_seq(token::LBRACE, token::RBRACE, seq_sep_opt(token::COMMA),
parse_ty_field, p);
if vec::len(elems.node) == 0u { unexpected(p, token::RBRACE); }
let hi = elems.span.hi;
t = ast::ty_rec(elems.node);
if p.peek() == token::COLON {
if p.token == token::COLON {
p.bump();
t = ast::ty_constr(@spanned(lo, hi, t),
parse_type_constraints(p));
}
} else if p.peek() == token::LBRACKET {
} else if p.token == token::LBRACKET {
expect(p, token::LBRACKET);
t = ast::ty_vec(parse_mt(p));
expect(p, token::RBRACKET);
......@@ -523,7 +492,7 @@ fn parse_ty(p: parser, colons_before_params: bool) -> @ast::ty {
t = parse_ty_fn(ast::proto_uniq, p);
} else if eat_word(p, "obj") {
t = ast::ty_obj(parse_ty_methods(p, false));
} else if p.peek() == token::MOD_SEP || is_ident(p.peek()) {
} else if p.token == token::MOD_SEP || is_ident(p.token) {
let path = parse_path(p);
t = ast::ty_path(path, p.get_id());
} else { p.fatal("expecting type"); }
......@@ -553,7 +522,7 @@ fn parse_fn_block_arg(p: parser) -> ast::arg {
let m = parse_arg_mode(p);
let i = parse_value_ident(p);
let t = eat(p, token::COLON) ? parse_ty(p, false) :
@spanned(p.get_lo_pos(), p.get_hi_pos(), ast::ty_infer);
@spanned(p.span.lo, p.span.hi, ast::ty_infer);
ret {mode: m, ty: t, ident: i, id: p.get_id()};
}
......@@ -562,8 +531,8 @@ fn parse_seq_to_before_gt<T: copy>(sep: option::t<token::token>,
p: parser) -> [T] {
let first = true;
let v = [];
while p.peek() != token::GT && p.peek() != token::BINOP(token::LSR) &&
p.peek() != token::BINOP(token::ASR) {
while p.token != token::GT && p.token != token::BINOP(token::LSR) &&
p.token != token::BINOP(token::ASR) {
alt sep {
some(t) { if first { first = false; } else { expect(p, t); } }
_ { }
......@@ -585,10 +554,10 @@ fn parse_seq_to_gt<T: copy>(sep: option::t<token::token>,
fn parse_seq_lt_gt<T: copy>(sep: option::t<token::token>,
f: block(parser) -> T,
p: parser) -> spanned<[T]> {
let lo = p.get_lo_pos();
let lo = p.span.lo;
expect(p, token::LT);
let result = parse_seq_to_before_gt::<T>(sep, f, p);
let hi = p.get_hi_pos();
let hi = p.span.hi;
expect_gt(p);
ret spanned(lo, hi, result);
}
......@@ -620,12 +589,12 @@ fn parse_seq_to_before_end<T: copy>(ket: token::token,
f: block(parser) -> T, p: parser) -> [T] {
let first: bool = true;
let v: [T] = [];
while p.peek() != ket {
while p.token != ket {
alt sep.sep {
some(t) { if first { first = false; } else { expect(p, t); } }
_ { }
}
if sep.trailing_opt && p.peek() == ket { break; }
if sep.trailing_opt && p.token == ket { break; }
v += [f(p)];
}
ret v;
......@@ -635,10 +604,10 @@ fn parse_seq_to_before_end<T: copy>(ket: token::token,
fn parse_seq<T: copy>(bra: token::token, ket: token::token,
sep: seq_sep, f: block(parser) -> T,
p: parser) -> spanned<[T]> {
let lo = p.get_lo_pos();
let lo = p.span.lo;
expect(p, bra);
let result = parse_seq_to_before_end::<T>(ket, sep, f, p);
let hi = p.get_hi_pos();
let hi = p.span.hi;
p.bump();
ret spanned(lo, hi, result);
}
......@@ -655,13 +624,13 @@ fn lit_from_token(p: parser, tok: token::token) -> ast::lit_ {
}
fn parse_lit(p: parser) -> ast::lit {
let sp = p.get_span();
let sp = p.span;
let lit = if eat_word(p, "true") {
ast::lit_bool(true)
} else if eat_word(p, "false") {
ast::lit_bool(false)
} else {
let tok = p.peek();
let tok = p.token;
p.bump();
lit_from_token(p, tok)
};
......@@ -674,23 +643,23 @@ fn is_ident(t: token::token) -> bool {
}
fn is_plain_ident(p: parser) -> bool {
ret alt p.peek() { token::IDENT(_, false) { true } _ { false } };
ret alt p.token { token::IDENT(_, false) { true } _ { false } };
}
fn parse_path(p: parser) -> @ast::path {
let lo = p.get_lo_pos();
let lo = p.span.lo;
let global = eat(p, token::MOD_SEP), ids = [parse_ident(p)];
while p.look_ahead(1u) != token::LT && eat(p, token::MOD_SEP) {
ids += [parse_ident(p)];
}
ret @spanned(lo, p.get_last_hi_pos(),
ret @spanned(lo, p.last_span.hi,
{global: global, idents: ids, types: []});
}
fn parse_path_and_ty_param_substs(p: parser, colons: bool) -> @ast::path {
let lo = p.get_lo_pos();
let lo = p.span.lo;
let path = parse_path(p);
if colons ? eat(p, token::MOD_SEP) : p.peek() == token::LT {
if colons ? eat(p, token::MOD_SEP) : p.token == token::LT {
let seq = parse_seq_lt_gt(some(token::COMMA),
{|p| parse_ty(p, false)}, p);
@spanned(lo, seq.span.hi, {types: seq.node with path.node})
......@@ -708,7 +677,7 @@ fn parse_mutability(p: parser) -> ast::mutability {
}
fn parse_field(p: parser, sep: token::token) -> ast::field {
let lo = p.get_lo_pos();
let lo = p.span.lo;
let m = parse_mutability(p);
let i = parse_ident(p);
expect(p, sep);
......@@ -731,8 +700,7 @@ fn is_bar(t: token::token) -> bool {
}
fn mk_lit_u32(p: parser, i: u32) -> @ast::expr {
let span = p.get_span();
let span = p.span;
let lv_lit = @{node: ast::lit_uint(i as u64, ast::ty_u32),
span: span};
......@@ -762,21 +730,21 @@ fn to_expr(e: pexpr) -> @ast::expr {
}
fn parse_bottom_expr(p: parser) -> pexpr {
let lo = p.get_lo_pos();
let hi = p.get_hi_pos();
let lo = p.span.lo;
let hi = p.span.hi;
let ex: ast::expr_;
if p.peek() == token::LPAREN {
if p.token == token::LPAREN {
p.bump();
if p.peek() == token::RPAREN {
hi = p.get_hi_pos();
if p.token == token::RPAREN {
hi = p.span.hi;
p.bump();
let lit = @spanned(lo, hi, ast::lit_nil);
ret mk_pexpr(p, lo, hi, ast::expr_lit(lit));
}
let es = [parse_expr(p)];
while p.peek() == token::COMMA { p.bump(); es += [parse_expr(p)]; }
hi = p.get_hi_pos();
while p.token == token::COMMA { p.bump(); es += [parse_expr(p)]; }
hi = p.span.hi;
expect(p, token::RPAREN);
// Note: we retain the expr_tup() even for simple
......@@ -785,25 +753,25 @@ fn parse_bottom_expr(p: parser) -> pexpr {
// can tell whether the expression was parenthesized or not,
// which affects expr_is_complete().
ret mk_pexpr(p, lo, hi, ast::expr_tup(es));
} else if p.peek() == token::LBRACE {
} else if p.token == token::LBRACE {
p.bump();
if is_word(p, "mutable") ||
is_plain_ident(p) && p.look_ahead(1u) == token::COLON {
let fields = [parse_field(p, token::COLON)];
let base = none;
while p.peek() != token::RBRACE {
while p.token != token::RBRACE {
if eat_word(p, "with") { base = some(parse_expr(p)); break; }
expect(p, token::COMMA);
if p.peek() == token::RBRACE {
if p.token == token::RBRACE {
// record ends by an optional trailing comma
break;
}
fields += [parse_field(p, token::COLON)];
}
hi = p.get_hi_pos();
hi = p.span.hi;
expect(p, token::RBRACE);
ex = ast::expr_rec(fields, base);
} else if is_bar(p.peek()) {
} else if is_bar(p.token) {
ret pexpr(parse_fn_block_expr(p));
} else {
let blk = parse_block_tail(p, lo, ast::default_blk);
......@@ -834,35 +802,35 @@ fn parse_bottom_expr(p: parser) -> pexpr {
ret pexpr(parse_block_expr(p, lo, ast::unchecked_blk));
} else if eat_word(p, "unsafe") {
ret pexpr(parse_block_expr(p, lo, ast::unsafe_blk));
} else if p.peek() == token::LBRACKET {
} else if p.token == token::LBRACKET {
p.bump();
let mut = parse_mutability(p);
let es =
parse_seq_to_end(token::RBRACKET, seq_sep(token::COMMA),
parse_expr, p);
ex = ast::expr_vec(es, mut);
} else if p.peek() == token::POUND_LT {
} else if p.token == token::POUND_LT {
p.bump();
let ty = parse_ty(p, false);
expect(p, token::GT);
/* hack: early return to take advantage of specialized function */
ret pexpr(mk_mac_expr(p, lo, p.get_hi_pos(),
ret pexpr(mk_mac_expr(p, lo, p.span.hi,
ast::mac_embed_type(ty)));
} else if p.peek() == token::POUND_LBRACE {
} else if p.token == token::POUND_LBRACE {
p.bump();
let blk = ast::mac_embed_block(
parse_block_tail(p, lo, ast::default_blk));
ret pexpr(mk_mac_expr(p, lo, p.get_hi_pos(), blk));
} else if p.peek() == token::ELLIPSIS {
ret pexpr(mk_mac_expr(p, lo, p.span.hi, blk));
} else if p.token == token::ELLIPSIS {
p.bump();
ret pexpr(mk_mac_expr(p, lo, p.get_hi_pos(), ast::mac_ellipsis));
ret pexpr(mk_mac_expr(p, lo, p.span.hi, ast::mac_ellipsis));
} else if eat_word(p, "obj") {
// Anonymous object
// Only make people type () if they're actually adding new fields
let fields: option::t<[ast::anon_obj_field]> = none;
if p.peek() == token::LPAREN {
if p.token == token::LPAREN {
p.bump();
fields =
some(parse_seq_to_end(token::RPAREN, seq_sep(token::COMMA),
......@@ -871,12 +839,12 @@ fn parse_bottom_expr(p: parser) -> pexpr {
let meths: [@ast::method] = [];
let inner_obj: option::t<@ast::expr> = none;
expect(p, token::LBRACE);
while p.peek() != token::RBRACE {
while p.token != token::RBRACE {
if eat_word(p, "with") {
inner_obj = some(parse_expr(p));
} else { meths += [parse_method(p, false)]; }
}
hi = p.get_hi_pos();
hi = p.span.hi;
expect(p, token::RBRACE);
// fields and methods may be *additional* or *overriding* fields
// and methods if there's a inner_obj, or they may be the *only*
......@@ -889,7 +857,7 @@ fn parse_bottom_expr(p: parser) -> pexpr {
} else if eat_word(p, "bind") {
let e = parse_expr_res(p, RESTRICT_NO_CALL_EXPRS);
fn parse_expr_opt(p: parser) -> option::t<@ast::expr> {
alt p.peek() {
alt p.token {
token::UNDERSCORE. { p.bump(); ret none; }
_ { ret some(parse_expr(p)); }
}
......@@ -899,12 +867,12 @@ fn parse_expr_opt(p: parser) -> option::t<@ast::expr> {
parse_expr_opt, p);
hi = es.span.hi;
ex = ast::expr_bind(e, es.node);
} else if p.peek() == token::POUND {
} else if p.token == token::POUND {
let ex_ext = parse_syntax_ext(p);
hi = ex_ext.span.hi;
ex = ex_ext.node;
} else if eat_word(p, "fail") {
if can_begin_expr(p.peek()) {
if can_begin_expr(p.token) {
let e = parse_expr(p);
hi = e.span.hi;
ex = ast::expr_fail(some(e));
......@@ -915,7 +883,7 @@ fn parse_expr_opt(p: parser) -> option::t<@ast::expr> {
expect(p, token::COMMA);
let e = parse_expr(p);
ex = ast::expr_log(2, lvl, e);
hi = p.get_hi_pos();
hi = p.span.hi;
expect(p, token::RPAREN);
} else if eat_word(p, "assert") {
let e = parse_expr(p);
......@@ -938,17 +906,17 @@ fn parse_expr_opt(p: parser) -> option::t<@ast::expr> {
hi = e.span.hi;
ex = ast::expr_check(ast::claimed_expr, e);
} else if eat_word(p, "ret") {
if can_begin_expr(p.peek()) {
if can_begin_expr(p.token) {
let e = parse_expr(p);
hi = e.span.hi;
ex = ast::expr_ret(some(e));
} else { ex = ast::expr_ret(none); }
} else if eat_word(p, "break") {
ex = ast::expr_break;
hi = p.get_hi_pos();
hi = p.span.hi;
} else if eat_word(p, "cont") {
ex = ast::expr_cont;
hi = p.get_hi_pos();
hi = p.span.hi;
} else if eat_word(p, "be") {
let e = parse_expr(p);
......@@ -961,8 +929,8 @@ fn parse_expr_opt(p: parser) -> option::t<@ast::expr> {
let e = parse_expr(p);
ex = ast::expr_copy(e);
hi = e.span.hi;
} else if p.peek() == token::MOD_SEP ||
is_ident(p.peek()) && !is_word(p, "true") &&
} else if p.token == token::MOD_SEP ||
is_ident(p.token) && !is_word(p, "true") &&
!is_word(p, "false") {
check_bad_word(p);
let pth = parse_path_and_ty_param_substs(p, true);
......@@ -985,13 +953,13 @@ fn parse_block_expr(p: parser,
}
fn parse_syntax_ext(p: parser) -> @ast::expr {
let lo = p.get_lo_pos();
let lo = p.span.lo;
expect(p, token::POUND);
ret parse_syntax_ext_naked(p, lo);
}
fn parse_syntax_ext_naked(p: parser, lo: uint) -> @ast::expr {
alt p.peek() {
alt p.token {
token::IDENT(_, _) {}
_ { p.fatal("expected a syntax expander name"); }
}
......@@ -999,7 +967,7 @@ fn parse_syntax_ext_naked(p: parser, lo: uint) -> @ast::expr {
//temporary for a backwards-compatible cycle:
let sep = seq_sep(token::COMMA);
let es =
if p.peek() == token::LPAREN {
if p.token == token::LPAREN {
parse_seq(token::LPAREN, token::RPAREN, sep, parse_expr, p)
} else {
parse_seq(token::LBRACKET, token::RBRACKET, sep, parse_expr, p)
......@@ -1015,7 +983,7 @@ fn parse_dot_or_call_expr(p: parser) -> pexpr {
}
fn permits_call(p: parser) -> bool {
ret p.get_restriction() != RESTRICT_NO_CALL_EXPRS;
ret p.restriction != RESTRICT_NO_CALL_EXPRS;
}
fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr {
......@@ -1023,7 +991,7 @@ fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr {
let lo = e.span.lo;
let hi = e.span.hi;
while !expr_is_complete(p, e) {
alt p.peek() {
alt p.token {
// expr(...)
token::LPAREN. if permits_call(p) {
let es = parse_seq(token::LPAREN, token::RPAREN,
......@@ -1043,7 +1011,7 @@ fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr {
with *to_expr(e)});
}
_ {
e = mk_pexpr(p, lo, p.get_last_hi_pos(),
e = mk_pexpr(p, lo, p.last_span.hi,
ast::expr_call(to_expr(e), [blk], true));
}
}
......@@ -1061,9 +1029,9 @@ fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr {
// expr.f
token::DOT. {
p.bump();
alt p.peek() {
alt p.token {
token::IDENT(i, _) {
hi = p.get_hi_pos();
hi = p.span.hi;
p.bump();
let tys = if eat(p, token::MOD_SEP) {
expect(p, token::LT);
......@@ -1086,11 +1054,11 @@ fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr {
}
fn parse_prefix_expr(p: parser) -> pexpr {
let lo = p.get_lo_pos();
let hi = p.get_hi_pos();
let lo = p.span.lo;
let hi = p.span.hi;
let ex;
alt p.peek() {
alt p.token {
token::NOT. {
p.bump();
let e = to_expr(parse_prefix_expr(p));
......@@ -1135,7 +1103,7 @@ fn parse_prefix_expr(p: parser) -> pexpr {
fn parse_ternary(p: parser) -> @ast::expr {
let cond_expr = parse_binops(p);
if p.peek() == token::QUES {
if p.token == token::QUES {
p.bump();
let then_expr = parse_expr(p);
expect(p, token::COLON);
......@@ -1185,10 +1153,10 @@ fn parse_more_binops(p: parser, plhs: pexpr, min_prec: int) ->
@ast::expr {
let lhs = to_expr(plhs);
if expr_is_complete(p, plhs) { ret lhs; }
let peeked = p.peek();
let peeked = p.token;
if peeked == token::BINOP(token::OR) &&
p.get_restriction() == RESTRICT_NO_BAR_OP { ret lhs; }
for cur: op_spec in *p.get_prec_table() {
p.restriction == RESTRICT_NO_BAR_OP { ret lhs; }
for cur: op_spec in *p.precs {
if cur.prec > min_prec && cur.tok == peeked {
p.bump();
let expr = parse_prefix_expr(p);
......@@ -1208,9 +1176,9 @@ fn parse_more_binops(p: parser, plhs: pexpr, min_prec: int) ->
}
fn parse_assign_expr(p: parser) -> @ast::expr {
let lo = p.get_lo_pos();
let lo = p.span.lo;
let lhs = parse_ternary(p);
alt p.peek() {
alt p.token {
token::EQ. {
p.bump();
let rhs = parse_expr(p);
......@@ -1256,7 +1224,7 @@ fn parse_if_expr_1(p: parser) ->
els: option::t<@ast::expr>,
lo: uint,
hi: uint} {
let lo = p.get_last_lo_pos();
let lo = p.last_span.lo;
let cond = parse_expr(p);
let thn = parse_block(p);
let els: option::t<@ast::expr> = none;
......@@ -1287,7 +1255,7 @@ fn parse_if_expr(p: parser) -> @ast::expr {
fn parse_capture_clause(p: parser) -> @ast::capture_clause {
fn expect_opt_trailing_semi(p: parser) {
if !eat(p, token::SEMI) {
if p.peek() != token::RBRACKET {
if p.token != token::RBRACKET {
p.fatal("expecting ; or ]");
}
}
......@@ -1296,10 +1264,10 @@ fn expect_opt_trailing_semi(p: parser) {
fn eat_ident_list(p: parser) -> [@ast::capture_item] {
let res = [];
while true {
alt p.peek() {
alt p.token {
token::IDENT(_, _) {
let id = p.get_id();
let sp = ast_util::mk_sp(p.get_lo_pos(), p.get_hi_pos());
let sp = ast_util::mk_sp(p.span.lo, p.span.hi);
let ident = parse_ident(p);
res += [@{id:id, name:ident, span:sp}];
if !eat(p, token::COMMA) {
......@@ -1335,7 +1303,7 @@ fn eat_ident_list(p: parser) -> [@ast::capture_item] {
}
fn parse_fn_expr(p: parser, proto: ast::proto) -> @ast::expr {
let lo = p.get_last_lo_pos();
let lo = p.last_span.lo;
let capture_clause = parse_capture_clause(p);
let decl = parse_fn_decl(p, ast::impure_fn);
let body = parse_block(p);
......@@ -1344,7 +1312,7 @@ fn parse_fn_expr(p: parser, proto: ast::proto) -> @ast::expr {
}
fn parse_fn_block_expr(p: parser) -> @ast::expr {
let lo = p.get_last_lo_pos();
let lo = p.last_span.lo;
let decl = parse_fn_block_decl(p);
let body = parse_block_tail(p, lo, ast::default_blk);
ret mk_expr(p, lo, body.span.hi, ast::expr_fn_block(decl, body));
......@@ -1360,7 +1328,7 @@ fn parse_else_expr(p: parser) -> @ast::expr {
}
fn parse_for_expr(p: parser) -> @ast::expr {
let lo = p.get_last_lo_pos();
let lo = p.last_span.lo;
let decl = parse_local(p, false);
expect_word(p, "in");
let seq = parse_expr(p);
......@@ -1370,7 +1338,7 @@ fn parse_for_expr(p: parser) -> @ast::expr {
}
fn parse_while_expr(p: parser) -> @ast::expr {
let lo = p.get_last_lo_pos();
let lo = p.last_span.lo;
let cond = parse_expr(p);
let body = parse_block_no_value(p);
let hi = body.span.hi;
......@@ -1378,7 +1346,7 @@ fn parse_while_expr(p: parser) -> @ast::expr {
}
fn parse_do_while_expr(p: parser) -> @ast::expr {
let lo = p.get_last_lo_pos();
let lo = p.last_span.lo;
let body = parse_block_no_value(p);
expect_word(p, "while");
let cond = parse_expr(p);
......@@ -1387,18 +1355,18 @@ fn parse_do_while_expr(p: parser) -> @ast::expr {
}
fn parse_alt_expr(p: parser) -> @ast::expr {
let lo = p.get_last_lo_pos();
let lo = p.last_span.lo;
let discriminant = parse_expr(p);
expect(p, token::LBRACE);
let arms: [ast::arm] = [];
while p.peek() != token::RBRACE {
while p.token != token::RBRACE {
let pats = parse_pats(p);
let guard = none;
if eat_word(p, "if") { guard = some(parse_expr(p)); }
let blk = parse_block(p);
arms += [{pats: pats, guard: guard, body: blk}];
}
let hi = p.get_hi_pos();
let hi = p.span.hi;
p.bump();
ret mk_expr(p, lo, hi, ast::expr_alt(discriminant, arms));
}
......@@ -1408,15 +1376,15 @@ fn parse_expr(p: parser) -> @ast::expr {
}
fn parse_expr_res(p: parser, r: restriction) -> @ast::expr {
let old = p.get_restriction();
p.restrict(r);
let old = p.restriction;
p.restriction = r;
let e = parse_assign_expr(p);
p.restrict(old);
p.restriction = old;
ret e;
}
fn parse_initializer(p: parser) -> option::t<ast::initializer> {
alt p.peek() {
alt p.token {
token::EQ. {
p.bump();
ret some({op: ast::init_assign, expr: parse_expr(p)});
......@@ -1442,16 +1410,16 @@ fn parse_pats(p: parser) -> [@ast::pat] {
let pats = [];
while true {
pats += [parse_pat(p)];
if p.peek() == token::BINOP(token::OR) { p.bump(); } else { break; }
if p.token == token::BINOP(token::OR) { p.bump(); } else { break; }
}
ret pats;
}
fn parse_pat(p: parser) -> @ast::pat {
let lo = p.get_lo_pos();
let hi = p.get_hi_pos();
let lo = p.span.lo;
let hi = p.span.hi;
let pat;
alt p.peek() {
alt p.token {
token::UNDERSCORE. { p.bump(); pat = ast::pat_wild; }
token::AT. {
p.bump();
......@@ -1470,14 +1438,14 @@ fn parse_pat(p: parser) -> @ast::pat {
let fields = [];
let etc = false;
let first = true;
while p.peek() != token::RBRACE {
while p.token != token::RBRACE {
if first { first = false; } else { expect(p, token::COMMA); }
if p.peek() == token::UNDERSCORE {
if p.token == token::UNDERSCORE {
p.bump();
if p.peek() != token::RBRACE {
if p.token != token::RBRACE {
p.fatal("expecting }, found " +
token::to_str(p.get_reader(), p.peek()));
token::to_str(p.reader, p.token));
}
etc = true;
break;
......@@ -1485,11 +1453,11 @@ fn parse_pat(p: parser) -> @ast::pat {
let fieldname = parse_ident(p);
let subpat;
if p.peek() == token::COLON {
if p.token == token::COLON {
p.bump();
subpat = parse_pat(p);
} else {
if p.get_bad_expr_words().contains_key(fieldname) {
if p.bad_expr_words.contains_key(fieldname) {
p.fatal("found " + fieldname + " in binding position");
}
subpat = @{id: p.get_id(),
......@@ -1498,26 +1466,26 @@ fn parse_pat(p: parser) -> @ast::pat {
}
fields += [{ident: fieldname, pat: subpat}];
}
hi = p.get_hi_pos();
hi = p.span.hi;
p.bump();
pat = ast::pat_rec(fields, etc);
}
token::LPAREN. {
p.bump();
if p.peek() == token::RPAREN {
hi = p.get_hi_pos();
if p.token == token::RPAREN {
hi = p.span.hi;
p.bump();
let lit = @{node: ast::lit_nil, span: ast_util::mk_sp(lo, hi)};
let expr = mk_expr(p, lo, hi, ast::expr_lit(lit));
pat = ast::pat_lit(expr);
} else {
let fields = [parse_pat(p)];
while p.peek() == token::COMMA {
while p.token == token::COMMA {
p.bump();
fields += [parse_pat(p)];
}
if vec::len(fields) == 1u { expect(p, token::COMMA); }
hi = p.get_hi_pos();
hi = p.span.hi;
expect(p, token::RPAREN);
pat = ast::pat_tup(fields);
}
......@@ -1540,7 +1508,7 @@ fn parse_pat(p: parser) -> @ast::pat {
}
_ { true }
} {
hi = p.get_hi_pos();
hi = p.span.hi;
let name = parse_value_ident(p);
let sub = eat(p, token::AT) ? some(parse_pat(p)) : none;
pat = ast::pat_bind(name, sub);
......@@ -1548,7 +1516,7 @@ fn parse_pat(p: parser) -> @ast::pat {
let tag_path = parse_path_and_ty_param_substs(p, true);
hi = tag_path.span.hi;
let args: [@ast::pat];
alt p.peek() {
alt p.token {
token::LPAREN. {
let a =
parse_seq(token::LPAREN, token::RPAREN,
......@@ -1567,12 +1535,12 @@ fn parse_pat(p: parser) -> @ast::pat {
}
fn parse_local(p: parser, allow_init: bool) -> @ast::local {
let lo = p.get_lo_pos();
let lo = p.span.lo;
let pat = parse_pat(p);
let ty = @spanned(lo, lo, ast::ty_infer);
if eat(p, token::COLON) { ty = parse_ty(p, false); }
let init = if allow_init { parse_initializer(p) } else { none };
ret @spanned(lo, p.get_last_hi_pos(),
ret @spanned(lo, p.last_span.hi,
{ty: ty, pat: pat, init: init, id: p.get_id()});
}
......@@ -1580,16 +1548,16 @@ fn parse_let(p: parser) -> @ast::decl {
fn parse_let_style(p: parser) -> ast::let_style {
eat(p, token::BINOP(token::AND)) ? ast::let_ref : ast::let_copy
}
let lo = p.get_lo_pos();
let lo = p.span.lo;
let locals = [(parse_let_style(p), parse_local(p, true))];
while eat(p, token::COMMA) {
locals += [(parse_let_style(p), parse_local(p, true))];
}
ret @spanned(lo, p.get_last_hi_pos(), ast::decl_local(locals));
ret @spanned(lo, p.last_span.hi, ast::decl_local(locals));
}
fn parse_stmt(p: parser) -> @ast::stmt {
let lo = p.get_lo_pos();
let lo = p.span.lo;
if eat_word(p, "let") {
let decl = parse_let(p);
ret @spanned(lo, decl.span.hi, ast::stmt_decl(decl, p.get_id()));
......@@ -1624,10 +1592,10 @@ fn parse_stmt(p: parser) -> @ast::stmt {
}
fn expr_is_complete(p: parser, e: pexpr) -> bool {
log(debug, ("expr_is_complete", p.get_restriction(),
log(debug, ("expr_is_complete", p.restriction,
print::pprust::expr_to_str(*e),
expr_requires_semi_to_be_stmt(*e)));
ret p.get_restriction() == RESTRICT_STMT_EXPR &&
ret p.restriction == RESTRICT_STMT_EXPR &&
!expr_requires_semi_to_be_stmt(*e);
}
......@@ -1662,7 +1630,7 @@ fn stmt_ends_with_semi(stmt: ast::stmt) -> bool {
}
fn parse_block(p: parser) -> ast::blk {
let lo = p.get_lo_pos();
let lo = p.span.lo;
if eat_word(p, "unchecked") {
expect(p, token::LBRACE);
be parse_block_tail(p, lo, ast::unchecked_blk);
......@@ -1689,8 +1657,8 @@ fn parse_block_no_value(p: parser) -> ast::blk {
fn parse_block_tail(p: parser, lo: uint, s: ast::blk_check_mode) -> ast::blk {
let view_items = [], stmts = [], expr = none;
while is_word(p, "import") { view_items += [parse_view_item(p)]; }
while p.peek() != token::RBRACE {
alt p.peek() {
while p.token != token::RBRACE {
alt p.token {
token::SEMI. {
p.bump(); // empty
}
......@@ -1698,7 +1666,7 @@ fn parse_block_tail(p: parser, lo: uint, s: ast::blk_check_mode) -> ast::blk {
let stmt = parse_stmt(p);
alt stmt.node {
ast::stmt_expr(e, stmt_id) { // Expression without semicolon:
alt p.peek() {
alt p.token {
token::SEMI. {
p.bump();
stmts += [@{node: ast::stmt_semi(e, stmt_id) with *stmt}];
......@@ -1709,7 +1677,7 @@ fn parse_block_tail(p: parser, lo: uint, s: ast::blk_check_mode) -> ast::blk {
t {
if stmt_ends_with_semi(*stmt) {
p.fatal("expected ';' or '}' after expression but \
found '" + token::to_str(p.get_reader(), t) +
found '" + token::to_str(p.reader, t) +
"'");
}
stmts += [stmt];
......@@ -1728,7 +1696,7 @@ fn parse_block_tail(p: parser, lo: uint, s: ast::blk_check_mode) -> ast::blk {
}
}
}
let hi = p.get_hi_pos();
let hi = p.span.hi;
p.bump();
let bloc = {view_items: view_items, stmts: stmts, expr: expr,
id: p.get_id(), rules: s};
......@@ -1739,7 +1707,7 @@ fn parse_ty_param(p: parser) -> ast::ty_param {
let bounds = [];
let ident = parse_ident(p);
if eat(p, token::COLON) {
while p.peek() != token::COMMA && p.peek() != token::GT {
while p.token != token::COMMA && p.token != token::GT {
if eat_word(p, "send") { bounds += [ast::bound_send]; }
else if eat_word(p, "copy") { bounds += [ast::bound_copy]; }
else { bounds += [ast::bound_iface(parse_ty(p, false))]; }
......@@ -1763,7 +1731,7 @@ fn parse_fn_decl(p: parser, purity: ast::purity)
// mentioned in a constraint to an arg index.
// Seems weird to do this in the parser, but I'm not sure how else to.
let constrs = [];
if p.peek() == token::COLON {
if p.token == token::COLON {
p.bump();
constrs = parse_constrs({|x| parse_ty_constr(inputs.node, x) }, p);
}
......@@ -1780,7 +1748,7 @@ fn parse_fn_block_decl(p: parser) -> ast::fn_decl {
parse_seq(token::BINOP(token::OR), token::BINOP(token::OR),
seq_sep(token::COMMA), parse_fn_block_arg, p).node;
let output = eat(p, token::RARROW) ? parse_ty(p, false) :
@spanned(p.get_lo_pos(), p.get_hi_pos(), ast::ty_infer);
@spanned(p.span.lo, p.span.hi, ast::ty_infer);
ret {inputs: inputs,
output: output,
purity: ast::impure_fn,
......@@ -1805,7 +1773,7 @@ fn mk_item(p: parser, lo: uint, hi: uint, ident: ast::ident, node: ast::item_,
fn parse_item_fn(p: parser, purity: ast::purity,
attrs: [ast::attribute]) -> @ast::item {
let lo = p.get_last_lo_pos();
let lo = p.last_span.lo;
let t = parse_fn_header(p);
let decl = parse_fn_decl(p, purity);
let body = parse_block(p);
......@@ -1832,7 +1800,7 @@ fn parse_anon_obj_field(p: parser) -> ast::anon_obj_field {
}
fn parse_method(p: parser, allow_tps: bool) -> @ast::method {
let lo = p.get_lo_pos();
let lo = p.span.lo;
expect_word(p, "fn");
let ident = parse_value_ident(p);
let tps = allow_tps ? parse_ty_params(p) : [];
......@@ -1843,7 +1811,7 @@ fn parse_method(p: parser, allow_tps: bool) -> @ast::method {
}
fn parse_item_obj(p: parser, attrs: [ast::attribute]) -> @ast::item {
let lo = p.get_last_lo_pos();
let lo = p.last_span.lo;
let ident = parse_value_ident(p);
let ty_params = parse_ty_params(p);
let fields: ast::spanned<[ast::obj_field]> =
......@@ -1851,8 +1819,8 @@ fn parse_item_obj(p: parser, attrs: [ast::attribute]) -> @ast::item {
parse_obj_field, p);
let meths: [@ast::method] = [];
expect(p, token::LBRACE);
while p.peek() != token::RBRACE { meths += [parse_method(p, false)]; }
let hi = p.get_hi_pos();
while p.token != token::RBRACE { meths += [parse_method(p, false)]; }
let hi = p.span.hi;
expect(p, token::RBRACE);
let ob: ast::_obj = {fields: fields.node, methods: meths};
ret mk_item(p, lo, hi, ident, ast::item_obj(ob, ty_params, p.get_id()),
......@@ -1860,9 +1828,9 @@ fn parse_item_obj(p: parser, attrs: [ast::attribute]) -> @ast::item {
}
fn parse_item_iface(p: parser, attrs: [ast::attribute]) -> @ast::item {
let lo = p.get_last_lo_pos(), ident = parse_ident(p),
let lo = p.last_span.lo, ident = parse_ident(p),
tps = parse_ty_params(p), meths = parse_ty_methods(p, true);
ret mk_item(p, lo, p.get_last_hi_pos(), ident,
ret mk_item(p, lo, p.last_span.hi, ident,
ast::item_iface(tps, meths), attrs);
}
......@@ -1871,12 +1839,12 @@ fn parse_item_iface(p: parser, attrs: [ast::attribute]) -> @ast::item {
// impl name<T> of to_str for [T] { ... }
// impl name<T> for [T] { ... }
fn parse_item_impl(p: parser, attrs: [ast::attribute]) -> @ast::item {
let lo = p.get_last_lo_pos();
let lo = p.last_span.lo;
fn wrap_path(p: parser, pt: @ast::path) -> @ast::ty {
@{node: ast::ty_path(pt, p.get_id()), span: pt.span}
}
let (ident, tps) = if !is_word(p, "of") {
if p.peek() == token::LT { (none, parse_ty_params(p)) }
if p.token == token::LT { (none, parse_ty_params(p)) }
else { (some(parse_ident(p)), parse_ty_params(p)) }
} else { (none, []) };
let ifce = if eat_word(p, "of") {
......@@ -1894,12 +1862,12 @@ fn wrap_path(p: parser, pt: @ast::path) -> @ast::ty {
let ty = parse_ty(p, false), meths = [];
expect(p, token::LBRACE);
while !eat(p, token::RBRACE) { meths += [parse_method(p, true)]; }
ret mk_item(p, lo, p.get_last_hi_pos(), ident,
ret mk_item(p, lo, p.last_span.hi, ident,
ast::item_impl(tps, ifce, ty, meths), attrs);
}
fn parse_item_res(p: parser, attrs: [ast::attribute]) -> @ast::item {
let lo = p.get_last_lo_pos();
let lo = p.last_span.lo;
let ident = parse_value_ident(p);
let ty_params = parse_ty_params(p);
expect(p, token::LPAREN);
......@@ -1928,14 +1896,14 @@ fn parse_mod_items(p: parser, term: token::token,
if vec::len(first_item_attrs) == 0u { parse_view(p) } else { [] };
let items: [@ast::item] = [];
let initial_attrs = first_item_attrs;
while p.peek() != term {
while p.token != term {
let attrs = initial_attrs + parse_outer_attributes(p);
initial_attrs = [];
alt parse_item(p, attrs) {
some(i) { items += [i]; }
_ {
p.fatal("expected item but found '" +
token::to_str(p.get_reader(), p.peek()) + "'");
token::to_str(p.reader, p.token) + "'");
}
}
}
......@@ -1943,25 +1911,25 @@ fn parse_mod_items(p: parser, term: token::token,
}
fn parse_item_const(p: parser, attrs: [ast::attribute]) -> @ast::item {
let lo = p.get_last_lo_pos();
let lo = p.last_span.lo;
let id = parse_value_ident(p);
expect(p, token::COLON);
let ty = parse_ty(p, false);
expect(p, token::EQ);
let e = parse_expr(p);
let hi = p.get_hi_pos();
let hi = p.span.hi;
expect(p, token::SEMI);
ret mk_item(p, lo, hi, id, ast::item_const(ty, e), attrs);
}
fn parse_item_mod(p: parser, attrs: [ast::attribute]) -> @ast::item {
let lo = p.get_last_lo_pos();
let lo = p.last_span.lo;
let id = parse_ident(p);
expect(p, token::LBRACE);
let inner_attrs = parse_inner_attrs_and_next(p);
let first_item_outer_attrs = inner_attrs.next;
let m = parse_mod_items(p, token::RBRACE, first_item_outer_attrs);
let hi = p.get_hi_pos();
let hi = p.span.hi;
expect(p, token::RBRACE);
ret mk_item(p, lo, hi, id, ast::item_mod(m), attrs + inner_attrs.inner);
}
......@@ -1969,7 +1937,7 @@ fn parse_item_mod(p: parser, attrs: [ast::attribute]) -> @ast::item {
fn parse_item_native_type(p: parser, attrs: [ast::attribute]) ->
@ast::native_item {
let t = parse_type_decl(p);
let hi = p.get_hi_pos();
let hi = p.span.hi;
expect(p, token::SEMI);
ret @{ident: t.ident,
attrs: attrs,
......@@ -1980,10 +1948,10 @@ fn parse_item_native_type(p: parser, attrs: [ast::attribute]) ->
fn parse_item_native_fn(p: parser, attrs: [ast::attribute],
purity: ast::purity) -> @ast::native_item {
let lo = p.get_last_lo_pos();
let lo = p.last_span.lo;
let t = parse_fn_header(p);
let decl = parse_fn_decl(p, purity);
let hi = p.get_hi_pos();
let hi = p.span.hi;
expect(p, token::SEMI);
ret @{ident: t.ident,
attrs: attrs,
......@@ -2004,7 +1972,7 @@ fn parse_native_item(p: parser, attrs: [ast::attribute]) ->
} else if eat_word(p, "unsafe") {
expect_word(p, "fn");
ret parse_item_native_fn(p, attrs, ast::unsafe_fn);
} else { unexpected(p, p.peek()); }
} else { unexpected(p, p.token); }
}
fn parse_native_mod_items(p: parser, first_item_attrs: [ast::attribute]) ->
......@@ -2016,7 +1984,7 @@ fn parse_native_mod_items(p: parser, first_item_attrs: [ast::attribute]) ->
} else { [] };
let items: [@ast::native_item] = [];
let initial_attrs = first_item_attrs;
while p.peek() != token::RBRACE {
while p.token != token::RBRACE {
let attrs = initial_attrs + parse_outer_attributes(p);
initial_attrs = [];
items += [parse_native_item(p, attrs)];
......@@ -2026,7 +1994,7 @@ fn parse_native_mod_items(p: parser, first_item_attrs: [ast::attribute]) ->
}
fn parse_item_native_mod(p: parser, attrs: [ast::attribute]) -> @ast::item {
let lo = p.get_last_lo_pos();
let lo = p.last_span.lo;
expect_word(p, "mod");
let id = parse_ident(p);
expect(p, token::LBRACE);
......@@ -2034,13 +2002,13 @@ fn parse_item_native_mod(p: parser, attrs: [ast::attribute]) -> @ast::item {
let inner_attrs = more_attrs.inner;
let first_item_outer_attrs = more_attrs.next;
let m = parse_native_mod_items(p, first_item_outer_attrs);
let hi = p.get_hi_pos();
let hi = p.span.hi;
expect(p, token::RBRACE);
ret mk_item(p, lo, hi, id, ast::item_native_mod(m), attrs + inner_attrs);
}
fn parse_type_decl(p: parser) -> {lo: uint, ident: ast::ident} {
let lo = p.get_last_lo_pos();
let lo = p.last_span.lo;
let id = parse_ident(p);
ret {lo: lo, ident: id};
}
......@@ -2050,19 +2018,19 @@ fn parse_item_type(p: parser, attrs: [ast::attribute]) -> @ast::item {
let tps = parse_ty_params(p);
expect(p, token::EQ);
let ty = parse_ty(p, false);
let hi = p.get_hi_pos();
let hi = p.span.hi;
expect(p, token::SEMI);
ret mk_item(p, t.lo, hi, t.ident, ast::item_ty(ty, tps), attrs);
}
fn parse_item_tag(p: parser, attrs: [ast::attribute]) -> @ast::item {
let lo = p.get_last_lo_pos();
let lo = p.last_span.lo;
let id = parse_ident(p);
let ty_params = parse_ty_params(p);
let variants: [ast::variant] = [];
// Newtype syntax
if p.peek() == token::EQ {
if p.get_bad_expr_words().contains_key(id) {
if p.token == token::EQ {
if p.bad_expr_words.contains_key(id) {
p.fatal("found " + id + " in tag constructor position");
}
p.bump();
......@@ -2082,17 +2050,17 @@ fn parse_item_tag(p: parser, attrs: [ast::attribute]) -> @ast::item {
let all_nullary = true;
let have_disr = false;
let disr_val = 0;
while p.peek() != token::RBRACE {
let tok = p.peek();
while p.token != token::RBRACE {
let tok = p.token;
alt tok {
token::IDENT(name, _) {
check_bad_word(p);
let vlo = p.get_lo_pos();
let vlo = p.span.lo;
p.bump();
let args: [ast::variant_arg] = [];
let vhi = p.get_hi_pos();
let vhi = p.span.hi;
let disr_expr = none;
alt p.peek() {
alt p.token {
token::LPAREN. {
all_nullary = false;
let arg_tys = parse_seq(token::LPAREN, token::RPAREN,
......@@ -2142,11 +2110,11 @@ fn parse_item_tag(p: parser, attrs: [ast::attribute]) -> @ast::item {
token::RBRACE. {/* empty */ }
_ {
p.fatal("expected name of variant or '}' but found '" +
token::to_str(p.get_reader(), tok) + "'");
token::to_str(p.reader, tok) + "'");
}
}
}
let hi = p.get_hi_pos();
let hi = p.span.hi;
if (have_disr && !all_nullary) {
p.fatal("discriminator values can only be used with a c-like enum");
}
......@@ -2155,10 +2123,10 @@ fn parse_item_tag(p: parser, attrs: [ast::attribute]) -> @ast::item {
}
fn parse_fn_ty_proto(p: parser) -> ast::proto {
if p.peek() == token::AT {
if p.token == token::AT {
p.bump();
ast::proto_box
} else if p.peek() == token::TILDE {
} else if p.token == token::TILDE {
p.bump();
ast::proto_uniq
} else {
......@@ -2218,13 +2186,13 @@ fn parse_item(p: parser, attrs: [ast::attribute]) -> option::t<@ast::item> {
type attr_or_ext = option::t<either::t<[ast::attribute], @ast::expr>>;
fn parse_outer_attrs_or_ext(p: parser) -> attr_or_ext {
if p.peek() == token::POUND {
let lo = p.get_lo_pos();
if p.token == token::POUND {
let lo = p.span.lo;
p.bump();
if p.peek() == token::LBRACKET {
if p.token == token::LBRACKET {
let first_attr = parse_attribute_naked(p, ast::attr_outer, lo);
ret some(left([first_attr] + parse_outer_attributes(p)));
} else if !(p.peek() == token::LT || p.peek() == token::LBRACKET) {
} else if !(p.token == token::LT || p.token == token::LBRACKET) {
ret some(right(parse_syntax_ext_naked(p, lo)));
} else { ret none; }
} else { ret none; }
......@@ -2233,14 +2201,14 @@ fn parse_outer_attrs_or_ext(p: parser) -> attr_or_ext {
// Parse attributes that appear before an item
fn parse_outer_attributes(p: parser) -> [ast::attribute] {
let attrs: [ast::attribute] = [];
while p.peek() == token::POUND {
while p.token == token::POUND {
attrs += [parse_attribute(p, ast::attr_outer)];
}
ret attrs;
}
fn parse_attribute(p: parser, style: ast::attr_style) -> ast::attribute {
let lo = p.get_lo_pos();
let lo = p.span.lo;
expect(p, token::POUND);
ret parse_attribute_naked(p, style, lo);
}
......@@ -2250,7 +2218,7 @@ fn parse_attribute_naked(p: parser, style: ast::attr_style, lo: uint) ->
expect(p, token::LBRACKET);
let meta_item = parse_meta_item(p);
expect(p, token::RBRACKET);
let hi = p.get_hi_pos();
let hi = p.span.hi;
ret spanned(lo, hi, {style: style, value: *meta_item});
}
......@@ -2264,9 +2232,9 @@ fn parse_inner_attrs_and_next(p: parser) ->
{inner: [ast::attribute], next: [ast::attribute]} {
let inner_attrs: [ast::attribute] = [];
let next_outer_attrs: [ast::attribute] = [];
while p.peek() == token::POUND {
while p.token == token::POUND {
let attr = parse_attribute(p, ast::attr_inner);
if p.peek() == token::SEMI {
if p.token == token::SEMI {
p.bump();
inner_attrs += [attr];
} else {
......@@ -2282,22 +2250,22 @@ fn parse_inner_attrs_and_next(p: parser) ->
}
fn parse_meta_item(p: parser) -> @ast::meta_item {
let lo = p.get_lo_pos();
let lo = p.span.lo;
let ident = parse_ident(p);
alt p.peek() {
alt p.token {
token::EQ. {
p.bump();
let lit = parse_lit(p);
let hi = p.get_hi_pos();
let hi = p.span.hi;
ret @spanned(lo, hi, ast::meta_name_value(ident, lit));
}
token::LPAREN. {
let inner_items = parse_meta_seq(p);
let hi = p.get_hi_pos();
let hi = p.span.hi;
ret @spanned(lo, hi, ast::meta_list(ident, inner_items));
}
_ {
let hi = p.get_hi_pos();
let hi = p.span.hi;
ret @spanned(lo, hi, ast::meta_word(ident));
}
}
......@@ -2309,7 +2277,7 @@ fn parse_meta_seq(p: parser) -> [@ast::meta_item] {
}
fn parse_optional_meta(p: parser) -> [@ast::meta_item] {
alt p.peek() { token::LPAREN. { ret parse_meta_seq(p); } _ { ret []; } }
alt p.token { token::LPAREN. { ret parse_meta_seq(p); } _ { ret []; } }
}
fn parse_use(p: parser) -> ast::view_item_ {
......@@ -2325,7 +2293,7 @@ fn parse_rest_import_name(p: parser, first: ast::ident,
let glob: bool = false;
let from_idents = option::none::<[ast::import_ident]>;
while true {
alt p.peek() {
alt p.token {
token::SEMI. { break; }
token::MOD_SEP. {
if glob { p.fatal("cannot path into a glob"); }
......@@ -2336,7 +2304,7 @@ fn parse_rest_import_name(p: parser, first: ast::ident,
}
_ { p.fatal("expecting '::' or ';'"); }
}
alt p.peek() {
alt p.token {
token::IDENT(_, _) { identifiers += [parse_ident(p)]; }
......@@ -2355,9 +2323,9 @@ fn parse_rest_import_name(p: parser, first: ast::ident,
token::LBRACE. {
fn parse_import_ident(p: parser) -> ast::import_ident {
let lo = p.get_lo_pos();
let lo = p.span.lo;
let ident = parse_ident(p);
let hi = p.get_hi_pos();
let hi = p.span.hi;
ret spanned(lo, hi, {name: ident, id: p.get_id()});
}
let from_idents_ =
......@@ -2404,7 +2372,7 @@ fn parse_import_ident(p: parser) -> ast::import_ident {
fn parse_full_import_name(p: parser, def_ident: ast::ident) ->
ast::view_item_ {
alt p.peek() {
alt p.token {
token::IDENT(i, _) {
p.bump();
ret parse_rest_import_name(p, p.get_str(i), some(def_ident));
......@@ -2414,10 +2382,10 @@ fn parse_full_import_name(p: parser, def_ident: ast::ident) ->
}
fn parse_import(p: parser) -> ast::view_item_ {
alt p.peek() {
alt p.token {
token::IDENT(i, _) {
p.bump();
alt p.peek() {
alt p.token {
token::EQ. {
p.bump();
ret parse_full_import_name(p, p.get_str(i));
......@@ -2437,20 +2405,20 @@ fn parse_export(p: parser) -> ast::view_item_ {
}
fn parse_view_item(p: parser) -> @ast::view_item {
let lo = p.get_lo_pos();
let lo = p.span.lo;
let the_item =
if eat_word(p, "use") {
parse_use(p)
} else if eat_word(p, "import") {
parse_import(p)
} else if eat_word(p, "export") { parse_export(p) } else { fail };
let hi = p.get_lo_pos();
let hi = p.span.lo;
expect(p, token::SEMI);
ret @spanned(lo, hi, the_item);
}
fn is_view_item(p: parser) -> bool {
alt p.peek() {
alt p.token {
token::IDENT(sid, false) {
let st = p.get_str(sid);
ret str::eq(st, "use") || str::eq(st, "import") ||
......@@ -2493,19 +2461,19 @@ fn parse_crate_from_source_str(name: str, source: str, cfg: ast::crate_cfg,
// Parses a source module as a crate
fn parse_crate_mod(p: parser, _cfg: ast::crate_cfg) -> @ast::crate {
let lo = p.get_lo_pos();
let lo = p.span.lo;
let crate_attrs = parse_inner_attrs_and_next(p);
let first_item_outer_attrs = crate_attrs.next;
let m = parse_mod_items(p, token::EOF, first_item_outer_attrs);
ret @spanned(lo, p.get_lo_pos(),
ret @spanned(lo, p.span.lo,
{directives: [],
module: m,
attrs: crate_attrs.inner,
config: p.get_cfg()});
config: p.cfg});
}
fn parse_str(p: parser) -> str {
alt p.peek() {
alt p.token {
token::LIT_STR(s) { p.bump(); p.get_str(s) }
_ {
p.fatal("expected string literal")
......@@ -2526,14 +2494,14 @@ fn parse_crate_directive(p: parser, first_outer_attr: [ast::attribute]) ->
// In a crate file outer attributes are only going to apply to mods
let expect_mod = vec::len(outer_attrs) > 0u;
let lo = p.get_lo_pos();
let lo = p.span.lo;
if expect_mod || is_word(p, "mod") {
expect_word(p, "mod");
let id = parse_ident(p);
alt p.peek() {
alt p.token {
// mod x = "foo.rs";
token::SEMI. {
let hi = p.get_hi_pos();
let hi = p.span.hi;
p.bump();
ret spanned(lo, hi, ast::cdir_src_mod(id, outer_attrs));
}
......@@ -2545,7 +2513,7 @@ fn parse_crate_directive(p: parser, first_outer_attr: [ast::attribute]) ->
let next_outer_attr = inner_attrs.next;
let cdirs =
parse_crate_directives(p, token::RBRACE, next_outer_attr);
let hi = p.get_hi_pos();
let hi = p.span.hi;
expect(p, token::RBRACE);
ret spanned(lo, hi,
ast::cdir_dir_mod(id, cdirs, mod_attrs));
......@@ -2565,13 +2533,13 @@ fn parse_crate_directives(p: parser, term: token::token,
// This is pretty ugly. If we have an outer attribute then we can't accept
// seeing the terminator next, so if we do see it then fail the same way
// parse_crate_directive would
if vec::len(first_outer_attr) > 0u && p.peek() == term {
if vec::len(first_outer_attr) > 0u && p.token == term {
expect_word(p, "mod");
}
let cdirs: [@ast::crate_directive] = [];
let first_outer_attr = first_outer_attr;
while p.peek() != term {
while p.token != term {
let cdir = @parse_crate_directive(p, first_outer_attr);
cdirs += [cdir];
first_outer_attr = [];
......@@ -2582,8 +2550,8 @@ fn parse_crate_directives(p: parser, term: token::token,
fn parse_crate_from_crate_file(input: str, cfg: ast::crate_cfg,
sess: parse_sess) -> @ast::crate {
let p = new_parser_from_file(sess, cfg, input, 0u, 0u, CRATE_FILE);
let lo = p.get_lo_pos();
let prefix = std::fs::dirname(p.get_filemap().name);
let lo = p.span.lo;
let prefix = std::fs::dirname(p.reader.filemap.name);
let leading_attrs = parse_inner_attrs_and_next(p);
let crate_attrs = leading_attrs.inner;
let first_cdir_attr = leading_attrs.next;
......@@ -2591,19 +2559,19 @@ fn parse_crate_from_crate_file(input: str, cfg: ast::crate_cfg,
let cx =
@{p: p,
sess: sess,
mutable chpos: p.get_chpos(),
mutable byte_pos: p.get_byte_pos(),
cfg: p.get_cfg()};
mutable chpos: p.reader.chpos,
mutable byte_pos: p.reader.pos,
cfg: p.cfg};
let (companionmod, _) = fs::splitext(fs::basename(input));
let (m, attrs) = eval::eval_crate_directives_to_mod(
cx, cdirs, prefix, option::some(companionmod));
let hi = p.get_hi_pos();
let hi = p.span.hi;
expect(p, token::EOF);
ret @spanned(lo, hi,
{directives: cdirs,
module: m,
attrs: crate_attrs + attrs,
config: p.get_cfg()});
config: p.cfg});
}
fn parse_crate_from_file(input: str, cfg: ast::crate_cfg, sess: parse_sess) ->
......
import util::interner;
import core::{int, uint, str};
import lexer::reader;
type str_num = uint;
......@@ -87,7 +88,7 @@ fn binop_to_str(o: binop) -> str {
}
}
fn to_str(r: lexer::reader, t: token) -> str {
fn to_str(r: reader, t: token) -> str {
alt t {
EQ. { ret "="; }
LT. { ret "<"; }
......@@ -142,17 +143,17 @@ fn to_str(r: lexer::reader, t: token) -> str {
ret uint::to_str(u as uint, 10u) + ast_util::uint_ty_to_str(t);
}
LIT_FLOAT(s, t) {
ret interner::get::<str>(*r.get_interner(), s) +
ret interner::get::<str>(*r.interner, s) +
ast_util::float_ty_to_str(t);
}
LIT_STR(s) { // FIXME: escape.
ret "\"" + interner::get::<str>(*r.get_interner(), s) + "\"";
ret "\"" + interner::get::<str>(*r.interner, s) + "\"";
}
LIT_BOOL(b) { if b { ret "true"; } else { ret "false"; } }
/* Name components */
IDENT(s, _) {
ret interner::get::<str>(*r.get_interner(), s);
ret interner::get::<str>(*r.interner, s);
}
IDX(i) { ret "_" + int::to_str(i, 10u); }
UNDERSCORE. { ret "_"; }
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册