Commit 45680c83 authored by Niko Matsakis

borrowck changes: some copies, some removed mut annotations, some dvec

Parent c3b266f5
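
Editor's note on the pattern behind this commit: the diff below repeatedly wraps `copy` around values that are matched with `alt` (e.g. `alt copy self.token`), passes tokens by `++` (by copy), and turns `mut` vector fields into `dvec`. The common motive is the stricter borrow check: matching directly on a field of `self` keeps `self` borrowed while the match arms want to call mutating methods such as `bump()`. Below is a minimal sketch of that idea in today's Rust, not the 2012 dialect used in the commit; the `Token`, `Parser`, `bump`, and `parse_ident` names are hypothetical stand-ins, and `clone()` plays the role of the old `copy`.

    #[derive(Clone)]
    enum Token {
        Ident(String),
        Eof,
    }

    struct Parser {
        token: Token,
    }

    impl Parser {
        fn bump(&mut self) {
            // Advance to the next token; stubbed out for this sketch.
            self.token = Token::Eof;
        }

        // Analogue of `alt copy self.token { ... }`: match on a copy of the
        // field (a clone here) so the arms are free to call `&mut self`
        // methods without keeping `self.token` borrowed.
        fn parse_ident(&mut self) -> Option<String> {
            match self.token.clone() {
                Token::Ident(name) => {
                    self.bump();
                    Some(name)
                }
                _ => None,
            }
        }
    }

    fn main() {
        let mut p = Parser { token: Token::Ident("foo".to_string()) };
        assert_eq!(p.parse_ident().as_deref(), Some("foo"));
    }

The `dvec` change in the first file follows the same motive: `literal_ast_matchers` becomes a `dvec` (a growable vector with internal mutability), so new selectors are added with `push` instead of rebuilding a `mut` field with `+= [...]`.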
import codemap::span;
import std::map::{hashmap, str_hash};
import dvec::{dvec, extensions};
import base::*;
@@ -134,7 +135,7 @@ fn scomp(s1: selector, s2: selector, m: matchable) -> match_result {
type binders =
{real_binders: hashmap<ident, selector>,
mut literal_ast_matchers: [selector]};
literal_ast_matchers: dvec<selector>};
type bindings = hashmap<ident, arb_depth<matchable>>;
fn acumm_bindings(_cx: ext_ctxt, _b_dest: bindings, _b_src: bindings) { }
@@ -146,7 +147,7 @@ fn acumm_bindings(_cx: ext_ctxt, _b_dest: bindings, _b_src: bindings) { }
fn pattern_to_selectors(cx: ext_ctxt, e: @expr) -> binders {
let res: binders =
{real_binders: str_hash::<selector>(),
mut literal_ast_matchers: []};
literal_ast_matchers: dvec()};
//this oughta return binders instead, but macro args are a sequence of
//expressions, rather than a single expression
fn trivial_selector(m: matchable) -> match_result { ret some(leaf(m)); }
@@ -474,7 +475,7 @@ fn select(cx: ext_ctxt, m: matchable, pat: @expr) ->
_ { cx.bug("broken traversal in p_t_s_r") }
}
}
b.literal_ast_matchers += [bind select(cx, _, e)];
b.literal_ast_matchers.push(bind select(cx, _, e));
}
}
}
@@ -640,8 +641,8 @@ fn len_select(_cx: ext_ctxt, m: matchable, at_least: bool, len: uint) ->
_ { none }
}
}
b.literal_ast_matchers +=
[compose_sels(s, bind len_select(cx, _, at_least, len))];
b.literal_ast_matchers.push(
compose_sels(s, bind len_select(cx, _, at_least, len)));
}
fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: [@expr], _repeat_after: bool,
......
@@ -18,7 +18,7 @@ fn seq_sep_none() -> seq_sep {
}
fn token_to_str(reader: reader, token: token::token) -> str {
fn token_to_str(reader: reader, ++token: token::token) -> str {
token::to_str(*reader.interner, token)
}
@@ -27,8 +27,9 @@ fn token_to_str(reader: reader, token: token::token) -> str {
impl parser_common for parser {
fn unexpected_last(t: token::token) -> ! {
self.span_fatal(self.last_span, "unexpected token: '"
+ token_to_str(self.reader, t) + "'");
self.span_fatal(
copy self.last_span,
"unexpected token: '" + token_to_str(self.reader, t) + "'");
}
fn unexpected() -> ! {
@@ -49,7 +50,7 @@ fn expect(t: token::token) {
}
fn parse_ident() -> ast::ident {
alt self.token {
alt copy self.token {
token::IDENT(i, _) { self.bump(); ret self.get_str(i); }
_ { self.fatal("expecting ident, found "
+ token_to_str(self.reader, self.token)); }
@@ -79,7 +80,7 @@ fn require_keyword(word: str) {
}
}
fn token_is_keyword(word: str, tok: token::token) -> bool {
fn token_is_keyword(word: str, ++tok: token::token) -> bool {
self.require_keyword(word);
alt tok {
token::IDENT(sid, false) { str::eq(word, self.get_str(sid)) }
@@ -93,7 +94,7 @@ fn is_keyword(word: str) -> bool {
fn eat_keyword(word: str) -> bool {
self.require_keyword(word);
alt self.token {
alt copy self.token {
token::IDENT(sid, false) {
if str::eq(word, self.get_str(sid)) {
self.bump();
......
@@ -121,16 +121,16 @@ fn look_ahead(distance: uint) -> token::token {
ret self.buffer[distance - 1u].tok;
}
fn fatal(m: str) -> ! {
self.sess.span_diagnostic.span_fatal(self.span, m)
self.sess.span_diagnostic.span_fatal(copy self.span, m)
}
fn span_fatal(sp: span, m: str) -> ! {
self.sess.span_diagnostic.span_fatal(sp, m)
}
fn bug(m: str) -> ! {
self.sess.span_diagnostic.span_bug(self.span, m)
self.sess.span_diagnostic.span_bug(copy self.span, m)
}
fn warn(m: str) {
self.sess.span_diagnostic.span_warn(self.span, m)
self.sess.span_diagnostic.span_warn(copy self.span, m)
}
fn get_str(i: token::str_num) -> str {
interner::get(*self.reader.interner, i)
@@ -307,7 +307,7 @@ fn region_from_name(s: option<str>) -> @region {
// Parses something like "&x"
fn parse_region() -> @region {
self.expect(token::BINOP(token::AND));
alt self.token {
alt copy self.token {
token::IDENT(sid, _) {
self.bump();
let n = self.get_str(sid);
@@ -322,7 +322,7 @@ fn parse_region() -> @region {
// Parses something like "&x." (note the trailing dot)
fn parse_region_dot() -> @region {
let name =
alt self.token {
alt copy self.token {
token::IDENT(sid, _) if self.look_ahead(1u) == token::DOT {
self.bump(); self.bump();
some(self.get_str(sid))
@@ -483,11 +483,11 @@ fn parse_fn_block_arg() -> arg_or_capture_item {
}
fn maybe_parse_dollar_mac() -> option<mac_> {
alt self.token {
alt copy self.token {
token::DOLLAR {
let lo = self.span.lo;
self.bump();
alt self.token {
alt copy self.token {
token::LIT_INT(num, ty_i) {
self.bump();
some(mac_var(num as uint))
@@ -511,7 +511,7 @@ fn maybe_parse_dollar_mac() -> option<mac_> {
fn maybe_parse_vstore() -> option<vstore> {
if self.token == token::BINOP(token::SLASH) {
self.bump();
alt self.token {
alt copy self.token {
token::AT {
self.bump(); some(vstore_box)
}
@@ -968,7 +968,7 @@ fn parse_dot_or_call_expr_with(e0: pexpr) -> pexpr {
loop {
// expr.f
if self.eat(token::DOT) {
alt self.token {
alt copy self.token {
token::IDENT(i, _) {
hi = self.span.hi;
self.bump();
@@ -986,7 +986,7 @@ fn parse_dot_or_call_expr_with(e0: pexpr) -> pexpr {
cont;
}
if self.expr_is_complete(e) { break; }
alt self.token {
alt copy self.token {
// expr(...)
token::LPAREN if self.permits_call() {
let es_opt = self.parse_seq(token::LPAREN, token::RPAREN,
@@ -1042,7 +1042,7 @@ fn parse_prefix_expr() -> pexpr {
let mut hi;
let mut ex;
alt self.token {
alt copy self.token {
token::NOT {
self.bump();
let e = self.to_expr(self.parse_prefix_expr());
@@ -1134,7 +1134,7 @@ fn parse_more_binops(plhs: pexpr, min_prec: uint) ->
fn parse_assign_expr() -> @expr {
let lo = self.span.lo;
let lhs = self.parse_binops();
alt self.token {
alt copy self.token {
token::EQ {
self.bump();
let rhs = self.parse_expr();
@@ -1831,7 +1831,7 @@ fn parse_item_fn(purity: purity) -> item_info {
}
fn parse_method_name() -> ident {
alt self.token {
alt copy self.token {
token::BINOP(op) { self.bump(); token::binop_to_str(op) }
token::NOT { self.bump(); "!" }
token::LBRACKET { self.bump(); self.expect(token::RBRACKET); "[]" }
@@ -2375,7 +2375,7 @@ fn parse_view_path() -> @view_path {
while self.token == token::MOD_SEP {
self.bump();
alt self.token {
alt copy self.token {
token::IDENT(i, _) {
self.bump();
@@ -2477,7 +2477,7 @@ fn parse_crate_mod(_cfg: crate_cfg) -> @crate {
}
fn parse_str() -> str {
alt self.token {
alt copy self.token {
token::LIT_STR(s) { self.bump(); self.get_str(s) }
_ {
self.fatal("expected string literal")
......
@@ -59,11 +59,11 @@ enum breaks { consistent, inconsistent, }
type begin_t = {offset: int, breaks: breaks};
enum token { STRING(str, int), BREAK(break_t), BEGIN(begin_t), END, EOF, }
enum token { STRING(@str, int), BREAK(break_t), BEGIN(begin_t), END, EOF, }
fn tok_str(t: token) -> str {
fn tok_str(++t: token) -> str {
alt t {
STRING(s, len) { ret #fmt["STR(%s,%d)", s, len]; }
STRING(s, len) { ret #fmt["STR(%s,%d)", *s, len]; }
BREAK(_) { ret "BREAK"; }
BEGIN(_) { ret "BEGIN"; }
END { ret "END"; }
@@ -109,8 +109,8 @@ fn mk_printer(out: io::writer, linewidth: uint) -> printer {
mut space: linewidth as int,
mut left: 0u,
mut right: 0u,
mut token: token,
mut size: size,
token: token,
size: size,
mut left_total: 0,
mut right_total: 0,
mut scan_stack: scan_stack,
@@ -206,8 +206,8 @@ fn mk_printer(out: io::writer, linewidth: uint) -> printer {
mut space: int, // number of spaces left on line
mut left: uint, // index of left side of input stream
mut right: uint, // index of right side of input stream
mut token: [mut token], // ring-buffr stream goes through
mut size: [mut int], // ring-buffer of calculated sizes
token: [mut token], // ring-buffr stream goes through
size: [mut int], // ring-buffer of calculated sizes
mut left_total: int, // running size of stream "...left"
mut right_total: int, // running size of stream "...right"
// pseudo-stack, really a ring too. Holds the
@@ -346,7 +346,7 @@ fn advance_right() {
self.right %= self.buf_len;
assert (self.right != self.left);
}
fn advance_left(x: token, L: int) {
fn advance_left(++x: token, L: int) {
#debug("advnce_left [%u,%u], sizeof(%u)=%d", self.left, self.right,
self.left, L);
if L >= 0 {
@@ -367,7 +367,7 @@ fn advance_left(x: token, L: int) {
fn check_stack(k: int) {
if !self.scan_stack_empty {
let x = self.scan_top();
alt self.token[x] {
alt copy self.token[x] {
BEGIN(b) {
if k > 0 {
self.size[self.scan_pop()] = self.size[x] +
@@ -465,7 +465,7 @@ fn print(x: token, L: int) {
assert (L == len);
// assert L <= space;
self.space -= len;
self.write_str(s);
self.write_str(*s);
}
EOF {
// EOF should never get here.
@@ -493,14 +493,14 @@ fn break_offset(p: printer, n: uint, off: int) {
fn eof(p: printer) { p.pretty_print(EOF); }
fn word(p: printer, wrd: str) {
p.pretty_print(STRING(wrd, str::len(wrd) as int));
p.pretty_print(STRING(@wrd, str::len(wrd) as int));
}
fn huge_word(p: printer, wrd: str) {
p.pretty_print(STRING(wrd, size_infinity));
p.pretty_print(STRING(@wrd, size_infinity));
}
fn zero_word(p: printer, wrd: str) { p.pretty_print(STRING(wrd, 0)); }
fn zero_word(p: printer, wrd: str) { p.pretty_print(STRING(@wrd, 0)); }
fn spaces(p: printer, n: uint) { break_offset(p, n, 0); }
......
@@ -1703,7 +1703,7 @@ fn print_comment(s: ps, cmnt: comments::cmnt) {
// We need to do at least one, possibly two hardbreaks.
let is_semi =
alt s.s.last_token() {
pp::STRING(s, _) { s == ";" }
pp::STRING(s, _) { *s == ";" }
_ { false }
};
if is_semi || is_begin(s) || is_end(s) { hardbreak(s.s); }
......