Commit 0d554139 authored by Jeffrey Seyfried

Fix fallout in unit tests.

Parent a02c18aa
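
This commit updates unit tests and test plugins for the libsyntax change that replaces `Vec<TokenTree>` with the new `TokenStream` type: `string_to_tts` becomes `string_to_stream`, the contents of a `Delimited` group are reached through `Delimited::stream()`, and `TokenStream::concat` now takes a `Vec<TokenStream>`. A minimal sketch of the conversion pattern, not part of the commit and assuming the updated `util::parser_testing` helper shown in the hunks below:

    // Sketch only: assumes this runs inside libsyntax's test code, where the
    // paths below resolve as in the diffs that follow.
    use util::parser_testing::string_to_stream;
    use tokenstream::{TokenStream, TokenTree};

    fn trees_of(source: &str) -> Vec<TokenTree> {
        // Previously string_to_tts returned a Vec<TokenTree> directly; now we parse
        // to a TokenStream and collect its trees only where a Vec is still needed.
        let stream: TokenStream = string_to_stream(source.to_string());
        stream.trees().collect()
    }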
@@ -598,7 +598,6 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> a
 #[cfg(test)]
 mod tests {
     use super::*;
-    use std::rc::Rc;
     use syntax_pos::{self, Span, BytePos, Pos, NO_EXPANSION};
     use codemap::Spanned;
     use ast::{self, Ident, PatKind};
@@ -609,7 +608,7 @@ mod tests {
     use print::pprust::item_to_string;
     use ptr::P;
     use tokenstream::{self, TokenTree};
-    use util::parser_testing::{string_to_tts, string_to_parser};
+    use util::parser_testing::{string_to_stream, string_to_parser};
     use util::parser_testing::{string_to_expr, string_to_item, string_to_stmt};
     use util::ThinVec;
@@ -654,7 +653,8 @@ fn sp(a: u32, b: u32) -> Span {
     // check the token-tree-ization of macros
     #[test]
     fn string_to_tts_macro () {
-        let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
+        let tts: Vec<_> =
+            string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
         let tts: &[TokenTree] = &tts[..];
         match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
@@ -667,7 +667,7 @@ fn string_to_tts_macro () {
             )
             if name_macro_rules.name == "macro_rules"
             && name_zip.name == "zip" => {
-                let tts = &macro_delimed.tts[..];
+                let tts = &macro_delimed.stream().trees().collect::<Vec<_>>();
                 match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                     (
                         3,
@@ -676,7 +676,7 @@ fn string_to_tts_macro () {
                         Some(&TokenTree::Delimited(_, ref second_delimed)),
                     )
                     if macro_delimed.delim == token::Paren => {
-                        let tts = &first_delimed.tts[..];
+                        let tts = &first_delimed.stream().trees().collect::<Vec<_>>();
                         match (tts.len(), tts.get(0), tts.get(1)) {
                             (
                                 2,
@@ -684,9 +684,9 @@ fn string_to_tts_macro () {
                                 Some(&TokenTree::Token(_, token::Ident(ident))),
                             )
                             if first_delimed.delim == token::Paren && ident.name == "a" => {},
-                            _ => panic!("value 3: {:?}", **first_delimed),
+                            _ => panic!("value 3: {:?}", *first_delimed),
                         }
-                        let tts = &second_delimed.tts[..];
+                        let tts = &second_delimed.stream().trees().collect::<Vec<_>>();
                         match (tts.len(), tts.get(0), tts.get(1)) {
                             (
                                 2,
@@ -695,10 +695,10 @@ fn string_to_tts_macro () {
                             )
                             if second_delimed.delim == token::Paren
                                 && ident.name == "a" => {},
-                            _ => panic!("value 4: {:?}", **second_delimed),
+                            _ => panic!("value 4: {:?}", *second_delimed),
                         }
                     },
-                    _ => panic!("value 2: {:?}", **macro_delimed),
+                    _ => panic!("value 2: {:?}", *macro_delimed),
                 }
             },
             _ => panic!("value: {:?}", tts),
@@ -707,31 +707,31 @@ fn string_to_tts_macro () {
     #[test]
     fn string_to_tts_1() {
-        let tts = string_to_tts("fn a (b : i32) { b; }".to_string());
+        let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
-        let expected = vec![
-            TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))),
-            TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))),
+        let expected = TokenStream::concat(vec![
+            TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))).into(),
+            TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))).into(),
             TokenTree::Delimited(
                 sp(5, 14),
-                Rc::new(tokenstream::Delimited {
+                tokenstream::Delimited {
                     delim: token::DelimToken::Paren,
-                    tts: vec![
-                        TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))),
-                        TokenTree::Token(sp(8, 9), token::Colon),
-                        TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))),
-                    ],
-                })),
+                    tts: TokenStream::concat(vec![
+                        TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))).into(),
+                        TokenTree::Token(sp(8, 9), token::Colon).into(),
+                        TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))).into(),
+                    ]).into(),
+                }).into(),
             TokenTree::Delimited(
                 sp(15, 21),
-                Rc::new(tokenstream::Delimited {
+                tokenstream::Delimited {
                     delim: token::DelimToken::Brace,
-                    tts: vec![
-                        TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))),
-                        TokenTree::Token(sp(18, 19), token::Semi),
-                    ],
-                }))
-        ];
+                    tts: TokenStream::concat(vec![
+                        TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))).into(),
+                        TokenTree::Token(sp(18, 19), token::Semi).into(),
+                    ]).into(),
+                }).into()
+        ]);
         assert_eq!(tts, expected);
     }
@@ -974,8 +974,8 @@ fn ttdelim_span() {
         let expr = parse::parse_expr_from_source_str("foo".to_string(),
            "foo!( fn main() { body } )".to_string(), &sess).unwrap();
-        let tts = match expr.node {
-            ast::ExprKind::Mac(ref mac) => mac.node.tts.clone(),
+        let tts: Vec<_> = match expr.node {
+            ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(),
            _ => panic!("not a macro"),
        };
......
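
For reference, the updated `string_to_tts_1` above builds its expected value by converting each token tree into a single-tree stream with `.into()` and joining them with `TokenStream::concat`; a `Delimited` group now carries a `TokenStream` in its `tts` field rather than a `Vec<TokenTree>`. A smaller sketch in the same style, assuming the test module's imports and `sp` helper above (the spans are illustrative, and equality is checked ignoring spans):

    // Sketch only: build the stream for `a;` the way the updated test builds its
    // expected value, then compare it with the parsed stream span-insensitively.
    let expected = TokenStream::concat(vec![
        TokenTree::Token(sp(0, 1), token::Ident(Ident::from_str("a"))).into(),
        TokenTree::Token(sp(1, 2), token::Semi).into(),
    ]);
    assert!(string_to_stream("a;".to_string()).eq_unspanned(&expected));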
@@ -409,10 +409,10 @@ mod tests {
     use syntax::ast::Ident;
     use syntax_pos::{Span, BytePos, NO_EXPANSION};
     use parse::token::Token;
-    use util::parser_testing::string_to_tts;
+    use util::parser_testing::string_to_stream;
     fn string_to_ts(string: &str) -> TokenStream {
-        string_to_tts(string.to_owned()).into_iter().collect()
+        string_to_stream(string.to_owned())
     }
     fn sp(a: u32, b: u32) -> Span {
@@ -428,20 +428,12 @@ fn test_concat() {
         let test_res = string_to_ts("foo::bar::baz");
         let test_fst = string_to_ts("foo::bar");
         let test_snd = string_to_ts("::baz");
-        let eq_res = TokenStream::concat([test_fst, test_snd].iter().cloned());
+        let eq_res = TokenStream::concat(vec![test_fst, test_snd]);
         assert_eq!(test_res.trees().count(), 5);
         assert_eq!(eq_res.trees().count(), 5);
         assert_eq!(test_res.eq_unspanned(&eq_res), true);
     }
     #[test]
-    fn test_from_to_bijection() {
-        let test_start = string_to_tts("foo::bar(baz)".to_string());
-        let ts = test_start.iter().cloned().collect::<TokenStream>();
-        let test_end: Vec<TokenTree> = ts.trees().collect();
-        assert_eq!(test_start, test_end)
-    }
-    #[test]
     fn test_to_from_bijection() {
         let test_start = string_to_ts("foo::bar(baz)");
......
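
The `test_concat` change reflects `TokenStream::concat` now taking its parts by value as a `Vec<TokenStream>` instead of an iterator of cloned streams. A small sketch of the same pattern, assuming the `string_to_ts` helper defined above:

    // Sketch only: concat consumes its parts; the joined stream still offers
    // lazy iteration via trees() and span-insensitive comparison.
    let lhs = string_to_ts("foo::bar");
    let rhs = string_to_ts("::baz");
    let joined = TokenStream::concat(vec![lhs, rhs]);
    assert_eq!(joined.trees().count(), 5);
    assert!(joined.eq_unspanned(&string_to_ts("foo::bar::baz")));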
@@ -9,17 +9,17 @@
 // except according to those terms.
 use ast::{self, Ident};
-use parse::{ParseSess,PResult,filemap_to_tts};
+use parse::{ParseSess, PResult, filemap_to_stream};
 use parse::{lexer, new_parser_from_source_str};
 use parse::parser::Parser;
 use ptr::P;
-use tokenstream;
+use tokenstream::TokenStream;
 use std::iter::Peekable;
 /// Map a string to tts, using a made-up filename:
-pub fn string_to_tts(source_str: String) -> Vec<tokenstream::TokenTree> {
+pub fn string_to_stream(source_str: String) -> TokenStream {
     let ps = ParseSess::new();
-    filemap_to_tts(&ps, ps.codemap().new_filemap("bogofile".to_string(), None, source_str))
+    filemap_to_stream(&ps, ps.codemap().new_filemap("bogofile".to_string(), None, source_str))
 }
 /// Map string to parser (via tts)
......
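
With `string_to_stream` returning a `TokenStream`, callers can either work on the stream directly or collect its trees where a `Vec` or slice is still expected. A minimal usage sketch, assuming it runs inside libsyntax like the test modules above:

    // Sketch only: parse a snippet and look at its top-level token trees.
    use util::parser_testing::string_to_stream;

    let stream = string_to_stream("fn a (b : i32) { b; }".to_string());
    // Four top-level trees: `fn`, `a`, the `( ... )` group and the `{ ... }` group.
    assert_eq!(stream.trees().count(), 4);
    let trees: Vec<_> = stream.trees().collect();
    assert_eq!(trees.len(), 4);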
@@ -18,7 +18,7 @@
 use syntax::attr::*;
 use syntax::ast;
 use syntax::parse;
-use syntax::parse::{ParseSess,filemap_to_tts, PResult};
+use syntax::parse::{ParseSess, PResult};
 use syntax::parse::new_parser_from_source_str;
 use syntax::parse::parser::Parser;
 use syntax::parse::token;
......
@@ -32,13 +32,13 @@ pub fn plugin_registrar(reg: &mut Registry) {
 fn cond(input: TokenStream) -> TokenStream {
     let mut conds = Vec::new();
-    let mut input = input.trees();
+    let mut input = input.trees().peekable();
     while let Some(tree) = input.next() {
-        let cond: TokenStream = match *tree {
-            TokenTree::Delimited(_, ref delimited) => delimited.tts.iter().cloned().collect(),
+        let mut cond = match tree {
+            TokenTree::Delimited(_, ref delimited) => delimited.stream(),
             _ => panic!("Invalid input"),
         };
-        let mut trees = cond.trees().cloned();
+        let mut trees = cond.trees();
         let test = trees.next();
         let rhs = trees.collect::<TokenStream>();
         if rhs.is_empty() {
......
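
In the updated plugin, `TokenStream::trees()` yields owned `TokenTree`s and a delimited group exposes its body via `Delimited::stream()`, so the input is walked with a peekable iterator rather than by slicing. A rough sketch of that traversal, assuming the same `syntax` crate paths as the plugin above; the comma handling is illustrative only:

    // Sketch only: collect the body of each delimited group, peeking ahead to
    // skip an optional separating comma without consuming anything else.
    use syntax::parse::token::Token;
    use syntax::tokenstream::{TokenStream, TokenTree};

    fn delimited_bodies(input: TokenStream) -> Vec<TokenStream> {
        let mut out = Vec::new();
        let mut trees = input.trees().peekable();
        while let Some(tree) = trees.next() {
            match tree {
                // Each ( ... ), [ ... ] or { ... } group is itself a TokenStream now.
                TokenTree::Delimited(_, ref delimited) => out.push(delimited.stream()),
                _ => panic!("expected a delimited group"),
            }
            if let Some(&TokenTree::Token(_, Token::Comma)) = trees.peek() {
                trees.next();
            }
        }
        out
    }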
@@ -26,7 +26,7 @@
 use syntax::ptr::P;
 use syntax::symbol::Symbol;
 use syntax_pos::Span;
-use syntax::tokenstream;
+use syntax::tokenstream::TokenStream;
 use rustc_plugin::Registry;
 struct Expander {
@@ -37,7 +37,7 @@ impl TTMacroExpander for Expander {
     fn expand<'cx>(&self,
                    ecx: &'cx mut ExtCtxt,
                    sp: Span,
-                   _: &[tokenstream::TokenTree]) -> Box<MacResult+'cx> {
+                   _: TokenStream) -> Box<MacResult+'cx> {
         let args = self.args.iter().map(|i| pprust::meta_list_item_to_string(i))
             .collect::<Vec<_>>().join(", ");
         MacEager::expr(ecx.expr_str(sp, Symbol::intern(&args)))
......
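
`TTMacroExpander::expand` now receives the macro input as a `TokenStream` instead of a `&[TokenTree]` slice. A minimal expander in the style of the one above, assuming the same plugin imports; the body is illustrative and not part of this commit:

    // Sketch only: an expander that just reports how many top-level token trees
    // it was invoked with.
    struct CountTrees;

    impl TTMacroExpander for CountTrees {
        fn expand<'cx>(&self,
                       ecx: &'cx mut ExtCtxt,
                       sp: Span,
                       input: TokenStream) -> Box<MacResult+'cx> {
            let n = input.trees().count();
            MacEager::expr(ecx.expr_str(sp, Symbol::intern(&n.to_string())))
        }
    }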
@@ -35,8 +35,8 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree])
                       -> Box<MacResult + 'static> {
     let mbe_matcher = quote_tokens!(cx, $$matched:expr, $$($$pat:pat)|+);
-    let mbe_matcher = quoted::parse(&mbe_matcher, true, cx.parse_sess);
-    let map = match TokenTree::parse(cx, &mbe_matcher, args) {
+    let mbe_matcher = quoted::parse(mbe_matcher.into_iter().collect(), true, cx.parse_sess);
+    let map = match TokenTree::parse(cx, &mbe_matcher, args.iter().cloned().collect()) {
         Success(map) => map,
         Failure(_, tok) => {
             panic!("expected Success, but got Failure: {}", parse_failure_msg(tok));
......