diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 7207463e1b9ab0c81df1fb00319164817fe519b4..c00d2952b3b425bcc87e6ac78be0bf9719c53a2c 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -598,7 +598,6 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> a
 #[cfg(test)]
 mod tests {
     use super::*;
-    use std::rc::Rc;
     use syntax_pos::{self, Span, BytePos, Pos, NO_EXPANSION};
     use codemap::Spanned;
     use ast::{self, Ident, PatKind};
@@ -609,7 +608,7 @@ mod tests {
     use print::pprust::item_to_string;
     use ptr::P;
     use tokenstream::{self, TokenTree};
-    use util::parser_testing::{string_to_tts, string_to_parser};
+    use util::parser_testing::{string_to_stream, string_to_parser};
     use util::parser_testing::{string_to_expr, string_to_item, string_to_stmt};
     use util::ThinVec;
 
@@ -654,7 +653,8 @@ fn sp(a: u32, b: u32) -> Span {
     // check the token-tree-ization of macros
     #[test]
     fn string_to_tts_macro () {
-        let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
+        let tts: Vec<_> =
+            string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
         let tts: &[TokenTree] = &tts[..];
 
         match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
@@ -667,7 +667,7 @@ fn string_to_tts_macro () {
             )
             if name_macro_rules.name == "macro_rules"
             && name_zip.name == "zip" => {
-                let tts = &macro_delimed.tts[..];
+                let tts = &macro_delimed.stream().trees().collect::<Vec<_>>();
                 match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                     (
                         3,
@@ -676,7 +676,7 @@ fn string_to_tts_macro () {
                        Some(&TokenTree::Delimited(_, ref second_delimed)),
                    )
                    if macro_delimed.delim == token::Paren => {
-                        let tts = &first_delimed.tts[..];
+                        let tts = &first_delimed.stream().trees().collect::<Vec<_>>();
                        match (tts.len(), tts.get(0), tts.get(1)) {
                            (
                                2,
@@ -684,9 +684,9 @@ fn string_to_tts_macro () {
                                Some(&TokenTree::Token(_, token::Ident(ident))),
                            )
                            if first_delimed.delim == token::Paren && ident.name == "a" => {},
-                            _ => panic!("value 3: {:?}", **first_delimed),
+                            _ => panic!("value 3: {:?}", *first_delimed),
                        }
-                        let tts = &second_delimed.tts[..];
+                        let tts = &second_delimed.stream().trees().collect::<Vec<_>>();
                        match (tts.len(), tts.get(0), tts.get(1)) {
                            (
                                2,
@@ -695,10 +695,10 @@ fn string_to_tts_macro () {
                                Some(&TokenTree::Token(_, token::Ident(ident))),
                            )
                            if second_delimed.delim == token::Paren && ident.name == "a" => {},
-                            _ => panic!("value 4: {:?}", **second_delimed),
+                            _ => panic!("value 4: {:?}", *second_delimed),
                        }
                    },
-                    _ => panic!("value 2: {:?}", **macro_delimed),
+                    _ => panic!("value 2: {:?}", *macro_delimed),
                }
            },
            _ => panic!("value: {:?}",tts),
@@ -707,31 +707,31 @@ fn string_to_tts_macro () {
 
     #[test]
     fn string_to_tts_1() {
-        let tts = string_to_tts("fn a (b : i32) { b; }".to_string());
+        let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
 
-        let expected = vec![
-            TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))),
-            TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))),
+        let expected = TokenStream::concat(vec![
+            TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))).into(),
+            TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))).into(),
             TokenTree::Delimited(
                 sp(5, 14),
-                Rc::new(tokenstream::Delimited {
+                tokenstream::Delimited {
                     delim: token::DelimToken::Paren,
-                    tts: vec![
-                        TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))),
-                        TokenTree::Token(sp(8, 9), token::Colon),
-                        TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))),
-                    ],
-                })),
+                    tts: TokenStream::concat(vec![
+                        TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))).into(),
+                        TokenTree::Token(sp(8, 9), token::Colon).into(),
+                        TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))).into(),
+                    ]).into(),
+                }).into(),
             TokenTree::Delimited(
                 sp(15, 21),
-                Rc::new(tokenstream::Delimited {
+                tokenstream::Delimited {
                     delim: token::DelimToken::Brace,
-                    tts: vec![
-                        TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))),
-                        TokenTree::Token(sp(18, 19), token::Semi),
-                    ],
-                }))
-        ];
+                    tts: TokenStream::concat(vec![
+                        TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))).into(),
+                        TokenTree::Token(sp(18, 19), token::Semi).into(),
+                    ]).into(),
+                }).into()
+        ]);
 
         assert_eq!(tts, expected);
     }
@@ -974,8 +974,8 @@ fn ttdelim_span() {
         let expr = parse::parse_expr_from_source_str("foo".to_string(),
            "foo!( fn main() { body } )".to_string(), &sess).unwrap();
 
-        let tts = match expr.node {
-            ast::ExprKind::Mac(ref mac) => mac.node.tts.clone(),
+        let tts: Vec<_> = match expr.node {
+            ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(),
            _ => panic!("not a macro"),
        };
 
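A quick illustration of the new test-side idiom introduced above, for anyone updating similar tests: `string_to_stream` hands back a `TokenStream`, individual trees are pulled out with `.trees()`, and a `Delimited` group's contents are read through `Delimited::stream()` rather than its old `tts: Vec<TokenTree>` field. This is a minimal sketch, written as if it sat next to the tests being modified inside libsyntax's own `parse::tests` module (hence the crate-internal paths); it is not part of the patch.

```rust
use tokenstream::TokenTree;
use util::parser_testing::string_to_stream;

#[test]
fn delimited_contents_via_stream() {
    // `string_to_stream` replaces `string_to_tts`: it yields a `TokenStream`,
    // and tests that want random access collect its trees into a Vec.
    let trees: Vec<TokenTree> =
        string_to_stream("fn a (b : i32) { b; }".to_string()).trees().collect();
    assert_eq!(trees.len(), 4); // `fn`, `a`, `( ... )`, `{ ... }`

    match trees[2] {
        TokenTree::Delimited(_, ref delimited) => {
            // The `tts` field is no longer a `Vec<TokenTree>`; the contents
            // come back as a stream via `Delimited::stream()`.
            assert_eq!(delimited.stream().trees().count(), 3); // `b`, `:`, `i32`
        }
        _ => panic!("expected a delimited group"),
    }
}
```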
diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs
index e703dc6b4191c50f933fa82e98777eb9cbf189fe..51eb295b502a70bd764ede600fbbc877025742c4 100644
--- a/src/libsyntax/util/parser_testing.rs
+++ b/src/libsyntax/util/parser_testing.rs
@@ -9,17 +9,17 @@
 // except according to those terms.
 
 use ast::{self, Ident};
-use parse::{ParseSess,PResult,filemap_to_tts};
+use parse::{ParseSess, PResult, filemap_to_stream};
 use parse::{lexer, new_parser_from_source_str};
 use parse::parser::Parser;
 use ptr::P;
-use tokenstream;
+use tokenstream::TokenStream;
 use std::iter::Peekable;
 
 /// Map a string to tts, using a made-up filename:
-pub fn string_to_tts(source_str: String) -> Vec<tokenstream::TokenTree> {
+pub fn string_to_stream(source_str: String) -> TokenStream {
     let ps = ParseSess::new();
-    filemap_to_tts(&ps, ps.codemap().new_filemap("bogofile".to_string(), None, source_str))
+    filemap_to_stream(&ps, ps.codemap().new_filemap("bogofile".to_string(), None, source_str))
 }
 
 /// Map string to parser (via tts)
diff --git a/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs b/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs
index a41b34f6a53d0a3e08e5c32a6077b80b00ec414a..5139b68bce7fd0d6686aa3293ee75697567c5c7f 100644
--- a/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs
+++ b/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs
@@ -18,7 +18,7 @@
 use syntax::attr::*;
 use syntax::ast;
 use syntax::parse;
-use syntax::parse::{ParseSess,filemap_to_tts, PResult};
+use syntax::parse::{ParseSess, PResult};
 use syntax::parse::new_parser_from_source_str;
 use syntax::parse::parser::Parser;
 use syntax::parse::token;
diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs
index 730e112c70016f9fd2bdfb78108ca49571c65125..2f94a440e72da3ca5342883acb0d1660ce27e28c 100644
--- a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs
@@ -32,13 +32,13 @@ pub fn plugin_registrar(reg: &mut Registry) {
 
 fn cond(input: TokenStream) -> TokenStream {
     let mut conds = Vec::new();
-    let mut input = input.trees();
+    let mut input = input.trees().peekable();
     while let Some(tree) = input.next() {
-        let cond: TokenStream = match *tree {
-            TokenTree::Delimited(_, ref delimited) => delimited.tts.iter().cloned().collect(),
+        let mut cond = match tree {
+            TokenTree::Delimited(_, ref delimited) => delimited.stream(),
            _ => panic!("Invalid input"),
        };
-        let mut trees = cond.trees().cloned();
+        let mut trees = cond.trees();
         let test = trees.next();
         let rhs = trees.collect::<TokenStream>();
         if rhs.is_empty() {
diff --git a/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs b/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs
index 3c8868f1664e8185d471185b3ecd872bb80ff94f..134e36c587bede9bd15684f3dc20c3fc0c3cf91d 100644
--- a/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs
@@ -26,7 +26,7 @@
 use syntax::ptr::P;
 use syntax::symbol::Symbol;
 use syntax_pos::Span;
-use syntax::tokenstream;
+use syntax::tokenstream::TokenStream;
 use rustc_plugin::Registry;
 
 struct Expander {
@@ -37,7 +37,7 @@ impl TTMacroExpander for Expander {
     fn expand<'cx>(&self,
                    ecx: &'cx mut ExtCtxt,
                    sp: Span,
-                   _: &[tokenstream::TokenTree]) -> Box<MacResult+'cx> {
+                   _: TokenStream) -> Box<MacResult+'cx> {
         let args = self.args.iter().map(|i| pprust::meta_list_item_to_string(i))
                        .collect::<Vec<_>>().join(", ");
         MacEager::expr(ecx.expr_str(sp, Symbol::intern(&args)))
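For plugin authors, the visible API change is the `TTMacroExpander::expand` signature: the macro input now arrives as an owned `TokenStream` rather than `&[TokenTree]`. Below is a minimal sketch of an expander under the new signature, following the paths used by the plugin above; `CountTrees` and its behaviour are invented for illustration, and plugin registration (unchanged by this patch) is omitted.

```rust
#![feature(rustc_private)]

extern crate syntax;
extern crate syntax_pos;

use syntax::ext::base::{ExtCtxt, MacEager, MacResult, TTMacroExpander};
use syntax::ext::build::AstBuilder;
use syntax::symbol::Symbol;
use syntax::tokenstream::TokenStream;
use syntax_pos::Span;

// Hypothetical expander, for illustration only.
struct CountTrees;

impl TTMacroExpander for CountTrees {
    fn expand<'cx>(&self,
                   ecx: &'cx mut ExtCtxt,
                   sp: Span,
                   input: TokenStream) -> Box<MacResult+'cx> {
        // The input is an owned stream; token trees are pulled out with
        // `.trees()` instead of being sliced from `&[TokenTree]`.
        let summary = format!("{} token trees", input.trees().count());
        MacEager::expr(ecx.expr_str(sp, Symbol::intern(&summary)))
    }
}
```

`MacEager::expr` still returns a `'static` result, which coerces to the `'cx`-bounded box the trait expects, so existing expander bodies mostly only need their parameter type updated.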
diff --git a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs
index 3db69f2167cc6e4ba7bc5fb89456a7761c532004..c9fa96b83c280a6d9325bef110aab04a74332fc4 100644
--- a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs
@@ -35,8 +35,8 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree])
         -> Box<MacResult + 'static> {
 
     let mbe_matcher = quote_tokens!(cx, $$matched:expr, $$($$pat:pat)|+);
-    let mbe_matcher = quoted::parse(&mbe_matcher, true, cx.parse_sess);
-    let map = match TokenTree::parse(cx, &mbe_matcher, args) {
+    let mbe_matcher = quoted::parse(mbe_matcher.into_iter().collect(), true, cx.parse_sess);
+    let map = match TokenTree::parse(cx, &mbe_matcher, args.iter().cloned().collect()) {
         Success(map) => map,
         Failure(_, tok) => {
             panic!("expected Success, but got Failure: {}", parse_failure_msg(tok));
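This last hunk shows the bridging idiom for code that still traffics in `Vec<TokenTree>` or `&[TokenTree]`: the conversions in both directions are plain iterator `collect()`s, which works on the assumption (relied on by the calls above) that `TokenStream` implements `FromIterator<TokenTree>`. Sketched in isolation as a small library-style fragment; both helper names are hypothetical.

```rust
#![feature(rustc_private)]

extern crate syntax;

use syntax::tokenstream::{TokenStream, TokenTree};

/// Hypothetical helper: turn a legacy token-tree slice into a stream,
/// mirroring `args.iter().cloned().collect()` in the hunk above.
fn slice_to_stream(tts: &[TokenTree]) -> TokenStream {
    tts.iter().cloned().collect()
}

/// Hypothetical helper: flatten a stream back into owned trees for callers
/// that have not been converted yet.
fn stream_to_trees(stream: TokenStream) -> Vec<TokenTree> {
    stream.trees().collect()
}
```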