Commit 665ad9c1 authored by Brendan Zabarauskas

Move token-to-string functions into print::pprust

Parent commit: cd049591
......@@ -87,6 +87,7 @@
use parse::parser::{LifetimeAndTypesWithoutColons, Parser};
use parse::token::{Token, Nonterminal};
use parse::token;
use print::pprust;
use ptr::P;
use std::rc::Rc;
......@@ -402,7 +403,7 @@ pub fn parse(sess: &ParseSess,
nts, next_eis.len()).to_string());
} else if bb_eis.len() == 0u && next_eis.len() == 0u {
return Failure(sp, format!("no rules expected the token `{}`",
token::to_string(&tok)).to_string());
pprust::token_to_string(&tok)).to_string());
} else if next_eis.len() > 0u {
/* Now process the next token */
while next_eis.len() > 0u {
......@@ -449,7 +450,7 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal {
"ident" => match p.token {
token::Ident(sn,b) => { p.bump(); token::NtIdent(box sn,b) }
_ => {
let token_str = token::to_string(&p.token);
let token_str = pprust::token_to_string(&p.token);
p.fatal((format!("expected ident, found {}",
token_str.as_slice())).as_slice())
}
......
......@@ -15,7 +15,7 @@
use parse::lexer::{StringReader, TokenAndSpan};
use parse::lexer::is_block_doc_comment;
use parse::lexer;
use parse::token;
use print::pprust;
use std::io;
use std::str;
......@@ -373,7 +373,7 @@ pub fn gather_comments_and_literals(span_diagnostic: &diagnostic::SpanHandler,
literals.push(Literal {lit: s.to_string(), pos: sp.lo});
})
} else {
debug!("tok: {}", token::to_string(&tok));
debug!("tok: {}", pprust::token_to_string(&tok));
}
first_read = false;
}
......
......@@ -78,6 +78,7 @@
use parse::token::{keywords, special_idents};
use parse::token;
use parse::{new_sub_parser_from_file, ParseSess};
use print::pprust;
use ptr::P;
use owned_slice::OwnedSlice;
......@@ -394,7 +395,7 @@ pub fn new(sess: &'a ParseSess,
/// Convert a token to a string using self's reader
pub fn token_to_string(token: &token::Token) -> String {
token::to_string(token)
pprust::token_to_string(token)
}
/// Convert the current token to a string using self's reader
......
......@@ -431,101 +431,6 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
}
}
/// Render a binary-operator token as the exact source symbol it was
/// lexed from (e.g. `Plus` -> "+", `Shl` -> "<<").
///
/// NOTE(review): this is the pre-move copy in `parse::token`; the commit
/// deletes it in favour of `print::pprust::binop_to_string` below.
pub fn binop_to_string(o: BinOpToken) -> &'static str {
    match o {
        Plus => "+",
        Minus => "-",
        Star => "*",
        Slash => "/",
        Percent => "%",
        Caret => "^",
        And => "&",
        Or => "|",
        Shl => "<<",
        Shr => ">>",
    }
}
/// Render a `Token` back to a source-text representation.
///
/// Symbolic tokens map to their literal spelling; literal tokens are
/// re-wrapped in their delimiters (quotes, `r#...#` raw-string hashes,
/// `b"..."` byte-string prefixes); interpolated nonterminals either
/// pretty-print their payload (expressions, meta items, types, paths)
/// or fall back to a human-readable placeholder string.
///
/// NOTE(review): this is the pre-move copy in `parse::token`; the commit
/// deletes it in favour of `print::pprust::token_to_string`.
pub fn to_string(t: &Token) -> String {
    match *t {
        Eq => "=".into_string(),
        Lt => "<".into_string(),
        Le => "<=".into_string(),
        EqEq => "==".into_string(),
        Ne => "!=".into_string(),
        Ge => ">=".into_string(),
        Gt => ">".into_string(),
        Not => "!".into_string(),
        Tilde => "~".into_string(),
        OrOr => "||".into_string(),
        AndAnd => "&&".into_string(),
        BinOp(op) => binop_to_string(op).into_string(),
        // Compound assignment: the operator symbol followed by `=`.
        BinOpEq(op) => format!("{}=", binop_to_string(op)),

        /* Structural symbols */
        At => "@".into_string(),
        Dot => ".".into_string(),
        DotDot => "..".into_string(),
        DotDotDot => "...".into_string(),
        Comma => ",".into_string(),
        Semi => ";".into_string(),
        Colon => ":".into_string(),
        ModSep => "::".into_string(),
        RArrow => "->".into_string(),
        LArrow => "<-".into_string(),
        FatArrow => "=>".into_string(),
        LParen => "(".into_string(),
        RParen => ")".into_string(),
        LBracket => "[".into_string(),
        RBracket => "]".into_string(),
        LBrace => "{".into_string(),
        RBrace => "}".into_string(),
        Pound => "#".into_string(),
        Dollar => "$".into_string(),
        Question => "?".into_string(),

        /* Literals: re-attach the delimiters the lexer stripped. */
        LitByte(b) => format!("b'{}'", b.as_str()),
        LitChar(c) => format!("'{}'", c.as_str()),
        LitFloat(c) => c.as_str().into_string(),
        LitInteger(c) => c.as_str().into_string(),
        LitStr(s) => format!("\"{}\"", s.as_str()),
        // Raw string: `n` is the number of `#` marks on each side.
        LitStrRaw(s, n) => format!("r{delim}\"{string}\"{delim}",
                                   delim="#".repeat(n),
                                   string=s.as_str()),
        LitBinary(v) => format!("b\"{}\"", v.as_str()),
        LitBinaryRaw(s, n) => format!("br{delim}\"{string}\"{delim}",
                                      delim="#".repeat(n),
                                      string=s.as_str()),

        /* Name components */
        Ident(s, _) => get_ident(s).get().into_string(),
        Lifetime(s) => format!("{}", get_ident(s)),
        Underscore => "_".into_string(),

        /* Other */
        DocComment(s) => s.as_str().into_string(),
        Eof => "<eof>".into_string(),
        Whitespace => " ".into_string(),
        Comment => "/* */".into_string(),
        Shebang(s) => format!("/* shebang: {}*/", s.as_str()),

        // Interpolated AST fragments: pretty-print the ones pprust can
        // handle; the rest get descriptive placeholders.
        Interpolated(ref nt) => match *nt {
            NtExpr(ref e) => ::print::pprust::expr_to_string(&**e),
            NtMeta(ref e) => ::print::pprust::meta_item_to_string(&**e),
            NtTy(ref e) => ::print::pprust::ty_to_string(&**e),
            NtPath(ref e) => ::print::pprust::path_to_string(&**e),
            NtItem(..) => "an interpolated item".into_string(),
            NtBlock(..) => "an interpolated block".into_string(),
            NtStmt(..) => "an interpolated statement".into_string(),
            NtPat(..) => "an interpolated pattern".into_string(),
            NtIdent(..) => "an interpolated identifier".into_string(),
            NtTT(..) => "an interpolated tt".into_string(),
            NtMatchers(..) => "an interpolated matcher sequence".into_string(),
        }
    }
}
// Get the first "argument"
macro_rules! first {
( $first:expr, $( $remainder:expr, )* ) => ( $first )
......
......@@ -21,6 +21,7 @@
use codemap::{CodeMap, BytePos};
use codemap;
use diagnostic;
use parse::token::{BinOpToken, Token};
use parse::token;
use parse::lexer::comments;
use parse;
......@@ -181,6 +182,101 @@ pub fn to_string(f: |&mut State| -> IoResult<()>) -> String {
}
}
/// Render a binary-operator token as the exact source symbol it was
/// lexed from (e.g. `token::Plus` -> "+", `token::Shl` -> "<<").
///
/// Added by this commit: the relocated home of the former
/// `parse::token::binop_to_string`, now living in `print::pprust` so the
/// pretty-printer owns all token-to-text conversion.
pub fn binop_to_string(op: BinOpToken) -> &'static str {
    match op {
        token::Plus => "+",
        token::Minus => "-",
        token::Star => "*",
        token::Slash => "/",
        token::Percent => "%",
        token::Caret => "^",
        token::And => "&",
        token::Or => "|",
        token::Shl => "<<",
        token::Shr => ">>",
    }
}
/// Render a `Token` back to a source-text representation.
///
/// Symbolic tokens map to their literal spelling; literal tokens are
/// re-wrapped in their delimiters (quotes, `r#...#` raw-string hashes,
/// `b"..."` byte-string prefixes); interpolated nonterminals either
/// pretty-print their payload (expressions, meta items, types, paths)
/// or fall back to a human-readable placeholder string.
///
/// Added by this commit: the relocated home of the former
/// `parse::token::to_string`, with variants qualified via `token::` and
/// the pprust helpers now called unqualified.
pub fn token_to_string(tok: &Token) -> String {
    match *tok {
        token::Eq => "=".into_string(),
        token::Lt => "<".into_string(),
        token::Le => "<=".into_string(),
        token::EqEq => "==".into_string(),
        token::Ne => "!=".into_string(),
        token::Ge => ">=".into_string(),
        token::Gt => ">".into_string(),
        token::Not => "!".into_string(),
        token::Tilde => "~".into_string(),
        token::OrOr => "||".into_string(),
        token::AndAnd => "&&".into_string(),
        token::BinOp(op) => binop_to_string(op).into_string(),
        // Compound assignment: the operator symbol followed by `=`.
        token::BinOpEq(op) => format!("{}=", binop_to_string(op)),

        /* Structural symbols */
        token::At => "@".into_string(),
        token::Dot => ".".into_string(),
        token::DotDot => "..".into_string(),
        token::DotDotDot => "...".into_string(),
        token::Comma => ",".into_string(),
        token::Semi => ";".into_string(),
        token::Colon => ":".into_string(),
        token::ModSep => "::".into_string(),
        token::RArrow => "->".into_string(),
        token::LArrow => "<-".into_string(),
        token::FatArrow => "=>".into_string(),
        token::LParen => "(".into_string(),
        token::RParen => ")".into_string(),
        token::LBracket => "[".into_string(),
        token::RBracket => "]".into_string(),
        token::LBrace => "{".into_string(),
        token::RBrace => "}".into_string(),
        token::Pound => "#".into_string(),
        token::Dollar => "$".into_string(),
        token::Question => "?".into_string(),

        /* Literals: re-attach the delimiters the lexer stripped. */
        token::LitByte(b) => format!("b'{}'", b.as_str()),
        token::LitChar(c) => format!("'{}'", c.as_str()),
        token::LitFloat(c) => c.as_str().into_string(),
        token::LitInteger(c) => c.as_str().into_string(),
        token::LitStr(s) => format!("\"{}\"", s.as_str()),
        // Raw string: `n` is the number of `#` marks on each side.
        token::LitStrRaw(s, n) => format!("r{delim}\"{string}\"{delim}",
                                          delim="#".repeat(n),
                                          string=s.as_str()),
        token::LitBinary(v) => format!("b\"{}\"", v.as_str()),
        token::LitBinaryRaw(s, n) => format!("br{delim}\"{string}\"{delim}",
                                             delim="#".repeat(n),
                                             string=s.as_str()),

        /* Name components */
        token::Ident(s, _) => token::get_ident(s).get().into_string(),
        token::Lifetime(s) => format!("{}", token::get_ident(s)),
        token::Underscore => "_".into_string(),

        /* Other */
        token::DocComment(s) => s.as_str().into_string(),
        token::Eof => "<eof>".into_string(),
        token::Whitespace => " ".into_string(),
        token::Comment => "/* */".into_string(),
        token::Shebang(s) => format!("/* shebang: {}*/", s.as_str()),

        // Interpolated AST fragments: pretty-print the ones this module
        // can handle; the rest get descriptive placeholders.
        token::Interpolated(ref nt) => match *nt {
            token::NtExpr(ref e) => expr_to_string(&**e),
            token::NtMeta(ref e) => meta_item_to_string(&**e),
            token::NtTy(ref e) => ty_to_string(&**e),
            token::NtPath(ref e) => path_to_string(&**e),
            token::NtItem(..) => "an interpolated item".into_string(),
            token::NtBlock(..) => "an interpolated block".into_string(),
            token::NtStmt(..) => "an interpolated statement".into_string(),
            token::NtPat(..) => "an interpolated pattern".into_string(),
            token::NtIdent(..) => "an interpolated identifier".into_string(),
            token::NtTT(..) => "an interpolated tt".into_string(),
            token::NtMatchers(..) => "an interpolated matcher sequence".into_string(),
        }
    }
}
// FIXME (Issue #16472): the thing_to_string_impls macro should go away
// after we revise the syntax::ext::quote::ToToken impls to go directly
// to token-trees instead of thing -> string -> token-trees.
......@@ -1026,14 +1122,14 @@ pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
match *tt {
ast::TtDelimited(_, ref delimed) => {
let (ref open, ref tts, ref close) = **delimed;
try!(word(&mut self.s, parse::token::to_string(&open.token).as_slice()));
try!(word(&mut self.s, token_to_string(&open.token).as_slice()));
try!(space(&mut self.s));
try!(self.print_tts(tts.as_slice()));
try!(space(&mut self.s));
word(&mut self.s, parse::token::to_string(&close.token).as_slice())
word(&mut self.s, token_to_string(&close.token).as_slice())
},
ast::TtToken(_, ref tk) => {
try!(word(&mut self.s, parse::token::to_string(tk).as_slice()));
try!(word(&mut self.s, token_to_string(tk).as_slice()));
match *tk {
parse::token::DocComment(..) => {
hardbreak(&mut self.s)
......@@ -1049,10 +1145,9 @@ pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
try!(word(&mut self.s, ")"));
match *separator {
Some(ref tk) => {
try!(word(&mut self.s,
parse::token::to_string(tk).as_slice()));
try!(word(&mut self.s, token_to_string(tk).as_slice()));
}
None => ()
None => {},
}
match kleene_op {
ast::ZeroOrMore => word(&mut self.s, "*"),
......
Markdown is supported.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To comment, please register or sign in.