提交 1153b5dc 编写于 作者: P Paul Stansifer

intern identifiers

上级 7317bf87
......@@ -225,7 +225,7 @@ fn load_link(mis: ~[@ast::meta_item]) -> (option<~str>,
for mis.each |a| {
match a.node {
ast::meta_name_value(v, {node: ast::lit_str(s), span: _}) => {
match *v {
match v {
~"name" => name = some(*s),
~"vers" => vers = some(*s),
~"uuid" => uuid = some(*s),
......@@ -252,15 +252,15 @@ fn load_crate(filename: ~str) -> option<crate> {
for c.node.attrs.each |a| {
match a.node.value.node {
ast::meta_name_value(v, {node: ast::lit_str(s), span: _}) => {
match *v {
~"desc" => desc = some(*v),
~"sigs" => sigs = some(*v),
~"crate_type" => crate_type = some(*v),
match v {
~"desc" => desc = some(v),
~"sigs" => sigs = some(v),
~"crate_type" => crate_type = some(v),
_ => { }
}
}
ast::meta_list(v, mis) => {
if *v == ~"link" {
if v == ~"link" {
let (n, v, u) = load_link(mis);
name = n;
vers = v;
......@@ -278,13 +278,15 @@ fn load_crate(filename: ~str) -> option<crate> {
mut deps: ~[~str]
};
fn goto_view_item(e: env, i: @ast::view_item) {
fn goto_view_item(ps: syntax::parse::parse_sess, e: env,
i: @ast::view_item) {
match i.node {
ast::view_item_use(ident, metas, id) => {
let name_items =
attr::find_meta_items_by_name(metas, ~"name");
let m = if name_items.is_empty() {
metas + ~[attr::mk_name_value_item_str(@~"name", *ident)]
metas + ~[attr::mk_name_value_item_str(
~"name", *ps.interner.get(ident))]
} else {
metas
};
......@@ -297,9 +299,9 @@ fn goto_view_item(e: env, i: @ast::view_item) {
some(value) => {
let name = attr::get_meta_item_name(item);
match *name {
~"vers" => attr_vers = *value,
~"from" => attr_from = *value,
match name {
~"vers" => attr_vers = value,
~"from" => attr_from = value,
_ => ()
}
}
......@@ -311,11 +313,11 @@ fn goto_view_item(e: env, i: @ast::view_item) {
attr_from
} else {
if !str::is_empty(attr_vers) {
*attr_name + ~"@" + attr_vers
} else { *attr_name }
ps.interner.get(attr_name) + ~"@" + attr_vers
} else { *ps.interner.get(attr_name) }
};
match *attr_name {
match *ps.interner.get(attr_name) {
~"std" | ~"core" => (),
_ => vec::push(e.deps, query)
}
......@@ -330,7 +332,7 @@ fn goto_item(_e: env, _i: @ast::item) {
mut deps: ~[]
};
let v = visit::mk_simple_visitor(@{
visit_view_item: |a| goto_view_item(e, a),
visit_view_item: |a| goto_view_item(sess, e, a),
visit_item: |a| goto_item(e, a),
with *visit::default_simple_visitor()
});
......
......@@ -238,20 +238,23 @@ fn check_variants_T<T: copy>(
filename: ~str,
thing_label: ~str,
things: ~[T],
stringifier: fn@(@T) -> ~str,
stringifier: fn@(@T, syntax::parse::token::ident_interner) -> ~str,
replacer: fn@(ast::crate, uint, T, test_mode) -> ast::crate,
cx: context
) {
error!{"%s contains %u %s objects", filename,
vec::len(things), thing_label};
// Assuming we're not generating any token_trees
let intr = syntax::parse::token::mk_fake_ident_interner();
let L = vec::len(things);
if L < 100u {
do under(uint::min(L, 20u)) |i| {
log(error, ~"Replacing... #" + uint::str(i));
do under(uint::min(L, 30u)) |j| {
log(error, ~"With... " + stringifier(@things[j]));
log(error, ~"With... " + stringifier(@things[j], intr));
let crate2 = @replacer(crate, i, things[j], cx.mode);
// It would be best to test the *crate* for stability, but
// testing the string for stability is easier and ok for now.
......@@ -259,8 +262,7 @@ fn check_variants_T<T: copy>(
let str3 =
@as_str(|a|pprust::print_crate(
codemap,
// Assuming we're not generating any token_trees
syntax::parse::token::mk_ident_interner(),
intr,
diagnostic::mk_span_handler(handler, codemap),
crate2,
filename,
......@@ -422,7 +424,7 @@ fn parse_and_print(code: @~str) -> ~str {
pprust::print_crate(
sess.cm,
// Assuming there are no token_trees
syntax::parse::token::mk_ident_interner(),
syntax::parse::token::mk_fake_ident_interner(),
sess.span_diagnostic,
crate,
filename,
......@@ -570,7 +572,7 @@ fn check_variants(files: ~[~str], cx: context) {
as_str(|a| pprust::print_crate(
sess.cm,
// Assuming no token_trees
syntax::parse::token::mk_ident_interner(),
syntax::parse::token::mk_fake_ident_interner(),
sess.span_diagnostic,
crate,
file,
......
......@@ -30,8 +30,18 @@ fn deserialize_span<D>(_d: D) -> span {
#[auto_serialize]
type spanned<T> = {node: T, span: span};
#[auto_serialize]
type ident = @~str;
fn serialize_ident<S: serializer>(s: S, i: ident) {
let intr = unsafe{ task::local_data_get(parse::token::interner_key) };
s.emit_str(*(*intr.get()).get(i));
}
fn deserialize_ident<D: deserializer>(d: D) -> ident {
let intr = unsafe{ task::local_data_get(parse::token::interner_key) };
(*intr.get()).intern(@d.read_str())
}
type ident = token::str_num;
// Functions may or may not have names.
#[auto_serialize]
......@@ -127,9 +137,9 @@ enum crate_directive_ {
#[auto_serialize]
enum meta_item_ {
meta_word(ident),
meta_list(ident, ~[@meta_item]),
meta_name_value(ident, lit),
meta_word(~str),
meta_list(~str, ~[@meta_item]),
meta_name_value(~str, lit),
}
#[auto_serialize]
......@@ -815,36 +825,6 @@ enum inlined_item {
ii_dtor(class_dtor, ident, ~[ty_param], def_id /* parent id */)
}
// Convenience functions
pure fn simple_path(id: ident, span: span) -> @path {
@{span: span,
global: false,
idents: ~[id],
rp: none,
types: ~[]}
}
pure fn empty_span() -> span {
{lo: 0, hi: 0, expn_info: none}
}
// Convenience implementations
impl ident: ops::add<ident,@path> {
pure fn add(&&id: ident) -> @path {
simple_path(self, empty_span()) + id
}
}
impl @path: ops::add<ident,@path> {
pure fn add(&&id: ident) -> @path {
@{
idents: vec::append_one(self.idents, id)
with *self
}
}
}
//
// Local Variables:
......
......@@ -4,31 +4,33 @@
import print::pprust;
import ast_util::{path_to_ident, stmt_id};
import diagnostic::span_handler;
import parse::token::ident_interner;
enum path_elt { path_mod(ident), path_name(ident) }
type path = ~[path_elt];
/* FIXMEs that say "bad" are as per #2543 */
fn path_to_str_with_sep(p: path, sep: ~str) -> ~str {
fn path_to_str_with_sep(p: path, sep: ~str, itr: ident_interner) -> ~str {
let strs = do vec::map(p) |e| {
match e {
path_mod(s) => /* FIXME (#2543) */ copy *s,
path_name(s) => /* FIXME (#2543) */ copy *s
path_mod(s) => *itr.get(s),
path_name(s) => *itr.get(s)
}
};
str::connect(strs, sep)
}
fn path_ident_to_str(p: path, i: ident) -> ~str {
fn path_ident_to_str(p: path, i: ident, itr: ident_interner) -> ~str {
if vec::is_empty(p) {
/* FIXME (#2543) */ copy *i
//FIXME /* FIXME (#2543) */ copy *i
*itr.get(i)
} else {
fmt!{"%s::%s", path_to_str(p), *i}
fmt!{"%s::%s", path_to_str(p, itr), *itr.get(i)}
}
}
fn path_to_str(p: path) -> ~str {
path_to_str_with_sep(p, ~"::")
fn path_to_str(p: path, itr: ident_interner) -> ~str {
path_to_str_with_sep(p, ~"::", itr)
}
enum ast_node {
......@@ -291,43 +293,42 @@ fn map_stmt(stmt: @stmt, cx: ctx, v: vt) {
visit::visit_stmt(stmt, cx, v);
}
fn node_id_to_str(map: map, id: node_id) -> ~str {
fn node_id_to_str(map: map, id: node_id, itr: ident_interner) -> ~str {
match map.find(id) {
none => {
fmt!{"unknown node (id=%d)", id}
}
some(node_item(item, path)) => {
fmt!{"item %s (id=%?)", path_ident_to_str(*path, item.ident), id}
fmt!{"item %s (id=%?)", path_ident_to_str(*path, item.ident, itr), id}
}
some(node_foreign_item(item, abi, path)) => {
fmt!{"foreign item %s with abi %? (id=%?)",
path_ident_to_str(*path, item.ident), abi, id}
path_ident_to_str(*path, item.ident, itr), abi, id}
}
some(node_method(m, impl_did, path)) => {
fmt!{"method %s in %s (id=%?)",
*m.ident, path_to_str(*path), id}
*itr.get(m.ident), path_to_str(*path, itr), id}
}
some(node_trait_method(tm, impl_did, path)) => {
let m = ast_util::trait_method_to_ty_method(*tm);
fmt!{"method %s in %s (id=%?)",
*m.ident, path_to_str(*path), id}
*itr.get(m.ident), path_to_str(*path, itr), id}
}
some(node_variant(variant, def_id, path)) => {
fmt!{"variant %s in %s (id=%?)",
*variant.node.name, path_to_str(*path), id}
*itr.get(variant.node.name), path_to_str(*path, itr), id}
}
some(node_expr(expr)) => {
fmt!{"expr %s (id=%?)",
pprust::expr_to_str(expr), id}
fmt!{"expr %s (id=%?)", pprust::expr_to_str(expr, itr), id}
}
some(node_stmt(stmt)) => {
fmt!{"stmt %s (id=%?)",
pprust::stmt_to_str(*stmt), id}
pprust::stmt_to_str(*stmt, itr), id}
}
// FIXMEs are as per #2410
some(node_export(_, path)) => {
fmt!{"export %s (id=%?)", // add more info here
path_to_str(*path), id}
path_to_str(*path, itr), id}
}
some(node_arg(_, _)) => { // add more info here
fmt!{"arg (id=%?)", id}
......
......@@ -21,13 +21,14 @@
// make this a const, once the compiler supports it
pure fn dummy_sp() -> span { return mk_sp(0u, 0u); }
pure fn path_name(p: @path) -> ~str { path_name_i(p.idents) }
pure fn path_name_i(idents: ~[ident]) -> ~str {
pure fn path_name_i(idents: ~[ident], intr: token::ident_interner) -> ~str {
// FIXME: Bad copies (#2543 -- same for everything else that says "bad")
str::connect(idents.map(|i|*i), ~"::")
str::connect(idents.map(|i| *intr.get(i)), ~"::")
}
pure fn path_to_ident(p: @path) -> ident { vec::last(p.idents) }
pure fn local_def(id: node_id) -> def_id { {crate: local_crate, node: id} }
......@@ -408,7 +409,8 @@ fn dtor_dec() -> fn_decl {
let nil_t = @{id: 0, node: ty_nil, span: dummy_sp()};
// dtor has one argument, of type ()
{inputs: ~[{mode: ast::expl(ast::by_ref),
ty: nil_t, ident: @~"_", id: 0}],
ty: nil_t, ident: parse::token::special_idents::underscore,
id: 0}],
output: nil_t, purity: impure_fn, cf: return_val}
}
......
......@@ -49,23 +49,23 @@
/* Constructors */
fn mk_name_value_item_str(+name: ast::ident, +value: ~str) ->
fn mk_name_value_item_str(name: ~str, +value: ~str) ->
@ast::meta_item {
let value_lit = dummy_spanned(ast::lit_str(@value));
return mk_name_value_item(name, value_lit);
}
fn mk_name_value_item(+name: ast::ident, +value: ast::lit)
fn mk_name_value_item(name: ~str, +value: ast::lit)
-> @ast::meta_item {
return @dummy_spanned(ast::meta_name_value(name, value));
}
fn mk_list_item(+name: ast::ident, +items: ~[@ast::meta_item]) ->
fn mk_list_item(name: ~str, +items: ~[@ast::meta_item]) ->
@ast::meta_item {
return @dummy_spanned(ast::meta_list(name, items));
}
fn mk_word_item(+name: ast::ident) -> @ast::meta_item {
fn mk_word_item(name: ~str) -> @ast::meta_item {
return @dummy_spanned(ast::meta_word(name));
}
......@@ -78,7 +78,7 @@ fn mk_sugared_doc_attr(text: ~str, lo: uint, hi: uint) -> ast::attribute {
let lit = spanned(lo, hi, ast::lit_str(@text));
let attr = {
style: doc_comment_style(text),
value: spanned(lo, hi, ast::meta_name_value(@~"doc", lit)),
value: spanned(lo, hi, ast::meta_name_value(~"doc", lit)),
is_sugared_doc: true
};
return spanned(lo, hi, attr);
......@@ -98,8 +98,8 @@ fn attr_metas(attrs: ~[ast::attribute]) -> ~[@ast::meta_item] {
fn desugar_doc_attr(attr: ast::attribute) -> ast::attribute {
if attr.node.is_sugared_doc {
let comment = get_meta_item_value_str(@attr.node.value).get();
let meta = mk_name_value_item_str(@~"doc",
strip_doc_comment_decoration(*comment));
let meta = mk_name_value_item_str(~"doc",
strip_doc_comment_decoration(comment));
return mk_attr(meta);
} else {
attr
......@@ -108,16 +108,15 @@ fn desugar_doc_attr(attr: ast::attribute) -> ast::attribute {
/* Accessors */
fn get_attr_name(attr: ast::attribute) -> ast::ident {
fn get_attr_name(attr: ast::attribute) -> ~str {
get_meta_item_name(@attr.node.value)
}
// All "bad" FIXME copies are as per #2543
fn get_meta_item_name(meta: @ast::meta_item) -> ast::ident {
fn get_meta_item_name(meta: @ast::meta_item) -> ~str {
match meta.node {
ast::meta_word(n) => /* FIXME (#2543) */ copy n,
ast::meta_name_value(n, _) => /* FIXME (#2543) */ copy n,
ast::meta_list(n, _) => /* FIXME (#2543) */ copy n
ast::meta_word(n) => n,
ast::meta_name_value(n, _) => n,
ast::meta_list(n, _) => n
}
}
......@@ -125,13 +124,13 @@ fn get_meta_item_name(meta: @ast::meta_item) -> ast::ident {
* Gets the string value if the meta_item is a meta_name_value variant
* containing a string, otherwise none
*/
fn get_meta_item_value_str(meta: @ast::meta_item) -> option<@~str> {
fn get_meta_item_value_str(meta: @ast::meta_item) -> option<~str> {
match meta.node {
ast::meta_name_value(_, v) => match v.node {
ast::lit_str(s) => option::some(s),
ast::meta_name_value(_, v) => match v.node {
ast::lit_str(s) => option::some(*s),
_ => option::none
},
_ => option::none
},
_ => option::none
}
}
......@@ -147,9 +146,7 @@ fn get_meta_item_list(meta: @ast::meta_item) -> option<~[@ast::meta_item]> {
* If the meta item is a nam-value type with a string value then returns
* a tuple containing the name and string value, otherwise `none`
*/
fn get_name_value_str_pair(
item: @ast::meta_item
) -> option<(ast::ident, @~str)> {
fn get_name_value_str_pair(item: @ast::meta_item) -> option<(~str, ~str)> {
match attr::get_meta_item_value_str(item) {
some(value) => {
let name = attr::get_meta_item_name(item);
......@@ -163,11 +160,11 @@ fn get_name_value_str_pair(
/* Searching */
/// Search a list of attributes and return only those with a specific name
fn find_attrs_by_name(attrs: ~[ast::attribute], +name: ~str) ->
fn find_attrs_by_name(attrs: ~[ast::attribute], name: ~str) ->
~[ast::attribute] {
let filter = (
fn@(a: ast::attribute) -> option<ast::attribute> {
if *get_attr_name(a) == name {
if get_attr_name(a) == name {
option::some(a)
} else { option::none }
}
......@@ -176,10 +173,10 @@ fn find_attrs_by_name(attrs: ~[ast::attribute], +name: ~str) ->
}
/// Searcha list of meta items and return only those with a specific name
fn find_meta_items_by_name(metas: ~[@ast::meta_item], +name: ~str) ->
fn find_meta_items_by_name(metas: ~[@ast::meta_item], name: ~str) ->
~[@ast::meta_item] {
let filter = fn@(&&m: @ast::meta_item) -> option<@ast::meta_item> {
if *get_meta_item_name(m) == name {
if get_meta_item_name(m) == name {
option::some(m)
} else { option::none }
};
......@@ -191,14 +188,9 @@ fn find_meta_items_by_name(metas: ~[@ast::meta_item], +name: ~str) ->
* comparison is performed structurally.
*/
fn contains(haystack: ~[@ast::meta_item], needle: @ast::meta_item) -> bool {
debug!{"looking for %s",
print::pprust::meta_item_to_str(*needle)};
for haystack.each |item| {
debug!{"looking in %s",
print::pprust::meta_item_to_str(*item)};
if eq(item, needle) { debug!{"found it!"}; return true; }
if eq(item, needle) { return true; }
}
#debug("found it not :(");
return false;
}
......@@ -223,17 +215,18 @@ fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool {
}
}
fn contains_name(metas: ~[@ast::meta_item], +name: ~str) -> bool {
fn contains_name(metas: ~[@ast::meta_item], name: ~str) -> bool {
let matches = find_meta_items_by_name(metas, name);
return vec::len(matches) > 0u;
}
fn attrs_contains_name(attrs: ~[ast::attribute], +name: ~str) -> bool {
fn attrs_contains_name(attrs: ~[ast::attribute], name: ~str) -> bool {
vec::is_not_empty(find_attrs_by_name(attrs, name))
}
fn first_attr_value_str_by_name(attrs: ~[ast::attribute], +name: ~str)
-> option<@~str> {
fn first_attr_value_str_by_name(attrs: ~[ast::attribute], name: ~str)
-> option<~str> {
let mattrs = find_attrs_by_name(attrs, name);
if vec::len(mattrs) > 0u {
return get_meta_item_value_str(attr_meta(mattrs[0]));
......@@ -241,18 +234,16 @@ fn first_attr_value_str_by_name(attrs: ~[ast::attribute], +name: ~str)
return option::none;
}
fn last_meta_item_by_name(
items: ~[@ast::meta_item],
+name: ~str
) -> option<@ast::meta_item> {
fn last_meta_item_by_name(items: ~[@ast::meta_item], name: ~str)
-> option<@ast::meta_item> {
let items = attr::find_meta_items_by_name(items, name);
vec::last_opt(items)
}
fn last_meta_item_value_str_by_name(
items: ~[@ast::meta_item],
+name: ~str
) -> option<@~str> {
fn last_meta_item_value_str_by_name(items: ~[@ast::meta_item], name: ~str)
-> option<~str> {
match last_meta_item_by_name(items, name) {
some(item) => match attr::get_meta_item_value_str(item) {
some(value) => some(value),
......@@ -262,10 +253,9 @@ fn last_meta_item_value_str_by_name(
}
}
fn last_meta_item_list_by_name(
items: ~[@ast::meta_item],
+name: ~str
) -> option<~[@ast::meta_item]> {
fn last_meta_item_list_by_name(items: ~[@ast::meta_item], name: ~str)
-> option<~[@ast::meta_item]> {
match last_meta_item_by_name(items, name) {
some(item) => attr::get_meta_item_list(item),
none => none
......@@ -279,11 +269,11 @@ fn last_meta_item_list_by_name(
// the item name (See [Fixme-sorting])
fn sort_meta_items(+items: ~[@ast::meta_item]) -> ~[@ast::meta_item] {
pure fn lteq(ma: &@ast::meta_item, mb: &@ast::meta_item) -> bool {
pure fn key(m: &ast::meta_item) -> ast::ident {
pure fn key(m: &ast::meta_item) -> ~str {
match m.node {
ast::meta_word(name) => /* FIXME (#2543) */ copy name,
ast::meta_name_value(name, _) => /* FIXME (#2543) */ copy name,
ast::meta_list(name, _) => /* FIXME (#2543) */ copy name
ast::meta_word(name) => name,
ast::meta_name_value(name, _) => name,
ast::meta_list(name, _) => name
}
}
key(*ma) <= key(*mb)
......@@ -295,7 +285,7 @@ fn sort_meta_items(+items: ~[@ast::meta_item]) -> ~[@ast::meta_item] {
return vec::from_mut(v);
}
fn remove_meta_items_by_name(items: ~[@ast::meta_item], name: ast::ident) ->
fn remove_meta_items_by_name(items: ~[@ast::meta_item], name: ~str) ->
~[@ast::meta_item] {
return vec::filter_map(items, |item| {
......@@ -335,17 +325,17 @@ fn foreign_abi(attrs: ~[ast::attribute]) -> Either<~str, ast::foreign_abi> {
option::none => {
either::Right(ast::foreign_abi_cdecl)
}
option::some(@~"rust-intrinsic") => {
option::some(~"rust-intrinsic") => {
either::Right(ast::foreign_abi_rust_intrinsic)
}
option::some(@~"cdecl") => {
option::some(~"cdecl") => {
either::Right(ast::foreign_abi_cdecl)
}
option::some(@~"stdcall") => {
option::some(~"stdcall") => {
either::Right(ast::foreign_abi_stdcall)
}
option::some(t) => {
either::Left(~"unsupported abi: " + *t)
either::Left(~"unsupported abi: " + t)
}
};
}
......@@ -362,8 +352,8 @@ fn find_inline_attr(attrs: ~[ast::attribute]) -> inline_attr {
// FIXME (#2809)---validate the usage of #[inline] and #[inline(always)]
do vec::foldl(ia_none, attrs) |ia,attr| {
match attr.node.value.node {
ast::meta_word(@~"inline") => ia_hint,
ast::meta_list(@~"inline", items) => {
ast::meta_word(~"inline") => ia_hint,
ast::meta_list(~"inline", items) => {
if !vec::is_empty(find_meta_items_by_name(items, ~"always")) {
ia_always
} else if !vec::is_empty(
......@@ -386,11 +376,11 @@ fn require_unique_names(diagnostic: span_handler,
let name = get_meta_item_name(meta);
// FIXME: How do I silence the warnings? --pcw (#2619)
if map.contains_key(*name) {
if map.contains_key(name) {
diagnostic.span_fatal(meta.span,
fmt!{"duplicate meta item `%s`", *name});
fmt!{"duplicate meta item `%s`", name});
}
map.insert(*name, ());
map.insert(name, ());
}
}
......
......@@ -84,15 +84,15 @@ mod syntax {
export parse;
}
type ser_tps_map = map::hashmap<~str, fn@(@ast::expr) -> ~[@ast::stmt]>;
type deser_tps_map = map::hashmap<~str, fn@() -> @ast::expr>;
type ser_tps_map = map::hashmap<ast::ident, fn@(@ast::expr) -> ~[@ast::stmt]>;
type deser_tps_map = map::hashmap<ast::ident, fn@() -> @ast::expr>;
fn expand(cx: ext_ctxt,
span: span,
_mitem: ast::meta_item,
in_items: ~[@ast::item]) -> ~[@ast::item] {
fn not_auto_serialize(a: ast::attribute) -> bool {
attr::get_attr_name(a) != @~"auto_serialize"
attr::get_attr_name(a) != ~"auto_serialize"
}
fn filter_attrs(item: @ast::item) -> @ast::item {
......@@ -156,7 +156,11 @@ fn helper_path(base_path: @ast::path,
let head = vec::init(base_path.idents);
let tail = vec::last(base_path.idents);
self.path(base_path.span,
vec::append(head, ~[@(helper_name + ~"_" + *tail)]))
vec::append(head,
~[self.parse_sess().interner.
intern(@(helper_name + ~"_" +
*self.parse_sess().interner.get(
tail)))]))
}
fn path(span: span, strs: ~[ast::ident]) -> @ast::path {
......@@ -181,7 +185,7 @@ fn ty_fn(span: span,
let args = do vec::map(input_tys) |ty| {
{mode: ast::expl(ast::by_ref),
ty: ty,
ident: @~"",
ident: parse::token::special_idents::invalid,
id: self.next_id()}
};
......@@ -352,7 +356,7 @@ fn ser_variant(cx: ext_ctxt,
argfn: fn(-@ast::expr, uint, ast::blk) -> @ast::expr)
-> ast::arm {
let vnames = do vec::from_fn(vec::len(tys)) |i| {
@fmt!{"__v%u", i}
cx.parse_sess().interner.intern(@fmt!{"__v%u", i})
};
let pats = do vec::from_fn(vec::len(tys)) |i| {
cx.binder_pat(tys[i].span, vnames[i])
......@@ -384,7 +388,7 @@ fn is_vec_or_str(ty: @ast::ty) -> bool {
// This may be wrong if the user has shadowed (!) str
ast::ty_path(@{span: _, global: _, idents: ids,
rp: none, types: _}, _)
if ids == ~[@~"str"] => true,
if ids == ~[parse::token::special_idents::str] => true,
_ => false
}
}
......@@ -434,7 +438,8 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
fld.node.ident,
~[]));
let s = cx.clone(s);
let f = cx.lit_str(fld.span, fld.node.ident);
let f = cx.lit_str(fld.span, cx.parse_sess().interner.get(
fld.node.ident));
let i = cx.lit_uint(fld.span, fidx);
let l = ser_lambda(cx, tps, fld.node.mt.ty, cx.clone(s), vf);
#ast[stmt]{$(s).emit_rec_field($(f), $(i), $(l));}
......@@ -487,7 +492,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
vec::is_empty(path.types) {
let ident = path.idents[0];
match tps.find(*ident) {
match tps.find(ident) {
some(f) => f(v),
none => ser_path(cx, tps, path, s, v)
}
......@@ -545,7 +550,7 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident,
ty: cx.ty_fn(span,
~[cx.ty_path(span, ~[tp.ident], ~[])],
cx.ty_nil(span)),
ident: @(~"__s" + *tp.ident),
ident: cx.ident_of(~"__s" + cx.str_of(tp.ident)),
id: cx.next_id()});
debug!{"tp_inputs = %?", tp_inputs};
......@@ -553,35 +558,36 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident,
let ser_inputs: ~[ast::arg] =
vec::append(~[{mode: ast::expl(ast::by_ref),
ty: cx.ty_path(span, ~[@~"__S"], ~[]),
ident: @~"__s",
ty: cx.ty_path(span, ~[cx.ident_of(~"__S")], ~[]),
ident: cx.ident_of(~"__s"),
id: cx.next_id()},
{mode: ast::expl(ast::by_ref),
ty: v_ty,
ident: @~"__v",
ident: cx.ident_of(~"__v"),
id: cx.next_id()}],
tp_inputs);
let tps_map = map::str_hash();
let tps_map = map::uint_hash();
do vec::iter2(tps, tp_inputs) |tp, arg| {
let arg_ident = arg.ident;
tps_map.insert(
*tp.ident,
tp.ident,
fn@(v: @ast::expr) -> ~[@ast::stmt] {
let f = cx.var_ref(span, arg_ident);
debug!{"serializing type arg %s", *arg_ident};
debug!{"serializing type arg %s", cx.str_of(arg_ident)};
~[#ast[stmt]{$(f)($(v));}]
});
}
let ser_bnds = @~[
ast::bound_trait(cx.ty_path(span,
~[@~"std", @~"serialization",
@~"serializer"],
~[cx.ident_of(~"std"),
cx.ident_of(~"serialization"),
cx.ident_of(~"serializer")],
~[]))];
let ser_tps: ~[ast::ty_param] =
vec::append(~[{ident: @~"__S",
vec::append(~[{ident: cx.ident_of(~"__S"),
id: cx.next_id(),
bounds: ser_bnds}],
vec::map(tps, |tp| cx.clone_ty_param(tp)));
......@@ -593,7 +599,7 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident,
let ser_blk = cx.blk(span,
f(cx, tps_map, #ast{ __s }, #ast{ __v }));
@{ident: @(~"serialize_" + *name),
@{ident: cx.ident_of(~"serialize_" + cx.str_of(name)),
attrs: ~[],
id: cx.next_id(),
node: ast::item_fn({inputs: ser_inputs,
......@@ -670,7 +676,7 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map,
let fields = do vec::from_fn(vec::len(flds)) |fidx| {
let fld = flds[fidx];
let d = cx.clone(d);
let f = cx.lit_str(fld.span, fld.node.ident);
let f = cx.lit_str(fld.span, @cx.str_of(fld.node.ident));
let i = cx.lit_uint(fld.span, fidx);
let l = deser_lambda(cx, tps, fld.node.mt.ty, cx.clone(d));
{node: {mutbl: fld.node.mt.mutbl,
......@@ -713,7 +719,7 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map,
vec::is_empty(path.types) {
let ident = path.idents[0];
match tps.find(*ident) {
match tps.find(ident) {
some(f) => f(),
none => deser_path(cx, tps, path, d)
}
......@@ -756,23 +762,23 @@ fn mk_deser_fn(cx: ext_ctxt, span: span,
ty: cx.ty_fn(span,
~[],
cx.ty_path(span, ~[tp.ident], ~[])),
ident: @(~"__d" + *tp.ident),
ident: cx.ident_of(~"__d" + cx.str_of(tp.ident)),
id: cx.next_id()});
debug!{"tp_inputs = %?", tp_inputs};
let deser_inputs: ~[ast::arg] =
vec::append(~[{mode: ast::expl(ast::by_ref),
ty: cx.ty_path(span, ~[@~"__D"], ~[]),
ident: @~"__d",
ty: cx.ty_path(span, ~[cx.ident_of(~"__D")], ~[]),
ident: cx.ident_of(~"__d"),
id: cx.next_id()}],
tp_inputs);
let tps_map = map::str_hash();
let tps_map = map::uint_hash();
do vec::iter2(tps, tp_inputs) |tp, arg| {
let arg_ident = arg.ident;
tps_map.insert(
*tp.ident,
tp.ident,
fn@() -> @ast::expr {
let f = cx.var_ref(span, arg_ident);
#ast{ $(f)() }
......@@ -782,11 +788,12 @@ fn mk_deser_fn(cx: ext_ctxt, span: span,
let deser_bnds = @~[
ast::bound_trait(cx.ty_path(
span,
~[@~"std", @~"serialization", @~"deserializer"],
~[cx.ident_of(~"std"), cx.ident_of(~"serialization"),
cx.ident_of(~"deserializer")],
~[]))];
let deser_tps: ~[ast::ty_param] =
vec::append(~[{ident: @~"__D",
vec::append(~[{ident: cx.ident_of(~"__D"),
id: cx.next_id(),
bounds: deser_bnds}],
vec::map(tps, |tp| {
......@@ -798,7 +805,7 @@ fn mk_deser_fn(cx: ext_ctxt, span: span,
let deser_blk = cx.expr_blk(f(cx, tps_map, #ast[expr]{__d}));
@{ident: @(~"deserialize_" + *name),
@{ident: cx.ident_of(~"deserialize_" + cx.str_of(name)),
attrs: ~[],
id: cx.next_id(),
node: ast::item_fn({inputs: deser_inputs,
......@@ -853,7 +860,7 @@ fn ser_enum(cx: ext_ctxt, tps: ser_tps_map, e_name: ast::ident,
// Generate body s.emit_enum_variant("foo", 0u,
// 3u, {|| blk })
|-s, blk| {
let v_name = cx.lit_str(v_span, v_name);
let v_name = cx.lit_str(v_span, @cx.str_of(v_name));
let v_id = cx.lit_uint(v_span, vidx);
let sz = cx.lit_uint(v_span, vec::len(variant_tys));
let body = cx.lambda(blk);
......@@ -877,7 +884,7 @@ fn ser_enum(cx: ext_ctxt, tps: ser_tps_map, e_name: ast::ident,
}
};
let lam = cx.lambda(cx.blk(e_span, ~[cx.alt_stmt(arms, e_span, v)]));
let e_name = cx.lit_str(e_span, e_name);
let e_name = cx.lit_str(e_span, @cx.str_of(e_name));
~[#ast[stmt]{ $(s).emit_enum($(e_name), $(lam)) }]
}
......@@ -935,7 +942,7 @@ fn deser_enum(cx: ext_ctxt, tps: deser_tps_map, e_name: ast::ident,
arms += ~[impossible_case];
// Generate code like:
let e_name = cx.lit_str(e_span, e_name);
let e_name = cx.lit_str(e_span, @cx.str_of(e_name));
let alt_expr = cx.expr(e_span,
ast::expr_match(#ast{__i}, arms, ast::alt_exhaustive));
let var_lambda = #ast{ |__i| $(alt_expr) };
......
......@@ -23,7 +23,7 @@
// second argument is the origin of the macro, if user-defined
type syntax_expander = {expander: syntax_expander_, span: option<span>};
type macro_def = {ident: ast::ident, ext: syntax_extension};
type macro_def = {name: ~str, ext: syntax_extension};
// macro_definer is obsolete, remove when #old_macros go away.
type macro_definer =
......@@ -141,6 +141,9 @@ trait ext_ctxt {
fn next_id() -> ast::node_id;
pure fn trace_macros() -> bool;
fn set_trace_macros(x: bool);
/* for unhygienic identifier transformation */
fn str_of(id: ast::ident) -> ~str;
fn ident_of(st: ~str) -> ast::ident;
}
fn mk_ctxt(parse_sess: parse::parse_sess,
......@@ -211,6 +214,13 @@ fn next_id() -> ast::node_id {
fn set_trace_macros(x: bool) {
self.trace_mac = x
}
fn str_of(id: ast::ident) -> ~str {
*self.parse_sess.interner.get(id)
}
fn ident_of(st: ~str) -> ast::ident {
self.parse_sess.interner.intern(@st)
}
}
let imp : ctxt_repr = {
parse_sess: parse_sess,
......@@ -264,12 +274,12 @@ fn get_mac_args(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
cx.span_fatal(sp, fmt!{"#%s needs at least %u arguments.",
name, min});
}
_ => return elts /* we're good */
_ => return elts /* we are good */
}
}
_ => {
cx.span_fatal(sp, fmt!{"#%s: malformed invocation", name})
}
}
},
none => cx.span_fatal(sp, fmt!{"#%s: missing arguments", name})
}
......@@ -298,22 +308,24 @@ fn tt_args_to_original_flavor(cx: ext_ctxt, sp: span, arg: ~[ast::token_tree])
fn ms(m: matcher_) -> matcher {
{node: m, span: {lo: 0u, hi: 0u, expn_info: none}}
}
let arg_nm = cx.parse_sess().interner.gensym(@~"arg");
let argument_gram = ~[ms(match_seq(~[
ms(match_nonterminal(@~"arg",@~"expr", 0u))
ms(match_nonterminal(arg_nm, parse::token::special_idents::expr, 0u))
], some(parse::token::COMMA), true, 0u, 1u))];
let arg_reader = new_tt_reader(cx.parse_sess().span_diagnostic,
cx.parse_sess().interner, none, arg);
let args =
match parse_or_else(cx.parse_sess(), cx.cfg(), arg_reader as reader,
argument_gram).get(@~"arg") {
@matched_seq(s, _) => do s.map() |lf| {
match lf {
@matched_nonterminal(parse::token::nt_expr(arg)) => {
arg /* whew! list of exprs, here we come! */
}
_ => fail ~"badly-structured parse result"
argument_gram).get(arg_nm) {
@matched_seq(s, _) => {
do s.map() |lf| {
match lf {
@matched_nonterminal(parse::token::nt_expr(arg)) =>
arg, /* whew! list of exprs, here we come! */
_ => fail ~"badly-structured parse result"
}
}
},
_ => fail ~"badly-structured parse result"
......
......@@ -3,14 +3,16 @@
fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
let args = get_mac_args_no_max(cx,sp,arg,1u,~"concat_idents");
let mut res = ~"";
let mut res_str = ~"";
for args.each |e| {
res += *expr_to_ident(cx, e, ~"expected an ident");
res_str += *cx.parse_sess().interner.get(
expr_to_ident(cx, e, ~"expected an ident"));
}
let res = cx.parse_sess().interner.intern(@res_str);
return @{id: cx.next_id(),
callee_id: cx.next_id(),
node: ast::expr_path(@{span: sp, global: false, idents: ~[@res],
node: ast::expr_path(@{span: sp, global: false, idents: ~[res],
rp: none, types: ~[]}),
span: sp};
}
......@@ -25,7 +25,9 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
match mac.node {
mac_invoc(pth, args, body) => {
assert (vec::len(pth.idents) > 0u);
let extname = pth.idents[0];
/* using idents and token::special_idents would make the
the macro names be hygienic */
let extname = cx.parse_sess().interner.get(pth.idents[0]);
match exts.find(*extname) {
none => {
cx.span_fatal(pth.span,
......@@ -49,7 +51,7 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
}
some(macro_defining(ext)) => {
let named_extension = ext(cx, mac.span, args, body);
exts.insert(*named_extension.ident, named_extension.ext);
exts.insert(named_extension.name, named_extension.ext);
(ast::expr_rec(~[], none), s)
}
some(expr_tt(_)) => {
......@@ -68,7 +70,9 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
// finished transitioning.
mac_invoc_tt(pth, tts) => {
assert (vec::len(pth.idents) == 1u);
let extname = pth.idents[0];
/* using idents and token::special_idents would make the
the macro names be hygienic */
let extname = cx.parse_sess().interner.get(pth.idents[0]);
match exts.find(*extname) {
none => {
cx.span_fatal(pth.span,
......@@ -146,7 +150,7 @@ fn expand_mod_items(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
ast::meta_name_value(n, _) => n,
ast::meta_list(n, _) => n
};
match exts.find(*mname) {
match exts.find(mname) {
none | some(normal(_)) | some(macro_defining(_))
| some(expr_tt(_)) | some(item_tt(*)) => items,
some(item_decorator(dec_fn)) => {
......@@ -194,7 +198,7 @@ fn expand_item_mac(exts: hashmap<~str, syntax_extension>,
fld: ast_fold) -> option<@ast::item> {
match it.node {
item_mac({node: mac_invoc_tt(pth, tts), span}) => {
let extname = pth.idents[0];
let extname = cx.parse_sess().interner.get(pth.idents[0]);
match exts.find(*extname) {
none => {
cx.span_fatal(pth.span,
......@@ -211,7 +215,7 @@ fn expand_item_mac(exts: hashmap<~str, syntax_extension>,
~"expr macro in item position: " +
*extname),
mr_def(mdef) => {
exts.insert(*mdef.ident, mdef.ext);
exts.insert(mdef.name, mdef.ext);
none
}
};
......
......@@ -37,12 +37,13 @@ fn parse_fmt_err_(cx: ext_ctxt, sp: span, msg: ~str) -> ! {
fn pieces_to_expr(cx: ext_ctxt, sp: span,
pieces: ~[piece], args: ~[@ast::expr])
-> @ast::expr {
fn make_path_vec(_cx: ext_ctxt, ident: ast::ident) -> ~[ast::ident] {
return ~[@~"extfmt", @~"rt", ident];
fn make_path_vec(_cx: ext_ctxt, ident: @~str) -> ~[ast::ident] {
let intr = _cx.parse_sess().interner;
return ~[intr.intern(@~"extfmt"), intr.intern(@~"rt"),
intr.intern(ident)];
}
fn make_rt_path_expr(cx: ext_ctxt, sp: span,
ident: ast::ident) -> @ast::expr {
let path = make_path_vec(cx, ident);
fn make_rt_path_expr(cx: ext_ctxt, sp: span, nm: @~str) -> @ast::expr {
let path = make_path_vec(cx, nm);
return mk_path(cx, sp, path);
}
// Produces an AST expression that represents a RT::conv record,
......@@ -94,11 +95,13 @@ fn make_ty(cx: ext_ctxt, sp: span, t: ty) -> @ast::expr {
fn make_conv_rec(cx: ext_ctxt, sp: span, flags_expr: @ast::expr,
width_expr: @ast::expr, precision_expr: @ast::expr,
ty_expr: @ast::expr) -> @ast::expr {
let intr = cx.parse_sess().interner;
return mk_rec_e(cx, sp,
~[{ident: @~"flags", ex: flags_expr},
{ident: @~"width", ex: width_expr},
{ident: @~"precision", ex: precision_expr},
{ident: @~"ty", ex: ty_expr}]);
~[{ident: intr.intern(@~"flags"), ex: flags_expr},
{ident: intr.intern(@~"width"), ex: width_expr},
{ident: intr.intern(@~"precision"),
ex: precision_expr},
{ident: intr.intern(@~"ty"), ex: ty_expr}]);
}
let rt_conv_flags = make_flags(cx, sp, cnv.flags);
let rt_conv_width = make_count(cx, sp, cnv.width);
......@@ -268,7 +271,10 @@ fn log_conv(c: conv) {
}
let arg_vec = mk_fixed_vec_e(cx, fmt_sp, piece_exprs);
return mk_call(cx, fmt_sp, ~[@~"str", @~"concat"], ~[arg_vec]);
return mk_call(cx, fmt_sp,
~[cx.parse_sess().interner.intern(@~"str"),
cx.parse_sess().interner.intern(@~"concat")],
~[arg_vec]);
}
//
// Local Variables:
......
......@@ -6,6 +6,6 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
let args = get_mac_args(cx,sp,arg,1u,option::some(1u),~"ident_to_str");
return mk_uniq_str(cx, sp, *expr_to_ident(cx, args[0u],
~"expected an ident"));
return mk_uniq_str(cx, sp, *cx.parse_sess().interner.get(
expr_to_ident(cx, args[0u], ~"expected an ident")));
}
......@@ -6,8 +6,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, tt: ~[ast::token_tree])
cx.print_backtrace();
io::stdout().write_line(
print::pprust::unexpanded_tt_to_str(ast::tt_delim(tt),
cx.parse_sess().interner));
print::pprust::tt_to_str(ast::tt_delim(tt),cx.parse_sess().interner));
//trivial expression
return mr_expr(@{id: cx.next_id(), callee_id: cx.next_id(),
......
......@@ -54,7 +54,7 @@ fn expand_proto(cx: ext_ctxt, _sp: span, id: ast::ident,
let rdr = tt_rdr as reader;
let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE);
let proto = rust_parser.parse_proto(id);
let proto = rust_parser.parse_proto(cx.str_of(id));
// check for errors
visit(proto, cx);
......
......@@ -15,14 +15,10 @@ mod syntax {
export parse;
}
fn ident(s: &str) -> ast::ident {
@(s.to_unique())
}
fn path(id: ident, span: span) -> @ast::path {
fn path(ids: ~[ident], span: span) -> @ast::path {
@{span: span,
global: false,
idents: ~[id],
idents: ids,
rp: none,
types: ~[]}
}
......@@ -94,7 +90,8 @@ fn item_ty_poly(name: ident,
impl ext_ctxt: ext_ctxt_ast_builder {
fn ty_option(ty: @ast::ty) -> @ast::ty {
self.ty_path_ast_builder(path(@~"option", self.empty_span())
self.ty_path_ast_builder(path(~[self.ident_of(~"option")],
self.empty_span())
.add_ty(ty))
}
......@@ -126,7 +123,7 @@ fn stmt_let(ident: ident, e: @ast::expr) -> @ast::stmt {
ty: self.ty_infer(),
pat: @{id: self.next_id(),
node: ast::pat_ident(ast::bind_by_implicit_ref,
path(ident,
path(~[ident],
self.empty_span()),
none),
span: self.empty_span()},
......@@ -301,6 +298,6 @@ fn item_ty(name: ident, span: span, ty: @ast::ty) -> @ast::item {
fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::ty] {
ty_params.map(|p| self.ty_path_ast_builder(
path(p.ident, self.empty_span())))
path(~[p.ident], self.empty_span())))
}
}
......@@ -21,8 +21,6 @@
import ext::base::ext_ctxt;
import ast::{ident};
import proto::{state, protocol, next_state};
import ast_builder::empty_span;
......@@ -36,11 +34,11 @@ fn visit_state(state: state, _m: &[()]) {
state.span, // use a real span!
fmt!{"state %s contains no messages, \
consider stepping to a terminal state instead",
*state.name})
state.name})
}
}
fn visit_message(name: ident, _span: span, _tys: &[@ast::ty],
fn visit_message(name: ~str, _span: span, _tys: &[@ast::ty],
this: state, next: next_state) {
match next {
some({state: next, tys: next_tys}) => {
......@@ -51,7 +49,7 @@ fn visit_message(name: ident, _span: span, _tys: &[@ast::ty],
self.span_err(
proto.get_state(next).span,
fmt!{"message %s steps to undefined state, %s",
*name, *next});
name, next});
}
else {
let next = proto.get_state(next);
......@@ -61,7 +59,7 @@ fn visit_message(name: ident, _span: span, _tys: &[@ast::ty],
next.span, // use a real span
fmt!{"message %s target (%s) \
needs %u type parameters, but got %u",
*name, *next.name,
name, next.name,
next.ty_params.len(),
next_tys.len()});
}
......
......@@ -70,10 +70,10 @@ fn analyze(proto: protocol, _cx: ext_ctxt) {
}
if self_live.len() > 0 {
let states = str::connect(self_live.map(|s| *s.name), ~" ");
let states = str::connect(self_live.map(|s| s.name), ~" ");
debug!{"protocol %s is unbounded due to loops involving: %s",
*proto.name, states};
proto.name, states};
// Someday this will be configurable with a warning
//cx.span_warn(empty_span(),
......@@ -85,7 +85,7 @@ fn analyze(proto: protocol, _cx: ext_ctxt) {
proto.bounded = some(false);
}
else {
debug!{"protocol %s is bounded. yay!", *proto.name};
debug!{"protocol %s is bounded. yay!", proto.name};
proto.bounded = some(true);
}
}
\ No newline at end of file
// Parsing pipes protocols from token trees.
import parse::parser;
import ast::ident;
import parse::token;
import pipec::*;
trait proto_parser {
fn parse_proto(id: ident) -> protocol;
fn parse_proto(id: ~str) -> protocol;
fn parse_state(proto: protocol);
}
impl parser: proto_parser {
fn parse_proto(id: ident) -> protocol {
fn parse_proto(id: ~str) -> protocol {
let proto = protocol(id, self.span);
self.parse_seq_to_before_end(token::EOF,
......@@ -24,9 +23,11 @@ fn parse_proto(id: ident) -> protocol {
fn parse_state(proto: protocol) {
let id = self.parse_ident();
let name = *self.interner.get(id);
self.expect(token::COLON);
let dir = match copy self.token {
token::IDENT(n, _) => self.get_str(n),
token::IDENT(n, _) => self.interner.get(n),
_ => fail
};
self.bump();
......@@ -41,7 +42,7 @@ fn parse_state(proto: protocol) {
}
else { ~[] };
let state = proto.add_state_poly(id, dir, typarms);
let state = proto.add_state_poly(name, id, dir, typarms);
// parse the messages
self.parse_unspanned_seq(
......@@ -51,7 +52,7 @@ fn parse_state(proto: protocol) {
}
fn parse_message(state: state) {
let mname = self.parse_ident();
let mname = *self.interner.get(self.parse_ident());
let args = if self.token == token::LPAREN {
self.parse_unspanned_seq(token::LPAREN,
......@@ -66,7 +67,7 @@ fn parse_message(state: state) {
let next = match copy self.token {
token::IDENT(_, _) => {
let name = self.parse_ident();
let name = *self.interner.get(self.parse_ident());
let ntys = if self.token == token::LT {
self.parse_unspanned_seq(token::LT,
token::GT,
......
......@@ -13,8 +13,7 @@
import parse::*;
import proto::*;
import ast_builder::append_types;
import ast_builder::path;
import ast_builder::{append_types, path, empty_span};
// Transitional reexports so qquote can find the paths it is looking for
mod syntax {
......@@ -47,17 +46,17 @@ fn gen_send(cx: ext_ctxt, try: bool) -> @ast::item {
debug!("pipec: next state exists");
let next = this.proto.get_state(next);
assert next_tys.len() == next.ty_params.len();
let arg_names = tys.mapi(|i, _ty| @(~"x_" + i.to_str()));
let arg_names = tys.mapi(|i, _ty| cx.ident_of(~"x_"+i.to_str()));
let args_ast = (arg_names, tys).map(
|n, t| cx.arg_mode(n, t, ast::by_copy)
);
let pipe_ty = cx.ty_path_ast_builder(
path(this.data_name(), span)
path(~[this.data_name()], span)
.add_tys(cx.ty_vars(this.ty_params)));
let args_ast = vec::append(
~[cx.arg_mode(@~"pipe",
~[cx.arg_mode(cx.ident_of(~"pipe"),
pipe_ty,
ast::by_copy)],
args_ast);
......@@ -75,10 +74,10 @@ fn gen_send(cx: ext_ctxt, try: bool) -> @ast::item {
body += ~"let b = pipe.reuse_buffer();\n";
body += fmt!("let %s = pipes::send_packet_buffered(\
ptr::addr_of(b.buffer.data.%s));\n",
sp, *next.name);
body += fmt!("let %s = pipes::recv_packet_buffered(\
sp, next.name);
body += fmt!{"let %s = pipes::recv_packet_buffered(\
ptr::addr_of(b.buffer.data.%s));\n",
rp, *next.name);
rp, next.name};
}
else {
let pat = match (this.dir, next.dir) {
......@@ -91,10 +90,10 @@ fn gen_send(cx: ext_ctxt, try: bool) -> @ast::item {
body += fmt!("let %s = pipes::entangle();\n", pat);
}
body += fmt!("let message = %s::%s(%s);\n",
*this.proto.name,
*self.name(),
str::connect(vec::append_one(arg_names, @~"s")
.map(|x| *x),
this.proto.name,
self.name(),
str::connect(vec::append_one(
arg_names.map(|x| cx.str_of(x)), ~"s"),
~", "));
if !try {
......@@ -110,17 +109,15 @@ fn gen_send(cx: ext_ctxt, try: bool) -> @ast::item {
let body = cx.parse_expr(body);
let mut rty = cx.ty_path_ast_builder(path(next.data_name(),
let mut rty = cx.ty_path_ast_builder(path(~[next.data_name()],
span)
.add_tys(next_tys));
if try {
rty = cx.ty_option(rty);
}
let name = if try {
@(~"try_" + *self.name())
}
else { self.name() };
let name = cx.ident_of(if try { ~"try_" + self.name()
} else { self.name() } );
cx.item_fn_poly(name,
args_ast,
......@@ -131,16 +128,16 @@ fn gen_send(cx: ext_ctxt, try: bool) -> @ast::item {
message(id, span, tys, this, none) => {
debug!{"pipec: no next state"};
let arg_names = tys.mapi(|i, _ty| @(~"x_" + i.to_str()));
let arg_names = tys.mapi(|i, _ty| (~"x_" + i.to_str()));
let args_ast = (arg_names, tys).map(
|n, t| cx.arg_mode(n, t, ast::by_copy)
|n, t| cx.arg_mode(cx.ident_of(n), t, ast::by_copy)
);
let args_ast = vec::append(
~[cx.arg_mode(@~"pipe",
~[cx.arg_mode(cx.ident_of(~"pipe"),
cx.ty_path_ast_builder(
path(this.data_name(), span)
path(~[this.data_name()], span)
.add_tys(cx.ty_vars(this.ty_params))),
ast::by_copy)],
args_ast);
......@@ -149,13 +146,13 @@ fn gen_send(cx: ext_ctxt, try: bool) -> @ast::item {
~""
}
else {
~"(" + str::connect(arg_names.map(|x| *x), ~", ") + ~")"
~"(" + str::connect(arg_names, ~", ") + ~")"
};
let mut body = ~"{ ";
body += fmt!{"let message = %s::%s%s;\n",
*this.proto.name,
*self.name(),
this.proto.name,
self.name(),
message_args};
if !try {
......@@ -170,11 +167,11 @@ fn gen_send(cx: ext_ctxt, try: bool) -> @ast::item {
let body = cx.parse_expr(body);
let name = if try {
@(~"try_" + *self.name())
~"try_" + self.name()
}
else { self.name() };
cx.item_fn_poly(name,
cx.item_fn_poly(cx.ident_of(name),
args_ast,
if try {
cx.ty_option(cx.ty_nil_ast_builder())
......@@ -188,7 +185,7 @@ fn gen_send(cx: ext_ctxt, try: bool) -> @ast::item {
}
fn to_ty(cx: ext_ctxt) -> @ast::ty {
cx.ty_path_ast_builder(path(self.name(), self.span())
cx.ty_path_ast_builder(path(~[cx.ident_of(self.name())], self.span())
.add_tys(cx.ty_vars(self.get_params())))
}
}
......@@ -212,21 +209,23 @@ fn to_type_decls(cx: ext_ctxt) -> ~[@ast::item] {
let tys = match next {
some({state: next, tys: next_tys}) => {
let next = this.proto.get_state(next);
let next_name = next.data_name();
let next_name = cx.str_of(next.data_name());
let dir = match this.dir {
send => @~"server",
recv => @~"client"
send => ~"server",
recv => ~"client"
};
vec::append_one(tys,
cx.ty_path_ast_builder((dir + next_name)
.add_tys(next_tys)))
cx.ty_path_ast_builder(
path(~[cx.ident_of(dir),
cx.ident_of(next_name)], span)
.add_tys(next_tys)))
}
none => tys
};
let v = cx.variant(name, span, tys);
let v = cx.variant(cx.ident_of(name), span, tys);
vec::push(items_msg, v);
}
......@@ -258,9 +257,13 @@ fn to_endpoint_decls(cx: ext_ctxt, dir: direction) -> ~[@ast::item] {
self.data_name(),
self.span,
cx.ty_path_ast_builder(
(@~"pipes" + @(dir.to_str() + ~"_packet"))
path(~[cx.ident_of(~"pipes"),
cx.ident_of(dir.to_str() + ~"_packet")],
empty_span())
.add_ty(cx.ty_path_ast_builder(
(self.proto.name + self.data_name())
path(~[cx.ident_of(self.proto.name),
self.data_name()],
empty_span())
.add_tys(cx.ty_vars(self.ty_params))))),
self.ty_params));
}
......@@ -270,10 +273,14 @@ fn to_endpoint_decls(cx: ext_ctxt, dir: direction) -> ~[@ast::item] {
self.data_name(),
self.span,
cx.ty_path_ast_builder(
(@~"pipes" + @(dir.to_str()
+ ~"_packet_buffered"))
path(~[cx.ident_of(~"pipes"),
cx.ident_of(dir.to_str()
+ ~"_packet_buffered")],
empty_span())
.add_tys(~[cx.ty_path_ast_builder(
(self.proto.name + self.data_name())
path(~[cx.ident_of(self.proto.name),
self.data_name()],
empty_span())
.add_tys(cx.ty_vars(self.ty_params))),
self.proto.buffer_ty_path(cx)])),
self.ty_params));
......@@ -315,16 +322,17 @@ fn gen_init(cx: ext_ctxt) -> @ast::item {
cx.parse_item(fmt!{"fn init%s() -> (client::%s, server::%s)\
{ import pipes::has_buffer; %s }",
start_state.ty_params.to_source(),
start_state.to_ty(cx).to_source(),
start_state.to_ty(cx).to_source(),
body.to_source()})
start_state.ty_params.to_source(cx),
start_state.to_ty(cx).to_source(cx),
start_state.to_ty(cx).to_source(cx),
body.to_source(cx)})
}
fn gen_buffer_init(ext_cx: ext_ctxt) -> @ast::expr {
ext_cx.rec(self.states.map_to_vec(|s| {
let fty = s.to_ty(ext_cx);
ext_cx.field_imm(s.name, #ast { pipes::mk_packet::<$(fty)>() })
ext_cx.field_imm(ext_cx.ident_of(s.name),
#ast { pipes::mk_packet::<$(fty)>() })
}))
}
......@@ -341,9 +349,11 @@ fn gen_init_bounded(ext_cx: ext_ctxt) -> @ast::expr {
ext_cx.block(
self.states.map_to_vec(
|s| ext_cx.parse_stmt(
fmt!{"data.%s.set_buffer(buffer)", *s.name})),
fmt!{"data.%s.set_buffer(buffer)",
s.name})),
ext_cx.parse_expr(
fmt!{"ptr::addr_of(data.%s)", *self.states[0].name})));
fmt!{"ptr::addr_of(data.%s)",
self.states[0].name})));
#ast {{
let buffer = $(buffer);
......@@ -357,14 +367,14 @@ fn buffer_ty_path(cx: ext_ctxt) -> @ast::ty {
let mut params: ~[ast::ty_param] = ~[];
for (copy self.states).each |s| {
for s.ty_params.each |tp| {
match params.find(|tpp| *tp.ident == *tpp.ident) {
match params.find(|tpp| tp.ident == tpp.ident) {
none => vec::push(params, tp),
_ => ()
}
}
}
cx.ty_path_ast_builder(path(@~"__Buffer", self.span)
cx.ty_path_ast_builder(path(~[cx.ident_of(~"__Buffer")], self.span)
.add_tys(cx.ty_vars(params)))
}
......@@ -373,7 +383,7 @@ fn gen_buffer_type(cx: ext_ctxt) -> @ast::item {
let mut params: ~[ast::ty_param] = ~[];
let fields = do (copy self.states).map_to_vec |s| {
for s.ty_params.each |tp| {
match params.find(|tpp| *tp.ident == *tpp.ident) {
match params.find(|tpp| tp.ident == tpp.ident) {
none => vec::push(params, tp),
_ => ()
}
......@@ -382,11 +392,11 @@ fn gen_buffer_type(cx: ext_ctxt) -> @ast::item {
let fty = #ast[ty] {
pipes::packet<$(ty)>
};
cx.ty_field_imm(s.name, fty)
cx.ty_field_imm(cx.ident_of(s.name), fty)
};
cx.item_ty_poly(
@~"__Buffer",
cx.ident_of(~"__Buffer"),
cx.empty_span(),
cx.ty_rec(fields),
params)
......@@ -410,56 +420,56 @@ fn compile(cx: ext_ctxt) -> @ast::item {
}
vec::push(items,
cx.item_mod(@~"client",
cx.item_mod(cx.ident_of(~"client"),
self.span,
client_states));
vec::push(items,
cx.item_mod(@~"server",
cx.item_mod(cx.ident_of(~"server"),
self.span,
server_states));
cx.item_mod(self.name, self.span, items)
cx.item_mod(cx.ident_of(self.name), self.span, items)
}
}
trait to_source {
// Takes a thing and generates a string containing rust code for it.
fn to_source() -> ~str;
fn to_source(cx: ext_ctxt) -> ~str;
}
impl @ast::item: to_source {
fn to_source() -> ~str {
item_to_str(self)
fn to_source(cx: ext_ctxt) -> ~str {
item_to_str(self, cx.parse_sess().interner)
}
}
impl ~[@ast::item]: to_source {
fn to_source() -> ~str {
str::connect(self.map(|i| i.to_source()), ~"\n\n")
fn to_source(cx: ext_ctxt) -> ~str {
str::connect(self.map(|i| i.to_source(cx)), ~"\n\n")
}
}
impl @ast::ty: to_source {
fn to_source() -> ~str {
ty_to_str(self)
fn to_source(cx: ext_ctxt) -> ~str {
ty_to_str(self, cx.parse_sess().interner)
}
}
impl ~[@ast::ty]: to_source {
fn to_source() -> ~str {
str::connect(self.map(|i| i.to_source()), ~", ")
fn to_source(cx: ext_ctxt) -> ~str {
str::connect(self.map(|i| i.to_source(cx)), ~", ")
}
}
impl ~[ast::ty_param]: to_source {
fn to_source() -> ~str {
pprust::typarams_to_str(self)
fn to_source(cx: ext_ctxt) -> ~str {
pprust::typarams_to_str(self, cx.parse_sess().interner)
}
}
impl @ast::expr: to_source {
fn to_source() -> ~str {
pprust::expr_to_str(self)
fn to_source(cx: ext_ctxt) -> ~str {
pprust::expr_to_str(self, cx.parse_sess().interner)
}
}
......
import to_str::ToStr;
import dvec::{DVec, dvec};
import ast::{ident};
import ast_builder::{path, append_types};
enum direction {
......@@ -27,15 +25,15 @@ fn reverse() -> direction {
}
}
type next_state = option<{state: ident, tys: ~[@ast::ty]}>;
type next_state = option<{state: ~str, tys: ~[@ast::ty]}>;
enum message {
// name, span, data, current state, next state
message(ident, span, ~[@ast::ty], state, next_state)
message(~str, span, ~[@ast::ty], state, next_state)
}
impl message {
fn name() -> ident {
fn name() -> ~str {
match self {
message(id, _, _, _, _) => id
}
......@@ -58,7 +56,8 @@ fn get_params() -> ~[ast::ty_param] {
enum state {
state_(@{
id: uint,
name: ident,
name: ~str,
ident: ast::ident,
span: span,
dir: direction,
ty_params: ~[ast::ty_param],
......@@ -68,7 +67,7 @@ enum state {
}
impl state {
fn add_message(name: ident, span: span,
fn add_message(name: ~str, span: span,
+data: ~[@ast::ty], next: next_state) {
self.messages.push(message(name, span, data, self,
next));
......@@ -78,14 +77,15 @@ fn filename() -> ~str {
(*self).proto.filename()
}
fn data_name() -> ident {
self.name
fn data_name() -> ast::ident {
self.ident
}
/// Returns the type that is used for the messages.
fn to_ty(cx: ext_ctxt) -> @ast::ty {
cx.ty_path_ast_builder
(path(self.name, self.span).add_tys(cx.ty_vars(self.ty_params)))
(path(~[cx.ident_of(self.name)],self.span).add_tys(
cx.ty_vars(self.ty_params)))
}
/// Iterate over the states that can be reached in one message
......@@ -105,18 +105,18 @@ fn reachable(f: fn(state) -> bool) {
type protocol = @protocol_;
fn protocol(name: ident, +span: span) -> protocol {
fn protocol(name: ~str, +span: span) -> protocol {
@protocol_(name, span)
}
struct protocol_ {
let name: ident;
let name: ~str;
let span: span;
let states: DVec<state>;
let mut bounded: option<bool>;
new(name: ident, span: span) {
new(name: ~str, span: span) {
self.name = name;
self.span = span;
self.states = dvec();
......@@ -124,18 +124,18 @@ struct protocol_ {
}
/// Get a state.
fn get_state(name: ident) -> state {
fn get_state(name: ~str) -> state {
self.states.find(|i| i.name == name).get()
}
fn get_state_by_id(id: uint) -> state { self.states[id] }
fn has_state(name: ident) -> bool {
fn has_state(name: ~str) -> bool {
self.states.find(|i| i.name == name) != none
}
fn filename() -> ~str {
~"proto://" + *self.name
~"proto://" + self.name
}
fn num_states() -> uint { self.states.len() }
......@@ -162,17 +162,14 @@ fn is_bounded() -> bool {
}
impl protocol {
fn add_state(name: ident, dir: direction) -> state {
self.add_state_poly(name, dir, ~[])
}
fn add_state_poly(name: ident, dir: direction,
fn add_state_poly(name: ~str, ident: ast::ident, dir: direction,
+ty_params: ~[ast::ty_param]) -> state {
let messages = dvec();
let state = state_(@{
id: self.states.len(),
name: name,
ident: ident,
span: self.span,
dir: dir,
ty_params: ty_params,
......@@ -188,7 +185,7 @@ fn add_state_poly(name: ident, dir: direction,
trait visitor<Tproto, Tstate, Tmessage> {
fn visit_proto(proto: protocol, st: &[Tstate]) -> Tproto;
fn visit_state(state: state, m: &[Tmessage]) -> Tstate;
fn visit_message(name: ident, spane: span, tys: &[@ast::ty],
fn visit_message(name: ~str, spane: span, tys: &[@ast::ty],
this: state, next: next_state) -> Tmessage;
}
......
......@@ -3,6 +3,7 @@
import parse::parser;
import parse::parser::parse_from_source_str;
import dvec::{DVec, dvec};
import parse::token::ident_interner;
import fold::*;
import visit::*;
......@@ -26,6 +27,14 @@ enum fragment {
from_ty(@ast::ty)
}
fn ids_ext(cx: ext_ctxt, strs: ~[~str]) -> ~[ast::ident] {
strs.map(|str| cx.parse_sess().interner.intern(@str))
}
fn id_ext(cx: ext_ctxt, str: ~str) -> ast::ident {
cx.parse_sess().interner.intern(@str)
}
trait qq_helper {
fn span() -> span;
fn visit(aq_ctxt, vt<aq_ctxt>);
......@@ -40,7 +49,7 @@ fn span() -> span {self.span}
fn extract_mac() -> option<ast::mac_> {fail}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp,
~[@~"syntax", @~"ext", @~"qquote", @~"parse_crate"])
ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", ~"parse_crate"]))
}
fn get_fold_fn() -> ~str {~"fold_crate"}
}
......@@ -55,7 +64,7 @@ fn extract_mac() -> option<ast::mac_> {
}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp,
~[@~"syntax", @~"ext", @~"qquote", @~"parse_expr"])
ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", ~"parse_expr"]))
}
fn get_fold_fn() -> ~str {~"fold_expr"}
}
......@@ -70,7 +79,7 @@ fn extract_mac() -> option<ast::mac_> {
}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp,
~[@~"syntax", @~"ext", @~"qquote", @~"parse_ty"])
ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", ~"parse_ty"]))
}
fn get_fold_fn() -> ~str {~"fold_ty"}
}
......@@ -80,7 +89,7 @@ fn span() -> span {self.span}
fn extract_mac() -> option<ast::mac_> {fail}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp,
~[@~"syntax", @~"ext", @~"qquote", @~"parse_item"])
ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", ~"parse_item"]))
}
fn get_fold_fn() -> ~str {~"fold_item"}
}
......@@ -90,7 +99,7 @@ fn span() -> span {self.span}
fn extract_mac() -> option<ast::mac_> {fail}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp,
~[@~"syntax", @~"ext", @~"qquote", @~"parse_stmt"])
ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", ~"parse_stmt"]))
}
fn get_fold_fn() -> ~str {~"fold_stmt"}
}
......@@ -99,7 +108,8 @@ fn span() -> span {self.span}
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_pat(self, cx, v);}
fn extract_mac() -> option<ast::mac_> {fail}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp, ~[@~"syntax", @~"ext", @~"qquote", @~"parse_pat"])
mk_path(cx, sp, ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote",
~"parse_pat"]))
}
fn get_fold_fn() -> ~str {~"fold_pat"}
}
......@@ -159,7 +169,7 @@ fn expand_ast(ecx: ext_ctxt, _sp: span,
}
match (args[0].node) {
ast::expr_path(@{idents: id, _}) if vec::len(id) == 1u
=> what = *id[0],
=> what = *ecx.parse_sess().interner.get(id[0]),
_ => ecx.span_fatal(args[0].span, ~"expected an identifier")
}
}
......@@ -243,19 +253,21 @@ fn finish<T: qq_helper>
let cx = ecx;
let cfg_call = || mk_call_(
cx, sp, mk_access(cx, sp, ~[@~"ext_cx"], @~"cfg"), ~[]);
cx, sp, mk_access(cx, sp, ids_ext(cx, ~[~"ext_cx"]),
id_ext(cx, ~"cfg")), ~[]);
let parse_sess_call = || mk_call_(
cx, sp, mk_access(cx, sp, ~[@~"ext_cx"], @~"parse_sess"), ~[]);
cx, sp, mk_access(cx, sp, ids_ext(cx, ~[~"ext_cx"]),
id_ext(cx, ~"parse_sess")), ~[]);
let pcall = mk_call(cx,sp,
~[@~"syntax", @~"parse", @~"parser",
@~"parse_from_source_str"],
ids_ext(cx, ~[~"syntax", ~"parse", ~"parser",
~"parse_from_source_str"]),
~[node.mk_parse_fn(cx,sp),
mk_uniq_str(cx,sp, fname),
mk_call(cx,sp,
~[@~"syntax",@~"ext",
@~"qquote", @~"mk_file_substr"],
ids_ext(cx, ~[~"syntax",~"ext",
~"qquote", ~"mk_file_substr"]),
~[mk_uniq_str(cx,sp, loc.file.name),
mk_uint(cx,sp, loc.line),
mk_uint(cx,sp, loc.col)]),
......@@ -267,16 +279,17 @@ fn finish<T: qq_helper>
let mut rcall = pcall;
if (g_len > 0u) {
rcall = mk_call(cx,sp,
~[@~"syntax", @~"ext", @~"qquote", @~"replace"],
ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote",
~"replace"]),
~[pcall,
mk_uniq_vec_e(cx,sp, qcx.gather.map_to_vec(|g| {
mk_call(cx,sp,
~[@~"syntax", @~"ext",
@~"qquote", @g.constr],
ids_ext(cx, ~[~"syntax", ~"ext",
~"qquote", g.constr]),
~[g.e])})),
mk_path(cx,sp,
~[@~"syntax", @~"ext", @~"qquote",
@node.get_fold_fn()])]);
ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote",
node.get_fold_fn()]))]);
}
return rcall;
}
......@@ -331,14 +344,6 @@ fn replace_ty(repls: ~[fragment],
}
}
fn print_expr(expr: @ast::expr) {
let stdout = io::stdout();
let pp = pprust::rust_printer(stdout);
pprust::print_expr(pp, expr);
pp::eof(pp.s);
stdout.write_str(~"\n");
}
fn mk_file_substr(fname: ~str, line: uint, col: uint) ->
codemap::file_substr {
codemap::fss_external({filename: fname, line: line, col: col})
......
import codemap::span;
import std::map::{hashmap, str_hash, box_str_hash};
import std::map::{hashmap, str_hash, uint_hash};
import dvec::{DVec, dvec};
import base::*;
......@@ -135,7 +135,7 @@ fn acumm_bindings(_cx: ext_ctxt, _b_dest: bindings, _b_src: bindings) { }
fn pattern_to_selectors(cx: ext_ctxt, e: @expr) -> binders {
let res: binders =
{real_binders: box_str_hash::<selector>(),
{real_binders: uint_hash::<selector>(),
literal_ast_matchers: dvec()};
//this oughta return binders instead, but macro args are a sequence of
//expressions, rather than a single expression
......@@ -153,7 +153,7 @@ fn trivial_selector(m: matchable) -> match_result {
selectors. */
fn use_selectors_to_bind(b: binders, e: @expr) -> option<bindings> {
let res = box_str_hash::<arb_depth<matchable>>();
let res = uint_hash::<arb_depth<matchable>>();
//need to do this first, to check vec lengths.
for b.literal_ast_matchers.each |sel| {
match sel(match_expr(e)) { none => return none, _ => () }
......@@ -237,7 +237,7 @@ fn follow_for_trans(cx: ext_ctxt, mmaybe: option<arb_depth<matchable>>,
/* helper for transcribe_exprs: what vars from `b` occur in `e`? */
fn free_vars(b: bindings, e: @expr, it: fn(ident)) {
let idents: hashmap<ident, ()> = box_str_hash::<()>();
let idents: hashmap<ident, ()> = uint_hash::<()>();
fn mark_ident(&&i: ident, _fld: ast_fold, b: bindings,
idents: hashmap<ident, ()>) -> ident {
if b.contains_key(i) { idents.insert(i, ()); }
......@@ -253,6 +253,12 @@ fn mark_ident(&&i: ident, _fld: ast_fold, b: bindings,
for idents.each_key |x| { it(x); };
}
fn wrong_occurs(cx: ext_ctxt, l: ident, l_c: uint, r: ident, r_c: uint)
-> ~str {
fmt!{"'%s' occurs %u times, but '%s' occurs %u times",
*cx.parse_sess().interner.get(l), l_c,
*cx.parse_sess().interner.get(r), r_c}
}
/* handle sequences (anywhere in the AST) of exprs, either real or ...ed */
fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
......@@ -279,10 +285,8 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
some({rep_count: old_len, name: old_name}) => {
let len = vec::len(*ms);
if old_len != len {
let msg =
fmt!{"'%s' occurs %u times, but ", *fv, len} +
fmt!{"'%s' occurs %u times", *old_name,
old_len};
let msg = wrong_occurs(cx, fv, len,
old_name, old_len);
cx.span_fatal(repeat_me.span, msg);
}
}
......@@ -626,7 +630,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
_body: ast::mac_body) -> base::macro_def {
let args = get_mac_args_no_max(cx, sp, arg, 0u, ~"macro");
let mut macro_name: option<@~str> = none;
let mut macro_name: option<~str> = none;
let mut clauses: ~[@clause] = ~[];
for args.each |arg| {
match arg.node {
......@@ -643,12 +647,15 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
match mac.node {
mac_invoc(pth, invoc_arg, body) => {
match path_to_ident(pth) {
some(id) => match macro_name {
none => macro_name = some(id),
some(other_id) => if id != other_id {
some(id) => {
let id_str = cx.str_of(id);
match macro_name {
none => macro_name = some(id_str),
some(other_id) => if id_str != other_id {
cx.span_fatal(pth.span,
~"macro name must be " +
~"consistent");
}
}
},
none => cx.span_fatal(pth.span,
......@@ -688,7 +695,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
let ext = |a,b,c,d, move clauses| generic_extension(a,b,c,d,clauses);
return {ident:
return {name:
match macro_name {
some(id) => id,
none => cx.span_fatal(sp, ~"macro definition must have " +
......
......@@ -43,14 +43,15 @@ fn expand_file(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
fn expand_stringify(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
let args = get_mac_args(cx, sp, arg, 1u, option::some(1u), ~"stringify");
return mk_uniq_str(cx, sp, pprust::expr_to_str(args[0]));
let s = pprust::expr_to_str(args[0], cx.parse_sess().interner);
return mk_uniq_str(cx, sp, s);
}
fn expand_mod(cx: ext_ctxt, sp: span, arg: ast::mac_arg, _body: ast::mac_body)
-> @ast::expr {
get_mac_args(cx, sp, arg, 0u, option::some(0u), ~"file");
return mk_uniq_str(cx, sp,
str::connect(cx.mod_path().map(|x|*x), ~"::"));
str::connect(cx.mod_path().map(|x| cx.str_of(x)), ~"::"));
}
fn expand_include(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
......
......@@ -15,10 +15,10 @@ fn expand_trace_macros(cx: ext_ctxt, sp: span,
let rdr = tt_rdr as reader;
let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE);
let arg = rust_parser.parse_ident();
let arg = cx.str_of(rust_parser.parse_ident());
match arg {
@~"true" => cx.set_trace_macros(true),
@~"false" => cx.set_trace_macros(false),
~"true" => cx.set_trace_macros(true),
~"false" => cx.set_trace_macros(false),
_ => cx.span_fatal(sp, ~"trace_macros! only accepts `true` or `false`")
}
let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE);
......
......@@ -10,7 +10,7 @@
import dvec::{DVec, dvec};
import ast::{matcher, match_tok, match_seq, match_nonterminal, ident};
import ast_util::mk_sp;
import std::map::{hashmap, box_str_hash};
import std::map::{hashmap, uint_hash};
/* This is an Earley-like parser, without support for in-grammar nonterminals,
onlyl calling out to the main rust parser for named nonterminals (which it
......@@ -120,14 +120,14 @@ fn n_rec(p_s: parse_sess, m: matcher, res: ~[@named_match],
}
{node: match_nonterminal(bind_name, _, idx), span: sp} => {
if ret_val.contains_key(bind_name) {
p_s.span_diagnostic.span_fatal(sp, ~"Duplicated bind name: "
+ *bind_name)
p_s.span_diagnostic.span_fatal(sp, ~"Duplicated bind name: "+
*p_s.interner.get(bind_name))
}
ret_val.insert(bind_name, res[idx]);
}
}
}
let ret_val = box_str_hash::<@named_match>();
let ret_val = uint_hash::<@named_match>();
for ms.each() |m| { n_rec(p_s, m, res, ret_val) }
return ret_val;
}
......@@ -274,7 +274,8 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
let nts = str::connect(vec::map(bb_eis, |ei| {
match ei.elts[ei.idx].node {
match_nonterminal(bind,name,_) => {
fmt!{"%s ('%s')", *name, *bind}
fmt!{"%s ('%s')", *sess.interner.get(name),
*sess.interner.get(bind)}
}
_ => fail
} }), ~" or ");
......@@ -298,7 +299,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
match ei.elts[ei.idx].node {
match_nonterminal(_, name, idx) => {
ei.matches[idx].push(@matched_nonterminal(
parse_nt(rust_parser, *name)));
parse_nt(rust_parser, *sess.interner.get(name))));
ei.idx += 1u;
}
_ => fail
......
......@@ -8,6 +8,7 @@
import earley_parser::{parse, parse_or_else, success, failure, named_match,
matched_seq, matched_nonterminal, error};
import std::map::hashmap;
import parse::token::special_idents;
fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
arg: ~[ast::token_tree]) -> base::mac_result {
......@@ -16,14 +17,17 @@ fn ms(m: matcher_) -> matcher {
{node: m, span: {lo: 0u, hi: 0u, expn_info: none}}
}
let lhs_nm = cx.parse_sess().interner.gensym(@~"lhs");
let rhs_nm = cx.parse_sess().interner.gensym(@~"rhs");
// The grammar for macro_rules! is:
// $( $lhs:mtcs => $rhs:tt );+
// ...quasiquoting this would be nice.
let argument_gram = ~[
ms(match_seq(~[
ms(match_nonterminal(@~"lhs",@~"matchers", 0u)),
ms(match_nonterminal(lhs_nm, special_idents::matchers, 0u)),
ms(match_tok(FAT_ARROW)),
ms(match_nonterminal(@~"rhs",@~"tt", 1u)),
ms(match_nonterminal(rhs_nm, special_idents::tt, 1u)),
], some(SEMI), false, 0u, 2u)),
//to phase into semicolon-termination instead of
//semicolon-separation
......@@ -37,11 +41,11 @@ fn ms(m: matcher_) -> matcher {
arg_reader as reader, argument_gram);
// Extract the arguments:
let lhses:~[@named_match] = match argument_map.get(@~"lhs") {
let lhses:~[@named_match] = match argument_map.get(lhs_nm) {
@matched_seq(s, sp) => s,
_ => cx.span_bug(sp, ~"wrong-structured lhs")
};
let rhses:~[@named_match] = match argument_map.get(@~"rhs") {
let rhses:~[@named_match] = match argument_map.get(rhs_nm) {
@matched_seq(s, sp) => s,
_ => cx.span_bug(sp, ~"wrong-structured rhs")
};
......@@ -53,8 +57,9 @@ fn generic_extension(cx: ext_ctxt, sp: span, name: ident,
-> mac_result {
if cx.trace_macros() {
io::println(fmt!("%s! { %s }", *name,
print::pprust::unexpanded_tt_to_str(
io::println(fmt!("%s! { %s }",
cx.str_of(name),
print::pprust::tt_to_str(
ast::tt_delim(arg),
cx.parse_sess().interner)));
}
......@@ -103,7 +108,7 @@ fn generic_extension(cx: ext_ctxt, sp: span, name: ident,
arg, lhses, rhses);
return mr_def({
ident: name,
name: *cx.parse_sess().interner.get(name),
ext: expr_tt({expander: exp, span: some(sp)})
});
}
\ No newline at end of file
......@@ -47,7 +47,7 @@ fn new_tt_reader(sp_diag: span_handler, itr: ident_interner,
mut cur: @{readme: src, mut idx: 0u, dotdotdoted: false,
sep: none, up: tt_frame_up(option::none)},
interpolations: match interp { /* just a convienience */
none => std::map::box_str_hash::<@named_match>(),
none => std::map::uint_hash::<@named_match>(),
some(x) => x
},
mut repeat_idx: ~[mut], mut repeat_len: ~[],
......@@ -100,8 +100,8 @@ enum lis {
lis_unconstrained, lis_constraint(uint, ident), lis_contradiction(~str)
}
fn lockstep_iter_size(&&t: token_tree, &&r: tt_reader) -> lis {
fn lis_merge(lhs: lis, rhs: lis) -> lis {
fn lockstep_iter_size(t: token_tree, r: tt_reader) -> lis {
fn lis_merge(lhs: lis, rhs: lis, r: tt_reader) -> lis {
match lhs {
lis_unconstrained => rhs,
lis_contradiction(_) => lhs,
......@@ -110,9 +110,11 @@ fn lis_merge(lhs: lis, rhs: lis) -> lis {
lis_contradiction(_) => rhs,
lis_constraint(r_len, _) if l_len == r_len => lhs,
lis_constraint(r_len, r_id) => {
let l_n = *r.interner.get(l_id);
let r_n = *r.interner.get(r_id);
lis_contradiction(fmt!{"Inconsistent lockstep iteration: \
'%s' has %u items, but '%s' has %u",
*l_id, l_len, *r_id, r_len})
l_n, l_len, r_n, r_len})
}
}
}
......@@ -120,7 +122,7 @@ fn lis_merge(lhs: lis, rhs: lis) -> lis {
match t {
tt_delim(tts) | tt_seq(_, tts, _, _) => {
vec::foldl(lis_unconstrained, tts, {|lis, tt|
lis_merge(lis, lockstep_iter_size(tt, r)) })
lis_merge(lis, lockstep_iter_size(tt, r), r) })
}
tt_tok(*) => lis_unconstrained,
tt_nonterminal(_, name) => match *lookup_cur_matched(r, name) {
......@@ -230,7 +232,7 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
r.sp_diag.span_fatal(
copy r.cur_span, /* blame the macro writer */
fmt!{"variable '%s' is still repeating at this depth",
*ident});
*r.interner.get(ident)});
}
}
}
......
......@@ -81,15 +81,14 @@ trait ast_fold {
fn fold_meta_item_(&&mi: @meta_item, fld: ast_fold) -> @meta_item {
return @{node:
match mi.node {
meta_word(id) => meta_word(fld.fold_ident(id)),
meta_word(id) => meta_word(id),
meta_list(id, mis) => {
let fold_meta_item = |x|fold_meta_item_(x, fld);
meta_list(/* FIXME: (#2543) */ copy id,
vec::map(mis, fold_meta_item))
}
meta_name_value(id, s) => {
meta_name_value(fld.fold_ident(id),
/* FIXME (#2543) */ copy s)
meta_name_value(id, /* FIXME (#2543) */ copy s)
}
},
span: fld.new_span(mi.span)};
......
......@@ -66,7 +66,7 @@ fn parse_outer_attributes() -> ~[ast::attribute] {
}
token::DOC_COMMENT(s) => {
let attr = ::attr::mk_sugared_doc_attr(
*self.get_str(s), self.span.lo, self.span.hi);
*self.id_to_str(s), self.span.lo, self.span.hi);
if attr.node.style != ast::attr_outer {
self.fatal(~"expected outer comment");
}
......@@ -128,7 +128,7 @@ fn parse_inner_attrs_and_next() ->
}
token::DOC_COMMENT(s) => {
let attr = ::attr::mk_sugared_doc_attr(
*self.get_str(s), self.span.lo, self.span.hi);
*self.id_to_str(s), self.span.lo, self.span.hi);
self.bump();
if attr.node.style == ast::attr_inner {
inner_attrs += ~[attr];
......@@ -145,22 +145,22 @@ fn parse_inner_attrs_and_next() ->
fn parse_meta_item() -> @ast::meta_item {
let lo = self.span.lo;
let ident = self.parse_ident();
let name = *self.id_to_str(self.parse_ident());
match self.token {
token::EQ => {
self.bump();
let lit = self.parse_lit();
let mut hi = self.span.hi;
return @spanned(lo, hi, ast::meta_name_value(ident, lit));
return @spanned(lo, hi, ast::meta_name_value(name, lit));
}
token::LPAREN => {
let inner_items = self.parse_meta_seq();
let mut hi = self.span.hi;
return @spanned(lo, hi, ast::meta_list(ident, inner_items));
return @spanned(lo, hi, ast::meta_list(name, inner_items));
}
_ => {
let mut hi = self.span.hi;
return @spanned(lo, hi, ast::meta_word(ident));
return @spanned(lo, hi, ast::meta_word(name));
}
}
}
......
......@@ -276,7 +276,7 @@ fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
srdr: io::Reader) ->
{cmnts: ~[cmnt], lits: ~[lit]} {
let src = @str::from_bytes(srdr.read_whole_stream());
let itr = parse::token::mk_ident_interner();
let itr = parse::token::mk_fake_ident_interner();
let rdr = lexer::new_low_level_string_reader
(span_diagnostic, codemap::new_filemap(path, src, 0u, 0u), itr);
......
......@@ -85,7 +85,7 @@ fn expect(t: token::token) {
fn parse_ident() -> ast::ident {
match copy self.token {
token::IDENT(i, _) => { self.bump(); return self.get_str(i); }
token::IDENT(i, _) => { self.bump(); return i; }
token::INTERPOLATED(token::nt_ident(*)) => { self.bug(
~"ident interpolation not converted to real token"); }
_ => { self.fatal(~"expected ident, found `"
......@@ -110,6 +110,8 @@ fn eat(tok: token::token) -> bool {
return if self.token == tok { self.bump(); true } else { false };
}
// Storing keywords as interned idents instead of strings would be nifty.
// A sanity check that the word we are asking for is a known keyword
fn require_keyword(word: ~str) {
if !self.keywords.contains_key_ref(&word) {
......@@ -119,7 +121,7 @@ fn require_keyword(word: ~str) {
fn token_is_word(word: ~str, ++tok: token::token) -> bool {
match tok {
token::IDENT(sid, false) => { word == *self.get_str(sid) }
token::IDENT(sid, false) => { *self.id_to_str(sid) == word }
_ => { false }
}
}
......@@ -136,7 +138,7 @@ fn is_keyword(word: ~str) -> bool {
fn is_any_keyword(tok: token::token) -> bool {
match tok {
token::IDENT(sid, false) => {
self.keywords.contains_key_ref(self.get_str(sid))
self.keywords.contains_key_ref(self.id_to_str(sid))
}
_ => false
}
......@@ -148,7 +150,7 @@ fn eat_keyword(word: ~str) -> bool {
let mut bump = false;
let val = match self.token {
token::IDENT(sid, false) => {
if word == *self.get_str(sid) {
if word == *self.id_to_str(sid) {
bump = true;
true
} else { false }
......
......@@ -78,10 +78,10 @@ fn file_exists(path: ~str) -> bool {
}
}
fn cdir_path_opt(id: ast::ident, attrs: ~[ast::attribute]) -> @~str {
fn cdir_path_opt(default: ~str, attrs: ~[ast::attribute]) -> ~str {
match ::attr::first_attr_value_str_by_name(attrs, ~"path") {
some(d) => return d,
none => return id
some(d) => d,
none => default
}
}
......@@ -90,11 +90,12 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: ~str,
&items: ~[@ast::item]) {
match cdir.node {
ast::cdir_src_mod(id, attrs) => {
let file_path = cdir_path_opt(@(*id + ~".rs"), attrs);
let file_path = cdir_path_opt((cx.sess.interner.get(id) + ~".rs"),
attrs);
let full_path =
if path::path_is_absolute(*file_path) {
*file_path
} else { prefix + path::path_sep() + *file_path };
if path::path_is_absolute(file_path) {
file_path
} else { prefix + path::path_sep() + file_path };
let (p0, r0) =
new_parser_etc_from_file(cx.sess, cx.cfg, full_path, SOURCE_FILE);
let inner_attrs = p0.parse_inner_attrs_and_next();
......@@ -111,11 +112,11 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: ~str,
vec::push(items, i);
}
ast::cdir_dir_mod(id, cdirs, attrs) => {
let path = cdir_path_opt(id, attrs);
let path = cdir_path_opt(*cx.sess.interner.get(id), attrs);
let full_path =
if path::path_is_absolute(*path) {
*path
} else { prefix + path::path_sep() + *path };
if path::path_is_absolute(path) {
path
} else { prefix + path::path_sep() + path };
let (m0, a0) = eval_crate_directives_to_mod(
cx, cdirs, full_path, none);
let i =
......
......@@ -6,7 +6,7 @@
import token::{can_begin_expr, is_ident, is_ident_or_path, is_plain_ident,
INTERPOLATED};
import codemap::{span,fss_none};
import util::interner;
import util::interner::interner;
import ast_util::{spanned, respan, mk_sp, ident_to_path, operator_prec};
import lexer::reader;
import prec::{as_prec, token_to_binop};
......@@ -193,12 +193,14 @@ struct parser {
let mut restriction: restriction;
let mut quote_depth: uint; // not (yet) related to the quasiquoter
let reader: reader;
let interner: interner<@~str>;
let keywords: hashmap<~str, ()>;
let restricted_keywords: hashmap<~str, ()>;
new(sess: parse_sess, cfg: ast::crate_cfg, +rdr: reader, ftype: file_type)
{
self.reader <- rdr;
self.interner = self.reader.interner();
let tok0 = self.reader.next_token();
let span0 = tok0.sp;
self.sess = sess;
......@@ -268,11 +270,10 @@ fn bug(m: ~str) -> ! {
fn warn(m: ~str) {
self.sess.span_diagnostic.span_warn(copy self.span, m)
}
pure fn get_str(i: token::str_num) -> @~str {
self.reader.interner().get(i)
}
fn get_id() -> node_id { next_node_id(self.sess) }
pure fn id_to_str(id: ident) -> @~str { self.sess.interner.get(id) }
fn parse_ty_fn(purity: ast::purity) -> ty_ {
let proto, bounds;
if self.eat_keyword(~"extern") {
......@@ -398,9 +399,9 @@ fn parse_ret_ty() -> (ret_style, @ty) {
}
}
fn region_from_name(s: option<@~str>) -> @region {
fn region_from_name(s: option<ident>) -> @region {
let r = match s {
some (string) => re_named(string),
some (id) => re_named(id),
none => re_anon
};
......@@ -414,8 +415,7 @@ fn parse_region() -> @region {
match copy self.token {
token::IDENT(sid, _) => {
self.bump();
let n = self.get_str(sid);
self.region_from_name(some(n))
self.region_from_name(some(sid))
}
_ => {
self.region_from_name(none)
......@@ -430,7 +430,7 @@ fn parse_region_with_sep() -> @region {
token::IDENT(sid, _) => {
if self.look_ahead(1u) == token::BINOP(token::SLASH) {
self.bump(); self.bump();
some(self.get_str(sid))
some(sid)
} else {
none
}
......@@ -583,7 +583,7 @@ fn parse_arg_general(require_name: bool) -> arg {
let name = self.parse_value_ident();
self.bump();
name
} else { @~"" }
} else { token::special_idents::invalid }
};
let t = self.parse_ty(false);
......@@ -678,10 +678,10 @@ fn lit_from_token(tok: token::token) -> lit_ {
token::LIT_INT(i, it) => lit_int(i, it),
token::LIT_UINT(u, ut) => lit_uint(u, ut),
token::LIT_INT_UNSUFFIXED(i) => lit_int_unsuffixed(i),
token::LIT_FLOAT(s, ft) => lit_float(self.get_str(s), ft),
token::LIT_STR(s) => lit_str(self.get_str(s)),
token::LPAREN => { self.expect(token::RPAREN); lit_nil }
_ => self.unexpected_last(tok)
token::LIT_FLOAT(s, ft) => lit_float(self.id_to_str(s), ft),
token::LIT_STR(s) => lit_str(self.id_to_str(s)),
token::LPAREN => { self.expect(token::RPAREN); lit_nil },
_ => { self.unexpected_last(tok); }
}
}
......@@ -1140,8 +1140,7 @@ fn parse_dot_or_call_expr_with(e0: pexpr) -> pexpr {
self.parse_seq_to_gt(some(token::COMMA),
|p| p.parse_ty(false))
} else { ~[] };
e = self.mk_pexpr(lo, hi, expr_field(self.to_expr(e),
self.get_str(i),
e = self.mk_pexpr(lo, hi, expr_field(self.to_expr(e), i,
tys));
}
_ => self.unexpected()
......@@ -2123,9 +2122,6 @@ fn check_expected_item(p: parser, current_attrs: ~[attribute]) {
}
fn expr_is_complete(e: pexpr) -> bool {
log(debug, (~"expr_is_complete", self.restriction,
print::pprust::expr_to_str(*e),
classify::expr_requires_semi_to_be_stmt(*e)));
return self.restriction == RESTRICT_STMT_EXPR &&
!classify::expr_requires_semi_to_be_stmt(*e);
}
......@@ -2306,8 +2302,9 @@ fn parse_fn_decl(purity: purity,
fn is_self_ident() -> bool {
match self.token {
token::IDENT(sid, false) if ~"self" == *self.get_str(sid) => true,
_ => false
token::IDENT(id, false) if id == token::special_idents::self_
=> true,
_ => false
}
}
......@@ -2522,11 +2519,13 @@ fn wrap_path(p: parser, pt: @path) -> @ty {
}
// This is a new-style impl declaration.
let ident = @~"__extensions__"; // XXX: clownshoes
// XXX: clownshoes
let ident = token::special_idents::clownshoes_extensions;
// Parse the type.
let ty = self.parse_ty(false);
// Parse traits, if necessary.
let traits = if self.token == token::COLON {
self.bump();
......@@ -2595,7 +2594,8 @@ fn parse_item_class() -> item_info {
match the_ctor {
some((_, _, _, s_first)) => {
self.span_note(s, #fmt("Duplicate constructor \
declaration for class %s", *class_name));
declaration for class %s",
*self.interner.get(class_name)));
self.span_fatal(copy s_first, ~"First constructor \
declared here");
}
......@@ -2608,7 +2608,8 @@ fn parse_item_class() -> item_info {
match the_dtor {
some((_, _, s_first)) => {
self.span_note(s, #fmt("Duplicate destructor \
declaration for class %s", *class_name));
declaration for class %s",
*self.interner.get(class_name)));
self.span_fatal(copy s_first, ~"First destructor \
declared here");
}
......@@ -3081,7 +3082,7 @@ fn parse_item_enum() -> item_info {
let ty_params = self.parse_ty_params();
// Newtype syntax
if self.token == token::EQ {
self.check_restricted_keywords_(*id);
self.check_restricted_keywords_(*self.id_to_str(id));
self.bump();
let ty = self.parse_ty(false);
self.expect(token::SEMI);
......@@ -3297,7 +3298,7 @@ fn parse_view_path() -> @view_path {
let lo = self.span.lo;
let first_ident = self.parse_ident();
let mut path = ~[first_ident];
debug!{"parsed view_path: %s", *first_ident};
debug!{"parsed view_path: %s", *self.id_to_str(first_ident)};
match self.token {
token::EQ => {
// x = foo::bar
......@@ -3323,7 +3324,7 @@ fn parse_view_path() -> @view_path {
token::IDENT(i, _) => {
self.bump();
vec::push(path, self.get_str(i));
vec::push(path, i);
}
// foo::bar::{a,b,c}
......@@ -3458,8 +3459,8 @@ fn parse_crate_mod(_cfg: crate_cfg) -> @crate {
fn parse_str() -> @~str {
match copy self.token {
token::LIT_STR(s) => { self.bump(); self.get_str(s) }
_ => self.fatal(~"expected string literal")
token::LIT_STR(s) => { self.bump(); self.id_to_str(s) }
_ => self.fatal(~"expected string literal")
}
}
......
......@@ -277,23 +277,83 @@ fn is_lit(t: token) -> bool {
match t { BINOP(OR) | OROR => true, _ => false }
}
type ident_interner = util::interner::interner<@~str>;
// Well-known identifiers with fixed, pre-interned indices. The numeric
// values here must match, position for position, the `init_vec` used by
// `mk_ident_interner` below — each const is the index of its string in
// that prefill vector.
mod special_idents {
// NOTE(review): the three `uint`-typed consts below appear to be
// pre-change residue from the diff rendering (the same names are
// redeclared with type `ident` after the import) — confirm against the
// applied tree.
const underscore : uint = 0u;
const anon : uint = 1u;
const destr : uint = 2u; // 'drop', but that's reserved
import ast::ident;
const underscore : ident = 0u;
const anon : ident = 1u;
const dtor : ident = 2u; // 'drop', but that's reserved
const invalid : ident = 3u; // ''
const unary : ident = 4u;
const not_fn : ident = 5u;
const idx_fn : ident = 6u;
const unary_minus_fn : ident = 7u;
const clownshoes_extensions : ident = 8u;
const self_ : ident = 9u; // 'self'
/* for matcher NTs */
const item : ident = 10u;
const block : ident = 11u;
const stmt : ident = 12u;
const pat : ident = 13u;
const expr : ident = 14u;
const ty : ident = 15u;
const ident : ident = 16u;
const path : ident = 17u;
const tt : ident = 18u;
const matchers : ident = 19u;
const str : ident = 20u; // for the type
/* outside of libsyntax */
const ty_visitor : ident = 21u;
const arg : ident = 22u;
const descrim : ident = 23u;
const clownshoe_abi : ident = 24u;
const clownshoe_stack_shim : ident = 25u;
const tydesc : ident = 26u;
const literally_dtor : ident = 27u;
const main : ident = 28u;
const opaque : ident = 29u;
const blk : ident = 30u;
const static : ident = 31u;
const intrinsic : ident = 32u;
}
type ident_interner = util::interner::interner<@~str>;
/** Key for thread-local data for sneaking interner information to the
* serializer/deserializer. It sounds like a hack because it is one. */
fn interner_key(+_x: @@ident_interner) { }
// Build the canonical ident interner, prefilled so that the indices of
// the seed strings line up exactly with the consts in `special_idents`.
// Also stashes the interner in task-local data under `interner_key` so
// the serializer/deserializer can find it; only one such interner may
// exist per task (see the assert below).
fn mk_ident_interner() -> ident_interner {
/* the indices here must correspond to the numbers in special_idents */
// NOTE(review): the next three lines (short init_vec and the boxed
// `@interner::mk_prefill` binding) look like pre-change residue from
// the diff rendering, superseded by the full list below — confirm
// against the applied tree.
let init_vec = ~[@~"_", @~"anon", @~"drop"];
let rv = @interner::mk_prefill::<@~str>(|x| str::hash(*x),
|x,y| str::eq(*x, *y), init_vec);
// Order matters: element i of this vector becomes ident index i.
let init_vec = ~[@~"_", @~"anon", @~"drop", @~"", @~"unary", @~"!",
@~"[]", @~"unary-", @~"__extensions__", @~"self",
@~"item", @~"block", @~"stmt", @~"pat", @~"expr",
@~"ty", @~"ident", @~"path", @~"tt", @~"matchers",
@~"str", @~"ty_visitor", @~"arg", @~"descrim",
@~"__rust_abi", @~"__rust_stack_shim", @~"tydesc",
@~"dtor", @~"main", @~"<opaque>", @~"blk", @~"static",
@~"intrinsic"];
let rv = interner::mk_prefill::<@~str>(|x| str::hash(*x),
|x,y| str::eq(*x, *y), init_vec);
/* having multiple interners will just confuse the serializer */
unsafe{ assert task::local_data_get(interner_key) == none };
unsafe{ task::local_data_set(interner_key, @rv) };
rv
}
/* for when we don't care about the contents; doesn't interact with TLD or
serialization */
// Build a throwaway interner with no prefilled special idents.
// For callers that only need string<->index mapping and don't care
// about the contents; unlike `mk_ident_interner` it does not touch
// task-local data, so it is safe to create many and it never interacts
// with serialization.
fn mk_fake_ident_interner() -> ident_interner {
interner::mk::<@~str>(|x| str::hash(*x), |x,y| str::eq(*x, *y))
}
/**
* All the valid words that have meaning in the Rust language.
*
......
......@@ -8,9 +8,7 @@
import ast_util::{operator_prec};
import dvec::{DVec, dvec};
import parse::classify::*;
import util::interner;
type ident_interner = interner::interner<@~str>;
import parse::token::ident_interner;
// The ps is stored here to prevent recursive type.
enum ann_node {
......@@ -47,19 +45,7 @@ fn end(s: ps) {
pp::end(s.s);
}
fn rust_printer(writer: io::Writer) -> ps {
return @{s: pp::mk_printer(writer, default_columns),
cm: none::<codemap>,
intr: token::mk_ident_interner(),
comments: none::<~[comments::cmnt]>,
literals: none::<~[comments::lit]>,
mut cur_cmnt: 0u,
mut cur_lit: 0u,
boxes: dvec(),
ann: no_ann()};
}
fn unexpanded_rust_printer(writer: io::Writer, intr: ident_interner) -> ps {
fn rust_printer(writer: io::Writer, intr: ident_interner) -> ps {
return @{s: pp::mk_printer(writer, default_columns),
cm: none::<codemap>,
intr: intr,
......@@ -79,7 +65,7 @@ fn unexpanded_rust_printer(writer: io::Writer, intr: ident_interner) -> ps {
// Requires you to pass an input filename and reader so that
// it can scan the input text for comments and literals to
// copy forward.
fn print_crate(cm: codemap, intr: interner::interner<@~str>,
fn print_crate(cm: codemap, intr: ident_interner,
span_diagnostic: diagnostic::span_handler,
crate: @ast::crate, filename: ~str, in: io::Reader,
out: io::Writer, ann: pp_ann, is_expanded: bool) {
......@@ -107,41 +93,46 @@ fn print_crate_(s: ps, &&crate: @ast::crate) {
eof(s.s);
}
fn ty_to_str(ty: @ast::ty) -> ~str { return to_str(ty, print_type); }
// Pretty-print a type AST node to a string. `intr` maps interned ident
// indices back to their text (required by every printer entry point
// since idents became interner indices).
fn ty_to_str(ty: @ast::ty, intr: ident_interner) -> ~str {
to_str(ty, print_type, intr)
}
fn pat_to_str(pat: @ast::pat) -> ~str { return to_str(pat, print_pat); }
// Pretty-print a pattern AST node to a string, resolving interned
// idents through `intr`.
fn pat_to_str(pat: @ast::pat, intr: ident_interner) -> ~str {
to_str(pat, print_pat, intr)
}
fn expr_to_str(e: @ast::expr) -> ~str { return to_str(e, print_expr); }
// Pretty-print an expression AST node to a string, resolving interned
// idents through `intr`.
fn expr_to_str(e: @ast::expr, intr: ident_interner) -> ~str {
to_str(e, print_expr, intr)
}
fn unexpanded_tt_to_str(tt: ast::token_tree, intr: ident_interner)
-> ~str {
let buffer = io::mem_buffer();
let s = unexpanded_rust_printer(io::mem_buffer_writer(buffer), intr);
print_tt(s, tt);
eof(s.s);
io::mem_buffer_str(buffer)
// Pretty-print a token tree to a string (replaces the old
// `unexpanded_tt_to_str`), resolving interned idents through `intr`.
fn tt_to_str(tt: ast::token_tree, intr: ident_interner) -> ~str {
to_str(tt, print_tt, intr)
}
fn stmt_to_str(s: ast::stmt) -> ~str { return to_str(s, print_stmt); }
// Pretty-print a statement AST node to a string, resolving interned
// idents through `intr`.
fn stmt_to_str(s: ast::stmt, intr: ident_interner) -> ~str {
to_str(s, print_stmt, intr)
}
fn item_to_str(i: @ast::item) -> ~str { return to_str(i, print_item); }
// Pretty-print an item AST node to a string, resolving interned idents
// through `intr`.
fn item_to_str(i: @ast::item, intr: ident_interner) -> ~str {
to_str(i, print_item, intr)
}
fn attr_to_str(i: ast::attribute) -> ~str {
return to_str(i, print_attribute);
// Pretty-print an attribute to a string, resolving interned idents
// through `intr`.
fn attr_to_str(i: ast::attribute, intr: ident_interner) -> ~str {
to_str(i, print_attribute, intr)
}
fn typarams_to_str(tps: ~[ast::ty_param]) -> ~str {
return to_str(tps, print_type_params)
// Pretty-print a type-parameter list (including bounds) to a string,
// resolving interned idents through `intr`.
fn typarams_to_str(tps: ~[ast::ty_param], intr: ident_interner) -> ~str {
to_str(tps, print_type_params, intr)
}
fn path_to_str(&&p: @ast::path) -> ~str {
return to_str(p, |a,b| print_path(a, b, false));
// Pretty-print a path to a string, resolving interned idents through
// `intr`. The `false` forces no `::` before type parameters
// (type-position style rather than expression-position style).
fn path_to_str(&&p: @ast::path, intr: ident_interner) -> ~str {
to_str(p, |a,b| print_path(a, b, false), intr)
}
fn fun_to_str(decl: ast::fn_decl, name: ast::ident,
params: ~[ast::ty_param]) -> ~str {
params: ~[ast::ty_param], intr: ident_interner) -> ~str {
let buffer = io::mem_buffer();
let s = rust_printer(io::mem_buffer_writer(buffer));
let s = rust_printer(io::mem_buffer_writer(buffer), intr);
print_fn(s, decl, name, params, none);
end(s); // Close the head box
end(s); // Close the outer box
......@@ -162,9 +153,9 @@ fn test_fun_to_str() {
assert fun_to_str(decl, "a", ~[]) == "fn a()";
}
fn block_to_str(blk: ast::blk) -> ~str {
fn block_to_str(blk: ast::blk, intr: ident_interner) -> ~str {
let buffer = io::mem_buffer();
let s = rust_printer(io::mem_buffer_writer(buffer));
let s = rust_printer(io::mem_buffer_writer(buffer), intr);
// containing cbox, will be closed by print-block at }
cbox(s, indent_unit);
// head-ibox, will be closed by print-block after {
......@@ -174,16 +165,16 @@ fn block_to_str(blk: ast::blk) -> ~str {
io::mem_buffer_str(buffer)
}
fn meta_item_to_str(mi: ast::meta_item) -> ~str {
return to_str(@mi, print_meta_item);
// Pretty-print a meta item (attribute content) to a string, resolving
// interned idents through `intr`.
fn meta_item_to_str(mi: ast::meta_item, intr: ident_interner) -> ~str {
to_str(@mi, print_meta_item, intr)
}
fn attribute_to_str(attr: ast::attribute) -> ~str {
return to_str(attr, print_attribute);
// Pretty-print an attribute (with `#[...]` syntax) to a string,
// resolving interned idents through `intr`.
fn attribute_to_str(attr: ast::attribute, intr: ident_interner) -> ~str {
to_str(attr, print_attribute, intr)
}
fn variant_to_str(var: ast::variant) -> ~str {
return to_str(var, print_variant);
// Pretty-print an enum variant to a string, resolving interned idents
// through `intr`.
fn variant_to_str(var: ast::variant, intr: ident_interner) -> ~str {
to_str(var, print_variant, intr)
}
#[test]
......@@ -349,7 +340,7 @@ fn print_region(s: ps, region: @ast::region) {
ast::re_anon => word_space(s, ~"&"),
ast::re_named(name) => {
word(s.s, ~"&");
word(s.s, *name);
print_ident(s, name);
}
}
}
......@@ -389,7 +380,7 @@ fn print_type_ex(s: ps, &&ty: @ast::ty, print_colons: bool) {
fn print_field(s: ps, f: ast::ty_field) {
cbox(s, indent_unit);
print_mutability(s, f.node.mt.mutbl);
word(s.s, *f.node.ident);
print_ident(s, f.node.ident);
word_space(s, ~":");
print_type(s, f.node.mt.ty);
end(s);
......@@ -446,7 +437,8 @@ fn print_item(s: ps, &&item: @ast::item) {
match item.node {
ast::item_const(ty, expr) => {
head(s, ~"const");
word_space(s, *item.ident + ~":");
print_ident(s, item.ident);
word_space(s, ~":");
print_type(s, ty);
space(s.s);
end(s); // end the head-ibox
......@@ -464,7 +456,8 @@ fn print_item(s: ps, &&item: @ast::item) {
}
ast::item_mod(_mod) => {
head(s, ~"mod");
word_nbsp(s, *item.ident);
print_ident(s, item.ident);
nbsp(s);
bopen(s);
print_mod(s, _mod, item.attrs);
bclose(s, item.span);
......@@ -472,7 +465,8 @@ fn print_item(s: ps, &&item: @ast::item) {
ast::item_foreign_mod(nmod) => {
head(s, ~"extern");
word_nbsp(s, ~"mod");
word_nbsp(s, *item.ident);
print_ident(s, item.ident);
nbsp(s);
bopen(s);
print_foreign_mod(s, nmod, item.attrs);
bclose(s, item.span);
......@@ -481,7 +475,7 @@ fn print_item(s: ps, &&item: @ast::item) {
ibox(s, indent_unit);
ibox(s, 0u);
word_nbsp(s, ~"type");
word(s.s, *item.ident);
print_ident(s, item.ident);
print_type_params(s, params);
end(s); // end the inner ibox
......@@ -498,6 +492,7 @@ fn print_item(s: ps, &&item: @ast::item) {
head(s, ~"struct");
print_struct(s, struct_def, tps, item.ident, item.span);
}
ast::item_impl(tps, traits, ty, methods) => {
head(s, ~"impl");
if tps.is_not_empty() {
......@@ -522,7 +517,7 @@ fn print_item(s: ps, &&item: @ast::item) {
}
ast::item_trait(tps, traits, methods) => {
head(s, ~"trait");
word(s.s, *item.ident);
print_ident(s, item.ident);
print_type_params(s, tps);
if vec::len(traits) != 0u {
word_space(s, ~":");
......@@ -535,7 +530,9 @@ fn print_item(s: ps, &&item: @ast::item) {
bclose(s, item.span);
}
ast::item_mac({node: ast::mac_invoc_tt(pth, tts), _}) => {
head(s, path_to_str(pth) + ~"! " + *item.ident);
print_path(s, pth, false);
head(s, ~"! ");
print_ident(s, item.ident);
bopen(s);
for tts.each |tt| { print_tt(s, tt); }
bclose(s, item.span);
......@@ -552,7 +549,7 @@ fn print_enum_def(s: ps, enum_definition: ast::enum_def,
span: ast::span) {
let mut newtype =
vec::len(enum_definition.variants) == 1u &&
str::eq(ident, enum_definition.variants[0].node.name);
ident == enum_definition.variants[0].node.name;
if newtype {
match enum_definition.variants[0].node.kind {
ast::tuple_variant_kind(args) if args.len() == 1 => {}
......@@ -566,7 +563,7 @@ fn print_enum_def(s: ps, enum_definition: ast::enum_def,
head(s, ~"enum");
}
word(s.s, *ident);
print_ident(s, ident);
print_type_params(s, params);
space(s.s);
if newtype {
......@@ -599,7 +596,8 @@ fn print_variants(s: ps, variants: ~[ast::variant], span: ast::span) {
fn print_struct(s: ps, struct_def: @ast::struct_def, tps: ~[ast::ty_param],
ident: ast::ident, span: ast::span) {
word_nbsp(s, *ident);
print_ident(s, ident);
nbsp(s);
print_type_params(s, tps);
if vec::len(struct_def.traits) != 0u {
word_space(s, ~":");
......@@ -639,7 +637,7 @@ fn print_struct(s: ps, struct_def: @ast::struct_def, tps: ~[ast::ty_param],
if mutability == ast::class_mutable {
word_nbsp(s, ~"mut");
}
word(s.s, *ident);
print_ident(s, ident);
word_nbsp(s, ~":");
print_type(s, field.node.ty);
word(s.s, ~";");
......@@ -684,7 +682,8 @@ fn print_tt(s: ps, tt: ast::token_tree) {
s.s.token_tree_last_was_ident = false;
}
ast::tt_nonterminal(_, name) => {
word(s.s, ~"$" + *name);
word(s.s, ~"$");
print_ident(s, name);
s.s.token_tree_last_was_ident = true;
}
}
......@@ -693,7 +692,7 @@ fn print_tt(s: ps, tt: ast::token_tree) {
fn print_variant(s: ps, v: ast::variant) {
match v.node.kind {
ast::tuple_variant_kind(args) => {
word(s.s, *v.node.name);
print_ident(s, v.node.name);
if vec::len(args) > 0u {
popen(s);
fn print_variant_arg(s: ps, arg: ast::variant_arg) {
......@@ -780,7 +779,7 @@ fn print_attribute(s: ps, attr: ast::attribute) {
if attr.node.is_sugared_doc {
let meta = attr::attr_meta(attr);
let comment = attr::get_meta_item_value_str(meta).get();
word(s.s, *comment);
word(s.s, comment);
} else {
word(s.s, ~"#[");
print_meta_item(s, @attr.node.value);
......@@ -935,7 +934,8 @@ fn print_mac(s: ps, m: ast::mac) {
// FIXME: extension 'body' (#2339)
}
ast::mac_invoc_tt(pth, tts) => {
head(s, path_to_str(pth) + ~"!");
print_path(s, pth, false);
head(s, ~"!");
bopen(s);
for tts.each() |tt| { print_tt(s, tt); }
bclose(s, m.span);
......@@ -956,7 +956,7 @@ fn print_vstore(s: ps, t: ast::vstore) {
ast::re_anon => word(s.s, ~"&"),
ast::re_named(name) => {
word(s.s, ~"&");
word(s.s, *name);
print_ident(s, name);
word(s.s, ~".");
}
}
......@@ -967,7 +967,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
fn print_field(s: ps, field: ast::field) {
ibox(s, indent_unit);
if field.node.mutbl == ast::m_mutbl { word_nbsp(s, ~"mut"); }
word(s.s, *field.node.ident);
print_ident(s, field.node.ident);
word_space(s, ~":");
print_expr(s, field.node.expr);
end(s);
......@@ -1125,7 +1125,7 @@ fn print_field(s: ps, field: ast::field) {
ast::expr_loop(blk, opt_ident) => {
head(s, ~"loop");
space(s.s);
option::iter(opt_ident, |ident| word_space(s, *ident));
option::iter(opt_ident, |ident| {print_ident(s, ident); space(s.s)});
print_block(s, blk);
}
ast::expr_match(expr, arms, mode) => {
......@@ -1270,7 +1270,7 @@ fn print_field(s: ps, field: ast::field) {
print_expr_parens_if_not_bot(s, expr);
}
word(s.s, ~".");
word(s.s, *id);
print_ident(s, id);
if vec::len(tys) > 0u {
word(s.s, ~"::<");
commasep(s, inconsistent, tys, print_type);
......@@ -1294,12 +1294,12 @@ fn print_field(s: ps, field: ast::field) {
ast::expr_break(opt_ident) => {
word(s.s, ~"break");
space(s.s);
option::iter(opt_ident, |ident| word_space(s, *ident));
option::iter(opt_ident, |ident| {print_ident(s, ident); space(s.s)});
}
ast::expr_again(opt_ident) => {
word(s.s, ~"again");
space(s.s);
option::iter(opt_ident, |ident| word_space(s, *ident));
option::iter(opt_ident, |ident| {print_ident(s, ident); space(s.s)});
}
ast::expr_ret(result) => {
word(s.s, ~"return");
......@@ -1395,7 +1395,7 @@ fn print_local(s: ps, &&loc: @ast::local) {
}
}
fn print_ident(s: ps, ident: ast::ident) { word(s.s, *ident); }
fn print_ident(s: ps, ident: ast::ident) { word(s.s, *s.intr.get(ident)); }
fn print_for_decl(s: ps, loc: @ast::local, coll: @ast::expr) {
print_local_decl(s, loc);
......@@ -1410,7 +1410,7 @@ fn print_path(s: ps, &&path: @ast::path, colons_before_params: bool) {
let mut first = true;
for path.idents.each |id| {
if first { first = false; } else { word(s.s, ~"::"); }
word(s.s, *id);
print_ident(s, id);
}
if path.rp.is_some() || !path.types.is_empty() {
if colons_before_params { word(s.s, ~"::"); }
......@@ -1471,7 +1471,7 @@ fn print_pat(s: ps, &&pat: @ast::pat) {
word(s.s, ~"{");
fn print_field(s: ps, f: ast::field_pat) {
cbox(s, indent_unit);
word(s.s, *f.ident);
print_ident(s, f.ident);
word_space(s, ~":");
print_pat(s, f.pat);
end(s);
......@@ -1489,7 +1489,7 @@ fn print_field(s: ps, f: ast::field_pat) {
word(s.s, ~"{");
fn print_field(s: ps, f: ast::field_pat) {
cbox(s, indent_unit);
word(s.s, *f.ident);
print_ident(s, f.ident);
word_space(s, ~":");
print_pat(s, f.pat);
end(s);
......@@ -1542,7 +1542,7 @@ fn print_fn(s: ps, decl: ast::fn_decl, name: ast::ident,
typarams: ~[ast::ty_param],
opt_self_ty: option<ast::self_ty_>) {
head(s, fn_header_info_to_str(opt_self_ty, decl.purity, none));
word(s.s, *name);
print_ident(s, name);
print_type_params(s, typarams);
print_fn_args_and_ret(s, decl, ~[], opt_self_ty);
}
......@@ -1568,7 +1568,7 @@ fn print_fn_args(s: ps, decl: ast::fn_decl,
if first { first = false; } else { word_space(s, ~","); }
if cap_item.is_move { word_nbsp(s, ~"move") }
else { word_nbsp(s, ~"copy") }
word(s.s, *cap_item.name);
print_ident(s, cap_item.name);
}
end(s);
......@@ -1638,7 +1638,7 @@ fn print_type_params(s: ps, &&params: ~[ast::ty_param]) {
if vec::len(params) > 0u {
word(s.s, ~"<");
fn printParam(s: ps, param: ast::ty_param) {
word(s.s, *param.ident);
print_ident(s, param.ident);
print_bounds(s, param.bounds);
}
commasep(s, inconsistent, params, printParam);
......@@ -1649,14 +1649,14 @@ fn printParam(s: ps, param: ast::ty_param) {
fn print_meta_item(s: ps, &&item: @ast::meta_item) {
ibox(s, indent_unit);
match item.node {
ast::meta_word(name) => word(s.s, *name),
ast::meta_word(name) => word(s.s, name),
ast::meta_name_value(name, value) => {
word_space(s, *name);
word_space(s, name);
word_space(s, ~"=");
print_literal(s, @value);
}
ast::meta_list(name, items) => {
word(s.s, *name);
word(s.s, name);
popen(s);
commasep(s, consistent, items, print_meta_item);
pclose(s);
......@@ -1669,7 +1669,8 @@ fn print_view_path(s: ps, &&vp: @ast::view_path) {
match vp.node {
ast::view_path_simple(ident, path, _) => {
if path.idents[vec::len(path.idents)-1u] != ident {
word_space(s, *ident);
print_ident(s, ident);
space(s.s);
word_space(s, ~"=");
}
print_path(s, path, false);
......@@ -1684,7 +1685,7 @@ fn print_view_path(s: ps, &&vp: @ast::view_path) {
print_path(s, path, false);
word(s.s, ~"::{");
do commasep(s, inconsistent, idents) |s, w| {
word(s.s, *w.node.name)
print_ident(s, w.node.name);
}
word(s.s, ~"}");
}
......@@ -1702,7 +1703,7 @@ fn print_view_item(s: ps, item: @ast::view_item) {
match item.node {
ast::view_item_use(id, mta, _) => {
head(s, ~"use");
word(s.s, *id);
print_ident(s, id);
if vec::len(mta) > 0u {
popen(s);
commasep(s, consistent, mta, print_meta_item);
......@@ -1749,10 +1750,12 @@ fn print_arg(s: ps, input: ast::arg) {
ibox(s, indent_unit);
print_arg_mode(s, input.mode);
match input.ty.node {
ast::ty_infer => word(s.s, *input.ident),
ast::ty_infer => print_ident(s, input.ident),
_ => {
if str::len(*input.ident) > 0u {
word_space(s, *input.ident + ~":");
if input.ident != parse::token::special_idents::invalid {
print_ident(s, input.ident);
word(s.s, ~":");
space(s.s);
}
print_type(s, input.ty);
}
......@@ -1768,7 +1771,7 @@ fn print_ty_fn(s: ps, opt_proto: option<ast::proto>,
ibox(s, indent_unit);
word(s.s, fn_header_info_to_str(opt_self_ty, decl.purity, opt_proto));
print_bounds(s, bounds);
match id { some(id) => { word(s.s, ~" "); word(s.s, *id); } _ => () }
match id { some(id) => { word(s.s, ~" "); print_ident(s, id); } _ => () }
match tps { some(tps) => print_type_params(s, tps), _ => () }
zerobreak(s.s);
......@@ -1880,7 +1883,9 @@ fn print_literal(s: ps, &&lit: @ast::lit) {
}
}
fn lit_to_str(l: @ast::lit) -> ~str { return to_str(l, print_literal); }
// Render a literal AST node back to source text.
// Passes a freshly-made "fake" ident interner because the pretty-printer
// entry point (to_str) now takes an interner argument unconditionally;
// presumably literals contain no identifiers so a stand-in suffices —
// NOTE(review): the name mk_fake_ident_interner suggests this, confirm.
fn lit_to_str(l: @ast::lit) -> ~str {
return to_str(l, print_literal, parse::token::mk_fake_ident_interner());
}
fn next_lit(s: ps, pos: uint) -> option<comments::lit> {
match s.literals {
......@@ -1961,9 +1966,9 @@ fn print_string(s: ps, st: ~str) {
word(s.s, ~"\"");
}
fn to_str<T>(t: T, f: fn@(ps, T)) -> ~str {
fn to_str<T>(t: T, f: fn@(ps, T), intr: ident_interner) -> ~str {
let buffer = io::mem_buffer();
let s = rust_printer(io::mem_buffer_writer(buffer));
let s = rust_printer(io::mem_buffer_writer(buffer), intr);
f(s, t);
eof(s.s);
io::mem_buffer_str(buffer)
......
......@@ -21,7 +21,7 @@ fn mk<T: const copy>(+hasher: hashfn<T>, +eqer: eqfn<T>) -> interner<T> {
fn mk_prefill<T: const copy>(hasher: hashfn<T>, eqer: eqfn<T>,
init: ~[T]) -> interner<T> {
let rv = mk(hasher, eqer);
let rv = mk(copy hasher, copy eqer);
for init.each() |v| { rv.intern(v); }
return rv;
}
......@@ -30,6 +30,7 @@ fn mk_prefill<T: const copy>(hasher: hashfn<T>, eqer: eqfn<T>,
/* when traits can extend traits, we should extend index<uint,T> to get [] */
trait interner<T: const copy> {
fn intern(T) -> uint;
fn gensym(T) -> uint;
pure fn get(uint) -> T;
fn len() -> uint;
}
......@@ -46,6 +47,12 @@ fn intern(val: T) -> uint {
}
}
}
// Allocate a fresh index for `val` without recording it in the lookup
// map: the value is pushed onto the backing vector only, so (per the
// inline note) a later intern() of an equal value cannot collide with
// a gensym'd slot and will receive its own, different index.
fn gensym(val: T) -> uint {
// next free slot in the backing vector
let new_idx = self.vect.len();
// leave out of .map to avoid colliding
self.vect.push(val);
return new_idx;
}
// this isn't "pure" in the traditional sense, because it can go from
// failing to returning a value as items are interned. But for typestate,
......
......@@ -27,9 +27,9 @@ enum fn_kind {
fn name_of_fn(fk: fn_kind) -> ident {
match fk {
fk_item_fn(name, _) | fk_method(name, _, _)
| fk_ctor(name, _, _, _, _) => /* FIXME (#2543) */ copy name,
fk_anon(*) | fk_fn_block(*) => @~"anon",
fk_dtor(*) => @~"drop"
| fk_ctor(name, _, _, _, _) => /* FIXME (#2543) */ copy name,
fk_anon(*) | fk_fn_block(*) => parse::token::special_idents::anon,
fk_dtor(*) => parse::token::special_idents::dtor
}
}
......
......@@ -310,24 +310,24 @@ fn build_link_meta(sess: session, c: ast::crate, output: ~str,
symbol_hasher: &hash::State) -> link_meta {
type provided_metas =
{name: option<@~str>,
vers: option<@~str>,
{name: option<~str>,
vers: option<~str>,
cmh_items: ~[@ast::meta_item]};
fn provided_link_metas(sess: session, c: ast::crate) ->
provided_metas {
let mut name: option<@~str> = none;
let mut vers: option<@~str> = none;
let mut name: option<~str> = none;
let mut vers: option<~str> = none;
let mut cmh_items: ~[@ast::meta_item] = ~[];
let linkage_metas = attr::find_linkage_metas(c.node.attrs);
attr::require_unique_names(sess.diagnostic(), linkage_metas);
for linkage_metas.each |meta| {
if *attr::get_meta_item_name(meta) == ~"name" {
if attr::get_meta_item_name(meta) == ~"name" {
match attr::get_meta_item_value_str(meta) {
some(v) => { name = some(v); }
none => vec::push(cmh_items, meta)
}
} else if *attr::get_meta_item_name(meta) == ~"vers" {
} else if attr::get_meta_item_name(meta) == ~"vers" {
match attr::get_meta_item_value_str(meta) {
some(v) => { vers = some(v); }
none => vec::push(cmh_items, meta)
......@@ -341,7 +341,7 @@ fn provided_link_metas(sess: session, c: ast::crate) ->
fn crate_meta_extras_hash(symbol_hasher: &hash::State,
_crate: ast::crate,
metas: provided_metas,
dep_hashes: ~[@~str]) -> ~str {
dep_hashes: ~[~str]) -> ~str {
fn len_and_str(s: ~str) -> ~str {
return fmt!{"%u_%s", str::len(s), s};
}
......@@ -357,11 +357,11 @@ fn len_and_str_lit(l: ast::lit) -> ~str {
let m = m_;
match m.node {
ast::meta_name_value(key, value) => {
symbol_hasher.write_str(len_and_str(*key));
symbol_hasher.write_str(len_and_str(key));
symbol_hasher.write_str(len_and_str_lit(value));
}
ast::meta_word(name) => {
symbol_hasher.write_str(len_and_str(*name));
symbol_hasher.write_str(len_and_str(name));
}
ast::meta_list(_, _) => {
// FIXME (#607): Implement this
......@@ -371,7 +371,7 @@ fn len_and_str_lit(l: ast::lit) -> ~str {
}
for dep_hashes.each |dh| {
symbol_hasher.write_str(len_and_str(*dh));
symbol_hasher.write_str(len_and_str(dh));
}
return truncated_hash_result(symbol_hasher);
......@@ -384,7 +384,7 @@ fn warn_missing(sess: session, name: ~str, default: ~str) {
}
fn crate_meta_name(sess: session, _crate: ast::crate,
output: ~str, metas: provided_metas) -> @~str {
output: ~str, metas: provided_metas) -> ~str {
return match metas.name {
some(v) => v,
none => {
......@@ -400,19 +400,19 @@ fn crate_meta_name(sess: session, _crate: ast::crate,
str::connect(os, ~".")
};
warn_missing(sess, ~"name", name);
@name
name
}
};
}
fn crate_meta_vers(sess: session, _crate: ast::crate,
metas: provided_metas) -> @~str {
metas: provided_metas) -> ~str {
return match metas.vers {
some(v) => v,
none => {
let vers = ~"0.0";
warn_missing(sess, ~"vers", vers);
@vers
vers
}
};
}
......@@ -439,7 +439,7 @@ fn symbol_hash(tcx: ty::ctxt, symbol_hasher: &hash::State, t: ty::t,
// to be independent of one another in the crate.
symbol_hasher.reset();
symbol_hasher.write_str(*link_meta.name);
symbol_hasher.write_str(link_meta.name);
symbol_hasher.write_str(~"-");
symbol_hasher.write_str(link_meta.extras_hash);
symbol_hasher.write_str(~"-");
......@@ -497,14 +497,14 @@ fn sanitize(s: ~str) -> ~str {
return result;
}
fn mangle(ss: path) -> ~str {
fn mangle(sess: session, ss: path) -> ~str {
// Follow C++ namespace-mangling style
let mut n = ~"_ZN"; // Begin name-sequence.
for ss.each |s| {
match s { path_name(s) | path_mod(s) => {
let sani = sanitize(*s);
let sani = sanitize(sess.str_of(s));
n += fmt!{"%u%s", str::len(sani), sani};
} }
}
......@@ -512,36 +512,41 @@ fn mangle(ss: path) -> ~str {
n
}
fn exported_name(path: path, hash: @~str, vers: @~str) -> ~str {
return mangle(
vec::append_one(vec::append_one(path, path_name(hash)),
path_name(vers)));
fn exported_name(sess: session, path: path, hash: ~str, vers: ~str) -> ~str {
return mangle(sess,
vec::append_one(
vec::append_one(path, path_name(sess.ident_of(hash))),
path_name(sess.ident_of(vers))));
}
fn mangle_exported_name(ccx: @crate_ctxt, path: path, t: ty::t) -> ~str {
let hash = get_symbol_hash(ccx, t);
return exported_name(path, @hash, ccx.link_meta.vers);
return exported_name(ccx.sess, path, hash, ccx.link_meta.vers);
}
fn mangle_internal_name_by_type_only(ccx: @crate_ctxt,
t: ty::t, name: @~str) ->
t: ty::t, name: ~str) ->
~str {
let s = @util::ppaux::ty_to_short_str(ccx.tcx, t);
let s = util::ppaux::ty_to_short_str(ccx.tcx, t);
let hash = get_symbol_hash(ccx, t);
return mangle(~[path_name(name), path_name(s), path_name(@hash)]);
return mangle(ccx.sess,
~[path_name(ccx.sess.ident_of(name)),
path_name(ccx.sess.ident_of(s)),
path_name(ccx.sess.ident_of(hash))]);
}
fn mangle_internal_name_by_path_and_seq(ccx: @crate_ctxt, path: path,
flav: @~str) -> ~str {
return mangle(vec::append_one(path, path_name(@ccx.names(*flav))));
flav: ~str) -> ~str {
return mangle(ccx.sess,
vec::append_one(path, path_name(ccx.names(flav))));
}
fn mangle_internal_name_by_path(_ccx: @crate_ctxt, path: path) -> ~str {
return mangle(path);
fn mangle_internal_name_by_path(ccx: @crate_ctxt, path: path) -> ~str {
return mangle(ccx.sess, path);
}
fn mangle_internal_name_by_seq(ccx: @crate_ctxt, flav: @~str) -> ~str {
return ccx.names(*flav);
fn mangle_internal_name_by_seq(ccx: @crate_ctxt, flav: ~str) -> ~str {
return fmt!("%s_%u", flav, ccx.names(flav));
}
// If the user wants an exe generated we need to invoke
......@@ -577,8 +582,8 @@ fn rmext(filename: ~str) -> ~str {
let output = if sess.building_library {
let long_libname =
os::dll_filename(fmt!{"%s-%s-%s",
*lm.name, lm.extras_hash, *lm.vers});
debug!{"link_meta.name: %s", *lm.name};
lm.name, lm.extras_hash, lm.vers});
debug!{"link_meta.name: %s", lm.name};
debug!{"long_libname: %s", long_libname};
debug!{"out_filename: %s", out_filename};
debug!{"dirname(out_filename): %s", path::dirname(out_filename)};
......
......@@ -51,15 +51,15 @@ fn default_configuration(sess: session, argv0: ~str, input: input) ->
};
return ~[ // Target bindings.
attr::mk_word_item(@os::family()),
mk(@~"target_os", os::sysname()),
mk(@~"target_family", os::family()),
mk(@~"target_arch", arch),
mk(@~"target_word_size", wordsz),
mk(@~"target_libc", libc),
attr::mk_word_item(os::family()),
mk(~"target_os", os::sysname()),
mk(~"target_family", os::family()),
mk(~"target_arch", arch),
mk(~"target_word_size", wordsz),
mk(~"target_libc", libc),
// Build bindings.
mk(@~"build_compiler", argv0),
mk(@~"build_input", source_name(input))];
mk(~"build_compiler", argv0),
mk(~"build_input", source_name(input))];
}
fn build_configuration(sess: session, argv0: ~str, input: input) ->
......@@ -72,9 +72,9 @@ fn build_configuration(sess: session, argv0: ~str, input: input) ->
let gen_cfg =
{
if sess.opts.test && !attr::contains_name(user_cfg, ~"test") {
~[attr::mk_word_item(@~"test")]
~[attr::mk_word_item(~"test")]
} else {
~[attr::mk_word_item(@~"notest")]
~[attr::mk_word_item(~"notest")]
}
};
return vec::append(vec::append(user_cfg, gen_cfg), default_cfg);
......@@ -86,7 +86,7 @@ fn parse_cfgspecs(cfgspecs: ~[~str]) -> ast::crate_cfg {
// varieties of meta_item here. At the moment we just support the
// meta_word variant.
let mut words = ~[];
for cfgspecs.each |s| { vec::push(words, attr::mk_word_item(@s)); }
for cfgspecs.each |s| { vec::push(words, attr::mk_word_item(s)); }
return words;
}
......@@ -169,7 +169,8 @@ fn compile_upto(sess: session, cfg: ast::crate_cfg,
creader::read_crates(sess.diagnostic(), *crate, sess.cstore,
sess.filesearch,
session::sess_os_to_meta_os(sess.targ_cfg.os),
sess.opts.static));
sess.opts.static,
sess.parse_sess.interner));
let lang_items = time(time_passes, ~"language item collection", ||
middle::lang_items::collect_language_items(crate, sess));
......@@ -552,7 +553,9 @@ fn build_session_(sopts: @session::options,
-> session {
let target_cfg = build_target_config(sopts, demitter);
let cstore = cstore::mk_cstore();
let p_s = parse::new_parse_sess_special_handler(span_diagnostic_handler,
cm);
let cstore = cstore::mk_cstore(p_s.interner);
let filesearch = filesearch::mk_filesearch(
sopts.maybe_sysroot,
sopts.target_triple,
......@@ -561,8 +564,7 @@ fn build_session_(sopts: @session::options,
session_(@{targ_cfg: target_cfg,
opts: sopts,
cstore: cstore,
parse_sess:
parse::new_parse_sess_special_handler(span_diagnostic_handler, cm),
parse_sess: p_s,
codemap: cm,
// For a library crate, this is always none
mut main_fn: none,
......@@ -701,6 +703,7 @@ fn early_error(emitter: diagnostic::emitter, msg: ~str) -> ! {
fn list_metadata(sess: session, path: ~str, out: io::Writer) {
metadata::loader::list_file_metadata(
sess.parse_sess.interner,
session::sess_os_to_meta_os(sess.targ_cfg.os), path, out);
}
......
......@@ -204,6 +204,16 @@ fn coherence() -> bool { self.debugging_opt(coherence) }
fn borrowck_stats() -> bool { self.debugging_opt(borrowck_stats) }
fn borrowck_note_pure() -> bool { self.debugging_opt(borrowck_note_pure) }
fn borrowck_note_loan() -> bool { self.debugging_opt(borrowck_note_loan) }
// Resolve an interned identifier back to its string spelling via the
// parse session's interner (dereferences the boxed @~str into an owned
// copy for the caller).
fn str_of(id: ast::ident) -> ~str {
*self.parse_sess.interner.get(id)
}
// Intern a string in the parse session's interner, yielding its
// ast::ident handle. Boxes the string (@st) because the interner
// stores @~str values.
fn ident_of(st: ~str) -> ast::ident {
self.parse_sess.interner.intern(@st)
}
// Convenience accessor: the session's identifier interner, for callers
// (e.g. pretty-printing entry points) that need it directly.
fn intr() -> syntax::parse::token::ident_interner {
self.parse_sess.interner
}
}
/// Some reasonable defaults
......@@ -245,7 +255,7 @@ fn building_library(req_crate_type: crate_type, crate: @ast::crate,
match syntax::attr::first_attr_value_str_by_name(
crate.node.attrs,
~"crate_type") {
option::some(@~"lib") => true,
option::some(~"lib") => true,
_ => false
}
}
......@@ -273,7 +283,7 @@ fn make_crate_type_attr(t: ~str) -> ast::attribute {
style: ast::attr_outer,
value: ast_util::respan(ast_util::dummy_sp(),
ast::meta_name_value(
@~"crate_type",
~"crate_type",
ast_util::respan(ast_util::dummy_sp(),
ast::lit_str(@t)))),
is_sugared_doc: false
......
......@@ -30,12 +30,13 @@ fn spanned<T: copy>(x: T) -> @ast::spanned<T> {
let n1 = sess.next_node_id();
let n2 = sess.next_node_id();
let vi1 = @{node: ast::view_item_use(@~"core", ~[], n1),
let vi1 = @{node: ast::view_item_use(sess.ident_of(~"core"), ~[], n1),
attrs: ~[],
vis: ast::public,
span: dummy_sp()};
let vp = spanned(ast::view_path_glob(ident_to_path(dummy_sp(), @~"core"),
n2));
let vp = spanned(ast::view_path_glob(
ident_to_path(dummy_sp(), sess.ident_of(~"core")),
n2));
let vi2 = @{node: ast::view_item_import(~[vp]),
attrs: ~[],
vis: ast::public,
......
......@@ -62,17 +62,17 @@ fn strip_test_functions(crate: @ast::crate) -> @ast::crate {
}
}
fn fold_mod(_cx: test_ctxt, m: ast::_mod, fld: fold::ast_fold) -> ast::_mod {
fn fold_mod(cx: test_ctxt, m: ast::_mod, fld: fold::ast_fold) -> ast::_mod {
// Remove any defined main function from the AST so it doesn't clash with
// the one we're going to add.
// FIXME (#2403): This is sloppy. Instead we should have some mechanism to
// indicate to the translation pass which function we want to be main.
fn nomain(&&item: @ast::item) -> option<@ast::item> {
fn nomain(cx: test_ctxt, item: @ast::item) -> option<@ast::item> {
match item.node {
ast::item_fn(_, _, _) => {
if *item.ident == ~"main" {
if item.ident == cx.sess.ident_of(~"main") {
option::none
} else { option::some(item) }
}
......@@ -81,7 +81,8 @@ fn nomain(&&item: @ast::item) -> option<@ast::item> {
}
let mod_nomain =
{view_items: m.view_items, items: vec::filter_map(m.items, nomain)};
{view_items: m.view_items, items: vec::filter_map(m.items,
|i| nomain(cx, i))};
return fold::noop_fold_mod(mod_nomain, fld);
}
......@@ -99,7 +100,8 @@ fn fold_item(cx: test_ctxt, &&i: @ast::item, fld: fold::ast_fold) ->
option<@ast::item> {
vec::push(cx.path, i.ident);
debug!{"current path: %s", ast_util::path_name_i(cx.path)};
debug!{"current path: %s",
ast_util::path_name_i(cx.path, cx.sess.parse_sess.interner)};
if is_test_fn(i) {
match i.node {
......@@ -192,16 +194,17 @@ fn mk_test_module(cx: test_ctxt) -> @ast::item {
let item_ = ast::item_mod(testmod);
// This attribute tells resolve to let us call unexported functions
let resolve_unexported_attr =
attr::mk_attr(attr::mk_word_item(@~"!resolve_unexported"));
attr::mk_attr(attr::mk_word_item(~"!resolve_unexported"));
let item: ast::item =
{ident: @~"__test",
{ident: cx.sess.ident_of(~"__test"),
attrs: ~[resolve_unexported_attr],
id: cx.sess.next_node_id(),
node: item_,
vis: ast::public,
span: dummy_sp()};
debug!{"Synthetic test module:\n%s\n", pprust::item_to_str(@item)};
debug!{"Synthetic test module:\n%s\n",
pprust::item_to_str(@item, cx.sess.intr())};
return @item;
}
......@@ -232,7 +235,7 @@ fn mk_tests(cx: test_ctxt) -> @ast::item {
let item_ = ast::item_fn(decl, ~[], body);
let item: ast::item =
{ident: @~"tests",
{ident: cx.sess.ident_of(~"tests"),
attrs: ~[],
id: cx.sess.next_node_id(),
node: item_,
......@@ -247,18 +250,19 @@ fn mk_path(cx: test_ctxt, path: ~[ast::ident]) -> ~[ast::ident] {
let is_std = {
let items = attr::find_linkage_metas(cx.crate.node.attrs);
match attr::last_meta_item_value_str_by_name(items, ~"name") {
some(@~"std") => true,
some(~"std") => true,
_ => false
}
};
if is_std { path }
else { vec::append(~[@~"std"], path) }
else { vec::append(~[cx.sess.ident_of(~"std")], path) }
}
// The ast::ty of ~[std::test::test_desc]
fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::ty {
let test_desc_ty_path =
path_node(mk_path(cx, ~[@~"test", @~"test_desc"]));
path_node(mk_path(cx, ~[cx.sess.ident_of(~"test"),
cx.sess.ident_of(~"test_desc")]));
let test_desc_ty: ast::ty =
{id: cx.sess.next_node_id(),
......@@ -296,10 +300,12 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr {
let span = test.span;
let path = test.path;
debug!{"encoding %s", ast_util::path_name_i(path)};
debug!{"encoding %s", ast_util::path_name_i(path,
cx.sess.parse_sess.interner)};
let name_lit: ast::lit =
nospan(ast::lit_str(@ast_util::path_name_i(path)));
nospan(ast::lit_str(@ast_util::path_name_i(path, cx.sess.parse_sess
.interner)));
let name_expr_inner: @ast::expr =
@{id: cx.sess.next_node_id(),
callee_id: cx.sess.next_node_id(),
......@@ -313,7 +319,8 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr {
let name_field: ast::field =
nospan({mutbl: ast::m_imm, ident: @~"name", expr: @name_expr});
nospan({mutbl: ast::m_imm, ident: cx.sess.ident_of(~"name"),
expr: @name_expr});
let fn_path = path_node(path);
......@@ -326,7 +333,8 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr {
let fn_wrapper_expr = mk_test_wrapper(cx, fn_expr, span);
let fn_field: ast::field =
nospan({mutbl: ast::m_imm, ident: @~"fn", expr: fn_wrapper_expr});
nospan({mutbl: ast::m_imm, ident: cx.sess.ident_of(~"fn"),
expr: fn_wrapper_expr});
let ignore_lit: ast::lit = nospan(ast::lit_bool(test.ignore));
......@@ -337,7 +345,8 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr {
span: span};
let ignore_field: ast::field =
nospan({mutbl: ast::m_imm, ident: @~"ignore", expr: @ignore_expr});
nospan({mutbl: ast::m_imm, ident: cx.sess.ident_of(~"ignore"),
expr: @ignore_expr});
let fail_lit: ast::lit = nospan(ast::lit_bool(test.should_fail));
......@@ -349,7 +358,7 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr {
let fail_field: ast::field =
nospan({mutbl: ast::m_imm,
ident: @~"should_fail",
ident: cx.sess.ident_of(~"should_fail"),
expr: @fail_expr});
let desc_rec_: ast::expr_ =
......@@ -404,7 +413,7 @@ fn mk_test_wrapper(cx: test_ctxt,
}
fn mk_main(cx: test_ctxt) -> @ast::item {
let str_pt = path_node(~[@~"str"]);
let str_pt = path_node(~[cx.sess.ident_of(~"str")]);
let str_ty_inner = @{id: cx.sess.next_node_id(),
node: ast::ty_path(str_pt, cx.sess.next_node_id()),
span: dummy_sp()};
......@@ -423,7 +432,7 @@ fn mk_main(cx: test_ctxt) -> @ast::item {
let args_arg: ast::arg =
{mode: ast::expl(ast::by_val),
ty: @args_ty,
ident: @~"args",
ident: cx.sess.ident_of(~"args"),
id: cx.sess.next_node_id()};
let ret_ty = {id: cx.sess.next_node_id(),
......@@ -445,7 +454,7 @@ fn mk_main(cx: test_ctxt) -> @ast::item {
let item_ = ast::item_fn(decl, ~[], body);
let item: ast::item =
{ident: @~"main",
{ident: cx.sess.ident_of(~"main"),
attrs: ~[],
id: cx.sess.next_node_id(),
node: item_,
......@@ -457,7 +466,7 @@ fn mk_main(cx: test_ctxt) -> @ast::item {
fn mk_test_main_call(cx: test_ctxt) -> @ast::expr {
// Get the args passed to main so we can pass the to test_main
let args_path = path_node(~[@~"args"]);
let args_path = path_node(~[cx.sess.ident_of(~"args")]);
let args_path_expr_: ast::expr_ = ast::expr_path(args_path);
......@@ -466,7 +475,7 @@ fn mk_test_main_call(cx: test_ctxt) -> @ast::expr {
node: args_path_expr_, span: dummy_sp()};
// Call __test::test to generate the vector of test_descs
let test_path = path_node(~[@~"tests"]);
let test_path = path_node(~[cx.sess.ident_of(~"tests")]);
let test_path_expr_: ast::expr_ = ast::expr_path(test_path);
......@@ -481,7 +490,9 @@ fn mk_test_main_call(cx: test_ctxt) -> @ast::expr {
node: test_call_expr_, span: dummy_sp()};
// Call std::test::test_main
let test_main_path = path_node(mk_path(cx, ~[@~"test", @~"test_main"]));
let test_main_path = path_node(
mk_path(cx, ~[cx.sess.ident_of(~"test"),
cx.sess.ident_of(~"test_main")]));
let test_main_path_expr_: ast::expr_ = ast::expr_path(test_main_path);
......
......@@ -134,5 +134,5 @@ fn hash_path(&&s: ~str) -> uint {
return h;
}
type link_meta = {name: @~str, vers: @~str, extras_hash: ~str};
type link_meta = {name: ~str, vers: ~str, extras_hash: ~str};
......@@ -10,6 +10,7 @@
import filesearch::filesearch;
import common::*;
import dvec::{DVec, dvec};
import syntax::parse::token::ident_interner;
export read_crates;
......@@ -17,28 +18,29 @@
// libraries necessary for later resolving, typechecking, linking, etc.
fn read_crates(diag: span_handler, crate: ast::crate,
cstore: cstore::cstore, filesearch: filesearch,
os: loader::os, static: bool) {
os: loader::os, static: bool, intr: ident_interner) {
let e = @{diag: diag,
filesearch: filesearch,
cstore: cstore,
os: os,
static: static,
crate_cache: dvec(),
mut next_crate_num: 1};
mut next_crate_num: 1,
intr: intr};
let v =
visit::mk_simple_visitor(@{visit_view_item:
|a| visit_view_item(e, a),
visit_item: |a| visit_item(e, a)
with *visit::default_simple_visitor()});
with *visit::default_simple_visitor()});
visit::visit_crate(crate, (), v);
dump_crates(e.crate_cache);
warn_if_multiple_versions(diag, e.crate_cache.get());
warn_if_multiple_versions(e, diag, e.crate_cache.get());
}
type cache_entry = {
cnum: int,
span: span,
hash: @~str,
hash: ~str,
metas: @~[@ast::meta_item]
};
......@@ -48,16 +50,10 @@ fn dump_crates(crate_cache: DVec<cache_entry>) {
debug!{"cnum: %?", entry.cnum};
debug!{"span: %?", entry.span};
debug!{"hash: %?", entry.hash};
let attrs = ~[
attr::mk_attr(attr::mk_list_item(@~"link", *entry.metas))
];
for attr::find_linkage_attrs(attrs).each |attr| {
debug!{"meta: %s", pprust::attr_to_str(attr)};
}
}
}
fn warn_if_multiple_versions(diag: span_handler,
fn warn_if_multiple_versions(e: env, diag: span_handler,
crate_cache: ~[cache_entry]) {
import either::*;
......@@ -77,17 +73,17 @@ fn warn_if_multiple_versions(diag: span_handler,
if matches.len() != 1u {
diag.handler().warn(
fmt!{"using multiple versions of crate `%s`", *name});
fmt!{"using multiple versions of crate `%s`", name});
for matches.each |match_| {
diag.span_note(match_.span, ~"used here");
let attrs = ~[
attr::mk_attr(attr::mk_list_item(@~"link", *match_.metas))
attr::mk_attr(attr::mk_list_item(~"link", *match_.metas))
];
loader::note_linkage_attrs(diag, attrs);
loader::note_linkage_attrs(e.intr, diag, attrs);
}
}
warn_if_multiple_versions(diag, non_matches);
warn_if_multiple_versions(e, diag, non_matches);
}
}
......@@ -97,7 +93,8 @@ fn warn_if_multiple_versions(diag: span_handler,
os: loader::os,
static: bool,
crate_cache: DVec<cache_entry>,
mut next_crate_num: ast::crate_num};
mut next_crate_num: ast::crate_num,
intr: ident_interner};
fn visit_view_item(e: env, i: @ast::view_item) {
match i.node {
......@@ -125,28 +122,28 @@ fn visit_item(e: env, i: @ast::item) {
let foreign_name =
match attr::first_attr_value_str_by_name(i.attrs, ~"link_name") {
some(nn) => {
if *nn == ~"" {
if nn == ~"" {
e.diag.span_fatal(
i.span,
~"empty #[link_name] not allowed; use #[nolink].");
}
nn
}
none => i.ident
none => *e.intr.get(i.ident)
};
let mut already_added = false;
if vec::len(attr::find_attrs_by_name(i.attrs, ~"nolink")) == 0u {
already_added = !cstore::add_used_library(cstore, *foreign_name);
already_added = !cstore::add_used_library(cstore, foreign_name);
}
let link_args = attr::find_attrs_by_name(i.attrs, ~"link_args");
if vec::len(link_args) > 0u && already_added {
e.diag.span_fatal(i.span, ~"library '" + *foreign_name +
e.diag.span_fatal(i.span, ~"library '" + foreign_name +
~"' already added: can't specify link_args.");
}
for link_args.each |a| {
match attr::get_meta_item_value_str(attr::attr_meta(a)) {
some(linkarg) => {
cstore::add_used_link_args(cstore, *linkarg);
cstore::add_used_link_args(cstore, linkarg);
}
none => {/* fallthrough */ }
}
......@@ -156,19 +153,19 @@ fn visit_item(e: env, i: @ast::item) {
}
}
fn metas_with(ident: ast::ident, key: ast::ident,
metas: ~[@ast::meta_item]) -> ~[@ast::meta_item] {
let name_items = attr::find_meta_items_by_name(metas, *key);
fn metas_with(ident: ~str, key: ~str, metas: ~[@ast::meta_item])
-> ~[@ast::meta_item] {
let name_items = attr::find_meta_items_by_name(metas, key);
if name_items.is_empty() {
vec::append_one(metas, attr::mk_name_value_item_str(key, *ident))
vec::append_one(metas, attr::mk_name_value_item_str(key, ident))
} else {
metas
}
}
fn metas_with_ident(ident: ast::ident,
metas: ~[@ast::meta_item]) -> ~[@ast::meta_item] {
metas_with(ident, @~"name", metas)
fn metas_with_ident(ident: ~str, metas: ~[@ast::meta_item])
-> ~[@ast::meta_item] {
metas_with(ident, ~"name", metas)
}
fn existing_match(e: env, metas: ~[@ast::meta_item], hash: ~str) ->
......@@ -176,7 +173,7 @@ fn existing_match(e: env, metas: ~[@ast::meta_item], hash: ~str) ->
for e.crate_cache.each |c| {
if loader::metadata_matches(*c.metas, metas)
&& (hash.is_empty() || *c.hash == hash) {
&& (hash.is_empty() || c.hash == hash) {
return some(c.cnum);
}
}
......@@ -185,7 +182,7 @@ fn existing_match(e: env, metas: ~[@ast::meta_item], hash: ~str) ->
fn resolve_crate(e: env, ident: ast::ident, metas: ~[@ast::meta_item],
hash: ~str, span: span) -> ast::crate_num {
let metas = metas_with_ident(ident, metas);
let metas = metas_with_ident(*e.intr.get(ident), metas);
match existing_match(e, metas, hash) {
none => {
......@@ -197,7 +194,8 @@ fn resolve_crate(e: env, ident: ast::ident, metas: ~[@ast::meta_item],
metas: metas,
hash: hash,
os: e.os,
static: e.static
static: e.static,
intr: e.intr
};
let cinfo = loader::load_library_crate(load_ctxt);
......@@ -220,9 +218,9 @@ fn resolve_crate(e: env, ident: ast::ident, metas: ~[@ast::meta_item],
let cname =
match attr::last_meta_item_value_str_by_name(metas, ~"name") {
option::some(v) => v,
option::none => ident
option::none => *e.intr.get(ident)
};
let cmeta = @{name: *cname, data: cdata,
let cmeta = @{name: cname, data: cdata,
cnum_map: cnum_map, cnum: cnum};
let cstore = e.cstore;
......@@ -242,13 +240,14 @@ fn resolve_crate_deps(e: env, cdata: @~[u8]) -> cstore::cnum_map {
// The map from crate numbers in the crate we're resolving to local crate
// numbers
let cnum_map = int_hash::<ast::crate_num>();
for decoder::get_crate_deps(cdata).each |dep| {
for decoder::get_crate_deps(e.intr, cdata).each |dep| {
let extrn_cnum = dep.cnum;
let cname = dep.name;
let cmetas = metas_with(dep.vers, @~"vers", ~[]);
let cmetas = metas_with(dep.vers, ~"vers", ~[]);
debug!{"resolving dep crate %s ver: %s hash: %s",
*dep.name, *dep.vers, *dep.hash};
match existing_match(e, metas_with_ident(cname, cmetas), *dep.hash) {
*e.intr.get(dep.name), dep.vers, dep.hash};
match existing_match(e, metas_with_ident(*e.intr.get(cname), cmetas),
dep.hash) {
some(local_cnum) => {
debug!{"already have it"};
// We've already seen this crate
......@@ -260,8 +259,8 @@ fn resolve_crate_deps(e: env, cdata: @~[u8]) -> cstore::cnum_map {
// FIXME (#2404): Need better error reporting than just a bogus
// span.
let fake_span = ast_util::dummy_sp();
let local_cnum =
resolve_crate(e, cname, cmetas, *dep.hash, fake_span);
let local_cnum = resolve_crate(e, cname, cmetas, dep.hash,
fake_span);
cnum_map.insert(extrn_cnum, local_cnum);
}
}
......
......@@ -54,17 +54,17 @@ fn lookup_method_purity(cstore: cstore::cstore, did: ast::def_id)
fn each_path(cstore: cstore::cstore, cnum: ast::crate_num,
f: fn(decoder::path_entry) -> bool) {
let crate_data = cstore::get_crate_data(cstore, cnum);
decoder::each_path(crate_data, f);
decoder::each_path(cstore.intr, crate_data, f);
}
fn get_item_path(tcx: ty::ctxt, def: ast::def_id) -> ast_map::path {
let cstore = tcx.cstore;
let cdata = cstore::get_crate_data(cstore, def.crate);
let path = decoder::get_item_path(cdata, def.node);
let path = decoder::get_item_path(cstore.intr, cdata, def.node);
// FIXME #1920: This path is not always correct if the crate is not linked
// into the root namespace.
vec::append(~[ast_map::path_mod(@cdata.name)], path)
vec::append(~[ast_map::path_mod(tcx.sess.ident_of(cdata.name))], path)
}
enum found_ast {
......@@ -81,7 +81,7 @@ fn maybe_get_item_ast(tcx: ty::ctxt, def: ast::def_id,
-> found_ast {
let cstore = tcx.cstore;
let cdata = cstore::get_crate_data(cstore, def.crate);
decoder::maybe_get_item_ast(cdata, tcx, def.node,
decoder::maybe_get_item_ast(cstore.intr, cdata, tcx, def.node,
decode_inlined_item)
}
......@@ -89,14 +89,14 @@ fn get_enum_variants(tcx: ty::ctxt, def: ast::def_id)
-> ~[ty::variant_info] {
let cstore = tcx.cstore;
let cdata = cstore::get_crate_data(cstore, def.crate);
return decoder::get_enum_variants(cdata, def.node, tcx)
return decoder::get_enum_variants(cstore.intr, cdata, def.node, tcx)
}
fn get_impls_for_mod(cstore: cstore::cstore, def: ast::def_id,
name: option<ast::ident>)
-> @~[@decoder::_impl] {
let cdata = cstore::get_crate_data(cstore, def.crate);
do decoder::get_impls_for_mod(cdata, def.node, name) |cnum| {
do decoder::get_impls_for_mod(cstore.intr, cdata, def.node, name) |cnum| {
cstore::get_crate_data(cstore, cnum)
}
}
......@@ -104,14 +104,14 @@ fn get_impls_for_mod(cstore: cstore::cstore, def: ast::def_id,
fn get_trait_methods(tcx: ty::ctxt, def: ast::def_id) -> @~[ty::method] {
let cstore = tcx.cstore;
let cdata = cstore::get_crate_data(cstore, def.crate);
decoder::get_trait_methods(cdata, def.node, tcx)
decoder::get_trait_methods(cstore.intr, cdata, def.node, tcx)
}
fn get_method_names_if_trait(cstore: cstore::cstore, def: ast::def_id)
-> option<@DVec<(@~str, ast::self_ty_)>> {
-> option<@DVec<(ast::ident, ast::self_ty_)>> {
let cdata = cstore::get_crate_data(cstore, def.crate);
return decoder::get_method_names_if_trait(cdata, def.node);
return decoder::get_method_names_if_trait(cstore.intr, cdata, def.node);
}
fn get_item_attrs(cstore: cstore::cstore,
......@@ -125,7 +125,7 @@ fn get_item_attrs(cstore: cstore::cstore,
fn get_class_fields(tcx: ty::ctxt, def: ast::def_id) -> ~[ty::field_ty] {
let cstore = tcx.cstore;
let cdata = cstore::get_crate_data(cstore, def.crate);
decoder::get_class_fields(cdata, def.node)
decoder::get_class_fields(cstore.intr, cdata, def.node)
}
fn get_type(tcx: ty::ctxt, def: ast::def_id) -> ty::ty_param_bounds_and_ty {
......@@ -173,7 +173,7 @@ fn get_impl_method(cstore: cstore::cstore,
def: ast::def_id, mname: ast::ident)
-> ast::def_id {
let cdata = cstore::get_crate_data(cstore, def.crate);
decoder::get_impl_method(cdata, def.node, mname)
decoder::get_impl_method(cstore.intr, cdata, def.node, mname)
}
/* Because classes use the trait format rather than the impl format
......@@ -184,7 +184,7 @@ fn get_class_method(cstore: cstore::cstore,
def: ast::def_id, mname: ast::ident)
-> ast::def_id {
let cdata = cstore::get_crate_data(cstore, def.crate);
decoder::get_class_method(cdata, def.node, mname)
decoder::get_class_method(cstore.intr, cdata, def.node, mname)
}
/* If def names a class with a dtor, return it. Otherwise, return none. */
......
......@@ -5,6 +5,7 @@
import std::map::hashmap;
import syntax::{ast, attr};
import syntax::ast_util::new_def_hash;
import syntax::parse::token::ident_interner;
export cstore;
export cnum_map;
......@@ -57,7 +58,8 @@ enum cstore { private(cstore_private), }
mod_path_map: mod_path_map,
mut used_crate_files: ~[~str],
mut used_libraries: ~[~str],
mut used_link_args: ~[~str]};
mut used_link_args: ~[~str],
intr: ident_interner};
// Map from node_id's of local use statements to crate numbers
type use_crate_map = map::hashmap<ast::node_id, ast::crate_num>;
......@@ -67,28 +69,29 @@ enum cstore { private(cstore_private), }
match cstore { private(p) => p }
}
fn mk_cstore() -> cstore {
fn mk_cstore(intr: ident_interner) -> cstore {
let meta_cache = map::int_hash::<crate_metadata>();
let crate_map = map::int_hash::<ast::crate_num>();
let mod_path_map = new_def_hash();
return private(@{metas: meta_cache,
use_crate_map: crate_map,
mod_path_map: mod_path_map,
mut used_crate_files: ~[],
mut used_libraries: ~[],
mut used_link_args: ~[]});
use_crate_map: crate_map,
mod_path_map: mod_path_map,
mut used_crate_files: ~[],
mut used_libraries: ~[],
mut used_link_args: ~[],
intr: intr});
}
// Look up the metadata record of an already-loaded crate by its crate
// number. Goes through p() to unwrap the private cstore representation;
// NOTE(review): `.get` on the metas map presumably fails on an
// unregistered cnum — confirm against the hashmap's contract.
fn get_crate_data(cstore: cstore, cnum: ast::crate_num) -> crate_metadata {
return p(cstore).metas.get(cnum);
}
fn get_crate_hash(cstore: cstore, cnum: ast::crate_num) -> @~str {
// Fetch the stored metadata hash of crate `cnum` by decoding it out of
// the crate's raw metadata blob. Returns an owned ~str (the @~str
// indirection was dropped in this revision).
fn get_crate_hash(cstore: cstore, cnum: ast::crate_num) -> ~str {
let cdata = get_crate_data(cstore, cnum);
return decoder::get_crate_hash(cdata.data);
}
fn get_crate_vers(cstore: cstore, cnum: ast::crate_num) -> @~str {
// Fetch the version string of crate `cnum` by decoding it out of the
// crate's raw metadata blob; mirrors get_crate_hash above.
fn get_crate_vers(cstore: cstore, cnum: ast::crate_num) -> ~str {
let cdata = get_crate_data(cstore, cnum);
return decoder::get_crate_vers(cdata.data);
}
......@@ -96,7 +99,7 @@ fn get_crate_vers(cstore: cstore, cnum: ast::crate_num) -> @~str {
fn set_crate_data(cstore: cstore, cnum: ast::crate_num,
data: crate_metadata) {
p(cstore).metas.insert(cnum, data);
do vec::iter(decoder::get_crate_module_paths(data)) |dp| {
do vec::iter(decoder::get_crate_module_paths(cstore.intr, data)) |dp| {
let (did, path) = dp;
let d = {crate: cnum, node: did.node};
p(cstore).mod_path_map.insert(d, @path);
......@@ -153,32 +156,29 @@ fn find_use_stmt_cnum(cstore: cstore,
// returns hashes of crates directly used by this crate. Hashes are
// sorted by crate name.
fn get_dep_hashes(cstore: cstore) -> ~[@~str] {
type crate_hash = {name: @~str, hash: @~str};
fn get_dep_hashes(cstore: cstore) -> ~[~str] {
type crate_hash = {name: ~str, hash: ~str};
let mut result = ~[];
for p(cstore).use_crate_map.each_value |cnum| {
let cdata = cstore::get_crate_data(cstore, cnum);
let hash = decoder::get_crate_hash(cdata.data);
debug!{"Add hash[%s]: %s", cdata.name, *hash};
vec::push(result, {name: @cdata.name, hash: hash});
debug!{"Add hash[%s]: %s", cdata.name, hash};
vec::push(result, {name: cdata.name, hash: hash});
};
pure fn lteq(a: &crate_hash, b: &crate_hash) -> bool {
*a.name <= *b.name
}
pure fn lteq(a: &crate_hash, b: &crate_hash) -> bool {a.name <= b.name}
let sorted = std::sort::merge_sort(lteq, result);
debug!{"sorted:"};
for sorted.each |x| {
debug!{" hash[%s]: %s", *x.name, *x.hash};
debug!{" hash[%s]: %s", x.name, x.hash};
}
fn mapper(ch: crate_hash) -> @~str { return ch.hash; }
fn mapper(ch: crate_hash) -> ~str { return ch.hash; }
return vec::map(sorted, mapper);
}
fn get_path(cstore: cstore, d: ast::def_id) -> ~[ast::ident] {
// let f = bind str::split_str(_, "::");
fn get_path(cstore: cstore, d: ast::def_id) -> ~[~str] {
option::map_default(p(cstore).mod_path_map.find(d), ~[],
|ds| str::split_str(*ds, ~"::").map(|x| @x ) )
|ds| str::split_str(*ds, ~"::"))
}
// Local Variables:
// mode: rust
......
......@@ -15,6 +15,8 @@
import util::ppaux::ty_to_str;
import syntax::diagnostic::span_handler;
import common::*;
import syntax::parse::token::ident_interner;
export class_dtor;
export get_class_fields;
......@@ -212,7 +214,7 @@ fn enum_variant_ids(item: ebml::doc, cdata: cmd) -> ~[ast::def_id] {
return ids;
}
fn item_path(item_doc: ebml::doc) -> ast_map::path {
fn item_path(intr: ident_interner, item_doc: ebml::doc) -> ast_map::path {
let path_doc = ebml::get_doc(item_doc, tag_path);
let len_doc = ebml::get_doc(path_doc, tag_path_len);
......@@ -224,10 +226,10 @@ fn item_path(item_doc: ebml::doc) -> ast_map::path {
for ebml::docs(path_doc) |tag, elt_doc| {
if tag == tag_path_elt_mod {
let str = ebml::doc_as_str(elt_doc);
vec::push(result, ast_map::path_mod(@str));
vec::push(result, ast_map::path_mod(intr.intern(@str)));
} else if tag == tag_path_elt_name {
let str = ebml::doc_as_str(elt_doc);
vec::push(result, ast_map::path_name(@str));
vec::push(result, ast_map::path_name(intr.intern(@str)));
} else {
// ignore tag_path_len element
}
......@@ -236,9 +238,9 @@ fn item_path(item_doc: ebml::doc) -> ast_map::path {
return result;
}
fn item_name(item: ebml::doc) -> ast::ident {
fn item_name(intr: ident_interner, item: ebml::doc) -> ast::ident {
let name = ebml::get_doc(item, tag_paths_data_name);
@str::from_bytes(ebml::doc_data(name))
intr.intern(@str::from_bytes(ebml::doc_data(name)))
}
fn item_to_def_like(item: ebml::doc, did: ast::def_id, cnum: ast::crate_num)
......@@ -304,37 +306,38 @@ fn get_impl_traits(cdata: cmd, id: ast::node_id, tcx: ty::ctxt) -> ~[ty::t] {
item_impl_traits(lookup_item(id, cdata.data), tcx, cdata)
}
fn get_impl_method(cdata: cmd, id: ast::node_id,
fn get_impl_method(intr: ident_interner, cdata: cmd, id: ast::node_id,
name: ast::ident) -> ast::def_id {
let items = ebml::get_doc(ebml::doc(cdata.data), tag_items);
let mut found = none;
for ebml::tagged_docs(find_item(id, items), tag_item_impl_method) |mid| {
let m_did = ebml::with_doc_data(mid, |d| parse_def_id(d));
if item_name(find_item(m_did.node, items)) == name {
if item_name(intr, find_item(m_did.node, items)) == name {
found = some(translate_def_id(cdata, m_did));
}
}
option::get(found)
}
fn get_class_method(cdata: cmd, id: ast::node_id,
fn get_class_method(intr: ident_interner, cdata: cmd, id: ast::node_id,
name: ast::ident) -> ast::def_id {
let items = ebml::get_doc(ebml::doc(cdata.data), tag_items);
let mut found = none;
let cls_items = match maybe_find_item(id, items) {
some(it) => it,
none => fail (fmt!{"get_class_method: class id not found \
when looking up method %s", *name})
when looking up method %s", *intr.get(name)})
};
for ebml::tagged_docs(cls_items, tag_item_trait_method) |mid| {
let m_did = item_def_id(mid, cdata);
if item_name(mid) == name {
if item_name(intr, mid) == name {
found = some(m_did);
}
}
match found {
some(found) => found,
none => fail (fmt!{"get_class_method: no method named %s", *name})
none => fail (fmt!{"get_class_method: no method named %s",
*intr.get(name)})
}
}
......@@ -387,7 +390,7 @@ struct path_entry {
}
/// Iterates over all the paths in the given crate.
fn each_path(cdata: cmd, f: fn(path_entry) -> bool) {
fn each_path(intr: ident_interner, cdata: cmd, f: fn(path_entry) -> bool) {
let root = ebml::doc(cdata.data);
let items = ebml::get_doc(root, tag_items);
let items_data = ebml::get_doc(items, tag_items_data);
......@@ -397,8 +400,8 @@ fn each_path(cdata: cmd, f: fn(path_entry) -> bool) {
// First, go through all the explicit items.
for ebml::tagged_docs(items_data, tag_items_data_item) |item_doc| {
if !broken {
let path = ast_map::path_to_str_with_sep(item_path(item_doc),
~"::");
let path = ast_map::path_to_str_with_sep(
item_path(intr, item_doc), ~"::", intr);
if path != ~"" {
// Extract the def ID.
let def_id = item_def_id(item_doc, cdata);
......@@ -467,8 +470,9 @@ fn each_path(cdata: cmd, f: fn(path_entry) -> bool) {
}
}
fn get_item_path(cdata: cmd, id: ast::node_id) -> ast_map::path {
item_path(lookup_item(id, cdata.data))
fn get_item_path(intr: ident_interner, cdata: cmd, id: ast::node_id)
-> ast_map::path {
item_path(intr, lookup_item(id, cdata.data))
}
type decode_inlined_item = fn(
......@@ -477,13 +481,13 @@ fn get_item_path(cdata: cmd, id: ast::node_id) -> ast_map::path {
path: ast_map::path,
par_doc: ebml::doc) -> option<ast::inlined_item>;
fn maybe_get_item_ast(cdata: cmd, tcx: ty::ctxt,
fn maybe_get_item_ast(intr: ident_interner, cdata: cmd, tcx: ty::ctxt,
id: ast::node_id,
decode_inlined_item: decode_inlined_item
) -> csearch::found_ast {
debug!{"Looking up item: %d", id};
let item_doc = lookup_item(id, cdata.data);
let path = vec::init(item_path(item_doc));
let path = vec::init(item_path(intr, item_doc));
match decode_inlined_item(cdata, tcx, path, item_doc) {
some(ii) => csearch::found(ii),
none => {
......@@ -503,8 +507,8 @@ fn maybe_get_item_ast(cdata: cmd, tcx: ty::ctxt,
}
}
fn get_enum_variants(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
-> ~[ty::variant_info] {
fn get_enum_variants(intr: ident_interner, cdata: cmd, id: ast::node_id,
tcx: ty::ctxt) -> ~[ty::variant_info] {
let data = cdata.data;
let items = ebml::get_doc(ebml::doc(data), tag_items);
let item = find_item(id, items);
......@@ -515,7 +519,7 @@ fn get_enum_variants(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
let item = find_item(did.node, items);
let ctor_ty = item_type({crate: cdata.cnum, node: id}, item,
tcx, cdata);
let name = item_name(item);
let name = item_name(intr, item);
let mut arg_tys: ~[ty::t] = ~[];
match ty::get(ctor_ty).struct {
ty::ty_fn(f) => {
......@@ -573,8 +577,8 @@ fn get_mutability(ch: u8) -> ast::mutability {
}
}
fn item_impl_methods(cdata: cmd, item: ebml::doc, base_tps: uint)
-> ~[@method_info] {
fn item_impl_methods(intr: ident_interner, cdata: cmd, item: ebml::doc,
base_tps: uint) -> ~[@method_info] {
let mut rslt = ~[];
for ebml::tagged_docs(item, tag_item_impl_method) |doc| {
let m_did = ebml::with_doc_data(doc, |d| parse_def_id(d));
......@@ -583,15 +587,14 @@ fn item_impl_methods(cdata: cmd, item: ebml::doc, base_tps: uint)
vec::push(rslt, @{did: translate_def_id(cdata, m_did),
/* FIXME (maybe #2323) tjc: take a look at this. */
n_tps: item_ty_param_count(mth_item) - base_tps,
ident: item_name(mth_item),
ident: item_name(intr, mth_item),
self_type: self_ty});
}
rslt
}
fn get_impls_for_mod(cdata: cmd,
m_id: ast::node_id,
name: option<ast::ident>,
fn get_impls_for_mod(intr: ident_interner, cdata: cmd,
m_id: ast::node_id, name: option<ast::ident>,
get_cdata: fn(ast::crate_num) -> cmd)
-> @~[@_impl] {
......@@ -608,12 +611,12 @@ fn get_impls_for_mod(cdata: cmd,
let impl_cdata = get_cdata(local_did.crate);
let impl_data = impl_cdata.data;
let item = lookup_item(local_did.node, impl_data);
let nm = item_name(item);
let nm = item_name(intr, item);
if match name { some(n) => { n == nm } none => { true } } {
let base_tps = item_ty_param_count(item);
vec::push(result, @{
did: local_did, ident: nm,
methods: item_impl_methods(impl_cdata, item, base_tps)
methods: item_impl_methods(intr, impl_cdata, item, base_tps)
});
};
}
......@@ -621,14 +624,14 @@ fn get_impls_for_mod(cdata: cmd,
}
/* Works for both classes and traits */
fn get_trait_methods(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
-> @~[ty::method] {
fn get_trait_methods(intr: ident_interner, cdata: cmd, id: ast::node_id,
tcx: ty::ctxt) -> @~[ty::method] {
let data = cdata.data;
let item = lookup_item(id, data);
let mut result = ~[];
for ebml::tagged_docs(item, tag_item_trait_method) |mth| {
let bounds = item_ty_param_bounds(mth, tcx, cdata);
let name = item_name(mth);
let name = item_name(intr, mth);
let ty = doc_type(mth, tcx, cdata);
let fty = match ty::get(ty).struct {
ty::ty_fn(f) => f,
......@@ -651,8 +654,9 @@ fn get_trait_methods(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
// If the item in question is a trait, returns its set of methods and
// their self types. Otherwise, returns none. This overlaps in an
// annoying way with get_trait_methods.
fn get_method_names_if_trait(cdata: cmd, node_id: ast::node_id)
-> option<@DVec<(@~str, ast::self_ty_)>> {
fn get_method_names_if_trait(intr: ident_interner, cdata: cmd,
node_id: ast::node_id)
-> option<@DVec<(ast::ident, ast::self_ty_)>> {
let item = lookup_item(node_id, cdata.data);
if item_family(item) != 'I' {
......@@ -662,7 +666,7 @@ fn get_method_names_if_trait(cdata: cmd, node_id: ast::node_id)
let resulting_methods = @dvec();
for ebml::tagged_docs(item, tag_item_trait_method) |method| {
resulting_methods.push(
(item_name(method), get_self_ty(method)));
(item_name(intr, method), get_self_ty(method)));
}
return some(resulting_methods);
}
......@@ -680,7 +684,7 @@ fn get_item_attrs(cdata: cmd,
}
// Helper function that gets either fields or methods
fn get_class_members(cdata: cmd, id: ast::node_id,
fn get_class_members(intr: ident_interner, cdata: cmd, id: ast::node_id,
p: fn(char) -> bool) -> ~[ty::field_ty] {
let data = cdata.data;
let item = lookup_item(id, data);
......@@ -688,7 +692,7 @@ fn get_class_members(cdata: cmd, id: ast::node_id,
for ebml::tagged_docs(item, tag_item_field) |an_item| {
let f = item_family(an_item);
if p(f) {
let name = item_name(an_item);
let name = item_name(intr, an_item);
let did = item_def_id(an_item, cdata);
let mt = field_mutability(an_item);
vec::push(result, {ident: name, id: did, vis:
......@@ -708,8 +712,9 @@ fn get_class_members(cdata: cmd, id: ast::node_id,
}
/* 'g' for public field, 'j' for private field, 'N' for inherited field */
fn get_class_fields(cdata: cmd, id: ast::node_id) -> ~[ty::field_ty] {
get_class_members(cdata, id, |f| f == 'g' || f == 'j' || f == 'N')
fn get_class_fields(intr: ident_interner, cdata: cmd, id: ast::node_id)
-> ~[ty::field_ty] {
get_class_members(intr, cdata, id, |f| f == 'g' || f == 'j' || f == 'N')
}
fn family_has_type_params(fam_ch: char) -> bool {
......@@ -774,7 +779,7 @@ fn get_meta_items(md: ebml::doc) -> ~[@ast::meta_item] {
for ebml::tagged_docs(md, tag_meta_item_word) |meta_item_doc| {
let nd = ebml::get_doc(meta_item_doc, tag_meta_item_name);
let n = str::from_bytes(ebml::doc_data(nd));
vec::push(items, attr::mk_word_item(@n));
vec::push(items, attr::mk_word_item(n));
};
for ebml::tagged_docs(md, tag_meta_item_name_value) |meta_item_doc| {
let nd = ebml::get_doc(meta_item_doc, tag_meta_item_name);
......@@ -783,13 +788,13 @@ fn get_meta_items(md: ebml::doc) -> ~[@ast::meta_item] {
let v = str::from_bytes(ebml::doc_data(vd));
// FIXME (#623): Should be able to decode meta_name_value variants,
// but currently the encoder just drops them
vec::push(items, attr::mk_name_value_item_str(@n, v));
vec::push(items, attr::mk_name_value_item_str(n, v));
};
for ebml::tagged_docs(md, tag_meta_item_list) |meta_item_doc| {
let nd = ebml::get_doc(meta_item_doc, tag_meta_item_name);
let n = str::from_bytes(ebml::doc_data(nd));
let subitems = get_meta_items(meta_item_doc);
vec::push(items, attr::mk_list_item(@n, subitems));
vec::push(items, attr::mk_list_item(n, subitems));
};
return items;
}
......@@ -815,17 +820,19 @@ fn get_attributes(md: ebml::doc) -> ~[ast::attribute] {
return attrs;
}
fn list_meta_items(meta_items: ebml::doc, out: io::Writer) {
fn list_meta_items(intr: ident_interner,
meta_items: ebml::doc, out: io::Writer) {
for get_meta_items(meta_items).each |mi| {
out.write_str(fmt!{"%s\n", pprust::meta_item_to_str(*mi)});
out.write_str(fmt!{"%s\n", pprust::meta_item_to_str(*mi, intr)});
}
}
fn list_crate_attributes(md: ebml::doc, hash: @~str, out: io::Writer) {
out.write_str(fmt!{"=Crate Attributes (%s)=\n", *hash});
fn list_crate_attributes(intr: ident_interner, md: ebml::doc, hash: ~str,
out: io::Writer) {
out.write_str(fmt!{"=Crate Attributes (%s)=\n", hash});
for get_attributes(md).each |attr| {
out.write_str(fmt!{"%s\n", pprust::attribute_to_str(attr)});
out.write_str(fmt!{"%s\n", pprust::attribute_to_str(attr, intr)});
}
out.write_str(~"\n\n");
......@@ -836,9 +843,9 @@ fn get_crate_attributes(data: @~[u8]) -> ~[ast::attribute] {
}
type crate_dep = {cnum: ast::crate_num, name: ast::ident,
vers: @~str, hash: @~str};
vers: ~str, hash: ~str};
fn get_crate_deps(data: @~[u8]) -> ~[crate_dep] {
fn get_crate_deps(intr: ident_interner, data: @~[u8]) -> ~[crate_dep] {
let mut deps: ~[crate_dep] = ~[];
let cratedoc = ebml::doc(data);
let depsdoc = ebml::get_doc(cratedoc, tag_crate_deps);
......@@ -848,42 +855,44 @@ fn docstr(doc: ebml::doc, tag_: uint) -> ~str {
}
for ebml::tagged_docs(depsdoc, tag_crate_dep) |depdoc| {
vec::push(deps, {cnum: crate_num,
name: @docstr(depdoc, tag_crate_dep_name),
vers: @docstr(depdoc, tag_crate_dep_vers),
hash: @docstr(depdoc, tag_crate_dep_hash)});
name: intr.intern(@docstr(depdoc, tag_crate_dep_name)),
vers: docstr(depdoc, tag_crate_dep_vers),
hash: docstr(depdoc, tag_crate_dep_hash)});
crate_num += 1;
};
return deps;
}
fn list_crate_deps(data: @~[u8], out: io::Writer) {
fn list_crate_deps(intr: ident_interner, data: @~[u8], out: io::Writer) {
out.write_str(~"=External Dependencies=\n");
for get_crate_deps(data).each |dep| {
out.write_str(fmt!{"%d %s-%s-%s\n",
dep.cnum, *dep.name, *dep.hash, *dep.vers});
for get_crate_deps(intr, data).each |dep| {
out.write_str(
fmt!{"%d %s-%s-%s\n",
dep.cnum, *intr.get(dep.name), dep.hash, dep.vers});
}
out.write_str(~"\n");
}
fn get_crate_hash(data: @~[u8]) -> @~str {
fn get_crate_hash(data: @~[u8]) -> ~str {
let cratedoc = ebml::doc(data);
let hashdoc = ebml::get_doc(cratedoc, tag_crate_hash);
return @str::from_bytes(ebml::doc_data(hashdoc));
return str::from_bytes(ebml::doc_data(hashdoc));
}
fn get_crate_vers(data: @~[u8]) -> @~str {
fn get_crate_vers(data: @~[u8]) -> ~str {
let attrs = decoder::get_crate_attributes(data);
return match attr::last_meta_item_value_str_by_name(
attr::find_linkage_metas(attrs), ~"vers") {
some(ver) => ver,
none => @~"0.0"
none => ~"0.0"
};
}
fn iter_crate_items(cdata: cmd, proc: fn(~str, ast::def_id)) {
for each_path(cdata) |path_entry| {
fn iter_crate_items(intr: ident_interner,
cdata: cmd, proc: fn(~str, ast::def_id)) {
for each_path(intr, cdata) |path_entry| {
match path_entry.def_like {
dl_impl(*) | dl_field => {}
dl_def(def) => {
......@@ -893,7 +902,8 @@ fn iter_crate_items(cdata: cmd, proc: fn(~str, ast::def_id)) {
}
}
fn get_crate_module_paths(cdata: cmd) -> ~[(ast::def_id, ~str)] {
fn get_crate_module_paths(intr: ident_interner, cdata: cmd)
-> ~[(ast::def_id, ~str)] {
fn mod_of_path(p: ~str) -> ~str {
str::connect(vec::init(str::split_str(p, ~"::")), ~"::")
}
......@@ -902,7 +912,7 @@ fn mod_of_path(p: ~str) -> ~str {
// fowarded path due to renamed import or reexport
let mut res = ~[];
let mods = map::str_hash();
do iter_crate_items(cdata) |path, did| {
do iter_crate_items(intr, cdata) |path, did| {
let m = mod_of_path(path);
if str::is_not_empty(m) {
// if m has a sub-item, it must be a module
......@@ -919,11 +929,12 @@ fn mod_of_path(p: ~str) -> ~str {
}
}
fn list_crate_metadata(bytes: @~[u8], out: io::Writer) {
fn list_crate_metadata(intr: ident_interner, bytes: @~[u8],
out: io::Writer) {
let hash = get_crate_hash(bytes);
let md = ebml::doc(bytes);
list_crate_attributes(md, hash, out);
list_crate_deps(bytes, out);
list_crate_attributes(intr, md, hash, out);
list_crate_deps(intr, bytes, out);
}
// Translates a def_id from an external crate to a def_id for the current
......
此差异已折叠。
......@@ -7,6 +7,7 @@
import lib::llvm::{False, llvm, mk_object_file, mk_section_iter};
import filesearch::filesearch;
import io::WriterUtil;
import syntax::parse::token::ident_interner;
export os;
export os_macos, os_win32, os_linux, os_freebsd;
......@@ -33,7 +34,8 @@ enum os {
metas: ~[@ast::meta_item],
hash: ~str,
os: os,
static: bool
static: bool,
intr: ident_interner
};
fn load_library_crate(cx: ctxt) -> {ident: ~str, data: @~[u8]} {
......@@ -41,7 +43,8 @@ fn load_library_crate(cx: ctxt) -> {ident: ~str, data: @~[u8]} {
some(t) => return t,
none => {
cx.diag.span_fatal(
cx.span, fmt!{"can't find crate for `%s`", *cx.ident});
cx.span, fmt!{"can't find crate for `%s`",
*cx.intr.get(cx.ident)});
}
}
}
......@@ -66,7 +69,7 @@ fn find_library_crate_aux(cx: ctxt,
filesearch: filesearch::filesearch) ->
option<{ident: ~str, data: @~[u8]}> {
let crate_name = crate_name_from_metas(cx.metas);
let prefix: ~str = nn.prefix + *crate_name + ~"-";
let prefix: ~str = nn.prefix + crate_name + ~"-";
let suffix: ~str = nn.suffix;
let mut matches = ~[];
......@@ -104,19 +107,19 @@ fn find_library_crate_aux(cx: ctxt,
some(matches[0])
} else {
cx.diag.span_err(
cx.span, fmt!{"multiple matching crates for `%s`", *crate_name});
cx.span, fmt!{"multiple matching crates for `%s`", crate_name});
cx.diag.handler().note(~"candidates:");
for matches.each |match_| {
cx.diag.handler().note(fmt!{"path: %s", match_.ident});
let attrs = decoder::get_crate_attributes(match_.data);
note_linkage_attrs(cx.diag, attrs);
note_linkage_attrs(cx.intr, cx.diag, attrs);
}
cx.diag.handler().abort_if_errors();
none
}
}
fn crate_name_from_metas(metas: ~[@ast::meta_item]) -> @~str {
fn crate_name_from_metas(metas: ~[@ast::meta_item]) -> ~str {
let name_items = attr::find_meta_items_by_name(metas, ~"name");
match vec::last_opt(name_items) {
some(i) => {
......@@ -131,9 +134,10 @@ fn crate_name_from_metas(metas: ~[@ast::meta_item]) -> @~str {
}
}
fn note_linkage_attrs(diag: span_handler, attrs: ~[ast::attribute]) {
fn note_linkage_attrs(intr: ident_interner, diag: span_handler,
attrs: ~[ast::attribute]) {
for attr::find_linkage_attrs(attrs).each |attr| {
diag.handler().note(fmt!{"meta: %s", pprust::attr_to_str(attr)});
diag.handler().note(fmt!{"meta: %s", pprust::attr_to_str(attr,intr)});
}
}
......@@ -143,7 +147,7 @@ fn crate_matches(crate_data: @~[u8], metas: ~[@ast::meta_item],
let linkage_metas = attr::find_linkage_metas(attrs);
if hash.is_not_empty() {
let chash = decoder::get_crate_hash(crate_data);
if *chash != hash { return false; }
if chash != hash { return false; }
}
metadata_matches(linkage_metas, metas)
}
......@@ -154,15 +158,8 @@ fn metadata_matches(extern_metas: ~[@ast::meta_item],
debug!{"matching %u metadata requirements against %u items",
vec::len(local_metas), vec::len(extern_metas)};
debug!{"crate metadata:"};
for extern_metas.each |have| {
debug!{" %s", pprust::meta_item_to_str(*have)};
}
for local_metas.each |needed| {
debug!{"looking for %s", pprust::meta_item_to_str(*needed)};
if !attr::contains(extern_metas, needed) {
debug!{"missing %s", pprust::meta_item_to_str(*needed)};
return false;
}
}
......@@ -206,9 +203,10 @@ fn meta_section_name(os: os) -> ~str {
}
// A diagnostic function for dumping crate metadata to an output stream
fn list_file_metadata(os: os, path: ~str, out: io::Writer) {
fn list_file_metadata(intr: ident_interner, os: os, path: ~str,
out: io::Writer) {
match get_metadata_section(os, path) {
option::some(bytes) => decoder::list_crate_metadata(bytes, out),
option::some(bytes) => decoder::list_crate_metadata(intr, bytes, out),
option::none => {
out.write_str(~"could not find metadata in " + path + ~".\n");
}
......
......@@ -46,7 +46,7 @@ fn parse_ident_(st: @pstate, is_last: fn@(char) -> bool) ->
while !is_last(peek(st)) {
rslt += str::from_byte(next_byte(st));
}
return @rslt;
return st.tcx.sess.ident_of(rslt);
}
......@@ -133,7 +133,7 @@ fn parse_bound_region(st: @pstate) -> ty::bound_region {
assert next(st) == '|';
ty::br_anon(id)
}
'[' => ty::br_named(@parse_str(st, ']')),
'[' => ty::br_named(st.tcx.sess.ident_of(parse_str(st, ']'))),
'c' => {
let id = parse_int(st);
assert next(st) == '|';
......@@ -249,7 +249,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
assert (next(st) == '[');
let mut fields: ~[ty::field] = ~[];
while peek(st) != ']' {
let name = @parse_str(st, '=');
let name = st.tcx.sess.ident_of(parse_str(st, '='));
vec::push(fields, {ident: name, mt: parse_mt(st, conv)});
}
st.pos = st.pos + 1u;
......
......@@ -126,14 +126,14 @@ fn enc_region(w: io::Writer, cx: @ctxt, r: ty::region) {
match r {
ty::re_bound(br) => {
w.write_char('b');
enc_bound_region(w, br);
enc_bound_region(w, cx, br);
}
ty::re_free(id, br) => {
w.write_char('f');
w.write_char('[');
w.write_int(id);
w.write_char('|');
enc_bound_region(w, br);
enc_bound_region(w, cx, br);
w.write_char(']');
}
ty::re_scope(nid) => {
......@@ -151,7 +151,7 @@ fn enc_region(w: io::Writer, cx: @ctxt, r: ty::region) {
}
}
fn enc_bound_region(w: io::Writer, br: ty::bound_region) {
fn enc_bound_region(w: io::Writer, cx: @ctxt, br: ty::bound_region) {
match br {
ty::br_self => w.write_char('s'),
ty::br_anon(idx) => {
......@@ -161,14 +161,14 @@ fn enc_bound_region(w: io::Writer, br: ty::bound_region) {
}
ty::br_named(s) => {
w.write_char('[');
w.write_str(*s);
w.write_str(cx.tcx.sess.str_of(s));
w.write_char(']')
}
ty::br_cap_avoid(id, br) => {
w.write_char('c');
w.write_int(id);
w.write_char('|');
enc_bound_region(w, *br);
enc_bound_region(w, cx, *br);
}
}
}
......@@ -265,7 +265,7 @@ fn enc_sty(w: io::Writer, cx: @ctxt, st: ty::sty) {
ty::ty_rec(fields) => {
w.write_str(&"R[");
for fields.each |field| {
w.write_str(*field.ident);
w.write_str(cx.tcx.sess.str_of(field.ident));
w.write_char('=');
enc_mt(w, cx, field.mt);
}
......
......@@ -83,7 +83,8 @@ fn encode_inlined_item(ecx: @e::encode_ctxt,
ii: ast::inlined_item,
maps: maps) {
debug!{"> Encoding inlined item: %s::%s (%u)",
ast_map::path_to_str(path), *ii.ident(),
ast_map::path_to_str(path, ecx.tcx.sess.parse_sess.interner),
ecx.tcx.sess.str_of(ii.ident()),
ebml_w.writer.tell()};
let id_range = ast_util::compute_id_range_for_inlined_item(ii);
......@@ -94,7 +95,8 @@ fn encode_inlined_item(ecx: @e::encode_ctxt,
}
debug!{"< Encoded inlined fn: %s::%s (%u)",
ast_map::path_to_str(path), *ii.ident(),
ast_map::path_to_str(path, ecx.tcx.sess.parse_sess.interner),
ecx.tcx.sess.str_of(ii.ident()),
ebml_w.writer.tell()};
}
......@@ -107,7 +109,8 @@ fn decode_inlined_item(cdata: cstore::crate_metadata,
match par_doc.opt_child(c::tag_ast) {
none => none,
some(ast_doc) => {
debug!{"> Decoding inlined fn: %s::?", ast_map::path_to_str(path)};
debug!{"> Decoding inlined fn: %s::?",
ast_map::path_to_str(path, tcx.sess.parse_sess.interner)};
let ast_dsr = ebml::ebml_deserializer(ast_doc);
let from_id_range = ast_util::deserialize_id_range(ast_dsr);
let to_id_range = reserve_id_range(dcx.tcx.sess, from_id_range);
......@@ -118,14 +121,15 @@ fn decode_inlined_item(cdata: cstore::crate_metadata,
let ii = renumber_ast(xcx, raw_ii);
ast_map::map_decoded_item(tcx.sess.diagnostic(),
dcx.tcx.items, path, ii);
debug!{"Fn named: %s", *ii.ident()};
debug!{"Fn named: %s", tcx.sess.str_of(ii.ident())};
decode_side_tables(xcx, ast_doc);
debug!{"< Decoded inlined fn: %s::%s",
ast_map::path_to_str(path), *ii.ident()};
ast_map::path_to_str(path, tcx.sess.parse_sess.interner),
tcx.sess.str_of(ii.ident())};
match ii {
ast::ii_item(i) => {
debug!{">>> DECODED ITEM >>>\n%s\n<<< DECODED ITEM <<<",
syntax::print::pprust::item_to_str(i)};
syntax::print::pprust::item_to_str(i, tcx.sess.intr())};
}
_ => { }
}
......@@ -915,28 +919,26 @@ trait fake_ext_ctxt {
}
#[cfg(test)]
type fake_session = ();
type fake_session = parse::parse_sess;
#[cfg(test)]
impl fake_session: fake_ext_ctxt {
fn cfg() -> ast::crate_cfg { ~[] }
fn parse_sess() -> parse::parse_sess { parse::new_parse_sess(none) }
fn parse_sess() -> parse::parse_sess { self }
}
#[cfg(test)]
fn mk_ctxt() -> fake_ext_ctxt {
() as fake_ext_ctxt
parse::new_parse_sess(none) as fake_ext_ctxt
}
#[cfg(test)]
fn roundtrip(in_item: @ast::item) {
debug!{"in_item = %s", pprust::item_to_str(in_item)};
let mbuf = io::mem_buffer();
let ebml_w = ebml::writer(io::mem_buffer_writer(mbuf));
encode_item_ast(ebml_w, in_item);
let ebml_doc = ebml::doc(@io::mem_buffer_buf(mbuf));
let out_item = decode_item_ast(ebml_doc);
debug!{"out_item = %s", pprust::item_to_str(out_item)};
let exp_str =
io::with_str_writer(|w| ast::serialize_item(w, *in_item) );
......@@ -993,7 +995,8 @@ fn new_int_alist<B: copy>() -> alist<int, B> {
});
match (item_out, item_exp) {
(ast::ii_item(item_out), ast::ii_item(item_exp)) => {
assert pprust::item_to_str(item_out) == pprust::item_to_str(item_exp);
assert pprust::item_to_str(item_out, ext_cx.parse_sess().interner)
== pprust::item_to_str(item_exp, ext_cx.parse_sess().interner);
}
_ => fail
}
......
......@@ -159,7 +159,7 @@ fn check_pure_callee_or_arg(pc: purity_cause,
debug!{"check_pure_callee_or_arg(pc=%?, expr=%?, \
callee_id=%d, ty=%s)",
pc,
opt_expr.map(|e| pprust::expr_to_str(e) ),
opt_expr.map(|e| pprust::expr_to_str(e, tcx.sess.intr()) ),
callee_id,
ty_to_str(self.tcx(), ty::node_id_to_type(tcx, callee_id))};
......
......@@ -90,7 +90,8 @@ fn req_loans_in_expr(ex: @ast::expr,
let tcx = bccx.tcx;
let old_root_ub = self.root_ub;
debug!{"req_loans_in_expr(ex=%s)", pprust::expr_to_str(ex)};
debug!{"req_loans_in_expr(ex=%s)",
pprust::expr_to_str(ex, tcx.sess.intr())};
// If this expression is borrowed, have to ensure it remains valid:
for tcx.borrowings.find(ex.id).each |borrow| {
......
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册