Commit be6613e0 authored by Brian Anderson

Remove the crate language

Parent 81a79603
@@ -26,6 +26,198 @@ mod common;
#[legacy_exports]
mod errors;
use std::getopts;
use std::test;
use core::result;
use result::{Ok, Err};
use common::config;
use common::mode_run_pass;
use common::mode_run_fail;
use common::mode_compile_fail;
use common::mode_pretty;
use common::mode;
use util::logv;
fn main() {
let args = os::args();
let config = parse_config(args);
log_config(config);
run_tests(config);
}
fn parse_config(args: ~[~str]) -> config {
let opts =
~[getopts::reqopt(~"compile-lib-path"),
getopts::reqopt(~"run-lib-path"),
getopts::reqopt(~"rustc-path"), getopts::reqopt(~"src-base"),
getopts::reqopt(~"build-base"), getopts::reqopt(~"aux-base"),
getopts::reqopt(~"stage-id"),
getopts::reqopt(~"mode"), getopts::optflag(~"ignored"),
getopts::optopt(~"runtool"), getopts::optopt(~"rustcflags"),
getopts::optflag(~"verbose"),
getopts::optopt(~"logfile"),
getopts::optflag(~"jit")];
assert (vec::is_not_empty(args));
let args_ = vec::tail(args);
let matches =
match getopts::getopts(args_, opts) {
Ok(m) => m,
Err(f) => fail getopts::fail_str(f)
};
fn opt_path(m: getopts::Matches, nm: ~str) -> Path {
Path(getopts::opt_str(m, nm))
}
return {compile_lib_path: getopts::opt_str(matches, ~"compile-lib-path"),
run_lib_path: getopts::opt_str(matches, ~"run-lib-path"),
rustc_path: opt_path(matches, ~"rustc-path"),
src_base: opt_path(matches, ~"src-base"),
build_base: opt_path(matches, ~"build-base"),
aux_base: opt_path(matches, ~"aux-base"),
stage_id: getopts::opt_str(matches, ~"stage-id"),
mode: str_mode(getopts::opt_str(matches, ~"mode")),
run_ignored: getopts::opt_present(matches, ~"ignored"),
filter:
if vec::len(matches.free) > 0u {
option::Some(matches.free[0])
} else { option::None },
logfile: option::map(&getopts::opt_maybe_str(matches,
~"logfile"),
|s| Path(*s)),
runtool: getopts::opt_maybe_str(matches, ~"runtool"),
rustcflags: getopts::opt_maybe_str(matches, ~"rustcflags"),
jit: getopts::opt_present(matches, ~"jit"),
verbose: getopts::opt_present(matches, ~"verbose")};
}
fn log_config(config: config) {
let c = config;
logv(c, fmt!("configuration:"));
logv(c, fmt!("compile_lib_path: %s", config.compile_lib_path));
logv(c, fmt!("run_lib_path: %s", config.run_lib_path));
logv(c, fmt!("rustc_path: %s", config.rustc_path.to_str()));
logv(c, fmt!("src_base: %s", config.src_base.to_str()));
logv(c, fmt!("build_base: %s", config.build_base.to_str()));
logv(c, fmt!("stage_id: %s", config.stage_id));
logv(c, fmt!("mode: %s", mode_str(config.mode)));
logv(c, fmt!("run_ignored: %b", config.run_ignored));
logv(c, fmt!("filter: %s", opt_str(config.filter)));
logv(c, fmt!("runtool: %s", opt_str(config.runtool)));
logv(c, fmt!("rustcflags: %s", opt_str(config.rustcflags)));
logv(c, fmt!("jit: %b", config.jit));
logv(c, fmt!("verbose: %b", config.verbose));
logv(c, fmt!("\n"));
}
fn opt_str(maybestr: Option<~str>) -> ~str {
match maybestr { option::Some(s) => s, option::None => ~"(none)" }
}
fn str_opt(maybestr: ~str) -> Option<~str> {
if maybestr != ~"(none)" { option::Some(maybestr) } else { option::None }
}
fn str_mode(s: ~str) -> mode {
match s {
~"compile-fail" => mode_compile_fail,
~"run-fail" => mode_run_fail,
~"run-pass" => mode_run_pass,
~"pretty" => mode_pretty,
_ => fail ~"invalid mode"
}
}
fn mode_str(mode: mode) -> ~str {
match mode {
mode_compile_fail => ~"compile-fail",
mode_run_fail => ~"run-fail",
mode_run_pass => ~"run-pass",
mode_pretty => ~"pretty"
}
}
fn run_tests(config: config) {
let opts = test_opts(config);
let tests = make_tests(config);
let res = test::run_tests_console(&opts, tests);
if !res { fail ~"Some tests failed"; }
}
fn test_opts(config: config) -> test::TestOpts {
{filter:
match config.filter {
option::Some(s) => option::Some(s),
option::None => option::None
},
run_ignored: config.run_ignored,
logfile:
match config.logfile {
option::Some(s) => option::Some(s.to_str()),
option::None => option::None
}
}
}
fn make_tests(config: config) -> ~[test::TestDesc] {
debug!("making tests from %s",
config.src_base.to_str());
let mut tests = ~[];
for os::list_dir_path(&config.src_base).each |file| {
let file = copy *file;
debug!("inspecting file %s", file.to_str());
if is_test(config, file) {
tests.push(make_test(config, file))
}
}
move tests
}
fn is_test(config: config, testfile: &Path) -> bool {
// Pretty-printer does not work with .rc files yet
let valid_extensions =
match config.mode {
mode_pretty => ~[~".rs"],
_ => ~[~".rc", ~".rs"]
};
let invalid_prefixes = ~[~".", ~"#", ~"~"];
let name = testfile.filename().get();
let mut valid = false;
for valid_extensions.each |ext| {
if str::ends_with(name, *ext) { valid = true; }
}
for invalid_prefixes.each |pre| {
if str::starts_with(name, *pre) { valid = false; }
}
return valid;
}
fn make_test(config: config, testfile: &Path) ->
test::TestDesc {
{
name: make_test_name(config, testfile),
testfn: make_test_closure(config, testfile),
ignore: header::is_test_ignored(config, testfile),
should_fail: false
}
}
fn make_test_name(config: config, testfile: &Path) -> ~str {
fmt!("[%s] %s", mode_str(config.mode), testfile.to_str())
}
fn make_test_closure(config: config, testfile: &Path) -> test::TestFn {
let testfile = testfile.to_str();
fn~() { runtest::run(config, testfile) }
}
// Local Variables:
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
@@ -36,3 +36,1982 @@ use core::*;
#[legacy_exports]
mod pgp;
use syntax::{ast, codemap, parse, visit, attr};
use syntax::diagnostic::span_handler;
use codemap::span;
use rustc::metadata::filesearch::{get_cargo_root, get_cargo_root_nearest,
get_cargo_sysroot, libdir};
use syntax::diagnostic;
use result::{Ok, Err};
use io::WriterUtil;
use send_map::linear::LinearMap;
use std::{map, json, tempfile, term, sort, getopts};
use map::HashMap;
use to_str::to_str;
use getopts::{optflag, optopt, opt_present};
use dvec::DVec;
struct Package {
name: ~str,
uuid: ~str,
url: ~str,
method: ~str,
description: ~str,
reference: Option<~str>,
tags: ~[~str],
versions: ~[(~str, ~str)]
}
impl Package : cmp::Ord {
#[cfg(stage0)]
pure fn lt(other: &Package) -> bool {
if self.name.lt(&(*other).name) { return true; }
if (*other).name.lt(&self.name) { return false; }
if self.uuid.lt(&(*other).uuid) { return true; }
if (*other).uuid.lt(&self.uuid) { return false; }
if self.url.lt(&(*other).url) { return true; }
if (*other).url.lt(&self.url) { return false; }
if self.method.lt(&(*other).method) { return true; }
if (*other).method.lt(&self.method) { return false; }
if self.description.lt(&(*other).description) { return true; }
if (*other).description.lt(&self.description) { return false; }
if self.tags.lt(&(*other).tags) { return true; }
if (*other).tags.lt(&self.tags) { return false; }
if self.versions.lt(&(*other).versions) { return true; }
return false;
}
#[cfg(stage1)]
#[cfg(stage2)]
pure fn lt(&self, other: &Package) -> bool {
if (*self).name.lt(&(*other).name) { return true; }
if (*other).name.lt(&(*self).name) { return false; }
if (*self).uuid.lt(&(*other).uuid) { return true; }
if (*other).uuid.lt(&(*self).uuid) { return false; }
if (*self).url.lt(&(*other).url) { return true; }
if (*other).url.lt(&(*self).url) { return false; }
if (*self).method.lt(&(*other).method) { return true; }
if (*other).method.lt(&(*self).method) { return false; }
if (*self).description.lt(&(*other).description) { return true; }
if (*other).description.lt(&(*self).description) { return false; }
if (*self).tags.lt(&(*other).tags) { return true; }
if (*other).tags.lt(&(*self).tags) { return false; }
if (*self).versions.lt(&(*other).versions) { return true; }
return false;
}
#[cfg(stage0)]
pure fn le(other: &Package) -> bool { !(*other).lt(&self) }
#[cfg(stage1)]
#[cfg(stage2)]
pure fn le(&self, other: &Package) -> bool { !(*other).lt(&(*self)) }
#[cfg(stage0)]
pure fn ge(other: &Package) -> bool { !self.lt(other) }
#[cfg(stage1)]
#[cfg(stage2)]
pure fn ge(&self, other: &Package) -> bool { !(*self).lt(other) }
#[cfg(stage0)]
pure fn gt(other: &Package) -> bool { (*other).lt(&self) }
#[cfg(stage1)]
#[cfg(stage2)]
pure fn gt(&self, other: &Package) -> bool { (*other).lt(&(*self)) }
}
struct Source {
name: ~str,
mut url: ~str,
mut method: ~str,
mut key: Option<~str>,
mut keyfp: Option<~str>,
packages: DVec<Package>
}
struct Cargo {
pgp: bool,
root: Path,
installdir: Path,
bindir: Path,
libdir: Path,
workdir: Path,
sourcedir: Path,
sources: map::HashMap<~str, @Source>,
mut current_install: ~str,
dep_cache: map::HashMap<~str, bool>,
opts: Options
}
struct Crate {
name: ~str,
vers: ~str,
uuid: ~str,
desc: Option<~str>,
sigs: Option<~str>,
crate_type: Option<~str>,
deps: ~[~str]
}
struct Options {
test: bool,
mode: Mode,
free: ~[~str],
help: bool,
}
enum Mode { SystemMode, UserMode, LocalMode }
impl Mode : cmp::Eq {
#[cfg(stage0)]
pure fn eq(other: &Mode) -> bool {
(self as uint) == ((*other) as uint)
}
#[cfg(stage1)]
#[cfg(stage2)]
pure fn eq(&self, other: &Mode) -> bool {
((*self) as uint) == ((*other) as uint)
}
#[cfg(stage0)]
pure fn ne(other: &Mode) -> bool { !self.eq(other) }
#[cfg(stage1)]
#[cfg(stage2)]
pure fn ne(&self, other: &Mode) -> bool { !(*self).eq(other) }
}
fn opts() -> ~[getopts::Opt] {
~[optflag(~"g"), optflag(~"G"), optflag(~"test"),
optflag(~"h"), optflag(~"help")]
}
fn info(msg: ~str) {
let out = io::stdout();
if term::color_supported() {
term::fg(out, term::color_green);
out.write_str(~"info: ");
term::reset(out);
out.write_line(msg);
} else { out.write_line(~"info: " + msg); }
}
fn warn(msg: ~str) {
let out = io::stdout();
if term::color_supported() {
term::fg(out, term::color_yellow);
out.write_str(~"warning: ");
term::reset(out);
out.write_line(msg);
}else { out.write_line(~"warning: " + msg); }
}
fn error(msg: ~str) {
let out = io::stdout();
if term::color_supported() {
term::fg(out, term::color_red);
out.write_str(~"error: ");
term::reset(out);
out.write_line(msg);
}
else { out.write_line(~"error: " + msg); }
}
fn is_uuid(id: ~str) -> bool {
let parts = str::split_str(id, ~"-");
if vec::len(parts) == 5u {
let mut correct = 0u;
for vec::eachi(parts) |i, part| {
fn is_hex_digit(+ch: char) -> bool {
('0' <= ch && ch <= '9') ||
('a' <= ch && ch <= 'f') ||
('A' <= ch && ch <= 'F')
}
if !part.all(is_hex_digit) {
return false;
}
match i {
0u => {
if part.len() == 8u {
correct += 1u;
}
}
1u | 2u | 3u => {
if part.len() == 4u {
correct += 1u;
}
}
4u => {
if part.len() == 12u {
correct += 1u;
}
}
_ => { }
}
}
if correct >= 5u {
return true;
}
}
return false;
}
#[test]
fn test_is_uuid() {
assert is_uuid(~"aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaafAF09");
assert !is_uuid(~"aaaaaaaa-aaaa-aaaa-aaaaa-aaaaaaaaaaaa");
assert !is_uuid(~"");
assert !is_uuid(~"aaaaaaaa-aaa -aaaa-aaaa-aaaaaaaaaaaa");
assert !is_uuid(~"aaaaaaaa-aaa!-aaaa-aaaa-aaaaaaaaaaaa");
assert !is_uuid(~"aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa-a");
assert !is_uuid(~"aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaป");
}
// FIXME (#2661): implement url/URL parsing so we don't have to resort
// to weak checks
fn has_archive_extension(p: ~str) -> bool {
str::ends_with(p, ~".tar") ||
str::ends_with(p, ~".tar.gz") ||
str::ends_with(p, ~".tar.bz2") ||
str::ends_with(p, ~".tar.Z") ||
str::ends_with(p, ~".tar.lz") ||
str::ends_with(p, ~".tar.xz") ||
str::ends_with(p, ~".tgz") ||
str::ends_with(p, ~".tbz") ||
str::ends_with(p, ~".tbz2") ||
str::ends_with(p, ~".tb2") ||
str::ends_with(p, ~".taz") ||
str::ends_with(p, ~".tlz") ||
str::ends_with(p, ~".txz")
}
fn is_archive_path(u: ~str) -> bool {
has_archive_extension(u) && os::path_exists(&Path(u))
}
fn is_archive_url(u: ~str) -> bool {
// FIXME (#2661): this requires the protocol bit - if we had proper
// url parsing, we wouldn't need it
match str::find_str(u, ~"://") {
option::Some(_) => has_archive_extension(u),
_ => false
}
}
fn is_git_url(url: ~str) -> bool {
if str::ends_with(url, ~"/") { str::ends_with(url, ~".git/") }
else {
str::starts_with(url, ~"git://") || str::ends_with(url, ~".git")
}
}
fn assume_source_method(url: ~str) -> ~str {
if is_git_url(url) {
return ~"git";
}
if str::starts_with(url, ~"file://") || os::path_exists(&Path(url)) {
return ~"file";
}
~"curl"
}
fn load_link(mis: ~[@ast::meta_item]) -> (Option<~str>,
Option<~str>,
Option<~str>) {
let mut name = None;
let mut vers = None;
let mut uuid = None;
for mis.each |a| {
match a.node {
ast::meta_name_value(v, {node: ast::lit_str(s), span: _}) => {
match v {
~"name" => name = Some(*s),
~"vers" => vers = Some(*s),
~"uuid" => uuid = Some(*s),
_ => { }
}
}
_ => fail ~"load_link: meta items must be name-values"
}
}
(name, vers, uuid)
}
fn load_crate(filename: &Path) -> Option<Crate> {
let sess = parse::new_parse_sess(None);
let c = parse::parse_crate_from_file(filename, ~[], sess);
let mut name = None;
let mut vers = None;
let mut uuid = None;
let mut desc = None;
let mut sigs = None;
let mut crate_type = None;
for c.node.attrs.each |a| {
match a.node.value.node {
ast::meta_name_value(v, {node: ast::lit_str(_), span: _}) => {
match v {
~"desc" => desc = Some(v),
~"sigs" => sigs = Some(v),
~"crate_type" => crate_type = Some(v),
_ => { }
}
}
ast::meta_list(v, mis) => {
if v == ~"link" {
let (n, v, u) = load_link(mis);
name = n;
vers = v;
uuid = u;
}
}
_ => {
fail ~"crate attributes may not contain " +
~"meta_words";
}
}
}
type env = @{
mut deps: ~[~str]
};
fn goto_view_item(ps: syntax::parse::parse_sess, e: env,
i: @ast::view_item) {
match i.node {
ast::view_item_use(ident, metas, _) => {
let name_items =
attr::find_meta_items_by_name(metas, ~"name");
let m = if name_items.is_empty() {
metas + ~[attr::mk_name_value_item_str(
~"name", *ps.interner.get(ident))]
} else {
metas
};
let mut attr_name = ident;
let mut attr_vers = ~"";
let mut attr_from = ~"";
for m.each |item| {
match attr::get_meta_item_value_str(*item) {
Some(value) => {
let name = attr::get_meta_item_name(*item);
match name {
~"vers" => attr_vers = value,
~"from" => attr_from = value,
_ => ()
}
}
None => ()
}
}
let query = if !str::is_empty(attr_from) {
attr_from
} else {
if !str::is_empty(attr_vers) {
ps.interner.get(attr_name) + ~"@" + attr_vers
} else { *ps.interner.get(attr_name) }
};
match *ps.interner.get(attr_name) {
~"std" | ~"core" => (),
_ => e.deps.push(query)
}
}
_ => ()
}
}
fn goto_item(_e: env, _i: @ast::item) {
}
let e = @{
mut deps: ~[]
};
let v = visit::mk_simple_visitor(@{
visit_view_item: |a| goto_view_item(sess, e, a),
visit_item: |a| goto_item(e, a),
.. *visit::default_simple_visitor()
});
visit::visit_crate(*c, (), v);
let deps = copy e.deps;
match (name, vers, uuid) {
(Some(name0), Some(vers0), Some(uuid0)) => {
Some(Crate {
name: name0,
vers: vers0,
uuid: uuid0,
desc: desc,
sigs: sigs,
crate_type: crate_type,
deps: deps })
}
_ => return None
}
}
fn print(s: ~str) {
io::stdout().write_line(s);
}
fn rest(s: ~str, start: uint) -> ~str {
if (start >= str::len(s)) {
~""
} else {
str::slice(s, start, str::len(s))
}
}
fn need_dir(s: &Path) {
if os::path_is_dir(s) { return; }
if !os::make_dir(s, 493_i32 /* oct: 755 */) {
fail fmt!("can't make_dir %s", s.to_str());
}
}
fn valid_pkg_name(s: &str) -> bool {
fn is_valid_digit(+c: char) -> bool {
('0' <= c && c <= '9') ||
('a' <= c && c <= 'z') ||
('A' <= c && c <= 'Z') ||
c == '-' ||
c == '_'
}
s.all(is_valid_digit)
}
fn parse_source(name: ~str, j: &json::Json) -> @Source {
if !valid_pkg_name(name) {
fail fmt!("'%s' is an invalid source name", name);
}
match *j {
json::Object(j) => {
let mut url = match j.find(&~"url") {
Some(json::String(u)) => u,
_ => fail ~"needed 'url' field in source"
};
let method = match j.find(&~"method") {
Some(json::String(u)) => u,
_ => assume_source_method(url)
};
let key = match j.find(&~"key") {
Some(json::String(u)) => Some(u),
_ => None
};
let keyfp = match j.find(&~"keyfp") {
Some(json::String(u)) => Some(u),
_ => None
};
if method == ~"file" {
url = os::make_absolute(&Path(url)).to_str();
}
return @Source {
name: name,
mut url: url,
mut method: method,
mut key: key,
mut keyfp: keyfp,
packages: DVec() };
}
_ => fail ~"needed dict value in source"
};
}
fn try_parse_sources(filename: &Path, sources: map::HashMap<~str, @Source>) {
if !os::path_exists(filename) { return; }
let c = io::read_whole_file_str(filename);
match json::from_str(c.get()) {
Ok(json::Object(j)) => {
for j.each |k, v| {
sources.insert(copy *k, parse_source(*k, v));
debug!("source: %s", *k);
}
}
Ok(_) => fail ~"malformed sources.json",
Err(e) => fail fmt!("%s:%s", filename.to_str(), e.to_str())
}
}
fn load_one_source_package(src: @Source, p: &json::Object) {
let name = match p.find(&~"name") {
Some(json::String(n)) => {
if !valid_pkg_name(n) {
warn(~"malformed source json: "
+ src.name + ~", '" + n + ~"'"+
~" is an invalid name (alphanumeric, underscores and" +
~" dashes only)");
return;
}
n
}
_ => {
warn(~"malformed source json: " + src.name + ~" (missing name)");
return;
}
};
let uuid = match p.find(&~"uuid") {
Some(json::String(n)) => {
if !is_uuid(n) {
warn(~"malformed source json: "
+ src.name + ~", '" + n + ~"'"+
~" is an invalid uuid");
return;
}
n
}
_ => {
warn(~"malformed source json: " + src.name + ~" (missing uuid)");
return;
}
};
let url = match p.find(&~"url") {
Some(json::String(n)) => n,
_ => {
warn(~"malformed source json: " + src.name + ~" (missing url)");
return;
}
};
let method = match p.find(&~"method") {
Some(json::String(n)) => n,
_ => {
warn(~"malformed source json: "
+ src.name + ~" (missing method)");
return;
}
};
let reference = match p.find(&~"ref") {
Some(json::String(n)) => Some(n),
_ => None
};
let mut tags = ~[];
match p.find(&~"tags") {
Some(json::List(js)) => {
for js.each |j| {
match *j {
json::String(ref j) => tags.grow(1u, j),
_ => ()
}
}
}
_ => ()
}
let description = match p.find(&~"description") {
Some(json::String(n)) => n,
_ => {
warn(~"malformed source json: " + src.name
+ ~" (missing description)");
return;
}
};
let newpkg = Package {
name: name,
uuid: uuid,
url: url,
method: method,
description: description,
reference: reference,
tags: tags,
versions: ~[]
};
match src.packages.position(|pkg| pkg.uuid == uuid) {
Some(idx) => {
src.packages.set_elt(idx, newpkg);
log(debug, ~" updated package: " + src.name + ~"/" + name);
}
None => {
src.packages.push(newpkg);
}
}
log(debug, ~" loaded package: " + src.name + ~"/" + name);
}
fn load_source_info(c: &Cargo, src: @Source) {
let dir = c.sourcedir.push(src.name);
let srcfile = dir.push("source.json");
if !os::path_exists(&srcfile) { return; }
let srcstr = io::read_whole_file_str(&srcfile);
match json::from_str(srcstr.get()) {
Ok(ref json @ json::Object(_)) => {
let o = parse_source(src.name, json);
src.key = o.key;
src.keyfp = o.keyfp;
}
Ok(_) => {
warn(~"malformed source.json: " + src.name +
~"(source info is not a dict)");
}
Err(e) => {
warn(fmt!("%s:%s", src.name, e.to_str()));
}
};
}
fn load_source_packages(c: &Cargo, src: @Source) {
log(debug, ~"loading source: " + src.name);
let dir = c.sourcedir.push(src.name);
let pkgfile = dir.push("packages.json");
if !os::path_exists(&pkgfile) { return; }
let pkgstr = io::read_whole_file_str(&pkgfile);
match json::from_str(pkgstr.get()) {
Ok(json::List(js)) => {
for js.each |j| {
match *j {
json::Object(p) => {
load_one_source_package(src, p);
}
_ => {
warn(~"malformed source json: " + src.name +
~" (non-dict pkg)");
}
}
}
}
Ok(_) => {
warn(~"malformed packages.json: " + src.name +
~"(packages is not a list)");
}
Err(e) => {
warn(fmt!("%s:%s", src.name, e.to_str()));
}
};
}
fn build_cargo_options(argv: ~[~str]) -> Options {
let matches = match getopts::getopts(argv, opts()) {
result::Ok(m) => m,
result::Err(f) => {
fail fmt!("%s", getopts::fail_str(f));
}
};
let test = opt_present(matches, ~"test");
let G = opt_present(matches, ~"G");
let g = opt_present(matches, ~"g");
let help = opt_present(matches, ~"h") || opt_present(matches, ~"help");
let len = vec::len(matches.free);
let is_install = len > 1u && matches.free[1] == ~"install";
let is_uninstall = len > 1u && matches.free[1] == ~"uninstall";
if G && g { fail ~"-G and -g both provided"; }
if !is_install && !is_uninstall && (g || G) {
fail ~"-g and -G are only valid for `install` and `uninstall|rm`";
}
let mode =
if (!is_install && !is_uninstall) || g { UserMode }
else if G { SystemMode }
else { LocalMode };
Options {test: test, mode: mode, free: matches.free, help: help}
}
fn configure(opts: Options) -> Cargo {
let home = match get_cargo_root() {
Ok(home) => home,
Err(_err) => get_cargo_sysroot().get()
};
let get_cargo_dir = match opts.mode {
SystemMode => get_cargo_sysroot,
UserMode => get_cargo_root,
LocalMode => get_cargo_root_nearest
};
let p = get_cargo_dir().get();
let sources = HashMap();
try_parse_sources(&home.push("sources.json"), sources);
try_parse_sources(&home.push("local-sources.json"), sources);
let dep_cache = HashMap();
let mut c = Cargo {
pgp: pgp::supported(),
root: home,
installdir: p,
bindir: p.push("bin"),
libdir: p.push("lib"),
workdir: p.push("work"),
sourcedir: home.push("sources"),
sources: sources,
mut current_install: ~"",
dep_cache: dep_cache,
opts: opts
};
need_dir(&c.root);
need_dir(&c.installdir);
need_dir(&c.sourcedir);
need_dir(&c.workdir);
need_dir(&c.libdir);
need_dir(&c.bindir);
for sources.each_key |k| {
let mut s = sources.get(k);
load_source_packages(&c, s);
sources.insert(k, s);
}
if c.pgp {
pgp::init(&c.root);
} else {
warn(~"command `gpg` was not found");
warn(~"you have to install gpg from source " +
~" or package manager to get it to work correctly");
}
move c
}
fn for_each_package(c: &Cargo, b: fn(s: @Source, p: &Package)) {
for c.sources.each_value |v| {
for v.packages.each |p| {
b(v, p);
}
}
}
// Runs all programs in directory <buildpath>
fn run_programs(buildpath: &Path) {
let newv = os::list_dir_path(buildpath);
for newv.each |ct| {
run::run_program(ct.to_str(), ~[]);
}
}
// Runs rustc in <path + subdir> with the given flags
// and returns <path + subdir>
fn run_in_buildpath(what: &str, path: &Path, subdir: &Path, cf: &Path,
extra_flags: ~[~str]) -> Option<Path> {
let buildpath = path.push_rel(subdir);
need_dir(&buildpath);
debug!("%s: %s -> %s", what, cf.to_str(), buildpath.to_str());
let p = run::program_output(rustc_sysroot(),
~[~"--out-dir",
buildpath.to_str(),
cf.to_str()] + extra_flags);
if p.status != 0 {
error(fmt!("rustc failed: %d\n%s\n%s", p.status, p.err, p.out));
return None;
}
Some(buildpath)
}
fn test_one_crate(_c: &Cargo, path: &Path, cf: &Path) {
let buildpath = match run_in_buildpath(~"testing", path,
&Path("test"),
cf,
~[ ~"--test"]) {
None => return,
Some(bp) => bp
};
run_programs(&buildpath);
}
fn install_one_crate(c: &Cargo, path: &Path, cf: &Path) {
let buildpath = match run_in_buildpath(~"installing", path,
&Path("build"),
cf, ~[]) {
None => return,
Some(bp) => bp
};
let newv = os::list_dir_path(&buildpath);
let exec_suffix = os::exe_suffix();
for newv.each |ct| {
if (exec_suffix != ~"" && str::ends_with(ct.to_str(),
exec_suffix)) ||
(exec_suffix == ~"" &&
!str::starts_with(ct.filename().get(),
~"lib")) {
debug!(" bin: %s", ct.to_str());
install_to_dir(*ct, &c.bindir);
if c.opts.mode == SystemMode {
// FIXME (#2662): Put this file in PATH / symlink it so it can
// be used as a generic executable
// `cargo install -G rustray` and `rustray file.obj`
}
} else {
debug!(" lib: %s", ct.to_str());
install_to_dir(*ct, &c.libdir);
}
}
}
fn rustc_sysroot() -> ~str {
match os::self_exe_path() {
Some(path) => {
let rustc = path.push_many([~"..", ~"bin", ~"rustc"]);
debug!(" rustc: %s", rustc.to_str());
rustc.to_str()
}
None => ~"rustc"
}
}
fn install_source(c: &Cargo, path: &Path) {
debug!("source: %s", path.to_str());
os::change_dir(path);
let mut cratefiles = ~[];
for os::walk_dir(&Path(".")) |p| {
if p.filetype() == Some(~".rc") {
cratefiles.push(*p);
}
}
if vec::is_empty(cratefiles) {
fail ~"this doesn't look like a rust package (no .rc files)";
}
for cratefiles.each |cf| {
match load_crate(cf) {
None => loop,
Some(crate) => {
for crate.deps.each |query| {
// FIXME (#1356): handle cyclic dependencies
// (n.b. #1356 says "Cyclic dependency is an error
// condition")
let wd = get_temp_workdir(c);
install_query(c, &wd, *query);
}
os::change_dir(path);
if c.opts.test {
test_one_crate(c, path, cf);
}
install_one_crate(c, path, cf);
}
}
}
}
fn install_git(c: &Cargo, wd: &Path, url: ~str, reference: Option<~str>) {
run::program_output(~"git", ~[~"clone", url, wd.to_str()]);
if reference.is_some() {
let r = reference.get();
os::change_dir(wd);
run::run_program(~"git", ~[~"checkout", r]);
}
install_source(c, wd);
}
fn install_curl(c: &Cargo, wd: &Path, url: ~str) {
let tarpath = wd.push("pkg.tar");
let p = run::program_output(~"curl", ~[~"-f", ~"-s", ~"-o",
tarpath.to_str(), url]);
if p.status != 0 {
fail fmt!("fetch of %s failed: %s", url, p.err);
}
run::run_program(~"tar", ~[~"-x", ~"--strip-components=1",
~"-C", wd.to_str(),
~"-f", tarpath.to_str()]);
install_source(c, wd);
}
fn install_file(c: &Cargo, wd: &Path, path: &Path) {
run::program_output(~"tar", ~[~"-x", ~"--strip-components=1",
~"-C", wd.to_str(),
~"-f", path.to_str()]);
install_source(c, wd);
}
fn install_package(c: &Cargo, src: ~str, wd: &Path, pkg: Package) {
let url = copy pkg.url;
let method = match pkg.method {
~"git" => ~"git",
~"file" => ~"file",
_ => ~"curl"
};
info(fmt!("installing %s/%s via %s...", src, pkg.name, method));
match method {
~"git" => install_git(c, wd, url, copy pkg.reference),
~"file" => install_file(c, wd, &Path(url)),
~"curl" => install_curl(c, wd, url),
_ => ()
}
}
fn cargo_suggestion(c: &Cargo, fallback: fn())
{
if c.sources.size() == 0u {
error(~"no sources defined - you may wish to run " +
~"`cargo init`");
return;
}
fallback();
}
fn install_uuid(c: &Cargo, wd: &Path, uuid: ~str) {
let mut ps = ~[];
for_each_package(c, |s, p| {
if p.uuid == uuid {
vec::push(&mut ps, (s.name, copy *p));
}
});
if vec::len(ps) == 1u {
let (sname, p) = copy ps[0];
install_package(c, sname, wd, p);
return;
} else if vec::len(ps) == 0u {
cargo_suggestion(c, || {
error(~"can't find package: " + uuid);
});
return;
}
error(~"found multiple packages:");
for ps.each |elt| {
let (sname,p) = copy *elt;
info(~" " + sname + ~"/" + p.uuid + ~" (" + p.name + ~")");
}
}
fn install_named(c: &Cargo, wd: &Path, name: ~str) {
let mut ps = ~[];
for_each_package(c, |s, p| {
if p.name == name {
vec::push(&mut ps, (s.name, copy *p));
}
});
if vec::len(ps) == 1u {
let (sname, p) = copy ps[0];
install_package(c, sname, wd, p);
return;
} else if vec::len(ps) == 0u {
cargo_suggestion(c, || {
error(~"can't find package: " + name);
});
return;
}
error(~"found multiple packages:");
for ps.each |elt| {
let (sname,p) = copy *elt;
info(~" " + sname + ~"/" + p.uuid + ~" (" + p.name + ~")");
}
}
fn install_uuid_specific(c: &Cargo, wd: &Path, src: ~str, uuid: ~str) {
match c.sources.find(src) {
Some(s) => {
for s.packages.each |p| {
if p.uuid == uuid {
install_package(c, src, wd, *p);
return;
}
}
}
_ => ()
}
error(~"can't find package: " + src + ~"/" + uuid);
}
fn install_named_specific(c: &Cargo, wd: &Path, src: ~str, name: ~str) {
match c.sources.find(src) {
Some(s) => {
for s.packages.each |p| {
if p.name == name {
install_package(c, src, wd, *p);
return;
}
}
}
_ => ()
}
error(~"can't find package: " + src + ~"/" + name);
}
fn cmd_uninstall(c: &Cargo) {
if vec::len(c.opts.free) < 3u {
cmd_usage();
return;
}
let lib = &c.libdir;
let bin = &c.bindir;
let target = c.opts.free[2u];
// FIXME (#2662): needs stronger pattern matching
// FIXME (#2662): needs to uninstall from a specified location in a
// cache instead of looking for it (binaries can be uninstalled by
// name only)
fn try_uninstall(p: &Path) -> bool {
if os::remove_file(p) {
info(~"uninstalled: '" + p.to_str() + ~"'");
true
} else {
error(~"could not uninstall: '" +
p.to_str() + ~"'");
false
}
}
if is_uuid(target) {
for os::list_dir(lib).each |file| {
match str::find_str(*file, ~"-" + target + ~"-") {
Some(_) => if !try_uninstall(&lib.push(*file)) { return },
None => ()
}
}
error(~"can't find package with uuid: " + target);
} else {
for os::list_dir(lib).each |file| {
match str::find_str(*file, ~"lib" + target + ~"-") {
Some(_) => if !try_uninstall(&lib.push(*file)) { return },
None => ()
}
}
for os::list_dir(bin).each |file| {
match str::find_str(*file, target) {
Some(_) => if !try_uninstall(&bin.push(*file)) { return },
None => ()
}
}
error(~"can't find package with name: " + target);
}
}
fn install_query(c: &Cargo, wd: &Path, target: ~str) {
match c.dep_cache.find(target) {
Some(inst) => {
if inst {
return;
}
}
None => ()
}
c.dep_cache.insert(target, true);
if is_archive_path(target) {
install_file(c, wd, &Path(target));
return;
} else if is_git_url(target) {
let reference = if c.opts.free.len() >= 4u {
Some(c.opts.free[3u])
} else {
None
};
install_git(c, wd, target, reference);
} else if !valid_pkg_name(target) && has_archive_extension(target) {
install_curl(c, wd, target);
return;
} else {
let mut ps = copy target;
match str::find_char(ps, '/') {
option::Some(idx) => {
let source = str::slice(ps, 0u, idx);
ps = str::slice(ps, idx + 1u, str::len(ps));
if is_uuid(ps) {
install_uuid_specific(c, wd, source, ps);
} else {
install_named_specific(c, wd, source, ps);
}
}
option::None => {
if is_uuid(ps) {
install_uuid(c, wd, ps);
} else {
install_named(c, wd, ps);
}
}
}
}
// FIXME (#2662): This whole dep_cache and current_install thing is
// a bit of a hack. It should be cleaned up in the future.
if target == c.current_install {
for c.dep_cache.each |k, _v| {
c.dep_cache.remove(k);
}
c.current_install = ~"";
}
}
fn get_temp_workdir(c: &Cargo) -> Path {
match tempfile::mkdtemp(&c.workdir, "cargo") {
Some(wd) => wd,
None => fail fmt!("needed temp dir: %s",
c.workdir.to_str())
}
}
fn cmd_install(c: &Cargo) unsafe {
let wd = get_temp_workdir(c);
if vec::len(c.opts.free) == 2u {
let cwd = os::getcwd();
let status = run::run_program(~"cp", ~[~"-R", cwd.to_str(),
wd.to_str()]);
if status != 0 {
fail fmt!("could not copy directory: %s", cwd.to_str());
}
install_source(c, &wd);
return;
}
sync(c);
let query = c.opts.free[2];
c.current_install = query.to_str();
install_query(c, &wd, query);
}
fn sync(c: &Cargo) {
for c.sources.each_key |k| {
let mut s = c.sources.get(k);
sync_one(c, s);
c.sources.insert(k, s);
}
}
fn sync_one_file(c: &Cargo, dir: &Path, src: @Source) -> bool {
let name = src.name;
let srcfile = dir.push("source.json.new");
let destsrcfile = dir.push("source.json");
let pkgfile = dir.push("packages.json.new");
let destpkgfile = dir.push("packages.json");
let keyfile = dir.push("key.gpg");
let srcsigfile = dir.push("source.json.sig");
let sigfile = dir.push("packages.json.sig");
let url = Path(src.url);
let mut has_src_file = false;
if !os::copy_file(&url.push("packages.json"), &pkgfile) {
error(fmt!("fetch for source %s (url %s) failed",
name, url.to_str()));
return false;
}
if os::copy_file(&url.push("source.json"), &srcfile) {
has_src_file = true;
}
os::copy_file(&url.push("source.json.sig"), &srcsigfile);
os::copy_file(&url.push("packages.json.sig"), &sigfile);
match copy src.key {
Some(u) => {
let p = run::program_output(~"curl",
~[~"-f", ~"-s",
~"-o", keyfile.to_str(), u]);
if p.status != 0 {
error(fmt!("fetch for source %s (key %s) failed", name, u));
return false;
}
pgp::add(&c.root, &keyfile);
}
_ => ()
}
match (src.key, src.keyfp) {
(Some(_), Some(f)) => {
let r = pgp::verify(&c.root, &pkgfile, &sigfile);
if !r {
error(fmt!("signature verification failed for source %s with \
key %s", name, f));
return false;
}
if has_src_file {
let e = pgp::verify(&c.root, &srcfile, &srcsigfile);
if !e {
error(fmt!("signature verification failed for source %s \
with key %s", name, f));
return false;
}
}
}
_ => ()
}
copy_warn(&pkgfile, &destpkgfile);
if has_src_file {
copy_warn(&srcfile, &destsrcfile);
}
os::remove_file(&keyfile);
os::remove_file(&srcfile);
os::remove_file(&srcsigfile);
os::remove_file(&pkgfile);
os::remove_file(&sigfile);
info(fmt!("synced source: %s", name));
return true;
}
fn sync_one_git(c: &Cargo, dir: &Path, src: @Source) -> bool {
let name = src.name;
let srcfile = dir.push("source.json");
let pkgfile = dir.push("packages.json");
let keyfile = dir.push("key.gpg");
let srcsigfile = dir.push("source.json.sig");
let sigfile = dir.push("packages.json.sig");
let url = src.url;
fn rollback(name: ~str, dir: &Path, insecure: bool) {
fn msg(name: ~str, insecure: bool) {
error(fmt!("could not rollback source: %s", name));
if insecure {
warn(~"a past security check failed on source " +
name + ~" and rolling back the source failed -"
+ ~" this source may be compromised");
}
}
if !os::change_dir(dir) {
msg(name, insecure);
}
else {
let p = run::program_output(~"git", ~[~"reset", ~"--hard",
~"HEAD@{1}"]);
if p.status != 0 {
msg(name, insecure);
}
}
}
if !os::path_exists(&dir.push(".git")) {
let p = run::program_output(~"git", ~[~"clone", url, dir.to_str()]);
if p.status != 0 {
error(fmt!("fetch for source %s (url %s) failed", name, url));
return false;
}
}
else {
if !os::change_dir(dir) {
error(fmt!("fetch for source %s (url %s) failed", name, url));
return false;
}
let p = run::program_output(~"git", ~[~"pull"]);
if p.status != 0 {
error(fmt!("fetch for source %s (url %s) failed", name, url));
return false;
}
}
let has_src_file = os::path_exists(&srcfile);
match copy src.key {
Some(u) => {
let p = run::program_output(~"curl",
~[~"-f", ~"-s",
~"-o", keyfile.to_str(), u]);
if p.status != 0 {
error(fmt!("fetch for source %s (key %s) failed", name, u));
rollback(name, dir, false);
return false;
}
pgp::add(&c.root, &keyfile);
}
_ => ()
}
match (src.key, src.keyfp) {
(Some(_), Some(f)) => {
let r = pgp::verify(&c.root, &pkgfile, &sigfile);
if !r {
error(fmt!("signature verification failed for source %s with \
key %s", name, f));
rollback(name, dir, false);
return false;
}
if has_src_file {
let e = pgp::verify(&c.root, &srcfile, &srcsigfile);
if !e {
error(fmt!("signature verification failed for source %s \
with key %s", name, f));
rollback(name, dir, false);
return false;
}
}
}
_ => ()
}
os::remove_file(&keyfile);
info(fmt!("synced source: %s", name));
return true;
}
fn sync_one_curl(c: &Cargo, dir: &Path, src: @Source) -> bool {
let name = src.name;
let srcfile = dir.push("source.json.new");
let destsrcfile = dir.push("source.json");
let pkgfile = dir.push("packages.json.new");
let destpkgfile = dir.push("packages.json");
let keyfile = dir.push("key.gpg");
let srcsigfile = dir.push("source.json.sig");
let sigfile = dir.push("packages.json.sig");
let mut url = src.url;
let smart = !str::ends_with(src.url, ~"packages.json");
let mut has_src_file = false;
if smart {
url += ~"/packages.json";
}
let p = run::program_output(~"curl",
~[~"-f", ~"-s",
~"-o", pkgfile.to_str(), url]);
if p.status != 0 {
error(fmt!("fetch for source %s (url %s) failed", name, url));
return false;
}
if smart {
url = src.url + ~"/source.json";
let p =
run::program_output(~"curl",
~[~"-f", ~"-s",
~"-o", srcfile.to_str(), url]);
if p.status == 0 {
has_src_file = true;
}
}
match copy src.key {
Some(u) => {
let p = run::program_output(~"curl",
~[~"-f", ~"-s",
~"-o", keyfile.to_str(), u]);
if p.status != 0 {
error(fmt!("fetch for source %s (key %s) failed", name, u));
return false;
}
pgp::add(&c.root, &keyfile);
}
_ => ()
}
match (src.key, src.keyfp) {
(Some(_), Some(f)) => {
if smart {
url = src.url + ~"/packages.json.sig";
}
else {
url = src.url + ~".sig";
}
let mut p = run::program_output(~"curl",
~[~"-f", ~"-s", ~"-o",
sigfile.to_str(), url]);
if p.status != 0 {
error(fmt!("fetch for source %s (sig %s) failed", name, url));
return false;
}
let r = pgp::verify(&c.root, &pkgfile, &sigfile);
if !r {
error(fmt!("signature verification failed for source %s with \
key %s", name, f));
return false;
}
if smart && has_src_file {
url = src.url + ~"/source.json.sig";
p = run::program_output(~"curl",
~[~"-f", ~"-s", ~"-o",
srcsigfile.to_str(), url]);
if p.status != 0 {
error(fmt!("fetch for source %s (sig %s) failed",
name, url));
return false;
}
let e = pgp::verify(&c.root, &srcfile, &srcsigfile);
if !e {
error(~"signature verification failed for " +
~"source " + name + ~" with key " + f);
return false;
}
}
}
_ => ()
}
copy_warn(&pkgfile, &destpkgfile);
if smart && has_src_file {
copy_warn(&srcfile, &destsrcfile);
}
os::remove_file(&keyfile);
os::remove_file(&srcfile);
os::remove_file(&srcsigfile);
os::remove_file(&pkgfile);
os::remove_file(&sigfile);
info(fmt!("synced source: %s", name));
return true;
}
fn sync_one(c: &Cargo, src: @Source) {
let name = src.name;
let dir = c.sourcedir.push(name);
info(fmt!("syncing source: %s...", name));
need_dir(&dir);
let result = match src.method {
~"git" => sync_one_git(c, &dir, src),
~"file" => sync_one_file(c, &dir, src),
_ => sync_one_curl(c, &dir, src)
};
if result {
load_source_info(c, src);
load_source_packages(c, src);
}
}
fn cmd_init(c: &Cargo) {
let srcurl = ~"http://www.rust-lang.org/cargo/sources.json";
let sigurl = ~"http://www.rust-lang.org/cargo/sources.json.sig";
let srcfile = c.root.push("sources.json.new");
let sigfile = c.root.push("sources.json.sig");
let destsrcfile = c.root.push("sources.json");
let p =
run::program_output(~"curl", ~[~"-f", ~"-s",
~"-o", srcfile.to_str(), srcurl]);
if p.status != 0 {
error(fmt!("fetch of sources.json failed: %s", p.out));
return;
}
let p =
run::program_output(~"curl", ~[~"-f", ~"-s",
~"-o", sigfile.to_str(), sigurl]);
if p.status != 0 {
error(fmt!("fetch of sources.json.sig failed: %s", p.out));
return;
}
let r = pgp::verify(&c.root, &srcfile, &sigfile);
if !r {
error(fmt!("signature verification failed for '%s'",
srcfile.to_str()));
return;
}
copy_warn(&srcfile, &destsrcfile);
os::remove_file(&srcfile);
os::remove_file(&sigfile);
info(fmt!("initialized .cargo in %s", c.root.to_str()));
}
fn print_pkg(s: @Source, p: &Package) {
let mut m = s.name + ~"/" + p.name + ~" (" + p.uuid + ~")";
if vec::len(p.tags) > 0u {
m = m + ~" [" + str::connect(p.tags, ~", ") + ~"]";
}
info(m);
if p.description != ~"" {
print(~" >> " + p.description + ~"\n")
}
}
fn print_source(s: @Source) {
info(s.name + ~" (" + s.url + ~")");
let pks = sort::merge_sort(s.packages.get(), sys::shape_lt);
let l = vec::len(pks);
print(io::with_str_writer(|writer| {
let mut list = ~" >> ";
for vec::eachi(pks) |i, pk| {
if str::len(list) > 78u {
writer.write_line(list);
list = ~" >> ";
}
list += pk.name + (if l - 1u == i { ~"" } else { ~", " });
}
writer.write_line(list);
}));
}
fn cmd_list(c: &Cargo) {
sync(c);
if vec::len(c.opts.free) >= 3u {
let v = vec::view(c.opts.free, 2u, vec::len(c.opts.free));
for vec::each(v) |name| {
if !valid_pkg_name(*name) {
error(fmt!("'%s' is an invalid source name", *name));
} else {
match c.sources.find(*name) {
Some(source) => {
print_source(source);
}
None => {
error(fmt!("no such source: %s", *name));
}
}
}
}
} else {
for c.sources.each_value |v| {
print_source(v);
}
}
}
fn cmd_search(c: &Cargo) {
if vec::len(c.opts.free) < 3u {
cmd_usage();
return;
}
sync(c);
let mut n = 0;
let name = c.opts.free[2];
let tags = vec::slice(c.opts.free, 3u, vec::len(c.opts.free));
for_each_package(c, |s, p| {
if (str::contains(p.name, name) || name == ~"*") &&
vec::all(tags, |t| vec::contains(p.tags, t) ) {
print_pkg(s, p);
n += 1;
}
});
info(fmt!("found %d packages", n));
}
fn install_to_dir(srcfile: &Path, destdir: &Path) {
let newfile = destdir.push(srcfile.filename().get());
let status = run::run_program(~"cp", ~[~"-r", srcfile.to_str(),
newfile.to_str()]);
if status == 0 {
info(fmt!("installed: '%s'", newfile.to_str()));
} else {
error(fmt!("could not install: '%s'", newfile.to_str()));
}
}
fn dump_cache(c: &Cargo) {
need_dir(&c.root);
let out = c.root.push("cache.json");
let _root = json::Object(~LinearMap());
if os::path_exists(&out) {
copy_warn(&out, &c.root.push("cache.json.old"));
}
}
fn dump_sources(c: &Cargo) {
if c.sources.size() < 1u {
return;
}
need_dir(&c.root);
let out = c.root.push("sources.json");
if os::path_exists(&out) {
copy_warn(&out, &c.root.push("sources.json.old"));
}
match io::buffered_file_writer(&out) {
result::Ok(writer) => {
let mut hash = ~LinearMap();
for c.sources.each |k, v| {
let mut chash = ~LinearMap();
chash.insert(~"url", json::String(v.url));
chash.insert(~"method", json::String(v.method));
match copy v.key {
Some(key) => {
chash.insert(~"key", json::String(copy key));
}
_ => ()
}
match copy v.keyfp {
Some(keyfp) => {
chash.insert(~"keyfp", json::String(copy keyfp));
}
_ => ()
}
hash.insert(copy k, json::Object(move chash));
}
json::to_writer(writer, &json::Object(move hash))
}
result::Err(e) => {
error(fmt!("could not dump sources: %s", e));
}
}
}
fn copy_warn(srcfile: &Path, destfile: &Path) {
if !os::copy_file(srcfile, destfile) {
warn(fmt!("copying %s to %s failed",
srcfile.to_str(), destfile.to_str()));
}
}
fn cmd_sources(c: &Cargo) {
if vec::len(c.opts.free) < 3u {
for c.sources.each_value |v| {
info(fmt!("%s (%s) via %s",
v.name, v.url, v.method));
}
return;
}
let action = c.opts.free[2u];
match action {
~"clear" => {
for c.sources.each_key |k| {
c.sources.remove(k);
}
info(~"cleared sources");
}
~"add" => {
if vec::len(c.opts.free) < 5u {
cmd_usage();
return;
}
let name = c.opts.free[3u];
let url = c.opts.free[4u];
if !valid_pkg_name(name) {
error(fmt!("'%s' is an invalid source name", name));
return;
}
if c.sources.contains_key(name) {
error(fmt!("source already exists: %s", name));
} else {
c.sources.insert(name, @Source {
name: name,
mut url: url,
mut method: assume_source_method(url),
mut key: None,
mut keyfp: None,
packages: DVec()
});
info(fmt!("added source: %s", name));
}
}
~"remove" => {
if vec::len(c.opts.free) < 4u {
cmd_usage();
return;
}
let name = c.opts.free[3u];
if !valid_pkg_name(name) {
error(fmt!("'%s' is an invalid source name", name));
return;
}
if c.sources.contains_key(name) {
c.sources.remove(name);
info(fmt!("removed source: %s", name));
} else {
error(fmt!("no such source: %s", name));
}
}
~"set-url" => {
if vec::len(c.opts.free) < 5u {
cmd_usage();
return;
}
let name = c.opts.free[3u];
let url = c.opts.free[4u];
if !valid_pkg_name(name) {
error(fmt!("'%s' is an invalid source name", name));
return;
}
match c.sources.find(name) {
Some(source) => {
let old = copy source.url;
let method = assume_source_method(url);
source.url = url;
source.method = method;
c.sources.insert(name, source);
info(fmt!("changed source url: '%s' to '%s'", old, url));
}
None => {
error(fmt!("no such source: %s", name));
}
}
}
~"set-method" => {
if vec::len(c.opts.free) < 5u {
cmd_usage();
return;
}
let name = c.opts.free[3u];
let method = c.opts.free[4u];
if !valid_pkg_name(name) {
error(fmt!("'%s' is an invalid source name", name));
return;
}
match c.sources.find(name) {
Some(source) => {
let old = copy source.method;
source.method = match method {
~"git" => ~"git",
~"file" => ~"file",
_ => ~"curl"
};
c.sources.insert(name, source);
info(fmt!("changed source method: '%s' to '%s'", old,
method));
}
None => {
error(fmt!("no such source: %s", name));
}
}
}
~"rename" => {
if vec::len(c.opts.free) < 5u {
cmd_usage();
return;
}
let name = c.opts.free[3u];
let newn = c.opts.free[4u];
if !valid_pkg_name(name) {
error(fmt!("'%s' is an invalid source name", name));
return;
}
if !valid_pkg_name(newn) {
error(fmt!("'%s' is an invalid source name", newn));
return;
}
match c.sources.find(name) {
Some(source) => {
c.sources.remove(name);
c.sources.insert(newn, source);
info(fmt!("renamed source: %s to %s", name, newn));
}
None => {
error(fmt!("no such source: %s", name));
}
}
}
_ => cmd_usage()
}
}
fn cmd_usage() {
print(~"Usage: cargo <cmd> [options] [args..]
e.g. cargo install <name>
Where <cmd> is one of:
init, install, list, search, sources,
uninstall, usage
Options:
-h, --help Display this message
<cmd> -h, <cmd> --help Display help for <cmd>
");
}
fn cmd_usage_init() {
print(~"cargo init
Re-initialize cargo in ~/.cargo. Clears all sources and then adds the
default sources from <www.rust-lang.org/sources.json>.");
}
fn cmd_usage_install() {
print(~"cargo install
cargo install [source/]<name>[@version]
cargo install [source/]<uuid>[@version]
cargo install <git url> [ref]
cargo install <tarball url>
cargo install <tarball file>
Options:
--test Run crate tests before installing
-g Install to the user level (~/.cargo/bin/ instead of
locally in ./.cargo/bin/ by default)
-G Install to the system level (/usr/local/lib/cargo/bin/)
Install a crate. If no arguments are supplied, it installs from
the current working directory. If a source is provided, only install
from that source, otherwise it installs from any source.");
}
fn cmd_usage_uninstall() {
print(~"cargo uninstall [source/]<name>[@version]
cargo uninstall [source/]<uuid>[@version]
cargo uninstall <meta-name>[@version]
cargo uninstall <meta-uuid>[@version]
Options:
-g Remove from the user level (~/.cargo/bin/ instead of
locally in ./.cargo/bin/ by default)
-G Remove from the system level (/usr/local/lib/cargo/bin/)
Remove a crate. If a source is provided, only remove
from that source, otherwise it removes from any source.
If a crate was installed directly (git, tarball, etc.), you can remove
it by metadata.");
}
fn cmd_usage_list() {
print(~"cargo list [sources..]
If no arguments are provided, list all sources and their packages.
If source names are provided, list those sources and their packages.
");
}
fn cmd_usage_search() {
print(~"cargo search <query | '*'> [tags..]
Search packages.");
}
fn cmd_usage_sources() {
print(~"cargo sources
cargo sources add <name> <url>
cargo sources remove <name>
cargo sources rename <name> <new>
cargo sources set-url <name> <url>
cargo sources set-method <name> <method>
If no arguments are supplied, list all sources (but not their packages).
Commands:
add Add a source. The source method will be guessed
from the URL.
remove Remove a source.
rename Rename a source.
set-url Change the URL for a source.
set-method Change the method for a source.");
}
fn main() {
let argv = os::args();
let o = build_cargo_options(argv);
if vec::len(o.free) < 2u {
cmd_usage();
return;
}
if o.help {
match o.free[1] {
~"init" => cmd_usage_init(),
~"install" => cmd_usage_install(),
~"uninstall" => cmd_usage_uninstall(),
~"list" => cmd_usage_list(),
~"search" => cmd_usage_search(),
~"sources" => cmd_usage_sources(),
_ => cmd_usage()
}
return;
}
if o.free[1] == ~"usage" {
cmd_usage();
return;
}
let mut c = configure(o);
let home = c.root;
let first_time = os::path_exists(&home.push("sources.json"));
if !first_time && o.free[1] != ~"init" {
cmd_init(&c);
// FIXME (#2662): shouldn't need to reconfigure
c = configure(o);
}
let c = &move c;
match o.free[1] {
~"init" => cmd_init(c),
~"install" => cmd_install(c),
~"uninstall" => cmd_uninstall(c),
~"list" => cmd_list(c),
~"search" => cmd_search(c),
~"sources" => cmd_sources(c),
_ => cmd_usage()
}
dump_cache(c);
dump_sources(c);
}
// cargo.rs - Rust package manager
#[legacy_exports];
use syntax::{ast, codemap, parse, visit, attr};
use syntax::diagnostic::span_handler;
use codemap::span;
use rustc::metadata::filesearch::{get_cargo_root, get_cargo_root_nearest,
get_cargo_sysroot, libdir};
use syntax::diagnostic;
use result::{Ok, Err};
use io::WriterUtil;
use send_map::linear::LinearMap;
use std::{map, json, tempfile, term, sort, getopts};
use map::HashMap;
use to_str::to_str;
use getopts::{optflag, optopt, opt_present};
use dvec::DVec;
struct Package {
name: ~str,
uuid: ~str,
url: ~str,
method: ~str,
description: ~str,
reference: Option<~str>,
tags: ~[~str],
versions: ~[(~str, ~str)]
}
impl Package : cmp::Ord {
#[cfg(stage0)]
pure fn lt(other: &Package) -> bool {
if self.name.lt(&(*other).name) { return true; }
if (*other).name.lt(&self.name) { return false; }
if self.uuid.lt(&(*other).uuid) { return true; }
if (*other).uuid.lt(&self.uuid) { return false; }
if self.url.lt(&(*other).url) { return true; }
if (*other).url.lt(&self.url) { return false; }
if self.method.lt(&(*other).method) { return true; }
if (*other).method.lt(&self.method) { return false; }
if self.description.lt(&(*other).description) { return true; }
if (*other).description.lt(&self.description) { return false; }
if self.tags.lt(&(*other).tags) { return true; }
if (*other).tags.lt(&self.tags) { return false; }
if self.versions.lt(&(*other).versions) { return true; }
return false;
}
#[cfg(stage1)]
#[cfg(stage2)]
pure fn lt(&self, other: &Package) -> bool {
if (*self).name.lt(&(*other).name) { return true; }
if (*other).name.lt(&(*self).name) { return false; }
if (*self).uuid.lt(&(*other).uuid) { return true; }
if (*other).uuid.lt(&(*self).uuid) { return false; }
if (*self).url.lt(&(*other).url) { return true; }
if (*other).url.lt(&(*self).url) { return false; }
if (*self).method.lt(&(*other).method) { return true; }
if (*other).method.lt(&(*self).method) { return false; }
if (*self).description.lt(&(*other).description) { return true; }
if (*other).description.lt(&(*self).description) { return false; }
if (*self).tags.lt(&(*other).tags) { return true; }
if (*other).tags.lt(&(*self).tags) { return false; }
if (*self).versions.lt(&(*other).versions) { return true; }
return false;
}
#[cfg(stage0)]
pure fn le(other: &Package) -> bool { !(*other).lt(&self) }
#[cfg(stage1)]
#[cfg(stage2)]
pure fn le(&self, other: &Package) -> bool { !(*other).lt(&(*self)) }
#[cfg(stage0)]
pure fn ge(other: &Package) -> bool { !self.lt(other) }
#[cfg(stage1)]
#[cfg(stage2)]
pure fn ge(&self, other: &Package) -> bool { !(*self).lt(other) }
#[cfg(stage0)]
pure fn gt(other: &Package) -> bool { (*other).lt(&self) }
#[cfg(stage1)]
#[cfg(stage2)]
pure fn gt(&self, other: &Package) -> bool { (*other).lt(&(*self)) }
}
struct Source {
name: ~str,
mut url: ~str,
mut method: ~str,
mut key: Option<~str>,
mut keyfp: Option<~str>,
packages: DVec<Package>
}
struct Cargo {
pgp: bool,
root: Path,
installdir: Path,
bindir: Path,
libdir: Path,
workdir: Path,
sourcedir: Path,
sources: map::HashMap<~str, @Source>,
mut current_install: ~str,
dep_cache: map::HashMap<~str, bool>,
opts: Options
}
struct Crate {
name: ~str,
vers: ~str,
uuid: ~str,
desc: Option<~str>,
sigs: Option<~str>,
crate_type: Option<~str>,
deps: ~[~str]
}
struct Options {
test: bool,
mode: Mode,
free: ~[~str],
help: bool,
}
enum Mode { SystemMode, UserMode, LocalMode }
impl Mode : cmp::Eq {
#[cfg(stage0)]
pure fn eq(other: &Mode) -> bool {
(self as uint) == ((*other) as uint)
}
#[cfg(stage1)]
#[cfg(stage2)]
pure fn eq(&self, other: &Mode) -> bool {
((*self) as uint) == ((*other) as uint)
}
#[cfg(stage0)]
pure fn ne(other: &Mode) -> bool { !self.eq(other) }
#[cfg(stage1)]
#[cfg(stage2)]
pure fn ne(&self, other: &Mode) -> bool { !(*self).eq(other) }
}
fn opts() -> ~[getopts::Opt] {
~[optflag(~"g"), optflag(~"G"), optflag(~"test"),
optflag(~"h"), optflag(~"help")]
}
fn info(msg: ~str) {
let out = io::stdout();
if term::color_supported() {
term::fg(out, term::color_green);
out.write_str(~"info: ");
term::reset(out);
out.write_line(msg);
} else { out.write_line(~"info: " + msg); }
}
fn warn(msg: ~str) {
let out = io::stdout();
if term::color_supported() {
term::fg(out, term::color_yellow);
out.write_str(~"warning: ");
term::reset(out);
out.write_line(msg);
}else { out.write_line(~"warning: " + msg); }
}
fn error(msg: ~str) {
let out = io::stdout();
if term::color_supported() {
term::fg(out, term::color_red);
out.write_str(~"error: ");
term::reset(out);
out.write_line(msg);
}
else { out.write_line(~"error: " + msg); }
}
fn is_uuid(id: ~str) -> bool {
let parts = str::split_str(id, ~"-");
if vec::len(parts) == 5u {
let mut correct = 0u;
for vec::eachi(parts) |i, part| {
fn is_hex_digit(+ch: char) -> bool {
('0' <= ch && ch <= '9') ||
('a' <= ch && ch <= 'f') ||
('A' <= ch && ch <= 'F')
}
if !part.all(is_hex_digit) {
return false;
}
match i {
0u => {
if part.len() == 8u {
correct += 1u;
}
}
1u | 2u | 3u => {
if part.len() == 4u {
correct += 1u;
}
}
4u => {
if part.len() == 12u {
correct += 1u;
}
}
_ => { }
}
}
if correct >= 5u {
return true;
}
}
return false;
}
#[test]
fn test_is_uuid() {
assert is_uuid(~"aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaafAF09");
assert !is_uuid(~"aaaaaaaa-aaaa-aaaa-aaaaa-aaaaaaaaaaaa");
assert !is_uuid(~"");
assert !is_uuid(~"aaaaaaaa-aaa -aaaa-aaaa-aaaaaaaaaaaa");
assert !is_uuid(~"aaaaaaaa-aaa!-aaaa-aaaa-aaaaaaaaaaaa");
assert !is_uuid(~"aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa-a");
assert !is_uuid(~"aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaป");
}
// FIXME (#2661): implement url/URL parsing so we don't have to resort
// to weak checks
fn has_archive_extension(p: ~str) -> bool {
str::ends_with(p, ~".tar") ||
str::ends_with(p, ~".tar.gz") ||
str::ends_with(p, ~".tar.bz2") ||
str::ends_with(p, ~".tar.Z") ||
str::ends_with(p, ~".tar.lz") ||
str::ends_with(p, ~".tar.xz") ||
str::ends_with(p, ~".tgz") ||
str::ends_with(p, ~".tbz") ||
str::ends_with(p, ~".tbz2") ||
str::ends_with(p, ~".tb2") ||
str::ends_with(p, ~".taz") ||
str::ends_with(p, ~".tlz") ||
str::ends_with(p, ~".txz")
}
fn is_archive_path(u: ~str) -> bool {
has_archive_extension(u) && os::path_exists(&Path(u))
}
fn is_archive_url(u: ~str) -> bool {
// FIXME (#2661): this requires the protocol bit - if we had proper
// url parsing, we wouldn't need it
match str::find_str(u, ~"://") {
option::Some(_) => has_archive_extension(u),
_ => false
}
}
fn is_git_url(url: ~str) -> bool {
if str::ends_with(url, ~"/") { str::ends_with(url, ~".git/") }
else {
str::starts_with(url, ~"git://") || str::ends_with(url, ~".git")
}
}
fn assume_source_method(url: ~str) -> ~str {
if is_git_url(url) {
return ~"git";
}
if str::starts_with(url, ~"file://") || os::path_exists(&Path(url)) {
return ~"file";
}
~"curl"
}
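// Illustrative check of the inference above; the URLs below are
// hypothetical examples, not real sources.
#[test]
fn test_assume_source_method() {
    assert assume_source_method(~"git://example.com/pkg") == ~"git";
    assert assume_source_method(~"file:///tmp/pkgs") == ~"file";
    assert assume_source_method(~"http://example.com/packages.json") == ~"curl";
}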
fn load_link(mis: ~[@ast::meta_item]) -> (Option<~str>,
Option<~str>,
Option<~str>) {
let mut name = None;
let mut vers = None;
let mut uuid = None;
for mis.each |a| {
match a.node {
ast::meta_name_value(v, {node: ast::lit_str(s), span: _}) => {
match v {
~"name" => name = Some(*s),
~"vers" => vers = Some(*s),
~"uuid" => uuid = Some(*s),
_ => { }
}
}
_ => fail ~"load_link: meta items must be name-values"
}
}
(name, vers, uuid)
}
fn load_crate(filename: &Path) -> Option<Crate> {
let sess = parse::new_parse_sess(None);
let c = parse::parse_crate_from_crate_file(filename, ~[], sess);
let mut name = None;
let mut vers = None;
let mut uuid = None;
let mut desc = None;
let mut sigs = None;
let mut crate_type = None;
for c.node.attrs.each |a| {
match a.node.value.node {
      // Take the attribute's string value (not its name), as load_link does.
      ast::meta_name_value(v, {node: ast::lit_str(s), span: _}) => {
        match v {
            ~"desc" => desc = Some(*s),
            ~"sigs" => sigs = Some(*s),
            ~"crate_type" => crate_type = Some(*s),
            _ => { }
        }
      }
ast::meta_list(v, mis) => {
if v == ~"link" {
let (n, v, u) = load_link(mis);
name = n;
vers = v;
uuid = u;
}
}
_ => {
fail ~"crate attributes may not contain " +
~"meta_words";
}
}
}
type env = @{
mut deps: ~[~str]
};
fn goto_view_item(ps: syntax::parse::parse_sess, e: env,
i: @ast::view_item) {
match i.node {
ast::view_item_use(ident, metas, _) => {
let name_items =
attr::find_meta_items_by_name(metas, ~"name");
let m = if name_items.is_empty() {
metas + ~[attr::mk_name_value_item_str(
~"name", *ps.interner.get(ident))]
} else {
metas
};
let mut attr_name = ident;
let mut attr_vers = ~"";
let mut attr_from = ~"";
for m.each |item| {
match attr::get_meta_item_value_str(*item) {
Some(value) => {
let name = attr::get_meta_item_name(*item);
match name {
~"vers" => attr_vers = value,
~"from" => attr_from = value,
_ => ()
}
}
None => ()
}
}
let query = if !str::is_empty(attr_from) {
attr_from
} else {
if !str::is_empty(attr_vers) {
ps.interner.get(attr_name) + ~"@" + attr_vers
} else { *ps.interner.get(attr_name) }
};
match *ps.interner.get(attr_name) {
~"std" | ~"core" => (),
_ => e.deps.push(query)
}
}
_ => ()
}
}
fn goto_item(_e: env, _i: @ast::item) {
}
let e = @{
mut deps: ~[]
};
let v = visit::mk_simple_visitor(@{
visit_view_item: |a| goto_view_item(sess, e, a),
visit_item: |a| goto_item(e, a),
.. *visit::default_simple_visitor()
});
visit::visit_crate(*c, (), v);
let deps = copy e.deps;
match (name, vers, uuid) {
(Some(name0), Some(vers0), Some(uuid0)) => {
Some(Crate {
name: name0,
vers: vers0,
uuid: uuid0,
desc: desc,
sigs: sigs,
crate_type: crate_type,
deps: deps })
}
_ => return None
}
}
fn print(s: ~str) {
io::stdout().write_line(s);
}
fn rest(s: ~str, start: uint) -> ~str {
if (start >= str::len(s)) {
~""
} else {
str::slice(s, start, str::len(s))
}
}
fn need_dir(s: &Path) {
if os::path_is_dir(s) { return; }
if !os::make_dir(s, 493_i32 /* oct: 755 */) {
fail fmt!("can't make_dir %s", s.to_str());
}
}
fn valid_pkg_name(s: &str) -> bool {
fn is_valid_digit(+c: char) -> bool {
('0' <= c && c <= '9') ||
('a' <= c && c <= 'z') ||
('A' <= c && c <= 'Z') ||
c == '-' ||
c == '_'
}
s.all(is_valid_digit)
}
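// Illustrative check: names are restricted to ASCII alphanumerics,
// dashes and underscores (the names below are made up).
#[test]
fn test_valid_pkg_name() {
    assert valid_pkg_name(~"rust-sdl_0");
    assert !valid_pkg_name(~"bad name");
    assert !valid_pkg_name(~"bad/name");
}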
fn parse_source(name: ~str, j: &json::Json) -> @Source {
if !valid_pkg_name(name) {
fail fmt!("'%s' is an invalid source name", name);
}
match *j {
json::Object(j) => {
let mut url = match j.find(&~"url") {
Some(json::String(u)) => u,
_ => fail ~"needed 'url' field in source"
};
let method = match j.find(&~"method") {
Some(json::String(u)) => u,
_ => assume_source_method(url)
};
let key = match j.find(&~"key") {
Some(json::String(u)) => Some(u),
_ => None
};
let keyfp = match j.find(&~"keyfp") {
Some(json::String(u)) => Some(u),
_ => None
};
if method == ~"file" {
url = os::make_absolute(&Path(url)).to_str();
}
return @Source {
name: name,
mut url: url,
mut method: method,
mut key: key,
mut keyfp: keyfp,
packages: DVec() };
}
_ => fail ~"needed dict value in source"
};
}
fn try_parse_sources(filename: &Path, sources: map::HashMap<~str, @Source>) {
if !os::path_exists(filename) { return; }
let c = io::read_whole_file_str(filename);
match json::from_str(c.get()) {
Ok(json::Object(j)) => {
for j.each |k, v| {
sources.insert(copy *k, parse_source(*k, v));
debug!("source: %s", *k);
}
}
Ok(_) => fail ~"malformed sources.json",
Err(e) => fail fmt!("%s:%s", filename.to_str(), e.to_str())
}
}
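// For reference, a minimal sources.json accepted by try_parse_sources
// looks like the following (the name and URL are hypothetical); "method",
// "key" and "keyfp" are optional, and the method is inferred from the URL
// when omitted:
//
//     { "example": { "url": "http://example.com/cargo/packages.json",
//                    "method": "curl" } }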
fn load_one_source_package(src: @Source, p: &json::Object) {
let name = match p.find(&~"name") {
Some(json::String(n)) => {
if !valid_pkg_name(n) {
warn(~"malformed source json: "
+ src.name + ~", '" + n + ~"'"+
~" is an invalid name (alphanumeric, underscores and" +
~" dashes only)");
return;
}
n
}
_ => {
warn(~"malformed source json: " + src.name + ~" (missing name)");
return;
}
};
let uuid = match p.find(&~"uuid") {
Some(json::String(n)) => {
if !is_uuid(n) {
warn(~"malformed source json: "
+ src.name + ~", '" + n + ~"'"+
~" is an invalid uuid");
return;
}
n
}
_ => {
warn(~"malformed source json: " + src.name + ~" (missing uuid)");
return;
}
};
let url = match p.find(&~"url") {
Some(json::String(n)) => n,
_ => {
warn(~"malformed source json: " + src.name + ~" (missing url)");
return;
}
};
let method = match p.find(&~"method") {
Some(json::String(n)) => n,
_ => {
warn(~"malformed source json: "
+ src.name + ~" (missing method)");
return;
}
};
let reference = match p.find(&~"ref") {
Some(json::String(n)) => Some(n),
_ => None
};
let mut tags = ~[];
match p.find(&~"tags") {
Some(json::List(js)) => {
for js.each |j| {
match *j {
json::String(ref j) => tags.grow(1u, j),
_ => ()
}
}
}
_ => ()
}
let description = match p.find(&~"description") {
Some(json::String(n)) => n,
_ => {
warn(~"malformed source json: " + src.name
+ ~" (missing description)");
return;
}
};
let newpkg = Package {
name: name,
uuid: uuid,
url: url,
method: method,
description: description,
reference: reference,
tags: tags,
versions: ~[]
};
match src.packages.position(|pkg| pkg.uuid == uuid) {
Some(idx) => {
src.packages.set_elt(idx, newpkg);
log(debug, ~" updated package: " + src.name + ~"/" + name);
}
None => {
src.packages.push(newpkg);
}
}
log(debug, ~" loaded package: " + src.name + ~"/" + name);
}
fn load_source_info(c: &Cargo, src: @Source) {
let dir = c.sourcedir.push(src.name);
let srcfile = dir.push("source.json");
if !os::path_exists(&srcfile) { return; }
let srcstr = io::read_whole_file_str(&srcfile);
match json::from_str(srcstr.get()) {
Ok(ref json @ json::Object(_)) => {
let o = parse_source(src.name, json);
src.key = o.key;
src.keyfp = o.keyfp;
}
Ok(_) => {
warn(~"malformed source.json: " + src.name +
~"(source info is not a dict)");
}
Err(e) => {
warn(fmt!("%s:%s", src.name, e.to_str()));
}
};
}
fn load_source_packages(c: &Cargo, src: @Source) {
log(debug, ~"loading source: " + src.name);
let dir = c.sourcedir.push(src.name);
let pkgfile = dir.push("packages.json");
if !os::path_exists(&pkgfile) { return; }
let pkgstr = io::read_whole_file_str(&pkgfile);
match json::from_str(pkgstr.get()) {
Ok(json::List(js)) => {
for js.each |j| {
match *j {
json::Object(p) => {
load_one_source_package(src, p);
}
_ => {
warn(~"malformed source json: " + src.name +
~" (non-dict pkg)");
}
}
}
}
Ok(_) => {
warn(~"malformed packages.json: " + src.name +
~"(packages is not a list)");
}
Err(e) => {
warn(fmt!("%s:%s", src.name, e.to_str()));
}
};
}
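// For reference, packages.json is a list of package dicts; a minimal
// entry accepted by load_one_source_package looks like this (all values
// hypothetical; "ref" and "tags" are optional):
//
//     [ { "name": "example-pkg",
//         "uuid": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
//         "url": "git://example.com/example-pkg.git",
//         "method": "git",
//         "description": "an example package",
//         "tags": ["example"] } ]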
fn build_cargo_options(argv: ~[~str]) -> Options {
let matches = match getopts::getopts(argv, opts()) {
result::Ok(m) => m,
result::Err(f) => {
fail fmt!("%s", getopts::fail_str(f));
}
};
let test = opt_present(matches, ~"test");
let G = opt_present(matches, ~"G");
let g = opt_present(matches, ~"g");
let help = opt_present(matches, ~"h") || opt_present(matches, ~"help");
let len = vec::len(matches.free);
let is_install = len > 1u && matches.free[1] == ~"install";
let is_uninstall = len > 1u && matches.free[1] == ~"uninstall";
if G && g { fail ~"-G and -g both provided"; }
if !is_install && !is_uninstall && (g || G) {
fail ~"-g and -G are only valid for `install` and `uninstall|rm`";
}
let mode =
if (!is_install && !is_uninstall) || g { UserMode }
else if G { SystemMode }
else { LocalMode };
Options {test: test, mode: mode, free: matches.free, help: help}
}
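// Illustrative check of the defaults: a command other than install or
// uninstall always resolves to UserMode (the argv below is hypothetical).
#[test]
fn test_build_cargo_options_default_mode() {
    let o = build_cargo_options(~[~"cargo", ~"list"]);
    assert o.mode == UserMode;
    assert !o.test && !o.help;
}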
fn configure(opts: Options) -> Cargo {
let home = match get_cargo_root() {
Ok(home) => home,
Err(_err) => get_cargo_sysroot().get()
};
let get_cargo_dir = match opts.mode {
SystemMode => get_cargo_sysroot,
UserMode => get_cargo_root,
LocalMode => get_cargo_root_nearest
};
let p = get_cargo_dir().get();
let sources = HashMap();
try_parse_sources(&home.push("sources.json"), sources);
try_parse_sources(&home.push("local-sources.json"), sources);
let dep_cache = HashMap();
let mut c = Cargo {
pgp: pgp::supported(),
root: home,
installdir: p,
bindir: p.push("bin"),
libdir: p.push("lib"),
workdir: p.push("work"),
sourcedir: home.push("sources"),
sources: sources,
mut current_install: ~"",
dep_cache: dep_cache,
opts: opts
};
need_dir(&c.root);
need_dir(&c.installdir);
need_dir(&c.sourcedir);
need_dir(&c.workdir);
need_dir(&c.libdir);
need_dir(&c.bindir);
for sources.each_key |k| {
let mut s = sources.get(k);
load_source_packages(&c, s);
sources.insert(k, s);
}
if c.pgp {
pgp::init(&c.root);
} else {
warn(~"command `gpg` was not found");
warn(~"you have to install gpg from source " +
~" or package manager to get it to work correctly");
}
move c
}
fn for_each_package(c: &Cargo, b: fn(s: @Source, p: &Package)) {
for c.sources.each_value |v| {
for v.packages.each |p| {
b(v, p);
}
}
}
// Runs all programs in directory <buildpath>
fn run_programs(buildpath: &Path) {
let newv = os::list_dir_path(buildpath);
for newv.each |ct| {
run::run_program(ct.to_str(), ~[]);
}
}
// Runs rustc in <path + subdir> with the given flags
// and returns <path + subdir>
fn run_in_buildpath(what: &str, path: &Path, subdir: &Path, cf: &Path,
extra_flags: ~[~str]) -> Option<Path> {
let buildpath = path.push_rel(subdir);
need_dir(&buildpath);
debug!("%s: %s -> %s", what, cf.to_str(), buildpath.to_str());
let p = run::program_output(rustc_sysroot(),
~[~"--out-dir",
buildpath.to_str(),
cf.to_str()] + extra_flags);
if p.status != 0 {
error(fmt!("rustc failed: %d\n%s\n%s", p.status, p.err, p.out));
return None;
}
Some(buildpath)
}
fn test_one_crate(_c: &Cargo, path: &Path, cf: &Path) {
let buildpath = match run_in_buildpath(~"testing", path,
&Path("test"),
cf,
~[ ~"--test"]) {
None => return,
Some(bp) => bp
};
run_programs(&buildpath);
}
fn install_one_crate(c: &Cargo, path: &Path, cf: &Path) {
let buildpath = match run_in_buildpath(~"installing", path,
&Path("build"),
cf, ~[]) {
None => return,
Some(bp) => bp
};
let newv = os::list_dir_path(&buildpath);
let exec_suffix = os::exe_suffix();
for newv.each |ct| {
if (exec_suffix != ~"" && str::ends_with(ct.to_str(),
exec_suffix)) ||
(exec_suffix == ~"" &&
!str::starts_with(ct.filename().get(),
~"lib")) {
debug!(" bin: %s", ct.to_str());
install_to_dir(*ct, &c.bindir);
if c.opts.mode == SystemMode {
// FIXME (#2662): Put this file in PATH / symlink it so it can
// be used as a generic executable
// `cargo install -G rustray` and `rustray file.obj`
}
} else {
debug!(" lib: %s", ct.to_str());
install_to_dir(*ct, &c.libdir);
}
}
}
fn rustc_sysroot() -> ~str {
match os::self_exe_path() {
Some(path) => {
let rustc = path.push_many([~"..", ~"bin", ~"rustc"]);
debug!(" rustc: %s", rustc.to_str());
rustc.to_str()
}
None => ~"rustc"
}
}
fn install_source(c: &Cargo, path: &Path) {
debug!("source: %s", path.to_str());
os::change_dir(path);
let mut cratefiles = ~[];
for os::walk_dir(&Path(".")) |p| {
if p.filetype() == Some(~".rc") {
cratefiles.push(*p);
}
}
if vec::is_empty(cratefiles) {
fail ~"this doesn't look like a rust package (no .rc files)";
}
for cratefiles.each |cf| {
match load_crate(cf) {
None => loop,
Some(crate) => {
for crate.deps.each |query| {
// FIXME (#1356): handle cyclic dependencies
// (n.b. #1356 says "Cyclic dependency is an error
// condition")
let wd = get_temp_workdir(c);
install_query(c, &wd, *query);
}
os::change_dir(path);
if c.opts.test {
test_one_crate(c, path, cf);
}
install_one_crate(c, path, cf);
}
}
}
}
fn install_git(c: &Cargo, wd: &Path, url: ~str, reference: Option<~str>) {
run::program_output(~"git", ~[~"clone", url, wd.to_str()]);
if reference.is_some() {
let r = reference.get();
os::change_dir(wd);
run::run_program(~"git", ~[~"checkout", r]);
}
install_source(c, wd);
}
fn install_curl(c: &Cargo, wd: &Path, url: ~str) {
let tarpath = wd.push("pkg.tar");
let p = run::program_output(~"curl", ~[~"-f", ~"-s", ~"-o",
tarpath.to_str(), url]);
if p.status != 0 {
fail fmt!("fetch of %s failed: %s", url, p.err);
}
run::run_program(~"tar", ~[~"-x", ~"--strip-components=1",
~"-C", wd.to_str(),
~"-f", tarpath.to_str()]);
install_source(c, wd);
}
fn install_file(c: &Cargo, wd: &Path, path: &Path) {
run::program_output(~"tar", ~[~"-x", ~"--strip-components=1",
~"-C", wd.to_str(),
~"-f", path.to_str()]);
install_source(c, wd);
}
fn install_package(c: &Cargo, src: ~str, wd: &Path, pkg: Package) {
let url = copy pkg.url;
let method = match pkg.method {
~"git" => ~"git",
~"file" => ~"file",
_ => ~"curl"
};
info(fmt!("installing %s/%s via %s...", src, pkg.name, method));
match method {
~"git" => install_git(c, wd, url, copy pkg.reference),
~"file" => install_file(c, wd, &Path(url)),
~"curl" => install_curl(c, wd, url),
_ => ()
}
}
fn cargo_suggestion(c: &Cargo, fallback: fn()) {
if c.sources.size() == 0u {
error(~"no sources defined - you may wish to run " +
~"`cargo init`");
return;
}
fallback();
}
fn install_uuid(c: &Cargo, wd: &Path, uuid: ~str) {
let mut ps = ~[];
for_each_package(c, |s, p| {
if p.uuid == uuid {
vec::push(&mut ps, (s.name, copy *p));
}
});
if vec::len(ps) == 1u {
let (sname, p) = copy ps[0];
install_package(c, sname, wd, p);
return;
} else if vec::len(ps) == 0u {
cargo_suggestion(c, || {
error(~"can't find package: " + uuid);
});
return;
}
error(~"found multiple packages:");
for ps.each |elt| {
let (sname,p) = copy *elt;
info(~" " + sname + ~"/" + p.uuid + ~" (" + p.name + ~")");
}
}
fn install_named(c: &Cargo, wd: &Path, name: ~str) {
let mut ps = ~[];
for_each_package(c, |s, p| {
if p.name == name {
vec::push(&mut ps, (s.name, copy *p));
}
});
if vec::len(ps) == 1u {
let (sname, p) = copy ps[0];
install_package(c, sname, wd, p);
return;
} else if vec::len(ps) == 0u {
cargo_suggestion(c, || {
error(~"can't find package: " + name);
});
return;
}
error(~"found multiple packages:");
for ps.each |elt| {
let (sname,p) = copy *elt;
info(~" " + sname + ~"/" + p.uuid + ~" (" + p.name + ~")");
}
}
fn install_uuid_specific(c: &Cargo, wd: &Path, src: ~str, uuid: ~str) {
match c.sources.find(src) {
Some(s) => {
for s.packages.each |p| {
if p.uuid == uuid {
install_package(c, src, wd, *p);
return;
}
}
}
_ => ()
}
error(~"can't find package: " + src + ~"/" + uuid);
}
fn install_named_specific(c: &Cargo, wd: &Path, src: ~str, name: ~str) {
match c.sources.find(src) {
Some(s) => {
for s.packages.each |p| {
if p.name == name {
install_package(c, src, wd, *p);
return;
}
}
}
_ => ()
}
error(~"can't find package: " + src + ~"/" + name);
}
fn cmd_uninstall(c: &Cargo) {
if vec::len(c.opts.free) < 3u {
cmd_usage();
return;
}
let lib = &c.libdir;
let bin = &c.bindir;
let target = c.opts.free[2u];
// FIXME (#2662): needs stronger pattern matching
// FIXME (#2662): needs to uninstall from a specified location in a
// cache instead of looking for it (binaries can be uninstalled by
// name only)
fn try_uninstall(p: &Path) -> bool {
if os::remove_file(p) {
info(~"uninstalled: '" + p.to_str() + ~"'");
true
} else {
error(~"could not uninstall: '" +
p.to_str() + ~"'");
false
}
}
if is_uuid(target) {
for os::list_dir(lib).each |file| {
match str::find_str(*file, ~"-" + target + ~"-") {
Some(_) => if !try_uninstall(&lib.push(*file)) { return },
None => ()
}
}
error(~"can't find package with uuid: " + target);
} else {
for os::list_dir(lib).each |file| {
match str::find_str(*file, ~"lib" + target + ~"-") {
Some(_) => if !try_uninstall(&lib.push(*file)) { return },
None => ()
}
}
for os::list_dir(bin).each |file| {
match str::find_str(*file, target) {
            Some(_) => if !try_uninstall(&bin.push(*file)) { return },
None => ()
}
}
error(~"can't find package with name: " + target);
}
}
fn install_query(c: &Cargo, wd: &Path, target: ~str) {
match c.dep_cache.find(target) {
Some(inst) => {
if inst {
return;
}
}
None => ()
}
c.dep_cache.insert(target, true);
if is_archive_path(target) {
install_file(c, wd, &Path(target));
return;
} else if is_git_url(target) {
let reference = if c.opts.free.len() >= 4u {
Some(c.opts.free[3u])
} else {
None
};
install_git(c, wd, target, reference);
} else if !valid_pkg_name(target) && has_archive_extension(target) {
install_curl(c, wd, target);
return;
} else {
let mut ps = copy target;
match str::find_char(ps, '/') {
option::Some(idx) => {
let source = str::slice(ps, 0u, idx);
ps = str::slice(ps, idx + 1u, str::len(ps));
if is_uuid(ps) {
install_uuid_specific(c, wd, source, ps);
} else {
install_named_specific(c, wd, source, ps);
}
}
option::None => {
if is_uuid(ps) {
install_uuid(c, wd, ps);
} else {
install_named(c, wd, ps);
}
}
}
}
// FIXME (#2662): This whole dep_cache and current_install thing is
// a bit of a hack. It should be cleaned up in the future.
if target == c.current_install {
for c.dep_cache.each |k, _v| {
c.dep_cache.remove(k);
}
c.current_install = ~"";
}
}
fn get_temp_workdir(c: &Cargo) -> Path {
match tempfile::mkdtemp(&c.workdir, "cargo") {
Some(wd) => wd,
None => fail fmt!("needed temp dir: %s",
c.workdir.to_str())
}
}
fn cmd_install(c: &Cargo) unsafe {
let wd = get_temp_workdir(c);
if vec::len(c.opts.free) == 2u {
let cwd = os::getcwd();
let status = run::run_program(~"cp", ~[~"-R", cwd.to_str(),
wd.to_str()]);
if status != 0 {
fail fmt!("could not copy directory: %s", cwd.to_str());
}
install_source(c, &wd);
return;
}
sync(c);
let query = c.opts.free[2];
c.current_install = query.to_str();
install_query(c, &wd, query);
}
fn sync(c: &Cargo) {
for c.sources.each_key |k| {
let mut s = c.sources.get(k);
sync_one(c, s);
c.sources.insert(k, s);
}
}
fn sync_one_file(c: &Cargo, dir: &Path, src: @Source) -> bool {
let name = src.name;
let srcfile = dir.push("source.json.new");
let destsrcfile = dir.push("source.json");
let pkgfile = dir.push("packages.json.new");
let destpkgfile = dir.push("packages.json");
let keyfile = dir.push("key.gpg");
let srcsigfile = dir.push("source.json.sig");
let sigfile = dir.push("packages.json.sig");
let url = Path(src.url);
let mut has_src_file = false;
if !os::copy_file(&url.push("packages.json"), &pkgfile) {
error(fmt!("fetch for source %s (url %s) failed",
name, url.to_str()));
return false;
}
if os::copy_file(&url.push("source.json"), &srcfile) {
has_src_file = false;
}
os::copy_file(&url.push("source.json.sig"), &srcsigfile);
os::copy_file(&url.push("packages.json.sig"), &sigfile);
match copy src.key {
Some(u) => {
let p = run::program_output(~"curl",
~[~"-f", ~"-s",
~"-o", keyfile.to_str(), u]);
if p.status != 0 {
error(fmt!("fetch for source %s (key %s) failed", name, u));
return false;
}
pgp::add(&c.root, &keyfile);
}
_ => ()
}
match (src.key, src.keyfp) {
(Some(_), Some(f)) => {
let r = pgp::verify(&c.root, &pkgfile, &sigfile);
if !r {
error(fmt!("signature verification failed for source %s with \
key %s", name, f));
return false;
}
if has_src_file {
let e = pgp::verify(&c.root, &srcfile, &srcsigfile);
if !e {
error(fmt!("signature verification failed for source %s \
with key %s", name, f));
return false;
}
}
}
_ => ()
}
copy_warn(&pkgfile, &destpkgfile);
if has_src_file {
copy_warn(&srcfile, &destsrcfile);
}
os::remove_file(&keyfile);
os::remove_file(&srcfile);
os::remove_file(&srcsigfile);
os::remove_file(&pkgfile);
os::remove_file(&sigfile);
info(fmt!("synced source: %s", name));
return true;
}
fn sync_one_git(c: &Cargo, dir: &Path, src: @Source) -> bool {
let name = src.name;
let srcfile = dir.push("source.json");
let pkgfile = dir.push("packages.json");
let keyfile = dir.push("key.gpg");
let srcsigfile = dir.push("source.json.sig");
let sigfile = dir.push("packages.json.sig");
let url = src.url;
fn rollback(name: ~str, dir: &Path, insecure: bool) {
fn msg(name: ~str, insecure: bool) {
error(fmt!("could not rollback source: %s", name));
if insecure {
warn(~"a past security check failed on source " +
name + ~" and rolling back the source failed -"
+ ~" this source may be compromised");
}
}
if !os::change_dir(dir) {
msg(name, insecure);
}
else {
let p = run::program_output(~"git", ~[~"reset", ~"--hard",
~"HEAD@{1}"]);
if p.status != 0 {
msg(name, insecure);
}
}
}
if !os::path_exists(&dir.push(".git")) {
let p = run::program_output(~"git", ~[~"clone", url, dir.to_str()]);
if p.status != 0 {
error(fmt!("fetch for source %s (url %s) failed", name, url));
return false;
}
}
else {
if !os::change_dir(dir) {
error(fmt!("fetch for source %s (url %s) failed", name, url));
return false;
}
let p = run::program_output(~"git", ~[~"pull"]);
if p.status != 0 {
error(fmt!("fetch for source %s (url %s) failed", name, url));
return false;
}
}
let has_src_file = os::path_exists(&srcfile);
match copy src.key {
Some(u) => {
let p = run::program_output(~"curl",
~[~"-f", ~"-s",
~"-o", keyfile.to_str(), u]);
if p.status != 0 {
error(fmt!("fetch for source %s (key %s) failed", name, u));
rollback(name, dir, false);
return false;
}
pgp::add(&c.root, &keyfile);
}
_ => ()
}
match (src.key, src.keyfp) {
(Some(_), Some(f)) => {
let r = pgp::verify(&c.root, &pkgfile, &sigfile);
if !r {
error(fmt!("signature verification failed for source %s with \
key %s", name, f));
rollback(name, dir, false);
return false;
}
if has_src_file {
let e = pgp::verify(&c.root, &srcfile, &srcsigfile);
if !e {
error(fmt!("signature verification failed for source %s \
with key %s", name, f));
rollback(name, dir, false);
return false;
}
}
}
_ => ()
}
os::remove_file(&keyfile);
info(fmt!("synced source: %s", name));
return true;
}
fn sync_one_curl(c: &Cargo, dir: &Path, src: @Source) -> bool {
let name = src.name;
let srcfile = dir.push("source.json.new");
let destsrcfile = dir.push("source.json");
let pkgfile = dir.push("packages.json.new");
let destpkgfile = dir.push("packages.json");
let keyfile = dir.push("key.gpg");
let srcsigfile = dir.push("source.json.sig");
let sigfile = dir.push("packages.json.sig");
let mut url = src.url;
let smart = !str::ends_with(src.url, ~"packages.json");
let mut has_src_file = false;
if smart {
url += ~"/packages.json";
}
let p = run::program_output(~"curl",
~[~"-f", ~"-s",
~"-o", pkgfile.to_str(), url]);
if p.status != 0 {
error(fmt!("fetch for source %s (url %s) failed", name, url));
return false;
}
if smart {
url = src.url + ~"/source.json";
let p =
run::program_output(~"curl",
~[~"-f", ~"-s",
~"-o", srcfile.to_str(), url]);
if p.status == 0 {
has_src_file = true;
}
}
match copy src.key {
Some(u) => {
let p = run::program_output(~"curl",
~[~"-f", ~"-s",
~"-o", keyfile.to_str(), u]);
if p.status != 0 {
error(fmt!("fetch for source %s (key %s) failed", name, u));
return false;
}
pgp::add(&c.root, &keyfile);
}
_ => ()
}
match (src.key, src.keyfp) {
(Some(_), Some(f)) => {
if smart {
url = src.url + ~"/packages.json.sig";
}
else {
url = src.url + ~".sig";
}
let mut p = run::program_output(~"curl",
~[~"-f", ~"-s", ~"-o",
sigfile.to_str(), url]);
if p.status != 0 {
error(fmt!("fetch for source %s (sig %s) failed", name, url));
return false;
}
let r = pgp::verify(&c.root, &pkgfile, &sigfile);
if !r {
error(fmt!("signature verification failed for source %s with \
key %s", name, f));
return false;
}
if smart && has_src_file {
url = src.url + ~"/source.json.sig";
p = run::program_output(~"curl",
~[~"-f", ~"-s", ~"-o",
srcsigfile.to_str(), url]);
if p.status != 0 {
error(fmt!("fetch for source %s (sig %s) failed",
name, url));
return false;
}
let e = pgp::verify(&c.root, &srcfile, &srcsigfile);
if !e {
error(~"signature verification failed for " +
~"source " + name + ~" with key " + f);
return false;
}
}
}
_ => ()
}
copy_warn(&pkgfile, &destpkgfile);
if smart && has_src_file {
copy_warn(&srcfile, &destsrcfile);
}
os::remove_file(&keyfile);
os::remove_file(&srcfile);
os::remove_file(&srcsigfile);
os::remove_file(&pkgfile);
os::remove_file(&sigfile);
info(fmt!("synced source: %s", name));
return true;
}
fn sync_one(c: &Cargo, src: @Source) {
let name = src.name;
let dir = c.sourcedir.push(name);
info(fmt!("syncing source: %s...", name));
need_dir(&dir);
let result = match src.method {
~"git" => sync_one_git(c, &dir, src),
~"file" => sync_one_file(c, &dir, src),
_ => sync_one_curl(c, &dir, src)
};
if result {
load_source_info(c, src);
load_source_packages(c, src);
}
}
fn cmd_init(c: &Cargo) {
let srcurl = ~"http://www.rust-lang.org/cargo/sources.json";
let sigurl = ~"http://www.rust-lang.org/cargo/sources.json.sig";
let srcfile = c.root.push("sources.json.new");
let sigfile = c.root.push("sources.json.sig");
let destsrcfile = c.root.push("sources.json");
let p =
run::program_output(~"curl", ~[~"-f", ~"-s",
~"-o", srcfile.to_str(), srcurl]);
if p.status != 0 {
error(fmt!("fetch of sources.json failed: %s", p.out));
return;
}
let p =
run::program_output(~"curl", ~[~"-f", ~"-s",
~"-o", sigfile.to_str(), sigurl]);
if p.status != 0 {
error(fmt!("fetch of sources.json.sig failed: %s", p.out));
return;
}
let r = pgp::verify(&c.root, &srcfile, &sigfile);
if !r {
error(fmt!("signature verification failed for '%s'",
srcfile.to_str()));
return;
}
copy_warn(&srcfile, &destsrcfile);
os::remove_file(&srcfile);
os::remove_file(&sigfile);
info(fmt!("initialized .cargo in %s", c.root.to_str()));
}
fn print_pkg(s: @Source, p: &Package) {
let mut m = s.name + ~"/" + p.name + ~" (" + p.uuid + ~")";
if vec::len(p.tags) > 0u {
m = m + ~" [" + str::connect(p.tags, ~", ") + ~"]";
}
info(m);
if p.description != ~"" {
print(~" >> " + p.description + ~"\n")
}
}
fn print_source(s: @Source) {
info(s.name + ~" (" + s.url + ~")");
let pks = sort::merge_sort(s.packages.get(), sys::shape_lt);
let l = vec::len(pks);
print(io::with_str_writer(|writer| {
let mut list = ~" >> ";
for vec::eachi(pks) |i, pk| {
if str::len(list) > 78u {
writer.write_line(list);
list = ~" >> ";
}
list += pk.name + (if l - 1u == i { ~"" } else { ~", " });
}
writer.write_line(list);
}));
}
fn cmd_list(c: &Cargo) {
sync(c);
if vec::len(c.opts.free) >= 3u {
let v = vec::view(c.opts.free, 2u, vec::len(c.opts.free));
for vec::each(v) |name| {
if !valid_pkg_name(*name) {
error(fmt!("'%s' is an invalid source name", *name));
} else {
match c.sources.find(*name) {
Some(source) => {
print_source(source);
}
None => {
error(fmt!("no such source: %s", *name));
}
}
}
}
} else {
for c.sources.each_value |v| {
print_source(v);
}
}
}
fn cmd_search(c: &Cargo) {
if vec::len(c.opts.free) < 3u {
cmd_usage();
return;
}
sync(c);
let mut n = 0;
let name = c.opts.free[2];
let tags = vec::slice(c.opts.free, 3u, vec::len(c.opts.free));
for_each_package(c, |s, p| {
if (str::contains(p.name, name) || name == ~"*") &&
vec::all(tags, |t| vec::contains(p.tags, t) ) {
print_pkg(s, p);
n += 1;
}
});
info(fmt!("found %d packages", n));
}
fn install_to_dir(srcfile: &Path, destdir: &Path) {
let newfile = destdir.push(srcfile.filename().get());
let status = run::run_program(~"cp", ~[~"-r", srcfile.to_str(),
newfile.to_str()]);
if status == 0 {
info(fmt!("installed: '%s'", newfile.to_str()));
} else {
error(fmt!("could not install: '%s'", newfile.to_str()));
}
}
fn dump_cache(c: &Cargo) {
need_dir(&c.root);
let out = c.root.push("cache.json");
let _root = json::Object(~LinearMap());
if os::path_exists(&out) {
copy_warn(&out, &c.root.push("cache.json.old"));
}
}
fn dump_sources(c: &Cargo) {
if c.sources.size() < 1u {
return;
}
need_dir(&c.root);
let out = c.root.push("sources.json");
if os::path_exists(&out) {
copy_warn(&out, &c.root.push("sources.json.old"));
}
match io::buffered_file_writer(&out) {
result::Ok(writer) => {
let mut hash = ~LinearMap();
for c.sources.each |k, v| {
let mut chash = ~LinearMap();
chash.insert(~"url", json::String(v.url));
chash.insert(~"method", json::String(v.method));
match copy v.key {
Some(key) => {
chash.insert(~"key", json::String(copy key));
}
_ => ()
}
match copy v.keyfp {
Some(keyfp) => {
chash.insert(~"keyfp", json::String(copy keyfp));
}
_ => ()
}
hash.insert(copy k, json::Object(move chash));
}
json::to_writer(writer, &json::Object(move hash))
}
result::Err(e) => {
error(fmt!("could not dump sources: %s", e));
}
}
}
fn copy_warn(srcfile: &Path, destfile: &Path) {
if !os::copy_file(srcfile, destfile) {
warn(fmt!("copying %s to %s failed",
srcfile.to_str(), destfile.to_str()));
}
}
fn cmd_sources(c: &Cargo) {
if vec::len(c.opts.free) < 3u {
for c.sources.each_value |v| {
info(fmt!("%s (%s) via %s",
v.name, v.url, v.method));
}
return;
}
let action = c.opts.free[2u];
match action {
~"clear" => {
for c.sources.each_key |k| {
c.sources.remove(k);
}
info(~"cleared sources");
}
~"add" => {
if vec::len(c.opts.free) < 5u {
cmd_usage();
return;
}
let name = c.opts.free[3u];
let url = c.opts.free[4u];
if !valid_pkg_name(name) {
error(fmt!("'%s' is an invalid source name", name));
return;
}
if c.sources.contains_key(name) {
error(fmt!("source already exists: %s", name));
} else {
c.sources.insert(name, @Source {
name: name,
mut url: url,
mut method: assume_source_method(url),
mut key: None,
mut keyfp: None,
packages: DVec()
});
info(fmt!("added source: %s", name));
}
}
~"remove" => {
if vec::len(c.opts.free) < 4u {
cmd_usage();
return;
}
let name = c.opts.free[3u];
if !valid_pkg_name(name) {
error(fmt!("'%s' is an invalid source name", name));
return;
}
if c.sources.contains_key(name) {
c.sources.remove(name);
info(fmt!("removed source: %s", name));
} else {
error(fmt!("no such source: %s", name));
}
}
~"set-url" => {
if vec::len(c.opts.free) < 5u {
cmd_usage();
return;
}
let name = c.opts.free[3u];
let url = c.opts.free[4u];
if !valid_pkg_name(name) {
error(fmt!("'%s' is an invalid source name", name));
return;
}
match c.sources.find(name) {
Some(source) => {
let old = copy source.url;
let method = assume_source_method(url);
source.url = url;
source.method = method;
c.sources.insert(name, source);
info(fmt!("changed source url: '%s' to '%s'", old, url));
}
None => {
error(fmt!("no such source: %s", name));
}
}
}
~"set-method" => {
if vec::len(c.opts.free) < 5u {
cmd_usage();
return;
}
let name = c.opts.free[3u];
let method = c.opts.free[4u];
if !valid_pkg_name(name) {
error(fmt!("'%s' is an invalid source name", name));
return;
}
match c.sources.find(name) {
Some(source) => {
let old = copy source.method;
source.method = match method {
~"git" => ~"git",
~"file" => ~"file",
_ => ~"curl"
};
c.sources.insert(name, source);
info(fmt!("changed source method: '%s' to '%s'", old,
method));
}
None => {
error(fmt!("no such source: %s", name));
}
}
}
~"rename" => {
if vec::len(c.opts.free) < 5u {
cmd_usage();
return;
}
let name = c.opts.free[3u];
let newn = c.opts.free[4u];
if !valid_pkg_name(name) {
error(fmt!("'%s' is an invalid source name", name));
return;
}
if !valid_pkg_name(newn) {
error(fmt!("'%s' is an invalid source name", newn));
return;
}
match c.sources.find(name) {
Some(source) => {
c.sources.remove(name);
c.sources.insert(newn, source);
info(fmt!("renamed source: %s to %s", name, newn));
}
None => {
error(fmt!("no such source: %s", name));
}
}
}
_ => cmd_usage()
}
}
fn cmd_usage() {
print(~"Usage: cargo <cmd> [options] [args..]
e.g. cargo install <name>
Where <cmd> is one of:
init, install, list, search, sources,
uninstall, usage
Options:
-h, --help Display this message
<cmd> -h, <cmd> --help Display help for <cmd>
");
}
fn cmd_usage_init() {
print(~"cargo init
Re-initialize cargo in ~/.cargo. Clears all sources and then adds the
default sources from <www.rust-lang.org/sources.json>.");
}
fn cmd_usage_install() {
print(~"cargo install
cargo install [source/]<name>[@version]
cargo install [source/]<uuid>[@version]
cargo install <git url> [ref]
cargo install <tarball url>
cargo install <tarball file>
Options:
--test Run crate tests before installing
-g Install to the user level (~/.cargo/bin/ instead of
locally in ./.cargo/bin/ by default)
-G Install to the system level (/usr/local/lib/cargo/bin/)
Install a crate. If no arguments are supplied, it installs from
the current working directory. If a source is provided, it only installs
from that source; otherwise it installs from any source.");
}
fn cmd_usage_uninstall() {
print(~"cargo uninstall [source/]<name>[@version]
cargo uninstall [source/]<uuid>[@version]
cargo uninstall <meta-name>[@version]
cargo uninstall <meta-uuid>[@version]
Options:
-g Remove from the user level (~/.cargo/bin/ instead of
locally in ./.cargo/bin/ by default)
-G Remove from the system level (/usr/local/lib/cargo/bin/)
Remove a crate. If a source is provided, it only removes
from that source; otherwise it removes from any source.
If a crate was installed directly (git, tarball, etc.), you can remove
it by metadata.");
}
fn cmd_usage_list() {
print(~"cargo list [sources..]
If no arguments are provided, list all sources and their packages.
If source names are provided, list those sources and their packages.
");
}
fn cmd_usage_search() {
print(~"cargo search <query | '*'> [tags..]
Search packages.");
}
fn cmd_usage_sources() {
print(~"cargo sources
cargo sources add <name> <url>
cargo sources remove <name>
cargo sources rename <name> <new>
cargo sources set-url <name> <url>
cargo sources set-method <name> <method>
If no arguments are supplied, list all sources (but not their packages).
Commands:
add Add a source. The source method will be guessed
from the URL.
remove Remove a source.
rename Rename a source.
set-url Change the URL for a source.
set-method Change the method for a source.");
}
fn main() {
let argv = os::args();
let o = build_cargo_options(argv);
if vec::len(o.free) < 2u {
cmd_usage();
return;
}
if o.help {
match o.free[1] {
~"init" => cmd_usage_init(),
~"install" => cmd_usage_install(),
~"uninstall" => cmd_usage_uninstall(),
~"list" => cmd_usage_list(),
~"search" => cmd_usage_search(),
~"sources" => cmd_usage_sources(),
_ => cmd_usage()
}
return;
}
if o.free[1] == ~"usage" {
cmd_usage();
return;
}
let mut c = configure(o);
let home = c.root;
let first_time = os::path_exists(&home.push("sources.json"));
if !first_time && o.free[1] != ~"init" {
cmd_init(&c);
// FIXME (#2662): shouldn't need to reconfigure
c = configure(o);
}
let c = &move c;
match o.free[1] {
~"init" => cmd_init(c),
~"install" => cmd_install(c),
~"uninstall" => cmd_uninstall(c),
~"list" => cmd_list(c),
~"search" => cmd_search(c),
~"sources" => cmd_sources(c),
_ => cmd_usage()
}
dump_cache(c);
dump_sources(c);
}
/*!
The Rust core library.
The Rust core library provides runtime features required by the language,
including the task scheduler and memory allocators, as well as library
support for Rust built-in types, platform abstractions, and other commonly
used features.
`core` includes modules corresponding to each of the integer types, each of
the floating point types, the `bool` type, tuples, characters, strings,
vectors (`vec`), shared boxes (`box`), and unsafe and borrowed pointers
(`ptr`). Additionally, `core` provides task management and creation (`task`),
communication primitives (`comm` and `pipes`), an efficient vector builder
(`dvec`), platform abstractions (`os` and `path`), basic I/O abstractions
(`io`), common traits (`cmp`, `num`, `to_str`), and complete bindings
to the C standard library (`libc`).
`core` is linked to all crates by default and its contents imported.
Implicitly, all crates behave as if they included the following prologue:
extern mod core;
use core::*;
*/
#[link(name = "core",
vers = "0.5",
uuid = "c70c24a7-5551-4f73-8e37-380b11d80be8",
url = "https://github.com/mozilla/rust/tree/master/src/libcore")];
#[comment = "The Rust core library"];
#[license = "MIT"];
#[crate_type = "lib"];
// Don't link to core. We are core.
#[no_core];
#[warn(deprecated_mode)];
#[warn(deprecated_pattern)];
#[warn(vecs_implicitly_copyable)];
#[deny(non_camel_case_types)];
// Built-in-type support modules
/// Operations and constants for `int`
#[path = "int-template.rs"]
#[merge = "int-template/intb.rs"]
pub mod int;
/// Operations and constants for `i8`
#[path = "int-template.rs"]
#[merge = "int-template/i8b.rs"]
pub mod i8;
/// Operations and constants for `i16`
#[path = "int-template.rs"]
#[merge = "int-template/i16b.rs"]
pub mod i16;
/// Operations and constants for `i32`
#[path = "int-template.rs"]
#[merge = "int-template/i32b.rs"]
pub mod i32;
/// Operations and constants for `i64`
#[path = "int-template.rs"]
#[merge = "int-template/i64b.rs"]
pub mod i64;
/// Operations and constants for `uint`
#[path = "uint-template.rs"]
#[merge = "uint-template/uintb.rs"]
pub mod uint;
/// Operations and constants for `u8`
#[path = "uint-template.rs"]
#[merge = "uint-template/u8b.rs"]
pub mod u8;
/// Operations and constants for `u16`
#[path = "uint-template.rs"]
#[merge = "uint-template/u16b.rs"]
pub mod u16;
/// Operations and constants for `u32`
#[path = "uint-template.rs"]
#[merge = "uint-template/u32b.rs"]
pub mod u32;
/// Operations and constants for `u64`
#[path = "uint-template.rs"]
#[merge = "uint-template/u64b.rs"]
pub mod u64;
pub mod box;
pub mod char;
pub mod float;
pub mod f32;
pub mod f64;
pub mod str;
pub mod ptr;
pub mod vec;
pub mod at_vec;
pub mod bool;
pub mod tuple;
pub mod unit;
pub mod owned;
// Ubiquitous-utility-type modules
#[cfg(notest)]
pub mod ops;
pub mod cmp;
pub mod num;
pub mod hash;
pub mod either;
pub mod iter;
pub mod logging;
pub mod option;
#[path="iter-trait.rs"]
#[merge = "iter-trait/optionb.rs"]
pub mod option_iter;
pub mod result;
pub mod to_str;
pub mod to_bytes;
pub mod from_str;
pub mod util;
// Data structure modules
pub mod dvec;
#[path="iter-trait.rs"]
#[merge = "iter-trait/dvecb.rs"]
pub mod dvec_iter;
pub mod dlist;
#[path="iter-trait.rs"]
#[merge = "iter-trait/dlistb.rs"]
pub mod dlist_iter;
pub mod send_map;
// Concurrency
pub mod comm;
#[merge = "task/mod.rs"]
pub mod task;
pub mod pipes;
// Runtime and language-primitive support
pub mod gc;
pub mod io;
pub mod libc;
pub mod os;
pub mod path;
pub mod rand;
pub mod run;
pub mod sys;
pub mod cast;
pub mod mutable;
pub mod flate;
pub mod repr;
pub mod cleanup;
pub mod reflect;
pub mod condition;
// Modules supporting compiler-generated code
// Exported but not part of the public interface
pub mod extfmt;
// The test harness links against core, so don't include runtime in tests.
#[cfg(notest)]
#[legacy_exports]
pub mod rt;
// Ideally not exported, but currently is.
pub mod private;
// For internal use, not exported.
mod unicode;
mod cmath;
mod stackwalk;
// Top-level, visible-everywhere definitions.
// Export various ubiquitous types, constructors, methods.
pub use option::{Some, None};
pub use Option = option::Option;
pub use result::{Result, Ok, Err};
pub use Path = path::Path;
pub use GenericPath = path::GenericPath;
pub use WindowsPath = path::WindowsPath;
pub use PosixPath = path::PosixPath;
pub use tuple::{CopyableTuple, ImmutableTuple, ExtendedTupleOps};
pub use str::{StrSlice, Trimmable};
pub use vec::{ConstVector, CopyableVector, ImmutableVector};
pub use vec::{ImmutableEqVector, ImmutableCopyableVector};
pub use vec::{MutableVector, MutableCopyableVector};
pub use iter::{BaseIter, ExtendedIter, EqIter, CopyableIter};
pub use iter::{CopyableOrderedIter, CopyableNonstrictIter, Times};
pub use num::Num;
pub use ptr::Ptr;
pub use to_str::ToStr;
// The following exports are the core operators and kinds
// The compiler has special knowledge of these so we must not duplicate them
// when compiling for testing
#[cfg(notest)]
pub use ops::{Const, Copy, Send, Owned};
#[cfg(notest)]
pub use ops::{Drop};
#[cfg(notest)]
pub use ops::{Add, Sub, Mul, Div, Modulo, Neg, BitAnd, BitOr, BitXor};
#[cfg(notest)]
pub use ops::{Shl, Shr, Index};
#[cfg(test)]
extern mod coreops(name = "core", vers = "0.5");
#[cfg(test)]
pub use coreops::ops::{Const, Copy, Send, Owned};
#[cfg(test)]
pub use coreops::ops::{Drop};
#[cfg(test)]
pub use coreops::ops::{Add, Sub, Mul, Div, Modulo, Neg, BitAnd, BitOr};
#[cfg(test)]
pub use coreops::ops::{BitXor};
#[cfg(test)]
pub use coreops::ops::{Shl, Shr, Index};
// Export the log levels as global constants. Higher levels mean
// more verbosity. Error is the bottom level; the default logging level
// is warn-and-below.
/// The error log level
pub const error : u32 = 1_u32;
/// The warning log level
pub const warn : u32 = 2_u32;
/// The info log level
pub const info : u32 = 3_u32;
/// The debug log level
pub const debug : u32 = 4_u32;
// A curious inner-module that's not exported that contains the binding
// 'core' so that macro-expanded references to core::error and such
// can be resolved within libcore.
#[doc(hidden)] // FIXME #3538
mod core {
pub const error : u32 = 1_u32;
pub const warn : u32 = 2_u32;
pub const info : u32 = 3_u32;
pub const debug : u32 = 4_u32;
}
// Similar to above. Some magic to make core testable.
#[cfg(test)]
mod std {
extern mod std(vers = "0.5");
pub use std::test;
}
// Local Variables:
// mode: rust;
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
mod inst {
pub type T = i16;
pub const bits: uint = u16::bits;
}
mod inst {
pub type T = i32;
pub const bits: uint = u32::bits;
}
mod inst {
pub type T = i64;
pub const bits: uint = u64::bits;
}
mod inst {
pub type T = i8;
pub const bits: uint = u8::bits;
}
pub use inst::pow;
mod inst {
pub type T = int;
pub const bits: uint = uint::bits;
/// Returns `base` raised to the power of `exponent`
pub fn pow(base: int, exponent: uint) -> int {
if exponent == 0u {
            // not mathematically true if base == 0
return 1;
}
if base == 0 { return 0; }
let mut my_pow = exponent;
let mut acc = 1;
let mut multiplier = base;
while(my_pow > 0u) {
if my_pow % 2u == 1u {
acc *= multiplier;
}
my_pow /= 2u;
multiplier *= multiplier;
}
return acc;
}
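    // Worked example of the square-and-multiply loop above, pow(3, 5):
    //   my_pow = 5 (odd)  -> acc = 3,   multiplier = 9
    //   my_pow = 2 (even) -> acc = 3,   multiplier = 81
    //   my_pow = 1 (odd)  -> acc = 243, multiplier = 6561
    //   my_pow = 0        -> returns 243 == 3^5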
#[test]
fn test_pow() {
assert (pow(0, 0u) == 1);
assert (pow(0, 1u) == 0);
assert (pow(0, 2u) == 0);
assert (pow(-1, 0u) == 1);
assert (pow(1, 0u) == 1);
assert (pow(-3, 2u) == 9);
assert (pow(-3, 3u) == -27);
assert (pow(4, 9u) == 262144);
}
#[test]
fn test_overflows() {
assert (max_value > 0);
assert (min_value <= 0);
assert (min_value + max_value + 1 == 0);
}
}
mod inst {
#[allow(non_camel_case_types)]
pub type IMPL_T<A> = dlist::DList<A>;
/**
* Iterates through the current contents.
*
* Attempts to access this dlist during iteration are allowed (to
* allow for e.g. breadth-first search with in-place enqueues), but
* removing the current node is forbidden.
*/
pub pure fn EACH<A>(self: &IMPL_T<A>, f: fn(v: &A) -> bool) {
let mut link = self.peek_n();
while option::is_some(&link) {
let nobe = option::get(link);
assert nobe.linked;
if !f(&nobe.data) { break; }
// Check (weakly) that the user didn't do a remove.
if self.size == 0 {
fail ~"The dlist became empty during iteration??"
}
if !nobe.linked ||
(!((nobe.prev.is_some()
|| box::ptr_eq(*self.hd.expect(~"headless dlist?"),
*nobe))
&& (nobe.next.is_some()
|| box::ptr_eq(*self.tl.expect(~"tailless dlist?"),
*nobe)))) {
fail ~"Removing a dlist node during iteration is forbidden!"
}
link = nobe.next_link();
}
}
pub pure fn SIZE_HINT<A>(self: &IMPL_T<A>) -> Option<uint> {
Some(self.len())
}
}
mod inst {
#[allow(non_camel_case_types)]
pub type IMPL_T<A> = dvec::DVec<A>;
/**
* Iterates through the current contents.
*
* Attempts to access this dvec during iteration will fail.
*/
pub pure fn EACH<A>(self: &IMPL_T<A>, f: fn(v: &A) -> bool) {
unsafe {
do self.swap |v| {
v.each(f);
move v
}
}
}
pub pure fn SIZE_HINT<A>(self: &IMPL_T<A>) -> Option<uint> {
Some(self.len())
}
}
\ No newline at end of file
mod inst {
#[allow(non_camel_case_types)]
pub type IMPL_T<A> = Option<A>;
pub pure fn EACH<A>(self: &IMPL_T<A>, f: fn(v: &A) -> bool) {
match *self {
None => (),
Some(ref a) => { f(a); }
}
}
pub pure fn SIZE_HINT<A>(self: &IMPL_T<A>) -> Option<uint> {
match *self {
None => Some(0),
Some(_) => Some(1)
}
}
}
mod local_data_priv;
pub mod local_data;
pub mod rt;
pub mod spawn;
mod inst {
pub type T = u16;
pub const bits: uint = 16;
}
mod inst {
pub type T = u32;
pub const bits: uint = 32;
}
mod inst {
pub type T = u64;
pub const bits: uint = 64;
}
pub use inst::is_ascii;
mod inst {
pub type T = u8;
pub const bits: uint = 8;
// Type-specific functions here. These must be reexported by the
// parent module so that they appear in core::u8 and not core::u8::u8;
pub pure fn is_ascii(x: T) -> bool { return 0 as T == x & 128 as T; }
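    // e.g. is_ascii(97u8) ('a') is true; is_ascii(200u8), whose high bit
    // is set, is false.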
}
pub use inst::{
div_ceil, div_round, div_floor, iterate,
next_power_of_two
};
mod inst {
pub type T = uint;
#[cfg(target_arch = "x86")]
#[cfg(target_arch = "arm")]
pub const bits: uint = 32;
#[cfg(target_arch = "x86_64")]
pub const bits: uint = 64;
/**
* Divide two numbers, return the result, rounded up.
*
* # Arguments
*
* * x - an integer
* * y - an integer distinct from 0u
*
* # Return value
*
* The smallest integer `q` such that `x/y <= q`.
*/
pub pure fn div_ceil(x: uint, y: uint) -> uint {
let div = x / y;
if x % y == 0u { div }
else { div + 1u }
}
/**
* Divide two numbers, return the result, rounded to the closest integer.
*
* # Arguments
*
* * x - an integer
* * y - an integer distinct from 0u
*
* # Return value
*
* The integer `q` closest to `x/y`.
*/
pub pure fn div_round(x: uint, y: uint) -> uint {
let div = x / y;
if x % y * 2u < y { div }
else { div + 1u }
}
/**
* Divide two numbers, return the result, rounded down.
*
* Note: This is the same function as `div`.
*
* # Arguments
*
* * x - an integer
* * y - an integer distinct from 0u
*
* # Return value
*
 * The largest integer `q` such that `q <= x/y`, i.e. `x/y` rounded
 * down. For unsigned arguments this is simply `x / y`.
*/
pub pure fn div_floor(x: uint, y: uint) -> uint { return x / y; }
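    // Worked example with x = 7u, y = 3u: div_floor gives 2, div_ceil
    // gives 3, and div_round gives 2 (since 7 % 3 * 2 == 2 < 3, the
    // quotient is rounded down).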
/**
* Iterate over the range [`lo`..`hi`), or stop when requested
*
* # Arguments
*
* * lo - The integer at which to start the loop (included)
* * hi - The integer at which to stop the loop (excluded)
* * it - A block to execute with each consecutive integer of the range.
* Return `true` to continue, `false` to stop.
*
* # Return value
*
 * `true` if execution proceeded correctly, `false` if it was interrupted,
 * that is, if `it` returned `false` at any point.
*/
pub pure fn iterate(lo: uint, hi: uint, it: fn(uint) -> bool) -> bool {
let mut i = lo;
while i < hi {
if (!it(i)) { return false; }
i += 1u;
}
return true;
}
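    // Illustrative use (sketch): summing the integers in [0u, 5u)
    //
    //     let mut sum = 0u;
    //     iterate(0u, 5u, |i| { sum += i; true });
    //     // sum is now 10u; a callback returning false would stop the
    //     // loop early and make iterate return false.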
/// Returns the smallest power of 2 greater than or equal to `n`
#[inline(always)]
pub fn next_power_of_two(n: uint) -> uint {
let halfbits: uint = sys::size_of::<uint>() * 4u;
let mut tmp: uint = n - 1u;
let mut shift: uint = 1u;
while shift <= halfbits { tmp |= tmp >> shift; shift <<= 1u; }
return tmp + 1u;
}
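    // Worked example: next_power_of_two(5u) sets tmp = 4 (0b100); the
    // shifts smear the high bit downward, 0b100 -> 0b110 -> 0b111 (= 7),
    // and tmp + 1u == 8u is returned.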
#[test]
fn test_next_power_of_two() {
assert (uint::next_power_of_two(0u) == 0u);
assert (uint::next_power_of_two(1u) == 1u);
assert (uint::next_power_of_two(2u) == 2u);
assert (uint::next_power_of_two(3u) == 4u);
assert (uint::next_power_of_two(4u) == 4u);
assert (uint::next_power_of_two(5u) == 8u);
assert (uint::next_power_of_two(6u) == 8u);
assert (uint::next_power_of_two(7u) == 8u);
assert (uint::next_power_of_two(8u) == 8u);
assert (uint::next_power_of_two(9u) == 16u);
assert (uint::next_power_of_two(10u) == 16u);
assert (uint::next_power_of_two(11u) == 16u);
assert (uint::next_power_of_two(12u) == 16u);
assert (uint::next_power_of_two(13u) == 16u);
assert (uint::next_power_of_two(14u) == 16u);
assert (uint::next_power_of_two(15u) == 16u);
assert (uint::next_power_of_two(16u) == 16u);
assert (uint::next_power_of_two(17u) == 32u);
assert (uint::next_power_of_two(18u) == 32u);
assert (uint::next_power_of_two(19u) == 32u);
assert (uint::next_power_of_two(20u) == 32u);
assert (uint::next_power_of_two(21u) == 32u);
assert (uint::next_power_of_two(22u) == 32u);
assert (uint::next_power_of_two(23u) == 32u);
assert (uint::next_power_of_two(24u) == 32u);
assert (uint::next_power_of_two(25u) == 32u);
assert (uint::next_power_of_two(26u) == 32u);
assert (uint::next_power_of_two(27u) == 32u);
assert (uint::next_power_of_two(28u) == 32u);
assert (uint::next_power_of_two(29u) == 32u);
assert (uint::next_power_of_two(30u) == 32u);
assert (uint::next_power_of_two(31u) == 32u);
assert (uint::next_power_of_two(32u) == 32u);
assert (uint::next_power_of_two(33u) == 64u);
assert (uint::next_power_of_two(34u) == 64u);
assert (uint::next_power_of_two(35u) == 64u);
assert (uint::next_power_of_two(36u) == 64u);
assert (uint::next_power_of_two(37u) == 64u);
assert (uint::next_power_of_two(38u) == 64u);
assert (uint::next_power_of_two(39u) == 64u);
}
#[test]
fn test_overflows() {
assert (uint::max_value > 0u);
assert (uint::min_value <= 0u);
assert (uint::min_value + uint::max_value + 1u == 0u);
}
#[test]
fn test_div() {
assert(uint::div_floor(3u, 4u) == 0u);
assert(uint::div_ceil(3u, 4u) == 1u);
assert(uint::div_round(3u, 4u) == 1u);
}
}
......@@ -24,6 +24,635 @@ extern mod syntax(vers = "0.5");
use core::*;
use io::WriterUtil;
use syntax::{ast, ast_util, fold, visit, codemap};
use syntax::parse;
use syntax::print::pprust;
use syntax::diagnostic;
enum test_mode { tm_converge, tm_run, }
type context = { mode: test_mode }; // + rng
#[cfg(stage0)]
impl test_mode : cmp::Eq {
pure fn eq(other: &test_mode) -> bool {
(self as uint) == ((*other) as uint)
}
pure fn ne(other: &test_mode) -> bool { !self.eq(other) }
}
#[cfg(stage1)]
#[cfg(stage2)]
impl test_mode : cmp::Eq {
pure fn eq(&self, other: &test_mode) -> bool {
((*self) as uint) == ((*other) as uint)
}
pure fn ne(&self, other: &test_mode) -> bool { !(*self).eq(other) }
}
fn write_file(filename: &Path, content: ~str) {
result::get(
&io::file_writer(filename, ~[io::Create, io::Truncate]))
.write_str(content);
}
fn contains(haystack: ~str, needle: ~str) -> bool {
str::contains(haystack, needle)
}
fn find_rust_files(files: &mut ~[Path], path: &Path) {
if path.filetype() == Some(~".rs") && !contains(path.to_str(), ~"utf8") {
// ignoring "utf8" tests because something is broken
files.push(*path);
} else if os::path_is_dir(path)
&& !contains(path.to_str(), ~"compile-fail")
&& !contains(path.to_str(), ~"build") {
for os::list_dir_path(path).each |p| {
find_rust_files(files, *p);
}
}
}
fn common_exprs() -> ~[ast::expr] {
fn dse(e: ast::expr_) -> ast::expr {
{ id: 0, callee_id: -1, node: e, span: ast_util::dummy_sp() }
}
fn dsl(l: ast::lit_) -> ast::lit {
{ node: l, span: ast_util::dummy_sp() }
}
~[dse(ast::expr_break(option::None)),
dse(ast::expr_again(option::None)),
dse(ast::expr_fail(option::None)),
dse(ast::expr_fail(option::Some(
@dse(ast::expr_lit(@dsl(ast::lit_str(@~"boo"))))))),
dse(ast::expr_ret(option::None)),
dse(ast::expr_lit(@dsl(ast::lit_nil))),
dse(ast::expr_lit(@dsl(ast::lit_bool(false)))),
dse(ast::expr_lit(@dsl(ast::lit_bool(true)))),
dse(ast::expr_unary(ast::box(ast::m_imm),
@dse(ast::expr_lit(@dsl(ast::lit_bool(true)))))),
dse(ast::expr_unary(ast::uniq(ast::m_imm),
@dse(ast::expr_lit(@dsl(ast::lit_bool(true))))))
]
}
pure fn safe_to_steal_expr(e: @ast::expr, tm: test_mode) -> bool {
safe_to_use_expr(*e, tm)
}
pure fn safe_to_use_expr(e: ast::expr, tm: test_mode) -> bool {
match tm {
tm_converge => {
match e.node {
// If the fuzzer moves a block-ending-in-semicolon into callee
// position, the pretty-printer can't preserve this even by
// parenthesizing!! See email to marijn.
ast::expr_if(*) | ast::expr_block(*)
| ast::expr_match(*) | ast::expr_while(*) => { false }
// https://github.com/mozilla/rust/issues/929
ast::expr_cast(*) | ast::expr_assert(*) |
ast::expr_binary(*) | ast::expr_assign(*) |
ast::expr_assign_op(*) => { false }
ast::expr_fail(option::None) |
ast::expr_ret(option::None) => { false }
// https://github.com/mozilla/rust/issues/953
ast::expr_fail(option::Some(_)) => { false }
// https://github.com/mozilla/rust/issues/928
//ast::expr_cast(_, _) { false }
// https://github.com/mozilla/rust/issues/1458
ast::expr_call(_, _, _) => { false }
_ => { true }
}
}
tm_run => { true }
}
}
fn safe_to_steal_ty(t: @ast::Ty, tm: test_mode) -> bool {
// Restrictions happen to be the same.
safe_to_replace_ty(t.node, tm)
}
// Not type-parameterized: https://github.com/mozilla/rust/issues/898 (FIXED)
fn stash_expr_if(c: fn@(@ast::expr, test_mode)->bool,
es: @mut ~[ast::expr],
e: @ast::expr,
tm: test_mode) {
if c(e, tm) {
*es += ~[*e];
} else {/* now my indices are wrong :( */ }
}
fn stash_ty_if(c: fn@(@ast::Ty, test_mode)->bool,
es: @mut ~[ast::Ty],
e: @ast::Ty,
tm: test_mode) {
if c(e, tm) {
es.push(*e);
} else {/* now my indices are wrong :( */ }
}
type stolen_stuff = {exprs: ~[ast::expr], tys: ~[ast::Ty]};
fn steal(crate: ast::crate, tm: test_mode) -> stolen_stuff {
let exprs = @mut ~[];
let tys = @mut ~[];
let v = visit::mk_simple_visitor(@{
visit_expr: |a| stash_expr_if(safe_to_steal_expr, exprs, a, tm),
visit_ty: |a| stash_ty_if(safe_to_steal_ty, tys, a, tm),
.. *visit::default_simple_visitor()
});
visit::visit_crate(crate, (), v);
{exprs: *exprs, tys: *tys}
}
fn safe_to_replace_expr(e: ast::expr_, _tm: test_mode) -> bool {
match e {
// https://github.com/mozilla/rust/issues/652
ast::expr_if(*) => { false }
ast::expr_block(_) => { false }
// expr_call is also missing a constraint
ast::expr_fn_block(*) => { false }
_ => { true }
}
}
fn safe_to_replace_ty(t: ast::ty_, _tm: test_mode) -> bool {
match t {
ast::ty_infer => { false } // always implicit, always top level
ast::ty_bot => { false } // in source, can only appear
// as the out type of a function
ast::ty_mac(_) => { false }
_ => { true }
}
}
// Replace the |i|th expr (in fold order) of |crate| with |newexpr|.
fn replace_expr_in_crate(crate: ast::crate, i: uint,
newexpr: ast::expr, tm: test_mode) ->
ast::crate {
let j: @mut uint = @mut 0u;
fn fold_expr_rep(j_: @mut uint, i_: uint, newexpr_: ast::expr_,
original: ast::expr_, fld: fold::ast_fold,
tm_: test_mode) ->
ast::expr_ {
*j_ += 1u;
if i_ + 1u == *j_ && safe_to_replace_expr(original, tm_) {
newexpr_
} else {
fold::noop_fold_expr(original, fld)
}
}
let afp = @{
fold_expr: fold::wrap(|a,b| {
fold_expr_rep(j, i, newexpr.node, a, b, tm)
}),
.. *fold::default_ast_fold()
};
let af = fold::make_fold(afp);
let crate2: @ast::crate = @af.fold_crate(crate);
*crate2
}
// Replace the |i|th ty (in fold order) of |crate| with |newty|.
fn replace_ty_in_crate(crate: ast::crate, i: uint, newty: ast::Ty,
tm: test_mode) -> ast::crate {
let j: @mut uint = @mut 0u;
fn fold_ty_rep(j_: @mut uint, i_: uint, newty_: ast::ty_,
original: ast::ty_, fld: fold::ast_fold,
tm_: test_mode) ->
ast::ty_ {
*j_ += 1u;
if i_ + 1u == *j_ && safe_to_replace_ty(original, tm_) {
newty_
} else { fold::noop_fold_ty(original, fld) }
}
let afp = @{
fold_ty: fold::wrap(|a,b| fold_ty_rep(j, i, newty.node, a, b, tm) ),
.. *fold::default_ast_fold()
};
let af = fold::make_fold(afp);
let crate2: @ast::crate = @af.fold_crate(crate);
*crate2
}
fn under(n: uint, it: fn(uint)) {
let mut i: uint = 0u;
while i < n { it(i); i += 1u; }
}
fn as_str(f: fn@(+x: io::Writer)) -> ~str {
io::with_str_writer(f)
}
fn check_variants_of_ast(crate: ast::crate, codemap: @codemap::CodeMap,
filename: &Path, cx: context) {
let stolen = steal(crate, cx.mode);
let extra_exprs = vec::filter(common_exprs(),
|a| safe_to_use_expr(*a, cx.mode) );
check_variants_T(crate, codemap, filename, ~"expr",
extra_exprs + stolen.exprs, pprust::expr_to_str,
replace_expr_in_crate, cx);
check_variants_T(crate, codemap, filename, ~"ty", stolen.tys,
pprust::ty_to_str, replace_ty_in_crate, cx);
}
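// Core driver of the fuzzing loop (used by both the expr and ty passes above):
// for each of the first few replacement positions i and candidate replacements j,
// splice thing j into position i of the crate, pretty-print the result, and then
// either check pretty-print round-trip convergence (tm_converge) or hand the
// generated source to the whole compiler (tm_run).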
fn check_variants_T<T: Copy>(
crate: ast::crate,
codemap: @codemap::CodeMap,
filename: &Path,
thing_label: ~str,
things: ~[T],
stringifier: fn@(@T, @syntax::parse::token::ident_interner) -> ~str,
replacer: fn@(ast::crate, uint, T, test_mode) -> ast::crate,
cx: context
) {
error!("%s contains %u %s objects", filename.to_str(),
things.len(), thing_label);
// Assuming we're not generating any token_trees
let intr = syntax::parse::token::mk_fake_ident_interner();
let L = things.len();
if L < 100 {
do under(uint::min(L, 20)) |i| {
log(error, ~"Replacing... #" + uint::str(i));
let fname = str::from_slice(filename.to_str());
do under(uint::min(L, 30)) |j| {
log(error, ~"With... " + stringifier(@things[j], intr));
let crate2 = @replacer(crate, i, things[j], cx.mode);
// It would be best to test the *crate* for stability, but
// testing the string for stability is easier and ok for now.
let handler = diagnostic::mk_handler(None);
let str3 = do io::with_str_reader("") |rdr| {
@as_str(|a|pprust::print_crate(
codemap,
intr,
diagnostic::mk_span_handler(handler, codemap),
crate2,
fname,
rdr, a,
pprust::no_ann(),
false))
};
match cx.mode {
tm_converge => {
check_roundtrip_convergence(str3, 1u);
}
tm_run => {
let file_label = fmt!("rusttmp/%s_%s_%u_%u",
last_part(filename.to_str()),
thing_label, i, j);
let safe_to_run = !(content_is_dangerous_to_run(*str3)
|| has_raw_pointers(*crate2));
check_whole_compiler(*str3, &Path(file_label),
safe_to_run);
}
}
}
}
}
}
fn last_part(filename: ~str) -> ~str {
let ix = option::get(str::rfind_char(filename, '/'));
str::slice(filename, ix + 1u, str::len(filename) - 3u)
}
// Outcome of compiling/running one generated test case
// (see check_compiling and check_running below).
enum happiness {
    passed,                 // compiled (and, when run, exited with status 0)
    cleanly_rejected(~str), // rejected in an expected way (span error, explicit fail, timeout)
    known_bug(~str),        // reproduced an already-filed issue
    failed(~str),           // unexpected outcome; the test case is kept for reporting
}
// We'd find more bugs if we could take an AST here, but
// - that would find many "false positives" or unimportant bugs
// - that would be tricky, requiring use of tasks or serialization
// or randomness.
// This seems to find plenty of bugs as it is :)
fn check_whole_compiler(code: ~str, suggested_filename_prefix: &Path,
allow_running: bool) {
let filename = &suggested_filename_prefix.with_filetype("rs");
write_file(filename, code);
let compile_result = check_compiling(filename);
let run_result = match (compile_result, allow_running) {
(passed, true) => { check_running(suggested_filename_prefix) }
(h, _) => { h }
};
match run_result {
passed | cleanly_rejected(_) | known_bug(_) => {
removeIfExists(suggested_filename_prefix);
removeIfExists(&suggested_filename_prefix.with_filetype("rs"));
removeDirIfExists(&suggested_filename_prefix.with_filetype("dSYM"));
}
failed(s) => {
log(error, ~"check_whole_compiler failure: " + s);
log(error, ~"Saved as: " + filename.to_str());
}
}
}
fn removeIfExists(filename: &Path) {
// So sketchy!
assert !contains(filename.to_str(), ~" ");
run::program_output(~"bash", ~[~"-c", ~"rm " + filename.to_str()]);
}
fn removeDirIfExists(filename: &Path) {
// So sketchy!
assert !contains(filename.to_str(), ~" ");
run::program_output(~"bash", ~[~"-c", ~"rm -r " + filename.to_str()]);
}
fn check_running(exe_filename: &Path) -> happiness {
let p = run::program_output(
~"/Users/jruderman/scripts/timed_run_rust_program.py",
~[exe_filename.to_str()]);
let comb = p.out + ~"\n" + p.err;
if str::len(comb) > 1u {
log(error, ~"comb comb comb: " + comb);
}
if contains(comb, ~"Assertion failed:") {
failed(~"C++ assertion failure")
} else if contains(comb, ~"leaked memory in rust main loop") {
// might also use exit code 134
//failed("Leaked")
known_bug(~"https://github.com/mozilla/rust/issues/910")
} else if contains(comb, ~"src/rt/") {
failed(~"Mentioned src/rt/")
} else if contains(comb, ~"malloc") {
failed(~"Mentioned malloc")
} else {
match p.status {
0 => { passed }
100 => { cleanly_rejected(~"running: explicit fail") }
101 | 247 => { cleanly_rejected(~"running: timed out") }
245 | 246 | 138 | 252 => {
known_bug(~"https://github.com/mozilla/rust/issues/1466")
}
136 | 248 => {
known_bug(
~"SIGFPE - https://github.com/mozilla/rust/issues/944")
}
rc => {
failed(~"Rust program ran but exited with status " +
int::str(rc))
}
}
}
}
fn check_compiling(filename: &Path) -> happiness {
let p = run::program_output(
~"/Users/jruderman/code/rust/build/x86_64-apple-darwin/\
stage1/bin/rustc",
~[filename.to_str()]);
//error!("Status: %d", p.status);
if p.status == 0 {
passed
} else if p.err != ~"" {
if contains(p.err, ~"error:") {
cleanly_rejected(~"rejected with span_error")
} else {
log(error, ~"Stderr: " + p.err);
failed(~"Unfamiliar error message")
}
} else if contains(p.out, ~"Assertion") && contains(p.out, ~"failed") {
log(error, ~"Stdout: " + p.out);
failed(~"Looks like an llvm assertion failure")
} else if contains(p.out, ~"internal compiler error unimplemented") {
known_bug(~"Something unimplemented")
} else if contains(p.out, ~"internal compiler error") {
log(error, ~"Stdout: " + p.out);
failed(~"internal compiler error")
} else {
log(error, p.status);
log(error, ~"!Stdout: " + p.out);
failed(~"What happened?")
}
}
fn parse_and_print(code: @~str) -> ~str {
let filename = Path("tmp.rs");
let sess = parse::new_parse_sess(option::None);
write_file(&filename, *code);
let crate = parse::parse_crate_from_source_str(
filename.to_str(), code, ~[], sess);
do io::with_str_reader(*code) |rdr| {
as_str(|a|
pprust::print_crate(
sess.cm,
// Assuming there are no token_trees
syntax::parse::token::mk_fake_ident_interner(),
sess.span_diagnostic,
crate,
filename.to_str(),
rdr, a,
pprust::no_ann(),
false) )
}
}
fn has_raw_pointers(c: ast::crate) -> bool {
let has_rp = @mut false;
fn visit_ty(flag: @mut bool, t: @ast::Ty) {
match t.node {
ast::ty_ptr(_) => { *flag = true; }
_ => { }
}
}
let v =
visit::mk_simple_visitor(@{visit_ty: |a| visit_ty(has_rp, a),
.. *visit::default_simple_visitor()});
visit::visit_crate(c, (), v);
return *has_rp;
}
fn content_is_dangerous_to_run(code: ~str) -> bool {
let dangerous_patterns =
~[~"xfail-test",
~"import", // espeically fs, run
~"extern",
~"unsafe",
~"log"]; // python --> rust pipe deadlock?
for dangerous_patterns.each |p| { if contains(code, *p) { return true; } }
return false;
}
fn content_is_dangerous_to_compile(code: ~str) -> bool {
let dangerous_patterns =
~[~"xfail-test"];
for dangerous_patterns.each |p| { if contains(code, *p) { return true; } }
return false;
}
fn content_might_not_converge(code: ~str) -> bool {
let confusing_patterns =
~[~"xfail-test",
~"xfail-pretty",
~"self", // crazy rules enforced by parser not typechecker?
~"spawn", // precedence issues?
~"bind", // precedence issues?
~" be ", // don't want to replace its child with a non-call:
// "Non-call expression in tail call"
~"\n\n\n\n\n" // https://github.com/mozilla/rust/issues/850
];
for confusing_patterns.each |p| { if contains(code, *p) { return true; } }
return false;
}
fn file_might_not_converge(filename: &Path) -> bool {
let confusing_files = ~[
~"expr-alt.rs", // pretty-printing "(a = b) = c"
// vs "a = b = c" and wrapping
~"block-arg-in-ternary.rs", // wrapping
~"move-3-unique.rs", // 0 becomes (0), but both seem reasonable. wtf?
~"move-3.rs" // 0 becomes (0), but both seem reasonable. wtf?
];
for confusing_files.each |f| {
if contains(filename.to_str(), *f) {
return true;
}
}
return false;
}
fn check_roundtrip_convergence(code: @~str, maxIters: uint) {
let mut i = 0u;
let mut newv = code;
let mut oldv = code;
while i < maxIters {
oldv = newv;
if content_might_not_converge(*oldv) { return; }
newv = @parse_and_print(oldv);
if oldv == newv { break; }
i += 1u;
}
if oldv == newv {
error!("Converged after %u iterations", i);
} else {
error!("Did not converge after %u iterations!", i);
write_file(&Path("round-trip-a.rs"), *oldv);
write_file(&Path("round-trip-b.rs"), *newv);
run::run_program(~"diff",
~[~"-w", ~"-u", ~"round-trip-a.rs",
~"round-trip-b.rs"]);
fail ~"Mismatch";
}
}
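// Illustrative sketch (added commentary, not part of the original fuzzer):
// the property exercised above is that pretty-printing reaches a fixed point,
// i.e. repeating parse_and_print eventually leaves the text unchanged. A
// minimal hand-rolled version of the same loop, using only helpers defined
// in this file, would be:
fn example_roundtrip_fixed_point(code: @~str, max_iters: uint) -> bool {
    let mut cur = code;
    let mut i = 0u;
    while i < max_iters {
        let next = @parse_and_print(cur);   // one parse/pretty-print cycle
        if cur == next { return true; }     // reached the fixed point
        cur = next;
        i += 1u;
    }
    return false;                           // did not converge within max_iters
}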
fn check_convergence(files: &[Path]) {
error!("pp convergence tests: %u files", vec::len(files));
for files.each |file| {
if !file_might_not_converge(file) {
let s = @result::get(&io::read_whole_file_str(file));
if !content_might_not_converge(*s) {
error!("pp converge: %s", file.to_str());
// Change from 7u to 2u once
// https://github.com/mozilla/rust/issues/850 is fixed
check_roundtrip_convergence(s, 7u);
}
}
}
}
fn check_variants(files: &[Path], cx: context) {
for files.each |file| {
if cx.mode == tm_converge &&
file_might_not_converge(file) {
error!("Skipping convergence test based on\
file_might_not_converge");
loop;
}
let s = @result::get(&io::read_whole_file_str(file));
if contains(*s, ~"#") {
loop; // Macros are confusing
}
if cx.mode == tm_converge && content_might_not_converge(*s) {
loop;
}
if cx.mode == tm_run && content_is_dangerous_to_compile(*s) {
loop;
}
let file_str = file.to_str();
log(error, ~"check_variants: " + file_str);
let sess = parse::new_parse_sess(option::None);
let crate =
parse::parse_crate_from_source_str(
file_str,
s, ~[], sess);
io::with_str_reader(*s, |rdr| {
error!("%s",
as_str(|a| pprust::print_crate(
sess.cm,
// Assuming no token_trees
syntax::parse::token::mk_fake_ident_interner(),
sess.span_diagnostic,
crate,
file_str,
rdr, a,
pprust::no_ann(),
false)))
});
check_variants_of_ast(*crate, sess.cm, file, cx);
}
}
fn main() {
let args = os::args();
if vec::len(args) != 2u {
error!("usage: %s <testdir>", args[0]);
return;
}
let mut files = ~[];
let root = Path(args[1]);
find_rust_files(&mut files, &root);
error!("== check_convergence ==");
check_convergence(files);
error!("== check_variants: converge ==");
check_variants(files, { mode: tm_converge });
error!("== check_variants: run ==");
check_variants(files, { mode: tm_run });
error!("Fuzzer done");
}
// Local Variables:
// fill-column: 78;
// indent-tabs-mode: nil
......
#[legacy_exports];
use io::WriterUtil;
use syntax::{ast, ast_util, fold, visit, codemap};
use syntax::parse;
use syntax::print::pprust;
use syntax::diagnostic;
enum test_mode { tm_converge, tm_run, }
type context = { mode: test_mode }; // + rng
#[cfg(stage0)]
impl test_mode : cmp::Eq {
pure fn eq(other: &test_mode) -> bool {
(self as uint) == ((*other) as uint)
}
pure fn ne(other: &test_mode) -> bool { !self.eq(other) }
}
#[cfg(stage1)]
#[cfg(stage2)]
impl test_mode : cmp::Eq {
pure fn eq(&self, other: &test_mode) -> bool {
((*self) as uint) == ((*other) as uint)
}
pure fn ne(&self, other: &test_mode) -> bool { !(*self).eq(other) }
}
fn write_file(filename: &Path, content: ~str) {
result::get(
&io::file_writer(filename, ~[io::Create, io::Truncate]))
.write_str(content);
}
fn contains(haystack: ~str, needle: ~str) -> bool {
str::contains(haystack, needle)
}
fn find_rust_files(files: &mut ~[Path], path: &Path) {
if path.filetype() == Some(~".rs") && !contains(path.to_str(), ~"utf8") {
// ignoring "utf8" tests because something is broken
files.push(*path);
} else if os::path_is_dir(path)
&& !contains(path.to_str(), ~"compile-fail")
&& !contains(path.to_str(), ~"build") {
for os::list_dir_path(path).each |p| {
find_rust_files(files, *p);
}
}
}
fn common_exprs() -> ~[ast::expr] {
fn dse(e: ast::expr_) -> ast::expr {
{ id: 0, callee_id: -1, node: e, span: ast_util::dummy_sp() }
}
fn dsl(l: ast::lit_) -> ast::lit {
{ node: l, span: ast_util::dummy_sp() }
}
~[dse(ast::expr_break(option::None)),
dse(ast::expr_again(option::None)),
dse(ast::expr_fail(option::None)),
dse(ast::expr_fail(option::Some(
@dse(ast::expr_lit(@dsl(ast::lit_str(@~"boo"))))))),
dse(ast::expr_ret(option::None)),
dse(ast::expr_lit(@dsl(ast::lit_nil))),
dse(ast::expr_lit(@dsl(ast::lit_bool(false)))),
dse(ast::expr_lit(@dsl(ast::lit_bool(true)))),
dse(ast::expr_unary(ast::box(ast::m_imm),
@dse(ast::expr_lit(@dsl(ast::lit_bool(true)))))),
dse(ast::expr_unary(ast::uniq(ast::m_imm),
@dse(ast::expr_lit(@dsl(ast::lit_bool(true))))))
]
}
pure fn safe_to_steal_expr(e: @ast::expr, tm: test_mode) -> bool {
safe_to_use_expr(*e, tm)
}
pure fn safe_to_use_expr(e: ast::expr, tm: test_mode) -> bool {
match tm {
tm_converge => {
match e.node {
// If the fuzzer moves a block-ending-in-semicolon into callee
// position, the pretty-printer can't preserve this even by
// parenthesizing!! See email to marijn.
ast::expr_if(*) | ast::expr_block(*)
| ast::expr_match(*) | ast::expr_while(*) => { false }
// https://github.com/mozilla/rust/issues/929
ast::expr_cast(*) | ast::expr_assert(*) |
ast::expr_binary(*) | ast::expr_assign(*) |
ast::expr_assign_op(*) => { false }
ast::expr_fail(option::None) |
ast::expr_ret(option::None) => { false }
// https://github.com/mozilla/rust/issues/953
ast::expr_fail(option::Some(_)) => { false }
// https://github.com/mozilla/rust/issues/928
//ast::expr_cast(_, _) { false }
// https://github.com/mozilla/rust/issues/1458
ast::expr_call(_, _, _) => { false }
_ => { true }
}
}
tm_run => { true }
}
}
fn safe_to_steal_ty(t: @ast::Ty, tm: test_mode) -> bool {
// Restrictions happen to be the same.
safe_to_replace_ty(t.node, tm)
}
// Not type-parameterized: https://github.com/mozilla/rust/issues/898 (FIXED)
fn stash_expr_if(c: fn@(@ast::expr, test_mode)->bool,
es: @mut ~[ast::expr],
e: @ast::expr,
tm: test_mode) {
if c(e, tm) {
*es += ~[*e];
} else {/* now my indices are wrong :( */ }
}
fn stash_ty_if(c: fn@(@ast::Ty, test_mode)->bool,
es: @mut ~[ast::Ty],
e: @ast::Ty,
tm: test_mode) {
if c(e, tm) {
es.push(*e);
} else {/* now my indices are wrong :( */ }
}
type stolen_stuff = {exprs: ~[ast::expr], tys: ~[ast::Ty]};
fn steal(crate: ast::crate, tm: test_mode) -> stolen_stuff {
let exprs = @mut ~[];
let tys = @mut ~[];
let v = visit::mk_simple_visitor(@{
visit_expr: |a| stash_expr_if(safe_to_steal_expr, exprs, a, tm),
visit_ty: |a| stash_ty_if(safe_to_steal_ty, tys, a, tm),
.. *visit::default_simple_visitor()
});
visit::visit_crate(crate, (), v);
{exprs: *exprs, tys: *tys}
}
fn safe_to_replace_expr(e: ast::expr_, _tm: test_mode) -> bool {
match e {
// https://github.com/mozilla/rust/issues/652
ast::expr_if(*) => { false }
ast::expr_block(_) => { false }
// expr_call is also missing a constraint
ast::expr_fn_block(*) => { false }
_ => { true }
}
}
fn safe_to_replace_ty(t: ast::ty_, _tm: test_mode) -> bool {
match t {
ast::ty_infer => { false } // always implicit, always top level
ast::ty_bot => { false } // in source, can only appear
// as the out type of a function
ast::ty_mac(_) => { false }
_ => { true }
}
}
// Replace the |i|th expr (in fold order) of |crate| with |newexpr|.
fn replace_expr_in_crate(crate: ast::crate, i: uint,
newexpr: ast::expr, tm: test_mode) ->
ast::crate {
let j: @mut uint = @mut 0u;
fn fold_expr_rep(j_: @mut uint, i_: uint, newexpr_: ast::expr_,
original: ast::expr_, fld: fold::ast_fold,
tm_: test_mode) ->
ast::expr_ {
*j_ += 1u;
if i_ + 1u == *j_ && safe_to_replace_expr(original, tm_) {
newexpr_
} else {
fold::noop_fold_expr(original, fld)
}
}
let afp = @{
fold_expr: fold::wrap(|a,b| {
fold_expr_rep(j, i, newexpr.node, a, b, tm)
}),
.. *fold::default_ast_fold()
};
let af = fold::make_fold(afp);
let crate2: @ast::crate = @af.fold_crate(crate);
*crate2
}
// Replace the |i|th ty (in fold order) of |crate| with |newty|.
fn replace_ty_in_crate(crate: ast::crate, i: uint, newty: ast::Ty,
tm: test_mode) -> ast::crate {
let j: @mut uint = @mut 0u;
fn fold_ty_rep(j_: @mut uint, i_: uint, newty_: ast::ty_,
original: ast::ty_, fld: fold::ast_fold,
tm_: test_mode) ->
ast::ty_ {
*j_ += 1u;
if i_ + 1u == *j_ && safe_to_replace_ty(original, tm_) {
newty_
} else { fold::noop_fold_ty(original, fld) }
}
let afp = @{
fold_ty: fold::wrap(|a,b| fold_ty_rep(j, i, newty.node, a, b, tm) ),
.. *fold::default_ast_fold()
};
let af = fold::make_fold(afp);
let crate2: @ast::crate = @af.fold_crate(crate);
*crate2
}
fn under(n: uint, it: fn(uint)) {
let mut i: uint = 0u;
while i < n { it(i); i += 1u; }
}
fn as_str(f: fn@(+x: io::Writer)) -> ~str {
io::with_str_writer(f)
}
fn check_variants_of_ast(crate: ast::crate, codemap: @codemap::CodeMap,
filename: &Path, cx: context) {
let stolen = steal(crate, cx.mode);
let extra_exprs = vec::filter(common_exprs(),
|a| safe_to_use_expr(*a, cx.mode) );
check_variants_T(crate, codemap, filename, ~"expr",
extra_exprs + stolen.exprs, pprust::expr_to_str,
replace_expr_in_crate, cx);
check_variants_T(crate, codemap, filename, ~"ty", stolen.tys,
pprust::ty_to_str, replace_ty_in_crate, cx);
}
fn check_variants_T<T: Copy>(
crate: ast::crate,
codemap: @codemap::CodeMap,
filename: &Path,
thing_label: ~str,
things: ~[T],
stringifier: fn@(@T, @syntax::parse::token::ident_interner) -> ~str,
replacer: fn@(ast::crate, uint, T, test_mode) -> ast::crate,
cx: context
) {
error!("%s contains %u %s objects", filename.to_str(),
things.len(), thing_label);
// Assuming we're not generating any token_trees
let intr = syntax::parse::token::mk_fake_ident_interner();
let L = things.len();
if L < 100 {
do under(uint::min(L, 20)) |i| {
log(error, ~"Replacing... #" + uint::str(i));
let fname = str::from_slice(filename.to_str());
do under(uint::min(L, 30)) |j| {
log(error, ~"With... " + stringifier(@things[j], intr));
let crate2 = @replacer(crate, i, things[j], cx.mode);
// It would be best to test the *crate* for stability, but
// testing the string for stability is easier and ok for now.
let handler = diagnostic::mk_handler(None);
let str3 = do io::with_str_reader("") |rdr| {
@as_str(|a|pprust::print_crate(
codemap,
intr,
diagnostic::mk_span_handler(handler, codemap),
crate2,
fname,
rdr, a,
pprust::no_ann(),
false))
};
match cx.mode {
tm_converge => {
check_roundtrip_convergence(str3, 1u);
}
tm_run => {
let file_label = fmt!("rusttmp/%s_%s_%u_%u",
last_part(filename.to_str()),
thing_label, i, j);
let safe_to_run = !(content_is_dangerous_to_run(*str3)
|| has_raw_pointers(*crate2));
check_whole_compiler(*str3, &Path(file_label),
safe_to_run);
}
}
}
}
}
}
fn last_part(filename: ~str) -> ~str {
let ix = option::get(str::rfind_char(filename, '/'));
str::slice(filename, ix + 1u, str::len(filename) - 3u)
}
enum happiness {
passed,
cleanly_rejected(~str),
known_bug(~str),
failed(~str),
}
// We'd find more bugs if we could take an AST here, but
// - that would find many "false positives" or unimportant bugs
// - that would be tricky, requiring use of tasks or serialization
// or randomness.
// This seems to find plenty of bugs as it is :)
fn check_whole_compiler(code: ~str, suggested_filename_prefix: &Path,
allow_running: bool) {
let filename = &suggested_filename_prefix.with_filetype("rs");
write_file(filename, code);
let compile_result = check_compiling(filename);
let run_result = match (compile_result, allow_running) {
(passed, true) => { check_running(suggested_filename_prefix) }
(h, _) => { h }
};
match run_result {
passed | cleanly_rejected(_) | known_bug(_) => {
removeIfExists(suggested_filename_prefix);
removeIfExists(&suggested_filename_prefix.with_filetype("rs"));
removeDirIfExists(&suggested_filename_prefix.with_filetype("dSYM"));
}
failed(s) => {
log(error, ~"check_whole_compiler failure: " + s);
log(error, ~"Saved as: " + filename.to_str());
}
}
}
fn removeIfExists(filename: &Path) {
// So sketchy!
assert !contains(filename.to_str(), ~" ");
run::program_output(~"bash", ~[~"-c", ~"rm " + filename.to_str()]);
}
fn removeDirIfExists(filename: &Path) {
// So sketchy!
assert !contains(filename.to_str(), ~" ");
run::program_output(~"bash", ~[~"-c", ~"rm -r " + filename.to_str()]);
}
fn check_running(exe_filename: &Path) -> happiness {
let p = run::program_output(
~"/Users/jruderman/scripts/timed_run_rust_program.py",
~[exe_filename.to_str()]);
let comb = p.out + ~"\n" + p.err;
if str::len(comb) > 1u {
log(error, ~"comb comb comb: " + comb);
}
if contains(comb, ~"Assertion failed:") {
failed(~"C++ assertion failure")
} else if contains(comb, ~"leaked memory in rust main loop") {
// might also use exit code 134
//failed("Leaked")
known_bug(~"https://github.com/mozilla/rust/issues/910")
} else if contains(comb, ~"src/rt/") {
failed(~"Mentioned src/rt/")
} else if contains(comb, ~"malloc") {
failed(~"Mentioned malloc")
} else {
match p.status {
0 => { passed }
100 => { cleanly_rejected(~"running: explicit fail") }
101 | 247 => { cleanly_rejected(~"running: timed out") }
245 | 246 | 138 | 252 => {
known_bug(~"https://github.com/mozilla/rust/issues/1466")
}
136 | 248 => {
known_bug(
~"SIGFPE - https://github.com/mozilla/rust/issues/944")
}
rc => {
failed(~"Rust program ran but exited with status " +
int::str(rc))
}
}
}
}
fn check_compiling(filename: &Path) -> happiness {
let p = run::program_output(
~"/Users/jruderman/code/rust/build/x86_64-apple-darwin/\
stage1/bin/rustc",
~[filename.to_str()]);
//error!("Status: %d", p.status);
if p.status == 0 {
passed
} else if p.err != ~"" {
if contains(p.err, ~"error:") {
cleanly_rejected(~"rejected with span_error")
} else {
log(error, ~"Stderr: " + p.err);
failed(~"Unfamiliar error message")
}
} else if contains(p.out, ~"Assertion") && contains(p.out, ~"failed") {
log(error, ~"Stdout: " + p.out);
failed(~"Looks like an llvm assertion failure")
} else if contains(p.out, ~"internal compiler error unimplemented") {
known_bug(~"Something unimplemented")
} else if contains(p.out, ~"internal compiler error") {
log(error, ~"Stdout: " + p.out);
failed(~"internal compiler error")
} else {
log(error, p.status);
log(error, ~"!Stdout: " + p.out);
failed(~"What happened?")
}
}
fn parse_and_print(code: @~str) -> ~str {
let filename = Path("tmp.rs");
let sess = parse::new_parse_sess(option::None);
write_file(&filename, *code);
let crate = parse::parse_crate_from_source_str(
filename.to_str(), code, ~[], sess);
do io::with_str_reader(*code) |rdr| {
as_str(|a|
pprust::print_crate(
sess.cm,
// Assuming there are no token_trees
syntax::parse::token::mk_fake_ident_interner(),
sess.span_diagnostic,
crate,
filename.to_str(),
rdr, a,
pprust::no_ann(),
false) )
}
}
fn has_raw_pointers(c: ast::crate) -> bool {
let has_rp = @mut false;
fn visit_ty(flag: @mut bool, t: @ast::Ty) {
match t.node {
ast::ty_ptr(_) => { *flag = true; }
_ => { }
}
}
let v =
visit::mk_simple_visitor(@{visit_ty: |a| visit_ty(has_rp, a),
.. *visit::default_simple_visitor()});
visit::visit_crate(c, (), v);
return *has_rp;
}
fn content_is_dangerous_to_run(code: ~str) -> bool {
let dangerous_patterns =
~[~"xfail-test",
~"import", // espeically fs, run
~"extern",
~"unsafe",
~"log"]; // python --> rust pipe deadlock?
for dangerous_patterns.each |p| { if contains(code, *p) { return true; } }
return false;
}
fn content_is_dangerous_to_compile(code: ~str) -> bool {
let dangerous_patterns =
~[~"xfail-test"];
for dangerous_patterns.each |p| { if contains(code, *p) { return true; } }
return false;
}
fn content_might_not_converge(code: ~str) -> bool {
let confusing_patterns =
~[~"xfail-test",
~"xfail-pretty",
~"self", // crazy rules enforced by parser not typechecker?
~"spawn", // precedence issues?
~"bind", // precedence issues?
~" be ", // don't want to replace its child with a non-call:
// "Non-call expression in tail call"
~"\n\n\n\n\n" // https://github.com/mozilla/rust/issues/850
];
for confusing_patterns.each |p| { if contains(code, *p) { return true; } }
return false;
}
fn file_might_not_converge(filename: &Path) -> bool {
let confusing_files = ~[
~"expr-alt.rs", // pretty-printing "(a = b) = c"
// vs "a = b = c" and wrapping
~"block-arg-in-ternary.rs", // wrapping
~"move-3-unique.rs", // 0 becomes (0), but both seem reasonable. wtf?
~"move-3.rs" // 0 becomes (0), but both seem reasonable. wtf?
];
for confusing_files.each |f| {
if contains(filename.to_str(), *f) {
return true;
}
}
return false;
}
fn check_roundtrip_convergence(code: @~str, maxIters: uint) {
let mut i = 0u;
let mut newv = code;
let mut oldv = code;
while i < maxIters {
oldv = newv;
if content_might_not_converge(*oldv) { return; }
newv = @parse_and_print(oldv);
if oldv == newv { break; }
i += 1u;
}
if oldv == newv {
error!("Converged after %u iterations", i);
} else {
error!("Did not converge after %u iterations!", i);
write_file(&Path("round-trip-a.rs"), *oldv);
write_file(&Path("round-trip-b.rs"), *newv);
run::run_program(~"diff",
~[~"-w", ~"-u", ~"round-trip-a.rs",
~"round-trip-b.rs"]);
fail ~"Mismatch";
}
}
fn check_convergence(files: &[Path]) {
error!("pp convergence tests: %u files", vec::len(files));
for files.each |file| {
if !file_might_not_converge(file) {
let s = @result::get(&io::read_whole_file_str(file));
if !content_might_not_converge(*s) {
error!("pp converge: %s", file.to_str());
// Change from 7u to 2u once
// https://github.com/mozilla/rust/issues/850 is fixed
check_roundtrip_convergence(s, 7u);
}
}
}
}
fn check_variants(files: &[Path], cx: context) {
for files.each |file| {
if cx.mode == tm_converge &&
file_might_not_converge(file) {
error!("Skipping convergence test based on\
file_might_not_converge");
loop;
}
let s = @result::get(&io::read_whole_file_str(file));
if contains(*s, ~"#") {
loop; // Macros are confusing
}
if cx.mode == tm_converge && content_might_not_converge(*s) {
loop;
}
if cx.mode == tm_run && content_is_dangerous_to_compile(*s) {
loop;
}
let file_str = file.to_str();
log(error, ~"check_variants: " + file_str);
let sess = parse::new_parse_sess(option::None);
let crate =
parse::parse_crate_from_source_str(
file_str,
s, ~[], sess);
io::with_str_reader(*s, |rdr| {
error!("%s",
as_str(|a| pprust::print_crate(
sess.cm,
// Assuming no token_trees
syntax::parse::token::mk_fake_ident_interner(),
sess.span_diagnostic,
crate,
file_str,
rdr, a,
pprust::no_ann(),
false)))
});
check_variants_of_ast(*crate, sess.cm, file, cx);
}
}
fn main() {
let args = os::args();
if vec::len(args) != 2u {
error!("usage: %s <testdir>", args[0]);
return;
}
let mut files = ~[];
let root = Path(args[1]);
find_rust_files(&mut files, &root);
error!("== check_convergence ==");
check_convergence(files);
error!("== check_variants: converge ==");
check_variants(files, { mode: tm_converge });
error!("== check_variants: run ==");
check_variants(files, { mode: tm_run });
error!("Fuzzer done");
}
// Local Variables:
// mode: rust;
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
// -*- rust -*-
#[link(name = "rustc",
vers = "0.5",
uuid = "0ce89b41-2f92-459e-bbc1-8f5fe32f16cf",
url = "https://github.com/mozilla/rust/tree/master/src/rustc")];
#[comment = "The Rust compiler"];
#[license = "MIT"];
#[crate_type = "lib"];
#[no_core];
#[legacy_modes];
#[legacy_exports];
#[allow(vecs_implicitly_copyable)];
#[allow(non_camel_case_types)];
#[allow(deprecated_mode)];
#[allow(deprecated_pattern)];
extern mod core(vers = "0.5");
extern mod std(vers = "0.5");
extern mod syntax(vers = "0.5");
use core::*;
/*
Alternate names for some modules.
I am using this to help extract metadata into its own crate. In metadata.rs
it redefines all these modules in order to gate access from metadata to the
rest of the compiler, then uses these to access the original implementation.
*/
use util_ = util;
use lib_ = lib;
use driver_ = driver;
use middle_ = middle;
use back_ = back;
mod middle {
#[legacy_exports];
mod trans {
#[legacy_exports];
#[legacy_exports]
#[path = "middle/trans/inline.rs"]
mod inline;
#[legacy_exports]
#[path = "middle/trans/monomorphize.rs"]
mod monomorphize;
#[legacy_exports]
#[path = "middle/trans/controlflow.rs"]
mod controlflow;
#[legacy_exports]
#[path = "middle/trans/glue.rs"]
mod glue;
#[legacy_exports]
#[path = "middle/trans/datum.rs"]
mod datum;
#[legacy_exports]
#[path = "middle/trans/callee.rs"]
mod callee;
#[legacy_exports]
#[path = "middle/trans/expr.rs"]
mod expr;
#[legacy_exports]
#[path = "middle/trans/common.rs"]
mod common;
#[legacy_exports]
#[path = "middle/trans/consts.rs"]
mod consts;
#[legacy_exports]
#[path = "middle/trans/type_of.rs"]
mod type_of;
#[legacy_exports]
#[path = "middle/trans/build.rs"]
mod build;
#[legacy_exports]
#[path = "middle/trans/base.rs"]
mod base;
#[legacy_exports]
#[path = "middle/trans/alt.rs"]
mod alt;
#[legacy_exports]
#[path = "middle/trans/uniq.rs"]
mod uniq;
#[legacy_exports]
#[path = "middle/trans/closure.rs"]
mod closure;
#[legacy_exports]
#[path = "middle/trans/tvec.rs"]
mod tvec;
#[legacy_exports]
#[path = "middle/trans/meth.rs"]
mod meth;
#[legacy_exports]
#[path = "middle/trans/foreign.rs"]
mod foreign;
#[legacy_exports]
#[path = "middle/trans/reflect.rs"]
mod reflect;
#[legacy_exports]
#[path = "middle/trans/shape.rs"]
mod shape;
#[legacy_exports]
#[path = "middle/trans/debuginfo.rs"]
mod debuginfo;
#[legacy_exports]
#[path = "middle/trans/type_use.rs"]
mod type_use;
#[legacy_exports]
#[path = "middle/trans/reachable.rs"]
mod reachable;
#[path = "middle/trans/machine.rs"]
mod machine;
#[path = "middle/trans/deriving.rs"]
mod deriving;
}
#[legacy_exports]
#[path = "middle/ty.rs"]
mod ty;
#[legacy_exports]
#[path = "middle/resolve.rs"]
mod resolve;
#[path = "middle/typeck.rs"]
#[merge = "middle/typeck/mod.rs"]
pub mod typeck;
#[legacy_exports]
#[path = "middle/check_loop.rs"]
mod check_loop;
#[legacy_exports]
#[path = "middle/check_alt.rs"]
mod check_alt;
#[legacy_exports]
#[path = "middle/check_const.rs"]
mod check_const;
#[legacy_exports]
#[path = "middle/lint.rs"]
mod lint;
#[path = "middle/borrowck.rs"]
#[merge = "middle/borrowck/mod.rs"]
mod borrowck;
#[legacy_exports]
#[path = "middle/mem_categorization.rs"]
mod mem_categorization;
#[legacy_exports]
#[path = "middle/liveness.rs"]
mod liveness;
#[legacy_exports]
#[path = "middle/kind.rs"]
mod kind;
#[legacy_exports]
#[path = "middle/freevars.rs"]
mod freevars;
#[legacy_exports]
#[path = "middle/capture.rs"]
mod capture;
#[legacy_exports]
#[path = "middle/pat_util.rs"]
mod pat_util;
#[legacy_exports]
#[path = "middle/region.rs"]
mod region;
#[legacy_exports]
#[path = "middle/const_eval.rs"]
mod const_eval;
#[legacy_exports]
#[path = "middle/astencode.rs"]
mod astencode;
#[legacy_exports]
#[path = "middle/lang_items.rs"]
mod lang_items;
#[legacy_exports]
#[path = "middle/privacy.rs"]
mod privacy;
}
mod front {
#[legacy_exports];
#[legacy_exports]
#[path = "front/config.rs"]
mod config;
#[legacy_exports]
#[path = "front/test.rs"]
mod test;
#[legacy_exports]
#[path = "front/core_inject.rs"]
mod core_inject;
#[legacy_exports]
#[path = "front/intrinsic_inject.rs"]
mod intrinsic_inject;
}
mod back {
#[legacy_exports];
#[legacy_exports]
#[path = "back/link.rs"]
mod link;
#[legacy_exports]
#[path = "back/abi.rs"]
mod abi;
#[legacy_exports]
#[path = "back/upcall.rs"]
mod upcall;
#[legacy_exports]
#[path = "back/x86.rs"]
mod x86;
#[legacy_exports]
#[path = "back/x86_64.rs"]
mod x86_64;
#[legacy_exports]
#[path = "back/rpath.rs"]
mod rpath;
#[legacy_exports]
#[path = "back/target_strs.rs"]
mod target_strs;
}
#[merge = "metadata/mod.rs"]
mod metadata;
#[merge = "driver/mod.rs"]
mod driver;
mod util {
#[legacy_exports];
#[legacy_exports]
#[path = "util/common.rs"]
mod common;
#[legacy_exports]
#[path = "util/ppaux.rs"]
mod ppaux;
}
mod lib {
#[legacy_exports];
#[legacy_exports]
#[path = "lib/llvm.rs"]
mod llvm;
}
use result::{Ok, Err};
use io::ReaderUtil;
use std::getopts;
use std::map::HashMap;
use getopts::{opt_present};
use getopts::groups;
use syntax::codemap;
use syntax::diagnostic;
use driver::driver::{host_triple, optgroups, early_error,
str_input, file_input, build_session_options,
build_session, build_configuration, parse_pretty,
pp_mode, pretty_print_input, list_metadata,
compile_input};
use driver::session;
use middle::lint;
fn version(argv0: &str) {
let mut vers = ~"unknown version";
let env_vers = env!("CFG_VERSION");
if env_vers.len() != 0 { vers = env_vers; }
io::println(fmt!("%s %s", argv0, vers));
io::println(fmt!("host: %s", host_triple()));
}
fn usage(argv0: &str) {
let message = fmt!("Usage: %s [OPTIONS] INPUT", argv0);
io::println(groups::usage(message, optgroups()) +
~"Additional help:
-W help Print 'lint' options and default settings
-Z help Print internal options for debugging rustc
");
}
fn describe_warnings() {
io::println(fmt!("
Available lint options:
-W <foo> Warn about <foo>
-A <foo> Allow <foo>
-D <foo> Deny <foo>
-F <foo> Forbid <foo> (deny, and deny all overrides)
"));
let lint_dict = lint::get_lint_dict();
let mut max_key = 0;
for lint_dict.each_key |k| { max_key = uint::max(k.len(), max_key); }
fn padded(max: uint, s: &str) -> ~str {
str::from_bytes(vec::from_elem(max - s.len(), ' ' as u8)) + s
}
io::println(fmt!("\nAvailable lint checks:\n"));
io::println(fmt!(" %s %7.7s %s",
padded(max_key, ~"name"), ~"default", ~"meaning"));
io::println(fmt!(" %s %7.7s %s\n",
padded(max_key, ~"----"), ~"-------", ~"-------"));
for lint_dict.each |k, v| {
let k = str::replace(k, ~"_", ~"-");
io::println(fmt!(" %s %7.7s %s",
padded(max_key, k),
match v.default {
lint::allow => ~"allow",
lint::warn => ~"warn",
lint::deny => ~"deny",
lint::forbid => ~"forbid"
},
v.desc));
}
io::println(~"");
}
fn describe_debug_flags() {
io::println(fmt!("\nAvailable debug options:\n"));
for session::debugging_opts_map().each |pair| {
let (name, desc, _) = *pair;
io::println(fmt!(" -Z %-20s -- %s", name, desc));
}
}
fn run_compiler(args: &~[~str], demitter: diagnostic::emitter) {
// Don't display log spew by default. Can override with RUST_LOG.
logging::console_off();
let mut args = *args;
let binary = args.shift();
if args.is_empty() { usage(binary); return; }
let matches =
match getopts::groups::getopts(args, optgroups()) {
Ok(m) => m,
Err(f) => {
early_error(demitter, getopts::fail_str(f))
}
};
if opt_present(matches, ~"h") || opt_present(matches, ~"help") {
usage(binary);
return;
}
let lint_flags = vec::append(getopts::opt_strs(matches, ~"W"),
getopts::opt_strs(matches, ~"warn"));
if lint_flags.contains(&~"help") {
describe_warnings();
return;
}
if getopts::opt_strs(matches, ~"Z").contains(&~"help") {
describe_debug_flags();
return;
}
if opt_present(matches, ~"v") || opt_present(matches, ~"version") {
version(binary);
return;
}
let input = match vec::len(matches.free) {
0u => early_error(demitter, ~"no input filename given"),
1u => {
let ifile = matches.free[0];
if ifile == ~"-" {
let src = str::from_bytes(io::stdin().read_whole_stream());
str_input(src)
} else {
file_input(Path(ifile))
}
}
_ => early_error(demitter, ~"multiple input filenames provided")
};
let sopts = build_session_options(binary, matches, demitter);
let sess = build_session(sopts, demitter);
let odir = getopts::opt_maybe_str(matches, ~"out-dir");
let odir = odir.map(|o| Path(*o));
let ofile = getopts::opt_maybe_str(matches, ~"o");
let ofile = ofile.map(|o| Path(*o));
let cfg = build_configuration(sess, binary, input);
let pretty =
option::map(&getopts::opt_default(matches, ~"pretty",
~"normal"),
|a| parse_pretty(sess, *a) );
match pretty {
Some::<pp_mode>(ppm) => {
pretty_print_input(sess, cfg, input, ppm);
return;
}
None::<pp_mode> => {/* continue */ }
}
let ls = opt_present(matches, ~"ls");
if ls {
match input {
file_input(ifile) => {
list_metadata(sess, &ifile, io::stdout());
}
str_input(_) => {
early_error(demitter, ~"can not list metadata for stdin");
}
}
return;
}
compile_input(sess, cfg, input, &odir, &ofile);
}
enum monitor_msg {
fatal,
done,
}
impl monitor_msg : cmp::Eq {
#[cfg(stage0)]
pure fn eq(other: &monitor_msg) -> bool {
(self as uint) == ((*other) as uint)
}
#[cfg(stage1)]
#[cfg(stage2)]
pure fn eq(&self, other: &monitor_msg) -> bool {
((*self) as uint) == ((*other) as uint)
}
#[cfg(stage0)]
pure fn ne(other: &monitor_msg) -> bool { !self.eq(other) }
#[cfg(stage1)]
#[cfg(stage2)]
pure fn ne(&self, other: &monitor_msg) -> bool { !(*self).eq(other) }
}
/*
This is a sanity check that any failure of the compiler is performed
through the diagnostic module and reported properly - we shouldn't be calling
plain-old-fail on any execution path that might be taken. Since we have
console logging off by default, hitting a plain fail statement would make the
compiler silently exit, which would be terrible.
This method wraps the compiler in a subtask and injects a function into the
diagnostic emitter which records when we hit a fatal error. If the task
fails without recording a fatal error then we've encountered a compiler
bug and need to present an error.
*/
fn monitor(+f: fn~(diagnostic::emitter)) {
let p = comm::Port();
let ch = comm::Chan(&p);
match do task::try |move f| {
// The 'diagnostics emitter'. Every error, warning, etc. should
// go through this function.
let demitter = fn@(cmsp: Option<(@codemap::CodeMap, codemap::span)>,
msg: &str, lvl: diagnostic::level) {
if lvl == diagnostic::fatal {
comm::send(ch, fatal);
}
diagnostic::emit(cmsp, msg, lvl);
};
struct finally {
ch: comm::Chan<monitor_msg>,
drop { comm::send(self.ch, done); }
}
let _finally = finally { ch: ch };
f(demitter)
} {
result::Ok(_) => { /* fallthrough */ }
result::Err(_) => {
// Task failed without emitting a fatal diagnostic
if comm::recv(p) == done {
diagnostic::emit(
None,
diagnostic::ice_msg(~"unexpected failure"),
diagnostic::error);
for [
~"the compiler hit an unexpected failure path. \
this is a bug",
~"try running with RUST_LOG=rustc=1,::rt::backtrace \
to get further details and report the results \
to github.com/mozilla/rust/issues"
].each |note| {
diagnostic::emit(None, *note, diagnostic::note)
}
}
// Fail so the process returns a failure code
fail;
}
}
}
fn main() {
let mut args = os::args();
do monitor |move args, demitter| {
run_compiler(&args, demitter);
}
}
// Local Variables:
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
#[legacy_exports];
#[legacy_exports]
mod driver;
#[legacy_exports]
mod session;
......@@ -357,7 +357,6 @@ fn make_crate(with_bin: bool, with_lib: bool) -> @ast::crate {
if with_bin { attrs += ~[make_crate_type_attr(~"bin")]; }
if with_lib { attrs += ~[make_crate_type_attr(~"lib")]; }
@ast_util::respan(ast_util::dummy_sp(), {
directives: ~[],
module: {view_items: ~[], items: ~[]},
attrs: attrs,
config: ~[]
......
#[legacy_exports];
export encoder;
export creader;
export cstore;
export csearch;
export common;
export decoder;
export tyencode;
export tydecode;
export loader;
export filesearch;
#[legacy_exports]
mod common;
#[legacy_exports]
mod tyencode;
#[legacy_exports]
mod tydecode;
#[legacy_exports]
mod encoder;
#[legacy_exports]
mod decoder;
#[legacy_exports]
mod creader;
#[legacy_exports]
mod cstore;
#[legacy_exports]
mod csearch;
#[legacy_exports]
mod loader;
#[legacy_exports]
mod filesearch;
#[legacy_exports];
#[legacy_exports]
mod check_loans;
#[legacy_exports]
mod gather_loans;
#[legacy_exports]
mod loan;
#[legacy_exports]
mod preserve;
#[legacy_exports]
mod alt;
#[legacy_exports]
mod vtable;
#[legacy_exports]
mod writeback;
#[legacy_exports]
mod regionmanip;
#[legacy_exports]
mod regionck;
#[legacy_exports]
mod demand;
#[legacy_exports]
pub mod method;
#[legacy_exports];
#[legacy_exports]
mod assignment;
#[legacy_exports]
mod combine;
#[legacy_exports]
mod glb;
#[legacy_exports]
mod integral;
mod floating;
#[legacy_exports]
mod lattice;
#[legacy_exports]
mod lub;
#[legacy_exports]
mod region_inference;
#[legacy_exports]
mod resolve;
#[legacy_exports]
mod sub;
#[legacy_exports]
mod to_str;
#[legacy_exports]
mod unify;
#[legacy_exports];
#[legacy_exports]
#[merge = "check/mod.rs"]
pub mod check;
#[legacy_exports]
mod rscope;
#[legacy_exports]
mod astconv;
#[merge = "infer/mod.rs"]
mod infer;
#[legacy_exports]
mod collect;
#[legacy_exports]
mod coherence;
mod deriving;
// DIVERT
// -*- rust -*-
#[link(name = "rustc",
......
......@@ -52,3 +52,95 @@ mod page_pass;
mod sectionalize_pass;
mod escape_pass;
mod prune_private_pass;
use doc::ItemUtils;
use doc::Item;
use pass::Pass;
use config::Config;
fn main() {
let args = os::args();
if args.contains(&~"-h") || args.contains(&~"--help") {
config::usage();
return;
}
let config = match config::parse_config(args) {
Ok(config) => config,
Err(err) => {
io::println(fmt!("error: %s", err));
return;
}
};
run(config);
}
/// Runs rustdoc over the given file
fn run(config: Config) {
let source_file = config.input_crate;
// Create an AST service from the source code
do astsrv::from_file(source_file.to_str()) |srv| {
// Just time how long it takes for the AST to become available
do time(~"wait_ast") {
do astsrv::exec(srv) |_ctxt| { }
};
// Extract the initial doc tree from the AST. This contains
// just names and node ids.
let doc = time(~"extract", || {
let default_name = source_file;
extract::from_srv(srv, default_name.to_str())
});
// Refine and publish the document
pass::run_passes(srv, doc, ~[
// Generate type and signature strings
tystr_pass::mk_pass(),
// Record the full paths to various nodes
path_pass::mk_pass(),
// Extract the docs attributes and attach them to doc nodes
attr_pass::mk_pass(),
// Perform various text escaping
escape_pass::mk_pass(),
// Remove things marked doc(hidden)
prune_hidden_pass::mk_pass(),
// Remove things that are private
// XXX enable this after 'export' is removed in favor of 'pub'
// prune_private_pass::mk_pass(),
// Extract brief documentation from the full descriptions
desc_to_brief_pass::mk_pass(),
// Massage the text to remove extra indentation
unindent_pass::mk_pass(),
// Split text into multiple sections according to headers
sectionalize_pass::mk_pass(),
// Trim extra spaces from text
trim_pass::mk_pass(),
// Sort items by name
sort_item_name_pass::mk_pass(),
// Sort items again by kind
sort_item_type_pass::mk_pass(),
// Create indexes appropriate for markdown
markdown_index_pass::mk_pass(config),
// Break the document into pages if required by the
// output format
page_pass::mk_pass(config.output_style),
// Render
markdown_pass::mk_pass(
markdown_writer::make_writer_factory(config)
)
]);
}
}
fn time<T>(what: ~str, f: fn() -> T) -> T {
let start = std::time::precise_time_s();
let rv = f();
let end = std::time::precise_time_s();
info!("time: %3.3f s %s", end - start, what);
move rv
}
use doc::ItemUtils;
use doc::Item;
use pass::Pass;
use config::Config;
fn main() {
let args = os::args();
if args.contains(&~"-h") || args.contains(&~"--help") {
config::usage();
return;
}
let config = match config::parse_config(args) {
Ok(config) => config,
Err(err) => {
io::println(fmt!("error: %s", err));
return;
}
};
run(config);
}
/// Runs rustdoc over the given file
fn run(config: Config) {
let source_file = config.input_crate;
// Create an AST service from the source code
do astsrv::from_file(source_file.to_str()) |srv| {
// Just time how long it takes for the AST to become available
do time(~"wait_ast") {
do astsrv::exec(srv) |_ctxt| { }
};
// Extract the initial doc tree from the AST. This contains
// just names and node ids.
let doc = time(~"extract", || {
let default_name = source_file;
extract::from_srv(srv, default_name.to_str())
});
// Refine and publish the document
pass::run_passes(srv, doc, ~[
// Generate type and signature strings
tystr_pass::mk_pass(),
// Record the full paths to various nodes
path_pass::mk_pass(),
// Extract the docs attributes and attach them to doc nodes
attr_pass::mk_pass(),
// Perform various text escaping
escape_pass::mk_pass(),
// Remove things marked doc(hidden)
prune_hidden_pass::mk_pass(),
// Remove things that are private
// XXX enable this after 'export' is removed in favor of 'pub'
// prune_private_pass::mk_pass(),
// Extract brief documentation from the full descriptions
desc_to_brief_pass::mk_pass(),
// Massage the text to remove extra indentation
unindent_pass::mk_pass(),
// Split text into multiple sections according to headers
sectionalize_pass::mk_pass(),
// Trim extra spaces from text
trim_pass::mk_pass(),
// Sort items by name
sort_item_name_pass::mk_pass(),
// Sort items again by kind
sort_item_type_pass::mk_pass(),
// Create indexes appropriate for markdown
markdown_index_pass::mk_pass(config),
// Break the document into pages if required by the
// output format
page_pass::mk_pass(config.output_style),
// Render
markdown_pass::mk_pass(
markdown_writer::make_writer_factory(config)
)
]);
}
}
fn time<T>(what: ~str, f: fn() -> T) -> T {
let start = std::time::precise_time_s();
let rv = f();
let end = std::time::precise_time_s();
info!("time: %3.3f s %s", end - start, what);
move rv
}
......@@ -31,3 +31,385 @@ use syntax::ast_util::*;
use parse::token;
use print::{pp, pprust};
use std::rl;
/**
* A structure shared across REPL instances for storing history
* such as statements and view items. I wish the AST was sendable.
*/
struct Repl {
prompt: ~str,
binary: ~str,
running: bool,
view_items: ~str,
stmts: ~str
}
// Action to do after reading a :command
enum CmdAction {
action_none,
action_run_line(~str),
}
/// A utility function that hands off a pretty printer to a callback.
fn with_pp(intr: @token::ident_interner,
cb: fn(pprust::ps, io::Writer)) -> ~str {
do io::with_str_writer |writer| {
let pp = pprust::rust_printer(writer, intr);
cb(pp, writer);
pp::eof(pp.s);
}
}
/**
 * The AST (and the rest of rustc) is not sendable yet, so recorded
 * things are printed to strings. This is a terrible hack that needs
 * changes to rustc in order to be removed. It is unfortunately
* going to cause the REPL to regress in parser performance,
* because it has to parse the statements and view_items on each
* input.
*/
fn record(repl: Repl, blk: @ast::blk, intr: @token::ident_interner) -> Repl {
let view_items = if blk.node.view_items.len() > 0 {
let new_view_items = do with_pp(intr) |pp, writer| {
for blk.node.view_items.each |view_item| {
pprust::print_view_item(pp, *view_item);
writer.write_line(~"");
}
};
debug!("new view items %s", new_view_items);
repl.view_items + "\n" + new_view_items
} else { repl.view_items };
let stmts = if blk.node.stmts.len() > 0 {
let new_stmts = do with_pp(intr) |pp, writer| {
for blk.node.stmts.each |stmt| {
match stmt.node {
ast::stmt_decl(*) => {
pprust::print_stmt(pp, **stmt);
writer.write_line(~"");
}
ast::stmt_expr(expr, _) | ast::stmt_semi(expr, _) => {
match expr.node {
ast::expr_assign(*) |
ast::expr_assign_op(*) |
ast::expr_swap(*) => {
pprust::print_stmt(pp, **stmt);
writer.write_line(~"");
}
_ => {}
}
}
}
}
};
debug!("new stmts %s", new_stmts);
repl.stmts + "\n" + new_stmts
} else { repl.stmts };
Repl{
view_items: view_items,
stmts: stmts,
.. repl
}
}
/// Run an input string in a Repl, returning the new Repl.
fn run(repl: Repl, input: ~str) -> Repl {
let options: @session::options = @{
crate_type: session::unknown_crate,
binary: repl.binary,
addl_lib_search_paths: ~[os::getcwd()],
.. *session::basic_options()
};
debug!("building driver input");
let head = include_str!("wrapper.rs");
let foot = fmt!("%s\nfn main() {\n%s\n\nprint({\n%s\n})\n}",
repl.view_items, repl.stmts, input);
let wrapped = driver::str_input(head + foot);
debug!("inputting %s", head + foot);
debug!("building a driver session");
let sess = driver::build_session(options, diagnostic::emit);
debug!("building driver configuration");
let cfg = driver::build_configuration(sess,
repl.binary,
wrapped);
debug!("parsing");
let mut crate = driver::parse_input(sess, cfg, wrapped);
let mut opt = None;
for crate.node.module.items.each |item| {
match item.node {
ast::item_fn(_, _, _, blk) => {
if item.ident == sess.ident_of(~"main") {
opt = blk.node.expr;
}
}
_ => {}
}
}
let blk = match opt.get().node {
ast::expr_call(_, exprs, _) => {
match exprs[0].node {
ast::expr_block(blk) => @blk,
_ => fail
}
}
_ => fail
};
debug!("configuration");
crate = front::config::strip_unconfigured_items(crate);
debug!("maybe building test harness");
crate = front::test::modify_for_testing(sess, crate);
debug!("expansion");
crate = syntax::ext::expand::expand_crate(sess.parse_sess,
sess.opts.cfg,
crate);
debug!("intrinsic injection");
crate = front::intrinsic_inject::inject_intrinsic(sess, crate);
debug!("core injection");
crate = front::core_inject::maybe_inject_libcore_ref(sess, crate);
debug!("building lint settings table");
lint::build_settings_crate(sess, crate);
debug!("ast indexing");
let ast_map = syntax::ast_map::map_crate(sess.diagnostic(), *crate);
debug!("external crate/lib resolution");
creader::read_crates(sess.diagnostic(), *crate, sess.cstore,
sess.filesearch,
session::sess_os_to_meta_os(sess.targ_cfg.os),
sess.opts.static, sess.parse_sess.interner);
debug!("language item collection");
let lang_items = middle::lang_items::collect_language_items(crate, sess);
debug!("resolution");
let {def_map: def_map,
exp_map2: exp_map2,
trait_map: trait_map} = middle::resolve::resolve_crate(sess,
lang_items,
crate);
debug!("freevar finding");
let freevars = freevars::annotate_freevars(def_map, crate);
debug!("region_resolution");
let region_map = middle::region::resolve_crate(sess, def_map, crate);
debug!("region paramaterization inference");
let rp_set = middle::region::determine_rp_in_crate(sess, ast_map,
def_map, crate);
debug!("typechecking");
let ty_cx = ty::mk_ctxt(sess, def_map, ast_map, freevars,
region_map, rp_set, move lang_items, crate);
let (method_map, vtable_map) = typeck::check_crate(ty_cx, trait_map,
crate);
debug!("const marking");
middle::const_eval::process_crate(crate, def_map, ty_cx);
debug!("const checking");
middle::check_const::check_crate(sess, crate, ast_map, def_map,
method_map, ty_cx);
debug!("privacy checking");
middle::privacy::check_crate(ty_cx, &method_map, crate);
debug!("loop checking");
middle::check_loop::check_crate(ty_cx, crate);
debug!("alt checking");
middle::check_alt::check_crate(ty_cx, crate);
debug!("liveness checking");
let last_use_map = middle::liveness::check_crate(ty_cx,
method_map, crate);
debug!("borrow checking");
let (root_map, mutbl_map) = middle::borrowck::check_crate(ty_cx,
method_map,
last_use_map,
crate);
debug!("kind checking");
kind::check_crate(ty_cx, method_map, last_use_map, crate);
debug!("lint checking");
lint::check_crate(ty_cx, crate);
let maps = {mutbl_map: mutbl_map,
root_map: root_map,
last_use_map: last_use_map,
method_map: method_map,
vtable_map: vtable_map};
debug!("translation");
let (llmod, _) = trans::base::trans_crate(sess, crate, ty_cx,
~path::from_str("<repl>"),
exp_map2, maps);
let pm = llvm::LLVMCreatePassManager();
debug!("executing jit");
back::link::jit::exec(sess, pm, llmod, 0, false);
llvm::LLVMDisposePassManager(pm);
debug!("recording input into repl history");
record(repl, blk, sess.parse_sess.interner)
}
/// Tries to get a line from rl after outputting a prompt. Returns
/// None if no input was read (e.g. EOF was reached).
fn get_line(prompt: ~str) -> Option<~str> {
let result = unsafe { rl::read(prompt) };
if result.is_none() {
return None;
}
let line = result.get();
unsafe { rl::add_history(line) };
return Some(line);
}
/// Run a command, e.g. :clear, :exit, etc.
fn run_cmd(repl: &mut Repl, _in: io::Reader, _out: io::Writer,
cmd: ~str, _args: ~[~str]) -> CmdAction {
let mut action = action_none;
match cmd {
~"exit" => repl.running = false,
~"clear" => {
repl.view_items = ~"";
repl.stmts = ~"";
// XXX: Win32 version of linenoise can't do this
//rl::clear();
}
~"help" => {
io::println(
~":{\\n ..lines.. \\n:}\\n - execute multiline command\n" +
~":clear - clear the screen\n" +
~":exit - exit from the repl\n" +
~":help - show this message");
}
~"{" => {
let mut multiline_cmd = ~"";
let mut end_multiline = false;
while (!end_multiline) {
match get_line(~"rusti| ") {
None => fail ~"unterminated multiline command :{ .. :}",
Some(line) => {
if str::trim(line) == ~":}" {
end_multiline = true;
} else {
multiline_cmd += line + ~"\n";
}
}
}
}
action = action_run_line(multiline_cmd);
}
_ => io::println(~"unknown cmd: " + cmd)
}
return action;
}
/// Executes a line of input, which may either be rust code or a
/// :command. Returns a new Repl if it has changed.
fn run_line(repl: &mut Repl, in: io::Reader, out: io::Writer, line: ~str)
-> Option<Repl> {
if line.starts_with(~":") {
let full = line.substr(1, line.len() - 1);
let split = str::words(full);
let len = split.len();
if len > 0 {
let cmd = split[0];
if !cmd.is_empty() {
let args = if len > 1 {
do vec::view(split, 1, len - 1).map |arg| {
*arg
}
} else { ~[] };
match run_cmd(repl, in, out, cmd, args) {
action_none => { }
action_run_line(multiline_cmd) => {
if !multiline_cmd.is_empty() {
return run_line(repl, in, out, multiline_cmd);
}
}
}
return None;
}
}
}
let r = *repl;
let result = do task::try |copy r| {
run(r, line)
};
if result.is_ok() {
return Some(result.get());
}
return None;
}
pub fn main() {
let args = os::args();
let in = io::stdin();
let out = io::stdout();
let mut repl = Repl {
prompt: ~"rusti> ",
binary: args[0],
running: true,
view_items: ~"",
stmts: ~""
};
unsafe {
do rl::complete |line, suggest| {
if line.starts_with(":") {
suggest(~":clear");
suggest(~":exit");
suggest(~":help");
}
}
}
while repl.running {
match get_line(repl.prompt) {
None => break,
Some(line) => {
if line.is_empty() {
io::println(~"()");
loop;
}
match run_line(&mut repl, in, out, line) {
Some(new_repl) => repl = new_repl,
None => { }
}
}
}
}
}
/**
* A structure shared across REPL instances for storing history
* such as statements and view items. I wish the AST was sendable.
*/
struct Repl {
prompt: ~str,
binary: ~str,
running: bool,
view_items: ~str,
stmts: ~str
}
// Action to do after reading a :command
enum CmdAction {
action_none,
action_run_line(~str),
}
/// A utility function that hands off a pretty printer to a callback.
fn with_pp(intr: @token::ident_interner,
cb: fn(pprust::ps, io::Writer)) -> ~str {
do io::with_str_writer |writer| {
let pp = pprust::rust_printer(writer, intr);
cb(pp, writer);
pp::eof(pp.s);
}
}
/**
 * The AST (and the rest of rustc) is not sendable yet, so recorded
 * things are printed to strings. This is a terrible hack that needs
 * changes to rustc in order to be removed. It is unfortunately
* going to cause the REPL to regress in parser performance,
* because it has to parse the statements and view_items on each
* input.
*/
fn record(repl: Repl, blk: @ast::blk, intr: @token::ident_interner) -> Repl {
let view_items = if blk.node.view_items.len() > 0 {
let new_view_items = do with_pp(intr) |pp, writer| {
for blk.node.view_items.each |view_item| {
pprust::print_view_item(pp, *view_item);
writer.write_line(~"");
}
};
debug!("new view items %s", new_view_items);
repl.view_items + "\n" + new_view_items
} else { repl.view_items };
let stmts = if blk.node.stmts.len() > 0 {
let new_stmts = do with_pp(intr) |pp, writer| {
for blk.node.stmts.each |stmt| {
match stmt.node {
ast::stmt_decl(*) => {
pprust::print_stmt(pp, **stmt);
writer.write_line(~"");
}
ast::stmt_expr(expr, _) | ast::stmt_semi(expr, _) => {
match expr.node {
ast::expr_assign(*) |
ast::expr_assign_op(*) |
ast::expr_swap(*) => {
pprust::print_stmt(pp, **stmt);
writer.write_line(~"");
}
_ => {}
}
}
}
}
};
debug!("new stmts %s", new_stmts);
repl.stmts + "\n" + new_stmts
} else { repl.stmts };
Repl{
view_items: view_items,
stmts: stmts,
.. repl
}
}
/// Run an input string in a Repl, returning the new Repl.
fn run(repl: Repl, input: ~str) -> Repl {
let options: @session::options = @{
crate_type: session::unknown_crate,
binary: repl.binary,
addl_lib_search_paths: ~[os::getcwd()],
.. *session::basic_options()
};
debug!("building driver input");
let head = include_str!("wrapper.rs");
let foot = fmt!("%s\nfn main() {\n%s\n\nprint({\n%s\n})\n}",
repl.view_items, repl.stmts, input);
let wrapped = driver::str_input(head + foot);
debug!("inputting %s", head + foot);
debug!("building a driver session");
let sess = driver::build_session(options, diagnostic::emit);
debug!("building driver configuration");
let cfg = driver::build_configuration(sess,
repl.binary,
wrapped);
debug!("parsing");
let mut crate = driver::parse_input(sess, cfg, wrapped);
let mut opt = None;
for crate.node.module.items.each |item| {
match item.node {
ast::item_fn(_, _, _, blk) => {
if item.ident == sess.ident_of(~"main") {
opt = blk.node.expr;
}
}
_ => {}
}
}
let blk = match opt.get().node {
ast::expr_call(_, exprs, _) => {
match exprs[0].node {
ast::expr_block(blk) => @blk,
_ => fail
}
}
_ => fail
};
debug!("configuration");
crate = front::config::strip_unconfigured_items(crate);
debug!("maybe building test harness");
crate = front::test::modify_for_testing(sess, crate);
debug!("expansion");
crate = syntax::ext::expand::expand_crate(sess.parse_sess,
sess.opts.cfg,
crate);
debug!("intrinsic injection");
crate = front::intrinsic_inject::inject_intrinsic(sess, crate);
debug!("core injection");
crate = front::core_inject::maybe_inject_libcore_ref(sess, crate);
debug!("building lint settings table");
lint::build_settings_crate(sess, crate);
debug!("ast indexing");
let ast_map = syntax::ast_map::map_crate(sess.diagnostic(), *crate);
debug!("external crate/lib resolution");
creader::read_crates(sess.diagnostic(), *crate, sess.cstore,
sess.filesearch,
session::sess_os_to_meta_os(sess.targ_cfg.os),
sess.opts.static, sess.parse_sess.interner);
debug!("language item collection");
let lang_items = middle::lang_items::collect_language_items(crate, sess);
debug!("resolution");
let {def_map: def_map,
exp_map2: exp_map2,
trait_map: trait_map} = middle::resolve::resolve_crate(sess,
lang_items,
crate);
debug!("freevar finding");
let freevars = freevars::annotate_freevars(def_map, crate);
debug!("region_resolution");
let region_map = middle::region::resolve_crate(sess, def_map, crate);
debug!("region paramaterization inference");
let rp_set = middle::region::determine_rp_in_crate(sess, ast_map,
def_map, crate);
debug!("typechecking");
let ty_cx = ty::mk_ctxt(sess, def_map, ast_map, freevars,
region_map, rp_set, move lang_items, crate);
let (method_map, vtable_map) = typeck::check_crate(ty_cx, trait_map,
crate);
debug!("const marking");
middle::const_eval::process_crate(crate, def_map, ty_cx);
debug!("const checking");
middle::check_const::check_crate(sess, crate, ast_map, def_map,
method_map, ty_cx);
debug!("privacy checking");
middle::privacy::check_crate(ty_cx, &method_map, crate);
debug!("loop checking");
middle::check_loop::check_crate(ty_cx, crate);
debug!("alt checking");
middle::check_alt::check_crate(ty_cx, crate);
debug!("liveness checking");
let last_use_map = middle::liveness::check_crate(ty_cx,
method_map, crate);
debug!("borrow checking");
let (root_map, mutbl_map) = middle::borrowck::check_crate(ty_cx,
method_map,
last_use_map,
crate);
debug!("kind checking");
kind::check_crate(ty_cx, method_map, last_use_map, crate);
debug!("lint checking");
lint::check_crate(ty_cx, crate);
let maps = {mutbl_map: mutbl_map,
root_map: root_map,
last_use_map: last_use_map,
method_map: method_map,
vtable_map: vtable_map};
debug!("translation");
let (llmod, _) = trans::base::trans_crate(sess, crate, ty_cx,
~path::from_str("<repl>"),
exp_map2, maps);
let pm = llvm::LLVMCreatePassManager();
debug!("executing jit");
back::link::jit::exec(sess, pm, llmod, 0, false);
llvm::LLVMDisposePassManager(pm);
debug!("recording input into repl history");
record(repl, blk, sess.parse_sess.interner)
}
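// Illustrative only (not part of the patch): for a hypothetical first
// input of `5 + 5`, with `view_items` and `stmts` still empty, the
// source handed to the driver is the contents of wrapper.rs followed
// (roughly) by:
//
//     fn main() {
//
//
//     print({
//     5 + 5
//     })
//     }
//
// The block inside the `print` call is what gets pulled back out of
// the parsed crate and handed to `record` above.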
/// Tries to get a line from rl after outputting a prompt. Returns
/// None if no input was read (e.g. EOF was reached).
fn get_line(prompt: ~str) -> Option<~str> {
let result = unsafe { rl::read(prompt) };
if result.is_none() {
return None;
}
let line = result.get();
unsafe { rl::add_history(line) };
return Some(line);
}
/// Run a command, e.g. :clear, :exit, etc.
fn run_cmd(repl: &mut Repl, _in: io::Reader, _out: io::Writer,
cmd: ~str, _args: ~[~str]) -> CmdAction {
let mut action = action_none;
match cmd {
~"exit" => repl.running = false,
~"clear" => {
repl.view_items = ~"";
repl.stmts = ~"";
// XXX: Win32 version of linenoise can't do this
//rl::clear();
}
~"help" => {
io::println(
~":{\\n ..lines.. \\n:}\\n - execute multiline command\n" +
~":clear - clear the screen\n" +
~":exit - exit from the repl\n" +
~":help - show this message");
}
~"{" => {
let mut multiline_cmd = ~"";
let mut end_multiline = false;
while (!end_multiline) {
match get_line(~"rusti| ") {
None => fail ~"unterminated multiline command :{ .. :}",
Some(line) => {
if str::trim(line) == ~":}" {
end_multiline = true;
} else {
multiline_cmd += line + ~"\n";
}
}
}
}
action = action_run_line(multiline_cmd);
}
_ => io::println(~"unknown cmd: " + cmd)
}
return action;
}
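// Example interaction with these commands (an assumed transcript,
// using the prompts defined in `main` and in the multiline branch
// above):
//
//     rusti> :help
//     rusti> :{
//     rusti| let x = 1;
//     rusti| x + 1
//     rusti| :}
//     rusti> :exit
//
// `:{` keeps reading lines until one consisting of `:}` and then runs
// the collected buffer as a single input.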
/// Executes a line of input, which may either be rust code or a
/// :command. Returns a new Repl if it has changed.
fn run_line(repl: &mut Repl, in: io::Reader, out: io::Writer, line: ~str)
-> Option<Repl> {
if line.starts_with(~":") {
let full = line.substr(1, line.len() - 1);
let split = str::words(full);
let len = split.len();
if len > 0 {
let cmd = split[0];
if !cmd.is_empty() {
let args = if len > 1 {
do vec::view(split, 1, len - 1).map |arg| {
*arg
}
} else { ~[] };
match run_cmd(repl, in, out, cmd, args) {
action_none => { }
action_run_line(multiline_cmd) => {
if !multiline_cmd.is_empty() {
return run_line(repl, in, out, multiline_cmd);
}
}
}
return None;
}
}
}
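    // Run the input in a separate task so that a failure while
    // compiling or executing it (which calls `fail`) only kills that
    // task, leaving the REPL itself running.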
let r = *repl;
let result = do task::try |copy r| {
run(r, line)
};
if result.is_ok() {
return Some(result.get());
}
return None;
}
pub fn main() {
let args = os::args();
let in = io::stdin();
let out = io::stdout();
let mut repl = Repl {
prompt: ~"rusti> ",
binary: args[0],
running: true,
view_items: ~"",
stmts: ~""
};
unsafe {
do rl::complete |line, suggest| {
if line.starts_with(":") {
suggest(~":clear");
suggest(~":exit");
suggest(~":help");
}
}
}
while repl.running {
match get_line(repl.prompt) {
None => break,
Some(line) => {
if line.is_empty() {
io::println(~"()");
loop;
}
match run_line(&mut repl, in, out, line) {
Some(new_repl) => repl = new_repl,
None => { }
}
}
}
}
}
#[link(name = "syntax",
vers = "0.5",
uuid = "9311401b-d6ea-4cd9-a1d9-61f89499c645")];
#[crate_type = "lib"];
#[no_core];
#[legacy_modes];
#[legacy_exports];
#[allow(vecs_implicitly_copyable)];
#[allow(non_camel_case_types)];
#[allow(deprecated_mode)];
#[allow(deprecated_pattern)];
extern mod core(vers = "0.5");
extern mod std(vers = "0.5");
use core::*;
#[legacy_exports]
mod attr;
#[legacy_exports]
mod diagnostic;
mod codemap;
#[legacy_exports]
mod ast;
#[legacy_exports]
mod ast_util;
#[legacy_exports]
mod ast_map;
#[legacy_exports]
mod visit;
#[legacy_exports]
mod fold;
#[legacy_exports]
mod util {
#[legacy_exports];
#[legacy_exports]
#[path = "util/interner.rs"]
mod interner;
}
#[merge = "parse/mod.rs"]
mod parse;
mod print {
#[legacy_exports];
#[legacy_exports]
#[path = "print/pp.rs"]
mod pp;
#[legacy_exports]
#[path = "print/pprust.rs"]
mod pprust;
}
mod ext {
#[legacy_exports];
#[legacy_exports]
#[path = "ext/base.rs"]
mod base;
#[legacy_exports]
#[path = "ext/expand.rs"]
mod expand;
#[legacy_exports]
#[path = "ext/qquote.rs"]
mod qquote;
#[path = "ext/quote.rs"]
mod quote;
#[path = "ext/deriving.rs"]
mod deriving;
#[legacy_exports]
#[path = "ext/build.rs"]
mod build;
mod tt {
#[legacy_exports];
#[legacy_exports]
#[path = "ext/tt/transcribe.rs"]
mod transcribe;
#[legacy_exports]
#[path = "ext/tt/macro_parser.rs"]
mod macro_parser;
#[legacy_exports]
#[path = "ext/tt/macro_rules.rs"]
mod macro_rules;
}
#[legacy_exports]
#[path = "ext/simplext.rs"]
mod simplext;
#[legacy_exports]
#[path = "ext/fmt.rs"]
mod fmt;
#[legacy_exports]
#[path = "ext/env.rs"]
mod env;
#[legacy_exports]
#[path = "ext/concat_idents.rs"]
mod concat_idents;
#[legacy_exports]
#[path = "ext/ident_to_str.rs"]
mod ident_to_str;
#[legacy_exports]
#[path = "ext/log_syntax.rs"]
mod log_syntax;
#[legacy_exports]
#[path = "ext/auto_serialize.rs"]
mod auto_serialize;
#[legacy_exports]
#[path = "ext/source_util.rs"]
mod source_util;
#[legacy_exports]
#[path = "ext/pipes.rs"]
#[merge = "ext/pipes/mod.rs"]
mod pipes;
#[legacy_exports]
#[path = "ext/trace_macros.rs"]
mod trace_macros;
}
......@@ -406,25 +406,10 @@ impl def : cmp::Eq {
type crate = spanned<crate_>;
type crate_ =
{directives: ~[@crate_directive],
module: _mod,
{module: _mod,
attrs: ~[attribute],
config: crate_cfg};
enum crate_directive_ {
cdir_src_mod(visibility, ident, ~[attribute]),
cdir_dir_mod(visibility, ident, ~[@crate_directive], ~[attribute]),
// NB: cdir_view_item is *not* processed by the rest of the compiler, the
// attached view_items are sunk into the crate's module during parsing,
// and processed (resolved, imported, etc.) there. This enum-variant
// exists only to preserve the view items in order in case we decide to
// pretty-print crates in the future.
cdir_view_item(@view_item),
}
type crate_directive = spanned<crate_directive_>;
type meta_item = spanned<meta_item_>;
#[auto_serialize]
......
#[legacy_exports]
mod ast_builder;
#[legacy_exports]
mod parse_proto;
#[legacy_exports]
mod pipec;
#[legacy_exports]
mod proto;
#[legacy_exports]
mod check;
#[legacy_exports]
mod liveness;
......@@ -21,7 +21,6 @@
trait ast_fold {
fn fold_crate(crate) -> crate;
fn fold_crate_directive(&&v: @crate_directive) -> @crate_directive;
fn fold_view_item(&&v: @view_item) -> @view_item;
fn fold_foreign_item(&&v: @foreign_item) -> @foreign_item;
fn fold_item(&&v: @item) -> Option<@item>;
......@@ -51,8 +50,6 @@ trait ast_fold {
type ast_fold_precursor = @{
//unlike the others, item_ is non-trivial
fold_crate: fn@(crate_, span, ast_fold) -> (crate_, span),
fold_crate_directive: fn@(crate_directive_, span,
ast_fold) -> (crate_directive_, span),
fold_view_item: fn@(view_item_, ast_fold) -> view_item_,
fold_foreign_item: fn@(&&v: @foreign_item, ast_fold) -> @foreign_item,
fold_item: fn@(&&v: @item, ast_fold) -> Option<@item>,
......@@ -150,29 +147,12 @@ fn noop_fold_crate(c: crate_, fld: ast_fold) -> crate_ {
let fold_attribute = |x| fold_attribute_(x, fld);
return {
directives: vec::map(c.directives, |x| fld.fold_crate_directive(*x)),
module: fld.fold_mod(c.module),
attrs: vec::map(c.attrs, |x| fold_attribute(*x)),
config: vec::map(c.config, |x| fold_meta_item(*x))
};
}
fn noop_fold_crate_directive(cd: crate_directive_, fld: ast_fold) ->
crate_directive_ {
return match cd {
cdir_src_mod(vis, id, attrs) => {
cdir_src_mod(vis, fld.fold_ident(id),
/* FIXME (#2543) */ copy attrs)
}
cdir_dir_mod(vis, id, cds, attrs) => {
cdir_dir_mod(vis, fld.fold_ident(id),
vec::map(cds, |x| fld.fold_crate_directive(*x)),
/* FIXME (#2543) */ copy attrs)
}
cdir_view_item(vi) => cdir_view_item(fld.fold_view_item(vi)),
}
}
fn noop_fold_view_item(vi: view_item_, _fld: ast_fold) -> view_item_ {
return /* FIXME (#2543) */ copy vi;
}
......@@ -635,7 +615,6 @@ fn noop_map_exprs(f: fn@(&&v: @expr) -> @expr, es: ~[@expr]) -> ~[@expr] {
fn default_ast_fold() -> ast_fold_precursor {
return @{fold_crate: wrap(noop_fold_crate),
fold_crate_directive: wrap(noop_fold_crate_directive),
fold_view_item: noop_fold_view_item,
fold_foreign_item: noop_fold_foreign_item,
fold_item: noop_fold_item,
......@@ -666,12 +645,6 @@ fn fold_crate(c: crate) -> crate {
let (n, s) = self.fold_crate(c.node, c.span, self as ast_fold);
return {node: n, span: self.new_span(s)};
}
fn fold_crate_directive(&&c: @crate_directive) -> @crate_directive {
let (n, s) = self.fold_crate_directive(c.node, c.span,
self as ast_fold);
return @{node: n,
span: self.new_span(s)};
}
fn fold_view_item(&&x: @view_item) ->
@view_item {
return @{node: self.fold_view_item(x.node, self as ast_fold),
......
//! The main parser interface
#[legacy_exports];
export parser;
export common;
export lexer;
export token;
export comments;
export prec;
export classify;
export attr;
export parse_sess;
export new_parse_sess, new_parse_sess_special_handler;
export next_node_id;
......@@ -51,40 +62,6 @@ fn new_parse_sess_special_handler(sh: span_handler, cm: @codemap::CodeMap)
fn parse_crate_from_file(input: &Path, cfg: ast::crate_cfg,
sess: parse_sess) -> @ast::crate {
if input.filetype() == Some(~".rc") {
parse_crate_from_crate_file(input, cfg, sess)
} else if input.filetype() == Some(~".rs") {
parse_crate_from_source_file(input, cfg, sess)
} else {
sess.span_diagnostic.handler().fatal(~"unknown input file type: " +
input.to_str())
}
}
fn parse_crate_from_crate_file(input: &Path, cfg: ast::crate_cfg,
sess: parse_sess) -> @ast::crate {
let p = new_crate_parser_from_file(sess, cfg, input);
let lo = p.span.lo;
let prefix = input.dir_path();
let leading_attrs = p.parse_inner_attrs_and_next();
let { inner: crate_attrs, next: first_cdir_attr } = leading_attrs;
let cdirs = p.parse_crate_directives(token::EOF, first_cdir_attr);
let cx = @{sess: sess, cfg: /* FIXME (#2543) */ copy p.cfg};
let companionmod = input.filestem().map(|s| Path(*s));
let (m, attrs) = eval::eval_crate_directives_to_mod(
cx, cdirs, &prefix, &companionmod);
let mut hi = p.span.hi;
p.expect(token::EOF);
p.abort_if_errors();
return @ast_util::respan(ast_util::mk_sp(lo, hi),
{directives: cdirs,
module: m,
attrs: vec::append(crate_attrs, attrs),
config: /* FIXME (#2543) */ copy p.cfg});
}
fn parse_crate_from_source_file(input: &Path, cfg: ast::crate_cfg,
sess: parse_sess) -> @ast::crate {
let p = new_crate_parser_from_file(sess, cfg, input);
let r = p.parse_crate_mod(cfg);
return r;
......
#[legacy_exports]
mod lexer;
#[legacy_exports]
mod parser;
#[legacy_exports]
mod token;
#[legacy_exports]
mod comments;
#[legacy_exports]
mod attr;
#[legacy_exports]
/// Common routines shared by parser mods
#[legacy_exports]
mod common;
/// Functions dealing with operator precedence
#[legacy_exports]
mod prec;
/// Routines the parser uses to classify AST nodes
#[legacy_exports]
mod classify;
/// Reporting obsolete syntax
#[legacy_exports]
mod obsolete;
......@@ -27,9 +27,8 @@
bind_by_ref, bind_by_implicit_ref, bind_by_value, bind_by_move,
bitand, bitor, bitxor, blk, blk_check_mode, box, by_copy,
by_move, by_ref, by_val, capture_clause,
capture_item, cdir_dir_mod, cdir_src_mod, cdir_view_item,
class_immutable, class_mutable,
crate, crate_cfg, crate_directive, decl, decl_item, decl_local,
capture_item, class_immutable, class_mutable,
crate, crate_cfg, decl, decl_item, decl_local,
default_blk, deref, div, enum_def, enum_variant_kind, expl, expr,
expr_, expr_addr_of, expr_match, expr_again, expr_assert,
expr_assign, expr_assign_op, expr_binary, expr_block, expr_break,
......@@ -2966,15 +2965,7 @@ fn parse_item_mod(outer_attrs: ~[ast::attribute]) -> item_info {
let info_ = if self.token == token::SEMI {
self.bump();
// This mod is in an external file. Let's go get it!
let eval_ctx = @{
sess: self.sess,
cfg: self.cfg
};
let prefix = Path(self.sess.cm.span_to_filename(copy self.span));
let prefix = prefix.dir_path();
let (m, attrs) = eval::eval_src_mod(eval_ctx, &prefix,
outer_attrs,
id, id_span);
let (m, attrs) = self.eval_src_mod(id, outer_attrs, id_span);
(id, m, Some(move attrs))
} else {
self.expect(token::LBRACE);
......@@ -2990,20 +2981,18 @@ fn parse_item_mod(outer_attrs: ~[ast::attribute]) -> item_info {
// its contents
match ::attr::first_attr_value_str_by_name(outer_attrs, ~"merge") {
Some(path) => {
let eval_ctx = @{
sess: self.sess,
cfg: self.cfg
};
let prefix = Path(self.sess.cm.span_to_filename(copy self.span));
let prefix = Path(
self.sess.cm.span_to_filename(copy self.span));
let prefix = prefix.dir_path();
let path = Path(path);
let (new_mod_item, new_attrs) = eval::eval_src_mod_from_path(
eval_ctx, &prefix, &path, ~[], id_span);
let (new_mod_item, new_attrs) = self.eval_src_mod_from_path(
prefix, path, ~[], id_span);
let (main_id, main_mod_item, main_attrs) = info_;
let main_attrs = main_attrs.get();
let (main_mod, new_mod) = match (main_mod_item, new_mod_item) {
let (main_mod, new_mod) =
match (main_mod_item, new_mod_item) {
(item_mod(m), item_mod(n)) => (m, n),
_ => self.bug(~"parsed mod item should be mod")
};
......@@ -3019,6 +3008,51 @@ fn parse_item_mod(outer_attrs: ~[ast::attribute]) -> item_info {
}
}
fn eval_src_mod(id: ast::ident,
outer_attrs: ~[ast::attribute],
id_sp: span) -> (ast::item_, ~[ast::attribute]) {
let prefix = Path(self.sess.cm.span_to_filename(copy self.span));
let prefix = prefix.dir_path();
let default_path = self.sess.interner.get(id) + ~".rs";
let file_path = match ::attr::first_attr_value_str_by_name(
outer_attrs, ~"path") {
Some(d) => d,
None => default_path
};
let file_path = Path(file_path);
self.eval_src_mod_from_path(prefix, file_path,
outer_attrs, id_sp)
}
fn eval_src_mod_from_path(prefix: Path, path: Path,
outer_attrs: ~[ast::attribute],
id_sp: span
) -> (ast::item_, ~[ast::attribute]) {
let full_path = if path.is_absolute {
path
} else {
prefix.push_many(path.components)
};
let p0 =
new_sub_parser_from_file(self.sess, self.cfg,
&full_path, id_sp);
let inner_attrs = p0.parse_inner_attrs_and_next();
let mod_attrs = vec::append(outer_attrs, inner_attrs.inner);
let first_item_outer_attrs = inner_attrs.next;
let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs);
return (ast::item_mod(m0), mod_attrs);
fn cdir_path_opt(default: ~str, attrs: ~[ast::attribute]) -> ~str {
match ::attr::first_attr_value_str_by_name(attrs, ~"path") {
Some(d) => d,
None => default
}
}
}
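    // Illustrative only (not part of the patch): with these two
    // methods, a plain `mod attr;` in the syntax crate root loads
    // attr.rs from the directory of the file containing the `mod`
    // item, while `#[path = "print/pp.rs"] mod pp;` loads print/pp.rs
    // relative to that same directory; an absolute `path` attribute is
    // used as given.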
fn parse_item_foreign_fn( +attrs: ~[attribute]) -> @foreign_item {
let lo = self.span.lo;
let vis = self.parse_visibility();
......@@ -3699,8 +3733,7 @@ fn parse_crate_mod(_cfg: crate_cfg) -> @crate {
let first_item_outer_attrs = crate_attrs.next;
let m = self.parse_mod_items(token::EOF, first_item_outer_attrs);
return @spanned(lo, self.span.lo,
{directives: ~[],
module: m,
{module: m,
attrs: crate_attrs.inner,
config: self.cfg});
}
......@@ -3711,78 +3744,6 @@ fn parse_str() -> @~str {
_ => self.fatal(~"expected string literal")
}
}
// Logic for parsing crate files (.rc)
//
// Each crate file is a sequence of directives.
//
// Each directive imperatively extends its environment with 0 or more
// items.
fn parse_crate_directive(first_outer_attr: ~[attribute]) ->
crate_directive {
// Collect the next attributes
let outer_attrs = vec::append(first_outer_attr,
self.parse_outer_attributes());
// In a crate file outer attributes are only going to apply to mods
let expect_mod = vec::len(outer_attrs) > 0u;
let lo = self.span.lo;
let vis = self.parse_visibility();
if expect_mod || self.is_keyword(~"mod") {
self.expect_keyword(~"mod");
let id = self.parse_ident();
match self.token {
// mod x = "foo.rs";
token::SEMI => {
let mut hi = self.span.hi;
self.bump();
return spanned(lo, hi, cdir_src_mod(vis, id, outer_attrs));
}
// mod x = "foo_dir" { ...directives... }
token::LBRACE => {
self.bump();
let inner_attrs = self.parse_inner_attrs_and_next();
let mod_attrs = vec::append(outer_attrs, inner_attrs.inner);
let next_outer_attr = inner_attrs.next;
let cdirs = self.parse_crate_directives(token::RBRACE,
next_outer_attr);
let mut hi = self.span.hi;
self.expect(token::RBRACE);
return spanned(lo, hi,
cdir_dir_mod(vis, id, cdirs, mod_attrs));
}
_ => self.unexpected()
}
} else if self.is_view_item() {
let vi = self.parse_view_item(outer_attrs, vis);
return spanned(lo, vi.span.hi, cdir_view_item(vi));
}
return self.fatal(~"expected crate directive");
}
fn parse_crate_directives(term: token::Token,
first_outer_attr: ~[attribute]) ->
~[@crate_directive] {
// This is pretty ugly. If we have an outer attribute then we can't
// accept seeing the terminator next, so if we do see it then fail the
// same way parse_crate_directive would
if vec::len(first_outer_attr) > 0u && self.token == term {
self.expect_keyword(~"mod");
}
let mut cdirs: ~[@crate_directive] = ~[];
let mut first_outer_attr = first_outer_attr;
while self.token != term {
let cdir = @self.parse_crate_directive(first_outer_attr);
cdirs.push(cdir);
first_outer_attr = ~[];
}
return cdirs;
}
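    // Illustrative only (not part of the patch): the grammar removed
    // here accepted companion crate files (.rc) made of directives,
    // e.g. a hypothetical mycrate.rc:
    //
    //     use std;       // a view item, sunk into the crate module
    //     mod util;      // cdir_src_mod: pulls in util.rs
    //     mod back {     // cdir_dir_mod: a directory module
    //         mod link;  //   pulls in back/link.rs by default
    //     }
    //
    // After this commit the same layout is expressed with ordinary
    // `mod` items (plus `#[path]` / `#[merge]` attributes where
    // needed), handled by `parse_item_mod` and `eval_src_mod` above.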
}
impl restriction : cmp::Eq {
......
// DIVERT
#[link(name = "syntax",
vers = "0.5",
uuid = "9311401b-d6ea-4cd9-a1d9-61f89499c645")];
......@@ -54,8 +56,6 @@ mod parse {
export classify;
export attr;
#[legacy_exports]
mod eval;
#[legacy_exports]
mod lexer;
#[legacy_exports]
......
......@@ -93,16 +93,6 @@ fn visit_crate<E>(c: crate, e: E, v: vt<E>) {
v.visit_mod(c.node.module, c.span, crate_node_id, e, v);
}
fn visit_crate_directive<E>(cd: @crate_directive, e: E, v: vt<E>) {
match cd.node {
cdir_src_mod(_, _, _) => (),
cdir_dir_mod(_, _, cdirs, _) => for cdirs.each |cdir| {
visit_crate_directive(*cdir, e, v);
},
cdir_view_item(vi) => v.visit_view_item(vi, e, v),
}
}
fn visit_mod<E>(m: _mod, _sp: span, _id: node_id, e: E, v: vt<E>) {
for m.view_items.each |vi| { v.visit_view_item(*vi, e, v); }
for m.items.each |i| { v.visit_item(*i, e, v); }
......
// error-pattern: expected `mod`
// error-pattern: expected item
#[attr = "val"];
#[attr = "val"] // Unterminated
use g = x::f;
export g;
\ No newline at end of file
fn f() -> ~str { ~"ralph" }
\ No newline at end of file
use g = x::f;
export g;
\ No newline at end of file
fn f() -> ~str { ~"nelson" }
\ No newline at end of file
// xfail-win32 don't understand what's wrong
// Test that crates and directory modules can contain code
#[legacy_exports];
#[path = "companionmod-src"]
mod a {
#[legacy_exports];
mod b {
#[legacy_exports];
#[legacy_exports]
mod x;
}
#[path = "d"]
mod c {
#[legacy_exports];
#[legacy_exports]
mod x;
}
}
// This isn't really xfailed; it's used by the companionmod.rc test
// xfail-test
#[legacy_exports];
fn main() {
assert a::b::g() == ~"ralph";
assert a::c::g() == ~"nelson";
}
\ No newline at end of file
// These are attributes of the foo module
#[legacy_exports];
#[attr1 = "val"];
#[attr2 = "val"];
// Attributes of the following function
#[attr1 = "val"]
#[attr2 = "val"]
fn main() { }
#[name = "crate-attributes"];
#[vers = "1.0"];
#[attr1]
#[path = "crate-attributes-src"]
mod m {
#[legacy_exports];
#[attr_inner];
#[attr2]
#[legacy_exports]
mod foo;
}
fn plus(x: T, y: T) -> T {
x + y
}
\ No newline at end of file
#[path = "module-polymorphism-files"]
mod my_float {
#[legacy_exports];
// The type of the float
use inst::T;
// Define T as float
#[path = "inst_float.rs"]
#[legacy_exports]
mod inst;
// Add in the implementation from a single source file
#[path = "template.rs"]
#[legacy_exports]
mod template;
}
#[path = "module-polymorphism-files"]
mod my_f64 {
#[legacy_exports];
use inst::T;
// Define T as f64
#[path = "inst_f64.rs"]
#[legacy_exports]
mod inst;
// Use the implementation for the same source file!
#[path = "template.rs"]
#[legacy_exports]
mod template;
}
#[path = "module-polymorphism-files"]
mod my_f32 {
#[legacy_exports];
use inst::T;
#[path = "inst_f32.rs"]
#[legacy_exports]
mod inst;
#[path = "template.rs"]
#[legacy_exports]
mod template;
}
// This isn't really xfailed; it's used by the
// module-polymorphism.rc test
// xfail-test
fn main() {
// All of these functions are defined by a single module
// source file but instantiated for different types
assert my_float::template::plus(1.0f, 2.0f) == 3.0f;
assert my_f64::template::plus(1.0f64, 2.0f64) == 3.0f64;
assert my_f32::template::plus(1.0f32, 2.0f32) == 3.0f32;
}
\ No newline at end of file
fn plus(x: T, y: T) -> T {
x + y
}
\ No newline at end of file
#[path = "module-polymorphism2-files"]
mod mystd {
#[legacy_exports];
#[path = "float-template"]
mod float {
#[legacy_exports];
// The type of the float
use inst::T;
// Unfortunate
use template::*;
export plus;
// Define T as float
#[path = "inst_float.rs"]
#[legacy_exports]
mod inst;
// Add in the implementation from a single source file
#[path = "template.rs"]
#[legacy_exports]
mod template;
}
#[path = "float-template"]
mod f64 {
#[legacy_exports];
use inst::T;
// Unfortunate
use template::*;
export plus;
// Define T as f64
#[path = "inst_f64.rs"]
#[legacy_exports]
mod inst;
// Use the implementation for the same source file!
#[path = "template.rs"]
#[legacy_exports]
mod template;
}
#[path = "float-template"]
mod f32 {
#[legacy_exports];
use inst::T;
// Unfortunate
use template::*;
export plus;
#[path = "inst_f32.rs"]
#[legacy_exports]
mod inst;
#[path = "template.rs"]
#[legacy_exports]
mod template;
}
}
\ No newline at end of file
// This isn't really xfailed; it's used by the
// module-polymorphism.rc test
// xfail-test
fn main() {
// All of these functions are defined by a single module
// source file but instantiated for different types
assert mystd::float::plus(1.0f, 2.0f) == 3.0f;
assert mystd::f64::plus(1.0f64, 2.0f64) == 3.0f64;
assert mystd::f32::plus(1.0f32, 2.0f32) == 3.0f32;
}
\ No newline at end of file
fn plus(x: T, y: T) -> T {
x + y
}
\ No newline at end of file
// Use one template module to specify in a single file the implementation
// of functions for multiple types
#[path = "module-polymorphism3-files"]
mod mystd {
#[legacy_exports];
// The template is specified in float-template.rs
#[path = "float-template"]
mod float {
#[legacy_exports];
// The type of the float
use inst::T;
// Define T as appropriate for platform
#[path = "inst_float.rs"]
mod inst;
}
// Use the same template
#[path = "float-template"]
mod f64 {
#[legacy_exports];
use inst::T;
// Define T as f64
#[path = "inst_f64.rs"]
mod inst;
}
#[path = "float-template"]
mod f32 {
#[legacy_exports];
use inst::T;
#[path = "inst_f32.rs"]
mod inst;
}
}
\ No newline at end of file
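// Illustrative only (not part of this diff): the inst_*.rs files
// referenced above are not shown, but from the comments each
// presumably consists of little more than a type alias (for example,
// `type T = f64;` in inst_f64.rs), so template.rs, which is written
// against `T`, is compiled once per instantiation with a different
// concrete float type.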
// This isn't really xfailed; it's used by the
// module-polymorphism.rc test
// xfail-test
fn main() {
// All of these functions are defined by a single module
// source file but instantiated for different types
assert mystd::float::plus(1.0f, 2.0f) == 3.0f;
assert mystd::f64::plus(1.0f64, 2.0f64) == 3.0f64;
assert mystd::f32::plus(1.0f32, 2.0f32) == 3.0f32;
}
\ No newline at end of file
type T = cat;
enum cat {
howlycat,
meowlycat
}
fn animal() -> ~str { ~"cat" }
fn talk(c: cat) -> ~str {
match c {
howlycat => { ~"howl" }
meowlycat => { ~"meow" }
}
}
type T = dog;
enum dog {
dog
}
fn animal() -> ~str { ~"dog" }
fn talk(_d: dog) -> ~str { ~"woof" }
trait says {
fn says() -> ~str;
}
impl T: says {
// 'animal' and 'talk' functions are implemented by the module
// instantiating the talky trait. They are 'abstract'
fn says() -> ~str {
animal() + ~" says '" + talk(self) + ~"'"
}
}
#[path = "module-polymorphism4-files"]
mod cat {
#[legacy_exports];
use inst::*;
#[path = "cat.rs"]
#[legacy_exports]
mod inst;
#[path = "trait_.rs"]
#[legacy_exports]
mod trait_;
}
#[path = "module-polymorphism4-files"]
mod dog {
#[legacy_exports];
use inst::*;
#[path = "dog.rs"]
#[legacy_exports]
mod inst;
#[path = "trait_.rs"]
#[legacy_exports]
mod trait_;
}
// This isn't really xfailed; it's used by the
// module-polymorphism.rc test
// xfail-test
fn main() {
let cat1 = cat::inst::meowlycat;
let cat2 = cat::inst::howlycat;
let dog = dog::inst::dog;
assert cat1.says() == ~"cat says 'meow'";
assert cat2.says() == ~"cat says 'howl'";
assert dog.says() == ~"dog says 'woof'";
}
\ No newline at end of file
fn main() { debug!("hello, multi-file world."); bar::other(); }
#[path = "multi-src"]
mod multi {
#[legacy_exports];
// implicitly #[path = "foo.rs"]
#[legacy_exports]
mod foo;
#[path = "bar.rs"]
#[legacy_exports]
mod bar;
}
mod trait_mix {
#[legacy_exports];
#[path = "trait-mix.rs"]
#[legacy_exports]
mod trait_mix;
#[legacy_exports]
mod u_trait_mix;
}
impl f32: u_trait_mix::num {
pure fn add(&&other: f32) -> f32 { return self + other; }
pure fn sub(&&other: f32) -> f32 { return self - other; }
pure fn mul(&&other: f32) -> f32 { return self * other; }
pure fn div(&&other: f32) -> f32 { return self / other; }
pure fn modulo(&&other: f32) -> f32 { return self % other; }
pure fn neg() -> f32 { return -self; }
pure fn to_int() -> int { return self as int; }
static pure fn from_int(n: int) -> f32 { return n as f32; }
}
/*
It seems that this will fail if I try using it from another crate.
*/
/*
// ICEs if I put this in num -- ???
trait from_int {
}
*/
fn main() {}
trait num {
// FIXME: Trait composition. (#2616)
pure fn add(&&other: self) -> self;
pure fn sub(&&other: self) -> self;
pure fn mul(&&other: self) -> self;
pure fn div(&&other: self) -> self;
pure fn modulo(&&other: self) -> self;
pure fn neg() -> self;
pure fn to_int() -> int;
static pure fn from_int(n: int) -> self;
}