Commit 00a933f9 authored by Alfie John

More deprecating of i/u suffixes in libraries

Parent fe4340ab
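For context, here is a minimal sketch (written in present-day Rust, where the old `u`/`i` literal suffixes no longer exist) of the pattern this commit applies throughout the libraries: drop the redundant suffix and let type inference choose the integer type, keeping an explicit `usize` annotation only where a concrete width is actually required. The variable names below are illustrative and not taken from the diff.

```rust
fn main() {
    // Old style wrote `for _ in 0u..10`; without the suffix the
    // integer type of the range is inferred from how it is used.
    let mut sum = 0;
    for i in 0..10 {
        sum += i;
    }
    assert_eq!(sum, 45);

    // Where a concrete width still matters (e.g. indexing wants `usize`),
    // an explicit annotation replaces the old `u`/`us` suffix.
    let idx: usize = 2;
    let xs = [10, 20, 30];
    assert_eq!(xs[idx], 30);
}
```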
...@@ -39,7 +39,7 @@ ...@@ -39,7 +39,7 @@
//! //!
//! let five = Arc::new(5); //! let five = Arc::new(5);
//! //!
//! for _ in 0u..10 { //! for _ in 0..10 {
//! let five = five.clone(); //! let five = five.clone();
//! //!
//! Thread::spawn(move || { //! Thread::spawn(move || {
...@@ -56,7 +56,7 @@ ...@@ -56,7 +56,7 @@
//! //!
//! let five = Arc::new(Mutex::new(5)); //! let five = Arc::new(Mutex::new(5));
//! //!
//! for _ in 0u..10 { //! for _ in 0..10 {
//! let five = five.clone(); //! let five = five.clone();
//! //!
//! Thread::spawn(move || { //! Thread::spawn(move || {
...@@ -101,7 +101,7 @@ ...@@ -101,7 +101,7 @@
/// let numbers: Vec<_> = (0..100u32).map(|i| i as f32).collect(); /// let numbers: Vec<_> = (0..100u32).map(|i| i as f32).collect();
/// let shared_numbers = Arc::new(numbers); /// let shared_numbers = Arc::new(numbers);
/// ///
/// for _ in 0u..10 { /// for _ in 0..10 {
/// let child_numbers = shared_numbers.clone(); /// let child_numbers = shared_numbers.clone();
/// ///
/// Thread::spawn(move || { /// Thread::spawn(move || {
...@@ -661,7 +661,7 @@ fn manually_share_arc() { ...@@ -661,7 +661,7 @@ fn manually_share_arc() {
#[test] #[test]
fn test_cowarc_clone_make_unique() { fn test_cowarc_clone_make_unique() {
let mut cow0 = Arc::new(75u); let mut cow0 = Arc::new(75);
let mut cow1 = cow0.clone(); let mut cow1 = cow0.clone();
let mut cow2 = cow1.clone(); let mut cow2 = cow1.clone();
...@@ -685,7 +685,7 @@ fn test_cowarc_clone_make_unique() { ...@@ -685,7 +685,7 @@ fn test_cowarc_clone_make_unique() {
#[test] #[test]
fn test_cowarc_clone_unique2() { fn test_cowarc_clone_unique2() {
let mut cow0 = Arc::new(75u); let mut cow0 = Arc::new(75);
let cow1 = cow0.clone(); let cow1 = cow0.clone();
let cow2 = cow1.clone(); let cow2 = cow1.clone();
...@@ -708,7 +708,7 @@ fn test_cowarc_clone_unique2() { ...@@ -708,7 +708,7 @@ fn test_cowarc_clone_unique2() {
#[test] #[test]
fn test_cowarc_clone_weak() { fn test_cowarc_clone_weak() {
let mut cow0 = Arc::new(75u); let mut cow0 = Arc::new(75);
let cow1_weak = cow0.downgrade(); let cow1_weak = cow0.downgrade();
assert!(75 == *cow0); assert!(75 == *cow0);
......
...@@ -30,11 +30,11 @@ fn test_owned_clone() { ...@@ -30,11 +30,11 @@ fn test_owned_clone() {
#[test] #[test]
fn any_move() { fn any_move() {
let a = Box::new(8u) as Box<Any>; let a = Box::new(8us) as Box<Any>;
let b = Box::new(Test) as Box<Any>; let b = Box::new(Test) as Box<Any>;
match a.downcast::<uint>() { match a.downcast::<uint>() {
Ok(a) => { assert!(a == Box::new(8u)); } Ok(a) => { assert!(a == Box::new(8us)); }
Err(..) => panic!() Err(..) => panic!()
} }
match b.downcast::<Test>() { match b.downcast::<Test>() {
...@@ -42,7 +42,7 @@ fn any_move() { ...@@ -42,7 +42,7 @@ fn any_move() {
Err(..) => panic!() Err(..) => panic!()
} }
let a = Box::new(8u) as Box<Any>; let a = Box::new(8) as Box<Any>;
let b = Box::new(Test) as Box<Any>; let b = Box::new(Test) as Box<Any>;
assert!(a.downcast::<Box<Test>>().is_err()); assert!(a.downcast::<Box<Test>>().is_err());
...@@ -51,14 +51,14 @@ fn any_move() { ...@@ -51,14 +51,14 @@ fn any_move() {
#[test] #[test]
fn test_show() { fn test_show() {
let a = Box::new(8u) as Box<Any>; let a = Box::new(8) as Box<Any>;
let b = Box::new(Test) as Box<Any>; let b = Box::new(Test) as Box<Any>;
let a_str = format!("{:?}", a); let a_str = format!("{:?}", a);
let b_str = format!("{:?}", b); let b_str = format!("{:?}", b);
assert_eq!(a_str, "Box<Any>"); assert_eq!(a_str, "Box<Any>");
assert_eq!(b_str, "Box<Any>"); assert_eq!(b_str, "Box<Any>");
static EIGHT: usize = 8us; static EIGHT: usize = 8;
static TEST: Test = Test; static TEST: Test = Test;
let a = &EIGHT as &Any; let a = &EIGHT as &Any;
let b = &TEST as &Any; let b = &TEST as &Any;
......
...@@ -266,12 +266,12 @@ pub fn is_unique<T>(rc: &Rc<T>) -> bool { ...@@ -266,12 +266,12 @@ pub fn is_unique<T>(rc: &Rc<T>) -> bool {
/// ``` /// ```
/// use std::rc::{self, Rc}; /// use std::rc::{self, Rc};
/// ///
/// let x = Rc::new(3u); /// let x = Rc::new(3);
/// assert_eq!(rc::try_unwrap(x), Ok(3u)); /// assert_eq!(rc::try_unwrap(x), Ok(3));
/// ///
/// let x = Rc::new(4u); /// let x = Rc::new(4);
/// let _y = x.clone(); /// let _y = x.clone();
/// assert_eq!(rc::try_unwrap(x), Err(Rc::new(4u))); /// assert_eq!(rc::try_unwrap(x), Err(Rc::new(4)));
/// ``` /// ```
#[inline] #[inline]
#[unstable(feature = "alloc")] #[unstable(feature = "alloc")]
...@@ -300,9 +300,9 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> { ...@@ -300,9 +300,9 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
/// ``` /// ```
/// use std::rc::{self, Rc}; /// use std::rc::{self, Rc};
/// ///
/// let mut x = Rc::new(3u); /// let mut x = Rc::new(3);
/// *rc::get_mut(&mut x).unwrap() = 4u; /// *rc::get_mut(&mut x).unwrap() = 4;
/// assert_eq!(*x, 4u); /// assert_eq!(*x, 4);
/// ///
/// let _y = x.clone(); /// let _y = x.clone();
/// assert!(rc::get_mut(&mut x).is_none()); /// assert!(rc::get_mut(&mut x).is_none());
...@@ -845,7 +845,7 @@ struct Cycle { ...@@ -845,7 +845,7 @@ struct Cycle {
#[test] #[test]
fn is_unique() { fn is_unique() {
let x = Rc::new(3u); let x = Rc::new(3);
assert!(super::is_unique(&x)); assert!(super::is_unique(&x));
let y = x.clone(); let y = x.clone();
assert!(!super::is_unique(&x)); assert!(!super::is_unique(&x));
...@@ -893,21 +893,21 @@ fn test_weak_count() { ...@@ -893,21 +893,21 @@ fn test_weak_count() {
#[test] #[test]
fn try_unwrap() { fn try_unwrap() {
let x = Rc::new(3u); let x = Rc::new(3);
assert_eq!(super::try_unwrap(x), Ok(3u)); assert_eq!(super::try_unwrap(x), Ok(3));
let x = Rc::new(4u); let x = Rc::new(4);
let _y = x.clone(); let _y = x.clone();
assert_eq!(super::try_unwrap(x), Err(Rc::new(4u))); assert_eq!(super::try_unwrap(x), Err(Rc::new(4)));
let x = Rc::new(5u); let x = Rc::new(5);
let _w = x.downgrade(); let _w = x.downgrade();
assert_eq!(super::try_unwrap(x), Err(Rc::new(5u))); assert_eq!(super::try_unwrap(x), Err(Rc::new(5)));
} }
#[test] #[test]
fn get_mut() { fn get_mut() {
let mut x = Rc::new(3u); let mut x = Rc::new(3);
*super::get_mut(&mut x).unwrap() = 4u; *super::get_mut(&mut x).unwrap() = 4;
assert_eq!(*x, 4u); assert_eq!(*x, 4);
let y = x.clone(); let y = x.clone();
assert!(super::get_mut(&mut x).is_none()); assert!(super::get_mut(&mut x).is_none());
drop(y); drop(y);
...@@ -918,7 +918,7 @@ fn get_mut() { ...@@ -918,7 +918,7 @@ fn get_mut() {
#[test] #[test]
fn test_cowrc_clone_make_unique() { fn test_cowrc_clone_make_unique() {
let mut cow0 = Rc::new(75u); let mut cow0 = Rc::new(75);
let mut cow1 = cow0.clone(); let mut cow1 = cow0.clone();
let mut cow2 = cow1.clone(); let mut cow2 = cow1.clone();
...@@ -942,7 +942,7 @@ fn test_cowrc_clone_make_unique() { ...@@ -942,7 +942,7 @@ fn test_cowrc_clone_make_unique() {
#[test] #[test]
fn test_cowrc_clone_unique2() { fn test_cowrc_clone_unique2() {
let mut cow0 = Rc::new(75u); let mut cow0 = Rc::new(75);
let cow1 = cow0.clone(); let cow1 = cow0.clone();
let cow2 = cow1.clone(); let cow2 = cow1.clone();
...@@ -965,7 +965,7 @@ fn test_cowrc_clone_unique2() { ...@@ -965,7 +965,7 @@ fn test_cowrc_clone_unique2() {
#[test] #[test]
fn test_cowrc_clone_weak() { fn test_cowrc_clone_weak() {
let mut cow0 = Rc::new(75u); let mut cow0 = Rc::new(75);
let cow1_weak = cow0.downgrade(); let cow1_weak = cow0.downgrade();
assert!(75 == *cow0); assert!(75 == *cow0);
...@@ -979,7 +979,7 @@ fn test_cowrc_clone_weak() { ...@@ -979,7 +979,7 @@ fn test_cowrc_clone_weak() {
#[test] #[test]
fn test_show() { fn test_show() {
let foo = Rc::new(75u); let foo = Rc::new(75);
assert_eq!(format!("{:?}", foo), "75"); assert_eq!(format!("{:?}", foo), "75");
} }
......
...@@ -493,7 +493,7 @@ fn check_foreign_fn(cx: &Context, decl: &ast::FnDecl) { ...@@ -493,7 +493,7 @@ fn check_foreign_fn(cx: &Context, decl: &ast::FnDecl) {
impl BoxPointers { impl BoxPointers {
fn check_heap_type<'a, 'tcx>(&self, cx: &Context<'a, 'tcx>, fn check_heap_type<'a, 'tcx>(&self, cx: &Context<'a, 'tcx>,
span: Span, ty: Ty<'tcx>) { span: Span, ty: Ty<'tcx>) {
let mut n_uniq = 0u; let mut n_uniq = 0us;
ty::fold_ty(cx.tcx, ty, |t| { ty::fold_ty(cx.tcx, ty, |t| {
match t.sty { match t.sty {
ty::ty_uniq(_) => { ty::ty_uniq(_) => {
......
...@@ -490,7 +490,7 @@ fn with_lint_attrs<F>(&mut self, ...@@ -490,7 +490,7 @@ fn with_lint_attrs<F>(&mut self,
// current dictionary of lint information. Along the way, keep a history // current dictionary of lint information. Along the way, keep a history
// of what we changed so we can roll everything back after invoking the // of what we changed so we can roll everything back after invoking the
// specified closure // specified closure
let mut pushed = 0u; let mut pushed = 0;
for result in gather_attrs(attrs).into_iter() { for result in gather_attrs(attrs).into_iter() {
let v = match result { let v = match result {
......
...@@ -88,7 +88,7 @@ pub fn maybe_find_item<'a>(item_id: ast::NodeId, ...@@ -88,7 +88,7 @@ pub fn maybe_find_item<'a>(item_id: ast::NodeId,
items: rbml::Doc<'a>) -> Option<rbml::Doc<'a>> { items: rbml::Doc<'a>) -> Option<rbml::Doc<'a>> {
fn eq_item(bytes: &[u8], item_id: ast::NodeId) -> bool { fn eq_item(bytes: &[u8], item_id: ast::NodeId) -> bool {
return u64_from_be_bytes( return u64_from_be_bytes(
&bytes[0u..4u], 0u, 4u) as ast::NodeId &bytes[0..4], 0, 4) as ast::NodeId
== item_id; == item_id;
} }
lookup_hash(items, lookup_hash(items,
...@@ -1164,7 +1164,7 @@ fn get_attributes(md: rbml::Doc) -> Vec<ast::Attribute> { ...@@ -1164,7 +1164,7 @@ fn get_attributes(md: rbml::Doc) -> Vec<ast::Attribute> {
let meta_items = get_meta_items(attr_doc); let meta_items = get_meta_items(attr_doc);
// Currently it's only possible to have a single meta item on // Currently it's only possible to have a single meta item on
// an attribute // an attribute
assert_eq!(meta_items.len(), 1u); assert_eq!(meta_items.len(), 1);
let meta_item = meta_items.into_iter().nth(0).unwrap(); let meta_item = meta_items.into_iter().nth(0).unwrap();
attrs.push( attrs.push(
codemap::Spanned { codemap::Spanned {
......
...@@ -1071,7 +1071,7 @@ fn add_to_index(item: &ast::Item, rbml_w: &Encoder, ...@@ -1071,7 +1071,7 @@ fn add_to_index(item: &ast::Item, rbml_w: &Encoder,
encode_name(rbml_w, item.ident.name); encode_name(rbml_w, item.ident.name);
encode_path(rbml_w, path); encode_path(rbml_w, path);
encode_attributes(rbml_w, &item.attrs[]); encode_attributes(rbml_w, &item.attrs[]);
if tps_len > 0u || should_inline(&item.attrs[]) { if tps_len > 0 || should_inline(&item.attrs[]) {
encode_inlined_item(ecx, rbml_w, IIItemRef(item)); encode_inlined_item(ecx, rbml_w, IIItemRef(item));
} }
if tps_len == 0 { if tps_len == 0 {
......
...@@ -487,7 +487,7 @@ fn find_library_crate(&mut self) -> Option<Library> { ...@@ -487,7 +487,7 @@ fn find_library_crate(&mut self) -> Option<Library> {
fn extract_one(&mut self, m: HashMap<Path, PathKind>, flavor: &str, fn extract_one(&mut self, m: HashMap<Path, PathKind>, flavor: &str,
slot: &mut Option<MetadataBlob>) -> Option<(Path, PathKind)> { slot: &mut Option<MetadataBlob>) -> Option<(Path, PathKind)> {
let mut ret = None::<(Path, PathKind)>; let mut ret = None::<(Path, PathKind)>;
let mut error = 0u; let mut error = 0;
if slot.is_some() { if slot.is_some() {
// FIXME(#10786): for an optimization, we only read one of the // FIXME(#10786): for an optimization, we only read one of the
......
...@@ -76,13 +76,13 @@ fn peek(st: &PState) -> char { ...@@ -76,13 +76,13 @@ fn peek(st: &PState) -> char {
fn next(st: &mut PState) -> char { fn next(st: &mut PState) -> char {
let ch = st.data[st.pos] as char; let ch = st.data[st.pos] as char;
st.pos = st.pos + 1u; st.pos = st.pos + 1;
return ch; return ch;
} }
fn next_byte(st: &mut PState) -> u8 { fn next_byte(st: &mut PState) -> u8 {
let b = st.data[st.pos]; let b = st.data[st.pos];
st.pos = st.pos + 1u; st.pos = st.pos + 1;
return b; return b;
} }
...@@ -498,7 +498,7 @@ fn parse_ty_<'a, 'tcx, F>(st: &mut PState<'a, 'tcx>, conv: &mut F) -> Ty<'tcx> w ...@@ -498,7 +498,7 @@ fn parse_ty_<'a, 'tcx, F>(st: &mut PState<'a, 'tcx>, conv: &mut F) -> Ty<'tcx> w
assert_eq!(next(st), '['); assert_eq!(next(st), '[');
let mut params = Vec::new(); let mut params = Vec::new();
while peek(st) != ']' { params.push(parse_ty_(st, conv)); } while peek(st) != ']' { params.push(parse_ty_(st, conv)); }
st.pos = st.pos + 1u; st.pos = st.pos + 1;
return ty::mk_tup(tcx, params); return ty::mk_tup(tcx, params);
} }
'F' => { 'F' => {
...@@ -590,7 +590,7 @@ fn parse_uint(st: &mut PState) -> uint { ...@@ -590,7 +590,7 @@ fn parse_uint(st: &mut PState) -> uint {
loop { loop {
let cur = peek(st); let cur = peek(st);
if cur < '0' || cur > '9' { return n; } if cur < '0' || cur > '9' { return n; }
st.pos = st.pos + 1u; st.pos = st.pos + 1;
n *= 10; n *= 10;
n += (cur as uint) - ('0' as uint); n += (cur as uint) - ('0' as uint);
}; };
...@@ -608,15 +608,15 @@ fn parse_param_space(st: &mut PState) -> subst::ParamSpace { ...@@ -608,15 +608,15 @@ fn parse_param_space(st: &mut PState) -> subst::ParamSpace {
} }
fn parse_hex(st: &mut PState) -> uint { fn parse_hex(st: &mut PState) -> uint {
let mut n = 0u; let mut n = 0;
loop { loop {
let cur = peek(st); let cur = peek(st);
if (cur < '0' || cur > '9') && (cur < 'a' || cur > 'f') { return n; } if (cur < '0' || cur > '9') && (cur < 'a' || cur > 'f') { return n; }
st.pos = st.pos + 1u; st.pos = st.pos + 1;
n *= 16u; n *= 16;
if '0' <= cur && cur <= '9' { if '0' <= cur && cur <= '9' {
n += (cur as uint) - ('0' as uint); n += (cur as uint) - ('0' as uint);
} else { n += 10u + (cur as uint) - ('a' as uint); } } else { n += 10 + (cur as uint) - ('a' as uint); }
}; };
} }
...@@ -686,7 +686,7 @@ fn parse_sig_<'a, 'tcx, F>(st: &mut PState<'a, 'tcx>, conv: &mut F) -> ty::PolyF ...@@ -686,7 +686,7 @@ fn parse_sig_<'a, 'tcx, F>(st: &mut PState<'a, 'tcx>, conv: &mut F) -> ty::PolyF
while peek(st) != ']' { while peek(st) != ']' {
inputs.push(parse_ty_(st, conv)); inputs.push(parse_ty_(st, conv));
} }
st.pos += 1u; // eat the ']' st.pos += 1; // eat the ']'
let variadic = match next(st) { let variadic = match next(st) {
'V' => true, 'V' => true,
'N' => false, 'N' => false,
...@@ -694,7 +694,7 @@ fn parse_sig_<'a, 'tcx, F>(st: &mut PState<'a, 'tcx>, conv: &mut F) -> ty::PolyF ...@@ -694,7 +694,7 @@ fn parse_sig_<'a, 'tcx, F>(st: &mut PState<'a, 'tcx>, conv: &mut F) -> ty::PolyF
}; };
let output = match peek(st) { let output = match peek(st) {
'z' => { 'z' => {
st.pos += 1u; st.pos += 1;
ty::FnDiverging ty::FnDiverging
} }
_ => ty::FnConverging(parse_ty_(st, conv)) _ => ty::FnConverging(parse_ty_(st, conv))
...@@ -706,16 +706,16 @@ fn parse_sig_<'a, 'tcx, F>(st: &mut PState<'a, 'tcx>, conv: &mut F) -> ty::PolyF ...@@ -706,16 +706,16 @@ fn parse_sig_<'a, 'tcx, F>(st: &mut PState<'a, 'tcx>, conv: &mut F) -> ty::PolyF
// Rust metadata parsing // Rust metadata parsing
pub fn parse_def_id(buf: &[u8]) -> ast::DefId { pub fn parse_def_id(buf: &[u8]) -> ast::DefId {
let mut colon_idx = 0u; let mut colon_idx = 0;
let len = buf.len(); let len = buf.len();
while colon_idx < len && buf[colon_idx] != ':' as u8 { colon_idx += 1u; } while colon_idx < len && buf[colon_idx] != ':' as u8 { colon_idx += 1; }
if colon_idx == len { if colon_idx == len {
error!("didn't find ':' when parsing def id"); error!("didn't find ':' when parsing def id");
panic!(); panic!();
} }
let crate_part = &buf[0u..colon_idx]; let crate_part = &buf[0..colon_idx];
let def_part = &buf[colon_idx + 1u..len]; let def_part = &buf[colon_idx + 1..len];
let crate_num = match str::from_utf8(crate_part).ok().and_then(|s| { let crate_num = match str::from_utf8(crate_part).ok().and_then(|s| {
s.parse::<uint>().ok() s.parse::<uint>().ok()
......
...@@ -25,14 +25,14 @@ ...@@ -25,14 +25,14 @@
pub fn check_path_args(tcx: &ty::ctxt, pub fn check_path_args(tcx: &ty::ctxt,
path: &ast::Path, path: &ast::Path,
flags: uint) { flags: uint) {
if (flags & NO_TPS) != 0u { if (flags & NO_TPS) != 0 {
if path.segments.iter().any(|s| s.parameters.has_types()) { if path.segments.iter().any(|s| s.parameters.has_types()) {
span_err!(tcx.sess, path.span, E0109, span_err!(tcx.sess, path.span, E0109,
"type parameters are not allowed on this type"); "type parameters are not allowed on this type");
} }
} }
if (flags & NO_REGIONS) != 0u { if (flags & NO_REGIONS) != 0 {
if path.segments.iter().any(|s| s.parameters.has_lifetimes()) { if path.segments.iter().any(|s| s.parameters.has_lifetimes()) {
span_err!(tcx.sess, path.span, E0110, span_err!(tcx.sess, path.span, E0110,
"region parameters are not allowed on this type"); "region parameters are not allowed on this type");
......
...@@ -579,16 +579,16 @@ fn encode_method_callee<'a, 'tcx>(ecx: &e::EncodeContext<'a, 'tcx>, ...@@ -579,16 +579,16 @@ fn encode_method_callee<'a, 'tcx>(ecx: &e::EncodeContext<'a, 'tcx>,
use serialize::Encoder; use serialize::Encoder;
rbml_w.emit_struct("MethodCallee", 4, |rbml_w| { rbml_w.emit_struct("MethodCallee", 4, |rbml_w| {
rbml_w.emit_struct_field("adjustment", 0u, |rbml_w| { rbml_w.emit_struct_field("adjustment", 0, |rbml_w| {
adjustment.encode(rbml_w) adjustment.encode(rbml_w)
}); });
rbml_w.emit_struct_field("origin", 1u, |rbml_w| { rbml_w.emit_struct_field("origin", 1, |rbml_w| {
Ok(rbml_w.emit_method_origin(ecx, &method.origin)) Ok(rbml_w.emit_method_origin(ecx, &method.origin))
}); });
rbml_w.emit_struct_field("ty", 2u, |rbml_w| { rbml_w.emit_struct_field("ty", 2, |rbml_w| {
Ok(rbml_w.emit_ty(ecx, method.ty)) Ok(rbml_w.emit_ty(ecx, method.ty))
}); });
rbml_w.emit_struct_field("substs", 3u, |rbml_w| { rbml_w.emit_struct_field("substs", 3, |rbml_w| {
Ok(rbml_w.emit_substs(ecx, &method.substs)) Ok(rbml_w.emit_substs(ecx, &method.substs))
}) })
}).unwrap(); }).unwrap();
...@@ -743,30 +743,30 @@ fn read_vtable_origin(&mut self, ...@@ -743,30 +743,30 @@ fn read_vtable_origin(&mut self,
Ok(match i { Ok(match i {
0 => { 0 => {
ty::vtable_static( ty::vtable_static(
this.read_enum_variant_arg(0u, |this| { this.read_enum_variant_arg(0, |this| {
Ok(this.read_def_id_nodcx(cdata)) Ok(this.read_def_id_nodcx(cdata))
}).unwrap(), }).unwrap(),
this.read_enum_variant_arg(1u, |this| { this.read_enum_variant_arg(1, |this| {
Ok(this.read_substs_nodcx(tcx, cdata)) Ok(this.read_substs_nodcx(tcx, cdata))
}).unwrap(), }).unwrap(),
this.read_enum_variant_arg(2u, |this| { this.read_enum_variant_arg(2, |this| {
Ok(this.read_vtable_res(tcx, cdata)) Ok(this.read_vtable_res(tcx, cdata))
}).unwrap() }).unwrap()
) )
} }
1 => { 1 => {
ty::vtable_param( ty::vtable_param(
this.read_enum_variant_arg(0u, |this| { this.read_enum_variant_arg(0, |this| {
Decodable::decode(this) Decodable::decode(this)
}).unwrap(), }).unwrap(),
this.read_enum_variant_arg(1u, |this| { this.read_enum_variant_arg(1, |this| {
this.read_uint() this.read_uint()
}).unwrap() }).unwrap()
) )
} }
2 => { 2 => {
ty::vtable_closure( ty::vtable_closure(
this.read_enum_variant_arg(0u, |this| { this.read_enum_variant_arg(0, |this| {
Ok(this.read_def_id_nodcx(cdata)) Ok(this.read_def_id_nodcx(cdata))
}).unwrap() }).unwrap()
) )
......
...@@ -68,10 +68,10 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ...@@ -68,10 +68,10 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
.collect::<Vec<String>>() .collect::<Vec<String>>()
}).collect(); }).collect();
let column_count = m.iter().map(|row| row.len()).max().unwrap_or(0u); let column_count = m.iter().map(|row| row.len()).max().unwrap_or(0);
assert!(m.iter().all(|row| row.len() == column_count)); assert!(m.iter().all(|row| row.len() == column_count));
let column_widths: Vec<uint> = (0..column_count).map(|col| { let column_widths: Vec<uint> = (0..column_count).map(|col| {
pretty_printed_matrix.iter().map(|row| row[col].len()).max().unwrap_or(0u) pretty_printed_matrix.iter().map(|row| row[col].len()).max().unwrap_or(0)
}).collect(); }).collect();
let total_width = column_widths.iter().map(|n| *n).sum() + column_count * 3 + 1; let total_width = column_widths.iter().map(|n| *n).sum() + column_count * 3 + 1;
...@@ -588,13 +588,13 @@ fn is_useful(cx: &MatchCheckCtxt, ...@@ -588,13 +588,13 @@ fn is_useful(cx: &MatchCheckCtxt,
-> Usefulness { -> Usefulness {
let &Matrix(ref rows) = matrix; let &Matrix(ref rows) = matrix;
debug!("{:?}", matrix); debug!("{:?}", matrix);
if rows.len() == 0u { if rows.len() == 0 {
return match witness { return match witness {
ConstructWitness => UsefulWithWitness(vec!()), ConstructWitness => UsefulWithWitness(vec!()),
LeaveOutWitness => Useful LeaveOutWitness => Useful
}; };
} }
if rows[0].len() == 0u { if rows[0].len() == 0 {
return NotUseful; return NotUseful;
} }
let real_pat = match rows.iter().find(|r| (*r)[0].id != DUMMY_NODE_ID) { let real_pat = match rows.iter().find(|r| (*r)[0].id != DUMMY_NODE_ID) {
...@@ -669,9 +669,9 @@ fn is_useful_specialized(cx: &MatchCheckCtxt, &Matrix(ref m): &Matrix, ...@@ -669,9 +669,9 @@ fn is_useful_specialized(cx: &MatchCheckCtxt, &Matrix(ref m): &Matrix,
witness: WitnessPreference) -> Usefulness { witness: WitnessPreference) -> Usefulness {
let arity = constructor_arity(cx, &ctor, lty); let arity = constructor_arity(cx, &ctor, lty);
let matrix = Matrix(m.iter().filter_map(|r| { let matrix = Matrix(m.iter().filter_map(|r| {
specialize(cx, &r[], &ctor, 0u, arity) specialize(cx, &r[], &ctor, 0, arity)
}).collect()); }).collect());
match specialize(cx, v, &ctor, 0u, arity) { match specialize(cx, v, &ctor, 0, arity) {
Some(v) => is_useful(cx, &matrix, &v[], witness), Some(v) => is_useful(cx, &matrix, &v[], witness),
None => NotUseful None => NotUseful
} }
...@@ -742,20 +742,20 @@ fn pat_constructors(cx: &MatchCheckCtxt, p: &Pat, ...@@ -742,20 +742,20 @@ fn pat_constructors(cx: &MatchCheckCtxt, p: &Pat,
/// This computes the arity of a constructor. The arity of a constructor /// This computes the arity of a constructor. The arity of a constructor
/// is how many subpattern patterns of that constructor should be expanded to. /// is how many subpattern patterns of that constructor should be expanded to.
/// ///
/// For instance, a tuple pattern (_, 42u, Some([])) has the arity of 3. /// For instance, a tuple pattern (_, 42, Some([])) has the arity of 3.
/// A struct pattern's arity is the number of fields it contains, etc. /// A struct pattern's arity is the number of fields it contains, etc.
pub fn constructor_arity(cx: &MatchCheckCtxt, ctor: &Constructor, ty: Ty) -> uint { pub fn constructor_arity(cx: &MatchCheckCtxt, ctor: &Constructor, ty: Ty) -> uint {
match ty.sty { match ty.sty {
ty::ty_tup(ref fs) => fs.len(), ty::ty_tup(ref fs) => fs.len(),
ty::ty_uniq(_) => 1u, ty::ty_uniq(_) => 1,
ty::ty_rptr(_, ty::mt { ty, .. }) => match ty.sty { ty::ty_rptr(_, ty::mt { ty, .. }) => match ty.sty {
ty::ty_vec(_, None) => match *ctor { ty::ty_vec(_, None) => match *ctor {
Slice(length) => length, Slice(length) => length,
ConstantValue(_) => 0u, ConstantValue(_) => 0,
_ => unreachable!() _ => unreachable!()
}, },
ty::ty_str => 0u, ty::ty_str => 0,
_ => 1u _ => 1
}, },
ty::ty_enum(eid, _) => { ty::ty_enum(eid, _) => {
match *ctor { match *ctor {
...@@ -765,7 +765,7 @@ pub fn constructor_arity(cx: &MatchCheckCtxt, ctor: &Constructor, ty: Ty) -> uin ...@@ -765,7 +765,7 @@ pub fn constructor_arity(cx: &MatchCheckCtxt, ctor: &Constructor, ty: Ty) -> uin
} }
ty::ty_struct(cid, _) => ty::lookup_struct_fields(cx.tcx, cid).len(), ty::ty_struct(cid, _) => ty::lookup_struct_fields(cx.tcx, cid).len(),
ty::ty_vec(_, Some(n)) => n, ty::ty_vec(_, Some(n)) => n,
_ => 0u _ => 0
} }
} }
......
...@@ -352,7 +352,7 @@ fn each_bit<F>(&self, words: &[uint], mut f: F) -> bool where ...@@ -352,7 +352,7 @@ fn each_bit<F>(&self, words: &[uint], mut f: F) -> bool where
for (word_index, &word) in words.iter().enumerate() { for (word_index, &word) in words.iter().enumerate() {
if word != 0 { if word != 0 {
let base_index = word_index * uint::BITS; let base_index = word_index * uint::BITS;
for offset in 0u..uint::BITS { for offset in 0..uint::BITS {
let bit = 1 << offset; let bit = 1 << offset;
if (word & bit) != 0 { if (word & bit) != 0 {
// NB: we round up the total number of bits // NB: we round up the total number of bits
...@@ -447,7 +447,7 @@ pub fn propagate(&mut self, cfg: &cfg::CFG, blk: &ast::Block) { ...@@ -447,7 +447,7 @@ pub fn propagate(&mut self, cfg: &cfg::CFG, blk: &ast::Block) {
changed: true changed: true
}; };
let mut temp: Vec<_> = repeat(0u).take(words_per_id).collect(); let mut temp: Vec<_> = repeat(0).take(words_per_id).collect();
while propcx.changed { while propcx.changed {
propcx.changed = false; propcx.changed = false;
propcx.reset(temp.as_mut_slice()); propcx.reset(temp.as_mut_slice());
...@@ -466,7 +466,7 @@ fn pretty_print_to(&self, wr: Box<old_io::Writer+'static>, ...@@ -466,7 +466,7 @@ fn pretty_print_to(&self, wr: Box<old_io::Writer+'static>,
blk: &ast::Block) -> old_io::IoResult<()> { blk: &ast::Block) -> old_io::IoResult<()> {
let mut ps = pprust::rust_printer_annotated(wr, self); let mut ps = pprust::rust_printer_annotated(wr, self);
try!(ps.cbox(pprust::indent_unit)); try!(ps.cbox(pprust::indent_unit));
try!(ps.ibox(0u)); try!(ps.ibox(0));
try!(ps.print_block(blk)); try!(ps.print_block(blk));
pp::eof(&mut ps.s) pp::eof(&mut ps.s)
} }
...@@ -552,7 +552,7 @@ fn bits_to_string(words: &[uint]) -> String { ...@@ -552,7 +552,7 @@ fn bits_to_string(words: &[uint]) -> String {
for &word in words.iter() { for &word in words.iter() {
let mut v = word; let mut v = word;
for _ in 0u..uint::BYTES { for _ in 0..uint::BYTES {
result.push(sep); result.push(sep);
result.push_str(&format!("{:02x}", v & 0xFF)[]); result.push_str(&format!("{:02x}", v & 0xFF)[]);
v >>= 8; v >>= 8;
...@@ -593,7 +593,7 @@ fn set_bit(words: &mut [uint], bit: uint) -> bool { ...@@ -593,7 +593,7 @@ fn set_bit(words: &mut [uint], bit: uint) -> bool {
fn bit_str(bit: uint) -> String { fn bit_str(bit: uint) -> String {
let byte = bit >> 8; let byte = bit >> 8;
let lobits = 1u << (bit & 0xFF); let lobits = 1 << (bit & 0xFF);
format!("[{}:{}-{:02x}]", bit, byte, lobits) format!("[{}:{}-{:02x}]", bit, byte, lobits)
} }
......
...@@ -1259,7 +1259,7 @@ fn extract_values_and_collect_conflicts( ...@@ -1259,7 +1259,7 @@ fn extract_values_and_collect_conflicts(
let mut opt_graph = None; let mut opt_graph = None;
for idx in 0u..self.num_vars() as uint { for idx in 0..self.num_vars() as uint {
match var_data[idx].value { match var_data[idx].value {
Value(_) => { Value(_) => {
/* Inference successful */ /* Inference successful */
...@@ -1548,7 +1548,7 @@ fn process_edges<'a, 'tcx>(this: &RegionVarBindings<'a, 'tcx>, ...@@ -1548,7 +1548,7 @@ fn process_edges<'a, 'tcx>(this: &RegionVarBindings<'a, 'tcx>,
fn iterate_until_fixed_point<F>(&self, tag: &str, mut body: F) where fn iterate_until_fixed_point<F>(&self, tag: &str, mut body: F) where
F: FnMut(&Constraint) -> bool, F: FnMut(&Constraint) -> bool,
{ {
let mut iteration = 0u; let mut iteration = 0;
let mut changed = true; let mut changed = true;
while changed { while changed {
changed = false; changed = false;
......
...@@ -540,9 +540,9 @@ struct Specials { ...@@ -540,9 +540,9 @@ struct Specials {
clean_exit_var: Variable clean_exit_var: Variable
} }
static ACC_READ: uint = 1u; static ACC_READ: uint = 1;
static ACC_WRITE: uint = 2u; static ACC_WRITE: uint = 2;
static ACC_USE: uint = 4u; static ACC_USE: uint = 4;
struct Liveness<'a, 'tcx: 'a> { struct Liveness<'a, 'tcx: 'a> {
ir: &'a mut IrMaps<'a, 'tcx>, ir: &'a mut IrMaps<'a, 'tcx>,
...@@ -672,9 +672,9 @@ fn assigned_on_exit(&self, ln: LiveNode, var: Variable) ...@@ -672,9 +672,9 @@ fn assigned_on_exit(&self, ln: LiveNode, var: Variable)
fn indices2<F>(&mut self, ln: LiveNode, succ_ln: LiveNode, mut op: F) where fn indices2<F>(&mut self, ln: LiveNode, succ_ln: LiveNode, mut op: F) where
F: FnMut(&mut Liveness<'a, 'tcx>, uint, uint), F: FnMut(&mut Liveness<'a, 'tcx>, uint, uint),
{ {
let node_base_idx = self.idx(ln, Variable(0u)); let node_base_idx = self.idx(ln, Variable(0));
let succ_base_idx = self.idx(succ_ln, Variable(0u)); let succ_base_idx = self.idx(succ_ln, Variable(0));
for var_idx in 0u..self.ir.num_vars { for var_idx in 0..self.ir.num_vars {
op(self, node_base_idx + var_idx, succ_base_idx + var_idx); op(self, node_base_idx + var_idx, succ_base_idx + var_idx);
} }
} }
...@@ -687,7 +687,7 @@ fn write_vars<F>(&self, ...@@ -687,7 +687,7 @@ fn write_vars<F>(&self,
F: FnMut(uint) -> LiveNode, F: FnMut(uint) -> LiveNode,
{ {
let node_base_idx = self.idx(ln, Variable(0)); let node_base_idx = self.idx(ln, Variable(0));
for var_idx in 0u..self.ir.num_vars { for var_idx in 0..self.ir.num_vars {
let idx = node_base_idx + var_idx; let idx = node_base_idx + var_idx;
if test(idx).is_valid() { if test(idx).is_valid() {
try!(write!(wr, " {:?}", Variable(var_idx))); try!(write!(wr, " {:?}", Variable(var_idx)));
...@@ -847,7 +847,7 @@ fn compute(&mut self, decl: &ast::FnDecl, body: &ast::Block) -> LiveNode { ...@@ -847,7 +847,7 @@ fn compute(&mut self, decl: &ast::FnDecl, body: &ast::Block) -> LiveNode {
// hack to skip the loop unless debug! is enabled: // hack to skip the loop unless debug! is enabled:
debug!("^^ liveness computation results for body {} (entry={:?})", debug!("^^ liveness computation results for body {} (entry={:?})",
{ {
for ln_idx in 0u..self.ir.num_live_nodes { for ln_idx in 0..self.ir.num_live_nodes {
debug!("{:?}", self.ln_str(LiveNode(ln_idx))); debug!("{:?}", self.ln_str(LiveNode(ln_idx)));
} }
body.id body.id
...@@ -1303,7 +1303,7 @@ fn access_path(&mut self, expr: &Expr, succ: LiveNode, acc: uint) ...@@ -1303,7 +1303,7 @@ fn access_path(&mut self, expr: &Expr, succ: LiveNode, acc: uint)
match self.ir.tcx.def_map.borrow()[expr.id].clone() { match self.ir.tcx.def_map.borrow()[expr.id].clone() {
DefLocal(nid) => { DefLocal(nid) => {
let ln = self.live_node(expr.id, expr.span); let ln = self.live_node(expr.id, expr.span);
if acc != 0u { if acc != 0 {
self.init_from_succ(ln, succ); self.init_from_succ(ln, succ);
let var = self.variable(nid, expr.span); let var = self.variable(nid, expr.span);
self.acc(ln, var, acc); self.acc(ln, var, acc);
......
...@@ -454,7 +454,7 @@ pub fn cat_expr_autoderefd(&self, ...@@ -454,7 +454,7 @@ pub fn cat_expr_autoderefd(&self,
debug!("cat_expr_autoderefd: autoderefs={}, cmt={}", debug!("cat_expr_autoderefd: autoderefs={}, cmt={}",
autoderefs, autoderefs,
cmt.repr(self.tcx())); cmt.repr(self.tcx()));
for deref in 1u..autoderefs + 1 { for deref in 1..autoderefs + 1 {
cmt = try!(self.cat_deref(expr, cmt, deref)); cmt = try!(self.cat_deref(expr, cmt, deref));
} }
return Ok(cmt); return Ok(cmt);
......
...@@ -461,8 +461,8 @@ pub fn nearest_common_ancestor(&self, ...@@ -461,8 +461,8 @@ pub fn nearest_common_ancestor(&self,
let a_ancestors = ancestors_of(self, scope_a); let a_ancestors = ancestors_of(self, scope_a);
let b_ancestors = ancestors_of(self, scope_b); let b_ancestors = ancestors_of(self, scope_b);
let mut a_index = a_ancestors.len() - 1u; let mut a_index = a_ancestors.len() - 1;
let mut b_index = b_ancestors.len() - 1u; let mut b_index = b_ancestors.len() - 1;
// Here, ~[ab]_ancestors is a vector going from narrow to broad. // Here, ~[ab]_ancestors is a vector going from narrow to broad.
// The end of each vector will be the item where the scope is // The end of each vector will be the item where the scope is
...@@ -479,10 +479,10 @@ pub fn nearest_common_ancestor(&self, ...@@ -479,10 +479,10 @@ pub fn nearest_common_ancestor(&self,
loop { loop {
// Loop invariant: a_ancestors[a_index] == b_ancestors[b_index] // Loop invariant: a_ancestors[a_index] == b_ancestors[b_index]
// for all indices between a_index and the end of the array // for all indices between a_index and the end of the array
if a_index == 0u { return Some(scope_a); } if a_index == 0 { return Some(scope_a); }
if b_index == 0u { return Some(scope_b); } if b_index == 0 { return Some(scope_b); }
a_index -= 1u; a_index -= 1;
b_index -= 1u; b_index -= 1;
if a_ancestors[a_index] != b_ancestors[b_index] { if a_ancestors[a_index] != b_ancestors[b_index] {
return Some(a_ancestors[a_index + 1]); return Some(a_ancestors[a_index + 1]);
} }
......
...@@ -4475,7 +4475,7 @@ pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind { ...@@ -4475,7 +4475,7 @@ pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind {
match resolve_expr(tcx, expr) { match resolve_expr(tcx, expr) {
def::DefVariant(tid, vid, _) => { def::DefVariant(tid, vid, _) => {
let variant_info = enum_variant_with_id(tcx, tid, vid); let variant_info = enum_variant_with_id(tcx, tid, vid);
if variant_info.args.len() > 0u { if variant_info.args.len() > 0 {
// N-ary variant. // N-ary variant.
RvalueDatumExpr RvalueDatumExpr
} else { } else {
...@@ -4639,8 +4639,8 @@ pub fn stmt_node_id(s: &ast::Stmt) -> ast::NodeId { ...@@ -4639,8 +4639,8 @@ pub fn stmt_node_id(s: &ast::Stmt) -> ast::NodeId {
pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field]) pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field])
-> uint { -> uint {
let mut i = 0u; let mut i = 0;
for f in fields.iter() { if f.name == name { return i; } i += 1u; } for f in fields.iter() { if f.name == name { return i; } i += 1; }
tcx.sess.bug(&format!( tcx.sess.bug(&format!(
"no field named `{}` found in the list of fields `{:?}`", "no field named `{}` found in the list of fields `{:?}`",
token::get_name(name), token::get_name(name),
......
...@@ -260,7 +260,7 @@ fn split_msg_into_multilines(msg: &str) -> Option<String> { ...@@ -260,7 +260,7 @@ fn split_msg_into_multilines(msg: &str) -> Option<String> {
}).map(|(a, b)| (a - 1, b)); }).map(|(a, b)| (a - 1, b));
let mut new_msg = String::new(); let mut new_msg = String::new();
let mut head = 0u; let mut head = 0;
// Insert `\n` before expected and found. // Insert `\n` before expected and found.
for (pos1, pos2) in first.zip(second) { for (pos1, pos2) in first.zip(second) {
......
...@@ -531,8 +531,8 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>, ...@@ -531,8 +531,8 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>,
pub fn ty_to_short_str<'tcx>(cx: &ctxt<'tcx>, typ: Ty<'tcx>) -> String { pub fn ty_to_short_str<'tcx>(cx: &ctxt<'tcx>, typ: Ty<'tcx>) -> String {
let mut s = typ.repr(cx).to_string(); let mut s = typ.repr(cx).to_string();
if s.len() >= 32u { if s.len() >= 32 {
s = (&s[0u..32u]).to_string(); s = (&s[0u..32]).to_string();
} }
return s; return s;
} }
......
...@@ -256,7 +256,7 @@ pub fn sanitize(s: &str) -> String { ...@@ -256,7 +256,7 @@ pub fn sanitize(s: &str) -> String {
} }
// Underscore-qualify anything that didn't start as an ident. // Underscore-qualify anything that didn't start as an ident.
if result.len() > 0u && if result.len() > 0 &&
result.as_bytes()[0] != '_' as u8 && result.as_bytes()[0] != '_' as u8 &&
! (result.as_bytes()[0] as char).is_xid_start() { ! (result.as_bytes()[0] as char).is_xid_start() {
return format!("_{}", &result[]); return format!("_{}", &result[]);
......
...@@ -62,7 +62,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, ...@@ -62,7 +62,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
let file = path.filename_str().unwrap(); let file = path.filename_str().unwrap();
let file = &file[3..file.len() - 5]; // chop off lib/.rlib let file = &file[3..file.len() - 5]; // chop off lib/.rlib
debug!("reading {}", file); debug!("reading {}", file);
for i in iter::count(0u, 1) { for i in iter::count(0us, 1) {
let bc_encoded = time(sess.time_passes(), let bc_encoded = time(sess.time_passes(),
format!("check for {}.{}.bytecode.deflate", name, i).as_slice(), format!("check for {}.{}.bytecode.deflate", name, i).as_slice(),
(), (),
......
...@@ -272,7 +272,7 @@ pub fn variable_str(&mut self, ...@@ -272,7 +272,7 @@ pub fn variable_str(&mut self,
self.check_and_record(Variable, self.check_and_record(Variable,
span, span,
sub_span, sub_span,
svec!(id, name, qualname, value, typ, 0u)); svec!(id, name, qualname, value, typ, 0));
} }
// formal parameters // formal parameters
...@@ -289,7 +289,7 @@ pub fn formal_str(&mut self, ...@@ -289,7 +289,7 @@ pub fn formal_str(&mut self,
self.check_and_record(Variable, self.check_and_record(Variable,
span, span,
sub_span, sub_span,
svec!(id, name, qualname, "", typ, 0u)); svec!(id, name, qualname, "", typ, 0));
} }
// value is the initialising expression of the static if it is not mut, otherwise "". // value is the initialising expression of the static if it is not mut, otherwise "".
...@@ -520,7 +520,7 @@ pub fn inherit_str(&mut self, ...@@ -520,7 +520,7 @@ pub fn inherit_str(&mut self,
svec!(base_id.node, svec!(base_id.node,
base_id.krate, base_id.krate,
deriv_id, deriv_id,
0u)); 0));
} }
pub fn fn_call_str(&mut self, pub fn fn_call_str(&mut self,
...@@ -562,7 +562,7 @@ pub fn sub_mod_ref_str(&mut self, ...@@ -562,7 +562,7 @@ pub fn sub_mod_ref_str(&mut self,
self.record_with_span(ModRef, self.record_with_span(ModRef,
span, span,
sub_span, sub_span,
svec!(0u, 0u, qualname, parent)); svec!(0, 0, qualname, parent));
} }
pub fn typedef_str(&mut self, pub fn typedef_str(&mut self,
...@@ -603,7 +603,7 @@ pub fn sub_type_ref_str(&mut self, ...@@ -603,7 +603,7 @@ pub fn sub_type_ref_str(&mut self,
self.record_with_span(TypeRef, self.record_with_span(TypeRef,
span, span,
sub_span, sub_span,
svec!(0u, 0u, qualname, 0u)); svec!(0, 0, qualname, 0));
} }
// A slightly generic function for a reference to an item of any kind. // A slightly generic function for a reference to an item of any kind.
......
...@@ -94,7 +94,7 @@ pub fn span_for_last_ident(&self, span: Span) -> Option<Span> { ...@@ -94,7 +94,7 @@ pub fn span_for_last_ident(&self, span: Span) -> Option<Span> {
let mut result = None; let mut result = None;
let mut toks = self.retokenise_span(span); let mut toks = self.retokenise_span(span);
let mut bracket_count = 0u; let mut bracket_count = 0;
loop { loop {
let ts = toks.real_token(); let ts = toks.real_token();
if ts.tok == token::Eof { if ts.tok == token::Eof {
...@@ -117,7 +117,7 @@ pub fn span_for_last_ident(&self, span: Span) -> Option<Span> { ...@@ -117,7 +117,7 @@ pub fn span_for_last_ident(&self, span: Span) -> Option<Span> {
// Return the span for the first identifier in the path. // Return the span for the first identifier in the path.
pub fn span_for_first_ident(&self, span: Span) -> Option<Span> { pub fn span_for_first_ident(&self, span: Span) -> Option<Span> {
let mut toks = self.retokenise_span(span); let mut toks = self.retokenise_span(span);
let mut bracket_count = 0u; let mut bracket_count = 0;
loop { loop {
let ts = toks.real_token(); let ts = toks.real_token();
if ts.tok == token::Eof { if ts.tok == token::Eof {
...@@ -143,7 +143,7 @@ pub fn sub_span_for_meth_name(&self, span: Span) -> Option<Span> { ...@@ -143,7 +143,7 @@ pub fn sub_span_for_meth_name(&self, span: Span) -> Option<Span> {
let mut toks = self.retokenise_span(span); let mut toks = self.retokenise_span(span);
let mut prev = toks.real_token(); let mut prev = toks.real_token();
let mut result = None; let mut result = None;
let mut bracket_count = 0u; let mut bracket_count = 0;
let mut last_span = None; let mut last_span = None;
while prev.tok != token::Eof { while prev.tok != token::Eof {
last_span = None; last_span = None;
...@@ -191,7 +191,7 @@ pub fn sub_span_for_type_name(&self, span: Span) -> Option<Span> { ...@@ -191,7 +191,7 @@ pub fn sub_span_for_type_name(&self, span: Span) -> Option<Span> {
let mut toks = self.retokenise_span(span); let mut toks = self.retokenise_span(span);
let mut prev = toks.real_token(); let mut prev = toks.real_token();
let mut result = None; let mut result = None;
let mut bracket_count = 0u; let mut bracket_count = 0;
loop { loop {
let next = toks.real_token(); let next = toks.real_token();
......
...@@ -32,7 +32,7 @@ ...@@ -32,7 +32,7 @@
//! match foo { //! match foo {
//! A => ..., //! A => ...,
//! B(x) => ..., //! B(x) => ...,
//! C(1u, 2) => ..., //! C(1, 2) => ...,
//! C(_) => ... //! C(_) => ...
//! } //! }
//! //!
...@@ -41,7 +41,7 @@ ...@@ -41,7 +41,7 @@
//! various options and then compile the code for the case where `foo` is an //! various options and then compile the code for the case where `foo` is an
//! `A`, a `B`, and a `C`. When we generate the code for `C`, we would (1) //! `A`, a `B`, and a `C`. When we generate the code for `C`, we would (1)
//! drop the two matches that do not match a `C` and (2) expand the other two //! drop the two matches that do not match a `C` and (2) expand the other two
//! into two patterns each. In the first case, the two patterns would be `1u` //! into two patterns each. In the first case, the two patterns would be `1`
//! and `2`, and the in the second case the _ pattern would be expanded into //! and `2`, and the in the second case the _ pattern would be expanded into
//! `_` and `_`. The two values are of course the arguments to `C`. //! `_` and `_`. The two values are of course the arguments to `C`.
//! //!
...@@ -638,8 +638,8 @@ fn bind_subslice_pat(bcx: Block, ...@@ -638,8 +638,8 @@ fn bind_subslice_pat(bcx: Block,
ty::mt {ty: vt.unit_ty, mutbl: ast::MutImmutable}); ty::mt {ty: vt.unit_ty, mutbl: ast::MutImmutable});
let scratch = rvalue_scratch_datum(bcx, slice_ty, ""); let scratch = rvalue_scratch_datum(bcx, slice_ty, "");
Store(bcx, slice_begin, Store(bcx, slice_begin,
GEPi(bcx, scratch.val, &[0u, abi::FAT_PTR_ADDR])); GEPi(bcx, scratch.val, &[0, abi::FAT_PTR_ADDR]));
Store(bcx, slice_len, GEPi(bcx, scratch.val, &[0u, abi::FAT_PTR_EXTRA])); Store(bcx, slice_len, GEPi(bcx, scratch.val, &[0, abi::FAT_PTR_EXTRA]));
scratch.val scratch.val
} }
...@@ -742,8 +742,8 @@ fn pick_column_to_specialize(def_map: &DefMap, m: &[Match]) -> Option<uint> { ...@@ -742,8 +742,8 @@ fn pick_column_to_specialize(def_map: &DefMap, m: &[Match]) -> Option<uint> {
fn pat_score(def_map: &DefMap, pat: &ast::Pat) -> uint { fn pat_score(def_map: &DefMap, pat: &ast::Pat) -> uint {
match pat.node { match pat.node {
ast::PatIdent(_, _, Some(ref inner)) => pat_score(def_map, &**inner), ast::PatIdent(_, _, Some(ref inner)) => pat_score(def_map, &**inner),
_ if pat_is_refutable(def_map, pat) => 1u, _ if pat_is_refutable(def_map, pat) => 1,
_ => 0u _ => 0
} }
} }
...@@ -922,7 +922,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ...@@ -922,7 +922,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let _indenter = indenter(); let _indenter = indenter();
let _icx = push_ctxt("match::compile_submatch"); let _icx = push_ctxt("match::compile_submatch");
let mut bcx = bcx; let mut bcx = bcx;
if m.len() == 0u { if m.len() == 0 {
if chk.is_fallible() { if chk.is_fallible() {
chk.handle_fail(bcx); chk.handle_fail(bcx);
} }
...@@ -982,8 +982,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, ...@@ -982,8 +982,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
let tcx = bcx.tcx(); let tcx = bcx.tcx();
let dm = &tcx.def_map; let dm = &tcx.def_map;
let mut vals_left = vals[0u..col].to_vec(); let mut vals_left = vals[0..col].to_vec();
vals_left.push_all(&vals[col + 1u..]); vals_left.push_all(&vals[col + 1..]);
let ccx = bcx.fcx.ccx; let ccx = bcx.fcx.ccx;
// Find a real id (we're adding placeholder wildcard patterns, but // Find a real id (we're adding placeholder wildcard patterns, but
...@@ -1042,7 +1042,7 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, ...@@ -1042,7 +1042,7 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
let mut kind = NoBranch; let mut kind = NoBranch;
let mut test_val = val; let mut test_val = val;
debug!("test_val={}", bcx.val_to_string(test_val)); debug!("test_val={}", bcx.val_to_string(test_val));
if opts.len() > 0u { if opts.len() > 0 {
match opts[0] { match opts[0] {
ConstantValue(_) | ConstantRange(_, _) => { ConstantValue(_) | ConstantRange(_, _) => {
test_val = load_if_immediate(bcx, val, left_ty); test_val = load_if_immediate(bcx, val, left_ty);
...@@ -1082,7 +1082,7 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, ...@@ -1082,7 +1082,7 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
}; };
let defaults = enter_default(else_cx, dm, m, col, val); let defaults = enter_default(else_cx, dm, m, col, val);
let exhaustive = chk.is_infallible() && defaults.len() == 0u; let exhaustive = chk.is_infallible() && defaults.len() == 0;
let len = opts.len(); let len = opts.len();
// Compile subtrees for each option // Compile subtrees for each option
...@@ -1157,7 +1157,7 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, ...@@ -1157,7 +1157,7 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
Br(bcx, else_cx.llbb, DebugLoc::None); Br(bcx, else_cx.llbb, DebugLoc::None);
} }
let mut size = 0u; let mut size = 0;
let mut unpacked = Vec::new(); let mut unpacked = Vec::new();
match *opt { match *opt {
Variant(disr_val, ref repr, _) => { Variant(disr_val, ref repr, _) => {
......
...@@ -401,7 +401,7 @@ pub fn get_tydesc<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ...@@ -401,7 +401,7 @@ pub fn get_tydesc<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
_ => { } _ => { }
} }
ccx.stats().n_static_tydescs.set(ccx.stats().n_static_tydescs.get() + 1u); ccx.stats().n_static_tydescs.set(ccx.stats().n_static_tydescs.get() + 1);
let inf = Rc::new(glue::declare_tydesc(ccx, t)); let inf = Rc::new(glue::declare_tydesc(ccx, t));
ccx.tydescs().borrow_mut().insert(t, inf.clone()); ccx.tydescs().borrow_mut().insert(t, inf.clone());
...@@ -2879,7 +2879,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { ...@@ -2879,7 +2879,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
panic!("struct variant kind unexpected in get_item_val") panic!("struct variant kind unexpected in get_item_val")
} }
}; };
assert!(args.len() != 0u); assert!(args.len() != 0);
let ty = ty::node_id_to_type(ccx.tcx(), id); let ty = ty::node_id_to_type(ccx.tcx(), id);
let parent = ccx.tcx().map.get_parent(id); let parent = ccx.tcx().map.get_parent(id);
let enm = ccx.tcx().map.expect_item(parent); let enm = ccx.tcx().map.expect_item(parent);
......
...@@ -61,22 +61,22 @@ pub fn count_insn(&self, category: &str) { ...@@ -61,22 +61,22 @@ pub fn count_insn(&self, category: &str) {
// Pass 1: scan table mapping str -> rightmost pos. // Pass 1: scan table mapping str -> rightmost pos.
let mut mm = FnvHashMap(); let mut mm = FnvHashMap();
let len = v.len(); let len = v.len();
let mut i = 0u; let mut i = 0;
while i < len { while i < len {
mm.insert(v[i], i); mm.insert(v[i], i);
i += 1u; i += 1;
} }
// Pass 2: concat strings for each elt, skipping // Pass 2: concat strings for each elt, skipping
// forwards over any cycles by advancing to rightmost // forwards over any cycles by advancing to rightmost
// occurrence of each element in path. // occurrence of each element in path.
let mut s = String::from_str("."); let mut s = String::from_str(".");
i = 0u; i = 0;
while i < len { while i < len {
i = mm[v[i]]; i = mm[v[i]];
s.push('/'); s.push('/');
s.push_str(v[i]); s.push_str(v[i]);
i += 1u; i += 1;
} }
s.push('/'); s.push('/');
...@@ -84,9 +84,9 @@ pub fn count_insn(&self, category: &str) { ...@@ -84,9 +84,9 @@ pub fn count_insn(&self, category: &str) {
let n = match h.get(&s) { let n = match h.get(&s) {
Some(&n) => n, Some(&n) => n,
_ => 0u _ => 0
}; };
h.insert(s, n+1u); h.insert(s, n+1);
}) })
} }
} }
......
...@@ -19,7 +19,7 @@ ...@@ -19,7 +19,7 @@
use std::cmp; use std::cmp;
fn align_up_to(off: uint, a: uint) -> uint { fn align_up_to(off: uint, a: uint) -> uint {
return (off + a - 1u) / a * a; return (off + a - 1) / a * a;
} }
fn align(off: uint, ty: Type) -> uint { fn align(off: uint, ty: Type) -> uint {
......
...@@ -26,7 +26,7 @@ pub enum Flavor { ...@@ -26,7 +26,7 @@ pub enum Flavor {
type TyAlignFn = fn(ty: Type) -> uint; type TyAlignFn = fn(ty: Type) -> uint;
fn align_up_to(off: uint, a: uint) -> uint { fn align_up_to(off: uint, a: uint) -> uint {
return (off + a - 1u) / a * a; return (off + a - 1) / a * a;
} }
fn align(off: uint, ty: Type, align_fn: TyAlignFn) -> uint { fn align(off: uint, ty: Type, align_fn: TyAlignFn) -> uint {
......
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
use trans::type_::Type; use trans::type_::Type;
fn align_up_to(off: uint, a: uint) -> uint { fn align_up_to(off: uint, a: uint) -> uint {
return (off + a - 1u) / a * a; return (off + a - 1) / a * a;
} }
fn align(off: uint, ty: Type) -> uint { fn align(off: uint, ty: Type) -> uint {
......
...@@ -19,7 +19,7 @@ ...@@ -19,7 +19,7 @@
use std::cmp; use std::cmp;
fn align_up_to(off: uint, a: uint) -> uint { fn align_up_to(off: uint, a: uint) -> uint {
return (off + a - 1u) / a * a; return (off + a - 1) / a * a;
} }
fn align(off: uint, ty: Type) -> uint { fn align(off: uint, ty: Type) -> uint {
......
...@@ -88,7 +88,7 @@ fn is_ret_bysret(&self) -> bool { ...@@ -88,7 +88,7 @@ fn is_ret_bysret(&self) -> bool {
fn classify_ty(ty: Type) -> Vec<RegClass> { fn classify_ty(ty: Type) -> Vec<RegClass> {
fn align(off: uint, ty: Type) -> uint { fn align(off: uint, ty: Type) -> uint {
let a = ty_align(ty); let a = ty_align(ty);
return (off + a - 1u) / a * a; return (off + a - 1) / a * a;
} }
fn ty_align(ty: Type) -> uint { fn ty_align(ty: Type) -> uint {
...@@ -211,12 +211,12 @@ fn classify(ty: Type, ...@@ -211,12 +211,12 @@ fn classify(ty: Type,
let t_size = ty_size(ty); let t_size = ty_size(ty);
let misalign = off % t_align; let misalign = off % t_align;
if misalign != 0u { if misalign != 0 {
let mut i = off / 8u; let mut i = off / 8;
let e = (off + t_size + 7u) / 8u; let e = (off + t_size + 7) / 8;
while i < e { while i < e {
unify(cls, ix + i, Memory); unify(cls, ix + i, Memory);
i += 1u; i += 1;
} }
return; return;
} }
...@@ -224,17 +224,17 @@ fn classify(ty: Type, ...@@ -224,17 +224,17 @@ fn classify(ty: Type,
match ty.kind() { match ty.kind() {
Integer | Integer |
Pointer => { Pointer => {
unify(cls, ix + off / 8u, Int); unify(cls, ix + off / 8, Int);
} }
Float => { Float => {
if off % 8u == 4u { if off % 8 == 4 {
unify(cls, ix + off / 8u, SSEFv); unify(cls, ix + off / 8, SSEFv);
} else { } else {
unify(cls, ix + off / 8u, SSEFs); unify(cls, ix + off / 8, SSEFs);
} }
} }
Double => { Double => {
unify(cls, ix + off / 8u, SSEDs); unify(cls, ix + off / 8, SSEDs);
} }
Struct => { Struct => {
classify_struct(ty.field_types().as_slice(), cls, ix, off, ty.is_packed()); classify_struct(ty.field_types().as_slice(), cls, ix, off, ty.is_packed());
...@@ -243,10 +243,10 @@ fn classify(ty: Type, ...@@ -243,10 +243,10 @@ fn classify(ty: Type,
let len = ty.array_length(); let len = ty.array_length();
let elt = ty.element_type(); let elt = ty.element_type();
let eltsz = ty_size(elt); let eltsz = ty_size(elt);
let mut i = 0u; let mut i = 0;
while i < len { while i < len {
classify(elt, cls, ix, off + i * eltsz); classify(elt, cls, ix, off + i * eltsz);
i += 1u; i += 1;
} }
} }
Vector => { Vector => {
...@@ -260,14 +260,14 @@ fn classify(ty: Type, ...@@ -260,14 +260,14 @@ fn classify(ty: Type,
_ => panic!("classify: unhandled vector element type") _ => panic!("classify: unhandled vector element type")
}; };
let mut i = 0u; let mut i = 0;
while i < len { while i < len {
unify(cls, ix + (off + i * eltsz) / 8, reg); unify(cls, ix + (off + i * eltsz) / 8, reg);
// everything after the first one is the upper // everything after the first one is the upper
// half of a register. // half of a register.
reg = SSEUp; reg = SSEUp;
i += 1u; i += 1;
} }
} }
_ => panic!("classify: unhandled type") _ => panic!("classify: unhandled type")
...@@ -275,18 +275,18 @@ fn classify(ty: Type, ...@@ -275,18 +275,18 @@ fn classify(ty: Type,
} }
fn fixup(ty: Type, cls: &mut [RegClass]) { fn fixup(ty: Type, cls: &mut [RegClass]) {
let mut i = 0u; let mut i = 0;
let ty_kind = ty.kind(); let ty_kind = ty.kind();
let e = cls.len(); let e = cls.len();
if cls.len() > 2u && (ty_kind == Struct || ty_kind == Array || ty_kind == Vector) { if cls.len() > 2 && (ty_kind == Struct || ty_kind == Array || ty_kind == Vector) {
if cls[i].is_sse() { if cls[i].is_sse() {
i += 1u; i += 1;
while i < e { while i < e {
if cls[i] != SSEUp { if cls[i] != SSEUp {
all_mem(cls); all_mem(cls);
return; return;
} }
i += 1u; i += 1;
} }
} else { } else {
all_mem(cls); all_mem(cls);
...@@ -308,10 +308,10 @@ fn fixup(ty: Type, cls: &mut [RegClass]) { ...@@ -308,10 +308,10 @@ fn fixup(ty: Type, cls: &mut [RegClass]) {
cls[i] = SSEDv; cls[i] = SSEDv;
} else if cls[i].is_sse() { } else if cls[i].is_sse() {
i += 1; i += 1;
while i != e && cls[i] == SSEUp { i += 1u; } while i != e && cls[i] == SSEUp { i += 1; }
} else if cls[i] == X87 { } else if cls[i] == X87 {
i += 1; i += 1;
while i != e && cls[i] == X87Up { i += 1u; } while i != e && cls[i] == X87Up { i += 1; }
} else { } else {
i += 1; i += 1;
} }
...@@ -332,18 +332,18 @@ fn fixup(ty: Type, cls: &mut [RegClass]) { ...@@ -332,18 +332,18 @@ fn fixup(ty: Type, cls: &mut [RegClass]) {
fn llreg_ty(ccx: &CrateContext, cls: &[RegClass]) -> Type { fn llreg_ty(ccx: &CrateContext, cls: &[RegClass]) -> Type {
fn llvec_len(cls: &[RegClass]) -> uint { fn llvec_len(cls: &[RegClass]) -> uint {
let mut len = 1u; let mut len = 1;
for c in cls.iter() { for c in cls.iter() {
if *c != SSEUp { if *c != SSEUp {
break; break;
} }
len += 1u; len += 1;
} }
return len; return len;
} }
let mut tys = Vec::new(); let mut tys = Vec::new();
let mut i = 0u; let mut i = 0;
let e = cls.len(); let e = cls.len();
while i < e { while i < e {
match cls[i] { match cls[i] {
...@@ -361,7 +361,7 @@ fn llvec_len(cls: &[RegClass]) -> uint { ...@@ -361,7 +361,7 @@ fn llvec_len(cls: &[RegClass]) -> uint {
} }
_ => unreachable!(), _ => unreachable!(),
}; };
let vec_len = llvec_len(&cls[i + 1u..]); let vec_len = llvec_len(&cls[i + 1..]);
let vec_ty = Type::vector(&elt_ty, vec_len as u64 * elts_per_word); let vec_ty = Type::vector(&elt_ty, vec_len as u64 * elts_per_word);
tys.push(vec_ty); tys.push(vec_ty);
i += vec_len; i += vec_len;
...@@ -375,7 +375,7 @@ fn llvec_len(cls: &[RegClass]) -> uint { ...@@ -375,7 +375,7 @@ fn llvec_len(cls: &[RegClass]) -> uint {
} }
_ => panic!("llregtype: unhandled class") _ => panic!("llregtype: unhandled class")
} }
i += 1u; i += 1;
} }
if tys.len() == 1 && tys[0].kind() == Vector { if tys.len() == 1 && tys[0].kind() == Vector {
// if the type contains only a vector, pass it as that vector. // if the type contains only a vector, pass it as that vector.
......
...@@ -182,7 +182,7 @@ fn trans_def<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ...@@ -182,7 +182,7 @@ fn trans_def<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
bcx.fcx.param_substs); bcx.fcx.param_substs);
// Nullary variants are not callable // Nullary variants are not callable
assert!(vinfo.args.len() > 0u); assert!(vinfo.args.len() > 0);
Callee { Callee {
bcx: bcx, bcx: bcx,
......
...@@ -747,7 +747,7 @@ fn get_or_create_landing_pad(&'blk self) -> BasicBlockRef { ...@@ -747,7 +747,7 @@ fn get_or_create_landing_pad(&'blk self) -> BasicBlockRef {
}; };
// The only landing pad clause will be 'cleanup' // The only landing pad clause will be 'cleanup'
let llretval = build::LandingPad(pad_bcx, llretty, llpersonality, 1u); let llretval = build::LandingPad(pad_bcx, llretty, llpersonality, 1);
// The landing pad block is a cleanup // The landing pad block is a cleanup
build::SetCleanup(pad_bcx, llretval); build::SetCleanup(pad_bcx, llretval);
......
...@@ -258,15 +258,15 @@ pub fn new(crate_name: &str, ...@@ -258,15 +258,15 @@ pub fn new(crate_name: &str,
symbol_hasher: RefCell::new(symbol_hasher), symbol_hasher: RefCell::new(symbol_hasher),
tcx: tcx, tcx: tcx,
stats: Stats { stats: Stats {
n_static_tydescs: Cell::new(0u), n_static_tydescs: Cell::new(0),
n_glues_created: Cell::new(0u), n_glues_created: Cell::new(0),
n_null_glues: Cell::new(0u), n_null_glues: Cell::new(0),
n_real_glues: Cell::new(0u), n_real_glues: Cell::new(0),
n_fns: Cell::new(0u), n_fns: Cell::new(0),
n_monos: Cell::new(0u), n_monos: Cell::new(0),
n_inlines: Cell::new(0u), n_inlines: Cell::new(0),
n_closures: Cell::new(0u), n_closures: Cell::new(0),
n_llvm_insns: Cell::new(0u), n_llvm_insns: Cell::new(0),
llvm_insns: RefCell::new(FnvHashMap()), llvm_insns: RefCell::new(FnvHashMap()),
fn_stats: RefCell::new(Vec::new()), fn_stats: RefCell::new(Vec::new()),
}, },
...@@ -418,7 +418,7 @@ fn new(shared: &SharedCrateContext<'tcx>, ...@@ -418,7 +418,7 @@ fn new(shared: &SharedCrateContext<'tcx>,
dbg_cx: dbg_cx, dbg_cx: dbg_cx,
eh_personality: RefCell::new(None), eh_personality: RefCell::new(None),
intrinsics: RefCell::new(FnvHashMap()), intrinsics: RefCell::new(FnvHashMap()),
n_llvm_insns: Cell::new(0u), n_llvm_insns: Cell::new(0),
trait_cache: RefCell::new(FnvHashMap()), trait_cache: RefCell::new(FnvHashMap()),
}; };
......
...@@ -1730,7 +1730,7 @@ fn file_metadata(cx: &CrateContext, full_path: &str) -> DIFile { ...@@ -1730,7 +1730,7 @@ fn file_metadata(cx: &CrateContext, full_path: &str) -> DIFile {
let work_dir = cx.sess().working_dir.as_str().unwrap(); let work_dir = cx.sess().working_dir.as_str().unwrap();
let file_name = let file_name =
if full_path.starts_with(work_dir) { if full_path.starts_with(work_dir) {
&full_path[work_dir.len() + 1u..full_path.len()] &full_path[work_dir.len() + 1..full_path.len()]
} else { } else {
full_path full_path
}; };
...@@ -2268,7 +2268,7 @@ fn create_member_descriptions<'a>(&self, cx: &CrateContext<'a, 'tcx>) ...@@ -2268,7 +2268,7 @@ fn create_member_descriptions<'a>(&self, cx: &CrateContext<'a, 'tcx>)
let null_variant_index = (1 - non_null_variant_index) as uint; let null_variant_index = (1 - non_null_variant_index) as uint;
let null_variant_name = token::get_name((*self.variants)[null_variant_index].name); let null_variant_name = token::get_name((*self.variants)[null_variant_index].name);
let union_member_name = format!("RUST$ENCODED$ENUM${}${}", let union_member_name = format!("RUST$ENCODED$ENUM${}${}",
0u, 0,
null_variant_name); null_variant_name);
// Finally create the (singleton) list of descriptions of union // Finally create the (singleton) list of descriptions of union
...@@ -3855,7 +3855,7 @@ fn push_item_name(cx: &CrateContext, ...@@ -3855,7 +3855,7 @@ fn push_item_name(cx: &CrateContext,
output.push_str("::"); output.push_str("::");
} }
let mut path_element_count = 0u; let mut path_element_count = 0;
for path_element in path { for path_element in path {
let name = token::get_name(path_element.name()); let name = token::get_name(path_element.name());
output.push_str(name.get()); output.push_str(name.get());
......
...@@ -153,11 +153,11 @@ pub fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ...@@ -153,11 +153,11 @@ pub fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
} }
pub fn get_len(bcx: Block, fat_ptr: ValueRef) -> ValueRef { pub fn get_len(bcx: Block, fat_ptr: ValueRef) -> ValueRef {
GEPi(bcx, fat_ptr, &[0u, abi::FAT_PTR_EXTRA]) GEPi(bcx, fat_ptr, &[0, abi::FAT_PTR_EXTRA])
} }
pub fn get_dataptr(bcx: Block, fat_ptr: ValueRef) -> ValueRef { pub fn get_dataptr(bcx: Block, fat_ptr: ValueRef) -> ValueRef {
GEPi(bcx, fat_ptr, &[0u, abi::FAT_PTR_ADDR]) GEPi(bcx, fat_ptr, &[0, abi::FAT_PTR_ADDR])
} }
/// Helper for trans that apply adjustments from `expr` to `datum`, which should be the unadjusted /// Helper for trans that apply adjustments from `expr` to `datum`, which should be the unadjusted
...@@ -366,7 +366,7 @@ fn unsize_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ...@@ -366,7 +366,7 @@ fn unsize_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
}, info), }, info),
ty::UnsizeLength(..) => ty::UnsizeLength(..) =>
into_fat_ptr(bcx, expr, datum, dest_ty, |bcx, val| { into_fat_ptr(bcx, expr, datum, dest_ty, |bcx, val| {
GEPi(bcx, val, &[0u, 0u]) GEPi(bcx, val, &[0, 0])
}, info), }, info),
ty::UnsizeVtable(..) => ty::UnsizeVtable(..) =>
into_fat_ptr(bcx, expr, datum, dest_ty, |_bcx, val| { into_fat_ptr(bcx, expr, datum, dest_ty, |_bcx, val| {
...@@ -1185,7 +1185,7 @@ fn trans_def_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ...@@ -1185,7 +1185,7 @@ fn trans_def_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
match def { match def {
def::DefVariant(tid, vid, _) => { def::DefVariant(tid, vid, _) => {
let variant_info = ty::enum_variant_with_id(bcx.tcx(), tid, vid); let variant_info = ty::enum_variant_with_id(bcx.tcx(), tid, vid);
if variant_info.args.len() > 0u { if variant_info.args.len() > 0 {
// N-ary variant. // N-ary variant.
let llfn = callee::trans_fn_ref(bcx.ccx(), vid, let llfn = callee::trans_fn_ref(bcx.ccx(), vid,
ExprId(ref_expr.id), ExprId(ref_expr.id),
......
...@@ -337,8 +337,8 @@ fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info: ...@@ -337,8 +337,8 @@ fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info:
// info points to the vtable and the second entry in the vtable is the // info points to the vtable and the second entry in the vtable is the
// dynamic size of the object. // dynamic size of the object.
let info = PointerCast(bcx, info, Type::int(bcx.ccx()).ptr_to()); let info = PointerCast(bcx, info, Type::int(bcx.ccx()).ptr_to());
let size_ptr = GEPi(bcx, info, &[1u]); let size_ptr = GEPi(bcx, info, &[1]);
let align_ptr = GEPi(bcx, info, &[2u]); let align_ptr = GEPi(bcx, info, &[2]);
(Load(bcx, size_ptr), Load(bcx, align_ptr)) (Load(bcx, size_ptr), Load(bcx, align_ptr))
} }
ty::ty_vec(_, None) | ty::ty_str => { ty::ty_vec(_, None) | ty::ty_str => {
...@@ -551,7 +551,7 @@ fn make_generic_glue<'a, 'tcx, F>(ccx: &CrateContext<'a, 'tcx>, ...@@ -551,7 +551,7 @@ fn make_generic_glue<'a, 'tcx, F>(ccx: &CrateContext<'a, 'tcx>,
update_linkage(ccx, llfn, None, OriginalTranslation); update_linkage(ccx, llfn, None, OriginalTranslation);
ccx.stats().n_glues_created.set(ccx.stats().n_glues_created.get() + 1u); ccx.stats().n_glues_created.set(ccx.stats().n_glues_created.get() + 1);
// All glue functions take values passed *by alias*; this is a // All glue functions take values passed *by alias*; this is a
// requirement since in many contexts glue is invoked indirectly and // requirement since in many contexts glue is invoked indirectly and
// the caller has no idea if it's dealing with something that can be // the caller has no idea if it's dealing with something that can be
......
...@@ -78,7 +78,7 @@ pub fn trans_impl(ccx: &CrateContext, ...@@ -78,7 +78,7 @@ pub fn trans_impl(ccx: &CrateContext,
for impl_item in impl_items.iter() { for impl_item in impl_items.iter() {
match *impl_item { match *impl_item {
ast::MethodImplItem(ref method) => { ast::MethodImplItem(ref method) => {
if method.pe_generics().ty_params.len() == 0u { if method.pe_generics().ty_params.len() == 0 {
let trans_everywhere = attr::requests_inline(&method.attrs[]); let trans_everywhere = attr::requests_inline(&method.attrs[]);
for (ref ccx, is_origin) in ccx.maybe_iter(trans_everywhere) { for (ref ccx, is_origin) in ccx.maybe_iter(trans_everywhere) {
let llfn = get_item_val(ccx, method.id); let llfn = get_item_val(ccx, method.id);
...@@ -488,7 +488,7 @@ pub fn trans_trait_callee_from_llval<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ...@@ -488,7 +488,7 @@ pub fn trans_trait_callee_from_llval<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
callee_ty.repr(ccx.tcx()), callee_ty.repr(ccx.tcx()),
vtable_index, vtable_index,
bcx.val_to_string(llpair)); bcx.val_to_string(llpair));
let llboxptr = GEPi(bcx, llpair, &[0u, abi::FAT_PTR_ADDR]); let llboxptr = GEPi(bcx, llpair, &[0, abi::FAT_PTR_ADDR]);
let llbox = Load(bcx, llboxptr); let llbox = Load(bcx, llboxptr);
let llself = PointerCast(bcx, llbox, Type::i8p(ccx)); let llself = PointerCast(bcx, llbox, Type::i8p(ccx));
...@@ -510,9 +510,9 @@ pub fn trans_trait_callee_from_llval<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ...@@ -510,9 +510,9 @@ pub fn trans_trait_callee_from_llval<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let llvtable = Load(bcx, let llvtable = Load(bcx,
PointerCast(bcx, PointerCast(bcx,
GEPi(bcx, llpair, GEPi(bcx, llpair,
&[0u, abi::FAT_PTR_EXTRA]), &[0, abi::FAT_PTR_EXTRA]),
Type::vtable(ccx).ptr_to().ptr_to())); Type::vtable(ccx).ptr_to().ptr_to()));
let mptr = Load(bcx, GEPi(bcx, llvtable, &[0u, vtable_index + VTABLE_OFFSET])); let mptr = Load(bcx, GEPi(bcx, llvtable, &[0, vtable_index + VTABLE_OFFSET]));
let mptr = PointerCast(bcx, mptr, llcallee_ty.ptr_to()); let mptr = PointerCast(bcx, mptr, llcallee_ty.ptr_to());
return Callee { return Callee {
...@@ -877,13 +877,13 @@ pub fn trans_trait_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ...@@ -877,13 +877,13 @@ pub fn trans_trait_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let llbox_ty = type_of(bcx.ccx(), datum_ty); let llbox_ty = type_of(bcx.ccx(), datum_ty);
// Store the pointer into the first half of pair. // Store the pointer into the first half of pair.
let llboxdest = GEPi(bcx, lldest, &[0u, abi::FAT_PTR_ADDR]); let llboxdest = GEPi(bcx, lldest, &[0, abi::FAT_PTR_ADDR]);
let llboxdest = PointerCast(bcx, llboxdest, llbox_ty.ptr_to()); let llboxdest = PointerCast(bcx, llboxdest, llbox_ty.ptr_to());
bcx = datum.store_to(bcx, llboxdest); bcx = datum.store_to(bcx, llboxdest);
// Store the vtable into the second half of pair. // Store the vtable into the second half of pair.
let vtable = get_vtable(bcx, datum_ty, trait_ref); let vtable = get_vtable(bcx, datum_ty, trait_ref);
let llvtabledest = GEPi(bcx, lldest, &[0u, abi::FAT_PTR_EXTRA]); let llvtabledest = GEPi(bcx, lldest, &[0, abi::FAT_PTR_EXTRA]);
let llvtabledest = PointerCast(bcx, llvtabledest, val_ty(vtable).ptr_to()); let llvtabledest = PointerCast(bcx, llvtabledest, val_ty(vtable).ptr_to());
Store(bcx, vtable, llvtabledest); Store(bcx, vtable, llvtabledest);
......
...@@ -73,7 +73,7 @@ pub fn make_drop_glue_unboxed<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ...@@ -73,7 +73,7 @@ pub fn make_drop_glue_unboxed<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let unit_size = llsize_of_alloc(ccx, llty); let unit_size = llsize_of_alloc(ccx, llty);
if unit_size != 0 { if unit_size != 0 {
let len = get_len(bcx, vptr); let len = get_len(bcx, vptr);
let not_empty = ICmp(bcx, llvm::IntNE, len, C_uint(ccx, 0u)); let not_empty = ICmp(bcx, llvm::IntNE, len, C_uint(ccx, 0us));
with_cond(bcx, not_empty, |bcx| { with_cond(bcx, not_empty, |bcx| {
let llalign = C_uint(ccx, machine::llalign_of_min(ccx, llty)); let llalign = C_uint(ccx, machine::llalign_of_min(ccx, llty));
let size = Mul(bcx, C_uint(ccx, unit_size), len, DebugLoc::None); let size = Mul(bcx, C_uint(ccx, unit_size), len, DebugLoc::None);
...@@ -213,8 +213,8 @@ pub fn trans_lit_str<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ...@@ -213,8 +213,8 @@ pub fn trans_lit_str<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let llbytes = C_uint(bcx.ccx(), bytes); let llbytes = C_uint(bcx.ccx(), bytes);
let llcstr = C_cstr(bcx.ccx(), str_lit, false); let llcstr = C_cstr(bcx.ccx(), str_lit, false);
let llcstr = consts::ptrcast(llcstr, Type::i8p(bcx.ccx())); let llcstr = consts::ptrcast(llcstr, Type::i8p(bcx.ccx()));
Store(bcx, llcstr, GEPi(bcx, lldest, &[0u, abi::FAT_PTR_ADDR])); Store(bcx, llcstr, GEPi(bcx, lldest, &[0, abi::FAT_PTR_ADDR]));
Store(bcx, llbytes, GEPi(bcx, lldest, &[0u, abi::FAT_PTR_EXTRA])); Store(bcx, llbytes, GEPi(bcx, lldest, &[0, abi::FAT_PTR_EXTRA]));
bcx bcx
} }
} }
...@@ -375,8 +375,8 @@ pub fn get_fixed_base_and_len(bcx: Block, ...@@ -375,8 +375,8 @@ pub fn get_fixed_base_and_len(bcx: Block,
fn get_slice_base_and_len(bcx: Block, fn get_slice_base_and_len(bcx: Block,
llval: ValueRef) llval: ValueRef)
-> (ValueRef, ValueRef) { -> (ValueRef, ValueRef) {
let base = Load(bcx, GEPi(bcx, llval, &[0u, abi::FAT_PTR_ADDR])); let base = Load(bcx, GEPi(bcx, llval, &[0, abi::FAT_PTR_ADDR]));
let len = Load(bcx, GEPi(bcx, llval, &[0u, abi::FAT_PTR_EXTRA])); let len = Load(bcx, GEPi(bcx, llval, &[0, abi::FAT_PTR_EXTRA]));
(base, len) (base, len)
} }
...@@ -400,7 +400,7 @@ pub fn get_base_and_len(bcx: Block, ...@@ -400,7 +400,7 @@ pub fn get_base_and_len(bcx: Block,
ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty, ..}) => match ty.sty { ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty, ..}) => match ty.sty {
ty::ty_vec(_, None) | ty::ty_str => get_slice_base_and_len(bcx, llval), ty::ty_vec(_, None) | ty::ty_str => get_slice_base_and_len(bcx, llval),
ty::ty_vec(_, Some(n)) => { ty::ty_vec(_, Some(n)) => {
let base = GEPi(bcx, Load(bcx, llval), &[0u, 0u]); let base = GEPi(bcx, Load(bcx, llval), &[0, 0]);
(base, C_uint(ccx, n)) (base, C_uint(ccx, n))
} }
_ => ccx.sess().bug("unexpected type in get_base_and_len"), _ => ccx.sess().bug("unexpected type in get_base_and_len"),
...@@ -430,7 +430,7 @@ pub fn iter_vec_loop<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, ...@@ -430,7 +430,7 @@ pub fn iter_vec_loop<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
let loop_counter = { let loop_counter = {
// i = 0 // i = 0
let i = alloca(loop_bcx, bcx.ccx().int_type(), "__i"); let i = alloca(loop_bcx, bcx.ccx().int_type(), "__i");
Store(loop_bcx, C_uint(bcx.ccx(), 0u), i); Store(loop_bcx, C_uint(bcx.ccx(), 0us), i);
Br(loop_bcx, cond_bcx.llbb, DebugLoc::None); Br(loop_bcx, cond_bcx.llbb, DebugLoc::None);
i i
...@@ -458,7 +458,7 @@ pub fn iter_vec_loop<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, ...@@ -458,7 +458,7 @@ pub fn iter_vec_loop<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
{ // i += 1 { // i += 1
let i = Load(inc_bcx, loop_counter); let i = Load(inc_bcx, loop_counter);
let plusone = Add(inc_bcx, i, C_uint(bcx.ccx(), 1u), DebugLoc::None); let plusone = Add(inc_bcx, i, C_uint(bcx.ccx(), 1us), DebugLoc::None);
Store(inc_bcx, plusone, loop_counter); Store(inc_bcx, plusone, loop_counter);
Br(inc_bcx, cond_bcx.llbb, DebugLoc::None); Br(inc_bcx, cond_bcx.llbb, DebugLoc::None);
......
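The hunks above all follow one pattern: the deprecated `u` integer suffix is dropped where the type can be inferred (e.g. `0u` → `0`, `1u` → `1`), or replaced with the transitional `us` suffix where an explicit unsigned machine-sized integer is still needed (e.g. `C_uint(ccx, 0us)`). As a hedged illustration only — not part of this commit — the same pattern in today's Rust syntax, where `us` has since become `usize`, looks like this:

```rust
fn main() {
    // Unsuffixed literal: the type is inferred as usize because `i` is
    // compared against a slice length (what the diff does for loop counters).
    let cls = [1, 2, 3];
    let mut i = 0;              // previously written `0u`
    while i < cls.len() {
        i += 1;                 // previously written `1u`
    }

    // Where inference alone is not enough, the type is spelled out
    // explicitly: `0us` in the transitional syntax of this commit,
    // `0usize` in modern Rust.
    let n: usize = 0;
    assert_eq!(n, 0usize);
}
```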