Commit 3d6f5100 authored by bors

Auto merge of #21730 - Manishearth:rollup, r=alexcrichton

Should clear our backlog of rollups from the queue
......@@ -516,7 +516,7 @@ Olivier Saut <osaut@airpost.net>
Olle Jonsson <olle.jonsson@gmail.com>
Or Brostovski <tohava@gmail.com>
Oren Hazi <oren.hazi@gmail.com>
Orphée Lafond-Lummis <o@orftz.com>
Orpheus Lummis <o@orpheuslummis.com>
P1start <rewi-github@whanau.org>
Pablo Brasero <pablo@pablobm.com>
Palmer Cox <p@lmercox.com>
......
......@@ -50,11 +50,11 @@ example, if it's 2014, and you change a Rust file that was created in
# Coordination and communication
Get feedback from other developers on
[discuss.rust-lang.org][discuss], and
[internals.rust-lang.org][internals], and
[#rust-internals][pound-rust-internals].
[pound-rust-internals]: http://chat.mibbit.com/?server=irc.mozilla.org&channel=%23rust-internals
[discuss]: http://discuss.rust-lang.org
[internals]: http://internals.rust-lang.org
For more details, please refer to
[Note-development-policy](https://github.com/rust-lang/rust/wiki/Note-development-policy).
......@@ -30,9 +30,9 @@ documentation.
To build from the [tarball] do:
$ curl -O https://static.rust-lang.org/dist/rust-nightly.tar.gz
$ tar -xzf rust-nightly.tar.gz
$ cd rust-nightly
$ curl -O https://static.rust-lang.org/dist/rustc-nightly-src.tar.gz
$ tar -xzf rustc-nightly-src.tar.gz
$ cd rustc-nightly
Or to build from the [repo] do:
......@@ -80,7 +80,7 @@ $ pacman -S base-devel
$ make && make install
[repo]: https://github.com/rust-lang/rust
[tarball]: https://static.rust-lang.org/dist/rust-nightly.tar.gz
[tarball]: https://static.rust-lang.org/dist/rustc-nightly-src.tar.gz
[trpl]: http://doc.rust-lang.org/book/index.html
## Notes
......@@ -112,11 +112,11 @@ The Rust community congregates in a few places:
* [StackOverflow] - Get help here.
* [/r/rust] - General discussion.
* [discuss.rust-lang.org] - For development of the Rust language itself.
* [internals.rust-lang.org] - For development of the Rust language itself.
[StackOverflow]: http://stackoverflow.com/questions/tagged/rust
[/r/rust]: http://reddit.com/r/rust
[discuss.rust-lang.org]: http://discuss.rust-lang.org/
[internals.rust-lang.org]: http://internals.rust-lang.org/
## License
......
......@@ -41,7 +41,7 @@ problem might reveal someone who has asked it before!
There is an active [subreddit](http://reddit.com/r/rust) with lots of
discussion about Rust.
There is also a [developer forum](http://discuss.rust-lang.org/), where the
There is also a [developer forum](http://internals.rust-lang.org/), where the
development of Rust itself is discussed.
# Specification
......
......@@ -21,19 +21,16 @@ case $OS in
("Linux"|"FreeBSD"|"DragonFly")
BIN_SUF=
LIB_SUF=.so
break
;;
("Darwin")
BIN_SUF=
LIB_SUF=.dylib
break
;;
(*)
BIN_SUF=.exe
LIB_SUF=.dll
LIB_DIR=bin
LIB_PREFIX=
break
;;
esac
......
......@@ -8,6 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[cfg(not(test))]
use core::ptr::PtrExt;
// FIXME: #13996: mark the `allocate` and `reallocate` return value as `noalias`
......
......@@ -594,12 +594,6 @@ fn deref_mut<'a>(&'a mut self) -> &'a mut T {
/// The `UnsafeCell<T>` type is the only legal way to obtain aliasable data that is considered
/// mutable. In general, transmuting an `&T` type into an `&mut T` is considered undefined behavior.
///
/// Although it is possible to put an `UnsafeCell<T>` into static item, it is not permitted to take
/// the address of the static item if the item is not declared as mutable. This rule exists because
/// immutable static items are stored in read-only memory, and thus any attempt to mutate their
/// interior can cause segfaults. Immutable static items containing `UnsafeCell<T>` instances are
/// still useful as read-only initializers, however, so we do not forbid them altogether.
///
/// Types like `Cell<T>` and `RefCell<T>` use this type to wrap their internal data.
///
/// `UnsafeCell<T>` doesn't opt-out from any marker traits, instead, types with an `UnsafeCell<T>`
......
......@@ -447,7 +447,7 @@ mod tests {
use super::*;
fn same(fmt: &'static str, p: &[Piece<'static>]) {
let mut parser = Parser::new(fmt);
let parser = Parser::new(fmt);
assert!(p == parser.collect::<Vec<Piece<'static>>>());
}
......
......@@ -273,6 +273,7 @@
#[cfg(windows)] pub use funcs::extra::kernel32::{GetOverlappedResult, ConnectNamedPipe};
#[cfg(windows)] pub use funcs::extra::kernel32::{DisconnectNamedPipe, OpenProcess};
#[cfg(windows)] pub use funcs::extra::kernel32::{MoveFileExW, VirtualProtect};
#[cfg(windows)] pub use funcs::extra::kernel32::{RemoveDirectoryW};
#[cfg(windows)] pub use funcs::extra::msvcrt::{get_osfhandle, open_osfhandle};
#[cfg(windows)] pub use funcs::extra::winsock::{ioctlsocket};
......
......@@ -10,10 +10,6 @@
// Searching for information from the cstore
#![allow(non_camel_case_types)]
pub use self::found_ast::*;
use metadata::common::*;
use metadata::cstore;
use metadata::decoder;
......@@ -101,10 +97,10 @@ pub fn get_item_path(tcx: &ty::ctxt, def: ast::DefId) -> Vec<ast_map::PathElem>
r
}
pub enum found_ast<'ast> {
found(&'ast ast::InlinedItem),
found_parent(ast::DefId, &'ast ast::InlinedItem),
not_found,
pub enum FoundAst<'ast> {
Found(&'ast ast::InlinedItem),
FoundParent(ast::DefId, &'ast ast::InlinedItem),
NotFound,
}
// Finds the AST for this item in the crate metadata, if any. If the item was
......@@ -112,7 +108,7 @@ pub enum found_ast<'ast> {
// will be returned.
pub fn maybe_get_item_ast<'tcx>(tcx: &ty::ctxt<'tcx>, def: ast::DefId,
decode_inlined_item: decoder::DecodeInlinedItem)
-> found_ast<'tcx> {
-> FoundAst<'tcx> {
let cstore = &tcx.sess.cstore;
let cdata = cstore.get_crate_data(def.krate);
decoder::maybe_get_item_ast(&*cdata, tcx, def.node, decode_inlined_item)
......
......@@ -693,23 +693,23 @@ pub fn get_item_path(cdata: Cmd, id: ast::NodeId) -> Vec<ast_map::PathElem> {
pub fn maybe_get_item_ast<'tcx>(cdata: Cmd, tcx: &ty::ctxt<'tcx>, id: ast::NodeId,
mut decode_inlined_item: DecodeInlinedItem)
-> csearch::found_ast<'tcx> {
-> csearch::FoundAst<'tcx> {
debug!("Looking up item: {}", id);
let item_doc = lookup_item(id, cdata.data());
let path = item_path(item_doc).init().to_vec();
match decode_inlined_item(cdata, tcx, path, item_doc) {
Ok(ii) => csearch::found(ii),
Ok(ii) => csearch::FoundAst::Found(ii),
Err(path) => {
match item_parent_item(item_doc) {
Some(did) => {
let did = translate_def_id(cdata, did);
let parent_item = lookup_item(did.node, cdata.data());
match decode_inlined_item(cdata, tcx, path, parent_item) {
Ok(ii) => csearch::found_parent(did, ii),
Err(_) => csearch::not_found
Ok(ii) => csearch::FoundAst::FoundParent(did, ii),
Err(_) => csearch::FoundAst::NotFound
}
}
None => csearch::not_found
None => csearch::FoundAst::NotFound
}
}
}
......
......@@ -1577,7 +1577,7 @@ fn encode_info_for_items(ecx: &EncodeContext,
&krate.module,
&[],
ast::CRATE_NODE_ID,
ast_map::Values([].iter()).chain(None),
[].iter().cloned().chain(None),
syntax::parse::token::special_idents::invalid,
ast::Public);
......@@ -1949,7 +1949,7 @@ fn encode_misc_info(ecx: &EncodeContext,
}
// Encode reexports for the root module.
encode_reexports(ecx, rbml_w, 0, ast_map::Values([].iter()).chain(None));
encode_reexports(ecx, rbml_w, 0, [].iter().cloned().chain(None));
rbml_w.end_tag();
rbml_w.end_tag();
......
......@@ -130,7 +130,7 @@ pub fn decode_inlined_item<'tcx>(cdata: &cstore::crate_metadata,
debug!("> Decoding inlined fn: {:?}::?",
{
// Do an Option dance to use the path after it is moved below.
let s = ast_map::path_to_string(ast_map::Values(path.iter()));
let s = ast_map::path_to_string(path.iter().cloned());
path_as_str = Some(s);
path_as_str.as_ref().map(|x| &x[])
});
......
......@@ -133,7 +133,7 @@ fn variant_expr<'a>(variants: &'a [P<ast::Variant>], id: ast::NodeId)
}
let expr_id = match csearch::maybe_get_item_ast(tcx, enum_def,
box |a, b, c, d| astencode::decode_inlined_item(a, b, c, d)) {
csearch::found(&ast::IIItem(ref item)) => match item.node {
csearch::FoundAst::Found(&ast::IIItem(ref item)) => match item.node {
ast::ItemEnum(ast::EnumDef { ref variants }, _) => {
// NOTE this doesn't do the right thing, it compares inlined
// NodeId's to the original variant_def's NodeId, but they
......@@ -173,7 +173,7 @@ pub fn lookup_const_by_id<'a>(tcx: &'a ty::ctxt, def_id: ast::DefId)
}
let expr_id = match csearch::maybe_get_item_ast(tcx, def_id,
box |a, b, c, d| astencode::decode_inlined_item(a, b, c, d)) {
csearch::found(&ast::IIItem(ref item)) => match item.node {
csearch::FoundAst::Found(&ast::IIItem(ref item)) => match item.node {
ast::ItemConst(_, ref const_expr) => Some(const_expr.id),
_ => None
},
......
......@@ -5264,7 +5264,7 @@ pub fn with_path<T, F>(cx: &ctxt, id: ast::DefId, f: F) -> T where
if id.krate == ast::LOCAL_CRATE {
cx.map.with_path(id.node, f)
} else {
f(ast_map::Values(csearch::get_item_path(cx, id).iter()).chain(None))
f(csearch::get_item_path(cx, id).iter().cloned().chain(None))
}
}
......
......@@ -151,7 +151,6 @@ fn minimize_rpaths(rpaths: &[String]) -> Vec<String> {
mod test {
use super::{RPathConfig};
use super::{minimize_rpaths, rpaths_to_flags, get_rpath_relative_to_output};
use syntax::abi;
#[test]
fn test_rpaths_to_flags() {
......
......@@ -37,7 +37,6 @@
use serialize::hex::ToHex;
use syntax::ast;
use syntax::ast_map::{PathElem, PathElems, PathName};
use syntax::ast_map;
use syntax::attr::AttrMetaMethods;
use syntax::codemap::Span;
use syntax::parse::token;
......@@ -339,7 +338,7 @@ pub fn mangle_internal_name_by_type_and_seq<'a, 'tcx>(ccx: &CrateContext<'a, 'tc
let path = [PathName(token::intern(&s[])),
gensym_name(name)];
let hash = get_symbol_hash(ccx, t);
mangle(ast_map::Values(path.iter()), Some(&hash[]))
mangle(path.iter().cloned(), Some(&hash[]))
}
pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> String {
......
......@@ -1012,6 +1012,9 @@ unsafe fn configure_llvm(sess: &Session) {
if sess.time_llvm_passes() { add("-time-passes"); }
if sess.print_llvm_passes() { add("-debug-pass=Structure"); }
// FIXME #21627 disable faulty FastISel on AArch64 (even for -O0)
if sess.target.target.arch.as_slice() == "aarch64" { add("-fast-isel=0"); }
for arg in sess.opts.cg.llvm_args.iter() {
add(&(*arg)[]);
}
......
......@@ -43,11 +43,11 @@ fn instantiate_inline(ccx: &CrateContext, fn_id: ast::DefId)
box |a,b,c,d| astencode::decode_inlined_item(a, b, c, d));
let inline_def = match csearch_result {
csearch::not_found => {
csearch::FoundAst::NotFound => {
ccx.external().borrow_mut().insert(fn_id, None);
return None;
}
csearch::found(&ast::IIItem(ref item)) => {
csearch::FoundAst::Found(&ast::IIItem(ref item)) => {
ccx.external().borrow_mut().insert(fn_id, Some(item.id));
ccx.external_srcs().borrow_mut().insert(item.id, fn_id);
......@@ -90,12 +90,12 @@ fn instantiate_inline(ccx: &CrateContext, fn_id: ast::DefId)
local_def(item.id)
}
csearch::found(&ast::IIForeign(ref item)) => {
csearch::FoundAst::Found(&ast::IIForeign(ref item)) => {
ccx.external().borrow_mut().insert(fn_id, Some(item.id));
ccx.external_srcs().borrow_mut().insert(item.id, fn_id);
local_def(item.id)
}
csearch::found_parent(parent_id, &ast::IIItem(ref item)) => {
csearch::FoundAst::FoundParent(parent_id, &ast::IIItem(ref item)) => {
ccx.external().borrow_mut().insert(parent_id, Some(item.id));
ccx.external_srcs().borrow_mut().insert(item.id, parent_id);
......@@ -124,11 +124,11 @@ fn instantiate_inline(ccx: &CrateContext, fn_id: ast::DefId)
trans_item(ccx, &**item);
local_def(my_id)
}
csearch::found_parent(_, _) => {
ccx.sess().bug("maybe_get_item_ast returned a found_parent \
csearch::FoundAst::FoundParent(_, _) => {
ccx.sess().bug("maybe_get_item_ast returned a FoundParent \
with a non-item parent");
}
csearch::found(&ast::IITraitItem(_, ref trait_item)) => {
csearch::FoundAst::Found(&ast::IITraitItem(_, ref trait_item)) => {
match *trait_item {
ast::RequiredMethod(_) => ccx.sess().bug("found RequiredMethod IITraitItem"),
ast::ProvidedMethod(ref mth) => {
......@@ -147,7 +147,7 @@ fn instantiate_inline(ccx: &CrateContext, fn_id: ast::DefId)
}
}
}
csearch::found(&ast::IIImplItem(impl_did, ref impl_item)) => {
csearch::FoundAst::Found(&ast::IIImplItem(impl_did, ref impl_item)) => {
match *impl_item {
ast::MethodImplItem(ref mth) => {
ccx.external().borrow_mut().insert(fn_id, Some(mth.id));
......
......@@ -97,7 +97,7 @@
//! };
//!
//! // Serialize using `json::encode`
//! let encoded = json::encode(&object);
//! let encoded = json::encode(&object).unwrap();
//!
//! // Deserialize using `json::decode`
//! let decoded: TestStruct = json::decode(encoded.as_slice()).unwrap();
......@@ -143,7 +143,7 @@
//! uid: 1,
//! dsc: "test".to_string(),
//! val: num.to_json(),
//! });
//! }).unwrap();
//! println!("data: {}", data);
//! // data: {"uid":1,"dsc":"test","val":"0.0001+12.539j"};
//! }
......@@ -316,13 +316,13 @@ pub fn decode<T: ::Decodable>(s: &str) -> DecodeResult<T> {
}
/// Shortcut function to encode a `T` into a JSON `String`
pub fn encode<T: ::Encodable>(object: &T) -> string::String {
pub fn encode<T: ::Encodable>(object: &T) -> Result<string::String, EncoderError> {
let mut s = String::new();
{
let mut encoder = Encoder::new(&mut s);
let _ = object.encode(&mut encoder);
try!(object.encode(&mut encoder));
}
s
Ok(s)
}
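With this signature change, callers get a `Result` back instead of a bare `String`; a minimal usage sketch, assuming the in-tree `serialize` crate of this era (published externally as rustc-serialize) and the `RustcEncodable` derive used by the tests below:

extern crate serialize;

use serialize::json;

#[derive(RustcEncodable)]
struct Point { x: i32, y: i32 }

fn main() {
    let p = Point { x: 1, y: 2 };
    // encode now returns Result<String, EncoderError>, so unwrap it (or
    // propagate with try!) instead of using the String directly.
    let s: String = json::encode(&p).unwrap();
    println!("{}", s); // e.g. {"x":1,"y":2}
}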
impl fmt::Display for ErrorCode {
......@@ -536,7 +536,6 @@ fn emit_str(&mut self, v: &str) -> EncodeResult {
fn emit_enum<F>(&mut self, _name: &str, f: F) -> EncodeResult where
F: FnOnce(&mut Encoder<'a>) -> EncodeResult,
{
if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); }
f(self)
}
......@@ -550,10 +549,10 @@ fn emit_enum_variant<F>(&mut self,
// enums are encoded as strings or objects
// Bunny => "Bunny"
// Kangaroo(34,"William") => {"variant": "Kangaroo", "fields": [34,"William"]}
if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); }
if cnt == 0 {
escape_str(self.writer, name)
} else {
if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); }
try!(write!(self.writer, "{{\"variant\":"));
try!(escape_str(self.writer, name));
try!(write!(self.writer, ",\"fields\":["));
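A small standalone sketch of the two encodings described in the comment above (illustrative; same era-specific `serialize` crate and `RustcEncodable` derive as the tests further down, expected output taken from that comment):

extern crate serialize;

use serialize::json;

#[derive(RustcEncodable)]
enum Animal {
    Bunny,
    Kangaroo(u32, String),
}

fn main() {
    // A unit-like variant encodes as a bare JSON string: "Bunny"
    println!("{}", json::encode(&Animal::Bunny).unwrap());
    // A data-carrying variant uses the object form:
    // {"variant":"Kangaroo","fields":[34,"William"]}
    println!("{}", json::encode(&Animal::Kangaroo(34, "William".to_string())).unwrap());
}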
......@@ -785,7 +784,6 @@ fn emit_str(&mut self, v: &str) -> EncodeResult {
fn emit_enum<F>(&mut self, _name: &str, f: F) -> EncodeResult where
F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult,
{
if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); }
f(self)
}
......@@ -797,10 +795,10 @@ fn emit_enum_variant<F>(&mut self,
-> EncodeResult where
F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult,
{
if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); }
if cnt == 0 {
escape_str(self.writer, name)
} else {
if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); }
try!(write!(self.writer, "{{\n"));
self.curr_indent += self.indent;
try!(spaces(self.writer, self.curr_indent));
......@@ -2618,7 +2616,7 @@ mod tests {
use super::JsonEvent::*;
use super::{Json, from_str, DecodeResult, DecoderError, JsonEvent, Parser,
StackElement, Stack, Decoder, Encoder, EncoderError};
use std::{i64, u64, f32, f64, old_io};
use std::{i64, u64, f32, f64};
use std::collections::BTreeMap;
use std::num::Float;
use std::string;
......@@ -3537,6 +3535,24 @@ fn indents(source: &str) -> uint {
}
}
#[test]
fn test_hashmap_with_enum_key() {
use std::collections::HashMap;
use json;
#[derive(RustcEncodable, Eq, Hash, PartialEq, RustcDecodable, Show)]
enum Enum {
Foo,
#[allow(dead_code)]
Bar,
}
let mut map = HashMap::new();
map.insert(Enum::Foo, 0);
let result = json::encode(&map).unwrap();
assert_eq!(&result[], r#"{"Foo":0}"#);
let decoded: HashMap<Enum, _> = json::decode(result.as_slice()).unwrap();
assert_eq!(map, decoded);
}
#[test]
fn test_hashmap_with_numeric_key_can_handle_double_quote_delimited_key() {
use std::collections::HashMap;
......@@ -3928,7 +3944,6 @@ fn test_to_json() {
#[test]
fn test_encode_hashmap_with_arbitrary_key() {
use std::str::from_utf8;
use std::old_io::Writer;
use std::collections::HashMap;
use std::fmt;
......
......@@ -342,7 +342,7 @@ pub fn chmod(p: &Path, mode: uint) -> IoResult<()> {
pub fn rmdir(p: &Path) -> IoResult<()> {
let p = try!(to_utf16(p));
mkerr_libc(unsafe { libc::wrmdir(p.as_ptr()) })
super::mkerr_winbool(unsafe { libc::RemoveDirectoryW(p.as_ptr()) })
}
pub fn chown(_p: &Path, _uid: int, _gid: int) -> IoResult<()> {
......
......@@ -129,6 +129,9 @@ fn drop(&mut self) {
}
}
unsafe impl Send for Event {}
unsafe impl Sync for Event {}
struct Inner {
handle: libc::HANDLE,
lock: Mutex<()>,
......@@ -156,6 +159,9 @@ fn drop(&mut self) {
}
}
unsafe impl Send for Inner {}
unsafe impl Sync for Inner {}
unsafe fn pipe(name: *const u16, init: bool) -> libc::HANDLE {
libc::CreateNamedPipeW(
name,
......@@ -220,9 +226,6 @@ pub struct UnixStream {
write_deadline: u64,
}
unsafe impl Send for UnixStream {}
unsafe impl Sync for UnixStream {}
impl UnixStream {
fn try_connect(p: *const u16) -> Option<libc::HANDLE> {
// Note that most of this is lifted from the libuv implementation.
......@@ -615,17 +618,11 @@ pub struct UnixAcceptor {
deadline: u64,
}
unsafe impl Send for UnixAcceptor {}
unsafe impl Sync for UnixAcceptor {}
struct AcceptorState {
abort: Event,
closed: AtomicBool,
}
unsafe impl Send for AcceptorState {}
unsafe impl Sync for AcceptorState {}
impl UnixAcceptor {
pub fn accept(&mut self) -> IoResult<UnixStream> {
// This function has some funky implementation details when working with
......
......@@ -116,9 +116,6 @@ pub struct TcpAcceptor {
deadline: u64,
}
unsafe impl Send for TcpAcceptor {}
unsafe impl Sync for TcpAcceptor {}
struct AcceptorInner {
listener: TcpListener,
abort: Event,
......@@ -126,7 +123,6 @@ struct AcceptorInner {
closed: AtomicBool,
}
unsafe impl Send for AcceptorInner {}
unsafe impl Sync for AcceptorInner {}
impl TcpAcceptor {
......
......@@ -75,21 +75,8 @@ fn next(&mut self) -> Option<PathElem> {
}
}
// HACK(eddyb) move this into libstd (value wrapper for slice::Iter).
#[derive(Clone)]
pub struct Values<'a, T:'a>(pub slice::Iter<'a, T>);
impl<'a, T: Copy> Iterator for Values<'a, T> {
type Item = T;
fn next(&mut self) -> Option<T> {
let &mut Values(ref mut items) = self;
items.next().map(|&x| x)
}
}
/// The type of the iterator used by with_path.
pub type PathElems<'a, 'b> = iter::Chain<Values<'a, PathElem>, LinkedPath<'b>>;
pub type PathElems<'a, 'b> = iter::Chain<iter::Cloned<slice::Iter<'a, PathElem>>, LinkedPath<'b>>;
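The hand-written `Values` adaptor removed above is now expressed with the standard `cloned()` combinator; a tiny standalone sketch of the equivalence (plain integers here, not `PathElem`):

fn main() {
    let elems = [1u32, 2, 3];
    // `.cloned()` turns an iterator of &T into an iterator of T, which is
    // exactly what the removed Values wrapper did by hand with
    // `items.next().map(|&x| x)`.
    let by_hand: Vec<u32> = elems.iter().map(|&x| x).collect();
    let with_cloned: Vec<u32> = elems.iter().cloned().collect();
    assert_eq!(by_hand, with_cloned);

    // The `.chain(...)` part of the PathElems alias composes unchanged:
    let chained: Vec<u32> = elems.iter().cloned().chain(Some(4).into_iter()).collect();
    assert_eq!(chained, vec![1, 2, 3, 4]);
}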
pub fn path_to_string<PI: Iterator<Item=PathElem>>(path: PI) -> String {
let itr = token::get_ident_interner();
......@@ -101,7 +88,7 @@ pub fn path_to_string<PI: Iterator<Item=PathElem>>(path: PI) -> String {
}
s.push_str(&e[]);
s
}).to_string()
})
}
#[derive(Copy, Show)]
......@@ -458,9 +445,9 @@ fn with_path_next<T, F>(&self, id: NodeId, next: LinkedPath, f: F) -> T where
if parent == id {
match self.find_entry(id) {
Some(RootInlinedParent(data)) => {
f(Values(data.path.iter()).chain(next))
f(data.path.iter().cloned().chain(next))
}
_ => f(Values([].iter()).chain(next))
_ => f([].iter().cloned().chain(next))
}
} else {
self.with_path_next(parent, Some(&LinkedPathNode {
......
......@@ -670,20 +670,13 @@ pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool {
// are two arrays of segments equal when compared unhygienically?
pub fn segments_name_eq(a : &[ast::PathSegment], b : &[ast::PathSegment]) -> bool {
if a.len() != b.len() {
false
} else {
for (idx,seg) in a.iter().enumerate() {
if seg.identifier.name != b[idx].identifier.name
// FIXME #7743: ident -> name problems in lifetime comparison?
// can types contain idents?
|| seg.parameters != b[idx].parameters
{
return false;
}
}
true
}
a.len() == b.len() &&
a.iter().zip(b.iter()).all(|(s, t)| {
s.identifier.name == t.identifier.name &&
// FIXME #7743: ident -> name problems in lifetime comparison?
// can types contain idents?
s.parameters == t.parameters
})
}
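The explicit length check plus indexed loop is collapsed into the `zip`/`all` combinator form; a tiny standalone sketch of the same pattern on plain data (illustrative, not AST types):

// Two slices match under a predicate iff the lengths agree and every
// zipped pair satisfies it; `all` short-circuits on the first mismatch.
fn eq_by_first(a: &[(u32, &str)], b: &[(u32, &str)]) -> bool {
    a.len() == b.len() &&
        a.iter().zip(b.iter()).all(|(x, y)| x.0 == y.0)
}

fn main() {
    assert!(eq_by_first(&[(1, "a"), (2, "b")], &[(1, "z"), (2, "y")]));
    assert!(!eq_by_first(&[(1, "a")], &[(2, "a")]));
    assert!(!eq_by_first(&[(1, "a")], &[]));
}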
/// Returns true if this literal is a string and false otherwise.
......
......@@ -1433,15 +1433,12 @@ mod test {
use super::{pattern_bindings, expand_crate};
use super::{PatIdentFinder, IdentRenamer, PatIdentRenamer, ExpansionConfig};
use ast;
use ast::{Attribute_, AttrOuter, MetaWord, Name};
use attr;
use ast::Name;
use codemap;
use codemap::Spanned;
use ext::mtwt;
use fold::Folder;
use parse;
use parse::token;
use ptr::P;
use util::parser_testing::{string_to_parser};
use util::parser_testing::{string_to_pat, string_to_crate, strs_to_idents};
use visit;
......
......@@ -854,7 +854,7 @@ fn string_to_tts_macro () {
#[test]
fn string_to_tts_1 () {
let tts = string_to_tts("fn a (b : i32) { b; }".to_string());
assert_eq!(json::encode(&tts),
assert_eq!(json::encode(&tts).unwrap(),
"[\
{\
\"variant\":\"TtToken\",\
......
......@@ -2975,7 +2975,6 @@ mod test {
use ast_util;
use codemap;
use parse::token;
use ptr::P;
#[test]
fn test_fun_to_string() {
......
......@@ -1126,9 +1126,8 @@ pub fn benchmark<F>(f: F) -> BenchSamples where F: FnMut(&mut Bencher) {
mod tests {
use test::{TrFailed, TrIgnored, TrOk, filter_tests, parse_opts,
TestDesc, TestDescAndFn, TestOpts, run_test,
Metric, MetricMap,
MetricMap,
StaticTestName, DynTestName, DynTestFn, ShouldFail};
use std::old_io::TempDir;
use std::thunk::Thunk;
use std::sync::mpsc::channel;
......
......@@ -29,6 +29,7 @@
font-size: 16px;
background: none repeat scroll 0% 0% #FFF;
box-sizing: border-box;
-webkit-overflow-scrolling: touch;
}
#page-wrapper {
......@@ -40,6 +41,7 @@
bottom: 0px;
box-sizing: border-box;
background: none repeat scroll 0% 0% #FFF;
-webkit-overflow-scrolling: touch;
}
#page {
......
......@@ -24,7 +24,7 @@ struct A {
fn main() {
let obj = A { foo: box [true, false] };
let s = json::encode(&obj);
let s = json::encode(&obj).unwrap();
let obj2: A = json::decode(s.as_slice()).unwrap();
assert!(obj.foo == obj2.foo);
}
......@@ -35,7 +35,7 @@ fn main() {
foo: Cell::new(true),
bar: RefCell::new( A { baz: 2 } )
};
let s = json::encode(&obj);
let s = json::encode(&obj).unwrap();
let obj2: B = json::decode(s.as_slice()).unwrap();
assert!(obj.foo.get() == obj2.foo.get());
assert!(obj.bar.borrow().baz == obj2.bar.borrow().baz);
......
......@@ -20,7 +20,7 @@
pub fn main() {
let obj = UnitLikeStruct;
let json_str: String = json::encode(&obj);
let json_str: String = json::encode(&obj).unwrap();
let json_object = json::from_str(json_str.as_slice());
let mut decoder = json::Decoder::new(json_object.unwrap());
......