Commit 3b1862a8 authored by Brian Anderson

Don't allow newtype structs to be dereferenced. #6246

Parent 18cef3fa
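The hunks below apply one mechanical change across the tree: a single-field tuple struct (a "newtype") may no longer be reached through `*`/`**`, so each call site either destructures the wrapper with a `let` pattern or goes through a small accessor method. A minimal sketch of the before/after shape, written in present-day Rust with invented names (it is not code from this diff):

// --- illustrative sketch, not part of the diff ---
#[derive(Clone, Copy)]
struct Counter(u32); // a "newtype": a tuple struct with one field

impl Counter {
    // After this change the inner field is reached by destructuring
    // the wrapper instead of writing `**self`.
    fn get(&self) -> u32 {
        let Counter(v) = *self; // bind the single field (u32 is Copy)
        v
    }
}

fn main() {
    let c = Counter(7);
    // let n = *c;         // the implicit newtype deref being removed
    let Counter(n) = c;    // the replacement: pattern-match the wrapper
    assert_eq!(n, c.get());
}
// --- end sketch ---

The same rewrite is repeated below for Sem, MetricMap, WorkMap, the borrow-check and graph index newtypes, the LLVM Value/BasicBlock/Use wrappers, the inference combiners (Coerce, Glb, Lub, Sub), and the rustdoc formatting newtypes.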
......@@ -107,7 +107,8 @@ fn new(count: int, q: Q) -> Sem<Q> {
pub fn acquire(&self) {
unsafe {
let mut waiter_nobe = None;
(**self).with(|state| {
let Sem(ref lock) = *self;
lock.with(|state| {
state.count -= 1;
if state.count < 0 {
// Create waiter nobe, enqueue ourself, and tell
......@@ -126,7 +127,8 @@ pub fn acquire(&self) {
pub fn release(&self) {
unsafe {
(**self).with(|state| {
let Sem(ref lock) = *self;
lock.with(|state| {
state.count += 1;
if state.count <= 0 {
state.waiters.signal();
......@@ -206,7 +208,8 @@ pub fn wait_on(&self, condvar_id: uint) {
let mut out_of_bounds = None;
// Release lock, 'atomically' enqueuing ourselves in so doing.
unsafe {
(**self.sem).with(|state| {
let Sem(ref queue) = *self.sem;
queue.with(|state| {
if condvar_id < state.blocked.len() {
// Drop the lock.
state.count += 1;
......@@ -248,7 +251,8 @@ pub fn signal_on(&self, condvar_id: uint) -> bool {
unsafe {
let mut out_of_bounds = None;
let mut result = false;
(**self.sem).with(|state| {
let Sem(ref lock) = *self.sem;
lock.with(|state| {
if condvar_id < state.blocked.len() {
result = state.blocked[condvar_id].signal();
} else {
......@@ -270,7 +274,8 @@ pub fn broadcast_on(&self, condvar_id: uint) -> uint {
let mut out_of_bounds = None;
let mut queue = None;
unsafe {
(**self.sem).with(|state| {
let Sem(ref lock) = *self.sem;
lock.with(|state| {
if condvar_id < state.blocked.len() {
// To avoid :broadcast_heavy, we make a new waitqueue,
// swap it out with the old one, and broadcast on the
......@@ -336,7 +341,8 @@ pub struct Semaphore { priv sem: Sem<()> }
impl Clone for Semaphore {
/// Create a new handle to the semaphore.
fn clone(&self) -> Semaphore {
Semaphore { sem: Sem((*self.sem).clone()) }
let Sem(ref lock) = self.sem;
Semaphore { sem: Sem(lock.clone()) }
}
}
......@@ -378,7 +384,9 @@ pub fn access<U>(&self, blk: || -> U) -> U { (&self.sem).access(blk) }
pub struct Mutex { priv sem: Sem<~[WaitQueue]> }
impl Clone for Mutex {
/// Create a new handle to the mutex.
fn clone(&self) -> Mutex { Mutex { sem: Sem((*self.sem).clone()) } }
fn clone(&self) -> Mutex {
let Sem(ref queue) = self.sem;
Mutex { sem: Sem(queue.clone()) } }
}
impl Mutex {
......@@ -467,8 +475,9 @@ pub fn new_with_condvars(num_condvars: uint) -> RWLock {
/// Create a new handle to the rwlock.
pub fn clone(&self) -> RWLock {
let Sem(ref access_lock_queue) = self.access_lock;
RWLock { order_lock: (&(self.order_lock)).clone(),
access_lock: Sem((*self.access_lock).clone()),
access_lock: Sem(access_lock_queue.clone()),
state: self.state.clone() }
}
......
......@@ -137,7 +137,8 @@ pub struct Metric {
impl Clone for MetricMap {
fn clone(&self) -> MetricMap {
MetricMap((**self).clone())
let MetricMap(ref map) = *self;
MetricMap(map.clone())
}
}
......@@ -584,6 +585,7 @@ pub fn write_run_finish(&mut self,
}
pub fn fmt_metrics(mm: &MetricMap) -> ~str {
let MetricMap(ref mm) = *mm;
let v : ~[~str] = mm.iter()
.map(|(k,v)| format!("{}: {} (+/- {})",
*k,
......@@ -622,6 +624,7 @@ fn callback<T: Writer>(event: &TestEvent, st: &mut ConsoleTestState<T>) {
TrIgnored => st.ignored += 1,
TrMetrics(mm) => {
let tname = test.name.to_str();
let MetricMap(mm) = mm;
for (k,v) in mm.iter() {
st.metrics.insert_metric(tname + "." + *k,
v.value, v.noise);
......@@ -950,7 +953,8 @@ pub fn load(p: &Path) -> MetricMap {
/// Write MetricDiff to a file.
pub fn save(&self, p: &Path) {
let mut file = File::create(p);
self.to_json().to_pretty_writer(&mut file)
let MetricMap(ref map) = *self;
map.to_json().to_pretty_writer(&mut file)
}
/// Compare against another MetricMap. Optionally compare all
......@@ -962,8 +966,10 @@ pub fn save(&self, p: &Path) {
pub fn compare_to_old(&self, old: &MetricMap,
noise_pct: Option<f64>) -> MetricDiff {
let mut diff : MetricDiff = TreeMap::new();
let MetricMap(ref selfmap) = *self;
let MetricMap(ref old) = *old;
for (k, vold) in old.iter() {
let r = match self.find(k) {
let r = match selfmap.find(k) {
None => MetricRemoved,
Some(v) => {
let delta = (v.value - vold.value);
......@@ -999,7 +1005,8 @@ pub fn compare_to_old(&self, old: &MetricMap,
};
diff.insert((*k).clone(), r);
}
for (k, _) in self.iter() {
let MetricMap(ref map) = *self;
for (k, _) in map.iter() {
if !diff.contains_key(k) {
diff.insert((*k).clone(), MetricAdded);
}
......@@ -1025,7 +1032,8 @@ pub fn insert_metric(&mut self, name: &str, value: f64, noise: f64) {
value: value,
noise: noise
};
self.insert(name.to_owned(), m);
let MetricMap(ref mut map) = *self;
map.insert(name.to_owned(), m);
}
/// Attempt to "ratchet" an external metric file. This involves loading
......@@ -1464,6 +1472,7 @@ pub fn ratchet_test() {
// Check that it was not rewritten.
let m3 = MetricMap::load(&pth);
let MetricMap(m3) = m3;
assert_eq!(m3.len(), 2);
assert_eq!(*(m3.find(&~"runtime").unwrap()), Metric { value: 1000.0, noise: 2.0 });
assert_eq!(*(m3.find(&~"throughput").unwrap()), Metric { value: 50.0, noise: 2.0 });
......@@ -1478,6 +1487,7 @@ pub fn ratchet_test() {
// Check that it was rewritten.
let m4 = MetricMap::load(&pth);
let MetricMap(m4) = m4;
assert_eq!(m4.len(), 2);
assert_eq!(*(m4.find(&~"runtime").unwrap()), Metric { value: 1100.0, noise: 2.0 });
assert_eq!(*(m4.find(&~"throughput").unwrap()), Metric { value: 50.0, noise: 2.0 });
......
......@@ -116,13 +116,14 @@ fn new() -> WorkMap { WorkMap(TreeMap::new()) }
fn insert_work_key(&mut self, k: WorkKey, val: ~str) {
let WorkKey { kind, name } = k;
match self.find_mut(&name) {
let WorkMap(ref mut map) = *self;
match map.find_mut(&name) {
Some(&KindMap(ref mut m)) => { m.insert(kind, val); return; }
None => ()
}
let mut new_map = TreeMap::new();
new_map.insert(kind, val);
self.insert(name, KindMap(new_map));
map.insert(name, KindMap(new_map));
}
}
......@@ -328,8 +329,10 @@ pub fn discover_output(&mut self,
// returns pairs of (kind, name)
pub fn lookup_discovered_inputs(&self) -> ~[(~str, ~str)] {
let mut rs = ~[];
for (k, v) in self.discovered_inputs.iter() {
for (k1, _) in v.iter() {
let WorkMap(ref discovered_inputs) = self.discovered_inputs;
for (k, v) in discovered_inputs.iter() {
let KindMap(ref vmap) = *v;
for (k1, _) in vmap.iter() {
rs.push((k1.clone(), k.clone()));
}
}
......@@ -348,8 +351,10 @@ fn new(ctxt: &'a Context, fn_name: &'a str) -> Prep<'a> {
pub fn lookup_declared_inputs(&self) -> ~[~str] {
let mut rs = ~[];
for (_, v) in self.declared_inputs.iter() {
for (inp, _) in v.iter() {
let WorkMap(ref declared_inputs) = self.declared_inputs;
for (_, v) in declared_inputs.iter() {
let KindMap(ref vmap) = *v;
for (inp, _) in vmap.iter() {
rs.push(inp.clone());
}
}
......@@ -386,8 +391,10 @@ fn is_fresh(&self, cat: &str, kind: &str,
}
fn all_fresh(&self, cat: &str, map: &WorkMap) -> bool {
let WorkMap(ref map) = *map;
for (k_name, kindmap) in map.iter() {
for (k_kind, v) in kindmap.iter() {
let KindMap(ref kindmap_) = *kindmap;
for (k_kind, v) in kindmap_.iter() {
if ! self.is_fresh(cat, *k_kind, *k_name, *v) {
return false;
}
......
......@@ -61,11 +61,11 @@ fn drop(&mut self) {
impl StackPool {
pub fn new() -> StackPool { StackPool(()) }
fn take_segment(&self, min_size: uint) -> StackSegment {
pub fn take_segment(&self, min_size: uint) -> StackSegment {
StackSegment::new(min_size)
}
fn give_segment(&self, _stack: StackSegment) {
pub fn give_segment(&self, _stack: StackSegment) {
}
}
......
......@@ -72,9 +72,15 @@ pub struct FlowedMoveData {
#[deriving(Eq)]
pub struct MovePathIndex(uint);
impl MovePathIndex {
fn get(&self) -> uint {
let MovePathIndex(v) = *self; v
}
}
impl Clone for MovePathIndex {
fn clone(&self) -> MovePathIndex {
MovePathIndex(**self)
MovePathIndex(self.get())
}
}
......@@ -85,6 +91,12 @@ fn clone(&self) -> MovePathIndex {
#[deriving(Eq)]
pub struct MoveIndex(uint);
impl MoveIndex {
fn get(&self) -> uint {
let MoveIndex(v) = *self; v
}
}
static InvalidMoveIndex: MoveIndex =
MoveIndex(uint::max_value);
......@@ -177,47 +189,47 @@ pub fn new() -> MoveData {
fn path_loan_path(&self, index: MovePathIndex) -> @LoanPath {
let paths = self.paths.borrow();
paths.get()[*index].loan_path
paths.get()[index.get()].loan_path
}
fn path_parent(&self, index: MovePathIndex) -> MovePathIndex {
let paths = self.paths.borrow();
paths.get()[*index].parent
paths.get()[index.get()].parent
}
fn path_first_move(&self, index: MovePathIndex) -> MoveIndex {
let paths = self.paths.borrow();
paths.get()[*index].first_move
paths.get()[index.get()].first_move
}
fn path_first_child(&self, index: MovePathIndex) -> MovePathIndex {
let paths = self.paths.borrow();
paths.get()[*index].first_child
paths.get()[index.get()].first_child
}
fn path_next_sibling(&self, index: MovePathIndex) -> MovePathIndex {
let paths = self.paths.borrow();
paths.get()[*index].next_sibling
paths.get()[index.get()].next_sibling
}
fn set_path_first_move(&self,
index: MovePathIndex,
first_move: MoveIndex) {
let mut paths = self.paths.borrow_mut();
paths.get()[*index].first_move = first_move
paths.get()[index.get()].first_move = first_move
}
fn set_path_first_child(&self,
index: MovePathIndex,
first_child: MovePathIndex) {
let mut paths = self.paths.borrow_mut();
paths.get()[*index].first_child = first_child
paths.get()[index.get()].first_child = first_child
}
fn move_next_move(&self, index: MoveIndex) -> MoveIndex {
//! Type safe indexing operator
let moves = self.moves.borrow();
moves.get()[*index].next_move
moves.get()[index.get()].next_move
}
fn is_var_path(&self, index: MovePathIndex) -> bool {
......@@ -291,7 +303,7 @@ pub fn move_path(&self,
index);
let paths = self.paths.borrow();
assert_eq!(*index, paths.get().len() - 1);
assert_eq!(index.get(), paths.get().len() - 1);
let mut path_map = self.path_map.borrow_mut();
path_map.get().insert(lp, index);
......@@ -549,7 +561,7 @@ fn kill_moves(&self,
kill_id: ast::NodeId,
dfcx_moves: &mut MoveDataFlow) {
self.each_applicable_move(path, |move_index| {
dfcx_moves.add_kill(kill_id, *move_index);
dfcx_moves.add_kill(kill_id, move_index.get());
true
});
}
......
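The MovePathIndex/MoveIndex hunks above show the accessor flavour of the rewrite: index newtypes used to be dereferenced straight into a vector (`paths.get()[*index]`); now the wrapper grows a `get()` method and indexing goes through it. A rough, self-contained sketch with placeholder names (not the commit's code):

// --- illustrative sketch, not part of the diff ---
#[derive(Clone, Copy, PartialEq)]
struct PathIndex(usize); // hypothetical stand-in for MovePathIndex

impl PathIndex {
    fn get(&self) -> usize {
        let PathIndex(v) = *self;
        v
    }
}

fn main() {
    let loan_paths = vec!["a", "b", "c"];
    let idx = PathIndex(1);
    // loan_paths[*idx] is no longer allowed once newtype deref is gone:
    assert_eq!(loan_paths[idx.get()], "b");
}
// --- end sketch ---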
......@@ -67,6 +67,14 @@ pub struct Direction { priv repr: uint }
pub static Outgoing: Direction = Direction { repr: 0 };
pub static Incoming: Direction = Direction { repr: 1 };
impl NodeIndex {
fn get(&self) -> uint { let NodeIndex(v) = *self; v }
}
impl EdgeIndex {
fn get(&self) -> uint { let EdgeIndex(v) = *self; v }
}
impl<N,E> Graph<N,E> {
pub fn new() -> Graph<N,E> {
Graph {nodes: ~[], edges: ~[]}
......@@ -110,15 +118,15 @@ pub fn add_node(&mut self, data: N) -> NodeIndex {
}
pub fn mut_node_data<'a>(&'a mut self, idx: NodeIndex) -> &'a mut N {
&mut self.nodes[*idx].data
&mut self.nodes[idx.get()].data
}
pub fn node_data<'a>(&'a self, idx: NodeIndex) -> &'a N {
&self.nodes[*idx].data
&self.nodes[idx.get()].data
}
pub fn node<'a>(&'a self, idx: NodeIndex) -> &'a Node<N> {
&self.nodes[*idx]
&self.nodes[idx.get()]
}
///////////////////////////////////////////////////////////////////////////
......@@ -135,8 +143,8 @@ pub fn add_edge(&mut self,
let idx = self.next_edge_index();
// read current first of the list of edges from each node
let source_first = self.nodes[*source].first_edge[Outgoing.repr];
let target_first = self.nodes[*target].first_edge[Incoming.repr];
let source_first = self.nodes[source.get()].first_edge[Outgoing.repr];
let target_first = self.nodes[target.get()].first_edge[Incoming.repr];
// create the new edge, with the previous firsts from each node
// as the next pointers
......@@ -148,22 +156,22 @@ pub fn add_edge(&mut self,
});
// adjust the firsts for each node target be the next object.
self.nodes[*source].first_edge[Outgoing.repr] = idx;
self.nodes[*target].first_edge[Incoming.repr] = idx;
self.nodes[source.get()].first_edge[Outgoing.repr] = idx;
self.nodes[target.get()].first_edge[Incoming.repr] = idx;
return idx;
}
pub fn mut_edge_data<'a>(&'a mut self, idx: EdgeIndex) -> &'a mut E {
&mut self.edges[*idx].data
&mut self.edges[idx.get()].data
}
pub fn edge_data<'a>(&'a self, idx: EdgeIndex) -> &'a E {
&self.edges[*idx].data
&self.edges[idx.get()].data
}
pub fn edge<'a>(&'a self, idx: EdgeIndex) -> &'a Edge<E> {
&self.edges[*idx]
&self.edges[idx.get()]
}
pub fn first_adjacent(&self, node: NodeIndex, dir: Direction) -> EdgeIndex {
......@@ -171,7 +179,7 @@ pub fn first_adjacent(&self, node: NodeIndex, dir: Direction) -> EdgeIndex {
//! This is useful if you wish to modify the graph while walking
//! the linked list of edges.
self.nodes[*node].first_edge[dir.repr]
self.nodes[node.get()].first_edge[dir.repr]
}
pub fn next_adjacent(&self, edge: EdgeIndex, dir: Direction) -> EdgeIndex {
......@@ -179,7 +187,7 @@ pub fn next_adjacent(&self, edge: EdgeIndex, dir: Direction) -> EdgeIndex {
//! This is useful if you wish to modify the graph while walking
//! the linked list of edges.
self.edges[*edge].next_edge[dir.repr]
self.edges[edge.get()].next_edge[dir.repr]
}
///////////////////////////////////////////////////////////////////////////
......@@ -223,7 +231,7 @@ pub fn each_adjacent_edge(&self,
let mut edge_idx = self.first_adjacent(node, dir);
while edge_idx != InvalidEdgeIndex {
let edge = &self.edges[*edge_idx];
let edge = &self.edges[edge_idx.get()];
if !f(edge_idx, edge) {
return false;
}
......@@ -260,7 +268,7 @@ pub fn iterate_until_fixed_point(&self,
pub fn each_edge_index(max_edge_index: EdgeIndex, f: |EdgeIndex| -> bool) {
let mut i = 0;
let n = *max_edge_index;
let n = max_edge_index.get();
while i < n {
if !f(EdgeIndex(i)) {
return;
......@@ -319,8 +327,8 @@ fn each_node() {
let graph = create_graph();
let expected = ["A", "B", "C", "D", "E", "F"];
graph.each_node(|idx, node| {
assert_eq!(&expected[*idx], graph.node_data(idx));
assert_eq!(expected[*idx], node.data);
assert_eq!(&expected[idx.get()], graph.node_data(idx));
assert_eq!(expected[idx.get()], node.data);
true
});
}
......@@ -330,8 +338,8 @@ fn each_edge() {
let graph = create_graph();
let expected = ["AB", "BC", "BD", "DE", "EC", "FB"];
graph.each_edge(|idx, edge| {
assert_eq!(&expected[*idx], graph.edge_data(idx));
assert_eq!(expected[*idx], edge.data);
assert_eq!(&expected[idx.get()], graph.edge_data(idx));
assert_eq!(expected[idx.get()], edge.data);
true
});
}
......
......@@ -129,9 +129,17 @@
#[deriving(Eq)]
struct LiveNode(uint);
impl Variable {
fn get(&self) -> uint { let Variable(v) = *self; v }
}
impl LiveNode {
fn get(&self) -> uint { let LiveNode(v) = *self; v }
}
impl Clone for LiveNode {
fn clone(&self) -> LiveNode {
LiveNode(**self)
LiveNode(self.get())
}
}
......@@ -176,11 +184,11 @@ pub fn check_crate(tcx: ty::ctxt,
}
impl to_str::ToStr for LiveNode {
fn to_str(&self) -> ~str { format!("ln({})", **self) }
fn to_str(&self) -> ~str { format!("ln({})", self.get()) }
}
impl to_str::ToStr for Variable {
fn to_str(&self) -> ~str { format!("v({})", **self) }
fn to_str(&self) -> ~str { format!("v({})", self.get()) }
}
// ______________________________________________________________________
......@@ -207,7 +215,7 @@ fn to_str(&self) -> ~str { format!("v({})", **self) }
impl LiveNode {
pub fn is_valid(&self) -> bool {
**self != uint::max_value
self.get() != uint::max_value
}
}
......@@ -326,7 +334,7 @@ pub fn variable(&self, node_id: NodeId, span: Span) -> Variable {
pub fn variable_name(&self, var: Variable) -> @str {
let var_kinds = self.var_kinds.borrow();
match var_kinds.get()[*var] {
match var_kinds.get()[var.get()] {
Local(LocalInfo { ident: nm, .. }) | Arg(_, nm) => {
self.tcx.sess.str_of(nm)
},
......@@ -351,7 +359,7 @@ pub fn captures(&self, expr: &Expr) -> @~[CaptureInfo] {
pub fn lnk(&self, ln: LiveNode) -> LiveNodeKind {
let lnks = self.lnks.borrow();
lnks.get()[*ln]
lnks.get()[ln.get()]
}
}
......@@ -680,7 +688,7 @@ pub fn define_bindings_in_arm_pats(&self, pats: &[@Pat], succ: LiveNode)
}
pub fn idx(&self, ln: LiveNode, var: Variable) -> uint {
*ln * self.ir.num_vars.get() + *var
ln.get() * self.ir.num_vars.get() + var.get()
}
pub fn live_on_entry(&self, ln: LiveNode, var: Variable)
......@@ -698,7 +706,7 @@ pub fn live_on_exit(&self, ln: LiveNode, var: Variable)
-> Option<LiveNodeKind> {
let successor = {
let successors = self.successors.borrow();
successors.get()[*ln]
successors.get()[ln.get()]
};
self.live_on_entry(successor, var)
}
......@@ -721,7 +729,7 @@ pub fn assigned_on_exit(&self, ln: LiveNode, var: Variable)
-> Option<LiveNodeKind> {
let successor = {
let successors = self.successors.borrow();
successors.get()[*ln]
successors.get()[ln.get()]
};
self.assigned_on_entry(successor, var)
}
......@@ -792,8 +800,8 @@ pub fn ln_str(&self, ln: LiveNode) -> ~str {
let lnks = self.ir.lnks.try_borrow();
write!(wr,
"[ln({}) of kind {:?} reads",
*ln,
lnks.and_then(|lnks| Some(lnks.get()[*ln])));
ln.get(),
lnks.and_then(|lnks| Some(lnks.get()[ln.get()])));
}
let users = self.users.try_borrow();
match users {
......@@ -809,7 +817,7 @@ pub fn ln_str(&self, ln: LiveNode) -> ~str {
let successors = self.successors.try_borrow();
match successors {
Some(successors) => {
write!(wr, " precedes {}]", successors.get()[*ln].to_str());
write!(wr, " precedes {}]", successors.get()[ln.get()].to_str());
}
None => {
write!(wr, " precedes (successors borrowed)]");
......@@ -821,7 +829,7 @@ pub fn ln_str(&self, ln: LiveNode) -> ~str {
pub fn init_empty(&self, ln: LiveNode, succ_ln: LiveNode) {
{
let mut successors = self.successors.borrow_mut();
successors.get()[*ln] = succ_ln;
successors.get()[ln.get()] = succ_ln;
}
// It is not necessary to initialize the
......@@ -838,7 +846,7 @@ pub fn init_from_succ(&self, ln: LiveNode, succ_ln: LiveNode) {
// more efficient version of init_empty() / merge_from_succ()
{
let mut successors = self.successors.borrow_mut();
successors.get()[*ln] = succ_ln;
successors.get()[ln.get()] = succ_ln;
}
self.indices2(ln, succ_ln, |idx, succ_idx| {
......@@ -1441,7 +1449,7 @@ pub fn with_loop_nodes<R>(
cont_ln: LiveNode,
f: || -> R)
-> R {
debug!("with_loop_nodes: {} {}", loop_node_id, *break_ln);
debug!("with_loop_nodes: {} {}", loop_node_id, break_ln.get());
{
let mut loop_scope = self.loop_scope.borrow_mut();
loop_scope.get().push(loop_node_id);
......
......@@ -648,7 +648,7 @@ pub fn cat_deref<N:ast_node>(&self,
base_cmt: cmt,
deref_cnt: uint)
-> cmt {
let mt = match ty::deref(self.tcx, base_cmt.ty, true) {
let mt = match ty::deref(base_cmt.ty, true) {
Some(mt) => mt,
None => {
self.tcx.sess.span_bug(
......
......@@ -642,8 +642,7 @@ fn visit_expr(&mut self, expr: @ast::Expr, _: ()) {
// With type_autoderef, make sure we don't
// allow pointers to violate privacy
let t = ty::type_autoderef(self.tcx,
ty::expr_ty(self.tcx, base));
let t = ty::type_autoderef(ty::expr_ty(self.tcx, base));
match ty::get(t).sty {
ty::ty_struct(id, _) => {
self.check_field(expr.span, id, ident);
......@@ -653,8 +652,7 @@ fn visit_expr(&mut self, expr: @ast::Expr, _: ()) {
}
ast::ExprMethodCall(_, base, ident, _, _, _) => {
// see above
let t = ty::type_autoderef(self.tcx,
ty::expr_ty(self.tcx, base));
let t = ty::type_autoderef(ty::expr_ty(self.tcx, base));
match ty::get(t).sty {
ty::ty_enum(_, _) | ty::ty_struct(_, _) => {
let method_map = self.method_map.borrow();
......
......@@ -169,6 +169,16 @@ enum ReducedGraphParent {
ModuleReducedGraphParent(@Module)
}
impl ReducedGraphParent {
fn module(&self) -> @Module {
match *self {
ModuleReducedGraphParent(m) => {
m
}
}
}
}
enum ResolveResult<T> {
Failed, // Failed to resolve the name.
Indeterminate, // Couldn't determine due to unresolved globs.
......@@ -1246,7 +1256,7 @@ fn build_reduced_graph_for_item(&mut self,
let name = path_to_ident(path);
let existing_parent_opt = {
let children = parent.children.borrow();
let children = parent.module().children.borrow();
children.get().find_copy(&name.name)
};
let new_parent = match existing_parent_opt {
......@@ -1523,7 +1533,7 @@ fn build_reduced_graph_for_view_item(&mut self,
{
let mut external_module_children =
parent.external_module_children.borrow_mut();
parent.module().external_module_children.borrow_mut();
external_module_children.get().insert(
name.name,
external_module);
......
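In the resolver the parent is a single-variant enum rather than a tuple struct, so the commit adds an explicit `module()` accessor that matches the one variant instead of relying on field access through the wrapper. A small sketch of that shape (placeholder types, not rustc's):

// --- illustrative sketch, not part of the diff ---
struct Module {
    name: &'static str,
}

enum Parent {
    ModuleParent(Module), // only variant, mirroring ModuleReducedGraphParent
}

impl Parent {
    // With implicit deref gone, callers reach the module through a match
    // on the single variant.
    fn module(&self) -> &Module {
        match *self {
            Parent::ModuleParent(ref m) => m,
        }
    }
}

fn main() {
    let parent = Parent::ModuleParent(Module { name: "crate_root" });
    println!("{}", parent.module().name);
}
// --- end sketch ---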
......@@ -1884,7 +1884,7 @@ pub fn build_return_block(fcx: &FunctionContext, ret_cx: @Block) {
// If there's only a single store to the ret slot, we can directly return
// the value that was stored and omit the store and the alloca
Some(s) => {
let retval = *s.get_operand(0).unwrap();
let retval = s.get_operand(0).unwrap().get();
s.erase_from_parent();
if retptr.has_no_uses() {
......
......@@ -20,9 +20,13 @@
* Wrapper for LLVM BasicBlockRef
*/
impl BasicBlock {
pub fn get(&self) -> BasicBlockRef {
let BasicBlock(v) = *self; v
}
pub fn as_value(self) -> Value {
unsafe {
Value(llvm::LLVMBasicBlockAsValue(*self))
Value(llvm::LLVMBasicBlockAsValue(self.get()))
}
}
......
......@@ -131,7 +131,7 @@ fn const_deref_newtype(cx: &CrateContext, v: ValueRef, t: ty::t)
fn const_deref(cx: &CrateContext, v: ValueRef, t: ty::t, explicit: bool)
-> (ValueRef, ty::t) {
match ty::deref(cx.tcx, t, explicit) {
match ty::deref(t, explicit) {
Some(ref mt) => {
assert!(mt.mutbl != ast::MutMutable);
let dv = match ty::get(t).sty {
......
......@@ -90,7 +90,6 @@
use lib;
use lib::llvm::ValueRef;
use middle::trans::adt;
use middle::trans::base::*;
use middle::trans::build::*;
use middle::trans::common::*;
......@@ -107,7 +106,6 @@
use std::uint;
use syntax::ast;
use syntax::codemap::Span;
use syntax::parse::token::special_idents;
#[deriving(Eq)]
pub enum CopyAction {
......@@ -605,8 +603,6 @@ pub fn try_deref(&self,
derefs: uint,
is_auto: bool)
-> (Option<Datum>, @Block) {
let ccx = bcx.ccx();
debug!("try_deref(expr_id={:?}, derefs={:?}, is_auto={}, self={:?})",
expr_id, derefs, is_auto, self.to_str(bcx.ccx()));
......@@ -628,45 +624,6 @@ pub fn try_deref(&self,
ty::ty_rptr(_, mt) => {
return (Some(deref_ptr(bcx, self, mt.ty)), bcx);
}
ty::ty_struct(did, ref substs) => {
// Check whether this struct is a newtype struct.
let fields = ty::struct_fields(ccx.tcx, did, substs);
if fields.len() != 1 || fields[0].ident !=
special_idents::unnamed_field {
return (None, bcx);
}
let repr = adt::represent_type(ccx, self.ty);
let ty = fields[0].mt.ty;
return match self.mode {
ByRef(_) => {
// Recast lv.val as a pointer to the newtype rather
// than a pointer to the struct type.
// FIXME #6572: This isn't correct for structs with
// destructors.
(
Some(Datum {
val: adt::trans_field_ptr(bcx, repr, self.val,
0, 0),
ty: ty,
mode: ByRef(ZeroMem)
}),
bcx
)
}
ByValue => {
assert!(type_is_immediate(bcx.ccx(), ty));
(
Some(Datum {
val: ExtractValue(bcx, self.val, 0),
ty: ty,
mode: ByValue
}),
bcx
)
}
}
}
_ => { // not derefable.
return (None, bcx);
}
......
......@@ -564,7 +564,7 @@ pub fn set_source_location(fcx: &FunctionContext,
let loc = span_start(cx, span);
let scope = scope_metadata(fcx, node_id, span);
set_debug_location(cx, DebugLocation::new(scope, loc.line, *loc.col));
set_debug_location(cx, DebugLocation::new(scope, loc.line, loc.col.to_uint()));
} else {
set_debug_location(cx, UnknownLocation);
}
......@@ -1032,7 +1032,7 @@ fn declare_local(bcx: @Block,
}
});
set_debug_location(cx, DebugLocation::new(scope_metadata, loc.line, *loc.col));
set_debug_location(cx, DebugLocation::new(scope_metadata, loc.line, loc.col.to_uint()));
unsafe {
let instr = llvm::LLVMDIBuilderInsertDeclareAtEnd(
DIB(cx),
......
......@@ -28,10 +28,15 @@
* Wrapper for LLVM ValueRef
*/
impl Value {
/// Returns the native ValueRef
pub fn get(&self) -> ValueRef {
let Value(v) = *self; v
}
/// Returns the BasicBlock that contains this value
pub fn get_parent(self) -> Option<BasicBlock> {
unsafe {
match llvm::LLVMGetInstructionParent(*self) {
match llvm::LLVMGetInstructionParent(self.get()) {
p if p.is_not_null() => Some(BasicBlock(p)),
_ => None
}
......@@ -41,7 +46,7 @@ pub fn get_parent(self) -> Option<BasicBlock> {
/// Removes this value from its containing BasicBlock
pub fn erase_from_parent(self) {
unsafe {
llvm::LLVMInstructionEraseFromParent(*self);
llvm::LLVMInstructionEraseFromParent(self.get());
}
}
......@@ -55,7 +60,7 @@ pub fn get_dominating_store(self, bcx: &Block) -> Option<Value> {
store.get_parent().and_then(|store_bb| {
let mut bb = BasicBlock(bcx.llbb);
let mut ret = Some(store);
while *bb != *store_bb {
while bb.get() != store_bb.get() {
match bb.get_single_predecessor() {
Some(pred) => bb = pred,
None => { ret = None; break }
......@@ -71,7 +76,7 @@ pub fn get_dominating_store(self, bcx: &Block) -> Option<Value> {
/// Returns the first use of this value, if any
pub fn get_first_use(self) -> Option<Use> {
unsafe {
match llvm::LLVMGetFirstUse(*self) {
match llvm::LLVMGetFirstUse(self.get()) {
u if u.is_not_null() => Some(Use(u)),
_ => None
}
......@@ -103,18 +108,18 @@ pub fn user_iter(self) -> UserIterator {
/// Returns the requested operand of this instruction
/// Returns None, if there's no operand at the given index
pub fn get_operand(self, i: uint) -> Option<Value> {
opt_val!(llvm::LLVMGetOperand(*self, i as c_uint))
opt_val!(llvm::LLVMGetOperand(self.get(), i as c_uint))
}
/// Returns the Store represented by this value, if any
pub fn as_store_inst(self) -> Option<Value> {
opt_val!(llvm::LLVMIsAStoreInst(*self))
opt_val!(llvm::LLVMIsAStoreInst(self.get()))
}
/// Tests if this value is a terminator instruction
pub fn is_a_terminator_inst(self) -> bool {
unsafe {
llvm::LLVMIsATerminatorInst(*self).is_not_null()
llvm::LLVMIsATerminatorInst(self.get()).is_not_null()
}
}
}
......@@ -125,15 +130,19 @@ pub fn is_a_terminator_inst(self) -> bool {
* Wrapper for LLVM UseRef
*/
impl Use {
pub fn get(&self) -> UseRef {
let Use(v) = *self; v
}
pub fn get_user(self) -> Value {
unsafe {
Value(llvm::LLVMGetUser(*self))
Value(llvm::LLVMGetUser(self.get()))
}
}
pub fn get_next_use(self) -> Option<Use> {
unsafe {
match llvm::LLVMGetNextUse(*self) {
match llvm::LLVMGetNextUse(self.get()) {
u if u.is_not_null() => Some(Use(u)),
_ => None
}
......
......@@ -815,7 +815,7 @@ pub trait Vid {
}
impl Vid for TyVid {
fn to_uint(&self) -> uint { **self }
fn to_uint(&self) -> uint { let TyVid(v) = *self; v }
}
impl ToStr for TyVid {
......@@ -823,7 +823,7 @@ fn to_str(&self) -> ~str { format!("<V{}>", self.to_uint()) }
}
impl Vid for IntVid {
fn to_uint(&self) -> uint { **self }
fn to_uint(&self) -> uint { let IntVid(v) = *self; v }
}
impl ToStr for IntVid {
......@@ -831,7 +831,7 @@ fn to_str(&self) -> ~str { format!("<VI{}>", self.to_uint()) }
}
impl Vid for FloatVid {
fn to_uint(&self) -> uint { **self }
fn to_uint(&self) -> uint { let FloatVid(v) = *self; v }
}
impl ToStr for FloatVid {
......@@ -2610,11 +2610,11 @@ pub fn type_param(ty: t) -> Option<uint> {
//
// The parameter `explicit` indicates if this is an *explicit* dereference.
// Some types---notably unsafe ptrs---can only be dereferenced explicitly.
pub fn deref(cx: ctxt, t: t, explicit: bool) -> Option<mt> {
deref_sty(cx, &get(t).sty, explicit)
pub fn deref(t: t, explicit: bool) -> Option<mt> {
deref_sty(&get(t).sty, explicit)
}
pub fn deref_sty(cx: ctxt, sty: &sty, explicit: bool) -> Option<mt> {
pub fn deref_sty(sty: &sty, explicit: bool) -> Option<mt> {
match *sty {
ty_box(typ) => {
Some(mt {
......@@ -2631,24 +2631,14 @@ pub fn deref_sty(cx: ctxt, sty: &sty, explicit: bool) -> Option<mt> {
Some(mt)
}
ty_struct(did, ref substs) => {
let fields = struct_fields(cx, did, substs);
if fields.len() == 1 && fields[0].ident ==
syntax::parse::token::special_idents::unnamed_field {
Some(mt {ty: fields[0].mt.ty, mutbl: ast::MutImmutable})
} else {
None
}
}
_ => None
}
}
pub fn type_autoderef(cx: ctxt, t: t) -> t {
pub fn type_autoderef(t: t) -> t {
let mut t = t;
loop {
match deref(cx, t, false) {
match deref(t, false) {
None => return t,
Some(mt) => t = mt.ty
}
......@@ -2917,7 +2907,7 @@ pub fn adjust_ty(cx: ctxt,
if (!ty::type_is_error(adjusted_ty)) {
for i in range(0, adj.autoderefs) {
match ty::deref(cx, adjusted_ty, true) {
match ty::deref(adjusted_ty, true) {
Some(mt) => { adjusted_ty = mt.ty; }
None => {
cx.sess.span_bug(
......@@ -4310,7 +4300,7 @@ pub fn normalize_ty(cx: ctxt, t: t) -> t {
struct TypeNormalizer(ctxt);
impl TypeFolder for TypeNormalizer {
fn tcx(&self) -> ty::ctxt { **self }
fn tcx(&self) -> ty::ctxt { let TypeNormalizer(c) = *self; c }
fn fold_ty(&mut self, t: ty::t) -> ty::t {
let normalized_opt = {
......
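A side effect visible in the ty.rs hunks above: once the ty_struct arm (the only case that consulted the type context via struct_fields) is deleted, deref, deref_sty and type_autoderef depend only on the type's shape, so the cx: ctxt parameter is dropped and callers elsewhere in the diff drop their tcx/cx argument. A toy sketch of that simplification, with invented types:

// --- illustrative sketch, not part of the diff ---
#[derive(Clone, Copy)]
enum Sty {
    Box(u32),       // stands in for ty_box / ty_uniq / ty_rptr
    UnsafePtr(u32), // stands in for ty_ptr: dereferenced only explicitly
    Other,
}

// No context parameter needed: the result depends only on the shape.
fn deref_sty(sty: &Sty, explicit: bool) -> Option<u32> {
    match *sty {
        Sty::Box(t) => Some(t),
        Sty::UnsafePtr(t) if explicit => Some(t),
        _ => None, // the ty_struct newtype case used to live here
    }
}

fn main() {
    assert_eq!(deref_sty(&Sty::Box(1), false), Some(1));
    assert_eq!(deref_sty(&Sty::UnsafePtr(2), false), None);
    assert_eq!(deref_sty(&Sty::UnsafePtr(2), true), Some(2));
    assert_eq!(deref_sty(&Sty::Other, true), None);
}
// --- end sketch ---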
......@@ -265,7 +265,7 @@ fn search(&self, self_ty: ty::t) -> Option<method_map_entry> {
}
fn deref(&self, ty: ty::t) -> Option<ty::t> {
match ty::deref(self.tcx(), ty, false) {
match ty::deref(ty, false) {
None => None,
Some(t) => {
Some(structurally_resolved_type(self.fcx,
......
......@@ -1365,7 +1365,7 @@ pub fn do_autoderef(fcx: @FnCtxt, sp: Span, t: ty::t) -> (ty::t, uint) {
}
// Otherwise, deref if type is derefable:
match ty::deref_sty(fcx.ccx.tcx, sty, false) {
match ty::deref_sty(sty, false) {
None => {
return (t1, autoderefs);
}
......@@ -2758,18 +2758,13 @@ fn check_struct_enum_variant(fcx: @FnCtxt,
}
ast::UnDeref => {
let sty = structure_of(fcx, expr.span, oprnd_t);
let operand_ty = ty::deref_sty(tcx, sty, true);
let operand_ty = ty::deref_sty(sty, true);
match operand_ty {
Some(mt) => {
oprnd_t = mt.ty
}
None => {
match *sty {
ty::ty_struct(..) => {
tcx.sess.span_err(
expr.span,
"can only dereference structs with one anonymous field");
}
_ => {
fcx.type_error_message(expr.span,
|actual| {
......
......@@ -628,7 +628,6 @@ fn constrain_derefs(rcx: &mut Rcx,
* pointer being dereferenced, the lifetime of the pointer includes
* the deref expr.
*/
let tcx = rcx.fcx.tcx();
let r_deref_expr = ty::ReScope(deref_expr.id);
for i in range(0u, derefs) {
debug!("constrain_derefs(deref_expr=?, derefd_ty={}, derefs={:?}/{:?}",
......@@ -644,7 +643,7 @@ fn constrain_derefs(rcx: &mut Rcx,
_ => {}
}
match ty::deref(tcx, derefd_ty, true) {
match ty::deref(derefd_ty, true) {
Some(mt) => derefd_ty = mt.ty,
/* if this type can't be dereferenced, then there's already an error
in the session saying so. Just bail out for now */
......@@ -1193,7 +1192,7 @@ fn apply_autoderefs(
for _ in range(0u, autoderefs) {
ct.cat.guarantor = guarantor_of_deref(&ct.cat);
match ty::deref(tcx, ct.ty, true) {
match ty::deref(ct.ty, true) {
Some(mt) => {
ct.ty = mt.ty;
ct.cat.pointer = pointer_categorize(ct.ty);
......
......@@ -86,10 +86,14 @@ fn foo<A>(a: A, b: A) { ... }
pub struct Coerce(CombineFields);
impl Coerce {
pub fn get_ref<'a>(&'a self) -> &'a CombineFields {
let Coerce(ref v) = *self; v
}
pub fn tys(&self, a: ty::t, b: ty::t) -> CoerceResult {
debug!("Coerce.tys({} => {})",
a.inf_str(self.infcx),
b.inf_str(self.infcx));
a.inf_str(self.get_ref().infcx),
b.inf_str(self.get_ref().infcx));
let _indent = indenter();
// Examine the supertype and consider auto-borrowing.
......@@ -200,7 +204,7 @@ pub fn tys(&self, a: ty::t, b: ty::t) -> CoerceResult {
}
pub fn subtype(&self, a: ty::t, b: ty::t) -> CoerceResult {
match Sub(**self).tys(a, b) {
match Sub(*self.get_ref()).tys(a, b) {
Ok(_) => Ok(None), // No coercion required.
Err(ref e) => Err(*e)
}
......@@ -208,13 +212,13 @@ pub fn subtype(&self, a: ty::t, b: ty::t) -> CoerceResult {
pub fn unpack_actual_value(&self, a: ty::t, f: |&ty::sty| -> CoerceResult)
-> CoerceResult {
match resolve_type(self.infcx, a, try_resolve_tvar_shallow) {
match resolve_type(self.get_ref().infcx, a, try_resolve_tvar_shallow) {
Ok(t) => {
f(&ty::get(t).sty)
}
Err(e) => {
self.infcx.tcx.sess.span_bug(
self.trace.origin.span(),
self.get_ref().infcx.tcx.sess.span_bug(
self.get_ref().trace.origin.span(),
format!("Failed to resolve even without \
any force options: {:?}", e));
}
......@@ -228,8 +232,8 @@ pub fn coerce_borrowed_pointer(&self,
mt_b: ty::mt)
-> CoerceResult {
debug!("coerce_borrowed_pointer(a={}, sty_a={:?}, b={}, mt_b={:?})",
a.inf_str(self.infcx), sty_a,
b.inf_str(self.infcx), mt_b);
a.inf_str(self.get_ref().infcx), sty_a,
b.inf_str(self.get_ref().infcx), mt_b);
// If we have a parameter of type `&M T_a` and the value
// provided is `expr`, we will be adding an implicit borrow,
......@@ -237,8 +241,8 @@ pub fn coerce_borrowed_pointer(&self,
// to type check, we will construct the type that `&M*expr` would
// yield.
let sub = Sub(**self);
let r_borrow = self.infcx.next_region_var(Coercion(self.trace));
let sub = Sub(*self.get_ref());
let r_borrow = self.get_ref().infcx.next_region_var(Coercion(self.get_ref().trace));
let inner_ty = match *sty_a {
ty::ty_box(typ) => typ,
......@@ -249,7 +253,7 @@ pub fn coerce_borrowed_pointer(&self,
}
};
let a_borrowed = ty::mk_rptr(self.infcx.tcx,
let a_borrowed = ty::mk_rptr(self.get_ref().infcx.tcx,
r_borrow,
mt {ty: inner_ty, mutbl: mt_b.mutbl});
if_ok!(sub.tys(a_borrowed, b));
......@@ -265,8 +269,8 @@ pub fn coerce_borrowed_string(&self,
b: ty::t)
-> CoerceResult {
debug!("coerce_borrowed_string(a={}, sty_a={:?}, b={})",
a.inf_str(self.infcx), sty_a,
b.inf_str(self.infcx));
a.inf_str(self.get_ref().infcx), sty_a,
b.inf_str(self.get_ref().infcx));
match *sty_a {
ty::ty_estr(vstore_box) |
......@@ -276,8 +280,8 @@ pub fn coerce_borrowed_string(&self,
}
};
let r_a = self.infcx.next_region_var(Coercion(self.trace));
let a_borrowed = ty::mk_estr(self.infcx.tcx, vstore_slice(r_a));
let r_a = self.get_ref().infcx.next_region_var(Coercion(self.get_ref().trace));
let a_borrowed = ty::mk_estr(self.get_ref().infcx.tcx, vstore_slice(r_a));
if_ok!(self.subtype(a_borrowed, b));
Ok(Some(@AutoDerefRef(AutoDerefRef {
autoderefs: 0,
......@@ -292,11 +296,11 @@ pub fn coerce_borrowed_vector(&self,
mt_b: ty::mt)
-> CoerceResult {
debug!("coerce_borrowed_vector(a={}, sty_a={:?}, b={})",
a.inf_str(self.infcx), sty_a,
b.inf_str(self.infcx));
a.inf_str(self.get_ref().infcx), sty_a,
b.inf_str(self.get_ref().infcx));
let sub = Sub(**self);
let r_borrow = self.infcx.next_region_var(Coercion(self.trace));
let sub = Sub(*self.get_ref());
let r_borrow = self.get_ref().infcx.next_region_var(Coercion(self.get_ref().trace));
let ty_inner = match *sty_a {
ty::ty_evec(mt, _) => mt.ty,
_ => {
......@@ -304,7 +308,7 @@ pub fn coerce_borrowed_vector(&self,
}
};
let a_borrowed = ty::mk_evec(self.infcx.tcx,
let a_borrowed = ty::mk_evec(self.get_ref().infcx.tcx,
mt {ty: ty_inner, mutbl: mt_b.mutbl},
vstore_slice(r_borrow));
if_ok!(sub.tys(a_borrowed, b));
......@@ -321,11 +325,11 @@ fn coerce_borrowed_object(&self,
b_mutbl: ast::Mutability) -> CoerceResult
{
debug!("coerce_borrowed_object(a={}, sty_a={:?}, b={})",
a.inf_str(self.infcx), sty_a,
b.inf_str(self.infcx));
a.inf_str(self.get_ref().infcx), sty_a,
b.inf_str(self.get_ref().infcx));
let tcx = self.infcx.tcx;
let r_a = self.infcx.next_region_var(Coercion(self.trace));
let tcx = self.get_ref().infcx.tcx;
let r_a = self.get_ref().infcx.next_region_var(Coercion(self.get_ref().trace));
let a_borrowed = match *sty_a {
ty::ty_trait(did, ref substs, _, _, b) => {
......@@ -350,8 +354,8 @@ pub fn coerce_borrowed_fn(&self,
b: ty::t)
-> CoerceResult {
debug!("coerce_borrowed_fn(a={}, sty_a={:?}, b={})",
a.inf_str(self.infcx), sty_a,
b.inf_str(self.infcx));
a.inf_str(self.get_ref().infcx), sty_a,
b.inf_str(self.get_ref().infcx));
let fn_ty = match *sty_a {
ty::ty_closure(ref f) if f.sigil == ast::ManagedSigil ||
......@@ -366,9 +370,9 @@ pub fn coerce_borrowed_fn(&self,
}
};
let r_borrow = self.infcx.next_region_var(Coercion(self.trace));
let r_borrow = self.get_ref().infcx.next_region_var(Coercion(self.get_ref().trace));
let a_borrowed = ty::mk_closure(
self.infcx.tcx,
self.get_ref().infcx.tcx,
ty::ClosureTy {
sigil: ast::BorrowedSigil,
region: r_borrow,
......@@ -405,7 +409,7 @@ pub fn coerce_from_bare_fn_post_unpack(&self,
*/
debug!("coerce_from_bare_fn(a={}, b={})",
a.inf_str(self.infcx), b.inf_str(self.infcx));
a.inf_str(self.get_ref().infcx), b.inf_str(self.get_ref().infcx));
if !fn_ty_a.abis.is_rust() {
return self.subtype(a, b);
......@@ -417,7 +421,7 @@ pub fn coerce_from_bare_fn_post_unpack(&self,
};
let adj = @ty::AutoAddEnv(fn_ty_b.region, fn_ty_b.sigil);
let a_closure = ty::mk_closure(self.infcx.tcx,
let a_closure = ty::mk_closure(self.get_ref().infcx.tcx,
ty::ClosureTy {
sig: fn_ty_a.sig.clone(),
..fn_ty_b
......@@ -433,8 +437,8 @@ pub fn coerce_unsafe_ptr(&self,
mt_b: ty::mt)
-> CoerceResult {
debug!("coerce_unsafe_ptr(a={}, sty_a={:?}, b={})",
a.inf_str(self.infcx), sty_a,
b.inf_str(self.infcx));
a.inf_str(self.get_ref().infcx), sty_a,
b.inf_str(self.get_ref().infcx));
let mt_a = match *sty_a {
ty::ty_rptr(_, mt) => mt,
......@@ -444,7 +448,7 @@ pub fn coerce_unsafe_ptr(&self,
};
// check that the types which they point at are compatible
let a_unsafe = ty::mk_ptr(self.infcx.tcx, mt_a);
let a_unsafe = ty::mk_ptr(self.get_ref().infcx.tcx, mt_a);
if_ok!(self.subtype(a_unsafe, b));
// although borrowed ptrs and unsafe ptrs have the same
......@@ -467,8 +471,8 @@ pub fn coerce_object(&self,
bounds: ty::BuiltinBounds) -> CoerceResult {
debug!("coerce_object(a={}, sty_a={:?}, b={})",
a.inf_str(self.infcx), sty_a,
b.inf_str(self.infcx));
a.inf_str(self.get_ref().infcx), sty_a,
b.inf_str(self.get_ref().infcx));
let (sigil, region) = match trait_store {
ty::BoxTraitStore => (ast::ManagedSigil, None),
......
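Coerce, and the Glb/Lub/Sub combiners in the following files, all wrap the same CombineFields struct, so instead of a by-value get() they gain a borrowed get_ref() accessor, and every former `self.infcx` / `**self` access becomes `self.get_ref().infcx` / `*self.get_ref()`. A compact sketch of the pattern with made-up field names:

// --- illustrative sketch, not part of the diff ---
#[derive(Clone)]
struct Fields {
    a_is_expected: bool,
}

struct Sub(Fields); // a combiner wrapping the shared fields

impl Sub {
    fn get_ref<'a>(&'a self) -> &'a Fields {
        let Sub(ref v) = *self;
        v
    }

    fn a_is_expected(&self) -> bool {
        // formerly `self.a_is_expected`, which auto-derefed the newtype
        self.get_ref().a_is_expected
    }
}

fn main() {
    let s = Sub(Fields { a_is_expected: true });
    assert!(s.a_is_expected());
}
// --- end sketch ---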
......@@ -29,18 +29,22 @@
pub struct Glb(CombineFields); // "greatest lower bound" (common subtype)
impl Glb {
pub fn get_ref<'a>(&'a self) -> &'a CombineFields { let Glb(ref v) = *self; v }
}
impl Combine for Glb {
fn infcx(&self) -> @InferCtxt { self.infcx }
fn infcx(&self) -> @InferCtxt { self.get_ref().infcx }
fn tag(&self) -> ~str { ~"glb" }
fn a_is_expected(&self) -> bool { self.a_is_expected }
fn trace(&self) -> TypeTrace { self.trace }
fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected }
fn trace(&self) -> TypeTrace { self.get_ref().trace }
fn sub(&self) -> Sub { Sub(**self) }
fn lub(&self) -> Lub { Lub(**self) }
fn glb(&self) -> Glb { Glb(**self) }
fn sub(&self) -> Sub { Sub(*self.get_ref()) }
fn lub(&self) -> Lub { Lub(*self.get_ref()) }
fn glb(&self) -> Glb { Glb(*self.get_ref()) }
fn mts(&self, a: &ty::mt, b: &ty::mt) -> cres<ty::mt> {
let tcx = self.infcx.tcx;
let tcx = self.get_ref().infcx.tcx;
debug!("{}.mts({}, {})",
self.tag(),
......@@ -73,7 +77,7 @@ fn mts(&self, a: &ty::mt, b: &ty::mt) -> cres<ty::mt> {
}
fn contratys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
Lub(**self).tys(a, b)
Lub(*self.get_ref()).tys(a, b)
}
fn purities(&self, a: purity, b: purity) -> cres<purity> {
......@@ -100,15 +104,15 @@ fn bounds(&self, a: BuiltinBounds, b: BuiltinBounds) -> cres<BuiltinBounds> {
fn regions(&self, a: ty::Region, b: ty::Region) -> cres<ty::Region> {
debug!("{}.regions({:?}, {:?})",
self.tag(),
a.inf_str(self.infcx),
b.inf_str(self.infcx));
a.inf_str(self.get_ref().infcx),
b.inf_str(self.get_ref().infcx));
Ok(self.infcx.region_vars.glb_regions(Subtype(self.trace), a, b))
Ok(self.get_ref().infcx.region_vars.glb_regions(Subtype(self.get_ref().trace), a, b))
}
fn contraregions(&self, a: ty::Region, b: ty::Region)
-> cres<ty::Region> {
Lub(**self).regions(a, b)
Lub(*self.get_ref()).regions(a, b)
}
fn tys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
......@@ -120,41 +124,41 @@ fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
// please see the large comment in `region_inference.rs`.
debug!("{}.fn_sigs({:?}, {:?})",
self.tag(), a.inf_str(self.infcx), b.inf_str(self.infcx));
self.tag(), a.inf_str(self.get_ref().infcx), b.inf_str(self.get_ref().infcx));
let _indenter = indenter();
// Take a snapshot. We'll never roll this back, but in later
// phases we do want to be able to examine "all bindings that
// were created as part of this type comparison", and making a
// snapshot is a convenient way to do that.
let snapshot = self.infcx.region_vars.start_snapshot();
let snapshot = self.get_ref().infcx.region_vars.start_snapshot();
// Instantiate each bound region with a fresh region variable.
let (a_with_fresh, a_map) =
self.infcx.replace_bound_regions_with_fresh_regions(
self.trace, a);
self.get_ref().infcx.replace_bound_regions_with_fresh_regions(
self.get_ref().trace, a);
let a_vars = var_ids(self, &a_map);
let (b_with_fresh, b_map) =
self.infcx.replace_bound_regions_with_fresh_regions(
self.trace, b);
self.get_ref().infcx.replace_bound_regions_with_fresh_regions(
self.get_ref().trace, b);
let b_vars = var_ids(self, &b_map);
// Collect constraints.
let sig0 = if_ok!(super_fn_sigs(self, &a_with_fresh, &b_with_fresh));
debug!("sig0 = {}", sig0.inf_str(self.infcx));
debug!("sig0 = {}", sig0.inf_str(self.get_ref().infcx));
// Generalize the regions appearing in fn_ty0 if possible
let new_vars =
self.infcx.region_vars.vars_created_since_snapshot(snapshot);
self.get_ref().infcx.region_vars.vars_created_since_snapshot(snapshot);
let sig1 =
fold_regions_in_sig(
self.infcx.tcx,
self.get_ref().infcx.tcx,
&sig0,
|r| generalize_region(self, snapshot,
new_vars, sig0.binder_id,
&a_map, a_vars, b_vars,
r));
debug!("sig1 = {}", sig1.inf_str(self.infcx));
debug!("sig1 = {}", sig1.inf_str(self.get_ref().infcx));
return Ok(sig1);
fn generalize_region(this: &Glb,
......@@ -170,7 +174,7 @@ fn generalize_region(this: &Glb,
return r0;
}
let tainted = this.infcx.region_vars.tainted(snapshot, r0);
let tainted = this.get_ref().infcx.region_vars.tainted(snapshot, r0);
let mut a_r = None;
let mut b_r = None;
......@@ -237,13 +241,13 @@ fn rev_lookup(this: &Glb,
return ty::ReLateBound(new_binder_id, *a_br);
}
}
this.infcx.tcx.sess.span_bug(
this.trace.origin.span(),
this.get_ref().infcx.tcx.sess.span_bug(
this.get_ref().trace.origin.span(),
format!("could not find original bound region for {:?}", r))
}
fn fresh_bound_variable(this: &Glb, binder_id: NodeId) -> ty::Region {
this.infcx.region_vars.new_bound(binder_id)
this.get_ref().infcx.region_vars.new_bound(binder_id)
}
}
}
......@@ -335,7 +335,7 @@ pub trait TyLatticeDir {
}
impl LatticeDir for Lub {
fn combine_fields(&self) -> CombineFields { **self }
fn combine_fields(&self) -> CombineFields { *self.get_ref() }
fn bnd<T:Clone>(&self, b: &Bounds<T>) -> Option<T> { b.ub.clone() }
fn with_bnd<T:Clone>(&self, b: &Bounds<T>, t: T) -> Bounds<T> {
Bounds { ub: Some(t), ..(*b).clone() }
......@@ -349,7 +349,7 @@ fn ty_bot(&self, t: ty::t) -> cres<ty::t> {
}
impl LatticeDir for Glb {
fn combine_fields(&self) -> CombineFields { **self }
fn combine_fields(&self) -> CombineFields { *self.get_ref() }
fn bnd<T:Clone>(&self, b: &Bounds<T>) -> Option<T> { b.lb.clone() }
fn with_bnd<T:Clone>(&self, b: &Bounds<T>, t: T) -> Bounds<T> {
Bounds { lb: Some(t), ..(*b).clone() }
......
......@@ -29,6 +29,7 @@
pub struct Lub(CombineFields); // least-upper-bound: common supertype
impl Lub {
pub fn get_ref<'a>(&'a self) -> &'a CombineFields { let Lub(ref v) = *self; v }
pub fn bot_ty(&self, b: ty::t) -> cres<ty::t> { Ok(b) }
pub fn ty_bot(&self, b: ty::t) -> cres<ty::t> {
self.bot_ty(b) // commutative
......@@ -36,17 +37,17 @@ pub fn ty_bot(&self, b: ty::t) -> cres<ty::t> {
}
impl Combine for Lub {
fn infcx(&self) -> @InferCtxt { self.infcx }
fn infcx(&self) -> @InferCtxt { self.get_ref().infcx }
fn tag(&self) -> ~str { ~"lub" }
fn a_is_expected(&self) -> bool { self.a_is_expected }
fn trace(&self) -> TypeTrace { self.trace }
fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected }
fn trace(&self) -> TypeTrace { self.get_ref().trace }
fn sub(&self) -> Sub { Sub(**self) }
fn lub(&self) -> Lub { Lub(**self) }
fn glb(&self) -> Glb { Glb(**self) }
fn sub(&self) -> Sub { Sub(*self.get_ref()) }
fn lub(&self) -> Lub { Lub(*self.get_ref()) }
fn glb(&self) -> Glb { Glb(*self.get_ref()) }
fn mts(&self, a: &ty::mt, b: &ty::mt) -> cres<ty::mt> {
let tcx = self.infcx.tcx;
let tcx = self.get_ref().infcx.tcx;
debug!("{}.mts({}, {})",
self.tag(),
......@@ -64,7 +65,7 @@ fn mts(&self, a: &ty::mt, b: &ty::mt) -> cres<ty::mt> {
}
MutMutable => {
self.infcx.try(|| {
self.get_ref().infcx.try(|| {
eq_tys(self, a.ty, b.ty).then(|| {
Ok(ty::mt {ty: a.ty, mutbl: m})
})
......@@ -74,7 +75,7 @@ fn mts(&self, a: &ty::mt, b: &ty::mt) -> cres<ty::mt> {
}
fn contratys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
Glb(**self).tys(a, b)
Glb(*self.get_ref()).tys(a, b)
}
fn purities(&self, a: purity, b: purity) -> cres<purity> {
......@@ -100,16 +101,16 @@ fn bounds(&self, a: BuiltinBounds, b: BuiltinBounds) -> cres<BuiltinBounds> {
fn contraregions(&self, a: ty::Region, b: ty::Region)
-> cres<ty::Region> {
return Glb(**self).regions(a, b);
return Glb(*self.get_ref()).regions(a, b);
}
fn regions(&self, a: ty::Region, b: ty::Region) -> cres<ty::Region> {
debug!("{}.regions({:?}, {:?})",
self.tag(),
a.inf_str(self.infcx),
b.inf_str(self.infcx));
a.inf_str(self.get_ref().infcx),
b.inf_str(self.get_ref().infcx));
Ok(self.infcx.region_vars.lub_regions(Subtype(self.trace), a, b))
Ok(self.get_ref().infcx.region_vars.lub_regions(Subtype(self.get_ref().trace), a, b))
}
fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
......@@ -120,26 +121,26 @@ fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
// phases we do want to be able to examine "all bindings that
// were created as part of this type comparison", and making a
// snapshot is a convenient way to do that.
let snapshot = self.infcx.region_vars.start_snapshot();
let snapshot = self.get_ref().infcx.region_vars.start_snapshot();
// Instantiate each bound region with a fresh region variable.
let (a_with_fresh, a_map) =
self.infcx.replace_bound_regions_with_fresh_regions(
self.trace, a);
self.get_ref().infcx.replace_bound_regions_with_fresh_regions(
self.get_ref().trace, a);
let (b_with_fresh, _) =
self.infcx.replace_bound_regions_with_fresh_regions(
self.trace, b);
self.get_ref().infcx.replace_bound_regions_with_fresh_regions(
self.get_ref().trace, b);
// Collect constraints.
let sig0 = if_ok!(super_fn_sigs(self, &a_with_fresh, &b_with_fresh));
debug!("sig0 = {}", sig0.inf_str(self.infcx));
debug!("sig0 = {}", sig0.inf_str(self.get_ref().infcx));
// Generalize the regions appearing in sig0 if possible
let new_vars =
self.infcx.region_vars.vars_created_since_snapshot(snapshot);
self.get_ref().infcx.region_vars.vars_created_since_snapshot(snapshot);
let sig1 =
fold_regions_in_sig(
self.infcx.tcx,
self.get_ref().infcx.tcx,
&sig0,
|r| generalize_region(self, snapshot, new_vars,
sig0.binder_id, &a_map, r));
......@@ -159,7 +160,7 @@ fn generalize_region(this: &Lub,
return r0;
}
let tainted = this.infcx.region_vars.tainted(snapshot, r0);
let tainted = this.get_ref().infcx.region_vars.tainted(snapshot, r0);
// Variables created during LUB computation which are
// *related* to regions that pre-date the LUB computation
......@@ -186,8 +187,8 @@ fn generalize_region(this: &Lub,
}
}
this.infcx.tcx.sess.span_bug(
this.trace.origin.span(),
this.get_ref().infcx.tcx.sess.span_bug(
this.get_ref().trace.origin.span(),
format!("Region {:?} is not associated with \
any bound region from A!", r0))
}
......
......@@ -28,19 +28,23 @@
pub struct Sub(CombineFields); // "subtype", "subregion" etc
impl Sub {
pub fn get_ref<'a>(&'a self) -> &'a CombineFields { let Sub(ref v) = *self; v }
}
impl Combine for Sub {
fn infcx(&self) -> @InferCtxt { self.infcx }
fn infcx(&self) -> @InferCtxt { self.get_ref().infcx }
fn tag(&self) -> ~str { ~"sub" }
fn a_is_expected(&self) -> bool { self.a_is_expected }
fn trace(&self) -> TypeTrace { self.trace }
fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected }
fn trace(&self) -> TypeTrace { self.get_ref().trace }
fn sub(&self) -> Sub { Sub(**self) }
fn lub(&self) -> Lub { Lub(**self) }
fn glb(&self) -> Glb { Glb(**self) }
fn sub(&self) -> Sub { Sub(*self.get_ref()) }
fn lub(&self) -> Lub { Lub(*self.get_ref()) }
fn glb(&self) -> Glb { Glb(*self.get_ref()) }
fn contratys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
let opp = CombineFields {
a_is_expected: !self.a_is_expected,.. **self
a_is_expected: !self.get_ref().a_is_expected,.. *self.get_ref()
};
Sub(opp).tys(b, a)
}
......@@ -48,7 +52,7 @@ fn contratys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
fn contraregions(&self, a: ty::Region, b: ty::Region)
-> cres<ty::Region> {
let opp = CombineFields {
a_is_expected: !self.a_is_expected,.. **self
a_is_expected: !self.get_ref().a_is_expected,.. *self.get_ref()
};
Sub(opp).regions(b, a)
}
......@@ -56,14 +60,14 @@ fn contraregions(&self, a: ty::Region, b: ty::Region)
fn regions(&self, a: ty::Region, b: ty::Region) -> cres<ty::Region> {
debug!("{}.regions({}, {})",
self.tag(),
a.inf_str(self.infcx),
b.inf_str(self.infcx));
self.infcx.region_vars.make_subregion(Subtype(self.trace), a, b);
a.inf_str(self.get_ref().infcx),
b.inf_str(self.get_ref().infcx));
self.get_ref().infcx.region_vars.make_subregion(Subtype(self.get_ref().trace), a, b);
Ok(a)
}
fn mts(&self, a: &ty::mt, b: &ty::mt) -> cres<ty::mt> {
debug!("mts({} <: {})", a.inf_str(self.infcx), b.inf_str(self.infcx));
debug!("mts({} <: {})", a.inf_str(self.get_ref().infcx), b.inf_str(self.get_ref().infcx));
if a.mutbl != b.mutbl {
return Err(ty::terr_mutability);
......@@ -109,7 +113,7 @@ fn bounds(&self, a: BuiltinBounds, b: BuiltinBounds) -> cres<BuiltinBounds> {
fn tys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
debug!("{}.tys({}, {})", self.tag(),
a.inf_str(self.infcx), b.inf_str(self.infcx));
a.inf_str(self.get_ref().infcx), b.inf_str(self.get_ref().infcx));
if a == b { return Ok(a); }
let _indenter = indenter();
match (&ty::get(a).sty, &ty::get(b).sty) {
......@@ -118,15 +122,15 @@ fn tys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
}
(&ty::ty_infer(TyVar(a_id)), &ty::ty_infer(TyVar(b_id))) => {
if_ok!(self.var_sub_var(a_id, b_id));
if_ok!(self.get_ref().var_sub_var(a_id, b_id));
Ok(a)
}
(&ty::ty_infer(TyVar(a_id)), _) => {
if_ok!(self.var_sub_t(a_id, b));
if_ok!(self.get_ref().var_sub_t(a_id, b));
Ok(a)
}
(_, &ty::ty_infer(TyVar(b_id))) => {
if_ok!(self.t_sub_var(a, b_id));
if_ok!(self.get_ref().t_sub_var(a, b_id));
Ok(a)
}
......@@ -142,7 +146,7 @@ fn tys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
debug!("fn_sigs(a={}, b={})",
a.inf_str(self.infcx), b.inf_str(self.infcx));
a.inf_str(self.get_ref().infcx), b.inf_str(self.get_ref().infcx));
let _indenter = indenter();
// Rather than checking the subtype relationship between `a` and `b`
......@@ -156,28 +160,28 @@ fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
// phases we do want to be able to examine "all bindings that
// were created as part of this type comparison", and making a
// snapshot is a convenient way to do that.
let snapshot = self.infcx.region_vars.start_snapshot();
let snapshot = self.get_ref().infcx.region_vars.start_snapshot();
// First, we instantiate each bound region in the subtype with a fresh
// region variable.
let (a_sig, _) =
self.infcx.replace_bound_regions_with_fresh_regions(
self.trace, a);
self.get_ref().infcx.replace_bound_regions_with_fresh_regions(
self.get_ref().trace, a);
// Second, we instantiate each bound region in the supertype with a
// fresh concrete region.
let (skol_map, _, b_sig) = {
replace_bound_regions_in_fn_sig(self.infcx.tcx, None, b, |br| {
let skol = self.infcx.region_vars.new_skolemized(br);
replace_bound_regions_in_fn_sig(self.get_ref().infcx.tcx, None, b, |br| {
let skol = self.get_ref().infcx.region_vars.new_skolemized(br);
debug!("Bound region {} skolemized to {:?}",
bound_region_to_str(self.infcx.tcx, "", false, br),
bound_region_to_str(self.get_ref().infcx.tcx, "", false, br),
skol);
skol
})
};
debug!("a_sig={}", a_sig.inf_str(self.infcx));
debug!("b_sig={}", b_sig.inf_str(self.infcx));
debug!("a_sig={}", a_sig.inf_str(self.get_ref().infcx));
debug!("b_sig={}", b_sig.inf_str(self.get_ref().infcx));
// Compare types now that bound regions have been replaced.
let sig = if_ok!(super_fn_sigs(self, &a_sig, &b_sig));
......@@ -185,9 +189,9 @@ fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
// Presuming type comparison succeeds, we need to check
// that the skolemized regions do not "leak".
let new_vars =
self.infcx.region_vars.vars_created_since_snapshot(snapshot);
self.get_ref().infcx.region_vars.vars_created_since_snapshot(snapshot);
for (&skol_br, &skol) in skol_map.iter() {
let tainted = self.infcx.region_vars.tainted(snapshot, skol);
let tainted = self.get_ref().infcx.region_vars.tainted(snapshot, skol);
for tainted_region in tainted.iter() {
// Each skolemized should only be relatable to itself
// or new variables:
......@@ -201,7 +205,7 @@ fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
};
// A is not as polymorphic as B:
if self.a_is_expected {
if self.a_is_expected() {
return Err(ty::terr_regions_insufficiently_polymorphic(
skol_br, *tainted_region));
} else {
......
......@@ -242,7 +242,7 @@ fn to_str(&self) -> ~str {
ConstantTerm(c1) => format!("{}", c1.to_str()),
TransformTerm(v1, v2) => format!("({} \u00D7 {})",
v1.to_str(), v2.to_str()),
InferredTerm(id) => format!("[{}]", *id)
InferredTerm(id) => format!("[{}]", { let InferredIndex(i) = id; i })
}
}
}
......@@ -543,8 +543,8 @@ fn declared_variance(&self,
// Parameter on an item defined within current crate:
// variance not yet inferred, so return a symbolic
// variance.
let index = self.inferred_index(param_def_id.node);
self.terms_cx.inferred_infos[*index].term
let InferredIndex(index) = self.inferred_index(param_def_id.node);
self.terms_cx.inferred_infos[index].term
} else {
// Parameter on an item defined within another crate:
// variance already inferred, just look it up.
......@@ -559,11 +559,11 @@ fn declared_variance(&self,
}
fn add_constraint(&mut self,
index: InferredIndex,
InferredIndex(index): InferredIndex,
variance: VarianceTermPtr<'a>) {
debug!("add_constraint(index={}, variance={})",
*index, variance.to_str());
self.constraints.push(Constraint { inferred: index,
index, variance.to_str());
self.constraints.push(Constraint { inferred: InferredIndex(index),
variance: variance });
}
......@@ -852,19 +852,20 @@ fn solve(&mut self) {
for constraint in self.constraints.iter() {
let Constraint { inferred, variance: term } = *constraint;
let InferredIndex(inferred) = inferred;
let variance = self.evaluate(term);
let old_value = self.solutions[*inferred];
let old_value = self.solutions[inferred];
let new_value = glb(variance, old_value);
if old_value != new_value {
debug!("Updating inferred {} (node {}) \
from {:?} to {:?} due to {}",
*inferred,
self.terms_cx.inferred_infos[*inferred].param_id,
inferred,
self.terms_cx.inferred_infos[inferred].param_id,
old_value,
new_value,
term.to_str());
self.solutions[*inferred] = new_value;
self.solutions[inferred] = new_value;
changed = true;
}
}
......@@ -943,8 +944,8 @@ fn evaluate(&self, term: VarianceTermPtr<'a>) -> ty::Variance {
v1.xform(v2)
}
InferredTerm(index) => {
self.solutions[*index]
InferredTerm(InferredIndex(index)) => {
self.solutions[index]
}
}
}
......
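add_constraint above uses a third variant of the rewrite: the tuple-struct pattern appears directly in the function's parameter list (`InferredIndex(index): InferredIndex`), so the body works with the plain inner value and no `*index` is needed. A self-contained sketch (names are made up):

// --- illustrative sketch, not part of the diff ---
#[derive(Clone, Copy)]
struct InferredIndex(usize);

fn add_constraint(InferredIndex(index): InferredIndex, variance: &str) {
    // `index` is already the inner usize here.
    println!("add_constraint(index={}, variance={})", index, variance);
}

fn main() {
    add_constraint(InferredIndex(3), "covariant");
}
// --- end sketch ---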
......@@ -19,6 +19,7 @@
use syntax::ast_util;
use syntax::attr;
use syntax::attr::AttributeMethods;
use syntax::codemap::Pos;
use rustc::metadata::cstore;
use rustc::metadata::csearch;
......@@ -289,6 +290,14 @@ fn clean(&self) -> TyParamBound {
#[deriving(Clone, Encodable, Decodable)]
pub struct Lifetime(~str);
impl Lifetime {
pub fn get_ref<'a>(&'a self) -> &'a str {
let Lifetime(ref s) = *self;
let s: &'a str = *s;
return s;
}
}
impl Clean<Lifetime> for ast::Lifetime {
fn clean(&self) -> Lifetime {
Lifetime(self.ident.clean())
......@@ -798,9 +807,9 @@ fn clean(&self) -> Span {
Span {
filename: filename.to_owned(),
loline: lo.line,
locol: *lo.col,
locol: lo.col.to_uint(),
hiline: hi.line,
hicol: *hi.col,
hicol: hi.col.to_uint(),
}
}
}
......
......@@ -23,6 +23,7 @@ impl<'a> fmt::Default for Escape<'a> {
fn fmt(s: &Escape<'a>, fmt: &mut fmt::Formatter) {
// Because the internet is always right, turns out there's not that many
// characters to escape: http://stackoverflow.com/questions/7381974
let Escape(s) = *s;
let pile_o_bits = s.as_slice();
let mut last = 0;
for (i, ch) in s.bytes().enumerate() {
......
......@@ -35,6 +35,18 @@
/// Wrapper struct for properly emitting a method declaration.
pub struct Method<'a>(&'a clean::SelfTy, &'a clean::FnDecl);
impl VisSpace {
pub fn get(&self) -> Option<ast::visibility> {
let VisSpace(v) = *self; v
}
}
impl PuritySpace {
pub fn get(&self) -> ast::purity {
let PuritySpace(v) = *self; v
}
}
impl fmt::Default for clean::Generics {
fn fmt(g: &clean::Generics, f: &mut fmt::Formatter) {
if g.lifetimes.len() == 0 && g.type_params.len() == 0 { return }
......@@ -68,7 +80,7 @@ fn fmt(g: &clean::Generics, f: &mut fmt::Formatter) {
impl fmt::Default for clean::Lifetime {
fn fmt(l: &clean::Lifetime, f: &mut fmt::Formatter) {
f.buf.write("'".as_bytes());
f.buf.write(l.as_bytes());
f.buf.write(l.get_ref().as_bytes());
}
}
......@@ -424,7 +436,7 @@ fn fmt(m: &Method<'a>, f: &mut fmt::Formatter) {
impl fmt::Default for VisSpace {
fn fmt(v: &VisSpace, f: &mut fmt::Formatter) {
match **v {
match v.get() {
Some(ast::public) => { write!(f.buf, "pub "); }
Some(ast::private) => { write!(f.buf, "priv "); }
Some(ast::inherited) | None => {}
......@@ -434,7 +446,7 @@ fn fmt(v: &VisSpace, f: &mut fmt::Formatter) {
impl fmt::Default for PuritySpace {
fn fmt(p: &PuritySpace, f: &mut fmt::Formatter) {
match **p {
match p.get() {
ast::unsafe_fn => write!(f.buf, "unsafe "),
ast::extern_fn => write!(f.buf, "extern "),
ast::impure_fn => {}
......
......@@ -209,6 +209,7 @@ pub fn find_testable_code(doc: &str, tests: &mut ::test::Collector) {
impl<'a> fmt::Default for Markdown<'a> {
fn fmt(md: &Markdown<'a>, fmt: &mut fmt::Formatter) {
let Markdown(md) = *md;
// This is actually common enough to special-case
if md.len() == 0 { return; }
render(fmt.buf, md.as_slice());
......
......@@ -972,6 +972,7 @@ fn cmp(i1: &clean::Item, i2: &clean::Item, idx1: uint, idx2: uint) -> Ordering {
struct Initializer<'a>(&'a str);
impl<'a> fmt::Default for Initializer<'a> {
fn fmt(s: &Initializer<'a>, f: &mut fmt::Formatter) {
let Initializer(s) = *s;
if s.len() == 0 { return; }
write!(f.buf, "<code> = </code>");
let tag = if s.contains("\n") { "pre" } else { "code" };
......@@ -1537,6 +1538,7 @@ fn build_sidebar(m: &clean::Module) -> HashMap<~str, ~[~str]> {
impl<'a> fmt::Default for Source<'a> {
fn fmt(s: &Source<'a>, fmt: &mut fmt::Formatter) {
let Source(s) = *s;
let lines = s.lines().len();
let mut cols = 0;
let mut tmp = lines;
......
......@@ -157,7 +157,8 @@ fn fold_item(&mut self, i: Item) -> Option<Item> {
clean::ImplItem(ref imp) => {
match imp.trait_ {
Some(clean::ResolvedPath{ id, .. }) => {
if !self.contains(&id) {
let ImplStripper(s) = *self;
if !s.contains(&id) {
return None;
}
}
......
......@@ -140,8 +140,9 @@ impl Callback for MyCallback {
fn call(&mut self) {
// this can get called more than once, but we only want to send
// once
if self.is_some() {
self.take_unwrap().send(1);
let MyCallback(ref mut s) = *self;
if s.is_some() {
s.take_unwrap().send(1);
}
}
}
......
......@@ -310,7 +310,8 @@ pub fn desc(&self) -> ~str {
}
pub fn is_eof(&self) -> bool {
**self == uvll::EOF
let UvError(handle) = *self;
handle == uvll::EOF
}
}
......@@ -331,10 +332,11 @@ pub fn uv_error_to_io_error(uverr: UvError) -> IoError {
// Importing error constants
// uv error descriptions are static
let c_desc = uvll::uv_strerror(*uverr);
let UvError(errcode) = uverr;
let c_desc = uvll::uv_strerror(errcode);
let desc = str::raw::c_str_to_static_slice(c_desc);
let kind = match *uverr {
let kind = match errcode {
uvll::UNKNOWN => io::OtherIoError,
uvll::OK => io::OtherIoError,
uvll::EOF => io::EndOfFile,
......
......@@ -309,7 +309,8 @@ mod tests {
struct Bytes<'a>(&'a [u8]);
impl<'a> IterBytes for Bytes<'a> {
fn iter_bytes(&self, _lsb0: bool, f: |&[u8]| -> bool) -> bool {
f(**self)
let Bytes(v) = *self;
f(v)
}
}
......
......@@ -243,9 +243,31 @@ fn inner_mut_ref<'a>(&'a mut self) -> &'a mut W { self.inner.inner_mut_ref() }
struct InternalBufferedWriter<W>(BufferedWriter<W>);
impl<W> InternalBufferedWriter<W> {
fn get_mut_ref<'a>(&'a mut self) -> &'a mut BufferedWriter<W> {
let InternalBufferedWriter(ref mut w) = *self;
return w;
}
}
impl<W: Writer> Decorator<W> for InternalBufferedWriter<W> {
fn inner(self) -> W {
let InternalBufferedWriter(s) = self;
s.inner()
}
fn inner_ref<'a>(&'a self) -> &'a W {
let InternalBufferedWriter(ref s) = *self;
s.inner_ref()
}
fn inner_mut_ref<'a>(&'a mut self) -> &'a mut W {
let InternalBufferedWriter(ref mut s) = *self;
s.inner_mut_ref()
}
}
impl<W: Reader> Reader for InternalBufferedWriter<W> {
fn read(&mut self, buf: &mut [u8]) -> Option<uint> { self.inner.read(buf) }
fn eof(&mut self) -> bool { self.inner.eof() }
fn read(&mut self, buf: &mut [u8]) -> Option<uint> { self.get_mut_ref().inner.read(buf) }
fn eof(&mut self) -> bool { self.get_mut_ref().inner.eof() }
}
/// Wraps a Stream and buffers input and output to and from it
......@@ -282,15 +304,15 @@ fn eof(&mut self) -> bool { self.inner.eof() }
}
impl<S: Stream> Writer for BufferedStream<S> {
fn write(&mut self, buf: &[u8]) { self.inner.inner.write(buf) }
fn flush(&mut self) { self.inner.inner.flush() }
fn write(&mut self, buf: &[u8]) { self.inner.inner.get_mut_ref().write(buf) }
fn flush(&mut self) { self.inner.inner.get_mut_ref().flush() }
}
impl<S: Stream> Decorator<S> for BufferedStream<S> {
fn inner(self) -> S { self.inner.inner.inner() }
fn inner_ref<'a>(&'a self) -> &'a S { self.inner.inner.inner_ref() }
fn inner_mut_ref<'a>(&'a mut self) -> &'a mut S {
self.inner.inner.inner_mut_ref()
self.inner.inner.get_mut_ref().inner_mut_ref()
}
}
......
......@@ -706,11 +706,24 @@ mod test {
struct TempDir(Path);
impl TempDir {
fn join(&self, path: &str) -> Path {
let TempDir(ref p) = *self;
p.join(path)
}
fn path<'a>(&'a self) -> &'a Path {
let TempDir(ref p) = *self;
p
}
}
impl Drop for TempDir {
fn drop(&mut self) {
// Gee, seeing how we're testing the fs module I sure hope that we
// at least implement this correctly!
io::fs::rmdir_recursive(&**self);
let TempDir(ref p) = *self;
io::fs::rmdir_recursive(p);
}
}
......@@ -961,7 +974,7 @@ pub fn tmpdir() -> TempDir {
let tmpdir = tmpdir();
let mut dirpath = tmpdir.clone();
let mut dirpath = tmpdir.path().clone();
dirpath.push(format!("test-가一ー你好"));
mkdir(&dirpath, io::UserRWX);
assert!(dirpath.is_dir());
......@@ -978,7 +991,7 @@ pub fn tmpdir() -> TempDir {
assert!(!Path::new("test/nonexistent-bogus-path").exists());
let tmpdir = tmpdir();
let unicode = tmpdir.clone();
let unicode = tmpdir.path();
let unicode = unicode.join(format!("test-각丁ー再见"));
mkdir(&unicode, io::UserRWX);
assert!(unicode.exists());
......@@ -1015,7 +1028,7 @@ pub fn tmpdir() -> TempDir {
let out = tmpdir.join("out");
File::create(&out);
match io::result(|| copy(&out, &*tmpdir)) {
match io::result(|| copy(&out, tmpdir.path())) {
Ok(..) => fail!(), Err(..) => {}
}
})
......@@ -1037,7 +1050,7 @@ pub fn tmpdir() -> TempDir {
let tmpdir = tmpdir();
let out = tmpdir.join("out");
match io::result(|| copy(&*tmpdir, &out)) {
match io::result(|| copy(tmpdir.path(), &out)) {
Ok(..) => fail!(), Err(..) => {}
}
assert!(!out.exists());
......@@ -1082,7 +1095,7 @@ pub fn tmpdir() -> TempDir {
iotest!(fn readlink_not_symlink() {
let tmpdir = tmpdir();
match io::result(|| readlink(&*tmpdir)) {
match io::result(|| readlink(tmpdir.path())) {
Ok(..) => fail!("wanted a failure"),
Err(..) => {}
}
......
......@@ -83,7 +83,8 @@ fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl IndependentSample<f64> for Exp {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
(*rng.gen::<Exp1>()) * self.lambda_inverse
let Exp1(n) = rng.gen::<Exp1>();
n * self.lambda_inverse
}
}
......
......@@ -141,7 +141,7 @@ fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
}
impl IndependentSample<f64> for GammaSmallShape {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
let u = *rng.gen::<Open01<f64>>();
let Open01(u) = rng.gen::<Open01<f64>>();
self.large_shape.ind_sample(rng) * num::pow(u, self.inv_shape)
}
......@@ -149,14 +149,14 @@ fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
impl IndependentSample<f64> for GammaLargeShape {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
loop {
let x = *rng.gen::<StandardNormal>();
let StandardNormal(x) = rng.gen::<StandardNormal>();
let v_cbrt = 1.0 + self.c * x;
if v_cbrt <= 0.0 { // a^3 <= 0 iff a <= 0
continue
}
let v = v_cbrt * v_cbrt * v_cbrt;
let u = *rng.gen::<Open01<f64>>();
let Open01(u) = rng.gen::<Open01<f64>>();
let x_sqr = x * x;
if u < 1.0 - 0.0331 * x_sqr * x_sqr ||
......@@ -213,7 +213,7 @@ fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
match *self {
DoFExactlyOne => {
// k == 1 => N(0,1)^2
let norm = *rng.gen::<StandardNormal>();
let StandardNormal(norm) = rng.gen::<StandardNormal>();
norm * norm
}
DoFAnythingElse(ref g) => g.ind_sample(rng)
......@@ -302,7 +302,7 @@ fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl IndependentSample<f64> for StudentT {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
let norm = *rng.gen::<StandardNormal>();
let StandardNormal(norm) = rng.gen::<StandardNormal>();
norm * (self.dof / self.chi.ind_sample(rng)).sqrt()
}
}
......
......@@ -254,6 +254,7 @@ mod tests {
use super::*;
use option::{Some, None};
#[deriving(Eq)]
struct ConstRand(uint);
impl Rand for ConstRand {
fn rand<R: Rng>(_: &mut R) -> ConstRand {
......@@ -277,8 +278,8 @@ fn next_u64(&mut self) -> u64 {
fn test_rand_sample() {
let mut rand_sample = RandSample::<ConstRand>;
assert_eq!(*rand_sample.sample(&mut task_rng()), 0);
assert_eq!(*rand_sample.ind_sample(&mut task_rng()), 0);
assert_eq!(rand_sample.sample(&mut task_rng()), ConstRand(0));
assert_eq!(rand_sample.ind_sample(&mut task_rng()), ConstRand(0));
}
#[test]
fn test_weighted_choice() {
......
......@@ -46,8 +46,8 @@ fn zero_case<R:Rng>(rng: &mut R, u: f64) -> f64 {
let mut y = 0.0f64;
while -2.0 * y < x * x {
let x_ = *rng.gen::<Open01<f64>>();
let y_ = *rng.gen::<Open01<f64>>();
let Open01(x_) = rng.gen::<Open01<f64>>();
let Open01(y_) = rng.gen::<Open01<f64>>();
x = x_.ln() / ziggurat_tables::ZIG_NORM_R;
y = y_.ln();
......@@ -102,7 +102,8 @@ fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl IndependentSample<f64> for Normal {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
self.mean + self.std_dev * (*rng.gen::<StandardNormal>())
let StandardNormal(n) = rng.gen::<StandardNormal>();
self.mean + self.std_dev * n
}
}
......
......@@ -630,13 +630,11 @@ pub fn random<T: Rand>() -> T {
/// `[0,1)`.
///
/// # Example
/// ```rust
/// ```rust,ignore
/// use std::rand::{random, Open01};
///
/// println!("f32 from (0,1): {}", *random::<Open01<f32>>());
///
/// let x: Open01<f64> = random();
/// println!("f64 from (0,1): {}", *x);
/// let Open01(val) = random::<Open01<f32>>();
/// println!("f32 from (0,1): {}", val);
/// ```
pub struct Open01<F>(F);
......@@ -648,13 +646,11 @@ pub fn random<T: Rand>() -> T {
/// `[0,1)`.
///
/// # Example
/// ```rust
/// ```rust,ignore
/// use std::rand::{random, Closed01};
///
/// println!("f32 from [0,1]: {}", *random::<Closed01<f32>>());
///
/// let x: Closed01<f64> = random();
/// println!("f64 from [0,1]: {}", *x);
/// let Closed01(val) = random::<Closed01<f32>>();
/// println!("f32 from [0,1]: {}", val);
/// ```
pub struct Closed01<F>(F);
......
......@@ -233,10 +233,12 @@ mod tests {
struct ConstantRng(u64);
impl Rng for ConstantRng {
fn next_u32(&mut self) -> u32 {
(**self) as u32
let ConstantRng(v) = *self;
v as u32
}
fn next_u64(&mut self) -> u64 {
**self
let ConstantRng(v) = *self;
v
}
}
......@@ -254,10 +256,10 @@ fn rand_open() {
let mut rng = task_rng();
for _ in range(0, 1_000) {
// strict inequalities
let f = *rng.gen::<Open01<f64>>();
let Open01(f) = rng.gen::<Open01<f64>>();
assert!(0.0 < f && f < 1.0);
let f = *rng.gen::<Open01<f32>>();
let Open01(f) = rng.gen::<Open01<f32>>();
assert!(0.0 < f && f < 1.0);
}
}
......@@ -267,10 +269,10 @@ fn rand_closed() {
let mut rng = task_rng();
for _ in range(0, 1_000) {
// strict inequalities
let f = *rng.gen::<Closed01<f64>>();
let Closed01(f) = rng.gen::<Closed01<f64>>();
assert!(0.0 <= f && f <= 1.0);
let f = *rng.gen::<Closed01<f32>>();
let Closed01(f) = rng.gen::<Closed01<f32>>();
assert!(0.0 <= f && f <= 1.0);
}
}
......
......@@ -153,9 +153,13 @@ fn flush(w: Option<~Writer>) {
// annihilated invoke TLS. Sadly these two operations seemed to
// be intertwined, and miraculously work for now...
let mut task = Local::borrow(None::<Task>);
let storage = task.get().storage.take();
let storage_map = {
let task = task.get();
let LocalStorage(ref mut optmap) = task.storage;
optmap.take()
};
drop(task);
drop(storage);
drop(storage_map);
// Destroy remaining boxes. Also may run user dtors.
unsafe { cleanup::annihilate(); }
......
......@@ -4042,7 +4042,8 @@ fn split_unicode_not_ascii(bh: &mut BenchHarness) {
struct NotAscii(char);
impl CharEq for NotAscii {
fn matches(&self, c: char) -> bool {
**self == c
let NotAscii(cc) = *self;
cc == c
}
fn only_ascii(&self) -> bool { false }
}
......@@ -4065,7 +4066,10 @@ fn split_not_ascii(bh: &mut BenchHarness) {
struct NotAscii(char);
impl CharEq for NotAscii {
#[inline]
fn matches(&self, c: char) -> bool { **self == c }
fn matches(&self, c: char) -> bool {
let NotAscii(cc) = *self;
cc == c
}
fn only_ascii(&self) -> bool { false }
}
let s = "Mary had a little lamb, Little lamb, little-lamb.";
......
......@@ -46,35 +46,35 @@ pub trait Pos {
impl Pos for BytePos {
fn from_uint(n: uint) -> BytePos { BytePos(n as u32) }
fn to_uint(&self) -> uint { **self as uint }
fn to_uint(&self) -> uint { let BytePos(n) = *self; n as uint }
}
impl Add<BytePos, BytePos> for BytePos {
fn add(&self, rhs: &BytePos) -> BytePos {
BytePos(**self + **rhs)
BytePos((self.to_uint() + rhs.to_uint()) as u32)
}
}
impl Sub<BytePos, BytePos> for BytePos {
fn sub(&self, rhs: &BytePos) -> BytePos {
BytePos(**self - **rhs)
BytePos((self.to_uint() - rhs.to_uint()) as u32)
}
}
impl Pos for CharPos {
fn from_uint(n: uint) -> CharPos { CharPos(n) }
fn to_uint(&self) -> uint { **self }
fn to_uint(&self) -> uint { let CharPos(n) = *self; n }
}
impl Add<CharPos,CharPos> for CharPos {
fn add(&self, rhs: &CharPos) -> CharPos {
CharPos(**self + **rhs)
CharPos(self.to_uint() + rhs.to_uint())
}
}
impl Sub<CharPos,CharPos> for CharPos {
fn sub(&self, rhs: &CharPos) -> CharPos {
CharPos(**self - **rhs)
CharPos(self.to_uint() - rhs.to_uint())
}
}
......
......@@ -172,7 +172,8 @@ fn new(start_color: u8) -> Colors {
}
fn next(&self) -> u8 {
let val = **self & HEADS;
let Colors(c) = *self;
let val = c & HEADS;
if (0u16 == val) {
return 0u8;
} else {
......@@ -184,7 +185,7 @@ fn next(&self) -> u8 {
fn remove(&mut self, color: u8) {
if color != 0u8 {
let val = **self;
let Colors(val) = *self;
let mask = !(1u16 << color);
*self = Colors(val & mask);
}
......
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct foo(int);
fn main() {
let x = foo(3);
*x = 4; //~ ERROR cannot assign to immutable anonymous field
}
......@@ -14,7 +14,8 @@ enum Either<T, U> { Left(T), Right(U) }
impl X {
pub fn with(&self, blk: |x: &Either<(uint,uint),extern fn()>|) {
blk(&**self)
let X(ref e) = *self;
blk(e)
}
}
......
......@@ -12,7 +12,9 @@
impl Add<foo, foo> for foo {
fn add(&self, f: &foo) -> foo {
foo(~(***self + **(*f)))
let foo(~i) = *self;
let foo(~j) = *f;
foo(~(i + j))
}
}
......
struct S(~str);
impl Drop for S {
fn drop(&mut self) { println(**self); }
fn drop(&mut self) { }
}
fn move_in_match() {
......
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[feature(managed_boxes)];
struct foo(~int);
fn borrow(x: @foo) {
let _y = &***x;
*x = foo(~4); //~ ERROR cannot assign
}
fn main() {
}
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern: can only dereference structs
struct cat {
x: ()
}
fn main() {
let kitty : cat = cat { x: () };
error!("{:?}", *kitty);
}
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern: can only dereference structs
struct cat {
foo: ()
}
fn main() {
let nyan = cat { foo: () };
error!("{:?}", *nyan);
}
......@@ -10,7 +10,7 @@
struct thing(uint);
impl Ord for thing { //~ ERROR not all trait methods implemented, missing: `lt`
fn le(&self, other: &thing) -> bool { **self < **other }
fn ge(&self, other: &thing) -> bool { **self < **other }
fn le(&self, other: &thing) -> bool { true }
fn ge(&self, other: &thing) -> bool { true }
}
fn main() {}
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
enum Either<T, U> { Left(T), Right(U) }
struct S(Either<uint, uint>);
fn main() {
match *S(Left(5)) {
S(_) => {} //~ ERROR mismatched types: expected `Either<uint,uint>` but found a structure pattern
}
}
......@@ -26,7 +26,9 @@ fn make_cycle<A:'static>(a: A) {
let g: @RefCell<RecEnum<A>> = @RefCell::new(RecEnum(Rec {val: a, rec: None}));
{
let mut gb = g.borrow_mut();
gb.get().rec = Some(g);
let gg = gb.get();
let RecEnum(ref mut gg) = *gg;
gg.rec = Some(g);
}
}
......
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Check that we can define inherent methods on newtype enums that use
// an auto-ref'd receiver.
struct Foo(uint);
impl Foo {
pub fn len(&self) -> uint { **self }
}
pub fn main() {
let m = Foo(3);
assert_eq!(m.len(), 3);
}
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait double {
fn double(&self) -> uint;
}
impl double for uint {
fn double(&self) -> uint { *self * 2u }
}
struct foo(uint);
pub fn main() {
let x = foo(3u);
assert_eq!(x.double(), 6u);
}
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct noncopyable {
i: (),
}
impl Drop for noncopyable {
fn drop(&mut self) {
error!("dropped");
}
}
fn noncopyable() -> noncopyable {
noncopyable {
i: ()
}
}
struct wrapper(noncopyable);
pub fn main() {
let x1 = wrapper(noncopyable());
let _x2 = *x1;
}
......@@ -14,7 +14,9 @@
impl Eq for Fool {
fn eq(&self, other: &Fool) -> bool {
**self != **other
let Fool(this) = *self;
let Fool(other) = *other;
this != other
}
}
......@@ -22,7 +24,9 @@ fn eq(&self, other: &Fool) -> bool {
impl Ord for Int {
fn lt(&self, other: &Int) -> bool {
**self < **other
let Int(this) = *self;
let Int(other) = *other;
this < other
}
}
......@@ -30,7 +34,9 @@ fn lt(&self, other: &Int) -> bool {
impl Ord for RevInt {
fn lt(&self, other: &RevInt) -> bool {
**self > **other
let RevInt(this) = *self;
let RevInt(other) = *other;
this > other
}
}
......
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct S(&'static [int]);
static C0: S = S([3]);
static C1: int = C0[0];
pub fn main() {
assert_eq!(C1, 3);
}
......@@ -10,11 +10,7 @@
static C: &'static int = &1000;
static D: int = *C;
struct S(&'static int);
static E: &'static S = &S(C);
static F: int = ***E;
pub fn main() {
assert_eq!(D, 1000);
assert_eq!(F, 1000);
}
struct S<T>(T);
pub fn main() {
let s = S(2i);
println(s.to_str());
let _s = S(2i);
}
......@@ -16,7 +16,7 @@
impl Drop for NonCopyable {
fn drop(&mut self) {
let p = **self;
let NonCopyable(p) = *self;
let _v = unsafe { transmute::<*c_void, ~int>(p) };
}
}
......
......@@ -11,5 +11,6 @@
struct T (&'static [int]);
static t : T = T (&'static [5, 4, 3]);
pub fn main () {
assert_eq!(t[0], 5);
let T(ref v) = t;
assert_eq!(v[0], 5);
}
......@@ -16,7 +16,8 @@ pub fn new(wrapped: ~str) -> Wrapper {
}
pub fn say_hi(&self) {
println!("hello {}", **self);
let Wrapper(ref s) = *self;
println!("hello {}", *s);
}
}
......
......@@ -18,6 +18,7 @@
impl fmt::Default for Foo {
fn fmt(f: &Foo, _fmt: &mut fmt::Formatter) {
let Foo(ref f) = *f;
assert!(f.get() == 0);
f.set(1);
}
......@@ -28,6 +29,7 @@ pub fn main() {
do spawn {
let mut f = Foo(Cell::new(0));
debug!("{}", f);
let Foo(ref mut f) = f;
assert!(f.get() == 1);
c.send(());
}
......
......@@ -20,12 +20,14 @@
struct S<T> { i:u8, t:T }
impl<T> S<T> { fn unwrap(self) -> T { self.t } }
#[deriving(Eq)]
struct A((u32, u32));
#[deriving(Eq)]
struct B(u64);
pub fn main() {
static Ca: S<A> = S { i: 0, t: A((13, 104)) };
static Cb: S<B> = S { i: 0, t: B(31337) };
assert_eq!(*(Ca.unwrap()), (13, 104));
assert_eq!(*(Cb.unwrap()), 31337);
assert_eq!(Ca.unwrap(), A((13, 104)));
assert_eq!(Cb.unwrap(), B(31337));
}
......@@ -11,13 +11,20 @@
#[deriving(Clone)]
struct myvec<X>(~[X]);
fn myvec_deref<X:Clone>(mv: myvec<X>) -> ~[X] { return (*mv).clone(); }
fn myvec_deref<X:Clone>(mv: myvec<X>) -> ~[X] {
let myvec(v) = mv;
return v.clone();
}
fn myvec_elt<X>(mv: myvec<X>) -> X { return mv[0]; }
fn myvec_elt<X>(mv: myvec<X>) -> X {
let myvec(v) = mv;
return v[0];
}
pub fn main() {
let mv = myvec(~[1, 2, 3]);
assert_eq!(myvec_deref(mv.clone())[1], 2);
assert_eq!(myvec_elt(mv.clone()), 1);
assert_eq!(mv[2], 3);
let myvec(v) = mv;
assert_eq!(v[2], 3);
}
......@@ -19,7 +19,8 @@
#[unsafe_destructor]
impl Drop for Foo {
fn drop(&mut self) {
self.set(23);
let Foo(i) = *self;
i.set(23);
}
}
......
......@@ -6,7 +6,8 @@
impl Drop for Fd {
fn drop(&mut self) {
unsafe {
libc::close(**self);
let Fd(s) = *self;
libc::close(s);
}
}
}
......
......@@ -8,6 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[deriving(Eq)]
struct Foo(uint);
fn foo() -> Foo {
......@@ -15,5 +16,5 @@ fn foo() -> Foo {
}
pub fn main() {
assert_eq!(*foo(), 42);
assert_eq!(foo(), Foo(42));
}
......@@ -12,10 +12,14 @@
struct Mytype {compute: extern fn(mytype) -> int, val: int}
fn compute(i: mytype) -> int { return i.val + 20; }
fn compute(i: mytype) -> int {
let mytype(m) = i;
return m.val + 20;
}
pub fn main() {
let myval = mytype(Mytype{compute: compute, val: 30});
println!("{}", compute(myval));
assert_eq!((myval.compute)(myval), 50);
let mytype(m) = myval;
assert_eq!((m.compute)(myval), 50);
}
......@@ -34,16 +34,23 @@ fn align(size: uint, align: uint) -> uint {
struct ptr_visit_adaptor<V>(Inner<V>);
impl<V:TyVisitor + movable_ptr> ptr_visit_adaptor<V> {
fn inner<'a>(&'a mut self) -> &'a mut V {
let ptr_visit_adaptor(ref mut i) = *self;
&mut i.inner
}
}
impl<V:TyVisitor + movable_ptr> ptr_visit_adaptor<V> {
#[inline(always)]
pub fn bump(&mut self, sz: uint) {
self.inner.move_ptr(|p| ((p as uint) + sz) as *c_void)
self.inner().move_ptr(|p| ((p as uint) + sz) as *c_void)
}
#[inline(always)]
pub fn align(&mut self, a: uint) {
self.inner.move_ptr(|p| align(p as uint, a) as *c_void)
self.inner().move_ptr(|p| align(p as uint, a) as *c_void)
}
#[inline(always)]
......@@ -62,133 +69,133 @@ impl<V:TyVisitor + movable_ptr> TyVisitor for ptr_visit_adaptor<V> {
fn visit_bot(&mut self) -> bool {
self.align_to::<()>();
if ! self.inner.visit_bot() { return false; }
if ! self.inner().visit_bot() { return false; }
self.bump_past::<()>();
true
}
fn visit_nil(&mut self) -> bool {
self.align_to::<()>();
if ! self.inner.visit_nil() { return false; }
if ! self.inner().visit_nil() { return false; }
self.bump_past::<()>();
true
}
fn visit_bool(&mut self) -> bool {
self.align_to::<bool>();
if ! self.inner.visit_bool() { return false; }
if ! self.inner().visit_bool() { return false; }
self.bump_past::<bool>();
true
}
fn visit_int(&mut self) -> bool {
self.align_to::<int>();
if ! self.inner.visit_int() { return false; }
if ! self.inner().visit_int() { return false; }
self.bump_past::<int>();
true
}
fn visit_i8(&mut self) -> bool {
self.align_to::<i8>();
if ! self.inner.visit_i8() { return false; }
if ! self.inner().visit_i8() { return false; }
self.bump_past::<i8>();
true
}
fn visit_i16(&mut self) -> bool {
self.align_to::<i16>();
if ! self.inner.visit_i16() { return false; }
if ! self.inner().visit_i16() { return false; }
self.bump_past::<i16>();
true
}
fn visit_i32(&mut self) -> bool {
self.align_to::<i32>();
if ! self.inner.visit_i32() { return false; }
if ! self.inner().visit_i32() { return false; }
self.bump_past::<i32>();
true
}
fn visit_i64(&mut self) -> bool {
self.align_to::<i64>();
if ! self.inner.visit_i64() { return false; }
if ! self.inner().visit_i64() { return false; }
self.bump_past::<i64>();
true
}
fn visit_uint(&mut self) -> bool {
self.align_to::<uint>();
if ! self.inner.visit_uint() { return false; }
if ! self.inner().visit_uint() { return false; }
self.bump_past::<uint>();
true
}
fn visit_u8(&mut self) -> bool {
self.align_to::<u8>();
if ! self.inner.visit_u8() { return false; }
if ! self.inner().visit_u8() { return false; }
self.bump_past::<u8>();
true
}
fn visit_u16(&mut self) -> bool {
self.align_to::<u16>();
if ! self.inner.visit_u16() { return false; }
if ! self.inner().visit_u16() { return false; }
self.bump_past::<u16>();
true
}
fn visit_u32(&mut self) -> bool {
self.align_to::<u32>();
if ! self.inner.visit_u32() { return false; }
if ! self.inner().visit_u32() { return false; }
self.bump_past::<u32>();
true
}
fn visit_u64(&mut self) -> bool {
self.align_to::<u64>();
if ! self.inner.visit_u64() { return false; }
if ! self.inner().visit_u64() { return false; }
self.bump_past::<u64>();
true
}
fn visit_f32(&mut self) -> bool {
self.align_to::<f32>();
if ! self.inner.visit_f32() { return false; }
if ! self.inner().visit_f32() { return false; }
self.bump_past::<f32>();
true
}
fn visit_f64(&mut self) -> bool {
self.align_to::<f64>();
if ! self.inner.visit_f64() { return false; }
if ! self.inner().visit_f64() { return false; }
self.bump_past::<f64>();
true
}
fn visit_char(&mut self) -> bool {
self.align_to::<char>();
if ! self.inner.visit_char() { return false; }
if ! self.inner().visit_char() { return false; }
self.bump_past::<char>();
true
}
fn visit_estr_box(&mut self) -> bool {
self.align_to::<@str>();
if ! self.inner.visit_estr_box() { return false; }
if ! self.inner().visit_estr_box() { return false; }
self.bump_past::<@str>();
true
}
fn visit_estr_uniq(&mut self) -> bool {
self.align_to::<~str>();
if ! self.inner.visit_estr_uniq() { return false; }
if ! self.inner().visit_estr_uniq() { return false; }
self.bump_past::<~str>();
true
}
fn visit_estr_slice(&mut self) -> bool {
self.align_to::<&'static str>();
if ! self.inner.visit_estr_slice() { return false; }
if ! self.inner().visit_estr_slice() { return false; }
self.bump_past::<&'static str>();
true
}
......@@ -197,42 +204,42 @@ fn visit_estr_fixed(&mut self, n: uint,
sz: uint,
align: uint) -> bool {
self.align(align);
if ! self.inner.visit_estr_fixed(n, sz, align) { return false; }
if ! self.inner().visit_estr_fixed(n, sz, align) { return false; }
self.bump(sz);
true
}
fn visit_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.align_to::<@u8>();
if ! self.inner.visit_box(mtbl, inner) { return false; }
if ! self.inner().visit_box(mtbl, inner) { return false; }
self.bump_past::<@u8>();
true
}
fn visit_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.align_to::<~u8>();
if ! self.inner.visit_uniq(mtbl, inner) { return false; }
if ! self.inner().visit_uniq(mtbl, inner) { return false; }
self.bump_past::<~u8>();
true
}
fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.align_to::<~u8>();
if ! self.inner.visit_uniq_managed(mtbl, inner) { return false; }
if ! self.inner().visit_uniq_managed(mtbl, inner) { return false; }
self.bump_past::<~u8>();
true
}
fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.align_to::<*u8>();
if ! self.inner.visit_ptr(mtbl, inner) { return false; }
if ! self.inner().visit_ptr(mtbl, inner) { return false; }
self.bump_past::<*u8>();
true
}
fn visit_rptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.align_to::<&'static u8>();
if ! self.inner.visit_rptr(mtbl, inner) { return false; }
if ! self.inner().visit_rptr(mtbl, inner) { return false; }
self.bump_past::<&'static u8>();
true
}
......@@ -243,41 +250,41 @@ fn visit_unboxed_vec(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
// or else possibly we could have some weird interface wherein we
// read-off a word from inner's pointers, but the read-word has to
// always be the same in all sub-pointers? Dubious.
if ! self.inner.visit_vec(mtbl, inner) { return false; }
if ! self.inner().visit_vec(mtbl, inner) { return false; }
true
}
fn visit_vec(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.align_to::<~[u8]>();
if ! self.inner.visit_vec(mtbl, inner) { return false; }
if ! self.inner().visit_vec(mtbl, inner) { return false; }
self.bump_past::<~[u8]>();
true
}
fn visit_evec_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.align_to::<@[u8]>();
if ! self.inner.visit_evec_box(mtbl, inner) { return false; }
if ! self.inner().visit_evec_box(mtbl, inner) { return false; }
self.bump_past::<@[u8]>();
true
}
fn visit_evec_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.align_to::<~[u8]>();
if ! self.inner.visit_evec_uniq(mtbl, inner) { return false; }
if ! self.inner().visit_evec_uniq(mtbl, inner) { return false; }
self.bump_past::<~[u8]>();
true
}
fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.align_to::<~[@u8]>();
if ! self.inner.visit_evec_uniq_managed(mtbl, inner) { return false; }
if ! self.inner().visit_evec_uniq_managed(mtbl, inner) { return false; }
self.bump_past::<~[@u8]>();
true
}
fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.align_to::<&'static [u8]>();
if ! self.inner.visit_evec_slice(mtbl, inner) { return false; }
if ! self.inner().visit_evec_slice(mtbl, inner) { return false; }
self.bump_past::<&'static [u8]>();
true
}
......@@ -285,7 +292,7 @@ fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
fn visit_evec_fixed(&mut self, n: uint, sz: uint, align: uint,
mtbl: uint, inner: *TyDesc) -> bool {
self.align(align);
if ! self.inner.visit_evec_fixed(n, sz, align, mtbl, inner) {
if ! self.inner().visit_evec_fixed(n, sz, align, mtbl, inner) {
return false;
}
self.bump(sz);
......@@ -294,25 +301,25 @@ fn visit_evec_fixed(&mut self, n: uint, sz: uint, align: uint,
fn visit_enter_rec(&mut self, n_fields: uint, sz: uint, align: uint) -> bool {
self.align(align);
if ! self.inner.visit_enter_rec(n_fields, sz, align) { return false; }
if ! self.inner().visit_enter_rec(n_fields, sz, align) { return false; }
true
}
fn visit_rec_field(&mut self, i: uint, name: &str,
mtbl: uint, inner: *TyDesc) -> bool {
if ! self.inner.visit_rec_field(i, name, mtbl, inner) { return false; }
if ! self.inner().visit_rec_field(i, name, mtbl, inner) { return false; }
true
}
fn visit_leave_rec(&mut self, n_fields: uint, sz: uint, align: uint) -> bool {
if ! self.inner.visit_leave_rec(n_fields, sz, align) { return false; }
if ! self.inner().visit_leave_rec(n_fields, sz, align) { return false; }
true
}
fn visit_enter_class(&mut self, name: &str, named_fields: bool, n_fields: uint, sz: uint,
align: uint) -> bool {
self.align(align);
if ! self.inner.visit_enter_class(name, named_fields, n_fields, sz, align) {
if ! self.inner().visit_enter_class(name, named_fields, n_fields, sz, align) {
return false;
}
true
......@@ -320,7 +327,7 @@ fn visit_enter_class(&mut self, name: &str, named_fields: bool, n_fields: uint,
fn visit_class_field(&mut self, i: uint, name: &str, named: bool,
mtbl: uint, inner: *TyDesc) -> bool {
if ! self.inner.visit_class_field(i, name, named, mtbl, inner) {
if ! self.inner().visit_class_field(i, name, named, mtbl, inner) {
return false;
}
true
......@@ -328,7 +335,7 @@ fn visit_class_field(&mut self, i: uint, name: &str, named: bool,
fn visit_leave_class(&mut self, name: &str, named_fields: bool, n_fields: uint, sz: uint,
align: uint) -> bool {
if ! self.inner.visit_leave_class(name, named_fields, n_fields, sz, align) {
if ! self.inner().visit_leave_class(name, named_fields, n_fields, sz, align) {
return false;
}
true
......@@ -336,41 +343,41 @@ fn visit_leave_class(&mut self, name: &str, named_fields: bool, n_fields: uint,
fn visit_enter_tup(&mut self, n_fields: uint, sz: uint, align: uint) -> bool {
self.align(align);
if ! self.inner.visit_enter_tup(n_fields, sz, align) { return false; }
if ! self.inner().visit_enter_tup(n_fields, sz, align) { return false; }
true
}
fn visit_tup_field(&mut self, i: uint, inner: *TyDesc) -> bool {
if ! self.inner.visit_tup_field(i, inner) { return false; }
if ! self.inner().visit_tup_field(i, inner) { return false; }
true
}
fn visit_leave_tup(&mut self, n_fields: uint, sz: uint, align: uint) -> bool {
if ! self.inner.visit_leave_tup(n_fields, sz, align) { return false; }
if ! self.inner().visit_leave_tup(n_fields, sz, align) { return false; }
true
}
fn visit_enter_fn(&mut self, purity: uint, proto: uint,
n_inputs: uint, retstyle: uint) -> bool {
if ! self.inner.visit_enter_fn(purity, proto, n_inputs, retstyle) {
if ! self.inner().visit_enter_fn(purity, proto, n_inputs, retstyle) {
return false
}
true
}
fn visit_fn_input(&mut self, i: uint, mode: uint, inner: *TyDesc) -> bool {
if ! self.inner.visit_fn_input(i, mode, inner) { return false; }
if ! self.inner().visit_fn_input(i, mode, inner) { return false; }
true
}
fn visit_fn_output(&mut self, retstyle: uint, variadic: bool, inner: *TyDesc) -> bool {
if ! self.inner.visit_fn_output(retstyle, variadic, inner) { return false; }
if ! self.inner().visit_fn_output(retstyle, variadic, inner) { return false; }
true
}
fn visit_leave_fn(&mut self, purity: uint, proto: uint,
n_inputs: uint, retstyle: uint) -> bool {
if ! self.inner.visit_leave_fn(purity, proto, n_inputs, retstyle) {
if ! self.inner().visit_leave_fn(purity, proto, n_inputs, retstyle) {
return false;
}
true
......@@ -381,7 +388,7 @@ fn visit_enter_enum(&mut self, n_variants: uint,
sz: uint, align: uint)
-> bool {
self.align(align);
if ! self.inner.visit_enter_enum(n_variants, get_disr, sz, align) { return false; }
if ! self.inner().visit_enter_enum(n_variants, get_disr, sz, align) { return false; }
true
}
......@@ -389,7 +396,7 @@ fn visit_enter_enum_variant(&mut self, variant: uint,
disr_val: Disr,
n_fields: uint,
name: &str) -> bool {
if ! self.inner.visit_enter_enum_variant(variant, disr_val,
if ! self.inner().visit_enter_enum_variant(variant, disr_val,
n_fields, name) {
return false;
}
......@@ -397,7 +404,7 @@ fn visit_enter_enum_variant(&mut self, variant: uint,
}
fn visit_enum_variant_field(&mut self, i: uint, offset: uint, inner: *TyDesc) -> bool {
if ! self.inner.visit_enum_variant_field(i, offset, inner) { return false; }
if ! self.inner().visit_enum_variant_field(i, offset, inner) { return false; }
true
}
......@@ -405,7 +412,7 @@ fn visit_leave_enum_variant(&mut self, variant: uint,
disr_val: Disr,
n_fields: uint,
name: &str) -> bool {
if ! self.inner.visit_leave_enum_variant(variant, disr_val,
if ! self.inner().visit_leave_enum_variant(variant, disr_val,
n_fields, name) {
return false;
}
......@@ -416,44 +423,44 @@ fn visit_leave_enum(&mut self, n_variants: uint,
get_disr: extern unsafe fn(ptr: *Opaque) -> Disr,
sz: uint, align: uint)
-> bool {
if ! self.inner.visit_leave_enum(n_variants, get_disr, sz, align) { return false; }
if ! self.inner().visit_leave_enum(n_variants, get_disr, sz, align) { return false; }
true
}
fn visit_trait(&mut self, name: &str) -> bool {
self.align_to::<@TyVisitor>();
if ! self.inner.visit_trait(name) { return false; }
if ! self.inner().visit_trait(name) { return false; }
self.bump_past::<@TyVisitor>();
true
}
fn visit_param(&mut self, i: uint) -> bool {
if ! self.inner.visit_param(i) { return false; }
if ! self.inner().visit_param(i) { return false; }
true
}
fn visit_self(&mut self) -> bool {
self.align_to::<&'static u8>();
if ! self.inner.visit_self() { return false; }
if ! self.inner().visit_self() { return false; }
self.align_to::<&'static u8>();
true
}
fn visit_type(&mut self) -> bool {
if ! self.inner.visit_type() { return false; }
if ! self.inner().visit_type() { return false; }
true
}
fn visit_opaque_box(&mut self) -> bool {
self.align_to::<@u8>();
if ! self.inner.visit_opaque_box() { return false; }
if ! self.inner().visit_opaque_box() { return false; }
self.bump_past::<@u8>();
true
}
fn visit_closure_ptr(&mut self, ck: uint) -> bool {
self.align_to::<(uint,uint)>();
if ! self.inner.visit_closure_ptr(ck) { return false; }
if ! self.inner().visit_closure_ptr(ck) { return false; }
self.bump_past::<(uint,uint)>();
true
}
......@@ -471,13 +478,15 @@ struct Stuff {
impl my_visitor {
pub fn get<T:Clone>(&mut self, f: |T|) {
unsafe {
f((*((**self).get().ptr1 as *T)).clone());
let my_visitor(s) = *self;
f((*((*s).get().ptr1 as *T)).clone());
}
}
pub fn visit_inner(&mut self, inner: *TyDesc) -> bool {
unsafe {
let u = my_visitor(**self);
let my_visitor(s) = *self;
let u = my_visitor(s);
let mut v = ptr_visit_adaptor::<my_visitor>(Inner {inner: u});
visit_tydesc(inner, &mut v as &mut TyVisitor);
true
......@@ -489,7 +498,8 @@ struct Inner<V> { inner: V }
impl movable_ptr for my_visitor {
fn move_ptr(&mut self, adjustment: |*c_void| -> *c_void) {
let mut this = self.borrow_mut();
let my_visitor(s) = *self;
let mut this = s.borrow_mut();
this.get().ptr1 = adjustment(this.get().ptr1);
this.get().ptr2 = adjustment(this.get().ptr2);
}
......@@ -501,14 +511,16 @@ fn visit_bot(&mut self) -> bool { true }
fn visit_nil(&mut self) -> bool { true }
fn visit_bool(&mut self) -> bool {
self.get::<bool>(|b| {
let mut this = self.borrow_mut();
let my_visitor(s) = *self;
let mut this = s.borrow_mut();
this.get().vals.push(b.to_str());
});
true
}
fn visit_int(&mut self) -> bool {
self.get::<int>(|i| {
let mut this = self.borrow_mut();
let my_visitor(s) = *self;
let mut this = s.borrow_mut();
this.get().vals.push(i.to_str());
});
true
......@@ -640,7 +652,8 @@ pub fn main() {
(*td).size, (*td).align);
visit_tydesc(td, &mut v as &mut TyVisitor);
let mut ub = u.borrow_mut();
let my_visitor(m) = u;
let mut ub = m.borrow_mut();
let r = ub.get().vals.clone();
for s in r.iter() {
println!("val: {}", *s);
......
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct T(&'static [int]);
static A: T = T(&'static [5, 4, 3]);
static B: T = T(&[5, 4, 3]);
static C: T = T([5, 4, 3]);
pub fn main() {
assert_eq!(A[0], 5);
assert_eq!(B[1], 4);
assert_eq!(C[2], 3);
}
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Foo(int);
pub fn main() {
let x: Foo = Foo(2);
assert_eq!(*x, 2);
}
......@@ -40,7 +40,8 @@ fn to_str_(&self) -> ~str { self.to_str() }
impl to_str for Tree {
fn to_str_(&self) -> ~str {
let this = self.borrow();
let Tree(t) = *self;
let this = t.borrow();
let (l, r) = (this.get().left, this.get().right);
let val = &this.get().val;
format!("[{}, {}, {}]", val.to_str_(), l.to_str_(), r.to_str_())
......@@ -61,7 +62,8 @@ pub fn main() {
assert!(foo(t2) == expected);
{
let mut t1 = t1.borrow_mut();
let Tree(t1_) = t1;
let mut t1 = t1_.borrow_mut();
t1.get().left = Some(t2); // create cycle
}
}
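Taken together, the recurring change throughout this commit is that `**self` (and `(**self)...`) on a newtype struct is replaced by a destructuring `let`. A minimal sketch of the idiom in the pre-1.0 Rust of this tree (the `Inches` newtype is hypothetical and not part of the patch):

struct Inches(int);

impl Inches {
    fn get(&self) -> int {
        // destructure the newtype instead of dereferencing it
        let Inches(n) = *self;
        n
    }
}

pub fn main() {
    let i = Inches(12);
    assert_eq!(i.get(), 12);
}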