Commit 757b7ac2 authored by bors

Auto merge of #43986 - petrochenkov:pubcrate3, r=pnkfelix

rustc: Remove some dead code

Extracted from https://github.com/rust-lang/rust/pull/43192

r? @EddyB
......@@ -8,6 +8,8 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(warnings)]
#![feature(rustc_private)]
extern crate rustc;
......@@ -22,61 +24,51 @@
name: "alloc",
inputs: &[AllocatorTy::Layout],
output: AllocatorTy::ResultPtr,
is_unsafe: true,
},
AllocatorMethod {
name: "oom",
inputs: &[AllocatorTy::AllocErr],
output: AllocatorTy::Bang,
is_unsafe: false,
},
AllocatorMethod {
name: "dealloc",
inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout],
output: AllocatorTy::Unit,
is_unsafe: true,
},
AllocatorMethod {
name: "usable_size",
inputs: &[AllocatorTy::LayoutRef],
output: AllocatorTy::UsizePair,
is_unsafe: false,
},
AllocatorMethod {
name: "realloc",
inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout, AllocatorTy::Layout],
output: AllocatorTy::ResultPtr,
is_unsafe: true,
},
AllocatorMethod {
name: "alloc_zeroed",
inputs: &[AllocatorTy::Layout],
output: AllocatorTy::ResultPtr,
is_unsafe: true,
},
AllocatorMethod {
name: "alloc_excess",
inputs: &[AllocatorTy::Layout],
output: AllocatorTy::ResultExcess,
is_unsafe: true,
},
AllocatorMethod {
name: "realloc_excess",
inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout, AllocatorTy::Layout],
output: AllocatorTy::ResultExcess,
is_unsafe: true,
},
AllocatorMethod {
name: "grow_in_place",
inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout, AllocatorTy::Layout],
output: AllocatorTy::ResultUnit,
is_unsafe: true,
},
AllocatorMethod {
name: "shrink_in_place",
inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout, AllocatorTy::Layout],
output: AllocatorTy::ResultUnit,
is_unsafe: true,
},
];
......@@ -84,7 +76,6 @@ pub struct AllocatorMethod {
pub name: &'static str,
pub inputs: &'static [AllocatorTy],
pub output: AllocatorTy,
pub is_unsafe: bool,
}
pub enum AllocatorTy {
......
......@@ -12,9 +12,8 @@
//!
//! A simple wrapper over the platform's dynamic library facilities
use std::env;
use std::ffi::{CString, OsString};
use std::path::{Path, PathBuf};
use std::ffi::CString;
use std::path::Path;
pub struct DynamicLibrary {
handle: *mut u8
......@@ -43,24 +42,6 @@ pub fn open(filename: Option<&Path>) -> Result<DynamicLibrary, String> {
}
}
/// Prepends a path to this process's search path for dynamic libraries
pub fn prepend_search_path(path: &Path) {
let mut search_path = DynamicLibrary::search_path();
search_path.insert(0, path.to_path_buf());
env::set_var(DynamicLibrary::envvar(), &DynamicLibrary::create_path(&search_path));
}
/// From a slice of paths, create a new value which is suitable to be an
/// environment variable for this platform's dylib search path.
pub fn create_path(path: &[PathBuf]) -> OsString {
let mut newvar = OsString::new();
for (i, path) in path.iter().enumerate() {
if i > 0 { newvar.push(DynamicLibrary::separator()); }
newvar.push(path);
}
return newvar;
}
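The two removed helpers build the dylib search-path variable by hand with a platform-specific separator. A minimal standalone sketch of the same idea using std's own `env::split_paths`/`env::join_paths` (the env-var name is whatever `envvar()` returns; nothing here is part of this diff):

use std::env;
use std::ffi::OsString;
use std::path::PathBuf;

// Prepend `new` to the search path stored in `envvar` and return the
// re-joined value, using the platform separator (';' on Windows, ':' elsewhere).
fn prepend_to_search_path(envvar: &str, new: PathBuf) -> OsString {
    let mut paths: Vec<PathBuf> = env::var_os(envvar)
        .map(|v| env::split_paths(&v).collect())
        .unwrap_or_default();
    paths.insert(0, new);
    env::join_paths(paths).expect("paths must not contain the separator")
}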
/// Returns the environment variable for this process's dynamic library
/// search path
pub fn envvar() -> &'static str {
......@@ -75,19 +56,6 @@ pub fn envvar() -> &'static str {
}
}
fn separator() -> &'static str {
if cfg!(windows) { ";" } else { ":" }
}
/// Returns the current search path for dynamic libraries being used by this
/// process
pub fn search_path() -> Vec<PathBuf> {
match env::var_os(DynamicLibrary::envvar()) {
Some(var) => env::split_paths(&var).collect(),
None => Vec::new(),
}
}
/// Accesses the value at the symbol of the dynamic library.
pub unsafe fn symbol<T>(&self, symbol: &str) -> Result<*mut T, String> {
// This function should have a lifetime constraint of 'a on
......
......@@ -95,15 +95,6 @@ pub fn path(&self) -> &path::Path {
self.path.as_ref().unwrap()
}
/// Close and remove the temporary directory
///
/// Although `TempDir` removes the directory on drop, in the destructor
/// any errors are ignored. To detect errors cleaning up the temporary
/// directory, call `close` instead.
pub fn close(mut self) -> io::Result<()> {
self.cleanup_dir()
}
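A minimal, self-contained sketch of the close-vs-drop pattern the comment above describes (the `TempDirLike` name is hypothetical, not this crate's type): dropping cleans up on a best-effort basis, while the explicit `close` surfaces the I/O error.

use std::fs;
use std::io;
use std::path::PathBuf;

struct TempDirLike {
    path: Option<PathBuf>,
}

impl TempDirLike {
    // Remove the directory now and report any error to the caller.
    fn close(mut self) -> io::Result<()> {
        self.cleanup()
    }

    fn cleanup(&mut self) -> io::Result<()> {
        match self.path.take() {
            Some(p) => fs::remove_dir_all(&p),
            None => Ok(()),
        }
    }
}

impl Drop for TempDirLike {
    fn drop(&mut self) {
        // Best-effort: any error during implicit cleanup is ignored.
        let _ = self.cleanup();
    }
}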
fn cleanup_dir(&mut self) -> io::Result<()> {
match self.path {
Some(ref p) => fs::remove_dir_all(p),
......
......@@ -113,37 +113,6 @@ pub fn gather_move_from_expr<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
gather_move(bccx, move_data, move_error_collector, move_info);
}
pub fn gather_match_variant<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
move_data: &MoveData<'tcx>,
_move_error_collector: &mut MoveErrorCollector<'tcx>,
move_pat: &hir::Pat,
cmt: mc::cmt<'tcx>,
mode: euv::MatchMode) {
let tcx = bccx.tcx;
debug!("gather_match_variant(move_pat={}, cmt={:?}, mode={:?})",
move_pat.id, cmt, mode);
let opt_lp = opt_loan_path(&cmt);
match opt_lp {
Some(lp) => {
match lp.kind {
LpDowncast(ref base_lp, _) =>
move_data.add_variant_match(
tcx, lp.clone(), move_pat.id, base_lp.clone(), mode),
_ => bug!("should only call gather_match_variant \
for cat_downcast cmt"),
}
}
None => {
// We get None when input to match is non-path (e.g.
// temporary result like a function call). Since no
// loan-path is being matched, no need to record a
// downcast.
return;
}
}
}
pub fn gather_move_from_pat<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
move_data: &MoveData<'tcx>,
move_error_collector: &mut MoveErrorCollector<'tcx>,
......
......@@ -94,12 +94,6 @@ fn matched_pat(&mut self,
matched_pat,
cmt,
mode);
if let Categorization::Downcast(..) = cmt.cat {
gather_moves::gather_match_variant(
self.bccx, &self.move_data, &mut self.move_error_collector,
matched_pat, cmt, mode);
}
}
fn consume_pat(&mut self,
......
......@@ -714,15 +714,6 @@ pub fn report_reassigned_immutable_variable(&self,
err.emit();
}
pub fn span_err(&self, s: Span, m: &str) {
self.tcx.sess.span_err(s, m);
}
pub fn struct_span_err<S: Into<MultiSpan>>(&self, s: S, m: &str)
-> DiagnosticBuilder<'a> {
self.tcx.sess.struct_span_err(s, m)
}
pub fn struct_span_err_with_code<S: Into<MultiSpan>>(&self,
s: S,
msg: &str,
......@@ -731,10 +722,6 @@ pub fn struct_span_err_with_code<S: Into<MultiSpan>>(&self,
self.tcx.sess.struct_span_err_with_code(s, msg, code)
}
pub fn span_err_with_code<S: Into<MultiSpan>>(&self, s: S, msg: &str, code: &str) {
self.tcx.sess.span_err_with_code(s, msg, code);
}
fn bckerr_to_diag(&self, err: &BckError<'tcx>) -> DiagnosticBuilder<'a> {
let span = err.span.clone();
......
......@@ -53,10 +53,6 @@ pub struct MoveData<'tcx> {
/// kill move bits.
pub path_assignments: RefCell<Vec<Assignment>>,
/// Enum variant matched within a pattern on some match arm, like
/// `SomeStruct{ f: Variant1(x, y) } => ...`
pub variant_matches: RefCell<Vec<VariantMatch>>,
/// Assignments to a variable or path, like `x = foo`, but not `x += foo`.
pub assignee_ids: RefCell<NodeSet>,
}
......@@ -161,21 +157,6 @@ pub struct Assignment {
pub assignee_id: ast::NodeId,
}
#[derive(Copy, Clone)]
pub struct VariantMatch {
/// downcast to the variant.
pub path: MovePathIndex,
/// path being downcast to the variant.
pub base_path: MovePathIndex,
/// id where variant's pattern occurs
pub id: ast::NodeId,
/// says if variant established by move (and why), by copy, or by borrow.
pub mode: euv::MatchMode
}
#[derive(Clone, Copy)]
pub struct MoveDataFlowOperator;
......@@ -215,7 +196,6 @@ pub fn new() -> MoveData<'tcx> {
moves: RefCell::new(Vec::new()),
path_assignments: RefCell::new(Vec::new()),
var_assignments: RefCell::new(Vec::new()),
variant_matches: RefCell::new(Vec::new()),
assignee_ids: RefCell::new(NodeSet()),
}
}
......@@ -485,31 +465,6 @@ fn add_assignment_helper(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
}
/// Adds a new record for a match of `base_lp`, downcast to
/// variant `lp`, that occurs at location `pattern_id`. (One
/// should be able to recover the span info from the
/// `pattern_id` and the hir_map, I think.)
pub fn add_variant_match(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
lp: Rc<LoanPath<'tcx>>,
pattern_id: ast::NodeId,
base_lp: Rc<LoanPath<'tcx>>,
mode: euv::MatchMode) {
debug!("add_variant_match(lp={:?}, pattern_id={})",
lp, pattern_id);
let path_index = self.move_path(tcx, lp.clone());
let base_path_index = self.move_path(tcx, base_lp.clone());
let variant_match = VariantMatch {
path: path_index,
base_path: base_path_index,
id: pattern_id,
mode,
};
self.variant_matches.borrow_mut().push(variant_match);
}
/// Adds the gen/kills for the various moves and
/// assignments into the provided data flow contexts.
/// Moves are generated by moves and killed by assignments and
......
......@@ -37,11 +37,10 @@
pub use borrowck::check_crate;
pub use borrowck::build_borrowck_dataflow_data_for_fn;
pub use borrowck::{AnalysisData, BorrowckCtxt};
// NB: This module needs to be declared first so diagnostics are
// registered before they are used.
pub mod diagnostics;
mod diagnostics;
mod borrowck;
......
......@@ -41,7 +41,7 @@
// NB: This module needs to be declared first so diagnostics are
// registered before they are used.
pub mod diagnostics;
mod diagnostics;
mod eval;
mod _match;
......
......@@ -37,13 +37,6 @@ pub fn description(&self) -> &'static str {
self.ty.ty_to_string()
}
pub fn is_nan(&self) -> bool {
match self.ty {
ast::FloatTy::F32 => Single::from_bits(self.bits).is_nan(),
ast::FloatTy::F64 => Double::from_bits(self.bits).is_nan(),
}
}
/// Compares the values if they are of the same type
pub fn try_cmp(self, rhs: Self) -> Result<Ordering, ConstMathErr> {
match (self.ty, rhs.ty) {
......
......@@ -24,7 +24,7 @@
use std::slice;
#[repr(C)]
pub struct Blake2bCtx {
struct Blake2bCtx {
b: [u8; 128],
h: [u64; 8],
t: [u64; 2],
......
......@@ -134,56 +134,10 @@ pub fn is_dominated_by(&self, node: Node, dom: Node) -> bool {
self.dominators(node).any(|n| n == dom)
}
pub fn mutual_dominator_node(&self, node1: Node, node2: Node) -> Node {
assert!(self.is_reachable(node1),
"node {:?} is not reachable",
node1);
assert!(self.is_reachable(node2),
"node {:?} is not reachable",
node2);
intersect::<Node>(&self.post_order_rank,
&self.immediate_dominators,
node1,
node2)
}
pub fn mutual_dominator<I>(&self, iter: I) -> Option<Node>
where I: IntoIterator<Item = Node>
{
let mut iter = iter.into_iter();
iter.next()
.map(|dom| iter.fold(dom, |dom, node| self.mutual_dominator_node(dom, node)))
}
pub fn all_immediate_dominators(&self) -> &IndexVec<Node, Option<Node>> {
#[cfg(test)]
fn all_immediate_dominators(&self) -> &IndexVec<Node, Option<Node>> {
&self.immediate_dominators
}
pub fn dominator_tree(&self) -> DominatorTree<Node> {
let elem: Vec<Node> = Vec::new();
let mut children: IndexVec<Node, Vec<Node>> =
IndexVec::from_elem_n(elem, self.immediate_dominators.len());
let mut root = None;
for (index, immed_dom) in self.immediate_dominators.iter().enumerate() {
let node = Node::new(index);
match *immed_dom {
None => {
// node not reachable
}
Some(immed_dom) => {
if node == immed_dom {
root = Some(node);
} else {
children[immed_dom].push(node);
}
}
}
}
DominatorTree {
root: root.unwrap(),
children,
}
}
}
pub struct Iter<'dom, Node: Idx + 'dom> {
......@@ -215,38 +169,9 @@ pub struct DominatorTree<N: Idx> {
}
impl<Node: Idx> DominatorTree<Node> {
pub fn root(&self) -> Node {
self.root
}
pub fn children(&self, node: Node) -> &[Node] {
&self.children[node]
}
pub fn iter_children_of(&self, node: Node) -> IterChildrenOf<Node> {
IterChildrenOf {
tree: self,
stack: vec![node],
}
}
}
pub struct IterChildrenOf<'iter, Node: Idx + 'iter> {
tree: &'iter DominatorTree<Node>,
stack: Vec<Node>,
}
impl<'iter, Node: Idx> Iterator for IterChildrenOf<'iter, Node> {
type Item = Node;
fn next(&mut self) -> Option<Node> {
if let Some(node) = self.stack.pop() {
self.stack.extend(self.tree.children(node));
Some(node)
} else {
None
}
}
}
impl<Node: Idx> fmt::Debug for DominatorTree<Node> {
......
......@@ -47,22 +47,6 @@ fn post_order_walk<G: ControlFlowGraph>(graph: &G,
result.push(node);
}
pub fn pre_order_walk<G: ControlFlowGraph>(graph: &G,
node: G::Node,
result: &mut Vec<G::Node>,
visited: &mut IndexVec<G::Node, bool>) {
if visited[node] {
return;
}
visited[node] = true;
result.push(node);
for successor in graph.successors(node) {
pre_order_walk(graph, successor, result, visited);
}
}
pub fn reverse_post_order<G: ControlFlowGraph>(graph: &G, start_node: G::Node) -> Vec<G::Node> {
let mut vec = post_order_from(graph, start_node);
vec.reverse();
......
......@@ -9,7 +9,6 @@
// except according to those terms.
use super::super::test::TestGraph;
use super::super::transpose::TransposedGraph;
use super::*;
......@@ -20,22 +19,3 @@ fn diamond_post_order() {
let result = post_order_from(&graph, 0);
assert_eq!(result, vec![3, 1, 2, 0]);
}
#[test]
fn rev_post_order_inner_loop() {
// 0 -> 1 -> 2 -> 3 -> 5
// ^ ^ v |
// | 6 <- 4 |
// +-----------------+
let graph = TestGraph::new(0,
&[(0, 1), (1, 2), (2, 3), (3, 5), (3, 1), (2, 4), (4, 6), (6, 2)]);
let rev_graph = TransposedGraph::new(&graph);
let result = post_order_from_to(&rev_graph, 6, Some(2));
assert_eq!(result, vec![4, 6]);
let result = post_order_from_to(&rev_graph, 3, Some(1));
assert_eq!(result, vec![4, 6, 2, 3]);
}
......@@ -9,13 +9,10 @@
// except according to those terms.
use super::indexed_vec::Idx;
pub use std::slice::Iter;
pub mod dominators;
pub mod iterate;
pub mod reachable;
mod reference;
pub mod transpose;
#[cfg(test)]
mod test;
......
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Compute reachability using a simple dataflow propagation.
//! Store end-result in a big NxN bit matrix.
use super::ControlFlowGraph;
use super::super::bitvec::BitVector;
use super::iterate::reverse_post_order;
use super::super::indexed_vec::{IndexVec, Idx};
#[cfg(test)]
mod test;
pub fn reachable<G: ControlFlowGraph>(graph: &G) -> Reachability<G::Node> {
let reverse_post_order = reverse_post_order(graph, graph.start_node());
reachable_given_rpo(graph, &reverse_post_order)
}
pub fn reachable_given_rpo<G: ControlFlowGraph>(graph: &G,
reverse_post_order: &[G::Node])
-> Reachability<G::Node> {
let mut reachability = Reachability::new(graph);
let mut changed = true;
while changed {
changed = false;
for &node in reverse_post_order.iter().rev() {
// every node can reach itself
changed |= reachability.bits[node].insert(node.index());
// and every pred can reach everything node can reach
for pred in graph.predecessors(node) {
let nodes_bits = reachability.bits[node].clone();
changed |= reachability.bits[pred].insert_all(&nodes_bits);
}
}
}
reachability
}
pub struct Reachability<Node: Idx> {
bits: IndexVec<Node, BitVector>,
}
impl<Node: Idx> Reachability<Node> {
fn new<G: ControlFlowGraph>(graph: &G) -> Self {
let num_nodes = graph.num_nodes();
Reachability { bits: IndexVec::from_elem_n(BitVector::new(num_nodes), num_nodes) }
}
pub fn can_reach(&self, source: Node, target: Node) -> bool {
let bit: usize = target.index();
self.bits[source].contains(bit)
}
}
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use super::super::test::TestGraph;
use super::*;
#[test]
fn test1() {
// 0 -> 1 -> 2 -> 3
// ^ v
// 6 <- 4 -> 5
let graph = TestGraph::new(0, &[(0, 1), (1, 2), (2, 3), (2, 4), (4, 5), (4, 6), (6, 1)]);
let reachable = reachable(&graph);
assert!((0..6).all(|i| reachable.can_reach(0, i)));
assert!((1..6).all(|i| reachable.can_reach(1, i)));
assert!((1..6).all(|i| reachable.can_reach(2, i)));
assert!((1..6).all(|i| reachable.can_reach(4, i)));
assert!((1..6).all(|i| reachable.can_reach(6, i)));
assert!(reachable.can_reach(3, 3));
assert!(!reachable.can_reach(3, 5));
assert!(!reachable.can_reach(5, 3));
}
/// use bigger indices to cross between words in the bit set
#[test]
fn test2() {
// 30 -> 31 -> 32 -> 33
// ^ v
// 36 <- 34 -> 35
let graph = TestGraph::new(30,
&[(30, 31), (31, 32), (32, 33), (32, 34), (34, 35), (34, 36),
(36, 31)]);
let reachable = reachable(&graph);
assert!((30..36).all(|i| reachable.can_reach(30, i)));
assert!((31..36).all(|i| reachable.can_reach(31, i)));
assert!((31..36).all(|i| reachable.can_reach(32, i)));
assert!((31..36).all(|i| reachable.can_reach(34, i)));
assert!((31..36).all(|i| reachable.can_reach(36, i)));
assert!(reachable.can_reach(33, 33));
assert!(!reachable.can_reach(33, 35));
assert!(!reachable.can_reach(35, 33));
}
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use super::*;
pub struct TransposedGraph<G: ControlFlowGraph> {
base_graph: G,
start_node: G::Node,
}
impl<G: ControlFlowGraph> TransposedGraph<G> {
pub fn new(base_graph: G) -> Self {
let start_node = base_graph.start_node();
Self::with_start(base_graph, start_node)
}
pub fn with_start(base_graph: G, start_node: G::Node) -> Self {
TransposedGraph {
base_graph,
start_node,
}
}
}
impl<G: ControlFlowGraph> ControlFlowGraph for TransposedGraph<G> {
type Node = G::Node;
fn num_nodes(&self) -> usize {
self.base_graph.num_nodes()
}
fn start_node(&self) -> Self::Node {
self.start_node
}
fn predecessors<'graph>(&'graph self,
node: Self::Node)
-> <Self as GraphPredecessors<'graph>>::Iter {
self.base_graph.successors(node)
}
fn successors<'graph>(&'graph self,
node: Self::Node)
-> <Self as GraphSuccessors<'graph>>::Iter {
self.base_graph.predecessors(node)
}
}
impl<'graph, G: ControlFlowGraph> GraphPredecessors<'graph> for TransposedGraph<G> {
type Item = G::Node;
type Iter = <G as GraphSuccessors<'graph>>::Iter;
}
impl<'graph, G: ControlFlowGraph> GraphSuccessors<'graph> for TransposedGraph<G> {
type Item = G::Node;
type Iter = <G as GraphPredecessors<'graph>>::Iter;
}
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::fmt;
// Provide some more formatting options for some data types (at the moment
// that's just `{:x}` for slices of u8).
pub struct FmtWrap<T>(pub T);
impl<'a> fmt::LowerHex for FmtWrap<&'a [u8]> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
for byte in self.0.iter() {
try!(write!(formatter, "{:02x}", byte));
}
Ok(())
}
}
#[test]
fn test_lower_hex() {
let bytes: &[u8] = &[0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef];
assert_eq!("0123456789abcdef", &format!("{:x}", FmtWrap(bytes)));
}
......@@ -107,9 +107,3 @@ fn finish(&self) -> u64 {
self.hash as u64
}
}
pub fn hash<T: Hash>(v: &T) -> u64 {
let mut state = FxHasher::default();
v.hash(&mut state);
state.finish()
}
......@@ -106,13 +106,6 @@ pub fn node_id(&self) -> usize {
}
}
impl EdgeIndex {
/// Returns unique id (unique with respect to the graph holding associated edge).
pub fn edge_id(&self) -> usize {
self.0
}
}
impl<N: Debug, E: Debug> Graph<N, E> {
pub fn new() -> Graph<N, E> {
Graph {
......@@ -201,34 +194,10 @@ pub fn add_edge(&mut self, source: NodeIndex, target: NodeIndex, data: E) -> Edg
return idx;
}
pub fn mut_edge_data(&mut self, idx: EdgeIndex) -> &mut E {
&mut self.edges[idx.0].data
}
pub fn edge_data(&self, idx: EdgeIndex) -> &E {
&self.edges[idx.0].data
}
pub fn edge(&self, idx: EdgeIndex) -> &Edge<E> {
&self.edges[idx.0]
}
pub fn first_adjacent(&self, node: NodeIndex, dir: Direction) -> EdgeIndex {
//! Accesses the index of the first edge adjacent to `node`.
//! This is useful if you wish to modify the graph while walking
//! the linked list of edges.
self.nodes[node.0].first_edge[dir.repr]
}
pub fn next_adjacent(&self, edge: EdgeIndex, dir: Direction) -> EdgeIndex {
//! Accesses the next edge in a given direction.
//! This is useful if you wish to modify the graph while walking
//! the linked list of edges.
self.edges[edge.0].next_edge[dir.repr]
}
// # Iterating over nodes, edges
pub fn enumerated_nodes(&self) -> EnumeratedNodes<N> {
......@@ -282,25 +251,6 @@ pub fn predecessor_nodes(&self, target: NodeIndex) -> AdjacentSources<N, E> {
self.incoming_edges(target).sources()
}
/// A common use for graphs in our compiler is to perform
/// fixed-point iteration. In this case, each edge represents a
/// constraint, and the nodes themselves are associated with
/// variables or other bitsets. This method facilitates such a
/// computation.
pub fn iterate_until_fixed_point<'a, F>(&'a self, mut op: F)
where F: FnMut(usize, EdgeIndex, &'a Edge<E>) -> bool
{
let mut iteration = 0;
let mut changed = true;
while changed {
changed = false;
iteration += 1;
for (edge_index, edge) in self.enumerated_edges() {
changed |= op(iteration, edge_index, edge);
}
}
}
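The doc comment above describes constraint propagation to a fixed point. The standalone sketch below shows the same loop shape over a plain edge list and `HashSet`s instead of the compiler's `Graph`/bitset types (illustrative only):

use std::collections::HashSet;

// Each edge (src, dst) is a constraint: everything in sets[src] must also
// appear in sets[dst]. Keep sweeping the edges until nothing changes.
fn propagate(sets: &mut [HashSet<u32>], edges: &[(usize, usize)]) {
    let mut changed = true;
    while changed {
        changed = false;
        for &(src, dst) in edges {
            let items: Vec<u32> = sets[src].iter().copied().collect();
            for item in items {
                changed |= sets[dst].insert(item);
            }
        }
    }
}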
pub fn depth_traverse<'a>(&'a self,
start: NodeIndex,
direction: Direction)
......@@ -343,35 +293,6 @@ pub fn nodes_in_postorder<'a>(&'a self,
assert_eq!(result.len(), self.len_nodes());
result
}
/// Whether or not a node can be reached from itself.
pub fn is_node_cyclic(&self, starting_node_index: NodeIndex) -> bool {
// This is similar to depth traversal below, but we
// can't use that, because depth traversal doesn't show
// the starting node a second time.
let mut visited = BitVector::new(self.len_nodes());
let mut stack = vec![starting_node_index];
while let Some(current_node_index) = stack.pop() {
visited.insert(current_node_index.0);
// Directionality doesn't change the answer,
// so just use outgoing edges.
for (_, edge) in self.outgoing_edges(current_node_index) {
let target_node_index = edge.target();
if target_node_index == starting_node_index {
return true;
}
if !visited.contains(target_node_index.0) {
stack.push(target_node_index);
}
}
}
false
}
}
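For reference, the same "can this node reach itself?" walk over a bare adjacency list, independent of the `Graph`/`BitVector` machinery above (illustrative sketch only):

// adj[i] lists the successors of node i.
fn node_on_cycle(adj: &[Vec<usize>], start: usize) -> bool {
    let mut visited = vec![false; adj.len()];
    let mut stack = vec![start];
    while let Some(node) = stack.pop() {
        visited[node] = true;
        for &next in &adj[node] {
            if next == start {
                return true; // found a path back to the starting node
            }
            if !visited[next] {
                stack.push(next);
            }
        }
    }
    false
}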
// # Iterators
......@@ -479,16 +400,6 @@ pub struct DepthFirstTraversal<'g, N, E>
}
impl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> {
pub fn new(graph: &'g Graph<N, E>, direction: Direction) -> Self {
let visited = BitVector::new(graph.len_nodes());
DepthFirstTraversal {
graph,
stack: vec![],
visited,
direction,
}
}
pub fn with_start_node(graph: &'g Graph<N, E>,
start_node: NodeIndex,
direction: Direction)
......@@ -503,13 +414,6 @@ pub fn with_start_node(graph: &'g Graph<N, E>,
}
}
pub fn reset(&mut self, start_node: NodeIndex) {
self.stack.truncate(0);
self.stack.push(start_node);
self.visited.clear();
self.visited.insert(start_node.node_id());
}
fn visit(&mut self, node: NodeIndex) {
if self.visited.insert(node.node_id()) {
self.stack.push(node);
......@@ -532,19 +436,6 @@ fn next(&mut self) -> Option<NodeIndex> {
}
}
pub fn each_edge_index<F>(max_edge_index: EdgeIndex, mut f: F)
where F: FnMut(EdgeIndex) -> bool
{
let mut i = 0;
let n = max_edge_index.0;
while i < n {
if !f(EdgeIndex(i)) {
return;
}
i += 1;
}
}
impl<E> Edge<E> {
pub fn source(&self) -> NodeIndex {
self.source
......
......@@ -43,29 +43,6 @@ fn create_graph() -> TestGraph {
return graph;
}
fn create_graph_with_cycle() -> TestGraph {
let mut graph = Graph::new();
// Create a graph with a cycle.
//
// A --> B <-- +
// | |
// v |
// C --> D
let a = graph.add_node("A");
let b = graph.add_node("B");
let c = graph.add_node("C");
let d = graph.add_node("D");
graph.add_edge(a, b, "AB");
graph.add_edge(b, c, "BC");
graph.add_edge(c, d, "CD");
graph.add_edge(d, b, "DB");
return graph;
}
#[test]
fn each_node() {
let graph = create_graph();
......@@ -82,7 +59,6 @@ fn each_edge() {
let graph = create_graph();
let expected = ["AB", "BC", "BD", "DE", "EC", "FB"];
graph.each_edge(|idx, edge| {
assert_eq!(&expected[idx.0], graph.edge_data(idx));
assert_eq!(expected[idx.0], edge.data);
true
});
......@@ -97,7 +73,6 @@ fn test_adjacent_edges<N: PartialEq + Debug, E: PartialEq + Debug>(graph: &Graph
let mut counter = 0;
for (edge_index, edge) in graph.incoming_edges(start_index) {
assert!(graph.edge_data(edge_index) == &edge.data);
assert!(counter < expected_incoming.len());
debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
counter,
......@@ -117,7 +92,6 @@ fn test_adjacent_edges<N: PartialEq + Debug, E: PartialEq + Debug>(graph: &Graph
let mut counter = 0;
for (edge_index, edge) in graph.outgoing_edges(start_index) {
assert!(graph.edge_data(edge_index) == &edge.data);
assert!(counter < expected_outgoing.len());
debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
counter,
......@@ -163,58 +137,3 @@ fn each_adjacent_from_d() {
let graph = create_graph();
test_adjacent_edges(&graph, NodeIndex(3), "D", &[("BD", "B")], &[("DE", "E")]);
}
#[test]
fn is_node_cyclic_a() {
let graph = create_graph_with_cycle();
assert!(!graph.is_node_cyclic(NodeIndex(0)));
}
#[test]
fn is_node_cyclic_b() {
let graph = create_graph_with_cycle();
assert!(graph.is_node_cyclic(NodeIndex(1)));
}
#[test]
fn nodes_in_postorder() {
let expected = vec![
("A", vec!["C", "E", "D", "B", "A", "F"]),
("B", vec!["C", "E", "D", "B", "A", "F"]),
("C", vec!["C", "E", "D", "B", "A", "F"]),
("D", vec!["C", "E", "D", "B", "A", "F"]),
("E", vec!["C", "E", "D", "B", "A", "F"]),
("F", vec!["C", "E", "D", "B", "F", "A"])
];
let graph = create_graph();
for ((idx, node), &(node_name, ref expected))
in graph.enumerated_nodes().zip(&expected)
{
assert_eq!(node.data, node_name);
assert_eq!(expected,
&graph.nodes_in_postorder(OUTGOING, idx)
.into_iter().map(|idx| *graph.node_data(idx))
.collect::<Vec<&str>>());
}
let expected = vec![
("A", vec!["D", "C", "B", "A"]),
("B", vec!["D", "C", "B", "A"]),
("C", vec!["B", "D", "C", "A"]),
("D", vec!["C", "B", "D", "A"]),
];
let graph = create_graph_with_cycle();
for ((idx, node), &(node_name, ref expected))
in graph.enumerated_nodes().zip(&expected)
{
assert_eq!(node.data, node_name);
assert_eq!(expected,
&graph.nodes_in_postorder(OUTGOING, idx)
.into_iter().map(|idx| *graph.node_data(idx))
.collect::<Vec<&str>>());
}
}
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::fmt;
use std::cell::Cell;
/// A write-once variable. When constructed, it is empty, and
/// can only be set once.
///
/// Ivars ensure that their data can only be initialized once. A full
/// implementation is used for concurrency and blocks on a read of an
/// unfulfilled value. This implementation is more minimal and panics
/// if you attempt to read the value before it has been set. It is also
/// not `Sync`, but may be extended in the future to be usable as a true
/// concurrency type.
///
/// The `T: Copy` bound is not strictly needed, but it is required by
/// Cell (so removing it would require using UnsafeCell), and it
/// suffices for the current purposes.
#[derive(PartialEq)]
pub struct Ivar<T: Copy> {
data: Cell<Option<T>>,
}
impl<T: Copy> Ivar<T> {
pub fn new() -> Ivar<T> {
Ivar { data: Cell::new(None) }
}
pub fn get(&self) -> Option<T> {
self.data.get()
}
pub fn fulfill(&self, value: T) {
assert!(self.data.get().is_none(), "Value already set!");
self.data.set(Some(value));
}
pub fn is_fulfilled(&self) -> bool {
self.data.get().is_some()
}
pub fn unwrap(&self) -> T {
self.get().unwrap()
}
}
impl<T: Copy + fmt::Debug> fmt::Debug for Ivar<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.get() {
Some(val) => write!(f, "Ivar({:?})", val),
None => f.write_str("Ivar(<unfulfilled>)"),
}
}
}
impl<T: Copy> Clone for Ivar<T> {
fn clone(&self) -> Ivar<T> {
match self.get() {
Some(val) => Ivar { data: Cell::new(Some(val)) },
None => Ivar::new(),
}
}
}
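A short usage sketch for the write-once cell above (relies on the `Ivar` definition shown in this hunk; the function name is just for illustration):

fn ivar_demo() {
    let slot: Ivar<u32> = Ivar::new();
    assert!(!slot.is_fulfilled());
    assert_eq!(slot.get(), None);

    slot.fulfill(42);               // the single permitted write
    assert_eq!(slot.unwrap(), 42);  // reads after fulfilment succeed

    // A second `slot.fulfill(7)` would panic with "Value already set!".
}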
......@@ -52,11 +52,9 @@
pub mod small_vec;
pub mod base_n;
pub mod bitslice;
pub mod blake2b;
pub mod bitvec;
pub mod fmt_wrap;
pub mod blake2b;
pub mod graph;
pub mod ivar;
pub mod indexed_set;
pub mod indexed_vec;
pub mod obligation_forest;
......
......@@ -57,11 +57,6 @@ fn process_backedge<'c, I>(&mut self,
where I: Clone + Iterator<Item=&'c Self::Obligation>;
}
struct SnapshotData {
node_len: usize,
cache_list_len: usize,
}
pub struct ObligationForest<O: ForestObligation> {
/// The list of obligations. In between calls to
/// `process_obligations`, this list only contains nodes in the
......@@ -83,14 +78,9 @@ pub struct ObligationForest<O: ForestObligation> {
/// A list of the obligations added in snapshots, to allow
/// for their removal.
cache_list: Vec<O::Predicate>,
snapshots: Vec<SnapshotData>,
scratch: Option<Vec<usize>>,
}
pub struct Snapshot {
len: usize,
}
#[derive(Debug)]
struct Node<O> {
obligation: O,
......@@ -166,7 +156,6 @@ impl<O: ForestObligation> ObligationForest<O> {
pub fn new() -> ObligationForest<O> {
ObligationForest {
nodes: vec![],
snapshots: vec![],
done_cache: FxHashSet(),
waiting_cache: FxHashMap(),
cache_list: vec![],
......@@ -180,39 +169,6 @@ pub fn len(&self) -> usize {
self.nodes.len()
}
pub fn start_snapshot(&mut self) -> Snapshot {
self.snapshots.push(SnapshotData {
node_len: self.nodes.len(),
cache_list_len: self.cache_list.len()
});
Snapshot { len: self.snapshots.len() }
}
pub fn commit_snapshot(&mut self, snapshot: Snapshot) {
assert_eq!(snapshot.len, self.snapshots.len());
let info = self.snapshots.pop().unwrap();
assert!(self.nodes.len() >= info.node_len);
assert!(self.cache_list.len() >= info.cache_list_len);
}
pub fn rollback_snapshot(&mut self, snapshot: Snapshot) {
// Check that we are obeying stack discipline.
assert_eq!(snapshot.len, self.snapshots.len());
let info = self.snapshots.pop().unwrap();
for entry in &self.cache_list[info.cache_list_len..] {
self.done_cache.remove(entry);
self.waiting_cache.remove(entry);
}
self.nodes.truncate(info.node_len);
self.cache_list.truncate(info.cache_list_len);
}
pub fn in_snapshot(&self) -> bool {
!self.snapshots.is_empty()
}
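The removed snapshot API follows a common length-based snapshot discipline. A standalone sketch of the same pattern over a plain `Vec` (all names hypothetical):

struct Log {
    entries: Vec<String>,
    snapshots: Vec<usize>, // saved lengths, innermost snapshot last
}

impl Log {
    fn start_snapshot(&mut self) -> usize {
        self.snapshots.push(self.entries.len());
        self.snapshots.len() // token identifying this snapshot
    }

    fn commit_snapshot(&mut self, token: usize) {
        // Stack discipline: only the innermost snapshot may be closed.
        assert_eq!(token, self.snapshots.len());
        let saved_len = self.snapshots.pop().unwrap();
        assert!(self.entries.len() >= saved_len);
    }

    fn rollback_snapshot(&mut self, token: usize) {
        assert_eq!(token, self.snapshots.len());
        let saved_len = self.snapshots.pop().unwrap();
        self.entries.truncate(saved_len); // undo everything since the snapshot
    }
}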
/// Registers an obligation
///
/// This CAN be done in a snapshot
......@@ -262,7 +218,6 @@ fn register_obligation_at(&mut self, obligation: O, parent: Option<NodeIndex>)
///
/// This cannot be done during a snapshot.
pub fn to_errors<E: Clone>(&mut self, error: E) -> Vec<Error<O, E>> {
assert!(!self.in_snapshot());
let mut errors = vec![];
for index in 0..self.nodes.len() {
if let NodeState::Pending = self.nodes[index].state.get() {
......@@ -297,7 +252,6 @@ pub fn process_obligations<P>(&mut self, processor: &mut P) -> Outcome<O, P::Err
where P: ObligationProcessor<Obligation=O>
{
debug!("process_obligations(len={})", self.nodes.len());
assert!(!self.in_snapshot()); // cannot unroll this action
let mut errors = vec![];
let mut stalled = true;
......@@ -528,8 +482,6 @@ fn mark_as_waiting_from(&self, node: &Node<O>) {
/// on these nodes may be present. This is done by e.g. `process_cycles`.
#[inline(never)]
fn compress(&mut self) -> Vec<O> {
assert!(!self.in_snapshot()); // didn't write code to unroll this action
let nodes_len = self.nodes.len();
let mut node_rewrites: Vec<_> = self.scratch.take().unwrap();
node_rewrites.extend(0..nodes_len);
......
......@@ -275,7 +275,8 @@ pub fn find_value(&mut self, id: K) -> K::Value {
self.get(id).value
}
pub fn unioned(&mut self, a_id: K, b_id: K) -> bool {
#[cfg(test)]
fn unioned(&mut self, a_id: K, b_id: K) -> bool {
self.find(a_id) == self.find(b_id)
}
}
......
......@@ -102,7 +102,7 @@
use syntax_pos::{DUMMY_SP, MultiSpan};
#[cfg(test)]
pub mod test;
mod test;
pub mod driver;
pub mod pretty;
......@@ -859,17 +859,17 @@ fn print_crate_info(sess: &Session,
}
/// Returns a version string such as "0.12.0-dev".
pub fn release_str() -> Option<&'static str> {
fn release_str() -> Option<&'static str> {
option_env!("CFG_RELEASE")
}
/// Returns the full SHA1 hash of HEAD of the Git repo from which rustc was built.
pub fn commit_hash_str() -> Option<&'static str> {
fn commit_hash_str() -> Option<&'static str> {
option_env!("CFG_VER_HASH")
}
/// Returns the "commit date" of HEAD of the Git repo from which rustc was built as a static string.
pub fn commit_date_str() -> Option<&'static str> {
fn commit_date_str() -> Option<&'static str> {
option_env!("CFG_VER_DATE")
}
......
......@@ -105,10 +105,6 @@ pub fn cancelled(&self) -> bool {
self.level == Level::Cancelled
}
pub fn is_fatal(&self) -> bool {
self.level == Level::Fatal
}
/// Add a span/label to be included in the resulting snippet.
/// This is pushed onto the `MultiSpan` that was created when the
/// diagnostic was first built. If you don't call this function at
......@@ -278,18 +274,10 @@ pub fn message(&self) -> String {
self.message.iter().map(|i| i.0.to_owned()).collect::<String>()
}
pub fn set_message(&mut self, message: &str) {
self.message = vec![(message.to_owned(), Style::NoStyle)];
}
pub fn styled_message(&self) -> &Vec<(String, Style)> {
&self.message
}
pub fn level(&self) -> Level {
self.level
}
/// Used by a lint. Copies over all details *but* the "main
/// message".
pub fn copy_details_not_message(&mut self, from: &Diagnostic) {
......
......@@ -183,13 +183,6 @@ pub fn new_with_code(handler: &'a Handler,
diagnostic: Diagnostic::new_with_code(level, code, message)
}
}
pub fn into_diagnostic(mut self) -> Diagnostic {
// annoyingly, the Drop impl means we can't actually move
let result = self.diagnostic.clone();
self.cancel();
result
}
}
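The comment in `into_diagnostic` points at a general Rust rule: a field cannot be moved out of a value whose type implements `Drop` (error E0509). A standalone sketch of the same clone-then-neutralize workaround (hypothetical names, not this crate's types):

struct Guarded {
    payload: String,
    cancelled: bool,
}

impl Guarded {
    fn into_payload(mut self) -> String {
        let result = self.payload.clone(); // can't move `payload` out: Drop exists
        self.cancelled = true;             // make the destructor a no-op
        result
    }
}

impl Drop for Guarded {
    fn drop(&mut self) {
        if !self.cancelled {
            eprintln!("Guarded value dropped without being consumed");
        }
    }
}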
impl<'a> Debug for DiagnosticBuilder<'a> {
......
......@@ -38,8 +38,8 @@
use std::{error, fmt};
use std::rc::Rc;
pub mod diagnostic;
pub mod diagnostic_builder;
mod diagnostic;
mod diagnostic_builder;
pub mod emitter;
mod snippet;
pub mod registry;
......@@ -111,7 +111,7 @@ fn substitutions(&self) -> usize {
}
/// Returns the number of substitutions
pub fn substitution_spans<'a>(&'a self) -> impl Iterator<Item = Span> + 'a {
fn substitution_spans<'a>(&'a self) -> impl Iterator<Item = Span> + 'a {
self.substitution_parts.iter().map(|sub| sub.span)
}
......@@ -262,7 +262,7 @@ fn description(&self) -> &str {
}
}
pub use diagnostic::{Diagnostic, SubDiagnostic, DiagnosticStyledString, StringPart};
pub use diagnostic::{Diagnostic, SubDiagnostic, DiagnosticStyledString};
pub use diagnostic_builder::DiagnosticBuilder;
/// A handler deals with errors; certain errors
......@@ -491,7 +491,7 @@ pub fn unimpl(&self, msg: &str) -> ! {
self.bug(&format!("unimplemented {}", msg));
}
pub fn bump_err_count(&self) {
fn bump_err_count(&self) {
self.panic_if_treat_err_as_bug();
self.err_count.set(self.err_count.get() + 1);
}
......@@ -571,7 +571,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
}
impl Level {
pub fn color(self) -> term::color::Color {
fn color(self) -> term::color::Color {
match self {
Bug | Fatal | PhaseFatal | Error => term::color::BRIGHT_RED,
Warning => {
......@@ -598,12 +598,3 @@ pub fn to_str(self) -> &'static str {
}
}
}
pub fn expect<T, M>(diag: &Handler, opt: Option<T>, msg: M) -> T
where M: FnOnce() -> String
{
match opt {
Some(t) => t,
None => diag.bug(&msg()),
}
}
......@@ -62,10 +62,6 @@ pub fn new() -> IncrementalHashesMap {
}
}
pub fn get(&self, k: &DepNode) -> Option<&Fingerprint> {
self.hashes.get(k)
}
pub fn insert(&mut self, k: DepNode, v: Fingerprint) {
assert!(self.hashes.insert(k, v).is_none());
}
......
......@@ -44,4 +44,3 @@
pub use persist::save_work_products;
pub use persist::in_incr_comp_dir;
pub use persist::finalize_session_directory;
pub use persist::delete_workproduct_files;
......@@ -28,4 +28,3 @@
pub use self::save::save_dep_graph;
pub use self::save::save_work_products;
pub use self::work_product::save_trans_partition;
pub use self::work_product::delete_workproduct_files;
......@@ -45,10 +45,10 @@
extern crate rustc_const_eval;
extern crate syntax_pos;
pub use rustc::lint;
pub use rustc::middle;
pub use rustc::session;
pub use rustc::util;
use rustc::lint;
use rustc::middle;
use rustc::session;
use rustc::util;
use session::Session;
use lint::LintId;
......
......@@ -38,9 +38,7 @@
pub use self::AtomicRmwBinOp::*;
pub use self::MetadataType::*;
pub use self::CodeGenOptSize::*;
pub use self::DiagnosticKind::*;
pub use self::CallConv::*;
pub use self::DiagnosticSeverity::*;
pub use self::Linkage::*;
use std::str::FromStr;
......@@ -51,7 +49,7 @@
pub mod archive_ro;
pub mod diagnostic;
pub mod ffi;
mod ffi;
pub use ffi::*;
......@@ -120,7 +118,7 @@ fn from_str(s: &str) -> Result<Self, Self::Err> {
#[allow(missing_copy_implementations)]
pub enum RustString_opaque {}
pub type RustStringRef = *mut RustString_opaque;
type RustStringRef = *mut RustString_opaque;
type RustStringRepr = *mut RefCell<Vec<u8>>;
/// Appending to a Rust string -- used by RawRustStringOstream.
......@@ -199,8 +197,8 @@ pub fn toggle_llfn(&self, idx: AttributePlace, llfn: ValueRef, set: bool) {
// Memory-managed interface to target data.
pub struct TargetData {
pub lltd: TargetDataRef,
struct TargetData {
lltd: TargetDataRef,
}
impl Drop for TargetData {
......@@ -211,7 +209,7 @@ fn drop(&mut self) {
}
}
pub fn mk_target_data(string_rep: &str) -> TargetData {
fn mk_target_data(string_rep: &str) -> TargetData {
let string_rep = CString::new(string_rep).unwrap();
TargetData { lltd: unsafe { LLVMCreateTargetData(string_rep.as_ptr()) } }
}
......@@ -272,7 +270,7 @@ pub fn get_param(llfn: ValueRef, index: c_uint) -> ValueRef {
}
}
pub fn get_params(llfn: ValueRef) -> Vec<ValueRef> {
fn get_params(llfn: ValueRef) -> Vec<ValueRef> {
unsafe {
let num_params = LLVMCountParams(llfn);
let mut params = Vec::with_capacity(num_params as usize);
......
......@@ -32,7 +32,7 @@
pub use rustc::middle::cstore::{NativeLibrary, NativeLibraryKind, LinkagePreference};
pub use rustc::middle::cstore::NativeLibraryKind::*;
pub use rustc::middle::cstore::{CrateSource, LinkMeta, LibSource};
pub use rustc::middle::cstore::{CrateSource, LibSource};
pub use cstore_impl::{provide, provide_local};
......@@ -142,14 +142,6 @@ pub fn iter_crate_data<I>(&self, mut i: I)
}
}
pub fn reset(&self) {
self.metas.borrow_mut().clear();
self.extern_mod_crate_map.borrow_mut().clear();
self.used_libraries.borrow_mut().clear();
self.used_link_args.borrow_mut().clear();
self.statically_included_foreign_items.borrow_mut().clear();
}
pub fn crate_dependencies_in_rpo(&self, krate: CrateNum) -> Vec<CrateNum> {
let mut ordering = Vec::new();
self.push_dependencies_in_postorder(&mut ordering, krate);
......
......@@ -100,32 +100,6 @@ pub fn lookup(&self, bytes: &[u8], def_index: DefIndex) -> Option<Lazy<Entry<'tc
Some(Lazy::with_position(position as usize))
}
}
pub fn iter_enumerated<'a>(&self,
bytes: &'a [u8])
-> impl Iterator<Item = (DefIndex, Lazy<Entry<'tcx>>)> + 'a {
let words = &bytes_to_words(&bytes[self.position..])[..self.len];
let lo_count = u32::from_le(words[0].get()) as usize;
let lo = &words[1 .. lo_count + 1];
let hi = &words[1 + lo_count ..];
lo.iter().map(|word| word.get()).enumerate().filter_map(|(index, pos)| {
if pos == u32::MAX {
None
} else {
let pos = u32::from_le(pos) as usize;
Some((DefIndex::new(index), Lazy::with_position(pos)))
}
}).chain(hi.iter().map(|word| word.get()).enumerate().filter_map(|(index, pos)| {
if pos == u32::MAX {
None
} else {
let pos = u32::from_le(pos) as usize;
Some((DefIndex::new(index + DefIndexAddressSpace::High.start()),
Lazy::with_position(pos)))
}
}))
}
}
#[repr(packed)]
......
......@@ -47,8 +47,6 @@
mod diagnostics;
pub use rustc::middle;
mod astencode;
mod index_builder;
mod index;
......
......@@ -306,10 +306,6 @@ pub fn maybe_load_library_crate(&mut self) -> Option<Library> {
self.find_library_crate()
}
pub fn load_library_crate(&mut self) -> Library {
self.find_library_crate().unwrap_or_else(|| self.report_errs())
}
pub fn report_errs(&mut self) -> ! {
let add = match self.root {
&None => String::new(),
......
......@@ -14,16 +14,13 @@
use rustc::ty::TyCtxt;
use rustc::mir::{self, Mir, Location};
use rustc_data_structures::bitslice::BitSlice; // adds set_bit/get_bit to &[usize] bitvector rep.
use rustc_data_structures::bitslice::{BitwiseOperator};
use rustc_data_structures::indexed_set::{IdxSet};
use rustc_data_structures::indexed_vec::Idx;
use super::MoveDataParamEnv;
use util::elaborate_drops::DropFlagState;
use super::move_paths::{HasMoveData, MoveData, MoveOutIndex, MovePathIndex};
use super::move_paths::LookupResult;
use super::move_paths::{HasMoveData, MoveData, MovePathIndex};
use super::{BitDenotation, BlockSets, DataflowOperator};
use super::drop_flag_effects_for_function_entry;
......@@ -204,40 +201,6 @@ impl<'a, 'tcx: 'a> HasMoveData<'tcx> for DefinitelyInitializedLvals<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> { &self.mdpe.move_data }
}
/// `MovingOutStatements` tracks the statements that perform moves out
/// of particular l-values. More precisely, it tracks whether the
/// *effect* of such moves (namely, the uninitialization of the
/// l-value in question) can reach some point in the control-flow of
/// the function, or if that effect is "killed" by some intervening
/// operation reinitializing that l-value.
///
/// The resulting dataflow is a more enriched version of
/// `MaybeUninitializedLvals`. Both structures on their own only tell
/// you if an l-value *might* be uninitialized at a given point in the
/// control flow. But `MovingOutStatements` also includes the added
/// data of *which* particular statement caused the deinitialization,
/// which the borrow checker's error message may need to report.
#[allow(dead_code)]
pub struct MovingOutStatements<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
mir: &'a Mir<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>,
}
impl<'a, 'tcx: 'a> MovingOutStatements<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
mir: &'a Mir<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>)
-> Self
{
MovingOutStatements { tcx: tcx, mir: mir, mdpe: mdpe }
}
}
impl<'a, 'tcx> HasMoveData<'tcx> for MovingOutStatements<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> { &self.mdpe.move_data }
}
impl<'a, 'tcx> MaybeInitializedLvals<'a, 'tcx> {
fn update_bits(sets: &mut BlockSets<MovePathIndex>, path: MovePathIndex,
state: DropFlagState)
......@@ -432,121 +395,6 @@ fn propagate_call_return(&self,
}
}
impl<'a, 'tcx> BitDenotation for MovingOutStatements<'a, 'tcx> {
type Idx = MoveOutIndex;
fn name() -> &'static str { "moving_out" }
fn bits_per_block(&self) -> usize {
self.move_data().moves.len()
}
fn start_block_effect(&self, _sets: &mut BlockSets<MoveOutIndex>) {
// no move-statements have been executed prior to function
// execution, so this method has no effect on `_sets`.
}
fn statement_effect(&self,
sets: &mut BlockSets<MoveOutIndex>,
location: Location) {
let (tcx, mir, move_data) = (self.tcx, self.mir, self.move_data());
let stmt = &mir[location.block].statements[location.statement_index];
let loc_map = &move_data.loc_map;
let path_map = &move_data.path_map;
let rev_lookup = &move_data.rev_lookup;
debug!("stmt {:?} at loc {:?} moves out of move_indexes {:?}",
stmt, location, &loc_map[location]);
for move_index in &loc_map[location] {
// Every path deinitialized by a *particular move*
// has corresponding bit, "gen'ed" (i.e. set)
// here, in dataflow vector
zero_to_one(sets.gen_set.words_mut(), *move_index);
}
let bits_per_block = self.bits_per_block();
match stmt.kind {
mir::StatementKind::SetDiscriminant { .. } => {
span_bug!(stmt.source_info.span, "SetDiscriminant should not exist in borrowck");
}
mir::StatementKind::Assign(ref lvalue, ref rvalue) => {
// assigning into this `lvalue` kills all
// MoveOuts from it, and *also* all MoveOuts
// for children and associated fragment sets.
match rvalue.initialization_state() {
mir::tcx::RvalueInitializationState::Shallow => {
if let LookupResult::Exact(mpi) = rev_lookup.find(lvalue) {
for moi in &path_map[mpi] {
assert!(moi.index() < bits_per_block);
sets.kill_set.add(&moi);
}
}
}
mir::tcx::RvalueInitializationState::Deep => {
on_lookup_result_bits(tcx,
mir,
move_data,
rev_lookup.find(lvalue),
|mpi| for moi in &path_map[mpi] {
assert!(moi.index() < bits_per_block);
sets.kill_set.add(&moi);
});
}
}
}
mir::StatementKind::StorageLive(_) |
mir::StatementKind::StorageDead(_) |
mir::StatementKind::InlineAsm { .. } |
mir::StatementKind::EndRegion(_) |
mir::StatementKind::Validate(..) |
mir::StatementKind::Nop => {}
}
}
fn terminator_effect(&self,
sets: &mut BlockSets<MoveOutIndex>,
location: Location)
{
let (mir, move_data) = (self.mir, self.move_data());
let term = mir[location.block].terminator();
let loc_map = &move_data.loc_map;
debug!("terminator {:?} at loc {:?} moves out of move_indexes {:?}",
term, location, &loc_map[location]);
let bits_per_block = self.bits_per_block();
for move_index in &loc_map[location] {
assert!(move_index.index() < bits_per_block);
zero_to_one(sets.gen_set.words_mut(), *move_index);
}
}
fn propagate_call_return(&self,
in_out: &mut IdxSet<MoveOutIndex>,
_call_bb: mir::BasicBlock,
_dest_bb: mir::BasicBlock,
dest_lval: &mir::Lvalue) {
let move_data = self.move_data();
let bits_per_block = self.bits_per_block();
let path_map = &move_data.path_map;
on_lookup_result_bits(self.tcx,
self.mir,
move_data,
move_data.rev_lookup.find(dest_lval),
|mpi| for moi in &path_map[mpi] {
assert!(moi.index() < bits_per_block);
in_out.remove(&moi);
});
}
}
fn zero_to_one(bitvec: &mut [usize], move_index: MoveOutIndex) {
let retval = bitvec.set_bit(move_index.index());
assert!(retval);
}
impl<'a, 'tcx> BitwiseOperator for MovingOutStatements<'a, 'tcx> {
#[inline]
fn join(&self, pred1: usize, pred2: usize) -> usize {
pred1 | pred2 // moves from both preds are in scope
}
}
impl<'a, 'tcx> BitwiseOperator for MaybeInitializedLvals<'a, 'tcx> {
#[inline]
fn join(&self, pred1: usize, pred2: usize) -> usize {
......@@ -578,13 +426,6 @@ fn join(&self, pred1: usize, pred2: usize) -> usize {
// propagating, or you start at all-ones and then use Intersect as
// your merge when propagating.
impl<'a, 'tcx> DataflowOperator for MovingOutStatements<'a, 'tcx> {
#[inline]
fn bottom_value() -> bool {
false // bottom = no loans in scope by default
}
}
impl<'a, 'tcx> DataflowOperator for MaybeInitializedLvals<'a, 'tcx> {
#[inline]
fn bottom_value() -> bool {
......
......@@ -25,7 +25,7 @@
use std::usize;
pub use self::impls::{MaybeInitializedLvals, MaybeUninitializedLvals};
pub use self::impls::{DefinitelyInitializedLvals, MovingOutStatements};
pub use self::impls::{DefinitelyInitializedLvals};
pub use self::impls::borrows::{Borrows, BorrowData, BorrowIndex};
pub(crate) use self::drop_flag_effects::*;
......@@ -364,8 +364,6 @@ pub fn results(self) -> DataflowResults<O> {
DataflowResults(self.flow_state)
}
pub fn flow_state(&self) -> &DataflowState<O> { &self.flow_state }
pub fn mir(&self) -> &'a Mir<'tcx> { self.mir }
}
......
......@@ -43,10 +43,10 @@
extern crate rustc_const_eval;
extern crate core; // for NonZero
pub mod diagnostics;
mod diagnostics;
mod build;
pub mod dataflow;
mod dataflow;
mod hair;
mod shim;
pub mod transform;
......
......@@ -27,7 +27,6 @@
use dataflow::{MoveDataParamEnv};
use dataflow::{BitDenotation, BlockSets, DataflowResults, DataflowResultsConsumer};
use dataflow::{MaybeInitializedLvals, MaybeUninitializedLvals};
use dataflow::{MovingOutStatements};
use dataflow::{Borrows, BorrowData, BorrowIndex};
use dataflow::move_paths::{HasMoveData, MoveData, MovePathIndex, LookupResult};
use util::borrowck_errors::{BorrowckErrors, Origin};
......@@ -80,9 +79,6 @@ fn borrowck_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &Mir
let flow_uninits = do_dataflow(tcx, mir, id, &attributes, &dead_unwinds,
MaybeUninitializedLvals::new(tcx, mir, &mdpe),
|bd, i| &bd.move_data().move_paths[i]);
let flow_move_outs = do_dataflow(tcx, mir, id, &attributes, &dead_unwinds,
MovingOutStatements::new(tcx, mir, &mdpe),
|bd, i| &bd.move_data().moves[i]);
let mut mbcx = MirBorrowckCtxt {
tcx: tcx,
......@@ -95,8 +91,7 @@ fn borrowck_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &Mir
let mut state = InProgress::new(flow_borrows,
flow_inits,
flow_uninits,
flow_move_outs);
flow_uninits);
mbcx.analyze_results(&mut state); // entry point for DataflowResultsConsumer
});
......@@ -119,7 +114,6 @@ pub struct InProgress<'b, 'tcx: 'b> {
borrows: FlowInProgress<Borrows<'b, 'tcx>>,
inits: FlowInProgress<MaybeInitializedLvals<'b, 'tcx>>,
uninits: FlowInProgress<MaybeUninitializedLvals<'b, 'tcx>>,
move_outs: FlowInProgress<MovingOutStatements<'b, 'tcx>>,
}
struct FlowInProgress<BD> where BD: BitDenotation {
......@@ -511,10 +505,6 @@ fn check_if_path_is_moved(&mut self,
if let Some(mpi) = self.move_path_for_lvalue(context, move_data, lvalue) {
if maybe_uninits.curr_state.contains(&mpi) {
// find and report move(s) that could cause this to be uninitialized
// FIXME: for each move in flow_state.move_outs ...
&flow_state.move_outs;
self.report_use_of_moved(context, lvalue_span);
} else {
// sanity check: initialized on *some* path, right?
......@@ -1129,13 +1119,12 @@ fn new(self, loc: Location) -> Context { Context { kind: self, loc: loc } }
impl<'b, 'tcx: 'b> InProgress<'b, 'tcx> {
pub(super) fn new(borrows: DataflowResults<Borrows<'b, 'tcx>>,
inits: DataflowResults<MaybeInitializedLvals<'b, 'tcx>>,
uninits: DataflowResults<MaybeUninitializedLvals<'b, 'tcx>>,
move_outs: DataflowResults<MovingOutStatements<'b, 'tcx>>) -> Self {
uninits: DataflowResults<MaybeUninitializedLvals<'b, 'tcx>>)
-> Self {
InProgress {
borrows: FlowInProgress::new(borrows),
inits: FlowInProgress::new(inits),
uninits: FlowInProgress::new(uninits),
move_outs: FlowInProgress::new(move_outs),
}
}
......
......@@ -740,12 +740,6 @@ fn verify_obligations(&mut self, mir: &Mir<'tcx>) {
pub struct TypeckMir;
impl TypeckMir {
pub fn new() -> Self {
TypeckMir
}
}
impl MirPass for TypeckMir {
fn run_pass<'a, 'tcx>(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
......
......@@ -50,10 +50,6 @@ pub fn local_info(&self, local: Local) -> &Info<'tcx> {
&self.info[local]
}
pub fn local_info_mut(&mut self, local: Local) -> &mut Info<'tcx> {
&mut self.info[local]
}
fn mutate_defs_and_uses<F>(&self, local: Local, mir: &mut Mir<'tcx>, mut callback: F)
where F: for<'a> FnMut(&'a mut Lvalue<'tcx>,
LvalueContext<'tcx>,
......
......@@ -36,13 +36,13 @@
extern crate syntax_pos;
extern crate rustc_errors as errors;
pub mod diagnostics;
mod diagnostics;
pub mod ast_validation;
pub mod consts;
pub mod hir_stats;
pub mod loops;
pub mod mir_stats;
mod mir_stats;
pub mod no_asm;
pub mod static_recursion;
......
......@@ -13,7 +13,6 @@
// completely accurate (some things might be counted twice, others missed).
use rustc_const_math::{ConstUsize};
use rustc::hir::def_id::LOCAL_CRATE;
use rustc::middle::const_val::{ConstVal};
use rustc::mir::{AggregateKind, AssertMessage, BasicBlock, BasicBlockData};
use rustc::mir::{Constant, Literal, Location, LocalDecl};
......@@ -22,9 +21,7 @@
use rustc::mir::{Rvalue, SourceInfo, Statement, StatementKind};
use rustc::mir::{Terminator, TerminatorKind, VisibilityScope, VisibilityScopeData};
use rustc::mir::visit as mir_visit;
use rustc::mir::visit::Visitor;
use rustc::ty::{ClosureSubsts, TyCtxt};
use rustc::util::common::to_readable_str;
use rustc::util::nodemap::{FxHashMap};
struct NodeData {
......@@ -37,21 +34,6 @@ struct StatCollector<'a, 'tcx: 'a> {
data: FxHashMap<&'static str, NodeData>,
}
pub fn print_mir_stats<'tcx, 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, title: &str) {
let mut collector = StatCollector {
_tcx: tcx,
data: FxHashMap(),
};
// For debugging instrumentation like this, we don't need to worry
// about maintaining the dep graph.
let _ignore = tcx.dep_graph.in_ignore();
for &def_id in tcx.mir_keys(LOCAL_CRATE).iter() {
let mir = tcx.optimized_mir(def_id);
collector.visit_mir(&mir);
}
collector.print(title);
}
impl<'a, 'tcx> StatCollector<'a, 'tcx> {
fn record_with_size(&mut self, label: &'static str, node_size: usize) {
......@@ -67,27 +49,6 @@ fn record_with_size(&mut self, label: &'static str, node_size: usize) {
fn record<T>(&mut self, label: &'static str, node: &T) {
self.record_with_size(label, ::std::mem::size_of_val(node));
}
fn print(&self, title: &str) {
let mut stats: Vec<_> = self.data.iter().collect();
stats.sort_by_key(|&(_, ref d)| d.count * d.size);
println!("\n{}\n", title);
println!("{:<32}{:>18}{:>14}{:>14}",
"Name", "Accumulated Size", "Count", "Item Size");
println!("------------------------------------------------------------------------------");
for (label, data) in stats {
println!("{:<32}{:>18}{:>14}{:>14}",
label,
to_readable_str(data.count * data.size),
to_readable_str(data.count),
to_readable_str(data.size));
}
println!("------------------------------------------------------------------------------");
}
}
impl<'a, 'tcx> mir_visit::Visitor<'tcx> for StatCollector<'a, 'tcx> {
......
......@@ -80,7 +80,7 @@
pub use self::registry::Registry;
pub mod diagnostics;
mod diagnostics;
pub mod registry;
pub mod load;
pub mod build;
......
......@@ -41,7 +41,7 @@
use std::mem::replace;
use std::rc::Rc;
pub mod diagnostics;
mod diagnostics;
////////////////////////////////////////////////////////////////////////////////
/// Visitor used to determine if pub(restricted) is used anywhere in the crate.
......
......@@ -782,8 +782,6 @@ fn visit_generics(&mut self, generics: &'tcx Generics) {
}
}
pub type ErrorMessage = Option<(Span, String)>;
#[derive(Copy, Clone)]
enum TypeParameters<'a, 'b> {
NoTypeParameters,
......@@ -915,7 +913,7 @@ pub struct ModuleData<'a> {
expansion: Mark,
}
pub type Module<'a> = &'a ModuleData<'a>;
type Module<'a> = &'a ModuleData<'a>;
impl<'a> ModuleData<'a> {
fn new(parent: Option<Module<'a>>,
......@@ -3031,31 +3029,6 @@ fn adjust_local_def(&mut self,
return def;
}
// Calls `f` with a `Resolver` whose current lexical scope is `module`'s lexical scope,
// i.e. the module's items and the prelude (unless the module is `#[no_implicit_prelude]`).
// FIXME #34673: This needs testing.
pub fn with_module_lexical_scope<T, F>(&mut self, module: Module<'a>, f: F) -> T
where F: FnOnce(&mut Resolver<'a>) -> T,
{
self.with_empty_ribs(|this| {
this.ribs[ValueNS].push(Rib::new(ModuleRibKind(module)));
this.ribs[TypeNS].push(Rib::new(ModuleRibKind(module)));
f(this)
})
}
fn with_empty_ribs<T, F>(&mut self, f: F) -> T
where F: FnOnce(&mut Resolver<'a>) -> T,
{
let ribs = replace(&mut self.ribs, PerNS::<Vec<Rib>>::default());
let label_ribs = replace(&mut self.label_ribs, Vec::new());
let result = f(self);
self.ribs = ribs;
self.label_ribs = label_ribs;
result
}
fn lookup_assoc_candidate<FilterFn>(&mut self,
ident: Ident,
ns: Namespace,
......
......@@ -51,7 +51,7 @@
use std::fs::File;
use std::path::{Path, PathBuf};
use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID};
use syntax::ast::{self, NodeId, PatKind, Attribute};
use syntax::parse::lexer::comments::strip_doc_comment_decoration;
use syntax::parse::token;
use syntax::print::pprust;
......@@ -80,8 +80,6 @@ pub struct SaveContext<'l, 'tcx: 'l> {
#[derive(Debug)]
pub enum Data {
/// Data about a macro use.
MacroUseData(MacroRef),
RefData(Ref),
DefData(Def),
RelationData(Relation),
......@@ -759,11 +757,6 @@ fn lookup_ref_id(&self, ref_id: NodeId) -> Option<DefId> {
}
}
#[inline]
pub fn enclosing_scope(&self, id: NodeId) -> NodeId {
self.tcx.hir.get_enclosing_scope(id).unwrap_or(CRATE_NODE_ID)
}
fn docs_for_attrs(&self, attrs: &[Attribute]) -> String {
let mut result = String::new();
......
......@@ -10,7 +10,7 @@
//! A helper class for dealing with static archives
use std::ffi::{CString, CStr, OsString};
use std::ffi::{CString, CStr};
use std::io;
use std::mem;
use std::path::{Path, PathBuf};
......@@ -28,8 +28,6 @@ pub struct ArchiveConfig<'a> {
pub dst: PathBuf,
pub src: Option<PathBuf>,
pub lib_search_paths: Vec<PathBuf>,
pub ar_prog: String,
pub command_path: OsString,
}
/// Helper for adding many files to an archive with a single invocation of
......
......@@ -138,12 +138,6 @@ pub fn msvc_link_exe_cmd(_sess: &Session) -> (Command, Vec<(OsString, OsString)>
(Command::new("link.exe"), vec![])
}
pub fn get_ar_prog(sess: &Session) -> String {
sess.opts.cg.ar.clone().unwrap_or_else(|| {
sess.target.target.options.ar.clone()
})
}
fn command_path(sess: &Session) -> OsString {
// The compiler's sysroot often has some bundled tools, so add it to the
// PATH for the child.
......@@ -383,8 +377,6 @@ fn archive_config<'a>(sess: &'a Session,
dst: output.to_path_buf(),
src: input.map(|p| p.to_path_buf()),
lib_search_paths: archive_search_paths(sess),
ar_prog: get_ar_prog(sess),
command_path: command_path(sess),
}
}
......
......@@ -785,8 +785,6 @@ pub fn start_async_translation(sess: &Session,
crate_name,
link,
metadata,
exported_symbols,
no_builtins,
windows_subsystem,
linker_info,
no_integrated_as,
......@@ -1801,8 +1799,6 @@ pub struct OngoingCrateTranslation {
crate_name: Symbol,
link: LinkMeta,
metadata: EncodedMetadata,
exported_symbols: Arc<ExportedSymbols>,
no_builtins: bool,
windows_subsystem: Option<String>,
linker_info: LinkerInfo,
no_integrated_as: bool,
......@@ -1852,13 +1848,10 @@ pub fn join(self, sess: &Session) -> CrateTranslation {
crate_name: self.crate_name,
link: self.link,
metadata: self.metadata,
exported_symbols: self.exported_symbols,
no_builtins: self.no_builtins,
windows_subsystem: self.windows_subsystem,
linker_info: self.linker_info,
modules: compiled_modules.modules,
metadata_module: compiled_modules.metadata_module,
allocator_module: compiled_modules.allocator_module,
};
......
......@@ -27,10 +27,9 @@
use rustc_data_structures::base_n;
use rustc::session::config::{self, NoDebugInfo, OutputFilenames};
use rustc::session::Session;
use rustc::ty::subst::Substs;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::layout::{LayoutCx, LayoutError, LayoutTyper, TyLayout};
use rustc::util::nodemap::{DefIdMap, FxHashMap, FxHashSet};
use rustc::util::nodemap::{FxHashMap, FxHashSet};
use std::ffi::{CStr, CString};
use std::cell::{Cell, RefCell};
......@@ -39,7 +38,6 @@
use std::str;
use std::sync::Arc;
use std::marker::PhantomData;
use syntax::ast;
use syntax::symbol::InternedString;
use syntax_pos::DUMMY_SP;
use abi::Abi;
......@@ -124,12 +122,6 @@ pub struct LocalCrateContext<'a, 'tcx: 'a> {
/// Cache of emitted const globals (value -> global)
const_globals: RefCell<FxHashMap<ValueRef, ValueRef>>,
/// Cache of emitted const values
const_values: RefCell<FxHashMap<(ast::NodeId, &'tcx Substs<'tcx>), ValueRef>>,
/// Cache of external const values
extern_const_values: RefCell<DefIdMap<ValueRef>>,
/// Mapping from static definitions to their DefId's.
statics: RefCell<FxHashMap<ValueRef, DefId>>,
......@@ -144,7 +136,6 @@ pub struct LocalCrateContext<'a, 'tcx: 'a> {
used_statics: RefCell<Vec<ValueRef>>,
lltypes: RefCell<FxHashMap<Ty<'tcx>, Type>>,
type_hashcodes: RefCell<FxHashMap<Ty<'tcx>, String>>,
int_type: Type,
opaque_vec_type: Type,
str_slice_type: Type,
......@@ -157,9 +148,6 @@ pub struct LocalCrateContext<'a, 'tcx: 'a> {
intrinsics: RefCell<FxHashMap<&'static str, ValueRef>>,
/// Depth of the current type-of computation - used to bail out
type_of_depth: Cell<usize>,
/// A counter that is used for generating local symbol names
local_gen_sym_counter: Cell<usize>,
......@@ -406,13 +394,10 @@ pub fn new(shared: &SharedCrateContext<'a, 'tcx>,
const_cstr_cache: RefCell::new(FxHashMap()),
const_unsized: RefCell::new(FxHashMap()),
const_globals: RefCell::new(FxHashMap()),
const_values: RefCell::new(FxHashMap()),
extern_const_values: RefCell::new(DefIdMap()),
statics: RefCell::new(FxHashMap()),
statics_to_rauw: RefCell::new(Vec::new()),
used_statics: RefCell::new(Vec::new()),
lltypes: RefCell::new(FxHashMap()),
type_hashcodes: RefCell::new(FxHashMap()),
int_type: Type::from_ref(ptr::null_mut()),
opaque_vec_type: Type::from_ref(ptr::null_mut()),
str_slice_type: Type::from_ref(ptr::null_mut()),
......@@ -421,7 +406,6 @@ pub fn new(shared: &SharedCrateContext<'a, 'tcx>,
eh_unwind_resume: Cell::new(None),
rust_try_fn: Cell::new(None),
intrinsics: RefCell::new(FxHashMap()),
type_of_depth: Cell::new(0),
local_gen_sym_counter: Cell::new(0),
placeholder: PhantomData,
};
......@@ -545,15 +529,6 @@ pub fn const_globals<'a>(&'a self) -> &'a RefCell<FxHashMap<ValueRef, ValueRef>>
&self.local().const_globals
}
pub fn const_values<'a>(&'a self) -> &'a RefCell<FxHashMap<(ast::NodeId, &'tcx Substs<'tcx>),
ValueRef>> {
&self.local().const_values
}
pub fn extern_const_values<'a>(&'a self) -> &'a RefCell<DefIdMap<ValueRef>> {
&self.local().extern_const_values
}
pub fn statics<'a>(&'a self) -> &'a RefCell<FxHashMap<ValueRef, DefId>> {
&self.local().statics
}
......@@ -570,10 +545,6 @@ pub fn lltypes<'a>(&'a self) -> &'a RefCell<FxHashMap<Ty<'tcx>, Type>> {
&self.local().lltypes
}
pub fn type_hashcodes<'a>(&'a self) -> &'a RefCell<FxHashMap<Ty<'tcx>, String>> {
&self.local().type_hashcodes
}
pub fn stats<'a>(&'a self) -> &'a Stats {
&self.local().stats
}
......@@ -582,10 +553,6 @@ pub fn int_type(&self) -> Type {
self.local().int_type
}
pub fn opaque_vec_type(&self) -> Type {
self.local().opaque_vec_type
}
pub fn str_slice_type(&self) -> Type {
self.local().str_slice_type
}
......@@ -602,27 +569,6 @@ fn intrinsics<'a>(&'a self) -> &'a RefCell<FxHashMap<&'static str, ValueRef>> {
&self.local().intrinsics
}
pub fn obj_size_bound(&self) -> u64 {
self.tcx().data_layout.obj_size_bound()
}
pub fn report_overbig_object(&self, obj: Ty<'tcx>) -> ! {
self.sess().fatal(
&format!("the type `{:?}` is too big for the current architecture",
obj))
}
pub fn enter_type_of(&self, ty: Ty<'tcx>) -> TypeOfDepthLock<'b, 'tcx> {
let current_depth = self.local().type_of_depth.get();
debug!("enter_type_of({:?}) at depth {:?}", ty, current_depth);
if current_depth > self.sess().recursion_limit.get() {
self.sess().fatal(
&format!("overflow representing the type `{}`", ty))
}
self.local().type_of_depth.set(current_depth + 1);
TypeOfDepthLock(self.local())
}
pub fn check_overflow(&self) -> bool {
self.shared.check_overflow
}
......@@ -631,12 +577,6 @@ pub fn use_dll_storage_attrs(&self) -> bool {
self.shared.use_dll_storage_attrs()
}
/// Given the def-id of some item that has no type parameters, make
/// a suitable "empty substs" for it.
pub fn empty_substs_for_def_id(&self, item_def_id: DefId) -> &'tcx Substs<'tcx> {
self.tcx().empty_substs_for_def_id(item_def_id)
}
/// Generate a new symbol name with the given prefix. This symbol name must
/// only be used for definitions with `internal` or `private` linkage.
pub fn generate_local_symbol_name(&self, prefix: &str) -> String {
......@@ -776,14 +716,6 @@ fn normalize_projections(self, ty: Ty<'tcx>) -> Ty<'tcx> {
}
}
pub struct TypeOfDepthLock<'a, 'tcx: 'a>(&'a LocalCrateContext<'a, 'tcx>);
impl<'a, 'tcx> Drop for TypeOfDepthLock<'a, 'tcx> {
fn drop(&mut self) {
self.0.type_of_depth.set(self.0.type_of_depth.get() - 1);
}
}
/// Declare any llvm intrinsics that you might need
fn declare_intrinsic(ccx: &CrateContext, key: &str) -> Option<ValueRef> {
macro_rules! ifn {
......
......@@ -36,7 +36,6 @@
use rustc::dep_graph::WorkProduct;
use syntax_pos::symbol::Symbol;
use std::sync::Arc;
extern crate flate2;
extern crate libc;
......@@ -46,7 +45,7 @@
extern crate rustc_back;
extern crate rustc_data_structures;
extern crate rustc_incremental;
pub extern crate rustc_llvm as llvm;
extern crate rustc_llvm as llvm;
extern crate rustc_platform_intrinsics as intrinsics;
extern crate rustc_const_math;
#[macro_use]
......@@ -78,7 +77,7 @@ pub mod back {
pub(crate) mod symbol_export;
pub(crate) mod symbol_names;
pub mod write;
pub mod rpath;
mod rpath;
}
mod diagnostics;
......@@ -138,8 +137,8 @@ pub struct ModuleTranslation {
/// unique amongst **all** crates. Therefore, it should contain
/// something unique to this crate (e.g., a module path) as well
/// as the crate name and disambiguator.
pub name: String,
pub symbol_name_hash: u64,
name: String,
symbol_name_hash: u64,
pub source: ModuleSource,
pub kind: ModuleKind,
}
......@@ -206,7 +205,7 @@ pub enum ModuleSource {
#[derive(Copy, Clone, Debug)]
pub struct ModuleLlvm {
pub llcx: llvm::ContextRef,
llcx: llvm::ContextRef,
pub llmod: llvm::ModuleRef,
}
......@@ -216,14 +215,11 @@ unsafe impl Sync for ModuleTranslation { }
pub struct CrateTranslation {
pub crate_name: Symbol,
pub modules: Vec<CompiledModule>,
pub metadata_module: CompiledModule,
pub allocator_module: Option<CompiledModule>,
allocator_module: Option<CompiledModule>,
pub link: rustc::middle::cstore::LinkMeta,
pub metadata: rustc::middle::cstore::EncodedMetadata,
pub exported_symbols: Arc<back::symbol_export::ExportedSymbols>,
pub no_builtins: bool,
pub windows_subsystem: Option<String>,
pub linker_info: back::linker::LinkerInfo
windows_subsystem: Option<String>,
linker_info: back::linker::LinkerInfo
}
__build_diagnostic_array! { librustc_trans, DIAGNOSTICS }
......@@ -237,19 +237,6 @@ pub fn ptr_to(&self) -> Type {
ty!(llvm::LLVMPointerType(self.to_ref(), 0))
}
pub fn is_aggregate(&self) -> bool {
match self.kind() {
TypeKind::Struct | TypeKind::Array => true,
_ => false
}
}
pub fn is_packed(&self) -> bool {
unsafe {
llvm::LLVMIsPackedStruct(self.to_ref()) == True
}
}
pub fn element_type(&self) -> Type {
unsafe {
Type::from_ref(llvm::LLVMGetElementType(self.to_ref()))
......@@ -263,12 +250,6 @@ pub fn vector_length(&self) -> usize {
}
}
pub fn array_length(&self) -> usize {
unsafe {
llvm::LLVMGetArrayLength(self.to_ref()) as usize
}
}
pub fn field_types(&self) -> Vec<Type> {
unsafe {
let n_elts = llvm::LLVMCountStructElementTypes(self.to_ref()) as usize;
......@@ -282,10 +263,6 @@ pub fn field_types(&self) -> Vec<Type> {
}
}
pub fn return_type(&self) -> Type {
ty!(llvm::LLVMGetReturnType(self.to_ref()))
}
pub fn func_params(&self) -> Vec<Type> {
unsafe {
let n_args = llvm::LLVMCountParamTypes(self.to_ref()) as usize;
......@@ -324,13 +301,4 @@ pub fn from_integer(cx: &CrateContext, i: layout::Integer) -> Type {
I128 => Type::i128(cx),
}
}
pub fn from_primitive(ccx: &CrateContext, p: layout::Primitive) -> Type {
match p {
layout::Int(i) => Type::from_integer(ccx, i),
layout::F32 => Type::f32(ccx),
layout::F64 => Type::f64(ccx),
layout::Pointer => bug!("It is not possible to convert Pointer directly to Type.")
}
}
}
......@@ -27,8 +27,6 @@
pub use self::MethodError::*;
pub use self::CandidateSource::*;
pub use self::suggest::AllTraitsVec;
mod confirm;
pub mod probe;
mod suggest;
......
......@@ -95,12 +95,11 @@
extern crate rustc_data_structures;
extern crate rustc_errors as errors;
pub use rustc::dep_graph;
pub use rustc::hir;
pub use rustc::lint;
pub use rustc::middle;
pub use rustc::session;
pub use rustc::util;
use rustc::hir;
use rustc::lint;
use rustc::middle;
use rustc::session;
use rustc::util;
use hir::map as hir_map;
use rustc::infer::InferOk;
......@@ -118,7 +117,7 @@
use std::iter;
// NB: This module needs to be declared first so diagnostics are
// registered before they are used.
pub mod diagnostics;
mod diagnostics;
mod check;
mod check_unused;
......@@ -130,8 +129,8 @@
mod variance;
pub struct TypeAndSubsts<'tcx> {
pub substs: &'tcx Substs<'tcx>,
pub ty: Ty<'tcx>,
substs: &'tcx Substs<'tcx>,
ty: Ty<'tcx>,
}
fn require_c_abi_if_variadic(tcx: TyCtxt,
......