Commit e6badfd4 authored by Michael Woerister

incr.comp.: Use red/green tracking for CGU re-use.

Parent c9a17ef1
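In summary: a codegen unit is now re-used only when its DepNode::CompileCodegenUnit can be marked green, i.e. none of its dependencies changed since the previous session, instead of comparing a saved symbol-name hash (the `input_hash` field removed below). The sketch that follows condenses the re-use check added to `trans_crate` in librustc_trans/base.rs; `maybe_reuse_cgu` is a hypothetical wrapper name introduced only for illustration, and the snippet relies on compiler-internal types, so it is a summary sketch rather than standalone compilable code.

// Illustrative condensation of the re-use check added to trans_crate below.
// All types and methods are the compiler's own; only this wrapper is invented.
fn maybe_reuse_cgu<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                             cgu: &CodegenUnit<'tcx>)
                             -> Option<ModuleTranslation> {
    // 1. The files produced for this CGU in the previous session must still exist.
    let buf = tcx.dep_graph.previous_work_product(&cgu.work_product_id())?;

    // 2. The CGU's dep-node must be markable green: every dependency is unchanged.
    let dep_node = DepNode::new(tcx,
        DepConstructor::CompileCodegenUnit(cgu.name().clone()));
    let dep_node_index = tcx.dep_graph.try_mark_green(tcx, &dep_node)?;

    // 3. Record the decision so -Zquery-dep-graph tests (assert_module_sources)
    //    can read it back via was_loaded_from_cache().
    tcx.dep_graph.mark_loaded_from_cache(dep_node_index, true);

    // Re-use the previously emitted object/bitcode files for this module.
    Some(ModuleTranslation {
        name: cgu.name().to_string(),
        llmod_id: format!("{}.rs", cgu.name()),
        source: ModuleSource::Preexisting(buf),
        kind: ModuleKind::Regular,
    })
}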
......@@ -67,6 +67,15 @@ pub enum DepNodeColor {
Green(DepNodeIndex)
}
impl DepNodeColor {
pub fn is_green(self) -> bool {
match self {
DepNodeColor::Red => false,
DepNodeColor::Green(_) => true,
}
}
}
struct DepGraphData {
/// The old, initial encoding of the dependency graph. This will soon go
/// away.
......@@ -94,6 +103,9 @@ struct DepGraphData {
work_products: RefCell<FxHashMap<WorkProductId, WorkProduct>>,
dep_node_debug: RefCell<FxHashMap<DepNode, String>>,
// Used for testing, only populated when -Zquery-dep-graph is specified.
loaded_from_cache: RefCell<FxHashMap<DepNodeIndexNew, bool>>,
}
impl DepGraph {
......@@ -108,6 +120,7 @@ pub fn new(prev_graph: PreviousDepGraph) -> DepGraph {
current: RefCell::new(CurrentDepGraph::new()),
previous: prev_graph,
colors: RefCell::new(FxHashMap()),
loaded_from_cache: RefCell::new(FxHashMap()),
})),
fingerprints: Rc::new(RefCell::new(FxHashMap())),
}
......@@ -256,16 +269,9 @@ pub fn with_anon_task<OP,R>(&self, dep_kind: DepKind, op: OP) -> (R, DepNodeInde
data.current.borrow_mut().push_anon_task();
let result = op();
let dep_node_index_legacy = data.edges.borrow_mut().pop_anon_task(dep_kind);
let (new_dep_node, dep_node_index_new) = data.current
.borrow_mut()
.pop_anon_task(dep_kind);
if let Some(new_dep_node) = new_dep_node {
assert!(data.colors
.borrow_mut()
.insert(new_dep_node, DepNodeColor::Red)
.is_none());
}
let dep_node_index_new = data.current
.borrow_mut()
.pop_anon_task(dep_kind);
(result, DepNodeIndex {
legacy: dep_node_index_legacy,
new: dep_node_index_new,
......@@ -594,6 +600,25 @@ pub fn is_green(&self, dep_node_index: DepNodeIndex) -> bool {
}
}).unwrap_or(false)
}
pub fn mark_loaded_from_cache(&self, dep_node: DepNodeIndex, state: bool) {
debug!("mark_loaded_from_cache({:?}, {})",
self.data.as_ref().unwrap().current.borrow().nodes[dep_node.new],
state);
self.data
.as_ref()
.unwrap()
.loaded_from_cache
.borrow_mut()
.insert(dep_node.new, state);
}
pub fn was_loaded_from_cache(&self, dep_node: &DepNode) -> Option<bool> {
let data = self.data.as_ref().unwrap();
let dep_node_index = data.current.borrow().node_to_node_index[dep_node];
data.loaded_from_cache.borrow().get(&dep_node_index).cloned()
}
}
/// A "work product" is an intermediate result that we save into the
......@@ -630,11 +655,6 @@ pub fn is_green(&self, dep_node_index: DepNodeIndex) -> bool {
#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
pub struct WorkProduct {
pub cgu_name: String,
/// Extra hash used to decide if work-product is still suitable;
/// note that this is *not* a hash of the work-product itself.
/// See documentation on `WorkProduct` type for an example.
pub input_hash: u64,
/// Saved files associated with this CGU
pub saved_files: Vec<(OutputType, String)>,
}
......@@ -644,15 +664,26 @@ pub(super) struct CurrentDepGraph {
edges: IndexVec<DepNodeIndexNew, Vec<DepNodeIndexNew>>,
node_to_node_index: FxHashMap<DepNode, DepNodeIndexNew>,
anon_id_seed: Fingerprint,
task_stack: Vec<OpenTask>,
}
impl CurrentDepGraph {
fn new() -> CurrentDepGraph {
use std::time::{SystemTime, UNIX_EPOCH};
let duration = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
let nanos = duration.as_secs() * 1_000_000_000 +
duration.subsec_nanos() as u64;
let mut stable_hasher = StableHasher::new();
nanos.hash(&mut stable_hasher);
CurrentDepGraph {
nodes: IndexVec::new(),
edges: IndexVec::new(),
node_to_node_index: FxHashMap(),
anon_id_seed: stable_hasher.finish(),
task_stack: Vec::new(),
}
}
......@@ -696,14 +727,14 @@ fn push_anon_task(&mut self) {
});
}
fn pop_anon_task(&mut self, kind: DepKind) -> (Option<DepNode>, DepNodeIndexNew) {
fn pop_anon_task(&mut self, kind: DepKind) -> DepNodeIndexNew {
let popped_node = self.task_stack.pop().unwrap();
if let OpenTask::Anon {
read_set: _,
reads
} = popped_node {
let mut fingerprint = Fingerprint::zero();
let mut fingerprint = self.anon_id_seed;
let mut hasher = StableHasher::new();
for &read in reads.iter() {
......@@ -725,9 +756,9 @@ fn pop_anon_task(&mut self, kind: DepKind) -> (Option<DepNode>, DepNodeIndexNew)
};
if let Some(&index) = self.node_to_node_index.get(&target_dep_node) {
(None, index)
index
} else {
(Some(target_dep_node), self.alloc_node(target_dep_node, reads))
self.alloc_node(target_dep_node, reads)
}
} else {
bug!("pop_anon_task() - Expected anonymous task to be popped")
......
......@@ -320,6 +320,10 @@ fn try_get_with(tcx: TyCtxt<'a, $tcx, 'lcx>,
dep_node_index)
}
debug!("ty::queries::{}::try_get_with(key={:?}) - running try_mark_green",
stringify!($name),
key);
if let Some(dep_node_index) = tcx.dep_graph.try_mark_green(tcx, &dep_node) {
debug_assert!(tcx.dep_graph.is_green(dep_node_index));
profq_msg!(tcx, ProfileQueriesMsg::CacheHit);
......@@ -363,6 +367,10 @@ fn load_from_disk_and_cache_in_memory(tcx: TyCtxt<'a, $tcx, 'lcx>,
})
})?;
if tcx.sess.opts.debugging_opts.query_dep_graph {
tcx.dep_graph.mark_loaded_from_cache(dep_node_index, true);
}
let value = QueryValue::new(result, dep_node_index, diagnostics);
Ok((&tcx.maps
......@@ -394,6 +402,10 @@ fn force(tcx: TyCtxt<'a, $tcx, 'lcx>,
let ((result, dep_node_index), diagnostics) = res;
if tcx.sess.opts.debugging_opts.query_dep_graph {
tcx.dep_graph.mark_loaded_from_cache(dep_node_index, false);
}
let value = QueryValue::new(result, dep_node_index, diagnostics);
Ok(((&tcx.maps
......@@ -406,8 +418,6 @@ fn force(tcx: TyCtxt<'a, $tcx, 'lcx>,
dep_node_index))
}
pub fn try_get(tcx: TyCtxt<'a, $tcx, 'lcx>, span: Span, key: $K)
-> Result<$V, DiagnosticBuilder<'a>> {
match Self::try_get_with(tcx, span, key) {
......
......@@ -39,3 +39,4 @@
pub use persist::in_incr_comp_dir;
pub use persist::prepare_session_directory;
pub use persist::finalize_session_directory;
pub use persist::delete_workproduct_files;
......@@ -99,6 +99,7 @@ fn load_data(sess: &Session, path: &Path) -> Option<Vec<u8>> {
/// variants that represent inputs (HIR and imported Metadata).
fn does_still_exist(tcx: TyCtxt, dep_node: &DepNode) -> bool {
match dep_node.kind {
DepKind::Krate |
DepKind::Hir |
DepKind::HirBody |
DepKind::InScopeTraits |
......@@ -258,33 +259,28 @@ fn transitive_dirty_nodes(serialized_dep_graph: &SerializedDepGraph,
/// otherwise no longer applicable.
fn reconcile_work_products<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
work_products: Vec<SerializedWorkProduct>,
clean_work_products: &FxHashSet<WorkProductId>) {
_clean_work_products: &FxHashSet<WorkProductId>) {
debug!("reconcile_work_products({:?})", work_products);
for swp in work_products {
if !clean_work_products.contains(&swp.id) {
debug!("reconcile_work_products: dep-node for {:?} is dirty", swp);
delete_dirty_work_product(tcx, swp);
} else {
let mut all_files_exist = true;
for &(_, ref file_name) in swp.work_product.saved_files.iter() {
let path = in_incr_comp_dir_sess(tcx.sess, file_name);
if !path.exists() {
all_files_exist = false;
if tcx.sess.opts.debugging_opts.incremental_info {
eprintln!("incremental: could not find file for \
up-to-date work product: {}", path.display());
}
let mut all_files_exist = true;
for &(_, ref file_name) in swp.work_product.saved_files.iter() {
let path = in_incr_comp_dir_sess(tcx.sess, file_name);
if !path.exists() {
all_files_exist = false;
if tcx.sess.opts.debugging_opts.incremental_info {
eprintln!("incremental: could not find file for \
up-to-date work product: {}", path.display());
}
}
}
if all_files_exist {
debug!("reconcile_work_products: all files for {:?} exist", swp);
tcx.dep_graph.insert_previous_work_product(&swp.id, swp.work_product);
} else {
debug!("reconcile_work_products: some file for {:?} does not exist", swp);
delete_dirty_work_product(tcx, swp);
}
if all_files_exist {
debug!("reconcile_work_products: all files for {:?} exist", swp);
tcx.dep_graph.insert_previous_work_product(&swp.id, swp.work_product);
} else {
debug!("reconcile_work_products: some file for {:?} does not exist", swp);
delete_dirty_work_product(tcx, swp);
}
}
}
......
......@@ -29,3 +29,4 @@
pub use self::save::save_dep_graph;
pub use self::save::save_work_products;
pub use self::work_product::save_trans_partition;
pub use self::work_product::delete_workproduct_files;
......@@ -21,11 +21,9 @@
pub fn save_trans_partition(sess: &Session,
dep_graph: &DepGraph,
cgu_name: &str,
partition_hash: u64,
files: &[(OutputType, PathBuf)]) {
debug!("save_trans_partition({:?},{},{:?})",
debug!("save_trans_partition({:?},{:?})",
cgu_name,
partition_hash,
files);
if sess.opts.incremental.is_none() {
return;
......@@ -57,7 +55,6 @@ pub fn save_trans_partition(sess: &Session,
let work_product = WorkProduct {
cgu_name: cgu_name.to_string(),
input_hash: partition_hash,
saved_files,
};
......
......@@ -27,47 +27,32 @@
//! the HIR doesn't change as a result of the annotations, which might
//! perturb the reuse results.
use rustc::dep_graph::{DepNode, DepConstructor};
use rustc::ty::TyCtxt;
use syntax::ast;
use {ModuleSource, ModuleTranslation};
use rustc::ich::{ATTR_PARTITION_REUSED, ATTR_PARTITION_TRANSLATED};
const MODULE: &'static str = "module";
const CFG: &'static str = "cfg";
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum Disposition { Reused, Translated }
impl ModuleTranslation {
pub fn disposition(&self) -> (String, Disposition) {
let disposition = match self.source {
ModuleSource::Preexisting(_) => Disposition::Reused,
ModuleSource::Translated(_) => Disposition::Translated,
};
enum Disposition { Reused, Translated }
(self.name.clone(), disposition)
}
}
pub(crate) fn assert_module_sources<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
modules: &[(String, Disposition)]) {
pub(crate) fn assert_module_sources<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let _ignore = tcx.dep_graph.in_ignore();
if tcx.sess.opts.incremental.is_none() {
return;
}
let ams = AssertModuleSource { tcx: tcx, modules: modules };
let ams = AssertModuleSource { tcx };
for attr in &tcx.hir.krate().attrs {
ams.check_attr(attr);
}
}
struct AssertModuleSource<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
modules: &'a [(String, Disposition)],
tcx: TyCtxt<'a, 'tcx, 'tcx>
}
impl<'a, 'tcx> AssertModuleSource<'a, 'tcx> {
......@@ -86,32 +71,31 @@ fn check_attr(&self, attr: &ast::Attribute) {
}
let mname = self.field(attr, MODULE);
let mtrans = self.modules.iter().find(|&&(ref name, _)| name == mname.as_str());
let mtrans = match mtrans {
Some(m) => m,
None => {
debug!("module name `{}` not found amongst:", mname);
for &(ref name, ref disposition) in self.modules {
debug!("module named `{}` with disposition {:?}",
name,
disposition);
}
self.tcx.sess.span_err(
attr.span,
&format!("no module named `{}`", mname));
return;
}
};
let dep_node = DepNode::new(self.tcx,
DepConstructor::CompileCodegenUnit(mname.as_str()));
let mtrans_disposition = mtrans.1;
if disposition != mtrans_disposition {
self.tcx.sess.span_err(
attr.span,
&format!("expected module named `{}` to be {:?} but is {:?}",
mname,
disposition,
mtrans_disposition));
if let Some(loaded_from_cache) = self.tcx.dep_graph.was_loaded_from_cache(&dep_node) {
match (disposition, loaded_from_cache) {
(Disposition::Reused, false) => {
self.tcx.sess.span_err(
attr.span,
&format!("expected module named `{}` to be Reused but is Translated",
mname));
}
(Disposition::Translated, true) => {
self.tcx.sess.span_err(
attr.span,
&format!("expected module named `{}` to be Translated but is Reused",
mname));
}
(Disposition::Reused, true) |
(Disposition::Translated, false) => {
// These are what we would expect.
}
}
} else {
self.tcx.sess.span_err(attr.span, &format!("no module named `{}`", mname));
}
}
......
......@@ -79,9 +79,7 @@ pub fn provide_local(providers: &mut Providers) {
providers.is_exported_symbol = |tcx, id| {
// FIXME(#42293) needs red/green to not break a bunch of incremental
// tests
tcx.dep_graph.with_ignore(|| {
tcx.exported_symbol_ids(id.krate).contains(&id)
})
tcx.exported_symbol_ids(id.krate).contains(&id)
};
providers.exported_symbols = |tcx, cnum| {
......
......@@ -884,7 +884,6 @@ fn copy_module_artifacts_into_incr_comp_cache(sess: &Session,
save_trans_partition(sess,
dep_graph,
&module.name,
module.symbol_name_hash,
&files);
}
}
......@@ -1134,7 +1133,6 @@ fn execute_work_item(cgcx: &CodegenContext, work_item: WorkItem)
name: module_name,
kind: ModuleKind::Regular,
pre_existing: true,
symbol_name_hash: mtrans.symbol_name_hash,
emit_bc: config.emit_bc,
emit_obj: config.emit_obj,
}))
......
......@@ -28,7 +28,7 @@
use super::ModuleTranslation;
use super::ModuleKind;
use assert_module_sources::{self, Disposition};
use assert_module_sources;
use back::link;
use back::symbol_export;
use back::write::{self, OngoingCrateTranslation, create_target_machine};
......@@ -41,7 +41,7 @@
use rustc::middle::cstore::{EncodedMetadata, EncodedMetadataHashes};
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::maps::Providers;
use rustc::dep_graph::{DepNode, DepKind};
use rustc::dep_graph::{DepNode, DepKind, DepConstructor};
use rustc::middle::cstore::{self, LinkMeta, LinkagePreference};
use rustc::util::common::{time, print_time_passes_entry};
use rustc::session::config::{self, NoDebugInfo};
......@@ -78,7 +78,6 @@
use CrateInfo;
use std::any::Any;
use std::cell::RefCell;
use std::ffi::{CStr, CString};
use std::str;
use std::sync::Arc;
......@@ -904,7 +903,6 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let metadata_module = ModuleTranslation {
name: link::METADATA_MODULE_NAME.to_string(),
llmod_id: llmod_id.to_string(),
symbol_name_hash: 0, // we always rebuild metadata, at least for now
source: ModuleSource::Translated(ModuleLlvm {
llcx: metadata_llcx,
llmod: metadata_llmod,
......@@ -985,7 +983,6 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
Some(ModuleTranslation {
name: link::ALLOCATOR_MODULE_NAME.to_string(),
llmod_id: llmod_id.to_string(),
symbol_name_hash: 0, // we always rebuild allocator shims
source: ModuleSource::Translated(modules),
kind: ModuleKind::Allocator,
})
......@@ -1017,6 +1014,50 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ongoing_translation.wait_for_signal_to_translate_item();
ongoing_translation.check_for_errors(tcx.sess);
// First, if incremental compilation is enabled, we try to re-use the
// codegen unit from the cache.
if tcx.dep_graph.is_fully_enabled() {
let cgu_id = cgu.work_product_id();
// Check whether there is a previous work-product we can
// re-use. Not only must the file exist, and the inputs not
// be dirty, but the hash of the symbols we will generate must
// be the same.
if let Some(buf) = tcx.dep_graph.previous_work_product(&cgu_id) {
let dep_node = &DepNode::new(tcx,
DepConstructor::CompileCodegenUnit(cgu.name().clone()));
// We try to mark the DepNode::CompileCodegenUnit green. If we
// succeed it means that none of the dependencies has changed
// and we can safely re-use.
if let Some(dep_node_index) = tcx.dep_graph.try_mark_green(tcx, dep_node) {
// Append ".rs" to LLVM module identifier.
//
// LLVM code generator emits a ".file filename" directive
// for ELF backends. Value of the "filename" is set as the
// LLVM module identifier. Due to a LLVM MC bug[1], LLVM
// crashes if the module identifier is same as other symbols
// such as a function name in the module.
// 1. http://llvm.org/bugs/show_bug.cgi?id=11479
let llmod_id = format!("{}.rs", cgu.name());
let module = ModuleTranslation {
name: cgu.name().to_string(),
source: ModuleSource::Preexisting(buf),
kind: ModuleKind::Regular,
llmod_id,
};
tcx.dep_graph.mark_loaded_from_cache(dep_node_index, true);
write::submit_translated_module_to_llvm(tcx, module, 0);
// Continue to next cgu, this one is done.
continue
}
} else {
// This can happen if files were deleted from the cache
// directory for some reason. We just re-compile then.
}
}
let _timing_guard = time_graph.as_ref().map(|time_graph| {
time_graph.start(write::TRANS_WORKER_TIMELINE,
write::TRANS_WORK_PACKAGE_KIND,
......@@ -1037,9 +1078,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
total_trans_time);
if tcx.sess.opts.incremental.is_some() {
DISPOSITIONS.with(|d| {
assert_module_sources::assert_module_sources(tcx, &d.borrow());
});
assert_module_sources::assert_module_sources(tcx);
}
symbol_names_test::report_symbol_names(tcx);
......@@ -1074,10 +1113,6 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ongoing_translation
}
// FIXME(#42293) hopefully once red/green is enabled we're testing everything
// via a method that doesn't require this!
thread_local!(static DISPOSITIONS: RefCell<Vec<(String, Disposition)>> = Default::default());
fn assert_and_save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
metadata_incr_hashes: EncodedMetadataHashes,
link_meta: LinkMeta) {
......@@ -1301,38 +1336,19 @@ pub fn new(tcx: TyCtxt) -> CrateInfo {
}
fn is_translated_function(tcx: TyCtxt, id: DefId) -> bool {
// FIXME(#42293) needs red/green tracking to avoid failing a bunch of
// existing tests
tcx.dep_graph.with_ignore(|| {
let (all_trans_items, _) =
tcx.collect_and_partition_translation_items(LOCAL_CRATE);
all_trans_items.contains(&id)
})
let (all_trans_items, _) =
tcx.collect_and_partition_translation_items(LOCAL_CRATE);
all_trans_items.contains(&id)
}
fn compile_codegen_unit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
cgu: InternedString) -> Stats {
// FIXME(#42293) needs red/green tracking to avoid failing a bunch of
// existing tests
let cgu = tcx.dep_graph.with_ignore(|| {
tcx.codegen_unit(cgu)
});
let cgu = tcx.codegen_unit(cgu);
let start_time = Instant::now();
let dep_node = cgu.work_product_dep_node();
let ((stats, module), _) =
tcx.dep_graph.with_task(dep_node,
tcx,
cgu,
module_translation);
let (stats, module) = module_translation(tcx, cgu);
let time_to_translate = start_time.elapsed();
if tcx.sess.opts.incremental.is_some() {
DISPOSITIONS.with(|d| {
d.borrow_mut().push(module.disposition());
});
}
// We assume that the cost to run LLVM on a CGU is proportional to
// the time we needed for translating it.
let cost = time_to_translate.as_secs() * 1_000_000_000 +
......@@ -1349,8 +1365,6 @@ fn module_translation<'a, 'tcx>(
-> (Stats, ModuleTranslation)
{
let cgu_name = cgu.name().to_string();
let cgu_id = cgu.work_product_id();
let symbol_name_hash = cgu.compute_symbol_name_hash(tcx);
// Append ".rs" to LLVM module identifier.
//
......@@ -1362,40 +1376,6 @@ fn module_translation<'a, 'tcx>(
// 1. http://llvm.org/bugs/show_bug.cgi?id=11479
let llmod_id = format!("{}.rs", cgu.name());
// Check whether there is a previous work-product we can
// re-use. Not only must the file exist, and the inputs not
// be dirty, but the hash of the symbols we will generate must
// be the same.
let previous_work_product =
tcx.dep_graph.previous_work_product(&cgu_id).and_then(|work_product| {
if work_product.input_hash == symbol_name_hash {
debug!("trans_reuse_previous_work_products: reusing {:?}", work_product);
Some(work_product)
} else {
if tcx.sess.opts.debugging_opts.incremental_info {
eprintln!("incremental: CGU `{}` invalidated because of \
changed partitioning hash.",
cgu.name());
}
debug!("trans_reuse_previous_work_products: \
not reusing {:?} because hash changed to {:?}",
work_product, symbol_name_hash);
None
}
});
if let Some(buf) = previous_work_product {
// Don't need to translate this module.
let module = ModuleTranslation {
llmod_id: llmod_id,
name: cgu_name,
symbol_name_hash,
source: ModuleSource::Preexisting(buf.clone()),
kind: ModuleKind::Regular,
};
return (Stats::default(), module);
}
// Instantiate translation items without filling out definitions yet...
let scx = SharedCrateContext::new(tcx);
let lcx = LocalCrateContext::new(&scx, cgu, &llmod_id);
......@@ -1461,7 +1441,6 @@ fn module_translation<'a, 'tcx>(
ModuleTranslation {
name: cgu_name,
symbol_name_hash,
source: ModuleSource::Translated(llvm_module),
kind: ModuleKind::Regular,
llmod_id,
......
......@@ -155,17 +155,14 @@ pub fn get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
}
}
// FIXME(#42293) we should actually track this, but fails too many tests
// today.
tcx.dep_graph.with_ignore(|| {
if ccx.use_dll_storage_attrs() &&
tcx.is_dllimport_foreign_item(instance_def_id)
{
unsafe {
llvm::LLVMSetDLLStorageClass(llfn, llvm::DLLStorageClass::DllImport);
}
if ccx.use_dll_storage_attrs() &&
tcx.is_dllimport_foreign_item(instance_def_id)
{
unsafe {
llvm::LLVMSetDLLStorageClass(llfn, llvm::DLLStorageClass::DllImport);
}
});
}
llfn
};
......
......@@ -296,26 +296,22 @@ pub fn collect_crate_translation_items<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
mode: TransItemCollectionMode)
-> (FxHashSet<TransItem<'tcx>>,
InliningMap<'tcx>) {
// We are not tracking dependencies of this pass as it has to be re-executed
// every time no matter what.
tcx.dep_graph.with_ignore(|| {
let roots = collect_roots(tcx, mode);
debug!("Building translation item graph, beginning at roots");
let mut visited = FxHashSet();
let mut recursion_depths = DefIdMap();
let mut inlining_map = InliningMap::new();
for root in roots {
collect_items_rec(tcx,
root,
&mut visited,
&mut recursion_depths,
&mut inlining_map);
}
let roots = collect_roots(tcx, mode);
debug!("Building translation item graph, beginning at roots");
let mut visited = FxHashSet();
let mut recursion_depths = DefIdMap();
let mut inlining_map = InliningMap::new();
for root in roots {
collect_items_rec(tcx,
root,
&mut visited,
&mut recursion_depths,
&mut inlining_map);
}
(visited, inlining_map)
})
(visited, inlining_map)
}
// Find all non-generic items by walking the HIR. These items serve as roots to
......
......@@ -231,17 +231,13 @@ pub fn get_static(ccx: &CrateContext, def_id: DefId) -> ValueRef {
g
};
// FIXME(#42293) we should actually track this, but fails too many tests
// today.
ccx.tcx().dep_graph.with_ignore(|| {
if ccx.use_dll_storage_attrs() && ccx.tcx().is_dllimport_foreign_item(def_id) {
// For foreign (native) libs we know the exact storage type to use.
unsafe {
llvm::LLVMSetDLLStorageClass(g, llvm::DLLStorageClass::DllImport);
}
if ccx.use_dll_storage_attrs() && ccx.tcx().is_dllimport_foreign_item(def_id) {
// For foreign (native) libs we know the exact storage type to use.
unsafe {
llvm::LLVMSetDLLStorageClass(g, llvm::DLLStorageClass::DllImport);
}
});
}
ccx.instances().borrow_mut().insert(instance, g);
ccx.statics().borrow_mut().insert(g, def_id);
g
......
......@@ -204,7 +204,6 @@ pub struct ModuleTranslation {
/// as the crate name and disambiguator.
name: String,
llmod_id: String,
symbol_name_hash: u64,
pub source: ModuleSource,
pub kind: ModuleKind,
}
......@@ -238,7 +237,6 @@ pub fn into_compiled_module(self,
llmod_id: self.llmod_id,
name: self.name.clone(),
kind: self.kind,
symbol_name_hash: self.symbol_name_hash,
pre_existing,
emit_obj,
emit_bc,
......@@ -253,7 +251,6 @@ pub struct CompiledModule {
pub llmod_id: String,
pub object: PathBuf,
pub kind: ModuleKind,
pub symbol_name_hash: u64,
pub pre_existing: bool,
pub emit_obj: bool,
pub emit_bc: bool,
......
......@@ -108,14 +108,11 @@
use rustc::hir::def_id::DefId;
use rustc::hir::map::DefPathData;
use rustc::middle::trans::{Linkage, Visibility};
use rustc::ich::Fingerprint;
use rustc::session::config::NUMBERED_CODEGEN_UNIT_MARKER;
use rustc::ty::{self, TyCtxt, InstanceDef};
use rustc::ty::item_path::characteristic_def_id_of_type;
use rustc::util::nodemap::{FxHashMap, FxHashSet};
use rustc_data_structures::stable_hasher::StableHasher;
use std::collections::hash_map::Entry;
use std::hash::Hash;
use syntax::ast::NodeId;
use syntax::symbol::{Symbol, InternedString};
use trans_item::{TransItem, TransItemExt, InstantiationMode};
......@@ -155,19 +152,6 @@ fn work_product_dep_node(&self) -> DepNode {
self.work_product_id().to_dep_node()
}
fn compute_symbol_name_hash<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> u64 {
let mut state: StableHasher<Fingerprint> = StableHasher::new();
let all_items = self.items_in_deterministic_order(tcx);
for (item, (linkage, visibility)) in all_items {
let symbol_name = item.symbol_name(tcx);
symbol_name.len().hash(&mut state);
symbol_name.hash(&mut state);
linkage.hash(&mut state);
visibility.hash(&mut state);
}
state.finish().to_smaller_hash()
}
fn items_in_deterministic_order<'a>(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>)
-> Vec<(TransItem<'tcx>,
......
......@@ -15,7 +15,6 @@
// revisions:rpass1 rpass2
// compile-flags: -Z query-dep-graph
// aux-build:point.rs
// ignore-test FIXME(#42293) this regressed in #44142 but should get fixed with red/green
#![feature(rustc_attrs)]
#![feature(stmt_expr_attributes)]
......
......@@ -20,6 +20,7 @@
//[rpass1] rustc-env:RUSTC_FORCE_INCR_COMP_ARTIFACT_HEADER="l33t haxx0r rustc 2.1 LTS"
// revisions:rpass1 rpass2
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
#![rustc_partition_translated(module="cache_file_headers", cfg="rpass2")]
......
......@@ -12,8 +12,6 @@
// revisions:rpass1 rpass2
// compile-flags:-Z query-dep-graph
// ignore-test -- ignored until red/green restores cross-crate tracking fidelity
#![feature(rustc_attrs)]
extern crate a;
......
......@@ -15,8 +15,6 @@
// compile-flags: -Z query-dep-graph
// aux-build:point.rs
// ignore-test -- ignored until red/green restores cross-crate tracking fidelity
#![feature(rustc_attrs)]
#![feature(stmt_expr_attributes)]
#![allow(dead_code)]
......
......@@ -15,8 +15,6 @@
// compile-flags: -Z query-dep-graph
// aux-build:point.rs
// ignore-test -- ignored until red/green restores cross-crate tracking fidelity
#![feature(rustc_attrs)]
#![feature(stmt_expr_attributes)]
#![allow(dead_code)]
......
......@@ -9,13 +9,13 @@
// except according to those terms.
// revisions: rpass1 rpass2
// compile-flags: -Zquery-dep-graph
#![feature(rustc_attrs)]
#![allow(private_no_mangle_fns)]
#![rustc_partition_reused(module="change_symbol_export_status", cfg="rpass2")]
#![rustc_partition_translated(module="change_symbol_export_status-mod1", cfg="rpass2")]
#![rustc_partition_reused(module="change_symbol_export_status-mod2", cfg="rpass2")]
// This test case makes sure that a change in symbol visibility is detected by
// our dependency tracking. We do this by changing a module's visibility to
......@@ -37,6 +37,11 @@ mod mod1 {
pub fn foo() {}
}
pub mod mod2 {
#[no_mangle]
pub fn bar() {}
}
fn main() {
mod1::foo();
}
......@@ -12,6 +12,7 @@
// the cache while changing an untracked one doesn't.
// revisions:rpass1 rpass2 rpass3
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
......
......@@ -12,6 +12,7 @@
// equal example.
// revisions:rpass1 rpass2
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
#![rustc_partition_reused(module="issue_35593", cfg="rpass2")]
......
......@@ -12,6 +12,8 @@
// dep-node.
// revisions:rpass1 rpass2
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
......
......@@ -11,8 +11,6 @@
// revisions:rpass1 rpass2 rpass3
// compile-flags: -Z query-dep-graph -g -Zincremental-cc
// aux-build:extern_crate.rs
// ignore-test FIXME(#42293) this regressed in #44142 but should get fixed with red/green
// This test case makes sure that we detect if paths emitted into debuginfo
// are changed, even when the change happens in an external crate.
......
......@@ -17,8 +17,7 @@
#![feature(rustc_attrs)]
#![crate_type = "bin"]
// FIXME(#42293) this regressed in #44142 but should get fixed with red/green
// #![rustc_partition_reused(module="main", cfg="rpass2")]
#![rustc_partition_reused(module="main", cfg="rpass2")]
extern crate a;
......
......@@ -18,8 +18,6 @@
// no-prefer-dynamic
// compile-flags: -Z query-dep-graph
// ignore-test -- ignored until red/green restores cross-crate tracking fidelity
#![feature(rustc_attrs)]
extern crate a;
......
......@@ -13,6 +13,7 @@
// `y` module entirely (but not the `x` module).
// revisions:rpass1 rpass2
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
......
......@@ -12,8 +12,6 @@
// revisions:rpass1 rpass2
// compile-flags: -Z query-dep-graph
// ignore-test -- ignored until red/green restores cross-crate tracking fidelity
#![feature(rustc_attrs)]
extern crate a;
......
......@@ -12,8 +12,6 @@
// revisions:rpass1 rpass2 rpass3
// compile-flags: -Z query-dep-graph
// ignore-test -- ignored until red/green restores cross-crate tracking fidelity
#![feature(rustc_attrs)]
extern crate a;
......
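With red/green tracking restored, assert_module_sources reads the re-use decision directly from the dep-graph (was_loaded_from_cache), and several previously ignored tests above are re-enabled. A minimal incremental test in the same style would look like the following; the crate and module names (my_test, unchanged, changed) and the exact reuse expectations are illustrative only, patterned on the tests touched by this commit rather than taken from it.

// revisions:rpass1 rpass2
// compile-flags: -Z query-dep-graph

#![feature(rustc_attrs)]
// The untouched module's CGU should be loaded from the incremental cache ...
#![rustc_partition_reused(module="my_test-unchanged", cfg="rpass2")]
// ... while the edited module's CGU must be translated again.
#![rustc_partition_translated(module="my_test-changed", cfg="rpass2")]

pub mod unchanged {
    pub fn answer() -> u32 { 42 }
}

pub mod changed {
    #[cfg(rpass1)]
    pub fn value() -> u32 { 0 }

    #[cfg(rpass2)]
    pub fn value() -> u32 { 1 }
}

fn main() {
    let _ = unchanged::answer() + changed::value();
}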