提交 c60b0e43 编写于 作者: M Michael Woerister

incr.comp.: Remove on-export crate metadata hashing.

上级 dc0e2277
......@@ -28,8 +28,6 @@
// Names of the `#[rustc_*]` attributes used by the incremental-compilation
// test framework to assert dirty/clean state, declare dependency-graph
// expectations, and check codegen-partition reuse.
pub const ATTR_DIRTY: &'static str = "rustc_dirty";
pub const ATTR_CLEAN: &'static str = "rustc_clean";
pub const ATTR_DIRTY_METADATA: &'static str = "rustc_metadata_dirty";
pub const ATTR_CLEAN_METADATA: &'static str = "rustc_metadata_clean";
pub const ATTR_IF_THIS_CHANGED: &'static str = "rustc_if_this_changed";
pub const ATTR_THEN_THIS_WOULD_NEED: &'static str = "rustc_then_this_would_need";
pub const ATTR_PARTITION_REUSED: &'static str = "rustc_partition_reused";
......@@ -41,8 +39,6 @@
ATTR_THEN_THIS_WOULD_NEED,
ATTR_DIRTY,
ATTR_CLEAN,
ATTR_DIRTY_METADATA,
ATTR_CLEAN_METADATA,
ATTR_PARTITION_REUSED,
ATTR_PARTITION_TRANSLATED,
];
......@@ -53,8 +49,6 @@
ATTR_THEN_THIS_WOULD_NEED,
ATTR_DIRTY,
ATTR_CLEAN,
ATTR_DIRTY_METADATA,
ATTR_CLEAN_METADATA,
ATTR_PARTITION_REUSED,
ATTR_PARTITION_TRANSLATED,
];
......@@ -175,32 +175,6 @@ pub fn new() -> EncodedMetadata {
}
}
/// The hash for some metadata that (when saving) will be exported
/// from this crate, or which (when importing) was exported by an
/// upstream crate.
#[derive(Debug, RustcEncodable, RustcDecodable, Copy, Clone)]
pub struct EncodedMetadataHash {
    /// Raw `u32` form of the `DefIndex` of the item this hash belongs to.
    pub def_index: u32,
    /// Stable ICH fingerprint computed over the item's encoded metadata.
    pub hash: ich::Fingerprint,
}
/// The hash for some metadata that (when saving) will be exported
/// from this crate, or which (when importing) was exported by an
/// upstream crate.
#[derive(Debug, RustcEncodable, RustcDecodable, Clone)]
pub struct EncodedMetadataHashes {
    // Stable content hashes for things in crate metadata, indexed by DefIndex.
    pub hashes: Vec<EncodedMetadataHash>,
}

impl EncodedMetadataHashes {
    /// Creates an empty hash collection; entries are pushed during
    /// metadata encoding.
    pub fn new() -> EncodedMetadataHashes {
        EncodedMetadataHashes {
            hashes: Vec::new(),
        }
    }
}
/// The backend's way to give the crate store access to the metadata in a library.
/// Note that it returns the raw metadata bytes stored in the library file, whether
/// it is compressed, uncompressed, some weird mix, etc.
......@@ -286,7 +260,7 @@ fn encode_metadata<'a, 'tcx>(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
link_meta: &LinkMeta,
reachable: &NodeSet)
-> (EncodedMetadata, EncodedMetadataHashes);
-> EncodedMetadata;
fn metadata_encoding_version(&self) -> &[u8];
}
......@@ -370,7 +344,7 @@ fn encode_metadata<'a, 'tcx>(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
link_meta: &LinkMeta,
reachable: &NodeSet)
-> (EncodedMetadata, EncodedMetadataHashes) {
-> EncodedMetadata {
bug!("encode_metadata")
}
fn metadata_encoding_version(&self) -> &[u8] { bug!("metadata_encoding_version") }
......
......@@ -1072,8 +1072,6 @@ fn parse_optimization_fuel(slot: &mut Option<(String, u64)>, v: Option<&str>) ->
"attempt to recover from parse errors (experimental)"),
incremental: Option<String> = (None, parse_opt_string, [UNTRACKED],
"enable incremental compilation (experimental)"),
incremental_cc: bool = (false, parse_bool, [UNTRACKED],
"enable cross-crate incremental compilation (even more experimental)"),
incremental_queries: bool = (true, parse_bool, [UNTRACKED],
"enable incremental compilation support for queries (experimental)"),
incremental_info: bool = (false, parse_bool, [UNTRACKED],
......
......@@ -24,7 +24,7 @@
use lint::{self, Lint};
use ich::{StableHashingContext, NodeIdHashingMode};
use middle::const_val::ConstVal;
use middle::cstore::{CrateStore, LinkMeta, EncodedMetadataHashes};
use middle::cstore::{CrateStore, LinkMeta};
use middle::cstore::EncodedMetadata;
use middle::free_region::FreeRegionMap;
use middle::lang_items;
......@@ -1246,7 +1246,7 @@ pub fn serialize_query_result_cache<E>(self,
impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
pub fn encode_metadata(self, link_meta: &LinkMeta, reachable: &NodeSet)
-> (EncodedMetadata, EncodedMetadataHashes)
-> EncodedMetadata
{
self.cstore.encode_metadata(self, link_meta, reachable)
}
......
......@@ -11,9 +11,6 @@
//! The data that we will serialize and deserialize.
use rustc::dep_graph::{WorkProduct, WorkProductId};
use rustc::hir::map::DefPathHash;
use rustc::middle::cstore::EncodedMetadataHash;
use rustc_data_structures::fx::FxHashMap;
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct SerializedWorkProduct {
......@@ -23,39 +20,3 @@ pub struct SerializedWorkProduct {
/// work-product data itself
pub work_product: WorkProduct,
}
/// Data for use when downstream crates get recompiled.
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct SerializedMetadataHashes {
    /// For each def-id defined in this crate that appears in the
    /// metadata, we hash all the inputs that were used when producing
    /// the metadata. We save this after compilation is done. Then,
    /// when some downstream crate is being recompiled, it can compare
    /// the hashes we saved against the hashes that it saw from
    /// before; this will tell it which of the items in this crate
    /// changed, which in turn implies what items in the downstream
    /// crate need to be recompiled.
    ///
    /// Note that we store the def-ids here. This is because we don't
    /// reload this file when we recompile this crate, we will just
    /// regenerate it completely with the current hashes and new def-ids.
    ///
    /// Then downstream crates will load up their
    /// `SerializedDepGraph`, which may contain `MetaData(X)` nodes
    /// where `X` refers to some item in this crate. That `X` will be
    /// a `DefPathIndex` that gets retracted to the current `DefId`
    /// (matching the one found in this structure).
    pub entry_hashes: Vec<EncodedMetadataHash>,

    /// For each DefIndex (as it occurs in SerializedMetadataHash), this
    /// map stores the DefPathIndex (as it occurs in DefIdDirectory), so
    /// that we can find the new DefId for a SerializedMetadataHash in a
    /// subsequent compilation session.
    ///
    /// This map is only needed for running auto-tests using the
    /// #[rustc_metadata_dirty] and #[rustc_metadata_clean] attributes, and
    /// is only populated if -Z query-dep-graph is specified. It will be
    /// empty otherwise. Importing crates are perfectly happy with just having
    /// the DefIndex.
    pub index_map: FxHashMap<u32, DefPathHash>
}
......@@ -23,21 +23,6 @@
//! Errors are reported if we are in the suitable configuration but
//! the required condition is not met.
//!
//! The `#[rustc_metadata_dirty]` and `#[rustc_metadata_clean]` attributes
//! can be used to check the incremental compilation hash (ICH) values of
//! metadata exported in rlibs.
//!
//! - If a node is marked with `#[rustc_metadata_clean(cfg="rev2")]` we
//!   check that the metadata hash for that node is the same for "rev2"
//!   as it was for "rev1".
//! - If a node is marked with `#[rustc_metadata_dirty(cfg="rev2")]` we
//! check that the metadata hash for that node is *different* for "rev2"
//! than it was for "rev1".
//!
//! Note that the metadata-testing attributes must never specify the
//! first revision. This would lead to a crash since there is no
//! previous revision to compare things to.
//!
use std::collections::HashSet;
use std::iter::FromIterator;
......@@ -49,10 +34,9 @@
use rustc::hir::def_id::DefId;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::hir::intravisit;
use rustc::ich::{Fingerprint, ATTR_DIRTY, ATTR_CLEAN, ATTR_DIRTY_METADATA,
ATTR_CLEAN_METADATA};
use rustc::ich::{ATTR_DIRTY, ATTR_CLEAN};
use syntax::ast::{self, Attribute, NestedMetaItem};
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use rustc_data_structures::fx::FxHashSet;
use syntax_pos::Span;
use rustc::ty::TyCtxt;
......@@ -553,157 +537,6 @@ fn visit_impl_item(&mut self, item: &hir::ImplItem) {
}
}
/// Checks `#[rustc_metadata_dirty]`/`#[rustc_metadata_clean]` test
/// annotations against the previous and current metadata hashes, reporting
/// an error for every mismatch and for every annotation that was never
/// reached. Does nothing unless `-Z query-dep-graph` is enabled, since the
/// metadata hashes are only recorded for testing in that mode.
pub fn check_dirty_clean_metadata<'a, 'tcx>(
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    prev_metadata_hashes: &FxHashMap<DefId, Fingerprint>,
    current_metadata_hashes: &FxHashMap<DefId, Fingerprint>)
{
    if !tcx.sess.opts.debugging_opts.query_dep_graph {
        return;
    }

    // Run with dep-graph tracking ignored so this diagnostic pass does not
    // record any dependency edges of its own.
    tcx.dep_graph.with_ignore(||{
        let krate = tcx.hir.krate();
        // First pass: visit the crate and assert the dirty/clean state of
        // every annotated node, remembering which attributes were checked.
        let mut dirty_clean_visitor = DirtyCleanMetadataVisitor {
            tcx,
            prev_metadata_hashes,
            current_metadata_hashes,
            checked_attrs: FxHashSet(),
        };
        intravisit::walk_crate(&mut dirty_clean_visitor, krate);

        // Second pass: collect *all* metadata-test attributes so we can
        // report any that the first pass never reached.
        let mut all_attrs = FindAllAttrs {
            tcx,
            attr_names: vec![ATTR_DIRTY_METADATA, ATTR_CLEAN_METADATA],
            found_attrs: vec![],
        };
        intravisit::walk_crate(&mut all_attrs, krate);

        // Note that we cannot use the existing "unused attribute"-infrastructure
        // here, since that is running before trans. This is also the reason why
        // all trans-specific attributes are `Whitelisted` in syntax::feature_gate.
        all_attrs.report_unchecked_attrs(&dirty_clean_visitor.checked_attrs);
    });
}
/// HIR visitor that checks `#[rustc_metadata_dirty]` and
/// `#[rustc_metadata_clean]` annotations by comparing the metadata hash of
/// each annotated node across the previous and current compilation sessions.
pub struct DirtyCleanMetadataVisitor<'a, 'tcx: 'a, 'm> {
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    // Metadata hashes loaded from the previous compilation session.
    prev_metadata_hashes: &'m FxHashMap<DefId, Fingerprint>,
    // Metadata hashes produced during the current session.
    current_metadata_hashes: &'m FxHashMap<DefId, Fingerprint>,
    // Attribute ids already validated; consulted afterwards to report
    // annotations that were never checked.
    checked_attrs: FxHashSet<ast::AttrId>,
}
impl<'a, 'tcx, 'm> intravisit::Visitor<'tcx> for DirtyCleanMetadataVisitor<'a, 'tcx, 'm> {
    // Visit nested items/bodies too, so annotations anywhere in the
    // crate are reached.
    fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'tcx> {
        intravisit::NestedVisitorMap::All(&self.tcx.hir)
    }

    fn visit_item(&mut self, item: &'tcx hir::Item) {
        self.check_item(item.id, item.span);
        intravisit::walk_item(self, item);
    }

    fn visit_variant(&mut self,
                     variant: &'tcx hir::Variant,
                     generics: &'tcx hir::Generics,
                     parent_id: ast::NodeId) {
        // Explicit discriminant expressions are separate HIR nodes and can
        // carry their own annotations.
        if let Some(e) = variant.node.disr_expr {
            self.check_item(e.node_id, variant.span);
        }
        intravisit::walk_variant(self, variant, generics, parent_id);
    }

    fn visit_variant_data(&mut self,
                          variant_data: &'tcx hir::VariantData,
                          _: ast::Name,
                          _: &'tcx hir::Generics,
                          _parent_id: ast::NodeId,
                          span: Span) {
        if self.tcx.hir.find(variant_data.id()).is_some() {
            // VariantData that represent structs or tuples don't have a
            // separate entry in the HIR map and checking them would error,
            // so only check if this is an enum or union variant.
            self.check_item(variant_data.id(), span);
        }
        intravisit::walk_struct_def(self, variant_data);
    }

    fn visit_trait_item(&mut self, item: &'tcx hir::TraitItem) {
        self.check_item(item.id, item.span);
        intravisit::walk_trait_item(self, item);
    }

    fn visit_impl_item(&mut self, item: &'tcx hir::ImplItem) {
        self.check_item(item.id, item.span);
        intravisit::walk_impl_item(self, item);
    }

    fn visit_foreign_item(&mut self, i: &'tcx hir::ForeignItem) {
        self.check_item(i.id, i.span);
        intravisit::walk_foreign_item(self, i);
    }

    fn visit_struct_field(&mut self, s: &'tcx hir::StructField) {
        self.check_item(s.id, s.span);
        intravisit::walk_struct_field(self, s);
    }
}
impl<'a, 'tcx, 'm> DirtyCleanMetadataVisitor<'a, 'tcx, 'm> {
    // Inspects the attributes of a single node and, if a metadata-test
    // attribute applies to the current cfg, asserts the expected
    // dirty/clean state of the node's metadata hash.
    fn check_item(&mut self, item_id: ast::NodeId, item_span: Span) {
        let def_id = self.tcx.hir.local_def_id(item_id);
        for attr in self.tcx.get_attrs(def_id).iter() {
            if attr.check_name(ATTR_DIRTY_METADATA) {
                if check_config(self.tcx, attr) {
                    // Only check each attribute once; `checked_attrs` is
                    // also used later to report unreached annotations.
                    if self.checked_attrs.insert(attr.id) {
                        self.assert_state(false, def_id, item_span);
                    }
                }
            } else if attr.check_name(ATTR_CLEAN_METADATA) {
                if check_config(self.tcx, attr) {
                    if self.checked_attrs.insert(attr.id) {
                        self.assert_state(true, def_id, item_span);
                    }
                }
            }
        }
    }

    // Compares the previous and current metadata hash for `def_id` and
    // emits a span error if the observed state does not match
    // `should_be_clean` (or if no previous hash exists at all).
    fn assert_state(&self, should_be_clean: bool, def_id: DefId, span: Span) {
        let item_path = self.tcx.item_path_str(def_id);
        debug!("assert_state({})", item_path);

        if let Some(&prev_hash) = self.prev_metadata_hashes.get(&def_id) {
            let hashes_are_equal = prev_hash == self.current_metadata_hashes[&def_id];

            if should_be_clean && !hashes_are_equal {
                self.tcx.sess.span_err(
                        span,
                        &format!("Metadata hash of `{}` is dirty, but should be clean",
                                 item_path));
            }

            let should_be_dirty = !should_be_clean;
            if should_be_dirty && hashes_are_equal {
                self.tcx.sess.span_err(
                        span,
                        &format!("Metadata hash of `{}` is clean, but should be dirty",
                                 item_path));
            }
        } else {
            self.tcx.sess.span_err(
                    span,
                    &format!("Could not find previous metadata hash of `{}`",
                             item_path));
        }
    }
}
/// Given a `#[rustc_dirty]` or `#[rustc_clean]` attribute, scan
/// for a `cfg="foo"` attribute and check whether we have a cfg
/// flag called `foo`.
......@@ -759,7 +592,6 @@ fn expect_associated_value(tcx: TyCtxt, item: &NestedMetaItem) -> ast::Name {
}
}
// A visitor that collects all #[rustc_dirty]/#[rustc_clean] attributes from
// the HIR. It is used to verify that we really ran checks for all annotated
// nodes.
......
......@@ -131,7 +131,6 @@
// File names / extensions used inside the incremental-compilation
// session directory.
const LOCK_FILE_EXT: &'static str = ".lock";
const DEP_GRAPH_FILENAME: &'static str = "dep-graph.bin";
const WORK_PRODUCTS_FILENAME: &'static str = "work-products.bin";
const METADATA_HASHES_FILENAME: &'static str = "metadata.bin";
const QUERY_CACHE_FILENAME: &'static str = "query-cache.bin";
// We encode integers using the following base, so they are shorter than decimal
......@@ -148,10 +147,6 @@ pub fn work_products_path(sess: &Session) -> PathBuf {
in_incr_comp_dir_sess(sess, WORK_PRODUCTS_FILENAME)
}
/// Path (inside the session's incr-comp directory) of the file holding the
/// exported metadata hashes.
pub fn metadata_hash_export_path(sess: &Session) -> PathBuf {
    in_incr_comp_dir_sess(sess, METADATA_HASHES_FILENAME)
}
/// Path (inside the session's incr-comp directory) of the on-disk query
/// result cache.
pub fn query_cache_path(sess: &Session) -> PathBuf {
    in_incr_comp_dir_sess(sess, QUERY_CACHE_FILENAME)
}
......
......@@ -11,12 +11,9 @@
//! Code to save/load the dep-graph from files.
use rustc::dep_graph::{PreviousDepGraph, SerializedDepGraph};
use rustc::hir::svh::Svh;
use rustc::ich::Fingerprint;
use rustc::session::Session;
use rustc::ty::TyCtxt;
use rustc::ty::maps::OnDiskCache;
use rustc::util::nodemap::DefIdMap;
use rustc_serialize::Decodable as RustcDecodable;
use rustc_serialize::opaque::Decoder;
use std::path::Path;
......@@ -106,64 +103,6 @@ fn delete_dirty_work_product(tcx: TyCtxt,
work_product::delete_workproduct_files(tcx.sess, &swp.work_product);
}
/// Loads the metadata hashes saved by the previous compilation session,
/// mapped to current `DefId`s. Returns an empty map when `-Z query-dep-graph`
/// is off, when the hashes file is missing or unreadable, or when it was
/// written by an incompatible compiler version — all of these are
/// non-fatal, since the hashes are only used for dirty/clean testing.
pub fn load_prev_metadata_hashes(tcx: TyCtxt) -> DefIdMap<Fingerprint> {
    let mut output = DefIdMap();

    if !tcx.sess.opts.debugging_opts.query_dep_graph {
        // Previous metadata hashes are only needed for testing.
        return output
    }

    debug!("load_prev_metadata_hashes() - Loading previous metadata hashes");

    let file_path = metadata_hash_export_path(tcx.sess);

    if !file_path.exists() {
        debug!("load_prev_metadata_hashes() - Couldn't find file containing \
                hashes at `{}`", file_path.display());
        return output
    }

    debug!("load_prev_metadata_hashes() - File: {}", file_path.display());

    // `read_file` validates the file-format header; `Ok(None)` means the
    // file came from a different compiler version and must be ignored.
    let (data, start_pos) = match file_format::read_file(tcx.sess, &file_path) {
        Ok(Some(data_and_pos)) => data_and_pos,
        Ok(None) => {
            debug!("load_prev_metadata_hashes() - File produced by incompatible \
                    compiler version: {}", file_path.display());
            return output
        }
        Err(err) => {
            debug!("load_prev_metadata_hashes() - Error reading file `{}`: {}",
                   file_path.display(), err);
            return output
        }
    };

    debug!("load_prev_metadata_hashes() - Decoding hashes");

    // Decode order must mirror `encode_metadata_hashes`: the SVH first
    // (discarded here), then the serialized hash table.
    let mut decoder = Decoder::new(&data, start_pos);
    let _ = Svh::decode(&mut decoder).unwrap();
    let serialized_hashes = SerializedMetadataHashes::decode(&mut decoder).unwrap();

    debug!("load_prev_metadata_hashes() - Mapping DefIds");

    assert_eq!(serialized_hashes.index_map.len(), serialized_hashes.entry_hashes.len());

    // Retrace each stored DefPathHash to a DefId of the *current* session;
    // entries whose def-path no longer exists are silently dropped.
    let def_path_hash_to_def_id = tcx.def_path_hash_to_def_id.as_ref().unwrap();

    for serialized_hash in serialized_hashes.entry_hashes {
        let def_path_hash = serialized_hashes.index_map[&serialized_hash.def_index];
        if let Some(&def_id) = def_path_hash_to_def_id.get(&def_path_hash) {
            let old = output.insert(def_id, serialized_hash.hash);
            assert!(old.is_none(), "already have hash for {:?}", def_id);
        }
    }

    debug!("load_prev_metadata_hashes() - successfully loaded {} hashes",
           serialized_hashes.index_map.len());

    output
}
pub fn load_dep_graph(sess: &Session) -> PreviousDepGraph {
let empty = PreviousDepGraph::new(SerializedDepGraph::new());
......
......@@ -9,14 +9,9 @@
// except according to those terms.
use rustc::dep_graph::{DepGraph, DepKind};
use rustc::hir::def_id::{DefId, DefIndex};
use rustc::hir::svh::Svh;
use rustc::ich::Fingerprint;
use rustc::middle::cstore::EncodedMetadataHashes;
use rustc::session::Session;
use rustc::ty::TyCtxt;
use rustc::util::common::time;
use rustc::util::nodemap::DefIdMap;
use rustc_data_structures::fx::FxHashMap;
use rustc_serialize::Encodable as RustcEncodable;
use rustc_serialize::opaque::Encoder;
......@@ -30,11 +25,7 @@
use super::file_format;
use super::work_product;
use super::load::load_prev_metadata_hashes;
pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
metadata_hashes: &EncodedMetadataHashes,
svh: Svh) {
pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
debug!("save_dep_graph()");
let _ignore = tcx.dep_graph.in_ignore();
let sess = tcx.sess;
......@@ -42,27 +33,6 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
return;
}
// We load the previous metadata hashes now before overwriting the file
// (if we need them for testing).
let prev_metadata_hashes = if tcx.sess.opts.debugging_opts.query_dep_graph {
load_prev_metadata_hashes(tcx)
} else {
DefIdMap()
};
let mut current_metadata_hashes = FxHashMap();
if sess.opts.debugging_opts.incremental_cc ||
sess.opts.debugging_opts.query_dep_graph {
save_in(sess,
metadata_hash_export_path(sess),
|e| encode_metadata_hashes(tcx,
svh,
metadata_hashes,
&mut current_metadata_hashes,
e));
}
time(sess.time_passes(), "persist query result cache", || {
save_in(sess,
query_cache_path(sess),
......@@ -78,9 +48,6 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
dirty_clean::check_dirty_clean_annotations(tcx);
dirty_clean::check_dirty_clean_metadata(tcx,
&prev_metadata_hashes,
&current_metadata_hashes);
}
pub fn save_work_products(sess: &Session, dep_graph: &DepGraph) {
......@@ -258,43 +225,6 @@ struct Stat {
Ok(())
}
/// Serializes the per-item metadata hashes (plus the crate SVH) into
/// `encoder`, and — when `-Z query-dep-graph` is on — also fills
/// `current_metadata_hashes` and the DefPathHash index map needed by the
/// dirty/clean metadata tests.
fn encode_metadata_hashes(tcx: TyCtxt,
                          svh: Svh,
                          metadata_hashes: &EncodedMetadataHashes,
                          current_metadata_hashes: &mut FxHashMap<DefId, Fingerprint>,
                          encoder: &mut Encoder)
                          -> io::Result<()> {
    // Sanity check: every def_index must appear at most once.
    assert_eq!(metadata_hashes.hashes.len(),
        metadata_hashes.hashes.iter().map(|x| (x.def_index, ())).collect::<FxHashMap<_,_>>().len());

    let mut serialized_hashes = SerializedMetadataHashes {
        entry_hashes: metadata_hashes.hashes.to_vec(),
        index_map: FxHashMap()
    };

    // The index_map (and the mirror of the hashes keyed by DefId) is only
    // needed for the auto-tests, so it is only populated in that mode.
    if tcx.sess.opts.debugging_opts.query_dep_graph {
        for serialized_hash in &serialized_hashes.entry_hashes {
            let def_id = DefId::local(DefIndex::from_u32(serialized_hash.def_index));

            // Store entry in the index_map
            let def_path_hash = tcx.def_path_hash(def_id);
            serialized_hashes.index_map.insert(def_id.index.as_u32(), def_path_hash);

            // Record hash in current_metadata_hashes
            current_metadata_hashes.insert(def_id, serialized_hash.hash);
        }

        debug!("save: stored index_map (len={}) for serialized hashes",
               serialized_hashes.index_map.len());
    }

    // Encode everything. (Order must match `load_prev_metadata_hashes`:
    // SVH first, then the hash table.)
    svh.encode(encoder)?;
    serialized_hashes.encode(encoder)?;

    Ok(())
}
fn encode_work_products(dep_graph: &DepGraph,
encoder: &mut Encoder) -> io::Result<()> {
let work_products: Vec<_> = dep_graph
......
......@@ -17,8 +17,7 @@
use rustc::ty::maps::QueryConfig;
use rustc::middle::cstore::{CrateStore, DepKind,
MetadataLoader, LinkMeta,
LoadedMacro, EncodedMetadata,
EncodedMetadataHashes, NativeLibraryKind};
LoadedMacro, EncodedMetadata, NativeLibraryKind};
use rustc::middle::stability::DeprecationEntry;
use rustc::hir::def;
use rustc::session::{CrateDisambiguator, Session};
......@@ -498,7 +497,7 @@ fn encode_metadata<'a, 'tcx>(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
link_meta: &LinkMeta,
reachable: &NodeSet)
-> (EncodedMetadata, EncodedMetadataHashes)
-> EncodedMetadata
{
encoder::encode_metadata(tcx, link_meta, reachable)
}
......
......@@ -14,12 +14,10 @@
use schema::*;
use rustc::middle::cstore::{LinkMeta, LinkagePreference, NativeLibrary,
EncodedMetadata, EncodedMetadataHashes,
EncodedMetadataHash};
EncodedMetadata};
use rustc::hir::def::CtorKind;
use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefIndex, DefId, LOCAL_CRATE};
use rustc::hir::map::definitions::{DefPathTable, GlobalMetaDataKind};
use rustc::ich::Fingerprint;
use rustc::hir::map::definitions::DefPathTable;
use rustc::middle::dependency_format::Linkage;
use rustc::middle::lang_items;
use rustc::mir;
......@@ -57,9 +55,6 @@ pub struct EncodeContext<'a, 'tcx: 'a> {
lazy_state: LazyState,
type_shorthands: FxHashMap<Ty<'tcx>, usize>,
predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
pub metadata_hashes: EncodedMetadataHashes,
pub compute_ich: bool,
}
macro_rules! encoder_methods {
......@@ -230,22 +225,10 @@ pub fn lazy_seq_ref<'b, I, T>(&mut self, iter: I) -> LazySeq<T>
// Encodes something that corresponds to a single DepNode::GlobalMetaData
// and registers the Fingerprint in the `metadata_hashes` map.
pub fn tracked<'x, DATA, R>(&'x mut self,
def_index: DefIndex,
op: fn(&mut IsolatedEncoder<'x, 'a, 'tcx>, DATA) -> R,
data: DATA)
-> R {
let mut entry_builder = IsolatedEncoder::new(self);
let ret = op(&mut entry_builder, data);
let (fingerprint, this) = entry_builder.finish();
if let Some(fingerprint) = fingerprint {
this.metadata_hashes.hashes.push(EncodedMetadataHash {
def_index: def_index.as_u32(),
hash: fingerprint,
})
}
ret
op(&mut IsolatedEncoder::new(self), data)
}
fn encode_info_for_items(&mut self) -> Index {
......@@ -311,30 +294,16 @@ fn encode_codemap(&mut self) -> LazySeq<syntax_pos::FileMap> {
fn encode_crate_root(&mut self) -> Lazy<CrateRoot> {
let mut i = self.position();
let tcx = self.tcx;
let global_metadata_def_index = move |kind: GlobalMetaDataKind| {
kind.def_index(tcx.hir.definitions().def_path_table())
};
let crate_deps = self.tracked(
global_metadata_def_index(GlobalMetaDataKind::CrateDeps),
IsolatedEncoder::encode_crate_deps,
());
let crate_deps = self.tracked(IsolatedEncoder::encode_crate_deps, ());
let dylib_dependency_formats = self.tracked(
global_metadata_def_index(GlobalMetaDataKind::DylibDependencyFormats),
IsolatedEncoder::encode_dylib_dependency_formats,
());
let dep_bytes = self.position() - i;
// Encode the language items.
i = self.position();
let lang_items = self.tracked(
global_metadata_def_index(GlobalMetaDataKind::LangItems),
IsolatedEncoder::encode_lang_items,
());
let lang_items = self.tracked(IsolatedEncoder::encode_lang_items, ());
let lang_items_missing = self.tracked(
global_metadata_def_index(GlobalMetaDataKind::LangItemsMissing),
IsolatedEncoder::encode_lang_items_missing,
());
let lang_item_bytes = self.position() - i;
......@@ -342,7 +311,6 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot> {
// Encode the native libraries used
i = self.position();
let native_libraries = self.tracked(
global_metadata_def_index(GlobalMetaDataKind::NativeLibraries),
IsolatedEncoder::encode_native_libraries,
());
let native_lib_bytes = self.position() - i;
......@@ -359,16 +327,12 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot> {
// Encode the def IDs of impls, for coherence checking.
i = self.position();
let impls = self.tracked(
global_metadata_def_index(GlobalMetaDataKind::Impls),
IsolatedEncoder::encode_impls,
());
let impls = self.tracked(IsolatedEncoder::encode_impls, ());
let impl_bytes = self.position() - i;
// Encode exported symbols info.
i = self.position();
let exported_symbols = self.tracked(
global_metadata_def_index(GlobalMetaDataKind::ExportedSymbols),
IsolatedEncoder::encode_exported_symbols,
self.exported_symbols);
let exported_symbols_bytes = self.position() - i;
......@@ -421,11 +385,6 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot> {
let total_bytes = self.position();
self.metadata_hashes.hashes.push(EncodedMetadataHash {
def_index: global_metadata_def_index(GlobalMetaDataKind::Krate).as_u32(),
hash: Fingerprint::from_smaller_hash(link_meta.crate_hash.as_u64())
});
if self.tcx.sess.meta_stats() {
let mut zero_bytes = 0;
for e in self.opaque.cursor.get_ref() {
......@@ -1654,7 +1613,7 @@ fn visit_impl_item(&mut self, _impl_item: &'v hir::ImplItem) {
pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
link_meta: &LinkMeta,
exported_symbols: &NodeSet)
-> (EncodedMetadata, EncodedMetadataHashes)
-> EncodedMetadata
{
let mut cursor = Cursor::new(vec![]);
cursor.write_all(METADATA_HEADER).unwrap();
......@@ -1662,11 +1621,7 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// Will be filled with the root position after encoding everything.
cursor.write_all(&[0, 0, 0, 0]).unwrap();
let compute_ich = (tcx.sess.opts.debugging_opts.query_dep_graph ||
tcx.sess.opts.debugging_opts.incremental_cc) &&
tcx.sess.opts.build_dep_graph();
let (root, metadata_hashes) = {
let root = {
let mut ecx = EncodeContext {
opaque: opaque::Encoder::new(&mut cursor),
tcx,
......@@ -1675,8 +1630,6 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
lazy_state: LazyState::NoNode,
type_shorthands: Default::default(),
predicate_shorthands: Default::default(),
metadata_hashes: EncodedMetadataHashes::new(),
compute_ich,
};
// Encode the rustc version string in a predictable location.
......@@ -1684,8 +1637,7 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// Encode all the entries and extra information in the crate,
// culminating in the `CrateRoot` which points to all of it.
let root = ecx.encode_crate_root();
(root, ecx.metadata_hashes)
ecx.encode_crate_root()
};
let mut result = cursor.into_inner();
......@@ -1697,7 +1649,7 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
result[header + 2] = (pos >> 8) as u8;
result[header + 3] = (pos >> 0) as u8;
(EncodedMetadata { raw_data: result }, metadata_hashes)
EncodedMetadata { raw_data: result }
}
pub fn get_repr_options<'a, 'tcx, 'gcx>(tcx: &TyCtxt<'a, 'tcx, 'gcx>, did: DefId) -> ReprOptions {
......
......@@ -62,7 +62,6 @@
use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::middle::cstore::EncodedMetadataHash;
use rustc::ty::TyCtxt;
use syntax::ast;
......@@ -128,19 +127,10 @@ pub fn record<'x, DATA>(&'x mut self,
// unclear whether that would be a win since hashing is cheap enough.
let _task = tcx.dep_graph.in_ignore();
let ecx: &'x mut EncodeContext<'b, 'tcx> = &mut *self.ecx;
let mut entry_builder = IsolatedEncoder::new(ecx);
let mut entry_builder = IsolatedEncoder::new(self.ecx);
let entry = op(&mut entry_builder, data);
let entry = entry_builder.lazy(&entry);
let (fingerprint, ecx) = entry_builder.finish();
if let Some(hash) = fingerprint {
ecx.metadata_hashes.hashes.push(EncodedMetadataHash {
def_index: id.index.as_u32(),
hash,
});
}
self.items.record(id, entry);
}
......
......@@ -10,12 +10,7 @@
use encoder::EncodeContext;
use schema::{Lazy, LazySeq};
use rustc::ich::{StableHashingContext, Fingerprint};
use rustc::ty::TyCtxt;
use rustc_data_structures::accumulate_vec::AccumulateVec;
use rustc_data_structures::stable_hasher::{StableHasher, HashStable};
use rustc_serialize::Encodable;
/// The IsolatedEncoder provides facilities to write to crate metadata while
......@@ -23,148 +18,47 @@
pub struct IsolatedEncoder<'a, 'b: 'a, 'tcx: 'b> {
pub tcx: TyCtxt<'b, 'tcx, 'tcx>,
ecx: &'a mut EncodeContext<'b, 'tcx>,
hcx: Option<(StableHashingContext<'tcx>, StableHasher<Fingerprint>)>,
}
impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> {
pub fn new(ecx: &'a mut EncodeContext<'b, 'tcx>) -> Self {
let tcx = ecx.tcx;
let compute_ich = ecx.compute_ich;
IsolatedEncoder {
tcx,
ecx,
hcx: if compute_ich {
// We are always hashing spans for things in metadata because we
// don't know if a downstream crate will use them or not.
// Except when -Zquery-dep-graph is specified because we don't
// want to mess up our tests.
let hcx = if tcx.sess.opts.debugging_opts.query_dep_graph {
tcx.create_stable_hashing_context()
} else {
tcx.create_stable_hashing_context().force_span_hashing()
};
Some((hcx, StableHasher::new()))
} else {
None
}
}
}
pub fn finish(self) -> (Option<Fingerprint>, &'a mut EncodeContext<'b, 'tcx>) {
if let Some((_, hasher)) = self.hcx {
(Some(hasher.finish()), self.ecx)
} else {
(None, self.ecx)
}
}
pub fn lazy<T>(&mut self, value: &T) -> Lazy<T>
where T: Encodable + HashStable<StableHashingContext<'tcx>>
where T: Encodable
{
if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
value.hash_stable(hcx, hasher);
debug!("metadata-hash: {:?}", hasher);
}
self.ecx.lazy(value)
}
pub fn lazy_seq<I, T>(&mut self, iter: I) -> LazySeq<T>
where I: IntoIterator<Item = T>,
T: Encodable + HashStable<StableHashingContext<'tcx>>
T: Encodable
{
if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
let iter = iter.into_iter();
let (lower_bound, upper_bound) = iter.size_hint();
if upper_bound == Some(lower_bound) {
lower_bound.hash_stable(hcx, hasher);
let mut num_items_hashed = 0;
let ret = self.ecx.lazy_seq(iter.inspect(|item| {
item.hash_stable(hcx, hasher);
num_items_hashed += 1;
}));
// Sometimes items in a sequence are filtered out without being
// hashed (e.g. for &[ast::Attribute]) and this code path cannot
// handle that correctly, so we want to make sure we didn't hit
// it by accident.
if lower_bound != num_items_hashed {
bug!("Hashed a different number of items ({}) than expected ({})",
num_items_hashed,
lower_bound);
}
debug!("metadata-hash: {:?}", hasher);
ret
} else {
// Collect into a vec so we know the length of the sequence
let items: AccumulateVec<[T; 32]> = iter.collect();
items.hash_stable(hcx, hasher);
debug!("metadata-hash: {:?}", hasher);
self.ecx.lazy_seq(items)
}
} else {
self.ecx.lazy_seq(iter)
}
self.ecx.lazy_seq(iter)
}
pub fn lazy_seq_ref<'x, I, T>(&mut self, iter: I) -> LazySeq<T>
where I: IntoIterator<Item = &'x T>,
T: 'x + Encodable + HashStable<StableHashingContext<'tcx>>
T: 'x + Encodable
{
if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
let iter = iter.into_iter();
let (lower_bound, upper_bound) = iter.size_hint();
if upper_bound == Some(lower_bound) {
lower_bound.hash_stable(hcx, hasher);
let mut num_items_hashed = 0;
let ret = self.ecx.lazy_seq_ref(iter.inspect(|item| {
item.hash_stable(hcx, hasher);
num_items_hashed += 1;
}));
// Sometimes items in a sequence are filtered out without being
// hashed (e.g. for &[ast::Attribute]) and this code path cannot
// handle that correctly, so we want to make sure we didn't hit
// it by accident.
if lower_bound != num_items_hashed {
bug!("Hashed a different number of items ({}) than expected ({})",
num_items_hashed,
lower_bound);
}
debug!("metadata-hash: {:?}", hasher);
ret
} else {
// Collect into a vec so we know the length of the sequence
let items: AccumulateVec<[&'x T; 32]> = iter.collect();
items.hash_stable(hcx, hasher);
debug!("metadata-hash: {:?}", hasher);
self.ecx.lazy_seq_ref(items.iter().map(|x| *x))
}
} else {
self.ecx.lazy_seq_ref(iter)
}
self.ecx.lazy_seq_ref(iter)
}
pub fn lazy_seq_from_slice<T>(&mut self, slice: &[T]) -> LazySeq<T>
where T: Encodable + HashStable<StableHashingContext<'tcx>>
where T: Encodable
{
if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
slice.hash_stable(hcx, hasher);
debug!("metadata-hash: {:?}", hasher);
}
self.ecx.lazy_seq_ref(slice.iter())
}
pub fn lazy_seq_ref_from_slice<T>(&mut self, slice: &[&T]) -> LazySeq<T>
where T: Encodable + HashStable<StableHashingContext<'tcx>>
where T: Encodable
{
if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
slice.hash_stable(hcx, hasher);
debug!("metadata-hash: {:?}", hasher);
}
self.ecx.lazy_seq_ref(slice.iter().map(|x| *x))
}
}
......@@ -39,7 +39,7 @@
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::middle::lang_items::StartFnLangItem;
use rustc::middle::trans::{Linkage, Visibility, Stats};
use rustc::middle::cstore::{EncodedMetadata, EncodedMetadataHashes};
use rustc::middle::cstore::EncodedMetadata;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::layout::{self, Align, TyLayout, LayoutOf};
use rustc::ty::maps::Providers;
......@@ -602,8 +602,7 @@ fn write_metadata<'a, 'gcx>(tcx: TyCtxt<'a, 'gcx, 'gcx>,
llmod_id: &str,
link_meta: &LinkMeta,
exported_symbols: &NodeSet)
-> (ContextRef, ModuleRef,
EncodedMetadata, EncodedMetadataHashes) {
-> (ContextRef, ModuleRef, EncodedMetadata) {
use std::io::Write;
use flate2::Compression;
use flate2::write::DeflateEncoder;
......@@ -635,13 +634,12 @@ enum MetadataKind {
if kind == MetadataKind::None {
return (metadata_llcx,
metadata_llmod,
EncodedMetadata::new(),
EncodedMetadataHashes::new());
EncodedMetadata::new());
}
let (metadata, hashes) = tcx.encode_metadata(link_meta, exported_symbols);
let metadata = tcx.encode_metadata(link_meta, exported_symbols);
if kind == MetadataKind::Uncompressed {
return (metadata_llcx, metadata_llmod, metadata, hashes);
return (metadata_llcx, metadata_llmod, metadata);
}
assert!(kind == MetadataKind::Compressed);
......@@ -669,7 +667,7 @@ enum MetadataKind {
let directive = CString::new(directive).unwrap();
llvm::LLVMSetModuleInlineAsm(metadata_llmod, directive.as_ptr())
}
return (metadata_llcx, metadata_llmod, metadata, hashes);
return (metadata_llcx, metadata_llmod, metadata);
}
pub struct ValueIter {
......@@ -720,7 +718,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let shared_ccx = SharedCrateContext::new(tcx);
// Translate the metadata.
let llmod_id = "metadata";
let (metadata_llcx, metadata_llmod, metadata, metadata_incr_hashes) =
let (metadata_llcx, metadata_llmod, metadata) =
time(tcx.sess.time_passes(), "write metadata", || {
write_metadata(tcx, llmod_id, &link_meta, &exported_symbol_node_ids)
});
......@@ -756,9 +754,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ongoing_translation.submit_pre_translated_module_to_llvm(tcx, metadata_module);
ongoing_translation.translation_finished(tcx);
assert_and_save_dep_graph(tcx,
metadata_incr_hashes,
link_meta);
assert_and_save_dep_graph(tcx);
ongoing_translation.check_for_errors(tcx.sess);
......@@ -932,24 +928,18 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ongoing_translation.check_for_errors(tcx.sess);
assert_and_save_dep_graph(tcx,
metadata_incr_hashes,
link_meta);
assert_and_save_dep_graph(tcx);
ongoing_translation
}
fn assert_and_save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
metadata_incr_hashes: EncodedMetadataHashes,
link_meta: LinkMeta) {
fn assert_and_save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
time(tcx.sess.time_passes(),
"assert dep graph",
|| rustc_incremental::assert_dep_graph(tcx));
time(tcx.sess.time_passes(),
"serialize dep graph",
|| rustc_incremental::save_dep_graph(tcx,
&metadata_incr_hashes,
link_meta.crate_hash));
|| rustc_incremental::save_dep_graph(tcx));
}
#[inline(never)] // give this a place in the profiler
......
......@@ -201,7 +201,7 @@ fn trans_crate<'a, 'tcx>(
.fingerprint_of(&DepNode::new_no_params(DepKind::Krate));
let link_meta = build_link_meta(crate_hash);
let exported_symbols = ::find_exported_symbols(tcx);
let (metadata, _hashes) = tcx.encode_metadata(&link_meta, &exported_symbols);
let metadata = tcx.encode_metadata(&link_meta, &exported_symbols);
OngoingCrateTranslation {
metadata: metadata,
......
......@@ -742,18 +742,6 @@ pub fn is_builtin_attr(attr: &ast::Attribute) -> bool {
is just used for rustc unit tests \
and will never be stable",
cfg_fn!(rustc_attrs))),
("rustc_metadata_dirty", Whitelisted, Gated(Stability::Unstable,
"rustc_attrs",
"the `#[rustc_metadata_dirty]` attribute \
is just used for rustc unit tests \
and will never be stable",
cfg_fn!(rustc_attrs))),
("rustc_metadata_clean", Whitelisted, Gated(Stability::Unstable,
"rustc_attrs",
"the `#[rustc_metadata_clean]` attribute \
is just used for rustc unit tests \
and will never be stable",
cfg_fn!(rustc_attrs))),
("rustc_partition_reused", Whitelisted, Gated(Stability::Unstable,
"rustc_attrs",
"this attribute \
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册