Commit 17412bae authored by Andrew Xie

Removed use of iteration through a HashMap/HashSet in rustc_incremental and replaced with IndexMap/IndexSet
Parent dcf3571c
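The motivation, which the removal of `#![allow(rustc::potential_query_instability)]` further down reflects, is that iterating a `HashMap`/`HashSet` makes no ordering guarantee, while `FxIndexMap`/`FxIndexSet` iterate in insertion order, so anything derived from that iteration (encoded work-product indices, diagnostics, graphviz dumps) stays deterministic. A minimal sketch of the difference, using the `indexmap` crate that backs `FxIndexMap` so it runs outside the compiler; the keys and file names are made up for illustration:

```rust
// Illustrative only (requires the `indexmap` crate); not part of the commit.
use std::collections::HashMap;
use indexmap::IndexMap;

fn main() {
    let files = [("o", "foo.o"), ("dwo", "foo.dwo"), ("bc", "foo.bc")];

    // HashMap: iteration order is unspecified; with std's default
    // RandomState it can even change from run to run, so output derived
    // from it (e.g. a serialized index) would not be reproducible.
    let hashed: HashMap<_, _> = files.iter().copied().collect();
    let hashed_keys: Vec<_> = hashed.keys().copied().collect();

    // IndexMap: iteration follows insertion order, so anything built by
    // walking the map is the same on every run.
    let indexed: IndexMap<_, _> = files.iter().copied().collect();
    let indexed_keys: Vec<_> = indexed.keys().copied().collect();

    println!("HashMap order (unspecified): {hashed_keys:?}");
    println!("IndexMap order (stable):     {indexed_keys:?}");
    assert_eq!(indexed_keys, vec!["o", "dwo", "bc"]);
}
```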
@@ -34,7 +34,7 @@
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::ModuleCodegen;
use rustc_codegen_ssa::{CodegenResults, CompiledModule};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::fx::FxIndexMap;
use rustc_errors::{DiagnosticMessage, ErrorGuaranteed, FatalError, Handler, SubdiagnosticMessage};
use rustc_fluent_macro::fluent_messages;
use rustc_metadata::EncodedMetadata;
@@ -356,7 +356,7 @@ fn join_codegen(
ongoing_codegen: Box<dyn Any>,
sess: &Session,
outputs: &OutputFilenames,
) -> Result<(CodegenResults, FxHashMap<WorkProductId, WorkProduct>), ErrorGuaranteed> {
) -> Result<(CodegenResults, FxIndexMap<WorkProductId, WorkProduct>), ErrorGuaranteed> {
let (codegen_results, work_products) = ongoing_codegen
.downcast::<rustc_codegen_ssa::back::write::OngoingCodegen<LlvmCodegenBackend>>()
.expect("Expected LlvmCodegenBackend's OngoingCodegen, found Box<Any>")
......
@@ -9,7 +9,7 @@
};
use jobserver::{Acquired, Client};
use rustc_ast::attr;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_data_structures::memmap::Mmap;
use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_data_structures::profiling::TimingGuard;
@@ -498,8 +498,8 @@ pub fn start_async_codegen<B: ExtraBackendMethods>(
fn copy_all_cgu_workproducts_to_incr_comp_cache_dir(
sess: &Session,
compiled_modules: &CompiledModules,
) -> FxHashMap<WorkProductId, WorkProduct> {
let mut work_products = FxHashMap::default();
) -> FxIndexMap<WorkProductId, WorkProduct> {
let mut work_products = FxIndexMap::default();
if sess.opts.incremental.is_none() {
return work_products;
@@ -1885,7 +1885,7 @@ pub struct OngoingCodegen<B: ExtraBackendMethods> {
}
impl<B: ExtraBackendMethods> OngoingCodegen<B> {
pub fn join(self, sess: &Session) -> (CodegenResults, FxHashMap<WorkProductId, WorkProduct>) {
pub fn join(self, sess: &Session) -> (CodegenResults, FxIndexMap<WorkProductId, WorkProduct>) {
let _timer = sess.timer("finish_ongoing_codegen");
self.shared_emitter_main.check(sess, true);
......
@@ -6,7 +6,7 @@
use crate::{CodegenResults, ModuleCodegen};
use rustc_ast::expand::allocator::AllocatorKind;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::sync::{DynSend, DynSync};
use rustc_errors::ErrorGuaranteed;
use rustc_metadata::EncodedMetadata;
@@ -101,7 +101,7 @@ fn join_codegen(
ongoing_codegen: Box<dyn Any>,
sess: &Session,
outputs: &OutputFilenames,
) -> Result<(CodegenResults, FxHashMap<WorkProductId, WorkProduct>), ErrorGuaranteed>;
) -> Result<(CodegenResults, FxIndexMap<WorkProductId, WorkProduct>), ErrorGuaranteed>;
/// This is called on the returned `Box<dyn Any>` from `join_codegen`
///
......
@@ -35,7 +35,7 @@
use crate::errors;
use rustc_ast as ast;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::fx::FxIndexSet;
use rustc_data_structures::graph::implementation::{Direction, NodeIndex, INCOMING, OUTGOING};
use rustc_graphviz as dot;
use rustc_hir as hir;
@@ -258,7 +258,7 @@ fn dump_graph(query: &DepGraphQuery) {
}
#[allow(missing_docs)]
pub struct GraphvizDepGraph(FxHashSet<DepKind>, Vec<(DepKind, DepKind)>);
pub struct GraphvizDepGraph(FxIndexSet<DepKind>, Vec<(DepKind, DepKind)>);
impl<'a> dot::GraphWalk<'a> for GraphvizDepGraph {
type Node = DepKind;
@@ -303,7 +303,7 @@ fn node_label(&self, n: &DepKind) -> dot::LabelText<'_> {
fn node_set<'q>(
query: &'q DepGraphQuery,
filter: &DepNodeFilter,
) -> Option<FxHashSet<&'q DepNode>> {
) -> Option<FxIndexSet<&'q DepNode>> {
debug!("node_set(filter={:?})", filter);
if filter.accepts_all() {
@@ -315,9 +315,9 @@ fn node_set<'q>(
fn filter_nodes<'q>(
query: &'q DepGraphQuery,
sources: &Option<FxHashSet<&'q DepNode>>,
targets: &Option<FxHashSet<&'q DepNode>>,
) -> FxHashSet<DepKind> {
sources: &Option<FxIndexSet<&'q DepNode>>,
targets: &Option<FxIndexSet<&'q DepNode>>,
) -> FxIndexSet<DepKind> {
if let Some(sources) = sources {
if let Some(targets) = targets {
walk_between(query, sources, targets)
@@ -333,10 +333,10 @@ fn filter_nodes<'q>(
fn walk_nodes<'q>(
query: &'q DepGraphQuery,
starts: &FxHashSet<&'q DepNode>,
starts: &FxIndexSet<&'q DepNode>,
direction: Direction,
) -> FxHashSet<DepKind> {
let mut set = FxHashSet::default();
) -> FxIndexSet<DepKind> {
let mut set = FxIndexSet::default();
for &start in starts {
debug!("walk_nodes: start={:?} outgoing?={:?}", start, direction == OUTGOING);
if set.insert(start.kind) {
@@ -357,9 +357,9 @@ fn walk_nodes<'q>(
fn walk_between<'q>(
query: &'q DepGraphQuery,
sources: &FxHashSet<&'q DepNode>,
targets: &FxHashSet<&'q DepNode>,
) -> FxHashSet<DepKind> {
sources: &FxIndexSet<&'q DepNode>,
targets: &FxIndexSet<&'q DepNode>,
) -> FxIndexSet<DepKind> {
// This is a bit tricky. We want to include a node only if it is:
// (a) reachable from a source and (b) will reach a target. And we
// have to be careful about cycles etc. Luckily efficiency is not
@@ -426,8 +426,8 @@ fn recurse(query: &DepGraphQuery, node_states: &mut [State], node: NodeIndex) ->
}
}
fn filter_edges(query: &DepGraphQuery, nodes: &FxHashSet<DepKind>) -> Vec<(DepKind, DepKind)> {
let uniq: FxHashSet<_> = query
fn filter_edges(query: &DepGraphQuery, nodes: &FxIndexSet<DepKind>) -> Vec<(DepKind, DepKind)> {
let uniq: FxIndexSet<_> = query
.edges()
.into_iter()
.map(|(s, t)| (s.kind, t.kind))
......
@@ -24,7 +24,7 @@
use crate::errors;
use rustc_ast as ast;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::fx::FxIndexSet;
use rustc_hir::def_id::LOCAL_CRATE;
use rustc_middle::mir::mono::CodegenUnitNameBuilder;
use rustc_middle::ty::TyCtxt;
@@ -52,7 +52,7 @@ pub fn assert_module_sources(tcx: TyCtxt<'_>) {
struct AssertModuleSource<'tcx> {
tcx: TyCtxt<'tcx>,
available_cgus: FxHashSet<Symbol>,
available_cgus: FxIndexSet<Symbol>,
}
impl<'tcx> AssertModuleSource<'tcx> {
......
@@ -4,7 +4,6 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![feature(never_type)]
#![recursion_limit = "256"]
#![allow(rustc::potential_query_instability)]
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]
......
@@ -21,7 +21,7 @@
use crate::errors;
use rustc_ast::{self as ast, Attribute, NestedMetaItem};
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::fx::{FxHashSet, FxIndexSet};
use rustc_hir::def_id::LocalDefId;
use rustc_hir::intravisit;
use rustc_hir::Node as HirNode;
@@ -125,7 +125,7 @@
//
// type_of for these.
type Labels = FxHashSet<String>;
type Labels = FxIndexSet<String>;
/// Represents the requested configuration by rustc_clean/dirty
struct Assertion {
......
@@ -104,7 +104,7 @@
//! implemented.
use crate::errors;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet};
use rustc_data_structures::svh::Svh;
use rustc_data_structures::{base_n, flock};
use rustc_errors::ErrorGuaranteed;
@@ -635,8 +635,8 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
// First do a pass over the crate directory, collecting lock files and
// session directories
let mut session_directories = FxHashSet::default();
let mut lock_files = FxHashSet::default();
let mut session_directories = FxIndexSet::default();
let mut lock_files = FxIndexSet::default();
for dir_entry in crate_directory.read_dir()? {
let Ok(dir_entry) = dir_entry else {
@@ -659,7 +659,7 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
}
// Now map from lock files to session directories
let lock_file_to_session_dir: FxHashMap<String, Option<String>> = lock_files
let lock_file_to_session_dir: FxIndexMap<String, Option<String>> = lock_files
.into_iter()
.map(|lock_file_name| {
assert!(lock_file_name.ends_with(LOCK_FILE_EXT));
@@ -705,7 +705,7 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
}
// Filter out `None` directories
let lock_file_to_session_dir: FxHashMap<String, String> = lock_file_to_session_dir
let lock_file_to_session_dir: FxIndexMap<String, String> = lock_file_to_session_dir
.into_iter()
.filter_map(|(lock_file_name, directory_name)| directory_name.map(|n| (lock_file_name, n)))
.collect();
@@ -846,7 +846,7 @@ fn delete_old(sess: &Session, path: &Path) {
fn all_except_most_recent(
deletion_candidates: Vec<(SystemTime, PathBuf, Option<flock::Lock>)>,
) -> FxHashMap<PathBuf, Option<flock::Lock>> {
) -> FxIndexMap<PathBuf, Option<flock::Lock>> {
let most_recent = deletion_candidates.iter().map(|&(timestamp, ..)| timestamp).max();
if let Some(most_recent) = most_recent {
@@ -856,7 +856,7 @@ fn all_except_most_recent(
.map(|(_, path, lock)| (path, lock))
.collect()
} else {
FxHashMap::default()
FxIndexMap::default()
}
}
......
//! Code to save/load the dep-graph from files.
use crate::errors;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::memmap::Mmap;
use rustc_middle::dep_graph::{SerializedDepGraph, WorkProduct, WorkProductId};
use rustc_middle::query::on_disk_cache::OnDiskCache;
@@ -16,7 +16,7 @@
use super::fs::*;
use super::work_product;
type WorkProductMap = FxHashMap<WorkProductId, WorkProduct>;
type WorkProductMap = FxIndexMap<WorkProductId, WorkProduct>;
#[derive(Debug)]
/// Represents the result of an attempt to load incremental compilation data.
@@ -147,7 +147,7 @@ pub fn load_dep_graph(sess: &Session) -> DepGraphFuture {
let report_incremental_info = sess.opts.unstable_opts.incremental_info;
let expected_hash = sess.opts.dep_tracking_hash(false);
let mut prev_work_products = FxHashMap::default();
let mut prev_work_products = FxIndexMap::default();
// If we are only building with -Zquery-dep-graph but without an actual
// incr. comp. session directory, we skip this. Otherwise we'd fail
......
use crate::errors;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::sync::join;
use rustc_middle::dep_graph::{DepGraph, SerializedDepGraph, WorkProduct, WorkProductId};
use rustc_middle::ty::TyCtxt;
@@ -79,7 +79,7 @@ pub fn save_dep_graph(tcx: TyCtxt<'_>) {
pub fn save_work_product_index(
sess: &Session,
dep_graph: &DepGraph,
new_work_products: FxHashMap<WorkProductId, WorkProduct>,
new_work_products: FxIndexMap<WorkProductId, WorkProduct>,
) {
if sess.opts.incremental.is_none() {
return;
@@ -119,7 +119,7 @@ pub fn save_work_product_index(
}
fn encode_work_product_index(
work_products: &FxHashMap<WorkProductId, WorkProduct>,
work_products: &FxIndexMap<WorkProductId, WorkProduct>,
encoder: &mut FileEncoder,
) {
let serialized_products: Vec<_> = work_products
@@ -146,7 +146,7 @@ fn encode_query_cache(tcx: TyCtxt<'_>, encoder: FileEncoder) -> FileEncodeResult
pub fn build_dep_graph(
sess: &Session,
prev_graph: SerializedDepGraph,
prev_work_products: FxHashMap<WorkProductId, WorkProduct>,
prev_work_products: FxIndexMap<WorkProductId, WorkProduct>,
) -> Option<DepGraph> {
if sess.opts.incremental.is_none() {
// No incremental compilation.
......
@@ -4,7 +4,7 @@
use crate::errors;
use crate::persist::fs::*;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::fx::FxIndexMap;
use rustc_fs_util::link_or_copy;
use rustc_middle::dep_graph::{WorkProduct, WorkProductId};
use rustc_session::Session;
@@ -20,7 +20,7 @@ pub fn copy_cgu_workproduct_to_incr_comp_cache_dir(
debug!(?cgu_name, ?files);
sess.opts.incremental.as_ref()?;
let mut saved_files = FxHashMap::default();
let mut saved_files = FxIndexMap::default();
for (ext, path) in files {
let file_name = format!("{cgu_name}.{ext}");
let path_in_incr_dir = in_incr_comp_dir_sess(sess, &file_name);
......
use parking_lot::Mutex;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
use rustc_data_structures::profiling::{EventId, QueryInvocationId, SelfProfilerRef};
use rustc_data_structures::sharded::{self, Sharded};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
@@ -93,7 +93,7 @@ pub struct DepGraphData<K: DepKind> {
/// things available to us. If we find that they are not dirty, we
/// load the path to the file storing those work-products here into
/// this map. We can later look for and extract that data.
previous_work_products: FxHashMap<WorkProductId, WorkProduct>,
previous_work_products: FxIndexMap<WorkProductId, WorkProduct>,
dep_node_debug: Lock<FxHashMap<DepNode<K>, String>>,
@@ -116,7 +116,7 @@ impl<K: DepKind> DepGraph<K> {
pub fn new(
profiler: &SelfProfilerRef,
prev_graph: SerializedDepGraph<K>,
prev_work_products: FxHashMap<WorkProductId, WorkProduct>,
prev_work_products: FxIndexMap<WorkProductId, WorkProduct>,
encoder: FileEncoder,
record_graph: bool,
record_stats: bool,
@@ -688,7 +688,7 @@ pub fn previous_work_product(&self, v: &WorkProductId) -> Option<WorkProduct> {
/// Access the map of work-products created during the cached run. Only
/// used during saving of the dep-graph.
pub fn previous_work_products(&self) -> &FxHashMap<WorkProductId, WorkProduct> {
pub fn previous_work_products(&self) -> &FxIndexMap<WorkProductId, WorkProduct> {
&self.data.as_ref().unwrap().previous_work_products
}
@@ -1048,7 +1048,7 @@ pub struct WorkProduct {
///
/// By convention, file extensions are currently used as identifiers, i.e. the key "o" maps to
/// the object file's path, and "dwo" to the dwarf object file's path.
pub saved_files: FxHashMap<String, String>,
pub saved_files: FxIndexMap<String, String>,
}
// Index type for `DepNodeData`'s edges.
......
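For the set-based graph walks above (`walk_nodes`, `walk_between`, `filter_edges`), the swap is largely mechanical because an index set exposes the same `insert`-returns-whether-new surface as a hash set while iterating in insertion order. A simplified, illustrative analogue, using plain `indexmap::IndexSet` and `u32` node ids in place of the compiler's `FxIndexSet<DepKind>`:

```rust
// Illustrative only: a stripped-down analogue of the walk in the diff
// above, not compiler code. The point is that `IndexSet::insert` works
// like `HashSet::insert`, but the final iteration order is simply the
// order in which nodes were first discovered, so it is reproducible.
use indexmap::IndexSet;

fn walk_reachable(edges: &[(u32, u32)], starts: &[u32]) -> IndexSet<u32> {
    let mut set = IndexSet::new();
    let mut stack: Vec<u32> = starts.to_vec();
    while let Some(node) = stack.pop() {
        // `insert` returns true only the first time a node is seen,
        // so each node's successors are pushed exactly once.
        if set.insert(node) {
            for &(src, dst) in edges {
                if src == node {
                    stack.push(dst);
                }
            }
        }
    }
    set
}

fn main() {
    let edges = [(1, 2), (1, 3), (2, 4), (3, 4)];
    let reached = walk_reachable(&edges, &[1]);
    // Deterministic discovery order for this input: 1, then 3, 4, 2.
    assert_eq!(reached.iter().copied().collect::<Vec<_>>(), vec![1, 3, 4, 2]);
    println!("{reached:?}");
}
```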