提交 d129ac26 编写于 作者: R Rich Kadel

Moved coverage counter injection from BasicBlock to Statement.

上级 01ffbcbe
......@@ -1949,70 +1949,6 @@
#[rustc_const_unstable(feature = "const_ptr_offset_from", issue = "41079")]
pub fn ptr_offset_from<T>(ptr: *const T, base: *const T) -> isize;
/// Internal placeholder for injecting code coverage counters when the "instrument-coverage"
/// option is enabled. The source code region information is extracted prior to code generation,
/// and added to the "coverage map", which is injected into the generated code as additional
/// data. This intrinsic then triggers the generation of LLVM intrinsic call
/// `instrprof.increment`, using the remaining args (`function_source_hash` and `index`).
#[cfg(not(bootstrap))]
#[lang = "count_code_region"]
pub fn count_code_region(
function_source_hash: u64,
index: u32,
file_name: &'static str,
start_line: u32,
start_col: u32,
end_line: u32,
end_col: u32,
);
/// Internal marker for code coverage expressions, injected into the MIR when the
/// "instrument-coverage" option is enabled. This intrinsic is not converted into a
/// backend intrinsic call, but its arguments are extracted during the production of a
/// "coverage map", which is injected into the generated code, as additional data.
/// This marker identifies a code region and two other counters or counter expressions
/// whose sum is the number of times the code region was executed.
#[cfg(not(bootstrap))]
#[lang = "coverage_counter_add"]
pub fn coverage_counter_add(
index: u32,
left_index: u32,
right_index: u32,
file_name: &'static str,
start_line: u32,
start_col: u32,
end_line: u32,
end_col: u32,
);
/// This marker identifies a code region and two other counters or counter expressions
/// whose difference is the number of times the code region was executed.
/// (See `coverage_counter_add` for more information.)
#[cfg(not(bootstrap))]
#[lang = "coverage_counter_subtract"]
pub fn coverage_counter_subtract(
index: u32,
left_index: u32,
right_index: u32,
file_name: &'static str,
start_line: u32,
start_col: u32,
end_line: u32,
end_col: u32,
);
/// This marker identifies a code region to be added to the "coverage map" to indicate source
/// code that can never be reached.
/// (See `coverage_counter_add` for more information.)
#[cfg(not(bootstrap))]
pub fn coverage_unreachable(
file_name: &'static str,
start_line: u32,
start_col: u32,
end_line: u32,
end_col: u32,
);
/// See documentation of `<*const T>::guaranteed_eq` for details.
#[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
pub fn ptr_guaranteed_eq<T>(ptr: *const T, other: *const T) -> bool;
......
......@@ -1117,7 +1117,7 @@ fn instrprof_increment(
hash: &'ll Value,
num_counters: &'ll Value,
index: &'ll Value,
) -> &'ll Value {
) {
debug!(
"instrprof_increment() with args ({:?}, {:?}, {:?}, {:?})",
fn_name, hash, num_counters, index
......@@ -1128,13 +1128,13 @@ fn instrprof_increment(
let args = self.check_call("call", llfn, args);
unsafe {
llvm::LLVMRustBuildCall(
let _ = llvm::LLVMRustBuildCall(
self.llbuilder,
llfn,
args.as_ptr() as *const &llvm::Value,
args.len() as c_uint,
None,
)
);
}
}
......
......@@ -3,14 +3,16 @@
use crate::llvm;
use llvm::coverageinfo::CounterMappingRegion;
use rustc_codegen_ssa::coverageinfo::map::{Counter, CounterExpression, Region};
use rustc_codegen_ssa::coverageinfo::map::{Counter, CounterExpression};
use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods};
use rustc_data_structures::fx::FxIndexSet;
use rustc_llvm::RustString;
use tracing::debug;
use rustc_middle::mir::coverage::CodeRegion;
use std::ffi::CString;
use tracing::debug;
/// Generates and exports the Coverage Map.
///
/// This Coverage Map complies with Coverage Mapping Format version 3 (zero-based encoded as 2),
......@@ -91,7 +93,7 @@ fn new() -> Self {
fn write_coverage_mappings(
&mut self,
expressions: Vec<CounterExpression>,
counter_regions: impl Iterator<Item = (Counter, &'tcx Region<'tcx>)>,
counter_regions: impl Iterator<Item = (Counter, &'a CodeRegion)>,
coverage_mappings_buffer: &RustString,
) {
let mut counter_regions = counter_regions.collect::<Vec<_>>();
......@@ -104,22 +106,22 @@ fn write_coverage_mappings(
let mut current_file_name = None;
let mut current_file_id = 0;
// Convert the list of (Counter, Region) pairs to an array of `CounterMappingRegion`, sorted
// Convert the list of (Counter, CodeRegion) pairs to an array of `CounterMappingRegion`, sorted
// by filename and position. Capture any new files to compute the `CounterMappingRegion`s
// `file_id` (indexing files referenced by the current function), and construct the
// function-specific `virtual_file_mapping` from `file_id` to its index in the module's
// `filenames` array.
counter_regions.sort_unstable_by_key(|(_counter, region)| *region);
for (counter, region) in counter_regions {
let Region { file_name, start_line, start_col, end_line, end_col } = *region;
let same_file = current_file_name.as_ref().map_or(false, |p| p == file_name);
let CodeRegion { file_name, start_line, start_col, end_line, end_col } = *region;
let same_file = current_file_name.as_ref().map_or(false, |p| *p == file_name);
if !same_file {
if current_file_name.is_some() {
current_file_id += 1;
}
current_file_name = Some(file_name.to_string());
let c_filename =
CString::new(file_name).expect("null error converting filename to C string");
current_file_name = Some(file_name);
let c_filename = CString::new(file_name.to_string())
.expect("null error converting filename to C string");
debug!(" file_id: {} = '{:?}'", current_file_id, c_filename);
let (filenames_index, _) = self.filenames.insert_full(c_filename);
virtual_file_mapping.push(filenames_index as u32);
......
......@@ -5,18 +5,22 @@
use libc::c_uint;
use llvm::coverageinfo::CounterMappingRegion;
use rustc_codegen_ssa::coverageinfo::map::{CounterExpression, ExprKind, FunctionCoverage, Region};
use rustc_codegen_ssa::coverageinfo::map::{CounterExpression, FunctionCoverage};
use rustc_codegen_ssa::traits::{
BaseTypeMethods, CoverageInfoBuilderMethods, CoverageInfoMethods, MiscMethods, StaticMethods,
};
use rustc_data_structures::fx::FxHashMap;
use rustc_llvm::RustString;
use rustc_middle::mir::coverage::{
CodeRegion, CounterValueReference, ExpressionOperandId, InjectedExpressionIndex, Op,
};
use rustc_middle::ty::Instance;
use tracing::debug;
use std::cell::RefCell;
use std::ffi::CString;
use tracing::debug;
pub mod mapgen;
const COVMAP_VAR_ALIGN_BYTES: usize = 8;
......@@ -24,7 +28,7 @@
/// A context object for maintaining all state needed by the coverageinfo module.
pub struct CrateCoverageContext<'tcx> {
// Coverage region data for each instrumented function identified by DefId.
pub(crate) function_coverage_map: RefCell<FxHashMap<Instance<'tcx>, FunctionCoverage<'tcx>>>,
pub(crate) function_coverage_map: RefCell<FxHashMap<Instance<'tcx>, FunctionCoverage>>,
}
impl<'tcx> CrateCoverageContext<'tcx> {
......@@ -32,7 +36,7 @@ pub fn new() -> Self {
Self { function_coverage_map: Default::default() }
}
pub fn take_function_coverage_map(&self) -> FxHashMap<Instance<'tcx>, FunctionCoverage<'tcx>> {
pub fn take_function_coverage_map(&self) -> FxHashMap<Instance<'tcx>, FunctionCoverage> {
self.function_coverage_map.replace(FxHashMap::default())
}
}
......@@ -58,11 +62,11 @@ fn add_counter_region(
&mut self,
instance: Instance<'tcx>,
function_source_hash: u64,
id: u32,
region: Region<'tcx>,
id: CounterValueReference,
region: CodeRegion,
) {
debug!(
"adding counter to coverage_regions: instance={:?}, function_source_hash={}, id={}, \
"adding counter to coverage_regions: instance={:?}, function_source_hash={}, id={:?}, \
at {:?}",
instance, function_source_hash, id, region,
);
......@@ -76,25 +80,25 @@ fn add_counter_region(
fn add_counter_expression_region(
&mut self,
instance: Instance<'tcx>,
id_descending_from_max: u32,
lhs: u32,
op: ExprKind,
rhs: u32,
region: Region<'tcx>,
id: InjectedExpressionIndex,
lhs: ExpressionOperandId,
op: Op,
rhs: ExpressionOperandId,
region: CodeRegion,
) {
debug!(
"adding counter expression to coverage_regions: instance={:?}, id={}, {} {:?} {}, \
"adding counter expression to coverage_regions: instance={:?}, id={:?}, {:?} {:?} {:?}, \
at {:?}",
instance, id_descending_from_max, lhs, op, rhs, region,
instance, id, lhs, op, rhs, region,
);
let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
coverage_regions
.entry(instance)
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
.add_counter_expression(id_descending_from_max, lhs, op, rhs, region);
.add_counter_expression(id, lhs, op, rhs, region);
}
fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: Region<'tcx>) {
fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: CodeRegion) {
debug!(
"adding unreachable code to coverage_regions: instance={:?}, at {:?}",
instance, region,
......
......@@ -11,22 +11,18 @@
use rustc_codegen_ssa::base::{compare_simd_types, to_immediate, wants_msvc_seh};
use rustc_codegen_ssa::common::span_invalid_monomorphization_error;
use rustc_codegen_ssa::common::{IntPredicate, TypeKind};
use rustc_codegen_ssa::coverageinfo;
use rustc_codegen_ssa::glue;
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
use rustc_codegen_ssa::mir::place::PlaceRef;
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::MemFlags;
use rustc_hir as hir;
use rustc_middle::mir::coverage;
use rustc_middle::mir::Operand;
use rustc_middle::ty::layout::{FnAbiExt, HasTyCtxt};
use rustc_middle::ty::{self, Ty};
use rustc_middle::{bug, span_bug};
use rustc_span::{sym, symbol::kw, Span, Symbol};
use rustc_target::abi::{self, HasDataLayout, LayoutOf, Primitive};
use rustc_target::spec::PanicStrategy;
use tracing::debug;
use std::cmp::Ordering;
use std::iter;
......@@ -83,77 +79,6 @@ fn get_simple_intrinsic(cx: &CodegenCx<'ll, '_>, name: Symbol) -> Option<&'ll Va
}
impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
fn is_codegen_intrinsic(
&mut self,
intrinsic: Symbol,
args: &Vec<Operand<'tcx>>,
caller_instance: ty::Instance<'tcx>,
) -> bool {
let mut is_codegen_intrinsic = true;
// Set `is_codegen_intrinsic` to `false` to bypass `codegen_intrinsic_call()`.
// FIXME(richkadel): Make sure to add coverage analysis tests on a crate with
// external crate dependencies, where:
// 1. Both binary and dependent crates are compiled with `-Zinstrument-coverage`
// 2. Only binary is compiled with `-Zinstrument-coverage`
// 3. Only dependent crates are compiled with `-Zinstrument-coverage`
match intrinsic {
sym::count_code_region => {
use coverage::count_code_region_args::*;
self.add_counter_region(
caller_instance,
op_to_u64(&args[FUNCTION_SOURCE_HASH]),
op_to_u32(&args[COUNTER_ID]),
coverageinfo::Region::new(
op_to_str_slice(&args[FILE_NAME]),
op_to_u32(&args[START_LINE]),
op_to_u32(&args[START_COL]),
op_to_u32(&args[END_LINE]),
op_to_u32(&args[END_COL]),
),
);
}
sym::coverage_counter_add | sym::coverage_counter_subtract => {
is_codegen_intrinsic = false;
use coverage::coverage_counter_expression_args::*;
self.add_counter_expression_region(
caller_instance,
op_to_u32(&args[EXPRESSION_ID]),
op_to_u32(&args[LEFT_ID]),
if intrinsic == sym::coverage_counter_add {
coverageinfo::ExprKind::Add
} else {
coverageinfo::ExprKind::Subtract
},
op_to_u32(&args[RIGHT_ID]),
coverageinfo::Region::new(
op_to_str_slice(&args[FILE_NAME]),
op_to_u32(&args[START_LINE]),
op_to_u32(&args[START_COL]),
op_to_u32(&args[END_LINE]),
op_to_u32(&args[END_COL]),
),
);
}
sym::coverage_unreachable => {
is_codegen_intrinsic = false;
use coverage::coverage_unreachable_args::*;
self.add_unreachable_region(
caller_instance,
coverageinfo::Region::new(
op_to_str_slice(&args[FILE_NAME]),
op_to_u32(&args[START_LINE]),
op_to_u32(&args[START_COL]),
op_to_u32(&args[END_LINE]),
op_to_u32(&args[END_COL]),
),
);
}
_ => {}
}
is_codegen_intrinsic
}
fn codegen_intrinsic_call(
&mut self,
instance: ty::Instance<'tcx>,
......@@ -161,7 +86,6 @@ fn codegen_intrinsic_call(
args: &[OperandRef<'tcx, &'ll Value>],
llresult: &'ll Value,
span: Span,
caller_instance: ty::Instance<'tcx>,
) {
let tcx = self.tcx;
let callee_ty = instance.ty(tcx, ty::ParamEnv::reveal_all());
......@@ -213,21 +137,6 @@ fn codegen_intrinsic_call(
let llfn = self.get_intrinsic(&("llvm.debugtrap"));
self.call(llfn, &[], None)
}
sym::count_code_region => {
use coverage::count_code_region_args::*;
let coverageinfo = tcx.coverageinfo(caller_instance.def_id());
let fn_name = self.create_pgo_func_name_var(caller_instance);
let hash = args[FUNCTION_SOURCE_HASH].immediate();
let num_counters = self.const_u32(coverageinfo.num_counters);
let index = args[COUNTER_ID].immediate();
debug!(
"translating Rust intrinsic `count_code_region()` to LLVM intrinsic: \
instrprof.increment(fn_name={:?}, hash={:?}, num_counters={:?}, index={:?})",
fn_name, hash, num_counters, index,
);
self.instrprof_increment(fn_name, hash, num_counters, index)
}
sym::va_start => self.va_start(args[0].immediate()),
sym::va_end => self.va_end(args[0].immediate()),
sym::va_copy => {
......@@ -2238,15 +2147,3 @@ fn float_type_width(ty: Ty<'_>) -> Option<u64> {
_ => None,
}
}
/// Extracts a `&str` constant from an intrinsic call operand.
/// Panics if the operand's constant value is not a string slice.
fn op_to_str_slice<'tcx>(op: &Operand<'tcx>) -> &'tcx str {
    let const_value = Operand::value_from_const(op);
    const_value.try_to_str_slice().expect("Value is &str")
}
/// Extracts a `u32` constant from an intrinsic call operand.
/// Panics if the operand's constant scalar is not a `u32`.
fn op_to_u32<'tcx>(op: &Operand<'tcx>) -> u32 {
    let scalar = Operand::scalar_from_const(op);
    scalar.to_u32().expect("Scalar is u32")
}
/// Extracts a `u64` constant from an intrinsic call operand.
/// Panics if the operand's constant scalar is not a `u64`.
fn op_to_u64<'tcx>(op: &Operand<'tcx>) -> u64 {
    let scalar = Operand::scalar_from_const(op);
    scalar.to_u64().expect("Scalar is u64")
}
use super::map::{CounterValueReference, MappedExpressionIndex};
use rustc_middle::mir::coverage::{CounterValueReference, MappedExpressionIndex};
/// Aligns with [llvm::coverage::Counter::CounterKind](https://github.com/rust-lang/llvm-project/blob/rustc/10.0-2020-05-05/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L91)
#[derive(Copy, Clone, Debug)]
......
pub use super::ffi::*;
use rustc_index::vec::IndexVec;
use rustc_middle::mir::coverage::{
CodeRegion, CounterValueReference, ExpressionOperandId, InjectedExpressionIndex,
MappedExpressionIndex, Op,
};
use rustc_middle::ty::Instance;
use rustc_middle::ty::TyCtxt;
use std::cmp::Ord;
// `u32`-sized newtype indexes used by coverage instrumentation. Each may span
// the full `u32` range (MAX = 0xFFFF_FFFF).

// Identifies an operand of a counter expression: either a counter or another
// expression. Counter ids and expression ids therefore share this operand
// id space.
rustc_index::newtype_index! {
    pub struct ExpressionOperandId {
        DEBUG_FORMAT = "ExpressionOperandId({})",
        MAX = 0xFFFF_FFFF,
    }
}

// Identifies an injected counter; used to index the per-function list of
// counter regions (see `FunctionCoverage::counters`).
rustc_index::newtype_index! {
    pub struct CounterValueReference {
        DEBUG_FORMAT = "CounterValueReference({})",
        MAX = 0xFFFF_FFFF,
    }
}

// Identifies an injected counter expression; used to index the per-function
// list of expression regions (see `FunctionCoverage::expressions`).
rustc_index::newtype_index! {
    pub struct InjectedExpressionIndex {
        DEBUG_FORMAT = "InjectedExpressionIndex({})",
        MAX = 0xFFFF_FFFF,
    }
}

// Index of an expression after it has been re-mapped into the final,
// densely-packed `counter_expressions` array (see `expressions_with_regions`).
rustc_index::newtype_index! {
    pub struct MappedExpressionIndex {
        DEBUG_FORMAT = "MappedExpressionIndex({})",
        MAX = 0xFFFF_FFFF,
    }
}
/// A source code region recorded for coverage instrumentation, identified by
/// file name plus start/end line and column positions. Ordering (via the
/// derived `Ord`) sorts regions by file name, then by position.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct Region<'tcx> {
    /// Name of the source file containing this region.
    pub file_name: &'tcx str,
    /// Line where the region starts.
    pub start_line: u32,
    /// Column where the region starts.
    pub start_col: u32,
    /// Line where the region ends.
    pub end_line: u32,
    /// Column where the region ends.
    pub end_col: u32,
}
impl<'tcx> Region<'tcx> {
pub fn new(
file_name: &'tcx str,
start_line: u32,
start_col: u32,
end_line: u32,
end_col: u32,
) -> Self {
Self { file_name, start_line, start_col, end_line, end_col }
}
}
#[derive(Clone, Debug)]
pub struct ExpressionRegion<'tcx> {
pub struct ExpressionRegion {
lhs: ExpressionOperandId,
op: ExprKind,
op: Op,
rhs: ExpressionOperandId,
region: Region<'tcx>,
region: CodeRegion,
}
/// Collects all of the coverage regions associated with (a) injected counters, (b) counter
......@@ -75,15 +28,15 @@ pub struct ExpressionRegion<'tcx> {
/// only whitespace or comments). According to LLVM Code Coverage Mapping documentation, "A count
/// for a gap area is only used as the line execution count if there are no other regions on a
/// line."
pub struct FunctionCoverage<'tcx> {
pub struct FunctionCoverage {
source_hash: u64,
counters: IndexVec<CounterValueReference, Option<Region<'tcx>>>,
expressions: IndexVec<InjectedExpressionIndex, Option<ExpressionRegion<'tcx>>>,
unreachable_regions: Vec<Region<'tcx>>,
counters: IndexVec<CounterValueReference, Option<CodeRegion>>,
expressions: IndexVec<InjectedExpressionIndex, Option<ExpressionRegion>>,
unreachable_regions: Vec<CodeRegion>,
}
impl<'tcx> FunctionCoverage<'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> Self {
impl FunctionCoverage {
pub fn new<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> Self {
let coverageinfo = tcx.coverageinfo(instance.def_id());
Self {
source_hash: 0, // will be set with the first `add_counter()`
......@@ -96,15 +49,13 @@ pub fn new(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> Self {
/// Adds a code region to be counted by an injected counter intrinsic.
/// The source_hash (computed during coverage instrumentation) should also be provided, and
/// should be the same for all counters in a given function.
pub fn add_counter(&mut self, source_hash: u64, id: u32, region: Region<'tcx>) {
pub fn add_counter(&mut self, source_hash: u64, id: CounterValueReference, region: CodeRegion) {
if self.source_hash == 0 {
self.source_hash = source_hash;
} else {
debug_assert_eq!(source_hash, self.source_hash);
}
self.counters[CounterValueReference::from(id)]
.replace(region)
.expect_none("add_counter called with duplicate `id`");
self.counters[id].replace(region).expect_none("add_counter called with duplicate `id`");
}
/// Both counters and "counter expressions" (or simply, "expressions") can be operands in other
......@@ -123,24 +74,20 @@ pub fn add_counter(&mut self, source_hash: u64, id: u32, region: Region<'tcx>) {
/// counters and expressions have been added.
pub fn add_counter_expression(
&mut self,
id_descending_from_max: u32,
lhs: u32,
op: ExprKind,
rhs: u32,
region: Region<'tcx>,
expression_id: InjectedExpressionIndex,
lhs: ExpressionOperandId,
op: Op,
rhs: ExpressionOperandId,
region: CodeRegion,
) {
let expression_id = ExpressionOperandId::from(id_descending_from_max);
let lhs = ExpressionOperandId::from(lhs);
let rhs = ExpressionOperandId::from(rhs);
let expression_index = self.expression_index(expression_id);
let expression_index = self.expression_index(u32::from(expression_id));
self.expressions[expression_index]
.replace(ExpressionRegion { lhs, op, rhs, region })
.expect_none("add_counter_expression called with duplicate `id_descending_from_max`");
}
/// Add a region that will be marked as "unreachable", with a constant "zero counter".
pub fn add_unreachable_region(&mut self, region: Region<'tcx>) {
pub fn add_unreachable_region(&mut self, region: CodeRegion) {
self.unreachable_regions.push(region)
}
......@@ -153,9 +100,9 @@ pub fn source_hash(&self) -> u64 {
/// Generate an array of CounterExpressions, and an iterator over all `Counter`s and their
/// associated `Regions` (from which the LLVM-specific `CoverageMapGenerator` will create
/// `CounterMappingRegion`s.
pub fn get_expressions_and_counter_regions(
&'tcx self,
) -> (Vec<CounterExpression>, impl Iterator<Item = (Counter, &'tcx Region<'tcx>)>) {
pub fn get_expressions_and_counter_regions<'a>(
&'a self,
) -> (Vec<CounterExpression>, impl Iterator<Item = (Counter, &'a CodeRegion)>) {
assert!(self.source_hash != 0);
let counter_regions = self.counter_regions();
......@@ -167,7 +114,7 @@ pub fn get_expressions_and_counter_regions(
(counter_expressions, counter_regions)
}
fn counter_regions(&'tcx self) -> impl Iterator<Item = (Counter, &'tcx Region<'tcx>)> {
fn counter_regions<'a>(&'a self) -> impl Iterator<Item = (Counter, &'a CodeRegion)> {
self.counters.iter_enumerated().filter_map(|(index, entry)| {
// Option::map() will return None to filter out missing counters. This may happen
// if, for example, a MIR-instrumented counter is removed during an optimization.
......@@ -178,8 +125,8 @@ fn counter_regions(&'tcx self) -> impl Iterator<Item = (Counter, &'tcx Region<'t
}
fn expressions_with_regions(
&'tcx self,
) -> (Vec<CounterExpression>, impl Iterator<Item = (Counter, &'tcx Region<'tcx>)>) {
&'a self,
) -> (Vec<CounterExpression>, impl Iterator<Item = (Counter, &'a CodeRegion)>) {
let mut counter_expressions = Vec::with_capacity(self.expressions.len());
let mut expression_regions = Vec::with_capacity(self.expressions.len());
let mut new_indexes =
......@@ -204,7 +151,7 @@ fn expressions_with_regions(
.as_ref()
.map(|_| Counter::counter_value_reference(index))
} else {
let index = self.expression_index(id);
let index = self.expression_index(u32::from(id));
self.expressions
.get(index)
.expect("expression id is out of range")
......@@ -232,7 +179,14 @@ fn expressions_with_regions(
// been assigned a `new_index`.
let mapped_expression_index =
MappedExpressionIndex::from(counter_expressions.len());
counter_expressions.push(CounterExpression::new(lhs_counter, op, rhs_counter));
counter_expressions.push(CounterExpression::new(
lhs_counter,
match op {
Op::Add => ExprKind::Add,
Op::Subtract => ExprKind::Subtract,
},
rhs_counter,
));
new_indexes[original_index] = mapped_expression_index;
expression_regions.push((Counter::expression(mapped_expression_index), region));
}
......@@ -240,15 +194,12 @@ fn expressions_with_regions(
(counter_expressions, expression_regions.into_iter())
}
fn unreachable_regions(&'tcx self) -> impl Iterator<Item = (Counter, &'tcx Region<'tcx>)> {
fn unreachable_regions<'a>(&'a self) -> impl Iterator<Item = (Counter, &'a CodeRegion)> {
self.unreachable_regions.iter().map(|region| (Counter::zero(), region))
}
fn expression_index(
&self,
id_descending_from_max: ExpressionOperandId,
) -> InjectedExpressionIndex {
debug_assert!(id_descending_from_max.index() >= self.counters.len());
InjectedExpressionIndex::from(u32::MAX - u32::from(id_descending_from_max))
fn expression_index(&self, id_descending_from_max: u32) -> InjectedExpressionIndex {
debug_assert!(id_descending_from_max >= self.counters.len() as u32);
InjectedExpressionIndex::from(u32::MAX - id_descending_from_max)
}
}
pub mod ffi;
pub mod map;
pub use map::ExprKind;
pub use map::Region;
......@@ -647,17 +647,6 @@ fn codegen_call_terminator(
if intrinsic.is_some() && intrinsic != Some(sym::drop_in_place) {
let intrinsic = intrinsic.unwrap();
// `is_codegen_intrinsic()` allows the backend implementation to perform compile-time
// operations before converting the `args` to backend values.
if !bx.is_codegen_intrinsic(intrinsic, &args, self.instance) {
// If the intrinsic call was fully addressed by the `is_codegen_intrinsic()` call
// (as a compile-time operation), return immediately. This avoids the need to
// convert the arguments, the call to `codegen_intrinsic_call()`, and the return
// value handling.
return;
}
let dest = match ret_dest {
_ if fn_abi.ret.is_indirect() => llargs[0],
ReturnDest::Nothing => {
......@@ -702,7 +691,6 @@ fn codegen_call_terminator(
&args,
dest,
terminator.source_info.span,
self.instance,
);
if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
......
use crate::traits::*;
use rustc_middle::mir::coverage::*;
use rustc_middle::mir::Coverage;
use super::FunctionCx;
impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    /// Lowers a MIR `Coverage` statement. Every kind is recorded in the
    /// backend's coverage map; the `Counter` kind additionally emits an
    /// `instrprof.increment` call so the counter is bumped at runtime.
    pub fn codegen_coverage(&self, bx: &mut Bx, coverage: Coverage) {
        let instance = self.instance;
        let Coverage { kind, code_region } = coverage;
        match kind {
            CoverageKind::Counter { function_source_hash, id } => {
                // Record the counter's code region for the coverage map...
                bx.add_counter_region(instance, function_source_hash, id, code_region);

                // ...then generate the runtime increment of that counter.
                let num_counters = bx.tcx().coverageinfo(instance.def_id()).num_counters;
                let pgo_fn_name = bx.create_pgo_func_name_var(instance);
                let hash_val = bx.const_u64(function_source_hash);
                let num_counters_val = bx.const_u32(num_counters);
                let index_val = bx.const_u32(u32::from(id));
                debug!(
                    "codegen intrinsic instrprof.increment(fn_name={:?}, hash={:?}, num_counters={:?}, index={:?})",
                    pgo_fn_name, hash_val, num_counters_val, index_val,
                );
                bx.instrprof_increment(pgo_fn_name, hash_val, num_counters_val, index_val);
            }
            CoverageKind::Expression { id, lhs, op, rhs } => {
                // Expressions are coverage-map-only; no executable code.
                bx.add_counter_expression_region(instance, id, lhs, op, rhs, code_region);
            }
            CoverageKind::Unreachable => {
                bx.add_unreachable_region(instance, code_region);
            }
        }
    }
}
......@@ -484,6 +484,7 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
mod analyze;
mod block;
pub mod constant;
pub mod coverageinfo;
pub mod debuginfo;
pub mod operand;
pub mod place;
......
......@@ -111,6 +111,10 @@ pub fn codegen_statement(&mut self, mut bx: Bx, statement: &mir::Statement<'tcx>
}
bx
}
mir::StatementKind::Coverage(box ref coverage) => {
self.codegen_coverage(&mut bx, coverage.clone());
bx
}
mir::StatementKind::FakeRead(..)
| mir::StatementKind::Retag { .. }
| mir::StatementKind::AscribeUserType(..)
......
......@@ -271,7 +271,7 @@ fn instrprof_increment(
hash: Self::Value,
num_counters: Self::Value,
index: Self::Value,
) -> Self::Value;
);
fn call(
&mut self,
......
use super::BackendTypes;
use crate::coverageinfo::{ExprKind, Region};
use rustc_middle::mir::coverage::*;
use rustc_middle::ty::Instance;
pub trait CoverageInfoMethods: BackendTypes {
......@@ -13,19 +13,19 @@ fn add_counter_region(
&mut self,
instance: Instance<'tcx>,
function_source_hash: u64,
index: u32,
region: Region<'tcx>,
id: CounterValueReference,
region: CodeRegion,
);
fn add_counter_expression_region(
&mut self,
instance: Instance<'tcx>,
index: u32,
lhs: u32,
op: ExprKind,
rhs: u32,
region: Region<'tcx>,
id: InjectedExpressionIndex,
lhs: ExpressionOperandId,
op: Op,
rhs: ExpressionOperandId,
region: CodeRegion,
);
fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: Region<'tcx>);
fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: CodeRegion);
}
use super::BackendTypes;
use crate::mir::operand::OperandRef;
use rustc_middle::mir::Operand;
use rustc_middle::ty::{self, Ty};
use rustc_span::{Span, Symbol};
use rustc_span::Span;
use rustc_target::abi::call::FnAbi;
pub trait IntrinsicCallMethods<'tcx>: BackendTypes {
......@@ -16,19 +15,8 @@ fn codegen_intrinsic_call(
args: &[OperandRef<'tcx, Self::Value>],
llresult: Self::Value,
span: Span,
caller_instance: ty::Instance<'tcx>,
);
/// Intrinsic-specific pre-codegen processing, if any is required. Some intrinsics are handled
/// at compile time and do not generate code. Returns true if codegen is required or false if
/// the intrinsic does not need code generation.
fn is_codegen_intrinsic(
&mut self,
intrinsic: Symbol,
args: &Vec<Operand<'tcx>>,
caller_instance: ty::Instance<'tcx>,
) -> bool;
fn abort(&mut self);
fn assume(&mut self, val: Self::Value);
fn expect(&mut self, cond: Self::Value, expected: bool) -> Self::Value;
......
//! Validity checking for fake lang items
use crate::def_id::DefId;
use crate::{lang_items, LangItem, LanguageItems};
use rustc_data_structures::fx::FxHashMap;
use rustc_span::symbol::{sym, Symbol};
use lazy_static::lazy_static;
// Declares "fake" lang items: functions that parts of the compiler treat
// like lang items, looked up by `Symbol` name rather than through the real
// lang item table. Each entry is `(LangItem variant, symbol, accessor method)`.
macro_rules! fake_lang_items {
    ($($item:ident, $name:ident, $method:ident;)*) => (
        // Lazily-built map from an item's `Symbol` name to its `LangItem`,
        // with one entry per declaration in the macro invocation below.
        lazy_static! {
            pub static ref FAKE_ITEMS_REFS: FxHashMap<Symbol, LangItem> = {
                let mut map = FxHashMap::default();
                $(map.insert(sym::$name, lang_items::$item);)*
                map
            };
        }

        impl LanguageItems {
            // Returns `true` if `item_def_id` is the `DefId` of any of the
            // declared fake lang items (checked via each item's accessor
            // method, OR-ed together).
            pub fn is_fake_lang_item(&self, item_def_id: DefId) -> bool {
                let did = Some(item_def_id);
                $(self.$method() == did)||*
            }
        }
    )
}

// Fake lang items for source-code coverage instrumentation
// (-Zinstrument-coverage).
fake_lang_items! {
    // Variant name, Symbol, Method name,
    CountCodeRegionFnLangItem, count_code_region, count_code_region_fn;
    CoverageCounterAddFnLangItem, coverage_counter_add, coverage_counter_add_fn;
    CoverageCounterSubtractFnLangItem, coverage_counter_subtract, coverage_counter_subtract_fn;
}
......@@ -303,11 +303,6 @@ pub fn extract<'a, F>(check_name: F, attrs: &'a [ast::Attribute]) -> Option<(Sym
TryTraitLangItem, kw::Try, try_trait, Target::Trait;
// language items related to source code coverage instrumentation (-Zinstrument-coverage)
CountCodeRegionFnLangItem, sym::count_code_region, count_code_region_fn, Target::Fn;
CoverageCounterAddFnLangItem, sym::coverage_counter_add, coverage_counter_add_fn, Target::Fn;
CoverageCounterSubtractFnLangItem, sym::coverage_counter_subtract, coverage_counter_subtract_fn, Target::Fn;
// Language items from AST lowering
TryFromError, sym::from_error, from_error_fn, Target::Method(MethodKind::Trait { body: false });
TryFromOk, sym::from_ok, from_ok_fn, Target::Method(MethodKind::Trait { body: false });
......
......@@ -19,7 +19,6 @@
pub mod def;
pub mod definitions;
pub use rustc_span::def_id;
pub mod fake_lang_items;
mod hir;
pub mod hir_id;
pub mod intravisit;
......
//! Metadata from source code coverage analysis and instrumentation.
/// Positional arguments to `libcore::count_code_region()`
pub mod count_code_region_args {
pub const FUNCTION_SOURCE_HASH: usize = 0;
pub const COUNTER_ID: usize = 1;
pub const FILE_NAME: usize = 2;
pub const START_LINE: usize = 3;
pub const START_COL: usize = 4;
pub const END_LINE: usize = 5;
pub const END_COL: usize = 6;
}
/// Positional arguments to `libcore::coverage_counter_add()` and
/// `libcore::coverage_counter_subtract()`
pub mod coverage_counter_expression_args {
pub const EXPRESSION_ID: usize = 0;
pub const LEFT_ID: usize = 1;
pub const RIGHT_ID: usize = 2;
pub const FILE_NAME: usize = 3;
pub const START_LINE: usize = 4;
pub const START_COL: usize = 5;
pub const END_LINE: usize = 6;
pub const END_COL: usize = 7;
}
/// Positional arguments to `libcore::coverage_unreachable()`
pub mod coverage_unreachable_args {
pub const FILE_NAME: usize = 0;
pub const START_LINE: usize = 1;
pub const START_COL: usize = 2;
pub const END_LINE: usize = 3;
pub const END_COL: usize = 4;
use rustc_macros::HashStable;
use rustc_span::Symbol;
use std::cmp::Ord;
use std::fmt::{self, Debug, Formatter};
// `u32`-sized newtype indexes used by coverage instrumentation. All derive
// `HashStable` (they are embedded in MIR) and may span the full `u32` range
// (MAX = 0xFFFF_FFFF).

// Identifies an operand of a counter expression: either a counter or another
// expression (see the `From` conversions from `CounterValueReference` and
// `InjectedExpressionIndex` below).
rustc_index::newtype_index! {
    pub struct ExpressionOperandId {
        derive [HashStable]
        DEBUG_FORMAT = "ExpressionOperandId({})",
        MAX = 0xFFFF_FFFF,
    }
}

// Identifies an injected counter (the `id` of `CoverageKind::Counter`).
rustc_index::newtype_index! {
    pub struct CounterValueReference {
        derive [HashStable]
        DEBUG_FORMAT = "CounterValueReference({})",
        MAX = 0xFFFF_FFFF,
    }
}

// Identifies an injected counter expression (the `id` of
// `CoverageKind::Expression`).
rustc_index::newtype_index! {
    pub struct InjectedExpressionIndex {
        derive [HashStable]
        DEBUG_FORMAT = "InjectedExpressionIndex({})",
        MAX = 0xFFFF_FFFF,
    }
}

// Index of an expression after it has been re-mapped into the final,
// densely-packed list of counter expressions for the coverage map.
rustc_index::newtype_index! {
    pub struct MappedExpressionIndex {
        derive [HashStable]
        DEBUG_FORMAT = "MappedExpressionIndex({})",
        MAX = 0xFFFF_FFFF,
    }
}
impl From<CounterValueReference> for ExpressionOperandId {
#[inline]
fn from(v: CounterValueReference) -> ExpressionOperandId {
ExpressionOperandId::from(v.as_u32())
}
}
impl From<InjectedExpressionIndex> for ExpressionOperandId {
#[inline]
fn from(v: InjectedExpressionIndex) -> ExpressionOperandId {
ExpressionOperandId::from(v.as_u32())
}
}
/// The kind of coverage data carried by a MIR `Coverage` statement.
#[derive(Clone, Debug, PartialEq, TyEncodable, TyDecodable, HashStable, TypeFoldable)]
pub enum CoverageKind {
    /// An executable counter. Codegen lowers this to an `instrprof.increment`
    /// call (see `codegen_coverage`), tagged with the enclosing function's
    /// source hash.
    Counter {
        function_source_hash: u64,
        id: CounterValueReference,
    },
    /// A counter expression, `lhs op rhs`, whose operands reference other
    /// counters or expressions. Recorded in the coverage map only; it
    /// generates no executable code.
    Expression {
        id: InjectedExpressionIndex,
        lhs: ExpressionOperandId,
        op: Op,
        rhs: ExpressionOperandId,
    },
    /// Marks a code region that can never be executed; mapped with a
    /// constant zero counter, and never usable as an expression operand.
    Unreachable,
}
impl CoverageKind {
    /// Returns this counter's or expression's id, converted into the shared
    /// expression-operand id space. Calling this on an `Unreachable` kind is
    /// a bug: unreachable regions cannot appear as expression operands.
    pub fn as_operand_id(&self) -> ExpressionOperandId {
        match self {
            CoverageKind::Counter { id, .. } => ExpressionOperandId::from(*id),
            CoverageKind::Expression { id, .. } => ExpressionOperandId::from(*id),
            CoverageKind::Unreachable => {
                bug!("Unreachable coverage cannot be part of an expression")
            }
        }
    }
}
/// A source code region used with coverage information, identified by file
/// name (an interned `Symbol`) plus start/end line and column positions.
/// Ordering (via the derived `Ord`) sorts regions by file name, then by
/// position.
#[derive(Clone, TyEncodable, TyDecodable, HashStable, TypeFoldable, PartialEq, Eq, PartialOrd, Ord)]
pub struct CodeRegion {
    /// Name of the source file containing this region.
    pub file_name: Symbol,
    /// Line where the region starts.
    pub start_line: u32,
    /// Column where the region starts.
    pub start_col: u32,
    /// Line where the region ends.
    pub end_line: u32,
    /// Column where the region ends.
    pub end_col: u32,
}
impl Debug for CodeRegion {
fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
write!(
fmt,
"{}:{}:{} - {}:{}",
self.file_name, self.start_line, self.start_col, self.end_line, self.end_col
)
}
}
/// The arithmetic operator of a coverage counter expression
/// (`CoverageKind::Expression`): the expression's value is
/// `lhs op rhs` over its two operands.
#[derive(Copy, Clone, Debug, PartialEq, TyEncodable, TyDecodable, HashStable, TypeFoldable)]
pub enum Op {
    Subtract,
    Add,
}
......@@ -2,6 +2,7 @@
//!
//! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/mir/index.html
use crate::mir::coverage::{CodeRegion, CoverageKind};
use crate::mir::interpret::{Allocation, ConstValue, GlobalAlloc, Scalar};
use crate::mir::visit::MirVisitable;
use crate::ty::adjustment::PointerCast;
......@@ -25,7 +26,6 @@
use rustc_data_structures::graph::{self, GraphSuccessors};
use rustc_index::bit_set::BitMatrix;
use rustc_index::vec::{Idx, IndexVec};
use rustc_macros::HashStable;
use rustc_serialize::{Decodable, Encodable};
use rustc_span::symbol::Symbol;
use rustc_span::{Span, DUMMY_SP};
......@@ -1400,6 +1400,12 @@ pub enum StatementKind<'tcx> {
/// - `Bivariant` -- no effect
AscribeUserType(Box<(Place<'tcx>, UserTypeProjection)>, ty::Variance),
/// Marks the start of a "coverage region", injected with '-Zinstrument-coverage'. A
/// `CoverageInfo` statement carries metadata about the coverage region, used to inject a coverage
/// map into the binary. The `Counter` kind also generates executable code, to increment a
/// counter variable at runtime, each time the code region is executed.
Coverage(Box<Coverage>),
/// No-op. Useful for deleting instructions without affecting statement indices.
Nop,
}
......@@ -1495,11 +1501,18 @@ fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
AscribeUserType(box (ref place, ref c_ty), ref variance) => {
write!(fmt, "AscribeUserType({:?}, {:?}, {:?})", place, variance, c_ty)
}
Coverage(box ref coverage) => write!(fmt, "{:?}", coverage),
Nop => write!(fmt, "nop"),
}
}
}
/// The payload of a `StatementKind::Coverage` MIR statement: what kind of coverage
/// data this is (counter, expression, or unreachable marker) and the source region
/// it applies to.
#[derive(Clone, Debug, PartialEq, TyEncodable, TyDecodable, HashStable, TypeFoldable)]
pub struct Coverage {
    /// Counter, expression, or unreachable marker.
    pub kind: CoverageKind,
    /// The source region the coverage data covers.
    pub code_region: CodeRegion,
}
///////////////////////////////////////////////////////////////////////////
// Places
......
......@@ -134,6 +134,12 @@ fn visit_ascribe_user_ty(&mut self,
self.super_ascribe_user_ty(place, variance, user_ty, location);
}
fn visit_coverage(&mut self,
coverage: & $($mutability)? Coverage,
location: Location) {
self.super_coverage(coverage, location);
}
fn visit_retag(&mut self,
kind: & $($mutability)? RetagKind,
place: & $($mutability)? Place<'tcx>,
......@@ -389,6 +395,12 @@ fn super_statement(&mut self,
) => {
self.visit_ascribe_user_ty(place, variance, user_ty, location);
}
StatementKind::Coverage(coverage) => {
self.visit_coverage(
coverage,
location
)
}
StatementKind::Nop => {}
}
}
......@@ -739,6 +751,11 @@ fn super_ascribe_user_ty(&mut self,
self.visit_user_type_projection(user_ty);
}
fn super_coverage(&mut self,
_kind: & $($mutability)? Coverage,
_location: Location) {
}
fn super_retag(&mut self,
_kind: & $($mutability)? RetagKind,
place: & $($mutability)? Place<'tcx>,
......@@ -1133,6 +1150,8 @@ pub enum NonUseContext {
StorageDead,
/// User type annotation assertions for NLL.
AscribeUserTy,
/// Coverage code region and counter metadata.
Coverage,
/// The data of an user variable, for debug info.
VarDebugInfo,
}
......
......@@ -271,6 +271,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
bool,
usize,
::rustc_target::abi::VariantIdx,
u32,
u64,
String,
crate::middle::region::Scope,
......@@ -289,6 +290,10 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
::rustc_hir::Unsafety,
::rustc_target::asm::InlineAsmRegOrRegClass,
::rustc_target::spec::abi::Abi,
crate::mir::coverage::ExpressionOperandId,
crate::mir::coverage::CounterValueReference,
crate::mir::coverage::InjectedExpressionIndex,
crate::mir::coverage::MappedExpressionIndex,
crate::mir::Local,
crate::mir::Promoted,
crate::traits::Reveal,
......
......@@ -72,7 +72,8 @@ pub fn categorize(context: PlaceContext) -> Option<DefUse> {
PlaceContext::MutatingUse(MutatingUseContext::Drop) =>
Some(DefUse::Drop),
// Debug info is neither def nor use.
// Coverage and debug info are neither def nor use.
PlaceContext::NonUse(NonUseContext::Coverage) |
PlaceContext::NonUse(NonUseContext::VarDebugInfo) => None,
}
}
......@@ -93,6 +93,7 @@ fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
}
}
StatementKind::Nop
| StatementKind::Coverage(..)
| StatementKind::AscribeUserType(..)
| StatementKind::Retag { .. }
| StatementKind::StorageLive(..) => {
......
......@@ -644,6 +644,7 @@ fn visit_statement_before_primary_effect(
}
}
StatementKind::Nop
| StatementKind::Coverage(..)
| StatementKind::AscribeUserType(..)
| StatementKind::Retag { .. }
| StatementKind::StorageLive(..) => {
......
......@@ -1532,6 +1532,7 @@ fn check_stmt(&mut self, body: &Body<'tcx>, stmt: &Statement<'tcx>, location: Lo
| StatementKind::StorageDead(..)
| StatementKind::LlvmInlineAsm { .. }
| StatementKind::Retag { .. }
| StatementKind::Coverage(..)
| StatementKind::Nop => {}
}
}
......
......@@ -302,6 +302,7 @@ fn statement_effect(
| mir::StatementKind::StorageLive(..)
| mir::StatementKind::Retag { .. }
| mir::StatementKind::AscribeUserType(..)
| mir::StatementKind::Coverage(..)
| mir::StatementKind::Nop => {}
}
}
......
......@@ -99,6 +99,7 @@ fn visit_local(&mut self, &local: &Local, context: PlaceContext, _: Location) {
PlaceContext::NonUse(
NonUseContext::StorageLive
| NonUseContext::AscribeUserTy
| NonUseContext::Coverage
| NonUseContext::VarDebugInfo,
)
| PlaceContext::NonMutatingUse(
......
......@@ -145,6 +145,7 @@ fn before_statement_effect(
// Nothing to do for these. Match exhaustively so this fails to compile when new
// variants are added.
StatementKind::AscribeUserType(..)
| StatementKind::Coverage(..)
| StatementKind::FakeRead(..)
| StatementKind::Nop
| StatementKind::Retag(..)
......
......@@ -318,6 +318,7 @@ fn gather_statement(&mut self, stmt: &Statement<'tcx>) {
}
StatementKind::Retag { .. }
| StatementKind::AscribeUserType(..)
| StatementKind::Coverage(..)
| StatementKind::Nop => {}
}
}
......
......@@ -440,11 +440,6 @@ pub fn emulate_intrinsic(
// These just return their argument
self.copy_op(args[0], dest)?;
}
// FIXME(#73156): Handle source code coverage in const eval
sym::count_code_region
| sym::coverage_counter_add
| sym::coverage_counter_subtract
| sym::coverage_unreachable => (),
_ => return Ok(false),
}
......
......@@ -118,6 +118,19 @@ pub fn step(&mut self) -> InterpResult<'tcx, bool> {
// Statements we do not track.
AscribeUserType(..) => {}
// Currently, Miri discards Coverage statements. Coverage statements are only injected
// via an optional compile time MIR pass and have no side effects. Since Coverage
// statements don't exist at the source level, it is safe for Miri to ignore them, even
// for undefined behavior (UB) checks.
//
// A coverage counter inside a const expression (for example, a counter injected in a
// const function) is discarded when the const is evaluated at compile time. Whether
// this should change, and/or how to implement a const eval counter, is a subject of the
// following issue:
//
// FIXME(#73156): Handle source code coverage in const eval
Coverage(..) => {}
// Defined to do nothing. These are added by optimization passes, to avoid changing the
// size of MIR constantly.
Nop => {}
......
......@@ -485,6 +485,7 @@ fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
| StatementKind::StorageDead(_)
| StatementKind::Retag { .. }
| StatementKind::AscribeUserType(..)
| StatementKind::Coverage(..)
| StatementKind::Nop => {}
}
}
......
......@@ -114,6 +114,7 @@ fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
| StatementKind::StorageDead(..)
| StatementKind::Retag { .. }
| StatementKind::AscribeUserType(..)
| StatementKind::Coverage(..)
| StatementKind::Nop => {
// safe (at least as emitted during MIR construction)
}
......
......@@ -1443,6 +1443,7 @@ fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
| StatementKind::StorageDead(_)
| StatementKind::Retag(..)
| StatementKind::AscribeUserType(..)
| StatementKind::Coverage(..)
| StatementKind::Nop => {}
}
}
......
use crate::transform::{MirPass, MirSource};
use crate::util::patch::MirPatch;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_hir::lang_items;
use rustc_middle::hir;
use rustc_middle::ich::StableHashingContext;
use rustc_middle::mir;
use rustc_middle::mir::coverage::*;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::traversal;
use rustc_middle::mir::{
BasicBlock, BasicBlockData, CoverageInfo, Operand, Place, SourceInfo, SourceScope,
StatementKind, Terminator, TerminatorKind,
};
use rustc_middle::ty;
use rustc_middle::mir::visit::Visitor;
use rustc_middle::mir::{BasicBlock, Coverage, CoverageInfo, Location, Statement, StatementKind};
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{FnDef, TyCtxt};
use rustc_middle::ty::TyCtxt;
use rustc_span::def_id::DefId;
use rustc_span::{FileName, Pos, RealFileName, Span};
use rustc_span::{FileName, Pos, RealFileName, Span, Symbol};
/// Inserts call to count_code_region() as a placeholder to be replaced during code generation with
/// the intrinsic llvm.instrprof.increment.
......@@ -29,16 +22,29 @@ pub(crate) fn provide(providers: &mut Providers) {
providers.coverageinfo = |tcx, def_id| coverageinfo_from_mir(tcx, def_id);
}
/// MIR visitor that scans `Coverage` statements to compute the totals
/// (`num_counters` and `num_expressions`) recorded in `CoverageInfo`.
struct CoverageVisitor {
    // Accumulated totals; updated by `visit_coverage`.
    info: CoverageInfo,
}
impl Visitor<'_> for CoverageVisitor {
    fn visit_coverage(&mut self, coverage: &Coverage, _location: Location) {
        // Grow each recorded total so it stays one past the highest ID seen so far.
        match coverage.kind {
            CoverageKind::Counter { id, .. } => {
                let required = u32::from(id) + 1;
                if required > self.info.num_counters {
                    self.info.num_counters = required;
                }
            }
            CoverageKind::Expression { id, .. } => {
                // Expression IDs are assigned descending from `u32::MAX`, so map the
                // ID back to an ascending index before sizing the total.
                let required = u32::MAX - u32::from(id) + 1;
                if required > self.info.num_expressions {
                    self.info.num_expressions = required;
                }
            }
            _ => {}
        }
    }
}
fn coverageinfo_from_mir<'tcx>(tcx: TyCtxt<'tcx>, mir_def_id: DefId) -> CoverageInfo {
let mir_body = tcx.optimized_mir(mir_def_id);
// FIXME(richkadel): The current implementation assumes the MIR for the given DefId
// represents a single function. Validate and/or correct if inlining (which should be disabled
// if -Zinstrument-coverage is enabled) and/or monomorphization invalidates these assumptions.
let count_code_region_fn = tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None);
let coverage_counter_add_fn =
tcx.require_lang_item(lang_items::CoverageCounterAddFnLangItem, None);
let coverage_counter_subtract_fn =
tcx.require_lang_item(lang_items::CoverageCounterSubtractFnLangItem, None);
// The `num_counters` argument to `llvm.instrprof.increment` is the number of injected
// counters, with each counter having a counter ID from `0..num_counters-1`. MIR optimization
......@@ -49,49 +55,11 @@ fn coverageinfo_from_mir<'tcx>(tcx: TyCtxt<'tcx>, mir_def_id: DefId) -> Coverage
// `num_expressions` is the number of counter expressions added to the MIR body. Both
// `num_counters` and `num_expressions` are used to initialize new vectors, during backend
// code generate, to lookup counters and expressions by simple u32 indexes.
let mut num_counters: u32 = 0;
let mut num_expressions: u32 = 0;
for terminator in
traversal::preorder(mir_body).map(|(_, data)| data).filter_map(call_terminators)
{
if let TerminatorKind::Call { func: Operand::Constant(func), args, .. } = &terminator.kind {
match func.literal.ty.kind {
FnDef(id, _) if id == count_code_region_fn => {
let counter_id_arg =
args.get(count_code_region_args::COUNTER_ID).expect("arg found");
let counter_id = mir::Operand::scalar_from_const(counter_id_arg)
.to_u32()
.expect("counter_id arg is u32");
num_counters = std::cmp::max(num_counters, counter_id + 1);
}
FnDef(id, _)
if id == coverage_counter_add_fn || id == coverage_counter_subtract_fn =>
{
let expression_id_arg = args
.get(coverage_counter_expression_args::EXPRESSION_ID)
.expect("arg found");
let id_descending_from_max = mir::Operand::scalar_from_const(expression_id_arg)
.to_u32()
.expect("expression_id arg is u32");
// Counter expressions are initially assigned IDs descending from `u32::MAX`, so
// the range of expression IDs is disjoint from the range of counter IDs. This
// way, both counters and expressions can be operands in other expressions.
let expression_index = u32::MAX - id_descending_from_max;
num_expressions = std::cmp::max(num_expressions, expression_index + 1);
}
_ => {}
}
}
}
CoverageInfo { num_counters, num_expressions }
}
let mut coverage_visitor =
CoverageVisitor { info: CoverageInfo { num_counters: 0, num_expressions: 0 } };
fn call_terminators(data: &'tcx BasicBlockData<'tcx>) -> Option<&'tcx Terminator<'tcx>> {
let terminator = data.terminator();
match terminator.kind {
TerminatorKind::Call { .. } => Some(terminator),
_ => None,
}
coverage_visitor.visit_body(mir_body);
coverage_visitor.info
}
impl<'tcx> MirPass<'tcx> for InstrumentCoverage {
......@@ -104,19 +72,6 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, mir_body: &mut mir::
}
}
/// Distinguishes the expression operators.
enum Op {
Add,
Subtract,
}
struct InjectedCall<'tcx> {
func: Operand<'tcx>,
args: Vec<Operand<'tcx>>,
span: Span,
inject_at: Span,
}
struct Instrumentor<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
mir_def_id: DefId,
......@@ -143,21 +98,21 @@ fn new(tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, mir_body: &'a mut mir::Body<'tcx
}
/// Counter IDs start from zero and go up.
fn next_counter(&mut self) -> u32 {
fn next_counter(&mut self) -> CounterValueReference {
assert!(self.num_counters < u32::MAX - self.num_expressions);
let next = self.num_counters;
self.num_counters += 1;
next
CounterValueReference::from(next)
}
/// Expression IDs start from u32::MAX and go down because a CounterExpression can reference
/// (add or subtract counts) of both Counter regions and CounterExpression regions. The counter
/// expression operand IDs must be unique across both types.
fn next_expression(&mut self) -> u32 {
fn next_expression(&mut self) -> InjectedExpressionIndex {
assert!(self.num_counters < u32::MAX - self.num_expressions);
let next = u32::MAX - self.num_expressions;
self.num_expressions += 1;
next
InjectedExpressionIndex::from(next)
}
fn function_source_hash(&mut self) -> u64 {
......@@ -172,24 +127,14 @@ fn function_source_hash(&mut self) -> u64 {
}
fn inject_counters(&mut self) {
let mir_body = &self.mir_body;
let body_span = self.hir_body.value.span;
debug!("instrumenting {:?}, span: {:?}", self.mir_def_id, body_span);
// FIXME(richkadel): As a first step, counters are only injected at the top of each
// function. The complete solution will inject counters at each conditional code branch.
let _ignore = mir_body;
let id = self.next_counter();
let function_source_hash = self.function_source_hash();
let scope = rustc_middle::mir::OUTERMOST_SOURCE_SCOPE;
let is_cleanup = false;
let next_block = rustc_middle::mir::START_BLOCK;
self.inject_call(
self.make_counter(id, function_source_hash, body_span),
scope,
is_cleanup,
next_block,
);
let block = rustc_middle::mir::START_BLOCK;
let counter = self.make_counter();
self.inject_statement(counter, body_span, block);
// FIXME(richkadel): The next step to implement source based coverage analysis will be
// instrumenting branches within functions, and some regions will be counted by "counter
......@@ -198,211 +143,86 @@ fn inject_counters(&mut self) {
let fake_use = false;
if fake_use {
let add = false;
let lhs = 1;
let fake_counter = CoverageKind::Counter {
function_source_hash: self.function_source_hash(),
id: CounterValueReference::from_u32(1),
};
let fake_expression = CoverageKind::Expression {
id: InjectedExpressionIndex::from(u32::MAX - 1),
lhs: ExpressionOperandId::from_u32(1),
op: Op::Add,
rhs: ExpressionOperandId::from_u32(2),
};
let lhs = fake_counter.as_operand_id();
let op = if add { Op::Add } else { Op::Subtract };
let rhs = 2;
let rhs = fake_expression.as_operand_id();
let scope = rustc_middle::mir::OUTERMOST_SOURCE_SCOPE;
let is_cleanup = false;
let next_block = rustc_middle::mir::START_BLOCK;
let block = rustc_middle::mir::START_BLOCK;
let id = self.next_expression();
self.inject_call(
self.make_expression(id, body_span, lhs, op, rhs),
scope,
is_cleanup,
next_block,
);
let expression = self.make_expression(lhs, op, rhs);
self.inject_statement(expression, body_span, block);
}
}
fn make_counter(&self, id: u32, function_source_hash: u64, span: Span) -> InjectedCall<'tcx> {
let inject_at = span.shrink_to_lo();
let func = function_handle(
self.tcx,
self.tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None),
inject_at,
);
let mut args = Vec::new();
use count_code_region_args::*;
debug_assert_eq!(FUNCTION_SOURCE_HASH, args.len());
args.push(self.const_u64(function_source_hash, inject_at));
debug_assert_eq!(COUNTER_ID, args.len());
args.push(self.const_u32(id, inject_at));
InjectedCall { func, args, span, inject_at }
fn make_counter(&mut self) -> CoverageKind {
CoverageKind::Counter {
function_source_hash: self.function_source_hash(),
id: self.next_counter(),
}
}
fn make_expression(
&self,
id: u32,
span: Span,
lhs: u32,
&mut self,
lhs: ExpressionOperandId,
op: Op,
rhs: u32,
) -> InjectedCall<'tcx> {
let inject_at = span.shrink_to_lo();
let func = function_handle(
self.tcx,
self.tcx.require_lang_item(
match op {
Op::Add => lang_items::CoverageCounterAddFnLangItem,
Op::Subtract => lang_items::CoverageCounterSubtractFnLangItem,
},
None,
),
inject_at,
);
let mut args = Vec::new();
use coverage_counter_expression_args::*;
debug_assert_eq!(EXPRESSION_ID, args.len());
args.push(self.const_u32(id, inject_at));
debug_assert_eq!(LEFT_ID, args.len());
args.push(self.const_u32(lhs, inject_at));
debug_assert_eq!(RIGHT_ID, args.len());
args.push(self.const_u32(rhs, inject_at));
InjectedCall { func, args, span, inject_at }
rhs: ExpressionOperandId,
) -> CoverageKind {
CoverageKind::Expression { id: self.next_expression(), lhs, op, rhs }
}
fn inject_call(
&mut self,
call: InjectedCall<'tcx>,
scope: SourceScope,
is_cleanup: bool,
next_block: BasicBlock,
) {
let InjectedCall { func, mut args, span, inject_at } = call;
debug!(
" injecting {}call to {:?}({:?}) at: {:?}, scope: {:?}",
if is_cleanup { "cleanup " } else { "" },
func,
args,
inject_at,
scope,
);
let mut patch = MirPatch::new(self.mir_body);
let (file_name, start_line, start_col, end_line, end_col) = self.code_region(&span);
// FIXME(richkadel): Note that `const_str()` results in the creation of an `Allocation` to
// hold one copy of each unique filename. It looks like that `Allocation` may translate into
// the creation of an `@alloc` in LLVM IR that is never actually used by runtime code.
//
// Example LLVM IR:
//
// @alloc4 = private unnamed_addr constant <{ [43 x i8] }> \
// <{ [43 x i8] c"C:\\msys64\\home\\richkadel\\rust\\rust_basic.rs" }>, align 1
//
// Can I flag the alloc as something not to be added to codegen? Or somehow remove it before
// it gets added to the LLVM IR? Do we need some kind of reference counting to know it's
// not used by any runtime code?
//
// This question is moot if I convert the Call Terminators to Statements, I believe:
// https://rust-lang.zulipchat.com/#narrow/stream/233931-t-compiler.2Fmajor-changes/topic/Implement.20LLVM-compatible.20source-based.20cod.20compiler-team.23278/near/206731748
args.push(self.const_str(&file_name, inject_at));
args.push(self.const_u32(start_line, inject_at));
args.push(self.const_u32(start_col, inject_at));
args.push(self.const_u32(end_line, inject_at));
args.push(self.const_u32(end_col, inject_at));
let temp = patch.new_temp(self.tcx.mk_unit(), inject_at);
let new_block = patch.new_block(placeholder_block(inject_at, scope, is_cleanup));
patch.patch_terminator(
new_block,
TerminatorKind::Call {
func,
args,
// new_block will swapped with the next_block, after applying patch
destination: Some((Place::from(temp), new_block)),
cleanup: None,
from_hir_call: false,
fn_span: inject_at,
},
);
patch.add_statement(new_block.start_location(), StatementKind::StorageLive(temp));
patch.add_statement(next_block.start_location(), StatementKind::StorageDead(temp));
patch.apply(self.mir_body);
// To insert the `new_block` in front of the first block in the counted branch (the
// `next_block`), just swap the indexes, leaving the rest of the graph unchanged.
self.mir_body.basic_blocks_mut().swap(next_block, new_block);
}
fn inject_statement(&mut self, coverage_kind: CoverageKind, span: Span, block: BasicBlock) {
let code_region = make_code_region(self.tcx, &span);
debug!(" injecting statement {:?} covering {:?}", coverage_kind, code_region);
/// Convert the Span into its file name, start line and column, and end line and column
fn code_region(&self, span: &Span) -> (String, u32, u32, u32, u32) {
let source_map = self.tcx.sess.source_map();
let start = source_map.lookup_char_pos(span.lo());
let end = if span.hi() == span.lo() {
start.clone()
} else {
let end = source_map.lookup_char_pos(span.hi());
debug_assert_eq!(
start.file.name,
end.file.name,
"Region start ({:?} -> {:?}) and end ({:?} -> {:?}) don't come from the same source file!",
span.lo(),
start,
span.hi(),
end
);
end
let data = &mut self.mir_body[block];
let source_info = data.terminator().source_info;
let statement = Statement {
source_info,
kind: StatementKind::Coverage(box Coverage { kind: coverage_kind, code_region }),
};
match &start.file.name {
FileName::Real(RealFileName::Named(path)) => (
path.to_string_lossy().to_string(),
start.line as u32,
start.col.to_u32() + 1,
end.line as u32,
end.col.to_u32() + 1,
),
_ => {
bug!("start.file.name should be a RealFileName, but it was: {:?}", start.file.name)
}
}
}
fn const_str(&self, value: &str, span: Span) -> Operand<'tcx> {
Operand::const_from_str(self.tcx, value, span)
}
fn const_u32(&self, value: u32, span: Span) -> Operand<'tcx> {
Operand::const_from_scalar(self.tcx, self.tcx.types.u32, Scalar::from_u32(value), span)
}
fn const_u64(&self, value: u64, span: Span) -> Operand<'tcx> {
Operand::const_from_scalar(self.tcx, self.tcx.types.u64, Scalar::from_u64(value), span)
data.statements.push(statement);
}
}
fn function_handle<'tcx>(tcx: TyCtxt<'tcx>, fn_def_id: DefId, span: Span) -> Operand<'tcx> {
let ret_ty = tcx.fn_sig(fn_def_id).output();
let ret_ty = ret_ty.no_bound_vars().unwrap();
let substs = tcx.mk_substs(::std::iter::once(ty::subst::GenericArg::from(ret_ty)));
Operand::function_handle(tcx, fn_def_id, substs, span)
}
fn placeholder_block(span: Span, scope: SourceScope, is_cleanup: bool) -> BasicBlockData<'tcx> {
BasicBlockData {
statements: vec![],
terminator: Some(Terminator {
source_info: SourceInfo { span, scope },
// this gets overwritten by the counter Call
kind: TerminatorKind::Unreachable,
}),
is_cleanup,
/// Convert the Span into its file name, start line and column, and end line and column
///
/// Columns are converted from the source map's 0-based values to 1-based values.
/// Panics (ICE) if the span does not come from a real, named source file.
fn make_code_region<'tcx>(tcx: TyCtxt<'tcx>, span: &Span) -> CodeRegion {
    let source_map = tcx.sess.source_map();
    let start = source_map.lookup_char_pos(span.lo());
    // For an empty span (hi == lo), reuse the start position instead of performing a
    // second, identical lookup.
    let end = if span.hi() == span.lo() {
        start.clone()
    } else {
        let end = source_map.lookup_char_pos(span.hi());
        // Both endpoints must resolve to the same file; anything else is a bug in the
        // caller-supplied span.
        debug_assert_eq!(
            start.file.name,
            end.file.name,
            "Region start ({:?} -> {:?}) and end ({:?} -> {:?}) don't come from the same source file!",
            span.lo(),
            start,
            span.hi(),
            end
        );
        end
    };
    match &start.file.name {
        FileName::Real(RealFileName::Named(path)) => CodeRegion {
            // Intern the path so `CodeRegion` stores a `Symbol`, not an owned string.
            file_name: Symbol::intern(&path.to_string_lossy()),
            start_line: start.line as u32,
            start_col: start.col.to_u32() + 1,
            end_line: end.line as u32,
            end_col: end.col.to_u32() + 1,
        },
        _ => bug!("start.file.name should be a RealFileName, but it was: {:?}", start.file.name),
    }
}
......
......@@ -273,6 +273,7 @@ fn check_statement(
| StatementKind::StorageDead(_)
| StatementKind::Retag { .. }
| StatementKind::AscribeUserType(..)
| StatementKind::Coverage(..)
| StatementKind::Nop => Ok(()),
}
}
......
......@@ -38,6 +38,7 @@ fn is_nop_landing_pad(
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::AscribeUserType(..)
| StatementKind::Coverage(..)
| StatementKind::Nop => {
// These are all nops in a landing pad
}
......
......@@ -3,7 +3,6 @@
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::struct_span_err;
use rustc_hir as hir;
use rustc_hir::fake_lang_items::FAKE_ITEMS_REFS;
use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc_hir::lang_items;
use rustc_hir::weak_lang_items::WEAK_ITEMS_REFS;
......@@ -71,20 +70,11 @@ fn verify<'tcx>(tcx: TyCtxt<'tcx>, items: &lang_items::LanguageItems) {
}
impl<'a, 'tcx> Context<'a, 'tcx> {
fn register(&mut self, name: Symbol, span: Span, hir_id: hir::HirId) {
fn register(&mut self, name: Symbol, span: Span) {
if let Some(&item) = WEAK_ITEMS_REFS.get(&name) {
if self.items.require(item).is_err() {
self.items.missing.push(item);
}
} else if let Some(&item) = FAKE_ITEMS_REFS.get(&name) {
// Ensure "fake lang items" are registered. These are `extern` lang items that are
// injected into the MIR automatically (such as source code coverage counters), but are
// never actually linked; therefore, unlike "weak lang items", they cannot by registered
// when used, because they never appear to be used.
if self.items.items[item as usize].is_none() {
let item_def_id = self.tcx.hir().local_def_id(hir_id).to_def_id();
self.items.items[item as usize] = Some(item_def_id);
}
} else {
struct_span_err!(self.tcx.sess, span, E0264, "unknown external lang item: `{}`", name)
.emit();
......@@ -102,7 +92,7 @@ fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
fn visit_foreign_item(&mut self, i: &hir::ForeignItem<'_>) {
let check_name = |attr, sym| self.tcx.sess.check_name(attr, sym);
if let Some((lang_item, _)) = hir::lang_items::extract(check_name, &i.attrs) {
self.register(lang_item, i.span, i.hir_id);
self.register(lang_item, i.span);
}
intravisit::walk_foreign_item(self, i)
}
......
......@@ -379,10 +379,6 @@
core_intrinsics,
cosf32,
cosf64,
count_code_region,
coverage_counter_add,
coverage_counter_subtract,
coverage_unreachable,
crate_id,
crate_in_paths,
crate_local,
......
......@@ -379,47 +379,6 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
sym::nontemporal_store => (1, vec![tcx.mk_mut_ptr(param(0)), param(0)], tcx.mk_unit()),
sym::count_code_region => (
0,
vec![
tcx.types.u64,
tcx.types.u32,
tcx.mk_static_str(),
tcx.types.u32,
tcx.types.u32,
tcx.types.u32,
tcx.types.u32,
],
tcx.mk_unit(),
),
sym::coverage_counter_add | sym::coverage_counter_subtract => (
0,
vec![
tcx.types.u32,
tcx.types.u32,
tcx.types.u32,
tcx.mk_static_str(),
tcx.types.u32,
tcx.types.u32,
tcx.types.u32,
tcx.types.u32,
],
tcx.mk_unit(),
),
sym::coverage_unreachable => (
0,
vec![
tcx.mk_static_str(),
tcx.types.u32,
tcx.types.u32,
tcx.types.u32,
tcx.types.u32,
],
tcx.mk_unit(),
),
other => {
struct_span_err!(
tcx.sess,
......
......@@ -3,32 +3,10 @@
fn bar() -> bool {
let mut _0: bool; // return place in scope 0 at /the/src/instrument_coverage.rs:19:13: 19:17
+ let mut _1: (); // in scope 0 at /the/src/instrument_coverage.rs:19:18: 19:18
bb0: {
+ StorageLive(_1); // scope 0 at /the/src/instrument_coverage.rs:19:18: 19:18
+ _1 = const std::intrinsics::count_code_region(const 10208505205182607101_u64, const 0_u32, const "/the/src/instrument_coverage.rs", const 19_u32, const 18_u32, const 21_u32, const 2_u32) -> bb2; // scope 0 at /the/src/instrument_coverage.rs:19:18: 19:18
+ // ty::Const
+ // + ty: unsafe extern "rust-intrinsic" fn(u64, u32, &'static str, u32, u32, u32, u32) {std::intrinsics::count_code_region}
+ // + val: Value(Scalar(<ZST>))
+ // mir::Constant
+ // + span: /the/src/instrument_coverage.rs:19:18: 19:18
+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u64, u32, &'static str, u32, u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
+ // ty::Const
+ // + ty: &str
+ // + val: Value(Slice { data: Allocation { bytes: [47, 116, 104, 101, 47, 115, 114, 99, 47, 105, 110, 115, 116, 114, 117, 109, 101, 110, 116, 95, 99, 111, 118, 101, 114, 97, 103, 101, 46, 114, 115], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [2147483647], len: Size { raw: 31 } }, size: Size { raw: 31 }, align: Align { pow2: 0 }, mutability: Not, extra: () }, start: 0, end: 31 })
+ // mir::Constant
+ // + span: /the/src/instrument_coverage.rs:19:18: 19:18
+ // + literal: Const { ty: &str, val: Value(Slice { data: Allocation { bytes: [47, 116, 104, 101, 47, 115, 114, 99, 47, 105, 110, 115, 116, 114, 117, 109, 101, 110, 116, 95, 99, 111, 118, 101, 114, 97, 103, 101, 46, 114, 115], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [2147483647], len: Size { raw: 31 } }, size: Size { raw: 31 }, align: Align { pow2: 0 }, mutability: Not, extra: () }, start: 0, end: 31 }) }
+ }
+
+ bb1 (cleanup): {
+ resume; // scope 0 at /the/src/instrument_coverage.rs:19:1: 21:2
+ }
+
+ bb2: {
+ StorageDead(_1); // scope 0 at /the/src/instrument_coverage.rs:20:5: 20:9
_0 = const true; // scope 0 at /the/src/instrument_coverage.rs:20:5: 20:9
+ Coverage { kind: Counter { function_source_hash: 10208505205182607101, id: CounterValueReference(0) }, code_region: /the/src/instrument_coverage.rs:19:18 - 21:2 }; // scope 0 at /the/src/instrument_coverage.rs:21:2: 21:2
return; // scope 0 at /the/src/instrument_coverage.rs:21:2: 21:2
}
}
......
......@@ -6,24 +6,10 @@
let mut _1: (); // in scope 0 at /the/src/instrument_coverage.rs:10:1: 16:2
let mut _2: bool; // in scope 0 at /the/src/instrument_coverage.rs:12:12: 12:17
let mut _3: !; // in scope 0 at /the/src/instrument_coverage.rs:12:18: 14:10
+ let mut _4: (); // in scope 0 at /the/src/instrument_coverage.rs:10:11: 10:11
bb0: {
- falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at /the/src/instrument_coverage.rs:11:5: 15:6
+ StorageLive(_4); // scope 0 at /the/src/instrument_coverage.rs:10:11: 10:11
+ _4 = const std::intrinsics::count_code_region(const 16004455475339839479_u64, const 0_u32, const "/the/src/instrument_coverage.rs", const 10_u32, const 11_u32, const 16_u32, const 2_u32) -> bb7; // scope 0 at /the/src/instrument_coverage.rs:10:11: 10:11
+ // ty::Const
+ // + ty: unsafe extern "rust-intrinsic" fn(u64, u32, &'static str, u32, u32, u32, u32) {std::intrinsics::count_code_region}
+ // + val: Value(Scalar(<ZST>))
+ // mir::Constant
+ // + span: /the/src/instrument_coverage.rs:10:11: 10:11
+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u64, u32, &'static str, u32, u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
+ // ty::Const
+ // + ty: &str
+ // + val: Value(Slice { data: Allocation { bytes: [47, 116, 104, 101, 47, 115, 114, 99, 47, 105, 110, 115, 116, 114, 117, 109, 101, 110, 116, 95, 99, 111, 118, 101, 114, 97, 103, 101, 46, 114, 115], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [2147483647], len: Size { raw: 31 } }, size: Size { raw: 31 }, align: Align { pow2: 0 }, mutability: Not, extra: () }, start: 0, end: 31 })
+ // mir::Constant
+ // + span: /the/src/instrument_coverage.rs:10:11: 10:11
+ // + literal: Const { ty: &str, val: Value(Slice { data: Allocation { bytes: [47, 116, 104, 101, 47, 115, 114, 99, 47, 105, 110, 115, 116, 114, 117, 109, 101, 110, 116, 95, 99, 111, 118, 101, 114, 97, 103, 101, 46, 114, 115], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [2147483647], len: Size { raw: 31 } }, size: Size { raw: 31 }, align: Align { pow2: 0 }, mutability: Not, extra: () }, start: 0, end: 31 }) }
+ Coverage { kind: Counter { function_source_hash: 16004455475339839479, id: CounterValueReference(0) }, code_region: /the/src/instrument_coverage.rs:10:11 - 16:2 }; // scope 0 at /the/src/instrument_coverage.rs:11:5: 15:6
falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at /the/src/instrument_coverage.rs:11:5: 15:6
}
bb1: {
......@@ -72,11 +58,6 @@
// + literal: Const { ty: (), val: Value(Scalar(<ZST>)) }
StorageDead(_2); // scope 0 at /the/src/instrument_coverage.rs:15:5: 15:6
return; // scope 0 at /the/src/instrument_coverage.rs:16:2: 16:2
+ }
+
+ bb7: {
+ StorageDead(_4); // scope 0 at /the/src/instrument_coverage.rs:11:5: 15:6
+ falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at /the/src/instrument_coverage.rs:11:5: 15:6
}
}
// Test that the initial version of Rust coverage injects count_code_region() placeholder calls,
// at the top of each function. The placeholders are later converted into LLVM instrprof.increment
// Test that the initial version of Rust coverage injects Coverage statements at the top of each
// function. The Coverage Counter statements are later converted into LLVM instrprof.increment
// intrinsics, during codegen.
// needs-profiler-support
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册