Commit db0492ac authored by bors

Auto merge of #74733 - richkadel:llvm-coverage-map-gen-5, r=tmandry

Fixed coverage map issues; better aligned with LLVM APIs

Found some problems with the coverage map encoding when testing with more than one counter per function.

While debugging, I realized some better ways to structure the Rust implementation of the coverage mapping generator. I refactored somewhat, resulting in less code overall, expanded coverage of LLVM Coverage Map capabilities, and much closer alignment with LLVM data structures, APIs, and naming.

This should be easier to follow and easier to maintain.

r? @tmandry

Rust compiler MCP rust-lang/compiler-team#278
Relevant issue: #34701 - Implement support for LLVM's code coverage instrumentation
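For orientation, a minimal sketch (not part of this change) of the per-function record the generator emits. The field names below are invented for illustration; the actual record is built in `make_function_record` (further down in the diff) as a packed LLVM struct of three constants.

// Hypothetical Rust-side view of the packed per-function record:
// (hash of the mangled function name, byte size of this function's encoded
// coverage mapping data, function source hash).
#[repr(C, packed)]
struct FunctionRecordSketch {
    name_hash: u64,            // coverageinfo::compute_hash(mangled_function_name)
    mapping_data_size: u32,    // bytes this function wrote to the mappings buffer
    function_source_hash: u64, // detects stale coverage data for a changed function
}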
use crate::common::CodegenCx;
use crate::coverageinfo;
use crate::llvm;
use llvm::coverageinfo::CounterMappingRegion;
use log::debug;
use rustc_codegen_ssa::coverageinfo::map::*;
use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods, MiscMethods};
use rustc_codegen_ssa::coverageinfo::map::{Counter, CounterExpression, Region};
use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods};
use rustc_data_structures::fx::FxHashMap;
use rustc_llvm::RustString;
use rustc_middle::ty::Instance;
use rustc_middle::{bug, mir};
use std::collections::BTreeMap;
use std::ffi::CString;
use std::path::PathBuf;
// FIXME(richkadel): Complete all variations of generating and exporting the coverage map to LLVM.
// The current implementation is an initial foundation with basic capabilities (Counters, but not
// CounterExpressions, etc.).
/// Generates and exports the Coverage Map.
///
......@@ -32,174 +24,123 @@
/// undocumented details in Clang's implementation (that may or may not be important) were also
/// replicated for Rust's Coverage Map.
pub fn finalize<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>) {
let mut coverage_writer = CoverageMappingWriter::new(cx);
let function_coverage_map = cx.coverage_context().take_function_coverage_map();
if function_coverage_map.is_empty() {
// This module has no functions with coverage instrumentation
return;
}
let mut mapgen = CoverageMapGenerator::new();
// Encode coverage mappings and generate function records
let mut function_records = Vec::<&'ll llvm::Value>::new();
let coverage_mappings_buffer = llvm::build_byte_buffer(|coverage_mappings_buffer| {
for (instance, function_coverage) in function_coverage_map.into_iter() {
if let Some(function_record) = coverage_writer.write_function_mappings_and_record(
instance,
function_coverage,
coverage_mappings_buffer,
) {
function_records.push(function_record);
}
debug!("Generate coverage map for: {:?}", instance);
let mangled_function_name = cx.tcx.symbol_name(instance).to_string();
let function_source_hash = function_coverage.source_hash();
let (expressions, counter_regions) =
function_coverage.get_expressions_and_counter_regions();
let old_len = coverage_mappings_buffer.len();
mapgen.write_coverage_mappings(expressions, counter_regions, coverage_mappings_buffer);
let mapping_data_size = coverage_mappings_buffer.len() - old_len;
debug_assert!(
mapping_data_size > 0,
"Every `FunctionCoverage` should have at least one counter"
);
let function_record = mapgen.make_function_record(
cx,
mangled_function_name,
function_source_hash,
mapping_data_size,
);
function_records.push(function_record);
}
});
// Encode all filenames covered in this module, ordered by `file_id`
// Encode all filenames referenced by counters/expressions in this module
let filenames_buffer = llvm::build_byte_buffer(|filenames_buffer| {
coverageinfo::write_filenames_section_to_buffer(
&coverage_writer.filenames,
filenames_buffer,
);
coverageinfo::write_filenames_section_to_buffer(&mapgen.filenames, filenames_buffer);
});
if coverage_mappings_buffer.len() > 0 {
// Generate the LLVM IR representation of the coverage map and store it in a well-known
// global constant.
coverage_writer.write_coverage_map(
function_records,
filenames_buffer,
coverage_mappings_buffer,
);
}
// Generate the LLVM IR representation of the coverage map and store it in a well-known global
mapgen.save_generated_coverage_map(
cx,
function_records,
filenames_buffer,
coverage_mappings_buffer,
);
}
struct CoverageMappingWriter<'a, 'll, 'tcx> {
cx: &'a CodegenCx<'ll, 'tcx>,
struct CoverageMapGenerator {
filenames: Vec<CString>,
filename_to_index: FxHashMap<CString, u32>,
}
impl<'a, 'll, 'tcx> CoverageMappingWriter<'a, 'll, 'tcx> {
fn new(cx: &'a CodegenCx<'ll, 'tcx>) -> Self {
Self { cx, filenames: Vec::new(), filename_to_index: FxHashMap::<CString, u32>::default() }
impl CoverageMapGenerator {
fn new() -> Self {
Self { filenames: Vec::new(), filename_to_index: FxHashMap::default() }
}
/// For the given function, get the coverage region data, stream it to the given buffer, and
/// then generate and return a new function record.
fn write_function_mappings_and_record(
/// Using the `expressions` and `counter_regions` collected for the current function, generate
/// the `mapping_regions` and `virtual_file_mapping`, and capture any new filenames. Then use
/// LLVM APIs to encode the `virtual_file_mapping`, `expressions`, and `mapping_regions` into
/// the given `coverage_mappings` byte buffer, compliant with the LLVM Coverage Mapping format.
fn write_coverage_mappings(
&mut self,
instance: Instance<'tcx>,
mut function_coverage: FunctionCoverage,
expressions: Vec<CounterExpression>,
counter_regions: impl Iterator<Item = (Counter, &'a Region)>,
coverage_mappings_buffer: &RustString,
) -> Option<&'ll llvm::Value> {
let cx = self.cx;
let coverageinfo: &mir::CoverageInfo = cx.tcx.coverageinfo(instance.def_id());
debug!(
"Generate coverage map for: {:?}, num_counters: {}, num_expressions: {}",
instance, coverageinfo.num_counters, coverageinfo.num_expressions
);
debug_assert!(coverageinfo.num_counters > 0);
let regions_in_file_order = function_coverage.regions_in_file_order(cx.sess().source_map());
if regions_in_file_order.len() == 0 {
return None;
) {
let mut counter_regions = counter_regions.collect::<Vec<_>>();
if counter_regions.is_empty() {
return;
}
// Stream the coverage mapping regions for the function (`instance`) to the buffer, and
// compute the data byte size used.
let old_len = coverage_mappings_buffer.len();
self.regions_to_mappings(regions_in_file_order, coverage_mappings_buffer);
let mapping_data_size = coverage_mappings_buffer.len() - old_len;
debug_assert!(mapping_data_size > 0);
let mangled_function_name = cx.tcx.symbol_name(instance).to_string();
let name_ref = coverageinfo::compute_hash(&mangled_function_name);
let function_source_hash = function_coverage.source_hash();
// Generate and return the function record
let name_ref_val = cx.const_u64(name_ref);
let mapping_data_size_val = cx.const_u32(mapping_data_size as u32);
let func_hash_val = cx.const_u64(function_source_hash);
Some(cx.const_struct(
&[name_ref_val, mapping_data_size_val, func_hash_val],
/*packed=*/ true,
))
}
/// For each coverage region, extract its coverage data from the earlier coverage analysis.
/// Use LLVM APIs to convert the data into buffered bytes compliant with the LLVM Coverage
/// Mapping format.
fn regions_to_mappings(
&mut self,
regions_in_file_order: BTreeMap<PathBuf, BTreeMap<CoverageLoc, (usize, CoverageKind)>>,
coverage_mappings_buffer: &RustString,
) {
let mut virtual_file_mapping = Vec::new();
let mut mapping_regions = coverageinfo::SmallVectorCounterMappingRegion::new();
let mut expressions = coverageinfo::SmallVectorCounterExpression::new();
for (file_id, (file_path, file_coverage_regions)) in
regions_in_file_order.into_iter().enumerate()
{
let file_id = file_id as u32;
let filename = CString::new(file_path.to_string_lossy().to_string())
.expect("null error converting filename to C string");
debug!(" file_id: {} = '{:?}'", file_id, filename);
let filenames_index = match self.filename_to_index.get(&filename) {
Some(index) => *index,
None => {
let index = self.filenames.len() as u32;
self.filenames.push(filename.clone());
self.filename_to_index.insert(filename, index);
index
let mut mapping_regions = Vec::new();
let mut current_file_path = None;
let mut current_file_id = 0;
// Convert the list of (Counter, Region) pairs to an array of `CounterMappingRegion`, sorted
// by filename and position. Capture any new files to compute the `CounterMappingRegion`s
// `file_id` (indexing files referenced by the current function), and construct the
// function-specific `virtual_file_mapping` from `file_id` to its index in the module's
// `filenames` array.
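// Illustrative example (not from this change): if the module-wide `filenames` array
// ends up as ["a.rs", "b.rs", "c.rs"] and the current function's regions reference
// only "a.rs" and "c.rs", then this function's `virtual_file_mapping` is [0, 2]:
// its local `file_id` 0 resolves to "a.rs" and local `file_id` 1 resolves to "c.rs".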
counter_regions.sort_unstable_by_key(|(_counter, region)| *region);
for (counter, region) in counter_regions {
let (file_path, start_line, start_col, end_line, end_col) = region.file_start_and_end();
let same_file = current_file_path.as_ref().map_or(false, |p| p == file_path);
if !same_file {
if current_file_path.is_some() {
current_file_id += 1;
}
};
virtual_file_mapping.push(filenames_index);
let mut mapping_indexes = vec![0 as u32; file_coverage_regions.len()];
for (mapping_index, (region_id, _)) in file_coverage_regions.values().enumerate() {
mapping_indexes[*region_id] = mapping_index as u32;
}
for (region_loc, (region_id, region_kind)) in file_coverage_regions.into_iter() {
let mapping_index = mapping_indexes[region_id];
match region_kind {
CoverageKind::Counter => {
debug!(
" Counter {}, file_id: {}, region_loc: {}",
mapping_index, file_id, region_loc
);
mapping_regions.push_from(
mapping_index,
file_id,
region_loc.start_line,
region_loc.start_col,
region_loc.end_line,
region_loc.end_col,
);
}
CoverageKind::CounterExpression(lhs, op, rhs) => {
debug!(
" CounterExpression {} = {} {:?} {}, file_id: {}, region_loc: {:?}",
mapping_index, lhs, op, rhs, file_id, region_loc,
);
mapping_regions.push_from(
mapping_index,
file_id,
region_loc.start_line,
region_loc.start_col,
region_loc.end_line,
region_loc.end_col,
);
expressions.push_from(op, lhs, rhs);
}
CoverageKind::Unreachable => {
debug!(
" Unreachable region, file_id: {}, region_loc: {:?}",
file_id, region_loc,
);
bug!("Unreachable region not expected and not yet handled!")
// FIXME(richkadel): implement and call
// mapping_regions.push_from(...) for unreachable regions
current_file_path = Some(file_path.clone());
let filename = CString::new(file_path.to_string_lossy().to_string())
.expect("null error converting filename to C string");
debug!(" file_id: {} = '{:?}'", current_file_id, filename);
let filenames_index = match self.filename_to_index.get(&filename) {
Some(index) => *index,
None => {
let index = self.filenames.len() as u32;
self.filenames.push(filename.clone());
self.filename_to_index.insert(filename.clone(), index);
index
}
}
};
virtual_file_mapping.push(filenames_index);
}
mapping_regions.push(CounterMappingRegion::code_region(
counter,
current_file_id,
start_line,
start_col,
end_line,
end_col,
));
}
// Encode and append the current function's coverage mapping data
......@@ -211,14 +152,35 @@ fn regions_to_mappings(
);
}
fn write_coverage_map(
/// Generate and return the function record `Value`
fn make_function_record(
&mut self,
cx: &CodegenCx<'ll, 'tcx>,
mangled_function_name: String,
function_source_hash: u64,
mapping_data_size: usize,
) -> &'ll llvm::Value {
let name_ref = coverageinfo::compute_hash(&mangled_function_name);
let name_ref_val = cx.const_u64(name_ref);
let mapping_data_size_val = cx.const_u32(mapping_data_size as u32);
let func_hash_val = cx.const_u64(function_source_hash);
cx.const_struct(
&[name_ref_val, mapping_data_size_val, func_hash_val],
/*packed=*/ true,
)
}
/// Combine the filenames and coverage mappings buffers, construct coverage map header and the
/// array of function records, and combine everything into the complete coverage map. Save the
/// coverage map data into the LLVM IR as a static global using a specific, well-known section
/// and name.
fn save_generated_coverage_map(
self,
cx: &CodegenCx<'ll, 'tcx>,
function_records: Vec<&'ll llvm::Value>,
filenames_buffer: Vec<u8>,
mut coverage_mappings_buffer: Vec<u8>,
) {
let cx = self.cx;
// Concatenate the encoded filenames and encoded coverage mappings, and add additional zero
// bytes as-needed to ensure 8-byte alignment.
let mut coverage_size = coverage_mappings_buffer.len();
......
......@@ -4,8 +4,9 @@
use crate::common::CodegenCx;
use libc::c_uint;
use llvm::coverageinfo::CounterMappingRegion;
use log::debug;
use rustc_codegen_ssa::coverageinfo::map::*;
use rustc_codegen_ssa::coverageinfo::map::{CounterExpression, ExprKind, FunctionCoverage};
use rustc_codegen_ssa::traits::{
BaseTypeMethods, CoverageInfoBuilderMethods, CoverageInfoMethods, StaticMethods,
};
......@@ -23,7 +24,7 @@
/// A context object for maintaining all state needed by the coverageinfo module.
pub struct CrateCoverageContext<'tcx> {
// Coverage region data for each instrumented function identified by DefId.
pub(crate) function_coverage_map: RefCell<FxHashMap<Instance<'tcx>, FunctionCoverage>>,
pub(crate) function_coverage_map: RefCell<FxHashMap<Instance<'tcx>, FunctionCoverage<'tcx>>>,
}
impl<'tcx> CrateCoverageContext<'tcx> {
......@@ -31,7 +32,7 @@ pub fn new() -> Self {
Self { function_coverage_map: Default::default() }
}
pub fn take_function_coverage_map(&self) -> FxHashMap<Instance<'tcx>, FunctionCoverage> {
pub fn take_function_coverage_map(&self) -> FxHashMap<Instance<'tcx>, FunctionCoverage<'tcx>> {
self.function_coverage_map.replace(FxHashMap::default())
}
}
......@@ -47,44 +48,49 @@ fn add_counter_region(
&mut self,
instance: Instance<'tcx>,
function_source_hash: u64,
index: u32,
id: u32,
start_byte_pos: u32,
end_byte_pos: u32,
) {
debug!(
"adding counter to coverage_regions: instance={:?}, function_source_hash={}, index={}, byte range {}..{}",
instance, function_source_hash, index, start_byte_pos, end_byte_pos,
"adding counter to coverage_regions: instance={:?}, function_source_hash={}, id={}, \
byte range {}..{}",
instance, function_source_hash, id, start_byte_pos, end_byte_pos,
);
let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
coverage_regions
.entry(instance)
.or_insert_with(|| {
FunctionCoverage::with_coverageinfo(self.tcx.coverageinfo(instance.def_id()))
})
.add_counter(function_source_hash, index, start_byte_pos, end_byte_pos);
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
.add_counter(function_source_hash, id, start_byte_pos, end_byte_pos);
}
fn add_counter_expression_region(
&mut self,
instance: Instance<'tcx>,
index: u32,
id_descending_from_max: u32,
lhs: u32,
op: CounterOp,
op: ExprKind,
rhs: u32,
start_byte_pos: u32,
end_byte_pos: u32,
) {
debug!(
"adding counter expression to coverage_regions: instance={:?}, index={}, {} {:?} {}, byte range {}..{}",
instance, index, lhs, op, rhs, start_byte_pos, end_byte_pos,
"adding counter expression to coverage_regions: instance={:?}, id={}, {} {:?} {}, \
byte range {}..{}",
instance, id_descending_from_max, lhs, op, rhs, start_byte_pos, end_byte_pos,
);
let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
coverage_regions
.entry(instance)
.or_insert_with(|| {
FunctionCoverage::with_coverageinfo(self.tcx.coverageinfo(instance.def_id()))
})
.add_counter_expression(index, lhs, op, rhs, start_byte_pos, end_byte_pos);
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
.add_counter_expression(
id_descending_from_max,
lhs,
op,
rhs,
start_byte_pos,
end_byte_pos,
);
}
fn add_unreachable_region(
......@@ -100,108 +106,8 @@ fn add_unreachable_region(
let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
coverage_regions
.entry(instance)
.or_insert_with(|| {
FunctionCoverage::with_coverageinfo(self.tcx.coverageinfo(instance.def_id()))
})
.add_unreachable(start_byte_pos, end_byte_pos);
}
}
/// This struct wraps an opaque reference to the C++ template instantiation of
/// `llvm::SmallVector<coverage::CounterExpression>`. Each `coverage::CounterExpression` object is
/// constructed from primitive-typed arguments, and pushed to the `SmallVector`, in the C++
/// implementation of `LLVMRustCoverageSmallVectorCounterExpressionAdd()` (see
/// `src/rustllvm/CoverageMappingWrapper.cpp`).
pub struct SmallVectorCounterExpression<'a> {
pub raw: &'a mut llvm::coverageinfo::SmallVectorCounterExpression<'a>,
}
impl SmallVectorCounterExpression<'a> {
pub fn new() -> Self {
SmallVectorCounterExpression {
raw: unsafe { llvm::LLVMRustCoverageSmallVectorCounterExpressionCreate() },
}
}
pub fn as_ptr(&self) -> *const llvm::coverageinfo::SmallVectorCounterExpression<'a> {
self.raw
}
pub fn push_from(
&mut self,
kind: rustc_codegen_ssa::coverageinfo::CounterOp,
left_index: u32,
right_index: u32,
) {
unsafe {
llvm::LLVMRustCoverageSmallVectorCounterExpressionAdd(
&mut *(self.raw as *mut _),
kind,
left_index,
right_index,
)
}
}
}
impl Drop for SmallVectorCounterExpression<'a> {
fn drop(&mut self) {
unsafe {
llvm::LLVMRustCoverageSmallVectorCounterExpressionDispose(&mut *(self.raw as *mut _));
}
}
}
/// This struct wraps an opaque reference to the C++ template instantiation of
/// `llvm::SmallVector<coverage::CounterMappingRegion>`. Each `coverage::CounterMappingRegion`
/// object is constructed from primitive-typed arguments, and pushed to the `SmallVector`, in the
/// C++ implementation of `LLVMRustCoverageSmallVectorCounterMappingRegionAdd()` (see
/// `src/rustllvm/CoverageMappingWrapper.cpp`).
pub struct SmallVectorCounterMappingRegion<'a> {
pub raw: &'a mut llvm::coverageinfo::SmallVectorCounterMappingRegion<'a>,
}
impl SmallVectorCounterMappingRegion<'a> {
pub fn new() -> Self {
SmallVectorCounterMappingRegion {
raw: unsafe { llvm::LLVMRustCoverageSmallVectorCounterMappingRegionCreate() },
}
}
pub fn as_ptr(&self) -> *const llvm::coverageinfo::SmallVectorCounterMappingRegion<'a> {
self.raw
}
pub fn push_from(
&mut self,
index: u32,
file_id: u32,
line_start: u32,
column_start: u32,
line_end: u32,
column_end: u32,
) {
unsafe {
llvm::LLVMRustCoverageSmallVectorCounterMappingRegionAdd(
&mut *(self.raw as *mut _),
index,
file_id,
line_start,
column_start,
line_end,
column_end,
)
}
}
}
impl Drop for SmallVectorCounterMappingRegion<'a> {
fn drop(&mut self) {
unsafe {
llvm::LLVMRustCoverageSmallVectorCounterMappingRegionDispose(
&mut *(self.raw as *mut _),
);
}
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
.add_unreachable_region(start_byte_pos, end_byte_pos);
}
}
......@@ -218,8 +124,8 @@ pub(crate) fn write_filenames_section_to_buffer(filenames: &Vec<CString>, buffer
pub(crate) fn write_mapping_to_buffer(
virtual_file_mapping: Vec<u32>,
expressions: SmallVectorCounterExpression<'_>,
mapping_regions: SmallVectorCounterMappingRegion<'_>,
expressions: Vec<CounterExpression>,
mut mapping_regions: Vec<CounterMappingRegion>,
buffer: &RustString,
) {
unsafe {
......@@ -227,7 +133,9 @@ pub(crate) fn write_mapping_to_buffer(
virtual_file_mapping.as_ptr(),
virtual_file_mapping.len() as c_uint,
expressions.as_ptr(),
mapping_regions.as_ptr(),
expressions.len() as c_uint,
mapping_regions.as_mut_ptr(),
mapping_regions.len() as c_uint,
buffer,
);
}
......
......@@ -13,7 +13,7 @@
use rustc_codegen_ssa::base::{compare_simd_types, to_immediate, wants_msvc_seh};
use rustc_codegen_ssa::common::span_invalid_monomorphization_error;
use rustc_codegen_ssa::common::{IntPredicate, TypeKind};
use rustc_codegen_ssa::coverageinfo::CounterOp;
use rustc_codegen_ssa::coverageinfo::ExprKind;
use rustc_codegen_ssa::glue;
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
use rustc_codegen_ssa::mir::place::PlaceRef;
......@@ -101,7 +101,7 @@ fn is_codegen_intrinsic(
self.add_counter_region(
caller_instance,
op_to_u64(&args[FUNCTION_SOURCE_HASH]),
op_to_u32(&args[COUNTER_INDEX]),
op_to_u32(&args[COUNTER_ID]),
op_to_u32(&args[START_BYTE_POS]),
op_to_u32(&args[END_BYTE_POS]),
);
......@@ -111,14 +111,14 @@ fn is_codegen_intrinsic(
use coverage::coverage_counter_expression_args::*;
self.add_counter_expression_region(
caller_instance,
op_to_u32(&args[COUNTER_EXPRESSION_INDEX]),
op_to_u32(&args[LEFT_INDEX]),
op_to_u32(&args[EXPRESSION_ID]),
op_to_u32(&args[LEFT_ID]),
if intrinsic == sym::coverage_counter_add {
CounterOp::Add
ExprKind::Add
} else {
CounterOp::Subtract
ExprKind::Subtract
},
op_to_u32(&args[RIGHT_INDEX]),
op_to_u32(&args[RIGHT_ID]),
op_to_u32(&args[START_BYTE_POS]),
op_to_u32(&args[END_BYTE_POS]),
);
......@@ -219,7 +219,7 @@ fn codegen_intrinsic_call(
let num_counters = self.const_u32(coverageinfo.num_counters);
use coverage::count_code_region_args::*;
let hash = args[FUNCTION_SOURCE_HASH].immediate();
let index = args[COUNTER_INDEX].immediate();
let index = args[COUNTER_ID].immediate();
debug!(
"translating Rust intrinsic `count_code_region()` to LLVM intrinsic: \
instrprof.increment(fn_name={}, hash={:?}, num_counters={:?}, index={:?})",
......
#![allow(non_camel_case_types)]
#![allow(non_upper_case_globals)]
use super::coverageinfo::{SmallVectorCounterExpression, SmallVectorCounterMappingRegion};
use rustc_codegen_ssa::coverageinfo::map as coverage_map;
use super::debuginfo::{
DIArray, DIBasicType, DIBuilder, DICompositeType, DIDerivedType, DIDescriptor, DIEnumerator,
......@@ -653,13 +653,152 @@ struct InvariantOpaque<'a> {
pub type InlineAsmDiagHandler = unsafe extern "C" fn(&SMDiagnostic, *const c_void, c_uint);
pub mod coverageinfo {
use super::InvariantOpaque;
use super::coverage_map;
/// Aligns with [llvm::coverage::CounterMappingRegion::RegionKind](https://github.com/rust-lang/llvm-project/blob/rustc/10.0-2020-05-05/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L205-L221)
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct SmallVectorCounterExpression<'a>(InvariantOpaque<'a>);
pub enum RegionKind {
/// A CodeRegion associates some code with a counter
CodeRegion = 0,
/// An ExpansionRegion represents a file expansion region that associates
/// a source range with the expansion of a virtual source file, such as
/// for a macro instantiation or #include file.
ExpansionRegion = 1,
/// A SkippedRegion represents a source range with code that was skipped
/// by a preprocessor or similar means.
SkippedRegion = 2,
/// A GapRegion is like a CodeRegion, but its count is only set as the
/// line execution count when it's the only region in the line.
GapRegion = 3,
}
/// This struct provides LLVM's representation of a "CoverageMappingRegion", encoded into the
/// coverage map, in accordance with the
/// [LLVM Code Coverage Mapping Format](https://github.com/rust-lang/llvm-project/blob/llvmorg-8.0.0/llvm/docs/CoverageMappingFormat.rst#llvm-code-coverage-mapping-format).
/// The struct composes fields representing the `Counter` type and value(s) (injected counter
/// ID, or expression type and operands), the source file (an indirect index into a "filenames
/// array", encoded separately), and source location (start and end positions of the represented
/// code region).
///
/// Aligns with [llvm::coverage::CounterMappingRegion](https://github.com/rust-lang/llvm-project/blob/rustc/10.0-2020-05-05/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L223-L226)
/// Important: The Rust struct layout (order and types of fields) must match its C++
/// counterpart.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct SmallVectorCounterMappingRegion<'a>(InvariantOpaque<'a>);
pub struct CounterMappingRegion {
/// The counter type and type-dependent counter data, if any.
counter: coverage_map::Counter,
/// An indirect reference to the source filename. In the LLVM Coverage Mapping Format, the
/// file_id is an index into a function-specific `virtual_file_mapping` array of indexes
/// that, in turn, are used to look up the filename for this region.
file_id: u32,
/// If the `RegionKind` is an `ExpansionRegion`, the `expanded_file_id` can be used to find
/// the mapping regions created as a result of macro expansion, by checking if their file id
/// matches the expanded file id.
expanded_file_id: u32,
/// 1-based starting line of the mapping region.
start_line: u32,
/// 1-based starting column of the mapping region.
start_col: u32,
/// 1-based ending line of the mapping region.
end_line: u32,
/// 1-based ending column of the mapping region. If the high bit is set, the current
/// mapping region is a gap area.
end_col: u32,
kind: RegionKind,
}
impl CounterMappingRegion {
pub fn code_region(
counter: coverage_map::Counter,
file_id: u32,
start_line: u32,
start_col: u32,
end_line: u32,
end_col: u32,
) -> Self {
Self {
counter,
file_id,
expanded_file_id: 0,
start_line,
start_col,
end_line,
end_col,
kind: RegionKind::CodeRegion,
}
}
pub fn expansion_region(
file_id: u32,
expanded_file_id: u32,
start_line: u32,
start_col: u32,
end_line: u32,
end_col: u32,
) -> Self {
Self {
counter: coverage_map::Counter::zero(),
file_id,
expanded_file_id,
start_line,
start_col,
end_line,
end_col,
kind: RegionKind::ExpansionRegion,
}
}
pub fn skipped_region(
file_id: u32,
start_line: u32,
start_col: u32,
end_line: u32,
end_col: u32,
) -> Self {
Self {
counter: coverage_map::Counter::zero(),
file_id,
expanded_file_id: 0,
start_line,
start_col,
end_line,
end_col,
kind: RegionKind::SkippedRegion,
}
}
pub fn gap_region(
counter: coverage_map::Counter,
file_id: u32,
start_line: u32,
start_col: u32,
end_line: u32,
end_col: u32,
) -> Self {
Self {
counter,
file_id,
expanded_file_id: 0,
start_line,
start_col,
end_line,
end_col: ((1 as u32) << 31) | end_col,
kind: RegionKind::GapRegion,
}
}
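// Illustrative note (not from this change): for a gap region ending at column 10, the
// encoded `end_col` is (1 << 31) | 10; consumers of the coverage map mask off the high
// bit to recover the column and treat the set bit as the gap marker.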
}
}
pub mod debuginfo {
......@@ -1645,33 +1784,6 @@ pub fn LLVMRustInlineAsmVerify(
ConstraintsLen: size_t,
) -> bool;
pub fn LLVMRustCoverageSmallVectorCounterExpressionCreate()
-> &'a mut SmallVectorCounterExpression<'a>;
pub fn LLVMRustCoverageSmallVectorCounterExpressionDispose(
Container: &'a mut SmallVectorCounterExpression<'a>,
);
pub fn LLVMRustCoverageSmallVectorCounterExpressionAdd(
Container: &mut SmallVectorCounterExpression<'a>,
Kind: rustc_codegen_ssa::coverageinfo::CounterOp,
LeftIndex: c_uint,
RightIndex: c_uint,
);
pub fn LLVMRustCoverageSmallVectorCounterMappingRegionCreate()
-> &'a mut SmallVectorCounterMappingRegion<'a>;
pub fn LLVMRustCoverageSmallVectorCounterMappingRegionDispose(
Container: &'a mut SmallVectorCounterMappingRegion<'a>,
);
pub fn LLVMRustCoverageSmallVectorCounterMappingRegionAdd(
Container: &mut SmallVectorCounterMappingRegion<'a>,
Index: c_uint,
FileID: c_uint,
LineStart: c_uint,
ColumnStart: c_uint,
LineEnd: c_uint,
ColumnEnd: c_uint,
);
#[allow(improper_ctypes)]
pub fn LLVMRustCoverageWriteFilenamesSectionToBuffer(
Filenames: *const *const c_char,
......@@ -1683,8 +1795,10 @@ pub fn LLVMRustCoverageWriteFilenamesSectionToBuffer(
pub fn LLVMRustCoverageWriteMappingToBuffer(
VirtualFileMappingIDs: *const c_uint,
NumVirtualFileMappingIDs: c_uint,
Expressions: *const SmallVectorCounterExpression<'_>,
MappingRegions: *const SmallVectorCounterMappingRegion<'_>,
Expressions: *const coverage_map::CounterExpression,
NumExpressions: c_uint,
MappingRegions: *mut coverageinfo::CounterMappingRegion,
NumMappingRegions: c_uint,
BufferOut: &RustString,
);
......
use super::map::{CounterValueReference, MappedExpressionIndex};
/// Aligns with [llvm::coverage::Counter::CounterKind](https://github.com/rust-lang/llvm-project/blob/rustc/10.0-2020-05-05/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L91)
#[derive(Copy, Clone, Debug)]
#[repr(C)]
enum CounterKind {
Zero = 0,
CounterValueReference = 1,
Expression = 2,
}
/// A reference to an instance of an abstract "counter" that will yield a value in a coverage
/// report. Note that `id` has different interpretations, depending on the `kind`:
/// * For `CounterKind::Zero`, `id` is assumed to be `0`
/// * For `CounterKind::CounterValueReference`, `id` matches the `counter_id` of the injected
/// instrumentation counter (the `index` argument to the LLVM intrinsic
/// `instrprof.increment()`)
/// * For `CounterKind::Expression`, `id` is the index into the coverage map's array of
/// counter expressions.
/// Aligns with [llvm::coverage::Counter](https://github.com/rust-lang/llvm-project/blob/rustc/10.0-2020-05-05/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L98-L99)
/// Important: The Rust struct layout (order and types of fields) must match its C++ counterpart.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct Counter {
// Important: The layout (order and types of fields) must match its C++ counterpart.
kind: CounterKind,
id: u32,
}
impl Counter {
pub fn zero() -> Self {
Self { kind: CounterKind::Zero, id: 0 }
}
pub fn counter_value_reference(counter_id: CounterValueReference) -> Self {
Self { kind: CounterKind::CounterValueReference, id: counter_id.into() }
}
pub fn expression(mapped_expression_index: MappedExpressionIndex) -> Self {
Self { kind: CounterKind::Expression, id: mapped_expression_index.into() }
}
}
/// Aligns with [llvm::coverage::CounterExpression::ExprKind](https://github.com/rust-lang/llvm-project/blob/rustc/10.0-2020-05-05/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L146)
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub enum ExprKind {
Subtract = 0,
Add = 1,
}
/// Aligns with [llvm::coverage::CounterExpression](https://github.com/rust-lang/llvm-project/blob/rustc/10.0-2020-05-05/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L147-L148)
/// Important: The Rust struct layout (order and types of fields) must match its C++
/// counterpart.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct CounterExpression {
kind: ExprKind,
lhs: Counter,
rhs: Counter,
}
impl CounterExpression {
pub fn new(lhs: Counter, kind: ExprKind, rhs: Counter) -> Self {
Self { kind, lhs, rhs }
}
}
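A minimal usage sketch (not part of this change), assuming `lhs` and `rhs` are `Counter` values already built with the constructors above:

// Encodes `lhs + rhs` and `lhs - rhs` as LLVM-compatible counter expressions.
fn encode_sum_and_difference(lhs: Counter, rhs: Counter) -> (CounterExpression, CounterExpression) {
    (
        CounterExpression::new(lhs, ExprKind::Add, rhs),
        CounterExpression::new(lhs, ExprKind::Subtract, rhs),
    )
}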
pub mod ffi;
pub mod map;
pub use map::CounterOp;
pub use map::ExprKind;
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(bool_to_option)]
#![feature(option_expect_none)]
#![feature(box_patterns)]
#![feature(try_blocks)]
#![feature(in_band_lifetimes)]
......@@ -7,6 +8,8 @@
#![feature(or_patterns)]
#![feature(trusted_len)]
#![feature(associated_type_bounds)]
#![feature(const_fn)] // for rustc_index::newtype_index
#![feature(const_panic)] // for rustc_index::newtype_index
#![recursion_limit = "256"]
//! This crate contains codegen code that is used by all codegen backends (LLVM and others).
......
use super::BackendTypes;
use crate::coverageinfo::CounterOp;
use crate::coverageinfo::ExprKind;
use rustc_middle::ty::Instance;
pub trait CoverageInfoMethods: BackendTypes {
......@@ -21,7 +21,7 @@ fn add_counter_expression_region(
instance: Instance<'tcx>,
index: u32,
lhs: u32,
op: CounterOp,
op: ExprKind,
rhs: u32,
start_byte_pos: u32,
end_byte_pos: u32,
......
......@@ -3,7 +3,7 @@
/// Positional arguments to `libcore::count_code_region()`
pub mod count_code_region_args {
pub const FUNCTION_SOURCE_HASH: usize = 0;
pub const COUNTER_INDEX: usize = 1;
pub const COUNTER_ID: usize = 1;
pub const START_BYTE_POS: usize = 2;
pub const END_BYTE_POS: usize = 3;
}
......@@ -11,9 +11,9 @@ pub mod count_code_region_args {
/// Positional arguments to `libcore::coverage_counter_add()` and
/// `libcore::coverage_counter_subtract()`
pub mod coverage_counter_expression_args {
pub const COUNTER_EXPRESSION_INDEX: usize = 0;
pub const LEFT_INDEX: usize = 1;
pub const RIGHT_INDEX: usize = 2;
pub const EXPRESSION_ID: usize = 0;
pub const LEFT_ID: usize = 1;
pub const RIGHT_ID: usize = 2;
pub const START_BYTE_POS: usize = 3;
pub const END_BYTE_POS: usize = 4;
}
......
......@@ -7,10 +7,9 @@
use rustc_middle::ich::StableHashingContext;
use rustc_middle::mir::coverage::*;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::CoverageInfo;
use rustc_middle::mir::{
self, traversal, BasicBlock, BasicBlockData, Operand, Place, SourceInfo, StatementKind,
Terminator, TerminatorKind, START_BLOCK,
self, traversal, BasicBlock, BasicBlockData, CoverageInfo, Operand, Place, SourceInfo,
SourceScope, StatementKind, Terminator, TerminatorKind,
};
use rustc_middle::ty;
use rustc_middle::ty::query::Providers;
......@@ -41,14 +40,14 @@ fn coverageinfo_from_mir<'tcx>(tcx: TyCtxt<'tcx>, mir_def_id: DefId) -> Coverage
tcx.require_lang_item(lang_items::CoverageCounterSubtractFnLangItem, None);
// The `num_counters` argument to `llvm.instrprof.increment` is the number of injected
// counters, with each counter having an index from `0..num_counters-1`. MIR optimization
// counters, with each counter having a counter ID from `0..num_counters-1`. MIR optimization
// may split and duplicate some BasicBlock sequences. Simply counting the calls may not
// not work; but computing the num_counters by adding `1` to the highest index (for a given
// work; but computing the num_counters by adding `1` to the highest counter_id (for a given
// instrumented function) is valid.
//
// `num_expressions` is the number of counter expressions added to the MIR body. Both
// `num_counters` and `num_expressions` are used to initialize new vectors, during backend
// code generation, to look up counters and expressions by their simple u32 indexes.
// code generation, to look up counters and expressions by simple u32 indexes.
let mut num_counters: u32 = 0;
let mut num_expressions: u32 = 0;
for terminator in
......@@ -57,27 +56,26 @@ fn coverageinfo_from_mir<'tcx>(tcx: TyCtxt<'tcx>, mir_def_id: DefId) -> Coverage
if let TerminatorKind::Call { func: Operand::Constant(func), args, .. } = &terminator.kind {
match func.literal.ty.kind {
FnDef(id, _) if id == count_code_region_fn => {
let index_arg =
args.get(count_code_region_args::COUNTER_INDEX).expect("arg found");
let counter_index = mir::Operand::scalar_from_const(index_arg)
let counter_id_arg =
args.get(count_code_region_args::COUNTER_ID).expect("arg found");
let counter_id = mir::Operand::scalar_from_const(counter_id_arg)
.to_u32()
.expect("index arg is u32");
num_counters = std::cmp::max(num_counters, counter_index + 1);
.expect("counter_id arg is u32");
num_counters = std::cmp::max(num_counters, counter_id + 1);
}
FnDef(id, _)
if id == coverage_counter_add_fn || id == coverage_counter_subtract_fn =>
{
let index_arg = args
.get(coverage_counter_expression_args::COUNTER_EXPRESSION_INDEX)
let expression_id_arg = args
.get(coverage_counter_expression_args::EXPRESSION_ID)
.expect("arg found");
let translated_index = mir::Operand::scalar_from_const(index_arg)
let id_descending_from_max = mir::Operand::scalar_from_const(expression_id_arg)
.to_u32()
.expect("index arg is u32");
// Counter expressions start with "translated indexes", descending from
// `u32::MAX`, so the range of expression indexes is disjoint from the range of
// counter indexes. This way, both counters and expressions can be operands in
// other expressions.
let expression_index = u32::MAX - translated_index;
.expect("expression_id arg is u32");
// Counter expressions are initially assigned IDs descending from `u32::MAX`, so
// the range of expression IDs is disjoint from the range of counter IDs. This
// way, both counters and expressions can be operands in other expressions.
let expression_index = u32::MAX - id_descending_from_max;
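// Illustrative example (not from this change): the first expression handed out by
// `next_expression()` has ID `u32::MAX`, so `expression_index` is 0 here; the second
// has ID `u32::MAX - 1`, giving index 1, and so on.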
num_expressions = std::cmp::max(num_expressions, expression_index + 1);
}
_ => {}
......@@ -97,12 +95,10 @@ fn call_terminators(data: &'tcx BasicBlockData<'tcx>) -> Option<&'tcx Terminator
impl<'tcx> MirPass<'tcx> for InstrumentCoverage {
fn run_pass(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, mir_body: &mut mir::Body<'tcx>) {
if tcx.sess.opts.debugging_opts.instrument_coverage {
// If the InstrumentCoverage pass is called on promoted MIRs, skip them.
// See: https://github.com/rust-lang/rust/pull/73011#discussion_r438317601
if src.promoted.is_none() {
Instrumentor::new(tcx, src, mir_body).inject_counters();
}
// If the InstrumentCoverage pass is called on promoted MIRs, skip them.
// See: https://github.com/rust-lang/rust/pull/73011#discussion_r438317601
if src.promoted.is_none() {
Instrumentor::new(tcx, src, mir_body).inject_counters();
}
}
}
......@@ -113,6 +109,12 @@ enum Op {
Subtract,
}
struct InjectedCall<'tcx> {
func: Operand<'tcx>,
args: Vec<Operand<'tcx>>,
inject_at: Span,
}
struct Instrumentor<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
mir_def_id: DefId,
......@@ -147,11 +149,8 @@ fn next_counter(&mut self) -> u32 {
}
/// Expression IDs start from u32::MAX and go down because a CounterExpression can reference
/// (add or subtract counts) of both Counter regions and CounterExpression regions. The indexes
/// of each type of region must be contiguous, but also must be unique across both sets.
/// The expression IDs are eventually translated into region indexes (starting after the last
/// counter index, for the given function), during backend code generation, by the helper method
/// `rustc_codegen_ssa::coverageinfo::map::FunctionCoverage::translate_expressions()`.
/// (add or subtract counts) of both Counter regions and CounterExpression regions. The counter
/// expression operand IDs must be unique across both types.
fn next_expression(&mut self) -> u32 {
assert!(self.num_counters < u32::MAX - self.num_expressions);
let next = u32::MAX - self.num_expressions;
......@@ -171,17 +170,25 @@ fn function_source_hash(&mut self) -> u64 {
}
fn inject_counters(&mut self) {
let mir_body = &self.mir_body;
let body_span = self.hir_body.value.span;
debug!(
"instrumenting {:?}, span: {}",
self.mir_def_id,
self.tcx.sess.source_map().span_to_string(body_span)
);
debug!("instrumenting {:?}, span: {:?}", self.mir_def_id, body_span);
// FIXME(richkadel): As a first step, counters are only injected at the top of each
// function. The complete solution will inject counters at each conditional code branch.
let next_block = START_BLOCK;
self.inject_counter(body_span, next_block);
let _ignore = mir_body;
let id = self.next_counter();
let function_source_hash = self.function_source_hash();
let code_region = body_span;
let scope = rustc_middle::mir::OUTERMOST_SOURCE_SCOPE;
let is_cleanup = false;
let next_block = rustc_middle::mir::START_BLOCK;
self.inject_call(
self.make_counter(id, function_source_hash, code_region),
scope,
is_cleanup,
next_block,
);
// FIXME(richkadel): The next step to implement source based coverage analysis will be
// instrumenting branches within functions, and some regions will be counted by "counter
......@@ -190,57 +197,68 @@ fn inject_counters(&mut self) {
let fake_use = false;
if fake_use {
let add = false;
if add {
self.inject_counter_expression(body_span, next_block, 1, Op::Add, 2);
} else {
self.inject_counter_expression(body_span, next_block, 1, Op::Subtract, 2);
}
let lhs = 1;
let op = if add { Op::Add } else { Op::Subtract };
let rhs = 2;
let code_region = body_span;
let scope = rustc_middle::mir::OUTERMOST_SOURCE_SCOPE;
let is_cleanup = false;
let next_block = rustc_middle::mir::START_BLOCK;
let id = self.next_expression();
self.inject_call(
self.make_expression(id, code_region, lhs, op, rhs),
scope,
is_cleanup,
next_block,
);
}
}
fn inject_counter(&mut self, code_region: Span, next_block: BasicBlock) -> u32 {
let counter_id = self.next_counter();
let function_source_hash = self.function_source_hash();
let injection_point = code_region.shrink_to_lo();
fn make_counter(
&self,
id: u32,
function_source_hash: u64,
code_region: Span,
) -> InjectedCall<'tcx> {
let inject_at = code_region.shrink_to_lo();
let count_code_region_fn = function_handle(
let func = function_handle(
self.tcx,
self.tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None),
injection_point,
inject_at,
);
let mut args = Vec::new();
use count_code_region_args::*;
debug_assert_eq!(FUNCTION_SOURCE_HASH, args.len());
args.push(self.const_u64(function_source_hash, injection_point));
args.push(self.const_u64(function_source_hash, inject_at));
debug_assert_eq!(COUNTER_INDEX, args.len());
args.push(self.const_u32(counter_id, injection_point));
debug_assert_eq!(COUNTER_ID, args.len());
args.push(self.const_u32(id, inject_at));
debug_assert_eq!(START_BYTE_POS, args.len());
args.push(self.const_u32(code_region.lo().to_u32(), injection_point));
args.push(self.const_u32(code_region.lo().to_u32(), inject_at));
debug_assert_eq!(END_BYTE_POS, args.len());
args.push(self.const_u32(code_region.hi().to_u32(), injection_point));
self.inject_call(count_code_region_fn, args, injection_point, next_block);
args.push(self.const_u32(code_region.hi().to_u32(), inject_at));
counter_id
InjectedCall { func, args, inject_at }
}
fn inject_counter_expression(
&mut self,
fn make_expression(
&self,
id: u32,
code_region: Span,
next_block: BasicBlock,
lhs: u32,
op: Op,
rhs: u32,
) -> u32 {
let expression_id = self.next_expression();
let injection_point = code_region.shrink_to_lo();
) -> InjectedCall<'tcx> {
let inject_at = code_region.shrink_to_lo();
let count_code_region_fn = function_handle(
let func = function_handle(
self.tcx,
self.tcx.require_lang_item(
match op {
......@@ -249,43 +267,51 @@ fn inject_counter_expression(
},
None,
),
injection_point,
inject_at,
);
let mut args = Vec::new();
use coverage_counter_expression_args::*;
debug_assert_eq!(COUNTER_EXPRESSION_INDEX, args.len());
args.push(self.const_u32(expression_id, injection_point));
debug_assert_eq!(EXPRESSION_ID, args.len());
args.push(self.const_u32(id, inject_at));
debug_assert_eq!(LEFT_INDEX, args.len());
args.push(self.const_u32(lhs, injection_point));
debug_assert_eq!(LEFT_ID, args.len());
args.push(self.const_u32(lhs, inject_at));
debug_assert_eq!(RIGHT_INDEX, args.len());
args.push(self.const_u32(rhs, injection_point));
debug_assert_eq!(RIGHT_ID, args.len());
args.push(self.const_u32(rhs, inject_at));
debug_assert_eq!(START_BYTE_POS, args.len());
args.push(self.const_u32(code_region.lo().to_u32(), injection_point));
args.push(self.const_u32(code_region.lo().to_u32(), inject_at));
debug_assert_eq!(END_BYTE_POS, args.len());
args.push(self.const_u32(code_region.hi().to_u32(), injection_point));
args.push(self.const_u32(code_region.hi().to_u32(), inject_at));
self.inject_call(count_code_region_fn, args, injection_point, next_block);
expression_id
InjectedCall { func, args, inject_at }
}
fn inject_call(
&mut self,
func: Operand<'tcx>,
args: Vec<Operand<'tcx>>,
fn_span: Span,
call: InjectedCall<'tcx>,
scope: SourceScope,
is_cleanup: bool,
next_block: BasicBlock,
) {
let InjectedCall { func, args, inject_at } = call;
debug!(
" injecting {}call to {:?}({:?}) at: {:?}, scope: {:?}",
if is_cleanup { "cleanup " } else { "" },
func,
args,
inject_at,
scope,
);
let mut patch = MirPatch::new(self.mir_body);
let temp = patch.new_temp(self.tcx.mk_unit(), fn_span);
let new_block = patch.new_block(placeholder_block(fn_span));
let temp = patch.new_temp(self.tcx.mk_unit(), inject_at);
let new_block = patch.new_block(placeholder_block(inject_at, scope, is_cleanup));
patch.patch_terminator(
new_block,
TerminatorKind::Call {
......@@ -295,7 +321,7 @@ fn inject_call(
destination: Some((Place::from(temp), new_block)),
cleanup: None,
from_hir_call: false,
fn_span,
fn_span: inject_at,
},
);
......@@ -325,15 +351,15 @@ fn function_handle<'tcx>(tcx: TyCtxt<'tcx>, fn_def_id: DefId, span: Span) -> Ope
Operand::function_handle(tcx, fn_def_id, substs, span)
}
fn placeholder_block(span: Span) -> BasicBlockData<'tcx> {
fn placeholder_block(span: Span, scope: SourceScope, is_cleanup: bool) -> BasicBlockData<'tcx> {
BasicBlockData {
statements: vec![],
terminator: Some(Terminator {
source_info: SourceInfo::outermost(span),
source_info: SourceInfo { span, scope },
// this gets overwritten by the counter Call
kind: TerminatorKind::Unreachable,
}),
is_cleanup: false,
is_cleanup,
}
}
......
......@@ -332,21 +332,25 @@ fn mir_validated(
body.required_consts = required_consts;
let promote_pass = promote_consts::PromoteTemps::default();
let promote: &[&dyn MirPass<'tcx>] = &[
// What we need to run borrowck etc.
&promote_pass,
&simplify::SimplifyCfg::new("qualify-consts"),
];
let opt_coverage: &[&dyn MirPass<'tcx>] = if tcx.sess.opts.debugging_opts.instrument_coverage {
&[&instrument_coverage::InstrumentCoverage]
} else {
&[]
};
run_passes(
tcx,
&mut body,
InstanceDef::Item(def.to_global()),
None,
MirPhase::Validated,
&[&[
// What we need to run borrowck etc.
&promote_pass,
&simplify::SimplifyCfg::new("qualify-consts"),
// If the `instrument-coverage` option is enabled, analyze the CFG, identify each
// conditional branch, construct a coverage map to be passed to LLVM, and inject counters
// where needed.
&instrument_coverage::InstrumentCoverage,
]],
&[promote, opt_coverage],
);
let promoted = promote_pass.promoted_fragments.into_inner();
......
......@@ -883,7 +883,7 @@ fn parse_target_feature(slot: &mut String, v: Option<&str>) -> bool {
"instrument the generated code to support LLVM source-based code coverage \
reports (note, the compiler build config must include `profiler = true`, \
and is mutually exclusive with `-C profile-generate`/`-C profile-use`); \
implies `-C link-dead-code` (unless explicitly disabled)` and
implies `-C link-dead-code` (unless explicitly disabled)` and \
`-Z symbol-mangling-version=v0`; and disables/overrides some optimization \
options (default: no)"),
instrument_mcount: bool = (false, parse_bool, [TRACKED],
......
......@@ -8,60 +8,6 @@
using namespace llvm;
extern "C" SmallVectorTemplateBase<coverage::CounterExpression>
*LLVMRustCoverageSmallVectorCounterExpressionCreate() {
return new SmallVector<coverage::CounterExpression, 32>();
}
extern "C" void LLVMRustCoverageSmallVectorCounterExpressionDispose(
SmallVectorTemplateBase<coverage::CounterExpression> *Vector) {
delete Vector;
}
extern "C" void LLVMRustCoverageSmallVectorCounterExpressionAdd(
SmallVectorTemplateBase<coverage::CounterExpression> *Expressions,
coverage::CounterExpression::ExprKind Kind,
unsigned LeftIndex,
unsigned RightIndex) {
auto LHS = coverage::Counter::getCounter(LeftIndex);
auto RHS = coverage::Counter::getCounter(RightIndex);
Expressions->push_back(coverage::CounterExpression { Kind, LHS, RHS });
}
extern "C" SmallVectorTemplateBase<coverage::CounterMappingRegion>
*LLVMRustCoverageSmallVectorCounterMappingRegionCreate() {
return new SmallVector<coverage::CounterMappingRegion, 32>();
}
extern "C" void LLVMRustCoverageSmallVectorCounterMappingRegionDispose(
SmallVectorTemplateBase<coverage::CounterMappingRegion> *Vector) {
delete Vector;
}
extern "C" void LLVMRustCoverageSmallVectorCounterMappingRegionAdd(
SmallVectorTemplateBase<coverage::CounterMappingRegion> *MappingRegions,
unsigned Index,
unsigned FileID,
unsigned LineStart,
unsigned ColumnStart,
unsigned LineEnd,
unsigned ColumnEnd) {
auto Counter = coverage::Counter::getCounter(Index);
MappingRegions->push_back(coverage::CounterMappingRegion::makeRegion(
Counter, FileID, LineStart,
ColumnStart, LineEnd, ColumnEnd));
// FIXME(richkadel): As applicable, implement additional CounterMappingRegion types using the
// static method alternatives to `coverage::CounterMappingRegion::makeRegion`:
//
// makeExpansion(unsigned FileID, unsigned ExpandedFileID, unsigned LineStart,
// unsigned ColumnStart, unsigned LineEnd, unsigned ColumnEnd) {
// makeSkipped(unsigned FileID, unsigned LineStart, unsigned ColumnStart,
// unsigned LineEnd, unsigned ColumnEnd) {
// makeGapRegion(Counter Count, unsigned FileID, unsigned LineStart,
// unsigned ColumnStart, unsigned LineEnd, unsigned ColumnEnd) {
}
extern "C" void LLVMRustCoverageWriteFilenamesSectionToBuffer(
const char* const Filenames[],
size_t FilenamesLen,
......@@ -79,13 +25,15 @@ extern "C" void LLVMRustCoverageWriteFilenamesSectionToBuffer(
extern "C" void LLVMRustCoverageWriteMappingToBuffer(
const unsigned *VirtualFileMappingIDs,
unsigned NumVirtualFileMappingIDs,
const SmallVectorImpl<coverage::CounterExpression> *Expressions,
SmallVectorImpl<coverage::CounterMappingRegion> *MappingRegions,
const coverage::CounterExpression *Expressions,
unsigned NumExpressions,
coverage::CounterMappingRegion *MappingRegions,
unsigned NumMappingRegions,
RustStringRef BufferOut) {
auto CoverageMappingWriter = coverage::CoverageMappingWriter(
makeArrayRef(VirtualFileMappingIDs, NumVirtualFileMappingIDs),
makeArrayRef(*Expressions),
MutableArrayRef<coverage::CounterMappingRegion> { *MappingRegions });
makeArrayRef(VirtualFileMappingIDs, NumVirtualFileMappingIDs),
makeArrayRef(Expressions, NumExpressions),
makeMutableArrayRef(MappingRegions, NumMappingRegions));
RawRustStringOstream OS(BufferOut);
CoverageMappingWriter.write(OS);
}
......