Skip to content

Commit

Permalink
Add metrics about how much data is read and decompressed for sections
Browse files Browse the repository at this point in the history
  • Loading branch information
marxin authored Sep 12, 2024
1 parent 99af4ac commit cd8de8d
Show file tree
Hide file tree
Showing 2 changed files with 41 additions and 1 deletion.
12 changes: 12 additions & 0 deletions wild_lib/src/elf.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
use crate::error::Result;
use crate::resolution::LoadedMetrics;
use anyhow::anyhow;
use anyhow::bail;
use anyhow::Context;
Expand All @@ -15,6 +16,7 @@ use object::read::elf::SectionHeader as _;
use object::LittleEndian;
use std::borrow::Cow;
use std::io::Read as _;
use std::sync::atomic::Ordering;

/// Our starting address in memory when linking non-relocatable executables. We can start memory
/// addresses wherever we like, even from 0. We pick 400k because it's the same as what ld does and
Expand Down Expand Up @@ -133,13 +135,23 @@ impl<'data> File<'data> {
&self,
section: &SectionHeader,
member: &bumpalo_herd::Member<'data>,
loaded_metrics: &LoadedMetrics,
) -> Result<&'data [u8]> {
let data = section.data(LittleEndian, self.data)?;
loaded_metrics
.loaded_bytes
.fetch_add(data.len(), Ordering::Relaxed);

if let Some((compression, _, _)) = section.compression(LittleEndian, self.data)? {
loaded_metrics
.loaded_compressed_bytes
.fetch_add(data.len(), Ordering::Relaxed);
let len = self.section_size(section)?;
let decompressed = member.alloc_slice_fill_default(len as usize);
decompress_into(compression, &data[COMPRESSION_HEADER_SIZE..], decompressed)?;
loaded_metrics
.decompressed_bytes
.fetch_add(decompressed.len(), Ordering::Relaxed);
Ok(decompressed)
} else {
Ok(data)
Expand Down
30 changes: 29 additions & 1 deletion wild_lib/src/resolution.rs
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ use std::collections::HashMap;
use std::fmt::Display;
use std::hash::Hash;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use std::thread::Thread;

Expand Down Expand Up @@ -188,6 +189,7 @@ pub(crate) fn resolve_symbols_in_files<'data>(
symbol_db,
outputs: &outputs,
work_queue,
loaded_metrics: Default::default(),
};

let done = AtomicBool::new(false);
Expand Down Expand Up @@ -239,6 +241,20 @@ pub(crate) fn resolve_symbols_in_files<'data>(
}
});

let loaded_bytes = resources
.loaded_metrics
.loaded_bytes
.load(Ordering::Relaxed);
let loaded_compressed_bytes = resources
.loaded_metrics
.loaded_compressed_bytes
.load(Ordering::Relaxed);
let decompressed_bytes = resources
.loaded_metrics
.decompressed_bytes
.load(Ordering::Relaxed);
tracing::debug!(target: "metrics", loaded_bytes, loaded_compressed_bytes, decompressed_bytes, "input_sections");

drop(resources);
drop(definitions_per_group_and_file);
symbol_db.restore_definitions(symbol_definitions);
Expand All @@ -259,13 +275,21 @@ struct WorkItem<'definitions> {
definitions: &'definitions mut [SymbolId],
}

/// Counters recording how much input-section data was read and decompressed
/// while loading object files. The counters are bumped from worker threads
/// (see `File::section_data` in `elf.rs`), hence atomics; all accesses use
/// `Ordering::Relaxed` because these are statistics-only counters with no
/// synchronisation role. Read out and logged once resolution completes.
#[derive(Default, Debug)]
pub(crate) struct LoadedMetrics {
    /// Total bytes of section data read from input files, compressed or not.
    pub(crate) loaded_bytes: AtomicUsize,
    /// Subset of `loaded_bytes` that was stored compressed on disk.
    pub(crate) loaded_compressed_bytes: AtomicUsize,
    /// Total bytes produced by decompressing compressed sections.
    pub(crate) decompressed_bytes: AtomicUsize,
}

/// Shared state threaded through the worker threads that resolve symbols
/// across all input files.
struct ResolutionResources<'data, 'definitions, 'outer_scope> {
    groups: &'data [Group<'data>],
    definitions_per_file: &'outer_scope Vec<Vec<DefinitionsCell<'definitions>>>,
    // NOTE(review): presumably parked worker threads waiting to be woken when
    // new work arrives — confirm against the thread start-up/wake code.
    idle_threads: Option<ArrayQueue<Thread>>,
    symbol_db: &'outer_scope SymbolDb<'data>,
    outputs: &'outer_scope Outputs<'data>,
    work_queue: SegQueue<WorkItem<'definitions>>,
    // Byte counters for section data loaded/decompressed during resolution;
    // accumulated via atomics from worker threads and logged (target
    // "metrics") after the resolution phase finishes.
    loaded_metrics: LoadedMetrics,
}

impl<'data, 'definitions, 'outer_scope> ResolutionResources<'data, 'definitions, 'outer_scope> {
Expand Down Expand Up @@ -795,6 +819,7 @@ impl<'data> ResolvedObject<'data> {
&mut merge_strings_sections,
resources.symbol_db.args,
allocator,
&resources.loaded_metrics,
)?;

resolve_symbols(obj, resources, undefined_symbols_out, definitions_out)
Expand Down Expand Up @@ -823,6 +848,7 @@ fn resolve_sections<'data>(
merge_strings_out: &mut Vec<UnresolvedMergeStringsFileSection<'data>>,
args: &Args,
allocator: &bumpalo_herd::Member<'data>,
loaded_metrics: &LoadedMetrics,
) -> Result<Vec<SectionSlot<'data>>> {
let sections = obj
.object
Expand All @@ -848,7 +874,9 @@ fn resolve_sections<'data>(
_ => (),
}
let slot = if unloaded.is_string_merge {
let section_data = obj.object.section_data(input_section, allocator)?;
let section_data =
obj.object
.section_data(input_section, allocator, loaded_metrics)?;
merge_strings_out.push(UnresolvedMergeStringsFileSection::new(
section_data,
input_section_index,
Expand Down

0 comments on commit cd8de8d

Please sign in to comment.