Commit 9117b57

use metadata for more info about events
deserialize metadata and store it in ProfilingData. use the start_time as base for all event times. to be able to merge multiple crox logs: use the process_id for the crox events. use duration from epoch as timestamp in the crox events.
1 parent 13891f8 commit 9117b57
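
For context, here is a self-contained sketch of the deserialization this commit adds. The `system_time_from_nanos` helper and the `MetaData` struct are copied from the diff below; the JSON payload and the `main` harness are made up for illustration (in the real code the string comes from the profile's string table).

// Standalone sketch of the metadata deserialization added in this commit.
// The JSON payload here is hypothetical; in the real code the string comes
// from the profile's string table via `string_table.get_metadata()`.
use serde::{Deserialize, Deserializer};
use std::time::{Duration, SystemTime, UNIX_EPOCH};

fn system_time_from_nanos<'de, D>(deserializer: D) -> Result<SystemTime, D::Error>
where
    D: Deserializer<'de>,
{
    // The metadata stores start_time as u64 nanoseconds since the epoch.
    let duration_from_epoch = Duration::from_nanos(u64::deserialize(deserializer)?);
    Ok(UNIX_EPOCH
        .checked_add(duration_from_epoch)
        .expect("a time that can be represented as SystemTime"))
}

#[derive(Deserialize)]
pub struct MetaData {
    #[serde(deserialize_with = "system_time_from_nanos")]
    pub start_time: SystemTime,
    pub process_id: u32,
    pub cmd: String,
}

fn main() -> Result<(), serde_json::Error> {
    // Hypothetical metadata record, shaped like what the profiler would emit.
    let json = r#"{"start_time": 1571820000000000000, "process_id": 4213, "cmd": "rustc --crate-name example"}"#;
    let meta: MetaData = serde_json::from_str(json)?;
    assert_eq!(meta.process_id, 4213);
    assert!(meta.start_time > UNIX_EPOCH);
    println!("profiled command: {}", meta.cmd);
    Ok(())
}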

File tree

4 files changed (+41, -30 lines)


analyzeme/Cargo.toml

Lines changed: 2 additions & 0 deletions
@@ -8,3 +8,5 @@ license = "MIT OR Apache-2.0"
 [dependencies]
 measureme = { path = "../measureme" }
 rustc-hash = "1.0.1"
+serde = { version = "1.0", features = [ "derive" ] }
+serde_json = "1.0"

analyzeme/src/profiling_data.rs

Lines changed: 32 additions & 3 deletions
@@ -5,18 +5,38 @@ use measureme::file_header::{
 };
 use measureme::ByteVecSink;
 use measureme::{ProfilerFiles, RawEvent, SerializationSink, StringTable, StringTableBuilder};
+use serde::{Deserialize, Deserializer};
 use std::error::Error;
 use std::fs;
 use std::mem;
 use std::path::Path;
 use std::sync::Arc;
-use std::time::SystemTime;
+use std::time::{Duration, SystemTime, UNIX_EPOCH};
 
 const RAW_EVENT_SIZE: usize = mem::size_of::<RawEvent>();
 
+fn system_time_from_nanos<'de, D>(deserializer: D) -> Result<SystemTime, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    let duration_from_epoch = Duration::from_nanos(u64::deserialize(deserializer)?);
+    Ok(UNIX_EPOCH
+        .checked_add(duration_from_epoch)
+        .expect("a time that can be represented as SystemTime"))
+}
+
+#[derive(Deserialize)]
+pub struct MetaData {
+    #[serde(deserialize_with = "system_time_from_nanos")]
+    pub start_time: SystemTime,
+    pub process_id: u32,
+    pub cmd: String,
+}
+
 pub struct ProfilingData {
     event_data: Vec<u8>,
     string_table: StringTable,
+    pub meta_data: MetaData,
 }
 
 impl ProfilingData {
@@ -39,9 +59,13 @@ impl ProfilingData {
 
         let string_table = StringTable::new(string_data, index_data)?;
 
+        let meta_data = string_table.get_metadata().to_string();
+        let meta_data: MetaData = serde_json::from_str(&meta_data)?;
+
         Ok(ProfilingData {
             string_table,
             event_data,
+            meta_data,
         })
     }
 
@@ -88,8 +112,7 @@ impl<'a> ProfilerEventIterator<'a> {
 
         let string_table = &self.data.string_table;
 
-        // FIXME: Lots of Rust code compiled in the seventies apparently.
-        let timestamp = Timestamp::from_raw_event(&raw_event, SystemTime::UNIX_EPOCH);
+        let timestamp = Timestamp::from_raw_event(&raw_event, self.data.meta_data.start_time);
 
         Event {
             event_kind: string_table.get(raw_event.event_kind).to_string(),
@@ -240,10 +263,16 @@ impl ProfilingDataBuilder {
             CURRENT_FILE_FORMAT_VERSION
         );
         let string_table = StringTable::new(data_bytes, index_bytes).unwrap();
+        let meta_data = MetaData {
+            start_time: UNIX_EPOCH,
+            process_id: 0,
+            cmd: "test cmd".to_string(),
+        };
 
         ProfilingData {
             event_data,
             string_table,
+            meta_data,
         }
     }
 
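
For illustration, a rough sketch of how a consumer could use the new public `meta_data` field. The `ProfilingData::new(path)` call is an assumption about the existing constructor (only hinted at in the hunk above), and `print_profile_info` is a hypothetical helper, not part of this commit.

// Hypothetical consumer of the new `meta_data` field; assumes the existing
// `ProfilingData::new` constructor takes a path and returns a Result.
use analyzeme::ProfilingData;
use std::error::Error;
use std::path::Path;

fn print_profile_info(path: &Path) -> Result<(), Box<dyn Error>> {
    let data = ProfilingData::new(path)?;

    // Metadata now travels with the event data instead of being guessed
    // from the event stream.
    println!("command:    {}", data.meta_data.cmd);
    println!("process id: {}", data.meta_data.process_id);
    println!("start time: {:?}", data.meta_data.start_time);

    Ok(())
}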

crox/src/main.rs

Lines changed: 3 additions & 27 deletions
@@ -2,8 +2,7 @@ use rustc_hash::FxHashMap;
 use std::fs;
 use std::io::BufWriter;
 use std::path::PathBuf;
-use std::time::Duration;
-use std::time::SystemTime;
+use std::time::{Duration, SystemTime, UNIX_EPOCH};
 
 use analyzeme::{ProfilingData, Timestamp};
 
@@ -118,10 +117,6 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
 
     let chrome_file = BufWriter::new(fs::File::create("chrome_profiler.json")?);
 
-    //find the earlier timestamp (it should be the first event)
-    //subtract one tick so that the start of the event shows in Chrome
-    let first_event_timestamp = make_start_timestamp(&data);
-
     let mut serializer = serde_json::Serializer::new(chrome_file);
     let thread_to_collapsed_thread = generate_thread_to_collapsed_thread_mapping(&opt, &data);
     let mut event_iterator = data.iter();
@@ -142,9 +137,9 @@
                 name: event.label.clone().into_owned(),
                 category: event.event_kind.clone().into_owned(),
                 event_type: EventType::Complete,
-                timestamp: start.duration_since(first_event_timestamp).unwrap(),
+                timestamp: start.duration_since(UNIX_EPOCH).unwrap(),
                 duration,
-                process_id: 0,
+                process_id: data.meta_data.process_id,
                 thread_id: *thread_to_collapsed_thread
                     .get(&event.thread_id)
                     .unwrap_or(&event.thread_id),
@@ -171,22 +166,3 @@ fn timestamp_to_min_max(timestamp: Timestamp) -> (SystemTime, SystemTime) {
         }
     }
 }
-
-// FIXME: Move this to `ProfilingData` and base it on the `start_time` field
-// from metadata.
-fn make_start_timestamp(data: &ProfilingData) -> SystemTime {
-    // We cannot assume the first event in the stream actually is the first
-    // event because interval events are emitted at their end. So in theory it
-    // is possible that the event with the earliest starting time is the last
-    // event in the stream (i.e. if there is an interval event that spans the
-    // entire execution of the profile).
-    //
-    // Let's be on the safe side and iterate the whole stream.
-    let min = data
-        .iter()
-        .map(|e| timestamp_to_min_max(e.timestamp).0)
-        .min()
-        .unwrap();
-
-    min - Duration::from_micros(1)
-}
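
A small sketch (not part of the commit) of why epoch-based timestamps make separately generated crox logs mergeable: absolute times from different runs stay directly comparable, and the real `process_id` keeps their events apart in Chrome's trace viewer. The `to_trace_micros` helper below is hypothetical.

// Sketch only: with `duration_since(UNIX_EPOCH)`, events from two
// independently generated profiles land on one shared timeline.
use std::time::{Duration, SystemTime, UNIX_EPOCH};

fn to_trace_micros(t: SystemTime) -> u128 {
    t.duration_since(UNIX_EPOCH)
        .expect("event times are after the epoch")
        .as_micros()
}

fn main() {
    let run_a_event = SystemTime::now();
    let run_b_event = run_a_event + Duration::from_millis(5);

    // Absolute timestamps from different runs remain directly comparable.
    assert!(to_trace_micros(run_b_event) > to_trace_micros(run_a_event));
}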

measureme/src/stringtable.rs

Lines changed: 4 additions & 0 deletions
@@ -295,6 +295,10 @@ impl StringTable {
     pub fn get<'a>(&'a self, id: StringId) -> StringRef<'a> {
         StringRef { id, table: self }
     }
+    pub fn get_metadata<'a>(&'a self) -> StringRef<'a> {
+        let id = StringId(METADATA_STRING_ID);
+        self.get(id)
+    }
 }
 
 #[cfg(test)]
