1use std::borrow::ToOwned;
8use std::collections::{BTreeMap, HashMap};
9use std::fs::File;
10use std::io::{self, Write};
11use std::path::Path;
12use std::thread;
13
14use ipc_channel::ipc::{self, IpcReceiver};
15use profile_traits::time::{
16 ProfilerCategory, ProfilerChan, ProfilerData, ProfilerMsg, TimerMetadata,
17 TimerMetadataFrameType, TimerMetadataReflowType,
18};
19use servo_config::opts::OutputOptions;
20use time::Duration;
21
22use crate::trace_dump::TraceDump;
23
/// A value that can be rendered as a column of profiler report output.
pub trait Formattable {
    /// Formats `self` for the given output target: tab-separated fields for
    /// file output, fixed-width columns for everything else.
    fn format(&self, output: &Option<OutputOptions>) -> String;
}
27
28impl Formattable for Option<TimerMetadata> {
29 fn format(&self, output: &Option<OutputOptions>) -> String {
30 match *self {
31 Some(ref meta) => {
33 let url = &*meta.url;
34 match *output {
35 Some(OutputOptions::FileName(_)) => {
36 let incremental = match meta.incremental {
38 TimerMetadataReflowType::Incremental => "yes",
39 TimerMetadataReflowType::FirstReflow => "no",
40 };
41 let iframe = match meta.iframe {
42 TimerMetadataFrameType::RootWindow => "yes",
43 TimerMetadataFrameType::IFrame => "no",
44 };
45 format!(" {}\t{}\t{}", incremental, iframe, url)
46 },
47 _ => {
48 let url = if url.len() > 30 { &url[..30] } else { url };
50 let incremental = match meta.incremental {
51 TimerMetadataReflowType::Incremental => " yes",
52 TimerMetadataReflowType::FirstReflow => " no ",
53 };
54 let iframe = match meta.iframe {
55 TimerMetadataFrameType::RootWindow => " yes",
56 TimerMetadataFrameType::IFrame => " no ",
57 };
58 format!(" {:14} {:9} {:30}", incremental, iframe, url)
59 },
60 }
61 },
62 None => match *output {
63 Some(OutputOptions::FileName(_)) => {
64 format!(" {}\t{}\t{}", " N/A", " N/A", " N/A")
65 },
66 _ => format!(" {:14} {:9} {:30}", " N/A", " N/A", " N/A"),
67 },
68 }
69 }
70}
71
/// Recorded durations, keyed by (category, optional metadata). A `BTreeMap`
/// keeps report output in a stable, sorted order.
type ProfilerBuckets = BTreeMap<(ProfilerCategory, Option<TimerMetadata>), Vec<Duration>>;
73
/// The time profiler: accumulates timing messages into buckets and renders
/// reports to the configured output.
pub struct Profiler {
    /// Receiving end of the profiler's IPC message channel.
    pub port: IpcReceiver<ProfilerMsg>,
    /// Collected durations, bucketed by (category, metadata).
    buckets: ProfilerBuckets,
    /// Report destination; `None` suppresses report printing.
    output: Option<OutputOptions>,
    /// The most recently handled message; used to skip reprinting a report
    /// when no new timings have arrived since the last one.
    pub last_msg: Option<ProfilerMsg>,
    /// Optional trace dump that mirrors every timing event as it arrives.
    trace: Option<TraceDump>,
    /// Per-URL counts of blocked layout queries.
    blocked_layout_queries: HashMap<String, u32>,
}
83
impl Profiler {
    /// Spawns the time-profiler thread and returns a channel for sending it
    /// messages.
    ///
    /// With `Stdout` output a companion timer thread is also spawned that
    /// requests a report every `period` seconds. With no `output` and no
    /// `file_path`, nothing is spawned and the returned `ProfilerChan` holds
    /// `None` (profiling disabled).
    pub fn create(output: &Option<OutputOptions>, file_path: Option<String>) -> ProfilerChan {
        match *output {
            Some(ref option) => {
                // The profiler thread owns the receiving end of the channel.
                let (chan, port) = ipc::channel().unwrap();
                let outputoption = option.clone();
                thread::Builder::new()
                    .name("TimeProfiler".to_owned())
                    .spawn(move || {
                        // A failure to set up the trace dump is ignored:
                        // profiling proceeds without it.
                        let trace = file_path.as_ref().and_then(|p| TraceDump::new(p).ok());
                        let mut profiler = Profiler::new(port, trace, Some(outputoption));
                        profiler.start();
                    })
                    .expect("Thread spawning failed");
                match *option {
                    // File output is written once, when the profiler exits,
                    // so no periodic trigger is needed.
                    OutputOptions::FileName(_) => { },
                    OutputOptions::Stdout(period) => {
                        // Timer thread: ask the profiler to print a report
                        // every `period` seconds until it goes away.
                        let chan = chan.clone();
                        thread::Builder::new()
                            .name("TimeProfTimer".to_owned())
                            .spawn(move || {
                                loop {
                                    thread::sleep(std::time::Duration::from_secs_f64(period));
                                    if chan.send(ProfilerMsg::Print).is_err() {
                                        // Profiler thread has exited; stop ticking.
                                        break;
                                    }
                                }
                            })
                            .expect("Thread spawning failed");
                    },
                }

                ProfilerChan(Some(chan))
            },
            None => {
                match file_path {
                    // No report output, but a trace file was requested:
                    // spawn the profiler solely to feed the trace dump.
                    Some(path) => {
                        let (chan, port) = ipc::channel().unwrap();
                        thread::Builder::new()
                            .name("TimeProfiler".to_owned())
                            .spawn(move || {
                                let trace = TraceDump::new(path).ok();
                                let mut profiler = Profiler::new(port, trace, None);
                                profiler.start();
                            })
                            .expect("Thread spawning failed");

                        ProfilerChan(Some(chan))
                    },
                    None => ProfilerChan(None),
                }
            },
        }
    }

    /// Constructs a profiler that reads messages from `port`, optionally
    /// mirroring each timing into `trace` and reporting to `output`.
    pub fn new(
        port: IpcReceiver<ProfilerMsg>,
        trace: Option<TraceDump>,
        output: Option<OutputOptions>,
    ) -> Profiler {
        Profiler {
            port,
            buckets: BTreeMap::new(),
            output,
            last_msg: None,
            trace,
            blocked_layout_queries: HashMap::new(),
        }
    }

    /// Runs the message loop until the channel closes or an `Exit` message
    /// is handled.
    pub fn start(&mut self) {
        while let Ok(msg) = self.port.recv() {
            if !self.handle_msg(msg) {
                break;
            }
        }
    }

    /// Appends `duration` to the bucket for key `k`, creating the bucket on
    /// first use.
    fn find_or_insert(&mut self, k: (ProfilerCategory, Option<TimerMetadata>), duration: Duration) {
        self.buckets.entry(k).or_default().push(duration);
    }

    /// Processes one profiler message. Returns `false` when the profiler
    /// should shut down (on `Exit`), `true` otherwise.
    fn handle_msg(&mut self, msg: ProfilerMsg) -> bool {
        // The clone lets the original `msg` be stored in `last_msg` below.
        match msg.clone() {
            ProfilerMsg::Time(category_and_metadata, (start_time, end_time)) => {
                if let Some(ref mut trace) = self.trace {
                    trace.write_one(&category_and_metadata, start_time, end_time);
                }
                self.find_or_insert(category_and_metadata, end_time - start_time);
            },
            ProfilerMsg::Print => {
                // Only print when new timings arrived since the last handled
                // message; otherwise the report would be an exact repeat.
                if let Some(ProfilerMsg::Time(..)) = self.last_msg {
                    self.print_buckets();
                }
            },
            ProfilerMsg::Get(k, sender) => {
                let vec_option = self.buckets.get(&k);
                match vec_option {
                    Some(vec_entry) => sender
                        .send(ProfilerData::Record(vec_entry.to_vec()))
                        .unwrap(),
                    None => sender.send(ProfilerData::NoRecords).unwrap(),
                };
            },
            ProfilerMsg::BlockedLayoutQuery(url) => {
                *self.blocked_layout_queries.entry(url).or_insert(0) += 1;
            },
            ProfilerMsg::Exit(chan) => {
                // Final report, then acknowledge so the requester can finish
                // tearing down.
                self.print_buckets();
                let _ = chan.send(());
                return false;
            },
        };
        self.last_msg = Some(msg);
        true
    }

    /// Returns `(mean, median, min, max)` of `data`.
    ///
    /// `data` must be sorted ascending and non-empty; both preconditions are
    /// checked in debug builds only. For an even-length slice this returns
    /// the upper median (`data[len / 2]`).
    pub fn get_statistics(data: &[Duration]) -> (Duration, Duration, Duration, Duration) {
        debug_assert!(
            data.windows(2).all(|window| window[0] <= window[1]),
            "Data must be sorted"
        );

        let data_len = data.len();
        debug_assert!(data_len > 0);
        let (mean, median, min, max) = (
            data.iter().sum::<Duration>() / data_len as u32,
            data[data_len / 2],
            data[0],
            data[data_len - 1],
        );
        (mean, median, min, max)
    }

    /// Prints statistics for every non-empty bucket, followed by the
    /// blocked-layout-query counts: tab-separated to a file, aligned columns
    /// to stdout, or nothing when no output is configured. Durations are
    /// reported in milliseconds.
    fn print_buckets(&mut self) {
        match self.output {
            Some(OutputOptions::FileName(ref filename)) => {
                let path = Path::new(&filename);
                // Recreated (truncated) on every report, so the file always
                // holds the latest snapshot only.
                let mut file = match File::create(path) {
                    Err(e) => panic!("Couldn't create {}: {}", path.display(), e),
                    Ok(file) => file,
                };
                writeln!(
                    file,
                    "_category_\t_incremental?_\t_iframe?_\t_url_\t_mean (ms)_\t\
                     _median (ms)_\t_min (ms)_\t_max (ms)_\t_events_"
                )
                .unwrap();
                for ((category, meta), ref mut data) in &mut self.buckets {
                    // Sort in place first: get_statistics requires ascending
                    // order for its median/min/max indexing.
                    data.sort_by(|a, b| a.partial_cmp(b).expect("No NaN values in profiles"));
                    let data_len = data.len();
                    if data_len > 0 {
                        let (mean, median, min, max) = Self::get_statistics(data);
                        writeln!(
                            file,
                            "{}\t{}\t{:15.4}\t{:15.4}\t{:15.4}\t{:15.4}\t{:15}",
                            category.variant_name(),
                            meta.format(&self.output),
                            mean.as_seconds_f64() * 1000.,
                            median.as_seconds_f64() * 1000.,
                            min.as_seconds_f64() * 1000.,
                            max.as_seconds_f64() * 1000.,
                            data_len
                        )
                        .unwrap();
                    }
                }

                // NOTE(review): header reads "_url" here but "_url_" in the
                // stdout branch below — probable typo; confirm before fixing.
                writeln!(file, "_url\t_blocked layout queries_").unwrap();
                for (url, count) in &self.blocked_layout_queries {
                    writeln!(file, "{}\t{}", url, count).unwrap();
                }
            },
            Some(OutputOptions::Stdout(_)) => {
                // Lock stdout once for the whole report so lines from other
                // threads cannot interleave with it.
                let stdout = io::stdout();
                let mut lock = stdout.lock();

                writeln!(
                    &mut lock,
                    "{:35} {:14} {:9} {:30} {:15} {:15} {:-15} {:-15} {:-15}",
                    "_category_",
                    "_incremental?_",
                    "_iframe?_",
                    " _url_",
                    " _mean (ms)_",
                    " _median (ms)_",
                    " _min (ms)_",
                    " _max (ms)_",
                    " _events_"
                )
                .unwrap();
                for ((category, meta), ref mut data) in &mut self.buckets {
                    // Sort in place first: get_statistics requires ascending
                    // order for its median/min/max indexing.
                    data.sort_by(|a, b| a.partial_cmp(b).expect("No NaN values in profiles"));
                    let data_len = data.len();
                    if data_len > 0 {
                        let (mean, median, min, max) = Self::get_statistics(data);
                        writeln!(
                            &mut lock,
                            "{:-35}{} {:15.4} {:15.4} {:15.4} {:15.4} {:15}",
                            category.variant_name(),
                            meta.format(&self.output),
                            mean.as_seconds_f64() * 1000.,
                            median.as_seconds_f64() * 1000.,
                            min.as_seconds_f64() * 1000.,
                            max.as_seconds_f64() * 1000.,
                            data_len
                        )
                        .unwrap();
                    }
                }
                writeln!(&mut lock).unwrap();

                writeln!(&mut lock, "_url_\t_blocked layout queries_").unwrap();
                for (url, count) in &self.blocked_layout_queries {
                    writeln!(&mut lock, "{}\t{}", url, count).unwrap();
                }
                writeln!(&mut lock).unwrap();
            },
            None => { },
        };
    }
}