about summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
authorWesley Wiser <wwiser@gmail.com>2019-04-04 19:41:49 -0400
committerWesley Wiser <wwiser@gmail.com>2019-04-12 20:27:29 -0400
commit56e434d84d8fc7f9a67c19294206c733f25b890b (patch)
treea0c6159f3a310823e1a2a1eb6d208b2dcc7d5706 /src
parent99da733f7f38ce8fe68453e859b7ac96bf7caa0f (diff)
downloadrust-56e434d84d8fc7f9a67c19294206c733f25b890b.tar.gz
rust-56e434d84d8fc7f9a67c19294206c733f25b890b.zip
Use measureme in self-profiler
Related to #58372
Related to #58967
Diffstat (limited to 'src')
-rw-r--r--src/librustc/Cargo.toml1
-rw-r--r--src/librustc/session/mod.rs26
-rw-r--r--src/librustc/ty/query/config.rs4
-rw-r--r--src/librustc/ty/query/mod.rs1
-rw-r--r--src/librustc/ty/query/plumbing.rs67
-rw-r--r--src/librustc/util/profiling.rs440
-rw-r--r--src/librustc_codegen_llvm/back/lto.rs10
-rw-r--r--src/librustc_codegen_llvm/back/write.rs24
-rw-r--r--src/librustc_codegen_llvm/lib.rs5
-rw-r--r--src/librustc_codegen_ssa/back/write.rs37
-rw-r--r--src/librustc_codegen_ssa/base.rs11
-rw-r--r--src/librustc_interface/interface.rs4
-rw-r--r--src/librustc_interface/passes.rs14
-rw-r--r--src/librustc_typeck/lib.rs5
-rw-r--r--src/tools/tidy/src/deps.rs1
15 files changed, 186 insertions, 464 deletions
diff --git a/src/librustc/Cargo.toml b/src/librustc/Cargo.toml
index 31e10c19c7a..a668ebacabf 100644
--- a/src/librustc/Cargo.toml
+++ b/src/librustc/Cargo.toml
@@ -36,6 +36,7 @@ byteorder = { version = "1.1", features = ["i128"]}
 chalk-engine = { version = "0.9.0", default-features=false }
 rustc_fs_util = { path = "../librustc_fs_util" }
 smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
+measureme = "0.2.1"
 
 # Note that these dependencies are a lie, they're just here to get linkage to
 # work.
diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs
index 9cd6c30e272..a1966c02683 100644
--- a/src/librustc/session/mod.rs
+++ b/src/librustc/session/mod.rs
@@ -46,8 +46,6 @@ use std::path::PathBuf;
 use std::time::Duration;
 use std::sync::{Arc, mpsc};
 
-use parking_lot::Mutex as PlMutex;
-
 mod code_stats;
 pub mod config;
 pub mod filesearch;
@@ -130,7 +128,7 @@ pub struct Session {
     pub profile_channel: Lock<Option<mpsc::Sender<ProfileQueriesMsg>>>,
 
     /// Used by -Z self-profile
-    pub self_profiling: Option<Arc<PlMutex<SelfProfiler>>>,
+    pub self_profiling: Option<Arc<SelfProfiler>>,
 
     /// Some measurements that are being gathered during compilation.
     pub perf_stats: PerfStats,
@@ -838,19 +836,17 @@ impl Session {
 
     #[inline(never)]
     #[cold]
-    fn profiler_active<F: FnOnce(&mut SelfProfiler) -> ()>(&self, f: F) {
+    fn profiler_active<F: FnOnce(&SelfProfiler) -> ()>(&self, f: F) {
         match &self.self_profiling {
             None => bug!("profiler_active() called but there was no profiler active"),
             Some(profiler) => {
-                let mut p = profiler.lock();
-
-                f(&mut p);
+                f(&profiler);
             }
         }
     }
 
     #[inline(always)]
-    pub fn profiler<F: FnOnce(&mut SelfProfiler) -> ()>(&self, f: F) {
+    pub fn profiler<F: FnOnce(&SelfProfiler) -> ()>(&self, f: F) {
         if unlikely!(self.self_profiling.is_some()) {
             self.profiler_active(f)
         }
@@ -1138,7 +1134,19 @@ fn build_session_(
     driver_lint_caps: FxHashMap<lint::LintId, lint::Level>,
 ) -> Session {
     let self_profiler =
-        if sopts.debugging_opts.self_profile { Some(Arc::new(PlMutex::new(SelfProfiler::new()))) }
+        if sopts.debugging_opts.self_profile {
+            let profiler = SelfProfiler::new();
+            match profiler {
+                Ok(profiler) => {
+                    crate::ty::query::QueryName::register_with_profiler(&profiler);
+                    Some(Arc::new(profiler))
+                },
+                Err(e) => {
+                    early_warn(sopts.error_format, &format!("failed to create profiler: {}", e));
+                    None
+                }
+            }
+        }
         else { None };
 
     let host_triple = TargetTriple::from_triple(config::host_triple());
diff --git a/src/librustc/ty/query/config.rs b/src/librustc/ty/query/config.rs
index 73b79027972..0eda92ea8b8 100644
--- a/src/librustc/ty/query/config.rs
+++ b/src/librustc/ty/query/config.rs
@@ -3,7 +3,7 @@ use crate::dep_graph::DepNode;
 use crate::hir::def_id::{CrateNum, DefId};
 use crate::ty::TyCtxt;
 use crate::ty::query::queries;
-use crate::ty::query::Query;
+use crate::ty::query::{Query, QueryName};
 use crate::ty::query::QueryCache;
 use crate::ty::query::plumbing::CycleError;
 use crate::util::profiling::ProfileCategory;
@@ -18,7 +18,7 @@ use crate::ich::StableHashingContext;
 // Query configuration and description traits.
 
 pub trait QueryConfig<'tcx> {
-    const NAME: &'static str;
+    const NAME: QueryName;
     const CATEGORY: ProfileCategory;
 
     type Key: Eq + Hash + Clone + Debug;
diff --git a/src/librustc/ty/query/mod.rs b/src/librustc/ty/query/mod.rs
index c4bc35fff66..cd29ca81890 100644
--- a/src/librustc/ty/query/mod.rs
+++ b/src/librustc/ty/query/mod.rs
@@ -41,7 +41,6 @@ use crate::ty::subst::SubstsRef;
 use crate::util::nodemap::{DefIdSet, DefIdMap, ItemLocalSet};
 use crate::util::common::{ErrorReported};
 use crate::util::profiling::ProfileCategory::*;
-use crate::session::Session;
 
 use rustc_data_structures::svh::Svh;
 use rustc_data_structures::bit_set::BitSet;
diff --git a/src/librustc/ty/query/plumbing.rs b/src/librustc/ty/query/plumbing.rs
index a03cfd19b91..d671b58470c 100644
--- a/src/librustc/ty/query/plumbing.rs
+++ b/src/librustc/ty/query/plumbing.rs
@@ -114,7 +114,7 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> {
             let mut lock = cache.borrow_mut();
             if let Some(value) = lock.results.get(key) {
                 profq_msg!(tcx, ProfileQueriesMsg::CacheHit);
-                tcx.sess.profiler(|p| p.record_query_hit(Q::NAME, Q::CATEGORY));
+                tcx.sess.profiler(|p| p.record_query_hit(Q::NAME));
                 let result = (value.value.clone(), value.index);
                 #[cfg(debug_assertions)]
                 {
@@ -130,7 +130,7 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> {
                             //in another thread has completed. Record how long we wait in the
                             //self-profiler
                             #[cfg(parallel_compiler)]
-                            tcx.sess.profiler(|p| p.query_blocked_start(Q::NAME, Q::CATEGORY));
+                            tcx.sess.profiler(|p| p.query_blocked_start(Q::NAME));
 
                             job.clone()
                         },
@@ -172,7 +172,7 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> {
             #[cfg(parallel_compiler)]
             {
                 let result = job.r#await(tcx, span);
-                tcx.sess.profiler(|p| p.query_blocked_end(Q::NAME, Q::CATEGORY));
+                tcx.sess.profiler(|p| p.query_blocked_end(Q::NAME));
 
                 if let Err(cycle) = result {
                     return TryGetJob::Cycle(Q::handle_cycle_error(tcx, cycle));
@@ -358,14 +358,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
         key: Q::Key)
     -> Q::Value {
         debug!("ty::query::get_query<{}>(key={:?}, span={:?})",
-               Q::NAME,
+               Q::NAME.as_str(),
                key,
                span);
 
         profq_msg!(self,
             ProfileQueriesMsg::QueryBegin(
                 span.data(),
-                profq_query_msg!(Q::NAME, self, key),
+                profq_query_msg!(Q::NAME.as_str(), self, key),
             )
         );
 
@@ -389,7 +389,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
 
         if dep_node.kind.is_anon() {
             profq_msg!(self, ProfileQueriesMsg::ProviderBegin);
-            self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
+            self.sess.profiler(|p| p.start_query(Q::NAME));
 
             let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| {
                 self.start_query(job.job.clone(), diagnostics, |tcx| {
@@ -399,7 +399,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
                 })
             });
 
-            self.sess.profiler(|p| p.end_query(Q::NAME, Q::CATEGORY));
+            self.sess.profiler(|p| p.end_query(Q::NAME));
             profq_msg!(self, ProfileQueriesMsg::ProviderEnd);
 
             self.dep_graph.read_index(dep_node_index);
@@ -474,14 +474,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
 
         let result = if let Some(result) = result {
             profq_msg!(self, ProfileQueriesMsg::CacheHit);
-            self.sess.profiler(|p| p.record_query_hit(Q::NAME, Q::CATEGORY));
+            self.sess.profiler(|p| p.record_query_hit(Q::NAME));
 
             result
         } else {
             // We could not load a result from the on-disk cache, so
             // recompute.
 
-            self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
+            self.sess.profiler(|p| p.start_query(Q::NAME));
 
             // The dep-graph for this computation is already in
             // place
@@ -489,7 +489,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
                 Q::compute(self, key)
             });
 
-            self.sess.profiler(|p| p.end_query(Q::NAME, Q::CATEGORY));
+            self.sess.profiler(|p| p.end_query(Q::NAME));
             result
         };
 
@@ -552,7 +552,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
                 key, dep_node);
 
         profq_msg!(self, ProfileQueriesMsg::ProviderBegin);
-        self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
+        self.sess.profiler(|p| p.start_query(Q::NAME));
 
         let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| {
             self.start_query(job.job.clone(), diagnostics, |tcx| {
@@ -572,7 +572,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
             })
         });
 
-        self.sess.profiler(|p| p.end_query(Q::NAME, Q::CATEGORY));
+        self.sess.profiler(|p| p.end_query(Q::NAME));
         profq_msg!(self, ProfileQueriesMsg::ProviderEnd);
 
         if unlikely!(self.sess.opts.debugging_opts.query_dep_graph) {
@@ -619,7 +619,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
             let _ = self.get_query::<Q>(DUMMY_SP, key);
         } else {
             profq_msg!(self, ProfileQueriesMsg::CacheHit);
-            self.sess.profiler(|p| p.record_query_hit(Q::NAME, Q::CATEGORY));
+            self.sess.profiler(|p| p.record_query_hit(Q::NAME));
         }
     }
 
@@ -632,7 +632,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
     ) {
         profq_msg!(
             self,
-            ProfileQueriesMsg::QueryBegin(span.data(), profq_query_msg!(Q::NAME, self, key))
+            ProfileQueriesMsg::QueryBegin(span.data(),
+                                          profq_query_msg!(Q::NAME.as_str(), self, key))
         );
 
         // We may be concurrently trying both execute and force a query
@@ -725,18 +726,6 @@ macro_rules! define_queries_inner {
                 }
             }
 
-            pub fn record_computed_queries(&self, sess: &Session) {
-                sess.profiler(|p| {
-                    $(
-                        p.record_computed_queries(
-                            <queries::$name<'_> as QueryConfig<'_>>::NAME,
-                            <queries::$name<'_> as QueryConfig<'_>>::CATEGORY,
-                            self.$name.lock().results.len()
-                        );
-                    )*
-                });
-            }
-
             #[cfg(parallel_compiler)]
             pub fn collect_active_jobs(&self) -> Vec<Lrc<QueryJob<$tcx>>> {
                 let mut jobs = Vec::new();
@@ -855,6 +844,24 @@ macro_rules! define_queries_inner {
         }
 
         #[allow(nonstandard_style)]
+        #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+        pub enum QueryName {
+            $($name),*
+        }
+
+        impl QueryName {
+            pub fn register_with_profiler(profiler: &crate::util::profiling::SelfProfiler) {
+                $(profiler.register_query_name(QueryName::$name);)*
+            }
+
+            pub fn as_str(&self) -> &'static str {
+                match self {
+                    $(QueryName::$name => stringify!($name),)*
+                }
+            }
+        }
+
+        #[allow(nonstandard_style)]
         #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
         pub enum Query<$tcx> {
             $($(#[$attr])* $name($K)),*
@@ -894,6 +901,12 @@ macro_rules! define_queries_inner {
                     $(Query::$name(key) => key.default_span(tcx),)*
                 }
             }
+
+            pub fn query_name(&self) -> QueryName {
+                match self {
+                    $(Query::$name(_) => QueryName::$name,)*
+                }
+            }
         }
 
         impl<'a, $tcx> HashStable<StableHashingContext<'a>> for Query<$tcx> {
@@ -930,7 +943,7 @@ macro_rules! define_queries_inner {
             type Key = $K;
             type Value = $V;
 
-            const NAME: &'static str = stringify!($name);
+            const NAME: QueryName = QueryName::$name;
             const CATEGORY: ProfileCategory = $category;
         }
 
diff --git a/src/librustc/util/profiling.rs b/src/librustc/util/profiling.rs
index 2739a30a291..aabf9a401c6 100644
--- a/src/librustc/util/profiling.rs
+++ b/src/librustc/util/profiling.rs
@@ -1,14 +1,20 @@
 use std::borrow::Cow;
-use std::fs;
-use std::io::{BufWriter, Write};
-use std::mem;
+use std::error::Error;
+use std::mem::{self, Discriminant};
 use std::process;
 use std::thread::ThreadId;
-use std::time::{Duration, Instant, SystemTime};
+use std::u32;
 
-use crate::session::config::Options;
+use crate::ty::query::QueryName;
 
-use rustc_data_structures::fx::FxHashMap;
+use measureme::{StringId, TimestampKind};
+
+/// MmapSerializationSink is faster on macOS and Linux,
+/// but FileSerializationSink is faster on Windows
+#[cfg(not(windows))]
+type Profiler = measureme::Profiler<measureme::MmapSerializationSink>;
+#[cfg(windows)]
+type Profiler = measureme::Profiler<measureme::FileSerializationSink>;
 
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Ord, PartialOrd)]
 pub enum ProfileCategory {
@@ -35,409 +41,129 @@ pub enum ProfilerEvent {
     QueryBlockedEnd { query_name: &'static str, category: ProfileCategory, time: u64 },
 }
 
-impl ProfilerEvent {
-    fn timestamp(&self) -> u64 {
-        use self::ProfilerEvent::*;
-
-        match self {
-            QueryStart { time, .. } |
-            QueryEnd { time, .. } |
-            GenericActivityStart { time, .. } |
-            GenericActivityEnd { time, .. } |
-            QueryCacheHit { time, .. } |
-            QueryCount { time, .. } |
-            IncrementalLoadResultStart { time, .. } |
-            IncrementalLoadResultEnd { time, .. } |
-            QueryBlockedStart { time, .. } |
-            QueryBlockedEnd { time, .. } => *time
-        }
-    }
-}
-
 fn thread_id_to_u64(tid: ThreadId) -> u64 {
     unsafe { mem::transmute::<ThreadId, u64>(tid) }
 }
 
 pub struct SelfProfiler {
-    events: FxHashMap<ThreadId, Vec<ProfilerEvent>>,
-    start_time: SystemTime,
-    start_instant: Instant,
+    profiler: Profiler,
+    query_event_kind: StringId,
+    generic_activity_event_kind: StringId,
+    incremental_load_result_event_kind: StringId,
+    query_blocked_event_kind: StringId,
+    query_cache_hit_event_kind: StringId,
 }
 
 impl SelfProfiler {
-    pub fn new() -> SelfProfiler {
-        let profiler = SelfProfiler {
-            events: Default::default(),
-            start_time: SystemTime::now(),
-            start_instant: Instant::now(),
+    pub fn new() -> Result<SelfProfiler, Box<dyn Error>> {
+        let filename = format!("pid-{}.rustc_profile", process::id());
+        let path = std::path::Path::new(&filename);
+        let profiler = Profiler::new(path)?;
+
+        let query_event_kind = profiler.alloc_string("Query");
+        let generic_activity_event_kind = profiler.alloc_string("GenericActivity");
+        let incremental_load_result_event_kind = profiler.alloc_string("IncrementalLoadResult");
+        let query_blocked_event_kind = profiler.alloc_string("QueryBlocked");
+        let query_cache_hit_event_kind = profiler.alloc_string("QueryCacheHit");
+
+        Ok(SelfProfiler {
+            profiler,
+            query_event_kind,
+            generic_activity_event_kind,
+            incremental_load_result_event_kind,
+            query_blocked_event_kind,
+            query_cache_hit_event_kind,
+        })
+    }
+
+    fn get_query_name_string_id(query_name: QueryName) -> StringId {
+        let discriminant = unsafe {
+            mem::transmute::<Discriminant<QueryName>, u64>(mem::discriminant(&query_name))
         };
 
-        profiler
+        StringId::reserved(discriminant as u32)
+    }
+
+    pub fn register_query_name(&self, query_name: QueryName) {
+        let id = SelfProfiler::get_query_name_string_id(query_name);
+
+        self.profiler.alloc_string_with_reserved_id(id, query_name.as_str());
     }
 
     #[inline]
     pub fn start_activity(
-        &mut self,
-        category: ProfileCategory,
+        &self,
         label: impl Into<Cow<'static, str>>,
     ) {
-        self.record(ProfilerEvent::GenericActivityStart {
-            category,
-            label: label.into(),
-            time: self.get_time_from_start(),
-        })
+        self.record(&label.into(), self.generic_activity_event_kind, TimestampKind::Start);
     }
 
     #[inline]
     pub fn end_activity(
-        &mut self,
-        category: ProfileCategory,
+        &self,
         label: impl Into<Cow<'static, str>>,
     ) {
-        self.record(ProfilerEvent::GenericActivityEnd {
-            category,
-            label: label.into(),
-            time: self.get_time_from_start(),
-        })
+        self.record(&label.into(), self.generic_activity_event_kind, TimestampKind::End);
     }
 
     #[inline]
-    pub fn record_computed_queries(
-        &mut self,
-        query_name: &'static str,
-        category: ProfileCategory,
-        count: usize)
-        {
-        self.record(ProfilerEvent::QueryCount {
-            query_name,
-            category,
-            count,
-            time: self.get_time_from_start(),
-        })
+    pub fn record_query_hit(&self, query_name: QueryName) {
+        self.record_query(query_name, self.query_cache_hit_event_kind, TimestampKind::Instant);
     }
 
     #[inline]
-    pub fn record_query_hit(&mut self, query_name: &'static str, category: ProfileCategory) {
-        self.record(ProfilerEvent::QueryCacheHit {
-            query_name,
-            category,
-            time: self.get_time_from_start(),
-        })
+    pub fn start_query(&self, query_name: QueryName) {
+        self.record_query(query_name, self.query_event_kind, TimestampKind::Start);
     }
 
     #[inline]
-    pub fn start_query(&mut self, query_name: &'static str, category: ProfileCategory) {
-        self.record(ProfilerEvent::QueryStart {
-            query_name,
-            category,
-            time: self.get_time_from_start(),
-        });
+    pub fn end_query(&self, query_name: QueryName) {
+        self.record_query(query_name, self.query_event_kind, TimestampKind::End);
     }
 
     #[inline]
-    pub fn end_query(&mut self, query_name: &'static str, category: ProfileCategory) {
-        self.record(ProfilerEvent::QueryEnd {
+    pub fn incremental_load_result_start(&self, query_name: QueryName) {
+        self.record_query(
             query_name,
-            category,
-            time: self.get_time_from_start(),
-        })
+            self.incremental_load_result_event_kind,
+            TimestampKind::Start
+        );
     }
 
     #[inline]
-    pub fn incremental_load_result_start(&mut self, query_name: &'static str) {
-        self.record(ProfilerEvent::IncrementalLoadResultStart {
-            query_name,
-            time: self.get_time_from_start(),
-        })
+    pub fn incremental_load_result_end(&self, query_name: QueryName) {
+        self.record_query(query_name, self.incremental_load_result_event_kind, TimestampKind::End);
     }
 
     #[inline]
-    pub fn incremental_load_result_end(&mut self, query_name: &'static str) {
-        self.record(ProfilerEvent::IncrementalLoadResultEnd {
-            query_name,
-            time: self.get_time_from_start(),
-        })
-    }
-
-    #[inline]
-    pub fn query_blocked_start(&mut self, query_name: &'static str, category: ProfileCategory) {
-        self.record(ProfilerEvent::QueryBlockedStart {
-            query_name,
-            category,
-            time: self.get_time_from_start(),
-        })
+    pub fn query_blocked_start(&self, query_name: QueryName) {
+        self.record_query(query_name, self.query_blocked_event_kind, TimestampKind::Start);
     }
 
     #[inline]
-    pub fn query_blocked_end(&mut self, query_name: &'static str, category: ProfileCategory) {
-        self.record(ProfilerEvent::QueryBlockedEnd {
-            query_name,
-            category,
-            time: self.get_time_from_start(),
-        })
+    pub fn query_blocked_end(&self, query_name: QueryName) {
+        self.record_query(query_name, self.query_blocked_event_kind, TimestampKind::End);
     }
 
     #[inline]
-    fn record(&mut self, event: ProfilerEvent) {
-        let thread_id = std::thread::current().id();
-        let events = self.events.entry(thread_id).or_default();
+    fn record(&self, event_id: &str, event_kind: StringId, timestamp_kind: TimestampKind) {
+        let thread_id = thread_id_to_u64(std::thread::current().id());
 
-        events.push(event);
+        let event_id = self.profiler.alloc_string(event_id);
+        self.profiler.record_event(event_kind, event_id, thread_id, timestamp_kind);
     }
 
     #[inline]
-    fn get_time_from_start(&self) -> u64 {
-        let duration = Instant::now() - self.start_instant;
-        duration.as_nanos() as u64
-    }
-
-    pub fn dump_raw_events(&self, opts: &Options) {
-        use self::ProfilerEvent::*;
-
-        let pid = process::id();
-
-        let filename =
-            format!("{}.profile_events.json", opts.crate_name.clone().unwrap_or_default());
-
-        let mut file = BufWriter::new(fs::File::create(filename).unwrap());
-
-        let threads: Vec<_> =
-            self.events
-                .keys()
-                .into_iter()
-                .map(|tid| format!("{}", thread_id_to_u64(*tid)))
-                .collect();
-
-        write!(file,
-            "{{\
-                \"processes\": {{\
-                    \"{}\": {{\
-                        \"threads\": [{}],\
-                        \"crate_name\": \"{}\",\
-                        \"opt_level\": \"{:?}\",\
-                        \"incremental\": {}\
-                    }}\
-                }},\
-                \"events\": [\
-             ",
-            pid,
-            threads.join(","),
-            opts.crate_name.clone().unwrap_or_default(),
-            opts.optimize,
-            if opts.incremental.is_some() { "true" } else { "false" },
-        ).unwrap();
-
-        let mut is_first = true;
-        for (thread_id, events) in &self.events {
-            let thread_id = thread_id_to_u64(*thread_id);
-
-            for event in events {
-                if is_first {
-                    is_first = false;
-                } else {
-                    writeln!(file, ",").unwrap();
-                }
-
-                let (secs, nanos) = {
-                    let time = self.start_time + Duration::from_nanos(event.timestamp());
-                    let time_since_unix =
-                        time.duration_since(SystemTime::UNIX_EPOCH).unwrap_or_default();
-                    (time_since_unix.as_secs(), time_since_unix.subsec_nanos())
-                };
+    fn record_query(
+        &self,
+        query_name: QueryName,
+        event_kind: StringId,
+        timestamp_kind: TimestampKind,
+    ) {
+        let dep_node_name = SelfProfiler::get_query_name_string_id(query_name);
 
-                match event {
-                    QueryStart { query_name, category, time: _ } =>
-                        write!(file,
-                            "{{ \
-                                \"QueryStart\": {{ \
-                                    \"query_name\": \"{}\",\
-                                    \"category\": \"{:?}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            query_name,
-                            category,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    QueryEnd { query_name, category, time: _ } =>
-                        write!(file,
-                            "{{\
-                                \"QueryEnd\": {{\
-                                    \"query_name\": \"{}\",\
-                                    \"category\": \"{:?}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            query_name,
-                            category,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    GenericActivityStart { category, label, time: _ } =>
-                        write!(file,
-                            "{{
-                                \"GenericActivityStart\": {{\
-                                    \"category\": \"{:?}\",\
-                                    \"label\": \"{}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            category,
-                            label,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    GenericActivityEnd { category, label, time: _ } =>
-                        write!(file,
-                            "{{\
-                                \"GenericActivityEnd\": {{\
-                                    \"category\": \"{:?}\",\
-                                    \"label\": \"{}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            category,
-                            label,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    QueryCacheHit { query_name, category, time: _ } =>
-                        write!(file,
-                            "{{\
-                                \"QueryCacheHit\": {{\
-                                    \"query_name\": \"{}\",\
-                                    \"category\": \"{:?}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            query_name,
-                            category,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    QueryCount { query_name, category, count, time: _ } =>
-                        write!(file,
-                            "{{\
-                                \"QueryCount\": {{\
-                                    \"query_name\": \"{}\",\
-                                    \"category\": \"{:?}\",\
-                                    \"count\": {},\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            query_name,
-                            category,
-                            count,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    IncrementalLoadResultStart { query_name, time: _ } =>
-                        write!(file,
-                            "{{\
-                                \"IncrementalLoadResultStart\": {{\
-                                    \"query_name\": \"{}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            query_name,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    IncrementalLoadResultEnd { query_name, time: _ } =>
-                        write!(file,
-                            "{{\
-                                \"IncrementalLoadResultEnd\": {{\
-                                    \"query_name\": \"{}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            query_name,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    QueryBlockedStart { query_name, category, time: _ } =>
-                        write!(file,
-                            "{{\
-                                \"QueryBlockedStart\": {{\
-                                    \"query_name\": \"{}\",\
-                                    \"category\": \"{:?}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            query_name,
-                            category,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    QueryBlockedEnd { query_name, category, time: _ } =>
-                        write!(file,
-                            "{{\
-                                \"QueryBlockedEnd\": {{\
-                                    \"query_name\": \"{}\",\
-                                    \"category\": \"{:?}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            query_name,
-                            category,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                }
-            }
-        }
+        let thread_id = thread_id_to_u64(std::thread::current().id());
 
-        write!(file, "] }}").unwrap();
+        self.profiler.record_event(event_kind, dep_node_name, thread_id, timestamp_kind);
     }
 }
diff --git a/src/librustc_codegen_llvm/back/lto.rs b/src/librustc_codegen_llvm/back/lto.rs
index 84c652ff238..348f1a12126 100644
--- a/src/librustc_codegen_llvm/back/lto.rs
+++ b/src/librustc_codegen_llvm/back/lto.rs
@@ -15,7 +15,6 @@ use rustc::hir::def_id::LOCAL_CRATE;
 use rustc::middle::exported_symbols::SymbolExportLevel;
 use rustc::session::config::{self, Lto};
 use rustc::util::common::time_ext;
-use rustc::util::profiling::ProfileCategory;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_codegen_ssa::{ModuleCodegen, ModuleKind};
 
@@ -67,8 +66,7 @@ fn prepare_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
         .iter()
         .filter_map(symbol_filter)
         .collect::<Vec<CString>>();
-    let _timer = cgcx.profile_activity(ProfileCategory::Codegen,
-                                       "generate_symbol_white_list_for_thinlto");
+    let _timer = cgcx.profile_activity("generate_symbol_white_list_for_thinlto");
     info!("{} symbols to preserve in this crate", symbol_white_list.len());
 
     // If we're performing LTO for the entire crate graph, then for each of our
@@ -97,8 +95,7 @@ fn prepare_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
         }
 
         for &(cnum, ref path) in cgcx.each_linked_rlib_for_lto.iter() {
-            let _timer = cgcx.profile_activity(ProfileCategory::Codegen,
-                                               format!("load: {}", path.display()));
+            let _timer = cgcx.profile_activity(format!("load: {}", path.display()));
             let exported_symbols = cgcx.exported_symbols
                 .as_ref().expect("needs exported symbols for LTO");
             symbol_white_list.extend(
@@ -727,8 +724,7 @@ pub unsafe fn optimize_thin_module(
         // Like with "fat" LTO, get some better optimizations if landing pads
         // are disabled by removing all landing pads.
         if cgcx.no_landing_pads {
-            let _timer = cgcx.profile_activity(ProfileCategory::Codegen,
-                                               "LLVM_remove_landing_pads");
+            let _timer = cgcx.profile_activity("LLVM_remove_landing_pads");
             llvm::LLVMRustMarkAllFunctionsNounwind(llmod);
             save_temp_bitcode(&cgcx, &module, "thin-lto-after-nounwind");
         }
diff --git a/src/librustc_codegen_llvm/back/write.rs b/src/librustc_codegen_llvm/back/write.rs
index f0ed201ad5c..4d52df91a8d 100644
--- a/src/librustc_codegen_llvm/back/write.rs
+++ b/src/librustc_codegen_llvm/back/write.rs
@@ -18,7 +18,6 @@ use rustc::session::Session;
 use rustc::ty::TyCtxt;
 use rustc_codegen_ssa::{ModuleCodegen, CompiledModule};
 use rustc::util::common::time_ext;
-use rustc::util::profiling::ProfileCategory;
 use rustc_fs_util::{path_to_c_string, link_or_copy};
 use rustc_data_structures::small_c_str::SmallCStr;
 use errors::{Handler, FatalError};
@@ -414,7 +413,7 @@ pub(crate) unsafe fn optimize(cgcx: &CodegenContext<LlvmCodegenBackend>,
 
         // Finally, run the actual optimization passes
         {
-            let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_function_passes");
+            let _timer = cgcx.profile_activity("LLVM_function_passes");
             time_ext(config.time_passes,
                         None,
                         &format!("llvm function passes [{}]", module_name.unwrap()),
@@ -423,7 +422,7 @@ pub(crate) unsafe fn optimize(cgcx: &CodegenContext<LlvmCodegenBackend>,
             });
         }
         {
-            let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_module_passes");
+            let _timer = cgcx.profile_activity("LLVM_module_passes");
             time_ext(config.time_passes,
                     None,
                     &format!("llvm module passes [{}]", module_name.unwrap()),
@@ -445,7 +444,7 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>,
                   config: &ModuleConfig)
     -> Result<CompiledModule, FatalError>
 {
-    let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "codegen");
+    let _timer = cgcx.profile_activity("codegen");
     {
         let llmod = module.module_llvm.llmod();
         let llcx = &*module.module_llvm.llcx;
@@ -496,12 +495,12 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>,
 
 
         if write_bc || config.emit_bc_compressed || config.embed_bitcode {
-            let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_make_bitcode");
+            let _timer = cgcx.profile_activity("LLVM_make_bitcode");
             let thin = ThinBuffer::new(llmod);
             let data = thin.data();
 
             if write_bc {
-                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_emit_bitcode");
+                let _timer = cgcx.profile_activity("LLVM_emit_bitcode");
                 if let Err(e) = fs::write(&bc_out, data) {
                     let msg = format!("failed to write bytecode to {}: {}", bc_out.display(), e);
                     diag_handler.err(&msg);
@@ -509,13 +508,12 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>,
             }
 
             if config.embed_bitcode {
-                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_embed_bitcode");
+                let _timer = cgcx.profile_activity("LLVM_embed_bitcode");
                 embed_bitcode(cgcx, llcx, llmod, Some(data));
             }
 
             if config.emit_bc_compressed {
-                let _timer = cgcx.profile_activity(ProfileCategory::Codegen,
-                                                   "LLVM_compress_bitcode");
+                let _timer = cgcx.profile_activity("LLVM_compress_bitcode");
                 let dst = bc_out.with_extension(RLIB_BYTECODE_EXTENSION);
                 let data = bytecode::encode(&module.name, data);
                 if let Err(e) = fs::write(&dst, data) {
@@ -530,7 +528,7 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>,
         time_ext(config.time_passes, None, &format!("codegen passes [{}]", module_name.unwrap()),
             || -> Result<(), FatalError> {
             if config.emit_ir {
-                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_emit_ir");
+                let _timer = cgcx.profile_activity("LLVM_emit_ir");
                 let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name);
                 let out_c = path_to_c_string(&out);
 
@@ -577,7 +575,7 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>,
             }
 
             if config.emit_asm || asm_to_obj {
-                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_emit_asm");
+                let _timer = cgcx.profile_activity("LLVM_emit_asm");
                 let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
 
                 // We can't use the same module for asm and binary output, because that triggers
@@ -595,13 +593,13 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>,
             }
 
             if write_obj {
-                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_emit_obj");
+                let _timer = cgcx.profile_activity("LLVM_emit_obj");
                 with_codegen(tm, llmod, config.no_builtins, |cpm| {
                     write_output_file(diag_handler, tm, cpm, llmod, &obj_out,
                                       llvm::FileType::ObjectFile)
                 })?;
             } else if asm_to_obj {
-                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_asm_to_obj");
+                let _timer = cgcx.profile_activity("LLVM_asm_to_obj");
                 let assembly = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
                 run_assembler(cgcx, diag_handler, &assembly, &obj_out);
 
diff --git a/src/librustc_codegen_llvm/lib.rs b/src/librustc_codegen_llvm/lib.rs
index 0aae6b46e3d..da91217e95e 100644
--- a/src/librustc_codegen_llvm/lib.rs
+++ b/src/librustc_codegen_llvm/lib.rs
@@ -65,7 +65,6 @@ use rustc::middle::cstore::{EncodedMetadata, MetadataLoader};
 use rustc::session::Session;
 use rustc::session::config::{OutputFilenames, OutputType, PrintRequest, OptLevel};
 use rustc::ty::{self, TyCtxt};
-use rustc::util::profiling::ProfileCategory;
 use rustc::util::common::ErrorReported;
 use rustc_mir::monomorphize;
 use rustc_codegen_ssa::ModuleCodegen;
@@ -330,12 +329,12 @@ impl CodegenBackend for LlvmCodegenBackend {
 
         // Run the linker on any artifacts that resulted from the LLVM run.
         // This should produce either a finished executable or library.
-        sess.profiler(|p| p.start_activity(ProfileCategory::Linking, "link_crate"));
+        sess.profiler(|p| p.start_activity("link_crate"));
         time(sess, "linking", || {
             back::link::link_binary(sess, &codegen_results,
                                     outputs, &codegen_results.crate_name.as_str());
         });
-        sess.profiler(|p| p.end_activity(ProfileCategory::Linking, "link_crate"));
+        sess.profiler(|p| p.end_activity("link_crate"));
 
         // Now that we won't touch anything in the incremental compilation directory
         // any more, we can finalize it (which involves renaming it)
diff --git a/src/librustc_codegen_ssa/back/write.rs b/src/librustc_codegen_ssa/back/write.rs
index fa8c4177eaf..f5affad4cce 100644
--- a/src/librustc_codegen_ssa/back/write.rs
+++ b/src/librustc_codegen_ssa/back/write.rs
@@ -18,7 +18,7 @@ use rustc::util::nodemap::FxHashMap;
 use rustc::hir::def_id::{CrateNum, LOCAL_CRATE};
 use rustc::ty::TyCtxt;
 use rustc::util::common::{time_depth, set_time_depth, print_time_passes_entry};
-use rustc::util::profiling::{ProfileCategory, SelfProfiler};
+use rustc::util::profiling::SelfProfiler;
 use rustc_fs_util::link_or_copy;
 use rustc_data_structures::svh::Svh;
 use rustc_errors::{Handler, Level, DiagnosticBuilder, FatalError, DiagnosticId};
@@ -29,7 +29,6 @@ use syntax::ext::hygiene::Mark;
 use syntax_pos::MultiSpan;
 use syntax_pos::symbol::Symbol;
 use jobserver::{Client, Acquired};
-use parking_lot::Mutex as PlMutex;
 
 use std::any::Any;
 use std::borrow::Cow;
@@ -198,25 +197,21 @@ impl<B: WriteBackendMethods> Clone for TargetMachineFactory<B> {
 }
 
 pub struct ProfileGenericActivityTimer {
-    profiler: Option<Arc<PlMutex<SelfProfiler>>>,
-    category: ProfileCategory,
+    profiler: Option<Arc<SelfProfiler>>,
     label: Cow<'static, str>,
 }
 
 impl ProfileGenericActivityTimer {
     pub fn start(
-        profiler: Option<Arc<PlMutex<SelfProfiler>>>,
-        category: ProfileCategory,
+        profiler: Option<Arc<SelfProfiler>>,
         label: Cow<'static, str>,
     ) -> ProfileGenericActivityTimer {
         if let Some(profiler) = &profiler {
-            let mut p = profiler.lock();
-            p.start_activity(category, label.clone());
+            profiler.start_activity(label.clone());
         }
 
         ProfileGenericActivityTimer {
             profiler,
-            category,
             label,
         }
     }
@@ -225,8 +220,7 @@ impl ProfileGenericActivityTimer {
 impl Drop for ProfileGenericActivityTimer {
     fn drop(&mut self) {
         if let Some(profiler) = &self.profiler {
-            let mut p = profiler.lock();
-            p.end_activity(self.category, self.label.clone());
+            profiler.end_activity(self.label.clone());
         }
     }
 }
@@ -237,7 +231,7 @@ pub struct CodegenContext<B: WriteBackendMethods> {
     // Resources needed when running LTO
     pub backend: B,
     pub time_passes: bool,
-    pub profiler: Option<Arc<PlMutex<SelfProfiler>>>,
+    pub profiler: Option<Arc<SelfProfiler>>,
     pub lto: Lto,
     pub no_landing_pads: bool,
     pub save_temps: bool,
@@ -291,19 +285,17 @@ impl<B: WriteBackendMethods> CodegenContext<B> {
 
     #[inline(never)]
     #[cold]
-    fn profiler_active<F: FnOnce(&mut SelfProfiler) -> ()>(&self, f: F) {
+    fn profiler_active<F: FnOnce(&SelfProfiler) -> ()>(&self, f: F) {
         match &self.profiler {
             None => bug!("profiler_active() called but there was no profiler active"),
             Some(profiler) => {
-                let mut p = profiler.lock();
-
-                f(&mut p);
+                f(&*profiler);
             }
         }
     }
 
     #[inline(always)]
-    pub fn profile<F: FnOnce(&mut SelfProfiler) -> ()>(&self, f: F) {
+    pub fn profile<F: FnOnce(&SelfProfiler) -> ()>(&self, f: F) {
         if unlikely!(self.profiler.is_some()) {
             self.profiler_active(f)
         }
@@ -311,10 +303,9 @@ impl<B: WriteBackendMethods> CodegenContext<B> {
 
     pub fn profile_activity(
         &self,
-        category: ProfileCategory,
         label: impl Into<Cow<'static, str>>,
     ) -> ProfileGenericActivityTimer {
-        ProfileGenericActivityTimer::start(self.profiler.clone(), category, label.into())
+        ProfileGenericActivityTimer::start(self.profiler.clone(), label.into())
     }
 }
 
@@ -324,7 +315,7 @@ fn generate_lto_work<B: ExtraBackendMethods>(
     needs_thin_lto: Vec<(String, B::ThinBuffer)>,
     import_only_modules: Vec<(SerializedModule<B::ModuleBuffer>, WorkProduct)>
 ) -> Vec<(WorkItem<B>, u64)> {
-    cgcx.profile(|p| p.start_activity(ProfileCategory::Linking, "codegen_run_lto"));
+    cgcx.profile(|p| p.start_activity("codegen_run_lto"));
 
     let (lto_modules, copy_jobs) = if !needs_fat_lto.is_empty() {
         assert!(needs_thin_lto.is_empty());
@@ -351,7 +342,7 @@ fn generate_lto_work<B: ExtraBackendMethods>(
         }), 0)
     })).collect();
 
-    cgcx.profile(|p| p.end_activity(ProfileCategory::Linking, "codegen_run_lto"));
+    cgcx.profile(|p| p.end_activity("codegen_run_lto"));
 
     result
 }
@@ -1655,9 +1646,9 @@ fn spawn_work<B: ExtraBackendMethods>(
         // surface that there was an error in this worker.
         bomb.result = {
             let label = work.name();
-            cgcx.profile(|p| p.start_activity(ProfileCategory::Codegen, label.clone()));
+            cgcx.profile(|p| p.start_activity(label.clone()));
             let result = execute_work_item(&cgcx, work).ok();
-            cgcx.profile(|p| p.end_activity(ProfileCategory::Codegen, label));
+            cgcx.profile(|p| p.end_activity(label));
 
             result
         };
diff --git a/src/librustc_codegen_ssa/base.rs b/src/librustc_codegen_ssa/base.rs
index 27e3e306699..6cb54831a07 100644
--- a/src/librustc_codegen_ssa/base.rs
+++ b/src/librustc_codegen_ssa/base.rs
@@ -25,7 +25,6 @@ use rustc::ty::layout::{self, Align, TyLayout, LayoutOf, VariantIdx, HasTyCtxt};
 use rustc::ty::query::Providers;
 use rustc::middle::cstore::{self, LinkagePreference};
 use rustc::util::common::{time, print_time_passes_entry};
-use rustc::util::profiling::ProfileCategory;
 use rustc::session::config::{self, EntryFnType, Lto};
 use rustc::session::Session;
 use rustc_mir::monomorphize::item::DefPathBasedNames;
@@ -539,7 +538,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
     let cgu_name_builder = &mut CodegenUnitNameBuilder::new(tcx);
 
     // Codegen the metadata.
-    tcx.sess.profiler(|p| p.start_activity(ProfileCategory::Codegen, "codegen crate metadata"));
+    tcx.sess.profiler(|p| p.start_activity("codegen crate metadata"));
 
     let metadata_cgu_name = cgu_name_builder.build_cgu_name(LOCAL_CRATE,
                                                             &["crate"],
@@ -549,7 +548,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
     let metadata = time(tcx.sess, "write metadata", || {
         backend.write_metadata(tcx, &mut metadata_llvm_module)
     });
-    tcx.sess.profiler(|p| p.end_activity(ProfileCategory::Codegen, "codegen crate metadata"));
+    tcx.sess.profiler(|p| p.end_activity("codegen crate metadata"));
 
     let metadata_module = ModuleCodegen {
         name: metadata_cgu_name,
@@ -662,14 +661,12 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
 
         match cgu_reuse {
             CguReuse::No => {
-                tcx.sess.profiler(|p| p.start_activity(ProfileCategory::Codegen,
-                                                       format!("codegen {}", cgu.name())));
+                tcx.sess.profiler(|p| p.start_activity(format!("codegen {}", cgu.name())));
                 let start_time = Instant::now();
                 let stats = backend.compile_codegen_unit(tcx, *cgu.name());
                 all_stats.extend(stats);
                 total_codegen_time += start_time.elapsed();
-                tcx.sess.profiler(|p| p.end_activity(ProfileCategory::Codegen,
-                                                     format!("codegen {}", cgu.name())));
+                tcx.sess.profiler(|p| p.end_activity(format!("codegen {}", cgu.name())));
                 false
             }
             CguReuse::PreLto => {
diff --git a/src/librustc_interface/interface.rs b/src/librustc_interface/interface.rs
index 74085123f89..23575e51dfc 100644
--- a/src/librustc_interface/interface.rs
+++ b/src/librustc_interface/interface.rs
@@ -123,10 +123,6 @@ where
         profile::dump(&compiler.sess, "profile_queries".to_string())
     }
 
-    if compiler.sess.opts.debugging_opts.self_profile {
-        compiler.sess.profiler(|p| p.dump_raw_events(&compiler.sess.opts));
-    }
-
     r
 }
 
diff --git a/src/librustc_interface/passes.rs b/src/librustc_interface/passes.rs
index 1547e15fd48..3fd684f5d97 100644
--- a/src/librustc_interface/passes.rs
+++ b/src/librustc_interface/passes.rs
@@ -69,7 +69,7 @@ pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
         .set_continue_after_error(sess.opts.debugging_opts.continue_parse_after_error);
     hygiene::set_default_edition(sess.edition());
 
-    sess.profiler(|p| p.start_activity(ProfileCategory::Parsing, "parsing"));
+    sess.profiler(|p| p.start_activity("parsing"));
     let krate = time(sess, "parsing", || match *input {
         Input::File(ref file) => parse::parse_crate_from_file(file, &sess.parse_sess),
         Input::Str {
@@ -77,7 +77,7 @@ pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
             ref name,
         } => parse::parse_crate_from_source_str(name.clone(), input.clone(), &sess.parse_sess),
     })?;
-    sess.profiler(|p| p.end_activity(ProfileCategory::Parsing, "parsing"));
+    sess.profiler(|p| p.end_activity("parsing"));
 
     sess.diagnostic().set_continue_after_error(true);
 
@@ -375,7 +375,7 @@ fn configure_and_expand_inner<'a>(
     syntax_ext::register_builtins(&mut resolver, plugin_info.syntax_exts);
 
     // Expand all macros
-    sess.profiler(|p| p.start_activity(ProfileCategory::Expansion, "macro expansion"));
+    sess.profiler(|p| p.start_activity("macro expansion"));
     krate = time(sess, "expansion", || {
         // Windows dlls do not have rpaths, so they don't know how to find their
         // dependencies. It's up to us to tell the system where to find all the
@@ -450,7 +450,7 @@ fn configure_and_expand_inner<'a>(
         }
         krate
     });
-    sess.profiler(|p| p.end_activity(ProfileCategory::Expansion, "macro expansion"));
+    sess.profiler(|p| p.end_activity("macro expansion"));
 
     time(sess, "maybe building test harness", || {
         syntax::test::modify_for_testing(
@@ -869,8 +869,6 @@ pub fn create_global_ctxt(
         yield BoxedGlobalCtxt::initial_yield(());
         box_region_allow_access!(for('gcx), (&'gcx GlobalCtxt<'gcx>), (gcx));
 
-        gcx.queries.record_computed_queries(sess);
-
         if sess.opts.debugging_opts.query_stats {
             gcx.queries.print_stats();
         }
@@ -1022,9 +1020,9 @@ pub fn start_codegen<'tcx>(
         ::rustc::middle::dependency_format::calculate(tcx)
     });
 
-    tcx.sess.profiler(|p| p.start_activity(ProfileCategory::Codegen, "codegen crate"));
+    tcx.sess.profiler(|p| p.start_activity("codegen crate"));
     let codegen = time(tcx.sess, "codegen", move || codegen_backend.codegen_crate(tcx, rx));
-    tcx.sess.profiler(|p| p.end_activity(ProfileCategory::Codegen, "codegen crate"));
+    tcx.sess.profiler(|p| p.end_activity("codegen crate"));
 
     if log_enabled!(::log::Level::Info) {
         println!("Post-codegen");
diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs
index 710c84a6bc9..21d1af229dd 100644
--- a/src/librustc_typeck/lib.rs
+++ b/src/librustc_typeck/lib.rs
@@ -109,7 +109,6 @@ use rustc::ty::subst::SubstsRef;
 use rustc::ty::{self, Ty, TyCtxt};
 use rustc::ty::query::Providers;
 use rustc::util;
-use rustc::util::profiling::ProfileCategory;
 use syntax_pos::Span;
 use util::common::time;
 
@@ -319,7 +318,7 @@ pub fn provide(providers: &mut Providers<'_>) {
 pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
                              -> Result<(), ErrorReported>
 {
-    tcx.sess.profiler(|p| p.start_activity(ProfileCategory::TypeChecking, "type-check crate"));
+    tcx.sess.profiler(|p| p.start_activity("type-check crate"));
 
     // this ensures that later parts of type checking can assume that items
     // have valid types and not error
@@ -370,7 +369,7 @@ pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
     check_unused::check_crate(tcx);
     check_for_entry_fn(tcx);
 
-    tcx.sess.profiler(|p| p.end_activity(ProfileCategory::TypeChecking, "type-check crate"));
+    tcx.sess.profiler(|p| p.end_activity("type-check crate"));
 
     if tcx.sess.err_count() == 0 {
         Ok(())
diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs
index 30fe327cac4..411961d70bf 100644
--- a/src/tools/tidy/src/deps.rs
+++ b/src/tools/tidy/src/deps.rs
@@ -96,6 +96,7 @@ const WHITELIST: &[Crate<'_>] = &[
     Crate("lock_api"),
     Crate("log"),
     Crate("log_settings"),
+    Crate("measureme"),
     Crate("memchr"),
     Crate("memmap"),
     Crate("memoffset"),