about summary refs log tree commit diff
diff options
context:
space:
mode:
authorMichael Woerister <michaelwoerister@posteo>2017-11-13 15:13:44 +0100
committerMichael Woerister <michaelwoerister@posteo>2017-11-13 15:13:44 +0100
commit8cbc02238da9f36245d61e1d77ecd214eeed2e6e (patch)
treef7dccab684b8dd496fcce9183a932a4aea97bd11
parent67d2b1b7fd2d0efed9734bcab0f25528b9e37492 (diff)
downloadrust-8cbc02238da9f36245d61e1d77ecd214eeed2e6e.tar.gz
rust-8cbc02238da9f36245d61e1d77ecd214eeed2e6e.zip
incr.comp.: Include header when loading cache files in order to get the same byte offsets as when saving.
-rw-r--r--src/librustc/ty/maps/on_disk_cache.rs4
-rw-r--r--src/librustc_incremental/persist/file_format.rs18
-rw-r--r--src/librustc_incremental/persist/load.rs26
3 files changed, 28 insertions, 20 deletions
diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs
index 24ce8fb2995..a301b0ce6a7 100644
--- a/src/librustc/ty/maps/on_disk_cache.rs
+++ b/src/librustc/ty/maps/on_disk_cache.rs
@@ -58,10 +58,10 @@ impl<'sess> OnDiskCache<'sess> {
     /// so far) will eagerly deserialize the complete cache. Once we are
     /// dealing with larger amounts of data (i.e. cached query results),
     /// deserialization will need to happen lazily.
-    pub fn new(sess: &'sess Session, data: &[u8]) -> OnDiskCache<'sess> {
+    pub fn new(sess: &'sess Session, data: &[u8], start_pos: usize) -> OnDiskCache<'sess> {
         debug_assert!(sess.opts.incremental.is_some());
 
-        let mut decoder = opaque::Decoder::new(&data[..], 0);
+        let mut decoder = opaque::Decoder::new(&data[..], start_pos);
         let header = Header::decode(&mut decoder).unwrap();
 
         let prev_diagnostics: FxHashMap<_, _> = {
diff --git a/src/librustc_incremental/persist/file_format.rs b/src/librustc_incremental/persist/file_format.rs
index 7d1400b6b95..7d27b842a68 100644
--- a/src/librustc_incremental/persist/file_format.rs
+++ b/src/librustc_incremental/persist/file_format.rs
@@ -53,19 +53,25 @@ pub fn write_file_header<W: io::Write>(stream: &mut W) -> io::Result<()> {
 
 /// Reads the contents of a file with a file header as defined in this module.
 ///
-/// - Returns `Ok(Some(data))` if the file existed and was generated by a
+/// - Returns `Ok(Some((data, pos)))` if the file existed and was generated by a
 ///   compatible compiler version. `data` is the entire contents of the file
-///   *after* the header.
+///   and `pos` points to the first byte after the header.
 /// - Returns `Ok(None)` if the file did not exist or was generated by an
 ///   incompatible version of the compiler.
 /// - Returns `Err(..)` if some kind of IO error occurred while reading the
 ///   file.
-pub fn read_file(sess: &Session, path: &Path) -> io::Result<Option<Vec<u8>>> {
+pub fn read_file(sess: &Session, path: &Path) -> io::Result<Option<(Vec<u8>, usize)>> {
     if !path.exists() {
         return Ok(None);
     }
 
     let mut file = File::open(path)?;
+    let file_size = file.metadata()?.len() as usize;
+
+    let mut data = Vec::with_capacity(file_size);
+    file.read_to_end(&mut data)?;
+
+    let mut file = io::Cursor::new(data);
 
     // Check FILE_MAGIC
     {
@@ -107,10 +113,8 @@ pub fn read_file(sess: &Session, path: &Path) -> io::Result<Option<Vec<u8>>> {
         }
     }
 
-    let mut data = vec![];
-    file.read_to_end(&mut data)?;
-
-    Ok(Some(data))
+    let post_header_start_pos = file.position() as usize;
+    Ok(Some((file.into_inner(), post_header_start_pos)))
 }
 
 fn report_format_mismatch(sess: &Session, file: &Path, message: &str) {
diff --git a/src/librustc_incremental/persist/load.rs b/src/librustc_incremental/persist/load.rs
index 158e9f2677a..624a9ed930a 100644
--- a/src/librustc_incremental/persist/load.rs
+++ b/src/librustc_incremental/persist/load.rs
@@ -42,9 +42,9 @@ pub fn dep_graph_tcx_init<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
     }
 
     let work_products_path = work_products_path(tcx.sess);
-    if let Some(work_products_data) = load_data(tcx.sess, &work_products_path) {
+    if let Some((work_products_data, start_pos)) = load_data(tcx.sess, &work_products_path) {
         // Decode the list of work_products
-        let mut work_product_decoder = Decoder::new(&work_products_data[..], 0);
+        let mut work_product_decoder = Decoder::new(&work_products_data[..], start_pos);
         let work_products: Vec<SerializedWorkProduct> =
             RustcDecodable::decode(&mut work_product_decoder).unwrap_or_else(|e| {
                 let msg = format!("Error decoding `work-products` from incremental \
@@ -77,9 +77,9 @@ pub fn dep_graph_tcx_init<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
     }
 }
 
-fn load_data(sess: &Session, path: &Path) -> Option<Vec<u8>> {
+fn load_data(sess: &Session, path: &Path) -> Option<(Vec<u8>, usize)> {
     match file_format::read_file(sess, path) {
-        Ok(Some(data)) => return Some(data),
+        Ok(Some(data_and_pos)) => return Some(data_and_pos),
         Ok(None) => {
             // The file either didn't exist or was produced by an incompatible
             // compiler version. Neither is an error.
@@ -126,8 +126,8 @@ pub fn load_prev_metadata_hashes(tcx: TyCtxt) -> DefIdMap<Fingerprint> {
 
     debug!("load_prev_metadata_hashes() - File: {}", file_path.display());
 
-    let data = match file_format::read_file(tcx.sess, &file_path) {
-        Ok(Some(data)) => data,
+    let (data, start_pos) = match file_format::read_file(tcx.sess, &file_path) {
+        Ok(Some(data_and_pos)) => data_and_pos,
         Ok(None) => {
             debug!("load_prev_metadata_hashes() - File produced by incompatible \
                     compiler version: {}", file_path.display());
@@ -141,7 +141,7 @@ pub fn load_prev_metadata_hashes(tcx: TyCtxt) -> DefIdMap<Fingerprint> {
     };
 
     debug!("load_prev_metadata_hashes() - Decoding hashes");
-    let mut decoder = Decoder::new(&data, 0);
+    let mut decoder = Decoder::new(&data, start_pos);
     let _ = Svh::decode(&mut decoder).unwrap();
     let serialized_hashes = SerializedMetadataHashes::decode(&mut decoder).unwrap();
 
@@ -171,8 +171,8 @@ pub fn load_dep_graph(sess: &Session) -> PreviousDepGraph {
         return empty
     }
 
-    if let Some(bytes) = load_data(sess, &dep_graph_path(sess)) {
-        let mut decoder = Decoder::new(&bytes, 0);
+    if let Some((bytes, start_pos)) = load_data(sess, &dep_graph_path(sess)) {
+        let mut decoder = Decoder::new(&bytes, start_pos);
         let prev_commandline_args_hash = u64::decode(&mut decoder)
             .expect("Error reading commandline arg hash from cached dep-graph");
 
@@ -184,6 +184,10 @@ pub fn load_dep_graph(sess: &Session) -> PreviousDepGraph {
             // We can't reuse the cache, purge it.
             debug!("load_dep_graph_new: differing commandline arg hashes");
 
+            delete_all_session_dir_contents(sess)
+                .expect("Failed to delete invalidated incr. comp. session \
+                         directory contents.");
+
             // No need to do any further work
             return empty
         }
@@ -202,8 +206,8 @@ pub fn load_query_result_cache<'sess>(sess: &'sess Session) -> OnDiskCache<'sess
         return OnDiskCache::new_empty(sess.codemap());
     }
 
-    if let Some(bytes) = load_data(sess, &query_cache_path(sess)) {
-        OnDiskCache::new(sess, &bytes[..])
+    if let Some((bytes, start_pos)) = load_data(sess, &query_cache_path(sess)) {
+        OnDiskCache::new(sess, &bytes[..], start_pos)
     } else {
         OnDiskCache::new_empty(sess.codemap())
     }