Diffstat (limited to 'compiler/rustc_span')
-rw-r--r--  compiler/rustc_span/Cargo.toml                       |   3
-rw-r--r--  compiler/rustc_span/src/analyze_source_file.rs       | 257
-rw-r--r--  compiler/rustc_span/src/caching_source_map_view.rs   |  27
-rw-r--r--  compiler/rustc_span/src/def_id.rs                    |   5
-rw-r--r--  compiler/rustc_span/src/hygiene.rs                   |  14
-rw-r--r--  compiler/rustc_span/src/lib.rs                       |  32
-rw-r--r--  compiler/rustc_span/src/source_map.rs                |  58
-rw-r--r--  compiler/rustc_span/src/source_map/tests.rs          |   2
-rw-r--r--  compiler/rustc_span/src/symbol.rs                    |  40
9 files changed, 126 insertions, 312 deletions
diff --git a/compiler/rustc_span/Cargo.toml b/compiler/rustc_span/Cargo.toml index 781fe6a11fe..43a2d692577 100644 --- a/compiler/rustc_span/Cargo.toml +++ b/compiler/rustc_span/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "rustc_span" version = "0.0.0" -edition = "2021" +edition = "2024" [dependencies] # tidy-alphabetical-start @@ -12,6 +12,7 @@ itoa = "1.0" md5 = { package = "md-5", version = "0.10.0" } rustc_arena = { path = "../rustc_arena" } rustc_data_structures = { path = "../rustc_data_structures" } +rustc_hashes = { path = "../rustc_hashes" } rustc_index = { path = "../rustc_index" } rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } diff --git a/compiler/rustc_span/src/analyze_source_file.rs b/compiler/rustc_span/src/analyze_source_file.rs index fba20566580..a39bb884faa 100644 --- a/compiler/rustc_span/src/analyze_source_file.rs +++ b/compiler/rustc_span/src/analyze_source_file.rs @@ -29,165 +29,6 @@ pub(crate) fn analyze_source_file(src: &str) -> (Vec<RelativeBytePos>, Vec<Multi (lines, multi_byte_chars) } -#[cfg(bootstrap)] -cfg_match! { - cfg(any(target_arch = "x86", target_arch = "x86_64")) => { - fn analyze_source_file_dispatch( - src: &str, - lines: &mut Vec<RelativeBytePos>, - multi_byte_chars: &mut Vec<MultiByteChar>, - ) { - if is_x86_feature_detected!("sse2") { - unsafe { - analyze_source_file_sse2(src, lines, multi_byte_chars); - } - } else { - analyze_source_file_generic( - src, - src.len(), - RelativeBytePos::from_u32(0), - lines, - multi_byte_chars, - ); - } - } - - /// Checks 16 byte chunks of text at a time. If the chunk contains - /// something other than printable ASCII characters and newlines, the - /// function falls back to the generic implementation. Otherwise it uses - /// SSE2 intrinsics to quickly find all newlines. - #[target_feature(enable = "sse2")] - unsafe fn analyze_source_file_sse2( - src: &str, - lines: &mut Vec<RelativeBytePos>, - multi_byte_chars: &mut Vec<MultiByteChar>, - ) { - #[cfg(target_arch = "x86")] - use std::arch::x86::*; - #[cfg(target_arch = "x86_64")] - use std::arch::x86_64::*; - - const CHUNK_SIZE: usize = 16; - - let src_bytes = src.as_bytes(); - - let chunk_count = src.len() / CHUNK_SIZE; - - // This variable keeps track of where we should start decoding a - // chunk. If a multi-byte character spans across chunk boundaries, - // we need to skip that part in the next chunk because we already - // handled it. - let mut intra_chunk_offset = 0; - - for chunk_index in 0..chunk_count { - let ptr = src_bytes.as_ptr() as *const __m128i; - // We don't know if the pointer is aligned to 16 bytes, so we - // use `loadu`, which supports unaligned loading. - let chunk = unsafe { _mm_loadu_si128(ptr.add(chunk_index)) }; - - // For character in the chunk, see if its byte value is < 0, which - // indicates that it's part of a UTF-8 char. - let multibyte_test = unsafe { _mm_cmplt_epi8(chunk, _mm_set1_epi8(0)) }; - // Create a bit mask from the comparison results. - let multibyte_mask = unsafe { _mm_movemask_epi8(multibyte_test) }; - - // If the bit mask is all zero, we only have ASCII chars here: - if multibyte_mask == 0 { - assert!(intra_chunk_offset == 0); - - // Check if there are any control characters in the chunk. All - // control characters that we can encounter at this point have a - // byte value less than 32 or ... - let control_char_test0 = unsafe { _mm_cmplt_epi8(chunk, _mm_set1_epi8(32)) }; - let control_char_mask0 = unsafe { _mm_movemask_epi8(control_char_test0) }; - - // ... 
it's the ASCII 'DEL' character with a value of 127. - let control_char_test1 = unsafe { _mm_cmpeq_epi8(chunk, _mm_set1_epi8(127)) }; - let control_char_mask1 = unsafe { _mm_movemask_epi8(control_char_test1) }; - - let control_char_mask = control_char_mask0 | control_char_mask1; - - if control_char_mask != 0 { - // Check for newlines in the chunk - let newlines_test = unsafe { _mm_cmpeq_epi8(chunk, _mm_set1_epi8(b'\n' as i8)) }; - let newlines_mask = unsafe { _mm_movemask_epi8(newlines_test) }; - - if control_char_mask == newlines_mask { - // All control characters are newlines, record them - let mut newlines_mask = 0xFFFF0000 | newlines_mask as u32; - let output_offset = RelativeBytePos::from_usize(chunk_index * CHUNK_SIZE + 1); - - loop { - let index = newlines_mask.trailing_zeros(); - - if index >= CHUNK_SIZE as u32 { - // We have arrived at the end of the chunk. - break; - } - - lines.push(RelativeBytePos(index) + output_offset); - - // Clear the bit, so we can find the next one. - newlines_mask &= (!1) << index; - } - - // We are done for this chunk. All control characters were - // newlines and we took care of those. - continue; - } else { - // Some of the control characters are not newlines, - // fall through to the slow path below. - } - } else { - // No control characters, nothing to record for this chunk - continue; - } - } - - // The slow path. - // There are control chars in here, fallback to generic decoding. - let scan_start = chunk_index * CHUNK_SIZE + intra_chunk_offset; - intra_chunk_offset = analyze_source_file_generic( - &src[scan_start..], - CHUNK_SIZE - intra_chunk_offset, - RelativeBytePos::from_usize(scan_start), - lines, - multi_byte_chars, - ); - } - - // There might still be a tail left to analyze - let tail_start = chunk_count * CHUNK_SIZE + intra_chunk_offset; - if tail_start < src.len() { - analyze_source_file_generic( - &src[tail_start..], - src.len() - tail_start, - RelativeBytePos::from_usize(tail_start), - lines, - multi_byte_chars, - ); - } - } - } - _ => { - // The target (or compiler version) does not support SSE2 ... - fn analyze_source_file_dispatch( - src: &str, - lines: &mut Vec<RelativeBytePos>, - multi_byte_chars: &mut Vec<MultiByteChar>, - ) { - analyze_source_file_generic( - src, - src.len(), - RelativeBytePos::from_u32(0), - lines, - multi_byte_chars, - ); - } - } -} - -#[cfg(not(bootstrap))] cfg_match! { any(target_arch = "x86", target_arch = "x86_64") => { fn analyze_source_file_dispatch( @@ -253,65 +94,32 @@ cfg_match! { if multibyte_mask == 0 { assert!(intra_chunk_offset == 0); - // Check if there are any control characters in the chunk. All - // control characters that we can encounter at this point have a - // byte value less than 32 or ... - let control_char_test0 = unsafe { _mm_cmplt_epi8(chunk, _mm_set1_epi8(32)) }; - let control_char_mask0 = unsafe { _mm_movemask_epi8(control_char_test0) }; - - // ... it's the ASCII 'DEL' character with a value of 127. 
- let control_char_test1 = unsafe { _mm_cmpeq_epi8(chunk, _mm_set1_epi8(127)) }; - let control_char_mask1 = unsafe { _mm_movemask_epi8(control_char_test1) }; - - let control_char_mask = control_char_mask0 | control_char_mask1; - - if control_char_mask != 0 { - // Check for newlines in the chunk - let newlines_test = unsafe { _mm_cmpeq_epi8(chunk, _mm_set1_epi8(b'\n' as i8)) }; - let newlines_mask = unsafe { _mm_movemask_epi8(newlines_test) }; + // Check for newlines in the chunk + let newlines_test = unsafe { _mm_cmpeq_epi8(chunk, _mm_set1_epi8(b'\n' as i8)) }; + let mut newlines_mask = unsafe { _mm_movemask_epi8(newlines_test) }; - if control_char_mask == newlines_mask { - // All control characters are newlines, record them - let mut newlines_mask = 0xFFFF0000 | newlines_mask as u32; - let output_offset = RelativeBytePos::from_usize(chunk_index * CHUNK_SIZE + 1); + let output_offset = RelativeBytePos::from_usize(chunk_index * CHUNK_SIZE + 1); - loop { - let index = newlines_mask.trailing_zeros(); + while newlines_mask != 0 { + let index = newlines_mask.trailing_zeros(); - if index >= CHUNK_SIZE as u32 { - // We have arrived at the end of the chunk. - break; - } + lines.push(RelativeBytePos(index) + output_offset); - lines.push(RelativeBytePos(index) + output_offset); - - // Clear the bit, so we can find the next one. - newlines_mask &= (!1) << index; - } - - // We are done for this chunk. All control characters were - // newlines and we took care of those. - continue; - } else { - // Some of the control characters are not newlines, - // fall through to the slow path below. - } - } else { - // No control characters, nothing to record for this chunk - continue; + // Clear the bit, so we can find the next one. + newlines_mask &= newlines_mask - 1; } + } else { + // The slow path. + // There are multibyte chars in here, fallback to generic decoding. + let scan_start = chunk_index * CHUNK_SIZE + intra_chunk_offset; + intra_chunk_offset = analyze_source_file_generic( + &src[scan_start..], + CHUNK_SIZE - intra_chunk_offset, + RelativeBytePos::from_usize(scan_start), + lines, + multi_byte_chars, + ); } - - // The slow path. - // There are control chars in here, fallback to generic decoding. - let scan_start = chunk_index * CHUNK_SIZE + intra_chunk_offset; - intra_chunk_offset = analyze_source_file_generic( - &src[scan_start..], - CHUNK_SIZE - intra_chunk_offset, - RelativeBytePos::from_usize(scan_start), - lines, - multi_byte_chars, - ); } // There might still be a tail left to analyze @@ -369,29 +177,18 @@ fn analyze_source_file_generic( // string. let mut char_len = 1; - if byte < 32 { - // This is an ASCII control character, it could be one of the cases - // that are interesting to us. - + if byte == b'\n' { let pos = RelativeBytePos::from_usize(i) + output_offset; - - if let b'\n' = byte { - lines.push(pos + RelativeBytePos(1)); - } - } else if byte >= 127 { - // The slow path: - // This is either ASCII control character "DEL" or the beginning of - // a multibyte char. Just decode to `char`. + lines.push(pos + RelativeBytePos(1)); + } else if byte >= 128 { + // This is the beginning of a multibyte char. Just decode to `char`. 
let c = src[i..].chars().next().unwrap(); char_len = c.len_utf8(); let pos = RelativeBytePos::from_usize(i) + output_offset; - - if char_len > 1 { - assert!((2..=4).contains(&char_len)); - let mbc = MultiByteChar { pos, bytes: char_len as u8 }; - multi_byte_chars.push(mbc); - } + assert!((2..=4).contains(&char_len)); + let mbc = MultiByteChar { pos, bytes: char_len as u8 }; + multi_byte_chars.push(mbc); } i += char_len; diff --git a/compiler/rustc_span/src/caching_source_map_view.rs b/compiler/rustc_span/src/caching_source_map_view.rs index 9f977b98b82..d8a4cc2f2e2 100644 --- a/compiler/rustc_span/src/caching_source_map_view.rs +++ b/compiler/rustc_span/src/caching_source_map_view.rs @@ -1,6 +1,5 @@ use std::ops::Range; - -use rustc_data_structures::sync::Lrc; +use std::sync::Arc; use crate::source_map::SourceMap; use crate::{BytePos, Pos, RelativeBytePos, SourceFile, SpanData}; @@ -22,7 +21,7 @@ struct CacheEntry { // misses for these rare positions. A line lookup for the position via `SourceMap::lookup_line` // after a cache miss will produce the last line number, as desired. line: Range<BytePos>, - file: Lrc<SourceFile>, + file: Arc<SourceFile>, file_index: usize, } @@ -30,7 +29,7 @@ impl CacheEntry { #[inline] fn update( &mut self, - new_file_and_idx: Option<(Lrc<SourceFile>, usize)>, + new_file_and_idx: Option<(Arc<SourceFile>, usize)>, pos: BytePos, time_stamp: usize, ) { @@ -63,7 +62,7 @@ pub struct CachingSourceMapView<'sm> { impl<'sm> CachingSourceMapView<'sm> { pub fn new(source_map: &'sm SourceMap) -> CachingSourceMapView<'sm> { let files = source_map.files(); - let first_file = Lrc::clone(&files[0]); + let first_file = Arc::clone(&files[0]); let entry = CacheEntry { time_stamp: 0, line_number: 0, @@ -82,7 +81,7 @@ impl<'sm> CachingSourceMapView<'sm> { pub fn byte_pos_to_line_and_col( &mut self, pos: BytePos, - ) -> Option<(Lrc<SourceFile>, usize, RelativeBytePos)> { + ) -> Option<(Arc<SourceFile>, usize, RelativeBytePos)> { self.time_stamp += 1; // Check if the position is in one of the cached lines @@ -92,7 +91,7 @@ impl<'sm> CachingSourceMapView<'sm> { cache_entry.touch(self.time_stamp); let col = RelativeBytePos(pos.to_u32() - cache_entry.line.start.to_u32()); - return Some((Lrc::clone(&cache_entry.file), cache_entry.line_number, col)); + return Some((Arc::clone(&cache_entry.file), cache_entry.line_number, col)); } // No cache hit ... @@ -109,13 +108,13 @@ impl<'sm> CachingSourceMapView<'sm> { cache_entry.update(new_file_and_idx, pos, self.time_stamp); let col = RelativeBytePos(pos.to_u32() - cache_entry.line.start.to_u32()); - Some((Lrc::clone(&cache_entry.file), cache_entry.line_number, col)) + Some((Arc::clone(&cache_entry.file), cache_entry.line_number, col)) } pub fn span_data_to_lines_and_cols( &mut self, span_data: &SpanData, - ) -> Option<(Lrc<SourceFile>, usize, BytePos, usize, BytePos)> { + ) -> Option<(Arc<SourceFile>, usize, BytePos, usize, BytePos)> { self.time_stamp += 1; // Check if lo and hi are in the cached lines. 
@@ -133,7 +132,7 @@ impl<'sm> CachingSourceMapView<'sm> { } ( - Lrc::clone(&lo.file), + Arc::clone(&lo.file), lo.line_number, span_data.lo - lo.line.start, hi.line_number, @@ -181,7 +180,7 @@ impl<'sm> CachingSourceMapView<'sm> { lo.update(new_file_and_idx, span_data.lo, self.time_stamp); if !lo.line.contains(&span_data.hi) { - let new_file_and_idx = Some((Lrc::clone(&lo.file), lo.file_index)); + let new_file_and_idx = Some((Arc::clone(&lo.file), lo.file_index)); let next_oldest = self.oldest_cache_entry_index_avoid(oldest); let hi = &mut self.line_cache[next_oldest]; hi.update(new_file_and_idx, span_data.hi, self.time_stamp); @@ -227,7 +226,7 @@ impl<'sm> CachingSourceMapView<'sm> { assert_eq!(lo.file_index, hi.file_index); Some(( - Lrc::clone(&lo.file), + Arc::clone(&lo.file), lo.line_number, span_data.lo - lo.line.start, hi.line_number, @@ -271,13 +270,13 @@ impl<'sm> CachingSourceMapView<'sm> { oldest } - fn file_for_position(&self, pos: BytePos) -> Option<(Lrc<SourceFile>, usize)> { + fn file_for_position(&self, pos: BytePos) -> Option<(Arc<SourceFile>, usize)> { if !self.source_map.files().is_empty() { let file_idx = self.source_map.lookup_source_file_idx(pos); let file = &self.source_map.files()[file_idx]; if file_contains(file, pos) { - return Some((Lrc::clone(file), file_idx)); + return Some((Arc::clone(file), file_idx)); } } diff --git a/compiler/rustc_span/src/def_id.rs b/compiler/rustc_span/src/def_id.rs index f61ce37131e..641bac88ad0 100644 --- a/compiler/rustc_span/src/def_id.rs +++ b/compiler/rustc_span/src/def_id.rs @@ -3,10 +3,9 @@ use std::hash::{BuildHasherDefault, Hash, Hasher}; use rustc_data_structures::AtomicRef; use rustc_data_structures::fingerprint::Fingerprint; -use rustc_data_structures::stable_hasher::{ - Hash64, HashStable, StableHasher, StableOrd, ToStableHashKey, -}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableOrd, ToStableHashKey}; use rustc_data_structures::unhash::Unhasher; +use rustc_hashes::Hash64; use rustc_index::Idx; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; use rustc_serialize::{Decodable, Encodable}; diff --git a/compiler/rustc_span/src/hygiene.rs b/compiler/rustc_span/src/hygiene.rs index dbbbb5077cb..9bf1d305e54 100644 --- a/compiler/rustc_span/src/hygiene.rs +++ b/compiler/rustc_span/src/hygiene.rs @@ -29,12 +29,14 @@ use std::collections::hash_map::Entry; use std::collections::hash_set::Entry as SetEntry; use std::fmt; use std::hash::Hash; +use std::sync::Arc; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; -use rustc_data_structures::stable_hasher::{Hash64, HashStable, HashingControls, StableHasher}; -use rustc_data_structures::sync::{Lock, Lrc, WorkerLocal}; +use rustc_data_structures::stable_hasher::{HashStable, HashingControls, StableHasher}; +use rustc_data_structures::sync::{Lock, WorkerLocal}; use rustc_data_structures::unhash::UnhashMap; +use rustc_hashes::Hash64; use rustc_index::IndexVec; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; @@ -904,7 +906,7 @@ impl Span { /// allowed inside this span. 
pub fn mark_with_reason( self, - allow_internal_unstable: Option<Lrc<[Symbol]>>, + allow_internal_unstable: Option<Arc<[Symbol]>>, reason: DesugaringKind, edition: Edition, ctx: impl HashStableContext, @@ -959,7 +961,7 @@ pub struct ExpnData { /// List of `#[unstable]`/feature-gated features that the macro is allowed to use /// internally without forcing the whole crate to opt-in /// to them. - pub allow_internal_unstable: Option<Lrc<[Symbol]>>, + pub allow_internal_unstable: Option<Arc<[Symbol]>>, /// Edition of the crate in which the macro is defined. pub edition: Edition, /// The `DefId` of the macro being invoked, @@ -985,7 +987,7 @@ impl ExpnData { parent: ExpnId, call_site: Span, def_site: Span, - allow_internal_unstable: Option<Lrc<[Symbol]>>, + allow_internal_unstable: Option<Arc<[Symbol]>>, edition: Edition, macro_def_id: Option<DefId>, parent_module: Option<DefId>, @@ -1037,7 +1039,7 @@ impl ExpnData { kind: ExpnKind, call_site: Span, edition: Edition, - allow_internal_unstable: Lrc<[Symbol]>, + allow_internal_unstable: Arc<[Symbol]>, macro_def_id: Option<DefId>, parent_module: Option<DefId>, ) -> ExpnData { diff --git a/compiler/rustc_span/src/lib.rs b/compiler/rustc_span/src/lib.rs index b1f9f4a01c5..c09669d959c 100644 --- a/compiler/rustc_span/src/lib.rs +++ b/compiler/rustc_span/src/lib.rs @@ -83,12 +83,14 @@ use std::io::{self, Read}; use std::ops::{Add, Range, Sub}; use std::path::{Path, PathBuf}; use std::str::FromStr; +use std::sync::Arc; use std::{fmt, iter}; use md5::{Digest, Md5}; -use rustc_data_structures::stable_hasher::{Hash64, Hash128, HashStable, StableHasher}; -use rustc_data_structures::sync::{FreezeLock, FreezeWriteGuard, Lock, Lrc}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; +use rustc_data_structures::sync::{FreezeLock, FreezeWriteGuard, Lock}; use rustc_data_structures::unord::UnordMap; +use rustc_hashes::{Hash64, Hash128}; use sha1::Sha1; use sha2::Sha256; @@ -110,7 +112,7 @@ pub struct SessionGlobals { /// The session's source map, if there is one. This field should only be /// used in places where the `Session` is truly not available, such as /// `<Span as Debug>::fmt`. - source_map: Option<Lrc<SourceMap>>, + source_map: Option<Arc<SourceMap>>, } impl SessionGlobals { @@ -120,7 +122,7 @@ impl SessionGlobals { span_interner: Lock::new(span_encoding::SpanInterner::default()), metavar_spans: Default::default(), hygiene_data: Lock::new(hygiene::HygieneData::new(edition)), - source_map: sm_inputs.map(|inputs| Lrc::new(SourceMap::with_inputs(inputs))), + source_map: sm_inputs.map(|inputs| Arc::new(SourceMap::with_inputs(inputs))), } } } @@ -402,7 +404,7 @@ impl fmt::Display for FileNameDisplay<'_> { impl<'a> FileNameDisplay<'a> { pub fn to_string_lossy(&self) -> Cow<'a, str> { match self.inner { - FileName::Real(ref inner) => inner.to_string_lossy(self.display_pref), + FileName::Real(inner) => inner.to_string_lossy(self.display_pref), _ => Cow::from(self.to_string()), } } @@ -1430,7 +1432,7 @@ pub enum ExternalSource { #[derive(PartialEq, Eq, Clone, Debug)] pub enum ExternalSourceKind { /// The external source has been loaded already. - Present(Lrc<String>), + Present(Arc<String>), /// No attempt has been made to load the external source. AbsentOk, /// A failed attempt has been made to load the external source. @@ -1440,7 +1442,7 @@ pub enum ExternalSourceKind { impl ExternalSource { pub fn get_source(&self) -> Option<&str> { match self { - ExternalSource::Foreign { kind: ExternalSourceKind::Present(ref src), .. 
} => Some(src), + ExternalSource::Foreign { kind: ExternalSourceKind::Present(src), .. } => Some(src), _ => None, } } @@ -1670,7 +1672,7 @@ pub struct SourceFile { /// (e.g., `<anon>`). pub name: FileName, /// The complete source code. - pub src: Option<Lrc<String>>, + pub src: Option<Arc<String>>, /// The source code's hash. pub src_hash: SourceFileHash, /// Used to enable cargo to use checksums to check if a crate is fresh rather @@ -1931,7 +1933,7 @@ impl SourceFile { Ok(SourceFile { name, - src: Some(Lrc::new(src)), + src: Some(Arc::new(src)), src_hash, checksum_hash, external_src: FreezeLock::frozen(ExternalSource::Unneeded), @@ -2050,7 +2052,7 @@ impl SourceFile { } = &mut *external_src { *src_kind = if let Some(src) = src { - ExternalSourceKind::Present(Lrc::new(src)) + ExternalSourceKind::Present(Arc::new(src)) } else { ExternalSourceKind::AbsentErr }; @@ -2490,7 +2492,7 @@ impl<D: Decoder> Decodable<D> for RelativeBytePos { #[derive(Debug, Clone)] pub struct Loc { /// Information about the original source. - pub file: Lrc<SourceFile>, + pub file: Arc<SourceFile>, /// The (1-based) line number. pub line: usize, /// The (0-based) column offset. @@ -2502,13 +2504,13 @@ pub struct Loc { // Used to be structural records. #[derive(Debug)] pub struct SourceFileAndLine { - pub sf: Lrc<SourceFile>, + pub sf: Arc<SourceFile>, /// Index of line, starting from 0. pub line: usize, } #[derive(Debug)] pub struct SourceFileAndBytePos { - pub sf: Lrc<SourceFile>, + pub sf: Arc<SourceFile>, pub pos: BytePos, } @@ -2525,7 +2527,7 @@ pub struct LineInfo { } pub struct FileLines { - pub file: Lrc<SourceFile>, + pub file: Arc<SourceFile>, pub lines: Vec<LineInfo>, } @@ -2591,7 +2593,7 @@ pub trait HashStableContext { fn span_data_to_lines_and_cols( &mut self, span: &SpanData, - ) -> Option<(Lrc<SourceFile>, usize, BytePos, usize, BytePos)>; + ) -> Option<(Arc<SourceFile>, usize, BytePos, usize, BytePos)>; fn hashing_controls(&self) -> HashingControls; } diff --git a/compiler/rustc_span/src/source_map.rs b/compiler/rustc_span/src/source_map.rs index 55e106b661b..6fdf8e46fec 100644 --- a/compiler/rustc_span/src/source_map.rs +++ b/compiler/rustc_span/src/source_map.rs @@ -102,8 +102,8 @@ pub trait FileLoader { fn read_file(&self, path: &Path) -> io::Result<String>; /// Read the contents of a potentially non-UTF-8 file into memory. - /// We don't normalize binary files, so we can start in an Lrc. - fn read_binary_file(&self, path: &Path) -> io::Result<Lrc<[u8]>>; + /// We don't normalize binary files, so we can start in an Arc. + fn read_binary_file(&self, path: &Path) -> io::Result<Arc<[u8]>>; } /// A FileLoader that uses std::fs to load real files. @@ -124,12 +124,12 @@ impl FileLoader for RealFileLoader { fs::read_to_string(path) } - fn read_binary_file(&self, path: &Path) -> io::Result<Lrc<[u8]>> { + fn read_binary_file(&self, path: &Path) -> io::Result<Arc<[u8]>> { let mut file = fs::File::open(path)?; let len = file.metadata()?.len(); - let mut bytes = Lrc::new_uninit_slice(len as usize); - let mut buf = BorrowedBuf::from(Lrc::get_mut(&mut bytes).unwrap()); + let mut bytes = Arc::new_uninit_slice(len as usize); + let mut buf = BorrowedBuf::from(Arc::get_mut(&mut bytes).unwrap()); match file.read_buf_exact(buf.unfilled()) { Ok(()) => {} Err(e) if e.kind() == io::ErrorKind::UnexpectedEof => { @@ -146,9 +146,9 @@ impl FileLoader for RealFileLoader { // But we are not guaranteed to be at the end of the file, because we did not attempt to do // a read with a non-zero-sized buffer and get Ok(0). 
// So we do small read to a fixed-size buffer. If the read returns no bytes then we're - // already done, and we just return the Lrc we built above. + // already done, and we just return the Arc we built above. // If the read returns bytes however, we just fall back to reading into a Vec then turning - // that into an Lrc, losing our nice peak memory behavior. This fallback code path should + // that into an Arc, losing our nice peak memory behavior. This fallback code path should // be rarely exercised. let mut probe = [0u8; 32]; @@ -172,8 +172,8 @@ impl FileLoader for RealFileLoader { #[derive(Default)] struct SourceMapFiles { - source_files: monotonic::MonotonicVec<Lrc<SourceFile>>, - stable_id_to_source_file: UnhashMap<StableSourceFileId, Lrc<SourceFile>>, + source_files: monotonic::MonotonicVec<Arc<SourceFile>>, + stable_id_to_source_file: UnhashMap<StableSourceFileId, Arc<SourceFile>>, } /// Used to construct a `SourceMap` with `SourceMap::with_inputs`. @@ -232,7 +232,7 @@ impl SourceMap { self.file_loader.file_exists(path) } - pub fn load_file(&self, path: &Path) -> io::Result<Lrc<SourceFile>> { + pub fn load_file(&self, path: &Path) -> io::Result<Arc<SourceFile>> { let src = self.file_loader.read_file(path)?; let filename = path.to_owned().into(); Ok(self.new_source_file(filename, src)) @@ -242,7 +242,7 @@ impl SourceMap { /// /// Unlike `load_file`, guarantees that no normalization like BOM-removal /// takes place. - pub fn load_binary_file(&self, path: &Path) -> io::Result<(Lrc<[u8]>, Span)> { + pub fn load_binary_file(&self, path: &Path) -> io::Result<(Arc<[u8]>, Span)> { let bytes = self.file_loader.read_binary_file(path)?; // We need to add file to the `SourceMap`, so that it is present @@ -265,14 +265,14 @@ impl SourceMap { // By returning a `MonotonicVec`, we ensure that consumers cannot invalidate // any existing indices pointing into `files`. - pub fn files(&self) -> MappedReadGuard<'_, monotonic::MonotonicVec<Lrc<SourceFile>>> { + pub fn files(&self) -> MappedReadGuard<'_, monotonic::MonotonicVec<Arc<SourceFile>>> { ReadGuard::map(self.files.borrow(), |files| &files.source_files) } pub fn source_file_by_stable_id( &self, stable_id: StableSourceFileId, - ) -> Option<Lrc<SourceFile>> { + ) -> Option<Arc<SourceFile>> { self.files.borrow().stable_id_to_source_file.get(&stable_id).cloned() } @@ -280,7 +280,7 @@ impl SourceMap { &self, file_id: StableSourceFileId, mut file: SourceFile, - ) -> Result<Lrc<SourceFile>, OffsetOverflowError> { + ) -> Result<Arc<SourceFile>, OffsetOverflowError> { let mut files = self.files.borrow_mut(); file.start_pos = BytePos(if let Some(last_file) = files.source_files.last() { @@ -291,9 +291,9 @@ impl SourceMap { 0 }); - let file = Lrc::new(file); - files.source_files.push(Lrc::clone(&file)); - files.stable_id_to_source_file.insert(file_id, Lrc::clone(&file)); + let file = Arc::new(file); + files.source_files.push(Arc::clone(&file)); + files.stable_id_to_source_file.insert(file_id, Arc::clone(&file)); Ok(file) } @@ -301,7 +301,7 @@ impl SourceMap { /// Creates a new `SourceFile`. /// If a file already exists in the `SourceMap` with the same ID, that file is returned /// unmodified. 
- pub fn new_source_file(&self, filename: FileName, src: String) -> Lrc<SourceFile> { + pub fn new_source_file(&self, filename: FileName, src: String) -> Arc<SourceFile> { self.try_new_source_file(filename, src).unwrap_or_else(|OffsetOverflowError| { eprintln!( "fatal error: rustc does not support text files larger than {} bytes", @@ -315,7 +315,7 @@ impl SourceMap { &self, filename: FileName, src: String, - ) -> Result<Lrc<SourceFile>, OffsetOverflowError> { + ) -> Result<Arc<SourceFile>, OffsetOverflowError> { // Note that filename may not be a valid path, eg it may be `<anon>` etc, // but this is okay because the directory determined by `path.pop()` will // be empty, so the working directory will be used. @@ -353,7 +353,7 @@ impl SourceMap { multibyte_chars: Vec<MultiByteChar>, normalized_pos: Vec<NormalizedPos>, metadata_index: u32, - ) -> Lrc<SourceFile> { + ) -> Arc<SourceFile> { let source_len = RelativeBytePos::from_u32(source_len); let source_file = SourceFile { @@ -393,9 +393,9 @@ impl SourceMap { } /// Return the SourceFile that contains the given `BytePos` - pub fn lookup_source_file(&self, pos: BytePos) -> Lrc<SourceFile> { + pub fn lookup_source_file(&self, pos: BytePos) -> Arc<SourceFile> { let idx = self.lookup_source_file_idx(pos); - Lrc::clone(&(*self.files.borrow().source_files)[idx]) + Arc::clone(&(*self.files.borrow().source_files)[idx]) } /// Looks up source information about a `BytePos`. @@ -406,7 +406,7 @@ impl SourceMap { } /// If the corresponding `SourceFile` is empty, does not return a line number. - pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Lrc<SourceFile>> { + pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Arc<SourceFile>> { let f = self.lookup_source_file(pos); let pos = f.relative_position(pos); @@ -441,7 +441,7 @@ impl SourceMap { pub fn span_to_location_info( &self, sp: Span, - ) -> (Option<Lrc<SourceFile>>, usize, usize, usize, usize) { + ) -> (Option<Arc<SourceFile>>, usize, usize, usize, usize) { if self.files.borrow().source_files.is_empty() || sp.is_dummy() { return (None, 0, 0, 0, 0); } @@ -477,7 +477,7 @@ impl SourceMap { if lo != hi { return true; } - let f = Lrc::clone(&(*self.files.borrow().source_files)[lo]); + let f = Arc::clone(&(*self.files.borrow().source_files)[lo]); let lo = f.relative_position(sp.lo()); let hi = f.relative_position(sp.hi()); f.lookup_line(lo) != f.lookup_line(hi) @@ -998,12 +998,12 @@ impl SourceMap { } } - pub fn get_source_file(&self, filename: &FileName) -> Option<Lrc<SourceFile>> { + pub fn get_source_file(&self, filename: &FileName) -> Option<Arc<SourceFile>> { // Remap filename before lookup let filename = self.path_mapping().map_filename_prefix(filename).0; for sf in self.files.borrow().source_files.iter() { if filename == sf.name { - return Some(Lrc::clone(&sf)); + return Some(Arc::clone(&sf)); } } None @@ -1012,7 +1012,7 @@ impl SourceMap { /// For a global `BytePos`, computes the local offset within the containing `SourceFile`. 
pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos { let idx = self.lookup_source_file_idx(bpos); - let sf = Lrc::clone(&(*self.files.borrow().source_files)[idx]); + let sf = Arc::clone(&(*self.files.borrow().source_files)[idx]); let offset = bpos - sf.start_pos; SourceFileAndBytePos { sf, pos: offset } } @@ -1082,7 +1082,7 @@ impl SourceMap { } } -pub fn get_source_map() -> Option<Lrc<SourceMap>> { +pub fn get_source_map() -> Option<Arc<SourceMap>> { with_session_globals(|session_globals| session_globals.source_map.clone()) } diff --git a/compiler/rustc_span/src/source_map/tests.rs b/compiler/rustc_span/src/source_map/tests.rs index 5b39706f3ad..957f55e3913 100644 --- a/compiler/rustc_span/src/source_map/tests.rs +++ b/compiler/rustc_span/src/source_map/tests.rs @@ -538,7 +538,7 @@ fn test_next_point() { #[cfg(target_os = "linux")] #[test] fn read_binary_file_handles_lying_stat() { - // read_binary_file tries to read the contents of a file into an Lrc<[u8]> while + // read_binary_file tries to read the contents of a file into an Arc<[u8]> while // never having two copies of the data in memory at once. This is an optimization // to support include_bytes! with large files. But since Rust allocators are // sensitive to alignment, our implementation can't be bootstrapped off calling diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index c819d433235..10c79b1be82 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -104,6 +104,8 @@ symbols! { Gen: "gen", // >= 2024 Edition only Try: "try", // >= 2018 Edition only + // NOTE: When adding new keywords, consider adding them to the ui/parser/raw/raw-idents.rs test. + // "Lifetime keywords": regular keywords with a leading `'`. // Matching predicates: `is_any_keyword` UnderscoreLifetime: "'_", @@ -190,9 +192,11 @@ symbols! { Capture, Cell, Center, + Child, Cleanup, Clone, CoercePointee, + CoercePointeeValidated, CoerceUnsized, Command, ConstParamTy, @@ -249,6 +253,7 @@ symbols! { Into, IntoFuture, IntoIterator, + IoBufRead, IoLines, IoRead, IoSeek, @@ -284,6 +289,7 @@ symbols! { OsString, Output, Param, + ParamSet, PartialEq, PartialOrd, Path, @@ -332,6 +338,7 @@ symbols! { SliceIter, Some, SpanCtxt, + Stdin, String, StructuralPartialEq, SubdiagMessage, @@ -514,6 +521,8 @@ symbols! { bang, begin_panic, bench, + bevy_ecs, + bikeshed_guaranteed_no_drop, bin, binaryheap_iter, bind_by_move_pattern_guards, @@ -594,6 +603,9 @@ symbols! { cfi, cfi_encoding, char, + char_is_ascii, + child_id, + child_kill, client, clippy, clobber_abi, @@ -619,6 +631,7 @@ symbols! { cmp_partialord_lt, cmpxchg16b_target_feature, cmse_nonsecure_entry, + coerce_pointee_validated, coerce_unsized, cold, cold_path, @@ -877,6 +890,7 @@ symbols! { extern_crate_self, extern_in_paths, extern_prelude, + extern_system_varargs, extern_types, external, external_doc, @@ -1024,6 +1038,7 @@ symbols! { generic_const_exprs, generic_const_items, generic_param_attrs, + generic_pattern_types, get_context, global_alloc_ty, global_allocator, @@ -1352,10 +1367,6 @@ symbols! { native_link_modifiers_whole_archive, natvis_file, ne, - nearbyintf128, - nearbyintf16, - nearbyintf32, - nearbyintf64, needs_allocator, needs_drop, needs_panic_runtime, @@ -1460,6 +1471,7 @@ symbols! { panic_2015, panic_2021, panic_abort, + panic_any, panic_bounds_check, panic_cannot_unwind, panic_const_add_overflow, @@ -1506,7 +1518,7 @@ symbols! 
{ path_main_separator, path_to_pathbuf, pathbuf_as_path, - pattern_complexity, + pattern_complexity_limit, pattern_parentheses, pattern_type, pattern_types, @@ -1565,6 +1577,7 @@ symbols! { proc_macro_mod, proc_macro_non_items, proc_macro_path_invoc, + process_abort, process_exit, profiler_builtins, profiler_runtime, @@ -1671,20 +1684,16 @@ symbols! { return_position_impl_trait_in_trait, return_type_notation, rhs, - rintf128, - rintf16, - rintf32, - rintf64, riscv_target_feature, rlib, ropi, ropi_rwpi: "ropi-rwpi", rotate_left, rotate_right, - roundevenf128, - roundevenf16, - roundevenf32, - roundevenf64, + round_ties_even_f128, + round_ties_even_f16, + round_ties_even_f32, + round_ties_even_f64, roundf128, roundf16, roundf32, @@ -1967,6 +1976,10 @@ symbols! { str_from_utf8_mut, str_from_utf8_unchecked, str_from_utf8_unchecked_mut, + str_inherent_from_utf8, + str_inherent_from_utf8_mut, + str_inherent_from_utf8_unchecked, + str_inherent_from_utf8_unchecked_mut, str_len, str_split_whitespace, str_starts_with, @@ -1991,6 +2004,7 @@ symbols! { sub_assign, sub_with_overflow, suggestion, + supertrait_item_shadowing, surface_async_drop_in_place, sym, sync, |
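
The rewritten SSE2 hunk in analyze_source_file.rs drops the separate control-character masks, walks the newline comparison mask directly, and clears each found bit with `newlines_mask &= newlines_mask - 1` instead of re-masking from the found index. Below is a minimal, SIMD-free sketch of that bit-peeling loop; `newline_offsets_in_chunk` is an illustrative helper, not compiler code, and the plain byte loop stands in for `_mm_cmpeq_epi8` + `_mm_movemask_epi8`.

// Standalone sketch of the bit-peeling newline scan (not the compiler's code).
fn newline_offsets_in_chunk(chunk: &[u8; 16]) -> Vec<u32> {
    // Portable stand-in for _mm_cmpeq_epi8(chunk, '\n') + _mm_movemask_epi8:
    // one mask bit per byte that equals b'\n'.
    let mut mask: u32 = 0;
    for (i, &b) in chunk.iter().enumerate() {
        if b == b'\n' {
            mask |= 1 << i;
        }
    }

    let mut offsets = Vec::new();
    while mask != 0 {
        // Index of the lowest set bit = byte offset of the next newline.
        offsets.push(mask.trailing_zeros());
        // Clear that bit so the next iteration finds the following newline.
        mask &= mask - 1;
    }
    offsets
}

fn main() {
    let chunk = *b"ab\ncd\nefghij\nkl_";
    assert_eq!(newline_offsets_in_chunk(&chunk), vec![2, 5, 12]);
}

Because `mask &= mask - 1` clears only the lowest set bit, the loop runs exactly once per newline in the chunk and terminates when the mask is empty, which is why the old `0xFFFF0000` sentinel and the `index >= CHUNK_SIZE` break are no longer needed.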
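
The reworked generic fallback likewise special-cases only two byte classes: b'\n' (record the start of the next line) and bytes >= 128 (decode the full char and record its UTF-8 width as a MultiByteChar). A stand-alone sketch of that classification, using plain usize offsets instead of RelativeBytePos; the function name and return shape here are illustrative, not the compiler's API.

// Sketch of the simplified generic scan: collect line-start offsets at each
// '\n' and the (position, byte length) of every multi-byte UTF-8 char.
fn scan(src: &str) -> (Vec<usize>, Vec<(usize, u8)>) {
    let bytes = src.as_bytes();
    let mut line_starts = vec![0]; // the first line starts at offset 0
    let mut multi_byte = Vec::new();

    let mut i = 0;
    while i < bytes.len() {
        let byte = bytes[i];
        let mut char_len = 1;

        if byte == b'\n' {
            // The next line begins right after the newline byte.
            line_starts.push(i + 1);
        } else if byte >= 128 {
            // First byte of a multi-byte UTF-8 sequence: decode to get its width.
            let c = src[i..].chars().next().unwrap();
            char_len = c.len_utf8();
            debug_assert!((2..=4).contains(&char_len));
            multi_byte.push((i, char_len as u8));
        }

        i += char_len;
    }
    (line_starts, multi_byte)
}

fn main() {
    let (lines, mbcs) = scan("ab\ncäd\n");
    assert_eq!(lines, vec![0, 3, 8]); // 'ä' is 2 bytes, so the last '\n' sits at byte 7
    assert_eq!(mbcs, vec![(4, 2)]);   // 'ä' starts at byte 4 and is 2 bytes long
}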
