about summary refs log tree commit diff
path: root/library/std/src/io/buffered/bufreader.rs
diff options
context:
space:
mode:
Diffstat (limited to 'library/std/src/io/buffered/bufreader.rs')
-rw-r--r-- library/std/src/io/buffered/bufreader.rs | 59
1 file changed, 43 insertions(+), 16 deletions(-)
diff --git a/library/std/src/io/buffered/bufreader.rs b/library/std/src/io/buffered/bufreader.rs
index 2864e94f60f..b297e4bf7d9 100644
--- a/library/std/src/io/buffered/bufreader.rs
+++ b/library/std/src/io/buffered/bufreader.rs
@@ -1,8 +1,9 @@
 use crate::cmp;
 use crate::fmt;
 use crate::io::{
-    self, BufRead, Initializer, IoSliceMut, Read, Seek, SeekFrom, SizeHint, DEFAULT_BUF_SIZE,
+    self, BufRead, IoSliceMut, Read, ReadBuf, Seek, SeekFrom, SizeHint, DEFAULT_BUF_SIZE,
 };
+use crate::mem::MaybeUninit;
 
 /// The `BufReader<R>` struct adds buffering to any reader.
 ///
@@ -47,9 +48,10 @@ use crate::io::{
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct BufReader<R> {
     inner: R,
-    buf: Box<[u8]>,
+    buf: Box<[MaybeUninit<u8>]>,
     pos: usize,
     cap: usize,
+    init: usize,
 }
 
 impl<R: Read> BufReader<R> {
@@ -91,11 +93,8 @@ impl<R: Read> BufReader<R> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn with_capacity(capacity: usize, inner: R) -> BufReader<R> {
-        unsafe {
-            let mut buf = Box::new_uninit_slice(capacity).assume_init();
-            inner.initializer().initialize(&mut buf);
-            BufReader { inner, buf, pos: 0, cap: 0 }
-        }
+        let buf = Box::new_uninit_slice(capacity);
+        BufReader { inner, buf, pos: 0, cap: 0, init: 0 }
     }
 }
 
@@ -171,7 +170,7 @@ impl<R> BufReader<R> {
     /// ```
     #[stable(feature = "bufreader_buffer", since = "1.37.0")]
     pub fn buffer(&self) -> &[u8] {
-        &self.buf[self.pos..self.cap]
+        unsafe { MaybeUninit::slice_assume_init_ref(&self.buf[self.pos..self.cap]) }
     }
 
     /// Returns the number of bytes the internal buffer can hold at once.
@@ -271,6 +270,25 @@ impl<R: Read> Read for BufReader<R> {
         Ok(nread)
     }
 
+    fn read_buf(&mut self, buf: &mut ReadBuf<'_>) -> io::Result<()> {
+        // If we don't have any buffered data and we're doing a massive read
+        // (larger than our internal buffer), bypass our internal buffer
+        // entirely.
+        if self.pos == self.cap && buf.remaining() >= self.buf.len() {
+            self.discard_buffer();
+            return self.inner.read_buf(buf);
+        }
+
+        let prev = buf.filled_len();
+
+        let mut rem = self.fill_buf()?;
+        rem.read_buf(buf)?;
+
+        self.consume(buf.filled_len() - prev); // the slice impl of read_buf is known to never un-fill buf
+
+        Ok(())
+    }
+
     // Small read_exacts from a BufReader are extremely common when used with a deserializer.
     // The default implementation calls read in a loop, which results in surprisingly poor code
     // generation for the common path where the buffer has enough bytes to fill the passed-in
@@ -303,16 +321,11 @@ impl<R: Read> Read for BufReader<R> {
         self.inner.is_read_vectored()
     }
 
-    // we can't skip unconditionally because of the large buffer case in read.
-    unsafe fn initializer(&self) -> Initializer {
-        self.inner.initializer()
-    }
-
     // The inner reader might have an optimized `read_to_end`. Drain our buffer and then
     // delegate to the inner implementation.
     fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
         let nread = self.cap - self.pos;
-        buf.extend_from_slice(&self.buf[self.pos..self.cap]);
+        buf.extend_from_slice(&self.buffer());
         self.discard_buffer();
         Ok(nread + self.inner.read_to_end(buf)?)
     }
@@ -363,10 +376,24 @@ impl<R: Read> BufRead for BufReader<R> {
         // to tell the compiler that the pos..cap slice is always valid.
         if self.pos >= self.cap {
             debug_assert!(self.pos == self.cap);
-            self.cap = self.inner.read(&mut self.buf)?;
+
+            let mut readbuf = ReadBuf::uninit(&mut self.buf);
+
+            // SAFETY: `self.init` is either 0 or set to `readbuf.initialized_len()`
+            // from the last time this function was called
+            unsafe {
+                readbuf.assume_init(self.init);
+            }
+
+            self.inner.read_buf(&mut readbuf)?;
+
+            self.cap = readbuf.filled_len();
+            self.init = readbuf.initialized_len();
+
             self.pos = 0;
         }
-        Ok(&self.buf[self.pos..self.cap])
+        // SAFETY: self.cap is always <= self.init, so self.buf[self.pos..self.cap] is always init
+        unsafe { Ok(MaybeUninit::slice_assume_init_ref(&self.buf[self.pos..self.cap])) }
     }
 
     fn consume(&mut self, amt: usize) {