Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/attr.rs               |   6
-rw-r--r--  src/libsyntax/codemap.rs            | 121
-rw-r--r--  src/libsyntax/diagnostic.rs         |  42
-rw-r--r--  src/libsyntax/ext/base.rs           |   2
-rw-r--r--  src/libsyntax/ext/build.rs          |   2
-rw-r--r--  src/libsyntax/ext/registrar.rs      |   2
-rw-r--r--  src/libsyntax/ext/source_util.rs    |   4
-rw-r--r--  src/libsyntax/ext/trace_macros.rs   |   2
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs |   9
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs  |  16
-rw-r--r--  src/libsyntax/parse/comments.rs     |   4
-rw-r--r--  src/libsyntax/parse/lexer.rs        | 126
-rw-r--r--  src/libsyntax/parse/mod.rs          |  66
-rw-r--r--  src/libsyntax/parse/parser.rs       |   2
14 files changed, 189 insertions, 215 deletions
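
The common thread in this commit is the removal of @-pointer (managed box) sharing in libsyntax in favour of std::rc::Rc handles and plain borrows. A minimal sketch of that migration pattern, using hypothetical SourceFile/SourceMap types rather than the real libsyntax structs:

    use std::cell::RefCell;
    use std::rc::Rc;

    // Hypothetical stand-in for a shared, internally mutable record;
    // the real FileMap carries more fields (src, start_pos, ...).
    struct SourceFile {
        name: String,
        lines: RefCell<Vec<usize>>, // interior mutability survives the move to Rc
    }

    struct SourceMap {
        files: RefCell<Vec<Rc<SourceFile>>>, // was: RefCell<Vec<@FileMap>>
    }

    impl SourceMap {
        // Instead of handing out the one managed pointer, clone the Rc:
        // the caller gets shared ownership and the map keeps its own handle.
        fn get_file(&self, name: &str) -> Option<Rc<SourceFile>> {
            self.files.borrow().iter().find(|f| f.name == name).cloned()
        }
    }

    fn main() {
        let map = SourceMap { files: RefCell::new(Vec::new()) };
        let file = Rc::new(SourceFile {
            name: "lib.rs".to_string(),
            lines: RefCell::new(vec![0]),
        });
        map.files.borrow_mut().push(file.clone()); // push a clone, keep `file`
        assert_eq!(map.get_file("lib.rs").unwrap().lines.borrow().len(), 1);
    }
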
diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index 7ff9a73f29d..65a0f473db2 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -375,7 +375,7 @@ pub fn find_stability<AM: AttrMetaMethods, It: Iterator<AM>>(mut metas: It)
     None
 }
 
-pub fn require_unique_names(diagnostic: @SpanHandler, metas: &[@MetaItem]) {
+pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[@MetaItem]) {
     let mut set = HashSet::new();
     for meta in metas.iter() {
         let name = meta.name();
@@ -400,7 +400,7 @@ pub fn require_unique_names(diagnostic: @SpanHandler, metas: &[@MetaItem]) {
  * present (before fields, if any) with that type; reprensentation
  * optimizations which would remove it will not be done.
  */
-pub fn find_repr_attr(diagnostic: @SpanHandler, attr: @ast::MetaItem, acc: ReprAttr)
+pub fn find_repr_attr(diagnostic: &SpanHandler, attr: @ast::MetaItem, acc: ReprAttr)
     -> ReprAttr {
     let mut acc = acc;
     match attr.node {
@@ -438,7 +438,7 @@ pub fn find_repr_attr(diagnostic: @SpanHandler, attr: @ast::MetaItem, acc: ReprA
         // Not a "repr" hint: ignore.
         _ => { }
     }
-    return acc;
+    acc
 }
 
 fn int_type_of_word(s: &str) -> Option<IntType> {
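
The attr.rs hunks are the simplest instance of the pattern: functions that only report errors now borrow the diagnostic handler for the duration of the call instead of taking an @SpanHandler, and the explicit return at the end becomes a tail expression. A sketch of that shape, with a hypothetical Diag type standing in for SpanHandler:

    use std::collections::HashSet;

    // Hypothetical stand-in for SpanHandler; only the borrowing shape matters.
    struct Diag;

    impl Diag {
        fn error(&self, msg: &str) {
            eprintln!("error: {}", msg);
        }
    }

    // Was (roughly): fn require_unique_names(diag: @Diag, names: &[&str]) -> uint
    fn require_unique_names(diag: &Diag, names: &[&str]) -> usize {
        let mut seen = HashSet::new();
        let mut dups = 0;
        for name in names {
            if !seen.insert(*name) {
                diag.error(&format!("duplicate attribute `{}`", name));
                dups += 1;
            }
        }
        dups // tail expression instead of `return dups;`
    }

    fn main() {
        let diag = Diag;
        assert_eq!(require_unique_names(&diag, &["repr", "inline", "repr"]), 1);
    }
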
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index 5be03317b77..4bfd5391a8f 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -23,6 +23,7 @@ source code snippets, etc.
 
 use std::cell::RefCell;
 use std::cmp;
+use std::rc::Rc;
 use std::vec_ng::Vec;
 use serialize::{Encodable, Decodable, Encoder, Decoder};
 
@@ -140,7 +141,7 @@ pub fn mk_sp(lo: BytePos, hi: BytePos) -> Span {
 /// A source code location used for error reporting
 pub struct Loc {
     /// Information about the original source
-    file: @FileMap,
+    file: Rc<FileMap>,
     /// The (1-based) line number
     line: uint,
     /// The (0-based) column offset
@@ -154,12 +155,12 @@ pub struct LocWithOpt {
     filename: FileName,
     line: uint,
     col: CharPos,
-    file: Option<@FileMap>,
+    file: Option<Rc<FileMap>>,
 }
 
 // used to be structural records. Better names, anyone?
-pub struct FileMapAndLine {fm: @FileMap, line: uint}
-pub struct FileMapAndBytePos {fm: @FileMap, pos: BytePos}
+pub struct FileMapAndLine {fm: Rc<FileMap>, line: uint}
+pub struct FileMapAndBytePos {fm: Rc<FileMap>, pos: BytePos}
 
 #[deriving(Clone, Hash, Show)]
 pub enum MacroFormat {
@@ -186,10 +187,10 @@ pub struct ExpnInfo {
 
 pub type FileName = ~str;
 
-pub struct FileLines
-{
-    file: @FileMap,
-    lines: Vec<uint> }
+pub struct FileLines {
+    file: Rc<FileMap>,
+    lines: Vec<uint>
+}
 
 /// Identifies an offset of a multi-byte character in a FileMap
 pub struct MultiByteChar {
@@ -251,8 +252,7 @@ impl FileMap {
             pos: pos,
             bytes: bytes,
         };
-        let mut multibyte_chars = self.multibyte_chars.borrow_mut();
-        multibyte_chars.get().push(mbc);
+        self.multibyte_chars.borrow_mut().get().push(mbc);
     }
 
     pub fn is_real_file(&self) -> bool {
@@ -261,7 +261,7 @@ impl FileMap {
 }
 
 pub struct CodeMap {
-    files: RefCell<Vec<@FileMap> >
+    files: RefCell<Vec<Rc<FileMap>>>
 }
 
 impl CodeMap {
@@ -271,11 +271,11 @@ impl CodeMap {
         }
     }
 
-    pub fn new_filemap(&self, filename: FileName, mut src: ~str) -> @FileMap {
+    pub fn new_filemap(&self, filename: FileName, mut src: ~str) -> Rc<FileMap> {
         let mut files = self.files.borrow_mut();
         let start_pos = match files.get().last() {
             None => 0,
-            Some(last) => last.start_pos.to_uint() + last.src.len(),
+            Some(last) => last.deref().start_pos.to_uint() + last.deref().src.len(),
         };
 
         // Append '\n' in case it's not already there.
@@ -286,34 +286,33 @@ impl CodeMap {
             src.push_char('\n');
         }
 
-        let filemap = @FileMap {
+        let filemap = Rc::new(FileMap {
             name: filename,
             src: src,
             start_pos: Pos::from_uint(start_pos),
             lines: RefCell::new(Vec::new()),
             multibyte_chars: RefCell::new(Vec::new()),
-        };
+        });
 
-        files.get().push(filemap);
+        files.get().push(filemap.clone());
 
-        return filemap;
+        filemap
     }
 
     pub fn mk_substr_filename(&self, sp: Span) -> ~str {
         let pos = self.lookup_char_pos(sp.lo);
-        return format!("<{}:{}:{}>", pos.file.name,
-                       pos.line, pos.col.to_uint() + 1)
+        format!("<{}:{}:{}>", pos.file.deref().name, pos.line, pos.col.to_uint() + 1)
     }
 
     /// Lookup source information about a BytePos
     pub fn lookup_char_pos(&self, pos: BytePos) -> Loc {
-        return self.lookup_pos(pos);
+        self.lookup_pos(pos)
     }
 
     pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt {
         let loc = self.lookup_char_pos(pos);
         LocWithOpt {
-            filename: loc.file.name.to_str(),
+            filename: loc.file.deref().name.to_str(),
             line: loc.line,
             col: loc.col,
             file: Some(loc.file)
@@ -321,11 +320,8 @@ impl CodeMap {
     }
 
     pub fn span_to_str(&self, sp: Span) -> ~str {
-        {
-            let files = self.files.borrow();
-            if files.get().len() == 0 && sp == DUMMY_SP {
-                return ~"no-location";
-            }
+        if self.files.borrow().get().len() == 0 && sp == DUMMY_SP {
+            return ~"no-location";
         }
 
         let lo = self.lookup_char_pos_adj(sp.lo);
@@ -335,18 +331,17 @@ impl CodeMap {
     }
 
     pub fn span_to_filename(&self, sp: Span) -> FileName {
-        let lo = self.lookup_char_pos(sp.lo);
-        lo.file.name.to_str()
+        self.lookup_char_pos(sp.lo).file.deref().name.to_str()
     }
 
-    pub fn span_to_lines(&self, sp: Span) -> @FileLines {
+    pub fn span_to_lines(&self, sp: Span) -> FileLines {
         let lo = self.lookup_char_pos(sp.lo);
         let hi = self.lookup_char_pos(sp.hi);
         let mut lines = Vec::new();
         for i in range(lo.line - 1u, hi.line as uint) {
             lines.push(i);
         };
-        return @FileLines {file: lo.file, lines: lines};
+        FileLines {file: lo.file, lines: lines}
     }
 
     pub fn span_to_snippet(&self, sp: Span) -> Option<~str> {
@@ -357,27 +352,22 @@ impl CodeMap {
         // it's testing isn't true for all spans in the AST, so to allow the
         // caller to not have to fail (and it can't catch it since the CodeMap
         // isn't sendable), return None
-        if begin.fm.start_pos != end.fm.start_pos {
+        if begin.fm.deref().start_pos != end.fm.deref().start_pos {
             None
         } else {
-            Some(begin.fm.src.slice( begin.pos.to_uint(), end.pos.to_uint()).to_owned())
+            Some(begin.fm.deref().src.slice( begin.pos.to_uint(), end.pos.to_uint()).to_owned())
         }
     }
 
-    pub fn get_filemap(&self, filename: &str) -> @FileMap {
-        let files = self.files.borrow();
-        for fm in files.get().iter() {
-            if filename == fm.name {
-                return *fm
+    pub fn get_filemap(&self, filename: &str) -> Rc<FileMap> {
+        for fm in self.files.borrow().get().iter() {
+            if filename == fm.deref().name {
+                return fm.clone();
             }
         }
-        //XXjdm the following triggers a mismatched type bug
-        //      (or expected function, found _|_)
-        fail!(); // ("asking for " + filename + " which we don't know about");
+        fail!("asking for {} which we don't know about", filename);
     }
-}
 
-impl CodeMap {
     fn lookup_filemap_idx(&self, pos: BytePos) -> uint {
         let files = self.files.borrow();
         let files = files.get();
@@ -386,7 +376,7 @@ impl CodeMap {
         let mut b = len;
         while b - a > 1u {
             let m = (a + b) / 2u;
-            if files.get(m).start_pos > pos {
+            if files.get(m).deref().start_pos > pos {
                 b = m;
             } else {
                 a = m;
@@ -396,7 +386,7 @@ impl CodeMap {
         // filemap, but are not the filemaps we want (because they are length 0, they cannot
         // contain what we are looking for). So, rewind until we find a useful filemap.
         loop {
-            let lines = files.get(a).lines.borrow();
+            let lines = files.get(a).deref().lines.borrow();
             let lines = lines.get();
             if lines.len() > 0 {
                 break;
@@ -413,28 +403,28 @@ impl CodeMap {
         return a;
     }
 
-    fn lookup_line(&self, pos: BytePos) -> FileMapAndLine
-    {
+    fn lookup_line(&self, pos: BytePos) -> FileMapAndLine {
         let idx = self.lookup_filemap_idx(pos);
 
         let files = self.files.borrow();
-        let f = *files.get().get(idx);
+        let f = files.get().get(idx).clone();
         let mut a = 0u;
-        let mut lines = f.lines.borrow_mut();
-        let mut b = lines.get().len();
-        while b - a > 1u {
-            let m = (a + b) / 2u;
-            if *lines.get().get(m) > pos { b = m; } else { a = m; }
+        {
+            let mut lines = f.deref().lines.borrow_mut();
+            let mut b = lines.get().len();
+            while b - a > 1u {
+                let m = (a + b) / 2u;
+                if *lines.get().get(m) > pos { b = m; } else { a = m; }
+            }
         }
-        return FileMapAndLine {fm: f, line: a};
+        FileMapAndLine {fm: f, line: a}
     }
 
     fn lookup_pos(&self, pos: BytePos) -> Loc {
         let FileMapAndLine {fm: f, line: a} = self.lookup_line(pos);
         let line = a + 1u; // Line numbers start at 1
         let chpos = self.bytepos_to_file_charpos(pos);
-        let lines = f.lines.borrow();
-        let linebpos = *lines.get().get(a);
+        let linebpos = *f.deref().lines.borrow().get().get(a);
         let linechpos = self.bytepos_to_file_charpos(linebpos);
         debug!("codemap: byte pos {:?} is on the line at byte pos {:?}",
                pos, linebpos);
@@ -442,20 +432,18 @@ impl CodeMap {
                chpos, linechpos);
         debug!("codemap: byte is on line: {:?}", line);
         assert!(chpos >= linechpos);
-        return Loc {
+        Loc {
             file: f,
             line: line,
             col: chpos - linechpos
-        };
+        }
     }
 
-    fn lookup_byte_offset(&self, bpos: BytePos)
-        -> FileMapAndBytePos {
+    fn lookup_byte_offset(&self, bpos: BytePos) -> FileMapAndBytePos {
         let idx = self.lookup_filemap_idx(bpos);
-        let files = self.files.borrow();
-        let fm = *files.get().get(idx);
-        let offset = bpos - fm.start_pos;
-        return FileMapAndBytePos {fm: fm, pos: offset};
+        let fm = self.files.borrow().get().get(idx).clone();
+        let offset = bpos - fm.deref().start_pos;
+        FileMapAndBytePos {fm: fm, pos: offset}
     }
 
     // Converts an absolute BytePos to a CharPos relative to the filemap.
@@ -468,8 +456,7 @@ impl CodeMap {
         // The number of extra bytes due to multibyte chars in the FileMap
         let mut total_extra_bytes = 0;
 
-        let multibyte_chars = map.multibyte_chars.borrow();
-        for mbc in multibyte_chars.get().iter() {
+        for mbc in map.deref().multibyte_chars.borrow().get().iter() {
             debug!("codemap: {:?}-byte char at {:?}", mbc.bytes, mbc.pos);
             if mbc.pos < bpos {
                 // every character is at least one byte, so we only
@@ -483,8 +470,8 @@ impl CodeMap {
             }
         }
 
-        assert!(map.start_pos.to_uint() + total_extra_bytes <= bpos.to_uint());
-        CharPos(bpos.to_uint() - map.start_pos.to_uint() - total_extra_bytes)
+        assert!(map.deref().start_pos.to_uint() + total_extra_bytes <= bpos.to_uint());
+        CharPos(bpos.to_uint() - map.deref().start_pos.to_uint() - total_extra_bytes)
     }
 }
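
One detail in lookup_line is easy to miss: the new braces around the lines borrow are not cosmetic. The RefCell guard borrows from f, so it has to go out of scope before f can be moved into the returned FileMapAndLine. A reduced sketch of that shape, with hypothetical Entry/EntryAndLine types:

    use std::cell::RefCell;
    use std::rc::Rc;

    struct Entry {
        lines: RefCell<Vec<u32>>,
    }

    struct EntryAndLine {
        entry: Rc<Entry>,
        line: usize,
    }

    fn lookup(entry: Rc<Entry>, pos: u32) -> EntryAndLine {
        let line;
        {
            // The guard returned by borrow() holds a reference into `entry`;
            // scoping it in its own block lets `entry` be moved out afterwards.
            let lines = entry.lines.borrow();
            line = lines.iter().take_while(|&&p| p <= pos).count();
        }
        EntryAndLine { entry, line }
    }

    fn main() {
        let e = Rc::new(Entry { lines: RefCell::new(vec![0, 10, 20]) });
        let found = lookup(e, 12);
        assert_eq!(found.line, 2);
        assert_eq!(found.entry.lines.borrow().len(), 3);
    }
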
 
diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs
index 2da8b786805..c88c5a3c4e6 100644
--- a/src/libsyntax/diagnostic.rs
+++ b/src/libsyntax/diagnostic.rs
@@ -40,37 +40,37 @@ pub struct ExplicitBug;
 // accepts span information for source-location
 // reporting.
 pub struct SpanHandler {
-    handler: @Handler,
-    cm: @codemap::CodeMap,
+    handler: Handler,
+    cm: codemap::CodeMap,
 }
 
 impl SpanHandler {
     pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
-        self.handler.emit(Some((&*self.cm, sp)), msg, Fatal);
+        self.handler.emit(Some((&self.cm, sp)), msg, Fatal);
         fail!(FatalError);
     }
     pub fn span_err(&self, sp: Span, msg: &str) {
-        self.handler.emit(Some((&*self.cm, sp)), msg, Error);
+        self.handler.emit(Some((&self.cm, sp)), msg, Error);
         self.handler.bump_err_count();
     }
     pub fn span_warn(&self, sp: Span, msg: &str) {
-        self.handler.emit(Some((&*self.cm, sp)), msg, Warning);
+        self.handler.emit(Some((&self.cm, sp)), msg, Warning);
     }
     pub fn span_note(&self, sp: Span, msg: &str) {
-        self.handler.emit(Some((&*self.cm, sp)), msg, Note);
+        self.handler.emit(Some((&self.cm, sp)), msg, Note);
     }
     pub fn span_end_note(&self, sp: Span, msg: &str) {
-        self.handler.custom_emit(&*self.cm, sp, msg, Note);
+        self.handler.custom_emit(&self.cm, sp, msg, Note);
     }
     pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
-        self.handler.emit(Some((&*self.cm, sp)), msg, Bug);
+        self.handler.emit(Some((&self.cm, sp)), msg, Bug);
         fail!(ExplicitBug);
     }
     pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
         self.span_bug(sp, ~"unimplemented " + msg);
     }
-    pub fn handler(&self) -> @Handler {
-        self.handler
+    pub fn handler<'a>(&'a self) -> &'a Handler {
+        &self.handler
     }
 }
 
@@ -137,20 +137,19 @@ impl Handler {
     }
 }
 
-pub fn mk_span_handler(handler: @Handler, cm: @codemap::CodeMap)
-                       -> @SpanHandler {
-    @SpanHandler {
+pub fn mk_span_handler(handler: Handler, cm: codemap::CodeMap) -> SpanHandler {
+    SpanHandler {
         handler: handler,
         cm: cm,
     }
 }
 
-pub fn default_handler() -> @Handler {
+pub fn default_handler() -> Handler {
     mk_handler(~EmitterWriter::stderr())
 }
 
-pub fn mk_handler(e: ~Emitter) -> @Handler {
-    @Handler {
+pub fn mk_handler(e: ~Emitter) -> Handler {
+    Handler {
         err_count: Cell::new(0),
         emit: RefCell::new(e),
     }
@@ -301,8 +300,8 @@ fn highlight_lines(err: &mut EmitterWriter,
                    cm: &codemap::CodeMap,
                    sp: Span,
                    lvl: Level,
-                   lines: &codemap::FileLines) -> io::IoResult<()> {
-    let fm = lines.file;
+                   lines: codemap::FileLines) -> io::IoResult<()> {
+    let fm = lines.file.deref();
 
     let mut elided = false;
     let mut display_lines = lines.lines.as_slice();
@@ -374,8 +373,8 @@ fn custom_highlight_lines(w: &mut EmitterWriter,
                           cm: &codemap::CodeMap,
                           sp: Span,
                           lvl: Level,
-                          lines: &codemap::FileLines) -> io::IoResult<()> {
-    let fm = lines.file;
+                          lines: codemap::FileLines) -> io::IoResult<()> {
+    let fm = lines.file.deref();
 
     let lines = lines.lines.as_slice();
     if lines.len() > MAX_LINES {
@@ -420,8 +419,7 @@ fn print_macro_backtrace(w: &mut EmitterWriter,
     Ok(())
 }
 
-pub fn expect<T:Clone>(diag: @SpanHandler, opt: Option<T>, msg: || -> ~str)
-              -> T {
+pub fn expect<T:Clone>(diag: &SpanHandler, opt: Option<T>, msg: || -> ~str) -> T {
     match opt {
        Some(ref t) => (*t).clone(),
        None => diag.handler().bug(msg()),
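
diagnostic.rs flips from a web of @Handler / @CodeMap pointers to plain ownership: SpanHandler stores its Handler and CodeMap by value and hands out borrows, so handler() returns a reference tied to self. A sketch of that ownership shape, with stripped-down stand-ins for the real types:

    use std::cell::Cell;

    // Stripped-down stand-ins; the real Handler and CodeMap carry more state.
    struct Handler {
        err_count: Cell<usize>,
    }

    struct CodeMap;

    struct SpanHandler {
        handler: Handler, // was: @Handler
        cm: CodeMap,      // was: @codemap::CodeMap
    }

    impl SpanHandler {
        fn span_err(&self, msg: &str) {
            // `&self.cm` replaces the old `&*self.cm` deref of a managed box.
            emit(&self.cm, msg);
            self.handler.err_count.set(self.handler.err_count.get() + 1);
        }

        // Was: fn handler(&self) -> @Handler; now a borrow tied to `self`.
        fn handler<'a>(&'a self) -> &'a Handler {
            &self.handler
        }
    }

    fn emit(_cm: &CodeMap, msg: &str) {
        eprintln!("error: {}", msg);
    }

    fn main() {
        let sh = SpanHandler {
            handler: Handler { err_count: Cell::new(0) },
            cm: CodeMap,
        };
        sh.span_err("oops");
        assert_eq!(sh.handler().err_count.get(), 1);
    }
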
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index a6f145a129e..df2c265e6eb 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -326,7 +326,7 @@ impl<'a> ExtCtxt<'a> {
         }
     }
 
-    pub fn codemap(&self) -> @CodeMap { self.parse_sess.cm }
+    pub fn codemap(&self) -> &'a CodeMap { &self.parse_sess.span_diagnostic.cm }
     pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
     pub fn cfg(&self) -> ast::CrateConfig { self.cfg.clone() }
     pub fn call_site(&self) -> Span {
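
ExtCtxt::codemap now returns a borrow whose lifetime is the 'a of the ParseSess the context already holds, rather than a copied @CodeMap. A sketch of returning a field reference at the lifetime of a stored borrow, with hypothetical Sess/Ctxt types:

    struct CodeMap;

    struct Sess {
        cm: CodeMap,
    }

    struct Ctxt<'a> {
        sess: &'a Sess,
    }

    impl<'a> Ctxt<'a> {
        // The returned borrow lives as long as the session borrow stored in
        // the context, not merely as long as `&self`.
        fn codemap(&self) -> &'a CodeMap {
            &self.sess.cm
        }
    }

    fn main() {
        let sess = Sess { cm: CodeMap };
        let cm_ref;
        {
            let cx = Ctxt { sess: &sess };
            cm_ref = cx.codemap(); // outlives `cx`, though not `sess`
        }
        let _still_valid: &CodeMap = cm_ref;
    }
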
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index 6aa90e5e842..7c42476bc01 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -633,7 +633,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
             vec!(
                 self.expr_str(span, msg),
                 self.expr_str(span,
-                              token::intern_and_get_ident(loc.file.name)),
+                              token::intern_and_get_ident(loc.file.deref().name)),
                 self.expr_uint(span, loc.line)))
     }
 
diff --git a/src/libsyntax/ext/registrar.rs b/src/libsyntax/ext/registrar.rs
index 4c18eb83afc..d8bf726da79 100644
--- a/src/libsyntax/ext/registrar.rs
+++ b/src/libsyntax/ext/registrar.rs
@@ -37,7 +37,7 @@ impl Visitor<()> for MacroRegistrarContext {
     }
 }
 
-pub fn find_macro_registrar(diagnostic: @diagnostic::SpanHandler,
+pub fn find_macro_registrar(diagnostic: &diagnostic::SpanHandler,
                             krate: &ast::Crate) -> Option<ast::DefId> {
     let mut ctx = MacroRegistrarContext { registrars: Vec::new() };
     visit::walk_crate(&mut ctx, krate, ());
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index b31388f58eb..137cd89bf30 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -57,7 +57,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 
     let topmost = topmost_expn_info(cx.backtrace().unwrap());
     let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo);
-    let filename = token::intern_and_get_ident(loc.file.name);
+    let filename = token::intern_and_get_ident(loc.file.deref().name);
     base::MRExpr(cx.expr_str(topmost.call_site, filename))
 }
 
@@ -117,7 +117,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
             // dependency information
             let filename = file.display().to_str();
             let interned = token::intern_and_get_ident(src);
-            cx.parse_sess.cm.new_filemap(filename, src);
+            cx.codemap().new_filemap(filename, src);
 
             base::MRExpr(cx.expr_str(sp, interned))
         }
diff --git a/src/libsyntax/ext/trace_macros.rs b/src/libsyntax/ext/trace_macros.rs
index 183cccde18e..fa49f06e516 100644
--- a/src/libsyntax/ext/trace_macros.rs
+++ b/src/libsyntax/ext/trace_macros.rs
@@ -22,7 +22,7 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt,
                         -> base::MacResult {
     let sess = cx.parse_sess();
     let cfg = cx.cfg();
-    let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic,
+    let tt_rdr = new_tt_reader(&cx.parse_sess().span_diagnostic,
                                None,
                                tt.iter().map(|x| (*x).clone()).collect());
     let mut rust_parser = Parser(sess, cfg.clone(), tt_rdr.dup());
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 4cacbfd6e5a..3f4ed0b1e8e 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -131,13 +131,11 @@ fn generic_extension(cx: &ExtCtxt,
     let mut best_fail_spot = DUMMY_SP;
     let mut best_fail_msg = ~"internal error: ran no matchers";
 
-    let s_d = cx.parse_sess().span_diagnostic;
-
     for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
         match **lhs {
           MatchedNonterminal(NtMatchers(ref mtcs)) => {
             // `None` is because we're not interpolating
-            let arg_rdr = new_tt_reader(s_d,
+            let arg_rdr = new_tt_reader(&cx.parse_sess().span_diagnostic,
                                         None,
                                         arg.iter()
                                            .map(|x| (*x).clone())
@@ -162,7 +160,8 @@ fn generic_extension(cx: &ExtCtxt,
                     _ => cx.span_bug(sp, "bad thing in rhs")
                 };
                 // rhs has holes ( `$id` and `$(...)` that need filled)
-                let trncbr = new_tt_reader(s_d, Some(named_matches),
+                let trncbr = new_tt_reader(&cx.parse_sess().span_diagnostic,
+                                           Some(named_matches),
                                            rhs);
                 let p = Parser(cx.parse_sess(), cx.cfg(), ~trncbr);
                 // Let the context choose how to interpret the result.
@@ -218,7 +217,7 @@ pub fn add_new_extension(cx: &mut ExtCtxt,
 
 
     // Parse the macro_rules! invocation (`none` is for no interpolations):
-    let arg_reader = new_tt_reader(cx.parse_sess().span_diagnostic,
+    let arg_reader = new_tt_reader(&cx.parse_sess().span_diagnostic,
                                    None,
                                    arg.clone());
     let argument_map = parse_or_else(cx.parse_sess(),
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index a3f179e851a..e120f07742e 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -30,8 +30,8 @@ struct TtFrame {
     up: Option<@TtFrame>,
 }
 
-pub struct TtReader {
-    sp_diag: @SpanHandler,
+pub struct TtReader<'a> {
+    sp_diag: &'a SpanHandler,
     // the unzipped tree:
     priv stack: RefCell<@TtFrame>,
     /* for MBE-style macro transcription */
@@ -46,10 +46,10 @@ pub struct TtReader {
 /** This can do Macro-By-Example transcription. On the other hand, if
  *  `src` contains no `TTSeq`s and `TTNonterminal`s, `interp` can (and
  *  should) be none. */
-pub fn new_tt_reader(sp_diag: @SpanHandler,
-                     interp: Option<HashMap<Ident, @NamedMatch>>,
-                     src: Vec<ast::TokenTree> )
-                     -> TtReader {
+pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
+                         interp: Option<HashMap<Ident, @NamedMatch>>,
+                         src: Vec<ast::TokenTree> )
+                         -> TtReader<'a> {
     let r = TtReader {
         sp_diag: sp_diag,
         stack: RefCell::new(@TtFrame {
@@ -70,7 +70,7 @@ pub fn new_tt_reader(sp_diag: @SpanHandler,
         cur_span: RefCell::new(DUMMY_SP),
     };
     tt_next_token(&r); /* get cur_tok and cur_span set up */
-    return r;
+    r
 }
 
 fn dup_tt_frame(f: @TtFrame) -> @TtFrame {
@@ -86,7 +86,7 @@ fn dup_tt_frame(f: @TtFrame) -> @TtFrame {
     }
 }
 
-pub fn dup_tt_reader(r: &TtReader) -> TtReader {
+pub fn dup_tt_reader<'a>(r: &TtReader<'a>) -> TtReader<'a> {
     TtReader {
         sp_diag: r.sp_diag,
         stack: RefCell::new(dup_tt_frame(r.stack.get())),
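
TtReader now borrows its SpanHandler, which forces a lifetime parameter onto the struct and onto every constructor that stores the borrow; duplicating a reader just copies the shared reference. A sketch of that shape, with hypothetical Diag/Reader types:

    struct Diag; // stands in for SpanHandler

    impl Diag {
        fn note(&self, msg: &str) {
            println!("note: {}", msg);
        }
    }

    // Was: struct TtReader { sp_diag: @SpanHandler, ... }
    struct Reader<'a> {
        diag: &'a Diag,
        tokens: Vec<String>,
    }

    // Mirrors new_tt_reader<'a>(sp_diag: &'a SpanHandler, ...) -> TtReader<'a>:
    // the output lifetime follows the borrowed handler.
    fn new_reader<'a>(diag: &'a Diag, tokens: Vec<String>) -> Reader<'a> {
        Reader { diag, tokens }
    }

    // Mirrors dup_tt_reader: both readers share the same borrowed handler.
    fn dup_reader<'a>(r: &Reader<'a>) -> Reader<'a> {
        Reader { diag: r.diag, tokens: r.tokens.clone() }
    }

    fn main() {
        let diag = Diag;
        let r = new_reader(&diag, vec!["tt".to_string()]);
        let r2 = dup_reader(&r);
        r2.diag.note(&format!("{} token(s)", r2.tokens.len()));
    }
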
diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs
index c2a2097de24..ed74fd416d1 100644
--- a/src/libsyntax/parse/comments.rs
+++ b/src/libsyntax/parse/comments.rs
@@ -346,10 +346,10 @@ pub struct Literal {
 // it appears this function is called only from pprust... that's
 // probably not a good thing.
 pub fn gather_comments_and_literals(span_diagnostic:
-                                        @diagnostic::SpanHandler,
+                                        &diagnostic::SpanHandler,
                                     path: ~str,
                                     srdr: &mut io::Reader)
-                                 -> (Vec<Comment> , Vec<Literal> ) {
+                                 -> (Vec<Comment>, Vec<Literal>) {
     let src = srdr.read_to_end().unwrap();
     let src = str::from_utf8_owned(src).unwrap();
     let cm = CodeMap::new();
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 884fc306f22..43e1f8756fa 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -18,6 +18,7 @@ use parse::token::{str_to_ident};
 
 use std::cell::{Cell, RefCell};
 use std::char;
+use std::rc::Rc;
 use std::mem::replace;
 use std::num::from_str_radix;
 
@@ -27,7 +28,7 @@ pub trait Reader {
     fn is_eof(&self) -> bool;
     fn next_token(&self) -> TokenAndSpan;
     fn fatal(&self, ~str) -> !;
-    fn span_diag(&self) -> @SpanHandler;
+    fn span_diag<'a>(&'a self) -> &'a SpanHandler;
     fn peek(&self) -> TokenAndSpan;
     fn dup(&self) -> ~Reader:;
 }
@@ -38,8 +39,8 @@ pub struct TokenAndSpan {
     sp: Span,
 }
 
-pub struct StringReader {
-    span_diagnostic: @SpanHandler,
+pub struct StringReader<'a> {
+    span_diagnostic: &'a SpanHandler,
     // The absolute offset within the codemap of the next character to read
     pos: Cell<BytePos>,
     // The absolute offset within the codemap of the last character read(curr)
@@ -48,36 +49,36 @@ pub struct StringReader {
     col: Cell<CharPos>,
     // The last character to be read
     curr: Cell<Option<char>>,
-    filemap: @codemap::FileMap,
+    filemap: Rc<codemap::FileMap>,
     /* cached: */
     peek_tok: RefCell<token::Token>,
     peek_span: RefCell<Span>,
 }
 
-impl StringReader {
+impl<'a> StringReader<'a> {
     pub fn curr_is(&self, c: char) -> bool {
         self.curr.get() == Some(c)
     }
 }
 
-pub fn new_string_reader(span_diagnostic: @SpanHandler,
-                         filemap: @codemap::FileMap)
-                      -> StringReader {
+pub fn new_string_reader<'a>(span_diagnostic: &'a SpanHandler,
+                             filemap: Rc<codemap::FileMap>)
+                             -> StringReader<'a> {
     let r = new_low_level_string_reader(span_diagnostic, filemap);
     string_advance_token(&r); /* fill in peek_* */
     r
 }
 
 /* For comments.rs, which hackily pokes into 'pos' and 'curr' */
-pub fn new_low_level_string_reader(span_diagnostic: @SpanHandler,
-                                   filemap: @codemap::FileMap)
-                                -> StringReader {
+pub fn new_low_level_string_reader<'a>(span_diagnostic: &'a SpanHandler,
+                                       filemap: Rc<codemap::FileMap>)
+                                       -> StringReader<'a> {
     // Force the initial reader bump to start on a fresh line
     let initial_char = '\n';
     let r = StringReader {
         span_diagnostic: span_diagnostic,
-        pos: Cell::new(filemap.start_pos),
-        last_pos: Cell::new(filemap.start_pos),
+        pos: Cell::new(filemap.deref().start_pos),
+        last_pos: Cell::new(filemap.deref().start_pos),
         col: Cell::new(CharPos(0)),
         curr: Cell::new(Some(initial_char)),
         filemap: filemap,
@@ -92,20 +93,20 @@ pub fn new_low_level_string_reader(span_diagnostic: @SpanHandler,
 // duplicating the string reader is probably a bad idea, in
 // that using them will cause interleaved pushes of line
 // offsets to the underlying filemap...
-fn dup_string_reader(r: &StringReader) -> StringReader {
+fn dup_string_reader<'a>(r: &StringReader<'a>) -> StringReader<'a> {
     StringReader {
         span_diagnostic: r.span_diagnostic,
         pos: Cell::new(r.pos.get()),
         last_pos: Cell::new(r.last_pos.get()),
         col: Cell::new(r.col.get()),
         curr: Cell::new(r.curr.get()),
-        filemap: r.filemap,
+        filemap: r.filemap.clone(),
         peek_tok: r.peek_tok.clone(),
         peek_span: r.peek_span.clone(),
     }
 }
 
-impl Reader for StringReader {
+impl<'a> Reader for StringReader<'a> {
     fn is_eof(&self) -> bool { is_eof(self) }
     // return the next token. EFFECT: advances the string_reader.
     fn next_token(&self) -> TokenAndSpan {
@@ -122,7 +123,7 @@ impl Reader for StringReader {
     fn fatal(&self, m: ~str) -> ! {
         self.span_diagnostic.span_fatal(self.peek_span.get(), m)
     }
-    fn span_diag(&self) -> @SpanHandler { self.span_diagnostic }
+    fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.span_diagnostic }
     fn peek(&self) -> TokenAndSpan {
         // FIXME(pcwalton): Bad copy!
         TokenAndSpan {
@@ -133,7 +134,7 @@ impl Reader for StringReader {
     fn dup(&self) -> ~Reader: { ~dup_string_reader(self) as ~Reader: }
 }
 
-impl Reader for TtReader {
+impl<'a> Reader for TtReader<'a> {
     fn is_eof(&self) -> bool {
         let cur_tok = self.cur_tok.borrow();
         *cur_tok.get() == token::EOF
@@ -146,7 +147,7 @@ impl Reader for TtReader {
     fn fatal(&self, m: ~str) -> ! {
         self.sp_diag.span_fatal(self.cur_span.get(), m);
     }
-    fn span_diag(&self) -> @SpanHandler { self.sp_diag }
+    fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.sp_diag }
     fn peek(&self) -> TokenAndSpan {
         TokenAndSpan {
             tok: self.cur_tok.get(),
@@ -189,7 +190,7 @@ fn fatal_span_verbose(rdr: &StringReader,
                    -> ! {
     let mut m = m;
     m.push_str(": ");
-    let s = rdr.filemap.src.slice(
+    let s = rdr.filemap.deref().src.slice(
                   byte_offset(rdr, from_pos).to_uint(),
                   byte_offset(rdr, to_pos).to_uint());
     m.push_str(s);
@@ -218,7 +219,7 @@ fn string_advance_token(r: &StringReader) {
 }
 
 fn byte_offset(rdr: &StringReader, pos: BytePos) -> BytePos {
-    (pos - rdr.filemap.start_pos)
+    (pos - rdr.filemap.deref().start_pos)
 }
 
 /// Calls `f` with a string slice of the source text spanning from `start`
@@ -240,7 +241,7 @@ fn with_str_from_to<T>(
                     end: BytePos,
                     f: |s: &str| -> T)
                     -> T {
-    f(rdr.filemap.src.slice(
+    f(rdr.filemap.deref().src.slice(
             byte_offset(rdr, start).to_uint(),
             byte_offset(rdr, end).to_uint()))
 }
@@ -250,21 +251,21 @@ fn with_str_from_to<T>(
 pub fn bump(rdr: &StringReader) {
     rdr.last_pos.set(rdr.pos.get());
     let current_byte_offset = byte_offset(rdr, rdr.pos.get()).to_uint();
-    if current_byte_offset < (rdr.filemap.src).len() {
+    if current_byte_offset < rdr.filemap.deref().src.len() {
         assert!(rdr.curr.get().is_some());
         let last_char = rdr.curr.get().unwrap();
-        let next = rdr.filemap.src.char_range_at(current_byte_offset);
+        let next = rdr.filemap.deref().src.char_range_at(current_byte_offset);
         let byte_offset_diff = next.next - current_byte_offset;
         rdr.pos.set(rdr.pos.get() + Pos::from_uint(byte_offset_diff));
         rdr.curr.set(Some(next.ch));
         rdr.col.set(rdr.col.get() + CharPos(1u));
         if last_char == '\n' {
-            rdr.filemap.next_line(rdr.last_pos.get());
+            rdr.filemap.deref().next_line(rdr.last_pos.get());
             rdr.col.set(CharPos(0u));
         }
 
         if byte_offset_diff > 1 {
-            rdr.filemap.record_multibyte_char(rdr.last_pos.get(), byte_offset_diff);
+            rdr.filemap.deref().record_multibyte_char(rdr.last_pos.get(), byte_offset_diff);
         }
     } else {
         rdr.curr.set(None);
@@ -275,8 +276,8 @@ pub fn is_eof(rdr: &StringReader) -> bool {
 }
 pub fn nextch(rdr: &StringReader) -> Option<char> {
     let offset = byte_offset(rdr, rdr.pos.get()).to_uint();
-    if offset < (rdr.filemap.src).len() {
-        Some(rdr.filemap.src.char_at(offset))
+    if offset < rdr.filemap.deref().src.len() {
+        Some(rdr.filemap.deref().src.char_at(offset))
     } else {
         None
     }
@@ -334,56 +335,55 @@ fn consume_any_line_comment(rdr: &StringReader)
                          -> Option<TokenAndSpan> {
     if rdr.curr_is('/') {
         match nextch(rdr) {
-          Some('/') => {
-            bump(rdr);
-            bump(rdr);
-            // line comments starting with "///" or "//!" are doc-comments
-            if rdr.curr_is('/') || rdr.curr_is('!') {
-                let start_bpos = rdr.pos.get() - BytePos(3);
-                while !rdr.curr_is('\n') && !is_eof(rdr) {
-                    bump(rdr);
-                }
-                let ret = with_str_from(rdr, start_bpos, |string| {
-                    // but comments with only more "/"s are not
-                    if !is_line_non_doc_comment(string) {
-                        Some(TokenAndSpan{
-                            tok: token::DOC_COMMENT(str_to_ident(string)),
-                            sp: codemap::mk_sp(start_bpos, rdr.pos.get())
-                        })
-                    } else {
-                        None
+            Some('/') => {
+                bump(rdr);
+                bump(rdr);
+                // line comments starting with "///" or "//!" are doc-comments
+                if rdr.curr_is('/') || rdr.curr_is('!') {
+                    let start_bpos = rdr.pos.get() - BytePos(3);
+                    while !rdr.curr_is('\n') && !is_eof(rdr) {
+                        bump(rdr);
                     }
-                });
+                    let ret = with_str_from(rdr, start_bpos, |string| {
+                        // but comments with only more "/"s are not
+                        if !is_line_non_doc_comment(string) {
+                            Some(TokenAndSpan{
+                                tok: token::DOC_COMMENT(str_to_ident(string)),
+                                sp: codemap::mk_sp(start_bpos, rdr.pos.get())
+                            })
+                        } else {
+                            None
+                        }
+                    });
 
-                if ret.is_some() {
-                    return ret;
+                    if ret.is_some() {
+                        return ret;
+                    }
+                } else {
+                    while !rdr.curr_is('\n') && !is_eof(rdr) { bump(rdr); }
                 }
-            } else {
-                while !rdr.curr_is('\n') && !is_eof(rdr) { bump(rdr); }
+                // Restart whitespace munch.
+                consume_whitespace_and_comments(rdr)
             }
-            // Restart whitespace munch.
-            return consume_whitespace_and_comments(rdr);
-          }
-          Some('*') => { bump(rdr); bump(rdr); return consume_block_comment(rdr); }
-          _ => ()
+            Some('*') => { bump(rdr); bump(rdr); consume_block_comment(rdr) }
+            _ => None
         }
     } else if rdr.curr_is('#') {
         if nextch_is(rdr, '!') {
             // I guess this is the only way to figure out if
             // we're at the beginning of the file...
-            let cmap = @CodeMap::new();
-            {
-                let mut files = cmap.files.borrow_mut();
-                files.get().push(rdr.filemap);
-            }
+            let cmap = CodeMap::new();
+            cmap.files.borrow_mut().get().push(rdr.filemap.clone());
             let loc = cmap.lookup_char_pos_adj(rdr.last_pos.get());
             if loc.line == 1u && loc.col == CharPos(0u) {
                 while !rdr.curr_is('\n') && !is_eof(rdr) { bump(rdr); }
                 return consume_whitespace_and_comments(rdr);
             }
         }
+        None
+    } else {
+        None
     }
-    return None;
 }
 
 pub fn is_block_non_doc_comment(s: &str) -> bool {
@@ -1019,7 +1019,7 @@ mod test {
         let writer = ~util::NullWriter;
         let emitter = diagnostic::EmitterWriter::new(writer);
         let handler = diagnostic::mk_handler(~emitter);
-        let span_handler = diagnostic::mk_span_handler(handler, @cm);
+        let span_handler = diagnostic::mk_span_handler(handler, cm);
         Env {
             string_reader: new_string_reader(span_handler,fm)
         }
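
Besides threading Rc<FileMap> and &'a SpanHandler through StringReader, the lexer reshapes consume_any_line_comment so the match itself is the function's value: every arm yields an Option and the trailing return None disappears. A reduced sketch of that control-flow change (a toy line-comment scanner, not the real lexer):

    // Returns the line comment starting at byte `i`, or None if `src[i..]`
    // does not begin one. Every branch is an expression; there is no
    // trailing `return None`.
    fn line_comment(src: &str, i: usize) -> Option<String> {
        let bytes = src.as_bytes();
        if bytes.get(i) == Some(&b'/') {
            match bytes.get(i + 1) {
                Some(&b'/') => {
                    let end = src[i..].find('\n').map_or(src.len(), |n| i + n);
                    Some(src[i..end].to_string())
                }
                _ => None,
            }
        } else {
            None
        }
    }

    fn main() {
        assert_eq!(line_comment("x // hi\ny", 2).as_deref(), Some("// hi"));
        assert_eq!(line_comment("x / y", 2), None);
    }
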
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 19291f72101..79fedf82798 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -13,13 +13,13 @@
 
 use ast;
 use codemap::{Span, CodeMap, FileMap};
-use codemap;
 use diagnostic::{SpanHandler, mk_span_handler, default_handler};
 use parse::attr::ParserAttr;
 use parse::parser::Parser;
 
 use std::cell::RefCell;
 use std::io::File;
+use std::rc::Rc;
 use std::str;
 use std::vec_ng::Vec;
 
@@ -40,26 +40,20 @@ pub mod obsolete;
 
 // info about a parsing session.
 pub struct ParseSess {
-    cm: @codemap::CodeMap, // better be the same as the one in the reader!
-    span_diagnostic: @SpanHandler, // better be the same as the one in the reader!
+    span_diagnostic: SpanHandler, // better be the same as the one in the reader!
     /// Used to determine and report recursive mod inclusions
-    included_mod_stack: RefCell<Vec<Path> >,
+    included_mod_stack: RefCell<Vec<Path>>,
 }
 
 pub fn new_parse_sess() -> ParseSess {
-    let cm = @CodeMap::new();
     ParseSess {
-        cm: cm,
-        span_diagnostic: mk_span_handler(default_handler(), cm),
+        span_diagnostic: mk_span_handler(default_handler(), CodeMap::new()),
         included_mod_stack: RefCell::new(Vec::new()),
     }
 }
 
-pub fn new_parse_sess_special_handler(sh: @SpanHandler,
-                                      cm: @codemap::CodeMap)
-                                      -> ParseSess {
+pub fn new_parse_sess_special_handler(sh: SpanHandler) -> ParseSess {
     ParseSess {
-        cm: cm,
         span_diagnostic: sh,
         included_mod_stack: RefCell::new(Vec::new()),
     }
@@ -175,40 +169,36 @@ pub fn parse_tts_from_source_str(name: ~str,
 
 // Create a new parser from a source string
 pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess,
-                                     cfg: ast::CrateConfig,
-                                     name: ~str,
-                                     source: ~str)
-                                     -> Parser<'a> {
-    filemap_to_parser(sess,string_to_filemap(sess,source,name),cfg)
+                                      cfg: ast::CrateConfig,
+                                      name: ~str,
+                                      source: ~str)
+                                      -> Parser<'a> {
+    filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg)
 }
 
 /// Create a new parser, handling errors as appropriate
 /// if the file doesn't exist
-pub fn new_parser_from_file<'a>(
-    sess: &'a ParseSess,
-    cfg: ast::CrateConfig,
-    path: &Path
-) -> Parser<'a> {
-    filemap_to_parser(sess,file_to_filemap(sess,path,None),cfg)
+pub fn new_parser_from_file<'a>(sess: &'a ParseSess,
+                                cfg: ast::CrateConfig,
+                                path: &Path) -> Parser<'a> {
+    filemap_to_parser(sess, file_to_filemap(sess, path, None), cfg)
 }
 
 /// Given a session, a crate config, a path, and a span, add
 /// the file at the given path to the codemap, and return a parser.
 /// On an error, use the given span as the source of the problem.
-pub fn new_sub_parser_from_file<'a>(
-    sess: &'a ParseSess,
-    cfg: ast::CrateConfig,
-    path: &Path,
-    sp: Span
-) -> Parser<'a> {
-    filemap_to_parser(sess,file_to_filemap(sess,path,Some(sp)),cfg)
+pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
+                                    cfg: ast::CrateConfig,
+                                    path: &Path,
+                                    sp: Span) -> Parser<'a> {
+    filemap_to_parser(sess, file_to_filemap(sess, path, Some(sp)), cfg)
 }
 
 /// Given a filemap and config, return a parser
 pub fn filemap_to_parser<'a>(sess: &'a ParseSess,
-                             filemap: @FileMap,
+                             filemap: Rc<FileMap>,
                              cfg: ast::CrateConfig) -> Parser<'a> {
-    tts_to_parser(sess,filemap_to_tts(sess,filemap),cfg)
+    tts_to_parser(sess, filemap_to_tts(sess, filemap), cfg)
 }
 
 // must preserve old name for now, because quote! from the *existing*
@@ -216,7 +206,7 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess,
 pub fn new_parser_from_tts<'a>(sess: &'a ParseSess,
                                cfg: ast::CrateConfig,
                                tts: Vec<ast::TokenTree>) -> Parser<'a> {
-    tts_to_parser(sess,tts,cfg)
+    tts_to_parser(sess, tts, cfg)
 }
 
 
@@ -225,7 +215,7 @@ pub fn new_parser_from_tts<'a>(sess: &'a ParseSess,
 /// Given a session and a path and an optional span (for error reporting),
 /// add the path to the session's codemap and return the new filemap.
 pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
-    -> @FileMap {
+    -> Rc<FileMap> {
     let err = |msg: &str| {
         match spanopt {
             Some(sp) => sess.span_diagnostic.span_fatal(sp, msg),
@@ -251,17 +241,17 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 // given a session and a string, add the string to
 // the session's codemap and return the new filemap
 pub fn string_to_filemap(sess: &ParseSess, source: ~str, path: ~str)
-                         -> @FileMap {
-    sess.cm.new_filemap(path, source)
+                         -> Rc<FileMap> {
+    sess.span_diagnostic.cm.new_filemap(path, source)
 }
 
 // given a filemap, produce a sequence of token-trees
-pub fn filemap_to_tts(sess: &ParseSess, filemap: @FileMap)
+pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
     -> Vec<ast::TokenTree> {
     // it appears to me that the cfg doesn't matter here... indeed,
     // parsing tt's probably shouldn't require a parser at all.
     let cfg = Vec::new();
-    let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap);
+    let srdr = lexer::new_string_reader(&sess.span_diagnostic, filemap);
     let mut p1 = Parser(sess, cfg, ~srdr);
     p1.parse_all_token_trees()
 }
@@ -270,7 +260,7 @@ pub fn filemap_to_tts(sess: &ParseSess, filemap: @FileMap)
 pub fn tts_to_parser<'a>(sess: &'a ParseSess,
                          tts: Vec<ast::TokenTree>,
                          cfg: ast::CrateConfig) -> Parser<'a> {
-    let trdr = lexer::new_tt_reader(sess.span_diagnostic, None, tts);
+    let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, tts);
     Parser(sess, cfg, ~trdr)
 }
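
The ParseSess change removes a duplicated handle: instead of storing a CodeMap and a SpanHandler that "had better" agree, the session stores only the SpanHandler and reaches the CodeMap through it, so the invariant holds by construction. A sketch of that containment, reusing the same kind of stripped-down stand-ins as the earlier sketches:

    struct CodeMap; // hypothetical stand-in

    struct SpanHandler {
        cm: CodeMap,
    }

    // Was: struct ParseSess { cm: @CodeMap, span_diagnostic: @SpanHandler, ... }
    // with a comment warning that the two had better point at the same map.
    struct ParseSess {
        span_diagnostic: SpanHandler,
    }

    impl ParseSess {
        fn new() -> ParseSess {
            ParseSess {
                span_diagnostic: SpanHandler { cm: CodeMap },
            }
        }

        // Only one CodeMap exists, so there is no invariant left to maintain.
        fn codemap(&self) -> &CodeMap {
            &self.span_diagnostic.cm
        }
    }

    fn main() {
        let sess = ParseSess::new();
        let _cm: &CodeMap = sess.codemap();
    }
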
 
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index d183eb44cc2..27c86956499 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -4150,7 +4150,7 @@ impl<'a> Parser<'a> {
                     outer_attrs: &[ast::Attribute],
                     id_sp: Span)
                     -> (ast::Item_, Vec<ast::Attribute> ) {
-        let mut prefix = Path::new(self.sess.cm.span_to_filename(self.span));
+        let mut prefix = Path::new(self.sess.span_diagnostic.cm.span_to_filename(self.span));
         prefix.pop();
         let mod_path = Path::new(".").join_many(self.mod_path_stack.as_slice());
         let dir_path = prefix.join(&mod_path);