about summary refs log tree commit diff
path: root/src/libsyntax
diff options
context:
space:
mode:
author    John Clements <clements@racket-lang.org>  2013-01-30 09:56:33 -0800
committer John Clements <clements@racket-lang.org>  2013-01-31 23:05:12 -0800
commit    53688addaa4484f1a317bb0938cf453381810427 (patch)
tree      2c025b1dc64129e4651df886918a07af9896a14f /src/libsyntax
parent    e343abd0ed11227425eca16e186367eced39cd82 (diff)
download  rust-53688addaa4484f1a317bb0938cf453381810427.tar.gz
          rust-53688addaa4484f1a317bb0938cf453381810427.zip
test cases, cleanup
Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/ast.rs                   |   6
-rw-r--r--  src/libsyntax/ast_util.rs              |  24
-rw-r--r--  src/libsyntax/attr.rs                  |   2
-rw-r--r--  src/libsyntax/codemap.rs               | 130
-rw-r--r--  src/libsyntax/ext/auto_encode.rs       |  16
-rw-r--r--  src/libsyntax/ext/base.rs              |   3
-rw-r--r--  src/libsyntax/ext/build.rs             |  25
-rw-r--r--  src/libsyntax/ext/deriving.rs          |   4
-rw-r--r--  src/libsyntax/ext/expand.rs            |   4
-rw-r--r--  src/libsyntax/ext/pipes/ast_builder.rs |  13
-rw-r--r--  src/libsyntax/ext/pipes/pipec.rs       |   2
-rw-r--r--  src/libsyntax/ext/tt/macro_parser.rs   |  11
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs    |   5
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs     |   9
-rw-r--r--  src/libsyntax/parse/attr.rs            |   2
-rw-r--r--  src/libsyntax/parse/classify.rs        |   4
-rw-r--r--  src/libsyntax/parse/comments.rs        |   4
-rw-r--r--  src/libsyntax/parse/common.rs          |  11
-rw-r--r--  src/libsyntax/parse/eval.rs            |   5
-rw-r--r--  src/libsyntax/parse/lexer.rs           | 113
-rw-r--r--  src/libsyntax/parse/mod.rs             |   3
-rw-r--r--  src/libsyntax/parse/obsolete.rs        |   3
-rw-r--r--  src/libsyntax/parse/parser.rs          |  56
-rw-r--r--  src/libsyntax/print/pprust.rs          |  13
-rw-r--r--  src/libsyntax/syntax.rc                |   4
-rw-r--r--  src/libsyntax/util/testing.rs          |  24
26 files changed, 318 insertions, 178 deletions
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index 7c947852afd..393020b6643 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -10,7 +10,7 @@
 
 // The Rust abstract syntax tree.
 
-use codemap::{span, FileName};
+use codemap::{span, FileName, spanned};
 
 use core::cast;
 use core::cmp;
@@ -21,10 +21,6 @@ use core::to_bytes;
 use core::to_str::ToStr;
 use std::serialize::{Encodable, Decodable, Encoder, Decoder};
 
-#[auto_encode]
-#[auto_decode]
-pub struct spanned<T> { node: T, span: span }
-
 /* can't import macros yet, so this is copied from token.rs. See its comment
  * there. */
 macro_rules! interner_key (
diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs
index bdea3d6d6f3..26c06f8e05e 100644
--- a/src/libsyntax/ast_util.rs
+++ b/src/libsyntax/ast_util.rs
@@ -13,7 +13,7 @@ use core::prelude::*;
 use ast::*;
 use ast;
 use ast_util;
-use codemap::{span, BytePos};
+use codemap::{span, BytePos, dummy_sp};
 use parse::token;
 use visit;
 
@@ -24,28 +24,6 @@ use core::str;
 use core::to_bytes;
 use core::vec;
 
-pub pure fn spanned<T>(+lo: BytePos, +hi: BytePos, +t: T) -> spanned<T> {
-    respan(mk_sp(lo, hi), move t)
-}
-
-pub pure fn respan<T>(sp: span, +t: T) -> spanned<T> {
-    spanned {node: t, span: sp}
-}
-
-pub pure fn dummy_spanned<T>(+t: T) -> spanned<T> {
-    respan(dummy_sp(), move t)
-}
-
-/* assuming that we're not in macro expansion */
-pub pure fn mk_sp(+lo: BytePos, +hi: BytePos) -> span {
-    span {lo: lo, hi: hi, expn_info: None}
-}
-
-// make this a const, once the compiler supports it
-pub pure fn dummy_sp() -> span { return mk_sp(BytePos(0), BytePos(0)); }
-
-
-
 pub pure fn path_name_i(idents: &[ident], intr: @token::ident_interner)
                      -> ~str {
     // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index d9b22248b14..30bd23d5936 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -13,7 +13,7 @@
 use core::prelude::*;
 
 use ast;
-use ast_util::{spanned, dummy_spanned};
+use codemap::{spanned, dummy_spanned};
 use attr;
 use codemap::BytePos;
 use diagnostic::span_handler;
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index b51327a0dfb..31ed65d8065 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -23,8 +23,6 @@ source code snippets, etc.
 
 use core::prelude::*;
 
-use ast_util;
-
 use core::cmp;
 use core::dvec::DVec;
 use core::str;
@@ -130,6 +128,10 @@ pub struct span {
     expn_info: Option<@ExpnInfo>
 }
 
+#[auto_encode]
+#[auto_decode]
+pub struct spanned<T> { node: T, span: span }
+
 pub impl span : cmp::Eq {
     pure fn eq(&self, other: &span) -> bool {
         return (*self).lo == (*other).lo && (*self).hi == (*other).hi;
@@ -144,10 +146,32 @@ pub impl<S: Encoder> span: Encodable<S> {
 
 pub impl<D: Decoder> span: Decodable<D> {
     static fn decode(_d: &D) -> span {
-        ast_util::dummy_sp()
+        dummy_sp()
     }
 }
 
+pub pure fn spanned<T>(+lo: BytePos, +hi: BytePos, +t: T) -> spanned<T> {
+    respan(mk_sp(lo, hi), move t)
+}
+
+pub pure fn respan<T>(sp: span, +t: T) -> spanned<T> {
+    spanned {node: t, span: sp}
+}
+
+pub pure fn dummy_spanned<T>(+t: T) -> spanned<T> {
+    respan(dummy_sp(), move t)
+}
+
+/* assuming that we're not in macro expansion */
+pub pure fn mk_sp(+lo: BytePos, +hi: BytePos) -> span {
+    span {lo: lo, hi: hi, expn_info: None}
+}
+
+// make this a const, once the compiler supports it
+pub pure fn dummy_sp() -> span { return mk_sp(BytePos(0), BytePos(0)); }
+
+
+
 /// A source code location used for error reporting
 pub struct Loc {
     /// Information about the original source
@@ -158,6 +182,20 @@ pub struct Loc {
     col: CharPos
 }
 
+/// A source code location used as the result of lookup_char_pos_adj
+// Actually, *none* of the clients use the filename *or* file field;
+// perhaps they should just be removed.
+pub struct LocWithOpt {
+    filename: ~str,
+    line: uint,
+    col: CharPos,
+    file: Option<@FileMap>,
+}
+
+// used to be structural records. Better names, anyone?
+pub struct FileMapAndLine {fm: @FileMap, line: uint}
+pub struct FileMapAndBytePos {fm: @FileMap, pos: BytePos}
+
 /// Extra information for tracking macro expansion of spans
 pub enum ExpnInfo {
     ExpandedFrom({call_site: span,
@@ -204,10 +242,19 @@ pub struct FileMap {
 }
 
 pub impl FileMap {
+    // EFFECT: register a start-of-line offset in the
+    // table of line-beginnings.
+    // UNCHECKED INVARIANT: these offsets must be added in the right
+    // order and must be in the right places; there is shared knowledge
+    // about what ends a line between this file and parse.rs
     fn next_line(&self, +pos: BytePos) {
+        // the new charpos must be > the last one (or it's the first one).
+        assert ((self.lines.len() == 0)
+                || (self.lines[self.lines.len() - 1] < pos));
         self.lines.push(pos);
     }
 
+    // get a line from the list of pre-computed line-beginnings
     pub fn get_line(&self, line: int) -> ~str {
         unsafe {
             let begin: BytePos = self.lines[line] - self.start_pos;
@@ -279,27 +326,25 @@ pub impl CodeMap {
         return self.lookup_pos(pos);
     }
 
-    pub fn lookup_char_pos_adj(&self, +pos: BytePos)
-        -> {filename: ~str, line: uint, col: CharPos, file: Option<@FileMap>}
+    pub fn lookup_char_pos_adj(&self, +pos: BytePos) -> LocWithOpt
     {
         let loc = self.lookup_char_pos(pos);
         match (loc.file.substr) {
-            FssNone => {
-                {filename: /* FIXME (#2543) */ copy loc.file.name,
-                 line: loc.line,
-                 col: loc.col,
-                 file: Some(loc.file)}
-            }
-            FssInternal(sp) => {
-                self.lookup_char_pos_adj(
-                    sp.lo + (pos - loc.file.start_pos))
-            }
-            FssExternal(ref eloc) => {
-                {filename: /* FIXME (#2543) */ copy (*eloc).filename,
-                 line: (*eloc).line + loc.line - 1u,
-                 col: if loc.line == 1 {eloc.col + loc.col} else {loc.col},
-                 file: None}
-            }
+            FssNone =>
+            LocWithOpt {
+                filename: /* FIXME (#2543) */ copy loc.file.name,
+                line: loc.line,
+                col: loc.col,
+                file: Some(loc.file)},
+            FssInternal(sp) =>
+            self.lookup_char_pos_adj(
+                sp.lo + (pos - loc.file.start_pos)),
+            FssExternal(ref eloc) =>
+            LocWithOpt {
+                filename: /* FIXME (#2543) */ copy (*eloc).filename,
+                line: (*eloc).line + loc.line - 1u,
+                col: if loc.line == 1 {eloc.col + loc.col} else {loc.col},
+                file: None}
         }
     }
 
@@ -319,7 +364,7 @@ pub impl CodeMap {
     }
 
     pub fn span_to_str(&self, sp: span) -> ~str {
-        if self.files.len() == 0 && sp == ast_util::dummy_sp() {
+        if self.files.len() == 0 && sp == dummy_sp() {
             return ~"no-location";
         }
 
@@ -383,8 +428,7 @@ priv impl CodeMap {
         return a;
     }
 
-    fn lookup_line(&self, +pos: BytePos)
-        -> {fm: @FileMap, line: uint}
+    fn lookup_line(&self, pos: BytePos) -> FileMapAndLine
     {
         let idx = self.lookup_filemap_idx(pos);
         let f = self.files[idx];
@@ -394,11 +438,11 @@ priv impl CodeMap {
             let m = (a + b) / 2u;
             if f.lines[m] > pos { b = m; } else { a = m; }
         }
-        return {fm: f, line: a};
+        return FileMapAndLine {fm: f, line: a};
     }
 
     fn lookup_pos(&self, +pos: BytePos) -> Loc {
-        let {fm: f, line: a} = self.lookup_line(pos);
+        let FileMapAndLine {fm: f, line: a} = self.lookup_line(pos);
         let line = a + 1u; // Line numbers start at 1
         let chpos = self.bytepos_to_local_charpos(pos);
         let linebpos = f.lines[a];
@@ -424,11 +468,11 @@ priv impl CodeMap {
     }
 
     fn lookup_byte_offset(&self, +bpos: BytePos)
-        -> {fm: @FileMap, pos: BytePos} {
+        -> FileMapAndBytePos {
         let idx = self.lookup_filemap_idx(bpos);
         let fm = self.files[idx];
         let offset = bpos - fm.start_pos;
-        return {fm: fm, pos: offset};
+        return FileMapAndBytePos {fm: fm, pos: offset};
     }
 
     // Converts an absolute BytePos to a CharPos relative to the file it is
@@ -458,6 +502,36 @@ priv impl CodeMap {
     }
 }
 
+#[cfg(test)]
+mod test {
+    use super::*;
+    use util::testing::check_equal;
+
+    #[test]
+    fn t1 () {
+        let cm = CodeMap::new();
+        let fm = cm.new_filemap(~"blork.rs",@~"first line.\nsecond line");
+        fm.next_line(BytePos(0));
+        check_equal(&fm.get_line(0),&~"first line.");
+        // TESTING BROKEN BEHAVIOR:
+        fm.next_line(BytePos(10));
+        check_equal(&fm.get_line(1),&~".");
+    }
+
+    #[test]
+    #[should_fail]
+    fn t2 () {
+        let cm = CodeMap::new();
+        let fm = cm.new_filemap(~"blork.rs",@~"first line.\nsecond line");
+        // TESTING *REALLY* BROKEN BEHAVIOR:
+        fm.next_line(BytePos(0));
+        fm.next_line(BytePos(10));
+        fm.next_line(BytePos(2));
+    }
+}
+
+
+
 //
 // Local Variables:
 // mode: rust
diff --git a/src/libsyntax/ext/auto_encode.rs b/src/libsyntax/ext/auto_encode.rs
index e406e09aac0..4dddb66995b 100644
--- a/src/libsyntax/ext/auto_encode.rs
+++ b/src/libsyntax/ext/auto_encode.rs
@@ -311,7 +311,7 @@ priv impl ext_ctxt {
     }
 
     fn stmt(expr: @ast::expr) -> @ast::stmt {
-        @ast::spanned { node: ast::stmt_semi(expr, self.next_id()),
+        @codemap::spanned { node: ast::stmt_semi(expr, self.next_id()),
                        span: expr.span }
     }
 
@@ -322,7 +322,7 @@ priv impl ext_ctxt {
                 self.expr(
                     span,
                     ast::expr_lit(
-                        @ast::spanned { node: ast::lit_str(s),
+                        @codemap::spanned { node: ast::lit_str(s),
                                         span: span})),
                 ast::expr_vstore_uniq))
     }
@@ -331,7 +331,7 @@ priv impl ext_ctxt {
         self.expr(
             span,
             ast::expr_lit(
-                @ast::spanned { node: ast::lit_uint(i as u64, ast::ty_u),
+                @codemap::spanned { node: ast::lit_uint(i as u64, ast::ty_u),
                                 span: span}))
     }
 
@@ -342,7 +342,7 @@ priv impl ext_ctxt {
     }
 
     fn blk(span: span, stmts: ~[@ast::stmt]) -> ast::blk {
-        ast::spanned {
+        codemap::spanned {
             node: ast::blk_ {
                 view_items: ~[],
                 stmts: stmts,
@@ -355,7 +355,7 @@ priv impl ext_ctxt {
     }
 
     fn expr_blk(expr: @ast::expr) -> ast::blk {
-        ast::spanned {
+        codemap::spanned {
             node: ast::blk_ {
                 view_items: ~[],
                 stmts: ~[],
@@ -593,7 +593,7 @@ fn mk_ser_method(
         ident: cx.ident_of(~"encode"),
         attrs: ~[],
         tps: ~[],
-        self_ty: ast::spanned { node: ast::sty_region(ast::m_imm),
+        self_ty: codemap::spanned { node: ast::sty_region(ast::m_imm),
                                 span: span },
         purity: ast::impure_fn,
         decl: ser_decl,
@@ -651,7 +651,7 @@ fn mk_deser_method(
         ident: cx.ident_of(~"decode"),
         attrs: ~[],
         tps: ~[],
-        self_ty: ast::spanned { node: ast::sty_static, span: span },
+        self_ty: codemap::spanned { node: ast::sty_static, span: span },
         purity: ast::impure_fn,
         decl: deser_decl,
         body: deser_body,
@@ -762,7 +762,7 @@ fn mk_struct_deser_impl(
             ]
         );
 
-        ast::spanned {
+        codemap::spanned {
             node: ast::field_ {
                 mutbl: field.mutbl,
                 ident: field.ident,
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index d0974e0654c..a37ce512ac1 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -11,9 +11,8 @@
 use core::prelude::*;
 
 use ast;
-use ast_util::dummy_sp;
 use codemap;
-use codemap::{CodeMap, span, ExpnInfo, ExpandedFrom};
+use codemap::{CodeMap, span, ExpnInfo, ExpandedFrom, dummy_sp};
 use diagnostic::span_handler;
 use ext;
 use parse;
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index 6d44a412742..4d8fd39c960 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -32,7 +32,7 @@ pub fn mk_expr(cx: ext_ctxt,
 }
 
 pub fn mk_lit(cx: ext_ctxt, sp: span, lit: ast::lit_) -> @ast::expr {
-    let sp_lit = @ast::spanned { node: lit, span: sp };
+    let sp_lit = @codemap::spanned { node: lit, span: sp };
     mk_expr(cx, sp, ast::expr_lit(sp_lit))
 }
 pub fn mk_int(cx: ext_ctxt, sp: span, i: int) -> @ast::expr {
@@ -149,7 +149,7 @@ pub fn mk_uniq_str(cx: ext_ctxt, sp: span, s: ~str) -> @ast::expr {
 }
 pub fn mk_field(sp: span, f: &{ident: ast::ident, ex: @ast::expr})
              -> ast::field {
-    ast::spanned {
+    codemap::spanned {
         node: ast::field_ { mutbl: ast::m_imm, ident: f.ident, expr: f.ex },
         span: sp,
     }
@@ -188,7 +188,7 @@ pub fn mk_global_struct_e(cx: ext_ctxt,
 pub fn mk_glob_use(cx: ext_ctxt,
                    sp: span,
                    path: ~[ast::ident]) -> @ast::view_item {
-    let glob = @ast::spanned {
+    let glob = @codemap::spanned {
         node: ast::view_path_glob(mk_raw_path(sp, path), cx.next_id()),
         span: sp,
     };
@@ -209,7 +209,7 @@ pub fn mk_local(cx: ext_ctxt, sp: span, mutbl: bool,
         span: sp,
     };
     let ty = @ast::Ty { id: cx.next_id(), node: ast::ty_infer, span: sp };
-    let local = @ast::spanned {
+    let local = @codemap::spanned {
         node: ast::local_ {
             is_mutbl: mutbl,
             ty: ty,
@@ -219,14 +219,14 @@ pub fn mk_local(cx: ext_ctxt, sp: span, mutbl: bool,
         },
         span: sp,
     };
-    let decl = ast::spanned {node: ast::decl_local(~[local]), span: sp};
-    @ast::spanned { node: ast::stmt_decl(@decl, cx.next_id()), span: sp }
+    let decl = codemap::spanned {node: ast::decl_local(~[local]), span: sp};
+    @codemap::spanned { node: ast::stmt_decl(@decl, cx.next_id()), span: sp }
 }
 pub fn mk_block(cx: ext_ctxt, span: span,
                 view_items: ~[@ast::view_item],
                 stmts: ~[@ast::stmt],
                 expr: Option<@ast::expr>) -> @ast::expr {
-    let blk = ast::spanned {
+    let blk = codemap::spanned {
         node: ast::blk_ {
              view_items: view_items,
              stmts: stmts,
@@ -242,7 +242,7 @@ pub fn mk_block_(cx: ext_ctxt,
                  span: span,
                  +stmts: ~[@ast::stmt])
               -> ast::blk {
-    ast::spanned {
+    codemap::spanned {
         node: ast::blk_ {
             view_items: ~[],
             stmts: stmts,
@@ -257,7 +257,7 @@ pub fn mk_simple_block(cx: ext_ctxt,
                        span: span,
                        expr: @ast::expr)
                     -> ast::blk {
-    ast::spanned {
+    codemap::spanned {
         node: ast::blk_ {
             view_items: ~[],
             stmts: ~[],
@@ -307,13 +307,14 @@ pub fn mk_pat_struct(cx: ext_ctxt,
     mk_pat(cx, span, move pat)
 }
 pub fn mk_bool(cx: ext_ctxt, span: span, value: bool) -> @ast::expr {
-    let lit_expr = ast::expr_lit(@ast::spanned { node: ast::lit_bool(value),
-                                                 span: span });
+    let lit_expr = ast::expr_lit(@codemap::spanned {
+        node: ast::lit_bool(value),
+        span: span });
     build::mk_expr(cx, span, move lit_expr)
 }
 pub fn mk_stmt(cx: ext_ctxt, span: span, expr: @ast::expr) -> @ast::stmt {
     let stmt_ = ast::stmt_semi(expr, cx.next_id());
-    @ast::spanned { node: move stmt_, span: span }
+    @codemap::spanned { node: move stmt_, span: span }
 }
 pub fn mk_ty_path(cx: ext_ctxt,
                   span: span,
diff --git a/src/libsyntax/ext/deriving.rs b/src/libsyntax/ext/deriving.rs
index d542b104e54..8e87357f8b5 100644
--- a/src/libsyntax/ext/deriving.rs
+++ b/src/libsyntax/ext/deriving.rs
@@ -18,12 +18,12 @@ use ast::{TraitTyParamBound, Ty, and, bind_by_ref, binop, deref, enum_def};
 use ast::{enum_variant_kind, expr, expr_match, ident, item, item_};
 use ast::{item_enum, item_impl, item_struct, m_imm, meta_item, method};
 use ast::{named_field, or, pat, pat_ident, pat_wild, public, pure_fn};
-use ast::{re_anon, spanned, stmt, struct_def, struct_variant_kind};
+use ast::{re_anon, stmt, struct_def, struct_variant_kind};
 use ast::{sty_by_ref, sty_region, tuple_variant_kind, ty_nil, ty_param};
 use ast::{ty_param_bound, ty_path, ty_rptr, unnamed_field, variant};
 use ext::base::ext_ctxt;
 use ext::build;
-use codemap::span;
+use codemap::{span, spanned};
 use parse::token::special_idents::clownshoes_extensions;
 
 use core::dvec;
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 0ab94710f40..be768650ef7 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -154,7 +154,7 @@ pub fn expand_item_mac(exts: HashMap<~str, SyntaxExtension>,
                        fld: ast_fold) -> Option<@ast::item> {
 
     let (pth, tts) = match it.node {
-        item_mac(ast::spanned { node: mac_invoc_tt(pth, ref tts), _}) => {
+        item_mac(codemap::spanned { node: mac_invoc_tt(pth, ref tts), _}) => {
             (pth, (*tts))
         }
         _ => cx.span_bug(it.span, ~"invalid item macro invocation")
@@ -234,7 +234,7 @@ pub fn expand_stmt(exts: HashMap<~str, SyntaxExtension>, cx: ext_ctxt,
                 {call_site: sp, callie: {name: *extname, span: exp_sp}}));
             let expanded = match exp(cx, mac.span, tts) {
                 MRExpr(e) =>
-                    @ast::spanned { node: stmt_expr(e, cx.next_id()),
+                    @codemap::spanned { node: stmt_expr(e, cx.next_id()),
                                     span: e.span},
                 MRAny(_,_,stmt_mkr) => stmt_mkr(),
                 _ => cx.span_fatal(
diff --git a/src/libsyntax/ext/pipes/ast_builder.rs b/src/libsyntax/ext/pipes/ast_builder.rs
index 8abca3d97f9..58ab05987a6 100644
--- a/src/libsyntax/ext/pipes/ast_builder.rs
+++ b/src/libsyntax/ext/pipes/ast_builder.rs
@@ -17,10 +17,11 @@ use core::prelude::*;
 
 use ast::{ident, node_id};
 use ast;
-use ast_util::{ident_to_path, respan, dummy_sp};
+use ast_util::{ident_to_path};
 use ast_util;
 use attr;
-use codemap::span;
+use codemap::{span, respan, dummy_sp};
+use codemap;
 use ext::base::{ext_ctxt, mk_ctxt};
 use ext::quote::rt::*;
 
@@ -310,7 +311,7 @@ pub impl ext_ctxt: ext_ctxt_ast_builder {
         // XXX: Total hack: import `core::kinds::Owned` to work around a
         // parser bug whereby `fn f<T: ::kinds::Owned>` doesn't parse.
         let vi = ast::view_item_import(~[
-            @ast::spanned {
+            @codemap::spanned {
                 node: ast::view_path_simple(
                     self.ident_of(~"Owned"),
                     path(
@@ -319,19 +320,19 @@ pub impl ext_ctxt: ext_ctxt_ast_builder {
                             self.ident_of(~"kinds"),
                             self.ident_of(~"Owned")
                         ],
-                        ast_util::dummy_sp()
+                        codemap::dummy_sp()
                     ),
                     ast::type_value_ns,
                     self.next_id()
                 ),
-                span: ast_util::dummy_sp()
+                span: codemap::dummy_sp()
             }
         ]);
         let vi = @ast::view_item {
             node: vi,
             attrs: ~[],
             vis: ast::private,
-            span: ast_util::dummy_sp()
+            span: codemap::dummy_sp()
         };
 
         self.item(
diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs
index 928885cf817..8c73dcfc04f 100644
--- a/src/libsyntax/ext/pipes/pipec.rs
+++ b/src/libsyntax/ext/pipes/pipec.rs
@@ -11,7 +11,7 @@
 // A protocol compiler for Rust.
 
 use ast::ident;
-use ast_util::dummy_sp;
+use codemap::dummy_sp;
 use ext::base::ext_ctxt;
 use ext::pipes::ast_builder::{append_types, ext_ctxt_ast_builder, path};
 use ext::pipes::ast_builder::{path_global};
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 1c7a12a4282..a31af36cc54 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -10,8 +10,7 @@
 
 // Earley-like parser for macros.
 use ast::{matcher, match_tok, match_seq, match_nonterminal, ident};
-use ast_util::mk_sp;
-use codemap::BytePos;
+use codemap::{BytePos, mk_sp};
 use codemap;
 use parse::common::*; //resolve bug?
 use parse::lexer::*; //resolve bug?
@@ -189,13 +188,13 @@ pub fn nameize(p_s: parse_sess, ms: ~[matcher], res: ~[@named_match])
     fn n_rec(p_s: parse_sess, m: matcher, res: ~[@named_match],
              ret_val: HashMap<ident, @named_match>) {
         match m {
-          ast::spanned {node: match_tok(_), _} => (),
-          ast::spanned {node: match_seq(ref more_ms, _, _, _, _), _} => {
+          codemap::spanned {node: match_tok(_), _} => (),
+          codemap::spanned {node: match_seq(ref more_ms, _, _, _, _), _} => {
             for (*more_ms).each() |next_m| {
                 n_rec(p_s, *next_m, res, ret_val)
             };
           }
-          ast::spanned {
+          codemap::spanned {
                 node: match_nonterminal(bind_name, _, idx), span: sp
           } => {
             if ret_val.contains_key(bind_name) {
@@ -239,7 +238,7 @@ pub fn parse(sess: parse_sess,
         let mut next_eis = ~[]; // or proceed normally
         let mut eof_eis = ~[];
 
-        let {tok: tok, sp: sp} = rdr.peek();
+        let TokenAndSpan {tok: tok, sp: sp} = rdr.peek();
 
         /* we append new items to this while we go */
         while cur_eis.len() > 0u { /* for each Earley Item */
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index f7ef79db466..115cb4f5f94 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -13,8 +13,7 @@ use core::prelude::*;
 use ast::{ident, matcher_, matcher, match_tok, match_nonterminal, match_seq};
 use ast::{tt_delim};
 use ast;
-use ast_util::dummy_sp;
-use codemap::span;
+use codemap::{span, spanned, dummy_sp};
 use ext::base::{ext_ctxt, MacResult, MRAny, MRDef, MacroDef, NormalTT};
 use ext::base;
 use ext::tt::macro_parser::{error};
@@ -33,7 +32,7 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
                          arg: ~[ast::token_tree]) -> base::MacResult {
     // these spans won't matter, anyways
     fn ms(m: matcher_) -> matcher {
-        ast::spanned { node: m, span: dummy_sp() }
+        spanned { node: m, span: dummy_sp() }
     }
 
     let lhs_nm =  cx.parse_sess().interner.gensym(@~"lhs");
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index b3b1e04976a..504a17237dc 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -13,10 +13,11 @@ use core::prelude::*;
 use ast;
 use ast::{token_tree, tt_delim, tt_tok, tt_seq, tt_nonterminal,ident};
 use ast_util;
-use codemap::span;
+use codemap::{span, dummy_sp};
 use diagnostic::span_handler;
 use ext::tt::macro_parser::{named_match, matched_seq, matched_nonterminal};
 use parse::token::{EOF, INTERPOLATED, IDENT, Token, nt_ident, ident_interner};
+use parse::lexer::TokenAndSpan;
 
 use core::option;
 use core::vec;
@@ -69,7 +70,7 @@ pub fn new_tt_reader(sp_diag: span_handler, itr: @ident_interner,
               mut repeat_len: ~[],
               /* dummy values, never read: */
               mut cur_tok: EOF,
-              mut cur_span: ast_util::dummy_sp()
+              mut cur_span: dummy_sp()
              };
     tt_next_token(r); /* get cur_tok and cur_span set up */
     return r;
@@ -149,8 +150,8 @@ fn lockstep_iter_size(t: token_tree, r: tt_reader) -> lis {
 }
 
 
-pub fn tt_next_token(&&r: tt_reader) -> {tok: Token, sp: span} {
-    let ret_val = { tok: r.cur_tok, sp: r.cur_span };
+pub fn tt_next_token(&&r: tt_reader) -> TokenAndSpan {
+    let ret_val = TokenAndSpan { tok: r.cur_tok, sp: r.cur_span };
     while r.cur.idx >= r.cur.readme.len() {
         /* done with this set; pop or repeat? */
         if ! r.cur.dotdotdoted
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 3ed31c0953c..e6ba543cf79 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -11,7 +11,7 @@
 use core::prelude::*;
 
 use ast;
-use ast_util::spanned;
+use codemap::spanned;
 use codemap::BytePos;
 use parse::common::*; //resolve bug?
 use parse::token;
diff --git a/src/libsyntax/parse/classify.rs b/src/libsyntax/parse/classify.rs
index afe6823e76d..4ceb04c55d0 100644
--- a/src/libsyntax/parse/classify.rs
+++ b/src/libsyntax/parse/classify.rs
@@ -13,6 +13,7 @@
  */
 
 use ast;
+use codemap;
 use ast_util::operator_prec;
 
 pub fn expr_requires_semi_to_be_stmt(e: @ast::expr) -> bool {
@@ -31,7 +32,8 @@ pub fn expr_requires_semi_to_be_stmt(e: @ast::expr) -> bool {
 pub fn expr_is_simple_block(e: @ast::expr) -> bool {
     match e.node {
         ast::expr_block(
-            ast::spanned { node: ast::blk_ { rules: ast::default_blk, _ }, _ }
+            codemap::spanned {
+                node: ast::blk_ { rules: ast::default_blk, _ }, _ }
         ) => true,
       _ => false
     }
diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs
index 8ed10fb138d..d7640ce3a23 100644
--- a/src/libsyntax/parse/comments.rs
+++ b/src/libsyntax/parse/comments.rs
@@ -14,7 +14,7 @@ use ast;
 use codemap::{BytePos, CharPos, CodeMap, FileMap, Pos};
 use diagnostic;
 use parse::lexer::{is_whitespace, get_str_from, reader};
-use parse::lexer::{string_reader, bump, is_eof, nextch};
+use parse::lexer::{string_reader, bump, is_eof, nextch, TokenAndSpan};
 use parse::lexer;
 use parse::token;
 use parse;
@@ -334,7 +334,7 @@ pub fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
         let bstart = rdr.pos;
         rdr.next_token();
         //discard, and look ahead; we're working with internal state
-        let {tok: tok, sp: sp} = rdr.peek();
+        let TokenAndSpan {tok: tok, sp: sp} = rdr.peek();
         if token::is_lit(tok) {
             let s = get_str_from(rdr, bstart);
             literals.push({lit: s, pos: sp.lo});
diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs
index 583ad982000..48ef6b873ec 100644
--- a/src/libsyntax/parse/common.rs
+++ b/src/libsyntax/parse/common.rs
@@ -11,8 +11,7 @@
 use core::prelude::*;
 
 use ast;
-use ast_util::spanned;
-use codemap::BytePos;
+use codemap::{BytePos, spanned};
 use parse::lexer::reader;
 use parse::parser::Parser;
 use parse::token;
@@ -190,7 +189,9 @@ pub impl Parser {
         if self.token == token::GT {
             self.bump();
         } else if self.token == token::BINOP(token::SHR) {
-            self.swap(token::GT, self.span.lo + BytePos(1u), self.span.hi);
+            self.replace_token(token::GT,
+                               self.span.lo + BytePos(1u),
+                               self.span.hi);
         } else {
             let mut s: ~str = ~"expected `";
             s += token_to_str(self.reader, token::GT);
@@ -229,7 +230,7 @@ pub impl Parser {
     }
 
     fn parse_seq_lt_gt<T: Copy>(sep: Option<token::Token>,
-                                f: fn(Parser) -> T) -> ast::spanned<~[T]> {
+                                f: fn(Parser) -> T) -> spanned<~[T]> {
         let lo = self.span.lo;
         self.expect(token::LT);
         let result = self.parse_seq_to_before_gt::<T>(sep, f);
@@ -277,7 +278,7 @@ pub impl Parser {
     // NB: Do not use this function unless you actually plan to place the
     // spanned list in the AST.
     fn parse_seq<T: Copy>(bra: token::Token, ket: token::Token, sep: seq_sep,
-                          f: fn(Parser) -> T) -> ast::spanned<~[T]> {
+                          f: fn(Parser) -> T) -> spanned<~[T]> {
         let lo = self.span.lo;
         self.expect(bra);
         let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
diff --git a/src/libsyntax/parse/eval.rs b/src/libsyntax/parse/eval.rs
index caab03afb76..5decb2351e3 100644
--- a/src/libsyntax/parse/eval.rs
+++ b/src/libsyntax/parse/eval.rs
@@ -10,8 +10,7 @@
 
 use parser::Parser;
 use attr::parser_attr;
-use ast_util::mk_sp;
-use codemap::span;
+use codemap::{span, mk_sp};
 
 type ctx =
     @{sess: parse::parse_sess,
@@ -75,7 +74,7 @@ fn parse_companion_mod(cx: ctx, prefix: &Path, suffix: &Option<Path>)
         // XXX: Using a dummy span, but this code will go away soon
         let p0 = new_sub_parser_from_file(cx.sess, cx.cfg,
                                           modpath,
-                                          ast_util::dummy_sp());
+                                          codemap::dummy_sp());
         let inner_attrs = p0.parse_inner_attrs_and_next();
         let m0 = p0.parse_mod_items(token::EOF, inner_attrs.next);
         return (m0.view_items, m0.items, inner_attrs.inner);
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 65fc86a106a..71e96699c3d 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -30,14 +30,17 @@ use std;
 
 pub trait reader {
     fn is_eof() -> bool;
-    fn next_token() -> {tok: token::Token, sp: span};
+    fn next_token() -> TokenAndSpan;
     fn fatal(~str) -> !;
     fn span_diag() -> span_handler;
     pure fn interner() -> @token::ident_interner;
-    fn peek() -> {tok: token::Token, sp: span};
+    fn peek() -> TokenAndSpan;
     fn dup() -> reader;
 }
 
+#[deriving_eq]
+pub struct TokenAndSpan {tok: token::Token, sp: span}
+
 pub type string_reader = @{
     span_diagnostic: span_handler,
     src: @~str,
@@ -79,11 +82,14 @@ pub fn new_low_level_string_reader(span_diagnostic: span_handler,
               filemap: filemap, interner: itr,
               /* dummy values; not read */
               mut peek_tok: token::EOF,
-              mut peek_span: ast_util::dummy_sp()};
+              mut peek_span: codemap::dummy_sp()};
     bump(r);
     return r;
 }
 
+// duplicating the string reader is probably a bad idea, in
+// that using them will cause interleaved pushes of line
+// offsets to the underlying filemap...
 fn dup_string_reader(&&r: string_reader) -> string_reader {
     @{span_diagnostic: r.span_diagnostic, src: r.src,
       mut pos: r.pos,
@@ -95,8 +101,9 @@ fn dup_string_reader(&&r: string_reader) -> string_reader {
 
 impl string_reader: reader {
     fn is_eof() -> bool { is_eof(self) }
-    fn next_token() -> {tok: token::Token, sp: span} {
-        let ret_val = {tok: self.peek_tok, sp: self.peek_span};
+    // return the next token. EFFECT: advances the string_reader.
+    fn next_token() -> TokenAndSpan {
+        let ret_val = TokenAndSpan {tok: self.peek_tok, sp: self.peek_span};
         string_advance_token(self);
         return ret_val;
     }
@@ -105,15 +112,15 @@ impl string_reader: reader {
     }
     fn span_diag() -> span_handler { self.span_diagnostic }
     pure fn interner() -> @token::ident_interner { self.interner }
-    fn peek() -> {tok: token::Token, sp: span} {
-        {tok: self.peek_tok, sp: self.peek_span}
+    fn peek() -> TokenAndSpan {
+        TokenAndSpan {tok: self.peek_tok, sp: self.peek_span}
     }
     fn dup() -> reader { dup_string_reader(self) as reader }
 }
 
 pub impl tt_reader: reader {
     fn is_eof() -> bool { self.cur_tok == token::EOF }
-    fn next_token() -> {tok: token::Token, sp: span} {
+    fn next_token() -> TokenAndSpan {
         /* weird resolve bug: if the following `if`, or any of its
         statements are removed, we get resolution errors */
         if false {
@@ -127,27 +134,29 @@ pub impl tt_reader: reader {
     }
     fn span_diag() -> span_handler { self.sp_diag }
     pure fn interner() -> @token::ident_interner { self.interner }
-    fn peek() -> {tok: token::Token, sp: span} {
-        { tok: self.cur_tok, sp: self.cur_span }
+    fn peek() -> TokenAndSpan {
+        TokenAndSpan { tok: self.cur_tok, sp: self.cur_span }
     }
     fn dup() -> reader { dup_tt_reader(self) as reader }
 }
 
+// EFFECT: advance peek_tok and peek_span to refer to the next token.
 fn string_advance_token(&&r: string_reader) {
-    for consume_whitespace_and_comments(r).each |comment| {
-        r.peek_tok = comment.tok;
-        r.peek_span = comment.sp;
-        return;
+    match (consume_whitespace_and_comments(r)) {
+        Some(comment) => {
+            r.peek_tok = comment.tok;
+            r.peek_span = comment.sp;
+        },
+        None => {
+            if is_eof(r) {
+                r.peek_tok = token::EOF;
+            } else {
+                let start_bytepos = r.last_pos;
+                r.peek_tok = next_token_inner(r);
+                r.peek_span = codemap::mk_sp(start_bytepos, r.last_pos);
+            };
+        }
     }
-
-    if is_eof(r) {
-        r.peek_tok = token::EOF;
-    } else {
-        let start_bytepos = r.last_pos;
-        r.peek_tok = next_token_inner(r);
-        r.peek_span = ast_util::mk_sp(start_bytepos, r.last_pos);
-    };
-
 }
 
 fn byte_offset(rdr: string_reader) -> BytePos {
@@ -163,6 +172,8 @@ pub fn get_str_from(rdr: string_reader, start: BytePos) -> ~str {
     }
 }
 
+// EFFECT: advance the StringReader by one character. If a newline is
+// discovered, add it to the FileMap's list of line start offsets.
 pub fn bump(rdr: string_reader) {
     rdr.last_pos = rdr.pos;
     let current_byte_offset = byte_offset(rdr).to_uint();;
@@ -233,16 +244,19 @@ fn is_hex_digit(c: char) -> bool {
 
 fn is_bin_digit(c: char) -> bool { return c == '0' || c == '1'; }
 
-// might return a sugared-doc-attr
+// EFFECT: eats whitespace and comments.
+// returns a Some(sugared-doc-attr) if one exists, None otherwise.
 fn consume_whitespace_and_comments(rdr: string_reader)
-                                -> Option<{tok: token::Token, sp: span}> {
+    -> Option<TokenAndSpan> {
     while is_whitespace(rdr.curr) { bump(rdr); }
     return consume_any_line_comment(rdr);
 }
 
-// might return a sugared-doc-attr
+// PRECONDITION: rdr.curr is not whitespace
+// EFFECT: eats any kind of comment.
+// returns a Some(sugared-doc-attr) if one exists, None otherwise
 fn consume_any_line_comment(rdr: string_reader)
-                                -> Option<{tok: token::Token, sp: span}> {
+                                -> Option<TokenAndSpan> {
     if rdr.curr == '/' {
         match nextch(rdr) {
           '/' => {
@@ -256,9 +270,9 @@ fn consume_any_line_comment(rdr: string_reader)
                     str::push_char(&mut acc, rdr.curr);
                     bump(rdr);
                 }
-                return Some({
+                return Some(TokenAndSpan{
                     tok: token::DOC_COMMENT(rdr.interner.intern(@acc)),
-                    sp: ast_util::mk_sp(start_bpos, rdr.pos)
+                    sp: codemap::mk_sp(start_bpos, rdr.pos)
                 });
             } else {
                 while rdr.curr != '\n' && !is_eof(rdr) { bump(rdr); }
@@ -285,7 +299,7 @@ fn consume_any_line_comment(rdr: string_reader)
 
 // might return a sugared-doc-attr
 fn consume_block_comment(rdr: string_reader)
-                                -> Option<{tok: token::Token, sp: span}> {
+                                -> Option<TokenAndSpan> {
 
     // block comments starting with "/**" or "/*!" are doc-comments
     if rdr.curr == '*' || rdr.curr == '!' {
@@ -301,9 +315,9 @@ fn consume_block_comment(rdr: string_reader)
             acc += ~"*/";
             bump(rdr);
             bump(rdr);
-            return Some({
+            return Some(TokenAndSpan{
                 tok: token::DOC_COMMENT(rdr.interner.intern(@acc)),
-                sp: ast_util::mk_sp(start_bpos, rdr.pos)
+                sp: codemap::mk_sp(start_bpos, rdr.pos)
             });
         }
     } else {
@@ -702,6 +716,41 @@ fn consume_whitespace(rdr: string_reader) {
     while is_whitespace(rdr.curr) && !is_eof(rdr) { bump(rdr); }
 }
 
+#[cfg(test)]
+pub mod test {
+
+    use super::*;
+    use util::interner;
+    use diagnostic;
+    use util::testing::{check_equal, check_equal_ptr};
+    #[test] fn t1 () {
+        let teststr =
+            @~"/* my source file */
+fn main() { io::println(~\"zebra\"); }\n";
+        let cm = CodeMap::new();
+        let fm = cm.new_filemap(~"zebra.rs",teststr);
+        let ident_interner = token::mk_ident_interner(); // interner::mk();
+        let id = ident_interner.intern(@~"fn");
+        let span_handler =
+            diagnostic::mk_span_handler(diagnostic::mk_handler(None),@cm);
+        let string_reader = new_string_reader(span_handler,fm,ident_interner);
+        let tok1 = string_reader.next_token();
+        let tok2 = TokenAndSpan{
+            tok:token::IDENT(id, false),
+            sp:span {lo:BytePos(21),hi:BytePos(23),expn_info: None}};
+        check_equal (tok1,tok2);
+        // the 'main' id is already read:
+        check_equal (string_reader.last_pos,BytePos(28));
+        // read another token:
+        let tok3 = string_reader.next_token();
+        let tok4 = TokenAndSpan{
+            tok:token::IDENT(ident_interner.intern (@~"main"), false),
+            sp:span {lo:BytePos(24),hi:BytePos(28),expn_info: None}};
+        check_equal (tok3,tok4);
+        // the lparen is already read:
+        check_equal (string_reader.last_pos,BytePos(29))
+    }
+}
 
 //
 // Local Variables:
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index f9088bfd635..6169233c1b7 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -10,6 +10,7 @@
 
 //! The main parser interface
 
+
 use ast::node_id;
 use ast;
 use codemap::{span, CodeMap, FileMap, CharPos, BytePos};
@@ -33,6 +34,7 @@ pub mod token;
 pub mod comments;
 pub mod attr;
 
+
 /// Common routines shared by parser mods
 pub mod common;
 
@@ -215,3 +217,4 @@ pub fn new_parser_from_tts(sess: parse_sess, cfg: ast::crate_cfg,
                                     None, tts);
     return Parser(sess, cfg, trdr as reader)
 }
+
diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs
index 7dad15f9dfc..0c7a202dcd6 100644
--- a/src/libsyntax/parse/obsolete.rs
+++ b/src/libsyntax/parse/obsolete.rs
@@ -21,8 +21,7 @@ use core::prelude::*;
 
 use ast::{expr, expr_lit, lit_nil};
 use ast;
-use ast_util::{respan};
-use codemap::span;
+use codemap::{span, respan};
 use parse::parser::Parser;
 use parse::token::Token;
 use parse::token;
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 51acf76ac30..2917ce43358 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -55,15 +55,16 @@ use ast::{view_path, view_path_glob, view_path_list, view_path_simple};
 use ast::{visibility, vstore, vstore_box, vstore_fixed, vstore_slice};
 use ast::{vstore_uniq};
 use ast;
-use ast_util::{spanned, respan, mk_sp, ident_to_path, operator_prec};
+use ast_util::{ident_to_path, operator_prec};
 use ast_util;
 use classify;
-use codemap::{span,FssNone, BytePos};
+use codemap::{span,FssNone, BytePos, spanned, respan, mk_sp};
 use codemap;
 use parse::attr::parser_attr;
 use parse::common::{seq_sep_none, token_to_str};
 use parse::common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed};
 use parse::lexer::reader;
+use parse::lexer::TokenAndSpan;
 use parse::obsolete::{ObsoleteClassTraits, ObsoleteModeInFnType};
 use parse::obsolete::{ObsoleteLet, ObsoleteFieldTerminator};
 use parse::obsolete::{ObsoleteMoveInit, ObsoleteBinaryMove};
@@ -193,7 +194,7 @@ pub fn Parser(sess: parse_sess,
         token: tok0.tok,
         span: span0,
         last_span: span0,
-        buffer: [mut {tok: tok0.tok, sp: span0}, ..4],
+        buffer: [mut TokenAndSpan {tok: tok0.tok, sp: span0}, ..4],
         buffer_start: 0,
         buffer_end: 0,
         tokens_consumed: 0u,
@@ -213,7 +214,7 @@ pub struct Parser {
     mut token: token::Token,
     mut span: span,
     mut last_span: span,
-    mut buffer: [mut {tok: token::Token, sp: span} * 4],
+    mut buffer: [mut TokenAndSpan * 4],
     mut buffer_start: int,
     mut buffer_end: int,
     mut tokens_consumed: uint,
@@ -234,6 +235,7 @@ pub struct Parser {
 }
 
 pub impl Parser {
+    // advance the parser by one token
     fn bump() {
         self.last_span = self.span;
         let next = if self.buffer_start == self.buffer_end {
@@ -247,7 +249,8 @@ pub impl Parser {
         self.span = next.sp;
         self.tokens_consumed += 1u;
     }
-    fn swap(next: token::Token, +lo: BytePos, +hi: BytePos) {
+    // EFFECT: replace the current token and span with the given one
+    fn replace_token(next: token::Token, +lo: BytePos, +hi: BytePos) {
         self.token = next;
         self.span = mk_sp(lo, hi);
     }
@@ -800,7 +803,7 @@ pub impl Parser {
             self.bump();
             self.lit_from_token(tok)
         };
-        ast::spanned { node: lit, span: mk_sp(lo, self.last_span.hi) }
+        codemap::spanned { node: lit, span: mk_sp(lo, self.last_span.hi) }
     }
 
     fn parse_path_without_tps() -> @path {
@@ -875,7 +878,7 @@ pub impl Parser {
                 self.parse_seq_lt_gt(Some(token::COMMA),
                                      |p| p.parse_ty(false))
             } else {
-                ast::spanned {node: ~[], span: path.span}
+                codemap::spanned {node: ~[], span: path.span}
             }
         };
 
@@ -917,14 +920,14 @@ pub impl Parser {
         @expr {
             id: self.get_id(),
             callee_id: self.get_id(),
-            node: expr_mac(ast::spanned {node: m, span: mk_sp(lo, hi)}),
+            node: expr_mac(codemap::spanned {node: m, span: mk_sp(lo, hi)}),
             span: mk_sp(lo, hi),
         }
     }
 
     fn mk_lit_u32(i: u32) -> @expr {
         let span = self.span;
-        let lv_lit = @ast::spanned { node: lit_uint(i as u64, ty_u32),
+        let lv_lit = @codemap::spanned { node: lit_uint(i as u64, ty_u32),
                                      span: span };
 
         @expr {
@@ -1404,7 +1407,7 @@ pub impl Parser {
                 hi = e.span.hi;
                 // HACK: turn &[...] into a &-evec
                 ex = match e.node {
-                  expr_vec(*) | expr_lit(@ast::spanned {
+                  expr_vec(*) | expr_lit(@codemap::spanned {
                     node: lit_str(_), span: _
                   })
                   if m == m_imm => {
@@ -1429,7 +1432,7 @@ pub impl Parser {
               expr_vec(*) if m == m_mutbl =>
                 expr_vstore(e, expr_vstore_mut_box),
               expr_vec(*) if m == m_imm => expr_vstore(e, expr_vstore_box),
-              expr_lit(@ast::spanned {
+              expr_lit(@codemap::spanned {
                   node: lit_str(_), span: _}) if m == m_imm =>
                 expr_vstore(e, expr_vstore_box),
               _ => expr_unary(box(m), e)
@@ -1442,7 +1445,7 @@ pub impl Parser {
             hi = e.span.hi;
             // HACK: turn ~[...] into a ~-evec
             ex = match e.node {
-              expr_vec(*) | expr_lit(@ast::spanned {
+              expr_vec(*) | expr_lit(@codemap::spanned {
                 node: lit_str(_), span: _})
               if m == m_imm => expr_vstore(e, expr_vstore_uniq),
               _ => expr_unary(uniq(m), e)
@@ -1496,6 +1499,7 @@ pub impl Parser {
         return lhs;
     }
 
+    // parse an arbitrary expression.
     fn parse_assign_expr() -> @expr {
         let lo = self.span.lo;
         let lhs = self.parse_binops();
@@ -1794,7 +1798,7 @@ pub impl Parser {
                 self.eat(token::COMMA);
             }
 
-            let blk = ast::spanned {
+            let blk = codemap::spanned {
                 node: ast::blk_ {
                     view_items: ~[],
                     stmts: ~[],
@@ -1812,10 +1816,12 @@ pub impl Parser {
         return self.mk_expr(lo, hi, expr_match(discriminant, arms));
     }
 
+    // parse an expression
     fn parse_expr() -> @expr {
         return self.parse_expr_res(UNRESTRICTED);
     }
 
+    // parse an expression, subject to the given restriction
     fn parse_expr_res(r: restriction) -> @expr {
         let old = self.restriction;
         self.restriction = r;
@@ -1943,7 +1949,9 @@ pub impl Parser {
             // HACK: parse @"..." as a literal of a vstore @str
             pat = match sub.node {
               pat_lit(e@@expr {
-                node: expr_lit(@ast::spanned {node: lit_str(_), span: _}), _
+                node: expr_lit(@codemap::spanned {
+                    node: lit_str(_),
+                    span: _}), _
               }) => {
                 let vst = @expr {
                     id: self.get_id(),
@@ -1963,7 +1971,9 @@ pub impl Parser {
             // HACK: parse ~"..." as a literal of a vstore ~str
             pat = match sub.node {
               pat_lit(e@@expr {
-                node: expr_lit(@ast::spanned {node: lit_str(_), span: _}), _
+                node: expr_lit(@codemap::spanned {
+                    node: lit_str(_),
+                    span: _}), _
               }) => {
                 let vst = @expr {
                     id: self.get_id(),
@@ -1985,7 +1995,7 @@ pub impl Parser {
               // HACK: parse &"..." as a literal of a borrowed str
               pat = match sub.node {
                   pat_lit(e@@expr {
-                      node: expr_lit(@ast::spanned {
+                      node: expr_lit(@codemap::spanned {
                             node: lit_str(_), span: _}), _
                   }) => {
                       let vst = @expr {
@@ -2011,7 +2021,9 @@ pub impl Parser {
             if self.token == token::RPAREN {
                 hi = self.span.hi;
                 self.bump();
-                let lit = @ast::spanned {node: lit_nil, span: mk_sp(lo, hi)};
+                let lit = @codemap::spanned {
+                    node: lit_nil,
+                    span: mk_sp(lo, hi)};
                 let expr = self.mk_expr(lo, hi, expr_lit(lit));
                 pat = pat_lit(expr);
             } else {
@@ -2381,7 +2393,7 @@ pub impl Parser {
                             match self.token {
                                 token::SEMI => {
                                     self.bump();
-                                    stmts.push(@ast::spanned {
+                                    stmts.push(@codemap::spanned {
                                         node: stmt_semi(e, stmt_id),
                                         .. *stmt});
                                 }
@@ -2406,7 +2418,7 @@ pub impl Parser {
                             match self.token {
                                 token::SEMI => {
                                     self.bump();
-                                    stmts.push(@ast::spanned {
+                                    stmts.push(@codemap::spanned {
                                         node: stmt_mac((*m), true),
                                         .. *stmt});
                                 }
@@ -2940,7 +2952,7 @@ pub impl Parser {
 
         let actual_dtor = do the_dtor.map |dtor| {
             let (d_body, d_attrs, d_s) = *dtor;
-            ast::spanned { node: ast::struct_dtor_ { id: self.get_id(),
+            codemap::spanned { node: ast::struct_dtor_ { id: self.get_id(),
                                                      attrs: d_attrs,
                                                      self_id: self.get_id(),
                                                      body: d_body},
@@ -3445,7 +3457,7 @@ pub impl Parser {
         self.bump();
         let mut actual_dtor = do the_dtor.map |dtor| {
             let (d_body, d_attrs, d_s) = *dtor;
-            ast::spanned { node: ast::struct_dtor_ { id: self.get_id(),
+            codemap::spanned { node: ast::struct_dtor_ { id: self.get_id(),
                                                      attrs: d_attrs,
                                                      self_id: self.get_id(),
                                                      body: d_body },
@@ -3737,7 +3749,7 @@ pub impl Parser {
               _ => self.fatal(~"expected open delimiter")
             };
             let m = ast::mac_invoc_tt(pth, tts);
-            let m: ast::mac = ast::spanned { node: m,
+            let m: ast::mac = codemap::spanned { node: m,
                                              span: mk_sp(self.span.lo,
                                                          self.span.hi) };
             let item_ = item_mac(m);
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 72fd205fee9..1b5c93984ee 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -575,7 +575,7 @@ pub fn print_item(s: ps, &&item: @ast::item) {
         }
         bclose(s, item.span);
       }
-      ast::item_mac(ast::spanned { node: ast::mac_invoc_tt(pth, ref tts),
+      ast::item_mac(codemap::spanned { node: ast::mac_invoc_tt(pth, ref tts),
                                    _}) => {
         print_visibility(s, item.vis);
         print_path(s, pth, false);
@@ -2241,6 +2241,7 @@ pub mod test {
     use parse;
     use super::*;
     //use util;
+    use util::testing::check_equal;
 
     fn string_check<T : Eq> (given : &T, expected: &T) {
         if !(given == expected) {
@@ -2257,11 +2258,11 @@ pub mod test {
             inputs: ~[],
             output: @ast::Ty {id: 0,
                               node: ast::ty_nil,
-                              span: ast_util::dummy_sp()},
+                              span: codemap::dummy_sp()},
             cf: ast::return_val
         };
-        assert fun_to_str(decl, abba_ident, ~[],mock_interner)
-            == ~"fn abba()";
+        check_equal (&fun_to_str(decl, abba_ident, ~[],mock_interner),
+                     &~"fn abba()");
     }
 
     #[test]
@@ -2269,7 +2270,7 @@ pub mod test {
         let mock_interner = parse::token::mk_fake_ident_interner();
         let ident = mock_interner.intern(@~"principal_skinner");
 
-        let var = ast_util::respan(ast_util::dummy_sp(), ast::variant_ {
+        let var = codemap::respan(codemap::dummy_sp(), ast::variant_ {
             name: ident,
             attrs: ~[],
             // making this up as I go.... ?
@@ -2280,7 +2281,7 @@ pub mod test {
         });
 
         let varstr = variant_to_str(var,mock_interner);
-        string_check(&varstr,&~"pub principal_skinner");
+        check_equal(&varstr,&~"pub principal_skinner");
     }
 }
 
diff --git a/src/libsyntax/syntax.rc b/src/libsyntax/syntax.rc
index dc5964e6a12..a6d50b9cf09 100644
--- a/src/libsyntax/syntax.rc
+++ b/src/libsyntax/syntax.rc
@@ -46,10 +46,11 @@ pub mod ast_map;
 pub mod visit;
 pub mod fold;
 pub mod util {
-    #[path = "interner.rs"]
     pub mod interner;
+    pub mod testing;
 }
 
+
 #[path = "parse/mod.rs"]
 pub mod parse;
 
@@ -86,3 +87,4 @@ pub mod ext {
 
     pub mod trace_macros;
 }
+
diff --git a/src/libsyntax/util/testing.rs b/src/libsyntax/util/testing.rs
new file mode 100644
index 00000000000..d70e503a568
--- /dev/null
+++ b/src/libsyntax/util/testing.rs
@@ -0,0 +1,24 @@
+// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// support for test cases.
+use core::cmp;
+
+pub pure fn check_equal_ptr<T : cmp::Eq> (given : &T, expected: &T) {
+    if !((given == expected) && (expected == given )) {
+        fail (fmt!("given %?, expected %?",given,expected));
+    }
+}
+
+pub pure fn check_equal<T : cmp::Eq> (given : T, expected: T) {
+    if !((given == expected) && (expected == given )) {
+        fail (fmt!("given %?, expected %?",given,expected));
+    }
+}