author    Marijn Haverbeke <marijnh@gmail.com>  2012-01-13 09:56:53 +0100
committer Marijn Haverbeke <marijnh@gmail.com>  2012-01-13 11:50:53 +0100
commit    7f6294455963334fec69fc799442ae74ef65b35e
tree      8c5b7ab38389dc6e4afcbfe4943bc432c6192846  /src/comp/syntax/parse/parser.rs
parent    0616cba62be78082f10f6673d45ba4d94da423dc
Convert the objects used in the lexer and parser to records + impls
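
The parser type changes from an obj exposing some two dozen accessor methods
to a boxed record (@{...}) with mutable token, span, last_span, buffer and
restriction fields, plus an impl that keeps only the methods that do real
work: bump, swap, look_ahead, fatal, span_fatal, warn, get_str and get_id.
Call sites switch from accessor calls to plain field access: p.peek() becomes
p.token, p.get_lo_pos() becomes p.span.lo, p.get_reader() becomes p.reader,
and p.restrict(r)/p.get_restriction() become ordinary writes and reads of
p.restriction. The lexer reader gets the same treatment (rdr.get_chpos()
becomes rdr.chpos), though most of those hunks live outside this file.

As a rough sketch of the pattern, condensed from the expect() hunk below
(the real function also reports the token it actually found):

    // before: all parser state is reached through obj accessor methods
    fn expect(p: parser, t: token::token) {
        if p.peek() == t { p.bump(); }
        else { p.fatal("expecting '" + token::to_str(p.get_reader(), t) + "'"); }
    }

    // after: the parser is a record, so the token and reader are ordinary fields
    fn expect(p: parser, t: token::token) {
        if p.token == t { p.bump(); }
        else { p.fatal("expecting '" + token::to_str(p.reader, t) + "'"); }
    }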
Diffstat (limited to 'src/comp/syntax/parse/parser.rs')
-rw-r--r--  src/comp/syntax/parse/parser.rs  | 654
1 file changed, 311 insertions(+), 343 deletions(-)
diff --git a/src/comp/syntax/parse/parser.rs b/src/comp/syntax/parse/parser.rs
index 4e5620efed6..314add8289c 100644
--- a/src/comp/syntax/parse/parser.rs
+++ b/src/comp/syntax/parse/parser.rs
@@ -9,6 +9,7 @@ import codemap::span;
 import util::interner;
 import ast::{node_id, spanned};
 import front::attr;
+import lexer::reader;
 
 tag restriction {
     UNRESTRICTED;
@@ -27,34 +28,60 @@ fn next_node_id(sess: parse_sess) -> node_id {
     ret rv;
 }
 
-type parser =
-    obj {
-        fn peek() -> token::token;
-        fn bump();
-        fn swap(token::token, uint, uint);
-        fn look_ahead(uint) -> token::token;
-        fn fatal(str) -> ! ;
-        fn span_fatal(span, str) -> ! ;
-        fn warn(str);
-        fn restrict(restriction);
-        fn get_restriction() -> restriction;
-        fn get_file_type() -> file_type;
-        fn get_cfg() -> ast::crate_cfg;
-        fn get_span() -> span;
-        fn get_lo_pos() -> uint;
-        fn get_hi_pos() -> uint;
-        fn get_last_lo_pos() -> uint;
-        fn get_last_hi_pos() -> uint;
-        fn get_prec_table() -> @[op_spec];
-        fn get_str(token::str_num) -> str;
-        fn get_reader() -> lexer::reader;
-        fn get_filemap() -> codemap::filemap;
-        fn get_bad_expr_words() -> hashmap<str, ()>;
-        fn get_chpos() -> uint;
-        fn get_byte_pos() -> uint;
-        fn get_id() -> node_id;
-        fn get_sess() -> parse_sess;
-    };
+type parser = @{
+    sess: parse_sess,
+    cfg: ast::crate_cfg,
+    file_type: file_type,
+    mutable token: token::token,
+    mutable span: span,
+    mutable last_span: span,
+    mutable buffer: [{tok: token::token, span: span}],
+    mutable restriction: restriction,
+    reader: reader,
+    precs: @[op_spec],
+    bad_expr_words: hashmap<str, ()>
+};
+
+impl parser for parser {
+    fn bump() {
+        self.last_span = self.span;
+        if vec::len(self.buffer) == 0u {
+            let next = lexer::next_token(self.reader);
+            self.token = next.tok;
+            self.span = ast_util::mk_sp(next.chpos, self.reader.chpos);
+        } else {
+            let next = vec::pop(self.buffer);
+            self.token = next.tok;
+            self.span = next.span;
+        }
+    }
+    fn swap(next: token::token, lo: uint, hi: uint) {
+        self.token = next;
+        self.span = ast_util::mk_sp(lo, hi);
+    }
+    fn look_ahead(distance: uint) -> token::token {
+        while vec::len(self.buffer) < distance {
+            let next = lexer::next_token(self.reader);
+            let sp = ast_util::mk_sp(next.chpos, self.reader.chpos);
+            self.buffer = [{tok: next.tok, span: sp}] + self.buffer;
+        }
+        ret self.buffer[distance - 1u].tok;
+    }
+    fn fatal(m: str) -> ! {
+        self.span_fatal(self.span, m);
+    }
+    fn span_fatal(sp: span, m: str) -> ! {
+        codemap::emit_error(some(sp), m, self.sess.cm);
+        fail;
+    }
+    fn warn(m: str) {
+        codemap::emit_warning(some(self.span), m, self.sess.cm);
+    }
+    fn get_str(i: token::str_num) -> str {
+        interner::get(*self.reader.interner, i)
+    }
+    fn get_id() -> node_id { next_node_id(self.sess) }
+}
 
 fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, path: str,
                         chpos: uint, byte_pos: uint, ftype: file_type) ->
@@ -86,79 +113,21 @@ fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
     ret new_parser(sess, cfg, rdr, ftype);
 }
 
-fn new_parser(sess: parse_sess, cfg: ast::crate_cfg, rdr: lexer::reader,
+fn new_parser(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader,
               ftype: file_type) -> parser {
-    obj stdio_parser(sess: parse_sess,
-                     cfg: ast::crate_cfg,
-                     ftype: file_type,
-                     mutable tok: token::token,
-                     mutable tok_span: span,
-                     mutable last_tok_span: span,
-                     mutable buffer: [{tok: token::token, span: span}],
-                     mutable restr: restriction,
-                     rdr: lexer::reader,
-                     precs: @[op_spec],
-                     bad_words: hashmap<str, ()>) {
-        fn peek() -> token::token { ret tok; }
-        fn bump() {
-            last_tok_span = tok_span;
-            if vec::len(buffer) == 0u {
-                let next = lexer::next_token(rdr);
-                tok = next.tok;
-                tok_span = ast_util::mk_sp(next.chpos, rdr.get_chpos());
-            } else {
-                let next = vec::pop(buffer);
-                tok = next.tok;
-                tok_span = next.span;
-            }
-        }
-        fn swap(next: token::token, lo: uint, hi: uint) {
-            tok = next;
-            tok_span = ast_util::mk_sp(lo, hi);
-        }
-        fn look_ahead(distance: uint) -> token::token {
-            while vec::len(buffer) < distance {
-                let next = lexer::next_token(rdr);
-                let sp = ast_util::mk_sp(next.chpos, rdr.get_chpos());
-                buffer = [{tok: next.tok, span: sp}] + buffer;
-            }
-            ret buffer[distance - 1u].tok;
-        }
-        fn fatal(m: str) -> ! {
-            self.span_fatal(self.get_span(), m);
-        }
-        fn span_fatal(sp: span, m: str) -> ! {
-            codemap::emit_error(some(sp), m, sess.cm);
-            fail;
-        }
-        fn warn(m: str) {
-            codemap::emit_warning(some(self.get_span()), m, sess.cm);
-        }
-        fn restrict(r: restriction) { restr = r; }
-        fn get_restriction() -> restriction { ret restr; }
-        fn get_span() -> span { ret tok_span; }
-        fn get_lo_pos() -> uint { ret tok_span.lo; }
-        fn get_hi_pos() -> uint { ret tok_span.hi; }
-        fn get_last_lo_pos() -> uint { ret last_tok_span.lo; }
-        fn get_last_hi_pos() -> uint { ret last_tok_span.hi; }
-        fn get_file_type() -> file_type { ret ftype; }
-        fn get_cfg() -> ast::crate_cfg { ret cfg; }
-        fn get_prec_table() -> @[op_spec] { ret precs; }
-        fn get_str(i: token::str_num) -> str {
-            ret interner::get(*rdr.get_interner(), i);
-        }
-        fn get_reader() -> lexer::reader { ret rdr; }
-        fn get_filemap() -> codemap::filemap { ret rdr.get_filemap(); }
-        fn get_bad_expr_words() -> hashmap<str, ()> { ret bad_words; }
-        fn get_chpos() -> uint { ret rdr.get_chpos(); }
-        fn get_byte_pos() -> uint { ret rdr.get_byte_pos(); }
-        fn get_id() -> node_id { ret next_node_id(sess); }
-        fn get_sess() -> parse_sess { ret sess; }
-    }
     let tok0 = lexer::next_token(rdr);
-    let span0 = ast_util::mk_sp(tok0.chpos, rdr.get_chpos());
-    ret stdio_parser(sess, cfg, ftype, tok0.tok, span0, span0, [],
-                     UNRESTRICTED, rdr, prec_table(), bad_expr_word_table());
+    let span0 = ast_util::mk_sp(tok0.chpos, rdr.chpos);
+    @{sess: sess,
+      cfg: cfg,
+      file_type: ftype,
+      mutable token: tok0.tok,
+      mutable span: span0,
+      mutable last_span: span0,
+      mutable buffer: [],
+      mutable restriction: UNRESTRICTED,
+      reader: rdr,
+      precs: prec_table(),
+      bad_expr_words: bad_expr_word_table()}
 }
 
 // These are the words that shouldn't be allowed as value identifiers,
@@ -178,35 +147,35 @@ fn bad_expr_word_table() -> hashmap<str, ()> {
 }
 
 fn unexpected(p: parser, t: token::token) -> ! {
-    let s: str = "unexpected token: '" + token::to_str(p.get_reader(), t) +
+    let s: str = "unexpected token: '" + token::to_str(p.reader, t) +
         "'";
     p.fatal(s);
 }
 
 fn expect(p: parser, t: token::token) {
-    if p.peek() == t {
+    if p.token == t {
         p.bump();
     } else {
         let s: str = "expecting '";
-        s += token::to_str(p.get_reader(), t);
+        s += token::to_str(p.reader, t);
         s += "' but found '";
-        s += token::to_str(p.get_reader(), p.peek());
+        s += token::to_str(p.reader, p.token);
         p.fatal(s + "'");
     }
 }
 
 fn expect_gt(p: parser) {
-    if p.peek() == token::GT {
+    if p.token == token::GT {
         p.bump();
-    } else if p.peek() == token::BINOP(token::LSR) {
-        p.swap(token::GT, p.get_lo_pos() + 1u, p.get_hi_pos());
-    } else if p.peek() == token::BINOP(token::ASR) {
-        p.swap(token::BINOP(token::LSR), p.get_lo_pos() + 1u, p.get_hi_pos());
+    } else if p.token == token::BINOP(token::LSR) {
+        p.swap(token::GT, p.span.lo + 1u, p.span.hi);
+    } else if p.token == token::BINOP(token::ASR) {
+        p.swap(token::BINOP(token::LSR), p.span.lo + 1u, p.span.hi);
     } else {
         let s: str = "expecting ";
-        s += token::to_str(p.get_reader(), token::GT);
+        s += token::to_str(p.reader, token::GT);
         s += ", found ";
-        s += token::to_str(p.get_reader(), p.peek());
+        s += token::to_str(p.reader, p.token);
         p.fatal(s);
     }
 }
@@ -216,7 +185,7 @@ fn spanned<T: copy>(lo: uint, hi: uint, node: T) -> spanned<T> {
 }
 
 fn parse_ident(p: parser) -> ast::ident {
-    alt p.peek() {
+    alt p.token {
       token::IDENT(i, _) { p.bump(); ret p.get_str(i); }
       _ { p.fatal("expecting ident"); }
     }
@@ -228,18 +197,18 @@ fn parse_value_ident(p: parser) -> ast::ident {
 }
 
 fn eat(p: parser, tok: token::token) -> bool {
-    ret if p.peek() == tok { p.bump(); true } else { false };
+    ret if p.token == tok { p.bump(); true } else { false };
 }
 
 fn is_word(p: parser, word: str) -> bool {
-    ret alt p.peek() {
+    ret alt p.token {
           token::IDENT(sid, false) { str::eq(word, p.get_str(sid)) }
           _ { false }
         };
 }
 
 fn eat_word(p: parser, word: str) -> bool {
-    alt p.peek() {
+    alt p.token {
       token::IDENT(sid, false) {
         if str::eq(word, p.get_str(sid)) {
             p.bump();
@@ -253,15 +222,15 @@ fn eat_word(p: parser, word: str) -> bool {
 fn expect_word(p: parser, word: str) {
     if !eat_word(p, word) {
         p.fatal("expecting " + word + ", found " +
-                    token::to_str(p.get_reader(), p.peek()));
+                    token::to_str(p.reader, p.token));
     }
 }
 
 fn check_bad_word(p: parser) {
-    alt p.peek() {
+    alt p.token {
       token::IDENT(sid, false) {
         let w = p.get_str(sid);
-        if p.get_bad_expr_words().contains_key(w) {
+        if p.bad_expr_words.contains_key(w) {
             p.fatal("found " + w + " in expression position");
         }
       }
@@ -293,11 +262,11 @@ fn parse_ty_fn(proto: ast::proto, p: parser) -> ast::ty_ {
 
 fn parse_ty_methods(p: parser, allow_tps: bool) -> [ast::ty_method] {
     parse_seq(token::LBRACE, token::RBRACE, seq_sep_none(), {|p|
-        let flo = p.get_lo_pos();
+        let flo = p.span.lo;
         expect_word(p, "fn");
         let ident = parse_value_ident(p);
         let tps = allow_tps ? parse_ty_params(p) : [];
-        let f = parse_ty_fn(ast::proto_bare, p), fhi = p.get_last_hi_pos();
+        let f = parse_ty_fn(ast::proto_bare, p), fhi = p.last_span.hi;
         expect(p, token::SEMI);
         alt f {
           ast::ty_fn(_, d) {
@@ -315,7 +284,7 @@ fn parse_mt(p: parser) -> ast::mt {
 }
 
 fn parse_ty_field(p: parser) -> ast::ty_field {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     let mut = parse_mutability(p);
     let id = parse_ident(p);
     expect(p, token::COLON);
@@ -332,10 +301,10 @@ fn ident_index(p: parser, args: [ast::arg], i: ast::ident) -> uint {
 }
 
 fn parse_type_constr_arg(p: parser) -> @ast::ty_constr_arg {
-    let sp = p.get_span();
+    let sp = p.span;
     let carg = ast::carg_base;
     expect(p, token::BINOP(token::STAR));
-    if p.peek() == token::DOT {
+    if p.token == token::DOT {
         // "*..." notation for record fields
         p.bump();
         let pth = parse_path(p);
@@ -346,9 +315,9 @@ fn parse_type_constr_arg(p: parser) -> @ast::ty_constr_arg {
 }
 
 fn parse_constr_arg(args: [ast::arg], p: parser) -> @ast::constr_arg {
-    let sp = p.get_span();
+    let sp = p.span;
     let carg = ast::carg_base;
-    if p.peek() == token::BINOP(token::STAR) {
+    if p.token == token::BINOP(token::STAR) {
         p.bump();
     } else {
         let i: ast::ident = parse_value_ident(p);
@@ -358,7 +327,7 @@ fn parse_constr_arg(args: [ast::arg], p: parser) -> @ast::constr_arg {
 }
 
 fn parse_ty_constr(fn_args: [ast::arg], p: parser) -> @ast::constr {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     let path = parse_path(p);
     let args: {node: [@ast::constr_arg], span: span} =
         parse_seq(token::LPAREN, token::RPAREN, seq_sep(token::COMMA),
@@ -368,12 +337,12 @@ fn parse_ty_constr(fn_args: [ast::arg], p: parser) -> @ast::constr {
 }
 
 fn parse_constr_in_type(p: parser) -> @ast::ty_constr {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     let path = parse_path(p);
     let args: [@ast::ty_constr_arg] =
         parse_seq(token::LPAREN, token::RPAREN, seq_sep(token::COMMA),
                   parse_type_constr_arg, p).node;
-    let hi = p.get_lo_pos();
+    let hi = p.span.lo;
     let tc: ast::ty_constr_ = {path: path, args: args, id: p.get_id()};
     ret @spanned(lo, hi, tc);
 }
@@ -386,7 +355,7 @@ fn parse_constrs<T: copy>(pser: block(parser) -> @ast::constr_general<T>,
     while true {
         let constr = pser(p);
         constrs += [constr];
-        if p.peek() == token::COMMA { p.bump(); } else { break; }
+        if p.token == token::COMMA { p.bump(); } else { break; }
     }
     constrs
 }
@@ -397,12 +366,12 @@ fn parse_type_constraints(p: parser) -> [@ast::ty_constr] {
 
 fn parse_ty_postfix(orig_t: ast::ty_, p: parser, colons_before_params: bool,
                     lo: uint) -> @ast::ty {
-    if colons_before_params && p.peek() == token::MOD_SEP {
+    if colons_before_params && p.token == token::MOD_SEP {
         p.bump();
         expect(p, token::LT);
-    } else if !colons_before_params && p.peek() == token::LT {
+    } else if !colons_before_params && p.token == token::LT {
         p.bump();
-    } else { ret @spanned(lo, p.get_last_hi_pos(), orig_t); }
+    } else { ret @spanned(lo, p.last_span.hi, orig_t); }
 
     // If we're here, we have explicit type parameter instantiation.
     let seq = parse_seq_to_gt(some(token::COMMA), {|p| parse_ty(p, false)},
@@ -410,8 +379,8 @@ fn parse_ty_postfix(orig_t: ast::ty_, p: parser, colons_before_params: bool,
 
     alt orig_t {
       ast::ty_path(pth, ann) {
-        ret @spanned(lo, p.get_last_hi_pos(),
-                     ast::ty_path(@spanned(lo, p.get_last_hi_pos(),
+        ret @spanned(lo, p.last_span.hi,
+                     ast::ty_path(@spanned(lo, p.last_span.hi,
                                            {global: pth.node.global,
                                             idents: pth.node.idents,
                                             types: seq}), ann));
@@ -422,18 +391,18 @@ fn parse_ty_postfix(orig_t: ast::ty_, p: parser, colons_before_params: bool,
 
 fn parse_ret_ty(p: parser) -> (ast::ret_style, @ast::ty) {
     ret if eat(p, token::RARROW) {
-        let lo = p.get_lo_pos();
+        let lo = p.span.lo;
         if eat(p, token::NOT) {
-            (ast::noreturn, @spanned(lo, p.get_last_hi_pos(), ast::ty_bot))
+            (ast::noreturn, @spanned(lo, p.last_span.hi, ast::ty_bot))
         } else { (ast::return_val, parse_ty(p, false)) }
     } else {
-        let pos = p.get_lo_pos();
+        let pos = p.span.lo;
         (ast::return_val, @spanned(pos, pos, ast::ty_nil))
     }
 }
 
 fn parse_ty(p: parser, colons_before_params: bool) -> @ast::ty {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     let t: ast::ty_;
     // FIXME: do something with this
 
@@ -469,14 +438,14 @@ fn parse_ty(p: parser, colons_before_params: bool) -> @ast::ty {
         t = ast::ty_float(ast::ty_f32);
     } else if eat_word(p, "f64") {
         t = ast::ty_float(ast::ty_f64);
-    } else if p.peek() == token::LPAREN {
+    } else if p.token == token::LPAREN {
         p.bump();
-        if p.peek() == token::RPAREN {
+        if p.token == token::RPAREN {
             p.bump();
             t = ast::ty_nil;
         } else {
             let ts = [parse_ty(p, false)];
-            while p.peek() == token::COMMA {
+            while p.token == token::COMMA {
                 p.bump();
                 ts += [parse_ty(p, false)];
             }
@@ -485,28 +454,28 @@ fn parse_ty(p: parser, colons_before_params: bool) -> @ast::ty {
             } else { t = ast::ty_tup(ts); }
             expect(p, token::RPAREN);
         }
-    } else if p.peek() == token::AT {
+    } else if p.token == token::AT {
         p.bump();
         t = ast::ty_box(parse_mt(p));
-    } else if p.peek() == token::TILDE {
+    } else if p.token == token::TILDE {
         p.bump();
         t = ast::ty_uniq(parse_mt(p));
-    } else if p.peek() == token::BINOP(token::STAR) {
+    } else if p.token == token::BINOP(token::STAR) {
         p.bump();
         t = ast::ty_ptr(parse_mt(p));
-    } else if p.peek() == token::LBRACE {
+    } else if p.token == token::LBRACE {
         let elems =
             parse_seq(token::LBRACE, token::RBRACE, seq_sep_opt(token::COMMA),
                       parse_ty_field, p);
         if vec::len(elems.node) == 0u { unexpected(p, token::RBRACE); }
         let hi = elems.span.hi;
         t = ast::ty_rec(elems.node);
-        if p.peek() == token::COLON {
+        if p.token == token::COLON {
             p.bump();
             t = ast::ty_constr(@spanned(lo, hi, t),
                                parse_type_constraints(p));
         }
-    } else if p.peek() == token::LBRACKET {
+    } else if p.token == token::LBRACKET {
         expect(p, token::LBRACKET);
         t = ast::ty_vec(parse_mt(p));
         expect(p, token::RBRACKET);
@@ -523,7 +492,7 @@ fn parse_ty(p: parser, colons_before_params: bool) -> @ast::ty {
         t = parse_ty_fn(ast::proto_uniq, p);
     } else if eat_word(p, "obj") {
         t = ast::ty_obj(parse_ty_methods(p, false));
-    } else if p.peek() == token::MOD_SEP || is_ident(p.peek()) {
+    } else if p.token == token::MOD_SEP || is_ident(p.token) {
         let path = parse_path(p);
         t = ast::ty_path(path, p.get_id());
     } else { p.fatal("expecting type"); }
@@ -553,7 +522,7 @@ fn parse_fn_block_arg(p: parser) -> ast::arg {
     let m = parse_arg_mode(p);
     let i = parse_value_ident(p);
     let t = eat(p, token::COLON) ? parse_ty(p, false) :
-        @spanned(p.get_lo_pos(), p.get_hi_pos(), ast::ty_infer);
+        @spanned(p.span.lo, p.span.hi, ast::ty_infer);
     ret {mode: m, ty: t, ident: i, id: p.get_id()};
 }
 
@@ -562,8 +531,8 @@ fn parse_seq_to_before_gt<T: copy>(sep: option::t<token::token>,
                                   p: parser) -> [T] {
     let first = true;
     let v = [];
-    while p.peek() != token::GT && p.peek() != token::BINOP(token::LSR) &&
-              p.peek() != token::BINOP(token::ASR) {
+    while p.token != token::GT && p.token != token::BINOP(token::LSR) &&
+              p.token != token::BINOP(token::ASR) {
         alt sep {
           some(t) { if first { first = false; } else { expect(p, t); } }
           _ { }
@@ -585,10 +554,10 @@ fn parse_seq_to_gt<T: copy>(sep: option::t<token::token>,
 fn parse_seq_lt_gt<T: copy>(sep: option::t<token::token>,
                            f: block(parser) -> T,
                            p: parser) -> spanned<[T]> {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     expect(p, token::LT);
     let result = parse_seq_to_before_gt::<T>(sep, f, p);
-    let hi = p.get_hi_pos();
+    let hi = p.span.hi;
     expect_gt(p);
     ret spanned(lo, hi, result);
 }
@@ -620,12 +589,12 @@ fn parse_seq_to_before_end<T: copy>(ket: token::token,
                                    f: block(parser) -> T, p: parser) -> [T] {
     let first: bool = true;
     let v: [T] = [];
-    while p.peek() != ket {
+    while p.token != ket {
         alt sep.sep {
           some(t) { if first { first = false; } else { expect(p, t); } }
           _ { }
         }
-        if sep.trailing_opt && p.peek() == ket { break; }
+        if sep.trailing_opt && p.token == ket { break; }
         v += [f(p)];
     }
     ret v;
@@ -635,10 +604,10 @@ fn parse_seq_to_before_end<T: copy>(ket: token::token,
 fn parse_seq<T: copy>(bra: token::token, ket: token::token,
                      sep: seq_sep, f: block(parser) -> T,
                      p: parser) -> spanned<[T]> {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     expect(p, bra);
     let result = parse_seq_to_before_end::<T>(ket, sep, f, p);
-    let hi = p.get_hi_pos();
+    let hi = p.span.hi;
     p.bump();
     ret spanned(lo, hi, result);
 }
@@ -655,13 +624,13 @@ fn lit_from_token(p: parser, tok: token::token) -> ast::lit_ {
 }
 
 fn parse_lit(p: parser) -> ast::lit {
-    let sp = p.get_span();
+    let sp = p.span;
     let lit = if eat_word(p, "true") {
         ast::lit_bool(true)
     } else if eat_word(p, "false") {
         ast::lit_bool(false)
     } else {
-        let tok = p.peek();
+        let tok = p.token;
         p.bump();
         lit_from_token(p, tok)
     };
@@ -674,23 +643,23 @@ fn is_ident(t: token::token) -> bool {
 }
 
 fn is_plain_ident(p: parser) -> bool {
-    ret alt p.peek() { token::IDENT(_, false) { true } _ { false } };
+    ret alt p.token { token::IDENT(_, false) { true } _ { false } };
 }
 
 fn parse_path(p: parser) -> @ast::path {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     let global = eat(p, token::MOD_SEP), ids = [parse_ident(p)];
     while p.look_ahead(1u) != token::LT && eat(p, token::MOD_SEP) {
         ids += [parse_ident(p)];
     }
-    ret @spanned(lo, p.get_last_hi_pos(),
+    ret @spanned(lo, p.last_span.hi,
                  {global: global, idents: ids, types: []});
 }
 
 fn parse_path_and_ty_param_substs(p: parser, colons: bool) -> @ast::path {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     let path = parse_path(p);
-    if colons ? eat(p, token::MOD_SEP) : p.peek() == token::LT {
+    if colons ? eat(p, token::MOD_SEP) : p.token == token::LT {
         let seq = parse_seq_lt_gt(some(token::COMMA),
                                   {|p| parse_ty(p, false)}, p);
         @spanned(lo, seq.span.hi, {types: seq.node with path.node})
@@ -708,7 +677,7 @@ fn parse_mutability(p: parser) -> ast::mutability {
 }
 
 fn parse_field(p: parser, sep: token::token) -> ast::field {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     let m = parse_mutability(p);
     let i = parse_ident(p);
     expect(p, sep);
@@ -731,8 +700,7 @@ fn is_bar(t: token::token) -> bool {
 }
 
 fn mk_lit_u32(p: parser, i: u32) -> @ast::expr {
-    let span = p.get_span();
-
+    let span = p.span;
     let lv_lit = @{node: ast::lit_uint(i as u64, ast::ty_u32),
                    span: span};
 
@@ -762,21 +730,21 @@ fn to_expr(e: pexpr) -> @ast::expr {
 }
 
 fn parse_bottom_expr(p: parser) -> pexpr {
-    let lo = p.get_lo_pos();
-    let hi = p.get_hi_pos();
+    let lo = p.span.lo;
+    let hi = p.span.hi;
 
     let ex: ast::expr_;
-    if p.peek() == token::LPAREN {
+    if p.token == token::LPAREN {
         p.bump();
-        if p.peek() == token::RPAREN {
-            hi = p.get_hi_pos();
+        if p.token == token::RPAREN {
+            hi = p.span.hi;
             p.bump();
             let lit = @spanned(lo, hi, ast::lit_nil);
             ret mk_pexpr(p, lo, hi, ast::expr_lit(lit));
         }
         let es = [parse_expr(p)];
-        while p.peek() == token::COMMA { p.bump(); es += [parse_expr(p)]; }
-        hi = p.get_hi_pos();
+        while p.token == token::COMMA { p.bump(); es += [parse_expr(p)]; }
+        hi = p.span.hi;
         expect(p, token::RPAREN);
 
         // Note: we retain the expr_tup() even for simple
@@ -785,25 +753,25 @@ fn parse_bottom_expr(p: parser) -> pexpr {
         // can tell whether the expression was parenthesized or not,
         // which affects expr_is_complete().
         ret mk_pexpr(p, lo, hi, ast::expr_tup(es));
-    } else if p.peek() == token::LBRACE {
+    } else if p.token == token::LBRACE {
         p.bump();
         if is_word(p, "mutable") ||
                is_plain_ident(p) && p.look_ahead(1u) == token::COLON {
             let fields = [parse_field(p, token::COLON)];
             let base = none;
-            while p.peek() != token::RBRACE {
+            while p.token != token::RBRACE {
                 if eat_word(p, "with") { base = some(parse_expr(p)); break; }
                 expect(p, token::COMMA);
-                if p.peek() == token::RBRACE {
+                if p.token == token::RBRACE {
                     // record ends by an optional trailing comma
                     break;
                 }
                 fields += [parse_field(p, token::COLON)];
             }
-            hi = p.get_hi_pos();
+            hi = p.span.hi;
             expect(p, token::RBRACE);
             ex = ast::expr_rec(fields, base);
-        } else if is_bar(p.peek()) {
+        } else if is_bar(p.token) {
             ret pexpr(parse_fn_block_expr(p));
         } else {
             let blk = parse_block_tail(p, lo, ast::default_blk);
@@ -834,35 +802,35 @@ fn parse_bottom_expr(p: parser) -> pexpr {
         ret pexpr(parse_block_expr(p, lo, ast::unchecked_blk));
     } else if eat_word(p, "unsafe") {
         ret pexpr(parse_block_expr(p, lo, ast::unsafe_blk));
-    } else if p.peek() == token::LBRACKET {
+    } else if p.token == token::LBRACKET {
         p.bump();
         let mut = parse_mutability(p);
         let es =
             parse_seq_to_end(token::RBRACKET, seq_sep(token::COMMA),
                              parse_expr, p);
         ex = ast::expr_vec(es, mut);
-    } else if p.peek() == token::POUND_LT {
+    } else if p.token == token::POUND_LT {
         p.bump();
         let ty = parse_ty(p, false);
         expect(p, token::GT);
 
         /* hack: early return to take advantage of specialized function */
-        ret pexpr(mk_mac_expr(p, lo, p.get_hi_pos(),
+        ret pexpr(mk_mac_expr(p, lo, p.span.hi,
                               ast::mac_embed_type(ty)));
-    } else if p.peek() == token::POUND_LBRACE {
+    } else if p.token == token::POUND_LBRACE {
         p.bump();
         let blk = ast::mac_embed_block(
             parse_block_tail(p, lo, ast::default_blk));
-        ret pexpr(mk_mac_expr(p, lo, p.get_hi_pos(), blk));
-    } else if p.peek() == token::ELLIPSIS {
+        ret pexpr(mk_mac_expr(p, lo, p.span.hi, blk));
+    } else if p.token == token::ELLIPSIS {
         p.bump();
-        ret pexpr(mk_mac_expr(p, lo, p.get_hi_pos(), ast::mac_ellipsis));
+        ret pexpr(mk_mac_expr(p, lo, p.span.hi, ast::mac_ellipsis));
     } else if eat_word(p, "obj") {
         // Anonymous object
 
         // Only make people type () if they're actually adding new fields
         let fields: option::t<[ast::anon_obj_field]> = none;
-        if p.peek() == token::LPAREN {
+        if p.token == token::LPAREN {
             p.bump();
             fields =
                 some(parse_seq_to_end(token::RPAREN, seq_sep(token::COMMA),
@@ -871,12 +839,12 @@ fn parse_bottom_expr(p: parser) -> pexpr {
         let meths: [@ast::method] = [];
         let inner_obj: option::t<@ast::expr> = none;
         expect(p, token::LBRACE);
-        while p.peek() != token::RBRACE {
+        while p.token != token::RBRACE {
             if eat_word(p, "with") {
                 inner_obj = some(parse_expr(p));
             } else { meths += [parse_method(p, false)]; }
         }
-        hi = p.get_hi_pos();
+        hi = p.span.hi;
         expect(p, token::RBRACE);
         // fields and methods may be *additional* or *overriding* fields
         // and methods if there's a inner_obj, or they may be the *only*
@@ -889,7 +857,7 @@ fn parse_bottom_expr(p: parser) -> pexpr {
     } else if eat_word(p, "bind") {
         let e = parse_expr_res(p, RESTRICT_NO_CALL_EXPRS);
         fn parse_expr_opt(p: parser) -> option::t<@ast::expr> {
-            alt p.peek() {
+            alt p.token {
               token::UNDERSCORE. { p.bump(); ret none; }
               _ { ret some(parse_expr(p)); }
             }
@@ -899,12 +867,12 @@ fn parse_bottom_expr(p: parser) -> pexpr {
                       parse_expr_opt, p);
         hi = es.span.hi;
         ex = ast::expr_bind(e, es.node);
-    } else if p.peek() == token::POUND {
+    } else if p.token == token::POUND {
         let ex_ext = parse_syntax_ext(p);
         hi = ex_ext.span.hi;
         ex = ex_ext.node;
     } else if eat_word(p, "fail") {
-        if can_begin_expr(p.peek()) {
+        if can_begin_expr(p.token) {
             let e = parse_expr(p);
             hi = e.span.hi;
             ex = ast::expr_fail(some(e));
@@ -915,7 +883,7 @@ fn parse_bottom_expr(p: parser) -> pexpr {
         expect(p, token::COMMA);
         let e = parse_expr(p);
         ex = ast::expr_log(2, lvl, e);
-        hi = p.get_hi_pos();
+        hi = p.span.hi;
         expect(p, token::RPAREN);
     } else if eat_word(p, "assert") {
         let e = parse_expr(p);
@@ -938,17 +906,17 @@ fn parse_bottom_expr(p: parser) -> pexpr {
         hi = e.span.hi;
         ex = ast::expr_check(ast::claimed_expr, e);
     } else if eat_word(p, "ret") {
-        if can_begin_expr(p.peek()) {
+        if can_begin_expr(p.token) {
             let e = parse_expr(p);
             hi = e.span.hi;
             ex = ast::expr_ret(some(e));
         } else { ex = ast::expr_ret(none); }
     } else if eat_word(p, "break") {
         ex = ast::expr_break;
-        hi = p.get_hi_pos();
+        hi = p.span.hi;
     } else if eat_word(p, "cont") {
         ex = ast::expr_cont;
-        hi = p.get_hi_pos();
+        hi = p.span.hi;
     } else if eat_word(p, "be") {
         let e = parse_expr(p);
 
@@ -961,8 +929,8 @@ fn parse_bottom_expr(p: parser) -> pexpr {
         let e = parse_expr(p);
         ex = ast::expr_copy(e);
         hi = e.span.hi;
-    } else if p.peek() == token::MOD_SEP ||
-                  is_ident(p.peek()) && !is_word(p, "true") &&
+    } else if p.token == token::MOD_SEP ||
+                  is_ident(p.token) && !is_word(p, "true") &&
                       !is_word(p, "false") {
         check_bad_word(p);
         let pth = parse_path_and_ty_param_substs(p, true);
@@ -985,13 +953,13 @@ fn parse_block_expr(p: parser,
 }
 
 fn parse_syntax_ext(p: parser) -> @ast::expr {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     expect(p, token::POUND);
     ret parse_syntax_ext_naked(p, lo);
 }
 
 fn parse_syntax_ext_naked(p: parser, lo: uint) -> @ast::expr {
-    alt p.peek() {
+    alt p.token {
       token::IDENT(_, _) {}
       _ { p.fatal("expected a syntax expander name"); }
     }
@@ -999,7 +967,7 @@ fn parse_syntax_ext_naked(p: parser, lo: uint) -> @ast::expr {
     //temporary for a backwards-compatible cycle:
     let sep = seq_sep(token::COMMA);
     let es =
-        if p.peek() == token::LPAREN {
+        if p.token == token::LPAREN {
             parse_seq(token::LPAREN, token::RPAREN, sep, parse_expr, p)
         } else {
             parse_seq(token::LBRACKET, token::RBRACKET, sep, parse_expr, p)
@@ -1015,7 +983,7 @@ fn parse_dot_or_call_expr(p: parser) -> pexpr {
 }
 
 fn permits_call(p: parser) -> bool {
-    ret p.get_restriction() != RESTRICT_NO_CALL_EXPRS;
+    ret p.restriction != RESTRICT_NO_CALL_EXPRS;
 }
 
 fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr {
@@ -1023,7 +991,7 @@ fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr {
     let lo = e.span.lo;
     let hi = e.span.hi;
     while !expr_is_complete(p, e) {
-        alt p.peek() {
+        alt p.token {
           // expr(...)
           token::LPAREN. if permits_call(p) {
             let es = parse_seq(token::LPAREN, token::RPAREN,
@@ -1043,7 +1011,7 @@ fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr {
                             with *to_expr(e)});
               }
               _ {
-                e = mk_pexpr(p, lo, p.get_last_hi_pos(),
+                e = mk_pexpr(p, lo, p.last_span.hi,
                             ast::expr_call(to_expr(e), [blk], true));
               }
             }
@@ -1061,9 +1029,9 @@ fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr {
           // expr.f
           token::DOT. {
             p.bump();
-            alt p.peek() {
+            alt p.token {
               token::IDENT(i, _) {
-                hi = p.get_hi_pos();
+                hi = p.span.hi;
                 p.bump();
                 let tys = if eat(p, token::MOD_SEP) {
                     expect(p, token::LT);
@@ -1086,11 +1054,11 @@ fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr {
 }
 
 fn parse_prefix_expr(p: parser) -> pexpr {
-    let lo = p.get_lo_pos();
-    let hi = p.get_hi_pos();
+    let lo = p.span.lo;
+    let hi = p.span.hi;
 
     let ex;
-    alt p.peek() {
+    alt p.token {
       token::NOT. {
         p.bump();
         let e = to_expr(parse_prefix_expr(p));
@@ -1135,7 +1103,7 @@ fn parse_prefix_expr(p: parser) -> pexpr {
 
 fn parse_ternary(p: parser) -> @ast::expr {
     let cond_expr = parse_binops(p);
-    if p.peek() == token::QUES {
+    if p.token == token::QUES {
         p.bump();
         let then_expr = parse_expr(p);
         expect(p, token::COLON);
@@ -1185,10 +1153,10 @@ fn parse_more_binops(p: parser, plhs: pexpr, min_prec: int) ->
    @ast::expr {
     let lhs = to_expr(plhs);
     if expr_is_complete(p, plhs) { ret lhs; }
-    let peeked = p.peek();
+    let peeked = p.token;
     if peeked == token::BINOP(token::OR) &&
-       p.get_restriction() == RESTRICT_NO_BAR_OP { ret lhs; }
-    for cur: op_spec in *p.get_prec_table() {
+       p.restriction == RESTRICT_NO_BAR_OP { ret lhs; }
+    for cur: op_spec in *p.precs {
         if cur.prec > min_prec && cur.tok == peeked {
             p.bump();
             let expr = parse_prefix_expr(p);
@@ -1208,9 +1176,9 @@ fn parse_more_binops(p: parser, plhs: pexpr, min_prec: int) ->
 }
 
 fn parse_assign_expr(p: parser) -> @ast::expr {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     let lhs = parse_ternary(p);
-    alt p.peek() {
+    alt p.token {
       token::EQ. {
         p.bump();
         let rhs = parse_expr(p);
@@ -1256,7 +1224,7 @@ fn parse_if_expr_1(p: parser) ->
     els: option::t<@ast::expr>,
     lo: uint,
     hi: uint} {
-    let lo = p.get_last_lo_pos();
+    let lo = p.last_span.lo;
     let cond = parse_expr(p);
     let thn = parse_block(p);
     let els: option::t<@ast::expr> = none;
@@ -1287,7 +1255,7 @@ fn parse_if_expr(p: parser) -> @ast::expr {
 fn parse_capture_clause(p: parser) -> @ast::capture_clause {
     fn expect_opt_trailing_semi(p: parser) {
         if !eat(p, token::SEMI) {
-            if p.peek() != token::RBRACKET {
+            if p.token != token::RBRACKET {
                 p.fatal("expecting ; or ]");
             }
         }
@@ -1296,10 +1264,10 @@ fn parse_capture_clause(p: parser) -> @ast::capture_clause {
     fn eat_ident_list(p: parser) -> [@ast::capture_item] {
         let res = [];
         while true {
-            alt p.peek() {
+            alt p.token {
               token::IDENT(_, _) {
                 let id = p.get_id();
-                let sp = ast_util::mk_sp(p.get_lo_pos(), p.get_hi_pos());
+                let sp = ast_util::mk_sp(p.span.lo, p.span.hi);
                 let ident = parse_ident(p);
                 res += [@{id:id, name:ident, span:sp}];
                 if !eat(p, token::COMMA) {
@@ -1335,7 +1303,7 @@ fn parse_capture_clause(p: parser) -> @ast::capture_clause {
 }
 
 fn parse_fn_expr(p: parser, proto: ast::proto) -> @ast::expr {
-    let lo = p.get_last_lo_pos();
+    let lo = p.last_span.lo;
     let capture_clause = parse_capture_clause(p);
     let decl = parse_fn_decl(p, ast::impure_fn);
     let body = parse_block(p);
@@ -1344,7 +1312,7 @@ fn parse_fn_expr(p: parser, proto: ast::proto) -> @ast::expr {
 }
 
 fn parse_fn_block_expr(p: parser) -> @ast::expr {
-    let lo = p.get_last_lo_pos();
+    let lo = p.last_span.lo;
     let decl = parse_fn_block_decl(p);
     let body = parse_block_tail(p, lo, ast::default_blk);
     ret mk_expr(p, lo, body.span.hi, ast::expr_fn_block(decl, body));
@@ -1360,7 +1328,7 @@ fn parse_else_expr(p: parser) -> @ast::expr {
 }
 
 fn parse_for_expr(p: parser) -> @ast::expr {
-    let lo = p.get_last_lo_pos();
+    let lo = p.last_span.lo;
     let decl = parse_local(p, false);
     expect_word(p, "in");
     let seq = parse_expr(p);
@@ -1370,7 +1338,7 @@ fn parse_for_expr(p: parser) -> @ast::expr {
 }
 
 fn parse_while_expr(p: parser) -> @ast::expr {
-    let lo = p.get_last_lo_pos();
+    let lo = p.last_span.lo;
     let cond = parse_expr(p);
     let body = parse_block_no_value(p);
     let hi = body.span.hi;
@@ -1378,7 +1346,7 @@ fn parse_while_expr(p: parser) -> @ast::expr {
 }
 
 fn parse_do_while_expr(p: parser) -> @ast::expr {
-    let lo = p.get_last_lo_pos();
+    let lo = p.last_span.lo;
     let body = parse_block_no_value(p);
     expect_word(p, "while");
     let cond = parse_expr(p);
@@ -1387,18 +1355,18 @@ fn parse_do_while_expr(p: parser) -> @ast::expr {
 }
 
 fn parse_alt_expr(p: parser) -> @ast::expr {
-    let lo = p.get_last_lo_pos();
+    let lo = p.last_span.lo;
     let discriminant = parse_expr(p);
     expect(p, token::LBRACE);
     let arms: [ast::arm] = [];
-    while p.peek() != token::RBRACE {
+    while p.token != token::RBRACE {
         let pats = parse_pats(p);
         let guard = none;
         if eat_word(p, "if") { guard = some(parse_expr(p)); }
         let blk = parse_block(p);
         arms += [{pats: pats, guard: guard, body: blk}];
     }
-    let hi = p.get_hi_pos();
+    let hi = p.span.hi;
     p.bump();
     ret mk_expr(p, lo, hi, ast::expr_alt(discriminant, arms));
 }
@@ -1408,15 +1376,15 @@ fn parse_expr(p: parser) -> @ast::expr {
 }
 
 fn parse_expr_res(p: parser, r: restriction) -> @ast::expr {
-    let old = p.get_restriction();
-    p.restrict(r);
+    let old = p.restriction;
+    p.restriction = r;
     let e = parse_assign_expr(p);
-    p.restrict(old);
+    p.restriction = old;
     ret e;
 }
 
 fn parse_initializer(p: parser) -> option::t<ast::initializer> {
-    alt p.peek() {
+    alt p.token {
       token::EQ. {
         p.bump();
         ret some({op: ast::init_assign, expr: parse_expr(p)});
@@ -1442,16 +1410,16 @@ fn parse_pats(p: parser) -> [@ast::pat] {
     let pats = [];
     while true {
         pats += [parse_pat(p)];
-        if p.peek() == token::BINOP(token::OR) { p.bump(); } else { break; }
+        if p.token == token::BINOP(token::OR) { p.bump(); } else { break; }
     }
     ret pats;
 }
 
 fn parse_pat(p: parser) -> @ast::pat {
-    let lo = p.get_lo_pos();
-    let hi = p.get_hi_pos();
+    let lo = p.span.lo;
+    let hi = p.span.hi;
     let pat;
-    alt p.peek() {
+    alt p.token {
       token::UNDERSCORE. { p.bump(); pat = ast::pat_wild; }
       token::AT. {
         p.bump();
@@ -1470,14 +1438,14 @@ fn parse_pat(p: parser) -> @ast::pat {
         let fields = [];
         let etc = false;
         let first = true;
-        while p.peek() != token::RBRACE {
+        while p.token != token::RBRACE {
             if first { first = false; } else { expect(p, token::COMMA); }
 
-            if p.peek() == token::UNDERSCORE {
+            if p.token == token::UNDERSCORE {
                 p.bump();
-                if p.peek() != token::RBRACE {
+                if p.token != token::RBRACE {
                     p.fatal("expecting }, found " +
-                                token::to_str(p.get_reader(), p.peek()));
+                                token::to_str(p.reader, p.token));
                 }
                 etc = true;
                 break;
@@ -1485,11 +1453,11 @@ fn parse_pat(p: parser) -> @ast::pat {
 
             let fieldname = parse_ident(p);
             let subpat;
-            if p.peek() == token::COLON {
+            if p.token == token::COLON {
                 p.bump();
                 subpat = parse_pat(p);
             } else {
-                if p.get_bad_expr_words().contains_key(fieldname) {
+                if p.bad_expr_words.contains_key(fieldname) {
                     p.fatal("found " + fieldname + " in binding position");
                 }
                 subpat = @{id: p.get_id(),
@@ -1498,26 +1466,26 @@ fn parse_pat(p: parser) -> @ast::pat {
             }
             fields += [{ident: fieldname, pat: subpat}];
         }
-        hi = p.get_hi_pos();
+        hi = p.span.hi;
         p.bump();
         pat = ast::pat_rec(fields, etc);
       }
       token::LPAREN. {
         p.bump();
-        if p.peek() == token::RPAREN {
-            hi = p.get_hi_pos();
+        if p.token == token::RPAREN {
+            hi = p.span.hi;
             p.bump();
             let lit = @{node: ast::lit_nil, span: ast_util::mk_sp(lo, hi)};
             let expr = mk_expr(p, lo, hi, ast::expr_lit(lit));
             pat = ast::pat_lit(expr);
         } else {
             let fields = [parse_pat(p)];
-            while p.peek() == token::COMMA {
+            while p.token == token::COMMA {
                 p.bump();
                 fields += [parse_pat(p)];
             }
             if vec::len(fields) == 1u { expect(p, token::COMMA); }
-            hi = p.get_hi_pos();
+            hi = p.span.hi;
             expect(p, token::RPAREN);
             pat = ast::pat_tup(fields);
         }
@@ -1540,7 +1508,7 @@ fn parse_pat(p: parser) -> @ast::pat {
                         }
                         _ { true }
                       } {
-            hi = p.get_hi_pos();
+            hi = p.span.hi;
             let name = parse_value_ident(p);
             let sub = eat(p, token::AT) ? some(parse_pat(p)) : none;
             pat = ast::pat_bind(name, sub);
@@ -1548,7 +1516,7 @@ fn parse_pat(p: parser) -> @ast::pat {
             let tag_path = parse_path_and_ty_param_substs(p, true);
             hi = tag_path.span.hi;
             let args: [@ast::pat];
-            alt p.peek() {
+            alt p.token {
               token::LPAREN. {
                 let a =
                     parse_seq(token::LPAREN, token::RPAREN,
@@ -1567,12 +1535,12 @@ fn parse_pat(p: parser) -> @ast::pat {
 }
 
 fn parse_local(p: parser, allow_init: bool) -> @ast::local {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     let pat = parse_pat(p);
     let ty = @spanned(lo, lo, ast::ty_infer);
     if eat(p, token::COLON) { ty = parse_ty(p, false); }
     let init = if allow_init { parse_initializer(p) } else { none };
-    ret @spanned(lo, p.get_last_hi_pos(),
+    ret @spanned(lo, p.last_span.hi,
                  {ty: ty, pat: pat, init: init, id: p.get_id()});
 }
 
@@ -1580,16 +1548,16 @@ fn parse_let(p: parser) -> @ast::decl {
     fn parse_let_style(p: parser) -> ast::let_style {
         eat(p, token::BINOP(token::AND)) ? ast::let_ref : ast::let_copy
     }
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     let locals = [(parse_let_style(p), parse_local(p, true))];
     while eat(p, token::COMMA) {
         locals += [(parse_let_style(p), parse_local(p, true))];
     }
-    ret @spanned(lo, p.get_last_hi_pos(), ast::decl_local(locals));
+    ret @spanned(lo, p.last_span.hi, ast::decl_local(locals));
 }
 
 fn parse_stmt(p: parser) -> @ast::stmt {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     if eat_word(p, "let") {
         let decl = parse_let(p);
         ret @spanned(lo, decl.span.hi, ast::stmt_decl(decl, p.get_id()));
@@ -1624,10 +1592,10 @@ fn parse_stmt(p: parser) -> @ast::stmt {
 }
 
 fn expr_is_complete(p: parser, e: pexpr) -> bool {
-    log(debug, ("expr_is_complete", p.get_restriction(),
+    log(debug, ("expr_is_complete", p.restriction,
                 print::pprust::expr_to_str(*e),
                 expr_requires_semi_to_be_stmt(*e)));
-    ret p.get_restriction() == RESTRICT_STMT_EXPR &&
+    ret p.restriction == RESTRICT_STMT_EXPR &&
         !expr_requires_semi_to_be_stmt(*e);
 }
 
@@ -1662,7 +1630,7 @@ fn stmt_ends_with_semi(stmt: ast::stmt) -> bool {
 }
 
 fn parse_block(p: parser) -> ast::blk {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     if eat_word(p, "unchecked") {
         expect(p, token::LBRACE);
         be parse_block_tail(p, lo, ast::unchecked_blk);
@@ -1689,8 +1657,8 @@ fn parse_block_no_value(p: parser) -> ast::blk {
 fn parse_block_tail(p: parser, lo: uint, s: ast::blk_check_mode) -> ast::blk {
     let view_items = [], stmts = [], expr = none;
     while is_word(p, "import") { view_items += [parse_view_item(p)]; }
-    while p.peek() != token::RBRACE {
-        alt p.peek() {
+    while p.token != token::RBRACE {
+        alt p.token {
           token::SEMI. {
             p.bump(); // empty
           }
@@ -1698,7 +1666,7 @@ fn parse_block_tail(p: parser, lo: uint, s: ast::blk_check_mode) -> ast::blk {
             let stmt = parse_stmt(p);
             alt stmt.node {
               ast::stmt_expr(e, stmt_id) { // Expression without semicolon:
-                alt p.peek() {
+                alt p.token {
                   token::SEMI. {
                     p.bump();
                     stmts += [@{node: ast::stmt_semi(e, stmt_id) with *stmt}];
@@ -1709,7 +1677,7 @@ fn parse_block_tail(p: parser, lo: uint, s: ast::blk_check_mode) -> ast::blk {
                   t {
                     if stmt_ends_with_semi(*stmt) {
                         p.fatal("expected ';' or '}' after expression but \
-                                 found '" + token::to_str(p.get_reader(), t) +
+                                 found '" + token::to_str(p.reader, t) +
                                 "'");
                     }
                     stmts += [stmt];
@@ -1728,7 +1696,7 @@ fn parse_block_tail(p: parser, lo: uint, s: ast::blk_check_mode) -> ast::blk {
           }
         }
     }
-    let hi = p.get_hi_pos();
+    let hi = p.span.hi;
     p.bump();
     let bloc = {view_items: view_items, stmts: stmts, expr: expr,
                 id: p.get_id(), rules: s};
@@ -1739,7 +1707,7 @@ fn parse_ty_param(p: parser) -> ast::ty_param {
     let bounds = [];
     let ident = parse_ident(p);
     if eat(p, token::COLON) {
-        while p.peek() != token::COMMA && p.peek() != token::GT {
+        while p.token != token::COMMA && p.token != token::GT {
             if eat_word(p, "send") { bounds += [ast::bound_send]; }
             else if eat_word(p, "copy") { bounds += [ast::bound_copy]; }
             else { bounds += [ast::bound_iface(parse_ty(p, false))]; }
@@ -1763,7 +1731,7 @@ fn parse_fn_decl(p: parser, purity: ast::purity)
     // mentioned in a constraint to an arg index.
     // Seems weird to do this in the parser, but I'm not sure how else to.
     let constrs = [];
-    if p.peek() == token::COLON {
+    if p.token == token::COLON {
         p.bump();
         constrs = parse_constrs({|x| parse_ty_constr(inputs.node, x) }, p);
     }
@@ -1780,7 +1748,7 @@ fn parse_fn_block_decl(p: parser) -> ast::fn_decl {
         parse_seq(token::BINOP(token::OR), token::BINOP(token::OR),
                   seq_sep(token::COMMA), parse_fn_block_arg, p).node;
     let output = eat(p, token::RARROW) ? parse_ty(p, false) :
-        @spanned(p.get_lo_pos(), p.get_hi_pos(), ast::ty_infer);
+        @spanned(p.span.lo, p.span.hi, ast::ty_infer);
     ret {inputs: inputs,
          output: output,
          purity: ast::impure_fn,
@@ -1805,7 +1773,7 @@ fn mk_item(p: parser, lo: uint, hi: uint, ident: ast::ident, node: ast::item_,
 
 fn parse_item_fn(p: parser, purity: ast::purity,
                  attrs: [ast::attribute]) -> @ast::item {
-    let lo = p.get_last_lo_pos();
+    let lo = p.last_span.lo;
     let t = parse_fn_header(p);
     let decl = parse_fn_decl(p, purity);
     let body = parse_block(p);
@@ -1832,7 +1800,7 @@ fn parse_anon_obj_field(p: parser) -> ast::anon_obj_field {
 }
 
 fn parse_method(p: parser, allow_tps: bool) -> @ast::method {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     expect_word(p, "fn");
     let ident = parse_value_ident(p);
     let tps = allow_tps ? parse_ty_params(p) : [];
@@ -1843,7 +1811,7 @@ fn parse_method(p: parser, allow_tps: bool) -> @ast::method {
 }
 
 fn parse_item_obj(p: parser, attrs: [ast::attribute]) -> @ast::item {
-    let lo = p.get_last_lo_pos();
+    let lo = p.last_span.lo;
     let ident = parse_value_ident(p);
     let ty_params = parse_ty_params(p);
     let fields: ast::spanned<[ast::obj_field]> =
@@ -1851,8 +1819,8 @@ fn parse_item_obj(p: parser, attrs: [ast::attribute]) -> @ast::item {
                   parse_obj_field, p);
     let meths: [@ast::method] = [];
     expect(p, token::LBRACE);
-    while p.peek() != token::RBRACE { meths += [parse_method(p, false)]; }
-    let hi = p.get_hi_pos();
+    while p.token != token::RBRACE { meths += [parse_method(p, false)]; }
+    let hi = p.span.hi;
     expect(p, token::RBRACE);
     let ob: ast::_obj = {fields: fields.node, methods: meths};
     ret mk_item(p, lo, hi, ident, ast::item_obj(ob, ty_params, p.get_id()),
@@ -1860,9 +1828,9 @@ fn parse_item_obj(p: parser, attrs: [ast::attribute]) -> @ast::item {
 }
 
 fn parse_item_iface(p: parser, attrs: [ast::attribute]) -> @ast::item {
-    let lo = p.get_last_lo_pos(), ident = parse_ident(p),
+    let lo = p.last_span.lo, ident = parse_ident(p),
         tps = parse_ty_params(p), meths = parse_ty_methods(p, true);
-    ret mk_item(p, lo, p.get_last_hi_pos(), ident,
+    ret mk_item(p, lo, p.last_span.hi, ident,
                 ast::item_iface(tps, meths), attrs);
 }
 
@@ -1871,12 +1839,12 @@ fn parse_item_iface(p: parser, attrs: [ast::attribute]) -> @ast::item {
 //    impl name<T> of to_str for [T] { ... }
 //    impl name<T> for [T] { ... }
 fn parse_item_impl(p: parser, attrs: [ast::attribute]) -> @ast::item {
-    let lo = p.get_last_lo_pos();
+    let lo = p.last_span.lo;
     fn wrap_path(p: parser, pt: @ast::path) -> @ast::ty {
         @{node: ast::ty_path(pt, p.get_id()), span: pt.span}
     }
     let (ident, tps) = if !is_word(p, "of") {
-        if p.peek() == token::LT { (none, parse_ty_params(p)) }
+        if p.token == token::LT { (none, parse_ty_params(p)) }
         else { (some(parse_ident(p)), parse_ty_params(p)) }
     } else { (none, []) };
     let ifce = if eat_word(p, "of") {
@@ -1894,12 +1862,12 @@ fn parse_item_impl(p: parser, attrs: [ast::attribute]) -> @ast::item {
     let ty = parse_ty(p, false), meths = [];
     expect(p, token::LBRACE);
     while !eat(p, token::RBRACE) { meths += [parse_method(p, true)]; }
-    ret mk_item(p, lo, p.get_last_hi_pos(), ident,
+    ret mk_item(p, lo, p.last_span.hi, ident,
                 ast::item_impl(tps, ifce, ty, meths), attrs);
 }
 
 fn parse_item_res(p: parser, attrs: [ast::attribute]) -> @ast::item {
-    let lo = p.get_last_lo_pos();
+    let lo = p.last_span.lo;
     let ident = parse_value_ident(p);
     let ty_params = parse_ty_params(p);
     expect(p, token::LPAREN);
@@ -1928,14 +1896,14 @@ fn parse_mod_items(p: parser, term: token::token,
         if vec::len(first_item_attrs) == 0u { parse_view(p) } else { [] };
     let items: [@ast::item] = [];
     let initial_attrs = first_item_attrs;
-    while p.peek() != term {
+    while p.token != term {
         let attrs = initial_attrs + parse_outer_attributes(p);
         initial_attrs = [];
         alt parse_item(p, attrs) {
           some(i) { items += [i]; }
           _ {
             p.fatal("expected item but found '" +
-                    token::to_str(p.get_reader(), p.peek()) + "'");
+                    token::to_str(p.reader, p.token) + "'");
           }
         }
     }
@@ -1943,25 +1911,25 @@ fn parse_mod_items(p: parser, term: token::token,
 }
 
 fn parse_item_const(p: parser, attrs: [ast::attribute]) -> @ast::item {
-    let lo = p.get_last_lo_pos();
+    let lo = p.last_span.lo;
     let id = parse_value_ident(p);
     expect(p, token::COLON);
     let ty = parse_ty(p, false);
     expect(p, token::EQ);
     let e = parse_expr(p);
-    let hi = p.get_hi_pos();
+    let hi = p.span.hi;
     expect(p, token::SEMI);
     ret mk_item(p, lo, hi, id, ast::item_const(ty, e), attrs);
 }
 
 fn parse_item_mod(p: parser, attrs: [ast::attribute]) -> @ast::item {
-    let lo = p.get_last_lo_pos();
+    let lo = p.last_span.lo;
     let id = parse_ident(p);
     expect(p, token::LBRACE);
     let inner_attrs = parse_inner_attrs_and_next(p);
     let first_item_outer_attrs = inner_attrs.next;
     let m = parse_mod_items(p, token::RBRACE, first_item_outer_attrs);
-    let hi = p.get_hi_pos();
+    let hi = p.span.hi;
     expect(p, token::RBRACE);
     ret mk_item(p, lo, hi, id, ast::item_mod(m), attrs + inner_attrs.inner);
 }
@@ -1969,7 +1937,7 @@ fn parse_item_mod(p: parser, attrs: [ast::attribute]) -> @ast::item {
 fn parse_item_native_type(p: parser, attrs: [ast::attribute]) ->
    @ast::native_item {
     let t = parse_type_decl(p);
-    let hi = p.get_hi_pos();
+    let hi = p.span.hi;
     expect(p, token::SEMI);
     ret @{ident: t.ident,
           attrs: attrs,
@@ -1980,10 +1948,10 @@ fn parse_item_native_type(p: parser, attrs: [ast::attribute]) ->
 
 fn parse_item_native_fn(p: parser, attrs: [ast::attribute],
                         purity: ast::purity) -> @ast::native_item {
-    let lo = p.get_last_lo_pos();
+    let lo = p.last_span.lo;
     let t = parse_fn_header(p);
     let decl = parse_fn_decl(p, purity);
-    let hi = p.get_hi_pos();
+    let hi = p.span.hi;
     expect(p, token::SEMI);
     ret @{ident: t.ident,
           attrs: attrs,
@@ -2004,7 +1972,7 @@ fn parse_native_item(p: parser, attrs: [ast::attribute]) ->
     } else if eat_word(p, "unsafe") {
         expect_word(p, "fn");
         ret parse_item_native_fn(p, attrs, ast::unsafe_fn);
-    } else { unexpected(p, p.peek()); }
+    } else { unexpected(p, p.token); }
 }
 
 fn parse_native_mod_items(p: parser, first_item_attrs: [ast::attribute]) ->
@@ -2016,7 +1984,7 @@ fn parse_native_mod_items(p: parser, first_item_attrs: [ast::attribute]) ->
         } else { [] };
     let items: [@ast::native_item] = [];
     let initial_attrs = first_item_attrs;
-    while p.peek() != token::RBRACE {
+    while p.token != token::RBRACE {
         let attrs = initial_attrs + parse_outer_attributes(p);
         initial_attrs = [];
         items += [parse_native_item(p, attrs)];
@@ -2026,7 +1994,7 @@ fn parse_native_mod_items(p: parser, first_item_attrs: [ast::attribute]) ->
 }
 
 fn parse_item_native_mod(p: parser, attrs: [ast::attribute]) -> @ast::item {
-    let lo = p.get_last_lo_pos();
+    let lo = p.last_span.lo;
     expect_word(p, "mod");
     let id = parse_ident(p);
     expect(p, token::LBRACE);
@@ -2034,13 +2002,13 @@ fn parse_item_native_mod(p: parser, attrs: [ast::attribute]) -> @ast::item {
     let inner_attrs = more_attrs.inner;
     let first_item_outer_attrs = more_attrs.next;
     let m = parse_native_mod_items(p, first_item_outer_attrs);
-    let hi = p.get_hi_pos();
+    let hi = p.span.hi;
     expect(p, token::RBRACE);
     ret mk_item(p, lo, hi, id, ast::item_native_mod(m), attrs + inner_attrs);
 }
 
 fn parse_type_decl(p: parser) -> {lo: uint, ident: ast::ident} {
-    let lo = p.get_last_lo_pos();
+    let lo = p.last_span.lo;
     let id = parse_ident(p);
     ret {lo: lo, ident: id};
 }
@@ -2050,19 +2018,19 @@ fn parse_item_type(p: parser, attrs: [ast::attribute]) -> @ast::item {
     let tps = parse_ty_params(p);
     expect(p, token::EQ);
     let ty = parse_ty(p, false);
-    let hi = p.get_hi_pos();
+    let hi = p.span.hi;
     expect(p, token::SEMI);
     ret mk_item(p, t.lo, hi, t.ident, ast::item_ty(ty, tps), attrs);
 }
 
 fn parse_item_tag(p: parser, attrs: [ast::attribute]) -> @ast::item {
-    let lo = p.get_last_lo_pos();
+    let lo = p.last_span.lo;
     let id = parse_ident(p);
     let ty_params = parse_ty_params(p);
     let variants: [ast::variant] = [];
     // Newtype syntax
-    if p.peek() == token::EQ {
-        if p.get_bad_expr_words().contains_key(id) {
+    if p.token == token::EQ {
+        if p.bad_expr_words.contains_key(id) {
             p.fatal("found " + id + " in tag constructor position");
         }
         p.bump();
@@ -2082,17 +2050,17 @@ fn parse_item_tag(p: parser, attrs: [ast::attribute]) -> @ast::item {
     let all_nullary = true;
     let have_disr = false;
     let disr_val = 0;
-    while p.peek() != token::RBRACE {
-        let tok = p.peek();
+    while p.token != token::RBRACE {
+        let tok = p.token;
         alt tok {
           token::IDENT(name, _) {
             check_bad_word(p);
-            let vlo = p.get_lo_pos();
+            let vlo = p.span.lo;
             p.bump();
             let args: [ast::variant_arg] = [];
-            let vhi = p.get_hi_pos();
+            let vhi = p.span.hi;
             let disr_expr = none;
-            alt p.peek() {
+            alt p.token {
               token::LPAREN. {
                 all_nullary = false;
                 let arg_tys = parse_seq(token::LPAREN, token::RPAREN,
@@ -2142,11 +2110,11 @@ fn parse_item_tag(p: parser, attrs: [ast::attribute]) -> @ast::item {
           token::RBRACE. {/* empty */ }
           _ {
             p.fatal("expected name of variant or '}' but found '" +
-                        token::to_str(p.get_reader(), tok) + "'");
+                        token::to_str(p.reader, tok) + "'");
           }
         }
     }
-    let hi = p.get_hi_pos();
+    let hi = p.span.hi;
     if (have_disr && !all_nullary) {
         p.fatal("discriminator values can only be used with a c-like enum");
     }
@@ -2155,10 +2123,10 @@ fn parse_item_tag(p: parser, attrs: [ast::attribute]) -> @ast::item {
 }
 
 fn parse_fn_ty_proto(p: parser) -> ast::proto {
-    if p.peek() == token::AT {
+    if p.token == token::AT {
         p.bump();
         ast::proto_box
-    } else if p.peek() == token::TILDE {
+    } else if p.token == token::TILDE {
         p.bump();
         ast::proto_uniq
     } else {
@@ -2218,13 +2186,13 @@ fn parse_item(p: parser, attrs: [ast::attribute]) -> option::t<@ast::item> {
 type attr_or_ext = option::t<either::t<[ast::attribute], @ast::expr>>;
 
 fn parse_outer_attrs_or_ext(p: parser) -> attr_or_ext {
-    if p.peek() == token::POUND {
-        let lo = p.get_lo_pos();
+    if p.token == token::POUND {
+        let lo = p.span.lo;
         p.bump();
-        if p.peek() == token::LBRACKET {
+        if p.token == token::LBRACKET {
             let first_attr = parse_attribute_naked(p, ast::attr_outer, lo);
             ret some(left([first_attr] + parse_outer_attributes(p)));
-        } else if !(p.peek() == token::LT || p.peek() == token::LBRACKET) {
+        } else if !(p.token == token::LT || p.token == token::LBRACKET) {
             ret some(right(parse_syntax_ext_naked(p, lo)));
         } else { ret none; }
     } else { ret none; }
@@ -2233,14 +2201,14 @@ fn parse_outer_attrs_or_ext(p: parser) -> attr_or_ext {
 // Parse attributes that appear before an item
 fn parse_outer_attributes(p: parser) -> [ast::attribute] {
     let attrs: [ast::attribute] = [];
-    while p.peek() == token::POUND {
+    while p.token == token::POUND {
         attrs += [parse_attribute(p, ast::attr_outer)];
     }
     ret attrs;
 }
 
 fn parse_attribute(p: parser, style: ast::attr_style) -> ast::attribute {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     expect(p, token::POUND);
     ret parse_attribute_naked(p, style, lo);
 }
@@ -2250,7 +2218,7 @@ fn parse_attribute_naked(p: parser, style: ast::attr_style, lo: uint) ->
     expect(p, token::LBRACKET);
     let meta_item = parse_meta_item(p);
     expect(p, token::RBRACKET);
-    let hi = p.get_hi_pos();
+    let hi = p.span.hi;
     ret spanned(lo, hi, {style: style, value: *meta_item});
 }
 
@@ -2264,9 +2232,9 @@ fn parse_inner_attrs_and_next(p: parser) ->
    {inner: [ast::attribute], next: [ast::attribute]} {
     let inner_attrs: [ast::attribute] = [];
     let next_outer_attrs: [ast::attribute] = [];
-    while p.peek() == token::POUND {
+    while p.token == token::POUND {
         let attr = parse_attribute(p, ast::attr_inner);
-        if p.peek() == token::SEMI {
+        if p.token == token::SEMI {
             p.bump();
             inner_attrs += [attr];
         } else {
@@ -2282,22 +2250,22 @@ fn parse_inner_attrs_and_next(p: parser) ->
 }
 
 fn parse_meta_item(p: parser) -> @ast::meta_item {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     let ident = parse_ident(p);
-    alt p.peek() {
+    alt p.token {
       token::EQ. {
         p.bump();
         let lit = parse_lit(p);
-        let hi = p.get_hi_pos();
+        let hi = p.span.hi;
         ret @spanned(lo, hi, ast::meta_name_value(ident, lit));
       }
       token::LPAREN. {
         let inner_items = parse_meta_seq(p);
-        let hi = p.get_hi_pos();
+        let hi = p.span.hi;
         ret @spanned(lo, hi, ast::meta_list(ident, inner_items));
       }
       _ {
-        let hi = p.get_hi_pos();
+        let hi = p.span.hi;
         ret @spanned(lo, hi, ast::meta_word(ident));
       }
     }
@@ -2309,7 +2277,7 @@ fn parse_meta_seq(p: parser) -> [@ast::meta_item] {
 }
 
 fn parse_optional_meta(p: parser) -> [@ast::meta_item] {
-    alt p.peek() { token::LPAREN. { ret parse_meta_seq(p); } _ { ret []; } }
+    alt p.token { token::LPAREN. { ret parse_meta_seq(p); } _ { ret []; } }
 }
 
 fn parse_use(p: parser) -> ast::view_item_ {
@@ -2325,7 +2293,7 @@ fn parse_rest_import_name(p: parser, first: ast::ident,
     let glob: bool = false;
     let from_idents = option::none::<[ast::import_ident]>;
     while true {
-        alt p.peek() {
+        alt p.token {
           token::SEMI. { break; }
           token::MOD_SEP. {
             if glob { p.fatal("cannot path into a glob"); }
@@ -2336,7 +2304,7 @@ fn parse_rest_import_name(p: parser, first: ast::ident,
           }
           _ { p.fatal("expecting '::' or ';'"); }
         }
-        alt p.peek() {
+        alt p.token {
           token::IDENT(_, _) { identifiers += [parse_ident(p)]; }
 
 
@@ -2355,9 +2323,9 @@ fn parse_rest_import_name(p: parser, first: ast::ident,
 
           token::LBRACE. {
             fn parse_import_ident(p: parser) -> ast::import_ident {
-                let lo = p.get_lo_pos();
+                let lo = p.span.lo;
                 let ident = parse_ident(p);
-                let hi = p.get_hi_pos();
+                let hi = p.span.hi;
                 ret spanned(lo, hi, {name: ident, id: p.get_id()});
             }
             let from_idents_ =
@@ -2404,7 +2372,7 @@ fn parse_rest_import_name(p: parser, first: ast::ident,
 
 fn parse_full_import_name(p: parser, def_ident: ast::ident) ->
    ast::view_item_ {
-    alt p.peek() {
+    alt p.token {
       token::IDENT(i, _) {
         p.bump();
         ret parse_rest_import_name(p, p.get_str(i), some(def_ident));
@@ -2414,10 +2382,10 @@ fn parse_full_import_name(p: parser, def_ident: ast::ident) ->
 }
 
 fn parse_import(p: parser) -> ast::view_item_ {
-    alt p.peek() {
+    alt p.token {
       token::IDENT(i, _) {
         p.bump();
-        alt p.peek() {
+        alt p.token {
           token::EQ. {
             p.bump();
             ret parse_full_import_name(p, p.get_str(i));
@@ -2437,20 +2405,20 @@ fn parse_export(p: parser) -> ast::view_item_ {
 }
 
 fn parse_view_item(p: parser) -> @ast::view_item {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     let the_item =
         if eat_word(p, "use") {
             parse_use(p)
         } else if eat_word(p, "import") {
             parse_import(p)
         } else if eat_word(p, "export") { parse_export(p) } else { fail };
-    let hi = p.get_lo_pos();
+    let hi = p.span.lo;
     expect(p, token::SEMI);
     ret @spanned(lo, hi, the_item);
 }
 
 fn is_view_item(p: parser) -> bool {
-    alt p.peek() {
+    alt p.token {
       token::IDENT(sid, false) {
         let st = p.get_str(sid);
         ret str::eq(st, "use") || str::eq(st, "import") ||
@@ -2493,19 +2461,19 @@ fn parse_crate_from_source_str(name: str, source: str, cfg: ast::crate_cfg,
 
 // Parses a source module as a crate
 fn parse_crate_mod(p: parser, _cfg: ast::crate_cfg) -> @ast::crate {
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     let crate_attrs = parse_inner_attrs_and_next(p);
     let first_item_outer_attrs = crate_attrs.next;
     let m = parse_mod_items(p, token::EOF, first_item_outer_attrs);
-    ret @spanned(lo, p.get_lo_pos(),
+    ret @spanned(lo, p.span.lo,
                  {directives: [],
                   module: m,
                   attrs: crate_attrs.inner,
-                  config: p.get_cfg()});
+                  config: p.cfg});
 }
 
 fn parse_str(p: parser) -> str {
-    alt p.peek() {
+    alt p.token {
       token::LIT_STR(s) { p.bump(); p.get_str(s) }
       _ {
         p.fatal("expected string literal")
@@ -2526,14 +2494,14 @@ fn parse_crate_directive(p: parser, first_outer_attr: [ast::attribute]) ->
     // In a crate file outer attributes are only going to apply to mods
     let expect_mod = vec::len(outer_attrs) > 0u;
 
-    let lo = p.get_lo_pos();
+    let lo = p.span.lo;
     if expect_mod || is_word(p, "mod") {
         expect_word(p, "mod");
         let id = parse_ident(p);
-        alt p.peek() {
+        alt p.token {
           // mod x = "foo.rs";
           token::SEMI. {
-            let hi = p.get_hi_pos();
+            let hi = p.span.hi;
             p.bump();
             ret spanned(lo, hi, ast::cdir_src_mod(id, outer_attrs));
           }
@@ -2545,7 +2513,7 @@ fn parse_crate_directive(p: parser, first_outer_attr: [ast::attribute]) ->
             let next_outer_attr = inner_attrs.next;
             let cdirs =
                 parse_crate_directives(p, token::RBRACE, next_outer_attr);
-            let hi = p.get_hi_pos();
+            let hi = p.span.hi;
             expect(p, token::RBRACE);
             ret spanned(lo, hi,
                         ast::cdir_dir_mod(id, cdirs, mod_attrs));
@@ -2565,13 +2533,13 @@ fn parse_crate_directives(p: parser, term: token::token,
     // This is pretty ugly. If we have an outer attribute then we can't accept
     // seeing the terminator next, so if we do see it then fail the same way
     // parse_crate_directive would
-    if vec::len(first_outer_attr) > 0u && p.peek() == term {
+    if vec::len(first_outer_attr) > 0u && p.token == term {
         expect_word(p, "mod");
     }
 
     let cdirs: [@ast::crate_directive] = [];
     let first_outer_attr = first_outer_attr;
-    while p.peek() != term {
+    while p.token != term {
         let cdir = @parse_crate_directive(p, first_outer_attr);
         cdirs += [cdir];
         first_outer_attr = [];
@@ -2582,8 +2550,8 @@ fn parse_crate_directives(p: parser, term: token::token,
 fn parse_crate_from_crate_file(input: str, cfg: ast::crate_cfg,
                                sess: parse_sess) -> @ast::crate {
     let p = new_parser_from_file(sess, cfg, input, 0u, 0u, CRATE_FILE);
-    let lo = p.get_lo_pos();
-    let prefix = std::fs::dirname(p.get_filemap().name);
+    let lo = p.span.lo;
+    let prefix = std::fs::dirname(p.reader.filemap.name);
     let leading_attrs = parse_inner_attrs_and_next(p);
     let crate_attrs = leading_attrs.inner;
     let first_cdir_attr = leading_attrs.next;
@@ -2591,19 +2559,19 @@ fn parse_crate_from_crate_file(input: str, cfg: ast::crate_cfg,
     let cx =
         @{p: p,
           sess: sess,
-          mutable chpos: p.get_chpos(),
-          mutable byte_pos: p.get_byte_pos(),
-          cfg: p.get_cfg()};
+          mutable chpos: p.reader.chpos,
+          mutable byte_pos: p.reader.pos,
+          cfg: p.cfg};
     let (companionmod, _) = fs::splitext(fs::basename(input));
     let (m, attrs) = eval::eval_crate_directives_to_mod(
         cx, cdirs, prefix, option::some(companionmod));
-    let hi = p.get_hi_pos();
+    let hi = p.span.hi;
     expect(p, token::EOF);
     ret @spanned(lo, hi,
                  {directives: cdirs,
                   module: m,
                   attrs: crate_attrs + attrs,
-                  config: p.get_cfg()});
+                  config: p.cfg});
 }
 
 fn parse_crate_from_file(input: str, cfg: ast::crate_cfg, sess: parse_sess) ->