Diffstat (limited to 'src/librustsyntax')
-rw-r--r--   src/librustsyntax/ast.rs                    743
-rw-r--r--   src/librustsyntax/ast_map.rs                309
-rw-r--r--   src/librustsyntax/ast_util.rs               554
-rw-r--r--   src/librustsyntax/attr.rs                   386
-rw-r--r--   src/librustsyntax/codemap.rs                236
-rw-r--r--   src/librustsyntax/diagnostic.rs             264
-rw-r--r--   src/librustsyntax/ext/auto_serialize.rs     864
-rw-r--r--   src/librustsyntax/ext/base.rs               234
-rw-r--r--   src/librustsyntax/ext/build.rs               78
-rw-r--r--   src/librustsyntax/ext/concat_idents.rs       15
-rw-r--r--   src/librustsyntax/ext/env.rs                 35
-rw-r--r--   src/librustsyntax/ext/expand.rs             157
-rw-r--r--   src/librustsyntax/ext/fmt.rs                283
-rw-r--r--   src/librustsyntax/ext/ident_to_str.rs        11
-rw-r--r--   src/librustsyntax/ext/log_syntax.rs          15
-rw-r--r--   src/librustsyntax/ext/qquote.rs             337
-rw-r--r--   src/librustsyntax/ext/simplext.rs           778
-rw-r--r--   src/librustsyntax/ext/source_util.rs        115
-rw-r--r--   src/librustsyntax/fold.rs                   745
-rw-r--r--   src/librustsyntax/parse.rs                  164
-rw-r--r--   src/librustsyntax/parse/attr.rs             132
-rw-r--r--   src/librustsyntax/parse/classify.rs          69
-rw-r--r--   src/librustsyntax/parse/comments.rs         203
-rw-r--r--   src/librustsyntax/parse/common.rs           217
-rw-r--r--   src/librustsyntax/parse/eval.rs             142
-rw-r--r--   src/librustsyntax/parse/lexer.rs            536
-rw-r--r--   src/librustsyntax/parse/parser.rs          2557
-rw-r--r--   src/librustsyntax/parse/prec.rs              43
-rw-r--r--   src/librustsyntax/parse/token.rs            286
-rw-r--r--   src/librustsyntax/print/pp.rs               528
-rw-r--r--   src/librustsyntax/print/pprust.rs          1861
-rw-r--r--   src/librustsyntax/rustsyntax.rc              70
-rw-r--r--   src/librustsyntax/util/interner.rs           40
-rw-r--r--   src/librustsyntax/visit.rs                  590
34 files changed, 0 insertions, 13597 deletions
diff --git a/src/librustsyntax/ast.rs b/src/librustsyntax/ast.rs
deleted file mode 100644
index f0a3006b8c6..00000000000
--- a/src/librustsyntax/ast.rs
+++ /dev/null
@@ -1,743 +0,0 @@
-// The Rust abstract syntax tree.
-
-import codemap::{span, filename};
-import std::serialization::{serializer,
-                            deserializer,
-                            serialize_option,
-                            deserialize_option,
-                            serialize_uint,
-                            deserialize_uint,
-                            serialize_int,
-                            deserialize_int,
-                            serialize_i64,
-                            deserialize_i64,
-                            serialize_u64,
-                            deserialize_u64,
-                            serialize_str,
-                            deserialize_str,
-                            serialize_bool,
-                            deserialize_bool};
-
-/* Note #1972 -- spans are serialized but not deserialized */
-fn serialize_span<S>(_s: S, _v: span) {
-}
-
-fn deserialize_span<D>(_d: D) -> span {
-    ast_util::dummy_sp()
-}
-
-#[auto_serialize]
-type spanned<T> = {node: T, span: span};
-
-#[auto_serialize]
-type ident = str;
-
-// Functions may or may not have names.
-#[auto_serialize]
-type fn_ident = option<ident>;
-
-#[auto_serialize]
-type path = {span: span,
-             global: bool,
-             idents: [ident],
-             rp: option<@region>,
-             types: [@ty]};
-
-#[auto_serialize]
-type crate_num = int;
-
-#[auto_serialize]
-type node_id = int;
-
-#[auto_serialize]
-type def_id = {crate: crate_num, node: node_id};
-
-const local_crate: crate_num = 0;
-const crate_node_id: node_id = 0;
-
-#[auto_serialize]
-enum ty_param_bound {
-    bound_copy,
-    bound_send,
-    bound_const,
-    bound_iface(@ty),
-}
-
-#[auto_serialize]
-type ty_param = {ident: ident, id: node_id, bounds: @[ty_param_bound]};
-
-#[auto_serialize]
-enum def {
-    def_fn(def_id, purity),
-    def_self(node_id),
-    def_mod(def_id),
-    def_native_mod(def_id),
-    def_const(def_id),
-    def_arg(node_id, mode),
-    def_local(node_id, bool /* is_mutbl */),
-    def_variant(def_id /* enum */, def_id /* variant */),
-    def_ty(def_id),
-    def_prim_ty(prim_ty),
-    def_ty_param(def_id, uint),
-    def_binding(node_id),
-    def_use(def_id),
-    def_upvar(node_id /* local id of closed over var */,
-              @def    /* closed over def */,
-              node_id /* expr node that creates the closure */),
-    def_class(def_id),
-    def_region(node_id)
-}
-
-// The set of meta_items that define the compilation environment of the crate,
-// used to drive conditional compilation
-type crate_cfg = [@meta_item];
-
-type crate = spanned<crate_>;
-
-type crate_ =
-    {directives: [@crate_directive],
-     module: _mod,
-     attrs: [attribute],
-     config: crate_cfg};
-
-enum crate_directive_ {
-    cdir_src_mod(ident, [attribute]),
-    cdir_dir_mod(ident, [@crate_directive], [attribute]),
-
-    // NB: cdir_view_item is *not* processed by the rest of the compiler, the
-    // attached view_items are sunk into the crate's module during parsing,
-    // and processed (resolved, imported, etc.) there. This enum-variant
-    // exists only to preserve the view items in order in case we decide to
-    // pretty-print crates in the future.
-    cdir_view_item(@view_item),
-
-    cdir_syntax(@path),
-}
-
-type crate_directive = spanned<crate_directive_>;
-
-#[auto_serialize]
-type meta_item = spanned<meta_item_>;
-
-#[auto_serialize]
-enum meta_item_ {
-    meta_word(ident),
-    meta_list(ident, [@meta_item]),
-    meta_name_value(ident, lit),
-}
-
-#[auto_serialize]
-type blk = spanned<blk_>;
-
-#[auto_serialize]
-type blk_ = {view_items: [@view_item], stmts: [@stmt], expr: option<@expr>,
-             id: node_id, rules: blk_check_mode};
-
-#[auto_serialize]
-type pat = {id: node_id, node: pat_, span: span};
-
-#[auto_serialize]
-type field_pat = {ident: ident, pat: @pat};
-
-#[auto_serialize]
-enum pat_ {
-    pat_wild,
-    // A pat_ident may either be a new bound variable,
-    // or a nullary enum (in which case the second field
-    // is none).
-    // In the nullary enum case, the parser can't determine
-    // which it is. The resolver determines this, and
-    // records this pattern's node_id in an auxiliary
-    // set (of "pat_idents that refer to nullary enums")
-    pat_ident(@path, option<@pat>),
-    pat_enum(@path, option<[@pat]>), // "none" means a * pattern where
-                                  // we don't bind the fields to names
-    pat_rec([field_pat], bool),
-    pat_tup([@pat]),
-    pat_box(@pat),
-    pat_uniq(@pat),
-    pat_lit(@expr),
-    pat_range(@expr, @expr),
-}
-
-#[auto_serialize]
-enum mutability { m_mutbl, m_imm, m_const, }
-
-#[auto_serialize]
-enum proto {
-    proto_bare,    // native fn
-    proto_any,     // fn
-    proto_uniq,    // fn~
-    proto_box,     // fn@
-    proto_block,   // fn&
-}
-
-#[auto_serialize]
-enum vstore {
-    /* FIXME: Change uint to @expr (actually only constant exprs,
-       as per #2112)
-     */
-    vstore_fixed(option<uint>),   // [1,2,3,4]/_ or 4
-    vstore_uniq,                  // [1,2,3,4]/~
-    vstore_box,                   // [1,2,3,4]/@
-    vstore_slice(@region)         // [1,2,3,4]/&(foo)?
-}
-
-pure fn is_blockish(p: ast::proto) -> bool {
-    alt p {
-      proto_any | proto_block { true }
-      proto_bare | proto_uniq | proto_box { false }
-    }
-}
-
-#[auto_serialize]
-enum binop {
-    add,
-    subtract,
-    mul,
-    div,
-    rem,
-    and,
-    or,
-    bitxor,
-    bitand,
-    bitor,
-    shl,
-    shr,
-    eq,
-    lt,
-    le,
-    ne,
-    ge,
-    gt,
-}
-
-#[auto_serialize]
-enum unop {
-    box(mutability),
-    uniq(mutability),
-    deref, not, neg
-}
-
-// Generally, after typeck you can get the inferred value
-// using ty::resolved_T(...).
-#[auto_serialize]
-enum inferable<T> {
-    expl(T), infer(node_id)
-}
-
-// "resolved" mode: the real modes.
-#[auto_serialize]
-enum rmode { by_ref, by_val, by_mutbl_ref, by_move, by_copy }
-
-// inferable mode.
-#[auto_serialize]
-type mode = inferable<rmode>;
-
-#[auto_serialize]
-type stmt = spanned<stmt_>;
-
-#[auto_serialize]
-enum stmt_ {
-    stmt_decl(@decl, node_id),
-
-    // expr without trailing semi-colon (must have unit type):
-    stmt_expr(@expr, node_id),
-
-    // expr with trailing semi-colon (may have any type):
-    stmt_semi(@expr, node_id),
-}
-
-#[auto_serialize]
-enum init_op { init_assign, init_move, }
-
-#[auto_serialize]
-type initializer = {op: init_op, expr: @expr};
-
-#[auto_serialize]
-type local_ =  /* FIXME: should really be a refinement on pat
-                  (pending discussion of #1697, #2178...)
-                */
-    {is_mutbl: bool, ty: @ty, pat: @pat,
-     init: option<initializer>, id: node_id};
-
-#[auto_serialize]
-type local = spanned<local_>;
-
-#[auto_serialize]
-type decl = spanned<decl_>;
-
-#[auto_serialize]
-enum decl_ { decl_local([@local]), decl_item(@item), }
-
-#[auto_serialize]
-type arm = {pats: [@pat], guard: option<@expr>, body: blk};
-
-#[auto_serialize]
-type field_ = {mutbl: mutability, ident: ident, expr: @expr};
-
-#[auto_serialize]
-type field = spanned<field_>;
-
-#[auto_serialize]
-enum blk_check_mode { default_blk, unchecked_blk, unsafe_blk, }
-
-#[auto_serialize]
-enum expr_check_mode { claimed_expr, checked_expr, }
-
-#[auto_serialize]
-type expr = {id: node_id, node: expr_, span: span};
-
-#[auto_serialize]
-enum alt_mode { alt_check, alt_exhaustive, }
-
-#[auto_serialize]
-enum expr_ {
-    expr_vstore(@expr, vstore),
-    expr_vec([@expr], mutability),
-    expr_rec([field], option<@expr>),
-    expr_call(@expr, [@expr], bool), // True iff last argument is a block
-    expr_tup([@expr]),
-    expr_bind(@expr, [option<@expr>]),
-    expr_binary(binop, @expr, @expr),
-    expr_unary(unop, @expr),
-    expr_lit(@lit),
-    expr_cast(@expr, @ty),
-    expr_if(@expr, blk, option<@expr>),
-    expr_while(@expr, blk),
-    /* Conditionless loop (can be exited with break, cont, ret, or fail)
-       Same semantics as while(true) { body }, but typestate knows that the
-       (implicit) condition is always true. */
-    expr_loop(blk),
-    expr_alt(@expr, [arm], alt_mode),
-    expr_fn(proto, fn_decl, blk, capture_clause),
-    expr_fn_block(fn_decl, blk, capture_clause),
-    // Inner expr is always an expr_fn_block. We need the wrapping node to
-    // sanely type this (a function returning nil on the inside but bool on
-    // the outside).
-    expr_loop_body(@expr),
-    expr_block(blk),
-
-    /*
-     * FIXME: many of these @exprs should be constrained with
-     * is_lval once we have constrained types working.
-     * (See #34)
-     */
-    expr_copy(@expr),
-    expr_move(@expr, @expr),
-    expr_assign(@expr, @expr),
-    expr_swap(@expr, @expr),
-    expr_assign_op(binop, @expr, @expr),
-    expr_field(@expr, ident, [@ty]),
-    expr_index(@expr, @expr),
-    expr_path(@path),
-    expr_addr_of(mutability, @expr),
-    expr_fail(option<@expr>),
-    expr_break,
-    expr_cont,
-    expr_ret(option<@expr>),
-    expr_log(int, @expr, @expr),
-
-    expr_new(/* arena */ @expr,
-             /* id for the alloc() call */ node_id,
-             /* value */ @expr),
-
-    /* just an assert, no significance to typestate */
-    expr_assert(@expr),
-
-    /* preds that typestate is aware of */
-    expr_check(expr_check_mode, @expr),
-    expr_if_check(@expr, blk, option<@expr>),
-    expr_mac(mac),
-}
-
-#[auto_serialize]
-type capture_item = @{
-    id: int,
-    is_move: bool,
-    name: ident, // Currently, can only capture a local var.
-    span: span
-};
-
-#[auto_serialize]
-type capture_clause = @[capture_item];
-
-/*
-// Says whether this is a block the user marked as
-// "unchecked"
-enum blk_sort {
-    blk_unchecked, // declared as "exception to effect-checking rules"
-    blk_checked, // all typing rules apply
-}
-*/
-
-#[auto_serialize]
-type mac = spanned<mac_>;
-
-#[auto_serialize]
-type mac_arg = option<@expr>;
-
-#[auto_serialize]
-type mac_body_ = {span: span};
-
-#[auto_serialize]
-type mac_body = option<mac_body_>;
-
-#[auto_serialize]
-enum mac_ {
-    mac_invoc(@path, mac_arg, mac_body),
-    mac_embed_type(@ty),
-    mac_embed_block(blk),
-    mac_ellipsis,
-    // the span is used by the quoter/anti-quoter ...
-    mac_aq(span /* span of quote */, @expr), // anti-quote
-    mac_var(uint)
-}
-
-#[auto_serialize]
-type lit = spanned<lit_>;
-
-#[auto_serialize]
-enum lit_ {
-    lit_str(str),
-    lit_int(i64, int_ty),
-    lit_uint(u64, uint_ty),
-    lit_float(str, float_ty),
-    lit_nil,
-    lit_bool(bool),
-}
-
-// NB: If you change this, you'll probably want to change the corresponding
-// type structure in middle/ty.rs as well.
-#[auto_serialize]
-type mt = {ty: @ty, mutbl: mutability};
-
-#[auto_serialize]
-type ty_field_ = {ident: ident, mt: mt};
-
-#[auto_serialize]
-type ty_field = spanned<ty_field_>;
-
-#[auto_serialize]
-type ty_method = {ident: ident, attrs: [attribute],
-                  decl: fn_decl, tps: [ty_param], span: span};
-
-#[auto_serialize]
-enum int_ty { ty_i, ty_char, ty_i8, ty_i16, ty_i32, ty_i64, }
-
-#[auto_serialize]
-enum uint_ty { ty_u, ty_u8, ty_u16, ty_u32, ty_u64, }
-
-#[auto_serialize]
-enum float_ty { ty_f, ty_f32, ty_f64, }
-
-#[auto_serialize]
-type ty = {id: node_id, node: ty_, span: span};
-
-// Not represented directly in the AST, referred to by name through a ty_path.
-#[auto_serialize]
-enum prim_ty {
-    ty_int(int_ty),
-    ty_uint(uint_ty),
-    ty_float(float_ty),
-    ty_str,
-    ty_bool,
-}
-
-#[auto_serialize]
-type region = {id: node_id, node: region_};
-
-#[auto_serialize]
-enum region_ { re_anon, re_named(ident) }
-
-#[auto_serialize]
-enum ty_ {
-    ty_nil,
-    ty_bot, /* bottom type */
-    ty_box(mt),
-    ty_uniq(mt),
-    ty_vec(mt),
-    ty_ptr(mt),
-    ty_rptr(@region, mt),
-    ty_rec([ty_field]),
-    ty_fn(proto, fn_decl),
-    ty_tup([@ty]),
-    ty_path(@path, node_id),
-    ty_constr(@ty, [@ty_constr]),
-    ty_vstore(@ty, vstore),
-    ty_mac(mac),
-    // ty_infer means the type should be inferred instead of it having been
-    // specified. This should only appear at the "top level" of a type and not
-    // nested in one.
-    ty_infer,
-}
-
-
-/*
-A constraint arg that's a function argument is referred to by its position
-rather than name.  This is so we could have higher-order functions that have
-constraints (potentially -- right now there's no way to write that), and also
-so that the typestate pass doesn't have to map a function name onto its decl.
-So, the constr_arg type is parameterized: it's instantiated with uint for
-declarations, and ident for uses.
-*/
-#[auto_serialize]
-enum constr_arg_general_<T> { carg_base, carg_ident(T), carg_lit(@lit), }
-
-#[auto_serialize]
-type fn_constr_arg = constr_arg_general_<uint>;
-
-#[auto_serialize]
-type sp_constr_arg<T> = spanned<constr_arg_general_<T>>;
-
-#[auto_serialize]
-type ty_constr_arg = sp_constr_arg<@path>;
-
-#[auto_serialize]
-type constr_arg = spanned<fn_constr_arg>;
-
-// Constrained types' args are parameterized by paths, since
-// we refer to paths directly and not by indices.
-// The implicit root of such path, in the constraint-list for a
-// constrained type, is * (referring to the base record)
-
-#[auto_serialize]
-type constr_general_<ARG, ID> =
-    {path: @path, args: [@sp_constr_arg<ARG>], id: ID};
-
-// In the front end, constraints have a node ID attached.
-// Typeck turns this to a def_id, using the output of resolve.
-#[auto_serialize]
-type constr_general<ARG> = spanned<constr_general_<ARG, node_id>>;
-
-#[auto_serialize]
-type constr_ = constr_general_<uint, node_id>;
-
-#[auto_serialize]
-type constr = spanned<constr_general_<uint, node_id>>;
-
-#[auto_serialize]
-type ty_constr_ = constr_general_<@path, node_id>;
-
-#[auto_serialize]
-type ty_constr = spanned<ty_constr_>;
-
-/* The parser generates ast::constrs; resolve generates
- a mapping from each function to a list of ty::constr_defs,
- corresponding to these. */
-#[auto_serialize]
-type arg = {mode: mode, ty: @ty, ident: ident, id: node_id};
-
-#[auto_serialize]
-type fn_decl =
-    {inputs: [arg],
-     output: @ty,
-     purity: purity,
-     cf: ret_style,
-     constraints: [@constr]};
-
-#[auto_serialize]
-enum purity {
-    pure_fn, // declared with "pure fn"
-    unsafe_fn, // declared with "unsafe fn"
-    impure_fn, // declared with "fn"
-    crust_fn, // declared with "crust fn"
-}
-
-#[auto_serialize]
-enum ret_style {
-    noreturn, // functions with return type _|_ that always
-              // raise an error or exit (i.e. never return to the caller)
-    return_val, // everything else
-}
-
-#[auto_serialize]
-type method = {ident: ident, attrs: [attribute],
-               tps: [ty_param], decl: fn_decl, body: blk,
-               id: node_id, span: span, self_id: node_id,
-               vis: visibility};  // always public, unless it's a
-                                  // class method
-
-#[auto_serialize]
-type _mod = {view_items: [@view_item], items: [@item]};
-
-#[auto_serialize]
-enum native_abi {
-    native_abi_rust_intrinsic,
-    native_abi_cdecl,
-    native_abi_stdcall,
-}
-
-#[auto_serialize]
-type native_mod =
-    {view_items: [@view_item],
-     items: [@native_item]};
-
-#[auto_serialize]
-type variant_arg = {ty: @ty, id: node_id};
-
-#[auto_serialize]
-type variant_ = {name: ident, attrs: [attribute], args: [variant_arg],
-                 id: node_id, disr_expr: option<@expr>, vis: visibility};
-
-#[auto_serialize]
-type variant = spanned<variant_>;
-
-#[auto_serialize]
-type path_list_ident_ = {name: ident, id: node_id};
-
-#[auto_serialize]
-type path_list_ident = spanned<path_list_ident_>;
-
-#[auto_serialize]
-type view_path = spanned<view_path_>;
-
-#[auto_serialize]
-enum view_path_ {
-
-    // quux = foo::bar::baz
-    //
-    // or just
-    //
-    // foo::bar::baz  (with 'baz =' implicitly on the left)
-    view_path_simple(ident, @path, node_id),
-
-    // foo::bar::*
-    view_path_glob(@path, node_id),
-
-    // foo::bar::{a,b,c}
-    view_path_list(@path, [path_list_ident], node_id)
-}
-
-#[auto_serialize]
-type view_item = {node: view_item_, attrs: [attribute],
-                  vis: visibility, span: span};
-
-#[auto_serialize]
-enum view_item_ {
-    view_item_use(ident, [@meta_item], node_id),
-    view_item_import([@view_path]),
-    view_item_export([@view_path])
-}
-
-// Meta-data associated with an item
-#[auto_serialize]
-type attribute = spanned<attribute_>;
-
-// Distinguishes between attributes that decorate items and attributes that
-// are contained as statements within items. These two cases need to be
-// distinguished for pretty-printing.
-#[auto_serialize]
-enum attr_style { attr_outer, attr_inner, }
-
-#[auto_serialize]
-type attribute_ = {style: attr_style, value: meta_item};
-
-/*
-  iface_refs appear in both impls and in classes that implement ifaces.
-  resolve maps each iface_ref's id to its defining iface.
- */
-#[auto_serialize]
-type iface_ref = {path: @path, id: node_id};
-
-#[auto_serialize]
-enum visibility { public, private }
-
-#[auto_serialize]
-type item = {ident: ident, attrs: [attribute],
-             id: node_id, node: item_,
-             vis: visibility, span: span};
-
-#[auto_serialize]
-enum region_param {
-    rp_none,
-    rp_self
-}
-
-#[auto_serialize]
-enum item_ {
-    item_const(@ty, @expr),
-    item_fn(fn_decl, [ty_param], blk),
-    item_mod(_mod),
-    item_native_mod(native_mod),
-    item_ty(@ty, [ty_param], region_param),
-    item_enum([variant], [ty_param], region_param),
-    item_res(fn_decl /* dtor */, [ty_param], blk /* dtor body */,
-             node_id /* dtor id */, node_id /* ctor id */,
-             region_param),
-    item_class([ty_param], /* ty params for class */
-               [@iface_ref],   /* ifaces this class implements */
-               [@class_member], /* methods, etc. */
-                               /* (not including ctor or dtor) */
-               class_ctor,
-               /* dtor is optional */
-               option<class_dtor>,
-               region_param
-               ),
-    item_iface([ty_param], region_param, [ty_method]),
-    item_impl([ty_param], region_param, option<@iface_ref> /* iface */,
-              @ty /* self */, [@method]),
-}
-
-#[auto_serialize]
-type class_member = spanned<class_member_>;
-
-#[auto_serialize]
-enum class_member_ {
-    instance_var(ident, @ty, class_mutability, node_id, visibility),
-    class_method(@method)
-}
-
-#[auto_serialize]
-enum class_mutability { class_mutable, class_immutable }
-
-#[auto_serialize]
-type class_ctor = spanned<class_ctor_>;
-
-#[auto_serialize]
-type class_ctor_ = {id: node_id,
-                    self_id: node_id,
-                    dec: fn_decl,
-                    body: blk};
-
-#[auto_serialize]
-type class_dtor = spanned<class_dtor_>;
-
-#[auto_serialize]
-type class_dtor_ = {id: node_id,
-                    self_id: node_id,
-                    body: blk};
-
-#[auto_serialize]
-type native_item =
-    {ident: ident,
-     attrs: [attribute],
-     node: native_item_,
-     id: node_id,
-     span: span};
-
-#[auto_serialize]
-enum native_item_ {
-    native_item_fn(fn_decl, [ty_param]),
-}
-
-// The data we save and restore about an inlined item or method.  This is not
-// part of the AST that we parse from a file, but it becomes part of the tree
-// that we trans.
-#[auto_serialize]
-enum inlined_item {
-    ii_item(@item),
-    ii_method(def_id /* impl id */, @method),
-    ii_native(@native_item),
-    ii_ctor(class_ctor, ident, [ty_param], def_id /* parent id */)
-}
-
-//
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
-//
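Before the next file: the ast.rs just shown builds nearly every AST node out of the `spanned<T>` record plus the `respan`/`dummy_sp` helpers, so each node carries the source region it was parsed from (and, per note #1972, spans are dropped on serialization and come back as dummies). A minimal sketch of that pairing pattern in present-day Rust syntax; the struct and helper names below are illustrative, not the compiler's actual definitions:

    // A node paired with the source region it was parsed from.
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span { lo: usize, hi: usize }

    #[derive(Clone, Debug, PartialEq)]
    struct Spanned<T> { node: T, span: Span }

    // Counterpart of the old `respan`: wrap an already-built node in a span.
    fn respan<T>(span: Span, node: T) -> Spanned<T> {
        Spanned { node, span }
    }

    // Counterpart of `dummy_sp`: the placeholder used when no real span exists,
    // e.g. for nodes rebuilt from serialized data.
    fn dummy_sp() -> Span {
        Span { lo: 0, hi: 0 }
    }

    fn main() {
        let lit = respan(Span { lo: 4, hi: 6 }, 42u32);
        assert_eq!(lit.node, 42);
        assert_eq!(respan(dummy_sp(), "x").span, Span { lo: 0, hi: 0 });
    }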
diff --git a/src/librustsyntax/ast_map.rs b/src/librustsyntax/ast_map.rs
deleted file mode 100644
index fbea5026b3c..00000000000
--- a/src/librustsyntax/ast_map.rs
+++ /dev/null
@@ -1,309 +0,0 @@
-import std::map;
-import std::map::hashmap;
-import ast::*;
-import print::pprust;
-import ast_util::path_to_ident;
-import ast_util::inlined_item_methods;
-import diagnostic::span_handler;
-
-enum path_elt { path_mod(str), path_name(str) }
-type path = [path_elt];
-
-fn path_to_str_with_sep(p: path, sep: str) -> str {
-    let strs = vec::map(p) {|e|
-        alt e {
-          path_mod(s) { s }
-          path_name(s) { s }
-        }
-    };
-    str::connect(strs, sep)
-}
-
-fn path_ident_to_str(p: path, i: ident) -> str {
-    if vec::is_empty(p) {
-        i
-    } else {
-        #fmt["%s::%s", path_to_str(p), i]
-    }
-}
-
-fn path_to_str(p: path) -> str {
-    path_to_str_with_sep(p, "::")
-}
-
-enum ast_node {
-    node_item(@item, @path),
-    node_native_item(@native_item, native_abi, @path),
-    node_method(@method, def_id /* impl did */, @path /* path to the impl */),
-    node_variant(variant, @item, @path),
-    node_expr(@expr),
-    node_export(@view_path, @path),
-    // Locals are numbered, because the alias analysis needs to know in which
-    // order they are introduced.
-    node_arg(arg, uint),
-    node_local(uint),
-    // Constructor for either a resource or a class
-    node_ctor(ident, [ty_param], a_ctor, @path),
-    // Destructor for a class
-    node_dtor([ty_param], @class_dtor, def_id, @path),
-    node_block(blk),
-}
-
-enum a_ctor {
-  res_ctor(fn_decl, node_id, codemap::span),
-  class_ctor(@class_ctor, def_id /* ID for parent class */),
-}
-
-type map = std::map::hashmap<node_id, ast_node>;
-type ctx = {map: map, mut path: path,
-            mut local_id: uint, diag: span_handler};
-type vt = visit::vt<ctx>;
-
-fn extend(cx: ctx, elt: str) -> @path {
-    @(cx.path + [path_name(elt)])
-}
-
-fn mk_ast_map_visitor() -> vt {
-    ret visit::mk_vt(@{
-        visit_item: map_item,
-        visit_expr: map_expr,
-        visit_fn: map_fn,
-        visit_local: map_local,
-        visit_arm: map_arm,
-        visit_view_item: map_view_item,
-        visit_block: map_block
-        with *visit::default_visitor()
-    });
-}
-
-fn map_crate(diag: span_handler, c: crate) -> map {
-    let cx = {map: std::map::int_hash(),
-              mut path: [],
-              mut local_id: 0u,
-              diag: diag};
-    visit::visit_crate(c, cx, mk_ast_map_visitor());
-    ret cx.map;
-}
-
-// Used for items loaded from external crate that are being inlined into this
-// crate.  The `path` should be the path to the item but should not include
-// the item itself.
-fn map_decoded_item(diag: span_handler,
-                    map: map, path: path, ii: inlined_item) {
-    // I believe it is ok for the local IDs of inlined items from other crates
-    // to overlap with the local ids from this crate, so just generate the ids
-    // starting from 0.  (In particular, I think these ids are only used in
-    // alias analysis, which we will not be running on the inlined items, and
-    // even if we did I think it only needs an ordering between local
-    // variables that are simultaneously in scope).
-    let cx = {map: map,
-              mut path: path,
-              mut local_id: 0u,
-              diag: diag};
-    let v = mk_ast_map_visitor();
-
-    // methods get added to the AST map when their impl is visited.  Since we
-    // don't decode and instantiate the impl, but just the method, we have to
-    // add it to the table now:
-    alt ii {
-      ii_item(_) | ii_ctor(_,_,_,_) { /* fallthrough */ }
-      ii_native(i) {
-        cx.map.insert(i.id, node_native_item(i, native_abi_rust_intrinsic,
-                                             @path));
-      }
-      ii_method(impl_did, m) {
-        map_method(impl_did, @path, m, cx);
-      }
-    }
-
-    // visit the item / method contents and add those to the map:
-    ii.accept(cx, v);
-}
-
-fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
-          sp: codemap::span, id: node_id, cx: ctx, v: vt) {
-    for decl.inputs.each {|a|
-        cx.map.insert(a.id, node_arg(a, cx.local_id));
-        cx.local_id += 1u;
-    }
-    alt fk {
-      visit::fk_ctor(nm, tps, self_id, parent_id) {
-          let ct = @{node: {id: id, self_id: self_id,
-                           dec: decl, body: body},
-                    span: sp};
-          cx.map.insert(id, node_ctor(nm, tps, class_ctor(ct, parent_id),
-                                      @cx.path));
-       }
-      visit::fk_dtor(tps, self_id, parent_id) {
-          let dt = @{node: {id: id, self_id: self_id, body: body},
-                    span: sp};
-          cx.map.insert(id, node_dtor(tps, dt, parent_id, @cx.path));
-       }
-
-       _ {}
-    }
-    visit::visit_fn(fk, decl, body, sp, id, cx, v);
-}
-
-fn map_block(b: blk, cx: ctx, v: vt) {
-    cx.map.insert(b.node.id, node_block(b));
-    visit::visit_block(b, cx, v);
-}
-
-fn number_pat(cx: ctx, pat: @pat) {
-    ast_util::walk_pat(pat) {|p|
-        alt p.node {
-          pat_ident(_, _) {
-            cx.map.insert(p.id, node_local(cx.local_id));
-            cx.local_id += 1u;
-          }
-          _ {}
-        }
-    };
-}
-
-fn map_local(loc: @local, cx: ctx, v: vt) {
-    number_pat(cx, loc.node.pat);
-    visit::visit_local(loc, cx, v);
-}
-
-fn map_arm(arm: arm, cx: ctx, v: vt) {
-    number_pat(cx, arm.pats[0]);
-    visit::visit_arm(arm, cx, v);
-}
-
-fn map_method(impl_did: def_id, impl_path: @path,
-              m: @method, cx: ctx) {
-    cx.map.insert(m.id, node_method(m, impl_did, impl_path));
-    cx.map.insert(m.self_id, node_local(cx.local_id));
-    cx.local_id += 1u;
-}
-
-fn map_item(i: @item, cx: ctx, v: vt) {
-    let item_path = @cx.path;
-    cx.map.insert(i.id, node_item(i, item_path));
-    alt i.node {
-      item_impl(_, _, _, _, ms) {
-        let impl_did = ast_util::local_def(i.id);
-        for ms.each {|m|
-            map_method(impl_did, extend(cx, i.ident), m, cx);
-        }
-      }
-      item_res(decl, tps, _, dtor_id, ctor_id, _) {
-        cx.map.insert(ctor_id, node_ctor(i.ident, tps,
-                                         res_ctor(decl, ctor_id, i.span),
-                                         item_path));
-        cx.map.insert(dtor_id, node_item(i, item_path));
-      }
-      item_enum(vs, _, _) {
-        for vs.each {|v|
-            cx.map.insert(v.node.id, node_variant(
-                v, i, extend(cx, i.ident)));
-        }
-      }
-      item_native_mod(nm) {
-        let abi = alt attr::native_abi(i.attrs) {
-          either::left(msg) { cx.diag.span_fatal(i.span, msg); }
-          either::right(abi) { abi }
-        };
-        for nm.items.each {|nitem|
-            cx.map.insert(nitem.id, node_native_item(nitem, abi, @cx.path));
-        }
-      }
-      item_class(tps, ifces, items, ctor, dtor, _) {
-          let (_, ms) = ast_util::split_class_items(items);
-          // Map iface refs to their parent classes. This is
-          // so we can find the self_ty
-          vec::iter(ifces) {|p| cx.map.insert(p.id,
-                                  node_item(i, item_path)); };
-          let d_id = ast_util::local_def(i.id);
-          let p = extend(cx, i.ident);
-           // only need to handle methods
-          vec::iter(ms) {|m| map_method(d_id, p, m, cx); }
-      }
-      _ { }
-    }
-    alt i.node {
-      item_mod(_) | item_native_mod(_) {
-        cx.path += [path_mod(i.ident)];
-      }
-      _ { cx.path += [path_name(i.ident)]; }
-    }
-    visit::visit_item(i, cx, v);
-    vec::pop(cx.path);
-}
-
-fn map_view_item(vi: @view_item, cx: ctx, _v: vt) {
-    alt vi.node {
-      view_item_export(vps) {
-        for vps.each {|vp|
-            let (id, name) = alt vp.node {
-              view_path_simple(nm, _, id) { (id, nm) }
-              view_path_glob(pth, id) | view_path_list(pth, _, id) {
-                (id, path_to_ident(pth))
-              }
-            };
-            cx.map.insert(id, node_export(vp, extend(cx, name)));
-        }
-      }
-      _ {}
-    }
-}
-
-fn map_expr(ex: @expr, cx: ctx, v: vt) {
-    cx.map.insert(ex.id, node_expr(ex));
-    visit::visit_expr(ex, cx, v);
-}
-
-fn node_id_to_str(map: map, id: node_id) -> str {
-    alt map.find(id) {
-      none {
-        #fmt["unknown node (id=%d)", id]
-      }
-      some(node_item(item, path)) {
-        #fmt["item %s (id=%?)", path_ident_to_str(*path, item.ident), id]
-      }
-      some(node_native_item(item, abi, path)) {
-        #fmt["native item %s with abi %? (id=%?)",
-             path_ident_to_str(*path, item.ident), abi, id]
-      }
-      some(node_method(m, impl_did, path)) {
-        #fmt["method %s in %s (id=%?)",
-             m.ident, path_to_str(*path), id]
-      }
-      some(node_variant(variant, def_id, path)) {
-        #fmt["variant %s in %s (id=%?)",
-             variant.node.name, path_to_str(*path), id]
-      }
-      some(node_expr(expr)) {
-        #fmt["expr %s (id=%?)",
-             pprust::expr_to_str(expr), id]
-      }
-      some(node_export(_, path)) {
-        #fmt["export %s (id=%?)", // FIXME: add more info here
-             path_to_str(*path), id]
-      }
-      some(node_arg(_, _)) { // FIXME: add more info here
-        #fmt["arg (id=%?)", id]
-      }
-      some(node_local(_)) { // FIXME: add more info here
-        #fmt["local (id=%?)", id]
-      }
-      some(node_ctor(*)) { // FIXME: add more info here
-        #fmt["node_ctor (id=%?)", id]
-      }
-      some(node_dtor(*)) { // FIXME: add more info here
-        #fmt["node_dtor (id=%?)", id]
-      }
-      some(node_block(_)) {
-        #fmt["block"]
-      }
-    }
-}
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
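The ast_map.rs above indexes every node ID to its AST node together with the module path under which it was found; a path is just a vector of `path_mod`/`path_name` elements that gets joined with `::` for diagnostics. A rough re-expression of that path representation in current Rust (the type and function names here are stand-ins for illustration):

    #[derive(Clone, Debug)]
    enum PathElt {
        Mod(String),  // a module segment, e.g. `ext`
        Name(String), // a terminal item name, e.g. `expand_expr`
    }

    // Mirrors the old `path_to_str_with_sep`: render every element and join.
    fn path_to_string(path: &[PathElt], sep: &str) -> String {
        path.iter()
            .map(|e| match e {
                PathElt::Mod(s) | PathElt::Name(s) => s.as_str(),
            })
            .collect::<Vec<_>>()
            .join(sep)
    }

    fn main() {
        let p = [
            PathElt::Mod("syntax".to_string()),
            PathElt::Mod("ext".to_string()),
            PathElt::Name("expand_expr".to_string()),
        ];
        assert_eq!(path_to_string(&p, "::"), "syntax::ext::expand_expr");
    }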
diff --git a/src/librustsyntax/ast_util.rs b/src/librustsyntax/ast_util.rs
deleted file mode 100644
index cee2032ed0d..00000000000
--- a/src/librustsyntax/ast_util.rs
+++ /dev/null
@@ -1,554 +0,0 @@
-import codemap::span;
-import ast::*;
-
-pure fn spanned<T: copy>(lo: uint, hi: uint, t: T) -> spanned<T> {
-    respan(mk_sp(lo, hi), t)
-}
-
-pure fn respan<T: copy>(sp: span, t: T) -> spanned<T> {
-    {node: t, span: sp}
-}
-
-pure fn dummy_spanned<T: copy>(t: T) -> spanned<T> {
-    respan(dummy_sp(), t)
-}
-
-/* assuming that we're not in macro expansion */
-pure fn mk_sp(lo: uint, hi: uint) -> span {
-    {lo: lo, hi: hi, expn_info: none}
-}
-
-// make this a const, once the compiler supports it
-pure fn dummy_sp() -> span { ret mk_sp(0u, 0u); }
-
-fn path_name(p: @path) -> str { path_name_i(p.idents) }
-
-fn path_name_i(idents: [ident]) -> str { str::connect(idents, "::") }
-
-fn path_to_ident(p: @path) -> ident { vec::last(p.idents) }
-
-fn local_def(id: node_id) -> def_id { {crate: local_crate, node: id} }
-
-pure fn is_local(did: ast::def_id) -> bool { did.crate == local_crate }
-
-fn stmt_id(s: stmt) -> node_id {
-    alt s.node {
-      stmt_decl(_, id) { id }
-      stmt_expr(_, id) { id }
-      stmt_semi(_, id) { id }
-    }
-}
-
-fn variant_def_ids(d: def) -> {enm: def_id, var: def_id} {
-    alt d { def_variant(enum_id, var_id) {
-            ret {enm: enum_id, var: var_id}; }
-        _ { fail "non-variant in variant_def_ids"; } }
-}
-
-fn def_id_of_def(d: def) -> def_id {
-    alt d {
-      def_fn(id, _) | def_mod(id) |
-      def_native_mod(id) | def_const(id) |
-      def_variant(_, id) | def_ty(id) | def_ty_param(id, _) |
-      def_use(id) | def_class(id) { id }
-      def_arg(id, _) | def_local(id, _) | def_self(id) |
-      def_upvar(id, _, _) | def_binding(id) | def_region(id) {
-        local_def(id)
-      }
-
-      def_prim_ty(_) { fail; }
-    }
-}
-
-fn binop_to_str(op: binop) -> str {
-    alt op {
-      add { ret "+"; }
-      subtract { ret "-"; }
-      mul { ret "*"; }
-      div { ret "/"; }
-      rem { ret "%"; }
-      and { ret "&&"; }
-      or { ret "||"; }
-      bitxor { ret "^"; }
-      bitand { ret "&"; }
-      bitor { ret "|"; }
-      shl { ret "<<"; }
-      shr { ret ">>"; }
-      eq { ret "=="; }
-      lt { ret "<"; }
-      le { ret "<="; }
-      ne { ret "!="; }
-      ge { ret ">="; }
-      gt { ret ">"; }
-    }
-}
-
-pure fn lazy_binop(b: binop) -> bool {
-    alt b { and { true } or { true } _ { false } }
-}
-
-pure fn is_shift_binop(b: binop) -> bool {
-    alt b {
-      shl { true }
-      shr { true }
-      _ { false }
-    }
-}
-
-fn unop_to_str(op: unop) -> str {
-    alt op {
-      box(mt) { if mt == m_mutbl { ret "@mut "; } ret "@"; }
-      uniq(mt) { if mt == m_mutbl { ret "~mut "; } ret "~"; }
-      deref { ret "*"; }
-      not { ret "!"; }
-      neg { ret "-"; }
-    }
-}
-
-fn is_path(e: @expr) -> bool {
-    ret alt e.node { expr_path(_) { true } _ { false } };
-}
-
-fn int_ty_to_str(t: int_ty) -> str {
-    alt t {
-      ty_char { "u8" } // ???
-      ty_i { "" } ty_i8 { "i8" } ty_i16 { "i16" }
-      ty_i32 { "i32" } ty_i64 { "i64" }
-    }
-}
-
-fn int_ty_max(t: int_ty) -> u64 {
-    alt t {
-      ty_i8 { 0x80u64 }
-      ty_i16 { 0x8000u64 }
-      ty_i | ty_char | ty_i32 { 0x80000000u64 } // actually ni about ty_i
-      ty_i64 { 0x8000000000000000u64 }
-    }
-}
-
-fn uint_ty_to_str(t: uint_ty) -> str {
-    alt t {
-      ty_u { "u" } ty_u8 { "u8" } ty_u16 { "u16" }
-      ty_u32 { "u32" } ty_u64 { "u64" }
-    }
-}
-
-fn uint_ty_max(t: uint_ty) -> u64 {
-    alt t {
-      ty_u8 { 0xffu64 }
-      ty_u16 { 0xffffu64 }
-      ty_u | ty_u32 { 0xffffffffu64 } // actually ni about ty_u
-      ty_u64 { 0xffffffffffffffffu64 }
-    }
-}
-
-fn float_ty_to_str(t: float_ty) -> str {
-    alt t { ty_f { "" } ty_f32 { "f32" } ty_f64 { "f64" } }
-}
-
-fn is_exported(i: ident, m: _mod) -> bool {
-    let mut local = false;
-    let mut parent_enum : option<ident> = none;
-    for m.items.each {|it|
-        if it.ident == i { local = true; }
-        alt it.node {
-          item_enum(variants, _, _) {
-            for variants.each {|v|
-                if v.node.name == i {
-                   local = true;
-                   parent_enum = some(it.ident);
-                }
-            }
-          }
-          _ { }
-        }
-        if local { break; }
-    }
-    let mut has_explicit_exports = false;
-    for m.view_items.each {|vi|
-        alt vi.node {
-          view_item_export(vps) {
-            has_explicit_exports = true;
-            for vps.each {|vp|
-                alt vp.node {
-                  ast::view_path_simple(id, _, _) {
-                    if id == i { ret true; }
-                    alt parent_enum {
-                      some(parent_enum_id) {
-                        if id == parent_enum_id { ret true; }
-                      }
-                      _ {}
-                    }
-                  }
-
-                  ast::view_path_list(path, ids, _) {
-                    if vec::len(path.idents) == 1u {
-                        if i == path.idents[0] { ret true; }
-                        for ids.each {|id|
-                            if id.node.name == i { ret true; }
-                        }
-                    } else {
-                        fail "export of path-qualified list";
-                    }
-                  }
-
-                  // FIXME: glob-exports aren't supported yet. (#2006)
-                  _ {}
-                }
-            }
-          }
-          _ {}
-        }
-    }
-    // If there are no declared exports then
-    // everything not imported is exported
-    // even if it's local (since it's explicit)
-    ret !has_explicit_exports && local;
-}
-
-pure fn is_call_expr(e: @expr) -> bool {
-    alt e.node { expr_call(_, _, _) { true } _ { false } }
-}
-
-fn is_constraint_arg(e: @expr) -> bool {
-    alt e.node {
-      expr_lit(_) { ret true; }
-      expr_path(_) { ret true; }
-      _ { ret false; }
-    }
-}
-
-fn eq_ty(&&a: @ty, &&b: @ty) -> bool { ret box::ptr_eq(a, b); }
-
-fn hash_ty(&&t: @ty) -> uint {
-    let res = (t.span.lo << 16u) + t.span.hi;
-    ret res;
-}
-
-fn def_eq(a: ast::def_id, b: ast::def_id) -> bool {
-    ret a.crate == b.crate && a.node == b.node;
-}
-
-fn hash_def(d: ast::def_id) -> uint {
-    let mut h = 5381u;
-    h = (h << 5u) + h ^ (d.crate as uint);
-    h = (h << 5u) + h ^ (d.node as uint);
-    ret h;
-}
-
-fn new_def_hash<V: copy>() -> std::map::hashmap<ast::def_id, V> {
-    let hasher: std::map::hashfn<ast::def_id> = hash_def;
-    let eqer: std::map::eqfn<ast::def_id> = def_eq;
-    ret std::map::hashmap::<ast::def_id, V>(hasher, eqer);
-}
-
-fn block_from_expr(e: @expr) -> blk {
-    let blk_ = default_block([], option::some::<@expr>(e), e.id);
-    ret {node: blk_, span: e.span};
-}
-
-fn default_block(stmts1: [@stmt], expr1: option<@expr>, id1: node_id) ->
-   blk_ {
-    {view_items: [], stmts: stmts1, expr: expr1, id: id1, rules: default_blk}
-}
-
-fn ident_to_path(s: span, i: ident) -> @path {
-    @{span: s, global: false, idents: [i],
-      rp: none, types: []}
-}
-
-pure fn is_unguarded(&&a: arm) -> bool {
-    alt a.guard {
-      none { true }
-      _    { false }
-    }
-}
-
-pure fn unguarded_pat(a: arm) -> option<[@pat]> {
-    if is_unguarded(a) { some(a.pats) } else { none }
-}
-
-// Provides an extra node_id to hang callee information on, in case the
-// operator is deferred to a user-supplied method. The parser is responsible
-// for reserving this id.
-fn op_expr_callee_id(e: @expr) -> node_id { e.id - 1 }
-
-pure fn class_item_ident(ci: @class_member) -> ident {
-    alt ci.node {
-      instance_var(i,_,_,_,_) { i }
-      class_method(it) { it.ident }
-    }
-}
-
-type ivar = {ident: ident, ty: @ty, cm: class_mutability,
-             id: node_id, vis: visibility};
-
-fn public_methods(ms: [@method]) -> [@method] {
-    vec::filter(ms, {|m| alt m.vis {
-                    public { true }
-                    _   { false }}})
-}
-
-fn split_class_items(cs: [@class_member]) -> ([ivar], [@method]) {
-    let mut vs = [], ms = [];
-    for cs.each {|c|
-      alt c.node {
-        instance_var(i, t, cm, id, vis) {
-          vs += [{ident: i, ty: t, cm: cm, id: id, vis: vis}];
-        }
-        class_method(m) { ms += [m]; }
-      }
-    };
-    (vs, ms)
-}
-
-pure fn class_member_visibility(ci: @class_member) -> visibility {
-  alt ci.node {
-     instance_var(_, _, _, _, vis) { vis }
-     class_method(m) { m.vis }
-  }
-}
-
-impl inlined_item_methods for inlined_item {
-    fn ident() -> ident {
-        alt self {
-          ii_item(i) { i.ident }
-          ii_native(i) { i.ident }
-          ii_method(_, m) { m.ident }
-          ii_ctor(_, nm, _, _) { nm }
-        }
-    }
-
-    fn id() -> ast::node_id {
-        alt self {
-          ii_item(i) { i.id }
-          ii_native(i) { i.id }
-          ii_method(_, m) { m.id }
-          ii_ctor(ctor, _, _, _) { ctor.node.id }
-        }
-    }
-
-    fn accept<E>(e: E, v: visit::vt<E>) {
-        alt self {
-          ii_item(i) { v.visit_item(i, e, v) }
-          ii_native(i) { v.visit_native_item(i, e, v) }
-          ii_method(_, m) { visit::visit_method_helper(m, e, v) }
-          ii_ctor(ctor, nm, tps, parent_id) {
-              visit::visit_class_ctor_helper(ctor, nm, tps, parent_id, e, v);
-          }
-        }
-    }
-}
-
-/* True if d is either a def_self, or a chain of def_upvars
- referring to a def_self */
-fn is_self(d: ast::def) -> bool {
-  alt d {
-    def_self(_)        { true }
-    def_upvar(_, d, _) { is_self(*d) }
-    _                  { false }
-  }
-}
-
-#[doc = "Maps a binary operator to its precedence"]
-fn operator_prec(op: ast::binop) -> uint {
-  alt op {
-      mul | div | rem   { 12u }
-      // 'as' sits between here with 11
-      add | subtract    { 10u }
-      shl | shr         {  9u }
-      bitand            {  8u }
-      bitxor            {  7u }
-      bitor             {  6u }
-      lt | le | ge | gt {  4u }
-      eq | ne           {  3u }
-      and               {  2u }
-      or                {  1u }
-  }
-}
-
-fn dtor_dec() -> fn_decl {
-    let nil_t = @{id: 0, node: ty_nil, span: dummy_sp()};
-    // dtor has one argument, of type ()
-    {inputs: [{mode: ast::expl(ast::by_ref),
-               ty: nil_t, ident: "_", id: 0}],
-     output: nil_t, purity: impure_fn, cf: return_val, constraints: []}
-}
-
-// ______________________________________________________________________
-// Enumerating the IDs which appear in an AST
-
-#[auto_serialize]
-type id_range = {min: node_id, max: node_id};
-
-fn empty(range: id_range) -> bool {
-    range.min >= range.max
-}
-
-fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> {
-    visit::mk_simple_visitor(@{
-        visit_mod: fn@(_m: _mod, _sp: span, id: node_id) {
-            vfn(id)
-        },
-
-        visit_view_item: fn@(vi: @view_item) {
-            alt vi.node {
-              view_item_use(_, _, id) { vfn(id) }
-              view_item_import(vps) | view_item_export(vps) {
-                vec::iter(vps) {|vp|
-                    alt vp.node {
-                      view_path_simple(_, _, id) { vfn(id) }
-                      view_path_glob(_, id) { vfn(id) }
-                      view_path_list(_, _, id) { vfn(id) }
-                    }
-                }
-              }
-            }
-        },
-
-        visit_native_item: fn@(ni: @native_item) {
-            vfn(ni.id)
-        },
-
-        visit_item: fn@(i: @item) {
-            vfn(i.id);
-            alt i.node {
-              item_res(_, _, _, d_id, c_id, _) { vfn(d_id); vfn(c_id); }
-              item_enum(vs, _, _) { for vs.each {|v| vfn(v.node.id); } }
-              _ {}
-            }
-        },
-
-        visit_local: fn@(l: @local) {
-            vfn(l.node.id);
-        },
-
-        visit_block: fn@(b: blk) {
-            vfn(b.node.id);
-        },
-
-        visit_stmt: fn@(s: @stmt) {
-            vfn(ast_util::stmt_id(*s));
-        },
-
-        visit_arm: fn@(_a: arm) { },
-
-        visit_pat: fn@(p: @pat) {
-            vfn(p.id)
-        },
-
-        visit_decl: fn@(_d: @decl) {
-        },
-
-        visit_expr: fn@(e: @expr) {
-            vfn(e.id);
-            alt e.node {
-              expr_unary(_, _) | expr_binary(_, _, _) {
-                vfn(ast_util::op_expr_callee_id(e));
-              }
-              _ { /* fallthrough */ }
-            }
-        },
-
-        visit_ty: fn@(t: @ty) {
-            alt t.node {
-              ty_path(_, id) {
-                vfn(id)
-              }
-              _ { /* fall through */ }
-            }
-        },
-
-        visit_ty_params: fn@(ps: [ty_param]) {
-            vec::iter(ps) {|p| vfn(p.id) }
-        },
-
-        visit_constr: fn@(_p: @path, _sp: span, id: node_id) {
-            vfn(id);
-        },
-
-        visit_fn: fn@(fk: visit::fn_kind, d: fn_decl,
-                      _b: blk, _sp: span, id: node_id) {
-            vfn(id);
-
-            alt fk {
-              visit::fk_ctor(_, tps, self_id, parent_id) |
-              visit::fk_dtor(tps, self_id, parent_id) {
-                vec::iter(tps) {|tp| vfn(tp.id)}
-                vfn(id);
-                vfn(self_id);
-                vfn(parent_id.node);
-              }
-              visit::fk_item_fn(_, tps) |
-              visit::fk_res(_, tps, _) {
-                vec::iter(tps) {|tp| vfn(tp.id)}
-              }
-              visit::fk_method(_, tps, m) {
-                vfn(m.self_id);
-                vec::iter(tps) {|tp| vfn(tp.id)}
-              }
-              visit::fk_anon(*) | visit::fk_fn_block(*) {
-              }
-            }
-
-            vec::iter(d.inputs) {|arg|
-                vfn(arg.id)
-            }
-        },
-
-        visit_class_item: fn@(c: @class_member) {
-            alt c.node {
-              instance_var(_, _, _, id,_) {
-                vfn(id)
-              }
-              class_method(_) {
-              }
-            }
-        }
-    })
-}
-
-fn visit_ids_for_inlined_item(item: inlined_item, vfn: fn@(node_id)) {
-    item.accept((), id_visitor(vfn));
-}
-
-fn compute_id_range(visit_ids_fn: fn(fn@(node_id))) -> id_range {
-    let min = @mut int::max_value;
-    let max = @mut int::min_value;
-    visit_ids_fn { |id|
-        *min = int::min(*min, id);
-        *max = int::max(*max, id + 1);
-    }
-    ret {min:*min, max:*max};
-}
-
-fn compute_id_range_for_inlined_item(item: inlined_item) -> id_range {
-    compute_id_range { |f| visit_ids_for_inlined_item(item, f) }
-}
-
-pure fn is_item_impl(item: @ast::item) -> bool {
-    alt item.node {
-       item_impl(*) { true }
-       _            { false }
-    }
-}
-
-fn walk_pat(pat: @pat, it: fn(@pat)) {
-    it(pat);
-    alt pat.node {
-      pat_ident(pth, some(p)) { walk_pat(p, it); }
-      pat_rec(fields, _) { for fields.each {|f| walk_pat(f.pat, it); } }
-      pat_enum(_, some(s)) | pat_tup(s) { for s.each {|p| walk_pat(p, it); } }
-      pat_box(s) | pat_uniq(s) { walk_pat(s, it); }
-      pat_wild | pat_lit(_) | pat_range(_, _) | pat_ident(_, _)
-        | pat_enum(_, _) {}
-    }
-}
-
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
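One small idiom in the ast_util.rs above that is easy to miss: `compute_id_range` does not walk the AST itself; it is handed a function that will report every node ID through a callback, and it simply folds those IDs into a half-open min/max range. A sketch of the same fold in current Rust, with a stand-in walker rather than the real visitor:

    // The old `id_range`: half-open [min, max) over node IDs.
    #[derive(Debug, PartialEq)]
    struct IdRange { min: i64, max: i64 }

    // Like `compute_id_range`: hand a callback to whatever walks the IDs,
    // and fold each reported ID into the running bounds.
    fn compute_id_range(visit_ids: impl Fn(&mut dyn FnMut(i64))) -> IdRange {
        let mut min = i64::MAX;
        let mut max = i64::MIN;
        visit_ids(&mut |id| {
            min = min.min(id);
            max = max.max(id + 1); // max is exclusive, as in the original
        });
        IdRange { min, max }
    }

    fn main() {
        // Stand-in for `visit_ids_for_inlined_item`: just reports three IDs.
        let r = compute_id_range(|f| { f(7); f(3); f(5); });
        assert_eq!(r, IdRange { min: 3, max: 8 });
    }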
diff --git a/src/librustsyntax/attr.rs b/src/librustsyntax/attr.rs
deleted file mode 100644
index 77423a993d4..00000000000
--- a/src/librustsyntax/attr.rs
+++ /dev/null
@@ -1,386 +0,0 @@
-// Functions dealing with attributes and meta_items
-
-import std::map;
-import std::map::hashmap;
-import either::either;
-import diagnostic::span_handler;
-import ast_util::dummy_spanned;
-
-// Constructors
-export mk_name_value_item_str;
-export mk_name_value_item;
-export mk_list_item;
-export mk_word_item;
-export mk_attr;
-
-// Conversion
-export attr_meta;
-export attr_metas;
-
-// Accessors
-export get_attr_name;
-export get_meta_item_name;
-export get_meta_item_value_str;
-export get_meta_item_list;
-export get_name_value_str_pair;
-
-// Searching
-export find_attrs_by_name;
-export find_meta_items_by_name;
-export contains;
-export contains_name;
-export attrs_contains_name;
-export first_attr_value_str_by_name;
-export last_meta_item_value_str_by_name;
-export last_meta_item_list_by_name;
-
-// Higher-level applications
-export sort_meta_items;
-export remove_meta_items_by_name;
-export find_linkage_attrs;
-export find_linkage_metas;
-export native_abi;
-export inline_attr;
-export find_inline_attr;
-export require_unique_names;
-
-/* Constructors */
-
-fn mk_name_value_item_str(name: ast::ident, value: str) -> @ast::meta_item {
-    let value_lit = dummy_spanned(ast::lit_str(value));
-    ret mk_name_value_item(name, value_lit);
-}
-
-fn mk_name_value_item(name: ast::ident, value: ast::lit) -> @ast::meta_item {
-    ret @dummy_spanned(ast::meta_name_value(name, value));
-}
-
-fn mk_list_item(name: ast::ident, items: [@ast::meta_item]) ->
-   @ast::meta_item {
-    ret @dummy_spanned(ast::meta_list(name, items));
-}
-
-fn mk_word_item(name: ast::ident) -> @ast::meta_item {
-    ret @dummy_spanned(ast::meta_word(name));
-}
-
-fn mk_attr(item: @ast::meta_item) -> ast::attribute {
-    ret dummy_spanned({style: ast::attr_inner, value: *item});
-}
-
-
-/* Conversion */
-
-fn attr_meta(attr: ast::attribute) -> @ast::meta_item { @attr.node.value }
-
-// Get the meta_items from inside a vector of attributes
-fn attr_metas(attrs: [ast::attribute]) -> [@ast::meta_item] {
-    let mut mitems = [];
-    for attrs.each {|a| mitems += [attr_meta(a)]; }
-    ret mitems;
-}
-
-
-/* Accessors */
-
-fn get_attr_name(attr: ast::attribute) -> ast::ident {
-    get_meta_item_name(@attr.node.value)
-}
-
-fn get_meta_item_name(meta: @ast::meta_item) -> ast::ident {
-    alt meta.node {
-      ast::meta_word(n) { n }
-      ast::meta_name_value(n, _) { n }
-      ast::meta_list(n, _) { n }
-    }
-}
-
-#[doc = "
-Gets the string value if the meta_item is a meta_name_value variant
-containing a string, otherwise none
-"]
-fn get_meta_item_value_str(meta: @ast::meta_item) -> option<str> {
-    alt meta.node {
-      ast::meta_name_value(_, v) {
-        alt v.node { ast::lit_str(s) { option::some(s) } _ { option::none } }
-      }
-      _ { option::none }
-    }
-}
-
-#[doc = "Gets a list of inner meta items from a list meta_item type"]
-fn get_meta_item_list(meta: @ast::meta_item) -> option<[@ast::meta_item]> {
-    alt meta.node {
-      ast::meta_list(_, l) { option::some(l) }
-      _ { option::none }
-    }
-}
-
-#[doc = "
-If the meta item is a name-value type with a string value then returns
-a tuple containing the name and string value, otherwise `none`
-"]
-fn get_name_value_str_pair(
-    item: @ast::meta_item
-) -> option<(str, str)> {
-    alt attr::get_meta_item_value_str(item) {
-      some(value) {
-        let name = attr::get_meta_item_name(item);
-        some((name, value))
-      }
-      none { none }
-    }
-}
-
-
-/* Searching */
-
-#[doc = "
-Search a list of attributes and return only those with a specific name
-"]
-fn find_attrs_by_name(attrs: [ast::attribute], name: ast::ident) ->
-   [ast::attribute] {
-    let filter = (
-        fn@(a: ast::attribute) -> option<ast::attribute> {
-            if get_attr_name(a) == name {
-                option::some(a)
-            } else { option::none }
-        }
-    );
-    ret vec::filter_map(attrs, filter);
-}
-
-#[doc = "
-Search a list of meta items and return only those with a specific name
-"]
-fn find_meta_items_by_name(metas: [@ast::meta_item], name: ast::ident) ->
-   [@ast::meta_item] {
-    let filter = fn@(&&m: @ast::meta_item) -> option<@ast::meta_item> {
-        if get_meta_item_name(m) == name {
-            option::some(m)
-        } else { option::none }
-    };
-    ret vec::filter_map(metas, filter);
-}
-
-#[doc = "
-Returns true if a list of meta items contains another meta item. The
-comparison is performed structurally.
-"]
-fn contains(haystack: [@ast::meta_item], needle: @ast::meta_item) -> bool {
-    #debug("looking for %s",
-           print::pprust::meta_item_to_str(*needle));
-    for haystack.each {|item|
-        #debug("looking in %s",
-               print::pprust::meta_item_to_str(*item));
-        if eq(item, needle) { #debug("found it!"); ret true; }
-    }
-    #debug("found it not :(");
-    ret false;
-}
-
-fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool {
-    ret alt a.node {
-          ast::meta_word(na) {
-            alt b.node { ast::meta_word(nb) { na == nb } _ { false } }
-          }
-          ast::meta_name_value(na, va) {
-            alt b.node {
-              ast::meta_name_value(nb, vb) { na == nb && va.node == vb.node }
-              _ { false }
-            }
-          }
-          ast::meta_list(na, la) {
-
-            // [Fixme-sorting]
-            // FIXME (#607): Needs implementing
-            // This involves probably sorting the list by name and
-            // meta_item variant
-            fail "unimplemented meta_item variant"
-          }
-        }
-}
-
-fn contains_name(metas: [@ast::meta_item], name: ast::ident) -> bool {
-    let matches = find_meta_items_by_name(metas, name);
-    ret vec::len(matches) > 0u;
-}
-
-fn attrs_contains_name(attrs: [ast::attribute], name: ast::ident) -> bool {
-    vec::is_not_empty(find_attrs_by_name(attrs, name))
-}
-
-fn first_attr_value_str_by_name(attrs: [ast::attribute], name: ast::ident)
-    -> option<str> {
-    let mattrs = find_attrs_by_name(attrs, name);
-    if vec::len(mattrs) > 0u {
-        ret get_meta_item_value_str(attr_meta(mattrs[0]));
-    }
-    ret option::none;
-}
-
-fn last_meta_item_by_name(
-    items: [@ast::meta_item],
-    name: str
-) -> option<@ast::meta_item> {
-    let items = attr::find_meta_items_by_name(items, name);
-    vec::last_opt(items)
-}
-
-fn last_meta_item_value_str_by_name(
-    items: [@ast::meta_item],
-    name: str
-) -> option<str> {
-    alt last_meta_item_by_name(items, name) {
-      some(item) {
-        alt attr::get_meta_item_value_str(item) {
-          some(value) { some(value) }
-          none { none }
-        }
-      }
-      none { none }
-    }
-}
-
-fn last_meta_item_list_by_name(
-    items: [@ast::meta_item],
-    name: str
-) -> option<[@ast::meta_item]> {
-    alt last_meta_item_by_name(items, name) {
-      some(item) {
-        attr::get_meta_item_list(item)
-      }
-      none { none }
-    }
-}
-
-
-/* Higher-level applications */
-
-// FIXME: This needs to sort by meta_item variant in addition to the item name
-// (See [Fixme-sorting])
-fn sort_meta_items(items: [@ast::meta_item]) -> [@ast::meta_item] {
-    fn lteq(&&ma: @ast::meta_item, &&mb: @ast::meta_item) -> bool {
-        fn key(m: @ast::meta_item) -> ast::ident {
-            alt m.node {
-              ast::meta_word(name) { name }
-              ast::meta_name_value(name, _) { name }
-              ast::meta_list(name, _) { name }
-            }
-        }
-        ret key(ma) <= key(mb);
-    }
-
-    // This is sort of stupid here, converting to a vec of mutables and back
-    let mut v: [mut @ast::meta_item] = [mut];
-    for items.each {|mi| v += [mut mi]; }
-
-    std::sort::quick_sort(lteq, v);
-
-    let mut v2: [@ast::meta_item] = [];
-    for v.each {|mi| v2 += [mi]; }
-    ret v2;
-}
-
-fn remove_meta_items_by_name(items: [@ast::meta_item], name: str) ->
-   [@ast::meta_item] {
-
-    let filter = fn@(&&item: @ast::meta_item) -> option<@ast::meta_item> {
-        if get_meta_item_name(item) != name {
-            option::some(item)
-        } else { option::none }
-    };
-
-    ret vec::filter_map(items, filter);
-}
-
-fn find_linkage_attrs(attrs: [ast::attribute]) -> [ast::attribute] {
-    let mut found = [];
-    for find_attrs_by_name(attrs, "link").each {|attr|
-        alt attr.node.value.node {
-          ast::meta_list(_, _) { found += [attr] }
-          _ { #debug("ignoring link attribute that has incorrect type"); }
-        }
-    }
-    ret found;
-}
-
-#[doc = "
-From a list of crate attributes get only the meta_items that impact crate
-linkage
-"]
-fn find_linkage_metas(attrs: [ast::attribute]) -> [@ast::meta_item] {
-    find_linkage_attrs(attrs).flat_map {|attr|
-        alt check attr.node.value.node {
-          ast::meta_list(_, items) { items }
-        }
-    }
-}
-
-fn native_abi(attrs: [ast::attribute]) -> either<str, ast::native_abi> {
-    ret alt attr::first_attr_value_str_by_name(attrs, "abi") {
-      option::none {
-        either::right(ast::native_abi_cdecl)
-      }
-      option::some("rust-intrinsic") {
-        either::right(ast::native_abi_rust_intrinsic)
-      }
-      option::some("cdecl") {
-        either::right(ast::native_abi_cdecl)
-      }
-      option::some("stdcall") {
-        either::right(ast::native_abi_stdcall)
-      }
-      option::some(t) {
-        either::left("unsupported abi: " + t)
-      }
-    };
-}
-
-enum inline_attr {
-    ia_none,
-    ia_hint,
-    ia_always
-}
-
-#[doc = "True if something like #[inline] is found in the list of attrs."]
-fn find_inline_attr(attrs: [ast::attribute]) -> inline_attr {
-    // TODO---validate the usage of #[inline] and #[inline(always)]
-    vec::foldl(ia_none, attrs) {|ia,attr|
-        alt attr.node.value.node {
-          ast::meta_word("inline") { ia_hint }
-          ast::meta_list("inline", items) {
-            if !vec::is_empty(find_meta_items_by_name(items, "always")) {
-                ia_always
-            } else {
-                ia_hint
-            }
-          }
-          _ { ia }
-        }
-    }
-}
-
-
-fn require_unique_names(diagnostic: span_handler,
-                        metas: [@ast::meta_item]) {
-    let map = map::str_hash();
-    for metas.each {|meta|
-        let name = get_meta_item_name(meta);
-        if map.contains_key(name) {
-            diagnostic.span_fatal(meta.span,
-                                  #fmt["duplicate meta item `%s`", name]);
-        }
-        map.insert(name, ());
-    }
-}
-
-//
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
-//
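
find_inline_attr above folds the whole attribute list down to a single inline
level, so the last #[inline] / #[inline(always)] attribute seen wins. A minimal
sketch of the same precedence logic in present-day Rust (the Attr type and its
fields are illustrative stand-ins, not part of the deleted file):

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum InlineAttr { None, Hint, Always }

    // Simplified stand-in for ast::attribute: a name plus optional list args.
    struct Attr { name: String, args: Vec<String> }

    fn find_inline_attr(attrs: &[Attr]) -> InlineAttr {
        attrs.iter().fold(InlineAttr::None, |ia, attr| {
            if attr.name != "inline" {
                ia
            } else if attr.args.iter().any(|a| a == "always") {
                InlineAttr::Always
            } else {
                InlineAttr::Hint
            }
        })
    }

    fn main() {
        let attrs = vec![
            Attr { name: "inline".to_string(), args: vec![] },
            Attr { name: "inline".to_string(), args: vec!["always".to_string()] },
        ];
        assert_eq!(find_inline_attr(&attrs), InlineAttr::Always);
    }
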
diff --git a/src/librustsyntax/codemap.rs b/src/librustsyntax/codemap.rs
deleted file mode 100644
index 259041959d4..00000000000
--- a/src/librustsyntax/codemap.rs
+++ /dev/null
@@ -1,236 +0,0 @@
-import dvec::{dvec, extensions};
-
-export filename;
-export filemap;
-export span;
-export file_substr;
-export codemap;
-export expn_info;
-export expn_info_;
-export expanded_from;
-export new_filemap;
-export new_filemap_w_substr;
-export mk_substr_filename;
-export lookup_char_pos;
-export lookup_char_pos_adj;
-export adjust_span;
-export span_to_str;
-export span_to_filename;
-export span_to_lines;
-export file_lines;
-export get_line;
-export next_line;
-export span_to_snippet;
-export loc;
-export get_filemap;
-export new_codemap;
-
-type filename = str;
-
-type file_pos = {ch: uint, byte: uint};
-
-/* A codemap is a thing that maps uints to file/line/column positions
- * in a crate. This is to make it possible to represent the positions
- * with single-word things, rather than passing records all over the
- * compiler.
- */
-
-enum file_substr {
-    fss_none,
-    fss_internal(span),
-    fss_external({filename: str, line: uint, col: uint})
-}
-
-type filemap =
-    @{name: filename, substr: file_substr, src: @str,
-      start_pos: file_pos, mut lines: [file_pos]};
-
-type codemap = @{files: dvec<filemap>};
-
-type loc = {file: filemap, line: uint, col: uint};
-
-fn new_codemap() -> codemap { @{files: dvec()} }
-
-fn new_filemap_w_substr(filename: filename, substr: file_substr,
-                        src: @str,
-                        start_pos_ch: uint, start_pos_byte: uint)
-   -> filemap {
-    ret @{name: filename, substr: substr, src: src,
-          start_pos: {ch: start_pos_ch, byte: start_pos_byte},
-          mut lines: [{ch: start_pos_ch, byte: start_pos_byte}]};
-}
-
-fn new_filemap(filename: filename, src: @str,
-               start_pos_ch: uint, start_pos_byte: uint)
-    -> filemap {
-    ret new_filemap_w_substr(filename, fss_none, src,
-                             start_pos_ch, start_pos_byte);
-}
-
-fn mk_substr_filename(cm: codemap, sp: span) -> str
-{
-    let pos = lookup_char_pos(cm, sp.lo);
-    ret #fmt("<%s:%u:%u>", pos.file.name, pos.line, pos.col);
-}
-
-fn next_line(file: filemap, chpos: uint, byte_pos: uint) {
-    file.lines += [{ch: chpos, byte: byte_pos + file.start_pos.byte}];
-}
-
-type lookup_fn = fn@(file_pos) -> uint;
-
-fn lookup_line(map: codemap, pos: uint, lookup: lookup_fn)
-    -> {fm: filemap, line: uint}
-{
-    let len = map.files.len();
-    let mut a = 0u;
-    let mut b = len;
-    while b - a > 1u {
-        let m = (a + b) / 2u;
-        if lookup(map.files[m].start_pos) > pos { b = m; } else { a = m; }
-    }
-    if (a >= len) {
-        fail #fmt("position %u does not resolve to a source location", pos)
-    }
-    let f = map.files[a];
-    a = 0u;
-    b = vec::len(f.lines);
-    while b - a > 1u {
-        let m = (a + b) / 2u;
-        if lookup(f.lines[m]) > pos { b = m; } else { a = m; }
-    }
-    ret {fm: f, line: a};
-}
-
-fn lookup_pos(map: codemap, pos: uint, lookup: lookup_fn) -> loc {
-    let {fm: f, line: a} = lookup_line(map, pos, lookup);
-    ret {file: f, line: a + 1u, col: pos - lookup(f.lines[a])};
-}
-
-fn lookup_char_pos(map: codemap, pos: uint) -> loc {
-    fn lookup(pos: file_pos) -> uint { ret pos.ch; }
-    ret lookup_pos(map, pos, lookup);
-}
-
-fn lookup_byte_pos(map: codemap, pos: uint) -> loc {
-    fn lookup(pos: file_pos) -> uint { ret pos.byte; }
-    ret lookup_pos(map, pos, lookup);
-}
-
-fn lookup_char_pos_adj(map: codemap, pos: uint)
-    -> {filename: str, line: uint, col: uint, file: option<filemap>}
-{
-    let loc = lookup_char_pos(map, pos);
-    alt (loc.file.substr) {
-      fss_none {
-        {filename: loc.file.name, line: loc.line, col: loc.col,
-         file: some(loc.file)}
-      }
-      fss_internal(sp) {
-        lookup_char_pos_adj(map, sp.lo + (pos - loc.file.start_pos.ch))
-      }
-      fss_external(eloc) {
-        {filename: eloc.filename,
-         line: eloc.line + loc.line - 1u,
-         col: if loc.line == 1u {eloc.col + loc.col} else {loc.col},
-         file: none}
-      }
-    }
-}
-
-fn adjust_span(map: codemap, sp: span) -> span {
-    fn lookup(pos: file_pos) -> uint { ret pos.ch; }
-    let line = lookup_line(map, sp.lo, lookup);
-    alt (line.fm.substr) {
-      fss_none {sp}
-      fss_internal(s) {
-        adjust_span(map, {lo: s.lo + (sp.lo - line.fm.start_pos.ch),
-                          hi: s.lo + (sp.hi - line.fm.start_pos.ch),
-                          expn_info: sp.expn_info})}
-      fss_external(_) {sp}
-    }
-}
-
-enum expn_info_ {
-    expanded_from({call_site: span,
-                   callie: {name: str, span: option<span>}})
-}
-type expn_info = option<@expn_info_>;
-type span = {lo: uint, hi: uint, expn_info: expn_info};
-
-fn span_to_str_no_adj(sp: span, cm: codemap) -> str {
-    let lo = lookup_char_pos(cm, sp.lo);
-    let hi = lookup_char_pos(cm, sp.hi);
-    ret #fmt("%s:%u:%u: %u:%u", lo.file.name,
-             lo.line, lo.col, hi.line, hi.col)
-}
-
-fn span_to_str(sp: span, cm: codemap) -> str {
-    let lo = lookup_char_pos_adj(cm, sp.lo);
-    let hi = lookup_char_pos_adj(cm, sp.hi);
-    ret #fmt("%s:%u:%u: %u:%u", lo.filename,
-             lo.line, lo.col, hi.line, hi.col)
-}
-
-type file_lines = {file: filemap, lines: [uint]};
-
-fn span_to_filename(sp: span, cm: codemap::codemap) -> filename {
-    let lo = lookup_char_pos(cm, sp.lo);
-    ret lo.file.name;
-}
-
-fn span_to_lines(sp: span, cm: codemap::codemap) -> @file_lines {
-    let lo = lookup_char_pos(cm, sp.lo);
-    let hi = lookup_char_pos(cm, sp.hi);
-    let mut lines = [];
-    for uint::range(lo.line - 1u, hi.line as uint) {|i| lines += [i]; };
-    ret @{file: lo.file, lines: lines};
-}
-
-fn get_line(fm: filemap, line: int) -> str unsafe {
-    let begin: uint = fm.lines[line].byte - fm.start_pos.byte;
-    let end = alt str::find_char_from(*fm.src, '\n', begin) {
-      some(e) { e }
-      none { str::len(*fm.src) }
-    };
-    str::slice(*fm.src, begin, end)
-}
-
-fn lookup_byte_offset(cm: codemap::codemap, chpos: uint)
-    -> {fm: filemap, pos: uint} {
-    let {fm, line} = lookup_line(cm, chpos, {|pos| pos.ch});
-    let line_offset = fm.lines[line].byte - fm.start_pos.byte;
-    let col = chpos - fm.lines[line].ch;
-    let col_offset = str::count_bytes(*fm.src, line_offset, col);
-    {fm: fm, pos: line_offset + col_offset}
-}
-
-fn span_to_snippet(sp: span, cm: codemap::codemap) -> str {
-    let begin = lookup_byte_offset(cm, sp.lo);
-    let end = lookup_byte_offset(cm, sp.hi);
-    assert begin.fm == end.fm;
-    ret str::slice(*begin.fm.src, begin.pos, end.pos);
-}
-
-fn get_snippet(cm: codemap::codemap, fidx: uint, lo: uint, hi: uint) -> str
-{
-    let fm = cm.files[fidx];
-    ret str::slice(*fm.src, lo, hi)
-}
-
-fn get_filemap(cm: codemap, filename: str) -> filemap {
-    for cm.files.each {|fm| if fm.name == filename { ret fm; } }
-    //XXjdm the following triggers a mismatched type bug
-    //      (or expected function, found _|_)
-    fail; // ("asking for " + filename + " which we don't know about");
-}
-
-//
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
-//
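
lookup_line above does two binary searches: one over the files' start positions
to pick the right filemap, then one over that file's recorded line starts to
pick the line. A self-contained sketch of the inner search, assuming plain byte
offsets instead of the ch/byte file_pos pairs:

    // Given the starting offsets of each line (sorted ascending), return the
    // 0-based index of the line containing `pos`.
    fn lookup_line(line_starts: &[usize], pos: usize) -> usize {
        let (mut a, mut b) = (0, line_starts.len());
        while b - a > 1 {
            let m = (a + b) / 2;
            if line_starts[m] > pos { b = m; } else { a = m; }
        }
        a
    }

    fn main() {
        // Lines start at offsets 0, 10 and 25.
        let starts = [0, 10, 25];
        assert_eq!(lookup_line(&starts, 3), 0);
        assert_eq!(lookup_line(&starts, 10), 1);
        assert_eq!(lookup_line(&starts, 30), 2);
    }
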
diff --git a/src/librustsyntax/diagnostic.rs b/src/librustsyntax/diagnostic.rs
deleted file mode 100644
index 64904d612ce..00000000000
--- a/src/librustsyntax/diagnostic.rs
+++ /dev/null
@@ -1,264 +0,0 @@
-import std::term;
-import io::writer_util;
-import codemap::span;
-
-export emitter, emit;
-export level, fatal, error, warning, note;
-export span_handler, handler, mk_span_handler, mk_handler;
-export codemap_span_handler, codemap_handler;
-export ice_msg;
-export expect;
-
-type emitter = fn@(cmsp: option<(codemap::codemap, span)>,
-                   msg: str, lvl: level);
-
-
-iface span_handler {
-    fn span_fatal(sp: span, msg: str) -> !;
-    fn span_err(sp: span, msg: str);
-    fn span_warn(sp: span, msg: str);
-    fn span_note(sp: span, msg: str);
-    fn span_bug(sp: span, msg: str) -> !;
-    fn span_unimpl(sp: span, msg: str) -> !;
-    fn handler() -> handler;
-}
-
-iface handler {
-    fn fatal(msg: str) -> !;
-    fn err(msg: str);
-    fn bump_err_count();
-    fn has_errors() -> bool;
-    fn abort_if_errors();
-    fn warn(msg: str);
-    fn note(msg: str);
-    fn bug(msg: str) -> !;
-    fn unimpl(msg: str) -> !;
-    fn emit(cmsp: option<(codemap::codemap, span)>, msg: str, lvl: level);
-}
-
-type handler_t = @{
-    mut err_count: uint,
-    emit: emitter
-};
-
-type codemap_t = @{
-    handler: handler,
-    cm: codemap::codemap
-};
-
-impl codemap_span_handler of span_handler for codemap_t {
-    fn span_fatal(sp: span, msg: str) -> ! {
-        self.handler.emit(some((self.cm, sp)), msg, fatal);
-        fail;
-    }
-    fn span_err(sp: span, msg: str) {
-        self.handler.emit(some((self.cm, sp)), msg, error);
-        self.handler.bump_err_count();
-    }
-    fn span_warn(sp: span, msg: str) {
-        self.handler.emit(some((self.cm, sp)), msg, warning);
-    }
-    fn span_note(sp: span, msg: str) {
-        self.handler.emit(some((self.cm, sp)), msg, note);
-    }
-    fn span_bug(sp: span, msg: str) -> ! {
-        self.span_fatal(sp, ice_msg(msg));
-    }
-    fn span_unimpl(sp: span, msg: str) -> ! {
-        self.span_bug(sp, "unimplemented " + msg);
-    }
-    fn handler() -> handler {
-        self.handler
-    }
-}
-
-impl codemap_handler of handler for handler_t {
-    fn fatal(msg: str) -> ! {
-        self.emit(none, msg, fatal);
-        fail;
-    }
-    fn err(msg: str) {
-        self.emit(none, msg, error);
-        self.bump_err_count();
-    }
-    fn bump_err_count() {
-        self.err_count += 1u;
-    }
-    fn has_errors() -> bool { self.err_count > 0u }
-    fn abort_if_errors() {
-        if self.err_count > 0u {
-            self.fatal("aborting due to previous errors");
-        }
-    }
-    fn warn(msg: str) {
-        self.emit(none, msg, warning);
-    }
-    fn note(msg: str) {
-        self.emit(none, msg, note);
-    }
-    fn bug(msg: str) -> ! {
-        self.fatal(ice_msg(msg));
-    }
-    fn unimpl(msg: str) -> ! { self.bug("unimplemented " + msg); }
-    fn emit(cmsp: option<(codemap::codemap, span)>, msg: str, lvl: level) {
-        self.emit(cmsp, msg, lvl);
-    }
-}
-
-fn ice_msg(msg: str) -> str {
-    #fmt["internal compiler error: %s", msg]
-}
-
-fn mk_span_handler(handler: handler, cm: codemap::codemap) -> span_handler {
-    @{ handler: handler, cm: cm } as span_handler
-}
-
-fn mk_handler(emitter: option<emitter>) -> handler {
-
-    let emit = alt emitter {
-      some(e) { e }
-      none {
-        let f = fn@(cmsp: option<(codemap::codemap, span)>,
-            msg: str, t: level) {
-            emit(cmsp, msg, t);
-        };
-        f
-      }
-    };
-
-    @{
-        mut err_count: 0u,
-        emit: emit
-    } as handler
-}
-
-enum level {
-    fatal,
-    error,
-    warning,
-    note,
-}
-
-fn diagnosticstr(lvl: level) -> str {
-    alt lvl {
-      fatal { "error" }
-      error { "error" }
-      warning { "warning" }
-      note { "note" }
-    }
-}
-
-fn diagnosticcolor(lvl: level) -> u8 {
-    alt lvl {
-      fatal { term::color_bright_red }
-      error { term::color_bright_red }
-      warning { term::color_bright_yellow }
-      note { term::color_bright_green }
-    }
-}
-
-fn print_diagnostic(topic: str, lvl: level, msg: str) {
-    if str::is_not_empty(topic) {
-        io::stderr().write_str(#fmt["%s ", topic]);
-    }
-    if term::color_supported() {
-        term::fg(io::stderr(), diagnosticcolor(lvl));
-    }
-    io::stderr().write_str(#fmt["%s:", diagnosticstr(lvl)]);
-    if term::color_supported() {
-        term::reset(io::stderr());
-    }
-    io::stderr().write_str(#fmt[" %s\n", msg]);
-}
-
-fn emit(cmsp: option<(codemap::codemap, span)>,
-        msg: str, lvl: level) {
-    alt cmsp {
-      some((cm, sp)) {
-        let sp = codemap::adjust_span(cm,sp);
-        let ss = codemap::span_to_str(sp, cm);
-        let lines = codemap::span_to_lines(sp, cm);
-        print_diagnostic(ss, lvl, msg);
-        highlight_lines(cm, sp, lines);
-        print_macro_backtrace(cm, sp);
-      }
-      none {
-        print_diagnostic("", lvl, msg);
-      }
-    }
-}
-
-fn highlight_lines(cm: codemap::codemap, sp: span,
-                   lines: @codemap::file_lines) {
-
-    let fm = lines.file;
-
-    // arbitrarily only print up to six lines of the error
-    let max_lines = 6u;
-    let mut elided = false;
-    let mut display_lines = lines.lines;
-    if vec::len(display_lines) > max_lines {
-        display_lines = vec::slice(display_lines, 0u, max_lines);
-        elided = true;
-    }
-    // Print the offending lines
-    for display_lines.each {|line|
-        io::stderr().write_str(#fmt["%s:%u ", fm.name, line + 1u]);
-        let s = codemap::get_line(fm, line as int) + "\n";
-        io::stderr().write_str(s);
-    }
-    if elided {
-        let last_line = display_lines[vec::len(display_lines) - 1u];
-        let s = #fmt["%s:%u ", fm.name, last_line + 1u];
-        let mut indent = str::len(s);
-        let mut out = "";
-        while indent > 0u { out += " "; indent -= 1u; }
-        out += "...\n";
-        io::stderr().write_str(out);
-    }
-
-
-    // If there's one line at fault we can easily point to the problem
-    if vec::len(lines.lines) == 1u {
-        let lo = codemap::lookup_char_pos(cm, sp.lo);
-        let mut digits = 0u;
-        let mut num = (lines.lines[0] + 1u) / 10u;
-
-        // how many digits must we indent past?
-        while num > 0u { num /= 10u; digits += 1u; }
-
-        // indent past |name:## | and the 0-offset column location
-        let mut left = str::len(fm.name) + digits + lo.col + 3u;
-        let mut s = "";
-        while left > 0u { str::push_char(s, ' '); left -= 1u; }
-
-        s += "^";
-        let hi = codemap::lookup_char_pos(cm, sp.hi);
-        if hi.col != lo.col {
-            // the ^ already takes up one space
-            let mut width = hi.col - lo.col - 1u;
-            while width > 0u { str::push_char(s, '~'); width -= 1u; }
-        }
-        io::stderr().write_str(s + "\n");
-    }
-}
-
-fn print_macro_backtrace(cm: codemap::codemap, sp: span) {
-    option::iter (sp.expn_info) {|ei|
-        let ss = option::map_default(ei.callie.span, "",
-                               bind codemap::span_to_str(_, cm));
-        print_diagnostic(ss, note,
-                         #fmt("in expansion of #%s", ei.callie.name));
-        let ss = codemap::span_to_str(ei.call_site, cm);
-        print_diagnostic(ss, note, "expansion site");
-        print_macro_backtrace(cm, ei.call_site);
-    }
-}
-
-fn expect<T: copy>(diag: span_handler,
-                   opt: option<T>, msg: fn() -> str) -> T {
-    alt opt {
-       some(t) { t }
-       none { diag.handler().bug(msg()); }
-    }
-}
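
The handler above keeps emission (the pluggable emitter) separate from
bookkeeping (err_count, abort_if_errors), so a front end can swap in its own
output while error counting stays uniform. A small sketch of that split in
present-day Rust (all names are illustrative):

    use std::cell::Cell;

    struct Handler {
        err_count: Cell<usize>,
        emit: Box<dyn Fn(&str)>,
    }

    impl Handler {
        fn err(&self, msg: &str) {
            (self.emit)(msg);
            self.err_count.set(self.err_count.get() + 1);
        }
        fn abort_if_errors(&self) {
            if self.err_count.get() > 0 {
                panic!("aborting due to previous errors");
            }
        }
    }

    fn main() {
        let h = Handler {
            err_count: Cell::new(0),
            emit: Box::new(|msg| eprintln!("error: {msg}")),
        };
        h.abort_if_errors(); // no errors yet, so this is a no-op
        h.err("something went wrong");
        assert_eq!(h.err_count.get(), 1);
    }
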
diff --git a/src/librustsyntax/ext/auto_serialize.rs b/src/librustsyntax/ext/auto_serialize.rs
deleted file mode 100644
index dc632d6b6ac..00000000000
--- a/src/librustsyntax/ext/auto_serialize.rs
+++ /dev/null
@@ -1,864 +0,0 @@
-/*
-
-The compiler code necessary to implement the #[auto_serialize]
-extension.  The idea here is that type-defining items may be tagged
-with #[auto_serialize], which will cause us to generate a little
-companion module with the same name as the item.
-
-For example, a type like:
-
-    type node_id = uint;
-
-would generate two functions like:
-
-    fn serialize_node_id<S: serializer>(s: S, v: node_id) {
-        s.emit_uint(v);
-    }
-    fn deserialize_node_id<D: deserializer>(d: D) -> node_id {
-        d.read_uint()
-    }
-
-Other interesting scenarios are when the item has type parameters or
-references other non-built-in types.  A type definition like:
-
-    type spanned<T> = {node: T, span: span};
-
-would yield functions like:
-
-    fn serialize_spanned<S: serializer,T>(s: S, v: spanned<T>, t: fn(T)) {
-         s.emit_rec(2u) {||
-             s.emit_rec_field("node", 0u) {||
-                 t(v.node);
-             };
-             s.emit_rec_field("span", 1u) {||
-                 serialize_span(s, v.span);
-             };
-         }
-    }
-    fn deserialize_spanned<D: deserializer, T>(d: D, t: fn() -> T) -> spanned<T> {
-         d.read_rec(2u) {||
-             {node: d.read_rec_field("node", 0u, t),
-              span: d.read_rec_field("span", 1u) {||deserialize_span(d)}}
-         }
-    }
-
-In general, the code to serialize an instance `v` of a non-built-in
-type a::b::c<T0,...,Tn> looks like:
-
-    a::b::serialize_c(s, {|v| c_T0}, ..., {|v| c_Tn}, v)
-
-where `c_Ti` is the code to serialize an instance `v` of the type
-`Ti`.
-
-Similarly, the code to deserialize an instance of a non-built-in type
-`a::b::c<T0,...,Tn>` using the deserializer `d` looks like:
-
-    a::b::deserialize_c(d, {|| c_T0}, ..., {|| c_Tn})
-
-where `c_Ti` is the code to deserialize an instance of `Ti` using the
-deserializer `d`.
-
-TODO--Hygiene. Search for "__" strings.  We also assume "std" is the
-standard library.
-
-Misc notes:
------------
-
-I use move mode arguments for ast nodes that will get inserted as is
-into the tree.  This is intended to prevent us from inserting the same
-node twice.
-
-*/
-import base::*;
-import codemap::span;
-import std::map;
-import std::map::hashmap;
-
-export expand;
-
-// Transitional reexports so qquote can find the paths it is looking for
-mod syntax {
-    import ext;
-    export ext;
-    import parse;
-    export parse;
-}
-
-type ser_tps_map = map::hashmap<str, fn@(@ast::expr) -> [@ast::stmt]>;
-type deser_tps_map = map::hashmap<str, fn@() -> @ast::expr>;
-
-fn expand(cx: ext_ctxt,
-          span: span,
-          _mitem: ast::meta_item,
-          in_items: [@ast::item]) -> [@ast::item] {
-    fn not_auto_serialize(a: ast::attribute) -> bool {
-        attr::get_attr_name(a) != "auto_serialize"
-    }
-
-    fn filter_attrs(item: @ast::item) -> @ast::item {
-        @{attrs: vec::filter(item.attrs, not_auto_serialize)
-          with *item}
-    }
-
-    vec::flat_map(in_items) {|in_item|
-        alt in_item.node {
-          ast::item_ty(ty, tps, _) {
-            [filter_attrs(in_item)] + ty_fns(cx, in_item.ident, ty, tps)
-          }
-
-          ast::item_enum(variants, tps, _) {
-            [filter_attrs(in_item)] + enum_fns(cx, in_item.ident,
-                                               in_item.span, variants, tps)
-          }
-
-          _ {
-            cx.span_err(span, "#[auto_serialize] can only be \
-                               applied to type and enum \
-                               definitions");
-            [in_item]
-          }
-        }
-    }
-}
-
-impl helpers for ext_ctxt {
-    fn helper_path(base_path: @ast::path,
-                   helper_name: str) -> @ast::path {
-        let head = vec::init(base_path.idents);
-        let tail = vec::last(base_path.idents);
-        self.path(base_path.span, head + [helper_name + "_" + tail])
-    }
-
-    fn path(span: span, strs: [str]) -> @ast::path {
-        @{span: span, global: false, idents: strs, rp: none, types: []}
-    }
-
-    fn path_tps(span: span, strs: [str], tps: [@ast::ty]) -> @ast::path {
-        @{span: span, global: false, idents: strs, rp: none, types: tps}
-    }
-
-    fn ty_path(span: span, strs: [str], tps: [@ast::ty]) -> @ast::ty {
-        @{id: self.next_id(),
-          node: ast::ty_path(self.path_tps(span, strs, tps), self.next_id()),
-          span: span}
-    }
-
-    fn ty_fn(span: span,
-             -input_tys: [@ast::ty],
-             -output: @ast::ty) -> @ast::ty {
-        let args = vec::map(input_tys) {|ty|
-            {mode: ast::expl(ast::by_ref),
-             ty: ty,
-             ident: "",
-             id: self.next_id()}
-        };
-
-        @{id: self.next_id(),
-          node: ast::ty_fn(ast::proto_any, {inputs: args,
-                                            output: output,
-                                            purity: ast::impure_fn,
-                                            cf: ast::return_val,
-                                            constraints: []}),
-          span: span}
-    }
-
-    fn ty_nil(span: span) -> @ast::ty {
-        @{id: self.next_id(), node: ast::ty_nil, span: span}
-    }
-
-    fn expr(span: span, node: ast::expr_) -> @ast::expr {
-        @{id: self.next_id(), node: node, span: span}
-    }
-
-    fn var_ref(span: span, name: str) -> @ast::expr {
-        self.expr(span, ast::expr_path(self.path(span, [name])))
-    }
-
-    fn blk(span: span, stmts: [@ast::stmt]) -> ast::blk {
-        {node: {view_items: [],
-                stmts: stmts,
-                expr: none,
-                id: self.next_id(),
-                rules: ast::default_blk},
-         span: span}
-    }
-
-    fn expr_blk(expr: @ast::expr) -> ast::blk {
-        {node: {view_items: [],
-                stmts: [],
-                expr: some(expr),
-                id: self.next_id(),
-                rules: ast::default_blk},
-         span: expr.span}
-    }
-
-    fn binder_pat(span: span, nm: str) -> @ast::pat {
-        let path = @{span: span, global: false, idents: [nm],
-                     rp: none, types: []};
-        @{id: self.next_id(),
-          node: ast::pat_ident(path, none),
-          span: span}
-    }
-
-    fn stmt(expr: @ast::expr) -> @ast::stmt {
-        @{node: ast::stmt_semi(expr, self.next_id()),
-          span: expr.span}
-    }
-
-    fn alt_stmt(arms: [ast::arm], span: span, -v: @ast::expr) -> @ast::stmt {
-        self.stmt(
-            self.expr(
-                span,
-                ast::expr_alt(v, arms, ast::alt_exhaustive)))
-    }
-
-    fn lit_str(span: span, s: str) -> @ast::expr {
-        self.expr(
-            span,
-            ast::expr_lit(
-                @{node: ast::lit_str(s),
-                  span: span}))
-    }
-
-    fn lit_uint(span: span, i: uint) -> @ast::expr {
-        self.expr(
-            span,
-            ast::expr_lit(
-                @{node: ast::lit_uint(i as u64, ast::ty_u),
-                  span: span}))
-    }
-
-    fn lambda(blk: ast::blk) -> @ast::expr {
-        let ext_cx = self;
-        let blk_e = self.expr(blk.span, ast::expr_block(blk));
-        #ast{ {|| $(blk_e) } }
-    }
-
-    fn clone_folder() -> fold::ast_fold {
-        fold::make_fold(@{
-            new_id: {|_id| self.next_id()}
-            with *fold::default_ast_fold()
-        })
-    }
-
-    fn clone(v: @ast::expr) -> @ast::expr {
-        let fld = self.clone_folder();
-        fld.fold_expr(v)
-    }
-
-    fn clone_ty(v: @ast::ty) -> @ast::ty {
-        let fld = self.clone_folder();
-        fld.fold_ty(v)
-    }
-
-    fn clone_ty_param(v: ast::ty_param) -> ast::ty_param {
-        let fld = self.clone_folder();
-        fold::fold_ty_param(v, fld)
-    }
-
-    fn at(span: span, expr: @ast::expr) -> @ast::expr {
-        fn repl_sp(old_span: span, repl_span: span, with_span: span) -> span {
-            if old_span == repl_span {
-                with_span
-            } else {
-                old_span
-            }
-        }
-
-        let fld = fold::make_fold(@{
-            new_span: repl_sp(_, ast_util::dummy_sp(), span)
-            with *fold::default_ast_fold()
-        });
-
-        fld.fold_expr(expr)
-    }
-}
-
-fn ser_path(cx: ext_ctxt, tps: ser_tps_map, path: @ast::path,
-                  -s: @ast::expr, -v: @ast::expr)
-    -> [@ast::stmt] {
-    let ext_cx = cx; // required for #ast{}
-
-    // We want to take a path like a::b::c<...> and generate a call
-    // like a::b::serialize_c(s, ...), as described above.
-
-    let callee =
-        cx.expr(
-            path.span,
-            ast::expr_path(
-                cx.helper_path(path, "serialize")));
-
-    let ty_args = vec::map(path.types) {|ty|
-        let sv_stmts = ser_ty(cx, tps, ty, cx.clone(s), #ast{ __v });
-        let sv = cx.expr(path.span,
-                         ast::expr_block(cx.blk(path.span, sv_stmts)));
-        cx.at(ty.span, #ast{ {|__v| $(sv)} })
-    };
-
-    [cx.stmt(
-        cx.expr(
-            path.span,
-            ast::expr_call(callee, [s, v] + ty_args, false)))]
-}
-
-fn ser_variant(cx: ext_ctxt,
-               tps: ser_tps_map,
-               tys: [@ast::ty],
-               span: span,
-               -s: @ast::expr,
-               pfn: fn([@ast::pat]) -> ast::pat_,
-               bodyfn: fn(-@ast::expr, ast::blk) -> @ast::expr,
-               argfn: fn(-@ast::expr, uint, ast::blk) -> @ast::expr)
-    -> ast::arm {
-    let vnames = vec::from_fn(vec::len(tys)) {|i| #fmt["__v%u", i]};
-    let pats = vec::from_fn(vec::len(tys)) {|i|
-        cx.binder_pat(tys[i].span, vnames[i])
-    };
-    let pat: @ast::pat = @{id: cx.next_id(), node: pfn(pats), span: span};
-    let stmts = vec::from_fn(vec::len(tys)) {|i|
-        let v = cx.var_ref(span, vnames[i]);
-        let arg_blk =
-            cx.blk(
-                span,
-                ser_ty(cx, tps, tys[i], cx.clone(s), v));
-        cx.stmt(argfn(cx.clone(s), i, arg_blk))
-    };
-
-    let body_blk = cx.blk(span, stmts);
-    let body = cx.blk(span, [cx.stmt(bodyfn(s, body_blk))]);
-
-    {pats: [pat], guard: none, body: body}
-}
-
-fn ser_lambda(cx: ext_ctxt, tps: ser_tps_map, ty: @ast::ty,
-              -s: @ast::expr, -v: @ast::expr) -> @ast::expr {
-    cx.lambda(cx.blk(ty.span, ser_ty(cx, tps, ty, s, v)))
-}
-
-fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
-                ty: @ast::ty, -s: @ast::expr, -v: @ast::expr)
-    -> [@ast::stmt] {
-
-    let ext_cx = cx; // required for #ast{}
-
-    alt ty.node {
-      ast::ty_nil {
-        [#ast[stmt]{$(s).emit_nil()}]
-      }
-
-      ast::ty_bot {
-        cx.span_err(
-            ty.span, #fmt["Cannot serialize bottom type"]);
-        []
-      }
-
-      ast::ty_box(mt) {
-        let l = ser_lambda(cx, tps, mt.ty, cx.clone(s), #ast{ *$(v) });
-        [#ast(stmt){$(s).emit_box($(l));}]
-      }
-
-      ast::ty_uniq(mt) {
-        let l = ser_lambda(cx, tps, mt.ty, cx.clone(s), #ast{ *$(v) });
-        [#ast(stmt){$(s).emit_uniq($(l));}]
-      }
-
-      ast::ty_ptr(_) | ast::ty_rptr(_, _) {
-        cx.span_err(ty.span, "cannot serialize pointer types");
-        []
-      }
-
-      ast::ty_rec(flds) {
-        let fld_stmts = vec::from_fn(vec::len(flds)) {|fidx|
-            let fld = flds[fidx];
-            let vf = cx.expr(fld.span,
-                             ast::expr_field(cx.clone(v),
-                                             fld.node.ident,
-                                             []));
-            let s = cx.clone(s);
-            let f = cx.lit_str(fld.span, fld.node.ident);
-            let i = cx.lit_uint(fld.span, fidx);
-            let l = ser_lambda(cx, tps, fld.node.mt.ty, cx.clone(s), vf);
-            #ast(stmt){$(s).emit_rec_field($(f), $(i), $(l));}
-        };
-        let fld_lambda = cx.lambda(cx.blk(ty.span, fld_stmts));
-        [#ast(stmt){$(s).emit_rec($(fld_lambda));}]
-      }
-
-      ast::ty_fn(_, _) {
-        cx.span_err(ty.span, "cannot serialize function types");
-        []
-      }
-
-      ast::ty_tup(tys) {
-        // Generate code like
-        //
-        // alt v {
-        //    (v1, v2, v3) {
-        //       .. serialize v1, v2, v3 ..
-        //    }
-        // };
-
-        let arms = [
-            ser_variant(
-
-                cx, tps, tys, ty.span, s,
-
-                // Generate pattern (v1, v2, v3)
-                {|pats| ast::pat_tup(pats)},
-
-                // Generate body s.emit_tup(3, {|| blk })
-                {|-s, blk|
-                    let sz = cx.lit_uint(ty.span, vec::len(tys));
-                    let body = cx.lambda(blk);
-                    #ast{ $(s).emit_tup($(sz), $(body)) }
-                },
-
-                // Generate s.emit_tup_elt(i, {|| blk })
-                {|-s, i, blk|
-                    let idx = cx.lit_uint(ty.span, i);
-                    let body = cx.lambda(blk);
-                    #ast{ $(s).emit_tup_elt($(idx), $(body)) }
-                })
-        ];
-        [cx.alt_stmt(arms, ty.span, v)]
-      }
-
-      ast::ty_path(path, _) {
-        if vec::len(path.idents) == 1u &&
-            vec::is_empty(path.types) {
-            let ident = path.idents[0];
-
-            alt tps.find(ident) {
-              some(f) { f(v) }
-              none { ser_path(cx, tps, path, s, v) }
-            }
-        } else {
-            ser_path(cx, tps, path, s, v)
-        }
-      }
-
-      ast::ty_constr(ty, _) {
-        ser_ty(cx, tps, ty, s, v)
-      }
-
-      ast::ty_mac(_) {
-        cx.span_err(ty.span, "cannot serialize macro types");
-        []
-      }
-
-      ast::ty_infer {
-        cx.span_err(ty.span, "cannot serialize inferred types");
-        []
-      }
-
-      ast::ty_vstore(_, _) {
-        cx.span_unimpl(ty.span, "serialization for vstore types");
-      }
-
-      ast::ty_vec(mt) {
-        let ser_e =
-            cx.expr(
-                ty.span,
-                ast::expr_block(
-                    cx.blk(
-                        ty.span,
-                        ser_ty(
-                            cx, tps, mt.ty,
-                            cx.clone(s),
-                            cx.at(ty.span, #ast{ __e })))));
-
-        [#ast(stmt){
-            std::serialization::emit_from_vec($(s), $(v), {|__e| $(ser_e) })
-        }]
-      }
-    }
-}
-
-fn mk_ser_fn(cx: ext_ctxt, span: span, name: str, tps: [ast::ty_param],
-             f: fn(ext_ctxt, ser_tps_map,
-                   -@ast::expr, -@ast::expr) -> [@ast::stmt])
-    -> @ast::item {
-    let ext_cx = cx; // required for #ast
-
-    let tp_types = vec::map(tps, {|tp| cx.ty_path(span, [tp.ident], [])});
-    let v_ty = cx.ty_path(span, [name], tp_types);
-
-    let tp_inputs =
-        vec::map(tps, {|tp|
-            {mode: ast::expl(ast::by_ref),
-             ty: cx.ty_fn(span,
-                          [cx.ty_path(span, [tp.ident], [])],
-                          cx.ty_nil(span)),
-             ident: "__s" + tp.ident,
-             id: cx.next_id()}});
-
-    #debug["tp_inputs = %?", tp_inputs];
-
-
-    let ser_inputs: [ast::arg] =
-        [{mode: ast::expl(ast::by_ref),
-          ty: cx.ty_path(span, ["__S"], []),
-          ident: "__s",
-          id: cx.next_id()},
-         {mode: ast::expl(ast::by_ref),
-          ty: v_ty,
-          ident: "__v",
-          id: cx.next_id()}]
-        + tp_inputs;
-
-    let tps_map = map::str_hash();
-    vec::iter2(tps, tp_inputs) {|tp, arg|
-        let arg_ident = arg.ident;
-        tps_map.insert(
-            tp.ident,
-            fn@(v: @ast::expr) -> [@ast::stmt] {
-                let f = cx.var_ref(span, arg_ident);
-                #debug["serializing type arg %s", arg_ident];
-                [#ast(stmt){$(f)($(v));}]
-            });
-    }
-
-    let ser_bnds = @[
-        ast::bound_iface(cx.ty_path(span,
-                                    ["std", "serialization", "serializer"],
-                                    []))];
-
-    let ser_tps: [ast::ty_param] =
-        [{ident: "__S",
-          id: cx.next_id(),
-          bounds: ser_bnds}] +
-        vec::map(tps) {|tp| cx.clone_ty_param(tp) };
-
-    let ser_output: @ast::ty = @{id: cx.next_id(),
-                                 node: ast::ty_nil,
-                                 span: span};
-
-    let ser_blk = cx.blk(span,
-                         f(cx, tps_map, #ast{ __s }, #ast{ __v }));
-
-    @{ident: "serialize_" + name,
-      attrs: [],
-      id: cx.next_id(),
-      node: ast::item_fn({inputs: ser_inputs,
-                          output: ser_output,
-                          purity: ast::impure_fn,
-                          cf: ast::return_val,
-                          constraints: []},
-                         ser_tps,
-                         ser_blk),
-      vis: ast::public,
-      span: span}
-}
-
-// ______________________________________________________________________
-
-fn deser_path(cx: ext_ctxt, tps: deser_tps_map, path: @ast::path,
-                    -d: @ast::expr) -> @ast::expr {
-    // We want to take a path like a::b::c<...> and generate a call
-    // like a::b::deserialize_c(d, ...), as described above.
-
-    let callee =
-        cx.expr(
-            path.span,
-            ast::expr_path(
-                cx.helper_path(path, "deserialize")));
-
-    let ty_args = vec::map(path.types) {|ty|
-        let dv_expr = deser_ty(cx, tps, ty, cx.clone(d));
-        cx.lambda(cx.expr_blk(dv_expr))
-    };
-
-    cx.expr(path.span, ast::expr_call(callee, [d] + ty_args, false))
-}
-
-fn deser_lambda(cx: ext_ctxt, tps: deser_tps_map, ty: @ast::ty,
-                -d: @ast::expr) -> @ast::expr {
-    cx.lambda(cx.expr_blk(deser_ty(cx, tps, ty, d)))
-}
-
-fn deser_ty(cx: ext_ctxt, tps: deser_tps_map,
-                  ty: @ast::ty, -d: @ast::expr) -> @ast::expr {
-
-    let ext_cx = cx; // required for #ast{}
-
-    alt ty.node {
-      ast::ty_nil {
-        #ast{ $(d).read_nil() }
-      }
-
-      ast::ty_bot {
-        #ast{ fail }
-      }
-
-      ast::ty_box(mt) {
-        let l = deser_lambda(cx, tps, mt.ty, cx.clone(d));
-        #ast{ @$(d).read_box($(l)) }
-      }
-
-      ast::ty_uniq(mt) {
-        let l = deser_lambda(cx, tps, mt.ty, cx.clone(d));
-        #ast{ ~$(d).read_uniq($(l)) }
-      }
-
-      ast::ty_ptr(_) | ast::ty_rptr(_, _) {
-        #ast{ fail }
-      }
-
-      ast::ty_rec(flds) {
-        let fields = vec::from_fn(vec::len(flds)) {|fidx|
-            let fld = flds[fidx];
-            let d = cx.clone(d);
-            let f = cx.lit_str(fld.span, fld.node.ident);
-            let i = cx.lit_uint(fld.span, fidx);
-            let l = deser_lambda(cx, tps, fld.node.mt.ty, cx.clone(d));
-            {node: {mutbl: fld.node.mt.mutbl,
-                    ident: fld.node.ident,
-                    expr: #ast{ $(d).read_rec_field($(f), $(i), $(l))} },
-             span: fld.span}
-        };
-        let fld_expr = cx.expr(ty.span, ast::expr_rec(fields, none));
-        let fld_lambda = cx.lambda(cx.expr_blk(fld_expr));
-        #ast{ $(d).read_rec($(fld_lambda)) }
-      }
-
-      ast::ty_fn(_, _) {
-        #ast{ fail }
-      }
-
-      ast::ty_tup(tys) {
-        // Generate code like
-        //
-        // d.read_tup(3u) {||
-        //   (d.read_tup_elt(0u, {||...}),
-        //    d.read_tup_elt(1u, {||...}),
-        //    d.read_tup_elt(2u, {||...}))
-        // }
-
-        let arg_exprs = vec::from_fn(vec::len(tys)) {|i|
-            let idx = cx.lit_uint(ty.span, i);
-            let body = deser_lambda(cx, tps, tys[i], cx.clone(d));
-            #ast{ $(d).read_tup_elt($(idx), $(body)) }
-        };
-        let body =
-            cx.lambda(cx.expr_blk(
-                cx.expr(ty.span, ast::expr_tup(arg_exprs))));
-        let sz = cx.lit_uint(ty.span, vec::len(tys));
-        #ast{ $(d).read_tup($(sz), $(body)) }
-      }
-
-      ast::ty_path(path, _) {
-        if vec::len(path.idents) == 1u &&
-            vec::is_empty(path.types) {
-            let ident = path.idents[0];
-
-            alt tps.find(ident) {
-              some(f) { f() }
-              none { deser_path(cx, tps, path, d) }
-            }
-        } else {
-            deser_path(cx, tps, path, d)
-        }
-      }
-
-      ast::ty_constr(ty, constrs) {
-        deser_ty(cx, tps, ty, d)
-      }
-
-      ast::ty_mac(_) {
-        #ast{ fail }
-      }
-
-      ast::ty_infer {
-        #ast{ fail }
-      }
-
-      ast::ty_vstore(_, _) {
-        cx.span_unimpl(ty.span, "deserialization for vstore types");
-      }
-
-      ast::ty_vec(mt) {
-        let l = deser_lambda(cx, tps, mt.ty, cx.clone(d));
-        #ast{ std::serialization::read_to_vec($(d), $(l)) }
-      }
-    }
-}
-
-fn mk_deser_fn(cx: ext_ctxt, span: span, name: str, tps: [ast::ty_param],
-               f: fn(ext_ctxt, deser_tps_map, -@ast::expr) -> @ast::expr)
-    -> @ast::item {
-    let ext_cx = cx; // required for #ast
-
-    let tp_types = vec::map(tps, {|tp| cx.ty_path(span, [tp.ident], [])});
-    let v_ty = cx.ty_path(span, [name], tp_types);
-
-    let tp_inputs =
-        vec::map(tps, {|tp|
-            {mode: ast::expl(ast::by_ref),
-             ty: cx.ty_fn(span,
-                          [],
-                          cx.ty_path(span, [tp.ident], [])),
-             ident: "__d" + tp.ident,
-             id: cx.next_id()}});
-
-    #debug["tp_inputs = %?", tp_inputs];
-
-    let deser_inputs: [ast::arg] =
-        [{mode: ast::expl(ast::by_ref),
-          ty: cx.ty_path(span, ["__D"], []),
-          ident: "__d",
-          id: cx.next_id()}]
-        + tp_inputs;
-
-    let tps_map = map::str_hash();
-    vec::iter2(tps, tp_inputs) {|tp, arg|
-        let arg_ident = arg.ident;
-        tps_map.insert(
-            tp.ident,
-            fn@() -> @ast::expr {
-                let f = cx.var_ref(span, arg_ident);
-                #ast{ $(f)() }
-            });
-    }
-
-    let deser_bnds = @[
-        ast::bound_iface(cx.ty_path(span,
-                                    ["std", "serialization", "deserializer"],
-                                    []))];
-
-    let deser_tps: [ast::ty_param] =
-        [{ident: "__D",
-          id: cx.next_id(),
-          bounds: deser_bnds}] + vec::map(tps) {|tp|
-        let cloned = cx.clone_ty_param(tp);
-        {bounds: @(*cloned.bounds + [ast::bound_copy]) with cloned}
-    };
-
-    let deser_blk = cx.expr_blk(f(cx, tps_map, #ast(expr){__d}));
-
-    @{ident: "deserialize_" + name,
-      attrs: [],
-      id: cx.next_id(),
-      node: ast::item_fn({inputs: deser_inputs,
-                          output: v_ty,
-                          purity: ast::impure_fn,
-                          cf: ast::return_val,
-                          constraints: []},
-                         deser_tps,
-                         deser_blk),
-      vis: ast::public,
-      span: span}
-}
-
-fn ty_fns(cx: ext_ctxt, name: str, ty: @ast::ty, tps: [ast::ty_param])
-    -> [@ast::item] {
-
-    let span = ty.span;
-    [
-        mk_ser_fn(cx, span, name, tps, ser_ty(_, _, ty, _, _)),
-        mk_deser_fn(cx, span, name, tps, deser_ty(_, _, ty, _))
-    ]
-}
-
-fn ser_enum(cx: ext_ctxt, tps: ser_tps_map, e_name: str,
-            e_span: span, variants: [ast::variant],
-            -s: @ast::expr, -v: @ast::expr) -> [@ast::stmt] {
-    let ext_cx = cx;
-    let arms = vec::from_fn(vec::len(variants)) {|vidx|
-        let variant = variants[vidx];
-        let v_span = variant.span;
-        let v_name = variant.node.name;
-        let variant_tys = vec::map(variant.node.args) {|a| a.ty };
-
-        ser_variant(
-            cx, tps, variant_tys, v_span, cx.clone(s),
-
-            // Generate pattern var(v1, v2, v3)
-            {|pats|
-                if vec::is_empty(pats) {
-                    ast::pat_ident(cx.path(v_span, [v_name]), none)
-                } else {
-                    ast::pat_enum(cx.path(v_span, [v_name]), some(pats))
-                }
-            },
-
-            // Generate body s.emit_enum_variant("foo", 0u,
-            //                                   3u, {|| blk })
-            {|-s, blk|
-                let v_name = cx.lit_str(v_span, v_name);
-                let v_id = cx.lit_uint(v_span, vidx);
-                let sz = cx.lit_uint(v_span, vec::len(variant_tys));
-                let body = cx.lambda(blk);
-                #ast[expr]{
-                    $(s).emit_enum_variant($(v_name), $(v_id),
-                                           $(sz), $(body))
-                }
-            },
-
-            // Generate s.emit_enum_variant_arg(i, {|| blk })
-            {|-s, i, blk|
-                let idx = cx.lit_uint(v_span, i);
-                let body = cx.lambda(blk);
-                #ast[expr]{
-                    $(s).emit_enum_variant_arg($(idx), $(body))
-                }
-            })
-    };
-    let lam = cx.lambda(cx.blk(e_span, [cx.alt_stmt(arms, e_span, v)]));
-    let e_name = cx.lit_str(e_span, e_name);
-    [#ast(stmt){ $(s).emit_enum($(e_name), $(lam)) }]
-}
-
-fn deser_enum(cx: ext_ctxt, tps: deser_tps_map, e_name: str,
-              e_span: span, variants: [ast::variant],
-              -d: @ast::expr) -> @ast::expr {
-    let ext_cx = cx;
-    let arms: [ast::arm] = vec::from_fn(vec::len(variants)) {|vidx|
-        let variant = variants[vidx];
-        let v_span = variant.span;
-        let v_name = variant.node.name;
-        let tys = vec::map(variant.node.args) {|a| a.ty };
-
-        let arg_exprs = vec::from_fn(vec::len(tys)) {|i|
-            let idx = cx.lit_uint(v_span, i);
-            let body = deser_lambda(cx, tps, tys[i], cx.clone(d));
-            #ast{ $(d).read_enum_variant_arg($(idx), $(body)) }
-        };
-
-        let body = {
-            if vec::is_empty(tys) {
-                // for a nullary variant v, do "v"
-                cx.var_ref(v_span, v_name)
-            } else {
-                // for an n-ary variant v, do "v(a_1, ..., a_n)"
-                cx.expr(v_span, ast::expr_call(
-                    cx.var_ref(v_span, v_name), arg_exprs, false))
-            }
-        };
-
-        {pats: [@{id: cx.next_id(),
-                  node: ast::pat_lit(cx.lit_uint(v_span, vidx)),
-                  span: v_span}],
-         guard: none,
-         body: cx.expr_blk(body)}
-    };
-
-    // Generate code like:
-    let e_name = cx.lit_str(e_span, e_name);
-    let alt_expr = cx.expr(e_span,
-                           ast::expr_alt(#ast{__i}, arms, ast::alt_check));
-    let var_lambda = #ast{ {|__i| $(alt_expr)} };
-    let read_var = #ast{ $(cx.clone(d)).read_enum_variant($(var_lambda)) };
-    let read_lambda = cx.lambda(cx.expr_blk(read_var));
-    #ast{ $(d).read_enum($(e_name), $(read_lambda)) }
-}
-
-fn enum_fns(cx: ext_ctxt, e_name: str, e_span: span,
-               variants: [ast::variant], tps: [ast::ty_param])
-    -> [@ast::item] {
-    [
-        mk_ser_fn(cx, e_span, e_name, tps,
-                  ser_enum(_, _, e_name, e_span, variants, _, _)),
-        mk_deser_fn(cx, e_span, e_name, tps,
-                    deser_enum(_, _, e_name, e_span, variants, _))
-    ]
-}
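
As the module comment explains, the generated serialize function for a
parameterized type takes one extra closure per type parameter, so the caller
decides how each parameter gets encoded. A rough sketch of that calling
convention for spanned<T>, using an illustrative Serializer trait in place of
the real std::serialization iface:

    trait Serializer {
        fn emit_uint(&mut self, v: usize);
        fn emit_str(&mut self, v: &str);
    }

    // Toy serializer that just records what was emitted.
    struct Recorder { out: Vec<String> }
    impl Serializer for Recorder {
        fn emit_uint(&mut self, v: usize) { self.out.push(v.to_string()); }
        fn emit_str(&mut self, v: &str) { self.out.push(v.to_string()); }
    }

    struct Spanned<T> { node: T, span: usize }

    // One extra closure per type parameter: the caller supplies the code that
    // knows how to serialize a T, mirroring the generated serialize_spanned.
    fn serialize_spanned<S: Serializer, T>(
        s: &mut S,
        v: &Spanned<T>,
        ser_t: impl Fn(&mut S, &T),
    ) {
        ser_t(s, &v.node);
        s.emit_uint(v.span);
    }

    fn main() {
        let mut s = Recorder { out: vec![] };
        let v = Spanned { node: "hello".to_string(), span: 7 };
        serialize_spanned(&mut s, &v, |s, n| s.emit_str(n));
        assert_eq!(s.out, vec!["hello".to_string(), "7".to_string()]);
    }
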
diff --git a/src/librustsyntax/ext/base.rs b/src/librustsyntax/ext/base.rs
deleted file mode 100644
index 29e20212d66..00000000000
--- a/src/librustsyntax/ext/base.rs
+++ /dev/null
@@ -1,234 +0,0 @@
-import std::map::hashmap;
-import parse::parser;
-import diagnostic::span_handler;
-import codemap::{codemap, span, expn_info, expanded_from};
-import std::map::str_hash;
-
-type syntax_expander_ =
-    fn@(ext_ctxt, span, ast::mac_arg, ast::mac_body) -> @ast::expr;
-type syntax_expander = {
-    expander: syntax_expander_,
-    span: option<span>};
-type macro_def = {ident: str, ext: syntax_extension};
-type macro_definer =
-    fn@(ext_ctxt, span, ast::mac_arg, ast::mac_body) -> macro_def;
-type item_decorator =
-    fn@(ext_ctxt, span, ast::meta_item, [@ast::item]) -> [@ast::item];
-
-enum syntax_extension {
-    normal(syntax_expander),
-    macro_defining(macro_definer),
-    item_decorator(item_decorator),
-}
-
-// A temporary hard-coded map of methods for expanding syntax extension
-// AST nodes into full ASTs
-fn syntax_expander_table() -> hashmap<str, syntax_extension> {
-    fn builtin(f: syntax_expander_) -> syntax_extension
-        {normal({expander: f, span: none})}
-    let syntax_expanders = str_hash::<syntax_extension>();
-    syntax_expanders.insert("fmt", builtin(ext::fmt::expand_syntax_ext));
-    syntax_expanders.insert("auto_serialize",
-                            item_decorator(ext::auto_serialize::expand));
-    syntax_expanders.insert("env", builtin(ext::env::expand_syntax_ext));
-    syntax_expanders.insert("macro",
-                            macro_defining(ext::simplext::add_new_extension));
-    syntax_expanders.insert("concat_idents",
-                            builtin(ext::concat_idents::expand_syntax_ext));
-    syntax_expanders.insert("ident_to_str",
-                            builtin(ext::ident_to_str::expand_syntax_ext));
-    syntax_expanders.insert("log_syntax",
-                            builtin(ext::log_syntax::expand_syntax_ext));
-    syntax_expanders.insert("ast",
-                            builtin(ext::qquote::expand_ast));
-    syntax_expanders.insert("line",
-                            builtin(ext::source_util::expand_line));
-    syntax_expanders.insert("col",
-                            builtin(ext::source_util::expand_col));
-    syntax_expanders.insert("file",
-                            builtin(ext::source_util::expand_file));
-    syntax_expanders.insert("stringify",
-                            builtin(ext::source_util::expand_stringify));
-    syntax_expanders.insert("include",
-                            builtin(ext::source_util::expand_include));
-    syntax_expanders.insert("include_str",
-                            builtin(ext::source_util::expand_include_str));
-    syntax_expanders.insert("include_bin",
-                            builtin(ext::source_util::expand_include_bin));
-    syntax_expanders.insert("mod",
-                            builtin(ext::source_util::expand_mod));
-    ret syntax_expanders;
-}
-
-iface ext_ctxt {
-    fn codemap() -> codemap;
-    fn parse_sess() -> parse::parse_sess;
-    fn cfg() -> ast::crate_cfg;
-    fn print_backtrace();
-    fn backtrace() -> expn_info;
-    fn mod_push(mod_name: ast::ident);
-    fn mod_pop();
-    fn mod_path() -> [ast::ident];
-    fn bt_push(ei: codemap::expn_info_);
-    fn bt_pop();
-    fn span_fatal(sp: span, msg: str) -> !;
-    fn span_err(sp: span, msg: str);
-    fn span_unimpl(sp: span, msg: str) -> !;
-    fn span_bug(sp: span, msg: str) -> !;
-    fn bug(msg: str) -> !;
-    fn next_id() -> ast::node_id;
-}
-
-fn mk_ctxt(parse_sess: parse::parse_sess,
-           cfg: ast::crate_cfg) -> ext_ctxt {
-    type ctxt_repr = {parse_sess: parse::parse_sess,
-                      cfg: ast::crate_cfg,
-                      mut backtrace: expn_info,
-                      mut mod_path: [ast::ident]};
-    impl of ext_ctxt for ctxt_repr {
-        fn codemap() -> codemap { self.parse_sess.cm }
-        fn parse_sess() -> parse::parse_sess { self.parse_sess }
-        fn cfg() -> ast::crate_cfg { self.cfg }
-        fn print_backtrace() { }
-        fn backtrace() -> expn_info { self.backtrace }
-        fn mod_push(i: ast::ident) { vec::push(self.mod_path, i); }
-        fn mod_pop() { vec::pop(self.mod_path); }
-        fn mod_path() -> [ast::ident] { ret self.mod_path; }
-        fn bt_push(ei: codemap::expn_info_) {
-            alt ei {
-              expanded_from({call_site: cs, callie: callie}) {
-                self.backtrace =
-                    some(@expanded_from({
-                        call_site: {lo: cs.lo, hi: cs.hi,
-                                    expn_info: self.backtrace},
-                        callie: callie}));
-              }
-            }
-        }
-        fn bt_pop() {
-            alt self.backtrace {
-              some(@expanded_from({call_site: {expn_info: prev, _}, _})) {
-                self.backtrace = prev
-              }
-              _ { self.bug("tried to pop without a push"); }
-            }
-        }
-        fn span_fatal(sp: span, msg: str) -> ! {
-            self.print_backtrace();
-            self.parse_sess.span_diagnostic.span_fatal(sp, msg);
-        }
-        fn span_err(sp: span, msg: str) {
-            self.print_backtrace();
-            self.parse_sess.span_diagnostic.span_err(sp, msg);
-        }
-        fn span_unimpl(sp: span, msg: str) -> ! {
-            self.print_backtrace();
-            self.parse_sess.span_diagnostic.span_unimpl(sp, msg);
-        }
-        fn span_bug(sp: span, msg: str) -> ! {
-            self.print_backtrace();
-            self.parse_sess.span_diagnostic.span_bug(sp, msg);
-        }
-        fn bug(msg: str) -> ! {
-            self.print_backtrace();
-            self.parse_sess.span_diagnostic.handler().bug(msg);
-        }
-        fn next_id() -> ast::node_id {
-            ret parse::next_node_id(self.parse_sess);
-        }
-    }
-    let imp : ctxt_repr = {
-        parse_sess: parse_sess,
-        cfg: cfg,
-        mut backtrace: none,
-        mut mod_path: []
-    };
-    ret imp as ext_ctxt
-}
-
-fn expr_to_str(cx: ext_ctxt, expr: @ast::expr, error: str) -> str {
-    alt expr.node {
-      ast::expr_lit(l) {
-        alt l.node {
-          ast::lit_str(s) { ret s; }
-          _ { cx.span_fatal(l.span, error); }
-        }
-      }
-      _ { cx.span_fatal(expr.span, error); }
-    }
-}
-
-fn expr_to_ident(cx: ext_ctxt, expr: @ast::expr, error: str) -> ast::ident {
-    alt expr.node {
-      ast::expr_path(p) {
-        if vec::len(p.types) > 0u || vec::len(p.idents) != 1u {
-            cx.span_fatal(expr.span, error);
-        } else { ret p.idents[0]; }
-      }
-      _ { cx.span_fatal(expr.span, error); }
-    }
-}
-
-fn make_new_lit(cx: ext_ctxt, sp: codemap::span, lit: ast::lit_) ->
-   @ast::expr {
-    let sp_lit = @{node: lit, span: sp};
-    ret @{id: cx.next_id(), node: ast::expr_lit(sp_lit), span: sp};
-}
-
-fn make_new_expr(cx: ext_ctxt, sp: codemap::span, expr: ast::expr_) ->
-    @ast::expr {
-    ret @{id: cx.next_id(), node: expr, span: sp};
-}
-
-fn get_mac_args_no_max(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
-                       min: uint, name: str) -> [@ast::expr] {
-    ret get_mac_args(cx, sp, arg, min, none, name);
-}
-
-fn get_mac_args(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
-                min: uint, max: option<uint>, name: str) -> [@ast::expr] {
-    alt arg {
-      some(expr) {
-        alt expr.node {
-          ast::expr_vec(elts, _) {
-            let elts_len = vec::len(elts);
-            alt max {
-              some(max) if ! (min <= elts_len && elts_len <= max) {
-                cx.span_fatal(sp,
-                              #fmt["#%s takes between %u and %u arguments.",
-                                   name, min, max]);
-              }
-              none if ! (min <= elts_len) {
-                cx.span_fatal(sp, #fmt["#%s needs at least %u arguments.",
-                                       name, min]);
-              }
-              _ { ret elts; /* we're good */}
-            }
-          }
-          _ {
-            cx.span_fatal(sp, #fmt["#%s: malformed invocation", name])
-          }
-        }
-      }
-      none {cx.span_fatal(sp, #fmt["#%s: missing arguments", name])}
-    }
-}
-
-fn get_mac_body(cx: ext_ctxt, sp: span, args: ast::mac_body)
-    -> ast::mac_body_
-{
-    alt (args) {
-      some(body) {body}
-      none {cx.span_fatal(sp, "missing macro body")}
-    }
-}
-
-//
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
-//
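
syntax_expander_table above is just a name-to-extension hash map; expansion
later looks up the invoked name and calls the stored expander. A stripped-down
sketch of that dispatch, with the ext_ctxt and AST arguments replaced by plain
strings for illustration:

    use std::collections::HashMap;

    // Each extension maps a raw argument string to its expansion.
    type Expander = fn(&str) -> String;

    fn expand_line(_arg: &str) -> String { "42".to_string() }
    fn expand_stringify(arg: &str) -> String { format!("\"{arg}\"") }

    fn expander_table() -> HashMap<&'static str, Expander> {
        let mut m: HashMap<&'static str, Expander> = HashMap::new();
        m.insert("line", expand_line);
        m.insert("stringify", expand_stringify);
        m
    }

    fn main() {
        let table = expander_table();
        let expanded = (table["stringify"])("1 + 2");
        assert_eq!(expanded, "\"1 + 2\"");
    }
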
diff --git a/src/librustsyntax/ext/build.rs b/src/librustsyntax/ext/build.rs
deleted file mode 100644
index 516deb1e793..00000000000
--- a/src/librustsyntax/ext/build.rs
+++ /dev/null
@@ -1,78 +0,0 @@
-import codemap::span;
-import base::ext_ctxt;
-
-fn mk_lit(cx: ext_ctxt, sp: span, lit: ast::lit_) -> @ast::expr {
-    let sp_lit = @{node: lit, span: sp};
-    ret @{id: cx.next_id(), node: ast::expr_lit(sp_lit), span: sp};
-}
-fn mk_str(cx: ext_ctxt, sp: span, s: str) -> @ast::expr {
-    let lit = ast::lit_str(s);
-    ret mk_lit(cx, sp, lit);
-}
-fn mk_int(cx: ext_ctxt, sp: span, i: int) -> @ast::expr {
-    let lit = ast::lit_int(i as i64, ast::ty_i);
-    ret mk_lit(cx, sp, lit);
-}
-fn mk_uint(cx: ext_ctxt, sp: span, u: uint) -> @ast::expr {
-    let lit = ast::lit_uint(u as u64, ast::ty_u);
-    ret mk_lit(cx, sp, lit);
-}
-fn mk_binary(cx: ext_ctxt, sp: span, op: ast::binop,
-             lhs: @ast::expr, rhs: @ast::expr)
-   -> @ast::expr {
-    let binexpr = ast::expr_binary(op, lhs, rhs);
-    ret @{id: cx.next_id(), node: binexpr, span: sp};
-}
-fn mk_unary(cx: ext_ctxt, sp: span, op: ast::unop, e: @ast::expr)
-    -> @ast::expr {
-    let expr = ast::expr_unary(op, e);
-    ret @{id: cx.next_id(), node: expr, span: sp};
-}
-fn mk_path(cx: ext_ctxt, sp: span, idents: [ast::ident]) ->
-    @ast::expr {
-    let path = @{span: sp, global: false, idents: idents,
-                 rp: none, types: []};
-    let pathexpr = ast::expr_path(path);
-    ret @{id: cx.next_id(), node: pathexpr, span: sp};
-}
-fn mk_access_(cx: ext_ctxt, sp: span, p: @ast::expr, m: ast::ident)
-    -> @ast::expr {
-    let expr = ast::expr_field(p, m, []);
-    ret @{id: cx.next_id(), node: expr, span: sp};
-}
-fn mk_access(cx: ext_ctxt, sp: span, p: [ast::ident], m: ast::ident)
-    -> @ast::expr {
-    let pathexpr = mk_path(cx, sp, p);
-    ret mk_access_(cx, sp, pathexpr, m);
-}
-fn mk_call_(cx: ext_ctxt, sp: span, fn_expr: @ast::expr,
-            args: [@ast::expr]) -> @ast::expr {
-    let callexpr = ast::expr_call(fn_expr, args, false);
-    ret @{id: cx.next_id(), node: callexpr, span: sp};
-}
-fn mk_call(cx: ext_ctxt, sp: span, fn_path: [ast::ident],
-             args: [@ast::expr]) -> @ast::expr {
-    let pathexpr = mk_path(cx, sp, fn_path);
-    ret mk_call_(cx, sp, pathexpr, args);
-}
-// e = expr, t = type
-fn mk_vec_e(cx: ext_ctxt, sp: span, exprs: [@ast::expr]) ->
-   @ast::expr {
-    let vecexpr = ast::expr_vec(exprs, ast::m_imm);
-    ret @{id: cx.next_id(), node: vecexpr, span: sp};
-}
-fn mk_rec_e(cx: ext_ctxt, sp: span,
-            fields: [{ident: ast::ident, ex: @ast::expr}]) ->
-    @ast::expr {
-    let mut astfields: [ast::field] = [];
-    for fields.each {|field|
-        let ident = field.ident;
-        let val = field.ex;
-        let astfield =
-            {node: {mutbl: ast::m_imm, ident: ident, expr: val}, span: sp};
-        astfields += [astfield];
-    }
-    let recexpr = ast::expr_rec(astfields, option::none::<@ast::expr>);
-    ret @{id: cx.next_id(), node: recexpr, span: sp};
-}
-
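
build.rs is a kit of tiny constructors that other extensions compose into
larger expression trees. The same idea with a toy expression type (illustrative,
not the real AST):

    #[derive(Debug)]
    enum Expr {
        Uint(u64),
        Binary(&'static str, Box<Expr>, Box<Expr>),
        Call(String, Vec<Expr>),
    }

    fn mk_uint(u: u64) -> Expr { Expr::Uint(u) }
    fn mk_binary(op: &'static str, lhs: Expr, rhs: Expr) -> Expr {
        Expr::Binary(op, Box::new(lhs), Box::new(rhs))
    }
    fn mk_call(name: &str, args: Vec<Expr>) -> Expr {
        Expr::Call(name.to_string(), args)
    }

    fn main() {
        // Builds the equivalent of `f(1 + 2)`.
        let e = mk_call("f", vec![mk_binary("+", mk_uint(1), mk_uint(2))]);
        println!("{e:?}");
    }
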
diff --git a/src/librustsyntax/ext/concat_idents.rs b/src/librustsyntax/ext/concat_idents.rs
deleted file mode 100644
index 278321ec8bc..00000000000
--- a/src/librustsyntax/ext/concat_idents.rs
+++ /dev/null
@@ -1,15 +0,0 @@
-import base::*;
-
-fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
-                     _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args_no_max(cx,sp,arg,1u,"concat_idents");
-    let mut res: ast::ident = "";
-    for args.each {|e|
-        res += expr_to_ident(cx, e, "expected an ident");
-    }
-
-    ret @{id: cx.next_id(),
-          node: ast::expr_path(@{span: sp, global: false, idents: [res],
-                                 rp: none, types: []}),
-          span: sp};
-}
diff --git a/src/librustsyntax/ext/env.rs b/src/librustsyntax/ext/env.rs
deleted file mode 100644
index 6a4d937f083..00000000000
--- a/src/librustsyntax/ext/env.rs
+++ /dev/null
@@ -1,35 +0,0 @@
-
-/*
- * The compiler code necessary to support the #env extension.  Eventually this
- * should all get sucked into the compiler syntax extension plugin
- * interface.
- */
-import base::*;
-export expand_syntax_ext;
-
-fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
-                     _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args(cx, sp, arg, 1u, option::some(1u), "env");
-
-    // FIXME: if this was more thorough it would manufacture an
-    // option<str> rather than just a maybe-empty string. (Issue #2248)
-
-    let var = expr_to_str(cx, args[0], "#env requires a string");
-    alt os::getenv(var) {
-      option::none { ret make_new_str(cx, sp, ""); }
-      option::some(s) { ret make_new_str(cx, sp, s); }
-    }
-}
-
-fn make_new_str(cx: ext_ctxt, sp: codemap::span, s: str) -> @ast::expr {
-    ret make_new_lit(cx, sp, ast::lit_str(s));
-}
-//
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
-//
diff --git a/src/librustsyntax/ext/expand.rs b/src/librustsyntax/ext/expand.rs
deleted file mode 100644
index 90487e27956..00000000000
--- a/src/librustsyntax/ext/expand.rs
+++ /dev/null
@@ -1,157 +0,0 @@
-import std::map::hashmap;
-
-import ast::{crate, expr_, expr_mac, mac_invoc};
-import fold::*;
-import ext::base::*;
-import ext::qquote::{qq_helper};
-import parse::parser;
-import parse::parse_expr_from_source_str;
-
-
-import codemap::{span, expanded_from};
-
-fn expand_expr(exts: hashmap<str, syntax_extension>, cx: ext_ctxt,
-               e: expr_, s: span, fld: ast_fold,
-               orig: fn@(expr_, span, ast_fold) -> (expr_, span))
-    -> (expr_, span)
-{
-    ret alt e {
-          expr_mac(mac) {
-            alt mac.node {
-              mac_invoc(pth, args, body) {
-                assert (vec::len(pth.idents) > 0u);
-                let extname = pth.idents[0];
-                alt exts.find(extname) {
-                  none {
-                    cx.span_fatal(pth.span,
-                                  #fmt["macro undefined: '%s'", extname])
-                  }
-                  some(item_decorator(_)) {
-                    cx.span_fatal(
-                        pth.span,
-                        #fmt["%s can only be used as a decorator", extname]);
-                  }
-                  some(normal({expander: exp, span: exp_sp})) {
-                    let expanded = exp(cx, pth.span, args, body);
-
-                    cx.bt_push(expanded_from({call_site: s,
-                                callie: {name: extname, span: exp_sp}}));
-                    //keep going, outside-in
-                    let fully_expanded = fld.fold_expr(expanded).node;
-                    cx.bt_pop();
-
-                    (fully_expanded, s)
-                  }
-                  some(macro_defining(ext)) {
-                    let named_extension = ext(cx, pth.span, args, body);
-                    exts.insert(named_extension.ident, named_extension.ext);
-                    (ast::expr_rec([], none), s)
-                  }
-                }
-              }
-              _ { cx.span_bug(mac.span, "naked syntactic bit") }
-            }
-          }
-          _ { orig(e, s, fld) }
-        };
-}
-
-fn expand_mod_items(exts: hashmap<str, syntax_extension>, cx: ext_ctxt,
-                    module: ast::_mod, fld: ast_fold,
-                    orig: fn@(ast::_mod, ast_fold) -> ast::_mod)
-    -> ast::_mod
-{
-    // Fold the contents first:
-    let module = orig(module, fld);
-
-    // For each item, look through the attributes.  If any of them are
-    // decorated with "item decorators", then use that function to transform
-    // the item into a new set of items.
-    let new_items = vec::flat_map(module.items) {|item|
-        vec::foldr(item.attrs, [item]) {|attr, items|
-            let mname = alt attr.node.value.node {
-              ast::meta_word(n) { n }
-              ast::meta_name_value(n, _) { n }
-              ast::meta_list(n, _) { n }
-            };
-            alt exts.find(mname) {
-              none | some(normal(_)) | some(macro_defining(_)) {
-                items
-              }
-
-              some(item_decorator(dec_fn)) {
-                dec_fn(cx, attr.span, attr.node.value, items)
-              }
-            }
-        }
-    };
-
-    ret {items: new_items with module};
-}
-
-/* record module we enter for `#mod` */
-fn expand_item(cx: ext_ctxt, &&it: @ast::item, fld: ast_fold,
-               orig: fn@(&&@ast::item, ast_fold) -> @ast::item)
-    -> @ast::item
-{
-    let is_mod = alt it.node {
-      ast::item_mod(_) | ast::item_native_mod(_) {true}
-      _ {false}
-    };
-    if is_mod { cx.mod_push(it.ident); }
-    let ret_val = orig(it, fld);
-    if is_mod { cx.mod_pop(); }
-    ret ret_val;
-}
-
-fn new_span(cx: ext_ctxt, sp: span) -> span {
-    /* this discards information in the case of macro-defining macros */
-    ret {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()};
-}
-
-// FIXME: this is a terrible kludge to inject some macros into the default
-// compilation environment. When the macro-definition system is substantially
-// more mature, these should move from here, into a compiled part of libcore
-// at very least. (Issue #2247)
-
-fn core_macros() -> str {
-    ret
-"{
-    #macro([#error[f, ...], log(core::error, #fmt[f, ...])]);
-    #macro([#warn[f, ...], log(core::warn, #fmt[f, ...])]);
-    #macro([#info[f, ...], log(core::info, #fmt[f, ...])]);
-    #macro([#debug[f, ...], log(core::debug, #fmt[f, ...])]);
-}";
-}
-
-fn expand_crate(parse_sess: parse::parse_sess,
-                cfg: ast::crate_cfg, c: @crate) -> @crate {
-    let exts = syntax_expander_table();
-    let afp = default_ast_fold();
-    let cx: ext_ctxt = mk_ctxt(parse_sess, cfg);
-    let f_pre =
-        @{fold_expr: bind expand_expr(exts, cx, _, _, _, afp.fold_expr),
-          fold_mod: bind expand_mod_items(exts, cx, _, _, afp.fold_mod),
-          fold_item: bind expand_item(cx, _, _, afp.fold_item),
-          new_span: bind new_span(cx, _)
-          with *afp};
-    let f = make_fold(f_pre);
-    let cm = parse_expr_from_source_str("<core-macros>",
-                                        @core_macros(),
-                                        cfg,
-                                        parse_sess);
-
-    // This is run for its side-effects on the expander env,
-    // as it registers all the core macros as expanders.
-    f.fold_expr(cm);
-
-    let res = @f.fold_crate(*c);
-    ret res;
-}
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
diff --git a/src/librustsyntax/ext/fmt.rs b/src/librustsyntax/ext/fmt.rs
deleted file mode 100644
index aceeed4b9e8..00000000000
--- a/src/librustsyntax/ext/fmt.rs
+++ /dev/null
@@ -1,283 +0,0 @@
-
-
-/*
- * The compiler code necessary to support the #fmt extension. Eventually this
- * should all get sucked into either the standard library extfmt module or the
- * compiler syntax extension plugin interface.
- */
-import extfmt::ct::*;
-import base::*;
-import codemap::span;
-import ext::build::*;
-export expand_syntax_ext;
-
-fn expand_syntax_ext(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
-                     _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args_no_max(cx, sp, arg, 1u, "fmt");
-    let fmt =
-        expr_to_str(cx, args[0],
-                    "first argument to #fmt must be a string literal.");
-    let fmtspan = args[0].span;
-    #debug("Format string:");
-    log(debug, fmt);
-    fn parse_fmt_err_(cx: ext_ctxt, sp: span, msg: str) -> ! {
-        cx.span_fatal(sp, msg);
-    }
-    let parse_fmt_err = bind parse_fmt_err_(cx, fmtspan, _);
-    let pieces = parse_fmt_string(fmt, parse_fmt_err);
-    ret pieces_to_expr(cx, sp, pieces, args);
-}
-
-// FIXME: A lot of these functions for producing expressions can probably
-// be factored out in common with other code that builds expressions.
-// FIXME: Cleanup the naming of these functions
-// NOTE: Moved many of the common ones to build.rs --kevina
-// See Issue #2249
-fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
-   -> @ast::expr {
-    fn make_path_vec(_cx: ext_ctxt, ident: ast::ident) -> [ast::ident] {
-        ret ["extfmt", "rt", ident];
-    }
-    fn make_rt_path_expr(cx: ext_ctxt, sp: span, ident: str) -> @ast::expr {
-        let path = make_path_vec(cx, ident);
-        ret mk_path(cx, sp, path);
-    }
-    // Produces an AST expression that represents a RT::conv record,
-    // which tells the RT::conv* functions how to perform the conversion
-
-    fn make_rt_conv_expr(cx: ext_ctxt, sp: span, cnv: conv) -> @ast::expr {
-        fn make_flags(cx: ext_ctxt, sp: span, flags: [flag]) -> @ast::expr {
-            let mut flagexprs: [@ast::expr] = [];
-            for flags.each {|f|
-                let mut fstr;
-                alt f {
-                  flag_left_justify { fstr = "flag_left_justify"; }
-                  flag_left_zero_pad { fstr = "flag_left_zero_pad"; }
-                  flag_space_for_sign { fstr = "flag_space_for_sign"; }
-                  flag_sign_always { fstr = "flag_sign_always"; }
-                  flag_alternate { fstr = "flag_alternate"; }
-                }
-                flagexprs += [make_rt_path_expr(cx, sp, fstr)];
-            }
-            ret mk_vec_e(cx, sp, flagexprs);
-        }
-        fn make_count(cx: ext_ctxt, sp: span, cnt: count) -> @ast::expr {
-            alt cnt {
-              count_implied {
-                ret make_rt_path_expr(cx, sp, "count_implied");
-              }
-              count_is(c) {
-                let count_lit = mk_int(cx, sp, c);
-                let count_is_path = make_path_vec(cx, "count_is");
-                let count_is_args = [count_lit];
-                ret mk_call(cx, sp, count_is_path, count_is_args);
-              }
-              _ { cx.span_unimpl(sp, "unimplemented #fmt conversion"); }
-            }
-        }
-        fn make_ty(cx: ext_ctxt, sp: span, t: ty) -> @ast::expr {
-            let mut rt_type;
-            alt t {
-              ty_hex(c) {
-                alt c {
-                  case_upper { rt_type = "ty_hex_upper"; }
-                  case_lower { rt_type = "ty_hex_lower"; }
-                }
-              }
-              ty_bits { rt_type = "ty_bits"; }
-              ty_octal { rt_type = "ty_octal"; }
-              _ { rt_type = "ty_default"; }
-            }
-            ret make_rt_path_expr(cx, sp, rt_type);
-        }
-        fn make_conv_rec(cx: ext_ctxt, sp: span, flags_expr: @ast::expr,
-                         width_expr: @ast::expr, precision_expr: @ast::expr,
-                         ty_expr: @ast::expr) -> @ast::expr {
-            ret mk_rec_e(cx, sp,
-                         [{ident: "flags", ex: flags_expr},
-                          {ident: "width", ex: width_expr},
-                          {ident: "precision", ex: precision_expr},
-                          {ident: "ty", ex: ty_expr}]);
-        }
-        let rt_conv_flags = make_flags(cx, sp, cnv.flags);
-        let rt_conv_width = make_count(cx, sp, cnv.width);
-        let rt_conv_precision = make_count(cx, sp, cnv.precision);
-        let rt_conv_ty = make_ty(cx, sp, cnv.ty);
-        ret make_conv_rec(cx, sp, rt_conv_flags, rt_conv_width,
-                          rt_conv_precision, rt_conv_ty);
-    }
-    fn make_conv_call(cx: ext_ctxt, sp: span, conv_type: str, cnv: conv,
-                      arg: @ast::expr) -> @ast::expr {
-        let fname = "conv_" + conv_type;
-        let path = make_path_vec(cx, fname);
-        let cnv_expr = make_rt_conv_expr(cx, sp, cnv);
-        let args = [cnv_expr, arg];
-        ret mk_call(cx, arg.span, path, args);
-    }
-    fn make_new_conv(cx: ext_ctxt, sp: span, cnv: conv, arg: @ast::expr) ->
-       @ast::expr {
-        // FIXME: Move validation code into core::extfmt (Issue #2249)
-
-        fn is_signed_type(cnv: conv) -> bool {
-            alt cnv.ty {
-              ty_int(s) {
-                alt s { signed { ret true; } unsigned { ret false; } }
-              }
-              ty_float { ret true; }
-              _ { ret false; }
-            }
-        }
-        let unsupported = "conversion not supported in #fmt string";
-        alt cnv.param {
-          option::none { }
-          _ { cx.span_unimpl(sp, unsupported); }
-        }
-        for cnv.flags.each {|f|
-            alt f {
-              flag_left_justify { }
-              flag_sign_always {
-                if !is_signed_type(cnv) {
-                    cx.span_fatal(sp,
-                                  "+ flag only valid in " +
-                                      "signed #fmt conversion");
-                }
-              }
-              flag_space_for_sign {
-                if !is_signed_type(cnv) {
-                    cx.span_fatal(sp,
-                                  "space flag only valid in " +
-                                      "signed #fmt conversions");
-                }
-              }
-              flag_left_zero_pad { }
-              _ { cx.span_unimpl(sp, unsupported); }
-            }
-        }
-        alt cnv.width {
-          count_implied { }
-          count_is(_) { }
-          _ { cx.span_unimpl(sp, unsupported); }
-        }
-        alt cnv.precision {
-          count_implied { }
-          count_is(_) { }
-          _ { cx.span_unimpl(sp, unsupported); }
-        }
-        alt cnv.ty {
-          ty_str { ret make_conv_call(cx, arg.span, "str", cnv, arg); }
-          ty_int(sign) {
-            alt sign {
-              signed { ret make_conv_call(cx, arg.span, "int", cnv, arg); }
-              unsigned {
-                ret make_conv_call(cx, arg.span, "uint", cnv, arg);
-              }
-            }
-          }
-          ty_bool { ret make_conv_call(cx, arg.span, "bool", cnv, arg); }
-          ty_char { ret make_conv_call(cx, arg.span, "char", cnv, arg); }
-          ty_hex(_) { ret make_conv_call(cx, arg.span, "uint", cnv, arg); }
-          ty_bits { ret make_conv_call(cx, arg.span, "uint", cnv, arg); }
-          ty_octal { ret make_conv_call(cx, arg.span, "uint", cnv, arg); }
-          ty_float { ret make_conv_call(cx, arg.span, "float", cnv, arg); }
-          ty_poly { ret make_conv_call(cx, arg.span, "poly", cnv, arg); }
-        }
-    }
-    fn log_conv(c: conv) {
-        alt c.param {
-          some(p) { log(debug, "param: " + int::to_str(p, 10u)); }
-          _ { #debug("param: none"); }
-        }
-        for c.flags.each {|f|
-            alt f {
-              flag_left_justify { #debug("flag: left justify"); }
-              flag_left_zero_pad { #debug("flag: left zero pad"); }
-              flag_space_for_sign { #debug("flag: left space pad"); }
-              flag_sign_always { #debug("flag: sign always"); }
-              flag_alternate { #debug("flag: alternate"); }
-            }
-        }
-        alt c.width {
-          count_is(i) { log(debug,
-                                 "width: count is " + int::to_str(i, 10u)); }
-          count_is_param(i) {
-            log(debug,
-                     "width: count is param " + int::to_str(i, 10u));
-          }
-          count_is_next_param { #debug("width: count is next param"); }
-          count_implied { #debug("width: count is implied"); }
-        }
-        alt c.precision {
-          count_is(i) { log(debug,
-                                 "prec: count is " + int::to_str(i, 10u)); }
-          count_is_param(i) {
-            log(debug,
-                     "prec: count is param " + int::to_str(i, 10u));
-          }
-          count_is_next_param { #debug("prec: count is next param"); }
-          count_implied { #debug("prec: count is implied"); }
-        }
-        alt c.ty {
-          ty_bool { #debug("type: bool"); }
-          ty_str { #debug("type: str"); }
-          ty_char { #debug("type: char"); }
-          ty_int(s) {
-            alt s {
-              signed { #debug("type: signed"); }
-              unsigned { #debug("type: unsigned"); }
-            }
-          }
-          ty_bits { #debug("type: bits"); }
-          ty_hex(cs) {
-            alt cs {
-              case_upper { #debug("type: uhex"); }
-              case_lower { #debug("type: lhex"); }
-            }
-          }
-          ty_octal { #debug("type: octal"); }
-          ty_float { #debug("type: float"); }
-          ty_poly { #debug("type: poly"); }
-        }
-    }
-    let fmt_sp = args[0].span;
-    let mut n = 0u;
-    let mut tmp_expr = mk_str(cx, sp, "");
-    let nargs = vec::len::<@ast::expr>(args);
-    for pieces.each {|pc|
-        alt pc {
-          piece_string(s) {
-            let s_expr = mk_str(cx, fmt_sp, s);
-            tmp_expr = mk_binary(cx, fmt_sp, ast::add, tmp_expr, s_expr);
-          }
-          piece_conv(conv) {
-            n += 1u;
-            if n >= nargs {
-                cx.span_fatal(sp,
-                              "not enough arguments to #fmt " +
-                                  "for the given format string");
-            }
-            #debug("Building conversion:");
-            log_conv(conv);
-            let arg_expr = args[n];
-            let c_expr = make_new_conv(cx, fmt_sp, conv, arg_expr);
-            tmp_expr = mk_binary(cx, fmt_sp, ast::add, tmp_expr, c_expr);
-          }
-        }
-    }
-    let expected_nargs = n + 1u; // n conversions + the fmt string
-
-    if expected_nargs < nargs {
-        cx.span_fatal
-            (sp, #fmt["too many arguments to #fmt. found %u, expected %u",
-                           nargs, expected_nargs]);
-    }
-    ret tmp_expr;
-}
-//
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
-//
diff --git a/src/librustsyntax/ext/ident_to_str.rs b/src/librustsyntax/ext/ident_to_str.rs
deleted file mode 100644
index 7dfb70f1520..00000000000
--- a/src/librustsyntax/ext/ident_to_str.rs
+++ /dev/null
@@ -1,11 +0,0 @@
-import base::*;
-import option;
-
-fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
-                     _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args(cx,sp,arg,1u,option::some(1u),"ident_to_str");
-
-    ret make_new_lit(cx, sp,
-                     ast::lit_str(expr_to_ident(cx, args[0u],
-                                                "expected an ident")));
-}
diff --git a/src/librustsyntax/ext/log_syntax.rs b/src/librustsyntax/ext/log_syntax.rs
deleted file mode 100644
index 5ccbb143b97..00000000000
--- a/src/librustsyntax/ext/log_syntax.rs
+++ /dev/null
@@ -1,15 +0,0 @@
-import base::*;
-import io::writer_util;
-
-fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
-                     _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args_no_max(cx,sp,arg,0u,"log_syntax");
-    cx.print_backtrace();
-    io::stdout().write_line(
-        str::connect(vec::map(args,
-                              {|&&ex| print::pprust::expr_to_str(ex)}), ", ")
-    );
-
-    //trivial expression
-    ret @{id: cx.next_id(), node: ast::expr_rec([], option::none), span: sp};
-}
diff --git a/src/librustsyntax/ext/qquote.rs b/src/librustsyntax/ext/qquote.rs
deleted file mode 100644
index a6e08439356..00000000000
--- a/src/librustsyntax/ext/qquote.rs
+++ /dev/null
@@ -1,337 +0,0 @@
-import ast::{crate, expr_, mac_invoc,
-                     mac_aq, mac_var};
-import fold::*;
-import visit::*;
-import ext::base::*;
-import ext::build::*;
-import parse::parser;
-import parse::parser::parse_from_source_str;
-import dvec::{dvec, extensions};
-
-import print::*;
-import io::*;
-
-import codemap::span;
-
-type aq_ctxt = @{lo: uint,
-                 gather: dvec<{lo: uint, hi: uint,
-                               e: @ast::expr,
-                               constr: str}>};
-enum fragment {
-    from_expr(@ast::expr),
-    from_ty(@ast::ty)
-}
-
-iface qq_helper {
-    fn span() -> span;
-    fn visit(aq_ctxt, vt<aq_ctxt>);
-    fn extract_mac() -> option<ast::mac_>;
-    fn mk_parse_fn(ext_ctxt,span) -> @ast::expr;
-    fn get_fold_fn() -> str;
-}
-
-impl of qq_helper for @ast::crate {
-    fn span() -> span {self.span}
-    fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_crate(*self, cx, v);}
-    fn extract_mac() -> option<ast::mac_> {fail}
-    fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-        mk_path(cx, sp, ["syntax", "ext", "qquote", "parse_crate"])
-    }
-    fn get_fold_fn() -> str {"fold_crate"}
-}
-impl of qq_helper for @ast::expr {
-    fn span() -> span {self.span}
-    fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_expr(self, cx, v);}
-    fn extract_mac() -> option<ast::mac_> {
-        alt (self.node) {
-          ast::expr_mac({node: mac, _}) {some(mac)}
-          _ {none}
-        }
-    }
-    fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-        mk_path(cx, sp, ["syntax", "ext", "qquote", "parse_expr"])
-    }
-    fn get_fold_fn() -> str {"fold_expr"}
-}
-impl of qq_helper for @ast::ty {
-    fn span() -> span {self.span}
-    fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_ty(self, cx, v);}
-    fn extract_mac() -> option<ast::mac_> {
-        alt (self.node) {
-          ast::ty_mac({node: mac, _}) {some(mac)}
-          _ {none}
-        }
-    }
-    fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-        mk_path(cx, sp, ["syntax", "ext", "qquote", "parse_ty"])
-    }
-    fn get_fold_fn() -> str {"fold_ty"}
-}
-impl of qq_helper for @ast::item {
-    fn span() -> span {self.span}
-    fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_item(self, cx, v);}
-    fn extract_mac() -> option<ast::mac_> {fail}
-    fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-        mk_path(cx, sp, ["syntax", "ext", "qquote", "parse_item"])
-    }
-    fn get_fold_fn() -> str {"fold_item"}
-}
-impl of qq_helper for @ast::stmt {
-    fn span() -> span {self.span}
-    fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_stmt(self, cx, v);}
-    fn extract_mac() -> option<ast::mac_> {fail}
-    fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-        mk_path(cx, sp, ["syntax", "ext", "qquote", "parse_stmt"])
-    }
-    fn get_fold_fn() -> str {"fold_stmt"}
-}
-impl of qq_helper for @ast::pat {
-    fn span() -> span {self.span}
-    fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_pat(self, cx, v);}
-    fn extract_mac() -> option<ast::mac_> {fail}
-    fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-        mk_path(cx, sp, ["syntax", "ext", "qquote", "parse_pat"])
-    }
-    fn get_fold_fn() -> str {"fold_pat"}
-}
-
-fn gather_anti_quotes<N: qq_helper>(lo: uint, node: N) -> aq_ctxt
-{
-    let v = @{visit_expr: {|node, &&cx, v|
-                  visit_aq(node, "from_expr", cx, v)},
-              visit_ty: {|node, &&cx, v|
-                  visit_aq(node, "from_ty", cx, v)}
-              with *default_visitor()};
-    let cx = @{lo:lo, gather: dvec()};
-    node.visit(cx, mk_vt(v));
-    // FIXME: Maybe this is overkill (merge_sort); it might be better
-    //   to just keep the gather array in sorted order ... (Issue #2250)
-    cx.gather.swap { |v|
-        vec::to_mut(std::sort::merge_sort({|a,b| a.lo < b.lo}, v))
-    };
-    ret cx;
-}
-
-fn visit_aq<T:qq_helper>(node: T, constr: str, &&cx: aq_ctxt, v: vt<aq_ctxt>)
-{
-    alt (node.extract_mac()) {
-      some(mac_aq(sp, e)) {
-        cx.gather.push({lo: sp.lo - cx.lo, hi: sp.hi - cx.lo,
-                        e: e, constr: constr});
-      }
-      _ {node.visit(cx, v);}
-    }
-}
-
-fn is_space(c: char) -> bool {
-    parse::lexer::is_whitespace(c)
-}
-
-fn expand_ast(ecx: ext_ctxt, _sp: span,
-              arg: ast::mac_arg, body: ast::mac_body)
-    -> @ast::expr
-{
-    let mut what = "expr";
-    option::iter(arg) {|arg|
-        let args: [@ast::expr] =
-            alt arg.node {
-              ast::expr_vec(elts, _) { elts }
-              _ {
-                ecx.span_fatal
-                    (_sp, "#ast requires arguments of the form `[...]`.")
-              }
-            };
-        if vec::len::<@ast::expr>(args) != 1u {
-            ecx.span_fatal(_sp, "#ast requires exactly one arg");
-        }
-        alt (args[0].node) {
-          ast::expr_path(@{idents: id, _}) if vec::len(id) == 1u
-              {what = id[0]}
-          _ {ecx.span_fatal(args[0].span, "expected an identifier");}
-        }
-    }
-    let body = get_mac_body(ecx,_sp,body);
-
-    ret alt what {
-      "crate" {finish(ecx, body, parse_crate)}
-      "expr" {finish(ecx, body, parse_expr)}
-      "ty" {finish(ecx, body, parse_ty)}
-      "item" {finish(ecx, body, parse_item)}
-      "stmt" {finish(ecx, body, parse_stmt)}
-      "pat" {finish(ecx, body, parse_pat)}
-      _ {ecx.span_fatal(_sp, "unsupported ast type")}
-    };
-}
-
-fn parse_crate(p: parser) -> @ast::crate { p.parse_crate_mod([]) }
-fn parse_ty(p: parser) -> @ast::ty { p.parse_ty(false) }
-fn parse_stmt(p: parser) -> @ast::stmt { p.parse_stmt([]) }
-fn parse_expr(p: parser) -> @ast::expr { p.parse_expr() }
-fn parse_pat(p: parser) -> @ast::pat { p.parse_pat() }
-
-fn parse_item(p: parser) -> @ast::item {
-    alt p.parse_item([], ast::public) {
-      some(item) { item }
-      none       { fail "parse_item: parsing an item failed"; }
-    }
-}
-
-fn finish<T: qq_helper>
-    (ecx: ext_ctxt, body: ast::mac_body_, f: fn (p: parser) -> T)
-    -> @ast::expr
-{
-    let cm = ecx.codemap();
-    let str = @codemap::span_to_snippet(body.span, cm);
-    #debug["qquote--str==%?", str];
-    let fname = codemap::mk_substr_filename(cm, body.span);
-    let node = parse_from_source_str
-        (f, fname, codemap::fss_internal(body.span), str,
-         ecx.cfg(), ecx.parse_sess());
-    let loc = codemap::lookup_char_pos(cm, body.span.lo);
-
-    let sp = node.span();
-    let qcx = gather_anti_quotes(sp.lo, node);
-    let cx = qcx;
-
-    for uint::range(1u, cx.gather.len()) {|i|
-        assert cx.gather[i-1u].lo < cx.gather[i].lo;
-        // ^^ check that the vector is sorted
-        assert cx.gather[i-1u].hi <= cx.gather[i].lo;
-        // ^^ check that the spans are non-overlapping
-    }
-
-    let mut str2 = "";
-    enum state {active, skip(uint), blank};
-    let mut state = active;
-    let mut i = 0u, j = 0u;
-    let g_len = cx.gather.len();
-    str::chars_iter(*str) {|ch|
-        if (j < g_len && i == cx.gather[j].lo) {
-            assert ch == '$';
-            let repl = #fmt("$%u ", j);
-            state = skip(str::char_len(repl));
-            str2 += repl;
-        }
-        alt state {
-          active {str::push_char(str2, ch);}
-          skip(1u) {state = blank;}
-          skip(sk) {state = skip (sk-1u);}
-          blank if is_space(ch) {str::push_char(str2, ch);}
-          blank {str::push_char(str2, ' ');}
-        }
-        i += 1u;
-        if (j < g_len && i == cx.gather[j].hi) {
-            assert ch == ')';
-            state = active;
-            j += 1u;
-        }
-    }
-
-    let cx = ecx;
-
-    let cfg_call = {||
-        mk_call_(cx, sp, mk_access(cx, sp, ["ext_cx"], "cfg"), [])
-    };
-
-    let parse_sess_call = {||
-        mk_call_(cx, sp, mk_access(cx, sp, ["ext_cx"], "parse_sess"), [])
-    };
-
-    let pcall = mk_call(cx,sp,
-                       ["syntax", "parse", "parser",
-                        "parse_from_source_str"],
-                       [node.mk_parse_fn(cx,sp),
-                        mk_str(cx,sp, fname),
-                        mk_call(cx,sp,
-                                ["syntax","ext","qquote", "mk_file_substr"],
-                                [mk_str(cx,sp, loc.file.name),
-                                 mk_uint(cx,sp, loc.line),
-                                 mk_uint(cx,sp, loc.col)]),
-                        mk_unary(cx,sp, ast::box(ast::m_imm),
-                                 mk_str(cx,sp, str2)),
-                        cfg_call(),
-                        parse_sess_call()]
-                      );
-    let mut rcall = pcall;
-    if (g_len > 0u) {
-        rcall = mk_call(cx,sp,
-                        ["syntax", "ext", "qquote", "replace"],
-                        [pcall,
-                         mk_vec_e(cx,sp, qcx.gather.map_to_vec {|g|
-                             mk_call(cx,sp,
-                                     ["syntax", "ext", "qquote", g.constr],
-                                     [g.e])}),
-                         mk_path(cx,sp,
-                                 ["syntax", "ext", "qquote",
-                                  node.get_fold_fn()])]);
-    }
-    ret rcall;
-}
-
-fn replace<T>(node: T, repls: [fragment], ff: fn (ast_fold, T) -> T)
-    -> T
-{
-    let aft = default_ast_fold();
-    let f_pre = @{fold_expr: bind replace_expr(repls, _, _, _,
-                                               aft.fold_expr),
-                  fold_ty: bind replace_ty(repls, _, _, _,
-                                           aft.fold_ty)
-                  with *aft};
-    ret ff(make_fold(f_pre), node);
-}
-fn fold_crate(f: ast_fold, &&n: @ast::crate) -> @ast::crate {
-    @f.fold_crate(*n)
-}
-fn fold_expr(f: ast_fold, &&n: @ast::expr) -> @ast::expr {f.fold_expr(n)}
-fn fold_ty(f: ast_fold, &&n: @ast::ty) -> @ast::ty {f.fold_ty(n)}
-fn fold_item(f: ast_fold, &&n: @ast::item) -> @ast::item {f.fold_item(n)}
-fn fold_stmt(f: ast_fold, &&n: @ast::stmt) -> @ast::stmt {f.fold_stmt(n)}
-fn fold_pat(f: ast_fold, &&n: @ast::pat) -> @ast::pat {f.fold_pat(n)}
-
-fn replace_expr(repls: [fragment],
-                e: ast::expr_, s: span, fld: ast_fold,
-                orig: fn@(ast::expr_, span, ast_fold)->(ast::expr_, span))
-    -> (ast::expr_, span)
-{
-    alt e {
-      ast::expr_mac({node: mac_var(i), _}) {
-        alt (repls[i]) {
-          from_expr(r) {(r.node, r.span)}
-          _ {fail /* fixme error message */}}}
-      _ {orig(e,s,fld)}
-    }
-}
-
-fn replace_ty(repls: [fragment],
-                e: ast::ty_, s: span, fld: ast_fold,
-                orig: fn@(ast::ty_, span, ast_fold)->(ast::ty_, span))
-    -> (ast::ty_, span)
-{
-    alt e {
-      ast::ty_mac({node: mac_var(i), _}) {
-        alt (repls[i]) {
-          from_ty(r) {(r.node, r.span)}
-          _ {fail /* fixme error message */}}}
-      _ {orig(e,s,fld)}
-    }
-}
-
-fn print_expr(expr: @ast::expr) {
-    let stdout = io::stdout();
-    let pp = pprust::rust_printer(stdout);
-    pprust::print_expr(pp, expr);
-    pp::eof(pp.s);
-    stdout.write_str("\n");
-}
-
-fn mk_file_substr(fname: str, line: uint, col: uint) -> codemap::file_substr {
-    codemap::fss_external({filename: fname, line: line, col: col})
-}
-
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
diff --git a/src/librustsyntax/ext/simplext.rs b/src/librustsyntax/ext/simplext.rs
deleted file mode 100644
index 76b78cb2b86..00000000000
--- a/src/librustsyntax/ext/simplext.rs
+++ /dev/null
@@ -1,778 +0,0 @@
-import codemap::span;
-import std::map::{hashmap, str_hash};
-
-import base::*;
-
-import fold::*;
-import ast_util::respan;
-import ast::{ident, path, ty, blk_, expr, expr_path,
-             expr_vec, expr_mac, mac_invoc, node_id};
-
-export add_new_extension;
-
-fn path_to_ident(pth: @path) -> option<ident> {
-    if vec::len(pth.idents) == 1u && vec::len(pth.types) == 0u {
-        ret some(pth.idents[0u]);
-    }
-    ret none;
-}
-
-//a vec of binders might be a little big.
-type clause = {params: binders, body: @expr};
-
-/* logically, an arb_depth should contain only one kind of matchable */
-enum arb_depth<T> { leaf(T), seq(@[arb_depth<T>], span), }
-
-
-enum matchable {
-    match_expr(@expr),
-    match_path(@path),
-    match_ident(ast::spanned<ident>),
-    match_ty(@ty),
-    match_block(ast::blk),
-    match_exact, /* don't bind anything, just verify the AST traversal */
-}
-
-/* for when given an incompatible bit of AST */
-fn match_error(cx: ext_ctxt, m: matchable, expected: str) -> ! {
-    alt m {
-      match_expr(x) {
-        cx.span_fatal(x.span,
-                      "this argument is an expr, expected " + expected);
-      }
-      match_path(x) {
-        cx.span_fatal(x.span,
-                      "this argument is a path, expected " + expected);
-      }
-      match_ident(x) {
-        cx.span_fatal(x.span,
-                      "this argument is an ident, expected " + expected);
-      }
-      match_ty(x) {
-        cx.span_fatal(x.span,
-                      "this argument is a type, expected " + expected);
-      }
-      match_block(x) {
-        cx.span_fatal(x.span,
-                      "this argument is a block, expected " + expected);
-      }
-      match_exact { cx.bug("what is a match_exact doing in a bindings?"); }
-    }
-}
-
-// We can't make all the matchables in a match_result the same type because
-// idents can be paths, which can be exprs.
-
-// If we want better match failure error messages (like in Fortifying Syntax),
-// we'll want to return something indicating amount of progress and location
-// of failure instead of `none`.
-type match_result = option<arb_depth<matchable>>;
-type selector = fn@(matchable) -> match_result;
-
-fn elts_to_ell(cx: ext_ctxt, elts: [@expr]) ->
-   {pre: [@expr], rep: option<@expr>, post: [@expr]} {
-    let mut idx: uint = 0u;
-    let mut res = none;
-    for elts.each {|elt|
-        alt elt.node {
-          expr_mac(m) {
-            alt m.node {
-              ast::mac_ellipsis {
-                if res != none {
-                    cx.span_fatal(m.span, "only one ellipsis allowed");
-                }
-                res =
-                    some({pre: vec::slice(elts, 0u, idx - 1u),
-                          rep: some(elts[idx - 1u]),
-                          post: vec::slice(elts, idx + 1u, vec::len(elts))});
-              }
-              _ { }
-            }
-          }
-          _ { }
-        }
-        idx += 1u;
-    }
-    ret alt res {
-          some(val) { val }
-          none { {pre: elts, rep: none, post: []} }
-        }
-}
-
-fn option_flatten_map<T: copy, U: copy>(f: fn@(T) -> option<U>, v: [T]) ->
-   option<[U]> {
-    let mut res = [];
-    for v.each {|elem|
-        alt f(elem) { none { ret none; } some(fv) { res += [fv]; } }
-    }
-    ret some(res);
-}
-
-fn a_d_map(ad: arb_depth<matchable>, f: selector) -> match_result {
-    alt ad {
-      leaf(x) { ret f(x); }
-      seq(ads, span) {
-        alt option_flatten_map(bind a_d_map(_, f), *ads) {
-          none { ret none; }
-          some(ts) { ret some(seq(@ts, span)); }
-        }
-      }
-    }
-}
-
-fn compose_sels(s1: selector, s2: selector) -> selector {
-    fn scomp(s1: selector, s2: selector, m: matchable) -> match_result {
-        ret alt s1(m) {
-              none { none }
-              some(matches) { a_d_map(matches, s2) }
-            }
-    }
-    ret bind scomp(s1, s2, _);
-}
-
-
-
-type binders =
-    {real_binders: hashmap<ident, selector>,
-     mut literal_ast_matchers: [selector]};
-type bindings = hashmap<ident, arb_depth<matchable>>;
-
-fn acumm_bindings(_cx: ext_ctxt, _b_dest: bindings, _b_src: bindings) { }
-
-/* these three functions are the big moving parts */
-
-/* create the selectors needed to bind and verify the pattern */
-
-fn pattern_to_selectors(cx: ext_ctxt, e: @expr) -> binders {
-    let res: binders =
-        {real_binders: str_hash::<selector>(),
-         mut literal_ast_matchers: []};
-    //this oughta return binders instead, but macro args are a sequence of
-    //expressions, rather than a single expression
-    fn trivial_selector(m: matchable) -> match_result { ret some(leaf(m)); }
-    p_t_s_rec(cx, match_expr(e), trivial_selector, res);
-    ret res;
-}
-
-
-
-/* use the selectors on the actual arguments to the macro to extract
-bindings. Most of the work is done in p_t_s, which generates the
-selectors. */
-
-fn use_selectors_to_bind(b: binders, e: @expr) -> option<bindings> {
-    let res = str_hash::<arb_depth<matchable>>();
-    //need to do this first, to check vec lengths.
-    for b.literal_ast_matchers.each {|sel|
-        alt sel(match_expr(e)) { none { ret none; } _ { } }
-    }
-    let mut never_mind: bool = false;
-    for b.real_binders.each {|key, val|
-        alt val(match_expr(e)) {
-          none { never_mind = true; }
-          some(mtc) { res.insert(key, mtc); }
-        }
-    };
-    //HACK: `ret` doesn't work in `for each`
-    if never_mind { ret none; }
-    ret some(res);
-}
-
-/* use the bindings on the body to generate the expanded code */
-
-fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr {
-    let idx_path: @mut [uint] = @mut [];
-    fn new_id(_old: node_id, cx: ext_ctxt) -> node_id { ret cx.next_id(); }
-    fn new_span(cx: ext_ctxt, sp: span) -> span {
-        /* this discards information in the case of macro-defining macros */
-        ret {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()};
-    }
-    let afp = default_ast_fold();
-    let f_pre =
-        @{fold_ident: bind transcribe_ident(cx, b, idx_path, _, _),
-          fold_path: bind transcribe_path(cx, b, idx_path, _, _),
-          fold_expr:
-              bind transcribe_expr(cx, b, idx_path, _, _, _, afp.fold_expr),
-          fold_ty: bind transcribe_type(cx, b, idx_path,
-                                        _, _, _, afp.fold_ty),
-          fold_block:
-              bind transcribe_block(cx, b, idx_path, _, _, _, afp.fold_block),
-          map_exprs: bind transcribe_exprs(cx, b, idx_path, _, _),
-          new_id: bind new_id(_, cx)
-          with *afp};
-    let f = make_fold(f_pre);
-    let result = f.fold_expr(body);
-    ret result;
-}
-
-
-/* helper: descend into a matcher */
-fn follow(m: arb_depth<matchable>, idx_path: @mut [uint]) ->
-   arb_depth<matchable> {
-    let mut res: arb_depth<matchable> = m;
-    for vec::each(*idx_path) {|idx|
-        res = alt res {
-          leaf(_) { ret res;/* end of the line */ }
-          seq(new_ms, _) { new_ms[idx] }
-        }
-    }
-    ret res;
-}
-
-fn follow_for_trans(cx: ext_ctxt, mmaybe: option<arb_depth<matchable>>,
-                    idx_path: @mut [uint]) -> option<matchable> {
-    alt mmaybe {
-      none { ret none }
-      some(m) {
-        ret alt follow(m, idx_path) {
-              seq(_, sp) {
-                cx.span_fatal(sp,
-                              "syntax matched under ... but not " +
-                                  "used that way.")
-              }
-              leaf(m) { ret some(m) }
-            }
-      }
-    }
-
-}
-
-/* helper for transcribe_exprs: what vars from `b` occur in `e`? */
-fn free_vars(b: bindings, e: @expr, it: fn(ident)) {
-    let idents: hashmap<ident, ()> = str_hash::<()>();
-    fn mark_ident(&&i: ident, _fld: ast_fold, b: bindings,
-                  idents: hashmap<ident, ()>) -> ident {
-        if b.contains_key(i) { idents.insert(i, ()); }
-        ret i;
-    }
-    // using fold is a hack: we want visit, but it doesn't hit idents;
-    // solve this with macros
-    let f_pre =
-        @{fold_ident: bind mark_ident(_, _, b, idents)
-          with *default_ast_fold()};
-    let f = make_fold(f_pre);
-    f.fold_expr(e); // ignore result
-    for idents.each_key {|x| it(x); };
-}
-
-
-/* handle sequences (anywhere in the AST) of exprs, either real or ...ed */
-fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
-                    recur: fn@(&&@expr) -> @expr, exprs: [@expr]) -> [@expr] {
-    alt elts_to_ell(cx, exprs) {
-      {pre: pre, rep: repeat_me_maybe, post: post} {
-        let mut res = vec::map(pre, recur);
-        alt repeat_me_maybe {
-          none { }
-          some(repeat_me) {
-            let mut repeat: option<{rep_count: uint, name: ident}> = none;
-            /* we need to walk over all the free vars in lockstep, except for
-            the leaves, which are just duplicated */
-            free_vars(b, repeat_me) {|fv|
-                let cur_pos = follow(b.get(fv), idx_path);
-                alt cur_pos {
-                  leaf(_) { }
-                  seq(ms, _) {
-                    alt repeat {
-                      none {
-                        repeat = some({rep_count: vec::len(*ms), name: fv});
-                      }
-                      some({rep_count: old_len, name: old_name}) {
-                        let len = vec::len(*ms);
-                        if old_len != len {
-                            let msg =
-                                #fmt["'%s' occurs %u times, but ", fv, len] +
-                                    #fmt["'%s' occurs %u times", old_name,
-                                         old_len];
-                            cx.span_fatal(repeat_me.span, msg);
-                        }
-                      }
-                    }
-                  }
-                }
-            };
-            alt repeat {
-              none {
-                cx.span_fatal(repeat_me.span,
-                              "'...' surrounds an expression without any" +
-                                  " repeating syntax variables");
-              }
-              some({rep_count: rc, _}) {
-                /* Whew, we now know how many times to repeat */
-                let mut idx: uint = 0u;
-                while idx < rc {
-                    *idx_path += [idx];
-                    res += [recur(repeat_me)]; // whew!
-                    vec::pop(*idx_path);
-                    idx += 1u;
-                }
-              }
-            }
-          }
-        }
-        res += vec::map(post, recur);
-        ret res;
-      }
-    }
-}
-
-
-
-// substitute, in a position that's required to be an ident
-fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
-                    &&i: ident, _fld: ast_fold) -> ident {
-    ret alt follow_for_trans(cx, b.find(i), idx_path) {
-          some(match_ident(a_id)) { a_id.node }
-          some(m) { match_error(cx, m, "an identifier") }
-          none { i }
-        }
-}
-
-
-fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
-                   p: path, _fld: ast_fold) -> path {
-    // Don't substitute into qualified names.
-    if vec::len(p.types) > 0u || vec::len(p.idents) != 1u { ret p; }
-    alt follow_for_trans(cx, b.find(p.idents[0]), idx_path) {
-      some(match_ident(id)) {
-        {span: id.span, global: false, idents: [id.node],
-         rp: none, types: []}
-      }
-      some(match_path(a_pth)) { *a_pth }
-      some(m) { match_error(cx, m, "a path") }
-      none { p }
-    }
-}
-
-
-fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
-                   e: ast::expr_, s: span, fld: ast_fold,
-                   orig: fn@(ast::expr_, span, ast_fold)->(ast::expr_, span))
-    -> (ast::expr_, span)
-{
-    ret alt e {
-          expr_path(p) {
-            // Don't substitute into qualified names.
-            if vec::len(p.types) > 0u || vec::len(p.idents) != 1u {
-                (e, s);
-            }
-            alt follow_for_trans(cx, b.find(p.idents[0]), idx_path) {
-              some(match_ident(id)) {
-                (expr_path(@{span: id.span,
-                             global: false,
-                             idents: [id.node],
-                             rp: none,
-                             types: []}), id.span)
-              }
-              some(match_path(a_pth)) { (expr_path(a_pth), s) }
-              some(match_expr(a_exp)) { (a_exp.node, a_exp.span) }
-              some(m) { match_error(cx, m, "an expression") }
-              none { orig(e, s, fld) }
-            }
-          }
-          _ { orig(e, s, fld) }
-        }
-}
-
-fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
-                   t: ast::ty_, s: span, fld: ast_fold,
-                   orig: fn@(ast::ty_, span, ast_fold) -> (ast::ty_, span))
-    -> (ast::ty_, span)
-{
-    ret alt t {
-          ast::ty_path(pth, _) {
-            alt path_to_ident(pth) {
-              some(id) {
-                alt follow_for_trans(cx, b.find(id), idx_path) {
-                  some(match_ty(ty)) { (ty.node, ty.span) }
-                  some(m) { match_error(cx, m, "a type") }
-                  none { orig(t, s, fld) }
-                }
-              }
-              none { orig(t, s, fld) }
-            }
-          }
-          _ { orig(t, s, fld) }
-        }
-}
-
-
-/* for parsing reasons, syntax variables bound to blocks must be used like
-`{v}` */
-
-fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
-                    blk: blk_, s: span, fld: ast_fold,
-                    orig: fn@(blk_, span, ast_fold) -> (blk_, span))
-    -> (blk_, span)
-{
-    ret alt block_to_ident(blk) {
-          some(id) {
-            alt follow_for_trans(cx, b.find(id), idx_path) {
-              some(match_block(new_blk)) { (new_blk.node, new_blk.span) }
-
-
-
-
-
-              // possibly allow promotion of ident/path/expr to blocks?
-              some(m) {
-                match_error(cx, m, "a block")
-              }
-              none { orig(blk, s, fld) }
-            }
-          }
-          none { orig(blk, s, fld) }
-        }
-}
-
-
-/* traverse the pattern, building instructions on how to bind the actual
-argument. ps accumulates instructions on navigating the tree.*/
-fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {
-
-    //it might be possible to traverse only exprs, not matchables
-    alt m {
-      match_expr(e) {
-        alt e.node {
-          expr_path(p_pth) { p_t_s_r_path(cx, p_pth, s, b); }
-          expr_vec(p_elts, _) {
-            alt elts_to_ell(cx, p_elts) {
-              {pre: pre, rep: some(repeat_me), post: post} {
-                p_t_s_r_length(cx, vec::len(pre) + vec::len(post), true, s,
-                               b);
-                if vec::len(pre) > 0u {
-                    p_t_s_r_actual_vector(cx, pre, true, s, b);
-                }
-                p_t_s_r_ellipses(cx, repeat_me, vec::len(pre), s, b);
-
-                if vec::len(post) > 0u {
-                    cx.span_unimpl(e.span,
-                                   "matching after `...` not yet supported");
-                }
-              }
-              {pre: pre, rep: none, post: post} {
-                if post != [] {
-                    cx.bug("elts_to_ell provided an invalid result");
-                }
-                p_t_s_r_length(cx, vec::len(pre), false, s, b);
-                p_t_s_r_actual_vector(cx, pre, false, s, b);
-              }
-            }
-          }
-          /* FIXME: handle embedded types and blocks, at least
-             (Issue #2251) */
-          expr_mac(mac) {
-            p_t_s_r_mac(cx, mac, s, b);
-          }
-          _ {
-            fn select(cx: ext_ctxt, m: matchable, pat: @expr) ->
-               match_result {
-                ret alt m {
-                      match_expr(e) {
-                        if e == pat { some(leaf(match_exact)) } else { none }
-                      }
-                      _ { cx.bug("broken traversal in p_t_s_r") }
-                    }
-            }
-            b.literal_ast_matchers += [bind select(cx, _, e)];
-          }
-        }
-      }
-      _ {
-          cx.bug("undocumented invariant in p_t_s_rec");
-      }
-    }
-}
-
-
-/* make a match more precise */
-fn specialize_match(m: matchable) -> matchable {
-    ret alt m {
-          match_expr(e) {
-            alt e.node {
-              expr_path(pth) {
-                alt path_to_ident(pth) {
-                  some(id) { match_ident(respan(pth.span, id)) }
-                  none { match_path(pth) }
-                }
-              }
-              _ { m }
-            }
-          }
-          _ { m }
-        }
-}
-
-/* pattern_to_selectors helper functions */
-fn p_t_s_r_path(cx: ext_ctxt, p: @path, s: selector, b: binders) {
-    alt path_to_ident(p) {
-      some(p_id) {
-        fn select(cx: ext_ctxt, m: matchable) -> match_result {
-            ret alt m {
-                  match_expr(e) { some(leaf(specialize_match(m))) }
-                  _ { cx.bug("broken traversal in p_t_s_r") }
-                }
-        }
-        if b.real_binders.contains_key(p_id) {
-            cx.span_fatal(p.span, "duplicate binding identifier");
-        }
-        b.real_binders.insert(p_id, compose_sels(s, bind select(cx, _)));
-      }
-      none { }
-    }
-}
-
-fn block_to_ident(blk: blk_) -> option<ident> {
-    if vec::len(blk.stmts) != 0u { ret none; }
-    ret alt blk.expr {
-          some(expr) {
-            alt expr.node { expr_path(pth) { path_to_ident(pth) } _ { none } }
-          }
-          none { none }
-        }
-}
-
-fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, s: selector, b: binders) {
-    fn select_pt_1(cx: ext_ctxt, m: matchable,
-                   fn_m: fn(ast::mac) -> match_result) -> match_result {
-        ret alt m {
-              match_expr(e) {
-                alt e.node { expr_mac(mac) { fn_m(mac) } _ { none } }
-              }
-              _ { cx.bug("broken traversal in p_t_s_r") }
-            }
-    }
-    fn no_des(cx: ext_ctxt, sp: span, syn: str) -> ! {
-        cx.span_fatal(sp, "destructuring " + syn + " is not yet supported");
-    }
-    alt mac.node {
-      ast::mac_ellipsis { cx.span_fatal(mac.span, "misused `...`"); }
-      ast::mac_invoc(_, _, _) { no_des(cx, mac.span, "macro calls"); }
-      ast::mac_embed_type(ty) {
-        alt ty.node {
-          ast::ty_path(pth, _) {
-            alt path_to_ident(pth) {
-              some(id) {
-                /* look for an embedded type */
-                fn select_pt_2(m: ast::mac) -> match_result {
-                    ret alt m.node {
-                          ast::mac_embed_type(t) { some(leaf(match_ty(t))) }
-                          _ { none }
-                        }
-                }
-                let final_step = bind select_pt_1(cx, _, select_pt_2);
-                b.real_binders.insert(id, compose_sels(s, final_step));
-              }
-              none { no_des(cx, pth.span, "under `#<>`"); }
-            }
-          }
-          _ { no_des(cx, ty.span, "under `#<>`"); }
-        }
-      }
-      ast::mac_embed_block(blk) {
-        alt block_to_ident(blk.node) {
-          some(id) {
-            fn select_pt_2(m: ast::mac) -> match_result {
-                ret alt m.node {
-                      ast::mac_embed_block(blk) {
-                        some(leaf(match_block(blk)))
-                      }
-                      _ { none }
-                    }
-            }
-            let final_step = bind select_pt_1(cx, _, select_pt_2);
-            b.real_binders.insert(id, compose_sels(s, final_step));
-          }
-          none { no_des(cx, blk.span, "under `#{}`"); }
-        }
-      }
-      ast::mac_aq(_,_) { no_des(cx, mac.span, "antiquotes"); }
-      ast::mac_var(_) { no_des(cx, mac.span, "antiquote variables"); }
-    }
-}
-
-fn p_t_s_r_ellipses(cx: ext_ctxt, repeat_me: @expr, offset: uint, s: selector,
-                    b: binders) {
-    fn select(cx: ext_ctxt, repeat_me: @expr, offset: uint, m: matchable) ->
-       match_result {
-        ret alt m {
-              match_expr(e) {
-                alt e.node {
-                  expr_vec(arg_elts, _) {
-                    let mut elts = [];
-                    let mut idx = offset;
-                    while idx < vec::len(arg_elts) {
-                        elts += [leaf(match_expr(arg_elts[idx]))];
-                        idx += 1u;
-                    }
-
-                    // using repeat_me.span is a little wacky, but the
-                    // error we want to report is one in the macro def
-                    some(seq(@elts, repeat_me.span))
-                  }
-                  _ { none }
-                }
-              }
-              _ { cx.bug("broken traversal in p_t_s_r") }
-            }
-    }
-    p_t_s_rec(cx, match_expr(repeat_me),
-              compose_sels(s, bind select(cx, repeat_me, offset, _)), b);
-}
-
-
-fn p_t_s_r_length(cx: ext_ctxt, len: uint, at_least: bool, s: selector,
-                  b: binders) {
-    fn len_select(_cx: ext_ctxt, m: matchable, at_least: bool, len: uint) ->
-       match_result {
-        ret alt m {
-              match_expr(e) {
-                alt e.node {
-                  expr_vec(arg_elts, _) {
-                    let actual_len = vec::len(arg_elts);
-                    if at_least && actual_len >= len || actual_len == len {
-                        some(leaf(match_exact))
-                    } else { none }
-                  }
-                  _ { none }
-                }
-              }
-              _ { none }
-            }
-    }
-    b.literal_ast_matchers +=
-        [compose_sels(s, bind len_select(cx, _, at_least, len))];
-}
-
-fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: [@expr], _repeat_after: bool,
-                         s: selector, b: binders) {
-    let mut idx: uint = 0u;
-    while idx < vec::len(elts) {
-        fn select(cx: ext_ctxt, m: matchable, idx: uint) -> match_result {
-            ret alt m {
-                  match_expr(e) {
-                    alt e.node {
-                      expr_vec(arg_elts, _) {
-                        some(leaf(match_expr(arg_elts[idx])))
-                      }
-                      _ { none }
-                    }
-                  }
-                  _ { cx.bug("broken traversal in p_t_s_r") }
-                }
-        }
-        p_t_s_rec(cx, match_expr(elts[idx]),
-                  compose_sels(s, bind select(cx, _, idx)), b);
-        idx += 1u;
-    }
-}
-
-fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
-                     _body: ast::mac_body) -> base::macro_def {
-    let args = get_mac_args_no_max(cx, sp, arg, 0u, "macro");
-
-    let mut macro_name: option<str> = none;
-    let mut clauses: [@clause] = [];
-    for args.each {|arg|
-        alt arg.node {
-          expr_vec(elts, mutbl) {
-            if vec::len(elts) != 2u {
-                cx.span_fatal((*arg).span,
-                              "extension clause must consist of [" +
-                                  "macro invocation, expansion body]");
-            }
-
-
-            alt elts[0u].node {
-              expr_mac(mac) {
-                alt mac.node {
-                  mac_invoc(pth, invoc_arg, body) {
-                    alt path_to_ident(pth) {
-                      some(id) {
-                        alt macro_name {
-                          none { macro_name = some(id); }
-                          some(other_id) {
-                            if id != other_id {
-                                cx.span_fatal(pth.span,
-                                              "macro name must be " +
-                                                  "consistent");
-                            }
-                          }
-                        }
-                      }
-                      none {
-                        cx.span_fatal(pth.span,
-                                      "macro name must not be a path");
-                      }
-                    }
-                    let arg = alt invoc_arg {
-                      some(arg) { arg }
-                      none { cx.span_fatal(mac.span,
-                                           "macro must have arguments")}
-                    };
-                    clauses +=
-                        [@{params: pattern_to_selectors(cx, arg),
-                           body: elts[1u]}];
-
-                    // FIXME: check duplicates (or just simplify
-                    // the macro arg situation) (Issue #2251)
-                  }
-                  _ {
-                      cx.span_bug(mac.span, "undocumented invariant in \
-                         add_extension");
-                  }
-                }
-              }
-              _ {
-                cx.span_fatal(elts[0u].span,
-                              "extension clause must" +
-                                  " start with a macro invocation.");
-              }
-            }
-          }
-          _ {
-            cx.span_fatal((*arg).span,
-                          "extension must be [clause, " + " ...]");
-          }
-        }
-    }
-
-    let ext = bind generic_extension(_, _, _, _, clauses);
-
-    ret {ident:
-             alt macro_name {
-               some(id) { id }
-               none {
-                 cx.span_fatal(sp, "macro definition must have " +
-                               "at least one clause")
-               }
-             },
-         ext: normal({expander: ext, span: some(option::get(arg).span)})};
-
-    fn generic_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
-                         _body: ast::mac_body, clauses: [@clause]) -> @expr {
-        let arg = alt arg {
-          some(arg) { arg }
-          none { cx.span_fatal(sp, "macro must have arguments")}
-        };
-        for clauses.each {|c|
-            alt use_selectors_to_bind(c.params, arg) {
-              some(bindings) { ret transcribe(cx, bindings, c.body); }
-              none { cont; }
-            }
-        }
-        cx.span_fatal(sp, "no clauses match macro invocation");
-    }
-}
-
-
-
-//
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
-//
diff --git a/src/librustsyntax/ext/source_util.rs b/src/librustsyntax/ext/source_util.rs
deleted file mode 100644
index 99b928cfb9c..00000000000
--- a/src/librustsyntax/ext/source_util.rs
+++ /dev/null
@@ -1,115 +0,0 @@
-import base::*;
-import ast;
-import codemap::span;
-import print::pprust;
-
-export expand_line;
-export expand_col;
-export expand_file;
-export expand_stringify;
-export expand_mod;
-export expand_include;
-export expand_include_str;
-export expand_include_bin;
-
-/* #line(): expands to the current line number */
-fn expand_line(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
-               _body: ast::mac_body) -> @ast::expr {
-    get_mac_args(cx, sp, arg, 0u, option::some(0u), "line");
-    let loc = codemap::lookup_char_pos(cx.codemap(), sp.lo);
-    ret make_new_lit(cx, sp, ast::lit_uint(loc.line as u64, ast::ty_u));
-}
-
-/* #col(): expands to the current column number */
-fn expand_col(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
-              _body: ast::mac_body) -> @ast::expr {
-    get_mac_args(cx, sp, arg, 0u, option::some(0u), "col");
-    let loc = codemap::lookup_char_pos(cx.codemap(), sp.lo);
-    ret make_new_lit(cx, sp, ast::lit_uint(loc.col as u64, ast::ty_u));
-}
-
-/* #file(): expands to the current filename */
-/* The filemap (`loc.file`) contains a bunch more information we could spit
- * out if we wanted. */
-fn expand_file(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
-               _body: ast::mac_body) -> @ast::expr {
-    get_mac_args(cx, sp, arg, 0u, option::some(0u), "file");
-    let loc = codemap::lookup_char_pos(cx.codemap(), sp.lo);
-    ret make_new_lit(cx, sp, ast::lit_str(loc.file.name));
-}
-
-fn expand_stringify(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
-                    _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args(cx, sp, arg, 1u, option::some(1u), "stringify");
-    ret make_new_lit(cx, sp, ast::lit_str(pprust::expr_to_str(args[0])));
-}
-
-fn expand_mod(cx: ext_ctxt, sp: span, arg: ast::mac_arg, _body: ast::mac_body)
-    -> @ast::expr {
-    get_mac_args(cx, sp, arg, 0u, option::some(0u), "mod");
-    ret make_new_lit(cx, sp, ast::lit_str(str::connect(cx.mod_path(), "::")));
-}
-
-fn expand_include(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
-                  _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args(cx, sp, arg, 1u, option::some(1u), "include");
-    let file = expr_to_str(cx, args[0], "#include requires a string");
-    let p = parse::new_parser_from_file(cx.parse_sess(), cx.cfg(),
-                                        res_rel_file(cx, sp, file),
-                                        parse::parser::SOURCE_FILE);
-    ret parse::parser::parse_expr(p)
-}
-
-fn expand_include_str(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
-                      _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args(cx,sp,arg,1u,option::some(1u),"include_str");
-
-    let file = expr_to_str(cx, args[0], "#include_str requires a string");
-
-    alt io::read_whole_file_str(res_rel_file(cx, sp, file)) {
-      result::ok(src) { ret make_new_lit(cx, sp, ast::lit_str(src)); }
-      result::err(e) {
-        cx.parse_sess().span_diagnostic.handler().fatal(e)
-      }
-    }
-}
-
-fn expand_include_bin(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
-                      _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args(cx,sp,arg,1u,option::some(1u),"include_bin");
-
-    let file = expr_to_str(cx, args[0], "#include_bin requires a string");
-
-    alt io::read_whole_file(res_rel_file(cx, sp, file)) {
-      result::ok(src) {
-        let u8_exprs = vec::map(src) { |char: u8|
-            make_new_lit(cx, sp, ast::lit_uint(char as u64, ast::ty_u8))
-        };
-        ret make_new_expr(cx, sp, ast::expr_vec(u8_exprs, ast::m_imm));
-      }
-      result::err(e) {
-        cx.parse_sess().span_diagnostic.handler().fatal(e)
-      }
-    }
-}
-
-fn res_rel_file(cx: ext_ctxt, sp: codemap::span, arg: path) -> path {
-    // NB: relative paths are resolved relative to the compilation unit
-    if !path::path_is_absolute(arg) {
-        let cu = codemap::span_to_filename(sp, cx.codemap());
-        let dir = path::dirname(cu);
-        ret path::connect(dir, arg);
-    } else {
-        ret arg;
-    }
-}
-
-//
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
-//
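The deleted res_rel_file above resolves #include-style paths against the directory of the file containing the macro invocation, and passes absolute paths through unchanged. A minimal sketch of the same idea in current Rust using std::path; the resolve_relative and including_file names are illustrative, not from the original source:

    use std::path::{Path, PathBuf};

    // Resolve `arg` the way res_rel_file did: absolute paths pass through,
    // relative paths are joined onto the directory of the including file.
    fn resolve_relative(including_file: &Path, arg: &Path) -> PathBuf {
        if arg.is_absolute() {
            arg.to_path_buf()
        } else {
            including_file
                .parent()
                .unwrap_or_else(|| Path::new(""))
                .join(arg)
        }
    }

    fn main() {
        let cu = Path::new("src/librustsyntax/parse/parser.rs");
        assert_eq!(
            resolve_relative(cu, Path::new("prec.rs")),
            PathBuf::from("src/librustsyntax/parse/prec.rs")
        );
    }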
diff --git a/src/librustsyntax/fold.rs b/src/librustsyntax/fold.rs
deleted file mode 100644
index ecdb8d328fb..00000000000
--- a/src/librustsyntax/fold.rs
+++ /dev/null
@@ -1,745 +0,0 @@
-import codemap::span;
-import ast::*;
-
-export ast_fold_precursor;
-export ast_fold;
-export default_ast_fold;
-export make_fold;
-export noop_fold_crate;
-export noop_fold_item;
-export noop_fold_expr;
-export noop_fold_pat;
-export noop_fold_mod;
-export noop_fold_ty;
-export noop_fold_block;
-export wrap;
-export fold_ty_param;
-export fold_ty_params;
-export fold_fn_decl;
-
-iface ast_fold {
-    fn fold_crate(crate) -> crate;
-    fn fold_crate_directive(&&@crate_directive) -> @crate_directive;
-    fn fold_view_item(&&@view_item) -> @view_item;
-    fn fold_native_item(&&@native_item) -> @native_item;
-    fn fold_item(&&@item) -> @item;
-    fn fold_class_item(&&@class_member) -> @class_member;
-    fn fold_item_underscore(item_) -> item_;
-    fn fold_method(&&@method) -> @method;
-    fn fold_block(blk) -> blk;
-    fn fold_stmt(&&@stmt) -> @stmt;
-    fn fold_arm(arm) -> arm;
-    fn fold_pat(&&@pat) -> @pat;
-    fn fold_decl(&&@decl) -> @decl;
-    fn fold_expr(&&@expr) -> @expr;
-    fn fold_ty(&&@ty) -> @ty;
-    fn fold_constr(&&@constr) -> @constr;
-    fn fold_ty_constr(&&@ty_constr) -> @ty_constr;
-    fn fold_mod(_mod) -> _mod;
-    fn fold_native_mod(native_mod) -> native_mod;
-    fn fold_variant(variant) -> variant;
-    fn fold_ident(&&ident) -> ident;
-    fn fold_path(&&@path) -> @path;
-    fn fold_local(&&@local) -> @local;
-    fn map_exprs(fn@(&&@expr) -> @expr, [@expr]) -> [@expr];
-    fn new_id(node_id) -> node_id;
-    fn new_span(span) -> span;
-}
-
-// We may eventually want to be able to fold over type parameters, too
-
-type ast_fold_precursor = @{
-    //unlike the others, item_ is non-trivial
-    fold_crate: fn@(crate_, span, ast_fold) -> (crate_, span),
-    fold_crate_directive: fn@(crate_directive_, span,
-                              ast_fold) -> (crate_directive_, span),
-    fold_view_item: fn@(view_item_, ast_fold) -> view_item_,
-    fold_native_item: fn@(&&@native_item, ast_fold) -> @native_item,
-    fold_item: fn@(&&@item, ast_fold) -> @item,
-    fold_class_item: fn@(&&@class_member, ast_fold) -> @class_member,
-    fold_item_underscore: fn@(item_, ast_fold) -> item_,
-    fold_method: fn@(&&@method, ast_fold) -> @method,
-    fold_block: fn@(blk_, span, ast_fold) -> (blk_, span),
-    fold_stmt: fn@(stmt_, span, ast_fold) -> (stmt_, span),
-    fold_arm: fn@(arm, ast_fold) -> arm,
-    fold_pat: fn@(pat_, span, ast_fold) -> (pat_, span),
-    fold_decl: fn@(decl_, span, ast_fold) -> (decl_, span),
-    fold_expr: fn@(expr_, span, ast_fold) -> (expr_, span),
-    fold_ty: fn@(ty_, span, ast_fold) -> (ty_, span),
-    fold_constr: fn@(ast::constr_, span, ast_fold) -> (constr_, span),
-    fold_ty_constr: fn@(ast::ty_constr_, span, ast_fold)
-        -> (ty_constr_, span),
-    fold_mod: fn@(_mod, ast_fold) -> _mod,
-    fold_native_mod: fn@(native_mod, ast_fold) -> native_mod,
-    fold_variant: fn@(variant_, span, ast_fold) -> (variant_, span),
-    fold_ident: fn@(&&ident, ast_fold) -> ident,
-    fold_path: fn@(path, ast_fold) -> path,
-    fold_local: fn@(local_, span, ast_fold) -> (local_, span),
-    map_exprs: fn@(fn@(&&@expr) -> @expr, [@expr]) -> [@expr],
-    new_id: fn@(node_id) -> node_id,
-    new_span: fn@(span) -> span};
-
-/* some little folds that probably aren't useful to have in ast_fold itself*/
-
-//used in noop_fold_item and noop_fold_crate and noop_fold_crate_directive
-fn fold_meta_item_(&&mi: @meta_item, fld: ast_fold) -> @meta_item {
-    ret @{node:
-              alt mi.node {
-                meta_word(id) { meta_word(fld.fold_ident(id)) }
-                meta_list(id, mis) {
-                  let fold_meta_item = bind fold_meta_item_(_, fld);
-                  meta_list(id, vec::map(mis, fold_meta_item))
-                }
-                meta_name_value(id, s) {
-                  meta_name_value(fld.fold_ident(id), s)
-                }
-              },
-          span: fld.new_span(mi.span)};
-}
-//used in noop_fold_item and noop_fold_crate
-fn fold_attribute_(at: attribute, fld: ast_fold) ->
-   attribute {
-    ret {node: {style: at.node.style,
-                value: *fold_meta_item_(@at.node.value, fld)},
-         span: fld.new_span(at.span)};
-}
-//used in noop_fold_native_item and noop_fold_fn_decl
-fn fold_arg_(a: arg, fld: ast_fold) -> arg {
-    ret {mode: a.mode,
-         ty: fld.fold_ty(a.ty),
-         ident: fld.fold_ident(a.ident),
-         id: fld.new_id(a.id)};
-}
-//used in noop_fold_expr, and possibly elsewhere in the future
-fn fold_mac_(m: mac, fld: ast_fold) -> mac {
-    ret {node:
-             alt m.node {
-               mac_invoc(pth, arg, body) {
-                 mac_invoc(fld.fold_path(pth),
-                           option::map(arg, fld.fold_expr), body)
-               }
-               mac_embed_type(ty) { mac_embed_type(fld.fold_ty(ty)) }
-               mac_embed_block(blk) { mac_embed_block(fld.fold_block(blk)) }
-               mac_ellipsis { mac_ellipsis }
-               mac_aq(_,_) { /* fixme */ m.node }
-               mac_var(_) { /* fixme */ m.node }
-             },
-         span: fld.new_span(m.span)};
-}
-
-fn fold_fn_decl(decl: ast::fn_decl, fld: ast_fold) -> ast::fn_decl {
-    ret {inputs: vec::map(decl.inputs, bind fold_arg_(_, fld)),
-         output: fld.fold_ty(decl.output),
-         purity: decl.purity,
-         cf: decl.cf,
-         constraints: vec::map(decl.constraints, fld.fold_constr)}
-}
-
-fn fold_ty_param_bound(tpb: ty_param_bound, fld: ast_fold) -> ty_param_bound {
-    alt tpb {
-      bound_copy | bound_send | bound_const { tpb }
-      bound_iface(ty) { bound_iface(fld.fold_ty(ty)) }
-    }
-}
-
-fn fold_ty_param(tp: ty_param, fld: ast_fold) -> ty_param {
-    {ident: tp.ident,
-     id: fld.new_id(tp.id),
-     bounds: @vec::map(*tp.bounds, fold_ty_param_bound(_, fld))}
-}
-
-fn fold_ty_params(tps: [ty_param], fld: ast_fold) -> [ty_param] {
-    vec::map(tps, fold_ty_param(_, fld))
-}
-
-fn noop_fold_crate(c: crate_, fld: ast_fold) -> crate_ {
-    let fold_meta_item = bind fold_meta_item_(_, fld);
-    let fold_attribute = bind fold_attribute_(_, fld);
-
-    ret {directives: vec::map(c.directives, fld.fold_crate_directive),
-         module: fld.fold_mod(c.module),
-         attrs: vec::map(c.attrs, fold_attribute),
-         config: vec::map(c.config, fold_meta_item)};
-}
-
-fn noop_fold_crate_directive(cd: crate_directive_, fld: ast_fold) ->
-   crate_directive_ {
-    ret alt cd {
-          cdir_src_mod(id, attrs) {
-            cdir_src_mod(fld.fold_ident(id), attrs)
-          }
-          cdir_dir_mod(id, cds, attrs) {
-            cdir_dir_mod(fld.fold_ident(id),
-                         vec::map(cds, fld.fold_crate_directive), attrs)
-          }
-          cdir_view_item(vi) { cdir_view_item(fld.fold_view_item(vi)) }
-          cdir_syntax(_) { cd }
-        }
-}
-
-fn noop_fold_view_item(vi: view_item_, _fld: ast_fold) -> view_item_ {
-    ret vi;
-}
-
-
-fn noop_fold_native_item(&&ni: @native_item, fld: ast_fold) -> @native_item {
-    let fold_arg = bind fold_arg_(_, fld);
-    let fold_attribute = bind fold_attribute_(_, fld);
-
-    ret @{ident: fld.fold_ident(ni.ident),
-          attrs: vec::map(ni.attrs, fold_attribute),
-          node:
-              alt ni.node {
-                native_item_fn(fdec, typms) {
-                  native_item_fn({inputs: vec::map(fdec.inputs, fold_arg),
-                                  output: fld.fold_ty(fdec.output),
-                                  purity: fdec.purity,
-                                  cf: fdec.cf,
-                                  constraints:
-                                      vec::map(fdec.constraints,
-                                               fld.fold_constr)},
-                                 fold_ty_params(typms, fld))
-                }
-              },
-          id: fld.new_id(ni.id),
-          span: fld.new_span(ni.span)};
-}
-
-fn noop_fold_item(&&i: @item, fld: ast_fold) -> @item {
-    let fold_attribute = bind fold_attribute_(_, fld);
-
-    ret @{ident: fld.fold_ident(i.ident),
-          attrs: vec::map(i.attrs, fold_attribute),
-          id: fld.new_id(i.id),
-          node: fld.fold_item_underscore(i.node),
-          vis: i.vis,
-          span: fld.new_span(i.span)};
-}
-
-fn noop_fold_class_item(&&ci: @class_member, fld: ast_fold)
-    -> @class_member {
-    @{node: alt ci.node {
-        instance_var(ident, t, cm, id, p) {
-           instance_var(ident, fld.fold_ty(t), cm, id, p)
-        }
-        class_method(m) { class_method(fld.fold_method(m)) }
-      },
-      span: ci.span}
-}
-
-fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ {
-    ret alt i {
-          item_const(t, e) { item_const(fld.fold_ty(t), fld.fold_expr(e)) }
-          item_fn(decl, typms, body) {
-              item_fn(fold_fn_decl(decl, fld),
-                      fold_ty_params(typms, fld),
-                      fld.fold_block(body))
-          }
-          item_mod(m) { item_mod(fld.fold_mod(m)) }
-          item_native_mod(nm) { item_native_mod(fld.fold_native_mod(nm)) }
-          item_ty(t, typms, rp) { item_ty(fld.fold_ty(t),
-                                          fold_ty_params(typms, fld),
-                                          rp) }
-          item_enum(variants, typms, r) {
-            item_enum(vec::map(variants, fld.fold_variant),
-                      fold_ty_params(typms, fld),
-                      r)
-          }
-          item_class(typms, ifaces, items, ctor, m_dtor, rp) {
-              let ctor_body = fld.fold_block(ctor.node.body);
-              let ctor_decl = fold_fn_decl(ctor.node.dec, fld);
-              let ctor_id   = fld.new_id(ctor.node.id);
-              let dtor = option::map(m_dtor) {|dtor|
-                let dtor_body = fld.fold_block(dtor.node.body);
-                let dtor_id   = fld.new_id(dtor.node.id);
-                {node: {body: dtor_body,
-                        id: dtor_id with dtor.node}
-                    with dtor}};
-              item_class(
-                  typms,
-                  vec::map(ifaces, {|p| fold_iface_ref(p, fld) }),
-                  vec::map(items, fld.fold_class_item),
-                  {node: {body: ctor_body,
-                          dec: ctor_decl,
-                          id: ctor_id with ctor.node}
-                      with ctor}, dtor, rp)
-          }
-          item_impl(tps, rp, ifce, ty, methods) {
-              item_impl(fold_ty_params(tps, fld),
-                        rp,
-                        ifce.map { |p| fold_iface_ref(p, fld) },
-                        fld.fold_ty(ty),
-                        vec::map(methods, fld.fold_method))
-          }
-          item_iface(tps, rp, methods) {
-            item_iface(fold_ty_params(tps, fld),
-                       rp,
-                       methods)
-          }
-          item_res(decl, typms, body, did, cid, rp) {
-            item_res(fold_fn_decl(decl, fld),
-                     fold_ty_params(typms, fld),
-                     fld.fold_block(body),
-                     fld.new_id(did),
-                     fld.new_id(cid),
-                     rp)
-          }
-        };
-}
-
-fn fold_iface_ref(&&p: @iface_ref, fld: ast_fold) -> @iface_ref {
-    @{path: fld.fold_path(p.path), id: fld.new_id(p.id)}
-}
-
-fn noop_fold_method(&&m: @method, fld: ast_fold) -> @method {
-    ret @{ident: fld.fold_ident(m.ident),
-          attrs: m.attrs,
-          tps: fold_ty_params(m.tps, fld),
-          decl: fold_fn_decl(m.decl, fld),
-          body: fld.fold_block(m.body),
-          id: fld.new_id(m.id),
-          span: fld.new_span(m.span),
-          self_id: fld.new_id(m.self_id),
-          vis: m.vis};
-}
-
-
-fn noop_fold_block(b: blk_, fld: ast_fold) -> blk_ {
-    ret {view_items: vec::map(b.view_items, fld.fold_view_item),
-         stmts: vec::map(b.stmts, fld.fold_stmt),
-         expr: option::map(b.expr, fld.fold_expr),
-         id: fld.new_id(b.id),
-         rules: b.rules};
-}
-
-fn noop_fold_stmt(s: stmt_, fld: ast_fold) -> stmt_ {
-    ret alt s {
-      stmt_decl(d, nid) { stmt_decl(fld.fold_decl(d), fld.new_id(nid)) }
-      stmt_expr(e, nid) { stmt_expr(fld.fold_expr(e), fld.new_id(nid)) }
-      stmt_semi(e, nid) { stmt_semi(fld.fold_expr(e), fld.new_id(nid)) }
-    };
-}
-
-fn noop_fold_arm(a: arm, fld: ast_fold) -> arm {
-    ret {pats: vec::map(a.pats, fld.fold_pat),
-         guard: option::map(a.guard, fld.fold_expr),
-         body: fld.fold_block(a.body)};
-}
-
-fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ {
-    ret alt p {
-          pat_wild { p }
-          pat_ident(pth, sub) {
-            pat_ident(fld.fold_path(pth), option::map(sub, fld.fold_pat))
-          }
-          pat_lit(e) { pat_lit(fld.fold_expr(e)) }
-          pat_enum(pth, pats) {
-              pat_enum(fld.fold_path(pth), option::map(pats)
-                       {|pats| vec::map(pats, fld.fold_pat)})
-          }
-          pat_rec(fields, etc) {
-            let mut fs = [];
-            for fields.each {|f|
-                fs += [{ident: f.ident, pat: fld.fold_pat(f.pat)}];
-            }
-            pat_rec(fs, etc)
-          }
-          pat_tup(elts) { pat_tup(vec::map(elts, fld.fold_pat)) }
-          pat_box(inner) { pat_box(fld.fold_pat(inner)) }
-          pat_uniq(inner) { pat_uniq(fld.fold_pat(inner)) }
-          pat_range(e1, e2) {
-            pat_range(fld.fold_expr(e1), fld.fold_expr(e2))
-          }
-        };
-}
-
-fn noop_fold_decl(d: decl_, fld: ast_fold) -> decl_ {
-    alt d {
-      decl_local(ls) { decl_local(vec::map(ls, fld.fold_local)) }
-      decl_item(it) { decl_item(fld.fold_item(it)) }
-    }
-}
-
-fn wrap<T>(f: fn@(T, ast_fold) -> T)
-    -> fn@(T, span, ast_fold) -> (T, span)
-{
-    ret fn@(x: T, s: span, fld: ast_fold) -> (T, span) {
-        (f(x, fld), s)
-    }
-}
-
-fn noop_fold_expr(e: expr_, fld: ast_fold) -> expr_ {
-    fn fold_field_(field: field, fld: ast_fold) -> field {
-        ret {node:
-                 {mutbl: field.node.mutbl,
-                  ident: fld.fold_ident(field.node.ident),
-                  expr: fld.fold_expr(field.node.expr)},
-             span: fld.new_span(field.span)};
-    }
-    let fold_field = bind fold_field_(_, fld);
-
-    let fold_mac = bind fold_mac_(_, fld);
-
-    ret alt e {
-          expr_new(p, i, v) {
-            expr_new(fld.fold_expr(p),
-                     fld.new_id(i),
-                     fld.fold_expr(v))
-          }
-          expr_vstore(e, v) {
-            expr_vstore(fld.fold_expr(e), v)
-          }
-          expr_vec(exprs, mutt) {
-            expr_vec(fld.map_exprs(fld.fold_expr, exprs), mutt)
-          }
-          expr_rec(fields, maybe_expr) {
-            expr_rec(vec::map(fields, fold_field),
-                     option::map(maybe_expr, fld.fold_expr))
-          }
-          expr_tup(elts) { expr_tup(vec::map(elts, fld.fold_expr)) }
-          expr_call(f, args, blk) {
-            expr_call(fld.fold_expr(f), fld.map_exprs(fld.fold_expr, args),
-                      blk)
-          }
-          expr_bind(f, args) {
-            let opt_map_se = bind option::map(_, fld.fold_expr);
-            expr_bind(fld.fold_expr(f), vec::map(args, opt_map_se))
-          }
-          expr_binary(binop, lhs, rhs) {
-            expr_binary(binop, fld.fold_expr(lhs), fld.fold_expr(rhs))
-          }
-          expr_unary(binop, ohs) { expr_unary(binop, fld.fold_expr(ohs)) }
-          expr_loop_body(f) { expr_loop_body(fld.fold_expr(f)) }
-          expr_lit(_) { e }
-          expr_cast(expr, ty) { expr_cast(fld.fold_expr(expr), ty) }
-          expr_addr_of(m, ohs) { expr_addr_of(m, fld.fold_expr(ohs)) }
-          expr_if(cond, tr, fl) {
-            expr_if(fld.fold_expr(cond), fld.fold_block(tr),
-                    option::map(fl, fld.fold_expr))
-          }
-          expr_while(cond, body) {
-            expr_while(fld.fold_expr(cond), fld.fold_block(body))
-          }
-          expr_loop(body) {
-              expr_loop(fld.fold_block(body))
-          }
-          expr_alt(expr, arms, mode) {
-            expr_alt(fld.fold_expr(expr), vec::map(arms, fld.fold_arm), mode)
-          }
-          expr_fn(proto, decl, body, captures) {
-            expr_fn(proto, fold_fn_decl(decl, fld),
-                    fld.fold_block(body),
-                    @((*captures).map({|cap_item|
-                        @({id: fld.new_id((*cap_item).id)
-                           with *cap_item})})))
-          }
-          expr_fn_block(decl, body, captures) {
-            expr_fn_block(fold_fn_decl(decl, fld), fld.fold_block(body),
-                          @((*captures).map({|cap_item|
-                              @({id: fld.new_id((*cap_item).id)
-                                 with *cap_item})})))
-          }
-          expr_block(blk) { expr_block(fld.fold_block(blk)) }
-          expr_move(el, er) {
-            expr_move(fld.fold_expr(el), fld.fold_expr(er))
-          }
-          expr_copy(e) { expr_copy(fld.fold_expr(e)) }
-          expr_assign(el, er) {
-            expr_assign(fld.fold_expr(el), fld.fold_expr(er))
-          }
-          expr_swap(el, er) {
-            expr_swap(fld.fold_expr(el), fld.fold_expr(er))
-          }
-          expr_assign_op(op, el, er) {
-            expr_assign_op(op, fld.fold_expr(el), fld.fold_expr(er))
-          }
-          expr_field(el, id, tys) {
-            expr_field(fld.fold_expr(el), fld.fold_ident(id),
-                       vec::map(tys, fld.fold_ty))
-          }
-          expr_index(el, er) {
-            expr_index(fld.fold_expr(el), fld.fold_expr(er))
-          }
-          expr_path(pth) { expr_path(fld.fold_path(pth)) }
-          expr_fail(e) { expr_fail(option::map(e, fld.fold_expr)) }
-          expr_break | expr_cont { e }
-          expr_ret(e) { expr_ret(option::map(e, fld.fold_expr)) }
-          expr_log(i, lv, e) { expr_log(i, fld.fold_expr(lv),
-                                        fld.fold_expr(e)) }
-          expr_assert(e) { expr_assert(fld.fold_expr(e)) }
-          expr_check(m, e) { expr_check(m, fld.fold_expr(e)) }
-          expr_if_check(cond, tr, fl) {
-            expr_if_check(fld.fold_expr(cond), fld.fold_block(tr),
-                          option::map(fl, fld.fold_expr))
-          }
-          expr_mac(mac) { expr_mac(fold_mac(mac)) }
-        }
-}
-
-fn noop_fold_ty(t: ty_, fld: ast_fold) -> ty_ {
-    let fold_mac = bind fold_mac_(_, fld);
-    fn fold_mt(mt: mt, fld: ast_fold) -> mt {
-        {ty: fld.fold_ty(mt.ty), mutbl: mt.mutbl}
-    }
-    fn fold_field(f: ty_field, fld: ast_fold) -> ty_field {
-        {node: {ident: fld.fold_ident(f.node.ident),
-                mt: fold_mt(f.node.mt, fld)},
-         span: fld.new_span(f.span)}
-    }
-    alt t {
-      ty_nil | ty_bot {t}
-      ty_box(mt) {ty_box(fold_mt(mt, fld))}
-      ty_uniq(mt) {ty_uniq(fold_mt(mt, fld))}
-      ty_vec(mt) {ty_vec(fold_mt(mt, fld))}
-      ty_ptr(mt) {ty_ptr(fold_mt(mt, fld))}
-      ty_rptr(region, mt) {ty_rptr(region, fold_mt(mt, fld))}
-      ty_rec(fields) {ty_rec(vec::map(fields) {|f| fold_field(f, fld)})}
-      ty_fn(proto, decl) {ty_fn(proto, fold_fn_decl(decl, fld))}
-      ty_tup(tys) {ty_tup(vec::map(tys) {|ty| fld.fold_ty(ty)})}
-      ty_path(path, id) {ty_path(fld.fold_path(path), fld.new_id(id))}
-      ty_constr(ty, constrs) {ty_constr(fld.fold_ty(ty),
-                                vec::map(constrs, fld.fold_ty_constr))}
-      ty_vstore(t, vs) {ty_vstore(fld.fold_ty(t), vs)}
-      ty_mac(mac) {ty_mac(fold_mac(mac))}
-      ty_infer {t}
-    }
-}
-
-fn noop_fold_constr(c: constr_, fld: ast_fold) -> constr_ {
-    {path: fld.fold_path(c.path), args: c.args, id: fld.new_id(c.id)}
-}
-
-fn noop_fold_ty_constr(c: ty_constr_, fld: ast_fold) -> ty_constr_ {
-    let rslt: ty_constr_ =
-        {path: fld.fold_path(c.path), args: c.args, id: fld.new_id(c.id)};
-    rslt
-}
-// ...nor do modules
-fn noop_fold_mod(m: _mod, fld: ast_fold) -> _mod {
-    ret {view_items: vec::map(m.view_items, fld.fold_view_item),
-         items: vec::map(m.items, fld.fold_item)};
-}
-
-fn noop_fold_native_mod(nm: native_mod, fld: ast_fold) -> native_mod {
-    ret {view_items: vec::map(nm.view_items, fld.fold_view_item),
-         items: vec::map(nm.items, fld.fold_native_item)}
-}
-
-fn noop_fold_variant(v: variant_, fld: ast_fold) -> variant_ {
-    fn fold_variant_arg_(va: variant_arg, fld: ast_fold) -> variant_arg {
-        ret {ty: fld.fold_ty(va.ty), id: fld.new_id(va.id)};
-    }
-    let fold_variant_arg = bind fold_variant_arg_(_, fld);
-    let args = vec::map(v.args, fold_variant_arg);
-
-    let fold_attribute = bind fold_attribute_(_, fld);
-    let attrs = vec::map(v.attrs, fold_attribute);
-
-    let de = alt v.disr_expr {
-      some(e) {some(fld.fold_expr(e))}
-      none {none}
-    };
-    ret {name: v.name,
-         attrs: attrs,
-         args: args, id: fld.new_id(v.id),
-         disr_expr: de,
-         vis: v.vis};
-}
-
-fn noop_fold_ident(&&i: ident, _fld: ast_fold) -> ident { ret i; }
-
-fn noop_fold_path(&&p: path, fld: ast_fold) -> path {
-    ret {span: fld.new_span(p.span), global: p.global,
-         idents: vec::map(p.idents, fld.fold_ident),
-         rp: p.rp,
-         types: vec::map(p.types, fld.fold_ty)};
-}
-
-fn noop_fold_local(l: local_, fld: ast_fold) -> local_ {
-    ret {is_mutbl: l.is_mutbl,
-         ty: fld.fold_ty(l.ty),
-         pat: fld.fold_pat(l.pat),
-         init:
-             alt l.init {
-               option::none::<initializer> { l.init }
-               option::some::<initializer>(init) {
-                 option::some::<initializer>({op: init.op,
-                                              expr: fld.fold_expr(init.expr)})
-               }
-             },
-         id: fld.new_id(l.id)};
-}
-
-/* temporarily eta-expand because of a compiler bug with using `fn<T>` as a
-   value */
-fn noop_map_exprs(f: fn@(&&@expr) -> @expr, es: [@expr]) -> [@expr] {
-    ret vec::map(es, f);
-}
-
-fn noop_id(i: node_id) -> node_id { ret i; }
-
-fn noop_span(sp: span) -> span { ret sp; }
-
-fn default_ast_fold() -> ast_fold_precursor {
-    ret @{fold_crate: wrap(noop_fold_crate),
-          fold_crate_directive: wrap(noop_fold_crate_directive),
-          fold_view_item: noop_fold_view_item,
-          fold_native_item: noop_fold_native_item,
-          fold_item: noop_fold_item,
-          fold_class_item: noop_fold_class_item,
-          fold_item_underscore: noop_fold_item_underscore,
-          fold_method: noop_fold_method,
-          fold_block: wrap(noop_fold_block),
-          fold_stmt: wrap(noop_fold_stmt),
-          fold_arm: noop_fold_arm,
-          fold_pat: wrap(noop_fold_pat),
-          fold_decl: wrap(noop_fold_decl),
-          fold_expr: wrap(noop_fold_expr),
-          fold_ty: wrap(noop_fold_ty),
-          fold_constr: wrap(noop_fold_constr),
-          fold_ty_constr: wrap(noop_fold_ty_constr),
-          fold_mod: noop_fold_mod,
-          fold_native_mod: noop_fold_native_mod,
-          fold_variant: wrap(noop_fold_variant),
-          fold_ident: noop_fold_ident,
-          fold_path: noop_fold_path,
-          fold_local: wrap(noop_fold_local),
-          map_exprs: noop_map_exprs,
-          new_id: noop_id,
-          new_span: noop_span};
-}
-
-impl of ast_fold for ast_fold_precursor {
-    /* naturally, a macro to write these would be nice */
-    fn fold_crate(c: crate) -> crate {
-        let (n, s) = self.fold_crate(c.node, c.span, self as ast_fold);
-        ret {node: n, span: self.new_span(s)};
-    }
-    fn fold_crate_directive(&&c: @crate_directive) -> @crate_directive {
-        let (n, s) = self.fold_crate_directive(c.node, c.span,
-                                               self as ast_fold);
-        ret @{node: n,
-              span: self.new_span(s)};
-    }
-    fn fold_view_item(&&x: @view_item) ->
-       @view_item {
-        ret @{node: self.fold_view_item(x.node, self as ast_fold),
-              attrs: vec::map(x.attrs, {|a|
-                  fold_attribute_(a, self as ast_fold)}),
-              vis: x.vis,
-              span: self.new_span(x.span)};
-    }
-    fn fold_native_item(&&x: @native_item)
-        -> @native_item {
-        ret self.fold_native_item(x, self as ast_fold);
-    }
-    fn fold_item(&&i: @item) -> @item {
-        ret self.fold_item(i, self as ast_fold);
-    }
-    fn fold_class_item(&&ci: @class_member) -> @class_member {
-        @{node: alt ci.node {
-           instance_var(nm, t, mt, id, p) {
-               instance_var(nm, (self as ast_fold).fold_ty(t),
-                            mt, id, p)
-           }
-           class_method(m) {
-               class_method(self.fold_method(m, self as ast_fold))
-           }
-          }, span: self.new_span(ci.span)}
-    }
-    fn fold_item_underscore(i: item_) ->
-       item_ {
-        ret self.fold_item_underscore(i, self as ast_fold);
-    }
-    fn fold_method(&&x: @method)
-        -> @method {
-        ret self.fold_method(x, self as ast_fold);
-    }
-    fn fold_block(x: blk) -> blk {
-        let (n, s) = self.fold_block(x.node, x.span, self as ast_fold);
-        ret {node: n, span: self.new_span(s)};
-    }
-    fn fold_stmt(&&x: @stmt) -> @stmt {
-        let (n, s) = self.fold_stmt(x.node, x.span, self as ast_fold);
-        ret @{node: n, span: self.new_span(s)};
-    }
-    fn fold_arm(x: arm) -> arm {
-        ret self.fold_arm(x, self as ast_fold);
-    }
-    fn fold_pat(&&x: @pat) -> @pat {
-        let (n, s) =  self.fold_pat(x.node, x.span, self as ast_fold);
-        ret @{id: self.new_id(x.id),
-              node: n,
-              span: self.new_span(s)};
-    }
-    fn fold_decl(&&x: @decl) -> @decl {
-        let (n, s) = self.fold_decl(x.node, x.span, self as ast_fold);
-        ret @{node: n, span: self.new_span(s)};
-    }
-    fn fold_expr(&&x: @expr) -> @expr {
-        let (n, s) = self.fold_expr(x.node, x.span, self as ast_fold);
-        ret @{id: self.new_id(x.id),
-              node: n,
-              span: self.new_span(s)};
-    }
-    fn fold_ty(&&x: @ty) -> @ty {
-        let (n, s) = self.fold_ty(x.node, x.span, self as ast_fold);
-        ret @{id: self.new_id(x.id), node: n, span: self.new_span(s)};
-    }
-    fn fold_constr(&&x: @ast::constr) ->
-       @ast::constr {
-        let (n, s) = self.fold_constr(x.node, x.span, self as ast_fold);
-        ret @{node: n, span: self.new_span(s)};
-    }
-    fn fold_ty_constr(&&x: @ast::ty_constr) ->
-       @ast::ty_constr {
-        let (n, s) : (ty_constr_, span) =
-            self.fold_ty_constr(x.node, x.span, self as ast_fold);
-        ret @{node: n, span: self.new_span(s)};
-    }
-    fn fold_mod(x: _mod) -> _mod {
-        ret self.fold_mod(x, self as ast_fold);
-    }
-    fn fold_native_mod(x: native_mod) ->
-       native_mod {
-        ret self.fold_native_mod(x, self as ast_fold);
-    }
-    fn fold_variant(x: variant) ->
-       variant {
-        let (n, s) = self.fold_variant(x.node, x.span, self as ast_fold);
-        ret {node: n, span: self.new_span(s)};
-    }
-    fn fold_ident(&&x: ident) -> ident {
-        ret self.fold_ident(x, self as ast_fold);
-    }
-    fn fold_path(&&x: @path) -> @path {
-        @self.fold_path(*x, self as ast_fold)
-    }
-    fn fold_local(&&x: @local) -> @local {
-        let (n, s) = self.fold_local(x.node, x.span, self as ast_fold);
-        ret @{node: n, span: self.new_span(s)};
-    }
-    fn map_exprs(f: fn@(&&@expr) -> @expr, e: [@expr]) -> [@expr] {
-        self.map_exprs(f, e)
-    }
-    fn new_id(node_id: ast::node_id) -> node_id {
-        self.new_id(node_id)
-    }
-    fn new_span(span: span) -> span {
-        self.new_span(span)
-    }
-}
-
-fn make_fold(afp: ast_fold_precursor) -> ast_fold {
-    afp as ast_fold
-}
-
-//
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
-//
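The fold module above is organised as a precursor record of per-node functions plus an iface the rest of the compiler calls: make_fold casts the precursor to the iface, and each iface method unpacks the node, invokes the matching precursor hook, and re-attaches ids and spans. A rough sketch of that precursor-plus-wrapper shape in current Rust; the Fold trait, Precursor struct and toy Expr type are illustrative assumptions, not the original API:

    // Toy stand-in for ast::expr_.
    #[derive(Clone, Debug, PartialEq)]
    enum Expr {
        Lit(i64),
        Add(Box<Expr>, Box<Expr>),
    }

    // The interface the rest of the compiler programs against (like ast_fold).
    trait Fold {
        fn fold_expr(&self, e: Expr) -> Expr;
    }

    // Precursor: one overridable hook per node kind (like ast_fold_precursor).
    struct Precursor {
        fold_expr: fn(Expr, &dyn Fold) -> Expr,
    }

    impl Fold for Precursor {
        fn fold_expr(&self, e: Expr) -> Expr {
            // Dispatch to the hook, handing it the whole fold for recursion,
            // much as the deleted impl passed `self as ast_fold`.
            (self.fold_expr)(e, self as &dyn Fold)
        }
    }

    // Default behaviour: rebuild the node, recursing through the full fold,
    // in the spirit of noop_fold_expr.
    fn noop_fold_expr(e: Expr, fld: &dyn Fold) -> Expr {
        match e {
            Expr::Lit(n) => Expr::Lit(n),
            Expr::Add(a, b) => Expr::Add(
                Box::new(fld.fold_expr(*a)),
                Box::new(fld.fold_expr(*b)),
            ),
        }
    }

    // An override that only rewrites literals and falls back to the noop
    // fold for everything else, so recursion still reaches every node.
    fn double_lits(e: Expr, fld: &dyn Fold) -> Expr {
        match e {
            Expr::Lit(n) => Expr::Lit(2 * n),
            other => noop_fold_expr(other, fld),
        }
    }

    fn main() {
        let fold = Precursor { fold_expr: double_lits };
        let e = Expr::Add(Box::new(Expr::Lit(1)), Box::new(Expr::Lit(2)));
        assert_eq!(
            fold.fold_expr(e),
            Expr::Add(Box::new(Expr::Lit(2)), Box::new(Expr::Lit(4)))
        );
    }

Overriding a single hook still recurses through the trait object, which is what lets the noop_fold_* defaults delegate back to the full fold, the same way the deleted noop_fold_expr calls fld.fold_expr on each child.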
diff --git a/src/librustsyntax/parse.rs b/src/librustsyntax/parse.rs
deleted file mode 100644
index c6535bd1a4d..00000000000
--- a/src/librustsyntax/parse.rs
+++ /dev/null
@@ -1,164 +0,0 @@
-#[doc = "The main parser interface"];
-import dvec::extensions;
-
-export parse_sess;
-export next_node_id;
-export new_parser_from_file;
-export new_parser_from_source_str;
-export parse_crate_from_file;
-export parse_crate_from_crate_file;
-export parse_crate_from_source_str;
-export parse_expr_from_source_str;
-export parse_item_from_source_str;
-export parse_from_source_str;
-
-import parser::parser;
-import attr::parser_attr;
-import common::parser_common;
-import ast::node_id;
-import util::interner;
-import lexer::reader;
-
-type parse_sess = @{
-    cm: codemap::codemap,
-    mut next_id: node_id,
-    span_diagnostic: diagnostic::span_handler,
-    // these two must be kept up to date
-    mut chpos: uint,
-    mut byte_pos: uint
-};
-
-fn parse_crate_from_file(input: str, cfg: ast::crate_cfg, sess: parse_sess) ->
-   @ast::crate {
-    if str::ends_with(input, ".rc") {
-        parse_crate_from_crate_file(input, cfg, sess)
-    } else if str::ends_with(input, ".rs") {
-        parse_crate_from_source_file(input, cfg, sess)
-    } else {
-        sess.span_diagnostic.handler().fatal("unknown input file type: " +
-                                             input)
-    }
-}
-
-fn parse_crate_from_crate_file(input: str, cfg: ast::crate_cfg,
-                               sess: parse_sess) -> @ast::crate {
-    let p = new_parser_from_file(sess, cfg, input, parser::CRATE_FILE);
-    let lo = p.span.lo;
-    let prefix = path::dirname(p.reader.filemap.name);
-    let leading_attrs = p.parse_inner_attrs_and_next();
-    let crate_attrs = leading_attrs.inner;
-    let first_cdir_attr = leading_attrs.next;
-    let cdirs = p.parse_crate_directives(token::EOF, first_cdir_attr);
-    sess.chpos = p.reader.chpos;
-    sess.byte_pos = sess.byte_pos + p.reader.pos;
-    let cx =
-        @{sess: sess,
-          cfg: p.cfg};
-    let (companionmod, _) = path::splitext(path::basename(input));
-    let (m, attrs) = eval::eval_crate_directives_to_mod(
-        cx, cdirs, prefix, option::some(companionmod));
-    let mut hi = p.span.hi;
-    p.expect(token::EOF);
-    ret @ast_util::respan(ast_util::mk_sp(lo, hi),
-                          {directives: cdirs,
-                           module: m,
-                           attrs: crate_attrs + attrs,
-                           config: p.cfg});
-}
-
-fn parse_crate_from_source_file(input: str, cfg: ast::crate_cfg,
-                                sess: parse_sess) -> @ast::crate {
-    let p = new_parser_from_file(sess, cfg, input, parser::SOURCE_FILE);
-    let r = p.parse_crate_mod(cfg);
-    sess.chpos = p.reader.chpos;
-    sess.byte_pos = sess.byte_pos + p.reader.pos;
-    ret r;
-}
-
-fn parse_crate_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
-                               sess: parse_sess) -> @ast::crate {
-    let p = new_parser_from_source_str(
-        sess, cfg, name, codemap::fss_none, source);
-    let r = p.parse_crate_mod(cfg);
-    sess.chpos = p.reader.chpos;
-    sess.byte_pos = sess.byte_pos + p.reader.pos;
-    ret r;
-}
-
-fn parse_expr_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
-                              sess: parse_sess) -> @ast::expr {
-    let p = new_parser_from_source_str(
-        sess, cfg, name, codemap::fss_none, source);
-    let r = p.parse_expr();
-    sess.chpos = p.reader.chpos;
-    sess.byte_pos = sess.byte_pos + p.reader.pos;
-    ret r;
-}
-
-fn parse_item_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
-                              +attrs: [ast::attribute], vis: ast::visibility,
-                              sess: parse_sess) -> option<@ast::item> {
-    let p = new_parser_from_source_str(
-        sess, cfg, name, codemap::fss_none, source);
-    let r = p.parse_item(attrs, vis);
-    sess.chpos = p.reader.chpos;
-    sess.byte_pos = sess.byte_pos + p.reader.pos;
-    ret r;
-}
-
-fn parse_from_source_str<T>(f: fn (p: parser) -> T,
-                            name: str, ss: codemap::file_substr,
-                            source: @str, cfg: ast::crate_cfg,
-                            sess: parse_sess)
-    -> T
-{
-    let p = new_parser_from_source_str(sess, cfg, name, ss, source);
-    let r = f(p);
-    if !p.reader.is_eof() {
-        p.reader.fatal("expected end-of-string");
-    }
-    sess.chpos = p.reader.chpos;
-    sess.byte_pos = sess.byte_pos + p.reader.pos;
-    ret r;
-}
-
-fn next_node_id(sess: parse_sess) -> node_id {
-    let rv = sess.next_id;
-    sess.next_id += 1;
-    // ID 0 is reserved for the crate and doesn't actually exist in the AST
-    assert rv != 0;
-    ret rv;
-}
-
-fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
-                              name: str, ss: codemap::file_substr,
-                              source: @str) -> parser {
-    let ftype = parser::SOURCE_FILE;
-    let filemap = codemap::new_filemap_w_substr
-        (name, ss, source, sess.chpos, sess.byte_pos);
-    sess.cm.files.push(filemap);
-    let itr = @interner::mk(str::hash, str::eq);
-    let rdr = lexer::new_reader(sess.span_diagnostic,
-                                filemap, itr);
-    ret parser(sess, cfg, rdr, ftype);
-}
-
-fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, path: str,
-                        ftype: parser::file_type) ->
-   parser {
-    let src = alt io::read_whole_file_str(path) {
-      result::ok(src) {
-        // FIXME: This copy is unfortunate (#2319)
-        @src
-      }
-      result::err(e) {
-        sess.span_diagnostic.handler().fatal(e)
-      }
-    };
-    let filemap = codemap::new_filemap(path, src,
-                                       sess.chpos, sess.byte_pos);
-    sess.cm.files.push(filemap);
-    let itr = @interner::mk(str::hash, str::eq);
-    let rdr = lexer::new_reader(sess.span_diagnostic, filemap, itr);
-    ret parser(sess, cfg, rdr, ftype);
-}
diff --git a/src/librustsyntax/parse/attr.rs b/src/librustsyntax/parse/attr.rs
deleted file mode 100644
index 6615938b9ad..00000000000
--- a/src/librustsyntax/parse/attr.rs
+++ /dev/null
@@ -1,132 +0,0 @@
-import either::{either, left, right};
-import ast_util::spanned;
-import common::{parser_common, seq_sep};
-
-export attr_or_ext;
-export parser_attr;
-
-// A type to distinguish between the parsing of item attributes and syntax
-// extensions, which both begin with token::POUND
-type attr_or_ext = option<either<[ast::attribute], @ast::expr>>;
-
-impl parser_attr for parser {
-
-    fn parse_outer_attrs_or_ext(first_item_attrs: [ast::attribute])
-        -> attr_or_ext
-    {
-        let expect_item_next = vec::is_not_empty(first_item_attrs);
-        if self.token == token::POUND {
-            let lo = self.span.lo;
-            if self.look_ahead(1u) == token::LBRACKET {
-                self.bump();
-                let first_attr =
-                    self.parse_attribute_naked(ast::attr_outer, lo);
-                ret some(left([first_attr] + self.parse_outer_attributes()));
-            } else if !(self.look_ahead(1u) == token::LT
-                        || self.look_ahead(1u) == token::LBRACKET
-                        || expect_item_next) {
-                self.bump();
-                ret some(right(self.parse_syntax_ext_naked(lo)));
-            } else { ret none; }
-        } else { ret none; }
-    }
-
-    // Parse attributes that appear before an item
-    fn parse_outer_attributes() -> [ast::attribute] {
-        let mut attrs: [ast::attribute] = [];
-        while self.token == token::POUND
-            && self.look_ahead(1u) == token::LBRACKET {
-            attrs += [self.parse_attribute(ast::attr_outer)];
-        }
-        ret attrs;
-    }
-
-    fn parse_attribute(style: ast::attr_style) -> ast::attribute {
-        let lo = self.span.lo;
-        self.expect(token::POUND);
-        ret self.parse_attribute_naked(style, lo);
-    }
-
-    fn parse_attribute_naked(style: ast::attr_style, lo: uint) ->
-        ast::attribute {
-        self.expect(token::LBRACKET);
-        let meta_item = self.parse_meta_item();
-        self.expect(token::RBRACKET);
-        let mut hi = self.span.hi;
-        ret spanned(lo, hi, {style: style, value: *meta_item});
-    }
-
-    // Parse attributes that appear after the opening of an item, each
-    // terminated by a semicolon. In addition to a vector of inner attributes,
-    // this function also returns a vector that may contain the first outer
-    // attribute of the next item (since we can't know whether the attribute
-    // is an inner attribute of the containing item or an outer attribute of
-    // the first contained item until we see the semi).
-    fn parse_inner_attrs_and_next() ->
-        {inner: [ast::attribute], next: [ast::attribute]} {
-        let mut inner_attrs: [ast::attribute] = [];
-        let mut next_outer_attrs: [ast::attribute] = [];
-        while self.token == token::POUND {
-            if self.look_ahead(1u) != token::LBRACKET {
-                // This is an extension
-                break;
-            }
-            let attr = self.parse_attribute(ast::attr_inner);
-            if self.token == token::SEMI {
-                self.bump();
-                inner_attrs += [attr];
-            } else {
-                // It's not really an inner attribute
-                let outer_attr =
-                    spanned(attr.span.lo, attr.span.hi,
-                            {style: ast::attr_outer, value: attr.node.value});
-                next_outer_attrs += [outer_attr];
-                break;
-            }
-        }
-        ret {inner: inner_attrs, next: next_outer_attrs};
-    }
-
-    fn parse_meta_item() -> @ast::meta_item {
-        let lo = self.span.lo;
-        let ident = self.parse_ident();
-        alt self.token {
-          token::EQ {
-            self.bump();
-            let lit = self.parse_lit();
-            let mut hi = self.span.hi;
-            ret @spanned(lo, hi, ast::meta_name_value(ident, lit));
-          }
-          token::LPAREN {
-            let inner_items = self.parse_meta_seq();
-            let mut hi = self.span.hi;
-            ret @spanned(lo, hi, ast::meta_list(ident, inner_items));
-          }
-          _ {
-            let mut hi = self.span.hi;
-            ret @spanned(lo, hi, ast::meta_word(ident));
-          }
-        }
-    }
-
-    fn parse_meta_seq() -> [@ast::meta_item] {
-        ret self.parse_seq(token::LPAREN, token::RPAREN,
-                           seq_sep(token::COMMA),
-                           {|p| p.parse_meta_item()}).node;
-    }
-
-    fn parse_optional_meta() -> [@ast::meta_item] {
-        alt self.token { token::LPAREN { ret self.parse_meta_seq(); }
-                         _ { ret []; } }
-    }
-}
-
-//
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
-//
diff --git a/src/librustsyntax/parse/classify.rs b/src/librustsyntax/parse/classify.rs
deleted file mode 100644
index 471fe15788d..00000000000
--- a/src/librustsyntax/parse/classify.rs
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
-  Predicates on exprs and stmts that the pretty-printer and parser use
- */
-import ast_util::*;
-
-fn expr_requires_semi_to_be_stmt(e: @ast::expr) -> bool {
-    alt e.node {
-      ast::expr_if(_, _, _) | ast::expr_if_check(_, _, _)
-      | ast::expr_alt(_, _, _) | ast::expr_block(_)
-      | ast::expr_while(_, _) | ast::expr_loop(_)
-      | ast::expr_call(_, _, true) {
-        false
-      }
-      _ { true }
-    }
-}
-
-fn stmt_ends_with_semi(stmt: ast::stmt) -> bool {
-    alt stmt.node {
-      ast::stmt_decl(d, _) {
-        ret alt d.node {
-              ast::decl_local(_) { true }
-              ast::decl_item(_) { false }
-            }
-      }
-      ast::stmt_expr(e, _) {
-        ret expr_requires_semi_to_be_stmt(e);
-      }
-      ast::stmt_semi(e, _) {
-        ret false;
-      }
-    }
-}
-
-fn need_parens(expr: @ast::expr, outer_prec: uint) -> bool {
-    alt expr.node {
-      ast::expr_binary(op, _, _) { operator_prec(op) < outer_prec }
-      ast::expr_cast(_, _) { parse::prec::as_prec < outer_prec }
-      // This may be too conservative in some cases
-      ast::expr_assign(_, _) { true }
-      ast::expr_move(_, _) { true }
-      ast::expr_swap(_, _) { true }
-      ast::expr_assign_op(_, _, _) { true }
-      ast::expr_ret(_) { true }
-      ast::expr_assert(_) { true }
-      ast::expr_check(_, _) { true }
-      ast::expr_log(_, _, _) { true }
-      _ { !parse::classify::expr_requires_semi_to_be_stmt(expr) }
-    }
-}
-
-fn ends_in_lit_int(ex: @ast::expr) -> bool {
-    alt ex.node {
-      ast::expr_lit(@{node: ast::lit_int(_, ast::ty_i), _}) { true }
-      ast::expr_binary(_, _, sub) | ast::expr_unary(_, sub) |
-      ast::expr_move(_, sub) | ast::expr_copy(sub) |
-      ast::expr_assign(_, sub) |
-      ast::expr_assign_op(_, _, sub) | ast::expr_swap(_, sub) |
-      ast::expr_log(_, _, sub) | ast::expr_assert(sub) |
-      ast::expr_check(_, sub) { ends_in_lit_int(sub) }
-      ast::expr_fail(osub) | ast::expr_ret(osub) {
-        alt osub {
-          some(ex) { ends_in_lit_int(ex) }
-          _ { false }
-        }
-      }
-      _ { false }
-    }
-}
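classify.rs encodes a distinction that still exists in the language: block-like expressions (if, alt/match, while, loop, bare blocks) may stand as statements without a trailing semicolon, while other expressions need one. A small illustration in current Rust syntax, not taken from the deleted sources:

    fn main() {
        let flag = true;

        // Block-like expressions are complete statements with no semicolon;
        // these correspond to the `false` arm of expr_requires_semi_to_be_stmt.
        if flag { println!("if used as a statement"); }
        match flag { true => println!("true"), false => println!("false") }
        { println!("a bare block"); }

        // Ordinary expressions still need the semicolon to become statements.
        let x = 1 + 2;
        println!("x = {x}");
    }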
diff --git a/src/librustsyntax/parse/comments.rs b/src/librustsyntax/parse/comments.rs
deleted file mode 100644
index 9fa4a4c3e8c..00000000000
--- a/src/librustsyntax/parse/comments.rs
+++ /dev/null
@@ -1,203 +0,0 @@
-import io::reader_util;
-import io::println;//XXXXXXXXxxx
-import util::interner;
-import lexer::{ reader, new_reader, next_token, is_whitespace };
-
-export cmnt;
-export lit;
-export cmnt_style;
-export gather_comments_and_literals;
-
-enum cmnt_style {
-    isolated, // No code on either side of each line of the comment
-    trailing, // Code exists to the left of the comment
-    mixed, // Code before /* foo */ and after the comment
-    blank_line, // Just a manual blank line "\n\n", for layout
-}
-
-type cmnt = {style: cmnt_style, lines: [str], pos: uint};
-
-fn read_to_eol(rdr: reader) -> str {
-    let mut val = "";
-    while rdr.curr != '\n' && !rdr.is_eof() {
-        str::push_char(val, rdr.curr);
-        rdr.bump();
-    }
-    if rdr.curr == '\n' { rdr.bump(); }
-    ret val;
-}
-
-fn read_one_line_comment(rdr: reader) -> str {
-    let val = read_to_eol(rdr);
-    assert ((val[0] == '/' as u8 && val[1] == '/' as u8) ||
-            (val[0] == '#' as u8 && val[1] == '!' as u8));
-    ret val;
-}
-
-fn consume_non_eol_whitespace(rdr: reader) {
-    while is_whitespace(rdr.curr) && rdr.curr != '\n' && !rdr.is_eof() {
-        rdr.bump();
-    }
-}
-
-fn push_blank_line_comment(rdr: reader, &comments: [cmnt]) {
-    #debug(">>> blank-line comment");
-    let v: [str] = [];
-    comments += [{style: blank_line, lines: v, pos: rdr.chpos}];
-}
-
-fn consume_whitespace_counting_blank_lines(rdr: reader, &comments: [cmnt]) {
-    while is_whitespace(rdr.curr) && !rdr.is_eof() {
-        if rdr.col == 0u && rdr.curr == '\n' {
-            push_blank_line_comment(rdr, comments);
-        }
-        rdr.bump();
-    }
-}
-
-fn read_shebang_comment(rdr: reader, code_to_the_left: bool) -> cmnt {
-    #debug(">>> shebang comment");
-    let p = rdr.chpos;
-    #debug("<<< shebang comment");
-    ret {style: if code_to_the_left { trailing } else { isolated },
-         lines: [read_one_line_comment(rdr)],
-         pos: p};
-}
-
-fn read_line_comments(rdr: reader, code_to_the_left: bool) -> cmnt {
-    #debug(">>> line comments");
-    let p = rdr.chpos;
-    let mut lines: [str] = [];
-    while rdr.curr == '/' && rdr.next() == '/' {
-        let line = read_one_line_comment(rdr);
-        log(debug, line);
-        lines += [line];
-        consume_non_eol_whitespace(rdr);
-    }
-    #debug("<<< line comments");
-    ret {style: if code_to_the_left { trailing } else { isolated },
-         lines: lines,
-         pos: p};
-}
-
-fn all_whitespace(s: str, begin: uint, end: uint) -> bool {
-    let mut i: uint = begin;
-    while i != end { if !is_whitespace(s[i] as char) { ret false; } i += 1u; }
-    ret true;
-}
-
-fn trim_whitespace_prefix_and_push_line(&lines: [str],
-                                        s: str, col: uint) unsafe {
-    let mut s1;
-    let len = str::len(s);
-    if all_whitespace(s, 0u, uint::min(len, col)) {
-        if col < len {
-            s1 = str::slice(s, col, len);
-        } else { s1 = ""; }
-    } else { s1 = s; }
-    log(debug, "pushing line: " + s1);
-    lines += [s1];
-}
-
-fn read_block_comment(rdr: reader, code_to_the_left: bool) -> cmnt {
-    #debug(">>> block comment");
-    let p = rdr.chpos;
-    let mut lines: [str] = [];
-    let mut col: uint = rdr.col;
-    rdr.bump();
-    rdr.bump();
-    let mut curr_line = "/*";
-    let mut level: int = 1;
-    while level > 0 {
-        #debug("=== block comment level %d", level);
-        if rdr.is_eof() { rdr.fatal("unterminated block comment"); }
-        if rdr.curr == '\n' {
-            trim_whitespace_prefix_and_push_line(lines, curr_line, col);
-            curr_line = "";
-            rdr.bump();
-        } else {
-            str::push_char(curr_line, rdr.curr);
-            if rdr.curr == '/' && rdr.next() == '*' {
-                rdr.bump();
-                rdr.bump();
-                curr_line += "*";
-                level += 1;
-            } else {
-                if rdr.curr == '*' && rdr.next() == '/' {
-                    rdr.bump();
-                    rdr.bump();
-                    curr_line += "/";
-                    level -= 1;
-                } else { rdr.bump(); }
-            }
-        }
-    }
-    if str::len(curr_line) != 0u {
-        trim_whitespace_prefix_and_push_line(lines, curr_line, col);
-    }
-    let mut style = if code_to_the_left { trailing } else { isolated };
-    consume_non_eol_whitespace(rdr);
-    if !rdr.is_eof() && rdr.curr != '\n' && vec::len(lines) == 1u {
-        style = mixed;
-    }
-    #debug("<<< block comment");
-    ret {style: style, lines: lines, pos: p};
-}
-
-fn peeking_at_comment(rdr: reader) -> bool {
-    ret ((rdr.curr == '/' && rdr.next() == '/') ||
-         (rdr.curr == '/' && rdr.next() == '*')) ||
-        (rdr.curr == '#' && rdr.next() == '!');
-}
-
-fn consume_comment(rdr: reader, code_to_the_left: bool, &comments: [cmnt]) {
-    #debug(">>> consume comment");
-    if rdr.curr == '/' && rdr.next() == '/' {
-        comments += [read_line_comments(rdr, code_to_the_left)];
-    } else if rdr.curr == '/' && rdr.next() == '*' {
-        comments += [read_block_comment(rdr, code_to_the_left)];
-    } else if rdr.curr == '#' && rdr.next() == '!' {
-        comments += [read_shebang_comment(rdr, code_to_the_left)];
-    } else { fail; }
-    #debug("<<< consume comment");
-}
-
-type lit = {lit: str, pos: uint};
-
-fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
-                                path: str,
-                                srdr: io::reader) ->
-   {cmnts: [cmnt], lits: [lit]} {
-    let src = @str::from_bytes(srdr.read_whole_stream());
-    let itr = @interner::mk::<str>(str::hash, str::eq);
-    let rdr = new_reader(span_diagnostic,
-                         codemap::new_filemap(path, src, 0u, 0u), itr);
-    let mut comments: [cmnt] = [];
-    let mut literals: [lit] = [];
-    let mut first_read: bool = true;
-    while !rdr.is_eof() {
-        loop {
-            let mut code_to_the_left = !first_read;
-            consume_non_eol_whitespace(rdr);
-            if rdr.curr == '\n' {
-                code_to_the_left = false;
-                consume_whitespace_counting_blank_lines(rdr, comments);
-            }
-            while peeking_at_comment(rdr) {
-                consume_comment(rdr, code_to_the_left, comments);
-                consume_whitespace_counting_blank_lines(rdr, comments);
-            }
-            break;
-        }
-        let tok = next_token(rdr);
-        if token::is_lit(tok.tok) {
-            let s = rdr.get_str_from(tok.bpos);
-            literals += [{lit: s, pos: tok.chpos}];
-            log(debug, "tok lit: " + s);
-        } else {
-            log(debug, "tok: " + token::to_str(*rdr.interner, tok.tok));
-        }
-        first_read = false;
-    }
-    ret {cmnts: comments, lits: literals};
-}
diff --git a/src/librustsyntax/parse/common.rs b/src/librustsyntax/parse/common.rs
deleted file mode 100644
index 4bfba482c4f..00000000000
--- a/src/librustsyntax/parse/common.rs
+++ /dev/null
@@ -1,217 +0,0 @@
-import std::map::{hashmap};
-import ast_util::spanned;
-import parser::parser;
-
-type seq_sep = {
-    sep: option<token::token>,
-    trailing_opt: bool   // is trailing separator optional?
-};
-
-fn seq_sep(t: token::token) -> seq_sep {
-    ret {sep: option::some(t), trailing_opt: false};
-}
-fn seq_sep_opt(t: token::token) -> seq_sep {
-    ret {sep: option::some(t), trailing_opt: true};
-}
-fn seq_sep_none() -> seq_sep {
-    ret {sep: option::none, trailing_opt: false};
-}
-
-
-fn token_to_str(reader: reader, token: token::token) -> str {
-    token::to_str(*reader.interner, token)
-}
-
-
-// This should be done with traits, once traits work
-impl parser_common for parser {
-
-    fn unexpected_last(t: token::token) -> ! {
-        self.span_fatal(self.last_span, "unexpected token: '"
-                        + token_to_str(self.reader, t) + "'");
-    }
-
-    fn unexpected() -> ! {
-        self.fatal("unexpected token: '"
-                   + token_to_str(self.reader, self.token) + "'");
-    }
-
-    fn expect(t: token::token) {
-        if self.token == t {
-            self.bump();
-        } else {
-            let mut s: str = "expecting '";
-            s += token_to_str(self.reader, t);
-            s += "' but found '";
-            s += token_to_str(self.reader, self.token);
-            self.fatal(s + "'");
-        }
-    }
-
-    fn parse_ident() -> ast::ident {
-        alt self.token {
-          token::IDENT(i, _) { self.bump(); ret self.get_str(i); }
-          _ { self.fatal("expecting ident, found "
-                      + token_to_str(self.reader, self.token)); }
-        }
-    }
-
-    fn parse_path_list_ident() -> ast::path_list_ident {
-        let lo = self.span.lo;
-        let ident = self.parse_ident();
-        let hi = self.span.hi;
-        ret spanned(lo, hi, {name: ident, id: self.get_id()});
-    }
-
-    fn parse_value_ident() -> ast::ident {
-        self.check_restricted_keywords();
-        ret self.parse_ident();
-    }
-
-    fn eat(tok: token::token) -> bool {
-        ret if self.token == tok { self.bump(); true } else { false };
-    }
-
-    // A sanity check that the word we are asking for is a known keyword
-    fn require_keyword(word: str) {
-        if !self.keywords.contains_key(word) {
-            self.bug(#fmt("unknown keyword: %s", word));
-        }
-    }
-
-    fn token_is_keyword(word: str, tok: token::token) -> bool {
-        self.require_keyword(word);
-        alt tok {
-          token::IDENT(sid, false) { str::eq(word, self.get_str(sid)) }
-          _ { false }
-        }
-    }
-
-    fn is_keyword(word: str) -> bool {
-        self.token_is_keyword(word, self.token)
-    }
-
-    fn eat_keyword(word: str) -> bool {
-        self.require_keyword(word);
-        alt self.token {
-          token::IDENT(sid, false) {
-            if str::eq(word, self.get_str(sid)) {
-                self.bump();
-                ret true;
-            } else { ret false; }
-          }
-          _ { ret false; }
-        }
-    }
-
-    fn expect_keyword(word: str) {
-        self.require_keyword(word);
-        if !self.eat_keyword(word) {
-            self.fatal("expecting " + word + ", found " +
-                    token_to_str(self.reader, self.token));
-        }
-    }
-
-    fn is_restricted_keyword(word: str) -> bool {
-        self.restricted_keywords.contains_key(word)
-    }
-
-    fn check_restricted_keywords() {
-        alt self.token {
-          token::IDENT(_, false) {
-            let w = token_to_str(self.reader, self.token);
-            self.check_restricted_keywords_(w);
-          }
-          _ { }
-        }
-    }
-
-    fn check_restricted_keywords_(w: ast::ident) {
-        if self.is_restricted_keyword(w) {
-            self.fatal("found `" + w + "` in restricted position");
-        }
-    }
-
-    fn expect_gt() {
-        if self.token == token::GT {
-            self.bump();
-        } else if self.token == token::BINOP(token::SHR) {
-            self.swap(token::GT, self.span.lo + 1u, self.span.hi);
-        } else {
-            let mut s: str = "expecting ";
-            s += token_to_str(self.reader, token::GT);
-            s += ", found ";
-            s += token_to_str(self.reader, self.token);
-            self.fatal(s);
-        }
-    }
-
-    fn parse_seq_to_before_gt<T: copy>(sep: option<token::token>,
-                                       f: fn(parser) -> T) -> [T] {
-        let mut first = true;
-        let mut v = [];
-        while self.token != token::GT
-            && self.token != token::BINOP(token::SHR) {
-            alt sep {
-              some(t) { if first { first = false; }
-                       else { self.expect(t); } }
-              _ { }
-            }
-            v += [f(self)];
-        }
-
-        ret v;
-    }
-
-    fn parse_seq_to_gt<T: copy>(sep: option<token::token>,
-                                f: fn(parser) -> T) -> [T] {
-        let v = self.parse_seq_to_before_gt(sep, f);
-        self.expect_gt();
-
-        ret v;
-    }
-
-    fn parse_seq_lt_gt<T: copy>(sep: option<token::token>,
-                                f: fn(parser) -> T) -> spanned<[T]> {
-        let lo = self.span.lo;
-        self.expect(token::LT);
-        let result = self.parse_seq_to_before_gt::<T>(sep, f);
-        let hi = self.span.hi;
-        self.expect_gt();
-        ret spanned(lo, hi, result);
-    }
-
-    fn parse_seq_to_end<T: copy>(ket: token::token, sep: seq_sep,
-                                 f: fn(parser) -> T) -> [T] {
-        let val = self.parse_seq_to_before_end(ket, sep, f);
-        self.bump();
-        ret val;
-    }
-
-
-    fn parse_seq_to_before_end<T: copy>(ket: token::token, sep: seq_sep,
-                                        f: fn(parser) -> T) -> [T] {
-        let mut first: bool = true;
-        let mut v: [T] = [];
-        while self.token != ket {
-            alt sep.sep {
-              some(t) { if first { first = false; }
-                        else { self.expect(t); } }
-              _ { }
-            }
-            if sep.trailing_opt && self.token == ket { break; }
-            v += [f(self)];
-        }
-        ret v;
-    }
-
-    fn parse_seq<T: copy>(bra: token::token, ket: token::token, sep: seq_sep,
-                          f: fn(parser) -> T) -> spanned<[T]> {
-        let lo = self.span.lo;
-        self.expect(bra);
-        let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
-        let hi = self.span.hi;
-        self.bump();
-        ret spanned(lo, hi, result);
-    }
-}
\ No newline at end of file
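The bulk of common.rs above is generic sequence-parsing machinery: parse_seq_to_before_end loops until the closing token, calling expect(sep) between elements and allowing an optional trailing separator, while parse_seq wraps that loop with the opening and closing brackets and records the span. A compact sketch of the core loop in present-day Rust, using a toy token and parser type rather than the original ones, might be:

    // Sketch only: the "items separated by `sep`, terminated by `ket`,
    // trailing separator optional" loop. `Tok` and `P` are illustrative
    // stand-ins, not the original parser types.
    #[derive(Debug, PartialEq, Clone, Copy)]
    enum Tok { Comma, RBracket, Ident }

    struct SeqSep { sep: Option<Tok>, trailing_opt: bool }

    struct P { toks: Vec<Tok>, i: usize }

    impl P {
        fn cur(&self) -> Tok { self.toks[self.i] }
        fn bump(&mut self) { self.i += 1; }
        fn expect(&mut self, t: Tok) {
            assert_eq!(self.cur(), t, "unexpected token");
            self.bump();
        }
    }

    fn parse_seq_to_before_end<T>(p: &mut P, ket: Tok, sep: SeqSep,
                                  f: impl Fn(&mut P) -> T) -> Vec<T> {
        let mut first = true;
        let mut v = Vec::new();
        while p.cur() != ket {
            if let Some(t) = sep.sep {
                if first { first = false; } else { p.expect(t); }
            }
            // With an optional trailing separator, `ket` may follow `sep`.
            if sep.trailing_opt && p.cur() == ket { break; }
            v.push(f(p));
        }
        v
    }

In the original, parse_seq calls expect(bra) before this loop and bump() afterwards to consume the closing token, returning the parsed items together with the span of the whole bracketed sequence.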
diff --git a/src/librustsyntax/parse/eval.rs b/src/librustsyntax/parse/eval.rs
deleted file mode 100644
index 98d75499878..00000000000
--- a/src/librustsyntax/parse/eval.rs
+++ /dev/null
@@ -1,142 +0,0 @@
-import parser::{parser, SOURCE_FILE};
-import attr::parser_attr;
-
-export eval_crate_directives_to_mod;
-
-type ctx =
-    @{sess: parse::parse_sess,
-      cfg: ast::crate_cfg};
-
-fn eval_crate_directives(cx: ctx, cdirs: [@ast::crate_directive], prefix: str,
-                         &view_items: [@ast::view_item],
-                         &items: [@ast::item]) {
-    for cdirs.each {|sub_cdir|
-        eval_crate_directive(cx, sub_cdir, prefix, view_items, items);
-    }
-}
-
-fn eval_crate_directives_to_mod(cx: ctx, cdirs: [@ast::crate_directive],
-                                prefix: str, suffix: option<str>)
-    -> (ast::_mod, [ast::attribute]) {
-    #debug("eval crate prefix: %s", prefix);
-    #debug("eval crate suffix: %s",
-           option::get_default(suffix, "none"));
-    let (cview_items, citems, cattrs)
-        = parse_companion_mod(cx, prefix, suffix);
-    let mut view_items: [@ast::view_item] = [];
-    let mut items: [@ast::item] = [];
-    eval_crate_directives(cx, cdirs, prefix, view_items, items);
-    ret ({view_items: view_items + cview_items,
-          items: items + citems},
-         cattrs);
-}
-
-/*
-The 'companion mod'. So .rc crates and directory mod crate directives define
-modules but not a .rs file to fill those mods with stuff. The companion mod is
-a convention for locating a .rs file to go with them.  For .rc files the
-companion mod is a .rs file with the same name; for directory mods the
-companion mod is a .rs file with the same name as the directory.
-
-We build the path to the companion mod by combining the prefix and the
-optional suffix then adding the .rs extension.
-*/
-fn parse_companion_mod(cx: ctx, prefix: str, suffix: option<str>)
-    -> ([@ast::view_item], [@ast::item], [ast::attribute]) {
-
-    fn companion_file(prefix: str, suffix: option<str>) -> str {
-        ret alt suffix {
-          option::some(s) { path::connect(prefix, s) }
-          option::none { prefix }
-        } + ".rs";
-    }
-
-    fn file_exists(path: str) -> bool {
-        // Crude, but there's no lib function for this and I'm not
-        // up to writing it just now
-        alt io::file_reader(path) {
-          result::ok(_) { true }
-          result::err(_) { false }
-        }
-    }
-
-    let modpath = companion_file(prefix, suffix);
-    #debug("looking for companion mod %s", modpath);
-    if file_exists(modpath) {
-        #debug("found companion mod");
-        let p0 = new_parser_from_file(cx.sess, cx.cfg, modpath, SOURCE_FILE);
-        let inner_attrs = p0.parse_inner_attrs_and_next();
-        let first_item_outer_attrs = inner_attrs.next;
-        let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs);
-        cx.sess.chpos = p0.reader.chpos;
-        cx.sess.byte_pos = cx.sess.byte_pos + p0.reader.pos;
-        ret (m0.view_items, m0.items, inner_attrs.inner);
-    } else {
-        ret ([], [], []);
-    }
-}
-
-fn cdir_path_opt(id: str, attrs: [ast::attribute]) -> str {
-    alt ::attr::first_attr_value_str_by_name(attrs, "path") {
-      some(d) {
-        ret d;
-      }
-      none { ret id; }
-    }
-}
-
-fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: str,
-                        &view_items: [@ast::view_item],
-                        &items: [@ast::item]) {
-    alt cdir.node {
-      ast::cdir_src_mod(id, attrs) {
-        let file_path = cdir_path_opt(id + ".rs", attrs);
-        let full_path =
-            if path::path_is_absolute(file_path) {
-                file_path
-            } else { prefix + path::path_sep() + file_path };
-        let p0 =
-            new_parser_from_file(cx.sess, cx.cfg, full_path, SOURCE_FILE);
-        let inner_attrs = p0.parse_inner_attrs_and_next();
-        let mod_attrs = attrs + inner_attrs.inner;
-        let first_item_outer_attrs = inner_attrs.next;
-        let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs);
-
-        let i = p0.mk_item(cdir.span.lo, cdir.span.hi, id,
-                           ast::item_mod(m0), ast::public, mod_attrs);
-        // Thread defids, chpos and byte_pos through the parsers
-        cx.sess.chpos = p0.reader.chpos;
-        cx.sess.byte_pos = cx.sess.byte_pos + p0.reader.pos;
-        items += [i];
-      }
-      ast::cdir_dir_mod(id, cdirs, attrs) {
-        let path = cdir_path_opt(id, attrs);
-        let full_path =
-            if path::path_is_absolute(path) {
-                path
-            } else { prefix + path::path_sep() + path };
-        let (m0, a0) = eval_crate_directives_to_mod(
-            cx, cdirs, full_path, none);
-        let i =
-            @{ident: id,
-              attrs: attrs + a0,
-              id: cx.sess.next_id,
-              node: ast::item_mod(m0),
-              vis: ast::public,
-              span: cdir.span};
-        cx.sess.next_id += 1;
-        items += [i];
-      }
-      ast::cdir_view_item(vi) { view_items += [vi]; }
-      ast::cdir_syntax(pth) { }
-    }
-}
-//
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
-//
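The companion-mod rule described in the comment above boils down to a small path computation: take the prefix, append the optional suffix as a path component, and add a .rs extension. A sketch of just that rule in present-day Rust (hard-coding '/' as the separator in place of the old path::connect helper) would be:

    // Sketch of the companion-file rule only; assumes '/' as the path
    // separator rather than the original path::connect.
    fn companion_file(prefix: &str, suffix: Option<&str>) -> String {
        match suffix {
            Some(s) => format!("{}/{}.rs", prefix, s), // directory mod
            None => format!("{}.rs", prefix),          // .rc crate file
        }
    }

    // companion_file("src/foo", None)    == "src/foo.rs"
    // companion_file("src", Some("foo")) == "src/foo.rs"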
diff --git a/src/librustsyntax/parse/lexer.rs b/src/librustsyntax/parse/lexer.rs
deleted file mode 100644
index 63dc85e865d..00000000000
--- a/src/librustsyntax/parse/lexer.rs
+++ /dev/null
@@ -1,536 +0,0 @@
-import util::interner;
-import util::interner::intern;
-import diagnostic;
-
-export reader, new_reader, next_token, is_whitespace;
-
-type reader = @{
-    span_diagnostic: diagnostic::span_handler,
-    src: @str,
-    mut col: uint,
-    mut pos: uint,
-    mut curr: char,
-    mut chpos: uint,
-    filemap: codemap::filemap,
-    interner: @interner::interner<str>
-};
-
-impl reader for reader {
-    fn is_eof() -> bool { self.curr == -1 as char }
-    fn get_str_from(start: uint) -> str unsafe {
-        // I'm pretty skeptical about this subtraction. What if there's a
-        // multi-byte character before the mark?
-        ret str::slice(*self.src, start - 1u, self.pos - 1u);
-    }
-    fn next() -> char {
-        if self.pos < (*self.src).len() {
-            ret str::char_at(*self.src, self.pos);
-        } else { ret -1 as char; }
-    }
-    fn bump() {
-        if self.pos < (*self.src).len() {
-            self.col += 1u;
-            self.chpos += 1u;
-            if self.curr == '\n' {
-                codemap::next_line(self.filemap, self.chpos, self.pos);
-                self.col = 0u;
-            }
-            let next = str::char_range_at(*self.src, self.pos);
-            self.pos = next.next;
-            self.curr = next.ch;
-        } else {
-            if (self.curr != -1 as char) {
-                self.col += 1u;
-                self.chpos += 1u;
-                self.curr = -1 as char;
-            }
-        }
-    }
-    fn fatal(m: str) -> ! {
-        self.span_diagnostic.span_fatal(
-            ast_util::mk_sp(self.chpos, self.chpos),
-            m)
-    }
-}
-
-fn new_reader(span_diagnostic: diagnostic::span_handler,
-              filemap: codemap::filemap,
-              itr: @interner::interner<str>) -> reader {
-    let r = @{span_diagnostic: span_diagnostic, src: filemap.src,
-              mut col: 0u, mut pos: 0u, mut curr: -1 as char,
-              mut chpos: filemap.start_pos.ch,
-              filemap: filemap, interner: itr};
-    if r.pos < (*filemap.src).len() {
-        let next = str::char_range_at(*r.src, r.pos);
-        r.pos = next.next;
-        r.curr = next.ch;
-    }
-    ret r;
-}
-
-fn dec_digit_val(c: char) -> int { ret (c as int) - ('0' as int); }
-
-fn hex_digit_val(c: char) -> int {
-    if in_range(c, '0', '9') { ret (c as int) - ('0' as int); }
-    if in_range(c, 'a', 'f') { ret (c as int) - ('a' as int) + 10; }
-    if in_range(c, 'A', 'F') { ret (c as int) - ('A' as int) + 10; }
-    fail;
-}
-
-fn bin_digit_value(c: char) -> int { if c == '0' { ret 0; } ret 1; }
-
-fn is_whitespace(c: char) -> bool {
-    ret c == ' ' || c == '\t' || c == '\r' || c == '\n';
-}
-
-fn may_begin_ident(c: char) -> bool { ret is_alpha(c) || c == '_'; }
-
-fn in_range(c: char, lo: char, hi: char) -> bool { ret lo <= c && c <= hi; }
-
-fn is_alpha(c: char) -> bool {
-    ret in_range(c, 'a', 'z') || in_range(c, 'A', 'Z');
-}
-
-fn is_dec_digit(c: char) -> bool { ret in_range(c, '0', '9'); }
-
-fn is_alnum(c: char) -> bool { ret is_alpha(c) || is_dec_digit(c); }
-
-fn is_hex_digit(c: char) -> bool {
-    ret in_range(c, '0', '9') || in_range(c, 'a', 'f') ||
-            in_range(c, 'A', 'F');
-}
-
-fn is_bin_digit(c: char) -> bool { ret c == '0' || c == '1'; }
-
-fn consume_whitespace_and_comments(rdr: reader) {
-    while is_whitespace(rdr.curr) { rdr.bump(); }
-    ret consume_any_line_comment(rdr);
-}
-
-fn consume_any_line_comment(rdr: reader) {
-    if rdr.curr == '/' {
-        alt rdr.next() {
-          '/' {
-            while rdr.curr != '\n' && !rdr.is_eof() { rdr.bump(); }
-            // Restart whitespace munch.
-
-            ret consume_whitespace_and_comments(rdr);
-          }
-          '*' { rdr.bump(); rdr.bump(); ret consume_block_comment(rdr); }
-          _ { ret; }
-        }
-    } else if rdr.curr == '#' {
-        if rdr.next() == '!' {
-            let cmap = codemap::new_codemap();
-            (*cmap).files.push(rdr.filemap);
-            let loc = codemap::lookup_char_pos_adj(cmap, rdr.chpos);
-            if loc.line == 1u && loc.col == 0u {
-                while rdr.curr != '\n' && !rdr.is_eof() { rdr.bump(); }
-                ret consume_whitespace_and_comments(rdr);
-            }
-        }
-    }
-}
-
-fn consume_block_comment(rdr: reader) {
-    let mut level: int = 1;
-    while level > 0 {
-        if rdr.is_eof() { rdr.fatal("unterminated block comment"); }
-        if rdr.curr == '/' && rdr.next() == '*' {
-            rdr.bump();
-            rdr.bump();
-            level += 1;
-        } else {
-            if rdr.curr == '*' && rdr.next() == '/' {
-                rdr.bump();
-                rdr.bump();
-                level -= 1;
-            } else { rdr.bump(); }
-        }
-    }
-    // restart whitespace munch.
-
-    ret consume_whitespace_and_comments(rdr);
-}
-
-fn scan_exponent(rdr: reader) -> option<str> {
-    let mut c = rdr.curr;
-    let mut rslt = "";
-    if c == 'e' || c == 'E' {
-        str::push_char(rslt, c);
-        rdr.bump();
-        c = rdr.curr;
-        if c == '-' || c == '+' {
-            str::push_char(rslt, c);
-            rdr.bump();
-        }
-        let exponent = scan_digits(rdr, 10u);
-        if str::len(exponent) > 0u {
-            ret some(rslt + exponent);
-        } else { rdr.fatal("scan_exponent: bad fp literal"); }
-    } else { ret none::<str>; }
-}
-
-fn scan_digits(rdr: reader, radix: uint) -> str {
-    let mut rslt = "";
-    loop {
-        let c = rdr.curr;
-        if c == '_' { rdr.bump(); cont; }
-        alt char::to_digit(c, radix) {
-          some(d) {
-            str::push_char(rslt, c);
-            rdr.bump();
-          }
-          _ { ret rslt; }
-        }
-    };
-}
-
-fn scan_number(c: char, rdr: reader) -> token::token {
-    let mut num_str, base = 10u, c = c, n = rdr.next();
-    if c == '0' && n == 'x' {
-        rdr.bump();
-        rdr.bump();
-        base = 16u;
-    } else if c == '0' && n == 'b' {
-        rdr.bump();
-        rdr.bump();
-        base = 2u;
-    }
-    num_str = scan_digits(rdr, base);
-    c = rdr.curr;
-    rdr.next();
-    if c == 'u' || c == 'i' {
-        let signed = c == 'i';
-        let mut tp = {
-            if signed { either::left(ast::ty_i) }
-            else { either::right(ast::ty_u) }
-        };
-        rdr.bump();
-        c = rdr.curr;
-        if c == '8' {
-            rdr.bump();
-            tp = if signed { either::left(ast::ty_i8) }
-                      else { either::right(ast::ty_u8) };
-        }
-        n = rdr.next();
-        if c == '1' && n == '6' {
-            rdr.bump();
-            rdr.bump();
-            tp = if signed { either::left(ast::ty_i16) }
-                      else { either::right(ast::ty_u16) };
-        } else if c == '3' && n == '2' {
-            rdr.bump();
-            rdr.bump();
-            tp = if signed { either::left(ast::ty_i32) }
-                      else { either::right(ast::ty_u32) };
-        } else if c == '6' && n == '4' {
-            rdr.bump();
-            rdr.bump();
-            tp = if signed { either::left(ast::ty_i64) }
-                      else { either::right(ast::ty_u64) };
-        }
-        if str::len(num_str) == 0u {
-            rdr.fatal("no valid digits found for number");
-        }
-        let parsed = option::get(u64::from_str_radix(num_str, base as u64));
-        alt tp {
-          either::left(t) { ret token::LIT_INT(parsed as i64, t); }
-          either::right(t) { ret token::LIT_UINT(parsed, t); }
-        }
-    }
-    let mut is_float = false;
-    if rdr.curr == '.' && !(is_alpha(rdr.next()) || rdr.next() == '_') {
-        is_float = true;
-        rdr.bump();
-        let dec_part = scan_digits(rdr, 10u);
-        num_str += "." + dec_part;
-    }
-    alt scan_exponent(rdr) {
-      some(s) {
-        is_float = true;
-        num_str += s;
-      }
-      none {}
-    }
-    if rdr.curr == 'f' {
-        rdr.bump();
-        c = rdr.curr;
-        n = rdr.next();
-        if c == '3' && n == '2' {
-            rdr.bump();
-            rdr.bump();
-            ret token::LIT_FLOAT(intern(*rdr.interner, num_str),
-                                 ast::ty_f32);
-        } else if c == '6' && n == '4' {
-            rdr.bump();
-            rdr.bump();
-            ret token::LIT_FLOAT(intern(*rdr.interner, num_str),
-                                 ast::ty_f64);
-            /* FIXME: if this is out of range for either a 32-bit or
-            64-bit float, it won't be noticed till the back-end (Issue #2252)
-            */
-        } else {
-            is_float = true;
-        }
-    }
-    if is_float {
-        ret token::LIT_FLOAT(interner::intern(*rdr.interner, num_str),
-                             ast::ty_f);
-    } else {
-        if str::len(num_str) == 0u {
-            rdr.fatal("no valid digits found for number");
-        }
-        let parsed = option::get(u64::from_str_radix(num_str, base as u64));
-        ret token::LIT_INT(parsed as i64, ast::ty_i);
-    }
-}
-
-fn scan_numeric_escape(rdr: reader, n_hex_digits: uint) -> char {
-    let mut accum_int = 0, i = n_hex_digits;
-    while i != 0u {
-        let n = rdr.curr;
-        rdr.bump();
-        if !is_hex_digit(n) {
-            rdr.fatal(#fmt["illegal numeric character escape: %d", n as int]);
-        }
-        accum_int *= 16;
-        accum_int += hex_digit_val(n);
-        i -= 1u;
-    }
-    ret accum_int as char;
-}
-
-fn next_token(rdr: reader) -> {tok: token::token, chpos: uint, bpos: uint} {
-    consume_whitespace_and_comments(rdr);
-    let start_chpos = rdr.chpos;
-    let start_bpos = rdr.pos;
-    let tok = if rdr.is_eof() { token::EOF } else { next_token_inner(rdr) };
-    ret {tok: tok, chpos: start_chpos, bpos: start_bpos};
-}
-
-fn next_token_inner(rdr: reader) -> token::token {
-    let mut accum_str = "";
-    let mut c = rdr.curr;
-    if (c >= 'a' && c <= 'z')
-        || (c >= 'A' && c <= 'Z')
-        || c == '_'
-        || (c > 'z' && char::is_XID_start(c)) {
-        while (c >= 'a' && c <= 'z')
-            || (c >= 'A' && c <= 'Z')
-            || (c >= '0' && c <= '9')
-            || c == '_'
-            || (c > 'z' && char::is_XID_continue(c)) {
-            str::push_char(accum_str, c);
-            rdr.bump();
-            c = rdr.curr;
-        }
-        if str::eq(accum_str, "_") { ret token::UNDERSCORE; }
-        let is_mod_name = c == ':' && rdr.next() == ':';
-
-        // FIXME: perform NFKC normalization here. (Issue #2253)
-        ret token::IDENT(interner::intern::<str>(*rdr.interner,
-                                                 accum_str), is_mod_name);
-    }
-    if is_dec_digit(c) {
-        ret scan_number(c, rdr);
-    }
-    fn binop(rdr: reader, op: token::binop) -> token::token {
-        rdr.bump();
-        if rdr.curr == '=' {
-            rdr.bump();
-            ret token::BINOPEQ(op);
-        } else { ret token::BINOP(op); }
-    }
-    alt c {
-
-
-
-
-
-      // One-byte tokens.
-      ';' { rdr.bump(); ret token::SEMI; }
-      ',' { rdr.bump(); ret token::COMMA; }
-      '.' {
-        rdr.bump();
-        if rdr.curr == '.' && rdr.next() == '.' {
-            rdr.bump();
-            rdr.bump();
-            ret token::ELLIPSIS;
-        }
-        ret token::DOT;
-      }
-      '(' { rdr.bump(); ret token::LPAREN; }
-      ')' { rdr.bump(); ret token::RPAREN; }
-      '{' { rdr.bump(); ret token::LBRACE; }
-      '}' { rdr.bump(); ret token::RBRACE; }
-      '[' { rdr.bump(); ret token::LBRACKET; }
-      ']' { rdr.bump(); ret token::RBRACKET; }
-      '@' { rdr.bump(); ret token::AT; }
-      '#' { rdr.bump(); ret token::POUND; }
-      '~' { rdr.bump(); ret token::TILDE; }
-      ':' {
-        rdr.bump();
-        if rdr.curr == ':' {
-            rdr.bump();
-            ret token::MOD_SEP;
-        } else { ret token::COLON; }
-      }
-
-      '$' { rdr.bump(); ret token::DOLLAR; }
-
-
-
-
-
-      // Multi-byte tokens.
-      '=' {
-        rdr.bump();
-        if rdr.curr == '=' {
-            rdr.bump();
-            ret token::EQEQ;
-        } else { ret token::EQ; }
-      }
-      '!' {
-        rdr.bump();
-        if rdr.curr == '=' {
-            rdr.bump();
-            ret token::NE;
-        } else { ret token::NOT; }
-      }
-      '<' {
-        rdr.bump();
-        alt rdr.curr {
-          '=' { rdr.bump(); ret token::LE; }
-          '<' { ret binop(rdr, token::SHL); }
-          '-' {
-            rdr.bump();
-            alt rdr.curr {
-              '>' { rdr.bump(); ret token::DARROW; }
-              _ { ret token::LARROW; }
-            }
-          }
-          _ { ret token::LT; }
-        }
-      }
-      '>' {
-        rdr.bump();
-        alt rdr.curr {
-          '=' { rdr.bump(); ret token::GE; }
-          '>' { ret binop(rdr, token::SHR); }
-          _ { ret token::GT; }
-        }
-      }
-      '\'' {
-        rdr.bump();
-        let mut c2 = rdr.curr;
-        rdr.bump();
-        if c2 == '\\' {
-            let escaped = rdr.curr;
-            rdr.bump();
-            alt escaped {
-              'n' { c2 = '\n'; }
-              'r' { c2 = '\r'; }
-              't' { c2 = '\t'; }
-              '\\' { c2 = '\\'; }
-              '\'' { c2 = '\''; }
-              'x' { c2 = scan_numeric_escape(rdr, 2u); }
-              'u' { c2 = scan_numeric_escape(rdr, 4u); }
-              'U' { c2 = scan_numeric_escape(rdr, 8u); }
-              c2 {
-                rdr.fatal(#fmt["unknown character escape: %d", c2 as int]);
-              }
-            }
-        }
-        if rdr.curr != '\'' {
-            rdr.fatal("unterminated character constant");
-        }
-        rdr.bump(); // advance curr past token
-        ret token::LIT_INT(c2 as i64, ast::ty_char);
-      }
-      '"' {
-        let n = rdr.chpos;
-        rdr.bump();
-        while rdr.curr != '"' {
-            if rdr.is_eof() {
-                rdr.fatal(#fmt["unterminated double quote string: %s",
-                             rdr.get_str_from(n)]);
-            }
-
-            let ch = rdr.curr;
-            rdr.bump();
-            alt ch {
-              '\\' {
-                let escaped = rdr.curr;
-                rdr.bump();
-                alt escaped {
-                  'n' { str::push_char(accum_str, '\n'); }
-                  'r' { str::push_char(accum_str, '\r'); }
-                  't' { str::push_char(accum_str, '\t'); }
-                  '\\' { str::push_char(accum_str, '\\'); }
-                  '"' { str::push_char(accum_str, '"'); }
-                  '\n' { consume_whitespace(rdr); }
-                  'x' {
-                    str::push_char(accum_str, scan_numeric_escape(rdr, 2u));
-                  }
-                  'u' {
-                    str::push_char(accum_str, scan_numeric_escape(rdr, 4u));
-                  }
-                  'U' {
-                    str::push_char(accum_str, scan_numeric_escape(rdr, 8u));
-                  }
-                  c2 {
-                    rdr.fatal(#fmt["unknown string escape: %d", c2 as int]);
-                  }
-                }
-              }
-              _ { str::push_char(accum_str, ch); }
-            }
-        }
-        rdr.bump();
-        ret token::LIT_STR(interner::intern::<str>(*rdr.interner,
-                                                   accum_str));
-      }
-      '-' {
-        if rdr.next() == '>' {
-            rdr.bump();
-            rdr.bump();
-            ret token::RARROW;
-        } else { ret binop(rdr, token::MINUS); }
-      }
-      '&' {
-        if rdr.next() == '&' {
-            rdr.bump();
-            rdr.bump();
-            ret token::ANDAND;
-        } else { ret binop(rdr, token::AND); }
-      }
-      '|' {
-        alt rdr.next() {
-          '|' { rdr.bump(); rdr.bump(); ret token::OROR; }
-          _ { ret binop(rdr, token::OR); }
-        }
-      }
-      '+' { ret binop(rdr, token::PLUS); }
-      '*' { ret binop(rdr, token::STAR); }
-      '/' { ret binop(rdr, token::SLASH); }
-      '^' { ret binop(rdr, token::CARET); }
-      '%' { ret binop(rdr, token::PERCENT); }
-      c { rdr.fatal(#fmt["unknown start of token: %d", c as int]); }
-    }
-}
-
-fn consume_whitespace(rdr: reader) {
-    while is_whitespace(rdr.curr) && !rdr.is_eof() { rdr.bump(); }
-}
-
-
-//
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
-//
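One self-contained piece of the lexer above is scan_numeric_escape, which consumes exactly n hex digits after a \x, \u, or \U escape and converts the accumulated value to a character. A sketch of the same idea in present-day Rust, reading from a plain char iterator and returning Option instead of calling fatal on bad input, might read:

    // Sketch only: fixed-width hex escape (\xNN, \uNNNN, \UNNNNNNNN) over a
    // char iterator instead of the reader type above.
    fn scan_numeric_escape(chars: &mut impl Iterator<Item = char>,
                           n_hex_digits: u32) -> Option<char> {
        let mut accum: u32 = 0;
        for _ in 0..n_hex_digits {
            let c = chars.next()?;          // unexpected end of input -> None
            let d = c.to_digit(16)?;        // non-hex digit -> None
            accum = accum * 16 + d;
        }
        char::from_u32(accum)               // invalid scalar value -> None
    }

    // scan_numeric_escape(&mut "41".chars(), 2) == Some('A')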
diff --git a/src/librustsyntax/parse/parser.rs b/src/librustsyntax/parse/parser.rs
deleted file mode 100644
index 8594aed9776..00000000000
--- a/src/librustsyntax/parse/parser.rs
+++ /dev/null
@@ -1,2557 +0,0 @@
-import result::result;
-import either::{either, left, right};
-import std::map::{hashmap, str_hash};
-import token::{can_begin_expr, is_ident, is_plain_ident};
-import codemap::{span,fss_none};
-import util::interner;
-import ast_util::{spanned, mk_sp, ident_to_path, operator_prec};
-import ast::*;
-import lexer::reader;
-import prec::{as_prec, token_to_binop};
-import attr::parser_attr;
-import common::*;
-import dvec::{dvec, extensions};
-
-export file_type;
-export parser;
-export parse_expr;
-export parse_pat;
-
-// FIXME: #ast expects to find this here but it's actually defined in `parse`
-// Fixing this will be easier when we have export decls on individual items --
-// then parse can export this publicly, and everything else crate-visibly.
-// (See #1893)
-import parse_from_source_str;
-export parse_from_source_str;
-
-// TODO: remove these once we go around a snapshot cycle.
-// These are here for the old way that #ast (qquote.rs) worked
-fn parse_expr(p: parser) -> @ast::expr { p.parse_expr() }
-fn parse_pat(p: parser) -> @ast::pat { p.parse_pat() }
-
-
-enum restriction {
-    UNRESTRICTED,
-    RESTRICT_STMT_EXPR,
-    RESTRICT_NO_CALL_EXPRS,
-    RESTRICT_NO_BAR_OP,
-}
-
-enum file_type { CRATE_FILE, SOURCE_FILE, }
-
-
-// We don't allow single-entry tuples in the true AST; that indicates a
-// parenthesized expression.  However, we preserve them temporarily while
-// parsing because `(while{...})+3` parses differently from `while{...}+3`.
-//
-// To reflect the fact that the @expr is not a true expr that should be
-// part of the AST, we wrap such expressions in the pexpr enum.  They
-// can then be converted to true expressions by a call to `to_expr()`.
-enum pexpr {
-    pexpr(@expr),
-}
-
-/*
-  So that we can distinguish a class ctor or dtor
-  from other class members
- */
-enum class_contents { ctor_decl(fn_decl, blk, codemap::span),
-                      dtor_decl(blk, codemap::span),
-                      members([@class_member]) }
-
-type arg_or_capture_item = either<arg, capture_item>;
-type item_info = (ident, item_, option<[attribute]>);
-
-class parser {
-    let sess: parse_sess;
-    let cfg: crate_cfg;
-    let file_type: file_type;
-    let mut token: token::token;
-    let mut span: span;
-    let mut last_span: span;
-    let buffer: dvec<{tok: token::token, span: span}>;
-    let mut restriction: restriction;
-    let reader: reader;
-    let keywords: hashmap<str, ()>;
-    let restricted_keywords: hashmap<str, ()>;
-
-    new(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader,
-        ftype: file_type) {
-        let tok0 = lexer::next_token(rdr);
-        let span0 = ast_util::mk_sp(tok0.chpos, rdr.chpos);
-        self.sess = sess;
-        self.cfg = cfg;
-        self.file_type = ftype;
-        self.token = tok0.tok;
-        self.span = span0;
-        self.last_span = span0;
-        self.buffer = dvec::dvec();
-        self.restriction = UNRESTRICTED;
-        self.reader = rdr;
-        self.keywords = token::keyword_table();
-        self.restricted_keywords = token::restricted_keyword_table();
-    }
-
-    //TODO: uncomment when destructors work
-    //drop {} /* do not copy the parser; its state is tied to outside state */
-
-    fn bump() {
-        self.last_span = self.span;
-        if self.buffer.len() == 0u {
-            let next = lexer::next_token(self.reader);
-            self.token = next.tok;
-            self.span = mk_sp(next.chpos, self.reader.chpos);
-        } else {
-            let next = self.buffer.shift();
-            self.token = next.tok;
-            self.span = next.span;
-        }
-    }
-    fn swap(next: token::token, lo: uint, hi: uint) {
-        self.token = next;
-        self.span = mk_sp(lo, hi);
-    }
-    fn look_ahead(distance: uint) -> token::token {
-        while self.buffer.len() < distance {
-            let next = lexer::next_token(self.reader);
-            let sp = mk_sp(next.chpos, self.reader.chpos);
-            self.buffer.push({tok: next.tok, span: sp});
-        }
-        ret self.buffer[distance - 1u].tok;
-    }
-    fn fatal(m: str) -> ! {
-        self.sess.span_diagnostic.span_fatal(self.span, m)
-    }
-    fn span_fatal(sp: span, m: str) -> ! {
-        self.sess.span_diagnostic.span_fatal(sp, m)
-    }
-    fn bug(m: str) -> ! {
-        self.sess.span_diagnostic.span_bug(self.span, m)
-    }
-    fn warn(m: str) {
-        self.sess.span_diagnostic.span_warn(self.span, m)
-    }
-    fn get_str(i: token::str_num) -> str {
-        interner::get(*self.reader.interner, i)
-    }
-    fn get_id() -> node_id { next_node_id(self.sess) }
-
-    fn parse_ty_fn(purity: ast::purity) -> ty_ {
-        let proto = if self.eat_keyword("native") {
-            self.expect_keyword("fn");
-            ast::proto_bare
-        } else {
-            self.expect_keyword("fn");
-            self.parse_fn_ty_proto()
-        };
-        ty_fn(proto, self.parse_ty_fn_decl(purity))
-    }
-
-    fn parse_ty_fn_decl(purity: ast::purity) -> fn_decl {
-        let inputs =
-            self.parse_seq(token::LPAREN, token::RPAREN,
-                           seq_sep(token::COMMA)) { |p|
-            let mode = p.parse_arg_mode();
-            let name = if is_plain_ident(p.token)
-                && p.look_ahead(1u) == token::COLON {
-
-                let name = self.parse_value_ident();
-                p.bump();
-                name
-            } else { "" };
-
-            {mode: mode, ty: p.parse_ty(false), ident: name,
-             id: p.get_id()}
-        };
-        // FIXME: constrs is empty because right now, higher-order functions
-        // can't have constrained types.
-        // Not sure whether that would be desirable anyway. See #34 for the
-        // story on constrained types.
-        let constrs: [@constr] = [];
-        let (ret_style, ret_ty) = self.parse_ret_ty();
-        ret {inputs: inputs.node, output: ret_ty,
-             purity: purity, cf: ret_style,
-             constraints: constrs};
-    }
-
-    fn parse_ty_methods() -> [ty_method] {
-        (self.parse_seq(token::LBRACE, token::RBRACE, seq_sep_none()) { |p|
-            let attrs = p.parse_outer_attributes();
-            let flo = p.span.lo;
-            let pur = p.parse_fn_purity();
-            let ident = p.parse_method_name();
-            let tps = p.parse_ty_params();
-            let d = p.parse_ty_fn_decl(pur), fhi = p.last_span.hi;
-            self.expect(token::SEMI);
-            {ident: ident, attrs: attrs, decl: {purity: pur with d}, tps: tps,
-             span: mk_sp(flo, fhi)}
-        }).node
-    }
-
-    fn parse_mt() -> mt {
-        let mutbl = self.parse_mutability();
-        let t = self.parse_ty(false);
-        ret {ty: t, mutbl: mutbl};
-    }
-
-    fn parse_ty_field() -> ty_field {
-        let lo = self.span.lo;
-        let mutbl = self.parse_mutability();
-        let id = self.parse_ident();
-        self.expect(token::COLON);
-        let ty = self.parse_ty(false);
-        ret spanned(lo, ty.span.hi, {ident: id, mt: {ty: ty, mutbl: mutbl}});
-    }
-
-    // if i is the jth ident in args, return j
-    // otherwise, fail
-    fn ident_index(args: [arg], i: ident) -> uint {
-        let mut j = 0u;
-        for args.each {|a| if a.ident == i { ret j; } j += 1u; }
-        self.fatal("unbound variable `" + i + "` in constraint arg");
-    }
-
-    fn parse_type_constr_arg() -> @ty_constr_arg {
-        let sp = self.span;
-        let mut carg = carg_base;
-        self.expect(token::BINOP(token::STAR));
-        if self.token == token::DOT {
-            // "*..." notation for record fields
-            self.bump();
-            let pth = self.parse_path_without_tps();
-            carg = carg_ident(pth);
-        }
-        // No literals yet, I guess?
-        ret @{node: carg, span: sp};
-    }
-
-    fn parse_constr_arg(args: [arg]) -> @constr_arg {
-        let sp = self.span;
-        let mut carg = carg_base;
-        if self.token == token::BINOP(token::STAR) {
-            self.bump();
-        } else {
-            let i: ident = self.parse_value_ident();
-            carg = carg_ident(self.ident_index(args, i));
-        }
-        ret @{node: carg, span: sp};
-    }
-
-    fn parse_ty_constr(fn_args: [arg]) -> @constr {
-        let lo = self.span.lo;
-        let path = self.parse_path_without_tps();
-        let args: {node: [@constr_arg], span: span} =
-            self.parse_seq(token::LPAREN, token::RPAREN,
-                           seq_sep(token::COMMA),
-                           {|p| p.parse_constr_arg(fn_args)});
-        ret @spanned(lo, args.span.hi,
-                     {path: path, args: args.node, id: self.get_id()});
-    }
-
-    fn parse_constr_in_type() -> @ty_constr {
-        let lo = self.span.lo;
-        let path = self.parse_path_without_tps();
-        let args: [@ty_constr_arg] =
-            self.parse_seq(token::LPAREN, token::RPAREN,
-                           seq_sep(token::COMMA),
-                           {|p| p.parse_type_constr_arg()}).node;
-        let hi = self.span.lo;
-        let tc: ty_constr_ = {path: path, args: args, id: self.get_id()};
-        ret @spanned(lo, hi, tc);
-    }
-
-
-    fn parse_constrs<T: copy>(pser: fn(parser) -> @constr_general<T>) ->
-        [@constr_general<T>] {
-        let mut constrs: [@constr_general<T>] = [];
-        loop {
-            let constr = pser(self);
-            constrs += [constr];
-            if self.token == token::COMMA { self.bump(); }
-            else { ret constrs; }
-        };
-    }
-
-    fn parse_type_constraints() -> [@ty_constr] {
-        ret self.parse_constrs({|p| p.parse_constr_in_type()});
-    }
-
-    fn parse_ret_ty() -> (ret_style, @ty) {
-        ret if self.eat(token::RARROW) {
-            let lo = self.span.lo;
-            if self.eat(token::NOT) {
-                (noreturn, @{id: self.get_id(),
-                             node: ty_bot,
-                             span: mk_sp(lo, self.last_span.hi)})
-            } else {
-                (return_val, self.parse_ty(false))
-            }
-        } else {
-            let pos = self.span.lo;
-            (return_val, @{id: self.get_id(),
-                           node: ty_nil,
-                           span: mk_sp(pos, pos)})
-        }
-    }
-
-    fn region_from_name(s: option<str>) -> @region {
-        let r = alt s {
-          some (string) { re_named(string) }
-          none { re_anon }
-        };
-
-        @{id: self.get_id(), node: r}
-    }
-
-    // Parses something like "&x"
-    fn parse_region() -> @region {
-        self.expect(token::BINOP(token::AND));
-        alt self.token {
-          token::IDENT(sid, _) {
-            self.bump();
-            let n = self.get_str(sid);
-            self.region_from_name(some(n))
-          }
-          _ {
-            self.region_from_name(none)
-          }
-        }
-    }
-
-    // Parses something like "&x." (note the trailing dot)
-    fn parse_region_dot() -> @region {
-        let name =
-            alt self.token {
-              token::IDENT(sid, _) if self.look_ahead(1u) == token::DOT {
-                self.bump(); self.bump();
-                some(self.get_str(sid))
-              }
-              _ { none }
-            };
-        self.region_from_name(name)
-    }
-
-    fn parse_ty(colons_before_params: bool) -> @ty {
-        let lo = self.span.lo;
-
-        alt self.maybe_parse_dollar_mac() {
-          some(e) {
-            ret @{id: self.get_id(),
-                  node: ty_mac(spanned(lo, self.span.hi, e)),
-                  span: mk_sp(lo, self.span.hi)};
-          }
-          none {}
-        }
-
-        let t = if self.token == token::LPAREN {
-            self.bump();
-            if self.token == token::RPAREN {
-                self.bump();
-                ty_nil
-            } else {
-                let mut ts = [self.parse_ty(false)];
-                while self.token == token::COMMA {
-                    self.bump();
-                    ts += [self.parse_ty(false)];
-                }
-                let t = if vec::len(ts) == 1u { ts[0].node }
-                else { ty_tup(ts) };
-                self.expect(token::RPAREN);
-                t
-            }
-        } else if self.token == token::AT {
-            self.bump();
-            ty_box(self.parse_mt())
-        } else if self.token == token::TILDE {
-            self.bump();
-            ty_uniq(self.parse_mt())
-        } else if self.token == token::BINOP(token::STAR) {
-            self.bump();
-            ty_ptr(self.parse_mt())
-        } else if self.token == token::LBRACE {
-            let elems = self.parse_seq(token::LBRACE, token::RBRACE,
-                                       seq_sep_opt(token::COMMA),
-                                       {|p| p.parse_ty_field()});
-            if vec::len(elems.node) == 0u {
-                self.unexpected_last(token::RBRACE);
-            }
-            let hi = elems.span.hi;
-
-            let t = ty_rec(elems.node);
-            if self.token == token::COLON {
-                self.bump();
-                ty_constr(@{id: self.get_id(),
-                            node: t,
-                            span: mk_sp(lo, hi)},
-                          self.parse_type_constraints())
-            } else { t }
-        } else if self.token == token::LBRACKET {
-            self.expect(token::LBRACKET);
-            let t = ty_vec(self.parse_mt());
-            self.expect(token::RBRACKET);
-            t
-        } else if self.token == token::BINOP(token::AND) {
-            self.bump();
-            let region = self.parse_region_dot();
-            let mt = self.parse_mt();
-            ty_rptr(region, mt)
-        } else if self.eat_keyword("pure") {
-            self.parse_ty_fn(ast::pure_fn)
-        } else if self.eat_keyword("unsafe") {
-            self.parse_ty_fn(ast::unsafe_fn)
-        } else if self.is_keyword("fn") {
-            self.parse_ty_fn(ast::impure_fn)
-        } else if self.eat_keyword("native") {
-            self.expect_keyword("fn");
-            ty_fn(proto_bare, self.parse_ty_fn_decl(ast::impure_fn))
-        } else if self.token == token::MOD_SEP || is_ident(self.token) {
-            let path = self.parse_path_with_tps(colons_before_params);
-            ty_path(path, self.get_id())
-        } else { self.fatal("expecting type"); };
-
-        let sp = mk_sp(lo, self.last_span.hi);
-        ret @{id: self.get_id(),
-              node: alt self.maybe_parse_vstore() {
-                // Consider a vstore suffix like /@ or /~
-                none { t }
-                some(v) {
-                  ty_vstore(@{id: self.get_id(), node:t, span: sp}, v)
-                } },
-              span: sp}
-    }
-
-    fn parse_arg_mode() -> mode {
-        if self.eat(token::BINOP(token::AND)) {
-            expl(by_mutbl_ref)
-        } else if self.eat(token::BINOP(token::MINUS)) {
-            expl(by_move)
-        } else if self.eat(token::ANDAND) {
-            expl(by_ref)
-        } else if self.eat(token::BINOP(token::PLUS)) {
-            if self.eat(token::BINOP(token::PLUS)) {
-                expl(by_val)
-            } else {
-                expl(by_copy)
-            }
-        } else { infer(self.get_id()) }
-    }
-
-    fn parse_capture_item_or(parse_arg_fn: fn(parser) -> arg_or_capture_item)
-        -> arg_or_capture_item {
-
-        fn parse_capture_item(p:parser, is_move: bool) -> capture_item {
-            let sp = mk_sp(p.span.lo, p.span.hi);
-            let ident = p.parse_ident();
-            @{id: p.get_id(), is_move: is_move, name: ident, span: sp}
-        }
-
-        if self.eat_keyword("move") {
-            either::right(parse_capture_item(self, true))
-        } else if self.eat_keyword("copy") {
-            either::right(parse_capture_item(self, false))
-        } else {
-            parse_arg_fn(self)
-        }
-    }
-
-    fn parse_arg() -> arg_or_capture_item {
-        let m = self.parse_arg_mode();
-        let i = self.parse_value_ident();
-        self.expect(token::COLON);
-        let t = self.parse_ty(false);
-        either::left({mode: m, ty: t, ident: i, id: self.get_id()})
-    }
-
-    fn parse_arg_or_capture_item() -> arg_or_capture_item {
-        self.parse_capture_item_or() {|p| p.parse_arg() }
-    }
-
-    fn parse_fn_block_arg() -> arg_or_capture_item {
-        self.parse_capture_item_or() {|p|
-            let m = p.parse_arg_mode();
-            let i = p.parse_value_ident();
-            let t = if p.eat(token::COLON) {
-                p.parse_ty(false)
-            } else {
-                @{id: p.get_id(),
-                  node: ty_infer,
-                  span: mk_sp(p.span.lo, p.span.hi)}
-            };
-            either::left({mode: m, ty: t, ident: i, id: p.get_id()})
-        }
-    }
-
-    fn maybe_parse_dollar_mac() -> option<mac_> {
-        alt self.token {
-          token::DOLLAR {
-            let lo = self.span.lo;
-            self.bump();
-            alt self.token {
-              token::LIT_INT(num, ty_i) {
-                self.bump();
-                some(mac_var(num as uint))
-              }
-              token::LPAREN {
-                self.bump();
-                let e = self.parse_expr();
-                self.expect(token::RPAREN);
-                let hi = self.last_span.hi;
-                some(mac_aq(mk_sp(lo,hi), e))
-              }
-              _ {
-                self.fatal("expected `(` or integer literal");
-              }
-            }
-          }
-          _ {none}
-        }
-    }
-
-    fn maybe_parse_vstore() -> option<vstore> {
-        if self.token == token::BINOP(token::SLASH) {
-            self.bump();
-            alt self.token {
-              token::AT {
-                self.bump(); some(vstore_box)
-              }
-              token::TILDE {
-                self.bump(); some(vstore_uniq)
-              }
-              token::UNDERSCORE {
-                self.bump(); some(vstore_fixed(none))
-              }
-              token::LIT_INT(i, ty_i) if i >= 0i64 {
-                self.bump(); some(vstore_fixed(some(i as uint)))
-              }
-              token::BINOP(token::AND) {
-                some(vstore_slice(self.parse_region()))
-              }
-              _ {
-                none
-              }
-            }
-        } else {
-            none
-        }
-    }
-
-    fn lit_from_token(tok: token::token) -> lit_ {
-        alt tok {
-          token::LIT_INT(i, it) { lit_int(i, it) }
-          token::LIT_UINT(u, ut) { lit_uint(u, ut) }
-          token::LIT_FLOAT(s, ft) { lit_float(self.get_str(s), ft) }
-          token::LIT_STR(s) { lit_str(self.get_str(s)) }
-          token::LPAREN { self.expect(token::RPAREN); lit_nil }
-          _ { self.unexpected_last(tok); }
-        }
-    }
-
-    fn parse_lit() -> lit {
-        let lo = self.span.lo;
-        let lit = if self.eat_keyword("true") {
-            lit_bool(true)
-        } else if self.eat_keyword("false") {
-            lit_bool(false)
-        } else {
-            let tok = self.token;
-            self.bump();
-            self.lit_from_token(tok)
-        };
-        ret {node: lit, span: mk_sp(lo, self.last_span.hi)};
-    }
-
-    fn parse_path_without_tps() -> @path {
-        self.parse_path_without_tps_({|p| p.parse_ident()},
-                                     {|p| p.parse_ident()})
-    }
-
-    fn parse_path_without_tps_(
-        parse_ident: fn(parser) -> ident,
-        parse_last_ident: fn(parser) -> ident) -> @path {
-
-        let lo = self.span.lo;
-        let global = self.eat(token::MOD_SEP);
-        let mut ids = [];
-        loop {
-            let is_not_last =
-                self.look_ahead(2u) != token::LT
-                && self.look_ahead(1u) == token::MOD_SEP;
-
-            if is_not_last {
-                ids += [parse_ident(self)];
-                self.expect(token::MOD_SEP);
-            } else {
-                ids += [parse_last_ident(self)];
-                break;
-            }
-        }
-        @{span: mk_sp(lo, self.last_span.hi), global: global,
-          idents: ids, rp: none, types: []}
-    }
-
-    fn parse_value_path() -> @path {
-        self.parse_path_without_tps_({|p| p.parse_ident()},
-                                     {|p| p.parse_value_ident()})
-    }
-
-    fn parse_path_with_tps(colons: bool) -> @path {
-        #debug["parse_path_with_tps(colons=%b)", colons];
-
-        let lo = self.span.lo;
-        let path = self.parse_path_without_tps();
-        if colons && !self.eat(token::MOD_SEP) {
-            ret path;
-        }
-
-        // Parse the region parameter, if any, which will
-        // be written "foo/&x"
-        let rp = {
-            // Hack: avoid parsing vstores like /@ and /~.  This is painful
-            // because the notation for region bounds and the notation for
-            // vstores is... um... the same.  I guess that's my fault.  This
-            // is still not ideal as for str/& we end up parsing more than we
-            // ought to and have to sort it out later.
-            if self.token == token::BINOP(token::SLASH)
-                && self.look_ahead(1u) == token::BINOP(token::AND) {
-
-                self.expect(token::BINOP(token::SLASH));
-                some(self.parse_region())
-            } else {
-                none
-            }
-        };
-
-        // Parse any type parameters which may appear:
-        let tps = {
-            if self.token == token::LT {
-                self.parse_seq_lt_gt(some(token::COMMA),
-                                     {|p| p.parse_ty(false)})
-            } else {
-                {node: [], span: path.span}
-            }
-        };
-
-        ret @{span: mk_sp(lo, tps.span.hi),
-              rp: rp,
-              types: tps.node with *path};
-    }
-
-    fn parse_mutability() -> mutability {
-        if self.eat_keyword("mut") {
-            m_mutbl
-        } else if self.eat_keyword("mut") {
-            m_mutbl
-        } else if self.eat_keyword("const") {
-            m_const
-        } else {
-            m_imm
-        }
-    }
-
-    fn parse_field(sep: token::token) -> field {
-        let lo = self.span.lo;
-        let m = self.parse_mutability();
-        let i = self.parse_ident();
-        self.expect(sep);
-        let e = self.parse_expr();
-        ret spanned(lo, e.span.hi, {mutbl: m, ident: i, expr: e});
-    }
-
-    fn mk_expr(lo: uint, hi: uint, +node: expr_) -> @expr {
-        ret @{id: self.get_id(), node: node, span: mk_sp(lo, hi)};
-    }
-
-    fn mk_mac_expr(lo: uint, hi: uint, m: mac_) -> @expr {
-        ret @{id: self.get_id(),
-              node: expr_mac({node: m, span: mk_sp(lo, hi)}),
-              span: mk_sp(lo, hi)};
-    }
-
-    fn mk_lit_u32(i: u32) -> @expr {
-        let span = self.span;
-        let lv_lit = @{node: lit_uint(i as u64, ty_u32),
-                       span: span};
-
-        ret @{id: self.get_id(), node: expr_lit(lv_lit), span: span};
-    }
-
-    fn mk_pexpr(lo: uint, hi: uint, node: expr_) -> pexpr {
-        ret pexpr(self.mk_expr(lo, hi, node));
-    }
-
-    fn to_expr(e: pexpr) -> @expr {
-        alt e.node {
-          expr_tup(es) if vec::len(es) == 1u { es[0u] }
-          _ { *e }
-        }
-    }
-
-    fn parse_bottom_expr() -> pexpr {
-        let lo = self.span.lo;
-        let mut hi = self.span.hi;
-
-        let mut ex: expr_;
-
-        alt self.maybe_parse_dollar_mac() {
-          some(x) {ret pexpr(self.mk_mac_expr(lo, self.span.hi, x));}
-          _ {}
-        }
-
-        if self.token == token::LPAREN {
-            self.bump();
-            if self.token == token::RPAREN {
-                hi = self.span.hi;
-                self.bump();
-                let lit = @spanned(lo, hi, lit_nil);
-                ret self.mk_pexpr(lo, hi, expr_lit(lit));
-            }
-            let mut es = [self.parse_expr()];
-            while self.token == token::COMMA {
-                self.bump(); es += [self.parse_expr()];
-            }
-            hi = self.span.hi;
-            self.expect(token::RPAREN);
-
-            // Note: we retain the expr_tup() even for simple
-            // parenthesized expressions, but only for a "little while".
-            // This is so that wrappers around parse_bottom_expr()
-            // can tell whether the expression was parenthesized or not,
-            // which affects expr_is_complete().
-            ret self.mk_pexpr(lo, hi, expr_tup(es));
-        } else if self.token == token::LBRACE {
-            self.bump();
-            if self.is_keyword("mut") ||
-                is_plain_ident(self.token)
-                && self.look_ahead(1u) == token::COLON {
-                let mut fields = [self.parse_field(token::COLON)];
-                let mut base = none;
-                while self.token != token::RBRACE {
-                    if self.eat_keyword("with") {
-                        base = some(self.parse_expr()); break;
-                    }
-                    self.expect(token::COMMA);
-                    if self.token == token::RBRACE {
-                        // a record may end with an optional trailing comma
-                        break;
-                    }
-                    fields += [self.parse_field(token::COLON)];
-                }
-                hi = self.span.hi;
-                self.expect(token::RBRACE);
-                ex = expr_rec(fields, base);
-            } else if token::is_bar(self.token) {
-                ret pexpr(self.parse_fn_block_expr());
-            } else {
-                let blk = self.parse_block_tail(lo, default_blk);
-                ret self.mk_pexpr(blk.span.lo, blk.span.hi, expr_block(blk));
-            }
-        } else if self.eat_keyword("new") {
-            self.expect(token::LPAREN);
-            let r = self.parse_expr();
-            self.expect(token::RPAREN);
-            let v = self.parse_expr();
-            ret self.mk_pexpr(lo, self.span.hi,
-                              expr_new(r, self.get_id(), v));
-        } else if self.eat_keyword("if") {
-            ret pexpr(self.parse_if_expr());
-        } else if self.eat_keyword("for") {
-            ret pexpr(self.parse_for_expr());
-        } else if self.eat_keyword("while") {
-            ret pexpr(self.parse_while_expr());
-        } else if self.eat_keyword("loop") {
-            ret pexpr(self.parse_loop_expr());
-        } else if self.eat_keyword("alt") {
-            ret pexpr(self.parse_alt_expr());
-        } else if self.eat_keyword("fn") {
-            let proto = self.parse_fn_ty_proto();
-            alt proto {
-              proto_bare { self.fatal("fn expr are deprecated, use fn@"); }
-              proto_any { self.fatal("fn* cannot be used in an expression"); }
-              _ { /* fallthrough */ }
-            }
-            ret pexpr(self.parse_fn_expr(proto));
-        } else if self.eat_keyword("unchecked") {
-            ret pexpr(self.parse_block_expr(lo, unchecked_blk));
-        } else if self.eat_keyword("unsafe") {
-            ret pexpr(self.parse_block_expr(lo, unsafe_blk));
-        } else if self.token == token::LBRACKET {
-            self.bump();
-            let mutbl = self.parse_mutability();
-            let es =
-                self.parse_seq_to_end(token::RBRACKET, seq_sep(token::COMMA),
-                                      {|p| p.parse_expr()});
-            hi = self.span.hi;
-            ex = expr_vec(es, mutbl);
-        } else if self.token == token::POUND
-            && self.look_ahead(1u) == token::LT {
-            self.bump();
-            self.bump();
-            let ty = self.parse_ty(false);
-            self.expect(token::GT);
-
-            /* hack: early return to take advantage of specialized function */
-            ret pexpr(self.mk_mac_expr(lo, self.span.hi,
-                                       mac_embed_type(ty)));
-        } else if self.token == token::POUND
-            && self.look_ahead(1u) == token::LBRACE {
-            self.bump();
-            self.bump();
-            let blk = mac_embed_block(
-                self.parse_block_tail(lo, default_blk));
-            ret pexpr(self.mk_mac_expr(lo, self.span.hi, blk));
-        } else if self.token == token::ELLIPSIS {
-            self.bump();
-            ret pexpr(self.mk_mac_expr(lo, self.span.hi, mac_ellipsis));
-        } else if self.token == token::POUND {
-            let ex_ext = self.parse_syntax_ext();
-            hi = ex_ext.span.hi;
-            ex = ex_ext.node;
-        } else if self.eat_keyword("bind") {
-            let e = self.parse_expr_res(RESTRICT_NO_CALL_EXPRS);
-            let es = self.parse_seq(token::LPAREN, token::RPAREN,
-                                    seq_sep(token::COMMA),
-                                    {|p| p.parse_expr_or_hole()});
-            hi = es.span.hi;
-            ex = expr_bind(e, es.node);
-        } else if self.eat_keyword("fail") {
-            if can_begin_expr(self.token) {
-                let e = self.parse_expr();
-                hi = e.span.hi;
-                ex = expr_fail(some(e));
-            } else { ex = expr_fail(none); }
-        } else if self.eat_keyword("log") {
-            self.expect(token::LPAREN);
-            let lvl = self.parse_expr();
-            self.expect(token::COMMA);
-            let e = self.parse_expr();
-            ex = expr_log(2, lvl, e);
-            hi = self.span.hi;
-            self.expect(token::RPAREN);
-        } else if self.eat_keyword("assert") {
-            let e = self.parse_expr();
-            ex = expr_assert(e);
-            hi = e.span.hi;
-        } else if self.eat_keyword("check") {
-            /* Should be a predicate (pure boolean function) applied to
-            arguments that are all either slot variables or literals,
-            but the typechecker enforces that. */
-            let e = self.parse_expr();
-            hi = e.span.hi;
-            ex = expr_check(checked_expr, e);
-        } else if self.eat_keyword("claim") {
-            /* Same rules as check, except that if check-claims
-            is enabled (a command-line flag), then the parser turns
-            claims into check */
-
-            let e = self.parse_expr();
-            hi = e.span.hi;
-            ex = expr_check(claimed_expr, e);
-        } else if self.eat_keyword("ret") {
-            if can_begin_expr(self.token) {
-                let e = self.parse_expr();
-                hi = e.span.hi;
-                ex = expr_ret(some(e));
-            } else { ex = expr_ret(none); }
-        } else if self.eat_keyword("break") {
-            ex = expr_break;
-            hi = self.span.hi;
-        } else if self.eat_keyword("cont") {
-            ex = expr_cont;
-            hi = self.span.hi;
-        } else if self.eat_keyword("copy") {
-            let e = self.parse_expr();
-            ex = expr_copy(e);
-            hi = e.span.hi;
-        } else if self.token == token::MOD_SEP ||
-            is_ident(self.token) && !self.is_keyword("true") &&
-            !self.is_keyword("false") {
-            let pth = self.parse_path_with_tps(true);
-            hi = pth.span.hi;
-            ex = expr_path(pth);
-        } else {
-            let lit = self.parse_lit();
-            hi = lit.span.hi;
-            ex = expr_lit(@lit);
-        }
-
-        // Vstore is legal following expr_lit(lit_str(...)) and expr_vec(...)
-        // only.
-        alt ex {
-          expr_lit(@{node: lit_str(_), span: _}) |
-          expr_vec(_, _)  {
-            alt self.maybe_parse_vstore() {
-              none { }
-              some(v) {
-                hi = self.span.hi;
-                ex = expr_vstore(self.mk_expr(lo, hi, ex), v);
-              }
-            }
-          }
-          _ { }
-        }
-
-        ret self.mk_pexpr(lo, hi, ex);
-    }
-
-    fn parse_block_expr(lo: uint, blk_mode: blk_check_mode) -> @expr {
-        self.expect(token::LBRACE);
-        let blk = self.parse_block_tail(lo, blk_mode);
-        ret self.mk_expr(blk.span.lo, blk.span.hi, expr_block(blk));
-    }
-
-    fn parse_syntax_ext() -> @expr {
-        let lo = self.span.lo;
-        self.expect(token::POUND);
-        ret self.parse_syntax_ext_naked(lo);
-    }
-
-    fn parse_syntax_ext_naked(lo: uint) -> @expr {
-        alt self.token {
-          token::IDENT(_, _) {}
-          _ { self.fatal("expected a syntax expander name"); }
-        }
-        let pth = self.parse_path_without_tps();
-        // temporary for a backwards-compatible cycle:
-        let sep = seq_sep(token::COMMA);
-        let mut e = none;
-        if (self.token == token::LPAREN || self.token == token::LBRACKET) {
-            let es =
-                if self.token == token::LPAREN {
-                    self.parse_seq(token::LPAREN, token::RPAREN,
-                                   sep, {|p| p.parse_expr()})
-                } else {
-                    self.parse_seq(token::LBRACKET, token::RBRACKET,
-                                   sep, {|p| p.parse_expr()})
-                };
-            let hi = es.span.hi;
-            e = some(self.mk_expr(es.span.lo, hi,
-                                  expr_vec(es.node, m_imm)));
-        }
-        let mut b = none;
-        if self.token == token::LBRACE {
-            self.bump();
-            let lo = self.span.lo;
-            let mut depth = 1u;
-            while (depth > 0u) {
-                alt (self.token) {
-                  token::LBRACE {depth += 1u;}
-                  token::RBRACE {depth -= 1u;}
-                  token::EOF {self.fatal("unexpected EOF in macro body");}
-                  _ {}
-                }
-                self.bump();
-            }
-            let hi = self.last_span.lo;
-            b = some({span: mk_sp(lo,hi)});
-        }
-        ret self.mk_mac_expr(lo, self.span.hi, mac_invoc(pth, e, b));
-    }
-
-    fn parse_dot_or_call_expr() -> pexpr {
-        let b = self.parse_bottom_expr();
-        self.parse_dot_or_call_expr_with(b)
-    }
-
-    fn permits_call() -> bool {
-        ret self.restriction != RESTRICT_NO_CALL_EXPRS;
-    }
-
-    fn parse_dot_or_call_expr_with(e0: pexpr) -> pexpr {
-        let mut e = e0;
-        let lo = e.span.lo;
-        let mut hi;
-        loop {
-            // expr.f
-            if self.eat(token::DOT) {
-                alt self.token {
-                  token::IDENT(i, _) {
-                    hi = self.span.hi;
-                    self.bump();
-                    let tys = if self.eat(token::MOD_SEP) {
-                        self.expect(token::LT);
-                        self.parse_seq_to_gt(some(token::COMMA),
-                                        {|p| p.parse_ty(false)})
-                    } else { [] };
-                    e = self.mk_pexpr(lo, hi, expr_field(self.to_expr(e),
-                                                         self.get_str(i),
-                                                         tys));
-                  }
-                  _ { self.unexpected(); }
-                }
-                cont;
-            }
-            if self.expr_is_complete(e) { break; }
-            alt self.token {
-              // expr(...)
-              token::LPAREN if self.permits_call() {
-                let es_opt = self.parse_seq(token::LPAREN, token::RPAREN,
-                                            seq_sep(token::COMMA),
-                                            {|p| p.parse_expr_or_hole()});
-                hi = es_opt.span.hi;
-
-                let nd =
-                    if vec::any(es_opt.node, {|e| option::is_none(e) }) {
-                        expr_bind(self.to_expr(e), es_opt.node)
-                    } else {
-                        let es = vec::map(es_opt.node) {|e| option::get(e) };
-                        expr_call(self.to_expr(e), es, false)
-                    };
-                e = self.mk_pexpr(lo, hi, nd);
-              }
-
-              // expr {|| ... }
-              token::LBRACE if (token::is_bar(self.look_ahead(1u))
-                                && self.permits_call()) {
-                self.bump();
-                let blk = self.parse_fn_block_expr();
-                alt e.node {
-                  expr_call(f, args, false) {
-                    e = pexpr(@{node: expr_call(f, args + [blk], true)
-                                with *self.to_expr(e)});
-                  }
-                  _ {
-                    e = self.mk_pexpr(lo, self.last_span.hi,
-                                      expr_call(self.to_expr(e), [blk], true));
-                  }
-                }
-              }
-
-              // expr[...]
-              token::LBRACKET {
-                self.bump();
-                let ix = self.parse_expr();
-                hi = ix.span.hi;
-                self.expect(token::RBRACKET);
-                self.get_id(); // see ast_util::op_expr_callee_id
-                e = self.mk_pexpr(lo, hi, expr_index(self.to_expr(e), ix));
-              }
-
-              _ { ret e; }
-            }
-        }
-        ret e;
-    }
-
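-    // Parses a prefix (unary) operator expression: !e, -e, *e (deref),
-    // &e (address-of), @e (box) or ~e (uniq); anything else falls through
-    // to parse_dot_or_call_expr.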
-    fn parse_prefix_expr() -> pexpr {
-        let lo = self.span.lo;
-        let mut hi;
-
-        let mut ex;
-        alt self.token {
-          token::NOT {
-            self.bump();
-            let e = self.to_expr(self.parse_prefix_expr());
-            hi = e.span.hi;
-            self.get_id(); // see ast_util::op_expr_callee_id
-            ex = expr_unary(not, e);
-          }
-          token::BINOP(b) {
-            alt b {
-              token::MINUS {
-                self.bump();
-                let e = self.to_expr(self.parse_prefix_expr());
-                hi = e.span.hi;
-                self.get_id(); // see ast_util::op_expr_callee_id
-                ex = expr_unary(neg, e);
-              }
-              token::STAR {
-                self.bump();
-                let e = self.to_expr(self.parse_prefix_expr());
-                hi = e.span.hi;
-                ex = expr_unary(deref, e);
-              }
-              token::AND {
-                self.bump();
-                let m = self.parse_mutability();
-                let e = self.to_expr(self.parse_prefix_expr());
-                hi = e.span.hi;
-                ex = expr_addr_of(m, e);
-              }
-              _ { ret self.parse_dot_or_call_expr(); }
-            }
-          }
-          token::AT {
-            self.bump();
-            let m = self.parse_mutability();
-            let e = self.to_expr(self.parse_prefix_expr());
-            hi = e.span.hi;
-            ex = expr_unary(box(m), e);
-          }
-          token::TILDE {
-            self.bump();
-            let m = self.parse_mutability();
-            let e = self.to_expr(self.parse_prefix_expr());
-            hi = e.span.hi;
-            ex = expr_unary(uniq(m), e);
-          }
-          _ { ret self.parse_dot_or_call_expr(); }
-        }
-        ret self.mk_pexpr(lo, hi, ex);
-    }
-
-
-    fn parse_binops() -> @expr {
-        ret self.parse_more_binops(self.parse_prefix_expr(), 0u);
-    }
-
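-    // Precedence climbing: folds in any operator that binds tighter than
-    // min_prec, then handles `as` casts at their own precedence level.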
-    fn parse_more_binops(plhs: pexpr, min_prec: uint) ->
-        @expr {
-        let lhs = self.to_expr(plhs);
-        if self.expr_is_complete(plhs) { ret lhs; }
-        let peeked = self.token;
-        if peeked == token::BINOP(token::OR) &&
-            self.restriction == RESTRICT_NO_BAR_OP { ret lhs; }
-        let cur_opt   = token_to_binop(peeked);
-        alt cur_opt {
-          some(cur_op) {
-            let cur_prec = operator_prec(cur_op);
-            if cur_prec > min_prec {
-                self.bump();
-                let expr = self.parse_prefix_expr();
-                let rhs = self.parse_more_binops(expr, cur_prec);
-                self.get_id(); // see ast_util::op_expr_callee_id
-                let bin = self.mk_pexpr(lhs.span.lo, rhs.span.hi,
-                                        expr_binary(cur_op, lhs, rhs));
-                ret self.parse_more_binops(bin, min_prec);
-            }
-          }
-          _ {}
-        }
-        if as_prec > min_prec && self.eat_keyword("as") {
-            let rhs = self.parse_ty(true);
-            let _as =
-                self.mk_pexpr(lhs.span.lo, rhs.span.hi, expr_cast(lhs, rhs));
-            ret self.parse_more_binops(_as, min_prec);
-        }
-        ret lhs;
-    }
-
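-    // Parses a binop expression optionally followed by an assignment form:
-    // `=`, a compound `op=`, `<-` (move) or `<->` (swap).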
-    fn parse_assign_expr() -> @expr {
-        let lo = self.span.lo;
-        let lhs = self.parse_binops();
-        alt self.token {
-          token::EQ {
-            self.bump();
-            let rhs = self.parse_expr();
-            ret self.mk_expr(lo, rhs.span.hi, expr_assign(lhs, rhs));
-          }
-          token::BINOPEQ(op) {
-            self.bump();
-            let rhs = self.parse_expr();
-            let mut aop;
-            alt op {
-              token::PLUS { aop = add; }
-              token::MINUS { aop = subtract; }
-              token::STAR { aop = mul; }
-              token::SLASH { aop = div; }
-              token::PERCENT { aop = rem; }
-              token::CARET { aop = bitxor; }
-              token::AND { aop = bitand; }
-              token::OR { aop = bitor; }
-              token::SHL { aop = shl; }
-              token::SHR { aop = shr; }
-            }
-            self.get_id(); // see ast_util::op_expr_callee_id
-            ret self.mk_expr(lo, rhs.span.hi, expr_assign_op(aop, lhs, rhs));
-          }
-          token::LARROW {
-            self.bump();
-            let rhs = self.parse_expr();
-            ret self.mk_expr(lo, rhs.span.hi, expr_move(lhs, rhs));
-          }
-          token::DARROW {
-            self.bump();
-            let rhs = self.parse_expr();
-            ret self.mk_expr(lo, rhs.span.hi, expr_swap(lhs, rhs));
-          }
-          _ {/* fall through */ }
-        }
-        ret lhs;
-    }
-
-    fn parse_if_expr_1() ->
-        {cond: @expr,
-         then: blk,
-         els: option<@expr>,
-         lo: uint,
-         hi: uint} {
-        let lo = self.last_span.lo;
-        let cond = self.parse_expr();
-        let thn = self.parse_block();
-        let mut els: option<@expr> = none;
-        let mut hi = thn.span.hi;
-        if self.eat_keyword("else") {
-            let elexpr = self.parse_else_expr();
-            els = some(elexpr);
-            hi = elexpr.span.hi;
-        }
-        ret {cond: cond, then: thn, els: els, lo: lo, hi: hi};
-    }
-
-    fn parse_if_expr() -> @expr {
-        if self.eat_keyword("check") {
-            let q = self.parse_if_expr_1();
-            ret self.mk_expr(q.lo, q.hi,
-                             expr_if_check(q.cond, q.then, q.els));
-        } else {
-            let q = self.parse_if_expr_1();
-            ret self.mk_expr(q.lo, q.hi, expr_if(q.cond, q.then, q.els));
-        }
-    }
-
-    fn parse_fn_expr(proto: proto) -> @expr {
-        let lo = self.last_span.lo;
-
-        let cc_old = self.parse_old_skool_capture_clause();
-
-        // if we want to allow fn expression argument types to be inferred in
-        // the future, just have to change parse_arg to parse_fn_block_arg.
-        let (decl, capture_clause) =
-            self.parse_fn_decl(impure_fn,
-                               {|p| p.parse_arg_or_capture_item()});
-
-        let body = self.parse_block();
-        ret self.mk_expr(lo, body.span.hi,
-                         expr_fn(proto, decl, body,
-                                 @(*capture_clause + cc_old)));
-    }
-
-    fn parse_fn_block_expr() -> @expr {
-        let lo = self.last_span.lo;
-        let (decl, captures) = self.parse_fn_block_decl();
-        let body = self.parse_block_tail(lo, default_blk);
-        ret self.mk_expr(lo, body.span.hi,
-                         expr_fn_block(decl, body, captures));
-    }
-
-    fn parse_else_expr() -> @expr {
-        if self.eat_keyword("if") {
-            ret self.parse_if_expr();
-        } else {
-            let blk = self.parse_block();
-            ret self.mk_expr(blk.span.lo, blk.span.hi, expr_block(blk));
-        }
-    }
-
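-    // `for` must be followed by a call with a trailing block argument;
-    // that block argument is rewrapped in expr_loop_body.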
-    fn parse_for_expr() -> @expr {
-        let lo = self.last_span;
-        let call = self.parse_expr_res(RESTRICT_STMT_EXPR);
-        alt call.node {
-          expr_call(f, args, true) {
-            let b_arg = vec::last(args);
-            let last = self.mk_expr(b_arg.span.lo, b_arg.span.hi,
-                                    expr_loop_body(b_arg));
-            @{node: expr_call(f, vec::init(args) + [last], true)
-              with *call}
-          }
-          _ {
-            self.span_fatal(lo, "`for` must be followed by a block call");
-          }
-        }
-    }
-
-    fn parse_while_expr() -> @expr {
-        let lo = self.last_span.lo;
-        let cond = self.parse_expr();
-        let body = self.parse_block_no_value();
-        let mut hi = body.span.hi;
-        ret self.mk_expr(lo, hi, expr_while(cond, body));
-    }
-
-    fn parse_loop_expr() -> @expr {
-        let lo = self.last_span.lo;
-        let body = self.parse_block_no_value();
-        let mut hi = body.span.hi;
-        ret self.mk_expr(lo, hi, expr_loop(body));
-    }
-
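-    // Parses an alt expression: a discriminant, then arms consisting of
-    // |-separated patterns, an optional `if` guard, and a block.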
-    fn parse_alt_expr() -> @expr {
-        let lo = self.last_span.lo;
-        let mode = if self.eat_keyword("check") { alt_check }
-        else { alt_exhaustive };
-        let discriminant = self.parse_expr();
-        self.expect(token::LBRACE);
-        let mut arms: [arm] = [];
-        while self.token != token::RBRACE {
-            let pats = self.parse_pats();
-            let mut guard = none;
-            if self.eat_keyword("if") { guard = some(self.parse_expr()); }
-            let blk = self.parse_block();
-            arms += [{pats: pats, guard: guard, body: blk}];
-        }
-        let mut hi = self.span.hi;
-        self.bump();
-        ret self.mk_expr(lo, hi, expr_alt(discriminant, arms, mode));
-    }
-
-    fn parse_expr() -> @expr {
-        ret self.parse_expr_res(UNRESTRICTED);
-    }
-
-    fn parse_expr_or_hole() -> option<@expr> {
-        alt self.token {
-          token::UNDERSCORE { self.bump(); ret none; }
-          _ { ret some(self.parse_expr()); }
-        }
-    }
-
-    fn parse_expr_res(r: restriction) -> @expr {
-        let old = self.restriction;
-        self.restriction = r;
-        let e = self.parse_assign_expr();
-        self.restriction = old;
-        ret e;
-    }
-
-    fn parse_initializer() -> option<initializer> {
-        alt self.token {
-          token::EQ {
-            self.bump();
-            ret some({op: init_assign, expr: self.parse_expr()});
-          }
-          token::LARROW {
-            self.bump();
-            ret some({op: init_move, expr: self.parse_expr()});
-          }
-          // Now that the channel is the first argument to receive,
-          // combining it with an initializer doesn't really make sense.
-          // case (token::RECV) {
-          //     self.bump();
-          //     ret some(rec(op = init_recv,
-          //                  expr = self.parse_expr()));
-          // }
-          _ {
-            ret none;
-          }
-        }
-    }
-
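-    // Parses one or more patterns separated by `|`.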
-    fn parse_pats() -> [@pat] {
-        let mut pats = [];
-        loop {
-            pats += [self.parse_pat()];
-            if self.token == token::BINOP(token::OR) { self.bump(); }
-            else { ret pats; }
-        };
-    }
-
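-    // Parses a single pattern: `_`, @pat, ~pat, record patterns, () and
-    // tuple patterns, literal/range patterns, identifier bindings, and
-    // enum patterns with an optional argument list.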
-    fn parse_pat() -> @pat {
-        let lo = self.span.lo;
-        let mut hi = self.span.hi;
-        let mut pat;
-        alt self.token {
-          token::UNDERSCORE { self.bump(); pat = pat_wild; }
-          token::AT {
-            self.bump();
-            let sub = self.parse_pat();
-            pat = pat_box(sub);
-            hi = sub.span.hi;
-          }
-          token::TILDE {
-            self.bump();
-            let sub = self.parse_pat();
-            pat = pat_uniq(sub);
-            hi = sub.span.hi;
-          }
-          token::LBRACE {
-            self.bump();
-            let mut fields = [];
-            let mut etc = false;
-            let mut first = true;
-            while self.token != token::RBRACE {
-                if first { first = false; }
-                else { self.expect(token::COMMA); }
-
-                if self.token == token::UNDERSCORE {
-                    self.bump();
-                    if self.token != token::RBRACE {
-                        self.fatal("expecting }, found " +
-                                   token_to_str(self.reader, self.token));
-                    }
-                    etc = true;
-                    break;
-                }
-
-                let lo1 = self.last_span.lo;
-                let fieldname = if self.look_ahead(1u) == token::COLON {
-                    self.parse_ident()
-                } else {
-                    self.parse_value_ident()
-                };
-                let hi1 = self.last_span.lo;
-                let fieldpath = ast_util::ident_to_path(mk_sp(lo1, hi1),
-                                                        fieldname);
-                let mut subpat;
-                if self.token == token::COLON {
-                    self.bump();
-                    subpat = self.parse_pat();
-                } else {
-                    subpat = @{id: self.get_id(),
-                               node: pat_ident(fieldpath, none),
-                               span: mk_sp(lo, hi)};
-                }
-                fields += [{ident: fieldname, pat: subpat}];
-            }
-            hi = self.span.hi;
-            self.bump();
-            pat = pat_rec(fields, etc);
-          }
-          token::LPAREN {
-            self.bump();
-            if self.token == token::RPAREN {
-                hi = self.span.hi;
-                self.bump();
-                let lit = @{node: lit_nil, span: mk_sp(lo, hi)};
-                let expr = self.mk_expr(lo, hi, expr_lit(lit));
-                pat = pat_lit(expr);
-            } else {
-                let mut fields = [self.parse_pat()];
-                while self.token == token::COMMA {
-                    self.bump();
-                    fields += [self.parse_pat()];
-                }
-                if vec::len(fields) == 1u { self.expect(token::COMMA); }
-                hi = self.span.hi;
-                self.expect(token::RPAREN);
-                pat = pat_tup(fields);
-            }
-          }
-          tok {
-            if !is_ident(tok) || self.is_keyword("true")
-                || self.is_keyword("false") {
-                let val = self.parse_expr_res(RESTRICT_NO_BAR_OP);
-                if self.eat_keyword("to") {
-                    let end = self.parse_expr_res(RESTRICT_NO_BAR_OP);
-                    hi = end.span.hi;
-                    pat = pat_range(val, end);
-                } else {
-                    hi = val.span.hi;
-                    pat = pat_lit(val);
-                }
-            } else if is_plain_ident(self.token) &&
-                alt self.look_ahead(1u) {
-                  token::LPAREN | token::LBRACKET | token::LT { false }
-                  _ { true }
-                } {
-                let name = self.parse_value_path();
-                let sub = if self.eat(token::AT) { some(self.parse_pat()) }
-                else { none };
-                pat = pat_ident(name, sub);
-            } else {
-                let enum_path = self.parse_path_with_tps(true);
-                hi = enum_path.span.hi;
-                let mut args: [@pat] = [];
-                let mut star_pat = false;
-                alt self.token {
-                  token::LPAREN {
-                    alt self.look_ahead(1u) {
-                      token::BINOP(token::STAR) {
-                        // This is a "top constructor only" pat
-                        self.bump(); self.bump();
-                        star_pat = true;
-                        self.expect(token::RPAREN);
-                      }
-                      _ {
-                        let a = self.parse_seq(token::LPAREN, token::RPAREN,
-                                               seq_sep(token::COMMA),
-                                               {|p| p.parse_pat()});
-                        args = a.node;
-                        hi = a.span.hi;
-                      }
-                    }
-                  }
-                  _ { }
-                }
-                // at this point, we're not sure whether it's an enum or a bind
-                if star_pat {
-                    pat = pat_enum(enum_path, none);
-                }
-                else if vec::is_empty(args) &&
-                    vec::len(enum_path.idents) == 1u {
-                    pat = pat_ident(enum_path, none);
-                }
-                else {
-                    pat = pat_enum(enum_path, some(args));
-                }
-            }
-          }
-        }
-        ret @{id: self.get_id(), node: pat, span: mk_sp(lo, hi)};
-    }
-
-    fn parse_local(is_mutbl: bool,
-                   allow_init: bool) -> @local {
-        let lo = self.span.lo;
-        let pat = self.parse_pat();
-        let mut ty = @{id: self.get_id(),
-                       node: ty_infer,
-                       span: mk_sp(lo, lo)};
-        if self.eat(token::COLON) { ty = self.parse_ty(false); }
-        let init = if allow_init { self.parse_initializer() } else { none };
-        ret @spanned(lo, self.last_span.hi,
-                     {is_mutbl: is_mutbl, ty: ty, pat: pat,
-                      init: init, id: self.get_id()});
-    }
-
-    fn parse_let() -> @decl {
-        let is_mutbl = self.eat_keyword("mut");
-        let lo = self.span.lo;
-        let mut locals = [self.parse_local(is_mutbl, true)];
-        while self.eat(token::COMMA) {
-            locals += [self.parse_local(is_mutbl, true)];
-        }
-        ret @spanned(lo, self.last_span.hi, decl_local(locals));
-    }
-
-    /* assumes "let" token has already been consumed */
-    fn parse_instance_var(pr: visibility) -> @class_member {
-        let mut is_mutbl = class_immutable;
-        let lo = self.span.lo;
-        if self.eat_keyword("mut") {
-            is_mutbl = class_mutable;
-        }
-        if !is_plain_ident(self.token) {
-            self.fatal("expecting ident");
-        }
-        let name = self.parse_ident();
-        self.expect(token::COLON);
-        let ty = self.parse_ty(false);
-        ret @{node: instance_var(name, ty, is_mutbl, self.get_id(), pr),
-              span: mk_sp(lo, self.last_span.hi)};
-    }
-
-    fn parse_stmt(+first_item_attrs: [attribute]) -> @stmt {
-        fn check_expected_item(p: parser, current_attrs: [attribute]) {
-            // If we have attributes then we should have an item
-            if vec::is_not_empty(current_attrs) {
-                p.fatal("expected item");
-            }
-        }
-
-        let lo = self.span.lo;
-        if self.is_keyword("let") {
-            check_expected_item(self, first_item_attrs);
-            self.expect_keyword("let");
-            let decl = self.parse_let();
-            ret @spanned(lo, decl.span.hi, stmt_decl(decl, self.get_id()));
-        } else {
-            let mut item_attrs;
-            alt self.parse_outer_attrs_or_ext(first_item_attrs) {
-              none { item_attrs = []; }
-              some(left(attrs)) { item_attrs = attrs; }
-              some(right(ext)) {
-                ret @spanned(lo, ext.span.hi, stmt_expr(ext, self.get_id()));
-              }
-            }
-
-            let item_attrs = first_item_attrs + item_attrs;
-
-            alt self.parse_item(item_attrs, public) {
-              some(i) {
-                let mut hi = i.span.hi;
-                let decl = @spanned(lo, hi, decl_item(i));
-                ret @spanned(lo, hi, stmt_decl(decl, self.get_id()));
-              }
-              none() { /* fallthrough */ }
-            }
-
-            check_expected_item(self, item_attrs);
-
-            // Remainder are line-expr stmts.
-            let e = self.parse_expr_res(RESTRICT_STMT_EXPR);
-            ret @spanned(lo, e.span.hi, stmt_expr(e, self.get_id()));
-        }
-    }
-
-    fn expr_is_complete(e: pexpr) -> bool {
-        log(debug, ("expr_is_complete", self.restriction,
-                    print::pprust::expr_to_str(*e),
-                    classify::expr_requires_semi_to_be_stmt(*e)));
-        ret self.restriction == RESTRICT_STMT_EXPR &&
-            !classify::expr_requires_semi_to_be_stmt(*e);
-    }
-
-    fn parse_block() -> blk {
-        let (attrs, blk) = self.parse_inner_attrs_and_block(false);
-        assert vec::is_empty(attrs);
-        ret blk;
-    }
-
-    fn parse_inner_attrs_and_block(parse_attrs: bool) -> ([attribute], blk) {
-
-        fn maybe_parse_inner_attrs_and_next(p: parser, parse_attrs: bool) ->
-            {inner: [attribute], next: [attribute]} {
-            if parse_attrs {
-                p.parse_inner_attrs_and_next()
-            } else {
-                {inner: [], next: []}
-            }
-        }
-
-        let lo = self.span.lo;
-        if self.eat_keyword("unchecked") {
-            self.expect(token::LBRACE);
-            let {inner, next} = maybe_parse_inner_attrs_and_next(self,
-                                                                 parse_attrs);
-            ret (inner, self.parse_block_tail_(lo, unchecked_blk, next));
-        } else if self.eat_keyword("unsafe") {
-            self.expect(token::LBRACE);
-            let {inner, next} = maybe_parse_inner_attrs_and_next(self,
-                                                                 parse_attrs);
-            ret (inner, self.parse_block_tail_(lo, unsafe_blk, next));
-        } else {
-            self.expect(token::LBRACE);
-            let {inner, next} = maybe_parse_inner_attrs_and_next(self,
-                                                                 parse_attrs);
-            ret (inner, self.parse_block_tail_(lo, default_blk, next));
-        }
-    }
-
-    fn parse_block_no_value() -> blk {
-        // We parse blocks that cannot have a value the same way as any
-        // other block; the type checker will make sure that the tail
-        // expression (if any) has unit type.
-        ret self.parse_block();
-    }
-
-    // Precondition: already parsed the '{' or '#{'
-    // I guess that also means "already parsed the 'impure'" if
-    // necessary, and this should take a qualifier.
-    // some blocks start with "#{"...
-    fn parse_block_tail(lo: uint, s: blk_check_mode) -> blk {
-        self.parse_block_tail_(lo, s, [])
-    }
-
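-    // Parses statements up to the closing '}'; an expression not followed
-    // by ';' right before the '}' becomes the block's trailing expression.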
-    fn parse_block_tail_(lo: uint, s: blk_check_mode,
-                         +first_item_attrs: [attribute]) -> blk {
-        let mut stmts = [];
-        let mut expr = none;
-        let {attrs_remaining, view_items} =
-            self.parse_view(first_item_attrs, true);
-        let mut initial_attrs = attrs_remaining;
-
-        if self.token == token::RBRACE && !vec::is_empty(initial_attrs) {
-            self.fatal("expected item");
-        }
-
-        while self.token != token::RBRACE {
-            alt self.token {
-              token::SEMI {
-                self.bump(); // empty
-              }
-              _ {
-                let stmt = self.parse_stmt(initial_attrs);
-                initial_attrs = [];
-                alt stmt.node {
-                  stmt_expr(e, stmt_id) { // Expression without semicolon:
-                    alt self.token {
-                      token::SEMI {
-                        self.bump();
-                        stmts += [@{node: stmt_semi(e, stmt_id) with *stmt}];
-                      }
-                      token::RBRACE {
-                        expr = some(e);
-                      }
-                      t {
-                        if classify::stmt_ends_with_semi(*stmt) {
-                            self.fatal("expected ';' or '}' after expression \
-                                        but found '"
-                                       + token_to_str(self.reader, t) + "'");
-                        }
-                        stmts += [stmt];
-                      }
-                    }
-                  }
-
-                  _ { // All other kinds of statements:
-                    stmts += [stmt];
-
-                    if classify::stmt_ends_with_semi(*stmt) {
-                        self.expect(token::SEMI);
-                    }
-                  }
-                }
-              }
-            }
-        }
-        let mut hi = self.span.hi;
-        self.bump();
-        let bloc = {view_items: view_items, stmts: stmts, expr: expr,
-                    id: self.get_id(), rules: s};
-        ret spanned(lo, hi, bloc);
-    }
-
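-    // Parses a type parameter and any bounds after ':' (send, copy, const,
-    // or an iface type), up to the next ',' or '>'.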
-    fn parse_ty_param() -> ty_param {
-        let mut bounds = [];
-        let ident = self.parse_ident();
-        if self.eat(token::COLON) {
-            while self.token != token::COMMA && self.token != token::GT {
-                if self.eat_keyword("send") { bounds += [bound_send]; }
-                else if self.eat_keyword("copy") { bounds += [bound_copy]; }
-                else if self.eat_keyword("const") { bounds += [bound_const]; }
-                else { bounds += [bound_iface(self.parse_ty(false))]; }
-            }
-        }
-        ret {ident: ident, id: self.get_id(), bounds: @bounds};
-    }
-
-    fn parse_ty_params() -> [ty_param] {
-        if self.eat(token::LT) {
-            self.parse_seq_to_gt(some(token::COMMA), {|p| p.parse_ty_param()})
-        } else { [] }
-    }
-
-    // FIXME Remove after snapshot
-    fn parse_old_skool_capture_clause() -> [capture_item] {
-        fn expect_opt_trailing_semi(p: parser) {
-            if !p.eat(token::SEMI) {
-                if p.token != token::RBRACKET {
-                    p.fatal("expecting ; or ]");
-                }
-            }
-        }
-
-        fn eat_ident_list(p: parser, is_move: bool) -> [capture_item] {
-            let mut res = [];
-            loop {
-                alt p.token {
-                  token::IDENT(_, _) {
-                    let id = p.get_id();
-                    let sp = mk_sp(p.span.lo, p.span.hi);
-                    let ident = p.parse_ident();
-                    res += [@{id:id, is_move: is_move, name:ident, span:sp}];
-                    if !p.eat(token::COMMA) {
-                        ret res;
-                    }
-                  }
-
-                  _ { ret res; }
-                }
-            };
-        }
-
-        let mut cap_items = [];
-
-        if self.eat(token::LBRACKET) {
-            while !self.eat(token::RBRACKET) {
-                if self.eat_keyword("copy") {
-                    cap_items += eat_ident_list(self, false);
-                    expect_opt_trailing_semi(self);
-                } else if self.eat_keyword("move") {
-                    cap_items += eat_ident_list(self, true);
-                    expect_opt_trailing_semi(self);
-                } else {
-                    let s: str = "expecting send, copy, or move clause";
-                    self.fatal(s);
-                }
-            }
-        }
-
-        ret cap_items;
-    }
-
-    fn parse_fn_decl(purity: purity,
-                     parse_arg_fn: fn(parser) -> arg_or_capture_item)
-        -> (fn_decl, capture_clause) {
-
-        let args_or_capture_items: [arg_or_capture_item] =
-            self.parse_seq(token::LPAREN, token::RPAREN,
-                           seq_sep(token::COMMA), parse_arg_fn).node;
-
-        let inputs = either::lefts(args_or_capture_items);
-        let capture_clause = @either::rights(args_or_capture_items);
-
-        // Use the args list to translate each bound variable
-        // mentioned in a constraint to an arg index.
-        // Seems weird to do this in the parser, but I'm not sure how else to.
-        let mut constrs = [];
-        if self.token == token::COLON {
-            self.bump();
-            constrs = self.parse_constrs({|p| p.parse_ty_constr(inputs) });
-        }
-        let (ret_style, ret_ty) = self.parse_ret_ty();
-        ret ({inputs: inputs,
-              output: ret_ty,
-              purity: purity,
-              cf: ret_style,
-              constraints: constrs}, capture_clause);
-    }
-
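-    // Parses a block (lambda) header: either `||` or `|arg, ...|`, followed
-    // by an optional `->` return type; the return type defaults to inferred.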
-    fn parse_fn_block_decl() -> (fn_decl, capture_clause) {
-        let inputs_captures = {
-            if self.eat(token::OROR) {
-                []
-            } else {
-                self.parse_seq(token::BINOP(token::OR),
-                               token::BINOP(token::OR), seq_sep(token::COMMA),
-                               {|p| p.parse_fn_block_arg()}).node
-            }
-        };
-        let output = if self.eat(token::RARROW) {
-            self.parse_ty(false)
-        } else {
-            @{id: self.get_id(), node: ty_infer, span: self.span}
-        };
-        ret ({inputs: either::lefts(inputs_captures),
-              output: output,
-              purity: impure_fn,
-              cf: return_val,
-              constraints: []},
-             @either::rights(inputs_captures));
-    }
-
-    fn parse_fn_header() -> {ident: ident, tps: [ty_param]} {
-        let id = self.parse_value_ident();
-        let ty_params = self.parse_ty_params();
-        ret {ident: id, tps: ty_params};
-    }
-
-    fn mk_item(lo: uint, hi: uint, +ident: ident,
-               +node: item_, vis: visibility,
-               +attrs: [attribute]) -> @item {
-        ret @{ident: ident,
-              attrs: attrs,
-              id: self.get_id(),
-              node: node,
-              vis: vis,
-              span: mk_sp(lo, hi)};
-    }
-
-    fn parse_item_fn(purity: purity) -> item_info {
-        let t = self.parse_fn_header();
-        let (decl, _) = self.parse_fn_decl(purity, {|p| p.parse_arg()});
-        let (inner_attrs, body) = self.parse_inner_attrs_and_block(true);
-        (t.ident, item_fn(decl, t.tps, body), some(inner_attrs))
-    }
-
-    fn parse_method_name() -> ident {
-        alt self.token {
-          token::BINOP(op) { self.bump(); token::binop_to_str(op) }
-          token::NOT { self.bump(); "!" }
-          token::LBRACKET { self.bump(); self.expect(token::RBRACKET); "[]" }
-          _ {
-            let id = self.parse_value_ident();
-            if id == "unary" && self.eat(token::BINOP(token::MINUS)) {
-                "unary-"
-            }
-            else { id }
-          }
-        }
-    }
-
-    fn parse_method(pr: visibility) -> @method {
-        let attrs = self.parse_outer_attributes();
-        let lo = self.span.lo, pur = self.parse_fn_purity();
-        let ident = self.parse_method_name();
-        let tps = self.parse_ty_params();
-        let (decl, _) = self.parse_fn_decl(pur, {|p| p.parse_arg()});
-        let (inner_attrs, body) = self.parse_inner_attrs_and_block(true);
-        let attrs = attrs + inner_attrs;
-        @{ident: ident, attrs: attrs, tps: tps, decl: decl, body: body,
-          id: self.get_id(), span: mk_sp(lo, body.span.hi),
-          self_id: self.get_id(), vis: pr}
-    }
-
-    fn parse_item_iface() -> item_info {
-        let ident = self.parse_ident();
-        let rp = self.parse_region_param();
-        let tps = self.parse_ty_params();
-        let meths = self.parse_ty_methods();
-        (ident, item_iface(tps, rp, meths), none)
-    }
-
-    // Parses three variants (with the region/type params always optional):
-    //    impl /&<T: copy> of to_str for [T] { ... }
-    //    impl name/&<T> of to_str for [T] { ... }
-    //    impl name/&<T> for [T] { ... }
-    fn parse_item_impl() -> item_info {
-        fn wrap_path(p: parser, pt: @path) -> @ty {
-            @{id: p.get_id(), node: ty_path(pt, p.get_id()), span: pt.span}
-        }
-        let mut (ident, rp, tps) = {
-            if self.token == token::LT {
-                (none, rp_none, self.parse_ty_params())
-            } else if self.token == token::BINOP(token::SLASH) {
-                (none, self.parse_region_param(), self.parse_ty_params())
-            }
-            else if self.is_keyword("of") {
-                (none, rp_none, [])
-            } else {
-                let id = self.parse_ident();
-                let rp = self.parse_region_param();
-                (some(id), rp, self.parse_ty_params())
-            }
-        };
-        let ifce = if self.eat_keyword("of") {
-            let path = self.parse_path_with_tps(false);
-            if option::is_none(ident) {
-                ident = some(vec::last(path.idents));
-            }
-            some(@{path: path, id: self.get_id()})
-        } else { none };
-        let ident = alt ident {
-          some(name) { name }
-          none { self.expect_keyword("of"); fail; }
-        };
-        self.expect_keyword("for");
-        let ty = self.parse_ty(false);
-        let mut meths = [];
-        self.expect(token::LBRACE);
-        while !self.eat(token::RBRACE) {
-            meths += [self.parse_method(public)];
-        }
-        (ident, item_impl(tps, rp, ifce, ty, meths), none)
-    }
-
-    fn parse_item_res() -> item_info {
-        let ident = self.parse_value_ident();
-        let rp = self.parse_region_param();
-        let ty_params = self.parse_ty_params();
-        self.expect(token::LPAREN);
-        let arg_ident = self.parse_value_ident();
-        self.expect(token::COLON);
-        let t = self.parse_ty(false);
-        self.expect(token::RPAREN);
-        let dtor = self.parse_block_no_value();
-        let decl = {
-            inputs: [{mode: expl(by_ref), ty: t,
-                      ident: arg_ident, id: self.get_id()}],
-            output: @{id: self.get_id(), node: ty_nil,
-                      span: ast_util::dummy_sp()},
-            purity: impure_fn,
-            cf: return_val,
-            constraints: []
-        };
-        (ident, item_res(decl, ty_params, dtor,
-                         self.get_id(), self.get_id(), rp), none)
-    }
-
-    // Instantiates ident <i> with references to <typarams> as arguments.
-    // Used to create a path that refers to a class which will be defined as
-    // the return type of the ctor function.
-    fn ident_to_path_tys(i: ident,
-                         rp: region_param,
-                         typarams: [ty_param]) -> @path {
-        let s = self.last_span;
-
-        // Hack.  But then, this whole function is in service of a hack.
-        let a_r = alt rp {
-          rp_none { none }
-          rp_self { some(self.region_from_name(some("self"))) }
-        };
-
-        @{span: s, global: false, idents: [i],
-          rp: a_r,
-          types: vec::map(typarams, {|tp|
-              @{id: self.get_id(),
-                node: ty_path(ident_to_path(s, tp.ident), self.get_id()),
-                span: s}})
-         }
-    }
-
-    fn parse_iface_ref() -> @iface_ref {
-        @{path: self.parse_path_with_tps(false),
-          id: self.get_id()}
-    }
-
-    fn parse_iface_ref_list() -> [@iface_ref] {
-        self.parse_seq_to_before_end(token::LBRACE, seq_sep(token::COMMA),
-                                     {|p| p.parse_iface_ref()})
-    }
-
-    fn parse_item_class() -> item_info {
-        let class_name = self.parse_value_ident();
-        let rp = self.parse_region_param();
-        let ty_params = self.parse_ty_params();
-        let class_path = self.ident_to_path_tys(class_name, rp, ty_params);
-        let ifaces : [@iface_ref] = if self.eat_keyword("implements")
-            { self.parse_iface_ref_list() }
-        else { [] };
-        self.expect(token::LBRACE);
-        let mut ms: [@class_member] = [];
-        let ctor_id = self.get_id();
-        let mut the_ctor : option<(fn_decl, blk, codemap::span)> = none;
-        let mut the_dtor : option<(blk, codemap::span)> = none;
-        while self.token != token::RBRACE {
-            alt self.parse_class_item(class_path) {
-              ctor_decl(a_fn_decl, blk, s) {
-                the_ctor = some((a_fn_decl, blk, s));
-              }
-              dtor_decl(blk, s) {
-                the_dtor = some((blk, s));
-              }
-              members(mms) { ms += mms; }
-            }
-        }
-        let actual_dtor = option::map(the_dtor) {|dtor|
-            let (d_body, d_s) = dtor;
-            {node: {id: self.get_id(),
-                    self_id: self.get_id(),
-                    body: d_body},
-             span: d_s}};
-        self.bump();
-        alt the_ctor {
-          some((ct_d, ct_b, ct_s)) {
-            (class_name,
-             item_class(ty_params, ifaces, ms, {
-                 node: {id: ctor_id,
-                        self_id: self.get_id(),
-                        dec: ct_d,
-                        body: ct_b},
-                 span: ct_s}, actual_dtor, rp),
-             none)
-          }
-          /*
-          Is it strange for the parser to check this?
-          */
-          none {
-            self.fatal("class with no ctor");
-          }
-        }
-    }
-
-    fn parse_single_class_item(vis: visibility)
-        -> @class_member {
-        if self.eat_keyword("let") {
-            let a_var = self.parse_instance_var(vis);
-            self.expect(token::SEMI);
-            ret a_var;
-        }
-        else {
-            let m = self.parse_method(vis);
-            ret @{node: class_method(m), span: m.span};
-        }
-    }
-
-    fn parse_ctor(result_ty: ast::ty_) -> class_contents {
-        // Can ctors/dtors have attrs? FIXME
-        let lo = self.last_span.lo;
-        let (decl_, _) = self.parse_fn_decl(impure_fn, {|p| p.parse_arg()});
-        let decl = {output: @{id: self.get_id(),
-                              node: result_ty, span: decl_.output.span}
-                    with decl_};
-        let body = self.parse_block();
-        ctor_decl(decl, body, mk_sp(lo, self.last_span.hi))
-    }
-
-    fn parse_dtor() -> class_contents {
-        // Can ctors/dtors have attrs? FIXME
-        let lo = self.last_span.lo;
-        let body = self.parse_block();
-        dtor_decl(body, mk_sp(lo, self.last_span.hi))
-    }
-
-    fn parse_class_item(class_name_with_tps: @path)
-        -> class_contents {
-        if self.eat_keyword("new") {
-            // result type is always the type of the class
-            ret self.parse_ctor(ty_path(class_name_with_tps,
-                                        self.get_id()));
-        }
-        else if self.eat_keyword("drop") {
-            ret self.parse_dtor();
-        }
-        else if self.eat_keyword("priv") {
-            self.expect(token::LBRACE);
-            let mut results = [];
-            while self.token != token::RBRACE {
-                results += [self.parse_single_class_item(private)];
-            }
-            self.bump();
-            ret members(results);
-        }
-        else {
-            // Probably need to parse attrs
-            ret members([self.parse_single_class_item(public)]);
-        }
-    }
-
-    fn parse_visibility(def: visibility) -> visibility {
-        if self.eat_keyword("pub") { public }
-        else if self.eat_keyword("priv") { private }
-        else { def }
-    }
-
-    fn parse_mod_items(term: token::token,
-                       +first_item_attrs: [attribute]) -> _mod {
-        // Shouldn't be any view items since we've already parsed an item attr
-        let {attrs_remaining, view_items} =
-            self.parse_view(first_item_attrs, false);
-        let mut items: [@item] = [];
-        let mut first = true;
-        while self.token != term {
-            let mut attrs = self.parse_outer_attributes();
-            if first { attrs = attrs_remaining + attrs; first = false; }
-            #debug["parse_mod_items: parse_item(attrs=%?)", attrs];
-            let vis = self.parse_visibility(private);
-            alt self.parse_item(attrs, vis) {
-              some(i) { items += [i]; }
-              _ {
-                self.fatal("expected item but found '" +
-                           token_to_str(self.reader, self.token) + "'");
-              }
-            }
-            #debug["parse_mod_items: attrs=%?", attrs];
-        }
-
-        if first && attrs_remaining.len() > 0u {
-            // We parsed attributes for the first item but didn't find it
-            self.fatal("expected item");
-        }
-
-        ret {view_items: view_items, items: items};
-    }
-
-    fn parse_item_const() -> item_info {
-        let id = self.parse_value_ident();
-        self.expect(token::COLON);
-        let ty = self.parse_ty(false);
-        self.expect(token::EQ);
-        let e = self.parse_expr();
-        self.expect(token::SEMI);
-        (id, item_const(ty, e), none)
-    }
-
-    fn parse_item_mod() -> item_info {
-        let id = self.parse_ident();
-        self.expect(token::LBRACE);
-        let inner_attrs = self.parse_inner_attrs_and_next();
-        let m = self.parse_mod_items(token::RBRACE, inner_attrs.next);
-        self.expect(token::RBRACE);
-        (id, item_mod(m), some(inner_attrs.inner))
-    }
-
-    fn parse_item_native_fn(+attrs: [attribute],
-                            purity: purity) -> @native_item {
-        let lo = self.last_span.lo;
-        let t = self.parse_fn_header();
-        let (decl, _) = self.parse_fn_decl(purity, {|p| p.parse_arg()});
-        let mut hi = self.span.hi;
-        self.expect(token::SEMI);
-        ret @{ident: t.ident,
-              attrs: attrs,
-              node: native_item_fn(decl, t.tps),
-              id: self.get_id(),
-              span: mk_sp(lo, hi)};
-    }
-
-    fn parse_fn_purity() -> purity {
-        if self.eat_keyword("fn") { impure_fn }
-        else if self.eat_keyword("pure") {
-            self.expect_keyword("fn");
-            pure_fn
-        } else if self.eat_keyword("unsafe") {
-            self.expect_keyword("fn");
-            unsafe_fn
-        }
-        else { self.unexpected(); }
-    }
-
-    fn parse_native_item(+attrs: [attribute]) ->
-        @native_item {
-        self.parse_item_native_fn(attrs, self.parse_fn_purity())
-    }
-
-    fn parse_native_mod_items(+first_item_attrs: [attribute]) ->
-        native_mod {
-        // Shouldn't be any view items since we've already parsed an item attr
-        let {attrs_remaining, view_items} =
-            self.parse_view(first_item_attrs, false);
-        let mut items: [@native_item] = [];
-        let mut initial_attrs = attrs_remaining;
-        while self.token != token::RBRACE {
-            let attrs = initial_attrs + self.parse_outer_attributes();
-            initial_attrs = [];
-            items += [self.parse_native_item(attrs)];
-        }
-        ret {view_items: view_items,
-             items: items};
-    }
-
-    fn parse_item_native_mod() -> item_info {
-        self.expect_keyword("mod");
-        let id = self.parse_ident();
-        self.expect(token::LBRACE);
-        let more_attrs = self.parse_inner_attrs_and_next();
-        let m = self.parse_native_mod_items(more_attrs.next);
-        self.expect(token::RBRACE);
-        (id, item_native_mod(m), some(more_attrs.inner))
-    }
-
-    fn parse_type_decl() -> {lo: uint, ident: ident} {
-        let lo = self.last_span.lo;
-        let id = self.parse_ident();
-        ret {lo: lo, ident: id};
-    }
-
-    fn parse_item_type() -> item_info {
-        let t = self.parse_type_decl();
-        let rp = self.parse_region_param();
-        let tps = self.parse_ty_params();
-        self.expect(token::EQ);
-        let ty = self.parse_ty(false);
-        self.expect(token::SEMI);
-        (t.ident, item_ty(ty, tps, rp), none)
-    }
-
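-    // Parses an optional region parameter: a trailing `/&` selects rp_self,
-    // otherwise rp_none.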
-    fn parse_region_param() -> region_param {
-        if self.eat(token::BINOP(token::SLASH)) {
-            self.expect(token::BINOP(token::AND));
-            rp_self
-        } else {
-            rp_none
-        }
-    }
-
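-    // Parses an enum item: either the newtype form `enum id = ty;` (a single
-    // implicit variant) or a braced list of variants, each with an optional
-    // argument list or `= expr` discriminator (c-like enums only).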
-    fn parse_item_enum(default_vis: visibility) -> item_info {
-        let id = self.parse_ident();
-        let rp = self.parse_region_param();
-        let ty_params = self.parse_ty_params();
-        let mut variants: [variant] = [];
-        // Newtype syntax
-        if self.token == token::EQ {
-            self.check_restricted_keywords_(id);
-            self.bump();
-            let ty = self.parse_ty(false);
-            self.expect(token::SEMI);
-            let variant =
-                spanned(ty.span.lo, ty.span.hi,
-                        {name: id,
-                         attrs: [],
-                         args: [{ty: ty, id: self.get_id()}],
-                         id: self.get_id(),
-                         disr_expr: none,
-                         vis: public});
-            ret (id, item_enum([variant], ty_params, rp), none);
-        }
-        self.expect(token::LBRACE);
-
-        let mut all_nullary = true, have_disr = false;
-
-        while self.token != token::RBRACE {
-            let variant_attrs = self.parse_outer_attributes();
-            let vlo = self.span.lo;
-            let vis = self.parse_visibility(default_vis);
-            let ident = self.parse_value_ident();
-            let mut args = [], disr_expr = none;
-            if self.token == token::LPAREN {
-                all_nullary = false;
-                let arg_tys = self.parse_seq(token::LPAREN, token::RPAREN,
-                                             seq_sep(token::COMMA),
-                                             {|p| p.parse_ty(false)});
-                for arg_tys.node.each {|ty|
-                    args += [{ty: ty, id: self.get_id()}];
-                }
-            } else if self.eat(token::EQ) {
-                have_disr = true;
-                disr_expr = some(self.parse_expr());
-            }
-
-            let vr = {name: ident, attrs: variant_attrs,
-                      args: args, id: self.get_id(),
-                      disr_expr: disr_expr, vis: vis};
-            variants += [spanned(vlo, self.last_span.hi, vr)];
-
-            if !self.eat(token::COMMA) { break; }
-        }
-        self.expect(token::RBRACE);
-        if (have_disr && !all_nullary) {
-            self.fatal("discriminator values can only be used with a c-like \
-                        enum");
-        }
-        (id, item_enum(variants, ty_params, rp), none)
-    }
-
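-    // Maps a leading sigil to a fn protocol: `@` is proto_box, `~` is
-    // proto_uniq, `&` is proto_block, and no sigil means proto_any.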
-    fn parse_fn_ty_proto() -> proto {
-        alt self.token {
-          token::AT {
-            self.bump();
-            proto_box
-          }
-          token::TILDE {
-            self.bump();
-            proto_uniq
-          }
-          token::BINOP(token::AND) {
-            self.bump();
-            proto_block
-          }
-          _ {
-            proto_any
-          }
-        }
-    }
-
-    fn fn_expr_lookahead(tok: token::token) -> bool {
-        alt tok {
-          token::LPAREN | token::AT | token::TILDE | token::BINOP(_) {
-            true
-          }
-          _ {
-            false
-          }
-        }
-    }
-
-    fn parse_item(+attrs: [attribute], vis: visibility)
-        -> option<@item> {
-        let lo = self.span.lo;
-        let (ident, item_, extra_attrs) = if self.eat_keyword("const") {
-            self.parse_item_const()
-        } else if self.is_keyword("fn") &&
-            !self.fn_expr_lookahead(self.look_ahead(1u)) {
-            self.bump();
-            self.parse_item_fn(impure_fn)
-        } else if self.eat_keyword("pure") {
-            self.expect_keyword("fn");
-            self.parse_item_fn(pure_fn)
-        } else if self.is_keyword("unsafe")
-            && self.look_ahead(1u) != token::LBRACE {
-            self.bump();
-            self.expect_keyword("fn");
-            self.parse_item_fn(unsafe_fn)
-        } else if self.eat_keyword("crust") {
-            self.expect_keyword("fn");
-            self.parse_item_fn(crust_fn)
-        } else if self.eat_keyword("mod") {
-            self.parse_item_mod()
-        } else if self.eat_keyword("native") {
-            self.parse_item_native_mod()
-        } else if self.eat_keyword("type") {
-            self.parse_item_type()
-        } else if self.eat_keyword("enum") {
-            self.parse_item_enum(vis)
-        } else if self.eat_keyword("iface") {
-            self.parse_item_iface()
-        } else if self.eat_keyword("impl") {
-            self.parse_item_impl()
-        } else if self.eat_keyword("resource") {
-            self.parse_item_res()
-        } else if self.eat_keyword("class") {
-            self.parse_item_class()
-        } else { ret none; };
-        some(self.mk_item(lo, self.last_span.hi, ident, item_, vis,
-                          alt extra_attrs {
-                              some(as) { attrs + as }
-                              none { attrs }
-                          }))
-    }
-
-    fn parse_use() -> view_item_ {
-        let ident = self.parse_ident();
-        let metadata = self.parse_optional_meta();
-        ret view_item_use(ident, metadata, self.get_id());
-    }
-
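-    // Parses a single view path: `x = foo::bar` (renaming), `foo::bar`,
-    // `foo::{a, b, c}`, or `foo::*`.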
-    fn parse_view_path() -> @view_path {
-        let lo = self.span.lo;
-        let first_ident = self.parse_ident();
-        let mut path = [first_ident];
-        #debug("parsed view_path: %s", first_ident);
-        alt self.token {
-          token::EQ {
-            // x = foo::bar
-            self.bump();
-            path = [self.parse_ident()];
-            while self.token == token::MOD_SEP {
-                self.bump();
-                let id = self.parse_ident();
-                path += [id];
-            }
-            let path = @{span: mk_sp(lo, self.span.hi), global: false,
-                         idents: path, rp: none, types: []};
-            ret @spanned(lo, self.span.hi,
-                         view_path_simple(first_ident, path, self.get_id()));
-          }
-
-          token::MOD_SEP {
-            // foo::bar or foo::{a,b,c} or foo::*
-            while self.token == token::MOD_SEP {
-                self.bump();
-
-                alt self.token {
-
-                  token::IDENT(i, _) {
-                    self.bump();
-                    path += [self.get_str(i)];
-                  }
-
-                  // foo::bar::{a,b,c}
-                  token::LBRACE {
-                    let idents =
-                        self.parse_seq(token::LBRACE, token::RBRACE,
-                                       seq_sep(token::COMMA),
-                                       {|p| p.parse_path_list_ident()}).node;
-                    let path = @{span: mk_sp(lo, self.span.hi),
-                                 global: false, idents: path,
-                                 rp: none, types: []};
-                    ret @spanned(lo, self.span.hi,
-                                 view_path_list(path, idents, self.get_id()));
-                  }
-
-                  // foo::bar::*
-                  token::BINOP(token::STAR) {
-                    self.bump();
-                    let path = @{span: mk_sp(lo, self.span.hi),
-                                 global: false, idents: path,
-                                 rp: none, types: []};
-                    ret @spanned(lo, self.span.hi,
-                                 view_path_glob(path, self.get_id()));
-                  }
-
-                  _ { break; }
-                }
-            }
-          }
-          _ { }
-        }
-        let last = path[vec::len(path) - 1u];
-        let path = @{span: mk_sp(lo, self.span.hi), global: false,
-                     idents: path, rp: none, types: []};
-        ret @spanned(lo, self.span.hi,
-                     view_path_simple(last, path, self.get_id()));
-    }
-
-    fn parse_view_paths() -> [@view_path] {
-        let mut vp = [self.parse_view_path()];
-        while self.token == token::COMMA {
-            self.bump();
-            vp += [self.parse_view_path()];
-        }
-        ret vp;
-    }
-
-    fn is_view_item() -> bool {
-        let tok = if !self.is_keyword("pub") && !self.is_keyword("priv") {
-            self.token
-        } else { self.look_ahead(1u) };
-        self.token_is_keyword("use", tok)
-            || self.token_is_keyword("import", tok)
-            || self.token_is_keyword("export", tok)
-    }
-
-    fn parse_view_item(+attrs: [attribute]) -> @view_item {
-        let lo = self.span.lo, vis = self.parse_visibility(private);
-        let node = if self.eat_keyword("use") {
-            self.parse_use()
-        } else if self.eat_keyword("import") {
-            view_item_import(self.parse_view_paths())
-        } else if self.eat_keyword("export") {
-            view_item_export(self.parse_view_paths())
-        } else { fail; };
-        self.expect(token::SEMI);
-        @{node: node, attrs: attrs,
-          vis: vis, span: mk_sp(lo, self.last_span.hi)}
-    }
-
-    fn parse_view(+first_item_attrs: [attribute],
-                  only_imports: bool) -> {attrs_remaining: [attribute],
-                                          view_items: [@view_item]} {
-        let mut attrs = first_item_attrs + self.parse_outer_attributes();
-        let mut items = [];
-        while if only_imports { self.is_keyword("import") }
-        else { self.is_view_item() } {
-            items += [self.parse_view_item(attrs)];
-            attrs = self.parse_outer_attributes();
-        }
-        {attrs_remaining: attrs, view_items: items}
-    }
-
-    // Parses a source module as a crate
-    fn parse_crate_mod(_cfg: crate_cfg) -> @crate {
-        let lo = self.span.lo;
-        let crate_attrs = self.parse_inner_attrs_and_next();
-        let first_item_outer_attrs = crate_attrs.next;
-        let m = self.parse_mod_items(token::EOF, first_item_outer_attrs);
-        ret @spanned(lo, self.span.lo,
-                     {directives: [],
-                      module: m,
-                      attrs: crate_attrs.inner,
-                      config: self.cfg});
-    }
-
-    fn parse_str() -> str {
-        alt self.token {
-          token::LIT_STR(s) { self.bump(); self.get_str(s) }
-          _ {
-            self.fatal("expected string literal")
-          }
-        }
-    }
-
-    // Logic for parsing crate files (.rc)
-    //
-    // Each crate file is a sequence of directives.
-    //
-    // Each directive imperatively extends its environment with 0 or more
-    // items.
-    fn parse_crate_directive(first_outer_attr: [attribute]) ->
-        crate_directive {
-
-        // Collect the next attributes
-        let outer_attrs = first_outer_attr + self.parse_outer_attributes();
-        // In a crate file, outer attributes only apply to mods
-        let expect_mod = vec::len(outer_attrs) > 0u;
-
-        let lo = self.span.lo;
-        if expect_mod || self.is_keyword("mod") {
-            self.expect_keyword("mod");
-            let id = self.parse_ident();
-            alt self.token {
-              // mod x = "foo.rs";
-              token::SEMI {
-                let mut hi = self.span.hi;
-                self.bump();
-                ret spanned(lo, hi, cdir_src_mod(id, outer_attrs));
-              }
-              // mod x = "foo_dir" { ...directives... }
-              token::LBRACE {
-                self.bump();
-                let inner_attrs = self.parse_inner_attrs_and_next();
-                let mod_attrs = outer_attrs + inner_attrs.inner;
-                let next_outer_attr = inner_attrs.next;
-                let cdirs = self.parse_crate_directives(token::RBRACE,
-                                                        next_outer_attr);
-                let mut hi = self.span.hi;
-                self.expect(token::RBRACE);
-                ret spanned(lo, hi,
-                            cdir_dir_mod(id, cdirs, mod_attrs));
-              }
-              _ { self.unexpected(); }
-            }
-        } else if self.is_view_item() {
-            let vi = self.parse_view_item(outer_attrs);
-            ret spanned(lo, vi.span.hi, cdir_view_item(vi));
-        } else { ret self.fatal("expected crate directive"); }
-    }
-
-    fn parse_crate_directives(term: token::token,
-                              first_outer_attr: [attribute]) ->
-        [@crate_directive] {
-
-        // This is pretty ugly. If we have an outer attribute then we can't
-        // accept seeing the terminator next, so if we do see it then fail the
-        // same way parse_crate_directive would
-        if vec::len(first_outer_attr) > 0u && self.token == term {
-            self.expect_keyword("mod");
-        }
-
-        let mut cdirs: [@crate_directive] = [];
-        let mut first_outer_attr = first_outer_attr;
-        while self.token != term {
-            let cdir = @self.parse_crate_directive(first_outer_attr);
-            cdirs += [cdir];
-            first_outer_attr = [];
-        }
-        ret cdirs;
-    }
-}
-//
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
-//
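Editorial aside: the EQ, MOD_SEP/IDENT, MOD_SEP/LBRACE and MOD_SEP/STAR branches of parse_view_path above correspond to the three surface forms of the day: import x = foo::bar; import foo::bar::{a, b, c}; and import foo::bar::*;. As a rough illustration only (modern Rust, hypothetical names, not the original data structures), the three resulting view-path shapes can be modeled like this:

    // Illustrative sketch only: the three view-path shapes that
    // parse_view_path distinguishes, modeled as a modern-Rust enum.
    #[derive(Debug)]
    enum ViewPath {
        Simple { binding: String, path: Vec<String> },    // import x = foo::bar;
        List { prefix: Vec<String>, items: Vec<String> }, // import foo::bar::{a, b, c};
        Glob { prefix: Vec<String> },                      // import foo::bar::*;
    }

    fn main() {
        let owned = |xs: &[&str]| xs.iter().map(|s| s.to_string()).collect::<Vec<_>>();
        let simple = ViewPath::Simple { binding: "x".into(), path: owned(&["foo", "bar"]) };
        let list = ViewPath::List { prefix: owned(&["foo", "bar"]), items: owned(&["a", "b", "c"]) };
        let glob = ViewPath::Glob { prefix: owned(&["foo", "bar"]) };
        println!("{:?}\n{:?}\n{:?}", simple, list, glob);
    }
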
diff --git a/src/librustsyntax/parse/prec.rs b/src/librustsyntax/parse/prec.rs
deleted file mode 100644
index e2e35447af3..00000000000
--- a/src/librustsyntax/parse/prec.rs
+++ /dev/null
@@ -1,43 +0,0 @@
-export as_prec;
-export unop_prec;
-export token_to_binop;
-
-import token::*;
-import token::token;
-import ast::*;
-
-#[doc = "Unary operators have higher precedence than binary"]
-const unop_prec: uint = 100u;
-
-#[doc = "
-Precedence of the `as` operator, which is a binary operator
-but is not represented in the precedence table.
-"]
-const as_prec: uint = 11u;
-
-#[doc = "Maps a token to a record specifying the corresponding binary
-         operator and its precedence"]
-fn token_to_binop(tok: token) -> option<ast::binop> {
-  alt tok {
-      BINOP(STAR)    { some(mul) }
-      BINOP(SLASH)   { some(div) }
-      BINOP(PERCENT) { some(rem) }
-      // 'as' sits between these two groups, with precedence 11
-      BINOP(PLUS)    { some(add) }
-      BINOP(MINUS)   { some(subtract) }
-      BINOP(SHL)     { some(shl) }
-      BINOP(SHR)     { some(shr) }
-      BINOP(AND)     { some(bitand) }
-      BINOP(CARET)   { some(bitxor) }
-      BINOP(OR)      { some(bitor) }
-      LT             { some(lt) }
-      LE             { some(le) }
-      GE             { some(ge) }
-      GT             { some(gt) }
-      EQEQ           { some(eq) }
-      NE             { some(ne) }
-      ANDAND         { some(and) }
-      OROR           { some(or) }
-      _              { none }
-  }
-}
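Editorial aside: token_to_binop above only identifies which binary operator a token denotes; the numeric precedence levels themselves live elsewhere (the pretty-printer below imports ast_util::operator_prec for the same purpose), with the as operator slotted in separately at as_prec = 11. As a sketch of how a precedence-climbing parser consumes such a table (modern Rust, a toy integer-only grammar, and illustrative precedence numbers rather than rustc's real ones):

    // Sketch only: precedence climbing over a flat token list.
    fn prec(op: char) -> u32 {
        match op {
            '*' | '/' | '%' => 12,
            // as_prec (11) would sit between these two groups
            '+' | '-' => 10,
            _ => 0, // not a binary operator this sketch handles
        }
    }

    fn parse_expr(toks: &[&str], pos: &mut usize, min_prec: u32) -> i64 {
        let mut lhs: i64 = toks[*pos].parse().unwrap();
        *pos += 1;
        while *pos < toks.len() {
            let op = toks[*pos].chars().next().unwrap();
            let p = prec(op);
            if p <= min_prec { break; }         // binds too loosely; hand it back to the caller
            *pos += 1;
            let rhs = parse_expr(toks, pos, p); // the strict '>' above makes this left-associative
            lhs = match op {
                '*' => lhs * rhs, '/' => lhs / rhs, '%' => lhs % rhs,
                '+' => lhs + rhs, '-' => lhs - rhs,
                _ => unreachable!(),
            };
        }
        lhs
    }

    fn main() {
        let toks = ["2", "+", "3", "*", "4"];
        assert_eq!(parse_expr(&toks, &mut 0, 0), 14); // '*' outranks '+'
    }
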
diff --git a/src/librustsyntax/parse/token.rs b/src/librustsyntax/parse/token.rs
deleted file mode 100644
index 1c6f240cf82..00000000000
--- a/src/librustsyntax/parse/token.rs
+++ /dev/null
@@ -1,286 +0,0 @@
-
-import util::interner;
-import util::interner::interner;
-import std::map::{hashmap, str_hash};
-
-type str_num = uint;
-
-enum binop {
-    PLUS,
-    MINUS,
-    STAR,
-    SLASH,
-    PERCENT,
-    CARET,
-    AND,
-    OR,
-    SHL,
-    SHR,
-}
-
-enum token {
-    /* Expression-operator symbols. */
-    EQ,
-    LT,
-    LE,
-    EQEQ,
-    NE,
-    GE,
-    GT,
-    ANDAND,
-    OROR,
-    NOT,
-    TILDE,
-    BINOP(binop),
-    BINOPEQ(binop),
-
-    /* Structural symbols */
-    AT,
-    DOT,
-    ELLIPSIS,
-    COMMA,
-    SEMI,
-    COLON,
-    MOD_SEP,
-    RARROW,
-    LARROW,
-    DARROW,
-    LPAREN,
-    RPAREN,
-    LBRACKET,
-    RBRACKET,
-    LBRACE,
-    RBRACE,
-    POUND,
-    DOLLAR,
-
-    /* Literals */
-    LIT_INT(i64, ast::int_ty),
-    LIT_UINT(u64, ast::uint_ty),
-    LIT_FLOAT(str_num, ast::float_ty),
-    LIT_STR(str_num),
-
-    /* Name components */
-    IDENT(str_num, bool),
-    UNDERSCORE,
-    EOF,
-
-}
-
-fn binop_to_str(o: binop) -> str {
-    alt o {
-      PLUS { ret "+"; }
-      MINUS { ret "-"; }
-      STAR { ret "*"; }
-      SLASH { ret "/"; }
-      PERCENT { ret "%"; }
-      CARET { ret "^"; }
-      AND { ret "&"; }
-      OR { ret "|"; }
-      SHL { ret "<<"; }
-      SHR { ret ">>"; }
-    }
-}
-
-fn to_str(in: interner<str>, t: token) -> str {
-    alt t {
-      EQ { ret "="; }
-      LT { ret "<"; }
-      LE { ret "<="; }
-      EQEQ { ret "=="; }
-      NE { ret "!="; }
-      GE { ret ">="; }
-      GT { ret ">"; }
-      NOT { ret "!"; }
-      TILDE { ret "~"; }
-      OROR { ret "||"; }
-      ANDAND { ret "&&"; }
-      BINOP(op) { ret binop_to_str(op); }
-      BINOPEQ(op) { ret binop_to_str(op) + "="; }
-
-      /* Structural symbols */
-      AT {
-        ret "@";
-      }
-      DOT { ret "."; }
-      ELLIPSIS { ret "..."; }
-      COMMA { ret ","; }
-      SEMI { ret ";"; }
-      COLON { ret ":"; }
-      MOD_SEP { ret "::"; }
-      RARROW { ret "->"; }
-      LARROW { ret "<-"; }
-      DARROW { ret "<->"; }
-      LPAREN { ret "("; }
-      RPAREN { ret ")"; }
-      LBRACKET { ret "["; }
-      RBRACKET { ret "]"; }
-      LBRACE { ret "{"; }
-      RBRACE { ret "}"; }
-      POUND { ret "#"; }
-      DOLLAR { ret "$"; }
-
-      /* Literals */
-      LIT_INT(c, ast::ty_char) {
-        // FIXME: escape.
-        let mut tmp = "'";
-        str::push_char(tmp, c as char);
-        str::push_char(tmp, '\'');
-        ret tmp;
-      }
-      LIT_INT(i, t) {
-        ret int::to_str(i as int, 10u) + ast_util::int_ty_to_str(t);
-      }
-      LIT_UINT(u, t) {
-        ret uint::to_str(u as uint, 10u) + ast_util::uint_ty_to_str(t);
-      }
-      LIT_FLOAT(s, t) {
-        ret interner::get::<str>(in, s) +
-            ast_util::float_ty_to_str(t);
-      }
-      LIT_STR(s) { // FIXME: escape.
-        ret "\"" + interner::get::<str>(in, s) + "\"";
-      }
-
-      /* Name components */
-      IDENT(s, _) {
-        ret interner::get::<str>(in, s);
-      }
-      UNDERSCORE { ret "_"; }
-      EOF { ret "<eof>"; }
-    }
-}
-
-
-pure fn can_begin_expr(t: token) -> bool {
-    alt t {
-      LPAREN { true }
-      LBRACE { true }
-      LBRACKET { true }
-      IDENT(_, _) { true }
-      UNDERSCORE { true }
-      TILDE { true }
-      LIT_INT(_, _) { true }
-      LIT_UINT(_, _) { true }
-      LIT_FLOAT(_, _) { true }
-      LIT_STR(_) { true }
-      POUND { true }
-      AT { true }
-      NOT { true }
-      BINOP(MINUS) { true }
-      BINOP(STAR) { true }
-      BINOP(AND) { true }
-      MOD_SEP { true }
-      _ { false }
-    }
-}
-
-fn is_lit(t: token::token) -> bool {
-    ret alt t {
-          token::LIT_INT(_, _) { true }
-          token::LIT_UINT(_, _) { true }
-          token::LIT_FLOAT(_, _) { true }
-          token::LIT_STR(_) { true }
-          _ { false }
-        }
-}
-
-fn is_ident(t: token::token) -> bool {
-    alt t { token::IDENT(_, _) { ret true; } _ { } }
-    ret false;
-}
-
-fn is_plain_ident(t: token::token) -> bool {
-    ret alt t { token::IDENT(_, false) { true } _ { false } };
-}
-
-fn is_bar(t: token::token) -> bool {
-    alt t { token::BINOP(token::OR) | token::OROR { true } _ { false } }
-}
-
-#[doc = "
-All the valid words that have meaning in the Rust language.
-
-Rust keywords are either 'contextual' or 'restricted'. Contextual
-keywords may be used as identifiers because their appearance in
-the grammar is unambiguous. Restricted keywords may not appear
-in positions that might otherwise contain _value identifiers_.
-"]
-fn keyword_table() -> hashmap<str, ()> {
-    let keywords = str_hash();
-    for contextual_keyword_table().each_key {|word|
-        keywords.insert(word, ());
-    }
-    for restricted_keyword_table().each_key {|word|
-        keywords.insert(word, ());
-    }
-    ret keywords;
-}
-
-#[doc = "Keywords that may be used as identifiers"]
-fn contextual_keyword_table() -> hashmap<str, ()> {
-    let words = str_hash();
-    let keys = [
-        "as",
-        "bind",
-        "else",
-        "implements",
-        "move",
-        "of",
-        "priv", "pub",
-        "self", "send", "static",
-        "to",
-        "use",
-        "with"
-    ];
-    for keys.each {|word|
-        words.insert(word, ());
-    }
-    words
-}
-
-#[doc = "
-Keywords that may not appear in any position that might otherwise contain a
-_value identifier_. Restricted keywords may still be used as other types of
-identifiers.
-
-Reasons:
-
-* For some (most?), if used at the start of a line, they will cause the line
-  to be interpreted as a specific kind of statement, which would be confusing.
-
-* `true` or `false` as identifiers would always be shadowed by
-  the boolean constants
-"]
-fn restricted_keyword_table() -> hashmap<str, ()> {
-    let words = str_hash();
-    let keys = [
-        "alt",
-        "assert",
-        "be", "break",
-        "check", "claim", "class", "const", "cont", "copy", "crust",
-        "drop",
-        "else", "enum", "export",
-        "fail", "false", "fn", "for",
-        "if", "iface", "impl", "import",
-        "let", "log", "loop",
-        "mod", "mut",
-        "native", "new",
-        "pure",
-        "resource", "ret",
-        "true", "trait", "type",
-        "unchecked", "unsafe",
-        "while"
-    ];
-    for keys.each {|word|
-        words.insert(word, ());
-    }
-    words
-}
-
-// Local Variables:
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
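Editorial aside: the contextual/restricted split documented above is the operative one for the parser: restricted keywords must be rejected wherever a value identifier is expected, while contextual keywords double as ordinary identifiers. A toy version of that check (modern Rust, abbreviated word lists, hypothetical function name):

    use std::collections::HashSet;

    // Sketch only: a tiny subset of the restricted-keyword table above.
    fn check_value_ident(word: &str) -> Result<&str, String> {
        let restricted: HashSet<&str> = HashSet::from(["alt", "fn", "let", "ret", "true"]);
        // Contextual keywords ("as", "self", "use", ...) are deliberately absent:
        // they are allowed to appear as value identifiers.
        if restricted.contains(word) {
            Err(format!("found keyword {} where an identifier was expected", word))
        } else {
            Ok(word)
        }
    }

    fn main() {
        assert!(check_value_ident("self").is_ok()); // contextual: usable as an ident
        assert!(check_value_ident("ret").is_err()); // restricted: rejected
    }
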
diff --git a/src/librustsyntax/print/pp.rs b/src/librustsyntax/print/pp.rs
deleted file mode 100644
index 882ce7b6461..00000000000
--- a/src/librustsyntax/print/pp.rs
+++ /dev/null
@@ -1,528 +0,0 @@
-import io::writer_util;
-import dvec::{dvec, extensions};
-
-/*
- * This pretty-printer is a direct reimplementation of Philip Karlton's
- * Mesa pretty-printer, as described in appendix A of
- *
- *     STAN-CS-79-770: "Pretty Printing", by Derek C. Oppen.
- *     Stanford Department of Computer Science, 1979.
- *
- * The algorithm's aim is to break a stream into as few lines as possible
- * while respecting the indentation-consistency requirements of the enclosing
- * block, and avoiding breaking at silly places on block boundaries, for
- * example, between "x" and ")" in "x)".
- *
- * I am implementing this algorithm because it comes with 20 pages of
- * documentation explaining its theory, and because it addresses the set of
- * concerns I've seen other pretty-printers fall down on. Weirdly. Even though
- * it's 32 years old and not written in Haskell. What can I say?
- *
- * Despite some redundancies and quirks in the way it's implemented in that
- * paper, I've opted to keep the implementation here as similar as I can,
- * changing only what was blatantly wrong, a typo, or sufficiently
- * non-idiomatic rust that it really stuck out.
- *
- * In particular you'll see a certain amount of churn related to INTEGER vs.
- * CARDINAL in the Mesa implementation. Mesa apparently interconverts the two
- * somewhat readily? In any case, I've used uint for indices-in-buffers and
- * ints for character-sizes-and-indentation-offsets. This respects the need
- * for ints to "go negative" while carrying a pending-calculation balance, and
- * helps differentiate all the numbers flying around internally (slightly).
- *
- * I also inverted the indentation arithmetic used in the print stack, since
- * the Mesa implementation (somewhat randomly) stores the offset on the print
- * stack in terms of margin-col rather than col itself. I store col.
- *
- * I also implemented a small change in the STRING token, in that I store an
- * explicit length for the string. For most tokens this is just the length of
- * the accompanying string. But it's necessary to permit it to differ, for
- * encoding things that are supposed to "go on their own line" -- certain
- * classes of comment and blank-line -- where relying on adjacent
- * hardbreak-like BREAK tokens with long blankness indication doesn't actually
- * work. To see why, consider when there is a "thing that should be on its own
- * line" between two long blocks, say functions. If you put a hardbreak after
- * each function (or before each) and the breaking algorithm decides to break
- * there anyways (because the functions themselves are long) you wind up with
- * extra blank lines. If you don't put hardbreaks you can wind up with the
- * "thing which should be on its own line" not getting its own line in the
- * rare case of "really small functions" or such. This re-occurs with comments
- * and explicit blank lines. So in those cases we use a string with a payload
- * we want isolated to a line and an explicit length that's huge, surrounded
- * by two zero-length breaks. The algorithm will try its best to fit it on a
- * line (which it can't) and so naturally place the content on its own line to
- * avoid combining it with other lines and making matters even worse.
- */
-enum breaks { consistent, inconsistent, }
-
-type break_t = {offset: int, blank_space: int};
-
-type begin_t = {offset: int, breaks: breaks};
-
-enum token { STRING(str, int), BREAK(break_t), BEGIN(begin_t), END, EOF, }
-
-fn tok_str(t: token) -> str {
-    alt t {
-      STRING(s, len) { ret #fmt["STR(%s,%d)", s, len]; }
-      BREAK(_) { ret "BREAK"; }
-      BEGIN(_) { ret "BEGIN"; }
-      END { ret "END"; }
-      EOF { ret "EOF"; }
-    }
-}
-
-fn buf_str(toks: [mut token], szs: [mut int], left: uint, right: uint,
-           lim: uint) -> str {
-    let n = vec::len(toks);
-    assert (n == vec::len(szs));
-    let mut i = left;
-    let mut L = lim;
-    let mut s = "[";
-    while i != right && L != 0u {
-        L -= 1u;
-        if i != left { s += ", "; }
-        s += #fmt["%d=%s", szs[i], tok_str(toks[i])];
-        i += 1u;
-        i %= n;
-    }
-    s += "]";
-    ret s;
-}
-
-enum print_stack_break { fits, broken(breaks), }
-
-type print_stack_elt = {offset: int, pbreak: print_stack_break};
-
-const size_infinity: int = 0xffff;
-
-fn mk_printer(out: io::writer, linewidth: uint) -> printer {
-    // Yes 3, it makes the ring buffers big enough to never
-    // fall behind.
-    let n: uint = 3u * linewidth;
-    #debug("mk_printer %u", linewidth);
-    let token: [mut token] = vec::to_mut(vec::from_elem(n, EOF));
-    let size: [mut int] = vec::to_mut(vec::from_elem(n, 0));
-    let scan_stack: [mut uint] = vec::to_mut(vec::from_elem(n, 0u));
-    @{out: out,
-      buf_len: n,
-      mut margin: linewidth as int,
-      mut space: linewidth as int,
-      mut left: 0u,
-      mut right: 0u,
-      mut token: token,
-      mut size: size,
-      mut left_total: 0,
-      mut right_total: 0,
-      mut scan_stack: scan_stack,
-      mut scan_stack_empty: true,
-      mut top: 0u,
-      mut bottom: 0u,
-      print_stack: dvec(),
-      mut pending_indentation: 0}
-}
-
-
-/*
- * In case you do not have the paper, here is an explanation of what's going
- * on.
- *
- * There is a stream of input tokens flowing through this printer.
- *
- * The printer buffers up to 3N tokens inside itself, where N is linewidth.
- * Yes, linewidth is chars and tokens are multi-char, but in the worst
- * case every token worth buffering is 1 char long, so it's ok.
- *
- * Tokens are STRING, BREAK, and BEGIN/END to delimit blocks.
- *
- * BEGIN tokens can carry an offset, saying "how far to indent when you break
- * inside here", as well as a flag indicating "consistent" or "inconsistent"
- * breaking. Consistent breaking means that after the first break, no attempt
- * will be made to flow subsequent breaks together onto lines. Inconsistent
- * is the opposite. Inconsistent breaking example would be, say:
- *
- *  foo(hello, there, good, friends)
- *
- * breaking inconsistently to become
- *
- *  foo(hello, there,
- *      good, friends);
- *
- * whereas a consistent breaking would yield:
- *
- *  foo(hello,
- *      there,
- *      good,
- *      friends);
- *
- * That is, in the consistent-break blocks we value vertical alignment
- * more than the ability to cram stuff onto a line. But in all cases if it
- * can make a block a one-liner, it'll do so.
- *
- * Carrying on with high-level logic:
- *
- * The buffered tokens go through a ring-buffer, 'tokens'. The 'left' and
- * 'right' indices denote the active portion of the ring buffer as well as
- * describing hypothetical points-in-the-infinite-stream at most 3N tokens
- * apart (i.e. "not wrapped to ring-buffer boundaries"). The paper will switch
- * between using 'left' and 'right' terms to denote the wrapped-to-ring-buffer
- * and point-in-infinite-stream senses freely.
- *
- * There is a parallel ring buffer, 'size', that holds the calculated size of
- * each token. Why calculated? Because for BEGIN/END pairs, the "size"
- * includes everything between the pair. That is, the "size" of BEGIN is
- * actually the sum of the sizes of everything between BEGIN and the paired
- * END that follows. Since that is arbitrarily far in the future, 'size' is
- * being rewritten regularly while the printer runs; in fact most of the
- * machinery is here to work out 'size' entries on the fly (and give up when
- * they're so obviously over-long that "infinity" is a good enough
- * approximation for purposes of line breaking).
- *
- * The "input side" of the printer is managed as an abstract process called
- * SCAN, which uses 'scan_stack', 'scan_stack_empty', 'top' and 'bottom', to
- * manage calculating 'size'. SCAN is, in other words, the process of
- * calculating 'size' entries.
- *
- * The "output side" of the printer is managed by an abstract process called
- * PRINT, which uses 'print_stack', 'margin' and 'space' to figure out what to
- * do with each token/size pair it consumes as it goes. It's trying to consume
- * the entire buffered window, but can't output anything until the size is >=
- * 0 (sizes are set to negative while they're pending calculation).
- *
- * So SCAN takes input and buffers tokens and pending calculations, while
- * PRINT gobbles up completed calculations and tokens from the buffer. The
- * theory is that the two can never get more than 3N tokens apart, because
- * once there's "obviously" too much data to fit on a line, in a size
- * calculation, SCAN will write "infinity" to the size and let PRINT consume
- * it.
- *
- * In this implementation (following the paper, again) the SCAN process is
- * the method called 'pretty_print', and the 'PRINT' process is the method
- * called 'print'.
- */
-type printer = @{
-    out: io::writer,
-    buf_len: uint,
-    mut margin: int, // width of lines we're constrained to
-    mut space: int, // number of spaces left on line
-    mut left: uint, // index of left side of input stream
-    mut right: uint, // index of right side of input stream
-    mut token: [mut token], // ring-buffer the stream goes through
-    mut size: [mut int], // ring-buffer of calculated sizes
-    mut left_total: int, // running size of stream "...left"
-    mut right_total: int, // running size of stream "...right"
-    // pseudo-stack, really a ring too. Holds the
-    // primary-ring-buffers index of the BEGIN that started the
-    // current block, possibly with the most recent BREAK after that
-    // BEGIN (if there is any) on top of it. Stuff is flushed off the
-    // bottom as it becomes irrelevant due to the primary ring-buffer
-    // advancing.
-    mut scan_stack: [mut uint],
-    mut scan_stack_empty: bool, // top==bottom disambiguator
-    mut top: uint, // index of top of scan_stack
-    mut bottom: uint, // index of bottom of scan_stack
-    // stack of blocks-in-progress being flushed by print
-    print_stack: dvec<print_stack_elt>,
-    // buffered indentation to avoid writing trailing whitespace
-    mut pending_indentation: int
-};
-
-impl printer for printer {
-    fn last_token() -> token { self.token[self.right] }
-    // be very careful with this!
-    fn replace_last_token(t: token) { self.token[self.right] = t; }
-    fn pretty_print(t: token) {
-        #debug("pp [%u,%u]", self.left, self.right);
-        alt t {
-          EOF {
-            if !self.scan_stack_empty {
-                self.check_stack(0);
-                self.advance_left(self.token[self.left],
-                                  self.size[self.left]);
-            }
-            self.indent(0);
-          }
-          BEGIN(b) {
-            if self.scan_stack_empty {
-                self.left_total = 1;
-                self.right_total = 1;
-                self.left = 0u;
-                self.right = 0u;
-            } else { self.advance_right(); }
-            #debug("pp BEGIN/buffer [%u,%u]", self.left, self.right);
-            self.token[self.right] = t;
-            self.size[self.right] = -self.right_total;
-            self.scan_push(self.right);
-          }
-          END {
-            if self.scan_stack_empty {
-                #debug("pp END/print [%u,%u]", self.left, self.right);
-                self.print(t, 0);
-            } else {
-                #debug("pp END/buffer [%u,%u]", self.left, self.right);
-                self.advance_right();
-                self.token[self.right] = t;
-                self.size[self.right] = -1;
-                self.scan_push(self.right);
-            }
-          }
-          BREAK(b) {
-            if self.scan_stack_empty {
-                self.left_total = 1;
-                self.right_total = 1;
-                self.left = 0u;
-                self.right = 0u;
-            } else { self.advance_right(); }
-            #debug("pp BREAK/buffer [%u,%u]", self.left, self.right);
-            self.check_stack(0);
-            self.scan_push(self.right);
-            self.token[self.right] = t;
-            self.size[self.right] = -self.right_total;
-            self.right_total += b.blank_space;
-          }
-          STRING(s, len) {
-            if self.scan_stack_empty {
-                #debug("pp STRING/print [%u,%u]", self.left, self.right);
-                self.print(t, len);
-            } else {
-                #debug("pp STRING/buffer [%u,%u]", self.left, self.right);
-                self.advance_right();
-                self.token[self.right] = t;
-                self.size[self.right] = len;
-                self.right_total += len;
-                self.check_stream();
-            }
-          }
-        }
-    }
-    fn check_stream() {
-        #debug("check_stream [%u, %u] with left_total=%d, right_total=%d",
-               self.left, self.right, self.left_total, self.right_total);
-        if self.right_total - self.left_total > self.space {
-            #debug("scan window is %d, longer than space on line (%d)",
-                   self.right_total - self.left_total, self.space);
-            if !self.scan_stack_empty {
-                if self.left == self.scan_stack[self.bottom] {
-                    #debug("setting %u to infinity and popping", self.left);
-                    self.size[self.scan_pop_bottom()] = size_infinity;
-                }
-            }
-            self.advance_left(self.token[self.left], self.size[self.left]);
-            if self.left != self.right { self.check_stream(); }
-        }
-    }
-    fn scan_push(x: uint) {
-        #debug("scan_push %u", x);
-        if self.scan_stack_empty {
-            self.scan_stack_empty = false;
-        } else {
-            self.top += 1u;
-            self.top %= self.buf_len;
-            assert (self.top != self.bottom);
-        }
-        self.scan_stack[self.top] = x;
-    }
-    fn scan_pop() -> uint {
-        assert (!self.scan_stack_empty);
-        let x = self.scan_stack[self.top];
-        if self.top == self.bottom {
-            self.scan_stack_empty = true;
-        } else { self.top += self.buf_len - 1u; self.top %= self.buf_len; }
-        ret x;
-    }
-    fn scan_top() -> uint {
-        assert (!self.scan_stack_empty);
-        ret self.scan_stack[self.top];
-    }
-    fn scan_pop_bottom() -> uint {
-        assert (!self.scan_stack_empty);
-        let x = self.scan_stack[self.bottom];
-        if self.top == self.bottom {
-            self.scan_stack_empty = true;
-        } else { self.bottom += 1u; self.bottom %= self.buf_len; }
-        ret x;
-    }
-    fn advance_right() {
-        self.right += 1u;
-        self.right %= self.buf_len;
-        assert (self.right != self.left);
-    }
-    fn advance_left(x: token, L: int) {
-        #debug("advnce_left [%u,%u], sizeof(%u)=%d", self.left, self.right,
-               self.left, L);
-        if L >= 0 {
-            self.print(x, L);
-            alt x {
-              BREAK(b) { self.left_total += b.blank_space; }
-              STRING(_, len) { assert (len == L); self.left_total += len; }
-              _ { }
-            }
-            if self.left != self.right {
-                self.left += 1u;
-                self.left %= self.buf_len;
-                self.advance_left(self.token[self.left],
-                                  self.size[self.left]);
-            }
-        }
-    }
-    fn check_stack(k: int) {
-        if !self.scan_stack_empty {
-            let x = self.scan_top();
-            alt self.token[x] {
-              BEGIN(b) {
-                if k > 0 {
-                    self.size[self.scan_pop()] = self.size[x] +
-                        self.right_total;
-                    self.check_stack(k - 1);
-                }
-              }
-              END {
-                // paper says + not =, but that makes no sense.
-                self.size[self.scan_pop()] = 1;
-                self.check_stack(k + 1);
-              }
-              _ {
-                self.size[self.scan_pop()] = self.size[x] + self.right_total;
-                if k > 0 { self.check_stack(k); }
-              }
-            }
-        }
-    }
-    fn print_newline(amount: int) {
-        #debug("NEWLINE %d", amount);
-        self.out.write_str("\n");
-        self.pending_indentation = 0;
-        self.indent(amount);
-    }
-    fn indent(amount: int) {
-        #debug("INDENT %d", amount);
-        self.pending_indentation += amount;
-    }
-    fn get_top() -> print_stack_elt {
-        let n = self.print_stack.len();
-        if n != 0u {
-            self.print_stack[n - 1u]
-        } else {
-            {offset: 0, pbreak: broken(inconsistent)}
-        }
-    }
-    fn write_str(s: str) {
-        while self.pending_indentation > 0 {
-            self.out.write_str(" ");
-            self.pending_indentation -= 1;
-        }
-        self.out.write_str(s);
-    }
-    fn print(x: token, L: int) {
-        #debug("print %s %d (remaining line space=%d)", tok_str(x), L,
-               self.space);
-        log(debug, buf_str(self.token, self.size, self.left, self.right, 6u));
-        alt x {
-          BEGIN(b) {
-            if L > self.space {
-                let col = self.margin - self.space + b.offset;
-                #debug("print BEGIN -> push broken block at col %d", col);
-                self.print_stack.push({offset: col,
-                                       pbreak: broken(b.breaks)});
-            } else {
-                #debug("print BEGIN -> push fitting block");
-                self.print_stack.push({offset: 0,
-                                       pbreak: fits});
-            }
-          }
-          END {
-            #debug("print END -> pop END");
-            assert (self.print_stack.len() != 0u);
-            self.print_stack.pop();
-          }
-          BREAK(b) {
-            let top = self.get_top();
-            alt top.pbreak {
-              fits {
-                #debug("print BREAK in fitting block");
-                self.space -= b.blank_space;
-                self.indent(b.blank_space);
-              }
-              broken(consistent) {
-                #debug("print BREAK in consistent block");
-                self.print_newline(top.offset + b.offset);
-                self.space = self.margin - (top.offset + b.offset);
-              }
-              broken(inconsistent) {
-                if L > self.space {
-                    #debug("print BREAK w/ newline in inconsistent");
-                    self.print_newline(top.offset + b.offset);
-                    self.space = self.margin - (top.offset + b.offset);
-                } else {
-                    #debug("print BREAK w/o newline in inconsistent");
-                    self.indent(b.blank_space);
-                    self.space -= b.blank_space;
-                }
-              }
-            }
-          }
-          STRING(s, len) {
-            #debug("print STRING");
-            assert (L == len);
-            // assert L <= space;
-            self.space -= len;
-            self.write_str(s);
-          }
-          EOF {
-            // EOF should never get here.
-            fail;
-          }
-        }
-    }
-}
-
-// Convenience functions to talk to the printer.
-fn box(p: printer, indent: uint, b: breaks) {
-    p.pretty_print(BEGIN({offset: indent as int, breaks: b}));
-}
-
-fn ibox(p: printer, indent: uint) { box(p, indent, inconsistent); }
-
-fn cbox(p: printer, indent: uint) { box(p, indent, consistent); }
-
-fn break_offset(p: printer, n: uint, off: int) {
-    p.pretty_print(BREAK({offset: off, blank_space: n as int}));
-}
-
-fn end(p: printer) { p.pretty_print(END); }
-
-fn eof(p: printer) { p.pretty_print(EOF); }
-
-fn word(p: printer, wrd: str) {
-    p.pretty_print(STRING(wrd, str::len(wrd) as int));
-}
-
-fn huge_word(p: printer, wrd: str) {
-    p.pretty_print(STRING(wrd, size_infinity));
-}
-
-fn zero_word(p: printer, wrd: str) { p.pretty_print(STRING(wrd, 0)); }
-
-fn spaces(p: printer, n: uint) { break_offset(p, n, 0); }
-
-fn zerobreak(p: printer) { spaces(p, 0u); }
-
-fn space(p: printer) { spaces(p, 1u); }
-
-fn hardbreak(p: printer) { spaces(p, size_infinity as uint); }
-
-fn hardbreak_tok_offset(off: int) -> token {
-    ret BREAK({offset: off, blank_space: size_infinity});
-}
-
-fn hardbreak_tok() -> token { ret hardbreak_tok_offset(0); }
-
-
-//
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
-//
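Editorial aside: the heart of the SCAN side described above is the size bookkeeping. A BEGIN initially records the negative of the running total (right_total), and when its matching END arrives the new total is added back, leaving exactly the width of everything enclosed. A stripped-down, offline sketch of that calculation (modern Rust; no ring buffer, no size_infinity cut-off, whole stream held in memory):

    // Sketch only: compute block sizes the way SCAN does, but in one pass
    // over a fully buffered stream.
    enum Tok { Str(usize), Brk(usize), Begin, End }

    fn sizes(toks: &[Tok]) -> Vec<isize> {
        let mut out = vec![0isize; toks.len()];
        let mut open = Vec::new();   // indices of BEGINs still awaiting their END
        let mut total = 0isize;      // running width so far ("right_total")
        for (i, t) in toks.iter().enumerate() {
            match t {
                Tok::Str(w) | Tok::Brk(w) => { out[i] = *w as isize; total += *w as isize; }
                Tok::Begin => { out[i] = -total; open.push(i); }      // pending: negative
                Tok::End => { let b = open.pop().unwrap(); out[b] += total; }
            }
        }
        out
    }

    fn main() {
        // BEGIN "foo(" BREAK(1) "bar" END: the BEGIN's size is 4 + 1 + 3 = 8,
        // which PRINT would compare against the space left on the line.
        let toks = [Tok::Begin, Tok::Str(4), Tok::Brk(1), Tok::Str(3), Tok::End];
        assert_eq!(sizes(&toks)[0], 8);
    }

The real printer does the same thing incrementally, which is why sizes sit in the buffer as negative numbers while pending and why check_stream stamps size_infinity onto a block as soon as it is obviously too wide to fit.
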
diff --git a/src/librustsyntax/print/pprust.rs b/src/librustsyntax/print/pprust.rs
deleted file mode 100644
index 8206bfd2a4a..00000000000
--- a/src/librustsyntax/print/pprust.rs
+++ /dev/null
@@ -1,1861 +0,0 @@
-import parse::classify::*;
-import parse::comments;
-import parse::lexer;
-import codemap::codemap;
-import pp::{break_offset, word, printer,
-            space, zerobreak, hardbreak, breaks, consistent,
-            inconsistent, eof};
-import diagnostic;
-import ast_util::operator_prec;
-import dvec::{dvec, extensions};
-
-// The ps is stored here to prevent a recursive type.
-enum ann_node {
-    node_block(ps, ast::blk),
-    node_item(ps, @ast::item),
-    node_expr(ps, @ast::expr),
-    node_pat(ps, @ast::pat),
-}
-type pp_ann = {pre: fn@(ann_node), post: fn@(ann_node)};
-
-fn no_ann() -> pp_ann {
-    fn ignore(_node: ann_node) { }
-    ret {pre: ignore, post: ignore};
-}
-
-type ps =
-    @{s: pp::printer,
-      cm: option<codemap>,
-      comments: option<[comments::cmnt]>,
-      literals: option<[comments::lit]>,
-      mut cur_cmnt: uint,
-      mut cur_lit: uint,
-      boxes: dvec<pp::breaks>,
-      ann: pp_ann};
-
-fn ibox(s: ps, u: uint) {
-    s.boxes.push(pp::inconsistent);
-    pp::ibox(s.s, u);
-}
-
-fn end(s: ps) {
-    s.boxes.pop();
-    pp::end(s.s);
-}
-
-fn rust_printer(writer: io::writer) -> ps {
-    ret @{s: pp::mk_printer(writer, default_columns),
-          cm: none::<codemap>,
-          comments: none::<[comments::cmnt]>,
-          literals: none::<[comments::lit]>,
-          mut cur_cmnt: 0u,
-          mut cur_lit: 0u,
-          boxes: dvec(),
-          ann: no_ann()};
-}
-
-const indent_unit: uint = 4u;
-const alt_indent_unit: uint = 2u;
-
-const default_columns: uint = 78u;
-
-// Requires you to pass an input filename and reader so that
-// it can scan the input text for comments and literals to
-// copy forward.
-fn print_crate(cm: codemap, span_diagnostic: diagnostic::span_handler,
-               crate: @ast::crate, filename: str, in: io::reader,
-               out: io::writer, ann: pp_ann) {
-    let r = comments::gather_comments_and_literals(span_diagnostic,
-                                                   filename, in);
-    let s =
-        @{s: pp::mk_printer(out, default_columns),
-          cm: some(cm),
-          comments: some(r.cmnts),
-          literals: some(r.lits),
-          mut cur_cmnt: 0u,
-          mut cur_lit: 0u,
-          boxes: dvec(),
-          ann: ann};
-    print_crate_(s, crate);
-}
-
-fn print_crate_(s: ps, &&crate: @ast::crate) {
-    print_mod(s, crate.node.module, crate.node.attrs);
-    print_remaining_comments(s);
-    eof(s.s);
-}
-
-fn ty_to_str(ty: @ast::ty) -> str { ret to_str(ty, print_type); }
-
-fn pat_to_str(pat: @ast::pat) -> str { ret to_str(pat, print_pat); }
-
-fn expr_to_str(e: @ast::expr) -> str { ret to_str(e, print_expr); }
-
-fn stmt_to_str(s: ast::stmt) -> str { ret to_str(s, print_stmt); }
-
-fn item_to_str(i: @ast::item) -> str { ret to_str(i, print_item); }
-
-fn attr_to_str(i: ast::attribute) -> str { ret to_str(i, print_attribute); }
-
-fn typarams_to_str(tps: [ast::ty_param]) -> str {
-    ret to_str(tps, print_type_params)
-}
-
-fn path_to_str(&&p: @ast::path) -> str {
-    ret to_str(p, bind print_path(_, _, false));
-}
-
-fn fun_to_str(decl: ast::fn_decl, name: ast::ident,
-              params: [ast::ty_param]) -> str {
-    let buffer = io::mem_buffer();
-    let s = rust_printer(io::mem_buffer_writer(buffer));
-    print_fn(s, decl, name, params);
-    end(s); // Close the head box
-    end(s); // Close the outer box
-    eof(s.s);
-    io::mem_buffer_str(buffer)
-}
-
-#[test]
-fn test_fun_to_str() {
-    let decl: ast::fn_decl = {
-        inputs: [],
-        output: @{id: 0,
-                  node: ast::ty_nil,
-                  span: ast_util::dummy_sp()},
-        purity: ast::impure_fn,
-        cf: ast::return_val,
-        constraints: []
-    };
-    assert fun_to_str(decl, "a", []) == "fn a()";
-}
-
-fn res_to_str(decl: ast::fn_decl, name: ast::ident,
-              params: [ast::ty_param], rp: ast::region_param) -> str {
-    let buffer = io::mem_buffer();
-    let s = rust_printer(io::mem_buffer_writer(buffer));
-    print_res(s, decl, name, params, rp);
-    end(s); // Close the head box
-    end(s); // Close the outer box
-    eof(s.s);
-    io::mem_buffer_str(buffer)
-}
-
-#[test]
-fn test_res_to_str() {
-    let decl: ast::fn_decl = {
-        inputs: [{
-            mode: ast::expl(ast::by_val),
-            ty: @{id: 0,
-                  node: ast::ty_nil,
-                  span: ast_util::dummy_sp()},
-            ident: "b",
-            id: 0
-        }],
-        output: @{id: 0,
-                  node: ast::ty_nil,
-                  span: ast_util::dummy_sp()},
-        purity: ast::impure_fn,
-        cf: ast::return_val,
-        constraints: []
-    };
-    assert res_to_str(decl, "a", [], ast::rp_none) == "resource a(b: ())";
-}
-
-fn block_to_str(blk: ast::blk) -> str {
-    let buffer = io::mem_buffer();
-    let s = rust_printer(io::mem_buffer_writer(buffer));
-    // containing cbox, will be closed by print-block at }
-    cbox(s, indent_unit);
-    // head-ibox, will be closed by print-block after {
-    ibox(s, 0u);
-    print_block(s, blk);
-    eof(s.s);
-    io::mem_buffer_str(buffer)
-}
-
-fn meta_item_to_str(mi: ast::meta_item) -> str {
-    ret to_str(@mi, print_meta_item);
-}
-
-fn attribute_to_str(attr: ast::attribute) -> str {
-    ret to_str(attr, print_attribute);
-}
-
-fn variant_to_str(var: ast::variant) -> str {
-    ret to_str(var, print_variant);
-}
-
-#[test]
-fn test_variant_to_str() {
-    let var = ast_util::respan(ast_util::dummy_sp(), {
-        name: "principle_skinner",
-        attrs: [],
-        args: [],
-        id: 0,
-        disr_expr: none
-    });
-
-    let varstr = variant_to_str(var);
-    assert varstr == "principle_skinner";
-}
-
-fn cbox(s: ps, u: uint) {
-    s.boxes.push(pp::consistent);
-    pp::cbox(s.s, u);
-}
-
-fn box(s: ps, u: uint, b: pp::breaks) {
-    s.boxes.push(b);
-    pp::box(s.s, u, b);
-}
-
-fn nbsp(s: ps) { word(s.s, " "); }
-
-fn word_nbsp(s: ps, w: str) { word(s.s, w); nbsp(s); }
-
-fn word_space(s: ps, w: str) { word(s.s, w); space(s.s); }
-
-fn popen(s: ps) { word(s.s, "("); }
-
-fn pclose(s: ps) { word(s.s, ")"); }
-
-fn head(s: ps, w: str) {
-    // outer-box is consistent
-    cbox(s, indent_unit);
-    // head-box is inconsistent
-    ibox(s, str::len(w) + 1u);
-    // keyword that starts the head
-    word_nbsp(s, w);
-}
-
-fn bopen(s: ps) {
-    word(s.s, "{");
-    end(s); // close the head-box
-}
-
-fn bclose_(s: ps, span: codemap::span, indented: uint) {
-    maybe_print_comment(s, span.hi);
-    break_offset_if_not_bol(s, 1u, -(indented as int));
-    word(s.s, "}");
-    end(s); // close the outer-box
-}
-fn bclose(s: ps, span: codemap::span) { bclose_(s, span, indent_unit); }
-
-fn is_begin(s: ps) -> bool {
-    alt s.s.last_token() { pp::BEGIN(_) { true } _ { false } }
-}
-
-fn is_end(s: ps) -> bool {
-    alt s.s.last_token() { pp::END { true } _ { false } }
-}
-
-fn is_bol(s: ps) -> bool {
-    ret s.s.last_token() == pp::EOF ||
-            s.s.last_token() == pp::hardbreak_tok();
-}
-
-fn in_cbox(s: ps) -> bool {
-    let len = s.boxes.len();
-    if len == 0u { ret false; }
-    ret s.boxes[len - 1u] == pp::consistent;
-}
-
-fn hardbreak_if_not_bol(s: ps) { if !is_bol(s) { hardbreak(s.s); } }
-fn space_if_not_bol(s: ps) { if !is_bol(s) { space(s.s); } }
-fn break_offset_if_not_bol(s: ps, n: uint, off: int) {
-    if !is_bol(s) {
-        break_offset(s.s, n, off);
-    } else {
-        if off != 0 && s.s.last_token() == pp::hardbreak_tok() {
-            // We do something pretty sketchy here: tuck the nonzero
-            // offset-adjustment we were going to deposit along with the
-            // break into the previous hardbreak.
-            s.s.replace_last_token(pp::hardbreak_tok_offset(off));
-        }
-    }
-}
-
-// Synthesizes a comment that was not textually present in the original source
-// file.
-fn synth_comment(s: ps, text: str) {
-    word(s.s, "/*");
-    space(s.s);
-    word(s.s, text);
-    space(s.s);
-    word(s.s, "*/");
-}
-
-fn commasep<IN>(s: ps, b: breaks, elts: [IN], op: fn(ps, IN)) {
-    box(s, 0u, b);
-    let mut first = true;
-    for elts.each {|elt|
-        if first { first = false; } else { word_space(s, ","); }
-        op(s, elt);
-    }
-    end(s);
-}
-
-
-fn commasep_cmnt<IN>(s: ps, b: breaks, elts: [IN], op: fn(ps, IN),
-                     get_span: fn(IN) -> codemap::span) {
-    box(s, 0u, b);
-    let len = vec::len::<IN>(elts);
-    let mut i = 0u;
-    for elts.each {|elt|
-        maybe_print_comment(s, get_span(elt).hi);
-        op(s, elt);
-        i += 1u;
-        if i < len {
-            word(s.s, ",");
-            maybe_print_trailing_comment(s, get_span(elt),
-                                         some(get_span(elts[i]).hi));
-            space_if_not_bol(s);
-        }
-    }
-    end(s);
-}
-
-fn commasep_exprs(s: ps, b: breaks, exprs: [@ast::expr]) {
-    fn expr_span(&&expr: @ast::expr) -> codemap::span { ret expr.span; }
-    commasep_cmnt(s, b, exprs, print_expr, expr_span);
-}
-
-fn print_mod(s: ps, _mod: ast::_mod, attrs: [ast::attribute]) {
-    print_inner_attributes(s, attrs);
-    for _mod.view_items.each {|vitem|
-        print_view_item(s, vitem);
-    }
-    for _mod.items.each {|item| print_item(s, item); }
-}
-
-fn print_native_mod(s: ps, nmod: ast::native_mod, attrs: [ast::attribute]) {
-    print_inner_attributes(s, attrs);
-    for nmod.view_items.each {|vitem|
-        print_view_item(s, vitem);
-    }
-    for nmod.items.each {|item| print_native_item(s, item); }
-}
-
-fn print_region(s: ps, region: @ast::region) {
-    alt region.node {
-      ast::re_anon { word_space(s, "&"); }
-      ast::re_named(name) {
-        word(s.s, "&");
-        word_space(s, name);
-      }
-    }
-}
-
-fn print_type(s: ps, &&ty: @ast::ty) {
-    print_type_ex(s, ty, false);
-}
-
-fn print_type_ex(s: ps, &&ty: @ast::ty, print_colons: bool) {
-    maybe_print_comment(s, ty.span.lo);
-    ibox(s, 0u);
-    alt ty.node {
-      ast::ty_nil { word(s.s, "()"); }
-      ast::ty_bot { word(s.s, "!"); }
-      ast::ty_box(mt) { word(s.s, "@"); print_mt(s, mt); }
-      ast::ty_uniq(mt) { word(s.s, "~"); print_mt(s, mt); }
-      ast::ty_vec(mt) {
-        word(s.s, "[");
-        alt mt.mutbl {
-          ast::m_mutbl { word_space(s, "mut"); }
-          ast::m_const { word_space(s, "const"); }
-          ast::m_imm { }
-        }
-        print_type(s, mt.ty);
-        word(s.s, "]");
-      }
-      ast::ty_ptr(mt) { word(s.s, "*"); print_mt(s, mt); }
-      ast::ty_rptr(region, mt) {
-        alt region.node {
-          ast::re_anon { word(s.s, "&"); }
-          _ { print_region(s, region); word(s.s, "."); }
-        }
-        print_mt(s, mt);
-      }
-      ast::ty_rec(fields) {
-        word(s.s, "{");
-        fn print_field(s: ps, f: ast::ty_field) {
-            cbox(s, indent_unit);
-            print_mutability(s, f.node.mt.mutbl);
-            word(s.s, f.node.ident);
-            word_space(s, ":");
-            print_type(s, f.node.mt.ty);
-            end(s);
-        }
-        fn get_span(f: ast::ty_field) -> codemap::span { ret f.span; }
-        commasep_cmnt(s, consistent, fields, print_field, get_span);
-        word(s.s, ",}");
-      }
-      ast::ty_tup(elts) {
-        popen(s);
-        commasep(s, inconsistent, elts, print_type);
-        pclose(s);
-      }
-      ast::ty_fn(proto, d) {
-        print_ty_fn(s, some(proto), d, none, none);
-      }
-      ast::ty_path(path, _) { print_path(s, path, print_colons); }
-      ast::ty_constr(t, cs) {
-        print_type(s, t);
-        space(s.s);
-        word(s.s, constrs_str(cs, ty_constr_to_str));
-      }
-      ast::ty_vstore(t, v) {
-        print_type(s, t);
-        print_vstore(s, v);
-      }
-      ast::ty_mac(_) {
-          fail "print_type doesn't know how to print a ty_mac";
-      }
-      ast::ty_infer {
-          fail "print_type shouldn't see a ty_infer";
-      }
-
-    }
-    end(s);
-}
-
-fn print_native_item(s: ps, item: @ast::native_item) {
-    hardbreak_if_not_bol(s);
-    maybe_print_comment(s, item.span.lo);
-    print_outer_attributes(s, item.attrs);
-    alt item.node {
-      ast::native_item_fn(decl, typarams) {
-        print_fn(s, decl, item.ident, typarams);
-        end(s); // end head-ibox
-        word(s.s, ";");
-        end(s); // end the outer fn box
-      }
-    }
-}
-
-fn print_item(s: ps, &&item: @ast::item) {
-    hardbreak_if_not_bol(s);
-    maybe_print_comment(s, item.span.lo);
-    print_outer_attributes(s, item.attrs);
-    let ann_node = node_item(s, item);
-    s.ann.pre(ann_node);
-    alt item.node {
-      ast::item_const(ty, expr) {
-        head(s, "const");
-        word_space(s, item.ident + ":");
-        print_type(s, ty);
-        space(s.s);
-        end(s); // end the head-ibox
-
-        word_space(s, "=");
-        print_expr(s, expr);
-        word(s.s, ";");
-        end(s); // end the outer cbox
-
-      }
-      ast::item_fn(decl, typarams, body) {
-        print_fn(s, decl, item.ident, typarams);
-        word(s.s, " ");
-        print_block_with_attrs(s, body, item.attrs);
-      }
-      ast::item_mod(_mod) {
-        head(s, "mod");
-        word_nbsp(s, item.ident);
-        bopen(s);
-        print_mod(s, _mod, item.attrs);
-        bclose(s, item.span);
-      }
-      ast::item_native_mod(nmod) {
-        head(s, "native");
-        word_nbsp(s, "mod");
-        word_nbsp(s, item.ident);
-        bopen(s);
-        print_native_mod(s, nmod, item.attrs);
-        bclose(s, item.span);
-      }
-      ast::item_ty(ty, params, rp) {
-        ibox(s, indent_unit);
-        ibox(s, 0u);
-        word_nbsp(s, "type");
-        word(s.s, item.ident);
-        print_region_param(s, rp);
-        print_type_params(s, params);
-        end(s); // end the inner ibox
-
-        space(s.s);
-        word_space(s, "=");
-        print_type(s, ty);
-        word(s.s, ";");
-        end(s); // end the outer ibox
-      }
-      ast::item_enum(variants, params, rp) {
-        let newtype =
-            vec::len(variants) == 1u &&
-                str::eq(item.ident, variants[0].node.name) &&
-                vec::len(variants[0].node.args) == 1u;
-        if newtype {
-            ibox(s, indent_unit);
-            word_space(s, "enum");
-        } else { head(s, "enum"); }
-        word(s.s, item.ident);
-        print_region_param(s, rp);
-        print_type_params(s, params);
-        space(s.s);
-        if newtype {
-            word_space(s, "=");
-            print_type(s, variants[0].node.args[0].ty);
-            word(s.s, ";");
-            end(s);
-        } else {
-            bopen(s);
-            for variants.each {|v|
-                space_if_not_bol(s);
-                maybe_print_comment(s, v.span.lo);
-                print_outer_attributes(s, v.node.attrs);
-                ibox(s, indent_unit);
-                print_variant(s, v);
-                word(s.s, ",");
-                end(s);
-                maybe_print_trailing_comment(s, v.span, none::<uint>);
-            }
-            bclose(s, item.span);
-        }
-      }
-      ast::item_class(tps, ifaces, items, ctor, m_dtor, rp) {
-          head(s, "class");
-          word_nbsp(s, item.ident);
-          print_region_param(s, rp);
-          print_type_params(s, tps);
-          word_space(s, "implements");
-          commasep(s, inconsistent, ifaces, {|s, p|
-                      print_path(s, p.path, false)});
-          bopen(s);
-          hardbreak_if_not_bol(s);
-          maybe_print_comment(s, ctor.span.lo);
-          head(s, "new");
-          print_fn_args_and_ret(s, ctor.node.dec, []);
-          space(s.s);
-          print_block(s, ctor.node.body);
-          option::iter(m_dtor) {|dtor|
-            hardbreak_if_not_bol(s);
-            head(s, "drop");
-            print_block(s, dtor.node.body);
-          }
-          for items.each {|ci|
-                  /*
-                     FIXME: collect all private items and print them
-                     in a single "priv" section
-
-                     tjc: I'm not going to fix this yet b/c we might
-                     change how exports work, including for class items
-                     (see #1893)
-                   */
-             hardbreak_if_not_bol(s);
-             maybe_print_comment(s, ci.span.lo);
-             let pr = ast_util::class_member_visibility(ci);
-             alt pr {
-                ast::private {
-                    head(s, "priv");
-                    bopen(s);
-                    hardbreak_if_not_bol(s);
-                }
-                _ {}
-             }
-             alt ci.node {
-                ast::instance_var(nm, t, mt, _,_) {
-                    word_nbsp(s, "let");
-                    alt mt {
-                      ast::class_mutable { word_nbsp(s, "mut"); }
-                      _ {}
-                    }
-                    word(s.s, nm);
-                    word_nbsp(s, ":");
-                    print_type(s, t);
-                    word(s.s, ";");
-                }
-                ast::class_method(m) {
-                    print_method(s, m);
-                }
-             }
-             alt pr {
-                 ast::private { bclose(s, ci.span); }
-                 _ {}
-             }
-          }
-          bclose(s, item.span);
-       }
-      ast::item_impl(tps, rp, ifce, ty, methods) {
-        head(s, "impl");
-        word(s.s, item.ident);
-        print_region_param(s, rp);
-        print_type_params(s, tps);
-        space(s.s);
-        option::iter(ifce, {|p|
-            word_nbsp(s, "of");
-            print_path(s, p.path, false);
-            space(s.s);
-            });
-        word_nbsp(s, "for");
-        print_type(s, ty);
-        space(s.s);
-        bopen(s);
-        for methods.each {|meth|
-           print_method(s, meth);
-        }
-        bclose(s, item.span);
-      }
-      ast::item_iface(tps, rp, methods) {
-        head(s, "iface");
-        word(s.s, item.ident);
-        print_region_param(s, rp);
-        print_type_params(s, tps);
-        word(s.s, " ");
-        bopen(s);
-        for methods.each {|meth| print_ty_method(s, meth); }
-        bclose(s, item.span);
-      }
-      ast::item_res(decl, tps, body, dt_id, ct_id, rp) {
-        print_res(s, decl, item.ident, tps, rp);
-        print_block(s, body);
-      }
-    }
-    s.ann.post(ann_node);
-}
-
-fn print_res(s: ps, decl: ast::fn_decl, name: ast::ident,
-             typarams: [ast::ty_param], rp: ast::region_param) {
-    head(s, "resource");
-    word(s.s, name);
-    print_region_param(s, rp);
-    print_type_params(s, typarams);
-    popen(s);
-    word_space(s, decl.inputs[0].ident + ":");
-    print_type(s, decl.inputs[0].ty);
-    pclose(s);
-    space(s.s);
-}
-
-fn print_variant(s: ps, v: ast::variant) {
-    word(s.s, v.node.name);
-    if vec::len(v.node.args) > 0u {
-        popen(s);
-        fn print_variant_arg(s: ps, arg: ast::variant_arg) {
-            print_type(s, arg.ty);
-        }
-        commasep(s, consistent, v.node.args, print_variant_arg);
-        pclose(s);
-    }
-    alt v.node.disr_expr {
-      some(d) {
-        space(s.s);
-        word_space(s, "=");
-        print_expr(s, d);
-      }
-      _ {}
-    }
-}
-
-fn print_ty_method(s: ps, m: ast::ty_method) {
-    hardbreak_if_not_bol(s);
-    maybe_print_comment(s, m.span.lo);
-    print_outer_attributes(s, m.attrs);
-    print_ty_fn(s, none, m.decl, some(m.ident), some(m.tps));
-    word(s.s, ";");
-}
-
-fn print_method(s: ps, meth: @ast::method) {
-    hardbreak_if_not_bol(s);
-    maybe_print_comment(s, meth.span.lo);
-    print_outer_attributes(s, meth.attrs);
-    print_fn(s, meth.decl, meth.ident, meth.tps);
-    word(s.s, " ");
-    print_block_with_attrs(s, meth.body, meth.attrs);
-}
-
-fn print_outer_attributes(s: ps, attrs: [ast::attribute]) {
-    let mut count = 0;
-    for attrs.each {|attr|
-        alt attr.node.style {
-          ast::attr_outer { print_attribute(s, attr); count += 1; }
-          _ {/* fallthrough */ }
-        }
-    }
-    if count > 0 { hardbreak_if_not_bol(s); }
-}
-
-fn print_inner_attributes(s: ps, attrs: [ast::attribute]) {
-    let mut count = 0;
-    for attrs.each {|attr|
-        alt attr.node.style {
-          ast::attr_inner {
-            print_attribute(s, attr);
-            word(s.s, ";");
-            count += 1;
-          }
-          _ {/* fallthrough */ }
-        }
-    }
-    if count > 0 { hardbreak_if_not_bol(s); }
-}
-
-fn print_attribute(s: ps, attr: ast::attribute) {
-    hardbreak_if_not_bol(s);
-    maybe_print_comment(s, attr.span.lo);
-    word(s.s, "#[");
-    print_meta_item(s, @attr.node.value);
-    word(s.s, "]");
-}
-
-
-fn print_stmt(s: ps, st: ast::stmt) {
-    maybe_print_comment(s, st.span.lo);
-    alt st.node {
-      ast::stmt_decl(decl, _) {
-        print_decl(s, decl);
-      }
-      ast::stmt_expr(expr, _) {
-        space_if_not_bol(s);
-        print_expr(s, expr);
-      }
-      ast::stmt_semi(expr, _) {
-        space_if_not_bol(s);
-        print_expr(s, expr);
-        word(s.s, ";");
-      }
-    }
-    if parse::classify::stmt_ends_with_semi(st) { word(s.s, ";"); }
-    maybe_print_trailing_comment(s, st.span, none::<uint>);
-}
-
-fn print_block(s: ps, blk: ast::blk) {
-    print_possibly_embedded_block(s, blk, block_normal, indent_unit);
-}
-
-fn print_block_with_attrs(s: ps, blk: ast::blk, attrs: [ast::attribute]) {
-    print_possibly_embedded_block_(s, blk, block_normal, indent_unit, attrs);
-}
-
-enum embed_type { block_macro, block_block_fn, block_normal, }
-
-fn print_possibly_embedded_block(s: ps, blk: ast::blk, embedded: embed_type,
-                                 indented: uint) {
-    print_possibly_embedded_block_(
-        s, blk, embedded, indented, []);
-}
-
-fn print_possibly_embedded_block_(s: ps, blk: ast::blk, embedded: embed_type,
-                                  indented: uint, attrs: [ast::attribute]) {
-    alt blk.node.rules {
-      ast::unchecked_blk { word(s.s, "unchecked"); }
-      ast::unsafe_blk { word(s.s, "unsafe"); }
-      ast::default_blk { }
-    }
-    maybe_print_comment(s, blk.span.lo);
-    let ann_node = node_block(s, blk);
-    s.ann.pre(ann_node);
-    alt embedded {
-      block_macro { word(s.s, "#{"); end(s); }
-      block_block_fn { end(s); }
-      block_normal { bopen(s); }
-    }
-
-    print_inner_attributes(s, attrs);
-
-    for blk.node.view_items.each {|vi| print_view_item(s, vi); }
-    for blk.node.stmts.each {|st|
-        print_stmt(s, *st);
-    }
-    alt blk.node.expr {
-      some(expr) {
-        space_if_not_bol(s);
-        print_expr(s, expr);
-        maybe_print_trailing_comment(s, expr.span, some(blk.span.hi));
-      }
-      _ { }
-    }
-    bclose_(s, blk.span, indented);
-    s.ann.post(ann_node);
-}
-
-// A bare ret or fail (with no argument) is ambiguous in the discriminant of
-// if, alt, do, and while, so it must be parenthesized there
-fn print_maybe_parens_discrim(s: ps, e: @ast::expr) {
-    let disambig = alt e.node {
-      ast::expr_ret(none) | ast::expr_fail(none) { true }
-      _ { false }
-    };
-    if disambig { popen(s); }
-    print_expr(s, e);
-    if disambig { pclose(s); }
-}
-
-fn print_if(s: ps, test: @ast::expr, blk: ast::blk,
-            elseopt: option<@ast::expr>, chk: bool) {
-    head(s, "if");
-    if chk { word_nbsp(s, "check"); }
-    print_maybe_parens_discrim(s, test);
-    space(s.s);
-    print_block(s, blk);
-    fn do_else(s: ps, els: option<@ast::expr>) {
-        alt els {
-          some(_else) {
-            alt _else.node {
-              // "another else-if"
-              ast::expr_if(i, t, e) {
-                cbox(s, indent_unit - 1u);
-                ibox(s, 0u);
-                word(s.s, " else if ");
-                print_maybe_parens_discrim(s, i);
-                space(s.s);
-                print_block(s, t);
-                do_else(s, e);
-              }
-              // "final else"
-              ast::expr_block(b) {
-                cbox(s, indent_unit - 1u);
-                ibox(s, 0u);
-                word(s.s, " else ");
-                print_block(s, b);
-              }
-              // BLEAH, constraints would be great here
-              _ {
-                  fail "print_if saw if with weird alternative";
-              }
-            }
-          }
-          _ {/* fall through */ }
-        }
-    }
-    do_else(s, elseopt);
-}
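
Editorial note, not part of the diff above: do_else flattens else-if chains by recursing on the alternative, so an else whose body is itself an if prints as `else if`, while a plain block prints as the final `else`. A minimal sketch of that recursion in modern Rust, with a made-up Expr type in place of the real AST:

enum Expr {
    If(Option<Box<Expr>>), // optional else branch; condition and body elided
    Block,
}

fn do_else(els: &Option<Box<Expr>>, out: &mut String) {
    if let Some(e) = els {
        match &**e {
            // "another else-if": recurse so the whole chain prints flat
            Expr::If(next) => {
                out.push_str(" else if cond { ... }");
                do_else(next, out);
            }
            // "final else"
            Expr::Block => out.push_str(" else { ... }"),
        }
    }
}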
-
-fn print_mac(s: ps, m: ast::mac) {
-    alt m.node {
-      ast::mac_invoc(path, arg, body) {
-        word(s.s, "#");
-        print_path(s, path, false);
-        alt arg {
-          some(@{node: ast::expr_vec(_, _), _}) { }
-          _ { word(s.s, " "); }
-        }
-        option::iter(arg, bind print_expr(s, _));
-        // FIXME: extension 'body' (#2339)
-      }
-      ast::mac_embed_type(ty) {
-        word(s.s, "#<");
-        print_type(s, ty);
-        word(s.s, ">");
-      }
-      ast::mac_embed_block(blk) {
-        print_possibly_embedded_block(s, blk, block_normal, indent_unit);
-      }
-      ast::mac_ellipsis { word(s.s, "..."); }
-      ast::mac_var(v) { word(s.s, #fmt("$%u", v)); }
-      _ { /* fixme */ }
-    }
-}
-
-fn print_vstore(s: ps, t: ast::vstore) {
-    alt t {
-      ast::vstore_fixed(some(i)) { word_space(s, #fmt("/%u", i)); }
-      ast::vstore_fixed(none) { word_space(s, "/_"); }
-      ast::vstore_uniq { word_space(s, "/~"); }
-      ast::vstore_box { word_space(s, "/@"); }
-      ast::vstore_slice(r) { word(s.s, "/"); print_region(s, r); }
-    }
-}
-
-fn print_expr(s: ps, &&expr: @ast::expr) {
-    maybe_print_comment(s, expr.span.lo);
-    ibox(s, indent_unit);
-    let ann_node = node_expr(s, expr);
-    s.ann.pre(ann_node);
-    alt expr.node {
-      ast::expr_vstore(e, v) {
-        print_expr(s, e);
-        print_vstore(s, v);
-      }
-      ast::expr_vec(exprs, mutbl) {
-        ibox(s, indent_unit);
-        word(s.s, "[");
-        if mutbl == ast::m_mutbl {
-            word(s.s, "mut");
-            if vec::len(exprs) > 0u { nbsp(s); }
-        }
-        commasep_exprs(s, inconsistent, exprs);
-        word(s.s, "]");
-        end(s);
-      }
-      ast::expr_rec(fields, wth) {
-        fn print_field(s: ps, field: ast::field) {
-            ibox(s, indent_unit);
-            if field.node.mutbl == ast::m_mutbl { word_nbsp(s, "mut"); }
-            word(s.s, field.node.ident);
-            word_space(s, ":");
-            print_expr(s, field.node.expr);
-            end(s);
-        }
-        fn get_span(field: ast::field) -> codemap::span { ret field.span; }
-        word(s.s, "{");
-        commasep_cmnt(s, consistent, fields, print_field, get_span);
-        alt wth {
-          some(expr) {
-            if vec::len(fields) > 0u { space(s.s); }
-            ibox(s, indent_unit);
-            word_space(s, "with");
-            print_expr(s, expr);
-            end(s);
-          }
-          _ { word(s.s, ","); }
-        }
-        word(s.s, "}");
-      }
-      ast::expr_tup(exprs) {
-        popen(s);
-        commasep_exprs(s, inconsistent, exprs);
-        pclose(s);
-      }
-      ast::expr_call(func, args, has_block) {
-        let mut base_args = args;
-        let blk = if has_block {
-            let blk_arg = vec::pop(base_args);
-            alt blk_arg.node {
-              ast::expr_loop_body(_) { word_nbsp(s, "for"); }
-              _ {}
-            }
-            some(blk_arg)
-        } else { none };
-        print_expr_parens_if_not_bot(s, func);
-        if !has_block || vec::len(base_args) > 0u {
-            popen(s);
-            commasep_exprs(s, inconsistent, base_args);
-            pclose(s);
-        }
-        if has_block {
-            nbsp(s);
-            print_expr(s, option::get(blk));
-        }
-      }
-      ast::expr_bind(func, args) {
-        fn print_opt(s: ps, expr: option<@ast::expr>) {
-            alt expr {
-              some(expr) { print_expr(s, expr); }
-              _ { word(s.s, "_"); }
-            }
-        }
-
-        // "bind" keyword is only needed if there are no "_" arguments.
-        if !vec::any(args) {|arg| option::is_none(arg) } {
-            word_nbsp(s, "bind");
-        }
-
-        print_expr(s, func);
-        popen(s);
-        commasep(s, inconsistent, args, print_opt);
-        pclose(s);
-      }
-      ast::expr_binary(op, lhs, rhs) {
-        let prec = operator_prec(op);
-        print_op_maybe_parens(s, lhs, prec);
-        space(s.s);
-        word_space(s, ast_util::binop_to_str(op));
-        print_op_maybe_parens(s, rhs, prec + 1u);
-      }
-      ast::expr_unary(op, expr) {
-        word(s.s, ast_util::unop_to_str(op));
-        print_op_maybe_parens(s, expr, parse::prec::unop_prec);
-      }
-      ast::expr_addr_of(m, expr) {
-        word(s.s, "&");
-        print_mutability(s, m);
-        print_expr(s, expr);
-      }
-      ast::expr_lit(lit) { print_literal(s, lit); }
-      ast::expr_cast(expr, ty) {
-        print_op_maybe_parens(s, expr, parse::prec::as_prec);
-        space(s.s);
-        word_space(s, "as");
-        print_type_ex(s, ty, true);
-      }
-      ast::expr_if(test, blk, elseopt) {
-        print_if(s, test, blk, elseopt, false);
-      }
-      ast::expr_if_check(test, blk, elseopt) {
-        print_if(s, test, blk, elseopt, true);
-      }
-      ast::expr_while(test, blk) {
-        head(s, "while");
-        print_maybe_parens_discrim(s, test);
-        space(s.s);
-        print_block(s, blk);
-      }
-      ast::expr_loop(blk) {
-        head(s, "loop");
-        space(s.s);
-        print_block(s, blk);
-      }
-      ast::expr_alt(expr, arms, mode) {
-        cbox(s, alt_indent_unit);
-        ibox(s, 4u);
-        word_nbsp(s, "alt");
-        if mode == ast::alt_check { word_nbsp(s, "check"); }
-        print_maybe_parens_discrim(s, expr);
-        space(s.s);
-        bopen(s);
-        for arms.each {|arm|
-            space(s.s);
-            cbox(s, alt_indent_unit);
-            ibox(s, 0u);
-            let mut first = true;
-            for arm.pats.each {|p|
-                if first {
-                    first = false;
-                } else { space(s.s); word_space(s, "|"); }
-                print_pat(s, p);
-            }
-            space(s.s);
-            alt arm.guard {
-              some(e) { word_space(s, "if"); print_expr(s, e); space(s.s); }
-              none { }
-            }
-            print_possibly_embedded_block(s, arm.body, block_normal,
-                                          alt_indent_unit);
-        }
-        bclose_(s, expr.span, alt_indent_unit);
-      }
-      ast::expr_fn(proto, decl, body, cap_clause) {
-        // containing cbox, will be closed by print-block at }
-        cbox(s, indent_unit);
-        // head-box, will be closed by print-block at start
-        ibox(s, 0u);
-        print_purity(s, decl.purity);
-        word(s.s, proto_to_str(proto));
-        print_fn_args_and_ret(s, decl, *cap_clause);
-        space(s.s);
-        print_block(s, body);
-      }
-      ast::expr_fn_block(decl, body, cap_clause) {
-        // containing cbox, will be closed by print-block at }
-        cbox(s, indent_unit);
-        // head-box, will be closed by print-block at start
-        ibox(s, 0u);
-        word(s.s, "{");
-        print_fn_block_args(s, decl, *cap_clause);
-        print_possibly_embedded_block(s, body, block_block_fn, indent_unit);
-      }
-      ast::expr_loop_body(body) {
-        print_expr(s, body);
-      }
-      ast::expr_block(blk) {
-        // containing cbox, will be closed by print-block at }
-        cbox(s, indent_unit);
-        // head-box, will be closed by print-block after {
-        ibox(s, 0u);
-        print_block(s, blk);
-      }
-      ast::expr_copy(e) { word_space(s, "copy"); print_expr(s, e); }
-      ast::expr_move(lhs, rhs) {
-        print_expr(s, lhs);
-        space(s.s);
-        word_space(s, "<-");
-        print_expr(s, rhs);
-      }
-      ast::expr_assign(lhs, rhs) {
-        print_expr(s, lhs);
-        space(s.s);
-        word_space(s, "=");
-        print_expr(s, rhs);
-      }
-      ast::expr_swap(lhs, rhs) {
-        print_expr(s, lhs);
-        space(s.s);
-        word_space(s, "<->");
-        print_expr(s, rhs);
-      }
-      ast::expr_assign_op(op, lhs, rhs) {
-        print_expr(s, lhs);
-        space(s.s);
-        word(s.s, ast_util::binop_to_str(op));
-        word_space(s, "=");
-        print_expr(s, rhs);
-      }
-      ast::expr_field(expr, id, tys) {
-        // Deal with '10.x'
-        if ends_in_lit_int(expr) {
-            popen(s); print_expr(s, expr); pclose(s);
-        } else {
-            print_expr_parens_if_not_bot(s, expr);
-        }
-        word(s.s, ".");
-        word(s.s, id);
-        if vec::len(tys) > 0u {
-            word(s.s, "::<");
-            commasep(s, inconsistent, tys, print_type);
-            word(s.s, ">");
-        }
-      }
-      ast::expr_index(expr, index) {
-        print_expr_parens_if_not_bot(s, expr);
-        word(s.s, "[");
-        print_expr(s, index);
-        word(s.s, "]");
-      }
-      ast::expr_path(path) { print_path(s, path, true); }
-      ast::expr_fail(maybe_fail_val) {
-        word(s.s, "fail");
-        alt maybe_fail_val {
-          some(expr) { word(s.s, " "); print_expr(s, expr); }
-          _ { }
-        }
-      }
-      ast::expr_break { word(s.s, "break"); }
-      ast::expr_cont { word(s.s, "cont"); }
-      ast::expr_ret(result) {
-        word(s.s, "ret");
-        alt result {
-          some(expr) { word(s.s, " "); print_expr(s, expr); }
-          _ { }
-        }
-      }
-      ast::expr_log(lvl, lexp, expr) {
-        alt check lvl {
-          1 { word_nbsp(s, "log"); print_expr(s, expr); }
-          0 { word_nbsp(s, "log_err"); print_expr(s, expr); }
-          2 {
-            word_nbsp(s, "log");
-            popen(s);
-            print_expr(s, lexp);
-            word(s.s, ",");
-            space_if_not_bol(s);
-            print_expr(s, expr);
-            pclose(s);
-          }
-        }
-      }
-      ast::expr_check(m, expr) {
-        alt m {
-          ast::claimed_expr { word_nbsp(s, "claim"); }
-          ast::checked_expr { word_nbsp(s, "check"); }
-        }
-        popen(s);
-        print_expr(s, expr);
-        pclose(s);
-      }
-      ast::expr_assert(expr) {
-        word_nbsp(s, "assert");
-        print_expr(s, expr);
-      }
-      ast::expr_new(p, _, v) {
-        word_nbsp(s, "new");
-        popen(s);
-        print_expr(s, p);
-        pclose(s);
-        print_expr(s, v);
-      }
-      ast::expr_mac(m) { print_mac(s, m); }
-    }
-    s.ann.post(ann_node);
-    end(s);
-}
-
-fn print_expr_parens_if_not_bot(s: ps, ex: @ast::expr) {
-    let parens = alt ex.node {
-      ast::expr_fail(_) | ast::expr_ret(_) |
-      ast::expr_binary(_, _, _) | ast::expr_unary(_, _) |
-      ast::expr_move(_, _) | ast::expr_copy(_) |
-      ast::expr_assign(_, _) |
-      ast::expr_assign_op(_, _, _) | ast::expr_swap(_, _) |
-      ast::expr_log(_, _, _) | ast::expr_assert(_) |
-      ast::expr_call(_, _, true) |
-      ast::expr_check(_, _) { true }
-      _ { false }
-    };
-    if parens { popen(s); }
-    print_expr(s, ex);
-    if parens { pclose(s); }
-}
-
-fn print_local_decl(s: ps, loc: @ast::local) {
-    print_pat(s, loc.node.pat);
-    alt loc.node.ty.node {
-      ast::ty_infer { }
-      _ { word_space(s, ":"); print_type(s, loc.node.ty); }
-    }
-}
-
-fn print_decl(s: ps, decl: @ast::decl) {
-    maybe_print_comment(s, decl.span.lo);
-    alt decl.node {
-      ast::decl_local(locs) {
-        space_if_not_bol(s);
-        ibox(s, indent_unit);
-        word_nbsp(s, "let");
-
-        // if any are mut, all are mut
-        if vec::any(locs) {|l| l.node.is_mutbl } {
-            assert vec::all(locs) {|l| l.node.is_mutbl };
-            word_nbsp(s, "mut");
-        }
-
-        fn print_local(s: ps, &&loc: @ast::local) {
-            ibox(s, indent_unit);
-            print_local_decl(s, loc);
-            end(s);
-            alt loc.node.init {
-              some(init) {
-                nbsp(s);
-                alt init.op {
-                  ast::init_assign { word_space(s, "="); }
-                  ast::init_move { word_space(s, "<-"); }
-                }
-                print_expr(s, init.expr);
-              }
-              _ { }
-            }
-        }
-        commasep(s, consistent, locs, print_local);
-        end(s);
-      }
-      ast::decl_item(item) { print_item(s, item); }
-    }
-}
-
-fn print_ident(s: ps, ident: ast::ident) { word(s.s, ident); }
-
-fn print_for_decl(s: ps, loc: @ast::local, coll: @ast::expr) {
-    print_local_decl(s, loc);
-    space(s.s);
-    word_space(s, "in");
-    print_expr(s, coll);
-}
-
-fn print_path(s: ps, &&path: @ast::path, colons_before_params: bool) {
-    maybe_print_comment(s, path.span.lo);
-    if path.global { word(s.s, "::"); }
-    let mut first = true;
-    for path.idents.each {|id|
-        if first { first = false; } else { word(s.s, "::"); }
-        word(s.s, id);
-    }
-    if path.rp.is_some() || !path.types.is_empty() {
-        if colons_before_params { word(s.s, "::"); }
-
-        alt path.rp {
-          none { /* ok */ }
-          some(r) {
-            word(s.s, "/");
-            print_region(s, r);
-          }
-        }
-
-        if !path.types.is_empty() {
-            word(s.s, "<");
-            commasep(s, inconsistent, path.types, print_type);
-            word(s.s, ">");
-        }
-    }
-}
-
-fn print_pat(s: ps, &&pat: @ast::pat) {
-    maybe_print_comment(s, pat.span.lo);
-    let ann_node = node_pat(s, pat);
-    s.ann.pre(ann_node);
-    /* Pat isn't normalized, but the beauty of it
-     is that it doesn't matter */
-    alt pat.node {
-      ast::pat_wild { word(s.s, "_"); }
-      ast::pat_ident(path, sub) {
-        print_path(s, path, true);
-        alt sub {
-          some(p) { word(s.s, "@"); print_pat(s, p); }
-          none {}
-        }
-      }
-      ast::pat_enum(path, args_) {
-        print_path(s, path, true);
-        alt args_ {
-          none { word(s.s, "(*)"); }
-          some(args) {
-            if vec::len(args) > 0u {
-              popen(s);
-              commasep(s, inconsistent, args, print_pat);
-              pclose(s);
-            } else { }
-          }
-        }
-      }
-      ast::pat_rec(fields, etc) {
-        word(s.s, "{");
-        fn print_field(s: ps, f: ast::field_pat) {
-            cbox(s, indent_unit);
-            word(s.s, f.ident);
-            word_space(s, ":");
-            print_pat(s, f.pat);
-            end(s);
-        }
-        fn get_span(f: ast::field_pat) -> codemap::span { ret f.pat.span; }
-        commasep_cmnt(s, consistent, fields, print_field, get_span);
-        if etc {
-            if vec::len(fields) != 0u { word_space(s, ","); }
-            word(s.s, "_");
-        }
-        word(s.s, "}");
-      }
-      ast::pat_tup(elts) {
-        popen(s);
-        commasep(s, inconsistent, elts, print_pat);
-        pclose(s);
-      }
-      ast::pat_box(inner) { word(s.s, "@"); print_pat(s, inner); }
-      ast::pat_uniq(inner) { word(s.s, "~"); print_pat(s, inner); }
-      ast::pat_lit(e) { print_expr(s, e); }
-      ast::pat_range(begin, end) {
-        print_expr(s, begin);
-        space(s.s);
-        word_space(s, "to");
-        print_expr(s, end);
-      }
-    }
-    s.ann.post(ann_node);
-}
-
-fn print_fn(s: ps, decl: ast::fn_decl, name: ast::ident,
-            typarams: [ast::ty_param]) {
-    alt decl.purity {
-      ast::impure_fn { head(s, "fn") }
-      _ { head(s, purity_to_str(decl.purity) + " fn") }
-    }
-    word(s.s, name);
-    print_type_params(s, typarams);
-    print_fn_args_and_ret(s, decl, []);
-}
-
-fn print_fn_args(s: ps, decl: ast::fn_decl,
-                 cap_items: [ast::capture_item]) {
-    commasep(s, inconsistent, decl.inputs, print_arg);
-    if cap_items.is_not_empty() {
-        let mut first = decl.inputs.is_empty();
-        for cap_items.each { |cap_item|
-            if first { first = false; } else { word_space(s, ","); }
-            if cap_item.is_move { word_nbsp(s, "move") }
-            else { word_nbsp(s, "copy") }
-            word(s.s, cap_item.name);
-        }
-    }
-}
-
-fn print_fn_args_and_ret(s: ps, decl: ast::fn_decl,
-                         cap_items: [ast::capture_item]) {
-    popen(s);
-    print_fn_args(s, decl, cap_items);
-    pclose(s);
-    word(s.s, constrs_str(decl.constraints, {|c|
-        ast_fn_constr_to_str(decl, c)
-    }));
-
-    maybe_print_comment(s, decl.output.span.lo);
-    if decl.output.node != ast::ty_nil {
-        space_if_not_bol(s);
-        word_space(s, "->");
-        print_type(s, decl.output);
-    }
-}
-
-fn print_fn_block_args(s: ps, decl: ast::fn_decl,
-                       cap_items: [ast::capture_item]) {
-    word(s.s, "|");
-    print_fn_args(s, decl, cap_items);
-    word(s.s, "|");
-    if decl.output.node != ast::ty_infer {
-        space_if_not_bol(s);
-        word_space(s, "->");
-        print_type(s, decl.output);
-    }
-    maybe_print_comment(s, decl.output.span.lo);
-}
-
-fn mode_to_str(m: ast::mode) -> str {
-    alt m {
-      ast::expl(ast::by_mutbl_ref) { "&" }
-      ast::expl(ast::by_move) { "-" }
-      ast::expl(ast::by_ref) { "&&" }
-      ast::expl(ast::by_val) { "++" }
-      ast::expl(ast::by_copy) { "+" }
-      ast::infer(_) { "" }
-    }
-}
-
-fn print_arg_mode(s: ps, m: ast::mode) {
-    let ms = mode_to_str(m);
-    if ms != "" { word(s.s, ms); }
-}
-
-fn print_bounds(s: ps, bounds: @[ast::ty_param_bound]) {
-    if vec::len(*bounds) > 0u {
-        word(s.s, ":");
-        for vec::each(*bounds) {|bound|
-            nbsp(s);
-            alt bound {
-              ast::bound_copy { word(s.s, "copy"); }
-              ast::bound_send { word(s.s, "send"); }
-              ast::bound_const { word(s.s, "const"); }
-              ast::bound_iface(t) { print_type(s, t); }
-            }
-        }
-    }
-}
-
-fn print_region_param(s: ps, rp: ast::region_param) {
-    alt rp {
-      ast::rp_self { word(s.s, "/&") }
-      ast::rp_none { }
-    }
-}
-
-fn print_type_params(s: ps, &&params: [ast::ty_param]) {
-    if vec::len(params) > 0u {
-        word(s.s, "<");
-        fn printParam(s: ps, param: ast::ty_param) {
-            word(s.s, param.ident);
-            print_bounds(s, param.bounds);
-        }
-        commasep(s, inconsistent, params, printParam);
-        word(s.s, ">");
-    }
-}
-
-fn print_meta_item(s: ps, &&item: @ast::meta_item) {
-    ibox(s, indent_unit);
-    alt item.node {
-      ast::meta_word(name) { word(s.s, name); }
-      ast::meta_name_value(name, value) {
-        word_space(s, name);
-        word_space(s, "=");
-        print_literal(s, @value);
-      }
-      ast::meta_list(name, items) {
-        word(s.s, name);
-        popen(s);
-        commasep(s, consistent, items, print_meta_item);
-        pclose(s);
-      }
-    }
-    end(s);
-}
-
-fn print_view_path(s: ps, &&vp: @ast::view_path) {
-    alt vp.node {
-      ast::view_path_simple(ident, path, _) {
-        if path.idents[vec::len(path.idents)-1u] != ident {
-            word_space(s, ident);
-            word_space(s, "=");
-        }
-        print_path(s, path, false);
-      }
-
-      ast::view_path_glob(path, _) {
-        print_path(s, path, false);
-        word(s.s, "::*");
-      }
-
-      ast::view_path_list(path, idents, _) {
-        print_path(s, path, false);
-        word(s.s, "::{");
-        commasep(s, inconsistent, idents) {|s, w|
-            word(s.s, w.node.name)
-        }
-        word(s.s, "}");
-      }
-    }
-}
-
-fn print_view_paths(s: ps, vps: [@ast::view_path]) {
-    commasep(s, inconsistent, vps, print_view_path);
-}
-
-fn print_view_item(s: ps, item: @ast::view_item) {
-    hardbreak_if_not_bol(s);
-    maybe_print_comment(s, item.span.lo);
-    alt item.node {
-      ast::view_item_use(id, mta, _) {
-        head(s, "use");
-        word(s.s, id);
-        if vec::len(mta) > 0u {
-            popen(s);
-            commasep(s, consistent, mta, print_meta_item);
-            pclose(s);
-        }
-      }
-
-      ast::view_item_import(vps) {
-        head(s, "import");
-        print_view_paths(s, vps);
-      }
-
-      ast::view_item_export(vps) {
-        head(s, "export");
-        print_view_paths(s, vps);
-      }
-    }
-    word(s.s, ";");
-    end(s); // end inner head-block
-    end(s); // end outer head-block
-}
-
-fn print_op_maybe_parens(s: ps, expr: @ast::expr, outer_prec: uint) {
-    let add_them = need_parens(expr, outer_prec);
-    if add_them { popen(s); }
-    print_expr(s, expr);
-    if add_them { pclose(s); }
-}
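
Editorial sketch, not from the original source: the expr_binary case earlier prints the left operand at the operator's own precedence and the right operand at prec + 1u, and print_op_maybe_parens then parenthesizes an operand only when it binds more loosely than its context. The same minimal-parentheses scheme in modern Rust, with a toy two-level grammar standing in for operator_prec and need_parens:

enum E {
    Num(i64),
    Bin(char, Box<E>, Box<E>),
}

fn prec(op: char) -> u32 {
    match op {
        '*' | '/' => 2,
        _ => 1, // '+' and '-'
    }
}

fn print(e: &E, outer: u32, out: &mut String) {
    match e {
        E::Num(n) => out.push_str(&n.to_string()),
        E::Bin(op, l, r) => {
            let p = prec(*op);
            let parens = p < outer;    // the need_parens test
            if parens { out.push('('); }
            print(l, p, out);          // left operand: same precedence
            out.push(*op);
            print(r, p + 1, out);      // right operand: prec + 1
            if parens { out.push(')'); }
        }
    }
}

fn main() {
    // (1 - 2) - 3 renders without parentheses ...
    let a = E::Bin('-', Box::new(E::Num(1)), Box::new(E::Num(2)));
    let left = E::Bin('-', Box::new(a), Box::new(E::Num(3)));
    let mut s = String::new();
    print(&left, 0, &mut s);
    assert_eq!(s, "1-2-3");

    // ... while 1 - (2 - 3) keeps them around the right operand.
    let b = E::Bin('-', Box::new(E::Num(2)), Box::new(E::Num(3)));
    let right = E::Bin('-', Box::new(E::Num(1)), Box::new(b));
    let mut s = String::new();
    print(&right, 0, &mut s);
    assert_eq!(s, "1-(2-3)");
}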
-
-fn print_mutability(s: ps, mutbl: ast::mutability) {
-    alt mutbl {
-      ast::m_mutbl { word_nbsp(s, "mut"); }
-      ast::m_const { word_nbsp(s, "const"); }
-      ast::m_imm {/* nothing */ }
-    }
-}
-
-fn print_mt(s: ps, mt: ast::mt) {
-    print_mutability(s, mt.mutbl);
-    print_type(s, mt.ty);
-}
-
-fn print_arg(s: ps, input: ast::arg) {
-    ibox(s, indent_unit);
-    print_arg_mode(s, input.mode);
-    alt input.ty.node {
-      ast::ty_infer {
-        word(s.s, input.ident);
-      }
-      _ {
-        if str::len(input.ident) > 0u {
-            word_space(s, input.ident + ":");
-        }
-        print_type(s, input.ty);
-      }
-    }
-    end(s);
-}
-
-fn print_ty_fn(s: ps, opt_proto: option<ast::proto>,
-               decl: ast::fn_decl, id: option<ast::ident>,
-               tps: option<[ast::ty_param]>) {
-    ibox(s, indent_unit);
-    word(s.s, opt_proto_to_str(opt_proto));
-    alt id { some(id) { word(s.s, " "); word(s.s, id); } _ { } }
-    alt tps { some(tps) { print_type_params(s, tps); } _ { } }
-    zerobreak(s.s);
-    popen(s);
-    commasep(s, inconsistent, decl.inputs, print_arg);
-    pclose(s);
-    maybe_print_comment(s, decl.output.span.lo);
-    if decl.output.node != ast::ty_nil {
-        space_if_not_bol(s);
-        ibox(s, indent_unit);
-        word_space(s, "->");
-        if decl.cf == ast::noreturn { word_nbsp(s, "!"); }
-        else { print_type(s, decl.output); }
-        end(s);
-    }
-    word(s.s, constrs_str(decl.constraints, ast_ty_fn_constr_to_str));
-    end(s);
-}
-
-fn maybe_print_trailing_comment(s: ps, span: codemap::span,
-                                next_pos: option<uint>) {
-    let mut cm;
-    alt s.cm { some(ccm) { cm = ccm; } _ { ret; } }
-    alt next_comment(s) {
-      some(cmnt) {
-        if cmnt.style != comments::trailing { ret; }
-        let span_line = codemap::lookup_char_pos(cm, span.hi);
-        let comment_line = codemap::lookup_char_pos(cm, cmnt.pos);
-        let mut next = cmnt.pos + 1u;
-        alt next_pos { none { } some(p) { next = p; } }
-        if span.hi < cmnt.pos && cmnt.pos < next &&
-               span_line.line == comment_line.line {
-            print_comment(s, cmnt);
-            s.cur_cmnt += 1u;
-        }
-      }
-      _ { }
-    }
-}
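
Editorial sketch: maybe_print_trailing_comment attaches a comment to the node just printed only if the comment starts after the node's span, before the next position, and on the same source line. The same predicate in modern Rust; Cmnt and line_of are hypothetical stand-ins for comments::cmnt and the codemap line lookup:

struct Cmnt {
    pos: usize,
    text: String,
}

fn trailing_comment<'a>(
    cmnts: &'a [Cmnt],
    node_end: usize,  // span.hi of the node just printed
    next_pos: usize,  // position of the next node (or node_end + 1)
    line_of: impl Fn(usize) -> usize,
) -> Option<&'a Cmnt> {
    cmnts.iter().find(|c| {
        node_end < c.pos
            && c.pos < next_pos
            && line_of(node_end) == line_of(c.pos)
    })
}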
-
-fn print_remaining_comments(s: ps) {
-    // If there aren't any remaining comments, then we need to manually
-    // make sure there is a line break at the end.
-    if option::is_none(next_comment(s)) { hardbreak(s.s); }
-    loop {
-        alt next_comment(s) {
-          some(cmnt) { print_comment(s, cmnt); s.cur_cmnt += 1u; }
-          _ { break; }
-        }
-    }
-}
-
-fn print_literal(s: ps, &&lit: @ast::lit) {
-    maybe_print_comment(s, lit.span.lo);
-    alt next_lit(s, lit.span.lo) {
-      some(lt) {
-        word(s.s, lt.lit);
-        ret;
-      }
-      _ {}
-    }
-    alt lit.node {
-      ast::lit_str(st) { print_string(s, st); }
-      ast::lit_int(ch, ast::ty_char) {
-        word(s.s, "'" + escape_str(str::from_char(ch as char), '\'') + "'");
-      }
-      ast::lit_int(i, t) {
-        if i < 0_i64 {
-            word(s.s,
-                 "-" + u64::to_str(-i as u64, 10u)
-                 + ast_util::int_ty_to_str(t));
-        } else {
-            word(s.s,
-                 u64::to_str(i as u64, 10u)
-                 + ast_util::int_ty_to_str(t));
-        }
-      }
-      ast::lit_uint(u, t) {
-        word(s.s,
-             u64::to_str(u, 10u)
-             + ast_util::uint_ty_to_str(t));
-      }
-      ast::lit_float(f, t) {
-        word(s.s, f + ast_util::float_ty_to_str(t));
-      }
-      ast::lit_nil { word(s.s, "()"); }
-      ast::lit_bool(val) {
-        if val { word(s.s, "true"); } else { word(s.s, "false"); }
-      }
-    }
-}
-
-fn lit_to_str(l: @ast::lit) -> str { ret to_str(l, print_literal); }
-
-fn next_lit(s: ps, pos: uint) -> option<comments::lit> {
-    alt s.literals {
-      some(lits) {
-        while s.cur_lit < vec::len(lits) {
-            let lt = lits[s.cur_lit];
-            if lt.pos > pos { ret none; }
-            s.cur_lit += 1u;
-            if lt.pos == pos { ret some(lt); }
-        }
-        ret none;
-      }
-      _ { ret none; }
-    }
-}
-
-fn maybe_print_comment(s: ps, pos: uint) {
-    loop {
-        alt next_comment(s) {
-          some(cmnt) {
-            if cmnt.pos < pos {
-                print_comment(s, cmnt);
-                s.cur_cmnt += 1u;
-            } else { break; }
-          }
-          _ { break; }
-        }
-    }
-}
-
-fn print_comment(s: ps, cmnt: comments::cmnt) {
-    alt cmnt.style {
-      comments::mixed {
-        assert (vec::len(cmnt.lines) == 1u);
-        zerobreak(s.s);
-        word(s.s, cmnt.lines[0]);
-        zerobreak(s.s);
-      }
-      comments::isolated {
-        pprust::hardbreak_if_not_bol(s);
-        for cmnt.lines.each {|line|
-            // Don't print empty lines because they will end up as trailing
-            // whitespace
-            if str::is_not_empty(line) { word(s.s, line); }
-            hardbreak(s.s);
-        }
-      }
-      comments::trailing {
-        word(s.s, " ");
-        if vec::len(cmnt.lines) == 1u {
-            word(s.s, cmnt.lines[0]);
-            hardbreak(s.s);
-        } else {
-            ibox(s, 0u);
-            for cmnt.lines.each {|line|
-                if str::is_not_empty(line) { word(s.s, line); }
-                hardbreak(s.s);
-            }
-            end(s);
-        }
-      }
-      comments::blank_line {
-        // We need to do at least one, possibly two hardbreaks.
-        let is_semi =
-            alt s.s.last_token() {
-              pp::STRING(s, _) { s == ";" }
-              _ { false }
-            };
-        if is_semi || is_begin(s) || is_end(s) { hardbreak(s.s); }
-        hardbreak(s.s);
-      }
-    }
-}
-
-fn print_string(s: ps, st: str) {
-    word(s.s, "\"");
-    word(s.s, escape_str(st, '"'));
-    word(s.s, "\"");
-}
-
-fn escape_str(st: str, to_escape: char) -> str {
-    let mut out: str = "";
-    let len = str::len(st);
-    let mut i = 0u;
-    while i < len {
-        alt st[i] as char {
-          '\n' { out += "\\n"; }
-          '\t' { out += "\\t"; }
-          '\r' { out += "\\r"; }
-          '\\' { out += "\\\\"; }
-          cur {
-            if cur == to_escape { out += "\\"; }
-            // FIXME some (or all?) non-ascii things should be escaped
-            // (See #2306)
-            str::push_char(out, cur);
-          }
-        }
-        i += 1u;
-    }
-    ret out;
-}
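
escape_str above rewrites control characters and backslashes and backslash-escapes one caller-chosen delimiter. A modern-Rust sketch of the same scheme (iterating over chars rather than bytes; like the original, it leaves non-ASCII characters unescaped):

fn escape_str(st: &str, to_escape: char) -> String {
    let mut out = String::with_capacity(st.len());
    for cur in st.chars() {
        match cur {
            '\n' => out.push_str("\\n"),
            '\t' => out.push_str("\\t"),
            '\r' => out.push_str("\\r"),
            '\\' => out.push_str("\\\\"),
            _ => {
                if cur == to_escape { out.push('\\'); }
                out.push(cur);
            }
        }
    }
    out
}

// e.g. escape_str("a\"b", '"') == "a\\\"b"  (prints as: a\"b)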
-
-fn to_str<T>(t: T, f: fn@(ps, T)) -> str {
-    let buffer = io::mem_buffer();
-    let s = rust_printer(io::mem_buffer_writer(buffer));
-    f(s, t);
-    eof(s.s);
-    io::mem_buffer_str(buffer)
-}
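
to_str above renders any node to a string by running a printing function against an in-memory buffer; lit_to_str is built directly on it. A minimal modern-Rust equivalent, using a String in place of the mem_buffer writer:

fn to_str<T>(t: T, f: impl Fn(&mut String, T)) -> String {
    let mut buf = String::new();
    f(&mut buf, t);
    buf
}

// e.g. to_str(42, |buf, n: i32| buf.push_str(&n.to_string())) == "42"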
-
-fn next_comment(s: ps) -> option<comments::cmnt> {
-    alt s.comments {
-      some(cmnts) {
-        if s.cur_cmnt < vec::len(cmnts) {
-            ret some(cmnts[s.cur_cmnt]);
-        } else { ret none::<comments::cmnt>; }
-      }
-      _ { ret none::<comments::cmnt>; }
-    }
-}
-
-fn constr_args_to_str<T>(f: fn@(T) -> str, args: [@ast::sp_constr_arg<T>]) ->
-   str {
-    let mut comma = false;
-    let mut s = "(";
-    for args.each {|a|
-        if comma { s += ", "; } else { comma = true; }
-        s += constr_arg_to_str::<T>(f, a.node);
-    }
-    s += ")";
-    ret s;
-}
-
-fn constr_arg_to_str<T>(f: fn@(T) -> str, c: ast::constr_arg_general_<T>) ->
-   str {
-    alt c {
-      ast::carg_base { ret "*"; }
-      ast::carg_ident(i) { ret f(i); }
-      ast::carg_lit(l) { ret lit_to_str(l); }
-    }
-}
-
-// needed b/c constr_args_to_str needs
-// something that takes an alias
-// (argh)
-fn uint_to_str(&&i: uint) -> str { ret uint::str(i); }
-
-fn ast_ty_fn_constr_to_str(&&c: @ast::constr) -> str {
-    ret path_to_str(c.node.path) +
-            constr_args_to_str(uint_to_str, c.node.args);
-}
-
-fn ast_fn_constr_to_str(decl: ast::fn_decl, &&c: @ast::constr) -> str {
-    let arg_to_str = bind fn_arg_idx_to_str(decl, _);
-    ret path_to_str(c.node.path) +
-            constr_args_to_str(arg_to_str, c.node.args);
-}
-
-fn ty_constr_to_str(&&c: @ast::ty_constr) -> str {
-    fn ty_constr_path_to_str(&&p: @ast::path) -> str { "*." + path_to_str(p) }
-
-    ret path_to_str(c.node.path) +
-            constr_args_to_str::<@ast::path>(ty_constr_path_to_str,
-                                             c.node.args);
-}
-
-fn constrs_str<T>(constrs: [T], elt: fn(T) -> str) -> str {
-    let mut s = "", colon = true;
-    for constrs.each {|c|
-        if colon { s += " : "; colon = false; } else { s += ", "; }
-        s += elt(c);
-    }
-    ret s;
-}
-
-fn fn_arg_idx_to_str(decl: ast::fn_decl, &&idx: uint) -> str {
-    decl.inputs[idx].ident
-}
-
-fn opt_proto_to_str(opt_p: option<ast::proto>) -> str {
-    alt opt_p {
-      none { "fn" }
-      some(p) { proto_to_str(p) }
-    }
-}
-
-fn purity_to_str(p: ast::purity) -> str {
-    alt p {
-      ast::impure_fn {"impure"}
-      ast::unsafe_fn {"unsafe"}
-      ast::pure_fn {"pure"}
-      ast::crust_fn {"crust"}
-    }
-}
-
-fn print_purity(s: ps, p: ast::purity) {
-    alt p {
-      ast::impure_fn {}
-      _ { word_nbsp(s, purity_to_str(p)) }
-    }
-}
-
-fn proto_to_str(p: ast::proto) -> str {
-    ret alt p {
-      ast::proto_bare { "native fn" }
-      ast::proto_any { "fn" }
-      ast::proto_block { "fn&" }
-      ast::proto_uniq { "fn~" }
-      ast::proto_box { "fn@" }
-    };
-}
-
-//
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
-//
diff --git a/src/librustsyntax/rustsyntax.rc b/src/librustsyntax/rustsyntax.rc
deleted file mode 100644
index eeb2d31d6a3..00000000000
--- a/src/librustsyntax/rustsyntax.rc
+++ /dev/null
@@ -1,70 +0,0 @@
-#[link(name = "rustsyntax",
-       vers = "0.2",
-       uuid = "c2b79dd0-a437-4ec7-95f9-858d77808c2f")];
-
-#[crate_type = "lib"];
-
-#[no_core];
-
-use core(vers = "0.2");
-use std(vers = "0.2");
-
-import core::*;
-
-mod attr;
-mod diagnostic;
-mod codemap;
-mod ast;
-mod ast_util;
-mod ast_map;
-mod visit;
-mod fold;
-mod util {
-    mod interner;
-}
-
-mod parse {
-    export parser;
-    export lexer;
-    export comments;
-    export prec;
-    export classify;
-    export attr;
-
-    mod eval;
-    mod lexer;
-    mod parser;
-    mod token;
-    mod comments;
-    mod attr;
-
-    #[doc = "Common routines shared by parser mods"]
-    mod common;
-
-    #[doc = "Functions dealing with operator precedence"]
-    mod prec;
-
-    #[doc = "Routines the parser uses to classify AST nodes"]
-    mod classify;
-}
-
-mod print {
-    mod pp;
-    mod pprust;
-}
-
-mod ext {
-    mod base;
-    mod expand;
-    mod qquote;
-    mod build;
-
-    mod fmt;
-    mod env;
-    mod simplext;
-    mod concat_idents;
-    mod ident_to_str;
-    mod log_syntax;
-    mod auto_serialize;
-    mod source_util;
-}
diff --git a/src/librustsyntax/util/interner.rs b/src/librustsyntax/util/interner.rs
deleted file mode 100644
index 89078bfaa36..00000000000
--- a/src/librustsyntax/util/interner.rs
+++ /dev/null
@@ -1,40 +0,0 @@
-// An "interner" is a data structure that associates values with uint tags and
-// allows bidirectional lookup; i.e. given a value, one can easily find its
-// uint tag, and vice versa.
-import std::map;
-import std::map::{hashmap, hashfn, eqfn};
-import dvec::{dvec, extensions};
-
-type interner<T> =
-    {map: hashmap<T, uint>,
-     vect: dvec<T>,
-     hasher: hashfn<T>,
-     eqer: eqfn<T>};
-
-fn mk<T: copy>(hasher: hashfn<T>, eqer: eqfn<T>) -> interner<T> {
-    let m = map::hashmap::<T, uint>(hasher, eqer);
-    ret {map: m, vect: dvec(), hasher: hasher, eqer: eqer};
-}
-
-fn intern<T: copy>(itr: interner<T>, val: T) -> uint {
-    alt itr.map.find(val) {
-      some(idx) { ret idx; }
-      none {
-        let new_idx = itr.vect.len();
-        itr.map.insert(val, new_idx);
-        itr.vect.push(val);
-        ret new_idx;
-      }
-    }
-}
-
-// |get| isn't "pure" in the traditional sense, because it can go from
-// failing to returning a value as items are interned. But for typestate,
-// where we first check a pred and then rely on it, ceasing to fail is ok.
-pure fn get<T: copy>(itr: interner<T>, idx: uint) -> T {
-    unchecked {
-        itr.vect.get_elt(idx)
-    }
-}
-
-fn len<T>(itr: interner<T>) -> uint { ret itr.vect.len(); }
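
Not part of the original file: a modern-Rust sketch of the same two-sided structure, a HashMap from value to tag paired with a Vec from tag to value, which is what gives intern and get their bidirectional lookup:

use std::collections::HashMap;
use std::hash::Hash;

struct Interner<T: Eq + Hash + Clone> {
    map: HashMap<T, usize>, // value -> tag
    vect: Vec<T>,           // tag -> value
}

impl<T: Eq + Hash + Clone> Interner<T> {
    fn new() -> Self {
        Interner { map: HashMap::new(), vect: Vec::new() }
    }

    // Reuse the existing tag for a known value, otherwise hand out the next one.
    fn intern(&mut self, val: T) -> usize {
        if let Some(&idx) = self.map.get(&val) {
            return idx;
        }
        let new_idx = self.vect.len();
        self.map.insert(val.clone(), new_idx);
        self.vect.push(val);
        new_idx
    }

    fn get(&self, idx: usize) -> &T {
        &self.vect[idx]
    }

    fn len(&self) -> usize {
        self.vect.len()
    }
}

// Interning the same value twice yields the same tag:
//   let mut i = Interner::new();
//   assert_eq!(i.intern("fn".to_string()), i.intern("fn".to_string()));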
diff --git a/src/librustsyntax/visit.rs b/src/librustsyntax/visit.rs
deleted file mode 100644
index c5be778572b..00000000000
--- a/src/librustsyntax/visit.rs
+++ /dev/null
@@ -1,590 +0,0 @@
-
-import ast::*;
-import codemap::span;
-
-// Context-passing AST walker. Each overridden visit method has full control
-// over what happens with its node: it can do its own traversal of the node's
-// children (potentially passing in different contexts to each), call
-// visit::visit_* to apply the default traversal algorithm (again, it can
-// override the context), or prevent deeper traversal by doing nothing.
-
-// Our typesystem doesn't do circular types, so the visitor record cannot
-// hold functions that take visitors. A vt enum is used to break the cycle.
-enum vt<E> { mk_vt(visitor<E>), }
-
-enum fn_kind {
-    fk_item_fn(ident, [ty_param]), //< an item declared with fn()
-    fk_method(ident, [ty_param], @method),
-    fk_res(ident, [ty_param], region_param),
-    fk_anon(proto, capture_clause),  //< an anonymous function like fn@(...)
-    fk_fn_block(capture_clause),     //< a block {||...}
-    fk_ctor(ident, [ty_param], node_id /* self id */,
-            def_id /* parent class id */), // class constructor
-    fk_dtor([ty_param], node_id /* self id */,
-            def_id /* parent class id */) // class destructor
-
-}
-
-fn name_of_fn(fk: fn_kind) -> ident {
-    alt fk {
-      fk_item_fn(name, _) | fk_method(name, _, _) | fk_res(name, _, _)
-          | fk_ctor(name, _, _, _) { name }
-      fk_anon(*) | fk_fn_block(*) { "anon" }
-      fk_dtor(*)                  { "drop" }
-    }
-}
-
-fn tps_of_fn(fk: fn_kind) -> [ty_param] {
-    alt fk {
-      fk_item_fn(_, tps) | fk_method(_, tps, _) | fk_res(_, tps, _)
-          | fk_ctor(_, tps, _, _) | fk_dtor(tps, _, _) { tps }
-      fk_anon(*) | fk_fn_block(*) { [] }
-    }
-}
-
-type visitor<E> =
-    // takes the components so that one function can be
-    // generic over constr and ty_constr
-    @{visit_mod: fn@(_mod, span, node_id, E, vt<E>),
-      visit_view_item: fn@(@view_item, E, vt<E>),
-      visit_native_item: fn@(@native_item, E, vt<E>),
-      visit_item: fn@(@item, E, vt<E>),
-      visit_local: fn@(@local, E, vt<E>),
-      visit_block: fn@(ast::blk, E, vt<E>),
-      visit_stmt: fn@(@stmt, E, vt<E>),
-      visit_arm: fn@(arm, E, vt<E>),
-      visit_pat: fn@(@pat, E, vt<E>),
-      visit_decl: fn@(@decl, E, vt<E>),
-      visit_expr: fn@(@expr, E, vt<E>),
-      visit_ty: fn@(@ty, E, vt<E>),
-      visit_ty_params: fn@([ty_param], E, vt<E>),
-      visit_constr: fn@(@path, span, node_id, E, vt<E>),
-      visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id, E, vt<E>),
-      visit_class_item: fn@(@class_member, E, vt<E>)};
-
-fn default_visitor<E>() -> visitor<E> {
-    ret @{visit_mod: bind visit_mod::<E>(_, _, _, _, _),
-          visit_view_item: bind visit_view_item::<E>(_, _, _),
-          visit_native_item: bind visit_native_item::<E>(_, _, _),
-          visit_item: bind visit_item::<E>(_, _, _),
-          visit_local: bind visit_local::<E>(_, _, _),
-          visit_block: bind visit_block::<E>(_, _, _),
-          visit_stmt: bind visit_stmt::<E>(_, _, _),
-          visit_arm: bind visit_arm::<E>(_, _, _),
-          visit_pat: bind visit_pat::<E>(_, _, _),
-          visit_decl: bind visit_decl::<E>(_, _, _),
-          visit_expr: bind visit_expr::<E>(_, _, _),
-          visit_ty: bind skip_ty::<E>(_, _, _),
-          visit_ty_params: bind visit_ty_params::<E>(_, _, _),
-          visit_constr: bind visit_constr::<E>(_, _, _, _, _),
-          visit_fn: bind visit_fn::<E>(_, _, _, _, _, _, _),
-          visit_class_item: bind visit_class_item::<E>(_,_,_)};
-}
-
-fn visit_crate<E>(c: crate, e: E, v: vt<E>) {
-    v.visit_mod(c.node.module, c.span, crate_node_id, e, v);
-}
-
-fn visit_crate_directive<E>(cd: @crate_directive, e: E, v: vt<E>) {
-    alt cd.node {
-      cdir_src_mod(_, _) { }
-      cdir_dir_mod(_, cdirs, _) {
-        for cdirs.each {|cdir|
-            visit_crate_directive(cdir, e, v);
-        }
-      }
-      cdir_view_item(vi) { v.visit_view_item(vi, e, v); }
-      cdir_syntax(_) { }
-    }
-}
-
-fn visit_mod<E>(m: _mod, _sp: span, _id: node_id, e: E, v: vt<E>) {
-    for m.view_items.each {|vi| v.visit_view_item(vi, e, v); }
-    for m.items.each {|i| v.visit_item(i, e, v); }
-}
-
-fn visit_view_item<E>(_vi: @view_item, _e: E, _v: vt<E>) { }
-
-fn visit_local<E>(loc: @local, e: E, v: vt<E>) {
-    v.visit_pat(loc.node.pat, e, v);
-    v.visit_ty(loc.node.ty, e, v);
-    alt loc.node.init { none { } some(i) { v.visit_expr(i.expr, e, v); } }
-}
-
-fn visit_item<E>(i: @item, e: E, v: vt<E>) {
-    alt i.node {
-      item_const(t, ex) { v.visit_ty(t, e, v); v.visit_expr(ex, e, v); }
-      item_fn(decl, tp, body) {
-        v.visit_fn(fk_item_fn(i.ident, tp), decl, body, i.span, i.id, e, v);
-      }
-      item_mod(m) { v.visit_mod(m, i.span, i.id, e, v); }
-      item_native_mod(nm) {
-        for nm.view_items.each {|vi| v.visit_view_item(vi, e, v); }
-        for nm.items.each {|ni| v.visit_native_item(ni, e, v); }
-      }
-      item_ty(t, tps, rp) {
-        v.visit_ty(t, e, v);
-        v.visit_ty_params(tps, e, v);
-      }
-      item_res(decl, tps, body, dtor_id, _, rp) {
-        v.visit_fn(fk_res(i.ident, tps, rp), decl, body, i.span,
-                   dtor_id, e, v);
-      }
-      item_enum(variants, tps, _) {
-        v.visit_ty_params(tps, e, v);
-        for variants.each {|vr|
-            for vr.node.args.each {|va| v.visit_ty(va.ty, e, v); }
-        }
-      }
-      item_impl(tps, _rp, ifce, ty, methods) {
-        v.visit_ty_params(tps, e, v);
-        option::iter(ifce, {|p| visit_path(p.path, e, v)});
-        v.visit_ty(ty, e, v);
-        for methods.each {|m|
-            visit_method_helper(m, e, v)
-        }
-      }
-      item_class(tps, ifaces, members, ctor, m_dtor, _) {
-          v.visit_ty_params(tps, e, v);
-          for members.each {|m|
-             v.visit_class_item(m, e, v);
-          }
-          for ifaces.each {|p| visit_path(p.path, e, v); }
-          visit_class_ctor_helper(ctor, i.ident, tps,
-                                  ast_util::local_def(i.id), e, v);
-          option::iter(m_dtor) {|dtor|
-                  visit_class_dtor_helper(dtor, tps,
-                     ast_util::local_def(i.id), e, v)};
-      }
-      item_iface(tps, _rp, methods) {
-        v.visit_ty_params(tps, e, v);
-        for methods.each {|m|
-            for m.decl.inputs.each {|a| v.visit_ty(a.ty, e, v); }
-            v.visit_ty_params(m.tps, e, v);
-            v.visit_ty(m.decl.output, e, v);
-        }
-      }
-    }
-}
-
-fn visit_class_item<E>(cm: @class_member, e:E, v:vt<E>) {
-    alt cm.node {
-        instance_var(_, t, _, _, _) {
-            v.visit_ty(t, e, v);
-        }
-        class_method(m) {
-            visit_method_helper(m, e, v);
-        }
-    }
-}
-
-fn skip_ty<E>(_t: @ty, _e: E, _v: vt<E>) {}
-
-fn visit_ty<E>(t: @ty, e: E, v: vt<E>) {
-    alt t.node {
-      ty_box(mt) | ty_uniq(mt) |
-      ty_vec(mt) | ty_ptr(mt) | ty_rptr(_, mt) {
-        v.visit_ty(mt.ty, e, v);
-      }
-      ty_rec(flds) {
-        for flds.each {|f| v.visit_ty(f.node.mt.ty, e, v); }
-      }
-      ty_tup(ts) { for ts.each {|tt| v.visit_ty(tt, e, v); } }
-      ty_fn(_, decl) {
-        for decl.inputs.each {|a| v.visit_ty(a.ty, e, v); }
-        for decl.constraints.each {|c|
-            v.visit_constr(c.node.path, c.span, c.node.id, e, v);
-        }
-        v.visit_ty(decl.output, e, v);
-      }
-      ty_path(p, _) { visit_path(p, e, v); }
-      ty_vstore(t, _) {
-        v.visit_ty(t, e, v);
-      }
-      ty_constr(t, cs) {
-        v.visit_ty(t, e, v);
-        for cs.each {|tc|
-            v.visit_constr(tc.node.path, tc.span, tc.node.id, e, v);
-        }
-      }
-      ty_nil |
-      ty_bot |
-      ty_mac(_) |
-      ty_infer {
-      }
-    }
-}
-
-fn visit_constr<E>(_operator: @path, _sp: span, _id: node_id, _e: E,
-                   _v: vt<E>) {
-    // default
-}
-
-fn visit_path<E>(p: @path, e: E, v: vt<E>) {
-    for p.types.each {|tp| v.visit_ty(tp, e, v); }
-}
-
-fn visit_pat<E>(p: @pat, e: E, v: vt<E>) {
-    alt p.node {
-      pat_enum(path, children) {
-        visit_path(path, e, v);
-        option::iter(children) {|children|
-                for children.each {|child| v.visit_pat(child, e, v); }}
-      }
-      pat_rec(fields, _) {
-        for fields.each {|f| v.visit_pat(f.pat, e, v); }
-      }
-      pat_tup(elts) { for elts.each {|elt| v.visit_pat(elt, e, v); } }
-      pat_box(inner) | pat_uniq(inner) {
-        v.visit_pat(inner, e, v);
-      }
-      pat_ident(path, inner) {
-          visit_path(path, e, v);
-          option::iter(inner) {|subpat| v.visit_pat(subpat, e, v)};
-      }
-      pat_lit(ex) { v.visit_expr(ex, e, v); }
-      pat_range(e1, e2) { v.visit_expr(e1, e, v); v.visit_expr(e2, e, v); }
-      pat_wild {}
-    }
-}
-
-fn visit_native_item<E>(ni: @native_item, e: E, v: vt<E>) {
-    alt ni.node {
-      native_item_fn(fd, tps) {
-        v.visit_ty_params(tps, e, v);
-        visit_fn_decl(fd, e, v);
-      }
-    }
-}
-
-fn visit_ty_params<E>(tps: [ty_param], e: E, v: vt<E>) {
-    for tps.each {|tp|
-        for vec::each(*tp.bounds) {|bound|
-            alt bound {
-              bound_iface(t) { v.visit_ty(t, e, v); }
-              bound_copy | bound_send | bound_const { }
-            }
-        }
-    }
-}
-
-fn visit_fn_decl<E>(fd: fn_decl, e: E, v: vt<E>) {
-    for fd.inputs.each {|a| v.visit_ty(a.ty, e, v); }
-    for fd.constraints.each {|c|
-        v.visit_constr(c.node.path, c.span, c.node.id, e, v);
-    }
-    v.visit_ty(fd.output, e, v);
-}
-
-// Note: there is no visit_method() method in the visitor; instead, override
-// visit_fn() and check for fk_method().  I named this visit_method_helper()
-// because it is not a default impl of any method, though I doubt that really
-// clarifies anything. - Niko
-fn visit_method_helper<E>(m: @method, e: E, v: vt<E>) {
-    v.visit_fn(fk_method(m.ident, m.tps, m), m.decl, m.body, m.span,
-               m.id, e, v);
-}
-
-// Similar logic to the comment on visit_method_helper - Tim
-fn visit_class_ctor_helper<E>(ctor: class_ctor, nm: ident, tps: [ty_param],
-                              parent_id: def_id, e: E, v: vt<E>) {
-    v.visit_fn(fk_ctor(nm, tps, ctor.node.self_id,
-                              parent_id), ctor.node.dec,
-               ctor.node.body, ctor.span, ctor.node.id, e, v)
-
-}
-
-fn visit_class_dtor_helper<E>(dtor: class_dtor, tps: [ty_param],
-                              parent_id: def_id, e: E, v: vt<E>) {
-    v.visit_fn(fk_dtor(tps, dtor.node.self_id,
-                       parent_id), ast_util::dtor_dec(),
-               dtor.node.body, dtor.span, dtor.node.id, e, v)
-
-}
-
-fn visit_fn<E>(fk: fn_kind, decl: fn_decl, body: blk, _sp: span,
-               _id: node_id, e: E, v: vt<E>) {
-    visit_fn_decl(decl, e, v);
-    v.visit_ty_params(tps_of_fn(fk), e, v);
-    v.visit_block(body, e, v);
-}
-
-fn visit_block<E>(b: ast::blk, e: E, v: vt<E>) {
-    for b.node.view_items.each {|vi| v.visit_view_item(vi, e, v); }
-    for b.node.stmts.each {|s| v.visit_stmt(s, e, v); }
-    visit_expr_opt(b.node.expr, e, v);
-}
-
-fn visit_stmt<E>(s: @stmt, e: E, v: vt<E>) {
-    alt s.node {
-      stmt_decl(d, _) { v.visit_decl(d, e, v); }
-      stmt_expr(ex, _) { v.visit_expr(ex, e, v); }
-      stmt_semi(ex, _) { v.visit_expr(ex, e, v); }
-    }
-}
-
-fn visit_decl<E>(d: @decl, e: E, v: vt<E>) {
-    alt d.node {
-      decl_local(locs) {
-        for locs.each {|loc| v.visit_local(loc, e, v); }
-      }
-      decl_item(it) { v.visit_item(it, e, v); }
-    }
-}
-
-fn visit_expr_opt<E>(eo: option<@expr>, e: E, v: vt<E>) {
-    alt eo { none { } some(ex) { v.visit_expr(ex, e, v); } }
-}
-
-fn visit_exprs<E>(exprs: [@expr], e: E, v: vt<E>) {
-    for exprs.each {|ex| v.visit_expr(ex, e, v); }
-}
-
-fn visit_mac<E>(m: mac, e: E, v: vt<E>) {
-    alt m.node {
-      ast::mac_invoc(pth, arg, body) {
-        option::map(arg) {|arg| v.visit_expr(arg, e, v)}; }
-      ast::mac_embed_type(ty) { v.visit_ty(ty, e, v); }
-      ast::mac_embed_block(blk) { v.visit_block(blk, e, v); }
-      ast::mac_ellipsis { }
-      ast::mac_aq(_, e) { /* FIXME: maybe visit (Issue #2340) */ }
-      ast::mac_var(_) { }
-    }
-}
-
-fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
-    alt ex.node {
-      expr_new(pool, _, val) {
-        v.visit_expr(pool, e, v);
-        v.visit_expr(val, e, v);
-      }
-      expr_vstore(x, _) { v.visit_expr(x, e, v); }
-      expr_vec(es, _) { visit_exprs(es, e, v); }
-      expr_rec(flds, base) {
-        for flds.each {|f| v.visit_expr(f.node.expr, e, v); }
-        visit_expr_opt(base, e, v);
-      }
-      expr_tup(elts) { for elts.each {|el| v.visit_expr(el, e, v); } }
-      expr_call(callee, args, _) {
-        visit_exprs(args, e, v);
-        v.visit_expr(callee, e, v);
-      }
-      expr_bind(callee, args) {
-        v.visit_expr(callee, e, v);
-        for args.each {|eo| visit_expr_opt(eo, e, v); }
-      }
-      expr_binary(_, a, b) { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
-      expr_addr_of(_, x) | expr_unary(_, x) | expr_loop_body(x) |
-      expr_check(_, x) | expr_assert(x) {
-        v.visit_expr(x, e, v);
-      }
-      expr_lit(_) { }
-      expr_cast(x, t) { v.visit_expr(x, e, v); v.visit_ty(t, e, v); }
-      expr_if(x, b, eo) {
-        v.visit_expr(x, e, v);
-        v.visit_block(b, e, v);
-        visit_expr_opt(eo, e, v);
-      }
-      expr_if_check(x, b, eo) {
-        v.visit_expr(x, e, v);
-        v.visit_block(b, e, v);
-        visit_expr_opt(eo, e, v);
-      }
-      expr_while(x, b) { v.visit_expr(x, e, v); v.visit_block(b, e, v); }
-      expr_loop(b) { v.visit_block(b, e, v); }
-      expr_alt(x, arms, _) {
-        v.visit_expr(x, e, v);
-        for arms.each {|a| v.visit_arm(a, e, v); }
-      }
-      expr_fn(proto, decl, body, cap_clause) {
-        v.visit_fn(fk_anon(proto, cap_clause), decl, body,
-                   ex.span, ex.id, e, v);
-      }
-      expr_fn_block(decl, body, cap_clause) {
-        v.visit_fn(fk_fn_block(cap_clause), decl, body,
-                   ex.span, ex.id, e, v);
-      }
-      expr_block(b) { v.visit_block(b, e, v); }
-      expr_assign(a, b) { v.visit_expr(b, e, v); v.visit_expr(a, e, v); }
-      expr_copy(a) { v.visit_expr(a, e, v); }
-      expr_move(a, b) { v.visit_expr(b, e, v); v.visit_expr(a, e, v); }
-      expr_swap(a, b) { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
-      expr_assign_op(_, a, b) {
-        v.visit_expr(b, e, v);
-        v.visit_expr(a, e, v);
-      }
-      expr_field(x, _, tys) {
-        v.visit_expr(x, e, v);
-        for tys.each {|tp| v.visit_ty(tp, e, v); }
-      }
-      expr_index(a, b) { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
-      expr_path(p) { visit_path(p, e, v); }
-      expr_fail(eo) { visit_expr_opt(eo, e, v); }
-      expr_break { }
-      expr_cont { }
-      expr_ret(eo) { visit_expr_opt(eo, e, v); }
-      expr_log(_, lv, x) {
-        v.visit_expr(lv, e, v);
-        v.visit_expr(x, e, v);
-      }
-      expr_mac(mac) { visit_mac(mac, e, v); }
-    }
-}
-
-fn visit_arm<E>(a: arm, e: E, v: vt<E>) {
-    for a.pats.each {|p| v.visit_pat(p, e, v); }
-    visit_expr_opt(a.guard, e, v);
-    v.visit_block(a.body, e, v);
-}
-
-// Simpler, non-context-passing interface. Always walks the whole tree and
-// simply calls the given functions on the nodes.
-
-type simple_visitor =
-    // takes the components so that one function can be
-    // generic over constr and ty_constr
-    @{visit_mod: fn@(_mod, span, node_id),
-      visit_view_item: fn@(@view_item),
-      visit_native_item: fn@(@native_item),
-      visit_item: fn@(@item),
-      visit_local: fn@(@local),
-      visit_block: fn@(ast::blk),
-      visit_stmt: fn@(@stmt),
-      visit_arm: fn@(arm),
-      visit_pat: fn@(@pat),
-      visit_decl: fn@(@decl),
-      visit_expr: fn@(@expr),
-      visit_ty: fn@(@ty),
-      visit_ty_params: fn@([ty_param]),
-      visit_constr: fn@(@path, span, node_id),
-      visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id),
-      visit_class_item: fn@(@class_member)};
-
-fn simple_ignore_ty(_t: @ty) {}
-
-fn default_simple_visitor() -> simple_visitor {
-    ret @{visit_mod: fn@(_m: _mod, _sp: span, _id: node_id) { },
-          visit_view_item: fn@(_vi: @view_item) { },
-          visit_native_item: fn@(_ni: @native_item) { },
-          visit_item: fn@(_i: @item) { },
-          visit_local: fn@(_l: @local) { },
-          visit_block: fn@(_b: ast::blk) { },
-          visit_stmt: fn@(_s: @stmt) { },
-          visit_arm: fn@(_a: arm) { },
-          visit_pat: fn@(_p: @pat) { },
-          visit_decl: fn@(_d: @decl) { },
-          visit_expr: fn@(_e: @expr) { },
-          visit_ty: simple_ignore_ty,
-          visit_ty_params: fn@(_ps: [ty_param]) {},
-          visit_constr: fn@(_p: @path, _sp: span, _id: node_id) { },
-          visit_fn: fn@(_fk: fn_kind, _d: fn_decl, _b: blk, _sp: span,
-                        _id: node_id) { },
-          visit_class_item: fn@(_c: @class_member) {}
-         };
-}
-
-fn mk_simple_visitor(v: simple_visitor) -> vt<()> {
-    fn v_mod(f: fn@(_mod, span, node_id), m: _mod, sp: span, id: node_id,
-             &&e: (), v: vt<()>) {
-        f(m, sp, id);
-        visit_mod(m, sp, id, e, v);
-    }
-    fn v_view_item(f: fn@(@view_item), vi: @view_item, &&e: (), v: vt<()>) {
-        f(vi);
-        visit_view_item(vi, e, v);
-    }
-    fn v_native_item(f: fn@(@native_item), ni: @native_item, &&e: (),
-                     v: vt<()>) {
-        f(ni);
-        visit_native_item(ni, e, v);
-    }
-    fn v_item(f: fn@(@item), i: @item, &&e: (), v: vt<()>) {
-        f(i);
-        visit_item(i, e, v);
-    }
-    fn v_local(f: fn@(@local), l: @local, &&e: (), v: vt<()>) {
-        f(l);
-        visit_local(l, e, v);
-    }
-    fn v_block(f: fn@(ast::blk), bl: ast::blk, &&e: (), v: vt<()>) {
-        f(bl);
-        visit_block(bl, e, v);
-    }
-    fn v_stmt(f: fn@(@stmt), st: @stmt, &&e: (), v: vt<()>) {
-        f(st);
-        visit_stmt(st, e, v);
-    }
-    fn v_arm(f: fn@(arm), a: arm, &&e: (), v: vt<()>) {
-        f(a);
-        visit_arm(a, e, v);
-    }
-    fn v_pat(f: fn@(@pat), p: @pat, &&e: (), v: vt<()>) {
-        f(p);
-        visit_pat(p, e, v);
-    }
-    fn v_decl(f: fn@(@decl), d: @decl, &&e: (), v: vt<()>) {
-        f(d);
-        visit_decl(d, e, v);
-    }
-    fn v_expr(f: fn@(@expr), ex: @expr, &&e: (), v: vt<()>) {
-        f(ex);
-        visit_expr(ex, e, v);
-    }
-    fn v_ty(f: fn@(@ty), ty: @ty, &&e: (), v: vt<()>) {
-        f(ty);
-        visit_ty(ty, e, v);
-    }
-    fn v_ty_params(f: fn@([ty_param]), ps: [ty_param], &&e: (), v: vt<()>) {
-        f(ps);
-        visit_ty_params(ps, e, v);
-    }
-    fn v_constr(f: fn@(@path, span, node_id), pt: @path, sp: span,
-                id: node_id, &&e: (), v: vt<()>) {
-        f(pt, sp, id);
-        visit_constr(pt, sp, id, e, v);
-    }
-    fn v_fn(f: fn@(fn_kind, fn_decl, blk, span, node_id),
-            fk: fn_kind, decl: fn_decl, body: blk, sp: span,
-            id: node_id, &&e: (), v: vt<()>) {
-        f(fk, decl, body, sp, id);
-        visit_fn(fk, decl, body, sp, id, e, v);
-    }
-    let visit_ty = if v.visit_ty == simple_ignore_ty {
-        bind skip_ty(_, _, _)
-    } else {
-        bind v_ty(v.visit_ty, _, _, _)
-    };
-    fn v_class_item(f: fn@(@class_member),
-                    cm: @class_member, &&e: (),
-                    v: vt<()>) {
-        f(cm);
-        visit_class_item(cm, e, v);
-    }
-    ret mk_vt(@{visit_mod: bind v_mod(v.visit_mod, _, _, _, _, _),
-                visit_view_item: bind v_view_item(v.visit_view_item, _, _, _),
-                visit_native_item:
-                    bind v_native_item(v.visit_native_item, _, _, _),
-                visit_item: bind v_item(v.visit_item, _, _, _),
-                visit_local: bind v_local(v.visit_local, _, _, _),
-                visit_block: bind v_block(v.visit_block, _, _, _),
-                visit_stmt: bind v_stmt(v.visit_stmt, _, _, _),
-                visit_arm: bind v_arm(v.visit_arm, _, _, _),
-                visit_pat: bind v_pat(v.visit_pat, _, _, _),
-                visit_decl: bind v_decl(v.visit_decl, _, _, _),
-                visit_expr: bind v_expr(v.visit_expr, _, _, _),
-                visit_ty: visit_ty,
-                visit_ty_params: bind v_ty_params(v.visit_ty_params, _, _, _),
-                visit_constr: bind v_constr(v.visit_constr, _, _, _, _, _),
-                visit_fn: bind v_fn(v.visit_fn, _, _, _, _, _, _, _),
-                visit_class_item: bind v_class_item(v.visit_class_item, _, _,
-                                                    _)
-               });
-}
-
-// Local Variables:
-// mode: rust
-// fill-column: 78;
-// indent-tabs-mode: nil
-// c-basic-offset: 4
-// buffer-file-coding-system: utf-8-unix
-// End:
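
Editorial postscript, for comparison only: a modern-Rust sketch of the context-passing pattern that visit.rs built from records of fn@ closures plus the vt wrapper enum. Expr, Visitor, walk_expr, and counting_visit are all invented for illustration; in current Rust a struct can mention its own type in a field, so no wrapper enum is needed to break the cycle.

use std::cell::Cell;

// Hypothetical mini-AST; the real visitor has one function per AST node kind
// and threads a caller-chosen context E through every call.
enum Expr {
    Lit(i64),
    Add(Box<Expr>, Box<Expr>),
}

// Record-of-functions: each field also receives the visitor itself, so an
// override decides whether (and with what context) traversal continues.
struct Visitor<E> {
    visit_expr: fn(&Expr, &E, &Visitor<E>),
}

// Default traversal: just visit the children.
fn walk_expr<E>(ex: &Expr, e: &E, v: &Visitor<E>) {
    if let Expr::Add(a, b) = ex {
        (v.visit_expr)(a, e, v);
        (v.visit_expr)(b, e, v);
    }
}

fn default_visitor<E>() -> Visitor<E> {
    Visitor { visit_expr: walk_expr }
}

// An override that counts nodes, then falls back to the default walk.
fn counting_visit(ex: &Expr, count: &Cell<usize>, v: &Visitor<Cell<usize>>) {
    count.set(count.get() + 1);
    walk_expr(ex, count, v);
}

fn main() {
    let tree = Expr::Add(Box::new(Expr::Lit(1)), Box::new(Expr::Lit(2)));
    let v = Visitor { visit_expr: counting_visit };
    let count = Cell::new(0);
    (v.visit_expr)(&tree, &count, &v);
    assert_eq!(count.get(), 3);
    let _ = default_visitor::<()>(); // the do-nothing baseline
}

The simple_visitor/mk_simple_visitor pair above is the same machinery with the context fixed to (), wrapping plain per-node callbacks so they still get the default traversal underneath.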