| | | |
|---|---|---|
| author | bors <bors@rust-lang.org> | 2014-03-02 08:31:33 -0800 |
| committer | bors <bors@rust-lang.org> | 2014-03-02 08:31:33 -0800 |
| commit | 910012aabae3dfd4b7190f46e88cde75804b5cb0 | (patch) |
| tree | ac07696b5bb7a8ba6dacd1b2abd3926b59621058 | /src/libsyntax |
| parent | baf79083aedb8ae64efddbcf28b358841cfd1157 | (diff) |
| parent | 355932407ba324d33cd9353a69203f7f76c059a6 | (diff) |
auto merge of #12637 : pcwalton/rust/devecing, r=alexcrichton
r? @alexcrichton
Diffstat (limited to 'src/libsyntax')
54 files changed, 1449 insertions, 1250 deletions
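The change is almost entirely mechanical: the owned-vector type `~[T]` becomes `Vec<T>`, literals `~[]` / `~[a, b]` become `Vec::new()` / `vec!(a, b)`, slice helpers such as `map` become `iter().map(..).collect()`, indexing `v[i]` becomes `*v.get(i)`, and `Vec` values are passed to slice-taking callers via `.as_slice()`. The sketch below shows the same pattern in present-day Rust, since the pre-1.0 `~[T]`/`@T` syntax in the diff no longer compiles; `AbiData`, `ABI_DATAS`, `all_names`, and `print_all` are simplified stand-ins for the shapes in `abi.rs`, not code from this commit, and note that today's `Vec::get` returns `Option<&T>` rather than the bare reference the 2014 API returned.

```rust
// Simplified stand-ins for the structures touched in src/libsyntax/abi.rs.
struct AbiData {
    name: &'static str,
}

static ABI_DATAS: &[AbiData] = &[
    AbiData { name: "cdecl" },
    AbiData { name: "stdcall" },
    AbiData { name: "Rust" },
];

// Old: pub fn all_names() -> ~[&'static str] { AbiDatas.map(|d| d.name) }
// New shape: collect a mapped iterator into a Vec instead of relying on the
// owned-vector `map` helper.
fn all_names() -> Vec<&'static str> {
    ABI_DATAS.iter().map(|d| d.name).collect()
}

// Callers that used to take ~[T] now usually take a slice; the diff reaches
// one with `.as_slice()`, while modern Rust borrows with `&v` or `&v[..]`.
fn print_all(names: &[&str]) {
    for n in names {
        println!("{}", n);
    }
}

fn main() {
    // Old: let mut abis = ~[];   New: let mut abis = Vec::new();
    let mut abis: Vec<&'static str> = Vec::new();
    for name in all_names() {
        abis.push(name);
    }

    // The diff replaces `v[i]` with `*v.get(i)`; today `Vec::get` returns
    // Option<&T>, so handle the out-of-bounds case explicitly.
    if let Some(first) = abis.get(0) {
        println!("first ABI: {}", first);
    }

    print_all(&abis);
}
```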
diff --git a/src/libsyntax/abi.rs b/src/libsyntax/abi.rs index 861cd8ae7d3..a06415bc083 100644 --- a/src/libsyntax/abi.rs +++ b/src/libsyntax/abi.rs @@ -9,6 +9,7 @@ // except according to those terms. use std::fmt; +use std::vec_ng::Vec; use std::fmt::Show; #[deriving(Eq)] @@ -117,8 +118,8 @@ pub fn lookup(name: &str) -> Option<Abi> { res } -pub fn all_names() -> ~[&'static str] { - AbiDatas.map(|d| d.name) +pub fn all_names() -> Vec<&'static str> { + AbiDatas.iter().map(|d| d.name).collect() } impl Abi { @@ -232,7 +233,7 @@ impl AbiSet { } pub fn check_valid(&self) -> Option<(Abi, Abi)> { - let mut abis = ~[]; + let mut abis = Vec::new(); self.each(|abi| { abis.push(abi); true }); for (i, abi) in abis.iter().enumerate() { diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index f6dca713e71..947463d8f47 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -23,6 +23,7 @@ use std::cell::RefCell; use collections::HashMap; use std::option::Option; use std::rc::Rc; +use std::vec_ng::Vec; use serialize::{Encodable, Decodable, Encoder, Decoder}; /// A pointer abstraction. FIXME(eddyb) #10676 use Rc<T> in the future. @@ -98,7 +99,7 @@ pub type SyntaxContext = u32; // it should cut down on memory use *a lot*; applying a mark // to a tree containing 50 identifiers would otherwise generate pub struct SCTable { - table: RefCell<~[SyntaxContext_]>, + table: RefCell<Vec<SyntaxContext_> >, mark_memo: RefCell<HashMap<(SyntaxContext,Mrk),SyntaxContext>>, rename_memo: RefCell<HashMap<(SyntaxContext,Ident,Name),SyntaxContext>>, } @@ -164,7 +165,7 @@ pub struct Path { /// module (like paths in an import). global: bool, /// The segments in the path: the things separated by `::`. - segments: ~[PathSegment], + segments: Vec<PathSegment> , } /// A segment of a path: an identifier, an optional lifetime, and a set of @@ -288,12 +289,12 @@ pub enum DefRegion { // The set of MetaItems that define the compilation environment of the crate, // used to drive conditional compilation -pub type CrateConfig = ~[@MetaItem]; +pub type CrateConfig = Vec<@MetaItem> ; #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct Crate { module: Mod, - attrs: ~[Attribute], + attrs: Vec<Attribute> , config: CrateConfig, span: Span, } @@ -303,7 +304,7 @@ pub type MetaItem = Spanned<MetaItem_>; #[deriving(Clone, Encodable, Decodable, Hash)] pub enum MetaItem_ { MetaWord(InternedString), - MetaList(InternedString, ~[@MetaItem]), + MetaList(InternedString, Vec<@MetaItem> ), MetaNameValue(InternedString, Lit), } @@ -334,8 +335,8 @@ impl Eq for MetaItem_ { #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct Block { - view_items: ~[ViewItem], - stmts: ~[@Stmt], + view_items: Vec<ViewItem> , + stmts: Vec<@Stmt> , expr: Option<@Expr>, id: NodeId, rules: BlockCheckMode, @@ -373,17 +374,17 @@ pub enum Pat_ { // records this pattern's NodeId in an auxiliary // set (of "pat_idents that refer to nullary enums") PatIdent(BindingMode, Path, Option<@Pat>), - PatEnum(Path, Option<~[@Pat]>), /* "none" means a * pattern where + PatEnum(Path, Option<Vec<@Pat> >), /* "none" means a * pattern where * we don't bind the fields to names */ - PatStruct(Path, ~[FieldPat], bool), - PatTup(~[@Pat]), + PatStruct(Path, Vec<FieldPat> , bool), + PatTup(Vec<@Pat> ), PatUniq(@Pat), PatRegion(@Pat), // reference pattern PatLit(@Expr), PatRange(@Expr, @Expr), // [a, b, ..i, y, z] is represented as // PatVec(~[a, b], Some(i), ~[y, z]) - PatVec(~[@Pat], Option<@Pat>, ~[@Pat]) + PatVec(Vec<@Pat> , Option<@Pat>, Vec<@Pat> ) } #[deriving(Clone, 
Eq, Encodable, Decodable, Hash)] @@ -488,7 +489,7 @@ pub enum Decl_ { #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct Arm { - pats: ~[@Pat], + pats: Vec<@Pat> , guard: Option<@Expr>, body: P<Block>, } @@ -526,10 +527,10 @@ pub enum Expr_ { ExprVstore(@Expr, ExprVstore), // First expr is the place; second expr is the value. ExprBox(@Expr, @Expr), - ExprVec(~[@Expr], Mutability), - ExprCall(@Expr, ~[@Expr]), - ExprMethodCall(Ident, ~[P<Ty>], ~[@Expr]), - ExprTup(~[@Expr]), + ExprVec(Vec<@Expr> , Mutability), + ExprCall(@Expr, Vec<@Expr> ), + ExprMethodCall(Ident, Vec<P<Ty>> , Vec<@Expr> ), + ExprTup(Vec<@Expr> ), ExprBinary(BinOp, @Expr, @Expr), ExprUnary(UnOp, @Expr), ExprLit(@Lit), @@ -541,14 +542,14 @@ pub enum Expr_ { // Conditionless loop (can be exited with break, cont, or ret) // FIXME #6993: change to Option<Name> ExprLoop(P<Block>, Option<Ident>), - ExprMatch(@Expr, ~[Arm]), + ExprMatch(@Expr, Vec<Arm> ), ExprFnBlock(P<FnDecl>, P<Block>), ExprProc(P<FnDecl>, P<Block>), ExprBlock(P<Block>), ExprAssign(@Expr, @Expr), ExprAssignOp(BinOp, @Expr, @Expr), - ExprField(@Expr, Ident, ~[P<Ty>]), + ExprField(@Expr, Ident, Vec<P<Ty>> ), ExprIndex(@Expr, @Expr), /// Expression that looks like a "name". For example, @@ -569,7 +570,7 @@ pub enum Expr_ { ExprMac(Mac), // A struct literal expression. - ExprStruct(Path, ~[Field], Option<@Expr> /* base */), + ExprStruct(Path, Vec<Field> , Option<@Expr> /* base */), // A vector literal constructed from one repeated element. ExprRepeat(@Expr /* element */, @Expr /* count */, Mutability), @@ -600,14 +601,14 @@ pub enum TokenTree { TTTok(Span, ::parse::token::Token), // a delimited sequence (the delimiters appear as the first // and last elements of the vector) - TTDelim(@~[TokenTree]), + TTDelim(@Vec<TokenTree> ), // These only make sense for right-hand-sides of MBE macros: // a kleene-style repetition sequence with a span, a TTForest, // an optional separator, and a boolean where true indicates // zero or more (..), and false indicates one or more (+). - TTSeq(Span, @~[TokenTree], Option<::parse::token::Token>, bool), + TTSeq(Span, @Vec<TokenTree> , Option<::parse::token::Token>, bool), // a syntactic variable that will be filled in by macro expansion. TTNonterminal(Span, Ident) @@ -673,7 +674,7 @@ pub enum Matcher_ { MatchTok(::parse::token::Token), // match repetitions of a sequence: body, separator, zero ok?, // lo, hi position-in-match-array used: - MatchSeq(~[Matcher], Option<::parse::token::Token>, bool, uint, uint), + MatchSeq(Vec<Matcher> , Option<::parse::token::Token>, bool, uint, uint), // parse a Rust NT: name to bind, name of NT, position in match array: MatchNonterminal(Ident, Ident, uint) } @@ -686,7 +687,7 @@ pub type Mac = Spanned<Mac_>; // There's only one flavor, now, so this could presumably be simplified. 
#[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub enum Mac_ { - MacInvocTT(Path, ~[TokenTree], SyntaxContext), // new macro-invocation + MacInvocTT(Path, Vec<TokenTree> , SyntaxContext), // new macro-invocation } #[deriving(Clone, Eq, Encodable, Decodable, Hash)] @@ -700,7 +701,7 @@ pub type Lit = Spanned<Lit_>; #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub enum Lit_ { LitStr(InternedString, StrStyle), - LitBinary(Rc<~[u8]>), + LitBinary(Rc<Vec<u8> >), LitChar(u32), LitInt(i64, IntTy), LitUint(u64, UintTy), @@ -729,7 +730,7 @@ pub struct TypeField { #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct TypeMethod { ident: Ident, - attrs: ~[Attribute], + attrs: Vec<Attribute> , purity: Purity, decl: P<FnDecl>, generics: Generics, @@ -858,7 +859,7 @@ pub enum Ty_ { TyRptr(Option<Lifetime>, MutTy), TyClosure(@ClosureTy), TyBareFn(@BareFnTy), - TyTup(~[P<Ty>]), + TyTup(Vec<P<Ty>> ), TyPath(Path, Option<OptVec<TyParamBound>>, NodeId), // for #7264; see above TyTypeof(@Expr), // TyInfer means the type should be inferred instead of it having been @@ -878,8 +879,8 @@ pub struct InlineAsm { asm: InternedString, asm_str_style: StrStyle, clobbers: InternedString, - inputs: ~[(InternedString, @Expr)], - outputs: ~[(InternedString, @Expr)], + inputs: Vec<(InternedString, @Expr)> , + outputs: Vec<(InternedString, @Expr)> , volatile: bool, alignstack: bool, dialect: AsmDialect @@ -914,7 +915,7 @@ impl Arg { #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct FnDecl { - inputs: ~[Arg], + inputs: Vec<Arg> , output: P<Ty>, cf: RetStyle, variadic: bool @@ -957,7 +958,7 @@ pub type ExplicitSelf = Spanned<ExplicitSelf_>; #[deriving(Eq, Encodable, Decodable, Hash)] pub struct Method { ident: Ident, - attrs: ~[Attribute], + attrs: Vec<Attribute> , generics: Generics, explicit_self: ExplicitSelf, purity: Purity, @@ -970,15 +971,15 @@ pub struct Method { #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct Mod { - view_items: ~[ViewItem], - items: ~[@Item], + view_items: Vec<ViewItem> , + items: Vec<@Item> , } #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct ForeignMod { abis: AbiSet, - view_items: ~[ViewItem], - items: ~[@ForeignItem], + view_items: Vec<ViewItem> , + items: Vec<@ForeignItem> , } #[deriving(Clone, Eq, Encodable, Decodable, Hash)] @@ -989,19 +990,19 @@ pub struct VariantArg { #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub enum VariantKind { - TupleVariantKind(~[VariantArg]), + TupleVariantKind(Vec<VariantArg> ), StructVariantKind(@StructDef), } #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct EnumDef { - variants: ~[P<Variant>], + variants: Vec<P<Variant>> , } #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct Variant_ { name: Ident, - attrs: ~[Attribute], + attrs: Vec<Attribute> , kind: VariantKind, id: NodeId, disr_expr: Option<@Expr>, @@ -1034,13 +1035,13 @@ pub enum ViewPath_ { ViewPathGlob(Path, NodeId), // foo::bar::{a,b,c} - ViewPathList(Path, ~[PathListIdent], NodeId) + ViewPathList(Path, Vec<PathListIdent> , NodeId) } #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct ViewItem { node: ViewItem_, - attrs: ~[Attribute], + attrs: Vec<Attribute> , vis: Visibility, span: Span, } @@ -1052,7 +1053,7 @@ pub enum ViewItem_ { // (containing arbitrary characters) from which to fetch the crate sources // For example, extern crate whatever = "github.com/mozilla/rust" ViewItemExternMod(Ident, Option<(InternedString,StrStyle)>, NodeId), - ViewItemUse(~[@ViewPath]), + ViewItemUse(Vec<@ViewPath> ), } // 
Meta-data associated with an item @@ -1109,7 +1110,7 @@ pub struct StructField_ { kind: StructFieldKind, id: NodeId, ty: P<Ty>, - attrs: ~[Attribute], + attrs: Vec<Attribute> , } pub type StructField = Spanned<StructField_>; @@ -1122,7 +1123,7 @@ pub enum StructFieldKind { #[deriving(Eq, Encodable, Decodable, Hash)] pub struct StructDef { - fields: ~[StructField], /* fields, not including ctor */ + fields: Vec<StructField> , /* fields, not including ctor */ /* ID of the constructor. This is only used for tuple- or enum-like * structs. */ ctor_id: Option<NodeId> @@ -1135,7 +1136,7 @@ pub struct StructDef { #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct Item { ident: Ident, - attrs: ~[Attribute], + attrs: Vec<Attribute> , id: NodeId, node: Item_, vis: Visibility, @@ -1151,11 +1152,11 @@ pub enum Item_ { ItemTy(P<Ty>, Generics), ItemEnum(EnumDef, Generics), ItemStruct(@StructDef, Generics), - ItemTrait(Generics, ~[TraitRef], ~[TraitMethod]), + ItemTrait(Generics, Vec<TraitRef> , Vec<TraitMethod> ), ItemImpl(Generics, Option<TraitRef>, // (optional) trait this impl implements P<Ty>, // self - ~[@Method]), + Vec<@Method> ), // a macro invocation (which includes macro definition) ItemMac(Mac), } @@ -1163,7 +1164,7 @@ pub enum Item_ { #[deriving(Eq, Encodable, Decodable, Hash)] pub struct ForeignItem { ident: Ident, - attrs: ~[Attribute], + attrs: Vec<Attribute> , node: ForeignItem_, id: NodeId, span: Span, @@ -1193,6 +1194,8 @@ mod test { use codemap::*; use super::*; + use std::vec_ng::Vec; + fn is_freeze<T: Freeze>() {} // Assert that the AST remains Freeze (#10693). @@ -1205,9 +1208,9 @@ mod test { #[test] fn check_asts_encodable() { let e = Crate { - module: Mod {view_items: ~[], items: ~[]}, - attrs: ~[], - config: ~[], + module: Mod {view_items: Vec::new(), items: Vec::new()}, + attrs: Vec::new(), + config: Vec::new(), span: Span { lo: BytePos(10), hi: BytePos(20), diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs index 31c258b36c0..56a99736866 100644 --- a/src/libsyntax/ast_map.rs +++ b/src/libsyntax/ast_map.rs @@ -23,6 +23,7 @@ use std::cell::RefCell; use std::iter; use std::vec; use std::fmt; +use std::vec_ng::Vec; #[deriving(Clone, Eq)] pub enum PathElem { @@ -134,7 +135,7 @@ enum MapEntry { } struct InlinedParent { - path: ~[PathElem], + path: Vec<PathElem> , // Required by NodeTraitMethod and NodeMethod. def_id: DefId } @@ -185,13 +186,17 @@ pub struct Map { /// /// Also, indexing is pretty quick when you've got a vector and /// plain old integers. - priv map: RefCell<~[MapEntry]> + priv map: RefCell<Vec<MapEntry> > } impl Map { fn find_entry(&self, id: NodeId) -> Option<MapEntry> { let map = self.map.borrow(); - map.get().get(id as uint).map(|x| *x) + if map.get().len() > id as uint { + Some(*map.get().get(id as uint)) + } else { + None + } } /// Retrieve the Node corresponding to `id`, failing if it cannot @@ -522,7 +527,7 @@ impl<'a, F: FoldOps> Folder for Ctx<'a, F> { } pub fn map_crate<F: FoldOps>(krate: Crate, fold_ops: F) -> (Crate, Map) { - let map = Map { map: RefCell::new(~[]) }; + let map = Map { map: RefCell::new(Vec::new()) }; let krate = { let mut cx = Ctx { map: &map, @@ -557,7 +562,7 @@ pub fn map_crate<F: FoldOps>(krate: Crate, fold_ops: F) -> (Crate, Map) { // crate. The `path` should be the path to the item but should not include // the item itself. 
pub fn map_decoded_item<F: FoldOps>(map: &Map, - path: ~[PathElem], + path: Vec<PathElem> , fold_ops: F, fold: |&mut Ctx<F>| -> InlinedItem) -> InlinedItem { diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 4cf4aefa0e2..db9ea480e96 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -23,6 +23,7 @@ use std::cmp; use collections::HashMap; use std::u32; use std::local_data; +use std::vec_ng::Vec; pub fn path_name_i(idents: &[Ident]) -> ~str { // FIXME: Bad copies (#2543 -- same for everything else that says "bad") @@ -180,8 +181,8 @@ pub fn is_call_expr(e: @Expr) -> bool { pub fn block_from_expr(e: @Expr) -> P<Block> { P(Block { - view_items: ~[], - stmts: ~[], + view_items: Vec::new(), + stmts: Vec::new(), expr: Some(e), id: e.id, rules: DefaultBlock, @@ -193,13 +194,13 @@ pub fn ident_to_path(s: Span, identifier: Ident) -> Path { ast::Path { span: s, global: false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: identifier, lifetimes: opt_vec::Empty, types: opt_vec::Empty, } - ], + ), } } @@ -216,7 +217,7 @@ pub fn is_unguarded(a: &Arm) -> bool { } } -pub fn unguarded_pat(a: &Arm) -> Option<~[@Pat]> { +pub fn unguarded_pat(a: &Arm) -> Option<Vec<@Pat> > { if is_unguarded(a) { Some(/* FIXME (#2543) */ a.pats.clone()) } else { @@ -241,7 +242,7 @@ pub fn impl_pretty_name(trait_ref: &Option<TraitRef>, ty: &Ty) -> Ident { token::gensym_ident(pretty) } -pub fn public_methods(ms: ~[@Method]) -> ~[@Method] { +pub fn public_methods(ms: Vec<@Method> ) -> Vec<@Method> { ms.move_iter().filter(|m| { match m.vis { Public => true, @@ -271,9 +272,9 @@ pub fn trait_method_to_ty_method(method: &TraitMethod) -> TypeMethod { } pub fn split_trait_methods(trait_methods: &[TraitMethod]) - -> (~[TypeMethod], ~[@Method]) { - let mut reqd = ~[]; - let mut provd = ~[]; + -> (Vec<TypeMethod> , Vec<@Method> ) { + let mut reqd = Vec::new(); + let mut provd = Vec::new(); for trt_method in trait_methods.iter() { match *trt_method { Required(ref tm) => reqd.push((*tm).clone()), @@ -724,7 +725,7 @@ pub fn new_rename_internal(id: Ident, // FIXME #8215 : currently pub to allow testing pub fn new_sctable_internal() -> SCTable { SCTable { - table: RefCell::new(~[EmptyCtxt,IllegalCtxt]), + table: RefCell::new(vec!(EmptyCtxt,IllegalCtxt)), mark_memo: RefCell::new(HashMap::new()), rename_memo: RefCell::new(HashMap::new()), } @@ -754,7 +755,7 @@ pub fn display_sctable(table : &SCTable) { /// Add a value to the end of a vec, return its index -fn idx_push<T>(vec: &mut ~[T], val: T) -> u32 { +fn idx_push<T>(vec: &mut Vec<T> , val: T) -> u32 { vec.push(val); (vec.len() - 1) as u32 } @@ -795,7 +796,7 @@ pub fn resolve_internal(id : Ident, let resolved = { let result = { let table = table.table.borrow(); - table.get()[id.ctxt] + *table.get().get(id.ctxt as uint) }; match result { EmptyCtxt => id.name, @@ -831,20 +832,20 @@ pub fn resolve_internal(id : Ident, } /// Compute the marks associated with a syntax context. -pub fn mtwt_marksof(ctxt: SyntaxContext, stopname: Name) -> ~[Mrk] { +pub fn mtwt_marksof(ctxt: SyntaxContext, stopname: Name) -> Vec<Mrk> { marksof(ctxt, stopname, get_sctable()) } // the internal function for computing marks // it's not clear to me whether it's better to use a [] mutable // vector or a cons-list for this. 
-pub fn marksof(ctxt: SyntaxContext, stopname: Name, table: &SCTable) -> ~[Mrk] { - let mut result = ~[]; +pub fn marksof(ctxt: SyntaxContext, stopname: Name, table: &SCTable) -> Vec<Mrk> { + let mut result = Vec::new(); let mut loopvar = ctxt; loop { let table_entry = { let table = table.table.borrow(); - table.get()[loopvar] + *table.get().get(loopvar as uint) }; match table_entry { EmptyCtxt => { @@ -873,7 +874,7 @@ pub fn marksof(ctxt: SyntaxContext, stopname: Name, table: &SCTable) -> ~[Mrk] { pub fn mtwt_outer_mark(ctxt: SyntaxContext) -> Mrk { let sctable = get_sctable(); let table = sctable.table.borrow(); - match table.get()[ctxt] { + match *table.get().get(ctxt as uint) { ast::Mark(mrk,_) => mrk, _ => fail!("can't retrieve outer mark when outside is not a mark") } @@ -881,7 +882,7 @@ pub fn mtwt_outer_mark(ctxt: SyntaxContext) -> Mrk { /// Push a name... unless it matches the one on top, in which /// case pop and discard (so two of the same marks cancel) -pub fn xorPush(marks: &mut ~[Mrk], mark: Mrk) { +pub fn xorPush(marks: &mut Vec<Mrk> , mark: Mrk) { if (marks.len() > 0) && (getLast(marks) == mark) { marks.pop().unwrap(); } else { @@ -891,7 +892,7 @@ pub fn xorPush(marks: &mut ~[Mrk], mark: Mrk) { // get the last element of a mutable array. // FIXME #4903: , must be a separate procedure for now. -pub fn getLast(arr: &~[Mrk]) -> Mrk { +pub fn getLast(arr: &Vec<Mrk> ) -> Mrk { *arr.last().unwrap() } @@ -901,7 +902,7 @@ pub fn getLast(arr: &~[Mrk]) -> Mrk { pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool { (a.span == b.span) && (a.global == b.global) - && (segments_name_eq(a.segments, b.segments)) + && (segments_name_eq(a.segments.as_slice(), b.segments.as_slice())) } // are two arrays of segments equal when compared unhygienically? 
@@ -938,6 +939,8 @@ mod test { use opt_vec; use collections::HashMap; + use std::vec_ng::Vec; + fn ident_to_segment(id : &Ident) -> PathSegment { PathSegment {identifier:id.clone(), lifetimes: opt_vec::Empty, @@ -956,21 +959,21 @@ mod test { } #[test] fn xorpush_test () { - let mut s = ~[]; + let mut s = Vec::new(); xorPush(&mut s, 14); - assert_eq!(s.clone(), ~[14]); + assert_eq!(s.clone(), vec!(14)); xorPush(&mut s, 14); - assert_eq!(s.clone(), ~[]); + assert_eq!(s.clone(), Vec::new()); xorPush(&mut s, 14); - assert_eq!(s.clone(), ~[14]); + assert_eq!(s.clone(), vec!(14)); xorPush(&mut s, 15); - assert_eq!(s.clone(), ~[14, 15]); + assert_eq!(s.clone(), vec!(14, 15)); xorPush(&mut s, 16); - assert_eq!(s.clone(), ~[14, 15, 16]); + assert_eq!(s.clone(), vec!(14, 15, 16)); xorPush(&mut s, 16); - assert_eq!(s.clone(), ~[14, 15]); + assert_eq!(s.clone(), vec!(14, 15)); xorPush(&mut s, 15); - assert_eq!(s.clone(), ~[14]); + assert_eq!(s.clone(), vec!(14)); } fn id(n: Name, s: SyntaxContext) -> Ident { @@ -987,7 +990,7 @@ mod test { // unfold a vector of TestSC values into a SCTable, // returning the resulting index - fn unfold_test_sc(tscs : ~[TestSC], tail: SyntaxContext, table: &SCTable) + fn unfold_test_sc(tscs : Vec<TestSC> , tail: SyntaxContext, table: &SCTable) -> SyntaxContext { tscs.rev_iter().fold(tail, |tail : SyntaxContext, tsc : &TestSC| {match *tsc { @@ -996,11 +999,11 @@ mod test { } // gather a SyntaxContext back into a vector of TestSCs - fn refold_test_sc(mut sc: SyntaxContext, table : &SCTable) -> ~[TestSC] { - let mut result = ~[]; + fn refold_test_sc(mut sc: SyntaxContext, table : &SCTable) -> Vec<TestSC> { + let mut result = Vec::new(); loop { let table = table.table.borrow(); - match table.get()[sc] { + match *table.get().get(sc as uint) { EmptyCtxt => {return result;}, Mark(mrk,tail) => { result.push(M(mrk)); @@ -1020,20 +1023,20 @@ mod test { #[test] fn test_unfold_refold(){ let mut t = new_sctable_internal(); - let test_sc = ~[M(3),R(id(101,0),14),M(9)]; + let test_sc = vec!(M(3),R(id(101,0),14),M(9)); assert_eq!(unfold_test_sc(test_sc.clone(),EMPTY_CTXT,&mut t),4); { let table = t.table.borrow(); - assert!(table.get()[2] == Mark(9,0)); - assert!(table.get()[3] == Rename(id(101,0),14,2)); - assert!(table.get()[4] == Mark(3,3)); + assert!(*table.get().get(2) == Mark(9,0)); + assert!(*table.get().get(3) == Rename(id(101,0),14,2)); + assert!(*table.get().get(4) == Mark(3,3)); } assert_eq!(refold_test_sc(4,&t),test_sc); } // extend a syntax context with a sequence of marks given // in a vector. v[0] will be the outermost mark. 
- fn unfold_marks(mrks: ~[Mrk], tail: SyntaxContext, table: &SCTable) + fn unfold_marks(mrks: Vec<Mrk> , tail: SyntaxContext, table: &SCTable) -> SyntaxContext { mrks.rev_iter().fold(tail, |tail:SyntaxContext, mrk:&Mrk| {new_mark_internal(*mrk,tail,table)}) @@ -1042,11 +1045,11 @@ mod test { #[test] fn unfold_marks_test() { let mut t = new_sctable_internal(); - assert_eq!(unfold_marks(~[3,7],EMPTY_CTXT,&mut t),3); + assert_eq!(unfold_marks(vec!(3,7),EMPTY_CTXT,&mut t),3); { let table = t.table.borrow(); - assert!(table.get()[2] == Mark(7,0)); - assert!(table.get()[3] == Mark(3,2)); + assert!(*table.get().get(2) == Mark(7,0)); + assert!(*table.get().get(3) == Mark(3,2)); } } @@ -1054,32 +1057,32 @@ mod test { let stopname = 242; let name1 = 243; let mut t = new_sctable_internal(); - assert_eq!(marksof (EMPTY_CTXT,stopname,&t),~[]); + assert_eq!(marksof (EMPTY_CTXT,stopname,&t),Vec::new()); // FIXME #5074: ANF'd to dodge nested calls - { let ans = unfold_marks(~[4,98],EMPTY_CTXT,&mut t); - assert_eq! (marksof (ans,stopname,&t),~[4,98]);} + { let ans = unfold_marks(vec!(4,98),EMPTY_CTXT,&mut t); + assert_eq! (marksof (ans,stopname,&t),vec!(4,98));} // does xoring work? - { let ans = unfold_marks(~[5,5,16],EMPTY_CTXT,&mut t); - assert_eq! (marksof (ans,stopname,&t), ~[16]);} + { let ans = unfold_marks(vec!(5,5,16),EMPTY_CTXT,&mut t); + assert_eq! (marksof (ans,stopname,&t), vec!(16));} // does nested xoring work? - { let ans = unfold_marks(~[5,10,10,5,16],EMPTY_CTXT,&mut t); - assert_eq! (marksof (ans, stopname,&t), ~[16]);} + { let ans = unfold_marks(vec!(5,10,10,5,16),EMPTY_CTXT,&mut t); + assert_eq! (marksof (ans, stopname,&t), vec!(16));} // rename where stop doesn't match: - { let chain = ~[M(9), + { let chain = vec!(M(9), R(id(name1, new_mark_internal (4, EMPTY_CTXT,&mut t)), 100101102), - M(14)]; + M(14)); let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t); - assert_eq! (marksof (ans, stopname, &t), ~[9,14]);} + assert_eq! (marksof (ans, stopname, &t), vec!(9,14));} // rename where stop does match { let name1sc = new_mark_internal(4, EMPTY_CTXT, &mut t); - let chain = ~[M(9), + let chain = vec!(M(9), R(id(name1, name1sc), stopname), - M(14)]; + M(14)); let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t); - assert_eq! (marksof (ans, stopname, &t), ~[9]); } + assert_eq! 
(marksof (ans, stopname, &t), vec!(9)); } } @@ -1090,32 +1093,32 @@ mod test { // - ctxt is MT assert_eq!(resolve_internal(id(a,EMPTY_CTXT),&mut t, &mut rt),a); // - simple ignored marks - { let sc = unfold_marks(~[1,2,3],EMPTY_CTXT,&mut t); + { let sc = unfold_marks(vec!(1,2,3),EMPTY_CTXT,&mut t); assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),a);} // - orthogonal rename where names don't match - { let sc = unfold_test_sc(~[R(id(50,EMPTY_CTXT),51),M(12)],EMPTY_CTXT,&mut t); + { let sc = unfold_test_sc(vec!(R(id(50,EMPTY_CTXT),51),M(12)),EMPTY_CTXT,&mut t); assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),a);} // - rename where names do match, but marks don't { let sc1 = new_mark_internal(1,EMPTY_CTXT,&mut t); - let sc = unfold_test_sc(~[R(id(a,sc1),50), + let sc = unfold_test_sc(vec!(R(id(a,sc1),50), M(1), - M(2)], + M(2)), EMPTY_CTXT,&mut t); assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), a);} // - rename where names and marks match - { let sc1 = unfold_test_sc(~[M(1),M(2)],EMPTY_CTXT,&mut t); - let sc = unfold_test_sc(~[R(id(a,sc1),50),M(1),M(2)],EMPTY_CTXT,&mut t); + { let sc1 = unfold_test_sc(vec!(M(1),M(2)),EMPTY_CTXT,&mut t); + let sc = unfold_test_sc(vec!(R(id(a,sc1),50),M(1),M(2)),EMPTY_CTXT,&mut t); assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), 50); } // - rename where names and marks match by literal sharing - { let sc1 = unfold_test_sc(~[M(1),M(2)],EMPTY_CTXT,&mut t); - let sc = unfold_test_sc(~[R(id(a,sc1),50)],sc1,&mut t); + { let sc1 = unfold_test_sc(vec!(M(1),M(2)),EMPTY_CTXT,&mut t); + let sc = unfold_test_sc(vec!(R(id(a,sc1),50)),sc1,&mut t); assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), 50); } // - two renames of the same var.. can only happen if you use // local-expand to prevent the inner binding from being renamed // during the rename-pass caused by the first: println!("about to run bad test"); - { let sc = unfold_test_sc(~[R(id(a,EMPTY_CTXT),50), - R(id(a,EMPTY_CTXT),51)], + { let sc = unfold_test_sc(vec!(R(id(a,EMPTY_CTXT),50), + R(id(a,EMPTY_CTXT),51)), EMPTY_CTXT,&mut t); assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), 51); } // the simplest double-rename: @@ -1126,8 +1129,8 @@ mod test { let sc = new_mark_internal(9,a50_to_a51,&mut t); assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),51); // but mark on the inside does: - let a50_to_a51_b = unfold_test_sc(~[R(id(a,a_to_a50),51), - M(9)], + let a50_to_a51_b = unfold_test_sc(vec!(R(id(a,a_to_a50),51), + M(9)), a_to_a50, &mut t); assert_eq!(resolve_internal(id(a,a50_to_a51_b),&mut t, &mut rt),50);} diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 6a3ca911d76..ed56ef15a1c 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -21,6 +21,7 @@ use parse::token; use crateid::CrateId; use collections::HashSet; +use std::vec_ng::Vec; pub trait AttrMetaMethods { // This could be changed to `fn check_name(&self, name: InternedString) -> @@ -146,7 +147,7 @@ pub fn mk_name_value_item(name: InternedString, value: ast::Lit) @dummy_spanned(MetaNameValue(name, value)) } -pub fn mk_list_item(name: InternedString, items: ~[@MetaItem]) -> @MetaItem { +pub fn mk_list_item(name: InternedString, items: Vec<@MetaItem> ) -> @MetaItem { @dummy_spanned(MetaList(name, items)) } @@ -212,12 +213,12 @@ pub fn last_meta_item_value_str_by_name(items: &[@MetaItem], name: &str) /* Higher-level applications */ -pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] { +pub fn sort_meta_items(items: &[@MetaItem]) -> Vec<@MetaItem> { // This is sort of stupid here, but we 
need to sort by // human-readable strings. let mut v = items.iter() .map(|&mi| (mi.name(), mi)) - .collect::<~[(InternedString, @MetaItem)]>(); + .collect::<Vec<(InternedString, @MetaItem)> >(); v.sort_by(|&(ref a, _), &(ref b, _)| a.cmp(b)); @@ -226,7 +227,8 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] { match m.node { MetaList(ref n, ref mis) => { @Spanned { - node: MetaList((*n).clone(), sort_meta_items(*mis)), + node: MetaList((*n).clone(), + sort_meta_items(mis.as_slice())), .. /*bad*/ (*m).clone() } } @@ -239,11 +241,11 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] { * From a list of crate attributes get only the meta_items that affect crate * linkage */ -pub fn find_linkage_metas(attrs: &[Attribute]) -> ~[@MetaItem] { - let mut result = ~[]; +pub fn find_linkage_metas(attrs: &[Attribute]) -> Vec<@MetaItem> { + let mut result = Vec::new(); for attr in attrs.iter().filter(|at| at.name().equiv(&("link"))) { match attr.meta().node { - MetaList(_, ref items) => result.push_all(*items), + MetaList(_, ref items) => result.push_all(items.as_slice()), _ => () } } @@ -272,9 +274,9 @@ pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr { match attr.node.value.node { MetaWord(ref n) if n.equiv(&("inline")) => InlineHint, MetaList(ref n, ref items) if n.equiv(&("inline")) => { - if contains_name(*items, "always") { + if contains_name(items.as_slice(), "always") { InlineAlways - } else if contains_name(*items, "never") { + } else if contains_name(items.as_slice(), "never") { InlineNever } else { InlineHint diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index d114d8971f7..6f17505c902 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -23,6 +23,7 @@ source code snippets, etc. use std::cell::RefCell; use std::cmp; +use std::vec_ng::Vec; use serialize::{Encodable, Decodable, Encoder, Decoder}; pub trait Pos { @@ -188,8 +189,7 @@ pub type FileName = ~str; pub struct FileLines { file: @FileMap, - lines: ~[uint] -} + lines: Vec<uint> } /// Identifies an offset of a multi-byte character in a FileMap pub struct MultiByteChar { @@ -210,9 +210,9 @@ pub struct FileMap { /// The start position of this source in the CodeMap start_pos: BytePos, /// Locations of lines beginnings in the source code - lines: RefCell<~[BytePos]>, + lines: RefCell<Vec<BytePos> >, /// Locations of multi-byte characters in the source code - multibyte_chars: RefCell<~[MultiByteChar]>, + multibyte_chars: RefCell<Vec<MultiByteChar> >, } impl FileMap { @@ -225,14 +225,14 @@ impl FileMap { // the new charpos must be > the last one (or it's the first one). 
let mut lines = self.lines.borrow_mut();; let line_len = lines.get().len(); - assert!(line_len == 0 || (lines.get()[line_len - 1] < pos)) + assert!(line_len == 0 || (*lines.get().get(line_len - 1) < pos)) lines.get().push(pos); } // get a line from the list of pre-computed line-beginnings pub fn get_line(&self, line: int) -> ~str { let mut lines = self.lines.borrow_mut(); - let begin: BytePos = lines.get()[line] - self.start_pos; + let begin: BytePos = *lines.get().get(line as uint) - self.start_pos; let begin = begin.to_uint(); let slice = self.src.slice_from(begin); match slice.find('\n') { @@ -257,13 +257,13 @@ impl FileMap { } pub struct CodeMap { - files: RefCell<~[@FileMap]> + files: RefCell<Vec<@FileMap> > } impl CodeMap { pub fn new() -> CodeMap { CodeMap { - files: RefCell::new(~[]), + files: RefCell::new(Vec::new()), } } @@ -278,8 +278,8 @@ impl CodeMap { name: filename, src: src, start_pos: Pos::from_uint(start_pos), - lines: RefCell::new(~[]), - multibyte_chars: RefCell::new(~[]), + lines: RefCell::new(Vec::new()), + multibyte_chars: RefCell::new(Vec::new()), }; files.get().push(filemap); @@ -330,7 +330,7 @@ impl CodeMap { pub fn span_to_lines(&self, sp: Span) -> @FileLines { let lo = self.lookup_char_pos(sp.lo); let hi = self.lookup_char_pos(sp.hi); - let mut lines = ~[]; + let mut lines = Vec::new(); for i in range(lo.line - 1u, hi.line as uint) { lines.push(i); }; @@ -374,7 +374,7 @@ impl CodeMap { let mut b = len; while b - a > 1u { let m = (a + b) / 2u; - if files[m].start_pos > pos { + if files.get(m).start_pos > pos { b = m; } else { a = m; @@ -384,7 +384,7 @@ impl CodeMap { // filemap, but are not the filemaps we want (because they are length 0, they cannot // contain what we are looking for). So, rewind until we find a useful filemap. 
loop { - let lines = files[a].lines.borrow(); + let lines = files.get(a).lines.borrow(); let lines = lines.get(); if lines.len() > 0 { break; @@ -406,13 +406,13 @@ impl CodeMap { let idx = self.lookup_filemap_idx(pos); let files = self.files.borrow(); - let f = files.get()[idx]; + let f = *files.get().get(idx); let mut a = 0u; let mut lines = f.lines.borrow_mut(); let mut b = lines.get().len(); while b - a > 1u { let m = (a + b) / 2u; - if lines.get()[m] > pos { b = m; } else { a = m; } + if *lines.get().get(m) > pos { b = m; } else { a = m; } } return FileMapAndLine {fm: f, line: a}; } @@ -422,7 +422,7 @@ impl CodeMap { let line = a + 1u; // Line numbers start at 1 let chpos = self.bytepos_to_file_charpos(pos); let lines = f.lines.borrow(); - let linebpos = lines.get()[a]; + let linebpos = *lines.get().get(a); let linechpos = self.bytepos_to_file_charpos(linebpos); debug!("codemap: byte pos {:?} is on the line at byte pos {:?}", pos, linebpos); @@ -441,7 +441,7 @@ impl CodeMap { -> FileMapAndBytePos { let idx = self.lookup_filemap_idx(bpos); let files = self.files.borrow(); - let fm = files.get()[idx]; + let fm = *files.get().get(idx); let offset = bpos - fm.start_pos; return FileMapAndBytePos {fm: fm, pos: offset}; } @@ -451,7 +451,7 @@ impl CodeMap { debug!("codemap: converting {:?} to char pos", bpos); let idx = self.lookup_filemap_idx(bpos); let files = self.files.borrow(); - let map = files.get()[idx]; + let map = files.get().get(idx); // The number of extra bytes due to multibyte chars in the FileMap let mut total_extra_bytes = 0; diff --git a/src/libsyntax/crateid.rs b/src/libsyntax/crateid.rs index b5f02fb7e64..e5136b7081b 100644 --- a/src/libsyntax/crateid.rs +++ b/src/libsyntax/crateid.rs @@ -19,6 +19,7 @@ use std::fmt; /// to be `0.0`. use std::from_str::FromStr; +use std::vec_ng::Vec; #[deriving(Clone, Eq)] pub struct CrateId { @@ -48,25 +49,27 @@ impl fmt::Show for CrateId { impl FromStr for CrateId { fn from_str(s: &str) -> Option<CrateId> { - let pieces: ~[&str] = s.splitn('#', 1).collect(); - let path = pieces[0].to_owned(); + let pieces: Vec<&str> = s.splitn('#', 1).collect(); + let path = pieces.get(0).to_owned(); if path.starts_with("/") || path.ends_with("/") || path.starts_with(".") || path.is_empty() { return None; } - let path_pieces: ~[&str] = path.rsplitn('/', 1).collect(); - let inferred_name = path_pieces[0]; + let path_pieces: Vec<&str> = path.rsplitn('/', 1).collect(); + let inferred_name = *path_pieces.get(0); let (name, version) = if pieces.len() == 1 { (inferred_name.to_owned(), None) } else { - let hash_pieces: ~[&str] = pieces[1].splitn(':', 1).collect(); + let hash_pieces: Vec<&str> = pieces.get(1) + .splitn(':', 1) + .collect(); let (hash_name, hash_version) = if hash_pieces.len() == 1 { - ("", hash_pieces[0]) + ("", *hash_pieces.get(0)) } else { - (hash_pieces[0], hash_pieces[1]) + (*hash_pieces.get(0), *hash_pieces.get(1)) }; let name = if !hash_name.is_empty() { @@ -89,7 +92,7 @@ impl FromStr for CrateId { }; Some(CrateId { - path: path, + path: path.clone(), name: name, version: version, }) diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index cb7034a375d..c0c64d6fd60 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -325,7 +325,7 @@ fn highlight_lines(err: &mut EmitterWriter, if lines.lines.len() == 1u { let lo = cm.lookup_char_pos(sp.lo); let mut digits = 0u; - let mut num = (lines.lines[0] + 1u) / 10u; + let mut num = (*lines.lines.get(0) + 1u) / 10u; // how many digits must be indent past? 
while num > 0u { num /= 10u; digits += 1u; } @@ -337,7 +337,7 @@ fn highlight_lines(err: &mut EmitterWriter, // part of the 'filename:line ' part of the previous line. let skip = fm.name.len() + digits + 3u; for _ in range(0, skip) { s.push_char(' '); } - let orig = fm.get_line(lines.lines[0] as int); + let orig = fm.get_line(*lines.lines.get(0) as int); for pos in range(0u, left-skip) { let curChar = orig[pos] as char; // Whenever a tab occurs on the previous line, we insert one on diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs index 1bf82573c49..6080613460d 100644 --- a/src/libsyntax/ext/asm.rs +++ b/src/libsyntax/ext/asm.rs @@ -20,6 +20,8 @@ use parse; use parse::token::InternedString; use parse::token; +use std::vec_ng::Vec; + enum State { Asm, Outputs, @@ -42,12 +44,14 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { let mut p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), - tts.to_owned()); + tts.iter() + .map(|x| (*x).clone()) + .collect()); let mut asm = InternedString::new(""); let mut asm_str_style = None; - let mut outputs = ~[]; - let mut inputs = ~[]; + let mut outputs = Vec::new(); + let mut inputs = Vec::new(); let mut cons = ~""; let mut volatile = false; let mut alignstack = false; @@ -119,7 +123,7 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } } Clobbers => { - let mut clobs = ~[]; + let mut clobs = Vec::new(); while p.token != token::EOF && p.token != token::COLON && p.token != token::MOD_SEP { diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 0636d19163e..e9fe21eded6 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -20,6 +20,7 @@ use parse::token::{InternedString, intern, str_to_ident}; use util::small_vector::SmallVector; use collections::HashMap; +use std::vec_ng::Vec; // new-style macro! 
tt code: // @@ -74,7 +75,7 @@ pub trait IdentMacroExpander { cx: &mut ExtCtxt, sp: Span, ident: ast::Ident, - token_tree: ~[ast::TokenTree]) + token_tree: Vec<ast::TokenTree> ) -> MacResult; } @@ -83,14 +84,14 @@ impl IdentMacroExpander for BasicIdentMacroExpander { cx: &mut ExtCtxt, sp: Span, ident: ast::Ident, - token_tree: ~[ast::TokenTree]) + token_tree: Vec<ast::TokenTree> ) -> MacResult { (self.expander)(cx, sp, ident, token_tree) } } pub type IdentMacroExpanderFn = - fn(&mut ExtCtxt, Span, ast::Ident, ~[ast::TokenTree]) -> MacResult; + fn(&mut ExtCtxt, Span, ast::Ident, Vec<ast::TokenTree> ) -> MacResult; pub type MacroCrateRegistrationFun = fn(|ast::Name, SyntaxExtension|); @@ -154,13 +155,13 @@ impl BlockInfo { pub fn new() -> BlockInfo { BlockInfo { macros_escape: false, - pending_renames: ~[], + pending_renames: Vec::new(), } } } // a list of ident->name renamings -pub type RenameList = ~[(ast::Ident,Name)]; +pub type RenameList = Vec<(ast::Ident,Name)> ; // The base map of methods for expanding syntax extension // AST nodes into full ASTs @@ -271,7 +272,7 @@ pub struct MacroCrate { pub trait CrateLoader { fn load_crate(&mut self, krate: &ast::ViewItem) -> MacroCrate; - fn get_exported_macros(&mut self, crate_num: ast::CrateNum) -> ~[~str]; + fn get_exported_macros(&mut self, crate_num: ast::CrateNum) -> Vec<~str> ; fn get_registrar_symbol(&mut self, crate_num: ast::CrateNum) -> Option<~str>; } @@ -284,7 +285,7 @@ pub struct ExtCtxt<'a> { backtrace: Option<@ExpnInfo>, loader: &'a mut CrateLoader, - mod_path: ~[ast::Ident], + mod_path: Vec<ast::Ident> , trace_mac: bool } @@ -296,7 +297,7 @@ impl<'a> ExtCtxt<'a> { cfg: cfg, backtrace: None, loader: loader, - mod_path: ~[], + mod_path: Vec::new(), trace_mac: false } } @@ -329,7 +330,7 @@ impl<'a> ExtCtxt<'a> { pub fn backtrace(&self) -> Option<@ExpnInfo> { self.backtrace } pub fn mod_push(&mut self, i: ast::Ident) { self.mod_path.push(i); } pub fn mod_pop(&mut self) { self.mod_path.pop().unwrap(); } - pub fn mod_path(&self) -> ~[ast::Ident] { self.mod_path.clone() } + pub fn mod_path(&self) -> Vec<ast::Ident> { self.mod_path.clone() } pub fn bt_push(&mut self, ei: codemap::ExpnInfo) { match ei { ExpnInfo {call_site: cs, callee: ref callee} => { @@ -458,11 +459,13 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt, /// parsing error, emit a non-fatal error and return None. 
pub fn get_exprs_from_tts(cx: &ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) -> Option<~[@ast::Expr]> { + tts: &[ast::TokenTree]) -> Option<Vec<@ast::Expr> > { let mut p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), - tts.to_owned()); - let mut es = ~[]; + tts.iter() + .map(|x| (*x).clone()) + .collect()); + let mut es = Vec::new(); while p.token != token::EOF { if es.len() != 0 && !p.eat(&token::COMMA) { cx.span_err(sp, "expected token: `,`"); @@ -507,12 +510,12 @@ impl Drop for MapChainFrame { // Only generic to make it easy to test pub struct SyntaxEnv { - priv chain: ~[MapChainFrame], + priv chain: Vec<MapChainFrame> , } impl SyntaxEnv { pub fn new() -> SyntaxEnv { - let mut map = SyntaxEnv { chain: ~[] }; + let mut map = SyntaxEnv { chain: Vec::new() }; map.push_frame(); map } @@ -553,6 +556,7 @@ impl SyntaxEnv { } pub fn info<'a>(&'a mut self) -> &'a mut BlockInfo { - &mut self.chain[self.chain.len()-1].info + let last_chain_index = self.chain.len() - 1; + &mut self.chain.get_mut(last_chain_index).info } } diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 1ddd579a2f1..34625923ea1 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -21,6 +21,8 @@ use opt_vec::OptVec; use parse::token::special_idents; use parse::token; +use std::vec_ng::Vec; + pub struct Field { ident: ast::Ident, ex: @ast::Expr @@ -34,14 +36,14 @@ mod syntax { pub trait AstBuilder { // paths - fn path(&self, span: Span, strs: ~[ast::Ident]) -> ast::Path; + fn path(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path; fn path_ident(&self, span: Span, id: ast::Ident) -> ast::Path; - fn path_global(&self, span: Span, strs: ~[ast::Ident]) -> ast::Path; + fn path_global(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path; fn path_all(&self, sp: Span, global: bool, - idents: ~[ast::Ident], + idents: Vec<ast::Ident> , lifetimes: OptVec<ast::Lifetime>, - types: ~[P<ast::Ty>]) + types: Vec<P<ast::Ty>> ) -> ast::Path; // types @@ -61,8 +63,8 @@ pub trait AstBuilder { fn ty_infer(&self, sp: Span) -> P<ast::Ty>; fn ty_nil(&self) -> P<ast::Ty>; - fn ty_vars(&self, ty_params: &OptVec<ast::TyParam>) -> ~[P<ast::Ty>]; - fn ty_vars_global(&self, ty_params: &OptVec<ast::TyParam>) -> ~[P<ast::Ty>]; + fn ty_vars(&self, ty_params: &OptVec<ast::TyParam>) -> Vec<P<ast::Ty>> ; + fn ty_vars_global(&self, ty_params: &OptVec<ast::TyParam>) -> Vec<P<ast::Ty>> ; fn ty_field_imm(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::TypeField; fn strip_bounds(&self, bounds: &Generics) -> Generics; @@ -87,11 +89,11 @@ pub trait AstBuilder { -> @ast::Stmt; // blocks - fn block(&self, span: Span, stmts: ~[@ast::Stmt], expr: Option<@ast::Expr>) -> P<ast::Block>; + fn block(&self, span: Span, stmts: Vec<@ast::Stmt> , expr: Option<@ast::Expr>) -> P<ast::Block>; fn block_expr(&self, expr: @ast::Expr) -> P<ast::Block>; fn block_all(&self, span: Span, - view_items: ~[ast::ViewItem], - stmts: ~[@ast::Stmt], + view_items: Vec<ast::ViewItem> , + stmts: Vec<@ast::Stmt> , expr: Option<@ast::Expr>) -> P<ast::Block>; // expressions @@ -109,19 +111,19 @@ pub trait AstBuilder { fn expr_addr_of(&self, sp: Span, e: @ast::Expr) -> @ast::Expr; fn expr_mut_addr_of(&self, sp: Span, e: @ast::Expr) -> @ast::Expr; fn expr_field_access(&self, span: Span, expr: @ast::Expr, ident: ast::Ident) -> @ast::Expr; - fn expr_call(&self, span: Span, expr: @ast::Expr, args: ~[@ast::Expr]) -> @ast::Expr; - fn expr_call_ident(&self, span: Span, id: ast::Ident, args: ~[@ast::Expr]) -> @ast::Expr; - fn expr_call_global(&self, 
sp: Span, fn_path: ~[ast::Ident], - args: ~[@ast::Expr]) -> @ast::Expr; + fn expr_call(&self, span: Span, expr: @ast::Expr, args: Vec<@ast::Expr> ) -> @ast::Expr; + fn expr_call_ident(&self, span: Span, id: ast::Ident, args: Vec<@ast::Expr> ) -> @ast::Expr; + fn expr_call_global(&self, sp: Span, fn_path: Vec<ast::Ident> , + args: Vec<@ast::Expr> ) -> @ast::Expr; fn expr_method_call(&self, span: Span, expr: @ast::Expr, ident: ast::Ident, - args: ~[@ast::Expr]) -> @ast::Expr; + args: Vec<@ast::Expr> ) -> @ast::Expr; fn expr_block(&self, b: P<ast::Block>) -> @ast::Expr; fn expr_cast(&self, sp: Span, expr: @ast::Expr, ty: P<ast::Ty>) -> @ast::Expr; fn field_imm(&self, span: Span, name: Ident, e: @ast::Expr) -> ast::Field; - fn expr_struct(&self, span: Span, path: ast::Path, fields: ~[ast::Field]) -> @ast::Expr; - fn expr_struct_ident(&self, span: Span, id: ast::Ident, fields: ~[ast::Field]) -> @ast::Expr; + fn expr_struct(&self, span: Span, path: ast::Path, fields: Vec<ast::Field> ) -> @ast::Expr; + fn expr_struct_ident(&self, span: Span, id: ast::Ident, fields: Vec<ast::Field> ) -> @ast::Expr; fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> @ast::Expr; @@ -131,9 +133,9 @@ pub trait AstBuilder { fn expr_bool(&self, sp: Span, value: bool) -> @ast::Expr; fn expr_vstore(&self, sp: Span, expr: @ast::Expr, vst: ast::ExprVstore) -> @ast::Expr; - fn expr_vec(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr; - fn expr_vec_uniq(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr; - fn expr_vec_slice(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr; + fn expr_vec(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr; + fn expr_vec_ng(&self, sp: Span) -> @ast::Expr; + fn expr_vec_slice(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr; fn expr_str(&self, sp: Span, s: InternedString) -> @ast::Expr; fn expr_str_uniq(&self, sp: Span, s: InternedString) -> @ast::Expr; @@ -152,55 +154,55 @@ pub trait AstBuilder { span: Span, ident: ast::Ident, bm: ast::BindingMode) -> @ast::Pat; - fn pat_enum(&self, span: Span, path: ast::Path, subpats: ~[@ast::Pat]) -> @ast::Pat; + fn pat_enum(&self, span: Span, path: ast::Path, subpats: Vec<@ast::Pat> ) -> @ast::Pat; fn pat_struct(&self, span: Span, - path: ast::Path, field_pats: ~[ast::FieldPat]) -> @ast::Pat; + path: ast::Path, field_pats: Vec<ast::FieldPat> ) -> @ast::Pat; - fn arm(&self, span: Span, pats: ~[@ast::Pat], expr: @ast::Expr) -> ast::Arm; + fn arm(&self, span: Span, pats: Vec<@ast::Pat> , expr: @ast::Expr) -> ast::Arm; fn arm_unreachable(&self, span: Span) -> ast::Arm; - fn expr_match(&self, span: Span, arg: @ast::Expr, arms: ~[ast::Arm]) -> @ast::Expr; + fn expr_match(&self, span: Span, arg: @ast::Expr, arms: Vec<ast::Arm> ) -> @ast::Expr; fn expr_if(&self, span: Span, cond: @ast::Expr, then: @ast::Expr, els: Option<@ast::Expr>) -> @ast::Expr; fn lambda_fn_decl(&self, span: Span, fn_decl: P<ast::FnDecl>, blk: P<ast::Block>) -> @ast::Expr; - fn lambda(&self, span: Span, ids: ~[ast::Ident], blk: P<ast::Block>) -> @ast::Expr; + fn lambda(&self, span: Span, ids: Vec<ast::Ident> , blk: P<ast::Block>) -> @ast::Expr; fn lambda0(&self, span: Span, blk: P<ast::Block>) -> @ast::Expr; fn lambda1(&self, span: Span, blk: P<ast::Block>, ident: ast::Ident) -> @ast::Expr; - fn lambda_expr(&self, span: Span, ids: ~[ast::Ident], blk: @ast::Expr) -> @ast::Expr; + fn lambda_expr(&self, span: Span, ids: Vec<ast::Ident> , blk: @ast::Expr) -> @ast::Expr; fn lambda_expr_0(&self, span: Span, expr: @ast::Expr) -> @ast::Expr; fn lambda_expr_1(&self, span: Span, expr: 
@ast::Expr, ident: ast::Ident) -> @ast::Expr; - fn lambda_stmts(&self, span: Span, ids: ~[ast::Ident], blk: ~[@ast::Stmt]) -> @ast::Expr; - fn lambda_stmts_0(&self, span: Span, stmts: ~[@ast::Stmt]) -> @ast::Expr; - fn lambda_stmts_1(&self, span: Span, stmts: ~[@ast::Stmt], ident: ast::Ident) -> @ast::Expr; + fn lambda_stmts(&self, span: Span, ids: Vec<ast::Ident> , blk: Vec<@ast::Stmt> ) -> @ast::Expr; + fn lambda_stmts_0(&self, span: Span, stmts: Vec<@ast::Stmt> ) -> @ast::Expr; + fn lambda_stmts_1(&self, span: Span, stmts: Vec<@ast::Stmt> , ident: ast::Ident) -> @ast::Expr; // items fn item(&self, span: Span, - name: Ident, attrs: ~[ast::Attribute], node: ast::Item_) -> @ast::Item; + name: Ident, attrs: Vec<ast::Attribute> , node: ast::Item_) -> @ast::Item; fn arg(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::Arg; // FIXME unused self - fn fn_decl(&self, inputs: ~[ast::Arg], output: P<ast::Ty>) -> P<ast::FnDecl>; + fn fn_decl(&self, inputs: Vec<ast::Arg> , output: P<ast::Ty>) -> P<ast::FnDecl>; fn item_fn_poly(&self, span: Span, name: Ident, - inputs: ~[ast::Arg], + inputs: Vec<ast::Arg> , output: P<ast::Ty>, generics: Generics, body: P<ast::Block>) -> @ast::Item; fn item_fn(&self, span: Span, name: Ident, - inputs: ~[ast::Arg], + inputs: Vec<ast::Arg> , output: P<ast::Ty>, body: P<ast::Block>) -> @ast::Item; - fn variant(&self, span: Span, name: Ident, tys: ~[P<ast::Ty>]) -> ast::Variant; + fn variant(&self, span: Span, name: Ident, tys: Vec<P<ast::Ty>> ) -> ast::Variant; fn item_enum_poly(&self, span: Span, name: Ident, @@ -216,8 +218,8 @@ pub trait AstBuilder { fn item_struct(&self, span: Span, name: Ident, struct_def: ast::StructDef) -> @ast::Item; fn item_mod(&self, span: Span, - name: Ident, attrs: ~[ast::Attribute], - vi: ~[ast::ViewItem], items: ~[@ast::Item]) -> @ast::Item; + name: Ident, attrs: Vec<ast::Attribute> , + vi: Vec<ast::ViewItem> , items: Vec<@ast::Item> ) -> @ast::Item; fn item_ty_poly(&self, span: Span, @@ -232,7 +234,7 @@ pub trait AstBuilder { fn meta_list(&self, sp: Span, name: InternedString, - mis: ~[@ast::MetaItem]) + mis: Vec<@ast::MetaItem> ) -> @ast::MetaItem; fn meta_name_value(&self, sp: Span, @@ -241,35 +243,35 @@ pub trait AstBuilder { -> @ast::MetaItem; fn view_use(&self, sp: Span, - vis: ast::Visibility, vp: ~[@ast::ViewPath]) -> ast::ViewItem; + vis: ast::Visibility, vp: Vec<@ast::ViewPath> ) -> ast::ViewItem; fn view_use_simple(&self, sp: Span, vis: ast::Visibility, path: ast::Path) -> ast::ViewItem; fn view_use_simple_(&self, sp: Span, vis: ast::Visibility, ident: ast::Ident, path: ast::Path) -> ast::ViewItem; fn view_use_list(&self, sp: Span, vis: ast::Visibility, - path: ~[ast::Ident], imports: &[ast::Ident]) -> ast::ViewItem; + path: Vec<ast::Ident> , imports: &[ast::Ident]) -> ast::ViewItem; fn view_use_glob(&self, sp: Span, - vis: ast::Visibility, path: ~[ast::Ident]) -> ast::ViewItem; + vis: ast::Visibility, path: Vec<ast::Ident> ) -> ast::ViewItem; } impl<'a> AstBuilder for ExtCtxt<'a> { - fn path(&self, span: Span, strs: ~[ast::Ident]) -> ast::Path { - self.path_all(span, false, strs, opt_vec::Empty, ~[]) + fn path(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path { + self.path_all(span, false, strs, opt_vec::Empty, Vec::new()) } fn path_ident(&self, span: Span, id: ast::Ident) -> ast::Path { - self.path(span, ~[id]) + self.path(span, vec!(id)) } - fn path_global(&self, span: Span, strs: ~[ast::Ident]) -> ast::Path { - self.path_all(span, true, strs, opt_vec::Empty, ~[]) + fn path_global(&self, span: Span, strs: 
Vec<ast::Ident> ) -> ast::Path { + self.path_all(span, true, strs, opt_vec::Empty, Vec::new()) } fn path_all(&self, sp: Span, global: bool, - mut idents: ~[ast::Ident], + mut idents: Vec<ast::Ident> , lifetimes: OptVec<ast::Lifetime>, - types: ~[P<ast::Ty>]) + types: Vec<P<ast::Ty>> ) -> ast::Path { let last_identifier = idents.pop().unwrap(); - let mut segments: ~[ast::PathSegment] = idents.move_iter() + let mut segments: Vec<ast::PathSegment> = idents.move_iter() .map(|ident| { ast::PathSegment { identifier: ident, @@ -335,13 +337,13 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.ty_path( self.path_all(DUMMY_SP, true, - ~[ + vec!( self.ident_of("std"), self.ident_of("option"), self.ident_of("Option") - ], + ), opt_vec::Empty, - ~[ ty ]), None) + vec!( ty )), None) } fn ty_field_imm(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::TypeField { @@ -379,15 +381,15 @@ impl<'a> AstBuilder for ExtCtxt<'a> { // these are strange, and probably shouldn't be used outside of // pipes. Specifically, the global version possible generates // incorrect code. - fn ty_vars(&self, ty_params: &OptVec<ast::TyParam>) -> ~[P<ast::Ty>] { + fn ty_vars(&self, ty_params: &OptVec<ast::TyParam>) -> Vec<P<ast::Ty>> { opt_vec::take_vec( ty_params.map(|p| self.ty_ident(DUMMY_SP, p.ident))) } - fn ty_vars_global(&self, ty_params: &OptVec<ast::TyParam>) -> ~[P<ast::Ty>] { + fn ty_vars_global(&self, ty_params: &OptVec<ast::TyParam>) -> Vec<P<ast::Ty>> { opt_vec::take_vec( ty_params.map(|p| self.ty_path( - self.path_global(DUMMY_SP, ~[p.ident]), None))) + self.path_global(DUMMY_SP, vec!(p.ident)), None))) } fn strip_bounds(&self, generics: &Generics) -> Generics { @@ -459,17 +461,17 @@ impl<'a> AstBuilder for ExtCtxt<'a> { @respan(sp, ast::StmtDecl(@decl, ast::DUMMY_NODE_ID)) } - fn block(&self, span: Span, stmts: ~[@ast::Stmt], expr: Option<@Expr>) -> P<ast::Block> { - self.block_all(span, ~[], stmts, expr) + fn block(&self, span: Span, stmts: Vec<@ast::Stmt> , expr: Option<@Expr>) -> P<ast::Block> { + self.block_all(span, Vec::new(), stmts, expr) } fn block_expr(&self, expr: @ast::Expr) -> P<ast::Block> { - self.block_all(expr.span, ~[], ~[], Some(expr)) + self.block_all(expr.span, Vec::new(), Vec::new(), Some(expr)) } fn block_all(&self, span: Span, - view_items: ~[ast::ViewItem], - stmts: ~[@ast::Stmt], + view_items: Vec<ast::ViewItem> , + stmts: Vec<@ast::Stmt> , expr: Option<@ast::Expr>) -> P<ast::Block> { P(ast::Block { view_items: view_items, @@ -517,7 +519,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn expr_field_access(&self, sp: Span, expr: @ast::Expr, ident: ast::Ident) -> @ast::Expr { - self.expr(sp, ast::ExprField(expr, ident, ~[])) + self.expr(sp, ast::ExprField(expr, ident, Vec::new())) } fn expr_addr_of(&self, sp: Span, e: @ast::Expr) -> @ast::Expr { self.expr(sp, ast::ExprAddrOf(ast::MutImmutable, e)) @@ -526,23 +528,23 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.expr(sp, ast::ExprAddrOf(ast::MutMutable, e)) } - fn expr_call(&self, span: Span, expr: @ast::Expr, args: ~[@ast::Expr]) -> @ast::Expr { + fn expr_call(&self, span: Span, expr: @ast::Expr, args: Vec<@ast::Expr> ) -> @ast::Expr { self.expr(span, ast::ExprCall(expr, args)) } - fn expr_call_ident(&self, span: Span, id: ast::Ident, args: ~[@ast::Expr]) -> @ast::Expr { + fn expr_call_ident(&self, span: Span, id: ast::Ident, args: Vec<@ast::Expr> ) -> @ast::Expr { self.expr(span, ast::ExprCall(self.expr_ident(span, id), args)) } - fn expr_call_global(&self, sp: Span, fn_path: ~[ast::Ident], - args: ~[@ast::Expr]) -> @ast::Expr { + fn 
expr_call_global(&self, sp: Span, fn_path: Vec<ast::Ident> , + args: Vec<@ast::Expr> ) -> @ast::Expr { let pathexpr = self.expr_path(self.path_global(sp, fn_path)); self.expr_call(sp, pathexpr, args) } fn expr_method_call(&self, span: Span, expr: @ast::Expr, ident: ast::Ident, - mut args: ~[@ast::Expr]) -> @ast::Expr { + mut args: Vec<@ast::Expr> ) -> @ast::Expr { args.unshift(expr); - self.expr(span, ast::ExprMethodCall(ident, ~[], args)) + self.expr(span, ast::ExprMethodCall(ident, Vec::new(), args)) } fn expr_block(&self, b: P<ast::Block>) -> @ast::Expr { self.expr(b.span, ast::ExprBlock(b)) @@ -550,11 +552,11 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn field_imm(&self, span: Span, name: Ident, e: @ast::Expr) -> ast::Field { ast::Field { ident: respan(span, name), expr: e, span: span } } - fn expr_struct(&self, span: Span, path: ast::Path, fields: ~[ast::Field]) -> @ast::Expr { + fn expr_struct(&self, span: Span, path: ast::Path, fields: Vec<ast::Field> ) -> @ast::Expr { self.expr(span, ast::ExprStruct(path, fields, None)) } fn expr_struct_ident(&self, span: Span, - id: ast::Ident, fields: ~[ast::Field]) -> @ast::Expr { + id: ast::Ident, fields: Vec<ast::Field> ) -> @ast::Expr { self.expr_struct(span, self.path_ident(span, id), fields) } @@ -577,13 +579,18 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn expr_vstore(&self, sp: Span, expr: @ast::Expr, vst: ast::ExprVstore) -> @ast::Expr { self.expr(sp, ast::ExprVstore(expr, vst)) } - fn expr_vec(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr { + fn expr_vec(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr { self.expr(sp, ast::ExprVec(exprs, ast::MutImmutable)) } - fn expr_vec_uniq(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr { - self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreUniq) + fn expr_vec_ng(&self, sp: Span) -> @ast::Expr { + self.expr_call_global(sp, + vec!(self.ident_of("std"), + self.ident_of("vec_ng"), + self.ident_of("Vec"), + self.ident_of("new")), + Vec::new()) } - fn expr_vec_slice(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr { + fn expr_vec_slice(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr { self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreSlice) } fn expr_str(&self, sp: Span, s: InternedString) -> @ast::Expr { @@ -600,20 +607,18 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn expr_some(&self, sp: Span, expr: @ast::Expr) -> @ast::Expr { - let some = ~[ + let some = vec!( self.ident_of("std"), self.ident_of("option"), - self.ident_of("Some"), - ]; - self.expr_call_global(sp, some, ~[expr]) + self.ident_of("Some")); + self.expr_call_global(sp, some, vec!(expr)) } fn expr_none(&self, sp: Span) -> @ast::Expr { - let none = self.path_global(sp, ~[ + let none = self.path_global(sp, vec!( self.ident_of("std"), self.ident_of("option"), - self.ident_of("None"), - ]); + self.ident_of("None"))); self.expr_path(none) } @@ -621,17 +626,15 @@ impl<'a> AstBuilder for ExtCtxt<'a> { let loc = self.codemap().lookup_char_pos(span.lo); self.expr_call_global( span, - ~[ + vec!( self.ident_of("std"), self.ident_of("rt"), - self.ident_of("begin_unwind"), - ], - ~[ + self.ident_of("begin_unwind")), + vec!( self.expr_str(span, msg), self.expr_str(span, token::intern_and_get_ident(loc.file.name)), - self.expr_uint(span, loc.line), - ]) + self.expr_uint(span, loc.line))) } fn expr_unreachable(&self, span: Span) -> @ast::Expr { @@ -662,17 +665,17 @@ impl<'a> AstBuilder for ExtCtxt<'a> { let pat = ast::PatIdent(bm, path, None); self.pat(span, pat) } - fn pat_enum(&self, span: Span, path: 
ast::Path, subpats: ~[@ast::Pat]) -> @ast::Pat { + fn pat_enum(&self, span: Span, path: ast::Path, subpats: Vec<@ast::Pat> ) -> @ast::Pat { let pat = ast::PatEnum(path, Some(subpats)); self.pat(span, pat) } fn pat_struct(&self, span: Span, - path: ast::Path, field_pats: ~[ast::FieldPat]) -> @ast::Pat { + path: ast::Path, field_pats: Vec<ast::FieldPat> ) -> @ast::Pat { let pat = ast::PatStruct(path, field_pats, false); self.pat(span, pat) } - fn arm(&self, _span: Span, pats: ~[@ast::Pat], expr: @ast::Expr) -> ast::Arm { + fn arm(&self, _span: Span, pats: Vec<@ast::Pat> , expr: @ast::Expr) -> ast::Arm { ast::Arm { pats: pats, guard: None, @@ -681,10 +684,10 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn arm_unreachable(&self, span: Span) -> ast::Arm { - self.arm(span, ~[self.pat_wild(span)], self.expr_unreachable(span)) + self.arm(span, vec!(self.pat_wild(span)), self.expr_unreachable(span)) } - fn expr_match(&self, span: Span, arg: @ast::Expr, arms: ~[ast::Arm]) -> @Expr { + fn expr_match(&self, span: Span, arg: @ast::Expr, arms: Vec<ast::Arm> ) -> @Expr { self.expr(span, ast::ExprMatch(arg, arms)) } @@ -698,24 +701,22 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn_decl: P<ast::FnDecl>, blk: P<ast::Block>) -> @ast::Expr { self.expr(span, ast::ExprFnBlock(fn_decl, blk)) } - fn lambda(&self, span: Span, ids: ~[ast::Ident], blk: P<ast::Block>) -> @ast::Expr { + fn lambda(&self, span: Span, ids: Vec<ast::Ident> , blk: P<ast::Block>) -> @ast::Expr { let fn_decl = self.fn_decl( ids.map(|id| self.arg(span, *id, self.ty_infer(span))), self.ty_infer(span)); self.expr(span, ast::ExprFnBlock(fn_decl, blk)) } - fn lambda0(&self, _span: Span, blk: P<ast::Block>) -> @ast::Expr { - let blk_e = self.expr(blk.span, ast::ExprBlock(blk)); - quote_expr!(self, || $blk_e ) + fn lambda0(&self, span: Span, blk: P<ast::Block>) -> @ast::Expr { + self.lambda(span, Vec::new(), blk) } - fn lambda1(&self, _span: Span, blk: P<ast::Block>, ident: ast::Ident) -> @ast::Expr { - let blk_e = self.expr(blk.span, ast::ExprBlock(blk)); - quote_expr!(self, |$ident| $blk_e ) + fn lambda1(&self, span: Span, blk: P<ast::Block>, ident: ast::Ident) -> @ast::Expr { + self.lambda(span, vec!(ident), blk) } - fn lambda_expr(&self, span: Span, ids: ~[ast::Ident], expr: @ast::Expr) -> @ast::Expr { + fn lambda_expr(&self, span: Span, ids: Vec<ast::Ident> , expr: @ast::Expr) -> @ast::Expr { self.lambda(span, ids, self.block_expr(expr)) } fn lambda_expr_0(&self, span: Span, expr: @ast::Expr) -> @ast::Expr { @@ -725,13 +726,17 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.lambda1(span, self.block_expr(expr), ident) } - fn lambda_stmts(&self, span: Span, ids: ~[ast::Ident], stmts: ~[@ast::Stmt]) -> @ast::Expr { + fn lambda_stmts(&self, + span: Span, + ids: Vec<ast::Ident>, + stmts: Vec<@ast::Stmt>) + -> @ast::Expr { self.lambda(span, ids, self.block(span, stmts, None)) } - fn lambda_stmts_0(&self, span: Span, stmts: ~[@ast::Stmt]) -> @ast::Expr { + fn lambda_stmts_0(&self, span: Span, stmts: Vec<@ast::Stmt> ) -> @ast::Expr { self.lambda0(span, self.block(span, stmts, None)) } - fn lambda_stmts_1(&self, span: Span, stmts: ~[@ast::Stmt], ident: ast::Ident) -> @ast::Expr { + fn lambda_stmts_1(&self, span: Span, stmts: Vec<@ast::Stmt> , ident: ast::Ident) -> @ast::Expr { self.lambda1(span, self.block(span, stmts, None), ident) } @@ -745,7 +750,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } // FIXME unused self - fn fn_decl(&self, inputs: ~[ast::Arg], output: P<ast::Ty>) -> P<ast::FnDecl> { + fn fn_decl(&self, inputs: Vec<ast::Arg> , output: P<ast::Ty>) -> 
P<ast::FnDecl> { P(ast::FnDecl { inputs: inputs, output: output, @@ -755,7 +760,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn item(&self, span: Span, - name: Ident, attrs: ~[ast::Attribute], node: ast::Item_) -> @ast::Item { + name: Ident, attrs: Vec<ast::Attribute> , node: ast::Item_) -> @ast::Item { // FIXME: Would be nice if our generated code didn't violate // Rust coding conventions @ast::Item { ident: name, @@ -769,13 +774,13 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn item_fn_poly(&self, span: Span, name: Ident, - inputs: ~[ast::Arg], + inputs: Vec<ast::Arg> , output: P<ast::Ty>, generics: Generics, body: P<ast::Block>) -> @ast::Item { self.item(span, name, - ~[], + Vec::new(), ast::ItemFn(self.fn_decl(inputs, output), ast::ImpureFn, AbiSet::Rust(), @@ -786,7 +791,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn item_fn(&self, span: Span, name: Ident, - inputs: ~[ast::Arg], + inputs: Vec<ast::Arg> , output: P<ast::Ty>, body: P<ast::Block> ) -> @ast::Item { @@ -799,7 +804,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { body) } - fn variant(&self, span: Span, name: Ident, tys: ~[P<ast::Ty>]) -> ast::Variant { + fn variant(&self, span: Span, name: Ident, tys: Vec<P<ast::Ty>> ) -> ast::Variant { let args = tys.move_iter().map(|ty| { ast::VariantArg { ty: ty, id: ast::DUMMY_NODE_ID } }).collect(); @@ -807,7 +812,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { respan(span, ast::Variant_ { name: name, - attrs: ~[], + attrs: Vec::new(), kind: ast::TupleVariantKind(args), id: ast::DUMMY_NODE_ID, disr_expr: None, @@ -818,7 +823,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn item_enum_poly(&self, span: Span, name: Ident, enum_definition: ast::EnumDef, generics: Generics) -> @ast::Item { - self.item(span, name, ~[], ast::ItemEnum(enum_definition, generics)) + self.item(span, name, Vec::new(), ast::ItemEnum(enum_definition, generics)) } fn item_enum(&self, span: Span, name: Ident, @@ -839,13 +844,13 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn item_struct_poly(&self, span: Span, name: Ident, struct_def: ast::StructDef, generics: Generics) -> @ast::Item { - self.item(span, name, ~[], ast::ItemStruct(@struct_def, generics)) + self.item(span, name, Vec::new(), ast::ItemStruct(@struct_def, generics)) } fn item_mod(&self, span: Span, name: Ident, - attrs: ~[ast::Attribute], - vi: ~[ast::ViewItem], - items: ~[@ast::Item]) -> @ast::Item { + attrs: Vec<ast::Attribute> , + vi: Vec<ast::ViewItem> , + items: Vec<@ast::Item> ) -> @ast::Item { self.item( span, name, @@ -859,7 +864,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn item_ty_poly(&self, span: Span, name: Ident, ty: P<ast::Ty>, generics: Generics) -> @ast::Item { - self.item(span, name, ~[], ast::ItemTy(ty, generics)) + self.item(span, name, Vec::new(), ast::ItemTy(ty, generics)) } fn item_ty(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> @ast::Item { @@ -880,7 +885,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn meta_list(&self, sp: Span, name: InternedString, - mis: ~[@ast::MetaItem]) + mis: Vec<@ast::MetaItem> ) -> @ast::MetaItem { @respan(sp, ast::MetaList(name, mis)) } @@ -893,10 +898,10 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn view_use(&self, sp: Span, - vis: ast::Visibility, vp: ~[@ast::ViewPath]) -> ast::ViewItem { + vis: ast::Visibility, vp: Vec<@ast::ViewPath> ) -> ast::ViewItem { ast::ViewItem { node: ast::ViewItemUse(vp), - attrs: ~[], + attrs: Vec::new(), vis: vis, span: sp } @@ -910,30 +915,32 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn view_use_simple_(&self, sp: Span, vis: ast::Visibility, ident: ast::Ident, path: ast::Path) -> 
ast::ViewItem { self.view_use(sp, vis, - ~[@respan(sp, + vec!(@respan(sp, ast::ViewPathSimple(ident, path, - ast::DUMMY_NODE_ID))]) + ast::DUMMY_NODE_ID)))) } fn view_use_list(&self, sp: Span, vis: ast::Visibility, - path: ~[ast::Ident], imports: &[ast::Ident]) -> ast::ViewItem { + path: Vec<ast::Ident> , imports: &[ast::Ident]) -> ast::ViewItem { let imports = imports.map(|id| { respan(sp, ast::PathListIdent_ { name: *id, id: ast::DUMMY_NODE_ID }) }); self.view_use(sp, vis, - ~[@respan(sp, + vec!(@respan(sp, ast::ViewPathList(self.path(sp, path), - imports, - ast::DUMMY_NODE_ID))]) + imports.iter() + .map(|x| *x) + .collect(), + ast::DUMMY_NODE_ID)))) } fn view_use_glob(&self, sp: Span, - vis: ast::Visibility, path: ~[ast::Ident]) -> ast::ViewItem { + vis: ast::Visibility, path: Vec<ast::Ident> ) -> ast::ViewItem { self.view_use(sp, vis, - ~[@respan(sp, - ast::ViewPathGlob(self.path(sp, path), ast::DUMMY_NODE_ID))]) + vec!(@respan(sp, + ast::ViewPathGlob(self.path(sp, path), ast::DUMMY_NODE_ID)))) } } diff --git a/src/libsyntax/ext/bytes.rs b/src/libsyntax/ext/bytes.rs index 68aa757c524..6123fd4d3d4 100644 --- a/src/libsyntax/ext/bytes.rs +++ b/src/libsyntax/ext/bytes.rs @@ -17,6 +17,7 @@ use ext::base; use ext::build::AstBuilder; use std::char; +use std::vec_ng::Vec; pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { // Gather all argument expressions @@ -24,7 +25,7 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> None => return MacResult::dummy_expr(sp), Some(e) => e, }; - let mut bytes = ~[]; + let mut bytes = Vec::new(); for expr in exprs.iter() { match expr.node { diff --git a/src/libsyntax/ext/cfg.rs b/src/libsyntax/ext/cfg.rs index 295c456c9d0..5d11a0d1e2f 100644 --- a/src/libsyntax/ext/cfg.rs +++ b/src/libsyntax/ext/cfg.rs @@ -26,12 +26,16 @@ use parse::token::InternedString; use parse::token; use parse; +use std::vec_ng::Vec; + pub fn expand_cfg(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { let mut p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), - tts.to_owned()); + tts.iter() + .map(|x| (*x).clone()) + .collect()); - let mut cfgs = ~[]; + let mut cfgs = Vec::new(); // parse `cfg!(meta_item, meta_item(x,y), meta_item="foo", ...)` while p.token != token::EOF { cfgs.push(p.parse_meta_item()); @@ -42,7 +46,8 @@ pub fn expand_cfg(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::M // test_cfg searches for meta items looking like `cfg(foo, ...)` let in_cfg = &[cx.meta_list(sp, InternedString::new("cfg"), cfgs)]; - let matches_cfg = attr::test_cfg(cx.cfg(), in_cfg.iter().map(|&x| x)); + let matches_cfg = attr::test_cfg(cx.cfg().as_slice(), + in_cfg.iter().map(|&x| x)); let e = cx.expr_bool(sp, matches_cfg); MRExpr(e) } diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index 85cfd4f61e4..25525869398 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -48,13 +48,13 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) ast::Path { span: sp, global: false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: res, lifetimes: opt_vec::Empty, types: opt_vec::Empty, } - ] + ) } ), span: sp, diff --git a/src/libsyntax/ext/deriving/clone.rs b/src/libsyntax/ext/deriving/clone.rs index f52a2accd8d..feda1694ff1 100644 --- a/src/libsyntax/ext/deriving/clone.rs +++ b/src/libsyntax/ext/deriving/clone.rs @@ -14,6 +14,8 @@ use ext::base::ExtCtxt; use 
ext::build::AstBuilder; use ext::deriving::generic::*; +use std::vec_ng::Vec; + pub fn expand_deriving_clone(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -21,22 +23,22 @@ pub fn expand_deriving_clone(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "clone", "Clone"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "clone", "Clone")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "clone", generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[], + args: Vec::new(), ret_ty: Self, inline: true, const_nonmatching: false, combine_substructure: |c, s, sub| cs_clone("Clone", c, s, sub) } - ] + ) }; trait_def.expand(cx, mitem, item, push) @@ -49,16 +51,16 @@ pub fn expand_deriving_deep_clone(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "clone", "DeepClone"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "clone", "DeepClone")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "deep_clone", generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[], + args: Vec::new(), ret_ty: Self, inline: true, const_nonmatching: false, @@ -66,7 +68,7 @@ pub fn expand_deriving_deep_clone(cx: &mut ExtCtxt, // call deep_clone (not clone) here. combine_substructure: |c, s, sub| cs_clone("DeepClone", c, s, sub) } - ] + ) }; trait_def.expand(cx, mitem, item, push) @@ -80,7 +82,7 @@ fn cs_clone( let ctor_ident; let all_fields; let subcall = |field: &FieldInfo| - cx.expr_method_call(field.span, field.self_, clone_ident, ~[]); + cx.expr_method_call(field.span, field.self_, clone_ident, Vec::new()); match *substr.fields { Struct(ref af) => { @@ -99,7 +101,7 @@ fn cs_clone( name)) } - if all_fields.len() >= 1 && all_fields[0].name.is_none() { + if all_fields.len() >= 1 && all_fields.get(0).name.is_none() { // enum-like let subcalls = all_fields.map(subcall); cx.expr_call_ident(trait_span, ctor_ident, subcalls) diff --git a/src/libsyntax/ext/deriving/cmp/eq.rs b/src/libsyntax/ext/deriving/cmp/eq.rs index b031f69084d..1e7199ccc95 100644 --- a/src/libsyntax/ext/deriving/cmp/eq.rs +++ b/src/libsyntax/ext/deriving/cmp/eq.rs @@ -14,6 +14,8 @@ use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use std::vec_ng::Vec; + pub fn expand_deriving_eq(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -36,8 +38,8 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt, name: $name, generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[borrowed_self()], - ret_ty: Literal(Path::new(~["bool"])), + args: vec!(borrowed_self()), + ret_ty: Literal(Path::new(vec!("bool"))), inline: true, const_nonmatching: true, combine_substructure: $f @@ -47,14 +49,14 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt, let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "cmp", "Eq"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "cmp", "Eq")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( md!("eq", cs_eq), md!("ne", cs_ne) - ] + ) }; trait_def.expand(cx, mitem, item, push) } diff --git a/src/libsyntax/ext/deriving/cmp/ord.rs b/src/libsyntax/ext/deriving/cmp/ord.rs index 
10a416045cb..66f45988239 100644 --- a/src/libsyntax/ext/deriving/cmp/ord.rs +++ b/src/libsyntax/ext/deriving/cmp/ord.rs @@ -15,6 +15,8 @@ use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use std::vec_ng::Vec; + pub fn expand_deriving_ord(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -26,8 +28,8 @@ pub fn expand_deriving_ord(cx: &mut ExtCtxt, name: $name, generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[borrowed_self()], - ret_ty: Literal(Path::new(~["bool"])), + args: vec!(borrowed_self()), + ret_ty: Literal(Path::new(vec!("bool"))), inline: true, const_nonmatching: false, combine_substructure: |cx, span, substr| cs_op($op, $equal, cx, span, substr) @@ -37,16 +39,16 @@ pub fn expand_deriving_ord(cx: &mut ExtCtxt, let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "cmp", "Ord"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "cmp", "Ord")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( md!("lt", true, false), md!("le", true, true), md!("gt", false, false), md!("ge", false, true) - ] + ) }; trait_def.expand(cx, mitem, item, push) } diff --git a/src/libsyntax/ext/deriving/cmp/totaleq.rs b/src/libsyntax/ext/deriving/cmp/totaleq.rs index 2bfab8646a6..2b3c0b9ea69 100644 --- a/src/libsyntax/ext/deriving/cmp/totaleq.rs +++ b/src/libsyntax/ext/deriving/cmp/totaleq.rs @@ -14,6 +14,8 @@ use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use std::vec_ng::Vec; + pub fn expand_deriving_totaleq(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -26,22 +28,22 @@ pub fn expand_deriving_totaleq(cx: &mut ExtCtxt, let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "cmp", "TotalEq"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "cmp", "TotalEq")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "equals", generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[borrowed_self()], - ret_ty: Literal(Path::new(~["bool"])), + args: vec!(borrowed_self()), + ret_ty: Literal(Path::new(vec!("bool"))), inline: true, const_nonmatching: true, combine_substructure: cs_equals } - ] + ) }; trait_def.expand(cx, mitem, item, push) } diff --git a/src/libsyntax/ext/deriving/cmp/totalord.rs b/src/libsyntax/ext/deriving/cmp/totalord.rs index 2e6c4a54228..89a344bdb7b 100644 --- a/src/libsyntax/ext/deriving/cmp/totalord.rs +++ b/src/libsyntax/ext/deriving/cmp/totalord.rs @@ -14,7 +14,9 @@ use codemap::Span; use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; + use std::cmp::{Ordering, Equal, Less, Greater}; +use std::vec_ng::Vec; pub fn expand_deriving_totalord(cx: &mut ExtCtxt, span: Span, @@ -23,22 +25,22 @@ pub fn expand_deriving_totalord(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "cmp", "TotalOrd"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "cmp", "TotalOrd")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "cmp", generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[borrowed_self()], - ret_ty: Literal(Path::new(~["std", "cmp", "Ordering"])), + args: vec!(borrowed_self()), + ret_ty: 
Literal(Path::new(vec!("std", "cmp", "Ordering"))), inline: true, const_nonmatching: false, combine_substructure: cs_cmp } - ] + ) }; trait_def.expand(cx, mitem, item, push) @@ -52,9 +54,9 @@ pub fn ordering_const(cx: &mut ExtCtxt, span: Span, cnst: Ordering) -> ast::Path Greater => "Greater" }; cx.path_global(span, - ~[cx.ident_of("std"), + vec!(cx.ident_of("std"), cx.ident_of("cmp"), - cx.ident_of(cnst)]) + cx.ident_of(cnst))) } pub fn cs_cmp(cx: &mut ExtCtxt, span: Span, @@ -99,7 +101,7 @@ pub fn cs_cmp(cx: &mut ExtCtxt, span: Span, let if_ = cx.expr_if(span, cond, old, Some(cx.expr_ident(span, test_id))); - cx.expr_block(cx.block(span, ~[assign], Some(if_))) + cx.expr_block(cx.block(span, vec!(assign), Some(if_))) }, cx.expr_path(equals_path.clone()), |cx, span, list, _| { diff --git a/src/libsyntax/ext/deriving/decodable.rs b/src/libsyntax/ext/deriving/decodable.rs index 7aaa66cbfb5..bc6d69c7cca 100644 --- a/src/libsyntax/ext/deriving/decodable.rs +++ b/src/libsyntax/ext/deriving/decodable.rs @@ -21,6 +21,8 @@ use ext::deriving::generic::*; use parse::token::InternedString; use parse::token; +use std::vec_ng::Vec; + pub fn expand_deriving_decodable(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -28,27 +30,26 @@ pub fn expand_deriving_decodable(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new_(~["serialize", "Decodable"], None, - ~[~Literal(Path::new_local("__D"))], true), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new_(vec!("serialize", "Decodable"), None, + vec!(~Literal(Path::new_local("__D"))), true), + additional_bounds: Vec::new(), generics: LifetimeBounds { - lifetimes: ~[], - bounds: ~[("__D", ~[Path::new(~["serialize", "Decoder"])])], + lifetimes: Vec::new(), + bounds: vec!(("__D", vec!(Path::new(vec!("serialize", "Decoder"))))), }, - methods: ~[ + methods: vec!( MethodDef { name: "decode", generics: LifetimeBounds::empty(), explicit_self: None, - args: ~[Ptr(~Literal(Path::new_local("__D")), - Borrowed(None, MutMutable))], + args: vec!(Ptr(~Literal(Path::new_local("__D")), + Borrowed(None, MutMutable))), ret_ty: Self, inline: false, const_nonmatching: true, combine_substructure: decodable_substructure, - }, - ] + }) }; trait_def.expand(cx, mitem, item, push) @@ -57,13 +58,13 @@ pub fn expand_deriving_decodable(cx: &mut ExtCtxt, fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> @Expr { let decoder = substr.nonself_args[0]; - let recurse = ~[cx.ident_of("serialize"), + let recurse = vec!(cx.ident_of("serialize"), cx.ident_of("Decodable"), - cx.ident_of("decode")]; + cx.ident_of("decode")); // throw an underscore in front to suppress unused variable warnings let blkarg = cx.ident_of("_d"); let blkdecoder = cx.expr_ident(trait_span, blkarg); - let calldecode = cx.expr_call_global(trait_span, recurse, ~[blkdecoder]); + let calldecode = cx.expr_call_global(trait_span, recurse, vec!(blkdecoder)); let lambdadecode = cx.lambda_expr_1(trait_span, calldecode, blkarg); return match *substr.fields { @@ -80,24 +81,24 @@ fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span, summary, |cx, span, name, field| { cx.expr_method_call(span, blkdecoder, read_struct_field, - ~[cx.expr_str(span, name), + vec!(cx.expr_str(span, name), cx.expr_uint(span, field), - lambdadecode]) + lambdadecode)) }); cx.expr_method_call(trait_span, decoder, cx.ident_of("read_struct"), - ~[ + vec!( cx.expr_str(trait_span, token::get_ident(substr.type_ident)), cx.expr_uint(trait_span, 
nfields), cx.lambda_expr_1(trait_span, result, blkarg) - ]) + )) } StaticEnum(_, ref fields) => { let variant = cx.ident_of("i"); - let mut arms = ~[]; - let mut variants = ~[]; + let mut arms = Vec::new(); + let mut variants = Vec::new(); let rvariant_arg = cx.ident_of("read_enum_variant_arg"); for (i, &(name, v_span, ref parts)) in fields.iter().enumerate() { @@ -110,29 +111,29 @@ fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span, |cx, span, _, field| { let idx = cx.expr_uint(span, field); cx.expr_method_call(span, blkdecoder, rvariant_arg, - ~[idx, lambdadecode]) + vec!(idx, lambdadecode)) }); arms.push(cx.arm(v_span, - ~[cx.pat_lit(v_span, cx.expr_uint(v_span, i))], + vec!(cx.pat_lit(v_span, cx.expr_uint(v_span, i))), decoded)); } arms.push(cx.arm_unreachable(trait_span)); let result = cx.expr_match(trait_span, cx.expr_ident(trait_span, variant), arms); - let lambda = cx.lambda_expr(trait_span, ~[blkarg, variant], result); + let lambda = cx.lambda_expr(trait_span, vec!(blkarg, variant), result); let variant_vec = cx.expr_vec(trait_span, variants); let result = cx.expr_method_call(trait_span, blkdecoder, cx.ident_of("read_enum_variant"), - ~[variant_vec, lambda]); + vec!(variant_vec, lambda)); cx.expr_method_call(trait_span, decoder, cx.ident_of("read_enum"), - ~[ + vec!( cx.expr_str(trait_span, token::get_ident(substr.type_ident)), cx.lambda_expr_1(trait_span, result, blkarg) - ]) + )) } _ => cx.bug("expected StaticEnum or StaticStruct in deriving(Decodable)") }; diff --git a/src/libsyntax/ext/deriving/default.rs b/src/libsyntax/ext/deriving/default.rs index c5ef86273b6..8259459f57a 100644 --- a/src/libsyntax/ext/deriving/default.rs +++ b/src/libsyntax/ext/deriving/default.rs @@ -14,6 +14,8 @@ use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use std::vec_ng::Vec; + pub fn expand_deriving_default(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -21,34 +23,33 @@ pub fn expand_deriving_default(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "default", "Default"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "default", "Default")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "default", generics: LifetimeBounds::empty(), explicit_self: None, - args: ~[], + args: Vec::new(), ret_ty: Self, inline: true, const_nonmatching: false, combine_substructure: default_substructure - }, - ] + }) }; trait_def.expand(cx, mitem, item, push) } fn default_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> @Expr { - let default_ident = ~[ + let default_ident = vec!( cx.ident_of("std"), cx.ident_of("default"), cx.ident_of("Default"), cx.ident_of("default") - ]; - let default_call = |span| cx.expr_call_global(span, default_ident.clone(), ~[]); + ); + let default_call = |span| cx.expr_call_global(span, default_ident.clone(), Vec::new()); return match *substr.fields { StaticStruct(_, ref summary) => { diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs index ae23013b7cc..091ff7b9c90 100644 --- a/src/libsyntax/ext/deriving/encodable.rs +++ b/src/libsyntax/ext/deriving/encodable.rs @@ -89,6 +89,8 @@ use ext::build::AstBuilder; use ext::deriving::generic::*; use parse::token; +use std::vec_ng::Vec; + pub fn expand_deriving_encodable(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -96,27 +98,26 @@ pub fn 
expand_deriving_encodable(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new_(~["serialize", "Encodable"], None, - ~[~Literal(Path::new_local("__E"))], true), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new_(vec!("serialize", "Encodable"), None, + vec!(~Literal(Path::new_local("__E"))), true), + additional_bounds: Vec::new(), generics: LifetimeBounds { - lifetimes: ~[], - bounds: ~[("__E", ~[Path::new(~["serialize", "Encoder"])])], + lifetimes: Vec::new(), + bounds: vec!(("__E", vec!(Path::new(vec!("serialize", "Encoder"))))), }, - methods: ~[ + methods: vec!( MethodDef { name: "encode", generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[Ptr(~Literal(Path::new_local("__E")), - Borrowed(None, MutMutable))], + args: vec!(Ptr(~Literal(Path::new_local("__E")), + Borrowed(None, MutMutable))), ret_ty: nil_ty(), inline: false, const_nonmatching: true, combine_substructure: encodable_substructure, - }, - ] + }) }; trait_def.expand(cx, mitem, item, push) @@ -133,7 +134,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, return match *substr.fields { Struct(ref fields) => { let emit_struct_field = cx.ident_of("emit_struct_field"); - let mut stmts = ~[]; + let mut stmts = Vec::new(); for (i, &FieldInfo { name, self_, @@ -146,13 +147,13 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, token::intern_and_get_ident(format!("_field{}", i)) } }; - let enc = cx.expr_method_call(span, self_, encode, ~[blkencoder]); + let enc = cx.expr_method_call(span, self_, encode, vec!(blkencoder)); let lambda = cx.lambda_expr_1(span, enc, blkarg); let call = cx.expr_method_call(span, blkencoder, emit_struct_field, - ~[cx.expr_str(span, name), + vec!(cx.expr_str(span, name), cx.expr_uint(span, i), - lambda]); + lambda)); stmts.push(cx.stmt_expr(call)); } @@ -160,11 +161,11 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, cx.expr_method_call(trait_span, encoder, cx.ident_of("emit_struct"), - ~[ + vec!( cx.expr_str(trait_span, token::get_ident(substr.type_ident)), cx.expr_uint(trait_span, fields.len()), blk - ]) + )) } EnumMatching(idx, variant, ref fields) => { @@ -175,14 +176,14 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, let me = cx.stmt_let(trait_span, false, blkarg, encoder); let encoder = cx.expr_ident(trait_span, blkarg); let emit_variant_arg = cx.ident_of("emit_enum_variant_arg"); - let mut stmts = ~[]; + let mut stmts = Vec::new(); for (i, &FieldInfo { self_, span, .. 
}) in fields.iter().enumerate() { - let enc = cx.expr_method_call(span, self_, encode, ~[blkencoder]); + let enc = cx.expr_method_call(span, self_, encode, vec!(blkencoder)); let lambda = cx.lambda_expr_1(span, enc, blkarg); let call = cx.expr_method_call(span, blkencoder, emit_variant_arg, - ~[cx.expr_uint(span, i), - lambda]); + vec!(cx.expr_uint(span, i), + lambda)); stmts.push(cx.stmt_expr(call)); } @@ -190,19 +191,19 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, let name = cx.expr_str(trait_span, token::get_ident(variant.node.name)); let call = cx.expr_method_call(trait_span, blkencoder, cx.ident_of("emit_enum_variant"), - ~[name, + vec!(name, cx.expr_uint(trait_span, idx), cx.expr_uint(trait_span, fields.len()), - blk]); + blk)); let blk = cx.lambda_expr_1(trait_span, call, blkarg); let ret = cx.expr_method_call(trait_span, encoder, cx.ident_of("emit_enum"), - ~[ + vec!( cx.expr_str(trait_span, token::get_ident(substr.type_ident)), blk - ]); - cx.expr_block(cx.block(trait_span, ~[me], Some(ret))) + )); + cx.expr_block(cx.block(trait_span, vec!(me), Some(ret))) } _ => cx.bug("expected Struct or EnumMatching in deriving(Encodable)") diff --git a/src/libsyntax/ext/deriving/generic.rs b/src/libsyntax/ext/deriving/generic.rs index 24d4efb1b0e..1dc474551cf 100644 --- a/src/libsyntax/ext/deriving/generic.rs +++ b/src/libsyntax/ext/deriving/generic.rs @@ -188,7 +188,8 @@ use opt_vec; use parse::token::InternedString; use parse::token; -use std::vec; +use std::vec_ng::Vec; +use std::vec_ng; pub use self::ty::*; mod ty; @@ -197,20 +198,19 @@ pub struct TraitDef<'a> { /// The span for the current #[deriving(Foo)] header. span: Span, - attributes: ~[ast::Attribute], + attributes: Vec<ast::Attribute> , /// Path of the trait, including any type parameters path: Path<'a>, /// Additional bounds required of any type parameters of the type, /// other than the current trait - additional_bounds: ~[Ty<'a>], + additional_bounds: Vec<Ty<'a>> , /// Any extra lifetimes and/or bounds, e.g. `D: serialize::Decoder` generics: LifetimeBounds<'a>, - methods: ~[MethodDef<'a>] -} + methods: Vec<MethodDef<'a>> } pub struct MethodDef<'a> { @@ -225,7 +225,7 @@ pub struct MethodDef<'a> { explicit_self: Option<Option<PtrTy<'a>>>, /// Arguments other than the self argument - args: ~[Ty<'a>], + args: Vec<Ty<'a>> , /// Return type ret_ty: Ty<'a>, @@ -264,39 +264,38 @@ pub struct FieldInfo { self_: @Expr, /// The expressions corresponding to references to this field in /// the other Self arguments. - other: ~[@Expr] -} + other: Vec<@Expr> } /// Fields for a static method pub enum StaticFields { /// Tuple structs/enum variants like this - Unnamed(~[Span]), + Unnamed(Vec<Span> ), /// Normal structs/struct variants. - Named(~[(Ident, Span)]) + Named(Vec<(Ident, Span)> ) } /// A summary of the possible sets of fields. See above for details /// and examples pub enum SubstructureFields<'a> { - Struct(~[FieldInfo]), + Struct(Vec<FieldInfo> ), /** Matching variants of the enum: variant index, ast::Variant, fields: the field name is only non-`None` in the case of a struct variant. */ - EnumMatching(uint, &'a ast::Variant, ~[FieldInfo]), + EnumMatching(uint, &'a ast::Variant, Vec<FieldInfo> ), /** non-matching variants of the enum, [(variant index, ast::Variant, [field span, field ident, fields])] (i.e. all fields for self are in the first tuple, for other1 are in the second tuple, etc.) 
*/ - EnumNonMatching(&'a [(uint, P<ast::Variant>, ~[(Span, Option<Ident>, @Expr)])]), + EnumNonMatching(&'a [(uint, P<ast::Variant>, Vec<(Span, Option<Ident>, @Expr)> )]), /// A static method where Self is a struct. StaticStruct(&'a ast::StructDef, StaticFields), /// A static method where Self is an enum. - StaticEnum(&'a ast::EnumDef, ~[(Ident, Span, StaticFields)]) + StaticEnum(&'a ast::EnumDef, Vec<(Ident, Span, StaticFields)> ) } @@ -316,7 +315,7 @@ representing each variant: (variant index, ast::Variant instance, pub type EnumNonMatchFunc<'a> = 'a |&mut ExtCtxt, Span, - &[(uint, P<ast::Variant>, ~[(Span, Option<Ident>, @Expr)])], + &[(uint, P<ast::Variant>, Vec<(Span, Option<Ident>, @Expr)> )], &[@Expr]| -> @Expr; @@ -360,7 +359,7 @@ impl<'a> TraitDef<'a> { cx: &mut ExtCtxt, type_ident: Ident, generics: &Generics, - methods: ~[@ast::Method]) -> @ast::Item { + methods: Vec<@ast::Method> ) -> @ast::Item { let trait_path = self.path.to_path(cx, self.span, type_ident, generics); let mut trait_generics = self.generics.to_generics(cx, self.span, @@ -397,7 +396,7 @@ impl<'a> TraitDef<'a> { // Create the type of `self`. let self_type = cx.ty_path( - cx.path_all(self.span, false, ~[ type_ident ], self_lifetimes, + cx.path_all(self.span, false, vec!( type_ident ), self_lifetimes, opt_vec::take_vec(self_ty_params)), None); let doc_attr = cx.attribute( @@ -412,7 +411,7 @@ impl<'a> TraitDef<'a> { cx.item( self.span, ident, - vec::append(~[doc_attr], self.attributes), + vec_ng::append(vec!(doc_attr), self.attributes.as_slice()), ast::ItemImpl(trait_generics, opt_trait_ref, self_type, methods.map(|x| *x))) } @@ -433,13 +432,15 @@ impl<'a> TraitDef<'a> { self, struct_def, type_ident, - self_args, nonself_args) + self_args.as_slice(), + nonself_args.as_slice()) } else { method_def.expand_struct_method_body(cx, self, struct_def, type_ident, - self_args, nonself_args) + self_args.as_slice(), + nonself_args.as_slice()) }; method_def.create_method(cx, self, @@ -467,13 +468,15 @@ impl<'a> TraitDef<'a> { self, enum_def, type_ident, - self_args, nonself_args) + self_args.as_slice(), + nonself_args.as_slice()) } else { method_def.expand_enum_method_body(cx, self, enum_def, type_ident, - self_args, nonself_args) + self_args.as_slice(), + nonself_args.as_slice()) }; method_def.create_method(cx, self, @@ -524,11 +527,11 @@ impl<'a> MethodDef<'a> { trait_: &TraitDef, type_ident: Ident, generics: &Generics) - -> (ast::ExplicitSelf, ~[@Expr], ~[@Expr], ~[(Ident, P<ast::Ty>)]) { + -> (ast::ExplicitSelf, Vec<@Expr> , Vec<@Expr> , Vec<(Ident, P<ast::Ty>)> ) { - let mut self_args = ~[]; - let mut nonself_args = ~[]; - let mut arg_tys = ~[]; + let mut self_args = Vec::new(); + let mut nonself_args = Vec::new(); + let mut arg_tys = Vec::new(); let mut nonstatic = false; let ast_explicit_self = match self.explicit_self { @@ -575,7 +578,7 @@ impl<'a> MethodDef<'a> { type_ident: Ident, generics: &Generics, explicit_self: ast::ExplicitSelf, - arg_types: ~[(Ident, P<ast::Ty>)], + arg_types: Vec<(Ident, P<ast::Ty>)> , body: @Expr) -> @ast::Method { // create the generics that aren't for Self let fn_generics = self.generics.to_generics(cx, trait_.span, type_ident, generics); @@ -598,16 +601,16 @@ impl<'a> MethodDef<'a> { let body_block = cx.block_expr(body); let attrs = if self.inline { - ~[ + vec!( cx .attribute(trait_.span, cx .meta_word(trait_.span, InternedString::new( "inline"))) - ] + ) } else { - ~[] + Vec::new() }; // Create the method. 
@@ -655,9 +658,9 @@ impl<'a> MethodDef<'a> { nonself_args: &[@Expr]) -> @Expr { - let mut raw_fields = ~[]; // ~[[fields of self], + let mut raw_fields = Vec::new(); // ~[[fields of self], // [fields of next Self arg], [etc]] - let mut patterns = ~[]; + let mut patterns = Vec::new(); for i in range(0u, self_args.len()) { let (pat, ident_expr) = trait_.create_struct_pattern(cx, type_ident, struct_def, format!("__self_{}", i), @@ -668,14 +671,15 @@ impl<'a> MethodDef<'a> { // transpose raw_fields let fields = if raw_fields.len() > 0 { - raw_fields[0].iter() - .enumerate() - .map(|(i, &(span, opt_id, field))| { - let other_fields = raw_fields.tail().map(|l| { - match &l[i] { + raw_fields.get(0) + .iter() + .enumerate() + .map(|(i, &(span, opt_id, field))| { + let other_fields = raw_fields.tail().iter().map(|l| { + match l.get(i) { &(_, _, ex) => ex } - }); + }).collect(); FieldInfo { span: span, name: opt_id, @@ -703,7 +707,7 @@ impl<'a> MethodDef<'a> { // matter. for (&arg_expr, &pat) in self_args.iter().zip(patterns.iter()) { body = cx.expr_match(trait_.span, arg_expr, - ~[ cx.arm(trait_.span, ~[pat], body) ]) + vec!( cx.arm(trait_.span, vec!(pat), body) )) } body } @@ -759,7 +763,7 @@ impl<'a> MethodDef<'a> { self_args: &[@Expr], nonself_args: &[@Expr]) -> @Expr { - let mut matches = ~[]; + let mut matches = Vec::new(); self.build_enum_match(cx, trait_, enum_def, type_ident, self_args, nonself_args, None, &mut matches, 0) @@ -795,8 +799,8 @@ impl<'a> MethodDef<'a> { self_args: &[@Expr], nonself_args: &[@Expr], matching: Option<uint>, - matches_so_far: &mut ~[(uint, P<ast::Variant>, - ~[(Span, Option<Ident>, @Expr)])], + matches_so_far: &mut Vec<(uint, P<ast::Variant>, + Vec<(Span, Option<Ident>, @Expr)> )> , match_count: uint) -> @Expr { if match_count == self_args.len() { // we've matched against all arguments, so make the final @@ -822,17 +826,17 @@ impl<'a> MethodDef<'a> { Some(variant_index) => { // `ref` inside let matches is buggy. Causes havoc wih rusc. 
// let (variant_index, ref self_vec) = matches_so_far[0]; - let (variant, self_vec) = match matches_so_far[0] { - (_, v, ref s) => (v, s) + let (variant, self_vec) = match matches_so_far.get(0) { + &(_, v, ref s) => (v, s) }; - let mut enum_matching_fields = vec::from_elem(self_vec.len(), ~[]); + let mut enum_matching_fields = Vec::from_elem(self_vec.len(), Vec::new()); for triple in matches_so_far.tail().iter() { match triple { &(_, _, ref other_fields) => { for (i, &(_, _, e)) in other_fields.iter().enumerate() { - enum_matching_fields[i].push(e); + enum_matching_fields.get_mut(i).push(e); } } } @@ -851,7 +855,7 @@ impl<'a> MethodDef<'a> { substructure = EnumMatching(variant_index, variant, field_tuples); } None => { - substructure = EnumNonMatching(*matches_so_far); + substructure = EnumNonMatching(matches_so_far.as_slice()); } } self.call_substructure_method(cx, trait_, type_ident, @@ -865,7 +869,7 @@ impl<'a> MethodDef<'a> { format!("__arg_{}", match_count) }; - let mut arms = ~[]; + let mut arms = Vec::new(); // the code for nonmatching variants only matters when // we've seen at least one other variant already @@ -879,7 +883,7 @@ impl<'a> MethodDef<'a> { }; // matching-variant match - let variant = enum_def.variants[index]; + let variant = *enum_def.variants.get(index); let (pattern, idents) = trait_.create_enum_variant_pattern(cx, variant, current_match_str, @@ -895,7 +899,7 @@ impl<'a> MethodDef<'a> { matches_so_far, match_count + 1); matches_so_far.pop().unwrap(); - arms.push(cx.arm(trait_.span, ~[ pattern ], arm_expr)); + arms.push(cx.arm(trait_.span, vec!( pattern ), arm_expr)); if enum_def.variants.len() > 1 { let e = &EnumNonMatching(&[]); @@ -904,7 +908,7 @@ impl<'a> MethodDef<'a> { e); let wild_arm = cx.arm( trait_.span, - ~[ cx.pat_wild(trait_.span) ], + vec!( cx.pat_wild(trait_.span) ), wild_expr); arms.push(wild_arm); } @@ -933,7 +937,7 @@ impl<'a> MethodDef<'a> { match_count + 1); matches_so_far.pop().unwrap(); - let arm = cx.arm(trait_.span, ~[ pattern ], arm_expr); + let arm = cx.arm(trait_.span, vec!( pattern ), arm_expr); arms.push(arm); } } @@ -997,8 +1001,8 @@ impl<'a> TraitDef<'a> { fn summarise_struct(&self, cx: &mut ExtCtxt, struct_def: &StructDef) -> StaticFields { - let mut named_idents = ~[]; - let mut just_spans = ~[]; + let mut named_idents = Vec::new(); + let mut just_spans = Vec::new(); for field in struct_def.fields.iter(){ let sp = self.set_expn_info(cx, field.span); match field.node.kind { @@ -1020,9 +1024,9 @@ impl<'a> TraitDef<'a> { fn create_subpatterns(&self, cx: &mut ExtCtxt, - field_paths: ~[ast::Path], + field_paths: Vec<ast::Path> , mutbl: ast::Mutability) - -> ~[@ast::Pat] { + -> Vec<@ast::Pat> { field_paths.map(|path| { cx.pat(path.span, ast::PatIdent(ast::BindByRef(mutbl), (*path).clone(), None)) @@ -1035,18 +1039,18 @@ impl<'a> TraitDef<'a> { struct_def: &StructDef, prefix: &str, mutbl: ast::Mutability) - -> (@ast::Pat, ~[(Span, Option<Ident>, @Expr)]) { + -> (@ast::Pat, Vec<(Span, Option<Ident>, @Expr)> ) { if struct_def.fields.is_empty() { return ( cx.pat_ident_binding_mode( self.span, struct_ident, ast::BindByValue(ast::MutImmutable)), - ~[]); + Vec::new()); } - let matching_path = cx.path(self.span, ~[ struct_ident ]); + let matching_path = cx.path(self.span, vec!( struct_ident )); - let mut paths = ~[]; - let mut ident_expr = ~[]; + let mut paths = Vec::new(); + let mut ident_expr = Vec::new(); let mut struct_type = Unknown; for (i, struct_field) in struct_def.fields.iter().enumerate() { @@ -1096,20 +1100,20 @@ impl<'a> TraitDef<'a> { 
variant: &ast::Variant, prefix: &str, mutbl: ast::Mutability) - -> (@ast::Pat, ~[(Span, Option<Ident>, @Expr)]) { + -> (@ast::Pat, Vec<(Span, Option<Ident>, @Expr)> ) { let variant_ident = variant.node.name; match variant.node.kind { ast::TupleVariantKind(ref variant_args) => { if variant_args.is_empty() { return (cx.pat_ident_binding_mode(variant.span, variant_ident, ast::BindByValue(ast::MutImmutable)), - ~[]); + Vec::new()); } let matching_path = cx.path_ident(variant.span, variant_ident); - let mut paths = ~[]; - let mut ident_expr = ~[]; + let mut paths = Vec::new(); + let mut ident_expr = Vec::new(); for (i, va) in variant_args.iter().enumerate() { let sp = self.set_expn_info(cx, va.ty.span); let path = cx.path_ident(sp, cx.ident_of(format!("{}_{}", prefix, i))); @@ -1151,11 +1155,19 @@ pub fn cs_fold(use_foldl: bool, EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => { if use_foldl { all_fields.iter().fold(base, |old, field| { - f(cx, field.span, old, field.self_, field.other) + f(cx, + field.span, + old, + field.self_, + field.other.as_slice()) }) } else { all_fields.rev_iter().fold(base, |old, field| { - f(cx, field.span, old, field.self_, field.other) + f(cx, + field.span, + old, + field.self_, + field.other.as_slice()) }) } }, @@ -1179,7 +1191,7 @@ f(cx, span, ~[self_1.method(__arg_1_1, __arg_2_1), ~~~ */ #[inline] -pub fn cs_same_method(f: |&mut ExtCtxt, Span, ~[@Expr]| -> @Expr, +pub fn cs_same_method(f: |&mut ExtCtxt, Span, Vec<@Expr> | -> @Expr, enum_nonmatch_f: EnumNonMatchFunc, cx: &mut ExtCtxt, trait_span: Span, diff --git a/src/libsyntax/ext/deriving/hash.rs b/src/libsyntax/ext/deriving/hash.rs index acae4f9efa6..1d6cfab120d 100644 --- a/src/libsyntax/ext/deriving/hash.rs +++ b/src/libsyntax/ext/deriving/hash.rs @@ -14,6 +14,8 @@ use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use std::vec_ng::Vec; + pub fn expand_deriving_hash(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -22,23 +24,23 @@ pub fn expand_deriving_hash(cx: &mut ExtCtxt, let hash_trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "hash", "Hash"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "hash", "Hash")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "hash", generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[Ptr(~Literal(Path::new(~["std", "hash", "sip", "SipState"])), - Borrowed(None, MutMutable))], + args: vec!(Ptr(~Literal(Path::new(vec!("std", "hash", "sip", "SipState"))), + Borrowed(None, MutMutable))), ret_ty: nil_ty(), inline: true, const_nonmatching: false, combine_substructure: hash_substructure } - ] + ) }; hash_trait_def.expand(cx, mitem, item, push); @@ -51,10 +53,10 @@ fn hash_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) }; let hash_ident = substr.method_ident; let call_hash = |span, thing_expr| { - let expr = cx.expr_method_call(span, thing_expr, hash_ident, ~[state_expr]); + let expr = cx.expr_method_call(span, thing_expr, hash_ident, vec!(state_expr)); cx.stmt_expr(expr) }; - let mut stmts = ~[]; + let mut stmts = Vec::new(); let fields = match *substr.fields { Struct(ref fs) => fs, diff --git a/src/libsyntax/ext/deriving/primitive.rs b/src/libsyntax/ext/deriving/primitive.rs index 03192cc1cd2..ecd042eb172 100644 --- a/src/libsyntax/ext/deriving/primitive.rs +++ b/src/libsyntax/ext/deriving/primitive.rs @@ -16,6 +16,8 @@ use 
ext::build::AstBuilder; use ext::deriving::generic::*; use parse::token::InternedString; +use std::vec_ng::Vec; + pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -23,21 +25,20 @@ pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "num", "FromPrimitive"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "num", "FromPrimitive")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "from_i64", generics: LifetimeBounds::empty(), explicit_self: None, - args: ~[ - Literal(Path::new(~["i64"])), - ], - ret_ty: Literal(Path::new_(~["std", "option", "Option"], + args: vec!( + Literal(Path::new(vec!("i64")))), + ret_ty: Literal(Path::new_(vec!("std", "option", "Option"), None, - ~[~Self], + vec!(~Self), true)), // liable to cause code-bloat inline: true, @@ -48,19 +49,17 @@ pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt, name: "from_u64", generics: LifetimeBounds::empty(), explicit_self: None, - args: ~[ - Literal(Path::new(~["u64"])), - ], - ret_ty: Literal(Path::new_(~["std", "option", "Option"], + args: vec!( + Literal(Path::new(vec!("u64")))), + ret_ty: Literal(Path::new_(vec!("std", "option", "Option"), None, - ~[~Self], + vec!(~Self), true)), // liable to cause code-bloat inline: true, const_nonmatching: false, combine_substructure: |c, s, sub| cs_from("u64", c, s, sub), - }, - ] + }) }; trait_def.expand(cx, mitem, item, push) @@ -84,7 +83,7 @@ fn cs_from(name: &str, cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure return cx.expr_fail(trait_span, InternedString::new("")); } - let mut arms = ~[]; + let mut arms = Vec::new(); for variant in enum_def.variants.iter() { match variant.node.kind { @@ -109,7 +108,7 @@ fn cs_from(name: &str, cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure // arm for `_ if $guard => $body` let arm = ast::Arm { - pats: ~[cx.pat_wild(span)], + pats: vec!(cx.pat_wild(span)), guard: Some(guard), body: cx.block_expr(body), }; @@ -128,7 +127,7 @@ fn cs_from(name: &str, cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure // arm for `_ => None` let arm = ast::Arm { - pats: ~[cx.pat_wild(trait_span)], + pats: vec!(cx.pat_wild(trait_span)), guard: None, body: cx.block_expr(cx.expr_none(trait_span)), }; diff --git a/src/libsyntax/ext/deriving/rand.rs b/src/libsyntax/ext/deriving/rand.rs index 6efe4801592..da9679eb655 100644 --- a/src/libsyntax/ext/deriving/rand.rs +++ b/src/libsyntax/ext/deriving/rand.rs @@ -16,6 +16,8 @@ use ext::build::{AstBuilder}; use ext::deriving::generic::*; use opt_vec; +use std::vec_ng::Vec; + pub fn expand_deriving_rand(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -23,48 +25,48 @@ pub fn expand_deriving_rand(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "rand", "Rand"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "rand", "Rand")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "rand", generics: LifetimeBounds { - lifetimes: ~[], - bounds: ~[("R", - ~[ Path::new(~["std", "rand", "Rng"]) ])] + lifetimes: Vec::new(), + bounds: vec!(("R", + vec!( Path::new(vec!("std", "rand", "Rng")) ))) }, explicit_self: None, - args: ~[ + args: vec!( Ptr(~Literal(Path::new_local("R")), Borrowed(None, 
ast::MutMutable)) - ], + ), ret_ty: Self, inline: false, const_nonmatching: false, combine_substructure: rand_substructure } - ] + ) }; trait_def.expand(cx, mitem, item, push) } fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> @Expr { let rng = match substr.nonself_args { - [rng] => ~[ rng ], + [rng] => vec!( rng ), _ => cx.bug("Incorrect number of arguments to `rand` in `deriving(Rand)`") }; - let rand_ident = ~[ + let rand_ident = vec!( cx.ident_of("std"), cx.ident_of("rand"), cx.ident_of("Rand"), cx.ident_of("rand") - ]; + ); let rand_call = |cx: &mut ExtCtxt, span| { cx.expr_call_global(span, rand_ident.clone(), - ~[ rng[0] ]) + vec!( *rng.get(0) )) }; return match *substr.fields { @@ -84,13 +86,13 @@ fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) true, rand_ident.clone(), opt_vec::Empty, - ~[]); + Vec::new()); let rand_name = cx.expr_path(rand_name); // ::std::rand::Rand::rand(rng) let rv_call = cx.expr_call(trait_span, rand_name, - ~[ rng[0] ]); + vec!( *rng.get(0) )); // need to specify the uint-ness of the random number let uint_ty = cx.ty_ident(trait_span, cx.ident_of("uint")); @@ -113,15 +115,15 @@ fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) let pat = cx.pat_lit(v_span, i_expr); let thing = rand_thing(cx, v_span, ident, summary, |cx, sp| rand_call(cx, sp)); - cx.arm(v_span, ~[ pat ], thing) - }).collect::<~[ast::Arm]>(); + cx.arm(v_span, vec!( pat ), thing) + }).collect::<Vec<ast::Arm> >(); // _ => {} at the end. Should never occur arms.push(cx.arm_unreachable(trait_span)); let match_expr = cx.expr_match(trait_span, rand_variant, arms); - let block = cx.block(trait_span, ~[ let_statement ], Some(match_expr)); + let block = cx.block(trait_span, vec!( let_statement ), Some(match_expr)); cx.expr_block(block) } _ => cx.bug("Non-static method in `deriving(Rand)`") diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs index 4b9925c8d9f..51399d8efab 100644 --- a/src/libsyntax/ext/deriving/show.rs +++ b/src/libsyntax/ext/deriving/show.rs @@ -19,6 +19,7 @@ use ext::deriving::generic::*; use parse::token; use collections::HashMap; +use std::vec_ng::Vec; pub fn expand_deriving_show(cx: &mut ExtCtxt, span: Span, @@ -26,27 +27,27 @@ pub fn expand_deriving_show(cx: &mut ExtCtxt, item: @Item, push: |@Item|) { // &mut ::std::fmt::Formatter - let fmtr = Ptr(~Literal(Path::new(~["std", "fmt", "Formatter"])), + let fmtr = Ptr(~Literal(Path::new(vec!("std", "fmt", "Formatter"))), Borrowed(None, ast::MutMutable)); let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "fmt", "Show"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "fmt", "Show")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "fmt", generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[fmtr], - ret_ty: Literal(Path::new(~["std", "fmt", "Result"])), + args: vec!(fmtr), + ret_ty: Literal(Path::new(vec!("std", "fmt", "Result"))), inline: false, const_nonmatching: false, combine_substructure: show_substructure } - ] + ) }; trait_def.expand(cx, mitem, item, push) } @@ -70,7 +71,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, let mut format_string = token::get_ident(name).get().to_owned(); // the internal fields we're actually formatting - let mut exprs = ~[]; + let mut exprs = Vec::new(); // Getting harder... 
making the format string: match *substr.fields { @@ -79,7 +80,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, EnumMatching(_, _, ref fields) if fields.len() == 0 => {} Struct(ref fields) | EnumMatching(_, _, ref fields) => { - if fields[0].name.is_none() { + if fields.get(0).name.is_none() { // tuple struct/"normal" variant format_string.push_str("("); @@ -124,10 +125,10 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, let formatter = substr.nonself_args[0]; let buf = cx.expr_field_access(span, formatter, cx.ident_of("buf")); - let std_write = ~[cx.ident_of("std"), cx.ident_of("fmt"), cx.ident_of("write")]; + let std_write = vec!(cx.ident_of("std"), cx.ident_of("fmt"), cx.ident_of("write")); let args = cx.ident_of("__args"); - let write_call = cx.expr_call_global(span, std_write, ~[buf, cx.expr_ident(span, args)]); - let format_closure = cx.lambda_expr(span, ~[args], write_call); + let write_call = cx.expr_call_global(span, std_write, vec!(buf, cx.expr_ident(span, args))); + let format_closure = cx.lambda_expr(span, vec!(args), write_call); let s = token::intern_and_get_ident(format_string); let format_string = cx.expr_str(span, s); @@ -135,6 +136,6 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, // phew, not our responsibility any more! format::expand_preparsed_format_args(cx, span, format_closure, - format_string, exprs, ~[], + format_string, exprs, Vec::new(), HashMap::new()) } diff --git a/src/libsyntax/ext/deriving/ty.rs b/src/libsyntax/ext/deriving/ty.rs index 1d3dd9185ca..b88cd117911 100644 --- a/src/libsyntax/ext/deriving/ty.rs +++ b/src/libsyntax/ext/deriving/ty.rs @@ -21,6 +21,8 @@ use codemap::{Span,respan}; use opt_vec; use opt_vec::OptVec; +use std::vec_ng::Vec; + /// The types of pointers pub enum PtrTy<'a> { Send, // ~ @@ -30,22 +32,22 @@ pub enum PtrTy<'a> { /// A path, e.g. `::std::option::Option::<int>` (global). Has support /// for type parameters and a lifetime. 
pub struct Path<'a> { - path: ~[&'a str], + path: Vec<&'a str> , lifetime: Option<&'a str>, - params: ~[~Ty<'a>], + params: Vec<~Ty<'a>> , global: bool } impl<'a> Path<'a> { - pub fn new<'r>(path: ~[&'r str]) -> Path<'r> { - Path::new_(path, None, ~[], true) + pub fn new<'r>(path: Vec<&'r str> ) -> Path<'r> { + Path::new_(path, None, Vec::new(), true) } pub fn new_local<'r>(path: &'r str) -> Path<'r> { - Path::new_(~[ path ], None, ~[], false) + Path::new_(vec!( path ), None, Vec::new(), false) } - pub fn new_<'r>(path: ~[&'r str], + pub fn new_<'r>(path: Vec<&'r str> , lifetime: Option<&'r str>, - params: ~[~Ty<'r>], + params: Vec<~Ty<'r>> , global: bool) -> Path<'r> { Path { @@ -87,7 +89,7 @@ pub enum Ty<'a> { // parameter, and things like `int` Literal(Path<'a>), // includes nil - Tuple(~[Ty<'a>]) + Tuple(Vec<Ty<'a>> ) } pub fn borrowed_ptrty<'r>() -> PtrTy<'r> { @@ -106,7 +108,7 @@ pub fn borrowed_self<'r>() -> Ty<'r> { } pub fn nil_ty() -> Ty<'static> { - Tuple(~[]) + Tuple(Vec::new()) } fn mk_lifetime(cx: &ExtCtxt, span: Span, lt: &Option<&str>) -> Option<ast::Lifetime> { @@ -172,7 +174,7 @@ impl<'a> Ty<'a> { }); let lifetimes = self_generics.lifetimes.clone(); - cx.path_all(span, false, ~[self_ty], lifetimes, + cx.path_all(span, false, vec!(self_ty), lifetimes, opt_vec::take_vec(self_params)) } Literal(ref p) => { @@ -188,14 +190,14 @@ impl<'a> Ty<'a> { fn mk_ty_param(cx: &ExtCtxt, span: Span, name: &str, bounds: &[Path], self_ident: Ident, self_generics: &Generics) -> ast::TyParam { let bounds = opt_vec::from( - bounds.map(|b| { + bounds.iter().map(|b| { let path = b.to_path(cx, span, self_ident, self_generics); cx.typarambound(path) - })); + }).collect()); cx.typaram(cx.ident_of(name), bounds, None) } -fn mk_generics(lifetimes: ~[ast::Lifetime], ty_params: ~[ast::TyParam]) -> Generics { +fn mk_generics(lifetimes: Vec<ast::Lifetime> , ty_params: Vec<ast::TyParam> ) -> Generics { Generics { lifetimes: opt_vec::from(lifetimes), ty_params: opt_vec::from(ty_params) @@ -204,14 +206,14 @@ fn mk_generics(lifetimes: ~[ast::Lifetime], ty_params: ~[ast::TyParam]) -> Gene /// Lifetimes and bounds on type parameters pub struct LifetimeBounds<'a> { - lifetimes: ~[&'a str], - bounds: ~[(&'a str, ~[Path<'a>])] + lifetimes: Vec<&'a str>, + bounds: Vec<(&'a str, Vec<Path<'a>>)>, } impl<'a> LifetimeBounds<'a> { pub fn empty() -> LifetimeBounds<'static> { LifetimeBounds { - lifetimes: ~[], bounds: ~[] + lifetimes: Vec::new(), bounds: Vec::new() } } pub fn to_generics(&self, @@ -226,7 +228,12 @@ impl<'a> LifetimeBounds<'a> { let ty_params = self.bounds.map(|t| { match t { &(ref name, ref bounds) => { - mk_ty_param(cx, span, *name, *bounds, self_ty, self_generics) + mk_ty_param(cx, + span, + *name, + bounds.as_slice(), + self_ty, + self_generics) } } }); diff --git a/src/libsyntax/ext/deriving/zero.rs b/src/libsyntax/ext/deriving/zero.rs index 90f4fa0eb58..98c0ec9d072 100644 --- a/src/libsyntax/ext/deriving/zero.rs +++ b/src/libsyntax/ext/deriving/zero.rs @@ -14,6 +14,8 @@ use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use std::vec_ng::Vec; + pub fn expand_deriving_zero(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -21,16 +23,16 @@ pub fn expand_deriving_zero(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "num", "Zero"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "num", "Zero")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - 
methods: ~[ + methods: vec!( MethodDef { name: "zero", generics: LifetimeBounds::empty(), explicit_self: None, - args: ~[], + args: Vec::new(), ret_ty: Self, inline: true, const_nonmatching: false, @@ -40,8 +42,8 @@ pub fn expand_deriving_zero(cx: &mut ExtCtxt, name: "is_zero", generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[], - ret_ty: Literal(Path::new(~["bool"])), + args: Vec::new(), + ret_ty: Literal(Path::new(vec!("bool"))), inline: true, const_nonmatching: false, combine_substructure: |cx, span, substr| { @@ -52,19 +54,19 @@ pub fn expand_deriving_zero(cx: &mut ExtCtxt, cx, span, substr) } } - ] + ) }; trait_def.expand(cx, mitem, item, push) } fn zero_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> @Expr { - let zero_ident = ~[ + let zero_ident = vec!( cx.ident_of("std"), cx.ident_of("num"), cx.ident_of("Zero"), cx.ident_of("zero") - ]; - let zero_call = |span| cx.expr_call_global(span, zero_ident.clone(), ~[]); + ); + let zero_call = |span| cx.expr_call_global(span, zero_ident.clone(), Vec::new()); return match *substr.fields { StaticStruct(_, ref summary) => { diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index aacb2a74087..b0b5fa26015 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -19,6 +19,7 @@ use codemap::Span; use ext::base::*; use ext::base; use ext::build::AstBuilder; +use opt_vec; use parse::token; use std::os; @@ -31,8 +32,30 @@ pub fn expand_option_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) }; let e = match os::getenv(var) { - None => quote_expr!(cx, ::std::option::None::<&'static str>), - Some(s) => quote_expr!(cx, ::std::option::Some($s)) + None => { + cx.expr_path(cx.path_all(sp, + true, + vec!(cx.ident_of("std"), + cx.ident_of("option"), + cx.ident_of("None")), + opt_vec::Empty, + vec!(cx.ty_rptr(sp, + cx.ty_ident(sp, + cx.ident_of("str")), + Some(cx.lifetime(sp, + cx.ident_of( + "static").name)), + ast::MutImmutable)))) + } + Some(s) => { + cx.expr_call_global(sp, + vec!(cx.ident_of("std"), + cx.ident_of("option"), + cx.ident_of("Some")), + vec!(cx.expr_str(sp, + token::intern_and_get_ident( + s)))) + } }; MRExpr(e) } @@ -48,7 +71,9 @@ pub fn expand_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) Some(exprs) => exprs }; - let var = match expr_to_str(cx, exprs[0], "expected string literal") { + let var = match expr_to_str(cx, + *exprs.get(0), + "expected string literal") { None => return MacResult::dummy_expr(sp), Some((v, _style)) => v }; @@ -59,7 +84,7 @@ pub fn expand_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) var)) } 2 => { - match expr_to_str(cx, exprs[1], "expected string literal") { + match expr_to_str(cx, *exprs.get(1), "expected string literal") { None => return MacResult::dummy_expr(sp), Some((s, _style)) => s } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index b49f9fb3a38..b162e17f53d 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -31,6 +31,7 @@ use util::small_vector::SmallVector; use std::cast; use std::unstable::dynamic_lib::DynamicLibrary; use std::os; +use std::vec_ng::Vec; pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { match e.node { @@ -53,7 +54,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { // let compilation continue return MacResult::raw_dummy_expr(e.span); } - let extname = pth.segments[0].identifier; + let extname = pth.segments.get(0).identifier; let extnamestr = 
token::get_ident(extname); // leaving explicit deref here to highlight unbox op: let marked_after = match fld.extsbox.find(&extname.name) { @@ -77,7 +78,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { }); let fm = fresh_mark(); // mark before: - let marked_before = mark_tts(*tts,fm); + let marked_before = mark_tts(tts.as_slice(), fm); // The span that we pass to the expanders we want to // be the root of the call stack. That's the most @@ -87,7 +88,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { let expanded = match expandfun.expand(fld.cx, mac_span.call_site, - marked_before) { + marked_before.as_slice()) { MRExpr(e) => e, MRAny(any_macro) => any_macro.make_expr(), _ => { @@ -169,21 +170,24 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { let none_arm = { let break_expr = fld.cx.expr(span, ast::ExprBreak(opt_ident)); let none_pat = fld.cx.pat_ident(span, none_ident); - fld.cx.arm(span, ~[none_pat], break_expr) + fld.cx.arm(span, vec!(none_pat), break_expr) }; // `Some(<src_pat>) => <src_loop_block>` let some_arm = fld.cx.arm(span, - ~[fld.cx.pat_enum(span, some_path, ~[src_pat])], + vec!(fld.cx.pat_enum(span, some_path, vec!(src_pat))), fld.cx.expr_block(src_loop_block)); // `match i.next() { ... }` let match_expr = { let next_call_expr = - fld.cx.expr_method_call(span, fld.cx.expr_path(local_path), next_ident, ~[]); + fld.cx.expr_method_call(span, + fld.cx.expr_path(local_path), + next_ident, + Vec::new()); - fld.cx.expr_match(span, next_call_expr, ~[none_arm, some_arm]) + fld.cx.expr_match(span, next_call_expr, vec!(none_arm, some_arm)) }; // ['ident:] loop { ... } @@ -196,8 +200,8 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { // `match &mut <src_expr> { i => loop { ... } }` let discrim = fld.cx.expr_mut_addr_of(span, src_expr); let i_pattern = fld.cx.pat_ident(span, local_ident); - let arm = fld.cx.arm(span, ~[i_pattern], loop_expr); - fld.cx.expr_match(span, discrim, ~[arm]) + let arm = fld.cx.arm(span, vec!(i_pattern), loop_expr); + fld.cx.expr_match(span, discrim, vec!(arm)) } ast::ExprLoop(loop_block, opt_ident) => { @@ -221,7 +225,7 @@ fn rename_loop_label(opt_ident: Option<Ident>, let new_label = fresh_name(&label); let rename = (label, new_label); fld.extsbox.info().pending_renames.push(rename); - let mut pending_renames = ~[rename]; + let mut pending_renames = vec!(rename); let mut rename_fld = renames_to_fold(&mut pending_renames); (Some(rename_fld.fold_ident(label)), rename_fld.fold_block(loop_block)) @@ -276,7 +280,7 @@ pub fn expand_item(it: @ast::Item, fld: &mut MacroExpander) ast::ItemMac(..) 
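In the expansion code above, slice-taking helpers such as `mark_tts` and the expander callbacks keep their `&[TokenTree]` parameters, so call sites that now own a `Vec` borrow explicitly with `.as_slice()` instead of handing over the vector as the old `~[T]` code did. A small illustration of that calling convention; `count_idents` is a made-up stand-in, not the real `mark_tts`:

```rust
// Hypothetical stand-in for a slice-taking helper like mark_tts.
fn count_idents(tts: &[&str]) -> usize {
    tts.iter().filter(|t| t.chars().all(char::is_alphanumeric)).count()
}

fn main() {
    let marked: Vec<&str> = vec!["foo", "!", "bar"];
    // With Vec the borrow is spelled out at the call site:
    assert_eq!(count_idents(marked.as_slice()), 2);
}
```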
=> expand_item_mac(it, fld), ast::ItemMod(_) | ast::ItemForeignMod(_) => { fld.cx.mod_push(it.ident); - let macro_escape = contains_macro_escape(it.attrs); + let macro_escape = contains_macro_escape(it.attrs.as_slice()); let result = with_exts_frame!(fld.extsbox, macro_escape, noop_fold_item(it, fld)); @@ -309,7 +313,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander) _ => fld.cx.span_bug(it.span, "invalid item macro invocation") }; - let extname = pth.segments[0].identifier; + let extname = pth.segments.get(0).identifier; let extnamestr = token::get_ident(extname); let fm = fresh_mark(); let expanded = match fld.extsbox.find(&extname.name) { @@ -339,8 +343,8 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander) } }); // mark before expansion: - let marked_before = mark_tts(tts,fm); - expander.expand(fld.cx, it.span, marked_before) + let marked_before = mark_tts(tts.as_slice(), fm); + expander.expand(fld.cx, it.span, marked_before.as_slice()) } Some(&IdentTT(ref expander, span)) => { if it.ident.name == parse::token::special_idents::invalid.name { @@ -358,7 +362,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander) } }); // mark before expansion: - let marked_tts = mark_tts(tts,fm); + let marked_tts = mark_tts(tts.as_slice(), fm); expander.expand(fld.cx, it.span, it.ident, marked_tts) } _ => { @@ -391,7 +395,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander) // yikes... no idea how to apply the mark to this. I'm afraid // we're going to have to wait-and-see on this one. fld.extsbox.insert(intern(name), ext); - if attr::contains_name(it.attrs, "macro_export") { + if attr::contains_name(it.attrs.as_slice(), "macro_export") { SmallVector::one(it) } else { SmallVector::zero() @@ -504,7 +508,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> { fld.cx.span_err(pth.span, "expected macro name without module separators"); return SmallVector::zero(); } - let extname = pth.segments[0].identifier; + let extname = pth.segments.get(0).identifier; let extnamestr = token::get_ident(extname); let marked_after = match fld.extsbox.find(&extname.name) { None => { @@ -523,7 +527,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> { }); let fm = fresh_mark(); // mark before expansion: - let marked_tts = mark_tts(tts,fm); + let marked_tts = mark_tts(tts.as_slice(), fm); // See the comment in expand_expr for why we want the original span, // not the current mac.span. @@ -531,7 +535,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> { let expanded = match expandfun.expand(fld.cx, mac_span.call_site, - marked_tts) { + marked_tts.as_slice()) { MRExpr(e) => { @codemap::Spanned { node: StmtExpr(e, ast::DUMMY_NODE_ID), @@ -607,10 +611,10 @@ fn expand_non_macro_stmt(s: &Stmt, fld: &mut MacroExpander) // oh dear heaven... this is going to include the enum // names, as well... but that should be okay, as long as // the new names are gensyms for the old ones. 
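Indexing also changes shape in these hunks: `pth.segments[0].identifier` becomes `pth.segments.get(0).identifier`, because the 2014 `vec_ng::Vec::get` hands back a plain reference. Today's `Vec::get` is the checked accessor and returns an `Option`, so the modern spelling of the two operations looks like this:

```rust
fn main() {
    let segments = vec!["std", "option", "None"];

    // Checked access: Some(&elem) in range, None otherwise.
    assert_eq!(segments.get(0), Some(&"std"));
    assert_eq!(segments.get(9), None);

    // Plain indexing still exists and panics when out of bounds,
    // which is what the removed segments[0] form did.
    assert_eq!(segments[0], "std");
}
```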
- let mut name_finder = new_name_finder(~[]); + let mut name_finder = new_name_finder(Vec::new()); name_finder.visit_pat(expanded_pat,()); // generate fresh names, push them to a new pending list - let mut new_pending_renames = ~[]; + let mut new_pending_renames = Vec::new(); for ident in name_finder.ident_accumulator.iter() { let new_name = fresh_name(ident); new_pending_renames.push((*ident,new_name)); @@ -657,7 +661,7 @@ fn expand_non_macro_stmt(s: &Stmt, fld: &mut MacroExpander) // array (passed in to the traversal) #[deriving(Clone)] struct NewNameFinderContext { - ident_accumulator: ~[ast::Ident], + ident_accumulator: Vec<ast::Ident> , } impl Visitor<()> for NewNameFinderContext { @@ -676,7 +680,8 @@ impl Visitor<()> for NewNameFinderContext { span: _, segments: ref segments } if segments.len() == 1 => { - self.ident_accumulator.push(segments[0].identifier) + self.ident_accumulator.push(segments.get(0) + .identifier) } // I believe these must be enums... _ => () @@ -700,7 +705,7 @@ impl Visitor<()> for NewNameFinderContext { // return a visitor that extracts the pat_ident paths // from a given thingy and puts them in a mutable // array (passed in to the traversal) -pub fn new_name_finder(idents: ~[ast::Ident]) -> NewNameFinderContext { +pub fn new_name_finder(idents: Vec<ast::Ident> ) -> NewNameFinderContext { NewNameFinderContext { ident_accumulator: idents, } @@ -843,7 +848,7 @@ impl Folder for Marker { let macro = match m.node { MacInvocTT(ref path, ref tts, ctxt) => { MacInvocTT(self.fold_path(path), - fold_tts(*tts, self), + fold_tts(tts.as_slice(), self), new_mark(self.mark, ctxt)) } }; @@ -860,7 +865,7 @@ fn new_mark_folder(m: Mrk) -> Marker { } // apply a given mark to the given token trees. Used prior to expansion of a macro. -fn mark_tts(tts: &[TokenTree], m: Mrk) -> ~[TokenTree] { +fn mark_tts(tts: &[TokenTree], m: Mrk) -> Vec<TokenTree> { fold_tts(tts, &mut new_mark_folder(m)) } @@ -912,12 +917,14 @@ mod test { use visit; use visit::Visitor; + use std::vec_ng::Vec; + // a visitor that extracts the paths // from a given thingy and puts them in a mutable // array (passed in to the traversal) #[deriving(Clone)] struct NewPathExprFinderContext { - path_accumulator: ~[ast::Path], + path_accumulator: Vec<ast::Path> , } impl Visitor<()> for NewPathExprFinderContext { @@ -941,7 +948,7 @@ mod test { // return a visitor that extracts the paths // from a given pattern and puts them in a mutable // array (passed in to the traversal) - pub fn new_path_finder(paths: ~[ast::Path]) -> NewPathExprFinderContext { + pub fn new_path_finder(paths: Vec<ast::Path> ) -> NewPathExprFinderContext { NewPathExprFinderContext { path_accumulator: paths } @@ -954,7 +961,7 @@ mod test { fail!("lolwut") } - fn get_exported_macros(&mut self, _: ast::CrateNum) -> ~[~str] { + fn get_exported_macros(&mut self, _: ast::CrateNum) -> Vec<~str> { fail!("lolwut") } @@ -975,7 +982,7 @@ mod test { let crate_ast = parse::parse_crate_from_source_str( ~"<test>", src, - ~[],sess); + Vec::new(),sess); // should fail: let mut loader = ErrLoader; expand_crate(sess,&mut loader,crate_ast); @@ -990,7 +997,7 @@ mod test { let crate_ast = parse::parse_crate_from_source_str( ~"<test>", src, - ~[],sess); + Vec::new(),sess); // should fail: let mut loader = ErrLoader; expand_crate(sess,&mut loader,crate_ast); @@ -1004,7 +1011,7 @@ mod test { let crate_ast = parse::parse_crate_from_source_str( ~"<test>", src, - ~[], sess); + Vec::new(), sess); // should fail: let mut loader = ErrLoader; expand_crate(sess, &mut loader, crate_ast); 
@@ -1014,10 +1021,10 @@ mod test { let attr1 = make_dummy_attr ("foo"); let attr2 = make_dummy_attr ("bar"); let escape_attr = make_dummy_attr ("macro_escape"); - let attrs1 = ~[attr1, escape_attr, attr2]; - assert_eq!(contains_macro_escape (attrs1),true); - let attrs2 = ~[attr1,attr2]; - assert_eq!(contains_macro_escape (attrs2),false); + let attrs1 = vec!(attr1, escape_attr, attr2); + assert_eq!(contains_macro_escape(attrs1.as_slice()),true); + let attrs2 = vec!(attr1,attr2); + assert_eq!(contains_macro_escape(attrs2.as_slice()),false); } // make a MetaWord outer attribute with the given name @@ -1082,48 +1089,30 @@ mod test { // in principle, you might want to control this boolean on a per-varref basis, // but that would make things even harder to understand, and might not be // necessary for thorough testing. - type RenamingTest = (&'static str, ~[~[uint]], bool); + type RenamingTest = (&'static str, Vec<Vec<uint>>, bool); #[test] fn automatic_renaming () { - let tests: ~[RenamingTest] = - ~[// b & c should get new names throughout, in the expr too: + let tests: Vec<RenamingTest> = + vec!(// b & c should get new names throughout, in the expr too: ("fn a() -> int { let b = 13; let c = b; b+c }", - ~[~[0,1],~[2]], false), + vec!(vec!(0,1),vec!(2)), false), // both x's should be renamed (how is this causing a bug?) ("fn main () {let x: int = 13;x;}", - ~[~[0]], false), + vec!(vec!(0)), false), // the use of b after the + should be renamed, the other one not: ("macro_rules! f (($x:ident) => (b + $x)) fn a() -> int { let b = 13; f!(b)}", - ~[~[1]], false), + vec!(vec!(1)), false), // the b before the plus should not be renamed (requires marks) ("macro_rules! f (($x:ident) => ({let b=9; ($x + b)})) fn a() -> int { f!(b)}", - ~[~[1]], false), + vec!(vec!(1)), false), // the marks going in and out of letty should cancel, allowing that $x to // capture the one following the semicolon. // this was an awesome test case, and caught a *lot* of bugs. ("macro_rules! letty(($x:ident) => (let $x = 15;)) macro_rules! user(($x:ident) => ({letty!($x); $x})) fn main() -> int {user!(z)}", - ~[~[0]], false), - // no longer a fixme #8062: this test exposes a *potential* bug; our system does - // not behave exactly like MTWT, but a conversation with Matthew Flatt - // suggests that this can only occur in the presence of local-expand, which - // we have no plans to support. - // ("fn main() {let hrcoo = 19; macro_rules! getx(()=>(hrcoo)); getx!();}", - // ~[~[0]], true) - // FIXME #6994: the next string exposes the bug referred to in issue 6994, so I'm - // commenting it out. - // the z flows into and out of two macros (g & f) along one path, and one - // (just g) along the other, so the result of the whole thing should - // be "let z_123 = 3; z_123" - //"macro_rules! g (($x:ident) => - // ({macro_rules! f(($y:ident)=>({let $y=3;$x}));f!($x)})) - // fn a(){g!(z)}" - // create a really evil test case where a $x appears inside a binding of $x - // but *shouldnt* bind because it was inserted by a different macro.... - // can't write this test case until we have macro-generating macros. 
- ]; + vec!(vec!(0)), false)); for (idx,s) in tests.iter().enumerate() { run_renaming_test(s,idx); } @@ -1137,20 +1126,20 @@ mod test { }; let cr = expand_crate_str(teststr.to_owned()); // find the bindings: - let mut name_finder = new_name_finder(~[]); + let mut name_finder = new_name_finder(Vec::new()); visit::walk_crate(&mut name_finder,&cr,()); let bindings = name_finder.ident_accumulator; // find the varrefs: - let mut path_finder = new_path_finder(~[]); + let mut path_finder = new_path_finder(Vec::new()); visit::walk_crate(&mut path_finder,&cr,()); let varrefs = path_finder.path_accumulator; // must be one check clause for each binding: assert_eq!(bindings.len(),bound_connections.len()); for (binding_idx,shouldmatch) in bound_connections.iter().enumerate() { - let binding_name = mtwt_resolve(bindings[binding_idx]); - let binding_marks = mtwt_marksof(bindings[binding_idx].ctxt,invalid_name); + let binding_name = mtwt_resolve(*bindings.get(binding_idx)); + let binding_marks = mtwt_marksof(bindings.get(binding_idx).ctxt,invalid_name); // shouldmatch can't name varrefs that don't exist: assert!((shouldmatch.len() == 0) || (varrefs.len() > *shouldmatch.iter().max().unwrap())); @@ -1159,13 +1148,18 @@ mod test { // it should be a path of length 1, and it should // be free-identifier=? or bound-identifier=? to the given binding assert_eq!(varref.segments.len(),1); - let varref_name = mtwt_resolve(varref.segments[0].identifier); - let varref_marks = mtwt_marksof(varref.segments[0].identifier.ctxt, + let varref_name = mtwt_resolve(varref.segments + .get(0) + .identifier); + let varref_marks = mtwt_marksof(varref.segments + .get(0) + .identifier + .ctxt, invalid_name); if !(varref_name==binding_name) { println!("uh oh, should match but doesn't:"); println!("varref: {:?}",varref); - println!("binding: {:?}", bindings[binding_idx]); + println!("binding: {:?}", *bindings.get(binding_idx)); ast_util::display_sctable(get_sctable()); } assert_eq!(varref_name,binding_name); @@ -1176,7 +1170,8 @@ mod test { } } else { let fail = (varref.segments.len() == 1) - && (mtwt_resolve(varref.segments[0].identifier) == binding_name); + && (mtwt_resolve(varref.segments.get(0).identifier) == + binding_name); // temp debugging: if fail { println!("failure on test {}",test_idx); @@ -1185,11 +1180,13 @@ mod test { println!("uh oh, matches but shouldn't:"); println!("varref: {:?}",varref); // good lord, you can't make a path with 0 segments, can you? 
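The renaming tests above rewrite their nested owned-vector literals (`~[~[0,1],~[2]]`) as nested `vec!` invocations of type `Vec<Vec<uint>>`. The same shape in current Rust, with `usize` standing in for the old `uint`:

```rust
fn main() {
    // was: ~[~[0, 1], ~[2]]
    let bound_connections: Vec<Vec<usize>> = vec![vec![0, 1], vec![2]];
    assert_eq!(bound_connections.len(), 2);
    assert_eq!(bound_connections[0], vec![0, 1]);
}
```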
- let string = token::get_ident(varref.segments[0].identifier); + let string = token::get_ident(varref.segments + .get(0) + .identifier); println!("varref's first segment's uint: {}, and string: \"{}\"", - varref.segments[0].identifier.name, + varref.segments.get(0).identifier.name, string.get()); - println!("binding: {:?}", bindings[binding_idx]); + println!("binding: {:?}", *bindings.get(binding_idx)); ast_util::display_sctable(get_sctable()); } assert!(!fail); @@ -1205,40 +1202,41 @@ foo_module!() "; let cr = expand_crate_str(crate_str); // find the xx binding - let mut name_finder = new_name_finder(~[]); + let mut name_finder = new_name_finder(Vec::new()); visit::walk_crate(&mut name_finder, &cr, ()); let bindings = name_finder.ident_accumulator; - let cxbinds: ~[&ast::Ident] = + let cxbinds: Vec<&ast::Ident> = bindings.iter().filter(|b| { let ident = token::get_ident(**b); let string = ident.get(); "xx" == string }).collect(); - let cxbinds: &[&ast::Ident] = cxbinds; + let cxbinds: &[&ast::Ident] = cxbinds.as_slice(); let cxbind = match cxbinds { [b] => b, _ => fail!("expected just one binding for ext_cx") }; let resolved_binding = mtwt_resolve(*cxbind); // find all the xx varrefs: - let mut path_finder = new_path_finder(~[]); + let mut path_finder = new_path_finder(Vec::new()); visit::walk_crate(&mut path_finder, &cr, ()); let varrefs = path_finder.path_accumulator; // the xx binding should bind all of the xx varrefs: for (idx,v) in varrefs.iter().filter(|p| { p.segments.len() == 1 - && "xx" == token::get_ident(p.segments[0].identifier).get() + && "xx" == token::get_ident(p.segments.get(0).identifier).get() }).enumerate() { - if mtwt_resolve(v.segments[0].identifier) != resolved_binding { + if mtwt_resolve(v.segments.get(0).identifier) != + resolved_binding { println!("uh oh, xx binding didn't match xx varref:"); println!("this is xx varref \\# {:?}",idx); println!("binding: {:?}",cxbind); println!("resolves to: {:?}",resolved_binding); - println!("varref: {:?}",v.segments[0].identifier); + println!("varref: {:?}",v.segments.get(0).identifier); println!("resolves to: {:?}", - mtwt_resolve(v.segments[0].identifier)); + mtwt_resolve(v.segments.get(0).identifier)); let table = get_sctable(); println!("SC table:"); @@ -1249,17 +1247,18 @@ foo_module!() } } } - assert_eq!(mtwt_resolve(v.segments[0].identifier),resolved_binding); + assert_eq!(mtwt_resolve(v.segments.get(0).identifier), + resolved_binding); }; } #[test] fn pat_idents(){ let pat = string_to_pat(~"(a,Foo{x:c @ (b,9),y:Bar(4,d)})"); - let mut pat_idents = new_name_finder(~[]); + let mut pat_idents = new_name_finder(Vec::new()); pat_idents.visit_pat(pat, ()); assert_eq!(pat_idents.ident_accumulator, - strs_to_idents(~["a","c","b","d"])); + strs_to_idents(vec!("a","c","b","d"))); } } diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs index 2642ee00458..7752d885968 100644 --- a/src/libsyntax/ext/format.rs +++ b/src/libsyntax/ext/format.rs @@ -22,6 +22,7 @@ use rsparse = parse; use std::fmt::parse; use collections::{HashMap, HashSet}; use std::vec; +use std::vec_ng::Vec; #[deriving(Eq)] enum ArgumentType { @@ -41,20 +42,20 @@ struct Context<'a> { // Parsed argument expressions and the types that we've found so far for // them. - args: ~[@ast::Expr], - arg_types: ~[Option<ArgumentType>], + args: Vec<@ast::Expr>, + arg_types: Vec<Option<ArgumentType>>, // Parsed named expressions and the types that we've found for them so far. 
// Note that we keep a side-array of the ordering of the named arguments // found to be sure that we can translate them in the same order that they // were declared in. names: HashMap<~str, @ast::Expr>, name_types: HashMap<~str, ArgumentType>, - name_ordering: ~[~str], + name_ordering: Vec<~str>, // Collection of the compiled `rt::Piece` structures - pieces: ~[@ast::Expr], + pieces: Vec<@ast::Expr> , name_positions: HashMap<~str, uint>, - method_statics: ~[@ast::Item], + method_statics: Vec<@ast::Item> , // Updated as arguments are consumed or methods are entered nest_level: uint, @@ -70,16 +71,17 @@ struct Context<'a> { /// Some((fmtstr, unnamed arguments, ordering of named arguments, /// named arguments)) fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) - -> (@ast::Expr, Option<(@ast::Expr, ~[@ast::Expr], ~[~str], - HashMap<~str, @ast::Expr>)>) -{ - let mut args = ~[]; + -> (@ast::Expr, Option<(@ast::Expr, Vec<@ast::Expr>, Vec<~str>, + HashMap<~str, @ast::Expr>)>) { + let mut args = Vec::new(); let mut names = HashMap::<~str, @ast::Expr>::new(); - let mut order = ~[]; + let mut order = Vec::new(); let mut p = rsparse::new_parser_from_tts(ecx.parse_sess(), ecx.cfg(), - tts.to_owned()); + tts.iter() + .map(|x| (*x).clone()) + .collect()); // Parse the leading function expression (maybe a block, maybe a path) let extra = p.parse_expr(); if !p.eat(&token::COMMA) { @@ -276,14 +278,14 @@ impl<'a> Context<'a> { return; } { - let arg_type = match self.arg_types[arg] { - None => None, - Some(ref x) => Some(x) + let arg_type = match self.arg_types.get(arg) { + &None => None, + &Some(ref x) => Some(x) }; - self.verify_same(self.args[arg].span, &ty, arg_type); + self.verify_same(self.args.get(arg).span, &ty, arg_type); } - if self.arg_types[arg].is_none() { - self.arg_types[arg] = Some(ty); + if self.arg_types.get(arg).is_none() { + *self.arg_types.get_mut(arg) = Some(ty); } } @@ -357,7 +359,7 @@ impl<'a> Context<'a> { /// These attributes are applied to all statics that this syntax extension /// will generate. - fn static_attrs(&self) -> ~[ast::Attribute] { + fn static_attrs(&self) -> Vec<ast::Attribute> { // Flag statics as `address_insignificant` so LLVM can merge duplicate // globals as much as possible (which we're generating a whole lot of). 
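The argument-type checks above read a slot with `self.arg_types.get(arg)` and write it with `*self.arg_types.get_mut(arg) = Some(ty)`; in the 2014 `vec_ng` API both accessors return plain references, which is why no unwrapping appears. A rough equivalent of that read-then-update step with today's `Vec`, where `get_mut` returns an `Option`:

```rust
fn main() {
    let mut arg_types: Vec<Option<&str>> = vec![None, None];

    // was: if self.arg_types[arg].is_none() { self.arg_types[arg] = Some(ty) }
    if arg_types[0].is_none() {
        arg_types[0] = Some("int"); // plain index assignment
    }
    // Checked form of the same write:
    if let Some(slot) = arg_types.get_mut(1) {
        *slot = Some("uint");
    }
    assert_eq!(arg_types, vec![Some("int"), Some("uint")]);
}
```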
let unnamed = self.ecx @@ -371,41 +373,41 @@ impl<'a> Context<'a> { InternedString::new("dead_code")); let allow_dead_code = self.ecx.meta_list(self.fmtsp, InternedString::new("allow"), - ~[dead_code]); + vec!(dead_code)); let allow_dead_code = self.ecx.attribute(self.fmtsp, allow_dead_code); - return ~[unnamed, allow_dead_code]; + return vec!(unnamed, allow_dead_code); } - fn parsepath(&self, s: &str) -> ~[ast::Ident] { - ~[self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), - self.ecx.ident_of("parse"), self.ecx.ident_of(s)] + fn parsepath(&self, s: &str) -> Vec<ast::Ident> { + vec!(self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), + self.ecx.ident_of("parse"), self.ecx.ident_of(s)) } - fn rtpath(&self, s: &str) -> ~[ast::Ident] { - ~[self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), - self.ecx.ident_of("rt"), self.ecx.ident_of(s)] + fn rtpath(&self, s: &str) -> Vec<ast::Ident> { + vec!(self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), + self.ecx.ident_of("rt"), self.ecx.ident_of(s)) } - fn ctpath(&self, s: &str) -> ~[ast::Ident] { - ~[self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), - self.ecx.ident_of("parse"), self.ecx.ident_of(s)] + fn ctpath(&self, s: &str) -> Vec<ast::Ident> { + vec!(self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), + self.ecx.ident_of("parse"), self.ecx.ident_of(s)) } fn none(&self) -> @ast::Expr { - let none = self.ecx.path_global(self.fmtsp, ~[ + let none = self.ecx.path_global(self.fmtsp, vec!( self.ecx.ident_of("std"), self.ecx.ident_of("option"), - self.ecx.ident_of("None")]); + self.ecx.ident_of("None"))); self.ecx.expr_path(none) } fn some(&self, e: @ast::Expr) -> @ast::Expr { - let p = self.ecx.path_global(self.fmtsp, ~[ + let p = self.ecx.path_global(self.fmtsp, vec!( self.ecx.ident_of("std"), self.ecx.ident_of("option"), - self.ecx.ident_of("Some")]); + self.ecx.ident_of("Some"))); let p = self.ecx.expr_path(p); - self.ecx.expr_call(self.fmtsp, p, ~[e]) + self.ecx.expr_call(self.fmtsp, p, vec!(e)) } fn trans_count(&self, c: parse::Count) -> @ast::Expr { @@ -413,11 +415,11 @@ impl<'a> Context<'a> { match c { parse::CountIs(i) => { self.ecx.expr_call_global(sp, self.rtpath("CountIs"), - ~[self.ecx.expr_uint(sp, i)]) + vec!(self.ecx.expr_uint(sp, i))) } parse::CountIsParam(i) => { self.ecx.expr_call_global(sp, self.rtpath("CountIsParam"), - ~[self.ecx.expr_uint(sp, i)]) + vec!(self.ecx.expr_uint(sp, i))) } parse::CountImplied => { let path = self.ecx.path_global(sp, self.rtpath("CountImplied")); @@ -434,7 +436,7 @@ impl<'a> Context<'a> { }; let i = i + self.args.len(); self.ecx.expr_call_global(sp, self.rtpath("CountIsParam"), - ~[self.ecx.expr_uint(sp, i)]) + vec!(self.ecx.expr_uint(sp, i))) } } } @@ -450,21 +452,19 @@ impl<'a> Context<'a> { }).collect(); let s = token::intern_and_get_ident(arm.selector); let selector = self.ecx.expr_str(sp, s); - self.ecx.expr_struct(sp, p, ~[ + self.ecx.expr_struct(sp, p, vec!( self.ecx.field_imm(sp, self.ecx.ident_of("selector"), selector), self.ecx.field_imm(sp, self.ecx.ident_of("result"), - self.ecx.expr_vec_slice(sp, result)), - ]) + self.ecx.expr_vec_slice(sp, result)))) }).collect(); let default = default.iter().map(|p| { self.trans_piece(p) }).collect(); - self.ecx.expr_call_global(sp, self.rtpath("Select"), ~[ + self.ecx.expr_call_global(sp, self.rtpath("Select"), vec!( self.ecx.expr_vec_slice(sp, arms), - self.ecx.expr_vec_slice(sp, default), - ]) + self.ecx.expr_vec_slice(sp, default))) } parse::Plural(offset, ref arms, ref default) => { let offset = match offset { @@ -487,23 +487,21 @@ impl<'a> 
Context<'a> { } }; let selector = self.ecx.expr_call_global(sp, - lr, ~[selarg]); - self.ecx.expr_struct(sp, p, ~[ + lr, vec!(selarg)); + self.ecx.expr_struct(sp, p, vec!( self.ecx.field_imm(sp, self.ecx.ident_of("selector"), selector), self.ecx.field_imm(sp, self.ecx.ident_of("result"), - self.ecx.expr_vec_slice(sp, result)), - ]) + self.ecx.expr_vec_slice(sp, result)))) }).collect(); let default = default.iter().map(|p| { self.trans_piece(p) }).collect(); - self.ecx.expr_call_global(sp, self.rtpath("Plural"), ~[ + self.ecx.expr_call_global(sp, self.rtpath("Plural"), vec!( offset, self.ecx.expr_vec_slice(sp, arms), - self.ecx.expr_vec_slice(sp, default), - ]) + self.ecx.expr_vec_slice(sp, default))) } }; let life = self.ecx.lifetime(sp, self.ecx.ident_of("static").name); @@ -512,7 +510,7 @@ impl<'a> Context<'a> { true, self.rtpath("Method"), opt_vec::with(life), - ~[] + Vec::new() ), None); let st = ast::ItemStatic(ty, ast::MutImmutable, method); let static_name = self.ecx.ident_of(format!("__STATIC_METHOD_{}", @@ -530,13 +528,13 @@ impl<'a> Context<'a> { let s = token::intern_and_get_ident(s); self.ecx.expr_call_global(sp, self.rtpath("String"), - ~[ + vec!( self.ecx.expr_str(sp, s) - ]) + )) } parse::CurrentArgument => { let nil = self.ecx.expr_lit(sp, ast::LitNil); - self.ecx.expr_call_global(sp, self.rtpath("CurrentArgument"), ~[nil]) + self.ecx.expr_call_global(sp, self.rtpath("CurrentArgument"), vec!(nil)) } parse::Argument(ref arg) => { // Translate the position @@ -549,7 +547,7 @@ impl<'a> Context<'a> { } parse::ArgumentIs(i) => { self.ecx.expr_call_global(sp, self.rtpath("ArgumentIs"), - ~[self.ecx.expr_uint(sp, i)]) + vec!(self.ecx.expr_uint(sp, i))) } // Named arguments are converted to positional arguments at // the end of the list of arguments @@ -560,7 +558,7 @@ impl<'a> Context<'a> { }; let i = i + self.args.len(); self.ecx.expr_call_global(sp, self.rtpath("ArgumentIs"), - ~[self.ecx.expr_uint(sp, i)]) + vec!(self.ecx.expr_uint(sp, i))) } }; @@ -583,13 +581,12 @@ impl<'a> Context<'a> { let prec = self.trans_count(arg.format.precision); let width = self.trans_count(arg.format.width); let path = self.ecx.path_global(sp, self.rtpath("FormatSpec")); - let fmt = self.ecx.expr_struct(sp, path, ~[ + let fmt = self.ecx.expr_struct(sp, path, vec!( self.ecx.field_imm(sp, self.ecx.ident_of("fill"), fill), self.ecx.field_imm(sp, self.ecx.ident_of("align"), align), self.ecx.field_imm(sp, self.ecx.ident_of("flags"), flags), self.ecx.field_imm(sp, self.ecx.ident_of("precision"), prec), - self.ecx.field_imm(sp, self.ecx.ident_of("width"), width), - ]); + self.ecx.field_imm(sp, self.ecx.ident_of("width"), width))); // Translate the method (if any) let method = match arg.method { @@ -600,12 +597,11 @@ impl<'a> Context<'a> { } }; let path = self.ecx.path_global(sp, self.rtpath("Argument")); - let s = self.ecx.expr_struct(sp, path, ~[ + let s = self.ecx.expr_struct(sp, path, vec!( self.ecx.field_imm(sp, self.ecx.ident_of("position"), pos), self.ecx.field_imm(sp, self.ecx.ident_of("format"), fmt), - self.ecx.field_imm(sp, self.ecx.ident_of("method"), method), - ]); - self.ecx.expr_call_global(sp, self.rtpath("Argument"), ~[s]) + self.ecx.field_imm(sp, self.ecx.ident_of("method"), method))); + self.ecx.expr_call_global(sp, self.rtpath("Argument"), vec!(s)) } } } @@ -613,11 +609,11 @@ impl<'a> Context<'a> { /// Actually builds the expression which the iformat! 
block will be expanded /// to fn to_expr(&self, extra: @ast::Expr) -> @ast::Expr { - let mut lets = ~[]; - let mut locals = ~[]; + let mut lets = Vec::new(); + let mut locals = Vec::new(); let mut names = vec::from_fn(self.name_positions.len(), |_| None); - let mut pats = ~[]; - let mut heads = ~[]; + let mut pats = Vec::new(); + let mut heads = Vec::new(); // First, declare all of our methods that are statics for &method in self.method_statics.iter() { @@ -631,15 +627,14 @@ impl<'a> Context<'a> { let fmt = self.ecx.expr_vec(self.fmtsp, self.pieces.clone()); let piece_ty = self.ecx.ty_path(self.ecx.path_all( self.fmtsp, - true, ~[ + true, vec!( self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), self.ecx.ident_of("rt"), - self.ecx.ident_of("Piece"), - ], + self.ecx.ident_of("Piece")), opt_vec::with( self.ecx.lifetime(self.fmtsp, self.ecx.ident_of("static").name)), - ~[] + Vec::new() ), None); let ty = ast::TyFixedLengthVec( piece_ty, @@ -661,7 +656,9 @@ impl<'a> Context<'a> { // of each variable because we don't want to move out of the arguments // passed to this function. for (i, &e) in self.args.iter().enumerate() { - if self.arg_types[i].is_none() { continue } // error already generated + if self.arg_types.get(i).is_none() { + continue // error already generated + } let name = self.ecx.ident_of(format!("__arg{}", i)); pats.push(self.ecx.pat_ident(e.span, name)); @@ -696,18 +693,17 @@ impl<'a> Context<'a> { // Now create the fmt::Arguments struct with all our locals we created. let fmt = self.ecx.expr_ident(self.fmtsp, static_name); let args_slice = self.ecx.expr_ident(self.fmtsp, slicename); - let result = self.ecx.expr_call_global(self.fmtsp, ~[ + let result = self.ecx.expr_call_global(self.fmtsp, vec!( self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), self.ecx.ident_of("Arguments"), - self.ecx.ident_of("new"), - ], ~[fmt, args_slice]); + self.ecx.ident_of("new")), vec!(fmt, args_slice)); // We did all the work of making sure that the arguments // structure is safe, so we can safely have an unsafe block. let result = self.ecx.expr_block(P(ast::Block { - view_items: ~[], - stmts: ~[], + view_items: Vec::new(), + stmts: Vec::new(), expr: Some(result), id: ast::DUMMY_NODE_ID, rules: ast::UnsafeBlock(ast::CompilerGenerated), @@ -716,8 +712,8 @@ impl<'a> Context<'a> { let resname = self.ecx.ident_of("__args"); lets.push(self.ecx.stmt_let(self.fmtsp, false, resname, result)); let res = self.ecx.expr_ident(self.fmtsp, resname); - let result = self.ecx.expr_call(extra.span, extra, ~[ - self.ecx.expr_addr_of(extra.span, res)]); + let result = self.ecx.expr_call(extra.span, extra, vec!( + self.ecx.expr_addr_of(extra.span, res))); let body = self.ecx.expr_block(self.ecx.block(self.fmtsp, lets, Some(result))); @@ -749,15 +745,15 @@ impl<'a> Context<'a> { // But the nested match expression is proved to perform not as well // as series of let's; the first approach does. 
let pat = self.ecx.pat(self.fmtsp, ast::PatTup(pats)); - let arm = self.ecx.arm(self.fmtsp, ~[pat], body); + let arm = self.ecx.arm(self.fmtsp, vec!(pat), body); let head = self.ecx.expr(self.fmtsp, ast::ExprTup(heads)); - self.ecx.expr_match(self.fmtsp, head, ~[arm]) + self.ecx.expr_match(self.fmtsp, head, vec!(arm)) } fn format_arg(&self, sp: Span, argno: Position, arg: @ast::Expr) -> @ast::Expr { let ty = match argno { - Exact(ref i) => self.arg_types[*i].get_ref(), + Exact(ref i) => self.arg_types.get(*i).get_ref(), Named(ref s) => self.name_types.get(s) }; @@ -787,31 +783,27 @@ impl<'a> Context<'a> { } } String => { - return self.ecx.expr_call_global(sp, ~[ + return self.ecx.expr_call_global(sp, vec!( self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), - self.ecx.ident_of("argumentstr"), - ], ~[arg]) + self.ecx.ident_of("argumentstr")), vec!(arg)) } Unsigned => { - return self.ecx.expr_call_global(sp, ~[ + return self.ecx.expr_call_global(sp, vec!( self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), - self.ecx.ident_of("argumentuint"), - ], ~[arg]) + self.ecx.ident_of("argumentuint")), vec!(arg)) } }; - let format_fn = self.ecx.path_global(sp, ~[ + let format_fn = self.ecx.path_global(sp, vec!( self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), - self.ecx.ident_of(fmt_fn), - ]); - self.ecx.expr_call_global(sp, ~[ + self.ecx.ident_of(fmt_fn))); + self.ecx.expr_call_global(sp, vec!( self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), - self.ecx.ident_of("argument"), - ], ~[self.ecx.expr_path(format_fn), arg]) + self.ecx.ident_of("argument")), vec!(self.ecx.expr_path(format_fn), arg)) } } @@ -832,10 +824,10 @@ pub fn expand_args(ecx: &mut ExtCtxt, sp: Span, /// expression. pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, extra: @ast::Expr, - efmt: @ast::Expr, args: ~[@ast::Expr], - name_ordering: ~[~str], + efmt: @ast::Expr, args: Vec<@ast::Expr>, + name_ordering: Vec<~str>, names: HashMap<~str, @ast::Expr>) -> @ast::Expr { - let arg_types = vec::from_fn(args.len(), |_| None); + let arg_types = Vec::from_fn(args.len(), |_| None); let mut cx = Context { ecx: ecx, args: args, @@ -846,8 +838,8 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, name_ordering: name_ordering, nest_level: 0, next_arg: 0, - pieces: ~[], - method_statics: ~[], + pieces: Vec::new(), + method_statics: Vec::new(), fmtsp: sp, }; cx.fmtsp = efmt.span; @@ -884,7 +876,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, // Make sure that all arguments were used and all arguments have types. 
for (i, ty) in cx.arg_types.iter().enumerate() { if ty.is_none() { - cx.ecx.span_err(cx.args[i].span, "argument never used"); + cx.ecx.span_err(cx.args.get(i).span, "argument never used"); } } for (name, e) in cx.names.iter() { diff --git a/src/libsyntax/ext/log_syntax.rs b/src/libsyntax/ext/log_syntax.rs index 5ee4084d207..b94928238e9 100644 --- a/src/libsyntax/ext/log_syntax.rs +++ b/src/libsyntax/ext/log_syntax.rs @@ -20,7 +20,8 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, -> base::MacResult { cx.print_backtrace(); - println!("{}", print::pprust::tt_to_str(&ast::TTDelim(@tt.to_owned()))); + println!("{}", print::pprust::tt_to_str(&ast::TTDelim( + @tt.iter().map(|x| (*x).clone()).collect()))); //trivial expression MRExpr(@ast::Expr { diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 35a5cbd235a..e96597d4159 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -17,6 +17,8 @@ use parse::token::*; use parse::token; use parse; +use std::vec_ng::Vec; + /** * * Quasiquoting works via token trees. @@ -35,17 +37,19 @@ pub mod rt { use parse; use print::pprust; + use std::vec_ng::Vec; + pub use ast::*; pub use parse::token::*; pub use parse::new_parser_from_tts; pub use codemap::{BytePos, Span, dummy_spanned}; pub trait ToTokens { - fn to_tokens(&self, _cx: &ExtCtxt) -> ~[TokenTree]; + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> ; } - impl ToTokens for ~[TokenTree] { - fn to_tokens(&self, _cx: &ExtCtxt) -> ~[TokenTree] { + impl ToTokens for Vec<TokenTree> { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { (*self).clone() } } @@ -201,7 +205,7 @@ pub mod rt { macro_rules! impl_to_tokens( ($t:ty) => ( impl ToTokens for $t { - fn to_tokens(&self, cx: &ExtCtxt) -> ~[TokenTree] { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { cx.parse_tts(self.to_source()) } } @@ -211,7 +215,7 @@ pub mod rt { macro_rules! 
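Several call sites above (`log_syntax`, the quasiquoter, and more later in the patch) turn a borrowed `&[TokenTree]` into an owned vector by cloning element by element, replacing the old `tts.to_owned()`. That spelled-out form still compiles today; `to_vec()` and `iter().cloned()` are the usual shorthands:

```rust
fn main() {
    let tts: &[String] = &["a".to_string(), "b".to_string()];

    // The form used in the diff: clone each element, collect into a Vec.
    let owned: Vec<String> = tts.iter().map(|x| (*x).clone()).collect();

    // Equivalent shorthands with the current slice API.
    assert_eq!(owned, tts.to_vec());
    assert_eq!(owned, tts.iter().cloned().collect::<Vec<String>>());
}
```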
impl_to_tokens_self( ($t:ty) => ( impl<'a> ToTokens for $t { - fn to_tokens(&self, cx: &ExtCtxt) -> ~[TokenTree] { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { cx.parse_tts(self.to_source()) } } @@ -242,7 +246,7 @@ pub mod rt { fn parse_item(&self, s: ~str) -> @ast::Item; fn parse_expr(&self, s: ~str) -> @ast::Expr; fn parse_stmt(&self, s: ~str) -> @ast::Stmt; - fn parse_tts(&self, s: ~str) -> ~[ast::TokenTree]; + fn parse_tts(&self, s: ~str) -> Vec<ast::TokenTree> ; } impl<'a> ExtParseUtils for ExtCtxt<'a> { @@ -266,7 +270,7 @@ pub mod rt { parse::parse_stmt_from_source_str("<quote expansion>".to_str(), s, self.cfg(), - ~[], + Vec::new(), self.parse_sess()) } @@ -277,7 +281,7 @@ pub mod rt { self.parse_sess()) } - fn parse_tts(&self, s: ~str) -> ~[ast::TokenTree] { + fn parse_tts(&self, s: ~str) -> Vec<ast::TokenTree> { parse::parse_tts_from_source_str("<quote expansion>".to_str(), s, self.cfg(), @@ -298,16 +302,16 @@ pub fn expand_quote_tokens(cx: &mut ExtCtxt, pub fn expand_quote_expr(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { - let expanded = expand_parse_call(cx, sp, "parse_expr", ~[], tts); + let expanded = expand_parse_call(cx, sp, "parse_expr", Vec::new(), tts); base::MRExpr(expanded) } pub fn expand_quote_item(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { - let e_attrs = cx.expr_vec_uniq(sp, ~[]); + let e_attrs = cx.expr_vec_ng(sp); let expanded = expand_parse_call(cx, sp, "parse_item", - ~[e_attrs], tts); + vec!(e_attrs), tts); base::MRExpr(expanded) } @@ -316,7 +320,7 @@ pub fn expand_quote_pat(cx: &mut ExtCtxt, tts: &[ast::TokenTree]) -> base::MacResult { let e_refutable = cx.expr_lit(sp, ast::LitBool(true)); let expanded = expand_parse_call(cx, sp, "parse_pat", - ~[e_refutable], tts); + vec!(e_refutable), tts); base::MRExpr(expanded) } @@ -325,20 +329,20 @@ pub fn expand_quote_ty(cx: &mut ExtCtxt, tts: &[ast::TokenTree]) -> base::MacResult { let e_param_colons = cx.expr_lit(sp, ast::LitBool(false)); let expanded = expand_parse_call(cx, sp, "parse_ty", - ~[e_param_colons], tts); + vec!(e_param_colons), tts); base::MRExpr(expanded) } pub fn expand_quote_stmt(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { - let e_attrs = cx.expr_vec_uniq(sp, ~[]); + let e_attrs = cx.expr_vec_ng(sp); let expanded = expand_parse_call(cx, sp, "parse_stmt", - ~[e_attrs], tts); + vec!(e_attrs), tts); base::MRExpr(expanded) } -fn ids_ext(strs: ~[~str]) -> ~[ast::Ident] { +fn ids_ext(strs: Vec<~str> ) -> Vec<ast::Ident> { strs.map(|str| str_to_ident(*str)) } @@ -352,7 +356,7 @@ fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> @ast::Expr { cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("ext_cx")), id_ext("ident_of"), - ~[e_str]) + vec!(e_str)) } fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOp) -> @ast::Expr { @@ -377,18 +381,18 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> @ast::Expr { BINOP(binop) => { return cx.expr_call_ident(sp, id_ext("BINOP"), - ~[mk_binop(cx, sp, binop)]); + vec!(mk_binop(cx, sp, binop))); } BINOPEQ(binop) => { return cx.expr_call_ident(sp, id_ext("BINOPEQ"), - ~[mk_binop(cx, sp, binop)]); + vec!(mk_binop(cx, sp, binop))); } LIT_CHAR(i) => { let e_char = cx.expr_lit(sp, ast::LitChar(i)); - return cx.expr_call_ident(sp, id_ext("LIT_CHAR"), ~[e_char]); + return cx.expr_call_ident(sp, id_ext("LIT_CHAR"), vec!(e_char)); } LIT_INT(i, ity) => { @@ -405,7 +409,7 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> @ast::Expr { return 
cx.expr_call_ident(sp, id_ext("LIT_INT"), - ~[e_i64, e_ity]); + vec!(e_i64, e_ity)); } LIT_UINT(u, uty) => { @@ -422,7 +426,7 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> @ast::Expr { return cx.expr_call_ident(sp, id_ext("LIT_UINT"), - ~[e_u64, e_uty]); + vec!(e_u64, e_uty)); } LIT_INT_UNSUFFIXED(i) => { @@ -430,7 +434,7 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> @ast::Expr { return cx.expr_call_ident(sp, id_ext("LIT_INT_UNSUFFIXED"), - ~[e_i64]); + vec!(e_i64)); } LIT_FLOAT(fident, fty) => { @@ -444,39 +448,39 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> @ast::Expr { return cx.expr_call_ident(sp, id_ext("LIT_FLOAT"), - ~[e_fident, e_fty]); + vec!(e_fident, e_fty)); } LIT_STR(ident) => { return cx.expr_call_ident(sp, id_ext("LIT_STR"), - ~[mk_ident(cx, sp, ident)]); + vec!(mk_ident(cx, sp, ident))); } LIT_STR_RAW(ident, n) => { return cx.expr_call_ident(sp, id_ext("LIT_STR_RAW"), - ~[mk_ident(cx, sp, ident), - cx.expr_uint(sp, n)]); + vec!(mk_ident(cx, sp, ident), + cx.expr_uint(sp, n))); } IDENT(ident, b) => { return cx.expr_call_ident(sp, id_ext("IDENT"), - ~[mk_ident(cx, sp, ident), - cx.expr_bool(sp, b)]); + vec!(mk_ident(cx, sp, ident), + cx.expr_bool(sp, b))); } LIFETIME(ident) => { return cx.expr_call_ident(sp, id_ext("LIFETIME"), - ~[mk_ident(cx, sp, ident)]); + vec!(mk_ident(cx, sp, ident))); } DOC_COMMENT(ident) => { return cx.expr_call_ident(sp, id_ext("DOC_COMMENT"), - ~[mk_ident(cx, sp, ident)]); + vec!(mk_ident(cx, sp, ident))); } INTERPOLATED(_) => fail!("quote! with interpolated token"), @@ -523,7 +527,7 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> @ast::Expr { } -fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> ~[@ast::Stmt] { +fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec<@ast::Stmt> { match *tt { @@ -531,16 +535,16 @@ fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> ~[@ast::Stmt] { let e_sp = cx.expr_ident(sp, id_ext("_sp")); let e_tok = cx.expr_call_ident(sp, id_ext("TTTok"), - ~[e_sp, mk_token(cx, sp, tok)]); + vec!(e_sp, mk_token(cx, sp, tok))); let e_push = cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("tt")), id_ext("push"), - ~[e_tok]); - ~[cx.stmt_expr(e_push)] + vec!(e_tok)); + vec!(cx.stmt_expr(e_push)) } - ast::TTDelim(ref tts) => mk_tts(cx, sp, **tts), + ast::TTDelim(ref tts) => mk_tts(cx, sp, tts.as_slice()), ast::TTSeq(..) 
=> fail!("TTSeq in quote!"), ast::TTNonterminal(sp, ident) => { @@ -551,22 +555,22 @@ fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> ~[@ast::Stmt] { cx.expr_method_call(sp, cx.expr_ident(sp, ident), id_ext("to_tokens"), - ~[cx.expr_ident(sp, id_ext("ext_cx"))]); + vec!(cx.expr_ident(sp, id_ext("ext_cx")))); let e_push = cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("tt")), id_ext("push_all_move"), - ~[e_to_toks]); + vec!(e_to_toks)); - ~[cx.stmt_expr(e_push)] + vec!(cx.stmt_expr(e_push)) } } } fn mk_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) - -> ~[@ast::Stmt] { - let mut ss = ~[]; + -> Vec<@ast::Stmt> { + let mut ss = Vec::new(); for tt in tts.iter() { ss.push_all_move(mk_tt(cx, sp, tt)); } @@ -583,7 +587,9 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let mut p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), - tts.to_owned()); + tts.iter() + .map(|x| (*x).clone()) + .collect()); p.quote_depth += 1u; let cx_expr = p.parse_expr(); @@ -623,20 +629,20 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let e_sp = cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("ext_cx")), id_ext("call_site"), - ~[]); + Vec::new()); let stmt_let_sp = cx.stmt_let(sp, false, id_ext("_sp"), e_sp); - let stmt_let_tt = cx.stmt_let(sp, true, - id_ext("tt"), - cx.expr_vec_uniq(sp, ~[])); + let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp)); + let mut vector = vec!(stmt_let_sp, stmt_let_tt); + vector.push_all_move(mk_tts(cx, sp, tts.as_slice())); let block = cx.expr_block( cx.block_all(sp, - ~[], - ~[stmt_let_sp, stmt_let_tt] + mk_tts(cx, sp, tts), + Vec::new(), + vector, Some(cx.expr_ident(sp, id_ext("tt"))))); (cx_expr, block) @@ -646,36 +652,36 @@ fn expand_wrapper(cx: &ExtCtxt, sp: Span, cx_expr: @ast::Expr, expr: @ast::Expr) -> @ast::Expr { - let uses = ~[ cx.view_use_glob(sp, ast::Inherited, - ids_ext(~[~"syntax", + let uses = vec!( cx.view_use_glob(sp, ast::Inherited, + ids_ext(vec!(~"syntax", ~"ext", ~"quote", - ~"rt"])) ]; + ~"rt"))) ); let stmt_let_ext_cx = cx.stmt_let(sp, false, id_ext("ext_cx"), cx_expr); - cx.expr_block(cx.block_all(sp, uses, ~[stmt_let_ext_cx], Some(expr))) + cx.expr_block(cx.block_all(sp, uses, vec!(stmt_let_ext_cx), Some(expr))) } fn expand_parse_call(cx: &ExtCtxt, sp: Span, parse_method: &str, - arg_exprs: ~[@ast::Expr], + arg_exprs: Vec<@ast::Expr> , tts: &[ast::TokenTree]) -> @ast::Expr { let (cx_expr, tts_expr) = expand_tts(cx, sp, tts); let cfg_call = || cx.expr_method_call( sp, cx.expr_ident(sp, id_ext("ext_cx")), - id_ext("cfg"), ~[]); + id_ext("cfg"), Vec::new()); let parse_sess_call = || cx.expr_method_call( sp, cx.expr_ident(sp, id_ext("ext_cx")), - id_ext("parse_sess"), ~[]); + id_ext("parse_sess"), Vec::new()); let new_parser_call = cx.expr_call(sp, cx.expr_ident(sp, id_ext("new_parser_from_tts")), - ~[parse_sess_call(), cfg_call(), tts_expr]); + vec!(parse_sess_call(), cfg_call(), tts_expr)); let expr = cx.expr_method_call(sp, new_parser_call, id_ext(parse_method), arg_exprs); diff --git a/src/libsyntax/ext/registrar.rs b/src/libsyntax/ext/registrar.rs index f0bad1b40eb..4c18eb83afc 100644 --- a/src/libsyntax/ext/registrar.rs +++ b/src/libsyntax/ext/registrar.rs @@ -15,15 +15,18 @@ use diagnostic; use visit; use visit::Visitor; +use std::vec_ng::Vec; + struct MacroRegistrarContext { - registrars: ~[(ast::NodeId, Span)], + registrars: Vec<(ast::NodeId, Span)> , } impl Visitor<()> for MacroRegistrarContext { fn visit_item(&mut self, item: &ast::Item, _: ()) { match item.node { 
ast::ItemFn(..) => { - if attr::contains_name(item.attrs, "macro_registrar") { + if attr::contains_name(item.attrs.as_slice(), + "macro_registrar") { self.registrars.push((item.id, item.span)); } } @@ -36,7 +39,7 @@ impl Visitor<()> for MacroRegistrarContext { pub fn find_macro_registrar(diagnostic: @diagnostic::SpanHandler, krate: &ast::Crate) -> Option<ast::DefId> { - let mut ctx = MacroRegistrarContext { registrars: ~[] }; + let mut ctx = MacroRegistrarContext { registrars: Vec::new() }; visit::walk_crate(&mut ctx, krate, ()); match ctx.registrars.len() { diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index c81ee55c237..b31388f58eb 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -142,6 +142,7 @@ pub fn expand_include_bin(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) return MacResult::dummy_expr(sp); } Ok(bytes) => { + let bytes = bytes.iter().map(|x| *x).collect(); base::MRExpr(cx.expr_lit(sp, ast::LitBinary(Rc::new(bytes)))) } } diff --git a/src/libsyntax/ext/trace_macros.rs b/src/libsyntax/ext/trace_macros.rs index db2c9dcddb6..183cccde18e 100644 --- a/src/libsyntax/ext/trace_macros.rs +++ b/src/libsyntax/ext/trace_macros.rs @@ -24,7 +24,7 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt, let cfg = cx.cfg(); let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic, None, - tt.to_owned()); + tt.iter().map(|x| (*x).clone()).collect()); let mut rust_parser = Parser(sess, cfg.clone(), tt_rdr.dup()); if rust_parser.is_keyword(keywords::True) { diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index edd875a57a7..c9d3150c2cd 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -22,7 +22,7 @@ use parse::token::{Token, EOF, Nonterminal}; use parse::token; use collections::HashMap; -use std::vec; +use std::vec_ng::Vec; /* This is an Earley-like parser, without support for in-grammar nonterminals, only by calling out to the main rust parser for named nonterminals (which it @@ -99,11 +99,11 @@ nonempty body. */ #[deriving(Clone)] pub struct MatcherPos { - elts: ~[ast::Matcher], // maybe should be <'>? Need to understand regions. + elts: Vec<ast::Matcher> , // maybe should be <'>? Need to understand regions. sep: Option<Token>, idx: uint, up: Option<~MatcherPos>, - matches: ~[~[@NamedMatch]], + matches: Vec<Vec<@NamedMatch>>, match_lo: uint, match_hi: uint, sp_lo: BytePos, } @@ -112,12 +112,14 @@ pub fn count_names(ms: &[Matcher]) -> uint { ms.iter().fold(0, |ct, m| { ct + match m.node { MatchTok(_) => 0u, - MatchSeq(ref more_ms, _, _, _, _) => count_names((*more_ms)), + MatchSeq(ref more_ms, _, _, _, _) => { + count_names(more_ms.as_slice()) + } MatchNonterminal(_, _, _) => 1u }}) } -pub fn initial_matcher_pos(ms: ~[Matcher], sep: Option<Token>, lo: BytePos) +pub fn initial_matcher_pos(ms: Vec<Matcher> , sep: Option<Token>, lo: BytePos) -> ~MatcherPos { let mut match_idx_hi = 0u; for elt in ms.iter() { @@ -131,7 +133,7 @@ pub fn initial_matcher_pos(ms: ~[Matcher], sep: Option<Token>, lo: BytePos) } } } - let matches = vec::from_fn(count_names(ms), |_i| ~[]); + let matches = Vec::from_fn(count_names(ms.as_slice()), |_i| Vec::new()); ~MatcherPos { elts: ms, sep: sep, @@ -164,7 +166,7 @@ pub fn initial_matcher_pos(ms: ~[Matcher], sep: Option<Token>, lo: BytePos) // ast::Matcher it was derived from. 
pub enum NamedMatch { - MatchedSeq(~[@NamedMatch], codemap::Span), + MatchedSeq(Vec<@NamedMatch> , codemap::Span), MatchedNonterminal(Nonterminal) } @@ -206,9 +208,9 @@ pub enum ParseResult { pub fn parse_or_else<R: Reader>(sess: @ParseSess, cfg: ast::CrateConfig, rdr: R, - ms: ~[Matcher]) + ms: Vec<Matcher> ) -> HashMap<Ident, @NamedMatch> { - match parse(sess, cfg, rdr, ms) { + match parse(sess, cfg, rdr, ms.as_slice()) { Success(m) => m, Failure(sp, str) => sess.span_diagnostic.span_fatal(sp, str), Error(sp, str) => sess.span_diagnostic.span_fatal(sp, str) @@ -230,13 +232,17 @@ pub fn parse<R: Reader>(sess: @ParseSess, rdr: R, ms: &[Matcher]) -> ParseResult { - let mut cur_eis = ~[]; - cur_eis.push(initial_matcher_pos(ms.to_owned(), None, rdr.peek().sp.lo)); + let mut cur_eis = Vec::new(); + cur_eis.push(initial_matcher_pos(ms.iter() + .map(|x| (*x).clone()) + .collect(), + None, + rdr.peek().sp.lo)); loop { - let mut bb_eis = ~[]; // black-box parsed by parser.rs - let mut next_eis = ~[]; // or proceed normally - let mut eof_eis = ~[]; + let mut bb_eis = Vec::new(); // black-box parsed by parser.rs + let mut next_eis = Vec::new(); // or proceed normally + let mut eof_eis = Vec::new(); let TokenAndSpan {tok: tok, sp: sp} = rdr.peek(); @@ -274,8 +280,9 @@ pub fn parse<R: Reader>(sess: @ParseSess, // Only touch the binders we have actually bound for idx in range(ei.match_lo, ei.match_hi) { - let sub = ei.matches[idx].clone(); - new_pos.matches[idx] + let sub = (*ei.matches.get(idx)).clone(); + new_pos.matches + .get_mut(idx) .push(@MatchedSeq(sub, mk_sp(ei.sp_lo, sp.hi))); } @@ -308,7 +315,7 @@ pub fn parse<R: Reader>(sess: @ParseSess, eof_eis.push(ei); } } else { - match ei.elts[idx].node.clone() { + match ei.elts.get(idx).node.clone() { /* need to descend into sequence */ MatchSeq(ref matchers, ref sep, zero_ok, match_idx_lo, match_idx_hi) => { @@ -317,13 +324,15 @@ pub fn parse<R: Reader>(sess: @ParseSess, new_ei.idx += 1u; //we specifically matched zero repeats. for idx in range(match_idx_lo, match_idx_hi) { - new_ei.matches[idx].push(@MatchedSeq(~[], sp)); + new_ei.matches + .get_mut(idx) + .push(@MatchedSeq(Vec::new(), sp)); } cur_eis.push(new_ei); } - let matches = vec::from_elem(ei.matches.len(), ~[]); + let matches = Vec::from_elem(ei.matches.len(), Vec::new()); let ei_t = ei; cur_eis.push(~MatcherPos { elts: (*matchers).clone(), @@ -351,11 +360,11 @@ pub fn parse<R: Reader>(sess: @ParseSess, /* error messages here could be improved with links to orig. 
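The matcher machinery above pre-sizes its per-name match lists with `Vec::from_fn(n, |_i| Vec::new())` and `Vec::from_elem(n, Vec::new())`, the `vec_ng` counterparts of the old free functions `vec::from_fn` and `vec::from_elem`. Those constructors no longer exist in the standard library; roughly the same vectors today come from an iterator or the repeat form of `vec!`:

```rust
fn main() {
    let n = 3;

    // Vec::from_fn(n, |_i| Vec::new()): one fresh empty list per matcher name.
    let matches: Vec<Vec<u32>> = (0..n).map(|_| Vec::new()).collect();

    // Vec::from_elem(n, Vec::new()): n clones of a given element.
    let matches_too: Vec<Vec<u32>> = vec![Vec::new(); n];

    assert_eq!(matches, matches_too);
    assert_eq!(matches.len(), n);
}
```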
rules */ if token_name_eq(&tok, &EOF) { if eof_eis.len() == 1u { - let mut v = ~[]; - for dv in eof_eis[0u].matches.mut_iter() { + let mut v = Vec::new(); + for dv in eof_eis.get_mut(0).matches.mut_iter() { v.push(dv.pop().unwrap()); } - return Success(nameize(sess, ms, v)); + return Success(nameize(sess, ms, v.as_slice())); } else if eof_eis.len() > 1u { return Error(sp, ~"ambiguity: multiple successful parses"); } else { @@ -365,7 +374,7 @@ pub fn parse<R: Reader>(sess: @ParseSess, if (bb_eis.len() > 0u && next_eis.len() > 0u) || bb_eis.len() > 1u { let nts = bb_eis.map(|ei| { - match ei.elts[ei.idx].node { + match ei.elts.get(ei.idx).node { MatchNonterminal(bind, name, _) => { format!("{} ('{}')", token::get_ident(name), @@ -390,10 +399,10 @@ pub fn parse<R: Reader>(sess: @ParseSess, let mut rust_parser = Parser(sess, cfg.clone(), rdr.dup()); let mut ei = bb_eis.pop().unwrap(); - match ei.elts[ei.idx].node { + match ei.elts.get(ei.idx).node { MatchNonterminal(_, name, idx) => { let name_string = token::get_ident(name); - ei.matches[idx].push(@MatchedNonterminal( + ei.matches.get_mut(idx).push(@MatchedNonterminal( parse_nt(&mut rust_parser, name_string.get()))); ei.idx += 1u; } @@ -413,12 +422,12 @@ pub fn parse<R: Reader>(sess: @ParseSess, pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal { match name { - "item" => match p.parse_item(~[]) { + "item" => match p.parse_item(Vec::new()) { Some(i) => token::NtItem(i), None => p.fatal("expected an item keyword") }, "block" => token::NtBlock(p.parse_block()), - "stmt" => token::NtStmt(p.parse_stmt(~[])), + "stmt" => token::NtStmt(p.parse_stmt(Vec::new())), "pat" => token::NtPat(p.parse_pat()), "expr" => token::NtExpr(p.parse_expr()), "ty" => token::NtTy(p.parse_ty(false /* no need to disambiguate*/)), diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 45fe24ebf68..712d5f6bd27 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -25,9 +25,11 @@ use parse::token::{special_idents, gensym_ident}; use parse::token::{FAT_ARROW, SEMI, NtMatchers, NtTT, EOF}; use parse::token; use print; -use std::cell::RefCell; use util::small_vector::SmallVector; +use std::cell::RefCell; +use std::vec_ng::Vec; + struct ParserAnyMacro { parser: RefCell<Parser>, } @@ -90,8 +92,8 @@ impl AnyMacro for ParserAnyMacro { struct MacroRulesMacroExpander { name: Ident, - lhses: @~[@NamedMatch], - rhses: @~[@NamedMatch], + lhses: @Vec<@NamedMatch> , + rhses: @Vec<@NamedMatch> , } impl MacroExpander for MacroRulesMacroExpander { @@ -100,7 +102,12 @@ impl MacroExpander for MacroRulesMacroExpander { sp: Span, arg: &[ast::TokenTree]) -> MacResult { - generic_extension(cx, sp, self.name, arg, *self.lhses, *self.rhses) + generic_extension(cx, + sp, + self.name, + arg, + self.lhses.as_slice(), + self.rhses.as_slice()) } } @@ -115,7 +122,9 @@ fn generic_extension(cx: &ExtCtxt, if cx.trace_macros() { println!("{}! \\{ {} \\}", token::get_ident(name), - print::pprust::tt_to_str(&TTDelim(@arg.to_owned()))); + print::pprust::tt_to_str(&TTDelim(@arg.iter() + .map(|x| (*x).clone()) + .collect()))); } // Which arm's failure should we report? 
(the one furthest along) @@ -128,8 +137,12 @@ fn generic_extension(cx: &ExtCtxt, match **lhs { MatchedNonterminal(NtMatchers(ref mtcs)) => { // `None` is because we're not interpolating - let arg_rdr = new_tt_reader(s_d, None, arg.to_owned()); - match parse(cx.parse_sess(), cx.cfg(), arg_rdr, *mtcs) { + let arg_rdr = new_tt_reader(s_d, + None, + arg.iter() + .map(|x| (*x).clone()) + .collect()); + match parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtcs.as_slice()) { Success(named_matches) => { let rhs = match *rhses[i] { // okay, what's your transcriber? @@ -137,7 +150,10 @@ fn generic_extension(cx: &ExtCtxt, match *tt { // cut off delimiters; don't parse 'em TTDelim(ref tts) => { - (*tts).slice(1u,(*tts).len()-1u).to_owned() + (*tts).slice(1u,(*tts).len()-1u) + .iter() + .map(|x| (*x).clone()) + .collect() } _ => cx.span_fatal( sp, "macro rhs must be delimited") @@ -174,7 +190,7 @@ fn generic_extension(cx: &ExtCtxt, pub fn add_new_extension(cx: &mut ExtCtxt, sp: Span, name: Ident, - arg: ~[ast::TokenTree]) + arg: Vec<ast::TokenTree> ) -> base::MacResult { // these spans won't matter, anyways fn ms(m: Matcher_) -> Matcher { @@ -191,15 +207,14 @@ pub fn add_new_extension(cx: &mut ExtCtxt, // The grammar for macro_rules! is: // $( $lhs:mtcs => $rhs:tt );+ // ...quasiquoting this would be nice. - let argument_gram = ~[ - ms(MatchSeq(~[ + let argument_gram = vec!( + ms(MatchSeq(vec!( ms(MatchNonterminal(lhs_nm, special_idents::matchers, 0u)), ms(MatchTok(FAT_ARROW)), - ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u)), - ], Some(SEMI), false, 0u, 2u)), + ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u))), Some(SEMI), false, 0u, 2u)), //to phase into semicolon-termination instead of //semicolon-separation - ms(MatchSeq(~[ms(MatchTok(SEMI))], None, true, 2u, 2u))]; + ms(MatchSeq(vec!(ms(MatchTok(SEMI))), None, true, 2u, 2u))); // Parse the macro_rules! invocation (`none` is for no interpolations): diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index a8c9fe37226..a3f179e851a 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -18,11 +18,12 @@ use parse::token; use parse::lexer::TokenAndSpan; use std::cell::{Cell, RefCell}; +use std::vec_ng::Vec; use collections::HashMap; ///an unzipping of `TokenTree`s struct TtFrame { - forest: @~[ast::TokenTree], + forest: @Vec<ast::TokenTree> , idx: Cell<uint>, dotdotdoted: bool, sep: Option<Token>, @@ -35,8 +36,8 @@ pub struct TtReader { priv stack: RefCell<@TtFrame>, /* for MBE-style macro transcription */ priv interpolations: RefCell<HashMap<Ident, @NamedMatch>>, - priv repeat_idx: RefCell<~[uint]>, - priv repeat_len: RefCell<~[uint]>, + priv repeat_idx: RefCell<Vec<uint> >, + priv repeat_len: RefCell<Vec<uint> >, /* cached: */ cur_tok: RefCell<Token>, cur_span: RefCell<Span>, @@ -47,7 +48,7 @@ pub struct TtReader { * should) be none. 
*/ pub fn new_tt_reader(sp_diag: @SpanHandler, interp: Option<HashMap<Ident, @NamedMatch>>, - src: ~[ast::TokenTree]) + src: Vec<ast::TokenTree> ) -> TtReader { let r = TtReader { sp_diag: sp_diag, @@ -62,8 +63,8 @@ pub fn new_tt_reader(sp_diag: @SpanHandler, None => RefCell::new(HashMap::new()), Some(x) => RefCell::new(x), }, - repeat_idx: RefCell::new(~[]), - repeat_len: RefCell::new(~[]), + repeat_idx: RefCell::new(Vec::new()), + repeat_len: RefCell::new(Vec::new()), /* dummy values, never read: */ cur_tok: RefCell::new(EOF), cur_span: RefCell::new(DUMMY_SP), @@ -106,7 +107,7 @@ fn lookup_cur_matched_by_matched(r: &TtReader, start: @NamedMatch) // end of the line; duplicate henceforth ad } - MatchedSeq(ref ads, _) => ads[*idx] + MatchedSeq(ref ads, _) => *ads.get(*idx) } } let repeat_idx = r.repeat_idx.borrow(); @@ -217,7 +218,8 @@ pub fn tt_next_token(r: &TtReader) -> TokenAndSpan { r.stack.get().idx.set(0u); { let mut repeat_idx = r.repeat_idx.borrow_mut(); - repeat_idx.get()[repeat_idx.get().len() - 1u] += 1u; + let last_repeat_idx = repeat_idx.get().len() - 1u; + *repeat_idx.get().get_mut(last_repeat_idx) += 1u; } match r.stack.get().sep.clone() { Some(tk) => { @@ -231,7 +233,7 @@ pub fn tt_next_token(r: &TtReader) -> TokenAndSpan { loop { /* because it's easiest, this handles `TTDelim` not starting with a `TTTok`, even though it won't happen */ // FIXME(pcwalton): Bad copy. - match r.stack.get().forest[r.stack.get().idx.get()].clone() { + match (*r.stack.get().forest.get(r.stack.get().idx.get())).clone() { TTDelim(tts) => { r.stack.set(@TtFrame { forest: tts, diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index e62abac443e..b01ba7718ba 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -16,18 +16,20 @@ use parse::token; use opt_vec::OptVec; use util::small_vector::SmallVector; +use std::vec_ng::Vec; + // We may eventually want to be able to fold over type parameters, too. pub trait Folder { fn fold_crate(&mut self, c: Crate) -> Crate { noop_fold_crate(c, self) } - fn fold_meta_items(&mut self, meta_items: &[@MetaItem]) -> ~[@MetaItem] { - meta_items.map(|x| fold_meta_item_(*x, self)) + fn fold_meta_items(&mut self, meta_items: &[@MetaItem]) -> Vec<@MetaItem> { + meta_items.iter().map(|x| fold_meta_item_(*x, self)).collect() } - fn fold_view_paths(&mut self, view_paths: &[@ViewPath]) -> ~[@ViewPath] { - view_paths.map(|view_path| { + fn fold_view_paths(&mut self, view_paths: &[@ViewPath]) -> Vec<@ViewPath> { + view_paths.iter().map(|view_path| { let inner_view_path = match view_path.node { ViewPathSimple(ref ident, ref path, node_id) => { ViewPathSimple(ident.clone(), @@ -60,7 +62,7 @@ pub trait Folder { node: inner_view_path, span: self.new_span(view_path.span), } - }) + }).collect() } fn fold_view_item(&mut self, vi: &ViewItem) -> ViewItem { @@ -275,7 +277,7 @@ pub trait Folder { node: match macro.node { MacInvocTT(ref p, ref tts, ctxt) => { MacInvocTT(self.fold_path(p), - fold_tts(*tts, self), + fold_tts(tts.as_slice(), self), ctxt) } }, @@ -283,8 +285,8 @@ pub trait Folder { } } - fn map_exprs(&self, f: |@Expr| -> @Expr, es: &[@Expr]) -> ~[@Expr] { - es.map(|x| f(*x)) + fn map_exprs(&self, f: |@Expr| -> @Expr, es: &[@Expr]) -> Vec<@Expr> { + es.iter().map(|x| f(*x)).collect() } fn new_id(&mut self, i: NodeId) -> NodeId { @@ -370,21 +372,21 @@ fn fold_arg_<T: Folder>(a: &Arg, fld: &mut T) -> Arg { // since many token::IDENT are not necessary part of let bindings and most // token::LIFETIME are certainly not loop labels. 
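`tt_next_token` above bumps the innermost repeat counter through `repeat_idx.get().get_mut(last_repeat_idx)` instead of indexing the old `~[uint]`, computing the index before taking the mutable borrow. Stripped of the `RefCell` plumbing, the same in-place update looks like this with today's `get_mut`, which returns an `Option`:

```rust
fn bump_last(repeat_idx: &mut Vec<usize>) {
    // was: repeat_idx[repeat_idx.len() - 1] += 1 on an owned ~[uint]
    let last = repeat_idx.len() - 1;
    if let Some(counter) = repeat_idx.get_mut(last) {
        *counter += 1;
    }
}

fn main() {
    let mut idx = vec![0, 2, 5];
    bump_last(&mut idx);
    assert_eq!(idx, vec![0, 2, 6]);
}
```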
But we can't tell in their // token form. So this is less ideal and hacky but it works. -pub fn fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> ~[TokenTree] { - tts.map(|tt| { +pub fn fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree> { + tts.iter().map(|tt| { match *tt { TTTok(span, ref tok) => TTTok(span,maybe_fold_ident(tok,fld)), - TTDelim(tts) => TTDelim(@fold_tts(*tts, fld)), + TTDelim(tts) => TTDelim(@fold_tts(tts.as_slice(), fld)), TTSeq(span, pattern, ref sep, is_optional) => TTSeq(span, - @fold_tts(*pattern, fld), + @fold_tts(pattern.as_slice(), fld), sep.as_ref().map(|tok|maybe_fold_ident(tok,fld)), is_optional), TTNonterminal(sp,ref ident) => TTNonterminal(sp,fld.fold_ident(*ident)) } - }) + }).collect() } // apply ident folder if it's an ident, otherwise leave it alone @@ -518,7 +520,7 @@ pub fn noop_fold_view_item<T: Folder>(vi: &ViewItem, folder: &mut T) folder.new_id(node_id)) } ViewItemUse(ref view_paths) => { - ViewItemUse(folder.fold_view_paths(*view_paths)) + ViewItemUse(folder.fold_view_paths(view_paths.as_slice())) } }; ViewItem { @@ -881,7 +883,7 @@ mod test { // this version doesn't care about getting comments or docstrings in. fn fake_print_crate(s: &mut pprust::State, krate: &ast::Crate) -> io::IoResult<()> { - pprust::print_mod(s, &krate.module, krate.attrs) + pprust::print_mod(s, &krate.module, krate.attrs.as_slice()) } // change every identifier to "zz" diff --git a/src/libsyntax/opt_vec.rs b/src/libsyntax/opt_vec.rs index 325df0ba777..ec81fff51c7 100644 --- a/src/libsyntax/opt_vec.rs +++ b/src/libsyntax/opt_vec.rs @@ -15,20 +15,21 @@ * other useful things like `push()` and `len()`. */ -use std::vec; use std::default::Default; +use std::vec; +use std::vec_ng::Vec; #[deriving(Clone, Encodable, Decodable, Hash)] pub enum OptVec<T> { Empty, - Vec(~[T]) + Vec(Vec<T> ) } pub fn with<T>(t: T) -> OptVec<T> { - Vec(~[t]) + Vec(vec!(t)) } -pub fn from<T>(t: ~[T]) -> OptVec<T> { +pub fn from<T>(t: Vec<T> ) -> OptVec<T> { if t.len() == 0 { Empty } else { @@ -44,7 +45,7 @@ impl<T> OptVec<T> { return; } Empty => { - *self = Vec(~[t]); + *self = Vec(vec!(t)); } } } @@ -87,7 +88,7 @@ impl<T> OptVec<T> { pub fn get<'a>(&'a self, i: uint) -> &'a T { match *self { Empty => fail!("invalid index {}", i), - Vec(ref v) => &v[i] + Vec(ref v) => v.get(i) } } @@ -121,11 +122,11 @@ impl<T> OptVec<T> { } #[inline] - pub fn map_to_vec<B>(&self, op: |&T| -> B) -> ~[B] { + pub fn map_to_vec<B>(&self, op: |&T| -> B) -> Vec<B> { self.iter().map(op).collect() } - pub fn mapi_to_vec<B>(&self, op: |uint, &T| -> B) -> ~[B] { + pub fn mapi_to_vec<B>(&self, op: |uint, &T| -> B) -> Vec<B> { let mut index = 0; self.map_to_vec(|a| { let i = index; @@ -135,19 +136,19 @@ impl<T> OptVec<T> { } } -pub fn take_vec<T>(v: OptVec<T>) -> ~[T] { +pub fn take_vec<T>(v: OptVec<T>) -> Vec<T> { match v { - Empty => ~[], + Empty => Vec::new(), Vec(v) => v } } impl<T:Clone> OptVec<T> { pub fn prepend(&self, t: T) -> OptVec<T> { - let mut v0 = ~[t]; + let mut v0 = vec!(t); match *self { Empty => {} - Vec(ref v1) => { v0.push_all(*v1); } + Vec(ref v1) => { v0.push_all(v1.as_slice()); } } return Vec(v0); } diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index c9bea78d02d..0a74c7ca821 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -15,21 +15,23 @@ use parse::token; use parse::parser::Parser; use parse::token::INTERPOLATED; +use std::vec_ng::Vec; + // a parser that can parse attributes. 
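The opt_vec.rs hunk above keeps its `Empty`-or-`Vec` split even after the move to `Vec<T>`: the point of the type is to avoid allocating anything in the common empty case (in the AST it mostly holds lifetimes and type parameters, which are usually absent). A compressed sketch of that idea in today's Rust, for orientation only; the real `OptVec` carries many more methods:

    // Either no vector at all, or an owned one; Empty costs nothing.
    enum OptVec<T> {
        Empty,
        Vec(Vec<T>),
    }

    impl<T> OptVec<T> {
        fn push(&mut self, t: T) {
            match self {
                OptVec::Vec(v) => v.push(t),
                // first push: only now does a real vector get allocated
                OptVec::Empty => *self = OptVec::Vec(vec![t]),
            }
        }
        fn get(&self, i: usize) -> &T {
            match self {
                OptVec::Empty => panic!("invalid index {}", i),
                OptVec::Vec(v) => &v[i],
            }
        }
        fn len(&self) -> usize {
            match self {
                OptVec::Empty => 0,
                OptVec::Vec(v) => v.len(),
            }
        }
    }

    fn main() {
        let mut lifetimes = OptVec::Empty;
        assert_eq!(lifetimes.len(), 0);
        lifetimes.push("'a");
        assert_eq!(*lifetimes.get(0), "'a");
    }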
pub trait ParserAttr { - fn parse_outer_attributes(&mut self) -> ~[ast::Attribute]; + fn parse_outer_attributes(&mut self) -> Vec<ast::Attribute> ; fn parse_attribute(&mut self, permit_inner: bool) -> ast::Attribute; fn parse_inner_attrs_and_next(&mut self) - -> (~[ast::Attribute], ~[ast::Attribute]); + -> (Vec<ast::Attribute> , Vec<ast::Attribute> ); fn parse_meta_item(&mut self) -> @ast::MetaItem; - fn parse_meta_seq(&mut self) -> ~[@ast::MetaItem]; - fn parse_optional_meta(&mut self) -> ~[@ast::MetaItem]; + fn parse_meta_seq(&mut self) -> Vec<@ast::MetaItem> ; + fn parse_optional_meta(&mut self) -> Vec<@ast::MetaItem> ; } impl ParserAttr for Parser { // Parse attributes that appear before an item - fn parse_outer_attributes(&mut self) -> ~[ast::Attribute] { - let mut attrs: ~[ast::Attribute] = ~[]; + fn parse_outer_attributes(&mut self) -> Vec<ast::Attribute> { + let mut attrs: Vec<ast::Attribute> = Vec::new(); loop { debug!("parse_outer_attributes: self.token={:?}", self.token); @@ -116,9 +118,9 @@ impl ParserAttr for Parser { // you can make the 'next' field an Option, but the result is going to be // more useful as a vector. fn parse_inner_attrs_and_next(&mut self) - -> (~[ast::Attribute], ~[ast::Attribute]) { - let mut inner_attrs: ~[ast::Attribute] = ~[]; - let mut next_outer_attrs: ~[ast::Attribute] = ~[]; + -> (Vec<ast::Attribute> , Vec<ast::Attribute> ) { + let mut inner_attrs: Vec<ast::Attribute> = Vec::new(); + let mut next_outer_attrs: Vec<ast::Attribute> = Vec::new(); loop { let attr = match self.token { token::INTERPOLATED(token::NtAttr(..)) => { @@ -188,17 +190,17 @@ impl ParserAttr for Parser { } // matches meta_seq = ( COMMASEP(meta_item) ) - fn parse_meta_seq(&mut self) -> ~[@ast::MetaItem] { + fn parse_meta_seq(&mut self) -> Vec<@ast::MetaItem> { self.parse_seq(&token::LPAREN, &token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), |p| p.parse_meta_item()).node } - fn parse_optional_meta(&mut self) -> ~[@ast::MetaItem] { + fn parse_optional_meta(&mut self) -> Vec<@ast::MetaItem> { match self.token { token::LPAREN => self.parse_meta_seq(), - _ => ~[] + _ => Vec::new() } } } diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs index bd1c4f9babb..c2a2097de24 100644 --- a/src/libsyntax/parse/comments.rs +++ b/src/libsyntax/parse/comments.rs @@ -20,6 +20,7 @@ use parse::token; use std::io; use std::str; use std::uint; +use std::vec_ng::Vec; #[deriving(Clone, Eq)] pub enum CommentStyle { @@ -32,7 +33,7 @@ pub enum CommentStyle { #[deriving(Clone)] pub struct Comment { style: CommentStyle, - lines: ~[~str], + lines: Vec<~str> , pos: BytePos } @@ -54,28 +55,28 @@ pub fn doc_comment_style(comment: &str) -> ast::AttrStyle { pub fn strip_doc_comment_decoration(comment: &str) -> ~str { /// remove whitespace-only lines from the start/end of lines - fn vertical_trim(lines: ~[~str]) -> ~[~str] { + fn vertical_trim(lines: Vec<~str> ) -> Vec<~str> { let mut i = 0u; let mut j = lines.len(); // first line of all-stars should be omitted - if lines.len() > 0 && lines[0].chars().all(|c| c == '*') { + if lines.len() > 0 && lines.get(0).chars().all(|c| c == '*') { i += 1; } - while i < j && lines[i].trim().is_empty() { + while i < j && lines.get(i).trim().is_empty() { i += 1; } // like the first, a last line of all stars should be omitted - if j > i && lines[j - 1].chars().skip(1).all(|c| c == '*') { + if j > i && lines.get(j - 1).chars().skip(1).all(|c| c == '*') { j -= 1; } - while j > i && lines[j - 1].trim().is_empty() { + while j > i && lines.get(j - 
1).trim().is_empty() { j -= 1; } - return lines.slice(i, j).to_owned(); + return lines.slice(i, j).iter().map(|x| (*x).clone()).collect(); } /// remove a "[ \t]*\*" block from each line, if possible - fn horizontal_trim(lines: ~[~str]) -> ~[~str] { + fn horizontal_trim(lines: Vec<~str> ) -> Vec<~str> { let mut i = uint::MAX; let mut can_trim = true; let mut first = true; @@ -122,7 +123,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> ~str { let lines = comment.slice(3u, comment.len() - 2u) .lines_any() .map(|s| s.to_owned()) - .collect::<~[~str]>(); + .collect::<Vec<~str> >(); let lines = vertical_trim(lines); let lines = horizontal_trim(lines); @@ -157,9 +158,9 @@ fn consume_non_eol_whitespace(rdr: &StringReader) { } } -fn push_blank_line_comment(rdr: &StringReader, comments: &mut ~[Comment]) { +fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec<Comment> ) { debug!(">>> blank-line comment"); - let v: ~[~str] = ~[]; + let v: Vec<~str> = Vec::new(); comments.push(Comment { style: BlankLine, lines: v, @@ -168,7 +169,7 @@ fn push_blank_line_comment(rdr: &StringReader, comments: &mut ~[Comment]) { } fn consume_whitespace_counting_blank_lines(rdr: &StringReader, - comments: &mut ~[Comment]) { + comments: &mut Vec<Comment> ) { while is_whitespace(rdr.curr.get()) && !is_eof(rdr) { if rdr.col.get() == CharPos(0u) && rdr.curr_is('\n') { push_blank_line_comment(rdr, &mut *comments); @@ -179,22 +180,22 @@ fn consume_whitespace_counting_blank_lines(rdr: &StringReader, fn read_shebang_comment(rdr: &StringReader, code_to_the_left: bool, - comments: &mut ~[Comment]) { + comments: &mut Vec<Comment> ) { debug!(">>> shebang comment"); let p = rdr.last_pos.get(); debug!("<<< shebang comment"); comments.push(Comment { style: if code_to_the_left { Trailing } else { Isolated }, - lines: ~[read_one_line_comment(rdr)], + lines: vec!(read_one_line_comment(rdr)), pos: p }); } fn read_line_comments(rdr: &StringReader, code_to_the_left: bool, - comments: &mut ~[Comment]) { + comments: &mut Vec<Comment> ) { debug!(">>> line comments"); let p = rdr.last_pos.get(); - let mut lines: ~[~str] = ~[]; + let mut lines: Vec<~str> = Vec::new(); while rdr.curr_is('/') && nextch_is(rdr, '/') { let line = read_one_line_comment(rdr); debug!("{}", line); @@ -232,7 +233,7 @@ fn all_whitespace(s: &str, col: CharPos) -> Option<uint> { return Some(cursor); } -fn trim_whitespace_prefix_and_push_line(lines: &mut ~[~str], +fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<~str> , s: ~str, col: CharPos) { let len = s.len(); let s1 = match all_whitespace(s, col) { @@ -249,10 +250,10 @@ fn trim_whitespace_prefix_and_push_line(lines: &mut ~[~str], fn read_block_comment(rdr: &StringReader, code_to_the_left: bool, - comments: &mut ~[Comment]) { + comments: &mut Vec<Comment> ) { debug!(">>> block comment"); let p = rdr.last_pos.get(); - let mut lines: ~[~str] = ~[]; + let mut lines: Vec<~str> = Vec::new(); let col: CharPos = rdr.col.get(); bump(rdr); bump(rdr); @@ -324,7 +325,7 @@ fn peeking_at_comment(rdr: &StringReader) -> bool { fn consume_comment(rdr: &StringReader, code_to_the_left: bool, - comments: &mut ~[Comment]) { + comments: &mut Vec<Comment> ) { debug!(">>> consume comment"); if rdr.curr_is('/') && nextch_is(rdr, '/') { read_line_comments(rdr, code_to_the_left, comments); @@ -348,15 +349,15 @@ pub fn gather_comments_and_literals(span_diagnostic: @diagnostic::SpanHandler, path: ~str, srdr: &mut io::Reader) - -> (~[Comment], ~[Literal]) { + -> (Vec<Comment> , Vec<Literal> ) { let src = 
srdr.read_to_end().unwrap(); let src = str::from_utf8_owned(src).unwrap(); let cm = CodeMap::new(); let filemap = cm.new_filemap(path, src); let rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap); - let mut comments: ~[Comment] = ~[]; - let mut literals: ~[Literal] = ~[]; + let mut comments: Vec<Comment> = Vec::new(); + let mut literals: Vec<Literal> = Vec::new(); let mut first_read: bool = true; while !is_eof(&rdr) { loop { diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs index 87706df5e31..884fc306f22 100644 --- a/src/libsyntax/parse/lexer.rs +++ b/src/libsyntax/parse/lexer.rs @@ -1005,6 +1005,7 @@ mod test { use parse::token; use parse::token::{str_to_ident}; use std::io::util; + use std::vec_ng::Vec; // represents a testing reader (incl. both reader and interner) struct Env { @@ -1048,7 +1049,7 @@ mod test { // check that the given reader produces the desired stream // of tokens (stop checking after exhausting the expected vec) - fn check_tokenization (env: Env, expected: ~[token::Token]) { + fn check_tokenization (env: Env, expected: Vec<token::Token> ) { for expected_tok in expected.iter() { let TokenAndSpan {tok:actual_tok, sp: _} = env.string_reader.next_token(); @@ -1064,32 +1065,32 @@ mod test { #[test] fn doublecolonparsing () { let env = setup (~"a b"); check_tokenization (env, - ~[mk_ident("a",false), - mk_ident("b",false)]); + vec!(mk_ident("a",false), + mk_ident("b",false))); } #[test] fn dcparsing_2 () { let env = setup (~"a::b"); check_tokenization (env, - ~[mk_ident("a",true), + vec!(mk_ident("a",true), token::MOD_SEP, - mk_ident("b",false)]); + mk_ident("b",false))); } #[test] fn dcparsing_3 () { let env = setup (~"a ::b"); check_tokenization (env, - ~[mk_ident("a",false), + vec!(mk_ident("a",false), token::MOD_SEP, - mk_ident("b",false)]); + mk_ident("b",false))); } #[test] fn dcparsing_4 () { let env = setup (~"a:: b"); check_tokenization (env, - ~[mk_ident("a",true), + vec!(mk_ident("a",true), token::MOD_SEP, - mk_ident("b",false)]); + mk_ident("b",false))); } #[test] fn character_a() { diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 8f45f911484..9e5db1770bf 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -21,6 +21,7 @@ use parse::parser::Parser; use std::cell::RefCell; use std::io::File; use std::str; +use std::vec_ng::Vec; pub mod lexer; pub mod parser; @@ -42,7 +43,7 @@ pub struct ParseSess { cm: @codemap::CodeMap, // better be the same as the one in the reader! span_diagnostic: @SpanHandler, // better be the same as the one in the reader! 
/// Used to determine and report recursive mod inclusions - included_mod_stack: RefCell<~[Path]>, + included_mod_stack: RefCell<Vec<Path> >, } pub fn new_parse_sess() -> @ParseSess { @@ -50,7 +51,7 @@ pub fn new_parse_sess() -> @ParseSess { @ParseSess { cm: cm, span_diagnostic: mk_span_handler(default_handler(), cm), - included_mod_stack: RefCell::new(~[]), + included_mod_stack: RefCell::new(Vec::new()), } } @@ -60,7 +61,7 @@ pub fn new_parse_sess_special_handler(sh: @SpanHandler, @ParseSess { cm: cm, span_diagnostic: sh, - included_mod_stack: RefCell::new(~[]), + included_mod_stack: RefCell::new(Vec::new()), } } @@ -82,7 +83,7 @@ pub fn parse_crate_attrs_from_file( input: &Path, cfg: ast::CrateConfig, sess: @ParseSess -) -> ~[ast::Attribute] { +) -> Vec<ast::Attribute> { let mut parser = new_parser_from_file(sess, cfg, input); let (inner, _) = parser.parse_inner_attrs_and_next(); return inner; @@ -104,7 +105,7 @@ pub fn parse_crate_attrs_from_source_str(name: ~str, source: ~str, cfg: ast::CrateConfig, sess: @ParseSess) - -> ~[ast::Attribute] { + -> Vec<ast::Attribute> { let mut p = new_parser_from_source_str(sess, cfg, name, @@ -144,7 +145,7 @@ pub fn parse_meta_from_source_str(name: ~str, pub fn parse_stmt_from_source_str(name: ~str, source: ~str, cfg: ast::CrateConfig, - attrs: ~[ast::Attribute], + attrs: Vec<ast::Attribute> , sess: @ParseSess) -> @ast::Stmt { let mut p = new_parser_from_source_str( @@ -160,7 +161,7 @@ pub fn parse_tts_from_source_str(name: ~str, source: ~str, cfg: ast::CrateConfig, sess: @ParseSess) - -> ~[ast::TokenTree] { + -> Vec<ast::TokenTree> { let mut p = new_parser_from_source_str( sess, cfg, @@ -214,7 +215,7 @@ pub fn filemap_to_parser(sess: @ParseSess, // compiler expands into it pub fn new_parser_from_tts(sess: @ParseSess, cfg: ast::CrateConfig, - tts: ~[ast::TokenTree]) -> Parser { + tts: Vec<ast::TokenTree> ) -> Parser { tts_to_parser(sess,tts,cfg) } @@ -256,10 +257,10 @@ pub fn string_to_filemap(sess: @ParseSess, source: ~str, path: ~str) // given a filemap, produce a sequence of token-trees pub fn filemap_to_tts(sess: @ParseSess, filemap: @FileMap) - -> ~[ast::TokenTree] { + -> Vec<ast::TokenTree> { // it appears to me that the cfg doesn't matter here... indeed, // parsing tt's probably shouldn't require a parser at all. 
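In the parse/mod.rs hunk above, `included_mod_stack` becomes a `RefCell<Vec<Path>>`: a stack of the files currently being parsed, consulted to detect and report recursive `mod` inclusion. A rough sketch of that check in today's Rust, where `PathBuf` stands in for the 2014 `Path` and the struct and method names are illustrative, not the parser's:

    use std::cell::RefCell;
    use std::path::PathBuf;

    struct Sess {
        // interior mutability, like the RefCell<Vec<Path>> field above
        included_mod_stack: RefCell<Vec<PathBuf>>,
    }

    impl Sess {
        // push a module file, refusing to re-enter one already on the stack
        fn enter_mod(&self, path: PathBuf) -> Result<(), String> {
            let mut stack = self.included_mod_stack.borrow_mut();
            if stack.contains(&path) {
                return Err(format!("circular module inclusion: {}", path.display()));
            }
            stack.push(path);
            Ok(())
        }
        fn leave_mod(&self) {
            self.included_mod_stack.borrow_mut().pop();
        }
    }

    fn main() {
        let sess = Sess { included_mod_stack: RefCell::new(Vec::new()) };
        sess.enter_mod(PathBuf::from("foo/mod.rs")).unwrap();
        assert!(sess.enter_mod(PathBuf::from("foo/mod.rs")).is_err());
        sess.leave_mod();
    }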
- let cfg = ~[]; + let cfg = Vec::new(); let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap); let mut p1 = Parser(sess, cfg, ~srdr); p1.parse_all_token_trees() @@ -267,7 +268,7 @@ pub fn filemap_to_tts(sess: @ParseSess, filemap: @FileMap) // given tts and cfg, produce a parser pub fn tts_to_parser(sess: @ParseSess, - tts: ~[ast::TokenTree], + tts: Vec<ast::TokenTree> , cfg: ast::CrateConfig) -> Parser { let trdr = lexer::new_tt_reader(sess.span_diagnostic, None, tts); Parser(sess, cfg, ~trdr) @@ -288,6 +289,7 @@ mod test { use std::io; use std::io::MemWriter; use std::str; + use std::vec_ng::Vec; use codemap::{Span, BytePos, Spanned}; use opt_vec; use ast; @@ -318,13 +320,13 @@ mod test { node: ast::ExprPath(ast::Path { span: sp(0, 1), global: false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: str_to_ident("a"), lifetimes: opt_vec::Empty, types: opt_vec::Empty, } - ], + ), }), span: sp(0, 1) }) @@ -337,7 +339,7 @@ mod test { node: ast::ExprPath(ast::Path { span: sp(0, 6), global: true, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: str_to_ident("a"), lifetimes: opt_vec::Empty, @@ -348,7 +350,7 @@ mod test { lifetimes: opt_vec::Empty, types: opt_vec::Empty, } - ] + ) }), span: sp(0, 6) }) @@ -362,27 +364,28 @@ mod test { // check the token-tree-ization of macros #[test] fn string_to_tts_macro () { let tts = string_to_tts(~"macro_rules! zip (($a)=>($a))"); - let tts: &[ast::TokenTree] = tts; + let tts: &[ast::TokenTree] = tts.as_slice(); match tts { [ast::TTTok(_,_), ast::TTTok(_,token::NOT), ast::TTTok(_,_), ast::TTDelim(delim_elts)] => { - let delim_elts: &[ast::TokenTree] = *delim_elts; + let delim_elts: &[ast::TokenTree] = delim_elts.as_slice(); match delim_elts { [ast::TTTok(_,token::LPAREN), ast::TTDelim(first_set), ast::TTTok(_,token::FAT_ARROW), ast::TTDelim(second_set), ast::TTTok(_,token::RPAREN)] => { - let first_set: &[ast::TokenTree] = *first_set; + let first_set: &[ast::TokenTree] = + first_set.as_slice(); match first_set { [ast::TTTok(_,token::LPAREN), ast::TTTok(_,token::DOLLAR), ast::TTTok(_,_), ast::TTTok(_,token::RPAREN)] => { let second_set: &[ast::TokenTree] = - *second_set; + second_set.as_slice(); match second_set { [ast::TTTok(_,token::LPAREN), ast::TTTok(_,token::DOLLAR), @@ -550,13 +553,13 @@ mod test { node:ast::ExprPath(ast::Path{ span: sp(7, 8), global: false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: str_to_ident("d"), lifetimes: opt_vec::Empty, types: opt_vec::Empty, } - ], + ), }), span:sp(7,8) })), @@ -572,13 +575,13 @@ mod test { node: ast::ExprPath(ast::Path { span:sp(0,1), global:false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: str_to_ident("b"), lifetimes: opt_vec::Empty, types: opt_vec::Empty, } - ], + ), }), span: sp(0,1)}, ast::DUMMY_NODE_ID), @@ -599,13 +602,13 @@ mod test { ast::Path { span:sp(0,1), global:false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: str_to_ident("b"), lifetimes: opt_vec::Empty, types: opt_vec::Empty, } - ], + ), }, None /* no idea */), span: sp(0,1)}); @@ -618,22 +621,22 @@ mod test { assert!(string_to_item(~"fn a (b : int) { b; }") == Some( @ast::Item{ident:str_to_ident("a"), - attrs:~[], + attrs:Vec::new(), id: ast::DUMMY_NODE_ID, node: ast::ItemFn(ast::P(ast::FnDecl { - inputs: ~[ast::Arg{ + inputs: vec!(ast::Arg{ ty: ast::P(ast::Ty{id: ast::DUMMY_NODE_ID, node: ast::TyPath(ast::Path{ span:sp(10,13), global:false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: str_to_ident("int"), lifetimes: 
opt_vec::Empty, types: opt_vec::Empty, } - ], + ), }, None, ast::DUMMY_NODE_ID), span:sp(10,13) }), @@ -644,21 +647,21 @@ mod test { ast::Path { span:sp(6,7), global:false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: str_to_ident("b"), lifetimes: opt_vec::Empty, types: opt_vec::Empty, } - ], + ), }, None // no idea ), span: sp(6,7) }, id: ast::DUMMY_NODE_ID - }], + }), output: ast::P(ast::Ty{id: ast::DUMMY_NODE_ID, node: ast::TyNil, span:sp(15,15)}), // not sure @@ -672,15 +675,15 @@ mod test { ty_params: opt_vec::Empty, }, ast::P(ast::Block { - view_items: ~[], - stmts: ~[@Spanned{ + view_items: Vec::new(), + stmts: vec!(@Spanned{ node: ast::StmtSemi(@ast::Expr{ id: ast::DUMMY_NODE_ID, node: ast::ExprPath( ast::Path{ span:sp(17,18), global:false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: str_to_ident( @@ -690,11 +693,11 @@ mod test { types: opt_vec::Empty } - ], + ), }), span: sp(17,18)}, ast::DUMMY_NODE_ID), - span: sp(17,18)}], + span: sp(17,18)}), expr: None, id: ast::DUMMY_NODE_ID, rules: ast::DefaultBlock, // no idea diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 2fd6d34adf1..9b209aadf19 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -82,7 +82,8 @@ use std::cell::Cell; use collections::HashSet; use std::kinds::marker; use std::mem::replace; -use std::vec; +use std::vec_ng::Vec; +use std::vec_ng; #[allow(non_camel_case_types)] #[deriving(Eq)] @@ -93,7 +94,7 @@ enum restriction { RESTRICT_NO_BAR_OR_DOUBLEBAR_OP, } -type ItemInfo = (Ident, Item_, Option<~[Attribute]>); +type ItemInfo = (Ident, Item_, Option<Vec<Attribute> >); /// How to parse a path. There are four different kinds of paths, all of which /// are parsed somewhat differently. @@ -129,7 +130,7 @@ pub struct PathAndBounds { enum ItemOrViewItem { // Indicates a failure to parse any kind of item. The attributes are // returned. - IoviNone(~[Attribute]), + IoviNone(Vec<Attribute> ), IoviItem(@Item), IoviForeignItem(@ForeignItem), IoviViewItem(ViewItem) @@ -257,7 +258,7 @@ macro_rules! maybe_whole ( }; match __found__ { Some(INTERPOLATED(token::$constructor(x))) => { - return (~[], x) + return (Vec::new(), x) } _ => {} } @@ -266,21 +267,20 @@ macro_rules! maybe_whole ( ) -fn maybe_append(lhs: ~[Attribute], rhs: Option<~[Attribute]>) - -> ~[Attribute] { +fn maybe_append(lhs: Vec<Attribute> , rhs: Option<Vec<Attribute> >) + -> Vec<Attribute> { match rhs { None => lhs, - Some(ref attrs) => vec::append(lhs, (*attrs)) + Some(ref attrs) => vec_ng::append(lhs, attrs.as_slice()) } } struct ParsedItemsAndViewItems { - attrs_remaining: ~[Attribute], - view_items: ~[ViewItem], - items: ~[@Item], - foreign_items: ~[@ForeignItem] -} + attrs_remaining: Vec<Attribute> , + view_items: Vec<ViewItem> , + items: Vec<@Item> , + foreign_items: Vec<@ForeignItem> } /* ident is handled by common.rs */ @@ -314,8 +314,8 @@ pub fn Parser(sess: @ParseSess, cfg: ast::CrateConfig, rdr: ~Reader:) restriction: UNRESTRICTED, quote_depth: 0, obsolete_set: HashSet::new(), - mod_path_stack: ~[], - open_braces: ~[], + mod_path_stack: Vec::new(), + open_braces: Vec::new(), nopod: marker::NoPod } } @@ -343,9 +343,9 @@ pub struct Parser { /// extra detail when the same error is seen twice obsolete_set: HashSet<ObsoleteSyntax>, /// Used to determine the path to externally loaded source files - mod_path_stack: ~[InternedString], + mod_path_stack: Vec<InternedString> , /// Stack of spans of open delimiters. Used for error message. 
- open_braces: ~[Span], + open_braces: Vec<Span> , /* do not copy the parser; its state is tied to outside state */ priv nopod: marker::NoPod } @@ -407,8 +407,11 @@ impl Parser { } else if inedible.contains(&self.token) { // leave it in the input } else { - let expected = vec::append(edible.to_owned(), inedible); - let expect = tokens_to_str(expected); + let expected = vec_ng::append(edible.iter() + .map(|x| (*x).clone()) + .collect(), + inedible); + let expect = tokens_to_str(expected.as_slice()); let actual = self.this_token_to_str(); self.fatal( if expected.len() != 1 { @@ -446,8 +449,12 @@ impl Parser { match e.node { ExprPath(..) => { // might be unit-struct construction; check for recoverableinput error. - let expected = vec::append(edible.to_owned(), inedible); - self.check_for_erroneous_unit_struct_expecting(expected); + let expected = vec_ng::append(edible.iter() + .map(|x| (*x).clone()) + .collect(), + inedible); + self.check_for_erroneous_unit_struct_expecting( + expected.as_slice()); } _ => {} } @@ -465,8 +472,12 @@ impl Parser { debug!("commit_stmt {:?}", s); let _s = s; // unused, but future checks might want to inspect `s`. if self.last_token.as_ref().map_or(false, |t| is_ident_or_path(*t)) { - let expected = vec::append(edible.to_owned(), inedible); - self.check_for_erroneous_unit_struct_expecting(expected); + let expected = vec_ng::append(edible.iter() + .map(|x| (*x).clone()) + .collect(), + inedible.as_slice()); + self.check_for_erroneous_unit_struct_expecting( + expected.as_slice()); } self.expect_one_of(edible, inedible) } @@ -578,9 +589,9 @@ impl Parser { &mut self, sep: &token::Token, f: |&mut Parser| -> T) - -> ~[T] { + -> Vec<T> { let mut first = true; - let mut vector = ~[]; + let mut vector = Vec::new(); while self.token != token::BINOP(token::OR) && self.token != token::OROR { if first { @@ -655,7 +666,7 @@ impl Parser { ket: &token::Token, sep: SeqSep, f: |&mut Parser| -> T) - -> ~[T] { + -> Vec<T> { let val = self.parse_seq_to_before_end(ket, sep, f); self.bump(); val @@ -669,9 +680,9 @@ impl Parser { ket: &token::Token, sep: SeqSep, f: |&mut Parser| -> T) - -> ~[T] { + -> Vec<T> { let mut first: bool = true; - let mut v: ~[T] = ~[]; + let mut v: Vec<T> = Vec::new(); while self.token != *ket { match sep.sep { Some(ref t) => { @@ -695,7 +706,7 @@ impl Parser { ket: &token::Token, sep: SeqSep, f: |&mut Parser| -> T) - -> ~[T] { + -> Vec<T> { self.expect(bra); let result = self.parse_seq_to_before_end(ket, sep, f); self.bump(); @@ -710,7 +721,7 @@ impl Parser { ket: &token::Token, sep: SeqSep, f: |&mut Parser| -> T) - -> Spanned<~[T]> { + -> Spanned<Vec<T> > { let lo = self.span.lo; self.expect(bra); let result = self.parse_seq_to_before_end(ket, sep, f); @@ -950,7 +961,7 @@ impl Parser { }; let inputs = if self.eat(&token::OROR) { - ~[] + Vec::new() } else { self.expect_or(); let inputs = self.parse_seq_to_before_or( @@ -1034,7 +1045,7 @@ impl Parser { } // parse the methods in a trait declaration - pub fn parse_trait_methods(&mut self) -> ~[TraitMethod] { + pub fn parse_trait_methods(&mut self) -> Vec<TraitMethod> { self.parse_unspanned_seq( &token::LBRACE, &token::RBRACE, @@ -1083,7 +1094,7 @@ impl Parser { debug!("parse_trait_methods(): parsing provided method"); let (inner_attrs, body) = p.parse_inner_attrs_and_block(); - let attrs = vec::append(attrs, inner_attrs); + let attrs = vec_ng::append(attrs, inner_attrs.as_slice()); Provided(@ast::Method { ident: ident, attrs: attrs, @@ -1176,7 +1187,7 @@ impl Parser { // (t) is a parenthesized ty // (t,) is the 
type of a tuple with only one field, // of type t - let mut ts = ~[self.parse_ty(false)]; + let mut ts = vec!(self.parse_ty(false)); let mut one_tuple = false; while self.token == token::COMMA { self.bump(); @@ -1190,7 +1201,7 @@ impl Parser { if ts.len() == 1 && !one_tuple { self.expect(&token::RPAREN); - return ts[0] + return *ts.get(0) } let t = TyTup(ts); @@ -1479,7 +1490,7 @@ impl Parser { // Parse any number of segments and bound sets. A segment is an // identifier followed by an optional lifetime and a set of types. // A bound set is a set of type parameter bounds. - let mut segments = ~[]; + let mut segments = Vec::new(); loop { // First, parse an identifier. let identifier = self.parse_ident(); @@ -1541,7 +1552,7 @@ impl Parser { let span = mk_sp(lo, self.last_span.hi); // Assemble the path segments. - let mut path_segments = ~[]; + let mut path_segments = Vec::new(); let mut bounds = None; let last_segment_index = segments.len() - 1; for (i, segment_and_bounds) in segments.move_iter().enumerate() { @@ -1690,11 +1701,11 @@ impl Parser { ExprBinary(binop, lhs, rhs) } - pub fn mk_call(&mut self, f: @Expr, args: ~[@Expr]) -> ast::Expr_ { + pub fn mk_call(&mut self, f: @Expr, args: Vec<@Expr> ) -> ast::Expr_ { ExprCall(f, args) } - fn mk_method_call(&mut self, ident: Ident, tps: ~[P<Ty>], args: ~[@Expr]) -> ast::Expr_ { + fn mk_method_call(&mut self, ident: Ident, tps: Vec<P<Ty>> , args: Vec<@Expr> ) -> ast::Expr_ { ExprMethodCall(ident, tps, args) } @@ -1702,7 +1713,7 @@ impl Parser { ExprIndex(expr, idx) } - pub fn mk_field(&mut self, expr: @Expr, ident: Ident, tys: ~[P<Ty>]) -> ast::Expr_ { + pub fn mk_field(&mut self, expr: @Expr, ident: Ident, tys: Vec<P<Ty>> ) -> ast::Expr_ { ExprField(expr, ident, tys) } @@ -1754,7 +1765,7 @@ impl Parser { let lit = @spanned(lo, hi, LitNil); return self.mk_expr(lo, hi, ExprLit(lit)); } - let mut es = ~[self.parse_expr()]; + let mut es = vec!(self.parse_expr()); self.commit_expr(*es.last().unwrap(), &[], &[token::COMMA, token::RPAREN]); while self.token == token::COMMA { self.bump(); @@ -1770,7 +1781,7 @@ impl Parser { self.commit_expr_expecting(*es.last().unwrap(), token::RPAREN); return if es.len() == 1 && !trailing_comma { - self.mk_expr(lo, hi, ExprParen(es[0])) + self.mk_expr(lo, hi, ExprParen(*es.get(0))) } else { self.mk_expr(lo, hi, ExprTup(es)) @@ -1786,8 +1797,8 @@ impl Parser { let decl = self.parse_proc_decl(); let body = self.parse_expr(); let fakeblock = P(ast::Block { - view_items: ~[], - stmts: ~[], + view_items: Vec::new(), + stmts: Vec::new(), expr: Some(body), id: ast::DUMMY_NODE_ID, rules: DefaultBlock, @@ -1840,7 +1851,7 @@ impl Parser { if self.token == token::RBRACKET { // Empty vector. self.bump(); - ex = ExprVec(~[], mutbl); + ex = ExprVec(Vec::new(), mutbl); } else { // Nonempty vector. let first_expr = self.parse_expr(); @@ -1860,11 +1871,13 @@ impl Parser { seq_sep_trailing_allowed(token::COMMA), |p| p.parse_expr() ); - ex = ExprVec(~[first_expr] + remaining_exprs, mutbl); + let mut exprs = vec!(first_expr); + exprs.push_all_move(remaining_exprs); + ex = ExprVec(exprs, mutbl); } else { // Vector with one element. self.expect(&token::RBRACKET); - ex = ExprVec(~[first_expr], mutbl); + ex = ExprVec(vec!(first_expr), mutbl); } } hi = self.last_span.hi; @@ -1919,7 +1932,7 @@ impl Parser { if self.looking_at_struct_literal() { // It's a struct literal. 
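Both the tuple-type and the tuple-expression hunks above preserve the same rule while switching to `Vec` and `.get(0)`: a single element with no trailing comma is just a parenthesized item, anything else is a tuple. A small self-contained sketch of that decision (the enum and element type are illustrative stand-ins):

    #[derive(Debug, PartialEq)]
    enum Parsed {
        Paren(i32),
        Tup(Vec<i32>),
    }

    // Mirror of the `ts.len() == 1 && !one_tuple` and
    // `es.len() == 1 && !trailing_comma` checks in the diff above.
    fn finish_paren_or_tuple(mut elems: Vec<i32>, trailing_comma: bool) -> Parsed {
        if elems.len() == 1 && !trailing_comma {
            // the single element, what the diff spells *ts.get(0) / *es.get(0)
            Parsed::Paren(elems.remove(0))
        } else {
            Parsed::Tup(elems)
        }
    }

    fn main() {
        assert_eq!(finish_paren_or_tuple(vec![1], false), Parsed::Paren(1));
        assert_eq!(finish_paren_or_tuple(vec![1], true), Parsed::Tup(vec![1]));
        assert_eq!(finish_paren_or_tuple(vec![1, 2], false), Parsed::Tup(vec![1, 2]));
    }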
self.bump(); - let mut fields = ~[]; + let mut fields = Vec::new(); let mut base = None; while self.token != token::RBRACE { @@ -1981,7 +1994,7 @@ impl Parser { self.expect(&token::LT); self.parse_generic_values_after_lt() } else { - (opt_vec::Empty, ~[]) + (opt_vec::Empty, Vec::new()) }; // expr.f() method call @@ -2143,7 +2156,7 @@ impl Parser { // Parse the open delimiter. self.open_braces.push(self.span); - let mut result = ~[parse_any_tt_tok(self)]; + let mut result = vec!(parse_any_tt_tok(self)); let trees = self.parse_seq_to_before_end(&close_delim, @@ -2163,15 +2176,15 @@ impl Parser { // parse a stream of tokens into a list of TokenTree's, // up to EOF. - pub fn parse_all_token_trees(&mut self) -> ~[TokenTree] { - let mut tts = ~[]; + pub fn parse_all_token_trees(&mut self) -> Vec<TokenTree> { + let mut tts = Vec::new(); while self.token != token::EOF { tts.push(self.parse_token_tree()); } tts } - pub fn parse_matchers(&mut self) -> ~[Matcher] { + pub fn parse_matchers(&mut self) -> Vec<Matcher> { // unification of Matcher's and TokenTree's would vastly improve // the interpolation of Matcher's maybe_whole!(self, NtMatchers); @@ -2192,8 +2205,8 @@ impl Parser { pub fn parse_matcher_subseq_upto(&mut self, name_idx: @Cell<uint>, ket: &token::Token) - -> ~[Matcher] { - let mut ret_val = ~[]; + -> Vec<Matcher> { + let mut ret_val = Vec::new(); let mut lparens = 0u; while self.token != *ket || lparens > 0u { @@ -2478,7 +2491,7 @@ impl Parser { _ => { // No argument list - `do foo {` P(FnDecl { - inputs: ~[], + inputs: Vec::new(), output: P(Ty { id: ast::DUMMY_NODE_ID, node: TyInfer, @@ -2513,8 +2526,8 @@ impl Parser { let decl = parse_decl(self); let body = parse_body(self); let fakeblock = P(ast::Block { - view_items: ~[], - stmts: ~[], + view_items: Vec::new(), + stmts: Vec::new(), expr: Some(body), id: ast::DUMMY_NODE_ID, rules: DefaultBlock, @@ -2601,7 +2614,7 @@ impl Parser { let lo = self.last_span.lo; let discriminant = self.parse_expr(); self.commit_expr_expecting(discriminant, token::LBRACE); - let mut arms: ~[Arm] = ~[]; + let mut arms: Vec<Arm> = Vec::new(); while self.token != token::RBRACE { let pats = self.parse_pats(); let mut guard = None; @@ -2622,8 +2635,8 @@ impl Parser { } let blk = P(ast::Block { - view_items: ~[], - stmts: ~[], + view_items: Vec::new(), + stmts: Vec::new(), expr: Some(expr), id: ast::DUMMY_NODE_ID, rules: DefaultBlock, @@ -2662,8 +2675,8 @@ impl Parser { } // parse patterns, separated by '|' s - fn parse_pats(&mut self) -> ~[@Pat] { - let mut pats = ~[]; + fn parse_pats(&mut self) -> Vec<@Pat> { + let mut pats = Vec::new(); loop { pats.push(self.parse_pat()); if self.token == token::BINOP(token::OR) { self.bump(); } @@ -2673,10 +2686,10 @@ impl Parser { fn parse_pat_vec_elements( &mut self, - ) -> (~[@Pat], Option<@Pat>, ~[@Pat]) { - let mut before = ~[]; + ) -> (Vec<@Pat> , Option<@Pat>, Vec<@Pat> ) { + let mut before = Vec::new(); let mut slice = None; - let mut after = ~[]; + let mut after = Vec::new(); let mut first = true; let mut before_slice = true; @@ -2733,8 +2746,8 @@ impl Parser { } // parse the fields of a struct-like pattern - fn parse_pat_fields(&mut self) -> (~[ast::FieldPat], bool) { - let mut fields = ~[]; + fn parse_pat_fields(&mut self) -> (Vec<ast::FieldPat> , bool) { + let mut fields = Vec::new(); let mut etc = false; let mut first = true; while self.token != token::RBRACE { @@ -2900,7 +2913,7 @@ impl Parser { let expr = self.mk_expr(lo, hi, ExprLit(lit)); pat = PatLit(expr); } else { - let mut fields = ~[self.parse_pat()]; + 
let mut fields = vec!(self.parse_pat()); if self.look_ahead(1, |t| *t != token::RPAREN) { while self.token == token::COMMA { self.bump(); @@ -3002,7 +3015,7 @@ impl Parser { pat = PatStruct(enum_path, fields, etc); } _ => { - let mut args: ~[@Pat] = ~[]; + let mut args: Vec<@Pat> = Vec::new(); match self.token { token::LPAREN => { let is_star = self.look_ahead(1, |t| { @@ -3128,7 +3141,7 @@ impl Parser { // parse a structure field fn parse_name_and_ty(&mut self, pr: Visibility, - attrs: ~[Attribute]) -> StructField { + attrs: Vec<Attribute> ) -> StructField { let lo = self.span.lo; if !is_plain_ident(&self.token) { self.fatal("expected ident"); @@ -3146,7 +3159,7 @@ impl Parser { // parse a statement. may include decl. // precondition: any attributes are parsed already - pub fn parse_stmt(&mut self, item_attrs: ~[Attribute]) -> @Stmt { + pub fn parse_stmt(&mut self, item_attrs: Vec<Attribute> ) -> @Stmt { maybe_whole!(self, NtStmt); fn check_expected_item(p: &mut Parser, found_attrs: bool) { @@ -3229,7 +3242,7 @@ impl Parser { self.mk_item( lo, hi, id /*id is good here*/, ItemMac(spanned(lo, hi, MacInvocTT(pth, tts, EMPTY_CTXT))), - Inherited, ~[/*no attrs*/]))), + Inherited, Vec::new(/*no attrs*/)))), ast::DUMMY_NODE_ID)); } @@ -3275,12 +3288,12 @@ impl Parser { } self.expect(&token::LBRACE); - return self.parse_block_tail_(lo, DefaultBlock, ~[]); + return self.parse_block_tail_(lo, DefaultBlock, Vec::new()); } // parse a block. Inner attrs are allowed. fn parse_inner_attrs_and_block(&mut self) - -> (~[Attribute], P<Block>) { + -> (Vec<Attribute> , P<Block>) { maybe_whole!(pair_empty self, NtBlock); @@ -3299,13 +3312,13 @@ impl Parser { // necessary, and this should take a qualifier. // some blocks start with "#{"... fn parse_block_tail(&mut self, lo: BytePos, s: BlockCheckMode) -> P<Block> { - self.parse_block_tail_(lo, s, ~[]) + self.parse_block_tail_(lo, s, Vec::new()) } // parse the rest of a block expression or function body fn parse_block_tail_(&mut self, lo: BytePos, s: BlockCheckMode, - first_item_attrs: ~[Attribute]) -> P<Block> { - let mut stmts = ~[]; + first_item_attrs: Vec<Attribute> ) -> P<Block> { + let mut stmts = Vec::new(); let mut expr = None; // wouldn't it be more uniform to parse view items only, here? @@ -3328,12 +3341,12 @@ impl Parser { while self.token != token::RBRACE { // parsing items even when they're not allowed lets us give // better error messages and recover more gracefully. 
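Another pattern that repeats through parser.rs above is `vec::append(lhs, rhs)` becoming `vec_ng::append(lhs, rhs.as_slice())`, for example in `maybe_append` and when building the `expected` token list. In today's Rust the same concatenation is usually written with `extend_from_slice`; a hedged sketch with a stand-in `Attr` type:

    #[derive(Clone, Debug, PartialEq)]
    struct Attr(&'static str);

    // Take ownership of `outer`, clone the borrowed `inner` items onto the end.
    fn append_attrs(mut outer: Vec<Attr>, inner: &[Attr]) -> Vec<Attr> {
        outer.extend_from_slice(inner);
        outer
    }

    fn main() {
        let outer = vec![Attr("doc"), Attr("inline")];
        let inner = [Attr("allow")];
        assert_eq!(
            append_attrs(outer, &inner),
            vec![Attr("doc"), Attr("inline"), Attr("allow")]
        );
    }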
- attributes_box.push_all(self.parse_outer_attributes()); + attributes_box.push_all(self.parse_outer_attributes().as_slice()); match self.token { token::SEMI => { if !attributes_box.is_empty() { self.span_err(self.last_span, "expected item after attributes"); - attributes_box = ~[]; + attributes_box = Vec::new(); } self.bump(); // empty } @@ -3342,7 +3355,7 @@ impl Parser { } _ => { let stmt = self.parse_stmt(attributes_box); - attributes_box = ~[]; + attributes_box = Vec::new(); match stmt.node { StmtExpr(e, stmt_id) => { // expression without semicolon @@ -3510,7 +3523,7 @@ impl Parser { } } - fn parse_generic_values_after_lt(&mut self) -> (OptVec<ast::Lifetime>, ~[P<Ty>]) { + fn parse_generic_values_after_lt(&mut self) -> (OptVec<ast::Lifetime>, Vec<P<Ty>> ) { let lifetimes = self.parse_lifetimes(); let result = self.parse_seq_to_gt( Some(token::COMMA), @@ -3519,9 +3532,9 @@ impl Parser { } fn parse_fn_args(&mut self, named_args: bool, allow_variadic: bool) - -> (~[Arg], bool) { + -> (Vec<Arg> , bool) { let sp = self.span; - let mut args: ~[Option<Arg>] = + let mut args: Vec<Option<Arg>> = self.parse_unspanned_seq( &token::LPAREN, &token::RPAREN, @@ -3716,7 +3729,7 @@ impl Parser { fn_inputs } token::RPAREN => { - ~[Arg::new_self(explicit_self_sp, mutbl_self)] + vec!(Arg::new_self(explicit_self_sp, mutbl_self)) } _ => { let token_str = self.this_token_to_str(); @@ -3749,7 +3762,7 @@ impl Parser { fn parse_fn_block_decl(&mut self) -> P<FnDecl> { let inputs_captures = { if self.eat(&token::OROR) { - ~[] + Vec::new() } else { self.parse_unspanned_seq( &token::BINOP(token::OR), @@ -3812,7 +3825,7 @@ impl Parser { fn mk_item(&mut self, lo: BytePos, hi: BytePos, ident: Ident, node: Item_, vis: Visibility, - attrs: ~[Attribute]) -> @Item { + attrs: Vec<Attribute> ) -> @Item { @Item { ident: ident, attrs: attrs, @@ -3832,7 +3845,7 @@ impl Parser { } // parse a method in a trait impl, starting with `attrs` attributes. - fn parse_method(&mut self, already_parsed_attrs: Option<~[Attribute]>) -> @Method { + fn parse_method(&mut self, already_parsed_attrs: Option<Vec<Attribute> >) -> @Method { let next_attrs = self.parse_outer_attributes(); let attrs = match already_parsed_attrs { Some(mut a) => { a.push_all_move(next_attrs); a } @@ -3851,7 +3864,7 @@ impl Parser { let (inner_attrs, body) = self.parse_inner_attrs_and_block(); let hi = body.span.hi; - let attrs = vec::append(attrs, inner_attrs); + let attrs = vec_ng::append(attrs, inner_attrs.as_slice()); @ast::Method { ident: ident, attrs: attrs, @@ -3877,7 +3890,7 @@ impl Parser { self.bump(); traits = self.parse_trait_ref_list(&token::LBRACE); } else { - traits = ~[]; + traits = Vec::new(); } let meths = self.parse_trait_methods(); @@ -3925,7 +3938,7 @@ impl Parser { None }; - let mut meths = ~[]; + let mut meths = Vec::new(); self.expect(&token::LBRACE); let (inner_attrs, next) = self.parse_inner_attrs_and_next(); let mut method_attrs = Some(next); @@ -3948,7 +3961,7 @@ impl Parser { } // parse B + C<~str,int> + D - fn parse_trait_ref_list(&mut self, ket: &token::Token) -> ~[TraitRef] { + fn parse_trait_ref_list(&mut self, ket: &token::Token) -> Vec<TraitRef> { self.parse_seq_to_before_end( ket, seq_sep_trailing_disallowed(token::BINOP(token::PLUS)), @@ -3961,13 +3974,13 @@ impl Parser { let class_name = self.parse_ident(); let generics = self.parse_generics(); - let mut fields: ~[StructField]; + let mut fields: Vec<StructField> ; let is_tuple_like; if self.eat(&token::LBRACE) { // It's a record-like struct. 
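The block-tail loop above buffers outer attributes in `attributes_box`, hands them to the next statement, and then resets the buffer to `Vec::new()` (or reports "expected item after attributes" if nothing follows). A toy version of that buffer-and-drain flow, with stand-in `Attr` and `Stmt` types that are not the real AST:

    #[derive(Clone, Debug)]
    struct Attr(&'static str);

    #[derive(Debug)]
    struct Stmt {
        attrs: Vec<Attr>,
        text: &'static str,
    }

    fn main() {
        // a pretend token stream: two attributes, then two statements
        let input = [
            ("attr", "deriving(Eq)"),
            ("attr", "inline"),
            ("stmt", "fn f() { }"),
            ("stmt", "g();"),
        ];
        let mut pending: Vec<Attr> = Vec::new();
        let mut stmts: Vec<Stmt> = Vec::new();
        for &(kind, text) in input.iter() {
            match kind {
                "attr" => pending.push(Attr(text)),
                _ => stmts.push(Stmt {
                    // drain the buffer and start a fresh one, like
                    // `attributes_box = Vec::new()` in the diff above
                    attrs: std::mem::take(&mut pending),
                    text,
                }),
            }
        }
        assert_eq!(stmts[0].attrs.len(), 2);
        assert!(stmts[1].attrs.is_empty());
        println!("{:?}", stmts);
    }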
is_tuple_like = false; - fields = ~[]; + fields = Vec::new(); while self.token != token::RBRACE { fields.push(self.parse_struct_decl_field()); } @@ -3998,7 +4011,7 @@ impl Parser { } else if self.eat(&token::SEMI) { // It's a unit-like struct. is_tuple_like = true; - fields = ~[]; + fields = Vec::new(); } else { let token_str = self.this_token_to_str(); self.fatal(format!("expected `\\{`, `(`, or `;` after struct \ @@ -4019,7 +4032,7 @@ impl Parser { // parse a structure field declaration pub fn parse_single_struct_field(&mut self, vis: Visibility, - attrs: ~[Attribute]) + attrs: Vec<Attribute> ) -> StructField { let a_var = self.parse_name_and_ty(vis, attrs); match self.token { @@ -4064,7 +4077,7 @@ impl Parser { // attributes (of length 0 or 1), parse all of the items in a module fn parse_mod_items(&mut self, term: token::Token, - first_item_attrs: ~[Attribute]) + first_item_attrs: Vec<Attribute> ) -> Mod { // parse all of the items up to closing or an attribute. // view items are legal here. @@ -4074,7 +4087,7 @@ impl Parser { items: starting_items, .. } = self.parse_items_and_view_items(first_item_attrs, true, true); - let mut items: ~[@Item] = starting_items; + let mut items: Vec<@Item> = starting_items; let attrs_remaining_len = attrs_remaining.len(); // don't think this other loop is even necessary.... @@ -4083,7 +4096,8 @@ impl Parser { while self.token != term { let mut attrs = self.parse_outer_attributes(); if first { - attrs = attrs_remaining + attrs; + attrs = vec_ng::append(attrs_remaining.clone(), + attrs.as_slice()); first = false; } debug!("parse_mod_items: parse_item_or_view_item(attrs={:?})", @@ -4162,10 +4176,10 @@ impl Parser { id: ast::Ident, outer_attrs: &[ast::Attribute], id_sp: Span) - -> (ast::Item_, ~[ast::Attribute]) { + -> (ast::Item_, Vec<ast::Attribute> ) { let mut prefix = Path::new(self.sess.cm.span_to_filename(self.span)); prefix.pop(); - let mod_path = Path::new(".").join_many(self.mod_path_stack); + let mod_path = Path::new(".").join_many(self.mod_path_stack.as_slice()); let dir_path = prefix.join(&mod_path); let file_path = match ::attr::first_attr_value_str_by_name( outer_attrs, "path") { @@ -4195,14 +4209,14 @@ impl Parser { }; self.eval_src_mod_from_path(file_path, - outer_attrs.to_owned(), + outer_attrs.iter().map(|x| *x).collect(), id_sp) } fn eval_src_mod_from_path(&mut self, path: Path, - outer_attrs: ~[ast::Attribute], - id_sp: Span) -> (ast::Item_, ~[ast::Attribute]) { + outer_attrs: Vec<ast::Attribute> , + id_sp: Span) -> (ast::Item_, Vec<ast::Attribute> ) { { let mut included_mod_stack = self.sess .included_mod_stack @@ -4232,7 +4246,7 @@ impl Parser { &path, id_sp); let (inner, next) = p0.parse_inner_attrs_and_next(); - let mod_attrs = vec::append(outer_attrs, inner); + let mod_attrs = vec_ng::append(outer_attrs, inner.as_slice()); let first_item_outer_attrs = next; let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs); { @@ -4246,7 +4260,7 @@ impl Parser { // parse a function declaration from a foreign module fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, - attrs: ~[Attribute]) -> @ForeignItem { + attrs: Vec<Attribute> ) -> @ForeignItem { let lo = self.span.lo; // Parse obsolete purity. 
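`eval_src_mod` above now keeps `mod_path_stack` as a `Vec<InternedString>` and joins it onto a prefix with `join_many(self.mod_path_stack.as_slice())` to find the directory a `mod foo;` should be loaded from. A rough equivalent in today's Rust, where `join_many` is not part of the standard library and the segments are plain strings (names are illustrative):

    use std::path::{Path, PathBuf};

    // Fold the stack of enclosing module names onto the source-file prefix.
    fn mod_dir(prefix: &Path, mod_path_stack: &[String]) -> PathBuf {
        mod_path_stack
            .iter()
            .fold(prefix.to_path_buf(), |dir, seg| dir.join(seg))
    }

    fn main() {
        let stack = vec!["foo".to_string(), "bar".to_string()];
        assert_eq!(mod_dir(Path::new("src"), &stack), PathBuf::from("src/foo/bar"));
    }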
@@ -4269,7 +4283,7 @@ impl Parser { // parse a static item from a foreign module fn parse_item_foreign_static(&mut self, vis: ast::Visibility, - attrs: ~[Attribute]) -> @ForeignItem { + attrs: Vec<Attribute> ) -> @ForeignItem { let lo = self.span.lo; self.expect_keyword(keywords::Static); @@ -4303,7 +4317,7 @@ impl Parser { // parse_foreign_items. fn parse_foreign_mod_items(&mut self, abis: AbiSet, - first_item_attrs: ~[Attribute]) + first_item_attrs: Vec<Attribute> ) -> ForeignMod { let ParsedItemsAndViewItems { attrs_remaining: attrs_remaining, @@ -4332,7 +4346,7 @@ impl Parser { fn parse_item_extern_crate(&mut self, lo: BytePos, visibility: Visibility, - attrs: ~[Attribute]) + attrs: Vec<Attribute> ) -> ItemOrViewItem { let (maybe_path, ident) = match self.token { @@ -4377,7 +4391,7 @@ impl Parser { lo: BytePos, opt_abis: Option<AbiSet>, visibility: Visibility, - attrs: ~[Attribute]) + attrs: Vec<Attribute> ) -> ItemOrViewItem { self.expect(&token::LBRACE); @@ -4410,7 +4424,7 @@ impl Parser { // parse a structure-like enum variant definition // this should probably be renamed or refactored... fn parse_struct_def(&mut self) -> @StructDef { - let mut fields: ~[StructField] = ~[]; + let mut fields: Vec<StructField> = Vec::new(); while self.token != token::RBRACE { fields.push(self.parse_struct_decl_field()); } @@ -4424,7 +4438,7 @@ impl Parser { // parse the part of an "enum" decl following the '{' fn parse_enum_def(&mut self, _generics: &ast::Generics) -> EnumDef { - let mut variants = ~[]; + let mut variants = Vec::new(); let mut all_nullary = true; let mut have_disr = false; while self.token != token::RBRACE { @@ -4435,7 +4449,7 @@ impl Parser { let ident; let kind; - let mut args = ~[]; + let mut args = Vec::new(); let mut disr_expr = None; ident = self.parse_ident(); if self.eat(&token::LBRACE) { @@ -4462,7 +4476,7 @@ impl Parser { disr_expr = Some(self.parse_expr()); kind = TupleVariantKind(args); } else { - kind = TupleVariantKind(~[]); + kind = TupleVariantKind(Vec::new()); } let vr = ast::Variant_ { @@ -4551,13 +4565,13 @@ impl Parser { // NB: this function no longer parses the items inside an // extern crate. fn parse_item_or_view_item(&mut self, - attrs: ~[Attribute], + attrs: Vec<Attribute> , macros_allowed: bool) -> ItemOrViewItem { match self.token { INTERPOLATED(token::NtItem(item)) => { self.bump(); - let new_attrs = vec::append(attrs, item.attrs); + let new_attrs = vec_ng::append(attrs, item.attrs.as_slice()); return IoviItem(@Item { attrs: new_attrs, ..(*item).clone() @@ -4663,7 +4677,8 @@ impl Parser { } if self.eat_keyword(keywords::Mod) { // MODULE ITEM - let (ident, item_, extra_attrs) = self.parse_item_mod(attrs); + let (ident, item_, extra_attrs) = + self.parse_item_mod(attrs.as_slice()); let item = self.mk_item(lo, self.last_span.hi, ident, @@ -4732,7 +4747,7 @@ impl Parser { // parse a foreign item; on failure, return IoviNone. fn parse_foreign_item(&mut self, - attrs: ~[Attribute], + attrs: Vec<Attribute> , macros_allowed: bool) -> ItemOrViewItem { maybe_whole!(iovi self, NtItem); @@ -4756,7 +4771,7 @@ impl Parser { // this is the fall-through for parsing items. 
fn parse_macro_use_or_failure( &mut self, - attrs: ~[Attribute], + attrs: Vec<Attribute> , macros_allowed: bool, lo: BytePos, visibility: Visibility @@ -4820,7 +4835,7 @@ impl Parser { return IoviNone(attrs); } - pub fn parse_item(&mut self, attrs: ~[Attribute]) -> Option<@Item> { + pub fn parse_item(&mut self, attrs: Vec<Attribute> ) -> Option<@Item> { match self.parse_item_or_view_item(attrs, true) { IoviNone(_) => None, IoviViewItem(_) => @@ -4854,20 +4869,20 @@ impl Parser { let path = ast::Path { span: mk_sp(lo, self.span.hi), global: false, - segments: ~[] + segments: Vec::new() }; return @spanned(lo, self.span.hi, ViewPathList(path, idents, ast::DUMMY_NODE_ID)); } let first_ident = self.parse_ident(); - let mut path = ~[first_ident]; + let mut path = vec!(first_ident); match self.token { token::EQ => { // x = foo::bar self.bump(); let path_lo = self.span.lo; - path = ~[self.parse_ident()]; + path = vec!(self.parse_ident()); while self.token == token::MOD_SEP { self.bump(); let id = self.parse_ident(); @@ -4947,7 +4962,7 @@ impl Parser { } _ => () } - let last = path[path.len() - 1u]; + let last = *path.get(path.len() - 1u); let path = ast::Path { span: mk_sp(lo, self.span.hi), global: false, @@ -4965,8 +4980,8 @@ impl Parser { } // matches view_paths = view_path | view_path , view_paths - fn parse_view_paths(&mut self) -> ~[@ViewPath] { - let mut vp = ~[self.parse_view_path()]; + fn parse_view_paths(&mut self) -> Vec<@ViewPath> { + let mut vp = vec!(self.parse_view_path()); while self.token == token::COMMA { self.bump(); self.obsolete(self.last_span, ObsoleteMultipleImport); @@ -4980,15 +4995,16 @@ impl Parser { // - mod_items uses extern_mod_allowed = true // - block_tail_ uses extern_mod_allowed = false fn parse_items_and_view_items(&mut self, - first_item_attrs: ~[Attribute], + first_item_attrs: Vec<Attribute> , mut extern_mod_allowed: bool, macros_allowed: bool) -> ParsedItemsAndViewItems { - let mut attrs = vec::append(first_item_attrs, - self.parse_outer_attributes()); + let mut attrs = vec_ng::append(first_item_attrs, + self.parse_outer_attributes() + .as_slice()); // First, parse view items. - let mut view_items : ~[ast::ViewItem] = ~[]; - let mut items = ~[]; + let mut view_items : Vec<ast::ViewItem> = Vec::new(); + let mut items = Vec::new(); // I think this code would probably read better as a single // loop with a mutable three-state-variable (for extern crates, @@ -5001,7 +5017,7 @@ impl Parser { attrs_remaining: attrs, view_items: view_items, items: items, - foreign_items: ~[] + foreign_items: Vec::new() } } IoviViewItem(view_item) => { @@ -5056,18 +5072,19 @@ impl Parser { attrs_remaining: attrs, view_items: view_items, items: items, - foreign_items: ~[] + foreign_items: Vec::new() } } // Parses a sequence of foreign items. 
Stops when it finds program // text that can't be parsed as an item - fn parse_foreign_items(&mut self, first_item_attrs: ~[Attribute], + fn parse_foreign_items(&mut self, first_item_attrs: Vec<Attribute> , macros_allowed: bool) -> ParsedItemsAndViewItems { - let mut attrs = vec::append(first_item_attrs, - self.parse_outer_attributes()); - let mut foreign_items = ~[]; + let mut attrs = vec_ng::append(first_item_attrs, + self.parse_outer_attributes() + .as_slice()); + let mut foreign_items = Vec::new(); loop { match self.parse_foreign_item(attrs, macros_allowed) { IoviNone(returned_attrs) => { @@ -5095,8 +5112,8 @@ impl Parser { ParsedItemsAndViewItems { attrs_remaining: attrs, - view_items: ~[], - items: ~[], + view_items: Vec::new(), + items: Vec::new(), foreign_items: foreign_items } } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index edc5e613f91..1499a1b4c19 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -21,6 +21,7 @@ use std::char; use std::fmt; use std::local_data; use std::path::BytesContainer; +use std::vec_ng::Vec; #[allow(non_camel_case_types)] #[deriving(Clone, Encodable, Decodable, Eq, Hash, Show)] @@ -115,7 +116,7 @@ pub enum Nonterminal { NtAttr(@ast::Attribute), // #[foo] NtPath(~ast::Path), NtTT( @ast::TokenTree), // needs @ed to break a circularity - NtMatchers(~[ast::Matcher]) + NtMatchers(Vec<ast::Matcher> ) } impl fmt::Show for Nonterminal { @@ -412,13 +413,11 @@ macro_rules! declare_special_idents_and_keywords {( // The indices here must correspond to the numbers in // special_idents, in Keyword to_ident(), and in static // constants below. - let init_vec = ~[ - $( $si_str, )* - $( $sk_str, )* - $( $rk_str, )* - ]; - - interner::StrInterner::prefill(init_vec) + let mut init_vec = Vec::new(); + $(init_vec.push($si_str);)* + $(init_vec.push($sk_str);)* + $(init_vec.push($rk_str);)* + interner::StrInterner::prefill(init_vec.as_slice()) } }} diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 14d8c662aae..e9e0e483593 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -62,7 +62,7 @@ */ use std::io; -use std::vec; +use std::vec_ng::Vec; #[deriving(Clone, Eq)] pub enum Breaks { @@ -119,7 +119,7 @@ pub fn tok_str(t: Token) -> ~str { } } -pub fn buf_str(toks: ~[Token], szs: ~[int], left: uint, right: uint, +pub fn buf_str(toks: Vec<Token> , szs: Vec<int> , left: uint, right: uint, lim: uint) -> ~str { let n = toks.len(); assert_eq!(n, szs.len()); @@ -131,7 +131,7 @@ pub fn buf_str(toks: ~[Token], szs: ~[int], left: uint, right: uint, if i != left { s.push_str(", "); } - s.push_str(format!("{}={}", szs[i], tok_str(toks[i].clone()))); + s.push_str(format!("{}={}", szs.get(i), tok_str(toks.get(i).clone()))); i += 1u; i %= n; } @@ -156,9 +156,9 @@ pub fn mk_printer(out: ~io::Writer, linewidth: uint) -> Printer { // fall behind. 
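The pp.rs changes here move the pretty-printer's ring buffers (`token`, `size`, `scan_stack`) from `~[T]` to `Vec<T>`, with reads and writes going through `.get()` / `.get_mut()` as in `buf_str` above and `mk_printer` just below, which keeps the `3 * linewidth` sizing. A stripped-down sketch of one such ring in today's Rust, with an illustrative `Token` type rather than the printer's:

    #[derive(Clone, Debug, PartialEq)]
    enum Token {
        Eof,
        Word(&'static str),
    }

    struct Ring {
        buf: Vec<Token>,
        right: usize, // index of the most recently written slot
    }

    impl Ring {
        fn new(linewidth: usize) -> Ring {
            let n = 3 * linewidth; // same sizing rule mk_printer uses below
            Ring { buf: vec![Token::Eof; n], right: 0 }
        }
        fn advance_right(&mut self, t: Token) {
            self.right = (self.right + 1) % self.buf.len();
            self.buf[self.right] = t; // the diff's `*token.get_mut(right) = t`
        }
        fn last_token(&self) -> &Token {
            &self.buf[self.right]
        }
    }

    fn main() {
        let mut r = Ring::new(4);
        r.advance_right(Token::Word("fn"));
        assert_eq!(r.last_token(), &Token::Word("fn"));
    }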
let n: uint = 3 * linewidth; debug!("mk_printer {}", linewidth); - let token: ~[Token] = vec::from_elem(n, Eof); - let size: ~[int] = vec::from_elem(n, 0); - let scan_stack: ~[uint] = vec::from_elem(n, 0u); + let token: Vec<Token> = Vec::from_elem(n, Eof); + let size: Vec<int> = Vec::from_elem(n, 0); + let scan_stack: Vec<uint> = Vec::from_elem(n, 0u); Printer { out: out, buf_len: n, @@ -174,7 +174,7 @@ pub fn mk_printer(out: ~io::Writer, linewidth: uint) -> Printer { scan_stack_empty: true, top: 0, bottom: 0, - print_stack: ~[], + print_stack: Vec::new(), pending_indentation: 0 } } @@ -264,8 +264,8 @@ pub struct Printer { space: int, // number of spaces left on line left: uint, // index of left side of input stream right: uint, // index of right side of input stream - token: ~[Token], // ring-buffr stream goes through - size: ~[int], // ring-buffer of calculated sizes + token: Vec<Token> , // ring-buffr stream goes through + size: Vec<int> , // ring-buffer of calculated sizes left_total: int, // running size of stream "...left" right_total: int, // running size of stream "...right" // pseudo-stack, really a ring too. Holds the @@ -274,23 +274,23 @@ pub struct Printer { // Begin (if there is any) on top of it. Stuff is flushed off the // bottom as it becomes irrelevant due to the primary ring-buffer // advancing. - scan_stack: ~[uint], + scan_stack: Vec<uint> , scan_stack_empty: bool, // top==bottom disambiguator top: uint, // index of top of scan_stack bottom: uint, // index of bottom of scan_stack // stack of blocks-in-progress being flushed by print - print_stack: ~[PrintStackElem], + print_stack: Vec<PrintStackElem> , // buffered indentation to avoid writing trailing whitespace pending_indentation: int, } impl Printer { pub fn last_token(&mut self) -> Token { - self.token[self.right].clone() + (*self.token.get(self.right)).clone() } // be very careful with this! 
pub fn replace_last_token(&mut self, t: Token) { - self.token[self.right] = t; + *self.token.get_mut(self.right) = t; } pub fn pretty_print(&mut self, t: Token) -> io::IoResult<()> { debug!("pp ~[{},{}]", self.left, self.right); @@ -298,8 +298,9 @@ impl Printer { Eof => { if !self.scan_stack_empty { self.check_stack(0); - let left = self.token[self.left].clone(); - try!(self.advance_left(left, self.size[self.left])); + let left = (*self.token.get(self.left)).clone(); + let left_size = *self.size.get(self.left); + try!(self.advance_left(left, left_size)); } self.indent(0); Ok(()) @@ -313,8 +314,8 @@ impl Printer { } else { self.advance_right(); } debug!("pp Begin({})/buffer ~[{},{}]", b.offset, self.left, self.right); - self.token[self.right] = t; - self.size[self.right] = -self.right_total; + *self.token.get_mut(self.right) = t; + *self.size.get_mut(self.right) = -self.right_total; self.scan_push(self.right); Ok(()) } @@ -325,8 +326,8 @@ impl Printer { } else { debug!("pp End/buffer ~[{},{}]", self.left, self.right); self.advance_right(); - self.token[self.right] = t; - self.size[self.right] = -1; + *self.token.get_mut(self.right) = t; + *self.size.get_mut(self.right) = -1; self.scan_push(self.right); Ok(()) } @@ -342,8 +343,8 @@ impl Printer { b.offset, self.left, self.right); self.check_stack(0); self.scan_push(self.right); - self.token[self.right] = t; - self.size[self.right] = -self.right_total; + *self.token.get_mut(self.right) = t; + *self.size.get_mut(self.right) = -self.right_total; self.right_total += b.blank_space; Ok(()) } @@ -356,8 +357,8 @@ impl Printer { debug!("pp String('{}')/buffer ~[{},{}]", *s, self.left, self.right); self.advance_right(); - self.token[self.right] = t.clone(); - self.size[self.right] = len; + *self.token.get_mut(self.right) = t.clone(); + *self.size.get_mut(self.right) = len; self.right_total += len; self.check_stream() } @@ -371,13 +372,15 @@ impl Printer { debug!("scan window is {}, longer than space on line ({})", self.right_total - self.left_total, self.space); if !self.scan_stack_empty { - if self.left == self.scan_stack[self.bottom] { + if self.left == *self.scan_stack.get(self.bottom) { debug!("setting {} to infinity and popping", self.left); - self.size[self.scan_pop_bottom()] = SIZE_INFINITY; + let scanned = self.scan_pop_bottom(); + *self.size.get_mut(scanned) = SIZE_INFINITY; } } - let left = self.token[self.left].clone(); - try!(self.advance_left(left, self.size[self.left])); + let left = (*self.token.get(self.left)).clone(); + let left_size = *self.size.get(self.left); + try!(self.advance_left(left, left_size)); if self.left != self.right { try!(self.check_stream()); } @@ -393,26 +396,30 @@ impl Printer { self.top %= self.buf_len; assert!((self.top != self.bottom)); } - self.scan_stack[self.top] = x; + *self.scan_stack.get_mut(self.top) = x; } pub fn scan_pop(&mut self) -> uint { assert!((!self.scan_stack_empty)); - let x = self.scan_stack[self.top]; + let x = *self.scan_stack.get(self.top); if self.top == self.bottom { self.scan_stack_empty = true; - } else { self.top += self.buf_len - 1u; self.top %= self.buf_len; } + } else { + self.top += self.buf_len - 1u; self.top %= self.buf_len; + } return x; } pub fn scan_top(&mut self) -> uint { assert!((!self.scan_stack_empty)); - return self.scan_stack[self.top]; + return *self.scan_stack.get(self.top); } pub fn scan_pop_bottom(&mut self) -> uint { assert!((!self.scan_stack_empty)); - let x = self.scan_stack[self.bottom]; + let x = *self.scan_stack.get(self.bottom); if self.top == self.bottom { 
self.scan_stack_empty = true; - } else { self.bottom += 1u; self.bottom %= self.buf_len; } + } else { + self.bottom += 1u; self.bottom %= self.buf_len; + } return x; } pub fn advance_right(&mut self) { @@ -435,8 +442,9 @@ impl Printer { if self.left != self.right { self.left += 1u; self.left %= self.buf_len; - let left = self.token[self.left].clone(); - try!(self.advance_left(left, self.size[self.left])); + let left = (*self.token.get(self.left)).clone(); + let left_size = *self.size.get(self.left); + try!(self.advance_left(left, left_size)); } ret } else { @@ -446,22 +454,28 @@ impl Printer { pub fn check_stack(&mut self, k: int) { if !self.scan_stack_empty { let x = self.scan_top(); - match self.token[x] { - Begin(_) => { + match self.token.get(x) { + &Begin(_) => { if k > 0 { - self.size[self.scan_pop()] = self.size[x] + + let popped = self.scan_pop(); + *self.size.get_mut(popped) = *self.size.get(x) + self.right_total; self.check_stack(k - 1); } } - End => { + &End => { // paper says + not =, but that makes no sense. - self.size[self.scan_pop()] = 1; + let popped = self.scan_pop(); + *self.size.get_mut(popped) = 1; self.check_stack(k + 1); } _ => { - self.size[self.scan_pop()] = self.size[x] + self.right_total; - if k > 0 { self.check_stack(k); } + let popped = self.scan_pop(); + *self.size.get_mut(popped) = *self.size.get(x) + + self.right_total; + if k > 0 { + self.check_stack(k); + } } } } @@ -481,7 +495,7 @@ impl Printer { let print_stack = &mut self.print_stack; let n = print_stack.len(); if n != 0u { - print_stack[n - 1u] + *print_stack.get(n - 1u) } else { PrintStackElem { offset: 0, diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 688494ec5ee..d027efc1d42 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -33,6 +33,7 @@ use std::char; use std::str; use std::io; use std::io::MemWriter; +use std::vec_ng::Vec; // The &mut State is stored here to prevent recursive type. 
pub enum AnnNode<'a, 'b> { @@ -60,10 +61,10 @@ pub struct State<'a> { s: pp::Printer, cm: Option<@CodeMap>, intr: @token::IdentInterner, - comments: Option<~[comments::Comment]>, - literals: Option<~[comments::Literal]>, + comments: Option<Vec<comments::Comment> >, + literals: Option<Vec<comments::Literal> >, cur_cmnt_and_lit: CurrentCommentAndLiteral, - boxes: RefCell<~[pp::Breaks]>, + boxes: RefCell<Vec<pp::Breaks> >, ann: &'a PpAnn } @@ -98,7 +99,7 @@ pub fn rust_printer_annotated<'a>(writer: ~io::Writer, ann: &'a PpAnn) -> State< cur_cmnt: 0, cur_lit: 0 }, - boxes: RefCell::new(~[]), + boxes: RefCell::new(Vec::new()), ann: ann } } @@ -140,14 +141,14 @@ pub fn print_crate(cm: @CodeMap, cur_cmnt: 0, cur_lit: 0 }, - boxes: RefCell::new(~[]), + boxes: RefCell::new(Vec::new()), ann: ann }; print_crate_(&mut s, krate) } pub fn print_crate_(s: &mut State, krate: &ast::Crate) -> io::IoResult<()> { - try!(print_mod(s, &krate.module, krate.attrs)); + try!(print_mod(s, &krate.module, krate.attrs.as_slice())); try!(print_remaining_comments(s)); try!(eof(&mut s.s)); Ok(()) @@ -319,7 +320,7 @@ pub fn in_cbox(s: &mut State) -> bool { let boxes = s.boxes.borrow(); let len = boxes.get().len(); if len == 0u { return false; } - return boxes.get()[len - 1u] == pp::Consistent; + return *boxes.get().get(len - 1u) == pp::Consistent; } pub fn hardbreak_if_not_bol(s: &mut State) -> io::IoResult<()> { @@ -463,7 +464,7 @@ pub fn print_type(s: &mut State, ty: &ast::Ty) -> io::IoResult<()> { } ast::TyTup(ref elts) => { try!(popen(s)); - try!(commasep(s, Inconsistent, *elts, print_type_ref)); + try!(commasep(s, Inconsistent, elts.as_slice(), print_type_ref)); if elts.len() == 1 { try!(word(&mut s.s, ",")); } @@ -517,7 +518,7 @@ pub fn print_foreign_item(s: &mut State, item: &ast::ForeignItem) -> io::IoResult<()> { try!(hardbreak_if_not_bol(s)); try!(maybe_print_comment(s, item.span.lo)); - try!(print_outer_attributes(s, item.attrs)); + try!(print_outer_attributes(s, item.attrs.as_slice())); match item.node { ast::ForeignItemFn(decl, ref generics) => { try!(print_fn(s, decl, None, AbiSet::Rust(), item.ident, generics, @@ -545,7 +546,7 @@ pub fn print_foreign_item(s: &mut State, pub fn print_item(s: &mut State, item: &ast::Item) -> io::IoResult<()> { try!(hardbreak_if_not_bol(s)); try!(maybe_print_comment(s, item.span.lo)); - try!(print_outer_attributes(s, item.attrs)); + try!(print_outer_attributes(s, item.attrs.as_slice())); { let ann_node = NodeItem(s, item); try!(s.ann.pre(ann_node)); @@ -580,21 +581,21 @@ pub fn print_item(s: &mut State, item: &ast::Item) -> io::IoResult<()> { item.vis )); try!(word(&mut s.s, " ")); - try!(print_block_with_attrs(s, body, item.attrs)); + try!(print_block_with_attrs(s, body, item.attrs.as_slice())); } ast::ItemMod(ref _mod) => { try!(head(s, visibility_qualified(item.vis, "mod"))); try!(print_ident(s, item.ident)); try!(nbsp(s)); try!(bopen(s)); - try!(print_mod(s, _mod, item.attrs)); + try!(print_mod(s, _mod, item.attrs.as_slice())); try!(bclose(s, item.span)); } ast::ItemForeignMod(ref nmod) => { try!(head(s, "extern")); try!(word_nbsp(s, nmod.abis.to_str())); try!(bopen(s)); - try!(print_foreign_mod(s, nmod, item.attrs)); + try!(print_foreign_mod(s, nmod, item.attrs.as_slice())); try!(bclose(s, item.span)); } ast::ItemTy(ty, ref params) => { @@ -646,7 +647,7 @@ pub fn print_item(s: &mut State, item: &ast::Item) -> io::IoResult<()> { try!(space(&mut s.s)); try!(bopen(s)); - try!(print_inner_attributes(s, item.attrs)); + try!(print_inner_attributes(s, item.attrs.as_slice())); for 
meth in methods.iter() { try!(print_method(s, *meth)); } @@ -706,7 +707,7 @@ pub fn print_enum_def(s: &mut State, enum_definition: &ast::EnumDef, try!(print_ident(s, ident)); try!(print_generics(s, generics)); try!(space(&mut s.s)); - try!(print_variants(s, enum_definition.variants, span)); + try!(print_variants(s, enum_definition.variants.as_slice(), span)); Ok(()) } @@ -717,7 +718,7 @@ pub fn print_variants(s: &mut State, for &v in variants.iter() { try!(space_if_not_bol(s)); try!(maybe_print_comment(s, v.span.lo)); - try!(print_outer_attributes(s, v.node.attrs)); + try!(print_outer_attributes(s, v.node.attrs.as_slice())); try!(ibox(s, indent_unit)); try!(print_variant(s, v)); try!(word(&mut s.s, ",")); @@ -761,7 +762,10 @@ pub fn print_struct(s: &mut State, if ast_util::struct_def_is_tuple_like(struct_def) { if !struct_def.fields.is_empty() { try!(popen(s)); - try!(commasep(s, Inconsistent, struct_def.fields, |s, field| { + try!(commasep(s, + Inconsistent, + struct_def.fields.as_slice(), + |s, field| { match field.node.kind { ast::NamedField(..) => fail!("unexpected named field"), ast::UnnamedField => { @@ -787,7 +791,8 @@ pub fn print_struct(s: &mut State, ast::NamedField(ident, visibility) => { try!(hardbreak_if_not_bol(s)); try!(maybe_print_comment(s, field.span.lo)); - try!(print_outer_attributes(s, field.node.attrs)); + try!(print_outer_attributes(s, + field.node.attrs.as_slice())); try!(print_visibility(s, visibility)); try!(print_ident(s, ident)); try!(word_nbsp(s, ":")); @@ -857,7 +862,10 @@ pub fn print_variant(s: &mut State, v: &ast::Variant) -> io::IoResult<()> { arg: &ast::VariantArg) -> io::IoResult<()> { print_type(s, arg.ty) } - try!(commasep(s, Consistent, *args, print_variant_arg)); + try!(commasep(s, + Consistent, + args.as_slice(), + print_variant_arg)); try!(pclose(s)); } } @@ -881,7 +889,7 @@ pub fn print_variant(s: &mut State, v: &ast::Variant) -> io::IoResult<()> { pub fn print_ty_method(s: &mut State, m: &ast::TypeMethod) -> io::IoResult<()> { try!(hardbreak_if_not_bol(s)); try!(maybe_print_comment(s, m.span.lo)); - try!(print_outer_attributes(s, m.attrs)); + try!(print_outer_attributes(s, m.attrs.as_slice())); try!(print_ty_fn(s, None, None, @@ -907,12 +915,12 @@ pub fn print_trait_method(s: &mut State, pub fn print_method(s: &mut State, meth: &ast::Method) -> io::IoResult<()> { try!(hardbreak_if_not_bol(s)); try!(maybe_print_comment(s, meth.span.lo)); - try!(print_outer_attributes(s, meth.attrs)); + try!(print_outer_attributes(s, meth.attrs.as_slice())); try!(print_fn(s, meth.decl, Some(meth.purity), AbiSet::Rust(), meth.ident, &meth.generics, Some(meth.explicit_self.node), meth.vis)); try!(word(&mut s.s, " ")); - print_block_with_attrs(s, meth.body, meth.attrs) + print_block_with_attrs(s, meth.body, meth.attrs.as_slice()) } pub fn print_outer_attributes(s: &mut State, @@ -1184,7 +1192,7 @@ pub fn print_expr(s: &mut State, expr: &ast::Expr) -> io::IoResult<()> { try!(word(&mut s.s, "mut")); if exprs.len() > 0u { try!(nbsp(s)); } } - try!(commasep_exprs(s, Inconsistent, *exprs)); + try!(commasep_exprs(s, Inconsistent, exprs.as_slice())); try!(word(&mut s.s, "]")); try!(end(s)); } @@ -1207,7 +1215,11 @@ pub fn print_expr(s: &mut State, expr: &ast::Expr) -> io::IoResult<()> { ast::ExprStruct(ref path, ref fields, wth) => { try!(print_path(s, path, true)); try!(word(&mut s.s, "{")); - try!(commasep_cmnt(s, Consistent, (*fields), print_field, get_span)); + try!(commasep_cmnt(s, + Consistent, + fields.as_slice(), + print_field, + get_span)); match wth { Some(expr) => 
{ try!(ibox(s, indent_unit)); @@ -1225,7 +1237,7 @@ pub fn print_expr(s: &mut State, expr: &ast::Expr) -> io::IoResult<()> { } ast::ExprTup(ref exprs) => { try!(popen(s)); - try!(commasep_exprs(s, Inconsistent, *exprs)); + try!(commasep_exprs(s, Inconsistent, exprs.as_slice())); if exprs.len() == 1 { try!(word(&mut s.s, ",")); } @@ -1233,16 +1245,16 @@ pub fn print_expr(s: &mut State, expr: &ast::Expr) -> io::IoResult<()> { } ast::ExprCall(func, ref args) => { try!(print_expr(s, func)); - try!(print_call_post(s, *args)); + try!(print_call_post(s, args.as_slice())); } ast::ExprMethodCall(ident, ref tys, ref args) => { let base_args = args.slice_from(1); - try!(print_expr(s, args[0])); + try!(print_expr(s, *args.get(0))); try!(word(&mut s.s, ".")); try!(print_ident(s, ident)); if tys.len() > 0u { try!(word(&mut s.s, "::<")); - try!(commasep(s, Inconsistent, *tys, print_type_ref)); + try!(commasep(s, Inconsistent, tys.as_slice(), print_type_ref)); try!(word(&mut s.s, ">")); } try!(print_call_post(s, base_args)); @@ -1455,7 +1467,7 @@ pub fn print_expr(s: &mut State, expr: &ast::Expr) -> io::IoResult<()> { try!(print_ident(s, id)); if tys.len() > 0u { try!(word(&mut s.s, "::<")); - try!(commasep(s, Inconsistent, *tys, print_type_ref)); + try!(commasep(s, Inconsistent, tys.as_slice(), print_type_ref)); try!(word(&mut s.s, ">")); } } @@ -1649,7 +1661,7 @@ fn print_path_(s: &mut State, } try!(commasep(s, Inconsistent, - segment.types.map_to_vec(|&t| t), + segment.types.map_to_vec(|&t| t).as_slice(), print_type_ref)); } @@ -1708,7 +1720,7 @@ pub fn print_pat(s: &mut State, pat: &ast::Pat) -> io::IoResult<()> { Some(ref args) => { if !args.is_empty() { try!(popen(s)); - try!(commasep(s, Inconsistent, *args, + try!(commasep(s, Inconsistent, args.as_slice(), |s, &p| print_pat(s, p))); try!(pclose(s)); } else { } @@ -1727,7 +1739,7 @@ pub fn print_pat(s: &mut State, pat: &ast::Pat) -> io::IoResult<()> { Ok(()) } fn get_span(f: &ast::FieldPat) -> codemap::Span { return f.pat.span; } - try!(commasep_cmnt(s, Consistent, *fields, + try!(commasep_cmnt(s, Consistent, fields.as_slice(), |s, f| print_field(s,f), get_span)); if etc { @@ -1738,7 +1750,10 @@ pub fn print_pat(s: &mut State, pat: &ast::Pat) -> io::IoResult<()> { } ast::PatTup(ref elts) => { try!(popen(s)); - try!(commasep(s, Inconsistent, *elts, |s, &p| print_pat(s, p))); + try!(commasep(s, + Inconsistent, + elts.as_slice(), + |s, &p| print_pat(s, p))); if elts.len() == 1 { try!(word(&mut s.s, ",")); } @@ -1761,7 +1776,10 @@ pub fn print_pat(s: &mut State, pat: &ast::Pat) -> io::IoResult<()> { } ast::PatVec(ref before, slice, ref after) => { try!(word(&mut s.s, "[")); - try!(commasep(s, Inconsistent, *before, |s, &p| print_pat(s, p))); + try!(commasep(s, + Inconsistent, + before.as_slice(), + |s, &p| print_pat(s, p))); for &p in slice.iter() { if !before.is_empty() { try!(word_space(s, ",")); } match *p { @@ -1773,7 +1791,10 @@ pub fn print_pat(s: &mut State, pat: &ast::Pat) -> io::IoResult<()> { try!(print_pat(s, p)); if !after.is_empty() { try!(word_space(s, ",")); } } - try!(commasep(s, Inconsistent, *after, |s, &p| print_pat(s, p))); + try!(commasep(s, + Inconsistent, + after.as_slice(), + |s, &p| print_pat(s, p))); try!(word(&mut s.s, "]")); } } @@ -1842,7 +1863,7 @@ pub fn print_fn_args(s: &mut State, decl: &ast::FnDecl, for &explicit_self in opt_explicit_self.iter() { let m = match explicit_self { ast::SelfStatic => ast::MutImmutable, - _ => match decl.inputs[0].pat.node { + _ => match decl.inputs.get(0).pat.node { 
ast::PatIdent(ast::BindByValue(m), _, _) => m, _ => ast::MutImmutable } @@ -1981,12 +2002,12 @@ pub fn print_generics(s: &mut State, } } - let mut ints = ~[]; + let mut ints = Vec::new(); for i in range(0u, total) { ints.push(i); } - try!(commasep(s, Inconsistent, ints, + try!(commasep(s, Inconsistent, ints.as_slice(), |s, &i| print_item(s, generics, i))); try!(word(&mut s.s, ">")); } @@ -2041,7 +2062,7 @@ pub fn print_view_path(s: &mut State, vp: &ast::ViewPath) -> io::IoResult<()> { try!(print_path(s, path, false)); try!(word(&mut s.s, "::{")); } - try!(commasep(s, Inconsistent, (*idents), |s, w| { + try!(commasep(s, Inconsistent, idents.as_slice(), |s, w| { print_ident(s, w.node.name) })); word(&mut s.s, "}") @@ -2057,7 +2078,7 @@ pub fn print_view_paths(s: &mut State, pub fn print_view_item(s: &mut State, item: &ast::ViewItem) -> io::IoResult<()> { try!(hardbreak_if_not_bol(s)); try!(maybe_print_comment(s, item.span.lo)); - try!(print_outer_attributes(s, item.attrs)); + try!(print_outer_attributes(s, item.attrs.as_slice())); try!(print_visibility(s, item.vis)); match item.node { ast::ViewItemExternMod(id, ref optional_path, _) => { @@ -2073,7 +2094,7 @@ pub fn print_view_item(s: &mut State, item: &ast::ViewItem) -> io::IoResult<()> ast::ViewItemUse(ref vps) => { try!(head(s, "use")); - try!(print_view_paths(s, *vps)); + try!(print_view_paths(s, vps.as_slice())); } } try!(word(&mut s.s, ";")); @@ -2103,7 +2124,7 @@ pub fn print_arg(s: &mut State, input: &ast::Arg) -> io::IoResult<()> { match input.pat.node { ast::PatIdent(_, ref path, _) if path.segments.len() == 1 && - path.segments[0].identifier.name == + path.segments.get(0).identifier.name == parse::token::special_idents::invalid.name => { // Do nothing. } @@ -2286,7 +2307,7 @@ pub fn print_literal(s: &mut State, lit: &ast::Lit) -> io::IoResult<()> { ast::LitBinary(ref arr) => { try!(ibox(s, indent_unit)); try!(word(&mut s.s, "[")); - try!(commasep_cmnt(s, Inconsistent, *arr.borrow(), + try!(commasep_cmnt(s, Inconsistent, arr.borrow().as_slice(), |s, u| word(&mut s.s, format!("{}", *u)), |_| lit.span)); try!(word(&mut s.s, "]")); @@ -2303,7 +2324,7 @@ pub fn next_lit(s: &mut State, pos: BytePos) -> Option<comments::Literal> { match s.literals { Some(ref lits) => { while s.cur_cmnt_and_lit.cur_lit < lits.len() { - let ltrl = (*lits)[s.cur_cmnt_and_lit.cur_lit].clone(); + let ltrl = (*(*lits).get(s.cur_cmnt_and_lit.cur_lit)).clone(); if ltrl.pos > pos { return None; } s.cur_cmnt_and_lit.cur_lit += 1u; if ltrl.pos == pos { return Some(ltrl); } @@ -2335,7 +2356,7 @@ pub fn print_comment(s: &mut State, comments::Mixed => { assert_eq!(cmnt.lines.len(), 1u); try!(zerobreak(&mut s.s)); - try!(word(&mut s.s, cmnt.lines[0])); + try!(word(&mut s.s, *cmnt.lines.get(0))); try!(zerobreak(&mut s.s)); } comments::Isolated => { @@ -2352,7 +2373,7 @@ pub fn print_comment(s: &mut State, comments::Trailing => { try!(word(&mut s.s, " ")); if cmnt.lines.len() == 1u { - try!(word(&mut s.s, cmnt.lines[0])); + try!(word(&mut s.s, *cmnt.lines.get(0))); try!(hardbreak(&mut s.s)); } else { try!(ibox(s, 0u)); @@ -2414,7 +2435,7 @@ pub fn next_comment(s: &mut State) -> Option<comments::Comment> { match s.comments { Some(ref cmnts) => { if s.cur_cmnt_and_lit.cur_cmnt < cmnts.len() { - Some(cmnts[s.cur_cmnt_and_lit.cur_cmnt].clone()) + Some((*cmnts.get(s.cur_cmnt_and_lit.cur_cmnt)).clone()) } else { None } @@ -2535,12 +2556,14 @@ mod test { use codemap; use parse::token; + use std::vec_ng::Vec; + #[test] fn test_fun_to_str() { let abba_ident = 
token::str_to_ident("abba"); let decl = ast::FnDecl { - inputs: ~[], + inputs: Vec::new(), output: ast::P(ast::Ty {id: 0, node: ast::TyNil, span: codemap::DUMMY_SP}), @@ -2559,9 +2582,9 @@ mod test { let var = codemap::respan(codemap::DUMMY_SP, ast::Variant_ { name: ident, - attrs: ~[], + attrs: Vec::new(), // making this up as I go.... ? - kind: ast::TupleVariantKind(~[]), + kind: ast::TupleVariantKind(Vec::new()), id: 0, disr_expr: None, vis: ast::Public, diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index 7b885df0317..ba154a8d892 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -21,10 +21,11 @@ use std::cmp::Equiv; use std::fmt; use std::hash::Hash; use std::rc::Rc; +use std::vec_ng::Vec; pub struct Interner<T> { priv map: RefCell<HashMap<T, Name>>, - priv vect: RefCell<~[T]>, + priv vect: RefCell<Vec<T> >, } // when traits can extend traits, we should extend index<Name,T> to get [] @@ -32,7 +33,7 @@ impl<T:Eq + Hash + Freeze + Clone + 'static> Interner<T> { pub fn new() -> Interner<T> { Interner { map: RefCell::new(HashMap::new()), - vect: RefCell::new(~[]), + vect: RefCell::new(Vec::new()), } } @@ -68,7 +69,7 @@ impl<T:Eq + Hash + Freeze + Clone + 'static> Interner<T> { pub fn get(&self, idx: Name) -> T { let vect = self.vect.borrow(); - vect.get()[idx].clone() + (*vect.get().get(idx as uint)).clone() } pub fn len(&self) -> uint { @@ -134,7 +135,7 @@ impl RcStr { // &str rather than RcStr, resulting in less allocation. pub struct StrInterner { priv map: RefCell<HashMap<RcStr, Name>>, - priv vect: RefCell<~[RcStr]>, + priv vect: RefCell<Vec<RcStr> >, } // when traits can extend traits, we should extend index<Name,T> to get [] @@ -142,7 +143,7 @@ impl StrInterner { pub fn new() -> StrInterner { StrInterner { map: RefCell::new(HashMap::new()), - vect: RefCell::new(~[]), + vect: RefCell::new(Vec::new()), } } @@ -189,21 +190,21 @@ impl StrInterner { let new_idx = self.len() as Name; // leave out of map to avoid colliding let mut vect = self.vect.borrow_mut(); - let existing = vect.get()[idx].clone(); + let existing = (*vect.get().get(idx as uint)).clone(); vect.get().push(existing); new_idx } pub fn get(&self, idx: Name) -> RcStr { let vect = self.vect.borrow(); - vect.get()[idx].clone() + (*vect.get().get(idx as uint)).clone() } /// Returns this string with lifetime tied to the interner. Since /// strings may never be removed from the interner, this is safe. 
pub fn get_ref<'a>(&'a self, idx: Name) -> &'a str { let vect = self.vect.borrow(); - let s: &str = vect.get()[idx].as_slice(); + let s: &str = vect.get().get(idx as uint).as_slice(); unsafe { cast::transmute(s) } diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs index 8c7ad028a8e..03fc30e2fd7 100644 --- a/src/libsyntax/util/parser_testing.rs +++ b/src/libsyntax/util/parser_testing.rs @@ -15,22 +15,24 @@ use parse::{new_parser_from_source_str}; use parse::parser::Parser; use parse::token; +use std::vec_ng::Vec; + // map a string to tts, using a made-up filename: return both the TokenTree's // and the ParseSess -pub fn string_to_tts_and_sess (source_str : ~str) -> (~[ast::TokenTree], @ParseSess) { +pub fn string_to_tts_and_sess (source_str : ~str) -> (Vec<ast::TokenTree> , @ParseSess) { let ps = new_parse_sess(); (filemap_to_tts(ps,string_to_filemap(ps,source_str,~"bogofile")),ps) } // map a string to tts, using a made-up filename: -pub fn string_to_tts(source_str : ~str) -> ~[ast::TokenTree] { +pub fn string_to_tts(source_str : ~str) -> Vec<ast::TokenTree> { let (tts,_) = string_to_tts_and_sess(source_str); tts } pub fn string_to_parser_and_sess(source_str: ~str) -> (Parser,@ParseSess) { let ps = new_parse_sess(); - (new_parser_from_source_str(ps,~[],~"bogofile",source_str),ps) + (new_parser_from_source_str(ps,Vec::new(),~"bogofile",source_str),ps) } // map string to parser (via tts) @@ -69,14 +71,14 @@ pub fn string_to_expr (source_str : ~str) -> @ast::Expr { // parse a string, return an item pub fn string_to_item (source_str : ~str) -> Option<@ast::Item> { with_error_checking_parse(source_str, |p| { - p.parse_item(~[]) + p.parse_item(Vec::new()) }) } // parse a string, return a stmt pub fn string_to_stmt(source_str : ~str) -> @ast::Stmt { with_error_checking_parse(source_str, |p| { - p.parse_stmt(~[]) + p.parse_stmt(Vec::new()) }) } @@ -87,7 +89,7 @@ pub fn string_to_pat(source_str : ~str) -> @ast::Pat { } // convert a vector of strings to a vector of ast::Ident's -pub fn strs_to_idents(ids: ~[&str]) -> ~[ast::Ident] { +pub fn strs_to_idents(ids: Vec<&str> ) -> Vec<ast::Ident> { ids.map(|u| token::str_to_ident(*u)) } diff --git a/src/libsyntax/util/small_vector.rs b/src/libsyntax/util/small_vector.rs index d6cc35a6f9d..9eb9871bb21 100644 --- a/src/libsyntax/util/small_vector.rs +++ b/src/libsyntax/util/small_vector.rs @@ -7,14 +7,16 @@ // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. + use std::mem; -use std::vec; +use std::vec_ng::Vec; +use std::vec_ng; /// A vector type optimized for cases where the size is almost always 0 or 1 pub enum SmallVector<T> { priv Zero, priv One(T), - priv Many(~[T]), + priv Many(Vec<T> ), } impl<T> Container for SmallVector<T> { @@ -46,7 +48,7 @@ impl<T> SmallVector<T> { One(v) } - pub fn many(vs: ~[T]) -> SmallVector<T> { + pub fn many(vs: Vec<T> ) -> SmallVector<T> { Many(vs) } @@ -56,7 +58,7 @@ impl<T> SmallVector<T> { One(..) 
=> { let one = mem::replace(self, Zero); match one { - One(v1) => mem::replace(self, Many(~[v1, v])), + One(v1) => mem::replace(self, Many(vec!(v1, v))), _ => unreachable!() }; } @@ -73,7 +75,7 @@ impl<T> SmallVector<T> { pub fn get<'a>(&'a self, idx: uint) -> &'a T { match *self { One(ref v) if idx == 0 => v, - Many(ref vs) => &vs[idx], + Many(ref vs) => vs.get(idx), _ => fail!("out of bounds access") } } @@ -104,7 +106,7 @@ impl<T> SmallVector<T> { pub enum MoveItems<T> { priv ZeroIterator, priv OneIterator(T), - priv ManyIterator(vec::MoveItems<T>), + priv ManyIterator(vec_ng::MoveItems<T>), } impl<T> Iterator<T> for MoveItems<T> { @@ -136,13 +138,15 @@ impl<T> Iterator<T> for MoveItems<T> { mod test { use super::*; + use std::vec_ng::Vec; + #[test] fn test_len() { let v: SmallVector<int> = SmallVector::zero(); assert_eq!(0, v.len()); assert_eq!(1, SmallVector::one(1).len()); - assert_eq!(5, SmallVector::many(~[1, 2, 3, 4, 5]).len()); + assert_eq!(5, SmallVector::many(vec!(1, 2, 3, 4, 5)).len()); } #[test] @@ -161,7 +165,7 @@ mod test { #[test] fn test_from_iterator() { - let v: SmallVector<int> = (~[1, 2, 3]).move_iter().collect(); + let v: SmallVector<int> = (vec!(1, 2, 3)).move_iter().collect(); assert_eq!(3, v.len()); assert_eq!(&1, v.get(0)); assert_eq!(&2, v.get(1)); @@ -171,14 +175,14 @@ mod test { #[test] fn test_move_iter() { let v = SmallVector::zero(); - let v: ~[int] = v.move_iter().collect(); - assert_eq!(~[], v); + let v: Vec<int> = v.move_iter().collect(); + assert_eq!(Vec::new(), v); let v = SmallVector::one(1); - assert_eq!(~[1], v.move_iter().collect()); + assert_eq!(vec!(1), v.move_iter().collect()); - let v = SmallVector::many(~[1, 2, 3]); - assert_eq!(~[1, 2, 3], v.move_iter().collect()); + let v = SmallVector::many(vec!(1, 2, 3)); + assert_eq!(vec!(1, 2, 3), v.move_iter().collect()); } #[test] @@ -190,12 +194,12 @@ mod test { #[test] #[should_fail] fn test_expect_one_many() { - SmallVector::many(~[1, 2]).expect_one(""); + SmallVector::many(vec!(1, 2)).expect_one(""); } #[test] fn test_expect_one_one() { assert_eq!(1, SmallVector::one(1).expect_one("")); - assert_eq!(1, SmallVector::many(~[1]).expect_one("")); + assert_eq!(1, SmallVector::many(vec!(1)).expect_one("")); } } diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 39989977d69..2edfd367f4e 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -637,7 +637,7 @@ pub fn walk_expr<E: Clone, V: Visitor<E>>(visitor: &mut V, expression: &Expr, en visitor.visit_expr(subexpression, env.clone()) } ExprVec(ref subexpressions, _) => { - walk_exprs(visitor, *subexpressions, env.clone()) + walk_exprs(visitor, subexpressions.as_slice(), env.clone()) } ExprRepeat(element, count, _) => { visitor.visit_expr(element, env.clone()); @@ -662,7 +662,7 @@ pub fn walk_expr<E: Clone, V: Visitor<E>>(visitor: &mut V, expression: &Expr, en visitor.visit_expr(callee_expression, env.clone()) } ExprMethodCall(_, ref types, ref arguments) => { - walk_exprs(visitor, *arguments, env.clone()); + walk_exprs(visitor, arguments.as_slice(), env.clone()); for &typ in types.iter() { visitor.visit_ty(typ, env.clone()) } |
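The util/small_vector.rs hunk above is a self-contained example of the same migration: SmallVector<T> keeps zero or one elements inline and only allocates a vector once a second element is pushed, and the commit swaps its Many(~[T]) payload and vec::MoveItems iterator for Vec<T> and vec_ng::MoveItems, with vec!(..) replacing ~[..] literals in the tests. A rough sketch of the data structure in current Rust follows; it is illustrative only, and the real type also implements Container, FromIterator, and a move iterator, as the diff shows.

    use std::mem;

    // Sketch of a vector optimized for the zero- or one-element case, after
    // the shape of libsyntax's SmallVector; names and details simplified.
    enum SmallVector<T> {
        Zero,
        One(T),
        Many(Vec<T>),
    }

    impl<T> SmallVector<T> {
        fn len(&self) -> usize {
            match *self {
                SmallVector::Zero => 0,
                SmallVector::One(..) => 1,
                SmallVector::Many(ref v) => v.len(),
            }
        }

        fn push(&mut self, v: T) {
            // Take the current value out, grow it, and put it back; the
            // real type only needs the mem::replace dance in the One case,
            // where it promotes to Many(vec!(v1, v)) as the diff shows.
            let current = mem::replace(self, SmallVector::Zero);
            *self = match current {
                SmallVector::Zero => SmallVector::One(v),
                SmallVector::One(v1) => SmallVector::Many(vec![v1, v]),
                SmallVector::Many(mut vs) => {
                    vs.push(v);
                    SmallVector::Many(vs)
                }
            };
        }

        fn get(&self, idx: usize) -> &T {
            match *self {
                SmallVector::One(ref v) if idx == 0 => v,
                SmallVector::Many(ref vs) => &vs[idx],
                _ => panic!("out of bounds access"),
            }
        }
    }

    fn main() {
        let mut v = SmallVector::Zero;
        v.push(1);
        v.push(2);
        v.push(3);
        assert_eq!(v.len(), 3);
        assert_eq!(*v.get(1), 2);
    }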

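The util/interner.rs changes further up follow the same shape: both Interner<T> and StrInterner pair a HashMap from value to Name with a RefCell-wrapped vector indexed by that Name, and the vector lookups now spell out vect.get().get(idx as uint) instead of vect.get()[idx]. A toy version of the pattern in current Rust is sketched below; MiniInterner and intern are made-up names, and the interior mutability and RcStr handling of the real StrInterner are left out.

    use std::collections::HashMap;

    // Toy interner: a map from string to a dense index plus a Vec for
    // index-to-string lookups. Illustrative only, not the libsyntax type.
    struct MiniInterner {
        map: HashMap<String, u32>,
        vect: Vec<String>,
    }

    impl MiniInterner {
        fn new() -> MiniInterner {
            MiniInterner { map: HashMap::new(), vect: Vec::new() }
        }

        fn intern(&mut self, s: &str) -> u32 {
            if let Some(&idx) = self.map.get(s) {
                return idx;
            }
            let idx = self.vect.len() as u32;
            self.map.insert(s.to_string(), idx);
            self.vect.push(s.to_string());
            idx
        }

        // Mirrors the diff's `(*vect.get().get(idx as uint)).clone()`: the
        // compact index is widened back to a vector index for the lookup.
        fn get(&self, idx: u32) -> String {
            self.vect[idx as usize].clone()
        }
    }

    fn main() {
        let mut i = MiniInterner::new();
        let a = i.intern("abba");
        let b = i.intern("zebra");
        assert_eq!(a, i.intern("abba")); // same string, same index
        assert_eq!(i.get(b), "zebra");
    }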