From a32249d4477f449646162bbad607c39d0ad7f3ca Mon Sep 17 00:00:00 2001 From: Paul Collier Date: Sat, 17 Jan 2015 23:33:05 +0000 Subject: libsyntax: uint types to usize --- src/libsyntax/parse/parser.rs | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) (limited to 'src/libsyntax/parse/parser.rs') diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 30cc9836374..9822dcef0c0 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -292,9 +292,9 @@ pub struct Parser<'a> { pub buffer: [TokenAndSpan; 4], pub buffer_start: int, pub buffer_end: int, - pub tokens_consumed: uint, + pub tokens_consumed: usize, pub restrictions: Restrictions, - pub quote_depth: uint, // not (yet) related to the quasiquoter + pub quote_depth: usize, // not (yet) related to the quasiquoter pub reader: Box, pub interner: Rc, /// The set of seen errors about obsolete syntax. Used to suppress @@ -932,8 +932,8 @@ impl<'a> Parser<'a> { self.reader.real_token() } else { // Avoid token copies with `replace`. - let buffer_start = self.buffer_start as uint; - let next_index = (buffer_start + 1) & 3 as uint; + let buffer_start = self.buffer_start as usize; + let next_index = (buffer_start + 1) & 3 as usize; self.buffer_start = next_index as int; let placeholder = TokenAndSpan { @@ -972,15 +972,15 @@ impl<'a> Parser<'a> { } return (4 - self.buffer_start) + self.buffer_end; } - pub fn look_ahead(&mut self, distance: uint, f: F) -> R where + pub fn look_ahead(&mut self, distance: usize, f: F) -> R where F: FnOnce(&token::Token) -> R, { let dist = distance as int; while self.buffer_length() < dist { - self.buffer[self.buffer_end as uint] = self.reader.real_token(); + self.buffer[self.buffer_end as usize] = self.reader.real_token(); self.buffer_end = (self.buffer_end + 1) & 3; } - f(&self.buffer[((self.buffer_start + dist - 1) & 3) as uint].tok) + f(&self.buffer[((self.buffer_start + dist - 1) & 3) as usize].tok) } pub fn fatal(&mut self, m: &str) -> ! 
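The hunk above switches the parser's token counter and lookahead distance to the pointer-sized `usize`; the 4-slot token buffer is indexed with `& 3` masking because its length is a power of two. Below is a minimal stand-alone sketch of that lookahead scheme in current Rust — the `Lookahead` type and its `char` "tokens" are invented here for illustration and are not libsyntax's actual types.

```rust
struct Lookahead {
    buffer: [char; 4],
    buffer_start: usize,
    buffer_end: usize,
    source: Vec<char>,
    next: usize,
}

impl Lookahead {
    fn real_token(&mut self) -> char {
        // '$' stands in for EOF once the source runs out
        let t = *self.source.get(self.next).unwrap_or(&'$');
        self.next += 1;
        t
    }
    fn buffer_length(&self) -> usize {
        // wrap-around length of the occupied region of the 4-slot ring
        (self.buffer_end + 4 - self.buffer_start) & 3
    }
    fn look_ahead(&mut self, distance: usize) -> char {
        while self.buffer_length() < distance {
            let t = self.real_token();
            self.buffer[self.buffer_end] = t;
            self.buffer_end = (self.buffer_end + 1) & 3;
        }
        self.buffer[(self.buffer_start + distance - 1) & 3]
    }
}

fn main() {
    let mut la = Lookahead {
        buffer: [' '; 4],
        buffer_start: 0,
        buffer_end: 0,
        source: "fn a()".chars().collect(),
        next: 0,
    };
    assert_eq!(la.look_ahead(1), 'f');
    assert_eq!(la.look_ahead(3), ' ');   // third char of "fn a()" is the space
}
```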
{ self.sess.span_diagnostic.span_fatal(self.span, m) @@ -2087,7 +2087,7 @@ impl<'a> Parser<'a> { ExprField(expr, ident) } - pub fn mk_tup_field(&mut self, expr: P, idx: codemap::Spanned) -> ast::Expr_ { + pub fn mk_tup_field(&mut self, expr: P, idx: codemap::Spanned) -> ast::Expr_ { ExprTupField(expr, idx) } @@ -2485,7 +2485,7 @@ impl<'a> Parser<'a> { hi = self.span.hi; self.bump(); - let index = n.as_str().parse::(); + let index = n.as_str().parse::(); match index { Some(n) => { let id = spanned(dot, hi, n); @@ -2511,7 +2511,7 @@ impl<'a> Parser<'a> { }; self.span_help(last_span, &format!("try parenthesizing the first index; e.g., `(foo.{}){}`", - float.trunc() as uint, + float.trunc() as usize, &float.fract().to_string()[1..])[]); } self.abort_if_errors(); @@ -2864,7 +2864,7 @@ impl<'a> Parser<'a> { } /// Parse an expression of binops of at least min_prec precedence - pub fn parse_more_binops(&mut self, lhs: P, min_prec: uint) -> P { + pub fn parse_more_binops(&mut self, lhs: P, min_prec: usize) -> P { if self.expr_is_complete(&*lhs) { return lhs; } // Prevent dynamic borrow errors later on by limiting the -- cgit 1.4.1-3-g733a5 From 7a24b3a4d7769ad9a4863a2cc61c009056459a67 Mon Sep 17 00:00:00 2001 From: Paul Collier Date: Sat, 17 Jan 2015 23:55:21 +0000 Subject: libsyntax: int => i32 in appropriate places --- src/libsyntax/ext/deriving/generic/mod.rs | 26 ++++++++--------- src/libsyntax/ext/deriving/generic/ty.rs | 4 +-- src/libsyntax/ext/expand.rs | 46 +++++++++++++++---------------- src/libsyntax/parse/lexer/comments.rs | 4 +-- src/libsyntax/parse/mod.rs | 10 +++---- src/libsyntax/parse/parser.rs | 6 ++-- 6 files changed, 48 insertions(+), 48 deletions(-) (limited to 'src/libsyntax/parse/parser.rs') diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs index 27199de0ea8..ca3c1d36fba 100644 --- a/src/libsyntax/ext/deriving/generic/mod.rs +++ b/src/libsyntax/ext/deriving/generic/mod.rs @@ -28,7 +28,7 @@ //! arguments: //! //! - `Struct`, when `Self` is a struct (including tuple structs, e.g -//! `struct T(int, char)`). +//! `struct T(i32, char)`). //! - `EnumMatching`, when `Self` is an enum and all the arguments are the //! same variant of the enum (e.g. `Some(1)`, `Some(3)` and `Some(4)`) //! - `EnumNonMatchingCollapsed` when `Self` is an enum and the arguments @@ -54,17 +54,17 @@ //! following snippet //! //! ```rust -//! struct A { x : int } +//! struct A { x : i32 } //! -//! struct B(int); +//! struct B(i32); //! //! enum C { -//! C0(int), -//! C1 { x: int } +//! C0(i32), +//! C1 { x: i32 } //! } //! ``` //! -//! The `int`s in `B` and `C0` don't have an identifier, so the +//! The `i32`s in `B` and `C0` don't have an identifier, so the //! `Option`s would be `None` for them. //! //! In the static cases, the structure is summarised, either into the just @@ -90,8 +90,8 @@ //! trait PartialEq { //! fn eq(&self, other: &Self); //! } -//! impl PartialEq for int { -//! fn eq(&self, other: &int) -> bool { +//! impl PartialEq for i32 { +//! fn eq(&self, other: &i32) -> bool { //! *self == *other //! } //! } @@ -117,7 +117,7 @@ //! //! ```{.text} //! Struct(vec![FieldInfo { -//! span: , +//! span: , //! name: None, //! self_: //! other: vec![] @@ -132,7 +132,7 @@ //! ```{.text} //! EnumMatching(0, , //! vec![FieldInfo { -//! span: +//! span: //! name: None, //! self_: , //! other: vec![] @@ -179,7 +179,7 @@ //! StaticStruct(, Unnamed(vec![])) //! //! StaticEnum(, -//! vec![(, , Unnamed(vec![])), +//! vec![(, , Unnamed(vec![])), //! 
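Two details from the hunk above, restated in current Rust (where `str::parse` returns a `Result` rather than the `Option` matched on in the patch): a tuple-field index such as `foo.2` is read back out of the token text as a `usize`, and `foo.0.1` lexes `0.1` as a single float token, which is why the help message rebuilds the two indices from the float's integer and fractional parts. The values below are illustrative only.

```rust
fn main() {
    // a tuple-field index like `foo.2` is parsed from the token's text
    let idx = "2".parse::<usize>().ok();
    assert_eq!(idx, Some(2));

    // `foo.0.1` lexes `0.1` as one float token; split it back apart
    let float: f64 = "0.1".parse().unwrap();
    let first = float.trunc() as usize;   // 0
    let frac = float.fract().to_string(); // "0.1"
    let rest = &frac[1..];                // ".1"
    println!("try parenthesizing the first index; e.g., `(foo.{}){}`", first, rest);
}
```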
(, , Named(vec![(, )]))]) //! ``` @@ -719,7 +719,7 @@ impl<'a> MethodDef<'a> { /// ``` /// #[derive(PartialEq)] - /// struct A { x: int, y: int } + /// struct A { x: i32, y: i32 } /// /// // equivalent to: /// impl PartialEq for A { @@ -825,7 +825,7 @@ impl<'a> MethodDef<'a> { /// #[derive(PartialEq)] /// enum A { /// A1, - /// A2(int) + /// A2(i32) /// } /// /// // is equivalent to diff --git a/src/libsyntax/ext/deriving/generic/ty.rs b/src/libsyntax/ext/deriving/generic/ty.rs index a236fa33eb1..e7634716ed7 100644 --- a/src/libsyntax/ext/deriving/generic/ty.rs +++ b/src/libsyntax/ext/deriving/generic/ty.rs @@ -32,7 +32,7 @@ pub enum PtrTy<'a> { Raw(ast::Mutability), } -/// A path, e.g. `::std::option::Option::` (global). Has support +/// A path, e.g. `::std::option::Option::` (global). Has support /// for type parameters and a lifetime. #[derive(Clone)] pub struct Path<'a> { @@ -91,7 +91,7 @@ pub enum Ty<'a> { /// &/Box/ Ty Ptr(Box>, PtrTy<'a>), /// mod::mod::Type<[lifetime], [Params...]>, including a plain type - /// parameter, and things like `int` + /// parameter, and things like `i32` Literal(Path<'a>), /// includes unit Tuple(Vec> ) diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 1fd0334c016..4f614c1a937 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -1507,7 +1507,7 @@ mod test { #[should_fail] #[test] fn macros_cant_escape_fns_test () { let src = "fn bogus() {macro_rules! z (() => (3+4));}\ - fn inty() -> int { z!() }".to_string(); + fn inty() -> i32 { z!() }".to_string(); let sess = parse::new_parse_sess(); let crate_ast = parse::parse_crate_from_source_str( "".to_string(), @@ -1521,7 +1521,7 @@ mod test { #[should_fail] #[test] fn macros_cant_escape_mods_test () { let src = "mod foo {macro_rules! z (() => (3+4));}\ - fn inty() -> int { z!() }".to_string(); + fn inty() -> i32 { z!() }".to_string(); let sess = parse::new_parse_sess(); let crate_ast = parse::parse_crate_from_source_str( "".to_string(), @@ -1533,7 +1533,7 @@ mod test { // macro_use modules should allow macros to escape #[test] fn macros_can_escape_flattened_mods_test () { let src = "#[macro_use] mod foo {macro_rules! z (() => (3+4));}\ - fn inty() -> int { z!() }".to_string(); + fn inty() -> i32 { z!() }".to_string(); let sess = parse::new_parse_sess(); let crate_ast = parse::parse_crate_from_source_str( "".to_string(), @@ -1564,8 +1564,8 @@ mod test { // should be able to use a bound identifier as a literal in a macro definition: #[test] fn self_macro_parsing(){ expand_crate_str( - "macro_rules! foo ((zz) => (287u;)); - fn f(zz : int) {foo!(zz);}".to_string() + "macro_rules! foo ((zz) => (287;)); + fn f(zz: i32) {foo!(zz);}".to_string() ); } @@ -1601,23 +1601,23 @@ mod test { fn automatic_renaming () { let tests: Vec = vec!(// b & c should get new names throughout, in the expr too: - ("fn a() -> int { let b = 13; let c = b; b+c }", + ("fn a() -> i32 { let b = 13; let c = b; b+c }", vec!(vec!(0,1),vec!(2)), false), // both x's should be renamed (how is this causing a bug?) - ("fn main () {let x: int = 13;x;}", + ("fn main () {let x: i32 = 13;x;}", vec!(vec!(0)), false), // the use of b after the + should be renamed, the other one not: - ("macro_rules! f (($x:ident) => (b + $x)); fn a() -> int { let b = 13; f!(b)}", + ("macro_rules! f (($x:ident) => (b + $x)); fn a() -> i32 { let b = 13; f!(b)}", vec!(vec!(1)), false), // the b before the plus should not be renamed (requires marks) - ("macro_rules! 
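The doc comments above describe what `#[derive(PartialEq)]` expands to once the example types use `i32`. A compilable restatement, with the hand-written impl only approximating the generated field-by-field comparison:

```rust
#[derive(PartialEq)]
struct A { x: i32, y: i32 }

// roughly what the derive generates: compare each field in turn
struct B { x: i32, y: i32 }
impl PartialEq for B {
    fn eq(&self, other: &B) -> bool {
        self.x == other.x && self.y == other.y
    }
}

fn main() {
    assert!(A { x: 1, y: 2 } == A { x: 1, y: 2 });
    assert!(!(B { x: 1, y: 2 } == B { x: 1, y: 3 }));
}
```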
f (($x:ident) => ({let b=9; ($x + b)})); fn a() -> int { f!(b)}", + ("macro_rules! f (($x:ident) => ({let b=9; ($x + b)})); fn a() -> i32 { f!(b)}", vec!(vec!(1)), false), // the marks going in and out of letty should cancel, allowing that $x to // capture the one following the semicolon. // this was an awesome test case, and caught a *lot* of bugs. ("macro_rules! letty(($x:ident) => (let $x = 15;)); macro_rules! user(($x:ident) => ({letty!($x); $x})); - fn main() -> int {user!(z)}", + fn main() -> i32 {user!(z)}", vec!(vec!(0)), false) ); for (idx,s) in tests.iter().enumerate() { @@ -1680,13 +1680,13 @@ mod test { // can't write this test case until we have macro-generating macros. // method arg hygiene - // method expands to fn get_x(&self_0, x_1:int) {self_0 + self_2 + x_3 + x_1} + // method expands to fn get_x(&self_0, x_1: i32) {self_0 + self_2 + x_3 + x_1} #[test] fn method_arg_hygiene(){ run_renaming_test( &("macro_rules! inject_x (()=>(x)); macro_rules! inject_self (()=>(self)); struct A; - impl A{fn get_x(&self, x: int) {self + inject_self!() + inject_x!() + x;} }", + impl A{fn get_x(&self, x: i32) {self + inject_self!() + inject_x!() + x;} }", vec!(vec!(0),vec!(3)), true), 0) @@ -1706,21 +1706,21 @@ mod test { } // item fn hygiene - // expands to fn q(x_1:int){fn g(x_2:int){x_2 + x_1};} + // expands to fn q(x_1: i32){fn g(x_2: i32){x_2 + x_1};} #[test] fn issue_9383(){ run_renaming_test( - &("macro_rules! bad_macro (($ex:expr) => (fn g(x:int){ x + $ex })); - fn q(x:int) { bad_macro!(x); }", + &("macro_rules! bad_macro (($ex:expr) => (fn g(x: i32){ x + $ex })); + fn q(x: i32) { bad_macro!(x); }", vec!(vec!(1),vec!(0)),true), 0) } // closure arg hygiene (ExprClosure) - // expands to fn f(){(|x_1 : int| {(x_2 + x_1)})(3);} + // expands to fn f(){(|x_1 : i32| {(x_2 + x_1)})(3);} #[test] fn closure_arg_hygiene(){ run_renaming_test( &("macro_rules! inject_x (()=>(x)); - fn f(){(|x : int| {(inject_x!() + x)})(3);}", + fn f(){(|x : i32| {(inject_x!() + x)})(3);}", vec!(vec!(1)), true), 0) @@ -1729,7 +1729,7 @@ mod test { // macro_rules in method position. Sadly, unimplemented. #[test] fn macro_in_method_posn(){ expand_crate_str( - "macro_rules! my_method (() => (fn thirteen(&self) -> int {13})); + "macro_rules! my_method (() => (fn thirteen(&self) -> i32 {13})); struct A; impl A{ my_method!(); } fn f(){A.thirteen;}".to_string()); @@ -1876,7 +1876,7 @@ foo_module!(); // it's the name of a 0-ary variant, and that 'i' appears twice in succession. 
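These hygiene tests rely on the macro-internal binding being renamed so that it can neither capture nor be captured by call-site identifiers. The same effect is observable in current Rust with a small sketch; the macro and names here are invented for illustration.

```rust
macro_rules! with_local {
    ($x:expr) => {{
        let b = 9i32;   // this `b` is a hygienically distinct binding
        $x + b
    }};
}

fn main() {
    let b = 100i32;             // the caller's `b` is untouched by the macro
    let sum = with_local!(b);   // `$x` still refers to the caller's `b`
    assert_eq!(sum, 109);
    assert_eq!(b, 100);
}
```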
#[test] fn crate_bindings_test(){ - let the_crate = string_to_crate("fn main (a : int) -> int {|b| { + let the_crate = string_to_crate("fn main (a: i32) -> i32 {|b| { match 34 {None => 3, Some(i) | i => j, Foo{k:z,l:y} => \"banana\"}} }".to_string()); let idents = crate_bindings(&the_crate); assert_eq!(idents, strs_to_idents(vec!("a","b","None","i","i","z","y"))); @@ -1885,10 +1885,10 @@ foo_module!(); // test the IdentRenamer directly #[test] fn ident_renamer_test () { - let the_crate = string_to_crate("fn f(x : int){let x = x; x}".to_string()); + let the_crate = string_to_crate("fn f(x: i32){let x = x; x}".to_string()); let f_ident = token::str_to_ident("f"); let x_ident = token::str_to_ident("x"); - let int_ident = token::str_to_ident("int"); + let int_ident = token::str_to_ident("i32"); let renames = vec!((x_ident,Name(16))); let mut renamer = IdentRenamer{renames: &renames}; let renamed_crate = renamer.fold_crate(the_crate); @@ -1900,10 +1900,10 @@ foo_module!(); // test the PatIdentRenamer; only PatIdents get renamed #[test] fn pat_ident_renamer_test () { - let the_crate = string_to_crate("fn f(x : int){let x = x; x}".to_string()); + let the_crate = string_to_crate("fn f(x: i32){let x = x; x}".to_string()); let f_ident = token::str_to_ident("f"); let x_ident = token::str_to_ident("x"); - let int_ident = token::str_to_ident("int"); + let int_ident = token::str_to_ident("i32"); let renames = vec!((x_ident,Name(16))); let mut renamer = PatIdentRenamer{renames: &renames}; let renamed_crate = renamer.fold_crate(the_crate); diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index d710c6d6c0f..58d114fb2a7 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -399,9 +399,9 @@ mod test { } #[test] fn test_block_doc_comment_3() { - let comment = "/**\n let a: *int;\n *a = 5;\n*/"; + let comment = "/**\n let a: *i32;\n *a = 5;\n*/"; let stripped = strip_doc_comment_decoration(comment); - assert_eq!(stripped, " let a: *int;\n *a = 5;"); + assert_eq!(stripped, " let a: *i32;\n *a = 5;"); } #[test] fn test_block_doc_comment_4() { diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 1a29766cee6..33da7c160d9 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -855,7 +855,7 @@ mod test { #[test] fn string_to_tts_1 () { - let tts = string_to_tts("fn a (b : int) { b; }".to_string()); + let tts = string_to_tts("fn a (b : i32) { b; }".to_string()); assert_eq!(json::encode(&tts), "[\ {\ @@ -919,7 +919,7 @@ mod test { {\ \"variant\":\"Ident\",\ \"fields\":[\ - \"int\",\ + \"i32\",\ \"Plain\"\ ]\ }\ @@ -1031,8 +1031,8 @@ mod test { // check the contents of the tt manually: #[test] fn parse_fundecl () { - // this test depends on the intern order of "fn" and "int" - assert!(string_to_item("fn a (b : int) { b; }".to_string()) == + // this test depends on the intern order of "fn" and "i32" + assert_eq!(string_to_item("fn a (b : i32) { b; }".to_string()), Some( P(ast::Item{ident:str_to_ident("a"), attrs:Vec::new(), @@ -1046,7 +1046,7 @@ mod test { segments: vec!( ast::PathSegment { identifier: - str_to_ident("int"), + str_to_ident("i32"), parameters: ast::PathParameters::none(), } ), diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 9822dcef0c0..920cfeb3693 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -1499,7 +1499,7 @@ impl<'a> Parser<'a> { self.expect(&token::OpenDelim(token::Bracket)); let t = self.parse_ty_sum(); 
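The comment-stripping test above now uses `*i32` in its fixture. Below is a minimal sketch of the behaviour it checks — dropping the `/** ... */` frame and any blank first/last lines — written as a stand-alone function rather than libsyntax's real `strip_doc_comment_decoration`.

```rust
fn strip_block_doc(comment: &str) -> String {
    let inner = &comment[3..comment.len() - 2]; // drop "/**" and "*/"
    let lines: Vec<&str> = inner.lines().collect();
    // vertical trim: skip blank lines at the start and end
    let start = lines.iter().position(|l| !l.trim().is_empty()).unwrap_or(0);
    let end = lines.iter().rposition(|l| !l.trim().is_empty()).map_or(0, |i| i + 1);
    lines[start..end].join("\n")
}

fn main() {
    let comment = "/**\n let a: *i32;\n *a = 5;\n*/";
    assert_eq!(strip_block_doc(comment), " let a: *i32;\n *a = 5;");
}
```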
- // Parse the `; e` in `[ int; e ]` + // Parse the `; e` in `[ i32; e ]` // where `e` is a const expression let t = match self.maybe_parse_fixed_length_of_vec() { None => TyVec(t), @@ -4803,7 +4803,7 @@ impl<'a> Parser<'a> { Some(attrs)) } - /// Parse a::B + /// Parse a::B fn parse_trait_ref(&mut self) -> TraitRef { ast::TraitRef { path: self.parse_path(LifetimeAndTypesWithoutColons), @@ -4822,7 +4822,7 @@ impl<'a> Parser<'a> { } } - /// Parse for<'l> a::B + /// Parse for<'l> a::B fn parse_poly_trait_ref(&mut self) -> PolyTraitRef { let lifetime_defs = self.parse_late_bound_lifetime_defs(); -- cgit 1.4.1-3-g733a5 From 591337431df612dd4e0df8d46b6291358085ac7c Mon Sep 17 00:00:00 2001 From: Paul Collier Date: Sun, 18 Jan 2015 00:18:19 +0000 Subject: libsyntax: int types -> isize --- src/libsyntax/ext/build.rs | 4 +-- src/libsyntax/parse/lexer/comments.rs | 2 +- src/libsyntax/parse/lexer/mod.rs | 2 +- src/libsyntax/parse/parser.rs | 10 +++---- src/libsyntax/print/pp.rs | 50 +++++++++++++++++------------------ src/libsyntax/print/pprust.rs | 4 +-- src/libsyntax/util/small_vector.rs | 8 +++--- 7 files changed, 40 insertions(+), 40 deletions(-) (limited to 'src/libsyntax/parse/parser.rs') diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 3bac972dbb5..8773c0f2f79 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -135,7 +135,7 @@ pub trait AstBuilder { fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> P; fn expr_usize(&self, span: Span, i: usize) -> P; - fn expr_int(&self, sp: Span, i: int) -> P; + fn expr_int(&self, sp: Span, i: isize) -> P; fn expr_u8(&self, sp: Span, u: u8) -> P; fn expr_bool(&self, sp: Span, value: bool) -> P; @@ -644,7 +644,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn expr_usize(&self, span: Span, i: usize) -> P { self.expr_lit(span, ast::LitInt(i as u64, ast::UnsignedIntLit(ast::TyUs(false)))) } - fn expr_int(&self, sp: Span, i: int) -> P { + fn expr_int(&self, sp: Span, i: isize) -> P { self.expr_lit(sp, ast::LitInt(i as u64, ast::SignedIntLit(ast::TyIs(false), ast::Sign::new(i)))) } diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index 58d114fb2a7..ca9091856d6 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -267,7 +267,7 @@ fn read_block_comment(rdr: &mut StringReader, assert!(!curr_line.contains_char('\n')); lines.push(curr_line); } else { - let mut level: int = 1; + let mut level: isize = 1; while level > 0 { debug!("=== block comment level {}", level); if rdr.is_eof() { diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index dfb0230cf34..2fcf43f9ead 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -519,7 +519,7 @@ impl<'a> StringReader<'a> { let is_doc_comment = self.curr_is('*') || self.curr_is('!'); let start_bpos = self.last_pos - BytePos(2); - let mut level: int = 1; + let mut level: isize = 1; let mut has_cr = false; while level > 0 { if self.is_eof() { diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 920cfeb3693..88825cb3f34 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -290,8 +290,8 @@ pub struct Parser<'a> { /// the previous token or None (only stashed sometimes). 
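The `[ i32; e ]` comment above refers to fixed-length array types, where `e` is a const expression. In current Rust that looks like:

```rust
fn main() {
    const LEN: usize = 2 + 2;          // `e` can be any constant expression
    let xs: [i32; LEN] = [1, 2, 3, 4]; // the `; e` part fixes the length
    assert_eq!(xs.len(), 4);
}
```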
pub last_token: Option>, pub buffer: [TokenAndSpan; 4], - pub buffer_start: int, - pub buffer_end: int, + pub buffer_start: isize, + pub buffer_end: isize, pub tokens_consumed: usize, pub restrictions: Restrictions, pub quote_depth: usize, // not (yet) related to the quasiquoter @@ -934,7 +934,7 @@ impl<'a> Parser<'a> { // Avoid token copies with `replace`. let buffer_start = self.buffer_start as usize; let next_index = (buffer_start + 1) & 3 as usize; - self.buffer_start = next_index as int; + self.buffer_start = next_index as isize; let placeholder = TokenAndSpan { tok: token::Underscore, @@ -966,7 +966,7 @@ impl<'a> Parser<'a> { self.token = next; self.span = mk_sp(lo, hi); } - pub fn buffer_length(&mut self) -> int { + pub fn buffer_length(&mut self) -> isize { if self.buffer_start <= self.buffer_end { return self.buffer_end - self.buffer_start; } @@ -975,7 +975,7 @@ impl<'a> Parser<'a> { pub fn look_ahead(&mut self, distance: usize, f: F) -> R where F: FnOnce(&token::Token) -> R, { - let dist = distance as int; + let dist = distance as isize; while self.buffer_length() < dist { self.buffer[self.buffer_end as usize] = self.reader.real_token(); self.buffer_end = (self.buffer_end + 1) & 3; diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 0cfc3d38e15..ca99fb2ef89 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -71,19 +71,19 @@ pub enum Breaks { #[derive(Clone, Copy)] pub struct BreakToken { - offset: int, - blank_space: int + offset: isize, + blank_space: isize } #[derive(Clone, Copy)] pub struct BeginToken { - offset: int, + offset: isize, breaks: Breaks } #[derive(Clone)] pub enum Token { - String(String, int), + String(String, isize), Break(BreakToken), Begin(BeginToken), End, @@ -122,7 +122,7 @@ pub fn tok_str(token: &Token) -> String { } pub fn buf_str(toks: &[Token], - szs: &[int], + szs: &[isize], left: usize, right: usize, lim: usize) @@ -155,11 +155,11 @@ pub enum PrintStackBreak { #[derive(Copy)] pub struct PrintStackElem { - offset: int, + offset: isize, pbreak: PrintStackBreak } -static SIZE_INFINITY: int = 0xffff; +static SIZE_INFINITY: isize = 0xffff; pub fn mk_printer(out: Box, linewidth: usize) -> Printer { // Yes 3, it makes the ring buffers big enough to never @@ -167,13 +167,13 @@ pub fn mk_printer(out: Box, linewidth: usize) -> Printer { let n: usize = 3 * linewidth; debug!("mk_printer {}", linewidth); let token: Vec = repeat(Token::Eof).take(n).collect(); - let size: Vec = repeat(0i).take(n).collect(); + let size: Vec = repeat(0i).take(n).collect(); let scan_stack: Vec = repeat(0us).take(n).collect(); Printer { out: out, buf_len: n, - margin: linewidth as int, - space: linewidth as int, + margin: linewidth as isize, + space: linewidth as isize, left: 0, right: 0, token: token, @@ -269,9 +269,9 @@ pub struct Printer { pub out: Box, buf_len: usize, /// Width of lines we're constrained to - margin: int, + margin: isize, /// Number of spaces left on line - space: int, + space: isize, /// Index of left side of input stream left: usize, /// Index of right side of input stream @@ -279,11 +279,11 @@ pub struct Printer { /// Ring-buffer stream goes through token: Vec , /// Ring-buffer of calculated sizes - size: Vec , + size: Vec , /// Running size of stream "...left" - left_total: int, + left_total: isize, /// Running size of stream "...right" - right_total: int, + right_total: isize, /// Pseudo-stack, really a ring too. 
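A note on why the printer keeps some quantities signed (`isize`) while buffer indices stay `usize`: remaining line space may conceptually go negative when a token overflows the margin, but a ring-buffer index never can. A toy illustration with invented numbers:

```rust
fn main() {
    let margin: isize = 10;
    let mut space: isize = margin;
    for token_len in [4usize, 5, 6] {
        space -= token_len as isize;   // may legitimately drop below zero
        println!("after a {}-char token, space = {}", token_len, space);
    }

    // indices into the ring buffer, by contrast, are always in-bounds usizes
    let right: usize = 7;
    let buf_len: usize = 8;
    assert_eq!((right + 1) % buf_len, 0);
}
```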
Holds the /// primary-ring-buffers index of the Begin that started the /// current block, possibly with the most recent Break after that @@ -300,7 +300,7 @@ pub struct Printer { /// Stack of blocks-in-progress being flushed by print print_stack: Vec , /// Buffered indentation to avoid writing trailing whitespace - pending_indentation: int, + pending_indentation: isize, } impl Printer { @@ -479,7 +479,7 @@ impl Printer { Ok(()) } - pub fn check_stack(&mut self, k: int) { + pub fn check_stack(&mut self, k: isize) { if !self.scan_stack_empty { let x = self.scan_top(); match self.token[x] { @@ -506,14 +506,14 @@ impl Printer { } } } - pub fn print_newline(&mut self, amount: int) -> io::IoResult<()> { + pub fn print_newline(&mut self, amount: isize) -> io::IoResult<()> { debug!("NEWLINE {}", amount); let ret = write!(self.out, "\n"); self.pending_indentation = 0; self.indent(amount); return ret; } - pub fn indent(&mut self, amount: int) { + pub fn indent(&mut self, amount: isize) { debug!("INDENT {}", amount); self.pending_indentation += amount; } @@ -536,7 +536,7 @@ impl Printer { } write!(self.out, "{}", s) } - pub fn print(&mut self, token: Token, l: int) -> io::IoResult<()> { + pub fn print(&mut self, token: Token, l: isize) -> io::IoResult<()> { debug!("print {} {} (remaining line space={})", tok_str(&token), l, self.space); debug!("{}", buf_str(&self.token[], @@ -622,7 +622,7 @@ impl Printer { // "raw box" pub fn rbox(p: &mut Printer, indent: usize, b: Breaks) -> io::IoResult<()> { p.pretty_print(Token::Begin(BeginToken { - offset: indent as int, + offset: indent as isize, breaks: b })) } @@ -635,10 +635,10 @@ pub fn cbox(p: &mut Printer, indent: usize) -> io::IoResult<()> { rbox(p, indent, Breaks::Consistent) } -pub fn break_offset(p: &mut Printer, n: usize, off: int) -> io::IoResult<()> { +pub fn break_offset(p: &mut Printer, n: usize, off: isize) -> io::IoResult<()> { p.pretty_print(Token::Break(BreakToken { offset: off, - blank_space: n as int + blank_space: n as isize })) } @@ -651,7 +651,7 @@ pub fn eof(p: &mut Printer) -> io::IoResult<()> { } pub fn word(p: &mut Printer, wrd: &str) -> io::IoResult<()> { - p.pretty_print(Token::String(/* bad */ wrd.to_string(), wrd.len() as int)) + p.pretty_print(Token::String(/* bad */ wrd.to_string(), wrd.len() as isize)) } pub fn huge_word(p: &mut Printer, wrd: &str) -> io::IoResult<()> { @@ -678,7 +678,7 @@ pub fn hardbreak(p: &mut Printer) -> io::IoResult<()> { spaces(p, SIZE_INFINITY as usize) } -pub fn hardbreak_tok_offset(off: int) -> Token { +pub fn hardbreak_tok_offset(off: isize) -> Token { Token::Break(BreakToken {offset: off, blank_space: SIZE_INFINITY}) } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 83ac8432f98..c537ca60b18 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -520,7 +520,7 @@ impl<'a> State<'a> { pub fn bclose_maybe_open (&mut self, span: codemap::Span, indented: usize, close_box: bool) -> IoResult<()> { try!(self.maybe_print_comment(span.hi)); - try!(self.break_offset_if_not_bol(1u, -(indented as int))); + try!(self.break_offset_if_not_bol(1u, -(indented as isize))); try!(word(&mut self.s, "}")); if close_box { try!(self.end()); // close the outer-box @@ -568,7 +568,7 @@ impl<'a> State<'a> { Ok(()) } pub fn break_offset_if_not_bol(&mut self, n: usize, - off: int) -> IoResult<()> { + off: isize) -> IoResult<()> { if !self.is_bol() { break_offset(&mut self.s, n, off) } else { diff --git a/src/libsyntax/util/small_vector.rs 
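The `pending_indentation: isize` field above buffers indentation so that spaces are only written out when a real token follows, keeping trailing whitespace out of the output. A toy version of that idea (the `Toy` type is invented for illustration):

```rust
struct Toy {
    out: String,
    pending_indentation: isize,
}

impl Toy {
    fn newline(&mut self, indent: isize) {
        self.out.push('\n');
        self.pending_indentation = indent;   // don't emit the spaces yet
    }
    fn word(&mut self, s: &str) {
        for _ in 0..self.pending_indentation.max(0) {
            self.out.push(' ');
        }
        self.pending_indentation = 0;
        self.out.push_str(s);
    }
}

fn main() {
    let mut p = Toy { out: String::new(), pending_indentation: 0 };
    p.word("fn main() {");
    p.newline(4);
    p.word("body();");
    p.newline(0);
    p.word("}");
    assert_eq!(p.out, "fn main() {\n    body();\n}");
}
```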
b/src/libsyntax/util/small_vector.rs index a4494a98864..da358b70225 100644 --- a/src/libsyntax/util/small_vector.rs +++ b/src/libsyntax/util/small_vector.rs @@ -191,7 +191,7 @@ mod test { #[test] fn test_len() { - let v: SmallVector = SmallVector::zero(); + let v: SmallVector = SmallVector::zero(); assert_eq!(0, v.len()); assert_eq!(1, SmallVector::one(1i).len()); @@ -214,7 +214,7 @@ mod test { #[test] fn test_from_iter() { - let v: SmallVector = (vec!(1i, 2, 3)).into_iter().collect(); + let v: SmallVector = (vec![1is, 2, 3]).into_iter().collect(); assert_eq!(3, v.len()); assert_eq!(&1, v.get(0)); assert_eq!(&2, v.get(1)); @@ -224,7 +224,7 @@ mod test { #[test] fn test_move_iter() { let v = SmallVector::zero(); - let v: Vec = v.into_iter().collect(); + let v: Vec = v.into_iter().collect(); assert_eq!(Vec::new(), v); let v = SmallVector::one(1i); @@ -237,7 +237,7 @@ mod test { #[test] #[should_fail] fn test_expect_one_zero() { - let _: int = SmallVector::zero().expect_one(""); + let _: isize = SmallVector::zero().expect_one(""); } #[test] -- cgit 1.4.1-3-g733a5 From 3c32cd1be27f321658382e39d34f5d993d99ae8b Mon Sep 17 00:00:00 2001 From: Paul Collier Date: Sun, 18 Jan 2015 00:41:56 +0000 Subject: libsyntax: 0u -> 0us, 0i -> 0is --- src/libsyntax/ast_util.rs | 18 +++++----- src/libsyntax/codemap.rs | 18 +++++----- src/libsyntax/diagnostic.rs | 32 ++++++++--------- src/libsyntax/ext/deriving/generic/mod.rs | 16 ++++----- src/libsyntax/ext/expand.rs | 4 +-- src/libsyntax/ext/quote.rs | 2 +- src/libsyntax/ext/tt/macro_parser.rs | 34 +++++++++--------- src/libsyntax/ext/tt/transcribe.rs | 2 +- src/libsyntax/parse/lexer/comments.rs | 8 ++--- src/libsyntax/parse/lexer/mod.rs | 32 ++++++++--------- src/libsyntax/parse/mod.rs | 12 +++---- src/libsyntax/parse/parser.rs | 18 +++++----- src/libsyntax/print/pp.rs | 34 +++++++++--------- src/libsyntax/print/pprust.rs | 58 +++++++++++++++---------------- src/libsyntax/test.rs | 4 +-- src/libsyntax/util/small_vector.rs | 20 +++++------ 16 files changed, 156 insertions(+), 156 deletions(-) (limited to 'src/libsyntax/parse/parser.rs') diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 73f84f4fbe7..34fd498322c 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -322,15 +322,15 @@ pub fn struct_field_visibility(field: ast::StructField) -> Visibility { pub fn operator_prec(op: ast::BinOp) -> usize { match op { // 'as' sits here with 12 - BiMul | BiDiv | BiRem => 11u, - BiAdd | BiSub => 10u, - BiShl | BiShr => 9u, - BiBitAnd => 8u, - BiBitXor => 7u, - BiBitOr => 6u, - BiLt | BiLe | BiGe | BiGt | BiEq | BiNe => 3u, - BiAnd => 2u, - BiOr => 1u + BiMul | BiDiv | BiRem => 11us, + BiAdd | BiSub => 10us, + BiShl | BiShr => 9us, + BiBitAnd => 8us, + BiBitXor => 7us, + BiBitOr => 6us, + BiLt | BiLe | BiGe | BiGt | BiEq | BiNe => 3us, + BiAnd => 2us, + BiOr => 1us } } diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 07544f35b19..a5e10f42750 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -430,7 +430,7 @@ impl CodeMap { let lo = self.lookup_char_pos(sp.lo); let hi = self.lookup_char_pos(sp.hi); let mut lines = Vec::new(); - for i in range(lo.line - 1u, hi.line as usize) { + for i in range(lo.line - 1us, hi.line as usize) { lines.push(i); }; FileLines {file: lo.file, lines: lines} @@ -498,10 +498,10 @@ impl CodeMap { let files = self.files.borrow(); let files = &*files; let len = files.len(); - let mut a = 0u; + let mut a = 0us; let mut b = len; - while b - a > 1u { - let m = (a + b) / 
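This commit rewrites literal suffixes from the pre-1.0 spellings `0u`/`0i` to the then-current `0us`/`0is`. Neither form survives today; modern Rust writes the same literals with the full-width suffixes:

```rust
fn main() {
    let a = 11usize;   // was `11u`, then `11us`
    let b = -3isize;   // was `-3i`, then `-3is`
    assert_eq!(a as isize + b, 8);
}
```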
2u; + while b - a > 1us { + let m = (a + b) / 2us; if files[m].start_pos > pos { b = m; } else { @@ -537,12 +537,12 @@ impl CodeMap { let files = self.files.borrow(); let f = (*files)[idx].clone(); - let mut a = 0u; + let mut a = 0us; { let lines = f.lines.borrow(); let mut b = lines.len(); - while b - a > 1u { - let m = (a + b) / 2u; + while b - a > 1us { + let m = (a + b) / 2us; if (*lines)[m] > pos { b = m; } else { a = m; } } } @@ -551,7 +551,7 @@ impl CodeMap { fn lookup_pos(&self, pos: BytePos) -> Loc { let FileMapAndLine {fm: f, line: a} = self.lookup_line(pos); - let line = a + 1u; // Line numbers start at 1 + let line = a + 1us; // Line numbers start at 1 let chpos = self.bytepos_to_file_charpos(pos); let linebpos = (*f.lines.borrow())[a]; let linechpos = self.bytepos_to_file_charpos(linebpos); @@ -762,7 +762,7 @@ mod test { assert_eq!(file_lines.file.name, "blork.rs"); assert_eq!(file_lines.lines.len(), 1); - assert_eq!(file_lines.lines[0], 1u); + assert_eq!(file_lines.lines[0], 1us); } #[test] diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index c86991b6e40..6de466ea9bd 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -26,7 +26,7 @@ use term::WriterWrapper; use term; /// maximum number of lines we will print for each error; arbitrary. -static MAX_LINES: usize = 6u; +static MAX_LINES: usize = 6us; #[derive(Clone, Copy)] pub enum RenderSpan { @@ -151,20 +151,20 @@ impl Handler { self.bump_err_count(); } pub fn bump_err_count(&self) { - self.err_count.set(self.err_count.get() + 1u); + self.err_count.set(self.err_count.get() + 1us); } pub fn err_count(&self) -> usize { self.err_count.get() } pub fn has_errors(&self) -> bool { - self.err_count.get()> 0u + self.err_count.get() > 0us } pub fn abort_if_errors(&self) { let s; match self.err_count.get() { - 0u => return, - 1u => s = "aborting due to previous error".to_string(), - _ => { + 0us => return, + 1us => s = "aborting due to previous error".to_string(), + _ => { s = format!("aborting due to {} previous errors", self.err_count.get()); } @@ -448,7 +448,7 @@ fn highlight_lines(err: &mut EmitterWriter, let mut elided = false; let mut display_lines = &lines.lines[]; if display_lines.len() > MAX_LINES { - display_lines = &display_lines[0u..MAX_LINES]; + display_lines = &display_lines[0us..MAX_LINES]; elided = true; } // Print the offending lines @@ -459,32 +459,32 @@ fn highlight_lines(err: &mut EmitterWriter, } } if elided { - let last_line = display_lines[display_lines.len() - 1u]; - let s = format!("{}:{} ", fm.name, last_line + 1u); + let last_line = display_lines[display_lines.len() - 1us]; + let s = format!("{}:{} ", fm.name, last_line + 1us); try!(write!(&mut err.dst, "{0:1$}...\n", "", s.len())); } // FIXME (#3260) // If there's one line at fault we can easily point to the problem - if lines.lines.len() == 1u { + if lines.lines.len() == 1us { let lo = cm.lookup_char_pos(sp.lo); - let mut digits = 0u; - let mut num = (lines.lines[0] + 1u) / 10u; + let mut digits = 0us; + let mut num = (lines.lines[0] + 1us) / 10us; // how many digits must be indent past? 
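The `lookup` code above is a hand-rolled binary search over recorded start positions, now done entirely in `usize`. A self-contained sketch of the same search, assuming a sorted, non-empty slice of line-start offsets:

```rust
fn lookup_line(lines: &[usize], pos: usize) -> usize {
    let (mut a, mut b) = (0usize, lines.len());
    while b - a > 1 {
        let m = (a + b) / 2;
        if lines[m] > pos { b = m } else { a = m }
    }
    a   // index of the last line starting at or before `pos`
}

fn main() {
    let line_starts = [0usize, 10, 25, 40];
    assert_eq!(lookup_line(&line_starts, 0), 0);
    assert_eq!(lookup_line(&line_starts, 12), 1);
    assert_eq!(lookup_line(&line_starts, 39), 2);
    assert_eq!(lookup_line(&line_starts, 40), 3);
}
```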
- while num > 0u { num /= 10u; digits += 1u; } + while num > 0us { num /= 10us; digits += 1us; } // indent past |name:## | and the 0-offset column location - let left = fm.name.len() + digits + lo.col.to_usize() + 3u; + let left = fm.name.len() + digits + lo.col.to_usize() + 3us; let mut s = String::new(); // Skip is the number of characters we need to skip because they are // part of the 'filename:line ' part of the previous line. - let skip = fm.name.len() + digits + 3u; + let skip = fm.name.len() + digits + 3us; for _ in range(0, skip) { s.push(' '); } if let Some(orig) = fm.get_line(lines.lines[0]) { - for pos in range(0u, left - skip) { + for pos in range(0us, left - skip) { let cur_char = orig.as_bytes()[pos] as char; // Whenever a tab occurs on the previous line, we insert one on // the error-point-squiggly-line as well (instead of a space). diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs index ca3c1d36fba..b77c203acfb 100644 --- a/src/libsyntax/ext/deriving/generic/mod.rs +++ b/src/libsyntax/ext/deriving/generic/mod.rs @@ -748,7 +748,7 @@ impl<'a> MethodDef<'a> { let mut raw_fields = Vec::new(); // ~[[fields of self], // [fields of next Self arg], [etc]] let mut patterns = Vec::new(); - for i in range(0u, self_args.len()) { + for i in range(0us, self_args.len()) { let struct_path= cx.path(DUMMY_SP, vec!( type_ident )); let (pat, ident_expr) = trait_.create_struct_pattern(cx, @@ -837,8 +837,8 @@ impl<'a> MethodDef<'a> { /// (&A2(ref __self_0), /// &A2(ref __arg_1_0)) => (*__self_0).eq(&(*__arg_1_0)), /// _ => { - /// let __self_vi = match *self { A1(..) => 0u, A2(..) => 1u }; - /// let __arg_1_vi = match *__arg_1 { A1(..) => 0u, A2(..) => 1u }; + /// let __self_vi = match *self { A1(..) => 0us, A2(..) => 1us }; + /// let __arg_1_vi = match *__arg_1 { A1(..) => 0us, A2(..) => 1us }; /// false /// } /// } @@ -882,8 +882,8 @@ impl<'a> MethodDef<'a> { /// (Variant2, Variant2, Variant2) => ... // delegate Matching on Variant2 /// ... /// _ => { - /// let __this_vi = match this { Variant1 => 0u, Variant2 => 1u, ... }; - /// let __that_vi = match that { Variant1 => 0u, Variant2 => 1u, ... }; + /// let __this_vi = match this { Variant1 => 0us, Variant2 => 1us, ... }; + /// let __that_vi = match that { Variant1 => 0us, Variant2 => 1us, ... }; /// ... // catch-all remainder can inspect above variant index values. /// } /// } @@ -1045,13 +1045,13 @@ impl<'a> MethodDef<'a> { // // ``` // let __self0_vi = match self { - // A => 0u, B(..) => 1u, C(..) => 2u + // A => 0us, B(..) => 1us, C(..) => 2us // }; // let __self1_vi = match __arg1 { - // A => 0u, B(..) => 1u, C(..) => 2u + // A => 0us, B(..) => 1us, C(..) => 2us // }; // let __self2_vi = match __arg2 { - // A => 0u, B(..) => 1u, C(..) => 2u + // A => 0us, B(..) => 1us, C(..) => 2us // }; // ``` let mut index_let_stmts: Vec> = Vec::new(); diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 4f614c1a937..9aa602b39b4 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -273,7 +273,7 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, // in this file. 
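The digit-counting loop above works out how wide the `name:## ` prefix is so the caret line can be indented past it. The same loop with today's `usize` (note the diagnostic code first divides `(line + 1) / 10`, so its count ends up one less than the full digit count):

```rust
fn digits(mut num: usize) -> usize {
    let mut digits = 0;
    while num > 0 {
        num /= 10;
        digits += 1;
    }
    digits
}

fn main() {
    assert_eq!(digits(0), 0);      // the diagnostic code pre-divides by 10
    assert_eq!(digits(7), 1);
    assert_eq!(digits(4242), 4);
}
```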
// Token-tree macros: MacInvocTT(pth, tts, _) => { - if pth.segments.len() > 1u { + if pth.segments.len() > 1us { fld.cx.span_err(pth.span, "expected macro name without module \ separators"); @@ -844,7 +844,7 @@ fn expand_pat(p: P, fld: &mut MacroExpander) -> P { }, _ => unreachable!() }; - if pth.segments.len() > 1u { + if pth.segments.len() > 1us { fld.cx.span_err(pth.span, "expected macro name without module separators"); return DummyResult::raw_pat(span); } diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index a5885ac6c5d..5339c3d77c6 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -716,7 +716,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) // try removing it when enough of them are gone. let mut p = cx.new_parser_from_tts(tts); - p.quote_depth += 1u; + p.quote_depth += 1us; let cx_expr = p.parse_expr(); if !p.eat(&token::Comma) { diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 75d904a4632..d115f2ed620 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -171,11 +171,11 @@ pub fn initial_matcher_pos(ms: Rc>, sep: Option, lo: ByteP stack: vec![], top_elts: TtSeq(ms), sep: sep, - idx: 0u, + idx: 0us, up: None, matches: matches, - match_lo: 0u, - match_cur: 0u, + match_lo: 0us, + match_cur: 0us, match_hi: match_idx_hi, sp_lo: lo } @@ -238,7 +238,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc]) } } let mut ret_val = HashMap::new(); - let mut idx = 0u; + let mut idx = 0us; for m in ms.iter() { n_rec(p_s, m, res, &mut ret_val, &mut idx) } ret_val } @@ -383,7 +383,7 @@ pub fn parse(sess: &ParseSess, if seq.op == ast::ZeroOrMore { let mut new_ei = ei.clone(); new_ei.match_cur += seq.num_captures; - new_ei.idx += 1u; + new_ei.idx += 1us; //we specifically matched zero repeats. for idx in range(ei.match_cur, ei.match_cur + seq.num_captures) { (&mut new_ei.matches[idx]).push(Rc::new(MatchedSeq(vec![], sp))); @@ -398,7 +398,7 @@ pub fn parse(sess: &ParseSess, cur_eis.push(box MatcherPos { stack: vec![], sep: seq.separator.clone(), - idx: 0u, + idx: 0us, matches: matches, match_lo: ei_t.match_cur, match_cur: ei_t.match_cur, @@ -442,20 +442,20 @@ pub fn parse(sess: &ParseSess, /* error messages here could be improved with links to orig. 
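The expansion code above rejects macro invocations whose path has more than one segment ("expected macro name without module separators"). A hedged sketch of that check over a plain string path — libsyntax of course works on `ast::Path`, not strings:

```rust
fn check_macro_name(path: &str) -> Result<&str, &'static str> {
    let segments: Vec<&str> = path.split("::").collect();
    if segments.len() > 1 {
        Err("expected macro name without module separators")
    } else {
        Ok(segments[0])
    }
}

fn main() {
    assert_eq!(check_macro_name("vec"), Ok("vec"));
    assert!(check_macro_name("std::vec").is_err());
}
```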
rules */ if token_name_eq(&tok, &token::Eof) { - if eof_eis.len() == 1u { + if eof_eis.len() == 1us { let mut v = Vec::new(); for dv in (&mut eof_eis[0]).matches.iter_mut() { v.push(dv.pop().unwrap()); } return Success(nameize(sess, ms, &v[])); - } else if eof_eis.len() > 1u { + } else if eof_eis.len() > 1us { return Error(sp, "ambiguity: multiple successful parses".to_string()); } else { return Failure(sp, "unexpected end of macro invocation".to_string()); } } else { - if (bb_eis.len() > 0u && next_eis.len() > 0u) - || bb_eis.len() > 1u { + if (bb_eis.len() > 0us && next_eis.len() > 0us) + || bb_eis.len() > 1us { let nts = bb_eis.iter().map(|ei| { match ei.top_elts.get_tt(ei.idx) { TtToken(_, MatchNt(bind, name, _, _)) => { @@ -469,12 +469,12 @@ pub fn parse(sess: &ParseSess, "local ambiguity: multiple parsing options: \ built-in NTs {} or {} other options.", nts, next_eis.len()).to_string()); - } else if bb_eis.len() == 0u && next_eis.len() == 0u { + } else if bb_eis.len() == 0us && next_eis.len() == 0us { return Failure(sp, format!("no rules expected the token `{}`", pprust::token_to_string(&tok)).to_string()); - } else if next_eis.len() > 0u { + } else if next_eis.len() > 0us { /* Now process the next token */ - while next_eis.len() > 0u { + while next_eis.len() > 0us { cur_eis.push(next_eis.pop().unwrap()); } rdr.next_token(); @@ -488,7 +488,7 @@ pub fn parse(sess: &ParseSess, let match_cur = ei.match_cur; (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal( parse_nt(&mut rust_parser, name_string.get())))); - ei.idx += 1u; + ei.idx += 1us; ei.match_cur += 1; } _ => panic!() @@ -501,16 +501,16 @@ pub fn parse(sess: &ParseSess, } } - assert!(cur_eis.len() > 0u); + assert!(cur_eis.len() > 0us); } } pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal { match name { "tt" => { - p.quote_depth += 1u; //but in theory, non-quoted tts might be useful + p.quote_depth += 1us; //but in theory, non-quoted tts might be useful let res = token::NtTT(P(p.parse_token_tree())); - p.quote_depth -= 1u; + p.quote_depth -= 1us; return res; } _ => {} diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 7936b9fcfc7..0bf20b8f3e1 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -223,7 +223,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { r.repeat_len.pop(); } } else { /* repeat */ - *r.repeat_idx.last_mut().unwrap() += 1u; + *r.repeat_idx.last_mut().unwrap() += 1us; r.stack.last_mut().unwrap().idx = 0; match r.stack.last().unwrap().sep.clone() { Some(tk) => { diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index ca9091856d6..2799696e8eb 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -62,7 +62,7 @@ pub fn doc_comment_style(comment: &str) -> ast::AttrStyle { pub fn strip_doc_comment_decoration(comment: &str) -> String { /// remove whitespace-only lines from the start/end of lines fn vertical_trim(lines: Vec ) -> Vec { - let mut i = 0u; + let mut i = 0us; let mut j = lines.len(); // first line of all-stars should be omitted if lines.len() > 0 && @@ -132,7 +132,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { } if comment.starts_with("/*") { - let lines = comment[3u..(comment.len() - 2u)] + let lines = comment[3us..(comment.len() - 2us)] .lines_any() .map(|s| s.to_string()) .collect:: >(); @@ -158,7 +158,7 @@ fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec) { fn 
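The matcher above special-cases `$(...)*` repetitions that match zero times. A small `macro_rules!` example that exercises exactly that zero-or-more case, counting matched repetitions with `usize` arithmetic:

```rust
macro_rules! count {
    ($($x:expr),*) => {
        // one `1usize` per matched repetition, summed onto a zero base
        0usize $(+ { let _ = $x; 1usize })*
    };
}

fn main() {
    assert_eq!(count!(), 0);           // zero repeats: `$(...)*` matched nothing
    assert_eq!(count!(10, 20, 30), 3);
}
```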
consume_whitespace_counting_blank_lines(rdr: &mut StringReader, comments: &mut Vec) { while is_whitespace(rdr.curr) && !rdr.is_eof() { - if rdr.col == CharPos(0u) && rdr.curr_is('\n') { + if rdr.col == CharPos(0us) && rdr.curr_is('\n') { push_blank_line_comment(rdr, &mut *comments); } rdr.bump(); @@ -305,7 +305,7 @@ fn read_block_comment(rdr: &mut StringReader, let mut style = if code_to_the_left { Trailing } else { Isolated }; rdr.consume_non_eol_whitespace(); - if !rdr.is_eof() && !rdr.curr_is('\n') && lines.len() == 1u { + if !rdr.is_eof() && !rdr.curr_is('\n') && lines.len() == 1us { style = Mixed; } debug!("<<< block comment"); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 2fcf43f9ead..d18bf554975 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -279,7 +279,7 @@ impl<'a> StringReader<'a> { /// Converts CRLF to LF in the given string, raising an error on bare CR. fn translate_crlf<'b>(&self, start: BytePos, s: &'b str, errmsg: &'b str) -> CowString<'b> { - let mut i = 0u; + let mut i = 0us; while i < s.len() { let str::CharRange { ch, next } = s.char_range_at(i); if ch == '\r' { @@ -331,10 +331,10 @@ impl<'a> StringReader<'a> { let byte_offset_diff = next.next - current_byte_offset; self.pos = self.pos + Pos::from_usize(byte_offset_diff); self.curr = Some(next.ch); - self.col = self.col + CharPos(1u); + self.col = self.col + CharPos(1us); if last_char == '\n' { self.filemap.next_line(self.last_pos); - self.col = CharPos(0u); + self.col = CharPos(0us); } if byte_offset_diff > 1 { @@ -472,7 +472,7 @@ impl<'a> StringReader<'a> { cmap.files.borrow_mut().push(self.filemap.clone()); let loc = cmap.lookup_char_pos_adj(self.last_pos); debug!("Skipping a shebang"); - if loc.line == 1u && loc.col == CharPos(0u) { + if loc.line == 1us && loc.col == CharPos(0us) { // FIXME: Add shebang "token", return it let start = self.last_pos; while !self.curr_is('\n') && !self.is_eof() { self.bump(); } @@ -646,7 +646,7 @@ impl<'a> StringReader<'a> { /// Scan through any digits (base `radix`) or underscores, and return how /// many digits there were. fn scan_digits(&mut self, radix: usize) -> usize { - let mut len = 0u; + let mut len = 0us; loop { let c = self.curr; if c == Some('_') { debug!("skipping a _"); self.bump(); continue; } @@ -799,14 +799,14 @@ impl<'a> StringReader<'a> { if self.curr == Some('{') { self.scan_unicode_escape(delim) } else { - let res = self.scan_hex_digits(4u, delim, false); + let res = self.scan_hex_digits(4us, delim, false); let sp = codemap::mk_sp(escaped_pos, self.last_pos); self.old_escape_warning(sp); res } } 'U' if !ascii_only => { - let res = self.scan_hex_digits(8u, delim, false); + let res = self.scan_hex_digits(8us, delim, false); let sp = codemap::mk_sp(escaped_pos, self.last_pos); self.old_escape_warning(sp); res @@ -937,11 +937,11 @@ impl<'a> StringReader<'a> { /// error if it isn't. 
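`scan_digits` above walks over digits of a given radix, skipping `_` separators, and now returns its count as `usize`. A self-contained sketch of the same behaviour over a string slice rather than the lexer's character cursor:

```rust
fn scan_digits(s: &str, radix: u32) -> usize {
    let mut len = 0usize;
    for c in s.chars() {
        if c == '_' { continue; }                 // underscores are separators
        if c.to_digit(radix).is_none() { break; } // stop at the first non-digit
        len += 1;
    }
    len
}

fn main() {
    assert_eq!(scan_digits("1_000", 10), 4);
    assert_eq!(scan_digits("ff_u8", 16), 2);  // stops at the `u8` suffix
    assert_eq!(scan_digits("12abc", 10), 2);
}
```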
fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: usize) { match base { - 16u => self.err_span_(start_bpos, last_bpos, "hexadecimal float literal is not \ - supported"), - 8u => self.err_span_(start_bpos, last_bpos, "octal float literal is not supported"), - 2u => self.err_span_(start_bpos, last_bpos, "binary float literal is not supported"), - _ => () + 16us => self.err_span_(start_bpos, last_bpos, "hexadecimal float literal is not \ + supported"), + 8us => self.err_span_(start_bpos, last_bpos, "octal float literal is not supported"), + 2us => self.err_span_(start_bpos, last_bpos, "binary float literal is not supported"), + _ => () } } @@ -1189,7 +1189,7 @@ impl<'a> StringReader<'a> { 'r' => { let start_bpos = self.last_pos; self.bump(); - let mut hash_count = 0u; + let mut hash_count = 0us; while self.curr_is('#') { self.bump(); hash_count += 1; @@ -1374,7 +1374,7 @@ impl<'a> StringReader<'a> { fn scan_raw_byte_string(&mut self) -> token::Lit { let start_bpos = self.last_pos; self.bump(); - let mut hash_count = 0u; + let mut hash_count = 0us; while self.curr_is('#') { self.bump(); hash_count += 1; @@ -1616,9 +1616,9 @@ mod test { test!("1.0", Float, "1.0"); test!("1.0e10", Float, "1.0e10"); - assert_eq!(setup(&mk_sh(), "2u".to_string()).next_token().tok, + assert_eq!(setup(&mk_sh(), "2us".to_string()).next_token().tok, token::Literal(token::Integer(token::intern("2")), - Some(token::intern("u")))); + Some(token::intern("us")))); assert_eq!(setup(&mk_sh(), "r###\"raw\"###suffix".to_string()).next_token().tok, token::Literal(token::StrRaw(token::intern("raw"), 3), Some(token::intern("suffix")))); diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 33da7c160d9..29414ef94b5 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -181,7 +181,7 @@ pub fn parse_tts_from_source_str(name: String, name, source ); - p.quote_depth += 1u; + p.quote_depth += 1us; // right now this is re-creating the token trees from ... token trees. maybe_aborted(p.parse_all_token_trees(),p) } @@ -325,7 +325,7 @@ pub mod with_hygiene { name, source ); - p.quote_depth += 1u; + p.quote_depth += 1us; // right now this is re-creating the token trees from ... token trees. maybe_aborted(p.parse_all_token_trees(),p) } @@ -574,7 +574,7 @@ pub fn byte_lit(lit: &str) -> (u8, usize) { if lit.len() == 1 { (lit.as_bytes()[0], 1) } else { - assert!(lit.as_bytes()[0] == b'\\', err(0i)); + assert!(lit.as_bytes()[0] == b'\\', err(0is)); let b = match lit.as_bytes()[1] { b'"' => b'"', b'n' => b'\n', @@ -684,9 +684,9 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> match suffix { Some(suf) if looks_like_width_suffix(&['f'], suf) => { match base { - 16u => sd.span_err(sp, "hexadecimal float literal is not supported"), - 8u => sd.span_err(sp, "octal float literal is not supported"), - 2u => sd.span_err(sp, "binary float literal is not supported"), + 16us => sd.span_err(sp, "hexadecimal float literal is not supported"), + 8us => sd.span_err(sp, "octal float literal is not supported"), + 2us => sd.span_err(sp, "binary float literal is not supported"), _ => () } let ident = token::intern_and_get_ident(&*s); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 88825cb3f34..ea7c80d3fc4 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -767,7 +767,7 @@ impl<'a> Parser<'a> { // would encounter a `>` and stop. 
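`check_float_base` above rejects float literals written in hexadecimal, octal, or binary. Integer literals accept all three bases; floats must be decimal:

```rust
fn main() {
    let hex = 0xff_u32;     // base-16 integer: fine
    let oct = 0o17_u32;     // base-8 integer: fine
    let bin = 0b1010_u32;   // base-2 integer: fine
    let f = 2.5f64;         // float literals are decimal only
    assert_eq!(hex + oct + bin, 255 + 15 + 10);
    assert!(f > 2.0);
}
```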
This lets the parser handle trailing // commas in generic parameters, because it can stop either after // parsing a type or after parsing a comma. - for i in iter::count(0u, 1) { + for i in iter::count(0us, 1) { if self.check(&token::Gt) || self.token == token::BinOp(token::Shr) || self.token == token::Ge @@ -944,7 +944,7 @@ impl<'a> Parser<'a> { }; self.span = next.sp; self.token = next.tok; - self.tokens_consumed += 1u; + self.tokens_consumed += 1us; self.expected_tokens.clear(); // check after each token self.check_unknown_macro_variable(); @@ -2638,7 +2638,7 @@ impl<'a> Parser<'a> { } pub fn check_unknown_macro_variable(&mut self) { - if self.quote_depth == 0u { + if self.quote_depth == 0us { match self.token { token::SubstNt(name, _) => self.fatal(&format!("unknown macro variable `{}`", @@ -2707,7 +2707,7 @@ impl<'a> Parser<'a> { token_str)[]) }, /* we ought to allow different depths of unquotation */ - token::Dollar | token::SubstNt(..) if p.quote_depth > 0u => { + token::Dollar | token::SubstNt(..) if p.quote_depth > 0us => { p.parse_unquoted() } _ => { @@ -5079,7 +5079,7 @@ impl<'a> Parser<'a> { } } - if first && attrs_remaining_len > 0u { + if first && attrs_remaining_len > 0us { // We parsed attributes for the first item but didn't find it let last_span = self.last_span; self.span_err(last_span, @@ -5683,7 +5683,7 @@ impl<'a> Parser<'a> { return IoviItem(item); } if self.token.is_keyword(keywords::Unsafe) && - self.look_ahead(1u, |t| t.is_keyword(keywords::Trait)) + self.look_ahead(1us, |t| t.is_keyword(keywords::Trait)) { // UNSAFE TRAIT ITEM self.expect_keyword(keywords::Unsafe); @@ -5700,7 +5700,7 @@ impl<'a> Parser<'a> { return IoviItem(item); } if self.token.is_keyword(keywords::Unsafe) && - self.look_ahead(1u, |t| t.is_keyword(keywords::Impl)) + self.look_ahead(1us, |t| t.is_keyword(keywords::Impl)) { // IMPL ITEM self.expect_keyword(keywords::Unsafe); @@ -5731,7 +5731,7 @@ impl<'a> Parser<'a> { return IoviItem(item); } if self.token.is_keyword(keywords::Unsafe) - && self.look_ahead(1u, |t| *t != token::OpenDelim(token::Brace)) { + && self.look_ahead(1us, |t| *t != token::OpenDelim(token::Brace)) { // UNSAFE FUNCTION ITEM self.bump(); let abi = if self.eat_keyword(keywords::Extern) { @@ -6035,7 +6035,7 @@ impl<'a> Parser<'a> { } } } - let mut rename_to = path[path.len() - 1u]; + let mut rename_to = path[path.len() - 1us]; let path = ast::Path { span: mk_sp(lo, self.last_span.hi), global: false, diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index ca99fb2ef89..0b1bd282941 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -132,15 +132,15 @@ pub fn buf_str(toks: &[Token], let mut i = left; let mut l = lim; let mut s = string::String::from_str("["); - while i != right && l != 0u { - l -= 1u; + while i != right && l != 0us { + l -= 1us; if i != left { s.push_str(", "); } s.push_str(&format!("{}={}", szs[i], tok_str(&toks[i]))[]); - i += 1u; + i += 1us; i %= n; } s.push(']'); @@ -167,7 +167,7 @@ pub fn mk_printer(out: Box, linewidth: usize) -> Printer { let n: usize = 3 * linewidth; debug!("mk_printer {}", linewidth); let token: Vec = repeat(Token::Eof).take(n).collect(); - let size: Vec = repeat(0i).take(n).collect(); + let size: Vec = repeat(0is).take(n).collect(); let scan_stack: Vec = repeat(0us).take(n).collect(); Printer { out: out, @@ -326,8 +326,8 @@ impl Printer { if self.scan_stack_empty { self.left_total = 1; self.right_total = 1; - self.left = 0u; - self.right = 0u; + self.left = 0us; + self.right = 0us; } else { 
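The item parser above uses one token of lookahead to tell `unsafe trait`, `unsafe impl`, and `unsafe fn` items apart. A toy dispatch over a token slice with a `usize` lookahead distance — the `Tok` enum here is invented for illustration:

```rust
#[derive(PartialEq)]
enum Tok { Unsafe, Trait, Impl, Fn, Other }

fn classify(tokens: &[Tok]) -> &'static str {
    let look_ahead = |distance: usize| tokens.get(distance).unwrap_or(&Tok::Other);
    if tokens.first() == Some(&Tok::Unsafe) {
        match look_ahead(1) {
            Tok::Trait => "unsafe trait item",
            Tok::Impl => "unsafe impl item",
            _ => "unsafe fn (or error)",
        }
    } else {
        "something else"
    }
}

fn main() {
    assert_eq!(classify(&[Tok::Unsafe, Tok::Trait]), "unsafe trait item");
    assert_eq!(classify(&[Tok::Unsafe, Tok::Fn]), "unsafe fn (or error)");
}
```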
self.advance_right(); } debug!("pp Begin({})/buffer ~[{},{}]", b.offset, self.left, self.right); @@ -355,8 +355,8 @@ impl Printer { if self.scan_stack_empty { self.left_total = 1; self.right_total = 1; - self.left = 0u; - self.right = 0u; + self.left = 0us; + self.right = 0us; } else { self.advance_right(); } debug!("pp Break({})/buffer ~[{},{}]", b.offset, self.left, self.right); @@ -410,7 +410,7 @@ impl Printer { if self.scan_stack_empty { self.scan_stack_empty = false; } else { - self.top += 1u; + self.top += 1us; self.top %= self.buf_len; assert!((self.top != self.bottom)); } @@ -422,7 +422,7 @@ impl Printer { if self.top == self.bottom { self.scan_stack_empty = true; } else { - self.top += self.buf_len - 1u; self.top %= self.buf_len; + self.top += self.buf_len - 1us; self.top %= self.buf_len; } return x; } @@ -436,12 +436,12 @@ impl Printer { if self.top == self.bottom { self.scan_stack_empty = true; } else { - self.bottom += 1u; self.bottom %= self.buf_len; + self.bottom += 1us; self.bottom %= self.buf_len; } return x; } pub fn advance_right(&mut self) { - self.right += 1u; + self.right += 1us; self.right %= self.buf_len; assert!((self.right != self.left)); } @@ -471,7 +471,7 @@ impl Printer { break; } - self.left += 1u; + self.left += 1us; self.left %= self.buf_len; left_size = self.size[self.left]; @@ -520,7 +520,7 @@ impl Printer { pub fn get_top(&mut self) -> PrintStackElem { let print_stack = &mut self.print_stack; let n = print_stack.len(); - if n != 0u { + if n != 0us { (*print_stack)[n - 1] } else { PrintStackElem { @@ -565,7 +565,7 @@ impl Printer { Token::End => { debug!("print End -> pop End"); let print_stack = &mut self.print_stack; - assert!((print_stack.len() != 0u)); + assert!((print_stack.len() != 0us)); print_stack.pop().unwrap(); Ok(()) } @@ -667,11 +667,11 @@ pub fn spaces(p: &mut Printer, n: usize) -> io::IoResult<()> { } pub fn zerobreak(p: &mut Printer) -> io::IoResult<()> { - spaces(p, 0u) + spaces(p, 0us) } pub fn space(p: &mut Printer) -> io::IoResult<()> { - spaces(p, 1u) + spaces(p, 1us) } pub fn hardbreak(p: &mut Printer) -> io::IoResult<()> { diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index c537ca60b18..78a97f310d2 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -381,7 +381,7 @@ pub fn block_to_string(blk: &ast::Block) -> String { // containing cbox, will be closed by print-block at } try!(s.cbox(indent_unit)); // head-ibox, will be closed by print-block after { - try!(s.ibox(0u)); + try!(s.ibox(0us)); s.print_block(blk) }) } @@ -520,7 +520,7 @@ impl<'a> State<'a> { pub fn bclose_maybe_open (&mut self, span: codemap::Span, indented: usize, close_box: bool) -> IoResult<()> { try!(self.maybe_print_comment(span.hi)); - try!(self.break_offset_if_not_bol(1u, -(indented as isize))); + try!(self.break_offset_if_not_bol(1us, -(indented as isize))); try!(word(&mut self.s, "}")); if close_box { try!(self.end()); // close the outer-box @@ -595,7 +595,7 @@ impl<'a> State<'a> { pub fn commasep(&mut self, b: Breaks, elts: &[T], mut op: F) -> IoResult<()> where F: FnMut(&mut State, &T) -> IoResult<()>, { - try!(self.rbox(0u, b)); + try!(self.rbox(0us, b)); let mut first = true; for elt in elts.iter() { if first { first = false; } else { try!(self.word_space(",")); } @@ -613,13 +613,13 @@ impl<'a> State<'a> { F: FnMut(&mut State, &T) -> IoResult<()>, G: FnMut(&T) -> codemap::Span, { - try!(self.rbox(0u, b)); + try!(self.rbox(0us, b)); let len = elts.len(); - let mut i = 0u; + let mut i = 0us; for elt in 
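The scan stack above is itself a ring: pushing steps `top` forward modulo the buffer length, and popping adds `buf_len - 1` before the modulo so the now-unsigned `usize` index never underflows. In miniature:

```rust
fn main() {
    let buf_len = 4usize;
    let mut top = 0usize;

    // push: step forward with wraparound
    top = (top + 1) % buf_len;
    assert_eq!(top, 1);

    // pop: step back without ever computing `0 - 1` on a usize
    top = (top + buf_len - 1) % buf_len;
    assert_eq!(top, 0);
    top = (top + buf_len - 1) % buf_len;
    assert_eq!(top, 3);   // wrapped around to the other end of the buffer
}
```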
elts.iter() { try!(self.maybe_print_comment(get_span(elt).hi)); try!(op(self, elt)); - i += 1u; + i += 1us; if i < len { try!(word(&mut self.s, ",")); try!(self.maybe_print_trailing_comment(get_span(elt), @@ -670,7 +670,7 @@ impl<'a> State<'a> { pub fn print_type(&mut self, ty: &ast::Ty) -> IoResult<()> { try!(self.maybe_print_comment(ty.span.lo)); - try!(self.ibox(0u)); + try!(self.ibox(0us)); match ty.node { ast::TyVec(ref ty) => { try!(word(&mut self.s, "[")); @@ -871,7 +871,7 @@ impl<'a> State<'a> { } ast::ItemTy(ref ty, ref params) => { try!(self.ibox(indent_unit)); - try!(self.ibox(0u)); + try!(self.ibox(0us)); try!(self.word_nbsp(&visibility_qualified(item.vis, "type")[])); try!(self.print_ident(item.ident)); try!(self.print_generics(params)); @@ -1262,7 +1262,7 @@ impl<'a> State<'a> { pub fn print_outer_attributes(&mut self, attrs: &[ast::Attribute]) -> IoResult<()> { - let mut count = 0u; + let mut count = 0us; for attr in attrs.iter() { match attr.node.style { ast::AttrOuter => { @@ -1280,7 +1280,7 @@ impl<'a> State<'a> { pub fn print_inner_attributes(&mut self, attrs: &[ast::Attribute]) -> IoResult<()> { - let mut count = 0u; + let mut count = 0us; for attr in attrs.iter() { match attr.node.style { ast::AttrInner => { @@ -1404,8 +1404,8 @@ impl<'a> State<'a> { match _else.node { // "another else-if" ast::ExprIf(ref i, ref then, ref e) => { - try!(self.cbox(indent_unit - 1u)); - try!(self.ibox(0u)); + try!(self.cbox(indent_unit - 1us)); + try!(self.ibox(0us)); try!(word(&mut self.s, " else if ")); try!(self.print_expr(&**i)); try!(space(&mut self.s)); @@ -1414,8 +1414,8 @@ impl<'a> State<'a> { } // "another else-if-let" ast::ExprIfLet(ref pat, ref expr, ref then, ref e) => { - try!(self.cbox(indent_unit - 1u)); - try!(self.ibox(0u)); + try!(self.cbox(indent_unit - 1us)); + try!(self.ibox(0us)); try!(word(&mut self.s, " else if let ")); try!(self.print_pat(&**pat)); try!(space(&mut self.s)); @@ -1427,8 +1427,8 @@ impl<'a> State<'a> { } // "final else" ast::ExprBlock(ref b) => { - try!(self.cbox(indent_unit - 1u)); - try!(self.ibox(0u)); + try!(self.cbox(indent_unit - 1us)); + try!(self.ibox(0us)); try!(word(&mut self.s, " else ")); self.print_block(&**b) } @@ -1594,7 +1594,7 @@ impl<'a> State<'a> { try!(self.print_expr(&*args[0])); try!(word(&mut self.s, ".")); try!(self.print_ident(ident.node)); - if tys.len() > 0u { + if tys.len() > 0us { try!(word(&mut self.s, "::<")); try!(self.commasep(Inconsistent, tys, |s, ty| s.print_type(&**ty))); @@ -1765,7 +1765,7 @@ impl<'a> State<'a> { // containing cbox, will be closed by print-block at } try!(self.cbox(indent_unit)); // head-box, will be closed by print-block after { - try!(self.ibox(0u)); + try!(self.ibox(0us)); try!(self.print_block(&**blk)); } ast::ExprAssign(ref lhs, ref rhs) => { @@ -2142,7 +2142,7 @@ impl<'a> State<'a> { }, |f| f.node.pat.span)); if etc { - if fields.len() != 0u { try!(self.word_space(",")); } + if fields.len() != 0us { try!(self.word_space(",")); } try!(word(&mut self.s, "..")); } try!(space(&mut self.s)); @@ -2209,7 +2209,7 @@ impl<'a> State<'a> { try!(space(&mut self.s)); } try!(self.cbox(indent_unit)); - try!(self.ibox(0u)); + try!(self.ibox(0us)); try!(self.print_outer_attributes(&arm.attrs[])); let mut first = true; for p in arm.pats.iter() { @@ -2295,7 +2295,7 @@ impl<'a> State<'a> { -> IoResult<()> { // It is unfortunate to duplicate the commasep logic, but we want the // self type and the args all in the same box. 
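`commasep` above prints a list with a separator before every element except the first — the classic `first` flag, with lengths and counters kept in `usize`. The same shape outside the pretty printer:

```rust
fn commasep<T: std::fmt::Display>(elts: &[T]) -> String {
    let mut out = String::new();
    let mut first = true;
    for elt in elts {
        if first { first = false; } else { out.push_str(", "); }
        out.push_str(&elt.to_string());
    }
    out
}

fn main() {
    assert_eq!(commasep(&[1, 2, 3]), "1, 2, 3");
    assert_eq!(commasep::<i32>(&[]), "");
}
```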
- try!(self.rbox(0u, Inconsistent)); + try!(self.rbox(0us, Inconsistent)); let mut first = true; for &explicit_self in opt_explicit_self.iter() { let m = match explicit_self { @@ -2472,7 +2472,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, "<")); let mut ints = Vec::new(); - for i in range(0u, total) { + for i in range(0us, total) { ints.push(i); } @@ -2794,7 +2794,7 @@ impl<'a> State<'a> { if span.hi < (*cmnt).pos && (*cmnt).pos < next && span_line.line == comment_line.line { try!(self.print_comment(cmnt)); - self.cur_cmnt_and_lit.cur_cmnt += 1u; + self.cur_cmnt_and_lit.cur_cmnt += 1us; } } _ => () @@ -2812,7 +2812,7 @@ impl<'a> State<'a> { match self.next_comment() { Some(ref cmnt) => { try!(self.print_comment(cmnt)); - self.cur_cmnt_and_lit.cur_cmnt += 1u; + self.cur_cmnt_and_lit.cur_cmnt += 1us; } _ => break } @@ -2894,7 +2894,7 @@ impl<'a> State<'a> { while self.cur_cmnt_and_lit.cur_lit < lits.len() { let ltrl = (*lits)[self.cur_cmnt_and_lit.cur_lit].clone(); if ltrl.pos > pos { return None; } - self.cur_cmnt_and_lit.cur_lit += 1u; + self.cur_cmnt_and_lit.cur_lit += 1us; if ltrl.pos == pos { return Some(ltrl); } } None @@ -2909,7 +2909,7 @@ impl<'a> State<'a> { Some(ref cmnt) => { if (*cmnt).pos < pos { try!(self.print_comment(cmnt)); - self.cur_cmnt_and_lit.cur_cmnt += 1u; + self.cur_cmnt_and_lit.cur_cmnt += 1us; } else { break; } } _ => break @@ -2922,7 +2922,7 @@ impl<'a> State<'a> { cmnt: &comments::Comment) -> IoResult<()> { match cmnt.style { comments::Mixed => { - assert_eq!(cmnt.lines.len(), 1u); + assert_eq!(cmnt.lines.len(), 1us); try!(zerobreak(&mut self.s)); try!(word(&mut self.s, &cmnt.lines[0][])); zerobreak(&mut self.s) @@ -2941,11 +2941,11 @@ impl<'a> State<'a> { } comments::Trailing => { try!(word(&mut self.s, " ")); - if cmnt.lines.len() == 1u { + if cmnt.lines.len() == 1us { try!(word(&mut self.s, &cmnt.lines[0][])); hardbreak(&mut self.s) } else { - try!(self.ibox(0u)); + try!(self.ibox(0us)); for line in cmnt.lines.iter() { if !line.is_empty() { try!(word(&mut self.s, &line[])); diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 895268f96c8..ee8742825f4 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -345,8 +345,8 @@ fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool { let tparm_cnt = generics.ty_params.len(); // NB: inadequate check, but we're running // well before resolve, can't get too deep. 
- input_cnt == 1u - && no_output && tparm_cnt == 0u + input_cnt == 1us + && no_output && tparm_cnt == 0us } _ => false } diff --git a/src/libsyntax/util/small_vector.rs b/src/libsyntax/util/small_vector.rs index da358b70225..d8bac19805b 100644 --- a/src/libsyntax/util/small_vector.rs +++ b/src/libsyntax/util/small_vector.rs @@ -194,14 +194,14 @@ mod test { let v: SmallVector = SmallVector::zero(); assert_eq!(0, v.len()); - assert_eq!(1, SmallVector::one(1i).len()); - assert_eq!(5, SmallVector::many(vec!(1i, 2, 3, 4, 5)).len()); + assert_eq!(1, SmallVector::one(1is).len()); + assert_eq!(5, SmallVector::many(vec!(1is, 2, 3, 4, 5)).len()); } #[test] fn test_push_get() { let mut v = SmallVector::zero(); - v.push(1i); + v.push(1is); assert_eq!(1, v.len()); assert_eq!(&1, v.get(0)); v.push(2); @@ -227,11 +227,11 @@ mod test { let v: Vec = v.into_iter().collect(); assert_eq!(Vec::new(), v); - let v = SmallVector::one(1i); - assert_eq!(vec!(1i), v.into_iter().collect::>()); + let v = SmallVector::one(1is); + assert_eq!(vec!(1is), v.into_iter().collect::>()); - let v = SmallVector::many(vec!(1i, 2i, 3i)); - assert_eq!(vec!(1i, 2i, 3i), v.into_iter().collect::>()); + let v = SmallVector::many(vec!(1is, 2is, 3is)); + assert_eq!(vec!(1is, 2is, 3is), v.into_iter().collect::>()); } #[test] @@ -243,12 +243,12 @@ mod test { #[test] #[should_fail] fn test_expect_one_many() { - SmallVector::many(vec!(1i, 2)).expect_one(""); + SmallVector::many(vec!(1is, 2)).expect_one(""); } #[test] fn test_expect_one_one() { - assert_eq!(1i, SmallVector::one(1i).expect_one("")); - assert_eq!(1i, SmallVector::many(vec!(1i)).expect_one("")); + assert_eq!(1is, SmallVector::one(1is).expect_one("")); + assert_eq!(1is, SmallVector::many(vec!(1is)).expect_one("")); } } -- cgit 1.4.1-3-g733a5 From ed769bf87f1e0ff03cd8ccbdfd90195a8126f135 Mon Sep 17 00:00:00 2001 From: P1start Date: Tue, 20 Jan 2015 19:25:28 +1300 Subject: Fix up some ‘help’ messages MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/librustc/middle/resolve_lifetime.rs | 4 ++-- src/librustc_typeck/astconv.rs | 6 +++--- src/librustc_typeck/check/closure.rs | 2 +- src/libsyntax/parse/parser.rs | 2 +- src/test/compile-fail/hrtb-precedence-of-plus-error-message.rs | 6 +++--- src/test/compile-fail/require-parens-for-chained-comparison.rs | 2 +- src/test/compile-fail/shadowed-lifetime.rs | 8 ++++---- src/test/compile-fail/unsized2.rs | 2 +- 8 files changed, 16 insertions(+), 16 deletions(-) (limited to 'src/libsyntax/parse/parser.rs') diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index eff0018becc..354c476cdd8 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -462,11 +462,11 @@ impl<'a> LifetimeContext<'a> { format!("lifetime name `{}` shadows another \ lifetime name that is already in scope", token::get_name(lifetime.name)).as_slice()); - self.sess.span_help( + self.sess.span_note( lifetime_def.span, format!("shadowed lifetime `{}` declared here", token::get_name(lifetime.name)).as_slice()); - self.sess.span_help( + self.sess.span_note( lifetime.span, "shadowed lifetimes are deprecated \ and will become a hard error before 1.0"); diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 42b12c15866..14f727cb583 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -893,14 +893,14 @@ fn ast_ty_to_trait_ref<'tcx>(this: &AstConv<'tcx>, pprust::ty_to_string(ty)); match 
ty.node { ast::TyRptr(None, ref mut_ty) => { - span_note!(this.tcx().sess, ty.span, + span_help!(this.tcx().sess, ty.span, "perhaps you meant `&{}({} +{})`? (per RFC 438)", ppaux::mutability_to_string(mut_ty.mutbl), pprust::ty_to_string(&*mut_ty.ty), pprust::bounds_to_string(bounds)); } ast::TyRptr(Some(ref lt), ref mut_ty) => { - span_note!(this.tcx().sess, ty.span, + span_help!(this.tcx().sess, ty.span, "perhaps you meant `&{} {}({} +{})`? (per RFC 438)", pprust::lifetime_to_string(lt), ppaux::mutability_to_string(mut_ty.mutbl), @@ -909,7 +909,7 @@ fn ast_ty_to_trait_ref<'tcx>(this: &AstConv<'tcx>, } _ => { - span_note!(this.tcx().sess, ty.span, + span_help!(this.tcx().sess, ty.span, "perhaps you forgot parentheses? (per RFC 438)"); } } diff --git a/src/librustc_typeck/check/closure.rs b/src/librustc_typeck/check/closure.rs index bfe43086aab..0847807c1c0 100644 --- a/src/librustc_typeck/check/closure.rs +++ b/src/librustc_typeck/check/closure.rs @@ -52,7 +52,7 @@ pub fn check_expr_closure<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, fcx.ccx.tcx.sess.span_err( expr.span, - "can't infer the \"kind\" of the closure, explicitly annotate it. e.g. \ + "can't infer the \"kind\" of the closure; explicitly annotate it; e.g. \ `|&:| {}`"); }, Some((sig, kind)) => { diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 83a7504bc49..f5204478906 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -2921,7 +2921,7 @@ impl<'a> Parser<'a> { "Chained comparison operators require parentheses"); if op == BiLt && outer_op == BiGt { self.span_help(op_span, - "Use ::< instead of < if you meant to specify type arguments."); + "use ::< instead of < if you meant to specify type arguments"); } } _ => {} diff --git a/src/test/compile-fail/hrtb-precedence-of-plus-error-message.rs b/src/test/compile-fail/hrtb-precedence-of-plus-error-message.rs index 41a0be37add..db67249bbd9 100644 --- a/src/test/compile-fail/hrtb-precedence-of-plus-error-message.rs +++ b/src/test/compile-fail/hrtb-precedence-of-plus-error-message.rs @@ -19,17 +19,17 @@ trait Bar { struct Foo<'a> { a: &'a Bar+'a, //~^ ERROR E0178 - //~^^ NOTE perhaps you meant `&'a (Bar + 'a)`? + //~^^ HELP perhaps you meant `&'a (Bar + 'a)`? b: &'a mut Bar+'a, //~^ ERROR E0178 - //~^^ NOTE perhaps you meant `&'a mut (Bar + 'a)`? + //~^^ HELP perhaps you meant `&'a mut (Bar + 'a)`? c: Box, // OK, no paren needed in this context d: fn() -> Bar+'a, //~^ ERROR E0178 - //~^^ NOTE perhaps you forgot parentheses + //~^^ HELP perhaps you forgot parentheses //~^^^ WARN deprecated syntax } diff --git a/src/test/compile-fail/require-parens-for-chained-comparison.rs b/src/test/compile-fail/require-parens-for-chained-comparison.rs index 7513815ad73..f5d8c574814 100644 --- a/src/test/compile-fail/require-parens-for-chained-comparison.rs +++ b/src/test/compile-fail/require-parens-for-chained-comparison.rs @@ -19,5 +19,5 @@ fn main() { f(); //~^ ERROR: Chained comparison operators require parentheses - //~^^ HELP: Use ::< instead of < if you meant to specify type arguments. 
+ //~^^ HELP: use ::< instead of < if you meant to specify type arguments } diff --git a/src/test/compile-fail/shadowed-lifetime.rs b/src/test/compile-fail/shadowed-lifetime.rs index 57a2744d8f8..bf8a8f5046e 100644 --- a/src/test/compile-fail/shadowed-lifetime.rs +++ b/src/test/compile-fail/shadowed-lifetime.rs @@ -13,18 +13,18 @@ struct Foo<'a>(&'a isize); impl<'a> Foo<'a> { - //~^ HELP shadowed lifetime `'a` declared here + //~^ NOTE shadowed lifetime `'a` declared here fn shadow_in_method<'a>(&'a self) -> &'a isize { //~^ WARNING lifetime name `'a` shadows another lifetime name that is already in scope - //~| HELP deprecated + //~| NOTE deprecated self.0 } fn shadow_in_type<'b>(&'b self) -> &'b isize { - //~^ HELP shadowed lifetime `'b` declared here + //~^ NOTE shadowed lifetime `'b` declared here let x: for<'b> fn(&'b isize) = panic!(); //~^ WARNING lifetime name `'b` shadows another lifetime name that is already in scope - //~| HELP deprecated + //~| NOTE deprecated self.0 } diff --git a/src/test/compile-fail/unsized2.rs b/src/test/compile-fail/unsized2.rs index 604f7ba3255..a47d81e38cc 100644 --- a/src/test/compile-fail/unsized2.rs +++ b/src/test/compile-fail/unsized2.rs @@ -16,5 +16,5 @@ pub fn main() { f(); //~^ ERROR expected identifier, found keyword `type` //~^^ ERROR: Chained comparison operators require parentheses - //~^^^ HELP: Use ::< instead of < if you meant to specify type arguments. + //~^^^ HELP: use ::< instead of < if you meant to specify type arguments } -- cgit 1.4.1-3-g733a5 From d4ced7b468b3a417d879d23b718ec0a3e5827312 Mon Sep 17 00:00:00 2001 From: Seo Sanghyeon Date: Wed, 21 Jan 2015 20:44:49 +0900 Subject: De-mut the parser --- src/libsyntax/parse/mod.rs | 2 +- src/libsyntax/parse/parser.rs | 32 ++++++++++++++++---------------- 2 files changed, 17 insertions(+), 17 deletions(-) (limited to 'src/libsyntax/parse/parser.rs') diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index dd376fe9e10..2ce4aa58146 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -364,7 +364,7 @@ pub mod with_hygiene { } /// Abort if necessary -pub fn maybe_aborted(result: T, mut p: Parser) -> T { +pub fn maybe_aborted(result: T, p: Parser) -> T { p.abort_if_errors(); result } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 83a7504bc49..e538877edc8 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -382,18 +382,18 @@ impl<'a> Parser<'a> { } /// Convert the current token to a string using self's reader - pub fn this_token_to_string(&mut self) -> String { + pub fn this_token_to_string(&self) -> String { Parser::token_to_string(&self.token) } - pub fn unexpected_last(&mut self, t: &token::Token) -> ! { + pub fn unexpected_last(&self, t: &token::Token) -> ! { let token_str = Parser::token_to_string(t); let last_span = self.last_span; self.span_fatal(last_span, &format!("unexpected token: `{}`", token_str)[]); } - pub fn unexpected(&mut self) -> ! { + pub fn unexpected(&self) -> ! 
{ let this_token = self.this_token_to_string(); self.fatal(&format!("unexpected token: `{}`", this_token)[]); } @@ -660,7 +660,7 @@ impl<'a> Parser<'a> { } } - pub fn expect_no_suffix(&mut self, sp: Span, kind: &str, suffix: Option) { + pub fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option) { match suffix { None => {/* everything ok */} Some(suf) => { @@ -983,39 +983,39 @@ impl<'a> Parser<'a> { } f(&self.buffer[((self.buffer_start + dist - 1) & 3) as uint].tok) } - pub fn fatal(&mut self, m: &str) -> ! { + pub fn fatal(&self, m: &str) -> ! { self.sess.span_diagnostic.span_fatal(self.span, m) } - pub fn span_fatal(&mut self, sp: Span, m: &str) -> ! { + pub fn span_fatal(&self, sp: Span, m: &str) -> ! { self.sess.span_diagnostic.span_fatal(sp, m) } - pub fn span_fatal_help(&mut self, sp: Span, m: &str, help: &str) -> ! { + pub fn span_fatal_help(&self, sp: Span, m: &str, help: &str) -> ! { self.span_err(sp, m); self.span_help(sp, help); panic!(diagnostic::FatalError); } - pub fn span_note(&mut self, sp: Span, m: &str) { + pub fn span_note(&self, sp: Span, m: &str) { self.sess.span_diagnostic.span_note(sp, m) } - pub fn span_help(&mut self, sp: Span, m: &str) { + pub fn span_help(&self, sp: Span, m: &str) { self.sess.span_diagnostic.span_help(sp, m) } - pub fn bug(&mut self, m: &str) -> ! { + pub fn bug(&self, m: &str) -> ! { self.sess.span_diagnostic.span_bug(self.span, m) } - pub fn warn(&mut self, m: &str) { + pub fn warn(&self, m: &str) { self.sess.span_diagnostic.span_warn(self.span, m) } - pub fn span_warn(&mut self, sp: Span, m: &str) { + pub fn span_warn(&self, sp: Span, m: &str) { self.sess.span_diagnostic.span_warn(sp, m) } - pub fn span_err(&mut self, sp: Span, m: &str) { + pub fn span_err(&self, sp: Span, m: &str) { self.sess.span_diagnostic.span_err(sp, m) } - pub fn span_bug(&mut self, sp: Span, m: &str) -> ! { + pub fn span_bug(&self, sp: Span, m: &str) -> ! { self.sess.span_diagnostic.span_bug(sp, m) } - pub fn abort_if_errors(&mut self) { + pub fn abort_if_errors(&self) { self.sess.span_diagnostic.handler().abort_if_errors(); } @@ -1670,7 +1670,7 @@ impl<'a> Parser<'a> { } /// Matches token_lit = LIT_INTEGER | ... - pub fn lit_from_token(&mut self, tok: &token::Token) -> Lit_ { + pub fn lit_from_token(&self, tok: &token::Token) -> Lit_ { match *tok { token::Interpolated(token::NtExpr(ref v)) => { match v.node { -- cgit 1.4.1-3-g733a5 From 38ac9e3984392ab11e33b77d5e1927c1fa17fb07 Mon Sep 17 00:00:00 2001 From: Eduard Burtescu Date: Tue, 23 Dec 2014 15:07:30 +0200 Subject: syntax: merge ast::ViewItem into ast::Item. 
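The patch below deletes `ast::ViewItem` and moves `use` and `extern crate` declarations into two new `Item_` variants, `ItemExternCrate` and `ItemUse`, so blocks and modules carry a single list of items instead of parallel `view_items`/`items` lists. A condensed sketch of the shape of that change follows; it uses simplified stand-in names and payload types, not the real libsyntax definitions.

// Before: `use`/`extern crate` were a separate view-item kind, and every
// module kept them in their own list ahead of ordinary items.
#[allow(dead_code)]
enum OldViewItem {
    ExternCrate(String), // `extern crate foo;`
    Use(String),         // `use foo::bar;`
}
#[allow(dead_code)]
struct OldMod {
    view_items: Vec<OldViewItem>, // had to precede `items`
    items: Vec<String>,           // everything else
}

// After: they are ordinary item variants, so one list (and one parse loop)
// covers every kind of item.
enum NewItem {
    ExternCrate(String),
    Use(String),
    Other(String),
}
struct NewMod {
    items: Vec<NewItem>,
}

fn main() {
    let m = NewMod {
        items: vec![
            NewItem::ExternCrate("libc".to_string()),
            NewItem::Use("std::mem".to_string()),
            NewItem::Other("fn main".to_string()),
        ],
    };
    println!("module has {} items", m.items.len());
}
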
--- src/libsyntax/ast.rs | 49 ++--- src/libsyntax/parse/parser.rs | 434 ++++++++++++------------------------------ 2 files changed, 130 insertions(+), 353 deletions(-) (limited to 'src/libsyntax/parse/parser.rs') diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index fcf80410da2..9d067538813 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -53,7 +53,6 @@ pub use self::UnboxedClosureKind::*; pub use self::UnOp::*; pub use self::UnsafeSource::*; pub use self::VariantKind::*; -pub use self::ViewItem_::*; pub use self::ViewPath_::*; pub use self::Visibility::*; pub use self::PathParameters::*; @@ -511,7 +510,6 @@ impl PartialEq for MetaItem_ { #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Block { - pub view_items: Vec, pub stmts: Vec>, pub expr: Option>, pub id: NodeId, @@ -1452,14 +1450,12 @@ pub struct Mod { /// For `mod foo;`, the inner span ranges from the first token /// to the last token in the external file. pub inner: Span, - pub view_items: Vec, pub items: Vec>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct ForeignMod { pub abi: Abi, - pub view_items: Vec, pub items: Vec>, } @@ -1518,44 +1514,13 @@ pub enum ViewPath_ { /// or just /// /// `foo::bar::baz` (with `as baz` implicitly on the right) - ViewPathSimple(Ident, Path, NodeId), + ViewPathSimple(Ident, Path), /// `foo::bar::*` - ViewPathGlob(Path, NodeId), + ViewPathGlob(Path), /// `foo::bar::{a,b,c}` - ViewPathList(Path, Vec , NodeId) -} - -#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] -pub struct ViewItem { - pub node: ViewItem_, - pub attrs: Vec, - pub vis: Visibility, - pub span: Span, -} - -impl ViewItem { - pub fn id(&self) -> NodeId { - match self.node { - ViewItemExternCrate(_, _, id) => id, - ViewItemUse(ref vp) => match vp.node { - ViewPathSimple(_, _, id) => id, - ViewPathGlob(_, id) => id, - ViewPathList(_, _, id) => id, - } - } - } -} - -#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] -pub enum ViewItem_ { - /// Ident: name used to refer to this crate in the code - /// optional (InternedString,StrStyle): if present, this is a location - /// (containing arbitrary characters) from which to fetch the crate sources - /// For example, extern crate whatever = "github.com/rust-lang/rust" - ViewItemExternCrate(Ident, Option<(InternedString,StrStyle)>, NodeId), - ViewItemUse(P), + ViewPathList(Path, Vec) } /// Meta-data associated with an item @@ -1677,6 +1642,12 @@ pub struct Item { #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum Item_ { + // Optional location (containing arbitrary characters) from which + // to fetch the crate sources. + // For example, extern crate whatever = "github.com/rust-lang/rust". + ItemExternCrate(Option<(InternedString, StrStyle)>), + ItemUse(P), + ItemStatic(P, Mutability, P), ItemConst(P, P), ItemFn(P, Unsafety, Abi, Generics, P), @@ -1703,6 +1674,8 @@ pub enum Item_ { impl Item_ { pub fn descriptive_variant(&self) -> &str { match *self { + ItemExternCrate(..) => "extern crate", + ItemUse(..) => "use", ItemStatic(..) => "static item", ItemConst(..) => "constant item", ItemFn(..) => "function", diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 83a7504bc49..a474e4c2bad 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -9,7 +9,6 @@ // except according to those terms. 
pub use self::PathParsingMode::*; -use self::ItemOrViewItem::*; use abi; use ast::{AssociatedType, BareFnTy}; @@ -35,6 +34,7 @@ use ast::{ForeignItem, ForeignItemStatic, ForeignItemFn, ForeignMod, FunctionRet use ast::{Ident, Inherited, ImplItem, Item, Item_, ItemStatic}; use ast::{ItemEnum, ItemFn, ItemForeignMod, ItemImpl, ItemConst}; use ast::{ItemMac, ItemMod, ItemStruct, ItemTrait, ItemTy}; +use ast::{ItemExternCrate, ItemUse}; use ast::{LifetimeDef, Lit, Lit_}; use ast::{LitBool, LitChar, LitByte, LitBinary}; use ast::{LitStr, LitInt, Local, LocalLet}; @@ -59,7 +59,6 @@ use ast::{TyParam, TyParamBound, TyParen, TyPath, TyPolyTraitRef, TyPtr, TyQPath use ast::{TyRptr, TyTup, TyU32, TyVec, UnUniq}; use ast::{TypeImplItem, TypeTraitItem, Typedef, UnboxedClosureKind}; use ast::{UnnamedField, UnsafeBlock}; -use ast::{ViewItem, ViewItem_, ViewItemExternCrate, ViewItemUse}; use ast::{ViewPath, ViewPathGlob, ViewPathList, ViewPathSimple}; use ast::{Visibility, WhereClause}; use ast; @@ -122,14 +121,9 @@ pub enum BoundParsingMode { Modified, } -enum ItemOrViewItem { - /// Indicates a failure to parse any kind of item. The attributes are - /// returned. - IoviNone(Vec), - IoviItem(P), - IoviForeignItem(P), - IoviViewItem(ViewItem) -} +/// The `Err` case indicates a failure to parse any kind of item. +/// The attributes are returned. +type MaybeItem = Result, Vec>; /// Possibly accept an `token::Interpolated` expression (a pre-parsed expression @@ -231,19 +225,6 @@ macro_rules! maybe_whole { } } ); - (iovi $p:expr, $constructor:ident) => ( - { - let found = match ($p).token { - token::Interpolated(token::$constructor(_)) => { - Some(($p).bump_and_get()) - } - _ => None - }; - if let Some(token::Interpolated(token::$constructor(x))) = found { - return IoviItem(x.clone()); - } - } - ); (pair_empty $p:expr, $constructor:ident) => ( { let found = match ($p).token { @@ -269,14 +250,6 @@ fn maybe_append(mut lhs: Vec, rhs: Option>) lhs } - -struct ParsedItemsAndViewItems { - attrs_remaining: Vec, - view_items: Vec, - items: Vec> , - foreign_items: Vec> -} - /* ident is handled by common.rs */ pub struct Parser<'a> { @@ -3032,8 +3005,7 @@ impl<'a> Parser<'a> { let body = self.parse_expr(); let fakeblock = P(ast::Block { id: ast::DUMMY_NODE_ID, - view_items: Vec::new(), - stmts: Vec::new(), + stmts: vec![], span: body.span, expr: Some(body), rules: DefaultBlock, @@ -3731,20 +3703,13 @@ impl<'a> Parser<'a> { } else { let found_attrs = !item_attrs.is_empty(); let item_err = Parser::expected_item_err(&item_attrs[]); - match self.parse_item_or_view_item(item_attrs, false) { - IoviItem(i) => { + match self.parse_item_(item_attrs, false) { + Ok(i) => { let hi = i.span.hi; let decl = P(spanned(lo, hi, DeclItem(i))); P(spanned(lo, hi, StmtDecl(decl, ast::DUMMY_NODE_ID))) } - IoviViewItem(vi) => { - self.span_fatal(vi.span, - "view items must be declared at the top of the block"); - } - IoviForeignItem(_) => { - self.fatal("foreign items are not allowed here"); - } - IoviNone(_) => { + Err(_) => { if found_attrs { let last_span = self.last_span; self.span_err(last_span, item_err); @@ -3794,36 +3759,17 @@ impl<'a> Parser<'a> { (inner, self.parse_block_tail_(lo, DefaultBlock, next)) } - /// Precondition: already parsed the '{' or '#{' - /// I guess that also means "already parsed the 'impure'" if - /// necessary, and this should take a qualifier. - /// Some blocks start with "#{"... + /// Precondition: already parsed the '{'. 
fn parse_block_tail(&mut self, lo: BytePos, s: BlockCheckMode) -> P { self.parse_block_tail_(lo, s, Vec::new()) } /// Parse the rest of a block expression or function body fn parse_block_tail_(&mut self, lo: BytePos, s: BlockCheckMode, - first_item_attrs: Vec ) -> P { - let mut stmts = Vec::new(); + first_item_attrs: Vec) -> P { + let mut stmts = vec![]; let mut expr = None; - - // wouldn't it be more uniform to parse view items only, here? - let ParsedItemsAndViewItems { - attrs_remaining, - view_items, - items, - .. - } = self.parse_items_and_view_items(first_item_attrs, - false, false); - - for item in items.into_iter() { - let span = item.span; - let decl = P(spanned(span.lo, span.hi, DeclItem(item))); - stmts.push(P(spanned(span.lo, span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID)))); - } - - let mut attributes_box = attrs_remaining; + let mut attributes_box = first_item_attrs; while self.token != token::CloseDelim(token::Brace) { // parsing items even when they're not allowed lets us give @@ -3932,7 +3878,6 @@ impl<'a> Parser<'a> { let hi = self.span.hi; self.bump(); P(ast::Block { - view_items: view_items, stmts: stmts, expr: expr, id: ast::DUMMY_NODE_ID, @@ -5031,39 +4976,34 @@ impl<'a> Parser<'a> { first_item_attrs: Vec, inner_lo: BytePos) -> Mod { - // parse all of the items up to closing or an attribute. - // view items are legal here. - let ParsedItemsAndViewItems { - attrs_remaining, - view_items, - items: starting_items, - .. - } = self.parse_items_and_view_items(first_item_attrs, true, true); - let mut items: Vec> = starting_items; - let attrs_remaining_len = attrs_remaining.len(); + // Parse all of the items up to closing or an attribute. + + let mut attrs = first_item_attrs; + attrs.push_all(&self.parse_outer_attributes()[]); + let mut items = vec![]; + + loop { + match self.parse_item_(attrs, true) { + Err(returned_attrs) => { + attrs = returned_attrs; + break + } + Ok(item) => { + attrs = self.parse_outer_attributes(); + items.push(item) + } + } + } // don't think this other loop is even necessary.... - let mut first = true; while self.token != term { - let mut attrs = self.parse_outer_attributes(); - if first { - let mut tmp = attrs_remaining.clone(); - tmp.push_all(&attrs[]); - attrs = tmp; - first = false; - } - debug!("parse_mod_items: parse_item_or_view_item(attrs={:?})", - attrs); - match self.parse_item_or_view_item(attrs, - true /* macros allowed */) { - IoviItem(item) => items.push(item), - IoviViewItem(view_item) => { - self.span_fatal(view_item.span, - "view items must be declared at the top of \ - the module"); - } - _ => { + let mut attrs = mem::replace(&mut attrs, vec![]); + attrs.push_all(&self.parse_outer_attributes()[]); + debug!("parse_mod_items: parse_item_(attrs={:?})", attrs); + match self.parse_item_(attrs, true /* macros allowed */) { + Ok(item) => items.push(item), + Err(_) => { let token_str = self.this_token_to_string(); self.fatal(&format!("expected item, found `{}`", token_str)[]) @@ -5071,16 +5011,15 @@ impl<'a> Parser<'a> { } } - if first && attrs_remaining_len > 0u { + if !attrs.is_empty() { // We parsed attributes for the first item but didn't find it let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(&attrs_remaining[])); + Parser::expected_item_err(&attrs[])); } ast::Mod { inner: mk_sp(inner_lo, self.span.lo), - view_items: view_items, items: items } } @@ -5298,23 +5237,12 @@ impl<'a> Parser<'a> { /// parse_foreign_items. 
fn parse_foreign_mod_items(&mut self, abi: abi::Abi, - first_item_attrs: Vec ) + first_item_attrs: Vec) -> ForeignMod { - let ParsedItemsAndViewItems { - attrs_remaining, - view_items, - items: _, - foreign_items, - } = self.parse_foreign_items(first_item_attrs, true); - if !attrs_remaining.is_empty() { - let last_span = self.last_span; - self.span_err(last_span, - Parser::expected_item_err(&attrs_remaining[])); - } + let foreign_items = self.parse_foreign_items(first_item_attrs); assert!(self.token == token::CloseDelim(token::Brace)); ast::ForeignMod { abi: abi, - view_items: view_items, items: foreign_items } } @@ -5329,8 +5257,8 @@ impl<'a> Parser<'a> { fn parse_item_extern_crate(&mut self, lo: BytePos, visibility: Visibility, - attrs: Vec ) - -> ItemOrViewItem { + attrs: Vec) + -> P { let span = self.span; let (maybe_path, ident) = match self.token { @@ -5374,12 +5302,13 @@ impl<'a> Parser<'a> { } }; - IoviViewItem(ast::ViewItem { - node: ViewItemExternCrate(ident, maybe_path, ast::DUMMY_NODE_ID), - attrs: attrs, - vis: visibility, - span: mk_sp(lo, self.last_span.hi) - }) + let last_span = self.last_span; + self.mk_item(lo, + last_span.hi, + ident, + ItemExternCrate(maybe_path), + visibility, + attrs) } /// Parse `extern` for foreign ABIs @@ -5396,8 +5325,8 @@ impl<'a> Parser<'a> { lo: BytePos, opt_abi: Option, visibility: Visibility, - attrs: Vec ) - -> ItemOrViewItem { + attrs: Vec) + -> P { self.expect(&token::OpenDelim(token::Brace)); @@ -5408,13 +5337,12 @@ impl<'a> Parser<'a> { self.expect(&token::CloseDelim(token::Brace)); let last_span = self.last_span; - let item = self.mk_item(lo, - last_span.hi, - special_idents::invalid, - ItemForeignMod(m), - visibility, - maybe_append(attrs, Some(inner))); - return IoviItem(item); + self.mk_item(lo, + last_span.hi, + special_idents::invalid, + ItemForeignMod(m), + visibility, + maybe_append(attrs, Some(inner))) } /// Parse type Foo = Bar; @@ -5556,14 +5484,12 @@ impl<'a> Parser<'a> { } } - /// Parse one of the items or view items allowed by the - /// flags; on failure, return IoviNone. + /// Parse one of the items allowed by the flags; on failure, + /// return `Err(remaining_attrs)`. /// NB: this function no longer parses the items inside an /// extern crate. 
- fn parse_item_or_view_item(&mut self, - attrs: Vec , - macros_allowed: bool) - -> ItemOrViewItem { + fn parse_item_(&mut self, attrs: Vec, + macros_allowed: bool) -> MaybeItem { let nt_item = match self.token { token::Interpolated(token::NtItem(ref item)) => { Some((**item).clone()) @@ -5576,7 +5502,7 @@ impl<'a> Parser<'a> { let mut attrs = attrs; mem::swap(&mut item.attrs, &mut attrs); item.attrs.extend(attrs.into_iter()); - return IoviItem(P(item)); + return Ok(P(item)); } None => {} } @@ -5585,22 +5511,24 @@ impl<'a> Parser<'a> { let visibility = self.parse_visibility(); - // must be a view item: if self.eat_keyword(keywords::Use) { - // USE ITEM (IoviViewItem) - let view_item = self.parse_use(); + // USE ITEM + let item_ = ItemUse(self.parse_view_path()); self.expect(&token::Semi); - return IoviViewItem(ast::ViewItem { - node: view_item, - attrs: attrs, - vis: visibility, - span: mk_sp(lo, self.last_span.hi) - }); + + let last_span = self.last_span; + let item = self.mk_item(lo, + last_span.hi, + token::special_idents::invalid, + item_, + visibility, + attrs); + return Ok(item); } - // either a view item or an item: + if self.eat_keyword(keywords::Extern) { if self.eat_keyword(keywords::Crate) { - return self.parse_item_extern_crate(lo, visibility, attrs); + return Ok(self.parse_item_extern_crate(lo, visibility, attrs)); } let opt_abi = self.parse_opt_abi(); @@ -5617,9 +5545,9 @@ impl<'a> Parser<'a> { item_, visibility, maybe_append(attrs, extra_attrs)); - return IoviItem(item); + return Ok(item); } else if self.check(&token::OpenDelim(token::Brace)) { - return self.parse_item_foreign_mod(lo, opt_abi, visibility, attrs); + return Ok(self.parse_item_foreign_mod(lo, opt_abi, visibility, attrs)); } let span = self.span; @@ -5634,7 +5562,6 @@ impl<'a> Parser<'a> { self.span_err(span, "`virtual` structs have been removed from the language"); } - // the rest are all guaranteed to be items: if self.token.is_keyword(keywords::Static) { // STATIC ITEM self.bump(); @@ -5647,7 +5574,7 @@ impl<'a> Parser<'a> { item_, visibility, maybe_append(attrs, extra_attrs)); - return IoviItem(item); + return Ok(item); } if self.token.is_keyword(keywords::Const) { // CONST ITEM @@ -5665,7 +5592,7 @@ impl<'a> Parser<'a> { item_, visibility, maybe_append(attrs, extra_attrs)); - return IoviItem(item); + return Ok(item); } if self.token.is_keyword(keywords::Unsafe) && self.look_ahead(1u, |t| t.is_keyword(keywords::Trait)) @@ -5682,7 +5609,7 @@ impl<'a> Parser<'a> { item_, visibility, maybe_append(attrs, extra_attrs)); - return IoviItem(item); + return Ok(item); } if self.token.is_keyword(keywords::Unsafe) && self.look_ahead(1u, |t| t.is_keyword(keywords::Impl)) @@ -5698,7 +5625,7 @@ impl<'a> Parser<'a> { item_, visibility, maybe_append(attrs, extra_attrs)); - return IoviItem(item); + return Ok(item); } if self.token.is_keyword(keywords::Fn) { // FUNCTION ITEM @@ -5712,7 +5639,7 @@ impl<'a> Parser<'a> { item_, visibility, maybe_append(attrs, extra_attrs)); - return IoviItem(item); + return Ok(item); } if self.token.is_keyword(keywords::Unsafe) && self.look_ahead(1u, |t| *t != token::OpenDelim(token::Brace)) { @@ -5733,7 +5660,7 @@ impl<'a> Parser<'a> { item_, visibility, maybe_append(attrs, extra_attrs)); - return IoviItem(item); + return Ok(item); } if self.eat_keyword(keywords::Mod) { // MODULE ITEM @@ -5746,7 +5673,7 @@ impl<'a> Parser<'a> { item_, visibility, maybe_append(attrs, extra_attrs)); - return IoviItem(item); + return Ok(item); } if self.eat_keyword(keywords::Type) { // TYPE ITEM @@ -5758,7 +5685,7 
@@ impl<'a> Parser<'a> { item_, visibility, maybe_append(attrs, extra_attrs)); - return IoviItem(item); + return Ok(item); } if self.eat_keyword(keywords::Enum) { // ENUM ITEM @@ -5770,7 +5697,7 @@ impl<'a> Parser<'a> { item_, visibility, maybe_append(attrs, extra_attrs)); - return IoviItem(item); + return Ok(item); } if self.eat_keyword(keywords::Trait) { // TRAIT ITEM @@ -5783,7 +5710,7 @@ impl<'a> Parser<'a> { item_, visibility, maybe_append(attrs, extra_attrs)); - return IoviItem(item); + return Ok(item); } if self.eat_keyword(keywords::Impl) { // IMPL ITEM @@ -5795,7 +5722,7 @@ impl<'a> Parser<'a> { item_, visibility, maybe_append(attrs, extra_attrs)); - return IoviItem(item); + return Ok(item); } if self.eat_keyword(keywords::Struct) { // STRUCT ITEM @@ -5807,32 +5734,30 @@ impl<'a> Parser<'a> { item_, visibility, maybe_append(attrs, extra_attrs)); - return IoviItem(item); + return Ok(item); } self.parse_macro_use_or_failure(attrs,macros_allowed,lo,visibility) } - /// Parse a foreign item; on failure, return IoviNone. - fn parse_foreign_item(&mut self, - attrs: Vec , - macros_allowed: bool) - -> ItemOrViewItem { - maybe_whole!(iovi self, NtItem); + /// Parse a foreign item; on failure, return `Err(remaining_attrs)`. + fn parse_foreign_item(&mut self, attrs: Vec) + -> Result, Vec> { let lo = self.span.lo; let visibility = self.parse_visibility(); if self.token.is_keyword(keywords::Static) { // FOREIGN STATIC ITEM - let item = self.parse_item_foreign_static(visibility, attrs); - return IoviForeignItem(item); + return Ok(self.parse_item_foreign_static(visibility, attrs)); } if self.token.is_keyword(keywords::Fn) || self.token.is_keyword(keywords::Unsafe) { // FOREIGN FUNCTION ITEM - let item = self.parse_item_foreign_fn(visibility, attrs); - return IoviForeignItem(item); + return Ok(self.parse_item_foreign_fn(visibility, attrs)); } - self.parse_macro_use_or_failure(attrs,macros_allowed,lo,visibility) + + // FIXME #5668: this will occur for a macro invocation: + let item = try!(self.parse_macro_use_or_failure(attrs, true, lo, visibility)); + self.span_fatal(item.span, "macros cannot expand to foreign items"); } /// This is the fall-through for parsing items. @@ -5842,7 +5767,7 @@ impl<'a> Parser<'a> { macros_allowed: bool, lo: BytePos, visibility: Visibility - ) -> ItemOrViewItem { + ) -> MaybeItem { if macros_allowed && !self.token.is_any_keyword() && self.look_ahead(1, |t| *t == token::Not) && (self.look_ahead(2, |t| t.is_plain_ident()) @@ -5891,7 +5816,7 @@ impl<'a> Parser<'a> { item_, visibility, attrs); - return IoviItem(item); + return Ok(item); } // FAILURE TO PARSE ITEM @@ -5902,7 +5827,7 @@ impl<'a> Parser<'a> { self.span_fatal(last_span, "unmatched visibility `pub`"); } } - return IoviNone(attrs); + Err(attrs) } pub fn parse_item_with_outer_attributes(&mut self) -> Option> { @@ -5911,30 +5836,9 @@ impl<'a> Parser<'a> { } pub fn parse_item(&mut self, attrs: Vec) -> Option> { - match self.parse_item_or_view_item(attrs, true) { - IoviNone(_) => None, - IoviViewItem(_) => - self.fatal("view items are not allowed here"), - IoviForeignItem(_) => - self.fatal("foreign items are not allowed here"), - IoviItem(item) => Some(item) - } + self.parse_item_(attrs, true).ok() } - /// Parse a ViewItem, e.g. 
`use foo::bar` or `extern crate foo` - pub fn parse_view_item(&mut self, attrs: Vec) -> ViewItem { - match self.parse_item_or_view_item(attrs, false) { - IoviViewItem(vi) => vi, - _ => self.fatal("expected `use` or `extern crate`"), - } - } - - /// Parse, e.g., "use a::b::{z,y}" - fn parse_use(&mut self) -> ViewItem_ { - return ViewItemUse(self.parse_view_path()); - } - - /// Matches view_path : MOD? non_global_path as IDENT /// | MOD? non_global_path MOD_SEP LBRACE RBRACE /// | MOD? non_global_path MOD_SEP LBRACE ident_seq RBRACE @@ -5959,8 +5863,7 @@ impl<'a> Parser<'a> { global: false, segments: Vec::new() }; - return P(spanned(lo, self.span.hi, - ViewPathList(path, idents, ast::DUMMY_NODE_ID))); + return P(spanned(lo, self.span.hi, ViewPathList(path, idents))); } let first_ident = self.parse_ident(); @@ -5994,8 +5897,7 @@ impl<'a> Parser<'a> { } }).collect() }; - return P(spanned(lo, self.span.hi, - ViewPathList(path, idents, ast::DUMMY_NODE_ID))); + return P(spanned(lo, self.span.hi, ViewPathList(path, idents))); } // foo::bar::* @@ -6011,8 +5913,7 @@ impl<'a> Parser<'a> { } }).collect() }; - return P(spanned(lo, self.span.hi, - ViewPathGlob(path, ast::DUMMY_NODE_ID))); + return P(spanned(lo, self.span.hi, ViewPathGlob(path))); } _ => break @@ -6033,136 +5934,39 @@ impl<'a> Parser<'a> { if self.eat_keyword(keywords::As) { rename_to = self.parse_ident() } - P(spanned(lo, - self.last_span.hi, - ViewPathSimple(rename_to, path, ast::DUMMY_NODE_ID))) - } - - /// Parses a sequence of items. Stops when it finds program - /// text that can't be parsed as an item - /// - mod_items uses extern_mod_allowed = true - /// - block_tail_ uses extern_mod_allowed = false - fn parse_items_and_view_items(&mut self, - first_item_attrs: Vec , - mut extern_mod_allowed: bool, - macros_allowed: bool) - -> ParsedItemsAndViewItems { - let mut attrs = first_item_attrs; - attrs.push_all(&self.parse_outer_attributes()[]); - // First, parse view items. - let mut view_items : Vec = Vec::new(); - let mut items = Vec::new(); - - // I think this code would probably read better as a single - // loop with a mutable three-state-variable (for extern crates, - // view items, and regular items) ... except that because - // of macros, I'd like to delay that entire check until later. - loop { - match self.parse_item_or_view_item(attrs, macros_allowed) { - IoviNone(attrs) => { - return ParsedItemsAndViewItems { - attrs_remaining: attrs, - view_items: view_items, - items: items, - foreign_items: Vec::new() - } - } - IoviViewItem(view_item) => { - match view_item.node { - ViewItemUse(..) => { - // `extern crate` must precede `use`. - extern_mod_allowed = false; - } - ViewItemExternCrate(..) if !extern_mod_allowed => { - self.span_err(view_item.span, - "\"extern crate\" declarations are \ - not allowed here"); - } - ViewItemExternCrate(..) => {} - } - view_items.push(view_item); - } - IoviItem(item) => { - items.push(item); - attrs = self.parse_outer_attributes(); - break; - } - IoviForeignItem(_) => { - panic!(); - } - } - attrs = self.parse_outer_attributes(); - } - - // Next, parse items. 
- loop { - match self.parse_item_or_view_item(attrs, macros_allowed) { - IoviNone(returned_attrs) => { - attrs = returned_attrs; - break - } - IoviViewItem(view_item) => { - attrs = self.parse_outer_attributes(); - self.span_err(view_item.span, - "`use` and `extern crate` declarations must precede items"); - } - IoviItem(item) => { - attrs = self.parse_outer_attributes(); - items.push(item) - } - IoviForeignItem(_) => { - panic!(); - } - } - } - - ParsedItemsAndViewItems { - attrs_remaining: attrs, - view_items: view_items, - items: items, - foreign_items: Vec::new() - } + P(spanned(lo, self.last_span.hi, ViewPathSimple(rename_to, path))) } /// Parses a sequence of foreign items. Stops when it finds program /// text that can't be parsed as an item - fn parse_foreign_items(&mut self, first_item_attrs: Vec , - macros_allowed: bool) - -> ParsedItemsAndViewItems { + fn parse_foreign_items(&mut self, first_item_attrs: Vec) + -> Vec> { let mut attrs = first_item_attrs; attrs.push_all(&self.parse_outer_attributes()[]); let mut foreign_items = Vec::new(); loop { - match self.parse_foreign_item(attrs, macros_allowed) { - IoviNone(returned_attrs) => { + match self.parse_foreign_item(attrs) { + Ok(foreign_item) => { + foreign_items.push(foreign_item); + } + Err(returned_attrs) => { if self.check(&token::CloseDelim(token::Brace)) { attrs = returned_attrs; break } self.unexpected(); - }, - IoviViewItem(view_item) => { - // I think this can't occur: - self.span_err(view_item.span, - "`use` and `extern crate` declarations must precede items"); - } - IoviItem(item) => { - // FIXME #5668: this will occur for a macro invocation: - self.span_fatal(item.span, "macros cannot expand to foreign items"); - } - IoviForeignItem(foreign_item) => { - foreign_items.push(foreign_item); } } attrs = self.parse_outer_attributes(); } - ParsedItemsAndViewItems { - attrs_remaining: attrs, - view_items: Vec::new(), - items: Vec::new(), - foreign_items: foreign_items + if !attrs.is_empty() { + let last_span = self.last_span; + self.span_err(last_span, + Parser::expected_item_err(&attrs[])); } + + foreign_items } /// Parses a source module as a crate. This is the main -- cgit 1.4.1-3-g733a5 From a506d4cbfe8f20a2725c7efd9d43359a0bbd0e9e Mon Sep 17 00:00:00 2001 From: Aaron Turon Date: Sat, 17 Jan 2015 16:15:52 -0800 Subject: Fallout from stabilization. 
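Most of the churn in this commit replaces the then-deprecated `slice`, `slice_from`, `slice_to`, and `slice_mut` calls with range indexing. A minimal sketch of that equivalence, written in present-day Rust with the deprecated method names shown only in comments:

fn main() {
    let s = "hello world";
    // Formerly s.slice(0, 5), s.slice_from(6), s.slice_to(5):
    assert_eq!(&s[0..5], "hello");
    assert_eq!(&s[6..], "world");
    assert_eq!(&s[..5], "hello");

    // Formerly v.slice_mut(1, 4): range indexing also yields mutable slices.
    let mut v = [1, 2, 3, 4, 5];
    let mid = &mut v[1..4];
    mid[0] = 20;
    assert_eq!(v, [1, 20, 3, 4, 5]);
}
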
--- src/compiletest/header.rs | 3 +-- src/compiletest/runtest.rs | 4 +-- src/doc/intro.md | 4 +-- src/libcollections/btree/node.rs | 21 ++++++++-------- src/libcollections/ring_buf.rs | 2 +- src/libcollections/slice.rs | 2 +- src/libcollections/str.rs | 26 ++++++++++++-------- src/libcore/fmt/float.rs | 4 +-- src/librand/chacha.rs | 3 +-- src/librbml/io.rs | 2 +- src/libregex/re.rs | 2 +- src/librustc/metadata/cstore.rs | 2 +- src/librustc/metadata/loader.rs | 4 +-- src/librustc/middle/cfg/construct.rs | 2 +- src/librustc/middle/dataflow.rs | 30 +++++++++++------------ src/librustc/middle/infer/region_inference/mod.rs | 5 ++-- src/librustc/middle/subst.rs | 4 +-- src/librustc/session/search_paths.rs | 8 +++--- src/librustc_borrowck/borrowck/check_loans.rs | 2 +- src/librustc_trans/back/link.rs | 2 +- src/librustc_trans/save/mod.rs | 4 +-- src/librustc_trans/trans/debuginfo.rs | 5 ++-- src/librustc_trans/trans/meth.rs | 4 +-- src/librustc_typeck/astconv.rs | 8 +++--- src/librustc_typeck/check/regionck.rs | 2 +- src/librustc_typeck/coherence/overlap.rs | 2 +- src/librustdoc/html/escape.rs | 4 +-- src/librustdoc/html/markdown.rs | 2 +- src/librustdoc/html/render.rs | 4 +-- src/librustdoc/markdown.rs | 4 +-- src/librustdoc/passes.rs | 2 +- src/libstd/ffi/c_str.rs | 2 +- src/libstd/io/buffered.rs | 2 +- src/libstd/io/comm_adapters.rs | 4 +-- src/libstd/io/mem.rs | 10 ++++---- src/libstd/io/mod.rs | 4 +-- src/libstd/io/net/ip.rs | 2 +- src/libstd/io/util.rs | 2 +- src/libstd/num/strconv.rs | 4 +-- src/libstd/rand/os.rs | 2 +- src/libstd/rt/util.rs | 2 +- src/libstd/sys/common/backtrace.rs | 16 ++++++------ src/libstd/sys/unix/process.rs | 4 +-- src/libstd/sys/windows/fs.rs | 2 +- src/libstd/sys/windows/os.rs | 2 +- src/libsyntax/parse/lexer/mod.rs | 6 ++--- src/libsyntax/parse/parser.rs | 2 +- src/libsyntax/print/pprust.rs | 4 +-- src/libunicode/u_str.rs | 8 +++--- 49 files changed, 126 insertions(+), 125 deletions(-) (limited to 'src/libsyntax/parse/parser.rs') diff --git a/src/compiletest/header.rs b/src/compiletest/header.rs index 2413a001ee8..d7af767688e 100644 --- a/src/compiletest/header.rs +++ b/src/compiletest/header.rs @@ -332,8 +332,7 @@ pub fn parse_name_value_directive(line: &str, directive: &str) let keycolon = format!("{}:", directive); match line.find_str(keycolon.as_slice()) { Some(colon) => { - let value = line.slice(colon + keycolon.len(), - line.len()).to_string(); + let value = line[(colon + keycolon.len()) .. 
line.len()].to_string(); debug!("{}: {}", directive, value); Some(value) } diff --git a/src/compiletest/runtest.rs b/src/compiletest/runtest.rs index 5579479c5e5..8936c20cefd 100644 --- a/src/compiletest/runtest.rs +++ b/src/compiletest/runtest.rs @@ -862,7 +862,7 @@ fn check_debugger_output(debugger_run_result: &ProcRes, check_lines: &[String]) break; } Some(i) => { - rest = rest.slice_from(i + frag.len()); + rest = &rest[(i + frag.len())..]; } } first = false; @@ -1045,7 +1045,7 @@ fn scan_until_char(haystack: &str, needle: char, idx: &mut uint) -> bool { if *idx >= haystack.len() { return false; } - let opt = haystack.slice_from(*idx).find(needle); + let opt = haystack[(*idx)..].find(needle); if opt.is_none() { return false; } diff --git a/src/doc/intro.md b/src/doc/intro.md index 3487738467f..a7c37ba8e07 100644 --- a/src/doc/intro.md +++ b/src/doc/intro.md @@ -485,9 +485,9 @@ fn main() { Thread::spawn(move || { let mut array = number.lock().unwrap(); - (*array)[i] += 1; + array[i as usize] += 1; - println!("numbers[{}] is {}", i, (*array)[i]); + println!("numbers[{}] is {}", i, array[i as usize]); }); } } diff --git a/src/libcollections/btree/node.rs b/src/libcollections/btree/node.rs index fa890643089..50857c78469 100644 --- a/src/libcollections/btree/node.rs +++ b/src/libcollections/btree/node.rs @@ -21,7 +21,7 @@ use core::prelude::*; use core::borrow::BorrowFrom; use core::cmp::Ordering::{Greater, Less, Equal}; use core::iter::Zip; -use core::ops::{Deref, DerefMut}; +use core::ops::{Deref, DerefMut, Index, IndexMut}; use core::ptr::Unique; use core::{slice, mem, ptr, cmp, num, raw}; use alloc::heap; @@ -1487,7 +1487,7 @@ impl AbsTraversal macro_rules! node_slice_impl { ($NodeSlice:ident, $Traversal:ident, - $as_slices_internal:ident, $slice_from:ident, $slice_to:ident, $iter:ident) => { + $as_slices_internal:ident, $index:ident, $iter:ident) => { impl<'a, K: Ord + 'a, V: 'a> $NodeSlice<'a, K, V> { /// Performs linear search in a slice. Returns a tuple of (index, is_exact_match). fn search_linear(&self, key: &Q) -> (uint, bool) @@ -1521,10 +1521,10 @@ macro_rules! node_slice_impl { edges: if !self.has_edges { self.edges } else { - self.edges.$slice_from(pos) + self.edges.$index(&(pos ..)) }, - keys: self.keys.slice_from(pos), - vals: self.vals.$slice_from(pos), + keys: &self.keys[pos ..], + vals: self.vals.$index(&(pos ..)), head_is_edge: !pos_is_kv, tail_is_edge: self.tail_is_edge, } @@ -1550,10 +1550,10 @@ macro_rules! node_slice_impl { edges: if !self.has_edges { self.edges } else { - self.edges.$slice_to(pos + 1) + self.edges.$index(&(.. (pos + 1))) }, - keys: self.keys.slice_to(pos), - vals: self.vals.$slice_to(pos), + keys: &self.keys[..pos], + vals: self.vals.$index(&(.. pos)), head_is_edge: self.head_is_edge, tail_is_edge: !pos_is_kv, } @@ -1583,6 +1583,5 @@ macro_rules! 
node_slice_impl { } } -node_slice_impl!(NodeSlice, Traversal, as_slices_internal, slice_from, slice_to, iter); -node_slice_impl!(MutNodeSlice, MutTraversal, as_slices_internal_mut, slice_from_mut, - slice_to_mut, iter_mut); +node_slice_impl!(NodeSlice, Traversal, as_slices_internal, index, iter); +node_slice_impl!(MutNodeSlice, MutTraversal, as_slices_internal_mut, index_mut, iter_mut); diff --git a/src/libcollections/ring_buf.rs b/src/libcollections/ring_buf.rs index b9cb4be7c18..69d64bcdf6d 100644 --- a/src/libcollections/ring_buf.rs +++ b/src/libcollections/ring_buf.rs @@ -578,7 +578,7 @@ impl RingBuf { if contiguous { let (empty, buf) = buf.split_at_mut(0); - (buf.slice_mut(tail, head), empty) + (&mut buf[tail .. head], empty) } else { let (mid, right) = buf.split_at_mut(tail); let (left, _) = mid.split_at_mut(head); diff --git a/src/libcollections/slice.rs b/src/libcollections/slice.rs index a640e4ee0e3..16e5a89f343 100644 --- a/src/libcollections/slice.rs +++ b/src/libcollections/slice.rs @@ -686,7 +686,7 @@ impl SliceExt for [T] { #[inline] fn move_from(&mut self, mut src: Vec, start: uint, end: uint) -> uint { - for (a, b) in self.iter_mut().zip(src.slice_mut(start, end).iter_mut()) { + for (a, b) in self.iter_mut().zip(src[start .. end].iter_mut()) { mem::swap(a, b); } cmp::min(self.len(), end-start) diff --git a/src/libcollections/str.rs b/src/libcollections/str.rs index 94554fd1e81..6608d0ee9a7 100644 --- a/src/libcollections/str.rs +++ b/src/libcollections/str.rs @@ -752,21 +752,15 @@ pub trait StrExt: Index { /// Deprecated: use `s[a .. b]` instead. #[deprecated = "use slice notation [a..b] instead"] - fn slice(&self, begin: uint, end: uint) -> &str { - core_str::StrExt::slice(&self[], begin, end) - } + fn slice(&self, begin: uint, end: uint) -> &str; /// Deprecated: use `s[a..]` instead. #[deprecated = "use slice notation [a..] instead"] - fn slice_from(&self, begin: uint) -> &str { - core_str::StrExt::slice_from(&self[], begin) - } + fn slice_from(&self, begin: uint) -> &str; /// Deprecated: use `s[..a]` instead. #[deprecated = "use slice notation [..a] instead"] - fn slice_to(&self, end: uint) -> &str { - core_str::StrExt::slice_to(&self[], end) - } + fn slice_to(&self, end: uint) -> &str; /// Returns a slice of the string from the character range /// [`begin`..`end`). @@ -1304,7 +1298,19 @@ pub trait StrExt: Index { } #[stable] -impl StrExt for str {} +impl StrExt for str { + fn slice(&self, begin: uint, end: uint) -> &str { + &self[begin..end] + } + + fn slice_from(&self, begin: uint) -> &str { + &self[begin..] + } + + fn slice_to(&self, end: uint) -> &str { + &self[..end] + } +} #[cfg(test)] mod tests { diff --git a/src/libcore/fmt/float.rs b/src/libcore/fmt/float.rs index f1b9ebe6d90..245dc00d838 100644 --- a/src/libcore/fmt/float.rs +++ b/src/libcore/fmt/float.rs @@ -179,7 +179,7 @@ pub fn float_to_str_bytes_common( _ => () } - buf.slice_to_mut(end).reverse(); + buf[..end].reverse(); // Remember start of the fractional digits. 
// Points one beyond end of buf if none get generated, @@ -316,7 +316,7 @@ pub fn float_to_str_bytes_common( impl<'a> fmt::Writer for Filler<'a> { fn write_str(&mut self, s: &str) -> fmt::Result { - slice::bytes::copy_memory(self.buf.slice_from_mut(*self.end), + slice::bytes::copy_memory(&mut self.buf[(*self.end)..], s.as_bytes()); *self.end += s.len(); Ok(()) diff --git a/src/librand/chacha.rs b/src/librand/chacha.rs index 815fc0e7ec7..3332e06e19e 100644 --- a/src/librand/chacha.rs +++ b/src/librand/chacha.rs @@ -174,7 +174,7 @@ impl<'a> SeedableRng<&'a [u32]> for ChaChaRng { // reset state self.init(&[0u32; KEY_WORDS]); // set key in place - let key = self.state.slice_mut(4, 4+KEY_WORDS); + let key = &mut self.state[4 .. 4+KEY_WORDS]; for (k, s) in key.iter_mut().zip(seed.iter()) { *k = *s; } @@ -292,4 +292,3 @@ mod test { } } } - diff --git a/src/librbml/io.rs b/src/librbml/io.rs index f39860c8695..9c746c69baa 100644 --- a/src/librbml/io.rs +++ b/src/librbml/io.rs @@ -103,7 +103,7 @@ impl Writer for SeekableMemWriter { // Do the necessary writes if left.len() > 0 { - slice::bytes::copy_memory(self.buf.slice_from_mut(self.pos), left); + slice::bytes::copy_memory(&mut self.buf[self.pos..], left); } if right.len() > 0 { self.buf.push_all(right); diff --git a/src/libregex/re.rs b/src/libregex/re.rs index abc51d62404..3cdc0be45a6 100644 --- a/src/libregex/re.rs +++ b/src/libregex/re.rs @@ -459,7 +459,7 @@ impl<'t> Captures<'t> { pub fn at(&self, i: uint) -> Option<&'t str> { match self.pos(i) { None => None, - Some((s, e)) => Some(self.text.slice(s, e)) + Some((s, e)) => Some(&self.text[s.. e]) } } diff --git a/src/librustc/metadata/cstore.rs b/src/librustc/metadata/cstore.rs index a928d1c9022..7b7159da438 100644 --- a/src/librustc/metadata/cstore.rs +++ b/src/librustc/metadata/cstore.rs @@ -242,7 +242,7 @@ impl MetadataBlob { ((slice[2] as u32) << 8) | ((slice[3] as u32) << 0)) as uint; if len + 4 <= slice.len() { - slice.slice(4, len + 4) + &slice[4.. len + 4] } else { &[] // corrupt or old metadata } diff --git a/src/librustc/metadata/loader.rs b/src/librustc/metadata/loader.rs index 70b6ddf23fd..b1043a4152c 100644 --- a/src/librustc/metadata/loader.rs +++ b/src/librustc/metadata/loader.rs @@ -392,11 +392,11 @@ impl<'a> Context<'a> { }; let (hash, rlib) = if file.starts_with(&rlib_prefix[]) && file.ends_with(".rlib") { - (file.slice(rlib_prefix.len(), file.len() - ".rlib".len()), + (&file[(rlib_prefix.len()) .. (file.len() - ".rlib".len())], true) } else if file.starts_with(dylib_prefix.as_slice()) && file.ends_with(dypair.1.as_slice()) { - (file.slice(dylib_prefix.len(), file.len() - dypair.1.len()), + (&file[(dylib_prefix.len()) .. 
(file.len() - dypair.1.len())], false) } else { return FileDoesntMatch diff --git a/src/librustc/middle/cfg/construct.rs b/src/librustc/middle/cfg/construct.rs index a1ac25a5650..1a2162b3076 100644 --- a/src/librustc/middle/cfg/construct.rs +++ b/src/librustc/middle/cfg/construct.rs @@ -424,7 +424,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { } ast::ExprMethodCall(_, _, ref args) => { - self.call(expr, pred, &*args[0], args.slice_from(1).iter().map(|e| &**e)) + self.call(expr, pred, &*args[0], args[1..].iter().map(|e| &**e)) } ast::ExprIndex(ref l, ref r) | diff --git a/src/librustc/middle/dataflow.rs b/src/librustc/middle/dataflow.rs index 2b9bd1cd09f..a1727869810 100644 --- a/src/librustc/middle/dataflow.rs +++ b/src/librustc/middle/dataflow.rs @@ -118,17 +118,17 @@ impl<'a, 'tcx, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, 'tcx, O assert!(self.bits_per_id > 0); let cfgidx = to_cfgidx_or_die(id, &self.nodeid_to_index); let (start, end) = self.compute_id_range(cfgidx); - let on_entry = self.on_entry.slice(start, end); + let on_entry = &self.on_entry[start.. end]; let entry_str = bits_to_string(on_entry); - let gens = self.gens.slice(start, end); + let gens = &self.gens[start.. end]; let gens_str = if gens.iter().any(|&u| u != 0) { format!(" gen: {}", bits_to_string(gens)) } else { "".to_string() }; - let kills = self.kills.slice(start, end); + let kills = &self.kills[start .. end]; let kills_str = if kills.iter().any(|&u| u != 0) { format!(" kill: {}", bits_to_string(kills)) } else { @@ -232,7 +232,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { let cfgidx = to_cfgidx_or_die(id, &self.nodeid_to_index); let (start, end) = self.compute_id_range(cfgidx); - let gens = self.gens.slice_mut(start, end); + let gens = &mut self.gens[start.. end]; set_bit(gens, bit); } @@ -245,7 +245,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { let cfgidx = to_cfgidx_or_die(id, &self.nodeid_to_index); let (start, end) = self.compute_id_range(cfgidx); - let kills = self.kills.slice_mut(start, end); + let kills = &mut self.kills[start.. end]; set_bit(kills, bit); } @@ -256,9 +256,9 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { assert!(self.bits_per_id > 0); let (start, end) = self.compute_id_range(cfgidx); - let gens = self.gens.slice(start, end); + let gens = &self.gens[start.. end]; bitwise(bits, gens, &Union); - let kills = self.kills.slice(start, end); + let kills = &self.kills[start.. end]; bitwise(bits, kills, &Subtract); debug!("{} apply_gen_kill(cfgidx={:?}, bits={}) [after]", @@ -304,7 +304,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { } let (start, end) = self.compute_id_range(cfgidx); - let on_entry = self.on_entry.slice(start, end); + let on_entry = &self.on_entry[start.. end]; let temp_bits; let slice = match e { Entry => on_entry, @@ -336,7 +336,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { let cfgidx = to_cfgidx_or_die(id, &self.nodeid_to_index); let (start, end) = self.compute_id_range(cfgidx); - let gens = self.gens.slice(start, end); + let gens = &self.gens[start.. 
end]; debug!("{} each_gen_bit(id={}, gens={})", self.analysis_name, id, bits_to_string(gens)); self.each_bit(gens, f) @@ -396,7 +396,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { cfg.graph.each_edge(|_edge_index, edge| { let flow_exit = edge.source(); let (start, end) = self.compute_id_range(flow_exit); - let mut orig_kills = self.kills.slice(start, end).to_vec(); + let mut orig_kills = self.kills[start.. end].to_vec(); let mut changed = false; for &node_id in edge.data.exiting_scopes.iter() { @@ -404,7 +404,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { match opt_cfg_idx { Some(cfg_idx) => { let (start, end) = self.compute_id_range(cfg_idx); - let kills = self.kills.slice(start, end); + let kills = &self.kills[start.. end]; if bitwise(orig_kills.as_mut_slice(), kills, &Union) { changed = true; } @@ -418,7 +418,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { } if changed { - let bits = self.kills.slice_mut(start, end); + let bits = &mut self.kills[start.. end]; debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [before]", self.analysis_name, flow_exit, mut_bits_to_string(bits)); bits.clone_from_slice(&orig_kills[]); @@ -487,7 +487,7 @@ impl<'a, 'b, 'tcx, O:DataFlowOperator> PropagationContext<'a, 'b, 'tcx, O> { let (start, end) = self.dfcx.compute_id_range(node_index); // Initialize local bitvector with state on-entry. - in_out.clone_from_slice(self.dfcx.on_entry.slice(start, end)); + in_out.clone_from_slice(&self.dfcx.on_entry[start.. end]); // Compute state on-exit by applying transfer function to // state on-entry. @@ -528,13 +528,13 @@ impl<'a, 'b, 'tcx, O:DataFlowOperator> PropagationContext<'a, 'b, 'tcx, O> { let (start, end) = self.dfcx.compute_id_range(cfgidx); let changed = { // (scoping mutable borrow of self.dfcx.on_entry) - let on_entry = self.dfcx.on_entry.slice_mut(start, end); + let on_entry = &mut self.dfcx.on_entry[start.. end]; bitwise(on_entry, pred_bits, &self.dfcx.oper) }; if changed { debug!("{} changed entry set for {:?} to {}", self.dfcx.analysis_name, cfgidx, - bits_to_string(self.dfcx.on_entry.slice(start, end))); + bits_to_string(&self.dfcx.on_entry[start.. end])); self.changed = true; } } diff --git a/src/librustc/middle/infer/region_inference/mod.rs b/src/librustc/middle/infer/region_inference/mod.rs index 0f487fffe5c..9339f435d8f 100644 --- a/src/librustc/middle/infer/region_inference/mod.rs +++ b/src/librustc/middle/infer/region_inference/mod.rs @@ -609,8 +609,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { pub fn vars_created_since_snapshot(&self, mark: &RegionSnapshot) -> Vec { - self.undo_log.borrow() - .slice_from(mark.length) + self.undo_log.borrow()[mark.length..] .iter() .filter_map(|&elt| match elt { AddVar(vid) => Some(vid), @@ -637,7 +636,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { debug!("result_index={}, r={:?}", result_index, r); for undo_entry in - self.undo_log.borrow().slice_from(mark.length).iter() + self.undo_log.borrow()[mark.length..].iter() { match undo_entry { &AddConstraint(ConstrainVarSubVar(a, b)) => { diff --git a/src/librustc/middle/subst.rs b/src/librustc/middle/subst.rs index 9ad2dd499cc..031eb26300f 100644 --- a/src/librustc/middle/subst.rs +++ b/src/librustc/middle/subst.rs @@ -373,12 +373,12 @@ impl VecPerParamSpace { pub fn get_slice<'a>(&'a self, space: ParamSpace) -> &'a [T] { let (start, limit) = self.limits(space); - self.content.slice(start, limit) + &self.content[start.. 
limit] } pub fn get_mut_slice<'a>(&'a mut self, space: ParamSpace) -> &'a mut [T] { let (start, limit) = self.limits(space); - self.content.slice_mut(start, limit) + &mut self.content[start.. limit] } pub fn opt_get<'a>(&'a self, diff --git a/src/librustc/session/search_paths.rs b/src/librustc/session/search_paths.rs index 0cf04fe0a00..dfc27d3ae68 100644 --- a/src/librustc/session/search_paths.rs +++ b/src/librustc/session/search_paths.rs @@ -36,13 +36,13 @@ impl SearchPaths { pub fn add_path(&mut self, path: &str) { let (kind, path) = if path.starts_with("native=") { - (PathKind::Native, path.slice_from("native=".len())) + (PathKind::Native, &path["native=".len()..]) } else if path.starts_with("crate=") { - (PathKind::Crate, path.slice_from("crate=".len())) + (PathKind::Crate, &path["crate=".len()..]) } else if path.starts_with("dependency=") { - (PathKind::Dependency, path.slice_from("dependency=".len())) + (PathKind::Dependency, &path["dependency=".len()..]) } else if path.starts_with("all=") { - (PathKind::All, path.slice_from("all=".len())) + (PathKind::All, &path["all=".len()..]) } else { (PathKind::All, path) }; diff --git a/src/librustc_borrowck/borrowck/check_loans.rs b/src/librustc_borrowck/borrowck/check_loans.rs index d5ad201eabf..0ade916f639 100644 --- a/src/librustc_borrowck/borrowck/check_loans.rs +++ b/src/librustc_borrowck/borrowck/check_loans.rs @@ -370,7 +370,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { for (i, &x) in new_loan_indices.iter().enumerate() { let old_loan = &self.all_loans[x]; - for &y in new_loan_indices.slice_from(i+1).iter() { + for &y in new_loan_indices[(i+1) ..].iter() { let new_loan = &self.all_loans[y]; self.report_error_if_loans_conflict(old_loan, new_loan); } diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs index dacf620cbd1..efa948f0942 100644 --- a/src/librustc_trans/back/link.rs +++ b/src/librustc_trans/back/link.rs @@ -178,7 +178,7 @@ pub fn build_link_meta(sess: &Session, krate: &ast::Crate, fn truncated_hash_result(symbol_hasher: &mut Sha256) -> String { let output = symbol_hasher.result_bytes(); // 64 bits should be enough to avoid collisions. - output.slice_to(8).to_hex().to_string() + output[.. 8].to_hex().to_string() } diff --git a/src/librustc_trans/save/mod.rs b/src/librustc_trans/save/mod.rs index b12903c814c..c765698fc0c 100644 --- a/src/librustc_trans/save/mod.rs +++ b/src/librustc_trans/save/mod.rs @@ -157,7 +157,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { return; } - let sub_paths = sub_paths.slice(0, len-1); + let sub_paths = &sub_paths[.. (len-1)]; for &(ref span, ref qualname) in sub_paths.iter() { self.fmt.sub_mod_ref_str(path.span, *span, @@ -174,7 +174,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { if len <= 1 { return; } - let sub_paths = sub_paths.slice_to(len-1); + let sub_paths = &sub_paths[.. 
(len-1)]; // write the trait part of the sub-path let (ref span, ref qualname) = sub_paths[len-2]; diff --git a/src/librustc_trans/trans/debuginfo.rs b/src/librustc_trans/trans/debuginfo.rs index 2f01f0328e2..7d0ff5f2adc 100644 --- a/src/librustc_trans/trans/debuginfo.rs +++ b/src/librustc_trans/trans/debuginfo.rs @@ -1615,8 +1615,8 @@ fn compile_unit_metadata(cx: &CrateContext) -> DIDescriptor { let prefix: &[u8] = &[dotdot[0], ::std::path::SEP_BYTE]; let mut path_bytes = p.as_vec().to_vec(); - if path_bytes.slice_to(2) != prefix && - path_bytes.slice_to(2) != dotdot { + if &path_bytes[..2] != prefix && + &path_bytes[..2] != dotdot { path_bytes.insert(0, prefix[0]); path_bytes.insert(1, prefix[1]); } @@ -4122,4 +4122,3 @@ fn needs_gdb_debug_scripts_section(ccx: &CrateContext) -> bool { !ccx.sess().target.target.options.is_like_windows && ccx.sess().opts.debuginfo != NoDebugInfo } - diff --git a/src/librustc_trans/trans/meth.rs b/src/librustc_trans/trans/meth.rs index 0fb0dffe930..2a893a6cfdf 100644 --- a/src/librustc_trans/trans/meth.rs +++ b/src/librustc_trans/trans/meth.rs @@ -494,7 +494,7 @@ pub fn trans_trait_callee_from_llval<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ty::ty_bare_fn(_, ref f) if f.abi == Rust || f.abi == RustCall => { let fake_sig = ty::Binder(ty::FnSig { - inputs: f.sig.0.inputs.slice_from(1).to_vec(), + inputs: f.sig.0.inputs[1..].to_vec(), output: f.sig.0.output, variadic: f.sig.0.variadic, }); @@ -634,7 +634,7 @@ pub fn trans_object_shim<'a, 'tcx>( } _ => { // skip the self parameter: - sig.inputs.slice_from(1) + &sig.inputs[1..] } }; diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 42b12c15866..428c5680a48 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -1321,7 +1321,7 @@ fn ty_of_method_or_bare_fn<'a, 'tcx>(this: &AstConv<'tcx>, // HACK(eddyb) replace the fake self type in the AST with the actual type. let input_params = if self_ty.is_some() { - decl.inputs.slice_from(1) + &decl.inputs[1..] } else { &decl.inputs[] }; @@ -1339,9 +1339,9 @@ fn ty_of_method_or_bare_fn<'a, 'tcx>(this: &AstConv<'tcx>, let lifetimes_for_params = if implied_output_region.is_none() { let input_tys = if self_ty.is_some() { // Skip the first argument if `self` is present. - self_and_input_tys.slice_from(1) + &self_and_input_tys[1..] } else { - self_and_input_tys.slice_from(0) + &self_and_input_tys[] }; let (ior, lfp) = find_implied_output_region(input_tys, input_pats); @@ -1665,7 +1665,7 @@ fn compute_opt_region_bound<'tcx>(tcx: &ty::ctxt<'tcx>, // of derived region bounds. If so, use that. Otherwise, report an // error. 
let r = derived_region_bounds[0]; - if derived_region_bounds.slice_from(1).iter().any(|r1| r != *r1) { + if derived_region_bounds[1..].iter().any(|r1| r != *r1) { tcx.sess.span_err( span, &format!("ambiguous lifetime bound, \ diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs index 3b5027dbb9e..e4b28bf5648 100644 --- a/src/librustc_typeck/check/regionck.rs +++ b/src/librustc_typeck/check/regionck.rs @@ -531,7 +531,7 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) { ast::ExprMethodCall(_, _, ref args) => { constrain_call(rcx, expr, Some(&*args[0]), - args.slice_from(1).iter().map(|e| &**e), false); + args[1..].iter().map(|e| &**e), false); visit::walk_expr(rcx, expr); } diff --git a/src/librustc_typeck/coherence/overlap.rs b/src/librustc_typeck/coherence/overlap.rs index ce7ba9ac11e..a7bad3dc789 100644 --- a/src/librustc_typeck/coherence/overlap.rs +++ b/src/librustc_typeck/coherence/overlap.rs @@ -65,7 +65,7 @@ impl<'cx, 'tcx> OverlapChecker<'cx, 'tcx> { continue; } - for &impl2_def_id in trait_impls.slice_from(i+1).iter() { + for &impl2_def_id in trait_impls[(i+1)..].iter() { self.check_if_impls_overlap(trait_def_id, impl1_def_id, impl2_def_id); diff --git a/src/librustdoc/html/escape.rs b/src/librustdoc/html/escape.rs index 6fb78d9a833..db7253c9ef3 100644 --- a/src/librustdoc/html/escape.rs +++ b/src/librustdoc/html/escape.rs @@ -29,7 +29,7 @@ impl<'a> fmt::String for Escape<'a> { for (i, ch) in s.bytes().enumerate() { match ch as char { '<' | '>' | '&' | '\'' | '"' => { - try!(fmt.write_str(pile_o_bits.slice(last, i))); + try!(fmt.write_str(&pile_o_bits[last.. i])); let s = match ch as char { '>' => ">", '<' => "<", @@ -46,7 +46,7 @@ impl<'a> fmt::String for Escape<'a> { } if last < s.len() { - try!(fmt.write_str(pile_o_bits.slice_from(last))); + try!(fmt.write_str(&pile_o_bits[last..])); } Ok(()) } diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs index 0dbd13b4616..1c800771c70 100644 --- a/src/librustdoc/html/markdown.rs +++ b/src/librustdoc/html/markdown.rs @@ -146,7 +146,7 @@ extern { fn stripped_filtered_line<'a>(s: &'a str) -> Option<&'a str> { let trimmed = s.trim(); if trimmed.starts_with("# ") { - Some(trimmed.slice_from(2)) + Some(&trimmed[2..]) } else { None } diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index ab9700d966a..ea535a1490b 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -749,7 +749,7 @@ impl<'a> SourceCollector<'a> { // Remove the utf-8 BOM if any let contents = if contents.starts_with("\u{feff}") { - contents.slice_from(3) + &contents[3..] } else { contents }; @@ -1469,7 +1469,7 @@ fn full_path(cx: &Context, item: &clean::Item) -> String { fn shorter<'a>(s: Option<&'a str>) -> &'a str { match s { Some(s) => match s.find_str("\n\n") { - Some(pos) => s.slice_to(pos), + Some(pos) => &s[..pos], None => s, }, None => "" diff --git a/src/librustdoc/markdown.rs b/src/librustdoc/markdown.rs index dc98a56eb1a..594cf3dcd43 100644 --- a/src/librustdoc/markdown.rs +++ b/src/librustdoc/markdown.rs @@ -28,10 +28,10 @@ fn extract_leading_metadata<'a>(s: &'a str) -> (Vec<&'a str>, &'a str) { for line in s.lines() { if line.starts_with("%") { // remove % - metadata.push(line.slice_from(1).trim_left()) + metadata.push(line[1..].trim_left()) } else { let line_start_byte = s.subslice_offset(line); - return (metadata, s.slice_from(line_start_byte)); + return (metadata, &s[line_start_byte..]); } } // if we're here, then all lines were metadata % lines. 
diff --git a/src/librustdoc/passes.rs b/src/librustdoc/passes.rs index 9a67b479106..34a23774e5b 100644 --- a/src/librustdoc/passes.rs +++ b/src/librustdoc/passes.rs @@ -357,7 +357,7 @@ pub fn unindent(s: &str) -> String { line.to_string() } else { assert!(line.len() >= min_indent); - line.slice_from(min_indent).to_string() + line[min_indent..].to_string() } }).collect::>().as_slice()); unindented.connect("\n") diff --git a/src/libstd/ffi/c_str.rs b/src/libstd/ffi/c_str.rs index d7f8eb2e415..6d6aaac22a2 100644 --- a/src/libstd/ffi/c_str.rs +++ b/src/libstd/ffi/c_str.rs @@ -115,7 +115,7 @@ impl Deref for CString { type Target = [libc::c_char]; fn deref(&self) -> &[libc::c_char] { - self.inner.slice_to(self.inner.len() - 1) + &self.inner[..(self.inner.len() - 1)] } } diff --git a/src/libstd/io/buffered.rs b/src/libstd/io/buffered.rs index 8c38bc009cc..2b293d6eef2 100644 --- a/src/libstd/io/buffered.rs +++ b/src/libstd/io/buffered.rs @@ -219,7 +219,7 @@ impl Writer for BufferedWriter { if buf.len() > self.buf.len() { self.inner.as_mut().unwrap().write(buf) } else { - let dst = self.buf.slice_from_mut(self.pos); + let dst = &mut self.buf[self.pos..]; slice::bytes::copy_memory(dst, buf); self.pos += buf.len(); Ok(()) diff --git a/src/libstd/io/comm_adapters.rs b/src/libstd/io/comm_adapters.rs index 4b0014c68f7..4649012d454 100644 --- a/src/libstd/io/comm_adapters.rs +++ b/src/libstd/io/comm_adapters.rs @@ -72,7 +72,7 @@ impl Buffer for ChanReader { if self.closed { Err(io::standard_error(io::EndOfFile)) } else { - Ok(self.buf.slice_from(self.pos)) + Ok(&self.buf[self.pos..]) } } @@ -88,7 +88,7 @@ impl Reader for ChanReader { loop { let count = match self.fill_buf().ok() { Some(src) => { - let dst = buf.slice_from_mut(num_read); + let dst = &mut buf[num_read..]; let count = cmp::min(src.len(), dst.len()); bytes::copy_memory(dst, &src[..count]); count diff --git a/src/libstd/io/mem.rs b/src/libstd/io/mem.rs index ee05a9e5596..884582cbaa8 100644 --- a/src/libstd/io/mem.rs +++ b/src/libstd/io/mem.rs @@ -160,7 +160,7 @@ impl Reader for MemReader { let write_len = min(buf.len(), self.buf.len() - self.pos); { let input = &self.buf[self.pos.. (self.pos + write_len)]; - let output = buf.slice_to_mut(write_len); + let output = &mut buf[.. write_len]; assert_eq!(input.len(), output.len()); slice::bytes::copy_memory(output, input); } @@ -205,11 +205,11 @@ impl<'a> Reader for &'a [u8] { let write_len = min(buf.len(), self.len()); { let input = &self[..write_len]; - let output = buf.slice_to_mut(write_len); + let output = &mut buf[.. write_len]; slice::bytes::copy_memory(output, input); } - *self = self.slice_from(write_len); + *self = &self[write_len..]; Ok(write_len) } @@ -270,7 +270,7 @@ impl<'a> BufWriter<'a> { impl<'a> Writer for BufWriter<'a> { #[inline] fn write(&mut self, src: &[u8]) -> IoResult<()> { - let dst = self.buf.slice_from_mut(self.pos); + let dst = &mut self.buf[self.pos..]; let dst_len = dst.len(); if dst_len == 0 { @@ -350,7 +350,7 @@ impl<'a> Reader for BufReader<'a> { let write_len = min(buf.len(), self.buf.len() - self.pos); { let input = &self.buf[self.pos.. (self.pos + write_len)]; - let output = buf.slice_to_mut(write_len); + let output = &mut buf[.. 
write_len]; assert_eq!(input.len(), output.len()); slice::bytes::copy_memory(output, input); } diff --git a/src/libstd/io/mod.rs b/src/libstd/io/mod.rs index dc21416df7b..ba7c81bf3fb 100644 --- a/src/libstd/io/mod.rs +++ b/src/libstd/io/mod.rs @@ -516,7 +516,7 @@ pub trait Reader { while read < min { let mut zeroes = 0; loop { - match self.read(buf.slice_from_mut(read)) { + match self.read(&mut buf[read..]) { Ok(0) => { zeroes += 1; if zeroes >= NO_PROGRESS_LIMIT { @@ -1481,7 +1481,7 @@ pub trait Buffer: Reader { { let mut start = 1; while start < width { - match try!(self.read(buf.slice_mut(start, width))) { + match try!(self.read(&mut buf[start .. width])) { n if n == width - start => break, n if n < width - start => { start += n; } _ => return Err(standard_error(InvalidInput)), diff --git a/src/libstd/io/net/ip.rs b/src/libstd/io/net/ip.rs index adc122ff447..2c79d7a373d 100644 --- a/src/libstd/io/net/ip.rs +++ b/src/libstd/io/net/ip.rs @@ -251,7 +251,7 @@ impl<'a> Parser<'a> { assert!(head.len() + tail.len() <= 8); let mut gs = [0u16; 8]; gs.clone_from_slice(head); - gs.slice_mut(8 - tail.len(), 8).clone_from_slice(tail); + gs[(8 - tail.len()) .. 8].clone_from_slice(tail); Ipv6Addr(gs[0], gs[1], gs[2], gs[3], gs[4], gs[5], gs[6], gs[7]) } diff --git a/src/libstd/io/util.rs b/src/libstd/io/util.rs index adfd88644cc..e4bf38a9ef5 100644 --- a/src/libstd/io/util.rs +++ b/src/libstd/io/util.rs @@ -48,7 +48,7 @@ impl Reader for LimitReader { } let len = cmp::min(self.limit, buf.len()); - let res = self.inner.read(buf.slice_to_mut(len)); + let res = self.inner.read(&mut buf[..len]); match res { Ok(len) => self.limit -= len, _ => {} diff --git a/src/libstd/num/strconv.rs b/src/libstd/num/strconv.rs index 67fe599ecd6..1d3bf484edb 100644 --- a/src/libstd/num/strconv.rs +++ b/src/libstd/num/strconv.rs @@ -379,14 +379,14 @@ pub fn float_to_str_bytes_common( // only resize buf if we actually remove digits if i < buf_max_i { - buf = buf.slice(0, i + 1).to_vec(); + buf = buf[.. (i + 1)].to_vec(); } } } // If exact and trailing '.', just cut that else { let max_i = buf.len() - 1; if buf[max_i] == b'.' { - buf = buf.slice(0, max_i).to_vec(); + buf = buf[.. max_i].to_vec(); } } diff --git a/src/libstd/rand/os.rs b/src/libstd/rand/os.rs index 68ba7e1dd29..bafbde2511d 100644 --- a/src/libstd/rand/os.rs +++ b/src/libstd/rand/os.rs @@ -65,7 +65,7 @@ mod imp { let mut read = 0; let len = v.len(); while read < len { - let result = getrandom(v.slice_from_mut(read)); + let result = getrandom(&mut v[read..]); if result == -1 { let err = errno() as libc::c_int; if err == libc::EINTR { diff --git a/src/libstd/rt/util.rs b/src/libstd/rt/util.rs index 235cedcda52..4023a0a4c10 100644 --- a/src/libstd/rt/util.rs +++ b/src/libstd/rt/util.rs @@ -130,7 +130,7 @@ pub fn abort(args: fmt::Arguments) -> ! 
{ } impl<'a> fmt::Writer for BufWriter<'a> { fn write_str(&mut self, bytes: &str) -> fmt::Result { - let left = self.buf.slice_from_mut(self.pos); + let left = &mut self.buf[self.pos..]; let to_write = &bytes.as_bytes()[..cmp::min(bytes.len(), left.len())]; slice::bytes::copy_memory(left, to_write); self.pos += to_write.len(); diff --git a/src/libstd/sys/common/backtrace.rs b/src/libstd/sys/common/backtrace.rs index d8b85987236..d069d9ee3b8 100644 --- a/src/libstd/sys/common/backtrace.rs +++ b/src/libstd/sys/common/backtrace.rs @@ -42,10 +42,10 @@ pub fn demangle(writer: &mut Writer, s: &str) -> IoResult<()> { let mut valid = true; let mut inner = s; if s.len() > 4 && s.starts_with("_ZN") && s.ends_with("E") { - inner = s.slice(3, s.len() - 1); + inner = &s[3 .. s.len() - 1]; // On Windows, dbghelp strips leading underscores, so we accept "ZN...E" form too. } else if s.len() > 3 && s.starts_with("ZN") && s.ends_with("E") { - inner = s.slice(2, s.len() - 1); + inner = &s[2 .. s.len() - 1]; } else { valid = false; } @@ -83,11 +83,11 @@ pub fn demangle(writer: &mut Writer, s: &str) -> IoResult<()> { } let mut rest = inner; while rest.char_at(0).is_numeric() { - rest = rest.slice_from(1); + rest = &rest[1..]; } - let i: uint = inner.slice_to(inner.len() - rest.len()).parse().unwrap(); - inner = rest.slice_from(i); - rest = rest.slice_to(i); + let i: uint = inner[.. (inner.len() - rest.len())].parse().unwrap(); + inner = &rest[i..]; + rest = &rest[..i]; while rest.len() > 0 { if rest.starts_with("$") { macro_rules! demangle { @@ -128,8 +128,8 @@ pub fn demangle(writer: &mut Writer, s: &str) -> IoResult<()> { None => rest.len(), Some(i) => i, }; - try!(writer.write_str(rest.slice_to(idx))); - rest = rest.slice_from(idx); + try!(writer.write_str(&rest[..idx])); + rest = &rest[idx..]; } } } diff --git a/src/libstd/sys/unix/process.rs b/src/libstd/sys/unix/process.rs index 36bf696dba5..46639d7d8f0 100644 --- a/src/libstd/sys/unix/process.rs +++ b/src/libstd/sys/unix/process.rs @@ -125,9 +125,9 @@ impl Process { let mut bytes = [0; 8]; return match input.read(&mut bytes) { Ok(8) => { - assert!(combine(CLOEXEC_MSG_FOOTER) == combine(bytes.slice(4, 8)), + assert!(combine(CLOEXEC_MSG_FOOTER) == combine(&bytes[4.. 8]), "Validation on the CLOEXEC pipe failed: {:?}", bytes); - let errno = combine(bytes.slice(0, 4)); + let errno = combine(&bytes[0.. 4]); assert!(p.wait(0).is_ok(), "wait(0) should either return Ok or panic"); Err(super::decode_error(errno)) } diff --git a/src/libstd/sys/windows/fs.rs b/src/libstd/sys/windows/fs.rs index a7330f7c67c..cb8ef7eb66b 100644 --- a/src/libstd/sys/windows/fs.rs +++ b/src/libstd/sys/windows/fs.rs @@ -376,7 +376,7 @@ pub fn readlink(p: &Path) -> IoResult { }); let ret = match ret { Some(ref s) if s.starts_with(r"\\?\") => { // " - Ok(Path::new(s.slice_from(4))) + Ok(Path::new(&s[4..])) } Some(s) => Ok(Path::new(s)), None => Err(super::last_error()), diff --git a/src/libstd/sys/windows/os.rs b/src/libstd/sys/windows/os.rs index e9490dc95c9..36dc9b2afe4 100644 --- a/src/libstd/sys/windows/os.rs +++ b/src/libstd/sys/windows/os.rs @@ -146,7 +146,7 @@ pub fn fill_utf16_buf_and_decode(mut f: F) -> Option where done = true; } if k != 0 && done { - let sub = buf.slice(0, k as uint); + let sub = &buf[.. (k as uint)]; // We want to explicitly catch the case when the // closure returned invalid UTF-16, rather than // set `res` to None and continue. 
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 4cdafb36eec..7852b077b53 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -271,9 +271,9 @@ impl<'a> StringReader<'a> { fn with_str_from_to(&self, start: BytePos, end: BytePos, f: F) -> T where F: FnOnce(&str) -> T, { - f(self.filemap.src.slice( - self.byte_offset(start).to_uint(), - self.byte_offset(end).to_uint())) + f(&self.filemap.src[ + self.byte_offset(start).to_uint().. + self.byte_offset(end).to_uint()]) } /// Converts CRLF to LF in the given string, raising an error on bare CR. diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 83a7504bc49..932c5ce23ea 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -5223,7 +5223,7 @@ impl<'a> Parser<'a> { Some(i) => { let mut err = String::from_str("circular modules: "); let len = included_mod_stack.len(); - for p in included_mod_stack.slice(i, len).iter() { + for p in included_mod_stack[i.. len].iter() { err.push_str(&p.display().as_cow()[]); err.push_str(" -> "); } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index b59e770c6ba..cf5066ae474 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -1590,7 +1590,7 @@ impl<'a> State<'a> { ident: ast::SpannedIdent, tys: &[P], args: &[P]) -> IoResult<()> { - let base_args = args.slice_from(1); + let base_args = &args[1..]; try!(self.print_expr(&*args[0])); try!(word(&mut self.s, ".")); try!(self.print_ident(ident.node)); @@ -2312,7 +2312,7 @@ impl<'a> State<'a> { let args = if first { &decl.inputs[] } else { - decl.inputs.slice_from(1) + &decl.inputs[1..] }; for arg in args.iter() { diff --git a/src/libunicode/u_str.rs b/src/libunicode/u_str.rs index 13672a7b480..66cdf03a51e 100644 --- a/src/libunicode/u_str.rs +++ b/src/libunicode/u_str.rs @@ -249,8 +249,8 @@ impl<'a> Iterator for Graphemes<'a> { Some(cat) }; - let retstr = self.string.slice_to(idx); - self.string = self.string.slice_from(idx); + let retstr = &self.string[..idx]; + self.string = &self.string[idx..]; Some(retstr) } } @@ -350,8 +350,8 @@ impl<'a> DoubleEndedIterator for Graphemes<'a> { Some(cat) }; - let retstr = self.string.slice_from(idx); - self.string = self.string.slice_to(idx); + let retstr = &self.string[idx..]; + self.string = &self.string[..idx]; Some(retstr) } } -- cgit 1.4.1-3-g733a5 From db013f9f45a3083ccc9dd79299f110ec62e29704 Mon Sep 17 00:00:00 2001 From: Daniel Grunwald Date: Sun, 18 Jan 2015 21:43:03 +0100 Subject: Fix some grammar inconsistencies for the '..' range notation. Grammar changes: * allow 'for _ in 1..i {}' (fixes #20241) * allow 'for _ in 1.. 
{}' as infinite loop * prevent use of range notation in contexts where only operators of high precedence are expected (fixes #20811) Parser code cleanup: * remove RESTRICTION_NO_DOTS * make AS_PREC const and follow naming convention * make min_prec inclusive --- src/doc/reference.md | 7 ++--- src/libsyntax/ast_util.rs | 3 +- src/libsyntax/parse/parser.rs | 54 +++++++++++++++++++++++----------- src/test/compile-fail/range-3.rs | 16 ++++++++++ src/test/compile-fail/range-4.rs | 16 ++++++++++ src/test/run-pass/ranges-precedence.rs | 7 +++++ 6 files changed, 80 insertions(+), 23 deletions(-) create mode 100644 src/test/compile-fail/range-3.rs create mode 100644 src/test/compile-fail/range-4.rs (limited to 'src/libsyntax/parse/parser.rs') diff --git a/src/doc/reference.md b/src/doc/reference.md index 9ec4708eb2f..768293cba79 100644 --- a/src/doc/reference.md +++ b/src/doc/reference.md @@ -3135,18 +3135,17 @@ The precedence of Rust binary operators is ordered as follows, going from strong to weak: ```{.text .precedence} -* / % as +* / % + - << >> & ^ | -< > <= >= -== != +== != < > <= >= && || -= += .. ``` Operators at the same precedence level are evaluated left-to-right. [Unary diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 892b3c1e7f2..cf0aac5bf4a 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -336,8 +336,7 @@ pub fn operator_prec(op: ast::BinOp) -> usize { /// Precedence of the `as` operator, which is a binary operator /// not appearing in the prior table. -#[allow(non_upper_case_globals)] -pub static as_prec: usize = 12us; +pub const AS_PREC: usize = 12us; pub fn empty_generics() -> Generics { Generics { diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index fc02cb4acb8..e59dbe52b76 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -62,7 +62,7 @@ use ast::{UnnamedField, UnsafeBlock}; use ast::{ViewPath, ViewPathGlob, ViewPathList, ViewPathSimple}; use ast::{Visibility, WhereClause}; use ast; -use ast_util::{self, as_prec, ident_to_path, operator_prec}; +use ast_util::{self, AS_PREC, ident_to_path, operator_prec}; use codemap::{self, Span, BytePos, Spanned, spanned, mk_sp}; use diagnostic; use ext::tt::macro_parser; @@ -93,7 +93,6 @@ bitflags! { const RESTRICTION_STMT_EXPR = 0b0001, const RESTRICTION_NO_BAR_OP = 0b0010, const RESTRICTION_NO_STRUCT_LITERAL = 0b0100, - const RESTRICTION_NO_DOTS = 0b1000, } } @@ -2775,13 +2774,6 @@ impl<'a> Parser<'a> { hi = e.span.hi; ex = ExprAddrOf(m, e); } - token::DotDot if !self.restrictions.contains(RESTRICTION_NO_DOTS) => { - // A range, closed above: `..expr`. 
- self.bump(); - let e = self.parse_expr(); - hi = e.span.hi; - ex = self.mk_range(None, Some(e)); - } token::Ident(_, _) => { if !self.token.is_keyword(keywords::Box) { return self.parse_dot_or_call_expr(); @@ -2855,10 +2847,10 @@ impl<'a> Parser<'a> { self.check_no_chained_comparison(&*lhs, cur_op) } let cur_prec = operator_prec(cur_op); - if cur_prec > min_prec { + if cur_prec >= min_prec { self.bump(); let expr = self.parse_prefix_expr(); - let rhs = self.parse_more_binops(expr, cur_prec); + let rhs = self.parse_more_binops(expr, cur_prec + 1); let lhs_span = lhs.span; let rhs_span = rhs.span; let binary = self.mk_binary(cur_op, lhs, rhs); @@ -2869,7 +2861,7 @@ impl<'a> Parser<'a> { } } None => { - if as_prec > min_prec && self.eat_keyword(keywords::As) { + if AS_PREC >= min_prec && self.eat_keyword(keywords::As) { let rhs = self.parse_ty(); let _as = self.mk_expr(lhs.span.lo, rhs.span.hi, @@ -2905,8 +2897,24 @@ impl<'a> Parser<'a> { /// actually, this seems to be the main entry point for /// parsing an arbitrary expression. pub fn parse_assign_expr(&mut self) -> P { - let lhs = self.parse_binops(); - self.parse_assign_expr_with(lhs) + match self.token { + token::DotDot => { + // prefix-form of range notation '..expr' + // This has the same precedence as assignment expressions + // (much lower than other prefix expressions) to be consistent + // with the postfix-form 'expr..' + let lo = self.span.lo; + self.bump(); + let rhs = self.parse_binops(); + let hi = rhs.span.hi; + let ex = self.mk_range(None, Some(rhs)); + self.mk_expr(lo, hi, ex) + } + _ => { + let lhs = self.parse_binops(); + self.parse_assign_expr_with(lhs) + } + } } pub fn parse_assign_expr_with(&mut self, lhs: P) -> P { @@ -2938,11 +2946,11 @@ impl<'a> Parser<'a> { self.mk_expr(span.lo, rhs_span.hi, assign_op) } // A range expression, either `expr..expr` or `expr..`. - token::DotDot if !self.restrictions.contains(RESTRICTION_NO_DOTS) => { + token::DotDot => { self.bump(); - let opt_end = if self.token.can_begin_expr() { - let end = self.parse_expr_res(RESTRICTION_NO_DOTS); + let opt_end = if self.is_at_start_of_range_notation_rhs() { + let end = self.parse_binops(); Some(end) } else { None @@ -2960,6 +2968,18 @@ impl<'a> Parser<'a> { } } + fn is_at_start_of_range_notation_rhs(&self) -> bool { + if self.token.can_begin_expr() { + // parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`. + if self.token == token::OpenDelim(token::Brace) { + return !self.restrictions.contains(RESTRICTION_NO_STRUCT_LITERAL); + } + true + } else { + false + } + } + /// Parse an 'if' or 'if let' expression ('if' token already eaten) pub fn parse_if_expr(&mut self) -> P { if self.token.is_keyword(keywords::Let) { diff --git a/src/test/compile-fail/range-3.rs b/src/test/compile-fail/range-3.rs new file mode 100644 index 00000000000..fe79165236f --- /dev/null +++ b/src/test/compile-fail/range-3.rs @@ -0,0 +1,16 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test range syntax - syntax errors. 
+ +pub fn main() { + let r = 1..2..3; + //~^ ERROR expected one of `.`, `;`, or an operator, found `..` +} \ No newline at end of file diff --git a/src/test/compile-fail/range-4.rs b/src/test/compile-fail/range-4.rs new file mode 100644 index 00000000000..bbd6ae289cc --- /dev/null +++ b/src/test/compile-fail/range-4.rs @@ -0,0 +1,16 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test range syntax - syntax errors. + +pub fn main() { + let r = ..1..2; + //~^ ERROR expected one of `.`, `;`, or an operator, found `..` +} \ No newline at end of file diff --git a/src/test/run-pass/ranges-precedence.rs b/src/test/run-pass/ranges-precedence.rs index f678eed8775..c947220f1f8 100644 --- a/src/test/run-pass/ranges-precedence.rs +++ b/src/test/run-pass/ranges-precedence.rs @@ -48,5 +48,12 @@ fn main() { assert!(x == &a[3..]); for _i in 2+4..10-3 {} + + let i = 42; + for _ in 1..i {} + for _ in 1.. { break; } + + let x = [1]..[2]; + assert!(x == (([1])..([2]))); } -- cgit 1.4.1-3-g733a5
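Most of the hunks earlier in this series replace the deprecated `slice`, `slice_from`, `slice_to`, `slice_mut`, `slice_from_mut` and `slice_to_mut` methods with range indexing. As a compact reference, the sketch below shows the equivalences with made-up variable names; it is not code taken from the diffs, and it is written to compile with a current compiler rather than the 2015-era libraries the patches target.

```rust
fn main() {
    let v = [10, 20, 30, 40, 50];

    // v.slice(1, 4)    became   &v[1..4]
    assert_eq!(&v[1..4], &[20, 30, 40][..]);
    // v.slice_from(2)  became   &v[2..]
    assert_eq!(&v[2..], &[30, 40, 50][..]);
    // v.slice_to(2)    became   &v[..2]
    assert_eq!(&v[..2], &[10, 20][..]);

    // The mutable variants map to the same notation behind `&mut`.
    let mut w = [1, 2, 3, 4, 5];
    {
        let tail = &mut w[2..]; // was w.slice_from_mut(2)
        tail[0] = 9;
    }
    assert_eq!(w[2], 9);

    // String slicing follows the same pattern, as in the search_paths.rs hunk.
    let path = "native=/usr/lib";
    assert_eq!(&path["native=".len()..], "/usr/lib");
}
```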
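The final patch changes how `..` interacts with the rest of the expression grammar: the prefix form `..expr` now parses at the same (lowest) precedence level as assignment, the right-hand side of a range is parsed with `parse_binops`, and in a `for` header a `{` after `expr..` opens the loop body rather than a struct literal. The sketch below illustrates the resulting behaviour, mirroring the added run-pass and compile-fail tests; variable names are illustrative and it assumes a current compiler.

```rust
fn main() {
    let i = 10;

    // `for _ in 1..i {}` is accepted (issue #20241): the loop header is a
    // half-open range, not a parse error.
    let mut count = 0;
    for _ in 1..i {
        count += 1;
    }
    assert_eq!(count, i - 1);

    // `for _ in 1.. {}` iterates 1, 2, 3, ... forever; the `{` starts the
    // loop body because struct literals are restricted in a `for` header.
    for n in 1.. {
        if n == 3 {
            break;
        }
    }

    // `..` sits at the bottom of the precedence table, below arithmetic and
    // comparison, so this parses as (2 + 4)..(10 - 3).
    let r = 2 + 4..10 - 3;
    assert_eq!(r, 6..7);

    // Ranges do not chain; the new compile-fail tests reject both forms:
    // let bad = 1..2..3; // error: expected one of `.`, `;`, or an operator, found `..`
    // let bad = ..1..2;  // same error
}
```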