| Field | Value |
|---|---|
| author | Paul Collier <paul@paulcollier.ca> (2015-01-18 00:41:56 +0000) |
| committer | Paul Collier <paul@paulcollier.ca> (2015-01-18 19:43:44 -0800) |
| commit | 3c32cd1be27f321658382e39d34f5d993d99ae8b (patch) |
| tree | a82c1d5da8abe7c5fae2fe1f60c3bae2c3eee0e9 /src/libsyntax |
| parent | 591337431df612dd4e0df8d46b6291358085ac7c (diff) |
| download | rust-3c32cd1be27f321658382e39d34f5d993d99ae8b.tar.gz, rust-3c32cd1be27f321658382e39d34f5d993d99ae8b.zip |
libsyntax: 0u -> 0us, 0i -> 0is
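For readers unfamiliar with the pre-1.0 literal syntax this commit targets: `usize` and `isize` literals previously took the bare `u` and `i` suffixes, and this patch mechanically rewrites them to the `us` and `is` suffixes expected at the time. A minimal hypothetical hunk in the same spirit as the ones below (the variable names are made up for illustration, and neither spelling compiles on current Rust):

```diff
-let line: usize = 0u;    // old bare `u` suffix for usize literals
-let offset: isize = 0i;  // old bare `i` suffix for isize literals
+let line: usize = 0us;   // new `us` suffix, as introduced by this change
+let offset: isize = 0is; // new `is` suffix
```

The `us`/`is` suffixes were themselves short-lived: before Rust 1.0 they were replaced by the full `usize`/`isize` suffixes, which is why none of the suffixed forms in this diff are accepted by a modern compiler.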
Diffstat (limited to 'src/libsyntax')
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/ast_util.rs | 18 |
| -rw-r--r-- | src/libsyntax/codemap.rs | 18 |
| -rw-r--r-- | src/libsyntax/diagnostic.rs | 32 |
| -rw-r--r-- | src/libsyntax/ext/deriving/generic/mod.rs | 16 |
| -rw-r--r-- | src/libsyntax/ext/expand.rs | 4 |
| -rw-r--r-- | src/libsyntax/ext/quote.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/tt/macro_parser.rs | 34 |
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 2 |
| -rw-r--r-- | src/libsyntax/parse/lexer/comments.rs | 8 |
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 32 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 12 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 18 |
| -rw-r--r-- | src/libsyntax/print/pp.rs | 34 |
| -rw-r--r-- | src/libsyntax/print/pprust.rs | 58 |
| -rw-r--r-- | src/libsyntax/util/small_vector.rs | 20 |
16 files changed, 156 insertions, 156 deletions
diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 73f84f4fbe7..34fd498322c 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -322,15 +322,15 @@ pub fn struct_field_visibility(field: ast::StructField) -> Visibility { pub fn operator_prec(op: ast::BinOp) -> usize { match op { // 'as' sits here with 12 - BiMul | BiDiv | BiRem => 11u, - BiAdd | BiSub => 10u, - BiShl | BiShr => 9u, - BiBitAnd => 8u, - BiBitXor => 7u, - BiBitOr => 6u, - BiLt | BiLe | BiGe | BiGt | BiEq | BiNe => 3u, - BiAnd => 2u, - BiOr => 1u + BiMul | BiDiv | BiRem => 11us, + BiAdd | BiSub => 10us, + BiShl | BiShr => 9us, + BiBitAnd => 8us, + BiBitXor => 7us, + BiBitOr => 6us, + BiLt | BiLe | BiGe | BiGt | BiEq | BiNe => 3us, + BiAnd => 2us, + BiOr => 1us } } diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 07544f35b19..a5e10f42750 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -430,7 +430,7 @@ impl CodeMap { let lo = self.lookup_char_pos(sp.lo); let hi = self.lookup_char_pos(sp.hi); let mut lines = Vec::new(); - for i in range(lo.line - 1u, hi.line as usize) { + for i in range(lo.line - 1us, hi.line as usize) { lines.push(i); }; FileLines {file: lo.file, lines: lines} @@ -498,10 +498,10 @@ impl CodeMap { let files = self.files.borrow(); let files = &*files; let len = files.len(); - let mut a = 0u; + let mut a = 0us; let mut b = len; - while b - a > 1u { - let m = (a + b) / 2u; + while b - a > 1us { + let m = (a + b) / 2us; if files[m].start_pos > pos { b = m; } else { @@ -537,12 +537,12 @@ impl CodeMap { let files = self.files.borrow(); let f = (*files)[idx].clone(); - let mut a = 0u; + let mut a = 0us; { let lines = f.lines.borrow(); let mut b = lines.len(); - while b - a > 1u { - let m = (a + b) / 2u; + while b - a > 1us { + let m = (a + b) / 2us; if (*lines)[m] > pos { b = m; } else { a = m; } } } @@ -551,7 +551,7 @@ impl CodeMap { fn lookup_pos(&self, pos: BytePos) -> Loc { let FileMapAndLine {fm: f, line: a} = self.lookup_line(pos); - let line = a + 1u; // Line numbers start at 1 + let line = a + 1us; // Line numbers start at 1 let chpos = self.bytepos_to_file_charpos(pos); let linebpos = (*f.lines.borrow())[a]; let linechpos = self.bytepos_to_file_charpos(linebpos); @@ -762,7 +762,7 @@ mod test { assert_eq!(file_lines.file.name, "blork.rs"); assert_eq!(file_lines.lines.len(), 1); - assert_eq!(file_lines.lines[0], 1u); + assert_eq!(file_lines.lines[0], 1us); } #[test] diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index c86991b6e40..6de466ea9bd 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -26,7 +26,7 @@ use term::WriterWrapper; use term; /// maximum number of lines we will print for each error; arbitrary. 
-static MAX_LINES: usize = 6u; +static MAX_LINES: usize = 6us; #[derive(Clone, Copy)] pub enum RenderSpan { @@ -151,20 +151,20 @@ impl Handler { self.bump_err_count(); } pub fn bump_err_count(&self) { - self.err_count.set(self.err_count.get() + 1u); + self.err_count.set(self.err_count.get() + 1us); } pub fn err_count(&self) -> usize { self.err_count.get() } pub fn has_errors(&self) -> bool { - self.err_count.get()> 0u + self.err_count.get() > 0us } pub fn abort_if_errors(&self) { let s; match self.err_count.get() { - 0u => return, - 1u => s = "aborting due to previous error".to_string(), - _ => { + 0us => return, + 1us => s = "aborting due to previous error".to_string(), + _ => { s = format!("aborting due to {} previous errors", self.err_count.get()); } @@ -448,7 +448,7 @@ fn highlight_lines(err: &mut EmitterWriter, let mut elided = false; let mut display_lines = &lines.lines[]; if display_lines.len() > MAX_LINES { - display_lines = &display_lines[0u..MAX_LINES]; + display_lines = &display_lines[0us..MAX_LINES]; elided = true; } // Print the offending lines @@ -459,32 +459,32 @@ fn highlight_lines(err: &mut EmitterWriter, } } if elided { - let last_line = display_lines[display_lines.len() - 1u]; - let s = format!("{}:{} ", fm.name, last_line + 1u); + let last_line = display_lines[display_lines.len() - 1us]; + let s = format!("{}:{} ", fm.name, last_line + 1us); try!(write!(&mut err.dst, "{0:1$}...\n", "", s.len())); } // FIXME (#3260) // If there's one line at fault we can easily point to the problem - if lines.lines.len() == 1u { + if lines.lines.len() == 1us { let lo = cm.lookup_char_pos(sp.lo); - let mut digits = 0u; - let mut num = (lines.lines[0] + 1u) / 10u; + let mut digits = 0us; + let mut num = (lines.lines[0] + 1us) / 10us; // how many digits must be indent past? - while num > 0u { num /= 10u; digits += 1u; } + while num > 0us { num /= 10us; digits += 1us; } // indent past |name:## | and the 0-offset column location - let left = fm.name.len() + digits + lo.col.to_usize() + 3u; + let left = fm.name.len() + digits + lo.col.to_usize() + 3us; let mut s = String::new(); // Skip is the number of characters we need to skip because they are // part of the 'filename:line ' part of the previous line. - let skip = fm.name.len() + digits + 3u; + let skip = fm.name.len() + digits + 3us; for _ in range(0, skip) { s.push(' '); } if let Some(orig) = fm.get_line(lines.lines[0]) { - for pos in range(0u, left - skip) { + for pos in range(0us, left - skip) { let cur_char = orig.as_bytes()[pos] as char; // Whenever a tab occurs on the previous line, we insert one on // the error-point-squiggly-line as well (instead of a space). diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs index ca3c1d36fba..b77c203acfb 100644 --- a/src/libsyntax/ext/deriving/generic/mod.rs +++ b/src/libsyntax/ext/deriving/generic/mod.rs @@ -748,7 +748,7 @@ impl<'a> MethodDef<'a> { let mut raw_fields = Vec::new(); // ~[[fields of self], // [fields of next Self arg], [etc]] let mut patterns = Vec::new(); - for i in range(0u, self_args.len()) { + for i in range(0us, self_args.len()) { let struct_path= cx.path(DUMMY_SP, vec!( type_ident )); let (pat, ident_expr) = trait_.create_struct_pattern(cx, @@ -837,8 +837,8 @@ impl<'a> MethodDef<'a> { /// (&A2(ref __self_0), /// &A2(ref __arg_1_0)) => (*__self_0).eq(&(*__arg_1_0)), /// _ => { - /// let __self_vi = match *self { A1(..) => 0u, A2(..) => 1u }; - /// let __arg_1_vi = match *__arg_1 { A1(..) => 0u, A2(..) 
=> 1u }; + /// let __self_vi = match *self { A1(..) => 0us, A2(..) => 1us }; + /// let __arg_1_vi = match *__arg_1 { A1(..) => 0us, A2(..) => 1us }; /// false /// } /// } @@ -882,8 +882,8 @@ impl<'a> MethodDef<'a> { /// (Variant2, Variant2, Variant2) => ... // delegate Matching on Variant2 /// ... /// _ => { - /// let __this_vi = match this { Variant1 => 0u, Variant2 => 1u, ... }; - /// let __that_vi = match that { Variant1 => 0u, Variant2 => 1u, ... }; + /// let __this_vi = match this { Variant1 => 0us, Variant2 => 1us, ... }; + /// let __that_vi = match that { Variant1 => 0us, Variant2 => 1us, ... }; /// ... // catch-all remainder can inspect above variant index values. /// } /// } @@ -1045,13 +1045,13 @@ impl<'a> MethodDef<'a> { // // ``` // let __self0_vi = match self { - // A => 0u, B(..) => 1u, C(..) => 2u + // A => 0us, B(..) => 1us, C(..) => 2us // }; // let __self1_vi = match __arg1 { - // A => 0u, B(..) => 1u, C(..) => 2u + // A => 0us, B(..) => 1us, C(..) => 2us // }; // let __self2_vi = match __arg2 { - // A => 0u, B(..) => 1u, C(..) => 2u + // A => 0us, B(..) => 1us, C(..) => 2us // }; // ``` let mut index_let_stmts: Vec<P<ast::Stmt>> = Vec::new(); diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 4f614c1a937..9aa602b39b4 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -273,7 +273,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span, // in this file. // Token-tree macros: MacInvocTT(pth, tts, _) => { - if pth.segments.len() > 1u { + if pth.segments.len() > 1us { fld.cx.span_err(pth.span, "expected macro name without module \ separators"); @@ -844,7 +844,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> { }, _ => unreachable!() }; - if pth.segments.len() > 1u { + if pth.segments.len() > 1us { fld.cx.span_err(pth.span, "expected macro name without module separators"); return DummyResult::raw_pat(span); } diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index a5885ac6c5d..5339c3d77c6 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -716,7 +716,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) // try removing it when enough of them are gone. let mut p = cx.new_parser_from_tts(tts); - p.quote_depth += 1u; + p.quote_depth += 1us; let cx_expr = p.parse_expr(); if !p.eat(&token::Comma) { diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 75d904a4632..d115f2ed620 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -171,11 +171,11 @@ pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: ByteP stack: vec![], top_elts: TtSeq(ms), sep: sep, - idx: 0u, + idx: 0us, up: None, matches: matches, - match_lo: 0u, - match_cur: 0u, + match_lo: 0us, + match_cur: 0us, match_hi: match_idx_hi, sp_lo: lo } @@ -238,7 +238,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>]) } } let mut ret_val = HashMap::new(); - let mut idx = 0u; + let mut idx = 0us; for m in ms.iter() { n_rec(p_s, m, res, &mut ret_val, &mut idx) } ret_val } @@ -383,7 +383,7 @@ pub fn parse(sess: &ParseSess, if seq.op == ast::ZeroOrMore { let mut new_ei = ei.clone(); new_ei.match_cur += seq.num_captures; - new_ei.idx += 1u; + new_ei.idx += 1us; //we specifically matched zero repeats. 
for idx in range(ei.match_cur, ei.match_cur + seq.num_captures) { (&mut new_ei.matches[idx]).push(Rc::new(MatchedSeq(vec![], sp))); @@ -398,7 +398,7 @@ pub fn parse(sess: &ParseSess, cur_eis.push(box MatcherPos { stack: vec![], sep: seq.separator.clone(), - idx: 0u, + idx: 0us, matches: matches, match_lo: ei_t.match_cur, match_cur: ei_t.match_cur, @@ -442,20 +442,20 @@ pub fn parse(sess: &ParseSess, /* error messages here could be improved with links to orig. rules */ if token_name_eq(&tok, &token::Eof) { - if eof_eis.len() == 1u { + if eof_eis.len() == 1us { let mut v = Vec::new(); for dv in (&mut eof_eis[0]).matches.iter_mut() { v.push(dv.pop().unwrap()); } return Success(nameize(sess, ms, &v[])); - } else if eof_eis.len() > 1u { + } else if eof_eis.len() > 1us { return Error(sp, "ambiguity: multiple successful parses".to_string()); } else { return Failure(sp, "unexpected end of macro invocation".to_string()); } } else { - if (bb_eis.len() > 0u && next_eis.len() > 0u) - || bb_eis.len() > 1u { + if (bb_eis.len() > 0us && next_eis.len() > 0us) + || bb_eis.len() > 1us { let nts = bb_eis.iter().map(|ei| { match ei.top_elts.get_tt(ei.idx) { TtToken(_, MatchNt(bind, name, _, _)) => { @@ -469,12 +469,12 @@ pub fn parse(sess: &ParseSess, "local ambiguity: multiple parsing options: \ built-in NTs {} or {} other options.", nts, next_eis.len()).to_string()); - } else if bb_eis.len() == 0u && next_eis.len() == 0u { + } else if bb_eis.len() == 0us && next_eis.len() == 0us { return Failure(sp, format!("no rules expected the token `{}`", pprust::token_to_string(&tok)).to_string()); - } else if next_eis.len() > 0u { + } else if next_eis.len() > 0us { /* Now process the next token */ - while next_eis.len() > 0u { + while next_eis.len() > 0us { cur_eis.push(next_eis.pop().unwrap()); } rdr.next_token(); @@ -488,7 +488,7 @@ pub fn parse(sess: &ParseSess, let match_cur = ei.match_cur; (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal( parse_nt(&mut rust_parser, name_string.get())))); - ei.idx += 1u; + ei.idx += 1us; ei.match_cur += 1; } _ => panic!() @@ -501,16 +501,16 @@ pub fn parse(sess: &ParseSess, } } - assert!(cur_eis.len() > 0u); + assert!(cur_eis.len() > 0us); } } pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal { match name { "tt" => { - p.quote_depth += 1u; //but in theory, non-quoted tts might be useful + p.quote_depth += 1us; //but in theory, non-quoted tts might be useful let res = token::NtTT(P(p.parse_token_tree())); - p.quote_depth -= 1u; + p.quote_depth -= 1us; return res; } _ => {} diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 7936b9fcfc7..0bf20b8f3e1 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -223,7 +223,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { r.repeat_len.pop(); } } else { /* repeat */ - *r.repeat_idx.last_mut().unwrap() += 1u; + *r.repeat_idx.last_mut().unwrap() += 1us; r.stack.last_mut().unwrap().idx = 0; match r.stack.last().unwrap().sep.clone() { Some(tk) => { diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index ca9091856d6..2799696e8eb 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -62,7 +62,7 @@ pub fn doc_comment_style(comment: &str) -> ast::AttrStyle { pub fn strip_doc_comment_decoration(comment: &str) -> String { /// remove whitespace-only lines from the start/end of lines fn vertical_trim(lines: Vec<String> ) -> Vec<String> { - let mut i = 0u; + let 
mut i = 0us; let mut j = lines.len(); // first line of all-stars should be omitted if lines.len() > 0 && @@ -132,7 +132,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { } if comment.starts_with("/*") { - let lines = comment[3u..(comment.len() - 2u)] + let lines = comment[3us..(comment.len() - 2us)] .lines_any() .map(|s| s.to_string()) .collect::<Vec<String> >(); @@ -158,7 +158,7 @@ fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec<Comment>) { fn consume_whitespace_counting_blank_lines(rdr: &mut StringReader, comments: &mut Vec<Comment>) { while is_whitespace(rdr.curr) && !rdr.is_eof() { - if rdr.col == CharPos(0u) && rdr.curr_is('\n') { + if rdr.col == CharPos(0us) && rdr.curr_is('\n') { push_blank_line_comment(rdr, &mut *comments); } rdr.bump(); @@ -305,7 +305,7 @@ fn read_block_comment(rdr: &mut StringReader, let mut style = if code_to_the_left { Trailing } else { Isolated }; rdr.consume_non_eol_whitespace(); - if !rdr.is_eof() && !rdr.curr_is('\n') && lines.len() == 1u { + if !rdr.is_eof() && !rdr.curr_is('\n') && lines.len() == 1us { style = Mixed; } debug!("<<< block comment"); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 2fcf43f9ead..d18bf554975 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -279,7 +279,7 @@ impl<'a> StringReader<'a> { /// Converts CRLF to LF in the given string, raising an error on bare CR. fn translate_crlf<'b>(&self, start: BytePos, s: &'b str, errmsg: &'b str) -> CowString<'b> { - let mut i = 0u; + let mut i = 0us; while i < s.len() { let str::CharRange { ch, next } = s.char_range_at(i); if ch == '\r' { @@ -331,10 +331,10 @@ impl<'a> StringReader<'a> { let byte_offset_diff = next.next - current_byte_offset; self.pos = self.pos + Pos::from_usize(byte_offset_diff); self.curr = Some(next.ch); - self.col = self.col + CharPos(1u); + self.col = self.col + CharPos(1us); if last_char == '\n' { self.filemap.next_line(self.last_pos); - self.col = CharPos(0u); + self.col = CharPos(0us); } if byte_offset_diff > 1 { @@ -472,7 +472,7 @@ impl<'a> StringReader<'a> { cmap.files.borrow_mut().push(self.filemap.clone()); let loc = cmap.lookup_char_pos_adj(self.last_pos); debug!("Skipping a shebang"); - if loc.line == 1u && loc.col == CharPos(0u) { + if loc.line == 1us && loc.col == CharPos(0us) { // FIXME: Add shebang "token", return it let start = self.last_pos; while !self.curr_is('\n') && !self.is_eof() { self.bump(); } @@ -646,7 +646,7 @@ impl<'a> StringReader<'a> { /// Scan through any digits (base `radix`) or underscores, and return how /// many digits there were. fn scan_digits(&mut self, radix: usize) -> usize { - let mut len = 0u; + let mut len = 0us; loop { let c = self.curr; if c == Some('_') { debug!("skipping a _"); self.bump(); continue; } @@ -799,14 +799,14 @@ impl<'a> StringReader<'a> { if self.curr == Some('{') { self.scan_unicode_escape(delim) } else { - let res = self.scan_hex_digits(4u, delim, false); + let res = self.scan_hex_digits(4us, delim, false); let sp = codemap::mk_sp(escaped_pos, self.last_pos); self.old_escape_warning(sp); res } } 'U' if !ascii_only => { - let res = self.scan_hex_digits(8u, delim, false); + let res = self.scan_hex_digits(8us, delim, false); let sp = codemap::mk_sp(escaped_pos, self.last_pos); self.old_escape_warning(sp); res @@ -937,11 +937,11 @@ impl<'a> StringReader<'a> { /// error if it isn't. 
fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: usize) { match base { - 16u => self.err_span_(start_bpos, last_bpos, "hexadecimal float literal is not \ - supported"), - 8u => self.err_span_(start_bpos, last_bpos, "octal float literal is not supported"), - 2u => self.err_span_(start_bpos, last_bpos, "binary float literal is not supported"), - _ => () + 16us => self.err_span_(start_bpos, last_bpos, "hexadecimal float literal is not \ + supported"), + 8us => self.err_span_(start_bpos, last_bpos, "octal float literal is not supported"), + 2us => self.err_span_(start_bpos, last_bpos, "binary float literal is not supported"), + _ => () } } @@ -1189,7 +1189,7 @@ impl<'a> StringReader<'a> { 'r' => { let start_bpos = self.last_pos; self.bump(); - let mut hash_count = 0u; + let mut hash_count = 0us; while self.curr_is('#') { self.bump(); hash_count += 1; @@ -1374,7 +1374,7 @@ impl<'a> StringReader<'a> { fn scan_raw_byte_string(&mut self) -> token::Lit { let start_bpos = self.last_pos; self.bump(); - let mut hash_count = 0u; + let mut hash_count = 0us; while self.curr_is('#') { self.bump(); hash_count += 1; @@ -1616,9 +1616,9 @@ mod test { test!("1.0", Float, "1.0"); test!("1.0e10", Float, "1.0e10"); - assert_eq!(setup(&mk_sh(), "2u".to_string()).next_token().tok, + assert_eq!(setup(&mk_sh(), "2us".to_string()).next_token().tok, token::Literal(token::Integer(token::intern("2")), - Some(token::intern("u")))); + Some(token::intern("us")))); assert_eq!(setup(&mk_sh(), "r###\"raw\"###suffix".to_string()).next_token().tok, token::Literal(token::StrRaw(token::intern("raw"), 3), Some(token::intern("suffix")))); diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 33da7c160d9..29414ef94b5 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -181,7 +181,7 @@ pub fn parse_tts_from_source_str(name: String, name, source ); - p.quote_depth += 1u; + p.quote_depth += 1us; // right now this is re-creating the token trees from ... token trees. maybe_aborted(p.parse_all_token_trees(),p) } @@ -325,7 +325,7 @@ pub mod with_hygiene { name, source ); - p.quote_depth += 1u; + p.quote_depth += 1us; // right now this is re-creating the token trees from ... token trees. maybe_aborted(p.parse_all_token_trees(),p) } @@ -574,7 +574,7 @@ pub fn byte_lit(lit: &str) -> (u8, usize) { if lit.len() == 1 { (lit.as_bytes()[0], 1) } else { - assert!(lit.as_bytes()[0] == b'\\', err(0i)); + assert!(lit.as_bytes()[0] == b'\\', err(0is)); let b = match lit.as_bytes()[1] { b'"' => b'"', b'n' => b'\n', @@ -684,9 +684,9 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> match suffix { Some(suf) if looks_like_width_suffix(&['f'], suf) => { match base { - 16u => sd.span_err(sp, "hexadecimal float literal is not supported"), - 8u => sd.span_err(sp, "octal float literal is not supported"), - 2u => sd.span_err(sp, "binary float literal is not supported"), + 16us => sd.span_err(sp, "hexadecimal float literal is not supported"), + 8us => sd.span_err(sp, "octal float literal is not supported"), + 2us => sd.span_err(sp, "binary float literal is not supported"), _ => () } let ident = token::intern_and_get_ident(&*s); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 88825cb3f34..ea7c80d3fc4 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -767,7 +767,7 @@ impl<'a> Parser<'a> { // would encounter a `>` and stop. 
This lets the parser handle trailing // commas in generic parameters, because it can stop either after // parsing a type or after parsing a comma. - for i in iter::count(0u, 1) { + for i in iter::count(0us, 1) { if self.check(&token::Gt) || self.token == token::BinOp(token::Shr) || self.token == token::Ge @@ -944,7 +944,7 @@ impl<'a> Parser<'a> { }; self.span = next.sp; self.token = next.tok; - self.tokens_consumed += 1u; + self.tokens_consumed += 1us; self.expected_tokens.clear(); // check after each token self.check_unknown_macro_variable(); @@ -2638,7 +2638,7 @@ impl<'a> Parser<'a> { } pub fn check_unknown_macro_variable(&mut self) { - if self.quote_depth == 0u { + if self.quote_depth == 0us { match self.token { token::SubstNt(name, _) => self.fatal(&format!("unknown macro variable `{}`", @@ -2707,7 +2707,7 @@ impl<'a> Parser<'a> { token_str)[]) }, /* we ought to allow different depths of unquotation */ - token::Dollar | token::SubstNt(..) if p.quote_depth > 0u => { + token::Dollar | token::SubstNt(..) if p.quote_depth > 0us => { p.parse_unquoted() } _ => { @@ -5079,7 +5079,7 @@ impl<'a> Parser<'a> { } } - if first && attrs_remaining_len > 0u { + if first && attrs_remaining_len > 0us { // We parsed attributes for the first item but didn't find it let last_span = self.last_span; self.span_err(last_span, @@ -5683,7 +5683,7 @@ impl<'a> Parser<'a> { return IoviItem(item); } if self.token.is_keyword(keywords::Unsafe) && - self.look_ahead(1u, |t| t.is_keyword(keywords::Trait)) + self.look_ahead(1us, |t| t.is_keyword(keywords::Trait)) { // UNSAFE TRAIT ITEM self.expect_keyword(keywords::Unsafe); @@ -5700,7 +5700,7 @@ impl<'a> Parser<'a> { return IoviItem(item); } if self.token.is_keyword(keywords::Unsafe) && - self.look_ahead(1u, |t| t.is_keyword(keywords::Impl)) + self.look_ahead(1us, |t| t.is_keyword(keywords::Impl)) { // IMPL ITEM self.expect_keyword(keywords::Unsafe); @@ -5731,7 +5731,7 @@ impl<'a> Parser<'a> { return IoviItem(item); } if self.token.is_keyword(keywords::Unsafe) - && self.look_ahead(1u, |t| *t != token::OpenDelim(token::Brace)) { + && self.look_ahead(1us, |t| *t != token::OpenDelim(token::Brace)) { // UNSAFE FUNCTION ITEM self.bump(); let abi = if self.eat_keyword(keywords::Extern) { @@ -6035,7 +6035,7 @@ impl<'a> Parser<'a> { } } } - let mut rename_to = path[path.len() - 1u]; + let mut rename_to = path[path.len() - 1us]; let path = ast::Path { span: mk_sp(lo, self.last_span.hi), global: false, diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index ca99fb2ef89..0b1bd282941 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -132,15 +132,15 @@ pub fn buf_str(toks: &[Token], let mut i = left; let mut l = lim; let mut s = string::String::from_str("["); - while i != right && l != 0u { - l -= 1u; + while i != right && l != 0us { + l -= 1us; if i != left { s.push_str(", "); } s.push_str(&format!("{}={}", szs[i], tok_str(&toks[i]))[]); - i += 1u; + i += 1us; i %= n; } s.push(']'); @@ -167,7 +167,7 @@ pub fn mk_printer(out: Box<io::Writer+'static>, linewidth: usize) -> Printer { let n: usize = 3 * linewidth; debug!("mk_printer {}", linewidth); let token: Vec<Token> = repeat(Token::Eof).take(n).collect(); - let size: Vec<isize> = repeat(0i).take(n).collect(); + let size: Vec<isize> = repeat(0is).take(n).collect(); let scan_stack: Vec<usize> = repeat(0us).take(n).collect(); Printer { out: out, @@ -326,8 +326,8 @@ impl Printer { if self.scan_stack_empty { self.left_total = 1; self.right_total = 1; - self.left = 0u; - self.right = 0u; + self.left 
= 0us; + self.right = 0us; } else { self.advance_right(); } debug!("pp Begin({})/buffer ~[{},{}]", b.offset, self.left, self.right); @@ -355,8 +355,8 @@ impl Printer { if self.scan_stack_empty { self.left_total = 1; self.right_total = 1; - self.left = 0u; - self.right = 0u; + self.left = 0us; + self.right = 0us; } else { self.advance_right(); } debug!("pp Break({})/buffer ~[{},{}]", b.offset, self.left, self.right); @@ -410,7 +410,7 @@ impl Printer { if self.scan_stack_empty { self.scan_stack_empty = false; } else { - self.top += 1u; + self.top += 1us; self.top %= self.buf_len; assert!((self.top != self.bottom)); } @@ -422,7 +422,7 @@ impl Printer { if self.top == self.bottom { self.scan_stack_empty = true; } else { - self.top += self.buf_len - 1u; self.top %= self.buf_len; + self.top += self.buf_len - 1us; self.top %= self.buf_len; } return x; } @@ -436,12 +436,12 @@ impl Printer { if self.top == self.bottom { self.scan_stack_empty = true; } else { - self.bottom += 1u; self.bottom %= self.buf_len; + self.bottom += 1us; self.bottom %= self.buf_len; } return x; } pub fn advance_right(&mut self) { - self.right += 1u; + self.right += 1us; self.right %= self.buf_len; assert!((self.right != self.left)); } @@ -471,7 +471,7 @@ impl Printer { break; } - self.left += 1u; + self.left += 1us; self.left %= self.buf_len; left_size = self.size[self.left]; @@ -520,7 +520,7 @@ impl Printer { pub fn get_top(&mut self) -> PrintStackElem { let print_stack = &mut self.print_stack; let n = print_stack.len(); - if n != 0u { + if n != 0us { (*print_stack)[n - 1] } else { PrintStackElem { @@ -565,7 +565,7 @@ impl Printer { Token::End => { debug!("print End -> pop End"); let print_stack = &mut self.print_stack; - assert!((print_stack.len() != 0u)); + assert!((print_stack.len() != 0us)); print_stack.pop().unwrap(); Ok(()) } @@ -667,11 +667,11 @@ pub fn spaces(p: &mut Printer, n: usize) -> io::IoResult<()> { } pub fn zerobreak(p: &mut Printer) -> io::IoResult<()> { - spaces(p, 0u) + spaces(p, 0us) } pub fn space(p: &mut Printer) -> io::IoResult<()> { - spaces(p, 1u) + spaces(p, 1us) } pub fn hardbreak(p: &mut Printer) -> io::IoResult<()> { diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index c537ca60b18..78a97f310d2 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -381,7 +381,7 @@ pub fn block_to_string(blk: &ast::Block) -> String { // containing cbox, will be closed by print-block at } try!(s.cbox(indent_unit)); // head-ibox, will be closed by print-block after { - try!(s.ibox(0u)); + try!(s.ibox(0us)); s.print_block(blk) }) } @@ -520,7 +520,7 @@ impl<'a> State<'a> { pub fn bclose_maybe_open (&mut self, span: codemap::Span, indented: usize, close_box: bool) -> IoResult<()> { try!(self.maybe_print_comment(span.hi)); - try!(self.break_offset_if_not_bol(1u, -(indented as isize))); + try!(self.break_offset_if_not_bol(1us, -(indented as isize))); try!(word(&mut self.s, "}")); if close_box { try!(self.end()); // close the outer-box @@ -595,7 +595,7 @@ impl<'a> State<'a> { pub fn commasep<T, F>(&mut self, b: Breaks, elts: &[T], mut op: F) -> IoResult<()> where F: FnMut(&mut State, &T) -> IoResult<()>, { - try!(self.rbox(0u, b)); + try!(self.rbox(0us, b)); let mut first = true; for elt in elts.iter() { if first { first = false; } else { try!(self.word_space(",")); } @@ -613,13 +613,13 @@ impl<'a> State<'a> { F: FnMut(&mut State, &T) -> IoResult<()>, G: FnMut(&T) -> codemap::Span, { - try!(self.rbox(0u, b)); + try!(self.rbox(0us, b)); let len = elts.len(); - let 
mut i = 0u; + let mut i = 0us; for elt in elts.iter() { try!(self.maybe_print_comment(get_span(elt).hi)); try!(op(self, elt)); - i += 1u; + i += 1us; if i < len { try!(word(&mut self.s, ",")); try!(self.maybe_print_trailing_comment(get_span(elt), @@ -670,7 +670,7 @@ impl<'a> State<'a> { pub fn print_type(&mut self, ty: &ast::Ty) -> IoResult<()> { try!(self.maybe_print_comment(ty.span.lo)); - try!(self.ibox(0u)); + try!(self.ibox(0us)); match ty.node { ast::TyVec(ref ty) => { try!(word(&mut self.s, "[")); @@ -871,7 +871,7 @@ impl<'a> State<'a> { } ast::ItemTy(ref ty, ref params) => { try!(self.ibox(indent_unit)); - try!(self.ibox(0u)); + try!(self.ibox(0us)); try!(self.word_nbsp(&visibility_qualified(item.vis, "type")[])); try!(self.print_ident(item.ident)); try!(self.print_generics(params)); @@ -1262,7 +1262,7 @@ impl<'a> State<'a> { pub fn print_outer_attributes(&mut self, attrs: &[ast::Attribute]) -> IoResult<()> { - let mut count = 0u; + let mut count = 0us; for attr in attrs.iter() { match attr.node.style { ast::AttrOuter => { @@ -1280,7 +1280,7 @@ impl<'a> State<'a> { pub fn print_inner_attributes(&mut self, attrs: &[ast::Attribute]) -> IoResult<()> { - let mut count = 0u; + let mut count = 0us; for attr in attrs.iter() { match attr.node.style { ast::AttrInner => { @@ -1404,8 +1404,8 @@ impl<'a> State<'a> { match _else.node { // "another else-if" ast::ExprIf(ref i, ref then, ref e) => { - try!(self.cbox(indent_unit - 1u)); - try!(self.ibox(0u)); + try!(self.cbox(indent_unit - 1us)); + try!(self.ibox(0us)); try!(word(&mut self.s, " else if ")); try!(self.print_expr(&**i)); try!(space(&mut self.s)); @@ -1414,8 +1414,8 @@ impl<'a> State<'a> { } // "another else-if-let" ast::ExprIfLet(ref pat, ref expr, ref then, ref e) => { - try!(self.cbox(indent_unit - 1u)); - try!(self.ibox(0u)); + try!(self.cbox(indent_unit - 1us)); + try!(self.ibox(0us)); try!(word(&mut self.s, " else if let ")); try!(self.print_pat(&**pat)); try!(space(&mut self.s)); @@ -1427,8 +1427,8 @@ impl<'a> State<'a> { } // "final else" ast::ExprBlock(ref b) => { - try!(self.cbox(indent_unit - 1u)); - try!(self.ibox(0u)); + try!(self.cbox(indent_unit - 1us)); + try!(self.ibox(0us)); try!(word(&mut self.s, " else ")); self.print_block(&**b) } @@ -1594,7 +1594,7 @@ impl<'a> State<'a> { try!(self.print_expr(&*args[0])); try!(word(&mut self.s, ".")); try!(self.print_ident(ident.node)); - if tys.len() > 0u { + if tys.len() > 0us { try!(word(&mut self.s, "::<")); try!(self.commasep(Inconsistent, tys, |s, ty| s.print_type(&**ty))); @@ -1765,7 +1765,7 @@ impl<'a> State<'a> { // containing cbox, will be closed by print-block at } try!(self.cbox(indent_unit)); // head-box, will be closed by print-block after { - try!(self.ibox(0u)); + try!(self.ibox(0us)); try!(self.print_block(&**blk)); } ast::ExprAssign(ref lhs, ref rhs) => { @@ -2142,7 +2142,7 @@ impl<'a> State<'a> { }, |f| f.node.pat.span)); if etc { - if fields.len() != 0u { try!(self.word_space(",")); } + if fields.len() != 0us { try!(self.word_space(",")); } try!(word(&mut self.s, "..")); } try!(space(&mut self.s)); @@ -2209,7 +2209,7 @@ impl<'a> State<'a> { try!(space(&mut self.s)); } try!(self.cbox(indent_unit)); - try!(self.ibox(0u)); + try!(self.ibox(0us)); try!(self.print_outer_attributes(&arm.attrs[])); let mut first = true; for p in arm.pats.iter() { @@ -2295,7 +2295,7 @@ impl<'a> State<'a> { -> IoResult<()> { // It is unfortunate to duplicate the commasep logic, but we want the // self type and the args all in the same box. 
- try!(self.rbox(0u, Inconsistent)); + try!(self.rbox(0us, Inconsistent)); let mut first = true; for &explicit_self in opt_explicit_self.iter() { let m = match explicit_self { @@ -2472,7 +2472,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, "<")); let mut ints = Vec::new(); - for i in range(0u, total) { + for i in range(0us, total) { ints.push(i); } @@ -2794,7 +2794,7 @@ impl<'a> State<'a> { if span.hi < (*cmnt).pos && (*cmnt).pos < next && span_line.line == comment_line.line { try!(self.print_comment(cmnt)); - self.cur_cmnt_and_lit.cur_cmnt += 1u; + self.cur_cmnt_and_lit.cur_cmnt += 1us; } } _ => () @@ -2812,7 +2812,7 @@ impl<'a> State<'a> { match self.next_comment() { Some(ref cmnt) => { try!(self.print_comment(cmnt)); - self.cur_cmnt_and_lit.cur_cmnt += 1u; + self.cur_cmnt_and_lit.cur_cmnt += 1us; } _ => break } @@ -2894,7 +2894,7 @@ impl<'a> State<'a> { while self.cur_cmnt_and_lit.cur_lit < lits.len() { let ltrl = (*lits)[self.cur_cmnt_and_lit.cur_lit].clone(); if ltrl.pos > pos { return None; } - self.cur_cmnt_and_lit.cur_lit += 1u; + self.cur_cmnt_and_lit.cur_lit += 1us; if ltrl.pos == pos { return Some(ltrl); } } None @@ -2909,7 +2909,7 @@ impl<'a> State<'a> { Some(ref cmnt) => { if (*cmnt).pos < pos { try!(self.print_comment(cmnt)); - self.cur_cmnt_and_lit.cur_cmnt += 1u; + self.cur_cmnt_and_lit.cur_cmnt += 1us; } else { break; } } _ => break @@ -2922,7 +2922,7 @@ impl<'a> State<'a> { cmnt: &comments::Comment) -> IoResult<()> { match cmnt.style { comments::Mixed => { - assert_eq!(cmnt.lines.len(), 1u); + assert_eq!(cmnt.lines.len(), 1us); try!(zerobreak(&mut self.s)); try!(word(&mut self.s, &cmnt.lines[0][])); zerobreak(&mut self.s) @@ -2941,11 +2941,11 @@ impl<'a> State<'a> { } comments::Trailing => { try!(word(&mut self.s, " ")); - if cmnt.lines.len() == 1u { + if cmnt.lines.len() == 1us { try!(word(&mut self.s, &cmnt.lines[0][])); hardbreak(&mut self.s) } else { - try!(self.ibox(0u)); + try!(self.ibox(0us)); for line in cmnt.lines.iter() { if !line.is_empty() { try!(word(&mut self.s, &line[])); diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 895268f96c8..ee8742825f4 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -345,8 +345,8 @@ fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool { let tparm_cnt = generics.ty_params.len(); // NB: inadequate check, but we're running // well before resolve, can't get too deep. 
- input_cnt == 1u - && no_output && tparm_cnt == 0u + input_cnt == 1us + && no_output && tparm_cnt == 0us } _ => false } diff --git a/src/libsyntax/util/small_vector.rs b/src/libsyntax/util/small_vector.rs index da358b70225..d8bac19805b 100644 --- a/src/libsyntax/util/small_vector.rs +++ b/src/libsyntax/util/small_vector.rs @@ -194,14 +194,14 @@ mod test { let v: SmallVector<isize> = SmallVector::zero(); assert_eq!(0, v.len()); - assert_eq!(1, SmallVector::one(1i).len()); - assert_eq!(5, SmallVector::many(vec!(1i, 2, 3, 4, 5)).len()); + assert_eq!(1, SmallVector::one(1is).len()); + assert_eq!(5, SmallVector::many(vec!(1is, 2, 3, 4, 5)).len()); } #[test] fn test_push_get() { let mut v = SmallVector::zero(); - v.push(1i); + v.push(1is); assert_eq!(1, v.len()); assert_eq!(&1, v.get(0)); v.push(2); @@ -227,11 +227,11 @@ mod test { let v: Vec<isize> = v.into_iter().collect(); assert_eq!(Vec::new(), v); - let v = SmallVector::one(1i); - assert_eq!(vec!(1i), v.into_iter().collect::<Vec<_>>()); + let v = SmallVector::one(1is); + assert_eq!(vec!(1is), v.into_iter().collect::<Vec<_>>()); - let v = SmallVector::many(vec!(1i, 2i, 3i)); - assert_eq!(vec!(1i, 2i, 3i), v.into_iter().collect::<Vec<_>>()); + let v = SmallVector::many(vec!(1is, 2is, 3is)); + assert_eq!(vec!(1is, 2is, 3is), v.into_iter().collect::<Vec<_>>()); } #[test] @@ -243,12 +243,12 @@ mod test { #[test] #[should_fail] fn test_expect_one_many() { - SmallVector::many(vec!(1i, 2)).expect_one(""); + SmallVector::many(vec!(1is, 2)).expect_one(""); } #[test] fn test_expect_one_one() { - assert_eq!(1i, SmallVector::one(1i).expect_one("")); - assert_eq!(1i, SmallVector::many(vec!(1i)).expect_one("")); + assert_eq!(1is, SmallVector::one(1is).expect_one("")); + assert_eq!(1is, SmallVector::many(vec!(1is)).expect_one("")); } } |