diff options
| author | Richo Healey <richo@psych0tik.net> | 2014-05-22 16:57:53 -0700 |
|---|---|---|
| committer | Richo Healey <richo@psych0tik.net> | 2014-05-24 21:48:10 -0700 |
| commit | 553074506ecd139eb961fb91eb33ad9fd0183acb (patch) | |
| tree | 01682cf8147183250713acf5e8a77265aab7153c /src/libsyntax | |
| parent | bbb70cdd9cd982922cf7390459d53bde409699ae (diff) | |
| download | rust-553074506ecd139eb961fb91eb33ad9fd0183acb.tar.gz rust-553074506ecd139eb961fb91eb33ad9fd0183acb.zip | |
core: rename strbuf::StrBuf to string::String
[breaking-change]
Diffstat (limited to 'src/libsyntax')
| -rw-r--r-- | src/libsyntax/ast_map.rs | 14 | ||||
| -rw-r--r-- | src/libsyntax/ast_util.rs | 12 | ||||
| -rw-r--r-- | src/libsyntax/codemap.rs | 22 | ||||
| -rw-r--r-- | src/libsyntax/crateid.rs | 8 | ||||
| -rw-r--r-- | src/libsyntax/diagnostic.rs | 10 | ||||
| -rw-r--r-- | src/libsyntax/ext/base.rs | 8 | ||||
| -rw-r--r-- | src/libsyntax/ext/concat.rs | 4 | ||||
| -rw-r--r-- | src/libsyntax/ext/concat_idents.rs | 4 | ||||
| -rw-r--r-- | src/libsyntax/ext/deriving/show.rs | 4 | ||||
| -rw-r--r-- | src/libsyntax/ext/expand.rs | 4 | ||||
| -rw-r--r-- | src/libsyntax/ext/format.rs | 22 | ||||
| -rw-r--r-- | src/libsyntax/ext/quote.rs | 70 | ||||
| -rw-r--r-- | src/libsyntax/ext/source_util.rs | 2 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_parser.rs | 6 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 2 | ||||
| -rw-r--r-- | src/libsyntax/parse/comments.rs | 34 | ||||
| -rw-r--r-- | src/libsyntax/parse/lexer.rs | 12 | ||||
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 40 | ||||
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 20 | ||||
| -rw-r--r-- | src/libsyntax/parse/token.rs | 12 | ||||
| -rw-r--r-- | src/libsyntax/print/pp.rs | 10 | ||||
| -rw-r--r-- | src/libsyntax/print/pprust.rs | 52 | ||||
| -rw-r--r-- | src/libsyntax/util/interner.rs | 2 | ||||
| -rw-r--r-- | src/libsyntax/util/parser_testing.rs | 16 |
24 files changed, 195 insertions(+), 195 deletions(-)
diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs index d0b820044da..a6cc12fef7c 100644 --- a/src/libsyntax/ast_map.rs +++ b/src/libsyntax/ast_map.rs @@ -22,7 +22,7 @@ use std::cell::RefCell; use std::fmt; use std::iter; use std::slice; -use std::strbuf::StrBuf; +use std::string::String; #[deriving(Clone, Eq)] pub enum PathElem { @@ -79,10 +79,10 @@ impl<'a, T: Copy> Iterator<T> for Values<'a, T> { /// The type of the iterator used by with_path. pub type PathElems<'a, 'b> = iter::Chain<Values<'a, PathElem>, LinkedPath<'b>>; -pub fn path_to_str<PI: Iterator<PathElem>>(mut path: PI) -> StrBuf { +pub fn path_to_str<PI: Iterator<PathElem>>(mut path: PI) -> String { let itr = token::get_ident_interner(); - path.fold(StrBuf::new(), |mut s, e| { + path.fold(String::new(), |mut s, e| { let e = itr.get(e.name()); if !s.is_empty() { s.push_str("::"); @@ -326,11 +326,11 @@ impl Map { self.with_path_next(id, None, f) } - pub fn path_to_str(&self, id: NodeId) -> StrBuf { + pub fn path_to_str(&self, id: NodeId) -> String { self.with_path(id, |path| path_to_str(path)) } - fn path_to_str_with_ident(&self, id: NodeId, i: Ident) -> StrBuf { + fn path_to_str_with_ident(&self, id: NodeId, i: Ident) -> String { self.with_path(id, |path| { path_to_str(path.chain(Some(PathName(i.name)).move_iter())) }) @@ -416,7 +416,7 @@ impl Map { .unwrap_or_else(|| fail!("AstMap.span: could not find span for id {}", id)) } - pub fn node_to_str(&self, id: NodeId) -> StrBuf { + pub fn node_to_str(&self, id: NodeId) -> String { node_id_to_str(self, id) } } @@ -663,7 +663,7 @@ pub fn map_decoded_item<F: FoldOps>(map: &Map, ii } -fn node_id_to_str(map: &Map, id: NodeId) -> StrBuf { +fn node_id_to_str(map: &Map, id: NodeId) -> String { match map.find(id) { Some(NodeItem(item)) => { let path_str = map.path_to_str_with_ident(id, item.ident); diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 370bc703b10..876e537fc8c 100644 --- a/src/libsyntax/ast_util.rs +++ 
b/src/libsyntax/ast_util.rs @@ -21,14 +21,14 @@ use visit; use std::cell::Cell; use std::cmp; -use std::strbuf::StrBuf; +use std::string::String; use std::u32; -pub fn path_name_i(idents: &[Ident]) -> StrBuf { +pub fn path_name_i(idents: &[Ident]) -> String { // FIXME: Bad copies (#2543 -- same for everything else that says "bad") idents.iter().map(|i| { token::get_ident(*i).get().to_strbuf() - }).collect::<Vec<StrBuf>>().connect("::").to_strbuf() + }).collect::<Vec<String>>().connect("::").to_strbuf() } // totally scary function: ignores all but the last element, should have @@ -139,7 +139,7 @@ pub enum SuffixMode { // Get a string representation of a signed int type, with its value. // We want to avoid "45int" and "-3int" in favor of "45" and "-3" -pub fn int_ty_to_str(t: IntTy, val: Option<i64>, mode: SuffixMode) -> StrBuf { +pub fn int_ty_to_str(t: IntTy, val: Option<i64>, mode: SuffixMode) -> String { let s = match t { TyI if val.is_some() => match mode { AutoSuffix => "", @@ -172,7 +172,7 @@ pub fn int_ty_max(t: IntTy) -> u64 { // Get a string representation of an unsigned int type, with its value. // We want to avoid "42uint" in favor of "42u" -pub fn uint_ty_to_str(t: UintTy, val: Option<u64>, mode: SuffixMode) -> StrBuf { +pub fn uint_ty_to_str(t: UintTy, val: Option<u64>, mode: SuffixMode) -> String { let s = match t { TyU if val.is_some() => match mode { AutoSuffix => "", @@ -200,7 +200,7 @@ pub fn uint_ty_max(t: UintTy) -> u64 { } } -pub fn float_ty_to_str(t: FloatTy) -> StrBuf { +pub fn float_ty_to_str(t: FloatTy) -> String { match t { TyF32 => "f32".to_strbuf(), TyF64 => "f64".to_strbuf(), diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 07cf0a61a73..563bf15486d 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -24,7 +24,7 @@ source code snippets, etc. 
use serialize::{Encodable, Decodable, Encoder, Decoder}; use std::cell::RefCell; use std::rc::Rc; -use std::strbuf::StrBuf; +use std::string::String; pub trait Pos { fn from_uint(n: uint) -> Self; @@ -189,7 +189,7 @@ pub enum MacroFormat { pub struct NameAndSpan { /// The name of the macro that was invoked to create the thing /// with this Span. - pub name: StrBuf, + pub name: String, /// The format with which the macro was invoked. pub format: MacroFormat, /// The span of the macro definition itself. The macro may not @@ -220,7 +220,7 @@ pub struct ExpnInfo { pub callee: NameAndSpan } -pub type FileName = StrBuf; +pub type FileName = String; pub struct FileLines { pub file: Rc<FileMap>, @@ -242,7 +242,7 @@ pub struct FileMap { /// e.g. `<anon>` pub name: FileName, /// The complete source code - pub src: StrBuf, + pub src: String, /// The start position of this source in the CodeMap pub start_pos: BytePos, /// Locations of lines beginnings in the source code @@ -270,7 +270,7 @@ impl FileMap { } // get a line from the list of pre-computed line-beginnings - pub fn get_line(&self, line: int) -> StrBuf { + pub fn get_line(&self, line: int) -> String { let mut lines = self.lines.borrow_mut(); let begin: BytePos = *lines.get(line as uint) - self.start_pos; let begin = begin.to_uint(); @@ -307,7 +307,7 @@ impl CodeMap { } } - pub fn new_filemap(&self, filename: FileName, src: StrBuf) -> Rc<FileMap> { + pub fn new_filemap(&self, filename: FileName, src: String) -> Rc<FileMap> { let mut files = self.files.borrow_mut(); let start_pos = match files.last() { None => 0, @@ -318,9 +318,9 @@ impl CodeMap { // FIXME #12884: no efficient/safe way to remove from the start of a string // and reuse the allocation. 
let mut src = if src.as_slice().starts_with("\ufeff") { - StrBuf::from_str(src.as_slice().slice_from(3)) + String::from_str(src.as_slice().slice_from(3)) } else { - StrBuf::from_str(src.as_slice()) + String::from_str(src.as_slice()) }; // Append '\n' in case it's not already there. @@ -344,7 +344,7 @@ impl CodeMap { filemap } - pub fn mk_substr_filename(&self, sp: Span) -> StrBuf { + pub fn mk_substr_filename(&self, sp: Span) -> String { let pos = self.lookup_char_pos(sp.lo); (format!("<{}:{}:{}>", pos.file.name, @@ -367,7 +367,7 @@ impl CodeMap { } } - pub fn span_to_str(&self, sp: Span) -> StrBuf { + pub fn span_to_str(&self, sp: Span) -> String { if self.files.borrow().len() == 0 && sp == DUMMY_SP { return "no-location".to_strbuf(); } @@ -396,7 +396,7 @@ impl CodeMap { FileLines {file: lo.file, lines: lines} } - pub fn span_to_snippet(&self, sp: Span) -> Option<StrBuf> { + pub fn span_to_snippet(&self, sp: Span) -> Option<String> { let begin = self.lookup_byte_offset(sp.lo); let end = self.lookup_byte_offset(sp.hi); diff --git a/src/libsyntax/crateid.rs b/src/libsyntax/crateid.rs index dc699bb9850..06b3ed91a5a 100644 --- a/src/libsyntax/crateid.rs +++ b/src/libsyntax/crateid.rs @@ -24,11 +24,11 @@ use std::from_str::FromStr; pub struct CrateId { /// A path which represents the codes origin. By convention this is the /// URL, without `http://` or `https://` prefix, to the crate's repository - pub path: StrBuf, + pub path: String, /// The name of the crate. - pub name: StrBuf, + pub name: String, /// The version of the crate. 
- pub version: Option<StrBuf>, + pub version: Option<String>, } impl fmt::Show for CrateId { @@ -111,7 +111,7 @@ impl CrateId { } } - pub fn short_name_with_version(&self) -> StrBuf { + pub fn short_name_with_version(&self) -> String { (format!("{}-{}", self.name, self.version_or_default())).to_strbuf() } diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index 446a8f93753..c6a25bc6129 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -17,7 +17,7 @@ use std::cell::{RefCell, Cell}; use std::fmt; use std::io; use std::iter::range; -use std::strbuf::StrBuf; +use std::string::String; use term; // maximum number of lines we will print for each error; arbitrary. @@ -405,7 +405,7 @@ fn highlight_lines(err: &mut EmitterWriter, // indent past |name:## | and the 0-offset column location let left = fm.name.len() + digits + lo.col.to_uint() + 3u; - let mut s = StrBuf::new(); + let mut s = String::new(); // Skip is the number of characters we need to skip because they are // part of the 'filename:line ' part of the previous line. 
let skip = fm.name.len() + digits + 3u; @@ -425,7 +425,7 @@ fn highlight_lines(err: &mut EmitterWriter, }; } try!(write!(&mut err.dst, "{}", s)); - let mut s = StrBuf::from_str("^"); + let mut s = String::from_str("^"); let hi = cm.lookup_char_pos(sp.hi); if hi.col != lo.col { // the ^ already takes up one space @@ -473,7 +473,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter, let hi = cm.lookup_char_pos(sp.hi); // Span seems to use half-opened interval, so subtract 1 let skip = last_line_start.len() + hi.col.to_uint() - 1; - let mut s = StrBuf::new(); + let mut s = String::new(); for _ in range(0, skip) { s.push_char(' '); } @@ -508,7 +508,7 @@ fn print_macro_backtrace(w: &mut EmitterWriter, Ok(()) } -pub fn expect<T:Clone>(diag: &SpanHandler, opt: Option<T>, msg: || -> StrBuf) +pub fn expect<T:Clone>(diag: &SpanHandler, opt: Option<T>, msg: || -> String) -> T { match opt { Some(ref t) => (*t).clone(), diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 854f1d02219..0808533cb43 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -30,7 +30,7 @@ use collections::HashMap; // ast::MacInvocTT. 
pub struct MacroDef { - pub name: StrBuf, + pub name: String, pub ext: SyntaxExtension } @@ -364,8 +364,8 @@ pub fn syntax_expander_table() -> SyntaxEnv { pub struct MacroCrate { pub lib: Option<Path>, - pub macros: Vec<StrBuf>, - pub registrar_symbol: Option<StrBuf>, + pub macros: Vec<String>, + pub registrar_symbol: Option<String>, } pub trait CrateLoader { @@ -543,7 +543,7 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree], name: &str) - -> Option<StrBuf> { + -> Option<String> { if tts.len() != 1 { cx.span_err(sp, format!("{} takes 1 argument.", name).as_slice()); } else { diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs index b2baff8d286..83f45ca9f16 100644 --- a/src/libsyntax/ext/concat.rs +++ b/src/libsyntax/ext/concat.rs @@ -14,7 +14,7 @@ use ext::base; use ext::build::AstBuilder; use parse::token; -use std::strbuf::StrBuf; +use std::string::String; pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, sp: codemap::Span, @@ -24,7 +24,7 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, Some(e) => e, None => return base::DummyResult::expr(sp) }; - let mut accumulator = StrBuf::new(); + let mut accumulator = String::new(); for e in es.move_iter() { match e.node { ast::ExprLit(lit) => { diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index 24478358d79..dad7f3e6979 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -16,11 +16,11 @@ use owned_slice::OwnedSlice; use parse::token; use parse::token::{str_to_ident}; -use std::strbuf::StrBuf; +use std::string::String; pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> Box<base::MacResult> { - let mut res_str = StrBuf::new(); + let mut res_str = String::new(); for (i, e) in tts.iter().enumerate() { if i & 1 == 1 { match *e { diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs index 343100d3a8e..fb6a85e4e7c 100644 --- 
a/src/libsyntax/ext/deriving/show.rs +++ b/src/libsyntax/ext/deriving/show.rs @@ -18,7 +18,7 @@ use ext::deriving::generic::*; use parse::token; use collections::HashMap; -use std::strbuf::StrBuf; +use std::string::String; pub fn expand_deriving_show(cx: &mut ExtCtxt, span: Span, @@ -70,7 +70,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, } }; - let mut format_string = StrBuf::from_str(token::get_ident(name).get()); + let mut format_string = String::from_str(token::get_ident(name).get()); // the internal fields we're actually formatting let mut exprs = Vec::new(); diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 658e4bafbe2..6c6bf520104 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -1118,7 +1118,7 @@ mod test { } } - fn expand_crate_str(crate_str: StrBuf) -> ast::Crate { + fn expand_crate_str(crate_str: String) -> ast::Crate { let ps = parse::new_parse_sess(); let crate_ast = string_to_parser(&ps, crate_str).parse_crate_mod(); // the cfg argument actually does matter, here... 
@@ -1136,7 +1136,7 @@ mod test { // println!("expanded: {:?}\n",expanded_ast); //mtwt_resolve_crate(expanded_ast) //} - //fn expand_and_resolve_and_pretty_print (crate_str: @str) -> StrBuf { + //fn expand_and_resolve_and_pretty_print (crate_str: @str) -> String { //let resolved_ast = expand_and_resolve(crate_str); //pprust::to_str(&resolved_ast,fake_print_crate,get_ident_interner()) //} diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs index ad4b798cfe5..9a0b91be146 100644 --- a/src/libsyntax/ext/format.rs +++ b/src/libsyntax/ext/format.rs @@ -23,14 +23,14 @@ use collections::{HashMap, HashSet}; #[deriving(Eq)] enum ArgumentType { - Known(StrBuf), + Known(String), Unsigned, String, } enum Position { Exact(uint), - Named(StrBuf), + Named(String), } struct Context<'a, 'b> { @@ -45,13 +45,13 @@ struct Context<'a, 'b> { // Note that we keep a side-array of the ordering of the named arguments // found to be sure that we can translate them in the same order that they // were declared in. 
- names: HashMap<StrBuf, @ast::Expr>, - name_types: HashMap<StrBuf, ArgumentType>, - name_ordering: Vec<StrBuf>, + names: HashMap<String, @ast::Expr>, + name_types: HashMap<String, ArgumentType>, + name_ordering: Vec<String>, // Collection of the compiled `rt::Piece` structures pieces: Vec<@ast::Expr> , - name_positions: HashMap<StrBuf, uint>, + name_positions: HashMap<String, uint>, method_statics: Vec<@ast::Item> , // Updated as arguments are consumed or methods are entered @@ -74,10 +74,10 @@ pub enum Invocation { /// named arguments)) fn parse_args(ecx: &mut ExtCtxt, sp: Span, allow_method: bool, tts: &[ast::TokenTree]) - -> (Invocation, Option<(@ast::Expr, Vec<@ast::Expr>, Vec<StrBuf>, - HashMap<StrBuf, @ast::Expr>)>) { + -> (Invocation, Option<(@ast::Expr, Vec<@ast::Expr>, Vec<String>, + HashMap<String, @ast::Expr>)>) { let mut args = Vec::new(); - let mut names = HashMap::<StrBuf, @ast::Expr>::new(); + let mut names = HashMap::<String, @ast::Expr>::new(); let mut order = Vec::new(); let mut p = rsparse::new_parser_from_tts(ecx.parse_sess(), @@ -855,8 +855,8 @@ pub fn expand_format_args_method(ecx: &mut ExtCtxt, sp: Span, pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, invocation: Invocation, efmt: @ast::Expr, args: Vec<@ast::Expr>, - name_ordering: Vec<StrBuf>, - names: HashMap<StrBuf, @ast::Expr>) -> @ast::Expr { + name_ordering: Vec<String>, + names: HashMap<String, @ast::Expr>) -> @ast::Expr { let arg_types = Vec::from_fn(args.len(), |_| None); let mut cx = Context { ecx: ecx, diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 4f6e95b0b69..5f330631819 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -55,7 +55,7 @@ pub mod rt { trait ToSource : ToTokens { // Takes a thing and generates a string containing rust code for it. - pub fn to_source() -> StrBuf; + pub fn to_source() -> String; // If you can make source, you can definitely make tokens. 
pub fn to_tokens(cx: &ExtCtxt) -> ~[TokenTree] { @@ -67,67 +67,67 @@ pub mod rt { pub trait ToSource { // Takes a thing and generates a string containing rust code for it. - fn to_source(&self) -> StrBuf; + fn to_source(&self) -> String; } impl ToSource for ast::Ident { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { get_ident(*self).get().to_strbuf() } } impl ToSource for @ast::Item { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { pprust::item_to_str(*self) } } impl<'a> ToSource for &'a [@ast::Item] { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { self.iter() .map(|i| i.to_source()) - .collect::<Vec<StrBuf>>() + .collect::<Vec<String>>() .connect("\n\n") .to_strbuf() } } impl ToSource for ast::Ty { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { pprust::ty_to_str(self) } } impl<'a> ToSource for &'a [ast::Ty] { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { self.iter() .map(|i| i.to_source()) - .collect::<Vec<StrBuf>>() + .collect::<Vec<String>>() .connect(", ") .to_strbuf() } } impl ToSource for Generics { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { pprust::generics_to_str(self) } } impl ToSource for @ast::Expr { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { pprust::expr_to_str(*self) } } impl ToSource for ast::Block { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { pprust::block_to_str(self) } } impl<'a> ToSource for &'a str { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { let lit = dummy_spanned(ast::LitStr( token::intern_and_get_ident(*self), ast::CookedStr)); pprust::lit_to_str(&lit) @@ -135,41 +135,41 @@ pub mod rt { } impl ToSource for () { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { "()".to_strbuf() } } impl ToSource for bool { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { let lit = dummy_spanned(ast::LitBool(*self)); 
pprust::lit_to_str(&lit) } } impl ToSource for char { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { let lit = dummy_spanned(ast::LitChar(*self)); pprust::lit_to_str(&lit) } } impl ToSource for int { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI)); pprust::lit_to_str(&lit) } } impl ToSource for i8 { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI8)); pprust::lit_to_str(&lit) } } impl ToSource for i16 { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI16)); pprust::lit_to_str(&lit) } @@ -177,49 +177,49 @@ pub mod rt { impl ToSource for i32 { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI32)); pprust::lit_to_str(&lit) } } impl ToSource for i64 { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI64)); pprust::lit_to_str(&lit) } } impl ToSource for uint { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU)); pprust::lit_to_str(&lit) } } impl ToSource for u8 { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU8)); pprust::lit_to_str(&lit) } } impl ToSource for u16 { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU16)); pprust::lit_to_str(&lit) } } impl ToSource for u32 { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU32)); pprust::lit_to_str(&lit) } } impl ToSource for u64 { - fn to_source(&self) -> StrBuf { + fn to_source(&self) -> String { let lit = 
dummy_spanned(ast::LitUint(*self as u64, ast::TyU64)); pprust::lit_to_str(&lit) } @@ -271,15 +271,15 @@ pub mod rt { impl_to_tokens!(u64) pub trait ExtParseUtils { - fn parse_item(&self, s: StrBuf) -> @ast::Item; - fn parse_expr(&self, s: StrBuf) -> @ast::Expr; - fn parse_stmt(&self, s: StrBuf) -> @ast::Stmt; - fn parse_tts(&self, s: StrBuf) -> Vec<ast::TokenTree> ; + fn parse_item(&self, s: String) -> @ast::Item; + fn parse_expr(&self, s: String) -> @ast::Expr; + fn parse_stmt(&self, s: String) -> @ast::Stmt; + fn parse_tts(&self, s: String) -> Vec<ast::TokenTree> ; } impl<'a> ExtParseUtils for ExtCtxt<'a> { - fn parse_item(&self, s: StrBuf) -> @ast::Item { + fn parse_item(&self, s: String) -> @ast::Item { let res = parse::parse_item_from_source_str( "<quote expansion>".to_strbuf(), s, @@ -294,7 +294,7 @@ pub mod rt { } } - fn parse_stmt(&self, s: StrBuf) -> @ast::Stmt { + fn parse_stmt(&self, s: String) -> @ast::Stmt { parse::parse_stmt_from_source_str("<quote expansion>".to_strbuf(), s, self.cfg(), @@ -302,14 +302,14 @@ pub mod rt { self.parse_sess()) } - fn parse_expr(&self, s: StrBuf) -> @ast::Expr { + fn parse_expr(&self, s: String) -> @ast::Expr { parse::parse_expr_from_source_str("<quote expansion>".to_strbuf(), s, self.cfg(), self.parse_sess()) } - fn parse_tts(&self, s: StrBuf) -> Vec<ast::TokenTree> { + fn parse_tts(&self, s: String) -> Vec<ast::TokenTree> { parse::parse_tts_from_source_str("<quote expansion>".to_strbuf(), s, self.cfg(), @@ -375,7 +375,7 @@ pub fn expand_quote_stmt(cx: &mut ExtCtxt, base::MacExpr::new(expanded) } -fn ids_ext(strs: Vec<StrBuf> ) -> Vec<ast::Ident> { +fn ids_ext(strs: Vec<String> ) -> Vec<ast::Ident> { strs.iter().map(|str| str_to_ident((*str).as_slice())).collect() } diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 452719d2dd8..04ce607e9f5 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -74,7 +74,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: 
Span, tts: &[ast::TokenTree]) let string = cx.mod_path() .iter() .map(|x| token::get_ident(*x).get().to_strbuf()) - .collect::<Vec<StrBuf>>() + .collect::<Vec<String>>() .connect("::"); base::MacExpr::new(cx.expr_str( sp, diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index ce1c7da585f..8780620ced5 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -203,8 +203,8 @@ pub fn nameize(p_s: &ParseSess, ms: &[Matcher], res: &[Rc<NamedMatch>]) pub enum ParseResult { Success(HashMap<Ident, Rc<NamedMatch>>), - Failure(codemap::Span, StrBuf), - Error(codemap::Span, StrBuf) + Failure(codemap::Span, String), + Error(codemap::Span, String) } pub fn parse_or_else(sess: &ParseSess, @@ -387,7 +387,7 @@ pub fn parse(sess: &ParseSess, token::get_ident(bind))).to_strbuf() } _ => fail!() - } }).collect::<Vec<StrBuf>>().connect(" or "); + } }).collect::<Vec<String>>().connect(" or "); return Error(sp, format!( "local ambiguity: multiple parsing options: \ built-in NTs {} or {} other options.", diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 6d799eeae6c..055821c40fe 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -101,7 +101,7 @@ fn lookup_cur_matched(r: &TtReader, name: Ident) -> Rc<NamedMatch> { enum LockstepIterSize { LisUnconstrained, LisConstraint(uint, Ident), - LisContradiction(StrBuf), + LisContradiction(String), } fn lis_merge(lhs: LockstepIterSize, rhs: LockstepIterSize) -> LockstepIterSize { diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs index 63b1bf44061..907e89622d0 100644 --- a/src/libsyntax/parse/comments.rs +++ b/src/libsyntax/parse/comments.rs @@ -19,7 +19,7 @@ use parse::token; use std::io; use std::str; -use std::strbuf::StrBuf; +use std::string::String; use std::uint; #[deriving(Clone, Eq)] @@ -33,7 +33,7 @@ pub enum CommentStyle { #[deriving(Clone)] pub struct 
Comment { pub style: CommentStyle, - pub lines: Vec<StrBuf>, + pub lines: Vec<String>, pub pos: BytePos, } @@ -53,9 +53,9 @@ pub fn doc_comment_style(comment: &str) -> ast::AttrStyle { } } -pub fn strip_doc_comment_decoration(comment: &str) -> StrBuf { +pub fn strip_doc_comment_decoration(comment: &str) -> String { /// remove whitespace-only lines from the start/end of lines - fn vertical_trim(lines: Vec<StrBuf> ) -> Vec<StrBuf> { + fn vertical_trim(lines: Vec<String> ) -> Vec<String> { let mut i = 0u; let mut j = lines.len(); // first line of all-stars should be omitted @@ -81,7 +81,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> StrBuf { } /// remove a "[ \t]*\*" block from each line, if possible - fn horizontal_trim(lines: Vec<StrBuf> ) -> Vec<StrBuf> { + fn horizontal_trim(lines: Vec<String> ) -> Vec<String> { let mut i = uint::MAX; let mut can_trim = true; let mut first = true; @@ -130,7 +130,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> StrBuf { let lines = comment.slice(3u, comment.len() - 2u) .lines_any() .map(|s| s.to_strbuf()) - .collect::<Vec<StrBuf> >(); + .collect::<Vec<String> >(); let lines = vertical_trim(lines); let lines = horizontal_trim(lines); @@ -141,8 +141,8 @@ pub fn strip_doc_comment_decoration(comment: &str) -> StrBuf { fail!("not a doc-comment: {}", comment); } -fn read_to_eol(rdr: &mut StringReader) -> StrBuf { - let mut val = StrBuf::new(); +fn read_to_eol(rdr: &mut StringReader) -> String { + let mut val = String::new(); while !rdr.curr_is('\n') && !is_eof(rdr) { val.push_char(rdr.curr.unwrap()); bump(rdr); @@ -151,7 +151,7 @@ fn read_to_eol(rdr: &mut StringReader) -> StrBuf { return val } -fn read_one_line_comment(rdr: &mut StringReader) -> StrBuf { +fn read_one_line_comment(rdr: &mut StringReader) -> String { let val = read_to_eol(rdr); assert!((val.as_slice()[0] == '/' as u8 && val.as_slice()[1] == '/' as u8) || @@ -202,7 +202,7 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool, 
comments: &mut Vec<Comment>) { debug!(">>> line comments"); let p = rdr.last_pos; - let mut lines: Vec<StrBuf> = Vec::new(); + let mut lines: Vec<String> = Vec::new(); while rdr.curr_is('/') && nextch_is(rdr, '/') { let line = read_one_line_comment(rdr); debug!("{}", line); @@ -241,8 +241,8 @@ fn all_whitespace(s: &str, col: CharPos) -> Option<uint> { return Some(cursor); } -fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<StrBuf> , - s: StrBuf, col: CharPos) { +fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String> , + s: String, col: CharPos) { let len = s.len(); let s1 = match all_whitespace(s.as_slice(), col) { Some(col) => { @@ -263,12 +263,12 @@ fn read_block_comment(rdr: &mut StringReader, comments: &mut Vec<Comment> ) { debug!(">>> block comment"); let p = rdr.last_pos; - let mut lines: Vec<StrBuf> = Vec::new(); + let mut lines: Vec<String> = Vec::new(); let col = rdr.col; bump(rdr); bump(rdr); - let mut curr_line = StrBuf::from_str("/*"); + let mut curr_line = String::from_str("/*"); // doc-comments are not really comments, they are attributes if (rdr.curr_is('*') && !nextch_is(rdr, '*')) || rdr.curr_is('!') { @@ -297,7 +297,7 @@ fn read_block_comment(rdr: &mut StringReader, trim_whitespace_prefix_and_push_line(&mut lines, curr_line, col); - curr_line = StrBuf::new(); + curr_line = String::new(); bump(rdr); } else { curr_line.push_char(rdr.curr.unwrap()); @@ -356,7 +356,7 @@ fn consume_comment(rdr: &mut StringReader, #[deriving(Clone)] pub struct Literal { - pub lit: StrBuf, + pub lit: String, pub pos: BytePos, } @@ -364,7 +364,7 @@ pub struct Literal { // probably not a good thing. 
pub fn gather_comments_and_literals(span_diagnostic: &diagnostic::SpanHandler, - path: StrBuf, + path: String, srdr: &mut io::Reader) -> (Vec<Comment>, Vec<Literal>) { let src = srdr.read_to_end().unwrap(); diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs index 34116c3a4be..e045116c9e2 100644 --- a/src/libsyntax/parse/lexer.rs +++ b/src/libsyntax/parse/lexer.rs @@ -424,10 +424,10 @@ fn consume_block_comment(rdr: &mut StringReader) -> Option<TokenAndSpan> { if res.is_some() { res } else { consume_whitespace_and_comments(rdr) } } -fn scan_exponent(rdr: &mut StringReader, start_bpos: BytePos) -> Option<StrBuf> { +fn scan_exponent(rdr: &mut StringReader, start_bpos: BytePos) -> Option<String> { // \x00 hits the `return None` case immediately, so this is fine. let mut c = rdr.curr.unwrap_or('\x00'); - let mut rslt = StrBuf::new(); + let mut rslt = String::new(); if c == 'e' || c == 'E' { rslt.push_char(c); bump(rdr); @@ -449,8 +449,8 @@ fn scan_exponent(rdr: &mut StringReader, start_bpos: BytePos) -> Option<StrBuf> } } -fn scan_digits(rdr: &mut StringReader, radix: uint) -> StrBuf { - let mut rslt = StrBuf::new(); +fn scan_digits(rdr: &mut StringReader, radix: uint) -> String { + let mut rslt = String::new(); loop { let c = rdr.curr; if c == Some('_') { bump(rdr); continue; } @@ -858,7 +858,7 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token { return token::LIT_CHAR(c2); } '"' => { - let mut accum_str = StrBuf::new(); + let mut accum_str = String::new(); let start_bpos = rdr.last_pos; bump(rdr); while !rdr.curr_is('"') { @@ -1002,7 +1002,7 @@ mod test { // open a string reader for the given string fn setup<'a>(span_handler: &'a diagnostic::SpanHandler, - teststr: StrBuf) -> StringReader<'a> { + teststr: String) -> StringReader<'a> { let fm = span_handler.cm.new_filemap("zebra.rs".to_strbuf(), teststr); new_string_reader(span_handler, fm) } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 
31a67ff92f5..c4947b528f1 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -77,8 +77,8 @@ pub fn parse_crate_attrs_from_file( inner } -pub fn parse_crate_from_source_str(name: StrBuf, - source: StrBuf, +pub fn parse_crate_from_source_str(name: String, + source: String, cfg: ast::CrateConfig, sess: &ParseSess) -> ast::Crate { @@ -89,8 +89,8 @@ pub fn parse_crate_from_source_str(name: StrBuf, maybe_aborted(p.parse_crate_mod(),p) } -pub fn parse_crate_attrs_from_source_str(name: StrBuf, - source: StrBuf, +pub fn parse_crate_attrs_from_source_str(name: String, + source: String, cfg: ast::CrateConfig, sess: &ParseSess) -> Vec<ast::Attribute> { @@ -102,8 +102,8 @@ pub fn parse_crate_attrs_from_source_str(name: StrBuf, inner } -pub fn parse_expr_from_source_str(name: StrBuf, - source: StrBuf, +pub fn parse_expr_from_source_str(name: String, + source: String, cfg: ast::CrateConfig, sess: &ParseSess) -> @ast::Expr { @@ -111,8 +111,8 @@ pub fn parse_expr_from_source_str(name: StrBuf, maybe_aborted(p.parse_expr(), p) } -pub fn parse_item_from_source_str(name: StrBuf, - source: StrBuf, +pub fn parse_item_from_source_str(name: String, + source: String, cfg: ast::CrateConfig, sess: &ParseSess) -> Option<@ast::Item> { @@ -121,8 +121,8 @@ pub fn parse_item_from_source_str(name: StrBuf, maybe_aborted(p.parse_item(attrs),p) } -pub fn parse_meta_from_source_str(name: StrBuf, - source: StrBuf, +pub fn parse_meta_from_source_str(name: String, + source: String, cfg: ast::CrateConfig, sess: &ParseSess) -> @ast::MetaItem { @@ -130,8 +130,8 @@ pub fn parse_meta_from_source_str(name: StrBuf, maybe_aborted(p.parse_meta_item(),p) } -pub fn parse_stmt_from_source_str(name: StrBuf, - source: StrBuf, +pub fn parse_stmt_from_source_str(name: String, + source: String, cfg: ast::CrateConfig, attrs: Vec<ast::Attribute> , sess: &ParseSess) @@ -145,8 +145,8 @@ pub fn parse_stmt_from_source_str(name: StrBuf, maybe_aborted(p.parse_stmt(attrs),p) } -pub fn 
parse_tts_from_source_str(name: StrBuf, - source: StrBuf, +pub fn parse_tts_from_source_str(name: String, + source: String, cfg: ast::CrateConfig, sess: &ParseSess) -> Vec<ast::TokenTree> { @@ -164,8 +164,8 @@ pub fn parse_tts_from_source_str(name: StrBuf, // Create a new parser from a source string pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, cfg: ast::CrateConfig, - name: StrBuf, - source: StrBuf) + name: String, + source: String) -> Parser<'a> { filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg) } @@ -185,7 +185,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, cfg: ast::CrateConfig, path: &Path, owns_directory: bool, - module_name: Option<StrBuf>, + module_name: Option<String>, sp: Span) -> Parser<'a> { let mut p = filemap_to_parser(sess, file_to_filemap(sess, path, Some(sp)), cfg); p.owns_directory = owns_directory; @@ -244,7 +244,7 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>) // given a session and a string, add the string to // the session's codemap and return the new filemap -pub fn string_to_filemap(sess: &ParseSess, source: StrBuf, path: StrBuf) +pub fn string_to_filemap(sess: &ParseSess, source: String, path: String) -> Rc<FileMap> { sess.span_diagnostic.cm.new_filemap(path, source) } @@ -293,7 +293,7 @@ mod test { use util::parser_testing::{string_to_expr, string_to_item}; use util::parser_testing::string_to_stmt; - fn to_json_str<'a, E: Encodable<json::Encoder<'a>, io::IoError>>(val: &E) -> StrBuf { + fn to_json_str<'a, E: Encodable<json::Encoder<'a>, io::IoError>>(val: &E) -> String { let mut writer = MemWriter::new(); let mut encoder = json::Encoder::new(&mut writer as &mut io::Writer); let _ = val.encode(&mut encoder); @@ -709,7 +709,7 @@ mod test { #[test] fn attrs_fix_bug () { string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag]) - -> Result<@Writer, StrBuf> { + -> Result<@Writer, String> { #[cfg(windows)] fn wb() -> c_int { (O_WRONLY | 
libc::consts::os::extra::O_BINARY) as c_int diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index ae104707284..bfdf0361f05 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -79,7 +79,7 @@ use owned_slice::OwnedSlice; use collections::HashSet; use std::mem::replace; use std::rc::Rc; -use std::strbuf::StrBuf; +use std::string::String; #[allow(non_camel_case_types)] #[deriving(Eq)] @@ -350,7 +350,7 @@ pub struct Parser<'a> { /// Name of the root module this parser originated from. If `None`, then the /// name is not known. This does not change while the parser is descending /// into modules, and sub-parsers have new values for this name. - pub root_module_name: Option<StrBuf>, + pub root_module_name: Option<String>, } fn is_plain_ident_or_underscore(t: &token::Token) -> bool { @@ -359,12 +359,12 @@ fn is_plain_ident_or_underscore(t: &token::Token) -> bool { impl<'a> Parser<'a> { // convert a token to a string using self's reader - pub fn token_to_str(token: &token::Token) -> StrBuf { + pub fn token_to_str(token: &token::Token) -> String { token::to_str(token) } // convert the current token to a string using self's reader - pub fn this_token_to_str(&mut self) -> StrBuf { + pub fn this_token_to_str(&mut self) -> String { Parser::token_to_str(&self.token) } @@ -399,7 +399,7 @@ impl<'a> Parser<'a> { pub fn expect_one_of(&mut self, edible: &[token::Token], inedible: &[token::Token]) { - fn tokens_to_str(tokens: &[token::Token]) -> StrBuf { + fn tokens_to_str(tokens: &[token::Token]) -> String { let mut i = tokens.iter(); // This might be a sign we need a connect method on Iterator. 
let b = i.next() @@ -3883,7 +3883,7 @@ impl<'a> Parser<'a> { (ident, ItemImpl(generics, opt_trait, ty, meths), Some(inner_attrs)) } - // parse a::B<StrBuf,int> + // parse a::B<String,int> fn parse_trait_ref(&mut self) -> TraitRef { ast::TraitRef { path: self.parse_path(LifetimeAndTypesWithoutColons).path, @@ -3891,7 +3891,7 @@ impl<'a> Parser<'a> { } } - // parse B + C<StrBuf,int> + D + // parse B + C<String,int> + D fn parse_trait_ref_list(&mut self, ket: &token::Token) -> Vec<TraitRef> { self.parse_seq_to_before_end( ket, @@ -4214,12 +4214,12 @@ impl<'a> Parser<'a> { fn eval_src_mod_from_path(&mut self, path: Path, owns_directory: bool, - name: StrBuf, + name: String, id_sp: Span) -> (ast::Item_, Vec<ast::Attribute> ) { let mut included_mod_stack = self.sess.included_mod_stack.borrow_mut(); match included_mod_stack.iter().position(|p| *p == path) { Some(i) => { - let mut err = StrBuf::from_str("circular modules: "); + let mut err = String::from_str("circular modules: "); let len = included_mod_stack.len(); for p in included_mod_stack.slice(i, len).iter() { err.push_str(p.display().as_maybe_owned().as_slice()); @@ -4808,7 +4808,7 @@ impl<'a> Parser<'a> { // FAILURE TO PARSE ITEM if visibility != Inherited { - let mut s = StrBuf::from_str("unmatched visibility `"); + let mut s = String::from_str("unmatched visibility `"); if visibility == Public { s.push_str("pub") } else { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 17ce03ba213..e3788801293 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -21,7 +21,7 @@ use std::fmt; use std::path::BytesContainer; use std::mem; use std::rc::Rc; -use std::strbuf::StrBuf; +use std::string::String; #[allow(non_camel_case_types)] #[deriving(Clone, Encodable, Decodable, Eq, TotalEq, Hash, Show)] @@ -136,7 +136,7 @@ impl fmt::Show for Nonterminal { } } -pub fn binop_to_str(o: BinOp) -> StrBuf { +pub fn binop_to_str(o: BinOp) -> String { match o { PLUS => "+".to_strbuf(), 
MINUS => "-".to_strbuf(), @@ -151,7 +151,7 @@ pub fn binop_to_str(o: BinOp) -> StrBuf { } } -pub fn to_str(t: &Token) -> StrBuf { +pub fn to_str(t: &Token) -> String { match *t { EQ => "=".to_strbuf(), LT => "<".to_strbuf(), @@ -194,7 +194,7 @@ pub fn to_str(t: &Token) -> StrBuf { /* Literals */ LIT_CHAR(c) => { - let mut res = StrBuf::from_str("'"); + let mut res = String::from_str("'"); c.escape_default(|c| { res.push_char(c); }); @@ -207,7 +207,7 @@ pub fn to_str(t: &Token) -> StrBuf { ast_util::ForceSuffix), LIT_INT_UNSUFFIXED(i) => { (i as u64).to_str().to_strbuf() } LIT_FLOAT(s, t) => { - let mut body = StrBuf::from_str(get_ident(s).get()); + let mut body = String::from_str(get_ident(s).get()); if body.as_slice().ends_with(".") { body.push_char('0'); // `10.f` is not a float literal } @@ -215,7 +215,7 @@ pub fn to_str(t: &Token) -> StrBuf { body } LIT_FLOAT_UNSUFFIXED(s) => { - let mut body = StrBuf::from_str(get_ident(s).get()); + let mut body = String::from_str(get_ident(s).get()); if body.as_slice().ends_with(".") { body.push_char('0'); // `10.f` is not a float literal } diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index b334aa63270..f45462da423 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -62,7 +62,7 @@ */ use std::io; -use std::strbuf::StrBuf; +use std::string::String; #[deriving(Clone, Eq)] pub enum Breaks { @@ -84,7 +84,7 @@ pub struct BeginToken { #[deriving(Clone)] pub enum Token { - String(StrBuf, int), + String(String, int), Break(BreakToken), Begin(BeginToken), End, @@ -109,7 +109,7 @@ impl Token { } } -pub fn tok_str(t: Token) -> StrBuf { +pub fn tok_str(t: Token) -> String { match t { String(s, len) => return format!("STR({},{})", s, len).to_strbuf(), Break(_) => return "BREAK".to_strbuf(), @@ -124,12 +124,12 @@ pub fn buf_str(toks: Vec<Token>, left: uint, right: uint, lim: uint) - -> StrBuf { + -> String { let n = toks.len(); assert_eq!(n, szs.len()); let mut i = left; let mut l = lim; - let 
mut s = StrBuf::from_str("["); + let mut s = String::from_str("["); while i != right && l != 0u { l -= 1u; if i != left { diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 5500ca45753..c5fa6351630 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -31,7 +31,7 @@ use std::io; use std::mem; use std::rc::Rc; use std::str; -use std::strbuf::StrBuf; +use std::string::String; pub enum AnnNode<'a> { NodeBlock(&'a ast::Block), @@ -97,7 +97,7 @@ pub static default_columns: uint = 78u; pub fn print_crate<'a>(cm: &'a CodeMap, span_diagnostic: &diagnostic::SpanHandler, krate: &ast::Crate, - filename: StrBuf, + filename: String, input: &mut io::Reader, out: Box<io::Writer>, ann: &'a PpAnn, @@ -132,7 +132,7 @@ pub fn print_crate<'a>(cm: &'a CodeMap, eof(&mut s.s) } -pub fn to_str(f: |&mut State| -> IoResult<()>) -> StrBuf { +pub fn to_str(f: |&mut State| -> IoResult<()>) -> String { let mut s = rust_printer(box MemWriter::new()); f(&mut s).unwrap(); eof(&mut s.s).unwrap(); @@ -148,61 +148,61 @@ pub fn to_str(f: |&mut State| -> IoResult<()>) -> StrBuf { } } -pub fn ty_to_str(ty: &ast::Ty) -> StrBuf { +pub fn ty_to_str(ty: &ast::Ty) -> String { to_str(|s| s.print_type(ty)) } -pub fn pat_to_str(pat: &ast::Pat) -> StrBuf { +pub fn pat_to_str(pat: &ast::Pat) -> String { to_str(|s| s.print_pat(pat)) } -pub fn expr_to_str(e: &ast::Expr) -> StrBuf { +pub fn expr_to_str(e: &ast::Expr) -> String { to_str(|s| s.print_expr(e)) } -pub fn lifetime_to_str(e: &ast::Lifetime) -> StrBuf { +pub fn lifetime_to_str(e: &ast::Lifetime) -> String { to_str(|s| s.print_lifetime(e)) } -pub fn tt_to_str(tt: &ast::TokenTree) -> StrBuf { +pub fn tt_to_str(tt: &ast::TokenTree) -> String { to_str(|s| s.print_tt(tt)) } -pub fn tts_to_str(tts: &[ast::TokenTree]) -> StrBuf { +pub fn tts_to_str(tts: &[ast::TokenTree]) -> String { to_str(|s| s.print_tts(tts)) } -pub fn stmt_to_str(stmt: &ast::Stmt) -> StrBuf { +pub fn stmt_to_str(stmt: &ast::Stmt) -> 
String { to_str(|s| s.print_stmt(stmt)) } -pub fn item_to_str(i: &ast::Item) -> StrBuf { +pub fn item_to_str(i: &ast::Item) -> String { to_str(|s| s.print_item(i)) } -pub fn generics_to_str(generics: &ast::Generics) -> StrBuf { +pub fn generics_to_str(generics: &ast::Generics) -> String { to_str(|s| s.print_generics(generics)) } -pub fn ty_method_to_str(p: &ast::TypeMethod) -> StrBuf { +pub fn ty_method_to_str(p: &ast::TypeMethod) -> String { to_str(|s| s.print_ty_method(p)) } -pub fn method_to_str(p: &ast::Method) -> StrBuf { +pub fn method_to_str(p: &ast::Method) -> String { to_str(|s| s.print_method(p)) } -pub fn fn_block_to_str(p: &ast::FnDecl) -> StrBuf { +pub fn fn_block_to_str(p: &ast::FnDecl) -> String { to_str(|s| s.print_fn_block_args(p)) } -pub fn path_to_str(p: &ast::Path) -> StrBuf { +pub fn path_to_str(p: &ast::Path) -> String { to_str(|s| s.print_path(p, false)) } pub fn fun_to_str(decl: &ast::FnDecl, fn_style: ast::FnStyle, name: ast::Ident, opt_explicit_self: Option<ast::ExplicitSelf_>, - generics: &ast::Generics) -> StrBuf { + generics: &ast::Generics) -> String { to_str(|s| { try!(s.print_fn(decl, Some(fn_style), abi::Rust, name, generics, opt_explicit_self, ast::Inherited)); @@ -211,7 +211,7 @@ pub fn fun_to_str(decl: &ast::FnDecl, fn_style: ast::FnStyle, name: ast::Ident, }) } -pub fn block_to_str(blk: &ast::Block) -> StrBuf { +pub fn block_to_str(blk: &ast::Block) -> String { to_str(|s| { // containing cbox, will be closed by print-block at } try!(s.cbox(indent_unit)); @@ -221,27 +221,27 @@ pub fn block_to_str(blk: &ast::Block) -> StrBuf { }) } -pub fn meta_item_to_str(mi: &ast::MetaItem) -> StrBuf { +pub fn meta_item_to_str(mi: &ast::MetaItem) -> String { to_str(|s| s.print_meta_item(mi)) } -pub fn attribute_to_str(attr: &ast::Attribute) -> StrBuf { +pub fn attribute_to_str(attr: &ast::Attribute) -> String { to_str(|s| s.print_attribute(attr)) } -pub fn lit_to_str(l: &ast::Lit) -> StrBuf { +pub fn lit_to_str(l: &ast::Lit) -> String { 
to_str(|s| s.print_literal(l)) } -pub fn explicit_self_to_str(explicit_self: ast::ExplicitSelf_) -> StrBuf { +pub fn explicit_self_to_str(explicit_self: ast::ExplicitSelf_) -> String { to_str(|s| s.print_explicit_self(explicit_self, ast::MutImmutable).map(|_| {})) } -pub fn variant_to_str(var: &ast::Variant) -> StrBuf { +pub fn variant_to_str(var: &ast::Variant) -> String { to_str(|s| s.print_variant(var)) } -pub fn visibility_qualified(vis: ast::Visibility, s: &str) -> StrBuf { +pub fn visibility_qualified(vis: ast::Visibility, s: &str) -> String { match vis { ast::Public => format!("pub {}", s).to_strbuf(), ast::Inherited => s.to_strbuf() @@ -376,7 +376,7 @@ impl<'a> State<'a> { // Synthesizes a comment that was not textually present in the original source // file. - pub fn synth_comment(&mut self, text: StrBuf) -> IoResult<()> { + pub fn synth_comment(&mut self, text: String) -> IoResult<()> { try!(word(&mut self.s, "/*")); try!(space(&mut self.s)); try!(word(&mut self.s, text.as_slice())); @@ -2232,7 +2232,7 @@ impl<'a> State<'a> { match lit.node { ast::LitStr(ref st, style) => self.print_string(st.get(), style), ast::LitChar(ch) => { - let mut res = StrBuf::from_str("'"); + let mut res = String::from_str("'"); ch.escape_default(|c| res.push_char(c)); res.push_char('\''); word(&mut self.s, res.as_slice()) diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index c3c9596bfc4..9c88ab14143 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -92,7 +92,7 @@ impl<T: TotalEq + Hash + Clone + 'static> Interner<T> { #[deriving(Clone, Eq, Hash, Ord)] pub struct RcStr { - string: Rc<StrBuf>, + string: Rc<String>, } impl TotalEq for RcStr {} diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs index 359a8537b2b..33a038a1dca 100644 --- a/src/libsyntax/util/parser_testing.rs +++ b/src/libsyntax/util/parser_testing.rs @@ -16,21 +16,21 @@ use parse::parser::Parser; use parse::token; // 
map a string to tts, using a made-up filename: -pub fn string_to_tts(source_str: StrBuf) -> Vec<ast::TokenTree> { +pub fn string_to_tts(source_str: String) -> Vec<ast::TokenTree> { let ps = new_parse_sess(); filemap_to_tts(&ps, string_to_filemap(&ps, source_str, "bogofile".to_strbuf())) } // map string to parser (via tts) -pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: StrBuf) -> Parser<'a> { +pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: String) -> Parser<'a> { new_parser_from_source_str(ps, Vec::new(), "bogofile".to_strbuf(), source_str) } -fn with_error_checking_parse<T>(s: StrBuf, f: |&mut Parser| -> T) -> T { +fn with_error_checking_parse<T>(s: String, f: |&mut Parser| -> T) -> T { let ps = new_parse_sess(); let mut p = string_to_parser(&ps, s); let x = f(&mut p); @@ -39,28 +39,28 @@ fn with_error_checking_parse<T>(s: StrBuf, f: |&mut Parser| -> T) -> T { } // parse a string, return a crate. -pub fn string_to_crate (source_str : StrBuf) -> ast::Crate { +pub fn string_to_crate (source_str : String) -> ast::Crate { with_error_checking_parse(source_str, |p| { p.parse_crate_mod() }) } // parse a string, return an expr -pub fn string_to_expr (source_str : StrBuf) -> @ast::Expr { +pub fn string_to_expr (source_str : String) -> @ast::Expr { with_error_checking_parse(source_str, |p| { p.parse_expr() }) } // parse a string, return an item -pub fn string_to_item (source_str : StrBuf) -> Option<@ast::Item> { +pub fn string_to_item (source_str : String) -> Option<@ast::Item> { with_error_checking_parse(source_str, |p| { p.parse_item(Vec::new()) }) } // parse a string, return a stmt -pub fn string_to_stmt(source_str : StrBuf) -> @ast::Stmt { +pub fn string_to_stmt(source_str : String) -> @ast::Stmt { with_error_checking_parse(source_str, |p| { p.parse_stmt(Vec::new()) }) @@ -68,7 +68,7 @@ pub fn string_to_stmt(source_str : StrBuf) -> @ast::Stmt { // parse a string, return a pat. Uses "irrefutable"... which doesn't // (currently) affect parsing. 
-pub fn string_to_pat(source_str: StrBuf) -> @ast::Pat { +pub fn string_to_pat(source_str: String) -> @ast::Pat { string_to_parser(&new_parse_sess(), source_str).parse_pat() } |
