Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/ast.rs                        |   2
-rw-r--r--  src/libsyntax/ast_map/mod.rs                |  38
-rw-r--r--  src/libsyntax/ast_util.rs                   |  14
-rw-r--r--  src/libsyntax/attr.rs                       |  13
-rw-r--r--  src/libsyntax/codemap.rs                    |  12
-rw-r--r--  src/libsyntax/diagnostic.rs                 |  38
-rw-r--r--  src/libsyntax/diagnostics/plugin.rs         |   6
-rw-r--r--  src/libsyntax/ext/asm.rs                    |   3
-rw-r--r--  src/libsyntax/ext/base.rs                   |  10
-rw-r--r--  src/libsyntax/ext/build.rs                  |   3
-rw-r--r--  src/libsyntax/ext/concat.rs                 |   8
-rw-r--r--  src/libsyntax/ext/concat_idents.rs          |   2
-rw-r--r--  src/libsyntax/ext/deriving/bounds.rs        |   3
-rw-r--r--  src/libsyntax/ext/deriving/clone.rs         |   9
-rw-r--r--  src/libsyntax/ext/deriving/decodable.rs     |   2
-rw-r--r--  src/libsyntax/ext/deriving/encodable.rs     |   3
-rw-r--r--  src/libsyntax/ext/deriving/generic/mod.rs   |  46
-rw-r--r--  src/libsyntax/ext/deriving/mod.rs           |   2
-rw-r--r--  src/libsyntax/ext/deriving/show.rs          |   2
-rw-r--r--  src/libsyntax/ext/env.rs                    |   8
-rw-r--r--  src/libsyntax/ext/expand.rs                 |  57
-rw-r--r--  src/libsyntax/ext/format.rs                 |  27
-rw-r--r--  src/libsyntax/ext/quote.rs                  |   6
-rw-r--r--  src/libsyntax/ext/source_util.rs            |  16
-rw-r--r--  src/libsyntax/ext/tt/macro_parser.rs        |  21
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs         |  12
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs          |   4
-rw-r--r--  src/libsyntax/feature_gate.rs               |  10
-rw-r--r--  src/libsyntax/parse/attr.rs                 |   3
-rw-r--r--  src/libsyntax/parse/lexer/comments.rs       |  16
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs            |  18
-rw-r--r--  src/libsyntax/parse/mod.rs                  |  54
-rw-r--r--  src/libsyntax/parse/obsolete.rs             |   4
-rw-r--r--  src/libsyntax/parse/parser.rs               | 148
-rw-r--r--  src/libsyntax/parse/token.rs                |  24
-rw-r--r--  src/libsyntax/print/pp.rs                   |   6
-rw-r--r--  src/libsyntax/print/pprust.rs               | 259
-rw-r--r--  src/libsyntax/std_inject.rs                 |   8
-rw-r--r--  src/libsyntax/test.rs                       |  26
-rw-r--r--  src/libsyntax/util/interner.rs              |  30
40 files changed, 481 insertions(+), 492 deletions(-)
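
Note on the pattern applied below: this commit mechanically replaces explicit .as_slice() calls with the expr[] full-range slicing sugar that 2014-era Rust provided (and, in ast_map, swaps a generic S: Str parameter for concrete String slices). The sketch that follows is not part of the commit; it shows the same conversion in current Rust syntax, where that sugar is spelled &expr[..], to make the intent of the diff easier to read.

// Hedged sketch, not from this commit: the conversion pattern in modern Rust.
// In the Rust of this commit, expr[] was sugar for a full-range slice and
// replaced explicit .as_slice() calls; today the same borrow is written
// &expr[..] (or left to deref coercion).
fn takes_str(s: &str) -> usize {
    s.len()
}

fn takes_u32s(xs: &[u32]) -> usize {
    xs.len()
}

fn main() {
    let owned_string = String::from("libsyntax");
    let owned_vec = vec![1u32, 2, 3];

    // before this commit:  takes_str(owned_string.as_slice())
    // after this commit:   takes_str(owned_string[])
    // modern equivalent:
    let chars = takes_str(&owned_string[..]);
    let items = takes_u32s(&owned_vec[..]);
    println!("{} {}", chars, items);
}
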
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index a294706ef2c..3eea5b27f19 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -189,7 +189,7 @@ impl<S: Encoder<E>, E> Encodable<S, E> for Ident {
 
 impl<D:Decoder<E>, E> Decodable<D, E> for Ident {
     fn decode(d: &mut D) -> Result<Ident, E> {
-        Ok(str_to_ident(try!(d.read_str()).as_slice()))
+        Ok(str_to_ident(try!(d.read_str())[]))
     }
 }
 
diff --git a/src/libsyntax/ast_map/mod.rs b/src/libsyntax/ast_map/mod.rs
index a95c9e19906..e3eeb453c26 100644
--- a/src/libsyntax/ast_map/mod.rs
+++ b/src/libsyntax/ast_map/mod.rs
@@ -95,7 +95,7 @@ pub fn path_to_string<PI: Iterator<PathElem>>(path: PI) -> String {
         if !s.is_empty() {
             s.push_str("::");
         }
-        s.push_str(e.as_slice());
+        s.push_str(e[]);
         s
     }).to_string()
 }
@@ -472,20 +472,20 @@ impl<'ast> Map<'ast> {
         F: FnOnce(Option<&[Attribute]>) -> T,
     {
         let attrs = match self.get(id) {
-            NodeItem(i) => Some(i.attrs.as_slice()),
-            NodeForeignItem(fi) => Some(fi.attrs.as_slice()),
+            NodeItem(i) => Some(i.attrs[]),
+            NodeForeignItem(fi) => Some(fi.attrs[]),
             NodeTraitItem(ref tm) => match **tm {
-                RequiredMethod(ref type_m) => Some(type_m.attrs.as_slice()),
-                ProvidedMethod(ref m) => Some(m.attrs.as_slice()),
-                TypeTraitItem(ref typ) => Some(typ.attrs.as_slice()),
+                RequiredMethod(ref type_m) => Some(type_m.attrs[]),
+                ProvidedMethod(ref m) => Some(m.attrs[]),
+                TypeTraitItem(ref typ) => Some(typ.attrs[]),
             },
             NodeImplItem(ref ii) => {
                 match **ii {
-                    MethodImplItem(ref m) => Some(m.attrs.as_slice()),
-                    TypeImplItem(ref t) => Some(t.attrs.as_slice()),
+                    MethodImplItem(ref m) => Some(m.attrs[]),
+                    TypeImplItem(ref t) => Some(t.attrs[]),
                 }
             }
-            NodeVariant(ref v) => Some(v.node.attrs.as_slice()),
+            NodeVariant(ref v) => Some(v.node.attrs[]),
             // unit/tuple structs take the attributes straight from
             // the struct definition.
             // FIXME(eddyb) make this work again (requires access to the map).
@@ -504,8 +504,8 @@ impl<'ast> Map<'ast> {
     /// the iterator will produce node id's for items with paths
     /// such as `foo::bar::quux`, `bar::quux`, `other::bar::quux`, and
     /// any other such items it can find in the map.
-    pub fn nodes_matching_suffix<'a, S:Str>(&'a self, parts: &'a [S])
-                                 -> NodesMatchingSuffix<'a, 'ast, S> {
+    pub fn nodes_matching_suffix<'a>(&'a self, parts: &'a [String])
+                                 -> NodesMatchingSuffix<'a, 'ast> {
         NodesMatchingSuffix {
             map: self,
             item_name: parts.last().unwrap(),
@@ -565,14 +565,14 @@ impl<'ast> Map<'ast> {
     }
 }
 
-pub struct NodesMatchingSuffix<'a, 'ast:'a, S:'a> {
+pub struct NodesMatchingSuffix<'a, 'ast:'a> {
     map: &'a Map<'ast>,
-    item_name: &'a S,
-    in_which: &'a [S],
+    item_name: &'a String,
+    in_which: &'a [String],
     idx: NodeId,
 }
 
-impl<'a, 'ast, S:Str> NodesMatchingSuffix<'a, 'ast, S> {
+impl<'a, 'ast> NodesMatchingSuffix<'a, 'ast> {
     /// Returns true only if some suffix of the module path for parent
     /// matches `self.in_which`.
     ///
@@ -586,7 +586,7 @@ impl<'a, 'ast, S:Str> NodesMatchingSuffix<'a, 'ast, S> {
                 None => return false,
                 Some((node_id, name)) => (node_id, name),
             };
-            if part.as_slice() != mod_name.as_str() {
+            if part[] != mod_name.as_str() {
                 return false;
             }
             cursor = self.map.get_parent(mod_id);
@@ -624,12 +624,12 @@ impl<'a, 'ast, S:Str> NodesMatchingSuffix<'a, 'ast, S> {
     // We are looking at some node `n` with a given name and parent
     // id; do their names match what I am seeking?
     fn matches_names(&self, parent_of_n: NodeId, name: Name) -> bool {
-        name.as_str() == self.item_name.as_slice() &&
+        name.as_str() == self.item_name[] &&
             self.suffix_matches(parent_of_n)
     }
 }
 
-impl<'a, 'ast, S:Str> Iterator<NodeId> for NodesMatchingSuffix<'a, 'ast, S> {
+impl<'a, 'ast> Iterator<NodeId> for NodesMatchingSuffix<'a, 'ast> {
     fn next(&mut self) -> Option<NodeId> {
         loop {
             let idx = self.idx;
@@ -1037,7 +1037,7 @@ impl<'a> NodePrinter for pprust::State<'a> {
 
 fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String {
     let id_str = format!(" (id={})", id);
-    let id_str = if include_id { id_str.as_slice() } else { "" };
+    let id_str = if include_id { id_str[] } else { "" };
 
     match map.find(id) {
         Some(NodeItem(item)) => {
diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs
index 02771809ae6..5727866d6ec 100644
--- a/src/libsyntax/ast_util.rs
+++ b/src/libsyntax/ast_util.rs
@@ -238,11 +238,11 @@ pub fn impl_pretty_name(trait_ref: &Option<TraitRef>, ty: &Ty) -> Ident {
     match *trait_ref {
         Some(ref trait_ref) => {
             pretty.push('.');
-            pretty.push_str(pprust::path_to_string(&trait_ref.path).as_slice());
+            pretty.push_str(pprust::path_to_string(&trait_ref.path)[]);
         }
         None => {}
     }
-    token::gensym_ident(pretty.as_slice())
+    token::gensym_ident(pretty[])
 }
 
 pub fn trait_method_to_ty_method(method: &Method) -> TypeMethod {
@@ -700,7 +700,7 @@ pub fn pat_is_ident(pat: P<ast::Pat>) -> bool {
 pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool {
     (a.span == b.span)
     && (a.global == b.global)
-    && (segments_name_eq(a.segments.as_slice(), b.segments.as_slice()))
+    && (segments_name_eq(a.segments[], b.segments[]))
 }
 
 // are two arrays of segments equal when compared unhygienically?
@@ -788,13 +788,13 @@ mod test {
     #[test] fn idents_name_eq_test() {
         assert!(segments_name_eq(
             [Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}]
-                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice(),
+                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>()[],
             [Ident{name:Name(3),ctxt:104}, Ident{name:Name(78),ctxt:182}]
-                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice()));
+                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>()[]));
         assert!(!segments_name_eq(
             [Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}]
-                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice(),
+                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>()[],
             [Ident{name:Name(3),ctxt:104}, Ident{name:Name(77),ctxt:182}]
-                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice()));
+                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>()[]));
     }
 }
diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index 127cc5ed51d..b1158917b72 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -97,7 +97,7 @@ impl AttrMetaMethods for MetaItem {
 
     fn meta_item_list<'a>(&'a self) -> Option<&'a [P<MetaItem>]> {
         match self.node {
-            MetaList(_, ref l) => Some(l.as_slice()),
+            MetaList(_, ref l) => Some(l[]),
             _ => None
         }
     }
@@ -136,7 +136,7 @@ impl AttributeMethods for Attribute {
             let meta = mk_name_value_item_str(
                 InternedString::new("doc"),
                 token::intern_and_get_ident(strip_doc_comment_decoration(
-                        comment.get()).as_slice()));
+                        comment.get())[]));
             if self.node.style == ast::AttrOuter {
                 f(&mk_attr_outer(self.node.id, meta))
             } else {
@@ -296,9 +296,9 @@ pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr {
             }
             MetaList(ref n, ref items) if *n == "inline" => {
                 mark_used(attr);
-                if contains_name(items.as_slice(), "always") {
+                if contains_name(items[], "always") {
                     InlineAlways
-                } else if contains_name(items.as_slice(), "never") {
+                } else if contains_name(items[], "never") {
                     InlineNever
                 } else {
                     InlineHint
@@ -332,7 +332,7 @@ pub fn cfg_matches(diagnostic: &SpanHandler, cfgs: &[P<MetaItem>], cfg: &ast::Me
             !cfg_matches(diagnostic, cfgs, &*mis[0])
         }
         ast::MetaList(ref pred, _) => {
-            diagnostic.span_err(cfg.span, format!("invalid predicate `{}`", pred).as_slice());
+            diagnostic.span_err(cfg.span, format!("invalid predicate `{}`", pred)[]);
             false
         },
         ast::MetaWord(_) | ast::MetaNameValue(..) => contains(cfgs, cfg),
@@ -396,8 +396,7 @@ pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[P<MetaItem>]) {
 
         if !set.insert(name.clone()) {
             diagnostic.span_fatal(meta.span,
-                                  format!("duplicate meta item `{}`",
-                                          name).as_slice());
+                                  format!("duplicate meta item `{}`", name)[]);
         }
     }
 }
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index c726e17a8fa..060e1d3f995 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -291,9 +291,9 @@ impl FileMap {
         lines.get(line_number).map(|&line| {
             let begin: BytePos = line - self.start_pos;
             let begin = begin.to_uint();
-            let slice = self.src.slice_from(begin);
+            let slice = self.src[begin..];
             match slice.find('\n') {
-                Some(e) => slice.slice_to(e),
+                Some(e) => slice[0..e],
                 None => slice
             }.to_string()
         })
@@ -338,9 +338,9 @@ impl CodeMap {
         // FIXME #12884: no efficient/safe way to remove from the start of a string
         // and reuse the allocation.
         let mut src = if src.starts_with("\u{feff}") {
-            String::from_str(src.slice_from(3))
+            String::from_str(src[3..])
         } else {
-            String::from_str(src.as_slice())
+            String::from_str(src[])
         };
 
         // Append '\n' in case it's not already there.
@@ -427,8 +427,8 @@ impl CodeMap {
         if begin.fm.start_pos != end.fm.start_pos {
             None
         } else {
-            Some(begin.fm.src.slice(begin.pos.to_uint(),
-                                    end.pos.to_uint()).to_string())
+            Some(begin.fm.src[begin.pos.to_uint()..
+                              end.pos.to_uint()].to_string())
         }
     }
 
diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs
index 4d765f49aca..88dfdf6e2d8 100644
--- a/src/libsyntax/diagnostic.rs
+++ b/src/libsyntax/diagnostic.rs
@@ -123,7 +123,7 @@ impl SpanHandler {
         panic!(ExplicitBug);
     }
     pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
-        self.span_bug(sp, format!("unimplemented {}", msg).as_slice());
+        self.span_bug(sp, format!("unimplemented {}", msg)[]);
     }
     pub fn handler<'a>(&'a self) -> &'a Handler {
         &self.handler
@@ -166,7 +166,7 @@ impl Handler {
                         self.err_count.get());
           }
         }
-        self.fatal(s.as_slice());
+        self.fatal(s[]);
     }
     pub fn warn(&self, msg: &str) {
         self.emit.borrow_mut().emit(None, msg, None, Warning);
@@ -182,7 +182,7 @@ impl Handler {
         panic!(ExplicitBug);
     }
     pub fn unimpl(&self, msg: &str) -> ! {
-        self.bug(format!("unimplemented {}", msg).as_slice());
+        self.bug(format!("unimplemented {}", msg)[]);
     }
     pub fn emit(&self,
                 cmsp: Option<(&codemap::CodeMap, Span)>,
@@ -277,7 +277,7 @@ fn print_maybe_styled(w: &mut EmitterWriter,
             // to be miscolored. We assume this is rare enough that we don't
             // have to worry about it.
             if msg.ends_with("\n") {
-                try!(t.write_str(msg.slice_to(msg.len()-1)));
+                try!(t.write_str(msg[0..msg.len()-1]));
                 try!(t.reset());
                 try!(t.write_str("\n"));
             } else {
@@ -299,16 +299,16 @@ fn print_diagnostic(dst: &mut EmitterWriter, topic: &str, lvl: Level,
     }
 
     try!(print_maybe_styled(dst,
-                            format!("{}: ", lvl.to_string()).as_slice(),
+                            format!("{}: ", lvl.to_string())[],
                             term::attr::ForegroundColor(lvl.color())));
     try!(print_maybe_styled(dst,
-                            format!("{}", msg).as_slice(),
+                            format!("{}", msg)[],
                             term::attr::Bold));
 
     match code {
         Some(code) => {
             let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA);
-            try!(print_maybe_styled(dst, format!(" [{}]", code.clone()).as_slice(), style));
+            try!(print_maybe_styled(dst, format!(" [{}]", code.clone())[], style));
         }
         None => ()
     }
@@ -398,12 +398,12 @@ fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan,
         // the span)
         let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id};
         let ses = cm.span_to_string(span_end);
-        try!(print_diagnostic(dst, ses.as_slice(), lvl, msg, code));
+        try!(print_diagnostic(dst, ses[], lvl, msg, code));
         if rsp.is_full_span() {
             try!(custom_highlight_lines(dst, cm, sp, lvl, lines));
         }
     } else {
-        try!(print_diagnostic(dst, ss.as_slice(), lvl, msg, code));
+        try!(print_diagnostic(dst, ss[], lvl, msg, code));
         if rsp.is_full_span() {
             try!(highlight_lines(dst, cm, sp, lvl, lines));
         }
@@ -413,9 +413,9 @@ fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan,
         Some(code) =>
             match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) {
                 Some(_) => {
-                    try!(print_diagnostic(dst, ss.as_slice(), Help,
+                    try!(print_diagnostic(dst, ss[], Help,
                                           format!("pass `--explain {}` to see a detailed \
-                                                   explanation", code).as_slice(), None));
+                                                   explanation", code)[], None));
                 }
                 None => ()
             },
@@ -432,7 +432,7 @@ fn highlight_lines(err: &mut EmitterWriter,
     let fm = &*lines.file;
 
     let mut elided = false;
-    let mut display_lines = lines.lines.as_slice();
+    let mut display_lines = lines.lines[];
     if display_lines.len() > MAX_LINES {
         display_lines = display_lines[0u..MAX_LINES];
         elided = true;
@@ -494,7 +494,7 @@ fn highlight_lines(err: &mut EmitterWriter,
             }
         }
         try!(print_maybe_styled(err,
-                                format!("{}\n", s).as_slice(),
+                                format!("{}\n", s)[],
                                 term::attr::ForegroundColor(lvl.color())));
     }
     Ok(())
@@ -514,7 +514,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter,
                           -> io::IoResult<()> {
     let fm = &*lines.file;
 
-    let lines = lines.lines.as_slice();
+    let lines = lines.lines[];
     if lines.len() > MAX_LINES {
         if let Some(line) = fm.get_line(lines[0]) {
             try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
@@ -545,7 +545,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter,
     s.push('^');
     s.push('\n');
     print_maybe_styled(w,
-                       s.as_slice(),
+                       s[],
                        term::attr::ForegroundColor(lvl.color()))
 }
 
@@ -560,12 +560,12 @@ fn print_macro_backtrace(w: &mut EmitterWriter,
                 codemap::MacroAttribute => ("#[", "]"),
                 codemap::MacroBang => ("", "!")
             };
-            try!(print_diagnostic(w, ss.as_slice(), Note,
+            try!(print_diagnostic(w, ss[], Note,
                                   format!("in expansion of {}{}{}", pre,
                                           ei.callee.name,
-                                          post).as_slice(), None));
+                                          post)[], None));
             let ss = cm.span_to_string(ei.call_site);
-            try!(print_diagnostic(w, ss.as_slice(), Note, "expansion site", None));
+            try!(print_diagnostic(w, ss[], Note, "expansion site", None));
             Ok(Some(ei.call_site))
         }
         None => Ok(None)
@@ -578,6 +578,6 @@ pub fn expect<T, M>(diag: &SpanHandler, opt: Option<T>, msg: M) -> T where
 {
     match opt {
         Some(t) => t,
-        None => diag.handler().bug(msg().as_slice()),
+        None => diag.handler().bug(msg()[]),
     }
 }
diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs
index bcce5538314..90fc28014e6 100644
--- a/src/libsyntax/diagnostics/plugin.rs
+++ b/src/libsyntax/diagnostics/plugin.rs
@@ -58,7 +58,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
             Some(previous_span) => {
                 ecx.span_warn(span, format!(
                     "diagnostic code {} already used", token::get_ident(code).get()
-                ).as_slice());
+                )[]);
                 ecx.span_note(previous_span, "previous invocation");
             },
             None => ()
@@ -87,12 +87,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
         if diagnostics.insert(code.name, description).is_some() {
             ecx.span_err(span, format!(
                 "diagnostic code {} already registered", token::get_ident(*code).get()
-            ).as_slice());
+            )[]);
         }
     });
     let sym = Ident::new(token::gensym((
         "__register_diagnostic_".to_string() + token::get_ident(*code).get()
-    ).as_slice()));
+    )[]));
     MacItems::new(vec![quote_item!(ecx, mod $sym {}).unwrap()].into_iter())
 }
 
diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs
index b138811187b..b77b822a6b2 100644
--- a/src/libsyntax/ext/asm.rs
+++ b/src/libsyntax/ext/asm.rs
@@ -100,8 +100,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                         Some(('=', _)) => None,
                         Some(('+', operand)) => {
                             Some(token::intern_and_get_ident(format!(
-                                        "={}",
-                                        operand).as_slice()))
+                                        "={}", operand)[]))
                         }
                         _ => {
                             cx.span_err(span, "output operand constraint lacks '=' or '+'");
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index aefbb2a1fea..62fe718b522 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -549,7 +549,7 @@ impl<'a> ExtCtxt<'a> {
     pub fn mod_pop(&mut self) { self.mod_path.pop().unwrap(); }
     pub fn mod_path(&self) -> Vec<ast::Ident> {
         let mut v = Vec::new();
-        v.push(token::str_to_ident(self.ecfg.crate_name.as_slice()));
+        v.push(token::str_to_ident(self.ecfg.crate_name[]));
         v.extend(self.mod_path.iter().map(|a| *a));
         return v;
     }
@@ -558,7 +558,7 @@ impl<'a> ExtCtxt<'a> {
         if self.recursion_count > self.ecfg.recursion_limit {
             self.span_fatal(ei.call_site,
                             format!("recursion limit reached while expanding the macro `{}`",
-                                    ei.callee.name).as_slice());
+                                    ei.callee.name)[]);
         }
 
         let mut call_site = ei.call_site;
@@ -669,7 +669,7 @@ pub fn check_zero_tts(cx: &ExtCtxt,
                       tts: &[ast::TokenTree],
                       name: &str) {
     if tts.len() != 0 {
-        cx.span_err(sp, format!("{} takes no arguments", name).as_slice());
+        cx.span_err(sp, format!("{} takes no arguments", name)[]);
     }
 }
 
@@ -682,12 +682,12 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
                                -> Option<String> {
     let mut p = cx.new_parser_from_tts(tts);
     if p.token == token::Eof {
-        cx.span_err(sp, format!("{} takes 1 argument", name).as_slice());
+        cx.span_err(sp, format!("{} takes 1 argument", name)[]);
         return None
     }
     let ret = cx.expander().fold_expr(p.parse_expr());
     if p.token != token::Eof {
-        cx.span_err(sp, format!("{} takes 1 argument", name).as_slice());
+        cx.span_err(sp, format!("{} takes 1 argument", name)[]);
     }
     expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| {
         s.get().to_string()
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index 9d4992f7453..77165168746 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -712,8 +712,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         let loc = self.codemap().lookup_char_pos(span.lo);
         let expr_file = self.expr_str(span,
                                       token::intern_and_get_ident(loc.file
-                                                                  .name
-                                                                  .as_slice()));
+                                                                  .name[]));
         let expr_line = self.expr_uint(span, loc.line);
         let expr_file_line_tuple = self.expr_tuple(span, vec!(expr_file, expr_line));
         let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple);
diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs
index e2867c2fbab..03dd08fdf7f 100644
--- a/src/libsyntax/ext/concat.rs
+++ b/src/libsyntax/ext/concat.rs
@@ -40,14 +40,14 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
                     ast::LitInt(i, ast::UnsignedIntLit(_)) |
                     ast::LitInt(i, ast::SignedIntLit(_, ast::Plus)) |
                     ast::LitInt(i, ast::UnsuffixedIntLit(ast::Plus)) => {
-                        accumulator.push_str(format!("{}", i).as_slice());
+                        accumulator.push_str(format!("{}", i)[]);
                     }
                     ast::LitInt(i, ast::SignedIntLit(_, ast::Minus)) |
                     ast::LitInt(i, ast::UnsuffixedIntLit(ast::Minus)) => {
-                        accumulator.push_str(format!("-{}", i).as_slice());
+                        accumulator.push_str(format!("-{}", i)[]);
                     }
                     ast::LitBool(b) => {
-                        accumulator.push_str(format!("{}", b).as_slice());
+                        accumulator.push_str(format!("{}", b)[]);
                     }
                     ast::LitByte(..) |
                     ast::LitBinary(..) => {
@@ -62,5 +62,5 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
     }
     base::MacExpr::new(cx.expr_str(
             sp,
-            token::intern_and_get_ident(accumulator.as_slice())))
+            token::intern_and_get_ident(accumulator[])))
 }
diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs
index aa18b1be31a..2cf60d30a1b 100644
--- a/src/libsyntax/ext/concat_idents.rs
+++ b/src/libsyntax/ext/concat_idents.rs
@@ -40,7 +40,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
             }
         }
     }
-    let res = str_to_ident(res_str.as_slice());
+    let res = str_to_ident(res_str[]);
 
     let e = P(ast::Expr {
         id: ast::DUMMY_NODE_ID,
diff --git a/src/libsyntax/ext/deriving/bounds.rs b/src/libsyntax/ext/deriving/bounds.rs
index 3145b3bb1a4..c27a27fce6a 100644
--- a/src/libsyntax/ext/deriving/bounds.rs
+++ b/src/libsyntax/ext/deriving/bounds.rs
@@ -31,8 +31,7 @@ pub fn expand_deriving_bound<F>(cx: &mut ExtCtxt,
                 ref tname => {
                     cx.span_bug(span,
                                 format!("expected built-in trait name but \
-                                         found {}",
-                                        *tname).as_slice())
+                                         found {}", *tname)[])
                 }
             }
         },
diff --git a/src/libsyntax/ext/deriving/clone.rs b/src/libsyntax/ext/deriving/clone.rs
index a34764221b3..eedec6f37c8 100644
--- a/src/libsyntax/ext/deriving/clone.rs
+++ b/src/libsyntax/ext/deriving/clone.rs
@@ -80,13 +80,11 @@ fn cs_clone(
         EnumNonMatchingCollapsed (..) => {
             cx.span_bug(trait_span,
                         format!("non-matching enum variants in \
-                                 `deriving({})`",
-                                name).as_slice())
+                                 `deriving({})`", name)[])
         }
         StaticEnum(..) | StaticStruct(..) => {
             cx.span_bug(trait_span,
-                        format!("static method in `deriving({})`",
-                                name).as_slice())
+                        format!("static method in `deriving({})`", name)[])
         }
     }
 
@@ -103,8 +101,7 @@ fn cs_clone(
                 None => {
                     cx.span_bug(trait_span,
                                 format!("unnamed field in normal struct in \
-                                         `deriving({})`",
-                                        name).as_slice())
+                                         `deriving({})`", name)[])
                 }
             };
             cx.field_imm(field.span, ident, subcall(field))
diff --git a/src/libsyntax/ext/deriving/decodable.rs b/src/libsyntax/ext/deriving/decodable.rs
index 0a8d59da896..a4c70ebbc8e 100644
--- a/src/libsyntax/ext/deriving/decodable.rs
+++ b/src/libsyntax/ext/deriving/decodable.rs
@@ -174,7 +174,7 @@ fn decode_static_fields<F>(cx: &mut ExtCtxt,
                 let fields = fields.iter().enumerate().map(|(i, &span)| {
                     getarg(cx, span,
                            token::intern_and_get_ident(format!("_field{}",
-                                                               i).as_slice()),
+                                                               i)[]),
                            i)
                 }).collect();
 
diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs
index 30851ebeaae..aac515ed81a 100644
--- a/src/libsyntax/ext/deriving/encodable.rs
+++ b/src/libsyntax/ext/deriving/encodable.rs
@@ -162,8 +162,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
                 let name = match name {
                     Some(id) => token::get_ident(id),
                     None => {
-                        token::intern_and_get_ident(format!("_field{}",
-                                                            i).as_slice())
+                        token::intern_and_get_ident(format!("_field{}", i)[])
                     }
                 };
                 let enc = cx.expr_method_call(span, self_.clone(),
diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs
index d8de3d2db97..a2b5c0b9e96 100644
--- a/src/libsyntax/ext/deriving/generic/mod.rs
+++ b/src/libsyntax/ext/deriving/generic/mod.rs
@@ -514,15 +514,15 @@ impl<'a> TraitDef<'a> {
                     self,
                     struct_def,
                     type_ident,
-                    self_args.as_slice(),
-                    nonself_args.as_slice())
+                    self_args[],
+                    nonself_args[])
             } else {
                 method_def.expand_struct_method_body(cx,
                                                      self,
                                                      struct_def,
                                                      type_ident,
-                                                     self_args.as_slice(),
-                                                     nonself_args.as_slice())
+                                                     self_args[],
+                                                     nonself_args[])
             };
 
             method_def.create_method(cx,
@@ -554,15 +554,15 @@ impl<'a> TraitDef<'a> {
                     self,
                     enum_def,
                     type_ident,
-                    self_args.as_slice(),
-                    nonself_args.as_slice())
+                    self_args[],
+                    nonself_args[])
             } else {
                 method_def.expand_enum_method_body(cx,
                                                    self,
                                                    enum_def,
                                                    type_ident,
                                                    self_args,
-                                                   nonself_args.as_slice())
+                                                   nonself_args[])
             };
 
             method_def.create_method(cx,
@@ -649,7 +649,7 @@ impl<'a> MethodDef<'a> {
 
         for (i, ty) in self.args.iter().enumerate() {
             let ast_ty = ty.to_ty(cx, trait_.span, type_ident, generics);
-            let ident = cx.ident_of(format!("__arg_{}", i).as_slice());
+            let ident = cx.ident_of(format!("__arg_{}", i)[]);
             arg_tys.push((ident, ast_ty));
 
             let arg_expr = cx.expr_ident(trait_.span, ident);
@@ -756,7 +756,7 @@ impl<'a> MethodDef<'a> {
                                              struct_path,
                                              struct_def,
                                              format!("__self_{}",
-                                                     i).as_slice(),
+                                                     i)[],
                                              ast::MutImmutable);
             patterns.push(pat);
             raw_fields.push(ident_expr);
@@ -912,22 +912,22 @@ impl<'a> MethodDef<'a> {
             .collect::<Vec<String>>();
 
         let self_arg_idents = self_arg_names.iter()
-            .map(|name|cx.ident_of(name.as_slice()))
+            .map(|name|cx.ident_of(name[]))
             .collect::<Vec<ast::Ident>>();
 
         // The `vi_idents` will be bound, solely in the catch-all, to
         // a series of let statements mapping each self_arg to a uint
         // corresponding to its variant index.
         let vi_idents: Vec<ast::Ident> = self_arg_names.iter()
-            .map(|name| { let vi_suffix = format!("{}_vi", name.as_slice());
-                          cx.ident_of(vi_suffix.as_slice()) })
+            .map(|name| { let vi_suffix = format!("{}_vi", name[]);
+                          cx.ident_of(vi_suffix[]) })
             .collect::<Vec<ast::Ident>>();
 
         // Builds, via callback to call_substructure_method, the
         // delegated expression that handles the catch-all case,
         // using `__variants_tuple` to drive logic if necessary.
         let catch_all_substructure = EnumNonMatchingCollapsed(
-            self_arg_idents, variants.as_slice(), vi_idents.as_slice());
+            self_arg_idents, variants[], vi_idents[]);
 
         // These arms are of the form:
         // (Variant1, Variant1, ...) => Body1
@@ -949,12 +949,12 @@ impl<'a> MethodDef<'a> {
                 let mut subpats = Vec::with_capacity(self_arg_names.len());
                 let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1);
                 let first_self_pat_idents = {
-                    let (p, idents) = mk_self_pat(cx, self_arg_names[0].as_slice());
+                    let (p, idents) = mk_self_pat(cx, self_arg_names[0][]);
                     subpats.push(p);
                     idents
                 };
                 for self_arg_name in self_arg_names.tail().iter() {
-                    let (p, idents) = mk_self_pat(cx, self_arg_name.as_slice());
+                    let (p, idents) = mk_self_pat(cx, self_arg_name[]);
                     subpats.push(p);
                     self_pats_idents.push(idents);
                 }
@@ -1010,7 +1010,7 @@ impl<'a> MethodDef<'a> {
                                                 &**variant,
                                                 field_tuples);
                 let arm_expr = self.call_substructure_method(
-                    cx, trait_, type_ident, self_args.as_slice(), nonself_args,
+                    cx, trait_, type_ident, self_args[], nonself_args,
                     &substructure);
 
                 cx.arm(sp, vec![single_pat], arm_expr)
@@ -1063,7 +1063,7 @@ impl<'a> MethodDef<'a> {
             }
 
             let arm_expr = self.call_substructure_method(
-                cx, trait_, type_ident, self_args.as_slice(), nonself_args,
+                cx, trait_, type_ident, self_args[], nonself_args,
                 &catch_all_substructure);
 
             // Builds the expression:
@@ -1267,7 +1267,7 @@ impl<'a> TraitDef<'a> {
                     cx.span_bug(sp, "a struct with named and unnamed fields in `deriving`");
                 }
             };
-            let ident = cx.ident_of(format!("{}_{}", prefix, i).as_slice());
+            let ident = cx.ident_of(format!("{}_{}", prefix, i)[]);
             paths.push(codemap::Spanned{span: sp, node: ident});
             let val = cx.expr(
                 sp, ast::ExprParen(cx.expr_deref(sp, cx.expr_path(cx.path_ident(sp,ident)))));
@@ -1313,7 +1313,7 @@ impl<'a> TraitDef<'a> {
                 let mut ident_expr = Vec::new();
                 for (i, va) in variant_args.iter().enumerate() {
                     let sp = self.set_expn_info(cx, va.ty.span);
-                    let ident = cx.ident_of(format!("{}_{}", prefix, i).as_slice());
+                    let ident = cx.ident_of(format!("{}_{}", prefix, i)[]);
                     let path1 = codemap::Spanned{span: sp, node: ident};
                     paths.push(path1);
                     let expr_path = cx.expr_path(cx.path_ident(sp, ident));
@@ -1356,7 +1356,7 @@ pub fn cs_fold<F>(use_foldl: bool,
                       field.span,
                       old,
                       field.self_.clone(),
-                      field.other.as_slice())
+                      field.other[])
                 })
             } else {
                 all_fields.iter().rev().fold(base, |old, field| {
@@ -1364,12 +1364,12 @@ pub fn cs_fold<F>(use_foldl: bool,
                       field.span,
                       old,
                       field.self_.clone(),
-                      field.other.as_slice())
+                      field.other[])
                 })
             }
         },
         EnumNonMatchingCollapsed(ref all_args, _, tuple) =>
-            enum_nonmatch_f(cx, trait_span, (all_args.as_slice(), tuple),
+            enum_nonmatch_f(cx, trait_span, (all_args[], tuple),
                             substructure.nonself_args),
         StaticEnum(..) | StaticStruct(..) => {
             cx.span_bug(trait_span, "static function in `deriving`")
@@ -1409,7 +1409,7 @@ pub fn cs_same_method<F>(f: F,
             f(cx, trait_span, called)
         },
         EnumNonMatchingCollapsed(ref all_self_args, _, tuple) =>
-            enum_nonmatch_f(cx, trait_span, (all_self_args.as_slice(), tuple),
+            enum_nonmatch_f(cx, trait_span, (all_self_args[], tuple),
                             substructure.nonself_args),
         StaticEnum(..) | StaticStruct(..) => {
             cx.span_bug(trait_span, "static function in `deriving`")
diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs
index 839e99c81d1..4a9076b07b5 100644
--- a/src/libsyntax/ext/deriving/mod.rs
+++ b/src/libsyntax/ext/deriving/mod.rs
@@ -115,7 +115,7 @@ pub fn expand_meta_deriving(cx: &mut ExtCtxt,
                                 cx.span_err(titem.span,
                                             format!("unknown `deriving` \
                                                      trait: `{}`",
-                                                    *tname).as_slice());
+                                                    *tname)[]);
                             }
                         };
                     }
diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs
index a68b521bbc9..19b45a1e610 100644
--- a/src/libsyntax/ext/deriving/show.rs
+++ b/src/libsyntax/ext/deriving/show.rs
@@ -127,7 +127,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
     let formatter = substr.nonself_args[0].clone();
 
     let meth = cx.ident_of("write_fmt");
-    let s = token::intern_and_get_ident(format_string.as_slice());
+    let s = token::intern_and_get_ident(format_string[]);
     let format_string = cx.expr_str(span, s);
 
     // phew, not our responsibility any more!
diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs
index 8c17b31f458..9fedc4a158e 100644
--- a/src/libsyntax/ext/env.rs
+++ b/src/libsyntax/ext/env.rs
@@ -30,7 +30,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT
         Some(v) => v
     };
 
-    let e = match os::getenv(var.as_slice()) {
+    let e = match os::getenv(var[]) {
       None => {
           cx.expr_path(cx.path_all(sp,
                                    true,
@@ -56,7 +56,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT
                                    cx.ident_of("Some")),
                               vec!(cx.expr_str(sp,
                                                token::intern_and_get_ident(
-                                          s.as_slice()))))
+                                          s[]))))
       }
     };
     MacExpr::new(e)
@@ -83,7 +83,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
         None => {
             token::intern_and_get_ident(format!("environment variable `{}` \
                                                  not defined",
-                                                var).as_slice())
+                                                var)[])
         }
         Some(second) => {
             match expr_to_string(cx, second, "expected string literal") {
@@ -106,7 +106,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
             cx.span_err(sp, msg.get());
             cx.expr_uint(sp, 0)
         }
-        Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s.as_slice()))
+        Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s[]))
     };
     MacExpr::new(e)
 }
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index b10ae7a09db..f2b6f6bfe16 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -293,7 +293,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                     fld.cx.span_err(
                         pth.span,
                         format!("macro undefined: '{}!'",
-                                extnamestr.get()).as_slice());
+                                extnamestr.get())[]);
 
                     // let compilation continue
                     None
@@ -309,7 +309,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                                 },
                             });
                         let fm = fresh_mark();
-                        let marked_before = mark_tts(tts.as_slice(), fm);
+                        let marked_before = mark_tts(tts[], fm);
 
                         // The span that we pass to the expanders we want to
                         // be the root of the call stack. That's the most
@@ -320,7 +320,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                         let opt_parsed = {
                             let expanded = expandfun.expand(fld.cx,
                                                             mac_span,
-                                                            marked_before.as_slice());
+                                                            marked_before[]);
                             parse_thunk(expanded)
                         };
                         let parsed = match opt_parsed {
@@ -329,8 +329,8 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                                 fld.cx.span_err(
                                     pth.span,
                                     format!("non-expression macro in expression position: {}",
-                                            extnamestr.get().as_slice()
-                                            ).as_slice());
+                                            extnamestr.get()[]
+                                            )[]);
                                 return None;
                             }
                         };
@@ -340,7 +340,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                         fld.cx.span_err(
                             pth.span,
                             format!("'{}' is not a tt-style macro",
-                                    extnamestr.get()).as_slice());
+                                    extnamestr.get())[]);
                         None
                     }
                 }
@@ -445,7 +445,7 @@ pub fn expand_item(it: P<ast::Item>, fld: &mut MacroExpander)
             if valid_ident {
                 fld.cx.mod_push(it.ident);
             }
-            let macro_escape = contains_macro_escape(new_attrs.as_slice());
+            let macro_escape = contains_macro_escape(new_attrs[]);
             let result = with_exts_frame!(fld.cx.syntax_env,
                                           macro_escape,
                                           noop_fold_item(it, fld));
@@ -553,7 +553,7 @@ pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander)
             None => {
                 fld.cx.span_err(path_span,
                                 format!("macro undefined: '{}!'",
-                                        extnamestr).as_slice());
+                                        extnamestr)[]);
                 // let compilation continue
                 return SmallVector::zero();
             }
@@ -566,7 +566,7 @@ pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander)
                                       format!("macro {}! expects no ident argument, \
                                         given '{}'",
                                       extnamestr,
-                                      token::get_ident(it.ident)).as_slice());
+                                      token::get_ident(it.ident))[]);
                         return SmallVector::zero();
                     }
                     fld.cx.bt_push(ExpnInfo {
@@ -578,14 +578,14 @@ pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander)
                         }
                     });
                     // mark before expansion:
-                    let marked_before = mark_tts(tts.as_slice(), fm);
-                    expander.expand(fld.cx, it.span, marked_before.as_slice())
+                    let marked_before = mark_tts(tts[], fm);
+                    expander.expand(fld.cx, it.span, marked_before[])
                 }
                 IdentTT(ref expander, span) => {
                     if it.ident.name == parse::token::special_idents::invalid.name {
                         fld.cx.span_err(path_span,
                                         format!("macro {}! expects an ident argument",
-                                                extnamestr.get()).as_slice());
+                                                extnamestr.get())[]);
                         return SmallVector::zero();
                     }
                     fld.cx.bt_push(ExpnInfo {
@@ -597,14 +597,14 @@ pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander)
                         }
                     });
                     // mark before expansion:
-                    let marked_tts = mark_tts(tts.as_slice(), fm);
+                    let marked_tts = mark_tts(tts[], fm);
                     expander.expand(fld.cx, it.span, it.ident, marked_tts)
                 }
                 LetSyntaxTT(ref expander, span) => {
                     if it.ident.name == parse::token::special_idents::invalid.name {
                         fld.cx.span_err(path_span,
                                         format!("macro {}! expects an ident argument",
-                                                extnamestr.get()).as_slice());
+                                                extnamestr.get())[]);
                         return SmallVector::zero();
                     }
                     fld.cx.bt_push(ExpnInfo {
@@ -621,7 +621,7 @@ pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander)
                 _ => {
                     fld.cx.span_err(it.span,
                                     format!("{}! is not legal in item position",
-                                            extnamestr.get()).as_slice());
+                                            extnamestr.get())[]);
                     return SmallVector::zero();
                 }
             }
@@ -639,8 +639,8 @@ pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander)
             // result of expanding a LetSyntaxTT, and thus doesn't
             // need to be marked. Not that it could be marked anyway.
             // create issue to recommend refactoring here?
-            fld.cx.syntax_env.insert(intern(name.as_slice()), ext);
-            if attr::contains_name(it.attrs.as_slice(), "macro_export") {
+            fld.cx.syntax_env.insert(intern(name[]), ext);
+            if attr::contains_name(it.attrs[], "macro_export") {
                 fld.cx.exported_macros.push(it);
             }
             SmallVector::zero()
@@ -654,7 +654,7 @@ pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander)
         Right(None) => {
             fld.cx.span_err(path_span,
                             format!("non-item macro in item position: {}",
-                                    extnamestr.get()).as_slice());
+                                    extnamestr.get())[]);
             return SmallVector::zero();
         }
     };
@@ -903,7 +903,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
             None => {
                 fld.cx.span_err(pth.span,
                                 format!("macro undefined: '{}!'",
-                                        extnamestr).as_slice());
+                                        extnamestr)[]);
                 // let compilation continue
                 return DummyResult::raw_pat(span);
             }
@@ -920,11 +920,11 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
                     });
 
                     let fm = fresh_mark();
-                    let marked_before = mark_tts(tts.as_slice(), fm);
+                    let marked_before = mark_tts(tts[], fm);
                     let mac_span = fld.cx.original_span();
                     let expanded = match expander.expand(fld.cx,
                                         mac_span,
-                                        marked_before.as_slice()).make_pat() {
+                                        marked_before[]).make_pat() {
                         Some(e) => e,
                         None => {
                             fld.cx.span_err(
@@ -932,7 +932,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
                                 format!(
                                     "non-pattern macro in pattern position: {}",
                                     extnamestr.get()
-                                ).as_slice()
+                                )[]
                             );
                             return DummyResult::raw_pat(span);
                         }
@@ -944,7 +944,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
                 _ => {
                     fld.cx.span_err(span,
                                     format!("{}! is not legal in pattern position",
-                                            extnamestr.get()).as_slice());
+                                            extnamestr.get())[]);
                     return DummyResult::raw_pat(span);
                 }
             }
@@ -1192,8 +1192,7 @@ pub fn expand_crate(parse_sess: &parse::ParseSess,
     let mut expander = MacroExpander::new(&mut cx);
 
     for ExportedMacros { crate_name, macros } in imported_macros.into_iter() {
-        let name = format!("<{} macros>", token::get_ident(crate_name))
-            .into_string();
+        let name = format!("<{} macros>", token::get_ident(crate_name));
 
         for source in macros.into_iter() {
             let item = parse::parse_item_from_source_str(name.clone(),
@@ -1238,7 +1237,7 @@ impl Folder for Marker {
             node: match node {
                 MacInvocTT(path, tts, ctxt) => {
                     MacInvocTT(self.fold_path(path),
-                               self.fold_tts(tts.as_slice()),
+                               self.fold_tts(tts[]),
                                mtwt::apply_mark(self.mark, ctxt))
                 }
             },
@@ -1415,9 +1414,9 @@ mod test {
         let attr2 = make_dummy_attr ("bar");
         let escape_attr = make_dummy_attr ("macro_escape");
         let attrs1 = vec!(attr1.clone(), escape_attr, attr2.clone());
-        assert_eq!(contains_macro_escape(attrs1.as_slice()),true);
+        assert_eq!(contains_macro_escape(attrs1[]),true);
         let attrs2 = vec!(attr1,attr2);
-        assert_eq!(contains_macro_escape(attrs2.as_slice()),false);
+        assert_eq!(contains_macro_escape(attrs2[]),false);
     }
 
     // make a MetaWord outer attribute with the given name
@@ -1729,7 +1728,7 @@ foo_module!();
                 let string = ident.get();
                 "xx" == string
             }).collect();
-        let cxbinds: &[&ast::Ident] = cxbinds.as_slice();
+        let cxbinds: &[&ast::Ident] = cxbinds[];
         let cxbind = match cxbinds {
             [b] => b,
             _ => panic!("expected just one binding for ext_cx")
diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs
index 95c7fcc564a..aad4045f00a 100644
--- a/src/libsyntax/ext/format.rs
+++ b/src/libsyntax/ext/format.rs
@@ -136,7 +136,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, allow_method: bool,
                 _ => {
                     ecx.span_err(p.span,
                                  format!("expected ident for named argument, found `{}`",
-                                         p.this_token_to_string()).as_slice());
+                                         p.this_token_to_string())[]);
                     return (invocation, None);
                 }
             };
@@ -149,7 +149,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, allow_method: bool,
                 Some(prev) => {
                     ecx.span_err(e.span,
                                  format!("duplicate argument named `{}`",
-                                         name).as_slice());
+                                         name)[]);
                     ecx.parse_sess.span_diagnostic.span_note(prev.span, "previously here");
                     continue
                 }
@@ -240,7 +240,7 @@ impl<'a, 'b> Context<'a, 'b> {
                     let msg = format!("invalid reference to argument `{}` ({})",
                                       arg, self.describe_num_args());
 
-                    self.ecx.span_err(self.fmtsp, msg.as_slice());
+                    self.ecx.span_err(self.fmtsp, msg[]);
                     return;
                 }
                 {
@@ -260,7 +260,7 @@ impl<'a, 'b> Context<'a, 'b> {
                     Some(e) => e.span,
                     None => {
                         let msg = format!("there is no argument named `{}`", name);
-                        self.ecx.span_err(self.fmtsp, msg.as_slice());
+                        self.ecx.span_err(self.fmtsp, msg[]);
                         return;
                     }
                 };
@@ -303,19 +303,19 @@ impl<'a, 'b> Context<'a, 'b> {
                                   format!("argument redeclared with type `{}` when \
                                            it was previously `{}`",
                                           *ty,
-                                          *cur).as_slice());
+                                          *cur)[]);
             }
             (&Known(ref cur), _) => {
                 self.ecx.span_err(sp,
                                   format!("argument used to format with `{}` was \
                                            attempted to not be used for formatting",
-                                           *cur).as_slice());
+                                           *cur)[]);
             }
             (_, &Known(ref ty)) => {
                 self.ecx.span_err(sp,
                                   format!("argument previously used as a format \
                                            argument attempted to be used as `{}`",
-                                           *ty).as_slice());
+                                           *ty)[]);
             }
             (_, _) => {
                 self.ecx.span_err(sp, "argument declared with multiple formats");
@@ -380,7 +380,7 @@ impl<'a, 'b> Context<'a, 'b> {
     /// Translate the accumulated string literals to a literal expression
     fn trans_literal_string(&mut self) -> P<ast::Expr> {
         let sp = self.fmtsp;
-        let s = token::intern_and_get_ident(self.literal.as_slice());
+        let s = token::intern_and_get_ident(self.literal[]);
         self.literal.clear();
         self.ecx.expr_str(sp, s)
     }
@@ -552,7 +552,7 @@ impl<'a, 'b> Context<'a, 'b> {
                 None => continue // error already generated
             };
 
-            let name = self.ecx.ident_of(format!("__arg{}", i).as_slice());
+            let name = self.ecx.ident_of(format!("__arg{}", i)[]);
             pats.push(self.ecx.pat_ident(e.span, name));
             locals.push(Context::format_arg(self.ecx, e.span, arg_ty,
                                             self.ecx.expr_ident(e.span, name)));
@@ -569,7 +569,7 @@ impl<'a, 'b> Context<'a, 'b> {
             };
 
             let lname = self.ecx.ident_of(format!("__arg{}",
-                                                  *name).as_slice());
+                                                  *name)[]);
             pats.push(self.ecx.pat_ident(e.span, lname));
             names[self.name_positions[*name]] =
                 Some(Context::format_arg(self.ecx, e.span, arg_ty,
@@ -652,7 +652,7 @@ impl<'a, 'b> Context<'a, 'b> {
                   -> P<ast::Expr> {
         let trait_ = match *ty {
             Known(ref tyname) => {
-                match tyname.as_slice() {
+                match tyname[] {
                     ""  => "Show",
                     "?" => "Show",
                     "e" => "LowerExp",
@@ -665,7 +665,7 @@ impl<'a, 'b> Context<'a, 'b> {
                     _ => {
                         ecx.span_err(sp,
                                      format!("unknown format trait `{}`",
-                                             *tyname).as_slice());
+                                             *tyname)[]);
                         "Dummy"
                     }
                 }
@@ -760,8 +760,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
     match parser.errors.remove(0) {
         Some(error) => {
             cx.ecx.span_err(cx.fmtsp,
-                            format!("invalid format string: {}",
-                                    error).as_slice());
+                            format!("invalid format string: {}", error)[]);
             return DummyResult::raw_expr(sp);
         }
         None => {}
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index c7cb41e2ece..368d4fa8447 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -474,7 +474,7 @@ pub fn expand_quote_stmt(cx: &mut ExtCtxt,
 }
 
 fn ids_ext(strs: Vec<String> ) -> Vec<ast::Ident> {
-    strs.iter().map(|str| str_to_ident((*str).as_slice())).collect()
+    strs.iter().map(|str| str_to_ident((*str)[])).collect()
 }
 
 fn id_ext(str: &str) -> ast::Ident {
@@ -676,7 +676,7 @@ fn mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
             for i in range(0, tt.len()) {
                 seq.push(tt.get_tt(i));
             }
-            mk_tts(cx, seq.as_slice())
+            mk_tts(cx, seq[])
         }
         ast::TtToken(sp, ref tok) => {
             let e_sp = cx.expr_ident(sp, id_ext("_sp"));
@@ -765,7 +765,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
 
     let mut vector = vec!(stmt_let_sp, stmt_let_tt);
-    vector.extend(mk_tts(cx, tts.as_slice()).into_iter());
+    vector.extend(mk_tts(cx, tts[]).into_iter());
     let block = cx.expr_block(
         cx.block_all(sp,
                      Vec::new(),
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index 570231940aa..7c2c5c1530c 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -57,7 +57,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 
     let topmost = cx.original_span_in_file();
     let loc = cx.codemap().lookup_char_pos(topmost.lo);
-    let filename = token::intern_and_get_ident(loc.file.name.as_slice());
+    let filename = token::intern_and_get_ident(loc.file.name[]);
     base::MacExpr::new(cx.expr_str(topmost, filename))
 }
 
@@ -65,7 +65,7 @@ pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                         -> Box<base::MacResult+'static> {
     let s = pprust::tts_to_string(tts);
     base::MacExpr::new(cx.expr_str(sp,
-                                   token::intern_and_get_ident(s.as_slice())))
+                                   token::intern_and_get_ident(s[])))
 }
 
 pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
@@ -78,7 +78,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                    .connect("::");
     base::MacExpr::new(cx.expr_str(
             sp,
-            token::intern_and_get_ident(string.as_slice())))
+            token::intern_and_get_ident(string[])))
 }
 
 /// include! : parse the given file as an expr
@@ -137,7 +137,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
             cx.span_err(sp,
                         format!("couldn't read {}: {}",
                                 file.display(),
-                                e).as_slice());
+                                e)[]);
             return DummyResult::expr(sp);
         }
         Ok(bytes) => bytes,
@@ -147,7 +147,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
             // Add this input file to the code map to make it available as
             // dependency information
             let filename = file.display().to_string();
-            let interned = token::intern_and_get_ident(src.as_slice());
+            let interned = token::intern_and_get_ident(src[]);
             cx.codemap().new_filemap(filename, src);
 
             base::MacExpr::new(cx.expr_str(sp, interned))
@@ -155,7 +155,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
         Err(_) => {
             cx.span_err(sp,
                         format!("{} wasn't a utf-8 file",
-                                file.display()).as_slice());
+                                file.display())[]);
             return DummyResult::expr(sp);
         }
     }
@@ -171,9 +171,7 @@ pub fn expand_include_bin(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     match File::open(&file).read_to_end() {
         Err(e) => {
             cx.span_err(sp,
-                        format!("couldn't read {}: {}",
-                                file.display(),
-                                e).as_slice());
+                        format!("couldn't read {}: {}", file.display(), e)[]);
             return DummyResult::expr(sp);
         }
         Ok(bytes) => {
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index bc639c32380..73ef18b8449 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -153,7 +153,7 @@ pub fn count_names(ms: &[TokenTree]) -> uint {
                 seq.num_captures
             }
             &TtDelimited(_, ref delim) => {
-                count_names(delim.tts.as_slice())
+                count_names(delim.tts[])
             }
             &TtToken(_, MatchNt(..)) => {
                 1
@@ -165,7 +165,7 @@ pub fn count_names(ms: &[TokenTree]) -> uint {
 
 pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: BytePos)
                            -> Box<MatcherPos> {
-    let match_idx_hi = count_names(ms.as_slice());
+    let match_idx_hi = count_names(ms[]);
     let matches = Vec::from_fn(match_idx_hi, |_i| Vec::new());
     box MatcherPos {
         stack: vec![],
@@ -229,7 +229,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
                         p_s.span_diagnostic
                            .span_fatal(sp,
                                        format!("duplicated bind name: {}",
-                                               string.get()).as_slice())
+                                               string.get())[])
                     }
                 }
             }
@@ -254,13 +254,13 @@ pub fn parse_or_else(sess: &ParseSess,
                      rdr: TtReader,
                      ms: Vec<TokenTree> )
                      -> HashMap<Ident, Rc<NamedMatch>> {
-    match parse(sess, cfg, rdr, ms.as_slice()) {
+    match parse(sess, cfg, rdr, ms[]) {
         Success(m) => m,
         Failure(sp, str) => {
-            sess.span_diagnostic.span_fatal(sp, str.as_slice())
+            sess.span_diagnostic.span_fatal(sp, str[])
         }
         Error(sp, str) => {
-            sess.span_diagnostic.span_fatal(sp, str.as_slice())
+            sess.span_diagnostic.span_fatal(sp, str[])
         }
     }
 }
@@ -416,7 +416,7 @@ pub fn parse(sess: &ParseSess,
                         }
                     }
                     TtToken(sp, SubstNt(..)) => {
-                        return Error(sp, "Cannot transcribe in macro LHS".into_string())
+                        return Error(sp, "Cannot transcribe in macro LHS".to_string())
                     }
                     seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..)) => {
                         let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq));
@@ -446,7 +446,7 @@ pub fn parse(sess: &ParseSess,
                 for dv in eof_eis[0].matches.iter_mut() {
                     v.push(dv.pop().unwrap());
                 }
-                return Success(nameize(sess, ms, v.as_slice()));
+                return Success(nameize(sess, ms, v[]));
             } else if eof_eis.len() > 1u {
                 return Error(sp, "ambiguity: multiple successful parses".to_string());
             } else {
@@ -521,7 +521,7 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal {
         _ => {
             let token_str = pprust::token_to_string(&p.token);
             p.fatal((format!("expected ident, found {}",
-                             token_str.as_slice())).as_slice())
+                             token_str[]))[])
         }
       },
       "path" => {
@@ -535,8 +535,7 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal {
         res
       }
       _ => {
-          p.fatal(format!("unsupported builtin nonterminal parser: {}",
-                          name).as_slice())
+          p.fatal(format!("unsupported builtin nonterminal parser: {}", name)[])
       }
     }
 }
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 92c68b7a9c7..08014dc1338 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -52,7 +52,7 @@ impl<'a> ParserAnyMacro<'a> {
                                following",
                               token_str);
             let span = parser.span;
-            parser.span_err(span, msg.as_slice());
+            parser.span_err(span, msg[]);
         }
     }
 }
@@ -124,8 +124,8 @@ impl TTMacroExpander for MacroRulesMacroExpander {
                           sp,
                           self.name,
                           arg,
-                          self.lhses.as_slice(),
-                          self.rhses.as_slice())
+                          self.lhses[],
+                          self.rhses[])
     }
 }
 
@@ -160,7 +160,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
         match **lhs {
           MatchedNonterminal(NtTT(ref lhs_tt)) => {
             let lhs_tt = match **lhs_tt {
-                TtDelimited(_, ref delim) => delim.tts.as_slice(),
+                TtDelimited(_, ref delim) => delim.tts[],
                 _ => cx.span_fatal(sp, "malformed macro lhs")
             };
             // `None` is because we're not interpolating
@@ -198,13 +198,13 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                 best_fail_spot = sp;
                 best_fail_msg = (*msg).clone();
               },
-              Error(sp, ref msg) => cx.span_fatal(sp, msg.as_slice())
+              Error(sp, ref msg) => cx.span_fatal(sp, msg[])
             }
           }
           _ => cx.bug("non-matcher found in parsed lhses")
         }
     }
-    cx.span_fatal(best_fail_spot, best_fail_msg.as_slice());
+    cx.span_fatal(best_fail_spot, best_fail_msg[]);
 }
 
 // Note that macro-by-example's input is also matched against a token tree:
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 378dbba07fa..deed0b78e87 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -223,7 +223,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                     }
                     LisContradiction(ref msg) => {
                         // FIXME #2887 blame macro invoker instead
-                        r.sp_diag.span_fatal(sp.clone(), msg.as_slice());
+                        r.sp_diag.span_fatal(sp.clone(), msg[]);
                     }
                     LisConstraint(len, _) => {
                         if len == 0 {
@@ -280,7 +280,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                                 r.sp_diag.span_fatal(
                                     r.cur_span, /* blame the macro writer */
                                     format!("variable '{}' is still repeating at this depth",
-                                            token::get_ident(ident)).as_slice());
+                                            token::get_ident(ident))[]);
                             }
                         }
                     }
diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs
index 0e0a87c74f8..d53a4b0e8d1 100644
--- a/src/libsyntax/feature_gate.rs
+++ b/src/libsyntax/feature_gate.rs
@@ -133,7 +133,7 @@ impl<'a> Context<'a> {
             self.span_handler.span_err(span, explain);
             self.span_handler.span_help(span, format!("add #![feature({})] to the \
                                                        crate attributes to enable",
-                                                      feature).as_slice());
+                                                      feature)[]);
         }
     }
 
@@ -187,7 +187,7 @@ impl<'a, 'v> Visitor<'v> for Context<'a> {
         }
         match i.node {
             ast::ItemForeignMod(ref foreign_module) => {
-                if attr::contains_name(i.attrs.as_slice(), "link_args") {
+                if attr::contains_name(i.attrs[], "link_args") {
                     self.gate_feature("link_args", i.span,
                                       "the `link_args` attribute is not portable \
                                        across platforms, it is recommended to \
@@ -201,14 +201,14 @@ impl<'a, 'v> Visitor<'v> for Context<'a> {
             }
 
             ast::ItemFn(..) => {
-                if attr::contains_name(i.attrs.as_slice(), "plugin_registrar") {
+                if attr::contains_name(i.attrs[], "plugin_registrar") {
                     self.gate_feature("plugin_registrar", i.span,
                                       "compiler plugins are experimental and possibly buggy");
                 }
             }
 
             ast::ItemStruct(..) => {
-                if attr::contains_name(i.attrs.as_slice(), "simd") {
+                if attr::contains_name(i.attrs[], "simd") {
                     self.gate_feature("simd", i.span,
                                       "SIMD types are experimental and possibly buggy");
                 }
@@ -285,7 +285,7 @@ impl<'a, 'v> Visitor<'v> for Context<'a> {
     }
 
     fn visit_foreign_item(&mut self, i: &ast::ForeignItem) {
-        if attr::contains_name(i.attrs.as_slice(), "linkage") {
+        if attr::contains_name(i.attrs[], "linkage") {
             self.gate_feature("linkage", i.span,
                               "the `linkage` attribute is experimental \
                                and not portable across platforms")
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 41fee1556ab..41693d9d47a 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -92,8 +92,7 @@ impl<'a> ParserAttr for Parser<'a> {
             }
             _ => {
                 let token_str = self.this_token_to_string();
-                self.fatal(format!("expected `#`, found `{}`",
-                                   token_str).as_slice());
+                self.fatal(format!("expected `#`, found `{}`", token_str)[]);
             }
         };
 
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index 95bae63f58f..b8da8365f7e 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -82,7 +82,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
         while j > i && lines[j - 1].trim().is_empty() {
             j -= 1;
         }
-        return lines.slice(i, j).iter().map(|x| (*x).clone()).collect();
+        return lines[i..j].iter().map(|x| (*x).clone()).collect();
     }
 
     /// remove a "[ \t]*\*" block from each line, if possible
@@ -116,7 +116,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
 
         if can_trim {
             lines.iter().map(|line| {
-                line.slice(i + 1, line.len()).to_string()
+                line[i + 1..line.len()].to_string()
             }).collect()
         } else {
             lines
@@ -127,12 +127,12 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
     static ONLINERS: &'static [&'static str] = &["///!", "///", "//!", "//"];
     for prefix in ONLINERS.iter() {
         if comment.starts_with(*prefix) {
-            return comment.slice_from(prefix.len()).to_string();
+            return comment[prefix.len()..].to_string();
         }
     }
 
     if comment.starts_with("/*") {
-        let lines = comment.slice(3u, comment.len() - 2u)
+        let lines = comment[3u..comment.len() - 2u]
             .lines_any()
             .map(|s| s.to_string())
             .collect::<Vec<String> >();
@@ -187,7 +187,7 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool,
         let line = rdr.read_one_line_comment();
         debug!("{}", line);
         // Doc comments are not put in comments.
-        if is_doc_comment(line.as_slice()) {
+        if is_doc_comment(line[]) {
             break;
         }
         lines.push(line);
@@ -224,10 +224,10 @@ fn all_whitespace(s: &str, col: CharPos) -> Option<uint> {
 fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String> ,
                                         s: String, col: CharPos) {
     let len = s.len();
-    let s1 = match all_whitespace(s.as_slice(), col) {
+    let s1 = match all_whitespace(s[], col) {
         Some(col) => {
             if col < len {
-                s.slice(col, len).to_string()
+                s[col..len].to_string()
             } else {
                 "".to_string()
             }
@@ -261,7 +261,7 @@ fn read_block_comment(rdr: &mut StringReader,
             rdr.bump();
             rdr.bump();
         }
-        if is_block_doc_comment(curr_line.as_slice()) {
+        if is_block_doc_comment(curr_line[]) {
             return
         }
         assert!(!curr_line.contains_char('\n'));
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index da908f46ff6..13d020f6ae3 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -194,7 +194,7 @@ impl<'a> StringReader<'a> {
         let mut m = m.to_string();
         m.push_str(": ");
         for c in c.escape_default() { m.push(c) }
-        self.fatal_span_(from_pos, to_pos, m.as_slice());
+        self.fatal_span_(from_pos, to_pos, m[]);
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@@ -203,7 +203,7 @@ impl<'a> StringReader<'a> {
         let mut m = m.to_string();
         m.push_str(": ");
         for c in c.escape_default() { m.push(c) }
-        self.err_span_(from_pos, to_pos, m.as_slice());
+        self.err_span_(from_pos, to_pos, m[]);
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the
@@ -212,8 +212,8 @@ impl<'a> StringReader<'a> {
         m.push_str(": ");
         let from = self.byte_offset(from_pos).to_uint();
         let to = self.byte_offset(to_pos).to_uint();
-        m.push_str(self.filemap.src.as_slice().slice(from, to));
-        self.fatal_span_(from_pos, to_pos, m.as_slice());
+        m.push_str(self.filemap.src[from..to]);
+        self.fatal_span_(from_pos, to_pos, m[]);
     }
 
     /// Advance peek_tok and peek_span to refer to the next token, and
@@ -299,7 +299,7 @@ impl<'a> StringReader<'a> {
             while i < s.len() {
                 let str::CharRange { ch, next } = s.char_range_at(i);
                 if ch == '\r' {
-                    if j < i { buf.push_str(s.slice(j, i)); }
+                    if j < i { buf.push_str(s[j..i]); }
                     j = next;
                     if next >= s.len() || s.char_at(next) != '\n' {
                         let pos = start + BytePos(i as u32);
@@ -309,7 +309,7 @@ impl<'a> StringReader<'a> {
                 }
                 i = next;
             }
-            if j < s.len() { buf.push_str(s.slice_from(j)); }
+            if j < s.len() { buf.push_str(s[j..]); }
             buf
         }
     }
@@ -358,7 +358,7 @@ impl<'a> StringReader<'a> {
 
     pub fn nextnextch(&self) -> Option<char> {
         let offset = self.byte_offset(self.pos).to_uint();
-        let s = self.filemap.deref().src.as_slice();
+        let s = self.filemap.deref().src[];
         if offset >= s.len() { return None }
         let str::CharRange { next, .. } = s.char_range_at(offset);
         if next < s.len() {
@@ -554,7 +554,7 @@ impl<'a> StringReader<'a> {
                     self.translate_crlf(start_bpos, string,
                                         "bare CR not allowed in block doc-comment")
                 } else { string.into_cow() };
-                token::DocComment(token::intern(string.as_slice()))
+                token::DocComment(token::intern(string[]))
             } else {
                 token::Comment
             };
@@ -1108,7 +1108,7 @@ impl<'a> StringReader<'a> {
                 // expansion purposes. See #12512 for the gory details of why
                 // this is necessary.
                 let ident = self.with_str_from(start, |lifetime_name| {
-                    str_to_ident(format!("'{}", lifetime_name).as_slice())
+                    str_to_ident(format!("'{}", lifetime_name)[])
                 });
 
                 // Conjure up a "keyword checking ident" to make sure that
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 3d0877dd432..8cefb111fd1 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -251,17 +251,17 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
         Err(e) => {
             err(format!("couldn't read {}: {}",
                         path.display(),
-                        e).as_slice());
+                        e)[]);
             unreachable!()
         }
     };
-    match str::from_utf8(bytes.as_slice()) {
+    match str::from_utf8(bytes[]).ok() {
         Some(s) => {
             return string_to_filemap(sess, s.to_string(),
                                      path.as_str().unwrap().to_string())
         }
         None => {
-            err(format!("{} is not UTF-8 encoded", path.display()).as_slice())
+            err(format!("{} is not UTF-8 encoded", path.display())[])
         }
     }
     unreachable!()
@@ -391,10 +391,10 @@ pub fn char_lit(lit: &str) -> (char, int) {
     }
 
     let msg = format!("lexer should have rejected a bad character escape {}", lit);
-    let msg2 = msg.as_slice();
+    let msg2 = msg[];
 
     fn esc(len: uint, lit: &str) -> Option<(char, int)> {
-        num::from_str_radix(lit.slice(2, len), 16)
+        num::from_str_radix(lit[2..len], 16)
         .and_then(char::from_u32)
         .map(|x| (x, len as int))
     }
@@ -402,10 +402,10 @@ pub fn char_lit(lit: &str) -> (char, int) {
     let unicode_escape: || -> Option<(char, int)> = ||
         if lit.as_bytes()[2] == b'{' {
             let idx = lit.find('}').expect(msg2);
-            let subslice = lit.slice(3, idx);
+            let subslice = lit[3..idx];
             num::from_str_radix(subslice, 16)
                 .and_then(char::from_u32)
-                .map(|x| (x, subslice.char_len() as int + 4))
+                .map(|x| (x, subslice.chars().count() as int + 4))
         } else {
             esc(6, lit)
         };
@@ -429,7 +429,7 @@ pub fn str_lit(lit: &str) -> String {
     let error = |i| format!("lexer should have rejected {} at {}", lit, i);
 
     /// Eat everything up to a non-whitespace
-    fn eat<'a>(it: &mut iter::Peekable<(uint, char), str::CharOffsets<'a>>) {
+    fn eat<'a>(it: &mut iter::Peekable<(uint, char), str::CharIndices<'a>>) {
         loop {
             match it.peek().map(|x| x.1) {
                 Some(' ') | Some('\n') | Some('\r') | Some('\t') => {
@@ -464,7 +464,7 @@ pub fn str_lit(lit: &str) -> String {
                             eat(&mut chars);
                         } else {
                             // otherwise, a normal escape
-                            let (c, n) = char_lit(lit.slice_from(i));
+                            let (c, n) = char_lit(lit[i..]);
                             for _ in range(0, n - 1) { // we don't need to move past the first \
                                 chars.next();
                             }
@@ -527,7 +527,7 @@ pub fn raw_str_lit(lit: &str) -> String {
 fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
     s.len() > 1 &&
         first_chars.contains(&s.char_at(0)) &&
-        s.slice_from(1).chars().all(|c| '0' <= c && c <= '9')
+        s[1..].chars().all(|c| '0' <= c && c <= '9')
 }
 
 fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>,
@@ -540,7 +540,7 @@ fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>,
             if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
                 // if it looks like a width, lets try to be helpful.
                 sd.span_err(sp, &*format!("illegal width `{}` for float literal, \
-                                          valid widths are 32 and 64", suf.slice_from(1)));
+                                          valid widths are 32 and 64", suf[1..]));
             } else {
                 sd.span_err(sp, &*format!("illegal suffix `{}` for float literal, \
                                           valid suffixes are `f32` and `f64`", suf));
@@ -576,7 +576,7 @@ pub fn byte_lit(lit: &str) -> (u8, uint) {
             b'\'' => b'\'',
             b'0' => b'\0',
             _ => {
-                match ::std::num::from_str_radix::<u64>(lit.slice(2, 4), 16) {
+                match ::std::num::from_str_radix::<u64>(lit[2..4], 16) {
                     Some(c) =>
                         if c > 0xFF {
                             panic!(err(2))
@@ -626,7 +626,7 @@ pub fn binary_lit(lit: &str) -> Rc<Vec<u8>> {
                     }
                     _ => {
                         // otherwise, a normal escape
-                        let (c, n) = byte_lit(lit.slice_from(i));
+                        let (c, n) = byte_lit(lit[i..]);
                         // we don't need to move past the first \
                         for _ in range(0, n - 1) {
                             chars.next();
@@ -655,7 +655,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) ->
     // s can only be ascii, byte indexing is fine
 
     let s2 = s.chars().filter(|&c| c != '_').collect::<String>();
-    let mut s = s2.as_slice();
+    let mut s = s2[];
 
     debug!("integer_lit: {}, {}", s, suffix);
 
@@ -688,7 +688,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) ->
     }
 
     if base != 10 {
-        s = s.slice_from(2);
+        s = s[2..];
     }
 
     if let Some(suf) = suffix {
@@ -710,7 +710,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) ->
                 if looks_like_width_suffix(&['i', 'u'], suf) {
                     sd.span_err(sp, &*format!("illegal width `{}` for integer literal; \
                                               valid widths are 8, 16, 32 and 64",
-                                              suf.slice_from(1)));
+                                              suf[1..]));
                 } else {
                     sd.span_err(sp, &*format!("illegal suffix `{}` for numeric literal", suf));
                 }
@@ -808,7 +808,7 @@ mod test {
     #[test]
     fn string_to_tts_macro () {
         let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
-        let tts: &[ast::TokenTree] = tts.as_slice();
+        let tts: &[ast::TokenTree] = tts[];
         match tts {
             [ast::TtToken(_, token::Ident(name_macro_rules, token::Plain)),
              ast::TtToken(_, token::Not),
@@ -816,19 +816,19 @@ mod test {
              ast::TtDelimited(_, ref macro_delimed)]
             if name_macro_rules.as_str() == "macro_rules"
             && name_zip.as_str() == "zip" => {
-                match macro_delimed.tts.as_slice() {
+                match macro_delimed.tts[] {
                     [ast::TtDelimited(_, ref first_delimed),
                      ast::TtToken(_, token::FatArrow),
                      ast::TtDelimited(_, ref second_delimed)]
                     if macro_delimed.delim == token::Paren => {
-                        match first_delimed.tts.as_slice() {
+                        match first_delimed.tts[] {
                             [ast::TtToken(_, token::Dollar),
                              ast::TtToken(_, token::Ident(name, token::Plain))]
                             if first_delimed.delim == token::Paren
                             && name.as_str() == "a" => {},
                             _ => panic!("value 3: {}", **first_delimed),
                         }
-                        match second_delimed.tts.as_slice() {
+                        match second_delimed.tts[] {
                             [ast::TtToken(_, token::Dollar),
                              ast::TtToken(_, token::Ident(name, token::Plain))]
                             if second_delimed.delim == token::Paren
@@ -1106,24 +1106,24 @@ mod test {
         let use_s = "use foo::bar::baz;";
         let vitem = string_to_view_item(use_s.to_string());
         let vitem_s = view_item_to_string(&vitem);
-        assert_eq!(vitem_s.as_slice(), use_s);
+        assert_eq!(vitem_s[], use_s);
 
         let use_s = "use foo::bar as baz;";
         let vitem = string_to_view_item(use_s.to_string());
         let vitem_s = view_item_to_string(&vitem);
-        assert_eq!(vitem_s.as_slice(), use_s);
+        assert_eq!(vitem_s[], use_s);
     }
 
     #[test] fn parse_extern_crate() {
         let ex_s = "extern crate foo;";
         let vitem = string_to_view_item(ex_s.to_string());
         let vitem_s = view_item_to_string(&vitem);
-        assert_eq!(vitem_s.as_slice(), ex_s);
+        assert_eq!(vitem_s[], ex_s);
 
         let ex_s = "extern crate \"foo\" as bar;";
         let vitem = string_to_view_item(ex_s.to_string());
         let vitem_s = view_item_to_string(&vitem);
-        assert_eq!(vitem_s.as_slice(), ex_s);
+        assert_eq!(vitem_s[], ex_s);
     }
 
     fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
@@ -1161,9 +1161,9 @@ mod test {
         for &src in srcs.iter() {
             let spans = get_spans_of_pat_idents(src);
             let Span{lo:lo,hi:hi,..} = spans[0];
-            assert!("self" == src.slice(lo.to_uint(), hi.to_uint()),
+            assert!("self" == src[lo.to_uint()..hi.to_uint()],
                     "\"{}\" != \"self\". src=\"{}\"",
-                    src.slice(lo.to_uint(), hi.to_uint()), src)
+                    src[lo.to_uint()..hi.to_uint()], src)
         }
     }
 
@@ -1202,7 +1202,7 @@ mod test {
         let docs = item.attrs.iter().filter(|a| a.name().get() == "doc")
                     .map(|a| a.value_str().unwrap().get().to_string()).collect::<Vec<_>>();
         let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
-        assert_eq!(docs.as_slice(), b);
+        assert_eq!(docs[], b);
 
         let source = "/** doc comment\r\n *  with CRLF */\r\nfn foo() {}".to_string();
         let item = parse_item_from_source_str(name, source, Vec::new(), &sess).unwrap();
diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs
index a6ddcbf9ac4..e3c831c09ba 100644
--- a/src/libsyntax/parse/obsolete.rs
+++ b/src/libsyntax/parse/obsolete.rs
@@ -113,13 +113,13 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> {
               kind_str: &str,
               desc: &str) {
         self.span_err(sp,
-                      format!("obsolete syntax: {}", kind_str).as_slice());
+                      format!("obsolete syntax: {}", kind_str)[]);
 
         if !self.obsolete_set.contains(&kind) {
             self.sess
                 .span_diagnostic
                 .handler()
-                .note(format!("{}", desc).as_slice());
+                .note(format!("{}", desc)[]);
             self.obsolete_set.insert(kind);
         }
     }
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 19af118b190..7e53b28a09c 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -319,7 +319,7 @@ impl TokenType {
     fn to_string(&self) -> String {
         match *self {
             TokenType::Token(ref t) => format!("`{}`", Parser::token_to_string(t)),
-            TokenType::Operator => "an operator".into_string(),
+            TokenType::Operator => "an operator".to_string(),
         }
     }
 }
@@ -384,12 +384,12 @@ impl<'a> Parser<'a> {
         let token_str = Parser::token_to_string(t);
         let last_span = self.last_span;
         self.span_fatal(last_span, format!("unexpected token: `{}`",
-                                                token_str).as_slice());
+                                                token_str)[]);
     }
 
     pub fn unexpected(&mut self) -> ! {
         let this_token = self.this_token_to_string();
-        self.fatal(format!("unexpected token: `{}`", this_token).as_slice());
+        self.fatal(format!("unexpected token: `{}`", this_token)[]);
     }
 
     /// Expect and consume the token t. Signal an error if
@@ -403,7 +403,7 @@ impl<'a> Parser<'a> {
                 let this_token_str = self.this_token_to_string();
                 self.fatal(format!("expected `{}`, found `{}`",
                                    token_str,
-                                   this_token_str).as_slice())
+                                   this_token_str)[])
             }
         } else {
             self.expect_one_of(slice::ref_slice(t), &[]);
@@ -420,7 +420,7 @@ impl<'a> Parser<'a> {
             let mut i = tokens.iter();
             // This might be a sign we need a connect method on Iterator.
             let b = i.next()
-                     .map_or("".into_string(), |t| t.to_string());
+                     .map_or("".to_string(), |t| t.to_string());
             i.enumerate().fold(b, |mut b, (i, ref a)| {
                 if tokens.len() > 2 && i == tokens.len() - 2 {
                     b.push_str(", or ");
@@ -444,7 +444,7 @@ impl<'a> Parser<'a> {
             expected.push_all(&*self.expected_tokens);
             expected.sort_by(|a, b| a.to_string().cmp(&b.to_string()));
             expected.dedup();
-            let expect = tokens_to_string(expected.as_slice());
+            let expect = tokens_to_string(expected[]);
             let actual = self.this_token_to_string();
             self.fatal(
                 (if expected.len() != 1 {
@@ -455,7 +455,7 @@ impl<'a> Parser<'a> {
                     (format!("expected {}, found `{}`",
                              expect,
                              actual))
-                }).as_slice()
+                })[]
             )
         }
     }
@@ -488,7 +488,7 @@ impl<'a> Parser<'a> {
            // might be unit-struct construction; check for recoverable input error.
             let mut expected = edible.iter().map(|x| x.clone()).collect::<Vec<_>>();
             expected.push_all(inedible);
-            self.check_for_erroneous_unit_struct_expecting(expected.as_slice());
+            self.check_for_erroneous_unit_struct_expecting(expected[]);
         }
         self.expect_one_of(edible, inedible)
     }
@@ -505,9 +505,9 @@ impl<'a> Parser<'a> {
                .as_ref()
                .map_or(false, |t| t.is_ident() || t.is_path()) {
             let mut expected = edible.iter().map(|x| x.clone()).collect::<Vec<_>>();
-            expected.push_all(inedible.as_slice());
+            expected.push_all(inedible[]);
             self.check_for_erroneous_unit_struct_expecting(
-                expected.as_slice());
+                expected[]);
         }
         self.expect_one_of(edible, inedible)
     }
@@ -530,7 +530,7 @@ impl<'a> Parser<'a> {
             _ => {
                 let token_str = self.this_token_to_string();
                 self.fatal((format!("expected ident, found `{}`",
-                                    token_str)).as_slice())
+                                    token_str))[])
             }
         }
     }
@@ -584,7 +584,7 @@ impl<'a> Parser<'a> {
             let id_interned_str = token::get_name(kw.to_name());
             let token_str = self.this_token_to_string();
             self.fatal(format!("expected `{}`, found `{}`",
-                               id_interned_str, token_str).as_slice())
+                               id_interned_str, token_str)[])
         }
     }
 
@@ -595,7 +595,7 @@ impl<'a> Parser<'a> {
             let span = self.span;
             self.span_err(span,
                           format!("expected identifier, found keyword `{}`",
-                                  token_str).as_slice());
+                                  token_str)[]);
         }
     }
 
@@ -604,7 +604,7 @@ impl<'a> Parser<'a> {
         if self.token.is_reserved_keyword() {
             let token_str = self.this_token_to_string();
             self.fatal(format!("`{}` is a reserved keyword",
-                               token_str).as_slice())
+                               token_str)[])
         }
     }
 
@@ -624,7 +624,7 @@ impl<'a> Parser<'a> {
                     Parser::token_to_string(&token::BinOp(token::And));
                 self.fatal(format!("expected `{}`, found `{}`",
                                    found_token,
-                                   token_str).as_slice())
+                                   token_str)[])
             }
         }
     }
@@ -645,7 +645,7 @@ impl<'a> Parser<'a> {
                     Parser::token_to_string(&token::BinOp(token::Or));
                 self.fatal(format!("expected `{}`, found `{}`",
                                    token_str,
-                                   found_token).as_slice())
+                                   found_token)[])
             }
         }
     }
@@ -711,7 +711,7 @@ impl<'a> Parser<'a> {
             let token_str = Parser::token_to_string(&token::Lt);
             self.fatal(format!("expected `{}`, found `{}`",
                                token_str,
-                               found_token).as_slice())
+                               found_token)[])
         }
     }
 
@@ -763,7 +763,7 @@ impl<'a> Parser<'a> {
                 let this_token_str = self.this_token_to_string();
                 self.fatal(format!("expected `{}`, found `{}`",
                                    gt_str,
-                                   this_token_str).as_slice())
+                                   this_token_str)[])
             }
         }
     }
@@ -1392,7 +1392,7 @@ impl<'a> Parser<'a> {
                     let (inner_attrs, body) =
                         p.parse_inner_attrs_and_block();
                     let mut attrs = attrs;
-                    attrs.push_all(inner_attrs.as_slice());
+                    attrs.push_all(inner_attrs[]);
                     ProvidedMethod(P(ast::Method {
                         attrs: attrs,
                         id: ast::DUMMY_NODE_ID,
@@ -1411,7 +1411,7 @@ impl<'a> Parser<'a> {
                   _ => {
                       let token_str = p.this_token_to_string();
                       p.fatal((format!("expected `;` or `{{`, found `{}`",
-                                       token_str)).as_slice())
+                                       token_str))[])
                   }
                 }
             }
@@ -1606,7 +1606,7 @@ impl<'a> Parser<'a> {
         } else {
             let this_token_str = self.this_token_to_string();
             let msg = format!("expected type, found `{}`", this_token_str);
-            self.fatal(msg.as_slice());
+            self.fatal(msg[]);
         };
 
         let sp = mk_sp(lo, self.last_span.hi);
@@ -1753,14 +1753,14 @@ impl<'a> Parser<'a> {
 
                     token::Str_(s) => {
                         (true,
-                         LitStr(token::intern_and_get_ident(parse::str_lit(s.as_str()).as_slice()),
+                         LitStr(token::intern_and_get_ident(parse::str_lit(s.as_str())[]),
                                 ast::CookedStr))
                     }
                     token::StrRaw(s, n) => {
                         (true,
                          LitStr(
                             token::intern_and_get_ident(
-                                parse::raw_str_lit(s.as_str()).as_slice()),
+                                parse::raw_str_lit(s.as_str())[]),
                             ast::RawStr(n)))
                     }
                     token::Binary(i) =>
@@ -2004,7 +2004,7 @@ impl<'a> Parser<'a> {
                 };
             }
             _ => {
-                self.fatal(format!("expected a lifetime name").as_slice());
+                self.fatal(format!("expected a lifetime name")[]);
             }
         }
     }
@@ -2042,7 +2042,7 @@ impl<'a> Parser<'a> {
                     let msg = format!("expected `,` or `>` after lifetime \
                                       name, found `{}`",
                                       this_token_str);
-                    self.fatal(msg.as_slice());
+                    self.fatal(msg[]);
                 }
             }
         }
@@ -2517,7 +2517,7 @@ impl<'a> Parser<'a> {
                     hi = self.span.hi;
                     self.bump();
 
-                    let index = from_str::<uint>(n.as_str());
+                    let index = n.as_str().parse::<uint>();
                     match index {
                         Some(n) => {
                             let id = spanned(dot, hi, n);
@@ -2535,16 +2535,16 @@ impl<'a> Parser<'a> {
                     let last_span = self.last_span;
                     let fstr = n.as_str();
                     self.span_err(last_span,
-                                  format!("unexpected token: `{}`", n.as_str()).as_slice());
+                                  format!("unexpected token: `{}`", n.as_str())[]);
                     if fstr.chars().all(|x| "0123456789.".contains_char(x)) {
-                        let float = match from_str::<f64>(fstr) {
+                        let float = match fstr.parse::<f64>() {
                             Some(f) => f,
                             None => continue,
                         };
                         self.span_help(last_span,
                             format!("try parenthesizing the first index; e.g., `(foo.{}){}`",
                                     float.trunc() as uint,
-                                    float.fract().to_string()[1..]).as_slice());
+                                    float.fract().to_string()[1..])[]);
                     }
                     self.abort_if_errors();
 
@@ -2716,7 +2716,7 @@ impl<'a> Parser<'a> {
                   };
                   let token_str = p.this_token_to_string();
                   p.fatal(format!("incorrect close delimiter: `{}`",
-                                  token_str).as_slice())
+                                  token_str)[])
               },
               /* we ought to allow different depths of unquotation */
               token::Dollar if p.quote_depth > 0u => {
@@ -2734,7 +2734,7 @@ impl<'a> Parser<'a> {
                     let seq = match seq {
                         Spanned { node, .. } => node,
                     };
-                    let name_num = macro_parser::count_names(seq.as_slice());
+                    let name_num = macro_parser::count_names(seq[]);
                     TtSequence(mk_sp(sp.lo, p.span.hi),
                                Rc::new(SequenceRepetition {
                                    tts: seq,
@@ -2885,7 +2885,7 @@ impl<'a> Parser<'a> {
                         let this_token_to_string = self.this_token_to_string();
                         self.span_err(span,
                                       format!("expected expression, found `{}`",
-                                              this_token_to_string).as_slice());
+                                              this_token_to_string)[]);
                         let box_span = mk_sp(lo, self.last_span.hi);
                         self.span_help(box_span,
                                        "perhaps you meant `box() (foo)` instead?");
@@ -3264,7 +3264,7 @@ impl<'a> Parser<'a> {
                 if self.token != token::CloseDelim(token::Brace) {
                     let token_str = self.this_token_to_string();
                     self.fatal(format!("expected `{}`, found `{}`", "}",
-                                       token_str).as_slice())
+                                       token_str)[])
                 }
                 etc = true;
                 break;
@@ -3285,7 +3285,7 @@ impl<'a> Parser<'a> {
                     BindByRef(..) | BindByValue(MutMutable) => {
                         let token_str = self.this_token_to_string();
                         self.fatal(format!("unexpected `{}`",
-                                           token_str).as_slice())
+                                           token_str)[])
                     }
                     _ => {}
                 }
@@ -3563,7 +3563,7 @@ impl<'a> Parser<'a> {
             let span = self.span;
             let tok_str = self.this_token_to_string();
             self.span_fatal(span,
-                            format!("expected identifier, found `{}`", tok_str).as_slice());
+                            format!("expected identifier, found `{}`", tok_str)[]);
         }
         let ident = self.parse_ident();
         let last_span = self.last_span;
@@ -3664,7 +3664,7 @@ impl<'a> Parser<'a> {
 
         let lo = self.span.lo;
         if self.token.is_keyword(keywords::Let) {
-            check_expected_item(self, item_attrs.as_slice());
+            check_expected_item(self, item_attrs[]);
             self.expect_keyword(keywords::Let);
             let decl = self.parse_let();
             P(spanned(lo, decl.span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID)))
@@ -3673,7 +3673,7 @@ impl<'a> Parser<'a> {
             && self.look_ahead(1, |t| *t == token::Not) {
             // it's a macro invocation:
 
-            check_expected_item(self, item_attrs.as_slice());
+            check_expected_item(self, item_attrs[]);
 
             // Potential trouble: if we allow macros with paths instead of
             // idents, we'd need to look ahead past the whole path here...
@@ -3701,7 +3701,7 @@ impl<'a> Parser<'a> {
                     let tok_str = self.this_token_to_string();
                     self.fatal(format!("expected {}`(` or `{{`, found `{}`",
                                        ident_str,
-                                       tok_str).as_slice())
+                                       tok_str)[])
                 },
             };
 
@@ -3749,7 +3749,7 @@ impl<'a> Parser<'a> {
             }
         } else {
             let found_attrs = !item_attrs.is_empty();
-            let item_err = Parser::expected_item_err(item_attrs.as_slice());
+            let item_err = Parser::expected_item_err(item_attrs[]);
             match self.parse_item_or_view_item(item_attrs, false) {
                 IoviItem(i) => {
                     let hi = i.span.hi;
@@ -3793,7 +3793,7 @@ impl<'a> Parser<'a> {
             let sp = self.span;
             let tok = self.this_token_to_string();
             self.span_fatal_help(sp,
-                                 format!("expected `{{`, found `{}`", tok).as_slice(),
+                                 format!("expected `{{`, found `{}`", tok)[],
                                  "place this code inside a block");
         }
 
@@ -3847,13 +3847,13 @@ impl<'a> Parser<'a> {
         while self.token != token::CloseDelim(token::Brace) {
             // parsing items even when they're not allowed lets us give
             // better error messages and recover more gracefully.
-            attributes_box.push_all(self.parse_outer_attributes().as_slice());
+            attributes_box.push_all(self.parse_outer_attributes()[]);
             match self.token {
                 token::Semi => {
                     if !attributes_box.is_empty() {
                         let last_span = self.last_span;
                         self.span_err(last_span,
-                                      Parser::expected_item_err(attributes_box.as_slice()));
+                                      Parser::expected_item_err(attributes_box[]));
                         attributes_box = Vec::new();
                     }
                     self.bump(); // empty
@@ -3944,7 +3944,7 @@ impl<'a> Parser<'a> {
         if !attributes_box.is_empty() {
             let last_span = self.last_span;
             self.span_err(last_span,
-                          Parser::expected_item_err(attributes_box.as_slice()));
+                          Parser::expected_item_err(attributes_box[]));
         }
 
         let hi = self.span.hi;
@@ -4362,7 +4362,7 @@ impl<'a> Parser<'a> {
             _ => {
                 let token_str = self.this_token_to_string();
                 self.fatal(format!("expected `self`, found `{}`",
-                                   token_str).as_slice())
+                                   token_str)[])
             }
         }
     }
@@ -4516,7 +4516,7 @@ impl<'a> Parser<'a> {
                 _ => {
                     let token_str = self.this_token_to_string();
                     self.fatal(format!("expected `,` or `)`, found `{}`",
-                                       token_str).as_slice())
+                                       token_str)[])
                 }
             }
             }
@@ -4692,7 +4692,7 @@ impl<'a> Parser<'a> {
                 let (inner_attrs, body) = self.parse_inner_attrs_and_block();
                 let body_span = body.span;
                 let mut new_attrs = attrs;
-                new_attrs.push_all(inner_attrs.as_slice());
+                new_attrs.push_all(inner_attrs[]);
                 (ast::MethDecl(ident,
                                generics,
                                abi,
@@ -4849,7 +4849,7 @@ impl<'a> Parser<'a> {
             if fields.len() == 0 {
                 self.fatal(format!("unit-like struct definition should be \
                                     written as `struct {};`",
-                                   token::get_ident(class_name)).as_slice());
+                                   token::get_ident(class_name))[]);
             }
             self.bump();
         } else if self.check(&token::OpenDelim(token::Paren)) {
@@ -4873,7 +4873,7 @@ impl<'a> Parser<'a> {
             if fields.len() == 0 {
                 self.fatal(format!("unit-like struct definition should be \
                                     written as `struct {};`",
-                                   token::get_ident(class_name)).as_slice());
+                                   token::get_ident(class_name))[]);
             }
             self.expect(&token::Semi);
         } else if self.eat(&token::Semi) {
@@ -4884,7 +4884,7 @@ impl<'a> Parser<'a> {
             let token_str = self.this_token_to_string();
             self.fatal(format!("expected `{}`, `(`, or `;` after struct \
                                 name, found `{}`", "{",
-                               token_str).as_slice())
+                               token_str)[])
         }
 
         let _ = ast::DUMMY_NODE_ID;  // FIXME: Workaround for crazy bug.
@@ -4913,7 +4913,7 @@ impl<'a> Parser<'a> {
                 let token_str = self.this_token_to_string();
                 self.span_fatal_help(span,
                                      format!("expected `,`, or `}}`, found `{}`",
-                                             token_str).as_slice(),
+                                             token_str)[],
                                      "struct fields should be separated by commas")
             }
         }
@@ -4983,7 +4983,7 @@ impl<'a> Parser<'a> {
             let mut attrs = self.parse_outer_attributes();
             if first {
                 let mut tmp = attrs_remaining.clone();
-                tmp.push_all(attrs.as_slice());
+                tmp.push_all(attrs[]);
                 attrs = tmp;
                 first = false;
             }
@@ -5000,7 +5000,7 @@ impl<'a> Parser<'a> {
               _ => {
                   let token_str = self.this_token_to_string();
                   self.fatal(format!("expected item, found `{}`",
-                                     token_str).as_slice())
+                                     token_str)[])
               }
             }
         }
@@ -5009,7 +5009,7 @@ impl<'a> Parser<'a> {
             // We parsed attributes for the first item but didn't find it
             let last_span = self.last_span;
             self.span_err(last_span,
-                          Parser::expected_item_err(attrs_remaining.as_slice()));
+                          Parser::expected_item_err(attrs_remaining[]));
         }
 
         ast::Mod {
@@ -5079,7 +5079,7 @@ impl<'a> Parser<'a> {
                     -> (ast::Item_, Vec<ast::Attribute> ) {
         let mut prefix = Path::new(self.sess.span_diagnostic.cm.span_to_filename(self.span));
         prefix.pop();
-        let mod_path = Path::new(".").join_many(self.mod_path_stack.as_slice());
+        let mod_path = Path::new(".").join_many(self.mod_path_stack[]);
         let dir_path = prefix.join(&mod_path);
         let mod_string = token::get_ident(id);
         let (file_path, owns_directory) = match ::attr::first_attr_value_str_by_name(
@@ -5089,8 +5089,8 @@ impl<'a> Parser<'a> {
                 let mod_name = mod_string.get().to_string();
                 let default_path_str = format!("{}.rs", mod_name);
                 let secondary_path_str = format!("{}/mod.rs", mod_name);
-                let default_path = dir_path.join(default_path_str.as_slice());
-                let secondary_path = dir_path.join(secondary_path_str.as_slice());
+                let default_path = dir_path.join(default_path_str[]);
+                let secondary_path = dir_path.join(secondary_path_str[]);
                 let default_exists = default_path.exists();
                 let secondary_exists = secondary_path.exists();
 
@@ -5105,13 +5105,13 @@ impl<'a> Parser<'a> {
                                    format!("maybe move this module `{0}` \
                                             to its own directory via \
                                             `{0}/mod.rs`",
-                                           this_module).as_slice());
+                                           this_module)[]);
                     if default_exists || secondary_exists {
                         self.span_note(id_sp,
                                        format!("... or maybe `use` the module \
                                                 `{}` instead of possibly \
                                                 redeclaring it",
-                                               mod_name).as_slice());
+                                               mod_name)[]);
                     }
                     self.abort_if_errors();
                 }
@@ -5122,12 +5122,12 @@ impl<'a> Parser<'a> {
                     (false, false) => {
                         self.span_fatal_help(id_sp,
                                              format!("file not found for module `{}`",
-                                                     mod_name).as_slice(),
+                                                     mod_name)[],
                                              format!("name the file either {} or {} inside \
                                                      the directory {}",
                                                      default_path_str,
                                                      secondary_path_str,
-                                                     dir_path.display()).as_slice());
+                                                     dir_path.display())[]);
                     }
                     (true, true) => {
                         self.span_fatal_help(
@@ -5136,7 +5136,7 @@ impl<'a> Parser<'a> {
                                      and {}",
                                     mod_name,
                                     default_path_str,
-                                    secondary_path_str).as_slice(),
+                                    secondary_path_str)[],
                             "delete or rename one of them to remove the ambiguity");
                     }
                 }
@@ -5158,11 +5158,11 @@ impl<'a> Parser<'a> {
                 let mut err = String::from_str("circular modules: ");
                 let len = included_mod_stack.len();
                 for p in included_mod_stack.slice(i, len).iter() {
-                    err.push_str(p.display().as_cow().as_slice());
+                    err.push_str(p.display().as_cow()[]);
                     err.push_str(" -> ");
                 }
-                err.push_str(path.display().as_cow().as_slice());
-                self.span_fatal(id_sp, err.as_slice());
+                err.push_str(path.display().as_cow()[]);
+                self.span_fatal(id_sp, err[]);
             }
             None => ()
         }
@@ -5243,7 +5243,7 @@ impl<'a> Parser<'a> {
         if !attrs_remaining.is_empty() {
             let last_span = self.last_span;
             self.span_err(last_span,
-                          Parser::expected_item_err(attrs_remaining.as_slice()));
+                          Parser::expected_item_err(attrs_remaining[]));
         }
         assert!(self.token == token::CloseDelim(token::Brace));
         ast::ForeignMod {
@@ -5284,7 +5284,7 @@ impl<'a> Parser<'a> {
                     self.span_help(span,
                                    format!("perhaps you meant to enclose the crate name `{}` in \
                                            a string?",
-                                          the_ident.as_str()).as_slice());
+                                          the_ident.as_str())[]);
                     None
                 } else {
                     None
@@ -5310,7 +5310,7 @@ impl<'a> Parser<'a> {
                 self.span_fatal(span,
                                 format!("expected extern crate name but \
                                          found `{}`",
-                                        token_str).as_slice());
+                                        token_str)[]);
             }
         };
 
@@ -5408,7 +5408,7 @@ impl<'a> Parser<'a> {
                     self.span_err(start_span,
                         format!("unit-like struct variant should be written \
                                  without braces, as `{},`",
-                                token::get_ident(ident)).as_slice());
+                                token::get_ident(ident))[]);
                 }
                 kind = StructVariantKind(struct_def);
             } else if self.check(&token::OpenDelim(token::Paren)) {
@@ -5493,7 +5493,7 @@ impl<'a> Parser<'a> {
                             format!("illegal ABI: expected one of [{}], \
                                      found `{}`",
                                     abi::all_names().connect(", "),
-                                    the_string).as_slice());
+                                    the_string)[]);
                         None
                     }
                 }
@@ -5555,7 +5555,7 @@ impl<'a> Parser<'a> {
                                  format!("`extern mod` is obsolete, use \
                                           `extern crate` instead \
                                           to refer to external \
-                                          crates.").as_slice())
+                                          crates.")[])
                 }
                 return self.parse_item_extern_crate(lo, visibility, attrs);
             }
@@ -5583,7 +5583,7 @@ impl<'a> Parser<'a> {
             let token_str = self.this_token_to_string();
             self.span_fatal(span,
                             format!("expected `{}` or `fn`, found `{}`", "{",
-                                    token_str).as_slice());
+                                    token_str)[]);
         }
 
         if self.eat_keyword(keywords::Virtual) {
@@ -5696,7 +5696,7 @@ impl<'a> Parser<'a> {
         if self.eat_keyword(keywords::Mod) {
             // MODULE ITEM
             let (ident, item_, extra_attrs) =
-                self.parse_item_mod(attrs.as_slice());
+                self.parse_item_mod(attrs[]);
             let last_span = self.last_span;
             let item = self.mk_item(lo,
                                     last_span.hi,
@@ -6031,7 +6031,7 @@ impl<'a> Parser<'a> {
                                   macros_allowed: bool)
                                   -> ParsedItemsAndViewItems {
         let mut attrs = first_item_attrs;
-        attrs.push_all(self.parse_outer_attributes().as_slice());
+        attrs.push_all(self.parse_outer_attributes()[]);
         // First, parse view items.
         let mut view_items : Vec<ast::ViewItem> = Vec::new();
         let mut items = Vec::new();
@@ -6113,7 +6113,7 @@ impl<'a> Parser<'a> {
                            macros_allowed: bool)
         -> ParsedItemsAndViewItems {
         let mut attrs = first_item_attrs;
-        attrs.push_all(self.parse_outer_attributes().as_slice());
+        attrs.push_all(self.parse_outer_attributes()[]);
         let mut foreign_items = Vec::new();
         loop {
             match self.parse_foreign_item(attrs, macros_allowed) {
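
The bulk of the parser changes above swap `.as_slice()` for the old full-range indexing sugar `expr[]`, which borrows an entire `String` or `Vec` as a slice. A minimal sketch of the same idea in current Rust, where the equivalent spelling is `&expr[..]`; the names below are illustrative only, not taken from the patch:

    fn main() {
        let owned: String = format!("expected item, found `{}`", "fn");
        let attrs: Vec<u32> = vec![1, 2, 3];

        // Borrow the whole String as a &str via range indexing...
        let msg: &str = &owned[..];
        // ...and the whole Vec as a &[T] the same way, no named method needed.
        let view: &[u32] = &attrs[..];

        assert_eq!(msg, owned.as_str());
        assert_eq!(view.len(), 3);
    }
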
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index dad369792d7..9e61eaae352 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -454,7 +454,7 @@ macro_rules! declare_special_idents_and_keywords {(
         $(init_vec.push($si_str);)*
         $(init_vec.push($sk_str);)*
         $(init_vec.push($rk_str);)*
-        interner::StrInterner::prefill(init_vec.as_slice())
+        interner::StrInterner::prefill(init_vec[])
     }
 }}
 
@@ -602,10 +602,14 @@ impl InternedString {
 
     #[inline]
     pub fn get<'a>(&'a self) -> &'a str {
-        self.string.as_slice()
+        self.string[]
     }
 }
 
+impl Deref<str> for InternedString {
+    fn deref(&self) -> &str { &*self.string }
+}
+
 impl BytesContainer for InternedString {
     fn container_as_bytes<'a>(&'a self) -> &'a [u8] {
         // FIXME #12938: This is a workaround for the incorrect signature
@@ -620,49 +624,49 @@ impl BytesContainer for InternedString {
 
 impl fmt::Show for InternedString {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{}", self.string.as_slice())
+        write!(f, "{}", self.string[])
     }
 }
 
 #[allow(deprecated)]
 impl<'a> Equiv<&'a str> for InternedString {
     fn equiv(&self, other: & &'a str) -> bool {
-        (*other) == self.string.as_slice()
+        (*other) == self.string[]
     }
 }
 
 impl<'a> PartialEq<&'a str> for InternedString {
     #[inline(always)]
     fn eq(&self, other: & &'a str) -> bool {
-        PartialEq::eq(self.string.as_slice(), *other)
+        PartialEq::eq(self.string[], *other)
     }
     #[inline(always)]
     fn ne(&self, other: & &'a str) -> bool {
-        PartialEq::ne(self.string.as_slice(), *other)
+        PartialEq::ne(self.string[], *other)
     }
 }
 
 impl<'a> PartialEq<InternedString > for &'a str {
     #[inline(always)]
     fn eq(&self, other: &InternedString) -> bool {
-        PartialEq::eq(*self, other.string.as_slice())
+        PartialEq::eq(*self, other.string[])
     }
     #[inline(always)]
     fn ne(&self, other: &InternedString) -> bool {
-        PartialEq::ne(*self, other.string.as_slice())
+        PartialEq::ne(*self, other.string[])
     }
 }
 
 impl<D:Decoder<E>, E> Decodable<D, E> for InternedString {
     fn decode(d: &mut D) -> Result<InternedString, E> {
         Ok(get_name(get_ident_interner().intern(
-                    try!(d.read_str()).as_slice())))
+                    try!(d.read_str())[])))
     }
 }
 
 impl<S:Encoder<E>, E> Encodable<S, E> for InternedString {
     fn encode(&self, s: &mut S) -> Result<(), E> {
-        s.emit_str(self.string.as_slice())
+        s.emit_str(self.string[])
     }
 }
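
The token.rs hunk above also adds a `Deref` impl so that `InternedString` can be used wherever a `&str` is expected, instead of going through `.get()` or `.as_slice()`. A minimal sketch of that pattern in current Rust, using a hypothetical `Interned` wrapper in place of the real type:

    use std::ops::Deref;
    use std::rc::Rc;

    // Hypothetical stand-in for InternedString: a shared, immutable string.
    struct Interned {
        string: Rc<String>,
    }

    impl Deref for Interned {
        type Target = str;
        fn deref(&self) -> &str {
            // &Rc<String> coerces through String down to &str.
            &self.string
        }
    }

    fn takes_str(s: &str) -> usize {
        s.len()
    }

    fn main() {
        let name = Interned { string: Rc::new("std".to_string()) };
        // Deref coercion passes the wrapper where &str is expected,
        // and str methods resolve directly on the wrapper.
        assert_eq!(takes_str(&name), 3);
        assert!(name.starts_with("st"));
    }
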
 
diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs
index bfa47a46e74..ab0e0f9585c 100644
--- a/src/libsyntax/print/pp.rs
+++ b/src/libsyntax/print/pp.rs
@@ -139,12 +139,12 @@ pub fn buf_str(toks: Vec<Token>,
         }
         s.push_str(format!("{}={}",
                            szs[i],
-                           tok_str(toks[i].clone())).as_slice());
+                           tok_str(toks[i].clone()))[]);
         i += 1u;
         i %= n;
     }
     s.push(']');
-    return s.into_string();
+    s
 }
 
 #[deriving(Copy)]
@@ -601,7 +601,7 @@ impl Printer {
             assert_eq!(l, len);
             // assert!(l <= space);
             self.space -= len;
-            self.print_str(s.as_slice())
+            self.print_str(s[])
           }
           Eof => {
             // Eof should never get here.
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index a9717a526ad..0d79b7cf925 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -30,6 +30,7 @@ use ptr::P;
 
 use std::{ascii, mem};
 use std::io::{mod, IoResult};
+use std::iter;
 
 pub enum AnnNode<'a> {
     NodeIdent(&'a ast::Ident),
@@ -113,7 +114,7 @@ pub fn print_crate<'a>(cm: &'a CodeMap,
                                       out,
                                       ann,
                                       is_expanded);
-    try!(s.print_mod(&krate.module, krate.attrs.as_slice()));
+    try!(s.print_mod(&krate.module, krate.attrs[]));
     try!(s.print_remaining_comments());
     eof(&mut s.s)
 }
@@ -197,56 +198,56 @@ pub fn binop_to_string(op: BinOpToken) -> &'static str {
 
 pub fn token_to_string(tok: &Token) -> String {
     match *tok {
-        token::Eq                   => "=".into_string(),
-        token::Lt                   => "<".into_string(),
-        token::Le                   => "<=".into_string(),
-        token::EqEq                 => "==".into_string(),
-        token::Ne                   => "!=".into_string(),
-        token::Ge                   => ">=".into_string(),
-        token::Gt                   => ">".into_string(),
-        token::Not                  => "!".into_string(),
-        token::Tilde                => "~".into_string(),
-        token::OrOr                 => "||".into_string(),
-        token::AndAnd               => "&&".into_string(),
-        token::BinOp(op)            => binop_to_string(op).into_string(),
+        token::Eq                   => "=".to_string(),
+        token::Lt                   => "<".to_string(),
+        token::Le                   => "<=".to_string(),
+        token::EqEq                 => "==".to_string(),
+        token::Ne                   => "!=".to_string(),
+        token::Ge                   => ">=".to_string(),
+        token::Gt                   => ">".to_string(),
+        token::Not                  => "!".to_string(),
+        token::Tilde                => "~".to_string(),
+        token::OrOr                 => "||".to_string(),
+        token::AndAnd               => "&&".to_string(),
+        token::BinOp(op)            => binop_to_string(op).to_string(),
         token::BinOpEq(op)          => format!("{}=", binop_to_string(op)),
 
         /* Structural symbols */
-        token::At                   => "@".into_string(),
-        token::Dot                  => ".".into_string(),
-        token::DotDot               => "..".into_string(),
-        token::DotDotDot            => "...".into_string(),
-        token::Comma                => ",".into_string(),
-        token::Semi                 => ";".into_string(),
-        token::Colon                => ":".into_string(),
-        token::ModSep               => "::".into_string(),
-        token::RArrow               => "->".into_string(),
-        token::LArrow               => "<-".into_string(),
-        token::FatArrow             => "=>".into_string(),
-        token::OpenDelim(token::Paren) => "(".into_string(),
-        token::CloseDelim(token::Paren) => ")".into_string(),
-        token::OpenDelim(token::Bracket) => "[".into_string(),
-        token::CloseDelim(token::Bracket) => "]".into_string(),
-        token::OpenDelim(token::Brace) => "{".into_string(),
-        token::CloseDelim(token::Brace) => "}".into_string(),
-        token::Pound                => "#".into_string(),
-        token::Dollar               => "$".into_string(),
-        token::Question             => "?".into_string(),
+        token::At                   => "@".to_string(),
+        token::Dot                  => ".".to_string(),
+        token::DotDot               => "..".to_string(),
+        token::DotDotDot            => "...".to_string(),
+        token::Comma                => ",".to_string(),
+        token::Semi                 => ";".to_string(),
+        token::Colon                => ":".to_string(),
+        token::ModSep               => "::".to_string(),
+        token::RArrow               => "->".to_string(),
+        token::LArrow               => "<-".to_string(),
+        token::FatArrow             => "=>".to_string(),
+        token::OpenDelim(token::Paren) => "(".to_string(),
+        token::CloseDelim(token::Paren) => ")".to_string(),
+        token::OpenDelim(token::Bracket) => "[".to_string(),
+        token::CloseDelim(token::Bracket) => "]".to_string(),
+        token::OpenDelim(token::Brace) => "{".to_string(),
+        token::CloseDelim(token::Brace) => "}".to_string(),
+        token::Pound                => "#".to_string(),
+        token::Dollar               => "$".to_string(),
+        token::Question             => "?".to_string(),
 
         /* Literals */
         token::Literal(lit, suf) => {
             let mut out = match lit {
                 token::Byte(b)           => format!("b'{}'", b.as_str()),
                 token::Char(c)           => format!("'{}'", c.as_str()),
-                token::Float(c)          => c.as_str().into_string(),
-                token::Integer(c)        => c.as_str().into_string(),
+                token::Float(c)          => c.as_str().to_string(),
+                token::Integer(c)        => c.as_str().to_string(),
                 token::Str_(s)           => format!("\"{}\"", s.as_str()),
                 token::StrRaw(s, n)      => format!("r{delim}\"{string}\"{delim}",
-                                                    delim="#".repeat(n),
+                                                    delim=repeat("#", n),
                                                     string=s.as_str()),
                 token::Binary(v)         => format!("b\"{}\"", v.as_str()),
                 token::BinaryRaw(s, n)   => format!("br{delim}\"{string}\"{delim}",
-                                                    delim="#".repeat(n),
+                                                    delim=repeat("#", n),
                                                     string=s.as_str()),
             };
 
@@ -258,17 +259,17 @@ pub fn token_to_string(tok: &Token) -> String {
         }
 
         /* Name components */
-        token::Ident(s, _)          => token::get_ident(s).get().into_string(),
+        token::Ident(s, _)          => token::get_ident(s).get().to_string(),
         token::Lifetime(s)          => format!("{}", token::get_ident(s)),
-        token::Underscore           => "_".into_string(),
+        token::Underscore           => "_".to_string(),
 
         /* Other */
-        token::DocComment(s)        => s.as_str().into_string(),
+        token::DocComment(s)        => s.as_str().to_string(),
         token::SubstNt(s, _)        => format!("${}", s),
         token::MatchNt(s, t, _, _)  => format!("${}:{}", s, t),
-        token::Eof                  => "<eof>".into_string(),
-        token::Whitespace           => " ".into_string(),
-        token::Comment              => "/* */".into_string(),
+        token::Eof                  => "<eof>".to_string(),
+        token::Whitespace           => " ".to_string(),
+        token::Comment              => "/* */".to_string(),
         token::Shebang(s)           => format!("/* shebang: {}*/", s.as_str()),
 
         token::Interpolated(ref nt) => match *nt {
@@ -276,12 +277,12 @@ pub fn token_to_string(tok: &Token) -> String {
             token::NtMeta(ref e)  => meta_item_to_string(&**e),
             token::NtTy(ref e)    => ty_to_string(&**e),
             token::NtPath(ref e)  => path_to_string(&**e),
-            token::NtItem(..)     => "an interpolated item".into_string(),
-            token::NtBlock(..)    => "an interpolated block".into_string(),
-            token::NtStmt(..)     => "an interpolated statement".into_string(),
-            token::NtPat(..)      => "an interpolated pattern".into_string(),
-            token::NtIdent(..)    => "an interpolated identifier".into_string(),
-            token::NtTT(..)       => "an interpolated tt".into_string(),
+            token::NtItem(..)     => "an interpolated item".to_string(),
+            token::NtBlock(..)    => "an interpolated block".to_string(),
+            token::NtStmt(..)     => "an interpolated statement".to_string(),
+            token::NtPat(..)      => "an interpolated pattern".to_string(),
+            token::NtIdent(..)    => "an interpolated identifier".to_string(),
+            token::NtTT(..)       => "an interpolated tt".to_string(),
         }
     }
 }
@@ -577,7 +578,7 @@ impl<'a> State<'a> {
     pub fn synth_comment(&mut self, text: String) -> IoResult<()> {
         try!(word(&mut self.s, "/*"));
         try!(space(&mut self.s));
-        try!(word(&mut self.s, text.as_slice()));
+        try!(word(&mut self.s, text[]));
         try!(space(&mut self.s));
         word(&mut self.s, "*/")
     }
@@ -682,7 +683,7 @@ impl<'a> State<'a> {
             }
             ast::TyTup(ref elts) => {
                 try!(self.popen());
-                try!(self.commasep(Inconsistent, elts.as_slice(),
+                try!(self.commasep(Inconsistent, elts[],
                                    |s, ty| s.print_type(&**ty)));
                 if elts.len() == 1 {
                     try!(word(&mut self.s, ","));
@@ -737,10 +738,10 @@ impl<'a> State<'a> {
             }
             ast::TyObjectSum(ref ty, ref bounds) => {
                 try!(self.print_type(&**ty));
-                try!(self.print_bounds("+", bounds.as_slice()));
+                try!(self.print_bounds("+", bounds[]));
             }
             ast::TyPolyTraitRef(ref bounds) => {
-                try!(self.print_bounds("", bounds.as_slice()));
+                try!(self.print_bounds("", bounds[]));
             }
             ast::TyQPath(ref qpath) => {
                 try!(word(&mut self.s, "<"));
@@ -775,7 +776,7 @@ impl<'a> State<'a> {
                               item: &ast::ForeignItem) -> IoResult<()> {
         try!(self.hardbreak_if_not_bol());
         try!(self.maybe_print_comment(item.span.lo));
-        try!(self.print_outer_attributes(item.attrs.as_slice()));
+        try!(self.print_outer_attributes(item.attrs[]));
         match item.node {
             ast::ForeignItemFn(ref decl, ref generics) => {
                 try!(self.print_fn(&**decl, None, abi::Rust, item.ident, generics,
@@ -786,7 +787,7 @@ impl<'a> State<'a> {
             }
             ast::ForeignItemStatic(ref t, m) => {
                 try!(self.head(visibility_qualified(item.vis,
-                                                    "static").as_slice()));
+                                                    "static")[]));
                 if m {
                     try!(self.word_space("mut"));
                 }
@@ -822,12 +823,12 @@ impl<'a> State<'a> {
     pub fn print_item(&mut self, item: &ast::Item) -> IoResult<()> {
         try!(self.hardbreak_if_not_bol());
         try!(self.maybe_print_comment(item.span.lo));
-        try!(self.print_outer_attributes(item.attrs.as_slice()));
+        try!(self.print_outer_attributes(item.attrs[]));
         try!(self.ann.pre(self, NodeItem(item)));
         match item.node {
             ast::ItemStatic(ref ty, m, ref expr) => {
                 try!(self.head(visibility_qualified(item.vis,
-                                                    "static").as_slice()));
+                                                    "static")[]));
                 if m == ast::MutMutable {
                     try!(self.word_space("mut"));
                 }
@@ -844,7 +845,7 @@ impl<'a> State<'a> {
             }
             ast::ItemConst(ref ty, ref expr) => {
                 try!(self.head(visibility_qualified(item.vis,
-                                                    "const").as_slice()));
+                                                    "const")[]));
                 try!(self.print_ident(item.ident));
                 try!(self.word_space(":"));
                 try!(self.print_type(&**ty));
@@ -867,29 +868,29 @@ impl<'a> State<'a> {
                     item.vis
                 ));
                 try!(word(&mut self.s, " "));
-                try!(self.print_block_with_attrs(&**body, item.attrs.as_slice()));
+                try!(self.print_block_with_attrs(&**body, item.attrs[]));
             }
             ast::ItemMod(ref _mod) => {
                 try!(self.head(visibility_qualified(item.vis,
-                                                    "mod").as_slice()));
+                                                    "mod")[]));
                 try!(self.print_ident(item.ident));
                 try!(self.nbsp());
                 try!(self.bopen());
-                try!(self.print_mod(_mod, item.attrs.as_slice()));
+                try!(self.print_mod(_mod, item.attrs[]));
                 try!(self.bclose(item.span));
             }
             ast::ItemForeignMod(ref nmod) => {
                 try!(self.head("extern"));
-                try!(self.word_nbsp(nmod.abi.to_string().as_slice()));
+                try!(self.word_nbsp(nmod.abi.to_string()[]));
                 try!(self.bopen());
-                try!(self.print_foreign_mod(nmod, item.attrs.as_slice()));
+                try!(self.print_foreign_mod(nmod, item.attrs[]));
                 try!(self.bclose(item.span));
             }
             ast::ItemTy(ref ty, ref params) => {
                 try!(self.ibox(indent_unit));
                 try!(self.ibox(0u));
                 try!(self.word_nbsp(visibility_qualified(item.vis,
-                                                         "type").as_slice()));
+                                                         "type")[]));
                 try!(self.print_ident(item.ident));
                 try!(self.print_generics(params));
                 try!(self.end()); // end the inner ibox
@@ -911,7 +912,7 @@ impl<'a> State<'a> {
                 ));
             }
             ast::ItemStruct(ref struct_def, ref generics) => {
-                try!(self.head(visibility_qualified(item.vis,"struct").as_slice()));
+                try!(self.head(visibility_qualified(item.vis,"struct")[]));
                 try!(self.print_struct(&**struct_def, generics, item.ident, item.span));
             }
 
@@ -944,7 +945,7 @@ impl<'a> State<'a> {
 
                 try!(space(&mut self.s));
                 try!(self.bopen());
-                try!(self.print_inner_attributes(item.attrs.as_slice()));
+                try!(self.print_inner_attributes(item.attrs[]));
                 for impl_item in impl_items.iter() {
                     match *impl_item {
                         ast::MethodImplItem(ref meth) => {
@@ -970,7 +971,7 @@ impl<'a> State<'a> {
                     try!(self.print_trait_ref(tref));
                     try!(word(&mut self.s, "?"));
                 }
-                try!(self.print_bounds(":", bounds.as_slice()));
+                try!(self.print_bounds(":", bounds[]));
                 try!(self.print_where_clause(generics));
                 try!(word(&mut self.s, " "));
                 try!(self.bopen());
@@ -988,7 +989,7 @@ impl<'a> State<'a> {
                 try!(self.print_ident(item.ident));
                 try!(self.cbox(indent_unit));
                 try!(self.popen());
-                try!(self.print_tts(tts.as_slice()));
+                try!(self.print_tts(tts[]));
                 try!(self.pclose());
                 try!(word(&mut self.s, ";"));
                 try!(self.end());
@@ -1022,12 +1023,12 @@ impl<'a> State<'a> {
                           generics: &ast::Generics, ident: ast::Ident,
                           span: codemap::Span,
                           visibility: ast::Visibility) -> IoResult<()> {
-        try!(self.head(visibility_qualified(visibility, "enum").as_slice()));
+        try!(self.head(visibility_qualified(visibility, "enum")[]));
         try!(self.print_ident(ident));
         try!(self.print_generics(generics));
         try!(self.print_where_clause(generics));
         try!(space(&mut self.s));
-        self.print_variants(enum_definition.variants.as_slice(), span)
+        self.print_variants(enum_definition.variants[], span)
     }
 
     pub fn print_variants(&mut self,
@@ -1037,7 +1038,7 @@ impl<'a> State<'a> {
         for v in variants.iter() {
             try!(self.space_if_not_bol());
             try!(self.maybe_print_comment(v.span.lo));
-            try!(self.print_outer_attributes(v.node.attrs.as_slice()));
+            try!(self.print_outer_attributes(v.node.attrs[]));
             try!(self.ibox(indent_unit));
             try!(self.print_variant(&**v));
             try!(word(&mut self.s, ","));
@@ -1066,7 +1067,7 @@ impl<'a> State<'a> {
             if !struct_def.fields.is_empty() {
                 try!(self.popen());
                 try!(self.commasep(
-                    Inconsistent, struct_def.fields.as_slice(),
+                    Inconsistent, struct_def.fields[],
                     |s, field| {
                         match field.node.kind {
                             ast::NamedField(..) => panic!("unexpected named field"),
@@ -1094,7 +1095,7 @@ impl<'a> State<'a> {
                     ast::NamedField(ident, visibility) => {
                         try!(self.hardbreak_if_not_bol());
                         try!(self.maybe_print_comment(field.span.lo));
-                        try!(self.print_outer_attributes(field.node.attrs.as_slice()));
+                        try!(self.print_outer_attributes(field.node.attrs[]));
                         try!(self.print_visibility(visibility));
                         try!(self.print_ident(ident));
                         try!(self.word_nbsp(":"));
@@ -1118,7 +1119,7 @@ impl<'a> State<'a> {
     pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
         match *tt {
             ast::TtToken(_, ref tk) => {
-                try!(word(&mut self.s, token_to_string(tk).as_slice()));
+                try!(word(&mut self.s, token_to_string(tk)[]));
                 match *tk {
                     parse::token::DocComment(..) => {
                         hardbreak(&mut self.s)
@@ -1127,11 +1128,11 @@ impl<'a> State<'a> {
                 }
             }
             ast::TtDelimited(_, ref delimed) => {
-                try!(word(&mut self.s, token_to_string(&delimed.open_token()).as_slice()));
+                try!(word(&mut self.s, token_to_string(&delimed.open_token())[]));
                 try!(space(&mut self.s));
-                try!(self.print_tts(delimed.tts.as_slice()));
+                try!(self.print_tts(delimed.tts[]));
                 try!(space(&mut self.s));
-                word(&mut self.s, token_to_string(&delimed.close_token()).as_slice())
+                word(&mut self.s, token_to_string(&delimed.close_token())[])
             },
             ast::TtSequence(_, ref seq) => {
                 try!(word(&mut self.s, "$("));
@@ -1141,7 +1142,7 @@ impl<'a> State<'a> {
                 try!(word(&mut self.s, ")"));
                 match seq.separator {
                     Some(ref tk) => {
-                        try!(word(&mut self.s, token_to_string(tk).as_slice()));
+                        try!(word(&mut self.s, token_to_string(tk)[]));
                     }
                     None => {},
                 }
@@ -1172,7 +1173,7 @@ impl<'a> State<'a> {
                 if !args.is_empty() {
                     try!(self.popen());
                     try!(self.commasep(Consistent,
-                                       args.as_slice(),
+                                       args[],
                                        |s, arg| s.print_type(&*arg.ty)));
                     try!(self.pclose());
                 }
@@ -1196,7 +1197,7 @@ impl<'a> State<'a> {
     pub fn print_ty_method(&mut self, m: &ast::TypeMethod) -> IoResult<()> {
         try!(self.hardbreak_if_not_bol());
         try!(self.maybe_print_comment(m.span.lo));
-        try!(self.print_outer_attributes(m.attrs.as_slice()));
+        try!(self.print_outer_attributes(m.attrs[]));
         try!(self.print_ty_fn(None,
                               None,
                               m.unsafety,
@@ -1228,7 +1229,7 @@ impl<'a> State<'a> {
     pub fn print_method(&mut self, meth: &ast::Method) -> IoResult<()> {
         try!(self.hardbreak_if_not_bol());
         try!(self.maybe_print_comment(meth.span.lo));
-        try!(self.print_outer_attributes(meth.attrs.as_slice()));
+        try!(self.print_outer_attributes(meth.attrs[]));
         match meth.node {
             ast::MethDecl(ident,
                           ref generics,
@@ -1246,7 +1247,7 @@ impl<'a> State<'a> {
                                    Some(&explicit_self.node),
                                    vis));
                 try!(word(&mut self.s, " "));
-                self.print_block_with_attrs(&**body, meth.attrs.as_slice())
+                self.print_block_with_attrs(&**body, meth.attrs[])
             },
             ast::MethMac(codemap::Spanned { node: ast::MacInvocTT(ref pth, ref tts, _),
                                             ..}) => {
@@ -1255,7 +1256,7 @@ impl<'a> State<'a> {
                 try!(word(&mut self.s, "! "));
                 try!(self.cbox(indent_unit));
                 try!(self.popen());
-                try!(self.print_tts(tts.as_slice()));
+                try!(self.print_tts(tts[]));
                 try!(self.pclose());
                 try!(word(&mut self.s, ";"));
                 self.end()
@@ -1522,7 +1523,7 @@ impl<'a> State<'a> {
             ast::ExprVec(ref exprs) => {
                 try!(self.ibox(indent_unit));
                 try!(word(&mut self.s, "["));
-                try!(self.commasep_exprs(Inconsistent, exprs.as_slice()));
+                try!(self.commasep_exprs(Inconsistent, exprs[]));
                 try!(word(&mut self.s, "]"));
                 try!(self.end());
             }
@@ -1542,7 +1543,7 @@ impl<'a> State<'a> {
                 try!(word(&mut self.s, "{"));
                 try!(self.commasep_cmnt(
                     Consistent,
-                    fields.as_slice(),
+                    fields[],
                     |s, field| {
                         try!(s.ibox(indent_unit));
                         try!(s.print_ident(field.ident.node));
@@ -1568,7 +1569,7 @@ impl<'a> State<'a> {
             }
             ast::ExprTup(ref exprs) => {
                 try!(self.popen());
-                try!(self.commasep_exprs(Inconsistent, exprs.as_slice()));
+                try!(self.commasep_exprs(Inconsistent, exprs[]));
                 if exprs.len() == 1 {
                     try!(word(&mut self.s, ","));
                 }
@@ -1576,7 +1577,7 @@ impl<'a> State<'a> {
             }
             ast::ExprCall(ref func, ref args) => {
                 try!(self.print_expr_maybe_paren(&**func));
-                try!(self.print_call_post(args.as_slice()));
+                try!(self.print_call_post(args[]));
             }
             ast::ExprMethodCall(ident, ref tys, ref args) => {
                 let base_args = args.slice_from(1);
@@ -1585,7 +1586,7 @@ impl<'a> State<'a> {
                 try!(self.print_ident(ident.node));
                 if tys.len() > 0u {
                     try!(word(&mut self.s, "::<"));
-                    try!(self.commasep(Inconsistent, tys.as_slice(),
+                    try!(self.commasep(Inconsistent, tys[],
                                        |s, ty| s.print_type(&**ty)));
                     try!(word(&mut self.s, ">"));
                 }
@@ -1795,11 +1796,11 @@ impl<'a> State<'a> {
                 try!(self.print_string(a.asm.get(), a.asm_str_style));
                 try!(self.word_space(":"));
 
-                try!(self.commasep(Inconsistent, a.outputs.as_slice(),
+                try!(self.commasep(Inconsistent, a.outputs[],
                                    |s, &(ref co, ref o, is_rw)| {
                     match co.get().slice_shift_char() {
                         Some(('=', operand)) if is_rw => {
-                            try!(s.print_string(format!("+{}", operand).as_slice(),
+                            try!(s.print_string(format!("+{}", operand)[],
                                                 ast::CookedStr))
                         }
                         _ => try!(s.print_string(co.get(), ast::CookedStr))
@@ -1812,7 +1813,7 @@ impl<'a> State<'a> {
                 try!(space(&mut self.s));
                 try!(self.word_space(":"));
 
-                try!(self.commasep(Inconsistent, a.inputs.as_slice(),
+                try!(self.commasep(Inconsistent, a.inputs[],
                                    |s, &(ref co, ref o)| {
                     try!(s.print_string(co.get(), ast::CookedStr));
                     try!(s.popen());
@@ -1823,7 +1824,7 @@ impl<'a> State<'a> {
                 try!(space(&mut self.s));
                 try!(self.word_space(":"));
 
-                try!(self.commasep(Inconsistent, a.clobbers.as_slice(),
+                try!(self.commasep(Inconsistent, a.clobbers[],
                                    |s, co| {
                     try!(s.print_string(co.get(), ast::CookedStr));
                     Ok(())
@@ -1877,7 +1878,7 @@ impl<'a> State<'a> {
     pub fn print_ident(&mut self, ident: ast::Ident) -> IoResult<()> {
         if self.encode_idents_with_hygiene {
             let encoded = ident.encode_with_hygiene();
-            try!(word(&mut self.s, encoded.as_slice()))
+            try!(word(&mut self.s, encoded[]))
         } else {
             try!(word(&mut self.s, token::get_ident(ident).get()))
         }
@@ -1885,7 +1886,7 @@ impl<'a> State<'a> {
     }
 
     pub fn print_uint(&mut self, i: uint) -> IoResult<()> {
-        word(&mut self.s, i.to_string().as_slice())
+        word(&mut self.s, i.to_string()[])
     }
 
     pub fn print_name(&mut self, name: ast::Name) -> IoResult<()> {
@@ -1959,7 +1960,7 @@ impl<'a> State<'a> {
                     }
                     try!(self.commasep(
                         Inconsistent,
-                        data.types.as_slice(),
+                        data.types[],
                         |s, ty| s.print_type(&**ty)));
                         comma = true;
                 }
@@ -1982,7 +1983,7 @@ impl<'a> State<'a> {
                 try!(word(&mut self.s, "("));
                 try!(self.commasep(
                     Inconsistent,
-                    data.inputs.as_slice(),
+                    data.inputs[],
                     |s, ty| s.print_type(&**ty)));
                 try!(word(&mut self.s, ")"));
 
@@ -2035,7 +2036,7 @@ impl<'a> State<'a> {
                     Some(ref args) => {
                         if !args.is_empty() {
                             try!(self.popen());
-                            try!(self.commasep(Inconsistent, args.as_slice(),
+                            try!(self.commasep(Inconsistent, args[],
                                               |s, p| s.print_pat(&**p)));
                             try!(self.pclose());
                         }
@@ -2047,7 +2048,7 @@ impl<'a> State<'a> {
                 try!(self.nbsp());
                 try!(self.word_space("{"));
                 try!(self.commasep_cmnt(
-                    Consistent, fields.as_slice(),
+                    Consistent, fields[],
                     |s, f| {
                         try!(s.cbox(indent_unit));
                         if !f.node.is_shorthand {
@@ -2068,7 +2069,7 @@ impl<'a> State<'a> {
             ast::PatTup(ref elts) => {
                 try!(self.popen());
                 try!(self.commasep(Inconsistent,
-                                   elts.as_slice(),
+                                   elts[],
                                    |s, p| s.print_pat(&**p)));
                 if elts.len() == 1 {
                     try!(word(&mut self.s, ","));
@@ -2093,7 +2094,7 @@ impl<'a> State<'a> {
             ast::PatVec(ref before, ref slice, ref after) => {
                 try!(word(&mut self.s, "["));
                 try!(self.commasep(Inconsistent,
-                                   before.as_slice(),
+                                   before[],
                                    |s, p| s.print_pat(&**p)));
                 for p in slice.iter() {
                     if !before.is_empty() { try!(self.word_space(",")); }
@@ -2107,7 +2108,7 @@ impl<'a> State<'a> {
                     if !after.is_empty() { try!(self.word_space(",")); }
                 }
                 try!(self.commasep(Inconsistent,
-                                   after.as_slice(),
+                                   after[],
                                    |s, p| s.print_pat(&**p)));
                 try!(word(&mut self.s, "]"));
             }
@@ -2124,7 +2125,7 @@ impl<'a> State<'a> {
         }
         try!(self.cbox(indent_unit));
         try!(self.ibox(0u));
-        try!(self.print_outer_attributes(arm.attrs.as_slice()));
+        try!(self.print_outer_attributes(arm.attrs[]));
         let mut first = true;
         for p in arm.pats.iter() {
             if first {
@@ -2224,7 +2225,7 @@ impl<'a> State<'a> {
 
         // HACK(eddyb) ignore the separately printed self argument.
         let args = if first {
-            decl.inputs.as_slice()
+            decl.inputs[]
         } else {
             decl.inputs.slice_from(1)
         };
@@ -2386,7 +2387,7 @@ impl<'a> State<'a> {
             ints.push(i);
         }
 
-        try!(self.commasep(Inconsistent, ints.as_slice(), |s, &idx| {
+        try!(self.commasep(Inconsistent, ints[], |s, &idx| {
             if idx < generics.lifetimes.len() {
                 let lifetime = &generics.lifetimes[idx];
                 s.print_lifetime_def(lifetime)
@@ -2407,7 +2408,7 @@ impl<'a> State<'a> {
             try!(self.word_space("?"));
         }
         try!(self.print_ident(param.ident));
-        try!(self.print_bounds(":", param.bounds.as_slice()));
+        try!(self.print_bounds(":", param.bounds[]));
         match param.default {
             Some(ref default) => {
                 try!(space(&mut self.s));
@@ -2483,7 +2484,7 @@ impl<'a> State<'a> {
                 try!(word(&mut self.s, name.get()));
                 try!(self.popen());
                 try!(self.commasep(Consistent,
-                                   items.as_slice(),
+                                   items[],
                                    |s, i| s.print_meta_item(&**i)));
                 try!(self.pclose());
             }
@@ -2519,7 +2520,7 @@ impl<'a> State<'a> {
                     try!(self.print_path(path, false));
                     try!(word(&mut self.s, "::{"));
                 }
-                try!(self.commasep(Inconsistent, idents.as_slice(), |s, w| {
+                try!(self.commasep(Inconsistent, idents[], |s, w| {
                     match w.node {
                         ast::PathListIdent { name, .. } => {
                             s.print_ident(name)
@@ -2537,7 +2538,7 @@ impl<'a> State<'a> {
     pub fn print_view_item(&mut self, item: &ast::ViewItem) -> IoResult<()> {
         try!(self.hardbreak_if_not_bol());
         try!(self.maybe_print_comment(item.span.lo));
-        try!(self.print_outer_attributes(item.attrs.as_slice()));
+        try!(self.print_outer_attributes(item.attrs[]));
         try!(self.print_visibility(item.vis));
         match item.node {
             ast::ViewItemExternCrate(id, ref optional_path, _) => {
@@ -2679,7 +2680,7 @@ impl<'a> State<'a> {
             try!(self.pclose());
         }
 
-        try!(self.print_bounds(":", bounds.as_slice()));
+        try!(self.print_bounds(":", bounds[]));
 
         try!(self.print_fn_output(decl));
 
@@ -2738,7 +2739,7 @@ impl<'a> State<'a> {
         try!(self.maybe_print_comment(lit.span.lo));
         match self.next_lit(lit.span.lo) {
             Some(ref ltrl) => {
-                return word(&mut self.s, (*ltrl).lit.as_slice());
+                return word(&mut self.s, (*ltrl).lit[]);
             }
             _ => ()
         }
@@ -2748,7 +2749,7 @@ impl<'a> State<'a> {
                 let mut res = String::from_str("b'");
                 ascii::escape_default(byte, |c| res.push(c as char));
                 res.push('\'');
-                word(&mut self.s, res.as_slice())
+                word(&mut self.s, res[])
             }
             ast::LitChar(ch) => {
                 let mut res = String::from_str("'");
@@ -2756,27 +2757,27 @@ impl<'a> State<'a> {
                     res.push(c);
                 }
                 res.push('\'');
-                word(&mut self.s, res.as_slice())
+                word(&mut self.s, res[])
             }
             ast::LitInt(i, t) => {
                 match t {
                     ast::SignedIntLit(st, ast::Plus) => {
                         word(&mut self.s,
-                             ast_util::int_ty_to_string(st, Some(i as i64)).as_slice())
+                             ast_util::int_ty_to_string(st, Some(i as i64))[])
                     }
                     ast::SignedIntLit(st, ast::Minus) => {
                         let istr = ast_util::int_ty_to_string(st, Some(-(i as i64)));
                         word(&mut self.s,
-                             format!("-{}", istr).as_slice())
+                             format!("-{}", istr)[])
                     }
                     ast::UnsignedIntLit(ut) => {
-                        word(&mut self.s, ast_util::uint_ty_to_string(ut, Some(i)).as_slice())
+                        word(&mut self.s, ast_util::uint_ty_to_string(ut, Some(i))[])
                     }
                     ast::UnsuffixedIntLit(ast::Plus) => {
-                        word(&mut self.s, format!("{}", i).as_slice())
+                        word(&mut self.s, format!("{}", i)[])
                     }
                     ast::UnsuffixedIntLit(ast::Minus) => {
-                        word(&mut self.s, format!("-{}", i).as_slice())
+                        word(&mut self.s, format!("-{}", i)[])
                     }
                 }
             }
@@ -2785,7 +2786,7 @@ impl<'a> State<'a> {
                      format!(
                          "{}{}",
                          f.get(),
-                         ast_util::float_ty_to_string(t).as_slice()).as_slice())
+                         ast_util::float_ty_to_string(t)[])[])
             }
             ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.get()),
             ast::LitBool(val) => {
@@ -2797,7 +2798,7 @@ impl<'a> State<'a> {
                     ascii::escape_default(ch as u8,
                                           |ch| escaped.push(ch as char));
                 }
-                word(&mut self.s, format!("b\"{}\"", escaped).as_slice())
+                word(&mut self.s, format!("b\"{}\"", escaped)[])
             }
         }
     }
@@ -2838,7 +2839,7 @@ impl<'a> State<'a> {
             comments::Mixed => {
                 assert_eq!(cmnt.lines.len(), 1u);
                 try!(zerobreak(&mut self.s));
-                try!(word(&mut self.s, cmnt.lines[0].as_slice()));
+                try!(word(&mut self.s, cmnt.lines[0][]));
                 zerobreak(&mut self.s)
             }
             comments::Isolated => {
@@ -2847,7 +2848,7 @@ impl<'a> State<'a> {
                     // Don't print empty lines because they will end up as trailing
                     // whitespace
                     if !line.is_empty() {
-                        try!(word(&mut self.s, line.as_slice()));
+                        try!(word(&mut self.s, line[]));
                     }
                     try!(hardbreak(&mut self.s));
                 }
@@ -2856,13 +2857,13 @@ impl<'a> State<'a> {
             comments::Trailing => {
                 try!(word(&mut self.s, " "));
                 if cmnt.lines.len() == 1u {
-                    try!(word(&mut self.s, cmnt.lines[0].as_slice()));
+                    try!(word(&mut self.s, cmnt.lines[0][]));
                     hardbreak(&mut self.s)
                 } else {
                     try!(self.ibox(0u));
                     for line in cmnt.lines.iter() {
                         if !line.is_empty() {
-                            try!(word(&mut self.s, line.as_slice()));
+                            try!(word(&mut self.s, line[]));
                         }
                         try!(hardbreak(&mut self.s));
                     }
@@ -2891,11 +2892,11 @@ impl<'a> State<'a> {
             }
             ast::RawStr(n) => {
                 (format!("r{delim}\"{string}\"{delim}",
-                         delim="#".repeat(n),
+                         delim=repeat("#", n),
                          string=st))
             }
         };
-        word(&mut self.s, st.as_slice())
+        word(&mut self.s, st[])
     }
 
     pub fn next_comment(&mut self) -> Option<comments::Comment> {
@@ -2926,7 +2927,7 @@ impl<'a> State<'a> {
             Some(abi::Rust) => Ok(()),
             Some(abi) => {
                 try!(self.word_nbsp("extern"));
-                self.word_nbsp(abi.to_string().as_slice())
+                self.word_nbsp(abi.to_string()[])
             }
             None => Ok(())
         }
@@ -2937,7 +2938,7 @@ impl<'a> State<'a> {
         match opt_abi {
             Some(abi) => {
                 try!(self.word_nbsp("extern"));
-                self.word_nbsp(abi.to_string().as_slice())
+                self.word_nbsp(abi.to_string()[])
             }
             None => Ok(())
         }
@@ -2953,7 +2954,7 @@ impl<'a> State<'a> {
 
         if abi != abi::Rust {
             try!(self.word_nbsp("extern"));
-            try!(self.word_nbsp(abi.to_string().as_slice()));
+            try!(self.word_nbsp(abi.to_string()[]));
         }
 
         word(&mut self.s, "fn")
@@ -2967,6 +2968,8 @@ impl<'a> State<'a> {
     }
 }
 
+fn repeat(s: &str, n: uint) -> String { iter::repeat(s).take(n).collect() }
+
 #[cfg(test)]
 mod test {
     use super::*;
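
The pretty-printer above stops calling `"#".repeat(n)` for raw-string delimiters and instead adds a local `repeat` helper built on `iter::repeat`. A sketch of the same helper in current Rust (with `usize` in place of `uint`), together with how the printer uses it:

    use std::iter;

    // Build a String by repeating a &str n times.
    fn repeat(s: &str, n: usize) -> String {
        iter::repeat(s).take(n).collect()
    }

    fn main() {
        // Raw-string delimiters like r##"..."## need the "#" repeated n times.
        let delim = repeat("#", 2);
        assert_eq!(delim, "##");
        assert_eq!(format!("r{d}\"raw\"{d}", d = delim), "r##\"raw\"##");
    }
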
diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs
index e98be046586..e1c8ff5011b 100644
--- a/src/libsyntax/std_inject.rs
+++ b/src/libsyntax/std_inject.rs
@@ -40,7 +40,7 @@ pub fn maybe_inject_prelude(krate: ast::Crate) -> ast::Crate {
 }
 
 fn use_std(krate: &ast::Crate) -> bool {
-    !attr::contains_name(krate.attrs.as_slice(), "no_std")
+    !attr::contains_name(krate.attrs[], "no_std")
 }
 
 fn no_prelude(attrs: &[ast::Attribute]) -> bool {
@@ -56,7 +56,7 @@ impl<'a> fold::Folder for StandardLibraryInjector<'a> {
 
         // The name to use in `extern crate "name" as std;`
         let actual_crate_name = match self.alt_std_name {
-            Some(ref s) => token::intern_and_get_ident(s.as_slice()),
+            Some(ref s) => token::intern_and_get_ident(s[]),
             None => token::intern_and_get_ident("std"),
         };
 
@@ -118,7 +118,7 @@ impl<'a> fold::Folder for PreludeInjector<'a> {
         attr::mark_used(&no_std_attr);
         krate.attrs.push(no_std_attr);
 
-        if !no_prelude(krate.attrs.as_slice()) {
+        if !no_prelude(krate.attrs[]) {
             // only add `use std::prelude::*;` if there wasn't a
             // `#![no_implicit_prelude]` at the crate level.
             // fold_mod() will insert glob path.
@@ -138,7 +138,7 @@ impl<'a> fold::Folder for PreludeInjector<'a> {
     }
 
     fn fold_item(&mut self, item: P<ast::Item>) -> SmallVector<P<ast::Item>> {
-        if !no_prelude(item.attrs.as_slice()) {
+        if !no_prelude(item.attrs[]) {
             // only recur if there wasn't `#![no_implicit_prelude]`
             // on this item, i.e. this means that the prelude is not
             // implicitly imported though the whole subtree
diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs
index 155cabb153c..bc7dda8c44a 100644
--- a/src/libsyntax/test.rs
+++ b/src/libsyntax/test.rs
@@ -73,14 +73,14 @@ pub fn modify_for_testing(sess: &ParseSess,
     // We generate the test harness when building in the 'test'
     // configuration, either with the '--test' or '--cfg test'
     // command line options.
-    let should_test = attr::contains_name(krate.config.as_slice(), "test");
+    let should_test = attr::contains_name(krate.config[], "test");
 
     // Check for #[reexport_test_harness_main = "some_name"] which
     // creates a `use some_name = __test::main;`. This needs to be
     // unconditional, so that the attribute is still marked as used in
     // non-test builds.
     let reexport_test_harness_main =
-        attr::first_attr_value_str_by_name(krate.attrs.as_slice(),
+        attr::first_attr_value_str_by_name(krate.attrs[],
                                            "reexport_test_harness_main");
 
     if should_test {
@@ -119,7 +119,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> {
             self.cx.path.push(ident);
         }
         debug!("current path: {}",
-               ast_util::path_name_i(self.cx.path.as_slice()));
+               ast_util::path_name_i(self.cx.path[]));
 
         if is_test_fn(&self.cx, &*i) || is_bench_fn(&self.cx, &*i) {
             match i.node {
@@ -277,8 +277,8 @@ fn strip_test_functions(krate: ast::Crate) -> ast::Crate {
     // When not compiling with --test we should not compile the
     // #[test] functions
     config::strip_items(krate, |attrs| {
-        !attr::contains_name(attrs.as_slice(), "test") &&
-        !attr::contains_name(attrs.as_slice(), "bench")
+        !attr::contains_name(attrs[], "test") &&
+        !attr::contains_name(attrs[], "bench")
     })
 }
 
@@ -291,7 +291,7 @@ enum HasTestSignature {
 
 
 fn is_test_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
-    let has_test_attr = attr::contains_name(i.attrs.as_slice(), "test");
+    let has_test_attr = attr::contains_name(i.attrs[], "test");
 
     fn has_test_signature(i: &ast::Item) -> HasTestSignature {
         match &i.node {
@@ -329,7 +329,7 @@ fn is_test_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
 }
 
 fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
-    let has_bench_attr = attr::contains_name(i.attrs.as_slice(), "bench");
+    let has_bench_attr = attr::contains_name(i.attrs[], "bench");
 
     fn has_test_signature(i: &ast::Item) -> bool {
         match i.node {
@@ -384,7 +384,7 @@ We're going to be building a module that looks more or less like:
 mod __test {
   extern crate test (name = "test", vers = "...");
   fn main() {
-    test::test_main_static(::os::args().as_slice(), tests)
+    test::test_main_static(::os::args()[], tests)
   }
 
   static tests : &'static [test::TestDescAndFn] = &[
@@ -510,8 +510,8 @@ fn mk_tests(cx: &TestCtxt) -> P<ast::Item> {
 }
 
 fn is_test_crate(krate: &ast::Crate) -> bool {
-    match attr::find_crate_name(krate.attrs.as_slice()) {
-        Some(ref s) if "test" == s.get().as_slice() => true,
+    match attr::find_crate_name(krate.attrs[]) {
+        Some(ref s) if "test" == s.get()[] => true,
         _ => false
     }
 }
@@ -551,11 +551,11 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P<ast::Expr> {
     // creates $name: $expr
     let field = |name, expr| ecx.field_imm(span, ecx.ident_of(name), expr);
 
-    debug!("encoding {}", ast_util::path_name_i(path.as_slice()));
+    debug!("encoding {}", ast_util::path_name_i(path[]));
 
     // path to the #[test] function: "foo::bar::baz"
-    let path_string = ast_util::path_name_i(path.as_slice());
-    let name_expr = ecx.expr_str(span, token::intern_and_get_ident(path_string.as_slice()));
+    let path_string = ast_util::path_name_i(path[]);
+    let name_expr = ecx.expr_str(span, token::intern_and_get_ident(path_string[]));
 
     // self::test::StaticTestName($name_expr)
     let name_expr = ecx.expr_call(span,
diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs
index 590a04ce221..97eb4316583 100644
--- a/src/libsyntax/util/interner.rs
+++ b/src/libsyntax/util/interner.rs
@@ -95,41 +95,37 @@ pub struct RcStr {
     string: Rc<String>,
 }
 
+impl RcStr {
+    pub fn new(string: &str) -> RcStr {
+        RcStr {
+            string: Rc::new(string.to_string()),
+        }
+    }
+}
+
 impl Eq for RcStr {}
 
 impl Ord for RcStr {
     fn cmp(&self, other: &RcStr) -> Ordering {
-        self.as_slice().cmp(other.as_slice())
-    }
-}
-
-impl Str for RcStr {
-    #[inline]
-    fn as_slice<'a>(&'a self) -> &'a str {
-        let s: &'a str = self.string.as_slice();
-        s
+        self[].cmp(other[])
     }
 }
 
 impl fmt::Show for RcStr {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         use std::fmt::Show;
-        self.as_slice().fmt(f)
+        self[].fmt(f)
     }
 }
 
 impl BorrowFrom<RcStr> for str {
     fn borrow_from(owned: &RcStr) -> &str {
-        owned.string.as_slice()
+        owned.string[]
     }
 }
 
-impl RcStr {
-    pub fn new(string: &str) -> RcStr {
-        RcStr {
-            string: Rc::new(string.into_string()),
-        }
-    }
+impl Deref<str> for RcStr {
+    fn deref(&self) -> &str { self.string[] }
 }
 
 /// A StrInterner differs from Interner<String> in that it accepts