author     Alex Crichton <alex@alexcrichton.com>   2014-12-10 19:46:38 -0800
committer  Alex Crichton <alex@alexcrichton.com>   2014-12-21 23:31:42 -0800
commit     082bfde412176249dc7328e771a2a15d202824cf (patch)
tree       4df3816d6ffea2f52bf5fa51fe385806ed529ba7
parent     4908017d59da8694b9ceaf743baf1163c1e19086 (diff)
Fallout of std::str stabilization
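
Most of the fallout below is mechanical: call sites that used the free function `from_str` move to the newly stabilized `.parse()` method on string slices, with the target type supplied by a turbofish or a type hint. A minimal sketch of that before/after shape, written against the 2014-era nightly this commit targets (`uint` and the `Option`-returning `parse` are period-correct, not current Rust):

```rust
fn main() {
    // Before this commit: `let n = from_str::<uint>("5");`
    // After: `.parse()` on the string slice itself.
    let n = "5".parse::<uint>();          // n: Option<uint>
    let m: Option<uint> = "5".parse();    // equivalent, hint on the binding
    assert_eq!(n, Some(5u));
    assert_eq!(m, Some(5u));
}
```
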
-rw-r--r--  src/compiletest/compiletest.rs | 6
-rw-r--r--  src/compiletest/header.rs | 6
-rw-r--r--  src/compiletest/runtest.rs | 2
-rw-r--r--  src/doc/guide.md | 24
-rw-r--r--  src/doc/reference.md | 2
-rw-r--r--  src/libcollections/lib.rs | 4
-rw-r--r--  src/libcollections/str.rs | 123
-rw-r--r--  src/libcollections/string.rs | 22
-rw-r--r--  src/libcore/option.rs | 6
-rw-r--r--  src/libcore/result.rs | 2
-rw-r--r--  src/libcore/str.rs | 36
-rw-r--r--  src/libcoretest/lib.rs | 1
-rw-r--r--  src/libcoretest/str.rs | 2
-rw-r--r--  src/libfmt_macros/lib.rs | 21
-rw-r--r--  src/libgetopts/lib.rs | 61
-rw-r--r--  src/libgraphviz/lib.rs | 14
-rw-r--r--  src/liblog/directive.rs | 2
-rw-r--r--  src/liblog/lib.rs | 8
-rw-r--r--  src/libregex/parse.rs | 57
-rw-r--r--  src/libregex/re.rs | 28
-rw-r--r--  src/librustc/lint/builtin.rs | 54
-rw-r--r--  src/librustc/lint/context.rs | 56
-rw-r--r--  src/librustc/metadata/creader.rs | 40
-rw-r--r--  src/librustc/metadata/csearch.rs | 2
-rw-r--r--  src/librustc/metadata/decoder.rs | 6
-rw-r--r--  src/librustc/metadata/encoder.rs | 88
-rw-r--r--  src/librustc/metadata/loader.rs | 36
-rw-r--r--  src/librustc/metadata/tydecode.rs | 12
-rw-r--r--  src/librustc/middle/astconv_util.rs | 2
-rw-r--r--  src/librustc/middle/astencode.rs | 6
-rw-r--r--  src/librustc/middle/cfg/construct.rs | 6
-rw-r--r--  src/librustc/middle/cfg/graphviz.rs | 5
-rw-r--r--  src/librustc/middle/check_loop.rs | 6
-rw-r--r--  src/librustc/middle/check_match.rs | 18
-rw-r--r--  src/librustc/middle/check_static.rs | 6
-rw-r--r--  src/librustc/middle/check_static_recursion.rs | 2
-rw-r--r--  src/librustc/middle/const_eval.rs | 8
-rw-r--r--  src/librustc/middle/dataflow.rs | 6
-rw-r--r--  src/librustc/middle/dependency_format.rs | 6
-rw-r--r--  src/librustc/middle/expr_use_visitor.rs | 8
-rw-r--r--  src/librustc/middle/infer/combine.rs | 4
-rw-r--r--  src/librustc/middle/infer/error_reporting.rs | 74
-rw-r--r--  src/librustc/middle/infer/higher_ranked/mod.rs | 4
-rw-r--r--  src/librustc/middle/infer/mod.rs | 2
-rw-r--r--  src/librustc/middle/infer/region_inference/graphviz.rs | 2
-rw-r--r--  src/librustc/middle/infer/region_inference/mod.rs | 16
-rw-r--r--  src/librustc/middle/liveness.rs | 12
-rw-r--r--  src/librustc/middle/mem_categorization.rs | 8
-rw-r--r--  src/librustc/middle/privacy.rs | 12
-rw-r--r--  src/librustc/middle/reachable.rs | 10
-rw-r--r--  src/librustc/middle/resolve_lifetime.rs | 6
-rw-r--r--  src/librustc/middle/subst.rs | 4
-rw-r--r--  src/librustc/middle/traits/coherence.rs | 2
-rw-r--r--  src/librustc/middle/traits/select.rs | 15
-rw-r--r--  src/librustc/middle/ty.rs | 68
-rw-r--r--  src/librustc/plugin/load.rs | 6
-rw-r--r--  src/librustc/session/config.rs | 58
-rw-r--r--  src/librustc/session/mod.rs | 4
-rw-r--r--  src/librustc/util/common.rs | 3
-rw-r--r--  src/librustc/util/ppaux.rs | 24
-rw-r--r--  src/librustc_back/archive.rs | 42
-rw-r--r--  src/librustc_back/rpath.rs | 16
-rw-r--r--  src/librustc_back/svh.rs | 4
-rw-r--r--  src/librustc_back/target/mod.rs | 4
-rw-r--r--  src/librustc_borrowck/borrowck/check_loans.rs | 42
-rw-r--r--  src/librustc_borrowck/borrowck/fragments.rs | 38
-rw-r--r--  src/librustc_borrowck/borrowck/gather_loans/mod.rs | 2
-rw-r--r--  src/librustc_borrowck/borrowck/gather_loans/move_error.rs | 8
-rw-r--r--  src/librustc_borrowck/borrowck/mod.rs | 38
-rw-r--r--  src/librustc_borrowck/graphviz.rs | 4
-rw-r--r--  src/librustc_driver/driver.rs | 30
-rw-r--r--  src/librustc_driver/lib.rs | 28
-rw-r--r--  src/librustc_driver/pretty.rs | 30
-rw-r--r--  src/librustc_resolve/lib.rs | 126
-rw-r--r--  src/librustc_trans/back/link.rs | 128
-rw-r--r--  src/librustc_trans/back/lto.rs | 20
-rw-r--r--  src/librustc_trans/back/write.rs | 88
-rw-r--r--  src/librustc_trans/save/mod.rs | 146
-rw-r--r--  src/librustc_trans/save/recorder.rs | 22
-rw-r--r--  src/librustc_trans/save/span_utils.rs | 4
-rw-r--r--  src/librustc_trans/trans/_match.rs | 36
-rw-r--r--  src/librustc_trans/trans/adt.rs | 58
-rw-r--r--  src/librustc_trans/trans/asm.rs | 14
-rw-r--r--  src/librustc_trans/trans/base.rs | 104
-rw-r--r--  src/librustc_trans/trans/builder.rs | 8
-rw-r--r--  src/librustc_trans/trans/cabi.rs | 4
-rw-r--r--  src/librustc_trans/trans/callee.rs | 12
-rw-r--r--  src/librustc_trans/trans/cleanup.rs | 10
-rw-r--r--  src/librustc_trans/trans/closure.rs | 22
-rw-r--r--  src/librustc_trans/trans/common.rs | 14
-rw-r--r--  src/librustc_trans/trans/consts.rs | 44
-rw-r--r--  src/librustc_trans/trans/context.rs | 6
-rw-r--r--  src/librustc_trans/trans/controlflow.rs | 14
-rw-r--r--  src/librustc_trans/trans/datum.rs | 2
-rw-r--r--  src/librustc_trans/trans/debuginfo.rs | 138
-rw-r--r--  src/librustc_trans/trans/expr.rs | 50
-rw-r--r--  src/librustc_trans/trans/foreign.rs | 30
-rw-r--r--  src/librustc_trans/trans/glue.rs | 14
-rw-r--r--  src/librustc_trans/trans/intrinsic.rs | 2
-rw-r--r--  src/librustc_trans/trans/meth.rs | 10
-rw-r--r--  src/librustc_trans/trans/monomorphize.rs | 22
-rw-r--r--  src/librustc_trans/trans/type_.rs | 2
-rw-r--r--  src/librustc_trans/trans/type_of.rs | 14
-rw-r--r--  src/librustc_typeck/astconv.rs | 32
-rw-r--r--  src/librustc_typeck/check/method/mod.rs | 6
-rw-r--r--  src/librustc_typeck/check/method/probe.rs | 2
-rw-r--r--  src/librustc_typeck/check/mod.rs | 62
-rw-r--r--  src/librustc_typeck/check/regionck.rs | 14
-rw-r--r--  src/librustc_typeck/check/regionmanip.rs | 2
-rw-r--r--  src/librustc_typeck/check/vtable.rs | 12
-rw-r--r--  src/librustc_typeck/coherence/mod.rs | 4
-rw-r--r--  src/librustc_typeck/collect.rs | 40
-rw-r--r--  src/librustc_typeck/lib.rs | 7
-rw-r--r--  src/librustc_typeck/variance.rs | 8
-rw-r--r--  src/librustdoc/externalfiles.rs | 2
-rw-r--r--  src/librustdoc/html/format.rs | 10
-rw-r--r--  src/librustdoc/html/highlight.rs | 2
-rw-r--r--  src/librustdoc/html/render.rs | 8
-rw-r--r--  src/librustdoc/passes.rs | 6
-rw-r--r--  src/libserialize/json.rs | 192
-rw-r--r--  src/libserialize/lib.rs | 1
-rw-r--r--  src/libserialize/serialize.rs | 4
-rw-r--r--  src/libstd/ascii.rs | 2
-rw-r--r--  src/libstd/c_str.rs | 2
-rw-r--r--  src/libstd/dynamic_lib.rs | 2
-rw-r--r--  src/libstd/failure.rs | 2
-rw-r--r--  src/libstd/io/mod.rs | 7
-rw-r--r--  src/libstd/io/net/ip.rs | 2
-rw-r--r--  src/libstd/io/process.rs | 4
-rw-r--r--  src/libstd/io/stdio.rs | 2
-rw-r--r--  src/libstd/num/strconv.rs | 2
-rw-r--r--  src/libstd/os.rs | 12
-rw-r--r--  src/libstd/path/mod.rs | 19
-rw-r--r--  src/libstd/path/posix.rs | 5
-rw-r--r--  src/libstd/path/windows.rs | 144
-rw-r--r--  src/libstd/prelude.rs | 4
-rw-r--r--  src/libstd/rt/backtrace.rs | 3
-rw-r--r--  src/libstd/rt/mod.rs | 2
-rw-r--r--  src/libstd/rt/unwind.rs | 2
-rw-r--r--  src/libstd/rt/util.rs | 23
-rw-r--r--  src/libstd/sys/common/backtrace.rs | 11
-rw-r--r--  src/libstd/sys/windows/backtrace.rs | 2
-rw-r--r--  src/libstd/sys/windows/fs.rs | 3
-rw-r--r--  src/libstd/sys/windows/os.rs | 4
-rw-r--r--  src/libstd/sys/windows/process.rs | 2
-rw-r--r--  src/libstd/sys/windows/tty.rs | 2
-rw-r--r--  src/libsyntax/ast.rs | 2
-rw-r--r--  src/libsyntax/ast_map/mod.rs | 38
-rw-r--r--  src/libsyntax/ast_util.rs | 14
-rw-r--r--  src/libsyntax/attr.rs | 13
-rw-r--r--  src/libsyntax/codemap.rs | 12
-rw-r--r--  src/libsyntax/diagnostic.rs | 38
-rw-r--r--  src/libsyntax/diagnostics/plugin.rs | 6
-rw-r--r--  src/libsyntax/ext/asm.rs | 3
-rw-r--r--  src/libsyntax/ext/base.rs | 10
-rw-r--r--  src/libsyntax/ext/build.rs | 3
-rw-r--r--  src/libsyntax/ext/concat.rs | 8
-rw-r--r--  src/libsyntax/ext/concat_idents.rs | 2
-rw-r--r--  src/libsyntax/ext/deriving/bounds.rs | 3
-rw-r--r--  src/libsyntax/ext/deriving/clone.rs | 9
-rw-r--r--  src/libsyntax/ext/deriving/decodable.rs | 2
-rw-r--r--  src/libsyntax/ext/deriving/encodable.rs | 3
-rw-r--r--  src/libsyntax/ext/deriving/generic/mod.rs | 46
-rw-r--r--  src/libsyntax/ext/deriving/mod.rs | 2
-rw-r--r--  src/libsyntax/ext/deriving/show.rs | 2
-rw-r--r--  src/libsyntax/ext/env.rs | 8
-rw-r--r--  src/libsyntax/ext/expand.rs | 57
-rw-r--r--  src/libsyntax/ext/format.rs | 27
-rw-r--r--  src/libsyntax/ext/quote.rs | 6
-rw-r--r--  src/libsyntax/ext/source_util.rs | 16
-rw-r--r--  src/libsyntax/ext/tt/macro_parser.rs | 21
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs | 12
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs | 4
-rw-r--r--  src/libsyntax/feature_gate.rs | 10
-rw-r--r--  src/libsyntax/parse/attr.rs | 3
-rw-r--r--  src/libsyntax/parse/lexer/comments.rs | 16
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs | 18
-rw-r--r--  src/libsyntax/parse/mod.rs | 54
-rw-r--r--  src/libsyntax/parse/obsolete.rs | 4
-rw-r--r--  src/libsyntax/parse/parser.rs | 148
-rw-r--r--  src/libsyntax/parse/token.rs | 24
-rw-r--r--  src/libsyntax/print/pp.rs | 6
-rw-r--r--  src/libsyntax/print/pprust.rs | 259
-rw-r--r--  src/libsyntax/std_inject.rs | 8
-rw-r--r--  src/libsyntax/test.rs | 26
-rw-r--r--  src/libsyntax/util/interner.rs | 30
-rw-r--r--  src/libterm/terminfo/mod.rs | 2
-rw-r--r--  src/libterm/terminfo/searcher.rs | 4
-rw-r--r--  src/libtest/lib.rs | 8
-rw-r--r--  src/libunicode/u_str.rs | 111
-rw-r--r--  src/test/run-pass/issue-19340-1.rs | 2
-rw-r--r--  src/test/run-pass/issue-19340-2.rs | 2
-rw-r--r--  src/test/run-pass/issue-19367.rs | 8
193 files changed, 2142 insertions, 2229 deletions
diff --git a/src/compiletest/compiletest.rs b/src/compiletest/compiletest.rs
index 59be0152d58..bdbfbfd7c89 100644
--- a/src/compiletest/compiletest.rs
+++ b/src/compiletest/compiletest.rs
@@ -152,7 +152,7 @@ pub fn parse_config(args: Vec<String> ) -> Config {
             matches.opt_str("ratchet-metrics").map(|s| Path::new(s)),
         ratchet_noise_percent:
             matches.opt_str("ratchet-noise-percent")
-                   .and_then(|s| from_str::<f64>(s.as_slice())),
+                   .and_then(|s| s.as_slice().parse::<f64>()),
         runtool: matches.opt_str("runtool"),
         host_rustcflags: matches.opt_str("host-rustcflags"),
         target_rustcflags: matches.opt_str("target-rustcflags"),
@@ -190,9 +190,7 @@ pub fn log_config(config: &Config) {
     logv(c, format!("filter: {}",
                     opt_str(&config.filter
                                    .as_ref()
-                                   .map(|re| {
-                                       re.to_string().into_string()
-                                   }))));
+                                   .map(|re| re.to_string()))));
     logv(c, format!("runtool: {}", opt_str(&config.runtool)));
     logv(c, format!("host-rustcflags: {}",
                     opt_str(&config.host_rustcflags)));
diff --git a/src/compiletest/header.rs b/src/compiletest/header.rs
index 60ef76528e8..27be6c6d835 100644
--- a/src/compiletest/header.rs
+++ b/src/compiletest/header.rs
@@ -351,8 +351,8 @@ pub fn gdb_version_to_int(version_string: &str) -> int {
         panic!("{}", error_string);
     }
 
-    let major: int = from_str(components[0]).expect(error_string);
-    let minor: int = from_str(components[1]).expect(error_string);
+    let major: int = components[0].parse().expect(error_string);
+    let minor: int = components[1].parse().expect(error_string);
 
     return major * 1000 + minor;
 }
@@ -362,6 +362,6 @@ pub fn lldb_version_to_int(version_string: &str) -> int {
         "Encountered LLDB version string with unexpected format: {}",
         version_string);
     let error_string = error_string.as_slice();
-    let major: int = from_str(version_string).expect(error_string);
+    let major: int = version_string.parse().expect(error_string);
     return major;
 }
diff --git a/src/compiletest/runtest.rs b/src/compiletest/runtest.rs
index 567734b0dab..bf72250c470 100644
--- a/src/compiletest/runtest.rs
+++ b/src/compiletest/runtest.rs
@@ -1361,7 +1361,7 @@ fn split_maybe_args(argstr: &Option<String>) -> Vec<String> {
             s.as_slice()
              .split(' ')
              .filter_map(|s| {
-                 if s.is_whitespace() {
+                 if s.chars().all(|c| c.is_whitespace()) {
                      None
                  } else {
                      Some(s.to_string())
diff --git a/src/doc/guide.md b/src/doc/guide.md
index 1cd100e1598..3963ce6b85d 100644
--- a/src/doc/guide.md
+++ b/src/doc/guide.md
@@ -2257,10 +2257,10 @@ a function for that:
 let input = io::stdin().read_line()
                        .ok()
                        .expect("Failed to read line");
-let input_num: Option<uint> = from_str(input.as_slice());
+let input_num: Option<uint> = input.parse();
 ```
 
-The `from_str` function takes in a `&str` value and converts it into something.
+The `parse` function takes in a `&str` value and converts it into something.
 We tell it what kind of something with a type hint. Remember our type hint with
 `random()`? It looked like this:
 
@@ -2279,8 +2279,8 @@ In this case, we say `x` is a `uint` explicitly, so Rust is able to properly
 tell `random()` what to generate. In a similar fashion, both of these work:
 
 ```{rust,ignore}
-let input_num = from_str::<uint>("5");       // input_num: Option<uint>
-let input_num: Option<uint> = from_str("5"); // input_num: Option<uint>
+let input_num = "5".parse::<uint>();         // input_num: Option<uint>
+let input_num: Option<uint> = "5".parse();   // input_num: Option<uint>
 ```
 
 Anyway, with us now converting our input to a number, our code looks like this:
@@ -2301,7 +2301,7 @@ fn main() {
     let input = io::stdin().read_line()
                            .ok()
                            .expect("Failed to read line");
-    let input_num: Option<uint> = from_str(input.as_slice());
+    let input_num: Option<uint> = input.parse();
 
     println!("You guessed: {}", input_num);
 
@@ -2350,7 +2350,7 @@ fn main() {
     let input = io::stdin().read_line()
                            .ok()
                            .expect("Failed to read line");
-    let input_num: Option<uint> = from_str(input.as_slice());
+    let input_num: Option<uint> = input.parse();
 
     let num = match input_num {
         Some(num) => num,
@@ -2395,7 +2395,7 @@ Uh, what? But we did!
 
 ... actually, we didn't. See, when you get a line of input from `stdin()`,
 you get all the input. Including the `\n` character from you pressing Enter.
-Therefore, `from_str()` sees the string `"5\n"` and says "nope, that's not a
+Therefore, `parse()` sees the string `"5\n"` and says "nope, that's not a
 number; there's non-number stuff in there!" Luckily for us, `&str`s have an easy
 method we can use defined on them: `trim()`. One small modification, and our
 code looks like this:
@@ -2416,7 +2416,7 @@ fn main() {
     let input = io::stdin().read_line()
                            .ok()
                            .expect("Failed to read line");
-    let input_num: Option<uint> = from_str(input.as_slice().trim());
+    let input_num: Option<uint> = input.trim().parse();
 
     let num = match input_num {
         Some(num) => num,
@@ -2491,7 +2491,7 @@ fn main() {
         let input = io::stdin().read_line()
                                .ok()
                                .expect("Failed to read line");
-        let input_num: Option<uint> = from_str(input.as_slice().trim());
+        let input_num: Option<uint> = input.trim().parse();
 
         let num = match input_num {
             Some(num) => num,
@@ -2566,7 +2566,7 @@ fn main() {
         let input = io::stdin().read_line()
                                .ok()
                                .expect("Failed to read line");
-        let input_num: Option<uint> = from_str(input.as_slice().trim());
+        let input_num: Option<uint> = input.trim().parse();
 
         let num = match input_num {
             Some(num) => num,
@@ -2621,7 +2621,7 @@ fn main() {
         let input = io::stdin().read_line()
                                .ok()
                                .expect("Failed to read line");
-        let input_num: Option<uint> = from_str(input.as_slice().trim());
+        let input_num: Option<uint> = input.trim().parse();
 
         let num = match input_num {
             Some(num) => num,
@@ -2697,7 +2697,7 @@ fn main() {
         let input = io::stdin().read_line()
                                .ok()
                                .expect("Failed to read line");
-        let input_num: Option<uint> = from_str(input.as_slice().trim());
+        let input_num: Option<uint> = input.trim().parse();
 
         let num = match input_num {
             Some(num) => num,
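
The guide hunks above all hinge on one behavioral detail spelled out in the prose: `read_line()` keeps the trailing newline, so the input must be trimmed before it will parse as a number. A condensed sketch of that pattern in the same era's syntax (variable names follow the guide; this is illustrative, not part of the patch):

```rust
use std::io;

fn main() {
    let input = io::stdin().read_line()
                           .ok()
                           .expect("Failed to read line");

    // Without `trim()`, "5\n" fails to parse; with it, we get Some(5).
    let input_num: Option<uint> = input.trim().parse();

    match input_num {
        Some(num) => println!("You guessed: {}", num),
        None      => println!("Please input a number!"),
    }
}
```
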
diff --git a/src/doc/reference.md b/src/doc/reference.md
index 722230d3755..97184d53498 100644
--- a/src/doc/reference.md
+++ b/src/doc/reference.md
@@ -3177,7 +3177,7 @@ Some examples of call expressions:
 # fn add(x: int, y: int) -> int { 0 }
 
 let x: int = add(1, 2);
-let pi: Option<f32> = from_str("3.14");
+let pi: Option<f32> = "3.14".parse();
 ```
 
 ### Lambda expressions
diff --git a/src/libcollections/lib.rs b/src/libcollections/lib.rs
index 75d179319f7..363d30abd03 100644
--- a/src/libcollections/lib.rs
+++ b/src/libcollections/lib.rs
@@ -121,7 +121,7 @@ mod prelude {
     // in core and collections (may differ).
     pub use slice::{PartialEqSliceExt, OrdSliceExt};
     pub use slice::{AsSlice, SliceExt};
-    pub use str::{from_str, Str, StrPrelude};
+    pub use str::{from_str, Str};
 
     // from other crates.
     pub use alloc::boxed::Box;
@@ -129,7 +129,7 @@ mod prelude {
 
     // from collections.
     pub use slice::{CloneSliceExt, VectorVector};
-    pub use str::{IntoMaybeOwned, UnicodeStrPrelude, StrAllocating, StrVector};
+    pub use str::{IntoMaybeOwned, StrVector};
     pub use string::{String, ToString};
     pub use vec::Vec;
 }
diff --git a/src/libcollections/str.rs b/src/libcollections/str.rs
index 8c9346639b3..5feae5e558e 100644
--- a/src/libcollections/str.rs
+++ b/src/libcollections/str.rs
@@ -55,25 +55,31 @@ use self::MaybeOwned::*;
 use self::RecompositionState::*;
 use self::DecompositionType::*;
 
-use core::prelude::*;
-
 use core::borrow::{BorrowFrom, Cow, ToOwned};
-use core::cmp::{mod, Equiv, PartialEq, Eq, PartialOrd, Ord, Ordering};
+use core::char::Char;
+use core::clone::Clone;
+use core::cmp::{Equiv, PartialEq, Eq, PartialOrd, Ord, Ordering};
+use core::cmp;
 use core::default::Default;
 use core::fmt;
 use core::hash;
 use core::iter::AdditiveIterator;
 use core::iter::{mod, range, Iterator, IteratorExt};
+use core::kinds::Sized;
+use core::ops;
+use core::option::Option::{mod, Some, None};
+use core::slice::AsSlice;
 use core::str as core_str;
 use unicode::str::{UnicodeStr, Utf16Encoder};
 
 use ring_buf::RingBuf;
-use string::{String, ToString};
+use slice::SliceExt;
+use string::String;
 use unicode;
 use vec::Vec;
 
 pub use core::str::{from_utf8, CharEq, Chars, CharIndices};
-pub use core::str::{Bytes, CharSplits};
+pub use core::str::{Bytes, CharSplits, is_utf8};
 pub use core::str::{CharSplitsN, Lines, LinesAny, MatchIndices, StrSplits};
 pub use core::str::{CharRange};
 pub use core::str::{FromStr, from_str, Utf8Error};
@@ -408,6 +414,7 @@ impl<'a> Iterator<u16> for Utf16Units<'a> {
 /// # Examples
 ///
 /// ```rust
+/// # #![allow(deprecated)]
 /// use std::str;
 /// let string = "orange";
 /// let new_string = str::replace(string, "or", "str");
@@ -441,7 +448,7 @@ Section: MaybeOwned
 /// A string type that can hold either a `String` or a `&str`.
 /// This can be useful as an optimization when an allocation is sometimes
 /// needed but not always.
-#[deprecated = "use stding::string::CowString"]
+#[deprecated = "use std::string::CowString"]
 pub enum MaybeOwned<'a> {
     /// A borrowed string.
     Slice(&'a str),
@@ -650,7 +657,11 @@ impl BorrowFrom<String> for str {
 
 #[unstable = "trait is unstable"]
 impl ToOwned<String> for str {
-    fn to_owned(&self) -> String { self.to_string() }
+    fn to_owned(&self) -> String {
+        unsafe {
+            String::from_utf8_unchecked(self.as_bytes().to_owned())
+        }
+    }
 }
 
 /// Unsafe string operations.
@@ -673,7 +684,7 @@ Section: Trait implementations
 */
 
 /// Any string that can be represented as a slice.
-pub trait StrExt for Sized?: Slice<uint, str> {
+pub trait StrExt for Sized?: ops::Slice<uint, str> {
     /// Escapes each char in `s` with `char::escape_default`.
     #[unstable = "return type may change to be an iterator"]
     fn escape_default(&self) -> String {
@@ -724,7 +735,7 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     }
 
     /// Given a string, makes a new string with repeated copies of it.
-    #[deprecated = "user repeat(self).take(n).collect() instead"]
+    #[deprecated = "use repeat(self).take(n).collect() instead"]
     fn repeat(&self, nn: uint) -> String {
         iter::repeat(self[]).take(nn).collect()
     }
@@ -766,7 +777,7 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     /// Returns an iterator over the string in Unicode Normalization Form D
     /// (canonical decomposition).
     #[inline]
-    #[unstable = "this functionality may only be provided by libunicode"]
+    #[unstable = "this functionality may be moved to libunicode"]
     fn nfd_chars<'a>(&'a self) -> Decompositions<'a> {
         Decompositions {
             iter: self[].chars(),
@@ -779,7 +790,7 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     /// Returns an iterator over the string in Unicode Normalization Form KD
     /// (compatibility decomposition).
     #[inline]
-    #[unstable = "this functionality may only be provided by libunicode"]
+    #[unstable = "this functionality may be moved to libunicode"]
     fn nfkd_chars<'a>(&'a self) -> Decompositions<'a> {
         Decompositions {
             iter: self[].chars(),
@@ -792,7 +803,7 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     /// An Iterator over the string in Unicode Normalization Form C
     /// (canonical decomposition followed by canonical composition).
     #[inline]
-    #[unstable = "this functionality may only be provided by libunicode"]
+    #[unstable = "this functionality may be moved to libunicode"]
     fn nfc_chars<'a>(&'a self) -> Recompositions<'a> {
         Recompositions {
             iter: self.nfd_chars(),
@@ -806,7 +817,7 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     /// An Iterator over the string in Unicode Normalization Form KC
     /// (compatibility decomposition followed by canonical composition).
     #[inline]
-    #[unstable = "this functionality may only be provided by libunicode"]
+    #[unstable = "this functionality may be moved to libunicode"]
     fn nfkc_chars<'a>(&'a self) -> Recompositions<'a> {
         Recompositions {
             iter: self.nfkd_chars(),
@@ -891,7 +902,7 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     /// let v: Vec<&str> = "Mary had a little lamb".split(' ').collect();
     /// assert_eq!(v, vec!["Mary", "had", "a", "little", "lamb"]);
     ///
-    /// let v: Vec<&str> = "abc1def2ghi".split(|c: char| c.is_numeric()).collect();
+    /// let v: Vec<&str> = "abc1def2ghi".split(|&: c: char| c.is_numeric()).collect();
     /// assert_eq!(v, vec!["abc", "def", "ghi"]);
     ///
     /// let v: Vec<&str> = "lionXXtigerXleopard".split('X').collect();
@@ -915,7 +926,7 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     /// let v: Vec<&str> = "Mary had a little lambda".splitn(2, ' ').collect();
     /// assert_eq!(v, vec!["Mary", "had", "a little lambda"]);
     ///
-    /// let v: Vec<&str> = "abc1def2ghi".splitn(1, |c: char| c.is_numeric()).collect();
+    /// let v: Vec<&str> = "abc1def2ghi".splitn(1, |&: c: char| c.is_numeric()).collect();
     /// assert_eq!(v, vec!["abc", "def2ghi"]);
     ///
     /// let v: Vec<&str> = "lionXXtigerXleopard".splitn(2, 'X').collect();
@@ -950,7 +961,7 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     /// let v: Vec<&str> = "Mary had a little lamb".split(' ').rev().collect();
     /// assert_eq!(v, vec!["lamb", "little", "a", "had", "Mary"]);
     ///
-    /// let v: Vec<&str> = "abc1def2ghi".split(|c: char| c.is_numeric()).rev().collect();
+    /// let v: Vec<&str> = "abc1def2ghi".split(|&: c: char| c.is_numeric()).rev().collect();
     /// assert_eq!(v, vec!["ghi", "def", "abc"]);
     ///
     /// let v: Vec<&str> = "lionXXtigerXleopard".split('X').rev().collect();
@@ -971,7 +982,7 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     /// let v: Vec<&str> = "Mary had a little lamb".rsplitn(2, ' ').collect();
     /// assert_eq!(v, vec!["lamb", "little", "Mary had a"]);
     ///
-    /// let v: Vec<&str> = "abc1def2ghi".rsplitn(1, |c: char| c.is_numeric()).collect();
+    /// let v: Vec<&str> = "abc1def2ghi".rsplitn(1, |&: c: char| c.is_numeric()).collect();
     /// assert_eq!(v, vec!["ghi", "abc1def"]);
     ///
     /// let v: Vec<&str> = "lionXXtigerXleopard".rsplitn(2, 'X').collect();
@@ -1071,10 +1082,11 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     /// # Example
     ///
     /// ```rust
+    /// # #![allow(deprecated)]
     /// // composed forms of `ö` and `é`
     /// let c = "Löwe 老虎 Léopard"; // German, Simplified Chinese, French
     /// // decomposed forms of `ö` and `é`
-    /// let d = "Lo\u0308we 老虎 Le\u0301opard";
+    /// let d = "Lo\u{0308}we 老虎 Le\u{0301}opard";
     ///
     /// assert_eq!(c.char_len(), 15);
     /// assert_eq!(d.char_len(), 17);
@@ -1225,10 +1237,10 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     /// # Example
     ///
     /// ```rust
-    /// assert_eq!("11foo1bar11".trim_chars('1'), "foo1bar")
+    /// assert_eq!("11foo1bar11".trim_chars('1'), "foo1bar");
     /// let x: &[_] = &['1', '2'];
-    /// assert_eq!("12foo1bar12".trim_chars(x), "foo1bar")
-    /// assert_eq!("123foo1bar123".trim_chars(|c: char| c.is_numeric()), "foo1bar")
+    /// assert_eq!("12foo1bar12".trim_chars(x), "foo1bar");
+    /// assert_eq!("123foo1bar123".trim_chars(|&: c: char| c.is_numeric()), "foo1bar");
     /// ```
     #[unstable = "awaiting pattern/matcher stabilization"]
     fn trim_chars<C: CharEq>(&self, to_trim: C) -> &str {
@@ -1244,10 +1256,10 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     /// # Example
     ///
     /// ```rust
-    /// assert_eq!("11foo1bar11".trim_left_chars('1'), "foo1bar11")
+    /// assert_eq!("11foo1bar11".trim_left_chars('1'), "foo1bar11");
     /// let x: &[_] = &['1', '2'];
-    /// assert_eq!("12foo1bar12".trim_left_chars(x), "foo1bar12")
-    /// assert_eq!("123foo1bar123".trim_left_chars(|c: char| c.is_numeric()), "foo1bar123")
+    /// assert_eq!("12foo1bar12".trim_left_chars(x), "foo1bar12");
+    /// assert_eq!("123foo1bar123".trim_left_chars(|&: c: char| c.is_numeric()), "foo1bar123");
     /// ```
     #[unstable = "awaiting pattern/matcher stabilization"]
     fn trim_left_chars<C: CharEq>(&self, to_trim: C) -> &str {
@@ -1263,10 +1275,10 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     /// # Example
     ///
     /// ```rust
-    /// assert_eq!("11foo1bar11".trim_right_chars('1'), "11foo1bar")
+    /// assert_eq!("11foo1bar11".trim_right_chars('1'), "11foo1bar");
     /// let x: &[_] = &['1', '2'];
-    /// assert_eq!("12foo1bar12".trim_right_chars(x), "12foo1bar")
-    /// assert_eq!("123foo1bar123".trim_right_chars(|c: char| c.is_numeric()), "123foo1bar")
+    /// assert_eq!("12foo1bar12".trim_right_chars(x), "12foo1bar");
+    /// assert_eq!("123foo1bar123".trim_right_chars(|&: c: char| c.is_numeric()), "123foo1bar");
     /// ```
     #[unstable = "awaiting pattern/matcher stabilization"]
     fn trim_right_chars<C: CharEq>(&self, to_trim: C) -> &str {
@@ -1434,7 +1446,7 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     /// assert_eq!(s.find('é'), Some(14));
     ///
     /// // the first space
-    /// assert_eq!(s.find(|c: char| c.is_whitespace()), Some(5));
+    /// assert_eq!(s.find(|&: c: char| c.is_whitespace()), Some(5));
     ///
     /// // neither are found
     /// let x: &[_] = &['1', '2'];
@@ -1462,7 +1474,7 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     /// assert_eq!(s.rfind('é'), Some(14));
     ///
     /// // the second space
-    /// assert_eq!(s.rfind(|c: char| c.is_whitespace()), Some(12));
+    /// assert_eq!(s.rfind(|&: c: char| c.is_whitespace()), Some(12));
     ///
     /// // searches for an occurrence of either `1` or `2`, but neither are found
     /// let x: &[_] = &['1', '2'];
@@ -1609,8 +1621,8 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     /// # Example
     ///
     /// ```rust
-    /// let gr1 = "a\u0310e\u0301o\u0308\u0332".graphemes(true).collect::<Vec<&str>>();
-    /// let b: &[_] = &["a\u0310", "e\u0301", "o\u0308\u0332"];
+    /// let gr1 = "a\u{310}e\u{301}o\u{308}\u{332}".graphemes(true).collect::<Vec<&str>>();
+    /// let b: &[_] = &["a\u{310}", "e\u{301}", "o\u{308}\u{332}"];
     /// assert_eq!(gr1.as_slice(), b);
     /// let gr2 = "a\r\nb🇷🇺🇸🇹".graphemes(true).collect::<Vec<&str>>();
     /// let b: &[_] = &["a", "\r\n", "b", "🇷🇺🇸🇹"];
@@ -1659,6 +1671,7 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     /// # Example
     ///
     /// ```rust
+    /// # #![allow(deprecated)]
     /// assert!(" \t\n".is_whitespace());
     /// assert!("".is_whitespace());
     ///
@@ -1677,6 +1690,7 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     /// # Example
     ///
     /// ```rust
+    /// # #![allow(deprecated)]
     /// assert!("Löwe老虎Léopard123".is_alphanumeric());
     /// assert!("".is_alphanumeric());
     ///
@@ -1718,25 +1732,39 @@ pub trait StrExt for Sized?: Slice<uint, str> {
     fn trim_right(&self) -> &str {
         UnicodeStr::trim_right(self[])
     }
+
+    /// Deprecated, call `.to_owned()` instead from the `std::borrow::ToOwned`
+    /// trait.
+    #[deprecated = "call `.to_owned()` on `std::borrow::ToOwned` instead"]
+    fn into_string(&self) -> String {
+        self[].to_owned()
+    }
 }
 
 impl StrExt for str {}
 
 #[cfg(test)]
 mod tests {
-    use prelude::*;
-    use core::default::Default;
-    use core::iter::AdditiveIterator;
-    use super::{eq_slice, from_utf8, is_utf8, is_utf16, raw};
-    use super::truncate_utf16_at_nul;
+    use std::iter::AdditiveIterator;
+    use std::iter::range;
+    use std::default::Default;
+    use std::char::Char;
+    use std::clone::Clone;
+    use std::cmp::{Ord, PartialOrd, Equiv};
+    use std::cmp::Ordering::{Equal, Greater, Less};
+    use std::option::Option::{mod, Some, None};
+    use std::result::Result::{Ok, Err};
+    use std::ptr::RawPtr;
+    use std::iter::{Iterator, IteratorExt, DoubleEndedIteratorExt};
+
+    use super::*;
     use super::MaybeOwned::{Owned, Slice};
+    use std::slice::{AsSlice, SliceExt};
+    use string::{String, ToString};
+    use vec::Vec;
+    use slice::CloneSliceExt;
 
-    #[test]
-    fn test_eq_slice() {
-        assert!((eq_slice("foobar".slice(0, 3), "foo")));
-        assert!((eq_slice("barfoo".slice(3, 6), "foo")));
-        assert!((!eq_slice("foo1", "foo2")));
-    }
+    use unicode::char::UnicodeChar;
 
     #[test]
     fn test_le() {
@@ -2267,6 +2295,7 @@ mod tests {
 
     #[test]
     fn test_is_utf16() {
+        use unicode::str::is_utf16;
         macro_rules! pos ( ($($e:expr),*) => { { $(assert!(is_utf16($e));)* } });
 
         // non-surrogates
@@ -3229,13 +3258,13 @@ mod tests {
     #[test]
     fn test_str_from_utf8() {
         let xs = b"hello";
-        assert_eq!(from_utf8(xs), Some("hello"));
+        assert_eq!(from_utf8(xs), Ok("hello"));
 
         let xs = "ศไทย中华Việt Nam".as_bytes();
-        assert_eq!(from_utf8(xs), Some("ศไทย中华Việt Nam"));
+        assert_eq!(from_utf8(xs), Ok("ศไทย中华Việt Nam"));
 
         let xs = b"hello\xFF";
-        assert_eq!(from_utf8(xs), None);
+        assert_eq!(from_utf8(xs), Err(Utf8Error::TooShort));
     }
 
     #[test]
@@ -3284,8 +3313,8 @@ mod tests {
 
     #[test]
     fn test_maybe_owned_into_string() {
-        assert_eq!(Slice("abcde").into_string(), String::from_str("abcde"));
-        assert_eq!(Owned(String::from_str("abcde")).into_string(),
+        assert_eq!(Slice("abcde").to_string(), String::from_str("abcde"));
+        assert_eq!(Owned(String::from_str("abcde")).to_string(),
                    String::from_str("abcde"));
     }
 
diff --git a/src/libcollections/string.rs b/src/libcollections/string.rs
index 0e2b514d92d..6c2659b13f7 100644
--- a/src/libcollections/string.rs
+++ b/src/libcollections/string.rs
@@ -26,7 +26,7 @@ use unicode::str as unicode_str;
 use unicode::str::Utf16Item;
 
 use slice::CloneSliceExt;
-use str::{mod, CharRange, FromStr, StrExt, Owned, Utf8Error};
+use str::{mod, CharRange, FromStr, Utf8Error};
 use vec::{DerefVec, Vec, as_vec};
 
 /// A growable string stored as a UTF-8 encoded buffer.
@@ -94,13 +94,16 @@ impl String {
     /// # Examples
     ///
     /// ```rust
+    /// # #![allow(deprecated)]
+    /// use std::str::Utf8Error;
+    ///
     /// let hello_vec = vec![104, 101, 108, 108, 111];
     /// let s = String::from_utf8(hello_vec);
     /// assert_eq!(s, Ok("hello".to_string()));
     ///
     /// let invalid_vec = vec![240, 144, 128];
     /// let s = String::from_utf8(invalid_vec);
-    /// assert_eq!(s, Err(vec![240, 144, 128]));
+    /// assert_eq!(s, Err((vec![240, 144, 128], Utf8Error::TooShort)));
     /// ```
     #[inline]
     #[unstable = "error type may change"]
@@ -833,7 +836,7 @@ impl Default for String {
 #[experimental = "waiting on Show stabilization"]
 impl fmt::Show for String {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        (*self).fmt(f)
+        (**self).fmt(f)
     }
 }
 
@@ -841,7 +844,7 @@ impl fmt::Show for String {
 impl<H: hash::Writer> hash::Hash<H> for String {
     #[inline]
     fn hash(&self, hasher: &mut H) {
-        (*self).hash(hasher)
+        (**self).hash(hasher)
     }
 }
 
@@ -1026,6 +1029,7 @@ mod tests {
     use prelude::*;
     use test::Bencher;
 
+    use str::{StrExt, Utf8Error};
     use str;
     use super::as_string;
 
@@ -1044,14 +1048,16 @@ mod tests {
     #[test]
     fn test_from_utf8() {
         let xs = b"hello".to_vec();
-        assert_eq!(String::from_utf8(xs), Ok(String::from_str("hello")));
+        assert_eq!(String::from_utf8(xs),
+                   Ok(String::from_str("hello")));
 
         let xs = "ศไทย中华Việt Nam".as_bytes().to_vec();
-        assert_eq!(String::from_utf8(xs), Ok(String::from_str("ศไทย中华Việt Nam")));
+        assert_eq!(String::from_utf8(xs),
+                   Ok(String::from_str("ศไทย中华Việt Nam")));
 
         let xs = b"hello\xFF".to_vec();
         assert_eq!(String::from_utf8(xs),
-                   Err(b"hello\xFF".to_vec()));
+                   Err((b"hello\xFF".to_vec(), Utf8Error::TooShort)));
     }
 
     #[test]
@@ -1141,7 +1147,7 @@ mod tests {
             let s_as_utf16 = s.utf16_units().collect::<Vec<u16>>();
             let u_as_string = String::from_utf16(u.as_slice()).unwrap();
 
-            assert!(str::is_utf16(u.as_slice()));
+            assert!(::unicode::str::is_utf16(u.as_slice()));
             assert_eq!(s_as_utf16, u);
 
             assert_eq!(u_as_string, s);
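
As the doc example and test above show, `String::from_utf8` now fails with a tuple of the original bytes plus the `Utf8Error`, so the allocation can be recovered rather than lost. A small sketch of handling that shape (later Rust replaces the tuple with a dedicated error struct; this follows the form in the hunks above):

```rust
fn main() {
    let bytes = b"hello\xFF".to_vec();
    match String::from_utf8(bytes) {
        Ok(s) => println!("valid: {}", s),
        Err((recovered, _err)) => {
            // The original allocation comes back untouched alongside the error.
            println!("invalid UTF-8; recovered {} bytes", recovered.len());
        }
    }
}
```
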
diff --git a/src/libcore/option.rs b/src/libcore/option.rs
index 8adbba8b94b..d831a57893b 100644
--- a/src/libcore/option.rs
+++ b/src/libcore/option.rs
@@ -718,15 +718,15 @@ impl<T: Default> Option<T> {
     /// # Example
     ///
     /// Convert a string to an integer, turning poorly-formed strings
-    /// into 0 (the default value for integers). `from_str` converts
+    /// into 0 (the default value for integers). `parse` converts
     /// a string to any other type that implements `FromStr`, returning
     /// `None` on error.
     ///
     /// ```
     /// let good_year_from_input = "1909";
     /// let bad_year_from_input = "190blarg";
-    /// let good_year = from_str(good_year_from_input).unwrap_or_default();
-    /// let bad_year = from_str(bad_year_from_input).unwrap_or_default();
+    /// let good_year = good_year_from_input.parse().unwrap_or_default();
+    /// let bad_year = bad_year_from_input.parse().unwrap_or_default();
     ///
     /// assert_eq!(1909i, good_year);
     /// assert_eq!(0i, bad_year);
diff --git a/src/libcore/result.rs b/src/libcore/result.rs
index b59734a7d98..8014b4dc89d 100644
--- a/src/libcore/result.rs
+++ b/src/libcore/result.rs
@@ -458,7 +458,7 @@ impl<T, E> Result<T, E> {
     ///     let line: IoResult<String> = buffer.read_line();
     ///     // Convert the string line to a number using `map` and `from_str`
     ///     let val: IoResult<int> = line.map(|line| {
-    ///         from_str::<int>(line.as_slice().trim_right()).unwrap_or(0)
+    ///         line.as_slice().trim_right().parse::<int>().unwrap_or(0)
     ///     });
     ///     // Add the value if there were no errors, otherwise add 0
     ///     sum += val.ok().unwrap_or(0);
diff --git a/src/libcore/str.rs b/src/libcore/str.rs
index 60d4262a9b1..bfccc1e3f73 100644
--- a/src/libcore/str.rs
+++ b/src/libcore/str.rs
@@ -18,14 +18,13 @@
 
 use self::Searcher::{Naive, TwoWay, TwoWayLong};
 
-use char::{mod, Char};
 use clone::Clone;
 use cmp::{mod, Eq};
 use default::Default;
 use iter::range;
 use iter::{DoubleEndedIteratorExt, ExactSizeIterator};
 use iter::{Map, Iterator, IteratorExt, DoubleEndedIterator};
-use kinds::{Copy, Sized};
+use kinds::Sized;
 use mem;
 use num::Int;
 use ops::{Fn, FnMut};
@@ -60,9 +59,9 @@ impl FromStr for bool {
     /// # Examples
     ///
     /// ```rust
-    /// assert_eq!(from_str::<bool>("true"), Some(true));
-    /// assert_eq!(from_str::<bool>("false"), Some(false));
-    /// assert_eq!(from_str::<bool>("not even a boolean"), None);
+    /// assert_eq!("true".parse(), Some(true));
+    /// assert_eq!("false".parse(), Some(false));
+    /// assert_eq!("not even a boolean".parse::<bool>(), None);
     /// ```
     #[inline]
     fn from_str(s: &str) -> Option<bool> {
@@ -79,6 +78,7 @@ Section: Creating a string
 */
 
 /// Errors which can occur when attempting to interpret a byte slice as a `str`.
+#[deriving(Copy, Eq, PartialEq, Clone)]
 pub enum Utf8Error {
     /// An invalid byte was detected at the byte offset given.
     ///
@@ -334,6 +334,7 @@ impl<'a> DoubleEndedIterator<(uint, char)> for CharIndices<'a> {
 /// External iterator for a string's bytes.
 /// Use with the `std::iter` module.
 #[stable]
+#[deriving(Clone)]
 pub struct Bytes<'a> {
     inner: Map<&'a u8, u8, slice::Items<'a, u8>, BytesFn>,
 }
@@ -946,24 +947,7 @@ pub fn is_utf8(v: &[u8]) -> bool {
     run_utf8_validation_iterator(&mut v.iter()).is_ok()
 }
 
-/// Return a slice of `v` ending at (and not including) the first NUL
-/// (0).
-///
-/// # Example
-///
-/// ```rust
-/// use std::str;
-///
-/// // "abcd"
-/// let mut v = ['a' as u16, 'b' as u16, 'c' as u16, 'd' as u16];
-/// // no NULs so no change
-/// assert_eq!(str::truncate_utf16_at_nul(&v), v.as_slice());
-///
-/// // "ab\0d"
-/// v[2] = 0;
-/// let b: &[_] = &['a' as u16, 'b' as u16];
-/// assert_eq!(str::truncate_utf16_at_nul(&v), b);
-/// ```
+/// Deprecated function
 #[deprecated = "this function will be removed"]
 pub fn truncate_utf16_at_nul<'a>(v: &'a [u16]) -> &'a [u16] {
     match v.iter().position(|c| *c == 0) {
@@ -1595,6 +1579,8 @@ impl<'a> Default for &'a str {
 impl<'a> Iterator<&'a str> for Lines<'a> {
     #[inline]
     fn next(&mut self) -> Option<&'a str> { self.inner.next() }
+    #[inline]
+    fn size_hint(&self) -> (uint, Option<uint>) { self.inner.size_hint() }
 }
 impl<'a> DoubleEndedIterator<&'a str> for Lines<'a> {
     #[inline]
@@ -1603,6 +1589,8 @@ impl<'a> DoubleEndedIterator<&'a str> for Lines<'a> {
 impl<'a> Iterator<&'a str> for LinesAny<'a> {
     #[inline]
     fn next(&mut self) -> Option<&'a str> { self.inner.next() }
+    #[inline]
+    fn size_hint(&self) -> (uint, Option<uint>) { self.inner.size_hint() }
 }
 impl<'a> DoubleEndedIterator<&'a str> for LinesAny<'a> {
     #[inline]
@@ -1611,6 +1599,8 @@ impl<'a> DoubleEndedIterator<&'a str> for LinesAny<'a> {
 impl<'a> Iterator<u8> for Bytes<'a> {
     #[inline]
     fn next(&mut self) -> Option<u8> { self.inner.next() }
+    #[inline]
+    fn size_hint(&self) -> (uint, Option<uint>) { self.inner.size_hint() }
 }
 impl<'a> DoubleEndedIterator<u8> for Bytes<'a> {
     #[inline]
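
Because `Utf8Error` now derives `Copy`, `Eq`, `PartialEq`, and `Clone`, callers of `from_utf8` can compare and match on the error value directly. A brief sketch using only the `TooShort` variant that appears in the tests above, treating any other variant generically:

```rust
use std::str::{from_utf8, Utf8Error};

fn main() {
    // The collections tests above expect `TooShort` for this input.
    match from_utf8(b"hello\xFF") {
        Ok(s) => println!("valid UTF-8: {}", s),
        Err(Utf8Error::TooShort) => println!("input ended mid-sequence"),
        Err(_) => println!("some other UTF-8 error"),
    }
}
```
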
diff --git a/src/libcoretest/lib.rs b/src/libcoretest/lib.rs
index 05d862d7bc7..44029ebb7fa 100644
--- a/src/libcoretest/lib.rs
+++ b/src/libcoretest/lib.rs
@@ -13,6 +13,7 @@
 extern crate core;
 extern crate test;
 extern crate libc;
+extern crate unicode;
 
 mod any;
 mod atomic;
diff --git a/src/libcoretest/str.rs b/src/libcoretest/str.rs
index 763fcccdbfd..63d6e14a4a6 100644
--- a/src/libcoretest/str.rs
+++ b/src/libcoretest/str.rs
@@ -117,7 +117,7 @@ fn test_rev_split_char_iterator_no_trailing() {
 
 #[test]
 fn test_utf16_code_units() {
-    use core::str::Utf16Encoder;
+    use unicode::str::Utf16Encoder;
     assert_eq!(Utf16Encoder::new(vec!['é', '\U0001F4A9'].into_iter()).collect::<Vec<u16>>(),
                vec![0xE9, 0xD83D, 0xDCA9])
 }
diff --git a/src/libfmt_macros/lib.rs b/src/libfmt_macros/lib.rs
index 106e467c169..c284fb7c9e3 100644
--- a/src/libfmt_macros/lib.rs
+++ b/src/libfmt_macros/lib.rs
@@ -23,7 +23,8 @@
        html_root_url = "http://doc.rust-lang.org/nightly/",
        html_playground_url = "http://play.rust-lang.org/")]
 
-#![feature(macro_rules, globs)]
+#![feature(macro_rules, globs, slicing_syntax)]
+
 pub use self::Piece::*;
 pub use self::Position::*;
 pub use self::Alignment::*;
@@ -136,7 +137,7 @@ pub enum Count<'a> {
 /// necessary there's probably lots of room for improvement performance-wise.
 pub struct Parser<'a> {
     input: &'a str,
-    cur: str::CharOffsets<'a>,
+    cur: str::CharIndices<'a>,
     /// Error messages accumulated during parsing
     pub errors: Vec<string::String>,
 }
@@ -208,13 +209,11 @@ impl<'a> Parser<'a> {
                 self.cur.next();
             }
             Some((_, other)) => {
-                self.err(format!("expected `{}`, found `{}`",
-                                 c,
-                                 other).as_slice());
+                self.err(format!("expected `{}`, found `{}`", c, other)[]);
             }
             None => {
                 self.err(format!("expected `{}` but string was terminated",
-                                 c).as_slice());
+                                 c)[]);
             }
         }
     }
@@ -237,12 +236,12 @@ impl<'a> Parser<'a> {
             // we may not consume the character, so clone the iterator
             match self.cur.clone().next() {
                 Some((pos, '}')) | Some((pos, '{')) => {
-                    return self.input.slice(start, pos);
+                    return self.input[start..pos];
                 }
                 Some(..) => { self.cur.next(); }
                 None => {
                     self.cur.next();
-                    return self.input.slice(start, self.input.len());
+                    return self.input[start..self.input.len()];
                 }
             }
         }
@@ -282,7 +281,7 @@ impl<'a> Parser<'a> {
             flags: 0,
             precision: CountImplied,
             width: CountImplied,
-            ty: self.input.slice(0, 0),
+            ty: self.input[0..0],
         };
         if !self.consume(':') { return spec }
 
@@ -391,7 +390,7 @@ impl<'a> Parser<'a> {
                 self.cur.next();
                 pos
             }
-            Some(..) | None => { return self.input.slice(0, 0); }
+            Some(..) | None => { return self.input[0..0]; }
         };
         let mut end;
         loop {
@@ -403,7 +402,7 @@ impl<'a> Parser<'a> {
                 None => { end = self.input.len(); break }
             }
         }
-        self.input.slice(start, end)
+        self.input[start..end]
     }
 
     /// Optionally parses an integer at the current position. This doesn't deal
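
The other recurring change in these hunks is the switch from `.as_slice()` and `.slice(start, end)` to the gated slicing syntax, which is why crates such as `libfmt_macros` gain `#![feature(slicing_syntax)]`. Roughly, on a nightly of that vintage (the helper function here is invented for illustration):

```rust
#![feature(slicing_syntax)]

fn takes_str(msg: &str) { println!("{}", msg); }

fn main() {
    let owned = format!("expected `{}`, found `{}`", 'a', 'b');

    // `owned[]` replaces `owned.as_slice()` ...
    takes_str(owned[]);

    // ... and `owned[start..end]` replaces `owned.slice(start, end)`.
    takes_str(owned[0..8]);
}
```
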
diff --git a/src/libgetopts/lib.rs b/src/libgetopts/lib.rs
index e362c67cc50..0426f269376 100644
--- a/src/libgetopts/lib.rs
+++ b/src/libgetopts/lib.rs
@@ -85,7 +85,7 @@
        html_favicon_url = "http://www.rust-lang.org/favicon.ico",
        html_root_url = "http://doc.rust-lang.org/nightly/",
        html_playground_url = "http://play.rust-lang.org/")]
-#![feature(globs, phase)]
+#![feature(globs, phase, slicing_syntax)]
 #![feature(unboxed_closures)]
 #![deny(missing_docs)]
 
@@ -101,9 +101,8 @@ use self::Whitespace::*;
 use self::LengthLimit::*;
 
 use std::fmt;
-use std::result::Result::{Err, Ok};
+use std::iter::repeat;
 use std::result;
-use std::string::String;
 
 /// Name of an option. Either a string or a single char.
 #[deriving(Clone, PartialEq, Eq)]
@@ -282,7 +281,7 @@ impl OptGroup {
 
 impl Matches {
     fn opt_vals(&self, nm: &str) -> Vec<Optval> {
-        match find_opt(self.opts.as_slice(), Name::from_str(nm)) {
+        match find_opt(self.opts[], Name::from_str(nm)) {
             Some(id) => self.vals[id].clone(),
             None => panic!("No option '{}' defined", nm)
         }
@@ -310,8 +309,7 @@ impl Matches {
     /// Returns true if any of several options were matched.
     pub fn opts_present(&self, names: &[String]) -> bool {
         for nm in names.iter() {
-            match find_opt(self.opts.as_slice(),
-                           Name::from_str(nm.as_slice())) {
+            match find_opt(self.opts.as_slice(), Name::from_str(nm[])) {
                 Some(id) if !self.vals[id].is_empty() => return true,
                 _ => (),
             };
@@ -322,7 +320,7 @@ impl Matches {
     /// Returns the string argument supplied to one of several matching options or `None`.
     pub fn opts_str(&self, names: &[String]) -> Option<String> {
         for nm in names.iter() {
-            match self.opt_val(nm.as_slice()) {
+            match self.opt_val(nm[]) {
                 Some(Val(ref s)) => return Some(s.clone()),
                 _ => ()
             }
@@ -587,7 +585,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
     while i < l {
         let cur = args[i].clone();
         let curlen = cur.len();
-        if !is_arg(cur.as_slice()) {
+        if !is_arg(cur[]) {
             free.push(cur);
         } else if cur == "--" {
             let mut j = i + 1;
@@ -597,7 +595,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
             let mut names;
             let mut i_arg = None;
             if cur.as_bytes()[1] == b'-' {
-                let tail = cur.slice(2, curlen);
+                let tail = cur[2..curlen];
                 let tail_eq: Vec<&str> = tail.split('=').collect();
                 if tail_eq.len() <= 1 {
                     names = vec!(Long(tail.to_string()));
@@ -633,7 +631,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
                     };
 
                     if arg_follows && range.next < curlen {
-                        i_arg = Some(cur.slice(range.next, curlen).to_string());
+                        i_arg = Some(cur[range.next..curlen].to_string());
                         break;
                     }
 
@@ -660,7 +658,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
                             .push(Val((i_arg.clone())
                             .unwrap()));
                     } else if name_pos < names.len() || i + 1 == l ||
-                            is_arg(args[i + 1].as_slice()) {
+                            is_arg(args[i + 1][]) {
                         vals[optid].push(Given);
                     } else {
                         i += 1;
@@ -702,7 +700,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
 /// Derive a usage message from a set of long options.
 pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
 
-    let desc_sep = format!("\n{}", " ".repeat(24));
+    let desc_sep = format!("\n{}", repeat(" ").take(24).collect::<String>());
 
     let rows = opts.iter().map(|optref| {
         let OptGroup{short_name,
@@ -712,14 +710,14 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
                      hasarg,
                      ..} = (*optref).clone();
 
-        let mut row = " ".repeat(4);
+        let mut row = repeat(" ").take(4).collect::<String>();
 
         // short option
         match short_name.len() {
             0 => {}
             1 => {
                 row.push('-');
-                row.push_str(short_name.as_slice());
+                row.push_str(short_name[]);
                 row.push(' ');
             }
             _ => panic!("the short name should only be 1 ascii char long"),
@@ -730,7 +728,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
             0 => {}
             _ => {
                 row.push_str("--");
-                row.push_str(long_name.as_slice());
+                row.push_str(long_name[]);
                 row.push(' ');
             }
         }
@@ -738,23 +736,23 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
         // arg
         match hasarg {
             No => {}
-            Yes => row.push_str(hint.as_slice()),
+            Yes => row.push_str(hint[]),
             Maybe => {
                 row.push('[');
-                row.push_str(hint.as_slice());
+                row.push_str(hint[]);
                 row.push(']');
             }
         }
 
         // FIXME: #5516 should be graphemes not codepoints
         // here we just need to indent the start of the description
-        let rowlen = row.char_len();
+        let rowlen = row.chars().count();
         if rowlen < 24 {
             for _ in range(0, 24 - rowlen) {
                 row.push(' ');
             }
         } else {
-            row.push_str(desc_sep.as_slice())
+            row.push_str(desc_sep[]);
         }
 
         // Normalize desc to contain words separated by one space character
@@ -766,16 +764,14 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
 
         // FIXME: #5516 should be graphemes not codepoints
         let mut desc_rows = Vec::new();
-        each_split_within(desc_normalized_whitespace.as_slice(),
-                          54,
-                          |substr| {
+        each_split_within(desc_normalized_whitespace[], 54, |substr| {
             desc_rows.push(substr.to_string());
             true
         });
 
         // FIXME: #5516 should be graphemes not codepoints
         // wrapped description
-        row.push_str(desc_rows.connect(desc_sep.as_slice()).as_slice());
+        row.push_str(desc_rows.connect(desc_sep[])[]);
 
         row
     });
@@ -794,10 +790,10 @@ fn format_option(opt: &OptGroup) -> String {
     // Use short_name is possible, but fallback to long_name.
     if opt.short_name.len() > 0 {
         line.push('-');
-        line.push_str(opt.short_name.as_slice());
+        line.push_str(opt.short_name[]);
     } else {
         line.push_str("--");
-        line.push_str(opt.long_name.as_slice());
+        line.push_str(opt.long_name[]);
     }
 
     if opt.hasarg != No {
@@ -805,7 +801,7 @@ fn format_option(opt: &OptGroup) -> String {
         if opt.hasarg == Maybe {
             line.push('[');
         }
-        line.push_str(opt.hint.as_slice());
+        line.push_str(opt.hint[]);
         if opt.hasarg == Maybe {
             line.push(']');
         }
@@ -827,8 +823,7 @@ pub fn short_usage(program_name: &str, opts: &[OptGroup]) -> String {
     line.push_str(opts.iter()
                       .map(format_option)
                       .collect::<Vec<String>>()
-                      .connect(" ")
-                      .as_slice());
+                      .connect(" ")[]);
     line
 }
 
@@ -891,9 +886,9 @@ fn each_split_within<F>(ss: &str, lim: uint, mut it: F) -> bool where
             (B, Cr, UnderLim) => { B }
             (B, Cr, OverLim)  if (i - last_start + 1) > lim
                             => panic!("word starting with {} longer than limit!",
-                                    ss.slice(last_start, i + 1)),
+                                    ss[last_start..i + 1]),
             (B, Cr, OverLim)  => {
-                *cont = it(ss.slice(slice_start, last_end));
+                *cont = it(ss[slice_start..last_end]);
                 slice_start = last_start;
                 B
             }
@@ -903,7 +898,7 @@ fn each_split_within<F>(ss: &str, lim: uint, mut it: F) -> bool where
             }
             (B, Ws, OverLim)  => {
                 last_end = i;
-                *cont = it(ss.slice(slice_start, last_end));
+                *cont = it(ss[slice_start..last_end]);
                 A
             }
 
@@ -912,14 +907,14 @@ fn each_split_within<F>(ss: &str, lim: uint, mut it: F) -> bool where
                 B
             }
             (C, Cr, OverLim)  => {
-                *cont = it(ss.slice(slice_start, last_end));
+                *cont = it(ss[slice_start..last_end]);
                 slice_start = i;
                 last_start = i;
                 last_end = i;
                 B
             }
             (C, Ws, OverLim)  => {
-                *cont = it(ss.slice(slice_start, last_end));
+                *cont = it(ss[slice_start..last_end]);
                 A
             }
             (C, Ws, UnderLim) => {
diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs
index 34e19aa4a03..ce3df1090bd 100644
--- a/src/libgraphviz/lib.rs
+++ b/src/libgraphviz/lib.rs
@@ -448,8 +448,8 @@ impl<'a> LabelText<'a> {
     /// Renders text as string suitable for a label in a .dot file.
     pub fn escape(&self) -> String {
         match self {
-            &LabelStr(ref s) => (&**s).escape_default(),
-            &EscStr(ref s) => LabelText::escape_str(s.as_slice()),
+            &LabelStr(ref s) => s.escape_default(),
+            &EscStr(ref s) => LabelText::escape_str(s[]),
         }
     }
 
@@ -475,10 +475,10 @@ impl<'a> LabelText<'a> {
 
     /// Puts `suffix` on a line below this label, with a blank line separator.
     pub fn suffix_line(self, suffix: LabelText) -> LabelText<'static> {
-        let mut prefix = self.pre_escaped_content().into_string();
+        let mut prefix = self.pre_escaped_content().into_owned();
         let suffix = suffix.pre_escaped_content();
         prefix.push_str(r"\n\n");
-        prefix.push_str(suffix.as_slice());
+        prefix.push_str(suffix[]);
         EscStr(prefix.into_cow())
     }
 }
@@ -671,7 +671,7 @@ mod tests {
 
     impl<'a> Labeller<'a, Node, &'a Edge> for LabelledGraph {
         fn graph_id(&'a self) -> Id<'a> {
-            Id::new(self.name.as_slice()).unwrap()
+            Id::new(self.name[]).unwrap()
         }
         fn node_id(&'a self, n: &Node) -> Id<'a> {
             id_name(n)
@@ -735,7 +735,7 @@ mod tests {
     fn test_input(g: LabelledGraph) -> IoResult<String> {
         let mut writer = Vec::new();
         render(&g, &mut writer).unwrap();
-        (&mut writer.as_slice()).read_to_string()
+        (&mut writer[]).read_to_string()
     }
 
     // All of the tests use raw-strings as the format for the expected outputs,
@@ -847,7 +847,7 @@ r#"digraph hasse_diagram {
                  edge(1, 3, ";"),    edge(2, 3, ";"   )));
 
         render(&g, &mut writer).unwrap();
-        let r = (&mut writer.as_slice()).read_to_string();
+        let r = (&mut writer[]).read_to_string();
 
         assert_eq!(r.unwrap(),
 r#"digraph syntax_tree {
diff --git a/src/liblog/directive.rs b/src/liblog/directive.rs
index d1db0ec89a1..2b25a64affe 100644
--- a/src/liblog/directive.rs
+++ b/src/liblog/directive.rs
@@ -23,7 +23,7 @@ pub static LOG_LEVEL_NAMES: [&'static str, ..4] = ["ERROR", "WARN", "INFO",
 
 /// Parse an individual log level that is either a number or a symbolic log level
 fn parse_log_level(level: &str) -> Option<u32> {
-    from_str::<u32>(level).or_else(|| {
+    level.parse::<u32>().or_else(|| {
         let pos = LOG_LEVEL_NAMES.iter().position(|&name| name.eq_ignore_ascii_case(level));
         pos.map(|p| p as u32 + 1)
     }).map(|p| cmp::min(p, ::MAX_LOG_LEVEL))
diff --git a/src/liblog/lib.rs b/src/liblog/lib.rs
index 2bf9af90271..bc655c219f3 100644
--- a/src/liblog/lib.rs
+++ b/src/liblog/lib.rs
@@ -164,7 +164,7 @@
        html_favicon_url = "http://www.rust-lang.org/favicon.ico",
        html_root_url = "http://doc.rust-lang.org/nightly/",
        html_playground_url = "http://play.rust-lang.org/")]
-#![feature(macro_rules, unboxed_closures)]
+#![feature(macro_rules, unboxed_closures, slicing_syntax)]
 #![deny(missing_docs)]
 
 extern crate regex;
@@ -280,7 +280,7 @@ pub fn log(level: u32, loc: &'static LogLocation, args: &fmt::Arguments) {
     // Test the literal string from args against the current filter, if there
     // is one.
     match unsafe { FILTER.as_ref() } {
-        Some(filter) if !filter.is_match(args.to_string().as_slice()) => return,
+        Some(filter) if !filter.is_match(args.to_string()[]) => return,
         _ => {}
     }
 
@@ -375,7 +375,7 @@ fn enabled(level: u32,
     // Search for the longest match, the vector is assumed to be pre-sorted.
     for directive in iter.rev() {
         match directive.name {
-            Some(ref name) if !module.starts_with(name.as_slice()) => {},
+            Some(ref name) if !module.starts_with(name[]) => {},
             Some(..) | None => {
                 return level <= directive.level
             }
@@ -390,7 +390,7 @@ fn enabled(level: u32,
 /// `Once` primitive (and this function is called from that primitive).
 fn init() {
     let (mut directives, filter) = match os::getenv("RUST_LOG") {
-        Some(spec) => directive::parse_logging_spec(spec.as_slice()),
+        Some(spec) => directive::parse_logging_spec(spec[]),
         None => (Vec::new(), None),
     };
 
diff --git a/src/libregex/parse.rs b/src/libregex/parse.rs
index 78558a32266..0cd8df73c37 100644
--- a/src/libregex/parse.rs
+++ b/src/libregex/parse.rs
@@ -286,7 +286,7 @@ impl<'a> Parser<'a> {
             true => Ok(()),
             false => {
                 self.err(format!("Expected {} but got EOF.",
-                                 expected).as_slice())
+                                 expected)[])
             }
         }
     }
@@ -295,10 +295,10 @@ impl<'a> Parser<'a> {
         match self.next_char() {
             true if self.cur() == expected => Ok(()),
             true => self.err(format!("Expected '{}' but got '{}'.",
-                                     expected, self.cur()).as_slice()),
+                                     expected, self.cur())[]),
             false => {
                 self.err(format!("Expected '{}' but got EOF.",
-                                 expected).as_slice())
+                                 expected)[])
             }
         }
     }
@@ -443,14 +443,14 @@ impl<'a> Parser<'a> {
                         Literal(c3, _) => c2 = c3, // allow literal escapes below
                         ast =>
                             return self.err(format!("Expected a literal, but got {}.",
-                                                    ast).as_slice()),
+                                                    ast)[]),
                     }
                 }
                 if c2 < c {
                     return self.err(format!("Invalid character class \
                                              range '{}-{}'",
                                             c,
-                                            c2).as_slice())
+                                            c2)[])
                 }
                 ranges.push((c, self.cur()))
             } else {
@@ -488,7 +488,7 @@ impl<'a> Parser<'a> {
                 FLAG_EMPTY
             };
         let name = self.slice(name_start, closer - 1);
-        match find_class(ASCII_CLASSES, name.as_slice()) {
+        match find_class(ASCII_CLASSES, name[]) {
             None => None,
             Some(ranges) => {
                 self.chari = closer;
@@ -513,7 +513,7 @@ impl<'a> Parser<'a> {
                     return self.err(format!("No closing brace for counted \
                                              repetition starting at position \
                                              {}.",
-                                            start).as_slice())
+                                            start)[])
                 }
             };
         self.chari = closer;
@@ -524,7 +524,7 @@ impl<'a> Parser<'a> {
         // Parse the min and max values from the regex.
         let (mut min, mut max): (uint, Option<uint>);
         if !inner.contains(",") {
-            min = try!(self.parse_uint(inner.as_slice()));
+            min = try!(self.parse_uint(inner[]));
             max = Some(min);
         } else {
             let pieces: Vec<&str> = inner.splitn(1, ',').collect();
@@ -546,19 +546,19 @@ impl<'a> Parser<'a> {
         if min > MAX_REPEAT {
             return self.err(format!(
                 "{} exceeds maximum allowed repetitions ({})",
-                min, MAX_REPEAT).as_slice());
+                min, MAX_REPEAT)[]);
         }
         if max.is_some() {
             let m = max.unwrap();
             if m > MAX_REPEAT {
                 return self.err(format!(
                     "{} exceeds maximum allowed repetitions ({})",
-                    m, MAX_REPEAT).as_slice());
+                    m, MAX_REPEAT)[]);
             }
             if m < min {
                 return self.err(format!(
                     "Max repetitions ({}) cannot be smaller than min \
-                     repetitions ({}).", m, min).as_slice());
+                     repetitions ({}).", m, min)[]);
             }
         }
 
@@ -622,8 +622,7 @@ impl<'a> Parser<'a> {
                 Ok(AstClass(ranges, flags))
             }
             _ => {
-                self.err(format!("Invalid escape sequence '\\\\{}'",
-                                 c).as_slice())
+                self.err(format!("Invalid escape sequence '\\\\{}'", c)[])
             }
         }
     }
@@ -643,7 +642,7 @@ impl<'a> Parser<'a> {
                     Some(i) => i,
                     None => return self.err(format!(
                         "Missing '}}' for unclosed '{{' at position {}",
-                        self.chari).as_slice()),
+                        self.chari)[]),
                 };
             if closer - self.chari + 1 == 0 {
                 return self.err("No Unicode class name found.")
@@ -657,10 +656,10 @@ impl<'a> Parser<'a> {
             name = self.slice(self.chari + 1, self.chari + 2);
             self.chari += 1;
         }
-        match find_class(UNICODE_CLASSES, name.as_slice()) {
+        match find_class(UNICODE_CLASSES, name[]) {
             None => {
                 return self.err(format!("Could not find Unicode class '{}'",
-                                        name).as_slice())
+                                        name)[])
             }
             Some(ranges) => {
                 Ok(AstClass(ranges, negated | (self.flags & FLAG_NOCASE)))
@@ -683,11 +682,11 @@ impl<'a> Parser<'a> {
             }
         }
         let s = self.slice(start, end);
-        match num::from_str_radix::<u32>(s.as_slice(), 8) {
+        match num::from_str_radix::<u32>(s[], 8) {
             Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)),
             None => {
                 self.err(format!("Could not parse '{}' as octal number.",
-                                 s).as_slice())
+                                 s)[])
             }
         }
     }
@@ -705,12 +704,12 @@ impl<'a> Parser<'a> {
                 None => {
                     return self.err(format!("Missing '}}' for unclosed \
                                              '{{' at position {}",
-                                            start).as_slice())
+                                            start)[])
                 }
                 Some(i) => i,
             };
         self.chari = closer;
-        self.parse_hex_digits(self.slice(start, closer).as_slice())
+        self.parse_hex_digits(self.slice(start, closer)[])
     }
 
     // Parses a two-digit hex number.
@@ -730,8 +729,7 @@ impl<'a> Parser<'a> {
         match num::from_str_radix::<u32>(s, 16) {
             Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)),
             None => {
-                self.err(format!("Could not parse '{}' as hex number.",
-                                 s).as_slice())
+                self.err(format!("Could not parse '{}' as hex number.", s)[])
             }
         }
     }
@@ -757,7 +755,7 @@ impl<'a> Parser<'a> {
         }
         if self.names.contains(&name) {
             return self.err(format!("Duplicate capture group name '{}'.",
-                                    name).as_slice())
+                                    name)[])
         }
         self.names.push(name.clone());
         self.chari = closer;
@@ -791,7 +789,7 @@ impl<'a> Parser<'a> {
                     if sign < 0 {
                         return self.err(format!(
                             "Cannot negate flags twice in '{}'.",
-                            self.slice(start, self.chari + 1)).as_slice())
+                            self.slice(start, self.chari + 1))[])
                     }
                     sign = -1;
                     saw_flag = false;
@@ -802,7 +800,7 @@ impl<'a> Parser<'a> {
                         if !saw_flag {
                             return self.err(format!(
                                 "A valid flag does not follow negation in '{}'",
-                                self.slice(start, self.chari + 1)).as_slice())
+                                self.slice(start, self.chari + 1))[])
                         }
                         flags = flags ^ flags;
                     }
@@ -814,7 +812,7 @@ impl<'a> Parser<'a> {
                     return Ok(())
                 }
                 _ => return self.err(format!(
-                    "Unrecognized flag '{}'.", self.cur()).as_slice()),
+                    "Unrecognized flag '{}'.", self.cur())[]),
             }
         }
     }
@@ -908,11 +906,11 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_uint(&self, s: &str) -> Result<uint, Error> {
-        match from_str::<uint>(s) {
+        match s.parse::<uint>() {
             Some(i) => Ok(i),
             None => {
                 self.err(format!("Expected an unsigned integer but got '{}'.",
-                                 s).as_slice())
+                                 s)[])
             }
         }
     }
@@ -922,8 +920,7 @@ impl<'a> Parser<'a> {
             Some(c) => Ok(c),
             None => {
                 self.err(format!("Could not decode '{}' to unicode \
-                                  character.",
-                                 n).as_slice())
+                                  character.", n)[])
             }
         }
     }
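
A sketch of the error-message pattern repeated through these hunks: the `String` built by `format!` is sliced in place to satisfy a `&str` parameter, with the temporary living for the duration of the call. The `err` signature below is illustrative, not the parser's real one:

    fn err(msg: &str) -> Result<(), String> { Err(msg.to_string()) }

    fn report(pos: uint) -> Result<(), String> {
        // format!(..) yields a String; `[]` borrows it as &str just for this call.
        err(format!("Missing '}}' for unclosed '{{' at position {}", pos)[])
    }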
diff --git a/src/libregex/re.rs b/src/libregex/re.rs
index 151587e423a..4e23e92c77e 100644
--- a/src/libregex/re.rs
+++ b/src/libregex/re.rs
@@ -417,7 +417,7 @@ impl Regex {
     /// # extern crate regex; #[phase(plugin)] extern crate regex_macros;
     /// # fn main() {
     /// let re = regex!("[^01]+");
-    /// assert_eq!(re.replace("1078910", "").as_slice(), "1010");
+    /// assert_eq!(re.replace("1078910", ""), "1010");
     /// # }
     /// ```
     ///
@@ -435,7 +435,7 @@ impl Regex {
     /// let result = re.replace("Springsteen, Bruce", |&: caps: &Captures| {
     ///     format!("{} {}", caps.at(2).unwrap_or(""), caps.at(1).unwrap_or(""))
     /// });
-    /// assert_eq!(result.as_slice(), "Bruce Springsteen");
+    /// assert_eq!(result, "Bruce Springsteen");
     /// # }
     /// ```
     ///
@@ -450,7 +450,7 @@ impl Regex {
     /// # fn main() {
     /// let re = regex!(r"(?P<last>[^,\s]+),\s+(?P<first>\S+)");
     /// let result = re.replace("Springsteen, Bruce", "$first $last");
-    /// assert_eq!(result.as_slice(), "Bruce Springsteen");
+    /// assert_eq!(result, "Bruce Springsteen");
     /// # }
     /// ```
     ///
@@ -469,7 +469,7 @@ impl Regex {
     ///
     /// let re = regex!(r"(?P<last>[^,\s]+),\s+(\S+)");
     /// let result = re.replace("Springsteen, Bruce", NoExpand("$2 $last"));
-    /// assert_eq!(result.as_slice(), "$2 $last");
+    /// assert_eq!(result, "$2 $last");
     /// # }
     /// ```
     pub fn replace<R: Replacer>(&self, text: &str, rep: R) -> String {
@@ -505,19 +505,19 @@ impl Regex {
             }
 
             let (s, e) = cap.pos(0).unwrap(); // captures only reports matches
-            new.push_str(text.slice(last_match, s));
-            new.push_str(rep.reg_replace(&cap).as_slice());
+            new.push_str(text[last_match..s]);
+            new.push_str(rep.reg_replace(&cap)[]);
             last_match = e;
         }
-        new.push_str(text.slice(last_match, text.len()));
+        new.push_str(text[last_match..text.len()]);
         return new;
     }
 
     /// Returns the original string of this regex.
     pub fn as_str<'a>(&'a self) -> &'a str {
         match *self {
-            Dynamic(ExDynamic { ref original, .. }) => original.as_slice(),
-            Native(ExNative { ref original, .. }) => original.as_slice(),
+            Dynamic(ExDynamic { ref original, .. }) => original[],
+            Native(ExNative { ref original, .. }) => original[],
         }
     }
 
@@ -608,13 +608,13 @@ impl<'r, 't> Iterator<&'t str> for RegexSplits<'r, 't> {
                 if self.last >= text.len() {
                     None
                 } else {
-                    let s = text.slice(self.last, text.len());
+                    let s = text[self.last..text.len()];
                     self.last = text.len();
                     Some(s)
                 }
             }
             Some((s, e)) => {
-                let matched = text.slice(self.last, s);
+                let matched = text[self.last..s];
                 self.last = e;
                 Some(matched)
             }
@@ -642,7 +642,7 @@ impl<'r, 't> Iterator<&'t str> for RegexSplitsN<'r, 't> {
         } else {
             self.cur += 1;
             if self.cur >= self.limit {
-                Some(text.slice(self.splits.last, text.len()))
+                Some(text[self.splits.last..text.len()])
             } else {
                 self.splits.next()
             }
@@ -769,13 +769,13 @@ impl<'t> Captures<'t> {
             let pre = refs.at(1).unwrap_or("");
             let name = refs.at(2).unwrap_or("");
             format!("{}{}", pre,
-                    match from_str::<uint>(name.as_slice()) {
+                    match name.parse::<uint>() {
                 None => self.name(name).unwrap_or("").to_string(),
                 Some(i) => self.at(i).unwrap_or("").to_string(),
             })
         });
         let re = Regex::new(r"\$\$").unwrap();
-        re.replace_all(text.as_slice(), NoExpand("$"))
+        re.replace_all(text[], NoExpand("$"))
     }
 
     /// Returns the number of captured groups.
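
A sketch of the range form used in the replace/split code above, on the same nightly (`slicing_syntax` provides both `s[]` and `s[a..b]`); indices are byte offsets, exactly as in the old `slice` calls:

    let text = "Springsteen, Bruce";
    // s[a..b] is the half-open byte range, replacing text.slice(a, b):
    let last: &str = text[0..11];           // "Springsteen"
    let rest: &str = text[13..text.len()];  // "Bruce"
    // String results can now be compared against &str literals directly:
    assert_eq!(format!("{} {}", rest, last), "Bruce Springsteen");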
diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs
index fddd49c8d88..0fd69ea25bc 100644
--- a/src/librustc/lint/builtin.rs
+++ b/src/librustc/lint/builtin.rs
@@ -250,10 +250,12 @@ impl LintPass for TypeLimits {
                         let (min, max) = float_ty_range(t);
                         let lit_val: f64 = match lit.node {
                             ast::LitFloat(ref v, _) |
-                            ast::LitFloatUnsuffixed(ref v) => match from_str(v.get()) {
-                                Some(f) => f,
-                                None => return
-                            },
+                            ast::LitFloatUnsuffixed(ref v) => {
+                                match v.parse() {
+                                    Some(f) => f,
+                                    None => return
+                                }
+                            }
                             _ => panic!()
                         };
                         if lit_val < min || lit_val > max {
@@ -507,7 +509,7 @@ impl BoxPointers {
         if n_uniq > 0 {
             let s = ty_to_string(cx.tcx, ty);
             let m = format!("type uses owned (Box type) pointers: {}", s);
-            cx.span_lint(BOX_POINTERS, span, m.as_slice());
+            cx.span_lint(BOX_POINTERS, span, m[]);
         }
     }
 }
@@ -587,7 +589,7 @@ impl LintPass for RawPointerDeriving {
     }
 
     fn check_item(&mut self, cx: &Context, item: &ast::Item) {
-        if !attr::contains_name(item.attrs.as_slice(), "automatically_derived") {
+        if !attr::contains_name(item.attrs[], "automatically_derived") {
             return
         }
         let did = match item.node {
@@ -766,11 +768,11 @@ impl LintPass for UnusedResults {
             ty::ty_enum(did, _) => {
                 if ast_util::is_local(did) {
                     if let ast_map::NodeItem(it) = cx.tcx.map.get(did.node) {
-                        warned |= check_must_use(cx, it.attrs.as_slice(), s.span);
+                        warned |= check_must_use(cx, it.attrs[], s.span);
                     }
                 } else {
                     csearch::get_item_attrs(&cx.sess().cstore, did, |attrs| {
-                        warned |= check_must_use(cx, attrs.as_slice(), s.span);
+                        warned |= check_must_use(cx, attrs[], s.span);
                     });
                 }
             }
@@ -792,7 +794,7 @@ impl LintPass for UnusedResults {
                             msg.push_str(s.get());
                         }
                     }
-                    cx.span_lint(UNUSED_MUST_USE, sp, msg.as_slice());
+                    cx.span_lint(UNUSED_MUST_USE, sp, msg[]);
                     return true;
                 }
             }
@@ -838,7 +840,7 @@ impl NonCamelCaseTypes {
             } else {
                 format!("{} `{}` should have a camel case name such as `{}`", sort, s, c)
             };
-            cx.span_lint(NON_CAMEL_CASE_TYPES, span, m.as_slice());
+            cx.span_lint(NON_CAMEL_CASE_TYPES, span, m[]);
         }
     }
 }
@@ -978,7 +980,7 @@ impl NonSnakeCase {
         if !is_snake_case(ident) {
             cx.span_lint(NON_SNAKE_CASE, span,
                 format!("{} `{}` should have a snake case name such as `{}`",
-                        sort, s, to_snake_case(s.get())).as_slice());
+                        sort, s, to_snake_case(s.get()))[]);
         }
     }
 }
@@ -1065,7 +1067,7 @@ impl LintPass for NonUpperCaseGlobals {
                         format!("static constant `{}` should have an uppercase name \
                                  such as `{}`",
                                 s.get(), s.get().chars().map(|c| c.to_uppercase())
-                                .collect::<String>().as_slice()).as_slice());
+                                .collect::<String>()[])[]);
                 }
             }
             _ => {}
@@ -1082,7 +1084,7 @@ impl LintPass for NonUpperCaseGlobals {
                         format!("static constant in pattern `{}` should have an uppercase \
                                  name such as `{}`",
                                 s.get(), s.get().chars().map(|c| c.to_uppercase())
-                                    .collect::<String>().as_slice()).as_slice());
+                                    .collect::<String>()[])[]);
                 }
             }
             _ => {}
@@ -1107,7 +1109,7 @@ impl UnusedParens {
             if !necessary {
                 cx.span_lint(UNUSED_PARENS, value.span,
                              format!("unnecessary parentheses around {}",
-                                     msg).as_slice())
+                                     msg)[])
             }
         }
 
@@ -1209,7 +1211,7 @@ impl LintPass for UnusedImportBraces {
                                     let m = format!("braces around {} is unnecessary",
                                                     token::get_ident(*name).get());
                                     cx.span_lint(UNUSED_IMPORT_BRACES, view_item.span,
-                                                 m.as_slice());
+                                                 m[]);
                                 },
                                 _ => ()
                             }
@@ -1248,7 +1250,7 @@ impl LintPass for NonShorthandFieldPatterns {
                     if ident.node.as_str() == fieldpat.node.ident.as_str() {
                         cx.span_lint(NON_SHORTHAND_FIELD_PATTERNS, fieldpat.span,
                                      format!("the `{}:` in this pattern is redundant and can \
-                                              be removed", ident.node.as_str()).as_slice())
+                                              be removed", ident.node.as_str())[])
                     }
                 }
             }
@@ -1352,7 +1354,7 @@ impl LintPass for UnusedMut {
     fn check_expr(&mut self, cx: &Context, e: &ast::Expr) {
         if let ast::ExprMatch(_, ref arms, _) = e.node {
             for a in arms.iter() {
-                self.check_unused_mut_pat(cx, a.pats.as_slice())
+                self.check_unused_mut_pat(cx, a.pats[])
             }
         }
     }
@@ -1473,7 +1475,7 @@ impl MissingDoc {
         });
         if !has_doc {
             cx.span_lint(MISSING_DOCS, sp,
-                format!("missing documentation for {}", desc).as_slice());
+                format!("missing documentation for {}", desc)[]);
         }
     }
 }
@@ -1487,7 +1489,7 @@ impl LintPass for MissingDoc {
         let doc_hidden = self.doc_hidden() || attrs.iter().any(|attr| {
             attr.check_name("doc") && match attr.meta_item_list() {
                 None => false,
-                Some(l) => attr::contains_name(l.as_slice(), "hidden"),
+                Some(l) => attr::contains_name(l[], "hidden"),
             }
         });
         self.doc_hidden_stack.push(doc_hidden);
@@ -1509,7 +1511,7 @@ impl LintPass for MissingDoc {
     }
 
     fn check_crate(&mut self, cx: &Context, krate: &ast::Crate) {
-        self.check_missing_docs_attrs(cx, None, krate.attrs.as_slice(),
+        self.check_missing_docs_attrs(cx, None, krate.attrs[],
                                      krate.span, "crate");
     }
 
@@ -1523,7 +1525,7 @@ impl LintPass for MissingDoc {
             ast::ItemTy(..) => "a type alias",
             _ => return
         };
-        self.check_missing_docs_attrs(cx, Some(it.id), it.attrs.as_slice(),
+        self.check_missing_docs_attrs(cx, Some(it.id), it.attrs[],
                                      it.span, desc);
     }
 
@@ -1536,13 +1538,13 @@ impl LintPass for MissingDoc {
 
             // Otherwise, doc according to privacy. This will also check
             // doc for default methods defined on traits.
-            self.check_missing_docs_attrs(cx, Some(m.id), m.attrs.as_slice(),
+            self.check_missing_docs_attrs(cx, Some(m.id), m.attrs[],
                                           m.span, "a method");
         }
     }
 
     fn check_ty_method(&mut self, cx: &Context, tm: &ast::TypeMethod) {
-        self.check_missing_docs_attrs(cx, Some(tm.id), tm.attrs.as_slice(),
+        self.check_missing_docs_attrs(cx, Some(tm.id), tm.attrs[],
                                      tm.span, "a type method");
     }
 
@@ -1552,14 +1554,14 @@ impl LintPass for MissingDoc {
                 let cur_struct_def = *self.struct_def_stack.last()
                     .expect("empty struct_def_stack");
                 self.check_missing_docs_attrs(cx, Some(cur_struct_def),
-                                              sf.node.attrs.as_slice(), sf.span,
+                                              sf.node.attrs[], sf.span,
                                               "a struct field")
             }
         }
     }
 
     fn check_variant(&mut self, cx: &Context, v: &ast::Variant, _: &ast::Generics) {
-        self.check_missing_docs_attrs(cx, Some(v.node.id), v.node.attrs.as_slice(),
+        self.check_missing_docs_attrs(cx, Some(v.node.id), v.node.attrs[],
                                      v.span, "a variant");
         assert!(!self.in_variant);
         self.in_variant = true;
@@ -1675,7 +1677,7 @@ impl Stability {
             _ => format!("use of {} item", label)
         };
 
-        cx.span_lint(lint, span, msg.as_slice());
+        cx.span_lint(lint, span, msg[]);
     }
 
     fn is_internal(&self, cx: &Context, span: Span) -> bool {
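
Two idioms from these lint hunks in sketch form, with illustrative values; `parse` returns Option on this nightly, and `Vec<T>` is borrowed as `&[T]` with the same bracket syntax used for the attribute lists:

    // Target-typed parse: the f64 annotation selects the impl, no turbofish needed.
    let lit_val: f64 = match "2.5".parse() {
        Some(f) => f,
        None => 0.0,
    };
    assert_eq!(lit_val, 2.5);

    // Vec<T> -> &[T] through the full-range slice, as with item.attrs[] above:
    let attrs: Vec<String> = vec!["automatically_derived".to_string()];
    let as_slice: &[String] = attrs[];
    assert_eq!(as_slice.len(), 1u);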
diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs
index d8d9d653e62..ffae485364a 100644
--- a/src/librustc/lint/context.rs
+++ b/src/librustc/lint/context.rs
@@ -104,7 +104,7 @@ impl LintStore {
     }
 
     pub fn get_lints<'t>(&'t self) -> &'t [(&'static Lint, bool)] {
-        self.lints.as_slice()
+        self.lints[]
     }
 
     pub fn get_lint_groups<'t>(&'t self) -> Vec<(&'static str, Vec<LintId>, bool)> {
@@ -124,11 +124,11 @@ impl LintStore {
                 match (sess, from_plugin) {
                     // We load builtin lints first, so a duplicate is a compiler bug.
                     // Use early_error when handling -W help with no crate.
-                    (None, _) => early_error(msg.as_slice()),
-                    (Some(sess), false) => sess.bug(msg.as_slice()),
+                    (None, _) => early_error(msg[]),
+                    (Some(sess), false) => sess.bug(msg[]),
 
                     // A duplicate name from a plugin is a user error.
-                    (Some(sess), true)  => sess.err(msg.as_slice()),
+                    (Some(sess), true)  => sess.err(msg[]),
                 }
             }
 
@@ -149,11 +149,11 @@ impl LintStore {
             match (sess, from_plugin) {
                 // We load builtin lints first, so a duplicate is a compiler bug.
                 // Use early_error when handling -W help with no crate.
-                (None, _) => early_error(msg.as_slice()),
-                (Some(sess), false) => sess.bug(msg.as_slice()),
+                (None, _) => early_error(msg[]),
+                (Some(sess), false) => sess.bug(msg[]),
 
                 // A duplicate name from a plugin is a user error.
-                (Some(sess), true)  => sess.err(msg.as_slice()),
+                (Some(sess), true)  => sess.err(msg[]),
             }
         }
     }
@@ -260,8 +260,8 @@ impl LintStore {
                 let warning = format!("lint {} has been renamed to {}",
                                       lint_name, new_name);
                 match span {
-                    Some(span) => sess.span_warn(span, warning.as_slice()),
-                    None => sess.warn(warning.as_slice()),
+                    Some(span) => sess.span_warn(span, warning[]),
+                    None => sess.warn(warning[]),
                 };
                 Some(lint_id)
             }
@@ -271,13 +271,13 @@ impl LintStore {
 
     pub fn process_command_line(&mut self, sess: &Session) {
         for &(ref lint_name, level) in sess.opts.lint_opts.iter() {
-            match self.find_lint(lint_name.as_slice(), sess, None) {
+            match self.find_lint(lint_name[], sess, None) {
                 Some(lint_id) => self.set_level(lint_id, (level, CommandLine)),
                 None => {
                     match self.lint_groups.iter().map(|(&x, pair)| (x, pair.0.clone()))
                                                  .collect::<FnvHashMap<&'static str,
                                                                        Vec<LintId>>>()
-                                                 .get(lint_name.as_slice()) {
+                                                 .get(lint_name[]) {
                         Some(v) => {
                             v.iter()
                              .map(|lint_id: &LintId|
@@ -285,7 +285,7 @@ impl LintStore {
                              .collect::<Vec<()>>();
                         }
                         None => sess.err(format!("unknown {} flag: {}",
-                                                 level.as_str(), lint_name).as_slice()),
+                                                 level.as_str(), lint_name)[]),
                     }
                 }
             }
@@ -396,10 +396,10 @@ pub fn raw_emit_lint(sess: &Session, lint: &'static Lint,
     if level == Forbid { level = Deny; }
 
     match (level, span) {
-        (Warn, Some(sp)) => sess.span_warn(sp, msg.as_slice()),
-        (Warn, None)     => sess.warn(msg.as_slice()),
-        (Deny, Some(sp)) => sess.span_err(sp, msg.as_slice()),
-        (Deny, None)     => sess.err(msg.as_slice()),
+        (Warn, Some(sp)) => sess.span_warn(sp, msg[]),
+        (Warn, None)     => sess.warn(msg[]),
+        (Deny, Some(sp)) => sess.span_err(sp, msg[]),
+        (Deny, None)     => sess.err(msg[]),
         _ => sess.bug("impossible level in raw_emit_lint"),
     }
 
@@ -492,7 +492,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> {
                                 None => {
                                     self.span_lint(builtin::UNKNOWN_LINTS, span,
                                                format!("unknown `{}` attribute: `{}`",
-                                                       level.as_str(), lint_name).as_slice());
+                                                       level.as_str(), lint_name)[]);
                                     continue;
                                 }
                             }
@@ -508,7 +508,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> {
                     self.tcx.sess.span_err(span,
                                            format!("{}({}) overruled by outer forbid({})",
                                                    level.as_str(), lint_name,
-                                                   lint_name).as_slice());
+                                                   lint_name)[]);
                 } else if now != level {
                     let src = self.lints.get_level_source(lint_id).1;
                     self.level_stack.push((lint_id, (now, src)));
@@ -543,7 +543,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> {
 
 impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> {
     fn visit_item(&mut self, it: &ast::Item) {
-        self.with_lint_attrs(it.attrs.as_slice(), |cx| {
+        self.with_lint_attrs(it.attrs[], |cx| {
             run_lints!(cx, check_item, it);
             cx.visit_ids(|v| v.visit_item(it));
             visit::walk_item(cx, it);
@@ -551,14 +551,14 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> {
     }
 
     fn visit_foreign_item(&mut self, it: &ast::ForeignItem) {
-        self.with_lint_attrs(it.attrs.as_slice(), |cx| {
+        self.with_lint_attrs(it.attrs[], |cx| {
             run_lints!(cx, check_foreign_item, it);
             visit::walk_foreign_item(cx, it);
         })
     }
 
     fn visit_view_item(&mut self, i: &ast::ViewItem) {
-        self.with_lint_attrs(i.attrs.as_slice(), |cx| {
+        self.with_lint_attrs(i.attrs[], |cx| {
             run_lints!(cx, check_view_item, i);
             cx.visit_ids(|v| v.visit_view_item(i));
             visit::walk_view_item(cx, i);
@@ -584,7 +584,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> {
                 body: &'v ast::Block, span: Span, id: ast::NodeId) {
         match fk {
             visit::FkMethod(_, _, m) => {
-                self.with_lint_attrs(m.attrs.as_slice(), |cx| {
+                self.with_lint_attrs(m.attrs[], |cx| {
                     run_lints!(cx, check_fn, fk, decl, body, span, id);
                     cx.visit_ids(|v| {
                         v.visit_fn(fk, decl, body, span, id);
@@ -600,7 +600,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> {
     }
 
     fn visit_ty_method(&mut self, t: &ast::TypeMethod) {
-        self.with_lint_attrs(t.attrs.as_slice(), |cx| {
+        self.with_lint_attrs(t.attrs[], |cx| {
             run_lints!(cx, check_ty_method, t);
             visit::walk_ty_method(cx, t);
         })
@@ -617,14 +617,14 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> {
     }
 
     fn visit_struct_field(&mut self, s: &ast::StructField) {
-        self.with_lint_attrs(s.node.attrs.as_slice(), |cx| {
+        self.with_lint_attrs(s.node.attrs[], |cx| {
             run_lints!(cx, check_struct_field, s);
             visit::walk_struct_field(cx, s);
         })
     }
 
     fn visit_variant(&mut self, v: &ast::Variant, g: &ast::Generics) {
-        self.with_lint_attrs(v.node.attrs.as_slice(), |cx| {
+        self.with_lint_attrs(v.node.attrs[], |cx| {
             run_lints!(cx, check_variant, v, g);
             visit::walk_variant(cx, v, g);
             run_lints!(cx, check_variant_post, v, g);
@@ -718,7 +718,7 @@ impl<'a, 'tcx> IdVisitingOperation for Context<'a, 'tcx> {
             None => {}
             Some(lints) => {
                 for (lint_id, span, msg) in lints.into_iter() {
-                    self.span_lint(lint_id.lint, span, msg.as_slice())
+                    self.span_lint(lint_id.lint, span, msg[])
                 }
             }
         }
@@ -764,7 +764,7 @@ pub fn check_crate(tcx: &ty::ctxt,
     let mut cx = Context::new(tcx, krate, exported_items);
 
     // Visit the whole crate.
-    cx.with_lint_attrs(krate.attrs.as_slice(), |cx| {
+    cx.with_lint_attrs(krate.attrs[], |cx| {
         cx.visit_id(ast::CRATE_NODE_ID);
         cx.visit_ids(|v| {
             v.visited_outermost = true;
@@ -784,7 +784,7 @@ pub fn check_crate(tcx: &ty::ctxt,
         for &(lint, span, ref msg) in v.iter() {
             tcx.sess.span_bug(span,
                               format!("unprocessed lint {} at {}: {}",
-                                      lint.as_str(), tcx.map.node_to_string(*id), *msg).as_slice())
+                                      lint.as_str(), tcx.map.node_to_string(*id), *msg)[])
         }
     }
 
diff --git a/src/librustc/metadata/creader.rs b/src/librustc/metadata/creader.rs
index 323b084afdc..98b57511957 100644
--- a/src/librustc/metadata/creader.rs
+++ b/src/librustc/metadata/creader.rs
@@ -95,11 +95,11 @@ fn warn_if_multiple_versions(diag: &SpanHandler, cstore: &CStore) {
     for (name, dupes) in map.into_iter() {
         if dupes.len() == 1 { continue }
         diag.handler().warn(
-            format!("using multiple versions of crate `{}`", name).as_slice());
+            format!("using multiple versions of crate `{}`", name)[]);
         for dupe in dupes.into_iter() {
             let data = cstore.get_crate_data(dupe);
             diag.span_note(data.span, "used here");
-            loader::note_crate_name(diag, data.name().as_slice());
+            loader::note_crate_name(diag, data.name()[]);
         }
     }
 }
@@ -117,7 +117,7 @@ fn should_link(i: &ast::ViewItem) -> bool {
     i.attrs.iter().all(|attr| {
         attr.name().get() != "phase" ||
             attr.meta_item_list().map_or(false, |phases| {
-                attr::contains_name(phases.as_slice(), "link")
+                attr::contains_name(phases[], "link")
             })
     })
 }
@@ -131,8 +131,8 @@ fn visit_view_item(e: &mut Env, i: &ast::ViewItem) {
         Some(info) => {
             let (cnum, _, _) = resolve_crate(e,
                                              &None,
-                                             info.ident.as_slice(),
-                                             info.name.as_slice(),
+                                             info.ident[],
+                                             info.name[],
                                              None,
                                              i.span);
             e.sess.cstore.add_extern_mod_stmt_cnum(info.id, cnum);
@@ -157,7 +157,7 @@ fn extract_crate_info(e: &Env, i: &ast::ViewItem) -> Option<CrateInfo> {
             let name = match *path_opt {
                 Some((ref path_str, _)) => {
                     let name = path_str.get().to_string();
-                    validate_crate_name(Some(e.sess), name.as_slice(),
+                    validate_crate_name(Some(e.sess), name[],
                                         Some(i.span));
                     name
                 }
@@ -188,7 +188,7 @@ pub fn validate_crate_name(sess: Option<&Session>, s: &str, sp: Option<Span>) {
     for c in s.chars() {
         if c.is_alphanumeric() { continue }
         if c == '_' || c == '-' { continue }
-        err(format!("invalid character `{}` in crate name: `{}`", c, s).as_slice());
+        err(format!("invalid character `{}` in crate name: `{}`", c, s)[]);
     }
     match sess {
         Some(sess) => sess.abort_if_errors(),
@@ -246,7 +246,7 @@ fn visit_item(e: &Env, i: &ast::Item) {
                                 } else {
                                     e.sess.span_err(m.span,
                                         format!("unknown kind: `{}`",
-                                                k).as_slice());
+                                                k)[]);
                                     cstore::NativeUnknown
                                 }
                             }
@@ -327,7 +327,7 @@ fn existing_match(e: &Env, name: &str,
         match e.sess.opts.externs.get(name) {
             Some(locs) => {
                 let found = locs.iter().any(|l| {
-                    let l = fs::realpath(&Path::new(l.as_slice())).ok();
+                    let l = fs::realpath(&Path::new(l[])).ok();
                     l == source.dylib || l == source.rlib
                 });
                 if found {
@@ -405,7 +405,7 @@ fn resolve_crate<'a>(e: &mut Env,
                 crate_name: name,
                 hash: hash.map(|a| &*a),
                 filesearch: e.sess.target_filesearch(),
-                triple: e.sess.opts.target_triple.as_slice(),
+                triple: e.sess.opts.target_triple[],
                 root: root,
                 rejected_via_hash: vec!(),
                 rejected_via_triple: vec!(),
@@ -431,8 +431,8 @@ fn resolve_crate_deps(e: &mut Env,
     decoder::get_crate_deps(cdata).iter().map(|dep| {
         debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash);
         let (local_cnum, _, _) = resolve_crate(e, root,
-                                               dep.name.as_slice(),
-                                               dep.name.as_slice(),
+                                               dep.name[],
+                                               dep.name[],
                                                Some(&dep.hash),
                                                span);
         (dep.cnum, local_cnum)
@@ -455,14 +455,14 @@ impl<'a> PluginMetadataReader<'a> {
 
     pub fn read_plugin_metadata(&mut self, krate: &ast::ViewItem) -> PluginMetadata {
         let info = extract_crate_info(&self.env, krate).unwrap();
-        let target_triple = self.env.sess.opts.target_triple.as_slice();
+        let target_triple = self.env.sess.opts.target_triple[];
         let is_cross = target_triple != config::host_triple();
         let mut should_link = info.should_link && !is_cross;
         let mut load_ctxt = loader::Context {
             sess: self.env.sess,
             span: krate.span,
-            ident: info.ident.as_slice(),
-            crate_name: info.name.as_slice(),
+            ident: info.ident[],
+            crate_name: info.name[],
             hash: None,
             filesearch: self.env.sess.host_filesearch(),
             triple: config::host_triple(),
@@ -483,7 +483,7 @@ impl<'a> PluginMetadataReader<'a> {
                     let message = format!("crate `{}` contains a plugin_registrar fn but \
                                   only a version for triple `{}` could be found (need {})",
                                   info.ident, target_triple, config::host_triple());
-                    self.env.sess.span_err(krate.span, message.as_slice());
+                    self.env.sess.span_err(krate.span, message[]);
                     // need to abort now because the syntax expansion
                     // code will shortly attempt to load and execute
                     // code from the found library.
@@ -502,7 +502,7 @@ impl<'a> PluginMetadataReader<'a> {
             let message = format!("plugin crate `{}` only found in rlib format, \
                                    but must be available in dylib format",
                                   info.ident);
-            self.env.sess.span_err(krate.span, message.as_slice());
+            self.env.sess.span_err(krate.span, message[]);
             // No need to abort because the loading code will just ignore this
             // empty dylib.
         }
@@ -511,11 +511,11 @@ impl<'a> PluginMetadataReader<'a> {
             macros: macros,
             registrar_symbol: registrar,
         };
-        if should_link && existing_match(&self.env, info.name.as_slice(),
+        if should_link && existing_match(&self.env, info.name[],
                                          None).is_none() {
             // register crate now to avoid double-reading metadata
-            register_crate(&mut self.env, &None, info.ident.as_slice(),
-                           info.name.as_slice(), krate.span, library);
+            register_crate(&mut self.env, &None, info.ident[],
+                           info.name[], krate.span, library);
         }
         pc
     }
diff --git a/src/librustc/metadata/csearch.rs b/src/librustc/metadata/csearch.rs
index b702f4925d8..13342bf82cf 100644
--- a/src/librustc/metadata/csearch.rs
+++ b/src/librustc/metadata/csearch.rs
@@ -95,7 +95,7 @@ pub fn get_item_path(tcx: &ty::ctxt, def: ast::DefId) -> Vec<ast_map::PathElem>
 
     // FIXME #1920: This path is not always correct if the crate is not linked
     // into the root namespace.
-    let mut r = vec![ast_map::PathMod(token::intern(cdata.name.as_slice()))];
+    let mut r = vec![ast_map::PathMod(token::intern(cdata.name[]))];
     r.push_all(path.as_slice());
     r
 }
diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs
index d8168814c6c..b869501237c 100644
--- a/src/librustc/metadata/decoder.rs
+++ b/src/librustc/metadata/decoder.rs
@@ -221,7 +221,7 @@ fn each_reexport<F>(d: rbml::Doc, f: F) -> bool where
 fn variant_disr_val(d: rbml::Doc) -> Option<ty::Disr> {
     reader::maybe_get_doc(d, tag_disr_val).and_then(|val_doc| {
         reader::with_doc_data(val_doc, |data| {
-            str::from_utf8(data).and_then(from_str)
+            str::from_utf8(data).ok().and_then(|s| s.parse())
         })
     })
 }
@@ -1160,7 +1160,7 @@ pub fn get_crate_deps(data: &[u8]) -> Vec<CrateDep> {
     }
     reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| {
         let name = docstr(depdoc, tag_crate_dep_crate_name);
-        let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash).as_slice());
+        let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash)[]);
         deps.push(CrateDep {
             cnum: crate_num,
             name: name,
@@ -1345,7 +1345,7 @@ pub fn get_dylib_dependency_formats(cdata: Cmd)
         if spec.len() == 0 { continue }
         let cnum = spec.split(':').nth(0).unwrap();
         let link = spec.split(':').nth(1).unwrap();
-        let cnum = from_str(cnum).unwrap();
+        let cnum = cnum.parse().unwrap();
         let cnum = match cdata.cnum_map.get(&cnum) {
             Some(&n) => n,
             None => panic!("didn't find a crate in the cnum_map")
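
A sketch of the decoding chain used above, assuming this nightly's APIs: `str::from_utf8` returns a `Result`, so it is converted with `ok()` before chaining into the Option-returning `parse`:

    use std::str;

    // Raw metadata bytes -> Option<uint>: an Err from from_utf8 or a failed
    // parse both collapse to None.
    let data: &[u8] = b"42";
    let value: Option<uint> = str::from_utf8(data).ok().and_then(|s| s.parse());
    assert_eq!(value, Some(42u));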
diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs
index e5dae926db9..6782b3a7481 100644
--- a/src/librustc/metadata/encoder.rs
+++ b/src/librustc/metadata/encoder.rs
@@ -95,7 +95,7 @@ fn encode_impl_type_basename(rbml_w: &mut Encoder, name: ast::Ident) {
 }
 
 pub fn encode_def_id(rbml_w: &mut Encoder, id: DefId) {
-    rbml_w.wr_tagged_str(tag_def_id, def_to_string(id).as_slice());
+    rbml_w.wr_tagged_str(tag_def_id, def_to_string(id)[]);
 }
 
 #[deriving(Clone)]
@@ -154,7 +154,7 @@ fn encode_variant_id(rbml_w: &mut Encoder, vid: DefId) {
     rbml_w.end_tag();
 
     rbml_w.start_tag(tag_mod_child);
-    rbml_w.wr_str(s.as_slice());
+    rbml_w.wr_str(s[]);
     rbml_w.end_tag();
 }
 
@@ -264,7 +264,7 @@ fn encode_symbol(ecx: &EncodeContext,
         }
         None => {
             ecx.diag.handler().bug(
-                format!("encode_symbol: id not found {}", id).as_slice());
+                format!("encode_symbol: id not found {}", id)[]);
         }
     }
     rbml_w.end_tag();
@@ -332,8 +332,8 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
         encode_name(rbml_w, variant.node.name.name);
         encode_parent_item(rbml_w, local_def(id));
         encode_visibility(rbml_w, variant.node.vis);
-        encode_attributes(rbml_w, variant.node.attrs.as_slice());
-        encode_repr_attrs(rbml_w, ecx, variant.node.attrs.as_slice());
+        encode_attributes(rbml_w, variant.node.attrs[]);
+        encode_repr_attrs(rbml_w, ecx, variant.node.attrs[]);
 
         let stab = stability::lookup(ecx.tcx, ast_util::local_def(variant.node.id));
         encode_stability(rbml_w, stab);
@@ -344,9 +344,9 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
                 let fields = ty::lookup_struct_fields(ecx.tcx, def_id);
                 let idx = encode_info_for_struct(ecx,
                                                  rbml_w,
-                                                 fields.as_slice(),
+                                                 fields[],
                                                  index);
-                encode_struct_fields(rbml_w, fields.as_slice(), def_id);
+                encode_struct_fields(rbml_w, fields[], def_id);
                 encode_index(rbml_w, idx, write_i64);
             }
         }
@@ -386,12 +386,12 @@ fn encode_reexported_static_method(rbml_w: &mut Encoder,
             exp.name, token::get_name(method_name));
     rbml_w.start_tag(tag_items_data_item_reexport);
     rbml_w.start_tag(tag_items_data_item_reexport_def_id);
-    rbml_w.wr_str(def_to_string(method_def_id).as_slice());
+    rbml_w.wr_str(def_to_string(method_def_id)[]);
     rbml_w.end_tag();
     rbml_w.start_tag(tag_items_data_item_reexport_name);
     rbml_w.wr_str(format!("{}::{}",
                           exp.name,
-                          token::get_name(method_name)).as_slice());
+                          token::get_name(method_name))[]);
     rbml_w.end_tag();
     rbml_w.end_tag();
 }
@@ -529,7 +529,7 @@ fn encode_reexports(ecx: &EncodeContext,
                        id);
                 rbml_w.start_tag(tag_items_data_item_reexport);
                 rbml_w.start_tag(tag_items_data_item_reexport_def_id);
-                rbml_w.wr_str(def_to_string(exp.def_id).as_slice());
+                rbml_w.wr_str(def_to_string(exp.def_id)[]);
                 rbml_w.end_tag();
                 rbml_w.start_tag(tag_items_data_item_reexport_name);
                 rbml_w.wr_str(exp.name.as_str());
@@ -562,13 +562,13 @@ fn encode_info_for_mod(ecx: &EncodeContext,
     // Encode info about all the module children.
     for item in md.items.iter() {
         rbml_w.start_tag(tag_mod_child);
-        rbml_w.wr_str(def_to_string(local_def(item.id)).as_slice());
+        rbml_w.wr_str(def_to_string(local_def(item.id))[]);
         rbml_w.end_tag();
 
         each_auxiliary_node_id(&**item, |auxiliary_node_id| {
             rbml_w.start_tag(tag_mod_child);
             rbml_w.wr_str(def_to_string(local_def(
-                        auxiliary_node_id)).as_slice());
+                        auxiliary_node_id))[]);
             rbml_w.end_tag();
             true
         });
@@ -580,7 +580,7 @@ fn encode_info_for_mod(ecx: &EncodeContext,
                    did, ecx.tcx.map.node_to_string(did));
 
             rbml_w.start_tag(tag_mod_impl);
-            rbml_w.wr_str(def_to_string(local_def(did)).as_slice());
+            rbml_w.wr_str(def_to_string(local_def(did))[]);
             rbml_w.end_tag();
         }
     }
@@ -615,7 +615,7 @@ fn encode_visibility(rbml_w: &mut Encoder, visibility: ast::Visibility) {
         ast::Public => 'y',
         ast::Inherited => 'i',
     };
-    rbml_w.wr_str(ch.to_string().as_slice());
+    rbml_w.wr_str(ch.to_string()[]);
     rbml_w.end_tag();
 }
 
@@ -627,7 +627,7 @@ fn encode_unboxed_closure_kind(rbml_w: &mut Encoder,
         ty::FnMutUnboxedClosureKind => 'm',
         ty::FnOnceUnboxedClosureKind => 'o',
     };
-    rbml_w.wr_str(ch.to_string().as_slice());
+    rbml_w.wr_str(ch.to_string()[]);
     rbml_w.end_tag();
 }
 
@@ -788,7 +788,7 @@ fn encode_generics<'a, 'tcx>(rbml_w: &mut Encoder,
         rbml_w.end_tag();
 
         rbml_w.wr_tagged_str(tag_region_param_def_def_id,
-                             def_to_string(param.def_id).as_slice());
+                             def_to_string(param.def_id)[]);
 
         rbml_w.wr_tagged_u64(tag_region_param_def_space,
                              param.space.to_uint() as u64);
@@ -864,9 +864,9 @@ fn encode_info_for_method<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>,
     encode_path(rbml_w, impl_path.chain(Some(elem).into_iter()));
     match ast_item_opt {
         Some(&ast::MethodImplItem(ref ast_method)) => {
-            encode_attributes(rbml_w, ast_method.attrs.as_slice());
+            encode_attributes(rbml_w, ast_method.attrs[]);
             let any_types = !pty.generics.types.is_empty();
-            if any_types || is_default_impl || should_inline(ast_method.attrs.as_slice()) {
+            if any_types || is_default_impl || should_inline(ast_method.attrs[]) {
                 encode_inlined_item(ecx, rbml_w, IIImplItemRef(local_def(parent_id),
                                                                ast_item_opt.unwrap()));
             }
@@ -909,7 +909,7 @@ fn encode_info_for_associated_type(ecx: &EncodeContext,
     match typedef_opt {
         None => {}
         Some(typedef) => {
-            encode_attributes(rbml_w, typedef.attrs.as_slice());
+            encode_attributes(rbml_w, typedef.attrs[]);
             encode_type(ecx, rbml_w, ty::node_id_to_type(ecx.tcx,
                                                          typedef.id));
         }
@@ -1043,7 +1043,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
         encode_path(rbml_w, path);
         encode_visibility(rbml_w, vis);
         encode_stability(rbml_w, stab);
-        encode_attributes(rbml_w, item.attrs.as_slice());
+        encode_attributes(rbml_w, item.attrs[]);
         rbml_w.end_tag();
       }
       ast::ItemConst(_, _) => {
@@ -1069,8 +1069,8 @@ fn encode_info_for_item(ecx: &EncodeContext,
         encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id));
         encode_name(rbml_w, item.ident.name);
         encode_path(rbml_w, path);
-        encode_attributes(rbml_w, item.attrs.as_slice());
-        if tps_len > 0u || should_inline(item.attrs.as_slice()) {
+        encode_attributes(rbml_w, item.attrs[]);
+        if tps_len > 0u || should_inline(item.attrs[]) {
             encode_inlined_item(ecx, rbml_w, IIItemRef(item));
         }
         if tps_len == 0 {
@@ -1086,7 +1086,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
         encode_info_for_mod(ecx,
                             rbml_w,
                             m,
-                            item.attrs.as_slice(),
+                            item.attrs[],
                             item.id,
                             path,
                             item.ident,
@@ -1103,7 +1103,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
         // Encode all the items in this module.
         for foreign_item in fm.items.iter() {
             rbml_w.start_tag(tag_mod_child);
-            rbml_w.wr_str(def_to_string(local_def(foreign_item.id)).as_slice());
+            rbml_w.wr_str(def_to_string(local_def(foreign_item.id))[]);
             rbml_w.end_tag();
         }
         encode_visibility(rbml_w, vis);
@@ -1131,8 +1131,8 @@ fn encode_info_for_item(ecx: &EncodeContext,
         encode_item_variances(rbml_w, ecx, item.id);
         encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id));
         encode_name(rbml_w, item.ident.name);
-        encode_attributes(rbml_w, item.attrs.as_slice());
-        encode_repr_attrs(rbml_w, ecx, item.attrs.as_slice());
+        encode_attributes(rbml_w, item.attrs[]);
+        encode_repr_attrs(rbml_w, ecx, item.attrs[]);
         for v in (*enum_definition).variants.iter() {
             encode_variant_id(rbml_w, local_def(v.node.id));
         }
@@ -1149,7 +1149,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
         encode_enum_variant_info(ecx,
                                  rbml_w,
                                  item.id,
-                                 (*enum_definition).variants.as_slice(),
+                                 (*enum_definition).variants[],
                                  index);
       }
       ast::ItemStruct(ref struct_def, _) => {
@@ -1161,7 +1161,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
            class itself */
         let idx = encode_info_for_struct(ecx,
                                          rbml_w,
-                                         fields.as_slice(),
+                                         fields[],
                                          index);
 
         /* Index the class*/
@@ -1175,16 +1175,16 @@ fn encode_info_for_item(ecx: &EncodeContext,
 
         encode_item_variances(rbml_w, ecx, item.id);
         encode_name(rbml_w, item.ident.name);
-        encode_attributes(rbml_w, item.attrs.as_slice());
+        encode_attributes(rbml_w, item.attrs[]);
         encode_path(rbml_w, path.clone());
         encode_stability(rbml_w, stab);
         encode_visibility(rbml_w, vis);
-        encode_repr_attrs(rbml_w, ecx, item.attrs.as_slice());
+        encode_repr_attrs(rbml_w, ecx, item.attrs[]);
 
         /* Encode def_ids for each field and method
          for methods, write all the stuff get_trait_method
         needs to know*/
-        encode_struct_fields(rbml_w, fields.as_slice(), def_id);
+        encode_struct_fields(rbml_w, fields[], def_id);
 
         encode_inlined_item(ecx, rbml_w, IIItemRef(item));
 
@@ -1216,7 +1216,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
         encode_family(rbml_w, 'i');
         encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id));
         encode_name(rbml_w, item.ident.name);
-        encode_attributes(rbml_w, item.attrs.as_slice());
+        encode_attributes(rbml_w, item.attrs[]);
         encode_unsafety(rbml_w, unsafety);
         match ty.node {
             ast::TyPath(ref path, _) if path.segments
@@ -1319,7 +1319,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
         encode_generics(rbml_w, ecx, &trait_def.generics, tag_item_generics);
         encode_trait_ref(rbml_w, ecx, &*trait_def.trait_ref, tag_item_trait_ref);
         encode_name(rbml_w, item.ident.name);
-        encode_attributes(rbml_w, item.attrs.as_slice());
+        encode_attributes(rbml_w, item.attrs[]);
         encode_visibility(rbml_w, vis);
         encode_stability(rbml_w, stab);
         for &method_def_id in ty::trait_item_def_ids(tcx, def_id).iter() {
@@ -1337,7 +1337,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
             rbml_w.end_tag();
 
             rbml_w.start_tag(tag_mod_child);
-            rbml_w.wr_str(def_to_string(method_def_id.def_id()).as_slice());
+            rbml_w.wr_str(def_to_string(method_def_id.def_id())[]);
             rbml_w.end_tag();
         }
         encode_path(rbml_w, path.clone());
@@ -1422,14 +1422,14 @@ fn encode_info_for_item(ecx: &EncodeContext,
             };
             match trait_item {
                 &ast::RequiredMethod(ref m) => {
-                    encode_attributes(rbml_w, m.attrs.as_slice());
+                    encode_attributes(rbml_w, m.attrs[]);
                     encode_trait_item(rbml_w);
                     encode_item_sort(rbml_w, 'r');
                     encode_method_argument_names(rbml_w, &*m.decl);
                 }
 
                 &ast::ProvidedMethod(ref m) => {
-                    encode_attributes(rbml_w, m.attrs.as_slice());
+                    encode_attributes(rbml_w, m.attrs[]);
                     encode_trait_item(rbml_w);
                     encode_item_sort(rbml_w, 'p');
                     encode_inlined_item(ecx, rbml_w, IITraitItemRef(def_id, trait_item));
@@ -1438,7 +1438,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
 
                 &ast::TypeTraitItem(ref associated_type) => {
                     encode_attributes(rbml_w,
-                                      associated_type.attrs.as_slice());
+                                      associated_type.attrs[]);
                     encode_item_sort(rbml_w, 't');
                 }
             }
@@ -1802,7 +1802,7 @@ fn encode_macro_def(ecx: &EncodeContext,
     let def = ecx.tcx.sess.codemap().span_to_snippet(*span)
         .expect("Unable to find source for macro");
     rbml_w.start_tag(tag_macro_def);
-    rbml_w.wr_str(def.as_slice());
+    rbml_w.wr_str(def[]);
     rbml_w.end_tag();
 }
 
@@ -1849,7 +1849,7 @@ fn encode_struct_field_attrs(rbml_w: &mut Encoder, krate: &ast::Crate) {
         fn visit_struct_field(&mut self, field: &ast::StructField) {
             self.rbml_w.start_tag(tag_struct_field);
             self.rbml_w.wr_tagged_u32(tag_struct_field_id, field.node.id);
-            encode_attributes(self.rbml_w, field.node.attrs.as_slice());
+            encode_attributes(self.rbml_w, field.node.attrs[]);
             self.rbml_w.end_tag();
         }
     }
@@ -1921,13 +1921,13 @@ fn encode_misc_info(ecx: &EncodeContext,
     rbml_w.start_tag(tag_misc_info_crate_items);
     for item in krate.module.items.iter() {
         rbml_w.start_tag(tag_mod_child);
-        rbml_w.wr_str(def_to_string(local_def(item.id)).as_slice());
+        rbml_w.wr_str(def_to_string(local_def(item.id))[]);
         rbml_w.end_tag();
 
         each_auxiliary_node_id(&**item, |auxiliary_node_id| {
             rbml_w.start_tag(tag_mod_child);
             rbml_w.wr_str(def_to_string(local_def(
-                        auxiliary_node_id)).as_slice());
+                        auxiliary_node_id))[]);
             rbml_w.end_tag();
             true
         });
@@ -2096,17 +2096,17 @@ fn encode_metadata_inner(wr: &mut SeekableMemWriter,
 
     let mut rbml_w = writer::Encoder::new(wr);
 
-    encode_crate_name(&mut rbml_w, ecx.link_meta.crate_name.as_slice());
+    encode_crate_name(&mut rbml_w, ecx.link_meta.crate_name[]);
     encode_crate_triple(&mut rbml_w,
                         tcx.sess
                            .opts
                            .target_triple
-                           .as_slice());
+                           []);
     encode_hash(&mut rbml_w, &ecx.link_meta.crate_hash);
     encode_dylib_dependency_formats(&mut rbml_w, &ecx);
 
     let mut i = rbml_w.writer.tell().unwrap();
-    encode_attributes(&mut rbml_w, krate.attrs.as_slice());
+    encode_attributes(&mut rbml_w, krate.attrs[]);
     stats.attr_bytes = rbml_w.writer.tell().unwrap() - i;
 
     i = rbml_w.writer.tell().unwrap();
diff --git a/src/librustc/metadata/loader.rs b/src/librustc/metadata/loader.rs
index bc34b0b45e9..5f554eb9c1e 100644
--- a/src/librustc/metadata/loader.rs
+++ b/src/librustc/metadata/loader.rs
@@ -316,14 +316,14 @@ impl<'a> Context<'a> {
             &Some(ref r) => format!("{} which `{}` depends on",
                                     message, r.ident)
         };
-        self.sess.span_err(self.span, message.as_slice());
+        self.sess.span_err(self.span, message[]);
 
         if self.rejected_via_triple.len() > 0 {
             let mismatches = self.rejected_via_triple.iter();
             for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() {
                 self.sess.fileline_note(self.span,
                     format!("crate `{}`, path #{}, triple {}: {}",
-                            self.ident, i+1, got, path.display()).as_slice());
+                            self.ident, i+1, got, path.display())[]);
             }
         }
         if self.rejected_via_hash.len() > 0 {
@@ -333,7 +333,7 @@ impl<'a> Context<'a> {
             for (i, &CrateMismatch{ ref path, .. }) in mismatches.enumerate() {
                 self.sess.fileline_note(self.span,
                     format!("crate `{}` path {}{}: {}",
-                            self.ident, "#", i+1, path.display()).as_slice());
+                            self.ident, "#", i+1, path.display())[]);
             }
             match self.root {
                 &None => {}
@@ -341,7 +341,7 @@ impl<'a> Context<'a> {
                     for (i, path) in r.paths().iter().enumerate() {
                         self.sess.fileline_note(self.span,
                             format!("crate `{}` path #{}: {}",
-                                    r.ident, i+1, path.display()).as_slice());
+                                    r.ident, i+1, path.display())[]);
                     }
                 }
             }
@@ -387,7 +387,7 @@ impl<'a> Context<'a> {
                 None => return FileDoesntMatch,
                 Some(file) => file,
             };
-            let (hash, rlib) = if file.starts_with(rlib_prefix.as_slice()) &&
+            let (hash, rlib) = if file.starts_with(rlib_prefix[]) &&
                     file.ends_with(".rlib") {
                 (file.slice(rlib_prefix.len(), file.len() - ".rlib".len()),
                  true)
@@ -448,26 +448,26 @@ impl<'a> Context<'a> {
             _ => {
                 self.sess.span_err(self.span,
                     format!("multiple matching crates for `{}`",
-                            self.crate_name).as_slice());
+                            self.crate_name)[]);
                 self.sess.note("candidates:");
                 for lib in libraries.iter() {
                     match lib.dylib {
                         Some(ref p) => {
                             self.sess.note(format!("path: {}",
-                                                   p.display()).as_slice());
+                                                   p.display())[]);
                         }
                         None => {}
                     }
                     match lib.rlib {
                         Some(ref p) => {
                             self.sess.note(format!("path: {}",
-                                                   p.display()).as_slice());
+                                                   p.display())[]);
                         }
                         None => {}
                     }
                     let data = lib.metadata.as_slice();
                     let name = decoder::get_crate_name(data);
-                    note_crate_name(self.sess.diagnostic(), name.as_slice());
+                    note_crate_name(self.sess.diagnostic(), name[]);
                 }
                 None
             }
@@ -521,11 +521,11 @@ impl<'a> Context<'a> {
                                    format!("multiple {} candidates for `{}` \
                                             found",
                                            flavor,
-                                           self.crate_name).as_slice());
+                                           self.crate_name)[]);
                 self.sess.span_note(self.span,
                                     format!(r"candidate #1: {}",
                                             ret.as_ref().unwrap()
-                                               .display()).as_slice());
+                                               .display())[]);
                 error = 1;
                 ret = None;
             }
@@ -533,7 +533,7 @@ impl<'a> Context<'a> {
                 error += 1;
                 self.sess.span_note(self.span,
                                     format!(r"candidate #{}: {}", error,
-                                            lib.display()).as_slice());
+                                            lib.display())[]);
                 continue
             }
             *slot = Some(metadata);
@@ -608,17 +608,17 @@ impl<'a> Context<'a> {
         let mut rlibs = HashSet::new();
         let mut dylibs = HashSet::new();
         {
-            let mut locs = locs.iter().map(|l| Path::new(l.as_slice())).filter(|loc| {
+            let mut locs = locs.iter().map(|l| Path::new(l[])).filter(|loc| {
                 if !loc.exists() {
                     sess.err(format!("extern location for {} does not exist: {}",
-                                     self.crate_name, loc.display()).as_slice());
+                                     self.crate_name, loc.display())[]);
                     return false;
                 }
                 let file = match loc.filename_str() {
                     Some(file) => file,
                     None => {
                         sess.err(format!("extern location for {} is not a file: {}",
-                                         self.crate_name, loc.display()).as_slice());
+                                         self.crate_name, loc.display())[]);
                         return false;
                     }
                 };
@@ -626,12 +626,12 @@ impl<'a> Context<'a> {
                     return true
                 } else {
                     let (ref prefix, ref suffix) = dylibname;
-                    if file.starts_with(prefix.as_slice()) && file.ends_with(suffix.as_slice()) {
+                    if file.starts_with(prefix[]) && file.ends_with(suffix[]) {
                         return true
                     }
                 }
                 sess.err(format!("extern location for {} is of an unknown type: {}",
-                                 self.crate_name, loc.display()).as_slice());
+                                 self.crate_name, loc.display())[]);
                 false
             });
 
@@ -664,7 +664,7 @@ impl<'a> Context<'a> {
 }
 
 pub fn note_crate_name(diag: &SpanHandler, name: &str) {
-    diag.handler().note(format!("crate name: {}", name).as_slice());
+    diag.handler().note(format!("crate name: {}", name)[]);
 }
 
 impl ArchiveMetadata {
diff --git a/src/librustc/metadata/tydecode.rs b/src/librustc/metadata/tydecode.rs
index 9d3a2c1d667..7683506f0f4 100644
--- a/src/librustc/metadata/tydecode.rs
+++ b/src/librustc/metadata/tydecode.rs
@@ -233,7 +233,7 @@ fn parse_trait_store(st: &mut PState, conv: conv_did) -> ty::TraitStore {
         '&' => ty::RegionTraitStore(parse_region(st, conv), parse_mutability(st)),
         c => {
             st.tcx.sess.bug(format!("parse_trait_store(): bad input '{}'",
-                                    c).as_slice())
+                                    c)[])
         }
     }
 }
@@ -287,7 +287,7 @@ fn parse_bound_region(st: &mut PState, conv: conv_did) -> ty::BoundRegion {
         }
         '[' => {
             let def = parse_def(st, RegionParameter, |x,y| conv(x,y));
-            let ident = token::str_to_ident(parse_str(st, ']').as_slice());
+            let ident = token::str_to_ident(parse_str(st, ']')[]);
             ty::BrNamed(def, ident.name)
         }
         'f' => {
@@ -318,7 +318,7 @@ fn parse_region(st: &mut PState, conv: conv_did) -> ty::Region {
         assert_eq!(next(st), '|');
         let index = parse_uint(st);
         assert_eq!(next(st), '|');
-        let nm = token::str_to_ident(parse_str(st, ']').as_slice());
+        let nm = token::str_to_ident(parse_str(st, ']')[]);
         ty::ReEarlyBound(node_id, space, index, nm.name)
       }
       'f' => {
@@ -560,7 +560,7 @@ fn parse_abi_set(st: &mut PState) -> abi::Abi {
     assert_eq!(next(st), '[');
     scan(st, |c| c == ']', |bytes| {
         let abi_str = str::from_utf8(bytes).unwrap();
-        abi::lookup(abi_str.as_slice()).expect(abi_str)
+        abi::lookup(abi_str[]).expect(abi_str)
     })
 }
 
@@ -639,12 +639,12 @@ pub fn parse_def_id(buf: &[u8]) -> ast::DefId {
     let crate_part = buf[0u..colon_idx];
     let def_part = buf[colon_idx + 1u..len];
 
-    let crate_num = match str::from_utf8(crate_part).and_then(from_str::<uint>) {
+    let crate_num = match str::from_utf8(crate_part).ok().and_then(|s| s.parse::<uint>()) {
        Some(cn) => cn as ast::CrateNum,
        None => panic!("internal error: parse_def_id: crate number expected, found {}",
                      crate_part)
     };
-    let def_num = match str::from_utf8(def_part).and_then(from_str::<uint>) {
+    let def_num = match str::from_utf8(def_part).ok().and_then(|s| s.parse::<uint>()) {
        Some(dn) => dn as ast::NodeId,
        None => panic!("internal error: parse_def_id: id expected, found {}",
                      def_part)
diff --git a/src/librustc/middle/astconv_util.rs b/src/librustc/middle/astconv_util.rs
index 6b90bcd60e7..060e2f67faf 100644
--- a/src/librustc/middle/astconv_util.rs
+++ b/src/librustc/middle/astconv_util.rs
@@ -48,7 +48,7 @@ pub fn ast_ty_to_prim_ty<'tcx>(tcx: &ty::ctxt<'tcx>, ast_ty: &ast::Ty)
                 None => {
                     tcx.sess.span_bug(ast_ty.span,
                                       format!("unbound path {}",
-                                              path.repr(tcx)).as_slice())
+                                              path.repr(tcx))[])
                 }
                 Some(&d) => d
             };
diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs
index 69fbd59fd92..ce86d6805b2 100644
--- a/src/librustc/middle/astencode.rs
+++ b/src/librustc/middle/astencode.rs
@@ -132,7 +132,7 @@ pub fn decode_inlined_item<'tcx>(cdata: &cstore::crate_metadata,
             // Do an Option dance to use the path after it is moved below.
             let s = ast_map::path_to_string(ast_map::Values(path.iter()));
             path_as_str = Some(s);
-            path_as_str.as_ref().map(|x| x.as_slice())
+            path_as_str.as_ref().map(|x| x[])
         });
         let mut ast_dsr = reader::Decoder::new(ast_doc);
         let from_id_range = Decodable::decode(&mut ast_dsr).unwrap();
@@ -1876,7 +1876,7 @@ fn decode_side_tables(dcx: &DecodeContext,
             None => {
                 dcx.tcx.sess.bug(
                     format!("unknown tag found in side tables: {:x}",
-                            tag).as_slice());
+                            tag)[]);
             }
             Some(value) => {
                 let val_doc = entry_doc.get(c::tag_table_val as uint);
@@ -1961,7 +1961,7 @@ fn decode_side_tables(dcx: &DecodeContext,
                     _ => {
                         dcx.tcx.sess.bug(
                             format!("unknown tag found in side tables: {:x}",
-                                    tag).as_slice());
+                                    tag)[]);
                     }
                 }
             }
diff --git a/src/librustc/middle/cfg/construct.rs b/src/librustc/middle/cfg/construct.rs
index 82bed254031..2d50757782d 100644
--- a/src/librustc/middle/cfg/construct.rs
+++ b/src/librustc/middle/cfg/construct.rs
@@ -362,7 +362,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
                 let mut cond_exit = discr_exit;
                 for arm in arms.iter() {
                     cond_exit = self.add_dummy_node(&[cond_exit]);        // 2
-                    let pats_exit = self.pats_any(arm.pats.as_slice(),
+                    let pats_exit = self.pats_any(arm.pats[],
                                                   cond_exit);            // 3
                     let guard_exit = self.opt_expr(&arm.guard,
                                                    pats_exit);           // 4
@@ -617,14 +617,14 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
                         self.tcx.sess.span_bug(
                             expr.span,
                             format!("no loop scope for id {}",
-                                    loop_id).as_slice());
+                                    loop_id)[]);
                     }
 
                     r => {
                         self.tcx.sess.span_bug(
                             expr.span,
                             format!("bad entry `{}` in def_map for label",
-                                    r).as_slice());
+                                    r)[]);
                     }
                 }
             }
diff --git a/src/librustc/middle/cfg/graphviz.rs b/src/librustc/middle/cfg/graphviz.rs
index e33f44967f1..13bd22a67c4 100644
--- a/src/librustc/middle/cfg/graphviz.rs
+++ b/src/librustc/middle/cfg/graphviz.rs
@@ -50,7 +50,7 @@ fn replace_newline_with_backslash_l(s: String) -> String {
 }
 
 impl<'a, 'ast> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> {
-    fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(self.name.as_slice()).unwrap() }
+    fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(self.name[]).unwrap() }
 
     fn node_id(&'a self, &(i,_): &Node<'a>) -> dot::Id<'a> {
         dot::Id::new(format!("N{}", i.node_id())).unwrap()
@@ -83,8 +83,7 @@ impl<'a, 'ast> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> {
             let s = self.ast_map.node_to_string(node_id);
             // left-aligns the lines
             let s = replace_newline_with_backslash_l(s);
-            label.push_str(format!("exiting scope_{} {}", i,
-                                   s.as_slice()).as_slice());
+            label.push_str(format!("exiting scope_{} {}", i, s[])[]);
         }
         dot::EscStr(label.into_cow())
     }
diff --git a/src/librustc/middle/check_loop.rs b/src/librustc/middle/check_loop.rs
index cb454f94dc7..5a08d7c179d 100644
--- a/src/librustc/middle/check_loop.rs
+++ b/src/librustc/middle/check_loop.rs
@@ -74,13 +74,11 @@ impl<'a> CheckLoopVisitor<'a> {
             Loop => {}
             Closure => {
                 self.sess.span_err(span,
-                                   format!("`{}` inside of a closure",
-                                           name).as_slice());
+                                   format!("`{}` inside of a closure", name)[]);
             }
             Normal => {
                 self.sess.span_err(span,
-                                   format!("`{}` outside of loop",
-                                           name).as_slice());
+                                   format!("`{}` outside of loop", name)[]);
             }
         }
     }
diff --git a/src/librustc/middle/check_match.rs b/src/librustc/middle/check_match.rs
index 9a94eb97931..da1bd09ceff 100644
--- a/src/librustc/middle/check_match.rs
+++ b/src/librustc/middle/check_match.rs
@@ -162,7 +162,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) {
                 // First, check legality of move bindings.
                 check_legality_of_move_bindings(cx,
                                                 arm.guard.is_some(),
-                                                arm.pats.as_slice());
+                                                arm.pats[]);
 
                 // Second, if there is a guard on each arm, make sure it isn't
                 // assigning or borrowing anything mutably.
@@ -199,7 +199,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) {
             }
 
             // Fourth, check for unreachable arms.
-            check_arms(cx, inlined_arms.as_slice(), source);
+            check_arms(cx, inlined_arms[], source);
 
             // Finally, check if the whole match expression is exhaustive.
             // Check for empty enum, because is_useful only works on inhabited types.
@@ -231,7 +231,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) {
                     pat.span,
                     format!("refutable pattern in `for` loop binding: \
                             `{}` not covered",
-                            pat_to_string(uncovered_pat)).as_slice());
+                            pat_to_string(uncovered_pat))[]);
             });
 
             // Check legality of move bindings.
@@ -304,7 +304,7 @@ fn check_arms(cx: &MatchCheckCtxt,
         for pat in pats.iter() {
             let v = vec![&**pat];
 
-            match is_useful(cx, &seen, v.as_slice(), LeaveOutWitness) {
+            match is_useful(cx, &seen, v[], LeaveOutWitness) {
                 NotUseful => {
                     match source {
                         ast::MatchSource::IfLetDesugar { .. } => {
@@ -356,7 +356,7 @@ fn raw_pat<'a>(p: &'a Pat) -> &'a Pat {
 fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, matrix: &Matrix) {
     match is_useful(cx, matrix, &[DUMMY_WILD_PAT], ConstructWitness) {
         UsefulWithWitness(pats) => {
-            let witness = match pats.as_slice() {
+            let witness = match pats[] {
                 [ref witness] => &**witness,
                 [] => DUMMY_WILD_PAT,
                 _ => unreachable!()
@@ -610,7 +610,7 @@ fn is_useful(cx: &MatchCheckCtxt,
                         UsefulWithWitness(pats) => UsefulWithWitness({
                             let arity = constructor_arity(cx, &c, left_ty);
                             let mut result = {
-                                let pat_slice = pats.as_slice();
+                                let pat_slice = pats[];
                                 let subpats = Vec::from_fn(arity, |i| {
                                     pat_slice.get(i).map_or(DUMMY_WILD_PAT, |p| &**p)
                                 });
@@ -657,10 +657,10 @@ fn is_useful_specialized(cx: &MatchCheckCtxt, &Matrix(ref m): &Matrix,
                          witness: WitnessPreference) -> Usefulness {
     let arity = constructor_arity(cx, &ctor, lty);
     let matrix = Matrix(m.iter().filter_map(|r| {
-        specialize(cx, r.as_slice(), &ctor, 0u, arity)
+        specialize(cx, r[], &ctor, 0u, arity)
     }).collect());
     match specialize(cx, v, &ctor, 0u, arity) {
-        Some(v) => is_useful(cx, &matrix, v.as_slice(), witness),
+        Some(v) => is_useful(cx, &matrix, v[], witness),
         None => NotUseful
     }
 }
@@ -1047,7 +1047,7 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
                             format!("binding pattern {} is not an \
                                      identifier: {}",
                                     p.id,
-                                    p.node).as_slice());
+                                    p.node)[]);
                     }
                 }
             }
diff --git a/src/librustc/middle/check_static.rs b/src/librustc/middle/check_static.rs
index 21e94d69366..6ff34d62500 100644
--- a/src/librustc/middle/check_static.rs
+++ b/src/librustc/middle/check_static.rs
@@ -112,7 +112,7 @@ impl<'a, 'tcx> CheckStaticVisitor<'a, 'tcx> {
         };
 
         self.tcx.sess.span_err(e.span, format!("mutable statics are not allowed \
-                                                to have {}", suffix).as_slice());
+                                                to have {}", suffix)[]);
     }
 
     fn check_static_type(&self, e: &ast::Expr) {
@@ -168,7 +168,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckStaticVisitor<'a, 'tcx> {
             ty::ty_enum(did, _) if ty::has_dtor(self.tcx, did) => {
                 self.tcx.sess.span_err(e.span,
                                        format!("{} are not allowed to have \
-                                                destructors", self.msg()).as_slice())
+                                                destructors", self.msg())[])
             }
             _ => {}
         }
@@ -232,7 +232,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckStaticVisitor<'a, 'tcx> {
                         let msg = "constants cannot refer to other statics, \
                                    insert an intermediate constant \
                                    instead";
-                        self.tcx.sess.span_err(e.span, msg.as_slice());
+                        self.tcx.sess.span_err(e.span, msg[]);
                     }
                     _ => {}
                 }
diff --git a/src/librustc/middle/check_static_recursion.rs b/src/librustc/middle/check_static_recursion.rs
index 90242a3252e..c36b4aa7f23 100644
--- a/src/librustc/middle/check_static_recursion.rs
+++ b/src/librustc/middle/check_static_recursion.rs
@@ -105,7 +105,7 @@ impl<'a, 'ast, 'v> Visitor<'v> for CheckItemRecursionVisitor<'a, 'ast> {
                           _ => {
                             self.sess.span_err(e.span,
                               format!("expected item, found {}",
-                                      self.ast_map.node_to_string(def_id.node)).as_slice());
+                                      self.ast_map.node_to_string(def_id.node))[]);
                             return;
                           },
                         }
diff --git a/src/librustc/middle/const_eval.rs b/src/librustc/middle/const_eval.rs
index 62f1a30f8e7..5b89912dd03 100644
--- a/src/librustc/middle/const_eval.rs
+++ b/src/librustc/middle/const_eval.rs
@@ -117,7 +117,7 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt,
             None => None,
             Some(ast_map::NodeItem(it)) => match it.node {
                 ast::ItemEnum(ast::EnumDef { ref variants }, _) => {
-                    variant_expr(variants.as_slice(), variant_def.node)
+                    variant_expr(variants[], variant_def.node)
                 }
                 _ => None
             },
@@ -138,7 +138,7 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt,
                     // NOTE this doesn't do the right thing, it compares inlined
                     // NodeId's to the original variant_def's NodeId, but they
                     // come from different crates, so they will likely never match.
-                    variant_expr(variants.as_slice(), variant_def.node).map(|e| e.id)
+                    variant_expr(variants[], variant_def.node).map(|e| e.id)
                 }
                 _ => None
             },
@@ -364,7 +364,7 @@ pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: &Expr) -> P<ast::Pat> {
 pub fn eval_const_expr(tcx: &ty::ctxt, e: &Expr) -> const_val {
     match eval_const_expr_partial(tcx, e) {
         Ok(r) => r,
-        Err(s) => tcx.sess.span_fatal(e.span, s.as_slice())
+        Err(s) => tcx.sess.span_fatal(e.span, s[])
     }
 }
 
@@ -603,7 +603,7 @@ pub fn lit_to_const(lit: &ast::Lit) -> const_val {
         ast::LitInt(n, ast::UnsignedIntLit(_)) => const_uint(n),
         ast::LitFloat(ref n, _) |
         ast::LitFloatUnsuffixed(ref n) => {
-            const_float(from_str::<f64>(n.get()).unwrap() as f64)
+            const_float(n.get().parse::<f64>().unwrap() as f64)
         }
         ast::LitBool(b) => const_bool(b)
     }
diff --git a/src/librustc/middle/dataflow.rs b/src/librustc/middle/dataflow.rs
index 17ebd1b94a7..a2d417ca345 100644
--- a/src/librustc/middle/dataflow.rs
+++ b/src/librustc/middle/dataflow.rs
@@ -311,7 +311,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> {
                 let mut t = on_entry.to_vec();
                 self.apply_gen_kill(cfgidx, t.as_mut_slice());
                 temp_bits = t;
-                temp_bits.as_slice()
+                temp_bits[]
             }
         };
         debug!("{} each_bit_for_node({}, cfgidx={}) bits={}",
@@ -420,7 +420,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> {
                 let bits = self.kills.slice_mut(start, end);
                 debug!("{} add_kills_from_flow_exits flow_exit={} bits={} [before]",
                        self.analysis_name, flow_exit, mut_bits_to_string(bits));
-                bits.clone_from_slice(orig_kills.as_slice());
+                bits.clone_from_slice(orig_kills[]);
                 debug!("{} add_kills_from_flow_exits flow_exit={} bits={} [after]",
                        self.analysis_name, flow_exit, mut_bits_to_string(bits));
             }
@@ -553,7 +553,7 @@ fn bits_to_string(words: &[uint]) -> String {
         let mut v = word;
         for _ in range(0u, uint::BYTES) {
             result.push(sep);
-            result.push_str(format!("{:02x}", v & 0xFF).as_slice());
+            result.push_str(format!("{:02x}", v & 0xFF)[]);
             v >>= 8;
             sep = '-';
         }
diff --git a/src/librustc/middle/dependency_format.rs b/src/librustc/middle/dependency_format.rs
index 3cb628c2e65..6b56ece28bd 100644
--- a/src/librustc/middle/dependency_format.rs
+++ b/src/librustc/middle/dependency_format.rs
@@ -118,7 +118,7 @@ fn calculate_type(sess: &session::Session,
                 let src = sess.cstore.get_used_crate_source(cnum).unwrap();
                 if src.rlib.is_some() { return }
                 sess.err(format!("dependency `{}` not found in rlib format",
-                                 data.name).as_slice());
+                                 data.name)[]);
             });
             return Vec::new();
         }
@@ -197,7 +197,7 @@ fn calculate_type(sess: &session::Session,
                                  match kind {
                                      cstore::RequireStatic => "rlib",
                                      cstore::RequireDynamic => "dylib",
-                                 }).as_slice());
+                                 })[]);
             }
         }
     }
@@ -222,7 +222,7 @@ fn add_library(sess: &session::Session,
                 let data = sess.cstore.get_crate_data(cnum);
                 sess.err(format!("cannot satisfy dependencies so `{}` only \
                                   shows up once",
-                                 data.name).as_slice());
+                                 data.name)[]);
                 sess.help("having upstream crates all available in one format \
                            will likely make this go away");
             }
diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs
index abc3c8d0d8f..4ee0064b0e6 100644
--- a/src/librustc/middle/expr_use_visitor.rs
+++ b/src/librustc/middle/expr_use_visitor.rs
@@ -678,7 +678,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> {
                         self.tcx().sess.span_bug(
                             callee.span,
                             format!("unexpected callee type {}",
-                                    callee_ty.repr(self.tcx())).as_slice())
+                                    callee_ty.repr(self.tcx()))[])
                     }
                 };
                 match overloaded_call_type {
@@ -869,7 +869,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> {
                         ty::ty_rptr(r, ref m) => (m.mutbl, r),
                         _ => self.tcx().sess.span_bug(expr.span,
                                 format!("bad overloaded deref type {}",
-                                    method_ty.repr(self.tcx())).as_slice())
+                                    method_ty.repr(self.tcx()))[])
                     };
                     let bk = ty::BorrowKind::from_mutbl(m);
                     self.delegate.borrow(expr.id, expr.span, cmt,
@@ -1186,7 +1186,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> {
                             // pattern.
 
                             let msg = format!("Pattern has unexpected type: {}", def);
-                            tcx.sess.span_bug(pat.span, msg.as_slice())
+                            tcx.sess.span_bug(pat.span, msg[])
                         }
 
                         Some(def) => {
@@ -1195,7 +1195,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> {
                             // should not resolve.
 
                             let msg = format!("Pattern has unexpected def: {}", def);
-                            tcx.sess.span_bug(pat.span, msg.as_slice())
+                            tcx.sess.span_bug(pat.span, msg[])
                         }
                     }
                 }
diff --git a/src/librustc/middle/infer/combine.rs b/src/librustc/middle/infer/combine.rs
index 82ddbcee5a7..11ab44ba09f 100644
--- a/src/librustc/middle/infer/combine.rs
+++ b/src/librustc/middle/infer/combine.rs
@@ -141,7 +141,7 @@ pub trait Combine<'tcx> {
                             for _ in a_regions.iter() {
                                 invariance.push(ty::Invariant);
                             }
-                            invariance.as_slice()
+                            invariance[]
                         }
                     };
 
@@ -411,7 +411,7 @@ pub fn super_tys<'tcx, C: Combine<'tcx>>(this: &C,
             format!("{}: bot and var types should have been handled ({},{})",
                     this.tag(),
                     a.repr(this.infcx().tcx),
-                    b.repr(this.infcx().tcx)).as_slice());
+                    b.repr(this.infcx().tcx))[]);
       }
 
       (&ty::ty_err, _) | (_, &ty::ty_err) => {
diff --git a/src/librustc/middle/infer/error_reporting.rs b/src/librustc/middle/infer/error_reporting.rs
index b4c1c0b396b..0ea3d415ec5 100644
--- a/src/librustc/middle/infer/error_reporting.rs
+++ b/src/librustc/middle/infer/error_reporting.rs
@@ -199,9 +199,9 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
                                 ref trace_origins,
                                 ref same_regions) => {
                     if !same_regions.is_empty() {
-                        self.report_processed_errors(var_origins.as_slice(),
-                                                     trace_origins.as_slice(),
-                                                     same_regions.as_slice());
+                        self.report_processed_errors(var_origins[],
+                                                     trace_origins[],
+                                                     same_regions[]);
                     }
                 }
             }
@@ -374,7 +374,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
             format!("{}: {} ({})",
                  message_root_str,
                  expected_found_str,
-                 ty::type_err_to_str(self.tcx, terr)).as_slice());
+                 ty::type_err_to_str(self.tcx, terr))[]);
 
         match trace.origin {
             infer::MatchExpressionArm(_, arm_span) =>
@@ -438,13 +438,13 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
                     origin.span(),
                     format!(
                         "the parameter type `{}` may not live long enough",
-                        param_ty.user_string(self.tcx)).as_slice());
+                        param_ty.user_string(self.tcx))[]);
                 self.tcx.sess.span_help(
                     origin.span(),
                     format!(
                         "consider adding an explicit lifetime bound `{}: {}`...",
                         param_ty.user_string(self.tcx),
-                        sub.user_string(self.tcx)).as_slice());
+                        sub.user_string(self.tcx))[]);
             }
 
             ty::ReStatic => {
@@ -453,12 +453,12 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
                     origin.span(),
                     format!(
                         "the parameter type `{}` may not live long enough",
-                        param_ty.user_string(self.tcx)).as_slice());
+                        param_ty.user_string(self.tcx))[]);
                 self.tcx.sess.span_help(
                     origin.span(),
                     format!(
                         "consider adding an explicit lifetime bound `{}: 'static`...",
-                        param_ty.user_string(self.tcx)).as_slice());
+                        param_ty.user_string(self.tcx))[]);
             }
 
             _ => {
@@ -467,16 +467,16 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
                     origin.span(),
                     format!(
                         "the parameter type `{}` may not live long enough",
-                        param_ty.user_string(self.tcx)).as_slice());
+                        param_ty.user_string(self.tcx))[]);
                 self.tcx.sess.span_help(
                     origin.span(),
                     format!(
                         "consider adding an explicit lifetime bound to `{}`",
-                        param_ty.user_string(self.tcx)).as_slice());
+                        param_ty.user_string(self.tcx))[]);
                 note_and_explain_region(
                     self.tcx,
                     format!("the parameter type `{}` must be valid for ",
-                            param_ty.user_string(self.tcx)).as_slice(),
+                            param_ty.user_string(self.tcx))[],
                     sub,
                     "...");
             }
@@ -518,7 +518,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
                             ty::local_var_name_str(self.tcx,
                                                    upvar_id.var_id)
                                 .get()
-                                .to_string()).as_slice());
+                                .to_string())[]);
                 note_and_explain_region(
                     self.tcx,
                     "...the borrowed pointer is valid for ",
@@ -530,7 +530,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
                             ty::local_var_name_str(self.tcx,
                                                    upvar_id.var_id)
                                 .get()
-                                .to_string()).as_slice(),
+                                .to_string())[],
                     sup,
                     "");
             }
@@ -576,7 +576,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
                             outlive the enclosing closure",
                             ty::local_var_name_str(self.tcx,
                                                    id).get()
-                                                      .to_string()).as_slice());
+                                                      .to_string())[]);
                 note_and_explain_region(
                     self.tcx,
                     "captured variable is valid for ",
@@ -618,7 +618,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
                     span,
                     format!("the type `{}` does not fulfill the \
                              required lifetime",
-                            self.ty_to_string(ty)).as_slice());
+                            self.ty_to_string(ty))[]);
                 note_and_explain_region(self.tcx,
                                         "type must outlive ",
                                         sub,
@@ -644,7 +644,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
                     span,
                     format!("the type `{}` (provided as the value of \
                              a type parameter) is not valid at this point",
-                            self.ty_to_string(ty)).as_slice());
+                            self.ty_to_string(ty))[]);
                 note_and_explain_region(self.tcx,
                                         "type must outlive ",
                                         sub,
@@ -710,7 +710,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
                     span,
                     format!("type of expression contains references \
                              that are not valid during the expression: `{}`",
-                            self.ty_to_string(t)).as_slice());
+                            self.ty_to_string(t))[]);
                 note_and_explain_region(
                     self.tcx,
                     "type is only valid for ",
@@ -732,7 +732,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
                     span,
                     format!("in type `{}`, reference has a longer lifetime \
                              than the data it references",
-                            self.ty_to_string(ty)).as_slice());
+                            self.ty_to_string(ty))[]);
                 note_and_explain_region(
                     self.tcx,
                     "the pointer is valid for ",
@@ -857,7 +857,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
         let (fn_decl, generics, unsafety, ident, expl_self, span)
                                     = node_inner.expect("expect item fn");
         let taken = lifetimes_in_scope(self.tcx, scope_id);
-        let life_giver = LifeGiver::with_taken(taken.as_slice());
+        let life_giver = LifeGiver::with_taken(taken[]);
         let rebuilder = Rebuilder::new(self.tcx, fn_decl, expl_self,
                                        generics, same_regions, &life_giver);
         let (fn_decl, expl_self, generics) = rebuilder.rebuild();
@@ -933,7 +933,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {
             }
             expl_self_opt = self.rebuild_expl_self(expl_self_opt, lifetime,
                                                    &anon_nums, &region_names);
-            inputs = self.rebuild_args_ty(inputs.as_slice(), lifetime,
+            inputs = self.rebuild_args_ty(inputs[], lifetime,
                                           &anon_nums, &region_names);
             output = self.rebuild_output(&output, lifetime, &anon_nums, &region_names);
             ty_params = self.rebuild_ty_params(ty_params, lifetime,
@@ -968,7 +968,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {
                 names.push(lt_name);
             }
             names.sort();
-            let name = token::str_to_ident(names[0].as_slice()).name;
+            let name = token::str_to_ident(names[0][]).name;
             return (name_to_dummy_lifetime(name), Kept);
         }
         return (self.life_giver.give_lifetime(), Fresh);
@@ -1219,7 +1219,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {
                                 .sess
                                 .fatal(format!(
                                         "unbound path {}",
-                                        pprust::path_to_string(path)).as_slice())
+                                        pprust::path_to_string(path))[])
                         }
                         Some(&d) => d
                     };
@@ -1417,7 +1417,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
                                               opt_explicit_self, generics);
         let msg = format!("consider using an explicit lifetime \
                            parameter as shown: {}", suggested_fn);
-        self.tcx.sess.span_help(span, msg.as_slice());
+        self.tcx.sess.span_help(span, msg[]);
     }
 
     fn report_inference_failure(&self,
@@ -1455,7 +1455,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
             var_origin.span(),
             format!("cannot infer an appropriate lifetime{} \
                     due to conflicting requirements",
-                    var_description).as_slice());
+                    var_description)[]);
     }
 
     fn note_region_origin(&self, origin: &SubregionOrigin<'tcx>) {
@@ -1500,7 +1500,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
                         self.tcx.sess.span_note(
                             trace.origin.span(),
                             format!("...so that {} ({})",
-                                    desc, values_str).as_slice());
+                                    desc, values_str)[]);
                     }
                     None => {
                         // Really should avoid printing this error at
@@ -1509,7 +1509,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
                         // doing right now. - nmatsakis
                         self.tcx.sess.span_note(
                             trace.origin.span(),
-                            format!("...so that {}", desc).as_slice());
+                            format!("...so that {}", desc)[]);
                     }
                 }
             }
@@ -1526,7 +1526,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
                         "...so that closure can access `{}`",
                         ty::local_var_name_str(self.tcx, upvar_id.var_id)
                             .get()
-                            .to_string()).as_slice())
+                            .to_string())[])
             }
             infer::InfStackClosure(span) => {
                 self.tcx.sess.span_note(
@@ -1551,7 +1551,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
                             does not outlive the enclosing closure",
                             ty::local_var_name_str(
                                 self.tcx,
-                                id).get().to_string()).as_slice());
+                                id).get().to_string())[]);
             }
             infer::IndexSlice(span) => {
                 self.tcx.sess.span_note(
@@ -1595,7 +1595,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
                     span,
                     format!("...so type `{}` of expression is valid during the \
                              expression",
-                            self.ty_to_string(t)).as_slice());
+                            self.ty_to_string(t))[]);
             }
             infer::BindingTypeIsNotValidAtDecl(span) => {
                 self.tcx.sess.span_note(
@@ -1607,14 +1607,14 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
                     span,
                     format!("...so that the reference type `{}` \
                              does not outlive the data it points at",
-                            self.ty_to_string(ty)).as_slice());
+                            self.ty_to_string(ty))[]);
             }
             infer::RelateParamBound(span, t) => {
                 self.tcx.sess.span_note(
                     span,
                     format!("...so that the type `{}` \
                              will meet the declared lifetime bounds",
-                            self.ty_to_string(t)).as_slice());
+                            self.ty_to_string(t))[]);
             }
             infer::RelateDefaultParamBound(span, t) => {
                 self.tcx.sess.span_note(
@@ -1622,13 +1622,13 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
                     format!("...so that type parameter \
                              instantiated with `{}`, \
                              will meet its declared lifetime bounds",
-                            self.ty_to_string(t)).as_slice());
+                            self.ty_to_string(t))[]);
             }
             infer::RelateRegionParamBound(span) => {
                 self.tcx.sess.span_note(
                     span,
                     format!("...so that the declared lifetime parameter bounds \
-                                are satisfied").as_slice());
+                                are satisfied")[]);
             }
         }
     }
@@ -1677,7 +1677,7 @@ fn lifetimes_in_scope(tcx: &ty::ctxt,
         Some(node) => match node {
             ast_map::NodeItem(item) => match item.node {
                 ast::ItemFn(_, _, _, ref gen, _) => {
-                    taken.push_all(gen.lifetimes.as_slice());
+                    taken.push_all(gen.lifetimes[]);
                     None
                 },
                 _ => None
@@ -1685,7 +1685,7 @@ fn lifetimes_in_scope(tcx: &ty::ctxt,
             ast_map::NodeImplItem(ii) => {
                 match *ii {
                     ast::MethodImplItem(ref m) => {
-                        taken.push_all(m.pe_generics().lifetimes.as_slice());
+                        taken.push_all(m.pe_generics().lifetimes[]);
                         Some(m.id)
                     }
                     ast::TypeImplItem(_) => None,
@@ -1744,10 +1744,10 @@ impl LifeGiver {
         let mut lifetime;
         loop {
             let mut s = String::from_str("'");
-            s.push_str(num_to_string(self.counter.get()).as_slice());
+            s.push_str(num_to_string(self.counter.get())[]);
             if !self.taken.contains(&s) {
                 lifetime = name_to_dummy_lifetime(
-                                    token::str_to_ident(s.as_slice()).name);
+                                    token::str_to_ident(s[]).name);
                 self.generated.borrow_mut().push(lifetime);
                 break;
             }
diff --git a/src/librustc/middle/infer/higher_ranked/mod.rs b/src/librustc/middle/infer/higher_ranked/mod.rs
index ab0f98ec74a..2a19f37e7d4 100644
--- a/src/librustc/middle/infer/higher_ranked/mod.rs
+++ b/src/librustc/middle/infer/higher_ranked/mod.rs
@@ -189,7 +189,7 @@ impl<'tcx,C> HigherRankedRelations<'tcx> for C
                 span,
                 format!("region {} is not associated with \
                          any bound region from A!",
-                        r0).as_slice())
+                        r0)[])
         }
     }
 
@@ -339,7 +339,7 @@ fn var_ids<'tcx, T: Combine<'tcx>>(combiner: &T,
             r => {
                 combiner.infcx().tcx.sess.span_bug(
                     combiner.trace().origin.span(),
-                    format!("found non-region-vid: {}", r).as_slice());
+                    format!("found non-region-vid: {}", r)[]);
             }
         }).collect()
 }
diff --git a/src/librustc/middle/infer/mod.rs b/src/librustc/middle/infer/mod.rs
index 25eadae5b92..6d031c86507 100644
--- a/src/librustc/middle/infer/mod.rs
+++ b/src/librustc/middle/infer/mod.rs
@@ -992,7 +992,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
 
                 self.tcx.sess.span_err(sp, format!("{}{}",
                     mk_msg(resolved_expected.map(|t| self.ty_to_string(t)), actual_ty),
-                    error_str).as_slice());
+                    error_str)[]);
 
                 for err in err.iter() {
                     ty::note_and_explain_type_err(self.tcx, *err)
diff --git a/src/librustc/middle/infer/region_inference/graphviz.rs b/src/librustc/middle/infer/region_inference/graphviz.rs
index 3e55f6fa896..0ca1a593ce7 100644
--- a/src/librustc/middle/infer/region_inference/graphviz.rs
+++ b/src/librustc/middle/infer/region_inference/graphviz.rs
@@ -60,7 +60,7 @@ pub fn maybe_print_constraints_for<'a, 'tcx>(region_vars: &RegionVarBindings<'a,
     }
 
     let requested_node : Option<ast::NodeId> =
-        os::getenv("RUST_REGION_GRAPH_NODE").and_then(|s|from_str(s.as_slice()));
+        os::getenv("RUST_REGION_GRAPH_NODE").and_then(|s| s.parse());
 
     if requested_node.is_some() && requested_node != Some(subject_node) {
         return;
diff --git a/src/librustc/middle/infer/region_inference/mod.rs b/src/librustc/middle/infer/region_inference/mod.rs
index bcaf39cc8db..661f7e56429 100644
--- a/src/librustc/middle/infer/region_inference/mod.rs
+++ b/src/librustc/middle/infer/region_inference/mod.rs
@@ -464,7 +464,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
                 origin.span(),
                 format!("cannot relate bound region: {} <= {}",
                         sub.repr(self.tcx),
-                        sup.repr(self.tcx)).as_slice());
+                        sup.repr(self.tcx))[]);
           }
           (_, ReStatic) => {
             // all regions are subregions of static, so we can ignore this
@@ -724,7 +724,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
             self.tcx.sess.bug(
                 format!("cannot relate bound region: LUB({}, {})",
                         a.repr(self.tcx),
-                        b.repr(self.tcx)).as_slice());
+                        b.repr(self.tcx))[]);
           }
 
           (ReStatic, _) | (_, ReStatic) => {
@@ -741,7 +741,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
                 format!("lub_concrete_regions invoked with \
                          non-concrete regions: {}, {}",
                         a,
-                        b).as_slice());
+                        b)[]);
           }
 
           (ReFree(ref fr), ReScope(s_id)) |
@@ -824,7 +824,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
               self.tcx.sess.bug(
                   format!("cannot relate bound region: GLB({}, {})",
                           a.repr(self.tcx),
-                          b.repr(self.tcx)).as_slice());
+                          b.repr(self.tcx))[]);
             }
 
             (ReStatic, r) | (r, ReStatic) => {
@@ -844,7 +844,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
                     format!("glb_concrete_regions invoked with \
                              non-concrete regions: {}, {}",
                             a,
-                            b).as_slice());
+                            b)[]);
             }
 
             (ReFree(ref fr), ReScope(s_id)) |
@@ -965,7 +965,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
         self.expansion(var_data.as_mut_slice());
         self.contraction(var_data.as_mut_slice());
         let values =
-            self.extract_values_and_collect_conflicts(var_data.as_slice(),
+            self.extract_values_and_collect_conflicts(var_data[],
                                                       errors);
         self.collect_concrete_region_errors(&values, errors);
         values
@@ -1403,7 +1403,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
                     for var {}, lower_bounds={}, upper_bounds={}",
                     node_idx,
                     lower_bounds.repr(self.tcx),
-                    upper_bounds.repr(self.tcx)).as_slice());
+                    upper_bounds.repr(self.tcx))[]);
     }
 
     fn collect_error_for_contracting_node(
@@ -1447,7 +1447,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
             format!("collect_error_for_contracting_node() could not find error \
                      for var {}, upper_bounds={}",
                     node_idx,
-                    upper_bounds.repr(self.tcx)).as_slice());
+                    upper_bounds.repr(self.tcx))[]);
     }
 
     fn collect_concrete_regions(&self,
diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs
index b76d798941e..798daf8d541 100644
--- a/src/librustc/middle/liveness.rs
+++ b/src/librustc/middle/liveness.rs
@@ -323,7 +323,7 @@ impl<'a, 'tcx> IrMaps<'a, 'tcx> {
             self.tcx
                 .sess
                 .span_bug(span, format!("no variable registered for id {}",
-                                        node_id).as_slice());
+                                        node_id)[]);
           }
         }
     }
@@ -594,7 +594,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
             self.ir.tcx.sess.span_bug(
                 span,
                 format!("no live node registered for node {}",
-                        node_id).as_slice());
+                        node_id)[]);
           }
         }
     }
@@ -1129,7 +1129,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
           // Uninteresting cases: just propagate in rev exec order
 
           ast::ExprVec(ref exprs) => {
-            self.propagate_through_exprs(exprs.as_slice(), succ)
+            self.propagate_through_exprs(exprs[], succ)
           }
 
           ast::ExprRepeat(ref element, ref count) => {
@@ -1154,7 +1154,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
             } else {
                 succ
             };
-            let succ = self.propagate_through_exprs(args.as_slice(), succ);
+            let succ = self.propagate_through_exprs(args[], succ);
             self.propagate_through_expr(&**f, succ)
           }
 
@@ -1167,11 +1167,11 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
             } else {
                 succ
             };
-            self.propagate_through_exprs(args.as_slice(), succ)
+            self.propagate_through_exprs(args[], succ)
           }
 
           ast::ExprTup(ref exprs) => {
-            self.propagate_through_exprs(exprs.as_slice(), succ)
+            self.propagate_through_exprs(exprs[], succ)
           }
 
           ast::ExprBinary(op, ref l, ref r) if ast_util::lazy_binop(op) => {
diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs
index dce75579ca0..1c2ceea7716 100644
--- a/src/librustc/middle/mem_categorization.rs
+++ b/src/librustc/middle/mem_categorization.rs
@@ -238,7 +238,7 @@ pub fn deref_kind<'tcx>(tcx: &ty::ctxt<'tcx>, t: Ty<'tcx>) -> deref_kind {
       None => {
         tcx.sess.bug(
             format!("deref_kind() invoked on non-derefable type {}",
-                    ty_to_string(tcx, t)).as_slice());
+                    ty_to_string(tcx, t))[]);
       }
     }
 }
@@ -635,7 +635,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> {
                           span,
                           format!("Upvar of non-closure {} - {}",
                                   fn_node_id,
-                                  ty.repr(self.tcx())).as_slice());
+                                  ty.repr(self.tcx()))[]);
                   }
               }
           }
@@ -917,7 +917,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> {
                 self.tcx().sess.span_bug(
                     node.span(),
                     format!("Explicit deref of non-derefable type: {}",
-                            base_cmt.ty.repr(self.tcx())).as_slice());
+                            base_cmt.ty.repr(self.tcx()))[]);
             }
         }
     }
@@ -996,7 +996,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> {
                         self.tcx().sess.span_bug(
                             elt.span(),
                             format!("Explicit index of non-index type `{}`",
-                                    base_cmt.ty.repr(self.tcx())).as_slice());
+                                    base_cmt.ty.repr(self.tcx()))[]);
                     }
                 }
             }
diff --git a/src/librustc/middle/privacy.rs b/src/librustc/middle/privacy.rs
index f8b4ae73a1c..6f63ae166fe 100644
--- a/src/librustc/middle/privacy.rs
+++ b/src/librustc/middle/privacy.rs
@@ -615,10 +615,10 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> {
         match result {
             None => true,
             Some((span, msg, note)) => {
-                self.tcx.sess.span_err(span, msg.as_slice());
+                self.tcx.sess.span_err(span, msg[]);
                 match note {
                     Some((span, msg)) => {
-                        self.tcx.sess.span_note(span, msg.as_slice())
+                        self.tcx.sess.span_note(span, msg[])
                     }
                     None => {},
                 }
@@ -720,7 +720,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> {
             UnnamedField(idx) => format!("field #{} of {} is private",
                                          idx + 1, struct_desc),
         };
-        self.tcx.sess.span_err(span, msg.as_slice());
+        self.tcx.sess.span_err(span, msg[]);
     }
 
     // Given the ID of a method, checks to ensure it's in scope.
@@ -742,7 +742,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> {
                                              method_id,
                                              None,
                                              format!("method `{}`",
-                                                     string).as_slice()));
+                                                     string)[]));
     }
 
     // Checks that a path is in scope.
@@ -759,9 +759,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> {
                 self.ensure_public(span,
                                    def,
                                    Some(origdid),
-                                   format!("{} `{}`",
-                                           tyname,
-                                           name).as_slice())
+                                   format!("{} `{}`", tyname, name)[])
             };
 
             match self.last_private_map[path_id] {
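
In privacy.rs the strings handed to span_err and span_note are either a String local (msg) or a format! temporary, so .as_slice() becomes a trailing [], that is, a &str borrowed out of an owned String. Today those call sites would just take a reference and rely on deref coercion. A small sketch, with a hypothetical report() in place of the session diagnostics:

fn report(msg: &str) {
    eprintln!("error: {}", msg);
}

fn main() {
    let name = "foo";
    let msg = format!("field `{}` is private", name);
    // 2014: report(msg.as_slice())  ->  report(msg[])
    report(&msg);     // deref coercion: &String -> &str
    report(&msg[..]); // explicit full slice, same result
}
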
diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs
index 38d3b859c9d..4d83075480b 100644
--- a/src/librustc/middle/reachable.rs
+++ b/src/librustc/middle/reachable.rs
@@ -50,7 +50,7 @@ fn generics_require_inlining(generics: &ast::Generics) -> bool {
 // monomorphized or it was marked with `#[inline]`. This will only return
 // true for functions.
 fn item_might_be_inlined(item: &ast::Item) -> bool {
-    if attributes_specify_inlining(item.attrs.as_slice()) {
+    if attributes_specify_inlining(item.attrs[]) {
         return true
     }
 
@@ -65,7 +65,7 @@ fn item_might_be_inlined(item: &ast::Item) -> bool {
 
 fn method_might_be_inlined(tcx: &ty::ctxt, method: &ast::Method,
                            impl_src: ast::DefId) -> bool {
-    if attributes_specify_inlining(method.attrs.as_slice()) ||
+    if attributes_specify_inlining(method.attrs[]) ||
         generics_require_inlining(method.pe_generics()) {
         return true
     }
@@ -202,7 +202,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> {
                     ast::MethodImplItem(ref method) => {
                         if generics_require_inlining(method.pe_generics()) ||
                                 attributes_specify_inlining(
-                                    method.attrs.as_slice()) {
+                                    method.attrs[]) {
                             true
                         } else {
                             let impl_did = self.tcx
@@ -249,7 +249,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> {
                 None => {
                     self.tcx.sess.bug(format!("found unmapped ID in worklist: \
                                                {}",
-                                              search_item).as_slice())
+                                              search_item)[])
                 }
             }
         }
@@ -341,7 +341,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> {
                     .bug(format!("found unexpected thingy in worklist: {}",
                                  self.tcx
                                      .map
-                                     .node_to_string(search_item)).as_slice())
+                                     .node_to_string(search_item))[])
             }
         }
     }
diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs
index be191801626..bc9dc6b399d 100644
--- a/src/librustc/middle/resolve_lifetime.rs
+++ b/src/librustc/middle/resolve_lifetime.rs
@@ -409,7 +409,7 @@ impl<'a> LifetimeContext<'a> {
         self.sess.span_err(
             lifetime_ref.span,
             format!("use of undeclared lifetime name `{}`",
-                    token::get_name(lifetime_ref.name)).as_slice());
+                    token::get_name(lifetime_ref.name))[]);
     }
 
     fn check_lifetime_defs(&mut self, old_scope: Scope, lifetimes: &Vec<ast::LifetimeDef>) {
@@ -423,7 +423,7 @@ impl<'a> LifetimeContext<'a> {
                         lifetime.lifetime.span,
                         format!("illegal lifetime parameter name: `{}`",
                                 token::get_name(lifetime.lifetime.name))
-                            .as_slice());
+                            []);
                 }
             }
 
@@ -437,7 +437,7 @@ impl<'a> LifetimeContext<'a> {
                         format!("lifetime name `{}` declared twice in \
                                 the same scope",
                                 token::get_name(lifetime_j.lifetime.name))
-                            .as_slice());
+                            []);
                 }
             }
 
diff --git a/src/librustc/middle/subst.rs b/src/librustc/middle/subst.rs
index 30a47ff9132..a5e8e4695af 100644
--- a/src/librustc/middle/subst.rs
+++ b/src/librustc/middle/subst.rs
@@ -620,7 +620,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> {
                                      (space={}, index={})",
                                     region_name.as_str(),
                                     self.root_ty.repr(self.tcx()),
-                                    space, i).as_slice());
+                                    space, i)[]);
                             }
                         }
                 }
@@ -677,7 +677,7 @@ impl<'a,'tcx> SubstFolder<'a,'tcx> {
                             p.space,
                             p.idx,
                             self.root_ty.repr(self.tcx()),
-                            self.substs.repr(self.tcx())).as_slice());
+                            self.substs.repr(self.tcx()))[]);
             }
         };
 
diff --git a/src/librustc/middle/traits/coherence.rs b/src/librustc/middle/traits/coherence.rs
index 9804f6d222a..d48685ce27d 100644
--- a/src/librustc/middle/traits/coherence.rs
+++ b/src/librustc/middle/traits/coherence.rs
@@ -154,7 +154,7 @@ pub fn ty_is_local<'tcx>(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool {
         ty::ty_err => {
             tcx.sess.bug(
                 format!("ty_is_local invoked on unexpected type: {}",
-                        ty.repr(tcx)).as_slice())
+                        ty.repr(tcx))[])
         }
     }
 }
diff --git a/src/librustc/middle/traits/select.rs b/src/librustc/middle/traits/select.rs
index 8ba28b61006..2b42849a87b 100644
--- a/src/librustc/middle/traits/select.rs
+++ b/src/librustc/middle/traits/select.rs
@@ -709,7 +709,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 
         let all_bounds =
             util::transitive_bounds(
-                self.tcx(), caller_trait_refs.as_slice());
+                self.tcx(), caller_trait_refs[]);
 
         let matching_bounds =
             all_bounds.filter(
@@ -762,7 +762,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                 self.tcx().sess.span_bug(
                     obligation.cause.span,
                     format!("No entry for unboxed closure: {}",
-                            closure_def_id.repr(self.tcx())).as_slice());
+                            closure_def_id.repr(self.tcx()))[]);
             }
         };
 
@@ -1281,7 +1281,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                 self.tcx().sess.bug(
                     format!(
                         "asked to assemble builtin bounds of unexpected type: {}",
-                        self_ty.repr(self.tcx())).as_slice());
+                        self_ty.repr(self.tcx()))[]);
             }
         };
 
@@ -1436,7 +1436,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                 self.tcx().sess.span_bug(
                     obligation.cause.span,
                     format!("builtin bound for {} was ambig",
-                            obligation.repr(self.tcx())).as_slice());
+                            obligation.repr(self.tcx()))[]);
             }
         }
     }
@@ -1554,7 +1554,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                 self.tcx().sess.span_bug(
                     obligation.cause.span,
                     format!("Fn pointer candidate for inappropriate self type: {}",
-                            self_ty.repr(self.tcx())).as_slice());
+                            self_ty.repr(self.tcx()))[]);
             }
         };
 
@@ -1595,7 +1595,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                 self.tcx().sess.span_bug(
                     obligation.cause.span,
                     format!("No entry for unboxed closure: {}",
-                            closure_def_id.repr(self.tcx())).as_slice());
+                            closure_def_id.repr(self.tcx()))[]);
             }
         };
 
@@ -1692,8 +1692,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                 self.tcx().sess.bug(
                     format!("Impl {} was matchable against {} but now is not",
                             impl_def_id.repr(self.tcx()),
-                            obligation.repr(self.tcx()))
-                        .as_slice());
+                            obligation.repr(self.tcx()))[]);
             }
         }
     }
diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs
index 50a6fb9d0ca..edaf2f16721 100644
--- a/src/librustc/middle/ty.rs
+++ b/src/librustc/middle/ty.rs
@@ -1891,7 +1891,7 @@ impl<'tcx> ParameterEnvironment<'tcx> {
             _ => {
                 cx.sess.bug(format!("ParameterEnvironment::from_item(): \
                                      `{}` is not an item",
-                                    cx.map.node_to_string(id)).as_slice())
+                                    cx.map.node_to_string(id))[])
             }
         }
     }
@@ -1960,7 +1960,7 @@ impl UnboxedClosureKind {
         };
         match result {
             Ok(trait_did) => trait_did,
-            Err(err) => cx.sess.fatal(err.as_slice()),
+            Err(err) => cx.sess.fatal(err[]),
         }
     }
 }
@@ -2596,7 +2596,7 @@ pub fn sequence_element_type<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
         ty_str => mk_mach_uint(ast::TyU8),
         ty_open(ty) => sequence_element_type(cx, ty),
         _ => cx.sess.bug(format!("sequence_element_type called on non-sequence value: {}",
-                                 ty_to_string(cx, ty)).as_slice()),
+                                 ty_to_string(cx, ty))[]),
     }
 }
 
@@ -2972,7 +2972,7 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents {
             ty_struct(did, ref substs) => {
                 let flds = struct_fields(cx, did, substs);
                 let mut res =
-                    TypeContents::union(flds.as_slice(),
+                    TypeContents::union(flds[],
                                         |f| tc_mt(cx, f.mt, cache));
 
                 if !lookup_repr_hints(cx, did).contains(&attr::ReprExtern) {
@@ -2989,21 +2989,21 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents {
                 // FIXME(#14449): `borrowed_contents` below assumes `&mut`
                 // unboxed closure.
                 let upvars = unboxed_closure_upvars(cx, did, substs);
-                TypeContents::union(upvars.as_slice(),
+                TypeContents::union(upvars[],
                                     |f| tc_ty(cx, f.ty, cache)) |
                     borrowed_contents(r, MutMutable)
             }
 
             ty_tup(ref tys) => {
-                TypeContents::union(tys.as_slice(),
+                TypeContents::union(tys[],
                                     |ty| tc_ty(cx, *ty, cache))
             }
 
             ty_enum(did, ref substs) => {
                 let variants = substd_enum_variants(cx, did, substs);
                 let mut res =
-                    TypeContents::union(variants.as_slice(), |variant| {
-                        TypeContents::union(variant.args.as_slice(),
+                    TypeContents::union(variants[], |variant| {
+                        TypeContents::union(variant.args[],
                                             |arg_ty| {
                             tc_ty(cx, *arg_ty, cache)
                         })
@@ -3068,7 +3068,7 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents {
                 kind_bounds_to_contents(
                     cx,
                     tp_def.bounds.builtin_bounds,
-                    tp_def.bounds.trait_bounds.as_slice())
+                    tp_def.bounds.trait_bounds[])
             }
 
             ty_infer(_) => {
@@ -3658,7 +3658,7 @@ pub fn close_type<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
     match ty.sty {
         ty_open(ty) => mk_rptr(cx, ReStatic, mt {ty: ty, mutbl:ast::MutImmutable}),
         _ => cx.sess.bug(format!("Trying to close a non-open type {}",
-                                 ty_to_string(cx, ty)).as_slice())
+                                 ty_to_string(cx, ty))[])
     }
 }
 
@@ -3759,7 +3759,7 @@ pub fn node_id_to_trait_ref<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId)
         Some(ty) => ty.clone(),
         None => cx.sess.bug(
             format!("node_id_to_trait_ref: no trait ref for node `{}`",
-                    cx.map.node_to_string(id)).as_slice())
+                    cx.map.node_to_string(id))[])
     }
 }
 
@@ -3772,7 +3772,7 @@ pub fn node_id_to_type<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId) -> Ty<'tcx> {
        Some(ty) => ty,
        None => cx.sess.bug(
            format!("node_id_to_type: no type for node `{}`",
-                   cx.map.node_to_string(id)).as_slice())
+                   cx.map.node_to_string(id))[])
     }
 }
 
@@ -3865,7 +3865,7 @@ pub fn ty_region(tcx: &ctxt,
             tcx.sess.span_bug(
                 span,
                 format!("ty_region() invoked on an inappropriate ty: {}",
-                        s).as_slice());
+                        s)[]);
         }
     }
 }
@@ -3926,11 +3926,11 @@ pub fn expr_span(cx: &ctxt, id: NodeId) -> Span {
         Some(f) => {
             cx.sess.bug(format!("Node id {} is not an expr: {}",
                                 id,
-                                f).as_slice());
+                                f)[]);
         }
         None => {
             cx.sess.bug(format!("Node id {} is not present \
-                                in the node map", id).as_slice());
+                                in the node map", id)[]);
         }
     }
 }
@@ -3946,14 +3946,14 @@ pub fn local_var_name_str(cx: &ctxt, id: NodeId) -> InternedString {
                     cx.sess.bug(
                         format!("Variable id {} maps to {}, not local",
                                 id,
-                                pat).as_slice());
+                                pat)[]);
                 }
             }
         }
         r => {
             cx.sess.bug(format!("Variable id {} maps to {}, not local",
                                 id,
-                                r).as_slice());
+                                r)[]);
         }
     }
 }
@@ -3996,7 +3996,7 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>,
                             cx.sess.bug(
                                 format!("add_env adjustment on non-bare-fn: \
                                          {}",
-                                        b).as_slice());
+                                        b)[]);
                         }
                     }
                 }
@@ -4024,7 +4024,7 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>,
                                                 {}",
                                                 i,
                                                 ty_to_string(cx, adjusted_ty))
-                                                          .as_slice());
+                                                          []);
                                 }
                             }
                         }
@@ -4087,7 +4087,7 @@ pub fn unsize_ty<'tcx>(cx: &ctxt<'tcx>,
             }
             _ => cx.sess.span_bug(span,
                                   format!("UnsizeLength with bad sty: {}",
-                                          ty_to_string(cx, ty)).as_slice())
+                                          ty_to_string(cx, ty))[])
         },
         &UnsizeStruct(box ref k, tp_index) => match ty.sty {
             ty_struct(did, ref substs) => {
@@ -4099,7 +4099,7 @@ pub fn unsize_ty<'tcx>(cx: &ctxt<'tcx>,
             }
             _ => cx.sess.span_bug(span,
                                   format!("UnsizeStruct with bad sty: {}",
-                                          ty_to_string(cx, ty)).as_slice())
+                                          ty_to_string(cx, ty))[])
         },
         &UnsizeVtable(TyTrait { ref principal, bounds }, _) => {
             mk_trait(cx, (*principal).clone(), bounds)
@@ -4112,7 +4112,7 @@ pub fn resolve_expr(tcx: &ctxt, expr: &ast::Expr) -> def::Def {
         Some(&def) => def,
         None => {
             tcx.sess.span_bug(expr.span, format!(
-                "no def-map entry for expr {}", expr.id).as_slice());
+                "no def-map entry for expr {}", expr.id)[]);
         }
     }
 }
@@ -4206,7 +4206,7 @@ pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind {
                         expr.span,
                         format!("uncategorized def for expr {}: {}",
                                 expr.id,
-                                def).as_slice());
+                                def)[]);
                 }
             }
         }
@@ -4331,7 +4331,7 @@ pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field])
         token::get_name(name),
         fields.iter()
               .map(|f| token::get_name(f.name).get().to_string())
-              .collect::<Vec<String>>()).as_slice());
+              .collect::<Vec<String>>())[]);
 }
 
 pub fn impl_or_trait_item_idx(id: ast::Name, trait_items: &[ImplOrTraitItem])
@@ -4565,7 +4565,7 @@ pub fn provided_trait_methods<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId)
                 match item.node {
                     ItemTrait(_, _, _, _, ref ms) => {
                         let (_, p) =
-                            ast_util::split_trait_methods(ms.as_slice());
+                            ast_util::split_trait_methods(ms[]);
                         p.iter()
                          .map(|m| {
                             match impl_or_trait_item(
@@ -4584,14 +4584,14 @@ pub fn provided_trait_methods<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId)
                     _ => {
                         cx.sess.bug(format!("provided_trait_methods: `{}` is \
                                              not a trait",
-                                            id).as_slice())
+                                            id)[])
                     }
                 }
             }
             _ => {
                 cx.sess.bug(format!("provided_trait_methods: `{}` is not a \
                                      trait",
-                                    id).as_slice())
+                                    id)[])
             }
         }
     } else {
@@ -4827,7 +4827,7 @@ impl<'tcx> VariantInfo<'tcx> {
             },
             ast::StructVariantKind(ref struct_def) => {
 
-                let fields: &[StructField] = struct_def.fields.as_slice();
+                let fields: &[StructField] = struct_def.fields[];
 
                 assert!(fields.len() > 0);
 
@@ -4978,7 +4978,7 @@ pub fn enum_variants<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId)
                                                 cx.sess
                                                   .span_err(e.span,
                                                             format!("expected constant: {}",
-                                                                    *err).as_slice());
+                                                                    *err)[]);
                                             }
                                         },
                                     None => {}
@@ -5258,7 +5258,7 @@ pub fn lookup_struct_fields(cx: &ctxt, did: ast::DefId) -> Vec<field_ty> {
             _ => {
                 cx.sess.bug(
                     format!("ID not mapped to struct fields: {}",
-                            cx.map.node_to_string(did.node)).as_slice());
+                            cx.map.node_to_string(did.node))[]);
             }
         }
     } else {
@@ -5291,7 +5291,7 @@ pub fn struct_fields<'tcx>(cx: &ctxt<'tcx>, did: ast::DefId, substs: &Substs<'tc
 pub fn tup_fields<'tcx>(v: &[Ty<'tcx>]) -> Vec<field<'tcx>> {
     v.iter().enumerate().map(|(i, &f)| {
        field {
-            name: token::intern(i.to_string().as_slice()),
+            name: token::intern(i.to_string()[]),
             mt: mt {
                 ty: f,
                 mutbl: MutImmutable
@@ -5470,7 +5470,7 @@ pub fn eval_repeat_count(tcx: &ctxt, count_expr: &ast::Expr) -> uint {
             };
             tcx.sess.span_err(count_expr.span, format!(
                 "expected positive integer for repeat count, found {}",
-                found).as_slice());
+                found)[]);
         }
         Err(_) => {
             let found = match count_expr.node {
@@ -5485,7 +5485,7 @@ pub fn eval_repeat_count(tcx: &ctxt, count_expr: &ast::Expr) -> uint {
             };
             tcx.sess.span_err(count_expr.span, format!(
                 "expected constant integer for repeat count, found {}",
-                found).as_slice());
+                found)[]);
         }
     }
     0
@@ -6244,7 +6244,7 @@ pub fn with_freevars<T, F>(tcx: &ty::ctxt, fid: ast::NodeId, f: F) -> T where
 {
     match tcx.freevars.borrow().get(&fid) {
         None => f(&[]),
-        Some(d) => f(d.as_slice())
+        Some(d) => f(d[])
     }
 }
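
The ty.rs changes above also cover an explicitly typed binding (let fields: &[StructField] = struct_def.fields[];) and slicing a String that was just built (token::intern(i.to_string()[])). Both are full borrows of an owned value and need no special syntax in current Rust. A rough equivalent, with a hypothetical intern() as a stand-in:

fn intern(s: &str) -> usize {
    s.len() // stand-in for the real string interner
}

fn main() {
    let fields = vec!["a".to_string(), "b".to_string()];
    // 2014: struct_def.fields[]  ->  today a plain reborrow is enough
    let fields_slice: &[String] = &fields;
    assert_eq!(fields_slice.len(), 2);

    // 2014: token::intern(i.to_string()[])  ->  borrow the temporary String
    let i = 7u32;
    assert_eq!(intern(&i.to_string()), 1);
}
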
 
diff --git a/src/librustc/plugin/load.rs b/src/librustc/plugin/load.rs
index 5c2fe0854ee..a2e33454320 100644
--- a/src/librustc/plugin/load.rs
+++ b/src/librustc/plugin/load.rs
@@ -141,17 +141,17 @@ impl<'a> PluginLoader<'a> {
             // this is fatal: there are almost certainly macros we need
             // inside this crate, so continue would spew "macro undefined"
             // errors
-            Err(err) => self.sess.span_fatal(vi.span, err.as_slice())
+            Err(err) => self.sess.span_fatal(vi.span, err[])
         };
 
         unsafe {
             let registrar =
-                match lib.symbol(symbol.as_slice()) {
+                match lib.symbol(symbol[]) {
                     Ok(registrar) => {
                         mem::transmute::<*mut u8,PluginRegistrarFun>(registrar)
                     }
                     // again fatal if we can't register macros
-                    Err(err) => self.sess.span_fatal(vi.span, err.as_slice())
+                    Err(err) => self.sess.span_fatal(vi.span, err[])
                 };
 
             self.plugins.registrars.push(registrar);
diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs
index 0652645907b..335b7489063 100644
--- a/src/librustc/session/config.rs
+++ b/src/librustc/session/config.rs
@@ -555,17 +555,17 @@ pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions
                 match (value, opt_type_desc) {
                     (Some(..), None) => {
                         early_error(format!("codegen option `{}` takes no \
-                                             value", key).as_slice())
+                                             value", key)[])
                     }
                     (None, Some(type_desc)) => {
                         early_error(format!("codegen option `{0}` requires \
                                              {1} (-C {0}=<value>)",
-                                            key, type_desc).as_slice())
+                                            key, type_desc)[])
                     }
                     (Some(value), Some(type_desc)) => {
                         early_error(format!("incorrect value `{}` for codegen \
                                              option `{}` - {} was expected",
-                                             value, key, type_desc).as_slice())
+                                             value, key, type_desc)[])
                     }
                     (None, None) => unreachable!()
                 }
@@ -575,7 +575,7 @@ pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions
         }
         if !found {
             early_error(format!("unknown codegen option: `{}`",
-                                key).as_slice());
+                                key)[]);
         }
     }
     return cg;
@@ -588,10 +588,10 @@ pub fn default_lib_output() -> CrateType {
 pub fn default_configuration(sess: &Session) -> ast::CrateConfig {
     use syntax::parse::token::intern_and_get_ident as intern;
 
-    let end = sess.target.target.target_endian.as_slice();
-    let arch = sess.target.target.arch.as_slice();
-    let wordsz = sess.target.target.target_word_size.as_slice();
-    let os = sess.target.target.target_os.as_slice();
+    let end = sess.target.target.target_endian[];
+    let arch = sess.target.target.arch[];
+    let wordsz = sess.target.target.target_word_size[];
+    let os = sess.target.target.target_os[];
 
     let fam = match sess.target.target.options.is_like_windows {
         true  => InternedString::new("windows"),
@@ -627,23 +627,23 @@ pub fn build_configuration(sess: &Session) -> ast::CrateConfig {
         append_configuration(&mut user_cfg, InternedString::new("test"))
     }
     let mut v = user_cfg.into_iter().collect::<Vec<_>>();
-    v.push_all(default_cfg.as_slice());
+    v.push_all(default_cfg[]);
     v
 }
 
 pub fn build_target_config(opts: &Options, sp: &SpanHandler) -> Config {
-    let target = match Target::search(opts.target_triple.as_slice()) {
+    let target = match Target::search(opts.target_triple[]) {
         Ok(t) => t,
         Err(e) => {
-            sp.handler().fatal((format!("Error loading target specification: {}", e)).as_slice());
+            sp.handler().fatal((format!("Error loading target specification: {}", e))[]);
     }
     };
 
-    let (int_type, uint_type) = match target.target_word_size.as_slice() {
+    let (int_type, uint_type) = match target.target_word_size[] {
         "32" => (ast::TyI32, ast::TyU32),
         "64" => (ast::TyI64, ast::TyU64),
         w    => sp.handler().fatal((format!("target specification was invalid: unrecognized \
-                                            target-word-size {}", w)).as_slice())
+                                            target-word-size {}", w))[])
     };
 
     Config {
@@ -756,7 +756,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
 
     let unparsed_crate_types = matches.opt_strs("crate-type");
     let crate_types = parse_crate_types_from_list(unparsed_crate_types)
-        .unwrap_or_else(|e| early_error(e.as_slice()));
+        .unwrap_or_else(|e| early_error(e[]));
 
     let mut lint_opts = vec!();
     let mut describe_lints = false;
@@ -766,7 +766,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
             if lint_name == "help" {
                 describe_lints = true;
             } else {
-                lint_opts.push((lint_name.replace("-", "_").into_string(), level));
+                lint_opts.push((lint_name.replace("-", "_"), level));
             }
         }
     }
@@ -784,7 +784,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
         }
         if this_bit == 0 {
             early_error(format!("unknown debug flag: {}",
-                                *debug_flag).as_slice())
+                                *debug_flag)[])
         }
         debugging_opts |= this_bit;
     }
@@ -829,7 +829,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
                     "dep-info" => OutputTypeDepInfo,
                     _ => {
                         early_error(format!("unknown emission type: `{}`",
-                                            part).as_slice())
+                                            part)[])
                     }
                 };
                 output_types.push(output_type)
@@ -868,7 +868,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
                 Some(arg) => {
                     early_error(format!("optimization level needs to be \
                                          between 0-3 (instead was `{}`)",
-                                        arg).as_slice());
+                                        arg)[]);
                 }
             }
         } else {
@@ -906,7 +906,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
             Some(arg) => {
                 early_error(format!("debug info level needs to be between \
                                      0-2 (instead was `{}`)",
-                                    arg).as_slice());
+                                    arg)[]);
             }
         }
     } else {
@@ -923,7 +923,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
     };
 
     let addl_lib_search_paths = matches.opt_strs("L").iter().map(|s| {
-        Path::new(s.as_slice())
+        Path::new(s[])
     }).collect();
 
     let libs = matches.opt_strs("l").into_iter().map(|s| {
@@ -937,7 +937,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
             (_, s) => {
                 early_error(format!("unknown library kind `{}`, expected \
                                      one of dylib, framework, or static",
-                                    s).as_slice());
+                                    s)[]);
             }
         };
         (name.to_string(), kind)
@@ -982,7 +982,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
         //             --debuginfo");
     }
 
-    let color = match matches.opt_str("color").as_ref().map(|s| s.as_slice()) {
+    let color = match matches.opt_str("color").as_ref().map(|s| s[]) {
         Some("auto")   => Auto,
         Some("always") => Always,
         Some("never")  => Never,
@@ -992,7 +992,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
         Some(arg) => {
             early_error(format!("argument for --color must be auto, always \
                                  or never (instead was `{}`)",
-                                arg).as_slice())
+                                arg)[])
         }
     };
 
@@ -1093,7 +1093,7 @@ mod test {
     #[test]
     fn test_switch_implies_cfg_test() {
         let matches =
-            &match getopts(&["--test".to_string()], optgroups().as_slice()) {
+            &match getopts(&["--test".to_string()], optgroups()[]) {
               Ok(m) => m,
               Err(f) => panic!("test_switch_implies_cfg_test: {}", f)
             };
@@ -1101,7 +1101,7 @@ mod test {
         let sessopts = build_session_options(matches);
         let sess = build_session(sessopts, None, registry);
         let cfg = build_configuration(&sess);
-        assert!((attr::contains_name(cfg.as_slice(), "test")));
+        assert!((attr::contains_name(cfg[], "test")));
     }
 
     // When the user supplies --test and --cfg test, don't implicitly add
@@ -1110,7 +1110,7 @@ mod test {
     fn test_switch_implies_cfg_test_unless_cfg_test() {
         let matches =
             &match getopts(&["--test".to_string(), "--cfg=test".to_string()],
-                           optgroups().as_slice()) {
+                           optgroups()[]) {
               Ok(m) => m,
               Err(f) => {
                 panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f)
@@ -1130,7 +1130,7 @@ mod test {
         {
             let matches = getopts(&[
                 "-Awarnings".to_string()
-            ], optgroups().as_slice()).unwrap();
+            ], optgroups()[]).unwrap();
             let registry = diagnostics::registry::Registry::new(&[]);
             let sessopts = build_session_options(&matches);
             let sess = build_session(sessopts, None, registry);
@@ -1141,7 +1141,7 @@ mod test {
             let matches = getopts(&[
                 "-Awarnings".to_string(),
                 "-Dwarnings".to_string()
-            ], optgroups().as_slice()).unwrap();
+            ], optgroups()[]).unwrap();
             let registry = diagnostics::registry::Registry::new(&[]);
             let sessopts = build_session_options(&matches);
             let sess = build_session(sessopts, None, registry);
@@ -1151,7 +1151,7 @@ mod test {
         {
             let matches = getopts(&[
                 "-Adead_code".to_string()
-            ], optgroups().as_slice()).unwrap();
+            ], optgroups()[]).unwrap();
             let registry = diagnostics::registry::Registry::new(&[]);
             let sessopts = build_session_options(&matches);
             let sess = build_session(sessopts, None, registry);
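
Two further patterns appear in session/config.rs: matching on an Option<String> as an Option<&str> (matches.opt_str("color").as_ref().map(|s| s[])) and dropping a now-redundant .into_string() because replace already yields an owned String. A sketch of how the same two spots read in current Rust; the option value here is invented for illustration:

fn main() {
    let color: Option<String> = Some("auto".to_string());

    // 2014: color.as_ref().map(|s| s[])  ->  today Option::as_deref
    match color.as_deref() {
        Some("auto") => println!("auto"),
        Some("always") => println!("always"),
        Some(other) => println!("unrecognized: {}", other),
        None => println!("default"),
    }

    // str::replace returns an owned String, so no extra conversion is needed
    let lint_name = "unused-variables".replace("-", "_");
    assert_eq!(lint_name, "unused_variables");
}
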
diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs
index 8516ece202c..37bdd1673e9 100644
--- a/src/librustc/session/mod.rs
+++ b/src/librustc/session/mod.rs
@@ -172,7 +172,7 @@ impl Session {
     // cases later on
     pub fn impossible_case(&self, sp: Span, msg: &str) -> ! {
         self.span_bug(sp,
-                      format!("impossible case reached: {}", msg).as_slice());
+                      format!("impossible case reached: {}", msg)[]);
     }
     pub fn verbose(&self) -> bool { self.debugging_opt(config::VERBOSE) }
     pub fn time_passes(&self) -> bool { self.debugging_opt(config::TIME_PASSES) }
@@ -211,7 +211,7 @@ impl Session {
     }
     pub fn target_filesearch<'a>(&'a self) -> filesearch::FileSearch<'a> {
         filesearch::FileSearch::new(self.sysroot(),
-                                    self.opts.target_triple.as_slice(),
+                                    self.opts.target_triple[],
                                     &self.opts.addl_lib_search_paths)
     }
     pub fn host_filesearch<'a>(&'a self) -> filesearch::FileSearch<'a> {
diff --git a/src/librustc/util/common.rs b/src/librustc/util/common.rs
index bc6fb1be075..e1448364a9e 100644
--- a/src/librustc/util/common.rs
+++ b/src/librustc/util/common.rs
@@ -14,6 +14,7 @@ use std::cell::{RefCell, Cell};
 use std::collections::HashMap;
 use std::fmt::Show;
 use std::hash::{Hash, Hasher};
+use std::iter::repeat;
 use std::time::Duration;
 
 use syntax::ast;
@@ -48,7 +49,7 @@ pub fn time<T, U, F>(do_it: bool, what: &str, u: U, f: F) -> T where
     };
     let rv = rv.unwrap();
 
-    println!("{}time: {}.{:03} \t{}", "  ".repeat(old),
+    println!("{}time: {}.{:03} \t{}", repeat("  ").take(old).collect::<String>(),
              dur.num_seconds(), dur.num_milliseconds() % 1000, what);
     DEPTH.with(|slot| slot.set(old));
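
util/common.rs swaps the old "  ".repeat(old) string method for iter::repeat("  ").take(old).collect::<String>(), which is why the hunk also adds use std::iter::repeat. Both spellings work in current Rust, where str::repeat exists again and returns a String. A quick sketch:

use std::iter;

fn main() {
    let depth = 3;
    let by_iter: String = iter::repeat("  ").take(depth).collect();
    let by_method = "  ".repeat(depth); // available again in modern Rust
    assert_eq!(by_iter, by_method);
    println!("{}time: ...", by_iter);
}
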
 
diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs
index 85a06125e23..5f61c04d366 100644
--- a/src/librustc/util/ppaux.rs
+++ b/src/librustc/util/ppaux.rs
@@ -55,12 +55,12 @@ pub fn note_and_explain_region(cx: &ctxt,
       (ref str, Some(span)) => {
         cx.sess.span_note(
             span,
-            format!("{}{}{}", prefix, *str, suffix).as_slice());
+            format!("{}{}{}", prefix, *str, suffix)[]);
         Some(span)
       }
       (ref str, None) => {
         cx.sess.note(
-            format!("{}{}{}", prefix, *str, suffix).as_slice());
+            format!("{}{}{}", prefix, *str, suffix)[]);
         None
       }
     }
@@ -269,7 +269,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
         };
 
         if abi != abi::Rust {
-            s.push_str(format!("extern {} ", abi.to_string()).as_slice());
+            s.push_str(format!("extern {} ", abi.to_string())[]);
         };
 
         s.push_str("fn");
@@ -293,7 +293,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
         match cty.store {
             ty::UniqTraitStore => {}
             ty::RegionTraitStore(region, _) => {
-                s.push_str(region_to_string(cx, "", true, region).as_slice());
+                s.push_str(region_to_string(cx, "", true, region)[]);
             }
         }
 
@@ -312,7 +312,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
                 assert_eq!(cty.onceness, ast::Once);
                 s.push_str("proc");
                 push_sig_to_string(cx, &mut s, '(', ')', &cty.sig,
-                                   bounds_str.as_slice());
+                                   bounds_str[]);
             }
             ty::RegionTraitStore(..) => {
                 match cty.onceness {
@@ -320,7 +320,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
                     ast::Once => s.push_str("once ")
                 }
                 push_sig_to_string(cx, &mut s, '|', '|', &cty.sig,
-                                   bounds_str.as_slice());
+                                   bounds_str[]);
             }
         }
 
@@ -353,7 +353,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
             ty::FnConverging(t) => {
                 if !ty::type_is_nil(t) {
                    s.push_str(" -> ");
-                   s.push_str(ty_to_string(cx, t).as_slice());
+                   s.push_str(ty_to_string(cx, t)[]);
                 }
             }
             ty::FnDiverging => {
@@ -390,7 +390,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
         }
         ty_rptr(r, ref tm) => {
             let mut buf = region_ptr_to_string(cx, r);
-            buf.push_str(mt_to_string(cx, tm).as_slice());
+            buf.push_str(mt_to_string(cx, tm)[]);
             buf
         }
         ty_open(typ) =>
@@ -400,7 +400,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
                 .iter()
                 .map(|elem| ty_to_string(cx, *elem))
                 .collect::<Vec<_>>();
-            match strs.as_slice() {
+            match strs[] {
                 [ref string] => format!("({},)", string),
                 strs => format!("({})", strs.connect(", "))
             }
@@ -551,7 +551,7 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>,
 pub fn ty_to_short_str<'tcx>(cx: &ctxt<'tcx>, typ: Ty<'tcx>) -> String {
     let mut s = typ.repr(cx).to_string();
     if s.len() >= 32u {
-        s = s.slice(0u, 32u).to_string();
+        s = s[0u..32u].to_string();
     }
     return s;
 }
@@ -616,7 +616,7 @@ impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for [T] {
 
 impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice<T> {
     fn repr(&self, tcx: &ctxt<'tcx>) -> String {
-        repr_vec(tcx, self.as_slice())
+        repr_vec(tcx, self[])
     }
 }
 
@@ -624,7 +624,7 @@ impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice<T> {
 // autoderef cannot convert the &[T] handler
 impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Vec<T> {
     fn repr(&self, tcx: &ctxt<'tcx>) -> String {
-        repr_vec(tcx, self.as_slice())
+        repr_vec(tcx, self[])
     }
 }
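
util/ppaux.rs shows two more forms: s.slice(0u, 32u) becomes a range index (s[0u..32u]; in current Rust &s[0..32], which still panics if byte 32 is not a char boundary), and a match over strs[] uses slice patterns to special-case a one-element tuple type. Slice patterns work the same way over a borrowed Vec today; a short sketch:

fn main() {
    let mut s = String::from("a fairly long type representation string");
    if s.len() >= 32 {
        // byte-range slice; panics if index 32 falls inside a multi-byte char
        s = s[0..32].to_string();
    }
    assert_eq!(s.len(), 32);

    let strs = vec!["int".to_string()];
    let rendered = match strs.as_slice() {
        [single] => format!("({},)", single),
        parts => format!("({})", parts.join(", ")),
    };
    assert_eq!(rendered, "(int,)");
}
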
 
diff --git a/src/librustc_back/archive.rs b/src/librustc_back/archive.rs
index 3a451070316..0bd4265e487 100644
--- a/src/librustc_back/archive.rs
+++ b/src/librustc_back/archive.rs
@@ -53,7 +53,7 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option<String>,
           args: &str, cwd: Option<&Path>,
           paths: &[&Path]) -> ProcessOutput {
     let ar = match *maybe_ar_prog {
-        Some(ref ar) => ar.as_slice(),
+        Some(ref ar) => ar[],
         None => "ar"
     };
     let mut cmd = Command::new(ar);
@@ -75,22 +75,22 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option<String>,
             if !o.status.success() {
                 handler.err(format!("{} failed with: {}",
                                  cmd,
-                                 o.status).as_slice());
+                                 o.status)[]);
                 handler.note(format!("stdout ---\n{}",
                                   str::from_utf8(o.output
-                                                  .as_slice()).unwrap())
-                          .as_slice());
+                                                  []).unwrap())
+                          []);
                 handler.note(format!("stderr ---\n{}",
                                   str::from_utf8(o.error
-                                                  .as_slice()).unwrap())
-                          .as_slice());
+                                                  []).unwrap())
+                          []);
                 handler.abort_if_errors();
             }
             o
         },
         Err(e) => {
-            handler.err(format!("could not exec `{}`: {}", ar.as_slice(),
-                             e).as_slice());
+            handler.err(format!("could not exec `{}`: {}", ar[],
+                             e)[]);
             handler.abort_if_errors();
             panic!("rustc::back::archive::run_ar() should not reach this point");
         }
@@ -106,16 +106,16 @@ pub fn find_library(name: &str, osprefix: &str, ossuffix: &str,
 
     for path in search_paths.iter() {
         debug!("looking for {} inside {}", name, path.display());
-        let test = path.join(oslibname.as_slice());
+        let test = path.join(oslibname[]);
         if test.exists() { return test }
         if oslibname != unixlibname {
-            let test = path.join(unixlibname.as_slice());
+            let test = path.join(unixlibname[]);
             if test.exists() { return test }
         }
     }
     handler.fatal(format!("could not find native static library `{}`, \
                            perhaps an -L flag is missing?",
-                          name).as_slice());
+                          name)[]);
 }
 
 impl<'a> Archive<'a> {
@@ -147,7 +147,7 @@ impl<'a> Archive<'a> {
     /// Lists all files in an archive
     pub fn files(&self) -> Vec<String> {
         let output = run_ar(self.handler, &self.maybe_ar_prog, "t", None, &[&self.dst]);
-        let output = str::from_utf8(output.output.as_slice()).unwrap();
+        let output = str::from_utf8(output.output[]).unwrap();
         // use lines_any because windows delimits output with `\r\n` instead of
         // just `\n`
         output.lines_any().map(|s| s.to_string()).collect()
@@ -179,9 +179,9 @@ impl<'a> ArchiveBuilder<'a> {
     /// search in the relevant locations for a library named `name`.
     pub fn add_native_library(&mut self, name: &str) -> io::IoResult<()> {
         let location = find_library(name,
-                                    self.archive.slib_prefix.as_slice(),
-                                    self.archive.slib_suffix.as_slice(),
-                                    self.archive.lib_search_paths.as_slice(),
+                                    self.archive.slib_prefix[],
+                                    self.archive.slib_suffix[],
+                                    self.archive.lib_search_paths[],
                                     self.archive.handler);
         self.add_archive(&location, name, |_| false)
     }
@@ -197,12 +197,12 @@ impl<'a> ArchiveBuilder<'a> {
         // as simple comparison is not enough - there
         // might be also an extra name suffix
         let obj_start = format!("{}", name);
-        let obj_start = obj_start.as_slice();
+        let obj_start = obj_start[];
         // Ignoring all bytecode files, no matter of
         // name
         let bc_ext = ".bytecode.deflate";
 
-        self.add_archive(rlib, name.as_slice(), |fname: &str| {
+        self.add_archive(rlib, name[], |fname: &str| {
             let skip_obj = lto && fname.starts_with(obj_start)
                 && fname.ends_with(".o");
             skip_obj || fname.ends_with(bc_ext) || fname == METADATA_FILENAME
@@ -239,7 +239,7 @@ impl<'a> ArchiveBuilder<'a> {
             // allow running `ar s file.a` to update symbols only.
             if self.should_update_symbols {
                 run_ar(self.archive.handler, &self.archive.maybe_ar_prog,
-                       "s", Some(self.work_dir.path()), args.as_slice());
+                       "s", Some(self.work_dir.path()), args[]);
             }
             return self.archive;
         }
@@ -259,7 +259,7 @@ impl<'a> ArchiveBuilder<'a> {
                 // Add the archive members seen so far, without updating the
                 // symbol table (`S`).
                 run_ar(self.archive.handler, &self.archive.maybe_ar_prog,
-                       "cruS", Some(self.work_dir.path()), args.as_slice());
+                       "cruS", Some(self.work_dir.path()), args[]);
 
                 args.clear();
                 args.push(&abs_dst);
@@ -274,7 +274,7 @@ impl<'a> ArchiveBuilder<'a> {
         // necessary.
         let flags = if self.should_update_symbols { "crus" } else { "cruS" };
         run_ar(self.archive.handler, &self.archive.maybe_ar_prog,
-               flags, Some(self.work_dir.path()), args.as_slice());
+               flags, Some(self.work_dir.path()), args[]);
 
         self.archive
     }
@@ -316,7 +316,7 @@ impl<'a> ArchiveBuilder<'a> {
             } else {
                 filename
             };
-            let new_filename = self.work_dir.path().join(filename.as_slice());
+            let new_filename = self.work_dir.path().join(filename[]);
             try!(fs::rename(file, &new_filename));
             self.members.push(Path::new(filename));
         }
diff --git a/src/librustc_back/rpath.rs b/src/librustc_back/rpath.rs
index 1f8549098d9..1056ac928e6 100644
--- a/src/librustc_back/rpath.rs
+++ b/src/librustc_back/rpath.rs
@@ -44,15 +44,15 @@ pub fn get_rpath_flags<F, G>(config: RPathConfig<F, G>) -> Vec<String> where
         l.map(|p| p.clone())
     }).collect::<Vec<_>>();
 
-    let rpaths = get_rpaths(config, libs.as_slice());
-    flags.push_all(rpaths_to_flags(rpaths.as_slice()).as_slice());
+    let rpaths = get_rpaths(config, libs[]);
+    flags.push_all(rpaths_to_flags(rpaths[])[]);
     flags
 }
 
 fn rpaths_to_flags(rpaths: &[String]) -> Vec<String> {
     let mut ret = Vec::new();
     for rpath in rpaths.iter() {
-        ret.push(format!("-Wl,-rpath,{}", (*rpath).as_slice()));
+        ret.push(format!("-Wl,-rpath,{}", (*rpath)[]));
     }
     return ret;
 }
@@ -82,14 +82,14 @@ fn get_rpaths<F, G>(mut config: RPathConfig<F, G>, libs: &[Path]) -> Vec<String>
         }
     }
 
-    log_rpaths("relative", rel_rpaths.as_slice());
-    log_rpaths("fallback", fallback_rpaths.as_slice());
+    log_rpaths("relative", rel_rpaths[]);
+    log_rpaths("fallback", fallback_rpaths[]);
 
     let mut rpaths = rel_rpaths;
-    rpaths.push_all(fallback_rpaths.as_slice());
+    rpaths.push_all(fallback_rpaths[]);
 
     // Remove duplicates
-    let rpaths = minimize_rpaths(rpaths.as_slice());
+    let rpaths = minimize_rpaths(rpaths[]);
     return rpaths;
 }
 
@@ -140,7 +140,7 @@ fn minimize_rpaths(rpaths: &[String]) -> Vec<String> {
     let mut set = HashSet::new();
     let mut minimized = Vec::new();
     for rpath in rpaths.iter() {
-        if set.insert(rpath.as_slice()) {
+        if set.insert(rpath[]) {
             minimized.push(rpath.clone());
         }
     }
diff --git a/src/librustc_back/svh.rs b/src/librustc_back/svh.rs
index 98fa659ba55..d40c9ee8af6 100644
--- a/src/librustc_back/svh.rs
+++ b/src/librustc_back/svh.rs
@@ -65,7 +65,7 @@ impl Svh {
     }
 
     pub fn as_str<'a>(&'a self) -> &'a str {
-        self.hash.as_slice()
+        self.hash[]
     }
 
     pub fn calculate(metadata: &Vec<String>, krate: &ast::Crate) -> Svh {
@@ -358,7 +358,7 @@ mod svh_visitor {
             fn macro_name(macro: &Mac) -> token::InternedString {
                 match &macro.node {
                     &MacInvocTT(ref path, ref _tts, ref _stx_ctxt) => {
-                        let s = path.segments.as_slice();
+                        let s = path.segments[];
                         assert_eq!(s.len(), 1);
                         content(s[0].identifier)
                     }
diff --git a/src/librustc_back/target/mod.rs b/src/librustc_back/target/mod.rs
index d12cb356e3f..99a25bebf40 100644
--- a/src/librustc_back/target/mod.rs
+++ b/src/librustc_back/target/mod.rs
@@ -224,7 +224,7 @@ impl Target {
                 Some(val) => val,
                 None =>
                     handler.fatal((format!("Field {} in target specification is required", name))
-                                  .as_slice())
+                                  [])
             }
         };
 
@@ -365,7 +365,7 @@ impl Target {
 
         let target_path = os::getenv("RUST_TARGET_PATH").unwrap_or(String::new());
 
-        let paths = os::split_paths(target_path.as_slice());
+        let paths = os::split_paths(target_path[]);
         // FIXME 16351: add a sane default search path?
 
         for dir in paths.iter() {
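
The target-loading code slices a String before handing it to os::split_paths, and the -L handling in session/config.rs above does the same for Path::new(s[]). The current counterparts in std::env and std::path are generic over AsRef, so an owned String can be passed by reference with no explicit slicing. A sketch under that assumption:

use std::env;
use std::path::{Path, PathBuf};

fn main() {
    let target_path = env::var("RUST_TARGET_PATH").unwrap_or_default();
    // env::split_paths accepts anything AsRef<OsStr>, so &String works as-is
    let paths: Vec<PathBuf> = env::split_paths(&target_path).collect();
    for dir in &paths {
        println!("search dir: {}", dir.display());
    }

    // Path::new likewise takes AsRef<OsStr>
    let spec = String::from("x86_64-unknown-linux-gnu.json");
    let p: &Path = Path::new(&spec);
    println!("{}", p.display());
}
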
diff --git a/src/librustc_borrowck/borrowck/check_loans.rs b/src/librustc_borrowck/borrowck/check_loans.rs
index 3bf817b42b0..568bb023b68 100644
--- a/src/librustc_borrowck/borrowck/check_loans.rs
+++ b/src/librustc_borrowck/borrowck/check_loans.rs
@@ -469,7 +469,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
                         new_loan.span,
                         format!("cannot borrow `{}`{} as mutable \
                                 more than once at a time",
-                                nl, new_loan_msg).as_slice())
+                                nl, new_loan_msg)[])
                 }
 
                 (ty::UniqueImmBorrow, _) => {
@@ -477,7 +477,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
                         new_loan.span,
                         format!("closure requires unique access to `{}` \
                                 but {} is already borrowed{}",
-                                nl, ol_pronoun, old_loan_msg).as_slice());
+                                nl, ol_pronoun, old_loan_msg)[]);
                 }
 
                 (_, ty::UniqueImmBorrow) => {
@@ -485,7 +485,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
                         new_loan.span,
                         format!("cannot borrow `{}`{} as {} because \
                                 previous closure requires unique access",
-                                nl, new_loan_msg, new_loan.kind.to_user_str()).as_slice());
+                                nl, new_loan_msg, new_loan.kind.to_user_str())[]);
                 }
 
                 (_, _) => {
@@ -498,7 +498,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
                                 new_loan.kind.to_user_str(),
                                 ol_pronoun,
                                 old_loan.kind.to_user_str(),
-                                old_loan_msg).as_slice());
+                                old_loan_msg)[]);
                 }
             }
 
@@ -507,7 +507,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
                     self.bccx.span_note(
                         span,
                         format!("borrow occurs due to use of `{}` in closure",
-                                nl).as_slice());
+                                nl)[]);
                 }
                 _ => { }
             }
@@ -556,7 +556,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
 
             self.bccx.span_note(
                 old_loan.span,
-                format!("{}; {}", borrow_summary, rule_summary).as_slice());
+                format!("{}; {}", borrow_summary, rule_summary)[]);
 
             let old_loan_span = self.tcx().map.span(old_loan.kill_scope.node_id());
             self.bccx.span_end_note(old_loan_span,
@@ -626,13 +626,13 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
                 self.bccx.span_err(
                     span,
                     format!("cannot use `{}` because it was mutably borrowed",
-                            self.bccx.loan_path_to_string(copy_path).as_slice())
-                    .as_slice());
+                            self.bccx.loan_path_to_string(copy_path)[])
+                    []);
                 self.bccx.span_note(
                     loan_span,
                     format!("borrow of `{}` occurs here",
-                            self.bccx.loan_path_to_string(&*loan_path).as_slice())
-                    .as_slice());
+                            self.bccx.loan_path_to_string(&*loan_path)[])
+                    []);
             }
         }
     }
@@ -651,20 +651,20 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
                 let err_message = match move_kind {
                     move_data::Captured =>
                         format!("cannot move `{}` into closure because it is borrowed",
-                                self.bccx.loan_path_to_string(move_path).as_slice()),
+                                self.bccx.loan_path_to_string(move_path)[]),
                     move_data::Declared |
                     move_data::MoveExpr |
                     move_data::MovePat =>
                         format!("cannot move out of `{}` because it is borrowed",
-                                self.bccx.loan_path_to_string(move_path).as_slice())
+                                self.bccx.loan_path_to_string(move_path)[])
                 };
 
-                self.bccx.span_err(span, err_message.as_slice());
+                self.bccx.span_err(span, err_message[]);
                 self.bccx.span_note(
                     loan_span,
                     format!("borrow of `{}` occurs here",
-                            self.bccx.loan_path_to_string(&*loan_path).as_slice())
-                    .as_slice());
+                            self.bccx.loan_path_to_string(&*loan_path)[])
+                    []);
             }
         }
     }
@@ -814,7 +814,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
                     self.bccx.span_err(
                         assignment_span,
                         format!("cannot assign to {}",
-                                self.bccx.cmt_to_string(&*assignee_cmt)).as_slice());
+                                self.bccx.cmt_to_string(&*assignee_cmt))[]);
                     self.bccx.span_help(
                         self.tcx().map.span(upvar_id.closure_expr_id),
                         "consider changing this closure to take self by mutable reference");
@@ -823,7 +823,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
                         assignment_span,
                         format!("cannot assign to {} {}",
                                 assignee_cmt.mutbl.to_user_str(),
-                                self.bccx.cmt_to_string(&*assignee_cmt)).as_slice());
+                                self.bccx.cmt_to_string(&*assignee_cmt))[]);
                 }
             }
             _ => match opt_loan_path(&assignee_cmt) {
@@ -833,14 +833,14 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
                         format!("cannot assign to {} {} `{}`",
                                 assignee_cmt.mutbl.to_user_str(),
                                 self.bccx.cmt_to_string(&*assignee_cmt),
-                                self.bccx.loan_path_to_string(&*lp)).as_slice());
+                                self.bccx.loan_path_to_string(&*lp))[]);
                 }
                 None => {
                     self.bccx.span_err(
                         assignment_span,
                         format!("cannot assign to {} {}",
                                 assignee_cmt.mutbl.to_user_str(),
-                                self.bccx.cmt_to_string(&*assignee_cmt)).as_slice());
+                                self.bccx.cmt_to_string(&*assignee_cmt))[]);
                 }
             }
         }
@@ -960,10 +960,10 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
         self.bccx.span_err(
             span,
             format!("cannot assign to `{}` because it is borrowed",
-                    self.bccx.loan_path_to_string(loan_path)).as_slice());
+                    self.bccx.loan_path_to_string(loan_path))[]);
         self.bccx.span_note(
             loan.span,
             format!("borrow of `{}` occurs here",
-                    self.bccx.loan_path_to_string(loan_path)).as_slice());
+                    self.bccx.loan_path_to_string(loan_path))[]);
     }
 }
diff --git a/src/librustc_borrowck/borrowck/fragments.rs b/src/librustc_borrowck/borrowck/fragments.rs
index 25ed5182555..dbbc52cf362 100644
--- a/src/librustc_borrowck/borrowck/fragments.rs
+++ b/src/librustc_borrowck/borrowck/fragments.rs
@@ -124,12 +124,12 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>,
         let attrs : &[ast::Attribute];
         attrs = match tcx.map.find(id) {
             Some(ast_map::NodeItem(ref item)) =>
-                item.attrs.as_slice(),
+                item.attrs[],
             Some(ast_map::NodeImplItem(&ast::MethodImplItem(ref m))) =>
-                m.attrs.as_slice(),
+                m.attrs[],
             Some(ast_map::NodeTraitItem(&ast::ProvidedMethod(ref m))) =>
-                m.attrs.as_slice(),
-            _ => [].as_slice(),
+                m.attrs[],
+            _ => [][],
         };
 
         let span_err =
@@ -145,7 +145,7 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>,
         for (i, mpi) in vec_rc.iter().enumerate() {
             let render = || this.path_loan_path(*mpi).user_string(tcx);
             if span_err {
-                tcx.sess.span_err(sp, format!("{}: `{}`", kind, render()).as_slice());
+                tcx.sess.span_err(sp, format!("{}: `{}`", kind, render())[]);
             }
             if print {
                 println!("id:{} {}[{}] `{}`", id, kind, i, render());
@@ -157,7 +157,7 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>,
         for (i, f) in vec_rc.iter().enumerate() {
             let render = || f.loan_path_user_string(this, tcx);
             if span_err {
-                tcx.sess.span_err(sp, format!("{}: `{}`", kind, render()).as_slice());
+                tcx.sess.span_err(sp, format!("{}: `{}`", kind, render())[]);
             }
             if print {
                 println!("id:{} {}[{}] `{}`", id, kind, i, render());
@@ -199,11 +199,11 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) {
     // First, filter out duplicates
     moved.sort();
     moved.dedup();
-    debug!("fragments 1 moved: {}", path_lps(moved.as_slice()));
+    debug!("fragments 1 moved: {}", path_lps(moved[]));
 
     assigned.sort();
     assigned.dedup();
-    debug!("fragments 1 assigned: {}", path_lps(assigned.as_slice()));
+    debug!("fragments 1 assigned: {}", path_lps(assigned[]));
 
     // Second, build parents from the moved and assigned.
     for m in moved.iter() {
@@ -223,14 +223,14 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) {
 
     parents.sort();
     parents.dedup();
-    debug!("fragments 2 parents: {}", path_lps(parents.as_slice()));
+    debug!("fragments 2 parents: {}", path_lps(parents[]));
 
     // Third, filter the moved and assigned fragments down to just the non-parents
-    moved.retain(|f| non_member(*f, parents.as_slice()));
-    debug!("fragments 3 moved: {}", path_lps(moved.as_slice()));
+    moved.retain(|f| non_member(*f, parents[]));
+    debug!("fragments 3 moved: {}", path_lps(moved[]));
 
-    assigned.retain(|f| non_member(*f, parents.as_slice()));
-    debug!("fragments 3 assigned: {}", path_lps(assigned.as_slice()));
+    assigned.retain(|f| non_member(*f, parents[]));
+    debug!("fragments 3 assigned: {}", path_lps(assigned[]));
 
     // Fourth, build the leftover from the moved, assigned, and parents.
     for m in moved.iter() {
@@ -248,16 +248,16 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) {
 
     unmoved.sort();
     unmoved.dedup();
-    debug!("fragments 4 unmoved: {}", frag_lps(unmoved.as_slice()));
+    debug!("fragments 4 unmoved: {}", frag_lps(unmoved[]));
 
     // Fifth, filter the leftover fragments down to its core.
     unmoved.retain(|f| match *f {
         AllButOneFrom(_) => true,
-        Just(mpi) => non_member(mpi, parents.as_slice()) &&
-            non_member(mpi, moved.as_slice()) &&
-            non_member(mpi, assigned.as_slice())
+        Just(mpi) => non_member(mpi, parents[]) &&
+            non_member(mpi, moved[]) &&
+            non_member(mpi, assigned[])
     });
-    debug!("fragments 5 unmoved: {}", frag_lps(unmoved.as_slice()));
+    debug!("fragments 5 unmoved: {}", frag_lps(unmoved[]));
 
     // Swap contents back in.
     fragments.unmoved_fragments = unmoved;
@@ -434,7 +434,7 @@ fn add_fragment_siblings_for_extension<'tcx>(this: &MoveData<'tcx>,
             let msg = format!("type {} ({}) is not fragmentable",
                               parent_ty.repr(tcx), sty_and_variant_info);
             let opt_span = origin_id.and_then(|id|tcx.map.opt_span(id));
-            tcx.sess.opt_span_bug(opt_span, msg.as_slice())
+            tcx.sess.opt_span_bug(opt_span, msg[])
         }
     }
 }
diff --git a/src/librustc_borrowck/borrowck/gather_loans/mod.rs b/src/librustc_borrowck/borrowck/gather_loans/mod.rs
index 08d12f8282b..d7f50ccc6ba 100644
--- a/src/librustc_borrowck/borrowck/gather_loans/mod.rs
+++ b/src/librustc_borrowck/borrowck/gather_loans/mod.rs
@@ -310,7 +310,7 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> {
                         self.tcx().sess.span_bug(
                             cmt.span,
                             format!("invalid borrow lifetime: {}",
-                                    loan_region).as_slice());
+                                    loan_region)[]);
                     }
                 };
                 debug!("loan_scope = {}", loan_scope);
diff --git a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs
index fbe78152a60..73b345a70af 100644
--- a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs
+++ b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs
@@ -120,7 +120,7 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
             bccx.span_err(
                 move_from.span,
                 format!("cannot move out of {}",
-                        bccx.cmt_to_string(&*move_from)).as_slice());
+                        bccx.cmt_to_string(&*move_from))[]);
         }
 
         mc::cat_downcast(ref b, _) |
@@ -132,7 +132,7 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
                         move_from.span,
                         format!("cannot move out of type `{}`, \
                                  which defines the `Drop` trait",
-                                b.ty.user_string(bccx.tcx)).as_slice());
+                                b.ty.user_string(bccx.tcx))[]);
                 },
                 _ => panic!("this path should not cause illegal move")
             }
@@ -155,10 +155,10 @@ fn note_move_destination(bccx: &BorrowckCtxt,
             format!("to prevent the move, \
                      use `ref {0}` or `ref mut {0}` to capture value by \
                      reference",
-                    pat_name).as_slice());
+                    pat_name)[]);
     } else {
         bccx.span_note(move_to_span,
                        format!("and here (use `ref {0}` or `ref mut {0}`)",
-                               pat_name).as_slice());
+                               pat_name)[]);
     }
 }
diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs
index 9be87b533f2..a13001b7968 100644
--- a/src/librustc_borrowck/borrowck/mod.rs
+++ b/src/librustc_borrowck/borrowck/mod.rs
@@ -146,7 +146,7 @@ fn borrowck_fn(this: &mut BorrowckCtxt,
     check_loans::check_loans(this,
                              &loan_dfcx,
                              flowed_moves,
-                             all_loans.as_slice(),
+                             all_loans[],
                              id,
                              decl,
                              body);
@@ -527,7 +527,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
     pub fn report(&self, err: BckError<'tcx>) {
         self.span_err(
             err.span,
-            self.bckerr_to_string(&err).as_slice());
+            self.bckerr_to_string(&err)[]);
         self.note_and_explain_bckerr(err);
     }
 
@@ -549,7 +549,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
                     use_span,
                     format!("{} of possibly uninitialized variable: `{}`",
                             verb,
-                            self.loan_path_to_string(lp)).as_slice());
+                            self.loan_path_to_string(lp))[]);
                 (self.loan_path_to_string(moved_lp),
                  String::new())
             }
@@ -591,7 +591,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
                     format!("{} of {}moved value: `{}`",
                             verb,
                             msg,
-                            nl).as_slice());
+                            nl)[]);
                 (ol, moved_lp_msg)
             }
         };
@@ -610,7 +610,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
                         self.tcx.sess.bug(format!("MoveExpr({}) maps to \
                                                    {}, not Expr",
                                                   the_move.id,
-                                                  r).as_slice())
+                                                  r)[])
                     }
                 };
                 let (suggestion, _) = move_suggestion(self.tcx, param_env, expr_ty,
@@ -621,7 +621,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
                             ol,
                             moved_lp_msg,
                             expr_ty.user_string(self.tcx),
-                            suggestion).as_slice());
+                            suggestion)[]);
             }
 
             move_data::MovePat => {
@@ -632,7 +632,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
                              which is moved by default",
                             ol,
                             moved_lp_msg,
-                            pat_ty.user_string(self.tcx)).as_slice());
+                            pat_ty.user_string(self.tcx))[]);
                 self.tcx.sess.span_help(span,
                     "use `ref` to override");
             }
@@ -648,7 +648,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
                         self.tcx.sess.bug(format!("Captured({}) maps to \
                                                    {}, not Expr",
                                                   the_move.id,
-                                                  r).as_slice())
+                                                  r)[])
                     }
                 };
                 let (suggestion, help) = move_suggestion(self.tcx,
@@ -663,7 +663,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
                             ol,
                             moved_lp_msg,
                             expr_ty.user_string(self.tcx),
-                            suggestion).as_slice());
+                            suggestion)[]);
                 self.tcx.sess.span_help(expr_span, help);
             }
         }
@@ -696,7 +696,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
         self.tcx.sess.span_err(
             span,
             format!("re-assignment of immutable variable `{}`",
-                    self.loan_path_to_string(lp)).as_slice());
+                    self.loan_path_to_string(lp))[]);
         self.tcx.sess.span_note(assign.span, "prior assignment occurs here");
     }
 
@@ -822,12 +822,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
                 self.tcx.sess.span_err(
                     span,
                     format!("{} in an aliasable location",
-                             prefix).as_slice());
+                             prefix)[]);
             }
             mc::AliasableClosure(id) => {
                 self.tcx.sess.span_err(span,
                                        format!("{} in a captured outer \
-                                               variable in an `Fn` closure", prefix).as_slice());
+                                               variable in an `Fn` closure", prefix)[]);
                 span_help!(self.tcx.sess, self.tcx.map.span(id),
                            "consider changing this closure to take self by mutable reference");
             }
@@ -835,12 +835,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
             mc::AliasableStaticMut(..) => {
                 self.tcx.sess.span_err(
                     span,
-                    format!("{} in a static location", prefix).as_slice());
+                    format!("{} in a static location", prefix)[]);
             }
             mc::AliasableBorrowed => {
                 self.tcx.sess.span_err(
                     span,
-                    format!("{} in a `&` reference", prefix).as_slice());
+                    format!("{} in a `&` reference", prefix)[]);
             }
         }
 
@@ -908,12 +908,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
                 note_and_explain_region(
                     self.tcx,
                     format!("{} would have to be valid for ",
-                            descr).as_slice(),
+                            descr)[],
                     loan_scope,
                     "...");
                 note_and_explain_region(
                     self.tcx,
-                    format!("...but {} is only valid for ", descr).as_slice(),
+                    format!("...but {} is only valid for ", descr)[],
                     ptr_scope,
                     "");
             }
@@ -933,7 +933,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
                 out.push('(');
                 self.append_loan_path_to_string(&**lp_base, out);
                 out.push_str(DOWNCAST_PRINTED_OPERATOR);
-                out.push_str(ty::item_path_str(self.tcx, variant_def_id).as_slice());
+                out.push_str(ty::item_path_str(self.tcx, variant_def_id)[]);
                 out.push(')');
             }
 
@@ -947,7 +947,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
                     }
                     mc::PositionalField(idx) => {
                         out.push('.');
-                        out.push_str(idx.to_string().as_slice());
+                        out.push_str(idx.to_string()[]);
                     }
                 }
             }
@@ -979,7 +979,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
                 out.push('(');
                 self.append_autoderefd_loan_path_to_string(&**lp_base, out);
                 out.push(':');
-                out.push_str(ty::item_path_str(self.tcx, variant_def_id).as_slice());
+                out.push_str(ty::item_path_str(self.tcx, variant_def_id)[]);
                 out.push(')');
             }
 
diff --git a/src/librustc_borrowck/graphviz.rs b/src/librustc_borrowck/graphviz.rs
index 3427be1443b..e2813c8e988 100644
--- a/src/librustc_borrowck/graphviz.rs
+++ b/src/librustc_borrowck/graphviz.rs
@@ -59,7 +59,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> {
             if seen_one { sets.push_str(" "); } else { seen_one = true; }
             sets.push_str(variant.short_name());
             sets.push_str(": ");
-            sets.push_str(self.dataflow_for_variant(e, n, variant).as_slice());
+            sets.push_str(self.dataflow_for_variant(e, n, variant)[]);
         }
         sets
     }
@@ -88,7 +88,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> {
                 set.push_str(", ");
             }
             let loan_str = self.borrowck_ctxt.loan_path_to_string(&*lp);
-            set.push_str(loan_str.as_slice());
+            set.push_str(loan_str[]);
             saw_some = true;
             true
         });
diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs
index 60b890b0370..20bb9c2f4fd 100644
--- a/src/librustc_driver/driver.rs
+++ b/src/librustc_driver/driver.rs
@@ -58,12 +58,12 @@ pub fn compile_input(sess: Session,
             let outputs = build_output_filenames(input,
                                                  outdir,
                                                  output,
-                                                 krate.attrs.as_slice(),
+                                                 krate.attrs[],
                                                  &sess);
-            let id = link::find_crate_name(Some(&sess), krate.attrs.as_slice(),
+            let id = link::find_crate_name(Some(&sess), krate.attrs[],
                                            input);
             let expanded_crate
-                = match phase_2_configure_and_expand(&sess, krate, id.as_slice(),
+                = match phase_2_configure_and_expand(&sess, krate, id[],
                                                      addl_plugins) {
                     None => return,
                     Some(k) => k
@@ -75,7 +75,7 @@ pub fn compile_input(sess: Session,
         let mut forest = ast_map::Forest::new(expanded_crate);
         let ast_map = assign_node_ids_and_map(&sess, &mut forest);
 
-        write_out_deps(&sess, input, &outputs, id.as_slice());
+        write_out_deps(&sess, input, &outputs, id[]);
 
         if stop_after_phase_2(&sess) { return; }
 
@@ -163,9 +163,9 @@ pub fn phase_2_configure_and_expand(sess: &Session,
     let time_passes = sess.time_passes();
 
     *sess.crate_types.borrow_mut() =
-        collect_crate_types(sess, krate.attrs.as_slice());
+        collect_crate_types(sess, krate.attrs[]);
     *sess.crate_metadata.borrow_mut() =
-        collect_crate_metadata(sess, krate.attrs.as_slice());
+        collect_crate_metadata(sess, krate.attrs[]);
 
     time(time_passes, "gated feature checking", (), |_| {
         let (features, unknown_features) =
@@ -257,8 +257,8 @@ pub fn phase_2_configure_and_expand(sess: &Session,
             if cfg!(windows) {
                 _old_path = os::getenv("PATH").unwrap_or(_old_path);
                 let mut new_path = sess.host_filesearch().get_dylib_search_paths();
-                new_path.extend(os::split_paths(_old_path.as_slice()).into_iter());
-                os::setenv("PATH", os::join_paths(new_path.as_slice()).unwrap());
+                new_path.extend(os::split_paths(_old_path[]).into_iter());
+                os::setenv("PATH", os::join_paths(new_path[]).unwrap());
             }
             let cfg = syntax::ext::expand::ExpansionConfig {
                 crate_name: crate_name.to_string(),
@@ -503,7 +503,7 @@ pub fn phase_5_run_llvm_passes(sess: &Session,
         time(sess.time_passes(), "LLVM passes", (), |_|
             write::run_passes(sess,
                               trans,
-                              sess.opts.output_types.as_slice(),
+                              sess.opts.output_types[],
                               outputs));
     }
 
@@ -517,14 +517,14 @@ pub fn phase_6_link_output(sess: &Session,
                            outputs: &OutputFilenames) {
     let old_path = os::getenv("PATH").unwrap_or_else(||String::new());
     let mut new_path = sess.host_filesearch().get_tools_search_paths();
-    new_path.extend(os::split_paths(old_path.as_slice()).into_iter());
-    os::setenv("PATH", os::join_paths(new_path.as_slice()).unwrap());
+    new_path.extend(os::split_paths(old_path[]).into_iter());
+    os::setenv("PATH", os::join_paths(new_path[]).unwrap());
 
     time(sess.time_passes(), "linking", (), |_|
          link::link_binary(sess,
                            trans,
                            outputs,
-                           trans.link.crate_name.as_slice()));
+                           trans.link.crate_name[]));
 
     os::setenv("PATH", old_path);
 }
@@ -613,7 +613,7 @@ fn write_out_deps(sess: &Session,
         // write Makefile-compatible dependency rules
         let files: Vec<String> = sess.codemap().files.borrow()
                                    .iter().filter(|fmap| fmap.is_real_file())
-                                   .map(|fmap| escape_dep_filename(fmap.name.as_slice()))
+                                   .map(|fmap| escape_dep_filename(fmap.name[]))
                                    .collect();
         let mut file = try!(io::File::create(&deps_filename));
         for path in out_filenames.iter() {
@@ -627,7 +627,7 @@ fn write_out_deps(sess: &Session,
         Ok(()) => {}
         Err(e) => {
             sess.fatal(format!("error writing dependencies to `{}`: {}",
-                               deps_filename.display(), e).as_slice());
+                               deps_filename.display(), e)[]);
         }
     }
 }
@@ -698,7 +698,7 @@ pub fn collect_crate_types(session: &Session,
         if !res {
             session.warn(format!("dropping unsupported crate type `{}` \
                                    for target `{}`",
-                                 *crate_type, session.opts.target_triple).as_slice());
+                                 *crate_type, session.opts.target_triple)[]);
         }
 
         res
diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs
index 6944c733456..1fb90d7860e 100644
--- a/src/librustc_driver/lib.rs
+++ b/src/librustc_driver/lib.rs
@@ -55,6 +55,7 @@ use rustc::DIAGNOSTICS;
 
 use std::any::AnyRefExt;
 use std::io;
+use std::iter::repeat;
 use std::os;
 use std::thread;
 
@@ -88,12 +89,12 @@ fn run_compiler(args: &[String]) {
     let descriptions = diagnostics::registry::Registry::new(&DIAGNOSTICS);
     match matches.opt_str("explain") {
         Some(ref code) => {
-            match descriptions.find_description(code.as_slice()) {
+            match descriptions.find_description(code[]) {
                 Some(ref description) => {
                     println!("{}", description);
                 }
                 None => {
-                    early_error(format!("no extended information for {}", code).as_slice());
+                    early_error(format!("no extended information for {}", code)[]);
                 }
             }
             return;
@@ -119,7 +120,7 @@ fn run_compiler(args: &[String]) {
             early_error("no input filename given");
         }
         1u => {
-            let ifile = matches.free[0].as_slice();
+            let ifile = matches.free[0][];
             if ifile == "-" {
                 let contents = io::stdin().read_to_end().unwrap();
                 let src = String::from_utf8(contents).unwrap();
@@ -138,7 +139,7 @@ fn run_compiler(args: &[String]) {
     }
 
     let pretty = matches.opt_default("pretty", "normal").map(|a| {
-        pretty::parse_pretty(&sess, a.as_slice())
+        pretty::parse_pretty(&sess, a[])
     });
     match pretty.into_iter().next() {
         Some((ppm, opt_uii)) => {
@@ -261,7 +262,8 @@ Available lint options:
         .map(|&s| s.name.width(true))
         .max().unwrap_or(0);
     let padded = |x: &str| {
-        let mut s = " ".repeat(max_name_len - x.char_len());
+        let mut s = repeat(" ").take(max_name_len - x.chars().count())
+                               .collect::<String>();
         s.push_str(x);
         s
     };
@@ -274,7 +276,7 @@ Available lint options:
         for lint in lints.into_iter() {
             let name = lint.name_lower().replace("_", "-");
             println!("    {}  {:7.7}  {}",
-                     padded(name.as_slice()), lint.default_level.as_str(), lint.desc);
+                     padded(name[]), lint.default_level.as_str(), lint.desc);
         }
         println!("\n");
     };
@@ -287,7 +289,8 @@ Available lint options:
         .map(|&(s, _)| s.width(true))
         .max().unwrap_or(0);
     let padded = |x: &str| {
-        let mut s = " ".repeat(max_name_len - x.char_len());
+        let mut s = repeat(" ").take(max_name_len - x.chars().count())
+                               .collect::<String>();
         s.push_str(x);
         s
     };
@@ -303,7 +306,7 @@ Available lint options:
             let desc = to.into_iter().map(|x| x.as_str().replace("_", "-"))
                          .collect::<Vec<String>>().connect(", ");
             println!("    {}  {}",
-                     padded(name.as_slice()), desc);
+                     padded(name[]), desc);
         }
         println!("\n");
     };
@@ -367,10 +370,10 @@ pub fn handle_options(mut args: Vec<String>) -> Option<getopts::Matches> {
     }
 
     let matches =
-        match getopts::getopts(args.as_slice(), config::optgroups().as_slice()) {
+        match getopts::getopts(args[], config::optgroups()[]) {
             Ok(m) => m,
             Err(f) => {
-                early_error(f.to_string().as_slice());
+                early_error(f.to_string()[]);
             }
         };
 
@@ -518,7 +521,7 @@ pub fn monitor<F:FnOnce()+Send>(f: F) {
                     "run with `RUST_BACKTRACE=1` for a backtrace".to_string(),
                 ];
                 for note in xs.iter() {
-                    emitter.emit(None, note.as_slice(), None, diagnostic::Note)
+                    emitter.emit(None, note[], None, diagnostic::Note)
                 }
 
                 match r.read_to_string() {
@@ -526,8 +529,7 @@ pub fn monitor<F:FnOnce()+Send>(f: F) {
                     Err(e) => {
                         emitter.emit(None,
                                      format!("failed to read internal \
-                                              stderr: {}",
-                                             e).as_slice(),
+                                              stderr: {}", e)[],
                                      None,
                                      diagnostic::Error)
                     }
diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs
index 2eb9d2c67a7..4b10ca92e70 100644
--- a/src/librustc_driver/pretty.rs
+++ b/src/librustc_driver/pretty.rs
@@ -71,10 +71,10 @@ pub fn parse_pretty(sess: &Session, name: &str) -> (PpMode, Option<UserIdentifie
             sess.fatal(format!(
                 "argument to `pretty` must be one of `normal`, \
                  `expanded`, `flowgraph=<nodeid>`, `typed`, `identified`, \
-                 or `expanded,identified`; got {}", name).as_slice());
+                 or `expanded,identified`; got {}", name)[]);
         }
     };
-    let opt_second = opt_second.and_then::<UserIdentifiedItem, _>(from_str);
+    let opt_second = opt_second.and_then(|s| s.parse::<UserIdentifiedItem>());
     (first, opt_second)
 }
 
@@ -276,7 +276,7 @@ impl<'tcx> pprust::PpAnn for TypedAnnotation<'tcx> {
                 try!(pp::word(&mut s.s,
                               ppaux::ty_to_string(
                                   tcx,
-                                  ty::expr_ty(tcx, expr)).as_slice()));
+                                  ty::expr_ty(tcx, expr))[]));
                 s.pclose()
             }
             _ => Ok(())
@@ -311,7 +311,7 @@ pub enum UserIdentifiedItem {
 
 impl FromStr for UserIdentifiedItem {
     fn from_str(s: &str) -> Option<UserIdentifiedItem> {
-        from_str(s).map(ItemViaNode).or_else(|| {
+        s.parse().map(ItemViaNode).or_else(|| {
             let v : Vec<_> = s.split_str("::")
                 .map(|x|x.to_string())
                 .collect();
@@ -322,7 +322,7 @@ impl FromStr for UserIdentifiedItem {
 
 enum NodesMatchingUII<'a, 'ast: 'a> {
     NodesMatchingDirect(option::IntoIter<ast::NodeId>),
-    NodesMatchingSuffix(ast_map::NodesMatchingSuffix<'a, 'ast, String>),
+    NodesMatchingSuffix(ast_map::NodesMatchingSuffix<'a, 'ast>),
 }
 
 impl<'a, 'ast> Iterator<ast::NodeId> for NodesMatchingUII<'a, 'ast> {
@@ -348,7 +348,7 @@ impl UserIdentifiedItem {
             ItemViaNode(node_id) =>
                 NodesMatchingDirect(Some(node_id).into_iter()),
             ItemViaPath(ref parts) =>
-                NodesMatchingSuffix(map.nodes_matching_suffix(parts.as_slice())),
+                NodesMatchingSuffix(map.nodes_matching_suffix(parts[])),
         }
     }
 
@@ -360,7 +360,7 @@ impl UserIdentifiedItem {
                         user_option,
                         self.reconstructed_input(),
                         is_wrong_because);
-            sess.fatal(message.as_slice())
+            sess.fatal(message[])
         };
 
         let mut saw_node = ast::DUMMY_NODE_ID;
@@ -414,12 +414,12 @@ pub fn pretty_print_input(sess: Session,
                           opt_uii: Option<UserIdentifiedItem>,
                           ofile: Option<Path>) {
     let krate = driver::phase_1_parse_input(&sess, cfg, input);
-    let id = link::find_crate_name(Some(&sess), krate.attrs.as_slice(), input);
+    let id = link::find_crate_name(Some(&sess), krate.attrs[], input);
 
     let is_expanded = needs_expansion(&ppm);
     let compute_ast_map = needs_ast_map(&ppm, &opt_uii);
     let krate = if compute_ast_map {
-        match driver::phase_2_configure_and_expand(&sess, krate, id.as_slice(), None) {
+        match driver::phase_2_configure_and_expand(&sess, krate, id[], None) {
             None => return,
             Some(k) => k
         }
@@ -438,7 +438,7 @@ pub fn pretty_print_input(sess: Session,
     };
 
     let src_name = driver::source_name(input);
-    let src = sess.codemap().get_filemap(src_name.as_slice())
+    let src = sess.codemap().get_filemap(src_name[])
                             .src.as_bytes().to_vec();
     let mut rdr = MemReader::new(src);
 
@@ -499,7 +499,7 @@ pub fn pretty_print_input(sess: Session,
             debug!("pretty printing flow graph for {}", opt_uii);
             let uii = opt_uii.unwrap_or_else(|| {
                 sess.fatal(format!("`pretty flowgraph=..` needs NodeId (int) or
-                                     unique path suffix (b::c::d)").as_slice())
+                                     unique path suffix (b::c::d)")[])
 
             });
             let ast_map = ast_map.expect("--pretty flowgraph missing ast_map");
@@ -507,7 +507,7 @@ pub fn pretty_print_input(sess: Session,
 
             let node = ast_map.find(nodeid).unwrap_or_else(|| {
                 sess.fatal(format!("--pretty flowgraph couldn't find id: {}",
-                                   nodeid).as_slice())
+                                   nodeid)[])
             });
 
             let code = blocks::Code::from_node(node);
@@ -526,8 +526,8 @@ pub fn pretty_print_input(sess: Session,
                     // point to what was found, if there's an
                     // accessible span.
                     match ast_map.opt_span(nodeid) {
-                        Some(sp) => sess.span_fatal(sp, message.as_slice()),
-                        None => sess.fatal(message.as_slice())
+                        Some(sp) => sess.span_fatal(sp, message[]),
+                        None => sess.fatal(message[])
                     }
                 }
             }
@@ -587,7 +587,7 @@ fn print_flowgraph<W:io::Writer>(variants: Vec<borrowck_dot::Variant>,
             let m = "graphviz::render failed";
             io::IoError {
                 detail: Some(match orig_detail {
-                    None => m.into_string(),
+                    None => m.to_string(),
                     Some(d) => format!("{}: {}", m, d)
                 }),
                 ..ioerr
diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs
index d4a0b49436d..bf9e9294307 100644
--- a/src/librustc_resolve/lib.rs
+++ b/src/librustc_resolve/lib.rs
@@ -97,8 +97,8 @@ use std::mem::replace;
 use std::rc::{Rc, Weak};
 use std::uint;
 
-// Definition mapping
-pub type DefMap = RefCell<NodeMap<Def>>;
+mod check_unused;
+mod record_exports;
 
 #[deriving(Copy)]
 struct BindingInfo {
@@ -1119,14 +1119,14 @@ impl<'a> Resolver<'a> {
                     self.resolve_error(sp,
                         format!("duplicate definition of {} `{}`",
                              namespace_error_to_string(duplicate_type),
-                             token::get_name(name)).as_slice());
+                             token::get_name(name))[]);
                     {
                         let r = child.span_for_namespace(ns);
                         for sp in r.iter() {
                             self.session.span_note(*sp,
                                  format!("first definition of {} `{}` here",
                                       namespace_error_to_string(duplicate_type),
-                                      token::get_name(name)).as_slice());
+                                      token::get_name(name))[]);
                         }
                     }
                 }
@@ -2147,7 +2147,7 @@ impl<'a> Resolver<'a> {
                 debug!("(building import directive) building import \
                         directive: {}::{}",
                        self.names_to_string(module_.imports.borrow().last().unwrap()
-                                                 .module_path.as_slice()),
+                                                 .module_path[]),
                        token::get_name(target));
 
                 let mut import_resolutions = module_.import_resolutions
@@ -2265,10 +2265,10 @@ impl<'a> Resolver<'a> {
                     let msg = format!("unresolved import `{}`{}",
                                       self.import_path_to_string(
                                           import_directive.module_path
-                                                          .as_slice(),
+                                                          [],
                                           import_directive.subclass),
                                       help);
-                    self.resolve_error(span, msg.as_slice());
+                    self.resolve_error(span, msg[]);
                 }
                 Indeterminate => break, // Bail out. We'll come around next time.
                 Success(()) => () // Good. Continue.
@@ -2298,7 +2298,7 @@ impl<'a> Resolver<'a> {
                                         .iter()
                                         .map(|seg| seg.identifier.name)
                                         .collect();
-        self.names_to_string(names.as_slice())
+        self.names_to_string(names[])
     }
 
     fn import_directive_subclass_to_string(&mut self,
@@ -2340,7 +2340,7 @@ impl<'a> Resolver<'a> {
 
         debug!("(resolving import for module) resolving import `{}::...` in \
                 `{}`",
-               self.names_to_string(module_path.as_slice()),
+               self.names_to_string(module_path[]),
                self.module_to_string(&*module_));
 
         // First, resolve the module path for the directive, if necessary.
@@ -2349,7 +2349,7 @@ impl<'a> Resolver<'a> {
             Some((self.graph_root.get_module(), LastMod(AllPublic)))
         } else {
             match self.resolve_module_path(module_.clone(),
-                                           module_path.as_slice(),
+                                           module_path[],
                                            DontUseLexicalScope,
                                            import_directive.span,
                                            ImportSearch) {
@@ -2941,7 +2941,7 @@ impl<'a> Resolver<'a> {
                                     ValueNS => "value",
                                   },
                                   token::get_name(name).get());
-                self.session.span_err(import_span, msg.as_slice());
+                self.session.span_err(import_span, msg[]);
             }
             Some(_) | None => {}
         }
@@ -2956,7 +2956,7 @@ impl<'a> Resolver<'a> {
         if !name_bindings.defined_in_namespace_with(namespace, IMPORTABLE) {
             let msg = format!("`{}` is not directly importable",
                               token::get_name(name));
-            self.session.span_err(import_span, msg.as_slice());
+            self.session.span_err(import_span, msg[]);
         }
     }
 
@@ -2981,7 +2981,7 @@ impl<'a> Resolver<'a> {
                                        crate in this module \
                                        (maybe you meant `use {0}::*`?)",
                                       token::get_name(name).get());
-                    self.session.span_err(import_span, msg.as_slice());
+                    self.session.span_err(import_span, msg[]);
                 }
                 Some(_) | None => {}
             }
@@ -3003,7 +3003,7 @@ impl<'a> Resolver<'a> {
                     let msg = format!("import `{}` conflicts with value \
                                        in this module",
                                       token::get_name(name).get());
-                    self.session.span_err(import_span, msg.as_slice());
+                    self.session.span_err(import_span, msg[]);
                     if let Some(span) = value.value_span {
                         self.session.span_note(span,
                                                "conflicting value here");
@@ -3021,7 +3021,7 @@ impl<'a> Resolver<'a> {
                             let msg = format!("import `{}` conflicts with type in \
                                                this module",
                                               token::get_name(name).get());
-                            self.session.span_err(import_span, msg.as_slice());
+                            self.session.span_err(import_span, msg[]);
                             if let Some(span) = ty.type_span {
                                 self.session.span_note(span,
                                                        "note conflicting type here")
@@ -3034,7 +3034,7 @@ impl<'a> Resolver<'a> {
                                         let msg = format!("inherent implementations \
                                                            are only allowed on types \
                                                            defined in the current module");
-                                        self.session.span_err(span, msg.as_slice());
+                                        self.session.span_err(span, msg[]);
                                         self.session.span_note(import_span,
                                                                "import from other module here")
                                     }
@@ -3043,7 +3043,7 @@ impl<'a> Resolver<'a> {
                                     let msg = format!("import `{}` conflicts with existing \
                                                        submodule",
                                                       token::get_name(name).get());
-                                    self.session.span_err(import_span, msg.as_slice());
+                                    self.session.span_err(import_span, msg[]);
                                     if let Some(span) = ty.type_span {
                                         self.session.span_note(span,
                                                                "note conflicting module here")
@@ -3073,7 +3073,7 @@ impl<'a> Resolver<'a> {
                 .span_err(span,
                           format!("an external crate named `{}` has already \
                                    been imported into this module",
-                                  token::get_name(name).get()).as_slice());
+                                  token::get_name(name).get())[]);
         }
     }
 
@@ -3092,7 +3092,7 @@ impl<'a> Resolver<'a> {
                           format!("the name `{}` conflicts with an external \
                                    crate that has been imported into this \
                                    module",
-                                  token::get_name(name).get()).as_slice());
+                                  token::get_name(name).get())[]);
         }
     }
 
@@ -3140,7 +3140,7 @@ impl<'a> Resolver<'a> {
                     let segment_name = token::get_name(name);
                     let module_name = self.module_to_string(&*search_module);
                     let mut span = span;
-                    let msg = if "???" == module_name.as_slice() {
+                    let msg = if "???" == module_name[] {
                         span.hi = span.lo + Pos::from_uint(segment_name.get().len());
 
                         match search_parent_externals(name,
@@ -3253,14 +3253,14 @@ impl<'a> Resolver<'a> {
         match module_prefix_result {
             Failed(None) => {
                 let mpath = self.names_to_string(module_path);
-                let mpath = mpath.as_slice();
+                let mpath = mpath[];
                 match mpath.rfind(':') {
                     Some(idx) => {
                         let msg = format!("Could not find `{}` in `{}`",
                                             // idx +- 1 to account for the
                                             // colons on either side
-                                            mpath.slice_from(idx + 1),
-                                            mpath.slice_to(idx - 1));
+                                            mpath[idx + 1..],
+                                            mpath[0..idx - 1]);
                         return Failed(Some((span, msg)));
                     },
                     None => {
@@ -3431,7 +3431,7 @@ impl<'a> Resolver<'a> {
                                               true) {
                 Failed(Some((span, msg))) =>
                     self.resolve_error(span, format!("failed to resolve. {}",
-                                                     msg)),
+                                                     msg)[]),
                 Failed(None) => (), // Continue up the search chain.
                 Indeterminate => {
                     // We couldn't see through the higher scope because of an
@@ -3686,8 +3686,8 @@ impl<'a> Resolver<'a> {
                                    "unresolved import");
             } else {
                 let err = format!("unresolved import (maybe you meant `{}::*`?)",
-                                  sn.slice(0, sn.len()));
-                self.resolve_error((*imports)[index].span, err.as_slice());
+                                  sn);
+                self.resolve_error((*imports)[index].span, err[]);
             }
         }
 
@@ -3779,7 +3779,7 @@ impl<'a> Resolver<'a> {
         match def_like {
             DlDef(d @ DefUpvar(..)) => {
                 self.session.span_bug(span,
-                    format!("unexpected {} in bindings", d).as_slice())
+                    format!("unexpected {} in bindings", d)[])
             }
             DlDef(d @ DefLocal(_)) => {
                 let node_id = d.def_id().node;
@@ -3995,7 +3995,7 @@ impl<'a> Resolver<'a> {
                                             generics,
                                             implemented_traits,
                                             &**self_type,
-                                            impl_items.as_slice());
+                                            impl_items[]);
             }
 
             ItemTrait(_, ref generics, ref unbound, ref bounds, ref trait_items) => {
@@ -4080,7 +4080,7 @@ impl<'a> Resolver<'a> {
             ItemStruct(ref struct_def, ref generics) => {
                 self.resolve_struct(item.id,
                                     generics,
-                                    struct_def.fields.as_slice());
+                                    struct_def.fields[]);
             }
 
             ItemMod(ref module_) => {
@@ -4153,7 +4153,7 @@ impl<'a> Resolver<'a> {
                                                     parameter in this type \
                                                     parameter list",
                                                    token::get_name(
-                                                       name)).as_slice())
+                                                       name))[])
                     }
                     seen_bindings.insert(name);
 
@@ -4330,7 +4330,7 @@ impl<'a> Resolver<'a> {
                 };
 
                 let msg = format!("attempt to {} a nonexistent trait `{}`", usage_str, path_str);
-                self.resolve_error(trait_reference.path.span, msg.as_slice());
+                self.resolve_error(trait_reference.path.span, msg[]);
             }
             Some(def) => {
                 match def {
@@ -4342,14 +4342,14 @@ impl<'a> Resolver<'a> {
                         self.resolve_error(trait_reference.path.span,
                                            format!("`{}` is not a trait",
                                                    self.path_names_to_string(
-                                                       &trait_reference.path)));
+                                                       &trait_reference.path))[]);
 
                         // If it's a typedef, give a note
                         if let DefTy(..) = def {
                             self.session.span_note(
                                 trait_reference.path.span,
                                 format!("`type` aliases cannot be used for traits")
-                                    .as_slice());
+                                    []);
                         }
                     }
                 }
@@ -4546,7 +4546,7 @@ impl<'a> Resolver<'a> {
                 self.resolve_error(span,
                                     format!("method `{}` is not a member of trait `{}`",
                                             token::get_name(name),
-                                            path_str).as_slice());
+                                            path_str)[]);
             }
         }
     }
@@ -4613,7 +4613,7 @@ impl<'a> Resolver<'a> {
                         format!("variable `{}` from pattern #1 is \
                                   not bound in pattern #{}",
                                 token::get_name(key),
-                                i + 1).as_slice());
+                                i + 1)[]);
                   }
                   Some(binding_i) => {
                     if binding_0.binding_mode != binding_i.binding_mode {
@@ -4622,7 +4622,7 @@ impl<'a> Resolver<'a> {
                             format!("variable `{}` is bound with different \
                                       mode in pattern #{} than in pattern #1",
                                     token::get_name(key),
-                                    i + 1).as_slice());
+                                    i + 1)[]);
                     }
                   }
                 }
@@ -4635,7 +4635,7 @@ impl<'a> Resolver<'a> {
                         format!("variable `{}` from pattern {}{} is \
                                   not bound in pattern {}1",
                                 token::get_name(key),
-                                "#", i + 1, "#").as_slice());
+                                "#", i + 1, "#")[]);
                 }
             }
         }
@@ -4752,7 +4752,7 @@ impl<'a> Resolver<'a> {
                     None => {
                         let msg = format!("use of undeclared type name `{}`",
                                           self.path_names_to_string(path));
-                        self.resolve_error(ty.span, msg.as_slice());
+                        self.resolve_error(ty.span, msg[]);
                     }
                 }
             }
@@ -4832,7 +4832,7 @@ impl<'a> Resolver<'a> {
                                 format!("declaration of `{}` shadows an enum \
                                          variant or unit-like struct in \
                                          scope",
-                                        token::get_name(renamed)).as_slice());
+                                        token::get_name(renamed))[]);
                         }
                         FoundConst(ref def, lp) if mode == RefutableMode => {
                             debug!("(resolving pattern) resolving `{}` to \
@@ -4884,7 +4884,7 @@ impl<'a> Resolver<'a> {
                                                             list",
                                                            token::get_ident(
                                                                ident))
-                                                   .as_slice())
+                                                   [])
                             } else if bindings_list.get(&renamed) ==
                                     Some(&pat_id) {
                                 // Then this is a duplicate variable in the
@@ -4893,7 +4893,7 @@ impl<'a> Resolver<'a> {
                                     format!("identifier `{}` is bound \
                                              more than once in the same \
                                              pattern",
-                                            token::get_ident(ident)).as_slice());
+                                            token::get_ident(ident))[]);
                             }
                             // Else, not bound in the same pattern: do
                             // nothing.
@@ -4922,7 +4922,7 @@ impl<'a> Resolver<'a> {
                                         path.segments
                                             .last()
                                             .unwrap()
-                                            .identifier)).as_slice());
+                                            .identifier))[]);
                         }
                         None => {
                             self.resolve_error(path.span,
@@ -4931,7 +4931,7 @@ impl<'a> Resolver<'a> {
                                         path.segments
                                             .last()
                                             .unwrap()
-                                            .identifier)).as_slice());
+                                            .identifier))[]);
                         }
                     }
 
@@ -4962,7 +4962,7 @@ impl<'a> Resolver<'a> {
                                     def: {}", result);
                             let msg = format!("`{}` does not name a structure",
                                               self.path_names_to_string(path));
-                            self.resolve_error(path.span, msg.as_slice());
+                            self.resolve_error(path.span, msg[]);
                         }
                     }
                 }
@@ -5024,7 +5024,7 @@ impl<'a> Resolver<'a> {
                 match err {
                     Some((span, msg)) => {
                         self.resolve_error(span, format!("failed to resolve: {}",
-                                                         msg));
+                                                         msg)[]);
                     }
                     None => ()
                 }
@@ -5220,7 +5220,7 @@ impl<'a> Resolver<'a> {
         let last_private;
         let module = self.current_module.clone();
         match self.resolve_module_path(module,
-                                       module_path.as_slice(),
+                                       module_path[],
                                        UseLexicalScope,
                                        path.span,
                                        PathSearch) {
@@ -5235,7 +5235,7 @@ impl<'a> Resolver<'a> {
                 };
 
                 self.resolve_error(span, format!("failed to resolve. {}",
-                                                 msg.as_slice()));
+                                                 msg)[]);
                 return None;
             }
             Indeterminate => panic!("indeterminate unexpected"),
@@ -5278,7 +5278,7 @@ impl<'a> Resolver<'a> {
         let containing_module;
         let last_private;
         match self.resolve_module_path_from_root(root_module,
-                                                 module_path.as_slice(),
+                                                 module_path[],
                                                  0,
                                                  path.span,
                                                  PathSearch,
@@ -5288,13 +5288,13 @@ impl<'a> Resolver<'a> {
                     Some((span, msg)) => (span, msg),
                     None => {
                         let msg = format!("Use of undeclared module `::{}`",
-                                          self.names_to_string(module_path.as_slice()));
+                                          self.names_to_string(module_path[]));
                         (path.span, msg)
                     }
                 };
 
                 self.resolve_error(span, format!("failed to resolve. {}",
-                                                 msg.as_slice()));
+                                                 msg)[]);
                 return None;
             }
 
@@ -5335,7 +5335,7 @@ impl<'a> Resolver<'a> {
             }
             TypeNS => {
                 let name = ident.name;
-                self.search_ribs(self.type_ribs.as_slice(), name, span)
+                self.search_ribs(self.type_ribs[], name, span)
             }
         };
 
@@ -5389,7 +5389,8 @@ impl<'a> Resolver<'a> {
             Failed(err) => {
                 match err {
                     Some((span, msg)) =>
-                        self.resolve_error(span, format!("failed to resolve. {}", msg)),
+                        self.resolve_error(span, format!("failed to resolve. {}",
+                                                         msg)[]),
                     None => ()
                 }
 
@@ -5409,9 +5410,9 @@ impl<'a> Resolver<'a> {
         rs
     }
 
-    fn resolve_error<T: Str>(&self, span: Span, s: T) {
+    fn resolve_error(&self, span: Span, s: &str) {
         if self.emit_errors {
-            self.session.span_err(span, s.as_slice());
+            self.session.span_err(span, s);
         }
     }
 
@@ -5446,7 +5447,7 @@ impl<'a> Resolver<'a> {
                 }
             } else {
                 match this.resolve_module_path(root,
-                                                name_path.as_slice(),
+                                                name_path[],
                                                 UseLexicalScope,
                                                 span,
                                                 PathSearch) {
@@ -5484,7 +5485,7 @@ impl<'a> Resolver<'a> {
         let name_path = path.segments.iter().map(|seg| seg.identifier.name).collect::<Vec<_>>();
 
         // Look for a method in the current self type's impl module.
-        match get_module(self, path.span, name_path.as_slice()) {
+        match get_module(self, path.span, name_path[]) {
             Some(module) => match module.children.borrow().get(&name) {
                 Some(binding) => {
                     let p_str = self.path_names_to_string(&path);
@@ -5695,7 +5696,7 @@ impl<'a> Resolver<'a> {
                                 def: {}", result);
                         let msg = format!("`{}` does not name a structure",
                                           self.path_names_to_string(path));
-                        self.resolve_error(path.span, msg.as_slice());
+                        self.resolve_error(path.span, msg[]);
                     }
                 }
 
@@ -5751,13 +5752,13 @@ impl<'a> Resolver<'a> {
 
             ExprBreak(Some(label)) | ExprAgain(Some(label)) => {
                 let renamed = mtwt::resolve(label);
-                match self.search_ribs(self.label_ribs.as_slice(),
+                match self.search_ribs(self.label_ribs[],
                                        renamed, expr.span) {
                     None => {
                         self.resolve_error(
                             expr.span,
                             format!("use of undeclared label `{}`",
-                                    token::get_ident(label)).as_slice())
+                                    token::get_ident(label))[])
                     }
                     Some(DlDef(def @ DefLabel(_))) => {
                         // Since this def is a label, it is never read.
@@ -5893,7 +5894,7 @@ impl<'a> Resolver<'a> {
                                   then {}",
                                  node_id,
                                  *entry.get(),
-                                 def).as_slice());
+                                 def)[]);
             },
             Vacant(entry) => { entry.set(def); },
         }
@@ -5909,7 +5910,7 @@ impl<'a> Resolver<'a> {
                 self.resolve_error(pat.span,
                                    format!("cannot use `ref` binding mode \
                                             with {}",
-                                           descr).as_slice());
+                                           descr)[]);
             }
         }
     }
@@ -5945,8 +5946,7 @@ impl<'a> Resolver<'a> {
             return "???".to_string();
         }
         self.names_to_string(names.into_iter().rev()
-                                  .collect::<Vec<ast::Name>>()
-                                  .as_slice())
+                                  .collect::<Vec<ast::Name>>()[])
     }
 
     #[allow(dead_code)]   // useful for debugging
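
The hunks above narrow `resolve_error` from a generic `<T: Str>` parameter to a plain `&str` and replace `.as_slice()` on `String`/`Vec` temporaries with this era's `[]` slicing sugar. A minimal, self-contained sketch of the same calling pattern, written in post-1.0 syntax (the `expr[]` sugar used in this commit was later replaced by `&expr[..]`); the `Session`/`Resolver` types and the `u32` span below are illustrative stand-ins, not the compiler's real definitions:

    // Illustrative stand-ins for the real librustc_resolve types.
    struct Session;
    impl Session {
        fn span_err(&self, span: u32, msg: &str) {
            println!("error at {}: {}", span, msg);
        }
    }

    struct Resolver {
        emit_errors: bool,
        session: Session,
    }

    impl Resolver {
        // Formerly `fn resolve_error<T: Str>(&self, span: Span, s: T)`;
        // after this commit it takes `&str` and callers slice at the call site.
        fn resolve_error(&self, span: u32, s: &str) {
            if self.emit_errors {
                self.session.span_err(span, s);
            }
        }
    }

    fn main() {
        let r = Resolver { emit_errors: true, session: Session };
        let msg = format!("`{}` does not name a structure", "Foo");
        r.resolve_error(0, &msg[..]); // spelled `msg[]` in the 2014 syntax above
    }
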
diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs
index 5617110bfec..ec61d3a6953 100644
--- a/src/librustc_trans/back/link.rs
+++ b/src/librustc_trans/back/link.rs
@@ -126,7 +126,7 @@ pub fn find_crate_name(sess: Option<&Session>,
                        attrs: &[ast::Attribute],
                        input: &Input) -> String {
     let validate = |s: String, span: Option<Span>| {
-        creader::validate_crate_name(sess, s.as_slice(), span);
+        creader::validate_crate_name(sess, s[], span);
         s
     };
 
@@ -144,7 +144,7 @@ pub fn find_crate_name(sess: Option<&Session>,
                     let msg = format!("--crate-name and #[crate_name] are \
                                        required to match, but `{}` != `{}`",
                                       s, name);
-                    sess.span_err(attr.span, msg.as_slice());
+                    sess.span_err(attr.span, msg[]);
                 }
             }
             return validate(s.clone(), None);
@@ -190,17 +190,17 @@ fn symbol_hash<'tcx>(tcx: &ty::ctxt<'tcx>,
     // to be independent of one another in the crate.
 
     symbol_hasher.reset();
-    symbol_hasher.input_str(link_meta.crate_name.as_slice());
+    symbol_hasher.input_str(link_meta.crate_name[]);
     symbol_hasher.input_str("-");
     symbol_hasher.input_str(link_meta.crate_hash.as_str());
     for meta in tcx.sess.crate_metadata.borrow().iter() {
-        symbol_hasher.input_str(meta.as_slice());
+        symbol_hasher.input_str(meta[]);
     }
     symbol_hasher.input_str("-");
-    symbol_hasher.input_str(encoder::encoded_ty(tcx, t).as_slice());
+    symbol_hasher.input_str(encoder::encoded_ty(tcx, t)[]);
     // Prefix with 'h' so that it never blends into adjacent digits
     let mut hash = String::from_str("h");
-    hash.push_str(truncated_hash_result(symbol_hasher).as_slice());
+    hash.push_str(truncated_hash_result(symbol_hasher)[]);
     hash
 }
 
@@ -249,7 +249,7 @@ pub fn sanitize(s: &str) -> String {
                 let mut tstr = String::new();
                 for c in c.escape_unicode() { tstr.push(c) }
                 result.push('$');
-                result.push_str(tstr.slice_from(1));
+                result.push_str(tstr[1..]);
             }
         }
     }
@@ -258,7 +258,7 @@ pub fn sanitize(s: &str) -> String {
     if result.len() > 0u &&
         result.as_bytes()[0] != '_' as u8 &&
         ! (result.as_bytes()[0] as char).is_xid_start() {
-        return format!("_{}", result.as_slice());
+        return format!("_{}", result[]);
     }
 
     return result;
@@ -284,12 +284,12 @@ pub fn mangle<PI: Iterator<PathElem>>(mut path: PI,
 
     fn push(n: &mut String, s: &str) {
         let sani = sanitize(s);
-        n.push_str(format!("{}{}", sani.len(), sani).as_slice());
+        n.push_str(format!("{}{}", sani.len(), sani)[]);
     }
 
     // First, connect each component with <len, name> pairs.
     for e in path {
-        push(&mut n, token::get_name(e.name()).get().as_slice())
+        push(&mut n, token::get_name(e.name()).get()[])
     }
 
     match hash {
@@ -327,17 +327,17 @@ pub fn mangle_exported_name<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, path: PathEl
     hash.push(EXTRA_CHARS.as_bytes()[extra2] as char);
     hash.push(EXTRA_CHARS.as_bytes()[extra3] as char);
 
-    exported_name(path, hash.as_slice())
+    exported_name(path, hash[])
 }
 
 pub fn mangle_internal_name_by_type_and_seq<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                                       t: Ty<'tcx>,
                                                       name: &str) -> String {
     let s = ppaux::ty_to_string(ccx.tcx(), t);
-    let path = [PathName(token::intern(s.as_slice())),
+    let path = [PathName(token::intern(s[])),
                 gensym_name(name)];
     let hash = get_symbol_hash(ccx, t);
-    mangle(ast_map::Values(path.iter()), Some(hash.as_slice()))
+    mangle(ast_map::Values(path.iter()), Some(hash[]))
 }
 
 pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> String {
@@ -357,7 +357,7 @@ pub fn remove(sess: &Session, path: &Path) {
         Err(e) => {
             sess.err(format!("failed to remove {}: {}",
                              path.display(),
-                             e).as_slice());
+                             e)[]);
         }
     }
 }
@@ -372,7 +372,7 @@ pub fn link_binary(sess: &Session,
     for &crate_type in sess.crate_types.borrow().iter() {
         if invalid_output_for_target(sess, crate_type) {
             sess.bug(format!("invalid output type `{}` for target os `{}`",
-                             crate_type, sess.opts.target_triple).as_slice());
+                             crate_type, sess.opts.target_triple)[]);
         }
         let out_file = link_binary_output(sess, trans, crate_type, outputs,
                                           crate_name);
@@ -437,8 +437,8 @@ pub fn filename_for_input(sess: &Session,
             out_filename.with_filename(format!("lib{}.rlib", libname))
         }
         config::CrateTypeDylib => {
-            let (prefix, suffix) = (sess.target.target.options.dll_prefix.as_slice(),
-                                    sess.target.target.options.dll_suffix.as_slice());
+            let (prefix, suffix) = (sess.target.target.options.dll_prefix[],
+                                    sess.target.target.options.dll_suffix[]);
             out_filename.with_filename(format!("{}{}{}",
                                                prefix,
                                                libname,
@@ -448,7 +448,7 @@ pub fn filename_for_input(sess: &Session,
             out_filename.with_filename(format!("lib{}.a", libname))
         }
         config::CrateTypeExecutable => {
-            let suffix = sess.target.target.options.exe_suffix.as_slice();
+            let suffix = sess.target.target.options.exe_suffix[];
             out_filename.with_filename(format!("{}{}", libname, suffix))
         }
     }
@@ -477,12 +477,12 @@ fn link_binary_output(sess: &Session,
     if !out_is_writeable {
         sess.fatal(format!("output file {} is not writeable -- check its \
                             permissions.",
-                           out_filename.display()).as_slice());
+                           out_filename.display())[]);
     }
     else if !obj_is_writeable {
         sess.fatal(format!("object file {} is not writeable -- check its \
                             permissions.",
-                           obj_filename.display()).as_slice());
+                           obj_filename.display())[]);
     }
 
     match crate_type {
@@ -507,7 +507,7 @@ fn archive_search_paths(sess: &Session) -> Vec<Path> {
     let mut rustpath = filesearch::rust_path();
     rustpath.push(sess.target_filesearch().get_lib_path());
     let mut search: Vec<Path> = sess.opts.addl_lib_search_paths.borrow().clone();
-    search.push_all(rustpath.as_slice());
+    search.push_all(rustpath[]);
     return search;
 }
 
@@ -536,7 +536,7 @@ fn link_rlib<'a>(sess: &'a Session,
     for &(ref l, kind) in sess.cstore.get_used_libraries().borrow().iter() {
         match kind {
             cstore::NativeStatic => {
-                ab.add_native_library(l.as_slice()).unwrap();
+                ab.add_native_library(l[]).unwrap();
             }
             cstore::NativeFramework | cstore::NativeUnknown => {}
         }
@@ -584,12 +584,12 @@ fn link_rlib<'a>(sess: &'a Session,
             let tmpdir = TempDir::new("rustc").ok().expect("needs a temp dir");
             let metadata = tmpdir.path().join(METADATA_FILENAME);
             match fs::File::create(&metadata).write(trans.metadata
-                                                         .as_slice()) {
+                                                         []) {
                 Ok(..) => {}
                 Err(e) => {
                     sess.err(format!("failed to write {}: {}",
                                      metadata.display(),
-                                     e).as_slice());
+                                     e)[]);
                     sess.abort_if_errors();
                 }
             }
@@ -605,27 +605,27 @@ fn link_rlib<'a>(sess: &'a Session,
                 // extension to it. This is to work around a bug in LLDB that
                 // would cause it to crash if the name of a file in an archive
                 // was exactly 16 bytes.
-                let bc_filename = obj_filename.with_extension(format!("{}.bc", i).as_slice());
+                let bc_filename = obj_filename.with_extension(format!("{}.bc", i)[]);
                 let bc_deflated_filename = obj_filename.with_extension(
-                    format!("{}.bytecode.deflate", i).as_slice());
+                    format!("{}.bytecode.deflate", i)[]);
 
                 let bc_data = match fs::File::open(&bc_filename).read_to_end() {
                     Ok(buffer) => buffer,
                     Err(e) => sess.fatal(format!("failed to read bytecode: {}",
-                                                 e).as_slice())
+                                                 e)[])
                 };
 
-                let bc_data_deflated = match flate::deflate_bytes(bc_data.as_slice()) {
+                let bc_data_deflated = match flate::deflate_bytes(bc_data[]) {
                     Some(compressed) => compressed,
                     None => sess.fatal(format!("failed to compress bytecode from {}",
-                                               bc_filename.display()).as_slice())
+                                               bc_filename.display())[])
                 };
 
                 let mut bc_file_deflated = match fs::File::create(&bc_deflated_filename) {
                     Ok(file) => file,
                     Err(e) => {
                         sess.fatal(format!("failed to create compressed bytecode \
-                                            file: {}", e).as_slice())
+                                            file: {}", e)[])
                     }
                 };
 
@@ -634,7 +634,7 @@ fn link_rlib<'a>(sess: &'a Session,
                     Ok(()) => {}
                     Err(e) => {
                         sess.err(format!("failed to write compressed bytecode: \
-                                          {}", e).as_slice());
+                                          {}", e)[]);
                         sess.abort_if_errors()
                     }
                 };
@@ -674,7 +674,7 @@ fn write_rlib_bytecode_object_v1<T: Writer>(writer: &mut T,
     try! { writer.write(RLIB_BYTECODE_OBJECT_MAGIC) };
     try! { writer.write_le_u32(1) };
     try! { writer.write_le_u64(bc_data_deflated_size) };
-    try! { writer.write(bc_data_deflated.as_slice()) };
+    try! { writer.write(bc_data_deflated[]) };
 
     let number_of_bytes_written_so_far =
         RLIB_BYTECODE_OBJECT_MAGIC.len() +                // magic id
@@ -725,11 +725,11 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) {
         let p = match *path {
             Some(ref p) => p.clone(), None => {
                 sess.err(format!("could not find rlib for: `{}`",
-                                 name).as_slice());
+                                 name)[]);
                 continue
             }
         };
-        ab.add_rlib(&p, name.as_slice(), sess.lto()).unwrap();
+        ab.add_rlib(&p, name[], sess.lto()).unwrap();
 
         let native_libs = csearch::get_native_libraries(&sess.cstore, cnum);
         all_native_libs.extend(native_libs.into_iter());
@@ -751,7 +751,7 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) {
             cstore::NativeUnknown => "library",
             cstore::NativeFramework => "framework",
         };
-        sess.note(format!("{}: {}", name, *lib).as_slice());
+        sess.note(format!("{}: {}", name, *lib)[]);
     }
 }
 
@@ -765,12 +765,12 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
 
     // The invocations of cc share some flags across platforms
     let pname = get_cc_prog(sess);
-    let mut cmd = Command::new(pname.as_slice());
+    let mut cmd = Command::new(pname[]);
 
-    cmd.args(sess.target.target.options.pre_link_args.as_slice());
+    cmd.args(sess.target.target.options.pre_link_args[]);
     link_args(&mut cmd, sess, dylib, tmpdir.path(),
               trans, obj_filename, out_filename);
-    cmd.args(sess.target.target.options.post_link_args.as_slice());
+    cmd.args(sess.target.target.options.post_link_args[]);
     if !sess.target.target.options.no_compiler_rt {
         cmd.arg("-lcompiler-rt");
     }
@@ -790,11 +790,11 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
             if !prog.status.success() {
                 sess.err(format!("linking with `{}` failed: {}",
                                  pname,
-                                 prog.status).as_slice());
-                sess.note(format!("{}", &cmd).as_slice());
+                                 prog.status)[]);
+                sess.note(format!("{}", &cmd)[]);
                 let mut output = prog.error.clone();
-                output.push_all(prog.output.as_slice());
-                sess.note(str::from_utf8(output.as_slice()).unwrap());
+                output.push_all(prog.output[]);
+                sess.note(str::from_utf8(output[]).unwrap());
                 sess.abort_if_errors();
             }
             debug!("linker stderr:\n{}", String::from_utf8(prog.error).unwrap());
@@ -803,7 +803,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
         Err(e) => {
             sess.err(format!("could not exec the linker `{}`: {}",
                              pname,
-                             e).as_slice());
+                             e)[]);
             sess.abort_if_errors();
         }
     }
@@ -815,7 +815,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
         match Command::new("dsymutil").arg(out_filename).output() {
             Ok(..) => {}
             Err(e) => {
-                sess.err(format!("failed to run dsymutil: {}", e).as_slice());
+                sess.err(format!("failed to run dsymutil: {}", e)[]);
                 sess.abort_if_errors();
             }
         }
@@ -864,7 +864,7 @@ fn link_args(cmd: &mut Command,
 
             let mut v = b"-Wl,-force_load,".to_vec();
             v.push_all(morestack.as_vec());
-            cmd.arg(v.as_slice());
+            cmd.arg(v[]);
         } else {
             cmd.args(&["-Wl,--whole-archive", "-lmorestack", "-Wl,--no-whole-archive"]);
         }
@@ -989,7 +989,7 @@ fn link_args(cmd: &mut Command,
             if sess.opts.cg.rpath {
                 let mut v = "-Wl,-install_name,@rpath/".as_bytes().to_vec();
                 v.push_all(out_filename.filename().unwrap());
-                cmd.arg(v.as_slice());
+                cmd.arg(v[]);
             }
         } else {
             cmd.arg("-shared");
@@ -1001,7 +1001,7 @@ fn link_args(cmd: &mut Command,
     // addl_lib_search_paths
     if sess.opts.cg.rpath {
         let sysroot = sess.sysroot();
-        let target_triple = sess.opts.target_triple.as_slice();
+        let target_triple = sess.opts.target_triple[];
         let get_install_prefix_lib_path = |:| {
             let install_prefix = option_env!("CFG_PREFIX").expect("CFG_PREFIX");
             let tlib = filesearch::relative_target_lib_path(sysroot, target_triple);
@@ -1018,14 +1018,14 @@ fn link_args(cmd: &mut Command,
             get_install_prefix_lib_path: get_install_prefix_lib_path,
             realpath: ::util::fs::realpath
         };
-        cmd.args(rpath::get_rpath_flags(rpath_config).as_slice());
+        cmd.args(rpath::get_rpath_flags(rpath_config)[]);
     }
 
     // Finally add all the linker arguments provided on the command line along
     // with any #[link_args] attributes found inside the crate
     let empty = Vec::new();
-    cmd.args(sess.opts.cg.link_args.as_ref().unwrap_or(&empty).as_slice());
-    cmd.args(used_link_args.as_slice());
+    cmd.args(sess.opts.cg.link_args.as_ref().unwrap_or(&empty)[]);
+    cmd.args(used_link_args[]);
 }
 
 // # Native library linking
@@ -1083,14 +1083,14 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) {
         } else {
             // -force_load is the OSX equivalent of --whole-archive, but it
             // involves passing the full path to the library to link.
-            let lib = archive::find_library(l.as_slice(),
-                                            sess.target.target.options.staticlib_prefix.as_slice(),
-                                            sess.target.target.options.staticlib_suffix.as_slice(),
-                                            search_path.as_slice(),
+            let lib = archive::find_library(l[],
+                                            sess.target.target.options.staticlib_prefix[],
+                                            sess.target.target.options.staticlib_suffix[],
+                                            search_path[],
                                             &sess.diagnostic().handler);
             let mut v = b"-Wl,-force_load,".to_vec();
             v.push_all(lib.as_vec());
-            cmd.arg(v.as_slice());
+            cmd.arg(v[]);
         }
     }
     if takes_hints {
@@ -1103,7 +1103,7 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) {
                 cmd.arg(format!("-l{}", l));
             }
             cstore::NativeFramework => {
-                cmd.arg("-framework").arg(l.as_slice());
+                cmd.arg("-framework").arg(l[]);
             }
             cstore::NativeStatic => unreachable!(),
         }
@@ -1184,9 +1184,9 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
         // against the archive.
         if sess.lto() {
             let name = cratepath.filename_str().unwrap();
-            let name = name.slice(3, name.len() - 5); // chop off lib/.rlib
+            let name = name[3..name.len() - 5]; // chop off lib/.rlib
             time(sess.time_passes(),
-                 format!("altering {}.rlib", name).as_slice(),
+                 format!("altering {}.rlib", name)[],
                  (), |()| {
                 let dst = tmpdir.join(cratepath.filename().unwrap());
                 match fs::copy(&cratepath, &dst) {
@@ -1195,7 +1195,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
                         sess.err(format!("failed to copy {} to {}: {}",
                                          cratepath.display(),
                                          dst.display(),
-                                         e).as_slice());
+                                         e)[]);
                         sess.abort_if_errors();
                     }
                 }
@@ -1207,7 +1207,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
                     Err(e) => {
                         sess.err(format!("failed to chmod {} when preparing \
                                           for LTO: {}", dst.display(),
-                                         e).as_slice());
+                                         e)[]);
                         sess.abort_if_errors();
                     }
                 }
@@ -1221,9 +1221,9 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
                     maybe_ar_prog: sess.opts.cg.ar.clone()
                 };
                 let mut archive = Archive::open(config);
-                archive.remove_file(format!("{}.o", name).as_slice());
+                archive.remove_file(format!("{}.o", name)[]);
                 let files = archive.files();
-                if files.iter().any(|s| s.as_slice().ends_with(".o")) {
+                if files.iter().any(|s| s[].ends_with(".o")) {
                     cmd.arg(dst);
                 }
             });
@@ -1245,7 +1245,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
 
         let mut v = "-l".as_bytes().to_vec();
         v.push_all(unlib(&sess.target, cratepath.filestem().unwrap()));
-        cmd.arg(v.as_slice());
+        cmd.arg(v[]);
     }
 }
 
@@ -1287,7 +1287,7 @@ fn add_upstream_native_libraries(cmd: &mut Command, sess: &Session) {
                 }
                 cstore::NativeFramework => {
                     cmd.arg("-framework");
-                    cmd.arg(lib.as_slice());
+                    cmd.arg(lib[]);
                 }
                 cstore::NativeStatic => {
                     sess.bug("statics shouldn't be propagated");
diff --git a/src/librustc_trans/back/lto.rs b/src/librustc_trans/back/lto.rs
index b9357280d06..1271330897e 100644
--- a/src/librustc_trans/back/lto.rs
+++ b/src/librustc_trans/back/lto.rs
@@ -53,21 +53,21 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
             Some(p) => p,
             None => {
                 sess.fatal(format!("could not find rlib for: `{}`",
-                                   name).as_slice());
+                                   name)[]);
             }
         };
 
         let archive = ArchiveRO::open(&path).expect("wanted an rlib");
         let file = path.filename_str().unwrap();
-        let file = file.slice(3, file.len() - 5); // chop off lib/.rlib
+        let file = file[3..file.len() - 5]; // chop off lib/.rlib
         debug!("reading {}", file);
         for i in iter::count(0u, 1) {
             let bc_encoded = time(sess.time_passes(),
-                                  format!("check for {}.{}.bytecode.deflate", name, i).as_slice(),
+                                  format!("check for {}.{}.bytecode.deflate", name, i)[],
                                   (),
                                   |_| {
                                       archive.read(format!("{}.{}.bytecode.deflate",
-                                                           file, i).as_slice())
+                                                           file, i)[])
                                   });
             let bc_encoded = match bc_encoded {
                 Some(data) => data,
@@ -75,7 +75,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
                     if i == 0 {
                         // No bitcode was found at all.
                         sess.fatal(format!("missing compressed bytecode in {}",
-                                           path.display()).as_slice());
+                                           path.display())[]);
                     }
                     // No more bitcode files to read.
                     break;
@@ -98,12 +98,12 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
                             Some(inflated) => inflated,
                             None => {
                                 sess.fatal(format!("failed to decompress bc of `{}`",
-                                                   name).as_slice())
+                                                   name)[])
                             }
                         }
                     } else {
                         sess.fatal(format!("Unsupported bytecode format version {}",
-                                           version).as_slice())
+                                           version)[])
                     }
                 })
             } else {
@@ -114,7 +114,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
                         Some(bc) => bc,
                         None => {
                             sess.fatal(format!("failed to decompress bc of `{}`",
-                                               name).as_slice())
+                                               name)[])
                         }
                     }
                 })
@@ -123,7 +123,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
             let ptr = bc_decoded.as_slice().as_ptr();
             debug!("linking {}, part {}", name, i);
             time(sess.time_passes(),
-                 format!("ll link {}.{}", name, i).as_slice(),
+                 format!("ll link {}.{}", name, i)[],
                  (),
                  |()| unsafe {
                 if !llvm::LLVMRustLinkInExternalBitcode(llmod,
@@ -131,7 +131,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
                                                         bc_decoded.len() as libc::size_t) {
                     write::llvm_err(sess.diagnostic().handler(),
                                     format!("failed to load bc of `{}`",
-                                            name.as_slice()));
+                                            name[]));
                 }
             });
         }
diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs
index 60b5b32e89f..5be66d42920 100644
--- a/src/librustc_trans/back/write.rs
+++ b/src/librustc_trans/back/write.rs
@@ -46,13 +46,13 @@ pub fn llvm_err(handler: &diagnostic::Handler, msg: String) -> ! {
     unsafe {
         let cstr = llvm::LLVMRustGetLastError();
         if cstr == ptr::null() {
-            handler.fatal(msg.as_slice());
+            handler.fatal(msg[]);
         } else {
             let err = CString::new(cstr, true);
             let err = String::from_utf8_lossy(err.as_bytes());
             handler.fatal(format!("{}: {}",
-                                  msg.as_slice(),
-                                  err.as_slice()).as_slice());
+                                  msg[],
+                                  err[])[]);
         }
     }
 }
@@ -103,13 +103,13 @@ impl SharedEmitter {
             match diag.code {
                 Some(ref code) => {
                     handler.emit_with_code(None,
-                                           diag.msg.as_slice(),
-                                           code.as_slice(),
+                                           diag.msg[],
+                                           code[],
                                            diag.lvl);
                 },
                 None => {
                     handler.emit(None,
-                                 diag.msg.as_slice(),
+                                 diag.msg[],
                                  diag.lvl);
                 },
             }
@@ -164,8 +164,8 @@ fn get_llvm_opt_level(optimize: config::OptLevel) -> llvm::CodeGenOptLevel {
 
 fn create_target_machine(sess: &Session) -> TargetMachineRef {
     let reloc_model_arg = match sess.opts.cg.relocation_model {
-        Some(ref s) => s.as_slice(),
-        None => sess.target.target.options.relocation_model.as_slice()
+        Some(ref s) => s[],
+        None => sess.target.target.options.relocation_model[]
     };
     let reloc_model = match reloc_model_arg {
         "pic" => llvm::RelocPIC,
@@ -176,7 +176,7 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef {
             sess.err(format!("{} is not a valid relocation mode",
                              sess.opts
                                  .cg
-                                 .relocation_model).as_slice());
+                                 .relocation_model)[]);
             sess.abort_if_errors();
             unreachable!();
         }
@@ -197,8 +197,8 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef {
     let fdata_sections = ffunction_sections;
 
     let code_model_arg = match sess.opts.cg.code_model {
-        Some(ref s) => s.as_slice(),
-        None => sess.target.target.options.code_model.as_slice()
+        Some(ref s) => s[],
+        None => sess.target.target.options.code_model[]
     };
 
     let code_model = match code_model_arg {
@@ -211,19 +211,19 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef {
             sess.err(format!("{} is not a valid code model",
                              sess.opts
                                  .cg
-                                 .code_model).as_slice());
+                                 .code_model)[]);
             sess.abort_if_errors();
             unreachable!();
         }
     };
 
-    let triple = sess.target.target.llvm_target.as_slice();
+    let triple = sess.target.target.llvm_target[];
 
     let tm = unsafe {
         triple.with_c_str(|t| {
             let cpu = match sess.opts.cg.target_cpu {
-                Some(ref s) => s.as_slice(),
-                None => sess.target.target.options.cpu.as_slice()
+                Some(ref s) => s[],
+                None => sess.target.target.options.cpu[]
             };
             cpu.with_c_str(|cpu| {
                 target_feature(sess).with_c_str(|features| {
@@ -350,13 +350,13 @@ unsafe extern "C" fn inline_asm_handler(diag: SMDiagnosticRef,
     match cgcx.lto_ctxt {
         Some((sess, _)) => {
             sess.codemap().with_expn_info(ExpnId::from_llvm_cookie(cookie), |info| match info {
-                Some(ei) => sess.span_err(ei.call_site, msg.as_slice()),
-                None     => sess.err(msg.as_slice()),
+                Some(ei) => sess.span_err(ei.call_site, msg[]),
+                None     => sess.err(msg[]),
             });
         }
 
         None => {
-            cgcx.handler.err(msg.as_slice());
+            cgcx.handler.err(msg[]);
             cgcx.handler.note("build without -C codegen-units for more exact errors");
         }
     }
@@ -380,8 +380,8 @@ unsafe extern "C" fn diagnostic_handler(info: DiagnosticInfoRef, user: *mut c_vo
                 cgcx.handler.note(format!("optimization {} for {} at {}: {}",
                                           opt.kind.describe(),
                                           pass_name,
-                                          if loc.is_empty() { "[unknown]" } else { loc.as_slice() },
-                                          llvm::twine_to_string(opt.message)).as_slice());
+                                          if loc.is_empty() { "[unknown]" } else { loc[] },
+                                          llvm::twine_to_string(opt.message))[]);
             }
         }
 
@@ -413,7 +413,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
 
     if config.emit_no_opt_bc {
         let ext = format!("{}.no-opt.bc", name_extra);
-        output_names.with_extension(ext.as_slice()).with_c_str(|buf| {
+        output_names.with_extension(ext[]).with_c_str(|buf| {
             llvm::LLVMWriteBitcodeToFile(llmod, buf);
         })
     }
@@ -445,7 +445,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
                 pass.with_c_str(|s| {
                     if !llvm::LLVMRustAddPass(mpm, s) {
                         cgcx.handler.warn(format!("unknown pass {}, ignoring",
-                                                  *pass).as_slice());
+                                                  *pass)[]);
                     }
                 })
             }
@@ -467,7 +467,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
 
                     if config.emit_lto_bc {
                         let name = format!("{}.lto.bc", name_extra);
-                        output_names.with_extension(name.as_slice()).with_c_str(|buf| {
+                        output_names.with_extension(name[]).with_c_str(|buf| {
                             llvm::LLVMWriteBitcodeToFile(llmod, buf);
                         })
                     }
@@ -501,7 +501,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
 
     if config.emit_bc {
         let ext = format!("{}.bc", name_extra);
-        output_names.with_extension(ext.as_slice()).with_c_str(|buf| {
+        output_names.with_extension(ext[]).with_c_str(|buf| {
             llvm::LLVMWriteBitcodeToFile(llmod, buf);
         })
     }
@@ -509,7 +509,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
     time(config.time_passes, "codegen passes", (), |()| {
         if config.emit_ir {
             let ext = format!("{}.ll", name_extra);
-            output_names.with_extension(ext.as_slice()).with_c_str(|output| {
+            output_names.with_extension(ext[]).with_c_str(|output| {
                 with_codegen(tm, llmod, config.no_builtins, |cpm| {
                     llvm::LLVMRustPrintModule(cpm, llmod, output);
                 })
@@ -517,14 +517,14 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
         }
 
         if config.emit_asm {
-            let path = output_names.with_extension(format!("{}.s", name_extra).as_slice());
+            let path = output_names.with_extension(format!("{}.s", name_extra)[]);
             with_codegen(tm, llmod, config.no_builtins, |cpm| {
                 write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::AssemblyFileType);
             });
         }
 
         if config.emit_obj {
-            let path = output_names.with_extension(format!("{}.o", name_extra).as_slice());
+            let path = output_names.with_extension(format!("{}.o", name_extra)[]);
             with_codegen(tm, llmod, config.no_builtins, |cpm| {
                 write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::ObjectFileType);
             });
@@ -638,7 +638,7 @@ pub fn run_passes(sess: &Session,
 
     // Process the work items, optionally using worker threads.
     if sess.opts.cg.codegen_units == 1 {
-        run_work_singlethreaded(sess, trans.reachable.as_slice(), work_items);
+        run_work_singlethreaded(sess, trans.reachable[], work_items);
     } else {
         run_work_multithreaded(sess, work_items, sess.opts.cg.codegen_units);
     }
@@ -666,7 +666,7 @@ pub fn run_passes(sess: &Session,
                 // 2) Multiple codegen units, with `-o some_name`.  We have
                 //    no good solution for this case, so warn the user.
                 sess.warn(format!("ignoring -o because multiple .{} files were produced",
-                                  ext).as_slice());
+                                  ext)[]);
             } else {
                 // 3) Multiple codegen units, but no `-o some_name`.  We
                 //    just leave the `foo.0.x` files in place.
@@ -699,20 +699,20 @@ pub fn run_passes(sess: &Session,
             };
 
         let pname = get_cc_prog(sess);
-        let mut cmd = Command::new(pname.as_slice());
+        let mut cmd = Command::new(pname[]);
 
-        cmd.args(sess.target.target.options.pre_link_args.as_slice());
+        cmd.args(sess.target.target.options.pre_link_args[]);
         cmd.arg("-nostdlib");
 
         for index in range(0, trans.modules.len()) {
-            cmd.arg(crate_output.with_extension(format!("{}.o", index).as_slice()));
+            cmd.arg(crate_output.with_extension(format!("{}.o", index)[]));
         }
 
         cmd.arg("-r")
            .arg("-o")
            .arg(windows_output_path.as_ref().unwrap_or(output_path));
 
-        cmd.args(sess.target.target.options.post_link_args.as_slice());
+        cmd.args(sess.target.target.options.post_link_args[]);
 
         if (sess.opts.debugging_opts & config::PRINT_LINK_ARGS) != 0 {
             println!("{}", &cmd);
@@ -725,14 +725,14 @@ pub fn run_passes(sess: &Session,
             Ok(status) => {
                 if !status.success() {
                     sess.err(format!("linking of {} with `{}` failed",
-                                     output_path.display(), cmd).as_slice());
+                                     output_path.display(), cmd)[]);
                     sess.abort_if_errors();
                 }
             },
             Err(e) => {
                 sess.err(format!("could not exec the linker `{}`: {}",
                                  pname,
-                                 e).as_slice());
+                                 e)[]);
                 sess.abort_if_errors();
             },
         }
@@ -817,12 +817,12 @@ pub fn run_passes(sess: &Session,
         for i in range(0, trans.modules.len()) {
             if modules_config.emit_obj {
                 let ext = format!("{}.o", i);
-                remove(sess, &crate_output.with_extension(ext.as_slice()));
+                remove(sess, &crate_output.with_extension(ext[]));
             }
 
             if modules_config.emit_bc && !keep_numbered_bitcode {
                 let ext = format!("{}.bc", i);
-                remove(sess, &crate_output.with_extension(ext.as_slice()));
+                remove(sess, &crate_output.with_extension(ext[]));
             }
         }
 
@@ -948,7 +948,7 @@ fn run_work_multithreaded(sess: &Session,
 
 pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) {
     let pname = get_cc_prog(sess);
-    let mut cmd = Command::new(pname.as_slice());
+    let mut cmd = Command::new(pname[]);
 
     cmd.arg("-c").arg("-o").arg(outputs.path(config::OutputTypeObject))
                            .arg(outputs.temp_path(config::OutputTypeAssembly));
@@ -959,18 +959,18 @@ pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) {
             if !prog.status.success() {
                 sess.err(format!("linking with `{}` failed: {}",
                                  pname,
-                                 prog.status).as_slice());
-                sess.note(format!("{}", &cmd).as_slice());
+                                 prog.status)[]);
+                sess.note(format!("{}", &cmd)[]);
                 let mut note = prog.error.clone();
-                note.push_all(prog.output.as_slice());
-                sess.note(str::from_utf8(note.as_slice()).unwrap());
+                note.push_all(prog.output[]);
+                sess.note(str::from_utf8(note[]).unwrap());
                 sess.abort_if_errors();
             }
         },
         Err(e) => {
             sess.err(format!("could not exec the linker `{}`: {}",
                              pname,
-                             e).as_slice());
+                             e)[]);
             sess.abort_if_errors();
         }
     }
@@ -1003,7 +1003,7 @@ unsafe fn configure_llvm(sess: &Session) {
         if sess.print_llvm_passes() { add("-debug-pass=Structure"); }
 
         for arg in sess.opts.cg.llvm_args.iter() {
-            add((*arg).as_slice());
+            add((*arg)[]);
         }
     }
 
diff --git a/src/librustc_trans/save/mod.rs b/src/librustc_trans/save/mod.rs
index 1a4f06663ef..0183aa8c2aa 100644
--- a/src/librustc_trans/save/mod.rs
+++ b/src/librustc_trans/save/mod.rs
@@ -94,7 +94,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
 
         // dump info about all the external crates referenced from this crate
         self.sess.cstore.iter_crate_data(|n, cmd| {
-            self.fmt.external_crate_str(krate.span, cmd.name.as_slice(), n);
+            self.fmt.external_crate_str(krate.span, cmd.name[], n);
         });
         self.fmt.recorder.record("end_external_crates\n");
     }
@@ -143,7 +143,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
         for &(ref span, ref qualname) in sub_paths.iter() {
             self.fmt.sub_mod_ref_str(path.span,
                                      *span,
-                                     qualname.as_slice(),
+                                     qualname[],
                                      self.cur_scope);
         }
     }
@@ -161,7 +161,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
         for &(ref span, ref qualname) in sub_paths.iter() {
             self.fmt.sub_mod_ref_str(path.span,
                                      *span,
-                                     qualname.as_slice(),
+                                     qualname[],
                                      self.cur_scope);
         }
     }
@@ -180,7 +180,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
         let (ref span, ref qualname) = sub_paths[len-2];
         self.fmt.sub_type_ref_str(path.span,
                                   *span,
-                                  qualname.as_slice());
+                                  qualname[]);
 
         // write the other sub-paths
         if len <= 2 {
@@ -190,7 +190,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
         for &(ref span, ref qualname) in sub_paths.iter() {
             self.fmt.sub_mod_ref_str(path.span,
                                      *span,
-                                     qualname.as_slice(),
+                                     qualname[],
                                      self.cur_scope);
         }
     }
@@ -199,7 +199,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
     fn lookup_type_ref(&self, ref_id: NodeId) -> Option<DefId> {
         if !self.analysis.ty_cx.def_map.borrow().contains_key(&ref_id) {
             self.sess.bug(format!("def_map has no key for {} in lookup_type_ref",
-                                  ref_id).as_slice());
+                                  ref_id)[]);
         }
         let def = (*self.analysis.ty_cx.def_map.borrow())[ref_id];
         match def {
@@ -212,7 +212,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
         let def_map = self.analysis.ty_cx.def_map.borrow();
         if !def_map.contains_key(&ref_id) {
             self.sess.span_bug(span, format!("def_map has no key for {} in lookup_def_kind",
-                                             ref_id).as_slice());
+                                             ref_id)[]);
         }
         let def = (*def_map)[ref_id];
         match def {
@@ -241,7 +241,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
             def::DefMethod(..) |
             def::DefPrimTy(_) => {
                 self.sess.span_bug(span, format!("lookup_def_kind for unexpected item: {}",
-                                                 def).as_slice());
+                                                 def)[]);
             },
         }
     }
@@ -262,8 +262,8 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                                     span_utils.span_for_last_ident(p.span),
                                     id,
                                     qualname,
-                                    path_to_string(p).as_slice(),
-                                    typ.as_slice());
+                                    path_to_string(p)[],
+                                    typ[]);
             }
             self.collected_paths.clear();
         }
@@ -285,14 +285,14 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                     match item.node {
                         ast::ItemImpl(_, _, _, ref ty, _) => {
                             let mut result = String::from_str("<");
-                            result.push_str(ty_to_string(&**ty).as_slice());
+                            result.push_str(ty_to_string(&**ty)[]);
 
                             match ty::trait_of_item(&self.analysis.ty_cx,
                                                     ast_util::local_def(method.id)) {
                                 Some(def_id) => {
                                     result.push_str(" as ");
                                     result.push_str(
-                                        ty::item_path_str(&self.analysis.ty_cx, def_id).as_slice());
+                                        ty::item_path_str(&self.analysis.ty_cx, def_id)[]);
                                 },
                                 None => {}
                             }
@@ -302,7 +302,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                         _ => {
                             self.sess.span_bug(method.span,
                                                format!("Container {} for method {} not an impl?",
-                                                       impl_id.node, method.id).as_slice());
+                                                       impl_id.node, method.id)[]);
                         },
                     }
                 },
@@ -312,7 +312,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                                                impl_id.node,
                                                method.id,
                                                self.analysis.ty_cx.map.get(impl_id.node)
-                                              ).as_slice());
+                                              )[]);
                 },
             },
             None => match ty::trait_of_item(&self.analysis.ty_cx,
@@ -328,20 +328,20 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                         _ => {
                             self.sess.span_bug(method.span,
                                                format!("Could not find container {} for method {}",
-                                                       def_id.node, method.id).as_slice());
+                                                       def_id.node, method.id)[]);
                         }
                     }
                 },
                 None => {
                     self.sess.span_bug(method.span,
                                        format!("Could not find container for method {}",
-                                               method.id).as_slice());
+                                               method.id)[]);
                 },
             },
         };
 
         qualname.push_str(get_ident(method.pe_ident()).get());
-        let qualname = qualname.as_slice();
+        let qualname = qualname[];
 
         // record the decl for this def (if it has one)
         let decl_id = ty::trait_item_of_item(&self.analysis.ty_cx,
@@ -430,13 +430,13 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                     Some(sub_span) => self.fmt.field_str(field.span,
                                                          Some(sub_span),
                                                          field.node.id,
-                                                         name.get().as_slice(),
-                                                         qualname.as_slice(),
-                                                         typ.as_slice(),
+                                                         name.get()[],
+                                                         qualname[],
+                                                         typ[],
                                                          scope_id),
                     None => self.sess.span_bug(field.span,
                                                format!("Could not find sub-span for field {}",
-                                                       qualname).as_slice()),
+                                                       qualname)[]),
                 }
             },
             _ => (),
@@ -463,7 +463,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
             self.fmt.typedef_str(full_span,
                                  Some(*param_ss),
                                  param.id,
-                                 name.as_slice(),
+                                 name[],
                                  "");
         }
         self.visit_generics(generics);
@@ -480,10 +480,10 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
         self.fmt.fn_str(item.span,
                         sub_span,
                         item.id,
-                        qualname.as_slice(),
+                        qualname[],
                         self.cur_scope);
 
-        self.process_formals(&decl.inputs, qualname.as_slice());
+        self.process_formals(&decl.inputs, qualname[]);
 
         // walk arg and return types
         for arg in decl.inputs.iter() {
@@ -497,7 +497,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
         // walk the body
         self.nest(item.id, |v| v.visit_block(&*body));
 
-        self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id);
+        self.process_generic_params(ty_params, item.span, qualname[], item.id);
     }
 
     fn process_static(&mut self,
@@ -519,9 +519,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                             sub_span,
                             item.id,
                             get_ident(item.ident).get(),
-                            qualname.as_slice(),
-                            value.as_slice(),
-                            ty_to_string(&*typ).as_slice(),
+                            qualname[],
+                            value[],
+                            ty_to_string(&*typ)[],
                             self.cur_scope);
 
         // walk type and init value
@@ -542,9 +542,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                             sub_span,
                             item.id,
                             get_ident(item.ident).get(),
-                            qualname.as_slice(),
+                            qualname[],
                             "",
-                            ty_to_string(&*typ).as_slice(),
+                            ty_to_string(&*typ)[],
                             self.cur_scope);
 
         // walk type and init value
@@ -568,17 +568,17 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                             sub_span,
                             item.id,
                             ctor_id,
-                            qualname.as_slice(),
+                            qualname[],
                             self.cur_scope,
-                            val.as_slice());
+                            val[]);
 
         // fields
         for field in def.fields.iter() {
-            self.process_struct_field_def(field, qualname.as_slice(), item.id);
+            self.process_struct_field_def(field, qualname[], item.id);
             self.visit_ty(&*field.node.ty);
         }
 
-        self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id);
+        self.process_generic_params(ty_params, item.span, qualname[], item.id);
     }
 
     fn process_enum(&mut self,
@@ -591,12 +591,12 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
             Some(sub_span) => self.fmt.enum_str(item.span,
                                                 Some(sub_span),
                                                 item.id,
-                                                enum_name.as_slice(),
+                                                enum_name[],
                                                 self.cur_scope,
-                                                val.as_slice()),
+                                                val[]),
             None => self.sess.span_bug(item.span,
                                        format!("Could not find subspan for enum {}",
-                                               enum_name).as_slice()),
+                                               enum_name)[]),
         }
         for variant in enum_definition.variants.iter() {
             let name = get_ident(variant.node.name);
@@ -612,9 +612,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                                                self.span.span_for_first_ident(variant.span),
                                                variant.node.id,
                                                name,
-                                               qualname.as_slice(),
-                                               enum_name.as_slice(),
-                                               val.as_slice(),
+                                               qualname[],
+                                               enum_name[],
+                                               val[],
                                                item.id);
                     for arg in args.iter() {
                         self.visit_ty(&*arg.ty);
@@ -630,20 +630,20 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                         self.span.span_for_first_ident(variant.span),
                         variant.node.id,
                         ctor_id,
-                        qualname.as_slice(),
-                        enum_name.as_slice(),
-                        val.as_slice(),
+                        qualname[],
+                        enum_name[],
+                        val[],
                         item.id);
 
                     for field in struct_def.fields.iter() {
-                        self.process_struct_field_def(field, enum_name.as_slice(), variant.node.id);
+                        self.process_struct_field_def(field, enum_name[], variant.node.id);
                         self.visit_ty(&*field.node.ty);
                     }
                 }
             }
         }
 
-        self.process_generic_params(ty_params, item.span, enum_name.as_slice(), item.id);
+        self.process_generic_params(ty_params, item.span, enum_name[], item.id);
     }
 
     fn process_impl(&mut self,
@@ -703,9 +703,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
         self.fmt.trait_str(item.span,
                            sub_span,
                            item.id,
-                           qualname.as_slice(),
+                           qualname[],
                            self.cur_scope,
-                           val.as_slice());
+                           val[]);
 
         // super-traits
         for super_bound in trait_refs.iter() {
@@ -737,7 +737,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
         }
 
         // walk generics and methods
-        self.process_generic_params(generics, item.span, qualname.as_slice(), item.id);
+        self.process_generic_params(generics, item.span, qualname[], item.id);
         for method in methods.iter() {
             self.visit_trait_item(method)
         }
@@ -755,9 +755,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
         self.fmt.mod_str(item.span,
                          sub_span,
                          item.id,
-                         qualname.as_slice(),
+                         qualname[],
                          self.cur_scope,
-                         filename.as_slice());
+                         filename[]);
 
         self.nest(item.id, |v| visit::walk_mod(v, m));
     }
@@ -773,7 +773,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
         if !def_map.contains_key(&ex.id) {
             self.sess.span_bug(ex.span,
                                format!("def_map has no key for {} in visit_expr",
-                                       ex.id).as_slice());
+                                       ex.id)[]);
         }
         let def = &(*def_map)[ex.id];
         let sub_span = self.span.span_for_last_ident(ex.span);
@@ -840,7 +840,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                                                              self.cur_scope),
             _ => self.sess.span_bug(ex.span,
                                     format!("Unexpected def kind while looking up path in '{}'",
-                                            self.span.snippet(ex.span)).as_slice()),
+                                            self.span.snippet(ex.span))[]),
         }
         // modules or types in the path prefix
         match *def {
@@ -961,7 +961,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                                self.cur_scope);
 
         // walk receiver and args
-        visit::walk_exprs(self, args.as_slice());
+        visit::walk_exprs(self, args[]);
     }
 
     fn process_pat(&mut self, p:&ast::Pat) {
@@ -978,7 +978,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                     None => {
                         self.sess.span_bug(p.span,
                                            format!("Could not find struct_def for `{}`",
-                                                   self.span.snippet(p.span)).as_slice());
+                                                   self.span.snippet(p.span))[]);
                     }
                 };
                 for &Spanned { node: ref field, span } in fields.iter() {
@@ -1062,11 +1062,11 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
                 self.fmt.typedef_str(item.span,
                                      sub_span,
                                      item.id,
-                                     qualname.as_slice(),
-                                     value.as_slice());
+                                     qualname[],
+                                     value[]);
 
                 self.visit_ty(&**ty);
-                self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id);
+                self.process_generic_params(ty_params, item.span, qualname[], item.id);
             },
             ast::ItemMac(_) => (),
             _ => visit::walk_item(self, item),
@@ -1123,12 +1123,12 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
                     None => {
                         self.sess.span_bug(method_type.span,
                                            format!("Could not find trait for method {}",
-                                                   method_type.id).as_slice());
+                                                   method_type.id)[]);
                     },
                 };
 
                 qualname.push_str(get_ident(method_type.ident).get());
-                let qualname = qualname.as_slice();
+                let qualname = qualname[];
 
                 let sub_span = self.span.sub_span_after_keyword(method_type.span, keywords::Fn);
                 self.fmt.method_decl_str(method_type.span,
@@ -1243,7 +1243,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
                                           id,
                                           cnum,
                                           name,
-                                          s.as_slice(),
+                                          s[],
                                           self.cur_scope);
             },
         }
@@ -1349,8 +1349,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
                 }
 
                 let mut id = String::from_str("$");
-                id.push_str(ex.id.to_string().as_slice());
-                self.process_formals(&decl.inputs, id.as_slice());
+                id.push_str(ex.id.to_string()[]);
+                self.process_formals(&decl.inputs, id[]);
 
                 // walk arg and return types
                 for arg in decl.inputs.iter() {
@@ -1393,7 +1393,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
         // process collected paths
         for &(id, ref p, ref immut, ref_kind) in self.collected_paths.iter() {
             let value = if *immut {
-                self.span.snippet(p.span).into_string()
+                self.span.snippet(p.span).to_string()
             } else {
                 "<mutable>".to_string()
             };
@@ -1402,15 +1402,15 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
             if !def_map.contains_key(&id) {
                 self.sess.span_bug(p.span,
                                    format!("def_map has no key for {} in visit_arm",
-                                           id).as_slice());
+                                           id)[]);
             }
             let def = &(*def_map)[id];
             match *def {
                 def::DefLocal(id)  => self.fmt.variable_str(p.span,
                                                             sub_span,
                                                             id,
-                                                            path_to_string(p).as_slice(),
-                                                            value.as_slice(),
+                                                            path_to_string(p)[],
+                                                            value[],
                                                             ""),
                 def::DefVariant(_,id,_) => self.fmt.ref_str(ref_kind,
                                                             p.span,
@@ -1462,9 +1462,9 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
             self.fmt.variable_str(p.span,
                                   sub_span,
                                   id,
-                                  path_to_string(p).as_slice(),
-                                  value.as_slice(),
-                                  typ.as_slice());
+                                  path_to_string(p)[],
+                                  value[],
+                                  typ[]);
         }
         self.collected_paths.clear();
 
@@ -1482,7 +1482,7 @@ pub fn process_crate(sess: &Session,
         return;
     }
 
-    let cratename = match attr::find_crate_name(krate.attrs.as_slice()) {
+    let cratename = match attr::find_crate_name(krate.attrs[]) {
         Some(name) => name.get().to_string(),
         None => {
             info!("Could not find crate name, using 'unknown_crate'");
@@ -1503,7 +1503,7 @@ pub fn process_crate(sess: &Session,
 
     match fs::mkdir_recursive(&root_path, io::USER_RWX) {
         Err(e) => sess.err(format!("Could not create directory {}: {}",
-                           root_path.display(), e).as_slice()),
+                           root_path.display(), e)[]),
         _ => (),
     }
 
@@ -1520,7 +1520,7 @@ pub fn process_crate(sess: &Session,
         Ok(f) => box f,
         Err(e) => {
             let disp = root_path.display();
-            sess.fatal(format!("Could not open {}: {}", disp, e).as_slice());
+            sess.fatal(format!("Could not open {}: {}", disp, e)[]);
         }
     };
     root_path.pop();
@@ -1546,7 +1546,7 @@ pub fn process_crate(sess: &Session,
         cur_scope: 0
     };
 
-    visitor.dump_crate_info(cratename.as_slice(), krate);
+    visitor.dump_crate_info(cratename[], krate);
 
     visit::walk_crate(&mut visitor, krate);
 }
diff --git a/src/librustc_trans/save/recorder.rs b/src/librustc_trans/save/recorder.rs
index 37d9e5d9940..08670864ade 100644
--- a/src/librustc_trans/save/recorder.rs
+++ b/src/librustc_trans/save/recorder.rs
@@ -41,7 +41,7 @@ impl Recorder {
         assert!(self.dump_spans);
         let result = format!("span,kind,{},{},text,\"{}\"\n",
                              kind, su.extent_str(span), escape(su.snippet(span)));
-        self.record(result.as_slice());
+        self.record(result[]);
     }
 }
 
@@ -158,15 +158,15 @@ impl<'a> FmtStrs<'a> {
         if values.len() != fields.len() {
             self.span.sess.span_bug(span, format!(
                 "Mismatch between length of fields for '{}', expected '{}', found '{}'",
-                kind, fields.len(), values.len()).as_slice());
+                kind, fields.len(), values.len())[]);
         }
 
         let values = values.iter().map(|s| {
             // Never take more than 1020 chars
             if s.len() > 1020 {
-                s.slice_to(1020)
+                s[..1020]
             } else {
-                s.as_slice()
+                s[]
             }
         });
 
@@ -182,7 +182,7 @@ impl<'a> FmtStrs<'a> {
             }
         )));
         Some(strs.fold(String::new(), |mut s, ss| {
-            s.push_str(ss.as_slice());
+            s.push_str(ss[]);
             s
         }))
     }
@@ -196,7 +196,7 @@ impl<'a> FmtStrs<'a> {
         if needs_span {
             self.span.sess.span_bug(span, format!(
                 "Called record_without_span for '{}' which does requires a span",
-                label).as_slice());
+                label)[]);
         }
         assert!(!dump_spans);
 
@@ -210,9 +210,9 @@ impl<'a> FmtStrs<'a> {
         };
 
         let mut result = String::from_str(label);
-        result.push_str(values_str.as_slice());
+        result.push_str(values_str[]);
         result.push_str("\n");
-        self.recorder.record(result.as_slice());
+        self.recorder.record(result[]);
     }
 
     pub fn record_with_span(&mut self,
@@ -235,7 +235,7 @@ impl<'a> FmtStrs<'a> {
         if !needs_span {
             self.span.sess.span_bug(span,
                                     format!("Called record_with_span for '{}' \
-                                             which does not require a span", label).as_slice());
+                                             which does not require a span", label)[]);
         }
 
         let values_str = match self.make_values_str(label, fields, values, span) {
@@ -243,7 +243,7 @@ impl<'a> FmtStrs<'a> {
             None => return,
         };
         let result = format!("{},{}{}\n", label, self.span.extent_str(sub_span), values_str);
-        self.recorder.record(result.as_slice());
+        self.recorder.record(result[]);
     }
 
     pub fn check_and_record(&mut self,
@@ -273,7 +273,7 @@ impl<'a> FmtStrs<'a> {
         // variable def's node id
         let mut qualname = String::from_str(name);
         qualname.push_str("$");
-        qualname.push_str(id.to_string().as_slice());
+        qualname.push_str(id.to_string()[]);
         self.check_and_record(Variable,
                               span,
                               sub_span,
diff --git a/src/librustc_trans/save/span_utils.rs b/src/librustc_trans/save/span_utils.rs
index 49e8e0fd347..a92d3c06e64 100644
--- a/src/librustc_trans/save/span_utils.rs
+++ b/src/librustc_trans/save/span_utils.rs
@@ -218,7 +218,7 @@ impl<'a> SpanUtils<'a> {
             let loc = self.sess.codemap().lookup_char_pos(span.lo);
             self.sess.span_bug(span,
                 format!("Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}",
-                        self.snippet(span), loc.file.name, loc.line).as_slice());
+                        self.snippet(span), loc.file.name, loc.line)[]);
         }
         if result.is_none() && prev.tok.is_ident() && bracket_count == 0 {
             return self.make_sub_span(span, Some(prev.sp));
@@ -244,7 +244,7 @@ impl<'a> SpanUtils<'a> {
                     let loc = self.sess.codemap().lookup_char_pos(span.lo);
                     self.sess.span_bug(span, format!(
                         "Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}",
-                         self.snippet(span), loc.file.name, loc.line).as_slice());
+                         self.snippet(span), loc.file.name, loc.line)[]);
                 }
                 return result
             }
diff --git a/src/librustc_trans/trans/_match.rs b/src/librustc_trans/trans/_match.rs
index 2bcd723fc83..33fd14a441b 100644
--- a/src/librustc_trans/trans/_match.rs
+++ b/src/librustc_trans/trans/_match.rs
@@ -427,7 +427,7 @@ fn enter_match<'a, 'b, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
     let _indenter = indenter();
 
     m.iter().filter_map(|br| {
-        e(br.pats.as_slice()).map(|pats| {
+        e(br.pats[]).map(|pats| {
             let this = br.pats[col];
             let mut bound_ptrs = br.bound_ptrs.clone();
             match this.node {
@@ -548,7 +548,7 @@ fn enter_opt<'a, 'p, 'blk, 'tcx>(
         param_env: param_env,
     };
     enter_match(bcx, dm, m, col, val, |pats|
-        check_match::specialize(&mcx, pats.as_slice(), &ctor, col, variant_size)
+        check_match::specialize(&mcx, pats[], &ctor, col, variant_size)
     )
 }
 
@@ -790,7 +790,7 @@ fn compare_values<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
         let did = langcall(cx,
                            None,
                            format!("comparison of `{}`",
-                                   cx.ty_to_string(rhs_t)).as_slice(),
+                                   cx.ty_to_string(rhs_t))[],
                            StrEqFnLangItem);
         callee::trans_lang_call(cx, did, &[lhs, rhs], None)
     }
@@ -943,7 +943,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
             if has_nested_bindings(m, col) {
                 let expanded = expand_nested_bindings(bcx, m, col, val);
                 compile_submatch_continue(bcx,
-                                          expanded.as_slice(),
+                                          expanded[],
                                           vals,
                                           chk,
                                           col,
@@ -1035,8 +1035,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                         field_vals.len())
             );
             let mut vals = field_vals;
-            vals.push_all(vals_left.as_slice());
-            compile_submatch(bcx, pats.as_slice(), vals.as_slice(), chk, has_genuine_default);
+            vals.push_all(vals_left[]);
+            compile_submatch(bcx, pats[], vals[], chk, has_genuine_default);
             return;
         }
         _ => ()
@@ -1189,10 +1189,10 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
         }
         let opt_ms = enter_opt(opt_cx, pat_id, dm, m, opt, col, size, val);
         let mut opt_vals = unpacked;
-        opt_vals.push_all(vals_left.as_slice());
+        opt_vals.push_all(vals_left[]);
         compile_submatch(opt_cx,
-                         opt_ms.as_slice(),
-                         opt_vals.as_slice(),
+                         opt_ms[],
+                         opt_vals[],
                          branch_chk.as_ref().unwrap_or(chk),
                          has_genuine_default);
     }
@@ -1211,8 +1211,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
             }
             _ => {
                 compile_submatch(else_cx,
-                                 defaults.as_slice(),
-                                 vals_left.as_slice(),
+                                 defaults[],
+                                 vals_left[],
                                  chk,
                                  has_genuine_default);
             }
@@ -1333,7 +1333,7 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &ast::Pat,
                                  "__llmatch");
                 trmode = TrByCopy(alloca_no_lifetime(bcx,
                                          llvariable_ty,
-                                         bcx.ident(ident).as_slice()));
+                                         bcx.ident(ident)[]));
             }
             ast::BindByValue(_) => {
                 // in this case, the final type of the variable will be T,
@@ -1341,13 +1341,13 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &ast::Pat,
                 // above
                 llmatch = alloca_no_lifetime(bcx,
                                  llvariable_ty.ptr_to(),
-                                 bcx.ident(ident).as_slice());
+                                 bcx.ident(ident)[]);
                 trmode = TrByMove;
             }
             ast::BindByRef(_) => {
                 llmatch = alloca_no_lifetime(bcx,
                                  llvariable_ty,
-                                 bcx.ident(ident).as_slice());
+                                 bcx.ident(ident)[]);
                 trmode = TrByRef;
             }
         };
@@ -1415,7 +1415,7 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>,
         && arm.pats.last().unwrap().node == ast::PatWild(ast::PatWildSingle)
     });
 
-    compile_submatch(bcx, matches.as_slice(), &[discr_datum.val], &chk, has_default);
+    compile_submatch(bcx, matches[], &[discr_datum.val], &chk, has_default);
 
     let mut arm_cxs = Vec::new();
     for arm_data in arm_datas.iter() {
@@ -1429,7 +1429,7 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>,
         arm_cxs.push(bcx);
     }
 
-    bcx = scope_cx.fcx.join_blocks(match_id, arm_cxs.as_slice());
+    bcx = scope_cx.fcx.join_blocks(match_id, arm_cxs[]);
     return bcx;
 }
 
@@ -1581,7 +1581,7 @@ fn mk_binding_alloca<'blk, 'tcx, A, F>(bcx: Block<'blk, 'tcx>,
     let var_ty = node_id_type(bcx, p_id);
 
     // Allocate memory on stack for the binding.
-    let llval = alloc_ty(bcx, var_ty, bcx.ident(*ident).as_slice());
+    let llval = alloc_ty(bcx, var_ty, bcx.ident(*ident)[]);
 
     // Subtle: be sure that we *populate* the memory *before*
     // we schedule the cleanup.
@@ -1619,7 +1619,7 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
 
     if bcx.sess().asm_comments() {
         add_comment(bcx, format!("bind_irrefutable_pat(pat={})",
-                                 pat.repr(bcx.tcx())).as_slice());
+                                 pat.repr(bcx.tcx()))[]);
     }
 
     let _indenter = indenter();
diff --git a/src/librustc_trans/trans/adt.rs b/src/librustc_trans/trans/adt.rs
index f7edb281b9e..9794611dd84 100644
--- a/src/librustc_trans/trans/adt.rs
+++ b/src/librustc_trans/trans/adt.rs
@@ -156,7 +156,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                                      t: Ty<'tcx>) -> Repr<'tcx> {
     match t.sty {
         ty::ty_tup(ref elems) => {
-            Univariant(mk_struct(cx, elems.as_slice(), false, t), false)
+            Univariant(mk_struct(cx, elems[], false, t), false)
         }
         ty::ty_struct(def_id, ref substs) => {
             let fields = ty::lookup_struct_fields(cx.tcx(), def_id);
@@ -167,16 +167,16 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
             let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag();
             if dtor { ftys.push(ty::mk_bool()); }
 
-            Univariant(mk_struct(cx, ftys.as_slice(), packed, t), dtor)
+            Univariant(mk_struct(cx, ftys[], packed, t), dtor)
         }
         ty::ty_unboxed_closure(def_id, _, ref substs) => {
             let upvars = ty::unboxed_closure_upvars(cx.tcx(), def_id, substs);
             let upvar_types = upvars.iter().map(|u| u.ty).collect::<Vec<_>>();
-            Univariant(mk_struct(cx, upvar_types.as_slice(), false, t), false)
+            Univariant(mk_struct(cx, upvar_types[], false, t), false)
         }
         ty::ty_enum(def_id, ref substs) => {
             let cases = get_cases(cx.tcx(), def_id, substs);
-            let hint = *ty::lookup_repr_hints(cx.tcx(), def_id).as_slice().get(0)
+            let hint = *ty::lookup_repr_hints(cx.tcx(), def_id)[].get(0)
                 .unwrap_or(&attr::ReprAny);
 
             let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag();
@@ -186,7 +186,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                 // (Typechecking will reject discriminant-sizing attrs.)
                 assert_eq!(hint, attr::ReprAny);
                 let ftys = if dtor { vec!(ty::mk_bool()) } else { vec!() };
-                return Univariant(mk_struct(cx, ftys.as_slice(), false, t),
+                return Univariant(mk_struct(cx, ftys[], false, t),
                                   dtor);
             }
 
@@ -209,7 +209,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                 cx.sess().bug(format!("non-C-like enum {} with specified \
                                       discriminants",
                                       ty::item_path_str(cx.tcx(),
-                                                        def_id)).as_slice());
+                                                        def_id))[]);
             }
 
             if cases.len() == 1 {
@@ -218,7 +218,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                 assert_eq!(hint, attr::ReprAny);
                 let mut ftys = cases[0].tys.clone();
                 if dtor { ftys.push(ty::mk_bool()); }
-                return Univariant(mk_struct(cx, ftys.as_slice(), false, t),
+                return Univariant(mk_struct(cx, ftys[], false, t),
                                   dtor);
             }
 
@@ -227,7 +227,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                 let mut discr = 0;
                 while discr < 2 {
                     if cases[1 - discr].is_zerolen(cx, t) {
-                        let st = mk_struct(cx, cases[discr].tys.as_slice(),
+                        let st = mk_struct(cx, cases[discr].tys[],
                                            false, t);
                         match cases[discr].find_ptr(cx) {
                             Some(ThinPointer(_)) if st.fields.len() == 1 => {
@@ -260,17 +260,17 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
 
             let fields : Vec<_> = cases.iter().map(|c| {
                 let mut ftys = vec!(ty_of_inttype(ity));
-                ftys.push_all(c.tys.as_slice());
+                ftys.push_all(c.tys[]);
                 if dtor { ftys.push(ty::mk_bool()); }
-                mk_struct(cx, ftys.as_slice(), false, t)
+                mk_struct(cx, ftys[], false, t)
             }).collect();
 
-            ensure_enum_fits_in_address_space(cx, ity, fields.as_slice(), t);
+            ensure_enum_fits_in_address_space(cx, ity, fields[], t);
 
             General(ity, fields, dtor)
         }
         _ => cx.sess().bug(format!("adt::represent_type called on non-ADT type: {}",
-                           ty_to_string(cx.tcx(), t)).as_slice())
+                           ty_to_string(cx.tcx(), t))[])
     }
 }
 
@@ -290,7 +290,7 @@ pub enum PointerField {
 impl<'tcx> Case<'tcx> {
     fn is_zerolen<'a>(&self, cx: &CrateContext<'a, 'tcx>, scapegoat: Ty<'tcx>)
                       -> bool {
-        mk_struct(cx, self.tys.as_slice(), false, scapegoat).size == 0
+        mk_struct(cx, self.tys[], false, scapegoat).size == 0
     }
 
     fn find_ptr<'a>(&self, cx: &CrateContext<'a, 'tcx>) -> Option<PointerField> {
@@ -352,9 +352,9 @@ fn mk_struct<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
            .map(|&ty| type_of::sizing_type_of(cx, ty)).collect()
     };
 
-    ensure_struct_fits_in_address_space(cx, lltys.as_slice(), packed, scapegoat);
+    ensure_struct_fits_in_address_space(cx, lltys[], packed, scapegoat);
 
-    let llty_rec = Type::struct_(cx, lltys.as_slice(), packed);
+    let llty_rec = Type::struct_(cx, lltys[], packed);
     Struct {
         size: machine::llsize_of_alloc(cx, llty_rec),
         align: machine::llalign_of_min(cx, llty_rec),
@@ -403,7 +403,7 @@ fn range_to_inttype(cx: &CrateContext, hint: Hint, bounds: &IntBounds) -> IntTyp
             return ity;
         }
         attr::ReprExtern => {
-            attempts = match cx.sess().target.target.arch.as_slice() {
+            attempts = match cx.sess().target.target.arch[] {
                 // WARNING: the ARM EABI has two variants; the one corresponding to `at_least_32`
                 // appears to be used on Linux and NetBSD, but some systems may use the variant
                 // corresponding to `choose_shortest`.  However, we don't run on those yet...?
@@ -530,7 +530,7 @@ pub fn finish_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
     match *r {
         CEnum(..) | General(..) | RawNullablePointer { .. } => { }
         Univariant(ref st, _) | StructWrappedNullablePointer { nonnull: ref st, .. } =>
-            llty.set_struct_body(struct_llfields(cx, st, false, false).as_slice(),
+            llty.set_struct_body(struct_llfields(cx, st, false, false)[],
                                  st.packed)
     }
 }
@@ -546,7 +546,7 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
         Univariant(ref st, _) | StructWrappedNullablePointer { nonnull: ref st, .. } => {
             match name {
                 None => {
-                    Type::struct_(cx, struct_llfields(cx, st, sizing, dst).as_slice(),
+                    Type::struct_(cx, struct_llfields(cx, st, sizing, dst)[],
                                   st.packed)
                 }
                 Some(name) => { assert_eq!(sizing, false); Type::named_struct(cx, name) }
@@ -565,7 +565,7 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
             // of the size.
             //
             // FIXME #10604: this breaks when vector types are present.
-            let (size, align) = union_size_and_align(sts.as_slice());
+            let (size, align) = union_size_and_align(sts[]);
             let align_s = align as u64;
             let discr_ty = ll_inttype(cx, ity);
             let discr_size = machine::llsize_of_alloc(cx, discr_ty);
@@ -586,10 +586,10 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                            Type::array(&discr_ty, align_s / discr_size - 1),
                            pad_ty);
             match name {
-                None => Type::struct_(cx, fields.as_slice(), false),
+                None => Type::struct_(cx, fields[], false),
                 Some(name) => {
                     let mut llty = Type::named_struct(cx, name);
-                    llty.set_struct_body(fields.as_slice(), false);
+                    llty.set_struct_body(fields[], false);
                     llty
                 }
             }
@@ -847,7 +847,7 @@ pub fn struct_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, st: &Struct<'tcx>, v
     let val = if needs_cast {
         let ccx = bcx.ccx();
         let fields = st.fields.iter().map(|&ty| type_of::type_of(ccx, ty)).collect::<Vec<_>>();
-        let real_ty = Type::struct_(ccx, fields.as_slice(), st.packed);
+        let real_ty = Type::struct_(ccx, fields[], st.packed);
         PointerCast(bcx, val, real_ty.ptr_to())
     } else {
         val
@@ -879,14 +879,14 @@ pub fn fold_variants<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
 
             for (discr, case) in cases.iter().enumerate() {
                 let mut variant_cx = fcx.new_temp_block(
-                    format!("enum-variant-iter-{}", discr.to_string()).as_slice()
+                    format!("enum-variant-iter-{}", discr.to_string())[]
                 );
                 let rhs_val = C_integral(ll_inttype(ccx, ity), discr as u64, true);
                 AddCase(llswitch, rhs_val, variant_cx.llbb);
 
                 let fields = case.fields.iter().map(|&ty|
                     type_of::type_of(bcx.ccx(), ty)).collect::<Vec<_>>();
-                let real_ty = Type::struct_(ccx, fields.as_slice(), case.packed);
+                let real_ty = Type::struct_(ccx, fields[], case.packed);
                 let variant_value = PointerCast(variant_cx, value, real_ty.ptr_to());
 
                 variant_cx = f(variant_cx, case, variant_value);
@@ -961,14 +961,14 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr
             let lldiscr = C_integral(ll_inttype(ccx, ity), discr as u64, true);
             let mut f = vec![lldiscr];
             f.push_all(vals);
-            let mut contents = build_const_struct(ccx, case, f.as_slice());
+            let mut contents = build_const_struct(ccx, case, f[]);
             contents.push_all(&[padding(ccx, max_sz - case.size)]);
-            C_struct(ccx, contents.as_slice(), false)
+            C_struct(ccx, contents[], false)
         }
         Univariant(ref st, _dro) => {
             assert!(discr == 0);
             let contents = build_const_struct(ccx, st, vals);
-            C_struct(ccx, contents.as_slice(), st.packed)
+            C_struct(ccx, contents[], st.packed)
         }
         RawNullablePointer { nndiscr, nnty, .. } => {
             if discr == nndiscr {
@@ -982,7 +982,7 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr
             if discr == nndiscr {
                 C_struct(ccx, build_const_struct(ccx,
                                                  nonnull,
-                                                 vals).as_slice(),
+                                                 vals)[],
                          false)
             } else {
                 let vals = nonnull.fields.iter().map(|&ty| {
@@ -992,7 +992,7 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr
                 }).collect::<Vec<ValueRef>>();
                 C_struct(ccx, build_const_struct(ccx,
                                                  nonnull,
-                                                 vals.as_slice()).as_slice(),
+                                                 vals[])[],
                          false)
             }
         }
diff --git a/src/librustc_trans/trans/asm.rs b/src/librustc_trans/trans/asm.rs
index e3afe22897e..b8bee100082 100644
--- a/src/librustc_trans/trans/asm.rs
+++ b/src/librustc_trans/trans/asm.rs
@@ -72,7 +72,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm)
                                     callee::DontAutorefArg)
         })
     }).collect::<Vec<_>>();
-    inputs.push_all(ext_inputs.as_slice());
+    inputs.push_all(ext_inputs[]);
 
     // no failure occurred preparing operands, no need to cleanup
     fcx.pop_custom_cleanup_scope(temp_scope);
@@ -92,18 +92,18 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm)
         if !clobbers.is_empty() {
             clobbers.push(',');
         }
-        clobbers.push_str(more_clobbers.as_slice());
+        clobbers.push_str(more_clobbers[]);
     }
 
     // Add the clobbers to our constraints list
     if clobbers.len() != 0 && constraints.len() != 0 {
         constraints.push(',');
-        constraints.push_str(clobbers.as_slice());
+        constraints.push_str(clobbers[]);
     } else {
-        constraints.push_str(clobbers.as_slice());
+        constraints.push_str(clobbers[]);
     }
 
-    debug!("Asm Constraints: {}", constraints.as_slice());
+    debug!("Asm Constraints: {}", constraints[]);
 
     let num_outputs = outputs.len();
 
@@ -113,7 +113,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm)
     } else if num_outputs == 1 {
         output_types[0]
     } else {
-        Type::struct_(bcx.ccx(), output_types.as_slice(), false)
+        Type::struct_(bcx.ccx(), output_types[], false)
     };
 
     let dialect = match ia.dialect {
@@ -126,7 +126,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm)
             InlineAsmCall(bcx,
                           a,
                           c,
-                          inputs.as_slice(),
+                          inputs[],
                           output_type,
                           ia.volatile,
                           ia.alignstack,
diff --git a/src/librustc_trans/trans/base.rs b/src/librustc_trans/trans/base.rs
index ca1e0d7de72..a18d403bd95 100644
--- a/src/librustc_trans/trans/base.rs
+++ b/src/librustc_trans/trans/base.rs
@@ -249,7 +249,7 @@ fn get_extern_rust_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty<'tcx>,
     let f = decl_rust_fn(ccx, fn_ty, name);
 
     csearch::get_item_attrs(&ccx.sess().cstore, did, |attrs| {
-        set_llvm_fn_attrs(ccx, attrs.as_slice(), f)
+        set_llvm_fn_attrs(ccx, attrs[], f)
     });
 
     ccx.externs().borrow_mut().insert(name.to_string(), f);
@@ -302,7 +302,7 @@ pub fn decl_rust_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
         _ => panic!("expected closure or fn")
     };
 
-    let llfty = type_of_rust_fn(ccx, env, inputs.as_slice(), output, abi);
+    let llfty = type_of_rust_fn(ccx, env, inputs[], output, abi);
     debug!("decl_rust_fn(input count={},type={})",
            inputs.len(),
            ccx.tn().type_to_string(llfty));
@@ -369,7 +369,7 @@ fn require_alloc_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
         Err(s) => {
             bcx.sess().fatal(format!("allocation of `{}` {}",
                                      bcx.ty_to_string(info_ty),
-                                     s).as_slice());
+                                     s)[]);
         }
     }
 }
@@ -510,7 +510,7 @@ pub fn unset_split_stack(f: ValueRef) {
 // silently mangles such symbols, breaking our linkage model.
 pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: String) {
     if ccx.all_llvm_symbols().borrow().contains(&sym) {
-        ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym).as_slice());
+        ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym)[]);
     }
     ccx.all_llvm_symbols().borrow_mut().insert(sym);
 }
@@ -546,7 +546,7 @@ pub fn get_res_dtor<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                      ty::mk_nil(ccx.tcx()));
         get_extern_fn(ccx,
                       &mut *ccx.externs().borrow_mut(),
-                      name.as_slice(),
+                      name[],
                       llvm::CCallConv,
                       llty,
                       dtor_ty)
@@ -796,8 +796,8 @@ pub fn iter_structural_ty<'a, 'blk, 'tcx>(cx: Block<'blk, 'tcx>,
                       let variant_cx =
                           fcx.new_temp_block(
                               format!("enum-iter-variant-{}",
-                                      variant.disr_val.to_string().as_slice())
-                                     .as_slice());
+                                      variant.disr_val.to_string()[])
+                                     []);
                       match adt::trans_case(cx, &*repr, variant.disr_val) {
                           _match::SingleResult(r) => {
                               AddCase(llswitch, r.val, variant_cx.llbb)
@@ -822,7 +822,7 @@ pub fn iter_structural_ty<'a, 'blk, 'tcx>(cx: Block<'blk, 'tcx>,
       }
       _ => {
           cx.sess().unimpl(format!("type in iter_structural_ty: {}",
-                                   ty_to_string(cx.tcx(), t)).as_slice())
+                                   ty_to_string(cx.tcx(), t))[])
       }
     }
     return cx;
@@ -904,7 +904,7 @@ pub fn fail_if_zero_or_overflows<'blk, 'tcx>(
         }
         _ => {
             cx.sess().bug(format!("fail-if-zero on unexpected type: {}",
-                                  ty_to_string(cx.tcx(), rhs_t)).as_slice());
+                                  ty_to_string(cx.tcx(), rhs_t))[]);
         }
     };
     let bcx = with_cond(cx, is_zero, |bcx| {
@@ -958,19 +958,19 @@ pub fn trans_external_path<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
         ty::ty_bare_fn(ref fn_ty) => {
             match ccx.sess().target.target.adjust_abi(fn_ty.abi) {
                 Rust | RustCall => {
-                    get_extern_rust_fn(ccx, t, name.as_slice(), did)
+                    get_extern_rust_fn(ccx, t, name[], did)
                 }
                 RustIntrinsic => {
                     ccx.sess().bug("unexpected intrinsic in trans_external_path")
                 }
                 _ => {
                     foreign::register_foreign_item_fn(ccx, fn_ty.abi, t,
-                                                      name.as_slice())
+                                                      name[])
                 }
             }
         }
         ty::ty_closure(_) => {
-            get_extern_rust_fn(ccx, t, name.as_slice(), did)
+            get_extern_rust_fn(ccx, t, name[], did)
         }
         _ => {
             get_extern_const(ccx, did, t)
@@ -1024,7 +1024,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
 
         let llresult = Invoke(bcx,
                               llfn,
-                              llargs.as_slice(),
+                              llargs[],
                               normal_bcx.llbb,
                               landing_pad,
                               Some(attributes));
@@ -1040,7 +1040,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
             None => debuginfo::clear_source_location(bcx.fcx)
         };
 
-        let llresult = Call(bcx, llfn, llargs.as_slice(), Some(attributes));
+        let llresult = Call(bcx, llfn, llargs[], Some(attributes));
         return (llresult, bcx);
     }
 }
@@ -1157,7 +1157,7 @@ pub fn call_lifetime_end(cx: Block, ptr: ValueRef) {
 pub fn call_memcpy(cx: Block, dst: ValueRef, src: ValueRef, n_bytes: ValueRef, align: u32) {
     let _icx = push_ctxt("call_memcpy");
     let ccx = cx.ccx();
-    let key = match ccx.sess().target.target.target_word_size.as_slice() {
+    let key = match ccx.sess().target.target.target_word_size[] {
         "32" => "llvm.memcpy.p0i8.p0i8.i32",
         "64" => "llvm.memcpy.p0i8.p0i8.i64",
         tws => panic!("Unsupported target word size for memcpy: {}", tws),
@@ -1204,7 +1204,7 @@ fn memzero<'a, 'tcx>(b: &Builder<'a, 'tcx>, llptr: ValueRef, ty: Ty<'tcx>) {
 
     let llty = type_of::type_of(ccx, ty);
 
-    let intrinsic_key = match ccx.sess().target.target.target_word_size.as_slice() {
+    let intrinsic_key = match ccx.sess().target.target.target_word_size[] {
         "32" => "llvm.memset.p0i8.i32",
         "64" => "llvm.memset.p0i8.i64",
         tws => panic!("Unsupported target word size for memset: {}", tws),
@@ -1691,7 +1691,7 @@ fn copy_unboxed_closure_args_to_allocas<'blk, 'tcx>(
                                                          "argtuple",
                                                          arg_scope_id));
     let untupled_arg_types = match monomorphized_arg_types[0].sty {
-        ty::ty_tup(ref types) => types.as_slice(),
+        ty::ty_tup(ref types) => types[],
         _ => {
             bcx.tcx().sess.span_bug(args[0].pat.span,
                                     "first arg to `rust-call` ABI function \
@@ -1879,12 +1879,12 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
 
     let arg_datums = if abi != RustCall {
         create_datums_for_fn_args(&fcx,
-                                  monomorphized_arg_types.as_slice())
+                                  monomorphized_arg_types[])
     } else {
         create_datums_for_fn_args_under_call_abi(
             bcx,
             arg_scope,
-            monomorphized_arg_types.as_slice())
+            monomorphized_arg_types[])
     };
 
     bcx = match closure_env.kind {
@@ -1892,16 +1892,16 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
             copy_args_to_allocas(&fcx,
                                  arg_scope,
                                  bcx,
-                                 decl.inputs.as_slice(),
+                                 decl.inputs[],
                                  arg_datums)
         }
         closure::UnboxedClosure(..) => {
             copy_unboxed_closure_args_to_allocas(
                 bcx,
                 arg_scope,
-                decl.inputs.as_slice(),
+                decl.inputs[],
                 arg_datums,
-                monomorphized_arg_types.as_slice())
+                monomorphized_arg_types[])
         }
     };
 
@@ -2018,7 +2018,7 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
         _ => ccx.sess().bug(
             format!("trans_enum_variant_constructor: \
                      unexpected ctor return type {}",
-                     ctor_ty.repr(tcx)).as_slice())
+                     ctor_ty.repr(tcx))[])
     };
 
     // Get location to store the result. If the user does not care about
@@ -2041,7 +2041,7 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                 bcx = expr::trans_adt(bcx,
                                       result_ty,
                                       disr,
-                                      fields.as_slice(),
+                                      fields[],
                                       None,
                                       expr::SaveIn(llresult),
                                       call_info);
@@ -2090,7 +2090,7 @@ fn trans_enum_variant_or_tuple_like_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx
         _ => ccx.sess().bug(
             format!("trans_enum_variant_or_tuple_like_struct: \
                      unexpected ctor return type {}",
-                    ty_to_string(ccx.tcx(), ctor_ty)).as_slice())
+                    ty_to_string(ccx.tcx(), ctor_ty))[])
     };
 
     let arena = TypedArena::new();
@@ -2102,7 +2102,7 @@ fn trans_enum_variant_or_tuple_like_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx
 
     let arg_tys = ty::ty_fn_args(ctor_ty);
 
-    let arg_datums = create_datums_for_fn_args(&fcx, arg_tys.as_slice());
+    let arg_datums = create_datums_for_fn_args(&fcx, arg_tys[]);
 
     if !type_is_zero_size(fcx.ccx, result_ty.unwrap()) {
         let dest = fcx.get_ret_slot(bcx, result_ty, "eret_slot");
@@ -2166,7 +2166,7 @@ fn enum_variant_size_lint(ccx: &CrateContext, enum_def: &ast::EnumDef, sp: Span,
                             lvlsrc, Some(sp),
                             format!("enum variant is more than three times larger \
                                      ({} bytes) than the next largest (ignoring padding)",
-                                    largest).as_slice());
+                                    largest)[]);
 
         ccx.sess().span_note(enum_def.variants[largest_index].span,
                              "this variant is the largest");
@@ -2284,7 +2284,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) {
     match item.node {
       ast::ItemFn(ref decl, _fn_style, abi, ref generics, ref body) => {
         if !generics.is_type_parameterized() {
-            let trans_everywhere = attr::requests_inline(item.attrs.as_slice());
+            let trans_everywhere = attr::requests_inline(item.attrs[]);
             // Ignore `trans_everywhere` for cross-crate inlined items
             // (`from_external`).  `trans_item` will be called once for each
             // compilation unit that references the item, so it will still get
@@ -2295,7 +2295,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) {
                     foreign::trans_rust_fn_with_foreign_abi(ccx,
                                                             &**decl,
                                                             &**body,
-                                                            item.attrs.as_slice(),
+                                                            item.attrs[],
                                                             llfn,
                                                             &Substs::trans_empty(),
                                                             item.id,
@@ -2307,7 +2307,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) {
                              llfn,
                              &Substs::trans_empty(),
                              item.id,
-                             item.attrs.as_slice());
+                             item.attrs[]);
                 }
                 update_linkage(ccx,
                                llfn,
@@ -2324,7 +2324,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) {
       ast::ItemImpl(_, ref generics, _, _, ref impl_items) => {
         meth::trans_impl(ccx,
                          item.ident,
-                         impl_items.as_slice(),
+                         impl_items[],
                          generics,
                          item.id);
       }
@@ -2350,7 +2350,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) {
 
           // Do static_assert checking. It can't really be done much earlier
           // because we need to get the value of the bool out of LLVM
-          if attr::contains_name(item.attrs.as_slice(), "static_assert") {
+          if attr::contains_name(item.attrs[], "static_assert") {
               if m == ast::MutMutable {
                   ccx.sess().span_fatal(expr.span,
                                         "cannot have static_assert on a mutable \
@@ -2427,7 +2427,7 @@ fn register_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
         _ => panic!("expected bare rust fn")
     };
 
-    let llfn = decl_rust_fn(ccx, node_type, sym.as_slice());
+    let llfn = decl_rust_fn(ccx, node_type, sym[]);
     finish_register_fn(ccx, sp, sym, node_id, llfn);
     llfn
 }
@@ -2472,7 +2472,7 @@ pub fn get_fn_llvm_attributes<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty<
 
             match fn_sig.0.inputs[1].sty {
                 ty::ty_tup(ref t_in) => {
-                    inputs.push_all(t_in.as_slice());
+                    inputs.push_all(t_in[]);
                     inputs
                 }
                 _ => ccx.sess().bug("expected tuple'd inputs")
@@ -2607,7 +2607,7 @@ pub fn register_fn_llvmty(ccx: &CrateContext,
                           llfty: Type) -> ValueRef {
     debug!("register_fn_llvmty id={} sym={}", node_id, sym);
 
-    let llfn = decl_fn(ccx, sym.as_slice(), cc, llfty, ty::FnConverging(ty::mk_nil(ccx.tcx())));
+    let llfn = decl_fn(ccx, sym[], cc, llfty, ty::FnConverging(ty::mk_nil(ccx.tcx())));
     finish_register_fn(ccx, sp, sym, node_id, llfn);
     llfn
 }
@@ -2659,7 +2659,7 @@ pub fn create_entry_wrapper(ccx: &CrateContext,
             let (start_fn, args) = if use_start_lang_item {
                 let start_def_id = match ccx.tcx().lang_items.require(StartFnLangItem) {
                     Ok(id) => id,
-                    Err(s) => { ccx.sess().fatal(s.as_slice()); }
+                    Err(s) => { ccx.sess().fatal(s[]); }
                 };
                 let start_fn = if start_def_id.krate == ast::LOCAL_CRATE {
                     get_item_val(ccx, start_def_id.node)
@@ -2750,7 +2750,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
     let val = match item {
         ast_map::NodeItem(i) => {
             let ty = ty::node_id_to_type(ccx.tcx(), i.id);
-            let sym = || exported_name(ccx, id, ty, i.attrs.as_slice());
+            let sym = || exported_name(ccx, id, ty, i.attrs[]);
 
             let v = match i.node {
                 ast::ItemStatic(_, _, ref expr) => {
@@ -2773,16 +2773,16 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
                         } else {
                             llvm::LLVMTypeOf(v)
                         };
-                        if contains_null(sym.as_slice()) {
+                        if contains_null(sym[]) {
                             ccx.sess().fatal(
                                 format!("Illegal null byte in export_name \
-                                         value: `{}`", sym).as_slice());
+                                         value: `{}`", sym)[]);
                         }
                         let g = sym.with_c_str(|buf| {
                             llvm::LLVMAddGlobal(ccx.llmod(), llty, buf)
                         });
 
-                        if attr::contains_name(i.attrs.as_slice(),
+                        if attr::contains_name(i.attrs[],
                                                "thread_local") {
                             llvm::set_thread_local(g, true);
                         }
@@ -2807,19 +2807,19 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
                                                                    sym,
                                                                    i.id)
                     };
-                    set_llvm_fn_attrs(ccx, i.attrs.as_slice(), llfn);
+                    set_llvm_fn_attrs(ccx, i.attrs[], llfn);
                     llfn
                 }
 
                 _ => panic!("get_item_val: weird result in table")
             };
 
-            match attr::first_attr_value_str_by_name(i.attrs.as_slice(),
+            match attr::first_attr_value_str_by_name(i.attrs[],
                                                      "link_section") {
                 Some(sect) => {
                     if contains_null(sect.get()) {
                         ccx.sess().fatal(format!("Illegal null byte in link_section value: `{}`",
-                                                 sect.get()).as_slice());
+                                                 sect.get())[]);
                     }
                     unsafe {
                         sect.get().with_c_str(|buf| {
@@ -2863,7 +2863,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
                     let abi = ccx.tcx().map.get_foreign_abi(id);
                     let ty = ty::node_id_to_type(ccx.tcx(), ni.id);
                     let name = foreign::link_name(&*ni);
-                    foreign::register_foreign_item_fn(ccx, abi, ty, name.get().as_slice())
+                    foreign::register_foreign_item_fn(ccx, abi, ty, name.get()[])
                 }
                 ast::ForeignItemStatic(..) => {
                     foreign::register_static(ccx, &*ni)
@@ -2886,7 +2886,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
             let sym = exported_name(ccx,
                                     id,
                                     ty,
-                                    enm.attrs.as_slice());
+                                    enm.attrs[]);
 
             llfn = match enm.node {
                 ast::ItemEnum(_, _) => {
@@ -2914,7 +2914,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
                                     id,
                                     ty,
                                     struct_item.attrs
-                                               .as_slice());
+                                               []);
             let llfn = register_fn(ccx, struct_item.span,
                                    sym, ctor_id, ty);
             set_inline_hint(llfn);
@@ -2923,7 +2923,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
 
         ref variant => {
             ccx.sess().bug(format!("get_item_val(): unexpected variant: {}",
-                                   variant).as_slice())
+                                   variant)[])
         }
     };
 
@@ -2944,10 +2944,10 @@ fn register_method(ccx: &CrateContext, id: ast::NodeId,
                    m: &ast::Method) -> ValueRef {
     let mty = ty::node_id_to_type(ccx.tcx(), id);
 
-    let sym = exported_name(ccx, id, mty, m.attrs.as_slice());
+    let sym = exported_name(ccx, id, mty, m.attrs[]);
 
     let llfn = register_fn(ccx, m.span, sym, id, mty);
-    set_llvm_fn_attrs(ccx, m.attrs.as_slice(), llfn);
+    set_llvm_fn_attrs(ccx, m.attrs[], llfn);
     llfn
 }
 
@@ -2986,7 +2986,7 @@ pub fn write_metadata(cx: &SharedCrateContext, krate: &ast::Crate) -> Vec<u8> {
         Some(compressed) => compressed,
         None => cx.sess().fatal("failed to compress metadata"),
     }.as_slice());
-    let llmeta = C_bytes_in_context(cx.metadata_llcx(), compressed.as_slice());
+    let llmeta = C_bytes_in_context(cx.metadata_llcx(), compressed[]);
     let llconst = C_struct_in_context(cx.metadata_llcx(), &[llmeta], false);
     let name = format!("rust_metadata_{}_{}",
                        cx.link_meta().crate_name,
@@ -3114,7 +3114,7 @@ pub fn trans_crate<'tcx>(analysis: ty::CrateAnalysis<'tcx>)
     let link_meta = link::build_link_meta(&tcx.sess, krate, name);
 
     let codegen_units = tcx.sess.opts.cg.codegen_units;
-    let shared_ccx = SharedCrateContext::new(link_meta.crate_name.as_slice(),
+    let shared_ccx = SharedCrateContext::new(link_meta.crate_name[],
                                              codegen_units,
                                              tcx,
                                              export_map,
@@ -3216,7 +3216,7 @@ pub fn trans_crate<'tcx>(analysis: ty::CrateAnalysis<'tcx>)
         llmod: shared_ccx.metadata_llmod(),
     };
     let formats = shared_ccx.tcx().dependency_formats.borrow().clone();
-    let no_builtins = attr::contains_name(krate.attrs.as_slice(), "no_builtins");
+    let no_builtins = attr::contains_name(krate.attrs[], "no_builtins");
 
     let translation = CrateTranslation {
         modules: modules,
diff --git a/src/librustc_trans/trans/builder.rs b/src/librustc_trans/trans/builder.rs
index cf940b13846..1b9c9d221b9 100644
--- a/src/librustc_trans/trans/builder.rs
+++ b/src/librustc_trans/trans/builder.rs
@@ -555,7 +555,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         } else {
             let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::<Vec<ValueRef>>();
             self.count_insn("gepi");
-            self.inbounds_gep(base, v.as_slice())
+            self.inbounds_gep(base, v[])
         }
     }
 
@@ -763,8 +763,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             let s = format!("{} ({})",
                             text,
                             self.ccx.sess().codemap().span_to_string(sp));
-            debug!("{}", s.as_slice());
-            self.add_comment(s.as_slice());
+            debug!("{}", s[]);
+            self.add_comment(s[]);
         }
     }
 
@@ -801,7 +801,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         }).collect::<Vec<_>>();
 
         debug!("Asm Output Type: {}", self.ccx.tn().type_to_string(output));
-        let fty = Type::func(argtys.as_slice(), &output);
+        let fty = Type::func(argtys[], &output);
         unsafe {
             let v = llvm::LLVMInlineAsm(
                 fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint);
diff --git a/src/librustc_trans/trans/cabi.rs b/src/librustc_trans/trans/cabi.rs
index ad2a6db1222..9ea158fbe21 100644
--- a/src/librustc_trans/trans/cabi.rs
+++ b/src/librustc_trans/trans/cabi.rs
@@ -107,7 +107,7 @@ pub fn compute_abi_info(ccx: &CrateContext,
                         atys: &[Type],
                         rty: Type,
                         ret_def: bool) -> FnType {
-    match ccx.sess().target.target.arch.as_slice() {
+    match ccx.sess().target.target.arch[] {
         "x86" => cabi_x86::compute_abi_info(ccx, atys, rty, ret_def),
         "x86_64" => if ccx.sess().target.target.options.is_like_windows {
             cabi_x86_win64::compute_abi_info(ccx, atys, rty, ret_def)
@@ -117,6 +117,6 @@ pub fn compute_abi_info(ccx: &CrateContext,
         "arm" => cabi_arm::compute_abi_info(ccx, atys, rty, ret_def),
         "mips" => cabi_mips::compute_abi_info(ccx, atys, rty, ret_def),
         a => ccx.sess().fatal((format!("unrecognized arch \"{}\" in target specification", a))
-                              .as_slice()),
+                              []),
     }
 }
diff --git a/src/librustc_trans/trans/callee.rs b/src/librustc_trans/trans/callee.rs
index 1a753901f7e..ec3a81afaa0 100644
--- a/src/librustc_trans/trans/callee.rs
+++ b/src/librustc_trans/trans/callee.rs
@@ -122,7 +122,7 @@ fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr)
                     expr.span,
                     format!("type of callee is neither bare-fn nor closure: \
                              {}",
-                            bcx.ty_to_string(datum.ty)).as_slice());
+                            bcx.ty_to_string(datum.ty))[]);
             }
         }
     }
@@ -208,7 +208,7 @@ fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr)
                 bcx.tcx().sess.span_bug(
                     ref_expr.span,
                     format!("cannot translate def {} \
-                             to a callable thing!", def).as_slice());
+                             to a callable thing!", def)[]);
             }
         }
     }
@@ -288,7 +288,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>(
 
             _ => {
                 tcx.sess.bug(format!("trans_fn_pointer_shim invoked on invalid type: {}",
-                                           bare_fn_ty.repr(tcx)).as_slice());
+                                           bare_fn_ty.repr(tcx))[]);
             }
         };
     let tuple_input_ty = ty::mk_tup(tcx, input_tys.to_vec());
@@ -310,7 +310,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>(
     let llfn =
         decl_internal_rust_fn(ccx,
                               tuple_fn_ty,
-                              function_name.as_slice());
+                              function_name[]);
 
     //
     let block_arena = TypedArena::new();
@@ -345,7 +345,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>(
                            None,
                            bare_fn_ty,
                            |bcx, _| Callee { bcx: bcx, data: Fn(llfnpointer) },
-                           ArgVals(llargs.as_slice()),
+                           ArgVals(llargs[]),
                            dest).bcx;
 
     finish_fn(&fcx, bcx, output_ty);
@@ -813,7 +813,7 @@ pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
 
         bcx = foreign::trans_native_call(bcx, callee_ty,
                                          llfn, opt_llretslot.unwrap(),
-                                         llargs.as_slice(), arg_tys);
+                                         llargs[], arg_tys);
     }
 
     fcx.pop_and_trans_custom_cleanup_scope(bcx, arg_cleanup_scope);
diff --git a/src/librustc_trans/trans/cleanup.rs b/src/librustc_trans/trans/cleanup.rs
index fb2c432ef5c..c1bb21c496a 100644
--- a/src/librustc_trans/trans/cleanup.rs
+++ b/src/librustc_trans/trans/cleanup.rs
@@ -404,7 +404,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> {
 
         self.ccx.sess().bug(
             format!("no cleanup scope {} found",
-                    self.ccx.tcx().map.node_to_string(cleanup_scope)).as_slice());
+                    self.ccx.tcx().map.node_to_string(cleanup_scope))[]);
     }
 
     /// Schedules a cleanup to occur in the top-most scope, which must be a temporary scope.
@@ -586,7 +586,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx
                     LoopExit(id, _) => {
                         self.ccx.sess().bug(format!(
                                 "cannot exit from scope {}, \
-                                not in scope", id).as_slice());
+                                not in scope", id)[]);
                     }
                 }
             }
@@ -655,7 +655,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx
                 let name = scope.block_name("clean");
                 debug!("generating cleanups for {}", name);
                 let bcx_in = self.new_block(label.is_unwind(),
-                                            name.as_slice(),
+                                            name[],
                                             None);
                 let mut bcx_out = bcx_in;
                 for cleanup in scope.cleanups.iter().rev() {
@@ -702,7 +702,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx
                 Some(llbb) => { return llbb; }
                 None => {
                     let name = last_scope.block_name("unwind");
-                    pad_bcx = self.new_block(true, name.as_slice(), None);
+                    pad_bcx = self.new_block(true, name[], None);
                     last_scope.cached_landing_pad = Some(pad_bcx.llbb);
                 }
             }
@@ -1020,7 +1020,7 @@ pub fn temporary_scope(tcx: &ty::ctxt,
         }
         None => {
             tcx.sess.bug(format!("no temporary scope available for expr {}",
-                                 id).as_slice())
+                                 id)[])
         }
     }
 }
diff --git a/src/librustc_trans/trans/closure.rs b/src/librustc_trans/trans/closure.rs
index d5d954f5a90..8e56ef3c6f3 100644
--- a/src/librustc_trans/trans/closure.rs
+++ b/src/librustc_trans/trans/closure.rs
@@ -177,7 +177,7 @@ pub fn store_environment<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
     let tcx = ccx.tcx();
 
     // compute the type of the closure
-    let cdata_ty = mk_closure_tys(tcx, bound_values.as_slice());
+    let cdata_ty = mk_closure_tys(tcx, bound_values[]);
 
     // cbox_ty has the form of a tuple: (a, b, c) we want a ptr to a
     // tuple.  This could be a ptr in uniq or a box or on stack,
@@ -206,7 +206,7 @@ pub fn store_environment<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
 
         if ccx.sess().asm_comments() {
             add_comment(bcx, format!("Copy {} into closure",
-                                     bv.to_string(ccx)).as_slice());
+                                     bv.to_string(ccx))[]);
         }
 
         let bound_data = GEPi(bcx, llbox, &[0u, abi::BOX_FIELD_BODY, i]);
@@ -444,7 +444,7 @@ pub fn trans_expr_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
     let s = tcx.map.with_path(id, |path| {
         mangle_internal_name_by_path_and_seq(path, "closure")
     });
-    let llfn = decl_internal_rust_fn(ccx, fty, s.as_slice());
+    let llfn = decl_internal_rust_fn(ccx, fty, s[]);
 
     // set an inline hint for all closures
     set_inline_hint(llfn);
@@ -468,7 +468,7 @@ pub fn trans_expr_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                   &[],
                   ty::ty_fn_ret(fty),
                   ty::ty_fn_abi(fty),
-                  ClosureEnv::new(freevars.as_slice(),
+                  ClosureEnv::new(freevars[],
                                   BoxedClosure(cdata_ty, store)));
     fill_fn_pair(bcx, dest_addr, llfn, llbox);
     bcx
@@ -514,7 +514,7 @@ pub fn get_or_create_declaration_if_unboxed_closure<'blk, 'tcx>(bcx: Block<'blk,
         mangle_internal_name_by_path_and_seq(path, "unboxed_closure")
     });
 
-    let llfn = decl_internal_rust_fn(ccx, function_type, symbol.as_slice());
+    let llfn = decl_internal_rust_fn(ccx, function_type, symbol[]);
 
     // set an inline hint for all closures
     set_inline_hint(llfn);
@@ -563,7 +563,7 @@ pub fn trans_unboxed_closure<'blk, 'tcx>(
                   &[],
                   ty::ty_fn_ret(function_type),
                   ty::ty_fn_abi(function_type),
-                  ClosureEnv::new(freevars.as_slice(),
+                  ClosureEnv::new(freevars[],
                                   UnboxedClosure(freevar_mode)));
 
     // Don't hoist this to the top of the function. It's perfectly legitimate
@@ -614,7 +614,7 @@ pub fn get_wrapper_for_bare_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
             ccx.sess().bug(format!("get_wrapper_for_bare_fn: \
                                     expected a statically resolved fn, got \
                                     {}",
-                                    def).as_slice());
+                                    def)[]);
         }
     };
 
@@ -632,7 +632,7 @@ pub fn get_wrapper_for_bare_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
         _ => {
             ccx.sess().bug(format!("get_wrapper_for_bare_fn: \
                                     expected a closure ty, got {}",
-                                    closure_ty.repr(tcx)).as_slice());
+                                    closure_ty.repr(tcx))[]);
         }
     };
 
@@ -640,9 +640,9 @@ pub fn get_wrapper_for_bare_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
         mangle_internal_name_by_path_and_seq(path, "as_closure")
     });
     let llfn = if is_local {
-        decl_internal_rust_fn(ccx, closure_ty, name.as_slice())
+        decl_internal_rust_fn(ccx, closure_ty, name[])
     } else {
-        decl_rust_fn(ccx, closure_ty, name.as_slice())
+        decl_rust_fn(ccx, closure_ty, name[])
     };
 
     ccx.closure_bare_wrapper_cache().borrow_mut().insert(fn_ptr, llfn);
@@ -663,7 +663,7 @@ pub fn get_wrapper_for_bare_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
 
     let args = create_datums_for_fn_args(&fcx,
                                          ty::ty_fn_args(closure_ty)
-                                            .as_slice());
+                                            []);
     let mut llargs = Vec::new();
     match fcx.llretslotptr.get() {
         Some(llretptr) => {
diff --git a/src/librustc_trans/trans/common.rs b/src/librustc_trans/trans/common.rs
index 61f27bcfa7a..9a3e39ff10b 100644
--- a/src/librustc_trans/trans/common.rs
+++ b/src/librustc_trans/trans/common.rs
@@ -117,7 +117,7 @@ pub fn gensym_name(name: &str) -> PathElem {
     let num = token::gensym(name).uint();
     // use one colon which will get translated to a period by the mangler, and
     // we're guaranteed that `num` is globally unique for this crate.
-    PathName(token::gensym(format!("{}:{}", name, num).as_slice()))
+    PathName(token::gensym(format!("{}:{}", name, num)[]))
 }
 
 #[deriving(Copy)]
@@ -436,7 +436,7 @@ impl<'blk, 'tcx> BlockS<'blk, 'tcx> {
             Some(v) => v.clone(),
             None => {
                 self.tcx().sess.bug(format!(
-                    "no def associated with node id {}", nid).as_slice());
+                    "no def associated with node id {}", nid)[]);
             }
         }
     }
@@ -817,7 +817,7 @@ pub fn fulfill_obligation<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                 span,
                 format!("Encountered error `{}` selecting `{}` during trans",
                         e.repr(tcx),
-                        trait_ref.repr(tcx)).as_slice())
+                        trait_ref.repr(tcx))[])
         }
     };
 
@@ -844,7 +844,7 @@ pub fn fulfill_obligation<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                     span,
                     format!("Encountered errors `{}` fulfilling `{}` during trans",
                             errors.repr(tcx),
-                            trait_ref.repr(tcx)).as_slice());
+                            trait_ref.repr(tcx))[]);
             }
         }
     }
@@ -892,7 +892,7 @@ pub fn node_id_substs<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
             format!("type parameters for node {} include inference types: \
                      {}",
                     node,
-                    substs.repr(bcx.tcx())).as_slice());
+                    substs.repr(bcx.tcx()))[]);
     }
 
     let substs = substs.erase_regions();
@@ -909,8 +909,8 @@ pub fn langcall(bcx: Block,
         Err(s) => {
             let msg = format!("{} {}", msg, s);
             match span {
-                Some(span) => bcx.tcx().sess.span_fatal(span, msg.as_slice()),
-                None => bcx.tcx().sess.fatal(msg.as_slice()),
+                Some(span) => bcx.tcx().sess.span_fatal(span, msg[]),
+                None => bcx.tcx().sess.fatal(msg[]),
             }
         }
     }
diff --git a/src/librustc_trans/trans/consts.rs b/src/librustc_trans/trans/consts.rs
index e4f0543b5e7..4f7d0f8fe75 100644
--- a/src/librustc_trans/trans/consts.rs
+++ b/src/librustc_trans/trans/consts.rs
@@ -54,7 +54,7 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit)
                 _ => cx.sess().span_bug(lit.span,
                         format!("integer literal has type {} (expected int \
                                  or uint)",
-                                ty_to_string(cx.tcx(), lit_int_ty)).as_slice())
+                                ty_to_string(cx.tcx(), lit_int_ty))[])
             }
         }
         ast::LitFloat(ref fs, t) => {
@@ -74,7 +74,7 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit)
         }
         ast::LitBool(b) => C_bool(cx, b),
         ast::LitStr(ref s, _) => C_str_slice(cx, (*s).clone()),
-        ast::LitBinary(ref data) => C_binary_slice(cx, data.as_slice()),
+        ast::LitBinary(ref data) => C_binary_slice(cx, data[]),
     }
 }
 
@@ -95,9 +95,9 @@ fn const_vec(cx: &CrateContext, e: &ast::Expr,
                       .collect::<Vec<_>>();
     // If the vector contains enums, an LLVM array won't work.
     let v = if vs.iter().any(|vi| val_ty(*vi) != llunitty) {
-        C_struct(cx, vs.as_slice(), false)
+        C_struct(cx, vs[], false)
     } else {
-        C_array(llunitty, vs.as_slice())
+        C_array(llunitty, vs[])
     };
     (v, llunitty)
 }
@@ -152,13 +152,13 @@ fn const_deref<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, v: ValueRef,
                 }
                 _ => {
                     cx.sess().bug(format!("unexpected dereferenceable type {}",
-                                          ty_to_string(cx.tcx(), t)).as_slice())
+                                          ty_to_string(cx.tcx(), t))[])
                 }
             }
         }
         None => {
             cx.sess().bug(format!("cannot dereference const of type {}",
-                                  ty_to_string(cx.tcx(), t)).as_slice())
+                                  ty_to_string(cx.tcx(), t))[])
         }
     }
 }
@@ -203,7 +203,7 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr)
                     cx.sess()
                       .span_bug(e.span,
                                 format!("unexpected static function: {}",
-                                        store).as_slice())
+                                        store)[])
                 }
                 ty::AdjustDerefRef(ref adj) => {
                     let mut ty = ety;
@@ -264,7 +264,7 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr)
                                         }
                                         _ => cx.sess().span_bug(e.span,
                                             format!("unimplemented type in const unsize: {}",
-                                                    ty_to_string(cx.tcx(), ty)).as_slice())
+                                                    ty_to_string(cx.tcx(), ty))[])
                                     }
                                 }
                                 _ => {
@@ -272,7 +272,7 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr)
                                       .span_bug(e.span,
                                                 format!("unimplemented const \
                                                          autoref {}",
-                                                        autoref).as_slice())
+                                                        autoref)[])
                                 }
                             }
                         }
@@ -293,7 +293,7 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr)
         }
         cx.sess().bug(format!("const {} of type {} has size {} instead of {}",
                          e.repr(cx.tcx()), ty_to_string(cx.tcx(), ety),
-                         csize, tsize).as_slice());
+                         csize, tsize)[]);
     }
     (llconst, ety_adjusted)
 }
@@ -443,7 +443,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
                       _ => cx.sess().span_bug(base.span,
                                               format!("index-expr base must be a vector \
                                                        or string type, found {}",
-                                                      ty_to_string(cx.tcx(), bt)).as_slice())
+                                                      ty_to_string(cx.tcx(), bt))[])
                   },
                   ty::ty_rptr(_, mt) => match mt.ty.sty {
                       ty::ty_vec(_, Some(u)) => {
@@ -452,12 +452,12 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
                       _ => cx.sess().span_bug(base.span,
                                               format!("index-expr base must be a vector \
                                                        or string type, found {}",
-                                                      ty_to_string(cx.tcx(), bt)).as_slice())
+                                                      ty_to_string(cx.tcx(), bt))[])
                   },
                   _ => cx.sess().span_bug(base.span,
                                           format!("index-expr base must be a vector \
                                                    or string type, found {}",
-                                                  ty_to_string(cx.tcx(), bt)).as_slice())
+                                                  ty_to_string(cx.tcx(), bt))[])
               };
 
               let len = llvm::LLVMConstIntGetZExtValue(len) as u64;
@@ -558,8 +558,8 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
           ast::ExprTup(ref es) => {
               let ety = ty::expr_ty(cx.tcx(), e);
               let repr = adt::represent_type(cx, ety);
-              let vals = map_list(es.as_slice());
-              adt::trans_const(cx, &*repr, 0, vals.as_slice())
+              let vals = map_list(es[]);
+              adt::trans_const(cx, &*repr, 0, vals[])
           }
           ast::ExprStruct(_, ref fs, ref base_opt) => {
               let ety = ty::expr_ty(cx.tcx(), e);
@@ -590,7 +590,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
                           }
                       }
                   }).collect::<Vec<_>>();
-                  adt::trans_const(cx, &*repr, discr, cs.as_slice())
+                  adt::trans_const(cx, &*repr, discr, cs[])
               })
           }
           ast::ExprVec(ref es) => {
@@ -607,9 +607,9 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
             };
             let vs = Vec::from_elem(n, const_expr(cx, &**elem).0);
             if vs.iter().any(|vi| val_ty(*vi) != llunitty) {
-                C_struct(cx, vs.as_slice(), false)
+                C_struct(cx, vs[], false)
             } else {
-                C_array(llunitty, vs.as_slice())
+                C_array(llunitty, vs[])
             }
           }
           ast::ExprPath(ref pth) => {
@@ -655,8 +655,8 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
                   Some(def::DefStruct(_)) => {
                       let ety = ty::expr_ty(cx.tcx(), e);
                       let repr = adt::represent_type(cx, ety);
-                      let arg_vals = map_list(args.as_slice());
-                      adt::trans_const(cx, &*repr, 0, arg_vals.as_slice())
+                      let arg_vals = map_list(args[]);
+                      adt::trans_const(cx, &*repr, 0, arg_vals[])
                   }
                   Some(def::DefVariant(enum_did, variant_did, _)) => {
                       let ety = ty::expr_ty(cx.tcx(), e);
@@ -664,11 +664,11 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
                       let vinfo = ty::enum_variant_with_id(cx.tcx(),
                                                            enum_did,
                                                            variant_did);
-                      let arg_vals = map_list(args.as_slice());
+                      let arg_vals = map_list(args[]);
                       adt::trans_const(cx,
                                        &*repr,
                                        vinfo.disr_val,
-                                       arg_vals.as_slice())
+                                       arg_vals[])
                   }
                   _ => cx.sess().span_bug(e.span, "expected a struct or variant def")
               }
diff --git a/src/librustc_trans/trans/context.rs b/src/librustc_trans/trans/context.rs
index 7b962a93990..2c71dd831fb 100644
--- a/src/librustc_trans/trans/context.rs
+++ b/src/librustc_trans/trans/context.rs
@@ -284,7 +284,7 @@ impl<'tcx> SharedCrateContext<'tcx> {
             // such as a function name in the module.
             // 1. http://llvm.org/bugs/show_bug.cgi?id=11479
             let llmod_id = format!("{}.{}.rs", crate_name, i);
-            let local_ccx = LocalCrateContext::new(&shared_ccx, llmod_id.as_slice());
+            let local_ccx = LocalCrateContext::new(&shared_ccx, llmod_id[]);
             shared_ccx.local_ccxs.push(local_ccx);
         }
 
@@ -374,7 +374,7 @@ impl<'tcx> LocalCrateContext<'tcx> {
                                           .target
                                           .target
                                           .data_layout
-                                          .as_slice());
+                                          []);
 
             let dbg_cx = if shared.tcx.sess.opts.debuginfo != NoDebugInfo {
                 Some(debuginfo::CrateDebugContext::new(llmod))
@@ -726,7 +726,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
     pub fn report_overbig_object(&self, obj: Ty<'tcx>) -> ! {
         self.sess().fatal(
             format!("the type `{}` is too big for the current architecture",
-                    obj.repr(self.tcx())).as_slice())
+                    obj.repr(self.tcx()))[])
     }
 }
 
diff --git a/src/librustc_trans/trans/controlflow.rs b/src/librustc_trans/trans/controlflow.rs
index 135e192a2fd..3b24ded6717 100644
--- a/src/librustc_trans/trans/controlflow.rs
+++ b/src/librustc_trans/trans/controlflow.rs
@@ -48,7 +48,7 @@ pub fn trans_stmt<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
     debug!("trans_stmt({})", s.repr(cx.tcx()));
 
     if cx.sess().asm_comments() {
-        add_span_comment(cx, s.span, s.repr(cx.tcx()).as_slice());
+        add_span_comment(cx, s.span, s.repr(cx.tcx())[]);
     }
 
     let mut bcx = cx;
@@ -188,7 +188,7 @@ pub fn trans_if<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
     }
 
     let name = format!("then-block-{}-", thn.id);
-    let then_bcx_in = bcx.fcx.new_id_block(name.as_slice(), thn.id);
+    let then_bcx_in = bcx.fcx.new_id_block(name[], thn.id);
     let then_bcx_out = trans_block(then_bcx_in, &*thn, dest);
     trans::debuginfo::clear_source_location(bcx.fcx);
 
@@ -437,7 +437,7 @@ pub fn trans_break_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                 Some(&def::DefLabel(loop_id)) => loop_id,
                 ref r => {
                     bcx.tcx().sess.bug(format!("{} in def-map for label",
-                                               r).as_slice())
+                                               r)[])
                 }
             }
         }
@@ -501,7 +501,7 @@ pub fn trans_fail<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
 
     let v_str = C_str_slice(ccx, fail_str);
     let loc = bcx.sess().codemap().lookup_char_pos(sp.lo);
-    let filename = token::intern_and_get_ident(loc.file.name.as_slice());
+    let filename = token::intern_and_get_ident(loc.file.name[]);
     let filename = C_str_slice(ccx, filename);
     let line = C_uint(ccx, loc.line);
     let expr_file_line_const = C_struct(ccx, &[v_str, filename, line], false);
@@ -510,7 +510,7 @@ pub fn trans_fail<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
     let did = langcall(bcx, Some(sp), "", PanicFnLangItem);
     let bcx = callee::trans_lang_call(bcx,
                                       did,
-                                      args.as_slice(),
+                                      args[],
                                       Some(expr::Ignore)).bcx;
     Unreachable(bcx);
     return bcx;
@@ -526,7 +526,7 @@ pub fn trans_fail_bounds_check<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
 
     // Extract the file/line from the span
     let loc = bcx.sess().codemap().lookup_char_pos(sp.lo);
-    let filename = token::intern_and_get_ident(loc.file.name.as_slice());
+    let filename = token::intern_and_get_ident(loc.file.name[]);
 
     // Invoke the lang item
     let filename = C_str_slice(ccx,  filename);
@@ -537,7 +537,7 @@ pub fn trans_fail_bounds_check<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
     let did = langcall(bcx, Some(sp), "", PanicBoundsCheckFnLangItem);
     let bcx = callee::trans_lang_call(bcx,
                                       did,
-                                      args.as_slice(),
+                                      args[],
                                       Some(expr::Ignore)).bcx;
     Unreachable(bcx);
     return bcx;
diff --git a/src/librustc_trans/trans/datum.rs b/src/librustc_trans/trans/datum.rs
index 75473dc58bf..9ab4e92b511 100644
--- a/src/librustc_trans/trans/datum.rs
+++ b/src/librustc_trans/trans/datum.rs
@@ -463,7 +463,7 @@ impl<'tcx> Datum<'tcx, Lvalue> {
             }
             _ => bcx.tcx().sess.bug(
                 format!("Unexpected unsized type in get_element: {}",
-                        bcx.ty_to_string(self.ty)).as_slice())
+                        bcx.ty_to_string(self.ty))[])
         };
         Datum {
             val: val,
diff --git a/src/librustc_trans/trans/debuginfo.rs b/src/librustc_trans/trans/debuginfo.rs
index 51e3a83f81f..2545de34ed8 100644
--- a/src/librustc_trans/trans/debuginfo.rs
+++ b/src/librustc_trans/trans/debuginfo.rs
@@ -284,7 +284,7 @@ impl<'tcx> TypeMap<'tcx> {
                                        metadata: DIType) {
         if self.type_to_metadata.insert(type_, metadata).is_some() {
             cx.sess().bug(format!("Type metadata for Ty '{}' is already in the TypeMap!",
-                                   ppaux::ty_to_string(cx.tcx(), type_)).as_slice());
+                                   ppaux::ty_to_string(cx.tcx(), type_))[]);
         }
     }
 
@@ -297,7 +297,7 @@ impl<'tcx> TypeMap<'tcx> {
         if self.unique_id_to_metadata.insert(unique_type_id, metadata).is_some() {
             let unique_type_id_str = self.get_unique_type_id_as_string(unique_type_id);
             cx.sess().bug(format!("Type metadata for unique id '{}' is already in the TypeMap!",
-                                  unique_type_id_str.as_slice()).as_slice());
+                                  unique_type_id_str[])[]);
         }
     }
 
@@ -378,14 +378,14 @@ impl<'tcx> TypeMap<'tcx> {
                         self.get_unique_type_id_of_type(cx, component_type);
                     let component_type_id =
                         self.get_unique_type_id_as_string(component_type_id);
-                    unique_type_id.push_str(component_type_id.as_slice());
+                    unique_type_id.push_str(component_type_id[]);
                 }
             },
             ty::ty_uniq(inner_type) => {
                 unique_type_id.push('~');
                 let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
                 let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
-                unique_type_id.push_str(inner_type_id.as_slice());
+                unique_type_id.push_str(inner_type_id[]);
             },
             ty::ty_ptr(ty::mt { ty: inner_type, mutbl } ) => {
                 unique_type_id.push('*');
@@ -395,7 +395,7 @@ impl<'tcx> TypeMap<'tcx> {
 
                 let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
                 let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
-                unique_type_id.push_str(inner_type_id.as_slice());
+                unique_type_id.push_str(inner_type_id[]);
             },
             ty::ty_rptr(_, ty::mt { ty: inner_type, mutbl }) => {
                 unique_type_id.push('&');
@@ -405,12 +405,12 @@ impl<'tcx> TypeMap<'tcx> {
 
                 let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
                 let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
-                unique_type_id.push_str(inner_type_id.as_slice());
+                unique_type_id.push_str(inner_type_id[]);
             },
             ty::ty_vec(inner_type, optional_length) => {
                 match optional_length {
                     Some(len) => {
-                        unique_type_id.push_str(format!("[{}]", len).as_slice());
+                        unique_type_id.push_str(format!("[{}]", len)[]);
                     }
                     None => {
                         unique_type_id.push_str("[]");
@@ -419,7 +419,7 @@ impl<'tcx> TypeMap<'tcx> {
 
                 let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
                 let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
-                unique_type_id.push_str(inner_type_id.as_slice());
+                unique_type_id.push_str(inner_type_id[]);
             },
             ty::ty_trait(ref trait_data) => {
                 unique_type_id.push_str("trait ");
@@ -444,7 +444,7 @@ impl<'tcx> TypeMap<'tcx> {
                         self.get_unique_type_id_of_type(cx, parameter_type);
                     let parameter_type_id =
                         self.get_unique_type_id_as_string(parameter_type_id);
-                    unique_type_id.push_str(parameter_type_id.as_slice());
+                    unique_type_id.push_str(parameter_type_id[]);
                     unique_type_id.push(',');
                 }
 
@@ -457,7 +457,7 @@ impl<'tcx> TypeMap<'tcx> {
                     ty::FnConverging(ret_ty) => {
                         let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty);
                         let return_type_id = self.get_unique_type_id_as_string(return_type_id);
-                        unique_type_id.push_str(return_type_id.as_slice());
+                        unique_type_id.push_str(return_type_id[]);
                     }
                     ty::FnDiverging => {
                         unique_type_id.push_str("!");
@@ -478,8 +478,8 @@ impl<'tcx> TypeMap<'tcx> {
             },
             _ => {
                 cx.sess().bug(format!("get_unique_type_id_of_type() - unexpected type: {}, {}",
-                                      ppaux::ty_to_string(cx.tcx(), type_).as_slice(),
-                                      type_.sty).as_slice())
+                                      ppaux::ty_to_string(cx.tcx(), type_)[],
+                                      type_.sty)[])
             }
         };
 
@@ -522,7 +522,7 @@ impl<'tcx> TypeMap<'tcx> {
 
             output.push_str(crate_hash.as_str());
             output.push_str("/");
-            output.push_str(format!("{:x}", def_id.node).as_slice());
+            output.push_str(format!("{:x}", def_id.node)[]);
 
             // Maybe check that there is no self type here.
 
@@ -535,7 +535,7 @@ impl<'tcx> TypeMap<'tcx> {
                         type_map.get_unique_type_id_of_type(cx, type_parameter);
                     let param_type_id =
                         type_map.get_unique_type_id_as_string(param_type_id);
-                    output.push_str(param_type_id.as_slice());
+                    output.push_str(param_type_id[]);
                     output.push(',');
                 }
 
@@ -577,7 +577,7 @@ impl<'tcx> TypeMap<'tcx> {
                 self.get_unique_type_id_of_type(cx, parameter_type);
             let parameter_type_id =
                 self.get_unique_type_id_as_string(parameter_type_id);
-            unique_type_id.push_str(parameter_type_id.as_slice());
+            unique_type_id.push_str(parameter_type_id[]);
             unique_type_id.push(',');
         }
 
@@ -591,7 +591,7 @@ impl<'tcx> TypeMap<'tcx> {
             ty::FnConverging(ret_ty) => {
                 let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty);
                 let return_type_id = self.get_unique_type_id_as_string(return_type_id);
-                unique_type_id.push_str(return_type_id.as_slice());
+                unique_type_id.push_str(return_type_id[]);
             }
             ty::FnDiverging => {
                 unique_type_id.push_str("!");
@@ -622,7 +622,7 @@ impl<'tcx> TypeMap<'tcx> {
         let enum_type_id = self.get_unique_type_id_of_type(cx, enum_type);
         let enum_variant_type_id = format!("{}::{}",
                                            self.get_unique_type_id_as_string(enum_type_id)
-                                               .as_slice(),
+                                               [],
                                            variant_name);
         let interner_key = self.unique_id_interner.intern(Rc::new(enum_variant_type_id));
         UniqueTypeId(interner_key)
@@ -793,19 +793,19 @@ pub fn create_global_var_metadata(cx: &CrateContext,
                                          create_global_var_metadata() -
                                          Captured var-id refers to \
                                          unexpected ast_item variant: {}",
-                                        var_item).as_slice())
+                                        var_item)[])
                 }
             }
         },
         _ => cx.sess().bug(format!("debuginfo::create_global_var_metadata() \
                                     - Captured var-id refers to unexpected \
                                     ast_map variant: {}",
-                                   var_item).as_slice())
+                                   var_item)[])
     };
 
     let (file_metadata, line_number) = if span != codemap::DUMMY_SP {
         let loc = span_start(cx, span);
-        (file_metadata(cx, loc.file.name.as_slice()), loc.line as c_uint)
+        (file_metadata(cx, loc.file.name[]), loc.line as c_uint)
     } else {
         (UNKNOWN_FILE_METADATA, UNKNOWN_LINE_NUMBER)
     };
@@ -816,7 +816,7 @@ pub fn create_global_var_metadata(cx: &CrateContext,
     let namespace_node = namespace_for_item(cx, ast_util::local_def(node_id));
     let var_name = token::get_ident(ident).get().to_string();
     let linkage_name =
-        namespace_node.mangled_name_of_contained_item(var_name.as_slice());
+        namespace_node.mangled_name_of_contained_item(var_name[]);
     let var_scope = namespace_node.scope;
 
     var_name.with_c_str(|var_name| {
@@ -857,7 +857,7 @@ pub fn create_local_var_metadata(bcx: Block, local: &ast::Local) {
             None => {
                 bcx.sess().span_bug(span,
                     format!("no entry in lllocals table for {}",
-                            node_id).as_slice());
+                            node_id)[]);
             }
         };
 
@@ -911,7 +911,7 @@ pub fn create_captured_var_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                 "debuginfo::create_captured_var_metadata() - \
                                  Captured var-id refers to unexpected \
                                  ast_map variant: {}",
-                                 ast_item).as_slice());
+                                 ast_item)[]);
                 }
             }
         }
@@ -921,7 +921,7 @@ pub fn create_captured_var_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                         format!("debuginfo::create_captured_var_metadata() - \
                                  Captured var-id refers to unexpected \
                                  ast_map variant: {}",
-                                ast_item).as_slice());
+                                ast_item)[]);
         }
     };
 
@@ -1028,7 +1028,7 @@ pub fn create_argument_metadata(bcx: Block, arg: &ast::Arg) {
             None => {
                 bcx.sess().span_bug(span,
                     format!("no entry in lllocals table for {}",
-                            node_id).as_slice());
+                            node_id)[]);
             }
         };
 
@@ -1286,7 +1286,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
             match expr.node {
                 ast::ExprClosure(_, _, ref fn_decl, ref top_level_block) => {
                     let name = format!("fn{}", token::gensym("fn"));
-                    let name = token::str_to_ident(name.as_slice());
+                    let name = token::str_to_ident(name[]);
                     (name, &**fn_decl,
                         // This is not quite right. It should actually inherit
                         // the generics of the enclosing function.
@@ -1318,7 +1318,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                     cx.sess()
                       .bug(format!("create_function_debug_context: \
                                     unexpected sort of node: {}",
-                                    fnitem).as_slice())
+                                    fnitem)[])
                 }
             }
         }
@@ -1329,7 +1329,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
         }
         _ => cx.sess().bug(format!("create_function_debug_context: \
                                     unexpected sort of node: {}",
-                                   fnitem).as_slice())
+                                   fnitem)[])
     };
 
     // This can be the case for functions inlined from another crate
@@ -1338,7 +1338,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
     }
 
     let loc = span_start(cx, span);
-    let file_metadata = file_metadata(cx, loc.file.name.as_slice());
+    let file_metadata = file_metadata(cx, loc.file.name[]);
 
     let function_type_metadata = unsafe {
         let fn_signature = get_function_signature(cx,
@@ -1365,7 +1365,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
     let (linkage_name, containing_scope) = if has_path {
         let namespace_node = namespace_for_item(cx, ast_util::local_def(fn_ast_id));
         let linkage_name = namespace_node.mangled_name_of_contained_item(
-            function_name.as_slice());
+            function_name[]);
         let containing_scope = namespace_node.scope;
         (linkage_name, containing_scope)
     } else {
@@ -1451,7 +1451,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
             signature.push(type_metadata(cx, arg_type, codemap::DUMMY_SP));
         }
 
-        return create_DIArray(DIB(cx), signature.as_slice());
+        return create_DIArray(DIB(cx), signature[]);
     }
 
     fn get_template_parameters<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
@@ -1484,7 +1484,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                 actual_self_type,
                 true);
 
-            name_to_append_suffix_to.push_str(actual_self_type_name.as_slice());
+            name_to_append_suffix_to.push_str(actual_self_type_name[]);
 
             if generics.is_type_parameterized() {
                 name_to_append_suffix_to.push_str(",");
@@ -1524,7 +1524,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
             let actual_type_name = compute_debuginfo_type_name(cx,
                                                                actual_type,
                                                                true);
-            name_to_append_suffix_to.push_str(actual_type_name.as_slice());
+            name_to_append_suffix_to.push_str(actual_type_name[]);
 
             if index != generics.ty_params.len() - 1 {
                 name_to_append_suffix_to.push_str(",");
@@ -1552,7 +1552,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
 
         name_to_append_suffix_to.push('>');
 
-        return create_DIArray(DIB(cx), template_params.as_slice());
+        return create_DIArray(DIB(cx), template_params[]);
     }
 }
 
@@ -1650,7 +1650,7 @@ fn declare_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
     let cx: &CrateContext = bcx.ccx();
 
     let filename = span_start(cx, span).file.name.clone();
-    let file_metadata = file_metadata(cx, filename.as_slice());
+    let file_metadata = file_metadata(cx, filename[]);
 
     let name = token::get_ident(variable_ident);
     let loc = span_start(cx, span);
@@ -1737,7 +1737,7 @@ fn file_metadata(cx: &CrateContext, full_path: &str) -> DIFile {
     let work_dir = cx.sess().working_dir.as_str().unwrap();
     let file_name =
         if full_path.starts_with(work_dir) {
-            full_path.slice(work_dir.len() + 1u, full_path.len())
+            full_path[work_dir.len() + 1u..full_path.len()]
         } else {
             full_path
         };
@@ -1771,7 +1771,7 @@ fn scope_metadata(fcx: &FunctionContext,
 
             fcx.ccx.sess().span_bug(error_reporting_span,
                 format!("debuginfo: Could not find scope info for node {}",
-                        node).as_slice());
+                        node)[]);
         }
     }
 }
@@ -1971,7 +1971,7 @@ impl<'tcx> RecursiveTypeDescription<'tcx> {
                         cx.sess().bug(format!("Forward declaration of potentially recursive type \
                                               '{}' was not found in TypeMap!",
                                               ppaux::ty_to_string(cx.tcx(), unfinished_type))
-                                      .as_slice());
+                                      []);
                     }
                 }
 
@@ -1983,7 +1983,7 @@ impl<'tcx> RecursiveTypeDescription<'tcx> {
                 set_members_of_composite_type(cx,
                                               metadata_stub,
                                               llvm_type,
-                                              member_descriptions.as_slice());
+                                              member_descriptions[]);
                 return MetadataCreationResult::new(metadata_stub, true);
             }
         }
@@ -2055,7 +2055,7 @@ fn prepare_struct_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
 
     let struct_metadata_stub = create_struct_stub(cx,
                                                   struct_llvm_type,
-                                                  struct_name.as_slice(),
+                                                  struct_name[],
                                                   unique_type_id,
                                                   containing_scope);
 
@@ -2116,7 +2116,7 @@ fn prepare_tuple_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
         unique_type_id,
         create_struct_stub(cx,
                            tuple_llvm_type,
-                           tuple_name.as_slice(),
+                           tuple_name[],
                            unique_type_id,
                            UNKNOWN_SCOPE_METADATA),
         tuple_llvm_type,
@@ -2176,7 +2176,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> {
                         set_members_of_composite_type(cx,
                                                       variant_type_metadata,
                                                       variant_llvm_type,
-                                                      member_descriptions.as_slice());
+                                                      member_descriptions[]);
                         MemberDescription {
                             name: "".to_string(),
                             llvm_type: variant_llvm_type,
@@ -2209,7 +2209,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> {
                     set_members_of_composite_type(cx,
                                                   variant_type_metadata,
                                                   variant_llvm_type,
-                                                  member_descriptions.as_slice());
+                                                  member_descriptions[]);
                     vec![
                         MemberDescription {
                             name: "".to_string(),
@@ -2309,7 +2309,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> {
                 set_members_of_composite_type(cx,
                                               variant_type_metadata,
                                               variant_llvm_type,
-                                              variant_member_descriptions.as_slice());
+                                              variant_member_descriptions[]);
 
                 // Encode the information about the null variant in the union
                 // member's name.
@@ -2388,7 +2388,7 @@ fn describe_enum_variant<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                                     .iter()
                                     .map(|&t| type_of::type_of(cx, t))
                                     .collect::<Vec<_>>()
-                                    .as_slice(),
+                                    [],
                       struct_def.packed);
     // Could do some consistency checks here: size, align, field count, discr type
 
@@ -2412,7 +2412,7 @@ fn describe_enum_variant<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
         Some(ref names) => {
             names.iter()
                  .map(|ident| {
-                     token::get_ident(*ident).get().to_string().into_string()
+                     token::get_ident(*ident).get().to_string()
                  }).collect()
         }
         None => variant_info.args.iter().map(|_| "".to_string()).collect()
@@ -2455,7 +2455,7 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
 
     let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, enum_def_id);
     let loc = span_start(cx, definition_span);
-    let file_metadata = file_metadata(cx, loc.file.name.as_slice());
+    let file_metadata = file_metadata(cx, loc.file.name[]);
 
     let variants = ty::enum_variants(cx.tcx(), enum_def_id);
 
@@ -2502,7 +2502,7 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                             UNKNOWN_LINE_NUMBER,
                             bytes_to_bits(discriminant_size),
                             bytes_to_bits(discriminant_align),
-                            create_DIArray(DIB(cx), enumerators_metadata.as_slice()),
+                            create_DIArray(DIB(cx), enumerators_metadata[]),
                             discriminant_base_type_metadata)
                     }
                 });
@@ -2644,7 +2644,7 @@ fn set_members_of_composite_type(cx: &CrateContext,
                                        Please use a rustc built with a newer \
                                         version of LLVM.",
                                        llvm_version_major,
-                                       llvm_version_minor).as_slice());
+                                       llvm_version_minor)[]);
             } else {
                 cx.sess().bug("debuginfo::set_members_of_composite_type() - \
                                Already completed forward declaration re-encountered.");
@@ -2683,7 +2683,7 @@ fn set_members_of_composite_type(cx: &CrateContext,
         .collect();
 
     unsafe {
-        let type_array = create_DIArray(DIB(cx), member_metadata.as_slice());
+        let type_array = create_DIArray(DIB(cx), member_metadata[]);
         llvm::LLVMDICompositeTypeSetTypeArray(composite_type_metadata, type_array);
     }
 }
@@ -2784,7 +2784,7 @@ fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
 
     let member_llvm_types = slice_llvm_type.field_types();
     assert!(slice_layout_is_correct(cx,
-                                    member_llvm_types.as_slice(),
+                                    member_llvm_types[],
                                     element_type));
     let member_descriptions = [
         MemberDescription {
@@ -2806,11 +2806,11 @@ fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
     assert!(member_descriptions.len() == member_llvm_types.len());
 
     let loc = span_start(cx, span);
-    let file_metadata = file_metadata(cx, loc.file.name.as_slice());
+    let file_metadata = file_metadata(cx, loc.file.name[]);
 
     let metadata = composite_type_metadata(cx,
                                            slice_llvm_type,
-                                           slice_type_name.as_slice(),
+                                           slice_type_name[],
                                            unique_type_id,
                                            &member_descriptions,
                                            UNKNOWN_SCOPE_METADATA,
@@ -2856,7 +2856,7 @@ fn subroutine_type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
             llvm::LLVMDIBuilderCreateSubroutineType(
                 DIB(cx),
                 UNKNOWN_FILE_METADATA,
-                create_DIArray(DIB(cx), signature_metadata.as_slice()))
+                create_DIArray(DIB(cx), signature_metadata[]))
         },
         false);
 }
@@ -2882,7 +2882,7 @@ fn trait_pointer_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
             let pp_type_name = ppaux::ty_to_string(cx.tcx(), trait_type);
             cx.sess().bug(format!("debuginfo: Unexpected trait-object type in \
                                    trait_pointer_metadata(): {}",
-                                   pp_type_name.as_slice()).as_slice());
+                                   pp_type_name[])[]);
         }
     };
 
@@ -2896,7 +2896,7 @@ fn trait_pointer_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
 
     composite_type_metadata(cx,
                             trait_llvm_type,
-                            trait_type_name.as_slice(),
+                            trait_type_name[],
                             unique_type_id,
                             &[],
                             containing_scope,
@@ -3019,13 +3019,13 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
         ty::ty_tup(ref elements) => {
             prepare_tuple_metadata(cx,
                                    t,
-                                   elements.as_slice(),
+                                   elements[],
                                    unique_type_id,
                                    usage_site_span).finalize(cx)
         }
         _ => {
             cx.sess().bug(format!("debuginfo: unexpected type in type_metadata: {}",
-                                  sty).as_slice())
+                                  sty)[])
         }
     };
 
@@ -3043,9 +3043,9 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                                                  type id '{}' to already be in \
                                                  the debuginfo::TypeMap but it \
                                                  was not. (Ty = {})",
-                                                unique_type_id_str.as_slice(),
+                                                unique_type_id_str[],
                                                 ppaux::ty_to_string(cx.tcx(), t));
-                    cx.sess().span_bug(usage_site_span, error_message.as_slice());
+                    cx.sess().span_bug(usage_site_span, error_message[]);
                 }
             };
 
@@ -3058,9 +3058,9 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                                                      UniqueTypeId maps in \
                                                      debuginfo::TypeMap. \
                                                      UniqueTypeId={}, Ty={}",
-                            unique_type_id_str.as_slice(),
+                            unique_type_id_str[],
                             ppaux::ty_to_string(cx.tcx(), t));
-                        cx.sess().span_bug(usage_site_span, error_message.as_slice());
+                        cx.sess().span_bug(usage_site_span, error_message[]);
                     }
                 }
                 None => {
@@ -3266,7 +3266,7 @@ fn create_scope_map(cx: &CrateContext,
     {
         // Create a new lexical scope and push it onto the stack
         let loc = cx.sess().codemap().lookup_char_pos(scope_span.lo);
-        let file_metadata = file_metadata(cx, loc.file.name.as_slice());
+        let file_metadata = file_metadata(cx, loc.file.name[]);
         let parent_scope = scope_stack.last().unwrap().scope_metadata;
 
         let scope_metadata = unsafe {
@@ -3391,7 +3391,7 @@ fn create_scope_map(cx: &CrateContext,
                         let file_metadata = file_metadata(cx,
                                                           loc.file
                                                              .name
-                                                             .as_slice());
+                                                             []);
                         let parent_scope = scope_stack.last().unwrap().scope_metadata;
 
                         let scope_metadata = unsafe {
@@ -3925,7 +3925,7 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
         ty::ty_open(_) |
         ty::ty_param(_) => {
             cx.sess().bug(format!("debuginfo: Trying to create type name for \
-                unexpected type: {}", ppaux::ty_to_string(cx.tcx(), t)).as_slice());
+                unexpected type: {}", ppaux::ty_to_string(cx.tcx(), t))[]);
         }
     }
 
@@ -4008,13 +4008,13 @@ impl NamespaceTreeNode {
                 None => {}
             }
             let string = token::get_name(node.name);
-            output.push_str(format!("{}", string.get().len()).as_slice());
+            output.push_str(format!("{}", string.get().len())[]);
             output.push_str(string.get());
         }
 
         let mut name = String::from_str("_ZN");
         fill_nested(self, &mut name);
-        name.push_str(format!("{}", item_name.len()).as_slice());
+        name.push_str(format!("{}", item_name.len())[]);
         name.push_str(item_name);
         name.push('E');
         name
@@ -4022,7 +4022,7 @@ impl NamespaceTreeNode {
 }
 
 fn crate_root_namespace<'a>(cx: &'a CrateContext) -> &'a str {
-    cx.link_meta().crate_name.as_slice()
+    cx.link_meta().crate_name[]
 }
 
 fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc<NamespaceTreeNode> {
@@ -4099,7 +4099,7 @@ fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc<NamespaceTree
             None => {
                 cx.sess().bug(format!("debuginfo::namespace_for_item(): \
                                        path too short for {}",
-                                      def_id).as_slice());
+                                      def_id)[]);
             }
         }
     })
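Aside: besides the `.as_slice()` removals, the debuginfo.rs hunks above also replace the old `str::slice(start, end)` method with range indexing (see the `file_metadata` hunk) and drop a redundant `.into_string()` after `.to_string()`. A small sketch of the range-indexing form, using a hypothetical helper that mirrors the `file_metadata` work-dir stripping, written as it would look in modern Rust:

    // Hypothetical helper mirroring the work-dir stripping in `file_metadata`.
    fn strip_work_dir<'a>(full_path: &'a str, work_dir: &str) -> &'a str {
        if full_path.starts_with(work_dir) {
            // Old form: full_path.slice(work_dir.len() + 1, full_path.len())
            // The open-ended range reaches the end of the string.
            &full_path[work_dir.len() + 1..]
        } else {
            full_path
        }
    }

    fn main() {
        assert_eq!(strip_work_dir("/src/lib.rs", "/src"), "lib.rs");
    }
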
diff --git a/src/librustc_trans/trans/expr.rs b/src/librustc_trans/trans/expr.rs
index 81892e5fa83..36f23f4a0ca 100644
--- a/src/librustc_trans/trans/expr.rs
+++ b/src/librustc_trans/trans/expr.rs
@@ -311,7 +311,7 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                     unsized_info(bcx, k, id, ty_substs[tp_index], |t| t)
                 }
                 _ => bcx.sess().bug(format!("UnsizeStruct with bad sty: {}",
-                                          bcx.ty_to_string(unadjusted_ty)).as_slice())
+                                          bcx.ty_to_string(unadjusted_ty))[])
             },
             &ty::UnsizeVtable(ty::TyTrait { ref principal, .. }, _) => {
                 let substs = principal.substs().with_self_ty(unadjusted_ty).erase_regions();
@@ -442,7 +442,7 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
         let unboxed_ty = match datum_ty.sty {
             ty::ty_uniq(t) => t,
             _ => bcx.sess().bug(format!("Expected ty_uniq, found {}",
-                                        bcx.ty_to_string(datum_ty)).as_slice())
+                                        bcx.ty_to_string(datum_ty))[])
         };
         let result_ty = ty::mk_uniq(tcx, ty::unsize_ty(tcx, unboxed_ty, k, expr.span));
 
@@ -660,7 +660,7 @@ fn trans_datum_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                 expr.span,
                 format!("trans_rvalue_datum_unadjusted reached \
                          fall-through case: {}",
-                        expr.node).as_slice());
+                        expr.node)[]);
         }
     }
 }
@@ -1007,7 +1007,7 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                 expr.span,
                 format!("trans_rvalue_stmt_unadjusted reached \
                          fall-through case: {}",
-                        expr.node).as_slice());
+                        expr.node)[]);
         }
     }
 }
@@ -1033,14 +1033,14 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
             controlflow::trans_if(bcx, expr.id, &**cond, &**thn, els.as_ref().map(|e| &**e), dest)
         }
         ast::ExprMatch(ref discr, ref arms, _) => {
-            _match::trans_match(bcx, expr, &**discr, arms.as_slice(), dest)
+            _match::trans_match(bcx, expr, &**discr, arms[], dest)
         }
         ast::ExprBlock(ref blk) => {
             controlflow::trans_block(bcx, &**blk, dest)
         }
         ast::ExprStruct(_, ref fields, ref base) => {
             trans_struct(bcx,
-                         fields.as_slice(),
+                         fields[],
                          base.as_ref().map(|e| &**e),
                          expr.span,
                          expr.id,
@@ -1052,7 +1052,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
             trans_adt(bcx,
                       expr_ty(bcx, expr),
                       0,
-                      numbered_fields.as_slice(),
+                      numbered_fields[],
                       None,
                       dest,
                       Some(NodeInfo { id: expr.id, span: expr.span }))
@@ -1096,13 +1096,13 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                 trans_overloaded_call(bcx,
                                       expr,
                                       &**f,
-                                      args.as_slice(),
+                                      args[],
                                       Some(dest))
             } else {
                 callee::trans_call(bcx,
                                    expr,
                                    &**f,
-                                   callee::ArgExprs(args.as_slice()),
+                                   callee::ArgExprs(args[]),
                                    dest)
             }
         }
@@ -1110,7 +1110,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
             callee::trans_method_call(bcx,
                                       expr,
                                       &*args[0],
-                                      callee::ArgExprs(args.as_slice()),
+                                      callee::ArgExprs(args[]),
                                       dest)
         }
         ast::ExprBinary(op, ref lhs, ref rhs) => {
@@ -1159,7 +1159,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                 expr.span,
                 format!("trans_rvalue_dps_unadjusted reached fall-through \
                          case: {}",
-                        expr.node).as_slice());
+                        expr.node)[]);
         }
     }
 }
@@ -1207,7 +1207,7 @@ fn trans_def_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
         _ => {
             bcx.tcx().sess.span_bug(ref_expr.span, format!(
                 "Non-DPS def {} referened by {}",
-                def, bcx.node_id_to_string(ref_expr.id)).as_slice());
+                def, bcx.node_id_to_string(ref_expr.id))[]);
         }
     }
 }
@@ -1234,7 +1234,7 @@ fn trans_def_fn_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
             bcx.tcx().sess.span_bug(ref_expr.span, format!(
                     "trans_def_fn_unadjusted invoked on: {} for {}",
                     def,
-                    ref_expr.repr(bcx.tcx())).as_slice());
+                    ref_expr.repr(bcx.tcx()))[]);
         }
     };
 
@@ -1257,7 +1257,7 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                 None => {
                     bcx.sess().bug(format!(
                         "trans_local_var: no llval for upvar {} found",
-                        nid).as_slice());
+                        nid)[]);
                 }
             }
         }
@@ -1267,7 +1267,7 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                 None => {
                     bcx.sess().bug(format!(
                         "trans_local_var: no datum for local/arg {} found",
-                        nid).as_slice());
+                        nid)[]);
                 }
             };
             debug!("take_local(nid={}, v={}, ty={})",
@@ -1277,7 +1277,7 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
         _ => {
             bcx.sess().unimpl(format!(
                 "unsupported def type in trans_local_var: {}",
-                def).as_slice());
+                def)[]);
         }
     }
 }
@@ -1294,11 +1294,11 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>,
 {
     match ty.sty {
         ty::ty_struct(did, ref substs) => {
-            op(0, struct_fields(tcx, did, substs).as_slice())
+            op(0, struct_fields(tcx, did, substs)[])
         }
 
         ty::ty_tup(ref v) => {
-            op(0, tup_fields(v.as_slice()).as_slice())
+            op(0, tup_fields(v[])[])
         }
 
         ty::ty_enum(_, ref substs) => {
@@ -1308,7 +1308,7 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>,
                     tcx.sess.bug(format!(
                         "cannot get field types from the enum type {} \
                          without a node ID",
-                        ty.repr(tcx)).as_slice());
+                        ty.repr(tcx))[]);
                 }
                 Some(node_id) => {
                     let def = tcx.def_map.borrow()[node_id].clone();
@@ -1319,7 +1319,7 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>,
                             op(variant_info.disr_val,
                                struct_fields(tcx,
                                              variant_id,
-                                             substs).as_slice())
+                                             substs)[])
                         }
                         _ => {
                             tcx.sess.bug("resolve didn't map this expr to a \
@@ -1333,7 +1333,7 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>,
         _ => {
             tcx.sess.bug(format!(
                 "cannot get field types from the type {}",
-                ty.repr(tcx)).as_slice());
+                ty.repr(tcx))[]);
         }
     }
 }
@@ -1388,7 +1388,7 @@ fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
         trans_adt(bcx,
                   ty,
                   discr,
-                  numbered_fields.as_slice(),
+                  numbered_fields[],
                   optbase,
                   dest,
                   Some(NodeInfo { id: expr_id, span: expr_span }))
@@ -2025,7 +2025,7 @@ fn trans_imm_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                             t_in.repr(bcx.tcx()),
                                             k_in,
                                             t_out.repr(bcx.tcx()),
-                                            k_out).as_slice())
+                                            k_out)[])
                 }
             }
         }
@@ -2034,7 +2034,7 @@ fn trans_imm_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                     t_in.repr(bcx.tcx()),
                                     k_in,
                                     t_out.repr(bcx.tcx()),
-                                    k_out).as_slice())
+                                    k_out)[])
     };
     return immediate_rvalue_bcx(bcx, newval, t_out).to_expr_datumblock();
 }
@@ -2196,7 +2196,7 @@ fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
             bcx.tcx().sess.span_bug(
                 expr.span,
                 format!("deref invoked on expr of illegal type {}",
-                        datum.ty.repr(bcx.tcx())).as_slice());
+                        datum.ty.repr(bcx.tcx()))[]);
         }
     };
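The hunks above all apply one mechanical rewrite: a `String` produced by `format!` was previously converted to the `&str` expected by the diagnostic helpers with `.as_slice()`, and after the `std::str` stabilization it is sliced with the full-range `[]` syntax of that era instead. Below is a minimal sketch of the same conversion in current Rust, where bare `[]` no longer exists and deref coercion or `.as_str()` does the job; `report` is a hypothetical stand-in for `span_bug`/`bug`, not the trans API.

    // Sketch only: `report` stands in for the &str-taking diagnostics
    // helpers (span_bug, bug, span_err, ...) seen in the hunks above.
    fn report(msg: &str) {
        eprintln!("error: {}", msg);
    }

    fn main() {
        let node = 42;
        // 2014-era: report(format!("fall-through case: {}", node)[]);
        // today: &String deref-coerces to &str, or call .as_str() explicitly.
        report(&format!("fall-through case: {}", node));
        report(format!("fall-through case: {}", node).as_str());
    }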
 
diff --git a/src/librustc_trans/trans/foreign.rs b/src/librustc_trans/trans/foreign.rs
index d0720319930..d7e3476a470 100644
--- a/src/librustc_trans/trans/foreign.rs
+++ b/src/librustc_trans/trans/foreign.rs
@@ -106,7 +106,7 @@ pub fn register_static(ccx: &CrateContext,
     let llty = type_of::type_of(ccx, ty);
 
     let ident = link_name(foreign_item);
-    match attr::first_attr_value_str_by_name(foreign_item.attrs.as_slice(),
+    match attr::first_attr_value_str_by_name(foreign_item.attrs[],
                                              "linkage") {
         // If this is a static with a linkage specified, then we need to handle
         // it a little specially. The typesystem prevents things like &T and
@@ -231,13 +231,13 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
         ty::ty_bare_fn(ref fn_ty) => (fn_ty.abi, fn_ty.sig.clone()),
         _ => ccx.sess().bug("trans_native_call called on non-function type")
     };
-    let llsig = foreign_signature(ccx, &fn_sig, passed_arg_tys.as_slice());
+    let llsig = foreign_signature(ccx, &fn_sig, passed_arg_tys[]);
     let fn_type = cabi::compute_abi_info(ccx,
-                                         llsig.llarg_tys.as_slice(),
+                                         llsig.llarg_tys[],
                                          llsig.llret_ty,
                                          llsig.ret_def);
 
-    let arg_tys: &[cabi::ArgType] = fn_type.arg_tys.as_slice();
+    let arg_tys: &[cabi::ArgType] = fn_type.arg_tys[];
 
     let mut llargs_foreign = Vec::new();
 
@@ -363,7 +363,7 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
 
     let llforeign_retval = CallWithConv(bcx,
                                         llfn,
-                                        llargs_foreign.as_slice(),
+                                        llargs_foreign[],
                                         cc,
                                         Some(attrs));
 
@@ -433,7 +433,7 @@ pub fn trans_foreign_mod(ccx: &CrateContext, foreign_mod: &ast::ForeignMod) {
                 abi => {
                     let ty = ty::node_id_to_type(ccx.tcx(), foreign_item.id);
                     register_foreign_item_fn(ccx, abi, ty,
-                                             lname.get().as_slice());
+                                             lname.get()[]);
                     // Unlike for other items, we shouldn't call
                     // `base::update_linkage` here.  Foreign items have
                     // special linkage requirements, which are handled
@@ -563,7 +563,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                 ccx.sess().bug(format!("build_rust_fn: extern fn {} has ty {}, \
                                         expected a bare fn ty",
                                        ccx.tcx().map.path_to_string(id),
-                                       t.repr(tcx)).as_slice());
+                                       t.repr(tcx))[]);
             }
         };
 
@@ -571,7 +571,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                ccx.tcx().map.path_to_string(id),
                id, t.repr(tcx));
 
-        let llfn = base::decl_internal_rust_fn(ccx, t, ps.as_slice());
+        let llfn = base::decl_internal_rust_fn(ccx, t, ps[]);
         base::set_llvm_fn_attrs(ccx, attrs, llfn);
         base::trans_fn(ccx, decl, body, llfn, param_substs, id, &[]);
         llfn
@@ -744,7 +744,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
         debug!("calling llrustfn = {}, t = {}",
                ccx.tn().val_to_string(llrustfn), t.repr(ccx.tcx()));
         let attributes = base::get_fn_llvm_attributes(ccx, t);
-        let llrust_ret_val = builder.call(llrustfn, llrust_args.as_slice(), Some(attributes));
+        let llrust_ret_val = builder.call(llrustfn, llrust_args[], Some(attributes));
 
         // Get the return value where the foreign fn expects it.
         let llforeign_ret_ty = match tys.fn_ty.ret_ty.cast {
@@ -811,9 +811,9 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
 // the massive simplifications that have occurred.
 
 pub fn link_name(i: &ast::ForeignItem) -> InternedString {
-    match attr::first_attr_value_str_by_name(i.attrs.as_slice(), "link_name") {
+    match attr::first_attr_value_str_by_name(i.attrs[], "link_name") {
         Some(ln) => ln.clone(),
-        None => match weak_lang_items::link_name(i.attrs.as_slice()) {
+        None => match weak_lang_items::link_name(i.attrs[]) {
             Some(name) => name,
             None => token::get_ident(i.ident),
         }
@@ -854,7 +854,7 @@ fn foreign_types_for_fn_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
     };
     let llsig = foreign_signature(ccx, &fn_sig, fn_sig.0.inputs.as_slice());
     let fn_ty = cabi::compute_abi_info(ccx,
-                                       llsig.llarg_tys.as_slice(),
+                                       llsig.llarg_tys[],
                                        llsig.llret_ty,
                                        llsig.ret_def);
     debug!("foreign_types_for_fn_ty(\
@@ -863,9 +863,9 @@ fn foreign_types_for_fn_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
            fn_ty={} -> {}, \
            ret_def={}",
            ty.repr(ccx.tcx()),
-           ccx.tn().types_to_str(llsig.llarg_tys.as_slice()),
+           ccx.tn().types_to_str(llsig.llarg_tys[]),
            ccx.tn().type_to_string(llsig.llret_ty),
-           ccx.tn().types_to_str(fn_ty.arg_tys.iter().map(|t| t.ty).collect::<Vec<_>>().as_slice()),
+           ccx.tn().types_to_str(fn_ty.arg_tys.iter().map(|t| t.ty).collect::<Vec<_>>()[]),
            ccx.tn().type_to_string(fn_ty.ret_ty.ty),
            llsig.ret_def);
 
@@ -915,7 +915,7 @@ fn lltype_for_fn_from_foreign_types(ccx: &CrateContext, tys: &ForeignTypes) -> T
     if tys.fn_sig.0.variadic {
         Type::variadic_func(llargument_tys.as_slice(), &llreturn_ty)
     } else {
-        Type::func(llargument_tys.as_slice(), &llreturn_ty)
+        Type::func(llargument_tys[], &llreturn_ty)
     }
 }
 
diff --git a/src/librustc_trans/trans/glue.rs b/src/librustc_trans/trans/glue.rs
index dea095ecaf5..c1089ea3ad1 100644
--- a/src/librustc_trans/trans/glue.rs
+++ b/src/librustc_trans/trans/glue.rs
@@ -160,7 +160,7 @@ pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Val
 
     let (glue, new_sym) = match ccx.available_drop_glues().borrow().get(&t) {
         Some(old_sym) => {
-            let glue = decl_cdecl_fn(ccx, old_sym.as_slice(), llfnty, ty::mk_nil(ccx.tcx()));
+            let glue = decl_cdecl_fn(ccx, old_sym[], llfnty, ty::mk_nil(ccx.tcx()));
             (glue, None)
         },
         None => {
@@ -231,7 +231,7 @@ fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
             f.sig.0.inputs[0]
         }
         _ => bcx.sess().bug(format!("Expected function type, found {}",
-                                    bcx.ty_to_string(fty)).as_slice())
+                                    bcx.ty_to_string(fty))[])
     };
 
     let (struct_data, info) = if ty::type_is_sized(bcx.tcx(), t) {
@@ -350,7 +350,7 @@ fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info:
             (Mul(bcx, info, C_uint(bcx.ccx(), unit_size)), C_uint(bcx.ccx(), 8u))
         }
         _ => bcx.sess().bug(format!("Unexpected unsized type, found {}",
-                                    bcx.ty_to_string(t)).as_slice())
+                                    bcx.ty_to_string(t))[])
     }
 }
 
@@ -422,7 +422,7 @@ fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: Ty<'tcx>)
                         bcx.sess().warn(format!("Ignoring drop flag in destructor for {}\
                                                  because the struct is unsized. See issue\
                                                  #16758",
-                                                bcx.ty_to_string(t)).as_slice());
+                                                bcx.ty_to_string(t))[]);
                         trans_struct_drop(bcx, t, v0, dtor, did, substs)
                     }
                 }
@@ -504,7 +504,7 @@ pub fn declare_tydesc<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>)
     note_unique_llvm_symbol(ccx, name);
 
     let ty_name = token::intern_and_get_ident(
-        ppaux::ty_to_string(ccx.tcx(), t).as_slice());
+        ppaux::ty_to_string(ccx.tcx(), t)[]);
     let ty_name = C_str_slice(ccx, ty_name);
 
     debug!("--- declare_tydesc {}", ppaux::ty_to_string(ccx.tcx(), t));
@@ -523,8 +523,8 @@ fn declare_generic_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>,
     let fn_nm = mangle_internal_name_by_type_and_seq(
         ccx,
         t,
-        format!("glue_{}", name).as_slice());
-    let llfn = decl_cdecl_fn(ccx, fn_nm.as_slice(), llfnty, ty::mk_nil(ccx.tcx()));
+        format!("glue_{}", name)[]);
+    let llfn = decl_cdecl_fn(ccx, fn_nm[], llfnty, ty::mk_nil(ccx.tcx()));
     note_unique_llvm_symbol(ccx, fn_nm.clone());
     return (fn_nm, llfn);
 }
diff --git a/src/librustc_trans/trans/intrinsic.rs b/src/librustc_trans/trans/intrinsic.rs
index a6f7c849f4d..cc506e409c5 100644
--- a/src/librustc_trans/trans/intrinsic.rs
+++ b/src/librustc_trans/trans/intrinsic.rs
@@ -118,7 +118,7 @@ pub fn check_intrinsics(ccx: &CrateContext) {
                             ""
                         } else {
                             "s"
-                        }).as_slice());
+                        })[]);
         }
         if ty::type_is_fat_ptr(ccx.tcx(), transmute_restriction.to) ||
            ty::type_is_fat_ptr(ccx.tcx(), transmute_restriction.from) {
diff --git a/src/librustc_trans/trans/meth.rs b/src/librustc_trans/trans/meth.rs
index 15f6d7bc3f4..25b8cefa68f 100644
--- a/src/librustc_trans/trans/meth.rs
+++ b/src/librustc_trans/trans/meth.rs
@@ -77,7 +77,7 @@ pub fn trans_impl(ccx: &CrateContext,
         match *impl_item {
             ast::MethodImplItem(ref method) => {
                 if method.pe_generics().ty_params.len() == 0u {
-                    let trans_everywhere = attr::requests_inline(method.attrs.as_slice());
+                    let trans_everywhere = attr::requests_inline(method.attrs[]);
                     for (ref ccx, is_origin) in ccx.maybe_iter(trans_everywhere) {
                         let llfn = get_item_val(ccx, method.id);
                         trans_fn(ccx,
@@ -293,7 +293,7 @@ pub fn trans_static_method_callee(bcx: Block,
         _ => {
             bcx.tcx().sess.bug(
                 format!("static call to invalid vtable: {}",
-                        vtbl.repr(bcx.tcx())).as_slice());
+                        vtbl.repr(bcx.tcx()))[]);
         }
     }
 }
@@ -375,7 +375,7 @@ fn trans_monomorphized_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
         traits::VtableParam(..) => {
             bcx.sess().bug(
                 format!("resolved vtable bad vtable {} in trans",
-                        vtable.repr(bcx.tcx())).as_slice());
+                        vtable.repr(bcx.tcx()))[]);
         }
     }
 }
@@ -566,7 +566,7 @@ pub fn get_vtable<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                 bcx.sess().bug(
                     format!("resolved vtable for {} to bad vtable {} in trans",
                             trait_ref.repr(bcx.tcx()),
-                            vtable.repr(bcx.tcx())).as_slice());
+                            vtable.repr(bcx.tcx()))[]);
             }
         }
     });
@@ -598,7 +598,7 @@ pub fn make_vtable<I: Iterator<ValueRef>>(ccx: &CrateContext,
     let components: Vec<_> = head.into_iter().chain(ptrs).collect();
 
     unsafe {
-        let tbl = C_struct(ccx, components.as_slice(), false);
+        let tbl = C_struct(ccx, components[], false);
         let sym = token::gensym("vtable");
         let vt_gvar = format!("vtable{}", sym.uint()).with_c_str(|buf| {
             llvm::LLVMAddGlobal(ccx.llmod(), val_ty(tbl).to_ref(), buf)
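The vector call sites follow the same pattern: arguments previously passed as `some_vec.as_slice()` to a `&[T]` parameter are now written `some_vec[]`. A sketch of the equivalent call in current Rust, where `&Vec<T>` deref-coerces to `&[T]`; `build_struct` is an illustrative stand-in for helpers such as `C_struct`, not the real signature.

    // Sketch only: build_struct stands in for any &[T]-taking helper,
    // like C_struct(ccx, components[], false) in the hunk above.
    fn build_struct(components: &[u32]) -> usize {
        components.len()
    }

    fn main() {
        let components: Vec<u32> = vec![1, 2, 3];
        // 2014-era: build_struct(components[]);
        // today: &components coerces to &[u32], or slice with &components[..].
        assert_eq!(build_struct(&components), 3);
        assert_eq!(build_struct(&components[..]), 3);
    }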
diff --git a/src/librustc_trans/trans/monomorphize.rs b/src/librustc_trans/trans/monomorphize.rs
index cb3c56ad277..2a6aff56513 100644
--- a/src/librustc_trans/trans/monomorphize.rs
+++ b/src/librustc_trans/trans/monomorphize.rs
@@ -122,7 +122,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
 
         hash = format!("h{}", state.result());
         ccx.tcx().map.with_path(fn_id.node, |path| {
-            exported_name(path, hash.as_slice())
+            exported_name(path, hash[])
         })
     };
 
@@ -132,9 +132,9 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
     let mut hash_id = Some(hash_id);
     let mk_lldecl = |abi: abi::Abi| {
         let lldecl = if abi != abi::Rust {
-            foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, s.as_slice())
+            foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, s[])
         } else {
-            decl_internal_rust_fn(ccx, mono_ty, s.as_slice())
+            decl_internal_rust_fn(ccx, mono_ty, s[])
         };
 
         ccx.monomorphized().borrow_mut().insert(hash_id.take().unwrap(), lldecl);
@@ -168,12 +168,12 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                   ..
               } => {
                   let d = mk_lldecl(abi);
-                  let needs_body = setup_lldecl(d, i.attrs.as_slice());
+                  let needs_body = setup_lldecl(d, i.attrs[]);
                   if needs_body {
                       if abi != abi::Rust {
                           foreign::trans_rust_fn_with_foreign_abi(
                               ccx, &**decl, &**body, &[], d, psubsts, fn_id.node,
-                              Some(hash.as_slice()));
+                              Some(hash[]));
                       } else {
                           trans_fn(ccx, &**decl, &**body, d, psubsts, fn_id.node, &[]);
                       }
@@ -197,7 +197,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                     trans_enum_variant(ccx,
                                        parent,
                                        &*v,
-                                       args.as_slice(),
+                                       args[],
                                        this_tv.disr_val,
                                        psubsts,
                                        d);
@@ -211,7 +211,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
             match *ii {
                 ast::MethodImplItem(ref mth) => {
                     let d = mk_lldecl(abi::Rust);
-                    let needs_body = setup_lldecl(d, mth.attrs.as_slice());
+                    let needs_body = setup_lldecl(d, mth.attrs[]);
                     if needs_body {
                         trans_fn(ccx,
                                  mth.pe_fn_decl(),
@@ -232,7 +232,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
             match *method {
                 ast::ProvidedMethod(ref mth) => {
                     let d = mk_lldecl(abi::Rust);
-                    let needs_body = setup_lldecl(d, mth.attrs.as_slice());
+                    let needs_body = setup_lldecl(d, mth.attrs[]);
                     if needs_body {
                         trans_fn(ccx, mth.pe_fn_decl(), mth.pe_body(), d,
                                  psubsts, mth.id, &[]);
@@ -241,7 +241,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                 }
                 _ => {
                     ccx.sess().bug(format!("can't monomorphize a {}",
-                                           map_node).as_slice())
+                                           map_node)[])
                 }
             }
         }
@@ -249,7 +249,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
             let d = mk_lldecl(abi::Rust);
             set_inline_hint(d);
             base::trans_tuple_struct(ccx,
-                                     struct_def.fields.as_slice(),
+                                     struct_def.fields[],
                                      struct_def.ctor_id.expect("ast-mapped tuple struct \
                                                                 didn't have a ctor id"),
                                      psubsts,
@@ -267,7 +267,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
         ast_map::NodePat(..) |
         ast_map::NodeLocal(..) => {
             ccx.sess().bug(format!("can't monomorphize a {}",
-                                   map_node).as_slice())
+                                   map_node)[])
         }
     };
 
diff --git a/src/librustc_trans/trans/type_.rs b/src/librustc_trans/trans/type_.rs
index 51a0533a7bb..45a2a343066 100644
--- a/src/librustc_trans/trans/type_.rs
+++ b/src/librustc_trans/trans/type_.rs
@@ -102,7 +102,7 @@ impl Type {
     }
 
     pub fn int(ccx: &CrateContext) -> Type {
-        match ccx.tcx().sess.target.target.target_word_size.as_slice() {
+        match ccx.tcx().sess.target.target.target_word_size[] {
             "32" => Type::i32(ccx),
             "64" => Type::i64(ccx),
             tws => panic!("Unsupported target word size for int: {}", tws),
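Here `target_word_size` is a `String` and the `match` arms compare against `&str` literals, so the full-range slice replaces `.as_slice()` for that conversion as well. A rough modern equivalent, assuming only that the word size arrives as a `String`:

    fn int_width(target_word_size: &str) -> u32 {
        match target_word_size {
            "32" => 32,
            "64" => 64,
            tws => panic!("Unsupported target word size for int: {}", tws),
        }
    }

    fn main() {
        let target_word_size = String::from("64");
        // 2014-era: match target_word_size[] { ... }
        // today: match on target_word_size.as_str().
        assert_eq!(int_width(target_word_size.as_str()), 64);
    }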
diff --git a/src/librustc_trans/trans/type_of.rs b/src/librustc_trans/trans/type_of.rs
index 2801e0ccead..2ef0006814a 100644
--- a/src/librustc_trans/trans/type_of.rs
+++ b/src/librustc_trans/trans/type_of.rs
@@ -137,7 +137,7 @@ pub fn type_of_rust_fn<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
     let input_tys = inputs.iter().map(|&arg_ty| type_of_explicit_arg(cx, arg_ty));
     atys.extend(input_tys);
 
-    Type::func(atys.as_slice(), &lloutputtype)
+    Type::func(atys[], &lloutputtype)
 }
 
 // Given a function type and a count of ty params, construct an llvm type
@@ -187,7 +187,7 @@ pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Typ
     let llsizingty = match t.sty {
         _ if !ty::lltype_is_sized(cx.tcx(), t) => {
             cx.sess().bug(format!("trying to take the sizing type of {}, an unsized type",
-                                  ppaux::ty_to_string(cx.tcx(), t)).as_slice())
+                                  ppaux::ty_to_string(cx.tcx(), t))[])
         }
 
         ty::ty_bool => Type::bool(cx),
@@ -241,7 +241,7 @@ pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Typ
 
         ty::ty_infer(..) | ty::ty_param(..) | ty::ty_err(..) => {
             cx.sess().bug(format!("fictitious type {} in sizing_type_of()",
-                                  ppaux::ty_to_string(cx.tcx(), t)).as_slice())
+                                  ppaux::ty_to_string(cx.tcx(), t))[])
         }
         ty::ty_vec(_, None) | ty::ty_trait(..) | ty::ty_str => panic!("unreachable")
     };
@@ -318,7 +318,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type {
           let repr = adt::represent_type(cx, t);
           let tps = substs.types.get_slice(subst::TypeSpace);
           let name = llvm_type_name(cx, an_enum, did, tps);
-          adt::incomplete_type_of(cx, &*repr, name.as_slice())
+          adt::incomplete_type_of(cx, &*repr, name[])
       }
       ty::ty_unboxed_closure(did, _, ref substs) => {
           // Only create the named struct, but don't fill it in. We
@@ -329,7 +329,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type {
           // contents of the VecPerParamSpace to to construct the llvm
           // name
           let name = llvm_type_name(cx, an_unboxed_closure, did, substs.types.as_slice());
-          adt::incomplete_type_of(cx, &*repr, name.as_slice())
+          adt::incomplete_type_of(cx, &*repr, name[])
       }
 
       ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty, ..}) | ty::ty_ptr(ty::mt{ty, ..}) => {
@@ -389,7 +389,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type {
               let repr = adt::represent_type(cx, t);
               let tps = substs.types.get_slice(subst::TypeSpace);
               let name = llvm_type_name(cx, a_struct, did, tps);
-              adt::incomplete_type_of(cx, &*repr, name.as_slice())
+              adt::incomplete_type_of(cx, &*repr, name[])
           }
       }
 
@@ -408,7 +408,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type {
           }
           ty::ty_trait(..) => Type::opaque_trait(cx),
           _ => cx.sess().bug(format!("ty_open with sized type: {}",
-                                     ppaux::ty_to_string(cx.tcx(), t)).as_slice())
+                                     ppaux::ty_to_string(cx.tcx(), t))[])
       },
 
       ty::ty_infer(..) => cx.sess().bug("type_of with ty_infer"),
diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs
index 175763c874e..8e7452f30d3 100644
--- a/src/librustc_typeck/astconv.rs
+++ b/src/librustc_typeck/astconv.rs
@@ -168,7 +168,7 @@ pub fn opt_ast_region_to_region<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(
                                     format!("`{}`", name)
                                 } else {
                                     format!("one of `{}`'s {} elided lifetimes", name, n)
-                                }.as_slice());
+                                }[]);
 
                                 if len == 2 && i == 0 {
                                     m.push_str(" or ");
@@ -323,7 +323,7 @@ fn create_substs_for_ast_path<'tcx,AC,RS>(
                                    format!("wrong number of type arguments: {} {}, found {}",
                                            expected,
                                            required_ty_param_count,
-                                           supplied_ty_param_count).as_slice());
+                                           supplied_ty_param_count)[]);
     } else if supplied_ty_param_count > formal_ty_param_count {
         let expected = if required_ty_param_count < formal_ty_param_count {
             "expected at most"
@@ -334,7 +334,7 @@ fn create_substs_for_ast_path<'tcx,AC,RS>(
                                    format!("wrong number of type arguments: {} {}, found {}",
                                            expected,
                                            formal_ty_param_count,
-                                           supplied_ty_param_count).as_slice());
+                                           supplied_ty_param_count)[]);
     }
 
     if supplied_ty_param_count > required_ty_param_count
@@ -723,7 +723,7 @@ pub fn ast_ty_to_builtin_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(
                         .sess
                         .span_bug(ast_ty.span,
                                   format!("unbound path {}",
-                                          path.repr(this.tcx())).as_slice())
+                                          path.repr(this.tcx()))[])
                 }
                 Some(&d) => d
             };
@@ -920,10 +920,10 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(
                 ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &**ty), None)
             }
             ast::TyObjectSum(ref ty, ref bounds) => {
-                match ast_ty_to_trait_ref(this, rscope, &**ty, bounds.as_slice()) {
+                match ast_ty_to_trait_ref(this, rscope, &**ty, bounds[]) {
                     Ok(trait_ref) => {
                         trait_ref_to_object_type(this, rscope, ast_ty.span,
-                                                 trait_ref, bounds.as_slice())
+                                                 trait_ref, bounds[])
                     }
                     Err(ErrorReported) => {
                         ty::mk_err()
@@ -977,7 +977,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(
                 ty::mk_closure(tcx, fn_decl)
             }
             ast::TyPolyTraitRef(ref bounds) => {
-                conv_ty_poly_trait_ref(this, rscope, ast_ty.span, bounds.as_slice())
+                conv_ty_poly_trait_ref(this, rscope, ast_ty.span, bounds[])
             }
             ast::TyPath(ref path, id) => {
                 let a_def = match tcx.def_map.borrow().get(&id) {
@@ -985,7 +985,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(
                         tcx.sess
                            .span_bug(ast_ty.span,
                                      format!("unbound path {}",
-                                             path.repr(tcx)).as_slice())
+                                             path.repr(tcx))[])
                     }
                     Some(&d) => d
                 };
@@ -1019,7 +1019,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(
                     def::DefMod(id) => {
                         tcx.sess.span_fatal(ast_ty.span,
                             format!("found module name used as a type: {}",
-                                    tcx.map.node_to_string(id.node)).as_slice());
+                                    tcx.map.node_to_string(id.node))[]);
                     }
                     def::DefPrimTy(_) => {
                         panic!("DefPrimTy arm missed in previous ast_ty_to_prim_ty call");
@@ -1038,7 +1038,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(
                                                           .last()
                                                           .unwrap()
                                                           .identifier)
-                                                  .get()).as_slice());
+                                                  .get())[]);
                         ty::mk_err()
                     }
                     def::DefAssociatedPath(typ, assoc_ident) => {
@@ -1084,7 +1084,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(
                         tcx.sess.span_fatal(ast_ty.span,
                                             format!("found value name used \
                                                      as a type: {}",
-                                                    a_def).as_slice());
+                                                    a_def)[]);
                     }
                 }
             }
@@ -1112,7 +1112,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(
                             ast_ty.span,
                             format!("expected constant expr for array \
                                      length: {}",
-                                    *r).as_slice());
+                                    *r)[]);
                     }
                 }
             }
@@ -1235,7 +1235,7 @@ fn ty_of_method_or_bare_fn<'a, 'tcx, AC: AstConv<'tcx>>(
     let input_params = if self_ty.is_some() {
         decl.inputs.slice_from(1)
     } else {
-        decl.inputs.as_slice()
+        decl.inputs[]
     };
     let input_tys = input_params.iter().map(|a| ty_of_arg(this, &rb, a, None));
     let input_pats: Vec<String> = input_params.iter()
@@ -1502,7 +1502,7 @@ pub fn conv_existential_bounds_from_partitioned_bounds<'tcx, AC, RS>(
         this.tcx().sess.span_err(
             b.trait_ref.path.span,
             format!("only the builtin traits can be used \
-                     as closure or object bounds").as_slice());
+                     as closure or object bounds")[]);
     }
 
     let region_bound = compute_region_bound(this,
@@ -1572,7 +1572,7 @@ fn compute_opt_region_bound<'tcx>(tcx: &ty::ctxt<'tcx>,
         tcx.sess.span_err(
             span,
             format!("ambiguous lifetime bound, \
-                     explicit lifetime bound required").as_slice());
+                     explicit lifetime bound required")[]);
     }
     return Some(r);
 }
@@ -1598,7 +1598,7 @@ fn compute_region_bound<'tcx, AC: AstConv<'tcx>, RS:RegionScope>(
                 None => {
                     this.tcx().sess.span_err(
                         span,
-                        format!("explicit lifetime bound required").as_slice());
+                        format!("explicit lifetime bound required")[]);
                     ty::ReStatic
                 }
             }
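Several of these call sites slice a `format!` invocation that interpolates nothing at all (for example the "explicit lifetime bound required" message). Functionally that is just a string literal, which is how current Rust would write it; `span_err` below is a hypothetical stand-in for the session method.

    fn span_err(msg: &str) {
        eprintln!("error: {}", msg);
    }

    fn main() {
        // 2014-era: span_err(format!("explicit lifetime bound required")[]);
        // The format! call allocates a String only to borrow it back as &str;
        // a plain string literal is equivalent and allocation-free.
        span_err("explicit lifetime bound required");
    }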
diff --git a/src/librustc_typeck/check/method/mod.rs b/src/librustc_typeck/check/method/mod.rs
index 3b7eb22e56c..74e690bf68f 100644
--- a/src/librustc_typeck/check/method/mod.rs
+++ b/src/librustc_typeck/check/method/mod.rs
@@ -269,7 +269,7 @@ pub fn lookup_in_trait_adjusted<'a, 'tcx>(fcx: &'a FnCtxt<'a, 'tcx>,
                                 span,
                                 format!(
                                     "trait method is &self but first arg is: {}",
-                                    transformed_self_ty.repr(fcx.tcx())).as_slice());
+                                    transformed_self_ty.repr(fcx.tcx()))[]);
                         }
                     }
                 }
@@ -279,7 +279,7 @@ pub fn lookup_in_trait_adjusted<'a, 'tcx>(fcx: &'a FnCtxt<'a, 'tcx>,
                         span,
                         format!(
                             "unexpected explicit self type in operator method: {}",
-                            method_ty.explicit_self).as_slice());
+                            method_ty.explicit_self)[]);
                 }
             }
         }
@@ -333,7 +333,7 @@ pub fn report_error<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
             if is_field {
                 cx.sess.span_note(span,
                     format!("use `(s.{0})(...)` if you meant to call the \
-                            function stored in the `{0}` field", method_ustring).as_slice());
+                            function stored in the `{0}` field", method_ustring)[]);
             }
 
             if static_sources.len() > 0 {
diff --git a/src/librustc_typeck/check/method/probe.rs b/src/librustc_typeck/check/method/probe.rs
index b5776f9aeb3..961b664e404 100644
--- a/src/librustc_typeck/check/method/probe.rs
+++ b/src/librustc_typeck/check/method/probe.rs
@@ -557,7 +557,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> {
                     self.tcx().sess.span_bug(
                         self.span,
                         format!("No entry for unboxed closure: {}",
-                                closure_def_id.repr(self.tcx())).as_slice());
+                                closure_def_id.repr(self.tcx()))[]);
                 }
             };
 
diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs
index cd9a09efe08..6b7ca399ad2 100644
--- a/src/librustc_typeck/check/mod.rs
+++ b/src/librustc_typeck/check/mod.rs
@@ -518,7 +518,7 @@ fn check_fn<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
     // The free region references will be bound the node_id of the body block.
     let fn_sig = liberate_late_bound_regions(tcx, CodeExtent::from_node_id(body.id), fn_sig);
 
-    let arg_tys = fn_sig.inputs.as_slice();
+    let arg_tys = fn_sig.inputs[];
     let ret_ty = fn_sig.output;
 
     debug!("check_fn(arg_tys={}, ret_ty={}, fn_id={})",
@@ -616,7 +616,7 @@ pub fn check_item(ccx: &CrateCtxt, it: &ast::Item) {
       ast::ItemEnum(ref enum_definition, _) => {
         check_enum_variants(ccx,
                             it.span,
-                            enum_definition.variants.as_slice(),
+                            enum_definition.variants[],
                             it.id);
       }
       ast::ItemFn(ref decl, _, _, _, ref body) => {
@@ -915,7 +915,7 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>,
                         but not in the trait",
                         token::get_name(trait_m.name),
                         ppaux::explicit_self_category_to_str(
-                            &impl_m.explicit_self)).as_slice());
+                            &impl_m.explicit_self))[]);
             return;
         }
         (_, &ty::StaticExplicitSelfCategory) => {
@@ -925,7 +925,7 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>,
                         but not in the impl",
                         token::get_name(trait_m.name),
                         ppaux::explicit_self_category_to_str(
-                            &trait_m.explicit_self)).as_slice());
+                            &trait_m.explicit_self))[]);
             return;
         }
         _ => {
@@ -1229,7 +1229,7 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>,
                 span,
                 format!("lifetime parameters or bounds on method `{}` do \
                          not match the trait declaration",
-                        token::get_name(impl_m.name)).as_slice());
+                        token::get_name(impl_m.name))[]);
             return false;
         }
 
@@ -1281,7 +1281,7 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>,
                          from its counterpart `{}` \
                          declared in the trait",
                         impl_param.name.user_string(tcx),
-                        trait_param.name.user_string(tcx)).as_slice());
+                        trait_param.name.user_string(tcx))[]);
                 true
             } else {
                 false
@@ -1291,14 +1291,14 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>,
                 tcx.sess.span_note(
                     span,
                     format!("the impl is missing the following bounds: `{}`",
-                            missing.user_string(tcx)).as_slice());
+                            missing.user_string(tcx))[]);
             }
 
             if extra.len() != 0 {
                 tcx.sess.span_note(
                     span,
                     format!("the impl has the following extra bounds: `{}`",
-                            extra.user_string(tcx)).as_slice());
+                            extra.user_string(tcx))[]);
             }
 
             if err {
@@ -1557,7 +1557,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 self.tcx().sess.span_bug(
                     span,
                     format!("no type for local variable {}",
-                            nid).as_slice());
+                            nid)[]);
             }
         }
     }
@@ -1805,7 +1805,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             Some(&t) => t,
             None => {
                 self.tcx().sess.bug(format!("no type for expr in fcx {}",
-                                            self.tag()).as_slice());
+                                            self.tag())[]);
             }
         }
     }
@@ -1835,7 +1835,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 self.tcx().sess.bug(
                     format!("no type for node {}: {} in fcx {}",
                             id, self.tcx().map.node_to_string(id),
-                            self.tag()).as_slice());
+                            self.tag())[]);
             }
         }
     }
@@ -2392,7 +2392,7 @@ fn lookup_method_for_for_loop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
         Ok(trait_did) => trait_did,
         Err(ref err_string) => {
             fcx.tcx().sess.span_err(iterator_expr.span,
-                                    err_string.as_slice());
+                                    err_string[]);
             return ty::mk_err()
         }
     };
@@ -2419,7 +2419,7 @@ fn lookup_method_for_for_loop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
                                         format!("`for` loop expression has type `{}` which does \
                                                 not implement the `Iterator` trait; \
                                                 maybe try .iter()",
-                                                ty_string).as_slice());
+                                                ty_string)[]);
             }
             ty::mk_err()
         }
@@ -2457,7 +2457,7 @@ fn lookup_method_for_for_loop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
                                             format!("`next` method of the `Iterator` \
                                                     trait has an unexpected type `{}`",
                                                     fcx.infcx().ty_to_string(return_type))
-                                            .as_slice());
+                                            []);
                     ty::mk_err()
                 }
             }
@@ -2484,7 +2484,7 @@ fn check_method_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
 
         check_argument_types(fcx,
                              sp,
-                             err_inputs.as_slice(),
+                             err_inputs[],
                              callee_expr,
                              args_no_rcvr,
                              autoref_args,
@@ -2941,7 +2941,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
         // Call the generic checker.
         check_argument_types(fcx,
                              call_expr.span,
-                             fn_sig.inputs.as_slice(),
+                             fn_sig.inputs[],
                              f,
                              args,
                              AutorefArgs::No,
@@ -3306,7 +3306,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
                     ty::ty_struct(base_id, ref substs) => {
                         debug!("struct named {}", ppaux::ty_to_string(tcx, base_t));
                         let fields = ty::lookup_struct_fields(tcx, base_id);
-                        lookup_field_ty(tcx, base_id, fields.as_slice(),
+                        lookup_field_ty(tcx, base_id, fields[],
                                         field.node.name, &(*substs))
                     }
                     _ => None
@@ -3369,7 +3369,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
                         if tuple_like {
                             debug!("tuple struct named {}", ppaux::ty_to_string(tcx, base_t));
                             let fields = ty::lookup_struct_fields(tcx, base_id);
-                            lookup_tup_field_ty(tcx, base_id, fields.as_slice(),
+                            lookup_tup_field_ty(tcx, base_id, fields[],
                                                 idx.node, &(*substs))
                         } else {
                             None
@@ -3522,7 +3522,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
                                        class_id,
                                        id,
                                        struct_substs,
-                                       class_fields.as_slice(),
+                                       class_fields[],
                                        fields,
                                        base_expr.is_none());
         if ty::type_is_error(fcx.node_ty(id)) {
@@ -3564,7 +3564,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
                                        variant_id,
                                        id,
                                        substitutions,
-                                       variant_fields.as_slice(),
+                                       variant_fields[],
                                        fields,
                                        true);
         fcx.write_ty(id, enum_type);
@@ -3936,8 +3936,8 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
           let f_ty = fcx.expr_ty(&**f);
 
           let args: Vec<_> = args.iter().map(|x| x).collect();
-          if !try_overloaded_call(fcx, expr, &**f, f_ty, args.as_slice()) {
-              check_call(fcx, expr, &**f, args.as_slice());
+          if !try_overloaded_call(fcx, expr, &**f, f_ty, args[]) {
+              check_call(fcx, expr, &**f, args[]);
               let args_err = args.iter().fold(false,
                  |rest_err, a| {
                      // is this not working?
@@ -3949,7 +3949,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
           }
       }
       ast::ExprMethodCall(ident, ref tps, ref args) => {
-        check_method_call(fcx, expr, ident, args.as_slice(), tps.as_slice(), lvalue_pref);
+        check_method_call(fcx, expr, ident, args[], tps[], lvalue_pref);
         let arg_tys = args.iter().map(|a| fcx.expr_ty(&**a));
         let  args_err = arg_tys.fold(false,
              |rest_err, a| {
@@ -4074,7 +4074,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
         let struct_id = match def {
             Some(def::DefVariant(enum_id, variant_id, true)) => {
                 check_struct_enum_variant(fcx, id, expr.span, enum_id,
-                                          variant_id, fields.as_slice());
+                                          variant_id, fields[]);
                 enum_id
             }
             Some(def::DefTrait(def_id)) => {
@@ -4083,7 +4083,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
                     pprust::path_to_string(path));
                 check_struct_fields_on_error(fcx,
                                              id,
-                                             fields.as_slice(),
+                                             fields[],
                                              base_expr);
                 def_id
             },
@@ -4096,7 +4096,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
                                                  id,
                                                  expr.span,
                                                  struct_did,
-                                                 fields.as_slice(),
+                                                 fields[],
                                                  base_expr.as_ref().map(|e| &**e));
                     }
                     _ => {
@@ -4105,7 +4105,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
                             pprust::path_to_string(path));
                         check_struct_fields_on_error(fcx,
                                                      id,
-                                                     fields.as_slice(),
+                                                     fields[],
                                                      base_expr);
                     }
                 }
@@ -4146,7 +4146,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
                                          fcx.infcx()
                                             .ty_to_string(
                                                 actual_structure_type),
-                                         type_error_description).as_slice());
+                                         type_error_description)[]);
                     ty::note_and_explain_type_err(tcx, &type_error);
                 }
             }
@@ -4755,7 +4755,7 @@ pub fn check_enum_variants(ccx: &CrateCtxt,
     }
 
     let hint = *ty::lookup_repr_hints(ccx.tcx, ast::DefId { krate: ast::LOCAL_CRATE, node: id })
-                    .as_slice().get(0).unwrap_or(&attr::ReprAny);
+                    [].get(0).unwrap_or(&attr::ReprAny);
 
     if hint != attr::ReprAny && vs.len() <= 1 {
         if vs.len() == 1 {
@@ -5438,7 +5438,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
             "get_tydesc" => {
               let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) {
                   Ok(t) => t,
-                  Err(s) => { tcx.sess.span_fatal(it.span, s.as_slice()); }
+                  Err(s) => { tcx.sess.span_fatal(it.span, s[]); }
               };
               let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt {
                   ty: tydesc_ty,
@@ -5454,7 +5454,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
                                 ty::mk_struct(ccx.tcx, did,
                                               subst::Substs::empty())),
                     Err(msg) => {
-                        tcx.sess.span_fatal(it.span, msg.as_slice());
+                        tcx.sess.span_fatal(it.span, msg[]);
                     }
                 }
             },
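The repr-hint lookup in `check_enum_variants` above chains the new slice directly: `[]` followed by `.get(0).unwrap_or(&attr::ReprAny)` picks the first hint or falls back to a default. In current Rust, `first()` on the vector expresses the same thing without the intermediate slice; `ReprHint` below is a stand-in enum, not `attr::ReprAttr`.

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum ReprHint {
        Any,
        C,
        Packed,
    }

    fn main() {
        let hints: Vec<ReprHint> = vec![ReprHint::C, ReprHint::Packed];
        // 2014-era: *hints[].get(0).unwrap_or(&ReprHint::Any)
        // today: Vec derefs to a slice, and first() returns Option<&T>.
        let hint = *hints.first().unwrap_or(&ReprHint::Any);
        assert_eq!(hint, ReprHint::C);

        let empty: Vec<ReprHint> = Vec::new();
        assert_eq!(*empty.first().unwrap_or(&ReprHint::Any), ReprHint::Any);
    }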
diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs
index 8e70b8ff0da..22502c0dd1a 100644
--- a/src/librustc_typeck/check/regionck.rs
+++ b/src/librustc_typeck/check/regionck.rs
@@ -251,7 +251,7 @@ fn region_of_def(fcx: &FnCtxt, def: def::Def) -> ty::Region {
         }
         _ => {
             tcx.sess.bug(format!("unexpected def in region_of_def: {}",
-                                 def).as_slice())
+                                 def)[])
         }
     }
 }
@@ -345,13 +345,13 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> {
             Some(f) => f,
             None => {
                 self.tcx().sess.bug(
-                    format!("No fn-sig entry for id={}", id).as_slice());
+                    format!("No fn-sig entry for id={}", id)[]);
             }
         };
 
         let len = self.region_param_pairs.len();
-        self.relate_free_regions(fn_sig.as_slice(), body.id);
-        link_fn_args(self, CodeExtent::from_node_id(body.id), fn_decl.inputs.as_slice());
+        self.relate_free_regions(fn_sig[], body.id);
+        link_fn_args(self, CodeExtent::from_node_id(body.id), fn_decl.inputs[]);
         self.visit_block(body);
         self.visit_region_obligations(body.id);
         self.region_param_pairs.truncate(len);
@@ -738,7 +738,7 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) {
         }
 
         ast::ExprMatch(ref discr, ref arms, _) => {
-            link_match(rcx, &**discr, arms.as_slice());
+            link_match(rcx, &**discr, arms[]);
 
             visit::walk_expr(rcx, expr);
         }
@@ -1186,7 +1186,7 @@ fn constrain_autoderefs<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>,
                     ty::ty_rptr(r, ref m) => (m.mutbl, r),
                     _ => rcx.tcx().sess.span_bug(deref_expr.span,
                             format!("bad overloaded deref type {}",
-                                    method.ty.repr(rcx.tcx())).as_slice())
+                                    method.ty.repr(rcx.tcx()))[])
                 };
                 {
                     let mc = mc::MemCategorizationContext::new(rcx);
@@ -1560,7 +1560,7 @@ fn link_reborrowed_region<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>,
                         span,
                         format!("Illegal upvar id: {}",
                                 upvar_id.repr(
-                                    rcx.tcx())).as_slice());
+                                    rcx.tcx()))[]);
                 }
             }
         }
diff --git a/src/librustc_typeck/check/regionmanip.rs b/src/librustc_typeck/check/regionmanip.rs
index 112ad1fb5b9..eaf638e388e 100644
--- a/src/librustc_typeck/check/regionmanip.rs
+++ b/src/librustc_typeck/check/regionmanip.rs
@@ -138,7 +138,7 @@ impl<'a, 'tcx> Wf<'a, 'tcx> {
             ty::ty_open(_) => {
                 self.tcx.sess.bug(
                     format!("Unexpected type encountered while doing wf check: {}",
-                            ty.repr(self.tcx)).as_slice());
+                            ty.repr(self.tcx))[]);
             }
         }
     }
diff --git a/src/librustc_typeck/check/vtable.rs b/src/librustc_typeck/check/vtable.rs
index 4db795a1fda..e23bf46b564 100644
--- a/src/librustc_typeck/check/vtable.rs
+++ b/src/librustc_typeck/check/vtable.rs
@@ -77,7 +77,7 @@ pub fn check_object_cast<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
                 source_expr.span,
                 format!("can only cast an boxed pointer \
                          to a boxed object, not a {}",
-                        ty::ty_sort_string(fcx.tcx(), source_ty)).as_slice());
+                        ty::ty_sort_string(fcx.tcx(), source_ty))[]);
         }
 
         (_, &ty::ty_rptr(..)) => {
@@ -85,7 +85,7 @@ pub fn check_object_cast<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
                 source_expr.span,
                 format!("can only cast a &-pointer \
                          to an &-object, not a {}",
-                        ty::ty_sort_string(fcx.tcx(), source_ty)).as_slice());
+                        ty::ty_sort_string(fcx.tcx(), source_ty))[]);
         }
 
         _ => {
@@ -164,7 +164,7 @@ fn check_object_safety_inner<'tcx>(tcx: &ty::ctxt<'tcx>,
             trait_name);
 
         for msg in errors {
-            tcx.sess.note(msg.as_slice());
+            tcx.sess.note(msg[]);
         }
     }
 
@@ -455,7 +455,7 @@ pub fn maybe_report_ambiguity<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
                     format!(
                         "unable to infer enough type information about `{}`; type annotations \
                          required",
-                        self_ty.user_string(fcx.tcx())).as_slice());
+                        self_ty.user_string(fcx.tcx()))[]);
             } else {
                 fcx.tcx().sess.span_err(
                     obligation.cause.span,
@@ -464,7 +464,7 @@ pub fn maybe_report_ambiguity<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
                          locate the impl of the trait `{}` for \
                          the type `{}`; type annotations required",
                         trait_ref.user_string(fcx.tcx()),
-                        self_ty.user_string(fcx.tcx())).as_slice());
+                        self_ty.user_string(fcx.tcx()))[]);
                 note_obligation_cause(fcx, obligation);
             }
         }
@@ -477,7 +477,7 @@ pub fn maybe_report_ambiguity<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
                  cannot locate the impl of the trait `{}` for \
                  the type `{}`",
                 trait_ref.user_string(fcx.tcx()),
-                self_ty.user_string(fcx.tcx())).as_slice());
+                self_ty.user_string(fcx.tcx()))[]);
     }
 }
 
diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs
index 5d0bb6622c2..c08eeb6e13e 100644
--- a/src/librustc_typeck/coherence/mod.rs
+++ b/src/librustc_typeck/coherence/mod.rs
@@ -488,7 +488,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> {
                                  format!("the trait `Copy` may not be \
                                           implemented for this type; field \
                                           `{}` does not implement `Copy`",
-                                         token::get_name(name)).as_slice())
+                                         token::get_name(name))[])
                 }
                 Err(ty::VariantDoesNotImplementCopy(name)) => {
                     tcx.sess
@@ -496,7 +496,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> {
                                  format!("the trait `Copy` may not be \
                                           implemented for this type; variant \
                                           `{}` does not implement `Copy`",
-                                         token::get_name(name)).as_slice())
+                                         token::get_name(name))[])
                 }
                 Err(ty::TypeIsStructural) => {
                     tcx.sess
diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs
index 3f59b50337f..22c9a2e7b32 100644
--- a/src/librustc_typeck/collect.rs
+++ b/src/librustc_typeck/collect.rs
@@ -171,7 +171,7 @@ impl<'a, 'tcx> AstConv<'tcx> for CrateCtxt<'a, 'tcx> {
             x => {
                 self.tcx.sess.bug(format!("unexpected sort of node \
                                            in get_item_ty(): {}",
-                                          x).as_slice());
+                                          x)[]);
             }
         }
     }
@@ -217,7 +217,7 @@ pub fn get_enum_variant_types<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
             ast::TupleVariantKind(ref args) if args.len() > 0 => {
                 let rs = ExplicitRscope;
                 let input_tys: Vec<_> = args.iter().map(|va| ccx.to_ty(&rs, &*va.ty)).collect();
-                ty::mk_ctor_fn(tcx, input_tys.as_slice(), enum_ty)
+                ty::mk_ctor_fn(tcx, input_tys[], enum_ty)
             }
 
             ast::TupleVariantKind(_) => {
@@ -270,7 +270,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
                                     ccx,
                                     trait_id,
                                     &trait_def.generics,
-                                    trait_items.as_slice(),
+                                    trait_items[],
                                     &m.id,
                                     &m.ident.name,
                                     &m.explicit_self,
@@ -284,7 +284,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
                                     ccx,
                                     trait_id,
                                     &trait_def.generics,
-                                    trait_items.as_slice(),
+                                    trait_items[],
                                     &m.id,
                                     &m.pe_ident().name,
                                     m.pe_explicit_self(),
@@ -379,7 +379,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
             let tmcx = TraitMethodCtxt {
                 ccx: ccx,
                 trait_id: local_def(trait_id),
-                trait_items: trait_items.as_slice(),
+                trait_items: trait_items[],
                 method_generics: &ty_generics,
             };
             let trait_self_ty = ty::mk_self_type(tmcx.tcx(),
@@ -1040,7 +1040,7 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) {
             write_ty_to_tcx(tcx, it.id, pty.ty);
             get_enum_variant_types(ccx,
                                    pty.ty,
-                                   enum_definition.variants.as_slice(),
+                                   enum_definition.variants[],
                                    generics);
         },
         ast::ItemImpl(_,
@@ -1086,7 +1086,7 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) {
                                             ast_trait_ref.ref_id).def_id())
                     }
                 },
-                impl_items: impl_items.as_slice(),
+                impl_items: impl_items[],
                 impl_generics: &ty_generics,
             };
 
@@ -1184,7 +1184,7 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) {
                                                          local_def(it.id));
             let convert_method_context =
                 TraitConvertMethodContext(local_def(it.id),
-                                          trait_methods.as_slice());
+                                          trait_methods[]);
             convert_methods(ccx,
                             convert_method_context,
                             TraitContainer(local_def(it.id)),
@@ -1279,7 +1279,7 @@ pub fn convert_struct<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
                         |field| (*tcx.tcache.borrow())[
                             local_def(field.node.id)].ty).collect();
                 let ctor_fn_ty = ty::mk_ctor_fn(tcx,
-                                                inputs.as_slice(),
+                                                inputs[],
                                                 selfty);
                 write_ty_to_tcx(tcx, ctor_id, ctor_fn_ty);
                 tcx.tcache.borrow_mut().insert(local_def(ctor_id),
@@ -1320,7 +1320,7 @@ fn get_trait_def<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
         ast_map::NodeItem(item) => trait_def_of_item(ccx, &*item),
         _ => {
             ccx.tcx.sess.bug(format!("get_trait_def({}): not an item",
-                                     trait_id.node).as_slice())
+                                     trait_id.node)[])
         }
     }
 }
@@ -1345,7 +1345,7 @@ pub fn trait_def_of_item<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
         ref s => {
             tcx.sess.span_bug(
                 it.span,
-                format!("trait_def_of_item invoked on {}", s).as_slice());
+                format!("trait_def_of_item invoked on {}", s)[]);
         }
     };
 
@@ -1585,8 +1585,8 @@ fn ty_generics_for_type_or_impl<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
                                           -> ty::Generics<'tcx> {
     ty_generics(ccx,
                 subst::TypeSpace,
-                generics.lifetimes.as_slice(),
-                generics.ty_params.as_slice(),
+                generics.lifetimes[],
+                generics.ty_params[],
                 ty::Generics::empty(),
                 &generics.where_clause,
                 create_type_parameters_for_associated_types)
@@ -1602,8 +1602,8 @@ fn ty_generics_for_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
     let mut generics =
         ty_generics(ccx,
                     subst::TypeSpace,
-                    ast_generics.lifetimes.as_slice(),
-                    ast_generics.ty_params.as_slice(),
+                    ast_generics.lifetimes[],
+                    ast_generics.ty_params[],
                     ty::Generics::empty(),
                     &ast_generics.where_clause,
                     DontCreateTypeParametersForAssociatedTypes);
@@ -1672,8 +1672,8 @@ fn ty_generics_for_fn_or_method<'tcx,AC>(
     let early_lifetimes = resolve_lifetime::early_bound_lifetimes(generics);
     ty_generics(this,
                 subst::FnSpace,
-                early_lifetimes.as_slice(),
-                generics.ty_params.as_slice(),
+                early_lifetimes[],
+                generics.ty_params[],
                 base_generics,
                 &generics.where_clause,
                 create_type_parameters_for_associated_types)
@@ -1701,7 +1701,7 @@ fn add_unsized_bound<'tcx,AC>(this: &AC,
                                                        a default. \
                                                        Only `Sized?` is \
                                                        supported",
-                                                      desc).as_slice());
+                                                      desc)[]);
                     ty::try_add_builtin_trait(this.tcx(),
                                               kind_id,
                                               bounds);
@@ -1973,7 +1973,7 @@ fn get_or_create_type_parameter_def<'tcx,AC>(this: &AC,
     let bounds = compute_bounds(this,
                                 param.ident.name,
                                 param_ty,
-                                param.bounds.as_slice(),
+                                param.bounds[],
                                 &param.unbound,
                                 param.span);
     let default = match param.default {
@@ -2054,7 +2054,7 @@ fn check_bounds_compatible<'tcx>(tcx: &ty::ctxt<'tcx>,
     if !param_bounds.builtin_bounds.contains(&ty::BoundSized) {
         ty::each_bound_trait_and_supertraits(
             tcx,
-            param_bounds.trait_bounds.as_slice(),
+            param_bounds.trait_bounds[],
             |trait_ref| {
                 let trait_def = ty::lookup_trait_def(tcx, trait_ref.def_id());
                 if trait_def.bounds.builtin_bounds.contains(&ty::BoundSized) {
diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs
index 49c5f13fa73..5a8f58274cc 100644
--- a/src/librustc_typeck/lib.rs
+++ b/src/librustc_typeck/lib.rs
@@ -196,7 +196,7 @@ fn require_same_types<'a, 'tcx, M>(tcx: &ty::ctxt<'tcx>,
                               format!("{}: {}",
                                       msg(),
                                       ty::type_err_to_str(tcx,
-                                                          terr)).as_slice());
+                                                          terr))[]);
             ty::note_and_explain_type_err(tcx, terr);
             false
         }
@@ -245,7 +245,7 @@ fn check_main_fn_ty(ccx: &CrateCtxt,
                               format!("main has a non-function type: found \
                                        `{}`",
                                       ppaux::ty_to_string(tcx,
-                                                       main_t)).as_slice());
+                                                       main_t))[]);
         }
     }
 }
@@ -296,8 +296,7 @@ fn check_start_fn_ty(ccx: &CrateCtxt,
             tcx.sess.span_bug(start_span,
                               format!("start has a non-function type: found \
                                        `{}`",
-                                      ppaux::ty_to_string(tcx,
-                                                       start_t)).as_slice());
+                                      ppaux::ty_to_string(tcx, start_t))[]);
         }
     }
 }
diff --git a/src/librustc_typeck/variance.rs b/src/librustc_typeck/variance.rs
index ef0d1bc3859..754294a3b8e 100644
--- a/src/librustc_typeck/variance.rs
+++ b/src/librustc_typeck/variance.rs
@@ -556,7 +556,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
             None => {
                 self.tcx().sess.bug(format!(
                         "no inferred index entry for {}",
-                        self.tcx().map.node_to_string(param_id)).as_slice());
+                        self.tcx().map.node_to_string(param_id))[]);
             }
         }
     }
@@ -834,7 +834,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
                 self.tcx().sess.bug(
                     format!("unexpected type encountered in \
                             variance inference: {}",
-                            ty.repr(self.tcx())).as_slice());
+                            ty.repr(self.tcx()))[]);
             }
         }
     }
@@ -911,7 +911,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
                     .sess
                     .bug(format!("unexpected region encountered in variance \
                                   inference: {}",
-                                 region.repr(self.tcx())).as_slice());
+                                 region.repr(self.tcx()))[]);
             }
         }
     }
@@ -1046,7 +1046,7 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> {
             // attribute and report an error with various results if found.
             if ty::has_attr(tcx, item_def_id, "rustc_variance") {
                 let found = item_variances.repr(tcx);
-                tcx.sess.span_err(tcx.map.span(item_id), found.as_slice());
+                tcx.sess.span_err(tcx.map.span(item_id), found[]);
             }
 
             let newly_added = tcx.item_variance_map.borrow_mut()
diff --git a/src/librustdoc/externalfiles.rs b/src/librustdoc/externalfiles.rs
index 08fb94a801c..25a20e5998b 100644
--- a/src/librustdoc/externalfiles.rs
+++ b/src/librustdoc/externalfiles.rs
@@ -36,7 +36,7 @@ impl ExternalHtml {
 pub fn load_string(input: &Path) -> io::IoResult<Option<String>> {
     let mut f = try!(io::File::open(input));
     let d = try!(f.read_to_end());
-    Ok(str::from_utf8(d.as_slice()).map(|s| s.to_string()))
+    Ok(str::from_utf8(d.as_slice()).map(|s| s.to_string()).ok())
 }
 
 macro_rules! load_or_return {
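
Another recurring change shows up in this hunk: `str::from_utf8` now reports failure through a `Result` rather than an `Option`, so call sites that only care about the success case append `.ok()`. A minimal sketch of the new shape, for a caller that still wants the old `Option` behaviour:

    use std::str;

    // Illustrative helper, not from the patch.
    fn as_text(bytes: &[u8]) -> Option<&str> {
        // formerly: str::from_utf8(bytes)       -> Option<&str>
        // now:      str::from_utf8(bytes).ok()  recovers that Option from the Result
        str::from_utf8(bytes).ok()
    }
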
diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs
index e01cbbc812b..a2d5530692c 100644
--- a/src/librustdoc/html/format.rs
+++ b/src/librustdoc/html/format.rs
@@ -16,7 +16,7 @@
 //! them in the future to instead emit any format desired.
 
 use std::fmt;
-use std::string::String;
+use std::iter::repeat;
 
 use syntax::ast;
 use syntax::ast_util;
@@ -198,12 +198,12 @@ fn resolved_path(w: &mut fmt::Formatter, did: ast::DefId, p: &clean::Path,
     path(w, p, print_all,
         |cache, loc| {
             if ast_util::is_local(did) || cache.inlined.contains(&did) {
-                Some(("../".repeat(loc.len())).to_string())
+                Some(repeat("../").take(loc.len()).collect::<String>())
             } else {
                 match cache.extern_locations[did.krate] {
                     render::Remote(ref s) => Some(s.to_string()),
                     render::Local => {
-                        Some(("../".repeat(loc.len())).to_string())
+                        Some(repeat("../").take(loc.len()).collect::<String>())
                     }
                     render::Unknown => None,
                 }
@@ -324,7 +324,7 @@ fn primitive_link(f: &mut fmt::Formatter,
             let len = CURRENT_LOCATION_KEY.with(|s| s.borrow().len());
             let len = if len == 0 {0} else {len - 1};
             try!(write!(f, "<a href='{}primitive.{}.html'>",
-                        "../".repeat(len),
+                        repeat("../").take(len).collect::<String>(),
                         prim.to_url_str()));
             needs_termination = true;
         }
@@ -337,7 +337,7 @@ fn primitive_link(f: &mut fmt::Formatter,
                 render::Remote(ref s) => Some(s.to_string()),
                 render::Local => {
                     let len = CURRENT_LOCATION_KEY.with(|s| s.borrow().len());
-                    Some("../".repeat(len))
+                    Some(repeat("../").take(len).collect::<String>())
                 }
                 render::Unknown => None,
             };
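
Here the pattern is different: the old `"../".repeat(n)` calls on a string slice are dropped in favour of the `std::iter::repeat` adaptor. A sketch, assuming `depth` is the number of directory levels to climb (the `up` helper is illustrative):

    use std::iter::repeat;

    fn up(depth: uint) -> String {
        // formerly: "../".repeat(depth)
        repeat("../").take(depth).collect()
    }
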
diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs
index 111650f565c..c936f6a0819 100644
--- a/src/librustdoc/html/highlight.rs
+++ b/src/librustdoc/html/highlight.rs
@@ -34,7 +34,7 @@ pub fn highlight(src: &str, class: Option<&str>, id: Option<&str>) -> String {
          class,
          id,
          &mut out).unwrap();
-    String::from_utf8_lossy(out[]).into_string()
+    String::from_utf8_lossy(out[]).into_owned()
 }
 
 /// Exhausts the `lexer` writing the output into `out`.
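
`String::from_utf8_lossy` hands back a copy-on-write string, and `.into_string()` on it gives way to `.into_owned()` (later libstd hunks use `.to_string()` for the same purpose). A sketch with a hypothetical byte buffer:

    // Illustrative helper, not the actual highlight code.
    fn render(out: &[u8]) -> String {
        // formerly: String::from_utf8_lossy(out).into_string()
        String::from_utf8_lossy(out).into_owned()
    }
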
diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs
index efec620bca7..dc31cfae99c 100644
--- a/src/librustdoc/html/render.rs
+++ b/src/librustdoc/html/render.rs
@@ -42,8 +42,8 @@ use std::fmt;
 use std::io::fs::PathExtensions;
 use std::io::{fs, File, BufferedWriter, BufferedReader};
 use std::io;
+use std::iter::repeat;
 use std::str;
-use std::string::String;
 use std::sync::Arc;
 
 use externalfiles::ExternalHtml;
@@ -1186,7 +1186,8 @@ impl Context {
                                     &Sidebar{ cx: cx, item: it },
                                     &Item{ cx: cx, item: it }));
             } else {
-                let mut url = "../".repeat(cx.current.len());
+                let mut url = repeat("../").take(cx.current.len())
+                                           .collect::<String>();
                 match cache().paths.get(&it.def_id) {
                     Some(&(ref names, _)) => {
                         for name in names[..names.len() - 1].iter() {
@@ -1382,7 +1383,8 @@ impl<'a> fmt::Show for Item<'a> {
             let amt = if self.ismodule() { cur.len() - 1 } else { cur.len() };
             for (i, component) in cur.iter().enumerate().take(amt) {
                 try!(write!(fmt, "<a href='{}index.html'>{}</a>::<wbr>",
-                            "../".repeat(cur.len() - i - 1),
+                            repeat("../").take(cur.len() - i - 1)
+                                         .collect::<String>(),
                             component.as_slice()));
             }
         }
diff --git a/src/librustdoc/passes.rs b/src/librustdoc/passes.rs
index e368d7f9332..9a67b479106 100644
--- a/src/librustdoc/passes.rs
+++ b/src/librustdoc/passes.rs
@@ -319,7 +319,7 @@ pub fn unindent(s: &str) -> String {
         let ignore_previous_indents =
             saw_first_line &&
             !saw_second_line &&
-            !line.is_whitespace();
+            !line.chars().all(|c| c.is_whitespace());
 
         let min_indent = if ignore_previous_indents {
             uint::MAX
@@ -331,7 +331,7 @@ pub fn unindent(s: &str) -> String {
             saw_second_line = true;
         }
 
-        if line.is_whitespace() {
+        if line.chars().all(|c| c.is_whitespace()) {
             min_indent
         } else {
             saw_first_line = true;
@@ -353,7 +353,7 @@ pub fn unindent(s: &str) -> String {
     if lines.len() >= 1 {
         let mut unindented = vec![ lines[0].trim().to_string() ];
         unindented.push_all(lines.tail().iter().map(|&line| {
-            if line.is_whitespace() {
+            if line.chars().all(|c| c.is_whitespace()) {
                 line.to_string()
             } else {
                 assert!(line.len() >= min_indent);
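
The unindent pass appears to have lost the whole-string `is_whitespace()` shortcut on `&str`, so the blank-line check is spelled out over the characters. A sketch (the `is_blank` name is made up):

    fn is_blank(line: &str) -> bool {
        // formerly: line.is_whitespace()
        line.chars().all(|c| c.is_whitespace())
    }
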
diff --git a/src/libserialize/json.rs b/src/libserialize/json.rs
index 3181e28a121..c4f071994dc 100644
--- a/src/libserialize/json.rs
+++ b/src/libserialize/json.rs
@@ -202,10 +202,11 @@ use std::collections::{HashMap, BTreeMap};
 use std::{char, f64, fmt, io, num, str};
 use std::mem::{swap, transmute};
 use std::num::{Float, FPNaN, FPInfinite, Int};
-use std::str::{FromStr, ScalarValue};
+use std::str::{FromStr};
 use std::string;
-use std::vec::Vec;
 use std::ops;
+use unicode::str as unicode_str;
+use unicode::str::Utf16Item;
 
 use Encodable;
 
@@ -1001,7 +1002,7 @@ impl Json {
     /// Returns None otherwise.
     pub fn as_string<'a>(&'a self) -> Option<&'a str> {
         match *self {
-            Json::String(ref s) => Some(s.as_slice()),
+            Json::String(ref s) => Some(s[]),
             _ => None
         }
     }
@@ -1585,8 +1586,8 @@ impl<T: Iterator<char>> Parser<T> {
                             }
 
                             let buf = [n1, try!(self.decode_hex_escape())];
-                            match str::utf16_items(buf.as_slice()).next() {
-                                Some(ScalarValue(c)) => res.push(c),
+                            match unicode_str::utf16_items(&buf).next() {
+                                Some(Utf16Item::ScalarValue(c)) => res.push(c),
                                 _ => return self.error(LoneLeadingSurrogateInHexEscape),
                             }
                         }
@@ -1934,7 +1935,7 @@ pub fn from_reader(rdr: &mut io::Reader) -> Result<Json, BuilderError> {
         Ok(c)  => c,
         Err(e) => return Err(io_error_to_error(e))
     };
-    let s = match str::from_utf8(contents.as_slice()) {
+    let s = match str::from_utf8(contents.as_slice()).ok() {
         Some(s) => s,
         _       => return Err(SyntaxError(NotUtf8, 0, 0))
     };
@@ -1970,7 +1971,7 @@ macro_rules! expect {
     ($e:expr, Null) => ({
         match $e {
             Json::Null => Ok(()),
-            other => Err(ExpectedError("Null".into_string(),
+            other => Err(ExpectedError("Null".to_string(),
                                        format!("{}", other)))
         }
     });
@@ -1991,20 +1992,20 @@ macro_rules! read_primitive {
             match self.pop() {
                 Json::I64(f) => match num::cast(f) {
                     Some(f) => Ok(f),
-                    None => Err(ExpectedError("Number".into_string(), format!("{}", f))),
+                    None => Err(ExpectedError("Number".to_string(), format!("{}", f))),
                 },
                 Json::U64(f) => match num::cast(f) {
                     Some(f) => Ok(f),
-                    None => Err(ExpectedError("Number".into_string(), format!("{}", f))),
+                    None => Err(ExpectedError("Number".to_string(), format!("{}", f))),
                 },
-                Json::F64(f) => Err(ExpectedError("Integer".into_string(), format!("{}", f))),
+                Json::F64(f) => Err(ExpectedError("Integer".to_string(), format!("{}", f))),
                 // re: #12967.. a type w/ numeric keys (ie HashMap<uint, V> etc)
                 // is going to have a string here, as per JSON spec.
                 Json::String(s) => match std::str::from_str(s.as_slice()) {
                     Some(f) => Ok(f),
-                    None => Err(ExpectedError("Number".into_string(), s)),
+                    None => Err(ExpectedError("Number".to_string(), s)),
                 },
-                value => Err(ExpectedError("Number".into_string(), format!("{}", value))),
+                value => Err(ExpectedError("Number".to_string(), format!("{}", value))),
             }
         }
     }
@@ -2036,13 +2037,13 @@ impl ::Decoder<DecoderError> for Decoder {
             Json::String(s) => {
                 // re: #12967.. a type w/ numeric keys (ie HashMap<uint, V> etc)
                 // is going to have a string here, as per JSON spec.
-                match std::str::from_str(s.as_slice()) {
+                match s.parse() {
                     Some(f) => Ok(f),
-                    None => Err(ExpectedError("Number".into_string(), s)),
+                    None => Err(ExpectedError("Number".to_string(), s)),
                 }
             },
             Json::Null => Ok(f64::NAN),
-            value => Err(ExpectedError("Number".into_string(), format!("{}", value)))
+            value => Err(ExpectedError("Number".to_string(), format!("{}", value)))
         }
     }
 
@@ -2060,7 +2061,7 @@ impl ::Decoder<DecoderError> for Decoder {
                 _ => ()
             }
         }
-        Err(ExpectedError("single character string".into_string(), format!("{}", s)))
+        Err(ExpectedError("single character string".to_string(), format!("{}", s)))
     }
 
     fn read_str(&mut self) -> DecodeResult<string::String> {
@@ -2080,36 +2081,35 @@ impl ::Decoder<DecoderError> for Decoder {
         let name = match self.pop() {
             Json::String(s) => s,
             Json::Object(mut o) => {
-                let n = match o.remove(&"variant".into_string()) {
+                let n = match o.remove(&"variant".to_string()) {
                     Some(Json::String(s)) => s,
                     Some(val) => {
-                        return Err(ExpectedError("String".into_string(), format!("{}", val)))
+                        return Err(ExpectedError("String".to_string(), format!("{}", val)))
                     }
                     None => {
-                        return Err(MissingFieldError("variant".into_string()))
+                        return Err(MissingFieldError("variant".to_string()))
                     }
                 };
-                match o.remove(&"fields".into_string()) {
+                match o.remove(&"fields".to_string()) {
                     Some(Json::Array(l)) => {
                         for field in l.into_iter().rev() {
                             self.stack.push(field);
                         }
                     },
                     Some(val) => {
-                        return Err(ExpectedError("Array".into_string(), format!("{}", val)))
+                        return Err(ExpectedError("Array".to_string(), format!("{}", val)))
                     }
                     None => {
-                        return Err(MissingFieldError("fields".into_string()))
+                        return Err(MissingFieldError("fields".to_string()))
                     }
                 }
                 n
             }
             json => {
-                return Err(ExpectedError("String or Object".into_string(), format!("{}", json)))
+                return Err(ExpectedError("String or Object".to_string(), format!("{}", json)))
             }
         };
-        let idx = match names.iter()
-                             .position(|n| str::eq_slice(*n, name.as_slice())) {
+        let idx = match names.iter().position(|n| *n == name[]) {
             Some(idx) => idx,
             None => return Err(UnknownVariantError(name))
         };
@@ -2319,7 +2319,7 @@ impl ToJson for bool {
 }
 
 impl ToJson for str {
-    fn to_json(&self) -> Json { Json::String(self.into_string()) }
+    fn to_json(&self) -> Json { Json::String(self.to_string()) }
 }
 
 impl ToJson for string::String {
@@ -2450,9 +2450,9 @@ mod tests {
     #[test]
     fn test_decode_option_malformed() {
         check_err::<OptionData>("{ \"opt\": [] }",
-                                ExpectedError("Number".into_string(), "[]".into_string()));
+                                ExpectedError("Number".to_string(), "[]".to_string()));
         check_err::<OptionData>("{ \"opt\": false }",
-                                ExpectedError("Number".into_string(), "false".into_string()));
+                                ExpectedError("Number".to_string(), "false".to_string()));
     }
 
     #[deriving(PartialEq, Encodable, Decodable, Show)]
@@ -2538,11 +2538,11 @@ mod tests {
 
     #[test]
     fn test_write_str() {
-        assert_eq!(String("".into_string()).to_string(), "\"\"");
-        assert_eq!(String("".into_string()).to_pretty_str(), "\"\"");
+        assert_eq!(String("".to_string()).to_string(), "\"\"");
+        assert_eq!(String("".to_string()).to_pretty_str(), "\"\"");
 
-        assert_eq!(String("homura".into_string()).to_string(), "\"homura\"");
-        assert_eq!(String("madoka".into_string()).to_pretty_str(), "\"madoka\"");
+        assert_eq!(String("homura".to_string()).to_string(), "\"homura\"");
+        assert_eq!(String("madoka".to_string()).to_pretty_str(), "\"madoka\"");
     }
 
     #[test]
@@ -2571,7 +2571,7 @@ mod tests {
         let long_test_array = Array(vec![
             Boolean(false),
             Null,
-            Array(vec![String("foo\nbar".into_string()), F64(3.5)])]);
+            Array(vec![String("foo\nbar".to_string()), F64(3.5)])]);
 
         assert_eq!(long_test_array.to_string(),
             "[false,null,[\"foo\\nbar\",3.5]]");
@@ -2596,12 +2596,12 @@ mod tests {
 
         assert_eq!(
             mk_object(&[
-                ("a".into_string(), Boolean(true))
+                ("a".to_string(), Boolean(true))
             ]).to_string(),
             "{\"a\":true}"
         );
         assert_eq!(
-            mk_object(&[("a".into_string(), Boolean(true))]).to_pretty_str(),
+            mk_object(&[("a".to_string(), Boolean(true))]).to_pretty_str(),
             "\
             {\n  \
                 \"a\": true\n\
@@ -2609,9 +2609,9 @@ mod tests {
         );
 
         let complex_obj = mk_object(&[
-                ("b".into_string(), Array(vec![
-                    mk_object(&[("c".into_string(), String("\x0c\r".into_string()))]),
-                    mk_object(&[("d".into_string(), String("".into_string()))])
+                ("b".to_string(), Array(vec![
+                    mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
+                    mk_object(&[("d".to_string(), String("".to_string()))])
                 ]))
             ]);
 
@@ -2640,10 +2640,10 @@ mod tests {
         );
 
         let a = mk_object(&[
-            ("a".into_string(), Boolean(true)),
-            ("b".into_string(), Array(vec![
-                mk_object(&[("c".into_string(), String("\x0c\r".into_string()))]),
-                mk_object(&[("d".into_string(), String("".into_string()))])
+            ("a".to_string(), Boolean(true)),
+            ("b".to_string(), Array(vec![
+                mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
+                mk_object(&[("d".to_string(), String("".to_string()))])
             ]))
         ]);
 
@@ -2678,7 +2678,7 @@ mod tests {
             "\"Dog\""
         );
 
-        let animal = Frog("Henry".into_string(), 349);
+        let animal = Frog("Henry".to_string(), 349);
         assert_eq!(
             with_str_writer(|writer| {
                 let mut encoder = Encoder::new(writer);
@@ -2731,7 +2731,7 @@ mod tests {
     fn test_write_char() {
         check_encoder_for_simple!('a', "\"a\"");
         check_encoder_for_simple!('\t', "\"\\t\"");
-        check_encoder_for_simple!('\u{00a0}', "\"\u{00a0}\"");
+        check_encoder_for_simple!('\u{a0}', "\"\u{a0}\"");
         check_encoder_for_simple!('\u{abcd}', "\"\u{abcd}\"");
         check_encoder_for_simple!('\u{10ffff}', "\"\u{10ffff}\"");
     }
@@ -2839,7 +2839,7 @@ mod tests {
         assert_eq!(v, i64::MAX);
 
         let res: DecodeResult<i64> = super::decode("765.25252");
-        assert_eq!(res, Err(ExpectedError("Integer".into_string(), "765.25252".into_string())));
+        assert_eq!(res, Err(ExpectedError("Integer".to_string(), "765.25252".to_string())));
     }
 
     #[test]
@@ -2847,16 +2847,16 @@ mod tests {
         assert_eq!(from_str("\""),    Err(SyntaxError(EOFWhileParsingString, 1, 2)));
         assert_eq!(from_str("\"lol"), Err(SyntaxError(EOFWhileParsingString, 1, 5)));
 
-        assert_eq!(from_str("\"\""), Ok(String("".into_string())));
-        assert_eq!(from_str("\"foo\""), Ok(String("foo".into_string())));
-        assert_eq!(from_str("\"\\\"\""), Ok(String("\"".into_string())));
-        assert_eq!(from_str("\"\\b\""), Ok(String("\x08".into_string())));
-        assert_eq!(from_str("\"\\n\""), Ok(String("\n".into_string())));
-        assert_eq!(from_str("\"\\r\""), Ok(String("\r".into_string())));
-        assert_eq!(from_str("\"\\t\""), Ok(String("\t".into_string())));
-        assert_eq!(from_str(" \"foo\" "), Ok(String("foo".into_string())));
-        assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u{12ab}".into_string())));
-        assert_eq!(from_str("\"\\uAB12\""), Ok(String("\u{AB12}".into_string())));
+        assert_eq!(from_str("\"\""), Ok(String("".to_string())));
+        assert_eq!(from_str("\"foo\""), Ok(String("foo".to_string())));
+        assert_eq!(from_str("\"\\\"\""), Ok(String("\"".to_string())));
+        assert_eq!(from_str("\"\\b\""), Ok(String("\x08".to_string())));
+        assert_eq!(from_str("\"\\n\""), Ok(String("\n".to_string())));
+        assert_eq!(from_str("\"\\r\""), Ok(String("\r".to_string())));
+        assert_eq!(from_str("\"\\t\""), Ok(String("\t".to_string())));
+        assert_eq!(from_str(" \"foo\" "), Ok(String("foo".to_string())));
+        assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u{12ab}".to_string())));
+        assert_eq!(from_str("\"\\uAB12\""), Ok(String("\u{AB12}".to_string())));
     }
 
     #[test]
@@ -2922,7 +2922,7 @@ mod tests {
         assert_eq!(t, (1u, 2, 3));
 
         let t: (uint, string::String) = super::decode("[1, \"two\"]").unwrap();
-        assert_eq!(t, (1u, "two".into_string()));
+        assert_eq!(t, (1u, "two".to_string()));
     }
 
     #[test]
@@ -2952,22 +2952,22 @@ mod tests {
 
         assert_eq!(from_str("{}").unwrap(), mk_object(&[]));
         assert_eq!(from_str("{\"a\": 3}").unwrap(),
-                  mk_object(&[("a".into_string(), U64(3))]));
+                  mk_object(&[("a".to_string(), U64(3))]));
 
         assert_eq!(from_str(
                       "{ \"a\": null, \"b\" : true }").unwrap(),
                   mk_object(&[
-                      ("a".into_string(), Null),
-                      ("b".into_string(), Boolean(true))]));
+                      ("a".to_string(), Null),
+                      ("b".to_string(), Boolean(true))]));
         assert_eq!(from_str("\n{ \"a\": null, \"b\" : true }\n").unwrap(),
                   mk_object(&[
-                      ("a".into_string(), Null),
-                      ("b".into_string(), Boolean(true))]));
+                      ("a".to_string(), Null),
+                      ("b".to_string(), Boolean(true))]));
         assert_eq!(from_str(
                       "{\"a\" : 1.0 ,\"b\": [ true ]}").unwrap(),
                   mk_object(&[
-                      ("a".into_string(), F64(1.0)),
-                      ("b".into_string(), Array(vec![Boolean(true)]))
+                      ("a".to_string(), F64(1.0)),
+                      ("b".to_string(), Array(vec![Boolean(true)]))
                   ]));
         assert_eq!(from_str(
                       "{\
@@ -2979,12 +2979,12 @@ mod tests {
                           ]\
                       }").unwrap(),
                   mk_object(&[
-                      ("a".into_string(), F64(1.0)),
-                      ("b".into_string(), Array(vec![
+                      ("a".to_string(), F64(1.0)),
+                      ("b".to_string(), Array(vec![
                           Boolean(true),
-                          String("foo\nbar".into_string()),
+                          String("foo\nbar".to_string()),
                           mk_object(&[
-                              ("c".into_string(), mk_object(&[("d".into_string(), Null)]))
+                              ("c".to_string(), mk_object(&[("d".to_string(), Null)]))
                           ])
                       ]))
                   ]));
@@ -3003,7 +3003,7 @@ mod tests {
             v,
             Outer {
                 inner: vec![
-                    Inner { a: (), b: 2, c: vec!["abc".into_string(), "xyz".into_string()] }
+                    Inner { a: (), b: 2, c: vec!["abc".to_string(), "xyz".to_string()] }
                 ]
             }
         );
@@ -3029,7 +3029,7 @@ mod tests {
         assert_eq!(value, None);
 
         let value: Option<string::String> = super::decode("\"jodhpurs\"").unwrap();
-        assert_eq!(value, Some("jodhpurs".into_string()));
+        assert_eq!(value, Some("jodhpurs".to_string()));
     }
 
     #[test]
@@ -3039,7 +3039,7 @@ mod tests {
 
         let s = "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}";
         let value: Animal = super::decode(s).unwrap();
-        assert_eq!(value, Frog("Henry".into_string(), 349));
+        assert_eq!(value, Frog("Henry".to_string(), 349));
     }
 
     #[test]
@@ -3048,8 +3048,8 @@ mod tests {
                   \"fields\":[\"Henry\", 349]}}";
         let mut map: BTreeMap<string::String, Animal> = super::decode(s).unwrap();
 
-        assert_eq!(map.remove(&"a".into_string()), Some(Dog));
-        assert_eq!(map.remove(&"b".into_string()), Some(Frog("Henry".into_string(), 349)));
+        assert_eq!(map.remove(&"a".to_string()), Some(Dog));
+        assert_eq!(map.remove(&"b".to_string()), Some(Frog("Henry".to_string(), 349)));
     }
 
     #[test]
@@ -3089,30 +3089,30 @@ mod tests {
     }
     #[test]
     fn test_decode_errors_struct() {
-        check_err::<DecodeStruct>("[]", ExpectedError("Object".into_string(), "[]".into_string()));
+        check_err::<DecodeStruct>("[]", ExpectedError("Object".to_string(), "[]".to_string()));
         check_err::<DecodeStruct>("{\"x\": true, \"y\": true, \"z\": \"\", \"w\": []}",
-                                  ExpectedError("Number".into_string(), "true".into_string()));
+                                  ExpectedError("Number".to_string(), "true".to_string()));
         check_err::<DecodeStruct>("{\"x\": 1, \"y\": [], \"z\": \"\", \"w\": []}",
-                                  ExpectedError("Boolean".into_string(), "[]".into_string()));
+                                  ExpectedError("Boolean".to_string(), "[]".to_string()));
         check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": {}, \"w\": []}",
-                                  ExpectedError("String".into_string(), "{}".into_string()));
+                                  ExpectedError("String".to_string(), "{}".to_string()));
         check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\", \"w\": null}",
-                                  ExpectedError("Array".into_string(), "null".into_string()));
+                                  ExpectedError("Array".to_string(), "null".to_string()));
         check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\"}",
-                                  MissingFieldError("w".into_string()));
+                                  MissingFieldError("w".to_string()));
     }
     #[test]
     fn test_decode_errors_enum() {
         check_err::<DecodeEnum>("{}",
-                                MissingFieldError("variant".into_string()));
+                                MissingFieldError("variant".to_string()));
         check_err::<DecodeEnum>("{\"variant\": 1}",
-                                ExpectedError("String".into_string(), "1".into_string()));
+                                ExpectedError("String".to_string(), "1".to_string()));
         check_err::<DecodeEnum>("{\"variant\": \"A\"}",
-                                MissingFieldError("fields".into_string()));
+                                MissingFieldError("fields".to_string()));
         check_err::<DecodeEnum>("{\"variant\": \"A\", \"fields\": null}",
-                                ExpectedError("Array".into_string(), "null".into_string()));
+                                ExpectedError("Array".to_string(), "null".to_string()));
         check_err::<DecodeEnum>("{\"variant\": \"C\", \"fields\": []}",
-                                UnknownVariantError("C".into_string()));
+                                UnknownVariantError("C".to_string()));
     }
 
     #[test]
@@ -3325,15 +3325,15 @@ mod tests {
 
         let mut tree = BTreeMap::new();
 
-        tree.insert("hello".into_string(), String("guten tag".into_string()));
-        tree.insert("goodbye".into_string(), String("sayonara".into_string()));
+        tree.insert("hello".to_string(), String("guten tag".to_string()));
+        tree.insert("goodbye".to_string(), String("sayonara".to_string()));
 
         let json = Array(
             // The following layout below should look a lot like
             // the pretty-printed JSON (indent * x)
             vec!
             ( // 0x
-                String("greetings".into_string()), // 1x
+                String("greetings".to_string()), // 1x
                 Object(tree), // 1x + 2x + 2x + 1x
             ) // 0x
             // End JSON array (7 lines)
@@ -3397,7 +3397,7 @@ mod tests {
         };
         let mut decoder = Decoder::new(json_obj);
         let result: Result<HashMap<uint, bool>, DecoderError> = Decodable::decode(&mut decoder);
-        assert_eq!(result, Err(ExpectedError("Number".into_string(), "a".into_string())));
+        assert_eq!(result, Err(ExpectedError("Number".to_string(), "a".to_string())));
     }
 
     fn assert_stream_equal(src: &str,
@@ -3424,7 +3424,7 @@ mod tests {
             r#"{ "foo":"bar", "array" : [0, 1, 2, 3, 4, 5], "idents":[null,true,false]}"#,
             vec![
                 (ObjectStart,             vec![]),
-                  (StringValue("bar".into_string()),   vec![Key("foo")]),
+                  (StringValue("bar".to_string()),   vec![Key("foo")]),
                   (ArrayStart,            vec![Key("array")]),
                     (U64Value(0),         vec![Key("array"), Index(0)]),
                     (U64Value(1),         vec![Key("array"), Index(1)]),
@@ -3515,7 +3515,7 @@ mod tests {
                   (F64Value(1.0),               vec![Key("a")]),
                   (ArrayStart,                  vec![Key("b")]),
                     (BooleanValue(true),        vec![Key("b"), Index(0)]),
-                    (StringValue("foo\nbar".into_string()),  vec![Key("b"), Index(1)]),
+                    (StringValue("foo\nbar".to_string()),  vec![Key("b"), Index(1)]),
                     (ObjectStart,               vec![Key("b"), Index(2)]),
                       (ObjectStart,             vec![Key("b"), Index(2), Key("c")]),
                         (NullValue,             vec![Key("b"), Index(2), Key("c"), Key("d")]),
@@ -3648,7 +3648,7 @@ mod tests {
         assert!(stack.last_is_index());
         assert!(stack.get(0) == Index(1));
 
-        stack.push_key("foo".into_string());
+        stack.push_key("foo".to_string());
 
         assert!(stack.len() == 2);
         assert!(stack.is_equal_to(&[Index(1), Key("foo")]));
@@ -3660,7 +3660,7 @@ mod tests {
         assert!(stack.get(0) == Index(1));
         assert!(stack.get(1) == Key("foo"));
 
-        stack.push_key("bar".into_string());
+        stack.push_key("bar".to_string());
 
         assert!(stack.len() == 3);
         assert!(stack.is_equal_to(&[Index(1), Key("foo"), Key("bar")]));
@@ -3721,8 +3721,8 @@ mod tests {
         assert_eq!(f64::NAN.to_json(), Null);
         assert_eq!(true.to_json(), Boolean(true));
         assert_eq!(false.to_json(), Boolean(false));
-        assert_eq!("abc".to_json(), String("abc".into_string()));
-        assert_eq!("abc".into_string().to_json(), String("abc".into_string()));
+        assert_eq!("abc".to_json(), String("abc".to_string()));
+        assert_eq!("abc".to_string().to_json(), String("abc".to_string()));
         assert_eq!((1u, 2u).to_json(), array2);
         assert_eq!((1u, 2u, 3u).to_json(), array3);
         assert_eq!([1u, 2].to_json(), array2);
@@ -3734,8 +3734,8 @@ mod tests {
         tree_map.insert("b".into_string(), 2);
         assert_eq!(tree_map.to_json(), object);
         let mut hash_map = HashMap::new();
-        hash_map.insert("a".into_string(), 1u);
-        hash_map.insert("b".into_string(), 2);
+        hash_map.insert("a".to_string(), 1u);
+        hash_map.insert("b".to_string(), 2);
         assert_eq!(hash_map.to_json(), object);
         assert_eq!(Some(15i).to_json(), I64(15));
         assert_eq!(Some(15u).to_json(), U64(15));
@@ -3778,7 +3778,7 @@ mod tests {
     }
 
     fn big_json() -> string::String {
-        let mut src = "[\n".into_string();
+        let mut src = "[\n".to_string();
         for _ in range(0i, 500) {
             src.push_str(r#"{ "a": true, "b": null, "c":3.1415, "d": "Hello world", "e": \
                             [1,2,3]},"#);
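
Two renamings account for most of the json.rs churn above: `.into_string()` becomes `.to_string()`, and the free-function parse `std::str::from_str(s.as_slice())` becomes the method call `s.parse()`, which here still yields an `Option`. A sketch of both (helper names are illustrative):

    fn to_number(s: &str) -> Option<f64> {
        // formerly: std::str::from_str(s.as_slice()) on an owned String
        s.parse()
    }

    fn label() -> String {
        "Number".to_string()    // formerly: "Number".into_string()
    }
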
diff --git a/src/libserialize/lib.rs b/src/libserialize/lib.rs
index e700d102fef..fdbc5051f72 100644
--- a/src/libserialize/lib.rs
+++ b/src/libserialize/lib.rs
@@ -32,6 +32,7 @@ extern crate test;
 
 #[phase(plugin, link)]
 extern crate log;
+extern crate unicode;
 
 extern crate collections;
 
diff --git a/src/libserialize/serialize.rs b/src/libserialize/serialize.rs
index 00c5158309e..558f9e603e1 100644
--- a/src/libserialize/serialize.rs
+++ b/src/libserialize/serialize.rs
@@ -308,13 +308,13 @@ impl<E, S:Encoder<E>> Encodable<S, E> for str {
 
 impl<E, S:Encoder<E>> Encodable<S, E> for String {
     fn encode(&self, s: &mut S) -> Result<(), E> {
-        s.emit_str(self.as_slice())
+        s.emit_str(self[])
     }
 }
 
 impl<E, D:Decoder<E>> Decodable<D, E> for String {
     fn decode(d: &mut D) -> Result<String, E> {
-        Ok(String::from_str(try!(d.read_str()).as_slice()))
+        d.read_str()
     }
 }
 
diff --git a/src/libstd/ascii.rs b/src/libstd/ascii.rs
index 08b17f25e29..2c49beca98d 100644
--- a/src/libstd/ascii.rs
+++ b/src/libstd/ascii.rs
@@ -23,7 +23,7 @@ use ops::FnMut;
 use option::Option;
 use option::Option::{Some, None};
 use slice::{SliceExt, AsSlice};
-use str::{Str, StrPrelude};
+use str::{Str, StrExt};
 use string::{String, IntoString};
 use vec::Vec;
 
diff --git a/src/libstd/c_str.rs b/src/libstd/c_str.rs
index f1c8e8950a2..fb44961017f 100644
--- a/src/libstd/c_str.rs
+++ b/src/libstd/c_str.rs
@@ -228,7 +228,7 @@ impl CString {
     #[inline]
     pub fn as_str<'a>(&'a self) -> Option<&'a str> {
         let buf = self.as_bytes_no_nul();
-        str::from_utf8(buf)
+        str::from_utf8(buf).ok()
     }
 
     /// Return a CString iterator.
diff --git a/src/libstd/dynamic_lib.rs b/src/libstd/dynamic_lib.rs
index 4d8c7d67b8c..368abe7cb12 100644
--- a/src/libstd/dynamic_lib.rs
+++ b/src/libstd/dynamic_lib.rs
@@ -280,7 +280,7 @@ pub mod dl {
     use result::Result;
     use result::Result::{Ok, Err};
     use slice::SliceExt;
-    use str::StrPrelude;
+    use str::StrExt;
     use str;
     use string::String;
     use vec::Vec;
diff --git a/src/libstd/failure.rs b/src/libstd/failure.rs
index 8e1e3dc4af9..7010eae6dba 100644
--- a/src/libstd/failure.rs
+++ b/src/libstd/failure.rs
@@ -41,7 +41,7 @@ pub fn on_fail(obj: &(Any+Send), file: &'static str, line: uint) {
     let msg = match obj.downcast_ref::<&'static str>() {
         Some(s) => *s,
         None => match obj.downcast_ref::<String>() {
-            Some(s) => s.as_slice(),
+            Some(s) => s[],
             None => "Box<Any>",
         }
     };
diff --git a/src/libstd/io/mod.rs b/src/libstd/io/mod.rs
index dbf61b132e0..233ad781093 100644
--- a/src/libstd/io/mod.rs
+++ b/src/libstd/io/mod.rs
@@ -242,10 +242,11 @@ use result::Result;
 use result::Result::{Ok, Err};
 use sys;
 use slice::SliceExt;
-use str::StrPrelude;
+use str::StrExt;
 use str;
 use string::String;
 use uint;
+use unicode;
 use unicode::char::UnicodeChar;
 use vec::Vec;
 
@@ -1505,7 +1506,7 @@ pub trait Buffer: Reader {
     /// valid utf-8 encoded codepoint as the next few bytes in the stream.
     fn read_char(&mut self) -> IoResult<char> {
         let first_byte = try!(self.read_byte());
-        let width = str::utf8_char_width(first_byte);
+        let width = unicode::str::utf8_char_width(first_byte);
         if width == 1 { return Ok(first_byte as char) }
         if width == 0 { return Err(standard_error(InvalidInput)) } // not utf8
         let mut buf = [first_byte, 0, 0, 0];
@@ -1519,7 +1520,7 @@ pub trait Buffer: Reader {
                 }
             }
         }
-        match str::from_utf8(buf[..width]) {
+        match str::from_utf8(buf[..width]).ok() {
             Some(s) => Ok(s.char_at(0)),
             None => Err(standard_error(InvalidInput))
         }
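
`utf8_char_width` made the same move into the `unicode` crate, and the follow-up `from_utf8` check gains an `.ok()`. A compressed sketch of the flow in `read_char`, assuming the caller already buffered the bytes of one character and can link the `unicode` facade crate (the `decode_one` helper and its names are illustrative, not the actual `Buffer` code):

    extern crate unicode;

    use std::str;

    // Decode the first UTF-8 character in `buf`, if it is complete and valid.
    fn decode_one(buf: &[u8]) -> Option<char> {
        let width = unicode::str::utf8_char_width(buf[0]);   // 0 for an invalid lead byte
        if width == 0 || buf.len() < width { return None }
        str::from_utf8(buf[..width]).ok().map(|s| s.char_at(0))
    }
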
diff --git a/src/libstd/io/net/ip.rs b/src/libstd/io/net/ip.rs
index 71776b6c46a..89a649d55bd 100644
--- a/src/libstd/io/net/ip.rs
+++ b/src/libstd/io/net/ip.rs
@@ -25,8 +25,8 @@ use ops::FnOnce;
 use option::Option;
 use option::Option::{None, Some};
 use result::Result::{Ok, Err};
-use str::{FromStr, StrPrelude};
 use slice::{CloneSliceExt, SliceExt};
+use str::{FromStr, StrExt};
 use vec::Vec;
 
 pub type Port = u16;
diff --git a/src/libstd/io/process.rs b/src/libstd/io/process.rs
index 9da1117f227..4a0a3936424 100644
--- a/src/libstd/io/process.rs
+++ b/src/libstd/io/process.rs
@@ -1082,7 +1082,7 @@ mod tests {
 
         let prog = env_cmd().env_set_all(new_env.as_slice()).spawn().unwrap();
         let result = prog.wait_with_output().unwrap();
-        let output = String::from_utf8_lossy(result.output.as_slice()).into_string();
+        let output = String::from_utf8_lossy(result.output.as_slice()).to_string();
 
         assert!(output.contains("RUN_TEST_NEW_ENV=123"),
                 "didn't find RUN_TEST_NEW_ENV inside of:\n\n{}", output);
@@ -1092,7 +1092,7 @@ mod tests {
     fn test_add_to_env() {
         let prog = env_cmd().env("RUN_TEST_NEW_ENV", "123").spawn().unwrap();
         let result = prog.wait_with_output().unwrap();
-        let output = String::from_utf8_lossy(result.output.as_slice()).into_string();
+        let output = String::from_utf8_lossy(result.output.as_slice()).to_string();
 
         assert!(output.contains("RUN_TEST_NEW_ENV=123"),
                 "didn't find RUN_TEST_NEW_ENV inside of:\n\n{}", output);
diff --git a/src/libstd/io/stdio.rs b/src/libstd/io/stdio.rs
index 36dd5492356..1c5ceaf2450 100644
--- a/src/libstd/io/stdio.rs
+++ b/src/libstd/io/stdio.rs
@@ -43,7 +43,7 @@ use ops::{Deref, DerefMut, FnOnce};
 use result::Result::{Ok, Err};
 use rt;
 use slice::SliceExt;
-use str::StrPrelude;
+use str::StrExt;
 use string::String;
 use sys::{fs, tty};
 use sync::{Arc, Mutex, MutexGuard, Once, ONCE_INIT};
diff --git a/src/libstd/num/strconv.rs b/src/libstd/num/strconv.rs
index b3e4dd52f89..d6331f3c718 100644
--- a/src/libstd/num/strconv.rs
+++ b/src/libstd/num/strconv.rs
@@ -20,7 +20,7 @@ use char::{mod, Char};
 use num::{mod, Int, Float, FPNaN, FPInfinite, ToPrimitive};
 use ops::FnMut;
 use slice::{SliceExt, CloneSliceExt};
-use str::StrPrelude;
+use str::StrExt;
 use string::String;
 use vec::Vec;
 
diff --git a/src/libstd/os.rs b/src/libstd/os.rs
index a16ee982f5c..ceb9a4102f6 100644
--- a/src/libstd/os.rs
+++ b/src/libstd/os.rs
@@ -52,7 +52,7 @@ use result::Result;
 use result::Result::{Err, Ok};
 use slice::{AsSlice, SliceExt};
 use slice::CloneSliceExt;
-use str::{Str, StrPrelude, StrAllocating};
+use str::{Str, StrExt};
 use string::{String, ToString};
 use sync::atomic::{AtomicInt, INIT_ATOMIC_INT, SeqCst};
 use vec::Vec;
@@ -134,8 +134,8 @@ fn with_env_lock<T, F>(f: F) -> T where
 /// ```
 pub fn env() -> Vec<(String,String)> {
     env_as_bytes().into_iter().map(|(k,v)| {
-        let k = String::from_utf8_lossy(k.as_slice()).into_string();
-        let v = String::from_utf8_lossy(v.as_slice()).into_string();
+        let k = String::from_utf8_lossy(k.as_slice()).into_owned();
+        let v = String::from_utf8_lossy(v.as_slice()).into_owned();
         (k,v)
     }).collect()
 }
@@ -185,7 +185,7 @@ pub fn env_as_bytes() -> Vec<(Vec<u8>,Vec<u8>)> {
 /// }
 /// ```
 pub fn getenv(n: &str) -> Option<String> {
-    getenv_as_bytes(n).map(|v| String::from_utf8_lossy(v.as_slice()).into_string())
+    getenv_as_bytes(n).map(|v| String::from_utf8_lossy(v.as_slice()).into_owned())
 }
 
 #[cfg(unix)]
@@ -707,7 +707,7 @@ fn real_args_as_bytes() -> Vec<Vec<u8>> {
 fn real_args() -> Vec<String> {
     real_args_as_bytes().into_iter()
                         .map(|v| {
-                            String::from_utf8_lossy(v.as_slice()).into_string()
+                            String::from_utf8_lossy(v.as_slice()).into_owned()
                         }).collect()
 }
 
@@ -729,7 +729,7 @@ fn real_args() -> Vec<String> {
         // Push it onto the list.
         let ptr = ptr as *const u16;
         let buf = slice::from_raw_buf(&ptr, len);
-        let opt_s = String::from_utf16(os_imp::truncate_utf16_at_nul(buf));
+        let opt_s = String::from_utf16(sys::os::truncate_utf16_at_nul(buf));
         opt_s.expect("CommandLineToArgvW returned invalid UTF-16")
     });
 
diff --git a/src/libstd/path/mod.rs b/src/libstd/path/mod.rs
index ed4bb6ee081..30f3f56bc1c 100644
--- a/src/libstd/path/mod.rs
+++ b/src/libstd/path/mod.rs
@@ -69,7 +69,7 @@ use iter::IteratorExt;
 use option::Option;
 use option::Option::{None, Some};
 use str;
-use str::{CowString, MaybeOwned, Str, StrPrelude};
+use str::{CowString, MaybeOwned, Str, StrExt};
 use string::String;
 use slice::{AsSlice, CloneSliceExt};
 use slice::{PartialEqSliceExt, SliceExt};
@@ -197,7 +197,7 @@ pub trait GenericPath: Clone + GenericPathUnsafe {
     /// ```
     #[inline]
     fn as_str<'a>(&'a self) -> Option<&'a str> {
-        str::from_utf8(self.as_vec())
+        str::from_utf8(self.as_vec()).ok()
     }
 
     /// Returns the path as a byte vector
@@ -293,7 +293,7 @@ pub trait GenericPath: Clone + GenericPathUnsafe {
     /// ```
     #[inline]
     fn dirname_str<'a>(&'a self) -> Option<&'a str> {
-        str::from_utf8(self.dirname())
+        str::from_utf8(self.dirname()).ok()
     }
 
     /// Returns the file component of `self`, as a byte vector.
@@ -327,7 +327,7 @@ pub trait GenericPath: Clone + GenericPathUnsafe {
     /// ```
     #[inline]
     fn filename_str<'a>(&'a self) -> Option<&'a str> {
-        self.filename().and_then(str::from_utf8)
+        self.filename().and_then(|s| str::from_utf8(s).ok())
     }
 
     /// Returns the stem of the filename of `self`, as a byte vector.
@@ -373,7 +373,7 @@ pub trait GenericPath: Clone + GenericPathUnsafe {
     /// ```
     #[inline]
     fn filestem_str<'a>(&'a self) -> Option<&'a str> {
-        self.filestem().and_then(str::from_utf8)
+        self.filestem().and_then(|s| str::from_utf8(s).ok())
     }
 
     /// Returns the extension of the filename of `self`, as an optional byte vector.
@@ -420,7 +420,7 @@ pub trait GenericPath: Clone + GenericPathUnsafe {
     /// ```
     #[inline]
     fn extension_str<'a>(&'a self) -> Option<&'a str> {
-        self.extension().and_then(str::from_utf8)
+        self.extension().and_then(|s| str::from_utf8(s).ok())
     }
 
     /// Replaces the filename portion of the path with the given byte vector or string.
@@ -793,7 +793,7 @@ pub trait BytesContainer for Sized? {
     /// Returns the receiver interpreted as a utf-8 string, if possible
     #[inline]
     fn container_as_str<'a>(&'a self) -> Option<&'a str> {
-        str::from_utf8(self.container_as_bytes())
+        str::from_utf8(self.container_as_bytes()).ok()
     }
     /// Returns whether .container_as_str() is guaranteed to not fail
     // FIXME (#8888): Remove unused arg once ::<for T> works
@@ -870,7 +870,7 @@ impl BytesContainer for String {
     }
     #[inline]
     fn container_as_str(&self) -> Option<&str> {
-        Some(self.as_slice())
+        Some(self[])
     }
     #[inline]
     fn is_str(_: Option<&String>) -> bool { true }
@@ -886,7 +886,7 @@ impl BytesContainer for [u8] {
 impl BytesContainer for Vec<u8> {
     #[inline]
     fn container_as_bytes(&self) -> &[u8] {
-        self.as_slice()
+        self[]
     }
 }
 
@@ -897,6 +897,7 @@ impl BytesContainer for CString {
     }
 }
 
+#[allow(deprecated)]
 impl<'a> BytesContainer for str::MaybeOwned<'a> {
     #[inline]
     fn container_as_bytes<'b>(&'b self) -> &'b [u8] {
diff --git a/src/libstd/path/posix.rs b/src/libstd/path/posix.rs
index 88907951673..a514837492a 100644
--- a/src/libstd/path/posix.rs
+++ b/src/libstd/path/posix.rs
@@ -401,7 +401,10 @@ impl Path {
     /// Returns an iterator that yields each component of the path as Option<&str>.
     /// See components() for details.
     pub fn str_components<'a>(&'a self) -> StrComponents<'a> {
-        self.components().map(str::from_utf8)
+        fn from_utf8(s: &[u8]) -> Option<&str> {
+            str::from_utf8(s).ok()
+        }
+        self.components().map(from_utf8)
     }
 }
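
Because `str::from_utf8` no longer returns an `Option`, it presumably no longer matches the `fn(&[u8]) -> Option<&str>` shape that the public `StrComponents` map type names, hence the small local shim above instead of passing `str::from_utf8` by name. The same adapter works anywhere an Option-returning function pointer is still required:

    use std::str;

    fn from_utf8(s: &[u8]) -> Option<&str> {
        str::from_utf8(s).ok()
    }

    // e.g. some_byte_iter.map(from_utf8) keeps yielding Option<&str> items
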
 
diff --git a/src/libstd/path/windows.rs b/src/libstd/path/windows.rs
index c2c17103554..277c675c22d 100644
--- a/src/libstd/path/windows.rs
+++ b/src/libstd/path/windows.rs
@@ -25,9 +25,9 @@ use iter::{Iterator, IteratorExt, Map};
 use mem;
 use option::Option;
 use option::Option::{Some, None};
-use slice::{AsSlice, SliceExt};
-use str::{CharSplits, FromStr, Str, StrAllocating, StrVector, StrPrelude};
-use string::String;
+use slice::SliceExt;
+use str::{CharSplits, FromStr, StrVector, StrExt};
+use string::{String, ToString};
 use unicode::char::UnicodeChar;
 use vec::Vec;
 
@@ -187,30 +187,30 @@ impl GenericPathUnsafe for Path {
                 s.push_str("..");
                 s.push(SEP);
                 s.push_str(filename);
-                self.update_normalized(s);
+                self.update_normalized(s[]);
             }
             None => {
                 self.update_normalized(filename);
             }
-            Some((_,idxa,end)) if self.repr.slice(idxa,end) == ".." => {
+            Some((_,idxa,end)) if self.repr[idxa..end] == ".." => {
                 let mut s = String::with_capacity(end + 1 + filename.len());
-                s.push_str(self.repr.slice_to(end));
+                s.push_str(self.repr[0..end]);
                 s.push(SEP);
                 s.push_str(filename);
-                self.update_normalized(s);
+                self.update_normalized(s[]);
             }
             Some((idxb,idxa,_)) if self.prefix == Some(DiskPrefix) && idxa == self.prefix_len() => {
                 let mut s = String::with_capacity(idxb + filename.len());
-                s.push_str(self.repr.slice_to(idxb));
+                s.push_str(self.repr[0..idxb]);
                 s.push_str(filename);
-                self.update_normalized(s);
+                self.update_normalized(s[]);
             }
             Some((idxb,_,_)) => {
                 let mut s = String::with_capacity(idxb + 1 + filename.len());
-                s.push_str(self.repr.slice_to(idxb));
+                s.push_str(self.repr[0..idxb]);
                 s.push(SEP);
                 s.push_str(filename);
-                self.update_normalized(s);
+                self.update_normalized(s[]);
             }
         }
     }
@@ -229,12 +229,12 @@ impl GenericPathUnsafe for Path {
         let path = path.container_as_str().unwrap();
         fn is_vol_abs(path: &str, prefix: Option<PathPrefix>) -> bool {
             // assume prefix is Some(DiskPrefix)
-            let rest = path.slice_from(prefix_len(prefix));
+            let rest = path[prefix_len(prefix)..];
             !rest.is_empty() && rest.as_bytes()[0].is_ascii() && is_sep(rest.as_bytes()[0] as char)
         }
         fn shares_volume(me: &Path, path: &str) -> bool {
             // path is assumed to have a prefix of Some(DiskPrefix)
-            let repr = me.repr.as_slice();
+            let repr = me.repr[];
             match me.prefix {
                 Some(DiskPrefix) => {
                     repr.as_bytes()[0] == path.as_bytes()[0].to_ascii().to_uppercase().as_byte()
@@ -266,7 +266,7 @@ impl GenericPathUnsafe for Path {
                         else { None };
             let pathlen = path_.as_ref().map_or(path.len(), |p| p.len());
             let mut s = String::with_capacity(me.repr.len() + 1 + pathlen);
-            s.push_str(me.repr.as_slice());
+            s.push_str(me.repr[]);
             let plen = me.prefix_len();
             // if me is "C:" we don't want to add a path separator
             match me.prefix {
@@ -278,9 +278,9 @@ impl GenericPathUnsafe for Path {
             }
             match path_ {
                 None => s.push_str(path),
-                Some(p) => s.push_str(p.as_slice())
+                Some(p) => s.push_str(p[]),
             };
-            me.update_normalized(s)
+            me.update_normalized(s[])
         }
 
         if !path.is_empty() {
@@ -288,7 +288,7 @@ impl GenericPathUnsafe for Path {
             match prefix {
                 Some(DiskPrefix) if !is_vol_abs(path, prefix) && shares_volume(self, path) => {
                     // cwd-relative path, self is on the same volume
-                    append_path(self, path.slice_from(prefix_len(prefix)));
+                    append_path(self, path[prefix_len(prefix)..]);
                 }
                 Some(_) => {
                     // absolute path, or cwd-relative and self is not same volume
@@ -334,7 +334,7 @@ impl GenericPath for Path {
     /// Always returns a `Some` value.
     #[inline]
     fn as_str<'a>(&'a self) -> Option<&'a str> {
-        Some(self.repr.as_slice())
+        Some(self.repr[])
     }
 
     #[inline]
@@ -356,21 +356,17 @@ impl GenericPath for Path {
     /// Always returns a `Some` value.
     fn dirname_str<'a>(&'a self) -> Option<&'a str> {
         Some(match self.sepidx_or_prefix_len() {
-            None if ".." == self.repr => self.repr.as_slice(),
+            None if ".." == self.repr => self.repr[],
             None => ".",
-            Some((_,idxa,end)) if self.repr.slice(idxa, end) == ".." => {
-                self.repr.as_slice()
-            }
-            Some((idxb,_,end)) if self.repr.slice(idxb, end) == "\\" => {
-                self.repr.as_slice()
-            }
-            Some((0,idxa,_)) => self.repr.slice_to(idxa),
+            Some((_,idxa,end)) if self.repr[idxa..end] == ".." => self.repr[],
+            Some((idxb,_,end)) if self.repr[idxb..end] == "\\" => self.repr[],
+            Some((0,idxa,_)) => self.repr[0..idxa],
             Some((idxb,idxa,_)) => {
                 match self.prefix {
                     Some(DiskPrefix) | Some(VerbatimDiskPrefix) if idxb == self.prefix_len() => {
-                        self.repr.slice_to(idxa)
+                        self.repr[0..idxa]
                     }
-                    _ => self.repr.slice_to(idxb)
+                    _ => self.repr[0..idxb]
                 }
             }
         })
@@ -384,13 +380,13 @@ impl GenericPath for Path {
     /// See `GenericPath::filename_str` for info.
     /// Always returns a `Some` value if `filename` returns a `Some` value.
     fn filename_str<'a>(&'a self) -> Option<&'a str> {
-        let repr = self.repr.as_slice();
+        let repr = self.repr[];
         match self.sepidx_or_prefix_len() {
             None if "." == repr || ".." == repr => None,
             None => Some(repr),
-            Some((_,idxa,end)) if repr.slice(idxa, end) == ".." => None,
+            Some((_,idxa,end)) if repr[idxa..end] == ".." => None,
             Some((_,idxa,end)) if idxa == end => None,
-            Some((_,idxa,end)) => Some(repr.slice(idxa, end))
+            Some((_,idxa,end)) => Some(repr[idxa..end])
         }
     }
 
@@ -422,7 +418,7 @@ impl GenericPath for Path {
                 true
             }
             Some((idxb,idxa,end)) if idxb == idxa && idxb == end => false,
-            Some((idxb,_,end)) if self.repr.slice(idxb, end) == "\\" => false,
+            Some((idxb,_,end)) if self.repr[idxb..end] == "\\" => false,
             Some((idxb,idxa,_)) => {
                 let trunc = match self.prefix {
                     Some(DiskPrefix) | Some(VerbatimDiskPrefix) | None => {
@@ -442,15 +438,15 @@ impl GenericPath for Path {
         if self.prefix.is_some() {
             Some(Path::new(match self.prefix {
                 Some(DiskPrefix) if self.is_absolute() => {
-                    self.repr.slice_to(self.prefix_len()+1)
+                    self.repr[0..self.prefix_len()+1]
                 }
                 Some(VerbatimDiskPrefix) => {
-                    self.repr.slice_to(self.prefix_len()+1)
+                    self.repr[0..self.prefix_len()+1]
                 }
-                _ => self.repr.slice_to(self.prefix_len())
+                _ => self.repr[0..self.prefix_len()]
             }))
         } else if is_vol_relative(self) {
-            Some(Path::new(self.repr.slice_to(1)))
+            Some(Path::new(self.repr[0..1]))
         } else {
             None
         }
@@ -469,7 +465,7 @@ impl GenericPath for Path {
     fn is_absolute(&self) -> bool {
         match self.prefix {
             Some(DiskPrefix) => {
-                let rest = self.repr.slice_from(self.prefix_len());
+                let rest = self.repr[self.prefix_len()..];
                 rest.len() > 0 && rest.as_bytes()[0] == SEP_BYTE
             }
             Some(_) => true,
@@ -644,15 +640,15 @@ impl Path {
     /// Does not distinguish between absolute and cwd-relative paths, e.g.
     /// C:\foo and C:foo.
     pub fn str_components<'a>(&'a self) -> StrComponents<'a> {
-        let repr = self.repr.as_slice();
+        let repr = self.repr[];
         let s = match self.prefix {
             Some(_) => {
                 let plen = self.prefix_len();
                 if repr.len() > plen && repr.as_bytes()[plen] == SEP_BYTE {
-                    repr.slice_from(plen+1)
-                } else { repr.slice_from(plen) }
+                    repr[plen+1..]
+                } else { repr[plen..] }
             }
-            None if repr.as_bytes()[0] == SEP_BYTE => repr.slice_from(1),
+            None if repr.as_bytes()[0] == SEP_BYTE => repr[1..],
             None => repr
         };
         let ret = s.split_terminator(SEP).map(Some);
@@ -670,8 +666,8 @@ impl Path {
     }
 
     fn equiv_prefix(&self, other: &Path) -> bool {
-        let s_repr = self.repr.as_slice();
-        let o_repr = other.repr.as_slice();
+        let s_repr = self.repr[];
+        let o_repr = other.repr[];
         match (self.prefix, other.prefix) {
             (Some(DiskPrefix), Some(VerbatimDiskPrefix)) => {
                 self.is_absolute() &&
@@ -688,28 +684,28 @@ impl Path {
                     o_repr.as_bytes()[4].to_ascii().to_lowercase()
             }
             (Some(UNCPrefix(_,_)), Some(VerbatimUNCPrefix(_,_))) => {
-                s_repr.slice(2, self.prefix_len()) == o_repr.slice(8, other.prefix_len())
+                s_repr[2..self.prefix_len()] == o_repr[8..other.prefix_len()]
             }
             (Some(VerbatimUNCPrefix(_,_)), Some(UNCPrefix(_,_))) => {
-                s_repr.slice(8, self.prefix_len()) == o_repr.slice(2, other.prefix_len())
+                s_repr[8..self.prefix_len()] == o_repr[2..other.prefix_len()]
             }
             (None, None) => true,
             (a, b) if a == b => {
-                s_repr.slice_to(self.prefix_len()) == o_repr.slice_to(other.prefix_len())
+                s_repr[0..self.prefix_len()] == o_repr[0..other.prefix_len()]
             }
             _ => false
         }
     }
 
-    fn normalize_<S: StrAllocating>(s: S) -> (Option<PathPrefix>, String) {
+    fn normalize_(s: &str) -> (Option<PathPrefix>, String) {
         // make borrowck happy
         let (prefix, val) = {
-            let prefix = parse_prefix(s.as_slice());
-            let path = Path::normalize__(s.as_slice(), prefix);
+            let prefix = parse_prefix(s);
+            let path = Path::normalize__(s, prefix);
             (prefix, path)
         };
         (prefix, match val {
-            None => s.into_string(),
+            None => s.to_string(),
             Some(val) => val
         })
     }
@@ -749,7 +745,7 @@ impl Path {
                         match prefix.unwrap() {
                             DiskPrefix => {
                                 let len = prefix_len(prefix) + is_abs as uint;
-                                let mut s = String::from_str(s.slice_to(len));
+                                let mut s = String::from_str(s[0..len]);
                                 unsafe {
                                     let v = s.as_mut_vec();
                                     v[0] = (*v)[0].to_ascii().to_uppercase().as_byte();
@@ -764,7 +760,7 @@ impl Path {
                             }
                             VerbatimDiskPrefix => {
                                 let len = prefix_len(prefix) + is_abs as uint;
-                                let mut s = String::from_str(s.slice_to(len));
+                                let mut s = String::from_str(s[0..len]);
                                 unsafe {
                                     let v = s.as_mut_vec();
                                     v[4] = (*v)[4].to_ascii().to_uppercase().as_byte();
@@ -774,14 +770,14 @@ impl Path {
                             _ => {
                                 let plen = prefix_len(prefix);
                                 if s.len() > plen {
-                                    Some(String::from_str(s.slice_to(plen)))
+                                    Some(String::from_str(s[0..plen]))
                                 } else { None }
                             }
                         }
                     } else if is_abs && comps.is_empty() {
                         Some(String::from_char(1, SEP))
                     } else {
-                        let prefix_ = s.slice_to(prefix_len(prefix));
+                        let prefix_ = s[0..prefix_len(prefix)];
                         let n = prefix_.len() +
                                 if is_abs { comps.len() } else { comps.len() - 1} +
                                 comps.iter().map(|v| v.len()).sum();
@@ -793,16 +789,16 @@ impl Path {
                                 s.push(':');
                             }
                             Some(VerbatimDiskPrefix) => {
-                                s.push_str(prefix_.slice_to(4));
+                                s.push_str(prefix_[0..4]);
                                 s.push(prefix_.as_bytes()[4].to_ascii()
                                                    .to_uppercase().as_char());
-                                s.push_str(prefix_.slice_from(5));
+                                s.push_str(prefix_[5..]);
                             }
                             Some(UNCPrefix(a,b)) => {
                                 s.push_str("\\\\");
-                                s.push_str(prefix_.slice(2, a+2));
+                                s.push_str(prefix_[2..a+2]);
                                 s.push(SEP);
-                                s.push_str(prefix_.slice(3+a, 3+a+b));
+                                s.push_str(prefix_[3+a..3+a+b]);
                             }
                             Some(_) => s.push_str(prefix_),
                             None => ()
@@ -827,8 +823,8 @@ impl Path {
 
     fn update_sepidx(&mut self) {
         let s = if self.has_nonsemantic_trailing_slash() {
-                    self.repr.slice_to(self.repr.len()-1)
-                } else { self.repr.as_slice() };
+                    self.repr[0..self.repr.len()-1]
+                } else { self.repr[] };
         let idx = s.rfind(if !prefix_is_verbatim(self.prefix) { is_sep }
                           else { is_sep_verbatim });
         let prefixlen = self.prefix_len();
@@ -860,8 +856,8 @@ impl Path {
             self.repr.as_bytes()[self.repr.len()-1] == SEP_BYTE
     }
 
-    fn update_normalized<S: Str>(&mut self, s: S) {
-        let (prefix, path) = Path::normalize_(s.as_slice());
+    fn update_normalized(&mut self, s: &str) {
+        let (prefix, path) = Path::normalize_(s);
         self.repr = path;
         self.prefix = prefix;
         self.update_sepidx();
@@ -903,17 +899,17 @@ pub fn is_verbatim(path: &Path) -> bool {
 /// non-verbatim, the non-verbatim version is returned.
 /// Otherwise, None is returned.
 pub fn make_non_verbatim(path: &Path) -> Option<Path> {
-    let repr = path.repr.as_slice();
+    let repr = path.repr[];
     let new_path = match path.prefix {
         Some(VerbatimPrefix(_)) | Some(DeviceNSPrefix(_)) => return None,
         Some(UNCPrefix(_,_)) | Some(DiskPrefix) | None => return Some(path.clone()),
         Some(VerbatimDiskPrefix) => {
             // \\?\D:\
-            Path::new(repr.slice_from(4))
+            Path::new(repr[4..])
         }
         Some(VerbatimUNCPrefix(_,_)) => {
             // \\?\UNC\server\share
-            Path::new(format!(r"\{}", repr.slice_from(7)))
+            Path::new(format!(r"\{}", repr[7..]))
         }
     };
     if new_path.prefix.is_none() {
@@ -922,8 +918,8 @@ pub fn make_non_verbatim(path: &Path) -> Option<Path> {
         return None;
     }
     // now ensure normalization didn't change anything
-    if repr.slice_from(path.prefix_len()) ==
-        new_path.repr.slice_from(new_path.prefix_len()) {
+    if repr[path.prefix_len()..] ==
+        new_path.repr[new_path.prefix_len()..] {
         Some(new_path)
     } else {
         None
@@ -988,13 +984,13 @@ pub enum PathPrefix {
 fn parse_prefix<'a>(mut path: &'a str) -> Option<PathPrefix> {
     if path.starts_with("\\\\") {
         // \\
-        path = path.slice_from(2);
+        path = path[2..];
         if path.starts_with("?\\") {
             // \\?\
-            path = path.slice_from(2);
+            path = path[2..];
             if path.starts_with("UNC\\") {
                 // \\?\UNC\server\share
-                path = path.slice_from(4);
+                path = path[4..];
                 let (idx_a, idx_b) = match parse_two_comps(path, is_sep_verbatim) {
                     Some(x) => x,
                     None => (path.len(), 0)
@@ -1015,7 +1011,7 @@ fn parse_prefix<'a>(mut path: &'a str) -> Option<PathPrefix> {
             }
         } else if path.starts_with(".\\") {
             // \\.\path
-            path = path.slice_from(2);
+            path = path[2..];
             let idx = path.find('\\').unwrap_or(path.len());
             return Some(DeviceNSPrefix(idx));
         }
@@ -1040,7 +1036,7 @@ fn parse_prefix<'a>(mut path: &'a str) -> Option<PathPrefix> {
             None => return None,
             Some(x) => x
         };
-        path = path.slice_from(idx_a+1);
+        path = path[idx_a+1..];
         let idx_b = path.find(f).unwrap_or(path.len());
         Some((idx_a, idx_b))
     }
@@ -1050,8 +1046,8 @@ fn parse_prefix<'a>(mut path: &'a str) -> Option<PathPrefix> {
 fn normalize_helper<'a>(s: &'a str, prefix: Option<PathPrefix>) -> (bool, Option<Vec<&'a str>>) {
     let f = if !prefix_is_verbatim(prefix) { is_sep } else { is_sep_verbatim };
     let is_abs = s.len() > prefix_len(prefix) && f(s.char_at(prefix_len(prefix)));
-    let s_ = s.slice_from(prefix_len(prefix));
-    let s_ = if is_abs { s_.slice_from(1) } else { s_ };
+    let s_ = s[prefix_len(prefix)..];
+    let s_ = if is_abs { s_[1..] } else { s_ };
 
     if is_abs && s_.is_empty() {
         return (is_abs, match prefix {
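
The windows path hunks above mechanically replace the old `slice_from`, `slice_to`, and `slice(a, b)` string helpers (and many `as_slice()` calls) with range indexing, using the short-lived `s[]` form for a full slice. As a rough sketch only, the same idioms in today's stable syntax, where the index expression needs an explicit borrow:

    fn main() {
        let repr = String::from(r"C:\foo\bar");
        let prefix_len = 2;

        // formerly repr.slice_from(prefix_len) / repr.slice_to(prefix_len)
        let rest: &str = &repr[prefix_len..];
        let prefix: &str = &repr[..prefix_len];
        assert_eq!(rest, r"\foo\bar");
        assert_eq!(prefix, "C:");

        // formerly repr.slice(3, 6)
        assert_eq!(&repr[3..6], "foo");

        // formerly repr.as_slice(), written `repr[]` in this patch
        let whole: &str = &repr[..];
        assert_eq!(whole.len(), repr.len());
    }
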
diff --git a/src/libstd/prelude.rs b/src/libstd/prelude.rs
index f77627711a7..49b888d17f4 100644
--- a/src/libstd/prelude.rs
+++ b/src/libstd/prelude.rs
@@ -79,11 +79,11 @@
 #[doc(no_inline)] pub use result::Result;
 #[doc(no_inline)] pub use result::Result::{Ok, Err};
 #[doc(no_inline)] pub use io::{Buffer, Writer, Reader, Seek, BufferPrelude};
-#[doc(no_inline)] pub use str::{Str, StrVector, StrPrelude};
-#[doc(no_inline)] pub use str::{StrAllocating, UnicodeStrPrelude};
 #[doc(no_inline)] pub use core::prelude::{Tuple1, Tuple2, Tuple3, Tuple4};
 #[doc(no_inline)] pub use core::prelude::{Tuple5, Tuple6, Tuple7, Tuple8};
 #[doc(no_inline)] pub use core::prelude::{Tuple9, Tuple10, Tuple11, Tuple12};
+#[doc(no_inline)] pub use str::{Str, StrVector};
+#[doc(no_inline)] pub use str::StrExt;
 #[doc(no_inline)] pub use slice::AsSlice;
 #[doc(no_inline)] pub use slice::{VectorVector, PartialEqSliceExt};
 #[doc(no_inline)] pub use slice::{CloneSliceExt, OrdSliceExt, SliceExt};
diff --git a/src/libstd/rt/backtrace.rs b/src/libstd/rt/backtrace.rs
index 4a692bccf9e..775e9bb526f 100644
--- a/src/libstd/rt/backtrace.rs
+++ b/src/libstd/rt/backtrace.rs
@@ -12,7 +12,8 @@
 
 #![allow(non_camel_case_types)]
 
-use option::Option::{Some, None};
+use prelude::*;
+
 use os;
 use sync::atomic;
 
diff --git a/src/libstd/rt/mod.rs b/src/libstd/rt/mod.rs
index 8d9c1268e7e..d64336569c6 100644
--- a/src/libstd/rt/mod.rs
+++ b/src/libstd/rt/mod.rs
@@ -91,7 +91,7 @@ fn lang_start(main: *const u8, argc: int, argv: *const *const u8) -> int {
         // created. Note that this isn't necessary in general for new threads,
         // but we just do this to name the main thread and to give it correct
         // info about the stack bounds.
-        let thread: Thread = NewThread::new(Some("<main>".into_string()));
+        let thread: Thread = NewThread::new(Some("<main>".to_string()));
         thread_info::set((my_stack_bottom, my_stack_top),
                          sys::thread::guard::main(),
                          thread);
diff --git a/src/libstd/rt/unwind.rs b/src/libstd/rt/unwind.rs
index f572141642c..eb15a7ba378 100644
--- a/src/libstd/rt/unwind.rs
+++ b/src/libstd/rt/unwind.rs
@@ -512,7 +512,7 @@ pub fn begin_unwind_fmt(msg: &fmt::Arguments, file_line: &(&'static str, uint))
     let mut v = Vec::new();
     let _ = write!(&mut VecWriter { v: &mut v }, "{}", msg);
 
-    let msg = box String::from_utf8_lossy(v.as_slice()).into_string();
+    let msg = box String::from_utf8_lossy(v.as_slice()).into_owned();
     begin_unwind_inner(msg, file_line)
 }
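
The two runtime hunks above trade `into_string()` for `to_string()` on a string literal and for `into_owned()` on the `Cow<str>` returned by `String::from_utf8_lossy`. A minimal sketch of both conversions as they behave on stable Rust today:

    fn main() {
        // formerly "<main>".into_string()
        let name: String = "<main>".to_string();
        assert_eq!(name, "<main>");

        // from_utf8_lossy yields a Cow<str>; into_owned turns it into a String.
        // The 0xFF byte is invalid UTF-8, so it is replaced with U+FFFD.
        let bytes = vec![b'h', b'i', 0xFF];
        let msg: String = String::from_utf8_lossy(&bytes).into_owned();
        assert_eq!(msg, "hi\u{FFFD}");
    }
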
 
diff --git a/src/libstd/rt/util.rs b/src/libstd/rt/util.rs
index 77500ca74d0..d8cd8455deb 100644
--- a/src/libstd/rt/util.rs
+++ b/src/libstd/rt/util.rs
@@ -10,16 +10,16 @@
 //
 // ignore-lexer-test FIXME #15677
 
-use core::prelude::*;
+use prelude::*;
 
-use core::cmp;
-use core::fmt;
-use core::intrinsics;
-use core::slice;
-use core::str;
-
-use libc::{mod, uintptr_t};
+use cmp;
+use fmt;
+use intrinsics;
+use libc::uintptr_t;
+use libc;
 use os;
+use slice;
+use str;
 use sync::atomic;
 
 /// Dynamically inquire about whether we're running under V.
@@ -52,7 +52,7 @@ pub fn min_stack() -> uint {
         0 => {}
         n => return n - 1,
     }
-    let amt = os::getenv("RUST_MIN_STACK").and_then(|s| from_str(s.as_slice()));
+    let amt = os::getenv("RUST_MIN_STACK").and_then(|s| s.parse());
     let amt = amt.unwrap_or(2 * 1024 * 1024);
     // 0 is our sentinel value, so ensure that we'll never see 0 after
     // initialization has run
@@ -65,7 +65,7 @@ pub fn min_stack() -> uint {
 pub fn default_sched_threads() -> uint {
     match os::getenv("RUST_THREADS") {
         Some(nstr) => {
-            let opt_n: Option<uint> = from_str(nstr.as_slice());
+            let opt_n: Option<uint> = nstr.parse();
             match opt_n {
                 Some(n) if n > 0 => n,
                 _ => panic!("`RUST_THREADS` is `{}`, should be a positive integer", nstr)
@@ -113,9 +113,8 @@ impl fmt::FormatWriter for Stdio {
 }
 
 pub fn dumb_print(args: &fmt::Arguments) {
-    use fmt::FormatWriter;
     let mut w = Stderr;
-    let _ = w.write_fmt(args);
+    let _ = write!(&mut w, "{}", args);
 }
 
 pub fn abort(args: &fmt::Arguments) -> ! {
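
The `rt::util` hunk above swaps the free `from_str` function for the `parse` method when reading numeric environment variables. A hedged sketch of the same pattern against today's `std::env` API (the patch itself still goes through the pre-1.0 `os::getenv`, and its `parse` returned an `Option` rather than a `Result`):

    use std::env;

    fn main() {
        // formerly: os::getenv("RUST_MIN_STACK").and_then(|s| from_str(s.as_slice()))
        let amt: usize = env::var("RUST_MIN_STACK")
            .ok()
            .and_then(|s| s.parse().ok())
            .unwrap_or(2 * 1024 * 1024);
        println!("minimum stack size: {} bytes", amt);
    }
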
diff --git a/src/libstd/sys/common/backtrace.rs b/src/libstd/sys/common/backtrace.rs
index a39c8d6d8fe..1d646eb06b1 100644
--- a/src/libstd/sys/common/backtrace.rs
+++ b/src/libstd/sys/common/backtrace.rs
@@ -8,12 +8,9 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use io::{IoResult, Writer};
-use iter::{Iterator, IteratorExt};
-use option::Option::{Some, None};
-use result::Result::{Ok, Err};
-use str::{StrPrelude, from_str};
-use unicode::char::UnicodeChar;
+use prelude::*;
+
+use io::IoResult;
 
 #[cfg(target_word_size = "64")] pub const HEX_WIDTH: uint = 18;
 #[cfg(target_word_size = "32")] pub const HEX_WIDTH: uint = 10;
@@ -85,7 +82,7 @@ pub fn demangle(writer: &mut Writer, s: &str) -> IoResult<()> {
             while rest.char_at(0).is_numeric() {
                 rest = rest.slice_from(1);
             }
-            let i: uint = from_str(inner.slice_to(inner.len() - rest.len())).unwrap();
+            let i: uint = inner.slice_to(inner.len() - rest.len()).parse().unwrap();
             inner = rest.slice_from(i);
             rest = rest.slice_to(i);
             while rest.len() > 0 {
diff --git a/src/libstd/sys/windows/backtrace.rs b/src/libstd/sys/windows/backtrace.rs
index f2f543dd969..42c8f7705e1 100644
--- a/src/libstd/sys/windows/backtrace.rs
+++ b/src/libstd/sys/windows/backtrace.rs
@@ -32,7 +32,7 @@ use path::Path;
 use result::Result::{Ok, Err};
 use sync::{StaticMutex, MUTEX_INIT};
 use slice::SliceExt;
-use str::StrPrelude;
+use str::StrExt;
 use dynamic_lib::DynamicLibrary;
 
 use sys_common::backtrace::*;
diff --git a/src/libstd/sys/windows/fs.rs b/src/libstd/sys/windows/fs.rs
index d5bf8c5b629..15eddd569be 100644
--- a/src/libstd/sys/windows/fs.rs
+++ b/src/libstd/sys/windows/fs.rs
@@ -23,6 +23,7 @@ use io;
 
 use prelude::*;
 use sys;
+use sys::os;
 use sys_common::{keep_going, eof, mkerr_libc};
 
 use io::{FilePermission, Write, UnstableFileStat, Open, FileAccess, FileMode};
@@ -262,7 +263,7 @@ pub fn readdir(p: &Path) -> IoResult<Vec<Path>> {
             let mut more_files = 1 as libc::BOOL;
             while more_files != 0 {
                 {
-                    let filename = str::truncate_utf16_at_nul(&wfd.cFileName);
+                    let filename = os::truncate_utf16_at_nul(&wfd.cFileName);
                     match String::from_utf16(filename) {
                         Some(filename) => paths.push(Path::new(filename)),
                         None => {
diff --git a/src/libstd/sys/windows/os.rs b/src/libstd/sys/windows/os.rs
index e1016048e58..e007b46b261 100644
--- a/src/libstd/sys/windows/os.rs
+++ b/src/libstd/sys/windows/os.rs
@@ -168,7 +168,7 @@ pub fn getcwd() -> IoResult<Path> {
         }
     }
 
-    match String::from_utf16(::str::truncate_utf16_at_nul(&buf)) {
+    match String::from_utf16(truncate_utf16_at_nul(&buf)) {
         Some(ref cwd) => Ok(Path::new(cwd)),
         None => Err(IoError {
             kind: OtherIoError,
@@ -279,7 +279,7 @@ pub fn load_self() -> Option<Vec<u8>> {
     unsafe {
         fill_utf16_buf_and_decode(|buf, sz| {
             libc::GetModuleFileNameW(0u as libc::DWORD, buf, sz)
-        }).map(|s| s.into_string().into_bytes())
+        }).map(|s| s.to_string().into_bytes())
     }
 }
 
diff --git a/src/libstd/sys/windows/process.rs b/src/libstd/sys/windows/process.rs
index 8945c155e66..0c2c76077dd 100644
--- a/src/libstd/sys/windows/process.rs
+++ b/src/libstd/sys/windows/process.rs
@@ -122,7 +122,7 @@ impl Process {
 
         use mem;
         use iter::{Iterator, IteratorExt};
-        use str::StrPrelude;
+        use str::StrExt;
 
         if cfg.gid().is_some() || cfg.uid().is_some() {
             return Err(IoError {
diff --git a/src/libstd/sys/windows/tty.rs b/src/libstd/sys/windows/tty.rs
index 51679bb2003..f793de5bb57 100644
--- a/src/libstd/sys/windows/tty.rs
+++ b/src/libstd/sys/windows/tty.rs
@@ -111,7 +111,7 @@ impl TTY {
     }
 
     pub fn write(&mut self, buf: &[u8]) -> IoResult<()> {
-        let utf16 = match from_utf8(buf) {
+        let utf16 = match from_utf8(buf).ok() {
             Some(utf8) => {
                 utf8.utf16_units().collect::<Vec<u16>>()
             }
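
The `tty.rs` hunk above adds `.ok()` because `str::from_utf8` now reports failure through a `Result` rather than an `Option`. A small sketch of that check; note that the patch-era `utf16_units()` iterator is spelled `encode_utf16()` on today's `str`:

    use std::str;

    fn main() {
        let buf: &[u8] = b"hello";
        // .ok() recovers the old Option shape from the Result-returning from_utf8.
        let utf16: Option<Vec<u16>> = str::from_utf8(buf)
            .ok()
            .map(|utf8| utf8.encode_utf16().collect());
        assert_eq!(utf16.map(|v| v.len()), Some(5));
    }
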
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index a294706ef2c..3eea5b27f19 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -189,7 +189,7 @@ impl<S: Encoder<E>, E> Encodable<S, E> for Ident {
 
 impl<D:Decoder<E>, E> Decodable<D, E> for Ident {
     fn decode(d: &mut D) -> Result<Ident, E> {
-        Ok(str_to_ident(try!(d.read_str()).as_slice()))
+        Ok(str_to_ident(try!(d.read_str())[]))
     }
 }
 
diff --git a/src/libsyntax/ast_map/mod.rs b/src/libsyntax/ast_map/mod.rs
index a95c9e19906..e3eeb453c26 100644
--- a/src/libsyntax/ast_map/mod.rs
+++ b/src/libsyntax/ast_map/mod.rs
@@ -95,7 +95,7 @@ pub fn path_to_string<PI: Iterator<PathElem>>(path: PI) -> String {
         if !s.is_empty() {
             s.push_str("::");
         }
-        s.push_str(e.as_slice());
+        s.push_str(e[]);
         s
     }).to_string()
 }
@@ -472,20 +472,20 @@ impl<'ast> Map<'ast> {
         F: FnOnce(Option<&[Attribute]>) -> T,
     {
         let attrs = match self.get(id) {
-            NodeItem(i) => Some(i.attrs.as_slice()),
-            NodeForeignItem(fi) => Some(fi.attrs.as_slice()),
+            NodeItem(i) => Some(i.attrs[]),
+            NodeForeignItem(fi) => Some(fi.attrs[]),
             NodeTraitItem(ref tm) => match **tm {
-                RequiredMethod(ref type_m) => Some(type_m.attrs.as_slice()),
-                ProvidedMethod(ref m) => Some(m.attrs.as_slice()),
-                TypeTraitItem(ref typ) => Some(typ.attrs.as_slice()),
+                RequiredMethod(ref type_m) => Some(type_m.attrs[]),
+                ProvidedMethod(ref m) => Some(m.attrs[]),
+                TypeTraitItem(ref typ) => Some(typ.attrs[]),
             },
             NodeImplItem(ref ii) => {
                 match **ii {
-                    MethodImplItem(ref m) => Some(m.attrs.as_slice()),
-                    TypeImplItem(ref t) => Some(t.attrs.as_slice()),
+                    MethodImplItem(ref m) => Some(m.attrs[]),
+                    TypeImplItem(ref t) => Some(t.attrs[]),
                 }
             }
-            NodeVariant(ref v) => Some(v.node.attrs.as_slice()),
+            NodeVariant(ref v) => Some(v.node.attrs[]),
             // unit/tuple structs take the attributes straight from
             // the struct definition.
             // FIXME(eddyb) make this work again (requires access to the map).
@@ -504,8 +504,8 @@ impl<'ast> Map<'ast> {
     /// the iterator will produce node id's for items with paths
     /// such as `foo::bar::quux`, `bar::quux`, `other::bar::quux`, and
     /// any other such items it can find in the map.
-    pub fn nodes_matching_suffix<'a, S:Str>(&'a self, parts: &'a [S])
-                                 -> NodesMatchingSuffix<'a, 'ast, S> {
+    pub fn nodes_matching_suffix<'a>(&'a self, parts: &'a [String])
+                                 -> NodesMatchingSuffix<'a, 'ast> {
         NodesMatchingSuffix {
             map: self,
             item_name: parts.last().unwrap(),
@@ -565,14 +565,14 @@ impl<'ast> Map<'ast> {
     }
 }
 
-pub struct NodesMatchingSuffix<'a, 'ast:'a, S:'a> {
+pub struct NodesMatchingSuffix<'a, 'ast:'a> {
     map: &'a Map<'ast>,
-    item_name: &'a S,
-    in_which: &'a [S],
+    item_name: &'a String,
+    in_which: &'a [String],
     idx: NodeId,
 }
 
-impl<'a, 'ast, S:Str> NodesMatchingSuffix<'a, 'ast, S> {
+impl<'a, 'ast> NodesMatchingSuffix<'a, 'ast> {
     /// Returns true only if some suffix of the module path for parent
     /// matches `self.in_which`.
     ///
@@ -586,7 +586,7 @@ impl<'a, 'ast, S:Str> NodesMatchingSuffix<'a, 'ast, S> {
                 None => return false,
                 Some((node_id, name)) => (node_id, name),
             };
-            if part.as_slice() != mod_name.as_str() {
+            if part[] != mod_name.as_str() {
                 return false;
             }
             cursor = self.map.get_parent(mod_id);
@@ -624,12 +624,12 @@ impl<'a, 'ast, S:Str> NodesMatchingSuffix<'a, 'ast, S> {
     // We are looking at some node `n` with a given name and parent
     // id; do their names match what I am seeking?
     fn matches_names(&self, parent_of_n: NodeId, name: Name) -> bool {
-        name.as_str() == self.item_name.as_slice() &&
+        name.as_str() == self.item_name[] &&
             self.suffix_matches(parent_of_n)
     }
 }
 
-impl<'a, 'ast, S:Str> Iterator<NodeId> for NodesMatchingSuffix<'a, 'ast, S> {
+impl<'a, 'ast> Iterator<NodeId> for NodesMatchingSuffix<'a, 'ast> {
     fn next(&mut self) -> Option<NodeId> {
         loop {
             let idx = self.idx;
@@ -1037,7 +1037,7 @@ impl<'a> NodePrinter for pprust::State<'a> {
 
 fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String {
     let id_str = format!(" (id={})", id);
-    let id_str = if include_id { id_str.as_slice() } else { "" };
+    let id_str = if include_id { id_str[] } else { "" };
 
     match map.find(id) {
         Some(NodeItem(item)) => {
diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs
index 02771809ae6..5727866d6ec 100644
--- a/src/libsyntax/ast_util.rs
+++ b/src/libsyntax/ast_util.rs
@@ -238,11 +238,11 @@ pub fn impl_pretty_name(trait_ref: &Option<TraitRef>, ty: &Ty) -> Ident {
     match *trait_ref {
         Some(ref trait_ref) => {
             pretty.push('.');
-            pretty.push_str(pprust::path_to_string(&trait_ref.path).as_slice());
+            pretty.push_str(pprust::path_to_string(&trait_ref.path)[]);
         }
         None => {}
     }
-    token::gensym_ident(pretty.as_slice())
+    token::gensym_ident(pretty[])
 }
 
 pub fn trait_method_to_ty_method(method: &Method) -> TypeMethod {
@@ -700,7 +700,7 @@ pub fn pat_is_ident(pat: P<ast::Pat>) -> bool {
 pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool {
     (a.span == b.span)
     && (a.global == b.global)
-    && (segments_name_eq(a.segments.as_slice(), b.segments.as_slice()))
+    && (segments_name_eq(a.segments[], b.segments[]))
 }
 
 // are two arrays of segments equal when compared unhygienically?
@@ -788,13 +788,13 @@ mod test {
     #[test] fn idents_name_eq_test() {
         assert!(segments_name_eq(
             [Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}]
-                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice(),
+                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>()[],
             [Ident{name:Name(3),ctxt:104}, Ident{name:Name(78),ctxt:182}]
-                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice()));
+                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>()[]));
         assert!(!segments_name_eq(
             [Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}]
-                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice(),
+                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>()[],
             [Ident{name:Name(3),ctxt:104}, Ident{name:Name(77),ctxt:182}]
-                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice()));
+                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>()[]));
     }
 }
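
The surrounding libsyntax hunks repeatedly rewrite `format!(...).as_slice()` as `format!(...)[]` when a freshly built `String` is handed to a diagnostic method that expects `&str`. The bare `[]` index form did not survive to stable Rust; as an illustration only, here are the equivalent calls with today's syntax, using a hypothetical `emit` helper standing in for the `span_err`/`span_bug` family:

    // Hypothetical stand-in for the diagnostic methods, which take &str.
    fn emit(msg: &str) {
        eprintln!("{}", msg);
    }

    fn main() {
        let name = "Clone";
        // formerly emit(format!(...).as_slice()), written emit(format!(...)[]) in this patch
        emit(&format!("static method in `deriving({})`", name));
        emit(format!("unknown `deriving` trait: `{}`", name).as_str());
    }
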
diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index 127cc5ed51d..b1158917b72 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -97,7 +97,7 @@ impl AttrMetaMethods for MetaItem {
 
     fn meta_item_list<'a>(&'a self) -> Option<&'a [P<MetaItem>]> {
         match self.node {
-            MetaList(_, ref l) => Some(l.as_slice()),
+            MetaList(_, ref l) => Some(l[]),
             _ => None
         }
     }
@@ -136,7 +136,7 @@ impl AttributeMethods for Attribute {
             let meta = mk_name_value_item_str(
                 InternedString::new("doc"),
                 token::intern_and_get_ident(strip_doc_comment_decoration(
-                        comment.get()).as_slice()));
+                        comment.get())[]));
             if self.node.style == ast::AttrOuter {
                 f(&mk_attr_outer(self.node.id, meta))
             } else {
@@ -296,9 +296,9 @@ pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr {
             }
             MetaList(ref n, ref items) if *n == "inline" => {
                 mark_used(attr);
-                if contains_name(items.as_slice(), "always") {
+                if contains_name(items[], "always") {
                     InlineAlways
-                } else if contains_name(items.as_slice(), "never") {
+                } else if contains_name(items[], "never") {
                     InlineNever
                 } else {
                     InlineHint
@@ -332,7 +332,7 @@ pub fn cfg_matches(diagnostic: &SpanHandler, cfgs: &[P<MetaItem>], cfg: &ast::Me
             !cfg_matches(diagnostic, cfgs, &*mis[0])
         }
         ast::MetaList(ref pred, _) => {
-            diagnostic.span_err(cfg.span, format!("invalid predicate `{}`", pred).as_slice());
+            diagnostic.span_err(cfg.span, format!("invalid predicate `{}`", pred)[]);
             false
         },
         ast::MetaWord(_) | ast::MetaNameValue(..) => contains(cfgs, cfg),
@@ -396,8 +396,7 @@ pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[P<MetaItem>]) {
 
         if !set.insert(name.clone()) {
             diagnostic.span_fatal(meta.span,
-                                  format!("duplicate meta item `{}`",
-                                          name).as_slice());
+                                  format!("duplicate meta item `{}`", name)[]);
         }
     }
 }
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index c726e17a8fa..060e1d3f995 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -291,9 +291,9 @@ impl FileMap {
         lines.get(line_number).map(|&line| {
             let begin: BytePos = line - self.start_pos;
             let begin = begin.to_uint();
-            let slice = self.src.slice_from(begin);
+            let slice = self.src[begin..];
             match slice.find('\n') {
-                Some(e) => slice.slice_to(e),
+                Some(e) => slice[0..e],
                 None => slice
             }.to_string()
         })
@@ -338,9 +338,9 @@ impl CodeMap {
         // FIXME #12884: no efficient/safe way to remove from the start of a string
         // and reuse the allocation.
         let mut src = if src.starts_with("\u{feff}") {
-            String::from_str(src.slice_from(3))
+            String::from_str(src[3..])
         } else {
-            String::from_str(src.as_slice())
+            String::from_str(src[])
         };
 
         // Append '\n' in case it's not already there.
@@ -427,8 +427,8 @@ impl CodeMap {
         if begin.fm.start_pos != end.fm.start_pos {
             None
         } else {
-            Some(begin.fm.src.slice(begin.pos.to_uint(),
-                                    end.pos.to_uint()).to_string())
+            Some(begin.fm.src[begin.pos.to_uint()..
+                              end.pos.to_uint()].to_string())
         }
     }
 
diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs
index 4d765f49aca..88dfdf6e2d8 100644
--- a/src/libsyntax/diagnostic.rs
+++ b/src/libsyntax/diagnostic.rs
@@ -123,7 +123,7 @@ impl SpanHandler {
         panic!(ExplicitBug);
     }
     pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
-        self.span_bug(sp, format!("unimplemented {}", msg).as_slice());
+        self.span_bug(sp, format!("unimplemented {}", msg)[]);
     }
     pub fn handler<'a>(&'a self) -> &'a Handler {
         &self.handler
@@ -166,7 +166,7 @@ impl Handler {
                         self.err_count.get());
           }
         }
-        self.fatal(s.as_slice());
+        self.fatal(s[]);
     }
     pub fn warn(&self, msg: &str) {
         self.emit.borrow_mut().emit(None, msg, None, Warning);
@@ -182,7 +182,7 @@ impl Handler {
         panic!(ExplicitBug);
     }
     pub fn unimpl(&self, msg: &str) -> ! {
-        self.bug(format!("unimplemented {}", msg).as_slice());
+        self.bug(format!("unimplemented {}", msg)[]);
     }
     pub fn emit(&self,
                 cmsp: Option<(&codemap::CodeMap, Span)>,
@@ -277,7 +277,7 @@ fn print_maybe_styled(w: &mut EmitterWriter,
             // to be miscolored. We assume this is rare enough that we don't
             // have to worry about it.
             if msg.ends_with("\n") {
-                try!(t.write_str(msg.slice_to(msg.len()-1)));
+                try!(t.write_str(msg[0..msg.len()-1]));
                 try!(t.reset());
                 try!(t.write_str("\n"));
             } else {
@@ -299,16 +299,16 @@ fn print_diagnostic(dst: &mut EmitterWriter, topic: &str, lvl: Level,
     }
 
     try!(print_maybe_styled(dst,
-                            format!("{}: ", lvl.to_string()).as_slice(),
+                            format!("{}: ", lvl.to_string())[],
                             term::attr::ForegroundColor(lvl.color())));
     try!(print_maybe_styled(dst,
-                            format!("{}", msg).as_slice(),
+                            format!("{}", msg)[],
                             term::attr::Bold));
 
     match code {
         Some(code) => {
             let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA);
-            try!(print_maybe_styled(dst, format!(" [{}]", code.clone()).as_slice(), style));
+            try!(print_maybe_styled(dst, format!(" [{}]", code.clone())[], style));
         }
         None => ()
     }
@@ -398,12 +398,12 @@ fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan,
         // the span)
         let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id};
         let ses = cm.span_to_string(span_end);
-        try!(print_diagnostic(dst, ses.as_slice(), lvl, msg, code));
+        try!(print_diagnostic(dst, ses[], lvl, msg, code));
         if rsp.is_full_span() {
             try!(custom_highlight_lines(dst, cm, sp, lvl, lines));
         }
     } else {
-        try!(print_diagnostic(dst, ss.as_slice(), lvl, msg, code));
+        try!(print_diagnostic(dst, ss[], lvl, msg, code));
         if rsp.is_full_span() {
             try!(highlight_lines(dst, cm, sp, lvl, lines));
         }
@@ -413,9 +413,9 @@ fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan,
         Some(code) =>
             match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) {
                 Some(_) => {
-                    try!(print_diagnostic(dst, ss.as_slice(), Help,
+                    try!(print_diagnostic(dst, ss[], Help,
                                           format!("pass `--explain {}` to see a detailed \
-                                                   explanation", code).as_slice(), None));
+                                                   explanation", code)[], None));
                 }
                 None => ()
             },
@@ -432,7 +432,7 @@ fn highlight_lines(err: &mut EmitterWriter,
     let fm = &*lines.file;
 
     let mut elided = false;
-    let mut display_lines = lines.lines.as_slice();
+    let mut display_lines = lines.lines[];
     if display_lines.len() > MAX_LINES {
         display_lines = display_lines[0u..MAX_LINES];
         elided = true;
@@ -494,7 +494,7 @@ fn highlight_lines(err: &mut EmitterWriter,
             }
         }
         try!(print_maybe_styled(err,
-                                format!("{}\n", s).as_slice(),
+                                format!("{}\n", s)[],
                                 term::attr::ForegroundColor(lvl.color())));
     }
     Ok(())
@@ -514,7 +514,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter,
                           -> io::IoResult<()> {
     let fm = &*lines.file;
 
-    let lines = lines.lines.as_slice();
+    let lines = lines.lines[];
     if lines.len() > MAX_LINES {
         if let Some(line) = fm.get_line(lines[0]) {
             try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
@@ -545,7 +545,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter,
     s.push('^');
     s.push('\n');
     print_maybe_styled(w,
-                       s.as_slice(),
+                       s[],
                        term::attr::ForegroundColor(lvl.color()))
 }
 
@@ -560,12 +560,12 @@ fn print_macro_backtrace(w: &mut EmitterWriter,
                 codemap::MacroAttribute => ("#[", "]"),
                 codemap::MacroBang => ("", "!")
             };
-            try!(print_diagnostic(w, ss.as_slice(), Note,
+            try!(print_diagnostic(w, ss[], Note,
                                   format!("in expansion of {}{}{}", pre,
                                           ei.callee.name,
-                                          post).as_slice(), None));
+                                          post)[], None));
             let ss = cm.span_to_string(ei.call_site);
-            try!(print_diagnostic(w, ss.as_slice(), Note, "expansion site", None));
+            try!(print_diagnostic(w, ss[], Note, "expansion site", None));
             Ok(Some(ei.call_site))
         }
         None => Ok(None)
@@ -578,6 +578,6 @@ pub fn expect<T, M>(diag: &SpanHandler, opt: Option<T>, msg: M) -> T where
 {
     match opt {
         Some(t) => t,
-        None => diag.handler().bug(msg().as_slice()),
+        None => diag.handler().bug(msg()[]),
     }
 }
diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs
index bcce5538314..90fc28014e6 100644
--- a/src/libsyntax/diagnostics/plugin.rs
+++ b/src/libsyntax/diagnostics/plugin.rs
@@ -58,7 +58,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
             Some(previous_span) => {
                 ecx.span_warn(span, format!(
                     "diagnostic code {} already used", token::get_ident(code).get()
-                ).as_slice());
+                )[]);
                 ecx.span_note(previous_span, "previous invocation");
             },
             None => ()
@@ -87,12 +87,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
         if diagnostics.insert(code.name, description).is_some() {
             ecx.span_err(span, format!(
                 "diagnostic code {} already registered", token::get_ident(*code).get()
-            ).as_slice());
+            )[]);
         }
     });
     let sym = Ident::new(token::gensym((
         "__register_diagnostic_".to_string() + token::get_ident(*code).get()
-    ).as_slice()));
+    )[]));
     MacItems::new(vec![quote_item!(ecx, mod $sym {}).unwrap()].into_iter())
 }
 
diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs
index b138811187b..b77b822a6b2 100644
--- a/src/libsyntax/ext/asm.rs
+++ b/src/libsyntax/ext/asm.rs
@@ -100,8 +100,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                         Some(('=', _)) => None,
                         Some(('+', operand)) => {
                             Some(token::intern_and_get_ident(format!(
-                                        "={}",
-                                        operand).as_slice()))
+                                        "={}", operand)[]))
                         }
                         _ => {
                             cx.span_err(span, "output operand constraint lacks '=' or '+'");
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index aefbb2a1fea..62fe718b522 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -549,7 +549,7 @@ impl<'a> ExtCtxt<'a> {
     pub fn mod_pop(&mut self) { self.mod_path.pop().unwrap(); }
     pub fn mod_path(&self) -> Vec<ast::Ident> {
         let mut v = Vec::new();
-        v.push(token::str_to_ident(self.ecfg.crate_name.as_slice()));
+        v.push(token::str_to_ident(self.ecfg.crate_name[]));
         v.extend(self.mod_path.iter().map(|a| *a));
         return v;
     }
@@ -558,7 +558,7 @@ impl<'a> ExtCtxt<'a> {
         if self.recursion_count > self.ecfg.recursion_limit {
             self.span_fatal(ei.call_site,
                             format!("recursion limit reached while expanding the macro `{}`",
-                                    ei.callee.name).as_slice());
+                                    ei.callee.name)[]);
         }
 
         let mut call_site = ei.call_site;
@@ -669,7 +669,7 @@ pub fn check_zero_tts(cx: &ExtCtxt,
                       tts: &[ast::TokenTree],
                       name: &str) {
     if tts.len() != 0 {
-        cx.span_err(sp, format!("{} takes no arguments", name).as_slice());
+        cx.span_err(sp, format!("{} takes no arguments", name)[]);
     }
 }
 
@@ -682,12 +682,12 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
                                -> Option<String> {
     let mut p = cx.new_parser_from_tts(tts);
     if p.token == token::Eof {
-        cx.span_err(sp, format!("{} takes 1 argument", name).as_slice());
+        cx.span_err(sp, format!("{} takes 1 argument", name)[]);
         return None
     }
     let ret = cx.expander().fold_expr(p.parse_expr());
     if p.token != token::Eof {
-        cx.span_err(sp, format!("{} takes 1 argument", name).as_slice());
+        cx.span_err(sp, format!("{} takes 1 argument", name)[]);
     }
     expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| {
         s.get().to_string()
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index 9d4992f7453..77165168746 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -712,8 +712,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         let loc = self.codemap().lookup_char_pos(span.lo);
         let expr_file = self.expr_str(span,
                                       token::intern_and_get_ident(loc.file
-                                                                  .name
-                                                                  .as_slice()));
+                                                                  .name[]));
         let expr_line = self.expr_uint(span, loc.line);
         let expr_file_line_tuple = self.expr_tuple(span, vec!(expr_file, expr_line));
         let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple);
diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs
index e2867c2fbab..03dd08fdf7f 100644
--- a/src/libsyntax/ext/concat.rs
+++ b/src/libsyntax/ext/concat.rs
@@ -40,14 +40,14 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
                     ast::LitInt(i, ast::UnsignedIntLit(_)) |
                     ast::LitInt(i, ast::SignedIntLit(_, ast::Plus)) |
                     ast::LitInt(i, ast::UnsuffixedIntLit(ast::Plus)) => {
-                        accumulator.push_str(format!("{}", i).as_slice());
+                        accumulator.push_str(format!("{}", i)[]);
                     }
                     ast::LitInt(i, ast::SignedIntLit(_, ast::Minus)) |
                     ast::LitInt(i, ast::UnsuffixedIntLit(ast::Minus)) => {
-                        accumulator.push_str(format!("-{}", i).as_slice());
+                        accumulator.push_str(format!("-{}", i)[]);
                     }
                     ast::LitBool(b) => {
-                        accumulator.push_str(format!("{}", b).as_slice());
+                        accumulator.push_str(format!("{}", b)[]);
                     }
                     ast::LitByte(..) |
                     ast::LitBinary(..) => {
@@ -62,5 +62,5 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
     }
     base::MacExpr::new(cx.expr_str(
             sp,
-            token::intern_and_get_ident(accumulator.as_slice())))
+            token::intern_and_get_ident(accumulator[])))
 }
diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs
index aa18b1be31a..2cf60d30a1b 100644
--- a/src/libsyntax/ext/concat_idents.rs
+++ b/src/libsyntax/ext/concat_idents.rs
@@ -40,7 +40,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
             }
         }
     }
-    let res = str_to_ident(res_str.as_slice());
+    let res = str_to_ident(res_str[]);
 
     let e = P(ast::Expr {
         id: ast::DUMMY_NODE_ID,
diff --git a/src/libsyntax/ext/deriving/bounds.rs b/src/libsyntax/ext/deriving/bounds.rs
index 3145b3bb1a4..c27a27fce6a 100644
--- a/src/libsyntax/ext/deriving/bounds.rs
+++ b/src/libsyntax/ext/deriving/bounds.rs
@@ -31,8 +31,7 @@ pub fn expand_deriving_bound<F>(cx: &mut ExtCtxt,
                 ref tname => {
                     cx.span_bug(span,
                                 format!("expected built-in trait name but \
-                                         found {}",
-                                        *tname).as_slice())
+                                         found {}", *tname)[])
                 }
             }
         },
diff --git a/src/libsyntax/ext/deriving/clone.rs b/src/libsyntax/ext/deriving/clone.rs
index a34764221b3..eedec6f37c8 100644
--- a/src/libsyntax/ext/deriving/clone.rs
+++ b/src/libsyntax/ext/deriving/clone.rs
@@ -80,13 +80,11 @@ fn cs_clone(
         EnumNonMatchingCollapsed (..) => {
             cx.span_bug(trait_span,
                         format!("non-matching enum variants in \
-                                 `deriving({})`",
-                                name).as_slice())
+                                 `deriving({})`", name)[])
         }
         StaticEnum(..) | StaticStruct(..) => {
             cx.span_bug(trait_span,
-                        format!("static method in `deriving({})`",
-                                name).as_slice())
+                        format!("static method in `deriving({})`", name)[])
         }
     }
 
@@ -103,8 +101,7 @@ fn cs_clone(
                 None => {
                     cx.span_bug(trait_span,
                                 format!("unnamed field in normal struct in \
-                                         `deriving({})`",
-                                        name).as_slice())
+                                         `deriving({})`", name)[])
                 }
             };
             cx.field_imm(field.span, ident, subcall(field))
diff --git a/src/libsyntax/ext/deriving/decodable.rs b/src/libsyntax/ext/deriving/decodable.rs
index 0a8d59da896..a4c70ebbc8e 100644
--- a/src/libsyntax/ext/deriving/decodable.rs
+++ b/src/libsyntax/ext/deriving/decodable.rs
@@ -174,7 +174,7 @@ fn decode_static_fields<F>(cx: &mut ExtCtxt,
                 let fields = fields.iter().enumerate().map(|(i, &span)| {
                     getarg(cx, span,
                            token::intern_and_get_ident(format!("_field{}",
-                                                               i).as_slice()),
+                                                               i)[]),
                            i)
                 }).collect();
 
diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs
index 30851ebeaae..aac515ed81a 100644
--- a/src/libsyntax/ext/deriving/encodable.rs
+++ b/src/libsyntax/ext/deriving/encodable.rs
@@ -162,8 +162,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
                 let name = match name {
                     Some(id) => token::get_ident(id),
                     None => {
-                        token::intern_and_get_ident(format!("_field{}",
-                                                            i).as_slice())
+                        token::intern_and_get_ident(format!("_field{}", i)[])
                     }
                 };
                 let enc = cx.expr_method_call(span, self_.clone(),
diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs
index d8de3d2db97..a2b5c0b9e96 100644
--- a/src/libsyntax/ext/deriving/generic/mod.rs
+++ b/src/libsyntax/ext/deriving/generic/mod.rs
@@ -514,15 +514,15 @@ impl<'a> TraitDef<'a> {
                     self,
                     struct_def,
                     type_ident,
-                    self_args.as_slice(),
-                    nonself_args.as_slice())
+                    self_args[],
+                    nonself_args[])
             } else {
                 method_def.expand_struct_method_body(cx,
                                                      self,
                                                      struct_def,
                                                      type_ident,
-                                                     self_args.as_slice(),
-                                                     nonself_args.as_slice())
+                                                     self_args[],
+                                                     nonself_args[])
             };
 
             method_def.create_method(cx,
@@ -554,15 +554,15 @@ impl<'a> TraitDef<'a> {
                     self,
                     enum_def,
                     type_ident,
-                    self_args.as_slice(),
-                    nonself_args.as_slice())
+                    self_args[],
+                    nonself_args[])
             } else {
                 method_def.expand_enum_method_body(cx,
                                                    self,
                                                    enum_def,
                                                    type_ident,
                                                    self_args,
-                                                   nonself_args.as_slice())
+                                                   nonself_args[])
             };
 
             method_def.create_method(cx,
@@ -649,7 +649,7 @@ impl<'a> MethodDef<'a> {
 
         for (i, ty) in self.args.iter().enumerate() {
             let ast_ty = ty.to_ty(cx, trait_.span, type_ident, generics);
-            let ident = cx.ident_of(format!("__arg_{}", i).as_slice());
+            let ident = cx.ident_of(format!("__arg_{}", i)[]);
             arg_tys.push((ident, ast_ty));
 
             let arg_expr = cx.expr_ident(trait_.span, ident);
@@ -756,7 +756,7 @@ impl<'a> MethodDef<'a> {
                                              struct_path,
                                              struct_def,
                                              format!("__self_{}",
-                                                     i).as_slice(),
+                                                     i)[],
                                              ast::MutImmutable);
             patterns.push(pat);
             raw_fields.push(ident_expr);
@@ -912,22 +912,22 @@ impl<'a> MethodDef<'a> {
             .collect::<Vec<String>>();
 
         let self_arg_idents = self_arg_names.iter()
-            .map(|name|cx.ident_of(name.as_slice()))
+            .map(|name|cx.ident_of(name[]))
             .collect::<Vec<ast::Ident>>();
 
         // The `vi_idents` will be bound, solely in the catch-all, to
         // a series of let statements mapping each self_arg to a uint
         // corresponding to its variant index.
         let vi_idents: Vec<ast::Ident> = self_arg_names.iter()
-            .map(|name| { let vi_suffix = format!("{}_vi", name.as_slice());
-                          cx.ident_of(vi_suffix.as_slice()) })
+            .map(|name| { let vi_suffix = format!("{}_vi", name[]);
+                          cx.ident_of(vi_suffix[]) })
             .collect::<Vec<ast::Ident>>();
 
         // Builds, via callback to call_substructure_method, the
         // delegated expression that handles the catch-all case,
         // using `__variants_tuple` to drive logic if necessary.
         let catch_all_substructure = EnumNonMatchingCollapsed(
-            self_arg_idents, variants.as_slice(), vi_idents.as_slice());
+            self_arg_idents, variants[], vi_idents[]);
 
         // These arms are of the form:
         // (Variant1, Variant1, ...) => Body1
@@ -949,12 +949,12 @@ impl<'a> MethodDef<'a> {
                 let mut subpats = Vec::with_capacity(self_arg_names.len());
                 let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1);
                 let first_self_pat_idents = {
-                    let (p, idents) = mk_self_pat(cx, self_arg_names[0].as_slice());
+                    let (p, idents) = mk_self_pat(cx, self_arg_names[0][]);
                     subpats.push(p);
                     idents
                 };
                 for self_arg_name in self_arg_names.tail().iter() {
-                    let (p, idents) = mk_self_pat(cx, self_arg_name.as_slice());
+                    let (p, idents) = mk_self_pat(cx, self_arg_name[]);
                     subpats.push(p);
                     self_pats_idents.push(idents);
                 }
@@ -1010,7 +1010,7 @@ impl<'a> MethodDef<'a> {
                                                 &**variant,
                                                 field_tuples);
                 let arm_expr = self.call_substructure_method(
-                    cx, trait_, type_ident, self_args.as_slice(), nonself_args,
+                    cx, trait_, type_ident, self_args[], nonself_args,
                     &substructure);
 
                 cx.arm(sp, vec![single_pat], arm_expr)
@@ -1063,7 +1063,7 @@ impl<'a> MethodDef<'a> {
             }
 
             let arm_expr = self.call_substructure_method(
-                cx, trait_, type_ident, self_args.as_slice(), nonself_args,
+                cx, trait_, type_ident, self_args[], nonself_args,
                 &catch_all_substructure);
 
             // Builds the expression:
@@ -1267,7 +1267,7 @@ impl<'a> TraitDef<'a> {
                     cx.span_bug(sp, "a struct with named and unnamed fields in `deriving`");
                 }
             };
-            let ident = cx.ident_of(format!("{}_{}", prefix, i).as_slice());
+            let ident = cx.ident_of(format!("{}_{}", prefix, i)[]);
             paths.push(codemap::Spanned{span: sp, node: ident});
             let val = cx.expr(
                 sp, ast::ExprParen(cx.expr_deref(sp, cx.expr_path(cx.path_ident(sp,ident)))));
@@ -1313,7 +1313,7 @@ impl<'a> TraitDef<'a> {
                 let mut ident_expr = Vec::new();
                 for (i, va) in variant_args.iter().enumerate() {
                     let sp = self.set_expn_info(cx, va.ty.span);
-                    let ident = cx.ident_of(format!("{}_{}", prefix, i).as_slice());
+                    let ident = cx.ident_of(format!("{}_{}", prefix, i)[]);
                     let path1 = codemap::Spanned{span: sp, node: ident};
                     paths.push(path1);
                     let expr_path = cx.expr_path(cx.path_ident(sp, ident));
@@ -1356,7 +1356,7 @@ pub fn cs_fold<F>(use_foldl: bool,
                       field.span,
                       old,
                       field.self_.clone(),
-                      field.other.as_slice())
+                      field.other[])
                 })
             } else {
                 all_fields.iter().rev().fold(base, |old, field| {
@@ -1364,12 +1364,12 @@ pub fn cs_fold<F>(use_foldl: bool,
                       field.span,
                       old,
                       field.self_.clone(),
-                      field.other.as_slice())
+                      field.other[])
                 })
             }
         },
         EnumNonMatchingCollapsed(ref all_args, _, tuple) =>
-            enum_nonmatch_f(cx, trait_span, (all_args.as_slice(), tuple),
+            enum_nonmatch_f(cx, trait_span, (all_args[], tuple),
                             substructure.nonself_args),
         StaticEnum(..) | StaticStruct(..) => {
             cx.span_bug(trait_span, "static function in `deriving`")
@@ -1409,7 +1409,7 @@ pub fn cs_same_method<F>(f: F,
             f(cx, trait_span, called)
         },
         EnumNonMatchingCollapsed(ref all_self_args, _, tuple) =>
-            enum_nonmatch_f(cx, trait_span, (all_self_args.as_slice(), tuple),
+            enum_nonmatch_f(cx, trait_span, (all_self_args[], tuple),
                             substructure.nonself_args),
         StaticEnum(..) | StaticStruct(..) => {
             cx.span_bug(trait_span, "static function in `deriving`")
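
The hunks above are the recurring shape of this patch: wherever an owned String used to be handed to an API expecting &str via .as_slice(), the call now uses the bracket-only full-range [] index form (e.g. cx.ident_of(name[])). As a reference sketch only, and not part of the patch, the same borrow in post-1.0 Rust is spelled &s[..], s.as_str(), or left to deref coercion:

    fn takes_str(s: &str) -> usize {
        s.len()
    }

    fn main() {
        let owned: String = format!("__self_{}", 3);
        // Each call hands the callee a &str borrowed from the owned String;
        // the `owned[]` form used throughout this patch was later replaced
        // by the spellings below.
        assert_eq!(takes_str(&owned[..]), 8);
        assert_eq!(takes_str(owned.as_str()), 8);
        assert_eq!(takes_str(&owned), 8); // deref coercion: &String -> &str
    }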
diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs
index 839e99c81d1..4a9076b07b5 100644
--- a/src/libsyntax/ext/deriving/mod.rs
+++ b/src/libsyntax/ext/deriving/mod.rs
@@ -115,7 +115,7 @@ pub fn expand_meta_deriving(cx: &mut ExtCtxt,
                                 cx.span_err(titem.span,
                                             format!("unknown `deriving` \
                                                      trait: `{}`",
-                                                    *tname).as_slice());
+                                                    *tname)[]);
                             }
                         };
                     }
diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs
index a68b521bbc9..19b45a1e610 100644
--- a/src/libsyntax/ext/deriving/show.rs
+++ b/src/libsyntax/ext/deriving/show.rs
@@ -127,7 +127,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
     let formatter = substr.nonself_args[0].clone();
 
     let meth = cx.ident_of("write_fmt");
-    let s = token::intern_and_get_ident(format_string.as_slice());
+    let s = token::intern_and_get_ident(format_string[]);
     let format_string = cx.expr_str(span, s);
 
     // phew, not our responsibility any more!
diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs
index 8c17b31f458..9fedc4a158e 100644
--- a/src/libsyntax/ext/env.rs
+++ b/src/libsyntax/ext/env.rs
@@ -30,7 +30,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT
         Some(v) => v
     };
 
-    let e = match os::getenv(var.as_slice()) {
+    let e = match os::getenv(var[]) {
       None => {
           cx.expr_path(cx.path_all(sp,
                                    true,
@@ -56,7 +56,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT
                                    cx.ident_of("Some")),
                               vec!(cx.expr_str(sp,
                                                token::intern_and_get_ident(
-                                          s.as_slice()))))
+                                          s[]))))
       }
     };
     MacExpr::new(e)
@@ -83,7 +83,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
         None => {
             token::intern_and_get_ident(format!("environment variable `{}` \
                                                  not defined",
-                                                var).as_slice())
+                                                var)[])
         }
         Some(second) => {
             match expr_to_string(cx, second, "expected string literal") {
@@ -106,7 +106,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
             cx.span_err(sp, msg.get());
             cx.expr_uint(sp, 0)
         }
-        Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s.as_slice()))
+        Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s[]))
     };
     MacExpr::new(e)
 }
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index b10ae7a09db..f2b6f6bfe16 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -293,7 +293,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                     fld.cx.span_err(
                         pth.span,
                         format!("macro undefined: '{}!'",
-                                extnamestr.get()).as_slice());
+                                extnamestr.get())[]);
 
                     // let compilation continue
                     None
@@ -309,7 +309,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                                 },
                             });
                         let fm = fresh_mark();
-                        let marked_before = mark_tts(tts.as_slice(), fm);
+                        let marked_before = mark_tts(tts[], fm);
 
                         // The span that we pass to the expanders we want to
                         // be the root of the call stack. That's the most
@@ -320,7 +320,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                         let opt_parsed = {
                             let expanded = expandfun.expand(fld.cx,
                                                             mac_span,
-                                                            marked_before.as_slice());
+                                                            marked_before[]);
                             parse_thunk(expanded)
                         };
                         let parsed = match opt_parsed {
@@ -329,8 +329,8 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                                 fld.cx.span_err(
                                     pth.span,
                                     format!("non-expression macro in expression position: {}",
-                                            extnamestr.get().as_slice()
-                                            ).as_slice());
+                                            extnamestr.get()[]
+                                            )[]);
                                 return None;
                             }
                         };
@@ -340,7 +340,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                         fld.cx.span_err(
                             pth.span,
                             format!("'{}' is not a tt-style macro",
-                                    extnamestr.get()).as_slice());
+                                    extnamestr.get())[]);
                         None
                     }
                 }
@@ -445,7 +445,7 @@ pub fn expand_item(it: P<ast::Item>, fld: &mut MacroExpander)
             if valid_ident {
                 fld.cx.mod_push(it.ident);
             }
-            let macro_escape = contains_macro_escape(new_attrs.as_slice());
+            let macro_escape = contains_macro_escape(new_attrs[]);
             let result = with_exts_frame!(fld.cx.syntax_env,
                                           macro_escape,
                                           noop_fold_item(it, fld));
@@ -553,7 +553,7 @@ pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander)
             None => {
                 fld.cx.span_err(path_span,
                                 format!("macro undefined: '{}!'",
-                                        extnamestr).as_slice());
+                                        extnamestr)[]);
                 // let compilation continue
                 return SmallVector::zero();
             }
@@ -566,7 +566,7 @@ pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander)
                                       format!("macro {}! expects no ident argument, \
                                         given '{}'",
                                       extnamestr,
-                                      token::get_ident(it.ident)).as_slice());
+                                      token::get_ident(it.ident))[]);
                         return SmallVector::zero();
                     }
                     fld.cx.bt_push(ExpnInfo {
@@ -578,14 +578,14 @@ pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander)
                         }
                     });
                     // mark before expansion:
-                    let marked_before = mark_tts(tts.as_slice(), fm);
-                    expander.expand(fld.cx, it.span, marked_before.as_slice())
+                    let marked_before = mark_tts(tts[], fm);
+                    expander.expand(fld.cx, it.span, marked_before[])
                 }
                 IdentTT(ref expander, span) => {
                     if it.ident.name == parse::token::special_idents::invalid.name {
                         fld.cx.span_err(path_span,
                                         format!("macro {}! expects an ident argument",
-                                                extnamestr.get()).as_slice());
+                                                extnamestr.get())[]);
                         return SmallVector::zero();
                     }
                     fld.cx.bt_push(ExpnInfo {
@@ -597,14 +597,14 @@ pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander)
                         }
                     });
                     // mark before expansion:
-                    let marked_tts = mark_tts(tts.as_slice(), fm);
+                    let marked_tts = mark_tts(tts[], fm);
                     expander.expand(fld.cx, it.span, it.ident, marked_tts)
                 }
                 LetSyntaxTT(ref expander, span) => {
                     if it.ident.name == parse::token::special_idents::invalid.name {
                         fld.cx.span_err(path_span,
                                         format!("macro {}! expects an ident argument",
-                                                extnamestr.get()).as_slice());
+                                                extnamestr.get())[]);
                         return SmallVector::zero();
                     }
                     fld.cx.bt_push(ExpnInfo {
@@ -621,7 +621,7 @@ pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander)
                 _ => {
                     fld.cx.span_err(it.span,
                                     format!("{}! is not legal in item position",
-                                            extnamestr.get()).as_slice());
+                                            extnamestr.get())[]);
                     return SmallVector::zero();
                 }
             }
@@ -639,8 +639,8 @@ pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander)
             // result of expanding a LetSyntaxTT, and thus doesn't
             // need to be marked. Not that it could be marked anyway.
             // create issue to recommend refactoring here?
-            fld.cx.syntax_env.insert(intern(name.as_slice()), ext);
-            if attr::contains_name(it.attrs.as_slice(), "macro_export") {
+            fld.cx.syntax_env.insert(intern(name[]), ext);
+            if attr::contains_name(it.attrs[], "macro_export") {
                 fld.cx.exported_macros.push(it);
             }
             SmallVector::zero()
@@ -654,7 +654,7 @@ pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander)
         Right(None) => {
             fld.cx.span_err(path_span,
                             format!("non-item macro in item position: {}",
-                                    extnamestr.get()).as_slice());
+                                    extnamestr.get())[]);
             return SmallVector::zero();
         }
     };
@@ -903,7 +903,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
             None => {
                 fld.cx.span_err(pth.span,
                                 format!("macro undefined: '{}!'",
-                                        extnamestr).as_slice());
+                                        extnamestr)[]);
                 // let compilation continue
                 return DummyResult::raw_pat(span);
             }
@@ -920,11 +920,11 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
                     });
 
                     let fm = fresh_mark();
-                    let marked_before = mark_tts(tts.as_slice(), fm);
+                    let marked_before = mark_tts(tts[], fm);
                     let mac_span = fld.cx.original_span();
                     let expanded = match expander.expand(fld.cx,
                                         mac_span,
-                                        marked_before.as_slice()).make_pat() {
+                                        marked_before[]).make_pat() {
                         Some(e) => e,
                         None => {
                             fld.cx.span_err(
@@ -932,7 +932,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
                                 format!(
                                     "non-pattern macro in pattern position: {}",
                                     extnamestr.get()
-                                ).as_slice()
+                                )[]
                             );
                             return DummyResult::raw_pat(span);
                         }
@@ -944,7 +944,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
                 _ => {
                     fld.cx.span_err(span,
                                     format!("{}! is not legal in pattern position",
-                                            extnamestr.get()).as_slice());
+                                            extnamestr.get())[]);
                     return DummyResult::raw_pat(span);
                 }
             }
@@ -1192,8 +1192,7 @@ pub fn expand_crate(parse_sess: &parse::ParseSess,
     let mut expander = MacroExpander::new(&mut cx);
 
     for ExportedMacros { crate_name, macros } in imported_macros.into_iter() {
-        let name = format!("<{} macros>", token::get_ident(crate_name))
-            .into_string();
+        let name = format!("<{} macros>", token::get_ident(crate_name));
 
         for source in macros.into_iter() {
             let item = parse::parse_item_from_source_str(name.clone(),
@@ -1238,7 +1237,7 @@ impl Folder for Marker {
             node: match node {
                 MacInvocTT(path, tts, ctxt) => {
                     MacInvocTT(self.fold_path(path),
-                               self.fold_tts(tts.as_slice()),
+                               self.fold_tts(tts[]),
                                mtwt::apply_mark(self.mark, ctxt))
                 }
             },
@@ -1415,9 +1414,9 @@ mod test {
         let attr2 = make_dummy_attr ("bar");
         let escape_attr = make_dummy_attr ("macro_escape");
         let attrs1 = vec!(attr1.clone(), escape_attr, attr2.clone());
-        assert_eq!(contains_macro_escape(attrs1.as_slice()),true);
+        assert_eq!(contains_macro_escape(attrs1[]),true);
         let attrs2 = vec!(attr1,attr2);
-        assert_eq!(contains_macro_escape(attrs2.as_slice()),false);
+        assert_eq!(contains_macro_escape(attrs2[]),false);
     }
 
     // make a MetaWord outer attribute with the given name
@@ -1729,7 +1728,7 @@ foo_module!();
                 let string = ident.get();
                 "xx" == string
             }).collect();
-        let cxbinds: &[&ast::Ident] = cxbinds.as_slice();
+        let cxbinds: &[&ast::Ident] = cxbinds[];
         let cxbind = match cxbinds {
             [b] => b,
             _ => panic!("expected just one binding for ext_cx")
diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs
index 95c7fcc564a..aad4045f00a 100644
--- a/src/libsyntax/ext/format.rs
+++ b/src/libsyntax/ext/format.rs
@@ -136,7 +136,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, allow_method: bool,
                 _ => {
                     ecx.span_err(p.span,
                                  format!("expected ident for named argument, found `{}`",
-                                         p.this_token_to_string()).as_slice());
+                                         p.this_token_to_string())[]);
                     return (invocation, None);
                 }
             };
@@ -149,7 +149,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, allow_method: bool,
                 Some(prev) => {
                     ecx.span_err(e.span,
                                  format!("duplicate argument named `{}`",
-                                         name).as_slice());
+                                         name)[]);
                     ecx.parse_sess.span_diagnostic.span_note(prev.span, "previously here");
                     continue
                 }
@@ -240,7 +240,7 @@ impl<'a, 'b> Context<'a, 'b> {
                     let msg = format!("invalid reference to argument `{}` ({})",
                                       arg, self.describe_num_args());
 
-                    self.ecx.span_err(self.fmtsp, msg.as_slice());
+                    self.ecx.span_err(self.fmtsp, msg[]);
                     return;
                 }
                 {
@@ -260,7 +260,7 @@ impl<'a, 'b> Context<'a, 'b> {
                     Some(e) => e.span,
                     None => {
                         let msg = format!("there is no argument named `{}`", name);
-                        self.ecx.span_err(self.fmtsp, msg.as_slice());
+                        self.ecx.span_err(self.fmtsp, msg[]);
                         return;
                     }
                 };
@@ -303,19 +303,19 @@ impl<'a, 'b> Context<'a, 'b> {
                                   format!("argument redeclared with type `{}` when \
                                            it was previously `{}`",
                                           *ty,
-                                          *cur).as_slice());
+                                          *cur)[]);
             }
             (&Known(ref cur), _) => {
                 self.ecx.span_err(sp,
                                   format!("argument used to format with `{}` was \
                                            attempted to not be used for formatting",
-                                           *cur).as_slice());
+                                           *cur)[]);
             }
             (_, &Known(ref ty)) => {
                 self.ecx.span_err(sp,
                                   format!("argument previously used as a format \
                                            argument attempted to be used as `{}`",
-                                           *ty).as_slice());
+                                           *ty)[]);
             }
             (_, _) => {
                 self.ecx.span_err(sp, "argument declared with multiple formats");
@@ -380,7 +380,7 @@ impl<'a, 'b> Context<'a, 'b> {
     /// Translate the accumulated string literals to a literal expression
     fn trans_literal_string(&mut self) -> P<ast::Expr> {
         let sp = self.fmtsp;
-        let s = token::intern_and_get_ident(self.literal.as_slice());
+        let s = token::intern_and_get_ident(self.literal[]);
         self.literal.clear();
         self.ecx.expr_str(sp, s)
     }
@@ -552,7 +552,7 @@ impl<'a, 'b> Context<'a, 'b> {
                 None => continue // error already generated
             };
 
-            let name = self.ecx.ident_of(format!("__arg{}", i).as_slice());
+            let name = self.ecx.ident_of(format!("__arg{}", i)[]);
             pats.push(self.ecx.pat_ident(e.span, name));
             locals.push(Context::format_arg(self.ecx, e.span, arg_ty,
                                             self.ecx.expr_ident(e.span, name)));
@@ -569,7 +569,7 @@ impl<'a, 'b> Context<'a, 'b> {
             };
 
             let lname = self.ecx.ident_of(format!("__arg{}",
-                                                  *name).as_slice());
+                                                  *name)[]);
             pats.push(self.ecx.pat_ident(e.span, lname));
             names[self.name_positions[*name]] =
                 Some(Context::format_arg(self.ecx, e.span, arg_ty,
@@ -652,7 +652,7 @@ impl<'a, 'b> Context<'a, 'b> {
                   -> P<ast::Expr> {
         let trait_ = match *ty {
             Known(ref tyname) => {
-                match tyname.as_slice() {
+                match tyname[] {
                     ""  => "Show",
                     "?" => "Show",
                     "e" => "LowerExp",
@@ -665,7 +665,7 @@ impl<'a, 'b> Context<'a, 'b> {
                     _ => {
                         ecx.span_err(sp,
                                      format!("unknown format trait `{}`",
-                                             *tyname).as_slice());
+                                             *tyname)[]);
                         "Dummy"
                     }
                 }
@@ -760,8 +760,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
     match parser.errors.remove(0) {
         Some(error) => {
             cx.ecx.span_err(cx.fmtsp,
-                            format!("invalid format string: {}",
-                                    error).as_slice());
+                            format!("invalid format string: {}", error)[]);
             return DummyResult::raw_expr(sp);
         }
         None => {}
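
In format.rs the interesting case is match tyname[] { ... }: an owned String has to be reborrowed as &str before it can be matched against string-literal patterns such as "", "?", or "e". A hypothetical stand-alone version in today's spelling (as_str() in place of []), reusing the mapping visible in the hunk:

    // Stand-in for the formatting-trait lookup in the format! expansion;
    // the function name and signature here are illustrative only.
    fn format_trait_for(tyname: String) -> &'static str {
        match tyname.as_str() {
            "" | "?" => "Show",
            "e" => "LowerExp",
            _ => "Dummy", // the real code reports "unknown format trait" first
        }
    }

    fn main() {
        assert_eq!(format_trait_for(String::new()), "Show");
        assert_eq!(format_trait_for("e".to_string()), "LowerExp");
        assert_eq!(format_trait_for("zzz".to_string()), "Dummy");
    }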
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index c7cb41e2ece..368d4fa8447 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -474,7 +474,7 @@ pub fn expand_quote_stmt(cx: &mut ExtCtxt,
 }
 
 fn ids_ext(strs: Vec<String> ) -> Vec<ast::Ident> {
-    strs.iter().map(|str| str_to_ident((*str).as_slice())).collect()
+    strs.iter().map(|str| str_to_ident((*str)[])).collect()
 }
 
 fn id_ext(str: &str) -> ast::Ident {
@@ -676,7 +676,7 @@ fn mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
             for i in range(0, tt.len()) {
                 seq.push(tt.get_tt(i));
             }
-            mk_tts(cx, seq.as_slice())
+            mk_tts(cx, seq[])
         }
         ast::TtToken(sp, ref tok) => {
             let e_sp = cx.expr_ident(sp, id_ext("_sp"));
@@ -765,7 +765,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
 
     let mut vector = vec!(stmt_let_sp, stmt_let_tt);
-    vector.extend(mk_tts(cx, tts.as_slice()).into_iter());
+    vector.extend(mk_tts(cx, tts[]).into_iter());
     let block = cx.expr_block(
         cx.block_all(sp,
                      Vec::new(),
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index 570231940aa..7c2c5c1530c 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -57,7 +57,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 
     let topmost = cx.original_span_in_file();
     let loc = cx.codemap().lookup_char_pos(topmost.lo);
-    let filename = token::intern_and_get_ident(loc.file.name.as_slice());
+    let filename = token::intern_and_get_ident(loc.file.name[]);
     base::MacExpr::new(cx.expr_str(topmost, filename))
 }
 
@@ -65,7 +65,7 @@ pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                         -> Box<base::MacResult+'static> {
     let s = pprust::tts_to_string(tts);
     base::MacExpr::new(cx.expr_str(sp,
-                                   token::intern_and_get_ident(s.as_slice())))
+                                   token::intern_and_get_ident(s[])))
 }
 
 pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
@@ -78,7 +78,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                    .connect("::");
     base::MacExpr::new(cx.expr_str(
             sp,
-            token::intern_and_get_ident(string.as_slice())))
+            token::intern_and_get_ident(string[])))
 }
 
 /// include! : parse the given file as an expr
@@ -137,7 +137,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
             cx.span_err(sp,
                         format!("couldn't read {}: {}",
                                 file.display(),
-                                e).as_slice());
+                                e)[]);
             return DummyResult::expr(sp);
         }
         Ok(bytes) => bytes,
@@ -147,7 +147,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
             // Add this input file to the code map to make it available as
             // dependency information
             let filename = file.display().to_string();
-            let interned = token::intern_and_get_ident(src.as_slice());
+            let interned = token::intern_and_get_ident(src[]);
             cx.codemap().new_filemap(filename, src);
 
             base::MacExpr::new(cx.expr_str(sp, interned))
@@ -155,7 +155,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
         Err(_) => {
             cx.span_err(sp,
                         format!("{} wasn't a utf-8 file",
-                                file.display()).as_slice());
+                                file.display())[]);
             return DummyResult::expr(sp);
         }
     }
@@ -171,9 +171,7 @@ pub fn expand_include_bin(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     match File::open(&file).read_to_end() {
         Err(e) => {
             cx.span_err(sp,
-                        format!("couldn't read {}: {}",
-                                file.display(),
-                                e).as_slice());
+                        format!("couldn't read {}: {}", file.display(), e)[]);
             return DummyResult::expr(sp);
         }
         Ok(bytes) => {
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index bc639c32380..73ef18b8449 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -153,7 +153,7 @@ pub fn count_names(ms: &[TokenTree]) -> uint {
                 seq.num_captures
             }
             &TtDelimited(_, ref delim) => {
-                count_names(delim.tts.as_slice())
+                count_names(delim.tts[])
             }
             &TtToken(_, MatchNt(..)) => {
                 1
@@ -165,7 +165,7 @@ pub fn count_names(ms: &[TokenTree]) -> uint {
 
 pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: BytePos)
                            -> Box<MatcherPos> {
-    let match_idx_hi = count_names(ms.as_slice());
+    let match_idx_hi = count_names(ms[]);
     let matches = Vec::from_fn(match_idx_hi, |_i| Vec::new());
     box MatcherPos {
         stack: vec![],
@@ -229,7 +229,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
                         p_s.span_diagnostic
                            .span_fatal(sp,
                                        format!("duplicated bind name: {}",
-                                               string.get()).as_slice())
+                                               string.get())[])
                     }
                 }
             }
@@ -254,13 +254,13 @@ pub fn parse_or_else(sess: &ParseSess,
                      rdr: TtReader,
                      ms: Vec<TokenTree> )
                      -> HashMap<Ident, Rc<NamedMatch>> {
-    match parse(sess, cfg, rdr, ms.as_slice()) {
+    match parse(sess, cfg, rdr, ms[]) {
         Success(m) => m,
         Failure(sp, str) => {
-            sess.span_diagnostic.span_fatal(sp, str.as_slice())
+            sess.span_diagnostic.span_fatal(sp, str[])
         }
         Error(sp, str) => {
-            sess.span_diagnostic.span_fatal(sp, str.as_slice())
+            sess.span_diagnostic.span_fatal(sp, str[])
         }
     }
 }
@@ -416,7 +416,7 @@ pub fn parse(sess: &ParseSess,
                         }
                     }
                     TtToken(sp, SubstNt(..)) => {
-                        return Error(sp, "Cannot transcribe in macro LHS".into_string())
+                        return Error(sp, "Cannot transcribe in macro LHS".to_string())
                     }
                     seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..)) => {
                         let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq));
@@ -446,7 +446,7 @@ pub fn parse(sess: &ParseSess,
                 for dv in eof_eis[0].matches.iter_mut() {
                     v.push(dv.pop().unwrap());
                 }
-                return Success(nameize(sess, ms, v.as_slice()));
+                return Success(nameize(sess, ms, v[]));
             } else if eof_eis.len() > 1u {
                 return Error(sp, "ambiguity: multiple successful parses".to_string());
             } else {
@@ -521,7 +521,7 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal {
         _ => {
             let token_str = pprust::token_to_string(&p.token);
             p.fatal((format!("expected ident, found {}",
-                             token_str.as_slice())).as_slice())
+                             token_str[]))[])
         }
       },
       "path" => {
@@ -535,8 +535,7 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal {
         res
       }
       _ => {
-          p.fatal(format!("unsupported builtin nonterminal parser: {}",
-                          name).as_slice())
+          p.fatal(format!("unsupported builtin nonterminal parser: {}", name)[])
       }
     }
 }
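
The same borrow rewrite applies to vectors throughout the macro machinery: Vec<T>::as_slice() calls become the [] form wherever a &[T] parameter is expected (count_names(delim.tts[]), mark_tts(tts[], fm), and so on). A hypothetical stand-in, with u32 playing the role of a token tree, shows the stable-Rust spellings of that borrow:

    // Simplified stand-in for count_names(ms: &[TokenTree]) from macro_parser.rs.
    fn count_names(tts: &[u32]) -> usize {
        tts.len()
    }

    fn main() {
        let tts: Vec<u32> = vec![1, 2, 3];
        assert_eq!(count_names(&tts[..]), 3);       // explicit full-range slice
        assert_eq!(count_names(tts.as_slice()), 3); // Vec::as_slice in today's std
        assert_eq!(count_names(&tts), 3);           // deref coercion: &Vec<u32> -> &[u32]
    }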
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 92c68b7a9c7..08014dc1338 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -52,7 +52,7 @@ impl<'a> ParserAnyMacro<'a> {
                                following",
                               token_str);
             let span = parser.span;
-            parser.span_err(span, msg.as_slice());
+            parser.span_err(span, msg[]);
         }
     }
 }
@@ -124,8 +124,8 @@ impl TTMacroExpander for MacroRulesMacroExpander {
                           sp,
                           self.name,
                           arg,
-                          self.lhses.as_slice(),
-                          self.rhses.as_slice())
+                          self.lhses[],
+                          self.rhses[])
     }
 }
 
@@ -160,7 +160,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
         match **lhs {
           MatchedNonterminal(NtTT(ref lhs_tt)) => {
             let lhs_tt = match **lhs_tt {
-                TtDelimited(_, ref delim) => delim.tts.as_slice(),
+                TtDelimited(_, ref delim) => delim.tts[],
                 _ => cx.span_fatal(sp, "malformed macro lhs")
             };
             // `None` is because we're not interpolating
@@ -198,13 +198,13 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                 best_fail_spot = sp;
                 best_fail_msg = (*msg).clone();
               },
-              Error(sp, ref msg) => cx.span_fatal(sp, msg.as_slice())
+              Error(sp, ref msg) => cx.span_fatal(sp, msg[])
             }
           }
           _ => cx.bug("non-matcher found in parsed lhses")
         }
     }
-    cx.span_fatal(best_fail_spot, best_fail_msg.as_slice());
+    cx.span_fatal(best_fail_spot, best_fail_msg[]);
 }
 
 // Note that macro-by-example's input is also matched against a token tree:
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 378dbba07fa..deed0b78e87 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -223,7 +223,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                     }
                     LisContradiction(ref msg) => {
                         // FIXME #2887 blame macro invoker instead
-                        r.sp_diag.span_fatal(sp.clone(), msg.as_slice());
+                        r.sp_diag.span_fatal(sp.clone(), msg[]);
                     }
                     LisConstraint(len, _) => {
                         if len == 0 {
@@ -280,7 +280,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                                 r.sp_diag.span_fatal(
                                     r.cur_span, /* blame the macro writer */
                                     format!("variable '{}' is still repeating at this depth",
-                                            token::get_ident(ident)).as_slice());
+                                            token::get_ident(ident))[]);
                             }
                         }
                     }
diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs
index 0e0a87c74f8..d53a4b0e8d1 100644
--- a/src/libsyntax/feature_gate.rs
+++ b/src/libsyntax/feature_gate.rs
@@ -133,7 +133,7 @@ impl<'a> Context<'a> {
             self.span_handler.span_err(span, explain);
             self.span_handler.span_help(span, format!("add #![feature({})] to the \
                                                        crate attributes to enable",
-                                                      feature).as_slice());
+                                                      feature)[]);
         }
     }
 
@@ -187,7 +187,7 @@ impl<'a, 'v> Visitor<'v> for Context<'a> {
         }
         match i.node {
             ast::ItemForeignMod(ref foreign_module) => {
-                if attr::contains_name(i.attrs.as_slice(), "link_args") {
+                if attr::contains_name(i.attrs[], "link_args") {
                     self.gate_feature("link_args", i.span,
                                       "the `link_args` attribute is not portable \
                                        across platforms, it is recommended to \
@@ -201,14 +201,14 @@ impl<'a, 'v> Visitor<'v> for Context<'a> {
             }
 
             ast::ItemFn(..) => {
-                if attr::contains_name(i.attrs.as_slice(), "plugin_registrar") {
+                if attr::contains_name(i.attrs[], "plugin_registrar") {
                     self.gate_feature("plugin_registrar", i.span,
                                       "compiler plugins are experimental and possibly buggy");
                 }
             }
 
             ast::ItemStruct(..) => {
-                if attr::contains_name(i.attrs.as_slice(), "simd") {
+                if attr::contains_name(i.attrs[], "simd") {
                     self.gate_feature("simd", i.span,
                                       "SIMD types are experimental and possibly buggy");
                 }
@@ -285,7 +285,7 @@ impl<'a, 'v> Visitor<'v> for Context<'a> {
     }
 
     fn visit_foreign_item(&mut self, i: &ast::ForeignItem) {
-        if attr::contains_name(i.attrs.as_slice(), "linkage") {
+        if attr::contains_name(i.attrs[], "linkage") {
             self.gate_feature("linkage", i.span,
                               "the `linkage` attribute is experimental \
                                and not portable across platforms")
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 41fee1556ab..41693d9d47a 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -92,8 +92,7 @@ impl<'a> ParserAttr for Parser<'a> {
             }
             _ => {
                 let token_str = self.this_token_to_string();
-                self.fatal(format!("expected `#`, found `{}`",
-                                   token_str).as_slice());
+                self.fatal(format!("expected `#`, found `{}`", token_str)[]);
             }
         };
 
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index 95bae63f58f..b8da8365f7e 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -82,7 +82,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
         while j > i && lines[j - 1].trim().is_empty() {
             j -= 1;
         }
-        return lines.slice(i, j).iter().map(|x| (*x).clone()).collect();
+        return lines[i..j].iter().map(|x| (*x).clone()).collect();
     }
 
     /// remove a "[ \t]*\*" block from each line, if possible
@@ -116,7 +116,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
 
         if can_trim {
             lines.iter().map(|line| {
-                line.slice(i + 1, line.len()).to_string()
+                line[i + 1..line.len()].to_string()
             }).collect()
         } else {
             lines
@@ -127,12 +127,12 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
     static ONLINERS: &'static [&'static str] = &["///!", "///", "//!", "//"];
     for prefix in ONLINERS.iter() {
         if comment.starts_with(*prefix) {
-            return comment.slice_from(prefix.len()).to_string();
+            return comment[prefix.len()..].to_string();
         }
     }
 
     if comment.starts_with("/*") {
-        let lines = comment.slice(3u, comment.len() - 2u)
+        let lines = comment[3u..comment.len() - 2u]
             .lines_any()
             .map(|s| s.to_string())
             .collect::<Vec<String> >();
@@ -187,7 +187,7 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool,
         let line = rdr.read_one_line_comment();
         debug!("{}", line);
         // Doc comments are not put in comments.
-        if is_doc_comment(line.as_slice()) {
+        if is_doc_comment(line[]) {
             break;
         }
         lines.push(line);
@@ -224,10 +224,10 @@ fn all_whitespace(s: &str, col: CharPos) -> Option<uint> {
 fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String> ,
                                         s: String, col: CharPos) {
     let len = s.len();
-    let s1 = match all_whitespace(s.as_slice(), col) {
+    let s1 = match all_whitespace(s[], col) {
         Some(col) => {
             if col < len {
-                s.slice(col, len).to_string()
+                s[col..len].to_string()
             } else {
                 "".to_string()
             }
@@ -261,7 +261,7 @@ fn read_block_comment(rdr: &mut StringReader,
             rdr.bump();
             rdr.bump();
         }
-        if is_block_doc_comment(curr_line.as_slice()) {
+        if is_block_doc_comment(curr_line[]) {
             return
         }
         assert!(!curr_line.contains_char('\n'));
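
The comments.rs hunks (and several that follow) retire the .slice(i, j) and .slice_from(i) methods in favour of range indexing, written today as &s[i..j] and &s[i..]. The indices are byte offsets and must land on UTF-8 character boundaries; a small sketch, not part of the patch:

    fn main() {
        let comment = "/// doc comment";
        let prefix = "///";

        // Old form: comment.slice_from(prefix.len()); new form: a byte-range index.
        if comment.starts_with(prefix) {
            assert_eq!(&comment[prefix.len()..], " doc comment");
        }

        // Ranges count bytes, not characters, and must respect char boundaries.
        let s = "día";
        assert_eq!(&s[0..1], "d");
        assert!(!s.is_char_boundary(2)); // &s[0..2] would panic inside the 2-byte 'í'
    }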
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index da908f46ff6..13d020f6ae3 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -194,7 +194,7 @@ impl<'a> StringReader<'a> {
         let mut m = m.to_string();
         m.push_str(": ");
         for c in c.escape_default() { m.push(c) }
-        self.fatal_span_(from_pos, to_pos, m.as_slice());
+        self.fatal_span_(from_pos, to_pos, m[]);
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@@ -203,7 +203,7 @@ impl<'a> StringReader<'a> {
         let mut m = m.to_string();
         m.push_str(": ");
         for c in c.escape_default() { m.push(c) }
-        self.err_span_(from_pos, to_pos, m.as_slice());
+        self.err_span_(from_pos, to_pos, m[]);
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the
@@ -212,8 +212,8 @@ impl<'a> StringReader<'a> {
         m.push_str(": ");
         let from = self.byte_offset(from_pos).to_uint();
         let to = self.byte_offset(to_pos).to_uint();
-        m.push_str(self.filemap.src.as_slice().slice(from, to));
-        self.fatal_span_(from_pos, to_pos, m.as_slice());
+        m.push_str(self.filemap.src[from..to]);
+        self.fatal_span_(from_pos, to_pos, m[]);
     }
 
     /// Advance peek_tok and peek_span to refer to the next token, and
@@ -299,7 +299,7 @@ impl<'a> StringReader<'a> {
             while i < s.len() {
                 let str::CharRange { ch, next } = s.char_range_at(i);
                 if ch == '\r' {
-                    if j < i { buf.push_str(s.slice(j, i)); }
+                    if j < i { buf.push_str(s[j..i]); }
                     j = next;
                     if next >= s.len() || s.char_at(next) != '\n' {
                         let pos = start + BytePos(i as u32);
@@ -309,7 +309,7 @@ impl<'a> StringReader<'a> {
                 }
                 i = next;
             }
-            if j < s.len() { buf.push_str(s.slice_from(j)); }
+            if j < s.len() { buf.push_str(s[j..]); }
             buf
         }
     }
@@ -358,7 +358,7 @@ impl<'a> StringReader<'a> {
 
     pub fn nextnextch(&self) -> Option<char> {
         let offset = self.byte_offset(self.pos).to_uint();
-        let s = self.filemap.deref().src.as_slice();
+        let s = self.filemap.deref().src[];
         if offset >= s.len() { return None }
         let str::CharRange { next, .. } = s.char_range_at(offset);
         if next < s.len() {
@@ -554,7 +554,7 @@ impl<'a> StringReader<'a> {
                     self.translate_crlf(start_bpos, string,
                                         "bare CR not allowed in block doc-comment")
                 } else { string.into_cow() };
-                token::DocComment(token::intern(string.as_slice()))
+                token::DocComment(token::intern(string[]))
             } else {
                 token::Comment
             };
@@ -1108,7 +1108,7 @@ impl<'a> StringReader<'a> {
                 // expansion purposes. See #12512 for the gory details of why
                 // this is necessary.
                 let ident = self.with_str_from(start, |lifetime_name| {
-                    str_to_ident(format!("'{}", lifetime_name).as_slice())
+                    str_to_ident(format!("'{}", lifetime_name)[])
                 });
 
                 // Conjure up a "keyword checking ident" to make sure that
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 3d0877dd432..8cefb111fd1 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -251,17 +251,17 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
         Err(e) => {
             err(format!("couldn't read {}: {}",
                         path.display(),
-                        e).as_slice());
+                        e)[]);
             unreachable!()
         }
     };
-    match str::from_utf8(bytes.as_slice()) {
+    match str::from_utf8(bytes[]).ok() {
         Some(s) => {
             return string_to_filemap(sess, s.to_string(),
                                      path.as_str().unwrap().to_string())
         }
         None => {
-            err(format!("{} is not UTF-8 encoded", path.display()).as_slice())
+            err(format!("{} is not UTF-8 encoded", path.display())[])
         }
     }
     unreachable!()
@@ -391,10 +391,10 @@ pub fn char_lit(lit: &str) -> (char, int) {
     }
 
     let msg = format!("lexer should have rejected a bad character escape {}", lit);
-    let msg2 = msg.as_slice();
+    let msg2 = msg[];
 
     fn esc(len: uint, lit: &str) -> Option<(char, int)> {
-        num::from_str_radix(lit.slice(2, len), 16)
+        num::from_str_radix(lit[2..len], 16)
         .and_then(char::from_u32)
         .map(|x| (x, len as int))
     }
@@ -402,10 +402,10 @@ pub fn char_lit(lit: &str) -> (char, int) {
     let unicode_escape: || -> Option<(char, int)> = ||
         if lit.as_bytes()[2] == b'{' {
             let idx = lit.find('}').expect(msg2);
-            let subslice = lit.slice(3, idx);
+            let subslice = lit[3..idx];
             num::from_str_radix(subslice, 16)
                 .and_then(char::from_u32)
-                .map(|x| (x, subslice.char_len() as int + 4))
+                .map(|x| (x, subslice.chars().count() as int + 4))
         } else {
             esc(6, lit)
         };
@@ -429,7 +429,7 @@ pub fn str_lit(lit: &str) -> String {
     let error = |i| format!("lexer should have rejected {} at {}", lit, i);
 
     /// Eat everything up to a non-whitespace
-    fn eat<'a>(it: &mut iter::Peekable<(uint, char), str::CharOffsets<'a>>) {
+    fn eat<'a>(it: &mut iter::Peekable<(uint, char), str::CharIndices<'a>>) {
         loop {
             match it.peek().map(|x| x.1) {
                 Some(' ') | Some('\n') | Some('\r') | Some('\t') => {
@@ -464,7 +464,7 @@ pub fn str_lit(lit: &str) -> String {
                             eat(&mut chars);
                         } else {
                             // otherwise, a normal escape
-                            let (c, n) = char_lit(lit.slice_from(i));
+                            let (c, n) = char_lit(lit[i..]);
                             for _ in range(0, n - 1) { // we don't need to move past the first \
                                 chars.next();
                             }
@@ -527,7 +527,7 @@ pub fn raw_str_lit(lit: &str) -> String {
 fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
     s.len() > 1 &&
         first_chars.contains(&s.char_at(0)) &&
-        s.slice_from(1).chars().all(|c| '0' <= c && c <= '9')
+        s[1..].chars().all(|c| '0' <= c && c <= '9')
 }
 
 fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>,
@@ -540,7 +540,7 @@ fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>,
             if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
                 // if it looks like a width, lets try to be helpful.
                 sd.span_err(sp, &*format!("illegal width `{}` for float literal, \
-                                          valid widths are 32 and 64", suf.slice_from(1)));
+                                          valid widths are 32 and 64", suf[1..]));
             } else {
                 sd.span_err(sp, &*format!("illegal suffix `{}` for float literal, \
                                           valid suffixes are `f32` and `f64`", suf));
@@ -576,7 +576,7 @@ pub fn byte_lit(lit: &str) -> (u8, uint) {
             b'\'' => b'\'',
             b'0' => b'\0',
             _ => {
-                match ::std::num::from_str_radix::<u64>(lit.slice(2, 4), 16) {
+                match ::std::num::from_str_radix::<u64>(lit[2..4], 16) {
                     Some(c) =>
                         if c > 0xFF {
                             panic!(err(2))
@@ -626,7 +626,7 @@ pub fn binary_lit(lit: &str) -> Rc<Vec<u8>> {
                     }
                     _ => {
                         // otherwise, a normal escape
-                        let (c, n) = byte_lit(lit.slice_from(i));
+                        let (c, n) = byte_lit(lit[i..]);
                         // we don't need to move past the first \
                         for _ in range(0, n - 1) {
                             chars.next();
@@ -655,7 +655,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) ->
     // s can only be ascii, byte indexing is fine
 
     let s2 = s.chars().filter(|&c| c != '_').collect::<String>();
-    let mut s = s2.as_slice();
+    let mut s = s2[];
 
     debug!("integer_lit: {}, {}", s, suffix);
 
@@ -688,7 +688,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) ->
     }
 
     if base != 10 {
-        s = s.slice_from(2);
+        s = s[2..];
     }
 
     if let Some(suf) = suffix {
@@ -710,7 +710,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) ->
                 if looks_like_width_suffix(&['i', 'u'], suf) {
                     sd.span_err(sp, &*format!("illegal width `{}` for integer literal; \
                                               valid widths are 8, 16, 32 and 64",
-                                              suf.slice_from(1)));
+                                              suf[1..]));
                 } else {
                     sd.span_err(sp, &*format!("illegal suffix `{}` for numeric literal", suf));
                 }
@@ -808,7 +808,7 @@ mod test {
     #[test]
     fn string_to_tts_macro () {
         let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
-        let tts: &[ast::TokenTree] = tts.as_slice();
+        let tts: &[ast::TokenTree] = tts[];
         match tts {
             [ast::TtToken(_, token::Ident(name_macro_rules, token::Plain)),
              ast::TtToken(_, token::Not),
@@ -816,19 +816,19 @@ mod test {
              ast::TtDelimited(_, ref macro_delimed)]
             if name_macro_rules.as_str() == "macro_rules"
             && name_zip.as_str() == "zip" => {
-                match macro_delimed.tts.as_slice() {
+                match macro_delimed.tts[] {
                     [ast::TtDelimited(_, ref first_delimed),
                      ast::TtToken(_, token::FatArrow),
                      ast::TtDelimited(_, ref second_delimed)]
                     if macro_delimed.delim == token::Paren => {
-                        match first_delimed.tts.as_slice() {
+                        match first_delimed.tts[] {
                             [ast::TtToken(_, token::Dollar),
                              ast::TtToken(_, token::Ident(name, token::Plain))]
                             if first_delimed.delim == token::Paren
                             && name.as_str() == "a" => {},
                             _ => panic!("value 3: {}", **first_delimed),
                         }
-                        match second_delimed.tts.as_slice() {
+                        match second_delimed.tts[] {
                             [ast::TtToken(_, token::Dollar),
                              ast::TtToken(_, token::Ident(name, token::Plain))]
                             if second_delimed.delim == token::Paren
@@ -1106,24 +1106,24 @@ mod test {
         let use_s = "use foo::bar::baz;";
         let vitem = string_to_view_item(use_s.to_string());
         let vitem_s = view_item_to_string(&vitem);
-        assert_eq!(vitem_s.as_slice(), use_s);
+        assert_eq!(vitem_s[], use_s);
 
         let use_s = "use foo::bar as baz;";
         let vitem = string_to_view_item(use_s.to_string());
         let vitem_s = view_item_to_string(&vitem);
-        assert_eq!(vitem_s.as_slice(), use_s);
+        assert_eq!(vitem_s[], use_s);
     }
 
     #[test] fn parse_extern_crate() {
         let ex_s = "extern crate foo;";
         let vitem = string_to_view_item(ex_s.to_string());
         let vitem_s = view_item_to_string(&vitem);
-        assert_eq!(vitem_s.as_slice(), ex_s);
+        assert_eq!(vitem_s[], ex_s);
 
         let ex_s = "extern crate \"foo\" as bar;";
         let vitem = string_to_view_item(ex_s.to_string());
         let vitem_s = view_item_to_string(&vitem);
-        assert_eq!(vitem_s.as_slice(), ex_s);
+        assert_eq!(vitem_s[], ex_s);
     }
 
     fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
@@ -1161,9 +1161,9 @@ mod test {
         for &src in srcs.iter() {
             let spans = get_spans_of_pat_idents(src);
             let Span{lo:lo,hi:hi,..} = spans[0];
-            assert!("self" == src.slice(lo.to_uint(), hi.to_uint()),
+            assert!("self" == src[lo.to_uint()..hi.to_uint()],
                     "\"{}\" != \"self\". src=\"{}\"",
-                    src.slice(lo.to_uint(), hi.to_uint()), src)
+                    src[lo.to_uint()..hi.to_uint()], src)
         }
     }
 
@@ -1202,7 +1202,7 @@ mod test {
         let docs = item.attrs.iter().filter(|a| a.name().get() == "doc")
                     .map(|a| a.value_str().unwrap().get().to_string()).collect::<Vec<_>>();
         let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
-        assert_eq!(docs.as_slice(), b);
+        assert_eq!(docs[], b);
 
         let source = "/** doc comment\r\n *  with CRLF */\r\nfn foo() {}".to_string();
         let item = parse_item_from_source_str(name, source, Vec::new(), &sess).unwrap();
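
Three library renames show up in the parse/mod.rs hunks: str::from_utf8 now returns a Result, so the old Option-shaped match is restored with .ok(); char_len() becomes chars().count(); and the CharOffsets iterator type is renamed CharIndices. A compact sketch of the three in current std terms, not part of the patch:

    use std::str;

    fn main() {
        // from_utf8 yields Result<&str, Utf8Error>; .ok() recovers the Option shape.
        let bytes = b"caf\xc3\xa9";
        assert_eq!(str::from_utf8(bytes).ok(), Some("café"));

        // chars().count() counts characters, unlike len(), which counts bytes.
        let s = "café";
        assert_eq!(s.len(), 5);
        assert_eq!(s.chars().count(), 4);

        // char_indices() (the CharIndices iterator) yields (byte offset, char) pairs.
        let pairs: Vec<(usize, char)> = s.char_indices().collect();
        assert_eq!(pairs, [(0, 'c'), (1, 'a'), (2, 'f'), (3, 'é')]);
    }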
diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs
index a6ddcbf9ac4..e3c831c09ba 100644
--- a/src/libsyntax/parse/obsolete.rs
+++ b/src/libsyntax/parse/obsolete.rs
@@ -113,13 +113,13 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> {
               kind_str: &str,
               desc: &str) {
         self.span_err(sp,
-                      format!("obsolete syntax: {}", kind_str).as_slice());
+                      format!("obsolete syntax: {}", kind_str)[]);
 
         if !self.obsolete_set.contains(&kind) {
             self.sess
                 .span_diagnostic
                 .handler()
-                .note(format!("{}", desc).as_slice());
+                .note(format!("{}", desc)[]);
             self.obsolete_set.insert(kind);
         }
     }
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 19af118b190..7e53b28a09c 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -319,7 +319,7 @@ impl TokenType {
     fn to_string(&self) -> String {
         match *self {
             TokenType::Token(ref t) => format!("`{}`", Parser::token_to_string(t)),
-            TokenType::Operator => "an operator".into_string(),
+            TokenType::Operator => "an operator".to_string(),
         }
     }
 }
@@ -384,12 +384,12 @@ impl<'a> Parser<'a> {
         let token_str = Parser::token_to_string(t);
         let last_span = self.last_span;
         self.span_fatal(last_span, format!("unexpected token: `{}`",
-                                                token_str).as_slice());
+                                                token_str)[]);
     }
 
     pub fn unexpected(&mut self) -> ! {
         let this_token = self.this_token_to_string();
-        self.fatal(format!("unexpected token: `{}`", this_token).as_slice());
+        self.fatal(format!("unexpected token: `{}`", this_token)[]);
     }
 
     /// Expect and consume the token t. Signal an error if
@@ -403,7 +403,7 @@ impl<'a> Parser<'a> {
                 let this_token_str = self.this_token_to_string();
                 self.fatal(format!("expected `{}`, found `{}`",
                                    token_str,
-                                   this_token_str).as_slice())
+                                   this_token_str)[])
             }
         } else {
             self.expect_one_of(slice::ref_slice(t), &[]);
@@ -420,7 +420,7 @@ impl<'a> Parser<'a> {
             let mut i = tokens.iter();
             // This might be a sign we need a connect method on Iterator.
             let b = i.next()
-                     .map_or("".into_string(), |t| t.to_string());
+                     .map_or("".to_string(), |t| t.to_string());
             i.enumerate().fold(b, |mut b, (i, ref a)| {
                 if tokens.len() > 2 && i == tokens.len() - 2 {
                     b.push_str(", or ");
@@ -444,7 +444,7 @@ impl<'a> Parser<'a> {
             expected.push_all(&*self.expected_tokens);
             expected.sort_by(|a, b| a.to_string().cmp(&b.to_string()));
             expected.dedup();
-            let expect = tokens_to_string(expected.as_slice());
+            let expect = tokens_to_string(expected[]);
             let actual = self.this_token_to_string();
             self.fatal(
                 (if expected.len() != 1 {
@@ -455,7 +455,7 @@ impl<'a> Parser<'a> {
                     (format!("expected {}, found `{}`",
                              expect,
                              actual))
-                }).as_slice()
+                })[]
             )
         }
     }
@@ -488,7 +488,7 @@ impl<'a> Parser<'a> {
             // might be unit-struct construction; check for recoverable input error.
             let mut expected = edible.iter().map(|x| x.clone()).collect::<Vec<_>>();
             expected.push_all(inedible);
-            self.check_for_erroneous_unit_struct_expecting(expected.as_slice());
+            self.check_for_erroneous_unit_struct_expecting(expected[]);
         }
         self.expect_one_of(edible, inedible)
     }
@@ -505,9 +505,9 @@ impl<'a> Parser<'a> {
                .as_ref()
                .map_or(false, |t| t.is_ident() || t.is_path()) {
             let mut expected = edible.iter().map(|x| x.clone()).collect::<Vec<_>>();
-            expected.push_all(inedible.as_slice());
+            expected.push_all(inedible[]);
             self.check_for_erroneous_unit_struct_expecting(
-                expected.as_slice());
+                expected[]);
         }
         self.expect_one_of(edible, inedible)
     }
@@ -530,7 +530,7 @@ impl<'a> Parser<'a> {
             _ => {
                 let token_str = self.this_token_to_string();
                 self.fatal((format!("expected ident, found `{}`",
-                                    token_str)).as_slice())
+                                    token_str))[])
             }
         }
     }
@@ -584,7 +584,7 @@ impl<'a> Parser<'a> {
             let id_interned_str = token::get_name(kw.to_name());
             let token_str = self.this_token_to_string();
             self.fatal(format!("expected `{}`, found `{}`",
-                               id_interned_str, token_str).as_slice())
+                               id_interned_str, token_str)[])
         }
     }
 
@@ -595,7 +595,7 @@ impl<'a> Parser<'a> {
             let span = self.span;
             self.span_err(span,
                           format!("expected identifier, found keyword `{}`",
-                                  token_str).as_slice());
+                                  token_str)[]);
         }
     }
 
@@ -604,7 +604,7 @@ impl<'a> Parser<'a> {
         if self.token.is_reserved_keyword() {
             let token_str = self.this_token_to_string();
             self.fatal(format!("`{}` is a reserved keyword",
-                               token_str).as_slice())
+                               token_str)[])
         }
     }
 
@@ -624,7 +624,7 @@ impl<'a> Parser<'a> {
                     Parser::token_to_string(&token::BinOp(token::And));
                 self.fatal(format!("expected `{}`, found `{}`",
                                    found_token,
-                                   token_str).as_slice())
+                                   token_str)[])
             }
         }
     }
@@ -645,7 +645,7 @@ impl<'a> Parser<'a> {
                     Parser::token_to_string(&token::BinOp(token::Or));
                 self.fatal(format!("expected `{}`, found `{}`",
                                    token_str,
-                                   found_token).as_slice())
+                                   found_token)[])
             }
         }
     }
@@ -711,7 +711,7 @@ impl<'a> Parser<'a> {
             let token_str = Parser::token_to_string(&token::Lt);
             self.fatal(format!("expected `{}`, found `{}`",
                                token_str,
-                               found_token).as_slice())
+                               found_token)[])
         }
     }
 
@@ -763,7 +763,7 @@ impl<'a> Parser<'a> {
                 let this_token_str = self.this_token_to_string();
                 self.fatal(format!("expected `{}`, found `{}`",
                                    gt_str,
-                                   this_token_str).as_slice())
+                                   this_token_str)[])
             }
         }
     }
@@ -1392,7 +1392,7 @@ impl<'a> Parser<'a> {
                     let (inner_attrs, body) =
                         p.parse_inner_attrs_and_block();
                     let mut attrs = attrs;
-                    attrs.push_all(inner_attrs.as_slice());
+                    attrs.push_all(inner_attrs[]);
                     ProvidedMethod(P(ast::Method {
                         attrs: attrs,
                         id: ast::DUMMY_NODE_ID,
@@ -1411,7 +1411,7 @@ impl<'a> Parser<'a> {
                   _ => {
                       let token_str = p.this_token_to_string();
                       p.fatal((format!("expected `;` or `{{`, found `{}`",
-                                       token_str)).as_slice())
+                                       token_str))[])
                   }
                 }
             }
@@ -1606,7 +1606,7 @@ impl<'a> Parser<'a> {
         } else {
             let this_token_str = self.this_token_to_string();
             let msg = format!("expected type, found `{}`", this_token_str);
-            self.fatal(msg.as_slice());
+            self.fatal(msg[]);
         };
 
         let sp = mk_sp(lo, self.last_span.hi);
@@ -1753,14 +1753,14 @@ impl<'a> Parser<'a> {
 
                     token::Str_(s) => {
                         (true,
-                         LitStr(token::intern_and_get_ident(parse::str_lit(s.as_str()).as_slice()),
+                         LitStr(token::intern_and_get_ident(parse::str_lit(s.as_str())[]),
                                 ast::CookedStr))
                     }
                     token::StrRaw(s, n) => {
                         (true,
                          LitStr(
                             token::intern_and_get_ident(
-                                parse::raw_str_lit(s.as_str()).as_slice()),
+                                parse::raw_str_lit(s.as_str())[]),
                             ast::RawStr(n)))
                     }
                     token::Binary(i) =>
@@ -2004,7 +2004,7 @@ impl<'a> Parser<'a> {
                 };
             }
             _ => {
-                self.fatal(format!("expected a lifetime name").as_slice());
+                self.fatal(format!("expected a lifetime name")[]);
             }
         }
     }
@@ -2042,7 +2042,7 @@ impl<'a> Parser<'a> {
                     let msg = format!("expected `,` or `>` after lifetime \
                                       name, found `{}`",
                                       this_token_str);
-                    self.fatal(msg.as_slice());
+                    self.fatal(msg[]);
                 }
             }
         }
@@ -2517,7 +2517,7 @@ impl<'a> Parser<'a> {
                     hi = self.span.hi;
                     self.bump();
 
-                    let index = from_str::<uint>(n.as_str());
+                    let index = n.as_str().parse::<uint>();
                     match index {
                         Some(n) => {
                             let id = spanned(dot, hi, n);
@@ -2535,16 +2535,16 @@ impl<'a> Parser<'a> {
                     let last_span = self.last_span;
                     let fstr = n.as_str();
                     self.span_err(last_span,
-                                  format!("unexpected token: `{}`", n.as_str()).as_slice());
+                                  format!("unexpected token: `{}`", n.as_str())[]);
                     if fstr.chars().all(|x| "0123456789.".contains_char(x)) {
-                        let float = match from_str::<f64>(fstr) {
+                        let float = match fstr.parse::<f64>() {
                             Some(f) => f,
                             None => continue,
                         };
                         self.span_help(last_span,
                             format!("try parenthesizing the first index; e.g., `(foo.{}){}`",
                                     float.trunc() as uint,
-                                    float.fract().to_string()[1..]).as_slice());
+                                    float.fract().to_string()[1..])[]);
                     }
                     self.abort_if_errors();
 
@@ -2716,7 +2716,7 @@ impl<'a> Parser<'a> {
                   };
                   let token_str = p.this_token_to_string();
                   p.fatal(format!("incorrect close delimiter: `{}`",
-                                  token_str).as_slice())
+                                  token_str)[])
               },
               /* we ought to allow different depths of unquotation */
               token::Dollar if p.quote_depth > 0u => {
@@ -2734,7 +2734,7 @@ impl<'a> Parser<'a> {
                     let seq = match seq {
                         Spanned { node, .. } => node,
                     };
-                    let name_num = macro_parser::count_names(seq.as_slice());
+                    let name_num = macro_parser::count_names(seq[]);
                     TtSequence(mk_sp(sp.lo, p.span.hi),
                                Rc::new(SequenceRepetition {
                                    tts: seq,
@@ -2885,7 +2885,7 @@ impl<'a> Parser<'a> {
                         let this_token_to_string = self.this_token_to_string();
                         self.span_err(span,
                                       format!("expected expression, found `{}`",
-                                              this_token_to_string).as_slice());
+                                              this_token_to_string)[]);
                         let box_span = mk_sp(lo, self.last_span.hi);
                         self.span_help(box_span,
                                        "perhaps you meant `box() (foo)` instead?");
@@ -3264,7 +3264,7 @@ impl<'a> Parser<'a> {
                 if self.token != token::CloseDelim(token::Brace) {
                     let token_str = self.this_token_to_string();
                     self.fatal(format!("expected `{}`, found `{}`", "}",
-                                       token_str).as_slice())
+                                       token_str)[])
                 }
                 etc = true;
                 break;
@@ -3285,7 +3285,7 @@ impl<'a> Parser<'a> {
                     BindByRef(..) | BindByValue(MutMutable) => {
                         let token_str = self.this_token_to_string();
                         self.fatal(format!("unexpected `{}`",
-                                           token_str).as_slice())
+                                           token_str)[])
                     }
                     _ => {}
                 }
@@ -3563,7 +3563,7 @@ impl<'a> Parser<'a> {
             let span = self.span;
             let tok_str = self.this_token_to_string();
             self.span_fatal(span,
-                            format!("expected identifier, found `{}`", tok_str).as_slice());
+                            format!("expected identifier, found `{}`", tok_str)[]);
         }
         let ident = self.parse_ident();
         let last_span = self.last_span;
@@ -3664,7 +3664,7 @@ impl<'a> Parser<'a> {
 
         let lo = self.span.lo;
         if self.token.is_keyword(keywords::Let) {
-            check_expected_item(self, item_attrs.as_slice());
+            check_expected_item(self, item_attrs[]);
             self.expect_keyword(keywords::Let);
             let decl = self.parse_let();
             P(spanned(lo, decl.span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID)))
@@ -3673,7 +3673,7 @@ impl<'a> Parser<'a> {
             && self.look_ahead(1, |t| *t == token::Not) {
             // it's a macro invocation:
 
-            check_expected_item(self, item_attrs.as_slice());
+            check_expected_item(self, item_attrs[]);
 
             // Potential trouble: if we allow macros with paths instead of
             // idents, we'd need to look ahead past the whole path here...
@@ -3701,7 +3701,7 @@ impl<'a> Parser<'a> {
                     let tok_str = self.this_token_to_string();
                     self.fatal(format!("expected {}`(` or `{{`, found `{}`",
                                        ident_str,
-                                       tok_str).as_slice())
+                                       tok_str)[])
                 },
             };
 
@@ -3749,7 +3749,7 @@ impl<'a> Parser<'a> {
             }
         } else {
             let found_attrs = !item_attrs.is_empty();
-            let item_err = Parser::expected_item_err(item_attrs.as_slice());
+            let item_err = Parser::expected_item_err(item_attrs[]);
             match self.parse_item_or_view_item(item_attrs, false) {
                 IoviItem(i) => {
                     let hi = i.span.hi;
@@ -3793,7 +3793,7 @@ impl<'a> Parser<'a> {
             let sp = self.span;
             let tok = self.this_token_to_string();
             self.span_fatal_help(sp,
-                                 format!("expected `{{`, found `{}`", tok).as_slice(),
+                                 format!("expected `{{`, found `{}`", tok)[],
                                  "place this code inside a block");
         }
 
@@ -3847,13 +3847,13 @@ impl<'a> Parser<'a> {
         while self.token != token::CloseDelim(token::Brace) {
             // parsing items even when they're not allowed lets us give
             // better error messages and recover more gracefully.
-            attributes_box.push_all(self.parse_outer_attributes().as_slice());
+            attributes_box.push_all(self.parse_outer_attributes()[]);
             match self.token {
                 token::Semi => {
                     if !attributes_box.is_empty() {
                         let last_span = self.last_span;
                         self.span_err(last_span,
-                                      Parser::expected_item_err(attributes_box.as_slice()));
+                                      Parser::expected_item_err(attributes_box[]));
                         attributes_box = Vec::new();
                     }
                     self.bump(); // empty
@@ -3944,7 +3944,7 @@ impl<'a> Parser<'a> {
         if !attributes_box.is_empty() {
             let last_span = self.last_span;
             self.span_err(last_span,
-                          Parser::expected_item_err(attributes_box.as_slice()));
+                          Parser::expected_item_err(attributes_box[]));
         }
 
         let hi = self.span.hi;
@@ -4362,7 +4362,7 @@ impl<'a> Parser<'a> {
             _ => {
                 let token_str = self.this_token_to_string();
                 self.fatal(format!("expected `self`, found `{}`",
-                                   token_str).as_slice())
+                                   token_str)[])
             }
         }
     }
@@ -4516,7 +4516,7 @@ impl<'a> Parser<'a> {
                 _ => {
                     let token_str = self.this_token_to_string();
                     self.fatal(format!("expected `,` or `)`, found `{}`",
-                                       token_str).as_slice())
+                                       token_str)[])
                 }
             }
             }
@@ -4692,7 +4692,7 @@ impl<'a> Parser<'a> {
                 let (inner_attrs, body) = self.parse_inner_attrs_and_block();
                 let body_span = body.span;
                 let mut new_attrs = attrs;
-                new_attrs.push_all(inner_attrs.as_slice());
+                new_attrs.push_all(inner_attrs[]);
                 (ast::MethDecl(ident,
                                generics,
                                abi,
@@ -4849,7 +4849,7 @@ impl<'a> Parser<'a> {
             if fields.len() == 0 {
                 self.fatal(format!("unit-like struct definition should be \
                                     written as `struct {};`",
-                                   token::get_ident(class_name)).as_slice());
+                                   token::get_ident(class_name))[]);
             }
             self.bump();
         } else if self.check(&token::OpenDelim(token::Paren)) {
@@ -4873,7 +4873,7 @@ impl<'a> Parser<'a> {
             if fields.len() == 0 {
                 self.fatal(format!("unit-like struct definition should be \
                                     written as `struct {};`",
-                                   token::get_ident(class_name)).as_slice());
+                                   token::get_ident(class_name))[]);
             }
             self.expect(&token::Semi);
         } else if self.eat(&token::Semi) {
@@ -4884,7 +4884,7 @@ impl<'a> Parser<'a> {
             let token_str = self.this_token_to_string();
             self.fatal(format!("expected `{}`, `(`, or `;` after struct \
                                 name, found `{}`", "{",
-                               token_str).as_slice())
+                               token_str)[])
         }
 
         let _ = ast::DUMMY_NODE_ID;  // FIXME: Workaround for crazy bug.
@@ -4913,7 +4913,7 @@ impl<'a> Parser<'a> {
                 let token_str = self.this_token_to_string();
                 self.span_fatal_help(span,
                                      format!("expected `,`, or `}}`, found `{}`",
-                                             token_str).as_slice(),
+                                             token_str)[],
                                      "struct fields should be separated by commas")
             }
         }
@@ -4983,7 +4983,7 @@ impl<'a> Parser<'a> {
             let mut attrs = self.parse_outer_attributes();
             if first {
                 let mut tmp = attrs_remaining.clone();
-                tmp.push_all(attrs.as_slice());
+                tmp.push_all(attrs[]);
                 attrs = tmp;
                 first = false;
             }
@@ -5000,7 +5000,7 @@ impl<'a> Parser<'a> {
               _ => {
                   let token_str = self.this_token_to_string();
                   self.fatal(format!("expected item, found `{}`",
-                                     token_str).as_slice())
+                                     token_str)[])
               }
             }
         }
@@ -5009,7 +5009,7 @@ impl<'a> Parser<'a> {
             // We parsed attributes for the first item but didn't find it
             let last_span = self.last_span;
             self.span_err(last_span,
-                          Parser::expected_item_err(attrs_remaining.as_slice()));
+                          Parser::expected_item_err(attrs_remaining[]));
         }
 
         ast::Mod {
@@ -5079,7 +5079,7 @@ impl<'a> Parser<'a> {
                     -> (ast::Item_, Vec<ast::Attribute> ) {
         let mut prefix = Path::new(self.sess.span_diagnostic.cm.span_to_filename(self.span));
         prefix.pop();
-        let mod_path = Path::new(".").join_many(self.mod_path_stack.as_slice());
+        let mod_path = Path::new(".").join_many(self.mod_path_stack[]);
         let dir_path = prefix.join(&mod_path);
         let mod_string = token::get_ident(id);
         let (file_path, owns_directory) = match ::attr::first_attr_value_str_by_name(
@@ -5089,8 +5089,8 @@ impl<'a> Parser<'a> {
                 let mod_name = mod_string.get().to_string();
                 let default_path_str = format!("{}.rs", mod_name);
                 let secondary_path_str = format!("{}/mod.rs", mod_name);
-                let default_path = dir_path.join(default_path_str.as_slice());
-                let secondary_path = dir_path.join(secondary_path_str.as_slice());
+                let default_path = dir_path.join(default_path_str[]);
+                let secondary_path = dir_path.join(secondary_path_str[]);
                 let default_exists = default_path.exists();
                 let secondary_exists = secondary_path.exists();
 
@@ -5105,13 +5105,13 @@ impl<'a> Parser<'a> {
                                    format!("maybe move this module `{0}` \
                                             to its own directory via \
                                             `{0}/mod.rs`",
-                                           this_module).as_slice());
+                                           this_module)[]);
                     if default_exists || secondary_exists {
                         self.span_note(id_sp,
                                        format!("... or maybe `use` the module \
                                                 `{}` instead of possibly \
                                                 redeclaring it",
-                                               mod_name).as_slice());
+                                               mod_name)[]);
                     }
                     self.abort_if_errors();
                 }
@@ -5122,12 +5122,12 @@ impl<'a> Parser<'a> {
                     (false, false) => {
                         self.span_fatal_help(id_sp,
                                              format!("file not found for module `{}`",
-                                                     mod_name).as_slice(),
+                                                     mod_name)[],
                                              format!("name the file either {} or {} inside \
                                                      the directory {}",
                                                      default_path_str,
                                                      secondary_path_str,
-                                                     dir_path.display()).as_slice());
+                                                     dir_path.display())[]);
                     }
                     (true, true) => {
                         self.span_fatal_help(
@@ -5136,7 +5136,7 @@ impl<'a> Parser<'a> {
                                      and {}",
                                     mod_name,
                                     default_path_str,
-                                    secondary_path_str).as_slice(),
+                                    secondary_path_str)[],
                             "delete or rename one of them to remove the ambiguity");
                     }
                 }
@@ -5158,11 +5158,11 @@ impl<'a> Parser<'a> {
                 let mut err = String::from_str("circular modules: ");
                 let len = included_mod_stack.len();
                 for p in included_mod_stack.slice(i, len).iter() {
-                    err.push_str(p.display().as_cow().as_slice());
+                    err.push_str(p.display().as_cow()[]);
                     err.push_str(" -> ");
                 }
-                err.push_str(path.display().as_cow().as_slice());
-                self.span_fatal(id_sp, err.as_slice());
+                err.push_str(path.display().as_cow()[]);
+                self.span_fatal(id_sp, err[]);
             }
             None => ()
         }
@@ -5243,7 +5243,7 @@ impl<'a> Parser<'a> {
         if !attrs_remaining.is_empty() {
             let last_span = self.last_span;
             self.span_err(last_span,
-                          Parser::expected_item_err(attrs_remaining.as_slice()));
+                          Parser::expected_item_err(attrs_remaining[]));
         }
         assert!(self.token == token::CloseDelim(token::Brace));
         ast::ForeignMod {
@@ -5284,7 +5284,7 @@ impl<'a> Parser<'a> {
                     self.span_help(span,
                                    format!("perhaps you meant to enclose the crate name `{}` in \
                                            a string?",
-                                          the_ident.as_str()).as_slice());
+                                          the_ident.as_str())[]);
                     None
                 } else {
                     None
@@ -5310,7 +5310,7 @@ impl<'a> Parser<'a> {
                 self.span_fatal(span,
                                 format!("expected extern crate name but \
                                          found `{}`",
-                                        token_str).as_slice());
+                                        token_str)[]);
             }
         };
 
@@ -5408,7 +5408,7 @@ impl<'a> Parser<'a> {
                     self.span_err(start_span,
                         format!("unit-like struct variant should be written \
                                  without braces, as `{},`",
-                                token::get_ident(ident)).as_slice());
+                                token::get_ident(ident))[]);
                 }
                 kind = StructVariantKind(struct_def);
             } else if self.check(&token::OpenDelim(token::Paren)) {
@@ -5493,7 +5493,7 @@ impl<'a> Parser<'a> {
                             format!("illegal ABI: expected one of [{}], \
                                      found `{}`",
                                     abi::all_names().connect(", "),
-                                    the_string).as_slice());
+                                    the_string)[]);
                         None
                     }
                 }
@@ -5555,7 +5555,7 @@ impl<'a> Parser<'a> {
                                  format!("`extern mod` is obsolete, use \
                                           `extern crate` instead \
                                           to refer to external \
-                                          crates.").as_slice())
+                                          crates.")[])
                 }
                 return self.parse_item_extern_crate(lo, visibility, attrs);
             }
@@ -5583,7 +5583,7 @@ impl<'a> Parser<'a> {
             let token_str = self.this_token_to_string();
             self.span_fatal(span,
                             format!("expected `{}` or `fn`, found `{}`", "{",
-                                    token_str).as_slice());
+                                    token_str)[]);
         }
 
         if self.eat_keyword(keywords::Virtual) {
@@ -5696,7 +5696,7 @@ impl<'a> Parser<'a> {
         if self.eat_keyword(keywords::Mod) {
             // MODULE ITEM
             let (ident, item_, extra_attrs) =
-                self.parse_item_mod(attrs.as_slice());
+                self.parse_item_mod(attrs[]);
             let last_span = self.last_span;
             let item = self.mk_item(lo,
                                     last_span.hi,
@@ -6031,7 +6031,7 @@ impl<'a> Parser<'a> {
                                   macros_allowed: bool)
                                   -> ParsedItemsAndViewItems {
         let mut attrs = first_item_attrs;
-        attrs.push_all(self.parse_outer_attributes().as_slice());
+        attrs.push_all(self.parse_outer_attributes()[]);
         // First, parse view items.
         let mut view_items : Vec<ast::ViewItem> = Vec::new();
         let mut items = Vec::new();
@@ -6113,7 +6113,7 @@ impl<'a> Parser<'a> {
                            macros_allowed: bool)
         -> ParsedItemsAndViewItems {
         let mut attrs = first_item_attrs;
-        attrs.push_all(self.parse_outer_attributes().as_slice());
+        attrs.push_all(self.parse_outer_attributes()[]);
         let mut foreign_items = Vec::new();
         loop {
             match self.parse_foreign_item(attrs, macros_allowed) {
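
Besides the slicing changes, the parser.rs hunks above also move number parsing from the
free function `from_str::<T>(s)` to the method form `s.parse::<T>()`; the surrounding
`Some(..)` / `None` match arms show that `parse` still returned an `Option<T>` at this
point. A hedged sketch in modern Rust (where `parse` returns a `Result`, so `.ok()`
recovers the old shape); the values are illustrative only:

// Sketch only, not part of the commit.
fn main() {
    let fstr = "3.25";

    // Old style (pre-commit):   from_str::<f64>(fstr)
    // Style in this commit:     fstr.parse::<f64>()        -> Option<f64>
    // Modern equivalent:
    let float: Option<f64> = fstr.parse::<f64>().ok();

    match float {
        Some(f) => println!("whole part {}, fraction {}", f.trunc(), f.fract()),
        None => println!("`{}` is not a float", fstr),
    }
}
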
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index dad369792d7..9e61eaae352 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -454,7 +454,7 @@ macro_rules! declare_special_idents_and_keywords {(
         $(init_vec.push($si_str);)*
         $(init_vec.push($sk_str);)*
         $(init_vec.push($rk_str);)*
-        interner::StrInterner::prefill(init_vec.as_slice())
+        interner::StrInterner::prefill(init_vec[])
     }
 }}
 
@@ -602,10 +602,14 @@ impl InternedString {
 
     #[inline]
     pub fn get<'a>(&'a self) -> &'a str {
-        self.string.as_slice()
+        self.string[]
     }
 }
 
+impl Deref<str> for InternedString {
+    fn deref(&self) -> &str { &*self.string }
+}
+
 impl BytesContainer for InternedString {
     fn container_as_bytes<'a>(&'a self) -> &'a [u8] {
         // FIXME #12938: This is a workaround for the incorrect signature
@@ -620,49 +624,49 @@ impl BytesContainer for InternedString {
 
 impl fmt::Show for InternedString {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{}", self.string.as_slice())
+        write!(f, "{}", self.string[])
     }
 }
 
 #[allow(deprecated)]
 impl<'a> Equiv<&'a str> for InternedString {
     fn equiv(&self, other: & &'a str) -> bool {
-        (*other) == self.string.as_slice()
+        (*other) == self.string[]
     }
 }
 
 impl<'a> PartialEq<&'a str> for InternedString {
     #[inline(always)]
     fn eq(&self, other: & &'a str) -> bool {
-        PartialEq::eq(self.string.as_slice(), *other)
+        PartialEq::eq(self.string[], *other)
     }
     #[inline(always)]
     fn ne(&self, other: & &'a str) -> bool {
-        PartialEq::ne(self.string.as_slice(), *other)
+        PartialEq::ne(self.string[], *other)
     }
 }
 
 impl<'a> PartialEq<InternedString > for &'a str {
     #[inline(always)]
     fn eq(&self, other: &InternedString) -> bool {
-        PartialEq::eq(*self, other.string.as_slice())
+        PartialEq::eq(*self, other.string[])
     }
     #[inline(always)]
     fn ne(&self, other: &InternedString) -> bool {
-        PartialEq::ne(*self, other.string.as_slice())
+        PartialEq::ne(*self, other.string[])
     }
 }
 
 impl<D:Decoder<E>, E> Decodable<D, E> for InternedString {
     fn decode(d: &mut D) -> Result<InternedString, E> {
         Ok(get_name(get_ident_interner().intern(
-                    try!(d.read_str()).as_slice())))
+                    try!(d.read_str())[])))
     }
 }
 
 impl<S:Encoder<E>, E> Encodable<S, E> for InternedString {
     fn encode(&self, s: &mut S) -> Result<(), E> {
-        s.emit_str(self.string.as_slice())
+        s.emit_str(self.string[])
     }
 }
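
The token.rs hunk above also adds a `Deref` impl so that an `InternedString` coerces to
`&str` directly, which is what lets callers drop explicit `.as_slice()` / `.get()` calls.
A small stand-in sketch in today's syntax (the commit predates the `type Target` form and
writes `impl Deref<str> for InternedString`); the `String` field here only stands in for
the real interned storage:

// Sketch only; `Interned` is a hypothetical stand-in for InternedString.
use std::ops::Deref;

struct Interned {
    string: String,
}

impl Deref for Interned {
    type Target = str;
    fn deref(&self) -> &str {
        &self.string
    }
}

fn main() {
    let s = Interned { string: "while".to_string() };
    // Deref coercion lets the wrapper be used wherever a &str is expected.
    assert!(s.starts_with("wh"));
    let as_str: &str = &s;
    println!("{}", as_str);
}
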
 
diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs
index bfa47a46e74..ab0e0f9585c 100644
--- a/src/libsyntax/print/pp.rs
+++ b/src/libsyntax/print/pp.rs
@@ -139,12 +139,12 @@ pub fn buf_str(toks: Vec<Token>,
         }
         s.push_str(format!("{}={}",
                            szs[i],
-                           tok_str(toks[i].clone())).as_slice());
+                           tok_str(toks[i].clone()))[]);
         i += 1u;
         i %= n;
     }
     s.push(']');
-    return s.into_string();
+    s
 }
 
 #[deriving(Copy)]
@@ -601,7 +601,7 @@ impl Printer {
             assert_eq!(l, len);
             // assert!(l <= space);
             self.space -= len;
-            self.print_str(s.as_slice())
+            self.print_str(s[])
           }
           Eof => {
             // Eof should never get here.
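
The pp.rs hunk above also drops a redundant conversion: `buf_str` already builds a
`String` named `s`, so `return s.into_string();` becomes plain `s`, while elsewhere in
this commit string literals switch from `.into_string()` to `.to_string()`. A brief
sketch, not part of the commit, of the resulting shape:

// Sketch only; function and values are illustrative.
fn buf_label(op: bool) -> String {
    let mut s = String::from("[");
    s.push_str(if op { "an operator" } else { "a token" });
    s.push(']');
    s // previously written `return s.into_string();`
}

fn main() {
    let eof = "<eof>".to_string(); // was `"<eof>".into_string()`
    println!("{} {}", buf_label(true), eof);
}
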
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index a9717a526ad..0d79b7cf925 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -30,6 +30,7 @@ use ptr::P;
 
 use std::{ascii, mem};
 use std::io::{mod, IoResult};
+use std::iter;
 
 pub enum AnnNode<'a> {
     NodeIdent(&'a ast::Ident),
@@ -113,7 +114,7 @@ pub fn print_crate<'a>(cm: &'a CodeMap,
                                       out,
                                       ann,
                                       is_expanded);
-    try!(s.print_mod(&krate.module, krate.attrs.as_slice()));
+    try!(s.print_mod(&krate.module, krate.attrs[]));
     try!(s.print_remaining_comments());
     eof(&mut s.s)
 }
@@ -197,56 +198,56 @@ pub fn binop_to_string(op: BinOpToken) -> &'static str {
 
 pub fn token_to_string(tok: &Token) -> String {
     match *tok {
-        token::Eq                   => "=".into_string(),
-        token::Lt                   => "<".into_string(),
-        token::Le                   => "<=".into_string(),
-        token::EqEq                 => "==".into_string(),
-        token::Ne                   => "!=".into_string(),
-        token::Ge                   => ">=".into_string(),
-        token::Gt                   => ">".into_string(),
-        token::Not                  => "!".into_string(),
-        token::Tilde                => "~".into_string(),
-        token::OrOr                 => "||".into_string(),
-        token::AndAnd               => "&&".into_string(),
-        token::BinOp(op)            => binop_to_string(op).into_string(),
+        token::Eq                   => "=".to_string(),
+        token::Lt                   => "<".to_string(),
+        token::Le                   => "<=".to_string(),
+        token::EqEq                 => "==".to_string(),
+        token::Ne                   => "!=".to_string(),
+        token::Ge                   => ">=".to_string(),
+        token::Gt                   => ">".to_string(),
+        token::Not                  => "!".to_string(),
+        token::Tilde                => "~".to_string(),
+        token::OrOr                 => "||".to_string(),
+        token::AndAnd               => "&&".to_string(),
+        token::BinOp(op)            => binop_to_string(op).to_string(),
         token::BinOpEq(op)          => format!("{}=", binop_to_string(op)),
 
         /* Structural symbols */
-        token::At                   => "@".into_string(),
-        token::Dot                  => ".".into_string(),
-        token::DotDot               => "..".into_string(),
-        token::DotDotDot            => "...".into_string(),
-        token::Comma                => ",".into_string(),
-        token::Semi                 => ";".into_string(),
-        token::Colon                => ":".into_string(),
-        token::ModSep               => "::".into_string(),
-        token::RArrow               => "->".into_string(),
-        token::LArrow               => "<-".into_string(),
-        token::FatArrow             => "=>".into_string(),
-        token::OpenDelim(token::Paren) => "(".into_string(),
-        token::CloseDelim(token::Paren) => ")".into_string(),
-        token::OpenDelim(token::Bracket) => "[".into_string(),
-        token::CloseDelim(token::Bracket) => "]".into_string(),
-        token::OpenDelim(token::Brace) => "{".into_string(),
-        token::CloseDelim(token::Brace) => "}".into_string(),
-        token::Pound                => "#".into_string(),
-        token::Dollar               => "$".into_string(),
-        token::Question             => "?".into_string(),
+        token::At                   => "@".to_string(),
+        token::Dot                  => ".".to_string(),
+        token::DotDot               => "..".to_string(),
+        token::DotDotDot            => "...".to_string(),
+        token::Comma                => ",".to_string(),
+        token::Semi                 => ";".to_string(),
+        token::Colon                => ":".to_string(),
+        token::ModSep               => "::".to_string(),
+        token::RArrow               => "->".to_string(),
+        token::LArrow               => "<-".to_string(),
+        token::FatArrow             => "=>".to_string(),
+        token::OpenDelim(token::Paren) => "(".to_string(),
+        token::CloseDelim(token::Paren) => ")".to_string(),
+        token::OpenDelim(token::Bracket) => "[".to_string(),
+        token::CloseDelim(token::Bracket) => "]".to_string(),
+        token::OpenDelim(token::Brace) => "{".to_string(),
+        token::CloseDelim(token::Brace) => "}".to_string(),
+        token::Pound                => "#".to_string(),
+        token::Dollar               => "$".to_string(),
+        token::Question             => "?".to_string(),
 
         /* Literals */
         token::Literal(lit, suf) => {
             let mut out = match lit {
                 token::Byte(b)           => format!("b'{}'", b.as_str()),
                 token::Char(c)           => format!("'{}'", c.as_str()),
-                token::Float(c)          => c.as_str().into_string(),
-                token::Integer(c)        => c.as_str().into_string(),
+                token::Float(c)          => c.as_str().to_string(),
+                token::Integer(c)        => c.as_str().to_string(),
                 token::Str_(s)           => format!("\"{}\"", s.as_str()),
                 token::StrRaw(s, n)      => format!("r{delim}\"{string}\"{delim}",
-                                                    delim="#".repeat(n),
+                                                    delim=repeat("#", n),
                                                     string=s.as_str()),
                 token::Binary(v)         => format!("b\"{}\"", v.as_str()),
                 token::BinaryRaw(s, n)   => format!("br{delim}\"{string}\"{delim}",
-                                                    delim="#".repeat(n),
+                                                    delim=repeat("#", n),
                                                     string=s.as_str()),
             };
 
@@ -258,17 +259,17 @@ pub fn token_to_string(tok: &Token) -> String {
         }
 
         /* Name components */
-        token::Ident(s, _)          => token::get_ident(s).get().into_string(),
+        token::Ident(s, _)          => token::get_ident(s).get().to_string(),
         token::Lifetime(s)          => format!("{}", token::get_ident(s)),
-        token::Underscore           => "_".into_string(),
+        token::Underscore           => "_".to_string(),
 
         /* Other */
-        token::DocComment(s)        => s.as_str().into_string(),
+        token::DocComment(s)        => s.as_str().to_string(),
         token::SubstNt(s, _)        => format!("${}", s),
         token::MatchNt(s, t, _, _)  => format!("${}:{}", s, t),
-        token::Eof                  => "<eof>".into_string(),
-        token::Whitespace           => " ".into_string(),
-        token::Comment              => "/* */".into_string(),
+        token::Eof                  => "<eof>".to_string(),
+        token::Whitespace           => " ".to_string(),
+        token::Comment              => "/* */".to_string(),
         token::Shebang(s)           => format!("/* shebang: {}*/", s.as_str()),
 
         token::Interpolated(ref nt) => match *nt {
@@ -276,12 +277,12 @@ pub fn token_to_string(tok: &Token) -> String {
             token::NtMeta(ref e)  => meta_item_to_string(&**e),
             token::NtTy(ref e)    => ty_to_string(&**e),
             token::NtPath(ref e)  => path_to_string(&**e),
-            token::NtItem(..)     => "an interpolated item".into_string(),
-            token::NtBlock(..)    => "an interpolated block".into_string(),
-            token::NtStmt(..)     => "an interpolated statement".into_string(),
-            token::NtPat(..)      => "an interpolated pattern".into_string(),
-            token::NtIdent(..)    => "an interpolated identifier".into_string(),
-            token::NtTT(..)       => "an interpolated tt".into_string(),
+            token::NtItem(..)     => "an interpolated item".to_string(),
+            token::NtBlock(..)    => "an interpolated block".to_string(),
+            token::NtStmt(..)     => "an interpolated statement".to_string(),
+            token::NtPat(..)      => "an interpolated pattern".to_string(),
+            token::NtIdent(..)    => "an interpolated identifier".to_string(),
+            token::NtTT(..)       => "an interpolated tt".to_string(),
         }
     }
 }
@@ -577,7 +578,7 @@ impl<'a> State<'a> {
     pub fn synth_comment(&mut self, text: String) -> IoResult<()> {
         try!(word(&mut self.s, "/*"));
         try!(space(&mut self.s));
-        try!(word(&mut self.s, text.as_slice()));
+        try!(word(&mut self.s, text[]));
         try!(space(&mut self.s));
         word(&mut self.s, "*/")
     }
@@ -682,7 +683,7 @@ impl<'a> State<'a> {
             }
             ast::TyTup(ref elts) => {
                 try!(self.popen());
-                try!(self.commasep(Inconsistent, elts.as_slice(),
+                try!(self.commasep(Inconsistent, elts[],
                                    |s, ty| s.print_type(&**ty)));
                 if elts.len() == 1 {
                     try!(word(&mut self.s, ","));
@@ -737,10 +738,10 @@ impl<'a> State<'a> {
             }
             ast::TyObjectSum(ref ty, ref bounds) => {
                 try!(self.print_type(&**ty));
-                try!(self.print_bounds("+", bounds.as_slice()));
+                try!(self.print_bounds("+", bounds[]));
             }
             ast::TyPolyTraitRef(ref bounds) => {
-                try!(self.print_bounds("", bounds.as_slice()));
+                try!(self.print_bounds("", bounds[]));
             }
             ast::TyQPath(ref qpath) => {
                 try!(word(&mut self.s, "<"));
@@ -775,7 +776,7 @@ impl<'a> State<'a> {
                               item: &ast::ForeignItem) -> IoResult<()> {
         try!(self.hardbreak_if_not_bol());
         try!(self.maybe_print_comment(item.span.lo));
-        try!(self.print_outer_attributes(item.attrs.as_slice()));
+        try!(self.print_outer_attributes(item.attrs[]));
         match item.node {
             ast::ForeignItemFn(ref decl, ref generics) => {
                 try!(self.print_fn(&**decl, None, abi::Rust, item.ident, generics,
@@ -786,7 +787,7 @@ impl<'a> State<'a> {
             }
             ast::ForeignItemStatic(ref t, m) => {
                 try!(self.head(visibility_qualified(item.vis,
-                                                    "static").as_slice()));
+                                                    "static")[]));
                 if m {
                     try!(self.word_space("mut"));
                 }
@@ -822,12 +823,12 @@ impl<'a> State<'a> {
     pub fn print_item(&mut self, item: &ast::Item) -> IoResult<()> {
         try!(self.hardbreak_if_not_bol());
         try!(self.maybe_print_comment(item.span.lo));
-        try!(self.print_outer_attributes(item.attrs.as_slice()));
+        try!(self.print_outer_attributes(item.attrs[]));
         try!(self.ann.pre(self, NodeItem(item)));
         match item.node {
             ast::ItemStatic(ref ty, m, ref expr) => {
                 try!(self.head(visibility_qualified(item.vis,
-                                                    "static").as_slice()));
+                                                    "static")[]));
                 if m == ast::MutMutable {
                     try!(self.word_space("mut"));
                 }
@@ -844,7 +845,7 @@ impl<'a> State<'a> {
             }
             ast::ItemConst(ref ty, ref expr) => {
                 try!(self.head(visibility_qualified(item.vis,
-                                                    "const").as_slice()));
+                                                    "const")[]));
                 try!(self.print_ident(item.ident));
                 try!(self.word_space(":"));
                 try!(self.print_type(&**ty));
@@ -867,29 +868,29 @@ impl<'a> State<'a> {
                     item.vis
                 ));
                 try!(word(&mut self.s, " "));
-                try!(self.print_block_with_attrs(&**body, item.attrs.as_slice()));
+                try!(self.print_block_with_attrs(&**body, item.attrs[]));
             }
             ast::ItemMod(ref _mod) => {
                 try!(self.head(visibility_qualified(item.vis,
-                                                    "mod").as_slice()));
+                                                    "mod")[]));
                 try!(self.print_ident(item.ident));
                 try!(self.nbsp());
                 try!(self.bopen());
-                try!(self.print_mod(_mod, item.attrs.as_slice()));
+                try!(self.print_mod(_mod, item.attrs[]));
                 try!(self.bclose(item.span));
             }
             ast::ItemForeignMod(ref nmod) => {
                 try!(self.head("extern"));
-                try!(self.word_nbsp(nmod.abi.to_string().as_slice()));
+                try!(self.word_nbsp(nmod.abi.to_string()[]));
                 try!(self.bopen());
-                try!(self.print_foreign_mod(nmod, item.attrs.as_slice()));
+                try!(self.print_foreign_mod(nmod, item.attrs[]));
                 try!(self.bclose(item.span));
             }
             ast::ItemTy(ref ty, ref params) => {
                 try!(self.ibox(indent_unit));
                 try!(self.ibox(0u));
                 try!(self.word_nbsp(visibility_qualified(item.vis,
-                                                         "type").as_slice()));
+                                                         "type")[]));
                 try!(self.print_ident(item.ident));
                 try!(self.print_generics(params));
                 try!(self.end()); // end the inner ibox
@@ -911,7 +912,7 @@ impl<'a> State<'a> {
                 ));
             }
             ast::ItemStruct(ref struct_def, ref generics) => {
-                try!(self.head(visibility_qualified(item.vis,"struct").as_slice()));
+                try!(self.head(visibility_qualified(item.vis,"struct")[]));
                 try!(self.print_struct(&**struct_def, generics, item.ident, item.span));
             }
 
@@ -944,7 +945,7 @@ impl<'a> State<'a> {
 
                 try!(space(&mut self.s));
                 try!(self.bopen());
-                try!(self.print_inner_attributes(item.attrs.as_slice()));
+                try!(self.print_inner_attributes(item.attrs[]));
                 for impl_item in impl_items.iter() {
                     match *impl_item {
                         ast::MethodImplItem(ref meth) => {
@@ -970,7 +971,7 @@ impl<'a> State<'a> {
                     try!(self.print_trait_ref(tref));
                     try!(word(&mut self.s, "?"));
                 }
-                try!(self.print_bounds(":", bounds.as_slice()));
+                try!(self.print_bounds(":", bounds[]));
                 try!(self.print_where_clause(generics));
                 try!(word(&mut self.s, " "));
                 try!(self.bopen());
@@ -988,7 +989,7 @@ impl<'a> State<'a> {
                 try!(self.print_ident(item.ident));
                 try!(self.cbox(indent_unit));
                 try!(self.popen());
-                try!(self.print_tts(tts.as_slice()));
+                try!(self.print_tts(tts[]));
                 try!(self.pclose());
                 try!(word(&mut self.s, ";"));
                 try!(self.end());
@@ -1022,12 +1023,12 @@ impl<'a> State<'a> {
                           generics: &ast::Generics, ident: ast::Ident,
                           span: codemap::Span,
                           visibility: ast::Visibility) -> IoResult<()> {
-        try!(self.head(visibility_qualified(visibility, "enum").as_slice()));
+        try!(self.head(visibility_qualified(visibility, "enum")[]));
         try!(self.print_ident(ident));
         try!(self.print_generics(generics));
         try!(self.print_where_clause(generics));
         try!(space(&mut self.s));
-        self.print_variants(enum_definition.variants.as_slice(), span)
+        self.print_variants(enum_definition.variants[], span)
     }
 
     pub fn print_variants(&mut self,
@@ -1037,7 +1038,7 @@ impl<'a> State<'a> {
         for v in variants.iter() {
             try!(self.space_if_not_bol());
             try!(self.maybe_print_comment(v.span.lo));
-            try!(self.print_outer_attributes(v.node.attrs.as_slice()));
+            try!(self.print_outer_attributes(v.node.attrs[]));
             try!(self.ibox(indent_unit));
             try!(self.print_variant(&**v));
             try!(word(&mut self.s, ","));
@@ -1066,7 +1067,7 @@ impl<'a> State<'a> {
             if !struct_def.fields.is_empty() {
                 try!(self.popen());
                 try!(self.commasep(
-                    Inconsistent, struct_def.fields.as_slice(),
+                    Inconsistent, struct_def.fields[],
                     |s, field| {
                         match field.node.kind {
                             ast::NamedField(..) => panic!("unexpected named field"),
@@ -1094,7 +1095,7 @@ impl<'a> State<'a> {
                     ast::NamedField(ident, visibility) => {
                         try!(self.hardbreak_if_not_bol());
                         try!(self.maybe_print_comment(field.span.lo));
-                        try!(self.print_outer_attributes(field.node.attrs.as_slice()));
+                        try!(self.print_outer_attributes(field.node.attrs[]));
                         try!(self.print_visibility(visibility));
                         try!(self.print_ident(ident));
                         try!(self.word_nbsp(":"));
@@ -1118,7 +1119,7 @@ impl<'a> State<'a> {
     pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
         match *tt {
             ast::TtToken(_, ref tk) => {
-                try!(word(&mut self.s, token_to_string(tk).as_slice()));
+                try!(word(&mut self.s, token_to_string(tk)[]));
                 match *tk {
                     parse::token::DocComment(..) => {
                         hardbreak(&mut self.s)
@@ -1127,11 +1128,11 @@ impl<'a> State<'a> {
                 }
             }
             ast::TtDelimited(_, ref delimed) => {
-                try!(word(&mut self.s, token_to_string(&delimed.open_token()).as_slice()));
+                try!(word(&mut self.s, token_to_string(&delimed.open_token())[]));
                 try!(space(&mut self.s));
-                try!(self.print_tts(delimed.tts.as_slice()));
+                try!(self.print_tts(delimed.tts[]));
                 try!(space(&mut self.s));
-                word(&mut self.s, token_to_string(&delimed.close_token()).as_slice())
+                word(&mut self.s, token_to_string(&delimed.close_token())[])
             },
             ast::TtSequence(_, ref seq) => {
                 try!(word(&mut self.s, "$("));
@@ -1141,7 +1142,7 @@ impl<'a> State<'a> {
                 try!(word(&mut self.s, ")"));
                 match seq.separator {
                     Some(ref tk) => {
-                        try!(word(&mut self.s, token_to_string(tk).as_slice()));
+                        try!(word(&mut self.s, token_to_string(tk)[]));
                     }
                     None => {},
                 }
@@ -1172,7 +1173,7 @@ impl<'a> State<'a> {
                 if !args.is_empty() {
                     try!(self.popen());
                     try!(self.commasep(Consistent,
-                                       args.as_slice(),
+                                       args[],
                                        |s, arg| s.print_type(&*arg.ty)));
                     try!(self.pclose());
                 }
@@ -1196,7 +1197,7 @@ impl<'a> State<'a> {
     pub fn print_ty_method(&mut self, m: &ast::TypeMethod) -> IoResult<()> {
         try!(self.hardbreak_if_not_bol());
         try!(self.maybe_print_comment(m.span.lo));
-        try!(self.print_outer_attributes(m.attrs.as_slice()));
+        try!(self.print_outer_attributes(m.attrs[]));
         try!(self.print_ty_fn(None,
                               None,
                               m.unsafety,
@@ -1228,7 +1229,7 @@ impl<'a> State<'a> {
     pub fn print_method(&mut self, meth: &ast::Method) -> IoResult<()> {
         try!(self.hardbreak_if_not_bol());
         try!(self.maybe_print_comment(meth.span.lo));
-        try!(self.print_outer_attributes(meth.attrs.as_slice()));
+        try!(self.print_outer_attributes(meth.attrs[]));
         match meth.node {
             ast::MethDecl(ident,
                           ref generics,
@@ -1246,7 +1247,7 @@ impl<'a> State<'a> {
                                    Some(&explicit_self.node),
                                    vis));
                 try!(word(&mut self.s, " "));
-                self.print_block_with_attrs(&**body, meth.attrs.as_slice())
+                self.print_block_with_attrs(&**body, meth.attrs[])
             },
             ast::MethMac(codemap::Spanned { node: ast::MacInvocTT(ref pth, ref tts, _),
                                             ..}) => {
@@ -1255,7 +1256,7 @@ impl<'a> State<'a> {
                 try!(word(&mut self.s, "! "));
                 try!(self.cbox(indent_unit));
                 try!(self.popen());
-                try!(self.print_tts(tts.as_slice()));
+                try!(self.print_tts(tts[]));
                 try!(self.pclose());
                 try!(word(&mut self.s, ";"));
                 self.end()
@@ -1522,7 +1523,7 @@ impl<'a> State<'a> {
             ast::ExprVec(ref exprs) => {
                 try!(self.ibox(indent_unit));
                 try!(word(&mut self.s, "["));
-                try!(self.commasep_exprs(Inconsistent, exprs.as_slice()));
+                try!(self.commasep_exprs(Inconsistent, exprs[]));
                 try!(word(&mut self.s, "]"));
                 try!(self.end());
             }
@@ -1542,7 +1543,7 @@ impl<'a> State<'a> {
                 try!(word(&mut self.s, "{"));
                 try!(self.commasep_cmnt(
                     Consistent,
-                    fields.as_slice(),
+                    fields[],
                     |s, field| {
                         try!(s.ibox(indent_unit));
                         try!(s.print_ident(field.ident.node));
@@ -1568,7 +1569,7 @@ impl<'a> State<'a> {
             }
             ast::ExprTup(ref exprs) => {
                 try!(self.popen());
-                try!(self.commasep_exprs(Inconsistent, exprs.as_slice()));
+                try!(self.commasep_exprs(Inconsistent, exprs[]));
                 if exprs.len() == 1 {
                     try!(word(&mut self.s, ","));
                 }
@@ -1576,7 +1577,7 @@ impl<'a> State<'a> {
             }
             ast::ExprCall(ref func, ref args) => {
                 try!(self.print_expr_maybe_paren(&**func));
-                try!(self.print_call_post(args.as_slice()));
+                try!(self.print_call_post(args[]));
             }
             ast::ExprMethodCall(ident, ref tys, ref args) => {
                 let base_args = args.slice_from(1);
@@ -1585,7 +1586,7 @@ impl<'a> State<'a> {
                 try!(self.print_ident(ident.node));
                 if tys.len() > 0u {
                     try!(word(&mut self.s, "::<"));
-                    try!(self.commasep(Inconsistent, tys.as_slice(),
+                    try!(self.commasep(Inconsistent, tys[],
                                        |s, ty| s.print_type(&**ty)));
                     try!(word(&mut self.s, ">"));
                 }
@@ -1795,11 +1796,11 @@ impl<'a> State<'a> {
                 try!(self.print_string(a.asm.get(), a.asm_str_style));
                 try!(self.word_space(":"));
 
-                try!(self.commasep(Inconsistent, a.outputs.as_slice(),
+                try!(self.commasep(Inconsistent, a.outputs[],
                                    |s, &(ref co, ref o, is_rw)| {
                     match co.get().slice_shift_char() {
                         Some(('=', operand)) if is_rw => {
-                            try!(s.print_string(format!("+{}", operand).as_slice(),
+                            try!(s.print_string(format!("+{}", operand)[],
                                                 ast::CookedStr))
                         }
                         _ => try!(s.print_string(co.get(), ast::CookedStr))
@@ -1812,7 +1813,7 @@ impl<'a> State<'a> {
                 try!(space(&mut self.s));
                 try!(self.word_space(":"));
 
-                try!(self.commasep(Inconsistent, a.inputs.as_slice(),
+                try!(self.commasep(Inconsistent, a.inputs[],
                                    |s, &(ref co, ref o)| {
                     try!(s.print_string(co.get(), ast::CookedStr));
                     try!(s.popen());
@@ -1823,7 +1824,7 @@ impl<'a> State<'a> {
                 try!(space(&mut self.s));
                 try!(self.word_space(":"));
 
-                try!(self.commasep(Inconsistent, a.clobbers.as_slice(),
+                try!(self.commasep(Inconsistent, a.clobbers[],
                                    |s, co| {
                     try!(s.print_string(co.get(), ast::CookedStr));
                     Ok(())
@@ -1877,7 +1878,7 @@ impl<'a> State<'a> {
     pub fn print_ident(&mut self, ident: ast::Ident) -> IoResult<()> {
         if self.encode_idents_with_hygiene {
             let encoded = ident.encode_with_hygiene();
-            try!(word(&mut self.s, encoded.as_slice()))
+            try!(word(&mut self.s, encoded[]))
         } else {
             try!(word(&mut self.s, token::get_ident(ident).get()))
         }
@@ -1885,7 +1886,7 @@ impl<'a> State<'a> {
     }
 
     pub fn print_uint(&mut self, i: uint) -> IoResult<()> {
-        word(&mut self.s, i.to_string().as_slice())
+        word(&mut self.s, i.to_string()[])
     }
 
     pub fn print_name(&mut self, name: ast::Name) -> IoResult<()> {
@@ -1959,7 +1960,7 @@ impl<'a> State<'a> {
                     }
                     try!(self.commasep(
                         Inconsistent,
-                        data.types.as_slice(),
+                        data.types[],
                         |s, ty| s.print_type(&**ty)));
                         comma = true;
                 }
@@ -1982,7 +1983,7 @@ impl<'a> State<'a> {
                 try!(word(&mut self.s, "("));
                 try!(self.commasep(
                     Inconsistent,
-                    data.inputs.as_slice(),
+                    data.inputs[],
                     |s, ty| s.print_type(&**ty)));
                 try!(word(&mut self.s, ")"));
 
@@ -2035,7 +2036,7 @@ impl<'a> State<'a> {
                     Some(ref args) => {
                         if !args.is_empty() {
                             try!(self.popen());
-                            try!(self.commasep(Inconsistent, args.as_slice(),
+                            try!(self.commasep(Inconsistent, args[],
                                               |s, p| s.print_pat(&**p)));
                             try!(self.pclose());
                         }
@@ -2047,7 +2048,7 @@ impl<'a> State<'a> {
                 try!(self.nbsp());
                 try!(self.word_space("{"));
                 try!(self.commasep_cmnt(
-                    Consistent, fields.as_slice(),
+                    Consistent, fields[],
                     |s, f| {
                         try!(s.cbox(indent_unit));
                         if !f.node.is_shorthand {
@@ -2068,7 +2069,7 @@ impl<'a> State<'a> {
             ast::PatTup(ref elts) => {
                 try!(self.popen());
                 try!(self.commasep(Inconsistent,
-                                   elts.as_slice(),
+                                   elts[],
                                    |s, p| s.print_pat(&**p)));
                 if elts.len() == 1 {
                     try!(word(&mut self.s, ","));
@@ -2093,7 +2094,7 @@ impl<'a> State<'a> {
             ast::PatVec(ref before, ref slice, ref after) => {
                 try!(word(&mut self.s, "["));
                 try!(self.commasep(Inconsistent,
-                                   before.as_slice(),
+                                   before[],
                                    |s, p| s.print_pat(&**p)));
                 for p in slice.iter() {
                     if !before.is_empty() { try!(self.word_space(",")); }
@@ -2107,7 +2108,7 @@ impl<'a> State<'a> {
                     if !after.is_empty() { try!(self.word_space(",")); }
                 }
                 try!(self.commasep(Inconsistent,
-                                   after.as_slice(),
+                                   after[],
                                    |s, p| s.print_pat(&**p)));
                 try!(word(&mut self.s, "]"));
             }
@@ -2124,7 +2125,7 @@ impl<'a> State<'a> {
         }
         try!(self.cbox(indent_unit));
         try!(self.ibox(0u));
-        try!(self.print_outer_attributes(arm.attrs.as_slice()));
+        try!(self.print_outer_attributes(arm.attrs[]));
         let mut first = true;
         for p in arm.pats.iter() {
             if first {
@@ -2224,7 +2225,7 @@ impl<'a> State<'a> {
 
         // HACK(eddyb) ignore the separately printed self argument.
         let args = if first {
-            decl.inputs.as_slice()
+            decl.inputs[]
         } else {
             decl.inputs.slice_from(1)
         };
@@ -2386,7 +2387,7 @@ impl<'a> State<'a> {
             ints.push(i);
         }
 
-        try!(self.commasep(Inconsistent, ints.as_slice(), |s, &idx| {
+        try!(self.commasep(Inconsistent, ints[], |s, &idx| {
             if idx < generics.lifetimes.len() {
                 let lifetime = &generics.lifetimes[idx];
                 s.print_lifetime_def(lifetime)
@@ -2407,7 +2408,7 @@ impl<'a> State<'a> {
             try!(self.word_space("?"));
         }
         try!(self.print_ident(param.ident));
-        try!(self.print_bounds(":", param.bounds.as_slice()));
+        try!(self.print_bounds(":", param.bounds[]));
         match param.default {
             Some(ref default) => {
                 try!(space(&mut self.s));
@@ -2483,7 +2484,7 @@ impl<'a> State<'a> {
                 try!(word(&mut self.s, name.get()));
                 try!(self.popen());
                 try!(self.commasep(Consistent,
-                                   items.as_slice(),
+                                   items[],
                                    |s, i| s.print_meta_item(&**i)));
                 try!(self.pclose());
             }
@@ -2519,7 +2520,7 @@ impl<'a> State<'a> {
                     try!(self.print_path(path, false));
                     try!(word(&mut self.s, "::{"));
                 }
-                try!(self.commasep(Inconsistent, idents.as_slice(), |s, w| {
+                try!(self.commasep(Inconsistent, idents[], |s, w| {
                     match w.node {
                         ast::PathListIdent { name, .. } => {
                             s.print_ident(name)
@@ -2537,7 +2538,7 @@ impl<'a> State<'a> {
     pub fn print_view_item(&mut self, item: &ast::ViewItem) -> IoResult<()> {
         try!(self.hardbreak_if_not_bol());
         try!(self.maybe_print_comment(item.span.lo));
-        try!(self.print_outer_attributes(item.attrs.as_slice()));
+        try!(self.print_outer_attributes(item.attrs[]));
         try!(self.print_visibility(item.vis));
         match item.node {
             ast::ViewItemExternCrate(id, ref optional_path, _) => {
@@ -2679,7 +2680,7 @@ impl<'a> State<'a> {
             try!(self.pclose());
         }
 
-        try!(self.print_bounds(":", bounds.as_slice()));
+        try!(self.print_bounds(":", bounds[]));
 
         try!(self.print_fn_output(decl));
 
@@ -2738,7 +2739,7 @@ impl<'a> State<'a> {
         try!(self.maybe_print_comment(lit.span.lo));
         match self.next_lit(lit.span.lo) {
             Some(ref ltrl) => {
-                return word(&mut self.s, (*ltrl).lit.as_slice());
+                return word(&mut self.s, (*ltrl).lit[]);
             }
             _ => ()
         }
@@ -2748,7 +2749,7 @@ impl<'a> State<'a> {
                 let mut res = String::from_str("b'");
                 ascii::escape_default(byte, |c| res.push(c as char));
                 res.push('\'');
-                word(&mut self.s, res.as_slice())
+                word(&mut self.s, res[])
             }
             ast::LitChar(ch) => {
                 let mut res = String::from_str("'");
@@ -2756,27 +2757,27 @@ impl<'a> State<'a> {
                     res.push(c);
                 }
                 res.push('\'');
-                word(&mut self.s, res.as_slice())
+                word(&mut self.s, res[])
             }
             ast::LitInt(i, t) => {
                 match t {
                     ast::SignedIntLit(st, ast::Plus) => {
                         word(&mut self.s,
-                             ast_util::int_ty_to_string(st, Some(i as i64)).as_slice())
+                             ast_util::int_ty_to_string(st, Some(i as i64))[])
                     }
                     ast::SignedIntLit(st, ast::Minus) => {
                         let istr = ast_util::int_ty_to_string(st, Some(-(i as i64)));
                         word(&mut self.s,
-                             format!("-{}", istr).as_slice())
+                             format!("-{}", istr)[])
                     }
                     ast::UnsignedIntLit(ut) => {
-                        word(&mut self.s, ast_util::uint_ty_to_string(ut, Some(i)).as_slice())
+                        word(&mut self.s, ast_util::uint_ty_to_string(ut, Some(i))[])
                     }
                     ast::UnsuffixedIntLit(ast::Plus) => {
-                        word(&mut self.s, format!("{}", i).as_slice())
+                        word(&mut self.s, format!("{}", i)[])
                     }
                     ast::UnsuffixedIntLit(ast::Minus) => {
-                        word(&mut self.s, format!("-{}", i).as_slice())
+                        word(&mut self.s, format!("-{}", i)[])
                     }
                 }
             }
@@ -2785,7 +2786,7 @@ impl<'a> State<'a> {
                      format!(
                          "{}{}",
                          f.get(),
-                         ast_util::float_ty_to_string(t).as_slice()).as_slice())
+                         ast_util::float_ty_to_string(t)[])[])
             }
             ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.get()),
             ast::LitBool(val) => {
@@ -2797,7 +2798,7 @@ impl<'a> State<'a> {
                     ascii::escape_default(ch as u8,
                                           |ch| escaped.push(ch as char));
                 }
-                word(&mut self.s, format!("b\"{}\"", escaped).as_slice())
+                word(&mut self.s, format!("b\"{}\"", escaped)[])
             }
         }
     }
@@ -2838,7 +2839,7 @@ impl<'a> State<'a> {
             comments::Mixed => {
                 assert_eq!(cmnt.lines.len(), 1u);
                 try!(zerobreak(&mut self.s));
-                try!(word(&mut self.s, cmnt.lines[0].as_slice()));
+                try!(word(&mut self.s, cmnt.lines[0][]));
                 zerobreak(&mut self.s)
             }
             comments::Isolated => {
@@ -2847,7 +2848,7 @@ impl<'a> State<'a> {
                     // Don't print empty lines because they will end up as trailing
                     // whitespace
                     if !line.is_empty() {
-                        try!(word(&mut self.s, line.as_slice()));
+                        try!(word(&mut self.s, line[]));
                     }
                     try!(hardbreak(&mut self.s));
                 }
@@ -2856,13 +2857,13 @@ impl<'a> State<'a> {
             comments::Trailing => {
                 try!(word(&mut self.s, " "));
                 if cmnt.lines.len() == 1u {
-                    try!(word(&mut self.s, cmnt.lines[0].as_slice()));
+                    try!(word(&mut self.s, cmnt.lines[0][]));
                     hardbreak(&mut self.s)
                 } else {
                     try!(self.ibox(0u));
                     for line in cmnt.lines.iter() {
                         if !line.is_empty() {
-                            try!(word(&mut self.s, line.as_slice()));
+                            try!(word(&mut self.s, line[]));
                         }
                         try!(hardbreak(&mut self.s));
                     }
@@ -2891,11 +2892,11 @@ impl<'a> State<'a> {
             }
             ast::RawStr(n) => {
                 (format!("r{delim}\"{string}\"{delim}",
-                         delim="#".repeat(n),
+                         delim=repeat("#", n),
                          string=st))
             }
         };
-        word(&mut self.s, st.as_slice())
+        word(&mut self.s, st[])
     }
 
     pub fn next_comment(&mut self) -> Option<comments::Comment> {
@@ -2926,7 +2927,7 @@ impl<'a> State<'a> {
             Some(abi::Rust) => Ok(()),
             Some(abi) => {
                 try!(self.word_nbsp("extern"));
-                self.word_nbsp(abi.to_string().as_slice())
+                self.word_nbsp(abi.to_string()[])
             }
             None => Ok(())
         }
@@ -2937,7 +2938,7 @@ impl<'a> State<'a> {
         match opt_abi {
             Some(abi) => {
                 try!(self.word_nbsp("extern"));
-                self.word_nbsp(abi.to_string().as_slice())
+                self.word_nbsp(abi.to_string()[])
             }
             None => Ok(())
         }
@@ -2953,7 +2954,7 @@ impl<'a> State<'a> {
 
         if abi != abi::Rust {
             try!(self.word_nbsp("extern"));
-            try!(self.word_nbsp(abi.to_string().as_slice()));
+            try!(self.word_nbsp(abi.to_string()[]));
         }
 
         word(&mut self.s, "fn")
@@ -2967,6 +2968,8 @@ impl<'a> State<'a> {
     }
 }
 
+fn repeat(s: &str, n: uint) -> String { iter::repeat(s).take(n).collect() }
+
 #[cfg(test)]
 mod test {
     use super::*;
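
Note on the pprust.rs hunks above: the changes are almost entirely mechanical. Each `foo.as_slice()` call becomes the full-range slicing form `foo[]` that replaced the deprecated `Str::as_slice`, and the `str::repeat` method dropped in this round of stabilization is rebuilt as a small local `repeat` helper on top of `std::iter::repeat`. A minimal sketch of that helper as it would read on a current toolchain (where `uint` is spelled `usize` and `foo[]` is spelled `&foo[..]`):

    use std::iter;

    // Replacement for the old `"#".repeat(n)`: build the string from an
    // iterator that yields `s` forever and keep the first `n` copies.
    fn repeat(s: &str, n: usize) -> String {
        iter::repeat(s).take(n).collect()
    }

    fn main() {
        // `delim=repeat("#", n)` in the raw-string printer produces the
        // same delimiter string the old method call did.
        assert_eq!(repeat("#", 4), "####");
    }
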
diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs
index e98be046586..e1c8ff5011b 100644
--- a/src/libsyntax/std_inject.rs
+++ b/src/libsyntax/std_inject.rs
@@ -40,7 +40,7 @@ pub fn maybe_inject_prelude(krate: ast::Crate) -> ast::Crate {
 }
 
 fn use_std(krate: &ast::Crate) -> bool {
-    !attr::contains_name(krate.attrs.as_slice(), "no_std")
+    !attr::contains_name(krate.attrs[], "no_std")
 }
 
 fn no_prelude(attrs: &[ast::Attribute]) -> bool {
@@ -56,7 +56,7 @@ impl<'a> fold::Folder for StandardLibraryInjector<'a> {
 
         // The name to use in `extern crate "name" as std;`
         let actual_crate_name = match self.alt_std_name {
-            Some(ref s) => token::intern_and_get_ident(s.as_slice()),
+            Some(ref s) => token::intern_and_get_ident(s[]),
             None => token::intern_and_get_ident("std"),
         };
 
@@ -118,7 +118,7 @@ impl<'a> fold::Folder for PreludeInjector<'a> {
         attr::mark_used(&no_std_attr);
         krate.attrs.push(no_std_attr);
 
-        if !no_prelude(krate.attrs.as_slice()) {
+        if !no_prelude(krate.attrs[]) {
             // only add `use std::prelude::*;` if there wasn't a
             // `#![no_implicit_prelude]` at the crate level.
             // fold_mod() will insert glob path.
@@ -138,7 +138,7 @@ impl<'a> fold::Folder for PreludeInjector<'a> {
     }
 
     fn fold_item(&mut self, item: P<ast::Item>) -> SmallVector<P<ast::Item>> {
-        if !no_prelude(item.attrs.as_slice()) {
+        if !no_prelude(item.attrs[]) {
             // only recur if there wasn't `#![no_implicit_prelude]`
             // on this item, i.e. this means that the prelude is not
             // implicitly imported though the whole subtree
diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs
index 155cabb153c..bc7dda8c44a 100644
--- a/src/libsyntax/test.rs
+++ b/src/libsyntax/test.rs
@@ -73,14 +73,14 @@ pub fn modify_for_testing(sess: &ParseSess,
     // We generate the test harness when building in the 'test'
     // configuration, either with the '--test' or '--cfg test'
     // command line options.
-    let should_test = attr::contains_name(krate.config.as_slice(), "test");
+    let should_test = attr::contains_name(krate.config[], "test");
 
     // Check for #[reexport_test_harness_main = "some_name"] which
     // creates a `use some_name = __test::main;`. This needs to be
     // unconditional, so that the attribute is still marked as used in
     // non-test builds.
     let reexport_test_harness_main =
-        attr::first_attr_value_str_by_name(krate.attrs.as_slice(),
+        attr::first_attr_value_str_by_name(krate.attrs[],
                                            "reexport_test_harness_main");
 
     if should_test {
@@ -119,7 +119,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> {
             self.cx.path.push(ident);
         }
         debug!("current path: {}",
-               ast_util::path_name_i(self.cx.path.as_slice()));
+               ast_util::path_name_i(self.cx.path[]));
 
         if is_test_fn(&self.cx, &*i) || is_bench_fn(&self.cx, &*i) {
             match i.node {
@@ -277,8 +277,8 @@ fn strip_test_functions(krate: ast::Crate) -> ast::Crate {
     // When not compiling with --test we should not compile the
     // #[test] functions
     config::strip_items(krate, |attrs| {
-        !attr::contains_name(attrs.as_slice(), "test") &&
-        !attr::contains_name(attrs.as_slice(), "bench")
+        !attr::contains_name(attrs[], "test") &&
+        !attr::contains_name(attrs[], "bench")
     })
 }
 
@@ -291,7 +291,7 @@ enum HasTestSignature {
 
 
 fn is_test_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
-    let has_test_attr = attr::contains_name(i.attrs.as_slice(), "test");
+    let has_test_attr = attr::contains_name(i.attrs[], "test");
 
     fn has_test_signature(i: &ast::Item) -> HasTestSignature {
         match &i.node {
@@ -329,7 +329,7 @@ fn is_test_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
 }
 
 fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
-    let has_bench_attr = attr::contains_name(i.attrs.as_slice(), "bench");
+    let has_bench_attr = attr::contains_name(i.attrs[], "bench");
 
     fn has_test_signature(i: &ast::Item) -> bool {
         match i.node {
@@ -384,7 +384,7 @@ We're going to be building a module that looks more or less like:
 mod __test {
   extern crate test (name = "test", vers = "...");
   fn main() {
-    test::test_main_static(::os::args().as_slice(), tests)
+    test::test_main_static(::os::args()[], tests)
   }
 
   static tests : &'static [test::TestDescAndFn] = &[
@@ -510,8 +510,8 @@ fn mk_tests(cx: &TestCtxt) -> P<ast::Item> {
 }
 
 fn is_test_crate(krate: &ast::Crate) -> bool {
-    match attr::find_crate_name(krate.attrs.as_slice()) {
-        Some(ref s) if "test" == s.get().as_slice() => true,
+    match attr::find_crate_name(krate.attrs[]) {
+        Some(ref s) if "test" == s.get()[] => true,
         _ => false
     }
 }
@@ -551,11 +551,11 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P<ast::Expr> {
     // creates $name: $expr
     let field = |name, expr| ecx.field_imm(span, ecx.ident_of(name), expr);
 
-    debug!("encoding {}", ast_util::path_name_i(path.as_slice()));
+    debug!("encoding {}", ast_util::path_name_i(path[]));
 
     // path to the #[test] function: "foo::bar::baz"
-    let path_string = ast_util::path_name_i(path.as_slice());
-    let name_expr = ecx.expr_str(span, token::intern_and_get_ident(path_string.as_slice()));
+    let path_string = ast_util::path_name_i(path[]);
+    let name_expr = ecx.expr_str(span, token::intern_and_get_ident(path_string[]));
 
     // self::test::StaticTestName($name_expr)
     let name_expr = ecx.expr_call(span,
diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs
index 590a04ce221..97eb4316583 100644
--- a/src/libsyntax/util/interner.rs
+++ b/src/libsyntax/util/interner.rs
@@ -95,41 +95,37 @@ pub struct RcStr {
     string: Rc<String>,
 }
 
+impl RcStr {
+    pub fn new(string: &str) -> RcStr {
+        RcStr {
+            string: Rc::new(string.to_string()),
+        }
+    }
+}
+
 impl Eq for RcStr {}
 
 impl Ord for RcStr {
     fn cmp(&self, other: &RcStr) -> Ordering {
-        self.as_slice().cmp(other.as_slice())
-    }
-}
-
-impl Str for RcStr {
-    #[inline]
-    fn as_slice<'a>(&'a self) -> &'a str {
-        let s: &'a str = self.string.as_slice();
-        s
+        self[].cmp(other[])
     }
 }
 
 impl fmt::Show for RcStr {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         use std::fmt::Show;
-        self.as_slice().fmt(f)
+        self[].fmt(f)
     }
 }
 
 impl BorrowFrom<RcStr> for str {
     fn borrow_from(owned: &RcStr) -> &str {
-        owned.string.as_slice()
+        owned.string[]
     }
 }
 
-impl RcStr {
-    pub fn new(string: &str) -> RcStr {
-        RcStr {
-            string: Rc::new(string.into_string()),
-        }
-    }
+impl Deref<str> for RcStr {
+    fn deref(&self) -> &str { self.string[] }
 }
 
 /// A StrInterner differs from Interner<String> in that it accepts
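
The `RcStr` rewrite in the interner is the one structural change in this stretch of the patch: with the `Str` trait (and its `as_slice`) going away, string wrappers hand out `&str` through `Deref` and the borrow machinery instead, which is what the `self[]`/`other[]` calls above resolve through. A rough sketch of the same shape in today's syntax (where `Deref<str>` is written with an associated `Target` type and `BorrowFrom` has become `Borrow`):

    use std::borrow::Borrow;
    use std::ops::Deref;
    use std::rc::Rc;

    #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
    struct RcStr {
        string: Rc<String>,
    }

    impl RcStr {
        fn new(string: &str) -> RcStr {
            RcStr { string: Rc::new(string.to_string()) }
        }
    }

    // Deref lets an `RcStr` be used where a `&str` is wanted, covering
    // the ground the old `as_slice()` calls used to.
    impl Deref for RcStr {
        type Target = str;
        fn deref(&self) -> &str { self.string.as_str() }
    }

    // Borrow keeps map lookups keyed by `&str` working, the role
    // `BorrowFrom` plays in the patch.
    impl Borrow<str> for RcStr {
        fn borrow(&self) -> &str { self.string.as_str() }
    }

    fn main() {
        let s = RcStr::new("interned");
        assert_eq!(&*s, "interned");
        assert_eq!(s.len(), 8); // `len` is resolved on `str` through Deref
    }
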
diff --git a/src/libterm/terminfo/mod.rs b/src/libterm/terminfo/mod.rs
index 65f8415835a..d944d0362fb 100644
--- a/src/libterm/terminfo/mod.rs
+++ b/src/libterm/terminfo/mod.rs
@@ -180,7 +180,7 @@ impl<T: Writer+Send> TerminfoTerminal<T> {
             }
         };
 
-        let entry = open(term.as_slice());
+        let entry = open(term[]);
         if entry.is_err() {
             if os::getenv("MSYSCON").map_or(false, |s| {
                     "mintty.exe" == s
diff --git a/src/libterm/terminfo/searcher.rs b/src/libterm/terminfo/searcher.rs
index 33bfd69f71b..395fac52d8d 100644
--- a/src/libterm/terminfo/searcher.rs
+++ b/src/libterm/terminfo/searcher.rs
@@ -61,13 +61,13 @@ pub fn get_dbpath_for_term(term: &str) -> Option<Box<Path>> {
     for p in dirs_to_search.iter() {
         if p.exists() {
             let f = first_char.to_string();
-            let newp = p.join_many(&[f.as_slice(), term]);
+            let newp = p.join_many(&[f[], term]);
             if newp.exists() {
                 return Some(box newp);
             }
             // on some installations the dir is named after the hex of the char (e.g. OS X)
             let f = format!("{:x}", first_char as uint);
-            let newp = p.join_many(&[f.as_slice(), term]);
+            let newp = p.join_many(&[f[], term]);
             if newp.exists() {
                 return Some(box newp);
             }
diff --git a/src/libtest/lib.rs b/src/libtest/lib.rs
index 5b04a1fed89..1870f162ece 100644
--- a/src/libtest/lib.rs
+++ b/src/libtest/lib.rs
@@ -65,6 +65,7 @@ use std::io::fs::PathExtensions;
 use std::io::stdio::StdWriter;
 use std::io::{File, ChanReader, ChanWriter};
 use std::io;
+use std::iter::repeat;
 use std::num::{Float, FloatMath, Int};
 use std::os;
 use std::str::FromStr;
@@ -121,7 +122,7 @@ impl TestDesc {
     fn padded_name(&self, column_count: uint, align: NamePadding) -> String {
         let mut name = String::from_str(self.name.as_slice());
         let fill = column_count.saturating_sub(name.len());
-        let mut pad = " ".repeat(fill);
+        let mut pad = repeat(" ").take(fill).collect::<String>();
         match align {
             PadNone => name,
             PadOnLeft => {
@@ -426,7 +427,7 @@ pub fn parse_opts(args: &[String]) -> Option<OptRes> {
 
     let ratchet_noise_percent = matches.opt_str("ratchet-noise-percent");
     let ratchet_noise_percent =
-        ratchet_noise_percent.map(|s| from_str::<f64>(s.as_slice()).unwrap());
+        ratchet_noise_percent.map(|s| s.as_slice().parse::<f64>().unwrap());
 
     let save_metrics = matches.opt_str("save-metrics");
     let save_metrics = save_metrics.map(|s| Path::new(s));
@@ -489,7 +490,8 @@ pub fn opt_shard(maybestr: Option<String>) -> Option<(uint,uint)> {
         None => None,
         Some(s) => {
             let mut it = s.split('.');
-            match (it.next().and_then(from_str::<uint>), it.next().and_then(from_str::<uint>),
+            match (it.next().and_then(|s| s.parse::<uint>()),
+                   it.next().and_then(|s| s.parse::<uint>()),
                    it.next()) {
                 (Some(a), Some(b), None) => {
                     if a <= 0 || a > b {
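
Two patterns recur in the libtest hunks: the free function `from_str::<T>(s)` becomes the method call `s.parse::<T>()`, and `str::repeat` is again rebuilt from `std::iter::repeat`. At the time of this commit `parse` returned an `Option`; a sketch of the same code against today's `Result`-returning `parse` would look roughly like:

    use std::iter::repeat;

    fn main() {
        // from_str::<f64>(s)  ->  s.parse::<f64>()
        let ratchet_noise_percent = "2.5".parse::<f64>().unwrap();
        assert_eq!(ratchet_noise_percent, 2.5);

        // " ".repeat(fill)  ->  repeat(" ").take(fill).collect::<String>()
        let fill = 6;
        let pad = repeat(" ").take(fill).collect::<String>();
        assert_eq!(pad.len(), 6);

        // opt_shard-style parsing: split "a.b" and parse both halves,
        // adding `.ok()` where the Option-returning parse was used directly.
        let mut it = "3.7".split('.');
        match (it.next().and_then(|s| s.parse::<usize>().ok()),
               it.next().and_then(|s| s.parse::<usize>().ok()),
               it.next()) {
            (Some(a), Some(b), None) => assert_eq!((a, b), (3, 7)),
            _ => panic!("malformed shard spec"),
        }
    }
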
diff --git a/src/libunicode/u_str.rs b/src/libunicode/u_str.rs
index 5d7d2951628..7d59e3de7b1 100644
--- a/src/libunicode/u_str.rs
+++ b/src/libunicode/u_str.rs
@@ -15,22 +15,16 @@
 //! This module provides functionality to `str` that requires the Unicode methods provided by the
 //! UnicodeChar trait.
 
+use self::GraphemeState::*;
 use core::prelude::*;
 
 use core::char;
 use core::cmp;
-use core::iter::{DoubleEndedIterator, DoubleEndedIteratorExt};
-use core::iter::{Filter, AdditiveIterator, Iterator, IteratorExt};
 use core::iter::{Filter, AdditiveIterator};
-use core::kinds::Sized;
 use core::mem;
 use core::num::Int;
-use core::option::Option::{None, Some};
-use core::option::Option;
-use core::slice::SliceExt;
 use core::slice;
-use core::str::{CharSplits, StrPrelude};
-use core::str::{CharSplits};
+use core::str::CharSplits;
 
 use u_char::UnicodeChar;
 use tables::grapheme::GraphemeCat;
@@ -39,106 +33,20 @@ use tables::grapheme::GraphemeCat;
 /// FIXME: This should be opaque
 #[stable]
 pub struct Words<'a> {
-    inner: Filter<'a, &'a str, CharSplits<'a, |char|:'a -> bool>,
-                  fn(&&str) -> bool>,
+    inner: Filter<&'a str, CharSplits<'a, fn(char) -> bool>, fn(&&str) -> bool>,
 }
 
 /// Methods for Unicode string slices
+#[allow(missing_docs)] // docs in libcollections
 pub trait UnicodeStr for Sized? {
-    /// Returns an iterator over the
-    /// [grapheme clusters](http://www.unicode.org/reports/tr29/#Grapheme_Cluster_Boundaries)
-    /// of the string.
-    ///
-    /// If `is_extended` is true, the iterator is over the *extended grapheme clusters*;
-    /// otherwise, the iterator is over the *legacy grapheme clusters*.
-    /// [UAX#29](http://www.unicode.org/reports/tr29/#Grapheme_Cluster_Boundaries)
-    /// recommends extended grapheme cluster boundaries for general processing.
-    ///
-    /// # Example
-    ///
-    /// ```rust
-    /// let gr1 = "a\u{0310}e\u{0301}o\u{0308}\u{0332}".graphemes(true).collect::<Vec<&str>>();
-    /// let b: &[_] = &["a\u{0310}", "e\u{0301}", "o\u{0308}\u{0332}"];
-    /// assert_eq!(gr1.as_slice(), b);
-    /// let gr2 = "a\r\nb🇷🇺🇸🇹".graphemes(true).collect::<Vec<&str>>();
-    /// let b: &[_] = &["a", "\r\n", "b", "🇷🇺🇸🇹"];
-    /// assert_eq!(gr2.as_slice(), b);
-    /// ```
     fn graphemes<'a>(&'a self, is_extended: bool) -> Graphemes<'a>;
-
-    /// Returns an iterator over the grapheme clusters of self and their byte offsets.
-    /// See `graphemes()` method for more information.
-    ///
-    /// # Example
-    ///
-    /// ```rust
-    /// let gr_inds = "a̐éö̲\r\n".grapheme_indices(true).collect::<Vec<(uint, &str)>>();
-    /// let b: &[_] = &[(0u, "a̐"), (3, "é"), (6, "ö̲"), (11, "\r\n")];
-    /// assert_eq!(gr_inds.as_slice(), b);
-    /// ```
     fn grapheme_indices<'a>(&'a self, is_extended: bool) -> GraphemeIndices<'a>;
-
-    /// An iterator over the words of a string (subsequences separated
-    /// by any sequence of whitespace). Sequences of whitespace are
-    /// collapsed, so empty "words" are not included.
-    ///
-    /// # Example
-    ///
-    /// ```rust
-    /// let some_words = " Mary   had\ta little  \n\t lamb";
-    /// let v: Vec<&str> = some_words.words().collect();
-    /// assert_eq!(v, vec!["Mary", "had", "a", "little", "lamb"]);
-    /// ```
-    #[stable]
     fn words<'a>(&'a self) -> Words<'a>;
-
-    /// Returns true if the string contains only whitespace.
-    ///
-    /// Whitespace characters are determined by `char::is_whitespace`.
-    ///
-    /// # Example
-    ///
-    /// ```rust
-    /// assert!(" \t\n".is_whitespace());
-    /// assert!("".is_whitespace());
-    ///
-    /// assert!( !"abc".is_whitespace());
-    /// ```
     fn is_whitespace(&self) -> bool;
-
-    /// Returns true if the string contains only alphanumeric code
-    /// points.
-    ///
-    /// Alphanumeric characters are determined by `char::is_alphanumeric`.
-    ///
-    /// # Example
-    ///
-    /// ```rust
-    /// assert!("Löwe老虎Léopard123".is_alphanumeric());
-    /// assert!("".is_alphanumeric());
-    ///
-    /// assert!( !" &*~".is_alphanumeric());
-    /// ```
     fn is_alphanumeric(&self) -> bool;
-
-    /// Returns a string's displayed width in columns, treating control
-    /// characters as zero-width.
-    ///
-    /// `is_cjk` determines behavior for characters in the Ambiguous category:
-    /// if `is_cjk` is `true`, these are 2 columns wide; otherwise, they are 1.
-    /// In CJK locales, `is_cjk` should be `true`, else it should be `false`.
-    /// [Unicode Standard Annex #11](http://www.unicode.org/reports/tr11/)
-    /// recommends that these characters be treated as 1 column (i.e.,
-    /// `is_cjk` = `false`) if the locale is unknown.
     fn width(&self, is_cjk: bool) -> uint;
-
-    /// Returns a string with leading and trailing whitespace removed.
     fn trim<'a>(&'a self) -> &'a str;
-
-    /// Returns a string with leading whitespace removed.
     fn trim_left<'a>(&'a self) -> &'a str;
-
-    /// Returns a string with trailing whitespace removed.
     fn trim_right<'a>(&'a self) -> &'a str;
 }
 
@@ -471,10 +379,10 @@ pub fn utf8_char_width(b: u8) -> uint {
 /// Determines if a vector of `u16` contains valid UTF-16
 pub fn is_utf16(v: &[u16]) -> bool {
     let mut it = v.iter();
-    macro_rules! next ( ($ret:expr) => {
+    macro_rules! next { ($ret:expr) => {
             match it.next() { Some(u) => *u, None => return $ret }
         }
-    )
+    }
     loop {
         let u = next!(true);
 
@@ -513,7 +421,7 @@ impl Utf16Item {
     pub fn to_char_lossy(&self) -> char {
         match *self {
             Utf16Item::ScalarValue(c) => c,
-            Utf16Item::LoneSurrogate(_) => '\uFFFD'
+            Utf16Item::LoneSurrogate(_) => '\u{FFFD}'
         }
     }
 }
@@ -568,15 +476,14 @@ impl<'a> Iterator<Utf16Item> for Utf16Items<'a> {
 /// # Example
 ///
 /// ```rust
-/// use std::str;
-/// use std::str::{ScalarValue, LoneSurrogate};
+/// use unicode::str::Utf16Item::{ScalarValue, LoneSurrogate};
 ///
 /// // 𝄞mus<invalid>ic<invalid>
 /// let v = [0xD834, 0xDD1E, 0x006d, 0x0075,
 ///          0x0073, 0xDD1E, 0x0069, 0x0063,
 ///          0xD834];
 ///
-/// assert_eq!(str::utf16_items(&v).collect::<Vec<_>>(),
+/// assert_eq!(unicode::str::utf16_items(&v).collect::<Vec<_>>(),
 ///            vec![ScalarValue('𝄞'),
 ///                 ScalarValue('m'), ScalarValue('u'), ScalarValue('s'),
 ///                 LoneSurrogate(0xDD1E),
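
The libunicode hunks track syntax that was changing alongside the library: unboxed closures turn the `Words` filter into a plain `fn(char) -> bool` pointer, `macro_rules!` definitions now use braces, and the character escape `'\uFFFD'` becomes `'\u{FFFD}'`. All of these survive into modern Rust; a small sketch combining them on a current toolchain, with a hypothetical `next_or!` macro standing in for the patch's local `next!` (the real macro captures `it` from its enclosing scope, here it is passed explicitly):

    // Braced macro_rules! definition, as in the rewritten `next!` macro.
    macro_rules! next_or {
        ($it:expr, $ret:expr) => {
            match $it.next() { Some(u) => *u, None => return $ret }
        }
    }

    fn starts_with_surrogate(v: &[u16]) -> bool {
        let mut it = v.iter();
        let u = next_or!(it, false);
        (0xD800..=0xDFFF).contains(&u)
    }

    fn main() {
        // Braced \u{...} escapes replace the old \uXXXX form.
        let replacement = '\u{FFFD}';
        assert_eq!(replacement as u32, 0xFFFD);

        // Plain fn pointers stand in where the old closure types appeared.
        let not_empty: fn(&&str) -> bool = |s| !s.is_empty();
        let words: Vec<&str> = " a  b ".split(char::is_whitespace)
            .filter(not_empty)
            .collect();
        assert_eq!(words, ["a", "b"]);

        assert!(starts_with_surrogate(&[0xD834, 0xDD1E]));
        assert!(!starts_with_surrogate(&[0x0061]));
    }
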
diff --git a/src/test/run-pass/issue-19340-1.rs b/src/test/run-pass/issue-19340-1.rs
index b7a6391ee04..2f466d4ca8c 100644
--- a/src/test/run-pass/issue-19340-1.rs
+++ b/src/test/run-pass/issue-19340-1.rs
@@ -15,7 +15,7 @@ extern crate "issue-19340-1" as lib;
 use lib::Homura;
 
 fn main() {
-    let homura = Homura::Madoka { name: "Kaname".into_string() };
+    let homura = Homura::Madoka { name: "Kaname".to_string() };
 
     match homura {
         Homura::Madoka { name } => (),
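
The run-pass test fixes below are the last piece of the same fallout: with `into_string()` on its way out, string construction goes through `ToString::to_string` (or `String::from`). A trivial illustration:

    fn main() {
        // "Kaname".into_string()  ->  "Kaname".to_string()
        let name: String = "Kaname".to_string();
        assert_eq!(name, "Kaname");

        // String::from is the other spelling that survives today.
        assert_eq!(String::from("Akemi"), "Akemi".to_string());
    }
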
diff --git a/src/test/run-pass/issue-19340-2.rs b/src/test/run-pass/issue-19340-2.rs
index 5179c1e2acb..8300220edea 100644
--- a/src/test/run-pass/issue-19340-2.rs
+++ b/src/test/run-pass/issue-19340-2.rs
@@ -17,7 +17,7 @@ enum Homura {
 
 fn main() {
     let homura = Homura::Madoka {
-        name: "Akemi".into_string(),
+        name: "Akemi".to_string(),
         age: 14,
     };
 
diff --git a/src/test/run-pass/issue-19367.rs b/src/test/run-pass/issue-19367.rs
index 3efc2ee50f3..7db84d518ff 100644
--- a/src/test/run-pass/issue-19367.rs
+++ b/src/test/run-pass/issue-19367.rs
@@ -16,10 +16,10 @@ struct S {
 // on field of struct or tuple which we reassign in the match body.
 
 fn main() {
-    let mut a = (0i, Some("right".into_string()));
+    let mut a = (0i, Some("right".to_string()));
     let b = match a.1 {
         Some(v) => {
-            a.1 = Some("wrong".into_string());
+            a.1 = Some("wrong".to_string());
             v
         }
         None => String::new()
@@ -28,10 +28,10 @@ fn main() {
     assert_eq!(b, "right");
 
 
-    let mut s = S{ o: Some("right".into_string()) };
+    let mut s = S{ o: Some("right".to_string()) };
     let b = match s.o {
         Some(v) => {
-            s.o = Some("wrong".into_string());
+            s.o = Some("wrong".to_string());
             v
         }
         None => String::new(),