Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/ast.rs                       |   2
-rw-r--r--  src/libsyntax/attr.rs                      |  14
-rw-r--r--  src/libsyntax/codemap.rs                   |  60
-rw-r--r--  src/libsyntax/config.rs                    |   5
-rw-r--r--  src/libsyntax/diagnostics/metadata.rs      |   2
-rw-r--r--  src/libsyntax/ext/base.rs                  |   4
-rw-r--r--  src/libsyntax/ext/build.rs                 |   2
-rw-r--r--  src/libsyntax/ext/derive.rs                |   2
-rw-r--r--  src/libsyntax/ext/expand.rs                |  13
-rw-r--r--  src/libsyntax/ext/source_util.rs           |   6
-rw-r--r--  src/libsyntax/ext/tt/macro_parser.rs       |   7
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs        |   4
-rw-r--r--  src/libsyntax/ext/tt/quoted.rs             |   8
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs         |   6
-rw-r--r--  src/libsyntax/json.rs                      |   8
-rw-r--r--  src/libsyntax/parse/lexer/comments.rs      |   2
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs           |  28
-rw-r--r--  src/libsyntax/parse/lexer/tokentrees.rs    |   7
-rw-r--r--  src/libsyntax/parse/lexer/unicode_chars.rs |   2
-rw-r--r--  src/libsyntax/parse/mod.rs                 |   6
-rw-r--r--  src/libsyntax/parse/parser.rs              |  39
-rw-r--r--  src/libsyntax/print/pprust.rs              |  52
-rw-r--r--  src/libsyntax/std_inject.rs                |   2
-rw-r--r--  src/libsyntax/test.rs                      |   2
-rw-r--r--  src/libsyntax/test_snippet.rs              |   6
-rw-r--r--  src/libsyntax/tokenstream.rs               |  10
26 files changed, 135 insertions, 164 deletions
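
The whole patch is a mechanical migration away from direct access to Span's formerly public fields (sp.lo, sp.hi, sp.ctxt, and Span { .. } struct literals) to the accessor and builder methods used in the hunks below: lo(), hi(), ctxt(), Span::new, with_lo/with_hi/with_ctxt, and to(). The sketch that follows is a minimal standalone model of that API for orientation only, not the real syntax_pos type: it uses plain u32 values where the compiler uses BytePos and SyntaxContext, and the to() merge is simplified.

// Minimal model of the accessor-style Span API this patch migrates to.
// Plain u32s stand in for BytePos/SyntaxContext so the sketch compiles
// on its own; the real type lives in the syntax_pos crate.
#[derive(Copy, Clone, Debug, PartialEq)]
struct Span {
    lo: u32,
    hi: u32,
    ctxt: u32,
}

impl Span {
    fn new(lo: u32, hi: u32, ctxt: u32) -> Span {
        Span { lo, hi, ctxt }
    }
    // Read-only accessors replace direct field reads such as `sp.lo`.
    fn lo(self) -> u32 { self.lo }
    fn hi(self) -> u32 { self.hi }
    fn ctxt(self) -> u32 { self.ctxt }
    // Builders replace struct-update literals such as `Span { lo, ..sp }`.
    fn with_lo(self, lo: u32) -> Span { Span { lo, ..self } }
    fn with_hi(self, hi: u32) -> Span { Span { hi, ..self } }
    fn with_ctxt(self, ctxt: u32) -> Span { Span { ctxt, ..self } }
    // Simplified merge: `a.to(b)` spans from the start of `a` to the end
    // of `b`, keeping `a`'s context (see the merge_spans hunk below).
    fn to(self, end: Span) -> Span {
        Span::new(self.lo.min(end.lo), self.hi.max(end.hi), self.ctxt)
    }
}

fn main() {
    let lhs = Span::new(0, 5, 0);
    let rhs = Span::new(8, 12, 0);
    // Before: Span { lo: lhs.lo, hi: rhs.hi, ctxt: lhs.ctxt }
    // After:  builder methods and `to`, as in the hunks below.
    assert_eq!(lhs.to(rhs), Span::new(0, 12, 0));
    assert_eq!(lhs.with_hi(rhs.hi()), Span::new(0, 12, 0));
    println!("merged: {:?}", lhs.to(rhs));
}

With the fields private, call sites such as the merge_spans hunk in codemap.rs below collapse from a hand-written min/max struct literal into sp_lhs.to(sp_rhs).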
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index c1c2faaef0b..925178f8639 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -134,7 +134,7 @@ impl PathSegment {
     }
     pub fn crate_root(span: Span) -> Self {
         PathSegment {
-            identifier: Ident { ctxt: span.ctxt, ..keywords::CrateRoot.ident() },
+            identifier: Ident { ctxt: span.ctxt(), ..keywords::CrateRoot.ident() },
             span,
             parameters: None,
         }
diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index f97a8f67e22..bf8e0c0996a 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -1059,7 +1059,7 @@ impl MetaItem {
     fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
         where I: Iterator<Item = TokenTree>,
     {
-        let (mut span, name) = match tokens.next() {
+        let (span, name) = match tokens.next() {
             Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name),
             Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match nt.0 {
                 token::Nonterminal::NtIdent(ident) => (ident.span, ident.node.name),
@@ -1068,17 +1068,17 @@ impl MetaItem {
             },
             _ => return None,
         };
-        let list_closing_paren_pos = tokens.peek().map(|tt| tt.span().hi);
+        let list_closing_paren_pos = tokens.peek().map(|tt| tt.span().hi());
         let node = match MetaItemKind::from_tokens(tokens) {
             Some(node) => node,
             _ => return None,
         };
-        span.hi = match node {
-            MetaItemKind::NameValue(ref lit) => lit.span.hi,
-            MetaItemKind::List(..) => list_closing_paren_pos.unwrap_or(span.hi),
-            _ => span.hi,
+        let hi = match node {
+            MetaItemKind::NameValue(ref lit) => lit.span.hi(),
+            MetaItemKind::List(..) => list_closing_paren_pos.unwrap_or(span.hi()),
+            _ => span.hi(),
         };
-        Some(MetaItem { name: name, span: span, node: node })
+        Some(MetaItem { name, node, span: span.with_hi(hi) })
     }
 }
 
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index 30ae7df9353..cd4a6f921fe 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -34,8 +34,8 @@ use errors::CodeMapper;
 /// otherwise return the call site span up to the `enclosing_sp` by
 /// following the `expn_info` chain.
 pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span {
-    let call_site1 = sp.ctxt.outer().expn_info().map(|ei| ei.call_site);
-    let call_site2 = enclosing_sp.ctxt.outer().expn_info().map(|ei| ei.call_site);
+    let call_site1 = sp.ctxt().outer().expn_info().map(|ei| ei.call_site);
+    let call_site2 = enclosing_sp.ctxt().outer().expn_info().map(|ei| ei.call_site);
     match (call_site1, call_site2) {
         (None, _) => sp,
         (Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp,
@@ -232,7 +232,7 @@ impl CodeMap {
     }
 
     pub fn mk_substr_filename(&self, sp: Span) -> String {
-        let pos = self.lookup_char_pos(sp.lo);
+        let pos = self.lookup_char_pos(sp.lo());
         (format!("<{}:{}:{}>",
                  pos.file.name,
                  pos.line,
@@ -299,18 +299,16 @@ impl CodeMap {
     ///    * the lhs span needs to end on the same line the rhs span begins
     ///    * the lhs span must start at or before the rhs span
     pub fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span> {
-        use std::cmp;
-
         // make sure we're at the same expansion id
-        if sp_lhs.ctxt != sp_rhs.ctxt {
+        if sp_lhs.ctxt() != sp_rhs.ctxt() {
             return None;
         }
 
-        let lhs_end = match self.lookup_line(sp_lhs.hi) {
+        let lhs_end = match self.lookup_line(sp_lhs.hi()) {
             Ok(x) => x,
             Err(_) => return None
         };
-        let rhs_begin = match self.lookup_line(sp_rhs.lo) {
+        let rhs_begin = match self.lookup_line(sp_rhs.lo()) {
             Ok(x) => x,
             Err(_) => return None
         };
@@ -321,12 +319,8 @@ impl CodeMap {
         }
 
         // ensure these follow the expected order and we don't overlap
-        if (sp_lhs.lo <= sp_rhs.lo) && (sp_lhs.hi <= sp_rhs.lo) {
-            Some(Span {
-                lo: cmp::min(sp_lhs.lo, sp_rhs.lo),
-                hi: cmp::max(sp_lhs.hi, sp_rhs.hi),
-                ctxt: sp_lhs.ctxt,
-            })
+        if (sp_lhs.lo() <= sp_rhs.lo()) && (sp_lhs.hi() <= sp_rhs.lo()) {
+            Some(sp_lhs.to(sp_rhs))
         } else {
             None
         }
@@ -337,8 +331,8 @@ impl CodeMap {
             return "no-location".to_string();
         }
 
-        let lo = self.lookup_char_pos_adj(sp.lo);
-        let hi = self.lookup_char_pos_adj(sp.hi);
+        let lo = self.lookup_char_pos_adj(sp.lo());
+        let hi = self.lookup_char_pos_adj(sp.hi());
         return (format!("{}:{}:{}: {}:{}",
                         lo.filename,
                         lo.line,
@@ -348,19 +342,19 @@ impl CodeMap {
     }
 
     pub fn span_to_filename(&self, sp: Span) -> FileName {
-        self.lookup_char_pos(sp.lo).file.name.to_string()
+        self.lookup_char_pos(sp.lo()).file.name.to_string()
     }
 
     pub fn span_to_lines(&self, sp: Span) -> FileLinesResult {
         debug!("span_to_lines(sp={:?})", sp);
 
-        if sp.lo > sp.hi {
+        if sp.lo() > sp.hi() {
             return Err(SpanLinesError::IllFormedSpan(sp));
         }
 
-        let lo = self.lookup_char_pos(sp.lo);
+        let lo = self.lookup_char_pos(sp.lo());
         debug!("span_to_lines: lo={:?}", lo);
-        let hi = self.lookup_char_pos(sp.hi);
+        let hi = self.lookup_char_pos(sp.hi());
         debug!("span_to_lines: hi={:?}", hi);
 
         if lo.file.start_pos != hi.file.start_pos {
@@ -400,12 +394,12 @@ impl CodeMap {
     }
 
     pub fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError> {
-        if sp.lo > sp.hi {
+        if sp.lo() > sp.hi() {
             return Err(SpanSnippetError::IllFormedSpan(sp));
         }
 
-        let local_begin = self.lookup_byte_offset(sp.lo);
-        let local_end = self.lookup_byte_offset(sp.hi);
+        let local_begin = self.lookup_byte_offset(sp.lo());
+        let local_end = self.lookup_byte_offset(sp.hi());
 
         if local_begin.fm.start_pos != local_end.fm.start_pos {
             return Err(SpanSnippetError::DistinctSources(DistinctSources {
@@ -450,7 +444,7 @@ impl CodeMap {
             Ok(snippet) => {
                 let snippet = snippet.split(c).nth(0).unwrap_or("").trim_right();
                 if !snippet.is_empty() && !snippet.contains('\n') {
-                    Span { hi: BytePos(sp.lo.0 + snippet.len() as u32), ..sp }
+                    sp.with_hi(BytePos(sp.lo().0 + snippet.len() as u32))
                 } else {
                     sp
                 }
@@ -752,7 +746,7 @@ mod tests {
     fn t7() {
         // Test span_to_lines for a span ending at the end of filemap
         let cm = init_code_map();
-        let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION};
+        let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
         let file_lines = cm.span_to_lines(span).unwrap();
 
         assert_eq!(file_lines.file.name, "blork.rs");
@@ -768,7 +762,7 @@ mod tests {
         assert_eq!(input.len(), selection.len());
         let left_index = selection.find('~').unwrap() as u32;
         let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index);
-        Span { lo: BytePos(left_index), hi: BytePos(right_index + 1), ctxt: NO_EXPANSION }
+        Span::new(BytePos(left_index), BytePos(right_index + 1), NO_EXPANSION)
     }
 
     /// Test span_to_snippet and span_to_lines for a span converting 3
@@ -798,7 +792,7 @@ mod tests {
     fn t8() {
         // Test span_to_snippet for a span ending at the end of filemap
         let cm = init_code_map();
-        let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION};
+        let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
         let snippet = cm.span_to_snippet(span);
 
         assert_eq!(snippet, Ok("second line".to_string()));
@@ -808,7 +802,7 @@ mod tests {
     fn t9() {
         // Test span_to_str for a span ending at the end of filemap
         let cm = init_code_map();
-        let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION};
+        let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
         let sstr =  cm.span_to_string(span);
 
         assert_eq!(sstr, "blork.rs:2:1: 2:12");
@@ -859,11 +853,11 @@ mod tests {
                 let lo = hi + offset;
                 hi = lo + substring.len();
                 if i == n {
-                    let span = Span {
-                        lo: BytePos(lo as u32 + file.start_pos.0),
-                        hi: BytePos(hi as u32 + file.start_pos.0),
-                        ctxt: NO_EXPANSION,
-                    };
+                    let span = Span::new(
+                        BytePos(lo as u32 + file.start_pos.0),
+                        BytePos(hi as u32 + file.start_pos.0),
+                        NO_EXPANSION,
+                    );
                     assert_eq!(&self.span_to_snippet(span).unwrap()[..],
                             substring);
                     return span;
diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs
index 14f1f8fbf8c..0909eec6269 100644
--- a/src/libsyntax/config.rs
+++ b/src/libsyntax/config.rs
@@ -14,7 +14,6 @@ use {fold, attr};
 use ast;
 use codemap::Spanned;
 use parse::{token, ParseSess};
-use syntax_pos::Span;
 
 use ptr::P;
 use util::small_vector::SmallVector;
@@ -89,10 +88,10 @@ impl<'a> StripUnconfigured<'a> {
             parser.expect(&token::OpenDelim(token::Paren))?;
             let cfg = parser.parse_meta_item()?;
             parser.expect(&token::Comma)?;
-            let lo = parser.span.lo;
+            let lo = parser.span.lo();
             let (path, tokens) = parser.parse_path_and_tokens()?;
             parser.expect(&token::CloseDelim(token::Paren))?;
-            Ok((cfg, path, tokens, Span { lo: lo, ..parser.prev_span }))
+            Ok((cfg, path, tokens, parser.prev_span.with_lo(lo)))
         }) {
             Ok(result) => result,
             Err(mut e) => {
diff --git a/src/libsyntax/diagnostics/metadata.rs b/src/libsyntax/diagnostics/metadata.rs
index 5bbd18bd9ee..daa7112235f 100644
--- a/src/libsyntax/diagnostics/metadata.rs
+++ b/src/libsyntax/diagnostics/metadata.rs
@@ -47,7 +47,7 @@ pub struct ErrorLocation {
 impl ErrorLocation {
     /// Create an error location from a span.
     pub fn from_span(ecx: &ExtCtxt, sp: Span) -> ErrorLocation {
-        let loc = ecx.codemap().lookup_char_pos_adj(sp.lo);
+        let loc = ecx.codemap().lookup_char_pos_adj(sp.lo());
         ErrorLocation {
             filename: loc.filename,
             line: loc.line
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index e57d9c6fe89..cac2ff975d6 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -731,7 +731,7 @@ impl<'a> ExtCtxt<'a> {
                     // Stop going up the backtrace once include! is encountered
                     return None;
                 }
-                ctxt = info.call_site.ctxt;
+                ctxt = info.call_site.ctxt();
                 last_macro = Some(info.call_site);
                 Some(())
             }).is_none() {
@@ -837,7 +837,7 @@ pub fn expr_to_spanned_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &st
                               -> Option<Spanned<(Symbol, ast::StrStyle)>> {
     // Update `expr.span`'s ctxt now in case expr is an `include!` macro invocation.
     let expr = expr.map(|mut expr| {
-        expr.span.ctxt = expr.span.ctxt.apply_mark(cx.current_expansion.mark);
+        expr.span = expr.span.with_ctxt(expr.span.ctxt().apply_mark(cx.current_expansion.mark));
         expr
     });
 
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index cbdd0013593..66df734b328 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -755,7 +755,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
     }
 
     fn expr_fail(&self, span: Span, msg: Symbol) -> P<ast::Expr> {
-        let loc = self.codemap().lookup_char_pos(span.lo);
+        let loc = self.codemap().lookup_char_pos(span.lo());
         let expr_file = self.expr_str(span, Symbol::intern(&loc.file.name));
         let expr_line = self.expr_u32(span, loc.line as u32);
         let expr_col = self.expr_u32(span, loc.col.to_usize() as u32 + 1);
diff --git a/src/libsyntax/ext/derive.rs b/src/libsyntax/ext/derive.rs
index 38715f7275d..2e70962cad6 100644
--- a/src/libsyntax/ext/derive.rs
+++ b/src/libsyntax/ext/derive.rs
@@ -68,7 +68,7 @@ pub fn add_derived_markers<T>(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path]
         },
     });
 
-    let span = Span { ctxt: cx.backtrace(), ..span };
+    let span = span.with_ctxt(cx.backtrace());
     item.map_attrs(|mut attrs| {
         if names.contains(&Symbol::intern("Eq")) && names.contains(&Symbol::intern("PartialEq")) {
             let meta = cx.meta_word(span, Symbol::intern("structural_match"));
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index d1172b1b2ce..2f7d5685b6e 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -598,7 +598,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         match *ext {
             ProcMacroDerive(ref ext, _) => {
                 invoc.expansion_data.mark.set_expn_info(expn_info);
-                let span = Span { ctxt: self.cx.backtrace(), ..span };
+                let span = span.with_ctxt(self.cx.backtrace());
                 let dummy = ast::MetaItem { // FIXME(jseyfried) avoid this
                     name: keywords::Invalid.name(),
                     span: DUMMY_SP,
@@ -609,7 +609,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             BuiltinDerive(func) => {
                 expn_info.callee.allow_internal_unstable = true;
                 invoc.expansion_data.mark.set_expn_info(expn_info);
-                let span = Span { ctxt: self.cx.backtrace(), ..span };
+                let span = span.with_ctxt(self.cx.backtrace());
                 let mut items = Vec::new();
                 func(self.cx, span, &attr.meta().unwrap(), &item, &mut |a| items.push(a));
                 kind.expect_from_annotatables(items)
@@ -684,8 +684,8 @@ impl<'a> Parser<'a> {
         if self.token != token::Eof {
             let msg = format!("macro expansion ignores token `{}` and any following",
                               self.this_token_to_string());
-            let mut def_site_span = self.span;
-            def_site_span.ctxt = SyntaxContext::empty(); // Avoid emitting backtrace info twice.
+            // Avoid emitting backtrace info twice.
+            let def_site_span = self.span.with_ctxt(SyntaxContext::empty());
             let mut err = self.diagnostic().struct_span_err(def_site_span, &msg);
             let msg = format!("caused by the macro expansion here; the usage \
                                of `{}!` is likely invalid in {} context",
@@ -1069,9 +1069,8 @@ impl Folder for Marker {
         ident
     }
 
-    fn new_span(&mut self, mut span: Span) -> Span {
-        span.ctxt = span.ctxt.apply_mark(self.0);
-        span
+    fn new_span(&mut self, span: Span) -> Span {
+        span.with_ctxt(span.ctxt().apply_mark(self.0))
     }
 
     fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index 95fe41be122..18a262d139a 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -36,7 +36,7 @@ pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
     base::check_zero_tts(cx, sp, tts, "line!");
 
     let topmost = cx.expansion_cause().unwrap_or(sp);
-    let loc = cx.codemap().lookup_char_pos(topmost.lo);
+    let loc = cx.codemap().lookup_char_pos(topmost.lo());
 
     base::MacEager::expr(cx.expr_u32(topmost, loc.line as u32))
 }
@@ -47,7 +47,7 @@ pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
     base::check_zero_tts(cx, sp, tts, "column!");
 
     let topmost = cx.expansion_cause().unwrap_or(sp);
-    let loc = cx.codemap().lookup_char_pos(topmost.lo);
+    let loc = cx.codemap().lookup_char_pos(topmost.lo());
 
     base::MacEager::expr(cx.expr_u32(topmost, loc.col.to_usize() as u32))
 }
@@ -70,7 +70,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
     base::check_zero_tts(cx, sp, tts, "file!");
 
     let topmost = cx.expansion_cause().unwrap_or(sp);
-    let loc = cx.codemap().lookup_char_pos(topmost.lo);
+    let loc = cx.codemap().lookup_char_pos(topmost.lo());
     base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name)))
 }
 
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 405d06dafbf..2167b64e610 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -329,7 +329,8 @@ fn inner_parse_loop(sess: &ParseSess,
                     // Only touch the binders we have actually bound
                     for idx in item.match_lo..item.match_hi {
                         let sub = item.matches[idx].clone();
-                        new_pos.push_match(idx, MatchedSeq(sub, Span { lo: item.sp_lo, ..span }));
+                        let span = span.with_lo(item.sp_lo);
+                        new_pos.push_match(idx, MatchedSeq(sub, span));
                     }
 
                     new_pos.match_cur = item.match_hi;
@@ -379,7 +380,7 @@ fn inner_parse_loop(sess: &ParseSess,
                         match_cur: item.match_cur,
                         match_hi: item.match_cur + seq.num_captures,
                         up: Some(item),
-                        sp_lo: sp.lo,
+                        sp_lo: sp.lo(),
                         top_elts: Tt(TokenTree::Sequence(sp, seq)),
                     }));
                 }
@@ -424,7 +425,7 @@ pub fn parse(sess: &ParseSess,
              recurse_into_modules: bool)
              -> NamedParseResult {
     let mut parser = Parser::new(sess, tts, directory, recurse_into_modules, true);
-    let mut cur_items = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo));
+    let mut cur_items = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo()));
     let mut next_items = Vec::new(); // or proceed normally
 
     loop {
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 983b19c5bf0..6d58af497f0 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -130,7 +130,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                     tts = tts.map_enumerated(|i, tt| {
                         let mut tt = tt.clone();
                         let mut sp = rhs_spans[i];
-                        sp.ctxt = tt.span().ctxt;
+                        sp = sp.with_ctxt(tt.span().ctxt());
                         tt.set_span(sp);
                         tt
                     });
@@ -161,7 +161,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                     macro_ident: name
                 })
             }
-            Failure(sp, tok) => if sp.lo >= best_fail_spot.lo {
+            Failure(sp, tok) => if sp.lo() >= best_fail_spot.lo() {
                 best_fail_spot = sp;
                 best_fail_tok = Some(tok);
             },
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index 012d4a54b36..0e21e3f6b00 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -37,7 +37,7 @@ impl Delimited {
         let open_span = if span == DUMMY_SP {
             DUMMY_SP
         } else {
-            Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span }
+            span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
         };
         TokenTree::Token(open_span, self.open_token())
     }
@@ -46,7 +46,7 @@ impl Delimited {
         let close_span = if span == DUMMY_SP {
             DUMMY_SP
         } else {
-            Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span }
+            span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
         };
         TokenTree::Token(close_span, self.close_token())
     }
@@ -152,7 +152,7 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars
                     Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
                         Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() {
                             Some(kind) => {
-                                let span = Span { lo: start_sp.lo, ..end_sp };
+                                let span = end_sp.with_lo(start_sp.lo());
                                 result.push(TokenTree::MetaVarDecl(span, ident, kind));
                                 continue
                             }
@@ -198,7 +198,7 @@ fn parse_tree<I>(tree: tokenstream::TokenTree,
             }
             Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
                 let ident = token.ident().unwrap();
-                let span = Span { lo: span.lo, ..ident_span };
+                let span = ident_span.with_lo(span.lo());
                 if ident.name == keywords::Crate.name() {
                     let ident = ast::Ident { name: keywords::DollarCrate.name(), ..ident };
                     TokenTree::Token(span, token::Ident(ident))
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index fe3dd83f9d5..d51b0d0ae3e 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -155,7 +155,7 @@ pub fn transcribe(cx: &ExtCtxt,
                         if let NtTT(ref tt) = **nt {
                             result.push(tt.clone().into());
                         } else {
-                            sp.ctxt = sp.ctxt.apply_mark(cx.current_expansion.mark);
+                            sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark));
                             let token = TokenTree::Token(sp, Token::interpolated((**nt).clone()));
                             result.push(token.into());
                         }
@@ -166,13 +166,13 @@ pub fn transcribe(cx: &ExtCtxt,
                 } else {
                     let ident =
                         Ident { ctxt: ident.ctxt.apply_mark(cx.current_expansion.mark), ..ident };
-                    sp.ctxt = sp.ctxt.apply_mark(cx.current_expansion.mark);
+                    sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark));
                     result.push(TokenTree::Token(sp, token::Dollar).into());
                     result.push(TokenTree::Token(sp, token::Ident(ident)).into());
                 }
             }
             quoted::TokenTree::Delimited(mut span, delimited) => {
-                span.ctxt = span.ctxt.apply_mark(cx.current_expansion.mark);
+                span = span.with_ctxt(span.ctxt().apply_mark(cx.current_expansion.mark));
                 stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
                 result_stack.push(mem::replace(&mut result, Vec::new()));
             }
diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs
index 37a59411c16..db49ab10343 100644
--- a/src/libsyntax/json.rs
+++ b/src/libsyntax/json.rs
@@ -230,8 +230,8 @@ impl DiagnosticSpan {
                       mut backtrace: vec::IntoIter<MacroBacktrace>,
                       je: &JsonEmitter)
                       -> DiagnosticSpan {
-        let start = je.cm.lookup_char_pos(span.lo);
-        let end = je.cm.lookup_char_pos(span.hi);
+        let start = je.cm.lookup_char_pos(span.lo());
+        let end = je.cm.lookup_char_pos(span.hi());
         let backtrace_step = backtrace.next().map(|bt| {
             let call_site =
                 Self::from_span_full(bt.call_site,
@@ -256,8 +256,8 @@ impl DiagnosticSpan {
         });
         DiagnosticSpan {
             file_name: start.file.name.clone(),
-            byte_start: span.lo.0 - start.file.start_pos.0,
-            byte_end: span.hi.0 - start.file.start_pos.0,
+            byte_start: span.lo().0 - start.file.start_pos.0,
+            byte_end: span.hi().0 - start.file.start_pos.0,
             line_start: start.line,
             line_end: end.line,
             column_start: start.col.0 + 1,
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index f65fffebe33..fb558d1a58f 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -386,7 +386,7 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: String, srdr: &mut R
                 debug!("tok lit: {}", s);
                 literals.push(Literal {
                     lit: s.to_string(),
-                    pos: sp.lo,
+                    pos: sp.lo(),
                 });
             })
         } else {
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 527d2e41396..f26a0460905 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -71,7 +71,7 @@ pub struct StringReader<'a> {
 
 impl<'a> StringReader<'a> {
     fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
-        unwrap_or!(self.override_span, Span { lo: lo, hi: hi, ctxt: NO_EXPANSION})
+        unwrap_or!(self.override_span, Span::new(lo, hi, NO_EXPANSION))
     }
 
     fn next_token(&mut self) -> TokenAndSpan where Self: Sized {
@@ -190,20 +190,20 @@ impl<'a> StringReader<'a> {
     }
 
     pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
-        let begin = sess.codemap().lookup_byte_offset(span.lo);
-        let end = sess.codemap().lookup_byte_offset(span.hi);
+        let begin = sess.codemap().lookup_byte_offset(span.lo());
+        let end = sess.codemap().lookup_byte_offset(span.hi());
 
         // Make the range zero-length if the span is invalid.
-        if span.lo > span.hi || begin.fm.start_pos != end.fm.start_pos {
-            span.hi = span.lo;
+        if span.lo() > span.hi() || begin.fm.start_pos != end.fm.start_pos {
+            span = span.with_hi(span.lo());
         }
 
         let mut sr = StringReader::new_raw_internal(sess, begin.fm);
 
         // Seek the lexer to the right byte range.
         sr.save_new_lines_and_multibyte = false;
-        sr.next_pos = span.lo;
-        sr.terminator = Some(span.hi);
+        sr.next_pos = span.lo();
+        sr.terminator = Some(span.hi());
 
         sr.bump();
 
@@ -1745,11 +1745,7 @@ mod tests {
         let tok1 = string_reader.next_token();
         let tok2 = TokenAndSpan {
             tok: token::Ident(id),
-            sp: Span {
-                lo: BytePos(21),
-                hi: BytePos(23),
-                ctxt: NO_EXPANSION,
-            },
+            sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
         };
         assert_eq!(tok1, tok2);
         assert_eq!(string_reader.next_token().tok, token::Whitespace);
@@ -1759,11 +1755,7 @@ mod tests {
         let tok3 = string_reader.next_token();
         let tok4 = TokenAndSpan {
             tok: token::Ident(Ident::from_str("main")),
-            sp: Span {
-                lo: BytePos(24),
-                hi: BytePos(28),
-                ctxt: NO_EXPANSION,
-            },
+            sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
         };
         assert_eq!(tok3, tok4);
         // the lparen is already read:
@@ -1921,7 +1913,7 @@ mod tests {
         let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
         let comment = lexer.next_token();
         assert_eq!(comment.tok, token::Comment);
-        assert_eq!((comment.sp.lo, comment.sp.hi), (BytePos(0), BytePos(7)));
+        assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7)));
         assert_eq!(lexer.next_token().tok, token::Whitespace);
         assert_eq!(lexer.next_token().tok,
                    token::DocComment(Symbol::intern("/// test")));
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index ad389ab510a..a2c81e24754 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -11,7 +11,6 @@
 use print::pprust::token_to_string;
 use parse::lexer::StringReader;
 use parse::{token, PResult};
-use syntax_pos::Span;
 use tokenstream::{Delimited, TokenStream, TokenTree};
 
 impl<'a> StringReader<'a> {
@@ -20,7 +19,7 @@ impl<'a> StringReader<'a> {
         let mut tts = Vec::new();
         while self.token != token::Eof {
             let tree = self.parse_token_tree()?;
-            let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token);
+            let is_joint = tree.span().hi() == self.span.lo() && token::is_op(&self.token);
             tts.push(if is_joint { tree.joint() } else { tree.into() });
         }
         Ok(TokenStream::concat(tts))
@@ -40,7 +39,7 @@ impl<'a> StringReader<'a> {
                     return TokenStream::concat(tts);
                 }
             };
-            let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token);
+            let is_joint = tree.span().hi() == self.span.lo() && token::is_op(&self.token);
             tts.push(if is_joint { tree.joint() } else { tree.into() });
         }
     }
@@ -69,7 +68,7 @@ impl<'a> StringReader<'a> {
                 let tts = self.parse_token_trees_until_close_delim();
 
                 // Expand to cover the entire delimited token tree
-                let span = Span { hi: self.span.hi, ..pre_span };
+                let span = pre_span.with_hi(self.span.hi());
 
                 match self.token {
                     // Correct delimiter.
diff --git a/src/libsyntax/parse/lexer/unicode_chars.rs b/src/libsyntax/parse/lexer/unicode_chars.rs
index c36fdef2d4c..39b5482a066 100644
--- a/src/libsyntax/parse/lexer/unicode_chars.rs
+++ b/src/libsyntax/parse/lexer/unicode_chars.rs
@@ -340,7 +340,7 @@ pub fn check_for_substitution<'a>(reader: &StringReader<'a>,
     .iter()
     .find(|&&(c, _, _)| c == ch)
     .map(|&(_, u_name, ascii_char)| {
-        let span = Span { lo: reader.pos, hi: reader.next_pos, ctxt: NO_EXPANSION };
+        let span = Span::new(reader.pos, reader.next_pos, NO_EXPANSION);
         match ASCII_ARRAY.iter().find(|&&(c, _)| c == ascii_char) {
             Some(&(ascii_char, ascii_name)) => {
                 let msg =
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 67b4954a8f1..76a7e2923fc 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -181,7 +181,7 @@ pub fn filemap_to_parser(sess: & ParseSess, filemap: Rc<FileMap>, ) -> Parser {
     let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap, None));
 
     if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP {
-        parser.span = Span { lo: end_pos, hi: end_pos, ctxt: NO_EXPANSION };
+        parser.span = Span::new(end_pos, end_pos, NO_EXPANSION);
     }
 
     parser
@@ -661,7 +661,7 @@ mod tests {
 
     // produce a syntax_pos::span
     fn sp(a: u32, b: u32) -> Span {
-        Span {lo: BytePos(a), hi: BytePos(b), ctxt: NO_EXPANSION}
+        Span::new(BytePos(a), BytePos(b), NO_EXPANSION)
     }
 
     fn str2seg(s: &str, lo: u32, hi: u32) -> ast::PathSegment {
@@ -976,7 +976,7 @@ mod tests {
 
         for &src in &srcs {
             let spans = get_spans_of_pat_idents(src);
-            let Span{ lo, hi, .. } = spans[0];
+            let (lo, hi) = (spans[0].lo(), spans[0].hi());
             assert!("self" == &src[lo.to_usize()..hi.to_usize()],
                     "\"{}\" != \"self\". src=\"{}\"",
                     &src[lo.to_usize()..hi.to_usize()], src)
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index d2bf943ec17..5e05f36345f 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -790,9 +790,8 @@ impl<'a> Parser<'a> {
                 Ok(())
             }
             token::AndAnd => {
-                let span = self.span;
-                let lo = span.lo + BytePos(1);
-                Ok(self.bump_with(token::BinOp(token::And), Span { lo: lo, ..span }))
+                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                Ok(self.bump_with(token::BinOp(token::And), span))
             }
             _ => self.unexpected()
         }
@@ -824,9 +823,8 @@ impl<'a> Parser<'a> {
                 true
             }
             token::BinOp(token::Shl) => {
-                let span = self.span;
-                let lo = span.lo + BytePos(1);
-                self.bump_with(token::Lt, Span { lo: lo, ..span });
+                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                self.bump_with(token::Lt, span);
                 true
             }
             _ => false,
@@ -852,19 +850,16 @@ impl<'a> Parser<'a> {
                 Ok(())
             }
             token::BinOp(token::Shr) => {
-                let span = self.span;
-                let lo = span.lo + BytePos(1);
-                Ok(self.bump_with(token::Gt, Span { lo: lo, ..span }))
+                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                Ok(self.bump_with(token::Gt, span))
             }
             token::BinOpEq(token::Shr) => {
-                let span = self.span;
-                let lo = span.lo + BytePos(1);
-                Ok(self.bump_with(token::Ge, Span { lo: lo, ..span }))
+                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                Ok(self.bump_with(token::Ge, span))
             }
             token::Ge => {
-                let span = self.span;
-                let lo = span.lo + BytePos(1);
-                Ok(self.bump_with(token::Eq, Span { lo: lo, ..span }))
+                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                Ok(self.bump_with(token::Eq, span))
             }
             _ => self.unexpected()
         }
@@ -1094,7 +1089,7 @@ impl<'a> Parser<'a> {
     /// Advance the parser using provided token as a next one. Use this when
     /// consuming a part of a token. For example a single `<` from `<<`.
     pub fn bump_with(&mut self, next: token::Token, span: Span) {
-        self.prev_span = Span { hi: span.lo, ..self.span };
+        self.prev_span = self.span.with_hi(span.lo());
         // It would be incorrect to record the kind of the current token, but
         // fortunately for tokens currently using `bump_with`, the
         // prev_token_kind will be of no use anyway.
@@ -1356,7 +1351,7 @@ impl<'a> Parser<'a> {
         if self.eat(&token::RArrow) {
             Ok(FunctionRetTy::Ty(self.parse_ty_no_plus()?))
         } else {
-            Ok(FunctionRetTy::Default(Span { hi: self.span.lo, ..self.span }))
+            Ok(FunctionRetTy::Default(self.span.with_hi(self.span.lo())))
         }
     }
 
@@ -2532,7 +2527,7 @@ impl<'a> Parser<'a> {
 
     pub fn process_potential_macro_variable(&mut self) {
         let ident = match self.token {
-            token::Dollar if self.span.ctxt != syntax_pos::hygiene::SyntaxContext::empty() &&
+            token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() &&
                              self.look_ahead(1, |t| t.is_ident()) => {
                 self.bump();
                 let name = match self.token { token::Ident(ident) => ident, _ => unreachable!() };
@@ -2734,8 +2729,8 @@ impl<'a> Parser<'a> {
                         err.span_label(self.span,
                                        "expecting a type here because of type ascription");
                         let cm = self.sess.codemap();
-                        let cur_pos = cm.lookup_char_pos(self.span.lo);
-                        let op_pos = cm.lookup_char_pos(cur_op_span.hi);
+                        let cur_pos = cm.lookup_char_pos(self.span.lo());
+                        let op_pos = cm.lookup_char_pos(cur_op_span.hi());
                         if cur_pos.line != op_pos.line {
                             err.span_suggestion_short(cur_op_span,
                                                       "did you mean to use `;` here?",
@@ -4056,7 +4051,7 @@ impl<'a> Parser<'a> {
                     let mut stmt_span = stmt.span;
                     // expand the span to include the semicolon, if it exists
                     if self.eat(&token::Semi) {
-                        stmt_span.hi = self.prev_span.hi;
+                        stmt_span = stmt_span.with_hi(self.prev_span.hi());
                     }
                     let sugg = pprust::to_string(|s| {
                         use print::pprust::{PrintState, INDENT_UNIT};
@@ -4148,7 +4143,7 @@ impl<'a> Parser<'a> {
             stmt = stmt.add_trailing_semicolon();
         }
 
-        stmt.span.hi = self.prev_span.hi;
+        stmt.span = stmt.span.with_hi(self.prev_span.hi());
         Ok(Some(stmt))
     }
 
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 70b0277929b..3b5ec1caf0d 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -603,8 +603,8 @@ pub trait PrintState<'a> {
     }
 
     fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> {
-        self.maybe_print_comment(lit.span.lo)?;
-        if let Some(ltrl) = self.next_lit(lit.span.lo) {
+        self.maybe_print_comment(lit.span.lo())?;
+        if let Some(ltrl) = self.next_lit(lit.span.lo()) {
             return self.writer().word(&ltrl.lit);
         }
         match lit.node {
@@ -723,7 +723,7 @@ pub trait PrintState<'a> {
         if !is_inline {
             self.hardbreak_if_not_bol()?;
         }
-        self.maybe_print_comment(attr.span.lo)?;
+        self.maybe_print_comment(attr.span.lo())?;
         if attr.is_sugared_doc {
             self.writer().word(&attr.value_str().unwrap().as_str())?;
             self.writer().hardbreak()
@@ -892,7 +892,7 @@ impl<'a> State<'a> {
     }
     pub fn bclose_maybe_open(&mut self, span: syntax_pos::Span,
                              indented: usize, close_box: bool) -> io::Result<()> {
-        self.maybe_print_comment(span.hi)?;
+        self.maybe_print_comment(span.hi())?;
         self.break_offset_if_not_bol(1, -(indented as isize))?;
         self.s.word("}")?;
         if close_box {
@@ -950,13 +950,13 @@ impl<'a> State<'a> {
         let len = elts.len();
         let mut i = 0;
         for elt in elts {
-            self.maybe_print_comment(get_span(elt).hi)?;
+            self.maybe_print_comment(get_span(elt).hi())?;
             op(self, elt)?;
             i += 1;
             if i < len {
                 self.s.word(",")?;
                 self.maybe_print_trailing_comment(get_span(elt),
-                                                  Some(get_span(&elts[i]).hi))?;
+                                                  Some(get_span(&elts[i]).hi()))?;
                 self.space_if_not_bol()?;
             }
         }
@@ -996,7 +996,7 @@ impl<'a> State<'a> {
     }
 
     pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> {
-        self.maybe_print_comment(ty.span.lo)?;
+        self.maybe_print_comment(ty.span.lo())?;
         self.ibox(0)?;
         match ty.node {
             ast::TyKind::Slice(ref ty) => {
@@ -1094,7 +1094,7 @@ impl<'a> State<'a> {
     pub fn print_foreign_item(&mut self,
                               item: &ast::ForeignItem) -> io::Result<()> {
         self.hardbreak_if_not_bol()?;
-        self.maybe_print_comment(item.span.lo)?;
+        self.maybe_print_comment(item.span.lo())?;
         self.print_outer_attributes(&item.attrs)?;
         match item.node {
             ast::ForeignItemKind::Fn(ref decl, ref generics) => {
@@ -1163,7 +1163,7 @@ impl<'a> State<'a> {
     /// Pretty-print an item
     pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
         self.hardbreak_if_not_bol()?;
-        self.maybe_print_comment(item.span.lo)?;
+        self.maybe_print_comment(item.span.lo())?;
         self.print_outer_attributes(&item.attrs)?;
         self.ann.pre(self, NodeItem(item))?;
         match item.node {
@@ -1433,7 +1433,7 @@ impl<'a> State<'a> {
         self.bopen()?;
         for v in variants {
             self.space_if_not_bol()?;
-            self.maybe_print_comment(v.span.lo)?;
+            self.maybe_print_comment(v.span.lo())?;
             self.print_outer_attributes(&v.node.attrs)?;
             self.ibox(INDENT_UNIT)?;
             self.print_variant(v)?;
@@ -1481,7 +1481,7 @@ impl<'a> State<'a> {
                 self.commasep(
                     Inconsistent, struct_def.fields(),
                     |s, field| {
-                        s.maybe_print_comment(field.span.lo)?;
+                        s.maybe_print_comment(field.span.lo())?;
                         s.print_outer_attributes(&field.attrs)?;
                         s.print_visibility(&field.vis)?;
                         s.print_type(&field.ty)
@@ -1503,7 +1503,7 @@ impl<'a> State<'a> {
 
             for field in struct_def.fields() {
                 self.hardbreak_if_not_bol()?;
-                self.maybe_print_comment(field.span.lo)?;
+                self.maybe_print_comment(field.span.lo())?;
                 self.print_outer_attributes(&field.attrs)?;
                 self.print_visibility(&field.vis)?;
                 self.print_ident(field.ident.unwrap())?;
@@ -1548,7 +1548,7 @@ impl<'a> State<'a> {
                             -> io::Result<()> {
         self.ann.pre(self, NodeSubItem(ti.id))?;
         self.hardbreak_if_not_bol()?;
-        self.maybe_print_comment(ti.span.lo)?;
+        self.maybe_print_comment(ti.span.lo())?;
         self.print_outer_attributes(&ti.attrs)?;
         match ti.node {
             ast::TraitItemKind::Const(ref ty, ref default) => {
@@ -1590,7 +1590,7 @@ impl<'a> State<'a> {
     pub fn print_impl_item(&mut self, ii: &ast::ImplItem) -> io::Result<()> {
         self.ann.pre(self, NodeSubItem(ii.id))?;
         self.hardbreak_if_not_bol()?;
-        self.maybe_print_comment(ii.span.lo)?;
+        self.maybe_print_comment(ii.span.lo())?;
         self.print_outer_attributes(&ii.attrs)?;
         self.print_defaultness(ii.defaultness)?;
         match ii.node {
@@ -1622,7 +1622,7 @@ impl<'a> State<'a> {
     }
 
     pub fn print_stmt(&mut self, st: &ast::Stmt) -> io::Result<()> {
-        self.maybe_print_comment(st.span.lo)?;
+        self.maybe_print_comment(st.span.lo())?;
         match st.node {
             ast::StmtKind::Local(ref loc) => {
                 self.print_outer_attributes(&loc.attrs)?;
@@ -1705,7 +1705,7 @@ impl<'a> State<'a> {
             BlockCheckMode::Unsafe(..) => self.word_space("unsafe")?,
             BlockCheckMode::Default => ()
         }
-        self.maybe_print_comment(blk.span.lo)?;
+        self.maybe_print_comment(blk.span.lo())?;
         self.ann.pre(self, NodeBlock(blk))?;
         self.bopen()?;
 
@@ -1714,10 +1714,10 @@ impl<'a> State<'a> {
         for (i, st) in blk.stmts.iter().enumerate() {
             match st.node {
                 ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => {
-                    self.maybe_print_comment(st.span.lo)?;
+                    self.maybe_print_comment(st.span.lo())?;
                     self.space_if_not_bol()?;
                     self.print_expr_outer_attr_style(expr, false)?;
-                    self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi))?;
+                    self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi()))?;
                 }
                 _ => self.print_stmt(st)?,
             }
@@ -1988,7 +1988,7 @@ impl<'a> State<'a> {
     fn print_expr_outer_attr_style(&mut self,
                                   expr: &ast::Expr,
                                   is_inline: bool) -> io::Result<()> {
-        self.maybe_print_comment(expr.span.lo)?;
+        self.maybe_print_comment(expr.span.lo())?;
 
         let attrs = &expr.attrs;
         if is_inline {
@@ -2343,7 +2343,7 @@ impl<'a> State<'a> {
                   defaults_to_global: bool)
                   -> io::Result<()>
     {
-        self.maybe_print_comment(path.span.lo)?;
+        self.maybe_print_comment(path.span.lo())?;
 
         let mut segments = path.segments[..path.segments.len()-depth].iter();
         if defaults_to_global && path.is_global() {
@@ -2465,7 +2465,7 @@ impl<'a> State<'a> {
     }
 
     pub fn print_pat(&mut self, pat: &ast::Pat) -> io::Result<()> {
-        self.maybe_print_comment(pat.span.lo)?;
+        self.maybe_print_comment(pat.span.lo())?;
         self.ann.pre(self, NodePat(pat))?;
         /* Pat isn't normalized, but the beauty of it
          is that it doesn't matter */
@@ -2607,7 +2607,7 @@ impl<'a> State<'a> {
         }
         self.cbox(INDENT_UNIT)?;
         self.ibox(0)?;
-        self.maybe_print_comment(arm.pats[0].span.lo)?;
+        self.maybe_print_comment(arm.pats[0].span.lo())?;
         self.print_outer_attributes(&arm.attrs)?;
         let mut first = true;
         for p in &arm.pats {
@@ -2715,7 +2715,7 @@ impl<'a> State<'a> {
         match decl.output {
             ast::FunctionRetTy::Ty(ref ty) => {
                 self.print_type(ty)?;
-                self.maybe_print_comment(ty.span.lo)
+                self.maybe_print_comment(ty.span.lo())
             }
             ast::FunctionRetTy::Default(..) => unreachable!(),
         }
@@ -2971,7 +2971,7 @@ impl<'a> State<'a> {
         self.end()?;
 
         match decl.output {
-            ast::FunctionRetTy::Ty(ref output) => self.maybe_print_comment(output.span.lo),
+            ast::FunctionRetTy::Ty(ref output) => self.maybe_print_comment(output.span.lo()),
             _ => Ok(())
         }
     }
@@ -3017,10 +3017,10 @@ impl<'a> State<'a> {
         };
         if let Some(ref cmnt) = self.next_comment() {
             if cmnt.style != comments::Trailing { return Ok(()) }
-            let span_line = cm.lookup_char_pos(span.hi);
+            let span_line = cm.lookup_char_pos(span.hi());
             let comment_line = cm.lookup_char_pos(cmnt.pos);
             let next = next_pos.unwrap_or(cmnt.pos + BytePos(1));
-            if span.hi < cmnt.pos && cmnt.pos < next && span_line.line == comment_line.line {
+            if span.hi() < cmnt.pos && cmnt.pos < next && span_line.line == comment_line.line {
                 self.print_comment(cmnt)?;
             }
         }
diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs
index 8977d701e5a..7aa94de9d3d 100644
--- a/src/libsyntax/std_inject.rs
+++ b/src/libsyntax/std_inject.rs
@@ -31,7 +31,7 @@ fn ignored_span(sp: Span) -> Span {
             allow_internal_unsafe: false,
         }
     });
-    Span { ctxt: SyntaxContext::empty().apply_mark(mark), ..sp }
+    sp.with_ctxt(SyntaxContext::empty().apply_mark(mark))
 }
 
 pub fn injected_crate_name(krate: &ast::Crate) -> Option<&'static str> {
diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs
index 35dc9819529..5a5a1ce3777 100644
--- a/src/libsyntax/test.rs
+++ b/src/libsyntax/test.rs
@@ -306,7 +306,7 @@ fn generate_test_harness(sess: &ParseSess,
 /// call to codemap's `is_internal` check.
 /// The expanded code calls some unstable functions in the test crate.
 fn ignored_span(cx: &TestCtxt, sp: Span) -> Span {
-    Span { ctxt: cx.ctxt, ..sp }
+    sp.with_ctxt(cx.ctxt)
 }
 
 #[derive(PartialEq)]
diff --git a/src/libsyntax/test_snippet.rs b/src/libsyntax/test_snippet.rs
index 4fae2ff9814..e9b1976ea47 100644
--- a/src/libsyntax/test_snippet.rs
+++ b/src/libsyntax/test_snippet.rs
@@ -80,11 +80,7 @@ fn make_span(file_text: &str, start: &Position, end: &Position) -> Span {
     let start = make_pos(file_text, start);
     let end = make_pos(file_text, end) + end.string.len(); // just after matching thing ends
     assert!(start <= end);
-    Span {
-        lo: BytePos(start as u32),
-        hi: BytePos(end as u32),
-        ctxt: NO_EXPANSION,
-    }
+    Span::new(BytePos(start as u32), BytePos(end as u32), NO_EXPANSION)
 }
 
 fn make_pos(file_text: &str, pos: &Position) -> usize {
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 747bc7b4385..870f54e4396 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -59,7 +59,7 @@ impl Delimited {
         let open_span = if span == DUMMY_SP {
             DUMMY_SP
         } else {
-            Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span }
+            span.with_hi(span.lo() + BytePos(self.delim.len() as u32))
         };
         TokenTree::Token(open_span, self.open_token())
     }
@@ -69,7 +69,7 @@ impl Delimited {
         let close_span = if span == DUMMY_SP {
             DUMMY_SP
         } else {
-            Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span }
+            span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
         };
         TokenTree::Token(close_span, self.close_token())
     }
@@ -602,11 +602,7 @@ mod tests {
     }
 
     fn sp(a: u32, b: u32) -> Span {
-        Span {
-            lo: BytePos(a),
-            hi: BytePos(b),
-            ctxt: NO_EXPANSION,
-        }
+        Span::new(BytePos(a), BytePos(b), NO_EXPANSION)
     }
 
     #[test]