| author | Eduard Burtescu <edy.burt@gmail.com> | 2014-03-16 20:56:24 +0200 |
|---|---|---|
| committer | Eduard Burtescu <edy.burt@gmail.com> | 2014-03-17 09:53:08 +0200 |
| commit | 871e5708106c5ee3ad8d2bd6ec68fca60428b77e (patch) | |
| tree | 7dc2002f3ffb245f6dfdd3b05fc4788ddb316f04 /src | |
| parent | 0bb6de3076e17a8a25728f616b833e1060f06088 (diff) | |
De-@ codemap and diagnostic.
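The commit removes `@`-boxed (task-local GC) pointers around the compiler's `CodeMap` and diagnostic `SpanHandler`: the handler now owns its codemap, `ParseSess` owns the handler by value, and `Session` reaches both through borrowed accessors (`diagnostic()`, `codemap()`) instead of storing extra `@` copies. Below is a minimal sketch of that ownership shape in modern Rust; the types are simplified stand-ins for illustration, not the actual rustc definitions.

```rust
// Sketch of the ownership layout after this commit (simplified, hypothetical fields).
struct CodeMap {
    files: Vec<String>, // stand-in for the real file table
}

struct SpanHandler {
    cm: CodeMap, // the span handler owns the codemap it reports against
}

struct ParseSess {
    span_diagnostic: SpanHandler, // owned by value, no longer @-boxed
}

struct Session {
    parse_sess: ParseSess,
}

impl Session {
    // Borrowed accessors replace the old `span_diagnostic: @SpanHandler`
    // and `codemap: @CodeMap` fields on Session.
    fn diagnostic(&self) -> &SpanHandler {
        &self.parse_sess.span_diagnostic
    }
    fn codemap(&self) -> &CodeMap {
        &self.parse_sess.span_diagnostic.cm
    }
}

fn main() {
    let sess = Session {
        parse_sess: ParseSess {
            span_diagnostic: SpanHandler { cm: CodeMap { files: Vec::new() } },
        },
    };
    // Call sites that used to copy an @-pointer now just borrow for the call.
    assert!(sess.codemap().files.is_empty());
    let _handler: &SpanHandler = sess.diagnostic();
}
```

This is why signatures throughout the diff change from `@session::Options`, `@SpanHandler`, and `@codemap::CodeMap` to owned values or `&`-references, and why calls to `Session::diagnostic()` and `Session::codemap()` replace the direct field accesses.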
Diffstat (limited to 'src')
33 files changed, 311 insertions, 356 deletions
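One piece of shared state cannot become a plain borrow: a `FileMap` is held simultaneously by the codemap's file table, by `Loc`/`FileLines` lookup results, and by the lexer's `StringReader`, so the diff below moves it from `@FileMap` to `Rc<FileMap>` and clones the handle at each sharing point. A minimal sketch of that pattern in modern Rust, with simplified stand-in fields:

```rust
// Sketch of the @FileMap -> Rc<FileMap> change (simplified, hypothetical fields).
use std::cell::RefCell;
use std::rc::Rc;

struct FileMap {
    name: String,
    src: String,
}

struct CodeMap {
    // Interior mutability as in the diff: filemaps are appended through &self.
    files: RefCell<Vec<Rc<FileMap>>>,
}

impl CodeMap {
    fn new_filemap(&self, name: String, src: String) -> Rc<FileMap> {
        let fm = Rc::new(FileMap { name, src });
        self.files.borrow_mut().push(fm.clone()); // one reference stays in the table
        fm // another goes back to the caller (the old code returned the @-pointer)
    }
}

struct StringReader {
    filemap: Rc<FileMap>, // the lexer keeps its own counted reference
}

fn main() {
    let cm = CodeMap { files: RefCell::new(Vec::new()) };
    let fm = cm.new_filemap("lib.rs".to_string(), "fn main() {}".to_string());
    let rdr = StringReader { filemap: fm.clone() };
    assert_eq!(rdr.filemap.name, "lib.rs");
    assert_eq!(Rc::strong_count(&fm), 3); // table + local binding + lexer
}
```

The explicit `.deref()` calls sprinkled through the diff (`loc.file.deref().name`, `fm.deref().start_pos`, and so on) are the 2014-era way of reaching through the new `Rc` handle; modern auto-deref makes them unnecessary.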
diff --git a/src/librustc/back/link.rs b/src/librustc/back/link.rs index 13625993200..edb9d11b11a 100644 --- a/src/librustc/back/link.rs +++ b/src/librustc/back/link.rs @@ -1352,9 +1352,8 @@ fn add_upstream_rust_crates(args: &mut Vec<~str>, sess: &Session, } // Converts a library file-stem into a cc -l argument - fn unlib(config: @session::Config, stem: &str) -> ~str { - if stem.starts_with("lib") && - config.os != abi::OsWin32 { + fn unlib(config: &session::Config, stem: &str) -> ~str { + if stem.starts_with("lib") && config.os != abi::OsWin32 { stem.slice(3, stem.len()).to_owned() } else { stem.to_owned() @@ -1434,7 +1433,7 @@ fn add_upstream_rust_crates(args: &mut Vec<~str>, sess: &Session, // what its name is let dir = cratepath.dirname_str().unwrap(); if !dir.is_empty() { args.push("-L" + dir); } - let libarg = unlib(sess.targ_cfg, cratepath.filestem_str().unwrap()); + let libarg = unlib(&sess.targ_cfg, cratepath.filestem_str().unwrap()); args.push("-l" + libarg); } } diff --git a/src/librustc/driver/driver.rs b/src/librustc/driver/driver.rs index 7ffbab4c9b3..a14fda080bb 100644 --- a/src/librustc/driver/driver.rs +++ b/src/librustc/driver/driver.rs @@ -27,7 +27,7 @@ use middle::{trans, freevars, kind, ty, typeck, lint, astencode, reachable}; use middle; use util::common::time; use util::ppaux; -use util::nodemap::NodeSet; +use util::nodemap::{NodeMap, NodeSet}; use serialize::{json, Encodable}; @@ -39,7 +39,6 @@ use std::mem::drop; use std::os; use std::vec_ng::Vec; use std::vec_ng; -use collections::HashMap; use getopts::{optopt, optmulti, optflag, optflagopt}; use getopts; use syntax::ast; @@ -75,9 +74,9 @@ pub fn anon_src() -> ~str { pub fn source_name(input: &Input) -> ~str { match *input { - // FIXME (#9639): This needs to handle non-utf8 paths - FileInput(ref ifile) => ifile.as_str().unwrap().to_str(), - StrInput(_) => anon_src() + // FIXME (#9639): This needs to handle non-utf8 paths + FileInput(ref ifile) => ifile.as_str().unwrap().to_str(), + StrInput(_) => anon_src() } } @@ -213,7 +212,7 @@ pub fn phase_2_configure_and_expand(sess: &Session, -> (ast::Crate, syntax::ast_map::Map) { let time_passes = sess.time_passes(); - sess.building_library.set(session::building_library(sess.opts, &krate)); + sess.building_library.set(session::building_library(&sess.opts, &krate)); sess.crate_types.set(session::collect_crate_types(sess, krate.attrs .as_slice())); @@ -315,7 +314,7 @@ pub fn phase_3_run_analysis_passes(sess: Session, sess.macro_registrar_fn.with_mut(|r| *r = time(time_passes, "looking for macro registrar", (), |_| syntax::ext::registrar::find_macro_registrar( - sess.span_diagnostic, krate))); + sess.diagnostic(), krate))); let freevars = time(time_passes, "freevar finding", (), |_| freevars::annotate_freevars(def_map, krate)); @@ -541,19 +540,14 @@ fn write_out_deps(sess: &Session, // Build a list of files used to compile the output and // write Makefile-compatible dependency rules - let files: Vec<~str> = { - let files = sess.codemap.files.borrow(); - files.get() - .iter() - .filter_map(|fmap| { - if fmap.is_real_file() { - Some(fmap.name.clone()) - } else { - None - } - }) - .collect() - }; + let files: Vec<~str> = sess.codemap().files.borrow().get() + .iter().filter_map(|fmap| { + if fmap.deref().is_real_file() { + Some(fmap.deref().name.clone()) + } else { + None + } + }).collect(); let mut file = try!(io::File::create(&deps_filename)); for path in out_filenames.iter() { try!(write!(&mut file as &mut Writer, @@ -741,8 +735,7 @@ static architecture_abis : &'static 
[(&'static str, abi::Architecture)] = &'stat ("mips", abi::Mips)]; -pub fn build_target_config(sopts: @session::Options) - -> @session::Config { +pub fn build_target_config(sopts: &session::Options) -> session::Config { let os = match get_os(sopts.target_triple) { Some(os) => os, None => early_error("unknown operating system") @@ -764,14 +757,13 @@ pub fn build_target_config(sopts: @session::Options) abi::Arm => arm::get_target_strs(target_triple, os), abi::Mips => mips::get_target_strs(target_triple, os) }; - let target_cfg = @session::Config { + session::Config { os: os, arch: arch, target_strs: target_strs, int_type: int_type, uint_type: uint_type, - }; - return target_cfg; + } } pub fn host_triple() -> ~str { @@ -938,7 +930,7 @@ pub fn build_session_options(matches: &getopts::Matches) matches.opt_present("crate-file-name")); let cg = build_codegen_options(matches); - @session::Options { + session::Options { crate_types: crate_types, gc: gc, optimize: opt_level, @@ -991,25 +983,24 @@ pub fn build_codegen_options(matches: &getopts::Matches) return cg; } -pub fn build_session(sopts: @session::Options, +pub fn build_session(sopts: session::Options, local_crate_source_file: Option<Path>) -> Session { - let codemap = @codemap::CodeMap::new(); + let codemap = codemap::CodeMap::new(); let diagnostic_handler = diagnostic::default_handler(); let span_diagnostic_handler = diagnostic::mk_span_handler(diagnostic_handler, codemap); - build_session_(sopts, local_crate_source_file, codemap, span_diagnostic_handler) + build_session_(sopts, local_crate_source_file, span_diagnostic_handler) } -pub fn build_session_(sopts: @session::Options, +pub fn build_session_(sopts: session::Options, local_crate_source_file: Option<Path>, - codemap: @codemap::CodeMap, - span_diagnostic_handler: @diagnostic::SpanHandler) + span_diagnostic: diagnostic::SpanHandler) -> Session { - let target_cfg = build_target_config(sopts); - let p_s = parse::new_parse_sess_special_handler(span_diagnostic_handler, codemap); + let target_cfg = build_target_config(&sopts); + let p_s = parse::new_parse_sess_special_handler(span_diagnostic); let default_sysroot = match sopts.maybe_sysroot { Some(_) => None, None => Some(filesearch::get_or_default_sysroot()) @@ -1029,19 +1020,17 @@ pub fn build_session_(sopts: @session::Options, opts: sopts, cstore: CStore::new(token::get_ident_interner()), parse_sess: p_s, - codemap: codemap, // For a library crate, this is always none entry_fn: RefCell::new(None), entry_type: Cell::new(None), macro_registrar_fn: RefCell::new(None), - span_diagnostic: span_diagnostic_handler, default_sysroot: default_sysroot, building_library: Cell::new(false), local_crate_source_file: local_crate_source_file, working_dir: os::getcwd(), - lints: RefCell::new(HashMap::new()), + lints: RefCell::new(NodeMap::new()), node_id: Cell::new(1), - crate_types: @RefCell::new(Vec::new()), + crate_types: RefCell::new(Vec::new()), features: front::feature_gate::Features::new(), recursion_limit: Cell::new(64), } diff --git a/src/librustc/driver/session.rs b/src/librustc/driver/session.rs index acc92221539..8d3048a80b2 100644 --- a/src/librustc/driver/session.rs +++ b/src/librustc/driver/session.rs @@ -16,6 +16,7 @@ use front; use metadata::filesearch; use metadata; use middle::lint; +use util::nodemap::NodeMap; use syntax::attr::AttrMetaMethods; use syntax::ast::NodeId; @@ -28,7 +29,7 @@ use syntax; use std::cell::{Cell, RefCell}; use std::vec_ng::Vec; -use collections::{HashMap,HashSet}; +use collections::HashSet; pub struct Config { 
os: abi::Os, @@ -174,15 +175,13 @@ pub enum CrateType { } pub struct Session { - targ_cfg: @Config, - opts: @Options, + targ_cfg: Config, + opts: Options, cstore: metadata::cstore::CStore, parse_sess: ParseSess, - codemap: @codemap::CodeMap, // For a library crate, this is always none entry_fn: RefCell<Option<(NodeId, codemap::Span)>>, entry_type: Cell<Option<EntryFnType>>, - span_diagnostic: @diagnostic::SpanHandler, macro_registrar_fn: RefCell<Option<ast::DefId>>, default_sysroot: Option<Path>, building_library: Cell<bool>, @@ -190,10 +189,9 @@ pub struct Session { // expected to be absolute. `None` means that there is no source file. local_crate_source_file: Option<Path>, working_dir: Path, - lints: RefCell<HashMap<ast::NodeId, - Vec<(lint::Lint, codemap::Span, ~str)> >>, + lints: RefCell<NodeMap<Vec<(lint::Lint, codemap::Span, ~str)>>>, node_id: Cell<ast::NodeId>, - crate_types: @RefCell<Vec<CrateType> >, + crate_types: RefCell<Vec<CrateType>>, features: front::feature_gate::Features, /// The maximum recursion limit for potentially infinitely recursive @@ -203,52 +201,52 @@ pub struct Session { impl Session { pub fn span_fatal(&self, sp: Span, msg: &str) -> ! { - self.span_diagnostic.span_fatal(sp, msg) + self.diagnostic().span_fatal(sp, msg) } pub fn fatal(&self, msg: &str) -> ! { - self.span_diagnostic.handler().fatal(msg) + self.diagnostic().handler().fatal(msg) } pub fn span_err(&self, sp: Span, msg: &str) { - self.span_diagnostic.span_err(sp, msg) + self.diagnostic().span_err(sp, msg) } pub fn err(&self, msg: &str) { - self.span_diagnostic.handler().err(msg) + self.diagnostic().handler().err(msg) } pub fn err_count(&self) -> uint { - self.span_diagnostic.handler().err_count() + self.diagnostic().handler().err_count() } pub fn has_errors(&self) -> bool { - self.span_diagnostic.handler().has_errors() + self.diagnostic().handler().has_errors() } pub fn abort_if_errors(&self) { - self.span_diagnostic.handler().abort_if_errors() + self.diagnostic().handler().abort_if_errors() } pub fn span_warn(&self, sp: Span, msg: &str) { - self.span_diagnostic.span_warn(sp, msg) + self.diagnostic().span_warn(sp, msg) } pub fn warn(&self, msg: &str) { - self.span_diagnostic.handler().warn(msg) + self.diagnostic().handler().warn(msg) } pub fn span_note(&self, sp: Span, msg: &str) { - self.span_diagnostic.span_note(sp, msg) + self.diagnostic().span_note(sp, msg) } pub fn span_end_note(&self, sp: Span, msg: &str) { - self.span_diagnostic.span_end_note(sp, msg) + self.diagnostic().span_end_note(sp, msg) } pub fn note(&self, msg: &str) { - self.span_diagnostic.handler().note(msg) + self.diagnostic().handler().note(msg) } pub fn span_bug(&self, sp: Span, msg: &str) -> ! { - self.span_diagnostic.span_bug(sp, msg) + self.diagnostic().span_bug(sp, msg) } pub fn bug(&self, msg: &str) -> ! { - self.span_diagnostic.handler().bug(msg) + self.diagnostic().handler().bug(msg) } pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! { - self.span_diagnostic.span_unimpl(sp, msg) + self.diagnostic().span_unimpl(sp, msg) } pub fn unimpl(&self, msg: &str) -> ! 
{ - self.span_diagnostic.handler().unimpl(msg) + self.diagnostic().handler().unimpl(msg) } pub fn add_lint(&self, lint: lint::Lint, @@ -275,12 +273,15 @@ impl Session { v } - pub fn diagnostic(&self) -> @diagnostic::SpanHandler { - self.span_diagnostic + pub fn diagnostic<'a>(&'a self) -> &'a diagnostic::SpanHandler { + &self.parse_sess.span_diagnostic } pub fn debugging_opt(&self, opt: u64) -> bool { (self.opts.debugging_opts & opt) != 0 } + pub fn codemap<'a>(&'a self) -> &'a codemap::CodeMap { + &self.parse_sess.span_diagnostic.cm + } // This exists to help with refactoring to eliminate impossible // cases later on pub fn impossible_case(&self, sp: Span, msg: &str) -> ! { @@ -328,8 +329,8 @@ impl Session { } /// Some reasonable defaults -pub fn basic_options() -> @Options { - @Options { +pub fn basic_options() -> Options { + Options { crate_types: Vec::new(), gc: false, optimize: No, diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index 4383f3f5933..9409d8c457f 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -295,26 +295,26 @@ pub fn run_compiler(args: &[~str]) { d::parse_pretty(&sess, a) }); match pretty { - Some::<d::PpMode>(ppm) => { - d::pretty_print_input(sess, cfg, &input, ppm); - return; - } - None::<d::PpMode> => {/* continue */ } + Some::<d::PpMode>(ppm) => { + d::pretty_print_input(sess, cfg, &input, ppm); + return; + } + None::<d::PpMode> => {/* continue */ } } let ls = matches.opt_present("ls"); if ls { match input { - d::FileInput(ref ifile) => { - let mut stdout = io::stdout(); - d::list_metadata(&sess, &(*ifile), &mut stdout).unwrap(); - } - d::StrInput(_) => { - d::early_error("can not list metadata for stdin"); - } + d::FileInput(ref ifile) => { + let mut stdout = io::stdout(); + d::list_metadata(&sess, &(*ifile), &mut stdout).unwrap(); + } + d::StrInput(_) => { + d::early_error("can not list metadata for stdin"); + } } return; } - let (crate_id, crate_name, crate_file_name) = sopts.print_metas; + let (crate_id, crate_name, crate_file_name) = sess.opts.print_metas; // these nasty nested conditions are to avoid doing extra work if crate_id || crate_name || crate_file_name { let attrs = parse_crate_attrs(&sess, &input); diff --git a/src/librustc/metadata/creader.rs b/src/librustc/metadata/creader.rs index 359f28f2ad1..74e180072fa 100644 --- a/src/librustc/metadata/creader.rs +++ b/src/librustc/metadata/creader.rs @@ -88,7 +88,7 @@ fn dump_crates(crate_cache: &[cache_entry]) { } fn warn_if_multiple_versions(e: &mut Env, - diag: @SpanHandler, + diag: &SpanHandler, crate_cache: &[cache_entry]) { if crate_cache.len() != 0u { let name = crate_cache[crate_cache.len() - 1].crate_id.name.clone(); diff --git a/src/librustc/metadata/csearch.rs b/src/librustc/metadata/csearch.rs index 95c85a04b19..d41d01f3c85 100644 --- a/src/librustc/metadata/csearch.rs +++ b/src/librustc/metadata/csearch.rs @@ -214,11 +214,11 @@ pub fn get_field_type(tcx: &ty::ctxt, class_id: ast::DefId, let cstore = &tcx.sess.cstore; let cdata = cstore.get_crate_data(class_id.krate); let all_items = reader::get_doc(reader::Doc(cdata.data()), tag_items); - let class_doc = expect(tcx.diag, + let class_doc = expect(tcx.sess.diagnostic(), decoder::maybe_find_item(class_id.node, all_items), || format!("get_field_type: class ID {:?} not found", class_id) ); - let the_field = expect(tcx.diag, + let the_field = expect(tcx.sess.diagnostic(), decoder::maybe_find_item(def.node, class_doc), || format!("get_field_type: in class {:?}, field ID {:?} not found", class_id, def) ); diff --git 
a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index a1ee7d39ed7..b869e0a7d91 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -68,7 +68,7 @@ pub type EncodeInlinedItem<'a> = 'a |ecx: &EncodeContext, ii: InlinedItemRef|; pub struct EncodeParams<'a> { - diag: @SpanHandler, + diag: &'a SpanHandler, tcx: &'a ty::ctxt, reexports2: middle::resolve::ExportMap2, item_symbols: &'a RefCell<NodeMap<~str>>, @@ -95,7 +95,7 @@ pub struct Stats { } pub struct EncodeContext<'a> { - diag: @SpanHandler, + diag: &'a SpanHandler, tcx: &'a ty::ctxt, stats: @Stats, reexports2: middle::resolve::ExportMap2, @@ -904,7 +904,7 @@ fn encode_info_for_item(ecx: &EncodeContext, } debug!("encoding info for item at {}", - ecx.tcx.sess.codemap.span_to_str(item.span)); + ecx.tcx.sess.codemap().span_to_str(item.span)); let def_id = local_def(item.id); match item.node { @@ -1630,7 +1630,7 @@ impl<'a, 'b> Visitor<()> for MacroDefVisitor<'a, 'b> { fn visit_item(&mut self, item: &Item, _: ()) { match item.node { ItemMac(..) => { - let def = self.ecx.tcx.sess.codemap.span_to_snippet(item.span) + let def = self.ecx.tcx.sess.codemap().span_to_snippet(item.span) .expect("Unable to find source for macro"); self.ebml_w.start_tag(tag_macro_def); self.ebml_w.wr_str(def); @@ -1901,7 +1901,7 @@ fn encode_metadata_inner(wr: &mut MemWriter, parms: EncodeParams, krate: &Crate) // Get the encoded string for a type pub fn encoded_ty(tcx: &ty::ctxt, t: ty::t) -> ~str { let cx = &tyencode::ctxt { - diag: tcx.diag, + diag: tcx.sess.diagnostic(), ds: def_to_str, tcx: tcx, abbrevs: tyencode::ac_no_abbrevs}; diff --git a/src/librustc/metadata/loader.rs b/src/librustc/metadata/loader.rs index 5c4945a8140..28556105c7b 100644 --- a/src/librustc/metadata/loader.rs +++ b/src/librustc/metadata/loader.rs @@ -352,7 +352,7 @@ impl<'a> Context<'a> { } } -pub fn note_crateid_attr(diag: @SpanHandler, crateid: &CrateId) { +pub fn note_crateid_attr(diag: &SpanHandler, crateid: &CrateId) { diag.handler().note(format!("crate_id: {}", crateid.to_str())); } diff --git a/src/librustc/metadata/tyencode.rs b/src/librustc/metadata/tyencode.rs index 6366149c0f6..e78fe8e72a8 100644 --- a/src/librustc/metadata/tyencode.rs +++ b/src/librustc/metadata/tyencode.rs @@ -29,16 +29,15 @@ use syntax::ast; use syntax::ast::*; use syntax::diagnostic::SpanHandler; use syntax::parse::token; -use syntax::print::pprust::*; macro_rules! mywrite( ($wr:expr, $($arg:tt)*) => ( format_args!(|a| { mywrite($wr, a) }, $($arg)*) ) ) pub struct ctxt<'a> { - diag: @SpanHandler, + diag: &'a SpanHandler, // Def -> str Callback: - ds: extern "Rust" fn(DefId) -> ~str, + ds: fn(DefId) -> ~str, // The type context. 
tcx: &'a ty::ctxt, abbrevs: abbrev_ctxt diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index 69932a14795..2d4cbee243e 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -153,7 +153,7 @@ enum LiveNodeKind { } fn live_node_kind_to_str(lnk: LiveNodeKind, cx: &ty::ctxt) -> ~str { - let cm = cx.sess.codemap; + let cm = cx.sess.codemap(); match lnk { FreeVarNode(s) => format!("Free var node [{}]", cm.span_to_str(s)), ExprNode(s) => format!("Expr node [{}]", cm.span_to_str(s)), diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index a19b348b78a..a13dce65f51 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -847,7 +847,7 @@ fn resolve_fn(visitor: &mut RegionResolutionVisitor, body.id={}, \ cx.parent={})", id, - visitor.sess.codemap.span_to_str(sp), + visitor.sess.codemap().span_to_str(sp), body.id, cx.parent); diff --git a/src/librustc/middle/resolve.rs b/src/librustc/middle/resolve.rs index bea776eb209..2784be5a01b 100644 --- a/src/librustc/middle/resolve.rs +++ b/src/librustc/middle/resolve.rs @@ -3275,7 +3275,7 @@ impl<'a> Resolver<'a> { let import_count = imports.get().len(); if index != import_count { let sn = self.session - .codemap + .codemap() .span_to_snippet(imports.get().get(index).span) .unwrap(); if sn.contains("::") { @@ -5449,7 +5449,7 @@ impl<'a> Resolver<'a> { // public or private item, we will check the correct thing, dependent on how the import // is used. fn finalize_import(&mut self, id: NodeId, span: Span) { - debug!("finalizing import uses for {}", self.session.codemap.span_to_snippet(span)); + debug!("finalizing import uses for {}", self.session.codemap().span_to_snippet(span)); if !self.used_imports.contains(&(id, TypeNS)) && !self.used_imports.contains(&(id, ValueNS)) { diff --git a/src/librustc/middle/trans/builder.rs b/src/librustc/middle/trans/builder.rs index bc99a0ac559..c403d323bf4 100644 --- a/src/librustc/middle/trans/builder.rs +++ b/src/librustc/middle/trans/builder.rs @@ -749,7 +749,7 @@ impl<'a> Builder<'a> { pub fn add_span_comment(&self, sp: Span, text: &str) { if self.ccx.sess().asm_comments() { - let s = format!("{} ({})", text, self.ccx.sess().codemap.span_to_str(sp)); + let s = format!("{} ({})", text, self.ccx.sess().codemap().span_to_str(sp)); debug!("{}", s); self.add_comment(s); } diff --git a/src/librustc/middle/trans/common.rs b/src/librustc/middle/trans/common.rs index 43f7397f190..e812f36e85d 100644 --- a/src/librustc/middle/trans/common.rs +++ b/src/librustc/middle/trans/common.rs @@ -941,9 +941,9 @@ pub fn dummy_substs(tps: Vec<ty::t> ) -> ty::substs { pub fn filename_and_line_num_from_span(bcx: &Block, span: Span) -> (ValueRef, ValueRef) { - let loc = bcx.sess().parse_sess.cm.lookup_char_pos(span.lo); + let loc = bcx.sess().codemap().lookup_char_pos(span.lo); let filename_cstr = C_cstr(bcx.ccx(), - token::intern_and_get_ident(loc.file.name)); + token::intern_and_get_ident(loc.file.deref().name)); let filename = build::PointerCast(bcx, filename_cstr, Type::i8p(bcx.ccx())); let line = C_int(bcx.ccx(), loc.line as int); (filename, line) diff --git a/src/librustc/middle/trans/context.rs b/src/librustc/middle/trans/context.rs index b9c7cb93e12..4ed4f0fb310 100644 --- a/src/librustc/middle/trans/context.rs +++ b/src/librustc/middle/trans/context.rs @@ -132,13 +132,11 @@ impl CrateContext { let metadata_llmod = format!("{}_metadata", name).with_c_str(|buf| { llvm::LLVMModuleCreateWithNameInContext(buf, llcx) }); - let 
data_layout: &str = tcx.sess.targ_cfg.target_strs.data_layout; - let targ_triple: &str = tcx.sess.targ_cfg.target_strs.target_triple; - data_layout.with_c_str(|buf| { + tcx.sess.targ_cfg.target_strs.data_layout.with_c_str(|buf| { llvm::LLVMSetDataLayout(llmod, buf); llvm::LLVMSetDataLayout(metadata_llmod, buf); }); - targ_triple.with_c_str(|buf| { + tcx.sess.targ_cfg.target_strs.target_triple.with_c_str(|buf| { llvm::LLVMRustSetNormalizedTarget(llmod, buf); llvm::LLVMRustSetNormalizedTarget(metadata_llmod, buf); }); diff --git a/src/librustc/middle/trans/controlflow.rs b/src/librustc/middle/trans/controlflow.rs index 43e1e219e6f..2796eaeb129 100644 --- a/src/librustc/middle/trans/controlflow.rs +++ b/src/librustc/middle/trans/controlflow.rs @@ -331,13 +331,11 @@ pub fn trans_fail<'a>( sp: Span, fail_str: InternedString) -> &'a Block<'a> { - let v_fail_str = C_cstr(bcx.ccx(), fail_str); - let _icx = push_ctxt("trans_fail_value"); let ccx = bcx.ccx(); - let sess = bcx.sess(); - let loc = sess.parse_sess.cm.lookup_char_pos(sp.lo); - let v_filename = C_cstr(bcx.ccx(), - token::intern_and_get_ident(loc.file.name)); + let v_fail_str = C_cstr(ccx, fail_str); + let _icx = push_ctxt("trans_fail_value"); + let loc = bcx.sess().codemap().lookup_char_pos(sp.lo); + let v_filename = C_cstr(ccx, token::intern_and_get_ident(loc.file.deref().name)); let v_line = loc.line as int; let v_str = PointerCast(bcx, v_fail_str, Type::i8p(ccx)); let v_filename = PointerCast(bcx, v_filename, Type::i8p(ccx)); diff --git a/src/librustc/middle/trans/debuginfo.rs b/src/librustc/middle/trans/debuginfo.rs index 630512c8f69..b044ee15b3b 100644 --- a/src/librustc/middle/trans/debuginfo.rs +++ b/src/librustc/middle/trans/debuginfo.rs @@ -485,7 +485,7 @@ pub fn set_source_location(fcx: &FunctionContext, let cx = fcx.ccx; - debug!("set_source_location: {}", cx.sess().codemap.span_to_str(span)); + debug!("set_source_location: {}", cx.sess().codemap().span_to_str(span)); if fcx.debug_context.get_ref(cx, span).source_locations_enabled.get() { let loc = span_start(cx, span); @@ -616,7 +616,7 @@ pub fn create_function_debug_context(cx: &CrateContext, } let loc = span_start(cx, span); - let file_metadata = file_metadata(cx, loc.file.name); + let file_metadata = file_metadata(cx, loc.file.deref().name); let function_type_metadata = unsafe { let fn_signature = get_function_signature(cx, fn_ast_id, fn_decl, param_substs, span); @@ -939,7 +939,7 @@ fn declare_local(bcx: &Block, span: Span) { let cx: &CrateContext = bcx.ccx(); - let filename = span_start(cx, span).file.name.clone(); + let filename = span_start(cx, span).file.deref().name.clone(); let file_metadata = file_metadata(cx, filename); let name = token::get_ident(variable_ident); @@ -1195,7 +1195,7 @@ fn prepare_struct_metadata(cx: &CrateContext, let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id); - let file_name = span_start(cx, definition_span).file.name.clone(); + let file_name = span_start(cx, definition_span).file.deref().name.clone(); let file_metadata = file_metadata(cx, file_name); let struct_metadata_stub = create_struct_stub(cx, @@ -1292,7 +1292,7 @@ fn prepare_tuple_metadata(cx: &CrateContext, let tuple_llvm_type = type_of::type_of(cx, tuple_type); let loc = span_start(cx, span); - let file_metadata = file_metadata(cx, loc.file.name); + let file_metadata = file_metadata(cx, loc.file.deref().name); UnfinishedMetadata { cache_id: cache_id_for_type(tuple_type), @@ -1452,7 +1452,7 @@ fn prepare_enum_metadata(cx: &CrateContext, let 
(containing_scope, definition_span) = get_namespace_and_span_for_item(cx, enum_def_id); let loc = span_start(cx, definition_span); - let file_metadata = file_metadata(cx, loc.file.name); + let file_metadata = file_metadata(cx, loc.file.deref().name); // For empty enums there is an early exit. Just describe it as an empty struct with the // appropriate type name @@ -1791,7 +1791,7 @@ fn boxed_type_metadata(cx: &CrateContext, ]; let loc = span_start(cx, span); - let file_metadata = file_metadata(cx, loc.file.name); + let file_metadata = file_metadata(cx, loc.file.deref().name); return composite_type_metadata( cx, @@ -1892,16 +1892,16 @@ fn vec_metadata(cx: &CrateContext, assert!(member_descriptions.len() == member_llvm_types.len()); let loc = span_start(cx, span); - let file_metadata = file_metadata(cx, loc.file.name); + let file_metadata = file_metadata(cx, loc.file.deref().name); - return composite_type_metadata( + composite_type_metadata( cx, vec_llvm_type, vec_type_name, member_descriptions, file_metadata, file_metadata, - span); + span) } fn vec_slice_metadata(cx: &CrateContext, @@ -1943,7 +1943,7 @@ fn vec_slice_metadata(cx: &CrateContext, assert!(member_descriptions.len() == member_llvm_types.len()); let loc = span_start(cx, span); - let file_metadata = file_metadata(cx, loc.file.name); + let file_metadata = file_metadata(cx, loc.file.deref().name); return composite_type_metadata( cx, @@ -1969,7 +1969,7 @@ fn subroutine_type_metadata(cx: &CrateContext, span: Span) -> DICompositeType { let loc = span_start(cx, span); - let file_metadata = file_metadata(cx, loc.file.name); + let file_metadata = file_metadata(cx, loc.file.deref().name); let mut signature_metadata: Vec<DIType> = Vec::with_capacity(signature.inputs.len() + 1); @@ -2015,7 +2015,7 @@ fn trait_metadata(cx: &CrateContext, let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id); - let file_name = span_start(cx, definition_span).file.name.clone(); + let file_name = span_start(cx, definition_span).file.deref().name.clone(); let file_metadata = file_metadata(cx, file_name); let trait_llvm_type = type_of::type_of(cx, trait_type); @@ -2218,7 +2218,7 @@ fn generate_unique_type_id(prefix: &'static str) -> ~str { /// Return codemap::Loc corresponding to the beginning of the span fn span_start(cx: &CrateContext, span: Span) -> codemap::Loc { - cx.sess().codemap.lookup_char_pos(span.lo) + cx.sess().codemap().lookup_char_pos(span.lo) } fn size_and_align_of(cx: &CrateContext, llvm_type: Type) -> (u64, u64) { @@ -2315,8 +2315,8 @@ fn populate_scope_map(cx: &CrateContext, &mut Vec<ScopeStackEntry> , &mut HashMap<ast::NodeId, DIScope>|) { // Create a new lexical scope and push it onto the stack - let loc = cx.sess().codemap.lookup_char_pos(scope_span.lo); - let file_metadata = file_metadata(cx, loc.file.name); + let loc = cx.sess().codemap().lookup_char_pos(scope_span.lo); + let file_metadata = file_metadata(cx, loc.file.deref().name); let parent_scope = scope_stack.last().unwrap().scope_metadata; let scope_metadata = unsafe { @@ -2432,8 +2432,8 @@ fn populate_scope_map(cx: &CrateContext, if need_new_scope { // Create a new lexical scope and push it onto the stack - let loc = cx.sess().codemap.lookup_char_pos(pat.span.lo); - let file_metadata = file_metadata(cx, loc.file.name); + let loc = cx.sess().codemap().lookup_char_pos(pat.span.lo); + let file_metadata = file_metadata(cx, loc.file.deref().name); let parent_scope = scope_stack.last().unwrap().scope_metadata; let scope_metadata = unsafe { diff --git 
a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index be74602c2f5..ae940ebeef2 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -253,7 +253,6 @@ pub enum AutoRef { /// generates so that so that it can be reused and doesn't have to be redone /// later on. pub struct ctxt { - diag: @syntax::diagnostic::SpanHandler, // Specifically use a speedy hash algorithm for this hash map, it's used // quite often. interner: RefCell<FnvHashMap<intern_key, ~t_box_>>, @@ -1088,7 +1087,6 @@ pub fn mk_ctxt(s: Session, ctxt { named_region_map: named_region_map, item_variance_map: RefCell::new(DefIdMap::new()), - diag: s.diagnostic(), interner: RefCell::new(FnvHashMap::new()), next_id: Cell::new(primitives::LAST_PRIMITIVE_ID), sess: s, diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index b80fb2fa491..b3d3e59ea6b 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -139,9 +139,8 @@ pub fn explain_region_and_span(cx: &ctxt, region: ty::Region) }; fn explain_span(cx: &ctxt, heading: &str, span: Span) - -> (~str, Option<Span>) - { - let lo = cx.sess.codemap.lookup_char_pos_adj(span.lo); + -> (~str, Option<Span>) { + let lo = cx.sess.codemap().lookup_char_pos_adj(span.lo); (format!("the {} at {}:{}", heading, lo.line, lo.col.to_uint()), Some(span)) } @@ -172,28 +171,28 @@ pub fn ReScope_id_to_str(cx: &ctxt, node_id: ast::NodeId) -> ~str { match cx.map.find(node_id) { Some(ast_map::NodeBlock(ref blk)) => { format!("<block at {}>", - cx.sess.codemap.span_to_str(blk.span)) + cx.sess.codemap().span_to_str(blk.span)) } Some(ast_map::NodeExpr(expr)) => { match expr.node { ast::ExprCall(..) => { format!("<call at {}>", - cx.sess.codemap.span_to_str(expr.span)) + cx.sess.codemap().span_to_str(expr.span)) } ast::ExprMatch(..) => { format!("<match at {}>", - cx.sess.codemap.span_to_str(expr.span)) + cx.sess.codemap().span_to_str(expr.span)) } ast::ExprAssignOp(..) | ast::ExprUnary(..) | ast::ExprBinary(..) | ast::ExprIndex(..) => { format!("<method at {}>", - cx.sess.codemap.span_to_str(expr.span)) + cx.sess.codemap().span_to_str(expr.span)) } _ => { format!("<expression at {}>", - cx.sess.codemap.span_to_str(expr.span)) + cx.sess.codemap().span_to_str(expr.span)) } } } @@ -972,7 +971,7 @@ impl Repr for ty::BuiltinBounds { impl Repr for Span { fn repr(&self, tcx: &ctxt) -> ~str { - tcx.sess.codemap.span_to_str(*self) + tcx.sess.codemap().span_to_str(*self) } } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 7ff9a73f29d..65a0f473db2 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -375,7 +375,7 @@ pub fn find_stability<AM: AttrMetaMethods, It: Iterator<AM>>(mut metas: It) None } -pub fn require_unique_names(diagnostic: @SpanHandler, metas: &[@MetaItem]) { +pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[@MetaItem]) { let mut set = HashSet::new(); for meta in metas.iter() { let name = meta.name(); @@ -400,7 +400,7 @@ pub fn require_unique_names(diagnostic: @SpanHandler, metas: &[@MetaItem]) { * present (before fields, if any) with that type; reprensentation * optimizations which would remove it will not be done. */ -pub fn find_repr_attr(diagnostic: @SpanHandler, attr: @ast::MetaItem, acc: ReprAttr) +pub fn find_repr_attr(diagnostic: &SpanHandler, attr: @ast::MetaItem, acc: ReprAttr) -> ReprAttr { let mut acc = acc; match attr.node { @@ -438,7 +438,7 @@ pub fn find_repr_attr(diagnostic: @SpanHandler, attr: @ast::MetaItem, acc: ReprA // Not a "repr" hint: ignore. 
_ => { } } - return acc; + acc } fn int_type_of_word(s: &str) -> Option<IntType> { diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 5be03317b77..4bfd5391a8f 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -23,6 +23,7 @@ source code snippets, etc. use std::cell::RefCell; use std::cmp; +use std::rc::Rc; use std::vec_ng::Vec; use serialize::{Encodable, Decodable, Encoder, Decoder}; @@ -140,7 +141,7 @@ pub fn mk_sp(lo: BytePos, hi: BytePos) -> Span { /// A source code location used for error reporting pub struct Loc { /// Information about the original source - file: @FileMap, + file: Rc<FileMap>, /// The (1-based) line number line: uint, /// The (0-based) column offset @@ -154,12 +155,12 @@ pub struct LocWithOpt { filename: FileName, line: uint, col: CharPos, - file: Option<@FileMap>, + file: Option<Rc<FileMap>>, } // used to be structural records. Better names, anyone? -pub struct FileMapAndLine {fm: @FileMap, line: uint} -pub struct FileMapAndBytePos {fm: @FileMap, pos: BytePos} +pub struct FileMapAndLine {fm: Rc<FileMap>, line: uint} +pub struct FileMapAndBytePos {fm: Rc<FileMap>, pos: BytePos} #[deriving(Clone, Hash, Show)] pub enum MacroFormat { @@ -186,10 +187,10 @@ pub struct ExpnInfo { pub type FileName = ~str; -pub struct FileLines -{ - file: @FileMap, - lines: Vec<uint> } +pub struct FileLines { + file: Rc<FileMap>, + lines: Vec<uint> +} /// Identifies an offset of a multi-byte character in a FileMap pub struct MultiByteChar { @@ -251,8 +252,7 @@ impl FileMap { pos: pos, bytes: bytes, }; - let mut multibyte_chars = self.multibyte_chars.borrow_mut(); - multibyte_chars.get().push(mbc); + self.multibyte_chars.borrow_mut().get().push(mbc); } pub fn is_real_file(&self) -> bool { @@ -261,7 +261,7 @@ impl FileMap { } pub struct CodeMap { - files: RefCell<Vec<@FileMap> > + files: RefCell<Vec<Rc<FileMap>>> } impl CodeMap { @@ -271,11 +271,11 @@ impl CodeMap { } } - pub fn new_filemap(&self, filename: FileName, mut src: ~str) -> @FileMap { + pub fn new_filemap(&self, filename: FileName, mut src: ~str) -> Rc<FileMap> { let mut files = self.files.borrow_mut(); let start_pos = match files.get().last() { None => 0, - Some(last) => last.start_pos.to_uint() + last.src.len(), + Some(last) => last.deref().start_pos.to_uint() + last.deref().src.len(), }; // Append '\n' in case it's not already there. 
@@ -286,34 +286,33 @@ impl CodeMap { src.push_char('\n'); } - let filemap = @FileMap { + let filemap = Rc::new(FileMap { name: filename, src: src, start_pos: Pos::from_uint(start_pos), lines: RefCell::new(Vec::new()), multibyte_chars: RefCell::new(Vec::new()), - }; + }); - files.get().push(filemap); + files.get().push(filemap.clone()); - return filemap; + filemap } pub fn mk_substr_filename(&self, sp: Span) -> ~str { let pos = self.lookup_char_pos(sp.lo); - return format!("<{}:{}:{}>", pos.file.name, - pos.line, pos.col.to_uint() + 1) + format!("<{}:{}:{}>", pos.file.deref().name, pos.line, pos.col.to_uint() + 1) } /// Lookup source information about a BytePos pub fn lookup_char_pos(&self, pos: BytePos) -> Loc { - return self.lookup_pos(pos); + self.lookup_pos(pos) } pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt { let loc = self.lookup_char_pos(pos); LocWithOpt { - filename: loc.file.name.to_str(), + filename: loc.file.deref().name.to_str(), line: loc.line, col: loc.col, file: Some(loc.file) @@ -321,11 +320,8 @@ impl CodeMap { } pub fn span_to_str(&self, sp: Span) -> ~str { - { - let files = self.files.borrow(); - if files.get().len() == 0 && sp == DUMMY_SP { - return ~"no-location"; - } + if self.files.borrow().get().len() == 0 && sp == DUMMY_SP { + return ~"no-location"; } let lo = self.lookup_char_pos_adj(sp.lo); @@ -335,18 +331,17 @@ impl CodeMap { } pub fn span_to_filename(&self, sp: Span) -> FileName { - let lo = self.lookup_char_pos(sp.lo); - lo.file.name.to_str() + self.lookup_char_pos(sp.lo).file.deref().name.to_str() } - pub fn span_to_lines(&self, sp: Span) -> @FileLines { + pub fn span_to_lines(&self, sp: Span) -> FileLines { let lo = self.lookup_char_pos(sp.lo); let hi = self.lookup_char_pos(sp.hi); let mut lines = Vec::new(); for i in range(lo.line - 1u, hi.line as uint) { lines.push(i); }; - return @FileLines {file: lo.file, lines: lines}; + FileLines {file: lo.file, lines: lines} } pub fn span_to_snippet(&self, sp: Span) -> Option<~str> { @@ -357,27 +352,22 @@ impl CodeMap { // it's testing isn't true for all spans in the AST, so to allow the // caller to not have to fail (and it can't catch it since the CodeMap // isn't sendable), return None - if begin.fm.start_pos != end.fm.start_pos { + if begin.fm.deref().start_pos != end.fm.deref().start_pos { None } else { - Some(begin.fm.src.slice( begin.pos.to_uint(), end.pos.to_uint()).to_owned()) + Some(begin.fm.deref().src.slice( begin.pos.to_uint(), end.pos.to_uint()).to_owned()) } } - pub fn get_filemap(&self, filename: &str) -> @FileMap { - let files = self.files.borrow(); - for fm in files.get().iter() { - if filename == fm.name { - return *fm + pub fn get_filemap(&self, filename: &str) -> Rc<FileMap> { + for fm in self.files.borrow().get().iter() { + if filename == fm.deref().name { + return fm.clone(); } } - //XXjdm the following triggers a mismatched type bug - // (or expected function, found _|_) - fail!(); // ("asking for " + filename + " which we don't know about"); + fail!("asking for {} which we don't know about", filename); } -} -impl CodeMap { fn lookup_filemap_idx(&self, pos: BytePos) -> uint { let files = self.files.borrow(); let files = files.get(); @@ -386,7 +376,7 @@ impl CodeMap { let mut b = len; while b - a > 1u { let m = (a + b) / 2u; - if files.get(m).start_pos > pos { + if files.get(m).deref().start_pos > pos { b = m; } else { a = m; @@ -396,7 +386,7 @@ impl CodeMap { // filemap, but are not the filemaps we want (because they are length 0, they cannot // contain what we are looking for). 
So, rewind until we find a useful filemap. loop { - let lines = files.get(a).lines.borrow(); + let lines = files.get(a).deref().lines.borrow(); let lines = lines.get(); if lines.len() > 0 { break; @@ -413,28 +403,28 @@ impl CodeMap { return a; } - fn lookup_line(&self, pos: BytePos) -> FileMapAndLine - { + fn lookup_line(&self, pos: BytePos) -> FileMapAndLine { let idx = self.lookup_filemap_idx(pos); let files = self.files.borrow(); - let f = *files.get().get(idx); + let f = files.get().get(idx).clone(); let mut a = 0u; - let mut lines = f.lines.borrow_mut(); - let mut b = lines.get().len(); - while b - a > 1u { - let m = (a + b) / 2u; - if *lines.get().get(m) > pos { b = m; } else { a = m; } + { + let mut lines = f.deref().lines.borrow_mut(); + let mut b = lines.get().len(); + while b - a > 1u { + let m = (a + b) / 2u; + if *lines.get().get(m) > pos { b = m; } else { a = m; } + } } - return FileMapAndLine {fm: f, line: a}; + FileMapAndLine {fm: f, line: a} } fn lookup_pos(&self, pos: BytePos) -> Loc { let FileMapAndLine {fm: f, line: a} = self.lookup_line(pos); let line = a + 1u; // Line numbers start at 1 let chpos = self.bytepos_to_file_charpos(pos); - let lines = f.lines.borrow(); - let linebpos = *lines.get().get(a); + let linebpos = *f.deref().lines.borrow().get().get(a); let linechpos = self.bytepos_to_file_charpos(linebpos); debug!("codemap: byte pos {:?} is on the line at byte pos {:?}", pos, linebpos); @@ -442,20 +432,18 @@ impl CodeMap { chpos, linechpos); debug!("codemap: byte is on line: {:?}", line); assert!(chpos >= linechpos); - return Loc { + Loc { file: f, line: line, col: chpos - linechpos - }; + } } - fn lookup_byte_offset(&self, bpos: BytePos) - -> FileMapAndBytePos { + fn lookup_byte_offset(&self, bpos: BytePos) -> FileMapAndBytePos { let idx = self.lookup_filemap_idx(bpos); - let files = self.files.borrow(); - let fm = *files.get().get(idx); - let offset = bpos - fm.start_pos; - return FileMapAndBytePos {fm: fm, pos: offset}; + let fm = self.files.borrow().get().get(idx).clone(); + let offset = bpos - fm.deref().start_pos; + FileMapAndBytePos {fm: fm, pos: offset} } // Converts an absolute BytePos to a CharPos relative to the filemap. @@ -468,8 +456,7 @@ impl CodeMap { // The number of extra bytes due to multibyte chars in the FileMap let mut total_extra_bytes = 0; - let multibyte_chars = map.multibyte_chars.borrow(); - for mbc in multibyte_chars.get().iter() { + for mbc in map.deref().multibyte_chars.borrow().get().iter() { debug!("codemap: {:?}-byte char at {:?}", mbc.bytes, mbc.pos); if mbc.pos < bpos { // every character is at least one byte, so we only @@ -483,8 +470,8 @@ impl CodeMap { } } - assert!(map.start_pos.to_uint() + total_extra_bytes <= bpos.to_uint()); - CharPos(bpos.to_uint() - map.start_pos.to_uint() - total_extra_bytes) + assert!(map.deref().start_pos.to_uint() + total_extra_bytes <= bpos.to_uint()); + CharPos(bpos.to_uint() - map.deref().start_pos.to_uint() - total_extra_bytes) } } diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index 2da8b786805..c88c5a3c4e6 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -40,37 +40,37 @@ pub struct ExplicitBug; // accepts span information for source-location // reporting. pub struct SpanHandler { - handler: @Handler, - cm: @codemap::CodeMap, + handler: Handler, + cm: codemap::CodeMap, } impl SpanHandler { pub fn span_fatal(&self, sp: Span, msg: &str) -> ! 
{ - self.handler.emit(Some((&*self.cm, sp)), msg, Fatal); + self.handler.emit(Some((&self.cm, sp)), msg, Fatal); fail!(FatalError); } pub fn span_err(&self, sp: Span, msg: &str) { - self.handler.emit(Some((&*self.cm, sp)), msg, Error); + self.handler.emit(Some((&self.cm, sp)), msg, Error); self.handler.bump_err_count(); } pub fn span_warn(&self, sp: Span, msg: &str) { - self.handler.emit(Some((&*self.cm, sp)), msg, Warning); + self.handler.emit(Some((&self.cm, sp)), msg, Warning); } pub fn span_note(&self, sp: Span, msg: &str) { - self.handler.emit(Some((&*self.cm, sp)), msg, Note); + self.handler.emit(Some((&self.cm, sp)), msg, Note); } pub fn span_end_note(&self, sp: Span, msg: &str) { - self.handler.custom_emit(&*self.cm, sp, msg, Note); + self.handler.custom_emit(&self.cm, sp, msg, Note); } pub fn span_bug(&self, sp: Span, msg: &str) -> ! { - self.handler.emit(Some((&*self.cm, sp)), msg, Bug); + self.handler.emit(Some((&self.cm, sp)), msg, Bug); fail!(ExplicitBug); } pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! { self.span_bug(sp, ~"unimplemented " + msg); } - pub fn handler(&self) -> @Handler { - self.handler + pub fn handler<'a>(&'a self) -> &'a Handler { + &self.handler } } @@ -137,20 +137,19 @@ impl Handler { } } -pub fn mk_span_handler(handler: @Handler, cm: @codemap::CodeMap) - -> @SpanHandler { - @SpanHandler { +pub fn mk_span_handler(handler: Handler, cm: codemap::CodeMap) -> SpanHandler { + SpanHandler { handler: handler, cm: cm, } } -pub fn default_handler() -> @Handler { +pub fn default_handler() -> Handler { mk_handler(~EmitterWriter::stderr()) } -pub fn mk_handler(e: ~Emitter) -> @Handler { - @Handler { +pub fn mk_handler(e: ~Emitter) -> Handler { + Handler { err_count: Cell::new(0), emit: RefCell::new(e), } @@ -301,8 +300,8 @@ fn highlight_lines(err: &mut EmitterWriter, cm: &codemap::CodeMap, sp: Span, lvl: Level, - lines: &codemap::FileLines) -> io::IoResult<()> { - let fm = lines.file; + lines: codemap::FileLines) -> io::IoResult<()> { + let fm = lines.file.deref(); let mut elided = false; let mut display_lines = lines.lines.as_slice(); @@ -374,8 +373,8 @@ fn custom_highlight_lines(w: &mut EmitterWriter, cm: &codemap::CodeMap, sp: Span, lvl: Level, - lines: &codemap::FileLines) -> io::IoResult<()> { - let fm = lines.file; + lines: codemap::FileLines) -> io::IoResult<()> { + let fm = lines.file.deref(); let lines = lines.lines.as_slice(); if lines.len() > MAX_LINES { @@ -420,8 +419,7 @@ fn print_macro_backtrace(w: &mut EmitterWriter, Ok(()) } -pub fn expect<T:Clone>(diag: @SpanHandler, opt: Option<T>, msg: || -> ~str) - -> T { +pub fn expect<T:Clone>(diag: &SpanHandler, opt: Option<T>, msg: || -> ~str) -> T { match opt { Some(ref t) => (*t).clone(), None => diag.handler().bug(msg()), diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index a6f145a129e..df2c265e6eb 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -326,7 +326,7 @@ impl<'a> ExtCtxt<'a> { } } - pub fn codemap(&self) -> @CodeMap { self.parse_sess.cm } + pub fn codemap(&self) -> &'a CodeMap { &self.parse_sess.span_diagnostic.cm } pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess } pub fn cfg(&self) -> ast::CrateConfig { self.cfg.clone() } pub fn call_site(&self) -> Span { diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 6aa90e5e842..7c42476bc01 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -633,7 +633,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { vec!( self.expr_str(span, msg), 
self.expr_str(span, - token::intern_and_get_ident(loc.file.name)), + token::intern_and_get_ident(loc.file.deref().name)), self.expr_uint(span, loc.line))) } diff --git a/src/libsyntax/ext/registrar.rs b/src/libsyntax/ext/registrar.rs index 4c18eb83afc..d8bf726da79 100644 --- a/src/libsyntax/ext/registrar.rs +++ b/src/libsyntax/ext/registrar.rs @@ -37,7 +37,7 @@ impl Visitor<()> for MacroRegistrarContext { } } -pub fn find_macro_registrar(diagnostic: @diagnostic::SpanHandler, +pub fn find_macro_registrar(diagnostic: &diagnostic::SpanHandler, krate: &ast::Crate) -> Option<ast::DefId> { let mut ctx = MacroRegistrarContext { registrars: Vec::new() }; visit::walk_crate(&mut ctx, krate, ()); diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index b31388f58eb..137cd89bf30 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -57,7 +57,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let topmost = topmost_expn_info(cx.backtrace().unwrap()); let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo); - let filename = token::intern_and_get_ident(loc.file.name); + let filename = token::intern_and_get_ident(loc.file.deref().name); base::MRExpr(cx.expr_str(topmost.call_site, filename)) } @@ -117,7 +117,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) // dependency information let filename = file.display().to_str(); let interned = token::intern_and_get_ident(src); - cx.parse_sess.cm.new_filemap(filename, src); + cx.codemap().new_filemap(filename, src); base::MRExpr(cx.expr_str(sp, interned)) } diff --git a/src/libsyntax/ext/trace_macros.rs b/src/libsyntax/ext/trace_macros.rs index 183cccde18e..fa49f06e516 100644 --- a/src/libsyntax/ext/trace_macros.rs +++ b/src/libsyntax/ext/trace_macros.rs @@ -22,7 +22,7 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt, -> base::MacResult { let sess = cx.parse_sess(); let cfg = cx.cfg(); - let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic, + let tt_rdr = new_tt_reader(&cx.parse_sess().span_diagnostic, None, tt.iter().map(|x| (*x).clone()).collect()); let mut rust_parser = Parser(sess, cfg.clone(), tt_rdr.dup()); diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 4cacbfd6e5a..3f4ed0b1e8e 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -131,13 +131,11 @@ fn generic_extension(cx: &ExtCtxt, let mut best_fail_spot = DUMMY_SP; let mut best_fail_msg = ~"internal error: ran no matchers"; - let s_d = cx.parse_sess().span_diagnostic; - for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers match **lhs { MatchedNonterminal(NtMatchers(ref mtcs)) => { // `None` is because we're not interpolating - let arg_rdr = new_tt_reader(s_d, + let arg_rdr = new_tt_reader(&cx.parse_sess().span_diagnostic, None, arg.iter() .map(|x| (*x).clone()) @@ -162,7 +160,8 @@ fn generic_extension(cx: &ExtCtxt, _ => cx.span_bug(sp, "bad thing in rhs") }; // rhs has holes ( `$id` and `$(...)` that need filled) - let trncbr = new_tt_reader(s_d, Some(named_matches), + let trncbr = new_tt_reader(&cx.parse_sess().span_diagnostic, + Some(named_matches), rhs); let p = Parser(cx.parse_sess(), cx.cfg(), ~trncbr); // Let the context choose how to interpret the result. @@ -218,7 +217,7 @@ pub fn add_new_extension(cx: &mut ExtCtxt, // Parse the macro_rules! 
invocation (`none` is for no interpolations): - let arg_reader = new_tt_reader(cx.parse_sess().span_diagnostic, + let arg_reader = new_tt_reader(&cx.parse_sess().span_diagnostic, None, arg.clone()); let argument_map = parse_or_else(cx.parse_sess(), diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index a3f179e851a..e120f07742e 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -30,8 +30,8 @@ struct TtFrame { up: Option<@TtFrame>, } -pub struct TtReader { - sp_diag: @SpanHandler, +pub struct TtReader<'a> { + sp_diag: &'a SpanHandler, // the unzipped tree: priv stack: RefCell<@TtFrame>, /* for MBE-style macro transcription */ @@ -46,10 +46,10 @@ pub struct TtReader { /** This can do Macro-By-Example transcription. On the other hand, if * `src` contains no `TTSeq`s and `TTNonterminal`s, `interp` can (and * should) be none. */ -pub fn new_tt_reader(sp_diag: @SpanHandler, - interp: Option<HashMap<Ident, @NamedMatch>>, - src: Vec<ast::TokenTree> ) - -> TtReader { +pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler, + interp: Option<HashMap<Ident, @NamedMatch>>, + src: Vec<ast::TokenTree> ) + -> TtReader<'a> { let r = TtReader { sp_diag: sp_diag, stack: RefCell::new(@TtFrame { @@ -70,7 +70,7 @@ pub fn new_tt_reader(sp_diag: @SpanHandler, cur_span: RefCell::new(DUMMY_SP), }; tt_next_token(&r); /* get cur_tok and cur_span set up */ - return r; + r } fn dup_tt_frame(f: @TtFrame) -> @TtFrame { @@ -86,7 +86,7 @@ fn dup_tt_frame(f: @TtFrame) -> @TtFrame { } } -pub fn dup_tt_reader(r: &TtReader) -> TtReader { +pub fn dup_tt_reader<'a>(r: &TtReader<'a>) -> TtReader<'a> { TtReader { sp_diag: r.sp_diag, stack: RefCell::new(dup_tt_frame(r.stack.get())), diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs index c2a2097de24..ed74fd416d1 100644 --- a/src/libsyntax/parse/comments.rs +++ b/src/libsyntax/parse/comments.rs @@ -346,10 +346,10 @@ pub struct Literal { // it appears this function is called only from pprust... that's // probably not a good thing. 
pub fn gather_comments_and_literals(span_diagnostic: - @diagnostic::SpanHandler, + &diagnostic::SpanHandler, path: ~str, srdr: &mut io::Reader) - -> (Vec<Comment> , Vec<Literal> ) { + -> (Vec<Comment>, Vec<Literal>) { let src = srdr.read_to_end().unwrap(); let src = str::from_utf8_owned(src).unwrap(); let cm = CodeMap::new(); diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs index 884fc306f22..43e1f8756fa 100644 --- a/src/libsyntax/parse/lexer.rs +++ b/src/libsyntax/parse/lexer.rs @@ -18,6 +18,7 @@ use parse::token::{str_to_ident}; use std::cell::{Cell, RefCell}; use std::char; +use std::rc::Rc; use std::mem::replace; use std::num::from_str_radix; @@ -27,7 +28,7 @@ pub trait Reader { fn is_eof(&self) -> bool; fn next_token(&self) -> TokenAndSpan; fn fatal(&self, ~str) -> !; - fn span_diag(&self) -> @SpanHandler; + fn span_diag<'a>(&'a self) -> &'a SpanHandler; fn peek(&self) -> TokenAndSpan; fn dup(&self) -> ~Reader:; } @@ -38,8 +39,8 @@ pub struct TokenAndSpan { sp: Span, } -pub struct StringReader { - span_diagnostic: @SpanHandler, +pub struct StringReader<'a> { + span_diagnostic: &'a SpanHandler, // The absolute offset within the codemap of the next character to read pos: Cell<BytePos>, // The absolute offset within the codemap of the last character read(curr) @@ -48,36 +49,36 @@ pub struct StringReader { col: Cell<CharPos>, // The last character to be read curr: Cell<Option<char>>, - filemap: @codemap::FileMap, + filemap: Rc<codemap::FileMap>, /* cached: */ peek_tok: RefCell<token::Token>, peek_span: RefCell<Span>, } -impl StringReader { +impl<'a> StringReader<'a> { pub fn curr_is(&self, c: char) -> bool { self.curr.get() == Some(c) } } -pub fn new_string_reader(span_diagnostic: @SpanHandler, - filemap: @codemap::FileMap) - -> StringReader { +pub fn new_string_reader<'a>(span_diagnostic: &'a SpanHandler, + filemap: Rc<codemap::FileMap>) + -> StringReader<'a> { let r = new_low_level_string_reader(span_diagnostic, filemap); string_advance_token(&r); /* fill in peek_* */ r } /* For comments.rs, which hackily pokes into 'pos' and 'curr' */ -pub fn new_low_level_string_reader(span_diagnostic: @SpanHandler, - filemap: @codemap::FileMap) - -> StringReader { +pub fn new_low_level_string_reader<'a>(span_diagnostic: &'a SpanHandler, + filemap: Rc<codemap::FileMap>) + -> StringReader<'a> { // Force the initial reader bump to start on a fresh line let initial_char = '\n'; let r = StringReader { span_diagnostic: span_diagnostic, - pos: Cell::new(filemap.start_pos), - last_pos: Cell::new(filemap.start_pos), + pos: Cell::new(filemap.deref().start_pos), + last_pos: Cell::new(filemap.deref().start_pos), col: Cell::new(CharPos(0)), curr: Cell::new(Some(initial_char)), filemap: filemap, @@ -92,20 +93,20 @@ pub fn new_low_level_string_reader(span_diagnostic: @SpanHandler, // duplicating the string reader is probably a bad idea, in // that using them will cause interleaved pushes of line // offsets to the underlying filemap... -fn dup_string_reader(r: &StringReader) -> StringReader { +fn dup_string_reader<'a>(r: &StringReader<'a>) -> StringReader<'a> { StringReader { span_diagnostic: r.span_diagnostic, pos: Cell::new(r.pos.get()), last_pos: Cell::new(r.last_pos.get()), col: Cell::new(r.col.get()), curr: Cell::new(r.curr.get()), - filemap: r.filemap, + filemap: r.filemap.clone(), peek_tok: r.peek_tok.clone(), peek_span: r.peek_span.clone(), } } -impl Reader for StringReader { +impl<'a> Reader for StringReader<'a> { fn is_eof(&self) -> bool { is_eof(self) } // return the next token. 
EFFECT: advances the string_reader. fn next_token(&self) -> TokenAndSpan { @@ -122,7 +123,7 @@ impl Reader for StringReader { fn fatal(&self, m: ~str) -> ! { self.span_diagnostic.span_fatal(self.peek_span.get(), m) } - fn span_diag(&self) -> @SpanHandler { self.span_diagnostic } + fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.span_diagnostic } fn peek(&self) -> TokenAndSpan { // FIXME(pcwalton): Bad copy! TokenAndSpan { @@ -133,7 +134,7 @@ impl Reader for StringReader { fn dup(&self) -> ~Reader: { ~dup_string_reader(self) as ~Reader: } } -impl Reader for TtReader { +impl<'a> Reader for TtReader<'a> { fn is_eof(&self) -> bool { let cur_tok = self.cur_tok.borrow(); *cur_tok.get() == token::EOF @@ -146,7 +147,7 @@ impl Reader for TtReader { fn fatal(&self, m: ~str) -> ! { self.sp_diag.span_fatal(self.cur_span.get(), m); } - fn span_diag(&self) -> @SpanHandler { self.sp_diag } + fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.sp_diag } fn peek(&self) -> TokenAndSpan { TokenAndSpan { tok: self.cur_tok.get(), @@ -189,7 +190,7 @@ fn fatal_span_verbose(rdr: &StringReader, -> ! { let mut m = m; m.push_str(": "); - let s = rdr.filemap.src.slice( + let s = rdr.filemap.deref().src.slice( byte_offset(rdr, from_pos).to_uint(), byte_offset(rdr, to_pos).to_uint()); m.push_str(s); @@ -218,7 +219,7 @@ fn string_advance_token(r: &StringReader) { } fn byte_offset(rdr: &StringReader, pos: BytePos) -> BytePos { - (pos - rdr.filemap.start_pos) + (pos - rdr.filemap.deref().start_pos) } /// Calls `f` with a string slice of the source text spanning from `start` @@ -240,7 +241,7 @@ fn with_str_from_to<T>( end: BytePos, f: |s: &str| -> T) -> T { - f(rdr.filemap.src.slice( + f(rdr.filemap.deref().src.slice( byte_offset(rdr, start).to_uint(), byte_offset(rdr, end).to_uint())) } @@ -250,21 +251,21 @@ fn with_str_from_to<T>( pub fn bump(rdr: &StringReader) { rdr.last_pos.set(rdr.pos.get()); let current_byte_offset = byte_offset(rdr, rdr.pos.get()).to_uint(); - if current_byte_offset < (rdr.filemap.src).len() { + if current_byte_offset < rdr.filemap.deref().src.len() { assert!(rdr.curr.get().is_some()); let last_char = rdr.curr.get().unwrap(); - let next = rdr.filemap.src.char_range_at(current_byte_offset); + let next = rdr.filemap.deref().src.char_range_at(current_byte_offset); let byte_offset_diff = next.next - current_byte_offset; rdr.pos.set(rdr.pos.get() + Pos::from_uint(byte_offset_diff)); rdr.curr.set(Some(next.ch)); rdr.col.set(rdr.col.get() + CharPos(1u)); if last_char == '\n' { - rdr.filemap.next_line(rdr.last_pos.get()); + rdr.filemap.deref().next_line(rdr.last_pos.get()); rdr.col.set(CharPos(0u)); } if byte_offset_diff > 1 { - rdr.filemap.record_multibyte_char(rdr.last_pos.get(), byte_offset_diff); + rdr.filemap.deref().record_multibyte_char(rdr.last_pos.get(), byte_offset_diff); } } else { rdr.curr.set(None); @@ -275,8 +276,8 @@ pub fn is_eof(rdr: &StringReader) -> bool { } pub fn nextch(rdr: &StringReader) -> Option<char> { let offset = byte_offset(rdr, rdr.pos.get()).to_uint(); - if offset < (rdr.filemap.src).len() { - Some(rdr.filemap.src.char_at(offset)) + if offset < rdr.filemap.deref().src.len() { + Some(rdr.filemap.deref().src.char_at(offset)) } else { None } @@ -334,56 +335,55 @@ fn consume_any_line_comment(rdr: &StringReader) -> Option<TokenAndSpan> { if rdr.curr_is('/') { match nextch(rdr) { - Some('/') => { - bump(rdr); - bump(rdr); - // line comments starting with "///" or "//!" 
are doc-comments - if rdr.curr_is('/') || rdr.curr_is('!') { - let start_bpos = rdr.pos.get() - BytePos(3); - while !rdr.curr_is('\n') && !is_eof(rdr) { - bump(rdr); - } - let ret = with_str_from(rdr, start_bpos, |string| { - // but comments with only more "/"s are not - if !is_line_non_doc_comment(string) { - Some(TokenAndSpan{ - tok: token::DOC_COMMENT(str_to_ident(string)), - sp: codemap::mk_sp(start_bpos, rdr.pos.get()) - }) - } else { - None + Some('/') => { + bump(rdr); + bump(rdr); + // line comments starting with "///" or "//!" are doc-comments + if rdr.curr_is('/') || rdr.curr_is('!') { + let start_bpos = rdr.pos.get() - BytePos(3); + while !rdr.curr_is('\n') && !is_eof(rdr) { + bump(rdr); } - }); + let ret = with_str_from(rdr, start_bpos, |string| { + // but comments with only more "/"s are not + if !is_line_non_doc_comment(string) { + Some(TokenAndSpan{ + tok: token::DOC_COMMENT(str_to_ident(string)), + sp: codemap::mk_sp(start_bpos, rdr.pos.get()) + }) + } else { + None + } + }); - if ret.is_some() { - return ret; + if ret.is_some() { + return ret; + } + } else { + while !rdr.curr_is('\n') && !is_eof(rdr) { bump(rdr); } } - } else { - while !rdr.curr_is('\n') && !is_eof(rdr) { bump(rdr); } + // Restart whitespace munch. + consume_whitespace_and_comments(rdr) } - // Restart whitespace munch. - return consume_whitespace_and_comments(rdr); - } - Some('*') => { bump(rdr); bump(rdr); return consume_block_comment(rdr); } - _ => () + Some('*') => { bump(rdr); bump(rdr); consume_block_comment(rdr) } + _ => None } } else if rdr.curr_is('#') { if nextch_is(rdr, '!') { // I guess this is the only way to figure out if // we're at the beginning of the file... - let cmap = @CodeMap::new(); - { - let mut files = cmap.files.borrow_mut(); - files.get().push(rdr.filemap); - } + let cmap = CodeMap::new(); + cmap.files.borrow_mut().get().push(rdr.filemap.clone()); let loc = cmap.lookup_char_pos_adj(rdr.last_pos.get()); if loc.line == 1u && loc.col == CharPos(0u) { while !rdr.curr_is('\n') && !is_eof(rdr) { bump(rdr); } return consume_whitespace_and_comments(rdr); } } + None + } else { + None } - return None; } pub fn is_block_non_doc_comment(s: &str) -> bool { @@ -1019,7 +1019,7 @@ mod test { let writer = ~util::NullWriter; let emitter = diagnostic::EmitterWriter::new(writer); let handler = diagnostic::mk_handler(~emitter); - let span_handler = diagnostic::mk_span_handler(handler, @cm); + let span_handler = diagnostic::mk_span_handler(handler, cm); Env { string_reader: new_string_reader(span_handler,fm) } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 19291f72101..79fedf82798 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -13,13 +13,13 @@ use ast; use codemap::{Span, CodeMap, FileMap}; -use codemap; use diagnostic::{SpanHandler, mk_span_handler, default_handler}; use parse::attr::ParserAttr; use parse::parser::Parser; use std::cell::RefCell; use std::io::File; +use std::rc::Rc; use std::str; use std::vec_ng::Vec; @@ -40,26 +40,20 @@ pub mod obsolete; // info about a parsing session. pub struct ParseSess { - cm: @codemap::CodeMap, // better be the same as the one in the reader! - span_diagnostic: @SpanHandler, // better be the same as the one in the reader! + span_diagnostic: SpanHandler, // better be the same as the one in the reader! 
/// Used to determine and report recursive mod inclusions - included_mod_stack: RefCell<Vec<Path> >, + included_mod_stack: RefCell<Vec<Path>>, } pub fn new_parse_sess() -> ParseSess { - let cm = @CodeMap::new(); ParseSess { - cm: cm, - span_diagnostic: mk_span_handler(default_handler(), cm), + span_diagnostic: mk_span_handler(default_handler(), CodeMap::new()), included_mod_stack: RefCell::new(Vec::new()), } } -pub fn new_parse_sess_special_handler(sh: @SpanHandler, - cm: @codemap::CodeMap) - -> ParseSess { +pub fn new_parse_sess_special_handler(sh: SpanHandler) -> ParseSess { ParseSess { - cm: cm, span_diagnostic: sh, included_mod_stack: RefCell::new(Vec::new()), } @@ -175,40 +169,36 @@ pub fn parse_tts_from_source_str(name: ~str, // Create a new parser from a source string pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, - cfg: ast::CrateConfig, - name: ~str, - source: ~str) - -> Parser<'a> { - filemap_to_parser(sess,string_to_filemap(sess,source,name),cfg) + cfg: ast::CrateConfig, + name: ~str, + source: ~str) + -> Parser<'a> { + filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg) } /// Create a new parser, handling errors as appropriate /// if the file doesn't exist -pub fn new_parser_from_file<'a>( - sess: &'a ParseSess, - cfg: ast::CrateConfig, - path: &Path -) -> Parser<'a> { - filemap_to_parser(sess,file_to_filemap(sess,path,None),cfg) +pub fn new_parser_from_file<'a>(sess: &'a ParseSess, + cfg: ast::CrateConfig, + path: &Path) -> Parser<'a> { + filemap_to_parser(sess, file_to_filemap(sess, path, None), cfg) } /// Given a session, a crate config, a path, and a span, add /// the file at the given path to the codemap, and return a parser. /// On an error, use the given span as the source of the problem. -pub fn new_sub_parser_from_file<'a>( - sess: &'a ParseSess, - cfg: ast::CrateConfig, - path: &Path, - sp: Span -) -> Parser<'a> { - filemap_to_parser(sess,file_to_filemap(sess,path,Some(sp)),cfg) +pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, + cfg: ast::CrateConfig, + path: &Path, + sp: Span) -> Parser<'a> { + filemap_to_parser(sess, file_to_filemap(sess, path, Some(sp)), cfg) } /// Given a filemap and config, return a parser pub fn filemap_to_parser<'a>(sess: &'a ParseSess, - filemap: @FileMap, + filemap: Rc<FileMap>, cfg: ast::CrateConfig) -> Parser<'a> { - tts_to_parser(sess,filemap_to_tts(sess,filemap),cfg) + tts_to_parser(sess, filemap_to_tts(sess, filemap), cfg) } // must preserve old name for now, because quote! from the *existing* @@ -216,7 +206,7 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess, pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, cfg: ast::CrateConfig, tts: Vec<ast::TokenTree>) -> Parser<'a> { - tts_to_parser(sess,tts,cfg) + tts_to_parser(sess, tts, cfg) } @@ -225,7 +215,7 @@ pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, /// Given a session and a path and an optional span (for error reporting), /// add the path to the session's codemap and return the new filemap. 
pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>) - -> @FileMap { + -> Rc<FileMap> { let err = |msg: &str| { match spanopt { Some(sp) => sess.span_diagnostic.span_fatal(sp, msg), @@ -251,17 +241,17 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>) // given a session and a string, add the string to // the session's codemap and return the new filemap pub fn string_to_filemap(sess: &ParseSess, source: ~str, path: ~str) - -> @FileMap { - sess.cm.new_filemap(path, source) + -> Rc<FileMap> { + sess.span_diagnostic.cm.new_filemap(path, source) } // given a filemap, produce a sequence of token-trees -pub fn filemap_to_tts(sess: &ParseSess, filemap: @FileMap) +pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>) -> Vec<ast::TokenTree> { // it appears to me that the cfg doesn't matter here... indeed, // parsing tt's probably shouldn't require a parser at all. let cfg = Vec::new(); - let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap); + let srdr = lexer::new_string_reader(&sess.span_diagnostic, filemap); let mut p1 = Parser(sess, cfg, ~srdr); p1.parse_all_token_trees() } @@ -270,7 +260,7 @@ pub fn filemap_to_tts(sess: &ParseSess, filemap: @FileMap) pub fn tts_to_parser<'a>(sess: &'a ParseSess, tts: Vec<ast::TokenTree>, cfg: ast::CrateConfig) -> Parser<'a> { - let trdr = lexer::new_tt_reader(sess.span_diagnostic, None, tts); + let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, tts); Parser(sess, cfg, ~trdr) } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index d183eb44cc2..27c86956499 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -4150,7 +4150,7 @@ impl<'a> Parser<'a> { outer_attrs: &[ast::Attribute], id_sp: Span) -> (ast::Item_, Vec<ast::Attribute> ) { - let mut prefix = Path::new(self.sess.cm.span_to_filename(self.span)); + let mut prefix = Path::new(self.sess.span_diagnostic.cm.span_to_filename(self.span)); prefix.pop(); let mod_path = Path::new(".").join_many(self.mod_path_stack.as_slice()); let dir_path = prefix.join(&mod_path); |
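
Taken together, the hunks above replace the `@SpanHandler` and `@FileMap` managed boxes with a borrowed `&SpanHandler` and a reference-counted `Rc<FileMap>`, which is why explicit `.deref()` and `.clone()` calls now appear where `@`'s auto-deref used to do the work. Below is a minimal sketch of that ownership shape, written in present-day Rust syntax with hypothetical stand-in types rather than the real libsyntax definitions:

```rust
use std::rc::Rc;

// Hypothetical stand-ins for the libsyntax types touched here; the
// real SpanHandler and FileMap are considerably richer.
struct FileMap { name: String, src: String }
struct SpanHandler;

impl SpanHandler {
    fn span_err(&self, msg: &str) { eprintln!("error: {}", msg); }
}

// The reader borrows the diagnostic handler for its own lifetime and
// shares the file map through reference counting instead of an `@`-box.
struct StringReader<'a> {
    span_diagnostic: &'a SpanHandler,
    filemap: Rc<FileMap>,
}

impl<'a> StringReader<'a> {
    // Mirrors `fn span_diag<'a>(&'a self) -> &'a SpanHandler` above:
    // callers get a borrow tied to the reader, not a new owning pointer.
    fn span_diag(&self) -> &SpanHandler { self.span_diagnostic }

    // Cloning the Rc handle is cheap; the underlying FileMap is shared,
    // much like `rdr.filemap.clone()` in the diff.
    fn filemap(&self) -> Rc<FileMap> { self.filemap.clone() }
}

fn main() {
    let handler = SpanHandler;
    let fm = Rc::new(FileMap {
        name: "lib.rs".to_string(),
        src: "fn main() {}".to_string(),
    });
    let rdr = StringReader { span_diagnostic: &handler, filemap: fm.clone() };
    rdr.span_diag().span_err("demo only");
    assert_eq!(rdr.filemap().src.len(), fm.src.len());
    println!("{}", rdr.filemap().name);
}
```

The design point is the one the commit makes throughout: the reader no longer owns the diagnostic machinery, it only borrows it for its own lifetime, while the file map can be shared cheaply between the codemap and any number of readers.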

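One further note on the `ParseSess` change: with the `cm` field gone and the handler stored by value, the `SpanHandler` owns the `CodeMap`, and call sites such as `string_to_filemap` and the `parser.rs` hunk reach it through `sess.span_diagnostic.cm`, so there is a single path to the codemap instead of two fields that had to stay in sync. A rough sketch of that ownership chain, again with simplified stand-ins (the real `CodeMap` keeps its file list in a `RefCell` and its filemaps behind `Rc`, which the sketch omits):

```rust
// Simplified stand-ins, not the real libsyntax definitions.
struct CodeMap { files: Vec<String> }
struct SpanHandler { cm: CodeMap }
struct ParseSess { span_diagnostic: SpanHandler }

// Roughly what `new_parse_sess` now does: build the codemap inside the
// handler instead of storing a second pointer to it on the session.
fn new_parse_sess() -> ParseSess {
    ParseSess {
        span_diagnostic: SpanHandler { cm: CodeMap { files: Vec::new() } },
    }
}

// Mirrors call sites such as `sess.span_diagnostic.cm.new_filemap(...)`
// and `self.sess.span_diagnostic.cm.span_to_filename(...)` in the diff.
fn add_file(sess: &mut ParseSess, name: &str) {
    sess.span_diagnostic.cm.files.push(name.to_string());
}

fn main() {
    let mut sess = new_parse_sess();
    add_file(&mut sess, "lib.rs");
    assert_eq!(sess.span_diagnostic.cm.files.len(), 1);
}
```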