Diffstat (limited to 'src/libsyntax')
| -rw-r--r-- | src/libsyntax/ext/pipes.rs     |  67 |
| -rw-r--r-- | src/libsyntax/ext/pipes/mod.rs |  70 |
| -rw-r--r-- | src/libsyntax/parse.rs         | 196 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs     | 198 |
| -rw-r--r-- | src/libsyntax/syntax.rc        |   5 |
5 files changed, 270 insertions, 266 deletions
diff --git a/src/libsyntax/ext/pipes.rs b/src/libsyntax/ext/pipes.rs
deleted file mode 100644
index b4c49b12d59..00000000000
--- a/src/libsyntax/ext/pipes.rs
+++ /dev/null
@@ -1,67 +0,0 @@
-/*! Implementation of proto! extension.
-
-This is frequently called the pipe compiler. It handles code such as...
-
-~~~
-proto! pingpong (
-    ping: send {
-        ping -> pong
-    }
-    pong: recv {
-        pong -> ping
-    }
-)
-~~~
-
-There are several components:
-
- * The parser (libsyntax/ext/pipes/parse_proto.rs)
-   * Responsible for building an AST from a protocol specification.
-
- * The checker (libsyntax/ext/pipes/check.rs)
-   * Basic correctness checking for protocols (i.e. no undefined states, etc.)
-
- * The analyzer (libsyntax/ext/pipes/liveness.rs)
-   * Determines whether the protocol is bounded or unbounded.
-
- * The compiler (libsynatx/ext/pipes/pipec.rs)
-   * Generates a Rust AST from the protocol AST and the results of analysis.
-
-There is more documentation in each of the files referenced above.
-
-FIXME (#3072) - This is still incomplete.
-
-*/
-
-use codemap::span;
-use ext::base::ext_ctxt;
-use ast::tt_delim;
-use parse::lexer::{new_tt_reader, reader};
-use parse::parser::Parser;
-use parse::common::parser_common;
-
-use pipes::parse_proto::proto_parser;
-
-use pipes::proto::{visit, protocol};
-
-fn expand_proto(cx: ext_ctxt, _sp: span, id: ast::ident,
-                tt: ~[ast::token_tree]) -> base::mac_result
-{
-    let sess = cx.parse_sess();
-    let cfg = cx.cfg();
-    let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic,
-                               cx.parse_sess().interner, None, tt);
-    let rdr = tt_rdr as reader;
-    let rust_parser = Parser(sess, cfg, rdr.dup());
-
-    let proto = rust_parser.parse_proto(cx.str_of(id));
-
-    // check for errors
-    visit(proto, cx);
-
-    // do analysis
-    liveness::analyze(proto, cx);
-
-    // compile
-    base::mr_item(proto.compile(cx))
-}
diff --git a/src/libsyntax/ext/pipes/mod.rs b/src/libsyntax/ext/pipes/mod.rs
index 638ccad0143..b064f39eb3a 100644
--- a/src/libsyntax/ext/pipes/mod.rs
+++ b/src/libsyntax/ext/pipes/mod.rs
@@ -1,3 +1,49 @@
+/*! Implementation of proto! extension.
+
+This is frequently called the pipe compiler. It handles code such as...
+
+~~~
+proto! pingpong (
+    ping: send {
+        ping -> pong
+    }
+    pong: recv {
+        pong -> ping
+    }
+)
+~~~
+
+There are several components:
+
+ * The parser (libsyntax/ext/pipes/parse_proto.rs)
+   * Responsible for building an AST from a protocol specification.
+
+ * The checker (libsyntax/ext/pipes/check.rs)
+   * Basic correctness checking for protocols (i.e. no undefined states, etc.)
+
+ * The analyzer (libsyntax/ext/pipes/liveness.rs)
+   * Determines whether the protocol is bounded or unbounded.
+
+ * The compiler (libsynatx/ext/pipes/pipec.rs)
+   * Generates a Rust AST from the protocol AST and the results of analysis.
+
+There is more documentation in each of the files referenced above.
+
+FIXME (#3072) - This is still incomplete.
+
+*/
+
+use codemap::span;
+use ext::base::ext_ctxt;
+use ast::tt_delim;
+use parse::lexer::{new_tt_reader, reader};
+use parse::parser::Parser;
+use parse::common::parser_common;
+
+use pipes::parse_proto::proto_parser;
+
+use pipes::proto::{visit, protocol};
+
 #[legacy_exports]
 mod ast_builder;
 #[legacy_exports]
@@ -10,3 +56,27 @@ mod proto;
 mod check;
 #[legacy_exports]
 mod liveness;
+
+
+fn expand_proto(cx: ext_ctxt, _sp: span, id: ast::ident,
+                tt: ~[ast::token_tree]) -> base::mac_result
+{
+    let sess = cx.parse_sess();
+    let cfg = cx.cfg();
+    let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic,
+                               cx.parse_sess().interner, None, tt);
+    let rdr = tt_rdr as reader;
+    let rust_parser = Parser(sess, cfg, rdr.dup());
+
+    let proto = rust_parser.parse_proto(cx.str_of(id));
+
+    // check for errors
+    visit(proto, cx);
+
+    // do analysis
+    liveness::analyze(proto, cx);
+
+    // compile
+    base::mr_item(proto.compile(cx))
+}
+
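The doc comment moved into ext/pipes/mod.rs describes the pipe compiler only in terms of its internal passes. For orientation, the sketch below shows roughly how user code would consume the `pingpong` protocol once `proto!` has expanded it; the generated names (`init`, `client::ping`) are assumptions based on how the pipe compiler of this period worked and are not shown anywhere in this diff.

~~~
// Hypothetical client-side usage of the module produced by proto! pingpong,
// written in the Rust syntax of this era. All names here are illustrative.
fn client_demo() {
    // init() is assumed to hand back one endpoint per side of the protocol.
    let (client_ep, _server_ep) = pingpong::init();
    // In state `ping` the client sends a ping, moving the session to `pong`.
    let _client_ep = pingpong::client::ping(client_ep);
    // In state `pong` the client would wait for the server's reply before
    // the protocol returns to `ping`; receiving goes through the pipes
    // runtime and is omitted here.
}
~~~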
diff --git a/src/libsyntax/parse.rs b/src/libsyntax/parse.rs
deleted file mode 100644
index 9d243556c03..00000000000
--- a/src/libsyntax/parse.rs
+++ /dev/null
@@ -1,196 +0,0 @@
-//! The main parser interface
-
-#[legacy_exports];
-
-export parser;
-export common;
-export lexer;
-export token;
-export comments;
-export prec;
-export classify;
-export attr;
-
-export parse_sess;
-export new_parse_sess, new_parse_sess_special_handler;
-export next_node_id;
-export new_parser_from_file, new_parser_etc_from_file;
-export new_parser_from_source_str;
-export new_parser_from_tts;
-export new_sub_parser_from_file;
-export parse_crate_from_file, parse_crate_from_crate_file;
-export parse_crate_from_source_str;
-export parse_expr_from_source_str, parse_item_from_source_str;
-export parse_stmt_from_source_str;
-export parse_tts_from_source_str;
-export parse_from_source_str;
-
-use parser::Parser;
-use attr::parser_attr;
-use common::parser_common;
-use ast::node_id;
-use util::interner;
-use diagnostic::{span_handler, mk_span_handler, mk_handler, emitter};
-use lexer::{reader, string_reader};
-use parse::token::{ident_interner, mk_ident_interner};
-use codemap::{span, CodeMap, FileMap, CharPos, BytePos};
-
-type parse_sess = @{
-    cm: @codemap::CodeMap,
-    mut next_id: node_id,
-    span_diagnostic: span_handler,
-    interner: @ident_interner,
-};
-
-fn new_parse_sess(demitter: Option<emitter>) -> parse_sess {
-    let cm = @CodeMap::new();
-    return @{cm: cm,
-             mut next_id: 1,
-             span_diagnostic: mk_span_handler(mk_handler(demitter), cm),
-             interner: mk_ident_interner(),
-            };
-}
-
-fn new_parse_sess_special_handler(sh: span_handler, cm: @codemap::CodeMap)
-    -> parse_sess {
-    return @{cm: cm,
-             mut next_id: 1,
-             span_diagnostic: sh,
-             interner: mk_ident_interner(),
-            };
-}
-
-fn parse_crate_from_file(input: &Path, cfg: ast::crate_cfg,
-                         sess: parse_sess) -> @ast::crate {
-    let p = new_crate_parser_from_file(sess, cfg, input);
-    let r = p.parse_crate_mod(cfg);
-    return r;
-}
-
-fn parse_crate_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
-                               sess: parse_sess) -> @ast::crate {
-    let p = new_parser_from_source_str(sess, cfg, name,
-                                       codemap::FssNone, source);
-    let r = p.parse_crate_mod(cfg);
-    p.abort_if_errors();
-    return r;
-}
-
-fn parse_expr_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
-                              sess: parse_sess) -> @ast::expr {
-    let p = new_parser_from_source_str(sess, cfg, name,
-                                       codemap::FssNone, source);
-    let r = p.parse_expr();
-    p.abort_if_errors();
-    return r;
-}
-
-fn parse_item_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
-                              +attrs: ~[ast::attribute],
-                              sess: parse_sess) -> Option<@ast::item> {
-    let p = new_parser_from_source_str(sess, cfg, name,
-                                       codemap::FssNone, source);
-    let r = p.parse_item(attrs);
-    p.abort_if_errors();
-    return r;
-}
-
-fn parse_stmt_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
-                              +attrs: ~[ast::attribute],
-                              sess: parse_sess) -> @ast::stmt {
-    let p = new_parser_from_source_str(sess, cfg, name,
-                                       codemap::FssNone, source);
-    let r = p.parse_stmt(attrs);
-    p.abort_if_errors();
-    return r;
-}
-
-fn parse_tts_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
-                             sess: parse_sess) -> ~[ast::token_tree] {
-    let p = new_parser_from_source_str(sess, cfg, name,
-                                       codemap::FssNone, source);
-    p.quote_depth += 1u;
-    let r = p.parse_all_token_trees();
-    p.abort_if_errors();
-    return r;
-}
-
-fn parse_from_source_str<T>(f: fn (p: Parser) -> T,
-                            name: ~str, ss: codemap::FileSubstr,
-                            source: @~str, cfg: ast::crate_cfg,
-                            sess: parse_sess)
-    -> T
-{
-    let p = new_parser_from_source_str(sess, cfg, name, ss,
-                                       source);
-    let r = f(p);
-    if !p.reader.is_eof() {
-        p.reader.fatal(~"expected end-of-string");
-    }
-    p.abort_if_errors();
-    move r
-}
-
-fn next_node_id(sess: parse_sess) -> node_id {
-    let rv = sess.next_id;
-    sess.next_id += 1;
-    // ID 0 is reserved for the crate and doesn't actually exist in the AST
-    assert rv != 0;
-    return rv;
-}
-
-fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
-                              +name: ~str, +ss: codemap::FileSubstr,
-                              source: @~str) -> Parser {
-    let filemap = sess.cm.new_filemap_w_substr(name, ss, source);
-    let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap,
-                                        sess.interner);
-    return Parser(sess, cfg, srdr as reader);
-}
-
-fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg,
-                        path: &Path) -> Result<Parser, ~str> {
-    match io::read_whole_file_str(path) {
-      result::Ok(move src) => {
-
-          let filemap = sess.cm.new_filemap(path.to_str(), @move src);
-          let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap,
-                                              sess.interner);
-
-          Ok(Parser(sess, cfg, srdr as reader))
-
-      }
-      result::Err(move e) => Err(move e)
-    }
-}
-
-/// Create a new parser for an entire crate, handling errors as appropriate
-/// if the file doesn't exist
-fn new_crate_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg,
-                              path: &Path) -> Parser {
-    match new_parser_from_file(sess, cfg, path) {
-      Ok(move parser) => move parser,
-      Err(move e) => {
-          sess.span_diagnostic.handler().fatal(e)
-      }
-    }
-}
-
-/// Create a new parser based on a span from an existing parser. Handles
-/// error messages correctly when the file does not exist.
-fn new_sub_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg,
-                            path: &Path, sp: span) -> Parser {
-    match new_parser_from_file(sess, cfg, path) {
-      Ok(move parser) => move parser,
-      Err(move e) => {
-          sess.span_diagnostic.span_fatal(sp, e)
-      }
-    }
-}
-
-fn new_parser_from_tts(sess: parse_sess, cfg: ast::crate_cfg,
-                       tts: ~[ast::token_tree]) -> Parser {
-    let trdr = lexer::new_tt_reader(sess.span_diagnostic, sess.interner,
-                                    None, tts);
-    return Parser(sess, cfg, trdr as reader)
-}
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index a7c5f20fedf..c290e7cf307 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -1,3 +1,40 @@
+//! The main parser interface
+
+#[legacy_exports];
+
+export parser;
+export common;
+export lexer;
+export token;
+export comments;
+export prec;
+export classify;
+export attr;
+
+export parse_sess;
+export new_parse_sess, new_parse_sess_special_handler;
+export next_node_id;
+export new_parser_from_file, new_parser_etc_from_file;
+export new_parser_from_source_str;
+export new_parser_from_tts;
+export new_sub_parser_from_file;
+export parse_crate_from_file, parse_crate_from_crate_file;
+export parse_crate_from_source_str;
+export parse_expr_from_source_str, parse_item_from_source_str;
+export parse_stmt_from_source_str;
+export parse_tts_from_source_str;
+export parse_from_source_str;
+
+use parser::Parser;
+use attr::parser_attr;
+use common::parser_common;
+use ast::node_id;
+use util::interner;
+use diagnostic::{span_handler, mk_span_handler, mk_handler, emitter};
+use lexer::{reader, string_reader};
+use parse::token::{ident_interner, mk_ident_interner};
+use codemap::{span, CodeMap, FileMap, CharPos, BytePos};
+
 #[legacy_exports]
 mod lexer;
@@ -26,3 +63,164 @@ mod classify;
 /// Reporting obsolete syntax
 #[legacy_exports]
 mod obsolete;
+
+
+type parse_sess = @{
+    cm: @codemap::CodeMap,
+    mut next_id: node_id,
+    span_diagnostic: span_handler,
+    interner: @ident_interner,
+};
+
+fn new_parse_sess(demitter: Option<emitter>) -> parse_sess {
+    let cm = @CodeMap::new();
+    return @{cm: cm,
+             mut next_id: 1,
+             span_diagnostic: mk_span_handler(mk_handler(demitter), cm),
+             interner: mk_ident_interner(),
+            };
+}
+
+fn new_parse_sess_special_handler(sh: span_handler, cm: @codemap::CodeMap)
+    -> parse_sess {
+    return @{cm: cm,
+             mut next_id: 1,
+             span_diagnostic: sh,
+             interner: mk_ident_interner(),
+            };
+}
+
+fn parse_crate_from_file(input: &Path, cfg: ast::crate_cfg,
+                         sess: parse_sess) -> @ast::crate {
+    let p = new_crate_parser_from_file(sess, cfg, input);
+    let r = p.parse_crate_mod(cfg);
+    return r;
+}
+
+fn parse_crate_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
+                               sess: parse_sess) -> @ast::crate {
+    let p = new_parser_from_source_str(sess, cfg, name,
+                                       codemap::FssNone, source);
+    let r = p.parse_crate_mod(cfg);
+    p.abort_if_errors();
+    return r;
+}
+
+fn parse_expr_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
+                              sess: parse_sess) -> @ast::expr {
+    let p = new_parser_from_source_str(sess, cfg, name,
+                                       codemap::FssNone, source);
+    let r = p.parse_expr();
+    p.abort_if_errors();
+    return r;
+}
+
+fn parse_item_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
+                              +attrs: ~[ast::attribute],
+                              sess: parse_sess) -> Option<@ast::item> {
+    let p = new_parser_from_source_str(sess, cfg, name,
+                                       codemap::FssNone, source);
+    let r = p.parse_item(attrs);
+    p.abort_if_errors();
+    return r;
+}
+
+fn parse_stmt_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
+                              +attrs: ~[ast::attribute],
+                              sess: parse_sess) -> @ast::stmt {
+    let p = new_parser_from_source_str(sess, cfg, name,
+                                       codemap::FssNone, source);
+    let r = p.parse_stmt(attrs);
+    p.abort_if_errors();
+    return r;
+}
+
+fn parse_tts_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
+                             sess: parse_sess) -> ~[ast::token_tree] {
+    let p = new_parser_from_source_str(sess, cfg, name,
+                                       codemap::FssNone, source);
+    p.quote_depth += 1u;
+    let r = p.parse_all_token_trees();
+    p.abort_if_errors();
+    return r;
+}
+
+fn parse_from_source_str<T>(f: fn (p: Parser) -> T,
+                            name: ~str, ss: codemap::FileSubstr,
+                            source: @~str, cfg: ast::crate_cfg,
+                            sess: parse_sess)
+    -> T
+{
+    let p = new_parser_from_source_str(sess, cfg, name, ss,
+                                       source);
+    let r = f(p);
+    if !p.reader.is_eof() {
+        p.reader.fatal(~"expected end-of-string");
+    }
+    p.abort_if_errors();
+    move r
+}
+
+fn next_node_id(sess: parse_sess) -> node_id {
+    let rv = sess.next_id;
+    sess.next_id += 1;
+    // ID 0 is reserved for the crate and doesn't actually exist in the AST
+    assert rv != 0;
+    return rv;
+}
+
+fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
+                              +name: ~str, +ss: codemap::FileSubstr,
+                              source: @~str) -> Parser {
+    let filemap = sess.cm.new_filemap_w_substr(name, ss, source);
+    let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap,
+                                        sess.interner);
+    return Parser(sess, cfg, srdr as reader);
+}
+
+fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg,
+                        path: &Path) -> Result<Parser, ~str> {
+    match io::read_whole_file_str(path) {
+      result::Ok(move src) => {
+
+          let filemap = sess.cm.new_filemap(path.to_str(), @move src);
+          let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap,
+                                              sess.interner);
+
+          Ok(Parser(sess, cfg, srdr as reader))
+
+      }
+      result::Err(move e) => Err(move e)
+    }
+}
+
+/// Create a new parser for an entire crate, handling errors as appropriate
+/// if the file doesn't exist
+fn new_crate_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg,
+                              path: &Path) -> Parser {
+    match new_parser_from_file(sess, cfg, path) {
+      Ok(move parser) => move parser,
+      Err(move e) => {
+          sess.span_diagnostic.handler().fatal(e)
+      }
+    }
+}
+
+/// Create a new parser based on a span from an existing parser. Handles
+/// error messages correctly when the file does not exist.
+fn new_sub_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg,
+                            path: &Path, sp: span) -> Parser {
+    match new_parser_from_file(sess, cfg, path) {
+      Ok(move parser) => move parser,
+      Err(move e) => {
+          sess.span_diagnostic.span_fatal(sp, e)
+      }
+    }
+}
+
+fn new_parser_from_tts(sess: parse_sess, cfg: ast::crate_cfg,
+                       tts: ~[ast::token_tree]) -> Parser {
+    let trdr = lexer::new_tt_reader(sess.span_diagnostic, sess.interner,
+                                    None, tts);
+    return Parser(sess, cfg, trdr as reader)
+}
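The functions moved into parse/mod.rs are the public parsing entry points of libsyntax. As a quick orientation, a minimal sketch of driving them from client code, in the Rust syntax of this era, might look like the following; the empty `~[]` crate_cfg and the `"<expr>"` name are illustrative assumptions, not taken from this diff.

~~~
// Minimal sketch of the parse interface shown above.
fn demo() {
    // Build a parse session with the default diagnostic emitter.
    let sess = parse::new_parse_sess(None);
    // Parse a standalone expression from an in-memory string; the name is
    // only used for diagnostics, and errors abort via abort_if_errors().
    let _expr = parse::parse_expr_from_source_str(~"<expr>", @~"1 + 2",
                                                  ~[], sess);
    // `_expr` is an @ast::expr ready for later compiler passes.
}
~~~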
diff --git a/src/libsyntax/syntax.rc b/src/libsyntax/syntax.rc
index d5a28a716ec..60036fe7737 100644
--- a/src/libsyntax/syntax.rc
+++ b/src/libsyntax/syntax.rc
@@ -44,7 +44,7 @@ mod util {
     mod interner;
 }
 
-#[merge = "parse/mod.rs"]
+#[path = "parse/mod.rs"]
 mod parse;
 
 mod print {
@@ -118,8 +118,7 @@ mod ext {
     mod source_util;
 
     #[legacy_exports]
-    #[path = "ext/pipes.rs"]
-    #[merge = "ext/pipes/mod.rs"]
+    #[path = "ext/pipes/mod.rs"]
     mod pipes;
 
     #[legacy_exports]
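The syntax.rc hunks swap the `#[merge]` attribute for a plain `#[path]`, now that each module's source lives entirely in its mod.rs file. For readers unfamiliar with the attribute, `#[path]` simply names the file that backs a module declaration, as in this sketch (paths taken from the hunks above; the declarations are shown flattened here rather than nested as in syntax.rc):

~~~
// #[path] tells the compiler which source file backs a module declaration
// when it does not follow the default file layout.
#[path = "parse/mod.rs"]
mod parse;

#[path = "ext/pipes/mod.rs"]
mod pipes;
~~~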
