| Field | Value | Date |
|---|---|---|
| author | Michael Sullivan <sully@msully.net> | 2012-07-11 23:42:26 -0700 |
| committer | Michael Sullivan <sully@msully.net> | 2012-07-12 16:52:26 -0700 |
| commit | 2ea9c8df0f7c9ee72913883128b37d0a80d2f4f6 | |
| tree | b3e4acbf2912f804cb45f87e8819a5e4847ec213 /src/libsyntax | |
| parent | acb86921a62ba01726fd922f55d0176fa6c1df7c | |
Accept prefix notation for writing the types of str/~ and friends.
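The title refers to the pre-1.0 "evec"/"estr" types, whose storage class used to be written with a postfix vstore. Below is a minimal sketch of the equivalence the title implies; it is 2012-era Rust that no modern compiler accepts, the alias names are invented for illustration, and the postfix forms on the right are inferred from the title and the diff rather than stated in them.

```rust
// 2012-era Rust; illustrative sketch only, not compilable today.
// Prefix pointer-style notation (left) is now accepted for types that
// were previously written with a postfix vstore (right).
type owned_str = ~str;    // the type the title writes as str/~
type owned_vec = ~[int];  // previously written [int]/~
type boxed_vec = @[int];  // previously written [int]/@
```

In the libsyntax sources below the accompanying change is mostly mechanical: occurrences of `@str` become `@str/~`, i.e. a managed `@`-box whose string contents now carry an explicit unique vstore.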
Diffstat (limited to 'src/libsyntax')
| File | Lines changed |
|---|---|
| src/libsyntax/ast.rs | 6 |
| src/libsyntax/attr.rs | 8 |
| src/libsyntax/codemap.rs | 6 |
| src/libsyntax/ext/auto_serialize.rs | 25 |
| src/libsyntax/ext/simplext.rs | 2 |
| src/libsyntax/ext/tt/transcribe.rs | 4 |
| src/libsyntax/parse.rs | 18 |
| src/libsyntax/parse/comments.rs | 2 |
| src/libsyntax/parse/eval.rs | 2 |
| src/libsyntax/parse/lexer.rs | 14 |
| src/libsyntax/parse/parser.rs | 31 |
| src/libsyntax/parse/token.rs | 2 |
| src/libsyntax/print/pp.rs | 2 |
13 files changed, 63 insertions, 59 deletions
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index f7e990b1025..10a29453c1e 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -32,7 +32,7 @@ fn deserialize_span<D>(_d: D) -> span {
 type spanned<T> = {node: T, span: span};
 
 #[auto_serialize]
-type ident = @str;
+type ident = @str/~;
 
 // Functions may or may not have names.
 #[auto_serialize]
@@ -427,11 +427,11 @@ type lit = spanned<lit_>;
 
 #[auto_serialize]
 enum lit_ {
-    lit_str(@str),
+    lit_str(@str/~),
     lit_int(i64, int_ty),
     lit_uint(u64, uint_ty),
     lit_int_unsuffixed(i64),
-    lit_float(@str, float_ty),
+    lit_float(@str/~, float_ty),
     lit_nil,
     lit_bool(bool),
 }
diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index 43a6183b4d5..aac0b1f4d76 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -124,7 +124,7 @@ fn get_meta_item_name(meta: @ast::meta_item) -> ast::ident {
  * Gets the string value if the meta_item is a meta_name_value variant
  * containing a string, otherwise none
  */
-fn get_meta_item_value_str(meta: @ast::meta_item) -> option<@str> {
+fn get_meta_item_value_str(meta: @ast::meta_item) -> option<@str/~> {
     alt meta.node {
       ast::meta_name_value(_, v) {
         alt v.node {
@@ -154,7 +154,7 @@ fn get_meta_item_list(meta: @ast::meta_item) -> option<~[@ast::meta_item]> {
  */
 fn get_name_value_str_pair(
     item: @ast::meta_item
-) -> option<(ast::ident, @str)> {
+) -> option<(ast::ident, @str/~)> {
     alt attr::get_meta_item_value_str(item) {
       some(value) {
         let name = attr::get_meta_item_name(item);
@@ -239,7 +239,7 @@ fn attrs_contains_name(attrs: ~[ast::attribute], +name: str) -> bool {
 }
 
 fn first_attr_value_str_by_name(attrs: ~[ast::attribute], +name: str)
-    -> option<@str> {
+    -> option<@str/~> {
     let mattrs = find_attrs_by_name(attrs, name);
     if vec::len(mattrs) > 0u {
         ret get_meta_item_value_str(attr_meta(mattrs[0]));
@@ -258,7 +258,7 @@ fn last_meta_item_by_name(
 
 fn last_meta_item_value_str_by_name(
     items: ~[@ast::meta_item], +name: str
-) -> option<@str> {
+) -> option<@str/~> {
     alt last_meta_item_by_name(items, name) {
       some(item) {
         alt attr::get_meta_item_value_str(item) {
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index 3daf9106b06..867d2d7e45a 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -45,7 +45,7 @@ enum file_substr {
 }
 
 type filemap =
-    @{name: filename, substr: file_substr, src: @str,
+    @{name: filename, substr: file_substr, src: @str/~,
       start_pos: file_pos, mut lines: ~[file_pos]};
 
 type codemap = @{files: dvec<filemap>};
@@ -55,7 +55,7 @@ type loc = {file: filemap, line: uint, col: uint};
 fn new_codemap() -> codemap { @{files: dvec()} }
 
 fn new_filemap_w_substr(+filename: filename, +substr: file_substr,
-                        src: @str,
+                        src: @str/~,
                         start_pos_ch: uint, start_pos_byte: uint)
     -> filemap {
     ret @{name: filename, substr: substr, src: src,
@@ -63,7 +63,7 @@ fn new_filemap_w_substr(+filename: filename, +substr: file_substr,
           mut lines: ~[{ch: start_pos_ch, byte: start_pos_byte}]};
 }
 
-fn new_filemap(+filename: filename, src: @str,
+fn new_filemap(+filename: filename, src: @str/~,
                start_pos_ch: uint, start_pos_byte: uint)
     -> filemap {
     ret new_filemap_w_substr(filename, fss_none, src,
diff --git a/src/libsyntax/ext/auto_serialize.rs b/src/libsyntax/ext/auto_serialize.rs
index 0de49367b17..e32ebf4eae5 100644
--- a/src/libsyntax/ext/auto_serialize.rs
+++ b/src/libsyntax/ext/auto_serialize.rs
@@ -218,7 +218,7 @@ impl helpers for ext_ctxt {
                 ast::expr_alt(v, arms, ast::alt_exhaustive)))
     }
 
-    fn lit_str(span: span, s: @str) -> @ast::expr {
+    fn lit_str(span: span, s: @str/~) -> @ast::expr {
         self.expr(
             span,
             ast::expr_lit(
@@ -343,8 +343,19 @@ fn ser_lambda(cx: ext_ctxt, tps: ser_tps_map, ty: @ast::ty,
     cx.lambda(cx.blk(ty.span, ser_ty(cx, tps, ty, s, v)))
 }
 
+fn is_vec_or_str(ty: @ast::ty) -> bool {
+    alt ty.node {
+      ast::ty_vec(_) { true }
+      // This may be wrong if the user has shadowed (!) str
+      ast::ty_path(@{span: _, global: _, idents: ids,
+                     rp: none, types: _}, _)
+      if ids == ~[@"str"] { true }
+      _ { false }
+    }
+}
+
 fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
-          ty: @ast::ty, -s: @ast::expr, -v: @ast::expr)
+          ty: @ast::ty, -s: @ast::expr, -v: @ast::expr)
     -> ~[@ast::stmt] {
 
     let ext_cx = cx; // required for #ast{}
@@ -365,6 +376,11 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
         ~[#ast(stmt){$(s).emit_box($(l));}]
       }
 
+      // For unique evecs/estrs, just pass through to underlying vec or str
+      ast::ty_uniq(mt) if is_vec_or_str(mt.ty) {
+        ser_ty(cx, tps, mt.ty, s, v)
+      }
+
       ast::ty_uniq(mt) {
         let l = ser_lambda(cx, tps, mt.ty, cx.clone(s), #ast{ *$(v) });
         ~[#ast(stmt){$(s).emit_uniq($(l));}]
@@ -612,6 +628,11 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map,
         #ast{ @$(d).read_box($(l)) }
       }
 
+      // For unique evecs/estrs, just pass through to underlying vec or str
+      ast::ty_uniq(mt) if is_vec_or_str(mt.ty) {
+        deser_ty(cx, tps, mt.ty, d)
+      }
+
       ast::ty_uniq(mt) {
         let l = deser_lambda(cx, tps, mt.ty, cx.clone(d));
         #ast{ ~$(d).read_uniq($(l)) }
diff --git a/src/libsyntax/ext/simplext.rs b/src/libsyntax/ext/simplext.rs
index c9355f05e38..0d415ccfc43 100644
--- a/src/libsyntax/ext/simplext.rs
+++ b/src/libsyntax/ext/simplext.rs
@@ -679,7 +679,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
                      _body: ast::mac_body) -> base::macro_def {
     let args = get_mac_args_no_max(cx, sp, arg, 0u, "macro");
 
-    let mut macro_name: option<@str> = none;
+    let mut macro_name: option<@str/~> = none;
     let mut clauses: ~[@clause] = ~[];
     for args.each |arg| {
         alt arg.node {
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 8924c5820a9..113fdea42e4 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -24,7 +24,7 @@ type tt_frame = @{
 
 type tt_reader = @{
     sp_diag: span_handler,
-    interner: @interner<@str>,
+    interner: @interner<@str/~>,
     mut cur: tt_frame,
     /* for MBE-style macro transcription */
     interpolations: std::map::hashmap<ident, @arb_depth>,
@@ -38,7 +38,7 @@ type tt_reader = @{
 /** This can do Macro-By-Example transcription. On the other hand, if
  * `src` contains no `tt_dotdotdot`s and `tt_interpolate`s, `interp` can (and
  * should) be none. */
-fn new_tt_reader(sp_diag: span_handler, itr: @interner<@str>,
+fn new_tt_reader(sp_diag: span_handler, itr: @interner<@str/~>,
                  interp: option<std::map::hashmap<ident,@arb_depth>>,
                  src: ~[ast::token_tree]) -> tt_reader {
 
diff --git a/src/libsyntax/parse.rs b/src/libsyntax/parse.rs
index 4d6bd53d8df..7ccc9c5a438 100644
--- a/src/libsyntax/parse.rs
+++ b/src/libsyntax/parse.rs
@@ -25,7 +25,7 @@ type parse_sess = @{
     cm: codemap::codemap,
     mut next_id: node_id,
     span_diagnostic: span_handler,
-    interner: @interner::interner<@str>,
+    interner: @interner::interner<@str/~>,
     // these two must be kept up to date
     mut chpos: uint,
     mut byte_pos: uint
@@ -36,7 +36,7 @@ fn new_parse_sess(demitter: option<emitter>) -> parse_sess {
     ret @{cm: cm,
           mut next_id: 1,
           span_diagnostic: mk_span_handler(mk_handler(demitter), cm),
-          interner: @interner::mk::<@str>(|x| str::hash(*x),
+          interner: @interner::mk::<@str/~>(|x| str::hash(*x),
                                           |x,y| str::eq(*x, *y)),
           mut chpos: 0u, mut byte_pos: 0u};
 }
@@ -46,7 +46,7 @@ fn new_parse_sess_special_handler(sh: span_handler, cm: codemap::codemap)
     ret @{cm: cm,
           mut next_id: 1,
           span_diagnostic: sh,
-          interner: @interner::mk::<@str>(|x| str::hash(*x),
+          interner: @interner::mk::<@str/~>(|x| str::hash(*x),
                                           |x,y| str::eq(*x, *y)),
           mut chpos: 0u, mut byte_pos: 0u};
 }
@@ -97,7 +97,7 @@ fn parse_crate_from_source_file(input: str, cfg: ast::crate_cfg,
     ret r;
 }
 
-fn parse_crate_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
+fn parse_crate_from_source_str(name: str, source: @str/~, cfg: ast::crate_cfg,
                                sess: parse_sess) -> @ast::crate {
     let (p, rdr) = new_parser_etc_from_source_str(sess, cfg, name,
                                                   codemap::fss_none, source);
@@ -107,7 +107,7 @@ fn parse_crate_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
     ret r;
 }
 
-fn parse_expr_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
+fn parse_expr_from_source_str(name: str, source: @str/~, cfg: ast::crate_cfg,
                               sess: parse_sess) -> @ast::expr {
     let (p, rdr) = new_parser_etc_from_source_str(sess, cfg, name,
                                                   codemap::fss_none, source);
@@ -117,7 +117,7 @@ fn parse_expr_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
     ret r;
 }
 
-fn parse_item_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
+fn parse_item_from_source_str(name: str, source: @str/~, cfg: ast::crate_cfg,
                               +attrs: ~[ast::attribute],
                               vis: ast::visibility,
                               sess: parse_sess) -> option<@ast::item> {
@@ -131,7 +131,7 @@ fn parse_item_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
 
 fn parse_from_source_str<T>(f: fn (p: parser) -> T,
                             name: str, ss: codemap::file_substr,
-                            source: @str, cfg: ast::crate_cfg,
+                            source: @str/~, cfg: ast::crate_cfg,
                             sess: parse_sess)
     -> T
 {
@@ -156,7 +156,7 @@ fn next_node_id(sess: parse_sess) -> node_id {
 
 fn new_parser_etc_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
                                   +name: str, +ss: codemap::file_substr,
-                                  source: @str) -> (parser, string_reader) {
+                                  source: @str/~) -> (parser, string_reader) {
     let ftype = parser::SOURCE_FILE;
     let filemap = codemap::new_filemap_w_substr
         (name, ss, source, sess.chpos, sess.byte_pos);
@@ -168,7 +168,7 @@ fn new_parser_etc_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
 
 fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
                               +name: str, +ss: codemap::file_substr,
-                              source: @str) -> parser {
+                              source: @str/~) -> parser {
     let (p, _) = new_parser_etc_from_source_str(sess, cfg, name, ss, source);
     ret p;
 }
diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs
index e05887d28b4..aa3e808f63e 100644
--- a/src/libsyntax/parse/comments.rs
+++ b/src/libsyntax/parse/comments.rs
@@ -275,7 +275,7 @@ fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
                                 srdr: io::reader)
     -> {cmnts: ~[cmnt], lits: ~[lit]} {
     let src = @str::from_bytes(srdr.read_whole_stream());
-    let itr = @interner::mk::<@str>(
+    let itr = @interner::mk::<@str/~>(
         |x| str::hash(*x),
         |x,y| str::eq(*x, *y)
     );
diff --git a/src/libsyntax/parse/eval.rs b/src/libsyntax/parse/eval.rs
index 883aedb75a6..54a2abf20c9 100644
--- a/src/libsyntax/parse/eval.rs
+++ b/src/libsyntax/parse/eval.rs
@@ -78,7 +78,7 @@ fn parse_companion_mod(cx: ctx, prefix: str, suffix: option<str>)
     }
 }
 
-fn cdir_path_opt(id: ast::ident, attrs: ~[ast::attribute]) -> @str {
+fn cdir_path_opt(id: ast::ident, attrs: ~[ast::attribute]) -> @str/~ {
     alt ::attr::first_attr_value_str_by_name(attrs, "path") {
       some(d) {
         ret d;
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index b3ba35dea2e..d7f9fc12840 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -14,20 +14,20 @@ iface reader {
     fn next_token() -> {tok: token::token, sp: span};
     fn fatal(str) -> !;
     fn span_diag() -> span_handler;
-    fn interner() -> @interner<@str>;
+    fn interner() -> @interner<@str/~>;
     fn peek() -> {tok: token::token, sp: span};
     fn dup() -> reader;
 }
 
 type string_reader = @{
     span_diagnostic: span_handler,
-    src: @str,
+    src: @str/~,
     mut col: uint, mut pos: uint,
     mut curr: char,
     mut chpos: uint,
     filemap: codemap::filemap,
-    interner: @interner<@str>,
+    interner: @interner<@str/~>,
     /* cached: */
     mut peek_tok: token::token,
     mut peek_span: span
@@ -35,7 +35,7 @@ type string_reader = @{
 
 fn new_string_reader(span_diagnostic: span_handler,
                      filemap: codemap::filemap,
-                     itr: @interner<@str>) -> string_reader {
+                     itr: @interner<@str/~>) -> string_reader {
     let r = new_low_level_string_reader(span_diagnostic, filemap, itr);
     string_advance_token(r); /* fill in peek_* */
     ret r;
@@ -44,7 +44,7 @@ fn new_string_reader(span_diagnostic: span_handler,
 /* For comments.rs, which hackily pokes into 'pos' and 'curr' */
 fn new_low_level_string_reader(span_diagnostic: span_handler,
                                filemap: codemap::filemap,
-                               itr: @interner<@str>)
+                               itr: @interner<@str/~>)
     -> string_reader {
     let r = @{span_diagnostic: span_diagnostic, src: filemap.src,
               mut col: 0u, mut pos: 0u, mut curr: -1 as char,
@@ -79,7 +79,7 @@ impl string_reader_as_reader of reader for string_reader {
         self.span_diagnostic.span_fatal(copy self.peek_span, m)
     }
     fn span_diag() -> span_handler { self.span_diagnostic }
-    fn interner() -> @interner<@str> { self.interner }
+    fn interner() -> @interner<@str/~> { self.interner }
     fn peek() -> {tok: token::token, sp: span} {
         {tok: self.peek_tok, sp: self.peek_span}
     }
@@ -101,7 +101,7 @@ impl tt_reader_as_reader of reader for tt_reader {
         self.sp_diag.span_fatal(copy self.cur_span, m);
     }
     fn span_diag() -> span_handler { self.sp_diag }
-    fn interner() -> @interner<@str> { self.interner }
+    fn interner() -> @interner<@str/~> { self.interner }
     fn peek() -> {tok: token::token, sp: span} {
         { tok: self.cur_tok, sp: self.cur_span }
     }
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index f4681ba2619..9d8458c9c60 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -230,7 +230,7 @@ class parser {
     fn warn(m: str) {
         self.sess.span_diagnostic.span_warn(copy self.span, m)
     }
-    fn get_str(i: token::str_num) -> @str {
+    fn get_str(i: token::str_num) -> @str/~ {
         interner::get(*self.reader.interner(), i)
     }
     fn get_id() -> node_id { next_node_id(self.sess) }
@@ -394,7 +394,7 @@ class parser {
         }
     }
 
-    fn region_from_name(s: option<@str>) -> @region {
+    fn region_from_name(s: option<@str/~>) -> @region {
         let r = alt s {
           some (string) { re_named(string) }
           none { re_anon }
@@ -461,22 +461,10 @@ class parser {
           }
         } else if self.token == token::AT {
             self.bump();
-            // HACK: turn @[...] into a @-evec
-            alt self.parse_mt() {
-              {ty: t @ @{node: ty_vec(_), _}, mutbl: m_imm} {
-                ty_vstore(t, vstore_box)
-              }
-              mt { ty_box(mt) }
-            }
+            ty_box(self.parse_mt())
         } else if self.token == token::TILDE {
             self.bump();
-            // HACK: turn ~[...] into a ~-evec
-            alt self.parse_mt() {
-              {ty: t @ @{node: ty_vec(_), _}, mutbl: m_imm} {
-                ty_vstore(t, vstore_uniq)
-              }
-              mt { ty_uniq(mt) }
-            }
+            ty_uniq(self.parse_mt())
         } else if self.token == token::BINOP(token::STAR) {
             self.bump();
             ty_ptr(self.parse_mt())
@@ -506,13 +494,8 @@ class parser {
         } else if self.token == token::BINOP(token::AND) {
             self.bump();
             let region = self.parse_region_dot();
-            // HACK: turn &a.[...] into a &a-evec
-            alt self.parse_mt() {
-              {ty: t @ @{node: ty_vec(_), _}, mutbl: m_imm} {
-                ty_vstore(t, vstore_slice(region))
-              }
-              mt { ty_rptr(region, mt) }
-            }
+            let mt = self.parse_mt();
+            ty_rptr(region, mt)
         } else if self.eat_keyword("pure") {
             self.parse_ty_fn(ast::pure_fn)
         } else if self.eat_keyword("unsafe") {
@@ -2742,7 +2725,7 @@ class parser {
                       config: self.cfg});
     }
 
-    fn parse_str() -> @str {
+    fn parse_str() -> @str/~ {
         alt copy self.token {
           token::LIT_STR(s) { self.bump(); self.get_str(s) }
           _ {
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index bcfb9289086..2c7b14cfe11 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -115,7 +115,7 @@ fn binop_to_str(o: binop) -> str {
     }
 }
 
-fn to_str(in: interner<@str>, t: token) -> str {
+fn to_str(in: interner<@str/~>, t: token) -> str {
     alt t {
       EQ { "=" }
       LT { "<" }
diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs
index 58f5ac85d48..09754d27e67 100644
--- a/src/libsyntax/print/pp.rs
+++ b/src/libsyntax/print/pp.rs
@@ -59,7 +59,7 @@ type break_t = {offset: int, blank_space: int};
 
 type begin_t = {offset: int, breaks: breaks};
 
-enum token { STRING(@str, int), BREAK(break_t), BEGIN(begin_t), END, EOF, }
+enum token { STRING(@str/~, int), BREAK(break_t), BEGIN(begin_t), END, EOF, }
 
 fn tok_str(++t: token) -> str {
     alt t {
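Taken together, the parser.rs and auto_serialize.rs hunks above mean that a type written with the prefix notation now parses as a plain `ty_uniq`/`ty_box`/`ty_rptr` node wrapping a `ty_vec` (the `ty_vstore` special cases are gone), and the new `is_vec_or_str` guard routes a unique vec or `str` to the underlying vec/str (de)serializers. A rough sketch in the same 2012-era syntax as the diff, not compilable today; the record and its field names are hypothetical, not from the commit:

```rust
// 2012-era Rust, matching the syntax in the diff above; illustrative only.
// `example` and its fields are invented names for this sketch.
#[auto_serialize]
type example = {
    label: @str/~,   // managed box of a unique string, as in ast.rs's ident
    coords: ~[int]   // prefix form: parses as ty_uniq around ty_vec(int) and
                     // is serialized via the underlying vec serializer
};
```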
