diff options
| author | bors <bors@rust-lang.org> | 2017-07-05 21:16:34 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2017-07-05 21:16:34 +0000 |
| commit | 4d526e0d14b43a87627cd6aca6c6f71ad1e07b6e (patch) | |
| tree | 93ba1e7852501c25569fb7f614b884052207afb8 /src/libsyntax/parse | |
| parent | 3610a70ce488953c5b0379fece70f2baad30a825 (diff) | |
| parent | 78fdbfc4008b52bcce201fd589ed84d2abb0419d (diff) | |
| download | rust-4d526e0d14b43a87627cd6aca6c6f71ad1e07b6e.tar.gz rust-4d526e0d14b43a87627cd6aca6c6f71ad1e07b6e.zip | |
Auto merge of #40939 - jseyfried:proc_macro_api, r=nrc
proc_macro: implement `TokenTree`, `TokenKind`, hygienic `quote!`, and other API
All new API is gated behind `#![feature(proc_macro)]` and may be used with `#[proc_macro]`, `#[proc_macro_attribute]`, and `#[proc_macro_derive]` procedural macros.
More specifically, this PR adds the following in `proc_macro`:
```rust
// `TokenStream` constructors:
impl TokenStream { fn empty() -> TokenStream { ... } }
impl From<TokenTree> for TokenStream { ... }
impl From<TokenKind> for TokenStream { ... }
impl<T: Into<TokenStream>> FromIterator<T> for TokenStream { ... }
macro quote($($t:tt)*) { ... } // A hygienic `TokenStream` quoter
// `TokenStream` destructuring:
impl TokenStream { fn is_empty(&self) -> bool { ... } }
impl IntoIterator for TokenStream { type Item = TokenTree; ... }
struct TokenTree { span: Span, kind: TokenKind }
impl From<TokenKind> for TokenTree { ... }
impl Display for TokenTree { ... }
struct Span { ... } // a region of source code along with expansion/hygiene information
impl Default for Span { ... } // a span from the current procedural macro definition
impl Span { fn call_site() -> Span { ... } } // the call site of the current expansion
fn quote_span(span: Span) -> TokenStream;
enum TokenKind {
Group(Delimiter, TokenStream), // A delimited sequence, e.g. `( ... )`
Term(Term), // a unicode identifier, lifetime ('a), or underscore
Op(char, Spacing), // a punctuation character (`+`, `,`, `$`, etc.).
Literal(Literal), // a literal character (`'a'`), string (`"hello"`), or number (`2.3`)
}
enum Delimiter {
Parenthesis, // `( ... )`
Brace, // `{ ... }`
Bracket, // `[ ... ]`
None, // an implicit delimiter, e.g. `$var`, where $var is `...`.
}
struct Term { ... } // An interned string
impl Term {
fn intern(string: &str) -> Term { ... }
fn as_str(&self) -> &str { ... }
}
enum Spacing {
Alone, // not immediately followed by another `Op`, e.g. `+` in `+ =`.
Joint, // immediately followed by another `Op`, e.g. `+` in `+=`
}
struct Literal { ... }
impl Display for Literal { ... }
impl Literal {
fn integer(n: i128) -> Literal { ... } // unsuffixed integer literal
fn float(n: f64) -> Literal { ... } // unsuffixed floating point literal
fn u8(n: u8) -> Literal { ... } // similarly: i8, u16, i16, u32, i32, u64, i64, f32, f64
fn string(string: &str) -> Literal { ... }
fn character(ch: char) -> Literal { ... }
fn byte_string(bytes: &[u8]) -> Literal { ... }
}
```
For details on `quote!` hygiene, see [this example](https://github.com/rust-lang/rust/pull/40939/commits/20a90485c040df87a667e9b6ee38e4d8a7d7fc5d) and [declarative macros 2.0](https://github.com/rust-lang/rust/pull/40847).
r? @nrc
Diffstat (limited to 'src/libsyntax/parse')
| -rw-r--r-- | src/libsyntax/parse/attr.rs | 4 | ||||
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 44 | ||||
| -rw-r--r-- | src/libsyntax/parse/lexer/tokentrees.rs | 12 | ||||
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 15 | ||||
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 19 | ||||
| -rw-r--r-- | src/libsyntax/parse/token.rs | 143 |
6 files changed, 185 insertions, 52 deletions
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 082930777e5..c99a09ab24e 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -151,7 +151,7 @@ impl<'a> Parser<'a> { pub fn parse_path_and_tokens(&mut self) -> PResult<'a, (ast::Path, TokenStream)> { let meta = match self.token { - token::Interpolated(ref nt) => match **nt { + token::Interpolated(ref nt) => match nt.0 { Nonterminal::NtMeta(ref meta) => Some(meta.clone()), _ => None, }, @@ -223,7 +223,7 @@ impl<'a> Parser<'a> { /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ; pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> { let nt_meta = match self.token { - token::Interpolated(ref nt) => match **nt { + token::Interpolated(ref nt) => match nt.0 { token::NtMeta(ref e) => Some(e.clone()), _ => None, }, diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index a35b278a4b0..09cdf26bf1f 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -66,14 +66,15 @@ pub struct StringReader<'a> { token: token::Token, span: Span, open_braces: Vec<(token::DelimToken, Span)>, -} - -fn mk_sp(lo: BytePos, hi: BytePos) -> Span { - Span { lo: lo, hi: hi, ctxt: NO_EXPANSION } + pub override_span: Option<Span>, } impl<'a> StringReader<'a> { - fn next_token(&mut self) -> TokenAndSpan { + fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span { + unwrap_or!(self.override_span, Span { lo: lo, hi: hi, ctxt: NO_EXPANSION}) + } + + fn next_token(&mut self) -> TokenAndSpan where Self: Sized { let res = self.try_next_token(); self.unwrap_or_abort(res) } @@ -175,6 +176,7 @@ impl<'a> StringReader<'a> { token: token::Eof, span: syntax_pos::DUMMY_SP, open_braces: Vec::new(), + override_span: None, } } @@ -229,12 +231,12 @@ impl<'a> StringReader<'a> { /// Report a fatal error spanning [`from_pos`, `to_pos`). 
fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> FatalError { - self.fatal_span(mk_sp(from_pos, to_pos), m) + self.fatal_span(self.mk_sp(from_pos, to_pos), m) } /// Report a lexical error spanning [`from_pos`, `to_pos`). fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) { - self.err_span(mk_sp(from_pos, to_pos), m) + self.err_span(self.mk_sp(from_pos, to_pos), m) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -258,7 +260,7 @@ impl<'a> StringReader<'a> { for c in c.escape_default() { m.push(c) } - self.sess.span_diagnostic.struct_span_fatal(mk_sp(from_pos, to_pos), &m[..]) + self.sess.span_diagnostic.struct_span_fatal(self.mk_sp(from_pos, to_pos), &m[..]) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -282,7 +284,7 @@ impl<'a> StringReader<'a> { for c in c.escape_default() { m.push(c) } - self.sess.span_diagnostic.struct_span_err(mk_sp(from_pos, to_pos), &m[..]) + self.sess.span_diagnostic.struct_span_err(self.mk_sp(from_pos, to_pos), &m[..]) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the @@ -306,11 +308,11 @@ impl<'a> StringReader<'a> { None => { if self.is_eof() { self.peek_tok = token::Eof; - self.peek_span = mk_sp(self.filemap.end_pos, self.filemap.end_pos); + self.peek_span = self.mk_sp(self.filemap.end_pos, self.filemap.end_pos); } else { let start_bytepos = self.pos; self.peek_tok = self.next_token_inner()?; - self.peek_span = mk_sp(start_bytepos, self.pos); + self.peek_span = self.mk_sp(start_bytepos, self.pos); }; } } @@ -481,7 +483,7 @@ impl<'a> StringReader<'a> { self.with_str_from(start, |string| { if string == "_" { self.sess.span_diagnostic - .struct_span_warn(mk_sp(start, self.pos), + .struct_span_warn(self.mk_sp(start, self.pos), "underscore literal suffix is not allowed") .warn("this was previously accepted by the compiler but is \ being phased out; it will become a hard error in \ @@ -502,7 +504,7 @@ impl<'a> 
StringReader<'a> { if let Some(c) = self.ch { if c.is_whitespace() { let msg = "called consume_any_line_comment, but there was whitespace"; - self.sess.span_diagnostic.span_err(mk_sp(self.pos, self.pos), msg); + self.sess.span_diagnostic.span_err(self.mk_sp(self.pos, self.pos), msg); } } @@ -545,13 +547,13 @@ impl<'a> StringReader<'a> { Some(TokenAndSpan { tok: tok, - sp: mk_sp(start_bpos, self.pos), + sp: self.mk_sp(start_bpos, self.pos), }) }) } else { Some(TokenAndSpan { tok: token::Comment, - sp: mk_sp(start_bpos, self.pos), + sp: self.mk_sp(start_bpos, self.pos), }) } } @@ -584,7 +586,7 @@ impl<'a> StringReader<'a> { } return Some(TokenAndSpan { tok: token::Shebang(self.name_from(start)), - sp: mk_sp(start, self.pos), + sp: self.mk_sp(start, self.pos), }); } } @@ -612,7 +614,7 @@ impl<'a> StringReader<'a> { } let c = Some(TokenAndSpan { tok: token::Whitespace, - sp: mk_sp(start_bpos, self.pos), + sp: self.mk_sp(start_bpos, self.pos), }); debug!("scanning whitespace: {:?}", c); c @@ -674,7 +676,7 @@ impl<'a> StringReader<'a> { Some(TokenAndSpan { tok: tok, - sp: mk_sp(start_bpos, self.pos), + sp: self.mk_sp(start_bpos, self.pos), }) }) } @@ -869,7 +871,7 @@ impl<'a> StringReader<'a> { let valid = if self.ch_is('{') { self.scan_unicode_escape(delim) && !ascii_only } else { - let span = mk_sp(start, self.pos); + let span = self.mk_sp(start, self.pos); self.sess.span_diagnostic .struct_span_err(span, "incorrect unicode escape sequence") .span_help(span, @@ -907,13 +909,13 @@ impl<'a> StringReader<'a> { }, c); if e == '\r' { - err.span_help(mk_sp(escaped_pos, pos), + err.span_help(self.mk_sp(escaped_pos, pos), "this is an isolated carriage return; consider \ checking your editor and version control \ settings"); } if (e == '{' || e == '}') && !ascii_only { - err.span_help(mk_sp(escaped_pos, pos), + err.span_help(self.mk_sp(escaped_pos, pos), "if used in a formatting string, curly braces \ are escaped with `{{` and `}}`"); } diff --git 
a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index 554a1fcfc71..63a396c14db 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -19,7 +19,9 @@ impl<'a> StringReader<'a> { pub fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> { let mut tts = Vec::new(); while self.token != token::Eof { - tts.push(self.parse_token_tree()?.into()); + let tree = self.parse_token_tree()?; + let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token); + tts.push(if is_joint { tree.joint() } else { tree.into() }); } Ok(TokenStream::concat(tts)) } @@ -31,13 +33,15 @@ impl<'a> StringReader<'a> { if let token::CloseDelim(..) = self.token { return TokenStream::concat(tts); } - match self.parse_token_tree() { - Ok(tt) => tts.push(tt.into()), + let tree = match self.parse_token_tree() { + Ok(tree) => tree, Err(mut e) => { e.emit(); return TokenStream::concat(tts); } - } + }; + let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token); + tts.push(if is_joint { tree.joint() } else { tree.into() }); } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 3a68a6ba764..bd9a621c00c 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -141,9 +141,10 @@ pub fn parse_stmt_from_source_str(name: String, source: String, sess: &ParseSess new_parser_from_source_str(sess, name, source).parse_stmt() } -pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess) - -> TokenStream { - filemap_to_stream(sess, sess.codemap().new_filemap(name, source)) +pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess, + override_span: Option<Span>) + -> TokenStream { + filemap_to_stream(sess, sess.codemap().new_filemap(name, source), override_span) } // Create a new parser from a source string @@ -177,7 +178,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, /// Given a filemap and 
config, return a parser pub fn filemap_to_parser(sess: & ParseSess, filemap: Rc<FileMap>, ) -> Parser { let end_pos = filemap.end_pos; - let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap)); + let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap, None)); if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP { parser.span = Span { lo: end_pos, hi: end_pos, ctxt: NO_EXPANSION }; @@ -212,8 +213,10 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>) } /// Given a filemap, produce a sequence of token-trees -pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>) -> TokenStream { +pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>, override_span: Option<Span>) + -> TokenStream { let mut srdr = lexer::StringReader::new(sess, filemap); + srdr.override_span = override_span; srdr.real_token(); panictry!(srdr.parse_all_token_trees()) } @@ -684,7 +687,7 @@ mod tests { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::Path(None, ast::Path { span: sp(0, 6), - segments: vec![ast::PathSegment::crate_root(), + segments: vec![ast::PathSegment::crate_root(sp(0, 2)), str2seg("a", 2, 3), str2seg("b", 5, 6)] }), diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 64506c4af46..c248e20b608 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -107,7 +107,7 @@ pub enum BlockMode { macro_rules! maybe_whole_expr { ($p:expr) => { if let token::Interpolated(nt) = $p.token.clone() { - match *nt { + match nt.0 { token::NtExpr(ref e) => { $p.bump(); return Ok((*e).clone()); @@ -134,7 +134,7 @@ macro_rules! maybe_whole_expr { macro_rules! 
maybe_whole { ($p:expr, $constructor:ident, |$x:ident| $e:expr) => { if let token::Interpolated(nt) = $p.token.clone() { - if let token::$constructor($x) = (*nt).clone() { + if let token::$constructor($x) = nt.0.clone() { $p.bump(); return Ok($e); } @@ -1602,7 +1602,7 @@ impl<'a> Parser<'a> { /// Matches token_lit = LIT_INTEGER | ... pub fn parse_lit_token(&mut self) -> PResult<'a, LitKind> { let out = match self.token { - token::Interpolated(ref nt) => match **nt { + token::Interpolated(ref nt) => match nt.0 { token::NtExpr(ref v) => match v.node { ExprKind::Lit(ref lit) => { lit.node.clone() } _ => { return self.unexpected_last(&self.token); } @@ -1761,7 +1761,7 @@ impl<'a> Parser<'a> { }; if is_global { - segments.insert(0, PathSegment::crate_root()); + segments.insert(0, PathSegment::crate_root(lo)); } // Assemble the result. @@ -1775,7 +1775,7 @@ impl<'a> Parser<'a> { /// This is used when parsing derive macro paths in `#[derive]` attributes. pub fn parse_path_allowing_meta(&mut self, mode: PathStyle) -> PResult<'a, ast::Path> { let meta_ident = match self.token { - token::Interpolated(ref nt) => match **nt { + token::Interpolated(ref nt) => match nt.0 { token::NtMeta(ref meta) => match meta.node { ast::MetaItemKind::Word => Some(ast::Ident::with_empty_ctxt(meta.name)), _ => None, @@ -2610,13 +2610,16 @@ impl<'a> Parser<'a> { pub fn process_potential_macro_variable(&mut self) { let ident = match self.token { - token::SubstNt(name) => { + token::Dollar if self.span.ctxt != syntax_pos::hygiene::SyntaxContext::empty() && + self.look_ahead(1, |t| t.is_ident()) => { + self.bump(); + let name = match self.token { token::Ident(ident) => ident, _ => unreachable!() }; self.fatal(&format!("unknown macro variable `{}`", name)).emit(); return } token::Interpolated(ref nt) => { self.meta_var_span = Some(self.span); - match **nt { + match nt.0 { token::NtIdent(ident) => ident, _ => return, } @@ -6168,7 +6171,7 @@ impl<'a> Parser<'a> { // `{foo, bar}`, `::{foo, bar}`, `*`, 
or `::*`. self.eat(&token::ModSep); let prefix = ast::Path { - segments: vec![PathSegment::crate_root()], + segments: vec![PathSegment::crate_root(lo)], span: lo.to(self.span), }; let view_path_kind = if self.eat(&token::BinOp(token::Star)) { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 75969cf2eb8..834ac38af98 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -16,10 +16,12 @@ pub use self::Token::*; use ast::{self}; use ptr::P; +use serialize::{Decodable, Decoder, Encodable, Encoder}; use symbol::keywords; -use tokenstream::TokenTree; +use tokenstream::{TokenStream, TokenTree}; -use std::fmt; +use std::cell::Cell; +use std::{cmp, fmt}; use std::rc::Rc; #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] @@ -167,14 +169,12 @@ pub enum Token { Underscore, Lifetime(ast::Ident), - /* For interpolation */ - Interpolated(Rc<Nonterminal>), + // The `LazyTokenStream` is a pure function of the `Nonterminal`, + // and so the `LazyTokenStream` can be ignored by Eq, Hash, etc. + Interpolated(Rc<(Nonterminal, LazyTokenStream)>), // Can be expanded into several tokens. /// Doc comment DocComment(ast::Name), - // In right-hand-sides of MBE macros: - /// A syntactic variable that will be filled in by macro expansion. - SubstNt(ast::Ident), // Junk. These carry no data because we don't really care about the data // they *would* carry, and don't really want to allocate a new ident for @@ -190,6 +190,10 @@ pub enum Token { } impl Token { + pub fn interpolated(nt: Nonterminal) -> Token { + Token::Interpolated(Rc::new((nt, LazyTokenStream::new()))) + } + /// Returns `true` if the token starts with '>'. pub fn is_like_gt(&self) -> bool { match *self { @@ -214,7 +218,7 @@ impl Token { Lt | BinOp(Shl) | // associated path ModSep | // global path Pound => true, // expression attributes - Interpolated(ref nt) => match **nt { + Interpolated(ref nt) => match nt.0 { NtIdent(..) | NtExpr(..) 
| NtBlock(..) | NtPath(..) => true, _ => false, }, @@ -237,7 +241,7 @@ impl Token { Lifetime(..) | // lifetime bound in trait object Lt | BinOp(Shl) | // associated path ModSep => true, // global path - Interpolated(ref nt) => match **nt { + Interpolated(ref nt) => match nt.0 { NtIdent(..) | NtTy(..) | NtPath(..) => true, _ => false, }, @@ -256,7 +260,7 @@ impl Token { pub fn ident(&self) -> Option<ast::Ident> { match *self { Ident(ident) => Some(ident), - Interpolated(ref nt) => match **nt { + Interpolated(ref nt) => match nt.0 { NtIdent(ident) => Some(ident.node), _ => None, }, @@ -288,7 +292,7 @@ impl Token { /// Returns `true` if the token is an interpolated path. pub fn is_path(&self) -> bool { if let Interpolated(ref nt) = *self { - if let NtPath(..) = **nt { + if let NtPath(..) = nt.0 { return true; } } @@ -358,6 +362,60 @@ impl Token { } } + pub fn glue(self, joint: Token) -> Option<Token> { + Some(match self { + Eq => match joint { + Eq => EqEq, + Gt => FatArrow, + _ => return None, + }, + Lt => match joint { + Eq => Le, + Lt => BinOp(Shl), + Le => BinOpEq(Shl), + BinOp(Minus) => LArrow, + _ => return None, + }, + Gt => match joint { + Eq => Ge, + Gt => BinOp(Shr), + Ge => BinOpEq(Shr), + _ => return None, + }, + Not => match joint { + Eq => Ne, + _ => return None, + }, + BinOp(op) => match joint { + Eq => BinOpEq(op), + BinOp(And) if op == And => AndAnd, + BinOp(Or) if op == Or => OrOr, + Gt if op == Minus => RArrow, + _ => return None, + }, + Dot => match joint { + Dot => DotDot, + DotDot => DotDotDot, + _ => return None, + }, + DotDot => match joint { + Dot => DotDotDot, + _ => return None, + }, + Colon => match joint { + Colon => ModSep, + _ => return None, + }, + + Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot | Comma | + Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question | + OpenDelim(..) | CloseDelim(..) | Underscore => return None, + + Literal(..) | Ident(..) | Lifetime(..) | Interpolated(..) 
| DocComment(..) | + Whitespace | Comment | Shebang(..) | Eof => return None, + }) + } + /// Returns `true` if the token is either a special identifier or a keyword. pub fn is_reserved_ident(&self) -> bool { self.is_special_ident() || self.is_used_keyword() || self.is_unused_keyword() @@ -411,3 +469,66 @@ impl fmt::Debug for Nonterminal { } } } + +pub fn is_op(tok: &Token) -> bool { + match *tok { + OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) | + Ident(..) | Underscore | Lifetime(..) | Interpolated(..) | + Whitespace | Comment | Shebang(..) | Eof => false, + _ => true, + } +} + +pub struct LazyTokenStream(Cell<Option<TokenStream>>); + +impl Clone for LazyTokenStream { + fn clone(&self) -> Self { + let opt_stream = self.0.take(); + self.0.set(opt_stream.clone()); + LazyTokenStream(Cell::new(opt_stream)) + } +} + +impl cmp::Eq for LazyTokenStream {} +impl PartialEq for LazyTokenStream { + fn eq(&self, _other: &LazyTokenStream) -> bool { + true + } +} + +impl fmt::Debug for LazyTokenStream { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Debug::fmt(&self.clone().0.into_inner(), f) + } +} + +impl LazyTokenStream { + pub fn new() -> Self { + LazyTokenStream(Cell::new(None)) + } + + pub fn force<F: FnOnce() -> TokenStream>(&self, f: F) -> TokenStream { + let mut opt_stream = self.0.take(); + if opt_stream.is_none() { + opt_stream = Some(f()); + } + self.0.set(opt_stream.clone()); + opt_stream.clone().unwrap() + } +} + +impl Encodable for LazyTokenStream { + fn encode<S: Encoder>(&self, _: &mut S) -> Result<(), S::Error> { + Ok(()) + } +} + +impl Decodable for LazyTokenStream { + fn decode<D: Decoder>(_: &mut D) -> Result<LazyTokenStream, D::Error> { + Ok(LazyTokenStream::new()) + } +} + +impl ::std::hash::Hash for LazyTokenStream { + fn hash<H: ::std::hash::Hasher>(&self, _hasher: &mut H) {} +} |
