|  |  |  |
|---|---|---|
| author | Vadim Petrochenkov <vadim.petrochenkov@gmail.com> | 2019-12-29 16:39:31 +0300 |
| committer | Vadim Petrochenkov <vadim.petrochenkov@gmail.com> | 2019-12-30 19:18:16 +0300 |
| commit | b683de4ad79242fdeebcae2afefb72c1530babe9 (patch) | |
| tree | e46daf86fae68f2246b1dd80500f4a504d452b84 /src/libsyntax_ext | |
| parent | 0fb43801368ae8b5931583f813071120bed55c35 (diff) | |
| download | rust-b683de4ad79242fdeebcae2afefb72c1530babe9.tar.gz rust-b683de4ad79242fdeebcae2afefb72c1530babe9.zip | |
Rename directories for some crates from `syntax_x` to `rustc_x`
- `syntax_expand` -> `rustc_expand`
- `syntax_pos` -> `rustc_span`
- `syntax_ext` -> `rustc_builtin_macros`
Diffstat (limited to 'src/libsyntax_ext')
38 files changed, 0 insertions, 8981 deletions
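Since this diffstat is limited to `src/libsyntax_ext`, the diff below shows only the deletion side of the move. As a rough illustrative sketch (not part of the commit itself), the rename maps imports in dependent crates as follows; the old crate names are the ones used throughout the deleted sources below, while the `rustc_*` names come from the commit message:

```rust
// Sketch only: how `use` paths map under the rename. The old crate names
// appear in the deleted files below; the new ones are taken from the commit
// message. Exact new-side file paths are not part of this diff.

// Before:
// use syntax_expand::base::{ExtCtxt, MacResult};
// use syntax_pos::Span;

// After:
use rustc_expand::base::{ExtCtxt, MacResult};
use rustc_span::Span;

// The `syntax_ext` crate itself becomes `rustc_builtin_macros`.
```

Correspondingly, a `Cargo.toml` dependency such as `syntax_pos = { path = "../libsyntax_pos" }` (visible in the deleted manifest below) would instead point at the renamed crate directory.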
diff --git a/src/libsyntax_ext/Cargo.toml b/src/libsyntax_ext/Cargo.toml deleted file mode 100644 index d73a9ea6cdb..00000000000 --- a/src/libsyntax_ext/Cargo.toml +++ /dev/null @@ -1,24 +0,0 @@ -[package] -authors = ["The Rust Project Developers"] -name = "syntax_ext" -version = "0.0.0" -edition = "2018" - -[lib] -name = "syntax_ext" -path = "lib.rs" -doctest = false - -[dependencies] -errors = { path = "../librustc_errors", package = "rustc_errors" } -fmt_macros = { path = "../libfmt_macros" } -log = "0.4" -rustc_data_structures = { path = "../librustc_data_structures" } -rustc_feature = { path = "../librustc_feature" } -rustc_parse = { path = "../librustc_parse" } -rustc_target = { path = "../librustc_target" } -smallvec = { version = "1.0", features = ["union", "may_dangle"] } -syntax = { path = "../libsyntax" } -syntax_expand = { path = "../libsyntax_expand" } -syntax_pos = { path = "../libsyntax_pos" } -rustc_error_codes = { path = "../librustc_error_codes" } diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs deleted file mode 100644 index 324bef9cbb8..00000000000 --- a/src/libsyntax_ext/asm.rs +++ /dev/null @@ -1,289 +0,0 @@ -// Inline assembly support. -// -use State::*; - -use errors::{DiagnosticBuilder, PResult}; -use rustc_parse::parser::Parser; -use syntax::ast::{self, AsmDialect}; -use syntax::ptr::P; -use syntax::symbol::{kw, sym, Symbol}; -use syntax::token::{self, Token}; -use syntax::tokenstream::{self, TokenStream}; -use syntax::{span_err, struct_span_err}; -use syntax_expand::base::*; -use syntax_pos::Span; - -use rustc_error_codes::*; - -enum State { - Asm, - Outputs, - Inputs, - Clobbers, - Options, - StateNone, -} - -impl State { - fn next(&self) -> State { - match *self { - Asm => Outputs, - Outputs => Inputs, - Inputs => Clobbers, - Clobbers => Options, - Options => StateNone, - StateNone => StateNone, - } - } -} - -const OPTIONS: &[Symbol] = &[sym::volatile, sym::alignstack, sym::intel]; - -pub fn expand_asm<'cx>( - cx: &'cx mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn MacResult + 'cx> { - let mut inline_asm = match parse_inline_asm(cx, sp, tts) { - Ok(Some(inline_asm)) => inline_asm, - Ok(None) => return DummyResult::any(sp), - Err(mut err) => { - err.emit(); - return DummyResult::any(sp); - } - }; - - // If there are no outputs, the inline assembly is executed just for its side effects, - // so ensure that it is volatile - if inline_asm.outputs.is_empty() { - inline_asm.volatile = true; - } - - MacEager::expr(P(ast::Expr { - id: ast::DUMMY_NODE_ID, - kind: ast::ExprKind::InlineAsm(P(inline_asm)), - span: cx.with_def_site_ctxt(sp), - attrs: ast::AttrVec::new(), - })) -} - -fn parse_asm_str<'a>(p: &mut Parser<'a>) -> PResult<'a, Symbol> { - match p.parse_str_lit() { - Ok(str_lit) => Ok(str_lit.symbol_unescaped), - Err(opt_lit) => { - let span = opt_lit.map_or(p.token.span, |lit| lit.span); - let mut err = p.sess.span_diagnostic.struct_span_err(span, "expected string literal"); - err.span_label(span, "not a string literal"); - Err(err) - } - } -} - -fn parse_inline_asm<'a>( - cx: &mut ExtCtxt<'a>, - sp: Span, - tts: TokenStream, -) -> Result<Option<ast::InlineAsm>, DiagnosticBuilder<'a>> { - // Split the tts before the first colon, to avoid `asm!("x": y)` being - // parsed as `asm!(z)` with `z = "x": y` which is type ascription. - let first_colon = tts - .trees() - .position(|tt| match tt { - tokenstream::TokenTree::Token(Token { kind: token::Colon, .. }) - | tokenstream::TokenTree::Token(Token { kind: token::ModSep, .. 
}) => true, - _ => false, - }) - .unwrap_or(tts.len()); - let mut p = cx.new_parser_from_tts(tts.trees().skip(first_colon).collect()); - let mut asm = kw::Invalid; - let mut asm_str_style = None; - let mut outputs = Vec::new(); - let mut inputs = Vec::new(); - let mut clobs = Vec::new(); - let mut volatile = false; - let mut alignstack = false; - let mut dialect = AsmDialect::Att; - - let mut state = Asm; - - 'statement: loop { - match state { - Asm => { - if asm_str_style.is_some() { - // If we already have a string with instructions, - // ending up in Asm state again is an error. - return Err(struct_span_err!( - cx.parse_sess.span_diagnostic, - sp, - E0660, - "malformed inline assembly" - )); - } - // Nested parser, stop before the first colon (see above). - let mut p2 = cx.new_parser_from_tts(tts.trees().take(first_colon).collect()); - - if p2.token == token::Eof { - let mut err = - cx.struct_span_err(sp, "macro requires a string literal as an argument"); - err.span_label(sp, "string literal required"); - return Err(err); - } - - let expr = p2.parse_expr()?; - let (s, style) = - match expr_to_string(cx, expr, "inline assembly must be a string literal") { - Some((s, st)) => (s, st), - None => return Ok(None), - }; - - // This is most likely malformed. - if p2.token != token::Eof { - let mut extra_tts = p2.parse_all_token_trees()?; - extra_tts.extend(tts.trees().skip(first_colon)); - p = cx.new_parser_from_tts(extra_tts.into_iter().collect()); - } - - asm = s; - asm_str_style = Some(style); - } - Outputs => { - while p.token != token::Eof && p.token != token::Colon && p.token != token::ModSep { - if !outputs.is_empty() { - p.eat(&token::Comma); - } - - let constraint = parse_asm_str(&mut p)?; - - let span = p.prev_span; - - p.expect(&token::OpenDelim(token::Paren))?; - let expr = p.parse_expr()?; - p.expect(&token::CloseDelim(token::Paren))?; - - // Expands a read+write operand into two operands. - // - // Use '+' modifier when you want the same expression - // to be both an input and an output at the same time. - // It's the opposite of '=&' which means that the memory - // cannot be shared with any other operand (usually when - // a register is clobbered early.) 
- let constraint_str = constraint.as_str(); - let mut ch = constraint_str.chars(); - let output = match ch.next() { - Some('=') => None, - Some('+') => Some(Symbol::intern(&format!("={}", ch.as_str()))), - _ => { - span_err!( - cx, - span, - E0661, - "output operand constraint lacks '=' or '+'" - ); - None - } - }; - - let is_rw = output.is_some(); - let is_indirect = constraint_str.contains("*"); - outputs.push(ast::InlineAsmOutput { - constraint: output.unwrap_or(constraint), - expr, - is_rw, - is_indirect, - }); - } - } - Inputs => { - while p.token != token::Eof && p.token != token::Colon && p.token != token::ModSep { - if !inputs.is_empty() { - p.eat(&token::Comma); - } - - let constraint = parse_asm_str(&mut p)?; - - if constraint.as_str().starts_with("=") { - span_err!(cx, p.prev_span, E0662, "input operand constraint contains '='"); - } else if constraint.as_str().starts_with("+") { - span_err!(cx, p.prev_span, E0663, "input operand constraint contains '+'"); - } - - p.expect(&token::OpenDelim(token::Paren))?; - let input = p.parse_expr()?; - p.expect(&token::CloseDelim(token::Paren))?; - - inputs.push((constraint, input)); - } - } - Clobbers => { - while p.token != token::Eof && p.token != token::Colon && p.token != token::ModSep { - if !clobs.is_empty() { - p.eat(&token::Comma); - } - - let s = parse_asm_str(&mut p)?; - - if OPTIONS.iter().any(|&opt| s == opt) { - cx.span_warn(p.prev_span, "expected a clobber, found an option"); - } else if s.as_str().starts_with("{") || s.as_str().ends_with("}") { - span_err!( - cx, - p.prev_span, - E0664, - "clobber should not be surrounded by braces" - ); - } - - clobs.push(s); - } - } - Options => { - let option = parse_asm_str(&mut p)?; - - if option == sym::volatile { - // Indicates that the inline assembly has side effects - // and must not be optimized out along with its outputs. - volatile = true; - } else if option == sym::alignstack { - alignstack = true; - } else if option == sym::intel { - dialect = AsmDialect::Intel; - } else { - cx.span_warn(p.prev_span, "unrecognized option"); - } - - if p.token == token::Comma { - p.eat(&token::Comma); - } - } - StateNone => (), - } - - loop { - // MOD_SEP is a double colon '::' without space in between. - // When encountered, the state must be advanced twice. - match (&p.token.kind, state.next(), state.next().next()) { - (&token::Colon, StateNone, _) | (&token::ModSep, _, StateNone) => { - p.bump(); - break 'statement; - } - (&token::Colon, st, _) | (&token::ModSep, _, st) => { - p.bump(); - state = st; - } - (&token::Eof, ..) 
=> break 'statement, - _ => break, - } - } - } - - Ok(Some(ast::InlineAsm { - asm, - asm_str_style: asm_str_style.unwrap(), - outputs, - inputs, - clobbers: clobs, - volatile, - alignstack, - dialect, - })) -} diff --git a/src/libsyntax_ext/assert.rs b/src/libsyntax_ext/assert.rs deleted file mode 100644 index 331e9fa61d0..00000000000 --- a/src/libsyntax_ext/assert.rs +++ /dev/null @@ -1,137 +0,0 @@ -use errors::{Applicability, DiagnosticBuilder}; - -use rustc_parse::parser::Parser; -use syntax::ast::{self, *}; -use syntax::print::pprust; -use syntax::ptr::P; -use syntax::symbol::{sym, Symbol}; -use syntax::token::{self, TokenKind}; -use syntax::tokenstream::{DelimSpan, TokenStream, TokenTree}; -use syntax_expand::base::*; -use syntax_pos::{Span, DUMMY_SP}; - -pub fn expand_assert<'cx>( - cx: &'cx mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn MacResult + 'cx> { - let Assert { cond_expr, custom_message } = match parse_assert(cx, sp, tts) { - Ok(assert) => assert, - Err(mut err) => { - err.emit(); - return DummyResult::any(sp); - } - }; - - // `core::panic` and `std::panic` are different macros, so we use call-site - // context to pick up whichever is currently in scope. - let sp = cx.with_call_site_ctxt(sp); - let tokens = custom_message.unwrap_or_else(|| { - TokenStream::from(TokenTree::token( - TokenKind::lit( - token::Str, - Symbol::intern(&format!( - "assertion failed: {}", - pprust::expr_to_string(&cond_expr).escape_debug() - )), - None, - ), - DUMMY_SP, - )) - }); - let args = P(MacArgs::Delimited(DelimSpan::from_single(sp), MacDelimiter::Parenthesis, tokens)); - let panic_call = Mac { - path: Path::from_ident(Ident::new(sym::panic, sp)), - args, - prior_type_ascription: None, - }; - let if_expr = cx.expr_if( - sp, - cx.expr(sp, ExprKind::Unary(UnOp::Not, cond_expr)), - cx.expr(sp, ExprKind::Mac(panic_call)), - None, - ); - MacEager::expr(if_expr) -} - -struct Assert { - cond_expr: P<ast::Expr>, - custom_message: Option<TokenStream>, -} - -fn parse_assert<'a>( - cx: &mut ExtCtxt<'a>, - sp: Span, - stream: TokenStream, -) -> Result<Assert, DiagnosticBuilder<'a>> { - let mut parser = cx.new_parser_from_tts(stream); - - if parser.token == token::Eof { - let mut err = cx.struct_span_err(sp, "macro requires a boolean expression as an argument"); - err.span_label(sp, "boolean expression required"); - return Err(err); - } - - let cond_expr = parser.parse_expr()?; - - // Some crates use the `assert!` macro in the following form (note extra semicolon): - // - // assert!( - // my_function(); - // ); - // - // Warn about semicolon and suggest removing it. Eventually, this should be turned into an - // error. - if parser.token == token::Semi { - let mut err = cx.struct_span_warn(sp, "macro requires an expression as an argument"); - err.span_suggestion( - parser.token.span, - "try removing semicolon", - String::new(), - Applicability::MaybeIncorrect, - ); - err.note("this is going to be an error in the future"); - err.emit(); - - parser.bump(); - } - - // Some crates use the `assert!` macro in the following form (note missing comma before - // message): - // - // assert!(true "error message"); - // - // Parse this as an actual message, and suggest inserting a comma. Eventually, this should be - // turned into an error. - let custom_message = - if let token::Literal(token::Lit { kind: token::Str, .. 
}) = parser.token.kind { - let mut err = cx.struct_span_warn(parser.token.span, "unexpected string literal"); - let comma_span = cx.source_map().next_point(parser.prev_span); - err.span_suggestion_short( - comma_span, - "try adding a comma", - ", ".to_string(), - Applicability::MaybeIncorrect, - ); - err.note("this is going to be an error in the future"); - err.emit(); - - parse_custom_message(&mut parser) - } else if parser.eat(&token::Comma) { - parse_custom_message(&mut parser) - } else { - None - }; - - if parser.token != token::Eof { - parser.expect_one_of(&[], &[])?; - unreachable!(); - } - - Ok(Assert { cond_expr, custom_message }) -} - -fn parse_custom_message(parser: &mut Parser<'_>) -> Option<TokenStream> { - let ts = parser.parse_tokens(); - if !ts.is_empty() { Some(ts) } else { None } -} diff --git a/src/libsyntax_ext/cfg.rs b/src/libsyntax_ext/cfg.rs deleted file mode 100644 index 7b1dbcc7762..00000000000 --- a/src/libsyntax_ext/cfg.rs +++ /dev/null @@ -1,54 +0,0 @@ -/// The compiler code necessary to support the cfg! extension, which expands to -/// a literal `true` or `false` based on whether the given cfg matches the -/// current compilation environment. -use errors::DiagnosticBuilder; - -use syntax::ast; -use syntax::attr; -use syntax::token; -use syntax::tokenstream::TokenStream; -use syntax_expand::base::{self, *}; -use syntax_pos::Span; - -pub fn expand_cfg( - cx: &mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn base::MacResult + 'static> { - let sp = cx.with_def_site_ctxt(sp); - - match parse_cfg(cx, sp, tts) { - Ok(cfg) => { - let matches_cfg = attr::cfg_matches(&cfg, cx.parse_sess, cx.ecfg.features); - MacEager::expr(cx.expr_bool(sp, matches_cfg)) - } - Err(mut err) => { - err.emit(); - DummyResult::any(sp) - } - } -} - -fn parse_cfg<'a>( - cx: &mut ExtCtxt<'a>, - sp: Span, - tts: TokenStream, -) -> Result<ast::MetaItem, DiagnosticBuilder<'a>> { - let mut p = cx.new_parser_from_tts(tts); - - if p.token == token::Eof { - let mut err = cx.struct_span_err(sp, "macro requires a cfg-pattern as an argument"); - err.span_label(sp, "cfg-pattern required"); - return Err(err); - } - - let cfg = p.parse_meta_item()?; - - let _ = p.eat(&token::Comma); - - if !p.eat(&token::Eof) { - return Err(cx.struct_span_err(sp, "expected 1 cfg-pattern")); - } - - Ok(cfg) -} diff --git a/src/libsyntax_ext/cmdline_attrs.rs b/src/libsyntax_ext/cmdline_attrs.rs deleted file mode 100644 index 1ce083112a8..00000000000 --- a/src/libsyntax_ext/cmdline_attrs.rs +++ /dev/null @@ -1,30 +0,0 @@ -//! Attributes injected into the crate root from command line using `-Z crate-attr`. 
- -use syntax::ast::{self, AttrItem, AttrStyle}; -use syntax::attr::mk_attr; -use syntax::sess::ParseSess; -use syntax::token; -use syntax_expand::panictry; -use syntax_pos::FileName; - -pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -> ast::Crate { - for raw_attr in attrs { - let mut parser = rustc_parse::new_parser_from_source_str( - parse_sess, - FileName::cli_crate_attr_source_code(&raw_attr), - raw_attr.clone(), - ); - - let start_span = parser.token.span; - let AttrItem { path, args } = panictry!(parser.parse_attr_item()); - let end_span = parser.token.span; - if parser.token != token::Eof { - parse_sess.span_diagnostic.span_err(start_span.to(end_span), "invalid crate attribute"); - continue; - } - - krate.attrs.push(mk_attr(AttrStyle::Inner, path, args, start_span.to(end_span))); - } - - krate -} diff --git a/src/libsyntax_ext/compile_error.rs b/src/libsyntax_ext/compile_error.rs deleted file mode 100644 index 394259fc67b..00000000000 --- a/src/libsyntax_ext/compile_error.rs +++ /dev/null @@ -1,20 +0,0 @@ -// The compiler code necessary to support the compile_error! extension. - -use syntax::tokenstream::TokenStream; -use syntax_expand::base::{self, *}; -use syntax_pos::Span; - -pub fn expand_compile_error<'cx>( - cx: &'cx mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn base::MacResult + 'cx> { - let var = match get_single_str_from_tts(cx, sp, tts, "compile_error!") { - None => return DummyResult::any(sp), - Some(v) => v, - }; - - cx.span_err(sp, &var); - - DummyResult::any(sp) -} diff --git a/src/libsyntax_ext/concat.rs b/src/libsyntax_ext/concat.rs deleted file mode 100644 index 0cc8e205ae9..00000000000 --- a/src/libsyntax_ext/concat.rs +++ /dev/null @@ -1,62 +0,0 @@ -use syntax::ast; -use syntax::symbol::Symbol; -use syntax::tokenstream::TokenStream; -use syntax_expand::base::{self, DummyResult}; - -use std::string::String; - -pub fn expand_concat( - cx: &mut base::ExtCtxt<'_>, - sp: syntax_pos::Span, - tts: TokenStream, -) -> Box<dyn base::MacResult + 'static> { - let es = match base::get_exprs_from_tts(cx, sp, tts) { - Some(e) => e, - None => return DummyResult::any(sp), - }; - let mut accumulator = String::new(); - let mut missing_literal = vec![]; - let mut has_errors = false; - for e in es { - match e.kind { - ast::ExprKind::Lit(ref lit) => match lit.kind { - ast::LitKind::Str(ref s, _) | ast::LitKind::Float(ref s, _) => { - accumulator.push_str(&s.as_str()); - } - ast::LitKind::Char(c) => { - accumulator.push(c); - } - ast::LitKind::Int(i, ast::LitIntType::Unsigned(_)) - | ast::LitKind::Int(i, ast::LitIntType::Signed(_)) - | ast::LitKind::Int(i, ast::LitIntType::Unsuffixed) => { - accumulator.push_str(&i.to_string()); - } - ast::LitKind::Bool(b) => { - accumulator.push_str(&b.to_string()); - } - ast::LitKind::Byte(..) | ast::LitKind::ByteStr(..) 
=> { - cx.span_err(e.span, "cannot concatenate a byte string literal"); - } - ast::LitKind::Err(_) => { - has_errors = true; - } - }, - ast::ExprKind::Err => { - has_errors = true; - } - _ => { - missing_literal.push(e.span); - } - } - } - if missing_literal.len() > 0 { - let mut err = cx.struct_span_err(missing_literal, "expected a literal"); - err.note("only literals (like `\"foo\"`, `42` and `3.14`) can be passed to `concat!()`"); - err.emit(); - return DummyResult::any(sp); - } else if has_errors { - return DummyResult::any(sp); - } - let sp = cx.with_def_site_ctxt(sp); - base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&accumulator))) -} diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs deleted file mode 100644 index d870e858bea..00000000000 --- a/src/libsyntax_ext/concat_idents.rs +++ /dev/null @@ -1,68 +0,0 @@ -use syntax::ast; -use syntax::ptr::P; -use syntax::token::{self, Token}; -use syntax::tokenstream::{TokenStream, TokenTree}; -use syntax_expand::base::{self, *}; -use syntax_pos::symbol::Symbol; -use syntax_pos::Span; - -pub fn expand_concat_idents<'cx>( - cx: &'cx mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn base::MacResult + 'cx> { - if tts.is_empty() { - cx.span_err(sp, "concat_idents! takes 1 or more arguments."); - return DummyResult::any(sp); - } - - let mut res_str = String::new(); - for (i, e) in tts.into_trees().enumerate() { - if i & 1 == 1 { - match e { - TokenTree::Token(Token { kind: token::Comma, .. }) => {} - _ => { - cx.span_err(sp, "concat_idents! expecting comma."); - return DummyResult::any(sp); - } - } - } else { - match e { - TokenTree::Token(Token { kind: token::Ident(name, _), .. }) => { - res_str.push_str(&name.as_str()) - } - _ => { - cx.span_err(sp, "concat_idents! 
requires ident args."); - return DummyResult::any(sp); - } - } - } - } - - let ident = ast::Ident::new(Symbol::intern(&res_str), cx.with_call_site_ctxt(sp)); - - struct ConcatIdentsResult { - ident: ast::Ident, - } - - impl base::MacResult for ConcatIdentsResult { - fn make_expr(self: Box<Self>) -> Option<P<ast::Expr>> { - Some(P(ast::Expr { - id: ast::DUMMY_NODE_ID, - kind: ast::ExprKind::Path(None, ast::Path::from_ident(self.ident)), - span: self.ident.span, - attrs: ast::AttrVec::new(), - })) - } - - fn make_ty(self: Box<Self>) -> Option<P<ast::Ty>> { - Some(P(ast::Ty { - id: ast::DUMMY_NODE_ID, - kind: ast::TyKind::Path(None, ast::Path::from_ident(self.ident)), - span: self.ident.span, - })) - } - } - - Box::new(ConcatIdentsResult { ident }) -} diff --git a/src/libsyntax_ext/deriving/bounds.rs b/src/libsyntax_ext/deriving/bounds.rs deleted file mode 100644 index 9793ac1ca08..00000000000 --- a/src/libsyntax_ext/deriving/bounds.rs +++ /dev/null @@ -1,29 +0,0 @@ -use crate::deriving::generic::ty::*; -use crate::deriving::generic::*; -use crate::deriving::path_std; - -use syntax::ast::MetaItem; -use syntax_expand::base::{Annotatable, ExtCtxt}; -use syntax_pos::Span; - -pub fn expand_deriving_copy( - cx: &mut ExtCtxt<'_>, - span: Span, - mitem: &MetaItem, - item: &Annotatable, - push: &mut dyn FnMut(Annotatable), -) { - let trait_def = TraitDef { - span, - attributes: Vec::new(), - path: path_std!(cx, marker::Copy), - additional_bounds: Vec::new(), - generics: LifetimeBounds::empty(), - is_unsafe: false, - supports_unions: true, - methods: Vec::new(), - associated_types: Vec::new(), - }; - - trait_def.expand(cx, mitem, item, push); -} diff --git a/src/libsyntax_ext/deriving/clone.rs b/src/libsyntax_ext/deriving/clone.rs deleted file mode 100644 index 171e4104c0a..00000000000 --- a/src/libsyntax_ext/deriving/clone.rs +++ /dev/null @@ -1,225 +0,0 @@ -use crate::deriving::generic::ty::*; -use crate::deriving::generic::*; -use crate::deriving::path_std; - -use syntax::ast::{self, Expr, GenericArg, Generics, ItemKind, MetaItem, VariantData}; -use syntax::ptr::P; -use syntax::symbol::{kw, sym, Symbol}; -use syntax_expand::base::{Annotatable, ExtCtxt}; -use syntax_pos::Span; - -pub fn expand_deriving_clone( - cx: &mut ExtCtxt<'_>, - span: Span, - mitem: &MetaItem, - item: &Annotatable, - push: &mut dyn FnMut(Annotatable), -) { - // check if we can use a short form - // - // the short form is `fn clone(&self) -> Self { *self }` - // - // we can use the short form if: - // - the item is Copy (unfortunately, all we can check is whether it's also deriving Copy) - // - there are no generic parameters (after specialization this limitation can be removed) - // if we used the short form with generics, we'd have to bound the generics with - // Clone + Copy, and then there'd be no Clone impl at all if the user fills in something - // that is Clone but not Copy. and until specialization we can't write both impls. - // - the item is a union with Copy fields - // Unions with generic parameters still can derive Clone because they require Copy - // for deriving, Clone alone is not enough. - // Whever Clone is implemented for fields is irrelevant so we don't assert it. - let bounds; - let substructure; - let is_shallow; - match *item { - Annotatable::Item(ref annitem) => match annitem.kind { - ItemKind::Struct(_, Generics { ref params, .. }) - | ItemKind::Enum(_, Generics { ref params, .. 
}) => { - let container_id = cx.current_expansion.id.expn_data().parent; - if cx.resolver.has_derive_copy(container_id) - && !params.iter().any(|param| match param.kind { - ast::GenericParamKind::Type { .. } => true, - _ => false, - }) - { - bounds = vec![]; - is_shallow = true; - substructure = combine_substructure(Box::new(|c, s, sub| { - cs_clone_shallow("Clone", c, s, sub, false) - })); - } else { - bounds = vec![]; - is_shallow = false; - substructure = - combine_substructure(Box::new(|c, s, sub| cs_clone("Clone", c, s, sub))); - } - } - ItemKind::Union(..) => { - bounds = vec![Literal(path_std!(cx, marker::Copy))]; - is_shallow = true; - substructure = combine_substructure(Box::new(|c, s, sub| { - cs_clone_shallow("Clone", c, s, sub, true) - })); - } - _ => { - bounds = vec![]; - is_shallow = false; - substructure = - combine_substructure(Box::new(|c, s, sub| cs_clone("Clone", c, s, sub))); - } - }, - - _ => cx.span_bug(span, "`#[derive(Clone)]` on trait item or impl item"), - } - - let inline = cx.meta_word(span, sym::inline); - let attrs = vec![cx.attribute(inline)]; - let trait_def = TraitDef { - span, - attributes: Vec::new(), - path: path_std!(cx, clone::Clone), - additional_bounds: bounds, - generics: LifetimeBounds::empty(), - is_unsafe: false, - supports_unions: true, - methods: vec![MethodDef { - name: "clone", - generics: LifetimeBounds::empty(), - explicit_self: borrowed_explicit_self(), - args: Vec::new(), - ret_ty: Self_, - attributes: attrs, - is_unsafe: false, - unify_fieldless_variants: false, - combine_substructure: substructure, - }], - associated_types: Vec::new(), - }; - - trait_def.expand_ext(cx, mitem, item, push, is_shallow) -} - -fn cs_clone_shallow( - name: &str, - cx: &mut ExtCtxt<'_>, - trait_span: Span, - substr: &Substructure<'_>, - is_union: bool, -) -> P<Expr> { - fn assert_ty_bounds( - cx: &mut ExtCtxt<'_>, - stmts: &mut Vec<ast::Stmt>, - ty: P<ast::Ty>, - span: Span, - helper_name: &str, - ) { - // Generate statement `let _: helper_name<ty>;`, - // set the expn ID so we can use the unstable struct. - let span = cx.with_def_site_ctxt(span); - let assert_path = cx.path_all( - span, - true, - cx.std_path(&[sym::clone, Symbol::intern(helper_name)]), - vec![GenericArg::Type(ty)], - ); - stmts.push(cx.stmt_let_type_only(span, cx.ty_path(assert_path))); - } - fn process_variant(cx: &mut ExtCtxt<'_>, stmts: &mut Vec<ast::Stmt>, variant: &VariantData) { - for field in variant.fields() { - // let _: AssertParamIsClone<FieldTy>; - assert_ty_bounds(cx, stmts, field.ty.clone(), field.span, "AssertParamIsClone"); - } - } - - let mut stmts = Vec::new(); - if is_union { - // let _: AssertParamIsCopy<Self>; - let self_ty = - cx.ty_path(cx.path_ident(trait_span, ast::Ident::with_dummy_span(kw::SelfUpper))); - assert_ty_bounds(cx, &mut stmts, self_ty, trait_span, "AssertParamIsCopy"); - } else { - match *substr.fields { - StaticStruct(vdata, ..) => { - process_variant(cx, &mut stmts, vdata); - } - StaticEnum(enum_def, ..) 
=> { - for variant in &enum_def.variants { - process_variant(cx, &mut stmts, &variant.data); - } - } - _ => cx.span_bug( - trait_span, - &format!( - "unexpected substructure in \ - shallow `derive({})`", - name - ), - ), - } - } - stmts.push(cx.stmt_expr(cx.expr_deref(trait_span, cx.expr_self(trait_span)))); - cx.expr_block(cx.block(trait_span, stmts)) -} - -fn cs_clone( - name: &str, - cx: &mut ExtCtxt<'_>, - trait_span: Span, - substr: &Substructure<'_>, -) -> P<Expr> { - let ctor_path; - let all_fields; - let fn_path = cx.std_path(&[sym::clone, sym::Clone, sym::clone]); - let subcall = |cx: &mut ExtCtxt<'_>, field: &FieldInfo<'_>| { - let args = vec![cx.expr_addr_of(field.span, field.self_.clone())]; - cx.expr_call_global(field.span, fn_path.clone(), args) - }; - - let vdata; - match *substr.fields { - Struct(vdata_, ref af) => { - ctor_path = cx.path(trait_span, vec![substr.type_ident]); - all_fields = af; - vdata = vdata_; - } - EnumMatching(.., variant, ref af) => { - ctor_path = cx.path(trait_span, vec![substr.type_ident, variant.ident]); - all_fields = af; - vdata = &variant.data; - } - EnumNonMatchingCollapsed(..) => { - cx.span_bug(trait_span, &format!("non-matching enum variants in `derive({})`", name,)) - } - StaticEnum(..) | StaticStruct(..) => { - cx.span_bug(trait_span, &format!("associated function in `derive({})`", name)) - } - } - - match *vdata { - VariantData::Struct(..) => { - let fields = all_fields - .iter() - .map(|field| { - let ident = match field.name { - Some(i) => i, - None => cx.span_bug( - trait_span, - &format!("unnamed field in normal struct in `derive({})`", name,), - ), - }; - let call = subcall(cx, field); - cx.field_imm(field.span, ident, call) - }) - .collect::<Vec<_>>(); - - cx.expr_struct(trait_span, ctor_path, fields) - } - VariantData::Tuple(..) => { - let subcalls = all_fields.iter().map(|f| subcall(cx, f)).collect(); - let path = cx.expr_path(ctor_path); - cx.expr_call(trait_span, path, subcalls) - } - VariantData::Unit(..) 
=> cx.expr_path(ctor_path), - } -} diff --git a/src/libsyntax_ext/deriving/cmp/eq.rs b/src/libsyntax_ext/deriving/cmp/eq.rs deleted file mode 100644 index f292ec0e428..00000000000 --- a/src/libsyntax_ext/deriving/cmp/eq.rs +++ /dev/null @@ -1,104 +0,0 @@ -use crate::deriving::generic::ty::*; -use crate::deriving::generic::*; -use crate::deriving::path_std; - -use syntax::ast::{self, Expr, GenericArg, Ident, MetaItem}; -use syntax::ptr::P; -use syntax::symbol::{sym, Symbol}; -use syntax_expand::base::{Annotatable, ExtCtxt}; -use syntax_pos::Span; - -pub fn expand_deriving_eq( - cx: &mut ExtCtxt<'_>, - span: Span, - mitem: &MetaItem, - item: &Annotatable, - push: &mut dyn FnMut(Annotatable), -) { - let inline = cx.meta_word(span, sym::inline); - let hidden = syntax::attr::mk_nested_word_item(Ident::new(sym::hidden, span)); - let doc = syntax::attr::mk_list_item(Ident::new(sym::doc, span), vec![hidden]); - let attrs = vec![cx.attribute(inline), cx.attribute(doc)]; - let trait_def = TraitDef { - span, - attributes: Vec::new(), - path: path_std!(cx, cmp::Eq), - additional_bounds: Vec::new(), - generics: LifetimeBounds::empty(), - is_unsafe: false, - supports_unions: true, - methods: vec![MethodDef { - name: "assert_receiver_is_total_eq", - generics: LifetimeBounds::empty(), - explicit_self: borrowed_explicit_self(), - args: vec![], - ret_ty: nil_ty(), - attributes: attrs, - is_unsafe: false, - unify_fieldless_variants: true, - combine_substructure: combine_substructure(Box::new(|a, b, c| { - cs_total_eq_assert(a, b, c) - })), - }], - associated_types: Vec::new(), - }; - - super::inject_impl_of_structural_trait( - cx, - span, - item, - path_std!(cx, marker::StructuralEq), - push, - ); - - trait_def.expand_ext(cx, mitem, item, push, true) -} - -fn cs_total_eq_assert( - cx: &mut ExtCtxt<'_>, - trait_span: Span, - substr: &Substructure<'_>, -) -> P<Expr> { - fn assert_ty_bounds( - cx: &mut ExtCtxt<'_>, - stmts: &mut Vec<ast::Stmt>, - ty: P<ast::Ty>, - span: Span, - helper_name: &str, - ) { - // Generate statement `let _: helper_name<ty>;`, - // set the expn ID so we can use the unstable struct. - let span = cx.with_def_site_ctxt(span); - let assert_path = cx.path_all( - span, - true, - cx.std_path(&[sym::cmp, Symbol::intern(helper_name)]), - vec![GenericArg::Type(ty)], - ); - stmts.push(cx.stmt_let_type_only(span, cx.ty_path(assert_path))); - } - fn process_variant( - cx: &mut ExtCtxt<'_>, - stmts: &mut Vec<ast::Stmt>, - variant: &ast::VariantData, - ) { - for field in variant.fields() { - // let _: AssertParamIsEq<FieldTy>; - assert_ty_bounds(cx, stmts, field.ty.clone(), field.span, "AssertParamIsEq"); - } - } - - let mut stmts = Vec::new(); - match *substr.fields { - StaticStruct(vdata, ..) => { - process_variant(cx, &mut stmts, vdata); - } - StaticEnum(enum_def, ..) 
=> { - for variant in &enum_def.variants { - process_variant(cx, &mut stmts, &variant.data); - } - } - _ => cx.span_bug(trait_span, "unexpected substructure in `derive(Eq)`"), - } - cx.expr_block(cx.block(trait_span, stmts)) -} diff --git a/src/libsyntax_ext/deriving/cmp/ord.rs b/src/libsyntax_ext/deriving/cmp/ord.rs deleted file mode 100644 index e009763da1b..00000000000 --- a/src/libsyntax_ext/deriving/cmp/ord.rs +++ /dev/null @@ -1,113 +0,0 @@ -use crate::deriving::generic::ty::*; -use crate::deriving::generic::*; -use crate::deriving::path_std; - -use syntax::ast::{self, Expr, MetaItem}; -use syntax::ptr::P; -use syntax::symbol::sym; -use syntax_expand::base::{Annotatable, ExtCtxt}; -use syntax_pos::Span; - -pub fn expand_deriving_ord( - cx: &mut ExtCtxt<'_>, - span: Span, - mitem: &MetaItem, - item: &Annotatable, - push: &mut dyn FnMut(Annotatable), -) { - let inline = cx.meta_word(span, sym::inline); - let attrs = vec![cx.attribute(inline)]; - let trait_def = TraitDef { - span, - attributes: Vec::new(), - path: path_std!(cx, cmp::Ord), - additional_bounds: Vec::new(), - generics: LifetimeBounds::empty(), - is_unsafe: false, - supports_unions: false, - methods: vec![MethodDef { - name: "cmp", - generics: LifetimeBounds::empty(), - explicit_self: borrowed_explicit_self(), - args: vec![(borrowed_self(), "other")], - ret_ty: Literal(path_std!(cx, cmp::Ordering)), - attributes: attrs, - is_unsafe: false, - unify_fieldless_variants: true, - combine_substructure: combine_substructure(Box::new(|a, b, c| cs_cmp(a, b, c))), - }], - associated_types: Vec::new(), - }; - - trait_def.expand(cx, mitem, item, push) -} - -pub fn ordering_collapsed( - cx: &mut ExtCtxt<'_>, - span: Span, - self_arg_tags: &[ast::Ident], -) -> P<ast::Expr> { - let lft = cx.expr_ident(span, self_arg_tags[0]); - let rgt = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[1])); - cx.expr_method_call(span, lft, ast::Ident::new(sym::cmp, span), vec![rgt]) -} - -pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> { - let test_id = ast::Ident::new(sym::cmp, span); - let equals_path = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal])); - - let cmp_path = cx.std_path(&[sym::cmp, sym::Ord, sym::cmp]); - - // Builds: - // - // match ::std::cmp::Ord::cmp(&self_field1, &other_field1) { - // ::std::cmp::Ordering::Equal => - // match ::std::cmp::Ord::cmp(&self_field2, &other_field2) { - // ::std::cmp::Ordering::Equal => { - // ... - // } - // cmp => cmp - // }, - // cmp => cmp - // } - // - cs_fold( - // foldr nests the if-elses correctly, leaving the first field - // as the outermost one, and the last as the innermost. 
- false, - |cx, span, old, self_f, other_fs| { - // match new { - // ::std::cmp::Ordering::Equal => old, - // cmp => cmp - // } - - let new = { - let other_f = match other_fs { - [o_f] => o_f, - _ => cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"), - }; - - let args = - vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())]; - - cx.expr_call_global(span, cmp_path.clone(), args) - }; - - let eq_arm = cx.arm(span, cx.pat_path(span, equals_path.clone()), old); - let neq_arm = cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id)); - - cx.expr_match(span, new, vec![eq_arm, neq_arm]) - }, - cx.expr_path(equals_path.clone()), - Box::new(|cx, span, (self_args, tag_tuple), _non_self_args| { - if self_args.len() != 2 { - cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`") - } else { - ordering_collapsed(cx, span, tag_tuple) - } - }), - cx, - span, - substr, - ) -} diff --git a/src/libsyntax_ext/deriving/cmp/partial_eq.rs b/src/libsyntax_ext/deriving/cmp/partial_eq.rs deleted file mode 100644 index 91c13b76a00..00000000000 --- a/src/libsyntax_ext/deriving/cmp/partial_eq.rs +++ /dev/null @@ -1,112 +0,0 @@ -use crate::deriving::generic::ty::*; -use crate::deriving::generic::*; -use crate::deriving::{path_local, path_std}; - -use syntax::ast::{BinOpKind, Expr, MetaItem}; -use syntax::ptr::P; -use syntax::symbol::sym; -use syntax_expand::base::{Annotatable, ExtCtxt}; -use syntax_pos::Span; - -pub fn expand_deriving_partial_eq( - cx: &mut ExtCtxt<'_>, - span: Span, - mitem: &MetaItem, - item: &Annotatable, - push: &mut dyn FnMut(Annotatable), -) { - // structures are equal if all fields are equal, and non equal, if - // any fields are not equal or if the enum variants are different - fn cs_op( - cx: &mut ExtCtxt<'_>, - span: Span, - substr: &Substructure<'_>, - op: BinOpKind, - combiner: BinOpKind, - base: bool, - ) -> P<Expr> { - let op = |cx: &mut ExtCtxt<'_>, span: Span, self_f: P<Expr>, other_fs: &[P<Expr>]| { - let other_f = match other_fs { - [o_f] => o_f, - _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`"), - }; - - cx.expr_binary(span, op, self_f, other_f.clone()) - }; - - cs_fold1( - true, // use foldl - |cx, span, subexpr, self_f, other_fs| { - let eq = op(cx, span, self_f, other_fs); - cx.expr_binary(span, combiner, subexpr, eq) - }, - |cx, args| { - match args { - Some((span, self_f, other_fs)) => { - // Special-case the base case to generate cleaner code. - op(cx, span, self_f, other_fs) - } - None => cx.expr_bool(span, base), - } - }, - Box::new(|cx, span, _, _| cx.expr_bool(span, !base)), - cx, - span, - substr, - ) - } - - fn cs_eq(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> { - cs_op(cx, span, substr, BinOpKind::Eq, BinOpKind::And, true) - } - fn cs_ne(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> { - cs_op(cx, span, substr, BinOpKind::Ne, BinOpKind::Or, false) - } - - macro_rules! 
md { - ($name:expr, $f:ident) => {{ - let inline = cx.meta_word(span, sym::inline); - let attrs = vec![cx.attribute(inline)]; - MethodDef { - name: $name, - generics: LifetimeBounds::empty(), - explicit_self: borrowed_explicit_self(), - args: vec![(borrowed_self(), "other")], - ret_ty: Literal(path_local!(bool)), - attributes: attrs, - is_unsafe: false, - unify_fieldless_variants: true, - combine_substructure: combine_substructure(Box::new(|a, b, c| $f(a, b, c))), - } - }}; - } - - super::inject_impl_of_structural_trait( - cx, - span, - item, - path_std!(cx, marker::StructuralPartialEq), - push, - ); - - // avoid defining `ne` if we can - // c-like enums, enums without any fields and structs without fields - // can safely define only `eq`. - let mut methods = vec![md!("eq", cs_eq)]; - if !is_type_without_fields(item) { - methods.push(md!("ne", cs_ne)); - } - - let trait_def = TraitDef { - span, - attributes: Vec::new(), - path: path_std!(cx, cmp::PartialEq), - additional_bounds: Vec::new(), - generics: LifetimeBounds::empty(), - is_unsafe: false, - supports_unions: false, - methods, - associated_types: Vec::new(), - }; - trait_def.expand(cx, mitem, item, push) -} diff --git a/src/libsyntax_ext/deriving/cmp/partial_ord.rs b/src/libsyntax_ext/deriving/cmp/partial_ord.rs deleted file mode 100644 index 760ed325f36..00000000000 --- a/src/libsyntax_ext/deriving/cmp/partial_ord.rs +++ /dev/null @@ -1,302 +0,0 @@ -pub use OrderingOp::*; - -use crate::deriving::generic::ty::*; -use crate::deriving::generic::*; -use crate::deriving::{path_local, path_std, pathvec_std}; - -use syntax::ast::{self, BinOpKind, Expr, MetaItem}; -use syntax::ptr::P; -use syntax::symbol::{sym, Symbol}; -use syntax_expand::base::{Annotatable, ExtCtxt}; -use syntax_pos::Span; - -pub fn expand_deriving_partial_ord( - cx: &mut ExtCtxt<'_>, - span: Span, - mitem: &MetaItem, - item: &Annotatable, - push: &mut dyn FnMut(Annotatable), -) { - macro_rules! md { - ($name:expr, $op:expr, $equal:expr) => {{ - let inline = cx.meta_word(span, sym::inline); - let attrs = vec![cx.attribute(inline)]; - MethodDef { - name: $name, - generics: LifetimeBounds::empty(), - explicit_self: borrowed_explicit_self(), - args: vec![(borrowed_self(), "other")], - ret_ty: Literal(path_local!(bool)), - attributes: attrs, - is_unsafe: false, - unify_fieldless_variants: true, - combine_substructure: combine_substructure(Box::new(|cx, span, substr| { - cs_op($op, $equal, cx, span, substr) - })), - } - }}; - } - - let ordering_ty = Literal(path_std!(cx, cmp::Ordering)); - let ret_ty = Literal(Path::new_( - pathvec_std!(cx, option::Option), - None, - vec![Box::new(ordering_ty)], - PathKind::Std, - )); - - let inline = cx.meta_word(span, sym::inline); - let attrs = vec![cx.attribute(inline)]; - - let partial_cmp_def = MethodDef { - name: "partial_cmp", - generics: LifetimeBounds::empty(), - explicit_self: borrowed_explicit_self(), - args: vec![(borrowed_self(), "other")], - ret_ty, - attributes: attrs, - is_unsafe: false, - unify_fieldless_variants: true, - combine_substructure: combine_substructure(Box::new(|cx, span, substr| { - cs_partial_cmp(cx, span, substr) - })), - }; - - // avoid defining extra methods if we can - // c-like enums, enums without any fields and structs without fields - // can safely define only `partial_cmp`. 
- let methods = if is_type_without_fields(item) { - vec![partial_cmp_def] - } else { - vec![ - partial_cmp_def, - md!("lt", true, false), - md!("le", true, true), - md!("gt", false, false), - md!("ge", false, true), - ] - }; - - let trait_def = TraitDef { - span, - attributes: vec![], - path: path_std!(cx, cmp::PartialOrd), - additional_bounds: vec![], - generics: LifetimeBounds::empty(), - is_unsafe: false, - supports_unions: false, - methods, - associated_types: Vec::new(), - }; - trait_def.expand(cx, mitem, item, push) -} - -#[derive(Copy, Clone)] -pub enum OrderingOp { - PartialCmpOp, - LtOp, - LeOp, - GtOp, - GeOp, -} - -pub fn some_ordering_collapsed( - cx: &mut ExtCtxt<'_>, - span: Span, - op: OrderingOp, - self_arg_tags: &[ast::Ident], -) -> P<ast::Expr> { - let lft = cx.expr_ident(span, self_arg_tags[0]); - let rgt = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[1])); - let op_str = match op { - PartialCmpOp => "partial_cmp", - LtOp => "lt", - LeOp => "le", - GtOp => "gt", - GeOp => "ge", - }; - cx.expr_method_call(span, lft, cx.ident_of(op_str, span), vec![rgt]) -} - -pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> { - let test_id = ast::Ident::new(sym::cmp, span); - let ordering = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal])); - let ordering_expr = cx.expr_path(ordering.clone()); - let equals_expr = cx.expr_some(span, ordering_expr); - - let partial_cmp_path = cx.std_path(&[sym::cmp, sym::PartialOrd, sym::partial_cmp]); - - // Builds: - // - // match ::std::cmp::PartialOrd::partial_cmp(&self_field1, &other_field1) { - // ::std::option::Option::Some(::std::cmp::Ordering::Equal) => - // match ::std::cmp::PartialOrd::partial_cmp(&self_field2, &other_field2) { - // ::std::option::Option::Some(::std::cmp::Ordering::Equal) => { - // ... - // } - // cmp => cmp - // }, - // cmp => cmp - // } - // - cs_fold( - // foldr nests the if-elses correctly, leaving the first field - // as the outermost one, and the last as the innermost. - false, - |cx, span, old, self_f, other_fs| { - // match new { - // Some(::std::cmp::Ordering::Equal) => old, - // cmp => cmp - // } - - let new = { - let other_f = match other_fs { - [o_f] => o_f, - _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"), - }; - - let args = - vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())]; - - cx.expr_call_global(span, partial_cmp_path.clone(), args) - }; - - let eq_arm = cx.arm(span, cx.pat_some(span, cx.pat_path(span, ordering.clone())), old); - let neq_arm = cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id)); - - cx.expr_match(span, new, vec![eq_arm, neq_arm]) - }, - equals_expr, - Box::new(|cx, span, (self_args, tag_tuple), _non_self_args| { - if self_args.len() != 2 { - cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`") - } else { - some_ordering_collapsed(cx, span, PartialCmpOp, tag_tuple) - } - }), - cx, - span, - substr, - ) -} - -/// Strict inequality. 
-fn cs_op( - less: bool, - inclusive: bool, - cx: &mut ExtCtxt<'_>, - span: Span, - substr: &Substructure<'_>, -) -> P<Expr> { - let ordering_path = |cx: &mut ExtCtxt<'_>, name: &str| { - cx.expr_path( - cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, Symbol::intern(name)])), - ) - }; - - let par_cmp = |cx: &mut ExtCtxt<'_>, span, self_f: P<Expr>, other_fs: &[P<Expr>], default| { - let other_f = match other_fs { - [o_f] => o_f, - _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"), - }; - - // `PartialOrd::partial_cmp(self.fi, other.fi)` - let cmp_path = cx.expr_path( - cx.path_global(span, cx.std_path(&[sym::cmp, sym::PartialOrd, sym::partial_cmp])), - ); - let cmp = cx.expr_call( - span, - cmp_path, - vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())], - ); - - let default = ordering_path(cx, default); - // `Option::unwrap_or(_, Ordering::Equal)` - let unwrap_path = cx.expr_path( - cx.path_global(span, cx.std_path(&[sym::option, sym::Option, sym::unwrap_or])), - ); - cx.expr_call(span, unwrap_path, vec![cmp, default]) - }; - - let fold = cs_fold1( - false, // need foldr - |cx, span, subexpr, self_f, other_fs| { - // build up a series of `partial_cmp`s from the inside - // out (hence foldr) to get lexical ordering, i.e., for op == - // `ast::lt` - // - // ``` - // Ordering::then_with( - // Option::unwrap_or( - // PartialOrd::partial_cmp(self.f1, other.f1), Ordering::Equal) - // ), - // Option::unwrap_or( - // PartialOrd::partial_cmp(self.f2, other.f2), Ordering::Greater) - // ) - // ) - // == Ordering::Less - // ``` - // - // and for op == - // `ast::le` - // - // ``` - // Ordering::then_with( - // Option::unwrap_or( - // PartialOrd::partial_cmp(self.f1, other.f1), Ordering::Equal) - // ), - // Option::unwrap_or( - // PartialOrd::partial_cmp(self.f2, other.f2), Ordering::Greater) - // ) - // ) - // != Ordering::Greater - // ``` - // - // The optimiser should remove the redundancy. We explicitly - // get use the binops to avoid auto-deref dereferencing too many - // layers of pointers, if the type includes pointers. 
- - // `Option::unwrap_or(PartialOrd::partial_cmp(self.fi, other.fi), Ordering::Equal)` - let par_cmp = par_cmp(cx, span, self_f, other_fs, "Equal"); - - // `Ordering::then_with(Option::unwrap_or(..), ..)` - let then_with_path = cx.expr_path( - cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::then_with])), - ); - cx.expr_call(span, then_with_path, vec![par_cmp, cx.lambda0(span, subexpr)]) - }, - |cx, args| match args { - Some((span, self_f, other_fs)) => { - let opposite = if less { "Greater" } else { "Less" }; - par_cmp(cx, span, self_f, other_fs, opposite) - } - None => cx.expr_bool(span, inclusive), - }, - Box::new(|cx, span, (self_args, tag_tuple), _non_self_args| { - if self_args.len() != 2 { - cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`") - } else { - let op = match (less, inclusive) { - (false, false) => GtOp, - (false, true) => GeOp, - (true, false) => LtOp, - (true, true) => LeOp, - }; - some_ordering_collapsed(cx, span, op, tag_tuple) - } - }), - cx, - span, - substr, - ); - - match *substr.fields { - EnumMatching(.., ref all_fields) | Struct(.., ref all_fields) if !all_fields.is_empty() => { - let ordering = ordering_path(cx, if less ^ inclusive { "Less" } else { "Greater" }); - let comp_op = if inclusive { BinOpKind::Ne } else { BinOpKind::Eq }; - - cx.expr_binary(span, comp_op, fold, ordering) - } - _ => fold, - } -} diff --git a/src/libsyntax_ext/deriving/debug.rs b/src/libsyntax_ext/deriving/debug.rs deleted file mode 100644 index c145b63274e..00000000000 --- a/src/libsyntax_ext/deriving/debug.rs +++ /dev/null @@ -1,137 +0,0 @@ -use crate::deriving::generic::ty::*; -use crate::deriving::generic::*; -use crate::deriving::path_std; - -use syntax::ast::{self, Ident}; -use syntax::ast::{Expr, MetaItem}; -use syntax::ptr::P; -use syntax::symbol::sym; -use syntax_expand::base::{Annotatable, ExtCtxt}; -use syntax_pos::{Span, DUMMY_SP}; - -pub fn expand_deriving_debug( - cx: &mut ExtCtxt<'_>, - span: Span, - mitem: &MetaItem, - item: &Annotatable, - push: &mut dyn FnMut(Annotatable), -) { - // &mut ::std::fmt::Formatter - let fmtr = - Ptr(Box::new(Literal(path_std!(cx, fmt::Formatter))), Borrowed(None, ast::Mutability::Mut)); - - let trait_def = TraitDef { - span, - attributes: Vec::new(), - path: path_std!(cx, fmt::Debug), - additional_bounds: Vec::new(), - generics: LifetimeBounds::empty(), - is_unsafe: false, - supports_unions: false, - methods: vec![MethodDef { - name: "fmt", - generics: LifetimeBounds::empty(), - explicit_self: borrowed_explicit_self(), - args: vec![(fmtr, "f")], - ret_ty: Literal(path_std!(cx, fmt::Result)), - attributes: Vec::new(), - is_unsafe: false, - unify_fieldless_variants: false, - combine_substructure: combine_substructure(Box::new(|a, b, c| { - show_substructure(a, b, c) - })), - }], - associated_types: Vec::new(), - }; - trait_def.expand(cx, mitem, item, push) -} - -/// We use the debug builders to do the heavy lifting here -fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> { - // build fmt.debug_struct(<name>).field(<fieldname>, &<fieldval>)....build() - // or fmt.debug_tuple(<name>).field(&<fieldval>)....build() - // based on the "shape". - let (ident, vdata, fields) = match substr.fields { - Struct(vdata, fields) => (substr.type_ident, *vdata, fields), - EnumMatching(_, _, v, fields) => (v.ident, &v.data, fields), - EnumNonMatchingCollapsed(..) | StaticStruct(..) | StaticEnum(..) 
=> { - cx.span_bug(span, "nonsensical .fields in `#[derive(Debug)]`") - } - }; - - // We want to make sure we have the ctxt set so that we can use unstable methods - let span = cx.with_def_site_ctxt(span); - let name = cx.expr_lit(span, ast::LitKind::Str(ident.name, ast::StrStyle::Cooked)); - let builder = cx.ident_of("debug_trait_builder", span); - let builder_expr = cx.expr_ident(span, builder.clone()); - - let fmt = substr.nonself_args[0].clone(); - - let mut stmts = vec![]; - match vdata { - ast::VariantData::Tuple(..) | ast::VariantData::Unit(..) => { - // tuple struct/"normal" variant - let expr = cx.expr_method_call(span, fmt, cx.ident_of("debug_tuple", span), vec![name]); - stmts.push(cx.stmt_let(span, true, builder, expr)); - - for field in fields { - // Use double indirection to make sure this works for unsized types - let field = cx.expr_addr_of(field.span, field.self_.clone()); - let field = cx.expr_addr_of(field.span, field); - - let expr = cx.expr_method_call( - span, - builder_expr.clone(), - Ident::new(sym::field, span), - vec![field], - ); - - // Use `let _ = expr;` to avoid triggering the - // unused_results lint. - stmts.push(stmt_let_undescore(cx, span, expr)); - } - } - ast::VariantData::Struct(..) => { - // normal struct/struct variant - let expr = - cx.expr_method_call(span, fmt, cx.ident_of("debug_struct", span), vec![name]); - stmts.push(cx.stmt_let(DUMMY_SP, true, builder, expr)); - - for field in fields { - let name = cx.expr_lit( - field.span, - ast::LitKind::Str(field.name.unwrap().name, ast::StrStyle::Cooked), - ); - - // Use double indirection to make sure this works for unsized types - let field = cx.expr_addr_of(field.span, field.self_.clone()); - let field = cx.expr_addr_of(field.span, field); - let expr = cx.expr_method_call( - span, - builder_expr.clone(), - Ident::new(sym::field, span), - vec![name, field], - ); - stmts.push(stmt_let_undescore(cx, span, expr)); - } - } - } - - let expr = cx.expr_method_call(span, builder_expr, cx.ident_of("finish", span), vec![]); - - stmts.push(cx.stmt_expr(expr)); - let block = cx.block(span, stmts); - cx.expr_block(block) -} - -fn stmt_let_undescore(cx: &mut ExtCtxt<'_>, sp: Span, expr: P<ast::Expr>) -> ast::Stmt { - let local = P(ast::Local { - pat: cx.pat_wild(sp), - ty: None, - init: Some(expr), - id: ast::DUMMY_NODE_ID, - span: sp, - attrs: ast::AttrVec::new(), - }); - ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp } -} diff --git a/src/libsyntax_ext/deriving/decodable.rs b/src/libsyntax_ext/deriving/decodable.rs deleted file mode 100644 index 7f21440d49a..00000000000 --- a/src/libsyntax_ext/deriving/decodable.rs +++ /dev/null @@ -1,225 +0,0 @@ -//! The compiler code necessary for `#[derive(RustcDecodable)]`. See encodable.rs for more. 
- -use crate::deriving::generic::ty::*; -use crate::deriving::generic::*; -use crate::deriving::pathvec_std; - -use syntax::ast; -use syntax::ast::{Expr, MetaItem, Mutability}; -use syntax::ptr::P; -use syntax::symbol::Symbol; -use syntax_expand::base::{Annotatable, ExtCtxt}; -use syntax_pos::Span; - -pub fn expand_deriving_rustc_decodable( - cx: &mut ExtCtxt<'_>, - span: Span, - mitem: &MetaItem, - item: &Annotatable, - push: &mut dyn FnMut(Annotatable), -) { - let krate = "rustc_serialize"; - let typaram = "__D"; - - let trait_def = TraitDef { - span, - attributes: Vec::new(), - path: Path::new_(vec![krate, "Decodable"], None, vec![], PathKind::Global), - additional_bounds: Vec::new(), - generics: LifetimeBounds::empty(), - is_unsafe: false, - supports_unions: false, - methods: vec![MethodDef { - name: "decode", - generics: LifetimeBounds { - lifetimes: Vec::new(), - bounds: vec![( - typaram, - vec![Path::new_(vec![krate, "Decoder"], None, vec![], PathKind::Global)], - )], - }, - explicit_self: None, - args: vec![( - Ptr(Box::new(Literal(Path::new_local(typaram))), Borrowed(None, Mutability::Mut)), - "d", - )], - ret_ty: Literal(Path::new_( - pathvec_std!(cx, result::Result), - None, - vec![ - Box::new(Self_), - Box::new(Literal(Path::new_( - vec![typaram, "Error"], - None, - vec![], - PathKind::Local, - ))), - ], - PathKind::Std, - )), - attributes: Vec::new(), - is_unsafe: false, - unify_fieldless_variants: false, - combine_substructure: combine_substructure(Box::new(|a, b, c| { - decodable_substructure(a, b, c, krate) - })), - }], - associated_types: Vec::new(), - }; - - trait_def.expand(cx, mitem, item, push) -} - -fn decodable_substructure( - cx: &mut ExtCtxt<'_>, - trait_span: Span, - substr: &Substructure<'_>, - krate: &str, -) -> P<Expr> { - let decoder = substr.nonself_args[0].clone(); - let recurse = vec![ - cx.ident_of(krate, trait_span), - cx.ident_of("Decodable", trait_span), - cx.ident_of("decode", trait_span), - ]; - let exprdecode = cx.expr_path(cx.path_global(trait_span, recurse)); - // throw an underscore in front to suppress unused variable warnings - let blkarg = cx.ident_of("_d", trait_span); - let blkdecoder = cx.expr_ident(trait_span, blkarg); - - return match *substr.fields { - StaticStruct(_, ref summary) => { - let nfields = match *summary { - Unnamed(ref fields, _) => fields.len(), - Named(ref fields) => fields.len(), - }; - let read_struct_field = cx.ident_of("read_struct_field", trait_span); - - let path = cx.path_ident(trait_span, substr.type_ident); - let result = - decode_static_fields(cx, trait_span, path, summary, |cx, span, name, field| { - cx.expr_try( - span, - cx.expr_method_call( - span, - blkdecoder.clone(), - read_struct_field, - vec![ - cx.expr_str(span, name), - cx.expr_usize(span, field), - exprdecode.clone(), - ], - ), - ) - }); - let result = cx.expr_ok(trait_span, result); - cx.expr_method_call( - trait_span, - decoder, - cx.ident_of("read_struct", trait_span), - vec![ - cx.expr_str(trait_span, substr.type_ident.name), - cx.expr_usize(trait_span, nfields), - cx.lambda1(trait_span, result, blkarg), - ], - ) - } - StaticEnum(_, ref fields) => { - let variant = cx.ident_of("i", trait_span); - - let mut arms = Vec::with_capacity(fields.len() + 1); - let mut variants = Vec::with_capacity(fields.len()); - let rvariant_arg = cx.ident_of("read_enum_variant_arg", trait_span); - - for (i, &(ident, v_span, ref parts)) in fields.iter().enumerate() { - variants.push(cx.expr_str(v_span, ident.name)); - - let path = cx.path(trait_span, 
vec![substr.type_ident, ident]); - let decoded = - decode_static_fields(cx, v_span, path, parts, |cx, span, _, field| { - let idx = cx.expr_usize(span, field); - cx.expr_try( - span, - cx.expr_method_call( - span, - blkdecoder.clone(), - rvariant_arg, - vec![idx, exprdecode.clone()], - ), - ) - }); - - arms.push(cx.arm(v_span, cx.pat_lit(v_span, cx.expr_usize(v_span, i)), decoded)); - } - - arms.push(cx.arm_unreachable(trait_span)); - - let result = cx.expr_ok( - trait_span, - cx.expr_match(trait_span, cx.expr_ident(trait_span, variant), arms), - ); - let lambda = cx.lambda(trait_span, vec![blkarg, variant], result); - let variant_vec = cx.expr_vec(trait_span, variants); - let variant_vec = cx.expr_addr_of(trait_span, variant_vec); - let result = cx.expr_method_call( - trait_span, - blkdecoder, - cx.ident_of("read_enum_variant", trait_span), - vec![variant_vec, lambda], - ); - cx.expr_method_call( - trait_span, - decoder, - cx.ident_of("read_enum", trait_span), - vec![ - cx.expr_str(trait_span, substr.type_ident.name), - cx.lambda1(trait_span, result, blkarg), - ], - ) - } - _ => cx.bug("expected StaticEnum or StaticStruct in derive(Decodable)"), - }; -} - -/// Creates a decoder for a single enum variant/struct: -/// - `outer_pat_path` is the path to this enum variant/struct -/// - `getarg` should retrieve the `usize`-th field with name `@str`. -fn decode_static_fields<F>( - cx: &mut ExtCtxt<'_>, - trait_span: Span, - outer_pat_path: ast::Path, - fields: &StaticFields, - mut getarg: F, -) -> P<Expr> -where - F: FnMut(&mut ExtCtxt<'_>, Span, Symbol, usize) -> P<Expr>, -{ - match *fields { - Unnamed(ref fields, is_tuple) => { - let path_expr = cx.expr_path(outer_pat_path); - if !is_tuple { - path_expr - } else { - let fields = fields - .iter() - .enumerate() - .map(|(i, &span)| getarg(cx, span, Symbol::intern(&format!("_field{}", i)), i)) - .collect(); - - cx.expr_call(trait_span, path_expr, fields) - } - } - Named(ref fields) => { - // use the field's span to get nicer error messages. 
- let fields = fields - .iter() - .enumerate() - .map(|(i, &(ident, span))| { - let arg = getarg(cx, span, ident.name, i); - cx.field_imm(span, ident, arg) - }) - .collect(); - cx.expr_struct(trait_span, outer_pat_path, fields) - } - } -} diff --git a/src/libsyntax_ext/deriving/default.rs b/src/libsyntax_ext/deriving/default.rs deleted file mode 100644 index d623e1fa4cc..00000000000 --- a/src/libsyntax_ext/deriving/default.rs +++ /dev/null @@ -1,83 +0,0 @@ -use crate::deriving::generic::ty::*; -use crate::deriving::generic::*; -use crate::deriving::path_std; - -use syntax::ast::{Expr, MetaItem}; -use syntax::ptr::P; -use syntax::span_err; -use syntax::symbol::{kw, sym}; -use syntax_expand::base::{Annotatable, DummyResult, ExtCtxt}; -use syntax_pos::Span; - -use rustc_error_codes::*; - -pub fn expand_deriving_default( - cx: &mut ExtCtxt<'_>, - span: Span, - mitem: &MetaItem, - item: &Annotatable, - push: &mut dyn FnMut(Annotatable), -) { - let inline = cx.meta_word(span, sym::inline); - let attrs = vec![cx.attribute(inline)]; - let trait_def = TraitDef { - span, - attributes: Vec::new(), - path: path_std!(cx, default::Default), - additional_bounds: Vec::new(), - generics: LifetimeBounds::empty(), - is_unsafe: false, - supports_unions: false, - methods: vec![MethodDef { - name: "default", - generics: LifetimeBounds::empty(), - explicit_self: None, - args: Vec::new(), - ret_ty: Self_, - attributes: attrs, - is_unsafe: false, - unify_fieldless_variants: false, - combine_substructure: combine_substructure(Box::new(|a, b, c| { - default_substructure(a, b, c) - })), - }], - associated_types: Vec::new(), - }; - trait_def.expand(cx, mitem, item, push) -} - -fn default_substructure( - cx: &mut ExtCtxt<'_>, - trait_span: Span, - substr: &Substructure<'_>, -) -> P<Expr> { - // Note that `kw::Default` is "default" and `sym::Default` is "Default"! - let default_ident = cx.std_path(&[kw::Default, sym::Default, kw::Default]); - let default_call = |span| cx.expr_call_global(span, default_ident.clone(), Vec::new()); - - return match *substr.fields { - StaticStruct(_, ref summary) => match *summary { - Unnamed(ref fields, is_tuple) => { - if !is_tuple { - cx.expr_ident(trait_span, substr.type_ident) - } else { - let exprs = fields.iter().map(|sp| default_call(*sp)).collect(); - cx.expr_call_ident(trait_span, substr.type_ident, exprs) - } - } - Named(ref fields) => { - let default_fields = fields - .iter() - .map(|&(ident, span)| cx.field_imm(span, ident, default_call(span))) - .collect(); - cx.expr_struct_ident(trait_span, substr.type_ident, default_fields) - } - }, - StaticEnum(..) => { - span_err!(cx, trait_span, E0665, "`Default` cannot be derived for enums, only structs"); - // let compilation continue - DummyResult::raw_expr(trait_span, true) - } - _ => cx.span_bug(trait_span, "method in `derive(Default)`"), - }; -} diff --git a/src/libsyntax_ext/deriving/encodable.rs b/src/libsyntax_ext/deriving/encodable.rs deleted file mode 100644 index 98b0160d6e8..00000000000 --- a/src/libsyntax_ext/deriving/encodable.rs +++ /dev/null @@ -1,287 +0,0 @@ -//! The compiler code necessary to implement the `#[derive(RustcEncodable)]` -//! (and `RustcDecodable`, in `decodable.rs`) extension. The idea here is that -//! type-defining items may be tagged with -//! `#[derive(RustcEncodable, RustcDecodable)]`. -//! -//! For example, a type like: -//! -//! ``` -//! #[derive(RustcEncodable, RustcDecodable)] -//! struct Node { id: usize } -//! ``` -//! -//! would generate two implementations like: -//! -//! ``` -//! 
# struct Node { id: usize } -//! impl<S: Encoder<E>, E> Encodable<S, E> for Node { -//! fn encode(&self, s: &mut S) -> Result<(), E> { -//! s.emit_struct("Node", 1, |this| { -//! this.emit_struct_field("id", 0, |this| { -//! Encodable::encode(&self.id, this) -//! /* this.emit_usize(self.id) can also be used */ -//! }) -//! }) -//! } -//! } -//! -//! impl<D: Decoder<E>, E> Decodable<D, E> for Node { -//! fn decode(d: &mut D) -> Result<Node, E> { -//! d.read_struct("Node", 1, |this| { -//! match this.read_struct_field("id", 0, |this| Decodable::decode(this)) { -//! Ok(id) => Ok(Node { id: id }), -//! Err(e) => Err(e), -//! } -//! }) -//! } -//! } -//! ``` -//! -//! Other interesting scenarios are when the item has type parameters or -//! references other non-built-in types. A type definition like: -//! -//! ``` -//! # #[derive(RustcEncodable, RustcDecodable)] -//! # struct Span; -//! #[derive(RustcEncodable, RustcDecodable)] -//! struct Spanned<T> { node: T, span: Span } -//! ``` -//! -//! would yield functions like: -//! -//! ``` -//! # #[derive(RustcEncodable, RustcDecodable)] -//! # struct Span; -//! # struct Spanned<T> { node: T, span: Span } -//! impl< -//! S: Encoder<E>, -//! E, -//! T: Encodable<S, E> -//! > Encodable<S, E> for Spanned<T> { -//! fn encode(&self, s: &mut S) -> Result<(), E> { -//! s.emit_struct("Spanned", 2, |this| { -//! this.emit_struct_field("node", 0, |this| self.node.encode(this)) -//! .unwrap(); -//! this.emit_struct_field("span", 1, |this| self.span.encode(this)) -//! }) -//! } -//! } -//! -//! impl< -//! D: Decoder<E>, -//! E, -//! T: Decodable<D, E> -//! > Decodable<D, E> for Spanned<T> { -//! fn decode(d: &mut D) -> Result<Spanned<T>, E> { -//! d.read_struct("Spanned", 2, |this| { -//! Ok(Spanned { -//! node: this.read_struct_field("node", 0, |this| Decodable::decode(this)) -//! .unwrap(), -//! span: this.read_struct_field("span", 1, |this| Decodable::decode(this)) -//! .unwrap(), -//! }) -//! }) -//! } -//! } -//! 
``` - -use crate::deriving::generic::ty::*; -use crate::deriving::generic::*; -use crate::deriving::pathvec_std; - -use syntax::ast::{Expr, ExprKind, MetaItem, Mutability}; -use syntax::ptr::P; -use syntax::symbol::Symbol; -use syntax_expand::base::{Annotatable, ExtCtxt}; -use syntax_pos::Span; - -pub fn expand_deriving_rustc_encodable( - cx: &mut ExtCtxt<'_>, - span: Span, - mitem: &MetaItem, - item: &Annotatable, - push: &mut dyn FnMut(Annotatable), -) { - let krate = "rustc_serialize"; - let typaram = "__S"; - - let trait_def = TraitDef { - span, - attributes: Vec::new(), - path: Path::new_(vec![krate, "Encodable"], None, vec![], PathKind::Global), - additional_bounds: Vec::new(), - generics: LifetimeBounds::empty(), - is_unsafe: false, - supports_unions: false, - methods: vec![MethodDef { - name: "encode", - generics: LifetimeBounds { - lifetimes: Vec::new(), - bounds: vec![( - typaram, - vec![Path::new_(vec![krate, "Encoder"], None, vec![], PathKind::Global)], - )], - }, - explicit_self: borrowed_explicit_self(), - args: vec![( - Ptr(Box::new(Literal(Path::new_local(typaram))), Borrowed(None, Mutability::Mut)), - "s", - )], - ret_ty: Literal(Path::new_( - pathvec_std!(cx, result::Result), - None, - vec![ - Box::new(Tuple(Vec::new())), - Box::new(Literal(Path::new_( - vec![typaram, "Error"], - None, - vec![], - PathKind::Local, - ))), - ], - PathKind::Std, - )), - attributes: Vec::new(), - is_unsafe: false, - unify_fieldless_variants: false, - combine_substructure: combine_substructure(Box::new(|a, b, c| { - encodable_substructure(a, b, c, krate) - })), - }], - associated_types: Vec::new(), - }; - - trait_def.expand(cx, mitem, item, push) -} - -fn encodable_substructure( - cx: &mut ExtCtxt<'_>, - trait_span: Span, - substr: &Substructure<'_>, - krate: &'static str, -) -> P<Expr> { - let encoder = substr.nonself_args[0].clone(); - // throw an underscore in front to suppress unused variable warnings - let blkarg = cx.ident_of("_e", trait_span); - let blkencoder = cx.expr_ident(trait_span, blkarg); - let fn_path = cx.expr_path(cx.path_global( - trait_span, - vec![ - cx.ident_of(krate, trait_span), - cx.ident_of("Encodable", trait_span), - cx.ident_of("encode", trait_span), - ], - )); - - return match *substr.fields { - Struct(_, ref fields) => { - let emit_struct_field = cx.ident_of("emit_struct_field", trait_span); - let mut stmts = Vec::new(); - for (i, &FieldInfo { name, ref self_, span, .. }) in fields.iter().enumerate() { - let name = match name { - Some(id) => id.name, - None => Symbol::intern(&format!("_field{}", i)), - }; - let self_ref = cx.expr_addr_of(span, self_.clone()); - let enc = cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]); - let lambda = cx.lambda1(span, enc, blkarg); - let call = cx.expr_method_call( - span, - blkencoder.clone(), - emit_struct_field, - vec![cx.expr_str(span, name), cx.expr_usize(span, i), lambda], - ); - - // last call doesn't need a try! 
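As an aside on the error-propagation shape handled just above: all field calls but the last are wrapped in an early-return on error (the `expr_try` built here), while the last call's result is returned directly (the `ExprKind::Ret` below). The sketch shows that shape with a trimmed-down, hypothetical `Encoder` trait, not the real `rustc_serialize` API.

```rust
// Hypothetical stand-in for the external `rustc_serialize::Encoder` trait,
// trimmed to what this sketch needs; not the real crate API.
trait Encoder {
    type Error;
    fn emit_struct<F>(&mut self, name: &str, len: usize, f: F) -> Result<(), Self::Error>
    where
        F: FnOnce(&mut Self) -> Result<(), Self::Error>;
    fn emit_struct_field<F>(&mut self, name: &str, idx: usize, f: F) -> Result<(), Self::Error>
    where
        F: FnOnce(&mut Self) -> Result<(), Self::Error>;
    fn emit_u32(&mut self, v: u32) -> Result<(), Self::Error>;
}

// Example type; made up for this illustration.
struct Pair {
    a: u32,
    b: u32,
}

impl Pair {
    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        s.emit_struct("Pair", 2, |s| {
            // Every field except the last propagates errors early ...
            s.emit_struct_field("a", 0, |s| s.emit_u32(self.a))?;
            // ... while the last call's result is returned directly.
            return s.emit_struct_field("b", 1, |s| s.emit_u32(self.b));
        })
    }
}
```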
- let last = fields.len() - 1; - let call = if i != last { - cx.expr_try(span, call) - } else { - cx.expr(span, ExprKind::Ret(Some(call))) - }; - - let stmt = cx.stmt_expr(call); - stmts.push(stmt); - } - - // unit structs have no fields and need to return Ok() - let blk = if stmts.is_empty() { - let ok = cx.expr_ok(trait_span, cx.expr_tuple(trait_span, vec![])); - cx.lambda1(trait_span, ok, blkarg) - } else { - cx.lambda_stmts_1(trait_span, stmts, blkarg) - }; - - cx.expr_method_call( - trait_span, - encoder, - cx.ident_of("emit_struct", trait_span), - vec![ - cx.expr_str(trait_span, substr.type_ident.name), - cx.expr_usize(trait_span, fields.len()), - blk, - ], - ) - } - - EnumMatching(idx, _, variant, ref fields) => { - // We're not generating an AST that the borrow checker is expecting, - // so we need to generate a unique local variable to take the - // mutable loan out on, otherwise we get conflicts which don't - // actually exist. - let me = cx.stmt_let(trait_span, false, blkarg, encoder); - let encoder = cx.expr_ident(trait_span, blkarg); - let emit_variant_arg = cx.ident_of("emit_enum_variant_arg", trait_span); - let mut stmts = Vec::new(); - if !fields.is_empty() { - let last = fields.len() - 1; - for (i, &FieldInfo { ref self_, span, .. }) in fields.iter().enumerate() { - let self_ref = cx.expr_addr_of(span, self_.clone()); - let enc = - cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]); - let lambda = cx.lambda1(span, enc, blkarg); - let call = cx.expr_method_call( - span, - blkencoder.clone(), - emit_variant_arg, - vec![cx.expr_usize(span, i), lambda], - ); - let call = if i != last { - cx.expr_try(span, call) - } else { - cx.expr(span, ExprKind::Ret(Some(call))) - }; - stmts.push(cx.stmt_expr(call)); - } - } else { - let ok = cx.expr_ok(trait_span, cx.expr_tuple(trait_span, vec![])); - let ret_ok = cx.expr(trait_span, ExprKind::Ret(Some(ok))); - stmts.push(cx.stmt_expr(ret_ok)); - } - - let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg); - let name = cx.expr_str(trait_span, variant.ident.name); - let call = cx.expr_method_call( - trait_span, - blkencoder, - cx.ident_of("emit_enum_variant", trait_span), - vec![ - name, - cx.expr_usize(trait_span, idx), - cx.expr_usize(trait_span, fields.len()), - blk, - ], - ); - let blk = cx.lambda1(trait_span, call, blkarg); - let ret = cx.expr_method_call( - trait_span, - encoder, - cx.ident_of("emit_enum", trait_span), - vec![cx.expr_str(trait_span, substr.type_ident.name), blk], - ); - cx.expr_block(cx.block(trait_span, vec![me, cx.stmt_expr(ret)])) - } - - _ => cx.bug("expected Struct or EnumMatching in derive(Encodable)"), - }; -} diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs deleted file mode 100644 index 7d7b73ebb42..00000000000 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ /dev/null @@ -1,1812 +0,0 @@ -//! Some code that abstracts away much of the boilerplate of writing -//! `derive` instances for traits. Among other things it manages getting -//! access to the fields of the 4 different sorts of structs and enum -//! variants, as well as creating the method and impl ast instances. -//! -//! Supported features (fairly exhaustive): -//! -//! - Methods taking any number of parameters of any type, and returning -//! any type, other than vectors, bottom and closures. -//! - Generating `impl`s for types with type parameters and lifetimes -//! (e.g., `Option<T>`), the parameters are automatically given the -//! current trait as a bound. 
(This includes separate type parameters -//! and lifetimes for methods.) -//! - Additional bounds on the type parameters (`TraitDef.additional_bounds`) -//! -//! The most important thing for implementors is the `Substructure` and -//! `SubstructureFields` objects. The latter groups 5 possibilities of the -//! arguments: -//! -//! - `Struct`, when `Self` is a struct (including tuple structs, e.g -//! `struct T(i32, char)`). -//! - `EnumMatching`, when `Self` is an enum and all the arguments are the -//! same variant of the enum (e.g., `Some(1)`, `Some(3)` and `Some(4)`) -//! - `EnumNonMatchingCollapsed` when `Self` is an enum and the arguments -//! are not the same variant (e.g., `None`, `Some(1)` and `None`). -//! - `StaticEnum` and `StaticStruct` for static methods, where the type -//! being derived upon is either an enum or struct respectively. (Any -//! argument with type Self is just grouped among the non-self -//! arguments.) -//! -//! In the first two cases, the values from the corresponding fields in -//! all the arguments are grouped together. For `EnumNonMatchingCollapsed` -//! this isn't possible (different variants have different fields), so the -//! fields are inaccessible. (Previous versions of the deriving infrastructure -//! had a way to expand into code that could access them, at the cost of -//! generating exponential amounts of code; see issue #15375). There are no -//! fields with values in the static cases, so these are treated entirely -//! differently. -//! -//! The non-static cases have `Option<ident>` in several places associated -//! with field `expr`s. This represents the name of the field it is -//! associated with. It is only not `None` when the associated field has -//! an identifier in the source code. For example, the `x`s in the -//! following snippet -//! -//! ```rust -//! # #![allow(dead_code)] -//! struct A { x : i32 } -//! -//! struct B(i32); -//! -//! enum C { -//! C0(i32), -//! C1 { x: i32 } -//! } -//! ``` -//! -//! The `i32`s in `B` and `C0` don't have an identifier, so the -//! `Option<ident>`s would be `None` for them. -//! -//! In the static cases, the structure is summarized, either into the just -//! spans of the fields or a list of spans and the field idents (for tuple -//! structs and record structs, respectively), or a list of these, for -//! enums (one for each variant). For empty struct and empty enum -//! variants, it is represented as a count of 0. -//! -//! # "`cs`" functions -//! -//! The `cs_...` functions ("combine substructure) are designed to -//! make life easier by providing some pre-made recipes for common -//! threads; mostly calling the function being derived on all the -//! arguments and then combining them back together in some way (or -//! letting the user chose that). They are not meant to be the only -//! way to handle the structures that this code creates. -//! -//! # Examples -//! -//! The following simplified `PartialEq` is used for in-code examples: -//! -//! ```rust -//! trait PartialEq { -//! fn eq(&self, other: &Self) -> bool; -//! } -//! impl PartialEq for i32 { -//! fn eq(&self, other: &i32) -> bool { -//! *self == *other -//! } -//! } -//! ``` -//! -//! Some examples of the values of `SubstructureFields` follow, using the -//! above `PartialEq`, `A`, `B` and `C`. -//! -//! ## Structs -//! -//! When generating the `expr` for the `A` impl, the `SubstructureFields` is -//! -//! ```{.text} -//! Struct(vec![FieldInfo { -//! span: <span of x> -//! name: Some(<ident of x>), -//! self_: <expr for &self.x>, -//! 
other: vec![<expr for &other.x] -//! }]) -//! ``` -//! -//! For the `B` impl, called with `B(a)` and `B(b)`, -//! -//! ```{.text} -//! Struct(vec![FieldInfo { -//! span: <span of `i32`>, -//! name: None, -//! self_: <expr for &a> -//! other: vec![<expr for &b>] -//! }]) -//! ``` -//! -//! ## Enums -//! -//! When generating the `expr` for a call with `self == C0(a)` and `other -//! == C0(b)`, the SubstructureFields is -//! -//! ```{.text} -//! EnumMatching(0, <ast::Variant for C0>, -//! vec![FieldInfo { -//! span: <span of i32> -//! name: None, -//! self_: <expr for &a>, -//! other: vec![<expr for &b>] -//! }]) -//! ``` -//! -//! For `C1 {x}` and `C1 {x}`, -//! -//! ```{.text} -//! EnumMatching(1, <ast::Variant for C1>, -//! vec![FieldInfo { -//! span: <span of x> -//! name: Some(<ident of x>), -//! self_: <expr for &self.x>, -//! other: vec![<expr for &other.x>] -//! }]) -//! ``` -//! -//! For `C0(a)` and `C1 {x}` , -//! -//! ```{.text} -//! EnumNonMatchingCollapsed( -//! vec![<ident of self>, <ident of __arg_1>], -//! &[<ast::Variant for C0>, <ast::Variant for C1>], -//! &[<ident for self index value>, <ident of __arg_1 index value>]) -//! ``` -//! -//! It is the same for when the arguments are flipped to `C1 {x}` and -//! `C0(a)`; the only difference is what the values of the identifiers -//! <ident for self index value> and <ident of __arg_1 index value> will -//! be in the generated code. -//! -//! `EnumNonMatchingCollapsed` deliberately provides far less information -//! than is generally available for a given pair of variants; see #15375 -//! for discussion. -//! -//! ## Static -//! -//! A static method on the types above would result in, -//! -//! ```{.text} -//! StaticStruct(<ast::VariantData of A>, Named(vec![(<ident of x>, <span of x>)])) -//! -//! StaticStruct(<ast::VariantData of B>, Unnamed(vec![<span of x>])) -//! -//! StaticEnum(<ast::EnumDef of C>, -//! vec![(<ident of C0>, <span of C0>, Unnamed(vec![<span of i32>])), -//! (<ident of C1>, <span of C1>, Named(vec![(<ident of x>, <span of x>)]))]) -//! ``` - -pub use StaticFields::*; -pub use SubstructureFields::*; - -use std::cell::RefCell; -use std::iter; -use std::vec; - -use syntax::ast::{self, BinOpKind, EnumDef, Expr, Generics, Ident, PatKind}; -use syntax::ast::{GenericArg, GenericParamKind, VariantData}; -use syntax::attr; -use syntax::ptr::P; -use syntax::sess::ParseSess; -use syntax::source_map::respan; -use syntax::symbol::{kw, sym, Symbol}; -use syntax::util::map_in_place::MapInPlace; -use syntax_expand::base::{Annotatable, ExtCtxt}; -use syntax_pos::Span; - -use ty::{LifetimeBounds, Path, Ptr, PtrTy, Self_, Ty}; - -use crate::deriving; - -pub mod ty; - -pub struct TraitDef<'a> { - /// The span for the current #[derive(Foo)] header. - pub span: Span, - - pub attributes: Vec<ast::Attribute>, - - /// Path of the trait, including any type parameters - pub path: Path<'a>, - - /// Additional bounds required of any type parameters of the type, - /// other than the current trait - pub additional_bounds: Vec<Ty<'a>>, - - /// Any extra lifetimes and/or bounds, e.g., `D: serialize::Decoder` - pub generics: LifetimeBounds<'a>, - - /// Is it an `unsafe` trait? - pub is_unsafe: bool, - - /// Can this trait be derived for unions? 
- pub supports_unions: bool, - - pub methods: Vec<MethodDef<'a>>, - - pub associated_types: Vec<(ast::Ident, Ty<'a>)>, -} - -pub struct MethodDef<'a> { - /// name of the method - pub name: &'a str, - /// List of generics, e.g., `R: rand::Rng` - pub generics: LifetimeBounds<'a>, - - /// Whether there is a self argument (outer Option) i.e., whether - /// this is a static function, and whether it is a pointer (inner - /// Option) - pub explicit_self: Option<Option<PtrTy>>, - - /// Arguments other than the self argument - pub args: Vec<(Ty<'a>, &'a str)>, - - /// Returns type - pub ret_ty: Ty<'a>, - - pub attributes: Vec<ast::Attribute>, - - // Is it an `unsafe fn`? - pub is_unsafe: bool, - - /// Can we combine fieldless variants for enums into a single match arm? - pub unify_fieldless_variants: bool, - - pub combine_substructure: RefCell<CombineSubstructureFunc<'a>>, -} - -/// All the data about the data structure/method being derived upon. -pub struct Substructure<'a> { - /// ident of self - pub type_ident: Ident, - /// ident of the method - pub method_ident: Ident, - /// dereferenced access to any `Self_` or `Ptr(Self_, _)` arguments - pub self_args: &'a [P<Expr>], - /// verbatim access to any other arguments - pub nonself_args: &'a [P<Expr>], - pub fields: &'a SubstructureFields<'a>, -} - -/// Summary of the relevant parts of a struct/enum field. -pub struct FieldInfo<'a> { - pub span: Span, - /// None for tuple structs/normal enum variants, Some for normal - /// structs/struct enum variants. - pub name: Option<Ident>, - /// The expression corresponding to this field of `self` - /// (specifically, a reference to it). - pub self_: P<Expr>, - /// The expressions corresponding to references to this field in - /// the other `Self` arguments. - pub other: Vec<P<Expr>>, - /// The attributes on the field - pub attrs: &'a [ast::Attribute], -} - -/// Fields for a static method -pub enum StaticFields { - /// Tuple and unit structs/enum variants like this. - Unnamed(Vec<Span>, bool /*is tuple*/), - /// Normal structs/struct variants. - Named(Vec<(Ident, Span)>), -} - -/// A summary of the possible sets of fields. -pub enum SubstructureFields<'a> { - Struct(&'a ast::VariantData, Vec<FieldInfo<'a>>), - /// Matching variants of the enum: variant index, variant count, ast::Variant, - /// fields: the field name is only non-`None` in the case of a struct - /// variant. - EnumMatching(usize, usize, &'a ast::Variant, Vec<FieldInfo<'a>>), - - /// Non-matching variants of the enum, but with all state hidden from - /// the consequent code. The first component holds `Ident`s for all of - /// the `Self` arguments; the second component is a slice of all of the - /// variants for the enum itself, and the third component is a list of - /// `Ident`s bound to the variant index values for each of the actual - /// input `Self` arguments. - EnumNonMatchingCollapsed(Vec<Ident>, &'a [ast::Variant], &'a [Ident]), - - /// A static method where `Self` is a struct. - StaticStruct(&'a ast::VariantData, StaticFields), - /// A static method where `Self` is an enum. - StaticEnum(&'a ast::EnumDef, Vec<(Ident, Span, StaticFields)>), -} - -/// Combine the values of all the fields together. The last argument is -/// all the fields of all the structures. -pub type CombineSubstructureFunc<'a> = - Box<dyn FnMut(&mut ExtCtxt<'_>, Span, &Substructure<'_>) -> P<Expr> + 'a>; - -/// Deal with non-matching enum variants. 
The tuple is a list of -/// identifiers (one for each `Self` argument, which could be any of the -/// variants since they have been collapsed together) and the identifiers -/// holding the variant index value for each of the `Self` arguments. The -/// last argument is all the non-`Self` args of the method being derived. -pub type EnumNonMatchCollapsedFunc<'a> = - Box<dyn FnMut(&mut ExtCtxt<'_>, Span, (&[Ident], &[Ident]), &[P<Expr>]) -> P<Expr> + 'a>; - -pub fn combine_substructure( - f: CombineSubstructureFunc<'_>, -) -> RefCell<CombineSubstructureFunc<'_>> { - RefCell::new(f) -} - -/// This method helps to extract all the type parameters referenced from a -/// type. For a type parameter `<T>`, it looks for either a `TyPath` that -/// is not global and starts with `T`, or a `TyQPath`. -fn find_type_parameters( - ty: &ast::Ty, - ty_param_names: &[ast::Name], - cx: &ExtCtxt<'_>, -) -> Vec<P<ast::Ty>> { - use syntax::visit; - - struct Visitor<'a, 'b> { - cx: &'a ExtCtxt<'b>, - ty_param_names: &'a [ast::Name], - types: Vec<P<ast::Ty>>, - } - - impl<'a, 'b> visit::Visitor<'a> for Visitor<'a, 'b> { - fn visit_ty(&mut self, ty: &'a ast::Ty) { - if let ast::TyKind::Path(_, ref path) = ty.kind { - if let Some(segment) = path.segments.first() { - if self.ty_param_names.contains(&segment.ident.name) { - self.types.push(P(ty.clone())); - } - } - } - - visit::walk_ty(self, ty) - } - - fn visit_mac(&mut self, mac: &ast::Mac) { - self.cx.span_err(mac.span(), "`derive` cannot be used on items with type macros"); - } - } - - let mut visitor = Visitor { cx, ty_param_names, types: Vec::new() }; - visit::Visitor::visit_ty(&mut visitor, ty); - - visitor.types -} - -impl<'a> TraitDef<'a> { - pub fn expand( - self, - cx: &mut ExtCtxt<'_>, - mitem: &ast::MetaItem, - item: &'a Annotatable, - push: &mut dyn FnMut(Annotatable), - ) { - self.expand_ext(cx, mitem, item, push, false); - } - - pub fn expand_ext( - self, - cx: &mut ExtCtxt<'_>, - mitem: &ast::MetaItem, - item: &'a Annotatable, - push: &mut dyn FnMut(Annotatable), - from_scratch: bool, - ) { - match *item { - Annotatable::Item(ref item) => { - let is_packed = item.attrs.iter().any(|attr| { - for r in attr::find_repr_attrs(&cx.parse_sess, attr) { - if let attr::ReprPacked(_) = r { - return true; - } - } - false - }); - let has_no_type_params = match item.kind { - ast::ItemKind::Struct(_, ref generics) - | ast::ItemKind::Enum(_, ref generics) - | ast::ItemKind::Union(_, ref generics) => { - !generics.params.iter().any(|param| match param.kind { - ast::GenericParamKind::Type { .. } => true, - _ => false, - }) - } - _ => { - // Non-ADT derive is an error, but it should have been - // set earlier; see - // libsyntax_expand/expand.rs:MacroExpander::fully_expand_fragment() - // libsyntax_expand/base.rs:Annotatable::derive_allowed() - return; - } - }; - let container_id = cx.current_expansion.id.expn_data().parent; - let always_copy = has_no_type_params && cx.resolver.has_derive_copy(container_id); - let use_temporaries = is_packed && always_copy; - - let newitem = match item.kind { - ast::ItemKind::Struct(ref struct_def, ref generics) => self.expand_struct_def( - cx, - &struct_def, - item.ident, - generics, - from_scratch, - use_temporaries, - ), - ast::ItemKind::Enum(ref enum_def, ref generics) => { - // We ignore `use_temporaries` here, because - // `repr(packed)` enums cause an error later on. - // - // This can only cause further compilation errors - // downstream in blatantly illegal code, so it - // is fine. 
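To make the `use_temporaries` choice above more concrete, here is a small standalone sketch of the by-value matching it selects, written for this note rather than taken from compiler output. The idea, as the comments above describe it, is that `ref` bindings into a `#[repr(packed)]` struct would create references to possibly unaligned fields, so for packed types that are also `Copy` the generated match binds fields by value instead.

```rust
// Standalone illustration of by-value field matching for packed + Copy types;
// the type and function names are invented for this sketch.
#[repr(packed)]
#[derive(Clone, Copy)]
struct Packed {
    a: u8,
    b: u32,
}

fn packed_eq(lhs: &Packed, rhs: &Packed) -> bool {
    // By-value bindings (no `ref`), roughly the pattern built when
    // `use_temporaries` is true; `ref` bindings here would need references
    // to unaligned fields.
    match *lhs {
        Packed { a: l_a, b: l_b } => match *rhs {
            Packed { a: r_a, b: r_b } => l_a == r_a && l_b == r_b,
        },
    }
}
```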
- self.expand_enum_def( - cx, - enum_def, - &item.attrs, - item.ident, - generics, - from_scratch, - ) - } - ast::ItemKind::Union(ref struct_def, ref generics) => { - if self.supports_unions { - self.expand_struct_def( - cx, - &struct_def, - item.ident, - generics, - from_scratch, - use_temporaries, - ) - } else { - cx.span_err(mitem.span, "this trait cannot be derived for unions"); - return; - } - } - _ => unreachable!(), - }; - // Keep the lint attributes of the previous item to control how the - // generated implementations are linted - let mut attrs = newitem.attrs.clone(); - attrs.extend( - item.attrs - .iter() - .filter(|a| { - [ - sym::allow, - sym::warn, - sym::deny, - sym::forbid, - sym::stable, - sym::unstable, - ] - .contains(&a.name_or_empty()) - }) - .cloned(), - ); - push(Annotatable::Item(P(ast::Item { attrs: attrs, ..(*newitem).clone() }))) - } - _ => { - // Non-Item derive is an error, but it should have been - // set earlier; see - // libsyntax_expand/expand.rs:MacroExpander::fully_expand_fragment() - // libsyntax_expand/base.rs:Annotatable::derive_allowed() - return; - } - } - } - - /// Given that we are deriving a trait `DerivedTrait` for a type like: - /// - /// ```ignore (only-for-syntax-highlight) - /// struct Struct<'a, ..., 'z, A, B: DeclaredTrait, C, ..., Z> where C: WhereTrait { - /// a: A, - /// b: B::Item, - /// b1: <B as DeclaredTrait>::Item, - /// c1: <C as WhereTrait>::Item, - /// c2: Option<<C as WhereTrait>::Item>, - /// ... - /// } - /// ``` - /// - /// create an impl like: - /// - /// ```ignore (only-for-syntax-highlight) - /// impl<'a, ..., 'z, A, B: DeclaredTrait, C, ... Z> where - /// C: WhereTrait, - /// A: DerivedTrait + B1 + ... + BN, - /// B: DerivedTrait + B1 + ... + BN, - /// C: DerivedTrait + B1 + ... + BN, - /// B::Item: DerivedTrait + B1 + ... + BN, - /// <C as WhereTrait>::Item: DerivedTrait + B1 + ... + BN, - /// ... - /// { - /// ... - /// } - /// ``` - /// - /// where B1, ..., BN are the bounds given by `bounds_paths`.'. Z is a phantom type, and - /// therefore does not get bound by the derived trait. - fn create_derived_impl( - &self, - cx: &mut ExtCtxt<'_>, - type_ident: Ident, - generics: &Generics, - field_tys: Vec<P<ast::Ty>>, - methods: Vec<ast::AssocItem>, - ) -> P<ast::Item> { - let trait_path = self.path.to_path(cx, self.span, type_ident, generics); - - // Transform associated types from `deriving::ty::Ty` into `ast::AssocItem` - let associated_types = - self.associated_types.iter().map(|&(ident, ref type_def)| ast::AssocItem { - id: ast::DUMMY_NODE_ID, - span: self.span, - ident, - vis: respan(self.span.shrink_to_lo(), ast::VisibilityKind::Inherited), - defaultness: ast::Defaultness::Final, - attrs: Vec::new(), - generics: Generics::default(), - kind: ast::AssocItemKind::TyAlias( - Vec::new(), - Some(type_def.to_ty(cx, self.span, type_ident, generics)), - ), - tokens: None, - }); - - let Generics { mut params, mut where_clause, span } = - self.generics.to_generics(cx, self.span, type_ident, generics); - - // Create the generic parameters - params.extend(generics.params.iter().map(|param| match param.kind { - GenericParamKind::Lifetime { .. } => param.clone(), - GenericParamKind::Type { .. 
} => { - // I don't think this can be moved out of the loop, since - // a GenericBound requires an ast id - let bounds: Vec<_> = - // extra restrictions on the generics parameters to the - // type being derived upon - self.additional_bounds.iter().map(|p| { - cx.trait_bound(p.to_path(cx, self.span, type_ident, generics)) - }).chain( - // require the current trait - iter::once(cx.trait_bound(trait_path.clone())) - ).chain( - // also add in any bounds from the declaration - param.bounds.iter().cloned() - ).collect(); - - cx.typaram(self.span, param.ident, vec![], bounds, None) - } - GenericParamKind::Const { .. } => param.clone(), - })); - - // and similarly for where clauses - where_clause.predicates.extend(generics.where_clause.predicates.iter().map(|clause| { - match *clause { - ast::WherePredicate::BoundPredicate(ref wb) => { - ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate { - span: self.span, - bound_generic_params: wb.bound_generic_params.clone(), - bounded_ty: wb.bounded_ty.clone(), - bounds: wb.bounds.iter().cloned().collect(), - }) - } - ast::WherePredicate::RegionPredicate(ref rb) => { - ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate { - span: self.span, - lifetime: rb.lifetime, - bounds: rb.bounds.iter().cloned().collect(), - }) - } - ast::WherePredicate::EqPredicate(ref we) => { - ast::WherePredicate::EqPredicate(ast::WhereEqPredicate { - id: ast::DUMMY_NODE_ID, - span: self.span, - lhs_ty: we.lhs_ty.clone(), - rhs_ty: we.rhs_ty.clone(), - }) - } - } - })); - - { - // Extra scope required here so ty_params goes out of scope before params is moved - - let mut ty_params = params - .iter() - .filter_map(|param| match param.kind { - ast::GenericParamKind::Type { .. } => Some(param), - _ => None, - }) - .peekable(); - - if ty_params.peek().is_some() { - let ty_param_names: Vec<ast::Name> = - ty_params.map(|ty_param| ty_param.ident.name).collect(); - - for field_ty in field_tys { - let tys = find_type_parameters(&field_ty, &ty_param_names, cx); - - for ty in tys { - // if we have already handled this type, skip it - if let ast::TyKind::Path(_, ref p) = ty.kind { - if p.segments.len() == 1 - && ty_param_names.contains(&p.segments[0].ident.name) - { - continue; - }; - } - let mut bounds: Vec<_> = self - .additional_bounds - .iter() - .map(|p| cx.trait_bound(p.to_path(cx, self.span, type_ident, generics))) - .collect(); - - // require the current trait - bounds.push(cx.trait_bound(trait_path.clone())); - - let predicate = ast::WhereBoundPredicate { - span: self.span, - bound_generic_params: Vec::new(), - bounded_ty: ty, - bounds, - }; - - let predicate = ast::WherePredicate::BoundPredicate(predicate); - where_clause.predicates.push(predicate); - } - } - } - } - - let trait_generics = Generics { params, where_clause, span }; - - // Create the reference to the trait. - let trait_ref = cx.trait_ref(trait_path); - - let self_params: Vec<_> = generics - .params - .iter() - .map(|param| match param.kind { - GenericParamKind::Lifetime { .. } => { - GenericArg::Lifetime(cx.lifetime(self.span, param.ident)) - } - GenericParamKind::Type { .. } => { - GenericArg::Type(cx.ty_ident(self.span, param.ident)) - } - GenericParamKind::Const { .. } => { - GenericArg::Const(cx.const_ident(self.span, param.ident)) - } - }) - .collect(); - - // Create the type of `self`. 
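Stepping back from the AST plumbing in this function: the net effect of the parameter and where-clause handling above is an impl of roughly the following shape. This is a hand-written illustration for a made-up trait and type, not actual expander output.

```rust
// Illustration only: a made-up trait and generic type, with a hand-expanded
// impl showing where the extra bounds land. Every type parameter keeps the
// bounds from its declaration and additionally gets the derived trait (plus
// any `additional_bounds` the TraitDef requests).
trait MyTrait {
    fn describe(&self) -> String;
}

struct Wrapper<T: Clone, U> {
    first: T,
    second: U,
}

impl<T: Clone + MyTrait, U: MyTrait> MyTrait for Wrapper<T, U> {
    fn describe(&self) -> String {
        format!("({}, {})", self.first.describe(), self.second.describe())
    }
}
```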
- let path = cx.path_all(self.span, false, vec![type_ident], self_params); - let self_type = cx.ty_path(path); - - let attr = cx.attribute(cx.meta_word(self.span, sym::automatically_derived)); - // Just mark it now since we know that it'll end up used downstream - attr::mark_used(&attr); - let opt_trait_ref = Some(trait_ref); - let unused_qual = { - let word = syntax::attr::mk_nested_word_item(Ident::new( - Symbol::intern("unused_qualifications"), - self.span, - )); - let list = syntax::attr::mk_list_item(Ident::new(sym::allow, self.span), vec![word]); - cx.attribute(list) - }; - - let mut a = vec![attr, unused_qual]; - a.extend(self.attributes.iter().cloned()); - - let unsafety = if self.is_unsafe { ast::Unsafety::Unsafe } else { ast::Unsafety::Normal }; - - cx.item( - self.span, - Ident::invalid(), - a, - ast::ItemKind::Impl( - unsafety, - ast::ImplPolarity::Positive, - ast::Defaultness::Final, - trait_generics, - opt_trait_ref, - self_type, - methods.into_iter().chain(associated_types).collect(), - ), - ) - } - - fn expand_struct_def( - &self, - cx: &mut ExtCtxt<'_>, - struct_def: &'a VariantData, - type_ident: Ident, - generics: &Generics, - from_scratch: bool, - use_temporaries: bool, - ) -> P<ast::Item> { - let field_tys: Vec<P<ast::Ty>> = - struct_def.fields().iter().map(|field| field.ty.clone()).collect(); - - let methods = self - .methods - .iter() - .map(|method_def| { - let (explicit_self, self_args, nonself_args, tys) = - method_def.split_self_nonself_args(cx, self, type_ident, generics); - - let body = if from_scratch || method_def.is_static() { - method_def.expand_static_struct_method_body( - cx, - self, - struct_def, - type_ident, - &self_args[..], - &nonself_args[..], - ) - } else { - method_def.expand_struct_method_body( - cx, - self, - struct_def, - type_ident, - &self_args[..], - &nonself_args[..], - use_temporaries, - ) - }; - - method_def.create_method(cx, self, type_ident, generics, explicit_self, tys, body) - }) - .collect(); - - self.create_derived_impl(cx, type_ident, generics, field_tys, methods) - } - - fn expand_enum_def( - &self, - cx: &mut ExtCtxt<'_>, - enum_def: &'a EnumDef, - type_attrs: &[ast::Attribute], - type_ident: Ident, - generics: &Generics, - from_scratch: bool, - ) -> P<ast::Item> { - let mut field_tys = Vec::new(); - - for variant in &enum_def.variants { - field_tys.extend(variant.data.fields().iter().map(|field| field.ty.clone())); - } - - let methods = self - .methods - .iter() - .map(|method_def| { - let (explicit_self, self_args, nonself_args, tys) = - method_def.split_self_nonself_args(cx, self, type_ident, generics); - - let body = if from_scratch || method_def.is_static() { - method_def.expand_static_enum_method_body( - cx, - self, - enum_def, - type_ident, - &self_args[..], - &nonself_args[..], - ) - } else { - method_def.expand_enum_method_body( - cx, - self, - enum_def, - type_attrs, - type_ident, - self_args, - &nonself_args[..], - ) - }; - - method_def.create_method(cx, self, type_ident, generics, explicit_self, tys, body) - }) - .collect(); - - self.create_derived_impl(cx, type_ident, generics, field_tys, methods) - } -} - -fn find_repr_type_name(sess: &ParseSess, type_attrs: &[ast::Attribute]) -> &'static str { - let mut repr_type_name = "isize"; - for a in type_attrs { - for r in &attr::find_repr_attrs(sess, a) { - repr_type_name = match *r { - attr::ReprPacked(_) - | attr::ReprSimd - | attr::ReprAlign(_) - | attr::ReprTransparent => continue, - - attr::ReprC => "i32", - - attr::ReprInt(attr::SignedInt(ast::IntTy::Isize)) => 
"isize", - attr::ReprInt(attr::SignedInt(ast::IntTy::I8)) => "i8", - attr::ReprInt(attr::SignedInt(ast::IntTy::I16)) => "i16", - attr::ReprInt(attr::SignedInt(ast::IntTy::I32)) => "i32", - attr::ReprInt(attr::SignedInt(ast::IntTy::I64)) => "i64", - attr::ReprInt(attr::SignedInt(ast::IntTy::I128)) => "i128", - - attr::ReprInt(attr::UnsignedInt(ast::UintTy::Usize)) => "usize", - attr::ReprInt(attr::UnsignedInt(ast::UintTy::U8)) => "u8", - attr::ReprInt(attr::UnsignedInt(ast::UintTy::U16)) => "u16", - attr::ReprInt(attr::UnsignedInt(ast::UintTy::U32)) => "u32", - attr::ReprInt(attr::UnsignedInt(ast::UintTy::U64)) => "u64", - attr::ReprInt(attr::UnsignedInt(ast::UintTy::U128)) => "u128", - } - } - } - repr_type_name -} - -impl<'a> MethodDef<'a> { - fn call_substructure_method( - &self, - cx: &mut ExtCtxt<'_>, - trait_: &TraitDef<'_>, - type_ident: Ident, - self_args: &[P<Expr>], - nonself_args: &[P<Expr>], - fields: &SubstructureFields<'_>, - ) -> P<Expr> { - let substructure = Substructure { - type_ident, - method_ident: cx.ident_of(self.name, trait_.span), - self_args, - nonself_args, - fields, - }; - let mut f = self.combine_substructure.borrow_mut(); - let f: &mut CombineSubstructureFunc<'_> = &mut *f; - f(cx, trait_.span, &substructure) - } - - fn get_ret_ty( - &self, - cx: &mut ExtCtxt<'_>, - trait_: &TraitDef<'_>, - generics: &Generics, - type_ident: Ident, - ) -> P<ast::Ty> { - self.ret_ty.to_ty(cx, trait_.span, type_ident, generics) - } - - fn is_static(&self) -> bool { - self.explicit_self.is_none() - } - - fn split_self_nonself_args( - &self, - cx: &mut ExtCtxt<'_>, - trait_: &TraitDef<'_>, - type_ident: Ident, - generics: &Generics, - ) -> (Option<ast::ExplicitSelf>, Vec<P<Expr>>, Vec<P<Expr>>, Vec<(Ident, P<ast::Ty>)>) { - let mut self_args = Vec::new(); - let mut nonself_args = Vec::new(); - let mut arg_tys = Vec::new(); - let mut nonstatic = false; - - let ast_explicit_self = self.explicit_self.as_ref().map(|self_ptr| { - let (self_expr, explicit_self) = ty::get_explicit_self(cx, trait_.span, self_ptr); - - self_args.push(self_expr); - nonstatic = true; - - explicit_self - }); - - for (ty, name) in self.args.iter() { - let ast_ty = ty.to_ty(cx, trait_.span, type_ident, generics); - let ident = cx.ident_of(name, trait_.span); - arg_tys.push((ident, ast_ty)); - - let arg_expr = cx.expr_ident(trait_.span, ident); - - match *ty { - // for static methods, just treat any Self - // arguments as a normal arg - Self_ if nonstatic => { - self_args.push(arg_expr); - } - Ptr(ref ty, _) if (if let Self_ = **ty { true } else { false }) && nonstatic => { - self_args.push(cx.expr_deref(trait_.span, arg_expr)) - } - _ => { - nonself_args.push(arg_expr); - } - } - } - - (ast_explicit_self, self_args, nonself_args, arg_tys) - } - - fn create_method( - &self, - cx: &mut ExtCtxt<'_>, - trait_: &TraitDef<'_>, - type_ident: Ident, - generics: &Generics, - explicit_self: Option<ast::ExplicitSelf>, - arg_types: Vec<(Ident, P<ast::Ty>)>, - body: P<Expr>, - ) -> ast::AssocItem { - // Create the generics that aren't for `Self`. 
- let fn_generics = self.generics.to_generics(cx, trait_.span, type_ident, generics); - - let args = { - let self_args = explicit_self.map(|explicit_self| { - let ident = Ident::with_dummy_span(kw::SelfLower).with_span_pos(trait_.span); - ast::Param::from_self(ast::AttrVec::default(), explicit_self, ident) - }); - let nonself_args = - arg_types.into_iter().map(|(name, ty)| cx.param(trait_.span, name, ty)); - self_args.into_iter().chain(nonself_args).collect() - }; - - let ret_type = self.get_ret_ty(cx, trait_, generics, type_ident); - - let method_ident = cx.ident_of(self.name, trait_.span); - let fn_decl = cx.fn_decl(args, ast::FunctionRetTy::Ty(ret_type)); - let body_block = cx.block_expr(body); - - let unsafety = if self.is_unsafe { ast::Unsafety::Unsafe } else { ast::Unsafety::Normal }; - - let trait_lo_sp = trait_.span.shrink_to_lo(); - - let sig = ast::FnSig { - header: ast::FnHeader { unsafety, ext: ast::Extern::None, ..ast::FnHeader::default() }, - decl: fn_decl, - }; - - // Create the method. - ast::AssocItem { - id: ast::DUMMY_NODE_ID, - attrs: self.attributes.clone(), - generics: fn_generics, - span: trait_.span, - vis: respan(trait_lo_sp, ast::VisibilityKind::Inherited), - defaultness: ast::Defaultness::Final, - ident: method_ident, - kind: ast::AssocItemKind::Fn(sig, Some(body_block)), - tokens: None, - } - } - - /// ``` - /// #[derive(PartialEq)] - /// # struct Dummy; - /// struct A { x: i32, y: i32 } - /// - /// // equivalent to: - /// impl PartialEq for A { - /// fn eq(&self, other: &A) -> bool { - /// match *self { - /// A {x: ref __self_0_0, y: ref __self_0_1} => { - /// match *other { - /// A {x: ref __self_1_0, y: ref __self_1_1} => { - /// __self_0_0.eq(__self_1_0) && __self_0_1.eq(__self_1_1) - /// } - /// } - /// } - /// } - /// } - /// } - /// - /// // or if A is repr(packed) - note fields are matched by-value - /// // instead of by-reference. 
- /// impl PartialEq for A { - /// fn eq(&self, other: &A) -> bool { - /// match *self { - /// A {x: __self_0_0, y: __self_0_1} => { - /// match other { - /// A {x: __self_1_0, y: __self_1_1} => { - /// __self_0_0.eq(&__self_1_0) && __self_0_1.eq(&__self_1_1) - /// } - /// } - /// } - /// } - /// } - /// } - /// ``` - fn expand_struct_method_body<'b>( - &self, - cx: &mut ExtCtxt<'_>, - trait_: &TraitDef<'b>, - struct_def: &'b VariantData, - type_ident: Ident, - self_args: &[P<Expr>], - nonself_args: &[P<Expr>], - use_temporaries: bool, - ) -> P<Expr> { - let mut raw_fields = Vec::new(); // Vec<[fields of self], - // [fields of next Self arg], [etc]> - let mut patterns = Vec::new(); - for i in 0..self_args.len() { - let struct_path = cx.path(trait_.span, vec![type_ident]); - let (pat, ident_expr) = trait_.create_struct_pattern( - cx, - struct_path, - struct_def, - &format!("__self_{}", i), - ast::Mutability::Not, - use_temporaries, - ); - patterns.push(pat); - raw_fields.push(ident_expr); - } - - // transpose raw_fields - let fields = if !raw_fields.is_empty() { - let mut raw_fields = raw_fields.into_iter().map(|v| v.into_iter()); - let first_field = raw_fields.next().unwrap(); - let mut other_fields: Vec<vec::IntoIter<_>> = raw_fields.collect(); - first_field - .map(|(span, opt_id, field, attrs)| FieldInfo { - span, - name: opt_id, - self_: field, - other: other_fields - .iter_mut() - .map(|l| match l.next().unwrap() { - (.., ex, _) => ex, - }) - .collect(), - attrs, - }) - .collect() - } else { - cx.span_bug(trait_.span, "no `self` parameter for method in generic `derive`") - }; - - // body of the inner most destructuring match - let mut body = self.call_substructure_method( - cx, - trait_, - type_ident, - self_args, - nonself_args, - &Struct(struct_def, fields), - ); - - // make a series of nested matches, to destructure the - // structs. This is actually right-to-left, but it shouldn't - // matter. - for (arg_expr, pat) in self_args.iter().zip(patterns) { - body = cx.expr_match( - trait_.span, - arg_expr.clone(), - vec![cx.arm(trait_.span, pat.clone(), body)], - ) - } - - body - } - - fn expand_static_struct_method_body( - &self, - cx: &mut ExtCtxt<'_>, - trait_: &TraitDef<'_>, - struct_def: &VariantData, - type_ident: Ident, - self_args: &[P<Expr>], - nonself_args: &[P<Expr>], - ) -> P<Expr> { - let summary = trait_.summarise_struct(cx, struct_def); - - self.call_substructure_method( - cx, - trait_, - type_ident, - self_args, - nonself_args, - &StaticStruct(struct_def, summary), - ) - } - - /// ``` - /// #[derive(PartialEq)] - /// # struct Dummy; - /// enum A { - /// A1, - /// A2(i32) - /// } - /// - /// // is equivalent to - /// - /// impl PartialEq for A { - /// fn eq(&self, other: &A) -> ::bool { - /// match (&*self, &*other) { - /// (&A1, &A1) => true, - /// (&A2(ref self_0), - /// &A2(ref __arg_1_0)) => (*self_0).eq(&(*__arg_1_0)), - /// _ => { - /// let __self_vi = match *self { A1(..) => 0, A2(..) => 1 }; - /// let __arg_1_vi = match *other { A1(..) => 0, A2(..) => 1 }; - /// false - /// } - /// } - /// } - /// } - /// ``` - /// - /// (Of course `__self_vi` and `__arg_1_vi` are unused for - /// `PartialEq`, and those subcomputations will hopefully be removed - /// as their results are unused. The point of `__self_vi` and - /// `__arg_1_vi` is for `PartialOrd`; see #15503.) 
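The doc comment above sketches the expansion in pseudocode; the following is a runnable, hand-written approximation of the same strategy, using `std::mem::discriminant` in place of the `discriminant_value` intrinsic, so it is an analogy rather than the code the expander actually emits.

```rust
use std::mem;

// Hand-written approximation of the strategy sketched above: compare the
// variant "indices" first, then destructure knowing both sides match.
enum A {
    A1,
    A2(i32),
}

fn a_eq(lhs: &A, rhs: &A) -> bool {
    if mem::discriminant(lhs) == mem::discriminant(rhs) {
        match (lhs, rhs) {
            (A::A1, A::A1) => true,
            (A::A2(l), A::A2(r)) => l == r,
            // The discriminants already matched, so mixed-variant pairs
            // cannot reach this arm.
            _ => unreachable!(),
        }
    } else {
        // Catch-all for non-matching variants (`EnumNonMatchingCollapsed`):
        // for `PartialEq` this is simply `false`.
        false
    }
}
```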
- fn expand_enum_method_body<'b>( - &self, - cx: &mut ExtCtxt<'_>, - trait_: &TraitDef<'b>, - enum_def: &'b EnumDef, - type_attrs: &[ast::Attribute], - type_ident: Ident, - self_args: Vec<P<Expr>>, - nonself_args: &[P<Expr>], - ) -> P<Expr> { - self.build_enum_match_tuple( - cx, - trait_, - enum_def, - type_attrs, - type_ident, - self_args, - nonself_args, - ) - } - - /// Creates a match for a tuple of all `self_args`, where either all - /// variants match, or it falls into a catch-all for when one variant - /// does not match. - - /// There are N + 1 cases because is a case for each of the N - /// variants where all of the variants match, and one catch-all for - /// when one does not match. - - /// As an optimization we generate code which checks whether all variants - /// match first which makes llvm see that C-like enums can be compiled into - /// a simple equality check (for PartialEq). - - /// The catch-all handler is provided access the variant index values - /// for each of the self-args, carried in precomputed variables. - - /// ```{.text} - /// let __self0_vi = unsafe { - /// std::intrinsics::discriminant_value(&self) } as i32; - /// let __self1_vi = unsafe { - /// std::intrinsics::discriminant_value(&arg1) } as i32; - /// let __self2_vi = unsafe { - /// std::intrinsics::discriminant_value(&arg2) } as i32; - /// - /// if __self0_vi == __self1_vi && __self0_vi == __self2_vi && ... { - /// match (...) { - /// (Variant1, Variant1, ...) => Body1 - /// (Variant2, Variant2, ...) => Body2, - /// ... - /// _ => ::core::intrinsics::unreachable() - /// } - /// } - /// else { - /// ... // catch-all remainder can inspect above variant index values. - /// } - /// ``` - fn build_enum_match_tuple<'b>( - &self, - cx: &mut ExtCtxt<'_>, - trait_: &TraitDef<'b>, - enum_def: &'b EnumDef, - type_attrs: &[ast::Attribute], - type_ident: Ident, - mut self_args: Vec<P<Expr>>, - nonself_args: &[P<Expr>], - ) -> P<Expr> { - let sp = trait_.span; - let variants = &enum_def.variants; - - let self_arg_names = iter::once("__self".to_string()) - .chain( - self_args - .iter() - .enumerate() - .skip(1) - .map(|(arg_count, _self_arg)| format!("__arg_{}", arg_count)), - ) - .collect::<Vec<String>>(); - - let self_arg_idents = - self_arg_names.iter().map(|name| cx.ident_of(name, sp)).collect::<Vec<ast::Ident>>(); - - // The `vi_idents` will be bound, solely in the catch-all, to - // a series of let statements mapping each self_arg to an int - // value corresponding to its discriminant. - let vi_idents = self_arg_names - .iter() - .map(|name| { - let vi_suffix = format!("{}_vi", &name[..]); - cx.ident_of(&vi_suffix[..], trait_.span) - }) - .collect::<Vec<ast::Ident>>(); - - // Builds, via callback to call_substructure_method, the - // delegated expression that handles the catch-all case, - // using `__variants_tuple` to drive logic if necessary. - let catch_all_substructure = - EnumNonMatchingCollapsed(self_arg_idents, &variants[..], &vi_idents[..]); - - let first_fieldless = variants.iter().find(|v| v.data.fields().is_empty()); - - // These arms are of the form: - // (Variant1, Variant1, ...) => Body1 - // (Variant2, Variant2, ...) => Body2 - // ... 
- // where each tuple has length = self_args.len() - let mut match_arms: Vec<ast::Arm> = variants - .iter() - .enumerate() - .filter(|&(_, v)| !(self.unify_fieldless_variants && v.data.fields().is_empty())) - .map(|(index, variant)| { - let mk_self_pat = |cx: &mut ExtCtxt<'_>, self_arg_name: &str| { - let (p, idents) = trait_.create_enum_variant_pattern( - cx, - type_ident, - variant, - self_arg_name, - ast::Mutability::Not, - ); - (cx.pat(sp, PatKind::Ref(p, ast::Mutability::Not)), idents) - }; - - // A single arm has form (&VariantK, &VariantK, ...) => BodyK - // (see "Final wrinkle" note below for why.) - let mut subpats = Vec::with_capacity(self_arg_names.len()); - let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1); - let first_self_pat_idents = { - let (p, idents) = mk_self_pat(cx, &self_arg_names[0]); - subpats.push(p); - idents - }; - for self_arg_name in &self_arg_names[1..] { - let (p, idents) = mk_self_pat(cx, &self_arg_name[..]); - subpats.push(p); - self_pats_idents.push(idents); - } - - // Here is the pat = `(&VariantK, &VariantK, ...)` - let single_pat = cx.pat_tuple(sp, subpats); - - // For the BodyK, we need to delegate to our caller, - // passing it an EnumMatching to indicate which case - // we are in. - - // All of the Self args have the same variant in these - // cases. So we transpose the info in self_pats_idents - // to gather the getter expressions together, in the - // form that EnumMatching expects. - - // The transposition is driven by walking across the - // arg fields of the variant for the first self pat. - let field_tuples = first_self_pat_idents - .into_iter() - .enumerate() - // For each arg field of self, pull out its getter expr ... - .map(|(field_index, (sp, opt_ident, self_getter_expr, attrs))| { - // ... but FieldInfo also wants getter expr - // for matching other arguments of Self type; - // so walk across the *other* self_pats_idents - // and pull out getter for same field in each - // of them (using `field_index` tracked above). - // That is the heart of the transposition. - let others = self_pats_idents - .iter() - .map(|fields| { - let (_, _opt_ident, ref other_getter_expr, _) = fields[field_index]; - - // All Self args have same variant, so - // opt_idents are the same. (Assert - // here to make it self-evident that - // it is okay to ignore `_opt_ident`.) - assert!(opt_ident == _opt_ident); - - other_getter_expr.clone() - }) - .collect::<Vec<P<Expr>>>(); - - FieldInfo { - span: sp, - name: opt_ident, - self_: self_getter_expr, - other: others, - attrs, - } - }) - .collect::<Vec<FieldInfo<'_>>>(); - - // Now, for some given VariantK, we have built up - // expressions for referencing every field of every - // Self arg, assuming all are instances of VariantK. - // Build up code associated with such a case. - let substructure = EnumMatching(index, variants.len(), variant, field_tuples); - let arm_expr = self.call_substructure_method( - cx, - trait_, - type_ident, - &self_args[..], - nonself_args, - &substructure, - ); - - cx.arm(sp, single_pat, arm_expr) - }) - .collect(); - - let default = match first_fieldless { - Some(v) if self.unify_fieldless_variants => { - // We need a default case that handles the fieldless variants. 
- // The index and actual variant aren't meaningful in this case, - // so just use whatever - let substructure = EnumMatching(0, variants.len(), v, Vec::new()); - Some(self.call_substructure_method( - cx, - trait_, - type_ident, - &self_args[..], - nonself_args, - &substructure, - )) - } - _ if variants.len() > 1 && self_args.len() > 1 => { - // Since we know that all the arguments will match if we reach - // the match expression we add the unreachable intrinsics as the - // result of the catch all which should help llvm in optimizing it - Some(deriving::call_intrinsic(cx, sp, "unreachable", vec![])) - } - _ => None, - }; - if let Some(arm) = default { - match_arms.push(cx.arm(sp, cx.pat_wild(sp), arm)); - } - - // We will usually need the catch-all after matching the - // tuples `(VariantK, VariantK, ...)` for each VariantK of the - // enum. But: - // - // * when there is only one Self arg, the arms above suffice - // (and the deriving we call back into may not be prepared to - // handle EnumNonMatchCollapsed), and, - // - // * when the enum has only one variant, the single arm that - // is already present always suffices. - // - // * In either of the two cases above, if we *did* add a - // catch-all `_` match, it would trigger the - // unreachable-pattern error. - // - if variants.len() > 1 && self_args.len() > 1 { - // Build a series of let statements mapping each self_arg - // to its discriminant value. If this is a C-style enum - // with a specific repr type, then casts the values to - // that type. Otherwise casts to `i32` (the default repr - // type). - // - // i.e., for `enum E<T> { A, B(1), C(T, T) }`, and a deriving - // with three Self args, builds three statements: - // - // ``` - // let __self0_vi = unsafe { - // std::intrinsics::discriminant_value(&self) } as i32; - // let __self1_vi = unsafe { - // std::intrinsics::discriminant_value(&arg1) } as i32; - // let __self2_vi = unsafe { - // std::intrinsics::discriminant_value(&arg2) } as i32; - // ``` - let mut index_let_stmts: Vec<ast::Stmt> = Vec::with_capacity(vi_idents.len() + 1); - - // We also build an expression which checks whether all discriminants are equal - // discriminant_test = __self0_vi == __self1_vi && __self0_vi == __self2_vi && ... 
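As a side note on the discriminant test being assembled here, the standalone sketch below (made-up enum, ordinary `as` casts instead of the intrinsic) shows why the repr type returned by `find_repr_type_name` matters: with `#[repr(u8)]` the variant-index comparison can be done on `u8` rather than the default `isize`.

```rust
// Illustration only: a C-like enum with an explicit repr, compared the way
// the generated `__self0_vi == __self1_vi` test does, with the cast target
// that `find_repr_type_name` would pick for this enum ("u8").
#[repr(u8)]
#[derive(Copy, Clone)]
enum Color {
    Red = 0,
    Green = 1,
    Blue = 255,
}

fn same_variant(a: Color, b: Color) -> bool {
    (a as u8) == (b as u8)
}
```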
- let mut discriminant_test = cx.expr_bool(sp, true); - - let target_type_name = find_repr_type_name(&cx.parse_sess, type_attrs); - - let mut first_ident = None; - for (&ident, self_arg) in vi_idents.iter().zip(&self_args) { - let self_addr = cx.expr_addr_of(sp, self_arg.clone()); - let variant_value = - deriving::call_intrinsic(cx, sp, "discriminant_value", vec![self_addr]); - - let target_ty = cx.ty_ident(sp, cx.ident_of(target_type_name, sp)); - let variant_disr = cx.expr_cast(sp, variant_value, target_ty); - let let_stmt = cx.stmt_let(sp, false, ident, variant_disr); - index_let_stmts.push(let_stmt); - - match first_ident { - Some(first) => { - let first_expr = cx.expr_ident(sp, first); - let id = cx.expr_ident(sp, ident); - let test = cx.expr_binary(sp, BinOpKind::Eq, first_expr, id); - discriminant_test = - cx.expr_binary(sp, BinOpKind::And, discriminant_test, test) - } - None => { - first_ident = Some(ident); - } - } - } - - let arm_expr = self.call_substructure_method( - cx, - trait_, - type_ident, - &self_args[..], - nonself_args, - &catch_all_substructure, - ); - - // Final wrinkle: the self_args are expressions that deref - // down to desired places, but we cannot actually deref - // them when they are fed as r-values into a tuple - // expression; here add a layer of borrowing, turning - // `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`. - self_args.map_in_place(|self_arg| cx.expr_addr_of(sp, self_arg)); - let match_arg = cx.expr(sp, ast::ExprKind::Tup(self_args)); - - // Lastly we create an expression which branches on all discriminants being equal - // if discriminant_test { - // match (...) { - // (Variant1, Variant1, ...) => Body1 - // (Variant2, Variant2, ...) => Body2, - // ... - // _ => ::core::intrinsics::unreachable() - // } - // } - // else { - // <delegated expression referring to __self0_vi, et al.> - // } - let all_match = cx.expr_match(sp, match_arg, match_arms); - let arm_expr = cx.expr_if(sp, discriminant_test, all_match, Some(arm_expr)); - index_let_stmts.push(cx.stmt_expr(arm_expr)); - cx.expr_block(cx.block(sp, index_let_stmts)) - } else if variants.is_empty() { - // As an additional wrinkle, For a zero-variant enum A, - // currently the compiler - // will accept `fn (a: &Self) { match *a { } }` - // but rejects `fn (a: &Self) { match (&*a,) { } }` - // as well as `fn (a: &Self) { match ( *a,) { } }` - // - // This means that the strategy of building up a tuple of - // all Self arguments fails when Self is a zero variant - // enum: rustc rejects the expanded program, even though - // the actual code tends to be impossible to execute (at - // least safely), according to the type system. - // - // The most expedient fix for this is to just let the - // code fall through to the catch-all. But even this is - // error-prone, since the catch-all as defined above would - // generate code like this: - // - // _ => { let __self0 = match *self { }; - // let __self1 = match *__arg_0 { }; - // <catch-all-expr> } - // - // Which is yields bindings for variables which type - // inference cannot resolve to unique types. - // - // One option to the above might be to add explicit type - // annotations. But the *only* reason to go down that path - // would be to try to make the expanded output consistent - // with the case when the number of enum variants >= 1. - // - // That just isn't worth it. In fact, trying to generate - // sensible code for *any* deriving on a zero-variant enum - // does not make sense. 
But at the same time, for now, we - // do not want to cause a compile failure just because the - // user happened to attach a deriving to their - // zero-variant enum. - // - // Instead, just generate a failing expression for the - // zero variant case, skipping matches and also skipping - // delegating back to the end user code entirely. - // - // (See also #4499 and #12609; note that some of the - // discussions there influence what choice we make here; - // e.g., if we feature-gate `match x { ... }` when x refers - // to an uninhabited type (e.g., a zero-variant enum or a - // type holding such an enum), but do not feature-gate - // zero-variant enums themselves, then attempting to - // derive Debug on such a type could here generate code - // that needs the feature gate enabled.) - - deriving::call_intrinsic(cx, sp, "unreachable", vec![]) - } else { - // Final wrinkle: the self_args are expressions that deref - // down to desired places, but we cannot actually deref - // them when they are fed as r-values into a tuple - // expression; here add a layer of borrowing, turning - // `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`. - self_args.map_in_place(|self_arg| cx.expr_addr_of(sp, self_arg)); - let match_arg = cx.expr(sp, ast::ExprKind::Tup(self_args)); - cx.expr_match(sp, match_arg, match_arms) - } - } - - fn expand_static_enum_method_body( - &self, - cx: &mut ExtCtxt<'_>, - trait_: &TraitDef<'_>, - enum_def: &EnumDef, - type_ident: Ident, - self_args: &[P<Expr>], - nonself_args: &[P<Expr>], - ) -> P<Expr> { - let summary = enum_def - .variants - .iter() - .map(|v| { - let sp = v.span.with_ctxt(trait_.span.ctxt()); - let summary = trait_.summarise_struct(cx, &v.data); - (v.ident, sp, summary) - }) - .collect(); - self.call_substructure_method( - cx, - trait_, - type_ident, - self_args, - nonself_args, - &StaticEnum(enum_def, summary), - ) - } -} - -// general helper methods. -impl<'a> TraitDef<'a> { - fn summarise_struct(&self, cx: &mut ExtCtxt<'_>, struct_def: &VariantData) -> StaticFields { - let mut named_idents = Vec::new(); - let mut just_spans = Vec::new(); - for field in struct_def.fields() { - let sp = field.span.with_ctxt(self.span.ctxt()); - match field.ident { - Some(ident) => named_idents.push((ident, sp)), - _ => just_spans.push(sp), - } - } - - let is_tuple = if let ast::VariantData::Tuple(..) 
= struct_def { true } else { false }; - match (just_spans.is_empty(), named_idents.is_empty()) { - (false, false) => cx.span_bug( - self.span, - "a struct with named and unnamed \ - fields in generic `derive`", - ), - // named fields - (_, false) => Named(named_idents), - // unnamed fields - (false, _) => Unnamed(just_spans, is_tuple), - // empty - _ => Named(Vec::new()), - } - } - - fn create_subpatterns( - &self, - cx: &mut ExtCtxt<'_>, - field_paths: Vec<ast::Ident>, - mutbl: ast::Mutability, - use_temporaries: bool, - ) -> Vec<P<ast::Pat>> { - field_paths - .iter() - .map(|path| { - let binding_mode = if use_temporaries { - ast::BindingMode::ByValue(ast::Mutability::Not) - } else { - ast::BindingMode::ByRef(mutbl) - }; - cx.pat(path.span, PatKind::Ident(binding_mode, (*path).clone(), None)) - }) - .collect() - } - - fn create_struct_pattern( - &self, - cx: &mut ExtCtxt<'_>, - struct_path: ast::Path, - struct_def: &'a VariantData, - prefix: &str, - mutbl: ast::Mutability, - use_temporaries: bool, - ) -> (P<ast::Pat>, Vec<(Span, Option<Ident>, P<Expr>, &'a [ast::Attribute])>) { - let mut paths = Vec::new(); - let mut ident_exprs = Vec::new(); - for (i, struct_field) in struct_def.fields().iter().enumerate() { - let sp = struct_field.span.with_ctxt(self.span.ctxt()); - let ident = cx.ident_of(&format!("{}_{}", prefix, i), self.span); - paths.push(ident.with_span_pos(sp)); - let val = cx.expr_path(cx.path_ident(sp, ident)); - let val = if use_temporaries { val } else { cx.expr_deref(sp, val) }; - let val = cx.expr(sp, ast::ExprKind::Paren(val)); - - ident_exprs.push((sp, struct_field.ident, val, &struct_field.attrs[..])); - } - - let subpats = self.create_subpatterns(cx, paths, mutbl, use_temporaries); - let pattern = match *struct_def { - VariantData::Struct(..) => { - let field_pats = subpats - .into_iter() - .zip(&ident_exprs) - .map(|(pat, &(sp, ident, ..))| { - if ident.is_none() { - cx.span_bug(sp, "a braced struct with unnamed fields in `derive`"); - } - ast::FieldPat { - ident: ident.unwrap(), - is_shorthand: false, - attrs: ast::AttrVec::new(), - id: ast::DUMMY_NODE_ID, - span: pat.span.with_ctxt(self.span.ctxt()), - pat, - is_placeholder: false, - } - }) - .collect(); - cx.pat_struct(self.span, struct_path, field_pats) - } - VariantData::Tuple(..) => cx.pat_tuple_struct(self.span, struct_path, subpats), - VariantData::Unit(..) 
=> cx.pat_path(self.span, struct_path), - }; - - (pattern, ident_exprs) - } - - fn create_enum_variant_pattern( - &self, - cx: &mut ExtCtxt<'_>, - enum_ident: ast::Ident, - variant: &'a ast::Variant, - prefix: &str, - mutbl: ast::Mutability, - ) -> (P<ast::Pat>, Vec<(Span, Option<Ident>, P<Expr>, &'a [ast::Attribute])>) { - let sp = variant.span.with_ctxt(self.span.ctxt()); - let variant_path = cx.path(sp, vec![enum_ident, variant.ident]); - let use_temporaries = false; // enums can't be repr(packed) - self.create_struct_pattern(cx, variant_path, &variant.data, prefix, mutbl, use_temporaries) - } -} - -// helpful premade recipes - -pub fn cs_fold_fields<'a, F>( - use_foldl: bool, - mut f: F, - base: P<Expr>, - cx: &mut ExtCtxt<'_>, - all_fields: &[FieldInfo<'a>], -) -> P<Expr> -where - F: FnMut(&mut ExtCtxt<'_>, Span, P<Expr>, P<Expr>, &[P<Expr>]) -> P<Expr>, -{ - if use_foldl { - all_fields - .iter() - .fold(base, |old, field| f(cx, field.span, old, field.self_.clone(), &field.other)) - } else { - all_fields - .iter() - .rev() - .fold(base, |old, field| f(cx, field.span, old, field.self_.clone(), &field.other)) - } -} - -pub fn cs_fold_enumnonmatch( - mut enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>, - cx: &mut ExtCtxt<'_>, - trait_span: Span, - substructure: &Substructure<'_>, -) -> P<Expr> { - match *substructure.fields { - EnumNonMatchingCollapsed(ref all_args, _, tuple) => { - enum_nonmatch_f(cx, trait_span, (&all_args[..], tuple), substructure.nonself_args) - } - _ => cx.span_bug(trait_span, "cs_fold_enumnonmatch expected an EnumNonMatchingCollapsed"), - } -} - -pub fn cs_fold_static(cx: &mut ExtCtxt<'_>, trait_span: Span) -> P<Expr> { - cx.span_bug(trait_span, "static function in `derive`") -} - -/// Fold the fields. `use_foldl` controls whether this is done -/// left-to-right (`true`) or right-to-left (`false`). -pub fn cs_fold<F>( - use_foldl: bool, - f: F, - base: P<Expr>, - enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>, - cx: &mut ExtCtxt<'_>, - trait_span: Span, - substructure: &Substructure<'_>, -) -> P<Expr> -where - F: FnMut(&mut ExtCtxt<'_>, Span, P<Expr>, P<Expr>, &[P<Expr>]) -> P<Expr>, -{ - match *substructure.fields { - EnumMatching(.., ref all_fields) | Struct(_, ref all_fields) => { - cs_fold_fields(use_foldl, f, base, cx, all_fields) - } - EnumNonMatchingCollapsed(..) => { - cs_fold_enumnonmatch(enum_nonmatch_f, cx, trait_span, substructure) - } - StaticEnum(..) | StaticStruct(..) => cs_fold_static(cx, trait_span), - } -} - -/// Function to fold over fields, with three cases, to generate more efficient and concise code. -/// When the `substructure` has grouped fields, there are two cases: -/// Zero fields: call the base case function with `None` (like the usual base case of `cs_fold`). -/// One or more fields: call the base case function on the first value (which depends on -/// `use_fold`), and use that as the base case. Then perform `cs_fold` on the remainder of the -/// fields. -/// When the `substructure` is a `EnumNonMatchingCollapsed`, the result of `enum_nonmatch_f` -/// is returned. Statics may not be folded over. -/// See `cs_op` in `partial_ord.rs` for a model example. 
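The doc comment above describes the folding strategy in prose; the following stand-alone sketch mirrors the same shape over plain values instead of AST expressions (the names `fold_fields1`, `f`, and `b` are illustrative, not compiler API): `b` supplies the base case, seeded from the first or last field when any exist, and the remaining fields are folded left-to-right or right-to-left.

fn fold_fields1<T: Copy>(
    use_foldl: bool,
    f: impl Fn(T, T) -> T,      // combine the accumulator with the next field
    b: impl Fn(Option<T>) -> T, // base case; receives None when there are no fields
    fields: &[T],
) -> T {
    let (base, rest) = match (fields.is_empty(), use_foldl) {
        (false, true) => (b(Some(fields[0])), &fields[1..]),
        (false, false) => (b(Some(fields[fields.len() - 1])), &fields[..fields.len() - 1]),
        (true, _) => (b(None), &fields[..]),
    };
    if use_foldl {
        rest.iter().fold(base, |acc, &x| f(acc, x))
    } else {
        rest.iter().rev().fold(base, |acc, &x| f(acc, x))
    }
}

fn main() {
    // The shape a `derive(PartialEq)`-style fold takes: acc && (next comparison).
    let all_equal = fold_fields1(true, |acc, x| acc && x, |seed| seed.unwrap_or(true), &[true, true, false]);
    assert!(!all_equal);
}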
-pub fn cs_fold1<F, B>( - use_foldl: bool, - f: F, - mut b: B, - enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>, - cx: &mut ExtCtxt<'_>, - trait_span: Span, - substructure: &Substructure<'_>, -) -> P<Expr> -where - F: FnMut(&mut ExtCtxt<'_>, Span, P<Expr>, P<Expr>, &[P<Expr>]) -> P<Expr>, - B: FnMut(&mut ExtCtxt<'_>, Option<(Span, P<Expr>, &[P<Expr>])>) -> P<Expr>, -{ - match *substructure.fields { - EnumMatching(.., ref all_fields) | Struct(_, ref all_fields) => { - let (base, all_fields) = match (all_fields.is_empty(), use_foldl) { - (false, true) => { - let field = &all_fields[0]; - let args = (field.span, field.self_.clone(), &field.other[..]); - (b(cx, Some(args)), &all_fields[1..]) - } - (false, false) => { - let idx = all_fields.len() - 1; - let field = &all_fields[idx]; - let args = (field.span, field.self_.clone(), &field.other[..]); - (b(cx, Some(args)), &all_fields[..idx]) - } - (true, _) => (b(cx, None), &all_fields[..]), - }; - - cs_fold_fields(use_foldl, f, base, cx, all_fields) - } - EnumNonMatchingCollapsed(..) => { - cs_fold_enumnonmatch(enum_nonmatch_f, cx, trait_span, substructure) - } - StaticEnum(..) | StaticStruct(..) => cs_fold_static(cx, trait_span), - } -} - -/// Returns `true` if the type has no value fields -/// (for an enum, no variant has any fields) -pub fn is_type_without_fields(item: &Annotatable) -> bool { - if let Annotatable::Item(ref item) = *item { - match item.kind { - ast::ItemKind::Enum(ref enum_def, _) => { - enum_def.variants.iter().all(|v| v.data.fields().is_empty()) - } - ast::ItemKind::Struct(ref variant_data, _) => variant_data.fields().is_empty(), - _ => false, - } - } else { - false - } -} diff --git a/src/libsyntax_ext/deriving/generic/ty.rs b/src/libsyntax_ext/deriving/generic/ty.rs deleted file mode 100644 index 7eab15aff77..00000000000 --- a/src/libsyntax_ext/deriving/generic/ty.rs +++ /dev/null @@ -1,283 +0,0 @@ -//! A mini version of ast::Ty, which is easier to use, and features an explicit `Self` type to use -//! when specifying impls to be derived. - -pub use PtrTy::*; -pub use Ty::*; - -use syntax::ast::{self, Expr, GenericArg, GenericParamKind, Generics, Ident, SelfKind}; -use syntax::ptr::P; -use syntax::source_map::{respan, DUMMY_SP}; -use syntax_expand::base::ExtCtxt; -use syntax_pos::symbol::kw; -use syntax_pos::Span; - -/// The types of pointers -#[derive(Clone)] -pub enum PtrTy { - /// &'lifetime mut - Borrowed(Option<Ident>, ast::Mutability), - /// *mut - #[allow(dead_code)] - Raw(ast::Mutability), -} - -/// A path, e.g., `::std::option::Option::<i32>` (global). Has support -/// for type parameters and a lifetime. 
-#[derive(Clone)] -pub struct Path<'a> { - path: Vec<&'a str>, - lifetime: Option<Ident>, - params: Vec<Box<Ty<'a>>>, - kind: PathKind, -} - -#[derive(Clone)] -pub enum PathKind { - Local, - Global, - Std, -} - -impl<'a> Path<'a> { - pub fn new(path: Vec<&str>) -> Path<'_> { - Path::new_(path, None, Vec::new(), PathKind::Std) - } - pub fn new_local(path: &str) -> Path<'_> { - Path::new_(vec![path], None, Vec::new(), PathKind::Local) - } - pub fn new_<'r>( - path: Vec<&'r str>, - lifetime: Option<Ident>, - params: Vec<Box<Ty<'r>>>, - kind: PathKind, - ) -> Path<'r> { - Path { path, lifetime, params, kind } - } - - pub fn to_ty( - &self, - cx: &ExtCtxt<'_>, - span: Span, - self_ty: Ident, - self_generics: &Generics, - ) -> P<ast::Ty> { - cx.ty_path(self.to_path(cx, span, self_ty, self_generics)) - } - pub fn to_path( - &self, - cx: &ExtCtxt<'_>, - span: Span, - self_ty: Ident, - self_generics: &Generics, - ) -> ast::Path { - let mut idents = self.path.iter().map(|s| cx.ident_of(*s, span)).collect(); - let lt = mk_lifetimes(cx, span, &self.lifetime); - let tys: Vec<P<ast::Ty>> = - self.params.iter().map(|t| t.to_ty(cx, span, self_ty, self_generics)).collect(); - let params = lt - .into_iter() - .map(|lt| GenericArg::Lifetime(lt)) - .chain(tys.into_iter().map(|ty| GenericArg::Type(ty))) - .collect(); - - match self.kind { - PathKind::Global => cx.path_all(span, true, idents, params), - PathKind::Local => cx.path_all(span, false, idents, params), - PathKind::Std => { - let def_site = cx.with_def_site_ctxt(DUMMY_SP); - idents.insert(0, Ident::new(kw::DollarCrate, def_site)); - cx.path_all(span, false, idents, params) - } - } - } -} - -/// A type. Supports pointers, Self, and literals. -#[derive(Clone)] -pub enum Ty<'a> { - Self_, - /// &/Box/ Ty - Ptr(Box<Ty<'a>>, PtrTy), - /// mod::mod::Type<[lifetime], [Params...]>, including a plain type - /// parameter, and things like `i32` - Literal(Path<'a>), - /// includes unit - Tuple(Vec<Ty<'a>>), -} - -pub fn borrowed_ptrty() -> PtrTy { - Borrowed(None, ast::Mutability::Not) -} -pub fn borrowed(ty: Box<Ty<'_>>) -> Ty<'_> { - Ptr(ty, borrowed_ptrty()) -} - -pub fn borrowed_explicit_self() -> Option<Option<PtrTy>> { - Some(Some(borrowed_ptrty())) -} - -pub fn borrowed_self<'r>() -> Ty<'r> { - borrowed(Box::new(Self_)) -} - -pub fn nil_ty<'r>() -> Ty<'r> { - Tuple(Vec::new()) -} - -fn mk_lifetime(cx: &ExtCtxt<'_>, span: Span, lt: &Option<Ident>) -> Option<ast::Lifetime> { - lt.map(|ident| cx.lifetime(span, ident)) -} - -fn mk_lifetimes(cx: &ExtCtxt<'_>, span: Span, lt: &Option<Ident>) -> Vec<ast::Lifetime> { - mk_lifetime(cx, span, lt).into_iter().collect() -} - -impl<'a> Ty<'a> { - pub fn to_ty( - &self, - cx: &ExtCtxt<'_>, - span: Span, - self_ty: Ident, - self_generics: &Generics, - ) -> P<ast::Ty> { - match *self { - Ptr(ref ty, ref ptr) => { - let raw_ty = ty.to_ty(cx, span, self_ty, self_generics); - match *ptr { - Borrowed(ref lt, mutbl) => { - let lt = mk_lifetime(cx, span, lt); - cx.ty_rptr(span, raw_ty, lt, mutbl) - } - Raw(mutbl) => cx.ty_ptr(span, raw_ty, mutbl), - } - } - Literal(ref p) => p.to_ty(cx, span, self_ty, self_generics), - Self_ => cx.ty_path(self.to_path(cx, span, self_ty, self_generics)), - Tuple(ref fields) => { - let ty = ast::TyKind::Tup( - fields.iter().map(|f| f.to_ty(cx, span, self_ty, self_generics)).collect(), - ); - cx.ty(span, ty) - } - } - } - - pub fn to_path( - &self, - cx: &ExtCtxt<'_>, - span: Span, - self_ty: Ident, - generics: &Generics, - ) -> ast::Path { - match *self { - Self_ => { - let params: Vec<_> = 
generics - .params - .iter() - .map(|param| match param.kind { - GenericParamKind::Lifetime { .. } => { - GenericArg::Lifetime(ast::Lifetime { id: param.id, ident: param.ident }) - } - GenericParamKind::Type { .. } => { - GenericArg::Type(cx.ty_ident(span, param.ident)) - } - GenericParamKind::Const { .. } => { - GenericArg::Const(cx.const_ident(span, param.ident)) - } - }) - .collect(); - - cx.path_all(span, false, vec![self_ty], params) - } - Literal(ref p) => p.to_path(cx, span, self_ty, generics), - Ptr(..) => cx.span_bug(span, "pointer in a path in generic `derive`"), - Tuple(..) => cx.span_bug(span, "tuple in a path in generic `derive`"), - } - } -} - -fn mk_ty_param( - cx: &ExtCtxt<'_>, - span: Span, - name: &str, - attrs: &[ast::Attribute], - bounds: &[Path<'_>], - self_ident: Ident, - self_generics: &Generics, -) -> ast::GenericParam { - let bounds = bounds - .iter() - .map(|b| { - let path = b.to_path(cx, span, self_ident, self_generics); - cx.trait_bound(path) - }) - .collect(); - cx.typaram(span, cx.ident_of(name, span), attrs.to_owned(), bounds, None) -} - -fn mk_generics(params: Vec<ast::GenericParam>, span: Span) -> Generics { - Generics { params, where_clause: ast::WhereClause { predicates: Vec::new(), span }, span } -} - -/// Lifetimes and bounds on type parameters -#[derive(Clone)] -pub struct LifetimeBounds<'a> { - pub lifetimes: Vec<(&'a str, Vec<&'a str>)>, - pub bounds: Vec<(&'a str, Vec<Path<'a>>)>, -} - -impl<'a> LifetimeBounds<'a> { - pub fn empty() -> LifetimeBounds<'a> { - LifetimeBounds { lifetimes: Vec::new(), bounds: Vec::new() } - } - pub fn to_generics( - &self, - cx: &ExtCtxt<'_>, - span: Span, - self_ty: Ident, - self_generics: &Generics, - ) -> Generics { - let generic_params = self - .lifetimes - .iter() - .map(|&(lt, ref bounds)| { - let bounds = bounds - .iter() - .map(|b| ast::GenericBound::Outlives(cx.lifetime(span, Ident::from_str(b)))); - cx.lifetime_def(span, Ident::from_str(lt), vec![], bounds.collect()) - }) - .chain(self.bounds.iter().map(|t| { - let (name, ref bounds) = *t; - mk_ty_param(cx, span, name, &[], &bounds, self_ty, self_generics) - })) - .collect(); - - mk_generics(generic_params, span) - } -} - -pub fn get_explicit_self( - cx: &ExtCtxt<'_>, - span: Span, - self_ptr: &Option<PtrTy>, -) -> (P<Expr>, ast::ExplicitSelf) { - // this constructs a fresh `self` path - let self_path = cx.expr_self(span); - match *self_ptr { - None => (self_path, respan(span, SelfKind::Value(ast::Mutability::Not))), - Some(ref ptr) => { - let self_ty = respan( - span, - match *ptr { - Borrowed(ref lt, mutbl) => { - let lt = lt.map(|s| cx.lifetime(span, s)); - SelfKind::Region(lt, mutbl) - } - Raw(_) => cx.span_bug(span, "attempted to use *self in deriving definition"), - }, - ); - let self_expr = cx.expr_deref(span, self_path); - (self_expr, self_ty) - } - } -} diff --git a/src/libsyntax_ext/deriving/hash.rs b/src/libsyntax_ext/deriving/hash.rs deleted file mode 100644 index acf18ac70e6..00000000000 --- a/src/libsyntax_ext/deriving/hash.rs +++ /dev/null @@ -1,92 +0,0 @@ -use crate::deriving::generic::ty::*; -use crate::deriving::generic::*; -use crate::deriving::{self, path_std, pathvec_std}; - -use syntax::ast::{Expr, MetaItem, Mutability}; -use syntax::ptr::P; -use syntax::symbol::sym; -use syntax_expand::base::{Annotatable, ExtCtxt}; -use syntax_pos::Span; - -pub fn expand_deriving_hash( - cx: &mut ExtCtxt<'_>, - span: Span, - mitem: &MetaItem, - item: &Annotatable, - push: &mut dyn FnMut(Annotatable), -) { - let path = Path::new_(pathvec_std!(cx, 
hash::Hash), None, vec![], PathKind::Std); - - let typaram = "__H"; - - let arg = Path::new_local(typaram); - let hash_trait_def = TraitDef { - span, - attributes: Vec::new(), - path, - additional_bounds: Vec::new(), - generics: LifetimeBounds::empty(), - is_unsafe: false, - supports_unions: false, - methods: vec![MethodDef { - name: "hash", - generics: LifetimeBounds { - lifetimes: Vec::new(), - bounds: vec![(typaram, vec![path_std!(cx, hash::Hasher)])], - }, - explicit_self: borrowed_explicit_self(), - args: vec![(Ptr(Box::new(Literal(arg)), Borrowed(None, Mutability::Mut)), "state")], - ret_ty: nil_ty(), - attributes: vec![], - is_unsafe: false, - unify_fieldless_variants: true, - combine_substructure: combine_substructure(Box::new(|a, b, c| { - hash_substructure(a, b, c) - })), - }], - associated_types: Vec::new(), - }; - - hash_trait_def.expand(cx, mitem, item, push); -} - -fn hash_substructure(cx: &mut ExtCtxt<'_>, trait_span: Span, substr: &Substructure<'_>) -> P<Expr> { - let state_expr = match &substr.nonself_args { - &[o_f] => o_f, - _ => cx.span_bug(trait_span, "incorrect number of arguments in `derive(Hash)`"), - }; - let call_hash = |span, thing_expr| { - let hash_path = { - let strs = cx.std_path(&[sym::hash, sym::Hash, sym::hash]); - - cx.expr_path(cx.path_global(span, strs)) - }; - let ref_thing = cx.expr_addr_of(span, thing_expr); - let expr = cx.expr_call(span, hash_path, vec![ref_thing, state_expr.clone()]); - cx.stmt_expr(expr) - }; - let mut stmts = Vec::new(); - - let fields = match *substr.fields { - Struct(_, ref fs) | EnumMatching(_, 1, .., ref fs) => fs, - EnumMatching(.., ref fs) => { - let variant_value = deriving::call_intrinsic( - cx, - trait_span, - "discriminant_value", - vec![cx.expr_self(trait_span)], - ); - - stmts.push(call_hash(trait_span, variant_value)); - - fs - } - _ => cx.span_bug(trait_span, "impossible substructure in `derive(Hash)`"), - }; - - stmts.extend( - fields.iter().map(|FieldInfo { ref self_, span, .. }| call_hash(*span, self_.clone())), - ); - - cx.expr_block(cx.block(trait_span, stmts)) -} diff --git a/src/libsyntax_ext/deriving/mod.rs b/src/libsyntax_ext/deriving/mod.rs deleted file mode 100644 index ca4d4fbc5bd..00000000000 --- a/src/libsyntax_ext/deriving/mod.rs +++ /dev/null @@ -1,171 +0,0 @@ -//! The compiler code necessary to implement the `#[derive]` extensions. 
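For a concrete picture of what the `TraitDef` above assembles, here is a hand-written equivalent of the impl that `hash_substructure` builds for a plain struct (the `Point` type and the use of `DefaultHasher` are illustrative): every field is hashed in declaration order into the caller-supplied hasher, and for multi-variant enums the discriminant value is hashed first.

use std::hash::{Hash, Hasher};

struct Point { x: i32, y: u64 }

// Shape of the derived impl: one `Hash::hash` call per field, in order,
// against the generic `__H: Hasher` parameter introduced above.
impl Hash for Point {
    fn hash<__H: Hasher>(&self, state: &mut __H) {
        Hash::hash(&self.x, state);
        Hash::hash(&self.y, state);
    }
}

fn main() {
    let mut hasher = std::collections::hash_map::DefaultHasher::new();
    Point { x: 1, y: 2 }.hash(&mut hasher);
    println!("{:x}", hasher.finish());
}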
- -use syntax::ast::{self, ItemKind, MetaItem}; -use syntax::ptr::P; -use syntax::symbol::{sym, Symbol}; -use syntax_expand::base::{Annotatable, ExtCtxt, MultiItemModifier}; -use syntax_pos::Span; - -macro path_local($x:ident) { - generic::ty::Path::new_local(stringify!($x)) -} - -macro pathvec_std($cx:expr, $($rest:ident)::+) {{ - vec![ $( stringify!($rest) ),+ ] -}} - -macro path_std($($x:tt)*) { - generic::ty::Path::new( pathvec_std!( $($x)* ) ) -} - -pub mod bounds; -pub mod clone; -pub mod debug; -pub mod decodable; -pub mod default; -pub mod encodable; -pub mod hash; - -#[path = "cmp/eq.rs"] -pub mod eq; -#[path = "cmp/ord.rs"] -pub mod ord; -#[path = "cmp/partial_eq.rs"] -pub mod partial_eq; -#[path = "cmp/partial_ord.rs"] -pub mod partial_ord; - -pub mod generic; - -crate struct BuiltinDerive( - crate fn(&mut ExtCtxt<'_>, Span, &MetaItem, &Annotatable, &mut dyn FnMut(Annotatable)), -); - -impl MultiItemModifier for BuiltinDerive { - fn expand( - &self, - ecx: &mut ExtCtxt<'_>, - span: Span, - meta_item: &MetaItem, - item: Annotatable, - ) -> Vec<Annotatable> { - // FIXME: Built-in derives often forget to give spans contexts, - // so we are doing it here in a centralized way. - let span = ecx.with_def_site_ctxt(span); - let mut items = Vec::new(); - (self.0)(ecx, span, meta_item, &item, &mut |a| items.push(a)); - items - } -} - -/// Constructs an expression that calls an intrinsic -fn call_intrinsic( - cx: &ExtCtxt<'_>, - span: Span, - intrinsic: &str, - args: Vec<P<ast::Expr>>, -) -> P<ast::Expr> { - let span = cx.with_def_site_ctxt(span); - let path = cx.std_path(&[sym::intrinsics, Symbol::intern(intrinsic)]); - let call = cx.expr_call_global(span, path, args); - - cx.expr_block(P(ast::Block { - stmts: vec![cx.stmt_expr(call)], - id: ast::DUMMY_NODE_ID, - rules: ast::BlockCheckMode::Unsafe(ast::CompilerGenerated), - span, - })) -} - -// Injects `impl<...> Structural for ItemType<...> { }`. In particular, -// does *not* add `where T: Structural` for parameters `T` in `...`. -// (That's the main reason we cannot use TraitDef here.) -fn inject_impl_of_structural_trait( - cx: &mut ExtCtxt<'_>, - span: Span, - item: &Annotatable, - structural_path: generic::ty::Path<'_>, - push: &mut dyn FnMut(Annotatable), -) { - let item = match *item { - Annotatable::Item(ref item) => item, - _ => { - // Non-Item derive is an error, but it should have been - // set earlier; see - // libsyntax_expand/expand.rs:MacroExpander::fully_expand_fragment() - // libsyntax_expand/base.rs:Annotatable::derive_allowed() - return; - } - }; - - let generics = match item.kind { - ItemKind::Struct(_, ref generics) | ItemKind::Enum(_, ref generics) => generics, - // Do not inject `impl Structural for Union`. (`PartialEq` does not - // support unions, so we will see error downstream.) - ItemKind::Union(..) => return, - _ => unreachable!(), - }; - - // Create generics param list for where clauses and impl headers - let mut generics = generics.clone(); - - // Create the type of `self`. - // - // in addition, remove defaults from type params (impls cannot have them). 
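As a rough illustration of the injection described above (using a local stand-in trait, since the real `Structural` marker is compiler-internal): for a struct with a defaulted, bounded type parameter, the generated companion impl keeps the original bounds, drops the default, and adds no `Structural` bound on the parameter.

// Stand-in for the compiler-internal marker trait, for illustration only.
trait Structural {}

struct Wrapper<T: Clone = u8>(T);

// The injected impl: bounds kept, `= u8` default removed, no `T: Structural`.
impl<T: Clone> Structural for Wrapper<T> {}

fn main() {}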
- let self_params: Vec<_> = generics - .params - .iter_mut() - .map(|param| match &mut param.kind { - ast::GenericParamKind::Lifetime => { - ast::GenericArg::Lifetime(cx.lifetime(span, param.ident)) - } - ast::GenericParamKind::Type { default } => { - *default = None; - ast::GenericArg::Type(cx.ty_ident(span, param.ident)) - } - ast::GenericParamKind::Const { ty: _ } => { - ast::GenericArg::Const(cx.const_ident(span, param.ident)) - } - }) - .collect(); - - let type_ident = item.ident; - - let trait_ref = cx.trait_ref(structural_path.to_path(cx, span, type_ident, &generics)); - let self_type = cx.ty_path(cx.path_all(span, false, vec![type_ident], self_params)); - - // It would be nice to also encode constraint `where Self: Eq` (by adding it - // onto `generics` cloned above). Unfortunately, that strategy runs afoul of - // rust-lang/rust#48214. So we perform that additional check in the compiler - // itself, instead of encoding it here. - - // Keep the lint and stability attributes of the original item, to control - // how the generated implementation is linted. - let mut attrs = Vec::new(); - attrs.extend( - item.attrs - .iter() - .filter(|a| { - [sym::allow, sym::warn, sym::deny, sym::forbid, sym::stable, sym::unstable] - .contains(&a.name_or_empty()) - }) - .cloned(), - ); - - let newitem = cx.item( - span, - ast::Ident::invalid(), - attrs, - ItemKind::Impl( - ast::Unsafety::Normal, - ast::ImplPolarity::Positive, - ast::Defaultness::Final, - generics, - Some(trait_ref), - self_type, - Vec::new(), - ), - ); - - push(Annotatable::Item(newitem)); -} diff --git a/src/libsyntax_ext/env.rs b/src/libsyntax_ext/env.rs deleted file mode 100644 index c9ecbabc8ff..00000000000 --- a/src/libsyntax_ext/env.rs +++ /dev/null @@ -1,88 +0,0 @@ -// The compiler code necessary to support the env! extension. Eventually this -// should all get sucked into either the compiler syntax extension plugin -// interface. -// - -use syntax::ast::{self, GenericArg, Ident}; -use syntax::symbol::{kw, sym, Symbol}; -use syntax::tokenstream::TokenStream; -use syntax_expand::base::{self, *}; -use syntax_pos::Span; - -use std::env; - -pub fn expand_option_env<'cx>( - cx: &'cx mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn base::MacResult + 'cx> { - let var = match get_single_str_from_tts(cx, sp, tts, "option_env!") { - None => return DummyResult::any(sp), - Some(v) => v, - }; - - let sp = cx.with_def_site_ctxt(sp); - let e = match env::var(&var.as_str()) { - Err(..) => { - let lt = cx.lifetime(sp, Ident::new(kw::StaticLifetime, sp)); - cx.expr_path(cx.path_all( - sp, - true, - cx.std_path(&[sym::option, sym::Option, sym::None]), - vec![GenericArg::Type(cx.ty_rptr( - sp, - cx.ty_ident(sp, Ident::new(sym::str, sp)), - Some(lt), - ast::Mutability::Not, - ))], - )) - } - Ok(s) => cx.expr_call_global( - sp, - cx.std_path(&[sym::option, sym::Option, sym::Some]), - vec![cx.expr_str(sp, Symbol::intern(&s))], - ), - }; - MacEager::expr(e) -} - -pub fn expand_env<'cx>( - cx: &'cx mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn base::MacResult + 'cx> { - let mut exprs = match get_exprs_from_tts(cx, sp, tts) { - Some(ref exprs) if exprs.is_empty() => { - cx.span_err(sp, "env! 
takes 1 or 2 arguments"); - return DummyResult::any(sp); - } - None => return DummyResult::any(sp), - Some(exprs) => exprs.into_iter(), - }; - - let var = match expr_to_string(cx, exprs.next().unwrap(), "expected string literal") { - None => return DummyResult::any(sp), - Some((v, _style)) => v, - }; - let msg = match exprs.next() { - None => Symbol::intern(&format!("environment variable `{}` not defined", var)), - Some(second) => match expr_to_string(cx, second, "expected string literal") { - None => return DummyResult::any(sp), - Some((s, _style)) => s, - }, - }; - - if exprs.next().is_some() { - cx.span_err(sp, "env! takes 1 or 2 arguments"); - return DummyResult::any(sp); - } - - let e = match env::var(&*var.as_str()) { - Err(_) => { - cx.span_err(sp, &msg.as_str()); - return DummyResult::any(sp); - } - Ok(s) => cx.expr_str(sp, Symbol::intern(&s)), - }; - MacEager::expr(e) -} diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs deleted file mode 100644 index 1d1f68a4906..00000000000 --- a/src/libsyntax_ext/format.rs +++ /dev/null @@ -1,1233 +0,0 @@ -use ArgumentType::*; -use Position::*; - -use fmt_macros as parse; - -use errors::pluralize; -use errors::Applicability; -use errors::DiagnosticBuilder; - -use syntax::ast; -use syntax::ptr::P; -use syntax::symbol::{sym, Symbol}; -use syntax::token; -use syntax::tokenstream::TokenStream; -use syntax_expand::base::{self, *}; -use syntax_pos::{MultiSpan, Span}; - -use rustc_data_structures::fx::{FxHashMap, FxHashSet}; -use std::borrow::Cow; -use std::collections::hash_map::Entry; - -#[derive(PartialEq)] -enum ArgumentType { - Placeholder(&'static str), - Count, -} - -enum Position { - Exact(usize), - Named(Symbol), -} - -struct Context<'a, 'b> { - ecx: &'a mut ExtCtxt<'b>, - /// The macro's call site. References to unstable formatting internals must - /// use this span to pass the stability checker. - macsp: Span, - /// The span of the format string literal. - fmtsp: Span, - - /// List of parsed argument expressions. - /// Named expressions are resolved early, and are appended to the end of - /// argument expressions. - /// - /// Example showing the various data structures in motion: - /// - /// * Original: `"{foo:o} {:o} {foo:x} {0:x} {1:o} {:x} {1:x} {0:o}"` - /// * Implicit argument resolution: `"{foo:o} {0:o} {foo:x} {0:x} {1:o} {1:x} {1:x} {0:o}"` - /// * Name resolution: `"{2:o} {0:o} {2:x} {0:x} {1:o} {1:x} {1:x} {0:o}"` - /// * `arg_types` (in JSON): `[[0, 1, 0], [0, 1, 1], [0, 1]]` - /// * `arg_unique_types` (in simplified JSON): `[["o", "x"], ["o", "x"], ["o", "x"]]` - /// * `names` (in JSON): `{"foo": 2}` - args: Vec<P<ast::Expr>>, - /// Placeholder slot numbers indexed by argument. - arg_types: Vec<Vec<usize>>, - /// Unique format specs seen for each argument. - arg_unique_types: Vec<Vec<ArgumentType>>, - /// Map from named arguments to their resolved indices. - names: FxHashMap<Symbol, usize>, - - /// The latest consecutive literal strings, or empty if there weren't any. - literal: String, - - /// Collection of the compiled `rt::Argument` structures - pieces: Vec<P<ast::Expr>>, - /// Collection of string literals - str_pieces: Vec<P<ast::Expr>>, - /// Stays `true` if all formatting parameters are default (as in "{}{}"). - all_pieces_simple: bool, - - /// Mapping between positional argument references and indices into the - /// final generated static argument array. 
We record the starting indices - /// corresponding to each positional argument, and number of references - /// consumed so far for each argument, to facilitate correct `Position` - /// mapping in `build_piece`. In effect this can be seen as a "flattened" - /// version of `arg_unique_types`. - /// - /// Again with the example described above in docstring for `args`: - /// - /// * `arg_index_map` (in JSON): `[[0, 1, 0], [2, 3, 3], [4, 5]]` - arg_index_map: Vec<Vec<usize>>, - - /// Starting offset of count argument slots. - count_args_index_offset: usize, - - /// Count argument slots and tracking data structures. - /// Count arguments are separately tracked for de-duplication in case - /// multiple references are made to one argument. For example, in this - /// format string: - /// - /// * Original: `"{:.*} {:.foo$} {1:.*} {:.0$}"` - /// * Implicit argument resolution: `"{1:.0$} {2:.foo$} {1:.3$} {4:.0$}"` - /// * Name resolution: `"{1:.0$} {2:.5$} {1:.3$} {4:.0$}"` - /// * `count_positions` (in JSON): `{0: 0, 5: 1, 3: 2}` - /// * `count_args`: `vec![Exact(0), Exact(5), Exact(3)]` - count_args: Vec<Position>, - /// Relative slot numbers for count arguments. - count_positions: FxHashMap<usize, usize>, - /// Number of count slots assigned. - count_positions_count: usize, - - /// Current position of the implicit positional arg pointer, as if it - /// still existed in this phase of processing. - /// Used only for `all_pieces_simple` tracking in `build_piece`. - curarg: usize, - /// Current piece being evaluated, used for error reporting. - curpiece: usize, - /// Keep track of invalid references to positional arguments. - invalid_refs: Vec<(usize, usize)>, - /// Spans of all the formatting arguments, in order. - arg_spans: Vec<Span>, - /// All the formatting arguments that have formatting flags set, in order for diagnostics. - arg_with_formatting: Vec<parse::FormatSpec<'a>>, - /// Whether this formatting string is a literal or it comes from a macro. - is_literal: bool, -} - -/// Parses the arguments from the given list of tokens, returning the diagnostic -/// if there's a parse error so we can continue parsing other format! -/// expressions. -/// -/// If parsing succeeds, the return value is: -/// -/// ```text -/// Some((fmtstr, parsed arguments, index map for named arguments)) -/// ``` -fn parse_args<'a>( - ecx: &mut ExtCtxt<'a>, - sp: Span, - tts: TokenStream, -) -> Result<(P<ast::Expr>, Vec<P<ast::Expr>>, FxHashMap<Symbol, usize>), DiagnosticBuilder<'a>> { - let mut args = Vec::<P<ast::Expr>>::new(); - let mut names = FxHashMap::<Symbol, usize>::default(); - - let mut p = ecx.new_parser_from_tts(tts); - - if p.token == token::Eof { - return Err(ecx.struct_span_err(sp, "requires at least a format string argument")); - } - - let fmtstr = p.parse_expr()?; - let mut first = true; - let mut named = false; - - while p.token != token::Eof { - if !p.eat(&token::Comma) { - if first { - // After `format!(""` we always expect *only* a comma... - let mut err = ecx.struct_span_err(p.token.span, "expected token: `,`"); - err.span_label(p.token.span, "expected `,`"); - p.maybe_annotate_with_ascription(&mut err, false); - return Err(err); - } else { - // ...after that delegate to `expect` to also include the other expected tokens. 
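To make the resolution steps documented on `args`/`names` above concrete, a hypothetical call like the one below ends up with the named expression appended after the positional ones, so `{foo}` is rewritten to that final slot's index during expansion:

fn main() {
    // Positional expressions take slots 0 and 1; `foo = 30` is appended as slot 2,
    // and `{foo}` resolves to `{2}`, as described in the `names` docs above.
    let s = format!("{foo} {} {}", 10, 20, foo = 30);
    assert_eq!(s, "30 10 20");
}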
- return Err(p.expect(&token::Comma).err().unwrap()); - } - } - first = false; - if p.token == token::Eof { - break; - } // accept trailing commas - if p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq) { - named = true; - let name = if let token::Ident(name, _) = p.token.kind { - p.bump(); - name - } else { - unreachable!(); - }; - - p.expect(&token::Eq)?; - let e = p.parse_expr()?; - if let Some(prev) = names.get(&name) { - ecx.struct_span_err(e.span, &format!("duplicate argument named `{}`", name)) - .span_label(args[*prev].span, "previously here") - .span_label(e.span, "duplicate argument") - .emit(); - continue; - } - - // Resolve names into slots early. - // Since all the positional args are already seen at this point - // if the input is valid, we can simply append to the positional - // args. And remember the names. - let slot = args.len(); - names.insert(name, slot); - args.push(e); - } else { - let e = p.parse_expr()?; - if named { - let mut err = ecx - .struct_span_err(e.span, "positional arguments cannot follow named arguments"); - err.span_label(e.span, "positional arguments must be before named arguments"); - for (_, pos) in &names { - err.span_label(args[*pos].span, "named argument"); - } - err.emit(); - } - args.push(e); - } - } - Ok((fmtstr, args, names)) -} - -impl<'a, 'b> Context<'a, 'b> { - fn resolve_name_inplace(&self, p: &mut parse::Piece<'_>) { - // NOTE: the `unwrap_or` branch is needed in case of invalid format - // arguments, e.g., `format_args!("{foo}")`. - let lookup = |s: Symbol| *self.names.get(&s).unwrap_or(&0); - - match *p { - parse::String(_) => {} - parse::NextArgument(ref mut arg) => { - if let parse::ArgumentNamed(s) = arg.position { - arg.position = parse::ArgumentIs(lookup(s)); - } - if let parse::CountIsName(s) = arg.format.width { - arg.format.width = parse::CountIsParam(lookup(s)); - } - if let parse::CountIsName(s) = arg.format.precision { - arg.format.precision = parse::CountIsParam(lookup(s)); - } - } - } - } - - /// Verifies one piece of a parse string, and remembers it if valid. - /// All errors are not emitted as fatal so we can continue giving errors - /// about this and possibly other format strings. - fn verify_piece(&mut self, p: &parse::Piece<'_>) { - match *p { - parse::String(..) => {} - parse::NextArgument(ref arg) => { - // width/precision first, if they have implicit positional - // parameters it makes more sense to consume them first. - self.verify_count(arg.format.width); - self.verify_count(arg.format.precision); - - // argument second, if it's an implicit positional parameter - // it's written second, so it should come after width/precision. - let pos = match arg.position { - parse::ArgumentIs(i) | parse::ArgumentImplicitlyIs(i) => Exact(i), - parse::ArgumentNamed(s) => Named(s), - }; - - let ty = Placeholder(match &arg.format.ty[..] { - "" => "Display", - "?" 
=> "Debug", - "e" => "LowerExp", - "E" => "UpperExp", - "o" => "Octal", - "p" => "Pointer", - "b" => "Binary", - "x" => "LowerHex", - "X" => "UpperHex", - _ => { - let fmtsp = self.fmtsp; - let sp = arg.format.ty_span.map(|sp| fmtsp.from_inner(sp)); - let mut err = self.ecx.struct_span_err( - sp.unwrap_or(fmtsp), - &format!("unknown format trait `{}`", arg.format.ty), - ); - err.note( - "the only appropriate formatting traits are:\n\ - - ``, which uses the `Display` trait\n\ - - `?`, which uses the `Debug` trait\n\ - - `e`, which uses the `LowerExp` trait\n\ - - `E`, which uses the `UpperExp` trait\n\ - - `o`, which uses the `Octal` trait\n\ - - `p`, which uses the `Pointer` trait\n\ - - `b`, which uses the `Binary` trait\n\ - - `x`, which uses the `LowerHex` trait\n\ - - `X`, which uses the `UpperHex` trait", - ); - if let Some(sp) = sp { - for (fmt, name) in &[ - ("", "Display"), - ("?", "Debug"), - ("e", "LowerExp"), - ("E", "UpperExp"), - ("o", "Octal"), - ("p", "Pointer"), - ("b", "Binary"), - ("x", "LowerHex"), - ("X", "UpperHex"), - ] { - err.tool_only_span_suggestion( - sp, - &format!("use the `{}` trait", name), - fmt.to_string(), - Applicability::MaybeIncorrect, - ); - } - } - err.emit(); - "<invalid>" - } - }); - self.verify_arg_type(pos, ty); - self.curpiece += 1; - } - } - } - - fn verify_count(&mut self, c: parse::Count) { - match c { - parse::CountImplied | parse::CountIs(..) => {} - parse::CountIsParam(i) => { - self.verify_arg_type(Exact(i), Count); - } - parse::CountIsName(s) => { - self.verify_arg_type(Named(s), Count); - } - } - } - - fn describe_num_args(&self) -> Cow<'_, str> { - match self.args.len() { - 0 => "no arguments were given".into(), - 1 => "there is 1 argument".into(), - x => format!("there are {} arguments", x).into(), - } - } - - /// Handle invalid references to positional arguments. Output different - /// errors for the case where all arguments are positional and for when - /// there are named arguments or numbered positional arguments in the - /// format string. - fn report_invalid_references(&self, numbered_position_args: bool) { - let mut e; - let sp = if self.is_literal { - // Point at the formatting arguments. - MultiSpan::from_spans(self.arg_spans.clone()) - } else { - MultiSpan::from_span(self.fmtsp) - }; - let refs = - self.invalid_refs.iter().map(|(r, pos)| (r.to_string(), self.arg_spans.get(*pos))); - - let mut zero_based_note = false; - - let count = self.pieces.len() - + self.arg_with_formatting.iter().filter(|fmt| fmt.precision_span.is_some()).count(); - if self.names.is_empty() && !numbered_position_args && count != self.args.len() { - e = self.ecx.struct_span_err( - sp, - &format!( - "{} positional argument{} in format string, but {}", - count, - pluralize!(count), - self.describe_num_args(), - ), - ); - for arg in &self.args { - // Point at the arguments that will be formatted. 
- e.span_label(arg.span, ""); - } - } else { - let (mut refs, spans): (Vec<_>, Vec<_>) = refs.unzip(); - // Avoid `invalid reference to positional arguments 7 and 7 (there is 1 argument)` - // for `println!("{7:7$}", 1);` - refs.sort(); - refs.dedup(); - let (arg_list, mut sp) = if refs.len() == 1 { - let spans: Vec<_> = spans.into_iter().filter_map(|sp| sp.map(|sp| *sp)).collect(); - ( - format!("argument {}", refs[0]), - if spans.is_empty() { - MultiSpan::from_span(self.fmtsp) - } else { - MultiSpan::from_spans(spans) - }, - ) - } else { - let pos = MultiSpan::from_spans(spans.into_iter().map(|s| *s.unwrap()).collect()); - let reg = refs.pop().unwrap(); - (format!("arguments {head} and {tail}", head = refs.join(", "), tail = reg,), pos) - }; - if !self.is_literal { - sp = MultiSpan::from_span(self.fmtsp); - } - - e = self.ecx.struct_span_err( - sp, - &format!( - "invalid reference to positional {} ({})", - arg_list, - self.describe_num_args() - ), - ); - zero_based_note = true; - }; - - for fmt in &self.arg_with_formatting { - if let Some(span) = fmt.precision_span { - let span = self.fmtsp.from_inner(span); - match fmt.precision { - parse::CountIsParam(pos) if pos > self.args.len() => { - e.span_label( - span, - &format!( - "this precision flag expects an `usize` argument at position {}, \ - but {}", - pos, - self.describe_num_args(), - ), - ); - zero_based_note = true; - } - parse::CountIsParam(pos) => { - let count = self.pieces.len() - + self - .arg_with_formatting - .iter() - .filter(|fmt| fmt.precision_span.is_some()) - .count(); - e.span_label(span, &format!( - "this precision flag adds an extra required argument at position {}, \ - which is why there {} expected", - pos, - if count == 1 { - "is 1 argument".to_string() - } else { - format!("are {} arguments", count) - }, - )); - if let Some(arg) = self.args.get(pos) { - e.span_label( - arg.span, - "this parameter corresponds to the precision flag", - ); - } - zero_based_note = true; - } - _ => {} - } - } - if let Some(span) = fmt.width_span { - let span = self.fmtsp.from_inner(span); - match fmt.width { - parse::CountIsParam(pos) if pos > self.args.len() => { - e.span_label( - span, - &format!( - "this width flag expects an `usize` argument at position {}, \ - but {}", - pos, - self.describe_num_args(), - ), - ); - zero_based_note = true; - } - _ => {} - } - } - } - if zero_based_note { - e.note("positional arguments are zero-based"); - } - if !self.arg_with_formatting.is_empty() { - e.note( - "for information about formatting flags, visit \ - https://doc.rust-lang.org/std/fmt/index.html", - ); - } - - e.emit(); - } - - /// Actually verifies and tracks a given format placeholder - /// (a.k.a. argument). - fn verify_arg_type(&mut self, arg: Position, ty: ArgumentType) { - match arg { - Exact(arg) => { - if self.args.len() <= arg { - self.invalid_refs.push((arg, self.curpiece)); - return; - } - match ty { - Placeholder(_) => { - // record every (position, type) combination only once - let ref mut seen_ty = self.arg_unique_types[arg]; - let i = seen_ty.iter().position(|x| *x == ty).unwrap_or_else(|| { - let i = seen_ty.len(); - seen_ty.push(ty); - i - }); - self.arg_types[arg].push(i); - } - Count => { - if let Entry::Vacant(e) = self.count_positions.entry(arg) { - let i = self.count_positions_count; - e.insert(i); - self.count_args.push(Exact(arg)); - self.count_positions_count += 1; - } - } - } - } - - Named(name) => { - match self.names.get(&name) { - Some(&idx) => { - // Treat as positional arg. 
- self.verify_arg_type(Exact(idx), ty) - } - None => { - let msg = format!("there is no argument named `{}`", name); - let sp = if self.is_literal { - *self.arg_spans.get(self.curpiece).unwrap_or(&self.fmtsp) - } else { - self.fmtsp - }; - let mut err = self.ecx.struct_span_err(sp, &msg[..]); - err.emit(); - } - } - } - } - } - - /// Builds the mapping between format placeholders and argument objects. - fn build_index_map(&mut self) { - // NOTE: Keep the ordering the same as `into_expr`'s expansion would do! - let args_len = self.args.len(); - self.arg_index_map.reserve(args_len); - - let mut sofar = 0usize; - - // Map the arguments - for i in 0..args_len { - let ref arg_types = self.arg_types[i]; - let arg_offsets = arg_types.iter().map(|offset| sofar + *offset).collect::<Vec<_>>(); - self.arg_index_map.push(arg_offsets); - sofar += self.arg_unique_types[i].len(); - } - - // Record starting index for counts, which appear just after arguments - self.count_args_index_offset = sofar; - } - - fn rtpath(ecx: &ExtCtxt<'_>, s: &str) -> Vec<ast::Ident> { - ecx.std_path(&[sym::fmt, sym::rt, sym::v1, Symbol::intern(s)]) - } - - fn build_count(&self, c: parse::Count) -> P<ast::Expr> { - let sp = self.macsp; - let count = |c, arg| { - let mut path = Context::rtpath(self.ecx, "Count"); - path.push(self.ecx.ident_of(c, sp)); - match arg { - Some(arg) => self.ecx.expr_call_global(sp, path, vec![arg]), - None => self.ecx.expr_path(self.ecx.path_global(sp, path)), - } - }; - match c { - parse::CountIs(i) => count("Is", Some(self.ecx.expr_usize(sp, i))), - parse::CountIsParam(i) => { - // This needs mapping too, as `i` is referring to a macro - // argument. If `i` is not found in `count_positions` then - // the error had already been emitted elsewhere. - let i = self.count_positions.get(&i).cloned().unwrap_or(0) - + self.count_args_index_offset; - count("Param", Some(self.ecx.expr_usize(sp, i))) - } - parse::CountImplied => count("Implied", None), - // should never be the case, names are already resolved - parse::CountIsName(_) => panic!("should never happen"), - } - } - - /// Build a literal expression from the accumulated string literals - fn build_literal_string(&mut self) -> P<ast::Expr> { - let sp = self.fmtsp; - let s = Symbol::intern(&self.literal); - self.literal.clear(); - self.ecx.expr_str(sp, s) - } - - /// Builds a static `rt::Argument` from a `parse::Piece` or append - /// to the `literal` string. 
- fn build_piece( - &mut self, - piece: &parse::Piece<'a>, - arg_index_consumed: &mut Vec<usize>, - ) -> Option<P<ast::Expr>> { - let sp = self.macsp; - match *piece { - parse::String(s) => { - self.literal.push_str(s); - None - } - parse::NextArgument(ref arg) => { - // Build the position - let pos = { - let pos = |c, arg| { - let mut path = Context::rtpath(self.ecx, "Position"); - path.push(self.ecx.ident_of(c, sp)); - match arg { - Some(i) => { - let arg = self.ecx.expr_usize(sp, i); - self.ecx.expr_call_global(sp, path, vec![arg]) - } - None => self.ecx.expr_path(self.ecx.path_global(sp, path)), - } - }; - match arg.position { - parse::ArgumentIs(i) | parse::ArgumentImplicitlyIs(i) => { - // Map to index in final generated argument array - // in case of multiple types specified - let arg_idx = match arg_index_consumed.get_mut(i) { - None => 0, // error already emitted elsewhere - Some(offset) => { - let ref idx_map = self.arg_index_map[i]; - // unwrap_or branch: error already emitted elsewhere - let arg_idx = *idx_map.get(*offset).unwrap_or(&0); - *offset += 1; - arg_idx - } - }; - pos("At", Some(arg_idx)) - } - - // should never be the case, because names are already - // resolved. - parse::ArgumentNamed(_) => panic!("should never happen"), - } - }; - - let simple_arg = parse::Argument { - position: { - // We don't have ArgumentNext any more, so we have to - // track the current argument ourselves. - let i = self.curarg; - self.curarg += 1; - parse::ArgumentIs(i) - }, - format: parse::FormatSpec { - fill: arg.format.fill, - align: parse::AlignUnknown, - flags: 0, - precision: parse::CountImplied, - precision_span: None, - width: parse::CountImplied, - width_span: None, - ty: arg.format.ty, - ty_span: arg.format.ty_span, - }, - }; - - let fill = arg.format.fill.unwrap_or(' '); - - let pos_simple = arg.position.index() == simple_arg.position.index(); - - if arg.format.precision_span.is_some() || arg.format.width_span.is_some() { - self.arg_with_formatting.push(arg.format); - } - if !pos_simple || arg.format != simple_arg.format || fill != ' ' { - self.all_pieces_simple = false; - } - - // Build the format - let fill = self.ecx.expr_lit(sp, ast::LitKind::Char(fill)); - let align = |name| { - let mut p = Context::rtpath(self.ecx, "Alignment"); - p.push(self.ecx.ident_of(name, sp)); - self.ecx.path_global(sp, p) - }; - let align = match arg.format.align { - parse::AlignLeft => align("Left"), - parse::AlignRight => align("Right"), - parse::AlignCenter => align("Center"), - parse::AlignUnknown => align("Unknown"), - }; - let align = self.ecx.expr_path(align); - let flags = self.ecx.expr_u32(sp, arg.format.flags); - let prec = self.build_count(arg.format.precision); - let width = self.build_count(arg.format.width); - let path = self.ecx.path_global(sp, Context::rtpath(self.ecx, "FormatSpec")); - let fmt = self.ecx.expr_struct( - sp, - path, - vec![ - self.ecx.field_imm(sp, self.ecx.ident_of("fill", sp), fill), - self.ecx.field_imm(sp, self.ecx.ident_of("align", sp), align), - self.ecx.field_imm(sp, self.ecx.ident_of("flags", sp), flags), - self.ecx.field_imm(sp, self.ecx.ident_of("precision", sp), prec), - self.ecx.field_imm(sp, self.ecx.ident_of("width", sp), width), - ], - ); - - let path = self.ecx.path_global(sp, Context::rtpath(self.ecx, "Argument")); - Some(self.ecx.expr_struct( - sp, - path, - vec![ - self.ecx.field_imm(sp, self.ecx.ident_of("position", sp), pos), - self.ecx.field_imm(sp, self.ecx.ident_of("format", sp), fmt), - ], - )) - } - } - } - - /// Actually builds the 
expression which the format_args! block will be - /// expanded to. - fn into_expr(self) -> P<ast::Expr> { - let mut locals = - Vec::with_capacity((0..self.args.len()).map(|i| self.arg_unique_types[i].len()).sum()); - let mut counts = Vec::with_capacity(self.count_args.len()); - let mut pats = Vec::with_capacity(self.args.len()); - let mut heads = Vec::with_capacity(self.args.len()); - - let names_pos: Vec<_> = (0..self.args.len()) - .map(|i| self.ecx.ident_of(&format!("arg{}", i), self.macsp)) - .collect(); - - // First, build up the static array which will become our precompiled - // format "string" - let pieces = self.ecx.expr_vec_slice(self.fmtsp, self.str_pieces); - - // Before consuming the expressions, we have to remember spans for - // count arguments as they are now generated separate from other - // arguments, hence have no access to the `P<ast::Expr>`'s. - let spans_pos: Vec<_> = self.args.iter().map(|e| e.span.clone()).collect(); - - // Right now there is a bug such that for the expression: - // foo(bar(&1)) - // the lifetime of `1` doesn't outlast the call to `bar`, so it's not - // valid for the call to `foo`. To work around this all arguments to the - // format! string are shoved into locals. Furthermore, we shove the address - // of each variable because we don't want to move out of the arguments - // passed to this function. - for (i, e) in self.args.into_iter().enumerate() { - let name = names_pos[i]; - let span = self.ecx.with_def_site_ctxt(e.span); - pats.push(self.ecx.pat_ident(span, name)); - for ref arg_ty in self.arg_unique_types[i].iter() { - locals.push(Context::format_arg(self.ecx, self.macsp, e.span, arg_ty, name)); - } - heads.push(self.ecx.expr_addr_of(e.span, e)); - } - for pos in self.count_args { - let index = match pos { - Exact(i) => i, - _ => panic!("should never happen"), - }; - let name = names_pos[index]; - let span = spans_pos[index]; - counts.push(Context::format_arg(self.ecx, self.macsp, span, &Count, name)); - } - - // Now create a vector containing all the arguments - let args = locals.into_iter().chain(counts.into_iter()); - - let args_array = self.ecx.expr_vec(self.macsp, args.collect()); - - // Constructs an AST equivalent to: - // - // match (&arg0, &arg1) { - // (tmp0, tmp1) => args_array - // } - // - // It was: - // - // let tmp0 = &arg0; - // let tmp1 = &arg1; - // args_array - // - // Because of #11585 the new temporary lifetime rule, the enclosing - // statements for these temporaries become the let's themselves. - // If one or more of them are RefCell's, RefCell borrow() will also - // end there; they don't last long enough for args_array to use them. - // The match expression solves the scope problem. - // - // Note, it may also very well be transformed to: - // - // match arg0 { - // ref tmp0 => { - // match arg1 => { - // ref tmp1 => args_array } } } - // - // But the nested match expression is proved to perform not as well - // as series of let's; the first approach does. - let pat = self.ecx.pat_tuple(self.macsp, pats); - let arm = self.ecx.arm(self.macsp, pat, args_array); - let head = self.ecx.expr(self.macsp, ast::ExprKind::Tup(heads)); - let result = self.ecx.expr_match(self.macsp, head, vec![arm]); - - let args_slice = self.ecx.expr_addr_of(self.macsp, result); - - // Now create the fmt::Arguments struct with all our locals we created. 
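The comment above motivates capturing the arguments through a `match` rather than a series of `let`s; here is a minimal stand-alone illustration of that trick (the `RefCell` is an assumed stand-in for the temporaries discussed there), where the guard created in the scrutinee stays alive for the whole arm body:

use std::cell::RefCell;

fn main() {
    let cell = RefCell::new(41);
    // Same shape as the generated code: temporaries created in the scrutinee
    // (here the `Ref` guard from `borrow()`) live until the match ends, so the
    // bound references remain valid inside the arm.
    let answer = match (&*cell.borrow(),) {
        (tmp0,) => *tmp0 + 1,
    };
    assert_eq!(answer, 42);
}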
- let (fn_name, fn_args) = if self.all_pieces_simple { - ("new_v1", vec![pieces, args_slice]) - } else { - // Build up the static array which will store our precompiled - // nonstandard placeholders, if there are any. - let fmt = self.ecx.expr_vec_slice(self.macsp, self.pieces); - - ("new_v1_formatted", vec![pieces, args_slice, fmt]) - }; - - let path = self.ecx.std_path(&[sym::fmt, sym::Arguments, Symbol::intern(fn_name)]); - self.ecx.expr_call_global(self.macsp, path, fn_args) - } - - fn format_arg( - ecx: &ExtCtxt<'_>, - macsp: Span, - mut sp: Span, - ty: &ArgumentType, - arg: ast::Ident, - ) -> P<ast::Expr> { - sp = ecx.with_def_site_ctxt(sp); - let arg = ecx.expr_ident(sp, arg); - let trait_ = match *ty { - Placeholder(trait_) if trait_ == "<invalid>" => return DummyResult::raw_expr(sp, true), - Placeholder(trait_) => trait_, - Count => { - let path = ecx.std_path(&[sym::fmt, sym::ArgumentV1, sym::from_usize]); - return ecx.expr_call_global(macsp, path, vec![arg]); - } - }; - - let path = ecx.std_path(&[sym::fmt, Symbol::intern(trait_), sym::fmt]); - let format_fn = ecx.path_global(sp, path); - let path = ecx.std_path(&[sym::fmt, sym::ArgumentV1, sym::new]); - ecx.expr_call_global(macsp, path, vec![arg, ecx.expr_path(format_fn)]) - } -} - -fn expand_format_args_impl<'cx>( - ecx: &'cx mut ExtCtxt<'_>, - mut sp: Span, - tts: TokenStream, - nl: bool, -) -> Box<dyn base::MacResult + 'cx> { - sp = ecx.with_def_site_ctxt(sp); - match parse_args(ecx, sp, tts) { - Ok((efmt, args, names)) => { - MacEager::expr(expand_preparsed_format_args(ecx, sp, efmt, args, names, nl)) - } - Err(mut err) => { - err.emit(); - DummyResult::any(sp) - } - } -} - -pub fn expand_format_args<'cx>( - ecx: &'cx mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn base::MacResult + 'cx> { - expand_format_args_impl(ecx, sp, tts, false) -} - -pub fn expand_format_args_nl<'cx>( - ecx: &'cx mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn base::MacResult + 'cx> { - expand_format_args_impl(ecx, sp, tts, true) -} - -/// Take the various parts of `format_args!(efmt, args..., name=names...)` -/// and construct the appropriate formatting expression. -pub fn expand_preparsed_format_args( - ecx: &mut ExtCtxt<'_>, - sp: Span, - efmt: P<ast::Expr>, - args: Vec<P<ast::Expr>>, - names: FxHashMap<Symbol, usize>, - append_newline: bool, -) -> P<ast::Expr> { - // NOTE: this verbose way of initializing `Vec<Vec<ArgumentType>>` is because - // `ArgumentType` does not derive `Clone`. 
- let arg_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect(); - let arg_unique_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect(); - - let mut macsp = ecx.call_site(); - macsp = ecx.with_def_site_ctxt(macsp); - - let msg = "format argument must be a string literal"; - let fmt_sp = efmt.span; - let (fmt_str, fmt_style, fmt_span) = match expr_to_spanned_string(ecx, efmt, msg) { - Ok(mut fmt) if append_newline => { - fmt.0 = Symbol::intern(&format!("{}\n", fmt.0)); - fmt - } - Ok(fmt) => fmt, - Err(err) => { - if let Some(mut err) = err { - let sugg_fmt = match args.len() { - 0 => "{}".to_string(), - _ => format!("{}{{}}", "{} ".repeat(args.len())), - }; - err.span_suggestion( - fmt_sp.shrink_to_lo(), - "you might be missing a string literal to format with", - format!("\"{}\", ", sugg_fmt), - Applicability::MaybeIncorrect, - ); - err.emit(); - } - return DummyResult::raw_expr(sp, true); - } - }; - - let (is_literal, fmt_snippet) = match ecx.source_map().span_to_snippet(fmt_sp) { - Ok(s) => (s.starts_with("\"") || s.starts_with("r#"), Some(s)), - _ => (false, None), - }; - - let str_style = match fmt_style { - ast::StrStyle::Cooked => None, - ast::StrStyle::Raw(raw) => Some(raw as usize), - }; - - /// Finds the indices of all characters that have been processed and differ between the actual - /// written code (code snippet) and the `InternedString` that get's processed in the `Parser` - /// in order to properly synthethise the intra-string `Span`s for error diagnostics. - fn find_skips(snippet: &str, is_raw: bool) -> Vec<usize> { - let mut eat_ws = false; - let mut s = snippet.chars().enumerate().peekable(); - let mut skips = vec![]; - while let Some((pos, c)) = s.next() { - match (c, s.peek()) { - // skip whitespace and empty lines ending in '\\' - ('\\', Some((next_pos, '\n'))) if !is_raw => { - eat_ws = true; - skips.push(pos); - skips.push(*next_pos); - let _ = s.next(); - } - ('\\', Some((next_pos, '\n'))) - | ('\\', Some((next_pos, 'n'))) - | ('\\', Some((next_pos, 't'))) - if eat_ws => - { - skips.push(pos); - skips.push(*next_pos); - let _ = s.next(); - } - (' ', _) | ('\n', _) | ('\t', _) if eat_ws => { - skips.push(pos); - } - ('\\', Some((next_pos, 'n'))) - | ('\\', Some((next_pos, 't'))) - | ('\\', Some((next_pos, '0'))) - | ('\\', Some((next_pos, '\\'))) - | ('\\', Some((next_pos, '\''))) - | ('\\', Some((next_pos, '\"'))) => { - skips.push(*next_pos); - let _ = s.next(); - } - ('\\', Some((_, 'x'))) if !is_raw => { - for _ in 0..3 { - // consume `\xAB` literal - if let Some((pos, _)) = s.next() { - skips.push(pos); - } else { - break; - } - } - } - ('\\', Some((_, 'u'))) if !is_raw => { - if let Some((pos, _)) = s.next() { - skips.push(pos); - } - if let Some((next_pos, next_c)) = s.next() { - if next_c == '{' { - skips.push(next_pos); - let mut i = 0; // consume up to 6 hexanumeric chars + closing `}` - while let (Some((next_pos, c)), true) = (s.next(), i < 7) { - if c.is_digit(16) { - skips.push(next_pos); - } else if c == '}' { - skips.push(next_pos); - break; - } else { - break; - } - i += 1; - } - } else if next_c.is_digit(16) { - skips.push(next_pos); - // We suggest adding `{` and `}` when appropriate, accept it here as if - // it were correct - let mut i = 0; // consume up to 6 hexanumeric chars - while let (Some((next_pos, c)), _) = (s.next(), i < 6) { - if c.is_digit(16) { - skips.push(next_pos); - } else { - break; - } - i += 1; - } - } - } - } - _ if eat_ws => { - // `take_while(|c| c.is_whitespace())` - eat_ws = false; - } - _ => {} - } - } - 
skips - } - - let skips = if let (true, Some(ref snippet)) = (is_literal, fmt_snippet.as_ref()) { - let r_start = str_style.map(|r| r + 1).unwrap_or(0); - let r_end = str_style.map(|r| r).unwrap_or(0); - let s = &snippet[r_start + 1..snippet.len() - r_end - 1]; - find_skips(s, str_style.is_some()) - } else { - vec![] - }; - - let fmt_str = &fmt_str.as_str(); // for the suggestions below - let mut parser = parse::Parser::new(fmt_str, str_style, skips, append_newline); - - let mut unverified_pieces = Vec::new(); - while let Some(piece) = parser.next() { - if !parser.errors.is_empty() { - break; - } else { - unverified_pieces.push(piece); - } - } - - if !parser.errors.is_empty() { - let err = parser.errors.remove(0); - let sp = fmt_span.from_inner(err.span); - let mut e = ecx.struct_span_err(sp, &format!("invalid format string: {}", err.description)); - e.span_label(sp, err.label + " in format string"); - if let Some(note) = err.note { - e.note(¬e); - } - if let Some((label, span)) = err.secondary_label { - let sp = fmt_span.from_inner(span); - e.span_label(sp, label); - } - e.emit(); - return DummyResult::raw_expr(sp, true); - } - - let arg_spans = parser.arg_places.iter().map(|span| fmt_span.from_inner(*span)).collect(); - - let named_pos: FxHashSet<usize> = names.values().cloned().collect(); - - let mut cx = Context { - ecx, - args, - arg_types, - arg_unique_types, - names, - curarg: 0, - curpiece: 0, - arg_index_map: Vec::new(), - count_args: Vec::new(), - count_positions: FxHashMap::default(), - count_positions_count: 0, - count_args_index_offset: 0, - literal: String::new(), - pieces: Vec::with_capacity(unverified_pieces.len()), - str_pieces: Vec::with_capacity(unverified_pieces.len()), - all_pieces_simple: true, - macsp, - fmtsp: fmt_span, - invalid_refs: Vec::new(), - arg_spans, - arg_with_formatting: Vec::new(), - is_literal, - }; - - // This needs to happen *after* the Parser has consumed all pieces to create all the spans - let pieces = unverified_pieces - .into_iter() - .map(|mut piece| { - cx.verify_piece(&piece); - cx.resolve_name_inplace(&mut piece); - piece - }) - .collect::<Vec<_>>(); - - let numbered_position_args = pieces.iter().any(|arg: &parse::Piece<'_>| match *arg { - parse::String(_) => false, - parse::NextArgument(arg) => match arg.position { - parse::Position::ArgumentIs(_) => true, - _ => false, - }, - }); - - cx.build_index_map(); - - let mut arg_index_consumed = vec![0usize; cx.arg_index_map.len()]; - - for piece in pieces { - if let Some(piece) = cx.build_piece(&piece, &mut arg_index_consumed) { - let s = cx.build_literal_string(); - cx.str_pieces.push(s); - cx.pieces.push(piece); - } - } - - if !cx.literal.is_empty() { - let s = cx.build_literal_string(); - cx.str_pieces.push(s); - } - - if cx.invalid_refs.len() >= 1 { - cx.report_invalid_references(numbered_position_args); - } - - // Make sure that all arguments were used and all arguments have types. 
- let errs = cx - .arg_types - .iter() - .enumerate() - .filter(|(i, ty)| ty.is_empty() && !cx.count_positions.contains_key(&i)) - .map(|(i, _)| { - let msg = if named_pos.contains(&i) { - // named argument - "named argument never used" - } else { - // positional argument - "argument never used" - }; - (cx.args[i].span, msg) - }) - .collect::<Vec<_>>(); - - let errs_len = errs.len(); - if !errs.is_empty() { - let args_used = cx.arg_types.len() - errs_len; - let args_unused = errs_len; - - let mut diag = { - if errs_len == 1 { - let (sp, msg) = errs.into_iter().next().unwrap(); - let mut diag = cx.ecx.struct_span_err(sp, msg); - diag.span_label(sp, msg); - diag - } else { - let mut diag = cx.ecx.struct_span_err( - errs.iter().map(|&(sp, _)| sp).collect::<Vec<Span>>(), - "multiple unused formatting arguments", - ); - diag.span_label(cx.fmtsp, "multiple missing formatting specifiers"); - for (sp, msg) in errs { - diag.span_label(sp, msg); - } - diag - } - }; - - // Used to ensure we only report translations for *one* kind of foreign format. - let mut found_foreign = false; - // Decide if we want to look for foreign formatting directives. - if args_used < args_unused { - use super::format_foreign as foreign; - - // The set of foreign substitutions we've explained. This prevents spamming the user - // with `%d should be written as {}` over and over again. - let mut explained = FxHashSet::default(); - - macro_rules! check_foreign { - ($kind:ident) => {{ - let mut show_doc_note = false; - - let mut suggestions = vec![]; - // account for `"` and account for raw strings `r#` - let padding = str_style.map(|i| i + 2).unwrap_or(1); - for sub in foreign::$kind::iter_subs(fmt_str, padding) { - let trn = match sub.translate() { - Some(trn) => trn, - - // If it has no translation, don't call it out specifically. - None => continue, - }; - - let pos = sub.position(); - let sub = String::from(sub.as_str()); - if explained.contains(&sub) { - continue; - } - explained.insert(sub.clone()); - - if !found_foreign { - found_foreign = true; - show_doc_note = true; - } - - if let Some(inner_sp) = pos { - let sp = fmt_sp.from_inner(inner_sp); - suggestions.push((sp, trn)); - } else { - diag.help(&format!("`{}` should be written as `{}`", sub, trn)); - } - } - - if show_doc_note { - diag.note(concat!( - stringify!($kind), - " formatting not supported; see the documentation for `std::fmt`", - )); - } - if suggestions.len() > 0 { - diag.multipart_suggestion( - "format specifiers use curly braces", - suggestions, - Applicability::MachineApplicable, - ); - } - }}; - } - - check_foreign!(printf); - if !found_foreign { - check_foreign!(shell); - } - } - if !found_foreign && errs_len == 1 { - diag.span_label(cx.fmtsp, "formatting specifier missing"); - } - - diag.emit(); - } - - cx.into_expr() -} diff --git a/src/libsyntax_ext/format_foreign.rs b/src/libsyntax_ext/format_foreign.rs deleted file mode 100644 index 9c151cf94b4..00000000000 --- a/src/libsyntax_ext/format_foreign.rs +++ /dev/null @@ -1,827 +0,0 @@ -pub mod printf { - use super::strcursor::StrCursor as Cur; - use syntax_pos::InnerSpan; - - /// Represents a single `printf`-style substitution. - #[derive(Clone, PartialEq, Debug)] - pub enum Substitution<'a> { - /// A formatted output substitution with its internal byte offset. - Format(Format<'a>), - /// A literal `%%` escape. 
- Escape, - } - - impl<'a> Substitution<'a> { - pub fn as_str(&self) -> &str { - match *self { - Substitution::Format(ref fmt) => fmt.span, - Substitution::Escape => "%%", - } - } - - pub fn position(&self) -> Option<InnerSpan> { - match *self { - Substitution::Format(ref fmt) => Some(fmt.position), - _ => None, - } - } - - pub fn set_position(&mut self, start: usize, end: usize) { - match self { - Substitution::Format(ref mut fmt) => { - fmt.position = InnerSpan::new(start, end); - } - _ => {} - } - } - - /// Translate this substitution into an equivalent Rust formatting directive. - /// - /// This ignores cases where the substitution does not have an exact equivalent, or where - /// the substitution would be unnecessary. - pub fn translate(&self) -> Option<String> { - match *self { - Substitution::Format(ref fmt) => fmt.translate(), - Substitution::Escape => None, - } - } - } - - #[derive(Clone, PartialEq, Debug)] - /// A single `printf`-style formatting directive. - pub struct Format<'a> { - /// The entire original formatting directive. - pub span: &'a str, - /// The (1-based) parameter to be converted. - pub parameter: Option<u16>, - /// Formatting flags. - pub flags: &'a str, - /// Minimum width of the output. - pub width: Option<Num>, - /// Precision of the conversion. - pub precision: Option<Num>, - /// Length modifier for the conversion. - pub length: Option<&'a str>, - /// Type of parameter being converted. - pub type_: &'a str, - /// Byte offset for the start and end of this formatting directive. - pub position: InnerSpan, - } - - impl Format<'_> { - /// Translate this directive into an equivalent Rust formatting directive. - /// - /// Returns `None` in cases where the `printf` directive does not have an exact Rust - /// equivalent, rather than guessing. - pub fn translate(&self) -> Option<String> { - use std::fmt::Write; - - let (c_alt, c_zero, c_left, c_plus) = { - let mut c_alt = false; - let mut c_zero = false; - let mut c_left = false; - let mut c_plus = false; - for c in self.flags.chars() { - match c { - '#' => c_alt = true, - '0' => c_zero = true, - '-' => c_left = true, - '+' => c_plus = true, - _ => return None, - } - } - (c_alt, c_zero, c_left, c_plus) - }; - - // Has a special form in Rust for numbers. - let fill = c_zero.then_some("0"); - - let align = c_left.then_some("<"); - - // Rust doesn't have an equivalent to the `' '` flag. - let sign = c_plus.then_some("+"); - - // Not *quite* the same, depending on the type... - let alt = c_alt; - - let width = match self.width { - Some(Num::Next) => { - // NOTE: Rust doesn't support this. - return None; - } - w @ Some(Num::Arg(_)) => w, - w @ Some(Num::Num(_)) => w, - None => None, - }; - - let precision = self.precision; - - // NOTE: although length *can* have an effect, we can't duplicate the effect in Rust, so - // we just ignore it. - - let (type_, use_zero_fill, is_int) = match self.type_ { - "d" | "i" | "u" => (None, true, true), - "f" | "F" => (None, false, false), - "s" | "c" => (None, false, false), - "e" | "E" => (Some(self.type_), true, false), - "x" | "X" | "o" => (Some(self.type_), true, true), - "p" => (Some(self.type_), false, true), - "g" => (Some("e"), true, false), - "G" => (Some("E"), true, false), - _ => return None, - }; - - let (fill, width, precision) = match (is_int, width, precision) { - (true, Some(_), Some(_)) => { - // Rust can't duplicate this insanity. 
- return None; - } - (true, None, Some(p)) => (Some("0"), Some(p), None), - (true, w, None) => (fill, w, None), - (false, w, p) => (fill, w, p), - }; - - let align = match (self.type_, width.is_some(), align.is_some()) { - ("s", true, false) => Some(">"), - _ => align, - }; - - let (fill, zero_fill) = match (fill, use_zero_fill) { - (Some("0"), true) => (None, true), - (fill, _) => (fill, false), - }; - - let alt = match type_ { - Some("x") | Some("X") => alt, - _ => false, - }; - - let has_options = fill.is_some() - || align.is_some() - || sign.is_some() - || alt - || zero_fill - || width.is_some() - || precision.is_some() - || type_.is_some(); - - // Initialise with a rough guess. - let cap = self.span.len() + if has_options { 2 } else { 0 }; - let mut s = String::with_capacity(cap); - - s.push_str("{"); - - if let Some(arg) = self.parameter { - write!(s, "{}", arg.checked_sub(1)?).ok()?; - } - - if has_options { - s.push_str(":"); - - let align = if let Some(fill) = fill { - s.push_str(fill); - align.or(Some(">")) - } else { - align - }; - - if let Some(align) = align { - s.push_str(align); - } - - if let Some(sign) = sign { - s.push_str(sign); - } - - if alt { - s.push_str("#"); - } - - if zero_fill { - s.push_str("0"); - } - - if let Some(width) = width { - width.translate(&mut s).ok()?; - } - - if let Some(precision) = precision { - s.push_str("."); - precision.translate(&mut s).ok()?; - } - - if let Some(type_) = type_ { - s.push_str(type_); - } - } - - s.push_str("}"); - Some(s) - } - } - - /// A general number used in a `printf` formatting directive. - #[derive(Copy, Clone, PartialEq, Debug)] - pub enum Num { - // The range of these values is technically bounded by `NL_ARGMAX`... but, at least for GNU - // libc, it apparently has no real fixed limit. A `u16` is used here on the basis that it - // is *vanishingly* unlikely that *anyone* is going to try formatting something wider, or - // with more precision, than 32 thousand positions which is so wide it couldn't possibly fit - // on a screen. - /// A specific, fixed value. - Num(u16), - /// The value is derived from a positional argument. - Arg(u16), - /// The value is derived from the "next" unconverted argument. - Next, - } - - impl Num { - fn from_str(s: &str, arg: Option<&str>) -> Self { - if let Some(arg) = arg { - Num::Arg(arg.parse().unwrap_or_else(|_| panic!("invalid format arg `{:?}`", arg))) - } else if s == "*" { - Num::Next - } else { - Num::Num(s.parse().unwrap_or_else(|_| panic!("invalid format num `{:?}`", s))) - } - } - - fn translate(&self, s: &mut String) -> std::fmt::Result { - use std::fmt::Write; - match *self { - Num::Num(n) => write!(s, "{}", n), - Num::Arg(n) => { - let n = n.checked_sub(1).ok_or(std::fmt::Error)?; - write!(s, "{}$", n) - } - Num::Next => write!(s, "*"), - } - } - } - - /// Returns an iterator over all substitutions in a given string. - pub fn iter_subs(s: &str, start_pos: usize) -> Substitutions<'_> { - Substitutions { s, pos: start_pos } - } - - /// Iterator over substitutions in a string. 
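As a rough sketch of how this module is driven from format.rs (the `crate::format_foreign::printf` path only exists inside this crate, so the snippet is not standalone; expected outputs are taken from the unit tests at the bottom of this file), translating individual directives looks like:

use crate::format_foreign::printf;

fn demo() {
    // `%05d` has a direct Rust spelling, so a suggestion can be offered...
    let (sub, _rest) = printf::parse_next_substitution("%05d").unwrap();
    assert_eq!(sub.translate(), Some("{:05}".to_string()));

    // ...while `%*d` (width taken from the next vararg) has none, and
    // `translate` declines rather than guessing.
    let (sub, _rest) = printf::parse_next_substitution("%*d").unwrap();
    assert_eq!(sub.translate(), None);
}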
- pub struct Substitutions<'a> { - s: &'a str, - pos: usize, - } - - impl<'a> Iterator for Substitutions<'a> { - type Item = Substitution<'a>; - fn next(&mut self) -> Option<Self::Item> { - let (mut sub, tail) = parse_next_substitution(self.s)?; - self.s = tail; - match sub { - Substitution::Format(_) => { - if let Some(inner_span) = sub.position() { - sub.set_position(inner_span.start + self.pos, inner_span.end + self.pos); - self.pos += inner_span.end; - } - } - Substitution::Escape => self.pos += 2, - } - Some(sub) - } - - fn size_hint(&self) -> (usize, Option<usize>) { - // Substitutions are at least 2 characters long. - (0, Some(self.s.len() / 2)) - } - } - - enum State { - Start, - Flags, - Width, - WidthArg, - Prec, - PrecInner, - Length, - Type, - } - - /// Parse the next substitution from the input string. - pub fn parse_next_substitution(s: &str) -> Option<(Substitution<'_>, &str)> { - use self::State::*; - - let at = { - let start = s.find('%')?; - match s[start + 1..].chars().next()? { - '%' => return Some((Substitution::Escape, &s[start + 2..])), - _ => { /* fall-through */ } - } - - Cur::new_at(&s[..], start) - }; - - // This is meant to be a translation of the following regex: - // - // ```regex - // (?x) - // ^ % - // (?: (?P<parameter> \d+) \$ )? - // (?P<flags> [-+ 0\#']* ) - // (?P<width> \d+ | \* (?: (?P<widtha> \d+) \$ )? )? - // (?: \. (?P<precision> \d+ | \* (?: (?P<precisiona> \d+) \$ )? ) )? - // (?P<length> - // # Standard - // hh | h | ll | l | L | z | j | t - // - // # Other - // | I32 | I64 | I | q - // )? - // (?P<type> . ) - // ``` - - // Used to establish the full span at the end. - let start = at; - // The current position within the string. - let mut at = at.at_next_cp()?; - // `c` is the next codepoint, `next` is a cursor after it. - let (mut c, mut next) = at.next_cp()?; - - // Update `at`, `c`, and `next`, exiting if we're out of input. - macro_rules! move_to { - ($cur:expr) => {{ - at = $cur; - let (c_, next_) = at.next_cp()?; - c = c_; - next = next_; - }}; - } - - // Constructs a result when parsing fails. - // - // Note: `move` used to capture copies of the cursors as they are *now*. - let fallback = move || { - return Some(( - Substitution::Format(Format { - span: start.slice_between(next).unwrap(), - parameter: None, - flags: "", - width: None, - precision: None, - length: None, - type_: at.slice_between(next).unwrap(), - position: InnerSpan::new(start.at, next.at), - }), - next.slice_after(), - )); - }; - - // Next parsing state. - let mut state = Start; - - // Sadly, Rust isn't *quite* smart enough to know these *must* be initialised by the end. - let mut parameter: Option<u16> = None; - let mut flags: &str = ""; - let mut width: Option<Num> = None; - let mut precision: Option<Num> = None; - let mut length: Option<&str> = None; - let mut type_: &str = ""; - let end: Cur<'_>; - - if let Start = state { - match c { - '1'..='9' => { - let end = at_next_cp_while(next, is_digit); - match end.next_cp() { - // Yes, this *is* the parameter. - Some(('$', end2)) => { - state = Flags; - parameter = Some(at.slice_between(end).unwrap().parse().unwrap()); - move_to!(end2); - } - // Wait, no, actually, it's the width. - Some(_) => { - state = Prec; - parameter = None; - flags = ""; - width = Some(Num::from_str(at.slice_between(end).unwrap(), None)); - move_to!(end); - } - // It's invalid, is what it is. 
- None => return fallback(), - } - } - _ => { - state = Flags; - parameter = None; - move_to!(at); - } - } - } - - if let Flags = state { - let end = at_next_cp_while(at, is_flag); - state = Width; - flags = at.slice_between(end).unwrap(); - move_to!(end); - } - - if let Width = state { - match c { - '*' => { - state = WidthArg; - move_to!(next); - } - '1'..='9' => { - let end = at_next_cp_while(next, is_digit); - state = Prec; - width = Some(Num::from_str(at.slice_between(end).unwrap(), None)); - move_to!(end); - } - _ => { - state = Prec; - width = None; - move_to!(at); - } - } - } - - if let WidthArg = state { - let end = at_next_cp_while(at, is_digit); - match end.next_cp() { - Some(('$', end2)) => { - state = Prec; - width = Some(Num::from_str("", Some(at.slice_between(end).unwrap()))); - move_to!(end2); - } - _ => { - state = Prec; - width = Some(Num::Next); - move_to!(end); - } - } - } - - if let Prec = state { - match c { - '.' => { - state = PrecInner; - move_to!(next); - } - _ => { - state = Length; - precision = None; - move_to!(at); - } - } - } - - if let PrecInner = state { - match c { - '*' => { - let end = at_next_cp_while(next, is_digit); - match end.next_cp() { - Some(('$', end2)) => { - state = Length; - precision = Some(Num::from_str("*", next.slice_between(end))); - move_to!(end2); - } - _ => { - state = Length; - precision = Some(Num::Next); - move_to!(end); - } - } - } - '0'..='9' => { - let end = at_next_cp_while(next, is_digit); - state = Length; - precision = Some(Num::from_str(at.slice_between(end).unwrap(), None)); - move_to!(end); - } - _ => return fallback(), - } - } - - if let Length = state { - let c1_next1 = next.next_cp(); - match (c, c1_next1) { - ('h', Some(('h', next1))) | ('l', Some(('l', next1))) => { - state = Type; - length = Some(at.slice_between(next1).unwrap()); - move_to!(next1); - } - - ('h', _) | ('l', _) | ('L', _) | ('z', _) | ('j', _) | ('t', _) | ('q', _) => { - state = Type; - length = Some(at.slice_between(next).unwrap()); - move_to!(next); - } - - ('I', _) => { - let end = next - .at_next_cp() - .and_then(|end| end.at_next_cp()) - .map(|end| (next.slice_between(end).unwrap(), end)); - let end = match end { - Some(("32", end)) => end, - Some(("64", end)) => end, - _ => next, - }; - state = Type; - length = Some(at.slice_between(end).unwrap()); - move_to!(end); - } - - _ => { - state = Type; - length = None; - move_to!(at); - } - } - } - - if let Type = state { - drop(c); - type_ = at.slice_between(next).unwrap(); - - // Don't use `move_to!` here, as we *can* be at the end of the input. 
- at = next; - } - - drop(c); - drop(next); - - end = at; - let position = InnerSpan::new(start.at, end.at); - - let f = Format { - span: start.slice_between(end).unwrap(), - parameter, - flags, - width, - precision, - length, - type_, - position, - }; - Some((Substitution::Format(f), end.slice_after())) - } - - fn at_next_cp_while<F>(mut cur: Cur<'_>, mut pred: F) -> Cur<'_> - where - F: FnMut(char) -> bool, - { - loop { - match cur.next_cp() { - Some((c, next)) => { - if pred(c) { - cur = next; - } else { - return cur; - } - } - None => return cur, - } - } - } - - fn is_digit(c: char) -> bool { - match c { - '0'..='9' => true, - _ => false, - } - } - - fn is_flag(c: char) -> bool { - match c { - '0' | '-' | '+' | ' ' | '#' | '\'' => true, - _ => false, - } - } - - #[cfg(test)] - mod tests; -} - -pub mod shell { - use super::strcursor::StrCursor as Cur; - use syntax_pos::InnerSpan; - - #[derive(Clone, PartialEq, Debug)] - pub enum Substitution<'a> { - Ordinal(u8, (usize, usize)), - Name(&'a str, (usize, usize)), - Escape((usize, usize)), - } - - impl Substitution<'_> { - pub fn as_str(&self) -> String { - match self { - Substitution::Ordinal(n, _) => format!("${}", n), - Substitution::Name(n, _) => format!("${}", n), - Substitution::Escape(_) => "$$".into(), - } - } - - pub fn position(&self) -> Option<InnerSpan> { - match self { - Substitution::Ordinal(_, pos) - | Substitution::Name(_, pos) - | Substitution::Escape(pos) => Some(InnerSpan::new(pos.0, pos.1)), - } - } - - pub fn set_position(&mut self, start: usize, end: usize) { - match self { - Substitution::Ordinal(_, ref mut pos) - | Substitution::Name(_, ref mut pos) - | Substitution::Escape(ref mut pos) => *pos = (start, end), - } - } - - pub fn translate(&self) -> Option<String> { - match *self { - Substitution::Ordinal(n, _) => Some(format!("{{{}}}", n)), - Substitution::Name(n, _) => Some(format!("{{{}}}", n)), - Substitution::Escape(_) => None, - } - } - } - - /// Returns an iterator over all substitutions in a given string. - pub fn iter_subs(s: &str, start_pos: usize) -> Substitutions<'_> { - Substitutions { s, pos: start_pos } - } - - /// Iterator over substitutions in a string. - pub struct Substitutions<'a> { - s: &'a str, - pos: usize, - } - - impl<'a> Iterator for Substitutions<'a> { - type Item = Substitution<'a>; - fn next(&mut self) -> Option<Self::Item> { - match parse_next_substitution(self.s) { - Some((mut sub, tail)) => { - self.s = tail; - if let Some(InnerSpan { start, end }) = sub.position() { - sub.set_position(start + self.pos, end + self.pos); - self.pos += end; - } - Some(sub) - } - None => None, - } - } - - fn size_hint(&self) -> (usize, Option<usize>) { - (0, Some(self.s.len())) - } - } - - /// Parse the next substitution from the input string. - pub fn parse_next_substitution(s: &str) -> Option<(Substitution<'_>, &str)> { - let at = { - let start = s.find('$')?; - match s[start + 1..].chars().next()? 
{ - '$' => return Some((Substitution::Escape((start, start + 2)), &s[start + 2..])), - c @ '0'..='9' => { - let n = (c as u8) - b'0'; - return Some((Substitution::Ordinal(n, (start, start + 2)), &s[start + 2..])); - } - _ => { /* fall-through */ } - } - - Cur::new_at(&s[..], start) - }; - - let at = at.at_next_cp()?; - let (c, inner) = at.next_cp()?; - - if !is_ident_head(c) { - None - } else { - let end = at_next_cp_while(inner, is_ident_tail); - let slice = at.slice_between(end).unwrap(); - let start = at.at - 1; - let end_pos = at.at + slice.len(); - Some((Substitution::Name(slice, (start, end_pos)), end.slice_after())) - } - } - - fn at_next_cp_while<F>(mut cur: Cur<'_>, mut pred: F) -> Cur<'_> - where - F: FnMut(char) -> bool, - { - loop { - match cur.next_cp() { - Some((c, next)) => { - if pred(c) { - cur = next; - } else { - return cur; - } - } - None => return cur, - } - } - } - - fn is_ident_head(c: char) -> bool { - match c { - 'a'..='z' | 'A'..='Z' | '_' => true, - _ => false, - } - } - - fn is_ident_tail(c: char) -> bool { - match c { - '0'..='9' => true, - c => is_ident_head(c), - } - } - - #[cfg(test)] - mod tests; -} - -mod strcursor { - pub struct StrCursor<'a> { - s: &'a str, - pub at: usize, - } - - impl<'a> StrCursor<'a> { - pub fn new_at(s: &'a str, at: usize) -> StrCursor<'a> { - StrCursor { s, at } - } - - pub fn at_next_cp(mut self) -> Option<StrCursor<'a>> { - match self.try_seek_right_cp() { - true => Some(self), - false => None, - } - } - - pub fn next_cp(mut self) -> Option<(char, StrCursor<'a>)> { - let cp = self.cp_after()?; - self.seek_right(cp.len_utf8()); - Some((cp, self)) - } - - fn slice_before(&self) -> &'a str { - &self.s[0..self.at] - } - - pub fn slice_after(&self) -> &'a str { - &self.s[self.at..] - } - - pub fn slice_between(&self, until: StrCursor<'a>) -> Option<&'a str> { - if !str_eq_literal(self.s, until.s) { - None - } else { - use std::cmp::{max, min}; - let beg = min(self.at, until.at); - let end = max(self.at, until.at); - Some(&self.s[beg..end]) - } - } - - fn cp_after(&self) -> Option<char> { - self.slice_after().chars().next() - } - - fn try_seek_right_cp(&mut self) -> bool { - match self.slice_after().chars().next() { - Some(c) => { - self.at += c.len_utf8(); - true - } - None => false, - } - } - - fn seek_right(&mut self, bytes: usize) { - self.at += bytes; - } - } - - impl Copy for StrCursor<'_> {} - - impl<'a> Clone for StrCursor<'a> { - fn clone(&self) -> StrCursor<'a> { - *self - } - } - - impl std::fmt::Debug for StrCursor<'_> { - fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(fmt, "StrCursor({:?} | {:?})", self.slice_before(), self.slice_after()) - } - } - - fn str_eq_literal(a: &str, b: &str) -> bool { - a.as_bytes().as_ptr() == b.as_bytes().as_ptr() && a.len() == b.len() - } -} diff --git a/src/libsyntax_ext/format_foreign/printf/tests.rs b/src/libsyntax_ext/format_foreign/printf/tests.rs deleted file mode 100644 index b9a85a84d6c..00000000000 --- a/src/libsyntax_ext/format_foreign/printf/tests.rs +++ /dev/null @@ -1,145 +0,0 @@ -use super::{iter_subs, parse_next_substitution as pns, Format as F, Num as N, Substitution as S}; - -macro_rules! 
assert_eq_pnsat { - ($lhs:expr, $rhs:expr) => { - assert_eq!( - pns($lhs).and_then(|(s, _)| s.translate()), - $rhs.map(<String as From<&str>>::from) - ) - }; -} - -#[test] -fn test_escape() { - assert_eq!(pns("has no escapes"), None); - assert_eq!(pns("has no escapes, either %"), None); - assert_eq!(pns("*so* has a %% escape"), Some((S::Escape, " escape"))); - assert_eq!(pns("%% leading escape"), Some((S::Escape, " leading escape"))); - assert_eq!(pns("trailing escape %%"), Some((S::Escape, ""))); -} - -#[test] -fn test_parse() { - macro_rules! assert_pns_eq_sub { - ($in_:expr, { - $param:expr, $flags:expr, - $width:expr, $prec:expr, $len:expr, $type_:expr, - $pos:expr, - }) => { - assert_eq!( - pns(concat!($in_, "!")), - Some(( - S::Format(F { - span: $in_, - parameter: $param, - flags: $flags, - width: $width, - precision: $prec, - length: $len, - type_: $type_, - position: syntax_pos::InnerSpan::new($pos.0, $pos.1), - }), - "!" - )) - ) - }; - } - - assert_pns_eq_sub!("%!", - { None, "", None, None, None, "!", (0, 2), }); - assert_pns_eq_sub!("%c", - { None, "", None, None, None, "c", (0, 2), }); - assert_pns_eq_sub!("%s", - { None, "", None, None, None, "s", (0, 2), }); - assert_pns_eq_sub!("%06d", - { None, "0", Some(N::Num(6)), None, None, "d", (0, 4), }); - assert_pns_eq_sub!("%4.2f", - { None, "", Some(N::Num(4)), Some(N::Num(2)), None, "f", (0, 5), }); - assert_pns_eq_sub!("%#x", - { None, "#", None, None, None, "x", (0, 3), }); - assert_pns_eq_sub!("%-10s", - { None, "-", Some(N::Num(10)), None, None, "s", (0, 5), }); - assert_pns_eq_sub!("%*s", - { None, "", Some(N::Next), None, None, "s", (0, 3), }); - assert_pns_eq_sub!("%-10.*s", - { None, "-", Some(N::Num(10)), Some(N::Next), None, "s", (0, 7), }); - assert_pns_eq_sub!("%-*.*s", - { None, "-", Some(N::Next), Some(N::Next), None, "s", (0, 6), }); - assert_pns_eq_sub!("%.6i", - { None, "", None, Some(N::Num(6)), None, "i", (0, 4), }); - assert_pns_eq_sub!("%+i", - { None, "+", None, None, None, "i", (0, 3), }); - assert_pns_eq_sub!("%08X", - { None, "0", Some(N::Num(8)), None, None, "X", (0, 4), }); - assert_pns_eq_sub!("%lu", - { None, "", None, None, Some("l"), "u", (0, 3), }); - assert_pns_eq_sub!("%Iu", - { None, "", None, None, Some("I"), "u", (0, 3), }); - assert_pns_eq_sub!("%I32u", - { None, "", None, None, Some("I32"), "u", (0, 5), }); - assert_pns_eq_sub!("%I64u", - { None, "", None, None, Some("I64"), "u", (0, 5), }); - assert_pns_eq_sub!("%'d", - { None, "'", None, None, None, "d", (0, 3), }); - assert_pns_eq_sub!("%10s", - { None, "", Some(N::Num(10)), None, None, "s", (0, 4), }); - assert_pns_eq_sub!("%-10.10s", - { None, "-", Some(N::Num(10)), Some(N::Num(10)), None, "s", (0, 8), }); - assert_pns_eq_sub!("%1$d", - { Some(1), "", None, None, None, "d", (0, 4), }); - assert_pns_eq_sub!("%2$.*3$d", - { Some(2), "", None, Some(N::Arg(3)), None, "d", (0, 8), }); - assert_pns_eq_sub!("%1$*2$.*3$d", - { Some(1), "", Some(N::Arg(2)), Some(N::Arg(3)), None, "d", (0, 11), }); - assert_pns_eq_sub!("%-8ld", - { None, "-", Some(N::Num(8)), None, Some("l"), "d", (0, 5), }); -} - -#[test] -fn test_iter() { - let s = "The %d'th word %% is: `%.*s` %!\n"; - let subs: Vec<_> = iter_subs(s, 0).map(|sub| sub.translate()).collect(); - assert_eq!( - subs.iter().map(|ms| ms.as_ref().map(|s| &s[..])).collect::<Vec<_>>(), - vec![Some("{}"), None, Some("{:.*}"), None] - ); -} - -/// Checks that the translations are what we expect. 
-#[test] -fn test_translation() { - assert_eq_pnsat!("%c", Some("{}")); - assert_eq_pnsat!("%d", Some("{}")); - assert_eq_pnsat!("%u", Some("{}")); - assert_eq_pnsat!("%x", Some("{:x}")); - assert_eq_pnsat!("%X", Some("{:X}")); - assert_eq_pnsat!("%e", Some("{:e}")); - assert_eq_pnsat!("%E", Some("{:E}")); - assert_eq_pnsat!("%f", Some("{}")); - assert_eq_pnsat!("%g", Some("{:e}")); - assert_eq_pnsat!("%G", Some("{:E}")); - assert_eq_pnsat!("%s", Some("{}")); - assert_eq_pnsat!("%p", Some("{:p}")); - - assert_eq_pnsat!("%06d", Some("{:06}")); - assert_eq_pnsat!("%4.2f", Some("{:4.2}")); - assert_eq_pnsat!("%#x", Some("{:#x}")); - assert_eq_pnsat!("%-10s", Some("{:<10}")); - assert_eq_pnsat!("%*s", None); - assert_eq_pnsat!("%-10.*s", Some("{:<10.*}")); - assert_eq_pnsat!("%-*.*s", None); - assert_eq_pnsat!("%.6i", Some("{:06}")); - assert_eq_pnsat!("%+i", Some("{:+}")); - assert_eq_pnsat!("%08X", Some("{:08X}")); - assert_eq_pnsat!("%lu", Some("{}")); - assert_eq_pnsat!("%Iu", Some("{}")); - assert_eq_pnsat!("%I32u", Some("{}")); - assert_eq_pnsat!("%I64u", Some("{}")); - assert_eq_pnsat!("%'d", None); - assert_eq_pnsat!("%10s", Some("{:>10}")); - assert_eq_pnsat!("%-10.10s", Some("{:<10.10}")); - assert_eq_pnsat!("%1$d", Some("{0}")); - assert_eq_pnsat!("%2$.*3$d", Some("{1:02$}")); - assert_eq_pnsat!("%1$*2$.*3$s", Some("{0:>1$.2$}")); - assert_eq_pnsat!("%-8ld", Some("{:<8}")); -} diff --git a/src/libsyntax_ext/format_foreign/shell/tests.rs b/src/libsyntax_ext/format_foreign/shell/tests.rs deleted file mode 100644 index ed8fe81dfcd..00000000000 --- a/src/libsyntax_ext/format_foreign/shell/tests.rs +++ /dev/null @@ -1,56 +0,0 @@ -use super::{parse_next_substitution as pns, Substitution as S}; - -macro_rules! assert_eq_pnsat { - ($lhs:expr, $rhs:expr) => { - assert_eq!( - pns($lhs).and_then(|(f, _)| f.translate()), - $rhs.map(<String as From<&str>>::from) - ) - }; -} - -#[test] -fn test_escape() { - assert_eq!(pns("has no escapes"), None); - assert_eq!(pns("has no escapes, either $"), None); - assert_eq!(pns("*so* has a $$ escape"), Some((S::Escape((11, 13)), " escape"))); - assert_eq!(pns("$$ leading escape"), Some((S::Escape((0, 2)), " leading escape"))); - assert_eq!(pns("trailing escape $$"), Some((S::Escape((16, 18)), ""))); -} - -#[test] -fn test_parse() { - macro_rules! 
assert_pns_eq_sub { - ($in_:expr, $kind:ident($arg:expr, $pos:expr)) => { - assert_eq!(pns(concat!($in_, "!")), Some((S::$kind($arg.into(), $pos), "!"))) - }; - } - - assert_pns_eq_sub!("$0", Ordinal(0, (0, 2))); - assert_pns_eq_sub!("$1", Ordinal(1, (0, 2))); - assert_pns_eq_sub!("$9", Ordinal(9, (0, 2))); - assert_pns_eq_sub!("$N", Name("N", (0, 2))); - assert_pns_eq_sub!("$NAME", Name("NAME", (0, 5))); -} - -#[test] -fn test_iter() { - use super::iter_subs; - let s = "The $0'th word $$ is: `$WORD` $!\n"; - let subs: Vec<_> = iter_subs(s, 0).map(|sub| sub.translate()).collect(); - assert_eq!( - subs.iter().map(|ms| ms.as_ref().map(|s| &s[..])).collect::<Vec<_>>(), - vec![Some("{0}"), None, Some("{WORD}")] - ); -} - -#[test] -fn test_translation() { - assert_eq_pnsat!("$0", Some("{0}")); - assert_eq_pnsat!("$9", Some("{9}")); - assert_eq_pnsat!("$1", Some("{1}")); - assert_eq_pnsat!("$10", Some("{1}")); - assert_eq_pnsat!("$stuff", Some("{stuff}")); - assert_eq_pnsat!("$NAME", Some("{NAME}")); - assert_eq_pnsat!("$PREFIX/bin", Some("{PREFIX}")); -} diff --git a/src/libsyntax_ext/global_allocator.rs b/src/libsyntax_ext/global_allocator.rs deleted file mode 100644 index edfdda4703c..00000000000 --- a/src/libsyntax_ext/global_allocator.rs +++ /dev/null @@ -1,175 +0,0 @@ -use crate::util::check_builtin_macro_attribute; - -use syntax::ast::{self, Attribute, Expr, FnHeader, FnSig, Generics, Ident, Param}; -use syntax::ast::{ItemKind, Mutability, Stmt, Ty, TyKind, Unsafety}; -use syntax::expand::allocator::{AllocatorKind, AllocatorMethod, AllocatorTy, ALLOCATOR_METHODS}; -use syntax::ptr::P; -use syntax::symbol::{kw, sym, Symbol}; -use syntax_expand::base::{Annotatable, ExtCtxt}; -use syntax_pos::Span; - -pub fn expand( - ecx: &mut ExtCtxt<'_>, - _span: Span, - meta_item: &ast::MetaItem, - item: Annotatable, -) -> Vec<Annotatable> { - check_builtin_macro_attribute(ecx, meta_item, sym::global_allocator); - - let not_static = |item: Annotatable| { - ecx.parse_sess.span_diagnostic.span_err(item.span(), "allocators must be statics"); - vec![item] - }; - let item = match item { - Annotatable::Item(item) => match item.kind { - ItemKind::Static(..) => item, - _ => return not_static(Annotatable::Item(item)), - }, - _ => return not_static(item), - }; - - // Generate a bunch of new items using the AllocFnFactory - let span = ecx.with_def_site_ctxt(item.span); - let f = AllocFnFactory { span, kind: AllocatorKind::Global, global: item.ident, cx: ecx }; - - // Generate item statements for the allocator methods. - let stmts = ALLOCATOR_METHODS.iter().map(|method| f.allocator_fn(method)).collect(); - - // Generate anonymous constant serving as container for the allocator methods. - let const_ty = ecx.ty(span, TyKind::Tup(Vec::new())); - let const_body = ecx.expr_block(ecx.block(span, stmts)); - let const_item = ecx.item_const(span, Ident::new(kw::Underscore, span), const_ty, const_body); - - // Return the original item and the new methods. 
- vec![Annotatable::Item(item), Annotatable::Item(const_item)] -} - -struct AllocFnFactory<'a, 'b> { - span: Span, - kind: AllocatorKind, - global: Ident, - cx: &'b ExtCtxt<'a>, -} - -impl AllocFnFactory<'_, '_> { - fn allocator_fn(&self, method: &AllocatorMethod) -> Stmt { - let mut abi_args = Vec::new(); - let mut i = 0; - let ref mut mk = || { - let name = self.cx.ident_of(&format!("arg{}", i), self.span); - i += 1; - name - }; - let args = method.inputs.iter().map(|ty| self.arg_ty(ty, &mut abi_args, mk)).collect(); - let result = self.call_allocator(method.name, args); - let (output_ty, output_expr) = self.ret_ty(&method.output, result); - let decl = self.cx.fn_decl(abi_args, ast::FunctionRetTy::Ty(output_ty)); - let header = FnHeader { unsafety: Unsafety::Unsafe, ..FnHeader::default() }; - let sig = FnSig { decl, header }; - let kind = ItemKind::Fn(sig, Generics::default(), self.cx.block_expr(output_expr)); - let item = self.cx.item( - self.span, - self.cx.ident_of(&self.kind.fn_name(method.name), self.span), - self.attrs(), - kind, - ); - self.cx.stmt_item(self.span, item) - } - - fn call_allocator(&self, method: &str, mut args: Vec<P<Expr>>) -> P<Expr> { - let method = self.cx.std_path(&[ - Symbol::intern("alloc"), - Symbol::intern("GlobalAlloc"), - Symbol::intern(method), - ]); - let method = self.cx.expr_path(self.cx.path(self.span, method)); - let allocator = self.cx.path_ident(self.span, self.global); - let allocator = self.cx.expr_path(allocator); - let allocator = self.cx.expr_addr_of(self.span, allocator); - args.insert(0, allocator); - - self.cx.expr_call(self.span, method, args) - } - - fn attrs(&self) -> Vec<Attribute> { - let special = sym::rustc_std_internal_symbol; - let special = self.cx.meta_word(self.span, special); - vec![self.cx.attribute(special)] - } - - fn arg_ty( - &self, - ty: &AllocatorTy, - args: &mut Vec<Param>, - ident: &mut dyn FnMut() -> Ident, - ) -> P<Expr> { - match *ty { - AllocatorTy::Layout => { - let usize = self.cx.path_ident(self.span, Ident::new(sym::usize, self.span)); - let ty_usize = self.cx.ty_path(usize); - let size = ident(); - let align = ident(); - args.push(self.cx.param(self.span, size, ty_usize.clone())); - args.push(self.cx.param(self.span, align, ty_usize)); - - let layout_new = self.cx.std_path(&[ - Symbol::intern("alloc"), - Symbol::intern("Layout"), - Symbol::intern("from_size_align_unchecked"), - ]); - let layout_new = self.cx.expr_path(self.cx.path(self.span, layout_new)); - let size = self.cx.expr_ident(self.span, size); - let align = self.cx.expr_ident(self.span, align); - let layout = self.cx.expr_call(self.span, layout_new, vec![size, align]); - layout - } - - AllocatorTy::Ptr => { - let ident = ident(); - args.push(self.cx.param(self.span, ident, self.ptr_u8())); - let arg = self.cx.expr_ident(self.span, ident); - self.cx.expr_cast(self.span, arg, self.ptr_u8()) - } - - AllocatorTy::Usize => { - let ident = ident(); - args.push(self.cx.param(self.span, ident, self.usize())); - self.cx.expr_ident(self.span, ident) - } - - AllocatorTy::ResultPtr | AllocatorTy::Unit => { - panic!("can't convert AllocatorTy to an argument") - } - } - } - - fn ret_ty(&self, ty: &AllocatorTy, expr: P<Expr>) -> (P<Ty>, P<Expr>) { - match *ty { - AllocatorTy::ResultPtr => { - // We're creating: - // - // #expr as *mut u8 - - let expr = self.cx.expr_cast(self.span, expr, self.ptr_u8()); - (self.ptr_u8(), expr) - } - - AllocatorTy::Unit => (self.cx.ty(self.span, TyKind::Tup(Vec::new())), expr), - - AllocatorTy::Layout | AllocatorTy::Usize | 
AllocatorTy::Ptr => { - panic!("can't convert `AllocatorTy` to an output") - } - } - } - - fn usize(&self) -> P<Ty> { - let usize = self.cx.path_ident(self.span, Ident::new(sym::usize, self.span)); - self.cx.ty_path(usize) - } - - fn ptr_u8(&self) -> P<Ty> { - let u8 = self.cx.path_ident(self.span, Ident::new(sym::u8, self.span)); - let ty_u8 = self.cx.ty_path(u8); - self.cx.ty_ptr(self.span, ty_u8, Mutability::Mut) - } -} diff --git a/src/libsyntax_ext/global_asm.rs b/src/libsyntax_ext/global_asm.rs deleted file mode 100644 index fc933e4673a..00000000000 --- a/src/libsyntax_ext/global_asm.rs +++ /dev/null @@ -1,64 +0,0 @@ -/// Module-level assembly support. -/// -/// The macro defined here allows you to specify "top-level", -/// "file-scoped", or "module-level" assembly. These synonyms -/// all correspond to LLVM's module-level inline assembly instruction. -/// -/// For example, `global_asm!("some assembly here")` codegens to -/// LLVM's `module asm "some assembly here"`. All of LLVM's caveats -/// therefore apply. -use errors::DiagnosticBuilder; - -use smallvec::smallvec; -use syntax::ast; -use syntax::ptr::P; -use syntax::source_map::respan; -use syntax::token; -use syntax::tokenstream::TokenStream; -use syntax_expand::base::{self, *}; -use syntax_pos::Span; - -pub fn expand_global_asm<'cx>( - cx: &'cx mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn base::MacResult + 'cx> { - match parse_global_asm(cx, sp, tts) { - Ok(Some(global_asm)) => MacEager::items(smallvec![P(ast::Item { - ident: ast::Ident::invalid(), - attrs: Vec::new(), - id: ast::DUMMY_NODE_ID, - kind: ast::ItemKind::GlobalAsm(P(global_asm)), - vis: respan(sp.shrink_to_lo(), ast::VisibilityKind::Inherited), - span: cx.with_def_site_ctxt(sp), - tokens: None, - })]), - Ok(None) => DummyResult::any(sp), - Err(mut err) => { - err.emit(); - DummyResult::any(sp) - } - } -} - -fn parse_global_asm<'a>( - cx: &mut ExtCtxt<'a>, - sp: Span, - tts: TokenStream, -) -> Result<Option<ast::GlobalAsm>, DiagnosticBuilder<'a>> { - let mut p = cx.new_parser_from_tts(tts); - - if p.token == token::Eof { - let mut err = cx.struct_span_err(sp, "macro requires a string literal as an argument"); - err.span_label(sp, "string literal required"); - return Err(err); - } - - let expr = p.parse_expr()?; - let (asm, _) = match expr_to_string(cx, expr, "inline assembly must be a string literal") { - Some((s, st)) => (s, st), - None => return Ok(None), - }; - - Ok(Some(ast::GlobalAsm { asm })) -} diff --git a/src/libsyntax_ext/lib.rs b/src/libsyntax_ext/lib.rs deleted file mode 100644 index 40aafece8c6..00000000000 --- a/src/libsyntax_ext/lib.rs +++ /dev/null @@ -1,109 +0,0 @@ -//! This crate contains implementations of built-in macros and other code generating facilities -//! injecting code into the crate before it is lowered to HIR. 
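In user code, the machinery collected in this crate is what sits behind invocations like the ones below (ordinary stable Rust, shown only to anchor which built-in macros are meant):

fn main() {
    // source_util.rs: file!/line!/column!/stringify!
    println!("built from {} line {}", file!(), line!());
    // env.rs: env!/option_env!
    let pkg = option_env!("CARGO_PKG_NAME");
    // concat.rs, plus format.rs via format_args!
    let msg = format!("{} {:?}", concat!("a", "b", 3), pkg);
    assert!(!msg.is_empty());
}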
- -#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] -#![feature(bool_to_option)] -#![feature(crate_visibility_modifier)] -#![feature(decl_macro)] -#![feature(nll)] -#![feature(proc_macro_internals)] -#![feature(proc_macro_quote)] - -extern crate proc_macro; - -use crate::deriving::*; - -use syntax::ast::Ident; -use syntax::edition::Edition; -use syntax::symbol::sym; -use syntax_expand::base::{MacroExpanderFn, Resolver, SyntaxExtension, SyntaxExtensionKind}; -use syntax_expand::proc_macro::BangProcMacro; - -mod asm; -mod assert; -mod cfg; -mod compile_error; -mod concat; -mod concat_idents; -mod deriving; -mod env; -mod format; -mod format_foreign; -mod global_allocator; -mod global_asm; -mod log_syntax; -mod source_util; -mod test; -mod trace_macros; -mod util; - -pub mod cmdline_attrs; -pub mod proc_macro_harness; -pub mod standard_library_imports; -pub mod test_harness; - -pub fn register_builtin_macros(resolver: &mut dyn Resolver, edition: Edition) { - let mut register = |name, kind| { - resolver.register_builtin_macro( - Ident::with_dummy_span(name), - SyntaxExtension { is_builtin: true, ..SyntaxExtension::default(kind, edition) }, - ) - }; - macro register_bang($($name:ident: $f:expr,)*) { - $(register(sym::$name, SyntaxExtensionKind::LegacyBang(Box::new($f as MacroExpanderFn)));)* - } - macro register_attr($($name:ident: $f:expr,)*) { - $(register(sym::$name, SyntaxExtensionKind::LegacyAttr(Box::new($f)));)* - } - macro register_derive($($name:ident: $f:expr,)*) { - $(register(sym::$name, SyntaxExtensionKind::LegacyDerive(Box::new(BuiltinDerive($f))));)* - } - - register_bang! { - asm: asm::expand_asm, - assert: assert::expand_assert, - cfg: cfg::expand_cfg, - column: source_util::expand_column, - compile_error: compile_error::expand_compile_error, - concat_idents: concat_idents::expand_concat_idents, - concat: concat::expand_concat, - env: env::expand_env, - file: source_util::expand_file, - format_args_nl: format::expand_format_args_nl, - format_args: format::expand_format_args, - global_asm: global_asm::expand_global_asm, - include_bytes: source_util::expand_include_bytes, - include_str: source_util::expand_include_str, - include: source_util::expand_include, - line: source_util::expand_line, - log_syntax: log_syntax::expand_log_syntax, - module_path: source_util::expand_mod, - option_env: env::expand_option_env, - stringify: source_util::expand_stringify, - trace_macros: trace_macros::expand_trace_macros, - } - - register_attr! { - bench: test::expand_bench, - global_allocator: global_allocator::expand, - test: test::expand_test, - test_case: test::expand_test_case, - } - - register_derive! 
{
- Clone: clone::expand_deriving_clone,
- Copy: bounds::expand_deriving_copy,
- Debug: debug::expand_deriving_debug,
- Default: default::expand_deriving_default,
- Eq: eq::expand_deriving_eq,
- Hash: hash::expand_deriving_hash,
- Ord: ord::expand_deriving_ord,
- PartialEq: partial_eq::expand_deriving_partial_eq,
- PartialOrd: partial_ord::expand_deriving_partial_ord,
- RustcDecodable: decodable::expand_deriving_rustc_decodable,
- RustcEncodable: encodable::expand_deriving_rustc_encodable,
- }
-
- let client = proc_macro::bridge::client::Client::expand1(proc_macro::quote);
- register(sym::quote, SyntaxExtensionKind::Bang(Box::new(BangProcMacro { client })));
-}
diff --git a/src/libsyntax_ext/log_syntax.rs b/src/libsyntax_ext/log_syntax.rs
deleted file mode 100644
index 111226be877..00000000000
--- a/src/libsyntax_ext/log_syntax.rs
+++ /dev/null
@@ -1,15 +0,0 @@
-use syntax::print;
-use syntax::tokenstream::TokenStream;
-use syntax_expand::base;
-use syntax_pos;
-
-pub fn expand_log_syntax<'cx>(
- _cx: &'cx mut base::ExtCtxt<'_>,
- sp: syntax_pos::Span,
- tts: TokenStream,
-) -> Box<dyn base::MacResult + 'cx> {
- println!("{}", print::pprust::tts_to_string(tts));
-
- // any so that `log_syntax` can be invoked as an expression and item.
- base::DummyResult::any_valid(sp)
-}
diff --git a/src/libsyntax_ext/proc_macro_harness.rs b/src/libsyntax_ext/proc_macro_harness.rs
deleted file mode 100644
index b6436cc1646..00000000000
--- a/src/libsyntax_ext/proc_macro_harness.rs
+++ /dev/null
@@ -1,463 +0,0 @@
-use std::mem;
-
-use smallvec::smallvec;
-use syntax::ast::{self, Ident};
-use syntax::attr;
-use syntax::expand::is_proc_macro_attr;
-use syntax::print::pprust;
-use syntax::ptr::P;
-use syntax::sess::ParseSess;
-use syntax::symbol::{kw, sym};
-use syntax::visit::{self, Visitor};
-use syntax_expand::base::{ExtCtxt, Resolver};
-use syntax_expand::expand::{AstFragment, ExpansionConfig};
-use syntax_pos::hygiene::AstPass;
-use syntax_pos::{Span, DUMMY_SP};
-
-struct ProcMacroDerive {
- trait_name: ast::Name,
- function_name: Ident,
- span: Span,
- attrs: Vec<ast::Name>,
-}
-
-enum ProcMacroDefType {
- Attr,
- Bang,
-}
-
-struct ProcMacroDef {
- function_name: Ident,
- span: Span,
- def_type: ProcMacroDefType,
-}
-
-enum ProcMacro {
- Derive(ProcMacroDerive),
- Def(ProcMacroDef),
-}
-
-struct CollectProcMacros<'a> {
- macros: Vec<ProcMacro>,
- in_root: bool,
- handler: &'a errors::Handler,
- is_proc_macro_crate: bool,
- is_test_crate: bool,
-}
-
-pub fn inject(
- sess: &ParseSess,
- resolver: &mut dyn Resolver,
- mut krate: ast::Crate,
- is_proc_macro_crate: bool,
- has_proc_macro_decls: bool,
- is_test_crate: bool,
- num_crate_types: usize,
- handler: &errors::Handler,
-) -> ast::Crate {
- let ecfg = ExpansionConfig::default("proc_macro".to_string());
- let mut cx = ExtCtxt::new(sess, ecfg, resolver);
-
- let mut collect = CollectProcMacros {
- macros: Vec::new(),
- in_root: true,
- handler,
- is_proc_macro_crate,
- is_test_crate,
- };
-
- if has_proc_macro_decls || is_proc_macro_crate {
- visit::walk_crate(&mut collect, &krate);
- }
- // NOTE: If you change the order of macros in this vec
- // for any reason, you must also update 'raw_proc_macro'
- // in src/librustc_metadata/decoder.rs
- let macros = collect.macros;
-
- if !is_proc_macro_crate {
- return krate;
- }
-
- if num_crate_types > 1 {
- handler.err("cannot mix `proc-macro` crate type with others");
- }
-
- if is_test_crate {
- return krate;
- }
-
- krate.module.items.push(mk_decls(&mut cx, &macros));
-
- krate
-}
-
-impl<'a>
CollectProcMacros<'a> { - fn check_not_pub_in_root(&self, vis: &ast::Visibility, sp: Span) { - if self.is_proc_macro_crate && self.in_root && vis.node.is_pub() { - self.handler.span_err( - sp, - "`proc-macro` crate types currently cannot export any items other \ - than functions tagged with `#[proc_macro]`, `#[proc_macro_derive]`, \ - or `#[proc_macro_attribute]`", - ); - } - } - - fn collect_custom_derive(&mut self, item: &'a ast::Item, attr: &'a ast::Attribute) { - // Once we've located the `#[proc_macro_derive]` attribute, verify - // that it's of the form `#[proc_macro_derive(Foo)]` or - // `#[proc_macro_derive(Foo, attributes(A, ..))]` - let list = match attr.meta_item_list() { - Some(list) => list, - None => return, - }; - if list.len() != 1 && list.len() != 2 { - self.handler.span_err(attr.span, "attribute must have either one or two arguments"); - return; - } - let trait_attr = match list[0].meta_item() { - Some(meta_item) => meta_item, - _ => { - self.handler.span_err(list[0].span(), "not a meta item"); - return; - } - }; - let trait_ident = match trait_attr.ident() { - Some(trait_ident) if trait_attr.is_word() => trait_ident, - _ => { - self.handler.span_err(trait_attr.span, "must only be one word"); - return; - } - }; - - if !trait_ident.name.can_be_raw() { - self.handler.span_err( - trait_attr.span, - &format!("`{}` cannot be a name of derive macro", trait_ident), - ); - } - - let attributes_attr = list.get(1); - let proc_attrs: Vec<_> = if let Some(attr) = attributes_attr { - if !attr.check_name(sym::attributes) { - self.handler.span_err(attr.span(), "second argument must be `attributes`") - } - attr.meta_item_list() - .unwrap_or_else(|| { - self.handler - .span_err(attr.span(), "attribute must be of form: `attributes(foo, bar)`"); - &[] - }) - .into_iter() - .filter_map(|attr| { - let attr = match attr.meta_item() { - Some(meta_item) => meta_item, - _ => { - self.handler.span_err(attr.span(), "not a meta item"); - return None; - } - }; - - let ident = match attr.ident() { - Some(ident) if attr.is_word() => ident, - _ => { - self.handler.span_err(attr.span, "must only be one word"); - return None; - } - }; - if !ident.name.can_be_raw() { - self.handler.span_err( - attr.span, - &format!("`{}` cannot be a name of derive helper attribute", ident), - ); - } - - Some(ident.name) - }) - .collect() - } else { - Vec::new() - }; - - if self.in_root && item.vis.node.is_pub() { - self.macros.push(ProcMacro::Derive(ProcMacroDerive { - span: item.span, - trait_name: trait_ident.name, - function_name: item.ident, - attrs: proc_attrs, - })); - } else { - let msg = if !self.in_root { - "functions tagged with `#[proc_macro_derive]` must \ - currently reside in the root of the crate" - } else { - "functions tagged with `#[proc_macro_derive]` must be `pub`" - }; - self.handler.span_err(item.span, msg); - } - } - - fn collect_attr_proc_macro(&mut self, item: &'a ast::Item) { - if self.in_root && item.vis.node.is_pub() { - self.macros.push(ProcMacro::Def(ProcMacroDef { - span: item.span, - function_name: item.ident, - def_type: ProcMacroDefType::Attr, - })); - } else { - let msg = if !self.in_root { - "functions tagged with `#[proc_macro_attribute]` must \ - currently reside in the root of the crate" - } else { - "functions tagged with `#[proc_macro_attribute]` must be `pub`" - }; - self.handler.span_err(item.span, msg); - } - } - - fn collect_bang_proc_macro(&mut self, item: &'a ast::Item) { - if self.in_root && item.vis.node.is_pub() { - self.macros.push(ProcMacro::Def(ProcMacroDef { - span: 
item.span, - function_name: item.ident, - def_type: ProcMacroDefType::Bang, - })); - } else { - let msg = if !self.in_root { - "functions tagged with `#[proc_macro]` must \ - currently reside in the root of the crate" - } else { - "functions tagged with `#[proc_macro]` must be `pub`" - }; - self.handler.span_err(item.span, msg); - } - } -} - -impl<'a> Visitor<'a> for CollectProcMacros<'a> { - fn visit_item(&mut self, item: &'a ast::Item) { - if let ast::ItemKind::MacroDef(..) = item.kind { - if self.is_proc_macro_crate && attr::contains_name(&item.attrs, sym::macro_export) { - let msg = - "cannot export macro_rules! macros from a `proc-macro` crate type currently"; - self.handler.span_err(item.span, msg); - } - } - - // First up, make sure we're checking a bare function. If we're not then - // we're just not interested in this item. - // - // If we find one, try to locate a `#[proc_macro_derive]` attribute on it. - let is_fn = match item.kind { - ast::ItemKind::Fn(..) => true, - _ => false, - }; - - let mut found_attr: Option<&'a ast::Attribute> = None; - - for attr in &item.attrs { - if is_proc_macro_attr(&attr) { - if let Some(prev_attr) = found_attr { - let prev_item = prev_attr.get_normal_item(); - let item = attr.get_normal_item(); - let path_str = pprust::path_to_string(&item.path); - let msg = if item.path.segments[0].ident.name - == prev_item.path.segments[0].ident.name - { - format!( - "only one `#[{}]` attribute is allowed on any given function", - path_str, - ) - } else { - format!( - "`#[{}]` and `#[{}]` attributes cannot both be applied - to the same function", - path_str, - pprust::path_to_string(&prev_item.path), - ) - }; - - self.handler - .struct_span_err(attr.span, &msg) - .span_label(prev_attr.span, "previous attribute here") - .emit(); - - return; - } - - found_attr = Some(attr); - } - } - - let attr = match found_attr { - None => { - self.check_not_pub_in_root(&item.vis, item.span); - let prev_in_root = mem::replace(&mut self.in_root, false); - visit::walk_item(self, item); - self.in_root = prev_in_root; - return; - } - Some(attr) => attr, - }; - - if !is_fn { - let msg = format!( - "the `#[{}]` attribute may only be used on bare functions", - pprust::path_to_string(&attr.get_normal_item().path), - ); - - self.handler.span_err(attr.span, &msg); - return; - } - - if self.is_test_crate { - return; - } - - if !self.is_proc_macro_crate { - let msg = format!( - "the `#[{}]` attribute is only usable with crates of the `proc-macro` crate type", - pprust::path_to_string(&attr.get_normal_item().path), - ); - - self.handler.span_err(attr.span, &msg); - return; - } - - if attr.check_name(sym::proc_macro_derive) { - self.collect_custom_derive(item, attr); - } else if attr.check_name(sym::proc_macro_attribute) { - self.collect_attr_proc_macro(item); - } else if attr.check_name(sym::proc_macro) { - self.collect_bang_proc_macro(item); - }; - - let prev_in_root = mem::replace(&mut self.in_root, false); - visit::walk_item(self, item); - self.in_root = prev_in_root; - } - - fn visit_mac(&mut self, mac: &'a ast::Mac) { - visit::walk_mac(self, mac) - } -} - -// Creates a new module which looks like: -// -// const _: () = { -// extern crate proc_macro; -// -// use proc_macro::bridge::client::ProcMacro; -// -// #[rustc_proc_macro_decls] -// #[allow(deprecated)] -// static DECLS: &[ProcMacro] = &[ -// ProcMacro::custom_derive($name_trait1, &[], ::$name1); -// ProcMacro::custom_derive($name_trait2, &["attribute_name"], ::$name2); -// // ... 
-// ]; -// } -fn mk_decls(cx: &mut ExtCtxt<'_>, macros: &[ProcMacro]) -> P<ast::Item> { - let expn_id = cx.resolver.expansion_for_ast_pass( - DUMMY_SP, - AstPass::ProcMacroHarness, - &[sym::rustc_attrs, sym::proc_macro_internals], - None, - ); - let span = DUMMY_SP.with_def_site_ctxt(expn_id); - - let proc_macro = Ident::new(sym::proc_macro, span); - let krate = cx.item(span, proc_macro, Vec::new(), ast::ItemKind::ExternCrate(None)); - - let bridge = cx.ident_of("bridge", span); - let client = cx.ident_of("client", span); - let proc_macro_ty = cx.ident_of("ProcMacro", span); - let custom_derive = cx.ident_of("custom_derive", span); - let attr = cx.ident_of("attr", span); - let bang = cx.ident_of("bang", span); - - let decls = { - let local_path = - |sp: Span, name| cx.expr_path(cx.path(sp.with_ctxt(span.ctxt()), vec![name])); - let proc_macro_ty_method_path = |method| { - cx.expr_path(cx.path(span, vec![proc_macro, bridge, client, proc_macro_ty, method])) - }; - macros - .iter() - .map(|m| match m { - ProcMacro::Derive(cd) => cx.expr_call( - span, - proc_macro_ty_method_path(custom_derive), - vec![ - cx.expr_str(cd.span, cd.trait_name), - cx.expr_vec_slice( - span, - cd.attrs.iter().map(|&s| cx.expr_str(cd.span, s)).collect::<Vec<_>>(), - ), - local_path(cd.span, cd.function_name), - ], - ), - ProcMacro::Def(ca) => { - let ident = match ca.def_type { - ProcMacroDefType::Attr => attr, - ProcMacroDefType::Bang => bang, - }; - - cx.expr_call( - span, - proc_macro_ty_method_path(ident), - vec![ - cx.expr_str(ca.span, ca.function_name.name), - local_path(ca.span, ca.function_name), - ], - ) - } - }) - .collect() - }; - - let decls_static = cx - .item_static( - span, - cx.ident_of("_DECLS", span), - cx.ty_rptr( - span, - cx.ty( - span, - ast::TyKind::Slice( - cx.ty_path(cx.path(span, vec![proc_macro, bridge, client, proc_macro_ty])), - ), - ), - None, - ast::Mutability::Not, - ), - ast::Mutability::Not, - cx.expr_vec_slice(span, decls), - ) - .map(|mut i| { - let attr = cx.meta_word(span, sym::rustc_proc_macro_decls); - i.attrs.push(cx.attribute(attr)); - - let deprecated_attr = attr::mk_nested_word_item(Ident::new(sym::deprecated, span)); - let allow_deprecated_attr = - attr::mk_list_item(Ident::new(sym::allow, span), vec![deprecated_attr]); - i.attrs.push(cx.attribute(allow_deprecated_attr)); - - i - }); - - let block = cx.expr_block( - cx.block(span, vec![cx.stmt_item(span, krate), cx.stmt_item(span, decls_static)]), - ); - - let anon_constant = cx.item_const( - span, - ast::Ident::new(kw::Underscore, span), - cx.ty(span, ast::TyKind::Tup(Vec::new())), - block, - ); - - // Integrate the new item into existing module structures. 
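Concretely, for a `proc-macro` crate like the sketch below (the `HeapSize` and `heap_size` names are invented for illustration), the code above would end up emitting roughly `ProcMacro::custom_derive("HeapSize", &["heap_size"], derive_heap_size)` into the hidden `_DECLS` slice:

// crate-type = "proc-macro"
extern crate proc_macro;
use proc_macro::TokenStream;

#[proc_macro_derive(HeapSize, attributes(heap_size))]
pub fn derive_heap_size(_input: TokenStream) -> TokenStream {
    // A real derive would generate an impl here.
    TokenStream::new()
}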
- let items = AstFragment::Items(smallvec![anon_constant]); - cx.monotonic_expander().fully_expand_fragment(items).make_items().pop().unwrap() -} diff --git a/src/libsyntax_ext/source_util.rs b/src/libsyntax_ext/source_util.rs deleted file mode 100644 index fccc36e2ea8..00000000000 --- a/src/libsyntax_ext/source_util.rs +++ /dev/null @@ -1,216 +0,0 @@ -use rustc_parse::{self, new_sub_parser_from_file, parser::Parser, DirectoryOwnership}; -use syntax::ast; -use syntax::early_buffered_lints::INCOMPLETE_INCLUDE; -use syntax::print::pprust; -use syntax::ptr::P; -use syntax::symbol::Symbol; -use syntax::token; -use syntax::tokenstream::TokenStream; -use syntax_expand::base::{self, *}; -use syntax_expand::panictry; - -use smallvec::SmallVec; -use syntax_pos::{self, Pos, Span}; - -use rustc_data_structures::sync::Lrc; - -// These macros all relate to the file system; they either return -// the column/row/filename of the expression, or they include -// a given file into the current one. - -/// line!(): expands to the current line number -pub fn expand_line( - cx: &mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn base::MacResult + 'static> { - let sp = cx.with_def_site_ctxt(sp); - base::check_zero_tts(cx, sp, tts, "line!"); - - let topmost = cx.expansion_cause().unwrap_or(sp); - let loc = cx.source_map().lookup_char_pos(topmost.lo()); - - base::MacEager::expr(cx.expr_u32(topmost, loc.line as u32)) -} - -/* column!(): expands to the current column number */ -pub fn expand_column( - cx: &mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn base::MacResult + 'static> { - let sp = cx.with_def_site_ctxt(sp); - base::check_zero_tts(cx, sp, tts, "column!"); - - let topmost = cx.expansion_cause().unwrap_or(sp); - let loc = cx.source_map().lookup_char_pos(topmost.lo()); - - base::MacEager::expr(cx.expr_u32(topmost, loc.col.to_usize() as u32 + 1)) -} - -/// file!(): expands to the current filename */ -/// The source_file (`loc.file`) contains a bunch more information we could spit -/// out if we wanted. -pub fn expand_file( - cx: &mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn base::MacResult + 'static> { - let sp = cx.with_def_site_ctxt(sp); - base::check_zero_tts(cx, sp, tts, "file!"); - - let topmost = cx.expansion_cause().unwrap_or(sp); - let loc = cx.source_map().lookup_char_pos(topmost.lo()); - base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name.to_string()))) -} - -pub fn expand_stringify( - cx: &mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn base::MacResult + 'static> { - let sp = cx.with_def_site_ctxt(sp); - let s = pprust::tts_to_string(tts); - base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&s))) -} - -pub fn expand_mod( - cx: &mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn base::MacResult + 'static> { - let sp = cx.with_def_site_ctxt(sp); - base::check_zero_tts(cx, sp, tts, "module_path!"); - let mod_path = &cx.current_expansion.module.mod_path; - let string = mod_path.iter().map(|x| x.to_string()).collect::<Vec<String>>().join("::"); - - base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&string))) -} - -/// include! : parse the given file as an expr -/// This is generally a bad idea because it's going to behave -/// unhygienically. 
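A small example of that unhygienic behaviour, assuming a sibling file `included.rs` whose entire contents are the tokens `left + 1`:

fn main() {
    let left = 41;
    // The included tokens are parsed as an expression and resolved in *this*
    // scope, which is exactly the lack of hygiene described above.
    let answer = include!("included.rs");
    assert_eq!(answer, 42);
}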
-pub fn expand_include<'cx>( - cx: &'cx mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn base::MacResult + 'cx> { - let sp = cx.with_def_site_ctxt(sp); - let file = match get_single_str_from_tts(cx, sp, tts, "include!") { - Some(f) => f, - None => return DummyResult::any(sp), - }; - // The file will be added to the code map by the parser - let file = match cx.resolve_path(file, sp) { - Ok(f) => f, - Err(mut err) => { - err.emit(); - return DummyResult::any(sp); - } - }; - let directory_ownership = DirectoryOwnership::Owned { relative: None }; - let p = new_sub_parser_from_file(cx.parse_sess(), &file, directory_ownership, None, sp); - - struct ExpandResult<'a> { - p: Parser<'a>, - } - impl<'a> base::MacResult for ExpandResult<'a> { - fn make_expr(mut self: Box<ExpandResult<'a>>) -> Option<P<ast::Expr>> { - let r = panictry!(self.p.parse_expr()); - if self.p.token != token::Eof { - self.p.sess.buffer_lint( - &INCOMPLETE_INCLUDE, - self.p.token.span, - ast::CRATE_NODE_ID, - "include macro expected single expression in source", - ); - } - Some(r) - } - - fn make_items(mut self: Box<ExpandResult<'a>>) -> Option<SmallVec<[P<ast::Item>; 1]>> { - let mut ret = SmallVec::new(); - while self.p.token != token::Eof { - match panictry!(self.p.parse_item()) { - Some(item) => ret.push(item), - None => { - let token = pprust::token_to_string(&self.p.token); - self.p - .sess - .span_diagnostic - .span_fatal( - self.p.token.span, - &format!("expected item, found `{}`", token), - ) - .raise(); - } - } - } - Some(ret) - } - } - - Box::new(ExpandResult { p }) -} - -// include_str! : read the given file, insert it as a literal string expr -pub fn expand_include_str( - cx: &mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn base::MacResult + 'static> { - let sp = cx.with_def_site_ctxt(sp); - let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") { - Some(f) => f, - None => return DummyResult::any(sp), - }; - let file = match cx.resolve_path(file, sp) { - Ok(f) => f, - Err(mut err) => { - err.emit(); - return DummyResult::any(sp); - } - }; - match cx.source_map().load_binary_file(&file) { - Ok(bytes) => match std::str::from_utf8(&bytes) { - Ok(src) => { - let interned_src = Symbol::intern(&src); - base::MacEager::expr(cx.expr_str(sp, interned_src)) - } - Err(_) => { - cx.span_err(sp, &format!("{} wasn't a utf-8 file", file.display())); - DummyResult::any(sp) - } - }, - Err(e) => { - cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e)); - DummyResult::any(sp) - } - } -} - -pub fn expand_include_bytes( - cx: &mut ExtCtxt<'_>, - sp: Span, - tts: TokenStream, -) -> Box<dyn base::MacResult + 'static> { - let sp = cx.with_def_site_ctxt(sp); - let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") { - Some(f) => f, - None => return DummyResult::any(sp), - }; - let file = match cx.resolve_path(file, sp) { - Ok(f) => f, - Err(mut err) => { - err.emit(); - return DummyResult::any(sp); - } - }; - match cx.source_map().load_binary_file(&file) { - Ok(bytes) => base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes)))), - Err(e) => { - cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e)); - DummyResult::any(sp) - } - } -} diff --git a/src/libsyntax_ext/standard_library_imports.rs b/src/libsyntax_ext/standard_library_imports.rs deleted file mode 100644 index 50f86a0f3ec..00000000000 --- a/src/libsyntax_ext/standard_library_imports.rs +++ /dev/null @@ -1,85 +0,0 @@ -use syntax::edition::Edition; -use syntax::ptr::P; -use 
syntax::sess::ParseSess; -use syntax::symbol::{kw, sym, Ident, Symbol}; -use syntax::{ast, attr}; -use syntax_expand::base::{ExtCtxt, Resolver}; -use syntax_expand::expand::ExpansionConfig; -use syntax_pos::hygiene::AstPass; -use syntax_pos::DUMMY_SP; - -pub fn inject( - mut krate: ast::Crate, - resolver: &mut dyn Resolver, - sess: &ParseSess, - alt_std_name: Option<Symbol>, -) -> (ast::Crate, Option<Symbol>) { - let rust_2018 = sess.edition >= Edition::Edition2018; - - // the first name in this list is the crate name of the crate with the prelude - let names: &[Symbol] = if attr::contains_name(&krate.attrs, sym::no_core) { - return (krate, None); - } else if attr::contains_name(&krate.attrs, sym::no_std) { - if attr::contains_name(&krate.attrs, sym::compiler_builtins) { - &[sym::core] - } else { - &[sym::core, sym::compiler_builtins] - } - } else { - &[sym::std] - }; - - let expn_id = resolver.expansion_for_ast_pass( - DUMMY_SP, - AstPass::StdImports, - &[sym::prelude_import], - None, - ); - let span = DUMMY_SP.with_def_site_ctxt(expn_id); - let call_site = DUMMY_SP.with_call_site_ctxt(expn_id); - - let ecfg = ExpansionConfig::default("std_lib_injection".to_string()); - let cx = ExtCtxt::new(sess, ecfg, resolver); - - // .rev() to preserve ordering above in combination with insert(0, ...) - for &name in names.iter().rev() { - let ident = if rust_2018 { Ident::new(name, span) } else { Ident::new(name, call_site) }; - krate.module.items.insert( - 0, - cx.item( - span, - ident, - vec![cx.attribute(cx.meta_word(span, sym::macro_use))], - ast::ItemKind::ExternCrate(alt_std_name), - ), - ); - } - - // The crates have been injected, the assumption is that the first one is - // the one with the prelude. - let name = names[0]; - - let import_path = if rust_2018 { - [name, sym::prelude, sym::v1].iter().map(|symbol| ast::Ident::new(*symbol, span)).collect() - } else { - [kw::PathRoot, name, sym::prelude, sym::v1] - .iter() - .map(|symbol| ast::Ident::new(*symbol, span)) - .collect() - }; - - let use_item = cx.item( - span, - ast::Ident::invalid(), - vec![cx.attribute(cx.meta_word(span, sym::prelude_import))], - ast::ItemKind::Use(P(ast::UseTree { - prefix: cx.path(span, import_path), - kind: ast::UseTreeKind::Glob, - span, - })), - ); - - krate.module.items.insert(0, use_item); - - (krate, Some(name)) -} diff --git a/src/libsyntax_ext/test.rs b/src/libsyntax_ext/test.rs deleted file mode 100644 index edf427edaae..00000000000 --- a/src/libsyntax_ext/test.rs +++ /dev/null @@ -1,439 +0,0 @@ -/// The expansion from a test function to the appropriate test struct for libtest -/// Ideally, this code would be in libtest but for efficiency and error messages it lives here. -use crate::util::check_builtin_macro_attribute; - -use syntax::ast; -use syntax::attr; -use syntax::print::pprust; -use syntax::source_map::respan; -use syntax::symbol::{sym, Symbol}; -use syntax_expand::base::*; -use syntax_pos::Span; - -use std::iter; - -// #[test_case] is used by custom test authors to mark tests -// When building for test, it needs to make the item public and gensym the name -// Otherwise, we'll omit the item. This behavior means that any item annotated -// with #[test_case] is never addressable. -// -// We mark item with an inert attribute "rustc_test_marker" which the test generation -// logic will pick up on. 
-pub fn expand_test_case( - ecx: &mut ExtCtxt<'_>, - attr_sp: Span, - meta_item: &ast::MetaItem, - anno_item: Annotatable, -) -> Vec<Annotatable> { - check_builtin_macro_attribute(ecx, meta_item, sym::test_case); - - if !ecx.ecfg.should_test { - return vec![]; - } - - let sp = ecx.with_def_site_ctxt(attr_sp); - let mut item = anno_item.expect_item(); - item = item.map(|mut item| { - item.vis = respan(item.vis.span, ast::VisibilityKind::Public); - item.ident.span = item.ident.span.with_ctxt(sp.ctxt()); - item.attrs.push(ecx.attribute(ecx.meta_word(sp, sym::rustc_test_marker))); - item - }); - - return vec![Annotatable::Item(item)]; -} - -pub fn expand_test( - cx: &mut ExtCtxt<'_>, - attr_sp: Span, - meta_item: &ast::MetaItem, - item: Annotatable, -) -> Vec<Annotatable> { - check_builtin_macro_attribute(cx, meta_item, sym::test); - expand_test_or_bench(cx, attr_sp, item, false) -} - -pub fn expand_bench( - cx: &mut ExtCtxt<'_>, - attr_sp: Span, - meta_item: &ast::MetaItem, - item: Annotatable, -) -> Vec<Annotatable> { - check_builtin_macro_attribute(cx, meta_item, sym::bench); - expand_test_or_bench(cx, attr_sp, item, true) -} - -pub fn expand_test_or_bench( - cx: &mut ExtCtxt<'_>, - attr_sp: Span, - item: Annotatable, - is_bench: bool, -) -> Vec<Annotatable> { - // If we're not in test configuration, remove the annotated item - if !cx.ecfg.should_test { - return vec![]; - } - - let item = if let Annotatable::Item(i) = item { - i - } else { - cx.parse_sess - .span_diagnostic - .span_fatal( - item.span(), - "`#[test]` attribute is only allowed on non associated functions", - ) - .raise(); - }; - - if let ast::ItemKind::Mac(_) = item.kind { - cx.parse_sess.span_diagnostic.span_warn( - item.span, - "`#[test]` attribute should not be used on macros. Use `#[cfg(test)]` instead.", - ); - return vec![Annotatable::Item(item)]; - } - - // has_*_signature will report any errors in the type so compilation - // will fail. We shouldn't try to expand in this case because the errors - // would be spurious. 
- if (!is_bench && !has_test_signature(cx, &item)) - || (is_bench && !has_bench_signature(cx, &item)) - { - return vec![Annotatable::Item(item)]; - } - - let (sp, attr_sp) = (cx.with_def_site_ctxt(item.span), cx.with_def_site_ctxt(attr_sp)); - - let test_id = ast::Ident::new(sym::test, attr_sp); - - // creates test::$name - let test_path = |name| cx.path(sp, vec![test_id, cx.ident_of(name, sp)]); - - // creates test::ShouldPanic::$name - let should_panic_path = - |name| cx.path(sp, vec![test_id, cx.ident_of("ShouldPanic", sp), cx.ident_of(name, sp)]); - - // creates test::TestType::$name - let test_type_path = - |name| cx.path(sp, vec![test_id, cx.ident_of("TestType", sp), cx.ident_of(name, sp)]); - - // creates $name: $expr - let field = |name, expr| cx.field_imm(sp, cx.ident_of(name, sp), expr); - - let test_fn = if is_bench { - // A simple ident for a lambda - let b = cx.ident_of("b", attr_sp); - - cx.expr_call( - sp, - cx.expr_path(test_path("StaticBenchFn")), - vec![ - // |b| self::test::assert_test_result( - cx.lambda1( - sp, - cx.expr_call( - sp, - cx.expr_path(test_path("assert_test_result")), - vec![ - // super::$test_fn(b) - cx.expr_call( - sp, - cx.expr_path(cx.path(sp, vec![item.ident])), - vec![cx.expr_ident(sp, b)], - ), - ], - ), - b, - ), // ) - ], - ) - } else { - cx.expr_call( - sp, - cx.expr_path(test_path("StaticTestFn")), - vec![ - // || { - cx.lambda0( - sp, - // test::assert_test_result( - cx.expr_call( - sp, - cx.expr_path(test_path("assert_test_result")), - vec![ - // $test_fn() - cx.expr_call(sp, cx.expr_path(cx.path(sp, vec![item.ident])), vec![]), // ) - ], - ), // } - ), // ) - ], - ) - }; - - let mut test_const = cx.item( - sp, - ast::Ident::new(item.ident.name, sp), - vec![ - // #[cfg(test)] - cx.attribute(attr::mk_list_item( - ast::Ident::new(sym::cfg, attr_sp), - vec![attr::mk_nested_word_item(ast::Ident::new(sym::test, attr_sp))], - )), - // #[rustc_test_marker] - cx.attribute(cx.meta_word(attr_sp, sym::rustc_test_marker)), - ], - // const $ident: test::TestDescAndFn = - ast::ItemKind::Const( - cx.ty(sp, ast::TyKind::Path(None, test_path("TestDescAndFn"))), - // test::TestDescAndFn { - cx.expr_struct( - sp, - test_path("TestDescAndFn"), - vec![ - // desc: test::TestDesc { - field( - "desc", - cx.expr_struct( - sp, - test_path("TestDesc"), - vec![ - // name: "path::to::test" - field( - "name", - cx.expr_call( - sp, - cx.expr_path(test_path("StaticTestName")), - vec![cx.expr_str( - sp, - Symbol::intern(&item_path( - // skip the name of the root module - &cx.current_expansion.module.mod_path[1..], - &item.ident, - )), - )], - ), - ), - // ignore: true | false - field("ignore", cx.expr_bool(sp, should_ignore(&item))), - // allow_fail: true | false - field("allow_fail", cx.expr_bool(sp, should_fail(&item))), - // should_panic: ... - field( - "should_panic", - match should_panic(cx, &item) { - // test::ShouldPanic::No - ShouldPanic::No => cx.expr_path(should_panic_path("No")), - // test::ShouldPanic::Yes - ShouldPanic::Yes(None) => { - cx.expr_path(should_panic_path("Yes")) - } - // test::ShouldPanic::YesWithMessage("...") - ShouldPanic::Yes(Some(sym)) => cx.expr_call( - sp, - cx.expr_path(should_panic_path("YesWithMessage")), - vec![cx.expr_str(sp, sym)], - ), - }, - ), - // test_type: ... 
- field( - "test_type", - match test_type(cx) { - // test::TestType::UnitTest - TestType::UnitTest => { - cx.expr_path(test_type_path("UnitTest")) - } - // test::TestType::IntegrationTest - TestType::IntegrationTest => { - cx.expr_path(test_type_path("IntegrationTest")) - } - // test::TestPath::Unknown - TestType::Unknown => { - cx.expr_path(test_type_path("Unknown")) - } - }, - ), - // }, - ], - ), - ), - // testfn: test::StaticTestFn(...) | test::StaticBenchFn(...) - field("testfn", test_fn), // } - ], - ), // } - ), - ); - test_const = test_const.map(|mut tc| { - tc.vis.node = ast::VisibilityKind::Public; - tc - }); - - // extern crate test - let test_extern = cx.item(sp, test_id, vec![], ast::ItemKind::ExternCrate(None)); - - log::debug!("synthetic test item:\n{}\n", pprust::item_to_string(&test_const)); - - vec![ - // Access to libtest under a hygienic name - Annotatable::Item(test_extern), - // The generated test case - Annotatable::Item(test_const), - // The original item - Annotatable::Item(item), - ] -} - -fn item_path(mod_path: &[ast::Ident], item_ident: &ast::Ident) -> String { - mod_path - .iter() - .chain(iter::once(item_ident)) - .map(|x| x.to_string()) - .collect::<Vec<String>>() - .join("::") -} - -enum ShouldPanic { - No, - Yes(Option<Symbol>), -} - -fn should_ignore(i: &ast::Item) -> bool { - attr::contains_name(&i.attrs, sym::ignore) -} - -fn should_fail(i: &ast::Item) -> bool { - attr::contains_name(&i.attrs, sym::allow_fail) -} - -fn should_panic(cx: &ExtCtxt<'_>, i: &ast::Item) -> ShouldPanic { - match attr::find_by_name(&i.attrs, sym::should_panic) { - Some(attr) => { - let ref sd = cx.parse_sess.span_diagnostic; - - match attr.meta_item_list() { - // Handle #[should_panic(expected = "foo")] - Some(list) => { - let msg = list - .iter() - .find(|mi| mi.check_name(sym::expected)) - .and_then(|mi| mi.meta_item()) - .and_then(|mi| mi.value_str()); - if list.len() != 1 || msg.is_none() { - sd.struct_span_warn( - attr.span, - "argument must be of the form: \ - `expected = \"error message\"`", - ) - .note( - "Errors in this attribute were erroneously \ - allowed and will become a hard error in a \ - future release.", - ) - .emit(); - ShouldPanic::Yes(None) - } else { - ShouldPanic::Yes(msg) - } - } - // Handle #[should_panic] and #[should_panic = "expected"] - None => ShouldPanic::Yes(attr.value_str()), - } - } - None => ShouldPanic::No, - } -} - -enum TestType { - UnitTest, - IntegrationTest, - Unknown, -} - -/// Attempts to determine the type of test. -/// Since doctests are created without macro expanding, only possible variants here -/// are `UnitTest`, `IntegrationTest` or `Unknown`. -fn test_type(cx: &ExtCtxt<'_>) -> TestType { - // Root path from context contains the topmost sources directory of the crate. - // I.e., for `project` with sources in `src` and tests in `tests` folders - // (no matter how many nested folders lie inside), - // there will be two different root paths: `/project/src` and `/project/tests`. - let crate_path = cx.root_path.as_path(); - - if crate_path.ends_with("src") { - // `/src` folder contains unit-tests. - TestType::UnitTest - } else if crate_path.ends_with("tests") { - // `/tests` folder contains integration tests. - TestType::IntegrationTest - } else { - // Crate layout doesn't match expected one, test type is unknown. 
- TestType::Unknown - } -} - -fn has_test_signature(cx: &ExtCtxt<'_>, i: &ast::Item) -> bool { - let has_should_panic_attr = attr::contains_name(&i.attrs, sym::should_panic); - let ref sd = cx.parse_sess.span_diagnostic; - if let ast::ItemKind::Fn(ref sig, ref generics, _) = i.kind { - if sig.header.unsafety == ast::Unsafety::Unsafe { - sd.span_err(i.span, "unsafe functions cannot be used for tests"); - return false; - } - if sig.header.asyncness.node.is_async() { - sd.span_err(i.span, "async functions cannot be used for tests"); - return false; - } - - // If the termination trait is active, the compiler will check that the output - // type implements the `Termination` trait as `libtest` enforces that. - let has_output = match sig.decl.output { - ast::FunctionRetTy::Default(..) => false, - ast::FunctionRetTy::Ty(ref t) if t.kind.is_unit() => false, - _ => true, - }; - - if !sig.decl.inputs.is_empty() { - sd.span_err(i.span, "functions used as tests can not have any arguments"); - return false; - } - - match (has_output, has_should_panic_attr) { - (true, true) => { - sd.span_err(i.span, "functions using `#[should_panic]` must return `()`"); - false - } - (true, false) => { - if !generics.params.is_empty() { - sd.span_err(i.span, "functions used as tests must have signature fn() -> ()"); - false - } else { - true - } - } - (false, _) => true, - } - } else { - sd.span_err(i.span, "only functions may be used as tests"); - false - } -} - -fn has_bench_signature(cx: &ExtCtxt<'_>, i: &ast::Item) -> bool { - let has_sig = if let ast::ItemKind::Fn(ref sig, _, _) = i.kind { - // N.B., inadequate check, but we're running - // well before resolve, can't get too deep. - sig.decl.inputs.len() == 1 - } else { - false - }; - - if !has_sig { - cx.parse_sess.span_diagnostic.span_err( - i.span, - "functions used as benches must have \ - signature `fn(&mut Bencher) -> impl Termination`", - ); - } - - has_sig -} diff --git a/src/libsyntax_ext/test_harness.rs b/src/libsyntax_ext/test_harness.rs deleted file mode 100644 index b00fc3d26c1..00000000000 --- a/src/libsyntax_ext/test_harness.rs +++ /dev/null @@ -1,366 +0,0 @@ -// Code that generates a test runner to run all the tests in a crate - -use log::debug; -use rustc_feature::Features; -use rustc_target::spec::PanicStrategy; -use smallvec::{smallvec, SmallVec}; -use syntax::ast::{self, Ident}; -use syntax::attr; -use syntax::entry::{self, EntryPointType}; -use syntax::mut_visit::{ExpectOne, *}; -use syntax::ptr::P; -use syntax::sess::ParseSess; -use syntax::source_map::respan; -use syntax::symbol::{sym, Symbol}; -use syntax_expand::base::{ExtCtxt, Resolver}; -use syntax_expand::expand::{AstFragment, ExpansionConfig}; -use syntax_pos::hygiene::{AstPass, SyntaxContext, Transparency}; -use syntax_pos::{Span, DUMMY_SP}; - -use std::{iter, mem}; - -struct Test { - span: Span, - ident: Ident, -} - -struct TestCtxt<'a> { - ext_cx: ExtCtxt<'a>, - panic_strategy: PanicStrategy, - def_site: Span, - test_cases: Vec<Test>, - reexport_test_harness_main: Option<Symbol>, - test_runner: Option<ast::Path>, -} - -// Traverse the crate, collecting all the test functions, eliding any -// existing main functions, and synthesizing a main test harness -pub fn inject( - sess: &ParseSess, - resolver: &mut dyn Resolver, - should_test: bool, - krate: &mut ast::Crate, - span_diagnostic: &errors::Handler, - features: &Features, - panic_strategy: PanicStrategy, - platform_panic_strategy: PanicStrategy, - enable_panic_abort_tests: bool, -) { - // Check for #![reexport_test_harness_main = 
"some_name"] which gives the - // main test function the name `some_name` without hygiene. This needs to be - // unconditional, so that the attribute is still marked as used in - // non-test builds. - let reexport_test_harness_main = - attr::first_attr_value_str_by_name(&krate.attrs, sym::reexport_test_harness_main); - - // Do this here so that the test_runner crate attribute gets marked as used - // even in non-test builds - let test_runner = get_test_runner(span_diagnostic, &krate); - - if should_test { - let panic_strategy = match (panic_strategy, enable_panic_abort_tests) { - (PanicStrategy::Abort, true) => PanicStrategy::Abort, - (PanicStrategy::Abort, false) if panic_strategy == platform_panic_strategy => { - // Silently allow compiling with panic=abort on these platforms, - // but with old behavior (abort if a test fails). - PanicStrategy::Unwind - } - (PanicStrategy::Abort, false) => { - span_diagnostic.err( - "building tests with panic=abort is not supported \ - without `-Zpanic_abort_tests`", - ); - PanicStrategy::Unwind - } - (PanicStrategy::Unwind, _) => PanicStrategy::Unwind, - }; - generate_test_harness( - sess, - resolver, - reexport_test_harness_main, - krate, - features, - panic_strategy, - test_runner, - ) - } -} - -struct TestHarnessGenerator<'a> { - cx: TestCtxt<'a>, - tests: Vec<Test>, -} - -impl<'a> MutVisitor for TestHarnessGenerator<'a> { - fn visit_crate(&mut self, c: &mut ast::Crate) { - noop_visit_crate(c, self); - - // Create a main function to run our tests - c.module.items.push(mk_main(&mut self.cx)); - } - - fn flat_map_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> { - let mut item = i.into_inner(); - if is_test_case(&item) { - debug!("this is a test item"); - - let test = Test { span: item.span, ident: item.ident }; - self.tests.push(test); - } - - // We don't want to recurse into anything other than mods, since - // mods or tests inside of functions will break things - if let ast::ItemKind::Mod(mut module) = item.kind { - let tests = mem::take(&mut self.tests); - noop_visit_mod(&mut module, self); - let mut tests = mem::replace(&mut self.tests, tests); - - if !tests.is_empty() { - let parent = - if item.id == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { item.id }; - // Create an identifier that will hygienically resolve the test - // case name, even in another module. - let expn_id = self.cx.ext_cx.resolver.expansion_for_ast_pass( - module.inner, - AstPass::TestHarness, - &[], - Some(parent), - ); - for test in &mut tests { - // See the comment on `mk_main` for why we're using - // `apply_mark` directly. - test.ident.span = test.ident.span.apply_mark(expn_id, Transparency::Opaque); - } - self.cx.test_cases.extend(tests); - } - item.kind = ast::ItemKind::Mod(module); - } - smallvec![P(item)] - } - - fn visit_mac(&mut self, _mac: &mut ast::Mac) { - // Do nothing. - } -} - -/// A folder used to remove any entry points (like fn main) because the harness -/// generator will provide its own -struct EntryPointCleaner { - // Current depth in the ast - depth: usize, - def_site: Span, -} - -impl MutVisitor for EntryPointCleaner { - fn flat_map_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> { - self.depth += 1; - let item = noop_flat_map_item(i, self).expect_one("noop did something"); - self.depth -= 1; - - // Remove any #[main] or #[start] from the AST so it doesn't - // clash with the one we're going to add, but mark it as - // #[allow(dead_code)] to avoid printing warnings. 
- let item = match entry::entry_point_type(&item, self.depth) { - EntryPointType::MainNamed | EntryPointType::MainAttr | EntryPointType::Start => item - .map(|ast::Item { id, ident, attrs, kind, vis, span, tokens }| { - let allow_ident = Ident::new(sym::allow, self.def_site); - let dc_nested = attr::mk_nested_word_item(Ident::from_str_and_span( - "dead_code", - self.def_site, - )); - let allow_dead_code_item = attr::mk_list_item(allow_ident, vec![dc_nested]); - let allow_dead_code = attr::mk_attr_outer(allow_dead_code_item); - - ast::Item { - id, - ident, - attrs: attrs - .into_iter() - .filter(|attr| { - !attr.check_name(sym::main) && !attr.check_name(sym::start) - }) - .chain(iter::once(allow_dead_code)) - .collect(), - kind, - vis, - span, - tokens, - } - }), - EntryPointType::None | EntryPointType::OtherMain => item, - }; - - smallvec![item] - } - - fn visit_mac(&mut self, _mac: &mut ast::Mac) { - // Do nothing. - } -} - -/// Crawl over the crate, inserting test reexports and the test main function -fn generate_test_harness( - sess: &ParseSess, - resolver: &mut dyn Resolver, - reexport_test_harness_main: Option<Symbol>, - krate: &mut ast::Crate, - features: &Features, - panic_strategy: PanicStrategy, - test_runner: Option<ast::Path>, -) { - let mut econfig = ExpansionConfig::default("test".to_string()); - econfig.features = Some(features); - - let ext_cx = ExtCtxt::new(sess, econfig, resolver); - - let expn_id = ext_cx.resolver.expansion_for_ast_pass( - DUMMY_SP, - AstPass::TestHarness, - &[sym::main, sym::test, sym::rustc_attrs], - None, - ); - let def_site = DUMMY_SP.with_def_site_ctxt(expn_id); - - // Remove the entry points - let mut cleaner = EntryPointCleaner { depth: 0, def_site }; - cleaner.visit_crate(krate); - - let cx = TestCtxt { - ext_cx, - panic_strategy, - def_site, - test_cases: Vec::new(), - reexport_test_harness_main, - test_runner, - }; - - TestHarnessGenerator { cx, tests: Vec::new() }.visit_crate(krate); -} - -/// Creates a function item for use as the main function of a test build. -/// This function will call the `test_runner` as specified by the crate attribute -/// -/// By default this expands to -/// -/// #[main] -/// pub fn main() { -/// extern crate test; -/// test::test_main_static(&[ -/// &test_const1, -/// &test_const2, -/// &test_const3, -/// ]); -/// } -/// -/// Most of the Ident have the usual def-site hygiene for the AST pass. The -/// exception is the `test_const`s. These have a syntax context that has two -/// opaque marks: one from the expansion of `test` or `test_case`, and one -/// generated in `TestHarnessGenerator::flat_map_item`. When resolving this -/// identifier after failing to find a matching identifier in the root module -/// we remove the outer mark, and try resolving at its def-site, which will -/// then resolve to `test_const`. -/// -/// The expansion here can be controlled by two attributes: -/// -/// `reexport_test_harness_main` provides a different name for the `main` -/// function and `test_runner` provides a path that replaces -/// `test::test_main_static`. -fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> { - let sp = cx.def_site; - let ecx = &cx.ext_cx; - let test_id = Ident::new(sym::test, sp); - - let runner_name = match cx.panic_strategy { - PanicStrategy::Unwind => "test_main_static", - PanicStrategy::Abort => "test_main_static_abort", - }; - - // test::test_main_static(...) 
- let mut test_runner = cx - .test_runner - .clone() - .unwrap_or(ecx.path(sp, vec![test_id, ecx.ident_of(runner_name, sp)])); - - test_runner.span = sp; - - let test_main_path_expr = ecx.expr_path(test_runner); - let call_test_main = ecx.expr_call(sp, test_main_path_expr, vec![mk_tests_slice(cx, sp)]); - let call_test_main = ecx.stmt_expr(call_test_main); - - // extern crate test - let test_extern_stmt = - ecx.stmt_item(sp, ecx.item(sp, test_id, vec![], ast::ItemKind::ExternCrate(None))); - - // #[main] - let main_meta = ecx.meta_word(sp, sym::main); - let main_attr = ecx.attribute(main_meta); - - // pub fn main() { ... } - let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(vec![])); - - // If no test runner is provided we need to import the test crate - let main_body = if cx.test_runner.is_none() { - ecx.block(sp, vec![test_extern_stmt, call_test_main]) - } else { - ecx.block(sp, vec![call_test_main]) - }; - - let decl = ecx.fn_decl(vec![], ast::FunctionRetTy::Ty(main_ret_ty)); - let sig = ast::FnSig { decl, header: ast::FnHeader::default() }; - let main = ast::ItemKind::Fn(sig, ast::Generics::default(), main_body); - - // Honor the reexport_test_harness_main attribute - let main_id = match cx.reexport_test_harness_main { - Some(sym) => Ident::new(sym, sp.with_ctxt(SyntaxContext::root())), - None => Ident::new(sym::main, sp), - }; - - let main = P(ast::Item { - ident: main_id, - attrs: vec![main_attr], - id: ast::DUMMY_NODE_ID, - kind: main, - vis: respan(sp, ast::VisibilityKind::Public), - span: sp, - tokens: None, - }); - - // Integrate the new item into existing module structures. - let main = AstFragment::Items(smallvec![main]); - cx.ext_cx.monotonic_expander().fully_expand_fragment(main).make_items().pop().unwrap() -} - -/// Creates a slice containing every test like so: -/// &[&test1, &test2] -fn mk_tests_slice(cx: &TestCtxt<'_>, sp: Span) -> P<ast::Expr> { - debug!("building test vector from {} tests", cx.test_cases.len()); - let ref ecx = cx.ext_cx; - - ecx.expr_vec_slice( - sp, - cx.test_cases - .iter() - .map(|test| { - ecx.expr_addr_of(test.span, ecx.expr_path(ecx.path(test.span, vec![test.ident]))) - }) - .collect(), - ) -} - -fn is_test_case(i: &ast::Item) -> bool { - attr::contains_name(&i.attrs, sym::rustc_test_marker) -} - -fn get_test_runner(sd: &errors::Handler, krate: &ast::Crate) -> Option<ast::Path> { - let test_attr = attr::find_by_name(&krate.attrs, sym::test_runner)?; - test_attr.meta_item_list().map(|meta_list| { - if meta_list.len() != 1 { - sd.span_fatal(test_attr.span, "`#![test_runner(..)]` accepts exactly 1 argument") - .raise() - } - match meta_list[0].meta_item() { - Some(meta_item) if meta_item.is_word() => meta_item.path.clone(), - _ => sd.span_fatal(test_attr.span, "`test_runner` argument must be a path").raise(), - } - }) -} diff --git a/src/libsyntax_ext/trace_macros.rs b/src/libsyntax_ext/trace_macros.rs deleted file mode 100644 index 96ae5bf5b4e..00000000000 --- a/src/libsyntax_ext/trace_macros.rs +++ /dev/null @@ -1,29 +0,0 @@ -use syntax::symbol::kw; -use syntax::tokenstream::{TokenStream, TokenTree}; -use syntax_expand::base::{self, ExtCtxt}; -use syntax_pos::Span; - -pub fn expand_trace_macros( - cx: &mut ExtCtxt<'_>, - sp: Span, - tt: TokenStream, -) -> Box<dyn base::MacResult + 'static> { - let mut cursor = tt.into_trees(); - let mut err = false; - let value = match &cursor.next() { - Some(TokenTree::Token(token)) if token.is_keyword(kw::True) => true, - Some(TokenTree::Token(token)) if token.is_keyword(kw::False) => false, - _ => { - err = true; - 
false - } - }; - err |= cursor.next().is_some(); - if err { - cx.span_err(sp, "trace_macros! accepts only `true` or `false`") - } else { - cx.set_trace_macros(value); - } - - base::DummyResult::any_valid(sp) -} diff --git a/src/libsyntax_ext/util.rs b/src/libsyntax_ext/util.rs deleted file mode 100644 index aedd5aac1a9..00000000000 --- a/src/libsyntax_ext/util.rs +++ /dev/null @@ -1,12 +0,0 @@ -use rustc_feature::AttributeTemplate; -use rustc_parse::validate_attr; -use syntax::ast::MetaItem; -use syntax_expand::base::ExtCtxt; -use syntax_pos::Symbol; - -pub fn check_builtin_macro_attribute(ecx: &ExtCtxt<'_>, meta_item: &MetaItem, name: Symbol) { - // All the built-in macro attributes are "words" at the moment. - let template = AttributeTemplate::only_word(); - let attr = ecx.attribute(meta_item.clone()); - validate_attr::check_builtin_attribute(ecx.parse_sess, &attr, name, template); -} |
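
For orientation, the deleted `test.rs` above is the expansion code that rewrites `#[test]`/`#[bench]` functions into `test::TestDescAndFn` constants and checks the attributes and signatures it recognizes (`#[ignore]`, `#[allow_fail]`, `#[should_panic]` with an optional `expected = "..."`, argument-free functions), while `test_harness.rs` synthesizes the hidden `main` that calls `test::test_main_static`. Below is a minimal, user-side sketch of the kind of input this machinery consumes; the function names are invented for illustration, and the file is meant to be built with `cargo test` or `rustc --test`.

```rust
// Tiny example of the test functions that `expand_test` /
// `expand_test_or_bench` (deleted above) operate on. Each function below is
// rewritten into a `test::TestDescAndFn` const, and the code in
// `test_harness.rs` generates the `main` that runs them.

#[test]
fn parses_empty_input() {
    // Plain `#[test]`: must take no arguments, as enforced by
    // `has_test_signature`.
    assert_eq!(2 + 2, 4);
}

#[test]
#[should_panic(expected = "index out of bounds")]
fn indexing_past_the_end_panics() {
    // `expected = "..."` is mapped to `ShouldPanic::Yes(Some(msg))` by the
    // deleted `should_panic` helper; a bare `#[should_panic]` becomes
    // `ShouldPanic::Yes(None)`.
    let v = vec![1, 2, 3];
    let _ = v[10];
}

#[test]
#[ignore]
fn slow_integration_check() {
    // `#[ignore]` sets the `ignore: true` field of the generated `TestDesc`.
}
```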
