Commit fbdb07f4 authored by bors

Auto merge of #98758 - nnethercote:more-derive-output-improvements, r=Mark-Simulacrum

More derive output improvements

This PR includes:
- Some test improvements.
- Some cosmetic changes to derive output that make the code look more like what a human would write.
- Some more fundamental improvements to `cmp` and `partial_cmp` generation.

r? `@Mark-Simulacrum`
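
To illustrate the `cmp`/`partial_cmp` change, here is a hand-written sketch (not the literal expansion) of the derived `Ord::cmp` for a two-field struct, before and after; the shapes match the updated test expectations further down:

```rust
use std::cmp::Ordering;

struct Point { x: u32, y: u32 }

// Before: the comparison of the last field was wrapped in a no-op match.
fn cmp_before(a: &Point, b: &Point) -> Ordering {
    match Ord::cmp(&a.x, &b.x) {
        Ordering::Equal => match Ord::cmp(&a.y, &b.y) {
            Ordering::Equal => Ordering::Equal,
            cmp => cmp,
        },
        cmp => cmp,
    }
}

// After: the last field's comparison is returned directly from the `Equal` arm.
fn cmp_after(a: &Point, b: &Point) -> Ordering {
    match Ord::cmp(&a.x, &b.x) {
        Ordering::Equal => Ord::cmp(&a.y, &b.y),
        cmp => cmp,
    }
}
```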
@@ -2036,6 +2036,14 @@ pub fn is_implicit_self(&self) -> bool {
    pub fn is_unit(&self) -> bool {
        matches!(self, TyKind::Tup(tys) if tys.is_empty())
    }
+    pub fn is_simple_path(&self) -> Option<Symbol> {
+        if let TyKind::Path(None, Path { segments, .. }) = &self && segments.len() == 1 {
+            Some(segments[0].ident.name)
+        } else {
+            None
+        }
+    }
}
/// Syntax used to declare a trait object.
...
@@ -2,8 +2,8 @@
use crate::deriving::generic::*;
use crate::deriving::path_std;
-use rustc_ast::ptr::P;
-use rustc_ast::{self as ast, Expr, Generics, ItemKind, MetaItem, VariantData};
+use rustc_ast::{self as ast, Generics, ItemKind, MetaItem, VariantData};
+use rustc_data_structures::fx::FxHashSet;
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::Span;
@@ -98,10 +98,17 @@ fn cs_clone_simple(
    trait_span: Span,
    substr: &Substructure<'_>,
    is_union: bool,
-) -> P<Expr> {
+) -> BlockOrExpr {
    let mut stmts = Vec::new();
+    let mut seen_type_names = FxHashSet::default();
    let mut process_variant = |variant: &VariantData| {
        for field in variant.fields() {
+            // This basic redundancy checking only prevents duplication of
+            // assertions like `AssertParamIsClone<Foo>` where the type is a
+            // simple name. That's enough to get a lot of cases, though.
+            if let Some(name) = field.ty.kind.is_simple_path() && !seen_type_names.insert(name) {
+                // Already produced an assertion for this type.
+            } else {
                // let _: AssertParamIsClone<FieldTy>;
                super::assert_ty_bounds(
                    cx,
@@ -111,9 +118,11 @@ fn cs_clone_simple(
                    &[sym::clone, sym::AssertParamIsClone],
                );
            }
+        }
    };
    if is_union {
+        // Just a single assertion for unions, that the union impls `Copy`.
        // let _: AssertParamIsCopy<Self>;
        let self_ty = cx.ty_path(cx.path_ident(trait_span, Ident::with_dummy_span(kw::SelfUpper)));
        super::assert_ty_bounds(
@@ -139,8 +148,7 @@ fn cs_clone_simple(
        ),
    }
-    stmts.push(cx.stmt_expr(cx.expr_deref(trait_span, cx.expr_self(trait_span))));
-    cx.expr_block(cx.block(trait_span, stmts))
+    BlockOrExpr::new_mixed(stmts, cx.expr_deref(trait_span, cx.expr_self(trait_span)))
}
fn cs_clone(
@@ -148,7 +156,7 @@ fn cs_clone(
    cx: &mut ExtCtxt<'_>,
    trait_span: Span,
    substr: &Substructure<'_>,
-) -> P<Expr> {
+) -> BlockOrExpr {
    let ctor_path;
    let all_fields;
    let fn_path = cx.std_path(&[sym::clone, sym::Clone, sym::clone]);
@@ -177,7 +185,7 @@ fn cs_clone(
        }
    }
-    match *vdata {
+    let expr = match *vdata {
        VariantData::Struct(..) => {
            let fields = all_fields
                .iter()
@@ -201,5 +209,6 @@ fn cs_clone(
            cx.expr_call(trait_span, path, subcalls)
        }
        VariantData::Unit(..) => cx.expr_path(ctor_path),
-    }
+    };
+    BlockOrExpr::new_expr(expr)
}
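// Illustration (not part of this patch): `seen_type_names` only dedups fields whose
// type is a bare path, so repeated field types now produce a single assertion. For
// the `Point { x: u32, y: u32 }` test type further down, the derived `Clone::clone`
// body shrinks from two `AssertParamIsClone<u32>` bindings to one:
//
//     fn clone(&self) -> Point {
//         let _: ::core::clone::AssertParamIsClone<u32>;
//         *self
//     }
//
// Types that are not a simple path (e.g. `Vec<u8>` or `&'a str`) never enter the
// set, so they still get one assertion per field.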
@@ -2,8 +2,8 @@
use crate::deriving::generic::*;
use crate::deriving::path_std;
-use rustc_ast::ptr::P;
-use rustc_ast::{self as ast, Expr, MetaItem};
+use rustc_ast::{self as ast, MetaItem};
+use rustc_data_structures::fx::FxHashSet;
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{sym, Ident};
use rustc_span::Span;
@@ -52,10 +52,17 @@ fn cs_total_eq_assert(
    cx: &mut ExtCtxt<'_>,
    trait_span: Span,
    substr: &Substructure<'_>,
-) -> P<Expr> {
+) -> BlockOrExpr {
    let mut stmts = Vec::new();
+    let mut seen_type_names = FxHashSet::default();
    let mut process_variant = |variant: &ast::VariantData| {
        for field in variant.fields() {
+            // This basic redundancy checking only prevents duplication of
+            // assertions like `AssertParamIsEq<Foo>` where the type is a
+            // simple name. That's enough to get a lot of cases, though.
+            if let Some(name) = field.ty.kind.is_simple_path() && !seen_type_names.insert(name) {
+                // Already produced an assertion for this type.
+            } else {
                // let _: AssertParamIsEq<FieldTy>;
                super::assert_ty_bounds(
                    cx,
@@ -65,6 +72,7 @@ fn cs_total_eq_assert(
                    &[sym::cmp, sym::AssertParamIsEq],
                );
            }
+        }
    };
    match *substr.fields {
@@ -78,5 +86,5 @@ fn cs_total_eq_assert(
        }
        _ => cx.span_bug(trait_span, "unexpected substructure in `derive(Eq)`"),
    }
-    cx.expr_block(cx.block(trait_span, stmts))
+    BlockOrExpr::new_stmts(stmts)
}
@@ -3,7 +3,7 @@
use crate::deriving::path_std;
use rustc_ast::ptr::P;
-use rustc_ast::{self as ast, Expr, MetaItem};
+use rustc_ast::{self as ast, MetaItem};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{sym, Ident};
use rustc_span::Span;
@@ -51,7 +51,7 @@ pub fn ordering_collapsed(
    cx.expr_call_global(span, fn_cmp_path, vec![lft, rgt])
}
-pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> {
+pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr {
    let test_id = Ident::new(sym::cmp, span);
    let equals_path = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal]));
@@ -70,7 +70,7 @@ pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<
    //     cmp => cmp
    // }
    //
-    cs_fold(
+    let expr = cs_fold(
        // foldr nests the if-elses correctly, leaving the first field
        // as the outermost one, and the last as the innermost.
        false,
@@ -79,15 +79,12 @@ pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<
            //     ::std::cmp::Ordering::Equal => old,
            //     cmp => cmp
            // }
            let new = {
                let [other_f] = other_fs else {
                    cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`");
                };
                let args =
                    vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())];
                cx.expr_call_global(span, cmp_path.clone(), args)
            };
@@ -96,7 +93,21 @@ pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<
            cx.expr_match(span, new, vec![eq_arm, neq_arm])
        },
-        cx.expr_path(equals_path.clone()),
+        |cx, args| match args {
+            Some((span, self_f, other_fs)) => {
+                let new = {
+                    let [other_f] = other_fs else {
+                        cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`");
+                    };
+                    let args =
+                        vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())];
+                    cx.expr_call_global(span, cmp_path.clone(), args)
+                };
+                new
+            }
+            None => cx.expr_path(equals_path.clone()),
+        },
        Box::new(|cx, span, tag_tuple| {
            if tag_tuple.len() != 2 {
                cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`")
@@ -107,5 +118,6 @@ pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<
        cx,
        span,
        substr,
-    )
+    );
+    BlockOrExpr::new_expr(expr)
}
...@@ -15,8 +15,6 @@ pub fn expand_deriving_partial_eq( ...@@ -15,8 +15,6 @@ pub fn expand_deriving_partial_eq(
item: &Annotatable, item: &Annotatable,
push: &mut dyn FnMut(Annotatable), push: &mut dyn FnMut(Annotatable),
) { ) {
// structures are equal if all fields are equal, and non equal, if
// any fields are not equal or if the enum variants are different
fn cs_op( fn cs_op(
cx: &mut ExtCtxt<'_>, cx: &mut ExtCtxt<'_>,
span: Span, span: Span,
...@@ -24,7 +22,7 @@ fn cs_op( ...@@ -24,7 +22,7 @@ fn cs_op(
op: BinOpKind, op: BinOpKind,
combiner: BinOpKind, combiner: BinOpKind,
base: bool, base: bool,
) -> P<Expr> { ) -> BlockOrExpr {
let op = |cx: &mut ExtCtxt<'_>, span: Span, self_f: P<Expr>, other_fs: &[P<Expr>]| { let op = |cx: &mut ExtCtxt<'_>, span: Span, self_f: P<Expr>, other_fs: &[P<Expr>]| {
let [other_f] = other_fs else { let [other_f] = other_fs else {
cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`"); cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`");
...@@ -33,7 +31,7 @@ fn cs_op( ...@@ -33,7 +31,7 @@ fn cs_op(
cx.expr_binary(span, op, self_f, other_f.clone()) cx.expr_binary(span, op, self_f, other_f.clone())
}; };
cs_fold1( let expr = cs_fold(
true, // use foldl true, // use foldl
|cx, span, subexpr, self_f, other_fs| { |cx, span, subexpr, self_f, other_fs| {
let eq = op(cx, span, self_f, other_fs); let eq = op(cx, span, self_f, other_fs);
...@@ -52,13 +50,14 @@ fn cs_op( ...@@ -52,13 +50,14 @@ fn cs_op(
cx, cx,
span, span,
substr, substr,
) );
BlockOrExpr::new_expr(expr)
} }
fn cs_eq(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> { fn cs_eq(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr {
cs_op(cx, span, substr, BinOpKind::Eq, BinOpKind::And, true) cs_op(cx, span, substr, BinOpKind::Eq, BinOpKind::And, true)
} }
fn cs_ne(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> { fn cs_ne(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr {
cs_op(cx, span, substr, BinOpKind::Ne, BinOpKind::Or, false) cs_op(cx, span, substr, BinOpKind::Ne, BinOpKind::Or, false)
} }
......
...@@ -48,11 +48,10 @@ pub fn expand_deriving_partial_ord( ...@@ -48,11 +48,10 @@ pub fn expand_deriving_partial_ord(
trait_def.expand(cx, mitem, item, push) trait_def.expand(cx, mitem, item, push)
} }
pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> { pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr {
let test_id = Ident::new(sym::cmp, span); let test_id = Ident::new(sym::cmp, span);
let ordering = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal])); let ordering = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal]));
let ordering_expr = cx.expr_path(ordering.clone()); let ordering_expr = cx.expr_path(ordering.clone());
let equals_expr = cx.expr_some(span, ordering_expr);
let partial_cmp_path = cx.std_path(&[sym::cmp, sym::PartialOrd, sym::partial_cmp]); let partial_cmp_path = cx.std_path(&[sym::cmp, sym::PartialOrd, sym::partial_cmp]);
...@@ -69,7 +68,7 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_ ...@@ -69,7 +68,7 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_
// cmp => cmp // cmp => cmp
// } // }
// //
cs_fold( let expr = cs_fold(
// foldr nests the if-elses correctly, leaving the first field // foldr nests the if-elses correctly, leaving the first field
// as the outermost one, and the last as the innermost. // as the outermost one, and the last as the innermost.
false, false,
...@@ -95,7 +94,21 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_ ...@@ -95,7 +94,21 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_
cx.expr_match(span, new, vec![eq_arm, neq_arm]) cx.expr_match(span, new, vec![eq_arm, neq_arm])
}, },
equals_expr, |cx: &mut ExtCtxt<'_>, args: Option<(Span, P<Expr>, &[P<Expr>])>| match args {
Some((span, self_f, other_fs)) => {
let new = {
let [other_f] = other_fs else {
cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`");
};
let args =
vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())];
cx.expr_call_global(span, partial_cmp_path.clone(), args)
};
new
}
None => cx.expr_some(span, ordering_expr.clone()),
},
Box::new(|cx, span, tag_tuple| { Box::new(|cx, span, tag_tuple| {
if tag_tuple.len() != 2 { if tag_tuple.len() != 2 {
cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`") cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`")
...@@ -110,5 +123,6 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_ ...@@ -110,5 +123,6 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_
cx, cx,
span, span,
substr, substr,
) );
BlockOrExpr::new_expr(expr)
} }
...@@ -2,8 +2,7 @@ ...@@ -2,8 +2,7 @@
use crate::deriving::generic::*; use crate::deriving::generic::*;
use crate::deriving::path_std; use crate::deriving::path_std;
use rustc_ast::ptr::P; use rustc_ast::{self as ast, MetaItem};
use rustc_ast::{self as ast, Expr, MetaItem};
use rustc_expand::base::{Annotatable, ExtCtxt}; use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{sym, Ident, Symbol}; use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::Span; use rustc_span::Span;
...@@ -42,7 +41,7 @@ pub fn expand_deriving_debug( ...@@ -42,7 +41,7 @@ pub fn expand_deriving_debug(
trait_def.expand(cx, mitem, item, push) trait_def.expand(cx, mitem, item, push)
} }
fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> { fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr {
let (ident, vdata, fields) = match substr.fields { let (ident, vdata, fields) = match substr.fields {
Struct(vdata, fields) => (substr.type_ident, *vdata, fields), Struct(vdata, fields) => (substr.type_ident, *vdata, fields),
EnumMatching(_, _, v, fields) => (v.ident, &v.data, fields), EnumMatching(_, _, v, fields) => (v.ident, &v.data, fields),
...@@ -74,7 +73,8 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_> ...@@ -74,7 +73,8 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
if fields.is_empty() { if fields.is_empty() {
// Special case for no fields. // Special case for no fields.
let fn_path_write_str = cx.std_path(&[sym::fmt, sym::Formatter, sym::write_str]); let fn_path_write_str = cx.std_path(&[sym::fmt, sym::Formatter, sym::write_str]);
cx.expr_call_global(span, fn_path_write_str, vec![fmt, name]) let expr = cx.expr_call_global(span, fn_path_write_str, vec![fmt, name]);
BlockOrExpr::new_expr(expr)
} else if fields.len() <= CUTOFF { } else if fields.len() <= CUTOFF {
// Few enough fields that we can use a specific-length method. // Few enough fields that we can use a specific-length method.
let debug = if is_struct { let debug = if is_struct {
...@@ -100,7 +100,8 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_> ...@@ -100,7 +100,8 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
let field = cx.expr_addr_of(field.span, field); let field = cx.expr_addr_of(field.span, field);
args.push(field); args.push(field);
} }
cx.expr_call_global(span, fn_path_debug, args) let expr = cx.expr_call_global(span, fn_path_debug, args);
BlockOrExpr::new_expr(expr)
} else { } else {
// Enough fields that we must use the any-length method. // Enough fields that we must use the any-length method.
let mut name_exprs = Vec::with_capacity(fields.len()); let mut name_exprs = Vec::with_capacity(fields.len());
...@@ -176,8 +177,6 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_> ...@@ -176,8 +177,6 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
stmts.push(names_let.unwrap()); stmts.push(names_let.unwrap());
} }
stmts.push(values_let); stmts.push(values_let);
stmts.push(cx.stmt_expr(expr)); BlockOrExpr::new_mixed(stmts, expr)
cx.expr_block(cx.block(span, stmts))
} }
} }
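// Illustration (not part of this patch): the three branches above correspond to the
// shapes in the expected test output further down. With no fields, `Debug::fmt` is a
// single `Formatter::write_str` call; up to the (unshown) `CUTOFF`, a fixed-arity
// helper is used, e.g. for the one-field `Packed(u32)` tuple struct:
//
//     ::core::fmt::Formatter::debug_tuple_field1_finish(f, "Packed", &&__self_0_0)
//
// and past the cutoff the names and values are gathered into slices for the
// any-length helper, e.g. for the eight-field `Big` struct:
//
//     let names: &'static _ = &["b1", "b2", /* ... */ "b8"];
//     let values: &[&dyn ::core::fmt::Debug] = &[&&self.b1, &&self.b2, /* ... */ &&self.b8];
//     ::core::fmt::Formatter::debug_struct_fields_finish(f, "Big", names, values)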
...@@ -62,7 +62,7 @@ fn decodable_substructure( ...@@ -62,7 +62,7 @@ fn decodable_substructure(
trait_span: Span, trait_span: Span,
substr: &Substructure<'_>, substr: &Substructure<'_>,
krate: Symbol, krate: Symbol,
) -> P<Expr> { ) -> BlockOrExpr {
let decoder = substr.nonself_args[0].clone(); let decoder = substr.nonself_args[0].clone();
let recurse = vec![ let recurse = vec![
Ident::new(krate, trait_span), Ident::new(krate, trait_span),
...@@ -74,7 +74,7 @@ fn decodable_substructure( ...@@ -74,7 +74,7 @@ fn decodable_substructure(
let blkarg = Ident::new(sym::_d, trait_span); let blkarg = Ident::new(sym::_d, trait_span);
let blkdecoder = cx.expr_ident(trait_span, blkarg); let blkdecoder = cx.expr_ident(trait_span, blkarg);
match *substr.fields { let expr = match *substr.fields {
StaticStruct(_, ref summary) => { StaticStruct(_, ref summary) => {
let nfields = match *summary { let nfields = match *summary {
Unnamed(ref fields, _) => fields.len(), Unnamed(ref fields, _) => fields.len(),
...@@ -173,7 +173,8 @@ fn decodable_substructure( ...@@ -173,7 +173,8 @@ fn decodable_substructure(
) )
} }
_ => cx.bug("expected StaticEnum or StaticStruct in derive(Decodable)"), _ => cx.bug("expected StaticEnum or StaticStruct in derive(Decodable)"),
} };
BlockOrExpr::new_expr(expr)
} }
/// Creates a decoder for a single enum variant/struct: /// Creates a decoder for a single enum variant/struct:
......
use crate::deriving::generic::ty::*; use crate::deriving::generic::ty::*;
use crate::deriving::generic::*; use crate::deriving::generic::*;
use rustc_ast::ptr::P; use rustc_ast as ast;
use rustc_ast::walk_list; use rustc_ast::walk_list;
use rustc_ast::EnumDef; use rustc_ast::EnumDef;
use rustc_ast::VariantData; use rustc_ast::VariantData;
use rustc_ast::{Expr, MetaItem};
use rustc_errors::Applicability; use rustc_errors::Applicability;
use rustc_expand::base::{Annotatable, DummyResult, ExtCtxt}; use rustc_expand::base::{Annotatable, DummyResult, ExtCtxt};
use rustc_span::symbol::Ident; use rustc_span::symbol::Ident;
...@@ -16,7 +15,7 @@ ...@@ -16,7 +15,7 @@
pub fn expand_deriving_default( pub fn expand_deriving_default(
cx: &mut ExtCtxt<'_>, cx: &mut ExtCtxt<'_>,
span: Span, span: Span,
mitem: &MetaItem, mitem: &ast::MetaItem,
item: &Annotatable, item: &Annotatable,
push: &mut dyn FnMut(Annotatable), push: &mut dyn FnMut(Annotatable),
) { ) {
...@@ -59,12 +58,12 @@ fn default_struct_substructure( ...@@ -59,12 +58,12 @@ fn default_struct_substructure(
trait_span: Span, trait_span: Span,
substr: &Substructure<'_>, substr: &Substructure<'_>,
summary: &StaticFields, summary: &StaticFields,
) -> P<Expr> { ) -> BlockOrExpr {
// Note that `kw::Default` is "default" and `sym::Default` is "Default"! // Note that `kw::Default` is "default" and `sym::Default` is "Default"!
let default_ident = cx.std_path(&[kw::Default, sym::Default, kw::Default]); let default_ident = cx.std_path(&[kw::Default, sym::Default, kw::Default]);
let default_call = |span| cx.expr_call_global(span, default_ident.clone(), Vec::new()); let default_call = |span| cx.expr_call_global(span, default_ident.clone(), Vec::new());
match summary { let expr = match summary {
Unnamed(ref fields, is_tuple) => { Unnamed(ref fields, is_tuple) => {
if !is_tuple { if !is_tuple {
cx.expr_ident(trait_span, substr.type_ident) cx.expr_ident(trait_span, substr.type_ident)
...@@ -80,31 +79,27 @@ fn default_struct_substructure( ...@@ -80,31 +79,27 @@ fn default_struct_substructure(
.collect(); .collect();
cx.expr_struct_ident(trait_span, substr.type_ident, default_fields) cx.expr_struct_ident(trait_span, substr.type_ident, default_fields)
} }
} };
BlockOrExpr::new_expr(expr)
} }
fn default_enum_substructure( fn default_enum_substructure(
cx: &mut ExtCtxt<'_>, cx: &mut ExtCtxt<'_>,
trait_span: Span, trait_span: Span,
enum_def: &EnumDef, enum_def: &EnumDef,
) -> P<Expr> { ) -> BlockOrExpr {
let Ok(default_variant) = extract_default_variant(cx, enum_def, trait_span) else { let expr = if let Ok(default_variant) = extract_default_variant(cx, enum_def, trait_span)
return DummyResult::raw_expr(trait_span, true); && let Ok(_) = validate_default_attribute(cx, default_variant)
}; {
// At this point, we know that there is exactly one variant with a `#[default]` attribute. The
// attribute hasn't yet been validated.
if let Err(()) = validate_default_attribute(cx, default_variant) {
return DummyResult::raw_expr(trait_span, true);
}
// We now know there is exactly one unit variant with exactly one `#[default]` attribute. // We now know there is exactly one unit variant with exactly one `#[default]` attribute.
cx.expr_path(cx.path( cx.expr_path(cx.path(
default_variant.span, default_variant.span,
vec![Ident::new(kw::SelfUpper, default_variant.span), default_variant.ident], vec![Ident::new(kw::SelfUpper, default_variant.span), default_variant.ident],
)) ))
} else {
DummyResult::raw_expr(trait_span, true)
};
BlockOrExpr::new_expr(expr)
} }
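// Illustration (not part of this patch): for a hypothetical user enum such as
//
//     #[derive(Default)]
//     enum Flavour {
//         #[default]
//         Vanilla,
//         Chocolate,
//     }
//
// the happy path of the let-chain above expands `default()` to the plain path
// expression `Self::Vanilla`; if there is no single unit variant carrying exactly
// one `#[default]` attribute, an error has already been emitted and a dummy
// expression is returned instead.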
fn extract_default_variant<'a>( fn extract_default_variant<'a>(
......
...@@ -89,8 +89,7 @@ ...@@ -89,8 +89,7 @@
use crate::deriving::generic::*; use crate::deriving::generic::*;
use crate::deriving::pathvec_std; use crate::deriving::pathvec_std;
use rustc_ast::ptr::P; use rustc_ast::{ExprKind, MetaItem, Mutability};
use rustc_ast::{Expr, ExprKind, MetaItem, Mutability};
use rustc_expand::base::{Annotatable, ExtCtxt}; use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{sym, Ident, Symbol}; use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::Span; use rustc_span::Span;
...@@ -147,7 +146,7 @@ fn encodable_substructure( ...@@ -147,7 +146,7 @@ fn encodable_substructure(
trait_span: Span, trait_span: Span,
substr: &Substructure<'_>, substr: &Substructure<'_>,
krate: Symbol, krate: Symbol,
) -> P<Expr> { ) -> BlockOrExpr {
let encoder = substr.nonself_args[0].clone(); let encoder = substr.nonself_args[0].clone();
// throw an underscore in front to suppress unused variable warnings // throw an underscore in front to suppress unused variable warnings
let blkarg = Ident::new(sym::_e, trait_span); let blkarg = Ident::new(sym::_e, trait_span);
...@@ -208,7 +207,7 @@ fn encodable_substructure( ...@@ -208,7 +207,7 @@ fn encodable_substructure(
let fn_emit_struct_path = let fn_emit_struct_path =
cx.def_site_path(&[sym::rustc_serialize, sym::Encoder, sym::emit_struct]); cx.def_site_path(&[sym::rustc_serialize, sym::Encoder, sym::emit_struct]);
cx.expr_call_global( let expr = cx.expr_call_global(
trait_span, trait_span,
fn_emit_struct_path, fn_emit_struct_path,
vec![ vec![
...@@ -217,7 +216,8 @@ fn encodable_substructure( ...@@ -217,7 +216,8 @@ fn encodable_substructure(
cx.expr_usize(trait_span, fields.len()), cx.expr_usize(trait_span, fields.len()),
blk, blk,
], ],
) );
BlockOrExpr::new_expr(expr)
} }
EnumMatching(idx, _, variant, ref fields) => { EnumMatching(idx, _, variant, ref fields) => {
...@@ -279,12 +279,12 @@ fn encodable_substructure( ...@@ -279,12 +279,12 @@ fn encodable_substructure(
let blk = cx.lambda1(trait_span, call, blkarg); let blk = cx.lambda1(trait_span, call, blkarg);
let fn_emit_enum_path: Vec<_> = let fn_emit_enum_path: Vec<_> =
cx.def_site_path(&[sym::rustc_serialize, sym::Encoder, sym::emit_enum]); cx.def_site_path(&[sym::rustc_serialize, sym::Encoder, sym::emit_enum]);
let ret = cx.expr_call_global( let expr = cx.expr_call_global(
trait_span, trait_span,
fn_emit_enum_path, fn_emit_enum_path,
vec![encoder, cx.expr_str(trait_span, substr.type_ident.name), blk], vec![encoder, cx.expr_str(trait_span, substr.type_ident.name), blk],
); );
cx.expr_block(cx.block(trait_span, vec![me, cx.stmt_expr(ret)])) BlockOrExpr::new_mixed(vec![me], expr)
} }
_ => cx.bug("expected Struct or EnumMatching in derive(Encodable)"), _ => cx.bug("expected Struct or EnumMatching in derive(Encodable)"),
......
@@ -296,7 +296,7 @@ pub enum SubstructureFields<'a> {
/// Combine the values of all the fields together. The last argument is
/// all the fields of all the structures.
pub type CombineSubstructureFunc<'a> =
-    Box<dyn FnMut(&mut ExtCtxt<'_>, Span, &Substructure<'_>) -> P<Expr> + 'a>;
+    Box<dyn FnMut(&mut ExtCtxt<'_>, Span, &Substructure<'_>) -> BlockOrExpr + 'a>;
/// Deal with non-matching enum variants. The slice is the identifiers holding
/// the variant index value for each of the `Self` arguments.
@@ -314,6 +314,48 @@ struct TypeParameter {
    ty: P<ast::Ty>,
}
// The code snippets built up for derived code are sometimes used as blocks
// (e.g. in a function body) and sometimes used as expressions (e.g. in a match
// arm). This structure avoids committing to either form until necessary,
// avoiding the insertion of any unnecessary blocks.
//
// The statements come before the expression.
pub struct BlockOrExpr(Vec<ast::Stmt>, Option<P<Expr>>);
impl BlockOrExpr {
pub fn new_stmts(stmts: Vec<ast::Stmt>) -> BlockOrExpr {
BlockOrExpr(stmts, None)
}
pub fn new_expr(expr: P<Expr>) -> BlockOrExpr {
BlockOrExpr(vec![], Some(expr))
}
pub fn new_mixed(stmts: Vec<ast::Stmt>, expr: P<Expr>) -> BlockOrExpr {
BlockOrExpr(stmts, Some(expr))
}
// Converts it into a block.
fn into_block(mut self, cx: &ExtCtxt<'_>, span: Span) -> P<ast::Block> {
if let Some(expr) = self.1 {
self.0.push(cx.stmt_expr(expr));
}
cx.block(span, self.0)
}
// Converts it into an expression.
fn into_expr(self, cx: &ExtCtxt<'_>, span: Span) -> P<Expr> {
if self.0.is_empty() {
match self.1 {
None => cx.expr_block(cx.block(span, vec![])),
Some(expr) => expr,
}
} else {
cx.expr_block(self.into_block(cx, span))
}
}
}
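// Illustration (not part of this patch): the point of `BlockOrExpr` is that the same
// snippet can later become either a block or an expression without picking up a
// redundant `{ ... }` layer. Roughly:
//
//     BlockOrExpr::new_expr(e).into_expr(cx, sp)          // -> e
//     BlockOrExpr::new_expr(e).into_block(cx, sp)         // -> { e }
//     BlockOrExpr::new_stmts(stmts).into_block(cx, sp)    // -> { stmts }
//     BlockOrExpr::new_mixed(stmts, e).into_expr(cx, sp)  // -> { stmts e }
//     BlockOrExpr::new_stmts(vec![]).into_expr(cx, sp)    // -> {}
//
// which is why e.g. `fn clone(&self) -> Empty { *self }` in the updated test output
// no longer carries an inner `{ *self }` block.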
/// This method helps to extract all the type parameters referenced from a /// This method helps to extract all the type parameters referenced from a
/// type. For a type parameter `<T>`, it looks for either a `TyPath` that /// type. For a type parameter `<T>`, it looks for either a `TyPath` that
/// is not global and starts with `T`, or a `TyQPath`. /// is not global and starts with `T`, or a `TyQPath`.
...@@ -827,7 +869,7 @@ fn call_substructure_method( ...@@ -827,7 +869,7 @@ fn call_substructure_method(
type_ident: Ident, type_ident: Ident,
nonself_args: &[P<Expr>], nonself_args: &[P<Expr>],
fields: &SubstructureFields<'_>, fields: &SubstructureFields<'_>,
) -> P<Expr> { ) -> BlockOrExpr {
let span = trait_.span; let span = trait_.span;
let substructure = Substructure { type_ident, nonself_args, fields }; let substructure = Substructure { type_ident, nonself_args, fields };
let mut f = self.combine_substructure.borrow_mut(); let mut f = self.combine_substructure.borrow_mut();
...@@ -902,7 +944,7 @@ fn create_method( ...@@ -902,7 +944,7 @@ fn create_method(
generics: &Generics, generics: &Generics,
explicit_self: Option<ast::ExplicitSelf>, explicit_self: Option<ast::ExplicitSelf>,
arg_types: Vec<(Ident, P<ast::Ty>)>, arg_types: Vec<(Ident, P<ast::Ty>)>,
body: P<Expr>, body: BlockOrExpr,
) -> P<ast::AssocItem> { ) -> P<ast::AssocItem> {
let span = trait_.span; let span = trait_.span;
// Create the generics that aren't for `Self`. // Create the generics that aren't for `Self`.
...@@ -921,7 +963,7 @@ fn create_method( ...@@ -921,7 +963,7 @@ fn create_method(
let method_ident = Ident::new(self.name, span); let method_ident = Ident::new(self.name, span);
let fn_decl = cx.fn_decl(args, ast::FnRetTy::Ty(ret_type)); let fn_decl = cx.fn_decl(args, ast::FnRetTy::Ty(ret_type));
let body_block = cx.block_expr(body); let body_block = body.into_block(cx, span);
let trait_lo_sp = span.shrink_to_lo(); let trait_lo_sp = span.shrink_to_lo();
...@@ -986,7 +1028,7 @@ fn expand_struct_method_body<'b>( ...@@ -986,7 +1028,7 @@ fn expand_struct_method_body<'b>(
nonself_args: &[P<Expr>], nonself_args: &[P<Expr>],
use_temporaries: bool, use_temporaries: bool,
is_packed: bool, is_packed: bool,
) -> P<Expr> { ) -> BlockOrExpr {
let mut raw_fields = Vec::new(); // Vec<[fields of self], [fields of next Self arg], [etc]> let mut raw_fields = Vec::new(); // Vec<[fields of self], [fields of next Self arg], [etc]>
let span = trait_.span; let span = trait_.span;
let mut patterns = Vec::new(); let mut patterns = Vec::new();
...@@ -1047,16 +1089,14 @@ fn expand_struct_method_body<'b>( ...@@ -1047,16 +1089,14 @@ fn expand_struct_method_body<'b>(
); );
if !is_packed { if !is_packed {
body.span = span;
body body
} else { } else {
// Do the let-destructuring. // Do the let-destructuring.
let mut stmts: Vec<_> = iter::zip(self_args, patterns) let mut stmts: Vec<_> = iter::zip(self_args, patterns)
.map(|(arg_expr, pat)| cx.stmt_let_pat(span, pat, arg_expr.clone())) .map(|(arg_expr, pat)| cx.stmt_let_pat(span, pat, arg_expr.clone()))
.collect(); .collect();
stmts.push(cx.stmt_expr(body)); stmts.extend(std::mem::take(&mut body.0));
BlockOrExpr(stmts, body.1)
cx.expr_block(cx.block(span, stmts))
} }
} }
...@@ -1067,7 +1107,7 @@ fn expand_static_struct_method_body( ...@@ -1067,7 +1107,7 @@ fn expand_static_struct_method_body(
struct_def: &VariantData, struct_def: &VariantData,
type_ident: Ident, type_ident: Ident,
nonself_args: &[P<Expr>], nonself_args: &[P<Expr>],
) -> P<Expr> { ) -> BlockOrExpr {
let summary = trait_.summarise_struct(cx, struct_def); let summary = trait_.summarise_struct(cx, struct_def);
self.call_substructure_method( self.call_substructure_method(
...@@ -1130,7 +1170,7 @@ fn expand_enum_method_body<'b>( ...@@ -1130,7 +1170,7 @@ fn expand_enum_method_body<'b>(
type_ident: Ident, type_ident: Ident,
mut self_args: Vec<P<Expr>>, mut self_args: Vec<P<Expr>>,
nonself_args: &[P<Expr>], nonself_args: &[P<Expr>],
) -> P<Expr> { ) -> BlockOrExpr {
let span = trait_.span; let span = trait_.span;
let variants = &enum_def.variants; let variants = &enum_def.variants;
...@@ -1199,7 +1239,11 @@ fn expand_enum_method_body<'b>( ...@@ -1199,7 +1239,11 @@ fn expand_enum_method_body<'b>(
} }
// Here is the pat = `(&VariantK, &VariantK, ...)` // Here is the pat = `(&VariantK, &VariantK, ...)`
let single_pat = cx.pat_tuple(span, subpats); let single_pat = if subpats.len() == 1 {
subpats.pop().unwrap()
} else {
cx.pat_tuple(span, subpats)
};
// For the BodyK, we need to delegate to our caller, // For the BodyK, we need to delegate to our caller,
// passing it an EnumMatching to indicate which case // passing it an EnumMatching to indicate which case
...@@ -1253,13 +1297,9 @@ fn expand_enum_method_body<'b>( ...@@ -1253,13 +1297,9 @@ fn expand_enum_method_body<'b>(
// Self arg, assuming all are instances of VariantK. // Self arg, assuming all are instances of VariantK.
// Build up code associated with such a case. // Build up code associated with such a case.
let substructure = EnumMatching(index, variants.len(), variant, field_tuples); let substructure = EnumMatching(index, variants.len(), variant, field_tuples);
let arm_expr = self.call_substructure_method( let arm_expr = self
cx, .call_substructure_method(cx, trait_, type_ident, nonself_args, &substructure)
trait_, .into_expr(cx, span);
type_ident,
nonself_args,
&substructure,
);
cx.arm(span, single_pat, arm_expr) cx.arm(span, single_pat, arm_expr)
}) })
...@@ -1271,13 +1311,16 @@ fn expand_enum_method_body<'b>( ...@@ -1271,13 +1311,16 @@ fn expand_enum_method_body<'b>(
// The index and actual variant aren't meaningful in this case, // The index and actual variant aren't meaningful in this case,
// so just use whatever // so just use whatever
let substructure = EnumMatching(0, variants.len(), v, Vec::new()); let substructure = EnumMatching(0, variants.len(), v, Vec::new());
Some(self.call_substructure_method( Some(
self.call_substructure_method(
cx, cx,
trait_, trait_,
type_ident, type_ident,
nonself_args, nonself_args,
&substructure, &substructure,
)) )
.into_expr(cx, span),
)
} }
_ if variants.len() > 1 && self_args.len() > 1 => { _ if variants.len() > 1 && self_args.len() > 1 => {
// Since we know that all the arguments will match if we reach // Since we know that all the arguments will match if we reach
...@@ -1341,13 +1384,15 @@ fn expand_enum_method_body<'b>( ...@@ -1341,13 +1384,15 @@ fn expand_enum_method_body<'b>(
} }
} }
let arm_expr = self.call_substructure_method( let arm_expr = self
.call_substructure_method(
cx, cx,
trait_, trait_,
type_ident, type_ident,
nonself_args, nonself_args,
&catch_all_substructure, &catch_all_substructure,
); )
.into_expr(cx, span);
// Final wrinkle: the self_args are expressions that deref // Final wrinkle: the self_args are expressions that deref
// down to desired places, but we cannot actually deref // down to desired places, but we cannot actually deref
...@@ -1371,8 +1416,7 @@ fn expand_enum_method_body<'b>( ...@@ -1371,8 +1416,7 @@ fn expand_enum_method_body<'b>(
// } // }
let all_match = cx.expr_match(span, match_arg, match_arms); let all_match = cx.expr_match(span, match_arg, match_arms);
let arm_expr = cx.expr_if(span, discriminant_test, all_match, Some(arm_expr)); let arm_expr = cx.expr_if(span, discriminant_test, all_match, Some(arm_expr));
index_let_stmts.push(cx.stmt_expr(arm_expr)); BlockOrExpr(index_let_stmts, Some(arm_expr))
cx.expr_block(cx.block(span, index_let_stmts))
} else if variants.is_empty() { } else if variants.is_empty() {
// As an additional wrinkle, For a zero-variant enum A, // As an additional wrinkle, For a zero-variant enum A,
// currently the compiler // currently the compiler
...@@ -1423,7 +1467,7 @@ fn expand_enum_method_body<'b>( ...@@ -1423,7 +1467,7 @@ fn expand_enum_method_body<'b>(
// derive Debug on such a type could here generate code // derive Debug on such a type could here generate code
// that needs the feature gate enabled.) // that needs the feature gate enabled.)
deriving::call_unreachable(cx, span) BlockOrExpr(vec![], Some(deriving::call_unreachable(cx, span)))
} else { } else {
// Final wrinkle: the self_args are expressions that deref // Final wrinkle: the self_args are expressions that deref
// down to desired places, but we cannot actually deref // down to desired places, but we cannot actually deref
...@@ -1431,8 +1475,12 @@ fn expand_enum_method_body<'b>( ...@@ -1431,8 +1475,12 @@ fn expand_enum_method_body<'b>(
// expression; here add a layer of borrowing, turning // expression; here add a layer of borrowing, turning
// `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`. // `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`.
self_args.map_in_place(|self_arg| cx.expr_addr_of(span, self_arg)); self_args.map_in_place(|self_arg| cx.expr_addr_of(span, self_arg));
let match_arg = cx.expr(span, ast::ExprKind::Tup(self_args)); let match_arg = if self_args.len() == 1 {
cx.expr_match(span, match_arg, match_arms) self_args.pop().unwrap()
} else {
cx.expr(span, ast::ExprKind::Tup(self_args))
};
BlockOrExpr(vec![], Some(cx.expr_match(span, match_arg, match_arms)))
} }
} }
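// Illustration (not part of this patch): with the two `len() == 1` special cases
// above, a derive that only looks at `self` no longer builds one-element tuples for
// the scrutinee and the pattern. Hand-written sketch for the new `Enum1` test type:
//
//     enum Enum1 { Single { x: u32 } }
//
//     // Old shape: a one-element tuple scrutinee and pattern.
//     fn clone_old(v: &Enum1) -> Enum1 {
//         match (&*v,) {
//             (&Enum1::Single { x: ref __self_0 },) =>
//                 Enum1::Single { x: Clone::clone(&*__self_0) },
//         }
//     }
//
//     // New shape: match on the single borrowed value directly.
//     fn clone_new(v: &Enum1) -> Enum1 {
//         match &*v {
//             &Enum1::Single { x: ref __self_0 } =>
//                 Enum1::Single { x: Clone::clone(&*__self_0) },
//         }
//     }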
...@@ -1443,7 +1491,7 @@ fn expand_static_enum_method_body( ...@@ -1443,7 +1491,7 @@ fn expand_static_enum_method_body(
enum_def: &EnumDef, enum_def: &EnumDef,
type_ident: Ident, type_ident: Ident,
nonself_args: &[P<Expr>], nonself_args: &[P<Expr>],
) -> P<Expr> { ) -> BlockOrExpr {
let summary = enum_def let summary = enum_def
.variants .variants
.iter() .iter()
...@@ -1606,71 +1654,6 @@ fn create_enum_variant_pattern( ...@@ -1606,71 +1654,6 @@ fn create_enum_variant_pattern(
} }
} }
// helpful premade recipes
fn cs_fold_fields<'a, F>(
use_foldl: bool,
mut f: F,
base: P<Expr>,
cx: &mut ExtCtxt<'_>,
all_fields: &[FieldInfo<'a>],
) -> P<Expr>
where
F: FnMut(&mut ExtCtxt<'_>, Span, P<Expr>, P<Expr>, &[P<Expr>]) -> P<Expr>,
{
if use_foldl {
all_fields
.iter()
.fold(base, |old, field| f(cx, field.span, old, field.self_.clone(), &field.other))
} else {
all_fields
.iter()
.rev()
.fold(base, |old, field| f(cx, field.span, old, field.self_.clone(), &field.other))
}
}
fn cs_fold_enumnonmatch(
mut enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>,
cx: &mut ExtCtxt<'_>,
trait_span: Span,
substructure: &Substructure<'_>,
) -> P<Expr> {
match *substructure.fields {
EnumNonMatchingCollapsed(tuple) => enum_nonmatch_f(cx, trait_span, tuple),
_ => cx.span_bug(trait_span, "cs_fold_enumnonmatch expected an EnumNonMatchingCollapsed"),
}
}
fn cs_fold_static(cx: &mut ExtCtxt<'_>, trait_span: Span) -> P<Expr> {
cx.span_bug(trait_span, "static function in `derive`")
}
/// Fold the fields. `use_foldl` controls whether this is done
/// left-to-right (`true`) or right-to-left (`false`).
pub fn cs_fold<F>(
use_foldl: bool,
f: F,
base: P<Expr>,
enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>,
cx: &mut ExtCtxt<'_>,
trait_span: Span,
substructure: &Substructure<'_>,
) -> P<Expr>
where
F: FnMut(&mut ExtCtxt<'_>, Span, P<Expr>, P<Expr>, &[P<Expr>]) -> P<Expr>,
{
match *substructure.fields {
EnumMatching(.., ref all_fields) | Struct(_, ref all_fields) => {
cs_fold_fields(use_foldl, f, base, cx, all_fields)
}
EnumNonMatchingCollapsed(..) => {
cs_fold_enumnonmatch(enum_nonmatch_f, cx, trait_span, substructure)
}
StaticEnum(..) | StaticStruct(..) => cs_fold_static(cx, trait_span),
}
}
/// Function to fold over fields, with three cases, to generate more efficient and concise code. /// Function to fold over fields, with three cases, to generate more efficient and concise code.
/// When the `substructure` has grouped fields, there are two cases: /// When the `substructure` has grouped fields, there are two cases:
/// Zero fields: call the base case function with `None` (like the usual base case of `cs_fold`). /// Zero fields: call the base case function with `None` (like the usual base case of `cs_fold`).
...@@ -1679,11 +1662,11 @@ pub fn cs_fold<F>( ...@@ -1679,11 +1662,11 @@ pub fn cs_fold<F>(
/// fields. /// fields.
/// When the `substructure` is an `EnumNonMatchingCollapsed`, the result of `enum_nonmatch_f` /// When the `substructure` is an `EnumNonMatchingCollapsed`, the result of `enum_nonmatch_f`
/// is returned. Statics may not be folded over. /// is returned. Statics may not be folded over.
pub fn cs_fold1<F, B>( pub fn cs_fold<F, B>(
use_foldl: bool, use_foldl: bool,
f: F, mut f: F,
mut b: B, mut b: B,
enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>, mut enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>,
cx: &mut ExtCtxt<'_>, cx: &mut ExtCtxt<'_>,
trait_span: Span, trait_span: Span,
substructure: &Substructure<'_>, substructure: &Substructure<'_>,
...@@ -1708,12 +1691,18 @@ pub fn cs_fold1<F, B>( ...@@ -1708,12 +1691,18 @@ pub fn cs_fold1<F, B>(
(true, _) => (b(cx, None), &all_fields[..]), (true, _) => (b(cx, None), &all_fields[..]),
}; };
cs_fold_fields(use_foldl, f, base, cx, rest) if use_foldl {
rest.iter().fold(base, |old, field| {
f(cx, field.span, old, field.self_.clone(), &field.other)
})
} else {
rest.iter().rev().fold(base, |old, field| {
f(cx, field.span, old, field.self_.clone(), &field.other)
})
} }
EnumNonMatchingCollapsed(..) => {
cs_fold_enumnonmatch(enum_nonmatch_f, cx, trait_span, substructure)
} }
StaticEnum(..) | StaticStruct(..) => cs_fold_static(cx, trait_span), EnumNonMatchingCollapsed(tuple) => enum_nonmatch_f(cx, trait_span, tuple),
StaticEnum(..) | StaticStruct(..) => cx.span_bug(trait_span, "static function in `derive`"),
} }
} }
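// Illustration (not part of this patch): with `cs_fold1` renamed to `cs_fold`,
// callers pass the base case as a closure over `Option<(Span, P<Expr>, &[P<Expr>])>`
// instead of a prebuilt expression, so the first field can seed the fold. A sketch
// of a caller, with the closure bodies elided:
//
//     let expr = cs_fold(
//         false,                                // fold right-to-left (foldr)
//         |cx, span, old, self_f, other_fs| {   // combine one more field with `old`
//             /* ... */
//         },
//         |cx, args| match args {
//             Some((span, self_f, other_fs)) => { /* 1+ fields: build from the first field */ }
//             None => { /* zero fields: the trait's neutral value, e.g. `Equal` */ }
//         },
//         enum_nonmatch_f,
//         cx,
//         trait_span,
//         substr,
//     );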
......
@@ -2,8 +2,7 @@
use crate::deriving::generic::*;
use crate::deriving::{self, path_std, pathvec_std};
-use rustc_ast::ptr::P;
-use rustc_ast::{Expr, MetaItem, Mutability};
+use rustc_ast::{MetaItem, Mutability};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::sym;
use rustc_span::Span;
@@ -45,7 +44,11 @@ pub fn expand_deriving_hash(
    hash_trait_def.expand(cx, mitem, item, push);
}
-fn hash_substructure(cx: &mut ExtCtxt<'_>, trait_span: Span, substr: &Substructure<'_>) -> P<Expr> {
+fn hash_substructure(
+    cx: &mut ExtCtxt<'_>,
+    trait_span: Span,
+    substr: &Substructure<'_>,
+) -> BlockOrExpr {
    let [state_expr] = substr.nonself_args else {
        cx.span_bug(trait_span, "incorrect number of arguments in `derive(Hash)`");
    };
@@ -81,6 +84,5 @@ fn hash_substructure(cx: &mut ExtCtxt<'_>, trait_span: Span, substr: &Substructu
    stmts.extend(
        fields.iter().map(|FieldInfo { ref self_, span, .. }| call_hash(*span, self_.clone())),
    );
-    cx.expr_block(cx.block(trait_span, stmts))
+    BlockOrExpr::new_stmts(stmts)
}
@@ -10,7 +10,7 @@
// CHECK: @STATIC = {{.*}}, align 4
// This checks the constants from inline_enum_const
-// CHECK: @alloc14 = {{.*}}, align 2
+// CHECK: @alloc12 = {{.*}}, align 2
// This checks the constants from {low,high}_align_const, they share the same
// constant, but the alignment differs, so the higher one should be used
...
@@ -39,6 +39,16 @@ struct Big {
#[repr(packed)]
struct Packed(u32);
+
+// An empty enum.
+#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
+enum Enum0 {}
+
+// A single-variant enum.
+#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
+enum Enum1 {
+    Single { x: u32 }
+}
// A C-like, fieldless enum.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
enum Fieldless {
@@ -66,3 +76,11 @@ enum Fielded {
    Y(bool),
    Z(Option<i32>),
}
+
+// A union. Most builtin traits are not derivable for unions.
+#[derive(Clone, Copy)]
+pub union Union {
+    pub b: bool,
+    pub u: u32,
+    pub i: i32,
+}
...@@ -28,7 +28,7 @@ struct Empty; ...@@ -28,7 +28,7 @@ struct Empty;
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
impl ::core::clone::Clone for Empty { impl ::core::clone::Clone for Empty {
#[inline] #[inline]
fn clone(&self) -> Empty { { *self } } fn clone(&self) -> Empty { *self }
} }
#[automatically_derived] #[automatically_derived]
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
...@@ -49,7 +49,7 @@ impl ::core::default::Default for Empty { ...@@ -49,7 +49,7 @@ impl ::core::default::Default for Empty {
#[automatically_derived] #[automatically_derived]
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
impl ::core::hash::Hash for Empty { impl ::core::hash::Hash for Empty {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () { {} } fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {}
} }
impl ::core::marker::StructuralPartialEq for Empty {} impl ::core::marker::StructuralPartialEq for Empty {}
#[automatically_derived] #[automatically_derived]
...@@ -65,7 +65,7 @@ impl ::core::cmp::Eq for Empty { ...@@ -65,7 +65,7 @@ impl ::core::cmp::Eq for Empty {
#[inline] #[inline]
#[doc(hidden)] #[doc(hidden)]
#[no_coverage] #[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () { {} } fn assert_receiver_is_total_eq(&self) -> () {}
} }
#[automatically_derived] #[automatically_derived]
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
...@@ -95,12 +95,9 @@ struct Point { ...@@ -95,12 +95,9 @@ struct Point {
impl ::core::clone::Clone for Point { impl ::core::clone::Clone for Point {
#[inline] #[inline]
fn clone(&self) -> Point { fn clone(&self) -> Point {
{
let _: ::core::clone::AssertParamIsClone<u32>;
let _: ::core::clone::AssertParamIsClone<u32>; let _: ::core::clone::AssertParamIsClone<u32>;
*self *self
} }
}
} }
#[automatically_derived] #[automatically_derived]
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
...@@ -128,11 +125,9 @@ impl ::core::default::Default for Point { ...@@ -128,11 +125,9 @@ impl ::core::default::Default for Point {
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
impl ::core::hash::Hash for Point { impl ::core::hash::Hash for Point {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () { fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
{
::core::hash::Hash::hash(&self.x, state); ::core::hash::Hash::hash(&self.x, state);
::core::hash::Hash::hash(&self.y, state) ::core::hash::Hash::hash(&self.y, state)
} }
}
} }
impl ::core::marker::StructuralPartialEq for Point {} impl ::core::marker::StructuralPartialEq for Point {}
#[automatically_derived] #[automatically_derived]
...@@ -155,11 +150,8 @@ impl ::core::cmp::Eq for Point { ...@@ -155,11 +150,8 @@ impl ::core::cmp::Eq for Point {
#[doc(hidden)] #[doc(hidden)]
#[no_coverage] #[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () { fn assert_receiver_is_total_eq(&self) -> () {
{
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>; let _: ::core::cmp::AssertParamIsEq<u32>;
} }
}
} }
#[automatically_derived] #[automatically_derived]
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
...@@ -169,13 +161,7 @@ impl ::core::cmp::PartialOrd for Point { ...@@ -169,13 +161,7 @@ impl ::core::cmp::PartialOrd for Point {
-> ::core::option::Option<::core::cmp::Ordering> { -> ::core::option::Option<::core::cmp::Ordering> {
match ::core::cmp::PartialOrd::partial_cmp(&self.x, &other.x) { match ::core::cmp::PartialOrd::partial_cmp(&self.x, &other.x) {
::core::option::Option::Some(::core::cmp::Ordering::Equal) => ::core::option::Option::Some(::core::cmp::Ordering::Equal) =>
match ::core::cmp::PartialOrd::partial_cmp(&self.y, &other.y) ::core::cmp::PartialOrd::partial_cmp(&self.y, &other.y),
{
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
cmp => cmp, cmp => cmp,
} }
} }
...@@ -187,11 +173,7 @@ impl ::core::cmp::Ord for Point { ...@@ -187,11 +173,7 @@ impl ::core::cmp::Ord for Point {
fn cmp(&self, other: &Point) -> ::core::cmp::Ordering { fn cmp(&self, other: &Point) -> ::core::cmp::Ordering {
match ::core::cmp::Ord::cmp(&self.x, &other.x) { match ::core::cmp::Ord::cmp(&self.x, &other.x) {
::core::cmp::Ordering::Equal => ::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&self.y, &other.y) { ::core::cmp::Ord::cmp(&self.y, &other.y),
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
cmp => cmp, cmp => cmp,
} }
} }
...@@ -229,15 +211,13 @@ impl ::core::clone::Clone for Big { ...@@ -229,15 +211,13 @@ impl ::core::clone::Clone for Big {
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
impl ::core::fmt::Debug for Big { impl ::core::fmt::Debug for Big {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
{
let names: &'static _ = let names: &'static _ =
&["b1", "b2", "b3", "b4", "b5", "b6", "b7", "b8"]; &["b1", "b2", "b3", "b4", "b5", "b6", "b7", "b8"];
let values: &[&dyn ::core::fmt::Debug] = let values: &[&dyn ::core::fmt::Debug] =
&[&&self.b1, &&self.b2, &&self.b3, &&self.b4, &&self.b5, &[&&self.b1, &&self.b2, &&self.b3, &&self.b4, &&self.b5,
&&self.b6, &&self.b7, &&self.b8]; &&self.b6, &&self.b7, &&self.b8];
::core::fmt::Formatter::debug_struct_fields_finish(f, "Big", ::core::fmt::Formatter::debug_struct_fields_finish(f, "Big", names,
names, values) values)
}
} }
} }
#[automatically_derived] #[automatically_derived]
...@@ -261,7 +241,6 @@ impl ::core::default::Default for Big { ...@@ -261,7 +241,6 @@ impl ::core::default::Default for Big {
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
impl ::core::hash::Hash for Big { impl ::core::hash::Hash for Big {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () { fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
{
::core::hash::Hash::hash(&self.b1, state); ::core::hash::Hash::hash(&self.b1, state);
::core::hash::Hash::hash(&self.b2, state); ::core::hash::Hash::hash(&self.b2, state);
::core::hash::Hash::hash(&self.b3, state); ::core::hash::Hash::hash(&self.b3, state);
...@@ -271,7 +250,6 @@ impl ::core::hash::Hash for Big { ...@@ -271,7 +250,6 @@ impl ::core::hash::Hash for Big {
::core::hash::Hash::hash(&self.b7, state); ::core::hash::Hash::hash(&self.b7, state);
::core::hash::Hash::hash(&self.b8, state) ::core::hash::Hash::hash(&self.b8, state)
} }
}
} }
impl ::core::marker::StructuralPartialEq for Big {} impl ::core::marker::StructuralPartialEq for Big {}
#[automatically_derived] #[automatically_derived]
...@@ -300,16 +278,7 @@ impl ::core::cmp::Eq for Big { ...@@ -300,16 +278,7 @@ impl ::core::cmp::Eq for Big {
#[doc(hidden)] #[doc(hidden)]
#[no_coverage] #[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () { fn assert_receiver_is_total_eq(&self) -> () {
{
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>; let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
}
} }
} }
#[automatically_derived] #[automatically_derived]
...@@ -344,13 +313,7 @@ impl ::core::cmp::PartialOrd for Big { ...@@ -344,13 +313,7 @@ impl ::core::cmp::PartialOrd for Big {
&other.b7) { &other.b7) {
::core::option::Option::Some(::core::cmp::Ordering::Equal) ::core::option::Option::Some(::core::cmp::Ordering::Equal)
=> =>
match ::core::cmp::PartialOrd::partial_cmp(&self.b8, ::core::cmp::PartialOrd::partial_cmp(&self.b8, &other.b8),
&other.b8) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
cmp => cmp, cmp => cmp,
}, },
cmp => cmp, cmp => cmp,
...@@ -386,11 +349,7 @@ impl ::core::cmp::Ord for Big { ...@@ -386,11 +349,7 @@ impl ::core::cmp::Ord for Big {
::core::cmp::Ordering::Equal => ::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&self.b7, &other.b7) { match ::core::cmp::Ord::cmp(&self.b7, &other.b7) {
::core::cmp::Ordering::Equal => ::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&self.b8, &other.b8) { ::core::cmp::Ord::cmp(&self.b8, &other.b8),
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
cmp => cmp, cmp => cmp,
}, },
cmp => cmp, cmp => cmp,
...@@ -416,7 +375,8 @@ struct Packed(u32); ...@@ -416,7 +375,8 @@ struct Packed(u32);
impl ::core::clone::Clone for Packed { impl ::core::clone::Clone for Packed {
#[inline] #[inline]
fn clone(&self) -> Packed { fn clone(&self) -> Packed {
{ let _: ::core::clone::AssertParamIsClone<u32>; *self } let _: ::core::clone::AssertParamIsClone<u32>;
*self
} }
} }
#[automatically_derived] #[automatically_derived]
...@@ -426,12 +386,10 @@ impl ::core::marker::Copy for Packed { } ...@@ -426,12 +386,10 @@ impl ::core::marker::Copy for Packed { }
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
impl ::core::fmt::Debug for Packed { impl ::core::fmt::Debug for Packed {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
{
let Self(__self_0_0) = *self; let Self(__self_0_0) = *self;
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Packed", ::core::fmt::Formatter::debug_tuple_field1_finish(f, "Packed",
&&__self_0_0) &&__self_0_0)
} }
}
} }
#[automatically_derived] #[automatically_derived]
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
...@@ -443,10 +401,8 @@ impl ::core::default::Default for Packed { ...@@ -443,10 +401,8 @@ impl ::core::default::Default for Packed {
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
impl ::core::hash::Hash for Packed { impl ::core::hash::Hash for Packed {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () { fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
{
let Self(__self_0_0) = *self; let Self(__self_0_0) = *self;
{ ::core::hash::Hash::hash(&__self_0_0, state) } ::core::hash::Hash::hash(&__self_0_0, state)
}
} }
} }
impl ::core::marker::StructuralPartialEq for Packed {} impl ::core::marker::StructuralPartialEq for Packed {}
...@@ -455,20 +411,16 @@ impl ::core::marker::StructuralPartialEq for Packed {} ...@@ -455,20 +411,16 @@ impl ::core::marker::StructuralPartialEq for Packed {}
impl ::core::cmp::PartialEq for Packed { impl ::core::cmp::PartialEq for Packed {
#[inline] #[inline]
fn eq(&self, other: &Packed) -> bool { fn eq(&self, other: &Packed) -> bool {
{
let Self(__self_0_0) = *self; let Self(__self_0_0) = *self;
let Self(__self_1_0) = *other; let Self(__self_1_0) = *other;
__self_0_0 == __self_1_0 __self_0_0 == __self_1_0
} }
}
#[inline] #[inline]
fn ne(&self, other: &Packed) -> bool { fn ne(&self, other: &Packed) -> bool {
{
let Self(__self_0_0) = *self; let Self(__self_0_0) = *self;
let Self(__self_1_0) = *other; let Self(__self_1_0) = *other;
__self_0_0 != __self_1_0 __self_0_0 != __self_1_0
} }
}
} }
impl ::core::marker::StructuralEq for Packed {} impl ::core::marker::StructuralEq for Packed {}
#[automatically_derived] #[automatically_derived]
...@@ -478,7 +430,7 @@ impl ::core::cmp::Eq for Packed { ...@@ -478,7 +430,7 @@ impl ::core::cmp::Eq for Packed {
#[doc(hidden)] #[doc(hidden)]
#[no_coverage] #[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () { fn assert_receiver_is_total_eq(&self) -> () {
{ let _: ::core::cmp::AssertParamIsEq<u32>; } let _: ::core::cmp::AssertParamIsEq<u32>;
} }
} }
#[automatically_derived] #[automatically_derived]
@@ -487,16 +439,9 @@ impl ::core::cmp::PartialOrd for Packed {
#[inline]
fn partial_cmp(&self, other: &Packed)
-> ::core::option::Option<::core::cmp::Ordering> {
let Self(__self_0_0) = *self;
let Self(__self_1_0) = *other;
::core::cmp::PartialOrd::partial_cmp(&__self_0_0, &__self_1_0)
}
}
#[automatically_derived]
@@ -504,13 +449,164 @@ impl ::core::cmp::Ord for Packed {
impl ::core::cmp::Ord for Packed {
#[inline]
fn cmp(&self, other: &Packed) -> ::core::cmp::Ordering {
let Self(__self_0_0) = *self;
let Self(__self_1_0) = *other;
::core::cmp::Ord::cmp(&__self_0_0, &__self_1_0)
}
}
// An empty enum.
enum Enum0 {}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::clone::Clone for Enum0 {
#[inline]
fn clone(&self) -> Enum0 { *self }
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::marker::Copy for Enum0 { }
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::fmt::Debug for Enum0 {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
unsafe { ::core::intrinsics::unreachable() }
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Enum0 {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
unsafe { ::core::intrinsics::unreachable() }
}
}
impl ::core::marker::StructuralPartialEq for Enum0 {}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialEq for Enum0 {
#[inline]
fn eq(&self, other: &Enum0) -> bool {
unsafe { ::core::intrinsics::unreachable() }
}
}
impl ::core::marker::StructuralEq for Enum0 {}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Eq for Enum0 {
#[inline]
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () {}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialOrd for Enum0 {
#[inline]
fn partial_cmp(&self, other: &Enum0)
-> ::core::option::Option<::core::cmp::Ordering> {
unsafe { ::core::intrinsics::unreachable() }
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Ord for Enum0 {
#[inline]
fn cmp(&self, other: &Enum0) -> ::core::cmp::Ordering {
unsafe { ::core::intrinsics::unreachable() }
}
}
// A single-variant enum.
enum Enum1 {
Single {
x: u32,
},
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::clone::Clone for Enum1 {
#[inline]
fn clone(&self) -> Enum1 {
match &*self {
&Enum1::Single { x: ref __self_0 } =>
Enum1::Single { x: ::core::clone::Clone::clone(&*__self_0) },
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::fmt::Debug for Enum1 {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match &*self {
&Enum1::Single { x: ref __self_0 } =>
::core::fmt::Formatter::debug_struct_field1_finish(f,
"Single", "x", &&*__self_0),
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Enum1 {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
match &*self {
&Enum1::Single { x: ref __self_0 } => {
::core::hash::Hash::hash(&*__self_0, state)
}
}
}
}
impl ::core::marker::StructuralPartialEq for Enum1 {}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialEq for Enum1 {
#[inline]
fn eq(&self, other: &Enum1) -> bool {
match (&*self, &*other) {
(&Enum1::Single { x: ref __self_0 }, &Enum1::Single {
x: ref __arg_1_0 }) => *__self_0 == *__arg_1_0,
}
}
#[inline]
fn ne(&self, other: &Enum1) -> bool {
match (&*self, &*other) {
(&Enum1::Single { x: ref __self_0 }, &Enum1::Single {
x: ref __arg_1_0 }) => *__self_0 != *__arg_1_0,
}
}
}
impl ::core::marker::StructuralEq for Enum1 {}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Eq for Enum1 {
#[inline]
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () {
let _: ::core::cmp::AssertParamIsEq<u32>;
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialOrd for Enum1 {
#[inline]
fn partial_cmp(&self, other: &Enum1)
-> ::core::option::Option<::core::cmp::Ordering> {
match (&*self, &*other) {
(&Enum1::Single { x: ref __self_0 }, &Enum1::Single {
x: ref __arg_1_0 }) =>
::core::cmp::PartialOrd::partial_cmp(&*__self_0, &*__arg_1_0),
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Ord for Enum1 {
#[inline]
fn cmp(&self, other: &Enum1) -> ::core::cmp::Ordering {
match (&*self, &*other) {
(&Enum1::Single { x: ref __self_0 }, &Enum1::Single {
x: ref __arg_1_0 }) =>
::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0),
}
}
}
@@ -527,7 +623,7 @@ enum Fieldless {
#[allow(unused_qualifications)]
impl ::core::clone::Clone for Fieldless {
#[inline]
fn clone(&self) -> Fieldless { *self }
}
#[automatically_derived]
#[allow(unused_qualifications)]
@@ -536,10 +632,10 @@ impl ::core::marker::Copy for Fieldless { }
#[allow(unused_qualifications)]
impl ::core::fmt::Debug for Fieldless {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match &*self {
&Fieldless::A => ::core::fmt::Formatter::write_str(f, "A"),
&Fieldless::B => ::core::fmt::Formatter::write_str(f, "B"),
&Fieldless::C => ::core::fmt::Formatter::write_str(f, "C"),
}
}
}
@@ -553,7 +649,7 @@ impl ::core::default::Default for Fieldless {
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Fieldless {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
match &*self {
_ => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state)
@@ -567,14 +663,12 @@ impl ::core::marker::StructuralPartialEq for Fieldless {}
impl ::core::cmp::PartialEq for Fieldless {
#[inline]
fn eq(&self, other: &Fieldless) -> bool {
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) { _ => true, }
} else { false }
}
}
impl ::core::marker::StructuralEq for Fieldless {}
#[automatically_derived]
@@ -583,7 +677,7 @@ impl ::core::cmp::Eq for Fieldless {
#[inline]
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () {}
}
#[automatically_derived]
#[allow(unused_qualifications)]
@@ -591,7 +685,6 @@ impl ::core::cmp::PartialOrd for Fieldless {
#[inline]
fn partial_cmp(&self, other: &Fieldless)
-> ::core::option::Option<::core::cmp::Ordering> {
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
@@ -600,9 +693,7 @@ impl ::core::cmp::PartialOrd for Fieldless {
::core::option::Option::Some(::core::cmp::Ordering::Equal),
}
} else {
::core::cmp::PartialOrd::partial_cmp(&__self_vi, &__arg_1_vi)
}
}
}
@@ -611,16 +702,12 @@ impl ::core::cmp::PartialOrd for Fieldless {
impl ::core::cmp::Ord for Fieldless {
#[inline]
fn cmp(&self, other: &Fieldless) -> ::core::cmp::Ordering {
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) { _ => ::core::cmp::Ordering::Equal, }
} else { ::core::cmp::Ord::cmp(&__self_vi, &__arg_1_vi) }
}
}
// An enum with multiple fieldless and fielded variants.
@@ -640,13 +727,9 @@ enum Mixed {
impl ::core::clone::Clone for Mixed {
#[inline]
fn clone(&self) -> Mixed {
let _: ::core::clone::AssertParamIsClone<u32>;
*self
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
@@ -655,13 +738,13 @@ impl ::core::marker::Copy for Mixed { }
#[allow(unused_qualifications)]
impl ::core::fmt::Debug for Mixed {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match &*self {
&Mixed::P => ::core::fmt::Formatter::write_str(f, "P"),
&Mixed::Q => ::core::fmt::Formatter::write_str(f, "Q"),
&Mixed::R(ref __self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "R",
&&*__self_0),
&Mixed::S { d1: ref __self_0, d2: ref __self_1 } =>
::core::fmt::Formatter::debug_struct_field2_finish(f, "S",
"d1", &&*__self_0, "d2", &&*__self_1),
}
@@ -677,13 +760,13 @@ impl ::core::default::Default for Mixed {
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Mixed {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
match &*self {
&Mixed::R(ref __self_0) => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(&*__self_0, state)
}
&Mixed::S { d1: ref __self_0, d2: ref __self_1 } => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(&*__self_0, state);
@@ -702,7 +785,6 @@ impl ::core::marker::StructuralPartialEq for Mixed {}
impl ::core::cmp::PartialEq for Mixed {
#[inline]
fn eq(&self, other: &Mixed) -> bool {
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
@@ -716,10 +798,8 @@ impl ::core::cmp::PartialEq for Mixed {
}
} else { false }
}
#[inline]
fn ne(&self, other: &Mixed) -> bool {
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
@@ -733,7 +813,6 @@ impl ::core::cmp::PartialEq for Mixed {
}
} else { true }
}
}
impl ::core::marker::StructuralEq for Mixed {}
#[automatically_derived]
@@ -743,12 +822,8 @@ impl ::core::cmp::Eq for Mixed {
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () {
let _: ::core::cmp::AssertParamIsEq<u32>;
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
@@ -756,41 +831,28 @@ impl ::core::cmp::PartialOrd for Mixed {
#[inline]
fn partial_cmp(&self, other: &Mixed)
-> ::core::option::Option<::core::cmp::Ordering> {
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0),
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
&Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
match ::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::cmp::PartialOrd::partial_cmp(&*__self_1,
&*__arg_1_1),
cmp => cmp,
},
_ =>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
}
} else {
::core::cmp::PartialOrd::partial_cmp(&__self_vi, &__arg_1_vi)
}
}
}
@@ -799,33 +861,23 @@ impl ::core::cmp::PartialOrd for Mixed {
impl ::core::cmp::Ord for Mixed {
#[inline]
fn cmp(&self, other: &Mixed) -> ::core::cmp::Ordering {
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0),
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
&Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
match ::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ord::cmp(&*__self_1, &*__arg_1_1),
cmp => cmp,
},
_ => ::core::cmp::Ordering::Equal,
}
} else { ::core::cmp::Ord::cmp(&__self_vi, &__arg_1_vi) }
}
}
// An enum with no fieldless variants. Note that `Default` cannot be derived
@@ -836,12 +888,12 @@ enum Fielded { X(u32), Y(bool), Z(Option<i32>), }
impl ::core::clone::Clone for Fielded {
#[inline]
fn clone(&self) -> Fielded {
match &*self {
&Fielded::X(ref __self_0) =>
Fielded::X(::core::clone::Clone::clone(&*__self_0)),
&Fielded::Y(ref __self_0) =>
Fielded::Y(::core::clone::Clone::clone(&*__self_0)),
&Fielded::Z(ref __self_0) =>
Fielded::Z(::core::clone::Clone::clone(&*__self_0)),
}
}
@@ -850,14 +902,14 @@ impl ::core::clone::Clone for Fielded {
#[allow(unused_qualifications)]
impl ::core::fmt::Debug for Fielded {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match &*self {
&Fielded::X(ref __self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "X",
&&*__self_0),
&Fielded::Y(ref __self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Y",
&&*__self_0),
&Fielded::Z(ref __self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Z",
&&*__self_0),
}
@@ -867,18 +919,18 @@ impl ::core::fmt::Debug for Fielded {
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Fielded {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
match &*self {
&Fielded::X(ref __self_0) => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(&*__self_0, state)
}
&Fielded::Y(ref __self_0) => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(&*__self_0, state)
}
&Fielded::Z(ref __self_0) => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(&*__self_0, state)
@@ -892,7 +944,6 @@ impl ::core::marker::StructuralPartialEq for Fielded {}
impl ::core::cmp::PartialEq for Fielded {
#[inline]
fn eq(&self, other: &Fielded) -> bool {
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
@@ -907,10 +958,8 @@ impl ::core::cmp::PartialEq for Fielded {
}
} else { false }
}
#[inline]
fn ne(&self, other: &Fielded) -> bool {
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
@@ -925,7 +974,6 @@ impl ::core::cmp::PartialEq for Fielded {
}
} else { true }
}
}
impl ::core::marker::StructuralEq for Fielded {}
#[automatically_derived]
@@ -935,12 +983,10 @@ impl ::core::cmp::Eq for Fielded {
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () {
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<bool>;
let _: ::core::cmp::AssertParamIsEq<Option<i32>>;
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
@@ -948,41 +994,23 @@ impl ::core::cmp::PartialOrd for Fielded {
#[inline]
fn partial_cmp(&self, other: &Fielded)
-> ::core::option::Option<::core::cmp::Ordering> {
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0),
(&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0),
(&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0),
_ => unsafe { ::core::intrinsics::unreachable() }
}
} else {
::core::cmp::PartialOrd::partial_cmp(&__self_vi, &__arg_1_vi)
}
}
}
@@ -991,32 +1019,37 @@ impl ::core::cmp::Ord for Fielded {
impl ::core::cmp::Ord for Fielded {
#[inline]
fn cmp(&self, other: &Fielded) -> ::core::cmp::Ordering {
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0),
(&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0),
(&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0),
_ => unsafe { ::core::intrinsics::unreachable() }
}
} else { ::core::cmp::Ord::cmp(&__self_vi, &__arg_1_vi) }
}
}
// A union. Most builtin traits are not derivable for unions.
pub union Union {
pub b: bool,
pub u: u32,
pub i: i32,
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::clone::Clone for Union {
#[inline]
fn clone(&self) -> Union {
let _: ::core::clone::AssertParamIsCopy<Self>;
*self
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::marker::Copy for Union { }