diff --git a/compiler/rustc_builtin_macros/src/deriving/clone.rs b/compiler/rustc_builtin_macros/src/deriving/clone.rs index 0a55d4e0ce0a6..9cd72ed0c67b2 100644 --- a/compiler/rustc_builtin_macros/src/deriving/clone.rs +++ b/compiler/rustc_builtin_macros/src/deriving/clone.rs @@ -80,7 +80,7 @@ pub fn expand_deriving_clone( name: sym::clone, generics: Bounds::empty(), explicit_self: true, - args: Vec::new(), + nonself_args: Vec::new(), ret_ty: Self_, attributes: attrs, unify_fieldless_variants: false, @@ -160,8 +160,8 @@ fn cs_clone( let ctor_path; let all_fields; let fn_path = cx.std_path(&[sym::clone, sym::Clone, sym::clone]); - let subcall = |cx: &mut ExtCtxt<'_>, field: &FieldInfo<'_>| { - let args = vec![cx.expr_addr_of(field.span, field.self_.clone())]; + let subcall = |cx: &mut ExtCtxt<'_>, field: &FieldInfo| { + let args = vec![cx.expr_addr_of(field.span, field.self_expr.clone())]; cx.expr_call_global(field.span, fn_path.clone(), args) }; diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs index c1a2ebcc14601..4e798bf6acb10 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs @@ -32,7 +32,7 @@ pub fn expand_deriving_eq( name: sym::assert_receiver_is_total_eq, generics: Bounds::empty(), explicit_self: true, - args: vec![], + nonself_args: vec![], ret_ty: Unit, attributes: attrs, unify_fieldless_variants: true, diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs index bec59aac5eee1..859e995356e80 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs @@ -2,8 +2,7 @@ use crate::deriving::generic::ty::*; use crate::deriving::generic::*; use crate::deriving::path_std; -use rustc_ast::ptr::P; -use rustc_ast::{self as ast, MetaItem}; +use rustc_ast::MetaItem; use rustc_expand::base::{Annotatable, ExtCtxt}; use rustc_span::symbol::{sym, Ident}; use rustc_span::Span; @@ -28,7 +27,7 @@ pub fn expand_deriving_ord( name: sym::cmp, generics: Bounds::empty(), explicit_self: true, - args: vec![(self_ref(), sym::other)], + nonself_args: vec![(self_ref(), sym::other)], ret_ty: Path(path_std!(cmp::Ordering)), attributes: attrs, unify_fieldless_variants: true, @@ -40,84 +39,54 @@ pub fn expand_deriving_ord( trait_def.expand(cx, mitem, item, push) } -pub fn ordering_collapsed( - cx: &mut ExtCtxt<'_>, - span: Span, - self_arg_tags: &[Ident], -) -> P { - let lft = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[0])); - let rgt = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[1])); - let fn_cmp_path = cx.std_path(&[sym::cmp, sym::Ord, sym::cmp]); - cx.expr_call_global(span, fn_cmp_path, vec![lft, rgt]) -} - pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr { let test_id = Ident::new(sym::cmp, span); - let equals_path = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal])); - + let equal_path = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal])); let cmp_path = cx.std_path(&[sym::cmp, sym::Ord, sym::cmp]); // Builds: // - // match ::std::cmp::Ord::cmp(&self_field1, &other_field1) { - // ::std::cmp::Ordering::Equal => - // match ::std::cmp::Ord::cmp(&self_field2, &other_field2) { - // ::std::cmp::Ordering::Equal => { - // ... 
+ // match ::core::cmp::Ord::cmp(&self.x, &other.x) { + // ::std::cmp::Ordering::Equal => + // ::core::cmp::Ord::cmp(&self.y, &other.y), + // cmp => cmp, // } - // cmp => cmp - // }, - // cmp => cmp - // } - // let expr = cs_fold( // foldr nests the if-elses correctly, leaving the first field // as the outermost one, and the last as the innermost. false, - |cx, span, old, self_f, other_fs| { - // match new { - // ::std::cmp::Ordering::Equal => old, - // cmp => cmp - // } - let new = { - let [other_f] = other_fs else { - cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"); - }; - let args = - vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())]; - cx.expr_call_global(span, cmp_path.clone(), args) - }; - - let eq_arm = cx.arm(span, cx.pat_path(span, equals_path.clone()), old); - let neq_arm = cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id)); - - cx.expr_match(span, new, vec![eq_arm, neq_arm]) - }, - |cx, args| match args { - Some((span, self_f, other_fs)) => { - let new = { - let [other_f] = other_fs else { - cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"); - }; - let args = - vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())]; - cx.expr_call_global(span, cmp_path.clone(), args) - }; - - new - } - None => cx.expr_path(equals_path.clone()), - }, - Box::new(|cx, span, tag_tuple| { - if tag_tuple.len() != 2 { - cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`") - } else { - ordering_collapsed(cx, span, tag_tuple) - } - }), cx, span, substr, + |cx, fold| match fold { + CsFold::Single(field) => { + let [other_expr] = &field.other_selflike_exprs[..] else { + cx.span_bug(field.span, "not exactly 2 arguments in `derive(Ord)`"); + }; + let args = vec![ + cx.expr_addr_of(field.span, field.self_expr.clone()), + cx.expr_addr_of(field.span, other_expr.clone()), + ]; + cx.expr_call_global(field.span, cmp_path.clone(), args) + } + CsFold::Combine(span, expr1, expr2) => { + let eq_arm = cx.arm(span, cx.pat_path(span, equal_path.clone()), expr1); + let neq_arm = + cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id)); + cx.expr_match(span, expr2, vec![eq_arm, neq_arm]) + } + CsFold::Fieldless => cx.expr_path(equal_path.clone()), + CsFold::EnumNonMatching(span, tag_tuple) => { + if tag_tuple.len() != 2 { + cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`") + } else { + let lft = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[0])); + let rgt = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[1])); + let fn_cmp_path = cx.std_path(&[sym::cmp, sym::Ord, sym::cmp]); + cx.expr_call_global(span, fn_cmp_path, vec![lft, rgt]) + } + } + }, ); BlockOrExpr::new_expr(expr) } diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs index b44c290d12f56..724c639984cca 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs @@ -2,8 +2,7 @@ use crate::deriving::generic::ty::*; use crate::deriving::generic::*; use crate::deriving::{path_local, path_std}; -use rustc_ast::ptr::P; -use rustc_ast::{BinOpKind, Expr, MetaItem}; +use rustc_ast::{BinOpKind, MetaItem}; use rustc_expand::base::{Annotatable, ExtCtxt}; use rustc_span::symbol::sym; use rustc_span::Span; @@ -23,33 +22,22 @@ pub fn expand_deriving_partial_eq( combiner: BinOpKind, base: bool, ) -> BlockOrExpr { - let op = |cx: &mut ExtCtxt<'_>, span: Span, self_f: P, 
other_fs: &[P]| { - let [other_f] = other_fs else { - cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`"); - }; - - cx.expr_binary(span, op, self_f, other_f.clone()) - }; - let expr = cs_fold( true, // use foldl - |cx, span, subexpr, self_f, other_fs| { - let eq = op(cx, span, self_f, other_fs); - cx.expr_binary(span, combiner, subexpr, eq) - }, - |cx, args| { - match args { - Some((span, self_f, other_fs)) => { - // Special-case the base case to generate cleaner code. - op(cx, span, self_f, other_fs) - } - None => cx.expr_bool(span, base), - } - }, - Box::new(|cx, span, _| cx.expr_bool(span, !base)), cx, span, substr, + |cx, fold| match fold { + CsFold::Single(field) => { + let [other_expr] = &field.other_selflike_exprs[..] else { + cx.span_bug(field.span, "not exactly 2 arguments in `derive(PartialEq)`"); + }; + cx.expr_binary(field.span, op, field.self_expr.clone(), other_expr.clone()) + } + CsFold::Combine(span, expr1, expr2) => cx.expr_binary(span, combiner, expr1, expr2), + CsFold::Fieldless => cx.expr_bool(span, base), + CsFold::EnumNonMatching(span, _tag_tuple) => cx.expr_bool(span, !base), + }, ); BlockOrExpr::new_expr(expr) } @@ -69,7 +57,7 @@ pub fn expand_deriving_partial_eq( name: $name, generics: Bounds::empty(), explicit_self: true, - args: vec![(self_ref(), sym::other)], + nonself_args: vec![(self_ref(), sym::other)], ret_ty: Path(path_local!(bool)), attributes: attrs, unify_fieldless_variants: true, diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs index 5769f08f49482..3f9843922dad7 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs @@ -2,8 +2,7 @@ use crate::deriving::generic::ty::*; use crate::deriving::generic::*; use crate::deriving::{path_std, pathvec_std}; -use rustc_ast::ptr::P; -use rustc_ast::{Expr, MetaItem}; +use rustc_ast::MetaItem; use rustc_expand::base::{Annotatable, ExtCtxt}; use rustc_span::symbol::{sym, Ident}; use rustc_span::Span; @@ -26,7 +25,7 @@ pub fn expand_deriving_partial_ord( name: sym::partial_cmp, generics: Bounds::empty(), explicit_self: true, - args: vec![(self_ref(), sym::other)], + nonself_args: vec![(self_ref(), sym::other)], ret_ty, attributes: attrs, unify_fieldless_variants: true, @@ -50,79 +49,54 @@ pub fn expand_deriving_partial_ord( pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr { let test_id = Ident::new(sym::cmp, span); - let ordering = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal])); - let ordering_expr = cx.expr_path(ordering.clone()); - + let equal_path = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal])); let partial_cmp_path = cx.std_path(&[sym::cmp, sym::PartialOrd, sym::partial_cmp]); // Builds: // - // match ::std::cmp::PartialOrd::partial_cmp(&self_field1, &other_field1) { - // ::std::option::Option::Some(::std::cmp::Ordering::Equal) => - // match ::std::cmp::PartialOrd::partial_cmp(&self_field2, &other_field2) { - // ::std::option::Option::Some(::std::cmp::Ordering::Equal) => { - // ... 
- // } - // cmp => cmp - // }, - // cmp => cmp + // match ::core::cmp::PartialOrd::partial_cmp(&self.x, &other.x) { + // ::core::option::Option::Some(::core::cmp::Ordering::Equal) => + // ::core::cmp::PartialOrd::partial_cmp(&self.y, &other.y), + // cmp => cmp, // } - // let expr = cs_fold( // foldr nests the if-elses correctly, leaving the first field // as the outermost one, and the last as the innermost. false, - |cx, span, old, self_f, other_fs| { - // match new { - // Some(::std::cmp::Ordering::Equal) => old, - // cmp => cmp - // } - - let new = { - let [other_f] = other_fs else { - cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"); - }; - - let args = - vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())]; - - cx.expr_call_global(span, partial_cmp_path.clone(), args) - }; - - let eq_arm = cx.arm(span, cx.pat_some(span, cx.pat_path(span, ordering.clone())), old); - let neq_arm = cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id)); - - cx.expr_match(span, new, vec![eq_arm, neq_arm]) - }, - |cx: &mut ExtCtxt<'_>, args: Option<(Span, P, &[P])>| match args { - Some((span, self_f, other_fs)) => { - let new = { - let [other_f] = other_fs else { - cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"); - }; - let args = - vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())]; - cx.expr_call_global(span, partial_cmp_path.clone(), args) - }; - - new - } - None => cx.expr_some(span, ordering_expr.clone()), - }, - Box::new(|cx, span, tag_tuple| { - if tag_tuple.len() != 2 { - cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`") - } else { - let lft = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[0])); - let rgt = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[1])); - let fn_partial_cmp_path = - cx.std_path(&[sym::cmp, sym::PartialOrd, sym::partial_cmp]); - cx.expr_call_global(span, fn_partial_cmp_path, vec![lft, rgt]) - } - }), cx, span, substr, + |cx, fold| match fold { + CsFold::Single(field) => { + let [other_expr] = &field.other_selflike_exprs[..] 
else { + cx.span_bug(field.span, "not exactly 2 arguments in `derive(Ord)`"); + }; + let args = vec![ + cx.expr_addr_of(field.span, field.self_expr.clone()), + cx.expr_addr_of(field.span, other_expr.clone()), + ]; + cx.expr_call_global(field.span, partial_cmp_path.clone(), args) + } + CsFold::Combine(span, expr1, expr2) => { + let eq_arm = + cx.arm(span, cx.pat_some(span, cx.pat_path(span, equal_path.clone())), expr1); + let neq_arm = + cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id)); + cx.expr_match(span, expr2, vec![eq_arm, neq_arm]) + } + CsFold::Fieldless => cx.expr_some(span, cx.expr_path(equal_path.clone())), + CsFold::EnumNonMatching(span, tag_tuple) => { + if tag_tuple.len() != 2 { + cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`") + } else { + let lft = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[0])); + let rgt = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[1])); + let fn_partial_cmp_path = + cx.std_path(&[sym::cmp, sym::PartialOrd, sym::partial_cmp]); + cx.expr_call_global(span, fn_partial_cmp_path, vec![lft, rgt]) + } + } + }, ); BlockOrExpr::new_expr(expr) } diff --git a/compiler/rustc_builtin_macros/src/deriving/debug.rs b/compiler/rustc_builtin_macros/src/deriving/debug.rs index e04898287608b..b99198054def8 100644 --- a/compiler/rustc_builtin_macros/src/deriving/debug.rs +++ b/compiler/rustc_builtin_macros/src/deriving/debug.rs @@ -28,7 +28,7 @@ pub fn expand_deriving_debug( name: sym::fmt, generics: Bounds::empty(), explicit_self: true, - args: vec![(fmtr, sym::f)], + nonself_args: vec![(fmtr, sym::f)], ret_ty: Path(path_std!(fmt::Result)), attributes: Vec::new(), unify_fieldless_variants: false, @@ -53,7 +53,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_> // We want to make sure we have the ctxt set so that we can use unstable methods let span = cx.with_def_site_ctxt(span); let name = cx.expr_lit(span, ast::LitKind::Str(ident.name, ast::StrStyle::Cooked)); - let fmt = substr.nonself_args[0].clone(); + let fmt = substr.nonselflike_args[0].clone(); // Struct and tuples are similar enough that we use the same code for both, // with some extra pieces for structs due to the field names. 
@@ -96,7 +96,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_> args.push(name); } // Use double indirection to make sure this works for unsized types - let field = cx.expr_addr_of(field.span, field.self_.clone()); + let field = cx.expr_addr_of(field.span, field.self_expr.clone()); let field = cx.expr_addr_of(field.span, field); args.push(field); } @@ -116,7 +116,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_> } // Use double indirection to make sure this works for unsized types - let value_ref = cx.expr_addr_of(field.span, field.self_.clone()); + let value_ref = cx.expr_addr_of(field.span, field.self_expr.clone()); value_exprs.push(cx.expr_addr_of(field.span, value_ref)); } diff --git a/compiler/rustc_builtin_macros/src/deriving/decodable.rs b/compiler/rustc_builtin_macros/src/deriving/decodable.rs index b9f2a75082224..d688143a2a5c6 100644 --- a/compiler/rustc_builtin_macros/src/deriving/decodable.rs +++ b/compiler/rustc_builtin_macros/src/deriving/decodable.rs @@ -36,7 +36,10 @@ pub fn expand_deriving_rustc_decodable( )], }, explicit_self: false, - args: vec![(Ref(Box::new(Path(Path::new_local(typaram))), Mutability::Mut), sym::d)], + nonself_args: vec![( + Ref(Box::new(Path(Path::new_local(typaram))), Mutability::Mut), + sym::d, + )], ret_ty: Path(Path::new_( pathvec_std!(result::Result), vec![ @@ -63,7 +66,7 @@ fn decodable_substructure( substr: &Substructure<'_>, krate: Symbol, ) -> BlockOrExpr { - let decoder = substr.nonself_args[0].clone(); + let decoder = substr.nonselflike_args[0].clone(); let recurse = vec![ Ident::new(krate, trait_span), Ident::new(sym::Decodable, trait_span), diff --git a/compiler/rustc_builtin_macros/src/deriving/default.rs b/compiler/rustc_builtin_macros/src/deriving/default.rs index 90d5cdbc0a078..5177690917f21 100644 --- a/compiler/rustc_builtin_macros/src/deriving/default.rs +++ b/compiler/rustc_builtin_macros/src/deriving/default.rs @@ -34,7 +34,7 @@ pub fn expand_deriving_default( name: kw::Default, generics: Bounds::empty(), explicit_self: false, - args: Vec::new(), + nonself_args: Vec::new(), ret_ty: Self_, attributes: attrs, unify_fieldless_variants: false, diff --git a/compiler/rustc_builtin_macros/src/deriving/encodable.rs b/compiler/rustc_builtin_macros/src/deriving/encodable.rs index 0dfce114bfc53..49dbe51f7626c 100644 --- a/compiler/rustc_builtin_macros/src/deriving/encodable.rs +++ b/compiler/rustc_builtin_macros/src/deriving/encodable.rs @@ -120,7 +120,10 @@ pub fn expand_deriving_rustc_encodable( )], }, explicit_self: true, - args: vec![(Ref(Box::new(Path(Path::new_local(typaram))), Mutability::Mut), sym::s)], + nonself_args: vec![( + Ref(Box::new(Path(Path::new_local(typaram))), Mutability::Mut), + sym::s, + )], ret_ty: Path(Path::new_( pathvec_std!(result::Result), vec![ @@ -147,7 +150,7 @@ fn encodable_substructure( substr: &Substructure<'_>, krate: Symbol, ) -> BlockOrExpr { - let encoder = substr.nonself_args[0].clone(); + let encoder = substr.nonselflike_args[0].clone(); // throw an underscore in front to suppress unused variable warnings let blkarg = Ident::new(sym::_e, trait_span); let blkencoder = cx.expr_ident(trait_span, blkarg); @@ -165,12 +168,12 @@ fn encodable_substructure( let fn_emit_struct_field_path = cx.def_site_path(&[sym::rustc_serialize, sym::Encoder, sym::emit_struct_field]); let mut stmts = Vec::new(); - for (i, &FieldInfo { name, ref self_, span, .. }) in fields.iter().enumerate() { + for (i, &FieldInfo { name, ref self_expr, span, .. 
}) in fields.iter().enumerate() {
                 let name = match name {
                     Some(id) => id.name,
                     None => Symbol::intern(&format!("_field{}", i)),
                 };
-                let self_ref = cx.expr_addr_of(span, self_.clone());
+                let self_ref = cx.expr_addr_of(span, self_expr.clone());
                 let enc = cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]);
                 let lambda = cx.lambda1(span, enc, blkarg);
                 let call = cx.expr_call_global(
@@ -234,8 +237,8 @@ fn encodable_substructure(
             let mut stmts = Vec::new();
             if !fields.is_empty() {
                 let last = fields.len() - 1;
-                for (i, &FieldInfo { ref self_, span, .. }) in fields.iter().enumerate() {
-                    let self_ref = cx.expr_addr_of(span, self_.clone());
+                for (i, &FieldInfo { ref self_expr, span, .. }) in fields.iter().enumerate() {
+                    let self_ref = cx.expr_addr_of(span, self_expr.clone());
                     let enc =
                         cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]);
                     let lambda = cx.lambda1(span, enc, blkarg);
diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs
index e618255b0c6fd..74e18bffc2ec9 100644
--- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs
+++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs
@@ -227,8 +227,8 @@ pub struct MethodDef<'a> {
     /// Is there is a `&self` argument? If not, it is a static function.
     pub explicit_self: bool,
 
-    /// Arguments other than the self argument
-    pub args: Vec<(Ty, Symbol)>,
+    /// Arguments other than the self argument.
+    pub nonself_args: Vec<(Ty, Symbol)>,
 
     /// Returns type
     pub ret_ty: Ty,
@@ -245,25 +245,24 @@
 pub struct Substructure<'a> {
     /// ident of self
     pub type_ident: Ident,
-    /// verbatim access to any non-self arguments
-    pub nonself_args: &'a [P<Expr>],
+    /// Verbatim access to any non-selflike arguments, i.e. arguments that
+    /// don't have type `&Self`.
+    pub nonselflike_args: &'a [P<Expr>],
     pub fields: &'a SubstructureFields<'a>,
 }
 
 /// Summary of the relevant parts of a struct/enum field.
-pub struct FieldInfo<'a> {
+pub struct FieldInfo {
     pub span: Span,
     /// None for tuple structs/normal enum variants, Some for normal
     /// structs/struct enum variants.
     pub name: Option<Ident>,
     /// The expression corresponding to this field of `self`
     /// (specifically, a reference to it).
-    pub self_: P<Expr>,
+    pub self_expr: P<Expr>,
     /// The expressions corresponding to references to this field in
-    /// the other `Self` arguments.
-    pub other: Vec<P<Expr>>,
-    /// The attributes on the field
-    pub attrs: &'a [ast::Attribute],
+    /// the other selflike arguments.
+    pub other_selflike_exprs: Vec<P<Expr>>,
 }
 
 /// Fields for a static method
@@ -276,11 +275,11 @@ pub enum StaticFields {
 
 /// A summary of the possible sets of fields.
 pub enum SubstructureFields<'a> {
-    Struct(&'a ast::VariantData, Vec<FieldInfo<'a>>),
+    Struct(&'a ast::VariantData, Vec<FieldInfo>),
     /// Matching variants of the enum: variant index, variant count, ast::Variant,
     /// fields: the field name is only non-`None` in the case of a struct
    /// variant.
-    EnumMatching(usize, usize, &'a ast::Variant, Vec<FieldInfo<'a>>),
+    EnumMatching(usize, usize, &'a ast::Variant, Vec<FieldInfo>),
 
     /// Non-matching variants of the enum, but with all state hidden from the
     /// consequent code. The field is a list of `Ident`s bound to the variant
@@ -298,11 +297,6 @@ pub enum SubstructureFields<'a> {
 
 pub type CombineSubstructureFunc<'a> =
     Box<dyn FnMut(&mut ExtCtxt<'_>, Span, &Substructure<'_>) -> BlockOrExpr + 'a>;
 
-/// Deal with non-matching enum variants. The slice is the identifiers holding
-/// the variant index value for each of the `Self` arguments.
-pub type EnumNonMatchCollapsedFunc<'a> = - Box, Span, &[Ident]) -> P + 'a>; - pub fn combine_substructure( f: CombineSubstructureFunc<'_>, ) -> RefCell> { @@ -782,8 +776,8 @@ impl<'a> TraitDef<'a> { .methods .iter() .map(|method_def| { - let (explicit_self, self_args, nonself_args, tys) = - method_def.split_self_nonself_args(cx, self, type_ident, generics); + let (explicit_self, selflike_args, nonselflike_args, nonself_arg_tys) = + method_def.extract_arg_details(cx, self, type_ident, generics); let body = if from_scratch || method_def.is_static() { method_def.expand_static_struct_method_body( @@ -791,7 +785,7 @@ impl<'a> TraitDef<'a> { self, struct_def, type_ident, - &nonself_args, + &nonselflike_args, ) } else { method_def.expand_struct_method_body( @@ -799,14 +793,22 @@ impl<'a> TraitDef<'a> { self, struct_def, type_ident, - &self_args, - &nonself_args, + &selflike_args, + &nonselflike_args, use_temporaries, is_packed, ) }; - method_def.create_method(cx, self, type_ident, generics, explicit_self, tys, body) + method_def.create_method( + cx, + self, + type_ident, + generics, + explicit_self, + nonself_arg_tys, + body, + ) }) .collect(); @@ -831,8 +833,8 @@ impl<'a> TraitDef<'a> { .methods .iter() .map(|method_def| { - let (explicit_self, self_args, nonself_args, tys) = - method_def.split_self_nonself_args(cx, self, type_ident, generics); + let (explicit_self, selflike_args, nonselflike_args, nonself_arg_tys) = + method_def.extract_arg_details(cx, self, type_ident, generics); let body = if from_scratch || method_def.is_static() { method_def.expand_static_enum_method_body( @@ -840,7 +842,7 @@ impl<'a> TraitDef<'a> { self, enum_def, type_ident, - &nonself_args, + &nonselflike_args, ) } else { method_def.expand_enum_method_body( @@ -848,12 +850,20 @@ impl<'a> TraitDef<'a> { self, enum_def, type_ident, - self_args, - &nonself_args, + selflike_args, + &nonselflike_args, ) }; - method_def.create_method(cx, self, type_ident, generics, explicit_self, tys, body) + method_def.create_method( + cx, + self, + type_ident, + generics, + explicit_self, + nonself_arg_tys, + body, + ) }) .collect(); @@ -867,11 +877,11 @@ impl<'a> MethodDef<'a> { cx: &mut ExtCtxt<'_>, trait_: &TraitDef<'_>, type_ident: Ident, - nonself_args: &[P], + nonselflike_args: &[P], fields: &SubstructureFields<'_>, ) -> BlockOrExpr { let span = trait_.span; - let substructure = Substructure { type_ident, nonself_args, fields }; + let substructure = Substructure { type_ident, nonselflike_args, fields }; let mut f = self.combine_substructure.borrow_mut(); let f: &mut CombineSubstructureFunc<'_> = &mut *f; f(cx, span, &substructure) @@ -891,49 +901,51 @@ impl<'a> MethodDef<'a> { !self.explicit_self } - fn split_self_nonself_args( + // The return value includes: + // - explicit_self: The `&self` arg, if present. + // - selflike_args: Expressions for `&self` (if present) and also any other + // args with the same type (e.g. the `other` arg in `PartialEq::eq`). + // - nonselflike_args: Expressions for all the remaining args. + // - nonself_arg_tys: Additional information about all the args other than + // `&self`. 
+ fn extract_arg_details( &self, cx: &mut ExtCtxt<'_>, trait_: &TraitDef<'_>, type_ident: Ident, generics: &Generics, ) -> (Option, Vec>, Vec>, Vec<(Ident, P)>) { - let mut self_args = Vec::new(); - let mut nonself_args = Vec::new(); - let mut arg_tys = Vec::new(); + let mut selflike_args = Vec::new(); + let mut nonselflike_args = Vec::new(); + let mut nonself_arg_tys = Vec::new(); let span = trait_.span; - let ast_explicit_self = if self.explicit_self { + let explicit_self = if self.explicit_self { let (self_expr, explicit_self) = ty::get_explicit_self(cx, span); - self_args.push(self_expr); + selflike_args.push(self_expr); Some(explicit_self) } else { None }; - for (ty, name) in self.args.iter() { + for (ty, name) in self.nonself_args.iter() { let ast_ty = ty.to_ty(cx, span, type_ident, generics); let ident = Ident::new(*name, span); - arg_tys.push((ident, ast_ty)); + nonself_arg_tys.push((ident, ast_ty)); let arg_expr = cx.expr_ident(span, ident); - match *ty { - // for static methods, just treat any Self - // arguments as a normal arg - Self_ if !self.is_static() => { - self_args.push(arg_expr); - } - Ref(ref ty, _) if matches!(**ty, Self_) && !self.is_static() => { - self_args.push(cx.expr_deref(span, arg_expr)) - } - _ => { - nonself_args.push(arg_expr); + match ty { + // Selflike (`&Self`) arguments only occur in non-static methods. + Ref(box Self_, _) if !self.is_static() => { + selflike_args.push(cx.expr_deref(span, arg_expr)) } + Self_ => cx.span_bug(span, "`Self` in non-return position"), + _ => nonselflike_args.push(arg_expr), } } - (ast_explicit_self, self_args, nonself_args, arg_tys) + (explicit_self, selflike_args, nonselflike_args, nonself_arg_tys) } fn create_method( @@ -943,7 +955,7 @@ impl<'a> MethodDef<'a> { type_ident: Ident, generics: &Generics, explicit_self: Option, - arg_types: Vec<(Ident, P)>, + nonself_arg_tys: Vec<(Ident, P)>, body: BlockOrExpr, ) -> P { let span = trait_.span; @@ -951,12 +963,13 @@ impl<'a> MethodDef<'a> { let fn_generics = self.generics.to_generics(cx, span, type_ident, generics); let args = { - let self_args = explicit_self.map(|explicit_self| { + let self_arg = explicit_self.map(|explicit_self| { let ident = Ident::with_dummy_span(kw::SelfLower).with_span_pos(span); ast::Param::from_self(ast::AttrVec::default(), explicit_self, ident) }); - let nonself_args = arg_types.into_iter().map(|(name, ty)| cx.param(span, name, ty)); - self_args.into_iter().chain(nonself_args).collect() + let nonself_args = + nonself_arg_tys.into_iter().map(|(name, ty)| cx.param(span, name, ty)); + self_arg.into_iter().chain(nonself_args).collect() }; let ret_type = self.get_ret_ty(cx, trait_, generics, type_ident); @@ -1024,76 +1037,50 @@ impl<'a> MethodDef<'a> { trait_: &TraitDef<'b>, struct_def: &'b VariantData, type_ident: Ident, - self_args: &[P], - nonself_args: &[P], + selflike_args: &[P], + nonselflike_args: &[P], use_temporaries: bool, is_packed: bool, ) -> BlockOrExpr { - let mut raw_fields = Vec::new(); // Vec<[fields of self], [fields of next Self arg], [etc]> let span = trait_.span; - let mut patterns = Vec::new(); - - for (i, self_arg) in self_args.iter().enumerate() { - let ident_exprs = if !is_packed { - trait_.create_struct_field_accesses(cx, self_arg, struct_def) - } else { - // Get the pattern for the let-destructuring. - // - // We could use `type_ident` instead of `Self`, but in the case of a type parameter - // shadowing the struct name, that causes a second, unnecessary E0578 error. 
#97343 - let struct_path = cx.path(span, vec![Ident::new(kw::SelfUpper, type_ident.span)]); - let (pat, ident_exprs) = trait_.create_struct_pattern( - cx, - struct_path, - struct_def, - &format!("__self_{}", i), - ast::Mutability::Not, - use_temporaries, - ); - patterns.push(pat); - ident_exprs - }; - raw_fields.push(ident_exprs); - } - - // transpose raw_fields - let fields = if !raw_fields.is_empty() { - let mut raw_fields = raw_fields.into_iter().map(|v| v.into_iter()); - let first_field = raw_fields.next().unwrap(); - let mut other_fields: Vec> = raw_fields.collect(); - first_field - .map(|(span, opt_id, field, attrs)| FieldInfo { - span: span.with_ctxt(trait_.span.ctxt()), - name: opt_id, - self_: field, - other: other_fields - .iter_mut() - .map(|l| { - let (.., ex, _) = l.next().unwrap(); - ex - }) - .collect(), - attrs, - }) - .collect() - } else { - cx.span_bug(span, "no `self` parameter for method in generic `derive`") + assert!(selflike_args.len() == 1 || selflike_args.len() == 2); + + let mk_body = |cx, selflike_fields| { + self.call_substructure_method( + cx, + trait_, + type_ident, + nonselflike_args, + &Struct(struct_def, selflike_fields), + ) }; - let mut body = self.call_substructure_method( - cx, - trait_, - type_ident, - nonself_args, - &Struct(struct_def, fields), - ); - if !is_packed { - body + let selflike_fields = + trait_.create_struct_field_access_fields(cx, selflike_args, struct_def); + mk_body(cx, selflike_fields) } else { + let prefixes: Vec<_> = + (0..selflike_args.len()).map(|i| format!("__self_{}", i)).collect(); + let selflike_fields = + trait_.create_struct_pattern_fields(cx, struct_def, &prefixes, use_temporaries); + let mut body = mk_body(cx, selflike_fields); + + let struct_path = cx.path(span, vec![Ident::new(kw::SelfUpper, type_ident.span)]); + let patterns = trait_.create_struct_patterns( + cx, + struct_path, + struct_def, + &prefixes, + ast::Mutability::Not, + use_temporaries, + ); + // Do the let-destructuring. - let mut stmts: Vec<_> = iter::zip(self_args, patterns) - .map(|(arg_expr, pat)| cx.stmt_let_pat(span, pat, arg_expr.clone())) + let mut stmts: Vec<_> = iter::zip(selflike_args, patterns) + .map(|(selflike_arg_expr, pat)| { + cx.stmt_let_pat(span, pat, selflike_arg_expr.clone()) + }) .collect(); stmts.extend(std::mem::take(&mut body.0)); BlockOrExpr(stmts, body.1) @@ -1106,7 +1093,7 @@ impl<'a> MethodDef<'a> { trait_: &TraitDef<'_>, struct_def: &VariantData, type_ident: Ident, - nonself_args: &[P], + nonselflike_args: &[P], ) -> BlockOrExpr { let summary = trait_.summarise_struct(cx, struct_def); @@ -1114,7 +1101,7 @@ impl<'a> MethodDef<'a> { cx, trait_, type_ident, - nonself_args, + nonselflike_args, &StaticStruct(struct_def, summary), ) } @@ -1148,7 +1135,7 @@ impl<'a> MethodDef<'a> { /// } /// } /// ``` - /// Creates a match for a tuple of all `self_args`, where either all + /// Creates a match for a tuple of all `selflike_args`, where either all /// variants match, or it falls into a catch-all for when one variant /// does not match. /// @@ -1161,33 +1148,33 @@ impl<'a> MethodDef<'a> { /// a simple equality check (for PartialEq). /// /// The catch-all handler is provided access the variant index values - /// for each of the self-args, carried in precomputed variables. + /// for each of the selflike_args, carried in precomputed variables. 
fn expand_enum_method_body<'b>( &self, cx: &mut ExtCtxt<'_>, trait_: &TraitDef<'b>, enum_def: &'b EnumDef, type_ident: Ident, - mut self_args: Vec>, - nonself_args: &[P], + mut selflike_args: Vec>, + nonselflike_args: &[P], ) -> BlockOrExpr { let span = trait_.span; let variants = &enum_def.variants; - let self_arg_names = iter::once("__self".to_string()) + let prefixes = iter::once("__self".to_string()) .chain( - self_args + selflike_args .iter() .enumerate() .skip(1) - .map(|(arg_count, _self_arg)| format!("__arg_{}", arg_count)), + .map(|(arg_count, _selflike_arg)| format!("__arg_{}", arg_count)), ) .collect::>(); // The `vi_idents` will be bound, solely in the catch-all, to - // a series of let statements mapping each self_arg to an int + // a series of let statements mapping each selflike_arg to an int // value corresponding to its discriminant. - let vi_idents = self_arg_names + let vi_idents = prefixes .iter() .map(|name| { let vi_suffix = format!("{}_vi", name); @@ -1206,37 +1193,38 @@ impl<'a> MethodDef<'a> { // (Variant1, Variant1, ...) => Body1 // (Variant2, Variant2, ...) => Body2 // ... - // where each tuple has length = self_args.len() + // where each tuple has length = selflike_args.len() + let mut match_arms: Vec = variants .iter() .enumerate() .filter(|&(_, v)| !(self.unify_fieldless_variants && v.data.fields().is_empty())) .map(|(index, variant)| { - let mk_self_pat = |cx: &mut ExtCtxt<'_>, self_arg_name: &str| { - let (p, idents) = trait_.create_enum_variant_pattern( - cx, - type_ident, - variant, - self_arg_name, - ast::Mutability::Not, - ); - (cx.pat(span, PatKind::Ref(p, ast::Mutability::Not)), idents) - }; - // A single arm has form (&VariantK, &VariantK, ...) => BodyK // (see "Final wrinkle" note below for why.) - let mut subpats = Vec::with_capacity(self_arg_names.len()); - let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1); - let first_self_pat_idents = { - let (p, idents) = mk_self_pat(cx, &self_arg_names[0]); - subpats.push(p); - idents - }; - for self_arg_name in &self_arg_names[1..] { - let (p, idents) = mk_self_pat(cx, &self_arg_name); - subpats.push(p); - self_pats_idents.push(idents); - } + + let use_temporaries = false; // enums can't be repr(packed) + let fields = trait_.create_struct_pattern_fields( + cx, + &variant.data, + &prefixes, + use_temporaries, + ); + + let sp = variant.span.with_ctxt(trait_.span.ctxt()); + let variant_path = cx.path(sp, vec![type_ident, variant.ident]); + let mut subpats: Vec<_> = trait_ + .create_struct_patterns( + cx, + variant_path, + &variant.data, + &prefixes, + ast::Mutability::Not, + use_temporaries, + ) + .into_iter() + .map(|p| cx.pat(span, PatKind::Ref(p, ast::Mutability::Not))) + .collect(); // Here is the pat = `(&VariantK, &VariantK, ...)` let single_pat = if subpats.len() == 1 { @@ -1248,57 +1236,20 @@ impl<'a> MethodDef<'a> { // For the BodyK, we need to delegate to our caller, // passing it an EnumMatching to indicate which case // we are in. - - // All of the Self args have the same variant in these - // cases. So we transpose the info in self_pats_idents - // to gather the getter expressions together, in the - // form that EnumMatching expects. - - // The transposition is driven by walking across the - // arg fields of the variant for the first self pat. - let field_tuples = first_self_pat_idents - .into_iter() - .enumerate() - // For each arg field of self, pull out its getter expr ... - .map(|(field_index, (span, opt_ident, self_getter_expr, attrs))| { - // ... 
but FieldInfo also wants getter expr - // for matching other arguments of Self type; - // so walk across the *other* self_pats_idents - // and pull out getter for same field in each - // of them (using `field_index` tracked above). - // That is the heart of the transposition. - let others = self_pats_idents - .iter() - .map(|fields| { - let (_, _opt_ident, ref other_getter_expr, _) = fields[field_index]; - - // All Self args have same variant, so - // opt_idents are the same. (Assert - // here to make it self-evident that - // it is okay to ignore `_opt_ident`.) - assert!(opt_ident == _opt_ident); - - other_getter_expr.clone() - }) - .collect::>>(); - - FieldInfo { - span, - name: opt_ident, - self_: self_getter_expr, - other: others, - attrs, - } - }) - .collect::>>(); - + // // Now, for some given VariantK, we have built up // expressions for referencing every field of every // Self arg, assuming all are instances of VariantK. // Build up code associated with such a case. - let substructure = EnumMatching(index, variants.len(), variant, field_tuples); + let substructure = EnumMatching(index, variants.len(), variant, fields); let arm_expr = self - .call_substructure_method(cx, trait_, type_ident, nonself_args, &substructure) + .call_substructure_method( + cx, + trait_, + type_ident, + nonselflike_args, + &substructure, + ) .into_expr(cx, span); cx.arm(span, single_pat, arm_expr) @@ -1316,13 +1267,13 @@ impl<'a> MethodDef<'a> { cx, trait_, type_ident, - nonself_args, + nonselflike_args, &substructure, ) .into_expr(cx, span), ) } - _ if variants.len() > 1 && self_args.len() > 1 => { + _ if variants.len() > 1 && selflike_args.len() > 1 => { // Since we know that all the arguments will match if we reach // the match expression we add the unreachable intrinsics as the // result of the catch all which should help llvm in optimizing it @@ -1349,8 +1300,8 @@ impl<'a> MethodDef<'a> { // catch-all `_` match, it would trigger the // unreachable-pattern error. // - if variants.len() > 1 && self_args.len() > 1 { - // Build a series of let statements mapping each self_arg + if variants.len() > 1 && selflike_args.len() > 1 { + // Build a series of let statements mapping each selflike_arg // to its discriminant value. 
// // i.e., for `enum E { A, B(1), C(T, T) }`, and a deriving @@ -1365,10 +1316,14 @@ impl<'a> MethodDef<'a> { // We also build an expression which checks whether all discriminants are equal: // `__self_vi == __arg_1_vi && __self_vi == __arg_2_vi && ...` let mut discriminant_test = cx.expr_bool(span, true); - for (i, (&ident, self_arg)) in iter::zip(&vi_idents, &self_args).enumerate() { - let self_addr = cx.expr_addr_of(span, self_arg.clone()); - let variant_value = - deriving::call_intrinsic(cx, span, sym::discriminant_value, vec![self_addr]); + for (i, (&ident, selflike_arg)) in iter::zip(&vi_idents, &selflike_args).enumerate() { + let selflike_addr = cx.expr_addr_of(span, selflike_arg.clone()); + let variant_value = deriving::call_intrinsic( + cx, + span, + sym::discriminant_value, + vec![selflike_addr], + ); let let_stmt = cx.stmt_let(span, false, ident, variant_value); index_let_stmts.push(let_stmt); @@ -1389,18 +1344,18 @@ impl<'a> MethodDef<'a> { cx, trait_, type_ident, - nonself_args, + nonselflike_args, &catch_all_substructure, ) .into_expr(cx, span); - // Final wrinkle: the self_args are expressions that deref + // Final wrinkle: the selflike_args are expressions that deref // down to desired places, but we cannot actually deref // them when they are fed as r-values into a tuple // expression; here add a layer of borrowing, turning // `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`. - self_args.map_in_place(|self_arg| cx.expr_addr_of(span, self_arg)); - let match_arg = cx.expr(span, ast::ExprKind::Tup(self_args)); + selflike_args.map_in_place(|selflike_arg| cx.expr_addr_of(span, selflike_arg)); + let match_arg = cx.expr(span, ast::ExprKind::Tup(selflike_args)); // Lastly we create an expression which branches on all discriminants being equal // if discriminant_test { @@ -1418,67 +1373,21 @@ impl<'a> MethodDef<'a> { let arm_expr = cx.expr_if(span, discriminant_test, all_match, Some(arm_expr)); BlockOrExpr(index_let_stmts, Some(arm_expr)) } else if variants.is_empty() { - // As an additional wrinkle, For a zero-variant enum A, - // currently the compiler - // will accept `fn (a: &Self) { match *a { } }` - // but rejects `fn (a: &Self) { match (&*a,) { } }` - // as well as `fn (a: &Self) { match ( *a,) { } }` - // - // This means that the strategy of building up a tuple of - // all Self arguments fails when Self is a zero variant - // enum: rustc rejects the expanded program, even though - // the actual code tends to be impossible to execute (at - // least safely), according to the type system. - // - // The most expedient fix for this is to just let the - // code fall through to the catch-all. But even this is - // error-prone, since the catch-all as defined above would - // generate code like this: - // - // _ => { let __self0 = match *self { }; - // let __self1 = match *__arg_0 { }; - // } - // - // Which is yields bindings for variables which type - // inference cannot resolve to unique types. - // - // One option to the above might be to add explicit type - // annotations. But the *only* reason to go down that path - // would be to try to make the expanded output consistent - // with the case when the number of enum variants >= 1. - // - // That just isn't worth it. In fact, trying to generate - // sensible code for *any* deriving on a zero-variant enum - // does not make sense. But at the same time, for now, we - // do not want to cause a compile failure just because the - // user happened to attach a deriving to their - // zero-variant enum. 
- // - // Instead, just generate a failing expression for the - // zero variant case, skipping matches and also skipping - // delegating back to the end user code entirely. - // - // (See also #4499 and #12609; note that some of the - // discussions there influence what choice we make here; - // e.g., if we feature-gate `match x { ... }` when x refers - // to an uninhabited type (e.g., a zero-variant enum or a - // type holding such an enum), but do not feature-gate - // zero-variant enums themselves, then attempting to - // derive Debug on such a type could here generate code - // that needs the feature gate enabled.) - + // There is no sensible code to be generated for *any* deriving on + // a zero-variant enum. So we just generate a failing expression + // for the zero variant case. BlockOrExpr(vec![], Some(deriving::call_unreachable(cx, span))) } else { - // Final wrinkle: the self_args are expressions that deref + // Final wrinkle: the selflike_args are expressions that deref // down to desired places, but we cannot actually deref // them when they are fed as r-values into a tuple // expression; here add a layer of borrowing, turning // `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`. - self_args.map_in_place(|self_arg| cx.expr_addr_of(span, self_arg)); - let match_arg = if self_args.len() == 1 { - self_args.pop().unwrap() + selflike_args.map_in_place(|selflike_arg| cx.expr_addr_of(span, selflike_arg)); + let match_arg = if selflike_args.len() == 1 { + selflike_args.pop().unwrap() } else { - cx.expr(span, ast::ExprKind::Tup(self_args)) + cx.expr(span, ast::ExprKind::Tup(selflike_args)) }; BlockOrExpr(vec![], Some(cx.expr_match(span, match_arg, match_arms))) } @@ -1490,7 +1399,7 @@ impl<'a> MethodDef<'a> { trait_: &TraitDef<'_>, enum_def: &EnumDef, type_ident: Ident, - nonself_args: &[P], + nonselflike_args: &[P], ) -> BlockOrExpr { let summary = enum_def .variants @@ -1505,7 +1414,7 @@ impl<'a> MethodDef<'a> { cx, trait_, type_ident, - nonself_args, + nonselflike_args, &StaticEnum(enum_def, summary), ) } @@ -1538,170 +1447,209 @@ impl<'a> TraitDef<'a> { } } - fn create_subpatterns( + fn create_struct_patterns( &self, cx: &mut ExtCtxt<'_>, - field_paths: Vec, + struct_path: ast::Path, + struct_def: &'a VariantData, + prefixes: &[String], mutbl: ast::Mutability, use_temporaries: bool, ) -> Vec> { - field_paths + prefixes .iter() - .map(|path| { - let binding_mode = if use_temporaries { - ast::BindingMode::ByValue(ast::Mutability::Not) - } else { - ast::BindingMode::ByRef(mutbl) - }; - cx.pat(path.span, PatKind::Ident(binding_mode, *path, None)) + .map(|prefix| { + let pieces_iter = + struct_def.fields().iter().enumerate().map(|(i, struct_field)| { + let sp = struct_field.span.with_ctxt(self.span.ctxt()); + let binding_mode = if use_temporaries { + ast::BindingMode::ByValue(ast::Mutability::Not) + } else { + ast::BindingMode::ByRef(mutbl) + }; + let ident = self.mk_pattern_ident(prefix, i); + let path = ident.with_span_pos(sp); + ( + sp, + struct_field.ident, + cx.pat(path.span, PatKind::Ident(binding_mode, path, None)), + ) + }); + + let struct_path = struct_path.clone(); + match *struct_def { + VariantData::Struct(..) 
=> { + let field_pats = pieces_iter + .map(|(sp, ident, pat)| { + if ident.is_none() { + cx.span_bug( + sp, + "a braced struct with unnamed fields in `derive`", + ); + } + ast::PatField { + ident: ident.unwrap(), + is_shorthand: false, + attrs: ast::AttrVec::new(), + id: ast::DUMMY_NODE_ID, + span: pat.span.with_ctxt(self.span.ctxt()), + pat, + is_placeholder: false, + } + }) + .collect(); + cx.pat_struct(self.span, struct_path, field_pats) + } + VariantData::Tuple(..) => { + let subpats = pieces_iter.map(|(_, _, subpat)| subpat).collect(); + cx.pat_tuple_struct(self.span, struct_path, subpats) + } + VariantData::Unit(..) => cx.pat_path(self.span, struct_path), + } }) .collect() } - fn create_struct_pattern( - &self, - cx: &mut ExtCtxt<'_>, - struct_path: ast::Path, - struct_def: &'a VariantData, - prefix: &str, - mutbl: ast::Mutability, - use_temporaries: bool, - ) -> (P, Vec<(Span, Option, P, &'a [ast::Attribute])>) { - let mut paths = Vec::new(); - let mut ident_exprs = Vec::new(); - for (i, struct_field) in struct_def.fields().iter().enumerate() { - let sp = struct_field.span.with_ctxt(self.span.ctxt()); - let ident = Ident::from_str_and_span(&format!("{}_{}", prefix, i), self.span); - paths.push(ident.with_span_pos(sp)); - let val = cx.expr_path(cx.path_ident(sp, ident)); - let val = if use_temporaries { val } else { cx.expr_deref(sp, val) }; - ident_exprs.push((sp, struct_field.ident, val, &struct_field.attrs[..])); - } - - let subpats = self.create_subpatterns(cx, paths, mutbl, use_temporaries); - let pattern = match *struct_def { - VariantData::Struct(..) => { - let field_pats = iter::zip(subpats, &ident_exprs) - .map(|(pat, &(sp, ident, ..))| { - if ident.is_none() { - cx.span_bug(sp, "a braced struct with unnamed fields in `derive`"); - } - ast::PatField { - ident: ident.unwrap(), - is_shorthand: false, - attrs: ast::AttrVec::new(), - id: ast::DUMMY_NODE_ID, - span: pat.span.with_ctxt(self.span.ctxt()), - pat, - is_placeholder: false, - } - }) - .collect(); - cx.pat_struct(self.span, struct_path, field_pats) - } - VariantData::Tuple(..) => cx.pat_tuple_struct(self.span, struct_path, subpats), - VariantData::Unit(..) => cx.pat_path(self.span, struct_path), - }; + fn create_fields(&self, struct_def: &'a VariantData, mk_exprs: F) -> Vec + where + F: Fn(usize, &ast::FieldDef, Span) -> Vec>, + { + struct_def + .fields() + .iter() + .enumerate() + .map(|(i, struct_field)| { + // For this field, get an expr for each selflike_arg. E.g. for + // `PartialEq::eq`, one for each of `&self` and `other`. + let sp = struct_field.span.with_ctxt(self.span.ctxt()); + let mut exprs: Vec<_> = mk_exprs(i, struct_field, sp); + let self_expr = exprs.remove(0); + let other_selflike_exprs = exprs; + FieldInfo { + span: sp.with_ctxt(self.span.ctxt()), + name: struct_field.ident, + self_expr, + other_selflike_exprs, + } + }) + .collect() + } - (pattern, ident_exprs) + fn mk_pattern_ident(&self, prefix: &str, i: usize) -> Ident { + Ident::from_str_and_span(&format!("{}_{}", prefix, i), self.span) } - fn create_struct_field_accesses( + fn create_struct_pattern_fields( &self, cx: &mut ExtCtxt<'_>, - mut self_arg: &P, struct_def: &'a VariantData, - ) -> Vec<(Span, Option, P, &'a [ast::Attribute])> { - let mut ident_exprs = Vec::new(); - for (i, struct_field) in struct_def.fields().iter().enumerate() { - let sp = struct_field.span.with_ctxt(self.span.ctxt()); - - // We don't the need the deref, if there is one. 
-            if let ast::ExprKind::Unary(ast::UnOp::Deref, inner) = &self_arg.kind {
-                self_arg = inner;
-            }
-
-            // Note: we must use `struct_field.span` rather than `span` in the
-            // `unwrap_or_else` case otherwise the hygiene is wrong and we get
-            // "field `0` of struct `Point` is private" errors on tuple
-            // structs.
-            let val = cx.expr(
-                sp,
-                ast::ExprKind::Field(
-                    self_arg.clone(),
-                    struct_field.ident.unwrap_or_else(|| {
-                        Ident::from_str_and_span(&i.to_string(), struct_field.span)
-                    }),
-                ),
-            );
-            ident_exprs.push((sp, struct_field.ident, val, &struct_field.attrs[..]));
-        }
-        ident_exprs
+        prefixes: &[String],
+        use_temporaries: bool,
+    ) -> Vec<FieldInfo> {
+        self.create_fields(struct_def, |i, _struct_field, sp| {
+            prefixes
+                .iter()
+                .map(|prefix| {
+                    let ident = self.mk_pattern_ident(prefix, i);
+                    let expr = cx.expr_path(cx.path_ident(sp, ident));
+                    if use_temporaries { expr } else { cx.expr_deref(sp, expr) }
+                })
+                .collect()
+        })
     }
 
-    fn create_enum_variant_pattern(
+    fn create_struct_field_access_fields(
         &self,
         cx: &mut ExtCtxt<'_>,
-        enum_ident: Ident,
-        variant: &'a ast::Variant,
-        prefix: &str,
-        mutbl: ast::Mutability,
-    ) -> (P<ast::Pat>, Vec<(Span, Option<Ident>, P<Expr>, &'a [ast::Attribute])>) {
-        let sp = variant.span.with_ctxt(self.span.ctxt());
-        let variant_path = cx.path(sp, vec![enum_ident, variant.ident]);
-        let use_temporaries = false; // enums can't be repr(packed)
-        self.create_struct_pattern(cx, variant_path, &variant.data, prefix, mutbl, use_temporaries)
+        selflike_args: &[P<Expr>],
+        struct_def: &'a VariantData,
+    ) -> Vec<FieldInfo> {
+        self.create_fields(struct_def, |i, struct_field, sp| {
+            selflike_args
+                .iter()
+                .map(|mut selflike_arg| {
+                    // We don't need the deref, if there is one.
+                    if let ast::ExprKind::Unary(ast::UnOp::Deref, inner) = &selflike_arg.kind {
+                        selflike_arg = inner;
+                    }
+                    // Note: we must use `struct_field.span` rather than `span` in the
+                    // `unwrap_or_else` case otherwise the hygiene is wrong and we get
+                    // "field `0` of struct `Point` is private" errors on tuple
+                    // structs.
+                    cx.expr(
+                        sp,
+                        ast::ExprKind::Field(
+                            selflike_arg.clone(),
+                            struct_field.ident.unwrap_or_else(|| {
+                                Ident::from_str_and_span(&i.to_string(), struct_field.span)
+                            }),
+                        ),
+                    )
+                })
+                .collect()
+        })
     }
 }
 
-/// Function to fold over fields, with three cases, to generate more efficient and concise code.
-/// When the `substructure` has grouped fields, there are two cases:
-/// Zero fields: call the base case function with `None` (like the usual base case of `cs_fold`).
-/// One or more fields: call the base case function on the first value (which depends on
-/// `use_fold`), and use that as the base case. Then perform `cs_fold` on the remainder of the
-/// fields.
-/// When the `substructure` is an `EnumNonMatchingCollapsed`, the result of `enum_nonmatch_f`
-/// is returned. Statics may not be folded over.
-pub fn cs_fold<F, B>(
+/// The function passed to `cs_fold` is called repeatedly with a value of this
+/// type. It describes one part of the code generation. The result is always an
+/// expression.
+pub enum CsFold<'a> {
+    /// The basic case: a field expression for one or more selflike args. E.g.
+    /// for `PartialEq::eq` this is something like `self.x == other.x`.
+    Single(&'a FieldInfo),
+
+    /// The combination of two field expressions. E.g. for `PartialEq::eq` this
+    /// is something like `<field1 equality> && <field2 equality>`.
+    Combine(Span, P<Expr>, P<Expr>),
+
+    // The fallback case for a struct or enum variant with no fields.
+    Fieldless,
+
+    /// The fallback case for non-matching enum variants. The slice is the
+    /// identifiers holding the variant index value for each of the `Self`
+    /// arguments.
+    EnumNonMatching(Span, &'a [Ident]),
+}
+
+/// Folds over fields, combining the expressions for each field in a sequence.
+/// Statics may not be folded over.
+pub fn cs_fold<F>(
     use_foldl: bool,
-    mut f: F,
-    mut b: B,
-    mut enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>,
     cx: &mut ExtCtxt<'_>,
     trait_span: Span,
     substructure: &Substructure<'_>,
+    mut f: F,
 ) -> P<Expr>
 where
-    F: FnMut(&mut ExtCtxt<'_>, Span, P<Expr>, P<Expr>, &[P<Expr>]) -> P<Expr>,
-    B: FnMut(&mut ExtCtxt<'_>, Option<(Span, P<Expr>, &[P<Expr>])>) -> P<Expr>,
+    F: FnMut(&mut ExtCtxt<'_>, CsFold<'_>) -> P<Expr>,
 {
     match *substructure.fields {
         EnumMatching(.., ref all_fields) | Struct(_, ref all_fields) => {
-            let (base, rest) = match (all_fields.is_empty(), use_foldl) {
-                (false, true) => {
-                    let (first, rest) = all_fields.split_first().unwrap();
-                    let args = (first.span, first.self_.clone(), &first.other[..]);
-                    (b(cx, Some(args)), rest)
-                }
-                (false, false) => {
-                    let (last, rest) = all_fields.split_last().unwrap();
-                    let args = (last.span, last.self_.clone(), &last.other[..]);
-                    (b(cx, Some(args)), rest)
-                }
-                (true, _) => (b(cx, None), &all_fields[..]),
+            if all_fields.is_empty() {
+                return f(cx, CsFold::Fieldless);
+            }
+
+            let (base_field, rest) = if use_foldl {
+                all_fields.split_first().unwrap()
+            } else {
+                all_fields.split_last().unwrap()
+            };
+
+            let base_expr = f(cx, CsFold::Single(base_field));
+
+            let op = |old, field: &FieldInfo| {
+                let new = f(cx, CsFold::Single(field));
+                f(cx, CsFold::Combine(field.span, old, new))
             };
             if use_foldl {
-                rest.iter().fold(base, |old, field| {
-                    f(cx, field.span, old, field.self_.clone(), &field.other)
-                })
+                rest.iter().fold(base_expr, op)
             } else {
-                rest.iter().rev().fold(base, |old, field| {
-                    f(cx, field.span, old, field.self_.clone(), &field.other)
-                })
+                rest.iter().rfold(base_expr, op)
             }
         }
-        EnumNonMatchingCollapsed(tuple) => enum_nonmatch_f(cx, trait_span, tuple),
+        EnumNonMatchingCollapsed(tuple) => f(cx, CsFold::EnumNonMatching(trait_span, tuple)),
         StaticEnum(..) | StaticStruct(..) => cx.span_bug(trait_span, "static function in `derive`"),
     }
 }
diff --git a/compiler/rustc_builtin_macros/src/deriving/hash.rs b/compiler/rustc_builtin_macros/src/deriving/hash.rs
index c3f7d09886b3a..2213cd6d37d2d 100644
--- a/compiler/rustc_builtin_macros/src/deriving/hash.rs
+++ b/compiler/rustc_builtin_macros/src/deriving/hash.rs
@@ -30,7 +30,7 @@ pub fn expand_deriving_hash(
             name: sym::hash,
             generics: Bounds { bounds: vec![(typaram, vec![path_std!(hash::Hasher)])] },
             explicit_self: true,
-            args: vec![(Ref(Box::new(Path(arg)), Mutability::Mut), sym::state)],
+            nonself_args: vec![(Ref(Box::new(Path(arg)), Mutability::Mut), sym::state)],
             ret_ty: Unit,
             attributes: vec![],
             unify_fieldless_variants: true,
@@ -49,7 +49,7 @@ fn hash_substructure(
     trait_span: Span,
     substr: &Substructure<'_>,
 ) -> BlockOrExpr {
-    let [state_expr] = substr.nonself_args else {
+    let [state_expr] = substr.nonselflike_args else {
         cx.span_bug(trait_span, "incorrect number of arguments in `derive(Hash)`");
     };
     let call_hash = |span, thing_expr| {
@@ -82,7 +82,9 @@ fn hash_substructure(
     };
 
     stmts.extend(
-        fields.iter().map(|FieldInfo { ref self_, span, .. }| call_hash(*span, self_.clone())),
+        fields
+            .iter()
+            .map(|FieldInfo { ref self_expr, span, ..
}| call_hash(*span, self_expr.clone())), ); BlockOrExpr::new_stmts(stmts) } diff --git a/compiler/rustc_codegen_cranelift/patches/0027-sysroot-128bit-atomic-operations.patch b/compiler/rustc_codegen_cranelift/patches/0027-sysroot-128bit-atomic-operations.patch index ce1c6c99b40c8..77f437974c2d6 100644 --- a/compiler/rustc_codegen_cranelift/patches/0027-sysroot-128bit-atomic-operations.patch +++ b/compiler/rustc_codegen_cranelift/patches/0027-sysroot-128bit-atomic-operations.patch @@ -19,7 +19,7 @@ index 092b7cf..158cf71 100644 #[stable(feature = "integer_atomics_stable", since = "1.34.0")] impl RefUnwindSafe for crate::sync::atomic::AtomicI64 {} -#[cfg(target_has_atomic_load_store = "128")] --#[unstable(feature = "integer_atomics", issue = "32976")] +-#[unstable(feature = "integer_atomics", issue = "99069")] -impl RefUnwindSafe for crate::sync::atomic::AtomicI128 {} #[cfg(target_has_atomic_load_store = "ptr")] @@ -29,7 +29,7 @@ index 092b7cf..158cf71 100644 #[stable(feature = "integer_atomics_stable", since = "1.34.0")] impl RefUnwindSafe for crate::sync::atomic::AtomicU64 {} -#[cfg(target_has_atomic_load_store = "128")] --#[unstable(feature = "integer_atomics", issue = "32976")] +-#[unstable(feature = "integer_atomics", issue = "99069")] -impl RefUnwindSafe for crate::sync::atomic::AtomicU128 {} #[cfg(target_has_atomic_load_store = "8")] @@ -46,14 +46,14 @@ index d9de37e..8293fce 100644 -atomic_int! { - cfg(target_has_atomic = "128"), - cfg(target_has_atomic_equal_alignment = "128"), -- unstable(feature = "integer_atomics", issue = "32976"), -- unstable(feature = "integer_atomics", issue = "32976"), -- unstable(feature = "integer_atomics", issue = "32976"), -- unstable(feature = "integer_atomics", issue = "32976"), -- unstable(feature = "integer_atomics", issue = "32976"), -- unstable(feature = "integer_atomics", issue = "32976"), +- unstable(feature = "integer_atomics", issue = "99069"), +- unstable(feature = "integer_atomics", issue = "99069"), +- unstable(feature = "integer_atomics", issue = "99069"), +- unstable(feature = "integer_atomics", issue = "99069"), +- unstable(feature = "integer_atomics", issue = "99069"), +- unstable(feature = "integer_atomics", issue = "99069"), - rustc_const_stable(feature = "const_integer_atomics", since = "1.34.0"), -- unstable(feature = "integer_atomics", issue = "32976"), +- unstable(feature = "integer_atomics", issue = "99069"), - cfg_attr(not(test), rustc_diagnostic_item = "AtomicI128"), - "i128", - "#![feature(integer_atomics)]\n\n", @@ -66,14 +66,14 @@ index d9de37e..8293fce 100644 -atomic_int! 
{ - cfg(target_has_atomic = "128"), - cfg(target_has_atomic_equal_alignment = "128"), -- unstable(feature = "integer_atomics", issue = "32976"), -- unstable(feature = "integer_atomics", issue = "32976"), -- unstable(feature = "integer_atomics", issue = "32976"), -- unstable(feature = "integer_atomics", issue = "32976"), -- unstable(feature = "integer_atomics", issue = "32976"), -- unstable(feature = "integer_atomics", issue = "32976"), +- unstable(feature = "integer_atomics", issue = "99069"), +- unstable(feature = "integer_atomics", issue = "99069"), +- unstable(feature = "integer_atomics", issue = "99069"), +- unstable(feature = "integer_atomics", issue = "99069"), +- unstable(feature = "integer_atomics", issue = "99069"), +- unstable(feature = "integer_atomics", issue = "99069"), - rustc_const_stable(feature = "const_integer_atomics", since = "1.34.0"), -- unstable(feature = "integer_atomics", issue = "32976"), +- unstable(feature = "integer_atomics", issue = "99069"), - cfg_attr(not(test), rustc_diagnostic_item = "AtomicU128"), - "u128", - "#![feature(integer_atomics)]\n\n", diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs b/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs index 64ecbc82c5600..730048d061b55 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs @@ -103,14 +103,14 @@ impl<'ll, 'tcx> CodegenUnitDebugContext<'ll, 'tcx> { // for macOS to understand. For more info see #11352 // This can be overridden using --llvm-opts -dwarf-version,N. // Android has the same issue (#22398) - if let Some(version) = sess.target.dwarf_version { - llvm::LLVMRustAddModuleFlag( - self.llmod, - llvm::LLVMModFlagBehavior::Warning, - "Dwarf Version\0".as_ptr().cast(), - version, - ) - } + let dwarf_version = + sess.opts.debugging_opts.dwarf_version.unwrap_or(sess.target.default_dwarf_version); + llvm::LLVMRustAddModuleFlag( + self.llmod, + llvm::LLVMModFlagBehavior::Warning, + "Dwarf Version\0".as_ptr().cast(), + dwarf_version, + ); // Indicate that we want CodeView debug information on MSVC if sess.target.is_like_msvc { diff --git a/compiler/rustc_const_eval/src/const_eval/machine.rs b/compiler/rustc_const_eval/src/const_eval/machine.rs index ac529bf152f2b..29ab1d187719c 100644 --- a/compiler/rustc_const_eval/src/const_eval/machine.rs +++ b/compiler/rustc_const_eval/src/const_eval/machine.rs @@ -14,7 +14,7 @@ use rustc_middle::mir::AssertMessage; use rustc_session::Limit; use rustc_span::symbol::{sym, Symbol}; use rustc_target::abi::{Align, Size}; -use rustc_target::spec::abi::Abi; +use rustc_target::spec::abi::Abi as CallAbi; use crate::interpret::{ self, compile_time_machine, AllocId, ConstAllocation, Frame, ImmTy, InterpCx, InterpResult, @@ -263,7 +263,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, fn find_mir_or_eval_fn( ecx: &mut InterpCx<'mir, 'tcx, Self>, instance: ty::Instance<'tcx>, - _abi: Abi, + _abi: CallAbi, args: &[OpTy<'tcx>], _dest: &PlaceTy<'tcx>, _ret: Option, diff --git a/compiler/rustc_const_eval/src/interpret/machine.rs b/compiler/rustc_const_eval/src/interpret/machine.rs index b3461b414b67c..7f8eea94aeeda 100644 --- a/compiler/rustc_const_eval/src/interpret/machine.rs +++ b/compiler/rustc_const_eval/src/interpret/machine.rs @@ -10,7 +10,7 @@ use rustc_middle::mir; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_span::def_id::DefId; use rustc_target::abi::Size; -use rustc_target::spec::abi::Abi; +use rustc_target::spec::abi::Abi as CallAbi; use super::{ 
AllocId, AllocRange, Allocation, ConstAllocation, Frame, ImmTy, InterpCx, InterpResult, @@ -138,7 +138,7 @@ pub trait Machine<'mir, 'tcx>: Sized { /// Whether to enforce integers and floats not having provenance. fn enforce_number_no_provenance(ecx: &InterpCx<'mir, 'tcx, Self>) -> bool; - /// Whether function calls should be [ABI](Abi)-checked. + /// Whether function calls should be [ABI](CallAbi)-checked. fn enforce_abi(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool { true } @@ -169,7 +169,7 @@ pub trait Machine<'mir, 'tcx>: Sized { fn find_mir_or_eval_fn( ecx: &mut InterpCx<'mir, 'tcx, Self>, instance: ty::Instance<'tcx>, - abi: Abi, + abi: CallAbi, args: &[OpTy<'tcx, Self::PointerTag>], destination: &PlaceTy<'tcx, Self::PointerTag>, target: Option, @@ -181,7 +181,7 @@ pub trait Machine<'mir, 'tcx>: Sized { fn call_extra_fn( ecx: &mut InterpCx<'mir, 'tcx, Self>, fn_val: Self::ExtraFnVal, - abi: Abi, + abi: CallAbi, args: &[OpTy<'tcx, Self::PointerTag>], destination: &PlaceTy<'tcx, Self::PointerTag>, target: Option, @@ -483,7 +483,7 @@ pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) { fn call_extra_fn( _ecx: &mut InterpCx<$mir, $tcx, Self>, fn_val: !, - _abi: Abi, + _abi: CallAbi, _args: &[OpTy<$tcx>], _destination: &PlaceTy<$tcx, Self::PointerTag>, _target: Option, diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs index 805dcb3889539..576958a855d24 100644 --- a/compiler/rustc_const_eval/src/interpret/operand.rs +++ b/compiler/rustc_const_eval/src/interpret/operand.rs @@ -78,6 +78,7 @@ impl<'tcx, Tag: Provenance> Immediate { } #[inline] + #[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980) pub fn to_scalar_or_uninit(self) -> ScalarMaybeUninit { match self { Immediate::Scalar(val) => val, @@ -87,11 +88,13 @@ impl<'tcx, Tag: Provenance> Immediate { } #[inline] + #[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980) pub fn to_scalar(self) -> InterpResult<'tcx, Scalar> { self.to_scalar_or_uninit().check_init() } #[inline] + #[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980) pub fn to_scalar_or_uninit_pair(self) -> (ScalarMaybeUninit, ScalarMaybeUninit) { match self { Immediate::ScalarPair(val1, val2) => (val1, val2), @@ -101,6 +104,7 @@ impl<'tcx, Tag: Provenance> Immediate { } #[inline] + #[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980) pub fn to_scalar_pair(self) -> InterpResult<'tcx, (Scalar, Scalar)> { let (val1, val2) = self.to_scalar_or_uninit_pair(); Ok((val1.check_init()?, val2.check_init()?)) diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index acd77f5d2ee91..04f585df34cc2 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -1438,18 +1438,8 @@ impl<'hir> Body<'hir> { } /// The type of source expression that caused this generator to be created. -#[derive( - Clone, - PartialEq, - PartialOrd, - Eq, - Hash, - HashStable_Generic, - Encodable, - Decodable, - Debug, - Copy -)] +#[derive(Clone, PartialEq, PartialOrd, Eq, Hash, Debug, Copy)] +#[derive(HashStable_Generic, Encodable, Decodable)] pub enum GeneratorKind { /// An explicit `async` block or the body of an async function. Async(AsyncGeneratorKind), @@ -1481,18 +1471,8 @@ impl GeneratorKind { /// /// This helps error messages but is also used to drive coercions in /// type-checking (see #60424). 
-#[derive( - Clone, - PartialEq, - PartialOrd, - Eq, - Hash, - HashStable_Generic, - Encodable, - Decodable, - Debug, - Copy -)] +#[derive(Clone, PartialEq, PartialOrd, Eq, Hash, Debug, Copy)] +#[derive(HashStable_Generic, Encodable, Decodable)] pub enum AsyncGeneratorKind { /// An explicit `async` block written by the user. Block, diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs index 1bb79a0264dd1..55827ff583aff 100644 --- a/compiler/rustc_interface/src/tests.rs +++ b/compiler/rustc_interface/src/tests.rs @@ -733,6 +733,7 @@ fn test_debugging_options_tracking_hash() { tracked!(dep_info_omit_d_target, true); tracked!(drop_tracking, true); tracked!(dual_proc_macros, true); + tracked!(dwarf_version, Some(5)); tracked!(fewer_names, Some(true)); tracked!(force_unstable_if_unmarked, true); tracked!(fuel, Some(("abc".to_string(), 99))); diff --git a/compiler/rustc_mir_transform/src/const_prop.rs b/compiler/rustc_mir_transform/src/const_prop.rs index 185f6c523d37a..fb5423dd157c2 100644 --- a/compiler/rustc_mir_transform/src/const_prop.rs +++ b/compiler/rustc_mir_transform/src/const_prop.rs @@ -22,8 +22,8 @@ use rustc_middle::ty::{ self, ConstKind, EarlyBinder, Instance, ParamEnv, Ty, TyCtxt, TypeVisitable, }; use rustc_span::{def_id::DefId, Span}; -use rustc_target::abi::{HasDataLayout, Size, TargetDataLayout}; -use rustc_target::spec::abi::Abi; +use rustc_target::abi::{self, HasDataLayout, Size, TargetDataLayout}; +use rustc_target::spec::abi::Abi as CallAbi; use rustc_trait_selection::traits; use crate::MirPass; @@ -195,7 +195,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx> fn find_mir_or_eval_fn( _ecx: &mut InterpCx<'mir, 'tcx, Self>, _instance: ty::Instance<'tcx>, - _abi: Abi, + _abi: CallAbi, _args: &[OpTy<'tcx>], _destination: &PlaceTy<'tcx>, _target: Option, @@ -659,6 +659,11 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { (Ok(_), Ok(_)) => return this.ecx.eval_rvalue_into_place(rvalue, place), }; + if !matches!(const_arg.layout.abi, abi::Abi::Scalar(..)) { + // We cannot handle Scalar Pair stuff. 
+ return this.ecx.eval_rvalue_into_place(rvalue, place); + } + let arg_value = const_arg.to_scalar()?.to_bits(const_arg.layout.size)?; let dest = this.ecx.eval_place(place)?; diff --git a/compiler/rustc_mir_transform/src/const_prop_lint.rs b/compiler/rustc_mir_transform/src/const_prop_lint.rs index 280ed17f03cf5..09a5cb8280f35 100644 --- a/compiler/rustc_mir_transform/src/const_prop_lint.rs +++ b/compiler/rustc_mir_transform/src/const_prop_lint.rs @@ -24,7 +24,7 @@ use rustc_middle::ty::{ use rustc_session::lint; use rustc_span::{def_id::DefId, Span}; use rustc_target::abi::{HasDataLayout, Size, TargetDataLayout}; -use rustc_target::spec::abi::Abi; +use rustc_target::spec::abi::Abi as CallAbi; use rustc_trait_selection::traits; use crate::MirLint; @@ -191,7 +191,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx> fn find_mir_or_eval_fn( _ecx: &mut InterpCx<'mir, 'tcx, Self>, _instance: ty::Instance<'tcx>, - _abi: Abi, + _abi: CallAbi, _args: &[OpTy<'tcx>], _destination: &PlaceTy<'tcx>, _target: Option, diff --git a/compiler/rustc_resolve/src/diagnostics.rs b/compiler/rustc_resolve/src/diagnostics.rs index ec9ae63ca366a..18ffe9528f565 100644 --- a/compiler/rustc_resolve/src/diagnostics.rs +++ b/compiler/rustc_resolve/src/diagnostics.rs @@ -28,7 +28,7 @@ use rustc_span::{BytePos, Span}; use tracing::debug; use crate::imports::{Import, ImportKind, ImportResolver}; -use crate::late::Rib; +use crate::late::{PatternSource, Rib}; use crate::path_names_to_string; use crate::{AmbiguityError, AmbiguityErrorMisc, AmbiguityKind, BindingError, Finalize}; use crate::{HasGenericParams, MacroRulesScope, Module, ModuleKind, ModuleOrUniformRoot}; @@ -896,25 +896,40 @@ impl<'a> Resolver<'a> { err } ResolutionError::BindingShadowsSomethingUnacceptable { - shadowing_binding_descr, + shadowing_binding, name, participle, article, - shadowed_binding_descr, + shadowed_binding, shadowed_binding_span, } => { + let shadowed_binding_descr = shadowed_binding.descr(); let mut err = struct_span_err!( self.session, span, E0530, "{}s cannot shadow {}s", - shadowing_binding_descr, + shadowing_binding.descr(), shadowed_binding_descr, ); err.span_label( span, format!("cannot be named the same as {} {}", article, shadowed_binding_descr), ); + match (shadowing_binding, shadowed_binding) { + ( + PatternSource::Match, + Res::Def(DefKind::Ctor(CtorOf::Variant | CtorOf::Struct, CtorKind::Fn), _), + ) => { + err.span_suggestion( + span, + "try specify the pattern arguments", + format!("{}(..)", name), + Applicability::Unspecified, + ); + } + _ => (), + } let msg = format!("the {} `{}` is {} here", shadowed_binding_descr, name, participle); err.span_label(shadowed_binding_span, msg); diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs index 640d13ea43547..f37acca3d9f27 100644 --- a/compiler/rustc_resolve/src/late.rs +++ b/compiler/rustc_resolve/src/late.rs @@ -50,7 +50,7 @@ struct BindingInfo { } #[derive(Copy, Clone, PartialEq, Eq, Debug)] -enum PatternSource { +pub enum PatternSource { Match, Let, For, @@ -64,7 +64,7 @@ enum IsRepeatExpr { } impl PatternSource { - fn descr(self) -> &'static str { + pub fn descr(self) -> &'static str { match self { PatternSource::Match => "match binding", PatternSource::Let => "let binding", @@ -2845,11 +2845,11 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { self.report_error( ident.span, ResolutionError::BindingShadowsSomethingUnacceptable { - shadowing_binding_descr: pat_src.descr(), + shadowing_binding: pat_src, 
name: ident.name, participle: if binding.is_import() { "imported" } else { "defined" }, article: binding.res().article(), - shadowed_binding_descr: binding.res().descr(), + shadowed_binding: binding.res(), shadowed_binding_span: binding.span, }, ); @@ -2861,11 +2861,11 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { self.report_error( ident.span, ResolutionError::BindingShadowsSomethingUnacceptable { - shadowing_binding_descr: pat_src.descr(), + shadowing_binding: pat_src, name: ident.name, participle: "defined", article: res.article(), - shadowed_binding_descr: res.descr(), + shadowed_binding: res, shadowed_binding_span: self.r.opt_span(def_id).expect("const parameter defined outside of local crate"), } ); diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs index 28ef384f2c5ad..8968179c92e4b 100644 --- a/compiler/rustc_resolve/src/lib.rs +++ b/compiler/rustc_resolve/src/lib.rs @@ -61,7 +61,7 @@ use tracing::debug; use diagnostics::{ImportSuggestion, LabelSuggestion, Suggestion}; use imports::{Import, ImportKind, ImportResolver, NameResolution}; -use late::{HasGenericParams, PathSource}; +use late::{HasGenericParams, PathSource, PatternSource}; use macros::{MacroRulesBinding, MacroRulesScope, MacroRulesScopeRef}; use crate::access_levels::AccessLevelsVisitor; @@ -230,11 +230,11 @@ enum ResolutionError<'a> { ), /// Error E0530: `X` bindings cannot shadow `Y`s. BindingShadowsSomethingUnacceptable { - shadowing_binding_descr: &'static str, + shadowing_binding: PatternSource, name: Symbol, participle: &'static str, article: &'static str, - shadowed_binding_descr: &'static str, + shadowed_binding: Res, shadowed_binding_span: Span, }, /// Error E0128: generic parameters with a default cannot use forward-declared identifiers. diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index b617eb02eb6e3..8f1057b793fa3 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -1272,6 +1272,8 @@ options! { computed `block` spans (one span encompassing a block's terminator and \ all statements). If `-Z instrument-coverage` is also enabled, create \ an additional `.html` file showing the computed coverage spans."), + dwarf_version: Option = (None, parse_opt_number, [TRACKED], + "version of DWARF debug information to emit (default: 2 or 4, depending on platform)"), emit_stack_sizes: bool = (false, parse_bool, [UNTRACKED], "emit a section containing stack size metadata (default: no)"), fewer_names: Option = (None, parse_opt_bool, [TRACKED], diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs index 1eee0c3163d40..1cccef2f64fec 100644 --- a/compiler/rustc_session/src/session.rs +++ b/compiler/rustc_session/src/session.rs @@ -1498,6 +1498,12 @@ fn validate_commandline_args_with_session_available(sess: &Session) { )) } } + + if let Some(dwarf_version) = sess.opts.debugging_opts.dwarf_version { + if dwarf_version > 5 { + sess.err(&format!("requested DWARF version {} is greater than 5", dwarf_version)); + } + } } /// Holds data on the current incremental compilation session, if there is one. 
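The `-Z dwarf-version` plumbing above (the new `dwarf_version` debugging option, its validation in `validate_commandline_args_with_session_available`, and the per-target `default_dwarf_version` changed in the hunks below) reduces to a small resolution rule: an explicit `-Z dwarf-version=N` wins, otherwise the target's default applies, and versions above 5 are rejected. A minimal standalone sketch of that rule follows; the function and parameter names are illustrative only and are not rustc APIs.

fn effective_dwarf_version(requested: Option<u32>, target_default: u32) -> Result<u32, String> {
    match requested {
        // Mirrors the session check added above: only versions up to DWARF 5 are accepted.
        Some(v) if v > 5 => Err(format!("requested DWARF version {} is greater than 5", v)),
        // `-Z dwarf-version=N` takes precedence over the target's default.
        Some(v) => Ok(v),
        // Otherwise fall back to `default_dwarf_version` (2 on the Apple/BSD/Android bases, 4 elsewhere).
        None => Ok(target_default),
    }
}

fn main() {
    assert_eq!(effective_dwarf_version(Some(5), 2), Ok(5)); // explicit flag overrides a DWARF-2 default
    assert_eq!(effective_dwarf_version(None, 4), Ok(4));    // no flag: use the platform default
    assert!(effective_dwarf_version(Some(6), 4).is_err());  // versions above 5 are an error
}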
diff --git a/compiler/rustc_target/src/asm/mod.rs b/compiler/rustc_target/src/asm/mod.rs index df8ccc42a77a3..65d2cd64bf693 100644 --- a/compiler/rustc_target/src/asm/mod.rs +++ b/compiler/rustc_target/src/asm/mod.rs @@ -244,18 +244,8 @@ impl FromStr for InlineAsmArch { } } -#[derive( - Copy, - Clone, - Encodable, - Decodable, - Debug, - Eq, - PartialEq, - PartialOrd, - Hash, - HashStable_Generic -)] +#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Hash)] +#[derive(HashStable_Generic, Encodable, Decodable)] pub enum InlineAsmReg { X86(X86InlineAsmReg), Arm(ArmInlineAsmReg), @@ -406,18 +396,8 @@ impl InlineAsmReg { } } -#[derive( - Copy, - Clone, - Encodable, - Decodable, - Debug, - Eq, - PartialEq, - PartialOrd, - Hash, - HashStable_Generic -)] +#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Hash)] +#[derive(HashStable_Generic, Encodable, Decodable)] pub enum InlineAsmRegClass { X86(X86InlineAsmRegClass), Arm(ArmInlineAsmRegClass), @@ -620,18 +600,8 @@ impl InlineAsmRegClass { } } -#[derive( - Copy, - Clone, - Encodable, - Decodable, - Debug, - Eq, - PartialEq, - PartialOrd, - Hash, - HashStable_Generic -)] +#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Hash)] +#[derive(HashStable_Generic, Encodable, Decodable)] pub enum InlineAsmRegOrRegClass { Reg(InlineAsmReg), RegClass(InlineAsmRegClass), @@ -808,18 +778,8 @@ pub fn allocatable_registers( } } -#[derive( - Copy, - Clone, - Encodable, - Decodable, - Debug, - Eq, - PartialEq, - PartialOrd, - Hash, - HashStable_Generic -)] +#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Hash)] +#[derive(HashStable_Generic, Encodable, Decodable)] pub enum InlineAsmClobberAbi { X86, X86_64Win, diff --git a/compiler/rustc_target/src/spec/android_base.rs b/compiler/rustc_target/src/spec/android_base.rs index c2b9d696776f5..dc06597db6fd4 100644 --- a/compiler/rustc_target/src/spec/android_base.rs +++ b/compiler/rustc_target/src/spec/android_base.rs @@ -3,7 +3,7 @@ use crate::spec::TargetOptions; pub fn opts() -> TargetOptions { let mut base = super::linux_base::opts(); base.os = "android".into(); - base.dwarf_version = Some(2); + base.default_dwarf_version = 2; base.position_independent_executables = true; base.has_thread_local = false; // This is for backward compatibility, see https://github.com/rust-lang/rust/issues/49867 diff --git a/compiler/rustc_target/src/spec/apple_base.rs b/compiler/rustc_target/src/spec/apple_base.rs index e8460a509e2ba..713dc9a1f0ea7 100644 --- a/compiler/rustc_target/src/spec/apple_base.rs +++ b/compiler/rustc_target/src/spec/apple_base.rs @@ -28,7 +28,7 @@ pub fn opts(os: &'static str) -> TargetOptions { executables: true, families: cvs!["unix"], is_like_osx: true, - dwarf_version: Some(2), + default_dwarf_version: 2, frame_pointer: FramePointer::Always, has_rpath: true, dll_suffix: ".dylib".into(), diff --git a/compiler/rustc_target/src/spec/dragonfly_base.rs b/compiler/rustc_target/src/spec/dragonfly_base.rs index b59322d07f57a..c1e469746cb4c 100644 --- a/compiler/rustc_target/src/spec/dragonfly_base.rs +++ b/compiler/rustc_target/src/spec/dragonfly_base.rs @@ -9,7 +9,7 @@ pub fn opts() -> TargetOptions { has_rpath: true, position_independent_executables: true, relro_level: RelroLevel::Full, - dwarf_version: Some(2), + default_dwarf_version: 2, ..Default::default() } } diff --git a/compiler/rustc_target/src/spec/freebsd_base.rs b/compiler/rustc_target/src/spec/freebsd_base.rs index a7e0f9f704127..36312d26e3700 100644 --- a/compiler/rustc_target/src/spec/freebsd_base.rs +++ 
b/compiler/rustc_target/src/spec/freebsd_base.rs @@ -10,7 +10,7 @@ pub fn opts() -> TargetOptions { position_independent_executables: true, relro_level: RelroLevel::Full, abi_return_struct_as_int: true, - dwarf_version: Some(2), + default_dwarf_version: 2, ..Default::default() } } diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs index a08603da04095..48ccb10f21426 100644 --- a/compiler/rustc_target/src/spec/mod.rs +++ b/compiler/rustc_target/src/spec/mod.rs @@ -1275,9 +1275,9 @@ pub struct TargetOptions { pub is_like_msvc: bool, /// Whether a target toolchain is like WASM. pub is_like_wasm: bool, - /// Version of DWARF to use if not using the default. + /// Default supported version of DWARF on this platform. /// Useful because some platforms (osx, bsd) only want up to DWARF2. - pub dwarf_version: Option, + pub default_dwarf_version: u32, /// Whether the linker support GNU-like arguments such as -O. Defaults to true. pub linker_is_gnu: bool, /// The MinGW toolchain has a known issue that prevents it from correctly @@ -1539,7 +1539,7 @@ impl Default for TargetOptions { is_like_windows: false, is_like_msvc: false, is_like_wasm: false, - dwarf_version: None, + default_dwarf_version: 4, linker_is_gnu: true, allows_weak_linkage: true, has_rpath: false, @@ -1778,13 +1778,13 @@ impl Target { base.$key_name = s; } } ); - ($key_name:ident, Option) => ( { + ($key_name:ident, u32) => ( { let name = (stringify!($key_name)).replace("_", "-"); if let Some(s) = obj.remove(&name).and_then(|b| b.as_u64()) { if s < 1 || s > 5 { return Err("Not a valid DWARF version number".into()); } - base.$key_name = Some(s as u32); + base.$key_name = s as u32; } } ); ($key_name:ident, Option) => ( { @@ -2143,7 +2143,7 @@ impl Target { key!(is_like_windows, bool); key!(is_like_msvc, bool); key!(is_like_wasm, bool); - key!(dwarf_version, Option); + key!(default_dwarf_version, u32); key!(linker_is_gnu, bool); key!(allows_weak_linkage, bool); key!(has_rpath, bool); @@ -2387,7 +2387,7 @@ impl ToJson for Target { target_option_val!(is_like_windows); target_option_val!(is_like_msvc); target_option_val!(is_like_wasm); - target_option_val!(dwarf_version); + target_option_val!(default_dwarf_version); target_option_val!(linker_is_gnu); target_option_val!(allows_weak_linkage); target_option_val!(has_rpath); diff --git a/compiler/rustc_target/src/spec/netbsd_base.rs b/compiler/rustc_target/src/spec/netbsd_base.rs index 69016d77cf97b..40ef04ba04382 100644 --- a/compiler/rustc_target/src/spec/netbsd_base.rs +++ b/compiler/rustc_target/src/spec/netbsd_base.rs @@ -11,7 +11,7 @@ pub fn opts() -> TargetOptions { position_independent_executables: true, relro_level: RelroLevel::Full, use_ctors_section: true, - dwarf_version: Some(2), + default_dwarf_version: 2, ..Default::default() } } diff --git a/compiler/rustc_target/src/spec/openbsd_base.rs b/compiler/rustc_target/src/spec/openbsd_base.rs index bbd322bb6ce2b..51cecdd47eab2 100644 --- a/compiler/rustc_target/src/spec/openbsd_base.rs +++ b/compiler/rustc_target/src/spec/openbsd_base.rs @@ -11,7 +11,7 @@ pub fn opts() -> TargetOptions { position_independent_executables: true, frame_pointer: FramePointer::Always, // FIXME 43575: should be MayOmit... 
relro_level: RelroLevel::Full, - dwarf_version: Some(2), + default_dwarf_version: 2, ..Default::default() } } diff --git a/compiler/rustc_typeck/src/check/fn_ctxt/suggestions.rs b/compiler/rustc_typeck/src/check/fn_ctxt/suggestions.rs index edb0ec027a0fa..863a981134f24 100644 --- a/compiler/rustc_typeck/src/check/fn_ctxt/suggestions.rs +++ b/compiler/rustc_typeck/src/check/fn_ctxt/suggestions.rs @@ -13,7 +13,6 @@ use rustc_hir::{ use rustc_infer::infer::{self, TyCtxtInferExt}; use rustc_infer::traits; use rustc_middle::lint::in_external_macro; -use rustc_middle::ty::subst::GenericArgKind; use rustc_middle::ty::{self, Binder, IsSuggestable, Subst, ToPredicate, Ty}; use rustc_span::symbol::sym; use rustc_span::Span; @@ -238,25 +237,29 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ); } } - } else if found.to_string().starts_with("Option<") - && expected.to_string() == "Option<&str>" + } else if let ty::Adt(found_adt, found_substs) = found.kind() + && self.tcx.is_diagnostic_item(sym::Option, found_adt.did()) + && let ty::Adt(expected_adt, expected_substs) = expected.kind() + && self.tcx.is_diagnostic_item(sym::Option, expected_adt.did()) + && let ty::Ref(_, inner_ty, _) = expected_substs.type_at(0).kind() + && inner_ty.is_str() { - if let ty::Adt(_def, subst) = found.kind() { - if subst.len() != 0 { - if let GenericArgKind::Type(ty) = subst[0].unpack() { - let peeled = ty.peel_refs().to_string(); - if peeled == "String" { - let ref_cnt = ty.to_string().len() - peeled.len(); - let result = format!(".map(|x| &*{}x)", "*".repeat(ref_cnt)); - err.span_suggestion_verbose( - expr.span.shrink_to_hi(), - "try converting the passed type into a `&str`", - result, - Applicability::MaybeIncorrect, - ); - } - } - } + let ty = found_substs.type_at(0); + let mut peeled = ty; + let mut ref_cnt = 0; + while let ty::Ref(_, inner, _) = peeled.kind() { + peeled = *inner; + ref_cnt += 1; + } + if let ty::Adt(adt, _) = peeled.kind() + && self.tcx.is_diagnostic_item(sym::String, adt.did()) + { + err.span_suggestion_verbose( + expr.span.shrink_to_hi(), + "try converting the passed type into a `&str`", + format!(".map(|x| &*{}x)", "*".repeat(ref_cnt)), + Applicability::MaybeIncorrect, + ); } } } diff --git a/library/core/src/panic/unwind_safe.rs b/library/core/src/panic/unwind_safe.rs index f2948aac3c235..9a6153f1253c5 100644 --- a/library/core/src/panic/unwind_safe.rs +++ b/library/core/src/panic/unwind_safe.rs @@ -217,7 +217,7 @@ impl RefUnwindSafe for crate::sync::atomic::AtomicI32 {} #[stable(feature = "integer_atomics_stable", since = "1.34.0")] impl RefUnwindSafe for crate::sync::atomic::AtomicI64 {} #[cfg(target_has_atomic_load_store = "128")] -#[unstable(feature = "integer_atomics", issue = "32976")] +#[unstable(feature = "integer_atomics", issue = "99069")] impl RefUnwindSafe for crate::sync::atomic::AtomicI128 {} #[cfg(target_has_atomic_load_store = "ptr")] @@ -236,7 +236,7 @@ impl RefUnwindSafe for crate::sync::atomic::AtomicU32 {} #[stable(feature = "integer_atomics_stable", since = "1.34.0")] impl RefUnwindSafe for crate::sync::atomic::AtomicU64 {} #[cfg(target_has_atomic_load_store = "128")] -#[unstable(feature = "integer_atomics", issue = "32976")] +#[unstable(feature = "integer_atomics", issue = "99069")] impl RefUnwindSafe for crate::sync::atomic::AtomicU128 {} #[cfg(target_has_atomic_load_store = "8")] diff --git a/library/core/src/sync/atomic.rs b/library/core/src/sync/atomic.rs index bf8ce6971b87e..b32dcfefacda8 100644 --- a/library/core/src/sync/atomic.rs +++ b/library/core/src/sync/atomic.rs @@ -2692,7 
+2692,7 @@ atomic_int! { stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), rustc_const_stable(feature = "const_integer_atomics", since = "1.34.0"), - unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "99069"), cfg_attr(not(test), rustc_diagnostic_item = "AtomicI8"), "i8", "", @@ -2712,7 +2712,7 @@ atomic_int! { stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), rustc_const_stable(feature = "const_integer_atomics", since = "1.34.0"), - unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "99069"), cfg_attr(not(test), rustc_diagnostic_item = "AtomicU8"), "u8", "", @@ -2732,7 +2732,7 @@ atomic_int! { stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), rustc_const_stable(feature = "const_integer_atomics", since = "1.34.0"), - unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "99069"), cfg_attr(not(test), rustc_diagnostic_item = "AtomicI16"), "i16", "", @@ -2752,7 +2752,7 @@ atomic_int! { stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), rustc_const_stable(feature = "const_integer_atomics", since = "1.34.0"), - unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "99069"), cfg_attr(not(test), rustc_diagnostic_item = "AtomicU16"), "u16", "", @@ -2772,7 +2772,7 @@ atomic_int! { stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), rustc_const_stable(feature = "const_integer_atomics", since = "1.34.0"), - unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "99069"), cfg_attr(not(test), rustc_diagnostic_item = "AtomicI32"), "i32", "", @@ -2792,7 +2792,7 @@ atomic_int! { stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), rustc_const_stable(feature = "const_integer_atomics", since = "1.34.0"), - unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "99069"), cfg_attr(not(test), rustc_diagnostic_item = "AtomicU32"), "u32", "", @@ -2812,7 +2812,7 @@ atomic_int! { stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), rustc_const_stable(feature = "const_integer_atomics", since = "1.34.0"), - unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "99069"), cfg_attr(not(test), rustc_diagnostic_item = "AtomicI64"), "i64", "", @@ -2832,7 +2832,7 @@ atomic_int! { stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), rustc_const_stable(feature = "const_integer_atomics", since = "1.34.0"), - unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "99069"), cfg_attr(not(test), rustc_diagnostic_item = "AtomicU64"), "u64", "", @@ -2845,14 +2845,14 @@ atomic_int! { atomic_int! 
{ cfg(target_has_atomic = "128"), cfg(target_has_atomic_equal_alignment = "128"), - unstable(feature = "integer_atomics", issue = "32976"), - unstable(feature = "integer_atomics", issue = "32976"), - unstable(feature = "integer_atomics", issue = "32976"), - unstable(feature = "integer_atomics", issue = "32976"), - unstable(feature = "integer_atomics", issue = "32976"), - unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "99069"), + unstable(feature = "integer_atomics", issue = "99069"), + unstable(feature = "integer_atomics", issue = "99069"), + unstable(feature = "integer_atomics", issue = "99069"), + unstable(feature = "integer_atomics", issue = "99069"), + unstable(feature = "integer_atomics", issue = "99069"), rustc_const_stable(feature = "const_integer_atomics", since = "1.34.0"), - unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "99069"), cfg_attr(not(test), rustc_diagnostic_item = "AtomicI128"), "i128", "#![feature(integer_atomics)]\n\n", @@ -2865,14 +2865,14 @@ atomic_int! { atomic_int! { cfg(target_has_atomic = "128"), cfg(target_has_atomic_equal_alignment = "128"), - unstable(feature = "integer_atomics", issue = "32976"), - unstable(feature = "integer_atomics", issue = "32976"), - unstable(feature = "integer_atomics", issue = "32976"), - unstable(feature = "integer_atomics", issue = "32976"), - unstable(feature = "integer_atomics", issue = "32976"), - unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "99069"), + unstable(feature = "integer_atomics", issue = "99069"), + unstable(feature = "integer_atomics", issue = "99069"), + unstable(feature = "integer_atomics", issue = "99069"), + unstable(feature = "integer_atomics", issue = "99069"), + unstable(feature = "integer_atomics", issue = "99069"), rustc_const_stable(feature = "const_integer_atomics", since = "1.34.0"), - unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "99069"), cfg_attr(not(test), rustc_diagnostic_item = "AtomicU128"), "u128", "#![feature(integer_atomics)]\n\n", diff --git a/src/doc/unstable-book/src/compiler-flags/dwarf-version.md b/src/doc/unstable-book/src/compiler-flags/dwarf-version.md new file mode 100644 index 0000000000000..c5e86f17df741 --- /dev/null +++ b/src/doc/unstable-book/src/compiler-flags/dwarf-version.md @@ -0,0 +1,9 @@ +## `dwarf-version` + +This option controls the version of DWARF that the compiler emits, on platforms +that use DWARF to encode debug information. It takes one of the following +values: + +* `2`: DWARF version 2 (the default on certain platforms, like macOS). +* `4`: DWARF version 4 (the default on certain platforms, like Linux). +* `5`: DWARF version 5. diff --git a/src/test/assembly/dwarf5.rs b/src/test/assembly/dwarf5.rs new file mode 100644 index 0000000000000..f41e6bd55be7d --- /dev/null +++ b/src/test/assembly/dwarf5.rs @@ -0,0 +1,20 @@ +// Makes sure that `-Z dwarf-version=5` causes `rustc` to emit DWARF version 5. 
+// assembly-output: emit-asm +// compile-flags: -g --target x86_64-unknown-linux-gnu -Z dwarf-version=5 +// needs-llvm-components: x86 + +#![feature(no_core, lang_items)] +#![crate_type = "rlib"] +#![no_core] + +#[lang = "sized"] +trait Sized {} +#[lang = "copy"] +trait Copy {} + +pub fn wibble() {} + +// CHECK: .section .debug_info +// CHECK-NOT: .short 2 +// CHECK-NOT: .short 4 +// CHECK: .short 5 diff --git a/src/test/ui/consts/issue-96169.rs b/src/test/ui/consts/issue-96169.rs new file mode 100644 index 0000000000000..14c0a1399a00e --- /dev/null +++ b/src/test/ui/consts/issue-96169.rs @@ -0,0 +1,18 @@ +// check-pass +// compile-flags: -Zmir-opt-level=4 --emit=mir +#![allow(unused)] +fn a() -> usize { 0 } + +fn bar(_: u32) {} + +fn baz() -> *const dyn Fn(u32) { unimplemented!() } + +fn foo() { + match () { + _ if baz() == &bar as &dyn Fn(u32) => (), + () => (), + } +} + +fn main() { +} diff --git a/src/test/ui/empty/empty-struct-tuple-pat.stderr b/src/test/ui/empty/empty-struct-tuple-pat.stderr index 9f44747381a9a..8d0f75d204c2f 100644 --- a/src/test/ui/empty/empty-struct-tuple-pat.stderr +++ b/src/test/ui/empty/empty-struct-tuple-pat.stderr @@ -5,7 +5,10 @@ LL | struct Empty2(); | ---------------- the tuple struct `Empty2` is defined here ... LL | Empty2 => () - | ^^^^^^ cannot be named the same as a tuple struct + | ^^^^^^ + | | + | cannot be named the same as a tuple struct + | help: try specify the pattern arguments: `Empty2(..)` error[E0530]: match bindings cannot shadow tuple structs --> $DIR/empty-struct-tuple-pat.rs:25:9 @@ -14,7 +17,10 @@ LL | use empty_struct::*; | --------------- the tuple struct `XEmpty6` is imported here ... LL | XEmpty6 => () - | ^^^^^^^ cannot be named the same as a tuple struct + | ^^^^^^^ + | | + | cannot be named the same as a tuple struct + | help: try specify the pattern arguments: `XEmpty6(..)` error[E0532]: expected unit struct, unit variant or constant, found tuple variant `E::Empty4` --> $DIR/empty-struct-tuple-pat.rs:29:9 diff --git a/src/test/ui/pattern/pat-tuple-field-count-cross.stderr b/src/test/ui/pattern/pat-tuple-field-count-cross.stderr index c0cc56aa86e26..d9295746158bb 100644 --- a/src/test/ui/pattern/pat-tuple-field-count-cross.stderr +++ b/src/test/ui/pattern/pat-tuple-field-count-cross.stderr @@ -5,7 +5,10 @@ LL | use declarations_for_tuple_field_count_errors::*; | -------------------------------------------- the tuple struct `Z1` is imported here ... LL | Z1 => {} - | ^^ cannot be named the same as a tuple struct + | ^^ + | | + | cannot be named the same as a tuple struct + | help: try specify the pattern arguments: `Z1(..)` error[E0532]: expected tuple struct or tuple variant, found unit struct `Z0` --> $DIR/pat-tuple-field-count-cross.rs:9:9 diff --git a/src/test/ui/pattern/pat-tuple-overfield.stderr b/src/test/ui/pattern/pat-tuple-overfield.stderr index 856e7918cb75c..54d89e03101dc 100644 --- a/src/test/ui/pattern/pat-tuple-overfield.stderr +++ b/src/test/ui/pattern/pat-tuple-overfield.stderr @@ -5,7 +5,10 @@ LL | struct Z1(); | ------------ the tuple struct `Z1` is defined here ... 
LL | Z1 => {} - | ^^ cannot be named the same as a tuple struct + | ^^ + | | + | cannot be named the same as a tuple struct + | help: try specify the pattern arguments: `Z1(..)` error[E0532]: expected tuple struct or tuple variant, found unit struct `Z0` --> $DIR/pat-tuple-overfield.rs:52:9 diff --git a/src/test/ui/pattern/pattern-binding-disambiguation.stderr b/src/test/ui/pattern/pattern-binding-disambiguation.stderr index faa0d7c30743b..1529e538b5521 100644 --- a/src/test/ui/pattern/pattern-binding-disambiguation.stderr +++ b/src/test/ui/pattern/pattern-binding-disambiguation.stderr @@ -5,7 +5,10 @@ LL | struct TupleStruct(); | --------------------- the tuple struct `TupleStruct` is defined here ... LL | TupleStruct => {} - | ^^^^^^^^^^^ cannot be named the same as a tuple struct + | ^^^^^^^^^^^ + | | + | cannot be named the same as a tuple struct + | help: try specify the pattern arguments: `TupleStruct(..)` error[E0530]: match bindings cannot shadow tuple variants --> $DIR/pattern-binding-disambiguation.rs:33:9 @@ -14,7 +17,10 @@ LL | use E::*; | ---- the tuple variant `TupleVariant` is imported here ... LL | TupleVariant => {} - | ^^^^^^^^^^^^ cannot be named the same as a tuple variant + | ^^^^^^^^^^^^ + | | + | cannot be named the same as a tuple variant + | help: try specify the pattern arguments: `TupleVariant(..)` error[E0530]: match bindings cannot shadow struct variants --> $DIR/pattern-binding-disambiguation.rs:36:9