From 263c426bfd2d8119ad9f615fc0616cf01984435a Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Thu, 25 Aug 2022 17:55:21 +1000 Subject: [PATCH 1/7] Add size assertions for `FnAbi` and `ArgAbi`. --- compiler/rustc_target/src/abi/call/mod.rs | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/compiler/rustc_target/src/abi/call/mod.rs b/compiler/rustc_target/src/abi/call/mod.rs index 639ce64a9f557..ab53ea6101dbf 100644 --- a/compiler/rustc_target/src/abi/call/mod.rs +++ b/compiler/rustc_target/src/abi/call/mod.rs @@ -730,3 +730,13 @@ impl<'a, Ty> FnAbi<'a, Ty> { Ok(()) } } + +// Some types are used a lot. Make sure they don't unintentionally get bigger. +#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] +mod size_asserts { + use super::*; + use rustc_data_structures::static_assert_size; + // These are in alphabetical order, which is easy to maintain. + static_assert_size!(ArgAbi<'_, usize>, 208); + static_assert_size!(FnAbi<'_, usize>, 248); +} From e4bf113027aca77fea447e15fc09a7c8ded701e9 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Thu, 25 Aug 2022 17:52:37 +1000 Subject: [PATCH 2/7] Box `CastTarget` within `PassMode`. Because `PassMode::Cast` is by far the largest variant, but is relatively rare. This requires making `PassMode` not impl `Copy`, and `Clone` is no longer necessary. This causes lots of sigil adjusting, but nothing very notable. --- .../src/abi/comments.rs | 2 +- .../src/abi/pass_mode.rs | 20 ++++---- .../src/abi/returning.rs | 6 +-- compiler/rustc_codegen_gcc/src/abi.rs | 4 +- .../rustc_codegen_gcc/src/intrinsic/mod.rs | 6 +-- compiler/rustc_codegen_llvm/src/abi.rs | 46 +++++++++---------- compiler/rustc_codegen_llvm/src/intrinsic.rs | 4 +- compiler/rustc_codegen_ssa/src/mir/block.rs | 8 ++-- .../rustc_codegen_ssa/src/mir/intrinsic.rs | 4 +- .../src/interpret/terminator.rs | 4 +- compiler/rustc_target/src/abi/call/mod.rs | 10 ++-- 11 files changed, 57 insertions(+), 57 deletions(-) diff --git a/compiler/rustc_codegen_cranelift/src/abi/comments.rs b/compiler/rustc_codegen_cranelift/src/abi/comments.rs index 37d2679c10d70..7f4619b5c940b 100644 --- a/compiler/rustc_codegen_cranelift/src/abi/comments.rs +++ b/compiler/rustc_codegen_cranelift/src/abi/comments.rs @@ -24,7 +24,7 @@ pub(super) fn add_arg_comment<'tcx>( local: Option, local_field: Option, params: &[Value], - arg_abi_mode: PassMode, + arg_abi_mode: &PassMode, arg_layout: TyAndLayout<'tcx>, ) { if !fx.clif_comments.enabled() { diff --git a/compiler/rustc_codegen_cranelift/src/abi/pass_mode.rs b/compiler/rustc_codegen_cranelift/src/abi/pass_mode.rs index 6c10baa53d415..058dee176e243 100644 --- a/compiler/rustc_codegen_cranelift/src/abi/pass_mode.rs +++ b/compiler/rustc_codegen_cranelift/src/abi/pass_mode.rs @@ -38,7 +38,7 @@ fn apply_arg_attrs_to_abi_param(mut param: AbiParam, arg_attrs: ArgAttributes) - param } -fn cast_target_to_abi_params(cast: CastTarget) -> SmallVec<[AbiParam; 2]> { +fn cast_target_to_abi_params(cast: &CastTarget) -> SmallVec<[AbiParam; 2]> { let (rest_count, rem_bytes) = if cast.rest.unit.size.bytes() == 0 { (0, 0) } else { @@ -100,7 +100,7 @@ impl<'tcx> ArgAbiExt<'tcx> for ArgAbi<'tcx, Ty<'tcx>> { } _ => unreachable!("{:?}", self.layout.abi), }, - PassMode::Cast(cast) => cast_target_to_abi_params(cast), + PassMode::Cast(ref cast) => cast_target_to_abi_params(cast), PassMode::Indirect { attrs, extra_attrs: None, on_stack } => { if on_stack { // Abi requires aligning struct size to pointer size @@ -145,7 +145,9 @@ impl<'tcx> ArgAbiExt<'tcx> for 
ArgAbi<'tcx, Ty<'tcx>> { } _ => unreachable!("{:?}", self.layout.abi), }, - PassMode::Cast(cast) => (None, cast_target_to_abi_params(cast).into_iter().collect()), + PassMode::Cast(ref cast) => { + (None, cast_target_to_abi_params(cast).into_iter().collect()) + } PassMode::Indirect { attrs: _, extra_attrs: None, on_stack } => { assert!(!on_stack); (Some(AbiParam::special(pointer_ty(tcx), ArgumentPurpose::StructReturn)), vec![]) @@ -160,7 +162,7 @@ impl<'tcx> ArgAbiExt<'tcx> for ArgAbi<'tcx, Ty<'tcx>> { pub(super) fn to_casted_value<'tcx>( fx: &mut FunctionCx<'_, '_, 'tcx>, arg: CValue<'tcx>, - cast: CastTarget, + cast: &CastTarget, ) -> SmallVec<[Value; 2]> { let (ptr, meta) = arg.force_stack(fx); assert!(meta.is_none()); @@ -179,7 +181,7 @@ pub(super) fn from_casted_value<'tcx>( fx: &mut FunctionCx<'_, '_, 'tcx>, block_params: &[Value], layout: TyAndLayout<'tcx>, - cast: CastTarget, + cast: &CastTarget, ) -> CValue<'tcx> { let abi_params = cast_target_to_abi_params(cast); let abi_param_size: u32 = abi_params.iter().map(|param| param.value_type.bytes()).sum(); @@ -224,7 +226,7 @@ pub(super) fn adjust_arg_for_abi<'tcx>( let (a, b) = arg.load_scalar_pair(fx); smallvec![a, b] } - PassMode::Cast(cast) => to_casted_value(fx, arg, cast), + PassMode::Cast(ref cast) => to_casted_value(fx, arg, cast), PassMode::Indirect { .. } => { if is_owned { match arg.force_stack(fx) { @@ -268,7 +270,7 @@ pub(super) fn cvalue_for_param<'tcx>( local, local_field, &block_params, - arg_abi.mode, + &arg_abi.mode, arg_abi.layout, ); @@ -282,7 +284,9 @@ pub(super) fn cvalue_for_param<'tcx>( assert_eq!(block_params.len(), 2, "{:?}", block_params); Some(CValue::by_val_pair(block_params[0], block_params[1], arg_abi.layout)) } - PassMode::Cast(cast) => Some(from_casted_value(fx, &block_params, arg_abi.layout, cast)), + PassMode::Cast(ref cast) => { + Some(from_casted_value(fx, &block_params, arg_abi.layout, cast)) + } PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => { assert_eq!(block_params.len(), 1, "{:?}", block_params); Some(CValue::by_ref(Pointer::new(block_params[0]), arg_abi.layout)) diff --git a/compiler/rustc_codegen_cranelift/src/abi/returning.rs b/compiler/rustc_codegen_cranelift/src/abi/returning.rs index ff3bb2dfd000f..29ef5e2dfdae6 100644 --- a/compiler/rustc_codegen_cranelift/src/abi/returning.rs +++ b/compiler/rustc_codegen_cranelift/src/abi/returning.rs @@ -44,7 +44,7 @@ pub(super) fn codegen_return_param<'tcx>( Some(RETURN_PLACE), None, &ret_param, - fx.fn_abi.as_ref().unwrap().ret.mode, + &fx.fn_abi.as_ref().unwrap().ret.mode, fx.fn_abi.as_ref().unwrap().ret.layout, ); @@ -92,7 +92,7 @@ pub(super) fn codegen_with_call_return_arg<'tcx>( ret_place .write_cvalue(fx, CValue::by_val_pair(ret_val_a, ret_val_b, ret_arg_abi.layout)); } - PassMode::Cast(cast) => { + PassMode::Cast(ref cast) => { let results = fx.bcx.inst_results(call_inst).iter().copied().collect::>(); let result = @@ -131,7 +131,7 @@ pub(crate) fn codegen_return(fx: &mut FunctionCx<'_, '_, '_>) { let (ret_val_a, ret_val_b) = place.to_cvalue(fx).load_scalar_pair(fx); fx.bcx.ins().return_(&[ret_val_a, ret_val_b]); } - PassMode::Cast(cast) => { + PassMode::Cast(ref cast) => { let place = fx.get_local_place(RETURN_PLACE); let ret_val = place.to_cvalue(fx); let ret_vals = super::pass_mode::to_casted_value(fx, ret_val, cast); diff --git a/compiler/rustc_codegen_gcc/src/abi.rs b/compiler/rustc_codegen_gcc/src/abi.rs index 0ed3e1fbe93f6..9b55db6a54764 100644 --- a/compiler/rustc_codegen_gcc/src/abi.rs +++ 
b/compiler/rustc_codegen_gcc/src/abi.rs @@ -133,7 +133,7 @@ impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { match self.ret.mode { PassMode::Ignore => cx.type_void(), PassMode::Direct(_) | PassMode::Pair(..) => self.ret.layout.immediate_gcc_type(cx), - PassMode::Cast(cast) => cast.gcc_type(cx), + PassMode::Cast(ref cast) => cast.gcc_type(cx), PassMode::Indirect { .. } => { argument_tys.push(cx.type_ptr_to(self.ret.memory_ty(cx))); cx.type_void() @@ -157,7 +157,7 @@ impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { PassMode::Indirect { extra_attrs: Some(_), .. } => { unimplemented!(); } - PassMode::Cast(cast) => cast.gcc_type(cx), + PassMode::Cast(ref cast) => cast.gcc_type(cx), PassMode::Indirect { extra_attrs: None, on_stack: true, .. } => { on_stack_param_indices.insert(argument_tys.len()); arg.memory_ty(cx) diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs index 5fbdedac0c45c..f4493776ea268 100644 --- a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs +++ b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs @@ -130,7 +130,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { sym::volatile_load | sym::unaligned_volatile_load => { let tp_ty = substs.type_at(0); let mut ptr = args[0].immediate(); - if let PassMode::Cast(ty) = fn_abi.ret.mode { + if let PassMode::Cast(ty) = &fn_abi.ret.mode { ptr = self.pointercast(ptr, self.type_ptr_to(ty.gcc_type(self))); } let load = self.volatile_load(ptr.get_type(), ptr); @@ -320,7 +320,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { }; if !fn_abi.ret.is_ignore() { - if let PassMode::Cast(ty) = fn_abi.ret.mode { + if let PassMode::Cast(ty) = &fn_abi.ret.mode { let ptr_llty = self.type_ptr_to(ty.gcc_type(self)); let ptr = self.pointercast(result.llval, ptr_llty); self.store(llval, ptr, result.align); @@ -416,7 +416,7 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> { else if self.is_unsized_indirect() { bug!("unsized `ArgAbi` must be handled through `store_fn_arg`"); } - else if let PassMode::Cast(cast) = self.mode { + else if let PassMode::Cast(ref cast) = self.mode { // FIXME(eddyb): Figure out when the simpler Store is safe, clang // uses it for i16 -> {i8, i8}, but not for i24 -> {i8, i8, i8}. let can_store_through_cast_ptr = false; diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs index 9eb3574e77b00..09c53afa6c814 100644 --- a/compiler/rustc_codegen_llvm/src/abi.rs +++ b/compiler/rustc_codegen_llvm/src/abi.rs @@ -213,7 +213,7 @@ impl<'ll, 'tcx> ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> { OperandValue::Ref(val, None, self.layout.align.abi).store(bx, dst) } else if self.is_unsized_indirect() { bug!("unsized `ArgAbi` must be handled through `store_fn_arg`"); - } else if let PassMode::Cast(cast) = self.mode { + } else if let PassMode::Cast(cast) = &self.mode { // FIXME(eddyb): Figure out when the simpler Store is safe, clang // uses it for i16 -> {i8, i8}, but not for i24 -> {i8, i8, i8}. let can_store_through_cast_ptr = false; @@ -335,7 +335,7 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { if let PassMode::Indirect { .. } = self.ret.mode { 1 } else { 0 } + args_capacity, ); - let llreturn_ty = match self.ret.mode { + let llreturn_ty = match &self.ret.mode { PassMode::Ignore => cx.type_void(), PassMode::Direct(_) | PassMode::Pair(..) 
=> self.ret.layout.immediate_llvm_type(cx), PassMode::Cast(cast) => cast.llvm_type(cx), @@ -351,7 +351,7 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { llargument_tys.push(ty.llvm_type(cx)); } - let llarg_ty = match arg.mode { + let llarg_ty = match &arg.mode { PassMode::Ignore => continue, PassMode::Direct(_) => arg.layout.immediate_llvm_type(cx), PassMode::Pair(..) => { @@ -426,11 +426,11 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { i += 1; i - 1 }; - match self.ret.mode { - PassMode::Direct(ref attrs) => { + match &self.ret.mode { + PassMode::Direct(attrs) => { attrs.apply_attrs_to_llfn(llvm::AttributePlace::ReturnValue, cx, llfn); } - PassMode::Indirect { ref attrs, extra_attrs: _, on_stack } => { + PassMode::Indirect { attrs, extra_attrs: _, on_stack } => { assert!(!on_stack); let i = apply(attrs); let sret = llvm::CreateStructRetAttr(cx.llcx, self.ret.layout.llvm_type(cx)); @@ -445,23 +445,23 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { if arg.pad.is_some() { apply(&ArgAttributes::new()); } - match arg.mode { + match &arg.mode { PassMode::Ignore => {} - PassMode::Indirect { ref attrs, extra_attrs: None, on_stack: true } => { + PassMode::Indirect { attrs, extra_attrs: None, on_stack: true } => { let i = apply(attrs); let byval = llvm::CreateByValAttr(cx.llcx, arg.layout.llvm_type(cx)); attributes::apply_to_llfn(llfn, llvm::AttributePlace::Argument(i), &[byval]); } - PassMode::Direct(ref attrs) - | PassMode::Indirect { ref attrs, extra_attrs: None, on_stack: false } => { + PassMode::Direct(attrs) + | PassMode::Indirect { attrs, extra_attrs: None, on_stack: false } => { apply(attrs); } - PassMode::Indirect { ref attrs, extra_attrs: Some(ref extra_attrs), on_stack } => { + PassMode::Indirect { attrs, extra_attrs: Some(extra_attrs), on_stack } => { assert!(!on_stack); apply(attrs); apply(extra_attrs); } - PassMode::Pair(ref a, ref b) => { + PassMode::Pair(a, b) => { apply(a); apply(b); } @@ -488,11 +488,11 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { i += 1; i - 1 }; - match self.ret.mode { - PassMode::Direct(ref attrs) => { + match &self.ret.mode { + PassMode::Direct(attrs) => { attrs.apply_attrs_to_callsite(llvm::AttributePlace::ReturnValue, bx.cx, callsite); } - PassMode::Indirect { ref attrs, extra_attrs: _, on_stack } => { + PassMode::Indirect { attrs, extra_attrs: _, on_stack } => { assert!(!on_stack); let i = apply(bx.cx, attrs); let sret = llvm::CreateStructRetAttr(bx.cx.llcx, self.ret.layout.llvm_type(bx)); @@ -521,9 +521,9 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { if arg.pad.is_some() { apply(bx.cx, &ArgAttributes::new()); } - match arg.mode { + match &arg.mode { PassMode::Ignore => {} - PassMode::Indirect { ref attrs, extra_attrs: None, on_stack: true } => { + PassMode::Indirect { attrs, extra_attrs: None, on_stack: true } => { let i = apply(bx.cx, attrs); let byval = llvm::CreateByValAttr(bx.cx.llcx, arg.layout.llvm_type(bx)); attributes::apply_to_callsite( @@ -532,19 +532,15 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { &[byval], ); } - PassMode::Direct(ref attrs) - | PassMode::Indirect { ref attrs, extra_attrs: None, on_stack: false } => { + PassMode::Direct(attrs) + | PassMode::Indirect { attrs, extra_attrs: None, on_stack: false } => { apply(bx.cx, attrs); } - PassMode::Indirect { - ref attrs, - extra_attrs: Some(ref extra_attrs), - on_stack: _, - } => { + PassMode::Indirect { attrs, extra_attrs: Some(extra_attrs), 
on_stack: _ } => { apply(bx.cx, attrs); apply(bx.cx, extra_attrs); } - PassMode::Pair(ref a, ref b) => { + PassMode::Pair(a, b) => { apply(bx.cx, a); apply(bx.cx, b); } diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs index 9f3647492877c..d46a6dd32c8a5 100644 --- a/compiler/rustc_codegen_llvm/src/intrinsic.rs +++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs @@ -161,7 +161,7 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> { sym::volatile_load | sym::unaligned_volatile_load => { let tp_ty = substs.type_at(0); let ptr = args[0].immediate(); - let load = if let PassMode::Cast(ty) = fn_abi.ret.mode { + let load = if let PassMode::Cast(ty) = &fn_abi.ret.mode { let llty = ty.llvm_type(self); let ptr = self.pointercast(ptr, self.type_ptr_to(llty)); self.volatile_load(llty, ptr) @@ -374,7 +374,7 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> { }; if !fn_abi.ret.is_ignore() { - if let PassMode::Cast(ty) = fn_abi.ret.mode { + if let PassMode::Cast(ty) = &fn_abi.ret.mode { let ptr_llty = self.type_ptr_to(ty.llvm_type(self)); let ptr = self.pointercast(result.llval, ptr_llty); self.store(llval, ptr, result.align); diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index 47a40be5d8c5b..187af47114f25 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -324,7 +324,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { bx.unreachable(); return; } - let llval = match self.fn_abi.ret.mode { + let llval = match &self.fn_abi.ret.mode { PassMode::Ignore | PassMode::Indirect { .. } => { bx.ret_void(); return; @@ -360,7 +360,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { llval } }; - let ty = bx.cast_backend_type(&cast_ty); + let ty = bx.cast_backend_type(cast_ty); let addr = bx.pointercast(llslot, bx.type_ptr_to(ty)); bx.load(ty, addr, self.fn_abi.ret.layout.align.abi) } @@ -1222,8 +1222,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { if by_ref && !arg.is_indirect() { // Have to load the argument, maybe while casting it. 
- if let PassMode::Cast(ty) = arg.mode { - let llty = bx.cast_backend_type(&ty); + if let PassMode::Cast(ty) = &arg.mode { + let llty = bx.cast_backend_type(ty); let addr = bx.pointercast(llval, bx.type_ptr_to(llty)); llval = bx.load(llty, addr, align.min(arg.layout.align.abi)); } else { diff --git a/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs b/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs index 94ac71a4dd263..e11ebdce80f77 100644 --- a/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs +++ b/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs @@ -597,8 +597,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { }; if !fn_abi.ret.is_ignore() { - if let PassMode::Cast(ty) = fn_abi.ret.mode { - let ptr_llty = bx.type_ptr_to(bx.cast_backend_type(&ty)); + if let PassMode::Cast(ty) = &fn_abi.ret.mode { + let ptr_llty = bx.type_ptr_to(bx.cast_backend_type(ty)); let ptr = bx.pointercast(result.llval, ptr_llty); bx.store(llval, ptr, result.align); } else { diff --git a/compiler/rustc_const_eval/src/interpret/terminator.rs b/compiler/rustc_const_eval/src/interpret/terminator.rs index c8557d172ed43..27bb828feacf7 100644 --- a/compiler/rustc_const_eval/src/interpret/terminator.rs +++ b/compiler/rustc_const_eval/src/interpret/terminator.rs @@ -218,7 +218,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { // Padding must be fully equal. let pad_compat = || caller_abi.pad == callee_abi.pad; // When comparing the PassMode, we have to be smart about comparing the attributes. - let arg_attr_compat = |a1: ArgAttributes, a2: ArgAttributes| { + let arg_attr_compat = |a1: &ArgAttributes, a2: &ArgAttributes| { // There's only one regular attribute that matters for the call ABI: InReg. // Everything else is things like noalias, dereferencable, nonnull, ... // (This also applies to pointee_size, pointee_align.) @@ -233,7 +233,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { } return true; }; - let mode_compat = || match (caller_abi.mode, callee_abi.mode) { + let mode_compat = || match (&caller_abi.mode, &callee_abi.mode) { (PassMode::Ignore, PassMode::Ignore) => true, (PassMode::Direct(a1), PassMode::Direct(a2)) => arg_attr_compat(a1, a2), (PassMode::Pair(a1, b1), PassMode::Pair(a2, b2)) => { diff --git a/compiler/rustc_target/src/abi/call/mod.rs b/compiler/rustc_target/src/abi/call/mod.rs index ab53ea6101dbf..d4430b1609b9c 100644 --- a/compiler/rustc_target/src/abi/call/mod.rs +++ b/compiler/rustc_target/src/abi/call/mod.rs @@ -26,7 +26,7 @@ mod x86; mod x86_64; mod x86_win64; -#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, HashStable_Generic)] +#[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)] pub enum PassMode { /// Ignore the argument. /// @@ -42,7 +42,7 @@ pub enum PassMode { Pair(ArgAttributes, ArgAttributes), /// Pass the argument after casting it, to either /// a single uniform or a pair of registers. - Cast(CastTarget), + Cast(Box), /// Pass the argument indirectly via a hidden pointer. /// The `extra_attrs` value, if any, is for the extra data (vtable or length) /// which indicates that it refers to an unsized rvalue. @@ -548,7 +548,7 @@ impl<'a, Ty> ArgAbi<'a, Ty> { } pub fn cast_to>(&mut self, target: T) { - self.mode = PassMode::Cast(target.into()); + self.mode = PassMode::Cast(Box::new(target.into())); } pub fn pad_with(&mut self, reg: Reg) { @@ -737,6 +737,6 @@ mod size_asserts { use super::*; use rustc_data_structures::static_assert_size; // These are in alphabetical order, which is easy to maintain. 
- static_assert_size!(ArgAbi<'_, usize>, 208); - static_assert_size!(FnAbi<'_, usize>, 248); + static_assert_size!(ArgAbi<'_, usize>, 72); + static_assert_size!(FnAbi<'_, usize>, 112); } From 4df7bffa953d4583f6785114519602a4f1898f8d Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Thu, 25 Aug 2022 19:02:22 +1000 Subject: [PATCH 3/7] Change `FnAbi::fixed_count` to a `u32`. --- compiler/rustc_codegen_llvm/src/abi.rs | 3 ++- compiler/rustc_middle/src/ty/layout.rs | 2 +- .../src/typeid/typeid_itanium_cxx_abi.rs | 2 +- compiler/rustc_target/src/abi/call/mod.rs | 4 ++-- compiler/rustc_target/src/abi/call/riscv.rs | 2 +- 5 files changed, 7 insertions(+), 6 deletions(-) diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs index 09c53afa6c814..b88db6f6f9804 100644 --- a/compiler/rustc_codegen_llvm/src/abi.rs +++ b/compiler/rustc_codegen_llvm/src/abi.rs @@ -325,7 +325,8 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { fn llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type { // Ignore "extra" args from the call site for C variadic functions. // Only the "fixed" args are part of the LLVM function signature. - let args = if self.c_variadic { &self.args[..self.fixed_count] } else { &self.args }; + let args = + if self.c_variadic { &self.args[..self.fixed_count as usize] } else { &self.args }; let args_capacity: usize = args.iter().map(|arg| if arg.pad.is_some() { 1 } else { 0 } + diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs index f5505590168d2..9dd7616a47959 100644 --- a/compiler/rustc_middle/src/ty/layout.rs +++ b/compiler/rustc_middle/src/ty/layout.rs @@ -3262,7 +3262,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> { .map(|(i, ty)| arg_of(ty, Some(i))) .collect::>()?, c_variadic: sig.c_variadic, - fixed_count: inputs.len(), + fixed_count: inputs.len() as u32, conv, can_unwind: fn_can_unwind(self.tcx(), fn_def_id, sig.abi), }; diff --git a/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs b/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs index a09b52fbfdf97..cb61f76af1d73 100644 --- a/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs +++ b/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs @@ -888,7 +888,7 @@ pub fn typeid_for_fnabi<'tcx>( typeid.push('v'); } } else { - for n in 0..fn_abi.fixed_count { + for n in 0..fn_abi.fixed_count as usize { let ty = transform_ty(tcx, fn_abi.args[n].layout.ty, transform_ty_options); typeid.push_str(&encode_ty(tcx, ty, &mut dict, encode_ty_options)); } diff --git a/compiler/rustc_target/src/abi/call/mod.rs b/compiler/rustc_target/src/abi/call/mod.rs index d4430b1609b9c..a975d9027ecb6 100644 --- a/compiler/rustc_target/src/abi/call/mod.rs +++ b/compiler/rustc_target/src/abi/call/mod.rs @@ -625,7 +625,7 @@ pub struct FnAbi<'a, Ty> { /// /// Should only be different from args.len() when c_variadic is true. /// This can be used to know whether an argument is variadic or not. - pub fixed_count: usize, + pub fixed_count: u32, pub conv: Conv, @@ -738,5 +738,5 @@ mod size_asserts { use rustc_data_structures::static_assert_size; // These are in alphabetical order, which is easy to maintain. 
static_assert_size!(ArgAbi<'_, usize>, 72); - static_assert_size!(FnAbi<'_, usize>, 112); + static_assert_size!(FnAbi<'_, usize>, 104); } diff --git a/compiler/rustc_target/src/abi/call/riscv.rs b/compiler/rustc_target/src/abi/call/riscv.rs index 752b44f6434cf..1cb360f834e0e 100644 --- a/compiler/rustc_target/src/abi/call/riscv.rs +++ b/compiler/rustc_target/src/abi/call/riscv.rs @@ -340,7 +340,7 @@ where arg, xlen, flen, - i >= fn_abi.fixed_count, + i >= fn_abi.fixed_count as usize, &mut avail_gprs, &mut avail_fprs, ); From b75b3b3afecfb6638c7c75ad665f8f3091aab1ae Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Thu, 25 Aug 2022 19:08:04 +1000 Subject: [PATCH 4/7] Change `FnAbi::args` to a boxed slice. --- compiler/rustc_codegen_gcc/src/abi.rs | 2 +- compiler/rustc_codegen_llvm/src/abi.rs | 4 ++-- compiler/rustc_middle/src/ty/layout.rs | 2 +- compiler/rustc_target/src/abi/call/aarch64.rs | 2 +- compiler/rustc_target/src/abi/call/amdgpu.rs | 2 +- compiler/rustc_target/src/abi/call/arm.rs | 2 +- compiler/rustc_target/src/abi/call/avr.rs | 2 +- compiler/rustc_target/src/abi/call/bpf.rs | 2 +- compiler/rustc_target/src/abi/call/hexagon.rs | 2 +- compiler/rustc_target/src/abi/call/m68k.rs | 2 +- compiler/rustc_target/src/abi/call/mips.rs | 2 +- compiler/rustc_target/src/abi/call/mips64.rs | 2 +- compiler/rustc_target/src/abi/call/mod.rs | 4 ++-- compiler/rustc_target/src/abi/call/msp430.rs | 2 +- compiler/rustc_target/src/abi/call/nvptx64.rs | 4 ++-- compiler/rustc_target/src/abi/call/powerpc.rs | 2 +- compiler/rustc_target/src/abi/call/powerpc64.rs | 2 +- compiler/rustc_target/src/abi/call/s390x.rs | 2 +- compiler/rustc_target/src/abi/call/sparc.rs | 2 +- compiler/rustc_target/src/abi/call/sparc64.rs | 2 +- compiler/rustc_target/src/abi/call/wasm.rs | 4 ++-- compiler/rustc_target/src/abi/call/x86.rs | 4 ++-- compiler/rustc_target/src/abi/call/x86_64.rs | 2 +- compiler/rustc_target/src/abi/call/x86_win64.rs | 2 +- 24 files changed, 29 insertions(+), 29 deletions(-) diff --git a/compiler/rustc_codegen_gcc/src/abi.rs b/compiler/rustc_codegen_gcc/src/abi.rs index 9b55db6a54764..7f313583c82c8 100644 --- a/compiler/rustc_codegen_gcc/src/abi.rs +++ b/compiler/rustc_codegen_gcc/src/abi.rs @@ -140,7 +140,7 @@ impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { } }; - for arg in &self.args { + for arg in self.args.iter() { // add padding if let Some(ty) = arg.pad { argument_tys.push(ty.gcc_type(cx)); diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs index b88db6f6f9804..2353362f83b57 100644 --- a/compiler/rustc_codegen_llvm/src/abi.rs +++ b/compiler/rustc_codegen_llvm/src/abi.rs @@ -442,7 +442,7 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { } _ => {} } - for arg in &self.args { + for arg in self.args.iter() { if arg.pad.is_some() { apply(&ArgAttributes::new()); } @@ -518,7 +518,7 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { } } } - for arg in &self.args { + for arg in self.args.iter() { if arg.pad.is_some() { apply(bx.cx, &ArgAttributes::new()); } diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs index 9dd7616a47959..48b668253ab91 100644 --- a/compiler/rustc_middle/src/ty/layout.rs +++ b/compiler/rustc_middle/src/ty/layout.rs @@ -3334,7 +3334,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> { } }; fixup(&mut fn_abi.ret); - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { fixup(arg); } } else { diff --git 
a/compiler/rustc_target/src/abi/call/aarch64.rs b/compiler/rustc_target/src/abi/call/aarch64.rs index 4613a459c51d6..8a5ad90db71f5 100644 --- a/compiler/rustc_target/src/abi/call/aarch64.rs +++ b/compiler/rustc_target/src/abi/call/aarch64.rs @@ -77,7 +77,7 @@ where classify_ret(cx, &mut fn_abi.ret); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/amdgpu.rs b/compiler/rustc_target/src/abi/call/amdgpu.rs index 9be97476ce8f0..e30dead63cbf8 100644 --- a/compiler/rustc_target/src/abi/call/amdgpu.rs +++ b/compiler/rustc_target/src/abi/call/amdgpu.rs @@ -26,7 +26,7 @@ where classify_ret(cx, &mut fn_abi.ret); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/arm.rs b/compiler/rustc_target/src/abi/call/arm.rs index e66c2132b8677..1923ea58838ba 100644 --- a/compiler/rustc_target/src/abi/call/arm.rs +++ b/compiler/rustc_target/src/abi/call/arm.rs @@ -88,7 +88,7 @@ where classify_ret(cx, &mut fn_abi.ret, vfp); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/avr.rs b/compiler/rustc_target/src/abi/call/avr.rs index c1f7a1e3af586..e20f01355a4a6 100644 --- a/compiler/rustc_target/src/abi/call/avr.rs +++ b/compiler/rustc_target/src/abi/call/avr.rs @@ -49,7 +49,7 @@ pub fn compute_abi_info(fty: &mut FnAbi<'_, Ty>) { classify_ret_ty(&mut fty.ret); } - for arg in &mut fty.args { + for arg in fty.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/bpf.rs b/compiler/rustc_target/src/abi/call/bpf.rs index 466c525531c1b..780e7df438373 100644 --- a/compiler/rustc_target/src/abi/call/bpf.rs +++ b/compiler/rustc_target/src/abi/call/bpf.rs @@ -22,7 +22,7 @@ pub fn compute_abi_info(fn_abi: &mut FnAbi<'_, Ty>) { classify_ret(&mut fn_abi.ret); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/hexagon.rs b/compiler/rustc_target/src/abi/call/hexagon.rs index 8028443b8a649..80a442048d53c 100644 --- a/compiler/rustc_target/src/abi/call/hexagon.rs +++ b/compiler/rustc_target/src/abi/call/hexagon.rs @@ -21,7 +21,7 @@ pub fn compute_abi_info(fn_abi: &mut FnAbi<'_, Ty>) { classify_ret(&mut fn_abi.ret); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/m68k.rs b/compiler/rustc_target/src/abi/call/m68k.rs index 58fdc00b69627..c1e0f54af5f1e 100644 --- a/compiler/rustc_target/src/abi/call/m68k.rs +++ b/compiler/rustc_target/src/abi/call/m68k.rs @@ -21,7 +21,7 @@ pub fn compute_abi_info(fn_abi: &mut FnAbi<'_, Ty>) { classify_ret(&mut fn_abi.ret); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/mips.rs b/compiler/rustc_target/src/abi/call/mips.rs index cc443197680e4..83d8657af118e 100644 --- a/compiler/rustc_target/src/abi/call/mips.rs +++ b/compiler/rustc_target/src/abi/call/mips.rs @@ -42,7 +42,7 @@ where classify_ret(cx, &mut fn_abi.ret, &mut offset); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/mips64.rs b/compiler/rustc_target/src/abi/call/mips64.rs index 
cd54167aa7f82..2700f67b2096b 100644 --- a/compiler/rustc_target/src/abi/call/mips64.rs +++ b/compiler/rustc_target/src/abi/call/mips64.rs @@ -158,7 +158,7 @@ where classify_ret(cx, &mut fn_abi.ret); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/mod.rs b/compiler/rustc_target/src/abi/call/mod.rs index a975d9027ecb6..bf30c24258e86 100644 --- a/compiler/rustc_target/src/abi/call/mod.rs +++ b/compiler/rustc_target/src/abi/call/mod.rs @@ -614,7 +614,7 @@ pub enum Conv { #[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)] pub struct FnAbi<'a, Ty> { /// The LLVM types of each argument. - pub args: Vec>, + pub args: Box<[ArgAbi<'a, Ty>]>, /// LLVM return type. pub ret: ArgAbi<'a, Ty>, @@ -738,5 +738,5 @@ mod size_asserts { use rustc_data_structures::static_assert_size; // These are in alphabetical order, which is easy to maintain. static_assert_size!(ArgAbi<'_, usize>, 72); - static_assert_size!(FnAbi<'_, usize>, 104); + static_assert_size!(FnAbi<'_, usize>, 96); } diff --git a/compiler/rustc_target/src/abi/call/msp430.rs b/compiler/rustc_target/src/abi/call/msp430.rs index 0ba73657b5b08..33ef47be00993 100644 --- a/compiler/rustc_target/src/abi/call/msp430.rs +++ b/compiler/rustc_target/src/abi/call/msp430.rs @@ -30,7 +30,7 @@ pub fn compute_abi_info(fn_abi: &mut FnAbi<'_, Ty>) { classify_ret(&mut fn_abi.ret); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/nvptx64.rs b/compiler/rustc_target/src/abi/call/nvptx64.rs index fc16f1c97a452..4abe51cd697e0 100644 --- a/compiler/rustc_target/src/abi/call/nvptx64.rs +++ b/compiler/rustc_target/src/abi/call/nvptx64.rs @@ -38,7 +38,7 @@ pub fn compute_abi_info(fn_abi: &mut FnAbi<'_, Ty>) { classify_ret(&mut fn_abi.ret); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } @@ -55,7 +55,7 @@ where panic!("Kernels should not return anything other than () or !"); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/powerpc.rs b/compiler/rustc_target/src/abi/call/powerpc.rs index 27a5c6d2fc6a9..70c32db0a871b 100644 --- a/compiler/rustc_target/src/abi/call/powerpc.rs +++ b/compiler/rustc_target/src/abi/call/powerpc.rs @@ -21,7 +21,7 @@ pub fn compute_abi_info(fn_abi: &mut FnAbi<'_, Ty>) { classify_ret(&mut fn_abi.ret); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/powerpc64.rs b/compiler/rustc_target/src/abi/call/powerpc64.rs index c22ef9c8f2a7f..359bb8fc09a87 100644 --- a/compiler/rustc_target/src/abi/call/powerpc64.rs +++ b/compiler/rustc_target/src/abi/call/powerpc64.rs @@ -132,7 +132,7 @@ where classify_ret(cx, &mut fn_abi.ret, abi); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/s390x.rs b/compiler/rustc_target/src/abi/call/s390x.rs index 13706e8c21725..ea23692817f50 100644 --- a/compiler/rustc_target/src/abi/call/s390x.rs +++ b/compiler/rustc_target/src/abi/call/s390x.rs @@ -48,7 +48,7 @@ where classify_ret(&mut fn_abi.ret); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/sparc.rs 
b/compiler/rustc_target/src/abi/call/sparc.rs index cc443197680e4..83d8657af118e 100644 --- a/compiler/rustc_target/src/abi/call/sparc.rs +++ b/compiler/rustc_target/src/abi/call/sparc.rs @@ -42,7 +42,7 @@ where classify_ret(cx, &mut fn_abi.ret, &mut offset); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/sparc64.rs b/compiler/rustc_target/src/abi/call/sparc64.rs index cc3a0a69999b0..1b74959ad17c2 100644 --- a/compiler/rustc_target/src/abi/call/sparc64.rs +++ b/compiler/rustc_target/src/abi/call/sparc64.rs @@ -217,7 +217,7 @@ where classify_arg(cx, &mut fn_abi.ret, Size { raw: 32 }); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/wasm.rs b/compiler/rustc_target/src/abi/call/wasm.rs index 3237cde10654a..44427ee5317c1 100644 --- a/compiler/rustc_target/src/abi/call/wasm.rs +++ b/compiler/rustc_target/src/abi/call/wasm.rs @@ -50,7 +50,7 @@ where classify_ret(cx, &mut fn_abi.ret); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } @@ -66,7 +66,7 @@ pub fn compute_wasm_abi_info(fn_abi: &mut FnAbi<'_, Ty>) { classify_ret(&mut fn_abi.ret); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/x86.rs b/compiler/rustc_target/src/abi/call/x86.rs index c7d59baf9191f..c2d70d1160c44 100644 --- a/compiler/rustc_target/src/abi/call/x86.rs +++ b/compiler/rustc_target/src/abi/call/x86.rs @@ -49,7 +49,7 @@ where } } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } @@ -72,7 +72,7 @@ where let mut free_regs = 2; - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { let attrs = match arg.mode { PassMode::Ignore | PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => { diff --git a/compiler/rustc_target/src/abi/call/x86_64.rs b/compiler/rustc_target/src/abi/call/x86_64.rs index a52e01a495a57..c0c071a614f50 100644 --- a/compiler/rustc_target/src/abi/call/x86_64.rs +++ b/compiler/rustc_target/src/abi/call/x86_64.rs @@ -239,7 +239,7 @@ where x86_64_arg_or_ret(&mut fn_abi.ret, false); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } diff --git a/compiler/rustc_target/src/abi/call/x86_win64.rs b/compiler/rustc_target/src/abi/call/x86_win64.rs index 2aad641b1ecf0..1aaf0e511ca41 100644 --- a/compiler/rustc_target/src/abi/call/x86_win64.rs +++ b/compiler/rustc_target/src/abi/call/x86_win64.rs @@ -31,7 +31,7 @@ pub fn compute_abi_info(fn_abi: &mut FnAbi<'_, Ty>) { if !fn_abi.ret.is_ignore() { fixup(&mut fn_abi.ret); } - for arg in &mut fn_abi.args { + for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } From feeaa4db3cb990463871e35b16b3449023cb791d Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Fri, 26 Aug 2022 10:37:51 +1000 Subject: [PATCH 5/7] Simplify arg capacity calculations. Currently they try to be very precise. But they are wrong, i.e. they don't match what's happening in the loop below. This code isn't hot enough for it to matter that much. 
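A minimal, self-contained sketch of the idea, using a hypothetical `Mode` enum and `u32` placeholders rather than the real rustc types: `Vec::with_capacity` is only a hint, so reserving roughly `args.len()` plus one slot for an indirect return is enough, and the occasional extra push (e.g. for a `Pair`) just grows the vector on a cold path.

    // Approximate capacity: args.len() + 1 for a possible hidden return pointer.
    // Exceeding the reservation only triggers a cheap reallocation.
    #[derive(Clone, Copy)]
    enum Mode { Direct, Pair, Indirect }

    fn argument_tys(arg_modes: &[Mode], ret_is_indirect: bool) -> Vec<u32> {
        let mut tys =
            Vec::with_capacity(arg_modes.len() + if ret_is_indirect { 1 } else { 0 });
        if ret_is_indirect {
            tys.push(0); // hidden pointer slot for the return value
        }
        for mode in arg_modes {
            match mode {
                Mode::Pair => { tys.push(1); tys.push(1); } // two immediate slots
                Mode::Direct | Mode::Indirect => tys.push(1),
            }
        }
        tys
    }

    fn main() {
        // Pair pushes two values, so the final length can exceed the initial hint.
        let tys = argument_tys(&[Mode::Direct, Mode::Pair], true);
        assert_eq!(tys.len(), 4); // reserved 3, ended up with 4 — still fine
    }
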
--- compiler/rustc_codegen_gcc/src/abi.rs | 22 +++------------------- compiler/rustc_codegen_llvm/src/abi.rs | 7 ++----- 2 files changed, 5 insertions(+), 24 deletions(-) diff --git a/compiler/rustc_codegen_gcc/src/abi.rs b/compiler/rustc_codegen_gcc/src/abi.rs index 7f313583c82c8..87b730d29cdf2 100644 --- a/compiler/rustc_codegen_gcc/src/abi.rs +++ b/compiler/rustc_codegen_gcc/src/abi.rs @@ -107,26 +107,10 @@ pub trait FnAbiGccExt<'gcc, 'tcx> { impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { fn gcc_type(&self, cx: &CodegenCx<'gcc, 'tcx>) -> (Type<'gcc>, Vec>, bool, FxHashSet) { let mut on_stack_param_indices = FxHashSet::default(); - let args_capacity: usize = self.args.iter().map(|arg| - if arg.pad.is_some() { - 1 - } - else { - 0 - } + - if let PassMode::Pair(_, _) = arg.mode { - 2 - } else { - 1 - } - ).sum(); + + // This capacity calculation is approximate. let mut argument_tys = Vec::with_capacity( - if let PassMode::Indirect { .. } = self.ret.mode { - 1 - } - else { - 0 - } + args_capacity, + self.args.len() + if let PassMode::Indirect { .. } = self.ret.mode { 1 } else { 0 } ); let return_ty = diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs index 2353362f83b57..a06b07c114989 100644 --- a/compiler/rustc_codegen_llvm/src/abi.rs +++ b/compiler/rustc_codegen_llvm/src/abi.rs @@ -328,12 +328,9 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { let args = if self.c_variadic { &self.args[..self.fixed_count as usize] } else { &self.args }; - let args_capacity: usize = args.iter().map(|arg| - if arg.pad.is_some() { 1 } else { 0 } + - if let PassMode::Pair(_, _) = arg.mode { 2 } else { 1 } - ).sum(); + // This capacity calculation is approximate. let mut llargument_tys = Vec::with_capacity( - if let PassMode::Indirect { .. } = self.ret.mode { 1 } else { 0 } + args_capacity, + self.args.len() + if let PassMode::Indirect { .. } = self.ret.mode { 1 } else { 0 }, ); let llreturn_ty = match &self.ret.mode { From b853e8a6194637751bffbcfdd5bb51c7bfecdff5 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Thu, 25 Aug 2022 19:18:01 +1000 Subject: [PATCH 6/7] Turn `ArgAbi::pad` into a `bool`. Because it's only ever set to `None` or `Some(Reg::i32())`. 
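A small standalone sketch of why a flag is enough (illustrative types, not the rustc definitions): an `Option<Reg>` whose only `Some` value is `Reg::i32()` carries a single bit of information, so a `bool` encodes the same thing in less space — which is what the size assertions below track (`ArgAbi` drops from 72 to 64 bytes).

    // Illustration: the old field only ever distinguished "no padding" from
    // "pad with an i32", so a bool is equivalent and smaller.
    #[derive(Clone, Copy, PartialEq, Eq)]
    struct Reg { size_bits: u16 }
    impl Reg { fn i32() -> Reg { Reg { size_bits: 32 } } }

    struct ArgBefore { pad: Option<Reg> }
    struct ArgAfter { pad_i32: bool }

    fn main() {
        let before = ArgBefore { pad: Some(Reg::i32()) };
        let after = ArgAfter { pad_i32: before.pad == Some(Reg::i32()) };
        assert!(after.pad_i32);
        assert!(std::mem::size_of::<ArgAfter>() < std::mem::size_of::<ArgBefore>());
    }
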
--- compiler/rustc_codegen_gcc/src/abi.rs | 4 ++-- compiler/rustc_codegen_llvm/src/abi.rs | 8 ++++---- compiler/rustc_codegen_ssa/src/mir/block.rs | 6 +++--- compiler/rustc_codegen_ssa/src/mir/mod.rs | 4 ++-- .../rustc_const_eval/src/interpret/terminator.rs | 2 +- compiler/rustc_target/src/abi/call/mips.rs | 2 +- compiler/rustc_target/src/abi/call/mod.rs | 12 ++++++------ compiler/rustc_target/src/abi/call/sparc.rs | 2 +- 8 files changed, 20 insertions(+), 20 deletions(-) diff --git a/compiler/rustc_codegen_gcc/src/abi.rs b/compiler/rustc_codegen_gcc/src/abi.rs index 87b730d29cdf2..3186b363e359f 100644 --- a/compiler/rustc_codegen_gcc/src/abi.rs +++ b/compiler/rustc_codegen_gcc/src/abi.rs @@ -126,8 +126,8 @@ impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { for arg in self.args.iter() { // add padding - if let Some(ty) = arg.pad { - argument_tys.push(ty.gcc_type(cx)); + if arg.pad_i32 { + argument_tys.push(Reg::i32().gcc_type(cx)); } let arg_ty = match arg.mode { diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs index a06b07c114989..168cf3d0b58fe 100644 --- a/compiler/rustc_codegen_llvm/src/abi.rs +++ b/compiler/rustc_codegen_llvm/src/abi.rs @@ -345,8 +345,8 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { for arg in args { // add padding - if let Some(ty) = arg.pad { - llargument_tys.push(ty.llvm_type(cx)); + if arg.pad_i32 { + llargument_tys.push(Reg::i32().llvm_type(cx)); } let llarg_ty = match &arg.mode { @@ -440,7 +440,7 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { _ => {} } for arg in self.args.iter() { - if arg.pad.is_some() { + if arg.pad_i32 { apply(&ArgAttributes::new()); } match &arg.mode { @@ -516,7 +516,7 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { } } for arg in self.args.iter() { - if arg.pad.is_some() { + if arg.pad_i32 { apply(bx.cx, &ArgAttributes::new()); } match &arg.mode { diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index 187af47114f25..5b3f41263e727 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -21,7 +21,7 @@ use rustc_middle::ty::{self, Instance, Ty, TypeVisitable}; use rustc_span::source_map::Span; use rustc_span::{sym, Symbol}; use rustc_symbol_mangling::typeid::typeid_for_fnabi; -use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode}; +use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode, Reg}; use rustc_target::abi::{self, HasDataLayout, WrappingRange}; use rustc_target::spec::abi::Abi; @@ -1159,8 +1159,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { arg: &ArgAbi<'tcx, Ty<'tcx>>, ) { // Fill padding with undef value, where applicable. 
- if let Some(ty) = arg.pad { - llargs.push(bx.const_undef(bx.reg_backend_type(&ty))) + if arg.pad_i32 { + llargs.push(bx.const_undef(bx.reg_backend_type(&Reg::i32()))) } if arg.is_ignore() { diff --git a/compiler/rustc_codegen_ssa/src/mir/mod.rs b/compiler/rustc_codegen_ssa/src/mir/mod.rs index 8ee375fa9e3e9..f13b658b2ffbe 100644 --- a/compiler/rustc_codegen_ssa/src/mir/mod.rs +++ b/compiler/rustc_codegen_ssa/src/mir/mod.rs @@ -283,7 +283,7 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( for i in 0..tupled_arg_tys.len() { let arg = &fx.fn_abi.args[idx]; idx += 1; - if arg.pad.is_some() { + if arg.pad_i32 { llarg_idx += 1; } let pr_field = place.project_field(bx, i); @@ -309,7 +309,7 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( let arg = &fx.fn_abi.args[idx]; idx += 1; - if arg.pad.is_some() { + if arg.pad_i32 { llarg_idx += 1; } diff --git a/compiler/rustc_const_eval/src/interpret/terminator.rs b/compiler/rustc_const_eval/src/interpret/terminator.rs index 27bb828feacf7..2fcdf6ded9819 100644 --- a/compiler/rustc_const_eval/src/interpret/terminator.rs +++ b/compiler/rustc_const_eval/src/interpret/terminator.rs @@ -216,7 +216,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { } }; // Padding must be fully equal. - let pad_compat = || caller_abi.pad == callee_abi.pad; + let pad_compat = || caller_abi.pad_i32 == callee_abi.pad_i32; // When comparing the PassMode, we have to be smart about comparing the attributes. let arg_attr_compat = |a1: &ArgAttributes, a2: &ArgAttributes| { // There's only one regular attribute that matters for the call ABI: InReg. diff --git a/compiler/rustc_target/src/abi/call/mips.rs b/compiler/rustc_target/src/abi/call/mips.rs index 83d8657af118e..ff8f32362148c 100644 --- a/compiler/rustc_target/src/abi/call/mips.rs +++ b/compiler/rustc_target/src/abi/call/mips.rs @@ -24,7 +24,7 @@ where if arg.layout.is_aggregate() { arg.cast_to(Uniform { unit: Reg::i32(), total: size }); if !offset.is_aligned(align) { - arg.pad_with(Reg::i32()); + arg.pad_with_i32(); } } else { arg.extend_integer_width_to(32); diff --git a/compiler/rustc_target/src/abi/call/mod.rs b/compiler/rustc_target/src/abi/call/mod.rs index bf30c24258e86..33f533e68f0e8 100644 --- a/compiler/rustc_target/src/abi/call/mod.rs +++ b/compiler/rustc_target/src/abi/call/mod.rs @@ -465,7 +465,7 @@ pub struct ArgAbi<'a, Ty> { pub layout: TyAndLayout<'a, Ty>, /// Dummy argument, which is emitted before the real argument. - pub pad: Option, + pub pad_i32: bool, pub mode: PassMode, } @@ -486,7 +486,7 @@ impl<'a, Ty> ArgAbi<'a, Ty> { Abi::Vector { .. } => PassMode::Direct(ArgAttributes::new()), Abi::Aggregate { .. } => PassMode::Direct(ArgAttributes::new()), }; - ArgAbi { layout, pad: None, mode } + ArgAbi { layout, pad_i32: false, mode } } fn indirect_pass_mode(layout: &TyAndLayout<'a, Ty>) -> PassMode { @@ -551,8 +551,8 @@ impl<'a, Ty> ArgAbi<'a, Ty> { self.mode = PassMode::Cast(Box::new(target.into())); } - pub fn pad_with(&mut self, reg: Reg) { - self.pad = Some(reg); + pub fn pad_with_i32(&mut self) { + self.pad_i32 = true; } pub fn is_indirect(&self) -> bool { @@ -737,6 +737,6 @@ mod size_asserts { use super::*; use rustc_data_structures::static_assert_size; // These are in alphabetical order, which is easy to maintain. 
- static_assert_size!(ArgAbi<'_, usize>, 72); - static_assert_size!(FnAbi<'_, usize>, 96); + static_assert_size!(ArgAbi<'_, usize>, 64); + static_assert_size!(FnAbi<'_, usize>, 88); } diff --git a/compiler/rustc_target/src/abi/call/sparc.rs b/compiler/rustc_target/src/abi/call/sparc.rs index 83d8657af118e..ff8f32362148c 100644 --- a/compiler/rustc_target/src/abi/call/sparc.rs +++ b/compiler/rustc_target/src/abi/call/sparc.rs @@ -24,7 +24,7 @@ where if arg.layout.is_aggregate() { arg.cast_to(Uniform { unit: Reg::i32(), total: size }); if !offset.is_aligned(align) { - arg.pad_with(Reg::i32()); + arg.pad_with_i32(); } } else { arg.extend_integer_width_to(32); From f974617bdafa2e5205c1e852fe3ce61f29e2c026 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Thu, 25 Aug 2022 22:19:38 +1000 Subject: [PATCH 7/7] Move `ArgAbi::pad_i32` into `PassMode::Cast`. Because it's only needed for that variant. This shrinks the types and clarifies the logic. --- .../src/abi/pass_mode.rs | 11 ++++-- .../src/abi/returning.rs | 8 ++-- compiler/rustc_codegen_gcc/src/abi.rs | 15 +++---- .../rustc_codegen_gcc/src/intrinsic/mod.rs | 8 ++-- compiler/rustc_codegen_llvm/src/abi.rs | 39 ++++++++++--------- compiler/rustc_codegen_llvm/src/intrinsic.rs | 4 +- compiler/rustc_codegen_ssa/src/mir/block.rs | 34 +++++++--------- .../rustc_codegen_ssa/src/mir/intrinsic.rs | 2 +- compiler/rustc_codegen_ssa/src/mir/mod.rs | 4 +- .../src/interpret/terminator.rs | 6 +-- compiler/rustc_target/src/abi/call/mips.rs | 6 +-- compiler/rustc_target/src/abi/call/mod.rs | 23 +++++------ compiler/rustc_target/src/abi/call/sparc.rs | 6 +-- compiler/rustc_target/src/abi/call/x86.rs | 2 +- 14 files changed, 80 insertions(+), 88 deletions(-) diff --git a/compiler/rustc_codegen_cranelift/src/abi/pass_mode.rs b/compiler/rustc_codegen_cranelift/src/abi/pass_mode.rs index 058dee176e243..165f15bb3f122 100644 --- a/compiler/rustc_codegen_cranelift/src/abi/pass_mode.rs +++ b/compiler/rustc_codegen_cranelift/src/abi/pass_mode.rs @@ -100,7 +100,10 @@ impl<'tcx> ArgAbiExt<'tcx> for ArgAbi<'tcx, Ty<'tcx>> { } _ => unreachable!("{:?}", self.layout.abi), }, - PassMode::Cast(ref cast) => cast_target_to_abi_params(cast), + PassMode::Cast(ref cast, pad_i32) => { + assert!(!pad_i32, "padding support not yet implemented"); + cast_target_to_abi_params(cast) + } PassMode::Indirect { attrs, extra_attrs: None, on_stack } => { if on_stack { // Abi requires aligning struct size to pointer size @@ -145,7 +148,7 @@ impl<'tcx> ArgAbiExt<'tcx> for ArgAbi<'tcx, Ty<'tcx>> { } _ => unreachable!("{:?}", self.layout.abi), }, - PassMode::Cast(ref cast) => { + PassMode::Cast(ref cast, _) => { (None, cast_target_to_abi_params(cast).into_iter().collect()) } PassMode::Indirect { attrs: _, extra_attrs: None, on_stack } => { @@ -226,7 +229,7 @@ pub(super) fn adjust_arg_for_abi<'tcx>( let (a, b) = arg.load_scalar_pair(fx); smallvec![a, b] } - PassMode::Cast(ref cast) => to_casted_value(fx, arg, cast), + PassMode::Cast(ref cast, _) => to_casted_value(fx, arg, cast), PassMode::Indirect { .. 
} => { if is_owned { match arg.force_stack(fx) { @@ -284,7 +287,7 @@ pub(super) fn cvalue_for_param<'tcx>( assert_eq!(block_params.len(), 2, "{:?}", block_params); Some(CValue::by_val_pair(block_params[0], block_params[1], arg_abi.layout)) } - PassMode::Cast(ref cast) => { + PassMode::Cast(ref cast, _) => { Some(from_casted_value(fx, &block_params, arg_abi.layout, cast)) } PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => { diff --git a/compiler/rustc_codegen_cranelift/src/abi/returning.rs b/compiler/rustc_codegen_cranelift/src/abi/returning.rs index 29ef5e2dfdae6..aaa1418767a35 100644 --- a/compiler/rustc_codegen_cranelift/src/abi/returning.rs +++ b/compiler/rustc_codegen_cranelift/src/abi/returning.rs @@ -13,7 +13,7 @@ pub(super) fn codegen_return_param<'tcx>( block_params_iter: &mut impl Iterator, ) -> CPlace<'tcx> { let (ret_place, ret_param): (_, SmallVec<[_; 2]>) = match fx.fn_abi.as_ref().unwrap().ret.mode { - PassMode::Ignore | PassMode::Direct(_) | PassMode::Pair(_, _) | PassMode::Cast(_) => { + PassMode::Ignore | PassMode::Direct(_) | PassMode::Pair(_, _) | PassMode::Cast(..) => { let is_ssa = ssa_analyzed[RETURN_PLACE] == crate::analyze::SsaKind::Ssa; ( super::make_local_place( @@ -75,7 +75,7 @@ pub(super) fn codegen_with_call_return_arg<'tcx>( PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => { unreachable!("unsized return value") } - PassMode::Direct(_) | PassMode::Pair(_, _) | PassMode::Cast(_) => (None, None), + PassMode::Direct(_) | PassMode::Pair(_, _) | PassMode::Cast(..) => (None, None), }; let call_inst = f(fx, return_ptr); @@ -92,7 +92,7 @@ pub(super) fn codegen_with_call_return_arg<'tcx>( ret_place .write_cvalue(fx, CValue::by_val_pair(ret_val_a, ret_val_b, ret_arg_abi.layout)); } - PassMode::Cast(ref cast) => { + PassMode::Cast(ref cast, _) => { let results = fx.bcx.inst_results(call_inst).iter().copied().collect::>(); let result = @@ -131,7 +131,7 @@ pub(crate) fn codegen_return(fx: &mut FunctionCx<'_, '_, '_>) { let (ret_val_a, ret_val_b) = place.to_cvalue(fx).load_scalar_pair(fx); fx.bcx.ins().return_(&[ret_val_a, ret_val_b]); } - PassMode::Cast(ref cast) => { + PassMode::Cast(ref cast, _) => { let place = fx.get_local_place(RETURN_PLACE); let ret_val = place.to_cvalue(fx); let ret_vals = super::pass_mode::to_casted_value(fx, ret_val, cast); diff --git a/compiler/rustc_codegen_gcc/src/abi.rs b/compiler/rustc_codegen_gcc/src/abi.rs index 3186b363e359f..848c34211ff61 100644 --- a/compiler/rustc_codegen_gcc/src/abi.rs +++ b/compiler/rustc_codegen_gcc/src/abi.rs @@ -117,7 +117,7 @@ impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { match self.ret.mode { PassMode::Ignore => cx.type_void(), PassMode::Direct(_) | PassMode::Pair(..) => self.ret.layout.immediate_gcc_type(cx), - PassMode::Cast(ref cast) => cast.gcc_type(cx), + PassMode::Cast(ref cast, _) => cast.gcc_type(cx), PassMode::Indirect { .. } => { argument_tys.push(cx.type_ptr_to(self.ret.memory_ty(cx))); cx.type_void() @@ -125,11 +125,6 @@ impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { }; for arg in self.args.iter() { - // add padding - if arg.pad_i32 { - argument_tys.push(Reg::i32().gcc_type(cx)); - } - let arg_ty = match arg.mode { PassMode::Ignore => continue, PassMode::Direct(_) => arg.layout.immediate_gcc_type(cx), @@ -141,7 +136,13 @@ impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { PassMode::Indirect { extra_attrs: Some(_), .. 
} => { unimplemented!(); } - PassMode::Cast(ref cast) => cast.gcc_type(cx), + PassMode::Cast(ref cast, pad_i32) => { + // add padding + if pad_i32 { + argument_tys.push(Reg::i32().gcc_type(cx)); + } + cast.gcc_type(cx) + } PassMode::Indirect { extra_attrs: None, on_stack: true, .. } => { on_stack_param_indices.insert(argument_tys.len()); arg.memory_ty(cx) diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs index f4493776ea268..352fe7568eff0 100644 --- a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs +++ b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs @@ -130,7 +130,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { sym::volatile_load | sym::unaligned_volatile_load => { let tp_ty = substs.type_at(0); let mut ptr = args[0].immediate(); - if let PassMode::Cast(ty) = &fn_abi.ret.mode { + if let PassMode::Cast(ty, _) = &fn_abi.ret.mode { ptr = self.pointercast(ptr, self.type_ptr_to(ty.gcc_type(self))); } let load = self.volatile_load(ptr.get_type(), ptr); @@ -320,7 +320,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { }; if !fn_abi.ret.is_ignore() { - if let PassMode::Cast(ty) = &fn_abi.ret.mode { + if let PassMode::Cast(ty, _) = &fn_abi.ret.mode { let ptr_llty = self.type_ptr_to(ty.gcc_type(self)); let ptr = self.pointercast(result.llval, ptr_llty); self.store(llval, ptr, result.align); @@ -416,7 +416,7 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> { else if self.is_unsized_indirect() { bug!("unsized `ArgAbi` must be handled through `store_fn_arg`"); } - else if let PassMode::Cast(ref cast) = self.mode { + else if let PassMode::Cast(ref cast, _) = self.mode { // FIXME(eddyb): Figure out when the simpler Store is safe, clang // uses it for i16 -> {i8, i8}, but not for i24 -> {i8, i8, i8}. let can_store_through_cast_ptr = false; @@ -481,7 +481,7 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> { PassMode::Indirect { extra_attrs: Some(_), .. } => { OperandValue::Ref(next(), Some(next()), self.layout.align.abi).store(bx, dst); }, - PassMode::Direct(_) | PassMode::Indirect { extra_attrs: None, .. } | PassMode::Cast(_) => { + PassMode::Direct(_) | PassMode::Indirect { extra_attrs: None, .. } | PassMode::Cast(..) => { let next_arg = next(); self.store(bx, next_arg, dst); }, diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs index 168cf3d0b58fe..0ce161d7e756c 100644 --- a/compiler/rustc_codegen_llvm/src/abi.rs +++ b/compiler/rustc_codegen_llvm/src/abi.rs @@ -213,7 +213,7 @@ impl<'ll, 'tcx> ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> { OperandValue::Ref(val, None, self.layout.align.abi).store(bx, dst) } else if self.is_unsized_indirect() { bug!("unsized `ArgAbi` must be handled through `store_fn_arg`"); - } else if let PassMode::Cast(cast) = &self.mode { + } else if let PassMode::Cast(cast, _) = &self.mode { // FIXME(eddyb): Figure out when the simpler Store is safe, clang // uses it for i16 -> {i8, i8}, but not for i24 -> {i8, i8, i8}. let can_store_through_cast_ptr = false; @@ -283,7 +283,7 @@ impl<'ll, 'tcx> ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> { } PassMode::Direct(_) | PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } - | PassMode::Cast(_) => { + | PassMode::Cast(..) 
=> { let next_arg = next(); self.store(bx, next_arg, dst); } @@ -336,7 +336,7 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { let llreturn_ty = match &self.ret.mode { PassMode::Ignore => cx.type_void(), PassMode::Direct(_) | PassMode::Pair(..) => self.ret.layout.immediate_llvm_type(cx), - PassMode::Cast(cast) => cast.llvm_type(cx), + PassMode::Cast(cast, _) => cast.llvm_type(cx), PassMode::Indirect { .. } => { llargument_tys.push(cx.type_ptr_to(self.ret.memory_ty(cx))); cx.type_void() @@ -344,11 +344,6 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { }; for arg in args { - // add padding - if arg.pad_i32 { - llargument_tys.push(Reg::i32().llvm_type(cx)); - } - let llarg_ty = match &arg.mode { PassMode::Ignore => continue, PassMode::Direct(_) => arg.layout.immediate_llvm_type(cx), @@ -364,7 +359,13 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { llargument_tys.push(ptr_layout.scalar_pair_element_llvm_type(cx, 1, true)); continue; } - PassMode::Cast(cast) => cast.llvm_type(cx), + PassMode::Cast(cast, pad_i32) => { + // add padding + if *pad_i32 { + llargument_tys.push(Reg::i32().llvm_type(cx)); + } + cast.llvm_type(cx) + } PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => { cx.type_ptr_to(arg.memory_ty(cx)) } @@ -434,15 +435,12 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { let sret = llvm::CreateStructRetAttr(cx.llcx, self.ret.layout.llvm_type(cx)); attributes::apply_to_llfn(llfn, llvm::AttributePlace::Argument(i), &[sret]); } - PassMode::Cast(cast) => { + PassMode::Cast(cast, _) => { cast.attrs.apply_attrs_to_llfn(llvm::AttributePlace::ReturnValue, cx, llfn); } _ => {} } for arg in self.args.iter() { - if arg.pad_i32 { - apply(&ArgAttributes::new()); - } match &arg.mode { PassMode::Ignore => {} PassMode::Indirect { attrs, extra_attrs: None, on_stack: true } => { @@ -463,7 +461,10 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { apply(a); apply(b); } - PassMode::Cast(cast) => { + PassMode::Cast(cast, pad_i32) => { + if *pad_i32 { + apply(&ArgAttributes::new()); + } apply(&cast.attrs); } } @@ -496,7 +497,7 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { let sret = llvm::CreateStructRetAttr(bx.cx.llcx, self.ret.layout.llvm_type(bx)); attributes::apply_to_callsite(callsite, llvm::AttributePlace::Argument(i), &[sret]); } - PassMode::Cast(cast) => { + PassMode::Cast(cast, _) => { cast.attrs.apply_attrs_to_callsite( llvm::AttributePlace::ReturnValue, &bx.cx, @@ -516,9 +517,6 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { } } for arg in self.args.iter() { - if arg.pad_i32 { - apply(bx.cx, &ArgAttributes::new()); - } match &arg.mode { PassMode::Ignore => {} PassMode::Indirect { attrs, extra_attrs: None, on_stack: true } => { @@ -542,7 +540,10 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { apply(bx.cx, a); apply(bx.cx, b); } - PassMode::Cast(cast) => { + PassMode::Cast(cast, pad_i32) => { + if *pad_i32 { + apply(bx.cx, &ArgAttributes::new()); + } apply(bx.cx, &cast.attrs); } } diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs index d46a6dd32c8a5..f4227fbd8fa3b 100644 --- a/compiler/rustc_codegen_llvm/src/intrinsic.rs +++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs @@ -161,7 +161,7 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> { sym::volatile_load | sym::unaligned_volatile_load => { let tp_ty = substs.type_at(0); let 
ptr = args[0].immediate(); - let load = if let PassMode::Cast(ty) = &fn_abi.ret.mode { + let load = if let PassMode::Cast(ty, _) = &fn_abi.ret.mode { let llty = ty.llvm_type(self); let ptr = self.pointercast(ptr, self.type_ptr_to(llty)); self.volatile_load(llty, ptr) @@ -374,7 +374,7 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> { }; if !fn_abi.ret.is_ignore() { - if let PassMode::Cast(ty) = &fn_abi.ret.mode { + if let PassMode::Cast(ty, _) = &fn_abi.ret.mode { let ptr_llty = self.type_ptr_to(ty.llvm_type(self)); let ptr = self.pointercast(result.llval, ptr_llty); self.store(llval, ptr, result.align); diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index 5b3f41263e727..5c67d3b6431fd 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -339,7 +339,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } } - PassMode::Cast(cast_ty) => { + PassMode::Cast(cast_ty, _) => { let op = match self.locals[mir::RETURN_PLACE] { LocalRef::Operand(Some(op)) => op, LocalRef::Operand(None) => bug!("use of return before def"), @@ -1158,39 +1158,35 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { llargs: &mut Vec, arg: &ArgAbi<'tcx, Ty<'tcx>>, ) { - // Fill padding with undef value, where applicable. - if arg.pad_i32 { - llargs.push(bx.const_undef(bx.reg_backend_type(&Reg::i32()))) - } - - if arg.is_ignore() { - return; - } - - if let PassMode::Pair(..) = arg.mode { - match op.val { + match arg.mode { + PassMode::Ignore => return, + PassMode::Cast(_, true) => { + // Fill padding with undef value, where applicable. + llargs.push(bx.const_undef(bx.reg_backend_type(&Reg::i32()))); + } + PassMode::Pair(..) => match op.val { Pair(a, b) => { llargs.push(a); llargs.push(b); return; } _ => bug!("codegen_argument: {:?} invalid for pair argument", op), - } - } else if arg.is_unsized_indirect() { - match op.val { + }, + PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => match op.val { Ref(a, Some(b), _) => { llargs.push(a); llargs.push(b); return; } _ => bug!("codegen_argument: {:?} invalid for unsized indirect argument", op), - } + }, + _ => {} } // Force by-ref if we have to load through a cast pointer. let (mut llval, align, by_ref) = match op.val { Immediate(_) | Pair(..) => match arg.mode { - PassMode::Indirect { .. } | PassMode::Cast(_) => { + PassMode::Indirect { .. } | PassMode::Cast(..) => { let scratch = PlaceRef::alloca(bx, arg.layout); op.val.store(bx, scratch); (scratch.llval, scratch.align, true) @@ -1222,7 +1218,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { if by_ref && !arg.is_indirect() { // Have to load the argument, maybe while casting it. - if let PassMode::Cast(ty) = &arg.mode { + if let PassMode::Cast(ty, _) = &arg.mode { let llty = bx.cast_backend_type(ty); let addr = bx.pointercast(llval, bx.type_ptr_to(llty)); llval = bx.load(llty, addr, align.min(arg.layout.align.abi)); @@ -1622,7 +1618,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } DirectOperand(index) => { // If there is a cast, we have to store and reload. - let op = if let PassMode::Cast(_) = ret_abi.mode { + let op = if let PassMode::Cast(..) 
= ret_abi.mode { let tmp = PlaceRef::alloca(bx, ret_abi.layout); tmp.storage_live(bx); bx.store_arg(&ret_abi, llval, tmp); diff --git a/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs b/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs index e11ebdce80f77..16aad07194da8 100644 --- a/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs +++ b/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs @@ -597,7 +597,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { }; if !fn_abi.ret.is_ignore() { - if let PassMode::Cast(ty) = &fn_abi.ret.mode { + if let PassMode::Cast(ty, _) = &fn_abi.ret.mode { let ptr_llty = bx.type_ptr_to(bx.cast_backend_type(ty)); let ptr = bx.pointercast(result.llval, ptr_llty); bx.store(llval, ptr, result.align); diff --git a/compiler/rustc_codegen_ssa/src/mir/mod.rs b/compiler/rustc_codegen_ssa/src/mir/mod.rs index f13b658b2ffbe..c70c2bd9f741d 100644 --- a/compiler/rustc_codegen_ssa/src/mir/mod.rs +++ b/compiler/rustc_codegen_ssa/src/mir/mod.rs @@ -283,7 +283,7 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( for i in 0..tupled_arg_tys.len() { let arg = &fx.fn_abi.args[idx]; idx += 1; - if arg.pad_i32 { + if let PassMode::Cast(_, true) = arg.mode { llarg_idx += 1; } let pr_field = place.project_field(bx, i); @@ -309,7 +309,7 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( let arg = &fx.fn_abi.args[idx]; idx += 1; - if arg.pad_i32 { + if let PassMode::Cast(_, true) = arg.mode { llarg_idx += 1; } diff --git a/compiler/rustc_const_eval/src/interpret/terminator.rs b/compiler/rustc_const_eval/src/interpret/terminator.rs index 2fcdf6ded9819..35b1fefd28658 100644 --- a/compiler/rustc_const_eval/src/interpret/terminator.rs +++ b/compiler/rustc_const_eval/src/interpret/terminator.rs @@ -215,8 +215,6 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { _ => false, } }; - // Padding must be fully equal. - let pad_compat = || caller_abi.pad_i32 == callee_abi.pad_i32; // When comparing the PassMode, we have to be smart about comparing the attributes. let arg_attr_compat = |a1: &ArgAttributes, a2: &ArgAttributes| { // There's only one regular attribute that matters for the call ABI: InReg. 
@@ -239,7 +237,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { (PassMode::Pair(a1, b1), PassMode::Pair(a2, b2)) => { arg_attr_compat(a1, a2) && arg_attr_compat(b1, b2) } - (PassMode::Cast(c1), PassMode::Cast(c2)) => c1 == c2, + (PassMode::Cast(c1, pad1), PassMode::Cast(c2, pad2)) => c1 == c2 && pad1 == pad2, ( PassMode::Indirect { attrs: a1, extra_attrs: None, on_stack: s1 }, PassMode::Indirect { attrs: a2, extra_attrs: None, on_stack: s2 }, @@ -251,7 +249,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { _ => false, }; - if layout_compat() && pad_compat() && mode_compat() { + if layout_compat() && mode_compat() { return true; } trace!( diff --git a/compiler/rustc_target/src/abi/call/mips.rs b/compiler/rustc_target/src/abi/call/mips.rs index ff8f32362148c..edcd1bab8b4b0 100644 --- a/compiler/rustc_target/src/abi/call/mips.rs +++ b/compiler/rustc_target/src/abi/call/mips.rs @@ -22,10 +22,8 @@ where let align = arg.layout.align.max(dl.i32_align).min(dl.i64_align).abi; if arg.layout.is_aggregate() { - arg.cast_to(Uniform { unit: Reg::i32(), total: size }); - if !offset.is_aligned(align) { - arg.pad_with_i32(); - } + let pad_i32 = !offset.is_aligned(align); + arg.cast_to_and_pad_i32(Uniform { unit: Reg::i32(), total: size }, pad_i32); } else { arg.extend_integer_width_to(32); } diff --git a/compiler/rustc_target/src/abi/call/mod.rs b/compiler/rustc_target/src/abi/call/mod.rs index 33f533e68f0e8..d2eb804d00480 100644 --- a/compiler/rustc_target/src/abi/call/mod.rs +++ b/compiler/rustc_target/src/abi/call/mod.rs @@ -40,9 +40,10 @@ pub enum PassMode { /// /// The argument has a layout abi of `ScalarPair`. Pair(ArgAttributes, ArgAttributes), - /// Pass the argument after casting it, to either - /// a single uniform or a pair of registers. - Cast(Box), + /// Pass the argument after casting it, to either a single uniform or a + /// pair of registers. The bool indicates if a `Reg::i32()` dummy argument + /// is emitted before the real argument. + Cast(Box, bool), /// Pass the argument indirectly via a hidden pointer. /// The `extra_attrs` value, if any, is for the extra data (vtable or length) /// which indicates that it refers to an unsized rvalue. @@ -463,10 +464,6 @@ impl<'a, Ty> TyAndLayout<'a, Ty> { #[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)] pub struct ArgAbi<'a, Ty> { pub layout: TyAndLayout<'a, Ty>, - - /// Dummy argument, which is emitted before the real argument. - pub pad_i32: bool, - pub mode: PassMode, } @@ -486,7 +483,7 @@ impl<'a, Ty> ArgAbi<'a, Ty> { Abi::Vector { .. } => PassMode::Direct(ArgAttributes::new()), Abi::Aggregate { .. } => PassMode::Direct(ArgAttributes::new()), }; - ArgAbi { layout, pad_i32: false, mode } + ArgAbi { layout, mode } } fn indirect_pass_mode(layout: &TyAndLayout<'a, Ty>) -> PassMode { @@ -548,11 +545,11 @@ impl<'a, Ty> ArgAbi<'a, Ty> { } pub fn cast_to>(&mut self, target: T) { - self.mode = PassMode::Cast(Box::new(target.into())); + self.mode = PassMode::Cast(Box::new(target.into()), false); } - pub fn pad_with_i32(&mut self) { - self.pad_i32 = true; + pub fn cast_to_and_pad_i32>(&mut self, target: T, pad_i32: bool) { + self.mode = PassMode::Cast(Box::new(target.into()), pad_i32); } pub fn is_indirect(&self) -> bool { @@ -737,6 +734,6 @@ mod size_asserts { use super::*; use rustc_data_structures::static_assert_size; // These are in alphabetical order, which is easy to maintain. 
- static_assert_size!(ArgAbi<'_, usize>, 64); - static_assert_size!(FnAbi<'_, usize>, 88); + static_assert_size!(ArgAbi<'_, usize>, 56); + static_assert_size!(FnAbi<'_, usize>, 80); } diff --git a/compiler/rustc_target/src/abi/call/sparc.rs b/compiler/rustc_target/src/abi/call/sparc.rs index ff8f32362148c..edcd1bab8b4b0 100644 --- a/compiler/rustc_target/src/abi/call/sparc.rs +++ b/compiler/rustc_target/src/abi/call/sparc.rs @@ -22,10 +22,8 @@ where let align = arg.layout.align.max(dl.i32_align).min(dl.i64_align).abi; if arg.layout.is_aggregate() { - arg.cast_to(Uniform { unit: Reg::i32(), total: size }); - if !offset.is_aligned(align) { - arg.pad_with_i32(); - } + let pad_i32 = !offset.is_aligned(align); + arg.cast_to_and_pad_i32(Uniform { unit: Reg::i32(), total: size }, pad_i32); } else { arg.extend_integer_width_to(32); } diff --git a/compiler/rustc_target/src/abi/call/x86.rs b/compiler/rustc_target/src/abi/call/x86.rs index c2d70d1160c44..7c26335dcf4cd 100644 --- a/compiler/rustc_target/src/abi/call/x86.rs +++ b/compiler/rustc_target/src/abi/call/x86.rs @@ -81,7 +81,7 @@ where PassMode::Direct(ref mut attrs) => attrs, PassMode::Pair(..) | PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } - | PassMode::Cast(_) => { + | PassMode::Cast(..) => { unreachable!("x86 shouldn't be passing arguments by {:?}", arg.mode) } };
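
For readers following the refactor across these hunks, here is a minimal, self-contained sketch of the shape the series ends up with. It uses simplified stand-in types, not the real `rustc_target` definitions (the real `CastTarget`, `Reg`, and backend lowering are far richer): the old `pad_i32` field on `ArgAbi` becomes the `bool` in `PassMode::Cast(Box<CastTarget>, bool)`, so each backend decides at the match site whether to emit the dummy `i32` register before the cast argument.

    // Illustrative stand-ins only; these are not the rustc_target definitions.
    struct CastTarget {
        regs: Vec<&'static str>, // e.g. the registers a cast lowers to
    }

    enum PassMode {
        Ignore,
        Direct,
        // Boxed because it is the largest and rarest variant; the bool is the
        // former `pad_i32` field: emit a dummy i32 before the real argument.
        Cast(Box<CastTarget>, bool),
    }

    // A backend lowering one argument reads the padding flag at the match site,
    // as the gcc/llvm hunks above now do.
    fn lower_arg(mode: &PassMode, arg_tys: &mut Vec<&'static str>) {
        match mode {
            PassMode::Ignore => {}
            PassMode::Direct => arg_tys.push("direct"),
            PassMode::Cast(cast, pad_i32) => {
                if *pad_i32 {
                    arg_tys.push("i32"); // padding register comes first
                }
                arg_tys.extend(cast.regs.iter().copied());
            }
        }
    }

    fn main() {
        let mode = PassMode::Cast(
            Box::new(CastTarget { regs: vec!["i64", "i64"] }),
            true, // what mips/sparc request when the offset is misaligned
        );
        let mut arg_tys = Vec::new();
        lower_arg(&mode, &mut arg_tys);
        assert_eq!(arg_tys, ["i32", "i64", "i64"]);
        println!("{arg_tys:?}");
    }

This mirrors the design choice visible in the diffs: boxing `CastTarget` shrinks the common case (hence the smaller `ArgAbi`/`FnAbi` size assertions), and folding the padding flag into `Cast`, the only mode the mips/sparc code ever paired it with, removes a field that every other `PassMode` had to carry and lets callers use the combined `cast_to_and_pad_i32` instead of two separate calls.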