From 37aeb75eb6c68dfee49e9ecdbd0c46e137b28bc7 Mon Sep 17 00:00:00 2001
From: Ralf Jung <post@ralfj.de>
Date: Tue, 21 May 2024 14:07:02 +0200
Subject: [PATCH] don't inhibit random field reordering on repr(packed(1))

---
 compiler/rustc_abi/src/layout.rs                | 10 ++++++----
 compiler/rustc_abi/src/lib.rs                   | 15 ++++-----------
 .../src/transmute/transmute_undefined_repr.rs   |  2 +-
 .../miri/tests/fail/reading_half_a_pointer.rs   |  2 +-
 .../field_requires_parent_struct_alignment2.rs  |  2 +-
 tests/mir-opt/const_allocation3.rs              |  2 +-
 6 files changed, 14 insertions(+), 19 deletions(-)

diff --git a/compiler/rustc_abi/src/layout.rs b/compiler/rustc_abi/src/layout.rs
index a95ef4c460f45..9165b4f2df3d0 100644
--- a/compiler/rustc_abi/src/layout.rs
+++ b/compiler/rustc_abi/src/layout.rs
@@ -970,7 +970,7 @@ fn univariant<
     let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align };
     let mut max_repr_align = repr.align;
     let mut inverse_memory_index: IndexVec<u32, FieldIdx> = fields.indices().collect();
-    let optimize = !repr.inhibit_struct_field_reordering_opt();
+    let optimize = !repr.inhibit_struct_field_reordering();
     if optimize && fields.len() > 1 {
         let end = if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
         let optimizing = &mut inverse_memory_index.raw[..end];
@@ -1007,13 +1007,15 @@ fn univariant<
             // Calculates a sort key to group fields by their alignment or possibly some
             // size-derived pseudo-alignment.
             let alignment_group_key = |layout: &F| {
+                // The two branches here return values that cannot be meaningfully compared with
+                // each other. However, we know that consistently for all executions of
+                // `alignment_group_key`, one or the other branch will be taken, so this is okay.
                 if let Some(pack) = pack {
                     // Return the packed alignment in bytes.
                     layout.align.abi.min(pack).bytes()
                 } else {
-                    // Returns `log2(effective-align)`. This is ok since `pack` applies to all
-                    // fields equally. The calculation assumes that size is an integer multiple of
-                    // align, except for ZSTs.
+                    // Returns `log2(effective-align)`. The calculation assumes that size is an
+                    // integer multiple of align, except for ZSTs.
                     let align = layout.align.abi.bytes();
                     let size = layout.size.bytes();
                     let niche_size = layout.largest_niche.map(|n| n.available(dl)).unwrap_or(0);
diff --git a/compiler/rustc_abi/src/lib.rs b/compiler/rustc_abi/src/lib.rs
index 53aa8ad7cca51..b1a17d5a24b67 100644
--- a/compiler/rustc_abi/src/lib.rs
+++ b/compiler/rustc_abi/src/lib.rs
@@ -137,23 +137,16 @@ impl ReprOptions {
         self.c() || self.int.is_some()
     }
 
-    /// Returns `true` if this `#[repr()]` should inhibit struct field reordering
-    /// optimizations, such as with `repr(C)`, `repr(packed(1))`, or `repr(<int>)`.
-    pub fn inhibit_struct_field_reordering_opt(&self) -> bool {
-        if let Some(pack) = self.pack {
-            if pack.bytes() == 1 {
-                return true;
-            }
-        }
-
+    /// Returns `true` if this `#[repr()]` guarantees a fixed field order,
+    /// e.g. `repr(C)` or `repr(<int>)`.
+    pub fn inhibit_struct_field_reordering(&self) -> bool {
         self.flags.intersects(ReprFlags::IS_UNOPTIMISABLE) || self.int.is_some()
     }
 
     /// Returns `true` if this type is valid for reordering and `-Z randomize-layout`
     /// was enabled for its declaration crate.
     pub fn can_randomize_type_layout(&self) -> bool {
-        !self.inhibit_struct_field_reordering_opt()
-            && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
+        !self.inhibit_struct_field_reordering() && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
     }
 
     /// Returns `true` if this `#[repr()]` should inhibit union ABI optimisations.
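(Not part of the patch: a minimal standalone sketch of the distinction the new
`inhibit_struct_field_reordering` doc comment draws. `Ordered` and `Unordered`
are illustrative names. Only `repr(C)` pins the declared field order; plain
`repr(packed)` merely caps alignment, so after this change its fields may be
reordered like those of any default-repr struct.)

use std::mem::{offset_of, size_of};

#[allow(dead_code)]
#[repr(C, packed)]
struct Ordered {
    a: u8,
    b: u32, // repr(C) fixes this directly after `a`; packing removes padding
}

#[allow(dead_code)]
#[repr(packed)]
struct Unordered {
    a: u8,
    b: u32, // declaration order in memory is NOT guaranteed after this patch
}

fn main() {
    // With `repr(C, packed)` the offsets are guaranteed: declared order, no padding.
    assert_eq!(offset_of!(Ordered, a), 0);
    assert_eq!(offset_of!(Ordered, b), 1);
    // With plain `repr(packed)` only the absence of padding is guaranteed:
    // the size is still 5, but the compiler may place `b` before `a`.
    assert_eq!(size_of::<Unordered>(), 5);
}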
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs
index 9c8dd37d53dcd..3b32e4396b9f1 100644
--- a/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs
@@ -278,7 +278,7 @@ fn reduce_ty<'tcx>(cx: &LateContext<'tcx>, mut ty: Ty<'tcx>) -> ReducedTy<'tcx>
                 ty = sized_ty;
                 continue;
             }
-            if def.repr().inhibit_struct_field_reordering_opt() {
+            if def.repr().inhibit_struct_field_reordering() {
                 ReducedTy::OrderedFields(Some(sized_ty))
             } else {
                 ReducedTy::UnorderedFields(ty)
diff --git a/src/tools/miri/tests/fail/reading_half_a_pointer.rs b/src/tools/miri/tests/fail/reading_half_a_pointer.rs
index 7dd98eab78526..feac30b83c302 100644
--- a/src/tools/miri/tests/fail/reading_half_a_pointer.rs
+++ b/src/tools/miri/tests/fail/reading_half_a_pointer.rs
@@ -1,7 +1,7 @@
 #![allow(dead_code)]
 
 // We use packed structs to get around alignment restrictions
-#[repr(packed)]
+#[repr(C, packed)]
 struct Data {
     pad: u8,
     ptr: &'static i32,
diff --git a/src/tools/miri/tests/fail/unaligned_pointers/field_requires_parent_struct_alignment2.rs b/src/tools/miri/tests/fail/unaligned_pointers/field_requires_parent_struct_alignment2.rs
index 8459c64ed2d39..06dd97deedae9 100644
--- a/src/tools/miri/tests/fail/unaligned_pointers/field_requires_parent_struct_alignment2.rs
+++ b/src/tools/miri/tests/fail/unaligned_pointers/field_requires_parent_struct_alignment2.rs
@@ -7,7 +7,7 @@ pub struct Aligned {
     _pad: [u8; 11],
     packed: Packed,
 }
-#[repr(packed)]
+#[repr(C, packed)]
 #[derive(Default, Copy, Clone)]
 pub struct Packed {
     _pad: [u8; 5],
diff --git a/tests/mir-opt/const_allocation3.rs b/tests/mir-opt/const_allocation3.rs
index 46be3e1e36e87..9d2006b6fe80d 100644
--- a/tests/mir-opt/const_allocation3.rs
+++ b/tests/mir-opt/const_allocation3.rs
@@ -7,7 +7,7 @@ fn main() {
     FOO;
 }
 
-#[repr(packed)]
+#[repr(C, packed)]
 struct Packed {
     a: [u8; 28],
     b: &'static i32,
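(Likewise not part of the patch: the test updates above all add `C` because
these tests compute field addresses by hand, which is only sound when the
field order is guaranteed. A sketch of that pattern; `Data` echoes the Miri
test above, everything else is illustrative.)

#[allow(dead_code)]
#[repr(C, packed)] // `C` guarantees `pad` at offset 0 and `ptr` at offset 1
struct Data {
    pad: u8,
    ptr: &'static i32,
}

static X: i32 = 42;

fn main() {
    let d = Data { pad: 0, ptr: &X };
    let base = &d as *const Data as *const u8;
    // Only `repr(C)` lets us assume `ptr` starts at byte 1; with plain
    // `repr(packed)` the compiler could now place `ptr` first instead.
    let field = unsafe { base.add(1) } as *const &'static i32;
    // `read_unaligned` copies the (possibly misaligned) reference out. The
    // Miri test instead deliberately reads only half the pointer to trigger
    // an error; here we read the whole field, which is fine.
    let p = unsafe { std::ptr::read_unaligned(field) };
    assert_eq!(*p, 42);
}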