Skip to content

Commit 7afce4f

Browse files
committed
Update NonZero and NonNull to not field-project (per MCP807)
1 parent 2d0ea79 commit 7afce4f

16 files changed

+583
-510
lines changed

Diff for: library/core/src/num/nonzero.rs

+28-6
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,8 @@ pub unsafe trait ZeroablePrimitive: Sized + Copy + private::Sealed {
3737
macro_rules! impl_zeroable_primitive {
3838
($($NonZeroInner:ident ( $primitive:ty )),+ $(,)?) => {
3939
mod private {
40+
use super::*;
41+
4042
#[unstable(
4143
feature = "nonzero_internals",
4244
reason = "implementation detail which may disappear or be replaced at any time",
@@ -45,7 +47,11 @@ macro_rules! impl_zeroable_primitive {
4547
pub trait Sealed {}
4648

4749
$(
48-
#[derive(Debug, Clone, Copy, PartialEq)]
50+
// This inner type is never shown directly, so intentionally does not have Debug
51+
#[expect(missing_debug_implementations)]
52+
// Since this struct is non-generic and derives Copy,
53+
// the derived Clone is `*self` and thus doesn't field-project.
54+
#[derive(Clone, Copy)]
4955
#[repr(transparent)]
5056
#[rustc_layout_scalar_valid_range_start(1)]
5157
#[rustc_nonnull_optimization_guaranteed]
@@ -55,6 +61,16 @@ macro_rules! impl_zeroable_primitive {
5561
issue = "none"
5662
)]
5763
pub struct $NonZeroInner($primitive);
64+
65+
// This is required to allow matching a constant. We don't get it from a derive
66+
// because the derived `PartialEq` would do a field projection, which is banned
67+
// by <https://github.com/rust-lang/compiler-team/issues/807>.
68+
#[unstable(
69+
feature = "nonzero_internals",
70+
reason = "implementation detail which may disappear or be replaced at any time",
71+
issue = "none"
72+
)]
73+
impl StructuralPartialEq for $NonZeroInner {}
5874
)+
5975
}
6076

@@ -172,7 +188,7 @@ where
172188
{
173189
#[inline]
174190
fn clone(&self) -> Self {
175-
Self(self.0)
191+
*self
176192
}
177193
}
178194

@@ -440,15 +456,21 @@ where
440456
#[rustc_const_stable(feature = "const_nonzero_get", since = "1.34.0")]
441457
#[inline]
442458
pub const fn get(self) -> T {
443-
// FIXME: This can be changed to simply `self.0` once LLVM supports `!range` metadata
444-
// for function arguments: https://github.com/llvm/llvm-project/issues/76628
445-
//
446459
// Rustc can set range metadata only if it loads `self` from
447460
// memory somewhere. If the value of `self` came from a by-value argument
448461
// of some not-inlined function, LLVM doesn't have range metadata
449462
// to understand that the value cannot be zero.
450463
//
451-
// For now, using the transmute `assume`s the range at runtime.
464+
// Using the transmute `assume`s the range at runtime.
465+
//
466+
// Even once LLVM supports `!range` metadata for function arguments
467+
// (see <https://github.com/llvm/llvm-project/issues/76628>), this can't
468+
// be `.0` because MCP#807 bans field-projecting into `scalar_valid_range`
469+
// types, and it arguably wouldn't want to be anyway because if this is
470+
// MIR-inlined, there's no opportunity to put that argument metadata anywhere.
471+
//
472+
// The good answer here will eventually be pattern types, which will hopefully
473+
// allow it to go back to `.0`, maybe with a cast of some sort.
452474
//
453475
// SAFETY: `ZeroablePrimitive` guarantees that the size and bit validity
454476
// of `.0` is such that this transmute is sound.

Diff for: library/core/src/ptr/non_null.rs

+30-23
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ use crate::pin::PinCoerceUnsized;
77
use crate::ptr::Unique;
88
use crate::slice::{self, SliceIndex};
99
use crate::ub_checks::assert_unsafe_precondition;
10-
use crate::{fmt, hash, intrinsics, ptr};
10+
use crate::{fmt, hash, intrinsics, mem, ptr};
1111

1212
/// `*mut T` but non-zero and [covariant].
1313
///
@@ -69,6 +69,8 @@ use crate::{fmt, hash, intrinsics, ptr};
6969
#[rustc_nonnull_optimization_guaranteed]
7070
#[rustc_diagnostic_item = "NonNull"]
7171
pub struct NonNull<T: ?Sized> {
72+
// Remember to use `.as_ptr()` instead of `.pointer`, as field-projecting into
73+
// this is banned by <https://github.com/rust-lang/compiler-team/issues/807>.
7274
pointer: *const T,
7375
}
7476

@@ -282,7 +284,7 @@ impl<T: ?Sized> NonNull<T> {
282284
pub fn addr(self) -> NonZero<usize> {
283285
// SAFETY: The pointer is guaranteed by the type to be non-null,
284286
// meaning that the address will be non-zero.
285-
unsafe { NonZero::new_unchecked(self.pointer.addr()) }
287+
unsafe { NonZero::new_unchecked(self.as_ptr().addr()) }
286288
}
287289

288290
/// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
@@ -296,7 +298,7 @@ impl<T: ?Sized> NonNull<T> {
296298
#[stable(feature = "strict_provenance", since = "CURRENT_RUSTC_VERSION")]
297299
pub fn with_addr(self, addr: NonZero<usize>) -> Self {
298300
// SAFETY: The result of `ptr::from::with_addr` is non-null because `addr` is guaranteed to be non-zero.
299-
unsafe { NonNull::new_unchecked(self.pointer.with_addr(addr.get()) as *mut _) }
301+
unsafe { NonNull::new_unchecked(self.as_ptr().with_addr(addr.get()) as *mut _) }
300302
}
301303

302304
/// Creates a new pointer by mapping `self`'s address to a new one, preserving the
@@ -335,7 +337,12 @@ impl<T: ?Sized> NonNull<T> {
335337
#[must_use]
336338
#[inline(always)]
337339
pub const fn as_ptr(self) -> *mut T {
338-
self.pointer as *mut T
340+
// This is a transmute for the same reasons as `NonZero::get`.
341+
342+
// SAFETY: `NonNull` is `transparent` over a `*const T`, and `*const T`
343+
// and `*mut T` have the same layout, so transitively we can transmute
344+
// our `NonNull` to a `*mut T` directly.
345+
unsafe { mem::transmute::<Self, *mut T>(self) }
339346
}
340347

341348
/// Returns a shared reference to the value. If the value may be uninitialized, [`as_uninit_ref`]
@@ -484,7 +491,7 @@ impl<T: ?Sized> NonNull<T> {
484491
// Additionally safety contract of `offset` guarantees that the resulting pointer is
485492
// pointing to an allocation, there can't be an allocation at null, thus it's safe to
486493
// construct `NonNull`.
487-
unsafe { NonNull { pointer: intrinsics::offset(self.pointer, count) } }
494+
unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
488495
}
489496

490497
/// Calculates the offset from a pointer in bytes.
@@ -508,7 +515,7 @@ impl<T: ?Sized> NonNull<T> {
508515
// Additionally safety contract of `offset` guarantees that the resulting pointer is
509516
// pointing to an allocation, there can't be an allocation at null, thus it's safe to
510517
// construct `NonNull`.
511-
unsafe { NonNull { pointer: self.pointer.byte_offset(count) } }
518+
unsafe { NonNull { pointer: self.as_ptr().byte_offset(count) } }
512519
}
513520

514521
/// Adds an offset to a pointer (convenience for `.offset(count as isize)`).
@@ -560,7 +567,7 @@ impl<T: ?Sized> NonNull<T> {
560567
// Additionally safety contract of `offset` guarantees that the resulting pointer is
561568
// pointing to an allocation, there can't be an allocation at null, thus it's safe to
562569
// construct `NonNull`.
563-
unsafe { NonNull { pointer: intrinsics::offset(self.pointer, count) } }
570+
unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
564571
}
565572

566573
/// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`).
@@ -584,7 +591,7 @@ impl<T: ?Sized> NonNull<T> {
584591
// Additionally safety contract of `add` guarantees that the resulting pointer is pointing
585592
// to an allocation, there can't be an allocation at null, thus it's safe to construct
586593
// `NonNull`.
587-
unsafe { NonNull { pointer: self.pointer.byte_add(count) } }
594+
unsafe { NonNull { pointer: self.as_ptr().byte_add(count) } }
588595
}
589596

590597
/// Subtracts an offset from a pointer (convenience for
@@ -667,7 +674,7 @@ impl<T: ?Sized> NonNull<T> {
667674
// Additionally safety contract of `sub` guarantees that the resulting pointer is pointing
668675
// to an allocation, there can't be an allocation at null, thus it's safe to construct
669676
// `NonNull`.
670-
unsafe { NonNull { pointer: self.pointer.byte_sub(count) } }
677+
unsafe { NonNull { pointer: self.as_ptr().byte_sub(count) } }
671678
}
672679

673680
/// Calculates the distance between two pointers within the same allocation. The returned value is in
@@ -764,7 +771,7 @@ impl<T: ?Sized> NonNull<T> {
764771
T: Sized,
765772
{
766773
// SAFETY: the caller must uphold the safety contract for `offset_from`.
767-
unsafe { self.pointer.offset_from(origin.pointer) }
774+
unsafe { self.as_ptr().offset_from(origin.as_ptr()) }
768775
}
769776

770777
/// Calculates the distance between two pointers within the same allocation. The returned value is in
@@ -782,7 +789,7 @@ impl<T: ?Sized> NonNull<T> {
782789
#[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
783790
pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNull<U>) -> isize {
784791
// SAFETY: the caller must uphold the safety contract for `byte_offset_from`.
785-
unsafe { self.pointer.byte_offset_from(origin.pointer) }
792+
unsafe { self.as_ptr().byte_offset_from(origin.as_ptr()) }
786793
}
787794

788795
// N.B. `wrapping_offset``, `wrapping_add`, etc are not implemented because they can wrap to null
@@ -857,7 +864,7 @@ impl<T: ?Sized> NonNull<T> {
857864
T: Sized,
858865
{
859866
// SAFETY: the caller must uphold the safety contract for `sub_ptr`.
860-
unsafe { self.pointer.sub_ptr(subtracted.pointer) }
867+
unsafe { self.as_ptr().sub_ptr(subtracted.as_ptr()) }
861868
}
862869

863870
/// Calculates the distance between two pointers within the same allocation, *where it's known that
@@ -876,7 +883,7 @@ impl<T: ?Sized> NonNull<T> {
876883
#[rustc_const_unstable(feature = "const_ptr_sub_ptr", issue = "95892")]
877884
pub const unsafe fn byte_sub_ptr<U: ?Sized>(self, origin: NonNull<U>) -> usize {
878885
// SAFETY: the caller must uphold the safety contract for `byte_sub_ptr`.
879-
unsafe { self.pointer.byte_sub_ptr(origin.pointer) }
886+
unsafe { self.as_ptr().byte_sub_ptr(origin.as_ptr()) }
880887
}
881888

882889
/// Reads the value from `self` without moving it. This leaves the
@@ -894,7 +901,7 @@ impl<T: ?Sized> NonNull<T> {
894901
T: Sized,
895902
{
896903
// SAFETY: the caller must uphold the safety contract for `read`.
897-
unsafe { ptr::read(self.pointer) }
904+
unsafe { ptr::read(self.as_ptr()) }
898905
}
899906

900907
/// Performs a volatile read of the value from `self` without moving it. This
@@ -915,7 +922,7 @@ impl<T: ?Sized> NonNull<T> {
915922
T: Sized,
916923
{
917924
// SAFETY: the caller must uphold the safety contract for `read_volatile`.
918-
unsafe { ptr::read_volatile(self.pointer) }
925+
unsafe { ptr::read_volatile(self.as_ptr()) }
919926
}
920927

921928
/// Reads the value from `self` without moving it. This leaves the
@@ -935,7 +942,7 @@ impl<T: ?Sized> NonNull<T> {
935942
T: Sized,
936943
{
937944
// SAFETY: the caller must uphold the safety contract for `read_unaligned`.
938-
unsafe { ptr::read_unaligned(self.pointer) }
945+
unsafe { ptr::read_unaligned(self.as_ptr()) }
939946
}
940947

941948
/// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
@@ -955,7 +962,7 @@ impl<T: ?Sized> NonNull<T> {
955962
T: Sized,
956963
{
957964
// SAFETY: the caller must uphold the safety contract for `copy`.
958-
unsafe { ptr::copy(self.pointer, dest.as_ptr(), count) }
965+
unsafe { ptr::copy(self.as_ptr(), dest.as_ptr(), count) }
959966
}
960967

961968
/// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
@@ -975,7 +982,7 @@ impl<T: ?Sized> NonNull<T> {
975982
T: Sized,
976983
{
977984
// SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
978-
unsafe { ptr::copy_nonoverlapping(self.pointer, dest.as_ptr(), count) }
985+
unsafe { ptr::copy_nonoverlapping(self.as_ptr(), dest.as_ptr(), count) }
979986
}
980987

981988
/// Copies `count * size_of<T>` bytes from `src` to `self`. The source
@@ -995,7 +1002,7 @@ impl<T: ?Sized> NonNull<T> {
9951002
T: Sized,
9961003
{
9971004
// SAFETY: the caller must uphold the safety contract for `copy`.
998-
unsafe { ptr::copy(src.pointer, self.as_ptr(), count) }
1005+
unsafe { ptr::copy(src.as_ptr(), self.as_ptr(), count) }
9991006
}
10001007

10011008
/// Copies `count * size_of<T>` bytes from `src` to `self`. The source
@@ -1015,7 +1022,7 @@ impl<T: ?Sized> NonNull<T> {
10151022
T: Sized,
10161023
{
10171024
// SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1018-
unsafe { ptr::copy_nonoverlapping(src.pointer, self.as_ptr(), count) }
1025+
unsafe { ptr::copy_nonoverlapping(src.as_ptr(), self.as_ptr(), count) }
10191026
}
10201027

10211028
/// Executes the destructor (if any) of the pointed-to value.
@@ -1202,7 +1209,7 @@ impl<T: ?Sized> NonNull<T> {
12021209

12031210
{
12041211
// SAFETY: `align` has been checked to be a power of 2 above.
1205-
unsafe { ptr::align_offset(self.pointer, align) }
1212+
unsafe { ptr::align_offset(self.as_ptr(), align) }
12061213
}
12071214
}
12081215

@@ -1230,7 +1237,7 @@ impl<T: ?Sized> NonNull<T> {
12301237
where
12311238
T: Sized,
12321239
{
1233-
self.pointer.is_aligned()
1240+
self.as_ptr().is_aligned()
12341241
}
12351242

12361243
/// Returns whether the pointer is aligned to `align`.
@@ -1267,7 +1274,7 @@ impl<T: ?Sized> NonNull<T> {
12671274
#[must_use]
12681275
#[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
12691276
pub fn is_aligned_to(self, align: usize) -> bool {
1270-
self.pointer.is_aligned_to(align)
1277+
self.as_ptr().is_aligned_to(align)
12711278
}
12721279
}
12731280

Diff for: tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.32bit.panic-abort.diff

+2-10
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,6 @@
3131
}
3232
}
3333
scope 9 (inlined NonNull::<[u8]>::as_ptr) {
34-
let mut _17: *const [u8];
3534
}
3635
}
3736
scope 3 (inlined #[track_caller] Option::<Layout>::unwrap) {
@@ -102,16 +101,9 @@
102101
StorageDead(_16);
103102
StorageDead(_12);
104103
StorageDead(_6);
105-
- StorageLive(_17);
106-
+ nop;
107-
_17 = copy (_5.0: *const [u8]);
108-
- _4 = move _17 as *mut [u8] (PtrToPtr);
109-
- StorageDead(_17);
110-
+ _4 = copy _17 as *mut [u8] (PtrToPtr);
111-
+ nop;
104+
_4 = copy _5 as *mut [u8] (Transmute);
112105
StorageDead(_5);
113-
- _3 = move _4 as *mut u8 (PtrToPtr);
114-
+ _3 = copy _17 as *mut u8 (PtrToPtr);
106+
_3 = move _4 as *mut u8 (PtrToPtr);
115107
StorageDead(_4);
116108
StorageDead(_3);
117109
- StorageDead(_1);

Diff for: tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.32bit.panic-unwind.diff

+2-10
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@
2020
scope 5 (inlined <std::alloc::Global as Allocator>::allocate) {
2121
}
2222
scope 6 (inlined NonNull::<[u8]>::as_ptr) {
23-
let mut _12: *const [u8];
2423
}
2524
}
2625
scope 3 (inlined #[track_caller] Option::<Layout>::unwrap) {
@@ -45,16 +44,9 @@
4544

4645
bb1: {
4746
StorageDead(_6);
48-
- StorageLive(_12);
49-
+ nop;
50-
_12 = copy (_5.0: *const [u8]);
51-
- _4 = move _12 as *mut [u8] (PtrToPtr);
52-
- StorageDead(_12);
53-
+ _4 = copy _12 as *mut [u8] (PtrToPtr);
54-
+ nop;
47+
_4 = copy _5 as *mut [u8] (Transmute);
5548
StorageDead(_5);
56-
- _3 = move _4 as *mut u8 (PtrToPtr);
57-
+ _3 = copy _12 as *mut u8 (PtrToPtr);
49+
_3 = move _4 as *mut u8 (PtrToPtr);
5850
StorageDead(_4);
5951
StorageDead(_3);
6052
- StorageDead(_1);

Diff for: tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.64bit.panic-abort.diff

+2-10
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,6 @@
3131
}
3232
}
3333
scope 9 (inlined NonNull::<[u8]>::as_ptr) {
34-
let mut _17: *const [u8];
3534
}
3635
}
3736
scope 3 (inlined #[track_caller] Option::<Layout>::unwrap) {
@@ -102,16 +101,9 @@
102101
StorageDead(_16);
103102
StorageDead(_12);
104103
StorageDead(_6);
105-
- StorageLive(_17);
106-
+ nop;
107-
_17 = copy (_5.0: *const [u8]);
108-
- _4 = move _17 as *mut [u8] (PtrToPtr);
109-
- StorageDead(_17);
110-
+ _4 = copy _17 as *mut [u8] (PtrToPtr);
111-
+ nop;
104+
_4 = copy _5 as *mut [u8] (Transmute);
112105
StorageDead(_5);
113-
- _3 = move _4 as *mut u8 (PtrToPtr);
114-
+ _3 = copy _17 as *mut u8 (PtrToPtr);
106+
_3 = move _4 as *mut u8 (PtrToPtr);
115107
StorageDead(_4);
116108
StorageDead(_3);
117109
- StorageDead(_1);

0 commit comments

Comments
 (0)