72 | 72 | use crate::cmp::Ordering;
73 | 73 | use crate::fmt;
74 | 74 | use crate::hash;
75 | | -use crate::intrinsics;
| 75 | +use crate::intrinsics::{self, is_aligned_and_not_null, is_nonoverlapping};
76 | 76 | use crate::mem::{self, MaybeUninit};
77 | 77 |
78 | 78 | #[stable(feature = "rust1", since = "1.0.0")]
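
The widened import pulls in two pointer-validity helpers. As a reference for the hunks below, here is a minimal sketch of what they check, assuming definitions along these lines in `crate::intrinsics` (the exact code there may differ):

    // Sketch only: mirrors the intent of the imported helpers, not necessarily their exact source.
    pub(crate) fn is_aligned_and_not_null<T>(ptr: *const T) -> bool {
        !ptr.is_null() && ptr as usize % mem::align_of::<T>() == 0
    }

    pub(crate) fn is_nonoverlapping<T>(src: *const T, dst: *const T, count: usize) -> bool {
        let src_usize = src as usize;
        let dst_usize = dst as usize;
        let size = mem::size_of::<T>()
            .checked_mul(count)
            .expect("is_nonoverlapping: `size_of::<T>() * count` overflows a usize");
        let diff = if src_usize > dst_usize { src_usize - dst_usize } else { dst_usize - src_usize };
        // If the two pointers are at least `size` bytes apart, the ranges cannot overlap.
        size <= diff
    }
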
@@ -392,6 +392,10 @@ pub unsafe fn swap<T>(x: *mut T, y: *mut T) {
392 | 392 | #[inline]
393 | 393 | #[stable(feature = "swap_nonoverlapping", since = "1.27.0")]
394 | 394 | pub unsafe fn swap_nonoverlapping<T>(x: *mut T, y: *mut T, count: usize) {
| 395 | +    debug_assert!(is_aligned_and_not_null(x), "attempt to swap unaligned or null pointer");
| 396 | +    debug_assert!(is_aligned_and_not_null(y), "attempt to swap unaligned or null pointer");
| 397 | +    debug_assert!(is_nonoverlapping(x, y, count), "attempt to swap overlapping memory");
| 398 | +
395 | 399 |     let x = x as *mut u8;
396 | 400 |     let y = y as *mut u8;
397 | 401 |     let len = mem::size_of::<T>() * count;
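
Note that `debug_assert!` compiles to nothing unless `core` itself is built with debug assertions (for example in a local compiler build), so release users pay no cost. A hypothetical misuse (names and values invented here) that the new overlap check would catch in such a build:

    use core::ptr;

    fn main() {
        let mut buf = [1u8, 2, 3, 4];
        let p = buf.as_mut_ptr();
        // The ranges [p, p+3) and [p+1, p+4) overlap: with debug assertions enabled in core this
        // panics with "attempt to swap overlapping memory"; without them it is undefined behaviour.
        unsafe { ptr::swap_nonoverlapping(p, p.add(1), 3) };
    }
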
@@ -619,6 +623,7 @@ pub unsafe fn replace<T>(dst: *mut T, mut src: T) -> T {
619 | 623 | #[inline]
620 | 624 | #[stable(feature = "rust1", since = "1.0.0")]
621 | 625 | pub unsafe fn read<T>(src: *const T) -> T {
| 626 | +    // `copy_nonoverlapping` takes care of debug_assert.
622 | 627 |     let mut tmp = MaybeUninit::<T>::uninit();
623 | 628 |     copy_nonoverlapping(src, tmp.as_mut_ptr(), 1);
624 | 629 |     tmp.assume_init()
@@ -712,6 +717,7 @@ pub unsafe fn read<T>(src: *const T) -> T {
712 | 717 | #[inline]
713 | 718 | #[stable(feature = "ptr_unaligned", since = "1.17.0")]
714 | 719 | pub unsafe fn read_unaligned<T>(src: *const T) -> T {
| 720 | +    // `copy_nonoverlapping` takes care of debug_assert.
715 | 721 |     let mut tmp = MaybeUninit::<T>::uninit();
716 | 722 |     copy_nonoverlapping(src as *const u8, tmp.as_mut_ptr() as *mut u8, mem::size_of::<T>());
717 | 723 |     tmp.assume_init()
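
`read` and `read_unaligned` gain only comments because they already funnel through `copy_nonoverlapping`, which this patch equips with the same assertions elsewhere in the module (not shown in these hunks); `write_unaligned` below relies on the same deferral. A sketch of the assumed shape of that checked wrapper:

    // Assumed shape only; the real checked wrapper is defined earlier in this module.
    pub unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize) {
        debug_assert!(is_aligned_and_not_null(src), "attempt to copy from unaligned or null pointer");
        debug_assert!(is_aligned_and_not_null(dst), "attempt to copy to unaligned or null pointer");
        debug_assert!(is_nonoverlapping(src, dst, count), "attempt to copy to overlapping memory");
        intrinsics::copy_nonoverlapping(src, dst, count)
    }

Since `read_unaligned` copies through `u8` pointers, the alignment part of that check is trivially satisfied there, which is intentional: unaligned sources are exactly what it exists for.
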
@@ -804,6 +810,7 @@ pub unsafe fn read_unaligned<T>(src: *const T) -> T {
804 | 810 | #[inline]
805 | 811 | #[stable(feature = "rust1", since = "1.0.0")]
806 | 812 | pub unsafe fn write<T>(dst: *mut T, src: T) {
| 813 | +    debug_assert!(is_aligned_and_not_null(dst), "attempt to write to unaligned or null pointer");
807 | 814 |     intrinsics::move_val_init(&mut *dst, src)
808 | 815 | }
809 | 816 |
@@ -896,6 +903,7 @@ pub unsafe fn write<T>(dst: *mut T, src: T) {
896 | 903 | #[inline]
897 | 904 | #[stable(feature = "ptr_unaligned", since = "1.17.0")]
898 | 905 | pub unsafe fn write_unaligned<T>(dst: *mut T, src: T) {
| 906 | +    // `copy_nonoverlapping` takes care of debug_assert.
899 | 907 |     copy_nonoverlapping(&src as *const T as *const u8, dst as *mut u8, mem::size_of::<T>());
900 | 908 |     mem::forget(src);
901 | 909 | }
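
A hypothetical illustration (buffer and values invented here) of why `write` gets an alignment assertion while `write_unaligned` does not need one: the unaligned variant copies byte-wise through `u8` pointers, so a misaligned destination is fine there.

    use core::ptr;

    fn main() {
        let mut buf = [0u32; 2];
        // One byte past a 4-aligned base is never aligned for u32, but still in bounds for 4 bytes.
        let misaligned = unsafe { (buf.as_mut_ptr() as *mut u8).add(1) } as *mut u32;
        // `ptr::write` would trip the new assertion here (and is UB regardless);
        // `ptr::write_unaligned` is the right tool and is well defined.
        unsafe { ptr::write_unaligned(misaligned, 0xDEAD_BEEF) };
        assert_eq!(unsafe { ptr::read_unaligned(misaligned as *const u32) }, 0xDEAD_BEEF);
    }
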
@@ -967,6 +975,7 @@ pub unsafe fn write_unaligned<T>(dst: *mut T, src: T) {
967 | 975 | #[inline]
968 | 976 | #[stable(feature = "volatile", since = "1.9.0")]
969 | 977 | pub unsafe fn read_volatile<T>(src: *const T) -> T {
| 978 | +    debug_assert!(is_aligned_and_not_null(src), "attempt to read from unaligned or null pointer");
970 | 979 |     intrinsics::volatile_load(src)
971 | 980 | }
972 | 981 |
@@ -1035,6 +1044,7 @@ pub unsafe fn read_volatile<T>(src: *const T) -> T {
1035 | 1044 | #[inline]
1036 | 1045 | #[stable(feature = "volatile", since = "1.9.0")]
1037 | 1046 | pub unsafe fn write_volatile<T>(dst: *mut T, src: T) {
| 1047 | +    debug_assert!(is_aligned_and_not_null(dst), "attempt to write to unaligned or null pointer");
1038 | 1048 |     intrinsics::volatile_store(dst, src);
1039 | 1049 | }
1040 | 1050 |
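
Finally, a small usage sketch (hypothetical variable, not from the patch) of volatile accesses that satisfy the new checks, since a reference always yields a non-null, properly aligned pointer:

    use core::ptr;

    fn main() {
        let mut flag: u32 = 0;
        unsafe {
            // References coerce to raw pointers that are non-null and aligned,
            // so neither new assertion can fire here.
            ptr::write_volatile(&mut flag, 1);
            assert_eq!(ptr::read_volatile(&flag), 1);
        }
    }
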