diff --git a/ostd/src/arch/x86/cpu/local.rs b/ostd/src/arch/x86/cpu/local.rs
index 80e75525c..a4ca6c2c4 100644
--- a/ostd/src/arch/x86/cpu/local.rs
+++ b/ostd/src/arch/x86/cpu/local.rs
@@ -19,7 +19,7 @@
 macro_rules! impl_numeric_single_instruction_for {
     ($([$typ: ty, $inout_type: ident, $register_format: expr])*) => {$(
         impl SingleInstructionAddAssign<$typ> for $typ {
-            unsafe fn add_assign(offset: *mut Self, val: Self) {
+            unsafe fn add_assign(offset: usize, val: Self) {
                 // SAFETY:
                 // 1. `gs` points to the CPU-local region (global invariant).
                 // 2. `offset` represents the offset of a CPU-local variable
@@ -40,7 +40,7 @@ macro_rules! impl_numeric_single_instruction_for {
         }
 
         impl SingleInstructionSubAssign<$typ> for $typ {
-            unsafe fn sub_assign(offset: *mut Self, val: Self) {
+            unsafe fn sub_assign(offset: usize, val: Self) {
                 // SAFETY: Same as `add_assign`.
                 unsafe {
                     core::arch::asm!(
@@ -54,7 +54,7 @@ macro_rules! impl_numeric_single_instruction_for {
         }
 
         impl SingleInstructionBitAndAssign<$typ> for $typ {
-            unsafe fn bitand_assign(offset: *mut Self, val: Self) {
+            unsafe fn bitand_assign(offset: usize, val: Self) {
                 // SAFETY: Same as `add_assign`.
                 unsafe {
                     core::arch::asm!(
@@ -68,7 +68,7 @@ macro_rules! impl_numeric_single_instruction_for {
         }
 
         impl SingleInstructionBitOrAssign<$typ> for $typ {
-            unsafe fn bitor_assign(offset: *mut Self, val: Self) {
+            unsafe fn bitor_assign(offset: usize, val: Self) {
                 // SAFETY: Same as `add_assign`.
                 unsafe {
                     core::arch::asm!(
@@ -82,7 +82,7 @@ macro_rules! impl_numeric_single_instruction_for {
         }
 
         impl SingleInstructionBitXorAssign<$typ> for $typ {
-            unsafe fn bitxor_assign(offset: *mut Self, val: Self) {
+            unsafe fn bitxor_assign(offset: usize, val: Self) {
                 // SAFETY: Same as `add_assign`.
                 unsafe {
                     core::arch::asm!(
@@ -96,7 +96,7 @@ macro_rules! impl_numeric_single_instruction_for {
         }
 
         impl SingleInstructionLoad for $typ {
-            unsafe fn load(offset: *const Self) -> Self {
+            unsafe fn load(offset: usize) -> Self {
                 let val: Self;
                 // SAFETY: Same as `add_assign`.
                 unsafe {
@@ -112,7 +112,7 @@ macro_rules! impl_numeric_single_instruction_for {
         }
 
         impl SingleInstructionStore for $typ {
-            unsafe fn store(offset: *mut Self, val: Self) {
+            unsafe fn store(offset: usize, val: Self) {
                 // SAFETY: Same as `add_assign`.
                 unsafe {
                     core::arch::asm!(
@@ -145,7 +145,7 @@
 macro_rules! impl_generic_single_instruction_for {
     ($([<$gen_type:ident $(, $more_gen_type:ident)*>, $typ:ty])*) => {$(
         impl<$gen_type $(, $more_gen_type)*> SingleInstructionLoad for $typ {
-            unsafe fn load(offset: *const Self) -> Self {
+            unsafe fn load(offset: usize) -> Self {
                 let val: Self;
                 // SAFETY: Same as `add_assign`.
                 unsafe {
@@ -161,7 +161,7 @@ macro_rules! impl_generic_single_instruction_for {
         }
 
         impl<$gen_type $(, $more_gen_type)*> SingleInstructionStore for $typ {
-            unsafe fn store(offset: *mut Self, val: Self) {
+            unsafe fn store(offset: usize, val: Self) {
                 // SAFETY: Same as `add_assign`.
                 unsafe {
                     core::arch::asm!(
@@ -187,7 +187,7 @@ impl_generic_single_instruction_for!(
 // Rust reference: .
 
 impl SingleInstructionLoad for bool {
-    unsafe fn load(offset: *const Self) -> Self {
+    unsafe fn load(offset: usize) -> Self {
         let val: u8;
         // SAFETY: Same as `add_assign`.
         unsafe {
@@ -204,7 +204,7 @@ impl SingleInstructionLoad for bool {
 }
 
 impl SingleInstructionStore for bool {
-    unsafe fn store(offset: *mut Self, val: Self) {
+    unsafe fn store(offset: usize, val: Self) {
         let val: u8 = if val { 1 } else { 0 };
         // SAFETY: Same as `add_assign`.
         unsafe {
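Note that the hunks above only change the signatures generated by `impl_numeric_single_instruction_for!`; the instruction templates themselves are untouched. As a rough illustration of what the macro now produces, the sketch below hand-expands the `u32` case under the new `usize`-offset signature. The exact template string, the register modifier (`:e`), and `options(nostack)` are assumptions for illustration, not lines taken from the patch.

    // Illustrative expansion only (not the macro's literal output): an x86-64
    // `add_assign` for `u32` that takes the CPU-local offset as a plain `usize`
    // and folds it into one `gs`-relative instruction.
    impl SingleInstructionAddAssign<u32> for u32 {
        unsafe fn add_assign(offset: usize, val: u32) {
            // SAFETY: `gs` points to the CPU-local region and `offset` is the
            // offset of a CPU-local `u32`, so the single `add` below updates a
            // valid, CPU-private location and cannot be torn by an interrupt.
            unsafe {
                core::arch::asm!(
                    "add gs:[{off}], {val:e}",
                    off = in(reg) offset,
                    val = in(reg) val,
                    options(nostack),
                );
            }
        }
    }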
diff --git a/ostd/src/cpu/local/cell.rs b/ostd/src/cpu/local/cell.rs
index 8307088ef..c32ac53e0 100644
--- a/ostd/src/cpu/local/cell.rs
+++ b/ostd/src/cpu/local/cell.rs
@@ -160,7 +160,7 @@ impl<T: SingleInstructionAddAssign<T>> CpuLocalCell<T> {
         // SAFETY: The CPU-local object is defined in the `.cpu_local` section,
         // so the pointer to the object is valid. And the reference is never shared.
         unsafe {
-            T::add_assign(offset as *mut T, rhs);
+            T::add_assign(offset, rhs);
         }
     }
 }
@@ -177,7 +177,7 @@ impl<T: SingleInstructionSubAssign<T>> CpuLocalCell<T> {
         // SAFETY: The CPU-local object is defined in the `.cpu_local` section,
         // so the pointer to the object is valid. And the reference is never shared.
         unsafe {
-            T::sub_assign(offset as *mut T, rhs);
+            T::sub_assign(offset, rhs);
         }
     }
 }
@@ -192,7 +192,7 @@ impl<T: SingleInstructionBitAndAssign<T>> CpuLocalCell<T> {
         // SAFETY: The CPU-local object is defined in the `.cpu_local` section,
         // so the pointer to the object is valid. And the reference is never shared.
         unsafe {
-            T::bitand_assign(offset as *mut T, rhs);
+            T::bitand_assign(offset, rhs);
         }
     }
 }
@@ -207,7 +207,7 @@ impl<T: SingleInstructionBitOrAssign<T>> CpuLocalCell<T> {
         // SAFETY: The CPU-local object is defined in the `.cpu_local` section,
         // so the pointer to the object is valid. And the reference is never shared.
         unsafe {
-            T::bitor_assign(offset as *mut T, rhs);
+            T::bitor_assign(offset, rhs);
         }
     }
 }
@@ -222,7 +222,7 @@ impl<T: SingleInstructionBitXorAssign<T>> CpuLocalCell<T> {
         // SAFETY: The CPU-local object is defined in the `.cpu_local` section,
         // so the pointer to the object is valid. And the reference is never shared.
         unsafe {
-            T::bitxor_assign(offset as *mut T, rhs);
+            T::bitxor_assign(offset, rhs);
         }
     }
 }
@@ -236,7 +236,7 @@ impl<T: SingleInstructionLoad> CpuLocalCell<T> {
         let offset = self as *const _ as usize - __cpu_local_start as *const () as usize;
         // SAFETY: The CPU-local object is defined in the `.cpu_local` section,
         // so the pointer to the object is valid.
-        unsafe { T::load(offset as *const T) }
+        unsafe { T::load(offset) }
     }
 }
 
@@ -250,7 +250,7 @@ impl<T: SingleInstructionStore> CpuLocalCell<T> {
         // SAFETY: The CPU-local object is defined in the `.cpu_local` section,
         // so the pointer to the object is valid. And the reference is never shared.
         unsafe {
-            T::store(offset as *mut T, val);
+            T::store(offset, val);
         }
     }
 }
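Call sites of `CpuLocalCell` are unaffected by this change: each method still computes the object's byte offset from `__cpu_local_start` and now simply forwards it as a `usize`. A short usage sketch, assuming the crate's existing `cpu_local_cell!` macro for declaring such statics (the static and function names below are made up for illustration):

    cpu_local_cell! {
        // Illustrative name; any numeric type covered by the x86 macro
        // behaves the same way.
        static TICK_COUNT: usize = 0;
    }

    fn on_timer_tick() {
        // On x86-64 this lowers to a single `gs`-relative `add`, so an
        // interrupt on the same CPU cannot interleave with the update.
        TICK_COUNT.add_assign(1);
        let _ticks = TICK_COUNT.load();
    }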
diff --git a/ostd/src/cpu/local/single_instr.rs b/ostd/src/cpu/local/single_instr.rs
index 89de283d1..72e4d3836 100644
--- a/ostd/src/cpu/local/single_instr.rs
+++ b/ostd/src/cpu/local/single_instr.rs
@@ -36,14 +36,14 @@ pub trait SingleInstructionAddAssign<Rhs = Self> {
     /// # Safety
     ///
     /// Please refer to the module-level documentation of [`self`].
-    unsafe fn add_assign(offset: *mut Self, rhs: Rhs);
+    unsafe fn add_assign(offset: usize, rhs: Rhs);
 }
 
 impl<T: num_traits::WrappingAdd + Copy> SingleInstructionAddAssign<T> for T {
-    default unsafe fn add_assign(offset: *mut Self, rhs: T) {
+    default unsafe fn add_assign(offset: usize, rhs: T) {
         let _guard = crate::irq::disable_local();
         let base = crate::arch::cpu::local::get_base() as usize;
-        let addr = (base + offset as usize) as *mut Self;
+        let addr = (base + offset) as *mut Self;
         // SAFETY:
         // 1. `addr` represents the address of a CPU-local variable.
         // 2. The variable is only accessible in the current CPU, is
@@ -62,14 +62,14 @@ pub trait SingleInstructionSubAssign<Rhs = Self> {
     /// # Safety
     ///
     /// Please refer to the module-level documentation of [`self`].
-    unsafe fn sub_assign(offset: *mut Self, rhs: Rhs);
+    unsafe fn sub_assign(offset: usize, rhs: Rhs);
 }
 
 impl<T: num_traits::WrappingSub + Copy> SingleInstructionSubAssign<T> for T {
-    default unsafe fn sub_assign(offset: *mut Self, rhs: T) {
+    default unsafe fn sub_assign(offset: usize, rhs: T) {
         let _guard = crate::irq::disable_local();
         let base = crate::arch::cpu::local::get_base() as usize;
-        let addr = (base + offset as usize) as *mut Self;
+        let addr = (base + offset) as *mut Self;
         // SAFETY: Same as `add_assign`.
         unsafe { addr.write(addr.read().wrapping_sub(&rhs)) };
     }
@@ -82,14 +82,14 @@ pub trait SingleInstructionBitOrAssign<Rhs = Self> {
     /// # Safety
     ///
     /// Please refer to the module-level documentation of [`self`].
-    unsafe fn bitor_assign(offset: *mut Self, rhs: Rhs);
+    unsafe fn bitor_assign(offset: usize, rhs: Rhs);
 }
 
 impl<T: core::ops::BitOr<Output = T> + Copy> SingleInstructionBitOrAssign<T> for T {
-    default unsafe fn bitor_assign(offset: *mut Self, rhs: T) {
+    default unsafe fn bitor_assign(offset: usize, rhs: T) {
         let _guard = crate::irq::disable_local();
         let base = crate::arch::cpu::local::get_base() as usize;
-        let addr = (base + offset as usize) as *mut Self;
+        let addr = (base + offset) as *mut Self;
         // SAFETY: Same as `add_assign`.
         unsafe { addr.write(addr.read() | rhs) };
     }
@@ -102,14 +102,14 @@ pub trait SingleInstructionBitAndAssign<Rhs = Self> {
     /// # Safety
     ///
     /// Please refer to the module-level documentation of [`self`].
-    unsafe fn bitand_assign(offset: *mut Self, rhs: Rhs);
+    unsafe fn bitand_assign(offset: usize, rhs: Rhs);
 }
 
 impl<T: core::ops::BitAnd<Output = T> + Copy> SingleInstructionBitAndAssign<T> for T {
-    default unsafe fn bitand_assign(offset: *mut Self, rhs: T) {
+    default unsafe fn bitand_assign(offset: usize, rhs: T) {
         let _guard = crate::irq::disable_local();
         let base = crate::arch::cpu::local::get_base() as usize;
-        let addr = (base + offset as usize) as *mut Self;
+        let addr = (base + offset) as *mut Self;
         // SAFETY: Same as `add_assign`.
         unsafe { addr.write(addr.read() & rhs) };
     }
@@ -122,14 +122,14 @@ pub trait SingleInstructionBitXorAssign<Rhs = Self> {
     /// # Safety
     ///
     /// Please refer to the module-level documentation of [`self`].
-    unsafe fn bitxor_assign(offset: *mut Self, rhs: Rhs);
+    unsafe fn bitxor_assign(offset: usize, rhs: Rhs);
 }
 
 impl<T: core::ops::BitXor<Output = T> + Copy> SingleInstructionBitXorAssign<T> for T {
-    default unsafe fn bitxor_assign(offset: *mut Self, rhs: T) {
+    default unsafe fn bitxor_assign(offset: usize, rhs: T) {
         let _guard = crate::irq::disable_local();
         let base = crate::arch::cpu::local::get_base() as usize;
-        let addr = (base + offset as usize) as *mut Self;
+        let addr = (base + offset) as *mut Self;
         // SAFETY: Same as `add_assign`.
         unsafe { addr.write(addr.read() ^ rhs) };
     }
@@ -142,14 +142,14 @@ pub trait SingleInstructionLoad {
     /// # Safety
     ///
     /// Please refer to the module-level documentation of [`self`].
-    unsafe fn load(offset: *const Self) -> Self;
+    unsafe fn load(offset: usize) -> Self;
 }
 
 impl<T: Copy> SingleInstructionLoad for T {
-    default unsafe fn load(offset: *const Self) -> Self {
+    default unsafe fn load(offset: usize) -> Self {
         let _guard = crate::irq::disable_local();
         let base = crate::arch::cpu::local::get_base() as usize;
-        let ptr = (base + offset as usize) as *const Self;
+        let ptr = (base + offset) as *const Self;
         // SAFETY: Same as `add_assign`.
         unsafe { ptr.read() }
     }
@@ -162,14 +162,14 @@ pub trait SingleInstructionStore {
     /// # Safety
     ///
     /// Please refer to the module-level documentation of [`self`].
-    unsafe fn store(offset: *mut Self, val: Self);
+    unsafe fn store(offset: usize, val: Self);
 }
 
 impl<T: Copy> SingleInstructionStore for T {
-    default unsafe fn store(offset: *mut Self, val: Self) {
+    default unsafe fn store(offset: usize, val: Self) {
         let _guard = crate::irq::disable_local();
         let base = crate::arch::cpu::local::get_base() as usize;
-        let ptr = (base + offset as usize) as *mut Self;
+        let ptr = (base + offset) as *mut Self;
         // SAFETY: Same as `add_assign`.
         unsafe { ptr.write(val) };
     }
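All of the generic fallbacks above share one pattern: disable local IRQs, add the per-CPU base address to the offset, and perform an ordinary read or write. The patch only changes how the offset arrives (a plain `usize` instead of a typed pointer). A minimal sketch of that pattern, using a hypothetical helper (`with_cpu_local_ptr` is not part of the patch):

    // Hypothetical helper, for illustration only: turns a `.cpu_local` byte
    // offset into a typed pointer to the current CPU's copy, holding the IRQ
    // guard so an interrupt handler on this CPU cannot run in the middle of a
    // read-modify-write sequence built on top of it.
    unsafe fn with_cpu_local_ptr<T, R>(offset: usize, f: impl FnOnce(*mut T) -> R) -> R {
        let _guard = crate::irq::disable_local();
        let base = crate::arch::cpu::local::get_base() as usize;
        f((base + offset) as *mut T)
    }

With such a helper, the default `store` body would reduce to `with_cpu_local_ptr(offset, |ptr| unsafe { ptr.write(val) })`; the patch keeps the equivalent computation inline in each default method.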