Replace the macros with the only two functions that are actually called
from this file, load_u8() and store_u8().
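
For context, a minimal sketch of the kind of byte-wise access these two
helpers are kept around for (the helper name and call pattern below are
illustrative only and are not part of this patch):

	/*
	 * Sketch: assemble a little-endian halfword from two byte loads,
	 * the way a misaligned-access handler would use load_u8().
	 */
	static inline u16 load_u16_bytewise(const u8 *addr)
	{
		return (u16)load_u8(addr) | ((u16)load_u8(addr + 1) << 8);
	}
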
Signed-off-by: Clément Léger <[email protected]>
---
arch/riscv/kernel/traps_misaligned.c | 46 +++++-----------------------
1 file changed, 7 insertions(+), 39 deletions(-)
diff --git a/arch/riscv/kernel/traps_misaligned.c b/arch/riscv/kernel/traps_misaligned.c
index 378f5b151443..e7bfb33089c1 100644
--- a/arch/riscv/kernel/traps_misaligned.c
+++ b/arch/riscv/kernel/traps_misaligned.c
@@ -151,51 +151,19 @@
#define PRECISION_S 0
#define PRECISION_D 1
-#define DECLARE_UNPRIVILEGED_LOAD_FUNCTION(type, insn) \
-static inline type load_##type(const type *addr) \
-{ \
- type val; \
- asm (#insn " %0, %1" \
- : "=&r" (val) : "m" (*addr)); \
- return val; \
-}
+static inline u8 load_u8(const u8 *addr)
+{
+ u8 val;
-#define DECLARE_UNPRIVILEGED_STORE_FUNCTION(type, insn) \
-static inline void store_##type(type *addr, type val) \
-{ \
- asm volatile (#insn " %0, %1\n" \
- : : "r" (val), "m" (*addr)); \
-}
+ asm volatile("lbu %0, %1" : "=&r" (val) : "m" (*addr));
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u8, lbu)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u16, lhu)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s8, lb)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s16, lh)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s32, lw)
-DECLARE_UNPRIVILEGED_STORE_FUNCTION(u8, sb)
-DECLARE_UNPRIVILEGED_STORE_FUNCTION(u16, sh)
-DECLARE_UNPRIVILEGED_STORE_FUNCTION(u32, sw)
-#if defined(CONFIG_64BIT)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u32, lwu)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u64, ld)
-DECLARE_UNPRIVILEGED_STORE_FUNCTION(u64, sd)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(ulong, ld)
-#else
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u32, lw)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(ulong, lw)
-
-static inline u64 load_u64(const u64 *addr)
-{
- return load_u32((u32 *)addr)
- + ((u64)load_u32((u32 *)addr + 1) << 32);
+ return val;
}
-static inline void store_u64(u64 *addr, u64 val)
+static inline void store_u8(u8 *addr, u8 val)
{
- store_u32((u32 *)addr, val);
- store_u32((u32 *)addr + 1, val >> 32);
+ asm volatile ("sb %0, %1\n" : : "r" (val), "m" (*addr));
}
-#endif
static inline ulong get_insn(ulong mepc)
{
--
2.40.1