2017-03-16 19:15:36

by Joakim Tjernlund

Subject: [PATCH 1/2] x86: asm/bitops.h

Replace the void * cast with uintptr_t to do pointer arithmetic

Cc: <[email protected]> # v4.9+
Signed-off-by: Joakim Tjernlund <[email protected]>
---
arch/x86/include/asm/bitops.h | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/arch/x86/include/asm/bitops.h b/arch/x86/include/asm/bitops.h
index 8540227..b561304 100644
--- a/arch/x86/include/asm/bitops.h
+++ b/arch/x86/include/asm/bitops.h
@@ -50,7 +50,7 @@
* a mask operation on a byte.
*/
#define IS_IMMEDIATE(nr) (__builtin_constant_p(nr))
-#define CONST_MASK_ADDR(nr, addr) BITOP_ADDR((void *)(addr) + ((nr)>>3))
+#define CONST_MASK_ADDR(nr, addr) BITOP_ADDR((uintptr_t)(addr) + ((nr)>>3))
#define CONST_MASK(nr) (1 << ((nr) & 7))

/**
--
2.10.2
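
The change only affects how the address of the byte holding bit 'nr' is
computed. As a minimal, self-contained sketch of that computation (the
helper name is made up for illustration and is not kernel code):

#include <stdint.h>

/* Illustrative only: pointer arithmetic on void * relies on the GNU C
 * extension that treats void * like char *; going through uintptr_t
 * keeps the byte offset explicit without relying on that extension. */
static inline void *bit_byte_addr(const void *addr, unsigned int nr)
{
	/* nr >> 3 turns a bit index into a byte offset */
	return (void *)((uintptr_t)addr + (nr >> 3));
}

Whether avoiding that extension is actually the motivation for the patch
is not stated in the changelog (see the reply below).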


2017-03-16 19:15:38

by Joakim Tjernlund

Subject: [PATCH 2/2] x86: Match bitops prototypes

Adjust the bitops function prototypes in asm-generic/bitops/le.h
to match the x86 ones in arch/x86/include/asm/bitops.h.
That is, replace void * with unsigned long *.

Cc: <[email protected]> # v4.9+
Signed-off-by: Joakim Tjernlund <[email protected]>
---
include/asm-generic/bitops/le.h | 42 ++++++++++++++++++++++-------------------
1 file changed, 23 insertions(+), 19 deletions(-)

diff --git a/include/asm-generic/bitops/le.h b/include/asm-generic/bitops/le.h
index 6173154..92a3afa 100644
--- a/include/asm-generic/bitops/le.h
+++ b/include/asm-generic/bitops/le.h
@@ -8,20 +8,22 @@

#define BITOP_LE_SWIZZLE 0

-static inline unsigned long find_next_zero_bit_le(const void *addr,
- unsigned long size, unsigned long offset)
+static inline unsigned long find_next_zero_bit_le(const unsigned long *addr,
+ unsigned long size,
+ unsigned long offset)
{
return find_next_zero_bit(addr, size, offset);
}

-static inline unsigned long find_next_bit_le(const void *addr,
- unsigned long size, unsigned long offset)
+static inline unsigned long find_next_bit_le(const unsigned long *addr,
+ unsigned long size,
+ unsigned long offset)
{
return find_next_bit(addr, size, offset);
}

-static inline unsigned long find_first_zero_bit_le(const void *addr,
- unsigned long size)
+static inline unsigned long find_first_zero_bit_le(const unsigned long *addr,
+ unsigned long size)
{
return find_first_zero_bit(addr, size);
}
@@ -31,13 +33,15 @@ static inline unsigned long find_first_zero_bit_le(const void *addr,
#define BITOP_LE_SWIZZLE ((BITS_PER_LONG-1) & ~0x7)

#ifndef find_next_zero_bit_le
-extern unsigned long find_next_zero_bit_le(const void *addr,
- unsigned long size, unsigned long offset);
+extern unsigned long find_next_zero_bit_le(const unsigned long *addr,
+ unsigned long size,
+ unsigned long offset);
#endif

#ifndef find_next_bit_le
-extern unsigned long find_next_bit_le(const void *addr,
- unsigned long size, unsigned long offset);
+extern unsigned long find_next_bit_le(const unsigned long *addr,
+ unsigned long size,
+ unsigned long offset);
#endif

#ifndef find_first_zero_bit_le
@@ -49,47 +53,47 @@ extern unsigned long find_next_bit_le(const void *addr,
#error "Please fix <asm/byteorder.h>"
#endif

-static inline int test_bit_le(int nr, const void *addr)
+static inline int test_bit_le(int nr, const unsigned long *addr)
{
return test_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

-static inline void set_bit_le(int nr, void *addr)
+static inline void set_bit_le(int nr, unsigned long *addr)
{
set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

-static inline void clear_bit_le(int nr, void *addr)
+static inline void clear_bit_le(int nr, unsigned long *addr)
{
clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

-static inline void __set_bit_le(int nr, void *addr)
+static inline void __set_bit_le(int nr, unsigned long *addr)
{
__set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

-static inline void __clear_bit_le(int nr, void *addr)
+static inline void __clear_bit_le(int nr, unsigned long *addr)
{
__clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

-static inline int test_and_set_bit_le(int nr, void *addr)
+static inline int test_and_set_bit_le(int nr, unsigned long *addr)
{
return test_and_set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

-static inline int test_and_clear_bit_le(int nr, void *addr)
+static inline int test_and_clear_bit_le(int nr, unsigned long *addr)
{
return test_and_clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

-static inline int __test_and_set_bit_le(int nr, void *addr)
+static inline int __test_and_set_bit_le(int nr, unsigned long *addr)
{
return __test_and_set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

-static inline int __test_and_clear_bit_le(int nr, void *addr)
+static inline int __test_and_clear_bit_le(int nr, unsigned long *addr)
{
return __test_and_clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}
--
2.10.2
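
The change here is limited to compile-time type checking; the wrappers
still forward to the same underlying bitops. A short sketch of what the
tighter prototypes buy (the caller below is hypothetical, not kernel
code; only the extern declaration comes from the patch):

/* Hypothetical caller, for illustration only. */
extern unsigned long find_next_zero_bit_le(const unsigned long *addr,
					   unsigned long size,
					   unsigned long offset);

static unsigned long first_free_le(const unsigned long *bitmap,
				   unsigned long bits)
{
	/* With the old 'void *addr' prototype, passing e.g. an unsigned
	 * char buffer here would have compiled cleanly; with the typed
	 * prototype it draws an incompatible-pointer-type warning. */
	return find_next_zero_bit_le(bitmap, bits, 0);
}

Whether this is the intended rationale is, as the replies below note,
not spelled out in the changelog.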

2017-03-16 19:48:12

by Andrew Morton

Subject: Re: [PATCH 1/2] x86: asm/bitops.h

On Thu, 16 Mar 2017 20:07:08 +0100 Joakim Tjernlund <[email protected]> wrote:

> Replace the void * cast with uintptr_t to do pointer arithmetic

Why? The changelog doesn't describe what's wrong with the current code
and gives nobody any reason to apply the patch.

> Cc: <[email protected]> # v4.9+

And you think it should be backported! That makes it even more
important that the changelog contain a full description of the
end-user-visible impact of the fix.


2017-03-16 19:48:48

by Andrew Morton

Subject: Re: [PATCH 2/2] x86: Match bitops prototypes

On Thu, 16 Mar 2017 20:07:09 +0100 Joakim Tjernlund <[email protected]> wrote:

> Adjust the bitops function prototypes in asm-generic/bitops/le.h
> to match the x86 ones in arch/x86/include/asm/bitops.h.
> That is, replace void * with unsigned long *.
>
> Cc: <[email protected]> # v4.9+

Again, no reason is provided.

Also, please cc the x86 maintainers on x86 patches.