Use smp_mb__before_atomic() & smp_mb__after_atomic() in
atomic_sub_if_positive() rather than the equivalent
smp_mb__before_llsc() & smp_llsc_mb(). The former are more standard, &
this prepares us to avoid emitting duplicate barriers on Loongson3 in a
later patch.

Signed-off-by: Paul Burton <[email protected]>
---
arch/mips/include/asm/atomic.h | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
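
[Not part of the patch: a minimal illustrative sketch of the barrier
pairing this patch standardises on, assuming the generic semantics
described in Documentation/memory-barriers.txt. The refs counter &
get_ref_ordered() are hypothetical, purely for illustration.]

	/*
	 * smp_mb__before_atomic() / smp_mb__after_atomic() bracket an
	 * atomic RMW op that carries no ordering guarantees of its own,
	 * making it fully ordered - the same role the barriers play
	 * around the ll/sc sequence in atomic_sub_if_positive().
	 */
	static atomic_t refs = ATOMIC_INIT(0);

	static void get_ref_ordered(void)
	{
		/* order all prior loads/stores before the atomic op */
		smp_mb__before_atomic();
		atomic_inc(&refs);
		/* order the atomic op before all later loads/stores */
		smp_mb__after_atomic();
	}
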
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index 841ff274ada6..24443ef29337 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -196,7 +196,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 {
 	int result;
 
-	smp_mb__before_llsc();
+	smp_mb__before_atomic();
 
 	if (kernel_uses_llsc) {
 		int temp;
@@ -237,7 +237,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 	 * another barrier here.
 	 */
 	if (!__SYNC_loongson3_war)
-		smp_llsc_mb();
+		smp_mb__after_atomic();
 
 	return result;
 }
--
2.23.0