We have a pair of macros on arm64 that can be used in asm code to set up
and tear down the stack frame when implementing a non-leaf function.
We will be adding support for shadow call stack and pointer
authentication to those macros, so that the code in question is less
likely to be abused for someone's ROP/JOP enjoyment. So let's fix the
existing crypto code to use those macros where they should be used.
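For reference, the conversion boils down to replacing the open-coded
frame record sequence with the macros. A minimal sketch of the
before/after pattern (the exact expansion lives in
arch/arm64/include/asm/assembler.h; the register-count argument is
shown under the assumption that it names the number of callee-saved
GPRs to preserve on top of the frame record):

    // before: every non-leaf function open-codes its frame record
    stp     x29, x30, [sp, #-16]!   // save frame pointer and link register
    mov     x29, sp                 // establish the new frame record
    ...                             // function body
    ldp     x29, x30, [sp], #16     // restore x29/x30
    ret

    // after: the shared macros emit the prologue/epilogue, so shadow call
    // stack pushes and/or PAC sign/authenticate instructions can later be
    // added in one place rather than in every caller
    frame_push  0                   // no extra callee-saved registers needed
    ...                             // function body
    frame_pop
    ret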
Ard Biesheuvel (4):
crypto: arm64/aes-neonbs - use frame_push/pop consistently
crypto: arm64/aes-modes - use frame_push/pop macros consistently
crypto: arm64/crct10dif - use frame_push/pop macros consistently
crypto: arm64/ghash-ce - use frame_push/pop macros consistently
arch/arm64/crypto/aes-modes.S | 34 +++++++-------------
arch/arm64/crypto/aes-neonbs-core.S | 16 ++++-----
arch/arm64/crypto/crct10dif-ce-core.S | 5 ++-
arch/arm64/crypto/ghash-ce-core.S | 8 ++---
4 files changed, 24 insertions(+), 39 deletions(-)
--
2.35.1
Use the frame_push and frame_pop macros to set up the stack frame so
that return address protections will be enabled automatically when
configured.
Signed-off-by: Ard Biesheuvel <[email protected]>
---
arch/arm64/crypto/crct10dif-ce-core.S | 5 ++---
1 file changed, 2 insertions(+), 3 deletions(-)
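A note on the operand, under the assumption that frame_push's first
argument is the number of callee-saved GPRs to preserve in addition to
the x29/x30 frame record (see the macro definitions in
arch/arm64/include/asm/assembler.h):

    frame_push  1   // frame record plus one callee-saved register
    ...
    frame_pop       // restores whatever the matching frame_push saved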
diff --git a/arch/arm64/crypto/crct10dif-ce-core.S b/arch/arm64/crypto/crct10dif-ce-core.S
index dce6dcebfca189ee..5604de61d06d04ee 100644
--- a/arch/arm64/crypto/crct10dif-ce-core.S
+++ b/arch/arm64/crypto/crct10dif-ce-core.S
@@ -429,7 +429,7 @@ CPU_LE( ext v0.16b, v0.16b, v0.16b, #8 )
umov w0, v0.h[0]
.ifc \p, p8
- ldp x29, x30, [sp], #16
+ frame_pop
.endif
ret
@@ -466,8 +466,7 @@ CPU_LE( ext v7.16b, v7.16b, v7.16b, #8 )
// Assumes len >= 16.
//
SYM_FUNC_START(crc_t10dif_pmull_p8)
- stp x29, x30, [sp, #-16]!
- mov x29, sp
+ frame_push 1
crc_t10dif_pmull p8
SYM_FUNC_END(crc_t10dif_pmull_p8)
--
2.35.1
Use the frame_push and frame_pop macros to create the stack frames in
the AES chaining mode wrappers so that they will get PAC and/or shadow
call stack protection when configured.
Signed-off-by: Ard Biesheuvel <[email protected]>
---
arch/arm64/crypto/aes-modes.S | 34 +++++++-------------
1 file changed, 12 insertions(+), 22 deletions(-)
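One structural change is visible in the hunks below: the two CBC
decrypt entry points lose their individual prologues, and the frame is
set up once on the shared code path instead, roughly along these lines
(sketch only, register and IV setup elided):

    AES_FUNC_START(aes_essiv_cbc_decrypt)
        ...                     // ESSIV-specific IV and key schedule setup
        b       .Lessivcbcdecstart
    AES_FUNC_START(aes_cbc_decrypt)
        ...                     // plain CBC IV load
    .Lessivcbcdecstart:
        frame_push  0           // single prologue shared by both entry points
        ...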
diff --git a/arch/arm64/crypto/aes-modes.S b/arch/arm64/crypto/aes-modes.S
index 5abc834271f4a610..0e834a2c062cf265 100644
--- a/arch/arm64/crypto/aes-modes.S
+++ b/arch/arm64/crypto/aes-modes.S
@@ -52,8 +52,7 @@ SYM_FUNC_END(aes_decrypt_block5x)
*/
AES_FUNC_START(aes_ecb_encrypt)
- stp x29, x30, [sp, #-16]!
- mov x29, sp
+ frame_push 0
enc_prepare w3, x2, x5
@@ -77,14 +76,13 @@ ST5( st1 {v4.16b}, [x0], #16 )
subs w4, w4, #1
bne .Lecbencloop
.Lecbencout:
- ldp x29, x30, [sp], #16
+ frame_pop
ret
AES_FUNC_END(aes_ecb_encrypt)
AES_FUNC_START(aes_ecb_decrypt)
- stp x29, x30, [sp, #-16]!
- mov x29, sp
+ frame_push 0
dec_prepare w3, x2, x5
@@ -108,7 +106,7 @@ ST5( st1 {v4.16b}, [x0], #16 )
subs w4, w4, #1
bne .Lecbdecloop
.Lecbdecout:
- ldp x29, x30, [sp], #16
+ frame_pop
ret
AES_FUNC_END(aes_ecb_decrypt)
@@ -171,9 +169,6 @@ AES_FUNC_END(aes_cbc_encrypt)
AES_FUNC_END(aes_essiv_cbc_encrypt)
AES_FUNC_START(aes_essiv_cbc_decrypt)
- stp x29, x30, [sp, #-16]!
- mov x29, sp
-
ld1 {cbciv.16b}, [x5] /* get iv */
mov w8, #14 /* AES-256: 14 rounds */
@@ -182,11 +177,9 @@ AES_FUNC_START(aes_essiv_cbc_decrypt)
b .Lessivcbcdecstart
AES_FUNC_START(aes_cbc_decrypt)
- stp x29, x30, [sp, #-16]!
- mov x29, sp
-
ld1 {cbciv.16b}, [x5] /* get iv */
.Lessivcbcdecstart:
+ frame_push 0
dec_prepare w3, x2, x6
.LcbcdecloopNx:
@@ -236,7 +229,7 @@ ST5( st1 {v4.16b}, [x0], #16 )
bne .Lcbcdecloop
.Lcbcdecout:
st1 {cbciv.16b}, [x5] /* return iv */
- ldp x29, x30, [sp], #16
+ frame_pop
ret
AES_FUNC_END(aes_cbc_decrypt)
AES_FUNC_END(aes_essiv_cbc_decrypt)
@@ -337,8 +330,7 @@ AES_FUNC_END(aes_cbc_cts_decrypt)
BLOCKS .req x13
BLOCKS_W .req w13
- stp x29, x30, [sp, #-16]!
- mov x29, sp
+ frame_push 0
enc_prepare ROUNDS_W, KEY, IV_PART
ld1 {vctr.16b}, [IV]
@@ -481,7 +473,7 @@ ST5( st1 {v4.16b}, [OUT], #16 )
.if !\xctr
st1 {vctr.16b}, [IV] /* return next CTR value */
.endif
- ldp x29, x30, [sp], #16
+ frame_pop
ret
.Lctrtail\xctr:
@@ -645,8 +637,7 @@ AES_FUNC_END(aes_xctr_encrypt)
.endm
AES_FUNC_START(aes_xts_encrypt)
- stp x29, x30, [sp, #-16]!
- mov x29, sp
+ frame_push 0
ld1 {v4.16b}, [x6]
xts_load_mask v8
@@ -704,7 +695,7 @@ AES_FUNC_START(aes_xts_encrypt)
st1 {v0.16b}, [x0]
.Lxtsencret:
st1 {v4.16b}, [x6]
- ldp x29, x30, [sp], #16
+ frame_pop
ret
.LxtsencctsNx:
@@ -732,8 +723,7 @@ AES_FUNC_START(aes_xts_encrypt)
AES_FUNC_END(aes_xts_encrypt)
AES_FUNC_START(aes_xts_decrypt)
- stp x29, x30, [sp, #-16]!
- mov x29, sp
+ frame_push 0
/* subtract 16 bytes if we are doing CTS */
sub w8, w4, #0x10
@@ -794,7 +784,7 @@ AES_FUNC_START(aes_xts_decrypt)
b .Lxtsdecloop
.Lxtsdecout:
st1 {v4.16b}, [x6]
- ldp x29, x30, [sp], #16
+ frame_pop
ret
.Lxtsdeccts:
--
2.35.1
On Tue, Nov 29, 2022 at 05:48:48PM +0100, Ard Biesheuvel wrote:
> We have a pair of macros on arm64 that can be used in asm code to set up
> and tear down the stack frame when implementing a non-leaf function.
>
> We will be adding support for shadow call stack and pointer
> authentication to those macros, so that the code in question is less
> likely to be abused for someone's ROP/JOP enjoyment. So let's fix the
> existing crypto code to use those macros where they should be used.
>
> Ard Biesheuvel (4):
> crypto: arm64/aes-neonbs - use frame_push/pop consistently
> crypto: arm64/aes-modes - use frame_push/pop macros consistently
> crypto: arm64/crct10dif - use frame_push/pop macros consistently
> crypto: arm64/ghash-ce - use frame_push/pop macros consistently
>
> arch/arm64/crypto/aes-modes.S | 34 +++++++-------------
> arch/arm64/crypto/aes-neonbs-core.S | 16 ++++-----
> arch/arm64/crypto/crct10dif-ce-core.S | 5 ++-
> arch/arm64/crypto/ghash-ce-core.S | 8 ++---
> 4 files changed, 24 insertions(+), 39 deletions(-)
>
> --
> 2.35.1
All applied. Thanks.
--
Email: Herbert Xu <[email protected]>
Home Page: http://gondor.apana.org.au/~herbert/
PGP Key: http://gondor.apana.org.au/~herbert/pubkey.txt