2018-04-09 13:53:58

by Salvatore Mesoraca

Subject: [PATCH 0/6] Remove several VLAs in the crypto subsystem

As suggested by Laura Abbott[2], I'm resending my patch-set with
MAX_BLOCKSIZE and MAX_ALIGNMASK defined in a header, so they
can be used in other places.
I take this opportunity to deal with some other VLAs[1] not
handled in the old patch-set.

[1] http://lkml.kernel.org/r/CA+55aFzCG-zNmZwX4A2FQpadafLfEzK6CC=qPXydAacU1RqZWA@mail.gmail.com
[2] http://lkml.kernel.org/r/[email protected]
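
The pattern applied throughout the series is roughly the following
(a simplified sketch for illustration, not taken verbatim from any
single patch; tfm, alg and out_err stand in for the local variables
and labels of whichever file is being patched):

        /* Before: stack footprint depends on the cipher chosen at runtime. */
        u8 tmp[crypto_cipher_blocksize(tfm)];   /* VLA */

        /* After: the worst case becomes a compile-time constant... */
        u8 tmp[MAX_BLOCKSIZE];

        /* ...backed by a check at instance creation time, so any cipher
         * exceeding the bound is rejected instead of overflowing tmp.
         */
        if (alg->cra_blocksize > MAX_BLOCKSIZE)
                goto out_err;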

Salvatore Mesoraca (6):
crypto: api - laying macros for statically allocated buffers
crypto: ctr - avoid VLA use
crypto: api - avoid VLA use
crypto: pcbc - avoid VLA use
crypto: cts - avoid VLA use
crypto: cfb - avoid VLA use

crypto/cfb.c | 14 ++++++++++----
crypto/cipher.c | 7 ++++++-
crypto/ctr.c | 13 +++++++++++--
crypto/cts.c | 8 ++++++--
crypto/internal.h | 8 ++++++++
crypto/pcbc.c | 9 +++++++--
6 files changed, 48 insertions(+), 11 deletions(-)

--
1.9.1


2018-04-09 13:54:03

by Salvatore Mesoraca

Subject: [PATCH 5/6] crypto: cts - avoid VLA use

We avoid 2 VLAs[1] by always allocating MAX_BLOCKSIZE * 2 bytes.
We also check the selected cipher at instance creation time; if
it doesn't comply with these limits, creation will fail.

[1] http://lkml.kernel.org/r/CA+55aFzCG-zNmZwX4A2FQpadafLfEzK6CC=qPXydAacU1RqZWA@mail.gmail.com
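
For scale (a worked example, not part of the patch): cts keeps a
two-block scratch buffer because ciphertext stealing reworks the
last two CBC blocks together. With cbc(aes) the block size is 16,
so the old VLA was already 32 bytes and the fixed-size buffer costs
nothing extra in the common case:

        int bsize = crypto_skcipher_blocksize(tfm);     /* 16 for cbc(aes) */
        u8 d[MAX_BLOCKSIZE * 2] __aligned(__alignof__(u32));   /* always 32 bytes */

        /* bsize * 2 <= sizeof(d) is guaranteed by the new creation-time check */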

Signed-off-by: Salvatore Mesoraca <[email protected]>
---
crypto/cts.c | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/crypto/cts.c b/crypto/cts.c
index 4773c18..12e6bd3 100644
--- a/crypto/cts.c
+++ b/crypto/cts.c
@@ -50,6 +50,7 @@
#include <crypto/scatterwalk.h>
#include <linux/slab.h>
#include <linux/compiler.h>
+#include "internal.h"

struct crypto_cts_ctx {
struct crypto_skcipher *child;
@@ -104,7 +105,7 @@ static int cts_cbc_encrypt(struct skcipher_request *req)
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct skcipher_request *subreq = &rctx->subreq;
int bsize = crypto_skcipher_blocksize(tfm);
- u8 d[bsize * 2] __aligned(__alignof__(u32));
+ u8 d[MAX_BLOCKSIZE * 2] __aligned(__alignof__(u32));
struct scatterlist *sg;
unsigned int offset;
int lastn;
@@ -183,7 +184,7 @@ static int cts_cbc_decrypt(struct skcipher_request *req)
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct skcipher_request *subreq = &rctx->subreq;
int bsize = crypto_skcipher_blocksize(tfm);
- u8 d[bsize * 2] __aligned(__alignof__(u32));
+ u8 d[MAX_BLOCKSIZE * 2] __aligned(__alignof__(u32));
struct scatterlist *sg;
unsigned int offset;
u8 *space;
@@ -359,6 +360,9 @@ static int crypto_cts_create(struct crypto_template *tmpl, struct rtattr **tb)
if (crypto_skcipher_alg_ivsize(alg) != alg->base.cra_blocksize)
goto err_drop_spawn;

+ if (alg->base.cra_blocksize > MAX_BLOCKSIZE)
+ goto err_drop_spawn;
+
if (strncmp(alg->base.cra_name, "cbc(", 4))
goto err_drop_spawn;

--
1.9.1

2018-04-09 13:54:00

by Salvatore Mesoraca

Subject: [PATCH 2/6] crypto: ctr - avoid VLA use

We avoid 2 VLAs[1] by always allocating MAX_BLOCKSIZE +
MAX_ALIGNMASK bytes.
We also check the selected cipher at instance creation time; if
it doesn't comply with these limits, creation will fail.

[1] http://lkml.kernel.org/r/CA+55aFzCG-zNmZwX4A2FQpadafLfEzK6CC=qPXydAacU1RqZWA@mail.gmail.com
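
For context, the surrounding code (unchanged by this patch) consumes
the buffer roughly as follows; keystream is simply the first suitably
aligned byte of the now fixed-size scratch area:

        u8 tmp[MAX_BLOCKSIZE + MAX_ALIGNMASK];
        u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

        crypto_cipher_encrypt_one(tfm, keystream, ctrblk);     /* E_k(counter) */
        crypto_xor_cpy(dst, keystream, src, nbytes);    /* dst = src ^ keystream */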

Signed-off-by: Salvatore Mesoraca <[email protected]>
---
crypto/ctr.c | 13 +++++++++++--
1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/crypto/ctr.c b/crypto/ctr.c
index 854d924..ce62552 100644
--- a/crypto/ctr.c
+++ b/crypto/ctr.c
@@ -20,6 +20,7 @@
#include <linux/random.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
+#include "internal.h"

struct crypto_ctr_ctx {
struct crypto_cipher *child;
@@ -58,7 +59,7 @@ static void crypto_ctr_crypt_final(struct blkcipher_walk *walk,
unsigned int bsize = crypto_cipher_blocksize(tfm);
unsigned long alignmask = crypto_cipher_alignmask(tfm);
u8 *ctrblk = walk->iv;
- u8 tmp[bsize + alignmask];
+ u8 tmp[MAX_BLOCKSIZE + MAX_ALIGNMASK];
u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
u8 *src = walk->src.virt.addr;
u8 *dst = walk->dst.virt.addr;
@@ -106,7 +107,7 @@ static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
unsigned int nbytes = walk->nbytes;
u8 *ctrblk = walk->iv;
u8 *src = walk->src.virt.addr;
- u8 tmp[bsize + alignmask];
+ u8 tmp[MAX_BLOCKSIZE + MAX_ALIGNMASK];
u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

do {
@@ -206,6 +207,14 @@ static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
if (alg->cra_blocksize < 4)
goto out_put_alg;

+ /* Block size must be <= MAX_BLOCKSIZE. */
+ if (alg->cra_blocksize > MAX_BLOCKSIZE)
+ goto out_put_alg;
+
+ /* Alignmask must be <= MAX_ALIGNMASK. */
+ if (alg->cra_alignmask > MAX_ALIGNMASK)
+ goto out_put_alg;
+
/* If this is false we'd fail the alignment of crypto_inc. */
if (alg->cra_blocksize % 4)
goto out_put_alg;
--
1.9.1

2018-04-09 13:54:01

by Salvatore Mesoraca

Subject: [PATCH 3/6] crypto: api - avoid VLA use

We avoid a VLA[1] by always allocating MAX_BLOCKSIZE +
MAX_ALIGNMASK bytes.
We also check the selected cipher at initialization time; if
it doesn't comply with these limits, initialization will fail.

[1] http://lkml.kernel.org/r/CA+55aFzCG-zNmZwX4A2FQpadafLfEzK6CC=qPXydAacU1RqZWA@mail.gmail.com
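
The user-visible effect, sketched from the caller's side ("wide" is
a hypothetical cipher whose parameters exceed the new limits): the
-EINVAL from crypto_init_cipher_ops() makes the allocation itself
fail, before any crypt operation can touch the fixed-size buffer:

        struct crypto_cipher *tfm;

        tfm = crypto_alloc_cipher("wide", 0, 0);        /* hypothetical cipher */
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);    /* -EINVAL for an oversized cipher */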

Signed-off-by: Salvatore Mesoraca <[email protected]>
---
crypto/cipher.c | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/crypto/cipher.c b/crypto/cipher.c
index 94fa355..9cedf23 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -67,7 +67,7 @@ static void cipher_crypt_unaligned(void (*fn)(struct crypto_tfm *, u8 *,
{
unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
unsigned int size = crypto_tfm_alg_blocksize(tfm);
- u8 buffer[size + alignmask];
+ u8 buffer[MAX_BLOCKSIZE + MAX_ALIGNMASK];
u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);

memcpy(tmp, src, size);
@@ -105,9 +105,14 @@ static void cipher_decrypt_unaligned(struct crypto_tfm *tfm,

int crypto_init_cipher_ops(struct crypto_tfm *tfm)
{
+ const unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
+ const unsigned int size = crypto_tfm_alg_blocksize(tfm);
struct cipher_tfm *ops = &tfm->crt_cipher;
struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

+ if (size > MAX_BLOCKSIZE || alignmask > MAX_ALIGNMASK)
+ return -EINVAL;
+
ops->cit_setkey = setkey;
ops->cit_encrypt_one = crypto_tfm_alg_alignmask(tfm) ?
cipher_encrypt_unaligned : cipher->cia_encrypt;
--
1.9.1

2018-04-09 13:53:59

by Salvatore Mesoraca

Subject: [PATCH 1/6] crypto: api - laying macros for statically allocated buffers

Create two new compile-time constants for internal use,
in preparation for the removal of VLAs[1] from crypto code.
All ciphers implemented in Linux have a block size of 16 bytes
or less, and the most demanding hardware requires 16-byte
alignment for the block buffer.

[1] http://lkml.kernel.org/r/CA+55aFzCG-zNmZwX4A2FQpadafLfEzK6CC=qPXydAacU1RqZWA@mail.gmail.com
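
As a quick sanity check on the chosen values (a worked example, not
part of the patch): a buffer of MAX_BLOCKSIZE + MAX_ALIGNMASK bytes
always has room for one block at the worst-case alignment:

        u8 tmp[MAX_BLOCKSIZE + MAX_ALIGNMASK]; /* 16 + 15 = 31 bytes */
        u8 *p = PTR_ALIGN(tmp, alignmask + 1); /* skips at most 15 bytes */

        /* p .. p + 15 stays inside tmp, since 15 + 16 <= 31 */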

Signed-off-by: Salvatore Mesoraca <[email protected]>
---
crypto/internal.h | 8 ++++++++
1 file changed, 8 insertions(+)

diff --git a/crypto/internal.h b/crypto/internal.h
index 9a3f399..89ae41e 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -26,6 +26,14 @@
#include <linux/rwsem.h>
#include <linux/slab.h>

+/*
+ * Maximum values for blocksize and alignmask, used to allocate
+ * static buffers that are big enough for any combination of
+ * ciphers and architectures.
+ */
+#define MAX_BLOCKSIZE 16
+#define MAX_ALIGNMASK 15
+
/* Crypto notification events. */
enum {
CRYPTO_MSG_ALG_REQUEST,
--
1.9.1

2018-04-09 13:54:02

by Salvatore Mesoraca

Subject: [PATCH 4/6] crypto: pcbc - avoid VLA use

We avoid 2 VLAs[1] by always allocating MAX_BLOCKSIZE bytes.
We also check the selected cipher at instance creation time; if
it doesn't comply with these limits, creation will fail.

[1] http://lkml.kernel.org/r/CA+55aFzCG-zNmZwX4A2FQpadafLfEzK6CC=qPXydAacU1RqZWA@mail.gmail.com
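
For context (based only on the lines visible in the hunks below):
tmpbuf is filled with memcpy() and consumed one block at a time, so
bounding the block size alone is enough; no alignmask check is
needed here:

        u8 tmpbuf[MAX_BLOCKSIZE];

        memcpy(tmpbuf, src, bsize);     /* in bounds: bsize <= MAX_BLOCKSIZE,
                                         * enforced by the new create-time check */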

Signed-off-by: Salvatore Mesoraca <[email protected]>
---
crypto/pcbc.c | 9 +++++++--
1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/crypto/pcbc.c b/crypto/pcbc.c
index d9e45a9..797da2f 100644
--- a/crypto/pcbc.c
+++ b/crypto/pcbc.c
@@ -21,6 +21,7 @@
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/compiler.h>
+#include "internal.h"

struct crypto_pcbc_ctx {
struct crypto_cipher *child;
@@ -72,7 +73,7 @@ static int crypto_pcbc_encrypt_inplace(struct skcipher_request *req,
unsigned int nbytes = walk->nbytes;
u8 *src = walk->src.virt.addr;
u8 *iv = walk->iv;
- u8 tmpbuf[bsize];
+ u8 tmpbuf[MAX_BLOCKSIZE];

do {
memcpy(tmpbuf, src, bsize);
@@ -144,7 +145,7 @@ static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req,
unsigned int nbytes = walk->nbytes;
u8 *src = walk->src.virt.addr;
u8 *iv = walk->iv;
- u8 tmpbuf[bsize] __aligned(__alignof__(u32));
+ u8 tmpbuf[MAX_BLOCKSIZE] __aligned(__alignof__(u32));

do {
memcpy(tmpbuf, src, bsize);
@@ -251,6 +252,10 @@ static int crypto_pcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
if (err)
goto err_drop_spawn;

+ err = -EINVAL;
+ if (alg->cra_blocksize > MAX_BLOCKSIZE)
+ goto err_drop_spawn;
+
inst->alg.base.cra_flags = alg->cra_flags & CRYPTO_ALG_INTERNAL;
inst->alg.base.cra_priority = alg->cra_priority;
inst->alg.base.cra_blocksize = alg->cra_blocksize;
--
1.9.1

2018-04-09 13:54:04

by Salvatore Mesoraca

Subject: [PATCH 6/6] crypto: cfb - avoid VLA use

We avoid 3 VLAs[1] by always allocating MAX_BLOCKSIZE bytes or,
when needed for alignment, MAX_BLOCKSIZE + MAX_ALIGNMASK bytes.
We also check the selected cipher at instance creation time; if
it doesn't comply with these limits, creation will fail.

[1] http://lkml.kernel.org/r/CA+55aFzCG-zNmZwX4A2FQpadafLfEzK6CC=qPXydAacU1RqZWA@mail.gmail.com
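
For context: cfb needs both caps because the final-block path aligns
its keystream inside the scratch buffer, while the in-place paths get
by with a bare block-sized buffer. The two shapes, side by side (a
simplified sketch):

        /* crypto_cfb_final(): one block plus worst-case alignment slack */
        u8 tmp[MAX_BLOCKSIZE + MAX_ALIGNMASK];
        u8 *stream = PTR_ALIGN(tmp + 0, alignmask + 1);

        /* in-place encrypt/decrypt: one block, no alignment requirement */
        u8 tmp[MAX_BLOCKSIZE];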

Signed-off-by: Salvatore Mesoraca <[email protected]>
---
crypto/cfb.c | 14 ++++++++++----
1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/crypto/cfb.c b/crypto/cfb.c
index 94ee39b..f500816 100644
--- a/crypto/cfb.c
+++ b/crypto/cfb.c
@@ -28,6 +28,7 @@
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/types.h>
+#include "internal.h"

struct crypto_cfb_ctx {
struct crypto_cipher *child;
@@ -53,9 +54,8 @@ static void crypto_cfb_encrypt_one(struct crypto_skcipher *tfm,
static void crypto_cfb_final(struct skcipher_walk *walk,
struct crypto_skcipher *tfm)
{
- const unsigned int bsize = crypto_cfb_bsize(tfm);
const unsigned long alignmask = crypto_skcipher_alignmask(tfm);
- u8 tmp[bsize + alignmask];
+ u8 tmp[MAX_BLOCKSIZE + MAX_ALIGNMASK];
u8 *stream = PTR_ALIGN(tmp + 0, alignmask + 1);
u8 *src = walk->src.virt.addr;
u8 *dst = walk->dst.virt.addr;
@@ -94,7 +94,7 @@ static int crypto_cfb_encrypt_inplace(struct skcipher_walk *walk,
unsigned int nbytes = walk->nbytes;
u8 *src = walk->src.virt.addr;
u8 *iv = walk->iv;
- u8 tmp[bsize];
+ u8 tmp[MAX_BLOCKSIZE];

do {
crypto_cfb_encrypt_one(tfm, iv, tmp);
@@ -164,7 +164,7 @@ static int crypto_cfb_decrypt_inplace(struct skcipher_walk *walk,
unsigned int nbytes = walk->nbytes;
u8 *src = walk->src.virt.addr;
u8 *iv = walk->iv;
- u8 tmp[bsize];
+ u8 tmp[MAX_BLOCKSIZE];

do {
crypto_cfb_encrypt_one(tfm, iv, tmp);
@@ -295,6 +295,12 @@ static int crypto_cfb_create(struct crypto_template *tmpl, struct rtattr **tb)
if (err)
goto err_drop_spawn;

+ err = -EINVAL;
+ if (alg->cra_blocksize > MAX_BLOCKSIZE)
+ goto err_drop_spawn;
+ if (alg->cra_alignmask > MAX_ALIGNMASK)
+ goto err_drop_spawn;
+
inst->alg.base.cra_priority = alg->cra_priority;
/* we're a stream cipher independent of the crypto cra_blocksize */
inst->alg.base.cra_blocksize = 1;
--
1.9.1

2018-04-09 14:02:22

by Salvatore Mesoraca

Subject: Re: [PATCH 0/6] Remove several VLAs in the crypto subsystem

Please ignore this thread; I sent the old patch-set again by mistake :(
I'm sorry for the noise.

Salvatore