This patchset adds support for all flavours of sha3-xxx and hmac(sha3-xxx)
ahash algorithms.
The patchset has been tested with the eip197c_iewxkbc configuration on the
Xilinx VCU118 development board, including the testmgr extra tests.
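For reference, these hashes are exposed through the kernel's generic ahash
API. Below is a minimal sketch of a synchronous one-shot digest; it is not
part of this patchset, error paths are trimmed, and the input buffer must be
DMA-able (i.e. not on the stack) for hardware drivers like this one:

  #include <crypto/hash.h>
  #include <linux/scatterlist.h>

  static int sha3_256_oneshot(const u8 *data, unsigned int len, u8 *out)
  {
          struct crypto_ahash *tfm;
          struct ahash_request *req;
          struct scatterlist sg;
          DECLARE_CRYPTO_WAIT(wait);
          int err;

          /* The core selects the highest-priority sha3-256 provider,
           * e.g. safexcel-sha3-256 once this driver is registered.
           */
          tfm = crypto_alloc_ahash("sha3-256", 0, 0);
          if (IS_ERR(tfm))
                  return PTR_ERR(tfm);

          req = ahash_request_alloc(tfm, GFP_KERNEL);
          if (!req) {
                  crypto_free_ahash(tfm);
                  return -ENOMEM;
          }

          ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                     crypto_req_done, &wait);
          sg_init_one(&sg, data, len);
          ahash_request_set_crypt(req, &sg, out, len);

          /* Wait synchronously for the (possibly asynchronous) HW request */
          err = crypto_wait_req(crypto_ahash_digest(req), &wait);

          ahash_request_free(req);
          crypto_free_ahash(tfm);
          return err;
  }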
changes since v1:
- fixed crypto/Kconfig so that generic fallback is compiled as well
Pascal van Leeuwen (3):
crypto: inside-secure - Add SHA3 family of basic hash algorithms
crypto: inside-secure - Add HMAC-SHA3 family of authentication
algorithms
crypto: Kconfig - Add CRYPTO_SHA3 to CRYPTO_DEV_SAFEXCEL
drivers/crypto/Kconfig | 1 +
drivers/crypto/inside-secure/safexcel.c | 8 +
drivers/crypto/inside-secure/safexcel.h | 13 +
drivers/crypto/inside-secure/safexcel_hash.c | 790 ++++++++++++++++++++++++++-
4 files changed, 800 insertions(+), 12 deletions(-)
--
1.8.3.1
This patch adds support for sha3-224, sha3-256, sha3-384 and sha3-512
basic hashes.
The patch has been tested with the eip197c_iewxkbc configuration on the
Xilinx VCU118 development board, including the testmgr extra tests.
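Note that the engine only handles complete hash operations in one go;
incremental hashing is transparently delegated to the software fallback, as
sketched below. The snippet assumes the usual ahash boilerplate (tfm, req, sg
and a DECLARE_CRYPTO_WAIT(wait) as in the cover letter example; part1, part2
and digest are placeholder buffers):

  err = crypto_wait_req(crypto_ahash_init(req), &wait);

  sg_init_one(&sg, part1, part1_len);
  ahash_request_set_crypt(req, &sg, NULL, part1_len);
  err = err ?: crypto_wait_req(crypto_ahash_update(req), &wait);

  sg_init_one(&sg, part2, part2_len);
  ahash_request_set_crypt(req, &sg, digest, part2_len);
  err = err ?: crypto_wait_req(crypto_ahash_update(req), &wait);

  /* runs on the fallback, since update() was used */
  err = err ?: crypto_wait_req(crypto_ahash_final(req), &wait);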
changes since v1:
- nothing
Signed-off-by: Pascal van Leeuwen <[email protected]>
---
drivers/crypto/inside-secure/safexcel.c | 4 +
drivers/crypto/inside-secure/safexcel.h | 9 +
drivers/crypto/inside-secure/safexcel_hash.c | 351 +++++++++++++++++++++++++++
3 files changed, 364 insertions(+)
diff --git a/drivers/crypto/inside-secure/safexcel.c b/drivers/crypto/inside-secure/safexcel.c
index 8f7fdd0..4c91bbf 100644
--- a/drivers/crypto/inside-secure/safexcel.c
+++ b/drivers/crypto/inside-secure/safexcel.c
@@ -1187,6 +1187,10 @@ static int safexcel_request_ring_irq(void *pdev, int irqid,
&safexcel_alg_authenc_hmac_sm3_cbc_sm4,
&safexcel_alg_authenc_hmac_sha1_ctr_sm4,
&safexcel_alg_authenc_hmac_sm3_ctr_sm4,
+ &safexcel_alg_sha3_224,
+ &safexcel_alg_sha3_256,
+ &safexcel_alg_sha3_384,
+ &safexcel_alg_sha3_512,
};
static int safexcel_register_algorithms(struct safexcel_crypto_priv *priv)
diff --git a/drivers/crypto/inside-secure/safexcel.h b/drivers/crypto/inside-secure/safexcel.h
index 1d75044..f41982d 100644
--- a/drivers/crypto/inside-secure/safexcel.h
+++ b/drivers/crypto/inside-secure/safexcel.h
@@ -360,6 +360,7 @@ struct safexcel_context_record {
#define CONTEXT_CONTROL_CRYPTO_ALG_AES256 (0x7 << 17)
#define CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 (0x8 << 17)
#define CONTEXT_CONTROL_CRYPTO_ALG_SM4 (0xd << 17)
+#define CONTEXT_CONTROL_DIGEST_INITIAL (0x0 << 21)
#define CONTEXT_CONTROL_DIGEST_PRECOMPUTED (0x1 << 21)
#define CONTEXT_CONTROL_DIGEST_XCM (0x2 << 21)
#define CONTEXT_CONTROL_DIGEST_HMAC (0x3 << 21)
@@ -375,6 +376,10 @@ struct safexcel_context_record {
#define CONTEXT_CONTROL_CRYPTO_ALG_XCBC192 (0x2 << 23)
#define CONTEXT_CONTROL_CRYPTO_ALG_XCBC256 (0x3 << 23)
#define CONTEXT_CONTROL_CRYPTO_ALG_SM3 (0x7 << 23)
+#define CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256 (0xb << 23)
+#define CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224 (0xc << 23)
+#define CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512 (0xd << 23)
+#define CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384 (0xe << 23)
#define CONTEXT_CONTROL_CRYPTO_ALG_POLY1305 (0xf << 23)
#define CONTEXT_CONTROL_INV_FR (0x5 << 24)
#define CONTEXT_CONTROL_INV_TR (0x6 << 24)
@@ -888,5 +893,9 @@ int safexcel_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4;
extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4;
extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4;
+extern struct safexcel_alg_template safexcel_alg_sha3_224;
+extern struct safexcel_alg_template safexcel_alg_sha3_256;
+extern struct safexcel_alg_template safexcel_alg_sha3_384;
+extern struct safexcel_alg_template safexcel_alg_sha3_512;
#endif
diff --git a/drivers/crypto/inside-secure/safexcel_hash.c b/drivers/crypto/inside-secure/safexcel_hash.c
index 272e5fd..1d67cf2 100644
--- a/drivers/crypto/inside-secure/safexcel_hash.c
+++ b/drivers/crypto/inside-secure/safexcel_hash.c
@@ -9,6 +9,7 @@
#include <crypto/hmac.h>
#include <crypto/md5.h>
#include <crypto/sha.h>
+#include <crypto/sha3.h>
#include <crypto/skcipher.h>
#include <crypto/sm3.h>
#include <linux/device.h>
@@ -24,11 +25,14 @@ struct safexcel_ahash_ctx {
u32 alg;
u8 key_sz;
bool cbcmac;
+ bool do_fallback;
+ bool fb_init_done;
u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
struct crypto_cipher *kaes;
+ struct crypto_ahash *fback;
};
struct safexcel_ahash_req {
@@ -2355,3 +2359,350 @@ struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
},
},
};
+
+static int safexcel_sha3_224_init(struct ahash_request *areq)
+{
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
+ req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
+ req->state_sz = SHA3_224_DIGEST_SIZE;
+ req->block_sz = SHA3_224_BLOCK_SIZE;
+ ctx->do_fallback = false;
+ ctx->fb_init_done = false;
+ return 0;
+}
+
+static int safexcel_sha3_fbcheck(struct ahash_request *req)
+{
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+ struct ahash_request *subreq = ahash_request_ctx(req);
+ int ret = 0;
+
+ if (ctx->do_fallback) {
+ ahash_request_set_tfm(subreq, ctx->fback);
+ ahash_request_set_callback(subreq, req->base.flags,
+ req->base.complete, req->base.data);
+ ahash_request_set_crypt(subreq, req->src, req->result,
+ req->nbytes);
+ if (!ctx->fb_init_done) {
+ ret = crypto_ahash_init(subreq);
+ ctx->fb_init_done = true;
+ }
+ }
+ return ret;
+}
+
+static int safexcel_sha3_update(struct ahash_request *req)
+{
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+ struct ahash_request *subreq = ahash_request_ctx(req);
+
+ ctx->do_fallback = true;
+ return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
+}
+
+static int safexcel_sha3_final(struct ahash_request *req)
+{
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+ struct ahash_request *subreq = ahash_request_ctx(req);
+
+ ctx->do_fallback = true;
+ return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
+}
+
+static int safexcel_sha3_finup(struct ahash_request *req)
+{
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+ struct ahash_request *subreq = ahash_request_ctx(req);
+
+ ctx->do_fallback |= !req->nbytes;
+ if (ctx->do_fallback)
+ /* Update or ex/import happened or len 0, cannot use the HW */
+ return safexcel_sha3_fbcheck(req) ?:
+ crypto_ahash_finup(subreq);
+ else
+ return safexcel_ahash_finup(req);
+}
+
+static int safexcel_sha3_digest_fallback(struct ahash_request *req)
+{
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+ struct ahash_request *subreq = ahash_request_ctx(req);
+
+ ctx->do_fallback = true;
+ ctx->fb_init_done = false;
+ return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
+}
+
+static int safexcel_sha3_224_digest(struct ahash_request *req)
+{
+ if (req->nbytes)
+ return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);
+
+ /* HW cannot do zero length hash, use fallback instead */
+ return safexcel_sha3_digest_fallback(req);
+}
+
+static int safexcel_sha3_export(struct ahash_request *req, void *out)
+{
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+ struct ahash_request *subreq = ahash_request_ctx(req);
+
+ ctx->do_fallback = true;
+ return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
+}
+
+static int safexcel_sha3_import(struct ahash_request *req, const void *in)
+{
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+ struct ahash_request *subreq = ahash_request_ctx(req);
+
+ ctx->do_fallback = true;
+ return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
+}
+
+static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
+{
+ struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_ahash_cra_init(tfm);
+
+ /* Allocate fallback implementation */
+ ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
+ CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_NEED_FALLBACK);
+ if (IS_ERR(ctx->fback))
+ return PTR_ERR(ctx->fback);
+
+ /* Update statesize from fallback algorithm! */
+ crypto_hash_alg_common(ahash)->statesize =
+ crypto_ahash_statesize(ctx->fback);
+ crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
+ sizeof(struct ahash_request) +
+ crypto_ahash_reqsize(ctx->fback)));
+ return 0;
+}
+
+static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
+{
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ crypto_free_ahash(ctx->fback);
+ safexcel_ahash_cra_exit(tfm);
+}
+
+struct safexcel_alg_template safexcel_alg_sha3_224 = {
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
+ .algo_mask = SAFEXCEL_ALG_SHA3,
+ .alg.ahash = {
+ .init = safexcel_sha3_224_init,
+ .update = safexcel_sha3_update,
+ .final = safexcel_sha3_final,
+ .finup = safexcel_sha3_finup,
+ .digest = safexcel_sha3_224_digest,
+ .export = safexcel_sha3_export,
+ .import = safexcel_sha3_import,
+ .halg = {
+ .digestsize = SHA3_224_DIGEST_SIZE,
+ .statesize = sizeof(struct safexcel_ahash_export_state),
+ .base = {
+ .cra_name = "sha3-224",
+ .cra_driver_name = "safexcel-sha3-224",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
+ CRYPTO_ALG_NEED_FALLBACK,
+ .cra_blocksize = SHA3_224_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
+ .cra_init = safexcel_sha3_cra_init,
+ .cra_exit = safexcel_sha3_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int safexcel_sha3_256_init(struct ahash_request *areq)
+{
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
+ req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
+ req->state_sz = SHA3_256_DIGEST_SIZE;
+ req->block_sz = SHA3_256_BLOCK_SIZE;
+ ctx->do_fallback = false;
+ ctx->fb_init_done = false;
+ return 0;
+}
+
+static int safexcel_sha3_256_digest(struct ahash_request *req)
+{
+ if (req->nbytes)
+ return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);
+
+ /* HW cannot do zero length hash, use fallback instead */
+ return safexcel_sha3_digest_fallback(req);
+}
+
+struct safexcel_alg_template safexcel_alg_sha3_256 = {
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
+ .algo_mask = SAFEXCEL_ALG_SHA3,
+ .alg.ahash = {
+ .init = safexcel_sha3_256_init,
+ .update = safexcel_sha3_update,
+ .final = safexcel_sha3_final,
+ .finup = safexcel_sha3_finup,
+ .digest = safexcel_sha3_256_digest,
+ .export = safexcel_sha3_export,
+ .import = safexcel_sha3_import,
+ .halg = {
+ .digestsize = SHA3_256_DIGEST_SIZE,
+ .statesize = sizeof(struct safexcel_ahash_export_state),
+ .base = {
+ .cra_name = "sha3-256",
+ .cra_driver_name = "safexcel-sha3-256",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
+ CRYPTO_ALG_NEED_FALLBACK,
+ .cra_blocksize = SHA3_256_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
+ .cra_init = safexcel_sha3_cra_init,
+ .cra_exit = safexcel_sha3_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int safexcel_sha3_384_init(struct ahash_request *areq)
+{
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
+ req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
+ req->state_sz = SHA3_384_DIGEST_SIZE;
+ req->block_sz = SHA3_384_BLOCK_SIZE;
+ ctx->do_fallback = false;
+ ctx->fb_init_done = false;
+ return 0;
+}
+
+static int safexcel_sha3_384_digest(struct ahash_request *req)
+{
+ if (req->nbytes)
+ return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);
+
+ /* HW cannot do zero length hash, use fallback instead */
+ return safexcel_sha3_digest_fallback(req);
+}
+
+struct safexcel_alg_template safexcel_alg_sha3_384 = {
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
+ .algo_mask = SAFEXCEL_ALG_SHA3,
+ .alg.ahash = {
+ .init = safexcel_sha3_384_init,
+ .update = safexcel_sha3_update,
+ .final = safexcel_sha3_final,
+ .finup = safexcel_sha3_finup,
+ .digest = safexcel_sha3_384_digest,
+ .export = safexcel_sha3_export,
+ .import = safexcel_sha3_import,
+ .halg = {
+ .digestsize = SHA3_384_DIGEST_SIZE,
+ .statesize = sizeof(struct safexcel_ahash_export_state),
+ .base = {
+ .cra_name = "sha3-384",
+ .cra_driver_name = "safexcel-sha3-384",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
+ CRYPTO_ALG_NEED_FALLBACK,
+ .cra_blocksize = SHA3_384_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
+ .cra_init = safexcel_sha3_cra_init,
+ .cra_exit = safexcel_sha3_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int safexcel_sha3_512_init(struct ahash_request *areq)
+{
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
+ req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
+ req->state_sz = SHA3_512_DIGEST_SIZE;
+ req->block_sz = SHA3_512_BLOCK_SIZE;
+ ctx->do_fallback = false;
+ ctx->fb_init_done = false;
+ return 0;
+}
+
+static int safexcel_sha3_512_digest(struct ahash_request *req)
+{
+ if (req->nbytes)
+ return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);
+
+ /* HW cannot do zero length hash, use fallback instead */
+ return safexcel_sha3_digest_fallback(req);
+}
+
+struct safexcel_alg_template safexcel_alg_sha3_512 = {
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
+ .algo_mask = SAFEXCEL_ALG_SHA3,
+ .alg.ahash = {
+ .init = safexcel_sha3_512_init,
+ .update = safexcel_sha3_update,
+ .final = safexcel_sha3_final,
+ .finup = safexcel_sha3_finup,
+ .digest = safexcel_sha3_512_digest,
+ .export = safexcel_sha3_export,
+ .import = safexcel_sha3_import,
+ .halg = {
+ .digestsize = SHA3_512_DIGEST_SIZE,
+ .statesize = sizeof(struct safexcel_ahash_export_state),
+ .base = {
+ .cra_name = "sha3-512",
+ .cra_driver_name = "safexcel-sha3-512",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
+ CRYPTO_ALG_NEED_FALLBACK,
+ .cra_blocksize = SHA3_512_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
+ .cra_init = safexcel_sha3_cra_init,
+ .cra_exit = safexcel_sha3_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
--
1.8.3.1
This patch adds support for hmac(sha3-224), hmac(sha3-256), hmac(sha3-384)
and hmac(sha3-512) authentication algorithms.
The patch has been tested with the eip197c_iewxkbc configuration on the
Xilinx VCU118 development board, including the testmgr extra tests.
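These behave like any other keyed ahash: the key is set once on the
transform, after which requests are issued as usual. A minimal sketch
(key, msg and mac are placeholder buffers; request/wait setup as in the
basic hash example from the cover letter):

  tfm = crypto_alloc_ahash("hmac(sha3-256)", 0, 0);
  if (IS_ERR(tfm))
          return PTR_ERR(tfm);

  err = crypto_ahash_setkey(tfm, key, keylen);
  if (err)
          return err;

  sg_init_one(&sg, msg, msglen);
  ahash_request_set_crypt(req, &sg, mac, msglen);
  err = crypto_wait_req(crypto_ahash_digest(req), &wait);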
changes since v1:
- nothing
Signed-off-by: Pascal van Leeuwen <[email protected]>
---
drivers/crypto/inside-secure/safexcel.c | 4 +
drivers/crypto/inside-secure/safexcel.h | 4 +
drivers/crypto/inside-secure/safexcel_hash.c | 441 ++++++++++++++++++++++++++-
3 files changed, 436 insertions(+), 13 deletions(-)
diff --git a/drivers/crypto/inside-secure/safexcel.c b/drivers/crypto/inside-secure/safexcel.c
index 4c91bbf..c8157f2 100644
--- a/drivers/crypto/inside-secure/safexcel.c
+++ b/drivers/crypto/inside-secure/safexcel.c
@@ -1191,6 +1191,10 @@ static int safexcel_request_ring_irq(void *pdev, int irqid,
&safexcel_alg_sha3_256,
&safexcel_alg_sha3_384,
&safexcel_alg_sha3_512,
+ &safexcel_alg_hmac_sha3_224,
+ &safexcel_alg_hmac_sha3_256,
+ &safexcel_alg_hmac_sha3_384,
+ &safexcel_alg_hmac_sha3_512,
};
static int safexcel_register_algorithms(struct safexcel_crypto_priv *priv)
diff --git a/drivers/crypto/inside-secure/safexcel.h b/drivers/crypto/inside-secure/safexcel.h
index f41982d..275f8b5 100644
--- a/drivers/crypto/inside-secure/safexcel.h
+++ b/drivers/crypto/inside-secure/safexcel.h
@@ -897,5 +897,9 @@ int safexcel_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
extern struct safexcel_alg_template safexcel_alg_sha3_256;
extern struct safexcel_alg_template safexcel_alg_sha3_384;
extern struct safexcel_alg_template safexcel_alg_sha3_512;
+extern struct safexcel_alg_template safexcel_alg_hmac_sha3_224;
+extern struct safexcel_alg_template safexcel_alg_hmac_sha3_256;
+extern struct safexcel_alg_template safexcel_alg_hmac_sha3_384;
+extern struct safexcel_alg_template safexcel_alg_hmac_sha3_512;
#endif
diff --git a/drivers/crypto/inside-secure/safexcel_hash.c b/drivers/crypto/inside-secure/safexcel_hash.c
index 1d67cf2..85c3a07 100644
--- a/drivers/crypto/inside-secure/safexcel_hash.c
+++ b/drivers/crypto/inside-secure/safexcel_hash.c
@@ -27,12 +27,15 @@ struct safexcel_ahash_ctx {
bool cbcmac;
bool do_fallback;
bool fb_init_done;
+ bool fb_do_setkey;
- u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
- u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
+ u32 ipad[SHA3_512_BLOCK_SIZE / sizeof(u32)];
+ u32 opad[SHA3_512_BLOCK_SIZE / sizeof(u32)];
struct crypto_cipher *kaes;
struct crypto_ahash *fback;
+ struct crypto_shash *shpre;
+ struct shash_desc *shdesc;
};
struct safexcel_ahash_req {
@@ -52,7 +55,8 @@ struct safexcel_ahash_req {
u8 state_sz; /* expected state size, only set once */
u8 block_sz; /* block size, only set once */
- u32 state[SHA512_DIGEST_SIZE / sizeof(u32)] __aligned(sizeof(u32));
+ u8 digest_sz; /* output digest size, only set once */
+ u32 state[SHA3_512_BLOCK_SIZE / sizeof(u32)] __aligned(sizeof(u32));
u64 len;
u64 processed;
@@ -246,7 +250,7 @@ static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
}
if (sreq->result_dma) {
- dma_unmap_single(priv->dev, sreq->result_dma, sreq->state_sz,
+ dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
DMA_FROM_DEVICE);
sreq->result_dma = 0;
}
@@ -265,7 +269,7 @@ static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
memcpy(sreq->cache, sreq->state,
crypto_ahash_digestsize(ahash));
- memcpy(sreq->state, ctx->opad, sreq->state_sz);
+ memcpy(sreq->state, ctx->opad, sreq->digest_sz);
sreq->len = sreq->block_sz +
crypto_ahash_digestsize(ahash);
@@ -309,7 +313,7 @@ static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
struct safexcel_result_desc *rdesc;
struct scatterlist *sg;
- int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0, res_sz;
+ int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
u64 queued, len;
queued = safexcel_queued_len(req);
@@ -451,11 +455,10 @@ static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
/* Setup the context options */
safexcel_context_control(ctx, req, first_cdesc);
- /* Add the token. Note that the XCBC result is only 1 AES block. */
- res_sz = req->xcbcmac ? AES_BLOCK_SIZE : req->state_sz;
- safexcel_hash_token(first_cdesc, len, res_sz, ctx->cbcmac);
+ /* Add the token */
+ safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);
- req->result_dma = dma_map_single(priv->dev, req->state, req->state_sz,
+ req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
DMA_FROM_DEVICE);
if (dma_mapping_error(priv->dev, req->result_dma)) {
ret = -EINVAL;
@@ -464,7 +467,7 @@ static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
/* Add a result descriptor */
rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
- res_sz);
+ req->digest_sz);
if (IS_ERR(rdesc)) {
ret = PTR_ERR(rdesc);
goto unmap_result;
@@ -479,7 +482,7 @@ static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
return 0;
unmap_result:
- dma_unmap_single(priv->dev, req->result_dma, req->state_sz,
+ dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
DMA_FROM_DEVICE);
unmap_sg:
if (req->nents) {
@@ -912,6 +915,7 @@ static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
ctx->priv = tmpl->priv;
ctx->base.send = safexcel_ahash_send;
ctx->base.handle_result = safexcel_handle_result;
+ ctx->fb_do_setkey = false;
crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
sizeof(struct safexcel_ahash_req));
@@ -928,6 +932,7 @@ static int safexcel_sha1_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
req->state_sz = SHA1_DIGEST_SIZE;
+ req->digest_sz = SHA1_DIGEST_SIZE;
req->block_sz = SHA1_BLOCK_SIZE;
return 0;
@@ -1009,6 +1014,7 @@ static int safexcel_hmac_sha1_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
req->state_sz = SHA1_DIGEST_SIZE;
+ req->digest_sz = SHA1_DIGEST_SIZE;
req->block_sz = SHA1_BLOCK_SIZE;
req->hmac = true;
@@ -1245,6 +1251,7 @@ static int safexcel_sha256_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
req->state_sz = SHA256_DIGEST_SIZE;
+ req->digest_sz = SHA256_DIGEST_SIZE;
req->block_sz = SHA256_BLOCK_SIZE;
return 0;
@@ -1300,6 +1307,7 @@ static int safexcel_sha224_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
req->state_sz = SHA256_DIGEST_SIZE;
+ req->digest_sz = SHA256_DIGEST_SIZE;
req->block_sz = SHA256_BLOCK_SIZE;
return 0;
@@ -1368,6 +1376,7 @@ static int safexcel_hmac_sha224_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
req->state_sz = SHA256_DIGEST_SIZE;
+ req->digest_sz = SHA256_DIGEST_SIZE;
req->block_sz = SHA256_BLOCK_SIZE;
req->hmac = true;
@@ -1438,6 +1447,7 @@ static int safexcel_hmac_sha256_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
req->state_sz = SHA256_DIGEST_SIZE;
+ req->digest_sz = SHA256_DIGEST_SIZE;
req->block_sz = SHA256_BLOCK_SIZE;
req->hmac = true;
@@ -1495,6 +1505,7 @@ static int safexcel_sha512_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
req->state_sz = SHA512_DIGEST_SIZE;
+ req->digest_sz = SHA512_DIGEST_SIZE;
req->block_sz = SHA512_BLOCK_SIZE;
return 0;
@@ -1550,6 +1561,7 @@ static int safexcel_sha384_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
req->state_sz = SHA512_DIGEST_SIZE;
+ req->digest_sz = SHA512_DIGEST_SIZE;
req->block_sz = SHA512_BLOCK_SIZE;
return 0;
@@ -1618,6 +1630,7 @@ static int safexcel_hmac_sha512_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
req->state_sz = SHA512_DIGEST_SIZE;
+ req->digest_sz = SHA512_DIGEST_SIZE;
req->block_sz = SHA512_BLOCK_SIZE;
req->hmac = true;
@@ -1688,6 +1701,7 @@ static int safexcel_hmac_sha384_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
req->state_sz = SHA512_DIGEST_SIZE;
+ req->digest_sz = SHA512_DIGEST_SIZE;
req->block_sz = SHA512_BLOCK_SIZE;
req->hmac = true;
@@ -1745,6 +1759,7 @@ static int safexcel_md5_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
req->state_sz = MD5_DIGEST_SIZE;
+ req->digest_sz = MD5_DIGEST_SIZE;
req->block_sz = MD5_HMAC_BLOCK_SIZE;
return 0;
@@ -1806,6 +1821,7 @@ static int safexcel_hmac_md5_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
req->state_sz = MD5_DIGEST_SIZE;
+ req->digest_sz = MD5_DIGEST_SIZE;
req->block_sz = MD5_HMAC_BLOCK_SIZE;
req->len_is_le = true; /* MD5 is little endian! ... */
req->hmac = true;
@@ -1887,6 +1903,7 @@ static int safexcel_crc32_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
req->digest = CONTEXT_CONTROL_DIGEST_XCM;
req->state_sz = sizeof(u32);
+ req->digest_sz = sizeof(u32);
req->block_sz = sizeof(u32);
return 0;
@@ -1958,6 +1975,7 @@ static int safexcel_cbcmac_init(struct ahash_request *areq)
req->digest = CONTEXT_CONTROL_DIGEST_XCM;
req->state_sz = ctx->key_sz;
+ req->digest_sz = AES_BLOCK_SIZE;
req->block_sz = AES_BLOCK_SIZE;
req->xcbcmac = true;
@@ -2245,6 +2263,7 @@ static int safexcel_sm3_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
req->state_sz = SM3_DIGEST_SIZE;
+ req->digest_sz = SM3_DIGEST_SIZE;
req->block_sz = SM3_BLOCK_SIZE;
return 0;
@@ -2313,6 +2332,7 @@ static int safexcel_hmac_sm3_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
req->state_sz = SM3_DIGEST_SIZE;
+ req->digest_sz = SM3_DIGEST_SIZE;
req->block_sz = SM3_BLOCK_SIZE;
req->hmac = true;
@@ -2371,6 +2391,7 @@ static int safexcel_sha3_224_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
req->state_sz = SHA3_224_DIGEST_SIZE;
+ req->digest_sz = SHA3_224_DIGEST_SIZE;
req->block_sz = SHA3_224_BLOCK_SIZE;
ctx->do_fallback = false;
ctx->fb_init_done = false;
@@ -2391,7 +2412,23 @@ static int safexcel_sha3_fbcheck(struct ahash_request *req)
ahash_request_set_crypt(subreq, req->src, req->result,
req->nbytes);
if (!ctx->fb_init_done) {
- ret = crypto_ahash_init(subreq);
+ if (ctx->fb_do_setkey) {
+ /* Set fallback cipher HMAC key */
+ u8 key[SHA3_224_BLOCK_SIZE];
+
+ memcpy(key, ctx->ipad,
+ crypto_ahash_blocksize(ctx->fback) / 2);
+ memcpy(key +
+ crypto_ahash_blocksize(ctx->fback) / 2,
+ ctx->opad,
+ crypto_ahash_blocksize(ctx->fback) / 2);
+ ret = crypto_ahash_setkey(ctx->fback, key,
+ crypto_ahash_blocksize(ctx->fback));
+ memzero_explicit(key,
+ crypto_ahash_blocksize(ctx->fback));
+ ctx->fb_do_setkey = false;
+ }
+ ret = ret ?: crypto_ahash_init(subreq);
ctx->fb_init_done = true;
}
}
@@ -2547,6 +2584,7 @@ static int safexcel_sha3_256_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
req->state_sz = SHA3_256_DIGEST_SIZE;
+ req->digest_sz = SHA3_256_DIGEST_SIZE;
req->block_sz = SHA3_256_BLOCK_SIZE;
ctx->do_fallback = false;
ctx->fb_init_done = false;
@@ -2604,6 +2642,7 @@ static int safexcel_sha3_384_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
req->state_sz = SHA3_384_DIGEST_SIZE;
+ req->digest_sz = SHA3_384_DIGEST_SIZE;
req->block_sz = SHA3_384_BLOCK_SIZE;
ctx->do_fallback = false;
ctx->fb_init_done = false;
@@ -2661,6 +2700,7 @@ static int safexcel_sha3_512_init(struct ahash_request *areq)
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
req->state_sz = SHA3_512_DIGEST_SIZE;
+ req->digest_sz = SHA3_512_DIGEST_SIZE;
req->block_sz = SHA3_512_BLOCK_SIZE;
ctx->do_fallback = false;
ctx->fb_init_done = false;
@@ -2706,3 +2746,378 @@ struct safexcel_alg_template safexcel_alg_sha3_512 = {
},
},
};
+
+static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
+{
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
+ int ret;
+
+ ret = safexcel_sha3_cra_init(tfm);
+ if (ret)
+ return ret;
+
+ /* Allocate precalc basic digest implementation */
+ ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
+ if (IS_ERR(ctx->shpre))
+ return PTR_ERR(ctx->shpre);
+
+ ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
+ crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
+ if (!ctx->shdesc) {
+ crypto_free_shash(ctx->shpre);
+ return -ENOMEM;
+ }
+ ctx->shdesc->tfm = ctx->shpre;
+ return 0;
+}
+
+static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
+{
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ crypto_free_ahash(ctx->fback);
+ crypto_free_shash(ctx->shpre);
+ kfree(ctx->shdesc);
+ safexcel_ahash_cra_exit(tfm);
+}
+
+static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
+ unsigned int keylen)
+{
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+ int ret = 0;
+
+ if (keylen > crypto_ahash_blocksize(tfm)) {
+ /*
+ * If the key is larger than the blocksize, then hash it
+ * first using our fallback cipher
+ */
+ ret = crypto_shash_digest(ctx->shdesc, key, keylen,
+ (u8 *)ctx->ipad);
+ keylen = crypto_shash_digestsize(ctx->shpre);
+
+ /*
+ * If the digest is larger than half the blocksize, we need to
+ * move the rest to opad due to the way our HMAC infra works.
+ */
+ if (keylen > crypto_ahash_blocksize(tfm) / 2)
+ /* Buffers overlap, need to use memmove instead of memcpy! */
+ memmove(ctx->opad,
+ (u8 *)ctx->ipad +
+ crypto_ahash_blocksize(tfm) / 2,
+ keylen - crypto_ahash_blocksize(tfm) / 2);
+ } else {
+ /*
+ * Copy the key to our ipad & opad buffers
+ * Note that ipad and opad each contain one half of the key,
+ * to match the existing HMAC driver infrastructure.
+ */
+ if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
+ memcpy(ctx->ipad, key, keylen);
+ } else {
+ memcpy(ctx->ipad, key,
+ crypto_ahash_blocksize(tfm) / 2);
+ memcpy(ctx->opad,
+ key + crypto_ahash_blocksize(tfm) / 2,
+ keylen - crypto_ahash_blocksize(tfm) / 2);
+ }
+ }
+
+ /* Pad key with zeroes */
+ if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
+ memset((u8 *)ctx->ipad + keylen, 0,
+ crypto_ahash_blocksize(tfm) / 2 - keylen);
+ memset(ctx->opad, 0, crypto_ahash_blocksize(tfm) / 2);
+ } else {
+ memset((u8 *)ctx->opad + keylen -
+ crypto_ahash_blocksize(tfm) / 2, 0,
+ crypto_ahash_blocksize(tfm) - keylen);
+ }
+
+ /* If doing fallback, still need to set the new key! */
+ ctx->fb_do_setkey = true;
+ return ret;
+}
+
+static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
+{
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ /* Copy (half of) the key */
+ memcpy(req->state, ctx->ipad, SHA3_224_BLOCK_SIZE / 2);
+ /* Start of HMAC should have len == processed == blocksize */
+ req->len = SHA3_224_BLOCK_SIZE;
+ req->processed = SHA3_224_BLOCK_SIZE;
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
+ req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
+ req->state_sz = SHA3_224_BLOCK_SIZE / 2;
+ req->digest_sz = SHA3_224_DIGEST_SIZE;
+ req->block_sz = SHA3_224_BLOCK_SIZE;
+ req->hmac = true;
+ ctx->do_fallback = false;
+ ctx->fb_init_done = false;
+ return 0;
+}
+
+static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
+{
+ if (req->nbytes)
+ return safexcel_hmac_sha3_224_init(req) ?:
+ safexcel_ahash_finup(req);
+
+ /* HW cannot do zero length HMAC, use fallback instead */
+ return safexcel_sha3_digest_fallback(req);
+}
+
+static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
+{
+ return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
+}
+
+struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
+ .algo_mask = SAFEXCEL_ALG_SHA3,
+ .alg.ahash = {
+ .init = safexcel_hmac_sha3_224_init,
+ .update = safexcel_sha3_update,
+ .final = safexcel_sha3_final,
+ .finup = safexcel_sha3_finup,
+ .digest = safexcel_hmac_sha3_224_digest,
+ .setkey = safexcel_hmac_sha3_setkey,
+ .export = safexcel_sha3_export,
+ .import = safexcel_sha3_import,
+ .halg = {
+ .digestsize = SHA3_224_DIGEST_SIZE,
+ .statesize = sizeof(struct safexcel_ahash_export_state),
+ .base = {
+ .cra_name = "hmac(sha3-224)",
+ .cra_driver_name = "safexcel-hmac-sha3-224",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
+ CRYPTO_ALG_NEED_FALLBACK,
+ .cra_blocksize = SHA3_224_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
+ .cra_init = safexcel_hmac_sha3_224_cra_init,
+ .cra_exit = safexcel_hmac_sha3_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
+{
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ /* Copy (half of) the key */
+ memcpy(req->state, ctx->ipad, SHA3_256_BLOCK_SIZE / 2);
+ /* Start of HMAC should have len == processed == blocksize */
+ req->len = SHA3_256_BLOCK_SIZE;
+ req->processed = SHA3_256_BLOCK_SIZE;
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
+ req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
+ req->state_sz = SHA3_256_BLOCK_SIZE / 2;
+ req->digest_sz = SHA3_256_DIGEST_SIZE;
+ req->block_sz = SHA3_256_BLOCK_SIZE;
+ req->hmac = true;
+ ctx->do_fallback = false;
+ ctx->fb_init_done = false;
+ return 0;
+}
+
+static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
+{
+ if (req->nbytes)
+ return safexcel_hmac_sha3_256_init(req) ?:
+ safexcel_ahash_finup(req);
+
+ /* HW cannot do zero length HMAC, use fallback instead */
+ return safexcel_sha3_digest_fallback(req);
+}
+
+static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
+{
+ return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
+}
+
+struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
+ .algo_mask = SAFEXCEL_ALG_SHA3,
+ .alg.ahash = {
+ .init = safexcel_hmac_sha3_256_init,
+ .update = safexcel_sha3_update,
+ .final = safexcel_sha3_final,
+ .finup = safexcel_sha3_finup,
+ .digest = safexcel_hmac_sha3_256_digest,
+ .setkey = safexcel_hmac_sha3_setkey,
+ .export = safexcel_sha3_export,
+ .import = safexcel_sha3_import,
+ .halg = {
+ .digestsize = SHA3_256_DIGEST_SIZE,
+ .statesize = sizeof(struct safexcel_ahash_export_state),
+ .base = {
+ .cra_name = "hmac(sha3-256)",
+ .cra_driver_name = "safexcel-hmac-sha3-256",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
+ CRYPTO_ALG_NEED_FALLBACK,
+ .cra_blocksize = SHA3_256_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
+ .cra_init = safexcel_hmac_sha3_256_cra_init,
+ .cra_exit = safexcel_hmac_sha3_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
+{
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ /* Copy (half of) the key */
+ memcpy(req->state, ctx->ipad, SHA3_384_BLOCK_SIZE / 2);
+ /* Start of HMAC should have len == processed == blocksize */
+ req->len = SHA3_384_BLOCK_SIZE;
+ req->processed = SHA3_384_BLOCK_SIZE;
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
+ req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
+ req->state_sz = SHA3_384_BLOCK_SIZE / 2;
+ req->digest_sz = SHA3_384_DIGEST_SIZE;
+ req->block_sz = SHA3_384_BLOCK_SIZE;
+ req->hmac = true;
+ ctx->do_fallback = false;
+ ctx->fb_init_done = false;
+ return 0;
+}
+
+static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
+{
+ if (req->nbytes)
+ return safexcel_hmac_sha3_384_init(req) ?:
+ safexcel_ahash_finup(req);
+
+ /* HW cannot do zero length HMAC, use fallback instead */
+ return safexcel_sha3_digest_fallback(req);
+}
+
+static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
+{
+ return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
+}
+
+struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
+ .algo_mask = SAFEXCEL_ALG_SHA3,
+ .alg.ahash = {
+ .init = safexcel_hmac_sha3_384_init,
+ .update = safexcel_sha3_update,
+ .final = safexcel_sha3_final,
+ .finup = safexcel_sha3_finup,
+ .digest = safexcel_hmac_sha3_384_digest,
+ .setkey = safexcel_hmac_sha3_setkey,
+ .export = safexcel_sha3_export,
+ .import = safexcel_sha3_import,
+ .halg = {
+ .digestsize = SHA3_384_DIGEST_SIZE,
+ .statesize = sizeof(struct safexcel_ahash_export_state),
+ .base = {
+ .cra_name = "hmac(sha3-384)",
+ .cra_driver_name = "safexcel-hmac-sha3-384",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
+ CRYPTO_ALG_NEED_FALLBACK,
+ .cra_blocksize = SHA3_384_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
+ .cra_init = safexcel_hmac_sha3_384_cra_init,
+ .cra_exit = safexcel_hmac_sha3_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
+{
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ /* Copy (half of) the key */
+ memcpy(req->state, ctx->ipad, SHA3_512_BLOCK_SIZE / 2);
+ /* Start of HMAC should have len == processed == blocksize */
+ req->len = SHA3_512_BLOCK_SIZE;
+ req->processed = SHA3_512_BLOCK_SIZE;
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
+ req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
+ req->state_sz = SHA3_512_BLOCK_SIZE / 2;
+ req->digest_sz = SHA3_512_DIGEST_SIZE;
+ req->block_sz = SHA3_512_BLOCK_SIZE;
+ req->hmac = true;
+ ctx->do_fallback = false;
+ ctx->fb_init_done = false;
+ return 0;
+}
+
+static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
+{
+ if (req->nbytes)
+ return safexcel_hmac_sha3_512_init(req) ?:
+ safexcel_ahash_finup(req);
+
+ /* HW cannot do zero length HMAC, use fallback instead */
+ return safexcel_sha3_digest_fallback(req);
+}
+
+static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
+{
+ return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
+}
+
+struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
+ .algo_mask = SAFEXCEL_ALG_SHA3,
+ .alg.ahash = {
+ .init = safexcel_hmac_sha3_512_init,
+ .update = safexcel_sha3_update,
+ .final = safexcel_sha3_final,
+ .finup = safexcel_sha3_finup,
+ .digest = safexcel_hmac_sha3_512_digest,
+ .setkey = safexcel_hmac_sha3_setkey,
+ .export = safexcel_sha3_export,
+ .import = safexcel_sha3_import,
+ .halg = {
+ .digestsize = SHA3_512_DIGEST_SIZE,
+ .statesize = sizeof(struct safexcel_ahash_export_state),
+ .base = {
+ .cra_name = "hmac(sha3-512)",
+ .cra_driver_name = "safexcel-hmac-sha3-512",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
+ CRYPTO_ALG_NEED_FALLBACK,
+ .cra_blocksize = SHA3_512_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
+ .cra_init = safexcel_hmac_sha3_512_cra_init,
+ .cra_exit = safexcel_hmac_sha3_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
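For reference, the setkey implementation above splits the (possibly
pre-hashed) key across the ipad and opad buffers at half the SHA3 block
size. A worked example for hmac(sha3-512), block size 72 bytes: a 20-byte
key is copied into ipad and zero-padded to the 36-byte half block, with
opad's half fully zeroed; a 100-byte key is first hashed down to the
64-byte sha3-512 digest, of which bytes 0-35 stay in ipad, bytes 36-63 are
moved into opad, and the remaining 8 bytes of opad's half are zeroed.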
--
1.8.3.1
On Fri, Sep 13, 2019 at 08:56:46PM +0200, Pascal van Leeuwen wrote:
> This patchset adds support for all flavours of sha3-xxx and hmac(sha3-xxx)
> ahash algorithms.
>
> The patchset has been tested with the eip197c_iewxkbc configuration on the
> Xilinx VCU118 development board, including the testmgr extra tests.
>
> changes since v1:
> - fixed crypto/Kconfig so that generic fallback is compiled as well
>
> Pascal van Leeuwen (3):
> crypto: inside-secure - Add SHA3 family of basic hash algorithms
> crypto: inside-secure - Add HMAC-SHA3 family of authentication
> algorithms
> crypto: Kconfig - Add CRYPTO_SHA3 to CRYPTO_DEV_SAFEXCEL
>
> drivers/crypto/Kconfig | 1 +
> drivers/crypto/inside-secure/safexcel.c | 8 +
> drivers/crypto/inside-secure/safexcel.h | 13 +
> drivers/crypto/inside-secure/safexcel_hash.c | 790 ++++++++++++++++++++++++++-
> 4 files changed, 800 insertions(+), 12 deletions(-)
All applied. Thanks.
--
Email: Herbert Xu <[email protected]>
Home Page: http://gondor.apana.org.au/~herbert/
PGP Key: http://gondor.apana.org.au/~herbert/pubkey.txt