2018-10-18 12:57:23

by Yael Chemla

[permalink] [raw]
Subject: [PATCH 0/3] crypto: ccree: add SM3 support

Add support for SM3 in CryptoCell 713.

Yael Chemla (3):
crypto: ccree: adjust hash length to suit certain context specifics
crypto: ccree: modify set_cipher_mode usage from cc_hash
crypto: ccree: add SM3 support

drivers/crypto/Kconfig | 1 +
drivers/crypto/ccree/cc_aead.c | 19 +++-
drivers/crypto/ccree/cc_crypto_ctx.h | 4 +-
drivers/crypto/ccree/cc_driver.c | 10 +-
drivers/crypto/ccree/cc_driver.h | 2 +-
drivers/crypto/ccree/cc_hash.c | 175 +++++++++++++++++++++++---------
drivers/crypto/ccree/cc_hw_queue_defs.h | 27 +++++
7 files changed, 182 insertions(+), 56 deletions(-)

--
2.7.4



2018-10-18 12:57:32

by Yael Chemla

[permalink] [raw]
Subject: [PATCH 1/3] crypto: ccree: adjust hash length to suit certain context specifics

Adjust the hash length so that it is no longer a fixed value common to all
algorithms. Instead, derive it from the specific context information.
This is preparation for SM3 support.

Signed-off-by: Yael Chemla <[email protected]>
---
drivers/crypto/ccree/cc_aead.c | 19 ++++++++++++++-----
drivers/crypto/ccree/cc_driver.c | 10 ++++++++--
drivers/crypto/ccree/cc_driver.h | 2 +-
drivers/crypto/ccree/cc_hash.c | 40 ++++++++++++++++++++++++----------------
4 files changed, 47 insertions(+), 24 deletions(-)

diff --git a/drivers/crypto/ccree/cc_aead.c b/drivers/crypto/ccree/cc_aead.c
index 01b82b8..e0ac376c 100644
--- a/drivers/crypto/ccree/cc_aead.c
+++ b/drivers/crypto/ccree/cc_aead.c
@@ -58,6 +58,7 @@ struct cc_aead_ctx {
unsigned int enc_keylen;
unsigned int auth_keylen;
unsigned int authsize; /* Actual (reduced?) size of the MAC/ICv */
+ unsigned int hash_len;
enum drv_cipher_mode cipher_mode;
enum cc_flow_mode flow_mode;
enum drv_hash_mode auth_mode;
@@ -122,6 +123,13 @@ static void cc_aead_exit(struct crypto_aead *tfm)
}
}

+static unsigned int cc_get_aead_hash_len(struct crypto_aead *tfm)
+{
+ struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
+
+ return cc_get_default_hash_len(ctx->drvdata);
+}
+
static int cc_aead_init(struct crypto_aead *tfm)
{
struct aead_alg *alg = crypto_aead_alg(tfm);
@@ -196,6 +204,7 @@ static int cc_aead_init(struct crypto_aead *tfm)
ctx->auth_state.hmac.ipad_opad = NULL;
ctx->auth_state.hmac.padded_authkey = NULL;
}
+ ctx->hash_len = cc_get_aead_hash_len(tfm);

return 0;

@@ -327,7 +336,7 @@ static int hmac_setkey(struct cc_hw_desc *desc, struct cc_aead_ctx *ctx)
/* Load the hash current length*/
hw_desc_init(&desc[idx]);
set_cipher_mode(&desc[idx], hash_mode);
- set_din_const(&desc[idx], 0, ctx->drvdata->hash_len_sz);
+ set_din_const(&desc[idx], 0, ctx->hash_len);
set_flow_mode(&desc[idx], S_DIN_to_HASH);
set_setup_mode(&desc[idx], SETUP_LOAD_KEY0);
idx++;
@@ -465,7 +474,7 @@ static int cc_get_plain_hmac_key(struct crypto_aead *tfm, const u8 *key,
/* Load the hash current length*/
hw_desc_init(&desc[idx]);
set_cipher_mode(&desc[idx], hashmode);
- set_din_const(&desc[idx], 0, ctx->drvdata->hash_len_sz);
+ set_din_const(&desc[idx], 0, ctx->hash_len);
set_cipher_config1(&desc[idx], HASH_PADDING_ENABLED);
set_flow_mode(&desc[idx], S_DIN_to_HASH);
set_setup_mode(&desc[idx], SETUP_LOAD_KEY0);
@@ -1001,7 +1010,7 @@ static void cc_set_hmac_desc(struct aead_request *req, struct cc_hw_desc desc[],
hw_desc_init(&desc[idx]);
set_cipher_mode(&desc[idx], hash_mode);
set_din_sram(&desc[idx], cc_digest_len_addr(ctx->drvdata, hash_mode),
- ctx->drvdata->hash_len_sz);
+ ctx->hash_len);
set_flow_mode(&desc[idx], S_DIN_to_HASH);
set_setup_mode(&desc[idx], SETUP_LOAD_KEY0);
idx++;
@@ -1098,7 +1107,7 @@ static void cc_proc_scheme_desc(struct aead_request *req,
hw_desc_init(&desc[idx]);
set_cipher_mode(&desc[idx], hash_mode);
set_dout_sram(&desc[idx], aead_handle->sram_workspace_addr,
- ctx->drvdata->hash_len_sz);
+ ctx->hash_len);
set_flow_mode(&desc[idx], S_HASH_to_DOUT);
set_setup_mode(&desc[idx], SETUP_WRITE_STATE1);
set_cipher_do(&desc[idx], DO_PAD);
@@ -1128,7 +1137,7 @@ static void cc_proc_scheme_desc(struct aead_request *req,
hw_desc_init(&desc[idx]);
set_cipher_mode(&desc[idx], hash_mode);
set_din_sram(&desc[idx], cc_digest_len_addr(ctx->drvdata, hash_mode),
- ctx->drvdata->hash_len_sz);
+ ctx->hash_len);
set_cipher_config1(&desc[idx], HASH_PADDING_ENABLED);
set_flow_mode(&desc[idx], S_DIN_to_HASH);
set_setup_mode(&desc[idx], SETUP_LOAD_KEY0);
diff --git a/drivers/crypto/ccree/cc_driver.c b/drivers/crypto/ccree/cc_driver.c
index 1ff229c..14e1970 100644
--- a/drivers/crypto/ccree/cc_driver.c
+++ b/drivers/crypto/ccree/cc_driver.c
@@ -206,12 +206,10 @@ static int init_cc_resources(struct platform_device *plat_dev)
new_drvdata->hw_rev = hw_rev->rev;

if (hw_rev->rev >= CC_HW_REV_712) {
- new_drvdata->hash_len_sz = HASH_LEN_SIZE_712;
new_drvdata->axim_mon_offset = CC_REG(AXIM_MON_COMP);
new_drvdata->sig_offset = CC_REG(HOST_SIGNATURE_712);
new_drvdata->ver_offset = CC_REG(HOST_VERSION_712);
} else {
- new_drvdata->hash_len_sz = HASH_LEN_SIZE_630;
new_drvdata->axim_mon_offset = CC_REG(AXIM_MON_COMP8);
new_drvdata->sig_offset = CC_REG(HOST_SIGNATURE_630);
new_drvdata->ver_offset = CC_REG(HOST_VERSION_630);
@@ -461,6 +459,14 @@ int cc_clk_on(struct cc_drvdata *drvdata)
return 0;
}

+unsigned int cc_get_default_hash_len(struct cc_drvdata *drvdata)
+{
+ if (drvdata->hw_rev >= CC_HW_REV_712)
+ return HASH_LEN_SIZE_712;
+ else
+ return HASH_LEN_SIZE_630;
+}
+
void cc_clk_off(struct cc_drvdata *drvdata)
{
struct clk *clk = drvdata->clk;
diff --git a/drivers/crypto/ccree/cc_driver.h b/drivers/crypto/ccree/cc_driver.h
index d608a4f..27bb97c 100644
--- a/drivers/crypto/ccree/cc_driver.h
+++ b/drivers/crypto/ccree/cc_driver.h
@@ -127,7 +127,6 @@ struct cc_drvdata {
bool coherent;
char *hw_rev_name;
enum cc_hw_rev hw_rev;
- u32 hash_len_sz;
u32 axim_mon_offset;
u32 sig_offset;
u32 ver_offset;
@@ -182,6 +181,7 @@ int init_cc_regs(struct cc_drvdata *drvdata, bool is_probe);
void fini_cc_regs(struct cc_drvdata *drvdata);
int cc_clk_on(struct cc_drvdata *drvdata);
void cc_clk_off(struct cc_drvdata *drvdata);
+unsigned int cc_get_default_hash_len(struct cc_drvdata *drvdata);

static inline void cc_iowrite(struct cc_drvdata *drvdata, u32 reg, u32 val)
{
diff --git a/drivers/crypto/ccree/cc_hash.c b/drivers/crypto/ccree/cc_hash.c
index b931330..7af5b61 100644
--- a/drivers/crypto/ccree/cc_hash.c
+++ b/drivers/crypto/ccree/cc_hash.c
@@ -82,6 +82,7 @@ struct cc_hash_ctx {
int hash_mode;
int hw_mode;
int inter_digestsize;
+ unsigned int hash_len;
struct completion setkey_comp;
bool is_hmac;
};
@@ -138,10 +139,10 @@ static void cc_init_req(struct device *dev, struct ahash_req_ctx *state,
ctx->hash_mode == DRV_HASH_SHA384)
memcpy(state->digest_bytes_len,
digest_len_sha512_init,
- ctx->drvdata->hash_len_sz);
+ ctx->hash_len);
else
memcpy(state->digest_bytes_len, digest_len_init,
- ctx->drvdata->hash_len_sz);
+ ctx->hash_len);
}

if (ctx->hash_mode != DRV_HASH_NULL) {
@@ -367,7 +368,7 @@ static int cc_fin_hmac(struct cc_hw_desc *desc, struct ahash_request *req,
set_cipher_mode(&desc[idx], ctx->hw_mode);
set_din_sram(&desc[idx],
cc_digest_len_addr(ctx->drvdata, ctx->hash_mode),
- ctx->drvdata->hash_len_sz);
+ ctx->hash_len);
set_cipher_config1(&desc[idx], HASH_PADDING_ENABLED);
set_flow_mode(&desc[idx], S_DIN_to_HASH);
set_setup_mode(&desc[idx], SETUP_LOAD_KEY0);
@@ -459,9 +460,9 @@ static int cc_hash_digest(struct ahash_request *req)
if (is_hmac) {
set_din_type(&desc[idx], DMA_DLLI,
state->digest_bytes_len_dma_addr,
- ctx->drvdata->hash_len_sz, NS_BIT);
+ ctx->hash_len, NS_BIT);
} else {
- set_din_const(&desc[idx], 0, ctx->drvdata->hash_len_sz);
+ set_din_const(&desc[idx], 0, ctx->hash_len);
if (nbytes)
set_cipher_config1(&desc[idx], HASH_PADDING_ENABLED);
else
@@ -478,7 +479,7 @@ static int cc_hash_digest(struct ahash_request *req)
hw_desc_init(&desc[idx]);
set_cipher_mode(&desc[idx], ctx->hw_mode);
set_dout_dlli(&desc[idx], state->digest_buff_dma_addr,
- ctx->drvdata->hash_len_sz, NS_BIT, 0);
+ ctx->hash_len, NS_BIT, 0);
set_flow_mode(&desc[idx], S_HASH_to_DOUT);
set_setup_mode(&desc[idx], SETUP_WRITE_STATE1);
set_cipher_do(&desc[idx], DO_PAD);
@@ -516,7 +517,7 @@ static int cc_restore_hash(struct cc_hw_desc *desc, struct cc_hash_ctx *ctx,
set_cipher_mode(&desc[idx], ctx->hw_mode);
set_cipher_config1(&desc[idx], HASH_PADDING_DISABLED);
set_din_type(&desc[idx], DMA_DLLI, state->digest_bytes_len_dma_addr,
- ctx->drvdata->hash_len_sz, NS_BIT);
+ ctx->hash_len, NS_BIT);
set_flow_mode(&desc[idx], S_DIN_to_HASH);
set_setup_mode(&desc[idx], SETUP_LOAD_KEY0);
idx++;
@@ -587,7 +588,7 @@ static int cc_hash_update(struct ahash_request *req)
hw_desc_init(&desc[idx]);
set_cipher_mode(&desc[idx], ctx->hw_mode);
set_dout_dlli(&desc[idx], state->digest_bytes_len_dma_addr,
- ctx->drvdata->hash_len_sz, NS_BIT, 1);
+ ctx->hash_len, NS_BIT, 1);
set_queue_last_ind(ctx->drvdata, &desc[idx]);
set_flow_mode(&desc[idx], S_HASH_to_DOUT);
set_setup_mode(&desc[idx], SETUP_WRITE_STATE1);
@@ -651,7 +652,7 @@ static int cc_do_finup(struct ahash_request *req, bool update)
set_cipher_do(&desc[idx], DO_PAD);
set_cipher_mode(&desc[idx], ctx->hw_mode);
set_dout_dlli(&desc[idx], state->digest_bytes_len_dma_addr,
- ctx->drvdata->hash_len_sz, NS_BIT, 0);
+ ctx->hash_len, NS_BIT, 0);
set_setup_mode(&desc[idx], SETUP_WRITE_STATE1);
set_flow_mode(&desc[idx], S_HASH_to_DOUT);
idx++;
@@ -749,7 +750,7 @@ static int cc_hash_setkey(struct crypto_ahash *ahash, const u8 *key,
/* Load the hash current length*/
hw_desc_init(&desc[idx]);
set_cipher_mode(&desc[idx], ctx->hw_mode);
- set_din_const(&desc[idx], 0, ctx->drvdata->hash_len_sz);
+ set_din_const(&desc[idx], 0, ctx->hash_len);
set_cipher_config1(&desc[idx], HASH_PADDING_ENABLED);
set_flow_mode(&desc[idx], S_DIN_to_HASH);
set_setup_mode(&desc[idx], SETUP_LOAD_KEY0);
@@ -831,7 +832,7 @@ static int cc_hash_setkey(struct crypto_ahash *ahash, const u8 *key,
/* Load the hash current length*/
hw_desc_init(&desc[idx]);
set_cipher_mode(&desc[idx], ctx->hw_mode);
- set_din_const(&desc[idx], 0, ctx->drvdata->hash_len_sz);
+ set_din_const(&desc[idx], 0, ctx->hash_len);
set_flow_mode(&desc[idx], S_DIN_to_HASH);
set_setup_mode(&desc[idx], SETUP_LOAD_KEY0);
idx++;
@@ -1069,6 +1070,13 @@ static int cc_alloc_ctx(struct cc_hash_ctx *ctx)
return -ENOMEM;
}

+static int cc_get_hash_len(struct crypto_tfm *tfm)
+{
+ struct cc_hash_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ return cc_get_default_hash_len(ctx->drvdata);
+}
+
static int cc_cra_init(struct crypto_tfm *tfm)
{
struct cc_hash_ctx *ctx = crypto_tfm_ctx(tfm);
@@ -1086,7 +1094,7 @@ static int cc_cra_init(struct crypto_tfm *tfm)
ctx->hw_mode = cc_alg->hw_mode;
ctx->inter_digestsize = cc_alg->inter_digestsize;
ctx->drvdata = cc_alg->drvdata;
-
+ ctx->hash_len = cc_get_hash_len(tfm);
return cc_alloc_ctx(ctx);
}

@@ -1465,8 +1473,8 @@ static int cc_hash_export(struct ahash_request *req, void *out)
memcpy(out, state->digest_buff, ctx->inter_digestsize);
out += ctx->inter_digestsize;

- memcpy(out, state->digest_bytes_len, ctx->drvdata->hash_len_sz);
- out += ctx->drvdata->hash_len_sz;
+ memcpy(out, state->digest_bytes_len, ctx->hash_len);
+ out += ctx->hash_len;

memcpy(out, &curr_buff_cnt, sizeof(u32));
out += sizeof(u32);
@@ -1494,8 +1502,8 @@ static int cc_hash_import(struct ahash_request *req, const void *in)
memcpy(state->digest_buff, in, ctx->inter_digestsize);
in += ctx->inter_digestsize;

- memcpy(state->digest_bytes_len, in, ctx->drvdata->hash_len_sz);
- in += ctx->drvdata->hash_len_sz;
+ memcpy(state->digest_bytes_len, in, ctx->hash_len);
+ in += ctx->hash_len;

/* Sanity check the data as much as possible */
memcpy(&tmp, in, sizeof(u32));
--
2.7.4


2018-10-18 12:59:00

by Yael Chemla

[permalink] [raw]
Subject: [PATCH 2/3] crypto: ccree: modify set_cipher_mode usage from cc_hash

Encapsulate the set_cipher_mode() call within another API, in
preparation for the hash-specific behavior needed in later patches
when SM3 is introduced.

Signed-off-by: Yael Chemla <[email protected]>
---
drivers/crypto/ccree/cc_hash.c | 18 +++++++++---------
drivers/crypto/ccree/cc_hw_queue_defs.h | 14 ++++++++++++++
2 files changed, 23 insertions(+), 9 deletions(-)

diff --git a/drivers/crypto/ccree/cc_hash.c b/drivers/crypto/ccree/cc_hash.c
index 7af5b61..adcd9df 100644
--- a/drivers/crypto/ccree/cc_hash.c
+++ b/drivers/crypto/ccree/cc_hash.c
@@ -322,7 +322,7 @@ static int cc_fin_result(struct cc_hw_desc *desc, struct ahash_request *req,

/* Get final MAC result */
hw_desc_init(&desc[idx]);
- set_cipher_mode(&desc[idx], ctx->hw_mode);
+ set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode);
/* TODO */
set_dout_dlli(&desc[idx], state->digest_result_dma_addr, digestsize,
NS_BIT, 1);
@@ -441,7 +441,7 @@ static int cc_hash_digest(struct ahash_request *req)
* digest
*/
hw_desc_init(&desc[idx]);
- set_cipher_mode(&desc[idx], ctx->hw_mode);
+ set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode);
if (is_hmac) {
set_din_type(&desc[idx], DMA_DLLI, state->digest_buff_dma_addr,
ctx->inter_digestsize, NS_BIT);
@@ -455,7 +455,7 @@ static int cc_hash_digest(struct ahash_request *req)

/* Load the hash current length */
hw_desc_init(&desc[idx]);
- set_cipher_mode(&desc[idx], ctx->hw_mode);
+ set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode);

if (is_hmac) {
set_din_type(&desc[idx], DMA_DLLI,
@@ -505,7 +505,7 @@ static int cc_restore_hash(struct cc_hw_desc *desc, struct cc_hash_ctx *ctx,
{
/* Restore hash digest */
hw_desc_init(&desc[idx]);
- set_cipher_mode(&desc[idx], ctx->hw_mode);
+ set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode);
set_din_type(&desc[idx], DMA_DLLI, state->digest_buff_dma_addr,
ctx->inter_digestsize, NS_BIT);
set_flow_mode(&desc[idx], S_DIN_to_HASH);
@@ -514,7 +514,7 @@ static int cc_restore_hash(struct cc_hw_desc *desc, struct cc_hash_ctx *ctx,

/* Restore hash current length */
hw_desc_init(&desc[idx]);
- set_cipher_mode(&desc[idx], ctx->hw_mode);
+ set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode);
set_cipher_config1(&desc[idx], HASH_PADDING_DISABLED);
set_din_type(&desc[idx], DMA_DLLI, state->digest_bytes_len_dma_addr,
ctx->hash_len, NS_BIT);
@@ -577,7 +577,7 @@ static int cc_hash_update(struct ahash_request *req)

/* store the hash digest result in context */
hw_desc_init(&desc[idx]);
- set_cipher_mode(&desc[idx], ctx->hw_mode);
+ set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode);
set_dout_dlli(&desc[idx], state->digest_buff_dma_addr,
ctx->inter_digestsize, NS_BIT, 0);
set_flow_mode(&desc[idx], S_HASH_to_DOUT);
@@ -586,7 +586,7 @@ static int cc_hash_update(struct ahash_request *req)

/* store current hash length in context */
hw_desc_init(&desc[idx]);
- set_cipher_mode(&desc[idx], ctx->hw_mode);
+ set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode);
set_dout_dlli(&desc[idx], state->digest_bytes_len_dma_addr,
ctx->hash_len, NS_BIT, 1);
set_queue_last_ind(ctx->drvdata, &desc[idx]);
@@ -650,7 +650,7 @@ static int cc_do_finup(struct ahash_request *req, bool update)
/* Pad the hash */
hw_desc_init(&desc[idx]);
set_cipher_do(&desc[idx], DO_PAD);
- set_cipher_mode(&desc[idx], ctx->hw_mode);
+ set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode);
set_dout_dlli(&desc[idx], state->digest_bytes_len_dma_addr,
ctx->hash_len, NS_BIT, 0);
set_setup_mode(&desc[idx], SETUP_WRITE_STATE1);
@@ -2035,7 +2035,7 @@ static void cc_setup_xcbc(struct ahash_request *areq, struct cc_hw_desc desc[],
XCBC_MAC_K1_OFFSET),
CC_AES_128_BIT_KEY_SIZE, NS_BIT);
set_setup_mode(&desc[idx], SETUP_LOAD_KEY0);
- set_cipher_mode(&desc[idx], DRV_CIPHER_XCBC_MAC);
+ set_hash_cipher_mode(&desc[idx], DRV_CIPHER_XCBC_MAC, ctx->hash_mode);
set_cipher_config0(&desc[idx], DESC_DIRECTION_ENCRYPT_ENCRYPT);
set_key_size_aes(&desc[idx], CC_AES_128_BIT_KEY_SIZE);
set_flow_mode(&desc[idx], S_DIN_to_AES);
diff --git a/drivers/crypto/ccree/cc_hw_queue_defs.h b/drivers/crypto/ccree/cc_hw_queue_defs.h
index 45985b9..b367b67 100644
--- a/drivers/crypto/ccree/cc_hw_queue_defs.h
+++ b/drivers/crypto/ccree/cc_hw_queue_defs.h
@@ -455,6 +455,20 @@ static inline void set_cipher_mode(struct cc_hw_desc *pdesc, int mode)
}

/*
+ * Set the cipher mode for hash algorithms.
+ *
+ * @pdesc: pointer HW descriptor struct
+ * @cipher_mode: Any one of the modes defined in [CC7x-DESC]
+ * @hash_mode: specifies which hash is being handled
+ */
+static inline void set_hash_cipher_mode(struct cc_hw_desc *pdesc,
+ enum drv_cipher_mode cipher_mode,
+ enum drv_hash_mode hash_mode)
+{
+ set_cipher_mode(pdesc, cipher_mode);
+}
+
+/*
* Set the cipher configuration fields.
*
* @pdesc: pointer HW descriptor struct
--
2.7.4


2018-10-18 12:59:50

by Yael Chemla

[permalink] [raw]
Subject: [PATCH 3/3] crypto: ccree: add SM3 support

Add support for the SM3 hash algorithm in CryptoCell 713.

Signed-off-by: Yael Chemla <[email protected]>
---
drivers/crypto/Kconfig | 1 +
drivers/crypto/ccree/cc_crypto_ctx.h | 4 +-
drivers/crypto/ccree/cc_hash.c | 119 ++++++++++++++++++++++++++------
drivers/crypto/ccree/cc_hw_queue_defs.h | 13 ++++
4 files changed, 113 insertions(+), 24 deletions(-)

diff --git a/drivers/crypto/Kconfig b/drivers/crypto/Kconfig
index a8c4ce0..1588cd4 100644
--- a/drivers/crypto/Kconfig
+++ b/drivers/crypto/Kconfig
@@ -751,6 +751,7 @@ config CRYPTO_DEV_CCREE
select CRYPTO_ECB
select CRYPTO_CTR
select CRYPTO_XTS
+ select CRYPTO_SM3
help
Say 'Y' to enable a driver for the REE interface of the Arm
TrustZone CryptoCell family of processors. Currently the
diff --git a/drivers/crypto/ccree/cc_crypto_ctx.h b/drivers/crypto/ccree/cc_crypto_ctx.h
index e032544..c8dac27 100644
--- a/drivers/crypto/ccree/cc_crypto_ctx.h
+++ b/drivers/crypto/ccree/cc_crypto_ctx.h
@@ -115,7 +115,8 @@ enum drv_hash_mode {
DRV_HASH_CBC_MAC = 6,
DRV_HASH_XCBC_MAC = 7,
DRV_HASH_CMAC = 8,
- DRV_HASH_MODE_NUM = 9,
+ DRV_HASH_SM3 = 9,
+ DRV_HASH_MODE_NUM = 10,
DRV_HASH_RESERVE32B = S32_MAX
};

@@ -127,6 +128,7 @@ enum drv_hash_hw_mode {
DRV_HASH_HW_SHA512 = 4,
DRV_HASH_HW_SHA384 = 12,
DRV_HASH_HW_GHASH = 6,
+ DRV_HASH_HW_SM3 = 14,
DRV_HASH_HW_RESERVE32B = S32_MAX
};

diff --git a/drivers/crypto/ccree/cc_hash.c b/drivers/crypto/ccree/cc_hash.c
index adcd9df..c80c9ae 100644
--- a/drivers/crypto/ccree/cc_hash.c
+++ b/drivers/crypto/ccree/cc_hash.c
@@ -6,6 +6,7 @@
#include <crypto/algapi.h>
#include <crypto/hash.h>
#include <crypto/md5.h>
+#include <crypto/sm3.h>
#include <crypto/internal/hash.h>

#include "cc_driver.h"
@@ -16,6 +17,7 @@

#define CC_MAX_HASH_SEQ_LEN 12
#define CC_MAX_OPAD_KEYS_SIZE CC_MAX_HASH_BLCK_SIZE
+#define CC_SM3_HASH_LEN_SIZE 8

struct cc_hash_handle {
cc_sram_addr_t digest_len_sram_addr; /* const value in SRAM*/
@@ -43,6 +45,9 @@ static u64 sha384_init[] = {
static u64 sha512_init[] = {
SHA512_H7, SHA512_H6, SHA512_H5, SHA512_H4,
SHA512_H3, SHA512_H2, SHA512_H1, SHA512_H0 };
+static const u32 sm3_init[] = {
+ SM3_IVH, SM3_IVG, SM3_IVF, SM3_IVE,
+ SM3_IVD, SM3_IVC, SM3_IVB, SM3_IVA };

static void cc_setup_xcbc(struct ahash_request *areq, struct cc_hw_desc desc[],
unsigned int *seq_size);
@@ -1074,7 +1079,10 @@ static int cc_get_hash_len(struct crypto_tfm *tfm)
{
struct cc_hash_ctx *ctx = crypto_tfm_ctx(tfm);

- return cc_get_default_hash_len(ctx->drvdata);
+ if (ctx->hash_mode == DRV_HASH_SM3)
+ return CC_SM3_HASH_LEN_SIZE;
+ else
+ return cc_get_default_hash_len(ctx->drvdata);
}

static int cc_cra_init(struct crypto_tfm *tfm)
@@ -1523,6 +1531,7 @@ struct cc_hash_template {
char mac_name[CRYPTO_MAX_ALG_NAME];
char mac_driver_name[CRYPTO_MAX_ALG_NAME];
unsigned int blocksize;
+ bool is_mac;
bool synchronize;
struct ahash_alg template_ahash;
int hash_mode;
@@ -1544,6 +1553,7 @@ static struct cc_hash_template driver_hash[] = {
.mac_name = "hmac(sha1)",
.mac_driver_name = "hmac-sha1-ccree",
.blocksize = SHA1_BLOCK_SIZE,
+ .is_mac = true,
.synchronize = false,
.template_ahash = {
.init = cc_hash_init,
@@ -1570,6 +1580,7 @@ static struct cc_hash_template driver_hash[] = {
.mac_name = "hmac(sha256)",
.mac_driver_name = "hmac-sha256-ccree",
.blocksize = SHA256_BLOCK_SIZE,
+ .is_mac = true,
.template_ahash = {
.init = cc_hash_init,
.update = cc_hash_update,
@@ -1595,6 +1606,7 @@ static struct cc_hash_template driver_hash[] = {
.mac_name = "hmac(sha224)",
.mac_driver_name = "hmac-sha224-ccree",
.blocksize = SHA224_BLOCK_SIZE,
+ .is_mac = true,
.template_ahash = {
.init = cc_hash_init,
.update = cc_hash_update,
@@ -1620,6 +1632,7 @@ static struct cc_hash_template driver_hash[] = {
.mac_name = "hmac(sha384)",
.mac_driver_name = "hmac-sha384-ccree",
.blocksize = SHA384_BLOCK_SIZE,
+ .is_mac = true,
.template_ahash = {
.init = cc_hash_init,
.update = cc_hash_update,
@@ -1645,6 +1658,7 @@ static struct cc_hash_template driver_hash[] = {
.mac_name = "hmac(sha512)",
.mac_driver_name = "hmac-sha512-ccree",
.blocksize = SHA512_BLOCK_SIZE,
+ .is_mac = true,
.template_ahash = {
.init = cc_hash_init,
.update = cc_hash_update,
@@ -1670,6 +1684,7 @@ static struct cc_hash_template driver_hash[] = {
.mac_name = "hmac(md5)",
.mac_driver_name = "hmac-md5-ccree",
.blocksize = MD5_HMAC_BLOCK_SIZE,
+ .is_mac = true,
.template_ahash = {
.init = cc_hash_init,
.update = cc_hash_update,
@@ -1690,9 +1705,34 @@ static struct cc_hash_template driver_hash[] = {
.min_hw_rev = CC_HW_REV_630,
},
{
+ .name = "sm3",
+ .driver_name = "sm3-ccree",
+ .blocksize = SM3_BLOCK_SIZE,
+ .is_mac = false,
+ .template_ahash = {
+ .init = cc_hash_init,
+ .update = cc_hash_update,
+ .final = cc_hash_final,
+ .finup = cc_hash_finup,
+ .digest = cc_hash_digest,
+ .export = cc_hash_export,
+ .import = cc_hash_import,
+ .setkey = cc_hash_setkey,
+ .halg = {
+ .digestsize = SM3_DIGEST_SIZE,
+ .statesize = CC_STATE_SIZE(SM3_DIGEST_SIZE),
+ },
+ },
+ .hash_mode = DRV_HASH_SM3,
+ .hw_mode = DRV_HASH_HW_SM3,
+ .inter_digestsize = SM3_DIGEST_SIZE,
+ .min_hw_rev = CC_HW_REV_713,
+ },
+ {
.mac_name = "xcbc(aes)",
.mac_driver_name = "xcbc-aes-ccree",
.blocksize = AES_BLOCK_SIZE,
+ .is_mac = true,
.template_ahash = {
.init = cc_hash_init,
.update = cc_mac_update,
@@ -1716,6 +1756,7 @@ static struct cc_hash_template driver_hash[] = {
.mac_name = "cmac(aes)",
.mac_driver_name = "cmac-aes-ccree",
.blocksize = AES_BLOCK_SIZE,
+ .is_mac = true,
.template_ahash = {
.init = cc_hash_init,
.update = cc_mac_update,
@@ -1788,6 +1829,7 @@ int cc_init_hash_sram(struct cc_drvdata *drvdata)
unsigned int larval_seq_len = 0;
struct cc_hw_desc larval_seq[CC_DIGEST_SIZE_MAX / sizeof(u32)];
bool large_sha_supported = (drvdata->hw_rev >= CC_HW_REV_712);
+ bool sm3_supported = (drvdata->hw_rev >= CC_HW_REV_713);
int rc = 0;

/* Copy-to-sram digest-len */
@@ -1853,6 +1895,17 @@ int cc_init_hash_sram(struct cc_drvdata *drvdata)
sram_buff_ofs += sizeof(sha256_init);
larval_seq_len = 0;

+ if (sm3_supported) {
+ cc_set_sram_desc(sm3_init, sram_buff_ofs,
+ ARRAY_SIZE(sm3_init), larval_seq,
+ &larval_seq_len);
+ rc = send_request_init(drvdata, larval_seq, larval_seq_len);
+ if (rc)
+ goto init_digest_const_err;
+ sram_buff_ofs += sizeof(sm3_init);
+ larval_seq_len = 0;
+ }
+
if (large_sha_supported) {
cc_set_sram_desc((u32 *)sha384_init, sram_buff_ofs,
(ARRAY_SIZE(sha384_init) * 2), larval_seq,
@@ -1919,6 +1972,9 @@ int cc_hash_alloc(struct cc_drvdata *drvdata)
sizeof(sha224_init) +
sizeof(sha256_init);

+ if (drvdata->hw_rev >= CC_HW_REV_713)
+ sram_size_to_alloc += sizeof(sm3_init);
+
if (drvdata->hw_rev >= CC_HW_REV_712)
sram_size_to_alloc += sizeof(digest_len_sha512_init) +
sizeof(sha384_init) + sizeof(sha512_init);
@@ -1948,27 +2004,28 @@ int cc_hash_alloc(struct cc_drvdata *drvdata)
/* We either support both HASH and MAC or none */
if (driver_hash[alg].min_hw_rev > drvdata->hw_rev)
continue;
-
- /* register hmac version */
- t_alg = cc_alloc_hash_alg(&driver_hash[alg], dev, true);
- if (IS_ERR(t_alg)) {
- rc = PTR_ERR(t_alg);
- dev_err(dev, "%s alg allocation failed\n",
- driver_hash[alg].driver_name);
- goto fail;
- }
- t_alg->drvdata = drvdata;
-
- rc = crypto_register_ahash(&t_alg->ahash_alg);
- if (rc) {
- dev_err(dev, "%s alg registration failed\n",
- driver_hash[alg].driver_name);
- kfree(t_alg);
- goto fail;
- } else {
- list_add_tail(&t_alg->entry, &hash_handle->hash_list);
+ if (driver_hash[alg].is_mac) {
+ /* register hmac version */
+ t_alg = cc_alloc_hash_alg(&driver_hash[alg], dev, true);
+ if (IS_ERR(t_alg)) {
+ rc = PTR_ERR(t_alg);
+ dev_err(dev, "%s alg allocation failed\n",
+ driver_hash[alg].driver_name);
+ goto fail;
+ }
+ t_alg->drvdata = drvdata;
+
+ rc = crypto_register_ahash(&t_alg->ahash_alg);
+ if (rc) {
+ dev_err(dev, "%s alg registration failed\n",
+ driver_hash[alg].driver_name);
+ kfree(t_alg);
+ goto fail;
+ } else {
+ list_add_tail(&t_alg->entry,
+ &hash_handle->hash_list);
+ }
}
-
if (hw_mode == DRV_CIPHER_XCBC_MAC ||
hw_mode == DRV_CIPHER_CMAC)
continue;
@@ -2170,6 +2227,8 @@ static const void *cc_larval_digest(struct device *dev, u32 mode)
return sha384_init;
case DRV_HASH_SHA512:
return sha512_init;
+ case DRV_HASH_SM3:
+ return sm3_init;
default:
dev_err(dev, "Invalid hash mode (%d)\n", mode);
return md5_init;
@@ -2190,6 +2249,8 @@ cc_sram_addr_t cc_larval_digest_addr(void *drvdata, u32 mode)
struct cc_drvdata *_drvdata = (struct cc_drvdata *)drvdata;
struct cc_hash_handle *hash_handle = _drvdata->hash_handle;
struct device *dev = drvdata_to_dev(_drvdata);
+ bool sm3_supported = (_drvdata->hw_rev >= CC_HW_REV_713);
+ cc_sram_addr_t addr;

switch (mode) {
case DRV_HASH_NULL:
@@ -2208,19 +2269,31 @@ cc_sram_addr_t cc_larval_digest_addr(void *drvdata, u32 mode)
sizeof(md5_init) +
sizeof(sha1_init) +
sizeof(sha224_init));
- case DRV_HASH_SHA384:
+ case DRV_HASH_SM3:
return (hash_handle->larval_digest_sram_addr +
sizeof(md5_init) +
sizeof(sha1_init) +
sizeof(sha224_init) +
sizeof(sha256_init));
+ case DRV_HASH_SHA384:
+ addr = (hash_handle->larval_digest_sram_addr +
+ sizeof(md5_init) +
+ sizeof(sha1_init) +
+ sizeof(sha224_init) +
+ sizeof(sha256_init));
+ if (sm3_supported)
+ addr += sizeof(sm3_init);
+ return addr;
case DRV_HASH_SHA512:
- return (hash_handle->larval_digest_sram_addr +
+ addr = (hash_handle->larval_digest_sram_addr +
sizeof(md5_init) +
sizeof(sha1_init) +
sizeof(sha224_init) +
sizeof(sha256_init) +
sizeof(sha384_init));
+ if (sm3_supported)
+ addr += sizeof(sm3_init);
+ return addr;
default:
dev_err(dev, "Invalid hash mode (%d)\n", mode);
}
diff --git a/drivers/crypto/ccree/cc_hw_queue_defs.h b/drivers/crypto/ccree/cc_hw_queue_defs.h
index b367b67..e1783ea 100644
--- a/drivers/crypto/ccree/cc_hw_queue_defs.h
+++ b/drivers/crypto/ccree/cc_hw_queue_defs.h
@@ -42,6 +42,7 @@
#define WORD3_QUEUE_LAST_IND CC_GENMASK(3, QUEUE_LAST_IND)
#define WORD4_ACK_NEEDED CC_GENMASK(4, ACK_NEEDED)
#define WORD4_AES_SEL_N_HASH CC_GENMASK(4, AES_SEL_N_HASH)
+#define WORD4_AES_XOR_CRYPTO_KEY CC_GENMASK(4, AES_XOR_CRYPTO_KEY)
#define WORD4_BYTES_SWAP CC_GENMASK(4, BYTES_SWAP)
#define WORD4_CIPHER_CONF0 CC_GENMASK(4, CIPHER_CONF0)
#define WORD4_CIPHER_CONF1 CC_GENMASK(4, CIPHER_CONF1)
@@ -394,6 +395,16 @@ static inline void set_aes_not_hash_mode(struct cc_hw_desc *pdesc)
}

/*
+ * Set the AES XOR crypto key; in some scenarios this selects the SM3 engine
+ *
+ * @pdesc: pointer HW descriptor struct
+ */
+static inline void set_aes_xor_crypto_key(struct cc_hw_desc *pdesc)
+{
+ pdesc->word[4] |= FIELD_PREP(WORD4_AES_XOR_CRYPTO_KEY, 1);
+}
+
+/*
* Set the DOUT field of a HW descriptors to SRAM mode
* Note: No need to check SRAM alignment since host requests do not use SRAM and
* adaptor will enforce alignment check.
@@ -466,6 +477,8 @@ static inline void set_hash_cipher_mode(struct cc_hw_desc *pdesc,
enum drv_hash_mode hash_mode)
{
set_cipher_mode(pdesc, cipher_mode);
+ if (hash_mode == DRV_HASH_SM3)
+ set_aes_xor_crypto_key(pdesc);
}

/*
--
2.7.4


2018-10-18 19:59:08

by kernel test robot

[permalink] [raw]
Subject: Re: [PATCH 3/3] crypto: ccree: add SM3 support

Hi Yael,

Thank you for the patch! Yet something to improve:

[auto build test ERROR on cryptodev/master]
[also build test ERROR on v4.19-rc8 next-20181018]
[if your patch is applied to the wrong git tree, please drop us a note to help improve the system]

url: https://github.com/0day-ci/linux/commits/Yael-Chemla/crypto-ccree-add-SM3-support/20181019-033131
base: https://git.kernel.org/pub/scm/linux/kernel/git/herbert/cryptodev-2.6.git master
config: i386-randconfig-x004-201841 (attached as .config)
compiler: gcc-7 (Debian 7.3.0-1) 7.3.0
reproduce:
# save the attached .config to linux build tree
make ARCH=i386

All error/warnings (new ones prefixed by >>):

>> drivers/crypto/ccree/cc_hash.c:1729:17: error: 'CC_HW_REV_713' undeclared here (not in a function); did you mean 'CC_HW_REV_712'?
.min_hw_rev = CC_HW_REV_713,
^~~~~~~~~~~~~
CC_HW_REV_712
drivers/crypto/ccree/cc_hash.c: In function 'cc_init_hash_sram':
>> drivers/crypto/ccree/cc_hash.c:1832:40: warning: comparison between pointer and integer
bool sm3_supported = (drvdata->hw_rev >= CC_HW_REV_713);
^~
drivers/crypto/ccree/cc_hash.c: In function 'cc_hash_alloc':
drivers/crypto/ccree/cc_hash.c:1975:22: warning: comparison between pointer and integer
if (drvdata->hw_rev >= CC_HW_REV_713)
^~
drivers/crypto/ccree/cc_hash.c: In function 'cc_larval_digest_addr':
drivers/crypto/ccree/cc_hash.c:2252:41: warning: comparison between pointer and integer
bool sm3_supported = (_drvdata->hw_rev >= CC_HW_REV_713);
^~

vim +1729 drivers/crypto/ccree/cc_hash.c

1543
1544 #define CC_STATE_SIZE(_x) \
1545 ((_x) + HASH_MAX_LEN_SIZE + CC_MAX_HASH_BLCK_SIZE + (2 * sizeof(u32)))
1546
1547 /* hash descriptors */
1548 static struct cc_hash_template driver_hash[] = {
1549 //Asynchronize hash template
1550 {
1551 .name = "sha1",
1552 .driver_name = "sha1-ccree",
1553 .mac_name = "hmac(sha1)",
1554 .mac_driver_name = "hmac-sha1-ccree",
1555 .blocksize = SHA1_BLOCK_SIZE,
1556 .is_mac = true,
1557 .synchronize = false,
1558 .template_ahash = {
1559 .init = cc_hash_init,
1560 .update = cc_hash_update,
1561 .final = cc_hash_final,
1562 .finup = cc_hash_finup,
1563 .digest = cc_hash_digest,
1564 .export = cc_hash_export,
1565 .import = cc_hash_import,
1566 .setkey = cc_hash_setkey,
1567 .halg = {
1568 .digestsize = SHA1_DIGEST_SIZE,
1569 .statesize = CC_STATE_SIZE(SHA1_DIGEST_SIZE),
1570 },
1571 },
1572 .hash_mode = DRV_HASH_SHA1,
1573 .hw_mode = DRV_HASH_HW_SHA1,
1574 .inter_digestsize = SHA1_DIGEST_SIZE,
1575 .min_hw_rev = CC_HW_REV_630,
1576 },
1577 {
1578 .name = "sha256",
1579 .driver_name = "sha256-ccree",
1580 .mac_name = "hmac(sha256)",
1581 .mac_driver_name = "hmac-sha256-ccree",
1582 .blocksize = SHA256_BLOCK_SIZE,
1583 .is_mac = true,
1584 .template_ahash = {
1585 .init = cc_hash_init,
1586 .update = cc_hash_update,
1587 .final = cc_hash_final,
1588 .finup = cc_hash_finup,
1589 .digest = cc_hash_digest,
1590 .export = cc_hash_export,
1591 .import = cc_hash_import,
1592 .setkey = cc_hash_setkey,
1593 .halg = {
1594 .digestsize = SHA256_DIGEST_SIZE,
1595 .statesize = CC_STATE_SIZE(SHA256_DIGEST_SIZE)
1596 },
1597 },
1598 .hash_mode = DRV_HASH_SHA256,
1599 .hw_mode = DRV_HASH_HW_SHA256,
1600 .inter_digestsize = SHA256_DIGEST_SIZE,
1601 .min_hw_rev = CC_HW_REV_630,
1602 },
1603 {
1604 .name = "sha224",
1605 .driver_name = "sha224-ccree",
1606 .mac_name = "hmac(sha224)",
1607 .mac_driver_name = "hmac-sha224-ccree",
1608 .blocksize = SHA224_BLOCK_SIZE,
1609 .is_mac = true,
1610 .template_ahash = {
1611 .init = cc_hash_init,
1612 .update = cc_hash_update,
1613 .final = cc_hash_final,
1614 .finup = cc_hash_finup,
1615 .digest = cc_hash_digest,
1616 .export = cc_hash_export,
1617 .import = cc_hash_import,
1618 .setkey = cc_hash_setkey,
1619 .halg = {
1620 .digestsize = SHA224_DIGEST_SIZE,
1621 .statesize = CC_STATE_SIZE(SHA224_DIGEST_SIZE),
1622 },
1623 },
1624 .hash_mode = DRV_HASH_SHA224,
1625 .hw_mode = DRV_HASH_HW_SHA256,
1626 .inter_digestsize = SHA256_DIGEST_SIZE,
1627 .min_hw_rev = CC_HW_REV_630,
1628 },
1629 {
1630 .name = "sha384",
1631 .driver_name = "sha384-ccree",
1632 .mac_name = "hmac(sha384)",
1633 .mac_driver_name = "hmac-sha384-ccree",
1634 .blocksize = SHA384_BLOCK_SIZE,
1635 .is_mac = true,
1636 .template_ahash = {
1637 .init = cc_hash_init,
1638 .update = cc_hash_update,
1639 .final = cc_hash_final,
1640 .finup = cc_hash_finup,
1641 .digest = cc_hash_digest,
1642 .export = cc_hash_export,
1643 .import = cc_hash_import,
1644 .setkey = cc_hash_setkey,
1645 .halg = {
1646 .digestsize = SHA384_DIGEST_SIZE,
1647 .statesize = CC_STATE_SIZE(SHA384_DIGEST_SIZE),
1648 },
1649 },
1650 .hash_mode = DRV_HASH_SHA384,
1651 .hw_mode = DRV_HASH_HW_SHA512,
1652 .inter_digestsize = SHA512_DIGEST_SIZE,
1653 .min_hw_rev = CC_HW_REV_712,
1654 },
1655 {
1656 .name = "sha512",
1657 .driver_name = "sha512-ccree",
1658 .mac_name = "hmac(sha512)",
1659 .mac_driver_name = "hmac-sha512-ccree",
1660 .blocksize = SHA512_BLOCK_SIZE,
1661 .is_mac = true,
1662 .template_ahash = {
1663 .init = cc_hash_init,
1664 .update = cc_hash_update,
1665 .final = cc_hash_final,
1666 .finup = cc_hash_finup,
1667 .digest = cc_hash_digest,
1668 .export = cc_hash_export,
1669 .import = cc_hash_import,
1670 .setkey = cc_hash_setkey,
1671 .halg = {
1672 .digestsize = SHA512_DIGEST_SIZE,
1673 .statesize = CC_STATE_SIZE(SHA512_DIGEST_SIZE),
1674 },
1675 },
1676 .hash_mode = DRV_HASH_SHA512,
1677 .hw_mode = DRV_HASH_HW_SHA512,
1678 .inter_digestsize = SHA512_DIGEST_SIZE,
1679 .min_hw_rev = CC_HW_REV_712,
1680 },
1681 {
1682 .name = "md5",
1683 .driver_name = "md5-ccree",
1684 .mac_name = "hmac(md5)",
1685 .mac_driver_name = "hmac-md5-ccree",
1686 .blocksize = MD5_HMAC_BLOCK_SIZE,
1687 .is_mac = true,
1688 .template_ahash = {
1689 .init = cc_hash_init,
1690 .update = cc_hash_update,
1691 .final = cc_hash_final,
1692 .finup = cc_hash_finup,
1693 .digest = cc_hash_digest,
1694 .export = cc_hash_export,
1695 .import = cc_hash_import,
1696 .setkey = cc_hash_setkey,
1697 .halg = {
1698 .digestsize = MD5_DIGEST_SIZE,
1699 .statesize = CC_STATE_SIZE(MD5_DIGEST_SIZE),
1700 },
1701 },
1702 .hash_mode = DRV_HASH_MD5,
1703 .hw_mode = DRV_HASH_HW_MD5,
1704 .inter_digestsize = MD5_DIGEST_SIZE,
1705 .min_hw_rev = CC_HW_REV_630,
1706 },
1707 {
1708 .name = "sm3",
1709 .driver_name = "sm3-ccree",
1710 .blocksize = SM3_BLOCK_SIZE,
1711 .is_mac = false,
1712 .template_ahash = {
1713 .init = cc_hash_init,
1714 .update = cc_hash_update,
1715 .final = cc_hash_final,
1716 .finup = cc_hash_finup,
1717 .digest = cc_hash_digest,
1718 .export = cc_hash_export,
1719 .import = cc_hash_import,
1720 .setkey = cc_hash_setkey,
1721 .halg = {
1722 .digestsize = SM3_DIGEST_SIZE,
1723 .statesize = CC_STATE_SIZE(SM3_DIGEST_SIZE),
1724 },
1725 },
1726 .hash_mode = DRV_HASH_SM3,
1727 .hw_mode = DRV_HASH_HW_SM3,
1728 .inter_digestsize = SM3_DIGEST_SIZE,
> 1729 .min_hw_rev = CC_HW_REV_713,
1730 },
1731 {
1732 .mac_name = "xcbc(aes)",
1733 .mac_driver_name = "xcbc-aes-ccree",
1734 .blocksize = AES_BLOCK_SIZE,
1735 .is_mac = true,
1736 .template_ahash = {
1737 .init = cc_hash_init,
1738 .update = cc_mac_update,
1739 .final = cc_mac_final,
1740 .finup = cc_mac_finup,
1741 .digest = cc_mac_digest,
1742 .setkey = cc_xcbc_setkey,
1743 .export = cc_hash_export,
1744 .import = cc_hash_import,
1745 .halg = {
1746 .digestsize = AES_BLOCK_SIZE,
1747 .statesize = CC_STATE_SIZE(AES_BLOCK_SIZE),
1748 },
1749 },
1750 .hash_mode = DRV_HASH_NULL,
1751 .hw_mode = DRV_CIPHER_XCBC_MAC,
1752 .inter_digestsize = AES_BLOCK_SIZE,
1753 .min_hw_rev = CC_HW_REV_630,
1754 },
1755 {
1756 .mac_name = "cmac(aes)",
1757 .mac_driver_name = "cmac-aes-ccree",
1758 .blocksize = AES_BLOCK_SIZE,
1759 .is_mac = true,
1760 .template_ahash = {
1761 .init = cc_hash_init,
1762 .update = cc_mac_update,
1763 .final = cc_mac_final,
1764 .finup = cc_mac_finup,
1765 .digest = cc_mac_digest,
1766 .setkey = cc_cmac_setkey,
1767 .export = cc_hash_export,
1768 .import = cc_hash_import,
1769 .halg = {
1770 .digestsize = AES_BLOCK_SIZE,
1771 .statesize = CC_STATE_SIZE(AES_BLOCK_SIZE),
1772 },
1773 },
1774 .hash_mode = DRV_HASH_NULL,
1775 .hw_mode = DRV_CIPHER_CMAC,
1776 .inter_digestsize = AES_BLOCK_SIZE,
1777 .min_hw_rev = CC_HW_REV_630,
1778 },
1779 };
1780
1781 static struct cc_hash_alg *cc_alloc_hash_alg(struct cc_hash_template *template,
1782 struct device *dev, bool keyed)
1783 {
1784 struct cc_hash_alg *t_crypto_alg;
1785 struct crypto_alg *alg;
1786 struct ahash_alg *halg;
1787
1788 t_crypto_alg = kzalloc(sizeof(*t_crypto_alg), GFP_KERNEL);
1789 if (!t_crypto_alg)
1790 return ERR_PTR(-ENOMEM);
1791
1792 t_crypto_alg->ahash_alg = template->template_ahash;
1793 halg = &t_crypto_alg->ahash_alg;
1794 alg = &halg->halg.base;
1795
1796 if (keyed) {
1797 snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s",
1798 template->mac_name);
1799 snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
1800 template->mac_driver_name);
1801 } else {
1802 halg->setkey = NULL;
1803 snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s",
1804 template->name);
1805 snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
1806 template->driver_name);
1807 }
1808 alg->cra_module = THIS_MODULE;
1809 alg->cra_ctxsize = sizeof(struct cc_hash_ctx);
1810 alg->cra_priority = CC_CRA_PRIO;
1811 alg->cra_blocksize = template->blocksize;
1812 alg->cra_alignmask = 0;
1813 alg->cra_exit = cc_cra_exit;
1814
1815 alg->cra_init = cc_cra_init;
1816 alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
1817
1818 t_crypto_alg->hash_mode = template->hash_mode;
1819 t_crypto_alg->hw_mode = template->hw_mode;
1820 t_crypto_alg->inter_digestsize = template->inter_digestsize;
1821
1822 return t_crypto_alg;
1823 }
1824
1825 int cc_init_hash_sram(struct cc_drvdata *drvdata)
1826 {
1827 struct cc_hash_handle *hash_handle = drvdata->hash_handle;
1828 cc_sram_addr_t sram_buff_ofs = hash_handle->digest_len_sram_addr;
1829 unsigned int larval_seq_len = 0;
1830 struct cc_hw_desc larval_seq[CC_DIGEST_SIZE_MAX / sizeof(u32)];
1831 bool large_sha_supported = (drvdata->hw_rev >= CC_HW_REV_712);
> 1832 bool sm3_supported = (drvdata->hw_rev >= CC_HW_REV_713);
1833 int rc = 0;
1834
1835 /* Copy-to-sram digest-len */
1836 cc_set_sram_desc(digest_len_init, sram_buff_ofs,
1837 ARRAY_SIZE(digest_len_init), larval_seq,
1838 &larval_seq_len);
1839 rc = send_request_init(drvdata, larval_seq, larval_seq_len);
1840 if (rc)
1841 goto init_digest_const_err;
1842
1843 sram_buff_ofs += sizeof(digest_len_init);
1844 larval_seq_len = 0;
1845
1846 if (large_sha_supported) {
1847 /* Copy-to-sram digest-len for sha384/512 */
1848 cc_set_sram_desc(digest_len_sha512_init, sram_buff_ofs,
1849 ARRAY_SIZE(digest_len_sha512_init),
1850 larval_seq, &larval_seq_len);
1851 rc = send_request_init(drvdata, larval_seq, larval_seq_len);
1852 if (rc)
1853 goto init_digest_const_err;
1854
1855 sram_buff_ofs += sizeof(digest_len_sha512_init);
1856 larval_seq_len = 0;
1857 }
1858
1859 /* The initial digests offset */
1860 hash_handle->larval_digest_sram_addr = sram_buff_ofs;
1861
1862 /* Copy-to-sram initial SHA* digests */
1863 cc_set_sram_desc(md5_init, sram_buff_ofs, ARRAY_SIZE(md5_init),
1864 larval_seq, &larval_seq_len);
1865 rc = send_request_init(drvdata, larval_seq, larval_seq_len);
1866 if (rc)
1867 goto init_digest_const_err;
1868 sram_buff_ofs += sizeof(md5_init);
1869 larval_seq_len = 0;
1870
1871 cc_set_sram_desc(sha1_init, sram_buff_ofs,
1872 ARRAY_SIZE(sha1_init), larval_seq,
1873 &larval_seq_len);
1874 rc = send_request_init(drvdata, larval_seq, larval_seq_len);
1875 if (rc)
1876 goto init_digest_const_err;
1877 sram_buff_ofs += sizeof(sha1_init);
1878 larval_seq_len = 0;
1879
1880 cc_set_sram_desc(sha224_init, sram_buff_ofs,
1881 ARRAY_SIZE(sha224_init), larval_seq,
1882 &larval_seq_len);
1883 rc = send_request_init(drvdata, larval_seq, larval_seq_len);
1884 if (rc)
1885 goto init_digest_const_err;
1886 sram_buff_ofs += sizeof(sha224_init);
1887 larval_seq_len = 0;
1888
1889 cc_set_sram_desc(sha256_init, sram_buff_ofs,
1890 ARRAY_SIZE(sha256_init), larval_seq,
1891 &larval_seq_len);
1892 rc = send_request_init(drvdata, larval_seq, larval_seq_len);
1893 if (rc)
1894 goto init_digest_const_err;
1895 sram_buff_ofs += sizeof(sha256_init);
1896 larval_seq_len = 0;
1897
1898 if (sm3_supported) {
1899 cc_set_sram_desc(sm3_init, sram_buff_ofs,
1900 ARRAY_SIZE(sm3_init), larval_seq,
1901 &larval_seq_len);
1902 rc = send_request_init(drvdata, larval_seq, larval_seq_len);
1903 if (rc)
1904 goto init_digest_const_err;
1905 sram_buff_ofs += sizeof(sm3_init);
1906 larval_seq_len = 0;
1907 }
1908
1909 if (large_sha_supported) {
1910 cc_set_sram_desc((u32 *)sha384_init, sram_buff_ofs,
1911 (ARRAY_SIZE(sha384_init) * 2), larval_seq,
1912 &larval_seq_len);
1913 rc = send_request_init(drvdata, larval_seq, larval_seq_len);
1914 if (rc)
1915 goto init_digest_const_err;
1916 sram_buff_ofs += sizeof(sha384_init);
1917 larval_seq_len = 0;
1918
1919 cc_set_sram_desc((u32 *)sha512_init, sram_buff_ofs,
1920 (ARRAY_SIZE(sha512_init) * 2), larval_seq,
1921 &larval_seq_len);
1922 rc = send_request_init(drvdata, larval_seq, larval_seq_len);
1923 if (rc)
1924 goto init_digest_const_err;
1925 }
1926
1927 init_digest_const_err:
1928 return rc;
1929 }
1930

---
0-DAY kernel test infrastructure Open Source Technology Center
https://lists.01.org/pipermail/kbuild-all Intel Corporation


Attachments:
(No filename) (15.25 kB)
.config.gz (27.14 kB)
Download all attachments

2018-10-18 21:10:57

by Yael Chemla

[permalink] [raw]
Subject: RE: [PATCH 3/3] crypto: ccree: add SM3 support

this patch set should be applied after:
"crypto: ccree: add CryptoCell 713 baseline support"
patch set by Gilad Ben-Yossef


> -----Original Message-----
> From: kbuild test robot <[email protected]>
> Sent: Thursday, 18 October 2018 22:58
> To: Yael Chemla <[email protected]>
> Cc: [email protected]; Yael CHEmla <[email protected]>; linux-
> [email protected]; Yael Chemla <[email protected]>
> Subject: Re: [PATCH 3/3] crypto: ccree: add SM3 support
>
> Hi Yael,
>
> Thank you for the patch! Yet something to improve:
>
> [auto build test ERROR on cryptodev/master] [also build test ERROR on v4.19-
> rc8 next-20181018] [if your patch is applied to the wrong git tree, please drop us
> a note to help improve the system]
>
> url: https://github.com/0day-ci/linux/commits/Yael-Chemla/crypto-ccree-add-
> SM3-support/20181019-033131
> base: https://git.kernel.org/pub/scm/linux/kernel/git/herbert/cryptodev-
> 2.6.git master
> config: i386-randconfig-x004-201841 (attached as .config)
> compiler: gcc-7 (Debian 7.3.0-1) 7.3.0
> reproduce:
> # save the attached .config to linux build tree
> make ARCH=i386
>
> All error/warnings (new ones prefixed by >>):
>
> >> drivers/crypto/ccree/cc_hash.c:1729:17: error: 'CC_HW_REV_713'
> undeclared here (not in a function); did you mean 'CC_HW_REV_712'?
> .min_hw_rev = CC_HW_REV_713,
> ^~~~~~~~~~~~~
> CC_HW_REV_712
> drivers/crypto/ccree/cc_hash.c: In function 'cc_init_hash_sram':
> >> drivers/crypto/ccree/cc_hash.c:1832:40: warning: comparison between
> >> pointer and integer
> bool sm3_supported = (drvdata->hw_rev >= CC_HW_REV_713);
> ^~
> drivers/crypto/ccree/cc_hash.c: In function 'cc_hash_alloc':
> drivers/crypto/ccree/cc_hash.c:1975:22: warning: comparison between
> pointer and integer
> if (drvdata->hw_rev >= CC_HW_REV_713)
> ^~
> drivers/crypto/ccree/cc_hash.c: In function 'cc_larval_digest_addr':
> drivers/crypto/ccree/cc_hash.c:2252:41: warning: comparison between
> pointer and integer
> bool sm3_supported = (_drvdata->hw_rev >= CC_HW_REV_713);
> ^~
>
> vim +1729 drivers/crypto/ccree/cc_hash.c
>
> 1543
> 1544 #define CC_STATE_SIZE(_x) \
> 1545 ((_x) + HASH_MAX_LEN_SIZE + CC_MAX_HASH_BLCK_SIZE + (2 *
> sizeof(u32)))
> 1546
> 1547 /* hash descriptors */
> 1548 static struct cc_hash_template driver_hash[] = {
> 1549 //Asynchronize hash template
> 1550 {
> 1551 .name = "sha1",
> 1552 .driver_name = "sha1-ccree",
> 1553 .mac_name = "hmac(sha1)",
> 1554 .mac_driver_name = "hmac-sha1-ccree",
> 1555 .blocksize = SHA1_BLOCK_SIZE,
> 1556 .is_mac = true,
> 1557 .synchronize = false,
> 1558 .template_ahash = {
> 1559 .init = cc_hash_init,
> 1560 .update = cc_hash_update,
> 1561 .final = cc_hash_final,
> 1562 .finup = cc_hash_finup,
> 1563 .digest = cc_hash_digest,
> 1564 .export = cc_hash_export,
> 1565 .import = cc_hash_import,
> 1566 .setkey = cc_hash_setkey,
> 1567 .halg = {
> 1568 .digestsize = SHA1_DIGEST_SIZE,
> 1569 .statesize =
> CC_STATE_SIZE(SHA1_DIGEST_SIZE),
> 1570 },
> 1571 },
> 1572 .hash_mode = DRV_HASH_SHA1,
> 1573 .hw_mode = DRV_HASH_HW_SHA1,
> 1574 .inter_digestsize = SHA1_DIGEST_SIZE,
> 1575 .min_hw_rev = CC_HW_REV_630,
> 1576 },
> 1577 {
> 1578 .name = "sha256",
> 1579 .driver_name = "sha256-ccree",
> 1580 .mac_name = "hmac(sha256)",
> 1581 .mac_driver_name = "hmac-sha256-ccree",
> 1582 .blocksize = SHA256_BLOCK_SIZE,
> 1583 .is_mac = true,
> 1584 .template_ahash = {
> 1585 .init = cc_hash_init,
> 1586 .update = cc_hash_update,
> 1587 .final = cc_hash_final,
> 1588 .finup = cc_hash_finup,
> 1589 .digest = cc_hash_digest,
> 1590 .export = cc_hash_export,
> 1591 .import = cc_hash_import,
> 1592 .setkey = cc_hash_setkey,
> 1593 .halg = {
> 1594 .digestsize = SHA256_DIGEST_SIZE,
> 1595 .statesize =
> CC_STATE_SIZE(SHA256_DIGEST_SIZE)
> 1596 },
> 1597 },
> 1598 .hash_mode = DRV_HASH_SHA256,
> 1599 .hw_mode = DRV_HASH_HW_SHA256,
> 1600 .inter_digestsize = SHA256_DIGEST_SIZE,
> 1601 .min_hw_rev = CC_HW_REV_630,
> 1602 },
> 1603 {
> 1604 .name = "sha224",
> 1605 .driver_name = "sha224-ccree",
> 1606 .mac_name = "hmac(sha224)",
> 1607 .mac_driver_name = "hmac-sha224-ccree",
> 1608 .blocksize = SHA224_BLOCK_SIZE,
> 1609 .is_mac = true,
> 1610 .template_ahash = {
> 1611 .init = cc_hash_init,
> 1612 .update = cc_hash_update,
> 1613 .final = cc_hash_final,
> 1614 .finup = cc_hash_finup,
> 1615 .digest = cc_hash_digest,
> 1616 .export = cc_hash_export,
> 1617 .import = cc_hash_import,
> 1618 .setkey = cc_hash_setkey,
> 1619 .halg = {
> 1620 .digestsize = SHA224_DIGEST_SIZE,
> 1621 .statesize =
> CC_STATE_SIZE(SHA224_DIGEST_SIZE),
> 1622 },
> 1623 },
> 1624 .hash_mode = DRV_HASH_SHA224,
> 1625 .hw_mode = DRV_HASH_HW_SHA256,
> 1626 .inter_digestsize = SHA256_DIGEST_SIZE,
> 1627 .min_hw_rev = CC_HW_REV_630,
> 1628 },
> 1629 {
> 1630 .name = "sha384",
> 1631 .driver_name = "sha384-ccree",
> 1632 .mac_name = "hmac(sha384)",
> 1633 .mac_driver_name = "hmac-sha384-ccree",
> 1634 .blocksize = SHA384_BLOCK_SIZE,
> 1635 .is_mac = true,
> 1636 .template_ahash = {
> 1637 .init = cc_hash_init,
> 1638 .update = cc_hash_update,
> 1639 .final = cc_hash_final,
> 1640 .finup = cc_hash_finup,
> 1641 .digest = cc_hash_digest,
> 1642 .export = cc_hash_export,
> 1643 .import = cc_hash_import,
> 1644 .setkey = cc_hash_setkey,
> 1645 .halg = {
> 1646 .digestsize = SHA384_DIGEST_SIZE,
> 1647 .statesize =
> CC_STATE_SIZE(SHA384_DIGEST_SIZE),
> 1648 },
> 1649 },
> 1650 .hash_mode = DRV_HASH_SHA384,
> 1651 .hw_mode = DRV_HASH_HW_SHA512,
> 1652 .inter_digestsize = SHA512_DIGEST_SIZE,
> 1653 .min_hw_rev = CC_HW_REV_712,
> 1654 },
> 1655 {
> 1656 .name = "sha512",
> 1657 .driver_name = "sha512-ccree",
> 1658 .mac_name = "hmac(sha512)",
> 1659 .mac_driver_name = "hmac-sha512-ccree",
> 1660 .blocksize = SHA512_BLOCK_SIZE,
> 1661 .is_mac = true,
> 1662 .template_ahash = {
> 1663 .init = cc_hash_init,
> 1664 .update = cc_hash_update,
> 1665 .final = cc_hash_final,
> 1666 .finup = cc_hash_finup,
> 1667 .digest = cc_hash_digest,
> 1668 .export = cc_hash_export,
> 1669 .import = cc_hash_import,
> 1670 .setkey = cc_hash_setkey,
> 1671 .halg = {
> 1672 .digestsize = SHA512_DIGEST_SIZE,
> 1673 .statesize =
> CC_STATE_SIZE(SHA512_DIGEST_SIZE),
> 1674 },
> 1675 },
> 1676 .hash_mode = DRV_HASH_SHA512,
> 1677 .hw_mode = DRV_HASH_HW_SHA512,
> 1678 .inter_digestsize = SHA512_DIGEST_SIZE,
> 1679 .min_hw_rev = CC_HW_REV_712,
> 1680 },
> 1681 {
> 1682 .name = "md5",
> 1683 .driver_name = "md5-ccree",
> 1684 .mac_name = "hmac(md5)",
> 1685 .mac_driver_name = "hmac-md5-ccree",
> 1686 .blocksize = MD5_HMAC_BLOCK_SIZE,
> 1687 .is_mac = true,
> 1688 .template_ahash = {
> 1689 .init = cc_hash_init,
> 1690 .update = cc_hash_update,
> 1691 .final = cc_hash_final,
> 1692 .finup = cc_hash_finup,
> 1693 .digest = cc_hash_digest,
> 1694 .export = cc_hash_export,
> 1695 .import = cc_hash_import,
> 1696 .setkey = cc_hash_setkey,
> 1697 .halg = {
> 1698 .digestsize = MD5_DIGEST_SIZE,
> 1699 .statesize =
> CC_STATE_SIZE(MD5_DIGEST_SIZE),
> 1700 },
> 1701 },
> 1702 .hash_mode = DRV_HASH_MD5,
> 1703 .hw_mode = DRV_HASH_HW_MD5,
> 1704 .inter_digestsize = MD5_DIGEST_SIZE,
> 1705 .min_hw_rev = CC_HW_REV_630,
> 1706 },
> 1707 {
> 1708 .name = "sm3",
> 1709 .driver_name = "sm3-ccree",
> 1710 .blocksize = SM3_BLOCK_SIZE,
> 1711 .is_mac = false,
> 1712 .template_ahash = {
> 1713 .init = cc_hash_init,
> 1714 .update = cc_hash_update,
> 1715 .final = cc_hash_final,
> 1716 .finup = cc_hash_finup,
> 1717 .digest = cc_hash_digest,
> 1718 .export = cc_hash_export,
> 1719 .import = cc_hash_import,
> 1720 .setkey = cc_hash_setkey,
> 1721 .halg = {
> 1722 .digestsize = SM3_DIGEST_SIZE,
> 1723 .statesize =
> CC_STATE_SIZE(SM3_DIGEST_SIZE),
> 1724 },
> 1725 },
> 1726 .hash_mode = DRV_HASH_SM3,
> 1727 .hw_mode = DRV_HASH_HW_SM3,
> 1728 .inter_digestsize = SM3_DIGEST_SIZE,
> > 1729 .min_hw_rev = CC_HW_REV_713,
> 1730 },
> 1731 {
> 1732 .mac_name = "xcbc(aes)",
> 1733 .mac_driver_name = "xcbc-aes-ccree",
> 1734 .blocksize = AES_BLOCK_SIZE,
> 1735 .is_mac = true,
> 1736 .template_ahash = {
> 1737 .init = cc_hash_init,
> 1738 .update = cc_mac_update,
> 1739 .final = cc_mac_final,
> 1740 .finup = cc_mac_finup,
> 1741 .digest = cc_mac_digest,
> 1742 .setkey = cc_xcbc_setkey,
> 1743 .export = cc_hash_export,
> 1744 .import = cc_hash_import,
> 1745 .halg = {
> 1746 .digestsize = AES_BLOCK_SIZE,
> 1747 .statesize =
> CC_STATE_SIZE(AES_BLOCK_SIZE),
> 1748 },
> 1749 },
> 1750 .hash_mode = DRV_HASH_NULL,
> 1751 .hw_mode = DRV_CIPHER_XCBC_MAC,
> 1752 .inter_digestsize = AES_BLOCK_SIZE,
> 1753 .min_hw_rev = CC_HW_REV_630,
> 1754 },
> 1755 {
> 1756 .mac_name = "cmac(aes)",
> 1757 .mac_driver_name = "cmac-aes-ccree",
> 1758 .blocksize = AES_BLOCK_SIZE,
> 1759 .is_mac = true,
> 1760 .template_ahash = {
> 1761 .init = cc_hash_init,
> 1762 .update = cc_mac_update,
> 1763 .final = cc_mac_final,
> 1764 .finup = cc_mac_finup,
> 1765 .digest = cc_mac_digest,
> 1766 .setkey = cc_cmac_setkey,
> 1767 .export = cc_hash_export,
> 1768 .import = cc_hash_import,
> 1769 .halg = {
> 1770 .digestsize = AES_BLOCK_SIZE,
> 1771 .statesize =
> CC_STATE_SIZE(AES_BLOCK_SIZE),
> 1772 },
> 1773 },
> 1774 .hash_mode = DRV_HASH_NULL,
> 1775 .hw_mode = DRV_CIPHER_CMAC,
> 1776 .inter_digestsize = AES_BLOCK_SIZE,
> 1777 .min_hw_rev = CC_HW_REV_630,
> 1778 },
> 1779 };
> 1780
> 1781 static struct cc_hash_alg *cc_alloc_hash_alg(struct cc_hash_template
> *template,
> 1782 struct device *dev, bool
> keyed)
> 1783 {
> 1784 struct cc_hash_alg *t_crypto_alg;
> 1785 struct crypto_alg *alg;
> 1786 struct ahash_alg *halg;
> 1787
> 1788 t_crypto_alg = kzalloc(sizeof(*t_crypto_alg), GFP_KERNEL);
> 1789 if (!t_crypto_alg)
> 1790 return ERR_PTR(-ENOMEM);
> 1791
> 1792 t_crypto_alg->ahash_alg = template->template_ahash;
> 1793 halg = &t_crypto_alg->ahash_alg;
> 1794 alg = &halg->halg.base;
> 1795
> 1796 if (keyed) {
> 1797 snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME,
> "%s",
> 1798 template->mac_name);
> 1799 snprintf(alg->cra_driver_name,
> CRYPTO_MAX_ALG_NAME, "%s",
> 1800 template->mac_driver_name);
> 1801 } else {
> 1802 halg->setkey = NULL;
> 1803 snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME,
> "%s",
> 1804 template->name);
> 1805 snprintf(alg->cra_driver_name,
> CRYPTO_MAX_ALG_NAME, "%s",
> 1806 template->driver_name);
> 1807 }
> 1808 alg->cra_module = THIS_MODULE;
> 1809 alg->cra_ctxsize = sizeof(struct cc_hash_ctx);
> 1810 alg->cra_priority = CC_CRA_PRIO;
> 1811 alg->cra_blocksize = template->blocksize;
> 1812 alg->cra_alignmask = 0;
> 1813 alg->cra_exit = cc_cra_exit;
> 1814
> 1815 alg->cra_init = cc_cra_init;
> 1816 alg->cra_flags = CRYPTO_ALG_ASYNC |
> CRYPTO_ALG_KERN_DRIVER_ONLY;
> 1817
> 1818 t_crypto_alg->hash_mode = template->hash_mode;
> 1819 t_crypto_alg->hw_mode = template->hw_mode;
> 1820 t_crypto_alg->inter_digestsize = template->inter_digestsize;
> 1821
> 1822 return t_crypto_alg;
> 1823 }
> 1824
> 1825 int cc_init_hash_sram(struct cc_drvdata *drvdata)
> 1826 {
> 1827 struct cc_hash_handle *hash_handle = drvdata->hash_handle;
> 1828 cc_sram_addr_t sram_buff_ofs = hash_handle-
> >digest_len_sram_addr;
> 1829 unsigned int larval_seq_len = 0;
> 1830 struct cc_hw_desc larval_seq[CC_DIGEST_SIZE_MAX /
> sizeof(u32)];
> 1831 bool large_sha_supported = (drvdata->hw_rev >=
> CC_HW_REV_712);
> > 1832 bool sm3_supported = (drvdata->hw_rev >= CC_HW_REV_713);
> 1833 int rc = 0;
> 1834
> 1835 /* Copy-to-sram digest-len */
> 1836 cc_set_sram_desc(digest_len_init, sram_buff_ofs,
> 1837 ARRAY_SIZE(digest_len_init), larval_seq,
> 1838 &larval_seq_len);
> 1839 rc = send_request_init(drvdata, larval_seq, larval_seq_len);
> 1840 if (rc)
> 1841 goto init_digest_const_err;
> 1842
> 1843 sram_buff_ofs += sizeof(digest_len_init);
> 1844 larval_seq_len = 0;
> 1845
> 1846 if (large_sha_supported) {
> 1847 /* Copy-to-sram digest-len for sha384/512 */
> 1848 cc_set_sram_desc(digest_len_sha512_init,
> sram_buff_ofs,
> 1849 ARRAY_SIZE(digest_len_sha512_init),
> 1850 larval_seq, &larval_seq_len);
> 1851 rc = send_request_init(drvdata, larval_seq,
> larval_seq_len);
> 1852 if (rc)
> 1853 goto init_digest_const_err;
> 1854
> 1855 sram_buff_ofs += sizeof(digest_len_sha512_init);
> 1856 larval_seq_len = 0;
> 1857 }
> 1858
> 1859 /* The initial digests offset */
> 1860 hash_handle->larval_digest_sram_addr = sram_buff_ofs;
> 1861
> 1862 /* Copy-to-sram initial SHA* digests */
> 1863 cc_set_sram_desc(md5_init, sram_buff_ofs,
> ARRAY_SIZE(md5_init),
> 1864 larval_seq, &larval_seq_len);
> 1865 rc = send_request_init(drvdata, larval_seq, larval_seq_len);
> 1866 if (rc)
> 1867 goto init_digest_const_err;
> 1868 sram_buff_ofs += sizeof(md5_init);
> 1869 larval_seq_len = 0;
> 1870
> 1871 cc_set_sram_desc(sha1_init, sram_buff_ofs,
> 1872 ARRAY_SIZE(sha1_init), larval_seq,
> 1873 &larval_seq_len);
> 1874 rc = send_request_init(drvdata, larval_seq, larval_seq_len);
> 1875 if (rc)
> 1876 goto init_digest_const_err;
> 1877 sram_buff_ofs += sizeof(sha1_init);
> 1878 larval_seq_len = 0;
> 1879
> 1880 cc_set_sram_desc(sha224_init, sram_buff_ofs,
> 1881 ARRAY_SIZE(sha224_init), larval_seq,
> 1882 &larval_seq_len);
> 1883 rc = send_request_init(drvdata, larval_seq, larval_seq_len);
> 1884 if (rc)
> 1885 goto init_digest_const_err;
> 1886 sram_buff_ofs += sizeof(sha224_init);
> 1887 larval_seq_len = 0;
> 1888
> 1889 cc_set_sram_desc(sha256_init, sram_buff_ofs,
> 1890 ARRAY_SIZE(sha256_init), larval_seq,
> 1891 &larval_seq_len);
> 1892 rc = send_request_init(drvdata, larval_seq, larval_seq_len);
> 1893 if (rc)
> 1894 goto init_digest_const_err;
> 1895 sram_buff_ofs += sizeof(sha256_init);
> 1896 larval_seq_len = 0;
> 1897
> 1898 if (sm3_supported) {
> 1899 cc_set_sram_desc(sm3_init, sram_buff_ofs,
> 1900 ARRAY_SIZE(sm3_init), larval_seq,
> 1901 &larval_seq_len);
> 1902 rc = send_request_init(drvdata, larval_seq,
> larval_seq_len);
> 1903 if (rc)
> 1904 goto init_digest_const_err;
> 1905 sram_buff_ofs += sizeof(sm3_init);
> 1906 larval_seq_len = 0;
> 1907 }
> 1908
> 1909 if (large_sha_supported) {
> 1910 cc_set_sram_desc((u32 *)sha384_init, sram_buff_ofs,
> 1911 (ARRAY_SIZE(sha384_init) * 2),
> larval_seq,
> 1912 &larval_seq_len);
> 1913 rc = send_request_init(drvdata, larval_seq,
> larval_seq_len);
> 1914 if (rc)
> 1915 goto init_digest_const_err;
> 1916 sram_buff_ofs += sizeof(sha384_init);
> 1917 larval_seq_len = 0;
> 1918
> 1919 cc_set_sram_desc((u32 *)sha512_init, sram_buff_ofs,
> 1920 (ARRAY_SIZE(sha512_init) * 2),
> larval_seq,
> 1921 &larval_seq_len);
> 1922 rc = send_request_init(drvdata, larval_seq,
> larval_seq_len);
> 1923 if (rc)
> 1924 goto init_digest_const_err;
> 1925 }
> 1926
> 1927 init_digest_const_err:
> 1928 return rc;
> 1929 }
> 1930
>
> ---
> 0-DAY kernel test infrastructure Open Source Technology Center
> https://lists.01.org/pipermail/kbuild-all Intel Corporation