| From 89f579b0b263bfe41dc27ea4ae3b5b3f576ac90e Mon Sep 17 00:00:00 2001 |
| From: Ofir Drang <ofir.drang@arm.com> |
| Date: Thu, 16 Jan 2020 12:14:42 +0200 |
| Subject: [PATCH] crypto: ccree - fix FDE descriptor sequence |
| |
| commit 5c83e8ec4d51ac4cc58482ed04297e6882b32a09 upstream. |
| |
| In FDE mode (xts, essiv and bitlocker) the cryptocell hardware requires |
| that the XEX key will be loaded after Key1. |
| |
| Signed-off-by: Ofir Drang <ofir.drang@arm.com> |
| Cc: stable@vger.kernel.org |
| Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au> |
| Signed-off-by: Paul Gortmaker <paul.gortmaker@windriver.com> |
| |
| diff --git a/drivers/crypto/ccree/cc_cipher.c b/drivers/crypto/ccree/cc_cipher.c |
| index 5b58226ea24d..67ac87d25aea 100644 |
| --- a/drivers/crypto/ccree/cc_cipher.c |
| +++ b/drivers/crypto/ccree/cc_cipher.c |
| @@ -534,6 +534,7 @@ static void cc_setup_readiv_desc(struct crypto_tfm *tfm, |
| } |
| } |
| |
| + |
| static void cc_setup_state_desc(struct crypto_tfm *tfm, |
| struct cipher_req_ctx *req_ctx, |
| unsigned int ivsize, unsigned int nbytes, |
| @@ -545,8 +546,6 @@ static void cc_setup_state_desc(struct crypto_tfm *tfm, |
| int cipher_mode = ctx_p->cipher_mode; |
| int flow_mode = ctx_p->flow_mode; |
| int direction = req_ctx->gen_ctx.op_type; |
| - dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr; |
| - unsigned int key_len = ctx_p->keylen; |
| dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr; |
| unsigned int du_size = nbytes; |
| |
| @@ -582,6 +581,47 @@ static void cc_setup_state_desc(struct crypto_tfm *tfm, |
| case DRV_CIPHER_XTS: |
| case DRV_CIPHER_ESSIV: |
| case DRV_CIPHER_BITLOCKER: |
| + break; |
| + default: |
| + dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode); |
| + } |
| +} |
| + |
| + |
| +static void cc_setup_xex_state_desc(struct crypto_tfm *tfm, |
| + struct cipher_req_ctx *req_ctx, |
| + unsigned int ivsize, unsigned int nbytes, |
| + struct cc_hw_desc desc[], |
| + unsigned int *seq_size) |
| +{ |
| + struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); |
| + struct device *dev = drvdata_to_dev(ctx_p->drvdata); |
| + int cipher_mode = ctx_p->cipher_mode; |
| + int flow_mode = ctx_p->flow_mode; |
| + int direction = req_ctx->gen_ctx.op_type; |
| + dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr; |
| + unsigned int key_len = ctx_p->keylen; |
| + dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr; |
| + unsigned int du_size = nbytes; |
| + |
| + struct cc_crypto_alg *cc_alg = |
| + container_of(tfm->__crt_alg, struct cc_crypto_alg, |
| + skcipher_alg.base); |
| + |
| + if (cc_alg->data_unit) |
| + du_size = cc_alg->data_unit; |
| + |
| + switch (cipher_mode) { |
| + case DRV_CIPHER_ECB: |
| + break; |
| + case DRV_CIPHER_CBC: |
| + case DRV_CIPHER_CBC_CTS: |
| + case DRV_CIPHER_CTR: |
| + case DRV_CIPHER_OFB: |
| + break; |
| + case DRV_CIPHER_XTS: |
| + case DRV_CIPHER_ESSIV: |
| + case DRV_CIPHER_BITLOCKER: |
| /* load XEX key */ |
| hw_desc_init(&desc[*seq_size]); |
| set_cipher_mode(&desc[*seq_size], cipher_mode); |
| @@ -892,12 +932,14 @@ static int cc_cipher_process(struct skcipher_request *req, |
| |
| /* STAT_PHASE_2: Create sequence */ |
| |
| - /* Setup IV and XEX key used */ |
| + /* Setup state (IV) */ |
| cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len); |
| /* Setup MLLI line, if needed */ |
| cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len); |
| /* Setup key */ |
| cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len); |
| + /* Setup state (IV and XEX key) */ |
| + cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len); |
| /* Data processing */ |
| cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len); |
| /* Read next IV */ |
| -- |
| 2.7.4 |
| |