Add an IV buffer to struct virtio_crypto_sym_request so that a separate
IV buffer does not need to be allocated for every encrypt/decrypt
request. The IV buffer is cleared when the encrypt/decrypt operation
finishes.
Signed-off-by: Bibo Mao <[email protected]>
---
.../virtio/virtio_crypto_skcipher_algs.c | 20 +++++++------------
1 file changed, 7 insertions(+), 13 deletions(-)
diff --git a/drivers/crypto/virtio/virtio_crypto_skcipher_algs.c b/drivers/crypto/virtio/virtio_crypto_skcipher_algs.c
index a7c7c726e6d9..c911b7ba8f13 100644
--- a/drivers/crypto/virtio/virtio_crypto_skcipher_algs.c
+++ b/drivers/crypto/virtio/virtio_crypto_skcipher_algs.c
@@ -30,9 +30,9 @@ struct virtio_crypto_sym_request {
/* Cipher or aead */
uint32_t type;
- uint8_t *iv;
/* Encryption? */
bool encrypt;
+ uint8_t iv[];
};
struct virtio_crypto_algo {
@@ -402,12 +402,7 @@ __virtio_crypto_skcipher_do_req(struct virtio_crypto_sym_request *vc_sym_req,
* Avoid to do DMA from the stack, switch to using
* dynamically-allocated for the IV
*/
- iv = kzalloc_node(ivsize, GFP_ATOMIC,
- dev_to_node(&vcrypto->vdev->dev));
- if (!iv) {
- err = -ENOMEM;
- goto free;
- }
+ iv = vc_sym_req->iv;
memcpy(iv, req->iv, ivsize);
if (!vc_sym_req->encrypt)
scatterwalk_map_and_copy(req->iv, req->src,
@@ -416,7 +411,6 @@ __virtio_crypto_skcipher_do_req(struct virtio_crypto_sym_request *vc_sym_req,
sg_init_one(&iv_sg, iv, ivsize);
sgs[num_out++] = &iv_sg;
- vc_sym_req->iv = iv;
/* Source data */
for (sg = req->src; src_nents; sg = sg_next(sg), src_nents--)
@@ -438,12 +432,10 @@ __virtio_crypto_skcipher_do_req(struct virtio_crypto_sym_request *vc_sym_req,
virtqueue_kick(data_vq->vq);
spin_unlock_irqrestore(&data_vq->lock, flags);
if (unlikely(err < 0))
- goto free_iv;
+ goto free;
return 0;
-free_iv:
- kfree_sensitive(iv);