[dpdk-stable] patch 'vhost/crypto: fix inferred misuse of enum' has been queued to LTS release 18.11.3

Kevin Traynor ktraynor@redhat.com
Mon Jun 24 17:25:25 CEST 2019


Hi,

FYI, your patch has been queued to LTS release 18.11.3

Note it hasn't been pushed to http://dpdk.org/browse/dpdk-stable yet.
It will be pushed if I get no objections before 06/27/19, so please
shout if you have any objections.

Also note that after the patch there's a diff of the upstream commit vs the
patch applied to the branch. This will indicate if there was any rebasing
needed to apply to the stable branch. If there were code changes for rebasing
(i.e. not only metadata diffs), please double-check that the rebase was
done correctly.

Queued patches are on a temporary branch at:
https://github.com/kevintraynor/dpdk-stable-queue

This queued commit can be viewed at:
https://github.com/kevintraynor/dpdk-stable-queue/commit/6676ee09c8bdd8aa3b731a17aa8fa02de2bdebee
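
For anyone reviewing the backport, here is a minimal sketch of the pattern the
patch applies (using made-up names such as cipher_algo_transform_old/_new and
ERR_BADMSG for illustration, not the exact DPDK code). The old helpers returned
a single int that mixed rte_crypto_*_algorithm enum values with negative
VIRTIO_CRYPTO_* status codes, which Coverity reports as an inferred misuse of
enum; the fix returns only a status and writes the enum through an output
pointer:

    #include <stdio.h>

    enum cipher_algo { CIPHER_AES_CBC, CIPHER_AES_CTR };

    #define ERR_BADMSG 2   /* stands in for VIRTIO_CRYPTO_BADMSG */

    /* Before: enum values and a negative status share one int return,
     * which static analysis flags as mixing enum and non-enum values. */
    static int cipher_algo_transform_old(unsigned int virtio_algo)
    {
        switch (virtio_algo) {
        case 0: return CIPHER_AES_CBC;   /* enum value */
        case 1: return CIPHER_AES_CTR;   /* enum value */
        default: return -ERR_BADMSG;     /* error code in the same int */
        }
    }

    /* After: the status code and the enum travel in separate channels. */
    static int cipher_algo_transform_new(unsigned int virtio_algo,
            enum cipher_algo *algo)
    {
        switch (virtio_algo) {
        case 0: *algo = CIPHER_AES_CBC; break;
        case 1: *algo = CIPHER_AES_CTR; break;
        default: return -ERR_BADMSG;     /* status only */
        }
        return 0;                        /* enum delivered via *algo */
    }

    int main(void)
    {
        enum cipher_algo algo;

        if (cipher_algo_transform_new(1, &algo) == 0)
            printf("new style: algo=%d\n", (int)algo);
        printf("old style return: %d\n", cipher_algo_transform_old(5));
        return 0;
    }

This mirrors what the diff below does for cipher_algo_transform() and
auth_algo_transform(); the callers in transform_cipher_param() and
transform_chain_param() now pass &xform->cipher.algo and &xform_auth->auth.algo
instead of casting the int return value back to the enum.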

Thanks.

Kevin Traynor

---
From 6676ee09c8bdd8aa3b731a17aa8fa02de2bdebee Mon Sep 17 00:00:00 2001
From: Fan Zhang <roy.fan.zhang@intel.com>
Date: Wed, 15 May 2019 17:09:11 +0100
Subject: [PATCH] vhost/crypto: fix inferred misuse of enum

[ upstream commit 4349d412afe4ec25b43867f4d6d4672156cb5766 ]

This patch fixes the inferred misuse of the crypto algorithm enums.

Coverity issue: 325879
Fixes: e80a98708166 ("vhost/crypto: add session message handler")

Signed-off-by: Fan Zhang <roy.fan.zhang@intel.com>
Acked-by: Marko Kovacevic <marko.kovacevic@intel.com>
Reviewed-by: Maxime Coquelin <maxime.coquelin@redhat.com>
---
 lib/librte_vhost/vhost_crypto.c | 93 +++++++++++++++------------------
 1 file changed, 41 insertions(+), 52 deletions(-)

diff --git a/lib/librte_vhost/vhost_crypto.c b/lib/librte_vhost/vhost_crypto.c
index 590f5ad83..d8a0f954b 100644
--- a/lib/librte_vhost/vhost_crypto.c
+++ b/lib/librte_vhost/vhost_crypto.c
@@ -47,114 +47,105 @@
 
 static int
-cipher_algo_transform(uint32_t virtio_cipher_algo)
+cipher_algo_transform(uint32_t virtio_cipher_algo,
+		enum rte_crypto_cipher_algorithm *algo)
 {
-	int ret;
-
 	switch (virtio_cipher_algo) {
 	case VIRTIO_CRYPTO_CIPHER_AES_CBC:
-		ret = RTE_CRYPTO_CIPHER_AES_CBC;
+		*algo = RTE_CRYPTO_CIPHER_AES_CBC;
 		break;
 	case VIRTIO_CRYPTO_CIPHER_AES_CTR:
-		ret = RTE_CRYPTO_CIPHER_AES_CTR;
+		*algo = RTE_CRYPTO_CIPHER_AES_CTR;
 		break;
 	case VIRTIO_CRYPTO_CIPHER_DES_ECB:
-		ret = -VIRTIO_CRYPTO_NOTSUPP;
+		*algo = -VIRTIO_CRYPTO_NOTSUPP;
 		break;
 	case VIRTIO_CRYPTO_CIPHER_DES_CBC:
-		ret = RTE_CRYPTO_CIPHER_DES_CBC;
+		*algo = RTE_CRYPTO_CIPHER_DES_CBC;
 		break;
 	case VIRTIO_CRYPTO_CIPHER_3DES_ECB:
-		ret = RTE_CRYPTO_CIPHER_3DES_ECB;
+		*algo = RTE_CRYPTO_CIPHER_3DES_ECB;
 		break;
 	case VIRTIO_CRYPTO_CIPHER_3DES_CBC:
-		ret = RTE_CRYPTO_CIPHER_3DES_CBC;
+		*algo = RTE_CRYPTO_CIPHER_3DES_CBC;
 		break;
 	case VIRTIO_CRYPTO_CIPHER_3DES_CTR:
-		ret = RTE_CRYPTO_CIPHER_3DES_CTR;
+		*algo = RTE_CRYPTO_CIPHER_3DES_CTR;
 		break;
 	case VIRTIO_CRYPTO_CIPHER_KASUMI_F8:
-		ret = RTE_CRYPTO_CIPHER_KASUMI_F8;
+		*algo = RTE_CRYPTO_CIPHER_KASUMI_F8;
 		break;
 	case VIRTIO_CRYPTO_CIPHER_SNOW3G_UEA2:
-		ret = RTE_CRYPTO_CIPHER_SNOW3G_UEA2;
+		*algo = RTE_CRYPTO_CIPHER_SNOW3G_UEA2;
 		break;
 	case VIRTIO_CRYPTO_CIPHER_AES_F8:
-		ret = RTE_CRYPTO_CIPHER_AES_F8;
+		*algo = RTE_CRYPTO_CIPHER_AES_F8;
 		break;
 	case VIRTIO_CRYPTO_CIPHER_AES_XTS:
-		ret = RTE_CRYPTO_CIPHER_AES_XTS;
+		*algo = RTE_CRYPTO_CIPHER_AES_XTS;
 		break;
 	case VIRTIO_CRYPTO_CIPHER_ZUC_EEA3:
-		ret = RTE_CRYPTO_CIPHER_ZUC_EEA3;
+		*algo = RTE_CRYPTO_CIPHER_ZUC_EEA3;
 		break;
 	default:
-		ret = -VIRTIO_CRYPTO_BADMSG;
+		return -VIRTIO_CRYPTO_BADMSG;
 		break;
 	}
 
-	return ret;
+	return 0;
 }
 
 static int
-auth_algo_transform(uint32_t virtio_auth_algo)
+auth_algo_transform(uint32_t virtio_auth_algo,
+		enum rte_crypto_auth_algorithm *algo)
 {
-	int ret;
-
 	switch (virtio_auth_algo) {
-
 	case VIRTIO_CRYPTO_NO_MAC:
-		ret = RTE_CRYPTO_AUTH_NULL;
+		*algo = RTE_CRYPTO_AUTH_NULL;
 		break;
 	case VIRTIO_CRYPTO_MAC_HMAC_MD5:
-		ret = RTE_CRYPTO_AUTH_MD5_HMAC;
+		*algo = RTE_CRYPTO_AUTH_MD5_HMAC;
 		break;
 	case VIRTIO_CRYPTO_MAC_HMAC_SHA1:
-		ret = RTE_CRYPTO_AUTH_SHA1_HMAC;
+		*algo = RTE_CRYPTO_AUTH_SHA1_HMAC;
 		break;
 	case VIRTIO_CRYPTO_MAC_HMAC_SHA_224:
-		ret = RTE_CRYPTO_AUTH_SHA224_HMAC;
+		*algo = RTE_CRYPTO_AUTH_SHA224_HMAC;
 		break;
 	case VIRTIO_CRYPTO_MAC_HMAC_SHA_256:
-		ret = RTE_CRYPTO_AUTH_SHA256_HMAC;
+		*algo = RTE_CRYPTO_AUTH_SHA256_HMAC;
 		break;
 	case VIRTIO_CRYPTO_MAC_HMAC_SHA_384:
-		ret = RTE_CRYPTO_AUTH_SHA384_HMAC;
+		*algo = RTE_CRYPTO_AUTH_SHA384_HMAC;
 		break;
 	case VIRTIO_CRYPTO_MAC_HMAC_SHA_512:
-		ret = RTE_CRYPTO_AUTH_SHA512_HMAC;
-		break;
-	case VIRTIO_CRYPTO_MAC_CMAC_3DES:
-		ret = -VIRTIO_CRYPTO_NOTSUPP;
+		*algo = RTE_CRYPTO_AUTH_SHA512_HMAC;
 		break;
 	case VIRTIO_CRYPTO_MAC_CMAC_AES:
-		ret = RTE_CRYPTO_AUTH_AES_CMAC;
+		*algo = RTE_CRYPTO_AUTH_AES_CMAC;
 		break;
 	case VIRTIO_CRYPTO_MAC_KASUMI_F9:
-		ret = RTE_CRYPTO_AUTH_KASUMI_F9;
+		*algo = RTE_CRYPTO_AUTH_KASUMI_F9;
 		break;
 	case VIRTIO_CRYPTO_MAC_SNOW3G_UIA2:
-		ret = RTE_CRYPTO_AUTH_SNOW3G_UIA2;
+		*algo = RTE_CRYPTO_AUTH_SNOW3G_UIA2;
 		break;
 	case VIRTIO_CRYPTO_MAC_GMAC_AES:
-		ret = RTE_CRYPTO_AUTH_AES_GMAC;
-		break;
-	case VIRTIO_CRYPTO_MAC_GMAC_TWOFISH:
-		ret = -VIRTIO_CRYPTO_NOTSUPP;
+		*algo = RTE_CRYPTO_AUTH_AES_GMAC;
 		break;
 	case VIRTIO_CRYPTO_MAC_CBCMAC_AES:
-		ret = RTE_CRYPTO_AUTH_AES_CBC_MAC;
-		break;
-	case VIRTIO_CRYPTO_MAC_CBCMAC_KASUMI_F9:
-		ret = -VIRTIO_CRYPTO_NOTSUPP;
+		*algo = RTE_CRYPTO_AUTH_AES_CBC_MAC;
 		break;
 	case VIRTIO_CRYPTO_MAC_XCBC_AES:
-		ret = RTE_CRYPTO_AUTH_AES_XCBC_MAC;
+		*algo = RTE_CRYPTO_AUTH_AES_XCBC_MAC;
 		break;
+	case VIRTIO_CRYPTO_MAC_CMAC_3DES:
+	case VIRTIO_CRYPTO_MAC_GMAC_TWOFISH:
+	case VIRTIO_CRYPTO_MAC_CBCMAC_KASUMI_F9:
+		return -VIRTIO_CRYPTO_NOTSUPP;
 	default:
-		ret = -VIRTIO_CRYPTO_BADMSG;
-		break;
+		return -VIRTIO_CRYPTO_BADMSG;
 	}
 
-	return ret;
+	return 0;
 }
 
@@ -242,10 +233,9 @@ transform_cipher_param(struct rte_crypto_sym_xform *xform,
 	int ret;
 
-	ret = cipher_algo_transform(param->cipher_algo);
+	ret = cipher_algo_transform(param->cipher_algo, &xform->cipher.algo);
 	if (unlikely(ret < 0))
 		return ret;
 
 	xform->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
-	xform->cipher.algo = (enum rte_crypto_cipher_algorithm)ret;
 	xform->cipher.key.length = param->cipher_key_len;
 	if (xform->cipher.key.length > 0)
@@ -293,9 +283,9 @@ transform_chain_param(struct rte_crypto_sym_xform *xforms,
 
 	/* cipher */
-	ret = cipher_algo_transform(param->cipher_algo);
+	ret = cipher_algo_transform(param->cipher_algo,
+			&xform_cipher->cipher.algo);
 	if (unlikely(ret < 0))
 		return ret;
 	xform_cipher->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
-	xform_cipher->cipher.algo = (enum rte_crypto_cipher_algorithm)ret;
 	xform_cipher->cipher.key.length = param->cipher_key_len;
 	xform_cipher->cipher.key.data = param->cipher_key_buf;
@@ -308,8 +298,7 @@ transform_chain_param(struct rte_crypto_sym_xform *xforms,
 	/* auth */
 	xform_auth->type = RTE_CRYPTO_SYM_XFORM_AUTH;
-	ret = auth_algo_transform(param->hash_algo);
+	ret = auth_algo_transform(param->hash_algo, &xform_auth->auth.algo);
 	if (unlikely(ret < 0))
 		return ret;
-	xform_auth->auth.algo = (enum rte_crypto_auth_algorithm)ret;
 	xform_auth->auth.digest_length = param->digest_len;
 	xform_auth->auth.key.length = param->auth_key_len;
-- 
2.20.1

---
  Diff of the applied patch vs upstream commit (please double-check if non-empty):
---
--- -	2019-06-24 16:18:58.203008624 +0100
+++ 0061-vhost-crypto-fix-inferred-misuse-of-enum.patch	2019-06-24 16:18:55.136428745 +0100
@@ -1 +1 @@
-From 4349d412afe4ec25b43867f4d6d4672156cb5766 Mon Sep 17 00:00:00 2001
+From 6676ee09c8bdd8aa3b731a17aa8fa02de2bdebee Mon Sep 17 00:00:00 2001
@@ -5,0 +6,2 @@
+[ upstream commit 4349d412afe4ec25b43867f4d6d4672156cb5766 ]
+
@@ -10 +11,0 @@
-Cc: stable@dpdk.org
@@ -20 +21 @@
-index 069b56108..8652a778a 100644
+index 590f5ad83..d8a0f954b 100644
@@ -175 +176 @@
-@@ -243,10 +234,9 @@ transform_cipher_param(struct rte_crypto_sym_xform *xform,
+@@ -242,10 +233,9 @@ transform_cipher_param(struct rte_crypto_sym_xform *xform,
@@ -187 +188 @@
-@@ -294,9 +284,9 @@ transform_chain_param(struct rte_crypto_sym_xform *xforms,
+@@ -293,9 +283,9 @@ transform_chain_param(struct rte_crypto_sym_xform *xforms,
@@ -199 +200 @@
-@@ -309,8 +299,7 @@ transform_chain_param(struct rte_crypto_sym_xform *xforms,
+@@ -308,8 +298,7 @@ transform_chain_param(struct rte_crypto_sym_xform *xforms,

