[dpdk-dev] [PATCH v3 25/26] cryptodev: use AES-GCM/CCM as AEAD algorithms

Pablo de Lara pablo.de.lara.guarch at intel.com
Thu Jun 29 13:35:20 CEST 2017


Now that all the structures and functions for AEAD algorithms
are in place, migrate the two supported algorithms,
AES-GCM and AES-CCM, to use them, instead of the separate
cipher and authentication parameters.
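
For reference, a minimal sketch of the resulting application-side setup
(not part of the patch itself) is shown below: a single AEAD transform
replaces the previous cipher + auth transform chain, and the per-operation
fields move from sym->cipher/sym->auth to sym->aead. The buffer names
(key, aad, digest, their physical addresses, lengths) and IV_OFFSET are
placeholders, not symbols introduced by this series:

	/* Session-time transform: one AEAD xform instead of cipher + auth */
	struct rte_crypto_sym_xform aead_xform = {
		.type = RTE_CRYPTO_SYM_XFORM_AEAD,
		.next = NULL,
		.aead = {
			.op = RTE_CRYPTO_AEAD_OP_ENCRYPT,
			.algo = RTE_CRYPTO_AEAD_AES_GCM,
			.key = { .data = key, .length = 16 },
			.iv = { .offset = IV_OFFSET, .length = 12 },
			.digest_length = 16,
			.add_auth_data_length = aad_len,
		},
	};

	/* Per-operation fields now live under op->sym->aead */
	op->sym->aead.data.offset = 0;
	op->sym->aead.data.length = plaintext_len;
	op->sym->aead.aad.data = aad;
	op->sym->aead.aad.phys_addr = aad_phys;
	op->sym->aead.digest.data = digest;
	op->sym->aead.digest.phys_addr = digest_phys;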

Signed-off-by: Pablo de Lara <pablo.de.lara.guarch at intel.com>
Acked-by: Akhil Goyal <akhil.goyal at nxp.com>
---
 doc/guides/sample_app_ug/ipsec_secgw.rst         |  11 +-
 doc/guides/sample_app_ug/l2_forward_crypto.rst   |   2 +-
 doc/guides/tools/cryptoperf.rst                  |   4 -
 drivers/crypto/aesni_gcm/aesni_gcm_pmd.c         |  76 ++--
 drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c     |  24 +-
 drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c      |   8 -
 drivers/crypto/openssl/rte_openssl_pmd.c         | 140 +++++--
 drivers/crypto/openssl/rte_openssl_pmd_ops.c     |  26 +-
 drivers/crypto/openssl/rte_openssl_pmd_private.h |   4 +
 drivers/crypto/qat/qat_crypto.c                  | 186 +++++++---
 drivers/crypto/qat/qat_crypto.h                  |   4 +
 drivers/crypto/qat/qat_crypto_capabilities.h     |  34 +-
 examples/ipsec-secgw/esp.c                       | 231 +++++++-----
 examples/ipsec-secgw/sa.c                        | 187 ++++++----
 examples/l2fwd-crypto/main.c                     |   3 +
 lib/librte_cryptodev/rte_crypto_sym.h            | 100 -----
 lib/librte_cryptodev/rte_cryptodev.c             |   4 -
 test/test/test_cryptodev.c                       | 218 +++++------
 test/test/test_cryptodev_perf.c                  | 446 ++++++++++++-----------
 19 files changed, 892 insertions(+), 816 deletions(-)

diff --git a/doc/guides/sample_app_ug/ipsec_secgw.rst b/doc/guides/sample_app_ug/ipsec_secgw.rst
index ca2a34d..75b960f 100644
--- a/doc/guides/sample_app_ug/ipsec_secgw.rst
+++ b/doc/guides/sample_app_ug/ipsec_secgw.rst
@@ -419,7 +419,6 @@ where each options means:
    * *null*: NULL algorithm
    * *aes-128-cbc*: AES-CBC 128-bit algorithm
    * *aes-128-ctr*: AES-CTR 128-bit algorithm
-   * *aes-128-gcm*: AES-GCM 128-bit algorithm
 
  * Syntax: *cipher_algo <your algorithm>*
 
@@ -447,7 +446,6 @@ where each options means:
 
     * *null*: NULL algorithm
     * *sha1-hmac*: HMAC SHA1 algorithm
-    * *aes-128-gcm*: AES-GCM 128-bit algorithm
 
 ``<auth_key>``
 
@@ -470,6 +468,10 @@ where each options means:
 
  * Optional: Yes, unless <cipher_algo> and <auth_algo> are not used
 
+ * Available options:
+
+   * *aes-128-gcm*: AES-GCM 128-bit algorithm
+
  * Syntax: *cipher_algo <your algorithm>*
 
 ``<aead_key>``
@@ -539,9 +541,8 @@ Example SA rules:
     src 1111:1111:1111:1111:1111:1111:1111:5555 \
     dst 2222:2222:2222:2222:2222:2222:2222:5555
 
-    sa in 105 cipher_algo aes-128-gcm \
-    cipher_key de:ad:be:ef:de:ad:be:ef:de:ad:be:ef:de:ad:be:ef:de:ad:be:ef \
-    auth_algo aes-128-gcm \
+    sa in 105 aead_algo aes-128-gcm \
+    aead_key de:ad:be:ef:de:ad:be:ef:de:ad:be:ef:de:ad:be:ef:de:ad:be:ef \
     mode ipv4-tunnel src 172.16.2.5 dst 172.16.1.5
 
 Routing rule syntax
diff --git a/doc/guides/sample_app_ug/l2_forward_crypto.rst b/doc/guides/sample_app_ug/l2_forward_crypto.rst
index 2880c43..2a61af7 100644
--- a/doc/guides/sample_app_ug/l2_forward_crypto.rst
+++ b/doc/guides/sample_app_ug/l2_forward_crypto.rst
@@ -156,7 +156,7 @@ where,
 
     Note that if --auth_iv is used, this will be ignored.
 
-*   aead_algo: select the AEAD algorithm
+*   aead_algo: select the AEAD algorithm (default is aes-gcm)
 
 *   aead_op: select the AEAD operation to perform: ENCRYPT or DECRYPT
 
diff --git a/doc/guides/tools/cryptoperf.rst b/doc/guides/tools/cryptoperf.rst
index 6b797a7..a077e7d 100644
--- a/doc/guides/tools/cryptoperf.rst
+++ b/doc/guides/tools/cryptoperf.rst
@@ -223,10 +223,8 @@ The following are the appication command-line options:
            3des-ecb
            3des-ctr
            aes-cbc
-           aes-ccm
            aes-ctr
            aes-ecb
-           aes-gcm
            aes-f8
            aes-xts
            arc4
@@ -257,9 +255,7 @@ The following are the appication command-line options:
 
            3des-cbc
            aes-cbc-mac
-           aes-ccm
            aes-cmac
-           aes-gcm
            aes-gmac
            aes-xcbc-mac
            md5
diff --git a/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c b/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
index 36372a6..567c2ec 100644
--- a/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
+++ b/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
@@ -77,13 +77,13 @@ aesni_gcm_set_session_parameters(struct aesni_gcm_session *sess,
 		const struct rte_crypto_sym_xform *xform)
 {
 	const struct rte_crypto_sym_xform *auth_xform;
-	const struct rte_crypto_sym_xform *cipher_xform;
+	const struct rte_crypto_sym_xform *aead_xform;
 	uint16_t digest_length;
 	uint8_t key_length;
 	uint8_t *key;
 
 	/* AES-GMAC */
-	if (xform->next == NULL) {
+	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
 		auth_xform = xform;
 		if (auth_xform->auth.algo != RTE_CRYPTO_AUTH_AES_GMAC) {
 			GCM_LOG_ERR("Only AES GMAC is supported as an "
@@ -102,52 +102,39 @@ aesni_gcm_set_session_parameters(struct aesni_gcm_session *sess,
 
 		key_length = auth_xform->auth.key.length;
 		key = auth_xform->auth.key.data;
+		digest_length = auth_xform->auth.digest_length;
+
 	/* AES-GCM */
-	} else {
-		if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
-				xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
-			auth_xform = xform->next;
-			cipher_xform = xform;
-		} else if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
-				xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
-			auth_xform = xform;
-			cipher_xform = xform->next;
-		} else {
-			GCM_LOG_ERR("Cipher and auth xform required "
-					"when using AES GCM");
-			return -EINVAL;
-		}
+	} else if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
+		aead_xform = xform;
 
-		if (!(cipher_xform->cipher.algo == RTE_CRYPTO_CIPHER_AES_GCM &&
-				(auth_xform->auth.algo == RTE_CRYPTO_AUTH_AES_GCM))) {
+		if (aead_xform->aead.algo != RTE_CRYPTO_AEAD_AES_GCM) {
 			GCM_LOG_ERR("The only combined operation "
 						"supported is AES GCM");
 			return -EINVAL;
 		}
 
 		/* Set IV parameters */
-		sess->iv.offset = cipher_xform->cipher.iv.offset;
-		sess->iv.length = cipher_xform->cipher.iv.length;
+		sess->iv.offset = aead_xform->aead.iv.offset;
+		sess->iv.length = aead_xform->aead.iv.length;
 
 		/* Select Crypto operation */
-		if (cipher_xform->cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT &&
-				auth_xform->auth.op == RTE_CRYPTO_AUTH_OP_GENERATE)
+		if (aead_xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
 			sess->op = AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION;
-		else if (cipher_xform->cipher.op == RTE_CRYPTO_CIPHER_OP_DECRYPT &&
-				auth_xform->auth.op == RTE_CRYPTO_AUTH_OP_VERIFY)
+		else
 			sess->op = AESNI_GCM_OP_AUTHENTICATED_DECRYPTION;
-		else {
-			GCM_LOG_ERR("Cipher/Auth operations: Encrypt/Generate or"
-					" Decrypt/Verify are valid only");
-			return -EINVAL;
-		}
 
-		key_length = cipher_xform->auth.key.length;
-		key = cipher_xform->auth.key.data;
+		key_length = aead_xform->aead.key.length;
+		key = aead_xform->aead.key.data;
 
-		sess->aad_length = auth_xform->auth.add_auth_data_length;
+		sess->aad_length = aead_xform->aead.add_auth_data_length;
+		digest_length = aead_xform->aead.digest_length;
+	} else {
+		GCM_LOG_ERR("Wrong xform type, has to be AEAD or authentication");
+		return -EINVAL;
 	}
 
+
 	/* IV check */
 	if (sess->iv.length != 16 && sess->iv.length != 12 &&
 			sess->iv.length != 0) {
@@ -155,8 +142,6 @@ aesni_gcm_set_session_parameters(struct aesni_gcm_session *sess,
 		return -EINVAL;
 	}
 
-	digest_length = auth_xform->auth.digest_length;
-
 	/* Check key length and calculate GCM pre-compute. */
 	switch (key_length) {
 	case 16:
@@ -170,7 +155,7 @@ aesni_gcm_set_session_parameters(struct aesni_gcm_session *sess,
 
 		break;
 	default:
-		GCM_LOG_ERR("Unsupported cipher/auth key length");
+		GCM_LOG_ERR("Unsupported key length");
 		return -EINVAL;
 	}
 
@@ -241,9 +226,9 @@ process_gcm_crypto_op(struct rte_crypto_op *op,
 
 	if (session->op == AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION ||
 			session->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION) {
-		offset = sym_op->cipher.data.offset;
+		offset = sym_op->aead.data.offset;
 		data_offset = offset;
-		data_length = sym_op->cipher.data.length;
+		data_length = sym_op->aead.data.length;
 	} else {
 		offset = sym_op->auth.data.offset;
 		data_offset = offset;
@@ -296,7 +281,7 @@ process_gcm_crypto_op(struct rte_crypto_op *op,
 
 		aesni_gcm_enc[session->key].init(&session->gdata,
 				iv_ptr,
-				sym_op->auth.aad.data,
+				sym_op->aead.aad.data,
 				(uint64_t)session->aad_length);
 
 		aesni_gcm_enc[session->key].update(&session->gdata, dst, src,
@@ -320,7 +305,7 @@ process_gcm_crypto_op(struct rte_crypto_op *op,
 		}
 
 		aesni_gcm_enc[session->key].finalize(&session->gdata,
-				sym_op->auth.digest.data,
+				sym_op->aead.digest.data,
 				(uint64_t)session->digest_length);
 	} else if (session->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION) {
 		uint8_t *auth_tag = (uint8_t *)rte_pktmbuf_append(sym_op->m_dst ?
@@ -334,7 +319,7 @@ process_gcm_crypto_op(struct rte_crypto_op *op,
 
 		aesni_gcm_dec[session->key].init(&session->gdata,
 				iv_ptr,
-				sym_op->auth.aad.data,
+				sym_op->aead.aad.data,
 				(uint64_t)session->aad_length);
 
 		aesni_gcm_dec[session->key].update(&session->gdata, dst, src,
@@ -414,19 +399,24 @@ post_process_gcm_crypto_op(struct rte_crypto_op *op)
 	/* Verify digest if required */
 	if (session->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION ||
 			session->op == AESNI_GMAC_OP_VERIFY) {
+		uint8_t *digest;
 
 		uint8_t *tag = rte_pktmbuf_mtod_offset(m, uint8_t *,
 				m->data_len - session->digest_length);
 
+		if (session->op == AESNI_GMAC_OP_VERIFY)
+			digest = op->sym->auth.digest.data;
+		else
+			digest = op->sym->aead.digest.data;
+
 #ifdef RTE_LIBRTE_PMD_AESNI_GCM_DEBUG
 		rte_hexdump(stdout, "auth tag (orig):",
-				op->sym->auth.digest.data, session->digest_length);
+				digest, session->digest_length);
 		rte_hexdump(stdout, "auth tag (calc):",
 				tag, session->digest_length);
 #endif
 
-		if (memcmp(tag, op->sym->auth.digest.data,
-				session->digest_length) != 0)
+		if (memcmp(tag, digest,	session->digest_length) != 0)
 			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
 
 		/* trim area used for digest from mbuf */
diff --git a/drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c b/drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c
index 39285d0..5317657 100644
--- a/drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c
+++ b/drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c
@@ -65,12 +65,12 @@ static const struct rte_cryptodev_capabilities aesni_gcm_pmd_capabilities[] = {
 			}, }
 		}, }
 	},
-	{	/* AES GCM (AUTH) */
+	{	/* AES GCM */
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
-			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
-			{.auth = {
-				.algo = RTE_CRYPTO_AUTH_AES_GCM,
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
+			{.aead = {
+				.algo = RTE_CRYPTO_AEAD_AES_GCM,
 				.block_size = 16,
 				.key_size = {
 					.min = 16,
@@ -87,22 +87,6 @@ static const struct rte_cryptodev_capabilities aesni_gcm_pmd_capabilities[] = {
 					.max = 65535,
 					.increment = 1
 				},
-				.iv_size = { 0 }
-			}, }
-		}, }
-	},
-	{	/* AES GCM (CIPHER) */
-		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
-		{.sym = {
-			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
-			{.cipher = {
-				.algo = RTE_CRYPTO_CIPHER_AES_GCM,
-				.block_size = 16,
-				.key_size = {
-					.min = 16,
-					.max = 32,
-					.increment = 16
-				},
 				.iv_size = {
 					.min = 12,
 					.max = 12,
diff --git a/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c b/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c
index 8ee6ece..3620751 100644
--- a/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c
+++ b/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c
@@ -817,8 +817,6 @@ dpaa2_sec_cipher_init(struct rte_cryptodev *dev,
 		break;
 	case RTE_CRYPTO_CIPHER_AES_CTR:
 	case RTE_CRYPTO_CIPHER_3DES_CTR:
-	case RTE_CRYPTO_CIPHER_AES_GCM:
-	case RTE_CRYPTO_CIPHER_AES_CCM:
 	case RTE_CRYPTO_CIPHER_AES_ECB:
 	case RTE_CRYPTO_CIPHER_3DES_ECB:
 	case RTE_CRYPTO_CIPHER_AES_XTS:
@@ -946,7 +944,6 @@ dpaa2_sec_auth_init(struct rte_cryptodev *dev,
 		session->auth_alg = RTE_CRYPTO_AUTH_SHA224_HMAC;
 		break;
 	case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
-	case RTE_CRYPTO_AUTH_AES_GCM:
 	case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
 	case RTE_CRYPTO_AUTH_NULL:
 	case RTE_CRYPTO_AUTH_SHA1:
@@ -955,7 +952,6 @@ dpaa2_sec_auth_init(struct rte_cryptodev *dev,
 	case RTE_CRYPTO_AUTH_SHA224:
 	case RTE_CRYPTO_AUTH_SHA384:
 	case RTE_CRYPTO_AUTH_MD5:
-	case RTE_CRYPTO_AUTH_AES_CCM:
 	case RTE_CRYPTO_AUTH_AES_GMAC:
 	case RTE_CRYPTO_AUTH_KASUMI_F9:
 	case RTE_CRYPTO_AUTH_AES_CMAC:
@@ -1100,7 +1096,6 @@ dpaa2_sec_aead_init(struct rte_cryptodev *dev,
 		session->auth_alg = RTE_CRYPTO_AUTH_SHA512_HMAC;
 		break;
 	case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
-	case RTE_CRYPTO_AUTH_AES_GCM:
 	case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
 	case RTE_CRYPTO_AUTH_NULL:
 	case RTE_CRYPTO_AUTH_SHA1:
@@ -1109,7 +1104,6 @@ dpaa2_sec_aead_init(struct rte_cryptodev *dev,
 	case RTE_CRYPTO_AUTH_SHA224:
 	case RTE_CRYPTO_AUTH_SHA384:
 	case RTE_CRYPTO_AUTH_MD5:
-	case RTE_CRYPTO_AUTH_AES_CCM:
 	case RTE_CRYPTO_AUTH_AES_GMAC:
 	case RTE_CRYPTO_AUTH_KASUMI_F9:
 	case RTE_CRYPTO_AUTH_AES_CMAC:
@@ -1141,13 +1135,11 @@ dpaa2_sec_aead_init(struct rte_cryptodev *dev,
 		session->cipher_alg = RTE_CRYPTO_CIPHER_3DES_CBC;
 		ctxt->iv.length = TDES_CBC_IV_LEN;
 		break;
-	case RTE_CRYPTO_CIPHER_AES_GCM:
 	case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
 	case RTE_CRYPTO_CIPHER_NULL:
 	case RTE_CRYPTO_CIPHER_3DES_ECB:
 	case RTE_CRYPTO_CIPHER_AES_ECB:
 	case RTE_CRYPTO_CIPHER_AES_CTR:
-	case RTE_CRYPTO_CIPHER_AES_CCM:
 	case RTE_CRYPTO_CIPHER_KASUMI_F8:
 		RTE_LOG(ERR, PMD, "Crypto: Unsupported Cipher alg %u",
 			cipher_xform->algo);
diff --git a/drivers/crypto/openssl/rte_openssl_pmd.c b/drivers/crypto/openssl/rte_openssl_pmd.c
index 11260d8..7f5c6aa 100644
--- a/drivers/crypto/openssl/rte_openssl_pmd.c
+++ b/drivers/crypto/openssl/rte_openssl_pmd.c
@@ -89,6 +89,8 @@ openssl_get_chain_order(const struct rte_crypto_sym_xform *xform)
 			else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
 				res =  OPENSSL_CHAIN_CIPHER_AUTH;
 		}
+		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
+			res = OPENSSL_CHAIN_COMBINED;
 	}
 
 	return res;
@@ -184,21 +186,6 @@ get_cipher_algo(enum rte_crypto_cipher_algorithm sess_algo, size_t keylen,
 				res = -EINVAL;
 			}
 			break;
-		case RTE_CRYPTO_CIPHER_AES_GCM:
-			switch (keylen) {
-			case 16:
-				*algo = EVP_aes_128_gcm();
-				break;
-			case 24:
-				*algo = EVP_aes_192_gcm();
-				break;
-			case 32:
-				*algo = EVP_aes_256_gcm();
-				break;
-			default:
-				res = -EINVAL;
-			}
-			break;
 		default:
 			res = -EINVAL;
 			break;
@@ -254,6 +241,41 @@ get_auth_algo(enum rte_crypto_auth_algorithm sessalgo,
 	return res;
 }
 
+/** Get adequate openssl function for input cipher algorithm */
+static uint8_t
+get_aead_algo(enum rte_crypto_aead_algorithm sess_algo, size_t keylen,
+		const EVP_CIPHER **algo)
+{
+	int res = 0;
+
+	if (algo != NULL) {
+		switch (sess_algo) {
+		case RTE_CRYPTO_AEAD_AES_GCM:
+			switch (keylen) {
+			case 16:
+				*algo = EVP_aes_128_gcm();
+				break;
+			case 24:
+				*algo = EVP_aes_192_gcm();
+				break;
+			case 32:
+				*algo = EVP_aes_256_gcm();
+				break;
+			default:
+				res = -EINVAL;
+			}
+			break;
+		default:
+			res = -EINVAL;
+			break;
+		}
+	} else {
+		res = -EINVAL;
+	}
+
+	return res;
+}
+
 /** Set session cipher parameters */
 static int
 openssl_set_session_cipher_parameters(struct openssl_session *sess,
@@ -273,7 +295,6 @@ openssl_set_session_cipher_parameters(struct openssl_session *sess,
 	case RTE_CRYPTO_CIPHER_3DES_CBC:
 	case RTE_CRYPTO_CIPHER_AES_CBC:
 	case RTE_CRYPTO_CIPHER_AES_CTR:
-	case RTE_CRYPTO_CIPHER_AES_GCM:
 		sess->cipher.mode = OPENSSL_CIPHER_LIB;
 		sess->cipher.algo = xform->cipher.algo;
 		sess->cipher.ctx = EVP_CIPHER_CTX_new();
@@ -330,12 +351,6 @@ openssl_set_session_auth_parameters(struct openssl_session *sess,
 
 	/* Select auth algo */
 	switch (xform->auth.algo) {
-	case RTE_CRYPTO_AUTH_AES_GCM:
-		/* Check additional condition for AES_GCM */
-		if (sess->cipher.algo != RTE_CRYPTO_CIPHER_AES_GCM)
-			return -EINVAL;
-		sess->chain_order = OPENSSL_CHAIN_COMBINED;
-		break;
 	case RTE_CRYPTO_AUTH_AES_GMAC:
 		sess->chain_order = OPENSSL_CHAIN_COMBINED;
 
@@ -356,7 +371,7 @@ openssl_set_session_auth_parameters(struct openssl_session *sess,
 		sess->cipher.key.length = xform->auth.key.length;
 		sess->cipher.ctx = EVP_CIPHER_CTX_new();
 
-		if (get_cipher_algo(RTE_CRYPTO_CIPHER_AES_GCM,
+		if (get_aead_algo(RTE_CRYPTO_AEAD_AES_GCM,
 				sess->cipher.key.length,
 				&sess->cipher.evp_algo) != 0)
 			return -EINVAL;
@@ -404,6 +419,50 @@ openssl_set_session_auth_parameters(struct openssl_session *sess,
 	return 0;
 }
 
+/* Set session AEAD parameters */
+static int
+openssl_set_session_aead_parameters(struct openssl_session *sess,
+		const struct rte_crypto_sym_xform *xform)
+{
+	/* Select cipher direction */
+	sess->cipher.direction = xform->cipher.op;
+	/* Select cipher key */
+	sess->cipher.key.length = xform->aead.key.length;
+
+	/* Set IV parameters */
+	sess->iv.offset = xform->aead.iv.offset;
+	sess->iv.length = xform->aead.iv.length;
+
+	/* Select auth generate/verify */
+	sess->auth.operation = xform->auth.op;
+	sess->auth.algo = xform->auth.algo;
+
+	/* Select auth algo */
+	switch (xform->aead.algo) {
+	case RTE_CRYPTO_AEAD_AES_GCM:
+		sess->cipher.mode = OPENSSL_CIPHER_LIB;
+		sess->aead_algo = xform->aead.algo;
+		sess->cipher.ctx = EVP_CIPHER_CTX_new();
+
+		if (get_aead_algo(sess->aead_algo, sess->cipher.key.length,
+				&sess->cipher.evp_algo) != 0)
+			return -EINVAL;
+
+		get_cipher_key(xform->cipher.key.data, sess->cipher.key.length,
+			sess->cipher.key.data);
+
+		sess->chain_order = OPENSSL_CHAIN_COMBINED;
+		break;
+	default:
+		return -EINVAL;
+	}
+
+	sess->auth.aad_length = xform->aead.add_auth_data_length;
+	sess->auth.digest_length = xform->aead.digest_length;
+
+	return 0;
+}
+
 /** Parse crypto xform chain and set private session parameters */
 int
 openssl_set_session_parameters(struct openssl_session *sess,
@@ -411,6 +470,7 @@ openssl_set_session_parameters(struct openssl_session *sess,
 {
 	const struct rte_crypto_sym_xform *cipher_xform = NULL;
 	const struct rte_crypto_sym_xform *auth_xform = NULL;
+	const struct rte_crypto_sym_xform *aead_xform = NULL;
 
 	sess->chain_order = openssl_get_chain_order(xform);
 	switch (sess->chain_order) {
@@ -428,6 +488,9 @@ openssl_set_session_parameters(struct openssl_session *sess,
 		auth_xform = xform;
 		cipher_xform = xform->next;
 		break;
+	case OPENSSL_CHAIN_COMBINED:
+		aead_xform = xform;
+		break;
 	default:
 		return -EINVAL;
 	}
@@ -453,6 +516,14 @@ openssl_set_session_parameters(struct openssl_session *sess,
 		}
 	}
 
+	if (aead_xform) {
+		if (openssl_set_session_aead_parameters(sess, aead_xform)) {
+			OPENSSL_LOG_ERR(
+				"Invalid/unsupported auth parameters");
+			return -EINVAL;
+		}
+	}
+
 	return 0;
 }
 
@@ -965,26 +1036,27 @@ process_openssl_combined_op
 	iv = rte_crypto_op_ctod_offset(op, uint8_t *,
 			sess->iv.offset);
 	ivlen = sess->iv.length;
-	tag = op->sym->auth.digest.data;
-	if (tag == NULL)
-		tag = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
-				op->sym->cipher.data.offset +
-				op->sym->cipher.data.length);
-
 	if (sess->auth.algo == RTE_CRYPTO_AUTH_AES_GMAC) {
 		srclen = 0;
 		offset = op->sym->auth.data.offset;
 		aadlen = op->sym->auth.data.length;
 		aad = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
 				op->sym->auth.data.offset);
-
+		tag = op->sym->auth.digest.data;
+		if (tag == NULL)
+			tag = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
+				offset + aadlen);
 	} else {
-		srclen = op->sym->cipher.data.length;
+		srclen = op->sym->aead.data.length;
 		dst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
-				op->sym->cipher.data.offset);
-		offset = op->sym->cipher.data.offset;
-		aad = op->sym->auth.aad.data;
+				op->sym->aead.data.offset);
+		offset = op->sym->aead.data.offset;
+		aad = op->sym->aead.aad.data;
 		aadlen = sess->auth.aad_length;
+		tag = op->sym->aead.digest.data;
+		if (tag == NULL)
+			tag = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
+				offset + srclen);
 	}
 
 	if (sess->cipher.direction == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
diff --git a/drivers/crypto/openssl/rte_openssl_pmd_ops.c b/drivers/crypto/openssl/rte_openssl_pmd_ops.c
index fc525d9..26265b8 100644
--- a/drivers/crypto/openssl/rte_openssl_pmd_ops.c
+++ b/drivers/crypto/openssl/rte_openssl_pmd_ops.c
@@ -344,12 +344,12 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 			}, }
 		}, }
 	},
-	{	/* AES GCM (AUTH) */
+	{	/* AES GCM */
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
-			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
-			{.auth = {
-				.algo = RTE_CRYPTO_AUTH_AES_GCM,
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
+			{.aead = {
+				.algo = RTE_CRYPTO_AEAD_AES_GCM,
 				.block_size = 16,
 				.key_size = {
 					.min = 16,
@@ -366,27 +366,11 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 					.max = 65535,
 					.increment = 1
 				},
-				.iv_size = { 0 }
-			}, }
-		}, }
-	},
-	{	/* AES GCM (CIPHER) */
-		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
-		{.sym = {
-			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
-			{.cipher = {
-				.algo = RTE_CRYPTO_CIPHER_AES_GCM,
-				.block_size = 16,
-				.key_size = {
-					.min = 16,
-					.max = 32,
-					.increment = 8
-				},
 				.iv_size = {
 					.min = 12,
 					.max = 16,
 					.increment = 4
-				}
+				},
 			}, }
 		}, }
 	},
diff --git a/drivers/crypto/openssl/rte_openssl_pmd_private.h b/drivers/crypto/openssl/rte_openssl_pmd_private.h
index 4c9be05..7799407 100644
--- a/drivers/crypto/openssl/rte_openssl_pmd_private.h
+++ b/drivers/crypto/openssl/rte_openssl_pmd_private.h
@@ -113,6 +113,10 @@ struct openssl_session {
 		uint16_t offset;
 	} iv;
 	/**< IV parameters */
+
+	enum rte_crypto_aead_algorithm aead_algo;
+	/**< AEAD algorithm */
+
 	/** Cipher Parameters */
 	struct {
 		enum rte_crypto_cipher_operation direction;
diff --git a/drivers/crypto/qat/qat_crypto.c b/drivers/crypto/qat/qat_crypto.c
index 81f7a1f..c3e7662 100644
--- a/drivers/crypto/qat/qat_crypto.c
+++ b/drivers/crypto/qat/qat_crypto.c
@@ -246,6 +246,14 @@ qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
 	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
 		return ICP_QAT_FW_LA_CMD_AUTH;
 
+	/* AEAD */
+	if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
+		if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
+			return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
+		else
+			return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
+	}
+
 	if (xform->next == NULL)
 		return -1;
 
@@ -310,14 +318,6 @@ qat_crypto_sym_configure_session_cipher(struct rte_cryptodev *dev,
 		}
 		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
 		break;
-	case RTE_CRYPTO_CIPHER_AES_GCM:
-		if (qat_alg_validate_aes_key(cipher_xform->key.length,
-				&session->qat_cipher_alg) != 0) {
-			PMD_DRV_LOG(ERR, "Invalid AES cipher key size");
-			goto error_out;
-		}
-		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
-		break;
 	case RTE_CRYPTO_CIPHER_AES_CTR:
 		if (qat_alg_validate_aes_key(cipher_xform->key.length,
 				&session->qat_cipher_alg) != 0) {
@@ -418,7 +418,6 @@ qat_crypto_sym_configure_session_cipher(struct rte_cryptodev *dev,
 		break;
 	case RTE_CRYPTO_CIPHER_3DES_ECB:
 	case RTE_CRYPTO_CIPHER_AES_ECB:
-	case RTE_CRYPTO_CIPHER_AES_CCM:
 	case RTE_CRYPTO_CIPHER_AES_F8:
 	case RTE_CRYPTO_CIPHER_AES_XTS:
 	case RTE_CRYPTO_CIPHER_ARC4:
@@ -476,12 +475,26 @@ qat_crypto_sym_configure_session(struct rte_cryptodev *dev,
 	session = qat_crypto_sym_configure_session_auth(dev, xform, session);
 		break;
 	case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
-	session = qat_crypto_sym_configure_session_cipher(dev, xform, session);
-	session = qat_crypto_sym_configure_session_auth(dev, xform, session);
+		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
+			session = qat_crypto_sym_configure_session_aead(xform,
+					session);
+		else {
+			session = qat_crypto_sym_configure_session_cipher(dev,
+					xform, session);
+			session = qat_crypto_sym_configure_session_auth(dev,
+					xform, session);
+		}
 		break;
 	case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
-	session = qat_crypto_sym_configure_session_auth(dev, xform, session);
-	session = qat_crypto_sym_configure_session_cipher(dev, xform, session);
+		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
+			session = qat_crypto_sym_configure_session_aead(xform,
+					session);
+		else {
+			session = qat_crypto_sym_configure_session_auth(dev,
+					xform, session);
+			session = qat_crypto_sym_configure_session_cipher(dev,
+					xform, session);
+		}
 		break;
 	case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
 	case ICP_QAT_FW_LA_CMD_TRNG_TEST:
@@ -515,7 +528,6 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
 
 	struct qat_session *session = session_private;
 	struct rte_crypto_auth_xform *auth_xform = NULL;
-	struct rte_crypto_cipher_xform *cipher_xform = NULL;
 	struct qat_pmd_private *internals = dev->data->dev_private;
 	auth_xform = qat_get_auth_xform(xform);
 	uint8_t *key_data = auth_xform->key.data;
@@ -540,14 +552,6 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
 	case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
 		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
 		break;
-	case RTE_CRYPTO_AUTH_AES_GCM:
-		cipher_xform = qat_get_cipher_xform(xform);
-
-		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
-
-		key_data = cipher_xform->key.data;
-		key_length = cipher_xform->key.length;
-		break;
 	case RTE_CRYPTO_AUTH_AES_GMAC:
 		if (qat_alg_validate_aes_key(auth_xform->key.length,
 				&session->qat_cipher_alg) != 0) {
@@ -585,7 +589,6 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
 	case RTE_CRYPTO_AUTH_SHA224:
 	case RTE_CRYPTO_AUTH_SHA384:
 	case RTE_CRYPTO_AUTH_MD5:
-	case RTE_CRYPTO_AUTH_AES_CCM:
 	case RTE_CRYPTO_AUTH_AES_CMAC:
 	case RTE_CRYPTO_AUTH_AES_CBC_MAC:
 		PMD_DRV_LOG(ERR, "Crypto: Unsupported hash alg %u",
@@ -646,7 +649,7 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
 		if (qat_alg_aead_session_create_content_desc_auth(session,
 				key_data,
 				key_length,
-				auth_xform->add_auth_data_length,
+				0,
 				auth_xform->digest_length,
 				auth_xform->op))
 			goto error_out;
@@ -659,6 +662,85 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
 	return NULL;
 }
 
+struct qat_session *
+qat_crypto_sym_configure_session_aead(struct rte_crypto_sym_xform *xform,
+				struct qat_session *session_private)
+{
+	struct qat_session *session = session_private;
+	struct rte_crypto_aead_xform *aead_xform = &xform->aead;
+
+	/*
+	 * Store AEAD IV parameters as cipher IV,
+	 * to avoid unnecessary memory usage
+	 */
+	session->cipher_iv.offset = xform->aead.iv.offset;
+	session->cipher_iv.length = xform->aead.iv.length;
+
+	switch (aead_xform->algo) {
+	case RTE_CRYPTO_AEAD_AES_GCM:
+		if (qat_alg_validate_aes_key(aead_xform->key.length,
+				&session->qat_cipher_alg) != 0) {
+			PMD_DRV_LOG(ERR, "Invalid AES key size");
+			goto error_out;
+		}
+		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
+		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
+		break;
+	case RTE_CRYPTO_AEAD_AES_CCM:
+		PMD_DRV_LOG(ERR, "Crypto QAT PMD: Unsupported AEAD alg %u",
+				aead_xform->algo);
+		goto error_out;
+	default:
+		PMD_DRV_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
+				aead_xform->algo);
+		goto error_out;
+	}
+
+	if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
+		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
+		/*
+		 * It needs to create cipher desc content first,
+		 * then authentication
+		 */
+		if (qat_alg_aead_session_create_content_desc_cipher(session,
+					aead_xform->key.data,
+					aead_xform->key.length))
+			goto error_out;
+
+		if (qat_alg_aead_session_create_content_desc_auth(session,
+					aead_xform->key.data,
+					aead_xform->key.length,
+					aead_xform->add_auth_data_length,
+					aead_xform->digest_length,
+					RTE_CRYPTO_AUTH_OP_GENERATE))
+			goto error_out;
+	} else {
+		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
+		/*
+		 * It needs to create authentication desc content first,
+		 * then cipher
+		 */
+		if (qat_alg_aead_session_create_content_desc_auth(session,
+					aead_xform->key.data,
+					aead_xform->key.length,
+					aead_xform->add_auth_data_length,
+					aead_xform->digest_length,
+					RTE_CRYPTO_AUTH_OP_VERIFY))
+			goto error_out;
+
+		if (qat_alg_aead_session_create_content_desc_cipher(session,
+					aead_xform->key.data,
+					aead_xform->key.length))
+			goto error_out;
+	}
+
+	session->digest_length = aead_xform->digest_length;
+	return session;
+
+error_out:
+	return NULL;
+}
+
 unsigned qat_crypto_sym_get_session_private_size(
 		struct rte_cryptodev *dev __rte_unused)
 {
@@ -969,7 +1051,7 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 	struct icp_qat_fw_la_cipher_req_params *cipher_param;
 	struct icp_qat_fw_la_auth_req_params *auth_param;
 	register struct icp_qat_fw_la_bulk_req *qat_req;
-	uint8_t do_auth = 0, do_cipher = 0;
+	uint8_t do_auth = 0, do_cipher = 0, do_aead = 0;
 	uint32_t cipher_len = 0, cipher_ofs = 0;
 	uint32_t auth_len = 0, auth_ofs = 0;
 	uint32_t min_ofs = 0;
@@ -1003,9 +1085,15 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 	auth_param = (void *)((uint8_t *)cipher_param + sizeof(*cipher_param));
 
 	if (ctx->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER ||
-		ctx->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
-		do_auth = 1;
-		do_cipher = 1;
+			ctx->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
+		/* AES-GCM */
+		if (ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128 ||
+				ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64) {
+			do_aead = 1;
+		} else {
+			do_auth = 1;
+			do_cipher = 1;
+		}
 	} else if (ctx->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
 		do_auth = 1;
 		do_cipher = 0;
@@ -1087,18 +1175,10 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 					ICP_QAT_HW_AUTH_ALGO_GALOIS_128 ||
 				ctx->qat_hash_alg ==
 					ICP_QAT_HW_AUTH_ALGO_GALOIS_64) {
-			/* AES-GCM */
-			if (do_cipher) {
-				auth_ofs = op->sym->cipher.data.offset;
-				auth_len = op->sym->cipher.data.length;
-
-				auth_param->u1.aad_adr = op->sym->auth.aad.phys_addr;
 			/* AES-GMAC */
-			} else {
-				set_cipher_iv(ctx->auth_iv.length,
-					ctx->auth_iv.offset,
-					cipher_param, op, qat_req);
-			}
+			set_cipher_iv(ctx->auth_iv.length,
+				ctx->auth_iv.offset,
+				cipher_param, op, qat_req);
 		} else {
 			auth_ofs = op->sym->auth.data.offset;
 			auth_len = op->sym->auth.data.length;
@@ -1110,6 +1190,19 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 
 	}
 
+	if (do_aead) {
+		cipher_len = op->sym->aead.data.length;
+		cipher_ofs = op->sym->aead.data.offset;
+		auth_len = op->sym->aead.data.length;
+		auth_ofs = op->sym->aead.data.offset;
+
+		auth_param->u1.aad_adr = op->sym->aead.aad.phys_addr;
+		auth_param->auth_res_addr = op->sym->aead.digest.phys_addr;
+		set_cipher_iv(ctx->cipher_iv.length, ctx->cipher_iv.offset,
+				cipher_param, op, qat_req);
+		min_ofs = op->sym->aead.data.offset;
+	}
+
 	if (op->sym->m_src->next || (op->sym->m_dst && op->sym->m_dst->next))
 		do_sgl = 1;
 
@@ -1151,7 +1244,7 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 		dst_buf_start = src_buf_start;
 	}
 
-	if (do_cipher) {
+	if (do_cipher || do_aead) {
 		cipher_param->cipher_offset =
 				(uint32_t)rte_pktmbuf_mtophys_offset(
 				op->sym->m_src, cipher_ofs) - src_buf_start;
@@ -1160,7 +1253,8 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 		cipher_param->cipher_offset = 0;
 		cipher_param->cipher_length = 0;
 	}
-	if (do_auth) {
+
+	if (do_auth || do_aead) {
 		auth_param->auth_off = (uint32_t)rte_pktmbuf_mtophys_offset(
 				op->sym->m_src, auth_ofs) - src_buf_start;
 		auth_param->auth_len = auth_len;
@@ -1168,6 +1262,7 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 		auth_param->auth_off = 0;
 		auth_param->auth_len = 0;
 	}
+
 	qat_req->comn_mid.dst_length =
 		qat_req->comn_mid.src_length =
 		(cipher_param->cipher_offset + cipher_param->cipher_length)
@@ -1226,7 +1321,7 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 				ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
 		}
 		/* GMAC */
-		if (!do_cipher) {
+		if (!do_aead) {
 			qat_req->comn_mid.dst_length =
 				qat_req->comn_mid.src_length =
 					rte_pktmbuf_data_len(op->sym->m_src);
@@ -1261,7 +1356,12 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 		}
 		rte_hexdump(stdout, "digest:", op->sym->auth.digest.data,
 				ctx->digest_length);
-		rte_hexdump(stdout, "aad:", op->sym->auth.aad.data,
+	}
+
+	if (do_aead) {
+		rte_hexdump(stdout, "digest:", op->sym->aead.digest.data,
+				ctx->digest_length);
+		rte_hexdump(stdout, "aad:", op->sym->aead.aad.data,
 				ctx->aad_len);
 	}
 #endif
diff --git a/drivers/crypto/qat/qat_crypto.h b/drivers/crypto/qat/qat_crypto.h
index b740d6b..f76f3ca 100644
--- a/drivers/crypto/qat/qat_crypto.h
+++ b/drivers/crypto/qat/qat_crypto.h
@@ -117,6 +117,10 @@ qat_crypto_sym_configure_session(struct rte_cryptodev *dev,
 		struct rte_crypto_sym_xform *xform, void *session_private);
 
 struct qat_session *
+qat_crypto_sym_configure_session_aead(struct rte_crypto_sym_xform *xform,
+				struct qat_session *session_private);
+
+struct qat_session *
 qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
 				struct rte_crypto_sym_xform *xform,
 				struct qat_session *session_private);
diff --git a/drivers/crypto/qat/qat_crypto_capabilities.h b/drivers/crypto/qat/qat_crypto_capabilities.h
index d863ccd..fee8ee1 100644
--- a/drivers/crypto/qat/qat_crypto_capabilities.h
+++ b/drivers/crypto/qat/qat_crypto_capabilities.h
@@ -189,12 +189,12 @@
 			}, }						\
 		}, }							\
 	},								\
-	{	/* AES GCM (AUTH) */					\
+	{	/* AES GCM */						\
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
-			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,	\
-			{.auth = {					\
-				.algo = RTE_CRYPTO_AUTH_AES_GCM,	\
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,	\
+			{.aead = {					\
+				.algo = RTE_CRYPTO_AEAD_AES_GCM,	\
 				.block_size = 16,			\
 				.key_size = {				\
 					.min = 16,			\
@@ -211,7 +211,11 @@
 					.max = 240,			\
 					.increment = 1			\
 				},					\
-				.iv_size = { 0 }			\
+				.iv_size = {				\
+					.min = 12,			\
+					.max = 12,			\
+					.increment = 0			\
+				},					\
 			}, }						\
 		}, }							\
 	},								\
@@ -266,26 +270,6 @@
 			}, }						\
 		}, }							\
 	},								\
-	{	/* AES GCM (CIPHER) */					\
-		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
-		{.sym = {						\
-			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,	\
-			{.cipher = {					\
-				.algo = RTE_CRYPTO_CIPHER_AES_GCM,	\
-				.block_size = 16,			\
-				.key_size = {				\
-					.min = 16,			\
-					.max = 32,			\
-					.increment = 8			\
-				},					\
-				.iv_size = {				\
-					.min = 12,			\
-					.max = 12,			\
-					.increment = 0			\
-				}					\
-			}, }						\
-		}, }							\
-	},								\
 	{	/* AES CBC */						\
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
diff --git a/examples/ipsec-secgw/esp.c b/examples/ipsec-secgw/esp.c
index d544a3c..c1dbd15 100644
--- a/examples/ipsec-secgw/esp.c
+++ b/examples/ipsec-secgw/esp.c
@@ -84,62 +84,79 @@ esp_inbound(struct rte_mbuf *m, struct ipsec_sa *sa,
 	}
 
 	sym_cop = get_sym_cop(cop);
-
 	sym_cop->m_src = m;
-	sym_cop->cipher.data.offset =  ip_hdr_len + sizeof(struct esp_hdr) +
-		sa->iv_len;
-	sym_cop->cipher.data.length = payload_len;
-
-	struct cnt_blk *icb;
-	uint8_t *aad;
-	uint8_t *iv = RTE_PTR_ADD(ip4, ip_hdr_len + sizeof(struct esp_hdr));
-	uint8_t *iv_ptr = rte_crypto_op_ctod_offset(cop,
-				uint8_t *, IV_OFFSET);
-
-	switch (sa->cipher_algo) {
-	case RTE_CRYPTO_CIPHER_NULL:
-	case RTE_CRYPTO_CIPHER_AES_CBC:
-		/* Copy IV at the end of crypto operation */
-		rte_memcpy(iv_ptr, iv, sa->iv_len);
-		break;
-	case RTE_CRYPTO_CIPHER_AES_CTR:
-	case RTE_CRYPTO_CIPHER_AES_GCM:
+
+	if (sa->aead_algo == RTE_CRYPTO_AEAD_AES_GCM) {
+		sym_cop->aead.data.offset =  ip_hdr_len + sizeof(struct esp_hdr) +
+			sa->iv_len;
+		sym_cop->aead.data.length = payload_len;
+
+		struct cnt_blk *icb;
+		uint8_t *aad;
+		uint8_t *iv = RTE_PTR_ADD(ip4, ip_hdr_len + sizeof(struct esp_hdr));
+
 		icb = get_cnt_blk(m);
 		icb->salt = sa->salt;
 		memcpy(&icb->iv, iv, 8);
 		icb->cnt = rte_cpu_to_be_32(1);
-		break;
-	default:
-		RTE_LOG(ERR, IPSEC_ESP, "unsupported cipher algorithm %u\n",
-				sa->cipher_algo);
-		return -EINVAL;
-	}
 
-	switch (sa->auth_algo) {
-	case RTE_CRYPTO_AUTH_NULL:
-	case RTE_CRYPTO_AUTH_SHA1_HMAC:
-	case RTE_CRYPTO_AUTH_SHA256_HMAC:
-		sym_cop->auth.data.offset = ip_hdr_len;
-		sym_cop->auth.data.length = sizeof(struct esp_hdr) +
-			sa->iv_len + payload_len;
-		break;
-	case RTE_CRYPTO_AUTH_AES_GCM:
 		aad = get_aad(m);
 		memcpy(aad, iv - sizeof(struct esp_hdr), 8);
-		sym_cop->auth.aad.data = aad;
-		sym_cop->auth.aad.phys_addr = rte_pktmbuf_mtophys_offset(m,
+		sym_cop->aead.aad.data = aad;
+		sym_cop->aead.aad.phys_addr = rte_pktmbuf_mtophys_offset(m,
 				aad - rte_pktmbuf_mtod(m, uint8_t *));
-		break;
-	default:
-		RTE_LOG(ERR, IPSEC_ESP, "unsupported auth algorithm %u\n",
-				sa->auth_algo);
-		return -EINVAL;
-	}
 
-	sym_cop->auth.digest.data = rte_pktmbuf_mtod_offset(m, void*,
-			rte_pktmbuf_pkt_len(m) - sa->digest_len);
-	sym_cop->auth.digest.phys_addr = rte_pktmbuf_mtophys_offset(m,
-			rte_pktmbuf_pkt_len(m) - sa->digest_len);
+		sym_cop->aead.digest.data = rte_pktmbuf_mtod_offset(m, void*,
+				rte_pktmbuf_pkt_len(m) - sa->digest_len);
+		sym_cop->aead.digest.phys_addr = rte_pktmbuf_mtophys_offset(m,
+				rte_pktmbuf_pkt_len(m) - sa->digest_len);
+	} else {
+		sym_cop->cipher.data.offset =  ip_hdr_len + sizeof(struct esp_hdr) +
+			sa->iv_len;
+		sym_cop->cipher.data.length = payload_len;
+
+		struct cnt_blk *icb;
+		uint8_t *iv = RTE_PTR_ADD(ip4, ip_hdr_len + sizeof(struct esp_hdr));
+		uint8_t *iv_ptr = rte_crypto_op_ctod_offset(cop,
+					uint8_t *, IV_OFFSET);
+
+		switch (sa->cipher_algo) {
+		case RTE_CRYPTO_CIPHER_NULL:
+		case RTE_CRYPTO_CIPHER_AES_CBC:
+			/* Copy IV at the end of crypto operation */
+			rte_memcpy(iv_ptr, iv, sa->iv_len);
+			break;
+		case RTE_CRYPTO_CIPHER_AES_CTR:
+			icb = get_cnt_blk(m);
+			icb->salt = sa->salt;
+			memcpy(&icb->iv, iv, 8);
+			icb->cnt = rte_cpu_to_be_32(1);
+			break;
+		default:
+			RTE_LOG(ERR, IPSEC_ESP, "unsupported cipher algorithm %u\n",
+					sa->cipher_algo);
+			return -EINVAL;
+		}
+
+		switch (sa->auth_algo) {
+		case RTE_CRYPTO_AUTH_NULL:
+		case RTE_CRYPTO_AUTH_SHA1_HMAC:
+		case RTE_CRYPTO_AUTH_SHA256_HMAC:
+			sym_cop->auth.data.offset = ip_hdr_len;
+			sym_cop->auth.data.length = sizeof(struct esp_hdr) +
+				sa->iv_len + payload_len;
+			break;
+		default:
+			RTE_LOG(ERR, IPSEC_ESP, "unsupported auth algorithm %u\n",
+					sa->auth_algo);
+			return -EINVAL;
+		}
+
+		sym_cop->auth.digest.data = rte_pktmbuf_mtod_offset(m, void*,
+				rte_pktmbuf_pkt_len(m) - sa->digest_len);
+		sym_cop->auth.digest.phys_addr = rte_pktmbuf_mtophys_offset(m,
+				rte_pktmbuf_pkt_len(m) - sa->digest_len);
+	}
 
 	return 0;
 }
@@ -308,65 +325,87 @@ esp_outbound(struct rte_mbuf *m, struct ipsec_sa *sa,
 
 	sym_cop = get_sym_cop(cop);
 	sym_cop->m_src = m;
-	switch (sa->cipher_algo) {
-	case RTE_CRYPTO_CIPHER_NULL:
-	case RTE_CRYPTO_CIPHER_AES_CBC:
-		memset(iv, 0, sa->iv_len);
-		sym_cop->cipher.data.offset = ip_hdr_len +
-			sizeof(struct esp_hdr);
-		sym_cop->cipher.data.length = pad_payload_len + sa->iv_len;
-		break;
-	case RTE_CRYPTO_CIPHER_AES_CTR:
-	case RTE_CRYPTO_CIPHER_AES_GCM:
+
+	if (sa->aead_algo == RTE_CRYPTO_AEAD_AES_GCM) {
+		uint8_t *aad;
+
 		*iv = sa->seq;
-		sym_cop->cipher.data.offset = ip_hdr_len +
+		sym_cop->aead.data.offset = ip_hdr_len +
 			sizeof(struct esp_hdr) + sa->iv_len;
-		sym_cop->cipher.data.length = pad_payload_len;
-		break;
-	default:
-		RTE_LOG(ERR, IPSEC_ESP, "unsupported cipher algorithm %u\n",
-				sa->cipher_algo);
-		return -EINVAL;
-	}
+		sym_cop->aead.data.length = pad_payload_len;
+
+		/* Fill pad_len using default sequential scheme */
+		for (i = 0; i < pad_len - 2; i++)
+			padding[i] = i + 1;
+		padding[pad_len - 2] = pad_len - 2;
+		padding[pad_len - 1] = nlp;
+
+		struct cnt_blk *icb = get_cnt_blk(m);
+		icb->salt = sa->salt;
+		icb->iv = sa->seq;
+		icb->cnt = rte_cpu_to_be_32(1);
 
-	/* Fill pad_len using default sequential scheme */
-	for (i = 0; i < pad_len - 2; i++)
-		padding[i] = i + 1;
-	padding[pad_len - 2] = pad_len - 2;
-	padding[pad_len - 1] = nlp;
-
-	struct cnt_blk *icb = get_cnt_blk(m);
-	icb->salt = sa->salt;
-	icb->iv = sa->seq;
-	icb->cnt = rte_cpu_to_be_32(1);
-
-	uint8_t *aad;
-
-	switch (sa->auth_algo) {
-	case RTE_CRYPTO_AUTH_NULL:
-	case RTE_CRYPTO_AUTH_SHA1_HMAC:
-	case RTE_CRYPTO_AUTH_SHA256_HMAC:
-		sym_cop->auth.data.offset = ip_hdr_len;
-		sym_cop->auth.data.length = sizeof(struct esp_hdr) +
-			sa->iv_len + pad_payload_len;
-		break;
-	case RTE_CRYPTO_AUTH_AES_GCM:
 		aad = get_aad(m);
 		memcpy(aad, esp, 8);
-		sym_cop->auth.aad.data = aad;
-		sym_cop->auth.aad.phys_addr = rte_pktmbuf_mtophys_offset(m,
+		sym_cop->aead.aad.data = aad;
+		sym_cop->aead.aad.phys_addr = rte_pktmbuf_mtophys_offset(m,
 				aad - rte_pktmbuf_mtod(m, uint8_t *));
-		break;
-	default:
-		RTE_LOG(ERR, IPSEC_ESP, "unsupported auth algorithm %u\n",
-				sa->auth_algo);
-		return -EINVAL;
-	}
 
-	sym_cop->auth.digest.data = rte_pktmbuf_mtod_offset(m, uint8_t *,
+		sym_cop->aead.digest.data = rte_pktmbuf_mtod_offset(m, uint8_t *,
 			rte_pktmbuf_pkt_len(m) - sa->digest_len);
-	sym_cop->auth.digest.phys_addr = rte_pktmbuf_mtophys_offset(m,
+		sym_cop->aead.digest.phys_addr = rte_pktmbuf_mtophys_offset(m,
 			rte_pktmbuf_pkt_len(m) - sa->digest_len);
+	} else {
+		switch (sa->cipher_algo) {
+		case RTE_CRYPTO_CIPHER_NULL:
+		case RTE_CRYPTO_CIPHER_AES_CBC:
+			memset(iv, 0, sa->iv_len);
+			sym_cop->cipher.data.offset = ip_hdr_len +
+				sizeof(struct esp_hdr);
+			sym_cop->cipher.data.length = pad_payload_len + sa->iv_len;
+			break;
+		case RTE_CRYPTO_CIPHER_AES_CTR:
+			*iv = sa->seq;
+			sym_cop->cipher.data.offset = ip_hdr_len +
+				sizeof(struct esp_hdr) + sa->iv_len;
+			sym_cop->cipher.data.length = pad_payload_len;
+			break;
+		default:
+			RTE_LOG(ERR, IPSEC_ESP, "unsupported cipher algorithm %u\n",
+					sa->cipher_algo);
+			return -EINVAL;
+		}
+
+		/* Fill pad_len using default sequential scheme */
+		for (i = 0; i < pad_len - 2; i++)
+			padding[i] = i + 1;
+		padding[pad_len - 2] = pad_len - 2;
+		padding[pad_len - 1] = nlp;
+
+		struct cnt_blk *icb = get_cnt_blk(m);
+		icb->salt = sa->salt;
+		icb->iv = sa->seq;
+		icb->cnt = rte_cpu_to_be_32(1);
+
+		switch (sa->auth_algo) {
+		case RTE_CRYPTO_AUTH_NULL:
+		case RTE_CRYPTO_AUTH_SHA1_HMAC:
+		case RTE_CRYPTO_AUTH_SHA256_HMAC:
+			sym_cop->auth.data.offset = ip_hdr_len;
+			sym_cop->auth.data.length = sizeof(struct esp_hdr) +
+				sa->iv_len + pad_payload_len;
+			break;
+		default:
+			RTE_LOG(ERR, IPSEC_ESP, "unsupported auth algorithm %u\n",
+					sa->auth_algo);
+			return -EINVAL;
+		}
+
+		sym_cop->auth.digest.data = rte_pktmbuf_mtod_offset(m, uint8_t *,
+				rte_pktmbuf_pkt_len(m) - sa->digest_len);
+		sym_cop->auth.digest.phys_addr = rte_pktmbuf_mtophys_offset(m,
+				rte_pktmbuf_pkt_len(m) - sa->digest_len);
+	}
 
 	return 0;
 }
diff --git a/examples/ipsec-secgw/sa.c b/examples/ipsec-secgw/sa.c
index 9d80bd3..7971f72 100644
--- a/examples/ipsec-secgw/sa.c
+++ b/examples/ipsec-secgw/sa.c
@@ -64,7 +64,6 @@ struct supported_auth_algo {
 	enum rte_crypto_auth_algorithm algo;
 	uint16_t digest_len;
 	uint16_t key_len;
-	uint8_t aad_len;
 	uint8_t key_not_req;
 };
 
@@ -95,13 +94,6 @@ const struct supported_cipher_algo cipher_algos[] = {
 		.key_len = 16
 	},
 	{
-		.keyword = "aes-128-gcm",
-		.algo = RTE_CRYPTO_CIPHER_AES_GCM,
-		.iv_len = 8,
-		.block_size = 4,
-		.key_len = 20
-	},
-	{
 		.keyword = "aes-128-ctr",
 		.algo = RTE_CRYPTO_CIPHER_AES_CTR,
 		.iv_len = 8,
@@ -129,18 +121,21 @@ const struct supported_auth_algo auth_algos[] = {
 		.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
 		.digest_len = 12,
 		.key_len = 32
-	},
+	}
+};
+
+const struct supported_aead_algo aead_algos[] = {
 	{
 		.keyword = "aes-128-gcm",
-		.algo = RTE_CRYPTO_AUTH_AES_GCM,
+		.algo = RTE_CRYPTO_AEAD_AES_GCM,
+		.iv_len = 8,
+		.block_size = 4,
+		.key_len = 20,
 		.digest_len = 16,
 		.aad_len = 8,
-		.key_not_req = 1
 	}
 };
 
-const struct supported_aead_algo aead_algos[] = { { } };
-
 struct ipsec_sa sa_out[IPSEC_SA_MAX_ENTRIES];
 uint32_t nb_sa_out;
 
@@ -349,8 +344,7 @@ parse_sa_tokens(char **tokens, uint32_t n_tokens,
 			if (algo->algo == RTE_CRYPTO_CIPHER_AES_CBC)
 				rule->salt = (uint32_t)rte_rand();
 
-			if ((algo->algo == RTE_CRYPTO_CIPHER_AES_CTR) ||
-				(algo->algo == RTE_CRYPTO_CIPHER_AES_GCM)) {
+			if (algo->algo == RTE_CRYPTO_CIPHER_AES_CTR) {
 				key_len -= 4;
 				rule->cipher_key_len = key_len;
 				memcpy(&rule->salt,
@@ -712,75 +706,110 @@ sa_add_rules(struct sa_ctx *sa_ctx, const struct ipsec_sa entries[],
 			sa->dst.ip.ip4 = rte_cpu_to_be_32(sa->dst.ip.ip4);
 		}
 
-		switch (sa->cipher_algo) {
-		case RTE_CRYPTO_CIPHER_NULL:
-		case RTE_CRYPTO_CIPHER_AES_CBC:
-			iv_length = sa->iv_len;
-			break;
-		case RTE_CRYPTO_CIPHER_AES_CTR:
-		case RTE_CRYPTO_CIPHER_AES_GCM:
+		if (sa->aead_algo == RTE_CRYPTO_AEAD_AES_GCM) {
 			iv_length = 16;
-			break;
-		default:
-			RTE_LOG(ERR, IPSEC_ESP, "unsupported cipher algorithm %u\n",
-					sa->cipher_algo);
-			return -EINVAL;
-		}
 
-		if (inbound) {
-			sa_ctx->xf[idx].b.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
-			sa_ctx->xf[idx].b.cipher.algo = sa->cipher_algo;
-			sa_ctx->xf[idx].b.cipher.key.data = sa->cipher_key;
-			sa_ctx->xf[idx].b.cipher.key.length =
-				sa->cipher_key_len;
-			sa_ctx->xf[idx].b.cipher.op =
-				RTE_CRYPTO_CIPHER_OP_DECRYPT;
-			sa_ctx->xf[idx].b.cipher.iv.offset = IV_OFFSET;
-			sa_ctx->xf[idx].b.cipher.iv.length = iv_length;
-			sa_ctx->xf[idx].b.next = NULL;
+			if (inbound) {
+				sa_ctx->xf[idx].a.type = RTE_CRYPTO_SYM_XFORM_AEAD;
+				sa_ctx->xf[idx].a.aead.algo = sa->aead_algo;
+				sa_ctx->xf[idx].a.aead.key.data = sa->cipher_key;
+				sa_ctx->xf[idx].a.aead.key.length =
+					sa->cipher_key_len;
+				sa_ctx->xf[idx].a.aead.op =
+					RTE_CRYPTO_AEAD_OP_DECRYPT;
+				sa_ctx->xf[idx].a.next = NULL;
+				sa_ctx->xf[idx].a.aead.iv.offset = IV_OFFSET;
+				sa_ctx->xf[idx].a.aead.iv.length = iv_length;
+				sa_ctx->xf[idx].a.aead.add_auth_data_length =
+					sa->aad_len;
+				sa_ctx->xf[idx].a.aead.digest_length =
+					sa->digest_len;
+			} else { /* outbound */
+				sa_ctx->xf[idx].a.type = RTE_CRYPTO_SYM_XFORM_AEAD;
+				sa_ctx->xf[idx].a.aead.algo = sa->aead_algo;
+				sa_ctx->xf[idx].a.aead.key.data = sa->cipher_key;
+				sa_ctx->xf[idx].a.aead.key.length =
+					sa->cipher_key_len;
+				sa_ctx->xf[idx].a.aead.op =
+					RTE_CRYPTO_AEAD_OP_ENCRYPT;
+				sa_ctx->xf[idx].a.next = NULL;
+				sa_ctx->xf[idx].a.aead.iv.offset = IV_OFFSET;
+				sa_ctx->xf[idx].a.aead.iv.length = iv_length;
+				sa_ctx->xf[idx].a.aead.add_auth_data_length =
+					sa->aad_len;
+				sa_ctx->xf[idx].a.aead.digest_length =
+					sa->digest_len;
+			}
 
-			sa_ctx->xf[idx].a.type = RTE_CRYPTO_SYM_XFORM_AUTH;
-			sa_ctx->xf[idx].a.auth.algo = sa->auth_algo;
-			sa_ctx->xf[idx].a.auth.add_auth_data_length =
-				sa->aad_len;
-			sa_ctx->xf[idx].a.auth.key.data = sa->auth_key;
-			sa_ctx->xf[idx].a.auth.key.length =
-				sa->auth_key_len;
-			sa_ctx->xf[idx].a.auth.digest_length =
-				sa->digest_len;
-			sa_ctx->xf[idx].a.auth.op =
-				RTE_CRYPTO_AUTH_OP_VERIFY;
-
-		} else { /* outbound */
-			sa_ctx->xf[idx].a.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
-			sa_ctx->xf[idx].a.cipher.algo = sa->cipher_algo;
-			sa_ctx->xf[idx].a.cipher.key.data = sa->cipher_key;
-			sa_ctx->xf[idx].a.cipher.key.length =
-				sa->cipher_key_len;
-			sa_ctx->xf[idx].a.cipher.op =
-				RTE_CRYPTO_CIPHER_OP_ENCRYPT;
-			sa_ctx->xf[idx].a.cipher.iv.offset = IV_OFFSET;
-			sa_ctx->xf[idx].a.cipher.iv.length = iv_length;
-			sa_ctx->xf[idx].a.next = NULL;
-
-			sa_ctx->xf[idx].b.type = RTE_CRYPTO_SYM_XFORM_AUTH;
-			sa_ctx->xf[idx].b.auth.algo = sa->auth_algo;
-			sa_ctx->xf[idx].b.auth.add_auth_data_length =
-				sa->aad_len;
-			sa_ctx->xf[idx].b.auth.key.data = sa->auth_key;
-			sa_ctx->xf[idx].b.auth.key.length =
-				sa->auth_key_len;
-			sa_ctx->xf[idx].b.auth.digest_length =
-				sa->digest_len;
-			sa_ctx->xf[idx].b.auth.op =
-				RTE_CRYPTO_AUTH_OP_GENERATE;
-		}
+			sa->xforms = &sa_ctx->xf[idx].a;
 
-		sa_ctx->xf[idx].a.next = &sa_ctx->xf[idx].b;
-		sa_ctx->xf[idx].b.next = NULL;
-		sa->xforms = &sa_ctx->xf[idx].a;
+			print_one_sa_rule(sa, inbound);
+		} else {
+			switch (sa->cipher_algo) {
+			case RTE_CRYPTO_CIPHER_NULL:
+			case RTE_CRYPTO_CIPHER_AES_CBC:
+				iv_length = sa->iv_len;
+				break;
+			case RTE_CRYPTO_CIPHER_AES_CTR:
+				iv_length = 16;
+				break;
+			default:
+				RTE_LOG(ERR, IPSEC_ESP,
+						"unsupported cipher algorithm %u\n",
+						sa->cipher_algo);
+				return -EINVAL;
+			}
+
+			if (inbound) {
+				sa_ctx->xf[idx].b.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
+				sa_ctx->xf[idx].b.cipher.algo = sa->cipher_algo;
+				sa_ctx->xf[idx].b.cipher.key.data = sa->cipher_key;
+				sa_ctx->xf[idx].b.cipher.key.length =
+					sa->cipher_key_len;
+				sa_ctx->xf[idx].b.cipher.op =
+					RTE_CRYPTO_CIPHER_OP_DECRYPT;
+				sa_ctx->xf[idx].b.next = NULL;
+				sa_ctx->xf[idx].b.cipher.iv.offset = IV_OFFSET;
+				sa_ctx->xf[idx].b.cipher.iv.length = iv_length;
+
+				sa_ctx->xf[idx].a.type = RTE_CRYPTO_SYM_XFORM_AUTH;
+				sa_ctx->xf[idx].a.auth.algo = sa->auth_algo;
+				sa_ctx->xf[idx].a.auth.key.data = sa->auth_key;
+				sa_ctx->xf[idx].a.auth.key.length =
+					sa->auth_key_len;
+				sa_ctx->xf[idx].a.auth.digest_length =
+					sa->digest_len;
+				sa_ctx->xf[idx].a.auth.op =
+					RTE_CRYPTO_AUTH_OP_VERIFY;
+			} else { /* outbound */
+				sa_ctx->xf[idx].a.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
+				sa_ctx->xf[idx].a.cipher.algo = sa->cipher_algo;
+				sa_ctx->xf[idx].a.cipher.key.data = sa->cipher_key;
+				sa_ctx->xf[idx].a.cipher.key.length =
+					sa->cipher_key_len;
+				sa_ctx->xf[idx].a.cipher.op =
+					RTE_CRYPTO_CIPHER_OP_ENCRYPT;
+				sa_ctx->xf[idx].a.next = NULL;
+				sa_ctx->xf[idx].a.cipher.iv.offset = IV_OFFSET;
+				sa_ctx->xf[idx].a.cipher.iv.length = iv_length;
+
+				sa_ctx->xf[idx].b.type = RTE_CRYPTO_SYM_XFORM_AUTH;
+				sa_ctx->xf[idx].b.auth.algo = sa->auth_algo;
+				sa_ctx->xf[idx].b.auth.key.data = sa->auth_key;
+				sa_ctx->xf[idx].b.auth.key.length =
+					sa->auth_key_len;
+				sa_ctx->xf[idx].b.auth.digest_length =
+					sa->digest_len;
+				sa_ctx->xf[idx].b.auth.op =
+					RTE_CRYPTO_AUTH_OP_GENERATE;
+			}
 
-		print_one_sa_rule(sa, inbound);
+			sa_ctx->xf[idx].a.next = &sa_ctx->xf[idx].b;
+			sa_ctx->xf[idx].b.next = NULL;
+			sa->xforms = &sa_ctx->xf[idx].a;
+
+			print_one_sa_rule(sa, inbound);
+		}
 	}
 
 	return 0;
diff --git a/examples/l2fwd-crypto/main.c b/examples/l2fwd-crypto/main.c
index 914f8ed..f28e62a 100644
--- a/examples/l2fwd-crypto/main.c
+++ b/examples/l2fwd-crypto/main.c
@@ -1428,6 +1428,9 @@ l2fwd_crypto_default_options(struct l2fwd_crypto_options *options)
 	options->aead_iv_random_size = -1;
 	options->aead_iv.length = 0;
 
+	options->auth_xform.aead.algo = RTE_CRYPTO_AEAD_AES_GCM;
+	options->auth_xform.aead.op = RTE_CRYPTO_AEAD_OP_ENCRYPT;
+
 	options->aad_param = 0;
 	options->aad_random_size = -1;
 	options->aad.length = 0;
diff --git a/lib/librte_cryptodev/rte_crypto_sym.h b/lib/librte_cryptodev/rte_crypto_sym.h
index f03d2fd..dab042b 100644
--- a/lib/librte_cryptodev/rte_crypto_sym.h
+++ b/lib/librte_cryptodev/rte_crypto_sym.h
@@ -68,27 +68,12 @@ enum rte_crypto_cipher_algorithm {
 
 	RTE_CRYPTO_CIPHER_AES_CBC,
 	/**< AES algorithm in CBC mode */
-	RTE_CRYPTO_CIPHER_AES_CCM,
-	/**< AES algorithm in CCM mode. When this cipher algorithm is used the
-	 * *RTE_CRYPTO_AUTH_AES_CCM* element of the
-	 * *rte_crypto_hash_algorithm* enum MUST be used to set up the related
-	 * *rte_crypto_auth_xform* structure in the session context or in
-	 * the op_params of the crypto operation structure in the case of a
-	 * session-less crypto operation
-	 */
 	RTE_CRYPTO_CIPHER_AES_CTR,
 	/**< AES algorithm in Counter mode */
 	RTE_CRYPTO_CIPHER_AES_ECB,
 	/**< AES algorithm in ECB mode */
 	RTE_CRYPTO_CIPHER_AES_F8,
 	/**< AES algorithm in F8 mode */
-	RTE_CRYPTO_CIPHER_AES_GCM,
-	/**< AES algorithm in GCM mode. When this cipher algorithm is used the
-	 * *RTE_CRYPTO_AUTH_AES_GCM* element of the *rte_crypto_auth_algorithm*
-	 * enum MUST be used to set up the related *rte_crypto_auth_setup_data*
-	 * structure in the session context or in the op_params of the crypto
-	 * operation structure in the case of a session-less crypto operation.
-	 */
 	RTE_CRYPTO_CIPHER_AES_XTS,
 	/**< AES algorithm in XTS mode */
 
@@ -247,25 +232,8 @@ enum rte_crypto_auth_algorithm {
 
 	RTE_CRYPTO_AUTH_AES_CBC_MAC,
 	/**< AES-CBC-MAC algorithm. Only 128-bit keys are supported. */
-	RTE_CRYPTO_AUTH_AES_CCM,
-	/**< AES algorithm in CCM mode. This is an authenticated cipher. When
-	 * this hash algorithm is used, the *RTE_CRYPTO_CIPHER_AES_CCM*
-	 * element of the *rte_crypto_cipher_algorithm* enum MUST be used to
-	 * set up the related rte_crypto_cipher_setup_data structure in the
-	 * session context or the corresponding parameter in the crypto
-	 * operation data structures op_params parameter MUST be set for a
-	 * session-less crypto operation.
-	 */
 	RTE_CRYPTO_AUTH_AES_CMAC,
 	/**< AES CMAC algorithm. */
-	RTE_CRYPTO_AUTH_AES_GCM,
-	/**< AES algorithm in GCM mode. When this hash algorithm
-	 * is used, the RTE_CRYPTO_CIPHER_AES_GCM element of the
-	 * rte_crypto_cipher_algorithm enum MUST be used to set up the related
-	 * rte_crypto_cipher_setup_data structure in the session context, or
-	 * the corresponding parameter in the crypto operation data structures
-	 * op_params parameter MUST be set for a session-less crypto operation.
-	 */
 	RTE_CRYPTO_AUTH_AES_GMAC,
 	/**< AES GMAC algorithm. */
 	RTE_CRYPTO_AUTH_AES_XCBC_MAC,
@@ -363,20 +331,6 @@ struct rte_crypto_auth_xform {
 	 * The maximum permitted value is 65535 (2^16 - 1) bytes, unless
 	 * otherwise specified below.
 	 *
-	 * This field must be specified when the hash algorithm is one of the
-	 * following:
-	 *
-	 * - For GCM (@ref RTE_CRYPTO_AUTH_AES_GCM).  In this case, this is
-	 *   the length of the Additional Authenticated Data (called A, in NIST
-	 *   SP800-38D).
-	 *
-	 * - For CCM (@ref RTE_CRYPTO_AUTH_AES_CCM).  In this case, this is
-	 *   the length of the associated data (called A, in NIST SP800-38C).
-	 *   Note that this does NOT include the length of any padding, or the
-	 *   18 bytes reserved at the start of the above field to store the
-	 *   block B0 and the encoded length.  The maximum permitted value in
-	 *   this case is 222 bytes.
-	 *
 	 */
 
 	struct {
@@ -658,15 +612,6 @@ struct rte_crypto_sym_op {
 					  * also the same as the result length.
 					  *
 					  * @note
-					  * In the case of CCM
-					  * @ref RTE_CRYPTO_AUTH_AES_CCM, this value
-					  * should not include the length of the padding
-					  * or the length of the MAC; the driver will
-					  * compute the actual number of bytes over
-					  * which the encryption will occur, which will
-					  * include these values.
-					  *
-					  * @note
 					  * For SNOW 3G @ RTE_CRYPTO_AUTH_SNOW3G_UEA2,
 					  * KASUMI @ RTE_CRYPTO_CIPHER_KASUMI_F8
 					  * and ZUC @ RTE_CRYPTO_CIPHER_ZUC_EEA3,
@@ -683,12 +628,6 @@ struct rte_crypto_sym_op {
 					  * packet in source buffer.
 					  *
 					  * @note
-					  * For CCM and GCM modes of operation,
-					  * this field is ignored.
-					  * The field @ref aad field should be set
-					  * instead.
-					  *
-					  * @note
 					  * For SNOW 3G @ RTE_CRYPTO_AUTH_SNOW3G_UIA2,
 					  * KASUMI @ RTE_CRYPTO_AUTH_KASUMI_F9
 					  * and ZUC @ RTE_CRYPTO_AUTH_ZUC_EIA3,
@@ -699,11 +638,6 @@ struct rte_crypto_sym_op {
 					  * buffer that the hash will be computed on.
 					  *
 					  * @note
-					  * For CCM and GCM modes of operation,
-					  * this field is ignored. The field @ref aad
-					  * field should be set instead.
-					  *
-					  * @note
 					  * For SNOW 3G @ RTE_CRYPTO_AUTH_SNOW3G_UIA2,
 					  * KASUMI @ RTE_CRYPTO_AUTH_KASUMI_F9
 					  * and ZUC @ RTE_CRYPTO_AUTH_ZUC_EIA3,
@@ -732,9 +666,6 @@ struct rte_crypto_sym_op {
 					 * For digest generation, the digest result
 					 * will overwrite any data at this location.
 					 *
-					 * @note
-					 * For GCM (@ref RTE_CRYPTO_AUTH_AES_GCM), for
-					 * "digest result" read "authentication tag T".
 					 */
 					phys_addr_t phys_addr;
 					/**< Physical address of digest */
@@ -754,37 +685,6 @@ struct rte_crypto_sym_op {
 					 * This length must not exceed 65535 (2^16-1)
 					 * bytes.
 					 *
-					 * Specifically for CCM
-					 * (@ref RTE_CRYPTO_AUTH_AES_CCM),
-					 * the caller should setup this field as follows:
-					 *
-					 * - the nonce should be written starting at
-					 * an offset of one byte into the array,
-					 * leaving room for the implementation to
-					 * write in the flags to the first byte.
-					 *
-					 * - the additional authentication data
-					 * itself should be written starting at
-					 * an offset of 18 bytes into the array,
-					 * leaving room for the length encoding in
-					 * the first two bytes of the second block.
-					 *
-					 * - the array should be big enough to hold
-					 * the above fields, plus any padding to
-					 * round this up to the nearest multiple of
-					 * the block size (16 bytes).
-					 * Padding will be added by the implementation.
-					 *
-					 * Finally, for GCM
-					 * (@ref RTE_CRYPTO_AUTH_AES_GCM), the
-					 * caller should setup this field as follows:
-					 *
-					 * - the AAD is written in starting at byte 0
-					 * - the array must be big enough to hold
-					 * the AAD, plus any space to round this up to
-					 * the nearest multiple of the block size
-					 * (16 bytes).
-					 *
 					 */
 					phys_addr_t phys_addr;	/**< physical address */
 				} aad;
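
For reference, a minimal sketch of the resulting usage (key, IV, digest and
AAD sizes below are placeholder values; dev_id and IV_OFFSET are assumed to
be set up as in the unit tests further down): an AES-GCM session is now
built from a single AEAD transform instead of a cipher transform chained to
an auth transform.

    struct rte_crypto_sym_xform aead_xform = { 0 };
    struct rte_cryptodev_sym_session *sess;
    uint8_t aead_key[16];                      /* placeholder 128-bit key */

    aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
    aead_xform.next = NULL;
    aead_xform.aead.algo = RTE_CRYPTO_AEAD_AES_GCM;
    aead_xform.aead.op = RTE_CRYPTO_AEAD_OP_ENCRYPT;
    aead_xform.aead.key.data = aead_key;
    aead_xform.aead.key.length = sizeof(aead_key);
    aead_xform.aead.iv.offset = IV_OFFSET;     /* IV carried in the op */
    aead_xform.aead.iv.length = 12;            /* placeholder IV length */
    aead_xform.aead.digest_length = 16;        /* placeholder tag length */
    aead_xform.aead.add_auth_data_length = 8;  /* placeholder AAD length */

    sess = rte_cryptodev_sym_session_create(dev_id, &aead_xform);
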
diff --git a/lib/librte_cryptodev/rte_cryptodev.c b/lib/librte_cryptodev/rte_cryptodev.c
index 60dc5e5..497f9ce 100644
--- a/lib/librte_cryptodev/rte_cryptodev.c
+++ b/lib/librte_cryptodev/rte_cryptodev.c
@@ -111,11 +111,9 @@ rte_crypto_cipher_algorithm_strings[] = {
 	[RTE_CRYPTO_CIPHER_3DES_CTR]	= "3des-ctr",
 
 	[RTE_CRYPTO_CIPHER_AES_CBC]	= "aes-cbc",
-	[RTE_CRYPTO_CIPHER_AES_CCM]	= "aes-ccm",
 	[RTE_CRYPTO_CIPHER_AES_CTR]	= "aes-ctr",
 	[RTE_CRYPTO_CIPHER_AES_DOCSISBPI]	= "aes-docsisbpi",
 	[RTE_CRYPTO_CIPHER_AES_ECB]	= "aes-ecb",
-	[RTE_CRYPTO_CIPHER_AES_GCM]	= "aes-gcm",
 	[RTE_CRYPTO_CIPHER_AES_F8]	= "aes-f8",
 	[RTE_CRYPTO_CIPHER_AES_XTS]	= "aes-xts",
 
@@ -148,9 +146,7 @@ rte_crypto_cipher_operation_strings[] = {
 const char *
 rte_crypto_auth_algorithm_strings[] = {
 	[RTE_CRYPTO_AUTH_AES_CBC_MAC]	= "aes-cbc-mac",
-	[RTE_CRYPTO_AUTH_AES_CCM]	= "aes-ccm",
 	[RTE_CRYPTO_AUTH_AES_CMAC]	= "aes-cmac",
-	[RTE_CRYPTO_AUTH_AES_GCM]	= "aes-gcm",
 	[RTE_CRYPTO_AUTH_AES_GMAC]	= "aes-gmac",
 	[RTE_CRYPTO_AUTH_AES_XCBC_MAC]	= "aes-xcbc-mac",
 
diff --git a/test/test/test_cryptodev.c b/test/test/test_cryptodev.c
index 00c32a4..21c6270 100644
--- a/test/test/test_cryptodev.c
+++ b/test/test/test_cryptodev.c
@@ -76,6 +76,7 @@ struct crypto_testsuite_params {
 struct crypto_unittest_params {
 	struct rte_crypto_sym_xform cipher_xform;
 	struct rte_crypto_sym_xform auth_xform;
+	struct rte_crypto_sym_xform aead_xform;
 
 	struct rte_cryptodev_sym_session *sess;
 
@@ -4629,54 +4630,34 @@ test_3DES_cipheronly_openssl_all(void)
 /* ***** AES-GCM Tests ***** */
 
 static int
-create_gcm_session(uint8_t dev_id, enum rte_crypto_cipher_operation op,
+create_gcm_session(uint8_t dev_id, enum rte_crypto_aead_operation op,
 		const uint8_t *key, const uint8_t key_len,
 		const uint16_t aad_len, const uint8_t auth_len,
-		uint8_t iv_len,
-		enum rte_crypto_auth_operation auth_op)
+		uint8_t iv_len)
 {
-	uint8_t cipher_key[key_len];
+	uint8_t aead_key[key_len];
 
 	struct crypto_unittest_params *ut_params = &unittest_params;
 
-	memcpy(cipher_key, key, key_len);
+	memcpy(aead_key, key, key_len);
 
-	/* Setup Cipher Parameters */
-	ut_params->cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
-	ut_params->cipher_xform.next = NULL;
-
-	ut_params->cipher_xform.cipher.algo = RTE_CRYPTO_CIPHER_AES_GCM;
-	ut_params->auth_xform.auth.op = auth_op;
-	ut_params->cipher_xform.cipher.op = op;
-	ut_params->cipher_xform.cipher.key.data = cipher_key;
-	ut_params->cipher_xform.cipher.key.length = key_len;
-	ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET;
-	ut_params->cipher_xform.cipher.iv.length = iv_len;
+	/* Setup AEAD Parameters */
+	ut_params->aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
+	ut_params->aead_xform.next = NULL;
+	ut_params->aead_xform.aead.algo = RTE_CRYPTO_AEAD_AES_GCM;
+	ut_params->aead_xform.aead.op = op;
+	ut_params->aead_xform.aead.key.data = aead_key;
+	ut_params->aead_xform.aead.key.length = key_len;
+	ut_params->aead_xform.aead.iv.offset = IV_OFFSET;
+	ut_params->aead_xform.aead.iv.length = iv_len;
+	ut_params->aead_xform.aead.digest_length = auth_len;
+	ut_params->aead_xform.aead.add_auth_data_length = aad_len;
 
 	TEST_HEXDUMP(stdout, "key:", key, key_len);
 
-	/* Setup Authentication Parameters */
-	ut_params->auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
-	ut_params->auth_xform.next = NULL;
-
-	ut_params->auth_xform.auth.algo = RTE_CRYPTO_AUTH_AES_GCM;
-
-	ut_params->auth_xform.auth.digest_length = auth_len;
-	ut_params->auth_xform.auth.add_auth_data_length = aad_len;
-	ut_params->auth_xform.auth.key.length = 0;
-	ut_params->auth_xform.auth.key.data = NULL;
-
-	if (op == RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
-		ut_params->cipher_xform.next = &ut_params->auth_xform;
-
-		/* Create Crypto session*/
-		ut_params->sess = rte_cryptodev_sym_session_create(dev_id,
-				&ut_params->cipher_xform);
-	} else {/* Create Crypto session*/
-		ut_params->auth_xform.next = &ut_params->cipher_xform;
-		ut_params->sess = rte_cryptodev_sym_session_create(dev_id,
-				&ut_params->auth_xform);
-	}
+	/* Create Crypto session*/
+	ut_params->sess = rte_cryptodev_sym_session_create(dev_id,
+			&ut_params->aead_xform);
 
 	TEST_ASSERT_NOT_NULL(ut_params->sess, "Session creation failed");
 
@@ -4685,43 +4666,35 @@ create_gcm_session(uint8_t dev_id, enum rte_crypto_cipher_operation op,
 
 static int
 create_gcm_xforms(struct rte_crypto_op *op,
-		enum rte_crypto_cipher_operation cipher_op,
+		enum rte_crypto_aead_operation aead_op,
 		uint8_t *key, const uint8_t key_len,
 		const uint8_t aad_len, const uint8_t auth_len,
-		uint8_t iv_len,
-		enum rte_crypto_auth_operation auth_op)
+		uint8_t iv_len)
 {
-	TEST_ASSERT_NOT_NULL(rte_crypto_op_sym_xforms_alloc(op, 2),
-			"failed to allocate space for crypto transforms");
+	TEST_ASSERT_NOT_NULL(rte_crypto_op_sym_xforms_alloc(op, 1),
+			"failed to allocate space for crypto transform");
 
 	struct rte_crypto_sym_op *sym_op = op->sym;
 
-	/* Setup Cipher Parameters */
-	sym_op->xform->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
-	sym_op->xform->cipher.algo = RTE_CRYPTO_CIPHER_AES_GCM;
-	sym_op->xform->cipher.op = cipher_op;
-	sym_op->xform->cipher.key.data = key;
-	sym_op->xform->cipher.key.length = key_len;
-	sym_op->xform->cipher.iv.offset = IV_OFFSET;
-	sym_op->xform->cipher.iv.length = iv_len;
+	/* Setup AEAD Parameters */
+	sym_op->xform->type = RTE_CRYPTO_SYM_XFORM_AEAD;
+	sym_op->xform->next = NULL;
+	sym_op->xform->aead.algo = RTE_CRYPTO_AEAD_AES_GCM;
+	sym_op->xform->aead.op = aead_op;
+	sym_op->xform->aead.key.data = key;
+	sym_op->xform->aead.key.length = key_len;
+	sym_op->xform->aead.iv.offset = IV_OFFSET;
+	sym_op->xform->aead.iv.length = iv_len;
+	sym_op->xform->aead.digest_length = auth_len;
+	sym_op->xform->aead.add_auth_data_length = aad_len;
 
 	TEST_HEXDUMP(stdout, "key:", key, key_len);
 
-	/* Setup Authentication Parameters */
-	sym_op->xform->next->type = RTE_CRYPTO_SYM_XFORM_AUTH;
-	sym_op->xform->next->auth.algo = RTE_CRYPTO_AUTH_AES_GCM;
-	sym_op->xform->next->auth.op = auth_op;
-	sym_op->xform->next->auth.digest_length = auth_len;
-	sym_op->xform->next->auth.add_auth_data_length = aad_len;
-	sym_op->xform->next->auth.key.length = 0;
-	sym_op->xform->next->auth.key.data = NULL;
-	sym_op->xform->next->next = NULL;
-
 	return 0;
 }
 
 static int
-create_gcm_operation(enum rte_crypto_cipher_operation op,
+create_gcm_operation(enum rte_crypto_aead_operation op,
 		const struct gcm_test_data *tdata)
 {
 	struct crypto_testsuite_params *ts_params = &testsuite_params;
@@ -4740,15 +4713,15 @@ create_gcm_operation(enum rte_crypto_cipher_operation op,
 
 	/* Append aad data */
 	aad_pad_len = RTE_ALIGN_CEIL(tdata->aad.len, 16);
-	sym_op->auth.aad.data = (uint8_t *)rte_pktmbuf_append(ut_params->ibuf,
+	sym_op->aead.aad.data = (uint8_t *)rte_pktmbuf_append(ut_params->ibuf,
 			aad_pad_len);
-	TEST_ASSERT_NOT_NULL(sym_op->auth.aad.data,
+	TEST_ASSERT_NOT_NULL(sym_op->aead.aad.data,
 			"no room to append aad");
 
-	sym_op->auth.aad.phys_addr =
+	sym_op->aead.aad.phys_addr =
 			rte_pktmbuf_mtophys(ut_params->ibuf);
-	memcpy(sym_op->auth.aad.data, tdata->aad.data, tdata->aad.len);
-	TEST_HEXDUMP(stdout, "aad:", sym_op->auth.aad.data,
+	memcpy(sym_op->aead.aad.data, tdata->aad.data, tdata->aad.len);
+	TEST_HEXDUMP(stdout, "aad:", sym_op->aead.aad.data,
 		tdata->aad.len);
 
 	/* Append IV at the end of the crypto operation*/
@@ -4760,7 +4733,7 @@ create_gcm_operation(enum rte_crypto_cipher_operation op,
 		tdata->iv.len);
 
 	/* Append plaintext/ciphertext */
-	if (op == RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
+	if (op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
 		plaintext_pad_len = RTE_ALIGN_CEIL(tdata->plaintext.len, 16);
 		plaintext = (uint8_t *)rte_pktmbuf_append(ut_params->ibuf,
 				plaintext_pad_len);
@@ -4805,40 +4778,37 @@ create_gcm_operation(enum rte_crypto_cipher_operation op,
 	}
 
 	/* Append digest data */
-	if (op == RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
-		sym_op->auth.digest.data = (uint8_t *)rte_pktmbuf_append(
+	if (op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
+		sym_op->aead.digest.data = (uint8_t *)rte_pktmbuf_append(
 				ut_params->obuf ? ut_params->obuf :
 						ut_params->ibuf,
 						tdata->auth_tag.len);
-		TEST_ASSERT_NOT_NULL(sym_op->auth.digest.data,
+		TEST_ASSERT_NOT_NULL(sym_op->aead.digest.data,
 				"no room to append digest");
-		memset(sym_op->auth.digest.data, 0, tdata->auth_tag.len);
-		sym_op->auth.digest.phys_addr = rte_pktmbuf_mtophys_offset(
+		memset(sym_op->aead.digest.data, 0, tdata->auth_tag.len);
+		sym_op->aead.digest.phys_addr = rte_pktmbuf_mtophys_offset(
 				ut_params->obuf ? ut_params->obuf :
 						ut_params->ibuf,
 						plaintext_pad_len +
 						aad_pad_len);
 	} else {
-		sym_op->auth.digest.data = (uint8_t *)rte_pktmbuf_append(
+		sym_op->aead.digest.data = (uint8_t *)rte_pktmbuf_append(
 				ut_params->ibuf, tdata->auth_tag.len);
-		TEST_ASSERT_NOT_NULL(sym_op->auth.digest.data,
+		TEST_ASSERT_NOT_NULL(sym_op->aead.digest.data,
 				"no room to append digest");
-		sym_op->auth.digest.phys_addr = rte_pktmbuf_mtophys_offset(
+		sym_op->aead.digest.phys_addr = rte_pktmbuf_mtophys_offset(
 				ut_params->ibuf,
 				plaintext_pad_len + aad_pad_len);
 
-		rte_memcpy(sym_op->auth.digest.data, tdata->auth_tag.data,
+		rte_memcpy(sym_op->aead.digest.data, tdata->auth_tag.data,
 			tdata->auth_tag.len);
 		TEST_HEXDUMP(stdout, "digest:",
-			sym_op->auth.digest.data,
+			sym_op->aead.digest.data,
 			tdata->auth_tag.len);
 	}
 
-	sym_op->cipher.data.length = tdata->plaintext.len;
-	sym_op->cipher.data.offset = aad_pad_len;
-
-	sym_op->auth.data.length = tdata->plaintext.len;
-	sym_op->auth.data.offset = aad_pad_len;
+	sym_op->aead.data.length = tdata->plaintext.len;
+	sym_op->aead.data.offset = aad_pad_len;
 
 	return 0;
 }
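
A compact sketch of the per-operation layout after the change above
(aad_buf, digest_buf, their physical addresses, aad_pad_len and
plaintext_len are placeholders for memory and sizes the caller prepares,
e.g. with rte_pktmbuf_append() as in create_gcm_operation()):

    struct rte_crypto_sym_op *sym_op = op->sym;

    /* AAD and digest now hang off the single aead structure */
    sym_op->aead.aad.data = aad_buf;
    sym_op->aead.aad.phys_addr = aad_buf_phys;
    sym_op->aead.digest.data = digest_buf;
    sym_op->aead.digest.phys_addr = digest_buf_phys;

    /* one offset/length pair replaces the separate cipher and auth ranges */
    sym_op->aead.data.offset = aad_pad_len;
    sym_op->aead.data.length = plaintext_len;
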
@@ -4856,11 +4826,10 @@ test_mb_AES_GCM_authenticated_encryption(const struct gcm_test_data *tdata)
 
 	/* Create GCM session */
 	retval = create_gcm_session(ts_params->valid_devs[0],
-			RTE_CRYPTO_CIPHER_OP_ENCRYPT,
+			RTE_CRYPTO_AEAD_OP_ENCRYPT,
 			tdata->key.data, tdata->key.len,
 			tdata->aad.len, tdata->auth_tag.len,
-			tdata->iv.len,
-			RTE_CRYPTO_AUTH_OP_GENERATE);
+			tdata->iv.len);
 	if (retval < 0)
 		return retval;
 
@@ -4877,7 +4846,7 @@ test_mb_AES_GCM_authenticated_encryption(const struct gcm_test_data *tdata)
 			rte_pktmbuf_tailroom(ut_params->ibuf));
 
 	/* Create GCM operation */
-	retval = create_gcm_operation(RTE_CRYPTO_CIPHER_OP_ENCRYPT, tdata);
+	retval = create_gcm_operation(RTE_CRYPTO_AEAD_OP_ENCRYPT, tdata);
 	if (retval < 0)
 		return retval;
 
@@ -5034,11 +5003,10 @@ test_mb_AES_GCM_authenticated_decryption(const struct gcm_test_data *tdata)
 
 	/* Create GCM session */
 	retval = create_gcm_session(ts_params->valid_devs[0],
-			RTE_CRYPTO_CIPHER_OP_DECRYPT,
+			RTE_CRYPTO_AEAD_OP_DECRYPT,
 			tdata->key.data, tdata->key.len,
 			tdata->aad.len, tdata->auth_tag.len,
-			tdata->iv.len,
-			RTE_CRYPTO_AUTH_OP_VERIFY);
+			tdata->iv.len);
 	if (retval < 0)
 		return retval;
 
@@ -5055,7 +5023,7 @@ test_mb_AES_GCM_authenticated_decryption(const struct gcm_test_data *tdata)
 			rte_pktmbuf_tailroom(ut_params->ibuf));
 
 	/* Create GCM operation */
-	retval = create_gcm_operation(RTE_CRYPTO_CIPHER_OP_DECRYPT, tdata);
+	retval = create_gcm_operation(RTE_CRYPTO_AEAD_OP_DECRYPT, tdata);
 	if (retval < 0)
 		return retval;
 
@@ -5201,11 +5169,10 @@ test_AES_GCM_authenticated_encryption_oop(const struct gcm_test_data *tdata)
 
 	/* Create GCM session */
 	retval = create_gcm_session(ts_params->valid_devs[0],
-			RTE_CRYPTO_CIPHER_OP_ENCRYPT,
+			RTE_CRYPTO_AEAD_OP_ENCRYPT,
 			tdata->key.data, tdata->key.len,
 			tdata->aad.len, tdata->auth_tag.len,
-			tdata->iv.len,
-			RTE_CRYPTO_AUTH_OP_GENERATE);
+			tdata->iv.len);
 	if (retval < 0)
 		return retval;
 
@@ -5219,7 +5186,7 @@ test_AES_GCM_authenticated_encryption_oop(const struct gcm_test_data *tdata)
 			rte_pktmbuf_tailroom(ut_params->obuf));
 
 	/* Create GCM operation */
-	retval = create_gcm_operation(RTE_CRYPTO_CIPHER_OP_ENCRYPT, tdata);
+	retval = create_gcm_operation(RTE_CRYPTO_AEAD_OP_ENCRYPT, tdata);
 	if (retval < 0)
 		return retval;
 
@@ -5278,11 +5245,10 @@ test_AES_GCM_authenticated_decryption_oop(const struct gcm_test_data *tdata)
 
 	/* Create GCM session */
 	retval = create_gcm_session(ts_params->valid_devs[0],
-			RTE_CRYPTO_CIPHER_OP_DECRYPT,
+			RTE_CRYPTO_AEAD_OP_DECRYPT,
 			tdata->key.data, tdata->key.len,
 			tdata->aad.len, tdata->auth_tag.len,
-			tdata->iv.len,
-			RTE_CRYPTO_AUTH_OP_VERIFY);
+			tdata->iv.len);
 	if (retval < 0)
 		return retval;
 
@@ -5296,7 +5262,7 @@ test_AES_GCM_authenticated_decryption_oop(const struct gcm_test_data *tdata)
 			rte_pktmbuf_tailroom(ut_params->obuf));
 
 	/* Create GCM operation */
-	retval = create_gcm_operation(RTE_CRYPTO_CIPHER_OP_DECRYPT, tdata);
+	retval = create_gcm_operation(RTE_CRYPTO_AEAD_OP_DECRYPT, tdata);
 	if (retval < 0)
 		return retval;
 
@@ -5355,18 +5321,17 @@ test_AES_GCM_authenticated_encryption_sessionless(
 			rte_pktmbuf_tailroom(ut_params->ibuf));
 
 	/* Create GCM operation */
-	retval = create_gcm_operation(RTE_CRYPTO_CIPHER_OP_ENCRYPT, tdata);
+	retval = create_gcm_operation(RTE_CRYPTO_AEAD_OP_ENCRYPT, tdata);
 	if (retval < 0)
 		return retval;
 
 	/* Create GCM xforms */
 	memcpy(key, tdata->key.data, tdata->key.len);
 	retval = create_gcm_xforms(ut_params->op,
-			RTE_CRYPTO_CIPHER_OP_ENCRYPT,
+			RTE_CRYPTO_AEAD_OP_ENCRYPT,
 			key, tdata->key.len,
 			tdata->aad.len, tdata->auth_tag.len,
-			tdata->iv.len,
-			RTE_CRYPTO_AUTH_OP_GENERATE);
+			tdata->iv.len);
 	if (retval < 0)
 		return retval;
 
@@ -5436,18 +5401,17 @@ test_AES_GCM_authenticated_decryption_sessionless(
 			rte_pktmbuf_tailroom(ut_params->ibuf));
 
 	/* Create GCM operation */
-	retval = create_gcm_operation(RTE_CRYPTO_CIPHER_OP_DECRYPT, tdata);
+	retval = create_gcm_operation(RTE_CRYPTO_AEAD_OP_DECRYPT, tdata);
 	if (retval < 0)
 		return retval;
 
 	/* Create GCM xforms */
 	memcpy(key, tdata->key.data, tdata->key.len);
 	retval = create_gcm_xforms(ut_params->op,
-			RTE_CRYPTO_CIPHER_OP_DECRYPT,
+			RTE_CRYPTO_AEAD_OP_DECRYPT,
 			key, tdata->key.len,
 			tdata->aad.len, tdata->auth_tag.len,
-			tdata->iv.len,
-			RTE_CRYPTO_AUTH_OP_VERIFY);
+			tdata->iv.len);
 	if (retval < 0)
 		return retval;
 
@@ -7137,7 +7101,7 @@ test_authenticated_decryption_fail_when_corruption(
 }
 
 static int
-create_gcm_operation_SGL(enum rte_crypto_cipher_operation op,
+create_gcm_operation_SGL(enum rte_crypto_aead_operation op,
 		const struct gcm_test_data *tdata,
 		void *digest_mem, uint64_t digest_phys)
 {
@@ -7156,18 +7120,18 @@ create_gcm_operation_SGL(enum rte_crypto_cipher_operation op,
 
 	struct rte_crypto_sym_op *sym_op = ut_params->op->sym;
 
-	sym_op->auth.digest.data = digest_mem;
+	sym_op->aead.digest.data = digest_mem;
 
-	TEST_ASSERT_NOT_NULL(sym_op->auth.digest.data,
+	TEST_ASSERT_NOT_NULL(sym_op->aead.digest.data,
 			"no room to append digest");
 
-	sym_op->auth.digest.phys_addr = digest_phys;
+	sym_op->aead.digest.phys_addr = digest_phys;
 
-	if (op == RTE_CRYPTO_CIPHER_OP_DECRYPT) {
-		rte_memcpy(sym_op->auth.digest.data, tdata->auth_tag.data,
+	if (op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
+		rte_memcpy(sym_op->aead.digest.data, tdata->auth_tag.data,
 				auth_tag_len);
 		TEST_HEXDUMP(stdout, "digest:",
-				sym_op->auth.digest.data,
+				sym_op->aead.digest.data,
 				auth_tag_len);
 	}
 
@@ -7176,25 +7140,22 @@ create_gcm_operation_SGL(enum rte_crypto_cipher_operation op,
 
 	rte_memcpy(iv_ptr, tdata->iv.data, iv_len);
 
-	sym_op->auth.aad.data = (uint8_t *)rte_pktmbuf_prepend(
+	sym_op->aead.aad.data = (uint8_t *)rte_pktmbuf_prepend(
 			ut_params->ibuf, aad_len);
-	TEST_ASSERT_NOT_NULL(sym_op->auth.aad.data,
+	TEST_ASSERT_NOT_NULL(sym_op->aead.aad.data,
 			"no room to prepend aad");
-	sym_op->auth.aad.phys_addr = rte_pktmbuf_mtophys(
+	sym_op->aead.aad.phys_addr = rte_pktmbuf_mtophys(
 			ut_params->ibuf);
 
-	memset(sym_op->auth.aad.data, 0, aad_len);
-	rte_memcpy(sym_op->auth.aad.data, tdata->aad.data, aad_len);
+	memset(sym_op->aead.aad.data, 0, aad_len);
+	rte_memcpy(sym_op->aead.aad.data, tdata->aad.data, aad_len);
 
 	TEST_HEXDUMP(stdout, "iv:", iv_ptr, iv_len);
 	TEST_HEXDUMP(stdout, "aad:",
-			sym_op->auth.aad.data, aad_len);
+			sym_op->aead.aad.data, aad_len);
 
-	sym_op->cipher.data.length = tdata->plaintext.len;
-	sym_op->cipher.data.offset = aad_len;
-
-	sym_op->auth.data.offset = aad_len;
-	sym_op->auth.data.length = tdata->plaintext.len;
+	sym_op->aead.data.length = tdata->plaintext.len;
+	sym_op->aead.data.offset = aad_len;
 
 	return 0;
 }
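
The session-less path shrinks in the same way; a sketch of what
create_gcm_xforms() now does (only the AEAD-specific lines are shown, the
remaining aead fields are filled exactly as in the session sketch earlier):

    /* a single transform is allocated instead of a cipher/auth pair */
    TEST_ASSERT_NOT_NULL(rte_crypto_op_sym_xforms_alloc(op, 1),
            "failed to allocate space for crypto transform");

    op->sym->xform->type = RTE_CRYPTO_SYM_XFORM_AEAD;
    op->sym->xform->next = NULL;
    op->sym->xform->aead.algo = RTE_CRYPTO_AEAD_AES_GCM;
    op->sym->xform->aead.op = RTE_CRYPTO_AEAD_OP_ENCRYPT;
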
@@ -7249,11 +7210,10 @@ test_AES_GCM_authenticated_encryption_SGL(const struct gcm_test_data *tdata,
 
 	/* Create GCM session */
 	retval = create_gcm_session(ts_params->valid_devs[0],
-			RTE_CRYPTO_CIPHER_OP_ENCRYPT,
+			RTE_CRYPTO_AEAD_OP_ENCRYPT,
 			tdata->key.data, tdata->key.len,
 			tdata->aad.len, tdata->auth_tag.len,
-			tdata->iv.len,
-			RTE_CRYPTO_AUTH_OP_GENERATE);
+			tdata->iv.len);
 	if (retval < 0)
 		return retval;
 
@@ -7379,7 +7339,7 @@ test_AES_GCM_authenticated_encryption_SGL(const struct gcm_test_data *tdata,
 	}
 
 	/* Create GCM operation */
-	retval = create_gcm_operation_SGL(RTE_CRYPTO_CIPHER_OP_ENCRYPT,
+	retval = create_gcm_operation_SGL(RTE_CRYPTO_AEAD_OP_ENCRYPT,
 			tdata, digest_mem, digest_phys);
 
 	if (retval < 0)
diff --git a/test/test/test_cryptodev_perf.c b/test/test/test_cryptodev_perf.c
index 3bd9351..5b2468d 100644
--- a/test/test/test_cryptodev_perf.c
+++ b/test/test/test_cryptodev_perf.c
@@ -65,7 +65,8 @@ enum chain_mode {
 	CIPHER_HASH,
 	HASH_CIPHER,
 	CIPHER_ONLY,
-	HASH_ONLY
+	HASH_ONLY,
+	AEAD
 };
 
 
@@ -86,6 +87,7 @@ struct symmetric_op {
 struct symmetric_session_attrs {
 	enum rte_crypto_cipher_operation cipher;
 	enum rte_crypto_auth_operation auth;
+	enum rte_crypto_aead_operation aead;
 
 	enum rte_crypto_cipher_algorithm cipher_algorithm;
 	const uint8_t *key_cipher_data;
@@ -95,6 +97,10 @@ struct symmetric_session_attrs {
 	const uint8_t *key_auth_data;
 	uint32_t key_auth_len;
 
+	enum rte_crypto_aead_algorithm aead_algorithm;
+	const uint8_t *key_aead_data;
+	uint32_t key_aead_len;
+
 	const uint8_t *iv_data;
 	uint16_t iv_len;
 	uint16_t aad_len;
@@ -124,8 +130,9 @@ struct perf_test_params {
 	enum chain_mode chain;
 
 	enum rte_crypto_cipher_algorithm cipher_algo;
-	unsigned cipher_key_length;
+	unsigned int key_length;
 	enum rte_crypto_auth_algorithm auth_algo;
+	enum rte_crypto_aead_algorithm aead_algo;
 
 	struct symmetric_session_attrs *session_attrs;
 
@@ -157,7 +164,8 @@ static struct rte_cryptodev_sym_session *
 test_perf_create_openssl_session(uint8_t dev_id, enum chain_mode chain,
 		enum rte_crypto_cipher_algorithm cipher_algo,
 		unsigned int cipher_key_len,
-		enum rte_crypto_auth_algorithm auth_algo);
+		enum rte_crypto_auth_algorithm auth_algo,
+		enum rte_crypto_aead_algorithm aead_algo);
 static struct rte_cryptodev_sym_session *
 test_perf_create_armv8_session(uint8_t dev_id, enum chain_mode chain,
 		enum rte_crypto_cipher_algorithm cipher_algo,
@@ -191,6 +199,7 @@ static const char *chain_mode_name(enum chain_mode mode)
 	case HASH_CIPHER: return "hash_cipher"; break;
 	case CIPHER_ONLY: return "cipher_only"; break;
 	case HASH_ONLY: return "hash_only"; break;
+	case AEAD: return "aead"; break;
 	default: return ""; break;
 	}
 }
@@ -2085,7 +2094,7 @@ test_perf_snow3G_optimise_cyclecount(struct perf_test_params *pparams)
 	/* Create Crypto session*/
 	sess = test_perf_create_snow3g_session(ts_params->dev_id,
 			pparams->chain, pparams->cipher_algo,
-			pparams->cipher_key_length, pparams->auth_algo);
+			pparams->key_length, pparams->auth_algo);
 	TEST_ASSERT_NOT_NULL(sess, "Session creation failed");
 
 	/* Generate Crypto op data structure(s)*/
@@ -2106,7 +2115,16 @@ test_perf_snow3G_optimise_cyclecount(struct perf_test_params *pparams)
 		c_ops[i] = op;
 	}
 
-	printf("\nOn %s dev%u qp%u, %s, cipher algo:%s, auth_algo:%s, "
+	if (pparams->chain == AEAD)
+		printf("\nOn %s dev%u qp%u, %s, aead algo:%s, "
+			"Packet Size %u bytes",
+			pmd_name(gbl_cryptodev_perftest_devtype),
+			ts_params->dev_id, 0,
+			chain_mode_name(pparams->chain),
+			rte_crypto_aead_algorithm_strings[pparams->aead_algo],
+			pparams->buf_size);
+	else
+		printf("\nOn %s dev%u qp%u, %s, cipher algo:%s, auth_algo:%s, "
 			"Packet Size %u bytes",
 			pmd_name(gbl_cryptodev_perftest_devtype),
 			ts_params->dev_id, 0,
@@ -2196,14 +2214,14 @@ test_perf_snow3G_vary_burst_size(void)
 			{
 					.chain = CIPHER_ONLY,
 					.cipher_algo  = RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
-					.cipher_key_length = 16,
+					.key_length = 16,
 					.auth_algo  = RTE_CRYPTO_AUTH_NULL,
 			},
 			{
 					.chain = HASH_ONLY,
 					.cipher_algo = RTE_CRYPTO_CIPHER_NULL,
 					.auth_algo  = RTE_CRYPTO_AUTH_SNOW3G_UIA2,
-					.cipher_key_length = 16
+					.key_length = 16
 			},
 	};
 
@@ -2260,7 +2278,8 @@ test_perf_openssl_optimise_cyclecount(struct perf_test_params *pparams)
 	/* Create Crypto session*/
 	sess = test_perf_create_openssl_session(ts_params->dev_id,
 			pparams->chain, pparams->cipher_algo,
-			pparams->cipher_key_length, pparams->auth_algo);
+			pparams->key_length, pparams->auth_algo,
+			pparams->aead_algo);
 	TEST_ASSERT_NOT_NULL(sess, "Session creation failed");
 
 	/* Generate Crypto op data structure(s)*/
@@ -2275,21 +2294,22 @@ test_perf_openssl_optimise_cyclecount(struct perf_test_params *pparams)
 						RTE_CRYPTO_OP_TYPE_SYMMETRIC);
 		TEST_ASSERT_NOT_NULL(op, "Failed to allocate op");
 
-		switch (pparams->cipher_algo) {
-		case RTE_CRYPTO_CIPHER_3DES_CBC:
-		case RTE_CRYPTO_CIPHER_3DES_CTR:
-			test_perf_set_crypto_op = test_perf_set_crypto_op_3des;
-			break;
-		case RTE_CRYPTO_CIPHER_AES_CBC:
-		case RTE_CRYPTO_CIPHER_AES_CTR:
-			test_perf_set_crypto_op = test_perf_set_crypto_op_aes;
-			break;
-		case RTE_CRYPTO_CIPHER_AES_GCM:
+		if (pparams->chain == AEAD)
 			test_perf_set_crypto_op =
 						test_perf_set_crypto_op_aes_gcm;
-			break;
-		default:
-			return TEST_FAILED;
+		else {
+			switch (pparams->cipher_algo) {
+			case RTE_CRYPTO_CIPHER_3DES_CBC:
+			case RTE_CRYPTO_CIPHER_3DES_CTR:
+				test_perf_set_crypto_op = test_perf_set_crypto_op_3des;
+				break;
+			case RTE_CRYPTO_CIPHER_AES_CBC:
+			case RTE_CRYPTO_CIPHER_AES_CTR:
+				test_perf_set_crypto_op = test_perf_set_crypto_op_aes;
+				break;
+			default:
+				return TEST_FAILED;
+			}
 		}
 
 		op = test_perf_set_crypto_op(op, m, sess, pparams->buf_size,
@@ -2299,14 +2319,24 @@ test_perf_openssl_optimise_cyclecount(struct perf_test_params *pparams)
 		c_ops[i] = op;
 	}
 
-	printf("\nOn %s dev%u qp%u, %s, cipher algo:%s, cipher key length:%u, "
-			"auth_algo:%s, Packet Size %u bytes",
+	if (pparams->chain == AEAD)
+		printf("\nOn %s dev%u qp%u, %s, aead_algo:%s, "
+			"key length:%u, Packet Size %u bytes",
+			pmd_name(gbl_cryptodev_perftest_devtype),
+			ts_params->dev_id, 0,
+			chain_mode_name(pparams->chain),
+			rte_crypto_aead_algorithm_strings[pparams->aead_algo],
+			pparams->key_length,
+			pparams->buf_size);
+	else
+		printf("\nOn %s dev%u qp%u, %s, cipher algo:%s, auth_algo:%s, "
+			"key length:%u, Packet Size %u bytes",
 			pmd_name(gbl_cryptodev_perftest_devtype),
 			ts_params->dev_id, 0,
 			chain_mode_name(pparams->chain),
 			rte_crypto_cipher_algorithm_strings[pparams->cipher_algo],
-			pparams->cipher_key_length,
 			rte_crypto_auth_algorithm_strings[pparams->auth_algo],
+			pparams->key_length,
 			pparams->buf_size);
 	printf("\nOps Tx\tOps Rx\tOps/burst  ");
 	printf("Retries  EmptyPolls\tIACycles/CyOp\tIACycles/Burst\t"
@@ -2410,7 +2440,7 @@ test_perf_armv8_optimise_cyclecount(struct perf_test_params *pparams)
 	/* Create Crypto session*/
 	sess = test_perf_create_armv8_session(ts_params->dev_id,
 			pparams->chain, pparams->cipher_algo,
-			pparams->cipher_key_length, pparams->auth_algo);
+			pparams->key_length, pparams->auth_algo);
 	TEST_ASSERT_NOT_NULL(sess, "Session creation failed");
 
 	/* Generate Crypto op data structure(s)*/
@@ -2438,7 +2468,7 @@ test_perf_armv8_optimise_cyclecount(struct perf_test_params *pparams)
 			ts_params->dev_id, 0,
 			chain_mode_name(pparams->chain),
 			rte_crypto_cipher_algorithm_strings[pparams->cipher_algo],
-			pparams->cipher_key_length,
+			pparams->key_length,
 			rte_crypto_auth_algorithm_strings[pparams->auth_algo],
 			pparams->buf_size);
 	printf("\nOps Tx\tOps Rx\tOps/burst  ");
@@ -2532,8 +2562,6 @@ static uint32_t get_auth_key_max_length(enum rte_crypto_auth_algorithm algo)
 		return 128;
 	case RTE_CRYPTO_AUTH_SHA512_HMAC:
 		return 128;
-	case RTE_CRYPTO_AUTH_AES_GCM:
-		return 0;
 	default:
 		return 0;
 	}
@@ -2554,7 +2582,15 @@ static uint32_t get_auth_digest_length(enum rte_crypto_auth_algorithm algo)
 		return TRUNCATED_DIGEST_BYTE_LENGTH_SHA384;
 	case RTE_CRYPTO_AUTH_SHA512_HMAC:
 		return TRUNCATED_DIGEST_BYTE_LENGTH_SHA512;
-	case RTE_CRYPTO_AUTH_AES_GCM:
+	default:
+		return 0;
+	}
+}
+
+static uint32_t get_aead_digest_length(enum rte_crypto_aead_algorithm algo)
+{
+	switch (algo) {
+	case RTE_CRYPTO_AEAD_AES_GCM:
 		return DIGEST_BYTE_LENGTH_AES_GCM;
 	default:
 		return 0;
@@ -2732,55 +2768,73 @@ test_perf_create_snow3g_session(uint8_t dev_id, enum chain_mode chain,
 static struct rte_cryptodev_sym_session *
 test_perf_create_openssl_session(uint8_t dev_id, enum chain_mode chain,
 		enum rte_crypto_cipher_algorithm cipher_algo,
-		unsigned int cipher_key_len,
-		enum rte_crypto_auth_algorithm auth_algo)
+		unsigned int key_len,
+		enum rte_crypto_auth_algorithm auth_algo,
+		enum rte_crypto_aead_algorithm aead_algo)
 {
 	struct rte_crypto_sym_xform cipher_xform = { 0 };
 	struct rte_crypto_sym_xform auth_xform = { 0 };
+	struct rte_crypto_sym_xform aead_xform = { 0 };
 
-	/* Setup Cipher Parameters */
-	cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
-	cipher_xform.cipher.algo = cipher_algo;
-	cipher_xform.cipher.iv.offset = IV_OFFSET;
-	cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
+	if (chain == CIPHER_HASH || chain == HASH_CIPHER) {
+		/* Setup Cipher Parameters */
+		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
+		cipher_xform.cipher.algo = cipher_algo;
+		cipher_xform.cipher.iv.offset = IV_OFFSET;
+		cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
 
-	switch (cipher_algo) {
-	case RTE_CRYPTO_CIPHER_3DES_CBC:
-	case RTE_CRYPTO_CIPHER_3DES_CTR:
-		cipher_xform.cipher.key.data = triple_des_key;
-		cipher_xform.cipher.iv.length = TRIPLE_DES_CIPHER_IV_LENGTH;
-		break;
-	case RTE_CRYPTO_CIPHER_AES_CBC:
-	case RTE_CRYPTO_CIPHER_AES_CTR:
-	case RTE_CRYPTO_CIPHER_AES_GCM:
-		cipher_xform.cipher.key.data = aes_key;
-		cipher_xform.cipher.iv.length = AES_CIPHER_IV_LENGTH;
-		break;
-	default:
-		return NULL;
-	}
+		switch (cipher_algo) {
+		case RTE_CRYPTO_CIPHER_3DES_CBC:
+		case RTE_CRYPTO_CIPHER_3DES_CTR:
+			cipher_xform.cipher.key.data = triple_des_key;
+			cipher_xform.cipher.iv.length = TRIPLE_DES_CIPHER_IV_LENGTH;
+			break;
+		case RTE_CRYPTO_CIPHER_AES_CBC:
+		case RTE_CRYPTO_CIPHER_AES_CTR:
+			cipher_xform.cipher.key.data = aes_key;
+			cipher_xform.cipher.iv.length = AES_CIPHER_IV_LENGTH;
+			break;
+		default:
+			return NULL;
+		}
 
-	cipher_xform.cipher.key.length = cipher_key_len;
+		cipher_xform.cipher.key.length = key_len;
 
-	/* Setup Auth Parameters */
-	auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
-	auth_xform.auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;
-	auth_xform.auth.algo = auth_algo;
+		/* Setup Auth Parameters */
+		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
+		auth_xform.auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;
+		auth_xform.auth.algo = auth_algo;
 
-	switch (auth_algo) {
-	case RTE_CRYPTO_AUTH_SHA1_HMAC:
-		auth_xform.auth.key.data = hmac_sha_key;
-		break;
-	case RTE_CRYPTO_AUTH_AES_GCM:
-		auth_xform.auth.key.data = NULL;
-		auth_xform.auth.add_auth_data_length = AES_GCM_AAD_LENGTH;
-		break;
-	default:
-		return NULL;
-	}
+		switch (auth_algo) {
+		case RTE_CRYPTO_AUTH_SHA1_HMAC:
+			auth_xform.auth.key.data = hmac_sha_key;
+			break;
+		default:
+			return NULL;
+		}
 
-	auth_xform.auth.key.length =  get_auth_key_max_length(auth_algo);
-	auth_xform.auth.digest_length = get_auth_digest_length(auth_algo);
+		auth_xform.auth.key.length =  get_auth_key_max_length(auth_algo);
+		auth_xform.auth.digest_length = get_auth_digest_length(auth_algo);
+	} else if (chain == AEAD) {
+		/* Setup AEAD Parameters */
+		aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
+		aead_xform.aead.op = RTE_CRYPTO_AEAD_OP_ENCRYPT;
+		aead_xform.aead.algo = aead_algo;
+		aead_xform.aead.iv.offset = IV_OFFSET;
+
+		switch (aead_algo) {
+		case RTE_CRYPTO_AEAD_AES_GCM:
+			aead_xform.aead.key.data = aes_key;
+			aead_xform.aead.iv.length = AES_CIPHER_IV_LENGTH;
+			aead_xform.aead.add_auth_data_length = AES_GCM_AAD_LENGTH;
+			aead_xform.aead.digest_length = get_aead_digest_length(aead_algo);
+			break;
+		default:
+			return NULL;
+		}
+
+		aead_xform.aead.key.length = key_len;
+	}
 
 	switch (chain) {
 	case CIPHER_HASH:
@@ -2793,6 +2847,9 @@ test_perf_create_openssl_session(uint8_t dev_id, enum chain_mode chain,
 		cipher_xform.next = NULL;
 		/* Create Crypto session*/
 		return rte_cryptodev_sym_session_create(dev_id,	&auth_xform);
+	case AEAD:
+		/* Create Crypto session*/
+		return rte_cryptodev_sym_session_create(dev_id,	&aead_xform);
 	default:
 		return NULL;
 	}
@@ -2916,22 +2973,19 @@ test_perf_set_crypto_op_aes_gcm(struct rte_crypto_op *op, struct rte_mbuf *m,
 	}
 
 	/* Authentication Parameters */
-	op->sym->auth.digest.data = (uint8_t *)m->buf_addr +
+	op->sym->aead.digest.data = (uint8_t *)m->buf_addr +
 					(m->data_off + data_len);
-	op->sym->auth.digest.phys_addr =
+	op->sym->aead.digest.phys_addr =
 				rte_pktmbuf_mtophys_offset(m, data_len);
-	op->sym->auth.aad.data = aes_gcm_aad;
+	op->sym->aead.aad.data = aes_gcm_aad;
 
 	/* Copy IV at the end of the crypto operation */
 	rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
 			aes_iv, AES_CIPHER_IV_LENGTH);
 
 	/* Data lengths/offsets Parameters */
-	op->sym->auth.data.offset = 0;
-	op->sym->auth.data.length = data_len;
-
-	op->sym->cipher.data.offset = 0;
-	op->sym->cipher.data.length = data_len;
+	op->sym->aead.data.offset = 0;
+	op->sym->aead.data.length = data_len;
 
 	op->sym->m_src = m;
 
@@ -3102,7 +3156,7 @@ test_perf_aes_sha(uint8_t dev_id, uint16_t queue_id,
 	/* Create Crypto session*/
 	sess = test_perf_create_aes_sha_session(ts_params->dev_id,
 			pparams->chain, pparams->cipher_algo,
-			pparams->cipher_key_length, pparams->auth_algo);
+			pparams->key_length, pparams->auth_algo);
 	TEST_ASSERT_NOT_NULL(sess, "Session creation failed");
 
 	/* Generate a burst of crypto operations */
@@ -3235,7 +3289,7 @@ test_perf_snow3g(uint8_t dev_id, uint16_t queue_id,
 	/* Create Crypto session*/
 	sess = test_perf_create_snow3g_session(ts_params->dev_id,
 			pparams->chain, pparams->cipher_algo,
-			pparams->cipher_key_length, pparams->auth_algo);
+			pparams->key_length, pparams->auth_algo);
 	TEST_ASSERT_NOT_NULL(sess, "Session creation failed");
 
 	/* Generate a burst of crypto operations */
@@ -3394,20 +3448,22 @@ test_perf_openssl(uint8_t dev_id, uint16_t queue_id,
 					unsigned int,
 					enum chain_mode);
 
-	switch (pparams->cipher_algo) {
-	case RTE_CRYPTO_CIPHER_3DES_CBC:
-	case RTE_CRYPTO_CIPHER_3DES_CTR:
-		test_perf_set_crypto_op = test_perf_set_crypto_op_3des;
-		break;
-	case RTE_CRYPTO_CIPHER_AES_CBC:
-	case RTE_CRYPTO_CIPHER_AES_CTR:
-		test_perf_set_crypto_op = test_perf_set_crypto_op_aes;
-		break;
-	case RTE_CRYPTO_CIPHER_AES_GCM:
-		test_perf_set_crypto_op = test_perf_set_crypto_op_aes_gcm;
-		break;
-	default:
-		return TEST_FAILED;
+	if (pparams->chain == AEAD)
+		test_perf_set_crypto_op =
+					test_perf_set_crypto_op_aes_gcm;
+	else {
+		switch (pparams->cipher_algo) {
+		case RTE_CRYPTO_CIPHER_3DES_CBC:
+		case RTE_CRYPTO_CIPHER_3DES_CTR:
+			test_perf_set_crypto_op = test_perf_set_crypto_op_3des;
+			break;
+		case RTE_CRYPTO_CIPHER_AES_CBC:
+		case RTE_CRYPTO_CIPHER_AES_CTR:
+			test_perf_set_crypto_op = test_perf_set_crypto_op_aes;
+			break;
+		default:
+			return TEST_FAILED;
+		}
 	}
 
 	if (rte_cryptodev_count() == 0) {
@@ -3418,7 +3474,8 @@ test_perf_openssl(uint8_t dev_id, uint16_t queue_id,
 	/* Create Crypto session*/
 	sess = test_perf_create_openssl_session(ts_params->dev_id,
 			pparams->chain, pparams->cipher_algo,
-			pparams->cipher_key_length, pparams->auth_algo);
+			pparams->key_length, pparams->auth_algo,
+			pparams->aead_algo);
 	TEST_ASSERT_NOT_NULL(sess, "Session creation failed");
 
 	/* Generate a burst of crypto operations */
@@ -3548,7 +3605,7 @@ test_perf_armv8(uint8_t dev_id, uint16_t queue_id,
 	/* Create Crypto session*/
 	sess = test_perf_create_armv8_session(ts_params->dev_id,
 			pparams->chain, pparams->cipher_algo,
-			pparams->cipher_key_length, pparams->auth_algo);
+			pparams->key_length, pparams->auth_algo);
 	TEST_ASSERT_NOT_NULL(sess, "Session creation failed");
 
 	/* Generate a burst of crypto operations */
@@ -3674,48 +3731,48 @@ test_perf_aes_cbc_encrypt_digest_vary_pkt_size(void)
 		{
 			.chain = CIPHER_ONLY,
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CBC,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_NULL
 		},
 		{
 			.chain = CIPHER_HASH,
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CBC,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CBC,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA256_HMAC
 		},
 		{
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CBC,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA512_HMAC
 		},
 		{
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CBC,
-			.cipher_key_length = 32,
+			.key_length = 32,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CBC,
-			.cipher_key_length = 32,
+			.key_length = 32,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA256_HMAC
 		},
 		{
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CBC,
-			.cipher_key_length = 32,
+			.key_length = 32,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA512_HMAC
 		},
 	};
@@ -3729,7 +3786,7 @@ test_perf_aes_cbc_encrypt_digest_vary_pkt_size(void)
 			chain_mode_name(params_set[i].chain),
 			rte_crypto_cipher_algorithm_strings[params_set[i].cipher_algo],
 			rte_crypto_auth_algorithm_strings[params_set[i].auth_algo],
-			params_set[i].cipher_key_length,
+			params_set[i].key_length,
 			burst_size);
 		printf("\nBuffer Size(B)\tOPS(M)\tThroughput(Gbps)\t"
 			"Retries\tEmptyPolls\n");
@@ -3755,14 +3812,14 @@ test_perf_snow3G_vary_pkt_size(void)
 		{
 			.chain = CIPHER_ONLY,
 			.cipher_algo  = RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo  = RTE_CRYPTO_AUTH_NULL,
 		},
 		{
 			.chain = HASH_ONLY,
 			.cipher_algo = RTE_CRYPTO_CIPHER_NULL,
 			.auth_algo  = RTE_CRYPTO_AUTH_SNOW3G_UIA2,
-			.cipher_key_length = 16
+			.key_length = 16
 		},
 	};
 
@@ -3817,63 +3874,77 @@ test_perf_openssl_vary_pkt_size(void)
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_3DES_CBC,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_3DES_CBC,
-			.cipher_key_length = 24,
+			.key_length = 24,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CTR,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CTR,
-			.cipher_key_length = 32,
+			.key_length = 32,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_3DES_CTR,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_3DES_CTR,
-			.cipher_key_length = 24,
+			.key_length = 24,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
-			.chain = CIPHER_HASH,
+			.chain = AEAD,
 
-			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_GCM,
-			.cipher_key_length = 16,
-			.auth_algo = RTE_CRYPTO_AUTH_AES_GCM
+			.aead_algo  = RTE_CRYPTO_AEAD_AES_GCM,
+			.key_length = 16,
 		},
 	};
 
 	for (i = 0; i < RTE_DIM(params_set); i++) {
 		params_set[i].total_operations = total_operations;
 		params_set[i].burst_size = burst_size;
-		printf("\n%s. cipher algo: %s auth algo: %s cipher key size=%u."
-			" burst_size: %d ops\n",
-			chain_mode_name(params_set[i].chain),
-			rte_crypto_cipher_algorithm_strings[params_set[i].cipher_algo],
-			rte_crypto_auth_algorithm_strings[params_set[i].auth_algo],
-			params_set[i].cipher_key_length,
-			burst_size);
+		if (params_set[i].chain == AEAD) {
+			enum rte_crypto_aead_algorithm aead_algo =
+				params_set[i].aead_algo;
+			printf("\n%s. aead algo: %s  key size=%u."
+				" burst_size: %d ops\n",
+				chain_mode_name(params_set[i].chain),
+				rte_crypto_aead_algorithm_strings[aead_algo],
+				params_set[i].key_length,
+				burst_size);
+		} else {
+			enum rte_crypto_cipher_algorithm cipher_algo =
+				params_set[i].cipher_algo;
+			enum rte_crypto_auth_algorithm auth_algo =
+				params_set[i].auth_algo;
+			printf("\n%s. cipher algo: %s auth algo: %s key size=%u."
+				" burst_size: %d ops\n",
+				chain_mode_name(params_set[i].chain),
+				rte_crypto_cipher_algorithm_strings[cipher_algo],
+				rte_crypto_auth_algorithm_strings[auth_algo],
+				params_set[i].key_length,
+				burst_size);
+		}
 		printf("\nBuffer Size(B)\tOPS(M)\tThroughput(Gbps)\tRetries\t"
 				"EmptyPolls\n");
 		for (j = 0; j < RTE_DIM(buf_lengths); j++) {
@@ -3898,50 +3969,49 @@ test_perf_openssl_vary_burst_size(void)
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_3DES_CBC,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_3DES_CBC,
-			.cipher_key_length = 24,
+			.key_length = 24,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CTR,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CTR,
-			.cipher_key_length = 32,
+			.key_length = 32,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_3DES_CTR,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_3DES_CTR,
-			.cipher_key_length = 24,
+			.key_length = 24,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
-			.chain = CIPHER_HASH,
+			.chain = AEAD,
 
-			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_GCM,
-			.cipher_key_length = 16,
-			.auth_algo = RTE_CRYPTO_AUTH_AES_GCM
+			.aead_algo  = RTE_CRYPTO_AEAD_AES_GCM,
+			.key_length = 16,
 		},
 	};
 
@@ -3978,28 +4048,28 @@ test_perf_armv8_vary_pkt_size(void)
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CBC,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
 			.chain = HASH_CIPHER,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CBC,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CBC,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA256_HMAC
 		},
 		{
 			.chain = HASH_CIPHER,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CBC,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA256_HMAC
 		},
 	};
@@ -4012,7 +4082,7 @@ test_perf_armv8_vary_pkt_size(void)
 			chain_mode_name(params_set[i].chain),
 			rte_crypto_cipher_algorithm_strings[params_set[i].cipher_algo],
 			rte_crypto_auth_algorithm_strings[params_set[i].auth_algo],
-			params_set[i].cipher_key_length,
+			params_set[i].key_length,
 			burst_size);
 		printf("\nBuffer Size(B)\tOPS(M)\tThroughput(Gbps)\tRetries\t"
 				"EmptyPolls\n");
@@ -4038,28 +4108,28 @@ test_perf_armv8_vary_burst_size(void)
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CBC,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
 			.chain = HASH_CIPHER,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CBC,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 		},
 		{
 			.chain = CIPHER_HASH,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CBC,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA256_HMAC
 		},
 		{
 			.chain = HASH_CIPHER,
 
 			.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CBC,
-			.cipher_key_length = 16,
+			.key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA256_HMAC
 		},
 	};
@@ -4094,48 +4164,26 @@ static struct rte_cryptodev_sym_session *
 test_perf_create_session(uint8_t dev_id, struct perf_test_params *pparams)
 {
 	static struct rte_cryptodev_sym_session *sess;
-	struct rte_crypto_sym_xform cipher_xform = { 0 };
-	struct rte_crypto_sym_xform auth_xform = { 0 };
-
-	uint8_t cipher_key[pparams->session_attrs->key_cipher_len];
-	uint8_t auth_key[pparams->session_attrs->key_auth_len];
-
-	memcpy(cipher_key, pparams->session_attrs->key_cipher_data,
-		 pparams->session_attrs->key_cipher_len);
-	memcpy(auth_key, pparams->session_attrs->key_auth_data,
-		 pparams->session_attrs->key_auth_len);
-
-	cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
-	cipher_xform.next = NULL;
-
-	cipher_xform.cipher.algo = pparams->session_attrs->cipher_algorithm;
-	cipher_xform.cipher.op = pparams->session_attrs->cipher;
-	cipher_xform.cipher.key.data = cipher_key;
-	cipher_xform.cipher.key.length = pparams->session_attrs->key_cipher_len;
-	cipher_xform.cipher.iv.length = pparams->session_attrs->iv_len;
-	cipher_xform.cipher.iv.offset = IV_OFFSET;
+	struct rte_crypto_sym_xform aead_xform = { 0 };
 
-	auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
-	auth_xform.next = NULL;
+	uint8_t aead_key[pparams->session_attrs->key_aead_len];
 
-	auth_xform.auth.op = pparams->session_attrs->auth;
-	auth_xform.auth.algo = pparams->session_attrs->auth_algorithm;
+	memcpy(aead_key, pparams->session_attrs->key_aead_data,
+		 pparams->session_attrs->key_aead_len);
 
-	auth_xform.auth.add_auth_data_length = pparams->session_attrs->aad_len;
-	auth_xform.auth.digest_length = pparams->session_attrs->digest_len;
-	auth_xform.auth.key.length = pparams->session_attrs->key_auth_len;
+	aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
+	aead_xform.next = NULL;
 
+	aead_xform.aead.algo = pparams->session_attrs->aead_algorithm;
+	aead_xform.aead.op = pparams->session_attrs->aead;
+	aead_xform.aead.key.data = aead_key;
+	aead_xform.aead.key.length = pparams->session_attrs->key_aead_len;
+	aead_xform.aead.iv.length = pparams->session_attrs->iv_len;
+	aead_xform.aead.iv.offset = IV_OFFSET;
+	aead_xform.aead.add_auth_data_length = pparams->session_attrs->aad_len;
+	aead_xform.aead.digest_length = pparams->session_attrs->digest_len;
 
-	cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
-	if (cipher_xform.cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
-		cipher_xform.next = &auth_xform;
-		sess = rte_cryptodev_sym_session_create(dev_id,
-				&cipher_xform);
-	} else {
-		auth_xform.next = &cipher_xform;
-		sess = rte_cryptodev_sym_session_create(dev_id,
-				&auth_xform);
-	}
+	sess = rte_cryptodev_sym_session_create(dev_id,	&aead_xform);
 
 	return sess;
 }
@@ -4154,17 +4202,17 @@ perf_gcm_set_crypto_op(struct rte_crypto_op *op, struct rte_mbuf *m,
 		return NULL;
 	}
 
-	op->sym->auth.digest.data = m_hlp->digest;
-	op->sym->auth.digest.phys_addr = rte_pktmbuf_mtophys_offset(
+	op->sym->aead.digest.data = m_hlp->digest;
+	op->sym->aead.digest.phys_addr = rte_pktmbuf_mtophys_offset(
 					  m,
 					  params->session_attrs->aad_len +
 					  params->symmetric_op->p_len);
 
 
-	op->sym->auth.aad.data = m_hlp->aad;
-	op->sym->auth.aad.phys_addr = rte_pktmbuf_mtophys(m);
+	op->sym->aead.aad.data = m_hlp->aad;
+	op->sym->aead.aad.phys_addr = rte_pktmbuf_mtophys(m);
 
-	rte_memcpy(op->sym->auth.aad.data, params->symmetric_op->aad_data,
+	rte_memcpy(op->sym->aead.aad.data, params->symmetric_op->aad_data,
 		       params->session_attrs->aad_len);
 
 	rte_memcpy(iv_ptr, params->session_attrs->iv_data,
@@ -4172,13 +4220,9 @@ perf_gcm_set_crypto_op(struct rte_crypto_op *op, struct rte_mbuf *m,
 	if (params->session_attrs->iv_len == 12)
 		iv_ptr[15] = 1;
 
-	op->sym->auth.data.offset =
-			params->session_attrs->aad_len;
-	op->sym->auth.data.length = params->symmetric_op->p_len;
-
-	op->sym->cipher.data.offset =
+	op->sym->aead.data.offset =
 			params->session_attrs->aad_len;
-	op->sym->cipher.data.length = params->symmetric_op->p_len;
+	op->sym->aead.data.length = params->symmetric_op->p_len;
 
 	op->sym->m_src = m;
 
@@ -4392,20 +4436,14 @@ test_perf_AES_GCM(int continual_buf_len, int continual_size)
 
 		gcm_test = gcm_tests[i];
 
-		session_attrs[i].cipher =
-				RTE_CRYPTO_CIPHER_OP_ENCRYPT;
-		session_attrs[i].cipher_algorithm =
-				RTE_CRYPTO_CIPHER_AES_GCM;
-		session_attrs[i].key_cipher_data =
+		session_attrs[i].aead =
+				RTE_CRYPTO_AEAD_OP_ENCRYPT;
+		session_attrs[i].aead_algorithm =
+				RTE_CRYPTO_AEAD_AES_GCM;
+		session_attrs[i].key_aead_data =
 				gcm_test->key.data;
-		session_attrs[i].key_cipher_len =
+		session_attrs[i].key_aead_len =
 				gcm_test->key.len;
-		session_attrs[i].auth_algorithm =
-				RTE_CRYPTO_AUTH_AES_GCM;
-		session_attrs[i].auth =
-			RTE_CRYPTO_AUTH_OP_GENERATE;
-		session_attrs[i].key_auth_data = NULL;
-		session_attrs[i].key_auth_len = 0;
 		session_attrs[i].aad_len = gcm_test->aad.len;
 		session_attrs[i].digest_len =
 				gcm_test->auth_tag.len;
@@ -4420,7 +4458,7 @@ test_perf_AES_GCM(int continual_buf_len, int continual_size)
 		ops_set[i].t_data = gcm_test->auth_tags[i].data;
 		ops_set[i].t_len = gcm_test->auth_tags[i].len;
 
-		params_set[i].chain = CIPHER_HASH;
+		params_set[i].chain = AEAD;
 		params_set[i].session_attrs = &session_attrs[i];
 		params_set[i].symmetric_op = &ops_set[i];
 		if (continual_buf_len)
@@ -4522,7 +4560,7 @@ test_perf_continual_performance_test(void)
 		.chain = CIPHER_HASH,
 
 		.cipher_algo  = RTE_CRYPTO_CIPHER_AES_CBC,
-		.cipher_key_length = 16,
+		.key_length = 16,
 		.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
 	};
 
@@ -4532,7 +4570,7 @@ test_perf_continual_performance_test(void)
 			chain_mode_name(params_set.chain),
 			rte_crypto_cipher_algorithm_strings[params_set.cipher_algo],
 			rte_crypto_auth_algorithm_strings[params_set.auth_algo],
-			params_set.cipher_key_length,
+			params_set.key_length,
 			burst_size);
 		printf("\nBuffer Size(B)\tOPS(M)\tThroughput(Gbps)\t"
 				"Retries\tEmptyPolls\n");
-- 
2.9.4


