@@ -318,7 +318,15 @@ cperf_set_ops_aead(struct rte_crypto_op **ops,
/* AEAD parameters */
sym_op->aead.data.length = options->test_buffer_size;
- sym_op->aead.data.offset =
+ /*
+ * If doing AES-CCM, first 18 bytes has to be reserved,
+ * and actual AAD should start from byte 18
+ */
+ if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM)
+ sym_op->aead.data.offset =
+ RTE_ALIGN_CEIL(options->aead_aad_sz + 18, 16);
+ else
+ sym_op->aead.data.offset =
RTE_ALIGN_CEIL(options->aead_aad_sz, 16);
sym_op->aead.aad.data = rte_pktmbuf_mtod(bufs_in[i], uint8_t *);
@@ -358,8 +366,17 @@ cperf_set_ops_aead(struct rte_crypto_op **ops,
uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
uint8_t *, iv_offset);
- memcpy(iv_ptr, test_vector->aead_iv.data,
+ /*
+ * If doing AES-CCM, the nonce is copied one byte
+ * after the start of the IV field
+ */
+ if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM)
+ memcpy(iv_ptr + 1, test_vector->aead_iv.data,
test_vector->aead_iv.length);
+ else
+ memcpy(iv_ptr, test_vector->aead_iv.data,
+ test_vector->aead_iv.length);
+
}
}
@@ -175,13 +175,29 @@ cperf_mbuf_create(struct rte_mempool *mempool,
}
if (options->op_type == CPERF_AEAD) {
- uint8_t *aead = (uint8_t *)rte_pktmbuf_prepend(mbuf,
- RTE_ALIGN_CEIL(options->aead_aad_sz, 16));
+ /*
+ * If doing AES-CCM, the first 18 bytes have to be reserved,
+ * and the actual AAD should start from byte 18
+ */
+ if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
+ uint8_t *aad = (uint8_t *)rte_pktmbuf_prepend(mbuf,
+ RTE_ALIGN_CEIL(options->aead_aad_sz + 18, 16));
+
+ if (aad == NULL)
+ goto error;
+
+ memcpy(aad + 18, test_vector->aad.data,
+ test_vector->aad.length);
+ } else {
+ uint8_t *aad = (uint8_t *)rte_pktmbuf_prepend(mbuf,
+ RTE_ALIGN_CEIL(options->aead_aad_sz, 16));
- if (aead == NULL)
- goto error;
+ if (aad == NULL)
+ goto error;
- memcpy(aead, test_vector->aad.data, test_vector->aad.length);
+ memcpy(aad, test_vector->aad.data,
+ test_vector->aad.length);
+ }
}
return mbuf;
@@ -293,6 +309,14 @@ cperf_latency_test_constructor(struct rte_mempool *sess_mp,
test_vector->cipher_iv.length +
test_vector->auth_iv.length +
test_vector->aead_iv.length;
+ /*
+ * If doing AES-CCM, 16 bytes need to be reserved,
+ * regardless of the IV length
+ */
+ if (options->op_type == CPERF_AEAD &&
+ options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM)
+ priv_size = 16;
+
ctx->crypto_op_pool = rte_crypto_op_pool_create(pool_name,
RTE_CRYPTO_OP_TYPE_SYMMETRIC, options->pool_sz,
512, priv_size, rte_socket_id());
@@ -159,13 +159,29 @@ cperf_mbuf_create(struct rte_mempool *mempool,
}
if (options->op_type == CPERF_AEAD) {
- uint8_t *aead = (uint8_t *)rte_pktmbuf_prepend(mbuf,
- RTE_ALIGN_CEIL(options->aead_aad_sz, 16));
+ /*
+ * If doing AES-CCM, the first 18 bytes have to be reserved,
+ * and the actual AAD should start from byte 18
+ */
+ if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
+ uint8_t *aad = (uint8_t *)rte_pktmbuf_prepend(mbuf,
+ RTE_ALIGN_CEIL(options->aead_aad_sz + 18, 16));
+
+ if (aad == NULL)
+ goto error;
+
+ memcpy(aad + 18, test_vector->aad.data,
+ test_vector->aad.length);
+ } else {
+ uint8_t *aad = (uint8_t *)rte_pktmbuf_prepend(mbuf,
+ RTE_ALIGN_CEIL(options->aead_aad_sz, 16));
- if (aead == NULL)
- goto error;
+ if (aad == NULL)
+ goto error;
- memcpy(aead, test_vector->aad.data, test_vector->aad.length);
+ memcpy(aad, test_vector->aad.data,
+ test_vector->aad.length);
+ }
}
return mbuf;
@@ -273,6 +289,14 @@ cperf_throughput_test_constructor(struct rte_mempool *sess_mp,
uint16_t priv_size = test_vector->cipher_iv.length +
test_vector->auth_iv.length + test_vector->aead_iv.length;
+ /*
+ * If doing AES-CCM, 16 bytes need to be reserved,
+ * regardless of the IV length
+ */
+ if (options->op_type == CPERF_AEAD &&
+ options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM)
+ priv_size = 16;
+
ctx->crypto_op_pool = rte_crypto_op_pool_create(pool_name,
RTE_CRYPTO_OP_TYPE_SYMMETRIC, options->pool_sz,
512, priv_size, rte_socket_id());
@@ -163,13 +163,29 @@ cperf_mbuf_create(struct rte_mempool *mempool,
}
if (options->op_type == CPERF_AEAD) {
- uint8_t *aead = (uint8_t *)rte_pktmbuf_prepend(mbuf,
- RTE_ALIGN_CEIL(options->aead_aad_sz, 16));
+ /*
+ * If doing AES-CCM, the first 18 bytes have to be reserved,
+ * and the actual AAD should start from byte 18
+ */
+ if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
+ uint8_t *aad = (uint8_t *)rte_pktmbuf_prepend(mbuf,
+ RTE_ALIGN_CEIL(options->aead_aad_sz + 18, 16));
- if (aead == NULL)
- goto error;
+ if (aad == NULL)
+ goto error;
+
+ memcpy(aad + 18, test_vector->aad.data,
+ test_vector->aad.length);
+ } else {
+ uint8_t *aad = (uint8_t *)rte_pktmbuf_prepend(mbuf,
+ RTE_ALIGN_CEIL(options->aead_aad_sz, 16));
- memcpy(aead, test_vector->aad.data, test_vector->aad.length);
+ if (aad == NULL)
+ goto error;
+
+ memcpy(aad, test_vector->aad.data,
+ test_vector->aad.length);
+ }
}
return mbuf;
@@ -276,6 +292,15 @@ cperf_verify_test_constructor(struct rte_mempool *sess_mp,
uint16_t priv_size = test_vector->cipher_iv.length +
test_vector->auth_iv.length + test_vector->aead_iv.length;
+
+ /*
+ * If doing AES-CCM, 16 bytes need to be reserved,
+ * regardless of the IV length
+ */
+ if (options->op_type == CPERF_AEAD &&
+ options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM)
+ priv_size = 16;
+
ctx->crypto_op_pool = rte_crypto_op_pool_create(pool_name,
RTE_CRYPTO_OP_TYPE_SYMMETRIC, options->pool_sz,
512, priv_size, rte_socket_id());