ICP: AES-GCM: Unify gcm_init_ctx() and gmac_init_ctx()
gmac_init_ctx() duplicates most of the code in gcm_init_ctx(), even though it only needs to set its own IV length and AAD tag length. Introduce gcm_init_ctx_impl(), which handles the GCM and GMAC differences while reusing the formerly duplicated code.

While here, fix a flaw where the AVX implementation would accept a context using a byte-swapped key schedule which it could not handle, and constify the IV and AAD pointers passed to gcm_init{,_avx}().

Reviewed-by: Brian Behlendorf <behlendorf1@llnl.gov>
Reviewed-by: Rob Norris <robn@despairlabs.com>
Signed-off-by: Attila Fülöp <attila@fueloep.org>
Closes #14529
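In outline, the change turns both public entry points into one-line wrappers around a single implementation function steered by a boolean_t mode flag. The standalone sketch below models just that shape; the types, constants, and helper names (ctx_t, init_impl(), and friends) are simplified stand-ins for illustration, not the ICP API:

#include <stdbool.h>
#include <stdio.h>

/* Illustrative stand-ins for gcm_ctx_t and the per-mode constants. */
typedef struct { size_t tag_len; size_t iv_len; unsigned flags; } ctx_t;

#define GCM_MODE	0x1
#define GMAC_MODE	0x2
#define GMAC_TAG_BITS	128
#define GMAC_IV_LEN	12

/*
 * One shared implementation; the flag selects the per-mode parameters,
 * mirroring gcm_init_ctx_impl(boolean_t gmac_mode, ...).
 */
static int
init_impl(bool gmac_mode, ctx_t *ctx, size_t tag_bits, size_t iv_len)
{
	if (gmac_mode) {
		ctx->flags |= GMAC_MODE;
		ctx->tag_len = GMAC_TAG_BITS / 8;	/* fixed for GMAC */
		ctx->iv_len = GMAC_IV_LEN;
	} else {
		ctx->flags |= GCM_MODE;
		ctx->tag_len = tag_bits / 8;	/* caller-chosen for GCM */
		ctx->iv_len = iv_len;
	}
	return (0);
}

/* The two public entry points collapse into one-line wrappers. */
static int
gcm_init(ctx_t *c, size_t tag_bits, size_t iv_len)
{
	return (init_impl(false, c, tag_bits, iv_len));
}

static int
gmac_init(ctx_t *c)
{
	return (init_impl(true, c, 0, 0));
}

int
main(void)
{
	ctx_t g = { 0 }, m = { 0 };

	gcm_init(&g, 96, 12);
	gmac_init(&m);
	printf("gcm tag=%zu iv=%zu, gmac tag=%zu iv=%zu\n",
	    g.tag_len, g.iv_len, m.tag_len, m.iv_len);
	return (0);
}

The real gcm_init_ctx_impl() additionally threads through the encrypt/copy/xor block-function pointers and the AVX selection logic shown in the hunks below.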
parent 7d638df09b
commit 8d9752569b
@@ -23,6 +23,7 @@
  */
 
 #include <sys/zfs_context.h>
+#include <sys/cmn_err.h>
 #include <modes/modes.h>
 #include <sys/crypto/common.h>
 #include <sys/crypto/icp.h>
@@ -49,6 +50,11 @@
 static uint32_t icp_gcm_impl = IMPL_FASTEST;
 static uint32_t user_sel_impl = IMPL_FASTEST;
 
+static inline int gcm_init_ctx_impl(boolean_t, gcm_ctx_t *, char *, size_t,
+    int (*)(const void *, const uint8_t *, uint8_t *),
+    void (*)(uint8_t *, uint8_t *),
+    void (*)(uint8_t *, uint8_t *));
+
 #ifdef CAN_USE_GCM_ASM
 /* Does the architecture we run on support the MOVBE instruction? */
 boolean_t gcm_avx_can_use_movbe = B_FALSE;
@@ -71,7 +77,7 @@ static int gcm_mode_encrypt_contiguous_blocks_avx(gcm_ctx_t *, char *, size_t,
 
 static int gcm_encrypt_final_avx(gcm_ctx_t *, crypto_data_t *, size_t);
 static int gcm_decrypt_final_avx(gcm_ctx_t *, crypto_data_t *, size_t);
-static int gcm_init_avx(gcm_ctx_t *, unsigned char *, size_t, unsigned char *,
+static int gcm_init_avx(gcm_ctx_t *, const uint8_t *, size_t, const uint8_t *,
     size_t, size_t);
 #endif /* ifdef CAN_USE_GCM_ASM */
 
@@ -478,7 +484,7 @@ gcm_validate_args(CK_AES_GCM_PARAMS *gcm_param)
 }
 
 static void
-gcm_format_initial_blocks(uchar_t *iv, ulong_t iv_len,
+gcm_format_initial_blocks(const uint8_t *iv, ulong_t iv_len,
     gcm_ctx_t *ctx, size_t block_size,
     void (*copy_block)(uint8_t *, uint8_t *),
     void (*xor_block)(uint8_t *, uint8_t *))
@@ -527,8 +533,8 @@ gcm_format_initial_blocks(uchar_t *iv, ulong_t iv_len,
 }
 
 static int
-gcm_init(gcm_ctx_t *ctx, unsigned char *iv, size_t iv_len,
-    unsigned char *auth_data, size_t auth_data_len, size_t block_size,
+gcm_init(gcm_ctx_t *ctx, const uint8_t *iv, size_t iv_len,
+    const uint8_t *auth_data, size_t auth_data_len, size_t block_size,
     int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
     void (*copy_block)(uint8_t *, uint8_t *),
     void (*xor_block)(uint8_t *, uint8_t *))
@@ -587,8 +593,6 @@ gcm_init(gcm_ctx_t *ctx, unsigned char *iv, size_t iv_len,
 /*
  * The following function is called at encrypt or decrypt init time
  * for AES GCM mode.
- *
- * Init the GCM context struct. Handle the cycle and avx implementations here.
  */
 int
 gcm_init_ctx(gcm_ctx_t *gcm_ctx, char *param, size_t block_size,
@@ -596,31 +600,75 @@ gcm_init_ctx(gcm_ctx_t *gcm_ctx, char *param, size_t block_size,
     void (*copy_block)(uint8_t *, uint8_t *),
     void (*xor_block)(uint8_t *, uint8_t *))
 {
-	int rv;
+	return (gcm_init_ctx_impl(B_FALSE, gcm_ctx, param, block_size,
+	    encrypt_block, copy_block, xor_block));
+}
+
+/*
+ * The following function is called at encrypt or decrypt init time
+ * for AES GMAC mode.
+ */
+int
+gmac_init_ctx(gcm_ctx_t *gcm_ctx, char *param, size_t block_size,
+    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
+    void (*copy_block)(uint8_t *, uint8_t *),
+    void (*xor_block)(uint8_t *, uint8_t *))
+{
+	return (gcm_init_ctx_impl(B_TRUE, gcm_ctx, param, block_size,
+	    encrypt_block, copy_block, xor_block));
+}
+
+/*
+ * Init the GCM context struct. Handle the cycle and avx implementations here.
+ * Initialization of a GMAC context differs slightly from a GCM context.
+ */
+static inline int
+gcm_init_ctx_impl(boolean_t gmac_mode, gcm_ctx_t *gcm_ctx, char *param,
+    size_t block_size, int (*encrypt_block)(const void *, const uint8_t *,
+    uint8_t *), void (*copy_block)(uint8_t *, uint8_t *),
+    void (*xor_block)(uint8_t *, uint8_t *))
+{
 	CK_AES_GCM_PARAMS *gcm_param;
+	int rv = CRYPTO_SUCCESS;
+	size_t tag_len, iv_len;
 
 	if (param != NULL) {
 		gcm_param = (CK_AES_GCM_PARAMS *)(void *)param;
 
-		if ((rv = gcm_validate_args(gcm_param)) != 0) {
-			return (rv);
-		}
+		if (gmac_mode == B_FALSE) {
+			/* GCM mode. */
+			if ((rv = gcm_validate_args(gcm_param)) != 0) {
+				return (rv);
+			}
+			gcm_ctx->gcm_flags |= GCM_MODE;
 
-		gcm_ctx->gcm_tag_len = gcm_param->ulTagBits;
-		gcm_ctx->gcm_tag_len >>= 3;
+			size_t tbits = gcm_param->ulTagBits;
+			tag_len = CRYPTO_BITS2BYTES(tbits);
+			iv_len = gcm_param->ulIvLen;
+		} else {
+			/* GMAC mode. */
+			gcm_ctx->gcm_flags |= GMAC_MODE;
+			tag_len = CRYPTO_BITS2BYTES(AES_GMAC_TAG_BITS);
+			iv_len = AES_GMAC_IV_LEN;
+		}
+		gcm_ctx->gcm_tag_len = tag_len;
 		gcm_ctx->gcm_processed_data_len = 0;
 
 		/* these values are in bits */
 		gcm_ctx->gcm_len_a_len_c[0]
 		    = htonll(CRYPTO_BYTES2BITS(gcm_param->ulAADLen));
 
-		rv = CRYPTO_SUCCESS;
-		gcm_ctx->gcm_flags |= GCM_MODE;
 	} else {
 		return (CRYPTO_MECHANISM_PARAM_INVALID);
 	}
 
+	const uint8_t *iv = (const uint8_t *)gcm_param->pIv;
+	const uint8_t *aad = (const uint8_t *)gcm_param->pAAD;
+	size_t aad_len = gcm_param->ulAADLen;
+
 #ifdef CAN_USE_GCM_ASM
+	boolean_t needs_bswap =
+	    ((aes_key_t *)gcm_ctx->gcm_keysched)->ops->needs_byteswap;
+
 	if (GCM_IMPL_READ(icp_gcm_impl) != IMPL_CYCLE) {
 		gcm_ctx->gcm_use_avx = GCM_IMPL_USE_AVX;
 	} else {
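The gmac_mode branch above hard-codes exactly the two values a GCM caller passes in: the IV length and the tag width. A sketch of that caller-visible difference follows, with both parameter structs abridged to the fields this diff references; the struct names, field order, and the values 12 bytes and 128 bits are assumptions based on the GMAC convention of a 96-bit IV and a full-width tag, not copied from the ICP headers:

#include <stddef.h>

typedef unsigned char uchar_t;
typedef unsigned long ulong_t;

/* Abridged: only the members referenced in this diff. */
typedef struct {
	uchar_t	*pIv;
	ulong_t	ulIvLen;	/* caller-chosen, checked by gcm_validate_args() */
	uchar_t	*pAAD;
	ulong_t	ulAADLen;
	ulong_t	ulTagBits;	/* caller-chosen tag width in bits */
} aes_gcm_params_t;

typedef struct {
	uchar_t	*pIv;		/* implicitly AES_GMAC_IV_LEN (12) bytes */
	uchar_t	*pAAD;
	ulong_t	ulAADLen;	/* tag width fixed: AES_GMAC_TAG_BITS (128) */
} aes_gmac_params_t;

int
main(void)
{
	static uchar_t iv[12], aad[16];
	aes_gmac_params_t p = { .pIv = iv, .pAAD = aad,
	    .ulAADLen = sizeof (aad) };

	(void) p;	/* GMAC callers never specify IV length or tag bits */
	return (0);
}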
@@ -629,96 +677,41 @@ gcm_init_ctx(gcm_ctx_t *gcm_ctx, char *param, size_t block_size,
 		 * non-avx contexts alternately.
 		 */
 		gcm_ctx->gcm_use_avx = gcm_toggle_avx();
-		/*
-		 * We don't handle byte swapped key schedules in the avx
-		 * code path.
-		 */
-		aes_key_t *ks = (aes_key_t *)gcm_ctx->gcm_keysched;
-		if (ks->ops->needs_byteswap == B_TRUE) {
+
+		/* The avx impl. doesn't handle byte swapped key schedules. */
+		if (gcm_ctx->gcm_use_avx == B_TRUE && needs_bswap == B_TRUE) {
 			gcm_ctx->gcm_use_avx = B_FALSE;
 		}
-		/* Use the MOVBE and the BSWAP variants alternately. */
-		if (gcm_ctx->gcm_use_avx == B_TRUE &&
+		/*
+		 * If this is a GCM context, use the MOVBE and the BSWAP
+		 * variants alternately. GMAC contexts code paths do not
+		 * use the MOVBE instruction.
+		 */
+		if (gcm_ctx->gcm_use_avx == B_TRUE && gmac_mode == B_FALSE &&
 		    zfs_movbe_available() == B_TRUE) {
 			(void) atomic_toggle_boolean_nv(
 			    (volatile boolean_t *)&gcm_avx_can_use_movbe);
 		}
 	}
-	/* Allocate Htab memory as needed. */
-	if (gcm_ctx->gcm_use_avx == B_TRUE) {
-		size_t htab_len = gcm_simd_get_htab_size(gcm_ctx->gcm_use_avx);
-
-		if (htab_len == 0) {
-			return (CRYPTO_MECHANISM_PARAM_INVALID);
-		}
-		gcm_ctx->gcm_htab_len = htab_len;
-		gcm_ctx->gcm_Htable =
-		    kmem_alloc(htab_len, KM_SLEEP);
-
-		if (gcm_ctx->gcm_Htable == NULL) {
-			return (CRYPTO_HOST_MEMORY);
-		}
-	}
-	/* Avx and non avx context initialization differs from here on. */
-	if (gcm_ctx->gcm_use_avx == B_FALSE) {
-#endif /* ifdef CAN_USE_GCM_ASM */
-		if (gcm_init(gcm_ctx, gcm_param->pIv, gcm_param->ulIvLen,
-		    gcm_param->pAAD, gcm_param->ulAADLen, block_size,
-		    encrypt_block, copy_block, xor_block) != 0) {
-			rv = CRYPTO_MECHANISM_PARAM_INVALID;
-		}
-#ifdef CAN_USE_GCM_ASM
-	} else {
-		if (gcm_init_avx(gcm_ctx, gcm_param->pIv, gcm_param->ulIvLen,
-		    gcm_param->pAAD, gcm_param->ulAADLen, block_size) != 0) {
-			rv = CRYPTO_MECHANISM_PARAM_INVALID;
-		}
-	}
-#endif /* ifdef CAN_USE_GCM_ASM */
-
-	return (rv);
-}
-
-int
-gmac_init_ctx(gcm_ctx_t *gcm_ctx, char *param, size_t block_size,
-    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
-    void (*copy_block)(uint8_t *, uint8_t *),
-    void (*xor_block)(uint8_t *, uint8_t *))
-{
-	int rv;
-	CK_AES_GMAC_PARAMS *gmac_param;
-
-	if (param != NULL) {
-		gmac_param = (CK_AES_GMAC_PARAMS *)(void *)param;
-
-		gcm_ctx->gcm_tag_len = CRYPTO_BITS2BYTES(AES_GMAC_TAG_BITS);
-		gcm_ctx->gcm_processed_data_len = 0;
-
-		/* these values are in bits */
-		gcm_ctx->gcm_len_a_len_c[0]
-		    = htonll(CRYPTO_BYTES2BITS(gmac_param->ulAADLen));
-
-		rv = CRYPTO_SUCCESS;
-		gcm_ctx->gcm_flags |= GMAC_MODE;
-	} else {
-		return (CRYPTO_MECHANISM_PARAM_INVALID);
-	}
-
-#ifdef CAN_USE_GCM_ASM
 	/*
-	 * Handle the "cycle" implementation by creating avx and non avx
-	 * contexts alternately.
+	 * We don't handle byte swapped key schedules in the avx code path,
+	 * still they could be created by the aes generic implementation.
+	 * Make sure not to use them since we'll corrupt data if we do.
 	 */
-	if (GCM_IMPL_READ(icp_gcm_impl) != IMPL_CYCLE) {
-		gcm_ctx->gcm_use_avx = GCM_IMPL_USE_AVX;
-	} else {
-		gcm_ctx->gcm_use_avx = gcm_toggle_avx();
-	}
-	/* We don't handle byte swapped key schedules in the avx code path. */
-	aes_key_t *ks = (aes_key_t *)gcm_ctx->gcm_keysched;
-	if (ks->ops->needs_byteswap == B_TRUE) {
+	if (gcm_ctx->gcm_use_avx == B_TRUE && needs_bswap == B_TRUE) {
 		gcm_ctx->gcm_use_avx = B_FALSE;
+
+		cmn_err_once(CE_WARN,
+		    "ICP: Can't use the aes generic or cycle implementations "
+		    "in combination with the gcm avx implementation!");
+		cmn_err_once(CE_WARN,
+		    "ICP: Falling back to a compatible implementation, "
+		    "aes-gcm performance will likely be degraded.");
+		cmn_err_once(CE_WARN,
+		    "ICP: Choose at least the x86_64 aes implementation to "
+		    "restore performance.");
 	}
 
 	/* Allocate Htab memory as needed. */
 	if (gcm_ctx->gcm_use_avx == B_TRUE) {
 		size_t htab_len = gcm_simd_get_htab_size(gcm_ctx->gcm_use_avx);
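When the fallback triggers, the hunk above reports it through cmn_err_once(), which emits a given message only the first time it is hit, so a busy system is not flooded with one warning per context. A rough user-space model of that warn-once behavior, using C11 atomics rather than the kernel's machinery (WARN_ONCE here is a hypothetical macro, not the ICP one):

#include <stdatomic.h>
#include <stdio.h>

/* Hypothetical model: print a warning only the first time it triggers. */
#define	WARN_ONCE(msg)						\
	do {							\
		static atomic_flag seen = ATOMIC_FLAG_INIT;	\
		if (!atomic_flag_test_and_set(&seen))		\
			fprintf(stderr, "WARNING: %s\n", msg);	\
	} while (0)

int
main(void)
{
	for (int i = 0; i < 3; i++)
		WARN_ONCE("ICP: falling back to a compatible implementation");
	return (0);	/* the warning prints once, not three times */
}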
@@ -734,19 +727,17 @@ gmac_init_ctx(gcm_ctx_t *gcm_ctx, char *param, size_t block_size,
 			return (CRYPTO_HOST_MEMORY);
 		}
 	}
-
 	/* Avx and non avx context initialization differs from here on. */
 	if (gcm_ctx->gcm_use_avx == B_FALSE) {
 #endif /* ifdef CAN_USE_GCM_ASM */
-		if (gcm_init(gcm_ctx, gmac_param->pIv, AES_GMAC_IV_LEN,
-		    gmac_param->pAAD, gmac_param->ulAADLen, block_size,
-		    encrypt_block, copy_block, xor_block) != 0) {
+		if (gcm_init(gcm_ctx, iv, iv_len, aad, aad_len, block_size,
+		    encrypt_block, copy_block, xor_block) != CRYPTO_SUCCESS) {
 			rv = CRYPTO_MECHANISM_PARAM_INVALID;
 		}
 #ifdef CAN_USE_GCM_ASM
 	} else {
-		if (gcm_init_avx(gcm_ctx, gmac_param->pIv, AES_GMAC_IV_LEN,
-		    gmac_param->pAAD, gmac_param->ulAADLen, block_size) != 0) {
+		if (gcm_init_avx(gcm_ctx, iv, iv_len, aad, aad_len,
+		    block_size) != CRYPTO_SUCCESS) {
 			rv = CRYPTO_MECHANISM_PARAM_INVALID;
 		}
 	}
@@ -1162,6 +1153,8 @@ gcm_mode_encrypt_contiguous_blocks_avx(gcm_ctx_t *ctx, char *data,
 	int rv = CRYPTO_SUCCESS;
 
 	ASSERT(block_size == GCM_BLOCK_LEN);
+	ASSERT3S(((aes_key_t *)ctx->gcm_keysched)->ops->needs_byteswap, ==,
+	    B_FALSE);
 	/*
 	 * If the last call left an incomplete block, try to fill
 	 * it first.
@@ -1306,6 +1299,8 @@ gcm_encrypt_final_avx(gcm_ctx_t *ctx, crypto_data_t *out, size_t block_size)
 	int rv;
 
 	ASSERT(block_size == GCM_BLOCK_LEN);
+	ASSERT3S(((aes_key_t *)ctx->gcm_keysched)->ops->needs_byteswap, ==,
+	    B_FALSE);
 
 	if (out->cd_length < (rem_len + ctx->gcm_tag_len)) {
 		return (CRYPTO_DATA_LEN_RANGE);
@@ -1361,6 +1356,8 @@ gcm_decrypt_final_avx(gcm_ctx_t *ctx, crypto_data_t *out, size_t block_size)
 {
 	ASSERT3U(ctx->gcm_processed_data_len, ==, ctx->gcm_pt_buf_len);
 	ASSERT3U(block_size, ==, 16);
+	ASSERT3S(((aes_key_t *)ctx->gcm_keysched)->ops->needs_byteswap, ==,
+	    B_FALSE);
 
 	size_t chunk_size = (size_t)GCM_CHUNK_SIZE_READ;
 	size_t pt_len = ctx->gcm_processed_data_len - ctx->gcm_tag_len;
@@ -1466,18 +1463,20 @@ gcm_decrypt_final_avx(gcm_ctx_t *ctx, crypto_data_t *out, size_t block_size)
  * initial counter block.
  */
 static int
-gcm_init_avx(gcm_ctx_t *ctx, unsigned char *iv, size_t iv_len,
-    unsigned char *auth_data, size_t auth_data_len, size_t block_size)
+gcm_init_avx(gcm_ctx_t *ctx, const uint8_t *iv, size_t iv_len,
+    const uint8_t *auth_data, size_t auth_data_len, size_t block_size)
 {
 	uint8_t *cb = (uint8_t *)ctx->gcm_cb;
 	uint64_t *H = ctx->gcm_H;
 	const void *keysched = ((aes_key_t *)ctx->gcm_keysched)->encr_ks.ks32;
 	int aes_rounds = ((aes_key_t *)ctx->gcm_keysched)->nr;
-	uint8_t *datap = auth_data;
+	const uint8_t *datap = auth_data;
 	size_t chunk_size = (size_t)GCM_CHUNK_SIZE_READ;
 	size_t bleft;
 
 	ASSERT(block_size == GCM_BLOCK_LEN);
+	ASSERT3S(((aes_key_t *)ctx->gcm_keysched)->ops->needs_byteswap, ==,
+	    B_FALSE);
 
 	/* Init H (encrypt zero block) and create the initial counter block. */
 	memset(ctx->gcm_ghash, 0, sizeof (ctx->gcm_ghash));
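Finally, the constification of iv and auth_data in this last hunk is what lets callers holding read-only buffers reach gcm_init_avx() without casts. A minimal before/after illustration; init_old() and init_new() are hypothetical stand-ins for the two signatures, not the ICP functions:

#include <stddef.h>
#include <stdint.h>

/* Before: a non-const parameter forces callers to cast away const. */
static int
init_old(uint8_t *iv, size_t iv_len)
{
	(void) iv;
	return ((int)iv_len);
}

/* After: a const parameter documents that the IV is only read. */
static int
init_new(const uint8_t *iv, size_t iv_len)
{
	(void) iv;
	return ((int)iv_len);
}

int
main(void)
{
	static const uint8_t iv[12] = { 0 };

	(void) init_old((uint8_t *)iv, sizeof (iv));	/* old: cast needed */
	(void) init_new(iv, sizeof (iv));		/* new: passes cleanly */
	return (0);
}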