Remove checks for null out value in encryption paths

These paths are never exercised, as callers always pass distinct
ciphertext and plaintext `crypto_data_t` pointers.

Reviewed-by: Richard Laager <rlaager@wiktel.com>
Reviewed-by: Brian Behlendorf <behlendorf1@llnl.gov>
Reviewed-by: Attila Fueloep <attila@fueloep.org>
Signed-off-by: Dirkjan Bussink <d.bussink@gmail.com>
Closes #9661 
Closes #10015
Author: Dirkjan Bussink, 2020-03-26 18:41:57 +01:00 (committed by GitHub)
parent 1d2ddb9bb9
commit 112c1bff94
11 changed files with 104 additions and 217 deletions
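
The reasoning above can be restated as a short caller-side sketch (illustrative only, not part of the commit; the function and buffer names are hypothetical, and the field usage assumes the standard ICP `crypto_data_t` raw layout): every consumer prepares two separate `crypto_data_t` descriptors, one wrapping the plaintext and one wrapping the ciphertext, so the mode implementations never see `out == NULL`.

/*
 * Hypothetical caller-side setup illustrating the invariant this commit
 * relies on: the input and output descriptors always reference distinct
 * buffers, so in-place (out == NULL) operation cannot occur.
 */
static void
example_setup(crypto_data_t *in, crypto_data_t *out,
    char *ptbuf, char *ctbuf, size_t len)
{
    bzero(in, sizeof (*in));
    in->cd_format = CRYPTO_DATA_RAW;    /* flat buffer */
    in->cd_offset = 0;
    in->cd_length = len;
    in->cd_raw.iov_base = ptbuf;
    in->cd_raw.iov_len = len;

    bzero(out, sizeof (*out));
    out->cd_format = CRYPTO_DATA_RAW;
    out->cd_offset = 0;
    out->cd_length = len;
    out->cd_raw.iov_base = ctbuf;       /* never aliases ptbuf */
    out->cd_raw.iov_len = len;

    ASSERT(in != out);
}

With that contract in place, the `if (out == NULL)` branches in the block-mode implementations and the `AES_ARG_INPLACE()` macro in the AES provider were dead code, which is what the diffs below remove.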


@@ -60,8 +60,7 @@ cbc_encrypt_contiguous_blocks(cbc_ctx_t *ctx, char *data, size_t length,
     }
 
     lastp = (uint8_t *)ctx->cbc_iv;
-    if (out != NULL)
-        crypto_init_ptrs(out, &iov_or_mp, &offset);
+    crypto_init_ptrs(out, &iov_or_mp, &offset);
 
     do {
         /* Unprocessed data from last call. */
@@ -79,47 +78,28 @@ cbc_encrypt_contiguous_blocks(cbc_ctx_t *ctx, char *data, size_t length,
             blockp = datap;
         }
 
-        if (out == NULL) {
-            /*
-             * XOR the previous cipher block or IV with the
-             * current clear block.
-             */
-            xor_block(lastp, blockp);
-            encrypt(ctx->cbc_keysched, blockp, blockp);
-
-            ctx->cbc_lastp = blockp;
-            lastp = blockp;
+        /*
+         * XOR the previous cipher block or IV with the
+         * current clear block.
+         */
+        xor_block(blockp, lastp);
+        encrypt(ctx->cbc_keysched, lastp, lastp);
+        crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
+            &out_data_1_len, &out_data_2, block_size);
 
-            if (ctx->cbc_remainder_len > 0) {
-                bcopy(blockp, ctx->cbc_copy_to,
-                    ctx->cbc_remainder_len);
-                bcopy(blockp + ctx->cbc_remainder_len, datap,
-                    need);
-            }
+        /* copy block to where it belongs */
+        if (out_data_1_len == block_size) {
+            copy_block(lastp, out_data_1);
         } else {
-            /*
-             * XOR the previous cipher block or IV with the
-             * current clear block.
-             */
-            xor_block(blockp, lastp);
-            encrypt(ctx->cbc_keysched, lastp, lastp);
-            crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
-                &out_data_1_len, &out_data_2, block_size);
-
-            /* copy block to where it belongs */
-            if (out_data_1_len == block_size) {
-                copy_block(lastp, out_data_1);
-            } else {
-                bcopy(lastp, out_data_1, out_data_1_len);
-                if (out_data_2 != NULL) {
-                    bcopy(lastp + out_data_1_len,
-                        out_data_2,
-                        block_size - out_data_1_len);
-                }
+            bcopy(lastp, out_data_1, out_data_1_len);
+            if (out_data_2 != NULL) {
+                bcopy(lastp + out_data_1_len,
+                    out_data_2,
+                    block_size - out_data_1_len);
             }
-            /* update offset */
-            out->cd_offset += block_size;
         }
+        /* update offset */
+        out->cd_offset += block_size;
 
         /* Update pointer to next block of data to be processed. */
         if (ctx->cbc_remainder_len != 0) {
@@ -187,8 +167,7 @@ cbc_decrypt_contiguous_blocks(cbc_ctx_t *ctx, char *data, size_t length,
     }
 
     lastp = ctx->cbc_lastp;
-    if (out != NULL)
-        crypto_init_ptrs(out, &iov_or_mp, &offset);
+    crypto_init_ptrs(out, &iov_or_mp, &offset);
 
     do {
         /* Unprocessed data from last call. */
@@ -209,13 +188,9 @@ cbc_decrypt_contiguous_blocks(cbc_ctx_t *ctx, char *data, size_t length,
         /* LINTED: pointer alignment */
         copy_block(blockp, (uint8_t *)OTHER((uint64_t *)lastp, ctx));
 
-        if (out != NULL) {
-            decrypt(ctx->cbc_keysched, blockp,
-                (uint8_t *)ctx->cbc_remainder);
-            blockp = (uint8_t *)ctx->cbc_remainder;
-        } else {
-            decrypt(ctx->cbc_keysched, blockp, blockp);
-        }
+        decrypt(ctx->cbc_keysched, blockp,
+            (uint8_t *)ctx->cbc_remainder);
+        blockp = (uint8_t *)ctx->cbc_remainder;
 
         /*
          * XOR the previous cipher block or IV with the
@@ -226,25 +201,18 @@ cbc_decrypt_contiguous_blocks(cbc_ctx_t *ctx, char *data, size_t length,
         /* LINTED: pointer alignment */
         lastp = (uint8_t *)OTHER((uint64_t *)lastp, ctx);
 
-        if (out != NULL) {
-            crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
-                &out_data_1_len, &out_data_2, block_size);
+        crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
+            &out_data_1_len, &out_data_2, block_size);
 
-            bcopy(blockp, out_data_1, out_data_1_len);
-            if (out_data_2 != NULL) {
-                bcopy(blockp + out_data_1_len, out_data_2,
-                    block_size - out_data_1_len);
-            }
-
-            /* update offset */
-            out->cd_offset += block_size;
-
-        } else if (ctx->cbc_remainder_len > 0) {
-            /* copy temporary block to where it belongs */
-            bcopy(blockp, ctx->cbc_copy_to, ctx->cbc_remainder_len);
-            bcopy(blockp + ctx->cbc_remainder_len, datap, need);
+        bcopy(blockp, out_data_1, out_data_1_len);
+        if (out_data_2 != NULL) {
+            bcopy(blockp + out_data_1_len, out_data_2,
+                block_size - out_data_1_len);
         }
 
+        /* update offset */
+        out->cd_offset += block_size;
+
         /* Update pointer to next block of data to be processed. */
         if (ctx->cbc_remainder_len != 0) {
             datap += need;


@@ -68,8 +68,7 @@ ccm_mode_encrypt_contiguous_blocks(ccm_ctx_t *ctx, char *data, size_t length,
     }
 
     lastp = (uint8_t *)ctx->ccm_cb;
-    if (out != NULL)
-        crypto_init_ptrs(out, &iov_or_mp, &offset);
+    crypto_init_ptrs(out, &iov_or_mp, &offset);
 
     mac_buf = (uint8_t *)ctx->ccm_mac_buf;
 
@@ -126,31 +125,22 @@ ccm_mode_encrypt_contiguous_blocks(ccm_ctx_t *ctx, char *data, size_t length,
         ctx->ccm_processed_data_len += block_size;
 
-        if (out == NULL) {
-            if (ctx->ccm_remainder_len > 0) {
-                bcopy(blockp, ctx->ccm_copy_to,
-                    ctx->ccm_remainder_len);
-                bcopy(blockp + ctx->ccm_remainder_len, datap,
-                    need);
-            }
-        } else {
-            crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
-                &out_data_1_len, &out_data_2, block_size);
+        crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
+            &out_data_1_len, &out_data_2, block_size);
 
-            /* copy block to where it belongs */
-            if (out_data_1_len == block_size) {
-                copy_block(lastp, out_data_1);
-            } else {
-                bcopy(lastp, out_data_1, out_data_1_len);
-                if (out_data_2 != NULL) {
-                    bcopy(lastp + out_data_1_len,
-                        out_data_2,
-                        block_size - out_data_1_len);
-                }
+        /* copy block to where it belongs */
+        if (out_data_1_len == block_size) {
+            copy_block(lastp, out_data_1);
+        } else {
+            bcopy(lastp, out_data_1, out_data_1_len);
+            if (out_data_2 != NULL) {
+                bcopy(lastp + out_data_1_len,
+                    out_data_2,
+                    block_size - out_data_1_len);
             }
-            /* update offset */
-            out->cd_offset += block_size;
         }
+        /* update offset */
+        out->cd_offset += block_size;
 
         /* Update pointer to next block of data to be processed. */
         if (ctx->ccm_remainder_len != 0) {
             datap += need;


@@ -61,8 +61,7 @@ ctr_mode_contiguous_blocks(ctr_ctx_t *ctx, char *data, size_t length,
     }
 
     lastp = (uint8_t *)ctx->ctr_cb;
-    if (out != NULL)
-        crypto_init_ptrs(out, &iov_or_mp, &offset);
+    crypto_init_ptrs(out, &iov_or_mp, &offset);
 
     do {
         /* Unprocessed data from last call. */
@@ -111,26 +110,17 @@ ctr_mode_contiguous_blocks(ctr_ctx_t *ctx, char *data, size_t length,
          */
         xor_block(blockp, lastp);
 
-        if (out == NULL) {
-            if (ctx->ctr_remainder_len > 0) {
-                bcopy(lastp, ctx->ctr_copy_to,
-                    ctx->ctr_remainder_len);
-                bcopy(lastp + ctx->ctr_remainder_len, datap,
-                    need);
-            }
-        } else {
-            crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
-                &out_data_1_len, &out_data_2, block_size);
+        crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
+            &out_data_1_len, &out_data_2, block_size);
 
-            /* copy block to where it belongs */
-            bcopy(lastp, out_data_1, out_data_1_len);
-            if (out_data_2 != NULL) {
-                bcopy(lastp + out_data_1_len, out_data_2,
-                    block_size - out_data_1_len);
-            }
-            /* update offset */
-            out->cd_offset += block_size;
+        /* copy block to where it belongs */
+        bcopy(lastp, out_data_1, out_data_1_len);
+        if (out_data_2 != NULL) {
+            bcopy(lastp + out_data_1_len, out_data_2,
+                block_size - out_data_1_len);
         }
+        /* update offset */
+        out->cd_offset += block_size;
 
         /* Update pointer to next block of data to be processed. */
         if (ctx->ctr_remainder_len != 0) {


@@ -58,8 +58,7 @@ ecb_cipher_contiguous_blocks(ecb_ctx_t *ctx, char *data, size_t length,
     }
 
     lastp = (uint8_t *)ctx->ecb_iv;
-    if (out != NULL)
-        crypto_init_ptrs(out, &iov_or_mp, &offset);
+    crypto_init_ptrs(out, &iov_or_mp, &offset);
 
     do {
         /* Unprocessed data from last call. */
@@ -77,32 +76,18 @@ ecb_cipher_contiguous_blocks(ecb_ctx_t *ctx, char *data, size_t length,
             blockp = datap;
         }
 
-        if (out == NULL) {
-            cipher(ctx->ecb_keysched, blockp, blockp);
-
-            ctx->ecb_lastp = blockp;
-            lastp = blockp;
-
-            if (ctx->ecb_remainder_len > 0) {
-                bcopy(blockp, ctx->ecb_copy_to,
-                    ctx->ecb_remainder_len);
-                bcopy(blockp + ctx->ecb_remainder_len, datap,
-                    need);
-            }
-        } else {
-            cipher(ctx->ecb_keysched, blockp, lastp);
-            crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
-                &out_data_1_len, &out_data_2, block_size);
+        cipher(ctx->ecb_keysched, blockp, lastp);
+        crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
+            &out_data_1_len, &out_data_2, block_size);
 
-            /* copy block to where it belongs */
-            bcopy(lastp, out_data_1, out_data_1_len);
-            if (out_data_2 != NULL) {
-                bcopy(lastp + out_data_1_len, out_data_2,
-                    block_size - out_data_1_len);
-            }
-            /* update offset */
-            out->cd_offset += block_size;
+        /* copy block to where it belongs */
+        bcopy(lastp, out_data_1, out_data_1_len);
+        if (out_data_2 != NULL) {
+            bcopy(lastp + out_data_1_len, out_data_2,
+                block_size - out_data_1_len);
         }
+        /* update offset */
+        out->cd_offset += block_size;
 
         /* Update pointer to next block of data to be processed. */
         if (ctx->ecb_remainder_len != 0) {


@@ -117,8 +117,7 @@ gcm_mode_encrypt_contiguous_blocks(gcm_ctx_t *ctx, char *data, size_t length,
     }
 
     lastp = (uint8_t *)ctx->gcm_cb;
-    if (out != NULL)
-        crypto_init_ptrs(out, &iov_or_mp, &offset);
+    crypto_init_ptrs(out, &iov_or_mp, &offset);
 
     gops = gcm_impl_get_ops();
     do {
@@ -154,39 +153,22 @@ gcm_mode_encrypt_contiguous_blocks(gcm_ctx_t *ctx, char *data, size_t length,
         ctx->gcm_processed_data_len += block_size;
 
-        /*
-         * The following copies a complete GCM block back to where it
-         * came from if there was a remainder in the last call and out
-         * is NULL. That doesn't seem to make sense. So we assert this
-         * can't happen and leave the code in for reference.
-         * See https://github.com/zfsonlinux/zfs/issues/9661
-         */
-        ASSERT(out != NULL);
-        if (out == NULL) {
-            if (ctx->gcm_remainder_len > 0) {
-                bcopy(blockp, ctx->gcm_copy_to,
-                    ctx->gcm_remainder_len);
-                bcopy(blockp + ctx->gcm_remainder_len, datap,
-                    need);
-            }
-        } else {
-            crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
-                &out_data_1_len, &out_data_2, block_size);
+        crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
+            &out_data_1_len, &out_data_2, block_size);
 
-            /* copy block to where it belongs */
-            if (out_data_1_len == block_size) {
-                copy_block(lastp, out_data_1);
-            } else {
-                bcopy(lastp, out_data_1, out_data_1_len);
-                if (out_data_2 != NULL) {
-                    bcopy(lastp + out_data_1_len,
-                        out_data_2,
-                        block_size - out_data_1_len);
-                }
+        /* copy block to where it belongs */
+        if (out_data_1_len == block_size) {
+            copy_block(lastp, out_data_1);
+        } else {
+            bcopy(lastp, out_data_1, out_data_1_len);
+            if (out_data_2 != NULL) {
+                bcopy(lastp + out_data_1_len,
+                    out_data_2,
+                    block_size - out_data_1_len);
             }
-            /* update offset */
-            out->cd_offset += block_size;
         }
+        /* update offset */
+        out->cd_offset += block_size;
 
         /* add ciphertext to the hash */
         GHASH(ctx, ctx->gcm_tmp, ctx->gcm_ghash, gops);
 
@@ -1093,7 +1075,7 @@ gcm_toggle_avx(void)
 }
 
 /*
- * Clear senssitve data in the context.
+ * Clear sensitive data in the context.
  *
  * ctx->gcm_remainder may contain a plaintext remainder. ctx->gcm_H and
  * ctx->gcm_Htable contain the hash sub key which protects authentication.
@@ -1189,13 +1171,6 @@ gcm_mode_encrypt_contiguous_blocks_avx(gcm_ctx_t *ctx, char *data,
         GHASH_AVX(ctx, tmp, block_size);
         clear_fpu_regs();
         kfpu_end();
-        /*
-         * We don't follow gcm_mode_encrypt_contiguous_blocks() here
-         * but assert that out is not null.
-         * See gcm_mode_encrypt_contiguous_blocks() above and
-         * https://github.com/zfsonlinux/zfs/issues/9661
-         */
-        ASSERT(out != NULL);
         rv = crypto_put_output_data(tmp, out, block_size);
         out->cd_offset += block_size;
         gcm_incr_counter_block(ctx);
@@ -1217,13 +1192,11 @@ gcm_mode_encrypt_contiguous_blocks_avx(gcm_ctx_t *ctx, char *data,
             rv = CRYPTO_FAILED;
             goto out_nofpu;
         }
-        if (out != NULL) {
-            rv = crypto_put_output_data(ct_buf, out, chunk_size);
-            if (rv != CRYPTO_SUCCESS) {
-                goto out_nofpu;
-            }
-            out->cd_offset += chunk_size;
+        rv = crypto_put_output_data(ct_buf, out, chunk_size);
+        if (rv != CRYPTO_SUCCESS) {
+            goto out_nofpu;
         }
+        out->cd_offset += chunk_size;
         datap += chunk_size;
         ctx->gcm_processed_data_len += chunk_size;
     }
@@ -1239,13 +1212,11 @@ gcm_mode_encrypt_contiguous_blocks_avx(gcm_ctx_t *ctx, char *data,
             rv = CRYPTO_FAILED;
             goto out;
         }
-        if (out != NULL) {
-            rv = crypto_put_output_data(ct_buf, out, done);
-            if (rv != CRYPTO_SUCCESS) {
-                goto out;
-            }
-            out->cd_offset += done;
+        rv = crypto_put_output_data(ct_buf, out, done);
+        if (rv != CRYPTO_SUCCESS) {
+            goto out;
         }
+        out->cd_offset += done;
         ctx->gcm_processed_data_len += done;
         datap += done;
         bleft -= done;
@@ -1265,13 +1236,11 @@ gcm_mode_encrypt_contiguous_blocks_avx(gcm_ctx_t *ctx, char *data,
         gcm_xor_avx(datap, tmp);
         GHASH_AVX(ctx, tmp, block_size);
 
-        if (out != NULL) {
-            rv = crypto_put_output_data(tmp, out, block_size);
-            if (rv != CRYPTO_SUCCESS) {
-                goto out;
-            }
-            out->cd_offset += block_size;
+        rv = crypto_put_output_data(tmp, out, block_size);
+        if (rv != CRYPTO_SUCCESS) {
+            goto out;
         }
+        out->cd_offset += block_size;
         gcm_incr_counter_block(ctx);
         ctx->gcm_processed_data_len += block_size;
         datap += block_size;


@@ -30,9 +30,6 @@
 #include <sys/crypto/spi.h>
 #include <sys/crypto/sched_impl.h>
 
-#define CRYPTO_OPS_OFFSET(f) offsetof(crypto_ops_t, co_##f)
-#define CRYPTO_CIPHER_OFFSET(f) offsetof(crypto_cipher_ops_t, f)
-
 /*
  * Encryption and decryption routines.
  */


@@ -30,9 +30,6 @@
 #include <sys/crypto/spi.h>
 #include <sys/crypto/sched_impl.h>
 
-#define CRYPTO_OPS_OFFSET(f) offsetof(crypto_ops_t, co_##f)
-#define CRYPTO_DIGEST_OFFSET(f) offsetof(crypto_digest_ops_t, f)
-
 /*
  * Message digest routines
 */


@@ -30,9 +30,6 @@
 #include <sys/crypto/spi.h>
 #include <sys/crypto/sched_impl.h>
 
-#define CRYPTO_OPS_OFFSET(f) offsetof(crypto_ops_t, co_##f)
-#define CRYPTO_MAC_OFFSET(f) offsetof(crypto_mac_ops_t, f)
-
 /*
  * Message authentication codes routines.
 */


@@ -149,6 +149,7 @@ crypto_update_iov(void *ctx, crypto_data_t *input, crypto_data_t *output,
     common_ctx_t *common_ctx = ctx;
     int rv;
 
+    ASSERT(input != output);
     if (input->cd_miscdata != NULL) {
         copy_block((uint8_t *)input->cd_miscdata,
             &common_ctx->cc_iv[0]);
@@ -158,7 +159,7 @@
         return (CRYPTO_ARGUMENTS_BAD);
 
     rv = (cipher)(ctx, input->cd_raw.iov_base + input->cd_offset,
-        input->cd_length, (input == output) ? NULL : output);
+        input->cd_length, output);
 
     return (rv);
 }
@@ -175,6 +176,7 @@ crypto_update_uio(void *ctx, crypto_data_t *input, crypto_data_t *output,
     uint_t vec_idx;
     size_t cur_len;
 
+    ASSERT(input != output);
     if (input->cd_miscdata != NULL) {
         copy_block((uint8_t *)input->cd_miscdata,
             &common_ctx->cc_iv[0]);
@@ -208,7 +210,7 @@
             offset, length);
 
         int rv = (cipher)(ctx, uiop->uio_iov[vec_idx].iov_base + offset,
-            cur_len, (input == output) ? NULL : output);
+            cur_len, output);
 
         if (rv != CRYPTO_SUCCESS) {
             return (rv);

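For clarity, here is a minimal sketch of the contract the new `ASSERT(input != output)` checks above encode (the wrapper name and callback type are hypothetical, not code from the tree): the update helpers now always forward a real, distinct output buffer to the per-mode cipher routine, so an accidental in-place request fails fast in debug builds instead of silently selecting the removed fallback.

/* Hypothetical wrapper restating the contract enforced above. */
static int
icp_update_checked(void *ctx, crypto_data_t *input, crypto_data_t *output,
    int (*cipher)(void *, char *, size_t, crypto_data_t *))
{
    ASSERT(output != NULL);     /* in-place (NULL output) is gone */
    ASSERT(input != output);    /* distinct descriptors required */

    return (cipher(ctx, input->cd_raw.iov_base + input->cd_offset,
        input->cd_length, output));
}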

@@ -241,9 +241,6 @@ typedef struct crypto_logout32 {
 #define CRYPTO_LOGIN CRYPTO(40)
 #define CRYPTO_LOGOUT CRYPTO(41)
 
-/* flag for encrypt and decrypt operations */
-#define CRYPTO_INPLACE_OPERATION 0x00000001
-
 /*
  * Cryptographic Ioctls
  */


@@ -92,11 +92,6 @@ static crypto_mech_info_t aes_mech_info_tab[] = {
     AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
 };
 
-/* operations are in-place if the output buffer is NULL */
-#define AES_ARG_INPLACE(input, output) \
-    if ((output) == NULL) \
-        (output) = (input);
-
 static void aes_provider_status(crypto_provider_handle_t, uint_t *);
 
 static crypto_control_ops_t aes_control_ops = {
@@ -413,7 +408,7 @@ aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
         == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
         return (CRYPTO_DATA_LEN_RANGE);
 
-    AES_ARG_INPLACE(plaintext, ciphertext);
+    ASSERT(ciphertext != NULL);
 
     /*
      * We need to just return the length needed to store the output.
@@ -530,7 +525,7 @@ aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
         return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
     }
 
-    AES_ARG_INPLACE(ciphertext, plaintext);
+    ASSERT(plaintext != NULL);
 
     /*
      * Return length needed to store the output.
@@ -635,7 +630,7 @@ aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
     ASSERT(ctx->cc_provider_private != NULL);
     aes_ctx = ctx->cc_provider_private;
 
-    AES_ARG_INPLACE(plaintext, ciphertext);
+    ASSERT(ciphertext != NULL);
 
     /* compute number of bytes that will hold the ciphertext */
     out_len = aes_ctx->ac_remainder_len;
@@ -705,7 +700,7 @@ aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
     ASSERT(ctx->cc_provider_private != NULL);
     aes_ctx = ctx->cc_provider_private;
 
-    AES_ARG_INPLACE(ciphertext, plaintext);
+    ASSERT(plaintext != NULL);
 
     /*
      * Compute number of bytes that will hold the plaintext.
@@ -947,7 +942,7 @@ aes_encrypt_atomic(crypto_provider_handle_t provider,
     size_t length_needed;
     int ret;
 
-    AES_ARG_INPLACE(plaintext, ciphertext);
+    ASSERT(ciphertext != NULL);
 
     /*
      * CTR, CCM, GCM, and GMAC modes do not require that plaintext
@@ -1073,7 +1068,7 @@ aes_decrypt_atomic(crypto_provider_handle_t provider,
     size_t length_needed;
     int ret;
 
-    AES_ARG_INPLACE(ciphertext, plaintext);
+    ASSERT(plaintext != NULL);
 
     /*
      * CCM, GCM, CTR, and GMAC modes do not require that ciphertext