Commit c5307d9c authored by Andy Polyakov

evp/e_aes.c: replace calls to one-liners with references in GCM.

Even though the accessor calls can be viewed as a stylistic improvement, they
do come with a cost. The cost is not large and shows up only on short inputs,
but it is measurable: 2-3% on some platforms.
Reviewed-by: Rich Salz <rsalz@openssl.org>
(Merged from https://github.com/openssl/openssl/pull/6312)
Parent 1e653d0f
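
The pattern being applied: inside evp/e_aes.c the full EVP_CIPHER_CTX layout is visible, so opaque accessor calls such as EVP_CIPHER_CTX_encrypting(c), EVP_CIPHER_CTX_buf_noconst(c) and EVP_CIPHER_CTX_iv_noconst(c) can be replaced with direct member references (c->encrypt, c->buf, c->iv). The sketch below is a minimal, self-contained illustration of that trade-off using made-up toy_ctx names rather than the real OpenSSL types; it is not OpenSSL code, just the accessor-versus-direct-reference difference in isolation.

/*
 * Illustrative sketch only: toy_ctx and its helpers are hypothetical
 * stand-ins, not OpenSSL's EVP_CIPHER_CTX.  The point is the difference
 * between calling an out-of-line one-line accessor and reading a struct
 * member directly from code that can already see the struct definition.
 */
#include <stdio.h>
#include <string.h>

struct toy_ctx {
    int encrypt;                /* analogous to ctx->encrypt */
    unsigned char buf[16];      /* analogous to ctx->buf */
};

/* Out-of-line one-liner, like EVP_CIPHER_CTX_encrypting(): correct and
 * opaque, but every use costs a function call. */
int toy_ctx_encrypting(const struct toy_ctx *c)
{
    return c->encrypt;
}

/* "Before": module-internal code goes through the accessor anyway. */
static void copy_tag_via_accessor(struct toy_ctx *c, unsigned char *tag)
{
    if (toy_ctx_encrypting(c))
        memcpy(tag, c->buf, sizeof(c->buf));
}

/* "After": the same code references the member directly, saving the call;
 * on long inputs the cipher work dominates, on short inputs the call shows. */
static void copy_tag_direct(struct toy_ctx *c, unsigned char *tag)
{
    if (c->encrypt)
        memcpy(tag, c->buf, sizeof(c->buf));
}

int main(void)
{
    struct toy_ctx c = { 1, "example-tag" };
    unsigned char tag[16] = { 0 };

    copy_tag_via_accessor(&c, tag);
    copy_tag_direct(&c, tag);
    printf("tag starts with: %.11s\n", (const char *)tag);
    return 0;
}

The accessors being dropped really are one-liners (they just return a context field), which is why the commit message frames this as a cost/style trade-off: nothing changes functionally, only the per-call overhead on short GCM operations goes away.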
@@ -2814,8 +2814,8 @@ static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
     case EVP_CTRL_INIT:
         gctx->key_set = 0;
         gctx->iv_set = 0;
-        gctx->ivlen = EVP_CIPHER_CTX_iv_length(c);
-        gctx->iv = EVP_CIPHER_CTX_iv_noconst(c);
+        gctx->ivlen = c->cipher->iv_len;
+        gctx->iv = c->iv;
         gctx->taglen = -1;
         gctx->iv_gen = 0;
         gctx->tls_aad_len = -1;
@@ -2826,7 +2826,7 @@ static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
             return 0;
         /* Allocate memory for IV if needed */
         if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
-            if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
+            if (gctx->iv != c->iv)
                 OPENSSL_free(gctx->iv);
             if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
                 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
@@ -2837,17 +2837,17 @@ static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
         return 1;

     case EVP_CTRL_AEAD_SET_TAG:
-        if (arg <= 0 || arg > 16 || EVP_CIPHER_CTX_encrypting(c))
+        if (arg <= 0 || arg > 16 || c->encrypt)
             return 0;
-        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
+        memcpy(c->buf, ptr, arg);
         gctx->taglen = arg;
         return 1;

     case EVP_CTRL_AEAD_GET_TAG:
-        if (arg <= 0 || arg > 16 || !EVP_CIPHER_CTX_encrypting(c)
+        if (arg <= 0 || arg > 16 || !c->encrypt
             || gctx->taglen < 0)
             return 0;
-        memcpy(ptr, EVP_CIPHER_CTX_buf_noconst(c), arg);
+        memcpy(ptr, c->buf, arg);
         return 1;

     case EVP_CTRL_GCM_SET_IV_FIXED:
@@ -2865,8 +2865,7 @@ static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
             return 0;
         if (arg)
             memcpy(gctx->iv, ptr, arg);
-        if (EVP_CIPHER_CTX_encrypting(c)
-            && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
+        if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
             return 0;
         gctx->iv_gen = 1;
         return 1;
@@ -2887,8 +2886,7 @@ static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
         return 1;

     case EVP_CTRL_GCM_SET_IV_INV:
-        if (gctx->iv_gen == 0 || gctx->key_set == 0
-            || EVP_CIPHER_CTX_encrypting(c))
+        if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
             return 0;
         memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
         CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
@@ -2899,24 +2897,22 @@ static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
         /* Save the AAD for later use */
         if (arg != EVP_AEAD_TLS1_AAD_LEN)
             return 0;
-        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
+        memcpy(c->buf, ptr, arg);
         gctx->tls_aad_len = arg;
         {
-            unsigned int len =
-                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
-                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
+            unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
             /* Correct length for explicit IV */
             if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                 return 0;
             len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
             /* If decrypting correct for tag too */
-            if (!EVP_CIPHER_CTX_encrypting(c)) {
+            if (!c->encrypt) {
                 if (len < EVP_GCM_TLS_TAG_LEN)
                     return 0;
                 len -= EVP_GCM_TLS_TAG_LEN;
             }
-            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
-            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
+            c->buf[arg - 2] = len >> 8;
+            c->buf[arg - 1] = len & 0xff;
         }
         /* Extra padding: tag appended to record */
         return EVP_GCM_TLS_TAG_LEN;
@@ -2930,8 +2926,8 @@ static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
                     return 0;
                 gctx_out->gcm.key = &gctx_out->ks;
             }
-            if (gctx->iv == EVP_CIPHER_CTX_iv_noconst(c))
-                gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
+            if (gctx->iv == c->iv)
+                gctx_out->iv = out->iv;
             else {
                 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
                     EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
@@ -2958,8 +2954,7 @@ static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
         do {
 #ifdef HWAES_CAPABLE
             if (HWAES_CAPABLE) {
-                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
-                                      &gctx->ks.ks);
+                HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                    (block128_f) HWAES_encrypt);
 # ifdef HWAES_ctr32_encrypt_blocks
@@ -2972,8 +2967,7 @@ static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
 #endif
 #ifdef BSAES_CAPABLE
             if (BSAES_CAPABLE) {
-                AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
-                                    &gctx->ks.ks);
+                AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                    (block128_f) AES_encrypt);
                 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
@@ -2982,8 +2976,7 @@ static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
 #endif
 #ifdef VPAES_CAPABLE
             if (VPAES_CAPABLE) {
-                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
-                                      &gctx->ks.ks);
+                vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                    (block128_f) vpaes_encrypt);
                 gctx->ctr = NULL;
@@ -2992,8 +2985,7 @@ static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
 #endif
             (void)0;            /* terminate potentially open 'else' */

-            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
-                                &gctx->ks.ks);
+            AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
             CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                (block128_f) AES_encrypt);
 #ifdef AES_CTR_ASM
@@ -3045,19 +3037,18 @@ static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
      * Set IV from start of buffer or generate IV and write to start of
      * buffer.
      */
-    if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CIPHER_CTX_encrypting(ctx) ?
-                            EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
+    if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
+                                              : EVP_CTRL_GCM_SET_IV_INV,
                             EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
         goto err;
     /* Use saved AAD */
-    if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
-                          gctx->tls_aad_len))
+    if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
         goto err;
     /* Fix buffer and length to point to payload */
     in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
     out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
     len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
-    if (EVP_CIPHER_CTX_encrypting(ctx)) {
+    if (ctx->encrypt) {
         /* Encrypt payload */
         if (gctx->ctr) {
             size_t bulk = 0;
@@ -3136,11 +3127,9 @@ static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                 goto err;
         }
         /* Retrieve tag */
-        CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
-                          EVP_GCM_TLS_TAG_LEN);
+        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
         /* If tag mismatch wipe buffer */
-        if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
-                          EVP_GCM_TLS_TAG_LEN)) {
+        if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
             OPENSSL_cleanse(out, len);
             goto err;
         }
@@ -3170,7 +3159,7 @@ static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
     if (out == NULL) {
         if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
             return -1;
-    } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
+    } else if (ctx->encrypt) {
         if (gctx->ctr) {
             size_t bulk = 0;
 #if defined(AES_GCM_ASM)
@@ -3261,17 +3250,15 @@ static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
         }
         return len;
     } else {
-        if (!EVP_CIPHER_CTX_encrypting(ctx)) {
+        if (!ctx->encrypt) {
             if (gctx->taglen < 0)
                 return -1;
-            if (CRYPTO_gcm128_finish(&gctx->gcm,
-                                     EVP_CIPHER_CTX_buf_noconst(ctx),
-                                     gctx->taglen) != 0)
+            if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
                 return -1;
             gctx->iv_set = 0;
             return 0;
         }
-        CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
+        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
         gctx->taglen = 16;
         /* Don't reuse the IV */
         gctx->iv_set = 0;
...