Index: openssl/crypto/evp/e_aes.c
diff --git a/openssl/crypto/evp/e_aes.c b/openssl/crypto/evp/e_aes.c
index 1bfb5d92b340172c8e97ec8d391b9d3e757b60a7..69165a2d1af2b0cd1d4d5f488b95905675b309a1 100644
--- a/openssl/crypto/evp/e_aes.c
+++ b/openssl/crypto/evp/e_aes.c
@@ -814,44 +814,45 @@ static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
 		}
 	}

-static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
-			const unsigned char *iv, int enc)
+static ctr128_f aes_gcm_set_key(AES_KEY *aes_key, GCM128_CONTEXT *gcm_ctx,
+			const unsigned char *key, size_t key_len)
 	{
-	EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
-	if (!iv && !key)
-		return 1;
-	if (key)
-		{ do {
 #ifdef BSAES_CAPABLE
 	if (BSAES_CAPABLE)
 		{
-		AES_set_encrypt_key(key,ctx->key_len*8,&gctx->ks);
-		CRYPTO_gcm128_init(&gctx->gcm,&gctx->ks,
+		AES_set_encrypt_key(key,key_len*8,aes_key);
+		CRYPTO_gcm128_init(gcm_ctx,aes_key,
 				(block128_f)AES_encrypt);
-		gctx->ctr = (ctr128_f)bsaes_ctr32_encrypt_blocks;
-		break;
+		return (ctr128_f)bsaes_ctr32_encrypt_blocks;
 		}
-	else
 #endif
 #ifdef VPAES_CAPABLE
 	if (VPAES_CAPABLE)
 		{
-		vpaes_set_encrypt_key(key,ctx->key_len*8,&gctx->ks);
-		CRYPTO_gcm128_init(&gctx->gcm,&gctx->ks,
+		vpaes_set_encrypt_key(key,key_len*8,aes_key);
+		CRYPTO_gcm128_init(gcm_ctx,aes_key,
 				(block128_f)vpaes_encrypt);
-		gctx->ctr = NULL;
-		break;
+		return NULL;
 		}
 #endif
-	AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks);
-	CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f)AES_encrypt);
+	AES_set_encrypt_key(key, key_len*8, aes_key);
+	CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
 #ifdef AES_CTR_ASM
-	gctx->ctr = (ctr128_f)AES_ctr32_encrypt;
+	return (ctr128_f)AES_ctr32_encrypt;
 #else
-	gctx->ctr = NULL;
+	return NULL;
 #endif
-	} while (0);
+	}
+
+static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
+			const unsigned char *iv, int enc)
+	{
+	EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
+	if (!iv && !key)
+		return 1;
+	if (key)
+		{
+		gctx->ctr = aes_gcm_set_key(&gctx->ks, &gctx->gcm, key, ctx->key_len);
 	/* If we have an iv can set it directly, otherwise use
 	 * saved IV.
 	 */
@@ -1310,5 +1311,176 @@ BLOCK_CIPHER_custom(NID_aes,128,1,12,ccm,CCM,EVP_CIPH_FLAG_FIPS|CUSTOM_FLAGS)
 BLOCK_CIPHER_custom(NID_aes,192,1,12,ccm,CCM,EVP_CIPH_FLAG_FIPS|CUSTOM_FLAGS)
 BLOCK_CIPHER_custom(NID_aes,256,1,12,ccm,CCM,EVP_CIPH_FLAG_FIPS|CUSTOM_FLAGS)

+#define EVP_AEAD_AES_128_GCM_TAG_LEN 16
+
+struct aead_aes_128_gcm_ctx {
+	union { double align; AES_KEY ks; } ks;
+	GCM128_CONTEXT gcm;
+	ctr128_f ctr;
+	unsigned char tag_len;
+};
+
+static int aead_aes_128_gcm_init(EVP_AEAD_CTX *ctx,
+	const unsigned char *key, size_t key_len, size_t tag_len)
+	{
+	struct aead_aes_128_gcm_ctx *gcm_ctx;
+
+	if (key_len*8 != 128)
+		{
+		EVPerr(EVP_F_AEAD_AES_128_GCM_INIT, EVP_R_BAD_KEY_LENGTH);
+		return 0; /* EVP_AEAD_CTX_init should catch this. */
+		}
+
+	if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH)
+		tag_len = EVP_AEAD_AES_128_GCM_TAG_LEN;
+
+	if (tag_len > EVP_AEAD_AES_128_GCM_TAG_LEN)
+		{
+		EVPerr(EVP_F_AEAD_AES_128_GCM_INIT, EVP_R_TAG_TOO_LARGE);
+		return 0;
+		}
+
+	gcm_ctx = OPENSSL_malloc(sizeof(struct aead_aes_128_gcm_ctx));
+	if (gcm_ctx == NULL)
+		return 0;
+
+#ifdef AESNI_CAPABLE
+	if (AESNI_CAPABLE)
+		{
+		aesni_set_encrypt_key(key, key_len * 8, &gcm_ctx->ks.ks);
+		CRYPTO_gcm128_init(&gcm_ctx->gcm, &gcm_ctx->ks.ks,
+				(block128_f)aesni_encrypt);
+		gcm_ctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
+		}
+	else
+#endif
+		{
+		gcm_ctx->ctr = aes_gcm_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm,
+					key, key_len);
+		}
+	gcm_ctx->tag_len = tag_len;
+	ctx->aead_state = gcm_ctx;
+
+	return 1;
+	}
+
+static void aead_aes_128_gcm_cleanup(EVP_AEAD_CTX *ctx)
+	{
+	struct aead_aes_128_gcm_ctx *gcm_ctx = ctx->aead_state;
+	OPENSSL_free(gcm_ctx);
+	}
+
+static ssize_t aead_aes_128_gcm_seal(const EVP_AEAD_CTX *ctx,
+	unsigned char *out, size_t max_out_len,
+	const unsigned char *nonce, size_t nonce_len,
+	const unsigned char *in, size_t in_len,
+	const unsigned char *ad, size_t ad_len)
+	{
+	size_t bulk = 0;
+	const struct aead_aes_128_gcm_ctx *gcm_ctx = ctx->aead_state;
+	GCM128_CONTEXT gcm;
+
+	if (max_out_len < in_len + gcm_ctx->tag_len)
+		{
+		EVPerr(EVP_F_AEAD_AES_128_GCM_SEAL, EVP_R_BUFFER_TOO_SMALL);
+		return -1;
+		}
+
+	memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
+	CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);
+
+	if (ad_len > 0 && CRYPTO_gcm128_aad(&gcm, ad, ad_len))
+		return -1;
+
+	if (gcm_ctx->ctr)
+		{
+		if (CRYPTO_gcm128_encrypt_ctr32(&gcm, in + bulk, out + bulk,
+					in_len - bulk, gcm_ctx->ctr))
+			return -1;
+		}
+	else
+		{
+		if (CRYPTO_gcm128_encrypt(&gcm, in + bulk, out + bulk,
+					in_len - bulk))
+			return -1;
+		}
+
+	CRYPTO_gcm128_tag(&gcm, out + in_len, gcm_ctx->tag_len);
+	return in_len + gcm_ctx->tag_len;
+	}
+
+static ssize_t aead_aes_128_gcm_open(const EVP_AEAD_CTX *ctx,
+	unsigned char *out, size_t max_out_len,
+	const unsigned char *nonce, size_t nonce_len,
+	const unsigned char *in, size_t in_len,
+	const unsigned char *ad, size_t ad_len)
+	{
+	size_t bulk = 0;
+	const struct aead_aes_128_gcm_ctx *gcm_ctx = ctx->aead_state;
+	unsigned char tag[EVP_AEAD_AES_128_GCM_TAG_LEN];
+	size_t out_len;
+	GCM128_CONTEXT gcm;
+
+	if (in_len < gcm_ctx->tag_len)
+		{
+		EVPerr(EVP_F_AEAD_AES_128_GCM_OPEN, EVP_R_BAD_DECRYPT);
+		return -1;
+		}
+
+	out_len = in_len - gcm_ctx->tag_len;
+
+	if (max_out_len < out_len)
+		{
+		EVPerr(EVP_F_AEAD_AES_128_GCM_OPEN, EVP_R_BUFFER_TOO_SMALL);
+		return -1;
+		}
+
+	memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
+	CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);
+
+	if (CRYPTO_gcm128_aad(&gcm, ad, ad_len))
+		return -1;
+
+	if (gcm_ctx->ctr)
+		{
+		if (CRYPTO_gcm128_decrypt_ctr32(&gcm, in + bulk, out + bulk,
+					in_len - bulk - gcm_ctx->tag_len,
+					gcm_ctx->ctr))
+			return -1;
+		}
+	else
+		{
+		if (CRYPTO_gcm128_decrypt(&gcm, in + bulk, out + bulk,
+					in_len - bulk - gcm_ctx->tag_len))
+			return -1;
+		}
+
+	CRYPTO_gcm128_tag(&gcm, tag, gcm_ctx->tag_len);
+	if (CRYPTO_memcmp(tag, in + out_len, gcm_ctx->tag_len) != 0)
+		{
+		EVPerr(EVP_F_AEAD_AES_128_GCM_OPEN, EVP_R_BAD_DECRYPT);
+		return -1;
+		}
+
+	return out_len;
+	}
+
+static const EVP_AEAD aead_aes_128_gcm = {
+	16,                           /* key len */
+	12,                           /* nonce len */
+	EVP_AEAD_AES_128_GCM_TAG_LEN, /* overhead */
+	EVP_AEAD_AES_128_GCM_TAG_LEN, /* max tag length */
+
+	aead_aes_128_gcm_init,
+	aead_aes_128_gcm_cleanup,
+	aead_aes_128_gcm_seal,
+	aead_aes_128_gcm_open,
+};
+
+const EVP_AEAD *EVP_aead_aes_128_gcm(void)
+	{
+	return &aead_aes_128_gcm;
+	}
+
 #endif
 #endif
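Review note, not part of the patch: a minimal usage sketch of the AEAD interface the functions above implement. It assumes the EVP_AEAD_CTX wrappers from the companion aead-interface change (EVP_AEAD_CTX_init, EVP_AEAD_CTX_seal, EVP_AEAD_CTX_open, EVP_AEAD_CTX_cleanup); their exact signatures, the trailing ENGINE argument to init, and a stack-allocatable EVP_AEAD_CTX are assumptions mirroring the function table in this diff, not definitions it contains.

/* Usage sketch under the assumptions stated above; argument order
 * follows the aead_aes_128_gcm function table in this patch. */
#include <string.h>
#include <openssl/evp.h>

int aes_128_gcm_roundtrip(void)
	{
	EVP_AEAD_CTX ctx;                /* assumed stack-allocatable */
	unsigned char key[16] = {0};     /* AES-128 key; demo value only */
	unsigned char nonce[12] = {0};   /* 96-bit nonce; must never repeat per key */
	static const unsigned char msg[] = "attack at dawn";
	unsigned char sealed[sizeof(msg) + 16]; /* in_len + 16-byte tag overhead */
	unsigned char opened[sizeof(msg)];
	ssize_t sealed_len, opened_len;

	/* EVP_AEAD_DEFAULT_TAG_LENGTH selects the full 16-byte tag,
	 * as aead_aes_128_gcm_init does above. */
	if (!EVP_AEAD_CTX_init(&ctx, EVP_aead_aes_128_gcm(), key, sizeof(key),
				EVP_AEAD_DEFAULT_TAG_LENGTH, NULL))
		return 0;

	/* Seal writes ciphertext||tag; a buffer smaller than
	 * in_len + tag_len fails with EVP_R_BUFFER_TOO_SMALL. */
	sealed_len = EVP_AEAD_CTX_seal(&ctx, sealed, sizeof(sealed),
				nonce, sizeof(nonce),
				msg, sizeof(msg), NULL, 0);
	if (sealed_len < 0)
		{
		EVP_AEAD_CTX_cleanup(&ctx);
		return 0;
		}

	/* Open decrypts, then compares the tag in constant time via
	 * CRYPTO_memcmp; a mismatch returns -1 with EVP_R_BAD_DECRYPT. */
	opened_len = EVP_AEAD_CTX_open(&ctx, opened, sizeof(opened),
				nonce, sizeof(nonce),
				sealed, (size_t)sealed_len, NULL, 0);
	EVP_AEAD_CTX_cleanup(&ctx);

	return opened_len == (ssize_t)sizeof(msg) &&
		memcmp(opened, msg, sizeof(msg)) == 0;
	}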