blob: 24013dcb0d56cdf7462af2f416534133a3c4bd9c [file] [log] [blame]
Adam Langley95c29f32014-06-20 12:00:00 -07001/* ====================================================================
2 * Copyright (c) 2001-2011 The OpenSSL Project. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 *
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 *
11 * 2. Redistributions in binary form must reproduce the above copyright
12 * notice, this list of conditions and the following disclaimer in
13 * the documentation and/or other materials provided with the
14 * distribution.
15 *
16 * 3. All advertising materials mentioning features or use of this
17 * software must display the following acknowledgment:
18 * "This product includes software developed by the OpenSSL Project
19 * for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
20 *
21 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
22 * endorse or promote products derived from this software without
23 * prior written permission. For written permission, please contact
24 * openssl-core@openssl.org.
25 *
26 * 5. Products derived from this software may not be called "OpenSSL"
27 * nor may "OpenSSL" appear in their names without prior written
28 * permission of the OpenSSL Project.
29 *
30 * 6. Redistributions of any form whatsoever must retain the following
31 * acknowledgment:
32 * "This product includes software developed by the OpenSSL Project
33 * for use in the OpenSSL Toolkit (http://www.openssl.org/)"
34 *
35 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
36 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
37 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
38 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
39 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
40 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
41 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
42 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
43 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
44 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
45 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
46 * OF THE POSSIBILITY OF SUCH DAMAGE.
47 * ==================================================================== */
48
Adam Langley2b2d66d2015-01-30 17:08:37 -080049#include <string.h>
50
Adam Langleyfd772a52014-06-20 12:00:00 -070051#include <openssl/aead.h>
Adam Langley95c29f32014-06-20 12:00:00 -070052#include <openssl/aes.h>
53#include <openssl/cipher.h>
54#include <openssl/cpu.h>
55#include <openssl/err.h>
56#include <openssl/mem.h>
57#include <openssl/modes.h>
Adam Langley95c29f32014-06-20 12:00:00 -070058#include <openssl/obj.h>
Adam Langleyfd772a52014-06-20 12:00:00 -070059#include <openssl/rand.h>
Adam Langley0e782a92015-03-13 12:11:00 -070060#include <openssl/sha.h>
Adam Langley95c29f32014-06-20 12:00:00 -070061
62#include "internal.h"
Adam Langley0e782a92015-03-13 12:11:00 -070063#include "../internal.h"
Adam Langley95c29f32014-06-20 12:00:00 -070064#include "../modes/internal.h"
65
Adam Langleye49da742015-01-16 14:06:22 -080066#if defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
67#include "../arm_arch.h"
68#endif
69
Adam Langley95c29f32014-06-20 12:00:00 -070070
/* EVP_AES_KEY holds the expanded key plus the fastest available
 * implementation functions, selected at init time by aes_init_key /
 * aesni_init_key. */
typedef struct {
  union {
    double align; /* presumably forces 8-byte alignment of the key schedule
                   * (classic OpenSSL trick) — TODO confirm. */
    AES_KEY ks;
  } ks;
  block128_f block; /* single-block encrypt or decrypt function */
  union {
    cbc128_f cbc; /* CBC fast path, or NULL when the generic path is used */
    ctr128_f ctr; /* CTR32 fast path (CTR mode only) */
  } stream;
} EVP_AES_KEY;
82
/* EVP_AES_GCM_CTX carries all per-context GCM state: key schedule, GHASH
 * context, and the (possibly heap-allocated) IV buffer. */
typedef struct {
  union {
    double align; /* presumably for key-schedule alignment — TODO confirm. */
    AES_KEY ks;
  } ks;        /* AES key schedule to use */
  int key_set; /* Set if key initialised */
  int iv_set;  /* Set if an iv is set */
  GCM128_CONTEXT gcm;
  uint8_t *iv; /* Temporary IV store; points at ctx->iv unless a longer IV
                * was requested via EVP_CTRL_GCM_SET_IVLEN. */
  int ivlen;   /* IV length */
  int taglen;  /* Tag length, or -1 until a tag is set/produced. */
  int iv_gen;  /* It is OK to generate IVs */
  ctr128_f ctr; /* CTR32 fast path from aes_ctr_set_key, or NULL. */
} EVP_AES_GCM_CTX;
97
Adam Langley95c29f32014-06-20 12:00:00 -070098#if !defined(OPENSSL_NO_ASM) && \
99 (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
100#define VPAES
David Benjaminc44d2f42014-08-20 16:24:00 -0400101static char vpaes_capable(void) {
Adam Langley95c29f32014-06-20 12:00:00 -0700102 return (OPENSSL_ia32cap_P[1] & (1 << (41 - 32))) != 0;
103}
104
105#if defined(OPENSSL_X86_64)
106#define BSAES
David Benjaminc44d2f42014-08-20 16:24:00 -0400107static char bsaes_capable(void) {
Adam Langley95c29f32014-06-20 12:00:00 -0700108 return vpaes_capable();
109}
110#endif
111
Adam Langley3e652652015-01-09 15:44:37 -0800112#elif !defined(OPENSSL_NO_ASM) && \
113 (defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64))
Adam Langley95c29f32014-06-20 12:00:00 -0700114#include "../arm_arch.h"
Adam Langley3e652652015-01-09 15:44:37 -0800115
Joel Klinghed485a50a2015-05-29 15:02:09 +0200116#if defined(OPENSSL_ARM) && __ARM_MAX_ARCH__ >= 7
Adam Langley95c29f32014-06-20 12:00:00 -0700117#define BSAES
David Benjaminc44d2f42014-08-20 16:24:00 -0400118static char bsaes_capable(void) {
Adam Langley95c29f32014-06-20 12:00:00 -0700119 return CRYPTO_is_NEON_capable();
120}
Adam Langley3e652652015-01-09 15:44:37 -0800121#endif
122
123#define HWAES
124static char hwaes_capable(void) {
125 return (OPENSSL_armcap_P & ARMV8_AES) != 0;
126}
127
128int aes_v8_set_encrypt_key(const uint8_t *user_key, const int bits,
129 AES_KEY *key);
130int aes_v8_set_decrypt_key(const uint8_t *user_key, const int bits,
131 AES_KEY *key);
132void aes_v8_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
133void aes_v8_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
134void aes_v8_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
135 const AES_KEY *key, uint8_t *ivec, const int enc);
136void aes_v8_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
137 const AES_KEY *key, const uint8_t ivec[16]);
138
Adam Langley95c29f32014-06-20 12:00:00 -0700139#endif /* OPENSSL_ARM */
140
#if defined(BSAES)
/* On platforms where BSAES gets defined (just above), then these functions are
 * provided by asm. */
void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                const AES_KEY *key, const uint8_t ivec[16]);
#else
static char bsaes_capable(void) {
  return 0;
}

/* On other platforms, bsaes_capable() will always return false and so the
 * following will never be called. They abort() so that any path that reaches
 * them is caught immediately rather than silently misbehaving. */
void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t ivec[16], int enc) {
  abort();
}

void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                const AES_KEY *key, const uint8_t ivec[16]) {
  abort();
}
#endif
165
#if defined(VPAES)
/* On platforms where VPAES gets defined (just above), then these functions are
 * provided by asm. */
int vpaes_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
int vpaes_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);

void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);

void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc);
#else
static char vpaes_capable(void) {
  return 0;
}

/* On other platforms, vpaes_capable() will always return false and so the
 * following will never be called. They abort() to trap any erroneous call. */
int vpaes_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key) {
  abort();
}
int vpaes_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key) {
  abort();
}
void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}
void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}
void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc) {
  abort();
}
#endif
201
Adam Langley3e652652015-01-09 15:44:37 -0800202#if !defined(HWAES)
203/* If HWAES isn't defined then we provide dummy functions for each of the hwaes
204 * functions. */
Brian Smithefed2212015-01-28 16:20:02 -0800205int hwaes_capable(void) {
Adam Langley3e652652015-01-09 15:44:37 -0800206 return 0;
207}
208
209int aes_v8_set_encrypt_key(const uint8_t *user_key, int bits,
210 AES_KEY *key) {
211 abort();
212}
213
214int aes_v8_set_decrypt_key(const uint8_t *user_key, int bits, AES_KEY *key) {
215 abort();
216}
217
218void aes_v8_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
219 abort();
220}
221
222void aes_v8_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
223 abort();
224}
225
226void aes_v8_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
227 const AES_KEY *key, uint8_t *ivec, int enc) {
228 abort();
229}
230
231void aes_v8_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
232 const AES_KEY *key, const uint8_t ivec[16]) {
233 abort();
234}
235#endif
236
Adam Langley95c29f32014-06-20 12:00:00 -0700237#if !defined(OPENSSL_NO_ASM) && \
238 (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
239int aesni_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
240int aesni_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
241
242void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
243void aesni_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
244
245void aesni_ecb_encrypt(const uint8_t *in, uint8_t *out, size_t length,
246 const AES_KEY *key, int enc);
247void aesni_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
248 const AES_KEY *key, uint8_t *ivec, int enc);
249
250void aesni_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t blocks,
251 const void *key, const uint8_t *ivec);
252
253#if defined(OPENSSL_X86_64)
254size_t aesni_gcm_encrypt(const uint8_t *in, uint8_t *out, size_t len,
255 const void *key, uint8_t ivec[16], uint64_t *Xi);
256#define AES_gcm_encrypt aesni_gcm_encrypt
257size_t aesni_gcm_decrypt(const uint8_t *in, uint8_t *out, size_t len,
258 const void *key, uint8_t ivec[16], uint64_t *Xi);
259#define AES_gcm_decrypt aesni_gcm_decrypt
260void gcm_ghash_avx(uint64_t Xi[2], const u128 Htable[16], const uint8_t *in,
261 size_t len);
262#define AES_GCM_ASM(gctx) \
263 (gctx->ctr == aesni_ctr32_encrypt_blocks && gctx->gcm.ghash == gcm_ghash_avx)
Adam Langleyfd772a52014-06-20 12:00:00 -0700264#endif /* OPENSSL_X86_64 */
Adam Langley95c29f32014-06-20 12:00:00 -0700265
266#else
267
268/* On other platforms, aesni_capable() will always return false and so the
269 * following will never be called. */
270void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
271 abort();
272}
273int aesni_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key) {
274 abort();
275}
276void aesni_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t blocks,
277 const void *key, const uint8_t *ivec) {
278 abort();
279}
280
281#endif
282
/* aes_init_key expands |key| for the mode encoded in |ctx|, choosing the
 * fastest implementation the CPU supports. Selection order: hardware AES
 * (ARMv8), then bsaes, then vpaes, then the generic C/asm AES. For ECB/CBC
 * decryption a decrypt schedule is set; everything else (including CTR and
 * CBC encryption) uses an encrypt schedule. Returns 1 on success, 0 on
 * failure. |iv| is unused here; the EVP layer manages ctx->iv. */
static int aes_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                        const uint8_t *iv, int enc)
    OPENSSL_SUPPRESS_UNREACHABLE_CODE_WARNINGS {
  int ret, mode;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    /* Decryption direction for block modes: needs a decrypt key schedule. */
    if (hwaes_capable()) {
      ret = aes_v8_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)aes_v8_decrypt;
      dat->stream.cbc = NULL;
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = (cbc128_f)aes_v8_cbc_encrypt;
      }
    } else if (bsaes_capable() && mode == EVP_CIPH_CBC_MODE) {
      /* bsaes only provides a CBC path here; note it pairs with the
       * standard AES key schedule. */
      ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)AES_decrypt;
      dat->stream.cbc = (cbc128_f)bsaes_cbc_encrypt;
    } else if (vpaes_capable()) {
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)vpaes_decrypt;
      dat->stream.cbc =
          mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
    } else {
      ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)AES_decrypt;
      dat->stream.cbc =
          mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
    }
  } else if (hwaes_capable()) {
    ret = aes_v8_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)aes_v8_encrypt;
    dat->stream.cbc = NULL;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = (cbc128_f)aes_v8_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = (ctr128_f)aes_v8_ctr32_encrypt_blocks;
    }
  } else if (bsaes_capable() && mode == EVP_CIPH_CTR_MODE) {
    ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)AES_encrypt;
    dat->stream.ctr = (ctr128_f)bsaes_ctr32_encrypt_blocks;
  } else if (vpaes_capable()) {
    ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)vpaes_encrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
  } else {
    ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)AES_encrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}
345
Adam Langley087930f2015-02-24 12:44:40 -0800346static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
347 size_t len) {
Adam Langley95c29f32014-06-20 12:00:00 -0700348 EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
349
350 if (dat->stream.cbc) {
351 (*dat->stream.cbc)(in, out, len, &dat->ks, ctx->iv, ctx->encrypt);
352 } else if (ctx->encrypt) {
353 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
354 } else {
355 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
356 }
357
358 return 1;
359}
360
Adam Langley087930f2015-02-24 12:44:40 -0800361static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
362 size_t len) {
Adam Langley95c29f32014-06-20 12:00:00 -0700363 size_t bl = ctx->cipher->block_size;
364 size_t i;
365 EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
366
367 if (len < bl) {
368 return 1;
369 }
370
371 for (i = 0, len -= bl; i <= len; i += bl) {
372 (*dat->block)(in + i, out + i, &dat->ks);
373 }
374
375 return 1;
376}
377
Adam Langley087930f2015-02-24 12:44:40 -0800378static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
379 size_t len) {
Adam Langley95c29f32014-06-20 12:00:00 -0700380 unsigned int num = ctx->num;
381 EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
382
383 if (dat->stream.ctr) {
384 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks, ctx->iv, ctx->buf, &num,
385 dat->stream.ctr);
386 } else {
387 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, ctx->iv, ctx->buf, &num,
388 dat->block);
389 }
390 ctx->num = (size_t)num;
391 return 1;
392}
393
Adam Langley087930f2015-02-24 12:44:40 -0800394static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
395 size_t len) {
396 EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
397
398 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks, ctx->iv, &ctx->num, dat->block);
399 return 1;
400}
401
Adam Langley52f9f622015-03-13 11:49:22 -0700402static char aesni_capable(void);
403
404static ctr128_f aes_ctr_set_key(AES_KEY *aes_key, GCM128_CONTEXT *gcm_ctx,
405 block128_f *out_block, const uint8_t *key,
Brian Smith1a9bc442015-03-16 19:27:05 -1000406 size_t key_len)
407 OPENSSL_SUPPRESS_UNREACHABLE_CODE_WARNINGS {
Adam Langley52f9f622015-03-13 11:49:22 -0700408 if (aesni_capable()) {
409 aesni_set_encrypt_key(key, key_len * 8, aes_key);
410 if (gcm_ctx != NULL) {
411 CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aesni_encrypt);
412 }
413 if (out_block) {
414 *out_block = (block128_f) aesni_encrypt;
415 }
416 return (ctr128_f)aesni_ctr32_encrypt_blocks;
417 }
418
Adam Langley3e652652015-01-09 15:44:37 -0800419 if (hwaes_capable()) {
420 aes_v8_set_encrypt_key(key, key_len * 8, aes_key);
Adam Langley52f9f622015-03-13 11:49:22 -0700421 if (gcm_ctx != NULL) {
422 CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aes_v8_encrypt);
423 }
424 if (out_block) {
425 *out_block = (block128_f) aes_v8_encrypt;
426 }
Adam Langley3e652652015-01-09 15:44:37 -0800427 return (ctr128_f)aes_v8_ctr32_encrypt_blocks;
428 }
429
Adam Langleyfd772a52014-06-20 12:00:00 -0700430 if (bsaes_capable()) {
431 AES_set_encrypt_key(key, key_len * 8, aes_key);
Adam Langley52f9f622015-03-13 11:49:22 -0700432 if (gcm_ctx != NULL) {
433 CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
434 }
435 if (out_block) {
436 *out_block = (block128_f) AES_encrypt;
437 }
Adam Langleyfd772a52014-06-20 12:00:00 -0700438 return (ctr128_f)bsaes_ctr32_encrypt_blocks;
439 }
440
441 if (vpaes_capable()) {
442 vpaes_set_encrypt_key(key, key_len * 8, aes_key);
Adam Langley52f9f622015-03-13 11:49:22 -0700443 if (out_block) {
444 *out_block = (block128_f) vpaes_encrypt;
445 }
446 if (gcm_ctx != NULL) {
447 CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)vpaes_encrypt);
448 }
Adam Langleyfd772a52014-06-20 12:00:00 -0700449 return NULL;
450 }
451
452 AES_set_encrypt_key(key, key_len * 8, aes_key);
Adam Langley52f9f622015-03-13 11:49:22 -0700453 if (gcm_ctx != NULL) {
454 CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
455 }
456 if (out_block) {
457 *out_block = (block128_f) AES_encrypt;
458 }
Adam Langleyfd772a52014-06-20 12:00:00 -0700459 return NULL;
460}
461
Adam Langley95c29f32014-06-20 12:00:00 -0700462static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
463 const uint8_t *iv, int enc) {
464 EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
465 if (!iv && !key) {
466 return 1;
467 }
Adam Langley95c29f32014-06-20 12:00:00 -0700468 if (key) {
Adam Langley52f9f622015-03-13 11:49:22 -0700469 gctx->ctr =
470 aes_ctr_set_key(&gctx->ks.ks, &gctx->gcm, NULL, key, ctx->key_len);
Adam Langleyfd772a52014-06-20 12:00:00 -0700471 /* If we have an iv can set it directly, otherwise use saved IV. */
Adam Langley95c29f32014-06-20 12:00:00 -0700472 if (iv == NULL && gctx->iv_set) {
473 iv = gctx->iv;
474 }
475 if (iv) {
476 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
477 gctx->iv_set = 1;
478 }
479 gctx->key_set = 1;
480 } else {
481 /* If key set use IV, otherwise copy */
482 if (gctx->key_set) {
483 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
484 } else {
485 memcpy(gctx->iv, iv, gctx->ivlen);
486 }
487 gctx->iv_set = 1;
488 gctx->iv_gen = 0;
489 }
490 return 1;
491}
492
David Benjamin3f5917f2015-02-23 02:15:50 -0500493static void aes_gcm_cleanup(EVP_CIPHER_CTX *c) {
Adam Langley95c29f32014-06-20 12:00:00 -0700494 EVP_AES_GCM_CTX *gctx = c->cipher_data;
495 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
496 if (gctx->iv != c->iv) {
497 OPENSSL_free(gctx->iv);
498 }
Adam Langley95c29f32014-06-20 12:00:00 -0700499}
500
/* ctr64_inc treats |counter| as an eight-byte big-endian integer and
 * increments it by one, stopping as soon as a byte does not wrap. */
static void ctr64_inc(uint8_t *counter) {
  int i;

  for (i = 7; i >= 0; i--) {
    counter[i]++;
    if (counter[i] != 0) {
      /* No carry out of this byte; done. */
      return;
    }
  }
}
516
/* aes_gcm_ctrl implements the EVP ctrl interface for AES-GCM. Returns 1 on
 * success, 0 on failure and -1 for unknown |type| values. */
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
  EVP_AES_GCM_CTX *gctx = c->cipher_data;
  switch (type) {
    case EVP_CTRL_INIT:
      /* Reset all state; the IV initially aliases the EVP context's own
       * buffer and taglen of -1 means "no tag set yet". */
      gctx->key_set = 0;
      gctx->iv_set = 0;
      gctx->ivlen = c->cipher->iv_len;
      gctx->iv = c->iv;
      gctx->taglen = -1;
      gctx->iv_gen = 0;
      return 1;

    case EVP_CTRL_GCM_SET_IVLEN:
      if (arg <= 0) {
        return 0;
      }

      /* Allocate memory for IV if needed */
      if (arg > EVP_MAX_IV_LENGTH && arg > gctx->ivlen) {
        if (gctx->iv != c->iv) {
          OPENSSL_free(gctx->iv);
        }
        gctx->iv = OPENSSL_malloc(arg);
        if (!gctx->iv) {
          return 0;
        }
      }
      gctx->ivlen = arg;
      return 1;

    case EVP_CTRL_GCM_SET_TAG:
      /* Only valid when decrypting; the expected tag is staged in c->buf. */
      if (arg <= 0 || arg > 16 || c->encrypt) {
        return 0;
      }
      memcpy(c->buf, ptr, arg);
      gctx->taglen = arg;
      return 1;

    case EVP_CTRL_GCM_GET_TAG:
      /* Only valid when encrypting, after the tag has been produced. */
      if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0) {
        return 0;
      }
      memcpy(ptr, c->buf, arg);
      return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
      /* Special case: -1 length restores whole IV */
      if (arg == -1) {
        memcpy(gctx->iv, ptr, gctx->ivlen);
        gctx->iv_gen = 1;
        return 1;
      }
      /* Fixed field must be at least 4 bytes and invocation field
       * at least 8. */
      if (arg < 4 || (gctx->ivlen - arg) < 8) {
        return 0;
      }
      if (arg) {
        memcpy(gctx->iv, ptr, arg);
      }
      if (c->encrypt && !RAND_bytes(gctx->iv + arg, gctx->ivlen - arg)) {
        return 0;
      }
      gctx->iv_gen = 1;
      return 1;

    case EVP_CTRL_GCM_IV_GEN:
      if (gctx->iv_gen == 0 || gctx->key_set == 0) {
        return 0;
      }
      CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
      if (arg <= 0 || arg > gctx->ivlen) {
        arg = gctx->ivlen;
      }
      memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
      /* Invocation field will be at least 8 bytes in size and
       * so no need to check wrap around or increment more than
       * last 8 bytes. */
      ctr64_inc(gctx->iv + gctx->ivlen - 8);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
      if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) {
        return 0;
      }
      memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
      CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_COPY: {
      EVP_CIPHER_CTX *out = ptr;
      EVP_AES_GCM_CTX *gctx_out = out->cipher_data;
      /* After a bitwise copy, re-point internal pointers at the new
       * context's own storage. */
      if (gctx->gcm.key) {
        if (gctx->gcm.key != &gctx->ks) {
          return 0;
        }
        gctx_out->gcm.key = &gctx_out->ks;
      }
      if (gctx->iv == c->iv) {
        gctx_out->iv = out->iv;
      } else {
        /* The IV was heap-allocated; give the copy its own buffer. */
        gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
        if (!gctx_out->iv) {
          return 0;
        }
        memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
      }
      return 1;
    }

    default:
      return -1;
  }
}
633
Adam Langley95c29f32014-06-20 12:00:00 -0700634static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
635 size_t len) {
636 EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
Adam Langleyfd772a52014-06-20 12:00:00 -0700637
Adam Langley95c29f32014-06-20 12:00:00 -0700638 /* If not set up, return error */
Adam Langleyfd772a52014-06-20 12:00:00 -0700639 if (!gctx->key_set) {
Adam Langley95c29f32014-06-20 12:00:00 -0700640 return -1;
Adam Langleyfd772a52014-06-20 12:00:00 -0700641 }
Adam Langleyfd772a52014-06-20 12:00:00 -0700642 if (!gctx->iv_set) {
Adam Langley95c29f32014-06-20 12:00:00 -0700643 return -1;
Adam Langleyfd772a52014-06-20 12:00:00 -0700644 }
645
Adam Langley95c29f32014-06-20 12:00:00 -0700646 if (in) {
647 if (out == NULL) {
Adam Langleyfd772a52014-06-20 12:00:00 -0700648 if (!CRYPTO_gcm128_aad(&gctx->gcm, in, len)) {
Adam Langley95c29f32014-06-20 12:00:00 -0700649 return -1;
Adam Langleyfd772a52014-06-20 12:00:00 -0700650 }
Adam Langley95c29f32014-06-20 12:00:00 -0700651 } else if (ctx->encrypt) {
652 if (gctx->ctr) {
653 size_t bulk = 0;
654#if defined(AES_GCM_ASM)
655 if (len >= 32 && AES_GCM_ASM(gctx)) {
656 size_t res = (16 - gctx->gcm.mres) % 16;
657
Adam Langleyfd772a52014-06-20 12:00:00 -0700658 if (!CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res)) {
Adam Langley95c29f32014-06-20 12:00:00 -0700659 return -1;
Adam Langleyfd772a52014-06-20 12:00:00 -0700660 }
Adam Langley95c29f32014-06-20 12:00:00 -0700661
662 bulk = AES_gcm_encrypt(in + res, out + res, len - res, gctx->gcm.key,
663 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
664 gctx->gcm.len.u[1] += bulk;
665 bulk += res;
666 }
667#endif
668 if (!CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, in + bulk, out + bulk,
Adam Langleyfd772a52014-06-20 12:00:00 -0700669 len - bulk, gctx->ctr)) {
Adam Langley95c29f32014-06-20 12:00:00 -0700670 return -1;
Adam Langleyfd772a52014-06-20 12:00:00 -0700671 }
Adam Langley95c29f32014-06-20 12:00:00 -0700672 } else {
673 size_t bulk = 0;
674 if (!CRYPTO_gcm128_encrypt(&gctx->gcm, in + bulk, out + bulk,
Adam Langleyfd772a52014-06-20 12:00:00 -0700675 len - bulk)) {
Adam Langley95c29f32014-06-20 12:00:00 -0700676 return -1;
Adam Langleyfd772a52014-06-20 12:00:00 -0700677 }
Adam Langley95c29f32014-06-20 12:00:00 -0700678 }
679 } else {
680 if (gctx->ctr) {
681 size_t bulk = 0;
682#if defined(AES_GCM_ASM)
683 if (len >= 16 && AES_GCM_ASM(gctx)) {
684 size_t res = (16 - gctx->gcm.mres) % 16;
685
Adam Langleyfd772a52014-06-20 12:00:00 -0700686 if (!CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res)) {
Adam Langley95c29f32014-06-20 12:00:00 -0700687 return -1;
Adam Langleyfd772a52014-06-20 12:00:00 -0700688 }
Adam Langley95c29f32014-06-20 12:00:00 -0700689
690 bulk = AES_gcm_decrypt(in + res, out + res, len - res, gctx->gcm.key,
691 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
692 gctx->gcm.len.u[1] += bulk;
693 bulk += res;
694 }
695#endif
696 if (!CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, in + bulk, out + bulk,
Adam Langleyfd772a52014-06-20 12:00:00 -0700697 len - bulk, gctx->ctr)) {
Adam Langley95c29f32014-06-20 12:00:00 -0700698 return -1;
Adam Langleyfd772a52014-06-20 12:00:00 -0700699 }
Adam Langley95c29f32014-06-20 12:00:00 -0700700 } else {
701 size_t bulk = 0;
702 if (!CRYPTO_gcm128_decrypt(&gctx->gcm, in + bulk, out + bulk,
Adam Langleyfd772a52014-06-20 12:00:00 -0700703 len - bulk)) {
Adam Langley95c29f32014-06-20 12:00:00 -0700704 return -1;
Adam Langleyfd772a52014-06-20 12:00:00 -0700705 }
Adam Langley95c29f32014-06-20 12:00:00 -0700706 }
707 }
708 return len;
709 } else {
710 if (!ctx->encrypt) {
Adam Langleyfd772a52014-06-20 12:00:00 -0700711 if (gctx->taglen < 0 ||
712 !CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0) {
Adam Langley95c29f32014-06-20 12:00:00 -0700713 return -1;
Adam Langleyfd772a52014-06-20 12:00:00 -0700714 }
Adam Langley95c29f32014-06-20 12:00:00 -0700715 gctx->iv_set = 0;
716 return 0;
717 }
718 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
719 gctx->taglen = 16;
720 /* Don't reuse the IV */
721 gctx->iv_set = 0;
722 return 0;
723 }
724}
725
/* EVP_CIPHER tables for AES-128/192/256. GCM entries use the custom-cipher
 * flags so aes_gcm_cipher handles AAD/final calls itself; ECB entries have a
 * zero iv_len. (192-bit OFB is not defined in this file.) */
static const EVP_CIPHER aes_128_cbc = {
    NID_aes_128_cbc, 16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key, aes_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_128_ctr = {
    NID_aes_128_ctr, 1 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_128_ecb = {
    NID_aes_128_ecb, 16 /* block_size */, 16 /* key_size */,
    0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key, aes_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_128_ofb = {
    NID_aes_128_ofb128, 1 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aes_init_key, aes_ofb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_128_gcm = {
    NID_aes_128_gcm, 1 /* block_size */, 16 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};


static const EVP_CIPHER aes_192_cbc = {
    NID_aes_192_cbc, 16 /* block_size */, 24 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key, aes_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_192_ctr = {
    NID_aes_192_ctr, 1 /* block_size */, 24 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_192_ecb = {
    NID_aes_192_ecb, 16 /* block_size */, 24 /* key_size */,
    0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key, aes_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_192_gcm = {
    NID_aes_192_gcm, 1 /* block_size */, 24 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};


static const EVP_CIPHER aes_256_cbc = {
    NID_aes_256_cbc, 16 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key, aes_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_256_ctr = {
    NID_aes_256_ctr, 1 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_256_ecb = {
    NID_aes_256_ecb, 16 /* block_size */, 32 /* key_size */,
    0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key, aes_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_256_ofb = {
    NID_aes_256_ofb128, 1 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aes_init_key, aes_ofb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_256_gcm = {
    NID_aes_256_gcm, 1 /* block_size */, 32 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};
820
821#if !defined(OPENSSL_NO_ASM) && \
822 (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
823
824/* AES-NI section. */
825
David Benjaminc44d2f42014-08-20 16:24:00 -0400826static char aesni_capable(void) {
Adam Langley95c29f32014-06-20 12:00:00 -0700827 return (OPENSSL_ia32cap_P[1] & (1 << (57 - 32))) != 0;
828}
829
830static int aesni_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
831 const uint8_t *iv, int enc) {
832 int ret, mode;
833 EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
834
835 mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
836 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
837 ret = aesni_set_decrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
838 dat->block = (block128_f)aesni_decrypt;
839 dat->stream.cbc =
840 mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aesni_cbc_encrypt : NULL;
841 } else {
842 ret = aesni_set_encrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
843 dat->block = (block128_f)aesni_encrypt;
844 if (mode == EVP_CIPH_CBC_MODE) {
845 dat->stream.cbc = (cbc128_f)aesni_cbc_encrypt;
846 } else if (mode == EVP_CIPH_CTR_MODE) {
847 dat->stream.ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
848 } else {
849 dat->stream.cbc = NULL;
850 }
851 }
852
853 if (ret < 0) {
David Benjamin3570d732015-06-29 00:28:17 -0400854 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
Adam Langley95c29f32014-06-20 12:00:00 -0700855 return 0;
856 }
857
858 return 1;
859}
860
861static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
862 const uint8_t *in, size_t len) {
863 aesni_cbc_encrypt(in, out, len, ctx->cipher_data, ctx->iv, ctx->encrypt);
864
865 return 1;
866}
867
868static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
869 const uint8_t *in, size_t len) {
870 size_t bl = ctx->cipher->block_size;
871
872 if (len < bl) {
873 return 1;
874 }
875
876 aesni_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);
877
878 return 1;
879}
880
/* aesni_gcm_init_key (re)configures the GCM context in |ctx->cipher_data|.
 * Either |key| or |iv| may be NULL: a NULL |key| sets only the IV (or stashes
 * it until a key arrives), and a NULL |iv| reuses a previously saved IV.
 * Always returns one. */
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                              const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
  /* Nothing to do when neither key nor IV is supplied. */
  if (!iv && !key) {
    return 1;
  }
  if (key) {
    aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
    CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f)aesni_encrypt);
    gctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
    /* If we have an iv can set it directly, otherwise use
     * saved IV. */
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    /* If key set use IV, otherwise copy */
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
    } else {
      memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}
913
/* aesni_128_cbc is the AES-NI-backed AES-128-CBC EVP_CIPHER. */
static const EVP_CIPHER aesni_128_cbc = {
    NID_aes_128_cbc, 16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aesni_init_key, aesni_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};
919
/* aesni_128_ctr is the AES-NI-backed AES-128-CTR EVP_CIPHER. It reuses the
 * generic |aes_ctr_cipher|; |aesni_init_key| installs the AES-NI CTR32
 * stream function that it dispatches to. */
static const EVP_CIPHER aesni_128_ctr = {
    NID_aes_128_ctr, 1 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};
925
/* aesni_128_ecb is the AES-NI-backed AES-128-ECB EVP_CIPHER. */
static const EVP_CIPHER aesni_128_ecb = {
    NID_aes_128_ecb, 16 /* block_size */, 16 /* key_size */,
    0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aesni_init_key, aesni_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};
931
Adam Langley087930f2015-02-24 12:44:40 -0800932static const EVP_CIPHER aesni_128_ofb = {
933 NID_aes_128_ofb128, 1 /* block_size */, 16 /* key_size */,
934 16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
935 NULL /* app_data */, aesni_init_key, aes_ofb_cipher,
936 NULL /* cleanup */, NULL /* ctrl */};
937
Adam Langley95c29f32014-06-20 12:00:00 -0700938static const EVP_CIPHER aesni_128_gcm = {
939 NID_aes_128_gcm, 1 /* block_size */, 16 /* key_size */, 12 /* iv_len */,
940 sizeof(EVP_AES_GCM_CTX),
941 EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
942 EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
943 EVP_CIPH_FLAG_AEAD_CIPHER,
944 NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
945 aes_gcm_ctrl};
946
947
Adam Langley1049e262015-04-02 13:09:01 -0700948static const EVP_CIPHER aesni_192_cbc = {
949 NID_aes_192_cbc, 16 /* block_size */, 24 /* key_size */,
950 16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
951 NULL /* app_data */, aesni_init_key, aesni_cbc_cipher,
952 NULL /* cleanup */, NULL /* ctrl */};
953
/* aesni_192_ctr is the AES-NI-backed AES-192-CTR EVP_CIPHER. */
static const EVP_CIPHER aesni_192_ctr = {
    NID_aes_192_ctr, 1 /* block_size */, 24 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};
959
Adam Langley5dca0312015-05-04 17:41:23 -0700960static const EVP_CIPHER aesni_192_ecb = {
961 NID_aes_192_ecb, 16 /* block_size */, 24 /* key_size */,
962 0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
963 NULL /* app_data */, aesni_init_key, aesni_ecb_cipher,
964 NULL /* cleanup */, NULL /* ctrl */};
965
Adam Langley1049e262015-04-02 13:09:01 -0700966static const EVP_CIPHER aesni_192_gcm = {
967 NID_aes_192_gcm, 1 /* block_size */, 24 /* key_size */, 12 /* iv_len */,
968 sizeof(EVP_AES_GCM_CTX),
969 EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
970 EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
971 EVP_CIPH_FLAG_AEAD_CIPHER,
972 NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
973 aes_gcm_ctrl};
974
975
Adam Langley95c29f32014-06-20 12:00:00 -0700976static const EVP_CIPHER aesni_256_cbc = {
Adam Langley23343e42015-02-02 11:22:49 -0800977 NID_aes_256_cbc, 16 /* block_size */, 32 /* key_size */,
Adam Langley95c29f32014-06-20 12:00:00 -0700978 16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
979 NULL /* app_data */, aesni_init_key, aesni_cbc_cipher,
980 NULL /* cleanup */, NULL /* ctrl */};
981
/* aesni_256_ctr is the AES-NI-backed AES-256-CTR EVP_CIPHER. */
static const EVP_CIPHER aesni_256_ctr = {
    NID_aes_256_ctr, 1 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};
987
/* aesni_256_ecb is the AES-NI-backed AES-256-ECB EVP_CIPHER. */
static const EVP_CIPHER aesni_256_ecb = {
    NID_aes_256_ecb, 16 /* block_size */, 32 /* key_size */,
    0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aesni_init_key, aesni_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};
993
Adam Langley087930f2015-02-24 12:44:40 -0800994static const EVP_CIPHER aesni_256_ofb = {
995 NID_aes_256_ofb128, 1 /* block_size */, 32 /* key_size */,
996 16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
997 NULL /* app_data */, aesni_init_key, aes_ofb_cipher,
998 NULL /* cleanup */, NULL /* ctrl */};
999
Adam Langley95c29f32014-06-20 12:00:00 -07001000static const EVP_CIPHER aesni_256_gcm = {
1001 NID_aes_256_gcm, 1 /* block_size */, 32 /* key_size */, 12 /* iv_len */,
1002 sizeof(EVP_AES_GCM_CTX),
1003 EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
Adam Langley7578f3f2014-07-24 17:42:11 -07001004 EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT | EVP_CIPH_CUSTOM_COPY |
Adam Langley95c29f32014-06-20 12:00:00 -07001005 EVP_CIPH_FLAG_AEAD_CIPHER,
1006 NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
1007 aes_gcm_ctrl};
1008
/* EVP_CIPHER_FUNCTION emits the public EVP_aes_<keybits>_<mode> accessor.
 * On x86/x86-64 it selects the AES-NI implementation at runtime when the CPU
 * supports it, otherwise the generic table. */
#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    if (aesni_capable()) {                             \
      return &aesni_##keybits##_##mode;                \
    } else {                                           \
      return &aes_##keybits##_##mode;                  \
    }                                                  \
  }
1017
1018#else /* ^^^ OPENSSL_X86_64 || OPENSSL_X86 */
1019
David Benjaminc44d2f42014-08-20 16:24:00 -04001020static char aesni_capable(void) {
Adam Langleyfd772a52014-06-20 12:00:00 -07001021 return 0;
1022}
1023
Adam Langley95c29f32014-06-20 12:00:00 -07001024#define EVP_CIPHER_FUNCTION(keybits, mode) \
1025 const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
1026 return &aes_##keybits##_##mode; \
1027 }
1028
1029#endif
1030
/* Instantiate the public accessors for every supported key-size/mode pair.
 * Note: no 192-bit OFB accessor is defined here. */
EVP_CIPHER_FUNCTION(128, cbc)
EVP_CIPHER_FUNCTION(128, ctr)
EVP_CIPHER_FUNCTION(128, ecb)
EVP_CIPHER_FUNCTION(128, ofb)
EVP_CIPHER_FUNCTION(128, gcm)

EVP_CIPHER_FUNCTION(192, cbc)
EVP_CIPHER_FUNCTION(192, ctr)
EVP_CIPHER_FUNCTION(192, ecb)
EVP_CIPHER_FUNCTION(192, gcm)

EVP_CIPHER_FUNCTION(256, cbc)
EVP_CIPHER_FUNCTION(256, ctr)
EVP_CIPHER_FUNCTION(256, ecb)
EVP_CIPHER_FUNCTION(256, ofb)
EVP_CIPHER_FUNCTION(256, gcm)
Adam Langleyfd772a52014-06-20 12:00:00 -07001047
1048
/* EVP_AEAD_AES_GCM_TAG_LEN is the length, in bytes, of a full GCM tag. */
#define EVP_AEAD_AES_GCM_TAG_LEN 16

/* aead_aes_gcm_ctx is the per-context state for the AES-GCM AEADs. */
struct aead_aes_gcm_ctx {
  union {
    double align; /* ensures the union is at least double-aligned. */
    AES_KEY ks;
  } ks;
  GCM128_CONTEXT gcm;
  ctr128_f ctr; /* optional CTR32 fast path; may be NULL. */
  uint8_t tag_len;
};
1060
1061static int aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
1062 size_t key_len, size_t tag_len) {
1063 struct aead_aes_gcm_ctx *gcm_ctx;
1064 const size_t key_bits = key_len * 8;
1065
1066 if (key_bits != 128 && key_bits != 256) {
David Benjamin3570d732015-06-29 00:28:17 -04001067 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
Adam Langleyfd772a52014-06-20 12:00:00 -07001068 return 0; /* EVP_AEAD_CTX_init should catch this. */
1069 }
1070
1071 if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
1072 tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
1073 }
1074
1075 if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
David Benjamin3570d732015-06-29 00:28:17 -04001076 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
Adam Langleyfd772a52014-06-20 12:00:00 -07001077 return 0;
1078 }
1079
1080 gcm_ctx = OPENSSL_malloc(sizeof(struct aead_aes_gcm_ctx));
1081 if (gcm_ctx == NULL) {
1082 return 0;
1083 }
1084
Adam Langley52f9f622015-03-13 11:49:22 -07001085 gcm_ctx->ctr =
1086 aes_ctr_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm, NULL, key, key_len);
Adam Langleyfd772a52014-06-20 12:00:00 -07001087 gcm_ctx->tag_len = tag_len;
1088 ctx->aead_state = gcm_ctx;
1089
1090 return 1;
1091}
1092
/* aead_aes_gcm_cleanup zeroises the key material before freeing the
 * AES-GCM AEAD state. */
static void aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx) {
  struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  OPENSSL_cleanse(gcm_ctx, sizeof(struct aead_aes_gcm_ctx));
  OPENSSL_free(gcm_ctx);
}
1098
1099static int aead_aes_gcm_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
1100 size_t *out_len, size_t max_out_len,
1101 const uint8_t *nonce, size_t nonce_len,
1102 const uint8_t *in, size_t in_len,
1103 const uint8_t *ad, size_t ad_len) {
1104 size_t bulk = 0;
1105 const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
1106 GCM128_CONTEXT gcm;
1107
1108 if (in_len + gcm_ctx->tag_len < in_len) {
David Benjamin3570d732015-06-29 00:28:17 -04001109 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
Adam Langleyfd772a52014-06-20 12:00:00 -07001110 return 0;
1111 }
1112
1113 if (max_out_len < in_len + gcm_ctx->tag_len) {
David Benjamin3570d732015-06-29 00:28:17 -04001114 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
Adam Langleyfd772a52014-06-20 12:00:00 -07001115 return 0;
1116 }
1117
1118 memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
1119 CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);
1120
1121 if (ad_len > 0 && !CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
1122 return 0;
1123 }
1124
1125 if (gcm_ctx->ctr) {
1126 if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, in + bulk, out + bulk, in_len - bulk,
1127 gcm_ctx->ctr)) {
1128 return 0;
1129 }
1130 } else {
1131 if (!CRYPTO_gcm128_encrypt(&gcm, in + bulk, out + bulk, in_len - bulk)) {
1132 return 0;
1133 }
1134 }
1135
1136 CRYPTO_gcm128_tag(&gcm, out + in_len, gcm_ctx->tag_len);
1137 *out_len = in_len + gcm_ctx->tag_len;
1138 return 1;
1139}
1140
1141static int aead_aes_gcm_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
1142 size_t *out_len, size_t max_out_len,
1143 const uint8_t *nonce, size_t nonce_len,
1144 const uint8_t *in, size_t in_len,
1145 const uint8_t *ad, size_t ad_len) {
1146 size_t bulk = 0;
1147 const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
1148 uint8_t tag[EVP_AEAD_AES_GCM_TAG_LEN];
1149 size_t plaintext_len;
1150 GCM128_CONTEXT gcm;
1151
1152 if (in_len < gcm_ctx->tag_len) {
David Benjamin3570d732015-06-29 00:28:17 -04001153 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
Adam Langleyfd772a52014-06-20 12:00:00 -07001154 return 0;
1155 }
1156
1157 plaintext_len = in_len - gcm_ctx->tag_len;
1158
1159 if (max_out_len < plaintext_len) {
David Benjamin3570d732015-06-29 00:28:17 -04001160 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
Adam Langleyfd772a52014-06-20 12:00:00 -07001161 return 0;
1162 }
1163
1164 memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
1165 CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);
1166
1167 if (!CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
1168 return 0;
1169 }
1170
1171 if (gcm_ctx->ctr) {
1172 if (!CRYPTO_gcm128_decrypt_ctr32(&gcm, in + bulk, out + bulk,
1173 in_len - bulk - gcm_ctx->tag_len,
1174 gcm_ctx->ctr)) {
1175 return 0;
1176 }
1177 } else {
1178 if (!CRYPTO_gcm128_decrypt(&gcm, in + bulk, out + bulk,
1179 in_len - bulk - gcm_ctx->tag_len)) {
1180 return 0;
1181 }
1182 }
1183
1184 CRYPTO_gcm128_tag(&gcm, tag, gcm_ctx->tag_len);
1185 if (CRYPTO_memcmp(tag, in + plaintext_len, gcm_ctx->tag_len) != 0) {
David Benjamin3570d732015-06-29 00:28:17 -04001186 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
Adam Langleyfd772a52014-06-20 12:00:00 -07001187 return 0;
1188 }
1189
1190 *out_len = plaintext_len;
1191 return 1;
1192}
1193
/* aead_aes_128_gcm is the AES-128-GCM AEAD vtable. */
static const EVP_AEAD aead_aes_128_gcm = {
    16,                       /* key len */
    12,                       /* nonce len */
    EVP_AEAD_AES_GCM_TAG_LEN, /* overhead */
    EVP_AEAD_AES_GCM_TAG_LEN, /* max tag length */
    aead_aes_gcm_init,
    NULL, /* init_with_direction */
    aead_aes_gcm_cleanup,
    aead_aes_gcm_seal,
    aead_aes_gcm_open,
    NULL, /* get_rc4_state */
};
1206
/* aead_aes_256_gcm is the AES-256-GCM AEAD vtable. */
static const EVP_AEAD aead_aes_256_gcm = {
    32,                       /* key len */
    12,                       /* nonce len */
    EVP_AEAD_AES_GCM_TAG_LEN, /* overhead */
    EVP_AEAD_AES_GCM_TAG_LEN, /* max tag length */
    aead_aes_gcm_init,
    NULL, /* init_with_direction */
    aead_aes_gcm_cleanup,
    aead_aes_gcm_seal,
    aead_aes_gcm_open,
    NULL, /* get_rc4_state */
};
1219
David Benjaminc44d2f42014-08-20 16:24:00 -04001220const EVP_AEAD *EVP_aead_aes_128_gcm(void) { return &aead_aes_128_gcm; }
Adam Langleyfd772a52014-06-20 12:00:00 -07001221
David Benjaminc44d2f42014-08-20 16:24:00 -04001222const EVP_AEAD *EVP_aead_aes_256_gcm(void) { return &aead_aes_256_gcm; }
Adam Langley93a3dcd2014-07-25 15:40:44 -07001223
1224
/* AES Key Wrap is specified in
 * http://csrc.nist.gov/groups/ST/toolkit/documents/kms/key-wrap.pdf
 * or https://tools.ietf.org/html/rfc3394 */

/* aead_aes_key_wrap_ctx holds a copy of the wrapping key; the AES key
 * schedule is expanded on each seal/open call. */
struct aead_aes_key_wrap_ctx {
  uint8_t key[32];      /* raw key bytes; only the first key_bits/8 are used. */
  unsigned key_bits;    /* 128 or 256. */
};
1233
1234static int aead_aes_key_wrap_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
1235 size_t key_len, size_t tag_len) {
1236 struct aead_aes_key_wrap_ctx *kw_ctx;
1237 const size_t key_bits = key_len * 8;
1238
1239 if (key_bits != 128 && key_bits != 256) {
David Benjamin3570d732015-06-29 00:28:17 -04001240 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
Adam Langley93a3dcd2014-07-25 15:40:44 -07001241 return 0; /* EVP_AEAD_CTX_init should catch this. */
1242 }
1243
1244 if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
1245 tag_len = 8;
1246 }
1247
1248 if (tag_len != 8) {
David Benjamin3570d732015-06-29 00:28:17 -04001249 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_TAG_SIZE);
Adam Langley93a3dcd2014-07-25 15:40:44 -07001250 return 0;
1251 }
1252
1253 kw_ctx = OPENSSL_malloc(sizeof(struct aead_aes_key_wrap_ctx));
1254 if (kw_ctx == NULL) {
David Benjamin3570d732015-06-29 00:28:17 -04001255 OPENSSL_PUT_ERROR(CIPHER, ERR_R_MALLOC_FAILURE);
Adam Langley93a3dcd2014-07-25 15:40:44 -07001256 return 0;
1257 }
1258
1259 memcpy(kw_ctx->key, key, key_len);
1260 kw_ctx->key_bits = key_bits;
1261
1262 ctx->aead_state = kw_ctx;
1263 return 1;
1264}
1265
/* aead_aes_key_wrap_cleanup zeroises the key copy before freeing the key
 * wrap AEAD state. */
static void aead_aes_key_wrap_cleanup(EVP_AEAD_CTX *ctx) {
  struct aead_aes_key_wrap_ctx *kw_ctx = ctx->aead_state;
  OPENSSL_cleanse(kw_ctx, sizeof(struct aead_aes_key_wrap_ctx));
  OPENSSL_free(kw_ctx);
}
1271
/* kDefaultAESKeyWrapNonce is the default nonce value given in 2.2.3.1 (the
 * RFC 3394 default IV, 0xA6 repeated), used when the caller supplies no
 * nonce. */
static const uint8_t kDefaultAESKeyWrapNonce[8] = {0xa6, 0xa6, 0xa6, 0xa6,
                                                   0xa6, 0xa6, 0xa6, 0xa6};
1275
1276
/* aead_aes_key_wrap_seal wraps |in_len| bytes (a multiple of 8, at least 16)
 * per RFC 3394 section 2.2.1, producing |in_len + 8| bytes of output. AD is
 * not supported and |nonce| must be absent or exactly 8 bytes. Returns one
 * on success and zero on error. */
static int aead_aes_key_wrap_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                  size_t *out_len, size_t max_out_len,
                                  const uint8_t *nonce, size_t nonce_len,
                                  const uint8_t *in, size_t in_len,
                                  const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_key_wrap_ctx *kw_ctx = ctx->aead_state;
  union {
    double align;
    AES_KEY ks;
  } ks;
  /* Variables in this function match up with the variables in the second half
   * of section 2.2.1. */
  unsigned i, j, n;
  uint8_t A[AES_BLOCK_SIZE];

  if (ad_len != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_AD_SIZE);
    return 0;
  }

  /* An absent nonce means the RFC 3394 default IV. */
  if (nonce_len == 0) {
    nonce = kDefaultAESKeyWrapNonce;
    nonce_len = sizeof(kDefaultAESKeyWrapNonce);
  }

  if (nonce_len != 8) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  if (in_len % 8 != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_INPUT_SIZE);
    return 0;
  }

  /* The code below only handles a 32-bit |t| thus 6*|n| must be less than
   * 2^32, where |n| is |in_len| / 8. So in_len < 4/3 * 2^32 and we
   * conservatively cap it to 2^32-16 to stop 32-bit platforms complaining that
   * a comparison is always true. */
  if (in_len > 0xfffffff0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  n = in_len / 8;

  /* RFC 3394 requires at least two 64-bit blocks of plaintext. */
  if (n < 2) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_INPUT_SIZE);
    return 0;
  }

  if (in_len + 8 < in_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_len < in_len + 8) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (AES_set_encrypt_key(kw_ctx->key, kw_ctx->key_bits, &ks.ks) < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  /* Lay out the output as A || R[1..n]; memmove because |out| may alias
   * |in|. */
  memmove(out + 8, in, in_len);
  memcpy(A, nonce, 8);

  for (j = 0; j < 6; j++) {
    for (i = 1; i <= n; i++) {
      uint32_t t;

      memcpy(A + 8, out + 8 * i, 8);
      AES_encrypt(A, A, &ks.ks);
      /* XOR the step counter |t| into the low 32 bits of A, big-endian. */
      t = n * j + i;
      A[7] ^= t & 0xff;
      A[6] ^= (t >> 8) & 0xff;
      A[5] ^= (t >> 16) & 0xff;
      A[4] ^= (t >> 24) & 0xff;
      memcpy(out + 8 * i, A + 8, 8);
    }
  }

  memcpy(out, A, 8);
  *out_len = in_len + 8;
  return 1;
}
1365
/* aead_aes_key_wrap_open unwraps |in_len| bytes (a multiple of 8, at least
 * 24) per RFC 3394 section 2.2.2, producing |in_len - 8| bytes of output and
 * verifying the integrity value against the nonce. Returns one on success and
 * zero on error or integrity failure. */
static int aead_aes_key_wrap_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                  size_t *out_len, size_t max_out_len,
                                  const uint8_t *nonce, size_t nonce_len,
                                  const uint8_t *in, size_t in_len,
                                  const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_key_wrap_ctx *kw_ctx = ctx->aead_state;
  union {
    double align;
    AES_KEY ks;
  } ks;
  /* Variables in this function match up with the variables in section 2.2.2
   * (the unwrap operation). */
  unsigned i, j, n;
  uint8_t A[AES_BLOCK_SIZE];

  if (ad_len != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_AD_SIZE);
    return 0;
  }

  /* An absent nonce means the RFC 3394 default IV. */
  if (nonce_len == 0) {
    nonce = kDefaultAESKeyWrapNonce;
    nonce_len = sizeof(kDefaultAESKeyWrapNonce);
  }

  if (nonce_len != 8) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  if (in_len % 8 != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_INPUT_SIZE);
    return 0;
  }

  /* The code below only handles a 32-bit |t| thus 6*|n| must be less than
   * 2^32, where |n| is |in_len| / 8. So in_len < 4/3 * 2^32 and we
   * conservatively cap it to 2^32-8 to stop 32-bit platforms complaining that
   * a comparison is always true. */
  if (in_len > 0xfffffff8) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (in_len < 24) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  n = (in_len / 8) - 1;

  if (max_out_len < in_len - 8) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (AES_set_decrypt_key(kw_ctx->key, kw_ctx->key_bits, &ks.ks) < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  /* Split the input into A || R[1..n]; memmove because |out| may alias
   * |in|. */
  memcpy(A, in, 8);
  memmove(out, in + 8, in_len - 8);

  /* |j| is unsigned, so decrementing it past zero wraps to UINT_MAX and
   * terminates the loop: it runs for j = 5, 4, 3, 2, 1, 0, unwinding the
   * wrap steps in reverse order. */
  for (j = 5; j < 6; j--) {
    for (i = n; i > 0; i--) {
      uint32_t t;

      /* Undo the big-endian XOR of the step counter, then decrypt. */
      t = n * j + i;
      A[7] ^= t & 0xff;
      A[6] ^= (t >> 8) & 0xff;
      A[5] ^= (t >> 16) & 0xff;
      A[4] ^= (t >> 24) & 0xff;
      memcpy(A + 8, out + 8 * (i - 1), 8);
      AES_decrypt(A, A, &ks.ks);
      memcpy(out + 8 * (i - 1), A + 8, 8);
    }
  }

  /* Constant-time check of the recovered integrity value. */
  if (CRYPTO_memcmp(A, nonce, 8) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  *out_len = in_len - 8;
  return 1;
}
1453
/* aead_aes_128_key_wrap is the AES-128 key wrap AEAD vtable. */
static const EVP_AEAD aead_aes_128_key_wrap = {
    16, /* key len */
    8,  /* nonce len */
    8,  /* overhead */
    8,  /* max tag length */
    aead_aes_key_wrap_init,
    NULL, /* init_with_direction */
    aead_aes_key_wrap_cleanup,
    aead_aes_key_wrap_seal,
    aead_aes_key_wrap_open,
    NULL, /* get_rc4_state */
};
1466
/* aead_aes_256_key_wrap is the AES-256 key wrap AEAD vtable. */
static const EVP_AEAD aead_aes_256_key_wrap = {
    32, /* key len */
    8,  /* nonce len */
    8,  /* overhead */
    8,  /* max tag length */
    aead_aes_key_wrap_init,
    NULL, /* init_with_direction */
    aead_aes_key_wrap_cleanup,
    aead_aes_key_wrap_seal,
    aead_aes_key_wrap_open,
    NULL, /* get_rc4_state */
};
1479
David Benjaminc44d2f42014-08-20 16:24:00 -04001480const EVP_AEAD *EVP_aead_aes_128_key_wrap(void) { return &aead_aes_128_key_wrap; }
Adam Langley93a3dcd2014-07-25 15:40:44 -07001481
David Benjaminc44d2f42014-08-20 16:24:00 -04001482const EVP_AEAD *EVP_aead_aes_256_key_wrap(void) { return &aead_aes_256_key_wrap; }
David Benjamin5213df42014-08-20 14:19:54 -04001483
Adam Langley0e782a92015-03-13 12:11:00 -07001484
/* EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN is the length of a full HMAC-SHA256
 * tag. */
#define EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN SHA256_DIGEST_LENGTH
#define EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN 12

/* aead_aes_ctr_hmac_sha256_ctx is the per-context state for the
 * AES-CTR-HMAC-SHA256 AEAD. */
struct aead_aes_ctr_hmac_sha256_ctx {
  union {
    double align; /* ensures the union is at least double-aligned. */
    AES_KEY ks;
  } ks;
  ctr128_f ctr;     /* optional CTR32 fast path; may be NULL. */
  block128_f block; /* single-block AES function, used when |ctr| is NULL. */
  SHA256_CTX inner_init_state; /* SHA-256 state after hashing the HMAC ipad. */
  SHA256_CTX outer_init_state; /* SHA-256 state after hashing the HMAC opad. */
  uint8_t tag_len;
};
1499
1500static void hmac_init(SHA256_CTX *out_inner, SHA256_CTX *out_outer,
1501 const uint8_t hmac_key[32]) {
1502 static const size_t hmac_key_len = 32;
1503 uint8_t block[SHA256_CBLOCK];
1504 memcpy(block, hmac_key, hmac_key_len);
1505 memset(block + hmac_key_len, 0x36, sizeof(block) - hmac_key_len);
1506
1507 unsigned i;
1508 for (i = 0; i < hmac_key_len; i++) {
1509 block[i] ^= 0x36;
1510 }
1511
1512 SHA256_Init(out_inner);
1513 SHA256_Update(out_inner, block, sizeof(block));
1514
1515 memset(block + hmac_key_len, 0x5c, sizeof(block) - hmac_key_len);
1516 for (i = 0; i < hmac_key_len; i++) {
1517 block[i] ^= (0x36 ^ 0x5c);
1518 }
1519
1520 SHA256_Init(out_outer);
1521 SHA256_Update(out_outer, block, sizeof(block));
1522}
1523
/* aead_aes_ctr_hmac_sha256_init sets up |ctx| from |key|, which is the AES
 * key (16 or 32 bytes) followed by a 32-byte HMAC key, so |key_len| must be
 * 48 or 64. Returns one on success and zero on error. */
static int aead_aes_ctr_hmac_sha256_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                         size_t key_len, size_t tag_len) {
  struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx;
  static const size_t hmac_key_len = 32;

  if (key_len < hmac_key_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  /* The AES key is whatever precedes the trailing 32-byte HMAC key. */
  const size_t aes_key_len = key_len - hmac_key_len;
  if (aes_key_len != 16 && aes_key_len != 32) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN;
  }

  if (tag_len > EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  aes_ctx = OPENSSL_malloc(sizeof(struct aead_aes_ctr_hmac_sha256_ctx));
  if (aes_ctx == NULL) {
    OPENSSL_PUT_ERROR(CIPHER, ERR_R_MALLOC_FAILURE);
    return 0;
  }

  aes_ctx->ctr =
      aes_ctr_set_key(&aes_ctx->ks.ks, NULL, &aes_ctx->block, key, aes_key_len);
  aes_ctx->tag_len = tag_len;
  /* Precompute the HMAC pad states once so seal/open only hash the message. */
  hmac_init(&aes_ctx->inner_init_state, &aes_ctx->outer_init_state,
            key + aes_key_len);

  ctx->aead_state = aes_ctx;

  return 1;
}
1565
/* aead_aes_ctr_hmac_sha256_cleanup zeroises the key material before freeing
 * the AEAD state. */
static void aead_aes_ctr_hmac_sha256_cleanup(EVP_AEAD_CTX *ctx) {
  struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx = ctx->aead_state;
  OPENSSL_cleanse(aes_ctx, sizeof(struct aead_aes_ctr_hmac_sha256_ctx));
  OPENSSL_free(aes_ctx);
}
1571
1572static void hmac_update_uint64(SHA256_CTX *sha256, uint64_t value) {
1573 unsigned i;
1574 uint8_t bytes[8];
1575
1576 for (i = 0; i < sizeof(bytes); i++) {
1577 bytes[i] = value & 0xff;
1578 value >>= 8;
1579 }
1580 SHA256_Update(sha256, bytes, sizeof(bytes));
1581}
1582
/* hmac_calculate computes HMAC-SHA256 over (ad_len || ciphertext_len ||
 * nonce || ad || zero padding || ciphertext), starting from the precomputed
 * inner/outer pad states, and writes the 32-byte result to |out|. The header
 * is padded so the ciphertext begins on a SHA-256 block boundary. */
static void hmac_calculate(uint8_t out[SHA256_DIGEST_LENGTH],
                           const SHA256_CTX *inner_init_state,
                           const SHA256_CTX *outer_init_state,
                           const uint8_t *ad, size_t ad_len,
                           const uint8_t *nonce, const uint8_t *ciphertext,
                           size_t ciphertext_len) {
  SHA256_CTX sha256;
  memcpy(&sha256, inner_init_state, sizeof(sha256));
  /* Hash the lengths first so the MAC is unambiguous about field
   * boundaries. */
  hmac_update_uint64(&sha256, ad_len);
  hmac_update_uint64(&sha256, ciphertext_len);
  SHA256_Update(&sha256, nonce, EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN);
  SHA256_Update(&sha256, ad, ad_len);

  /* Pad with zeros to the end of the SHA-256 block. */
  const unsigned num_padding =
      (SHA256_CBLOCK - ((sizeof(uint64_t)*2 +
                         EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN + ad_len) %
                        SHA256_CBLOCK)) %
      SHA256_CBLOCK;
  uint8_t padding[SHA256_CBLOCK];
  memset(padding, 0, num_padding);
  SHA256_Update(&sha256, padding, num_padding);

  SHA256_Update(&sha256, ciphertext, ciphertext_len);

  uint8_t inner_digest[SHA256_DIGEST_LENGTH];
  SHA256_Final(inner_digest, &sha256);

  /* Outer HMAC pass: hash the inner digest from the opad state. */
  memcpy(&sha256, outer_init_state, sizeof(sha256));
  SHA256_Update(&sha256, inner_digest, sizeof(inner_digest));
  SHA256_Final(out, &sha256);
}
1615
/* aead_aes_ctr_hmac_sha256_crypt CTR-encrypts (or, identically, decrypts)
 * |len| bytes from |in| to |out| using the 12-byte |nonce| with a 32-bit
 * counter starting at zero. */
static void aead_aes_ctr_hmac_sha256_crypt(
    const struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx, uint8_t *out,
    const uint8_t *in, size_t len, const uint8_t *nonce) {
  /* Since the AEAD operation is one-shot, keeping a buffer of unused keystream
   * bytes is pointless. However, |CRYPTO_ctr128_encrypt| requires it. */
  uint8_t partial_block_buffer[AES_BLOCK_SIZE];
  unsigned partial_block_offset = 0;
  memset(partial_block_buffer, 0, sizeof(partial_block_buffer));

  /* Counter block = nonce || 32-bit zero counter. */
  uint8_t counter[AES_BLOCK_SIZE];
  memcpy(counter, nonce, EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN);
  memset(counter + EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN, 0, 4);

  /* Prefer the CTR32 fast path when the key setup provided one. */
  if (aes_ctx->ctr) {
    CRYPTO_ctr128_encrypt_ctr32(in, out, len, &aes_ctx->ks.ks, counter,
                                partial_block_buffer, &partial_block_offset,
                                aes_ctx->ctr);
  } else {
    CRYPTO_ctr128_encrypt(in, out, len, &aes_ctx->ks.ks, counter,
                          partial_block_buffer, &partial_block_offset,
                          aes_ctx->block);
  }
}
1639
/* aead_aes_ctr_hmac_sha256_seal CTR-encrypts |in_len| bytes from |in| into
 * |out| and appends a |tag_len|-byte truncated HMAC-SHA256 tag over the
 * lengths, nonce, AD and ciphertext. Returns one on success and zero on
 * error. */
static int aead_aes_ctr_hmac_sha256_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                         size_t *out_len, size_t max_out_len,
                                         const uint8_t *nonce, size_t nonce_len,
                                         const uint8_t *in, size_t in_len,
                                         const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx = ctx->aead_state;
  const uint64_t in_len_64 = in_len;

  if (in_len + aes_ctx->tag_len < in_len ||
      /* This input is so large it would overflow the 32-bit block counter. */
      in_len_64 >= (OPENSSL_U64(1) << 32) * AES_BLOCK_SIZE) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_len < in_len + aes_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (nonce_len != EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  aead_aes_ctr_hmac_sha256_crypt(aes_ctx, out, in, in_len, nonce);

  /* MAC the ciphertext (encrypt-then-MAC) and append the truncated tag. */
  uint8_t hmac_result[SHA256_DIGEST_LENGTH];
  hmac_calculate(hmac_result, &aes_ctx->inner_init_state,
                 &aes_ctx->outer_init_state, ad, ad_len, nonce, out, in_len);
  memcpy(out + in_len, hmac_result, aes_ctx->tag_len);
  *out_len = in_len + aes_ctx->tag_len;

  return 1;
}
1675
1676static int aead_aes_ctr_hmac_sha256_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
1677 size_t *out_len, size_t max_out_len,
1678 const uint8_t *nonce, size_t nonce_len,
1679 const uint8_t *in, size_t in_len,
1680 const uint8_t *ad, size_t ad_len) {
1681 const struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx = ctx->aead_state;
1682 size_t plaintext_len;
1683
1684 if (in_len < aes_ctx->tag_len) {
David Benjamin3570d732015-06-29 00:28:17 -04001685 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
Adam Langley0e782a92015-03-13 12:11:00 -07001686 return 0;
1687 }
1688
1689 plaintext_len = in_len - aes_ctx->tag_len;
1690
1691 if (max_out_len < plaintext_len) {
David Benjamin3570d732015-06-29 00:28:17 -04001692 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
Adam Langley0e782a92015-03-13 12:11:00 -07001693 return 0;
1694 }
1695
1696 if (nonce_len != EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN) {
David Benjamin3570d732015-06-29 00:28:17 -04001697 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
Adam Langley0e782a92015-03-13 12:11:00 -07001698 return 0;
1699 }
1700
1701 uint8_t hmac_result[SHA256_DIGEST_LENGTH];
1702 hmac_calculate(hmac_result, &aes_ctx->inner_init_state,
1703 &aes_ctx->outer_init_state, ad, ad_len, nonce, in,
1704 plaintext_len);
1705 if (CRYPTO_memcmp(hmac_result, in + plaintext_len, aes_ctx->tag_len) != 0) {
David Benjamin3570d732015-06-29 00:28:17 -04001706 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
Adam Langley0e782a92015-03-13 12:11:00 -07001707 return 0;
1708 }
1709
1710 aead_aes_ctr_hmac_sha256_crypt(aes_ctx, out, in, plaintext_len, nonce);
1711
1712 *out_len = plaintext_len;
1713 return 1;
1714}
1715
/* AEAD vtable for AES-128-CTR encryption authenticated with HMAC-SHA-256. */
static const EVP_AEAD aead_aes_128_ctr_hmac_sha256 = {
    16 /* AES key */ + 32 /* HMAC key */,
    12, /* nonce length */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* overhead */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* max tag length */

    aead_aes_ctr_hmac_sha256_init,
    NULL /* init_with_direction */,
    aead_aes_ctr_hmac_sha256_cleanup,
    aead_aes_ctr_hmac_sha256_seal,
    aead_aes_ctr_hmac_sha256_open,
    NULL /* get_rc4_state */,
};
1729
/* AEAD vtable for AES-256-CTR encryption authenticated with HMAC-SHA-256.
 * Identical to the 128-bit variant except for the larger AES key. */
static const EVP_AEAD aead_aes_256_ctr_hmac_sha256 = {
    32 /* AES key */ + 32 /* HMAC key */,
    12, /* nonce length */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* overhead */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* max tag length */

    aead_aes_ctr_hmac_sha256_init,
    NULL /* init_with_direction */,
    aead_aes_ctr_hmac_sha256_cleanup,
    aead_aes_ctr_hmac_sha256_seal,
    aead_aes_ctr_hmac_sha256_open,
    NULL /* get_rc4_state */,
};
1743
/* EVP_aead_aes_128_ctr_hmac_sha256 returns the AES-128-CTR-HMAC-SHA256 AEAD
 * implementation. The returned object is static; callers must not free it. */
const EVP_AEAD *EVP_aead_aes_128_ctr_hmac_sha256(void) {
  return &aead_aes_128_ctr_hmac_sha256;
}
1747
/* EVP_aead_aes_256_ctr_hmac_sha256 returns the AES-256-CTR-HMAC-SHA256 AEAD
 * implementation. The returned object is static; callers must not free it. */
const EVP_AEAD *EVP_aead_aes_256_ctr_hmac_sha256(void) {
  return &aead_aes_256_ctr_hmac_sha256;
}
1751
David Benjamin5213df42014-08-20 14:19:54 -04001752int EVP_has_aes_hardware(void) {
1753#if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
1754 return aesni_capable() && crypto_gcm_clmul_enabled();
Adam Langley3e652652015-01-09 15:44:37 -08001755#elif defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
1756 return hwaes_capable() && (OPENSSL_armcap_P & ARMV8_PMULL);
David Benjamin5213df42014-08-20 14:19:54 -04001757#else
1758 return 0;
1759#endif
1760}