blob: 69a4785acd978600918b49cee0f948bca542e6b6 [file] [log] [blame]
David Benjamin4690bb52015-05-10 03:10:07 -04001/*
2 * Written by Dr Stephen N Henson (steve@openssl.org) for the OpenSSL
3 * project.
4 */
5/* ====================================================================
6 * Copyright (c) 2015 The OpenSSL Project. All rights reserved.
7 *
8 * Redistribution and use in source and binary forms, with or without
9 * modification, are permitted provided that the following conditions
10 * are met:
11 *
12 * 1. Redistributions of source code must retain the above copyright
13 * notice, this list of conditions and the following disclaimer.
14 *
15 * 2. Redistributions in binary form must reproduce the above copyright
16 * notice, this list of conditions and the following disclaimer in
17 * the documentation and/or other materials provided with the
18 * distribution.
19 *
20 * 3. All advertising materials mentioning features or use of this
21 * software must display the following acknowledgment:
22 * "This product includes software developed by the OpenSSL Project
23 * for use in the OpenSSL Toolkit. (http://www.OpenSSL.org/)"
24 *
25 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
26 * endorse or promote products derived from this software without
27 * prior written permission. For written permission, please contact
28 * licensing@OpenSSL.org.
29 *
30 * 5. Products derived from this software may not be called "OpenSSL"
31 * nor may "OpenSSL" appear in their names without prior written
32 * permission of the OpenSSL Project.
33 *
34 * 6. Redistributions of any form whatsoever must retain the following
35 * acknowledgment:
36 * "This product includes software developed by the OpenSSL Project
37 * for use in the OpenSSL Toolkit (http://www.OpenSSL.org/)"
38 *
39 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
40 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
41 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
42 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
43 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
44 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
45 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
46 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
47 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
48 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
49 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
50 * OF THE POSSIBILITY OF SUCH DAMAGE.
51 * ====================================================================
52 */
53
David Benjamin8e75ae42018-05-01 17:01:04 -040054#include <limits.h>
David Benjamin4690bb52015-05-10 03:10:07 -040055#include <stdlib.h>
56#include <string.h>
57
David Benjamin8e75ae42018-05-01 17:01:04 -040058#include <algorithm>
David Benjamin4690bb52015-05-10 03:10:07 -040059#include <string>
60#include <vector>
61
David Benjamin6757fbf2017-05-24 00:50:35 -040062#include <gtest/gtest.h>
63
David Benjamin8819e0b2020-05-20 15:56:09 -040064#include <openssl/aes.h>
David Benjaminf0e935d2016-09-06 18:10:19 -040065#include <openssl/cipher.h>
David Benjamin4690bb52015-05-10 03:10:07 -040066#include <openssl/err.h>
David Benjamin8819e0b2020-05-20 15:56:09 -040067#include <openssl/nid.h>
David Benjamin669ffe62021-04-07 16:17:50 -040068#include <openssl/rand.h>
69#include <openssl/sha.h>
David Benjamin8e75ae42018-05-01 17:01:04 -040070#include <openssl/span.h>
David Benjamin4690bb52015-05-10 03:10:07 -040071
David Benjamin361647e2023-11-05 21:33:13 +010072#include "../internal.h"
David Benjamin4690bb52015-05-10 03:10:07 -040073#include "../test/file_test.h"
David Benjamin6757fbf2017-05-24 00:50:35 -040074#include "../test/test_util.h"
David Benjamin8e75ae42018-05-01 17:01:04 -040075#include "../test/wycheproof_util.h"
David Benjamin669ffe62021-04-07 16:17:50 -040076#include "./internal.h"
David Benjamin4690bb52015-05-10 03:10:07 -040077
78
79static const EVP_CIPHER *GetCipher(const std::string &name) {
80 if (name == "DES-CBC") {
81 return EVP_des_cbc();
Matt Braithwaite98d2f1f2015-08-18 20:27:03 -070082 } else if (name == "DES-ECB") {
83 return EVP_des_ecb();
Matt Braithwaited82a7b22015-08-19 14:25:32 -070084 } else if (name == "DES-EDE") {
85 return EVP_des_ede();
Adam Langleya5334492017-04-11 11:08:08 -070086 } else if (name == "DES-EDE3") {
87 return EVP_des_ede3();
Matt Braithwaite8c413a22015-08-11 17:19:35 -070088 } else if (name == "DES-EDE-CBC") {
89 return EVP_des_ede_cbc();
David Benjamin4690bb52015-05-10 03:10:07 -040090 } else if (name == "DES-EDE3-CBC") {
91 return EVP_des_ede3_cbc();
92 } else if (name == "RC4") {
93 return EVP_rc4();
94 } else if (name == "AES-128-ECB") {
95 return EVP_aes_128_ecb();
96 } else if (name == "AES-256-ECB") {
97 return EVP_aes_256_ecb();
98 } else if (name == "AES-128-CBC") {
99 return EVP_aes_128_cbc();
100 } else if (name == "AES-128-GCM") {
101 return EVP_aes_128_gcm();
102 } else if (name == "AES-128-OFB") {
103 return EVP_aes_128_ofb();
104 } else if (name == "AES-192-CBC") {
105 return EVP_aes_192_cbc();
Martin Kreichgauer23aff6b2017-04-11 08:53:08 -0700106 } else if (name == "AES-192-CTR") {
107 return EVP_aes_192_ctr();
David Benjamin4690bb52015-05-10 03:10:07 -0400108 } else if (name == "AES-192-ECB") {
109 return EVP_aes_192_ecb();
David Benjamin9992ad22022-11-25 17:02:34 -0500110 } else if (name == "AES-192-GCM") {
111 return EVP_aes_192_gcm();
David Benjaminf6e5d0d2018-06-15 20:14:16 -0400112 } else if (name == "AES-192-OFB") {
113 return EVP_aes_192_ofb();
David Benjamin4690bb52015-05-10 03:10:07 -0400114 } else if (name == "AES-256-CBC") {
115 return EVP_aes_256_cbc();
116 } else if (name == "AES-128-CTR") {
117 return EVP_aes_128_ctr();
118 } else if (name == "AES-256-CTR") {
119 return EVP_aes_256_ctr();
120 } else if (name == "AES-256-GCM") {
121 return EVP_aes_256_gcm();
122 } else if (name == "AES-256-OFB") {
123 return EVP_aes_256_ofb();
124 }
125 return nullptr;
126}
127
// Operation describes which direction(s) of a cipher operation a test vector
// exercises.
enum class Operation {
  // kBoth tests both encryption and decryption.
  kBoth,
  // kEncrypt tests encryption. The result of encryption should always
  // successfully decrypt, so this should only be used if the test file has a
  // matching decrypt-only vector.
  kEncrypt,
  // kDecrypt tests decryption. This should only be used if the test file has a
  // matching encrypt-only input, or if multiple ciphertexts are valid for
  // a given plaintext and this is a non-canonical ciphertext.
  kDecrypt,
  // kInvalidDecrypt tests decryption and expects it to fail, e.g. due to
  // invalid tag or padding.
  kInvalidDecrypt,
};
David Benjamin8e75ae42018-05-01 17:01:04 -0400143
David Benjamin1ce2ec72022-11-27 18:34:32 -0500144static const char *OperationToString(Operation op) {
145 switch (op) {
146 case Operation::kBoth:
147 return "Both";
148 case Operation::kEncrypt:
149 return "Encrypt";
150 case Operation::kDecrypt:
151 return "Decrypt";
152 case Operation::kInvalidDecrypt:
153 return "InvalidDecrypt";
David Benjamin8e75ae42018-05-01 17:01:04 -0400154 }
David Benjamin1ce2ec72022-11-27 18:34:32 -0500155 abort();
David Benjamin8e75ae42018-05-01 17:01:04 -0400156}
157
David Benjaminb5b1c612022-11-27 18:46:46 -0500158// MaybeCopyCipherContext, if |copy| is true, replaces |*ctx| with a, hopefully
159// equivalent, copy of it.
160static bool MaybeCopyCipherContext(bool copy,
161 bssl::UniquePtr<EVP_CIPHER_CTX> *ctx) {
162 if (!copy) {
163 return true;
164 }
165 bssl::UniquePtr<EVP_CIPHER_CTX> ctx2(EVP_CIPHER_CTX_new());
166 if (!ctx2 || !EVP_CIPHER_CTX_copy(ctx2.get(), ctx->get())) {
167 return false;
168 }
169 *ctx = std::move(ctx2);
170 return true;
171}
172
// TestCipherAPI runs one encryption or decryption through the |EVP_CIPHER_CTX|
// API and checks the output (and, for AEADs, the tag) against the expected
// vector. |op| selects the direction (kEncrypt, kDecrypt, or
// kInvalidDecrypt, which expects the final step to fail). |copy| exercises
// |EVP_CIPHER_CTX_copy| at each configuration stage, |in_place| overlaps the
// input and output buffers, |use_evp_cipher| drives the operation through the
// legacy |EVP_Cipher| entry point instead of |EVP_CipherUpdate|, and
// |chunk_size| streams the input in pieces (0 means single-shot).
static void TestCipherAPI(const EVP_CIPHER *cipher, Operation op, bool padding,
                          bool copy, bool in_place, bool use_evp_cipher,
                          size_t chunk_size, bssl::Span<const uint8_t> key,
                          bssl::Span<const uint8_t> iv,
                          bssl::Span<const uint8_t> plaintext,
                          bssl::Span<const uint8_t> ciphertext,
                          bssl::Span<const uint8_t> aad,
                          bssl::Span<const uint8_t> tag) {
  bool encrypt = op == Operation::kEncrypt;
  bool is_custom_cipher =
      EVP_CIPHER_flags(cipher) & EVP_CIPH_FLAG_CUSTOM_CIPHER;
  // For decryption (valid or invalid), the roles of the two vectors swap.
  bssl::Span<const uint8_t> in = encrypt ? plaintext : ciphertext;
  bssl::Span<const uint8_t> expected = encrypt ? ciphertext : plaintext;
  bool is_aead = EVP_CIPHER_mode(cipher) == EVP_CIPH_GCM_MODE;

  // Some |EVP_CIPHER|s take a variable-length key, and need to first be
  // configured with the key length, which requires configuring the cipher.
  bssl::UniquePtr<EVP_CIPHER_CTX> ctx(EVP_CIPHER_CTX_new());
  ASSERT_TRUE(ctx);
  ASSERT_TRUE(EVP_CipherInit_ex(ctx.get(), cipher, /*engine=*/nullptr,
                                /*key=*/nullptr, /*iv=*/nullptr,
                                encrypt ? 1 : 0));
  ASSERT_TRUE(EVP_CIPHER_CTX_set_key_length(ctx.get(), key.size()));
  if (!padding) {
    ASSERT_TRUE(EVP_CIPHER_CTX_set_padding(ctx.get(), 0));
  }

  // Configure the key.
  ASSERT_TRUE(MaybeCopyCipherContext(copy, &ctx));
  ASSERT_TRUE(EVP_CipherInit_ex(ctx.get(), /*cipher=*/nullptr,
                                /*engine=*/nullptr, key.data(), /*iv=*/nullptr,
                                /*enc=*/-1));

  // Configure the IV to run the actual operation. Callers that wish to use a
  // key for multiple, potentially concurrent, operations will likely copy at
  // this point. The |EVP_CIPHER_CTX| API uses the same type to represent a
  // pre-computed key schedule and a streaming operation.
  ASSERT_TRUE(MaybeCopyCipherContext(copy, &ctx));
  if (is_aead) {
    ASSERT_LE(iv.size(), size_t{INT_MAX});
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IVLEN,
                                    static_cast<int>(iv.size()), nullptr));
    ASSERT_EQ(EVP_CIPHER_CTX_iv_length(ctx.get()), iv.size());
  } else {
    // Non-AEAD modes have a fixed IV length determined by the cipher.
    ASSERT_EQ(iv.size(), EVP_CIPHER_CTX_iv_length(ctx.get()));
  }
  ASSERT_TRUE(EVP_CipherInit_ex(ctx.get(), /*cipher=*/nullptr,
                                /*engine=*/nullptr,
                                /*key=*/nullptr, iv.data(), /*enc=*/-1));

  // When decrypting an AEAD, the expected tag must be set before the data.
  if (is_aead && !encrypt) {
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_TAG,
                                    tag.size(),
                                    const_cast<uint8_t *>(tag.data())));
  }

  // Note: the deprecated |EVP_CIPHER|-based AEAD API is sensitive to whether
  // parameters are NULL, so it is important to skip the |in| and |aad|
  // |EVP_CipherUpdate| calls when empty.
  while (!aad.empty()) {
    size_t todo =
        chunk_size == 0 ? aad.size() : std::min(aad.size(), chunk_size);
    if (use_evp_cipher) {
      // AEADs always use the "custom cipher" return value convention. Passing a
      // null output pointer triggers the AAD logic.
      ASSERT_TRUE(is_custom_cipher);
      ASSERT_EQ(static_cast<int>(todo),
                EVP_Cipher(ctx.get(), nullptr, aad.data(), todo));
    } else {
      int len;
      ASSERT_TRUE(EVP_CipherUpdate(ctx.get(), nullptr, &len, aad.data(), todo));
      // Although it doesn't output anything, |EVP_CipherUpdate| should claim to
      // output the input length.
      EXPECT_EQ(len, static_cast<int>(todo));
    }
    aad = aad.subspan(todo);
  }

  // Set up the output buffer. When encrypting with padding enabled, the output
  // may be up to one block larger than the input.
  size_t max_out = in.size();
  size_t block_size = EVP_CIPHER_CTX_block_size(ctx.get());
  if (block_size > 1 &&
      (EVP_CIPHER_CTX_flags(ctx.get()) & EVP_CIPH_NO_PADDING) == 0 &&
      EVP_CIPHER_CTX_encrypting(ctx.get())) {
    max_out += block_size - (max_out % block_size);
  }
  std::vector<uint8_t> result(max_out);
  if (in_place) {
    // Read the input back out of the (shared) output buffer.
    std::copy(in.begin(), in.end(), result.begin());
    in = bssl::MakeConstSpan(result).first(in.size());
  }

  size_t total = 0;
  int len;
  while (!in.empty()) {
    size_t todo = chunk_size == 0 ? in.size() : std::min(in.size(), chunk_size);
    EXPECT_LE(todo, static_cast<size_t>(INT_MAX));
    ASSERT_TRUE(MaybeCopyCipherContext(copy, &ctx));
    if (use_evp_cipher) {
      // |EVP_Cipher| sometimes returns the number of bytes written, or -1 on
      // error, and sometimes 1 or 0, implicitly writing |in_len| bytes.
      if (is_custom_cipher) {
        len = EVP_Cipher(ctx.get(), result.data() + total, in.data(), todo);
      } else {
        ASSERT_EQ(
            1, EVP_Cipher(ctx.get(), result.data() + total, in.data(), todo));
        len = static_cast<int>(todo);
      }
    } else {
      ASSERT_TRUE(EVP_CipherUpdate(ctx.get(), result.data() + total, &len,
                                   in.data(), static_cast<int>(todo)));
    }
    ASSERT_GE(len, 0);
    total += static_cast<size_t>(len);
    in = in.subspan(todo);
  }
  if (op == Operation::kInvalidDecrypt) {
    if (use_evp_cipher) {
      // Only the "custom cipher" return value convention can report failures.
      // Passing all nulls should act like |EVP_CipherFinal_ex|.
      ASSERT_TRUE(is_custom_cipher);
      EXPECT_EQ(-1, EVP_Cipher(ctx.get(), nullptr, nullptr, 0));
    } else {
      // Invalid padding and invalid tags all appear as a failed
      // |EVP_CipherFinal_ex|.
      EXPECT_FALSE(EVP_CipherFinal_ex(ctx.get(), result.data() + total, &len));
    }
  } else {
    if (use_evp_cipher) {
      if (is_custom_cipher) {
        // Only the "custom cipher" convention has an |EVP_CipherFinal_ex|
        // equivalent.
        len = EVP_Cipher(ctx.get(), nullptr, nullptr, 0);
      } else {
        len = 0;
      }
    } else {
      ASSERT_TRUE(EVP_CipherFinal_ex(ctx.get(), result.data() + total, &len));
    }
    ASSERT_GE(len, 0);
    total += static_cast<size_t>(len);
    result.resize(total);
    EXPECT_EQ(Bytes(expected), Bytes(result));
    // When encrypting an AEAD, also check the computed tag.
    if (encrypt && is_aead) {
      uint8_t rtag[16];
      ASSERT_LE(tag.size(), sizeof(rtag));
      ASSERT_TRUE(MaybeCopyCipherContext(copy, &ctx));
      ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_GET_TAG,
                                      tag.size(), rtag));
      EXPECT_EQ(Bytes(tag), Bytes(rtag, tag.size()));
    }
  }
}
326
// TestLowLevelAPI runs the same vector through the low-level AES APIs
// (|AES_ctr128_encrypt|, |AES_cbc_encrypt|, |AES_ofb128_encrypt|) when
// |cipher| is an AES-CTR, AES-CBC, or AES-OFB cipher; for any other cipher
// it returns without doing anything. |chunk_size| of zero means a single
// call; otherwise the input is streamed in |chunk_size| pieces, relying on
// the APIs' chaining state (|ivec|, |num|, |ecount_buf|) between calls.
static void TestLowLevelAPI(
    const EVP_CIPHER *cipher, Operation op, bool in_place, size_t chunk_size,
    bssl::Span<const uint8_t> key, bssl::Span<const uint8_t> iv,
    bssl::Span<const uint8_t> plaintext, bssl::Span<const uint8_t> ciphertext) {
  bool encrypt = op == Operation::kEncrypt;
  bssl::Span<const uint8_t> in = encrypt ? plaintext : ciphertext;
  bssl::Span<const uint8_t> expected = encrypt ? ciphertext : plaintext;
  int nid = EVP_CIPHER_nid(cipher);
  bool is_ctr = nid == NID_aes_128_ctr || nid == NID_aes_192_ctr ||
                nid == NID_aes_256_ctr;
  bool is_cbc = nid == NID_aes_128_cbc || nid == NID_aes_192_cbc ||
                nid == NID_aes_256_cbc;
  bool is_ofb = nid == NID_aes_128_ofb128 || nid == NID_aes_192_ofb128 ||
                nid == NID_aes_256_ofb128;
  if (!is_ctr && !is_cbc && !is_ofb) {
    return;
  }

  // Invalid ciphertexts are not possible in any of the ciphers where this API
  // applies.
  ASSERT_NE(op, Operation::kInvalidDecrypt);

  // Only CBC decryption uses the inverse key schedule; CTR and OFB always
  // run the block cipher forward.
  AES_KEY aes;
  if (encrypt || !is_cbc) {
    ASSERT_EQ(0, AES_set_encrypt_key(key.data(), key.size() * 8, &aes));
  } else {
    ASSERT_EQ(0, AES_set_decrypt_key(key.data(), key.size() * 8, &aes));
  }

  std::vector<uint8_t> result;
  if (in_place) {
    result.assign(in.begin(), in.end());
  } else {
    result.resize(expected.size());
  }
  bssl::Span<uint8_t> out = bssl::MakeSpan(result);
  // Input and output sizes for all the low-level APIs should match.
  ASSERT_EQ(in.size(), out.size());

  // The low-level APIs all use block-size IVs.
  ASSERT_EQ(iv.size(), size_t{AES_BLOCK_SIZE});
  uint8_t ivec[AES_BLOCK_SIZE];
  OPENSSL_memcpy(ivec, iv.data(), iv.size());

  if (is_ctr) {
    unsigned num = 0;
    uint8_t ecount_buf[AES_BLOCK_SIZE];
    if (chunk_size == 0) {
      AES_ctr128_encrypt(in.data(), out.data(), in.size(), &aes, ivec,
                         ecount_buf, &num);
    } else {
      do {
        size_t todo = std::min(in.size(), chunk_size);
        AES_ctr128_encrypt(in.data(), out.data(), todo, &aes, ivec, ecount_buf,
                           &num);
        in = in.subspan(todo);
        out = out.subspan(todo);
      } while (!in.empty());
    }
    EXPECT_EQ(Bytes(expected), Bytes(result));
  } else if (is_cbc && chunk_size % AES_BLOCK_SIZE == 0) {
    // Note |AES_cbc_encrypt| requires block-aligned chunks.
    if (chunk_size == 0) {
      AES_cbc_encrypt(in.data(), out.data(), in.size(), &aes, ivec, encrypt);
    } else {
      do {
        size_t todo = std::min(in.size(), chunk_size);
        AES_cbc_encrypt(in.data(), out.data(), todo, &aes, ivec, encrypt);
        in = in.subspan(todo);
        out = out.subspan(todo);
      } while (!in.empty());
    }
    EXPECT_EQ(Bytes(expected), Bytes(result));
  } else if (is_ofb) {
    int num = 0;
    if (chunk_size == 0) {
      AES_ofb128_encrypt(in.data(), out.data(), in.size(), &aes, ivec, &num);
    } else {
      do {
        size_t todo = std::min(in.size(), chunk_size);
        AES_ofb128_encrypt(in.data(), out.data(), todo, &aes, ivec, &num);
        in = in.subspan(todo);
        out = out.subspan(todo);
      } while (!in.empty());
    }
    EXPECT_EQ(Bytes(expected), Bytes(result));
  }
}
415
// TestCipher runs a single test vector through every combination of API
// surface (|EVP_CipherUpdate| vs legacy |EVP_Cipher| vs low-level AES calls),
// operation direction, chunking pattern, in-place operation, and context
// copying. |input_op| of kBoth expands to both kEncrypt and kDecrypt.
static void TestCipher(const EVP_CIPHER *cipher, Operation input_op,
                       bool padding, bssl::Span<const uint8_t> key,
                       bssl::Span<const uint8_t> iv,
                       bssl::Span<const uint8_t> plaintext,
                       bssl::Span<const uint8_t> ciphertext,
                       bssl::Span<const uint8_t> aad,
                       bssl::Span<const uint8_t> tag) {
  size_t block_size = EVP_CIPHER_block_size(cipher);
  std::vector<Operation> ops;
  if (input_op == Operation::kBoth) {
    ops = {Operation::kEncrypt, Operation::kDecrypt};
  } else {
    ops = {input_op};
  }
  for (Operation op : ops) {
    SCOPED_TRACE(OperationToString(op));
    // Zero indicates a single-shot API.
    static const size_t kChunkSizes[] = {0, 1, 2, 5, 7, 8, 9, 15, 16,
                                         17, 31, 32, 33, 63, 64, 65, 512};
    for (size_t chunk_size : kChunkSizes) {
      SCOPED_TRACE(chunk_size);
      // Skip chunk sizes larger than every input; they are equivalent to the
      // single-shot case already covered by zero.
      if (chunk_size > plaintext.size() && chunk_size > ciphertext.size() &&
          chunk_size > aad.size()) {
        continue;
      }
      for (bool in_place : {false, true}) {
        SCOPED_TRACE(in_place);
        for (bool copy : {false, true}) {
          SCOPED_TRACE(copy);
          TestCipherAPI(cipher, op, padding, copy, in_place,
                        /*use_evp_cipher=*/false, chunk_size, key, iv,
                        plaintext, ciphertext, aad, tag);
          // |EVP_Cipher| requires block-aligned chunks and no padding; note a
          // |chunk_size| of zero (single-shot) always satisfies the alignment
          // condition.
          if (!padding && chunk_size % block_size == 0) {
            TestCipherAPI(cipher, op, padding, copy, in_place,
                          /*use_evp_cipher=*/true, chunk_size, key, iv,
                          plaintext, ciphertext, aad, tag);
          }
          if (!padding) {
            TestLowLevelAPI(cipher, op, in_place, chunk_size, key, iv, plaintext,
                            ciphertext);
          }
        }
      }
    }
  }
}
462
// CipherFileTest parses one FileTest vector (Cipher, Key, IV, Plaintext,
// Ciphertext, plus AAD/Tag for GCM and an optional Operation attribute) and
// dispatches it to |TestCipher| with padding disabled.
static void CipherFileTest(FileTest *t) {
  std::string cipher_str;
  ASSERT_TRUE(t->GetAttribute(&cipher_str, "Cipher"));
  const EVP_CIPHER *cipher = GetCipher(cipher_str);
  ASSERT_TRUE(cipher);

  std::vector<uint8_t> key, iv, plaintext, ciphertext, aad, tag;
  ASSERT_TRUE(t->GetBytes(&key, "Key"));
  ASSERT_TRUE(t->GetBytes(&plaintext, "Plaintext"));
  ASSERT_TRUE(t->GetBytes(&ciphertext, "Ciphertext"));
  // Ciphers without an IV (e.g. ECB, RC4) have no IV attribute in the file.
  if (EVP_CIPHER_iv_length(cipher) > 0) {
    ASSERT_TRUE(t->GetBytes(&iv, "IV"));
  }
  if (EVP_CIPHER_mode(cipher) == EVP_CIPH_GCM_MODE) {
    ASSERT_TRUE(t->GetBytes(&aad, "AAD"));
    ASSERT_TRUE(t->GetBytes(&tag, "Tag"));
  }

  // Vectors default to testing both directions unless an Operation attribute
  // restricts them. Both our spelling and the all-caps CAVP spelling are
  // accepted.
  Operation op = Operation::kBoth;
  if (t->HasAttribute("Operation")) {
    const std::string &str = t->GetAttributeOrDie("Operation");
    if (str == "Encrypt" || str == "ENCRYPT") {
      op = Operation::kEncrypt;
    } else if (str == "Decrypt" || str == "DECRYPT") {
      op = Operation::kDecrypt;
    } else if (str == "InvalidDecrypt") {
      op = Operation::kInvalidDecrypt;
    } else {
      FAIL() << "Unknown operation: " << str;
    }
  }

  TestCipher(cipher, op, /*padding=*/false, key, iv, plaintext, ciphertext, aad,
             tag);
}
498
// Runs the main cipher test vector file through |CipherFileTest|.
TEST(CipherTest, TestVectors) {
  FileTestGTest("crypto/cipher_extra/test/cipher_tests.txt", CipherFileTest);
}
David Benjamin4690bb52015-05-10 03:10:07 -0400502
// The following tests run NIST CAVP (Cryptographic Algorithm Validation
// Program) vector files, one per cipher/mode combination, through
// |CipherFileTest|.

TEST(CipherTest, CAVP_AES_128_CBC) {
  FileTestGTest("crypto/cipher_extra/test/nist_cavp/aes_128_cbc.txt",
                CipherFileTest);
}

TEST(CipherTest, CAVP_AES_128_CTR) {
  FileTestGTest("crypto/cipher_extra/test/nist_cavp/aes_128_ctr.txt",
                CipherFileTest);
}

TEST(CipherTest, CAVP_AES_192_CBC) {
  FileTestGTest("crypto/cipher_extra/test/nist_cavp/aes_192_cbc.txt",
                CipherFileTest);
}

TEST(CipherTest, CAVP_AES_192_CTR) {
  FileTestGTest("crypto/cipher_extra/test/nist_cavp/aes_192_ctr.txt",
                CipherFileTest);
}

TEST(CipherTest, CAVP_AES_256_CBC) {
  FileTestGTest("crypto/cipher_extra/test/nist_cavp/aes_256_cbc.txt",
                CipherFileTest);
}

TEST(CipherTest, CAVP_AES_256_CTR) {
  FileTestGTest("crypto/cipher_extra/test/nist_cavp/aes_256_ctr.txt",
                CipherFileTest);
}

TEST(CipherTest, CAVP_TDES_CBC) {
  FileTestGTest("crypto/cipher_extra/test/nist_cavp/tdes_cbc.txt",
                CipherFileTest);
}

TEST(CipherTest, CAVP_TDES_ECB) {
  FileTestGTest("crypto/cipher_extra/test/nist_cavp/tdes_ecb.txt",
                CipherFileTest);
}
David Benjamin8e75ae42018-05-01 17:01:04 -0400542
// Runs the Wycheproof AES-CBC-PKCS5 vectors. Unlike the other files, these
// enable padding, and invalid vectors are expected to fail decryption.
TEST(CipherTest, WycheproofAESCBC) {
  FileTestGTest("third_party/wycheproof_testvectors/aes_cbc_pkcs5_test.txt",
                [](FileTest *t) {
                  t->IgnoreInstruction("type");
                  t->IgnoreInstruction("ivSize");

                  // Select the cipher from the file's keySize instruction.
                  std::string key_size;
                  ASSERT_TRUE(t->GetInstruction(&key_size, "keySize"));
                  const EVP_CIPHER *cipher;
                  switch (atoi(key_size.c_str())) {
                    case 128:
                      cipher = EVP_aes_128_cbc();
                      break;
                    case 192:
                      cipher = EVP_aes_192_cbc();
                      break;
                    case 256:
                      cipher = EVP_aes_256_cbc();
                      break;
                    default:
                      FAIL() << "Unsupported key size: " << key_size;
                  }

                  std::vector<uint8_t> key, iv, msg, ct;
                  ASSERT_TRUE(t->GetBytes(&key, "key"));
                  ASSERT_TRUE(t->GetBytes(&iv, "iv"));
                  ASSERT_TRUE(t->GetBytes(&msg, "msg"));
                  ASSERT_TRUE(t->GetBytes(&ct, "ct"));
                  WycheproofResult result;
                  ASSERT_TRUE(GetWycheproofResult(t, &result));
                  // Invalid vectors (e.g. bad padding) must fail to decrypt.
                  TestCipher(cipher,
                             result.IsValid() ? Operation::kBoth
                                              : Operation::kInvalidDecrypt,
                             /*padding=*/true, key, iv, msg, ct, /*aad=*/{},
                             /*tag=*/{});
                });
}
David Benjamin669ffe62021-04-07 16:17:50 -0400580
// Checks |EVP_sha1_final_with_secret_suffix| computes the same digest as a
// plain |SHA1| over prefix + secret-length suffix, for all (prefix, max_len,
// len) combinations sampled in kSkip increments, under constant-time
// instrumentation (|CONSTTIME_SECRET|).
TEST(CipherTest, SHA1WithSecretSuffix) {
  uint8_t buf[SHA_CBLOCK * 4];
  RAND_bytes(buf, sizeof(buf));
  // Hashing should run in time independent of the bytes.
  CONSTTIME_SECRET(buf, sizeof(buf));

  // Exhaustively testing interesting cases in this function is cubic in the
  // block size, so we test in 3-byte increments.
  constexpr size_t kSkip = 3;
  // This value should be less than 8 to test the edge case when the 8-byte
  // length wraps to the next block.
  static_assert(kSkip < 8, "kSkip is too large");

  // |EVP_sha1_final_with_secret_suffix| is sensitive to the public length of
  // the partial block previously hashed. In TLS, this is the HMAC prefix, the
  // header, and the public minimum padding length.
  for (size_t prefix = 0; prefix < SHA_CBLOCK; prefix += kSkip) {
    SCOPED_TRACE(prefix);
    // The first block is treated differently, so we run with up to three
    // blocks of length variability.
    for (size_t max_len = 0; max_len < 3 * SHA_CBLOCK; max_len += kSkip) {
      SCOPED_TRACE(max_len);
      for (size_t len = 0; len <= max_len; len += kSkip) {
        SCOPED_TRACE(len);

        // Reference digest, declassified so it may be compared below.
        uint8_t expected[SHA_DIGEST_LENGTH];
        SHA1(buf, prefix + len, expected);
        CONSTTIME_DECLASSIFY(expected, sizeof(expected));

        // Make a copy of the secret length to avoid interfering with the loop.
        size_t secret_len = len;
        CONSTTIME_SECRET(&secret_len, sizeof(secret_len));

        SHA_CTX ctx;
        SHA1_Init(&ctx);
        SHA1_Update(&ctx, buf, prefix);
        uint8_t computed[SHA_DIGEST_LENGTH];
        ASSERT_TRUE(EVP_sha1_final_with_secret_suffix(
            &ctx, computed, buf + prefix, secret_len, max_len));

        CONSTTIME_DECLASSIFY(computed, sizeof(computed));
        EXPECT_EQ(Bytes(expected), Bytes(computed));
      }
    }
  }
}
David Benjamin03cae7a2021-09-24 12:25:41 -0400627
// SHA-256 analogue of the SHA1WithSecretSuffix test: checks
// |EVP_sha256_final_with_secret_suffix| against a plain |SHA256| over
// prefix + secret-length suffix, sampled in kSkip increments under
// constant-time instrumentation.
TEST(CipherTest, SHA256WithSecretSuffix) {
  uint8_t buf[SHA256_CBLOCK * 4];
  RAND_bytes(buf, sizeof(buf));
  // Hashing should run in time independent of the bytes.
  CONSTTIME_SECRET(buf, sizeof(buf));

  // Exhaustively testing interesting cases in this function is cubic in the
  // block size, so we test in 3-byte increments.
  constexpr size_t kSkip = 3;
  // This value should be less than 8 to test the edge case when the 8-byte
  // length wraps to the next block.
  static_assert(kSkip < 8, "kSkip is too large");

  // |EVP_sha256_final_with_secret_suffix| is sensitive to the public length of
  // the partial block previously hashed. In TLS, this is the HMAC prefix, the
  // header, and the public minimum padding length.
  for (size_t prefix = 0; prefix < SHA256_CBLOCK; prefix += kSkip) {
    SCOPED_TRACE(prefix);
    // The first block is treated differently, so we run with up to three
    // blocks of length variability.
    for (size_t max_len = 0; max_len < 3 * SHA256_CBLOCK; max_len += kSkip) {
      SCOPED_TRACE(max_len);
      for (size_t len = 0; len <= max_len; len += kSkip) {
        SCOPED_TRACE(len);

        // Reference digest, declassified so it may be compared below.
        uint8_t expected[SHA256_DIGEST_LENGTH];
        SHA256(buf, prefix + len, expected);
        CONSTTIME_DECLASSIFY(expected, sizeof(expected));

        // Make a copy of the secret length to avoid interfering with the loop.
        size_t secret_len = len;
        CONSTTIME_SECRET(&secret_len, sizeof(secret_len));

        SHA256_CTX ctx;
        SHA256_Init(&ctx);
        SHA256_Update(&ctx, buf, prefix);
        uint8_t computed[SHA256_DIGEST_LENGTH];
        ASSERT_TRUE(EVP_sha256_final_with_secret_suffix(
            &ctx, computed, buf + prefix, secret_len, max_len));

        CONSTTIME_DECLASSIFY(computed, sizeof(computed));
        EXPECT_EQ(Bytes(expected), Bytes(computed));
      }
    }
  }
}
674
// Checks the cipher lookup functions: |EVP_get_cipherbynid| and the
// case-insensitive |EVP_get_cipherbyname|, including the "3des" alias.
TEST(CipherTest, GetCipher) {
  const EVP_CIPHER *cipher = EVP_get_cipherbynid(NID_aes_128_gcm);
  ASSERT_TRUE(cipher);
  EXPECT_EQ(NID_aes_128_gcm, EVP_CIPHER_nid(cipher));

  cipher = EVP_get_cipherbyname("aes-128-gcm");
  ASSERT_TRUE(cipher);
  EXPECT_EQ(NID_aes_128_gcm, EVP_CIPHER_nid(cipher));

  // Lookup by name is case-insensitive.
  cipher = EVP_get_cipherbyname("AES-128-GCM");
  ASSERT_TRUE(cipher);
  EXPECT_EQ(NID_aes_128_gcm, EVP_CIPHER_nid(cipher));

  // We support a tcpdump-specific alias for 3DES.
  cipher = EVP_get_cipherbyname("3des");
  ASSERT_TRUE(cipher);
  EXPECT_EQ(NID_des_ede3_cbc, EVP_CIPHER_nid(cipher));
}
David Benjamin361647e2023-11-05 21:33:13 +0100693
// Test the AES-GCM EVP_CIPHER's internal IV management APIs. OpenSSH uses these
// APIs.
TEST(CipherTest, GCMIncrementingIV) {
  const EVP_CIPHER *kCipher = EVP_aes_128_gcm();
  static const uint8_t kKey[16] = {0, 1, 2, 3, 4, 5, 6, 7,
                                   8, 9, 10, 11, 12, 13, 14, 15};
  static const uint8_t kInput[] = {'h', 'e', 'l', 'l', 'o'};

  // Checks that |ctx|'s current (internally-managed) IV equals |iv| by running
  // one encryption or decryption (per |enc|) of |kInput| through |ctx| and
  // comparing the result against a reference computation that sets |iv|
  // explicitly. Uses gtest fatal assertions, so callers wrap invocations in
  // ASSERT_NO_FATAL_FAILURE.
  auto expect_iv = [&](EVP_CIPHER_CTX *ctx, bssl::Span<const uint8_t> iv,
                       bool enc) {
    // Make a reference ciphertext.
    bssl::ScopedEVP_CIPHER_CTX ref;
    ASSERT_TRUE(EVP_EncryptInit_ex(ref.get(), kCipher, /*impl=*/nullptr,
                                   kKey, /*iv=*/nullptr));
    // The IV length must be configured before the IV itself.
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ref.get(), EVP_CTRL_AEAD_SET_IVLEN,
                                    static_cast<int>(iv.size()), nullptr));
    ASSERT_TRUE(EVP_EncryptInit_ex(ref.get(), /*cipher=*/nullptr,
                                   /*impl=*/nullptr, /*key=*/nullptr,
                                   iv.data()));
    uint8_t ciphertext[sizeof(kInput)];
    int ciphertext_len;
    ASSERT_TRUE(EVP_EncryptUpdate(ref.get(), ciphertext, &ciphertext_len,
                                  kInput, sizeof(kInput)));
    int extra_len;
    ASSERT_TRUE(EVP_EncryptFinal_ex(ref.get(), nullptr, &extra_len));
    ASSERT_EQ(extra_len, 0);
    uint8_t tag[16];
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ref.get(), EVP_CTRL_AEAD_GET_TAG,
                                    sizeof(tag), tag));

    if (enc) {
      // Encrypt with |ctx| and expect the reference ciphertext and tag.
      uint8_t actual[sizeof(kInput)];
      int actual_len;
      ASSERT_TRUE(
          EVP_EncryptUpdate(ctx, actual, &actual_len, kInput, sizeof(kInput)));
      ASSERT_TRUE(EVP_EncryptFinal_ex(ctx, nullptr, &extra_len));
      ASSERT_EQ(extra_len, 0);
      EXPECT_EQ(Bytes(actual, actual_len), Bytes(ciphertext, ciphertext_len));
      uint8_t actual_tag[16];
      ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG,
                                      sizeof(actual_tag), actual_tag));
      EXPECT_EQ(Bytes(actual_tag), Bytes(tag));
    } else {
      // Decrypt the reference ciphertext with |ctx|; if the IVs match, the tag
      // verifies and the plaintext round-trips.
      ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, sizeof(tag),
                                      const_cast<uint8_t *>(tag)));
      uint8_t actual[sizeof(kInput)];
      int actual_len;
      ASSERT_TRUE(EVP_DecryptUpdate(ctx, actual, &actual_len, ciphertext,
                                    sizeof(ciphertext)));
      ASSERT_TRUE(EVP_DecryptFinal_ex(ctx, nullptr, &extra_len));
      ASSERT_EQ(extra_len, 0);
      EXPECT_EQ(Bytes(actual, actual_len), Bytes(kInput));
    }
  };

  {
    // Passing in a fixed IV length of -1 sets the whole IV, but then configures
    // |EVP_CIPHER_CTX| to increment the bottom 8 bytes of the IV.
    static const uint8_t kIV1[12] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12};
    static const uint8_t kIV2[12] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13};
    static const uint8_t kIV3[12] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 14};
    static const uint8_t kIV4[12] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 15};

    bssl::ScopedEVP_CIPHER_CTX ctx;
    ASSERT_TRUE(EVP_EncryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
                                   /*iv=*/nullptr));
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, -1,
                                    const_cast<uint8_t *>(kIV1)));

    // EVP_CTRL_GCM_IV_GEN both configures and returns the IV.
    uint8_t iv[12];
    ASSERT_TRUE(
        EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
    EXPECT_EQ(Bytes(iv), Bytes(kIV1));
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV1, /*enc=*/true));

    // Continuing to run EVP_CTRL_GCM_IV_GEN should increment the IV.
    ASSERT_TRUE(
        EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
    EXPECT_EQ(Bytes(iv), Bytes(kIV2));
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV2, /*enc=*/true));

    // Passing in a shorter length outputs the suffix portion.
    uint8_t suffix[8];
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN,
                                    sizeof(suffix), suffix));
    EXPECT_EQ(Bytes(suffix),
              Bytes(bssl::MakeConstSpan(kIV3).last(sizeof(suffix))));
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV3, /*enc=*/true));

    // A length of -1 returns the whole IV.
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, -1, iv));
    EXPECT_EQ(Bytes(iv), Bytes(kIV4));
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV4, /*enc=*/true));
  }

  {
    // Similar to the above, but for decrypting.
    static const uint8_t kIV1[12] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12};
    static const uint8_t kIV2[12] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13};

    bssl::ScopedEVP_CIPHER_CTX ctx;
    ASSERT_TRUE(EVP_DecryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
                                   /*iv=*/nullptr));
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, -1,
                                    const_cast<uint8_t *>(kIV1)));

    uint8_t iv[12];
    ASSERT_TRUE(
        EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
    EXPECT_EQ(Bytes(iv), Bytes(kIV1));
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV1, /*enc=*/false));

    ASSERT_TRUE(
        EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
    EXPECT_EQ(Bytes(iv), Bytes(kIV2));
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV2, /*enc=*/false));
  }

  {
    // Test that only the bottom 8 bytes are used as a counter.
    // kIV1 is all-ones, so incrementing wraps the low 8 bytes to zero (kIV2)
    // while the top 4 bytes stay fixed.
    static const uint8_t kIV1[12] = {0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
                                     0xff, 0xff, 0xff, 0xff, 0xff, 0xff};
    static const uint8_t kIV2[12] = {0xff, 0xff, 0xff, 0xff, 0x00, 0x00,
                                     0x00, 0x00, 0x00, 0x00, 0x00, 0x00};
    static const uint8_t kIV3[12] = {0xff, 0xff, 0xff, 0xff, 0x00, 0x00,
                                     0x00, 0x00, 0x00, 0x00, 0x00, 0x01};

    bssl::ScopedEVP_CIPHER_CTX ctx;
    ASSERT_TRUE(EVP_EncryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
                                   /*iv=*/nullptr));
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, -1,
                                    const_cast<uint8_t *>(kIV1)));

    uint8_t iv[12];
    ASSERT_TRUE(
        EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
    EXPECT_EQ(Bytes(iv), Bytes(kIV1));
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV1, /*enc=*/true));

    ASSERT_TRUE(
        EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
    EXPECT_EQ(Bytes(iv), Bytes(kIV2));
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV2, /*enc=*/true));

    ASSERT_TRUE(
        EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
    EXPECT_EQ(Bytes(iv), Bytes(kIV3));
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV3, /*enc=*/true));
  }

  {
    // Test with a longer IV length. The counter is still the bottom 8 bytes;
    // the fixed prefix grows to 8 bytes.
    static const uint8_t kIV1[16] = {0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
                                     0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
                                     0xff, 0xff, 0xff, 0xff};
    static const uint8_t kIV2[16] = {0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
                                     0xff, 0xff, 0x00, 0x00, 0x00, 0x00,
                                     0x00, 0x00, 0x00, 0x00};
    static const uint8_t kIV3[16] = {0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
                                     0xff, 0xff, 0x00, 0x00, 0x00, 0x00,
                                     0x00, 0x00, 0x00, 0x01};

    bssl::ScopedEVP_CIPHER_CTX ctx;
    ASSERT_TRUE(EVP_EncryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
                                   /*iv=*/nullptr));
    // The IV length must be extended before installing the fixed IV.
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IVLEN,
                                    sizeof(kIV1), nullptr));
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, -1,
                                    const_cast<uint8_t *>(kIV1)));

    uint8_t iv[16];
    ASSERT_TRUE(
        EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
    EXPECT_EQ(Bytes(iv), Bytes(kIV1));
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV1, /*enc=*/true));

    ASSERT_TRUE(
        EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
    EXPECT_EQ(Bytes(iv), Bytes(kIV2));
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV2, /*enc=*/true));

    ASSERT_TRUE(
        EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
    EXPECT_EQ(Bytes(iv), Bytes(kIV3));
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV3, /*enc=*/true));
  }

  {
    // When decrypting, callers are expected to configure the fixed half and
    // invocation half separately. The two will get stitched together into the
    // final IV.
    const uint8_t kIV[12] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12};

    bssl::ScopedEVP_CIPHER_CTX ctx;
    ASSERT_TRUE(EVP_DecryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
                                   /*iv=*/nullptr));
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 4,
                                    const_cast<uint8_t *>(kIV)));
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_SET_IV_INV, 8,
                                    const_cast<uint8_t *>(kIV + 4)));
    // EVP_CTRL_GCM_SET_IV_INV is sufficient to configure the IV. There is no
    // need to call EVP_CTRL_GCM_IV_GEN.
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV, /*enc=*/false));
  }

  {
    // Stitching together a decryption IV that exceeds the standard IV length.
    const uint8_t kIV[16] = {1, 2, 3, 4, 5, 6, 7, 8,
                             9, 10, 11, 12, 13, 14, 15, 16};

    bssl::ScopedEVP_CIPHER_CTX ctx;
    ASSERT_TRUE(EVP_DecryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
                                   /*iv=*/nullptr));
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IVLEN,
                                    sizeof(kIV), nullptr));

    // 4-byte fixed half, 12-byte invocation half.
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 4,
                                    const_cast<uint8_t *>(kIV)));
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_SET_IV_INV, 12,
                                    const_cast<uint8_t *>(kIV + 4)));
    // EVP_CTRL_GCM_SET_IV_INV is sufficient to configure the IV. There is no
    // need to call EVP_CTRL_GCM_IV_GEN.
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV, /*enc=*/false));
  }

  {
    // Fixed IVs must be at least 4 bytes and admit at least an 8 byte counter.
    const uint8_t kIV[16] = {1, 2, 3, 4, 5, 6, 7, 8,
                             9, 10, 11, 12, 13, 14, 15, 16};

    bssl::ScopedEVP_CIPHER_CTX ctx;
    ASSERT_TRUE(EVP_DecryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
                                   /*iv=*/nullptr));

    // This means the default IV length only allows a 4/8 split.
    EXPECT_FALSE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 0,
                                     const_cast<uint8_t *>(kIV)));
    EXPECT_FALSE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 3,
                                     const_cast<uint8_t *>(kIV)));
    EXPECT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 4,
                                    const_cast<uint8_t *>(kIV)));
    EXPECT_FALSE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 5,
                                     const_cast<uint8_t *>(kIV)));
    EXPECT_FALSE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 16,
                                     const_cast<uint8_t *>(kIV)));

    // A longer IV allows a wider range.
    ASSERT_TRUE(
        EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IVLEN, 16, nullptr));
    EXPECT_FALSE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 0,
                                     const_cast<uint8_t *>(kIV)));
    EXPECT_FALSE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 3,
                                     const_cast<uint8_t *>(kIV)));
    EXPECT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 4,
                                    const_cast<uint8_t *>(kIV)));
    EXPECT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 6,
                                    const_cast<uint8_t *>(kIV)));
    EXPECT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 8,
                                    const_cast<uint8_t *>(kIV)));
    EXPECT_FALSE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 9,
                                     const_cast<uint8_t *>(kIV)));
    EXPECT_FALSE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 16,
                                     const_cast<uint8_t *>(kIV)));
  }

  {
    // When encrypting, setting a fixed IV randomizes the counter portion.
    const uint8_t kFixedIV[4] = {1, 2, 3, 4};
    bssl::ScopedEVP_CIPHER_CTX ctx;
    ASSERT_TRUE(EVP_EncryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
                                   /*iv=*/nullptr));
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED,
                                    sizeof(kFixedIV),
                                    const_cast<uint8_t *>(kFixedIV)));
    // Read back the randomized 8-byte counter portion.
    uint8_t counter[8];
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN,
                                    sizeof(counter), counter));

    // The effective IV is the fixed prefix followed by the counter.
    uint8_t iv[12];
    memcpy(iv, kFixedIV, sizeof(kFixedIV));
    memcpy(iv + sizeof(kFixedIV), counter, sizeof(counter));
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), iv, /*enc=*/true));

    // The counter continues to act as a counter.
    uint8_t counter2[8];
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN,
                                    sizeof(counter2), counter2));
    // The counter is interpreted as a big-endian 64-bit integer.
    EXPECT_EQ(CRYPTO_load_u64_be(counter2), CRYPTO_load_u64_be(counter) + 1);
    memcpy(iv + sizeof(kFixedIV), counter2, sizeof(counter2));
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), iv, /*enc=*/true));
  }

  {
    // Same as above, but with a larger IV.
    const uint8_t kFixedIV[8] = {1, 2, 3, 4, 5, 6, 7, 8};
    bssl::ScopedEVP_CIPHER_CTX ctx;
    ASSERT_TRUE(EVP_EncryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
                                   /*iv=*/nullptr));
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IVLEN,
                                    sizeof(kFixedIV) + 8, nullptr));
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED,
                                    sizeof(kFixedIV),
                                    const_cast<uint8_t *>(kFixedIV)));
    uint8_t counter[8];
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN,
                                    sizeof(counter), counter));

    uint8_t iv[16];
    memcpy(iv, kFixedIV, sizeof(kFixedIV));
    memcpy(iv + sizeof(kFixedIV), counter, sizeof(counter));
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), iv, /*enc=*/true));

    // The counter continues to act as a counter.
    uint8_t counter2[8];
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN,
                                    sizeof(counter2), counter2));
    EXPECT_EQ(CRYPTO_load_u64_be(counter2), CRYPTO_load_u64_be(counter) + 1);
    memcpy(iv + sizeof(kFixedIV), counter2, sizeof(counter2));
    ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), iv, /*enc=*/true));
  }
}