Fix Unicode strings for C++20.

u8 string literals in C++20 have type char8_t instead of char; in order to
compile on both C++17 and C++20 we need to remove the u8 prefix.

Change-Id: I85d1a9d72d24e8fa96ca22b1d99be9982fee8fb5
Reviewed-on: https://boringssl-review.googlesource.com/c/boringssl/+/51065
Reviewed-by: David Benjamin <davidben@google.com>
Commit-Queue: David Benjamin <davidben@google.com>
diff --git a/crypto/pkcs8/pkcs12_test.cc b/crypto/pkcs8/pkcs12_test.cc
index e67630d..958bd8d 100644
--- a/crypto/pkcs8/pkcs12_test.cc
+++ b/crypto/pkcs8/pkcs12_test.cc
@@ -34,7 +34,7 @@
 static const char kPassword[] = "foo";
 
 // kUnicodePassword is the password for unicode_password.p12
-static const char kUnicodePassword[] = u8"Hello, 世界";
+static const char kUnicodePassword[] = "Hello, 世界";
 
 static bssl::Span<const uint8_t> StringToBytes(const std::string &str) {
   return bssl::MakeConstSpan(reinterpret_cast<const uint8_t *>(str.data()),
@@ -391,7 +391,7 @@
                 {bssl::Span<const uint8_t>(kTestCert2)}, 0, 0, 0, 0);
 
   // Test some Unicode.
-  TestRoundTrip(kPassword, u8"Hello, 世界!",
+  TestRoundTrip(kPassword, "Hello, 世界!",
                 bssl::Span<const uint8_t>(kTestKey),
                 bssl::Span<const uint8_t>(kTestCert),
                 {bssl::Span<const uint8_t>(kTestCert2)}, 0, 0, 0, 0);