author     David Benjamin <davidben@chromium.org>    2014-08-20 14:19:54 -0400
committer  Adam Langley <agl@google.com>             2014-08-20 20:53:31 +0000
commit     5213df4e9ed9ca130c40f142893cb91f2e18eee1 (patch)
tree       cd317a619f5a9efa8c575b4a38e068164db7d5dd
parent     92909a6206037f8266d93979028215a8b7f20614 (diff)
download   src-5213df4e9ed9ca130c40f142893cb91f2e18eee1.tar.gz
Prefer AES-GCM when hardware support is available.
BUG=396787

Change-Id: I72ddb0ec3c71dbc70054403163930cbbde4b6009
Reviewed-on: https://boringssl-review.googlesource.com/1581
Reviewed-by: Adam Langley <agl@google.com>
-rw-r--r--  crypto/cipher/e_aes.c      8
-rw-r--r--  crypto/modes/gcm.c        14
-rw-r--r--  crypto/modes/internal.h    7
-rw-r--r--  include/openssl/aead.h     4
-rw-r--r--  include/openssl/cpu.h      5
-rw-r--r--  ssl/ssl_ciph.c            28
6 files changed, 53 insertions(+), 13 deletions(-)
diff --git a/crypto/cipher/e_aes.c b/crypto/cipher/e_aes.c
index 03f9e28..15a886c 100644
--- a/crypto/cipher/e_aes.c
+++ b/crypto/cipher/e_aes.c
@@ -1271,3 +1271,11 @@ static const EVP_AEAD aead_aes_256_key_wrap = {
const EVP_AEAD *EVP_aead_aes_128_key_wrap() { return &aead_aes_128_key_wrap; }
const EVP_AEAD *EVP_aead_aes_256_key_wrap() { return &aead_aes_256_key_wrap; }
+
+int EVP_has_aes_hardware(void) {
+#if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
+ return aesni_capable() && crypto_gcm_clmul_enabled();
+#else
+ return 0;
+#endif
+}
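
The new EVP_has_aes_hardware function gives callers a single question to ask: will AES-GCM be both fast and constant-time on this machine? As a rough illustration of how code outside the library could use it, here is a hypothetical helper (not part of this patch) that mirrors the preference the commit gives the TLS cipher list; it assumes only the existing EVP_aead_aes_128_gcm and EVP_aead_chacha20_poly1305 AEADs from <openssl/aead.h>:

    #include <openssl/aead.h>

    /* choose_bulk_aead is a hypothetical helper: pick AES-GCM when AES-NI and
     * CLMUL are available, and ChaCha20-Poly1305 (constant-time in software)
     * otherwise. */
    static const EVP_AEAD *choose_bulk_aead(void) {
      if (EVP_has_aes_hardware()) {
        return EVP_aead_aes_128_gcm();
      }
      return EVP_aead_chacha20_poly1305();
    }
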
diff --git a/crypto/modes/gcm.c b/crypto/modes/gcm.c
index 982f823..065c457 100644
--- a/crypto/modes/gcm.c
+++ b/crypto/modes/gcm.c
@@ -406,8 +406,7 @@ void CRYPTO_gcm128_init(GCM128_CONTEXT *ctx, void *key, block128_f block) {
}
#if defined(GHASH_ASM_X86_OR_64)
- if (OPENSSL_ia32cap_P[0] & (1 << 24) && /* check FXSR bit */
- OPENSSL_ia32cap_P[1] & (1 << 1)) { /* check PCLMULQDQ bit */
+ if (crypto_gcm_clmul_enabled()) {
if (((OPENSSL_ia32cap_P[1] >> 22) & 0x41) == 0x41) { /* AVX+MOVBE */
gcm_init_avx(ctx->Htable, ctx->H.u);
ctx->gmult = gcm_gmult_avx;
@@ -1189,3 +1188,14 @@ void CRYPTO_gcm128_release(GCM128_CONTEXT *ctx) {
OPENSSL_free(ctx);
}
}
+
+#if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
+int crypto_gcm_clmul_enabled(void) {
+#ifdef GHASH_ASM
+ return OPENSSL_ia32cap_P[0] & (1 << 24) && /* check FXSR bit */
+ OPENSSL_ia32cap_P[1] & (1 << 1); /* check PCLMULQDQ bit */
+#else
+ return 0;
+#endif
+}
+#endif
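
crypto_gcm_clmul_enabled reads the cached CPUID words in OPENSSL_ia32cap_P: index 0 holds EDX and index 1 holds ECX from CPUID with EAX = 1, so bit 24 of index 0 is FXSR and bit 1 of index 1 is PCLMULQDQ. A minimal standalone sketch of the same two checks, querying CPUID directly rather than through BoringSSL (it assumes a GCC/Clang x86 toolchain with <cpuid.h> and is not code from this patch):

    #include <cpuid.h>

    /* clmul_available mirrors crypto_gcm_clmul_enabled: FXSR (EDX bit 24) and
     * PCLMULQDQ (ECX bit 1) from CPUID leaf 1. */
    static int clmul_available(void) {
      unsigned int eax, ebx, ecx, edx;
      if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx)) {
        return 0;
      }
      return (edx & (1u << 24)) != 0 &&  /* FXSR */
             (ecx & (1u << 1)) != 0;     /* PCLMULQDQ */
    }
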
diff --git a/crypto/modes/internal.h b/crypto/modes/internal.h
index 4fa0ec6..4659eab 100644
--- a/crypto/modes/internal.h
+++ b/crypto/modes/internal.h
@@ -194,6 +194,13 @@ struct ccm128_context {
#endif
+#if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
+/* crypto_gcm_clmul_enabled returns one if the CLMUL implementation of GCM is
+ * used. */
+int crypto_gcm_clmul_enabled(void);
+#endif
+
+
#if defined(__cplusplus)
} /* extern C */
#endif
diff --git a/include/openssl/aead.h b/include/openssl/aead.h
index 7e4682c..6f66e9c 100644
--- a/include/openssl/aead.h
+++ b/include/openssl/aead.h
@@ -115,6 +115,10 @@ OPENSSL_EXPORT const EVP_AEAD *EVP_aead_aes_128_key_wrap();
* See |EVP_aead_aes_128_key_wrap| for details. */
OPENSSL_EXPORT const EVP_AEAD *EVP_aead_aes_256_key_wrap();
+/* EVP_has_aes_hardware returns one if there is hardware support for fast and
+ * constant-time AES-GCM. */
+OPENSSL_EXPORT int EVP_has_aes_hardware(void);
+
/* TLS specific AEAD algorithms.
*
diff --git a/include/openssl/cpu.h b/include/openssl/cpu.h
index bec157f..3cc1e5e 100644
--- a/include/openssl/cpu.h
+++ b/include/openssl/cpu.h
@@ -79,7 +79,10 @@ extern "C" {
* Index 1:
* ECX for CPUID where EAX = 1
* Index 2:
- * EBX for CPUID where EAX = 7 */
+ * EBX for CPUID where EAX = 7
+ *
+ * Note: the CPUID bits are pre-adjusted for the OSXSAVE bit and the YMM and XMM
+ * bits in XCR0, so it is not necessary to check those. */
extern uint32_t OPENSSL_ia32cap_P[4];
#endif
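
Given the layout documented above, a feature bit can be tested straight from the cached array. For example, the AES-NI flag is ECX bit 25 for CPUID EAX = 1, i.e. bit 25 of OPENSSL_ia32cap_P[1]. A hedged sketch for illustration only (real callers should prefer the new EVP_has_aes_hardware):

    #include <openssl/cpu.h>

    /* have_aesni is an illustrative check of the AES-NI feature bit.  It reads
     * the cached CPUID ECX word for leaf 1 (OPENSSL_ia32cap_P[1], bit 25). */
    static int have_aesni(void) {
    #if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
      return (OPENSSL_ia32cap_P[1] & (1u << 25)) != 0;
    #else
      return 0;
    #endif
    }
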
diff --git a/ssl/ssl_ciph.c b/ssl/ssl_ciph.c
index d9a4def..0caed0b 100644
--- a/ssl/ssl_ciph.c
+++ b/ssl/ssl_ciph.c
@@ -1193,16 +1193,24 @@ STACK_OF(SSL_CIPHER) *ssl_create_cipher_list(const SSL_METHOD *ssl_method,
ssl_cipher_apply_rule(0, SSL_kEECDH, 0, 0, 0, 0, 0, CIPHER_ADD, -1, 0, &head, &tail);
ssl_cipher_apply_rule(0, SSL_kEECDH, 0, 0, 0, 0, 0, CIPHER_DEL, -1, 0, &head, &tail);
- /* Order the bulk ciphers.
- * 1. CHACHA20_POLY1305.
- * 2. AES_256_GCM and AES_128_GCM.
- * 3. Legacy non-AEAD ciphers. AES_256_CBC, AES-128_CBC, RC4_128_SHA,
- * RC4_128_MD5, 3DES_EDE_CBC_SHA.
- * TODO(davidben): Prefer AES_GCM over CHACHA20 if there is hardware
- * support. */
- ssl_cipher_apply_rule(0, 0, 0, SSL_CHACHA20POLY1305, 0, 0, 0, CIPHER_ADD, -1, 0, &head, &tail);
- ssl_cipher_apply_rule(0, 0, 0, SSL_AES256GCM, 0, 0, 0, CIPHER_ADD, -1, 0, &head, &tail);
- ssl_cipher_apply_rule(0, 0, 0, SSL_AES128GCM, 0, 0, 0, CIPHER_ADD, -1, 0, &head, &tail);
+ /* Order the bulk ciphers. First the preferred AEAD ciphers. We prefer
+ * CHACHA20 unless there is hardware support for fast and constant-time
+ * AES_GCM. */
+ if (EVP_has_aes_hardware())
+ {
+ ssl_cipher_apply_rule(0, 0, 0, SSL_AES256GCM, 0, 0, 0, CIPHER_ADD, -1, 0, &head, &tail);
+ ssl_cipher_apply_rule(0, 0, 0, SSL_AES128GCM, 0, 0, 0, CIPHER_ADD, -1, 0, &head, &tail);
+ ssl_cipher_apply_rule(0, 0, 0, SSL_CHACHA20POLY1305, 0, 0, 0, CIPHER_ADD, -1, 0, &head, &tail);
+ }
+ else
+ {
+ ssl_cipher_apply_rule(0, 0, 0, SSL_CHACHA20POLY1305, 0, 0, 0, CIPHER_ADD, -1, 0, &head, &tail);
+ ssl_cipher_apply_rule(0, 0, 0, SSL_AES256GCM, 0, 0, 0, CIPHER_ADD, -1, 0, &head, &tail);
+ ssl_cipher_apply_rule(0, 0, 0, SSL_AES128GCM, 0, 0, 0, CIPHER_ADD, -1, 0, &head, &tail);
+ }
+
+ /* Then the legacy non-AEAD ciphers: AES_256_CBC, AES_128_CBC,
+ * RC4_128_SHA, RC4_128_MD5, 3DES_EDE_CBC_SHA. */
ssl_cipher_apply_rule(0, 0, 0, SSL_AES256, 0, 0, 0, CIPHER_ADD, -1, 0, &head, &tail);
ssl_cipher_apply_rule(0, 0, 0, SSL_AES128, 0, 0, 0, CIPHER_ADD, -1, 0, &head, &tail);
ssl_cipher_apply_rule(0, 0, 0, SSL_RC4, ~SSL_MD5, 0, 0, CIPHER_ADD, -1, 0, &head, &tail);