Diffstat (limited to 'src/crypto/fipsmodule/cipher/e_aes.c')
-rw-r--r--   src/crypto/fipsmodule/cipher/e_aes.c   70
1 file changed, 35 insertions, 35 deletions
diff --git a/src/crypto/fipsmodule/cipher/e_aes.c b/src/crypto/fipsmodule/cipher/e_aes.c
index 2c6fc417..bd9847ce 100644
--- a/src/crypto/fipsmodule/cipher/e_aes.c
+++ b/src/crypto/fipsmodule/cipher/e_aes.c
@@ -68,7 +68,7 @@
#endif
-OPENSSL_MSVC_PRAGMA(warning(disable: 4702)) /* Unreachable code. */
+OPENSSL_MSVC_PRAGMA(warning(disable: 4702)) // Unreachable code.
typedef struct {
union {
@@ -86,14 +86,14 @@ typedef struct {
union {
double align;
AES_KEY ks;
- } ks; /* AES key schedule to use */
- int key_set; /* Set if key initialised */
- int iv_set; /* Set if an iv is set */
+ } ks; // AES key schedule to use
+ int key_set; // Set if key initialised
+ int iv_set; // Set if an iv is set
GCM128_CONTEXT gcm;
- uint8_t *iv; /* Temporary IV store */
- int ivlen; /* IV length */
+ uint8_t *iv; // Temporary IV store
+ int ivlen; // IV length
int taglen;
- int iv_gen; /* It is OK to generate IVs */
+ int iv_gen; // It is OK to generate IVs
ctr128_f ctr;
} EVP_AES_GCM_CTX;
@@ -125,8 +125,8 @@ static char bsaes_capable(void) {
#if defined(BSAES)
-/* On platforms where BSAES gets defined (just above), then these functions are
- * provided by asm. */
+// On platforms where BSAES gets defined (just above), then these functions are
+// provided by asm.
void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
const AES_KEY *key, uint8_t ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
@@ -136,8 +136,8 @@ static char bsaes_capable(void) {
return 0;
}
-/* On other platforms, bsaes_capable() will always return false and so the
- * following will never be called. */
+// On other platforms, bsaes_capable() will always return false and so the
+// following will never be called.
static void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
const AES_KEY *key, uint8_t ivec[16], int enc) {
abort();
@@ -151,8 +151,8 @@ static void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out,
#endif
#if defined(VPAES)
-/* On platforms where VPAES gets defined (just above), then these functions are
- * provided by asm. */
+// On platforms where VPAES gets defined (just above), then these functions are
+// provided by asm.
int vpaes_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
int vpaes_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
@@ -166,8 +166,8 @@ static char vpaes_capable(void) {
return 0;
}
-/* On other platforms, vpaes_capable() will always return false and so the
- * following will never be called. */
+// On other platforms, vpaes_capable() will always return false and so the
+// following will never be called.
static int vpaes_set_encrypt_key(const uint8_t *userKey, int bits,
AES_KEY *key) {
abort();
@@ -203,8 +203,8 @@ void aesni_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
#else
-/* On other platforms, aesni_capable() will always return false and so the
- * following will never be called. */
+// On other platforms, aesni_capable() will always return false and so the
+// following will never be called.
static void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
abort();
}
@@ -404,7 +404,7 @@ static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
if (key) {
gctx->ctr =
aes_ctr_set_key(&gctx->ks.ks, &gctx->gcm, NULL, key, ctx->key_len);
- /* If we have an iv can set it directly, otherwise use saved IV. */
+ // If we have an iv can set it directly, otherwise use saved IV.
if (iv == NULL && gctx->iv_set) {
iv = gctx->iv;
}
@@ -414,7 +414,7 @@ static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
}
gctx->key_set = 1;
} else {
- /* If key set use IV, otherwise copy */
+ // If key set use IV, otherwise copy
if (gctx->key_set) {
CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
} else {
@@ -434,7 +434,7 @@ static void aes_gcm_cleanup(EVP_CIPHER_CTX *c) {
}
}
-/* increment counter (64-bit int) by 1 */
+// increment counter (64-bit int) by 1
static void ctr64_inc(uint8_t *counter) {
int n = 8;
uint8_t c;
@@ -467,7 +467,7 @@ static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
return 0;
}
- /* Allocate memory for IV if needed */
+ // Allocate memory for IV if needed
if (arg > EVP_MAX_IV_LENGTH && arg > gctx->ivlen) {
if (gctx->iv != c->iv) {
OPENSSL_free(gctx->iv);
@@ -496,14 +496,14 @@ static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
return 1;
case EVP_CTRL_GCM_SET_IV_FIXED:
- /* Special case: -1 length restores whole IV */
+ // Special case: -1 length restores whole IV
if (arg == -1) {
OPENSSL_memcpy(gctx->iv, ptr, gctx->ivlen);
gctx->iv_gen = 1;
return 1;
}
- /* Fixed field must be at least 4 bytes and invocation field
- * at least 8. */
+ // Fixed field must be at least 4 bytes and invocation field
+ // at least 8.
if (arg < 4 || (gctx->ivlen - arg) < 8) {
return 0;
}
@@ -525,9 +525,9 @@ static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
arg = gctx->ivlen;
}
OPENSSL_memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
- /* Invocation field will be at least 8 bytes in size and
- * so no need to check wrap around or increment more than
- * last 8 bytes. */
+ // Invocation field will be at least 8 bytes in size and
+ // so no need to check wrap around or increment more than
+ // last 8 bytes.
ctr64_inc(gctx->iv + gctx->ivlen - 8);
gctx->iv_set = 1;
return 1;
@@ -565,7 +565,7 @@ static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
size_t len) {
EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
- /* If not set up, return error */
+ // If not set up, return error
if (!gctx->key_set) {
return -1;
}
@@ -613,7 +613,7 @@ static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
}
CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
gctx->taglen = 16;
- /* Don't reuse the IV */
+ // Don't reuse the IV
gctx->iv_set = 0;
return 0;
}
@@ -813,7 +813,7 @@ DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_gcm_generic) {
#if !defined(OPENSSL_NO_ASM) && \
(defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
-/* AES-NI section. */
+// AES-NI section.
static char aesni_capable(void) {
return (OPENSSL_ia32cap_P[1] & (1 << (57 - 32))) != 0;
@@ -880,8 +880,8 @@ static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f)aesni_encrypt, 1);
gctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
- /* If we have an iv can set it directly, otherwise use
- * saved IV. */
+ // If we have an iv can set it directly, otherwise use
+ // saved IV.
if (iv == NULL && gctx->iv_set) {
iv = gctx->iv;
}
@@ -891,7 +891,7 @@ static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
}
gctx->key_set = 1;
} else {
- /* If key set use IV, otherwise copy */
+ // If key set use IV, otherwise copy
if (gctx->key_set) {
CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
} else {
@@ -1104,7 +1104,7 @@ DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_256_gcm) {
} \
}
-#else /* ^^^ OPENSSL_X86_64 || OPENSSL_X86 */
+#else // ^^^ OPENSSL_X86_64 || OPENSSL_X86
static char aesni_capable(void) {
return 0;
@@ -1158,7 +1158,7 @@ static int aead_aes_gcm_init_impl(struct aead_aes_gcm_ctx *gcm_ctx,
if (key_bits != 128 && key_bits != 256) {
OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
- return 0; /* EVP_AEAD_CTX_init should catch this. */
+ return 0; // EVP_AEAD_CTX_init should catch this.
}
if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {