Skip to content

Commit ec38a93

Browse files
Ard Biesheuvel authored and herbertx (Herbert Xu) committed
crypto: aes-generic - drop alignment requirement
The generic AES code exposes a 32-bit align mask, which forces all users of the code to use temporary buffers or take other measures to ensure the alignment requirement is adhered to, even on architectures that don't care about alignment for software algorithms such as this one. So drop the align mask, and fix the code to use get_unaligned_le32() where appropriate, which will resolve to whatever is optimal for the architecture. Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
1 parent c459bd7 commit ec38a93

File tree

1 file changed

+32
-32
lines changed

1 file changed

+32
-32
lines changed

crypto/aes_generic.c

Lines changed: 32 additions & 32 deletions
Original file line number | Diff line number | Diff line change
@@ -54,6 +54,7 @@
 #include <linux/errno.h>
 #include <linux/crypto.h>
 #include <asm/byteorder.h>
+#include <asm/unaligned.h>
 
 static inline u8 byte(const u32 x, const unsigned n)
 {
@@ -1216,7 +1217,6 @@ EXPORT_SYMBOL_GPL(crypto_il_tab);
 int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
 		unsigned int key_len)
 {
-	const __le32 *key = (const __le32 *)in_key;
 	u32 i, t, u, v, w, j;
 
 	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
@@ -1225,10 +1225,15 @@ int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
 
 	ctx->key_length = key_len;
 
-	ctx->key_dec[key_len + 24] = ctx->key_enc[0] = le32_to_cpu(key[0]);
-	ctx->key_dec[key_len + 25] = ctx->key_enc[1] = le32_to_cpu(key[1]);
-	ctx->key_dec[key_len + 26] = ctx->key_enc[2] = le32_to_cpu(key[2]);
-	ctx->key_dec[key_len + 27] = ctx->key_enc[3] = le32_to_cpu(key[3]);
+	ctx->key_enc[0] = get_unaligned_le32(in_key);
+	ctx->key_enc[1] = get_unaligned_le32(in_key + 4);
+	ctx->key_enc[2] = get_unaligned_le32(in_key + 8);
+	ctx->key_enc[3] = get_unaligned_le32(in_key + 12);
+
+	ctx->key_dec[key_len + 24] = ctx->key_enc[0];
+	ctx->key_dec[key_len + 25] = ctx->key_enc[1];
+	ctx->key_dec[key_len + 26] = ctx->key_enc[2];
+	ctx->key_dec[key_len + 27] = ctx->key_enc[3];
 
 	switch (key_len) {
 	case AES_KEYSIZE_128:
@@ -1238,17 +1243,17 @@ int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
 		break;
 
 	case AES_KEYSIZE_192:
-		ctx->key_enc[4] = le32_to_cpu(key[4]);
-		t = ctx->key_enc[5] = le32_to_cpu(key[5]);
+		ctx->key_enc[4] = get_unaligned_le32(in_key + 16);
+		t = ctx->key_enc[5] = get_unaligned_le32(in_key + 20);
 		for (i = 0; i < 8; ++i)
 			loop6(i);
 		break;
 
 	case AES_KEYSIZE_256:
-		ctx->key_enc[4] = le32_to_cpu(key[4]);
-		ctx->key_enc[5] = le32_to_cpu(key[5]);
-		ctx->key_enc[6] = le32_to_cpu(key[6]);
-		t = ctx->key_enc[7] = le32_to_cpu(key[7]);
+		ctx->key_enc[4] = get_unaligned_le32(in_key + 16);
+		ctx->key_enc[5] = get_unaligned_le32(in_key + 20);
+		ctx->key_enc[6] = get_unaligned_le32(in_key + 24);
+		t = ctx->key_enc[7] = get_unaligned_le32(in_key + 28);
 		for (i = 0; i < 6; ++i)
 			loop8(i);
 		loop8tophalf(i);
@@ -1329,16 +1334,14 @@ EXPORT_SYMBOL_GPL(crypto_aes_set_key);
 static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
 	const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
-	const __le32 *src = (const __le32 *)in;
-	__le32 *dst = (__le32 *)out;
 	u32 b0[4], b1[4];
 	const u32 *kp = ctx->key_enc + 4;
 	const int key_len = ctx->key_length;
 
-	b0[0] = le32_to_cpu(src[0]) ^ ctx->key_enc[0];
-	b0[1] = le32_to_cpu(src[1]) ^ ctx->key_enc[1];
-	b0[2] = le32_to_cpu(src[2]) ^ ctx->key_enc[2];
-	b0[3] = le32_to_cpu(src[3]) ^ ctx->key_enc[3];
+	b0[0] = ctx->key_enc[0] ^ get_unaligned_le32(in);
+	b0[1] = ctx->key_enc[1] ^ get_unaligned_le32(in + 4);
+	b0[2] = ctx->key_enc[2] ^ get_unaligned_le32(in + 8);
+	b0[3] = ctx->key_enc[3] ^ get_unaligned_le32(in + 12);
 
 	if (key_len > 24) {
 		f_nround(b1, b0, kp);
@@ -1361,10 +1364,10 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 	f_nround(b1, b0, kp);
 	f_lround(b0, b1, kp);
 
-	dst[0] = cpu_to_le32(b0[0]);
-	dst[1] = cpu_to_le32(b0[1]);
-	dst[2] = cpu_to_le32(b0[2]);
-	dst[3] = cpu_to_le32(b0[3]);
+	put_unaligned_le32(b0[0], out);
+	put_unaligned_le32(b0[1], out + 4);
+	put_unaligned_le32(b0[2], out + 8);
+	put_unaligned_le32(b0[3], out + 12);
 }
 
 /* decrypt a block of text */
@@ -1401,16 +1404,14 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
 	const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
-	const __le32 *src = (const __le32 *)in;
-	__le32 *dst = (__le32 *)out;
 	u32 b0[4], b1[4];
 	const int key_len = ctx->key_length;
 	const u32 *kp = ctx->key_dec + 4;
 
-	b0[0] = le32_to_cpu(src[0]) ^ ctx->key_dec[0];
-	b0[1] = le32_to_cpu(src[1]) ^ ctx->key_dec[1];
-	b0[2] = le32_to_cpu(src[2]) ^ ctx->key_dec[2];
-	b0[3] = le32_to_cpu(src[3]) ^ ctx->key_dec[3];
+	b0[0] = ctx->key_dec[0] ^ get_unaligned_le32(in);
+	b0[1] = ctx->key_dec[1] ^ get_unaligned_le32(in + 4);
+	b0[2] = ctx->key_dec[2] ^ get_unaligned_le32(in + 8);
+	b0[3] = ctx->key_dec[3] ^ get_unaligned_le32(in + 12);
 
 	if (key_len > 24) {
 		i_nround(b1, b0, kp);
@@ -1433,10 +1434,10 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 	i_nround(b1, b0, kp);
 	i_lround(b0, b1, kp);
 
-	dst[0] = cpu_to_le32(b0[0]);
-	dst[1] = cpu_to_le32(b0[1]);
-	dst[2] = cpu_to_le32(b0[2]);
-	dst[3] = cpu_to_le32(b0[3]);
+	put_unaligned_le32(b0[0], out);
+	put_unaligned_le32(b0[1], out + 4);
+	put_unaligned_le32(b0[2], out + 8);
+	put_unaligned_le32(b0[3], out + 12);
 }
 
 static struct crypto_alg aes_alg = {
@@ -1446,7 +1447,6 @@ static struct crypto_alg aes_alg = {
 	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 	.cra_blocksize		= AES_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
-	.cra_alignmask		= 3,
 	.cra_module		= THIS_MODULE,
 	.cra_u			= {
 		.cipher = {

0 commit comments

Comments (0)