 #include <linux/errno.h>
 #include <linux/crypto.h>
 #include <asm/byteorder.h>
+#include <asm/unaligned.h>
 
 static inline u8 byte(const u32 x, const unsigned n)
 {
@@ -1216,7 +1217,6 @@ EXPORT_SYMBOL_GPL(crypto_il_tab);
 int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
 		unsigned int key_len)
 {
-	const __le32 *key = (const __le32 *)in_key;
 	u32 i, t, u, v, w, j;
 
 	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
@@ -1225,10 +1225,15 @@ int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
 
 	ctx->key_length = key_len;
 
-	ctx->key_dec[key_len + 24] = ctx->key_enc[0] = le32_to_cpu(key[0]);
-	ctx->key_dec[key_len + 25] = ctx->key_enc[1] = le32_to_cpu(key[1]);
-	ctx->key_dec[key_len + 26] = ctx->key_enc[2] = le32_to_cpu(key[2]);
-	ctx->key_dec[key_len + 27] = ctx->key_enc[3] = le32_to_cpu(key[3]);
+	ctx->key_enc[0] = get_unaligned_le32(in_key);
+	ctx->key_enc[1] = get_unaligned_le32(in_key + 4);
+	ctx->key_enc[2] = get_unaligned_le32(in_key + 8);
+	ctx->key_enc[3] = get_unaligned_le32(in_key + 12);
+
+	ctx->key_dec[key_len + 24] = ctx->key_enc[0];
+	ctx->key_dec[key_len + 25] = ctx->key_enc[1];
+	ctx->key_dec[key_len + 26] = ctx->key_enc[2];
+	ctx->key_dec[key_len + 27] = ctx->key_enc[3];
 
 	switch (key_len) {
 	case AES_KEYSIZE_128:
@@ -1238,17 +1243,17 @@ int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
 		break;
 
 	case AES_KEYSIZE_192:
-		ctx->key_enc[4] = le32_to_cpu(key[4]);
-		t = ctx->key_enc[5] = le32_to_cpu(key[5]);
+		ctx->key_enc[4] = get_unaligned_le32(in_key + 16);
+		t = ctx->key_enc[5] = get_unaligned_le32(in_key + 20);
 		for (i = 0; i < 8; ++i)
 			loop6(i);
 		break;
 
 	case AES_KEYSIZE_256:
-		ctx->key_enc[4] = le32_to_cpu(key[4]);
-		ctx->key_enc[5] = le32_to_cpu(key[5]);
-		ctx->key_enc[6] = le32_to_cpu(key[6]);
-		t = ctx->key_enc[7] = le32_to_cpu(key[7]);
+		ctx->key_enc[4] = get_unaligned_le32(in_key + 16);
+		ctx->key_enc[5] = get_unaligned_le32(in_key + 20);
+		ctx->key_enc[6] = get_unaligned_le32(in_key + 24);
+		t = ctx->key_enc[7] = get_unaligned_le32(in_key + 28);
 		for (i = 0; i < 6; ++i)
 			loop8(i);
 		loop8tophalf(i);
@@ -1329,16 +1334,14 @@ EXPORT_SYMBOL_GPL(crypto_aes_set_key);
 static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
 	const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
-	const __le32 *src = (const __le32 *)in;
-	__le32 *dst = (__le32 *)out;
 	u32 b0[4], b1[4];
 	const u32 *kp = ctx->key_enc + 4;
 	const int key_len = ctx->key_length;
 
-	b0[0] = le32_to_cpu(src[0]) ^ ctx->key_enc[0];
-	b0[1] = le32_to_cpu(src[1]) ^ ctx->key_enc[1];
-	b0[2] = le32_to_cpu(src[2]) ^ ctx->key_enc[2];
-	b0[3] = le32_to_cpu(src[3]) ^ ctx->key_enc[3];
+	b0[0] = ctx->key_enc[0] ^ get_unaligned_le32(in);
+	b0[1] = ctx->key_enc[1] ^ get_unaligned_le32(in + 4);
+	b0[2] = ctx->key_enc[2] ^ get_unaligned_le32(in + 8);
+	b0[3] = ctx->key_enc[3] ^ get_unaligned_le32(in + 12);
 
 	if (key_len > 24) {
 		f_nround(b1, b0, kp);
@@ -1361,10 +1364,10 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 	f_nround(b1, b0, kp);
 	f_lround(b0, b1, kp);
 
-	dst[0] = cpu_to_le32(b0[0]);
-	dst[1] = cpu_to_le32(b0[1]);
-	dst[2] = cpu_to_le32(b0[2]);
-	dst[3] = cpu_to_le32(b0[3]);
+	put_unaligned_le32(b0[0], out);
+	put_unaligned_le32(b0[1], out + 4);
+	put_unaligned_le32(b0[2], out + 8);
+	put_unaligned_le32(b0[3], out + 12);
 }
 
 /* decrypt a block of text */
@@ -1401,16 +1404,14 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
 	const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
-	const __le32 *src = (const __le32 *)in;
-	__le32 *dst = (__le32 *)out;
 	u32 b0[4], b1[4];
 	const int key_len = ctx->key_length;
 	const u32 *kp = ctx->key_dec + 4;
 
-	b0[0] = le32_to_cpu(src[0]) ^ ctx->key_dec[0];
-	b0[1] = le32_to_cpu(src[1]) ^ ctx->key_dec[1];
-	b0[2] = le32_to_cpu(src[2]) ^ ctx->key_dec[2];
-	b0[3] = le32_to_cpu(src[3]) ^ ctx->key_dec[3];
+	b0[0] = ctx->key_dec[0] ^ get_unaligned_le32(in);
+	b0[1] = ctx->key_dec[1] ^ get_unaligned_le32(in + 4);
+	b0[2] = ctx->key_dec[2] ^ get_unaligned_le32(in + 8);
+	b0[3] = ctx->key_dec[3] ^ get_unaligned_le32(in + 12);
 
 	if (key_len > 24) {
 		i_nround(b1, b0, kp);
@@ -1433,10 +1434,10 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 	i_nround(b1, b0, kp);
 	i_lround(b0, b1, kp);
 
-	dst[0] = cpu_to_le32(b0[0]);
-	dst[1] = cpu_to_le32(b0[1]);
-	dst[2] = cpu_to_le32(b0[2]);
-	dst[3] = cpu_to_le32(b0[3]);
+	put_unaligned_le32(b0[0], out);
+	put_unaligned_le32(b0[1], out + 4);
+	put_unaligned_le32(b0[2], out + 8);
+	put_unaligned_le32(b0[3], out + 12);
 }
 
 static struct crypto_alg aes_alg = {
@@ -1446,7 +1447,6 @@ static struct crypto_alg aes_alg = {
 	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 	.cra_blocksize		= AES_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
-	.cra_alignmask		= 3,
 	.cra_module		= THIS_MODULE,
 	.cra_u	= {
 		.cipher = {
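
Note on the change above: get_unaligned_le32() and put_unaligned_le32() from asm/unaligned.h load and store 32-bit little-endian words without assuming the buffer pointer is 4-byte aligned, which is why the .cra_alignmask = 3 constraint on the generic AES cipher can be dropped. Below is a minimal userspace sketch of the same access pattern; the memcpy()-based helpers are stand-ins whose names mirror the kernel ones for illustration only, not the kernel's actual per-architecture implementation.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Stand-ins for the kernel's get_unaligned_le32()/put_unaligned_le32().
 * A fixed-size memcpy() lets the compiler pick a safe access for the
 * target, so the pointer may have any alignment; the byte order is
 * little-endian regardless of the host CPU. */
static uint32_t get_unaligned_le32(const void *p)
{
	uint8_t b[4];

	memcpy(b, p, sizeof(b));
	return (uint32_t)b[0] | ((uint32_t)b[1] << 8) |
	       ((uint32_t)b[2] << 16) | ((uint32_t)b[3] << 24);
}

static void put_unaligned_le32(uint32_t val, void *p)
{
	uint8_t b[4] = {
		(uint8_t)val, (uint8_t)(val >> 8),
		(uint8_t)(val >> 16), (uint8_t)(val >> 24),
	};

	memcpy(p, b, sizeof(b));
}

int main(void)
{
	uint8_t buf[8] = { 0 };

	/* buf + 1 is deliberately misaligned for a 32-bit access. */
	put_unaligned_le32(0xdeadbeef, buf + 1);
	printf("0x%08x\n", (unsigned)get_unaligned_le32(buf + 1)); /* 0xdeadbeef */
	return 0;
}

On architectures that handle unaligned loads and stores in hardware this typically compiles to a single 32-bit access, and to byte accesses elsewhere, so correctness no longer depends on the crypto API padding buffers to a 4-byte boundary.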