1
1
/*
2
2
* linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
3
3
*
4
- * Copyright (C) 2013 Linaro Ltd <ard.biesheuvel@linaro.org>
4
+ * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
5
5
*
6
6
* This program is free software; you can redistribute it and/or modify
7
7
* it under the terms of the GNU General Public License version 2 as
11
11
#include <asm/neon.h>
12
12
#include <asm/hwcap.h>
13
13
#include <crypto/aes.h>
14
+ #include <crypto/internal/hash.h>
14
15
#include <crypto/internal/simd.h>
15
16
#include <crypto/internal/skcipher.h>
16
17
#include <linux/module.h>
31
32
#define aes_ctr_encrypt ce_aes_ctr_encrypt
32
33
#define aes_xts_encrypt ce_aes_xts_encrypt
33
34
#define aes_xts_decrypt ce_aes_xts_decrypt
35
+ #define aes_mac_update ce_aes_mac_update
34
36
MODULE_DESCRIPTION ("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions" );
35
37
#else
36
38
#define MODE "neon"
@@ -44,11 +46,15 @@ MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
44
46
#define aes_ctr_encrypt neon_aes_ctr_encrypt
45
47
#define aes_xts_encrypt neon_aes_xts_encrypt
46
48
#define aes_xts_decrypt neon_aes_xts_decrypt
49
+ #define aes_mac_update neon_aes_mac_update
47
50
MODULE_DESCRIPTION ("AES-ECB/CBC/CTR/XTS using ARMv8 NEON" );
48
51
MODULE_ALIAS_CRYPTO ("ecb(aes)" );
49
52
MODULE_ALIAS_CRYPTO ("cbc(aes)" );
50
53
MODULE_ALIAS_CRYPTO ("ctr(aes)" );
51
54
MODULE_ALIAS_CRYPTO ("xts(aes)" );
55
+ MODULE_ALIAS_CRYPTO ("cmac(aes)" );
56
+ MODULE_ALIAS_CRYPTO ("xcbc(aes)" );
57
+ MODULE_ALIAS_CRYPTO ("cbcmac(aes)" );
52
58
#endif
53
59
54
60
MODULE_AUTHOR ("Ard Biesheuvel <ard.biesheuvel@linaro.org>" );
@@ -75,11 +81,25 @@ asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
75
81
int rounds , int blocks , u8 const rk2 [], u8 iv [],
76
82
int first );
77
83
84
+ asmlinkage void aes_mac_update (u8 const in [], u32 const rk [], int rounds ,
85
+ int blocks , u8 dg [], int enc_before ,
86
+ int enc_after );
87
+
78
88
/*
 * XTS needs two independently expanded AES keys.  key1 processes the data;
 * key2 is passed as rk2 to the aes_xts_{en,de}crypt asm routines.
 * NOTE(review): rk2 presumably generates the XTS tweak - confirm against
 * the asm implementation.
 */
struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	/* 8-byte alignment so the asm code can load it efficiently -
	 * NOTE(review): exact alignment requirement comes from the asm side */
	struct crypto_aes_ctx __aligned(8) key2;
};
82
92
93
/*
 * Per-tfm context shared by the cmac/xcbc/cbcmac transforms: the expanded
 * AES key plus trailing storage for derived constants.
 */
struct mac_tfm_ctx {
	struct crypto_aes_ctx key;
	/*
	 * Flexible array member for derived subkeys.  cmac/xcbc reserve
	 * 2 * AES_BLOCK_SIZE extra bytes here (see their cra_ctxsize);
	 * plain cbcmac reserves none.
	 */
	u8 __aligned(8) consts[];
};
97
+
98
/*
 * Per-request hash state.
 */
struct mac_desc_ctx {
	unsigned int len;	/* bytes buffered in dg[], 0..AES_BLOCK_SIZE */
	u8 dg[AES_BLOCK_SIZE];	/* CBC-MAC chaining value / partial block */
};
102
+
83
103
static int skcipher_aes_setkey (struct crypto_skcipher * tfm , const u8 * in_key ,
84
104
unsigned int key_len )
85
105
{
@@ -357,6 +377,217 @@ static struct skcipher_alg aes_algs[] = { {
357
377
.decrypt = xts_decrypt ,
358
378
} };
359
379
380
+ static int cbcmac_setkey (struct crypto_shash * tfm , const u8 * in_key ,
381
+ unsigned int key_len )
382
+ {
383
+ struct mac_tfm_ctx * ctx = crypto_shash_ctx (tfm );
384
+ int err ;
385
+
386
+ err = aes_expandkey (& ctx -> key , in_key , key_len );
387
+ if (err )
388
+ crypto_shash_set_flags (tfm , CRYPTO_TFM_RES_BAD_KEY_LEN );
389
+
390
+ return err ;
391
+ }
392
+
393
+ static void cmac_gf128_mul_by_x (be128 * y , const be128 * x )
394
+ {
395
+ u64 a = be64_to_cpu (x -> a );
396
+ u64 b = be64_to_cpu (x -> b );
397
+
398
+ y -> a = cpu_to_be64 ((a << 1 ) | (b >> 63 ));
399
+ y -> b = cpu_to_be64 ((b << 1 ) ^ ((a >> 63 ) ? 0x87 : 0 ));
400
+ }
401
+
402
+ static int cmac_setkey (struct crypto_shash * tfm , const u8 * in_key ,
403
+ unsigned int key_len )
404
+ {
405
+ struct mac_tfm_ctx * ctx = crypto_shash_ctx (tfm );
406
+ be128 * consts = (be128 * )ctx -> consts ;
407
+ u8 * rk = (u8 * )ctx -> key .key_enc ;
408
+ int rounds = 6 + key_len / 4 ;
409
+ int err ;
410
+
411
+ err = cbcmac_setkey (tfm , in_key , key_len );
412
+ if (err )
413
+ return err ;
414
+
415
+ /* encrypt the zero vector */
416
+ kernel_neon_begin ();
417
+ aes_ecb_encrypt (ctx -> consts , (u8 [AES_BLOCK_SIZE ]){}, rk , rounds , 1 , 1 );
418
+ kernel_neon_end ();
419
+
420
+ cmac_gf128_mul_by_x (consts , consts );
421
+ cmac_gf128_mul_by_x (consts + 1 , consts );
422
+
423
+ return 0 ;
424
+ }
425
+
426
+ static int xcbc_setkey (struct crypto_shash * tfm , const u8 * in_key ,
427
+ unsigned int key_len )
428
+ {
429
+ static u8 const ks [3 ][AES_BLOCK_SIZE ] = {
430
+ { [0 ... AES_BLOCK_SIZE - 1 ] = 0x1 },
431
+ { [0 ... AES_BLOCK_SIZE - 1 ] = 0x2 },
432
+ { [0 ... AES_BLOCK_SIZE - 1 ] = 0x3 },
433
+ };
434
+
435
+ struct mac_tfm_ctx * ctx = crypto_shash_ctx (tfm );
436
+ u8 * rk = (u8 * )ctx -> key .key_enc ;
437
+ int rounds = 6 + key_len / 4 ;
438
+ u8 key [AES_BLOCK_SIZE ];
439
+ int err ;
440
+
441
+ err = cbcmac_setkey (tfm , in_key , key_len );
442
+ if (err )
443
+ return err ;
444
+
445
+ kernel_neon_begin ();
446
+ aes_ecb_encrypt (key , ks [0 ], rk , rounds , 1 , 1 );
447
+ aes_ecb_encrypt (ctx -> consts , ks [1 ], rk , rounds , 2 , 0 );
448
+ kernel_neon_end ();
449
+
450
+ return cbcmac_setkey (tfm , key , sizeof (key ));
451
+ }
452
+
453
+ static int mac_init (struct shash_desc * desc )
454
+ {
455
+ struct mac_desc_ctx * ctx = shash_desc_ctx (desc );
456
+
457
+ memset (ctx -> dg , 0 , AES_BLOCK_SIZE );
458
+ ctx -> len = 0 ;
459
+
460
+ return 0 ;
461
+ }
462
+
463
/*
 * Feed data into the CBC-MAC state.  Runs of complete blocks are handed
 * to the NEON helper in one call; sub-block leftovers are XORed into dg[]
 * and deferred.
 *
 * Invariant: ctx->len counts the bytes buffered in dg[].  The special
 * value AES_BLOCK_SIZE means a full block has been absorbed into dg[] but
 * not yet encrypted, so final() can decide how to process it (plain
 * CBC-MAC vs CMAC/XCBC subkey mixing).
 */
static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	int rounds = 6 + tctx->key.key_length / 4;

	while (len > 0) {
		unsigned int l;

		/* bulk path: buffer is block-aligned and input spills past
		 * one block */
		if ((ctx->len % AES_BLOCK_SIZE) == 0 &&
		    (ctx->len + len) > AES_BLOCK_SIZE) {

			int blocks = len / AES_BLOCK_SIZE;

			len %= AES_BLOCK_SIZE;

			/*
			 * enc_before = a previously buffered full block must
			 * be encrypted first; enc_after = 0 when the input
			 * ends on a block boundary, leaving the last block
			 * absorbed but unencrypted for final().
			 * NOTE(review): precise enc_before/enc_after
			 * semantics live in the aes_mac_update asm routine -
			 * confirm against the asm source.
			 */
			kernel_neon_begin();
			aes_mac_update(p, tctx->key.key_enc, rounds, blocks,
				       ctx->dg, (ctx->len != 0), (len != 0));
			kernel_neon_end();

			p += blocks * AES_BLOCK_SIZE;

			if (!len) {
				/* block-aligned input: mark dg[] as holding
				 * a full, pending block */
				ctx->len = AES_BLOCK_SIZE;
				break;
			}
			ctx->len = 0;
		}

		/* buffer the sub-block remainder into dg[] */
		l = min(len, AES_BLOCK_SIZE - ctx->len);

		/* always true: l is capped at AES_BLOCK_SIZE by the min()
		 * above */
		if (l <= AES_BLOCK_SIZE) {
			crypto_xor(ctx->dg + ctx->len, p, l);
			ctx->len += l;
			len -= l;
			p += l;
		}
	}

	return 0;
}
505
+
506
+ static int cbcmac_final (struct shash_desc * desc , u8 * out )
507
+ {
508
+ struct mac_tfm_ctx * tctx = crypto_shash_ctx (desc -> tfm );
509
+ struct mac_desc_ctx * ctx = shash_desc_ctx (desc );
510
+ int rounds = 6 + tctx -> key .key_length / 4 ;
511
+
512
+ kernel_neon_begin ();
513
+ aes_mac_update (NULL , tctx -> key .key_enc , rounds , 0 , ctx -> dg , 1 , 0 );
514
+ kernel_neon_end ();
515
+
516
+ memcpy (out , ctx -> dg , AES_BLOCK_SIZE );
517
+
518
+ return 0 ;
519
+ }
520
+
521
+ static int cmac_final (struct shash_desc * desc , u8 * out )
522
+ {
523
+ struct mac_tfm_ctx * tctx = crypto_shash_ctx (desc -> tfm );
524
+ struct mac_desc_ctx * ctx = shash_desc_ctx (desc );
525
+ int rounds = 6 + tctx -> key .key_length / 4 ;
526
+ u8 * consts = tctx -> consts ;
527
+
528
+ if (ctx -> len != AES_BLOCK_SIZE ) {
529
+ ctx -> dg [ctx -> len ] ^= 0x80 ;
530
+ consts += AES_BLOCK_SIZE ;
531
+ }
532
+
533
+ kernel_neon_begin ();
534
+ aes_mac_update (consts , tctx -> key .key_enc , rounds , 1 , ctx -> dg , 0 , 1 );
535
+ kernel_neon_end ();
536
+
537
+ memcpy (out , ctx -> dg , AES_BLOCK_SIZE );
538
+
539
+ return 0 ;
540
+ }
541
+
542
/*
 * The three MAC transforms built on the shared mac_init/mac_update core.
 * cmac and xcbc reserve 2 * AES_BLOCK_SIZE extra context bytes for the
 * derived subkeys stored in mac_tfm_ctx::consts[].
 */
static struct shash_alg mac_algs[] = { {
	/* CMAC */
	.base.cra_name		= "cmac(aes)",
	.base.cra_driver_name	= "cmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	/* room for subkeys K1/K2 in consts[] */
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= cmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	/* XCBC-MAC: same update/final path as CMAC, different setkey */
	.base.cra_name		= "xcbc(aes)",
	.base.cra_driver_name	= "xcbc-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	/* room for subkeys K2/K3 in consts[] */
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= xcbc_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	/* plain CBC-MAC: no derived constants, byte-granular blocksize */
	.base.cra_name		= "cbcmac(aes)",
	.base.cra_driver_name	= "cbcmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx),
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cbcmac_final,
	.setkey			= cbcmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
} };
590
+
360
591
static struct simd_skcipher_alg * aes_simd_algs [ARRAY_SIZE (aes_algs )];
361
592
362
593
static void aes_exit (void )
@@ -367,6 +598,7 @@ static void aes_exit(void)
367
598
if (aes_simd_algs [i ])
368
599
simd_skcipher_free (aes_simd_algs [i ]);
369
600
601
+ crypto_unregister_shashes (mac_algs , ARRAY_SIZE (mac_algs ));
370
602
crypto_unregister_skciphers (aes_algs , ARRAY_SIZE (aes_algs ));
371
603
}
372
604
@@ -383,6 +615,10 @@ static int __init aes_init(void)
383
615
if (err )
384
616
return err ;
385
617
618
+ err = crypto_register_shashes (mac_algs , ARRAY_SIZE (mac_algs ));
619
+ if (err )
620
+ goto unregister_ciphers ;
621
+
386
622
for (i = 0 ; i < ARRAY_SIZE (aes_algs ); i ++ ) {
387
623
if (!(aes_algs [i ].base .cra_flags & CRYPTO_ALG_INTERNAL ))
388
624
continue ;
@@ -402,6 +638,8 @@ static int __init aes_init(void)
402
638
403
639
unregister_simds :
404
640
aes_exit ();
641
+ unregister_ciphers :
642
+ crypto_unregister_skciphers (aes_algs , ARRAY_SIZE (aes_algs ));
405
643
return err ;
406
644
}
407
645
0 commit comments