@@ -167,62 +167,12 @@ static int omap_aes_gcm_copy_buffers(struct omap_aes_dev *dd,
 	return 0;
 }
 
-static void omap_aes_gcm_complete(struct crypto_async_request *req, int err)
-{
-	struct omap_aes_gcm_result *res = req->data;
-
-	if (err == -EINPROGRESS)
-		return;
-
-	res->err = err;
-	complete(&res->completion);
-}
-
 static int do_encrypt_iv(struct aead_request *req, u32 *tag, u32 *iv)
 {
-	struct scatterlist iv_sg, tag_sg;
-	struct skcipher_request *sk_req;
-	struct omap_aes_gcm_result result;
-	struct omap_aes_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
-	int ret = 0;
-
-	sk_req = skcipher_request_alloc(ctx->ctr, GFP_KERNEL);
-	if (!sk_req) {
-		pr_err("skcipher: Failed to allocate request\n");
-		return -ENOMEM;
-	}
-
-	init_completion(&result.completion);
-
-	sg_init_one(&iv_sg, iv, AES_BLOCK_SIZE);
-	sg_init_one(&tag_sg, tag, AES_BLOCK_SIZE);
-	skcipher_request_set_callback(sk_req, CRYPTO_TFM_REQ_MAY_BACKLOG,
-				      omap_aes_gcm_complete, &result);
-	ret = crypto_skcipher_setkey(ctx->ctr, (u8 *)ctx->key, ctx->keylen);
-	skcipher_request_set_crypt(sk_req, &iv_sg, &tag_sg, AES_BLOCK_SIZE,
-				   NULL);
-	ret = crypto_skcipher_encrypt(sk_req);
-	switch (ret) {
-	case 0:
-		break;
-	case -EINPROGRESS:
-	case -EBUSY:
-		ret = wait_for_completion_interruptible(&result.completion);
-		if (!ret) {
-			ret = result.err;
-			if (!ret) {
-				reinit_completion(&result.completion);
-				break;
-			}
-		}
-		/* fall through */
-	default:
-		pr_err("Encryption of IV failed for GCM mode\n");
-		break;
-	}
+	struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
 
-	skcipher_request_free(sk_req);
-	return ret;
+	aes_encrypt(&ctx->actx, (u8 *)tag, (u8 *)iv);
+	return 0;
 }
 
 void omap_aes_gcm_dma_out_callback(void *data)
@@ -252,7 +202,7 @@ void omap_aes_gcm_dma_out_callback(void *data)
 static int omap_aes_gcm_handle_queue(struct omap_aes_dev *dd,
 				     struct aead_request *req)
 {
-	struct omap_aes_ctx *ctx;
+	struct omap_aes_gcm_ctx *ctx;
 	struct aead_request *backlog;
 	struct omap_aes_reqctx *rctx;
 	unsigned long flags;
@@ -281,7 +231,7 @@ static int omap_aes_gcm_handle_queue(struct omap_aes_dev *dd,
 	ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
 	rctx = aead_request_ctx(req);
 
-	dd->ctx = ctx;
+	dd->ctx = &ctx->octx;
 	rctx->dd = dd;
 	dd->aead_req = req;
 
@@ -360,10 +310,10 @@ int omap_aes_gcm_decrypt(struct aead_request *req)
 
 int omap_aes_4106gcm_encrypt(struct aead_request *req)
 {
-	struct omap_aes_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
+	struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
 	struct omap_aes_reqctx *rctx = aead_request_ctx(req);
 
-	memcpy(rctx->iv, ctx->nonce, 4);
+	memcpy(rctx->iv, ctx->octx.nonce, 4);
 	memcpy(rctx->iv + 4, req->iv, 8);
 	return crypto_ipsec_check_assoclen(req->assoclen) ?:
 	       omap_aes_gcm_crypt(req, FLAGS_ENCRYPT | FLAGS_GCM |
@@ -372,10 +322,10 @@ int omap_aes_4106gcm_encrypt(struct aead_request *req)
 
 int omap_aes_4106gcm_decrypt(struct aead_request *req)
 {
-	struct omap_aes_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
+	struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
 	struct omap_aes_reqctx *rctx = aead_request_ctx(req);
 
-	memcpy(rctx->iv, ctx->nonce, 4);
+	memcpy(rctx->iv, ctx->octx.nonce, 4);
 	memcpy(rctx->iv + 4, req->iv, 8);
 	return crypto_ipsec_check_assoclen(req->assoclen) ?:
 	       omap_aes_gcm_crypt(req, FLAGS_GCM | FLAGS_RFC4106_GCM);
@@ -384,34 +334,36 @@ int omap_aes_4106gcm_decrypt(struct aead_request *req)
 int omap_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key,
 			unsigned int keylen)
 {
-	struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm);
+	struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm);
+	int ret;
 
-	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
-	    keylen != AES_KEYSIZE_256)
-		return -EINVAL;
+	ret = aes_expandkey(&ctx->actx, key, keylen);
+	if (ret)
+		return ret;
 
-	memcpy(ctx->key, key, keylen);
-	ctx->keylen = keylen;
+	memcpy(ctx->octx.key, key, keylen);
+	ctx->octx.keylen = keylen;
 
 	return 0;
 }
 
 int omap_aes_4106gcm_setkey(struct crypto_aead *tfm, const u8 *key,
 			    unsigned int keylen)
 {
-	struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm);
+	struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm);
+	int ret;
 
 	if (keylen < 4)
 		return -EINVAL;
-
 	keylen -= 4;
-	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
-	    keylen != AES_KEYSIZE_256)
-		return -EINVAL;
 
-	memcpy(ctx->key, key, keylen);
-	memcpy(ctx->nonce, key + keylen, 4);
-	ctx->keylen = keylen;
+	ret = aes_expandkey(&ctx->actx, key, keylen);
+	if (ret)
+		return ret;
+
+	memcpy(ctx->octx.key, key, keylen);
+	memcpy(ctx->octx.nonce, key + keylen, 4);
+	ctx->octx.keylen = keylen;
 
 	return 0;
 }
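
Note (not part of the patch): the diff switches the setkey paths to aes_expandkey(), which validates the key size itself, so the open-coded AES_KEYSIZE_* checks go away, and do_encrypt_iv() becomes a single synchronous aes_encrypt() call on the pre-expanded key instead of allocating a CTR skcipher request and waiting on a completion. A minimal, hypothetical sketch of that pattern follows; struct example_gcm_ctx and the example_* function names are illustrative only and assume a context that simply embeds a struct crypto_aes_ctx, as the diff's ctx->actx usage suggests.

#include <linux/types.h>
#include <crypto/aes.h>		/* aes_expandkey(), aes_encrypt() */

/* Illustrative context: only the expanded-key part is shown here. */
struct example_gcm_ctx {
	struct crypto_aes_ctx actx;	/* round keys for the AES library */
};

/* setkey path: aes_expandkey() rejects unsupported key sizes itself,
 * so no explicit AES_KEYSIZE_128/192/256 check is needed. */
static int example_setkey(struct example_gcm_ctx *ctx,
			  const u8 *key, unsigned int keylen)
{
	return aes_expandkey(&ctx->actx, key, keylen);
}

/* do_encrypt_iv() equivalent: one synchronous block encryption of the
 * IV into the tag buffer, with no request allocation or completion wait. */
static void example_encrypt_iv(struct example_gcm_ctx *ctx, u8 *tag,
			       const u8 *iv)
{
	aes_encrypt(&ctx->actx, tag, iv);
}

Keeping this step synchronous also removes a GFP_KERNEL allocation and an interruptible wait from the request path.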