Commit f79d2d2
crypto: skcipher - Use restrict rather than hand-rolling accesses

Rather than accessing 'alg' directly to avoid the aliasing issue which
leads to unnecessary reloads, use the __restrict keyword to explicitly
tell the compiler that there is no aliasing.

This generates equivalent, if not superior, code on x86 with gcc 12.

Note that in skcipher_walk_virt the alg assignment is moved after
might_sleep_if because that function is a compiler barrier and forces a
reload.

Signed-off-by: Herbert Xu <[email protected]>
Parent: 006401d
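For readers unfamiliar with the keyword, here is a stand-alone sketch of the aliasing problem the commit message describes. It is a hypothetical user-space analogue, not kernel code: the struct and field names are invented, and it assumes a build with -fno-strict-aliasing (which the kernel uses), so the compiler must treat the two parameters as potentially overlapping unless told otherwise.

/*
 * Hypothetical user-space analogue of the aliasing issue.  Compile
 * with -fno-strict-aliasing (as the kernel does) to see the effect.
 */
struct alg { unsigned int blocksize, ivsize, alignmask; };
struct request { struct alg *alg; };
struct walk { unsigned int blocksize, ivsize, alignmask; };

/*
 * Without restrict: every store through 'walk' might overwrite
 * req->alg, so the compiler reloads that pointer before each field
 * access instead of keeping it in a register.
 */
void init_walk(struct walk *walk, struct request *req)
{
        walk->blocksize = req->alg->blocksize;  /* load req->alg */
        walk->ivsize    = req->alg->ivsize;     /* reload req->alg */
        walk->alignmask = req->alg->alignmask;  /* reload req->alg */
}

/*
 * With restrict: the parameters are promised not to alias, so one
 * load of req->alg can be reused for all three accesses.
 */
void init_walk_restrict(struct walk *restrict walk,
                        struct request *restrict req)
{
        walk->blocksize = req->alg->blocksize;  /* single load */
        walk->ivsize    = req->alg->ivsize;
        walk->alignmask = req->alg->alignmask;
}

The removed comment blocks in the diff below show the old workaround: load 'alg' into a local once and access it directly. With restrict-qualified parameters, the crypto_skcipher_blocksize()-style helpers can be used again without reintroducing the reloads.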

2 files changed, +31 −34 lines changed


crypto/skcipher.c (+23 −28)
@@ -293,14 +293,16 @@ static int skcipher_walk_first(struct skcipher_walk *walk)
 	return skcipher_walk_next(walk);
 }
 
-int skcipher_walk_virt(struct skcipher_walk *walk,
-		       struct skcipher_request *req, bool atomic)
+int skcipher_walk_virt(struct skcipher_walk *__restrict walk,
+		       struct skcipher_request *__restrict req, bool atomic)
 {
-	const struct skcipher_alg *alg =
-		crypto_skcipher_alg(crypto_skcipher_reqtfm(req));
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct skcipher_alg *alg;
 
 	might_sleep_if(req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
 
+	alg = crypto_skcipher_alg(tfm);
+
 	walk->total = req->cryptlen;
 	walk->nbytes = 0;
 	walk->iv = req->iv;
@@ -316,14 +318,9 @@ int skcipher_walk_virt(struct skcipher_walk *walk,
 	scatterwalk_start(&walk->in, req->src);
 	scatterwalk_start(&walk->out, req->dst);
 
-	/*
-	 * Accessing 'alg' directly generates better code than using the
-	 * crypto_skcipher_blocksize() and similar helper functions here, as it
-	 * prevents the algorithm pointer from being repeatedly reloaded.
-	 */
-	walk->blocksize = alg->base.cra_blocksize;
-	walk->ivsize = alg->co.ivsize;
-	walk->alignmask = alg->base.cra_alignmask;
+	walk->blocksize = crypto_skcipher_blocksize(tfm);
+	walk->ivsize = crypto_skcipher_ivsize(tfm);
+	walk->alignmask = crypto_skcipher_alignmask(tfm);
 
 	if (alg->co.base.cra_type != &crypto_skcipher_type)
 		walk->stride = alg->co.chunksize;
@@ -334,10 +331,11 @@ int skcipher_walk_virt(struct skcipher_walk *walk,
 }
 EXPORT_SYMBOL_GPL(skcipher_walk_virt);
 
-static int skcipher_walk_aead_common(struct skcipher_walk *walk,
-				     struct aead_request *req, bool atomic)
+static int skcipher_walk_aead_common(struct skcipher_walk *__restrict walk,
+				     struct aead_request *__restrict req,
+				     bool atomic)
 {
-	const struct aead_alg *alg = crypto_aead_alg(crypto_aead_reqtfm(req));
+	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 
 	walk->nbytes = 0;
 	walk->iv = req->iv;
@@ -353,30 +351,27 @@ static int skcipher_walk_aead_common(struct skcipher_walk *walk,
 	scatterwalk_start_at_pos(&walk->in, req->src, req->assoclen);
 	scatterwalk_start_at_pos(&walk->out, req->dst, req->assoclen);
 
-	/*
-	 * Accessing 'alg' directly generates better code than using the
-	 * crypto_aead_blocksize() and similar helper functions here, as it
-	 * prevents the algorithm pointer from being repeatedly reloaded.
-	 */
-	walk->blocksize = alg->base.cra_blocksize;
-	walk->stride = alg->chunksize;
-	walk->ivsize = alg->ivsize;
-	walk->alignmask = alg->base.cra_alignmask;
+	walk->blocksize = crypto_aead_blocksize(tfm);
+	walk->stride = crypto_aead_chunksize(tfm);
+	walk->ivsize = crypto_aead_ivsize(tfm);
+	walk->alignmask = crypto_aead_alignmask(tfm);
 
 	return skcipher_walk_first(walk);
 }
 
-int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
-			       struct aead_request *req, bool atomic)
+int skcipher_walk_aead_encrypt(struct skcipher_walk *__restrict walk,
+			       struct aead_request *__restrict req,
+			       bool atomic)
 {
 	walk->total = req->cryptlen;
 
 	return skcipher_walk_aead_common(walk, req, atomic);
 }
 EXPORT_SYMBOL_GPL(skcipher_walk_aead_encrypt);
 
-int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
-			       struct aead_request *req, bool atomic)
+int skcipher_walk_aead_decrypt(struct skcipher_walk *__restrict walk,
+			       struct aead_request *__restrict req,
+			       bool atomic)
 {
 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 
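A side note on the first hunk: the 'alg' assignment now sits below might_sleep_if() because that function contains a compiler barrier. Here is a rough user-space sketch of why, with barrier() written out as the usual inline-asm memory clobber and hypothetical struct names: a value fetched from memory before a barrier cannot be merged with the same fetch after it, so placing the load above the call forces a second load.

#define barrier() __asm__ __volatile__("" ::: "memory")

struct alg { unsigned int blocksize, ivsize; };
struct tfm { struct alg *alg; };

/* Load placed before the barrier: tfm->alg is fetched once here,
 * and because the barrier clobbers memory, it must be fetched
 * again for any use of tfm->alg after the barrier. */
unsigned int before(struct tfm *tfm)
{
        unsigned int bs = tfm->alg->blocksize;

        barrier();                      /* stand-in for might_sleep_if() */
        return bs + tfm->alg->ivsize;   /* tfm->alg reloaded here */
}

/* Load placed after the barrier: both uses share a single fetch. */
unsigned int after(struct tfm *tfm)
{
        barrier();
        return tfm->alg->blocksize + tfm->alg->ivsize;
}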
include/crypto/internal/skcipher.h (+8 −6)
@@ -197,13 +197,15 @@ int lskcipher_register_instance(struct crypto_template *tmpl,
 			       struct lskcipher_instance *inst);
 
 int skcipher_walk_done(struct skcipher_walk *walk, int res);
-int skcipher_walk_virt(struct skcipher_walk *walk,
-		       struct skcipher_request *req,
+int skcipher_walk_virt(struct skcipher_walk *__restrict walk,
+		       struct skcipher_request *__restrict req,
 		       bool atomic);
-int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
-			       struct aead_request *req, bool atomic);
-int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
-			       struct aead_request *req, bool atomic);
+int skcipher_walk_aead_encrypt(struct skcipher_walk *__restrict walk,
+			       struct aead_request *__restrict req,
+			       bool atomic);
+int skcipher_walk_aead_decrypt(struct skcipher_walk *__restrict walk,
+			       struct aead_request *__restrict req,
+			       bool atomic);
 
 static inline void skcipher_walk_abort(struct skcipher_walk *walk)
 {
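For context on the prototypes above: a typical caller drives the walk in a loop, processing walk.nbytes bytes per step and telling skcipher_walk_done() how many bytes were left unprocessed. A sketch of the common in-tree pattern follows; toy_ecb_encrypt and the elided cipher step are hypothetical, and field names reflect the usual struct skcipher_walk layout.

#include <crypto/internal/skcipher.h>

/* Hypothetical driver loop showing the usual shape of a walk. */
static int toy_ecb_encrypt(struct skcipher_request *req)
{
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) != 0) {
                /* Only whole blocks can be processed in this chunk. */
                nbytes -= nbytes % walk.blocksize;

                /* ... encrypt nbytes bytes from walk.src.virt.addr
                 * into walk.dst.virt.addr ... */

                /* Report the bytes left unprocessed in this chunk. */
                err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        }

        return err;
}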
