linux/crypto/cryptd.c
/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/cryptd.h>
#include <crypto/crypto_wq.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>

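/* Upper bound on the number of requests sitting in each per-CPU queue. */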
#define CRYPTD_MAX_CPU_QLEN 1000

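/*
 * One queue per possible CPU, each with its own work item: requests are
 * queued on the submitting CPU and processed by the kcrypto_wq worker on
 * that same CPU.
 */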
struct cryptd_cpu_queue {
        struct crypto_queue queue;
        struct work_struct work;
};

struct cryptd_queue {
        struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
        struct crypto_spawn spawn;
        struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
        struct crypto_shash_spawn spawn;
        struct cryptd_queue *queue;
};

struct aead_instance_ctx {
        struct crypto_aead_spawn aead_spawn;
        struct cryptd_queue *queue;
};

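/*
 * For tfms obtained through the cryptd_alloc_*() helpers, refcnt counts
 * the user's handle plus any requests still in flight; the tfm is only
 * freed once both the user and the last queued request have dropped
 * their reference.
 */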
struct cryptd_blkcipher_ctx {
        atomic_t refcnt;
        struct crypto_blkcipher *child;
};

struct cryptd_blkcipher_request_ctx {
        crypto_completion_t complete;
};

struct cryptd_hash_ctx {
        atomic_t refcnt;
        struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
        crypto_completion_t complete;
        struct shash_desc desc;
};

struct cryptd_aead_ctx {
        atomic_t refcnt;
        struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
        crypto_completion_t complete;
};

static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
                             unsigned int max_cpu_qlen)
{
        int cpu;
        struct cryptd_cpu_queue *cpu_queue;

        queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
        if (!queue->cpu_queue)
                return -ENOMEM;
        for_each_possible_cpu(cpu) {
                cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
                crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
                INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
        }
        return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
        int cpu;
        struct cryptd_cpu_queue *cpu_queue;

        for_each_possible_cpu(cpu) {
                cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
                BUG_ON(cpu_queue->queue.qlen);
        }
        free_percpu(queue->cpu_queue);
}

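/*
 * Queue a request on the submitting CPU's queue and kick that CPU's
 * worker.  -EBUSY without CRYPTO_TFM_REQ_MAY_BACKLOG is a hard failure
 * and the request is not scheduled; with the flag set the request has
 * been placed on the backlog and will be processed later.  The refcnt
 * bump keeps tfms obtained via the cryptd_alloc_*() helpers alive while
 * requests are in flight.
 */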
static int cryptd_enqueue_request(struct cryptd_queue *queue,
                                  struct crypto_async_request *request)
{
        int cpu, err;
        struct cryptd_cpu_queue *cpu_queue;
        atomic_t *refcnt;
        bool may_backlog;

        cpu = get_cpu();
        cpu_queue = this_cpu_ptr(queue->cpu_queue);
        err = crypto_enqueue_request(&cpu_queue->queue, request);

        refcnt = crypto_tfm_ctx(request->tfm);
        may_backlog = request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG;

        if (err == -EBUSY && !may_backlog)
                goto out_put_cpu;

        queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);

        if (!atomic_read(refcnt))
                goto out_put_cpu;

        atomic_inc(refcnt);

out_put_cpu:
        put_cpu();

        return err;
}

/*
 * Called in workqueue context: do one unit of real crypto work (via
 * req->complete) and reschedule itself if there is more work to do.
 */
static void cryptd_queue_worker(struct work_struct *work)
{
        struct cryptd_cpu_queue *cpu_queue;
        struct crypto_async_request *req, *backlog;

        cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
        /*
         * Only handle one request at a time to avoid hogging crypto workqueue.
         * preempt_disable/enable is used to prevent being preempted by
         * cryptd_enqueue_request(). local_bh_disable/enable is used to prevent
         * cryptd_enqueue_request() being accessed from software interrupts.
         */
        local_bh_disable();
        preempt_disable();
        backlog = crypto_get_backlog(&cpu_queue->queue);
        req = crypto_dequeue_request(&cpu_queue->queue);
        preempt_enable();
        local_bh_enable();

        if (!req)
                return;

        if (backlog)
                backlog->complete(backlog, -EINPROGRESS);
        req->complete(req, 0);

        if (cpu_queue->queue.qlen)
                queue_work(kcrypto_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
        struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
        struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
        return ictx->queue;
}

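/*
 * Propagate CRYPTO_ALG_INTERNAL from the template parameters: internal
 * algorithms are hidden from general users, and a cryptd instance built
 * on one must stay internal too.
 */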
static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
                                         u32 *mask)
{
        struct crypto_attr_type *algt;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return;

        *type |= algt->type & CRYPTO_ALG_INTERNAL;
        *mask |= algt->mask & CRYPTO_ALG_INTERNAL;
}

static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
                                   const u8 *key, unsigned int keylen)
{
        struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
        struct crypto_blkcipher *child = ctx->child;
        int err;

        crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
                                          CRYPTO_TFM_REQ_MASK);
        err = crypto_blkcipher_setkey(child, key, keylen);
        crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
                                            CRYPTO_TFM_RES_MASK);
        return err;
}

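/*
 * Run the real (synchronous) blkcipher operation.  An err of -EINPROGRESS
 * means this call is only the backlog notification, so just the completion
 * callback is invoked.  The callback runs under local_bh_disable() to
 * match the softirq context that async completions normally execute in.
 */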
static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
                                   struct crypto_blkcipher *child,
                                   int err,
                                   int (*crypt)(struct blkcipher_desc *desc,
                                                struct scatterlist *dst,
                                                struct scatterlist *src,
                                                unsigned int len))
{
        struct cryptd_blkcipher_request_ctx *rctx;
        struct cryptd_blkcipher_ctx *ctx;
        struct crypto_ablkcipher *tfm;
        struct blkcipher_desc desc;
        int refcnt;

        rctx = ablkcipher_request_ctx(req);

        if (unlikely(err == -EINPROGRESS))
                goto out;

        desc.tfm = child;
        desc.info = req->info;
        desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

        err = crypt(&desc, req->dst, req->src, req->nbytes);

        req->base.complete = rctx->complete;

out:
        tfm = crypto_ablkcipher_reqtfm(req);
        ctx = crypto_ablkcipher_ctx(tfm);
        refcnt = atomic_read(&ctx->refcnt);

        local_bh_disable();
        rctx->complete(&req->base, err);
        local_bh_enable();

        if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
                crypto_free_ablkcipher(tfm);
}

static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
{
        struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
        struct crypto_blkcipher *child = ctx->child;

        cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
                               crypto_blkcipher_crt(child)->encrypt);
}

static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
{
        struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
        struct crypto_blkcipher *child = ctx->child;

        cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
                               crypto_blkcipher_crt(child)->decrypt);
}

static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
                                    crypto_completion_t compl)
{
        struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct cryptd_queue *queue;

        queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
        rctx->complete = req->base.complete;
        req->base.complete = compl;

        return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
{
        return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
}

static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
{
        return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
}

static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
        struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
        struct crypto_spawn *spawn = &ictx->spawn;
        struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_blkcipher *cipher;

        cipher = crypto_spawn_blkcipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ctx->child = cipher;
        tfm->crt_ablkcipher.reqsize =
                sizeof(struct cryptd_blkcipher_request_ctx);
        return 0;
}

static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
{
        struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);

        crypto_free_blkcipher(ctx->child);
}

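/*
 * Fill in the common algorithm fields for a cryptd instance: the driver
 * name becomes "cryptd(<child driver name>)", and the priority is raised
 * by 50 so the async wrapper is preferred over the plain child algorithm.
 */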
static int cryptd_init_instance(struct crypto_instance *inst,
                                struct crypto_alg *alg)
{
        if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
                     "cryptd(%s)",
                     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

        inst->alg.cra_priority = alg->cra_priority + 50;
        inst->alg.cra_blocksize = alg->cra_blocksize;
        inst->alg.cra_alignmask = alg->cra_alignmask;

        return 0;
}

static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
                                   unsigned int tail)
{
        char *p;
        struct crypto_instance *inst;
        int err;

        p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
        if (!p)
                return ERR_PTR(-ENOMEM);

        inst = (void *)(p + head);

        err = cryptd_init_instance(inst, alg);
        if (err)
                goto out_free_inst;

out:
        return p;

out_free_inst:
        kfree(p);
        p = ERR_PTR(err);
        goto out;
}

static int cryptd_create_blkcipher(struct crypto_template *tmpl,
                                   struct rtattr **tb,
                                   struct cryptd_queue *queue)
{
        struct cryptd_instance_ctx *ctx;
        struct crypto_instance *inst;
        struct crypto_alg *alg;
        u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
        u32 mask = CRYPTO_ALG_TYPE_MASK;
        int err;

        cryptd_check_internal(tb, &type, &mask);

        alg = crypto_get_attr_alg(tb, type, mask);
        if (IS_ERR(alg))
                return PTR_ERR(alg);

        inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
        err = PTR_ERR(inst);
        if (IS_ERR(inst))
                goto out_put_alg;

        ctx = crypto_instance_ctx(inst);
        ctx->queue = queue;

        err = crypto_init_spawn(&ctx->spawn, alg, inst,
                                CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
        if (err)
                goto out_free_inst;

        type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
        if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
                type |= CRYPTO_ALG_INTERNAL;
        inst->alg.cra_flags = type;
        inst->alg.cra_type = &crypto_ablkcipher_type;

        inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
        inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
        inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;

        inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;

        inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);

        inst->alg.cra_init = cryptd_blkcipher_init_tfm;
        inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;

        inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
        inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
        inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;

        err = crypto_register_instance(tmpl, inst);
        if (err) {
                crypto_drop_spawn(&ctx->spawn);
out_free_inst:
                kfree(inst);
        }

out_put_alg:
        crypto_mod_put(alg);
        return err;
}

static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
        struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
        struct crypto_shash_spawn *spawn = &ictx->spawn;
        struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_shash *hash;

        hash = crypto_spawn_shash(spawn);
        if (IS_ERR(hash))
                return PTR_ERR(hash);

        ctx->child = hash;
        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct cryptd_hash_request_ctx) +
                                 crypto_shash_descsize(hash));
        return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
        struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

        crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
                              const u8 *key, unsigned int keylen)
{
        struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
        struct crypto_shash *child = ctx->child;
        int err;

        crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
                                      CRYPTO_TFM_REQ_MASK);
        err = crypto_shash_setkey(child, key, keylen);
        crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
                                       CRYPTO_TFM_RES_MASK);
        return err;
}

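/*
 * Common enqueue step for all hash operations: stash the caller's
 * completion callback in the request context, substitute cryptd's own
 * handler, and push the request onto this CPU's queue.
 */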
static int cryptd_hash_enqueue(struct ahash_request *req,
                               crypto_completion_t compl)
{
        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct cryptd_queue *queue =
                cryptd_get_queue(crypto_ahash_tfm(tfm));

        rctx->complete = req->base.complete;
        req->base.complete = compl;

        return cryptd_enqueue_request(queue, &req->base);
}

static void cryptd_hash_complete(struct ahash_request *req, int err)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
        int refcnt = atomic_read(&ctx->refcnt);

        local_bh_disable();
        rctx->complete(&req->base, err);
        local_bh_enable();

        if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
                crypto_free_ahash(tfm);
}

static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
        struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
        struct crypto_shash *child = ctx->child;
        struct ahash_request *req = ahash_request_cast(req_async);
        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
        struct shash_desc *desc = &rctx->desc;

        if (unlikely(err == -EINPROGRESS))
                goto out;

        desc->tfm = child;
        desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

        err = crypto_shash_init(desc);

        req->base.complete = rctx->complete;

out:
        cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
        return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
        struct ahash_request *req = ahash_request_cast(req_async);
        struct cryptd_hash_request_ctx *rctx;

        rctx = ahash_request_ctx(req);

        if (unlikely(err == -EINPROGRESS))
                goto out;

        err = shash_ahash_update(req, &rctx->desc);

        req->base.complete = rctx->complete;

out:
        cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
        return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
        struct ahash_request *req = ahash_request_cast(req_async);
        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

        if (unlikely(err == -EINPROGRESS))
                goto out;

        err = crypto_shash_final(&rctx->desc, req->result);

        req->base.complete = rctx->complete;

out:
        cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
        return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
        struct ahash_request *req = ahash_request_cast(req_async);
        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

        if (unlikely(err == -EINPROGRESS))
                goto out;

        err = shash_ahash_finup(req, &rctx->desc);

        req->base.complete = rctx->complete;

out:
        cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
        return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
        struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
        struct crypto_shash *child = ctx->child;
        struct ahash_request *req = ahash_request_cast(req_async);
        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
        struct shash_desc *desc = &rctx->desc;

        if (unlikely(err == -EINPROGRESS))
                goto out;

        desc->tfm = child;
        desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

        err = shash_ahash_digest(req, desc);

        req->base.complete = rctx->complete;

out:
        cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
        return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

        return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct shash_desc *desc = cryptd_shash_desc(req);

        desc->tfm = ctx->child;
        desc->flags = req->base.flags;

        return crypto_shash_import(desc, in);
}

static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
                              struct cryptd_queue *queue)
{
        struct hashd_instance_ctx *ctx;
        struct ahash_instance *inst;
        struct shash_alg *salg;
        struct crypto_alg *alg;
        u32 type = 0;
        u32 mask = 0;
        int err;

        cryptd_check_internal(tb, &type, &mask);

        salg = shash_attr_alg(tb[1], type, mask);
        if (IS_ERR(salg))
                return PTR_ERR(salg);

        alg = &salg->base;
        inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
                                     sizeof(*ctx));
        err = PTR_ERR(inst);
        if (IS_ERR(inst))
                goto out_put_alg;

        ctx = ahash_instance_ctx(inst);
        ctx->queue = queue;

        err = crypto_init_shash_spawn(&ctx->spawn, salg,
                                      ahash_crypto_instance(inst));
        if (err)
                goto out_free_inst;

        type = CRYPTO_ALG_ASYNC;
        if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
                type |= CRYPTO_ALG_INTERNAL;
        inst->alg.halg.base.cra_flags = type;

        inst->alg.halg.digestsize = salg->digestsize;
        inst->alg.halg.statesize = salg->statesize;
        inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

        inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
        inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

        inst->alg.init   = cryptd_hash_init_enqueue;
        inst->alg.update = cryptd_hash_update_enqueue;
        inst->alg.final  = cryptd_hash_final_enqueue;
        inst->alg.finup  = cryptd_hash_finup_enqueue;
        inst->alg.export = cryptd_hash_export;
        inst->alg.import = cryptd_hash_import;
        inst->alg.setkey = cryptd_hash_setkey;
        inst->alg.digest = cryptd_hash_digest_enqueue;

        err = ahash_register_instance(tmpl, inst);
        if (err) {
                crypto_drop_shash(&ctx->spawn);
out_free_inst:
                kfree(inst);
        }

out_put_alg:
        crypto_mod_put(alg);
        return err;
}

static int cryptd_aead_setkey(struct crypto_aead *parent,
                              const u8 *key, unsigned int keylen)
{
        struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
        struct crypto_aead *child = ctx->child;

        return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
                                   unsigned int authsize)
{
        struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
        struct crypto_aead *child = ctx->child;

        return crypto_aead_setauthsize(child, authsize);
}

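/*
 * Run the real AEAD operation on the child tfm.  The completion callback
 * is read out before aead_request_set_tfm() repoints the request at the
 * child; as with the other types, -EINPROGRESS only signals that a
 * backlogged request has been picked up.
 */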
static void cryptd_aead_crypt(struct aead_request *req,
                        struct crypto_aead *child,
                        int err,
                        int (*crypt)(struct aead_request *req))
{
        struct cryptd_aead_request_ctx *rctx;
        struct cryptd_aead_ctx *ctx;
        crypto_completion_t compl;
        struct crypto_aead *tfm;
        int refcnt;

        rctx = aead_request_ctx(req);
        compl = rctx->complete;

        tfm = crypto_aead_reqtfm(req);

        if (unlikely(err == -EINPROGRESS))
                goto out;
        aead_request_set_tfm(req, child);
        err = crypt(req);

out:
        ctx = crypto_aead_ctx(tfm);
        refcnt = atomic_read(&ctx->refcnt);

        local_bh_disable();
        compl(&req->base, err);
        local_bh_enable();

        if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
                crypto_free_aead(tfm);
}

static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
        struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
        struct crypto_aead *child = ctx->child;
        struct aead_request *req;

        req = container_of(areq, struct aead_request, base);
        cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
        struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
        struct crypto_aead *child = ctx->child;
        struct aead_request *req;

        req = container_of(areq, struct aead_request, base);
        cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
                               crypto_completion_t compl)
{
        struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

        rctx->complete = req->base.complete;
        req->base.complete = compl;
        return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
        return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
        return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
        struct aead_instance *inst = aead_alg_instance(tfm);
        struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
        struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
        struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
        struct crypto_aead *cipher;

        cipher = crypto_spawn_aead(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ctx->child = cipher;
        crypto_aead_set_reqsize(
                tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
                         crypto_aead_reqsize(cipher)));
        return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
        struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

        crypto_free_aead(ctx->child);
}

static int cryptd_create_aead(struct crypto_template *tmpl,
                              struct rtattr **tb,
                              struct cryptd_queue *queue)
{
        struct aead_instance_ctx *ctx;
        struct aead_instance *inst;
        struct aead_alg *alg;
        const char *name;
        u32 type = 0;
        u32 mask = CRYPTO_ALG_ASYNC;
        int err;

        cryptd_check_internal(tb, &type, &mask);

        name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(name))
                return PTR_ERR(name);

        inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
        if (!inst)
                return -ENOMEM;

        ctx = aead_instance_ctx(inst);
        ctx->queue = queue;

        crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
        err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
        if (err)
                goto out_free_inst;

        alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
        err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
        if (err)
                goto out_drop_aead;

        inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
                                   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
        inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

        inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
        inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

        inst->alg.init = cryptd_aead_init_tfm;
        inst->alg.exit = cryptd_aead_exit_tfm;
        inst->alg.setkey = cryptd_aead_setkey;
        inst->alg.setauthsize = cryptd_aead_setauthsize;
        inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
        inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

        err = aead_register_instance(tmpl, inst);
        if (err) {
out_drop_aead:
                crypto_drop_aead(&ctx->aead_spawn);
out_free_inst:
                kfree(inst);
        }
        return err;
}

static struct cryptd_queue queue;

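/*
 * Template entry point: dispatch on the requested algorithm type.  The
 * same "cryptd" template wraps blkciphers, hashes and AEADs, all feeding
 * the one global per-CPU queue.
 */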
static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct crypto_attr_type *algt;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_BLKCIPHER:
                return cryptd_create_blkcipher(tmpl, tb, &queue);
        case CRYPTO_ALG_TYPE_DIGEST:
                return cryptd_create_hash(tmpl, tb, &queue);
        case CRYPTO_ALG_TYPE_AEAD:
                return cryptd_create_aead(tmpl, tb, &queue);
        }

        return -EINVAL;
}

static void cryptd_free(struct crypto_instance *inst)
{
        struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
        struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
        struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);

        switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_AHASH:
                crypto_drop_shash(&hctx->spawn);
                kfree(ahash_instance(inst));
                return;
        case CRYPTO_ALG_TYPE_AEAD:
                crypto_drop_aead(&aead_ctx->aead_spawn);
                kfree(aead_instance(inst));
                return;
        default:
                crypto_drop_spawn(&ctx->spawn);
                kfree(inst);
        }
}

static struct crypto_template cryptd_tmpl = {
        .name = "cryptd",
        .create = cryptd_create,
        .free = cryptd_free,
        .module = THIS_MODULE,
};

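/*
 * A minimal usage sketch (caller-side, not part of this file): a driver
 * whose cipher must not be used directly can hand it to cryptd and keep
 * only the async handle.  The driver name below is illustrative, not a
 * name this file provides:
 *
 *      struct cryptd_ablkcipher *ctfm;
 *
 *      ctfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-example",
 *                                     CRYPTO_ALG_INTERNAL,
 *                                     CRYPTO_ALG_INTERNAL);
 *      if (IS_ERR(ctfm))
 *              return PTR_ERR(ctfm);
 *      ...
 *      cryptd_free_ablkcipher(ctfm);
 */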
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
                                                  u32 type, u32 mask)
{
        char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
        struct cryptd_blkcipher_ctx *ctx;
        struct crypto_tfm *tfm;

        if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
                     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
                return ERR_PTR(-EINVAL);
        type = crypto_skcipher_type(type);
        mask &= ~CRYPTO_ALG_TYPE_MASK;
        mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
        tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
        if (IS_ERR(tfm))
                return ERR_CAST(tfm);
        if (tfm->__crt_alg->cra_module != THIS_MODULE) {
                crypto_free_tfm(tfm);
                return ERR_PTR(-EINVAL);
        }

        ctx = crypto_tfm_ctx(tfm);
        atomic_set(&ctx->refcnt, 1);

        return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);

struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
{
        struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

        return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);

bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm)
{
        struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

        return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_queued);

void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
{
        struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

        if (atomic_dec_and_test(&ctx->refcnt))
                crypto_free_ablkcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);

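/*
 * A minimal usage sketch (caller-side, hypothetical driver code): wrap an
 * internal-only shash such as a clmulni "__ghash" in cryptd to obtain an
 * async hash that is safe to expose:
 *
 *      struct cryptd_ahash *cryptd_tfm;
 *
 *      cryptd_tfm = cryptd_alloc_ahash("__ghash", CRYPTO_ALG_INTERNAL,
 *                                      CRYPTO_ALG_INTERNAL);
 *      if (IS_ERR(cryptd_tfm))
 *              return PTR_ERR(cryptd_tfm);
 *      ...
 *      cryptd_free_ahash(cryptd_tfm);
 */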
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
                                        u32 type, u32 mask)
{
        char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
        struct cryptd_hash_ctx *ctx;
        struct crypto_ahash *tfm;

        if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
                     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
                return ERR_PTR(-EINVAL);
        tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
        if (IS_ERR(tfm))
                return ERR_CAST(tfm);
        if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
                crypto_free_ahash(tfm);
                return ERR_PTR(-EINVAL);
        }

        ctx = crypto_ahash_ctx(tfm);
        atomic_set(&ctx->refcnt, 1);

        return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);

struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
        struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

        return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

        return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
        struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

        return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
        struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

        if (atomic_dec_and_test(&ctx->refcnt))
                crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);

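/*
 * A minimal usage sketch (caller-side, hypothetical driver code): an
 * internal AEAD implementation can be wrapped the same way, with
 * cryptd_aead_child() giving direct access to the child when the caller
 * may use it synchronously.  The driver name is illustrative:
 *
 *      struct cryptd_aead *cryptd_tfm;
 *
 *      cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-example",
 *                                     CRYPTO_ALG_INTERNAL,
 *                                     CRYPTO_ALG_INTERNAL);
 *      if (IS_ERR(cryptd_tfm))
 *              return PTR_ERR(cryptd_tfm);
 *      ...
 *      cryptd_free_aead(cryptd_tfm);
 */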
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
                                      u32 type, u32 mask)
{
        char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
        struct cryptd_aead_ctx *ctx;
        struct crypto_aead *tfm;

        if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
                     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
                return ERR_PTR(-EINVAL);
        tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
        if (IS_ERR(tfm))
                return ERR_CAST(tfm);
        if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
                crypto_free_aead(tfm);
                return ERR_PTR(-EINVAL);
        }

        ctx = crypto_aead_ctx(tfm);
        atomic_set(&ctx->refcnt, 1);

        return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
        struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

        return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
        struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

        return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
        struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

        if (atomic_dec_and_test(&ctx->refcnt))
                crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);

static int __init cryptd_init(void)
{
        int err;

        err = cryptd_init_queue(&queue, CRYPTD_MAX_CPU_QLEN);
        if (err)
                return err;

        err = crypto_register_template(&cryptd_tmpl);
        if (err)
                cryptd_fini_queue(&queue);

        return err;
}

static void __exit cryptd_exit(void)
{
        cryptd_fini_queue(&queue);
        crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");