LCOV - code coverage report
Current view: top level - crypto - ahash.c (source / functions) Hit Total Coverage
Test: landlock.info Lines: 0 295 0.0 %
Date: 2021-04-22 12:43:58 Functions: 0 37 0.0 %

          Line data    Source code
       1             : // SPDX-License-Identifier: GPL-2.0-or-later
       2             : /*
       3             :  * Asynchronous Cryptographic Hash operations.
       4             :  *
       5             :  * This is the asynchronous version of hash.c with notification of
       6             :  * completion via a callback.
       7             :  *
       8             :  * Copyright (c) 2008 Loc Ho <lho@amcc.com>
       9             :  */
      10             : 
      11             : #include <crypto/internal/hash.h>
      12             : #include <crypto/scatterwalk.h>
      13             : #include <linux/err.h>
      14             : #include <linux/kernel.h>
      15             : #include <linux/module.h>
      16             : #include <linux/sched.h>
      17             : #include <linux/slab.h>
      18             : #include <linux/seq_file.h>
      19             : #include <linux/cryptouser.h>
      20             : #include <linux/compiler.h>
      21             : #include <net/netlink.h>
      22             : 
      23             : #include "internal.h"
      24             : 
      25             : static const struct crypto_type crypto_ahash_type;
      26             : 
/*
 * Per-request state saved while a request's result buffer is redirected
 * into an aligned bounce buffer (see ahash_save_req()/ahash_restore_req()).
 * Holds the ORIGINAL request fields so they can be restored on completion.
 */
struct ahash_request_priv {
        crypto_completion_t complete;   /* original completion callback */
        void *data;                     /* original callback argument */
        u8 *result;                     /* original (possibly unaligned) result buffer */
        u32 flags;                      /* original request flags */
        void *ubuf[] CRYPTO_MINALIGN_ATTR;      /* storage for the aligned bounce buffer */
};
      34             : 
      35           0 : static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash)
      36             : {
      37           0 :         return container_of(crypto_hash_alg_common(hash), struct ahash_alg,
      38             :                             halg);
      39             : }
      40             : 
/*
 * Map the walk's current page and return the number of bytes that may
 * be processed from it.  When the starting offset is misaligned for the
 * algorithm, only the bytes up to the next aligned boundary are handed
 * out; crypto_hash_walk_done() continues from the aligned offset.
 */
static int hash_walk_next(struct crypto_hash_walk *walk)
{
        unsigned int alignmask = walk->alignmask;
        unsigned int offset = walk->offset;
        /* Clamp to what remains in this sg entry and in this page. */
        unsigned int nbytes = min(walk->entrylen,
                                  ((unsigned int)(PAGE_SIZE)) - offset);

        walk->data = kmap_atomic(walk->pg);
        walk->data += offset;

        if (offset & alignmask) {
                /* Bytes until the next (alignmask + 1) boundary. */
                unsigned int unaligned = alignmask + 1 - (offset & alignmask);

                if (nbytes > unaligned)
                        nbytes = unaligned;
        }

        walk->entrylen -= nbytes;
        return nbytes;
}
      61             : 
/*
 * Begin walking a new scatterlist entry: derive its page and in-page
 * offset, bound its length by the remaining total, then map the first
 * chunk via hash_walk_next().
 */
static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
        struct scatterlist *sg;

        sg = walk->sg;
        walk->offset = sg->offset;
        /* sg->offset may exceed a page; split it into page + offset. */
        walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
        walk->offset = offset_in_page(walk->offset);
        walk->entrylen = sg->length;

        if (walk->entrylen > walk->total)
                walk->entrylen = walk->total;
        walk->total -= walk->entrylen;

        return hash_walk_next(walk);
}
      78             : 
/*
 * Complete one step of a hash walk and advance to the next chunk.
 * Returns the size of the next chunk to process, 0 when the walk is
 * finished, or a negative errno (@err from the caller is propagated).
 */
int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
        unsigned int alignmask = walk->alignmask;

        walk->data -= walk->offset;

        if (walk->entrylen && (walk->offset & alignmask) && !err) {
                /*
                 * The previous chunk was clamped at a misaligned offset
                 * by hash_walk_next().  Round the offset up to the next
                 * aligned boundary and hand out the remainder of this
                 * page without remapping it.
                 */
                unsigned int nbytes;

                walk->offset = ALIGN(walk->offset, alignmask + 1);
                nbytes = min(walk->entrylen,
                             (unsigned int)(PAGE_SIZE - walk->offset));
                if (nbytes) {
                        walk->entrylen -= nbytes;
                        walk->data += walk->offset;
                        return nbytes;
                }
        }

        /* Done with this page mapping; allow rescheduling if permitted. */
        kunmap_atomic(walk->data);
        crypto_yield(walk->flags);

        if (err)
                return err;

        if (walk->entrylen) {
                /* More data in the current sg entry: step to the next page. */
                walk->offset = 0;
                walk->pg++;
                return hash_walk_next(walk);
        }

        if (!walk->total)
                return 0;

        walk->sg = sg_next(walk->sg);

        return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_done);
     118             : 
/*
 * Start a hash walk over the source scatterlist of @req.  Returns the
 * number of bytes available in the first mapped chunk, or 0 for an
 * empty request (in which case the walk must not be continued).
 */
int crypto_hash_walk_first(struct ahash_request *req,
                           struct crypto_hash_walk *walk)
{
        walk->total = req->nbytes;

        if (!walk->total) {
                walk->entrylen = 0;
                return 0;
        }

        walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req));
        walk->sg = req->src;
        walk->flags = req->base.flags;

        return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_first);
     136             : 
/*
 * Slow path for crypto_ahash_setkey(): copy the key into a temporary
 * buffer aligned for the algorithm and run ->setkey() on the copy.
 * The buffer holds key material, so it is wiped before being freed.
 */
static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
                                unsigned int keylen)
{
        unsigned long alignmask = crypto_ahash_alignmask(tfm);
        int ret;
        u8 *buffer, *alignbuffer;
        unsigned long absize;

        /* Worst-case slack needed to realign within the allocation. */
        absize = keylen + alignmask;
        buffer = kmalloc(absize, GFP_KERNEL);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        ret = tfm->setkey(tfm, alignbuffer, keylen);
        kfree_sensitive(buffer);
        return ret;
}
     156             : 
/* Default ->setkey() for algorithms without one: always rejects. */
static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
                          unsigned int keylen)
{
        return -ENOSYS;
}
     162             : 
     163           0 : static void ahash_set_needkey(struct crypto_ahash *tfm)
     164             : {
     165           0 :         const struct hash_alg_common *alg = crypto_hash_alg_common(tfm);
     166             : 
     167           0 :         if (tfm->setkey != ahash_nosetkey &&
     168           0 :             !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
     169           0 :                 crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
     170           0 : }
     171             : 
     172           0 : int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
     173             :                         unsigned int keylen)
     174             : {
     175           0 :         unsigned long alignmask = crypto_ahash_alignmask(tfm);
     176           0 :         int err;
     177             : 
     178           0 :         if ((unsigned long)key & alignmask)
     179           0 :                 err = ahash_setkey_unaligned(tfm, key, keylen);
     180             :         else
     181           0 :                 err = tfm->setkey(tfm, key, keylen);
     182             : 
     183           0 :         if (unlikely(err)) {
     184           0 :                 ahash_set_needkey(tfm);
     185           0 :                 return err;
     186             :         }
     187             : 
     188           0 :         crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
     189           0 :         return 0;
     190             : }
     191             : EXPORT_SYMBOL_GPL(crypto_ahash_setkey);
     192             : 
     193           0 : static inline unsigned int ahash_align_buffer_size(unsigned len,
     194             :                                                    unsigned long mask)
     195             : {
     196           0 :         return len + (mask & ~(crypto_tfm_ctx_alignment() - 1));
     197             : }
     198             : 
/*
 * Redirect @req's result into a freshly allocated, algorithm-aligned
 * bounce buffer and its completion into @cplt, saving the original
 * fields in req->priv so ahash_restore_req() can undo the swap.
 * Returns 0 or -ENOMEM.
 */
static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        unsigned long alignmask = crypto_ahash_alignmask(tfm);
        unsigned int ds = crypto_ahash_digestsize(tfm);
        struct ahash_request_priv *priv;

        priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
                       (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
                       GFP_KERNEL : GFP_ATOMIC);
        if (!priv)
                return -ENOMEM;

        /*
         * WARNING: Voodoo programming below!
         *
         * The code below is obscure and hard to understand, thus explanation
         * is necessary. See include/crypto/hash.h and include/linux/crypto.h
         * to understand the layout of structures used here!
         *
         * The code here will replace portions of the ORIGINAL request with
         * pointers to new code and buffers so the hashing operation can store
         * the result in aligned buffer. We will call the modified request
         * an ADJUSTED request.
         *
         * The newly mangled request will look as such:
         *
         * req {
         *   .result        = ADJUSTED[new aligned buffer]
         *   .base.complete = ADJUSTED[pointer to completion function]
         *   .base.data     = ADJUSTED[*req (pointer to self)]
         *   .priv          = ADJUSTED[new priv] {
         *           .result   = ORIGINAL(result)
         *           .complete = ORIGINAL(base.complete)
         *           .data     = ORIGINAL(base.data)
         *   }
         */

        priv->result = req->result;
        priv->complete = req->base.complete;
        priv->data = req->base.data;
        priv->flags = req->base.flags;

        /*
         * WARNING: We do not backup req->priv here! The req->priv
         *          is for internal use of the Crypto API and the
         *          user must _NOT_ _EVER_ depend on it's content!
         */

        req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
        req->base.complete = cplt;
        req->base.data = req;
        req->priv = priv;

        return 0;
}
     255             : 
/*
 * Undo ahash_save_req(): on success copy the digest from the bounce
 * buffer into the caller's original result buffer, then restore the
 * original result pointer, callback and flags, and free the saved
 * state (wiped, since it may hold a digest).
 */
static void ahash_restore_req(struct ahash_request *req, int err)
{
        struct ahash_request_priv *priv = req->priv;

        if (!err)
                memcpy(priv->result, req->result,
                       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

        /* Restore the original crypto request. */
        req->result = priv->result;

        ahash_request_set_callback(req, priv->flags,
                                   priv->complete, priv->data);
        req->priv = NULL;

        /* Free the req->priv.priv from the ADJUSTED request. */
        kfree_sensitive(priv);
}
     274             : 
/*
 * Forward an -EINPROGRESS notification to the ORIGINAL completion
 * callback saved in req->priv.  NOTE: only ->data of the on-stack
 * async request is populated; the -EINPROGRESS path of callbacks is
 * expected not to look at other fields — TODO confirm against callers.
 */
static void ahash_notify_einprogress(struct ahash_request *req)
{
        struct ahash_request_priv *priv = req->priv;
        struct crypto_async_request oreq;

        oreq.data = priv->data;

        priv->complete(&oreq, -EINPROGRESS);
}
     284             : 
/*
 * Async completion for ahash_op_unaligned(): restores the original
 * request (copying the digest out of the bounce buffer) and then
 * invokes the original completion callback.  -EINPROGRESS is only
 * forwarded, as the operation is not finished yet.
 */
static void ahash_op_unaligned_done(struct crypto_async_request *req, int err)
{
        struct ahash_request *areq = req->data;

        if (err == -EINPROGRESS) {
                ahash_notify_einprogress(areq);
                return;
        }

        /*
         * Restore the original request, see ahash_op_unaligned() for what
         * goes where.
         *
         * The "struct ahash_request *req" here is in fact the "req.base"
         * from the ADJUSTED request from ahash_op_unaligned(), thus as it
         * is a pointer to self, it is also the ADJUSTED "req" .
         */

        /* First copy req->result into req->priv.result */
        ahash_restore_req(areq, err);

        /* Complete the ORIGINAL request. */
        areq->base.complete(&areq->base, err);
}
     309             : 
     310           0 : static int ahash_op_unaligned(struct ahash_request *req,
     311             :                               int (*op)(struct ahash_request *))
     312             : {
     313           0 :         int err;
     314             : 
     315           0 :         err = ahash_save_req(req, ahash_op_unaligned_done);
     316           0 :         if (err)
     317             :                 return err;
     318             : 
     319           0 :         err = op(req);
     320           0 :         if (err == -EINPROGRESS || err == -EBUSY)
     321             :                 return err;
     322             : 
     323           0 :         ahash_restore_req(req, err);
     324             : 
     325           0 :         return err;
     326             : }
     327             : 
     328           0 : static int crypto_ahash_op(struct ahash_request *req,
     329             :                            int (*op)(struct ahash_request *))
     330             : {
     331           0 :         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
     332           0 :         unsigned long alignmask = crypto_ahash_alignmask(tfm);
     333             : 
     334           0 :         if ((unsigned long)req->result & alignmask)
     335           0 :                 return ahash_op_unaligned(req, op);
     336             : 
     337           0 :         return op(req);
     338             : }
     339             : 
     340           0 : int crypto_ahash_final(struct ahash_request *req)
     341             : {
     342           0 :         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
     343           0 :         struct crypto_alg *alg = tfm->base.__crt_alg;
     344           0 :         unsigned int nbytes = req->nbytes;
     345           0 :         int ret;
     346             : 
     347           0 :         crypto_stats_get(alg);
     348           0 :         ret = crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final);
     349           0 :         crypto_stats_ahash_final(nbytes, ret, alg);
     350           0 :         return ret;
     351             : }
     352             : EXPORT_SYMBOL_GPL(crypto_ahash_final);
     353             : 
     354           0 : int crypto_ahash_finup(struct ahash_request *req)
     355             : {
     356           0 :         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
     357           0 :         struct crypto_alg *alg = tfm->base.__crt_alg;
     358           0 :         unsigned int nbytes = req->nbytes;
     359           0 :         int ret;
     360             : 
     361           0 :         crypto_stats_get(alg);
     362           0 :         ret = crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup);
     363           0 :         crypto_stats_ahash_final(nbytes, ret, alg);
     364           0 :         return ret;
     365             : }
     366             : EXPORT_SYMBOL_GPL(crypto_ahash_finup);
     367             : 
     368           0 : int crypto_ahash_digest(struct ahash_request *req)
     369             : {
     370           0 :         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
     371           0 :         struct crypto_alg *alg = tfm->base.__crt_alg;
     372           0 :         unsigned int nbytes = req->nbytes;
     373           0 :         int ret;
     374             : 
     375           0 :         crypto_stats_get(alg);
     376           0 :         if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
     377             :                 ret = -ENOKEY;
     378             :         else
     379           0 :                 ret = crypto_ahash_op(req, tfm->digest);
     380           0 :         crypto_stats_ahash_final(nbytes, ret, alg);
     381           0 :         return ret;
     382             : }
     383             : EXPORT_SYMBOL_GPL(crypto_ahash_digest);
     384             : 
/*
 * Async completion for the ->final() half of ahash_def_finup():
 * restore the original request and call its completion callback.
 * -EINPROGRESS is ignored here; the notification was already sent
 * from the ->update() completion path.
 */
static void ahash_def_finup_done2(struct crypto_async_request *req, int err)
{
        struct ahash_request *areq = req->data;

        if (err == -EINPROGRESS)
                return;

        ahash_restore_req(areq, err);

        areq->base.complete(&areq->base, err);
}
     396             : 
/*
 * Second half of the default finup: after ->update() has finished with
 * status @err, run ->final().  Returns -EINPROGRESS/-EBUSY with the
 * request still adjusted (ahash_def_finup_done2 will restore it), or
 * restores the request and returns the final status.
 */
static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
        if (err)
                goto out;

        /* Completion of the async final() must go through done2. */
        req->base.complete = ahash_def_finup_done2;

        err = crypto_ahash_reqtfm(req)->final(req);
        if (err == -EINPROGRESS || err == -EBUSY)
                return err;

out:
        ahash_restore_req(req, err);
        return err;
}
     412             : 
/*
 * Async completion for the ->update() half of ahash_def_finup():
 * chain into the ->final() step.  Only completes the original request
 * if finish1 restored it (req->priv cleared); otherwise completion is
 * deferred to ahash_def_finup_done2().
 */
static void ahash_def_finup_done1(struct crypto_async_request *req, int err)
{
        struct ahash_request *areq = req->data;

        if (err == -EINPROGRESS) {
                ahash_notify_einprogress(areq);
                return;
        }

        /* We are in callback context now, so sleeping is not allowed. */
        areq->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        err = ahash_def_finup_finish1(areq, err);
        if (areq->priv)
                return;

        areq->base.complete(&areq->base, err);
}
     430             : 
     431           0 : static int ahash_def_finup(struct ahash_request *req)
     432             : {
     433           0 :         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
     434           0 :         int err;
     435             : 
     436           0 :         err = ahash_save_req(req, ahash_def_finup_done1);
     437           0 :         if (err)
     438             :                 return err;
     439             : 
     440           0 :         err = tfm->update(req);
     441           0 :         if (err == -EINPROGRESS || err == -EBUSY)
     442             :                 return err;
     443             : 
     444           0 :         return ahash_def_finup_finish1(req, err);
     445             : }
     446             : 
     447           0 : static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
     448             : {
     449           0 :         struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
     450           0 :         struct ahash_alg *alg = crypto_ahash_alg(hash);
     451             : 
     452           0 :         alg->exit_tfm(hash);
     453           0 : }
     454             : 
/*
 * Initialise a freshly allocated ahash transform: wire the per-tfm
 * operation pointers from the algorithm, fall back to the shash-based
 * wrapper for non-ahash algorithm types, and arm NEED_KEY if required.
 */
static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
        struct ahash_alg *alg = crypto_ahash_alg(hash);

        hash->setkey = ahash_nosetkey;

        /* shash algorithm instantiated via the ahash interface. */
        if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
                return crypto_init_shash_ops_async(tfm);

        hash->init = alg->init;
        hash->update = alg->update;
        hash->final = alg->final;
        hash->finup = alg->finup ?: ahash_def_finup;    /* default finup */
        hash->digest = alg->digest;
        hash->export = alg->export;
        hash->import = alg->import;

        if (alg->setkey) {
                hash->setkey = alg->setkey;
                ahash_set_needkey(hash);
        }

        if (alg->exit_tfm)
                tfm->exit = crypto_ahash_exit_tfm;

        return alg->init_tfm ? alg->init_tfm(hash) : 0;
}
     483             : 
     484           0 : static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
     485             : {
     486           0 :         if (alg->cra_type != &crypto_ahash_type)
     487             :                 return sizeof(struct crypto_shash *);
     488             : 
     489           0 :         return crypto_alg_extsize(alg);
     490             : }
     491             : 
     492           0 : static void crypto_ahash_free_instance(struct crypto_instance *inst)
     493             : {
     494           0 :         struct ahash_instance *ahash = ahash_instance(inst);
     495             : 
     496           0 :         ahash->free(ahash);
     497           0 : }
     498             : 
#ifdef CONFIG_NET
/*
 * Report this algorithm's parameters over the crypto netlink user
 * interface (CRYPTO_MSG_GETALG).
 */
static int crypto_ahash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_report_hash rhash;

        /* Zero first: the struct is copied to userspace in full. */
        memset(&rhash, 0, sizeof(rhash));

        strscpy(rhash.type, "ahash", sizeof(rhash.type));

        rhash.blocksize = alg->cra_blocksize;
        rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;

        return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}
#else
/* Netlink reporting unavailable without CONFIG_NET. */
static int crypto_ahash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        return -ENOSYS;
}
#endif
     519             : 
/* /proc/crypto show handler for ahash algorithms. */
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
        __maybe_unused;
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
        seq_printf(m, "type         : ahash\n");
        seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
                                             "yes" : "no");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "digestsize   : %u\n",
                   __crypto_hash_alg_common(alg)->digestsize);
}
     531             : 
/* Frontend type descriptor registering ahash with the crypto core. */
static const struct crypto_type crypto_ahash_type = {
        .extsize = crypto_ahash_extsize,
        .init_tfm = crypto_ahash_init_tfm,
        .free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
        .show = crypto_ahash_show,
#endif
        .report = crypto_ahash_report,
        .maskclear = ~CRYPTO_ALG_TYPE_MASK,
        .maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
        .type = CRYPTO_ALG_TYPE_AHASH,
        .tfmsize = offsetof(struct crypto_ahash, base),
};
     545             : 
/*
 * Look up and grab a reference to the ahash algorithm @name on behalf
 * of template instance @inst (template-construction helper).
 */
int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
                      struct crypto_instance *inst,
                      const char *name, u32 type, u32 mask)
{
        spawn->base.frontend = &crypto_ahash_type;
        return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_ahash);
     554             : 
/*
 * Allocate an ahash transform for algorithm @alg_name.  Returns the
 * new transform or an ERR_PTR on failure.
 */
struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
                                        u32 mask)
{
        return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);
     561             : 
/* Test whether an ahash algorithm named @alg_name is available. */
int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
{
        return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_ahash);
     567             : 
     568           0 : static int ahash_prepare_alg(struct ahash_alg *alg)
     569             : {
     570           0 :         struct crypto_alg *base = &alg->halg.base;
     571             : 
     572           0 :         if (alg->halg.digestsize > HASH_MAX_DIGESTSIZE ||
     573           0 :             alg->halg.statesize > HASH_MAX_STATESIZE ||
     574             :             alg->halg.statesize == 0)
     575             :                 return -EINVAL;
     576             : 
     577           0 :         base->cra_type = &crypto_ahash_type;
     578           0 :         base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
     579           0 :         base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;
     580             : 
     581           0 :         return 0;
     582             : }
     583             : 
     584           0 : int crypto_register_ahash(struct ahash_alg *alg)
     585             : {
     586           0 :         struct crypto_alg *base = &alg->halg.base;
     587           0 :         int err;
     588             : 
     589           0 :         err = ahash_prepare_alg(alg);
     590           0 :         if (err)
     591             :                 return err;
     592             : 
     593           0 :         return crypto_register_alg(base);
     594             : }
     595             : EXPORT_SYMBOL_GPL(crypto_register_ahash);
     596             : 
/* Remove a previously registered ahash algorithm. */
void crypto_unregister_ahash(struct ahash_alg *alg)
{
        crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);
     602             : 
     603           0 : int crypto_register_ahashes(struct ahash_alg *algs, int count)
     604             : {
     605           0 :         int i, ret;
     606             : 
     607           0 :         for (i = 0; i < count; i++) {
     608           0 :                 ret = crypto_register_ahash(&algs[i]);
     609           0 :                 if (ret)
     610           0 :                         goto err;
     611             :         }
     612             : 
     613             :         return 0;
     614             : 
     615           0 : err:
     616           0 :         for (--i; i >= 0; --i)
     617           0 :                 crypto_unregister_ahash(&algs[i]);
     618             : 
     619             :         return ret;
     620             : }
     621             : EXPORT_SYMBOL_GPL(crypto_register_ahashes);
     622             : 
     623           0 : void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
     624             : {
     625           0 :         int i;
     626             : 
     627           0 :         for (i = count - 1; i >= 0; --i)
     628           0 :                 crypto_unregister_ahash(&algs[i]);
     629           0 : }
     630             : EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);
     631             : 
     632           0 : int ahash_register_instance(struct crypto_template *tmpl,
     633             :                             struct ahash_instance *inst)
     634             : {
     635           0 :         int err;
     636             : 
     637           0 :         if (WARN_ON(!inst->free))
     638             :                 return -EINVAL;
     639             : 
     640           0 :         err = ahash_prepare_alg(&inst->alg);
     641           0 :         if (err)
     642             :                 return err;
     643             : 
     644           0 :         return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
     645             : }
     646             : EXPORT_SYMBOL_GPL(ahash_register_instance);
     647             : 
     648           0 : bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
     649             : {
     650           0 :         struct crypto_alg *alg = &halg->base;
     651             : 
     652           0 :         if (alg->cra_type != &crypto_ahash_type)
     653           0 :                 return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));
     654             : 
     655           0 :         return __crypto_ahash_alg(alg)->setkey != NULL;
     656             : }
     657             : EXPORT_SYMBOL_GPL(crypto_hash_alg_has_setkey);
     658             : 
     659             : MODULE_LICENSE("GPL");
     660             : MODULE_DESCRIPTION("Asynchronous cryptographic hash type");

Generated by: LCOV version 1.14