LCOV - code coverage report
Current view: top level - crypto - skcipher.c (source / functions)
Test: landlock.info
Date: 2021-04-22 12:43:58

              Hit   Total   Coverage
Lines:         18     492      3.7 %
Functions:      2      40      5.0 %

          Line data    Source code
       1             : // SPDX-License-Identifier: GPL-2.0-or-later
       2             : /*
       3             :  * Symmetric key cipher operations.
       4             :  *
       5             :  * Generic encrypt/decrypt wrapper for ciphers, handles operations across
       6             :  * multiple page boundaries by using temporary blocks.  In user context,
       7             :  * the kernel is given a chance to schedule us once per page.
       8             :  *
       9             :  * Copyright (c) 2015 Herbert Xu <herbert@gondor.apana.org.au>
      10             :  */
      11             : 
      12             : #include <crypto/internal/aead.h>
      13             : #include <crypto/internal/cipher.h>
      14             : #include <crypto/internal/skcipher.h>
      15             : #include <crypto/scatterwalk.h>
      16             : #include <linux/bug.h>
      17             : #include <linux/cryptouser.h>
      18             : #include <linux/compiler.h>
      19             : #include <linux/list.h>
      20             : #include <linux/module.h>
      21             : #include <linux/rtnetlink.h>
      22             : #include <linux/seq_file.h>
      23             : #include <net/netlink.h>
      24             : 
      25             : #include "internal.h"
      26             : 
      27             : enum {
      28             :         SKCIPHER_WALK_PHYS = 1 << 0,
      29             :         SKCIPHER_WALK_SLOW = 1 << 1,
      30             :         SKCIPHER_WALK_COPY = 1 << 2,
      31             :         SKCIPHER_WALK_DIFF = 1 << 3,
      32             :         SKCIPHER_WALK_SLEEP = 1 << 4,
      33             : };
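                     :
                     : /*
                     :  * Rough meaning of the flags above, as used by the walk code below:
                     :  * PHYS  - physical-address walk (see skcipher_walk_async()); writes
                     :  *         are queued on walk->buffers and flushed later by
                     :  *         skcipher_walk_complete().
                     :  * SLOW  - the current chunk is bounced through an aligned temporary
                     :  *         buffer (see skcipher_next_slow()).
                     :  * COPY  - the chunk is copied through walk->page to satisfy the
                     :  *         algorithm's alignmask (see skcipher_next_copy()).
                     :  * DIFF  - source and destination sit on different pages and need
                     :  *         separate mappings (see skcipher_next_fast()).
                     :  * SLEEP - the caller allows sleeping, so GFP_KERNEL allocations and
                     :  *         crypto_yield() rescheduling may be used.
                     :  */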
      34             : 
      35             : struct skcipher_walk_buffer {
      36             :         struct list_head entry;
      37             :         struct scatter_walk dst;
      38             :         unsigned int len;
      39             :         u8 *data;
      40             :         u8 buffer[];
      41             : };
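                     :
                     : /*
                     :  * Each instance records one deferred write on a physical walk: @len
                     :  * bytes at @data (or in the trailing @buffer when @data is NULL)
                     :  * still have to be copied out to @dst, which happens in
                     :  * skcipher_walk_complete() once the async operation has finished.
                     :  */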
      42             : 
      43             : static int skcipher_walk_next(struct skcipher_walk *walk);
      44             : 
      45           0 : static inline void skcipher_unmap(struct scatter_walk *walk, void *vaddr)
      46             : {
      47           0 :         if (PageHighMem(scatterwalk_page(walk)))
      48           0 :                 kunmap_atomic(vaddr);
      49             : }
      50             : 
      51           0 : static inline void *skcipher_map(struct scatter_walk *walk)
      52             : {
      53           0 :         struct page *page = scatterwalk_page(walk);
      54             : 
      55           0 :         return (PageHighMem(page) ? kmap_atomic(page) : page_address(page)) +
      56           0 :                offset_in_page(walk->offset);
      57             : }
      58             : 
      59           0 : static inline void skcipher_map_src(struct skcipher_walk *walk)
      60             : {
      61           0 :         walk->src.virt.addr = skcipher_map(&walk->in);
      62             : }
      63             : 
      64           0 : static inline void skcipher_map_dst(struct skcipher_walk *walk)
      65             : {
      66           0 :         walk->dst.virt.addr = skcipher_map(&walk->out);
      67           0 : }
      68             : 
      69           0 : static inline void skcipher_unmap_src(struct skcipher_walk *walk)
      70             : {
      71           0 :         skcipher_unmap(&walk->in, walk->src.virt.addr);
      72             : }
      73             : 
      74           0 : static inline void skcipher_unmap_dst(struct skcipher_walk *walk)
      75             : {
      76           0 :         skcipher_unmap(&walk->out, walk->dst.virt.addr);
      77             : }
      78             : 
      79           0 : static inline gfp_t skcipher_walk_gfp(struct skcipher_walk *walk)
      80             : {
      81           0 :         return walk->flags & SKCIPHER_WALK_SLEEP ? GFP_KERNEL : GFP_ATOMIC;
      82             : }
      83             : 
      84             : /* Get a spot of the specified length that does not straddle a page.
      85             :  * The caller needs to ensure that there is enough space for this operation.
      86             :  */
      87           0 : static inline u8 *skcipher_get_spot(u8 *start, unsigned int len)
      88             : {
      89           0 :         u8 *end_page = (u8 *)(((unsigned long)(start + len - 1)) & PAGE_MASK);
      90             : 
      91           0 :         return max(start, end_page);
      92             : }
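                     :
                     : /*
                     :  * Worked example with hypothetical numbers (PAGE_SIZE == 4096):
                     :  * for start == 0x2ff8 and len == 16, start + len - 1 == 0x3007, so
                     :  * end_page == 0x3000 and max() moves the spot up to the page
                     :  * boundary it would otherwise straddle.  If start + len - 1 stays
                     :  * on start's page, end_page <= start and start is returned
                     :  * unchanged.
                     :  */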
      93             : 
      94           0 : static int skcipher_done_slow(struct skcipher_walk *walk, unsigned int bsize)
      95             : {
      96           0 :         u8 *addr;
      97             : 
      98           0 :         addr = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1);
      99           0 :         addr = skcipher_get_spot(addr, bsize);
     100           0 :         scatterwalk_copychunks(addr, &walk->out, bsize,
     101           0 :                                (walk->flags & SKCIPHER_WALK_PHYS) ? 2 : 1);
     102           0 :         return 0;
     103             : }
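                     :
                     : /*
                     :  * Note: with out == 1 this copies the processed bytes out of the
                     :  * bounce buffer into the destination scatterlist; with out == 2
                     :  * (physical walk) scatterwalk_copychunks() only advances past the
                     :  * region, since the actual write-back was already queued for
                     :  * skcipher_walk_complete() by skcipher_next_slow().
                     :  */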
     104             : 
     105           0 : int skcipher_walk_done(struct skcipher_walk *walk, int err)
     106             : {
     107           0 :         unsigned int n = walk->nbytes;
     108           0 :         unsigned int nbytes = 0;
     109             : 
     110           0 :         if (!n)
     111           0 :                 goto finish;
     112             : 
     113           0 :         if (likely(err >= 0)) {
     114           0 :                 n -= err;
     115           0 :                 nbytes = walk->total - n;
     116             :         }
     117             : 
     118           0 :         if (likely(!(walk->flags & (SKCIPHER_WALK_PHYS |
     119             :                                     SKCIPHER_WALK_SLOW |
     120             :                                     SKCIPHER_WALK_COPY |
     121             :                                     SKCIPHER_WALK_DIFF)))) {
     122           0 : unmap_src:
     123           0 :                 skcipher_unmap_src(walk);
     124           0 :         } else if (walk->flags & SKCIPHER_WALK_DIFF) {
     125           0 :                 skcipher_unmap_dst(walk);
     126           0 :                 goto unmap_src;
     127           0 :         } else if (walk->flags & SKCIPHER_WALK_COPY) {
     128           0 :                 skcipher_map_dst(walk);
     129           0 :                 memcpy(walk->dst.virt.addr, walk->page, n);
     130           0 :                 skcipher_unmap_dst(walk);
     131           0 :         } else if (unlikely(walk->flags & SKCIPHER_WALK_SLOW)) {
     132           0 :                 if (err > 0) {
     133             :                         /*
     134             :                          * Didn't process all bytes.  Either the algorithm is
     135             :                          * broken, or this was the last step and it turned out
     136             :                          * the message wasn't evenly divisible into blocks but
     137             :                          * the algorithm requires it.
     138             :                          */
     139             :                         err = -EINVAL;
     140             :                         nbytes = 0;
     141             :                 } else
     142           0 :                         n = skcipher_done_slow(walk, n);
     143             :         }
     144             : 
     145           0 :         if (err > 0)
     146             :                 err = 0;
     147             : 
     148           0 :         walk->total = nbytes;
     149           0 :         walk->nbytes = 0;
     150             : 
     151           0 :         scatterwalk_advance(&walk->in, n);
     152           0 :         scatterwalk_advance(&walk->out, n);
     153           0 :         scatterwalk_done(&walk->in, 0, nbytes);
     154           0 :         scatterwalk_done(&walk->out, 1, nbytes);
     155             : 
     156           0 :         if (nbytes) {
     157           0 :                 crypto_yield(walk->flags & SKCIPHER_WALK_SLEEP ?
     158           0 :                              CRYPTO_TFM_REQ_MAY_SLEEP : 0);
     159           0 :                 return skcipher_walk_next(walk);
     160             :         }
     161             : 
     162           0 : finish:
     163             :         /* Short-circuit for the common/fast path. */
     164           0 :         if (!((unsigned long)walk->buffer | (unsigned long)walk->page))
     165           0 :                 goto out;
     166             : 
     167           0 :         if (walk->flags & SKCIPHER_WALK_PHYS)
     168           0 :                 goto out;
     169             : 
     170           0 :         if (walk->iv != walk->oiv)
     171           0 :                 memcpy(walk->oiv, walk->iv, walk->ivsize);
     172           0 :         if (walk->buffer != walk->page)
     173           0 :                 kfree(walk->buffer);
     174           0 :         if (walk->page)
     175           0 :                 free_page((unsigned long)walk->page);
     176             : 
     177           0 : out:
     178             :         return err;
     179             : }
     180             : EXPORT_SYMBOL_GPL(skcipher_walk_done);
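                     :
                     : /*
                     :  * Convention for the err argument above: a non-negative err is the
                     :  * number of bytes of the last chunk that were left unprocessed
                     :  * (usually walk->nbytes % blocksize, 0 when everything was
                     :  * consumed); a negative err aborts the walk and is passed through.
                     :  */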
     181             : 
     182           0 : void skcipher_walk_complete(struct skcipher_walk *walk, int err)
     183             : {
     184           0 :         struct skcipher_walk_buffer *p, *tmp;
     185             : 
     186           0 :         list_for_each_entry_safe(p, tmp, &walk->buffers, entry) {
     187           0 :                 u8 *data;
     188             : 
     189           0 :                 if (err)
     190           0 :                         goto done;
     191             : 
     192           0 :                 data = p->data;
     193           0 :                 if (!data) {
     194           0 :                         data = PTR_ALIGN(&p->buffer[0], walk->alignmask + 1);
     195           0 :                         data = skcipher_get_spot(data, walk->stride);
     196             :                 }
     197             : 
     198           0 :                 scatterwalk_copychunks(data, &p->dst, p->len, 1);
     199             : 
     200           0 :                 if (offset_in_page(p->data) + p->len + walk->stride >
     201             :                     PAGE_SIZE)
     202           0 :                         free_page((unsigned long)p->data);
     203             : 
     204           0 : done:
     205           0 :                 list_del(&p->entry);
     206           0 :                 kfree(p);
     207             :         }
     208             : 
     209           0 :         if (!err && walk->iv != walk->oiv)
     210           0 :                 memcpy(walk->oiv, walk->iv, walk->ivsize);
     211           0 :         if (walk->buffer != walk->page)
     212           0 :                 kfree(walk->buffer);
     213           0 :         if (walk->page)
     214           0 :                 free_page((unsigned long)walk->page);
     215           0 : }
     216             : EXPORT_SYMBOL_GPL(skcipher_walk_complete);
     217             : 
     218           0 : static void skcipher_queue_write(struct skcipher_walk *walk,
     219             :                                  struct skcipher_walk_buffer *p)
     220             : {
     221           0 :         p->dst = walk->out;
     222           0 :         list_add_tail(&p->entry, &walk->buffers);
     223             : }
     224             : 
     225           0 : static int skcipher_next_slow(struct skcipher_walk *walk, unsigned int bsize)
     226             : {
     227           0 :         bool phys = walk->flags & SKCIPHER_WALK_PHYS;
     228           0 :         unsigned alignmask = walk->alignmask;
     229           0 :         struct skcipher_walk_buffer *p;
     230           0 :         unsigned a;
     231           0 :         unsigned n;
     232           0 :         u8 *buffer;
     233           0 :         void *v;
     234             : 
     235           0 :         if (!phys) {
     236           0 :                 if (!walk->buffer)
     237           0 :                         walk->buffer = walk->page;
     238           0 :                 buffer = walk->buffer;
     239           0 :                 if (buffer)
     240           0 :                         goto ok;
     241             :         }
     242             : 
     243             :         /* Start with the minimum alignment of kmalloc. */
     244           0 :         a = crypto_tfm_ctx_alignment() - 1;
     245           0 :         n = bsize;
     246             : 
     247           0 :         if (phys) {
     248             :                 /* Calculate the minimum alignment of p->buffer. */
     249           0 :                 a &= (sizeof(*p) ^ (sizeof(*p) - 1)) >> 1;
     250           0 :                 n += sizeof(*p);
     251             :         }
     252             : 
     253             :         /* Minimum size to align p->buffer by alignmask. */
     254           0 :         n += alignmask & ~a;
     255             : 
     256             :         /* Minimum size to ensure p->buffer does not straddle a page. */
     257           0 :         n += (bsize - 1) & ~(alignmask | a);
     258             : 
     259           0 :         v = kzalloc(n, skcipher_walk_gfp(walk));
     260           0 :         if (!v)
     261           0 :                 return skcipher_walk_done(walk, -ENOMEM);
     262             : 
     263           0 :         if (phys) {
     264           0 :                 p = v;
     265           0 :                 p->len = bsize;
     266           0 :                 skcipher_queue_write(walk, p);
     267           0 :                 buffer = p->buffer;
     268             :         } else {
     269           0 :                 walk->buffer = v;
     270           0 :                 buffer = v;
     271             :         }
     272             : 
     273           0 : ok:
     274           0 :         walk->dst.virt.addr = PTR_ALIGN(buffer, alignmask + 1);
     275           0 :         walk->dst.virt.addr = skcipher_get_spot(walk->dst.virt.addr, bsize);
     276           0 :         walk->src.virt.addr = walk->dst.virt.addr;
     277             : 
     278           0 :         scatterwalk_copychunks(walk->src.virt.addr, &walk->in, bsize, 0);
     279             : 
     280           0 :         walk->nbytes = bsize;
     281           0 :         walk->flags |= SKCIPHER_WALK_SLOW;
     282             : 
     283           0 :         return 0;
     284             : }
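                     :
                     : /*
                     :  * Sizing sketch with hypothetical numbers: bsize == 64,
                     :  * alignmask == 15, crypto_tfm_ctx_alignment() == 8, phys == false.
                     :  * Then a == 7 and n == 64 + (15 & ~7) + (63 & ~15) == 64 + 8 + 48:
                     :  * 8 spare bytes let PTR_ALIGN() reach a 16-byte boundary from an
                     :  * 8-byte-aligned allocation, and 48 more let skcipher_get_spot()
                     :  * shift the aligned 64-byte spot forward to a page boundary (the
                     :  * shift is a multiple of 16 and at most bsize - 1, i.e. <= 48).
                     :  */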
     285             : 
     286           0 : static int skcipher_next_copy(struct skcipher_walk *walk)
     287             : {
     288           0 :         struct skcipher_walk_buffer *p;
     289           0 :         u8 *tmp = walk->page;
     290             : 
     291           0 :         skcipher_map_src(walk);
     292           0 :         memcpy(tmp, walk->src.virt.addr, walk->nbytes);
     293           0 :         skcipher_unmap_src(walk);
     294             : 
     295           0 :         walk->src.virt.addr = tmp;
     296           0 :         walk->dst.virt.addr = tmp;
     297             : 
     298           0 :         if (!(walk->flags & SKCIPHER_WALK_PHYS))
     299             :                 return 0;
     300             : 
     301           0 :         p = kmalloc(sizeof(*p), skcipher_walk_gfp(walk));
     302           0 :         if (!p)
     303             :                 return -ENOMEM;
     304             : 
     305           0 :         p->data = walk->page;
     306           0 :         p->len = walk->nbytes;
     307           0 :         skcipher_queue_write(walk, p);
     308             : 
     309           0 :         if (offset_in_page(walk->page) + walk->nbytes + walk->stride >
     310             :             PAGE_SIZE)
     311           0 :                 walk->page = NULL;
     312             :         else
     313           0 :                 walk->page += walk->nbytes;
     314             : 
     315             :         return 0;
     316             : }
     317             : 
     318           0 : static int skcipher_next_fast(struct skcipher_walk *walk)
     319             : {
     320           0 :         unsigned long diff;
     321             : 
     322           0 :         walk->src.phys.page = scatterwalk_page(&walk->in);
     323           0 :         walk->src.phys.offset = offset_in_page(walk->in.offset);
     324           0 :         walk->dst.phys.page = scatterwalk_page(&walk->out);
     325           0 :         walk->dst.phys.offset = offset_in_page(walk->out.offset);
     326             : 
     327           0 :         if (walk->flags & SKCIPHER_WALK_PHYS)
     328             :                 return 0;
     329             : 
     330           0 :         diff = walk->src.phys.offset - walk->dst.phys.offset;
     331           0 :         diff |= walk->src.virt.page - walk->dst.virt.page;
     332             : 
     333           0 :         skcipher_map_src(walk);
     334           0 :         walk->dst.virt.addr = walk->src.virt.addr;
     335             : 
     336           0 :         if (diff) {
     337           0 :                 walk->flags |= SKCIPHER_WALK_DIFF;
     338           0 :                 skcipher_map_dst(walk);
     339             :         }
     340             : 
     341             :         return 0;
     342             : }
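                     :
                     : /*
                     :  * diff above is zero only when source and destination start at the
                     :  * same offset of the same page, i.e. the operation is exactly in
                     :  * place; only then can a single mapping serve both sides.  Any
                     :  * other layout sets SKCIPHER_WALK_DIFF so that the destination
                     :  * gets its own mapping.
                     :  */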
     343             : 
     344           0 : static int skcipher_walk_next(struct skcipher_walk *walk)
     345             : {
     346           0 :         unsigned int bsize;
     347           0 :         unsigned int n;
     348           0 :         int err;
     349             : 
     350           0 :         walk->flags &= ~(SKCIPHER_WALK_SLOW | SKCIPHER_WALK_COPY |
     351             :                          SKCIPHER_WALK_DIFF);
     352             : 
     353           0 :         n = walk->total;
     354           0 :         bsize = min(walk->stride, max(n, walk->blocksize));
     355           0 :         n = scatterwalk_clamp(&walk->in, n);
     356           0 :         n = scatterwalk_clamp(&walk->out, n);
     357             : 
     358           0 :         if (unlikely(n < bsize)) {
     359           0 :                 if (unlikely(walk->total < walk->blocksize))
     360           0 :                         return skcipher_walk_done(walk, -EINVAL);
     361             : 
     362           0 : slow_path:
     363           0 :                 err = skcipher_next_slow(walk, bsize);
     364           0 :                 goto set_phys_lowmem;
     365             :         }
     366             : 
     367           0 :         if (unlikely((walk->in.offset | walk->out.offset) & walk->alignmask)) {
     368           0 :                 if (!walk->page) {
     369           0 :                         gfp_t gfp = skcipher_walk_gfp(walk);
     370             : 
     371           0 :                         walk->page = (void *)__get_free_page(gfp);
     372           0 :                         if (!walk->page)
     373           0 :                                 goto slow_path;
     374             :                 }
     375             : 
     376           0 :                 walk->nbytes = min_t(unsigned, n,
     377             :                                      PAGE_SIZE - offset_in_page(walk->page));
     378           0 :                 walk->flags |= SKCIPHER_WALK_COPY;
     379           0 :                 err = skcipher_next_copy(walk);
     380           0 :                 goto set_phys_lowmem;
     381             :         }
     382             : 
     383           0 :         walk->nbytes = n;
     384             : 
     385           0 :         return skcipher_next_fast(walk);
     386             : 
     387           0 : set_phys_lowmem:
     388           0 :         if (!err && (walk->flags & SKCIPHER_WALK_PHYS)) {
     389           0 :                 walk->src.phys.page = virt_to_page(walk->src.virt.addr);
     390           0 :                 walk->dst.phys.page = virt_to_page(walk->dst.virt.addr);
     391           0 :                 walk->src.phys.offset &= PAGE_SIZE - 1;
     392           0 :                 walk->dst.phys.offset &= PAGE_SIZE - 1;
     393             :         }
     394             :         return err;
     395             : }
     396             : 
     397           0 : static int skcipher_copy_iv(struct skcipher_walk *walk)
     398             : {
     399           0 :         unsigned a = crypto_tfm_ctx_alignment() - 1;
     400           0 :         unsigned alignmask = walk->alignmask;
     401           0 :         unsigned ivsize = walk->ivsize;
     402           0 :         unsigned bs = walk->stride;
     403           0 :         unsigned aligned_bs;
     404           0 :         unsigned size;
     405           0 :         u8 *iv;
     406             : 
     407           0 :         aligned_bs = ALIGN(bs, alignmask + 1);
     408             : 
     409             :         /* Minimum size to align buffer by alignmask. */
     410           0 :         size = alignmask & ~a;
     411             : 
     412           0 :         if (walk->flags & SKCIPHER_WALK_PHYS)
     413           0 :                 size += ivsize;
     414             :         else {
     415           0 :                 size += aligned_bs + ivsize;
     416             : 
     417             :                 /* Minimum size to ensure buffer does not straddle a page. */
     418           0 :                 size += (bs - 1) & ~(alignmask | a);
     419             :         }
     420             : 
     421           0 :         walk->buffer = kmalloc(size, skcipher_walk_gfp(walk));
     422           0 :         if (!walk->buffer)
     423             :                 return -ENOMEM;
     424             : 
     425           0 :         iv = PTR_ALIGN(walk->buffer, alignmask + 1);
     426           0 :         iv = skcipher_get_spot(iv, bs) + aligned_bs;
     427             : 
     428           0 :         walk->iv = memcpy(iv, walk->iv, walk->ivsize);
     429           0 :         return 0;
     430             : }
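                     :
                     : /*
                     :  * The sizing above mirrors skcipher_next_slow(): kmalloc() only
                     :  * guarantees crypto_tfm_ctx_alignment(), so extra slack is reserved
                     :  * to realign the buffer to alignmask + 1 and, on the virtual path,
                     :  * to keep a stride-sized block from straddling a page; the IV copy
                     :  * is then placed directly behind that aligned block.
                     :  */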
     431             : 
     432           0 : static int skcipher_walk_first(struct skcipher_walk *walk)
     433             : {
     434           0 :         if (WARN_ON_ONCE(in_irq()))
     435             :                 return -EDEADLK;
     436             : 
     437           0 :         walk->buffer = NULL;
     438           0 :         if (unlikely(((unsigned long)walk->iv & walk->alignmask))) {
     439           0 :                 int err = skcipher_copy_iv(walk);
     440           0 :                 if (err)
     441             :                         return err;
     442             :         }
     443             : 
     444           0 :         walk->page = NULL;
     445             : 
     446           0 :         return skcipher_walk_next(walk);
     447             : }
     448             : 
     449           0 : static int skcipher_walk_skcipher(struct skcipher_walk *walk,
     450             :                                   struct skcipher_request *req)
     451             : {
     452           0 :         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
     453             : 
     454           0 :         walk->total = req->cryptlen;
     455           0 :         walk->nbytes = 0;
     456           0 :         walk->iv = req->iv;
     457           0 :         walk->oiv = req->iv;
     458             : 
     459           0 :         if (unlikely(!walk->total))
     460             :                 return 0;
     461             : 
     462           0 :         scatterwalk_start(&walk->in, req->src);
     463           0 :         scatterwalk_start(&walk->out, req->dst);
     464             : 
     465           0 :         walk->flags &= ~SKCIPHER_WALK_SLEEP;
     466           0 :         walk->flags |= req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
     467           0 :                        SKCIPHER_WALK_SLEEP : 0;
     468             : 
     469           0 :         walk->blocksize = crypto_skcipher_blocksize(tfm);
     470           0 :         walk->stride = crypto_skcipher_walksize(tfm);
     471           0 :         walk->ivsize = crypto_skcipher_ivsize(tfm);
     472           0 :         walk->alignmask = crypto_skcipher_alignmask(tfm);
     473             : 
     474           0 :         return skcipher_walk_first(walk);
     475             : }
     476             : 
     477           0 : int skcipher_walk_virt(struct skcipher_walk *walk,
     478             :                        struct skcipher_request *req, bool atomic)
     479             : {
     480           0 :         int err;
     481             : 
     482           0 :         might_sleep_if(req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
     483             : 
     484           0 :         walk->flags &= ~SKCIPHER_WALK_PHYS;
     485             : 
     486           0 :         err = skcipher_walk_skcipher(walk, req);
     487             : 
     488           0 :         walk->flags &= atomic ? ~SKCIPHER_WALK_SLEEP : ~0;
     489             : 
     490           0 :         return err;
     491             : }
     492             : EXPORT_SYMBOL_GPL(skcipher_walk_virt);
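                     :
                     : /*
                     :  * Typical use of the virtual walk, sketched with invented names
                     :  * (my_crypt_blocks and MY_BLOCK_SIZE are placeholders; in-tree
                     :  * callers follow the same shape):
                     :  *
                     :  *      err = skcipher_walk_virt(&walk, req, false);
                     :  *      while (walk.nbytes) {
                     :  *              unsigned int n = walk.nbytes -
                     :  *                               (walk.nbytes % MY_BLOCK_SIZE);
                     :  *
                     :  *              my_crypt_blocks(ctx, walk.dst.virt.addr,
                     :  *                              walk.src.virt.addr, n);
                     :  *              err = skcipher_walk_done(&walk, walk.nbytes - n);
                     :  *      }
                     :  *      return err;
                     :  */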
     493             : 
     494           0 : int skcipher_walk_async(struct skcipher_walk *walk,
     495             :                         struct skcipher_request *req)
     496             : {
     497           0 :         walk->flags |= SKCIPHER_WALK_PHYS;
     498             : 
     499           0 :         INIT_LIST_HEAD(&walk->buffers);
     500             : 
     501           0 :         return skcipher_walk_skcipher(walk, req);
     502             : }
     503             : EXPORT_SYMBOL_GPL(skcipher_walk_async);
     504             : 
     505           0 : static int skcipher_walk_aead_common(struct skcipher_walk *walk,
     506             :                                      struct aead_request *req, bool atomic)
     507             : {
     508           0 :         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
     509           0 :         int err;
     510             : 
     511           0 :         walk->nbytes = 0;
     512           0 :         walk->iv = req->iv;
     513           0 :         walk->oiv = req->iv;
     514             : 
     515           0 :         if (unlikely(!walk->total))
     516             :                 return 0;
     517             : 
     518           0 :         walk->flags &= ~SKCIPHER_WALK_PHYS;
     519             : 
     520           0 :         scatterwalk_start(&walk->in, req->src);
     521           0 :         scatterwalk_start(&walk->out, req->dst);
     522             : 
     523           0 :         scatterwalk_copychunks(NULL, &walk->in, req->assoclen, 2);
     524           0 :         scatterwalk_copychunks(NULL, &walk->out, req->assoclen, 2);
     525             : 
     526           0 :         scatterwalk_done(&walk->in, 0, walk->total);
     527           0 :         scatterwalk_done(&walk->out, 0, walk->total);
     528             : 
     529           0 :         if (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP)
     530           0 :                 walk->flags |= SKCIPHER_WALK_SLEEP;
     531             :         else
     532           0 :                 walk->flags &= ~SKCIPHER_WALK_SLEEP;
     533             : 
     534           0 :         walk->blocksize = crypto_aead_blocksize(tfm);
     535           0 :         walk->stride = crypto_aead_chunksize(tfm);
     536           0 :         walk->ivsize = crypto_aead_ivsize(tfm);
     537           0 :         walk->alignmask = crypto_aead_alignmask(tfm);
     538             : 
     539           0 :         err = skcipher_walk_first(walk);
     540             : 
     541           0 :         if (atomic)
     542           0 :                 walk->flags &= ~SKCIPHER_WALK_SLEEP;
     543             : 
     544             :         return err;
     545             : }
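                     :
                     : /*
                     :  * The two scatterwalk_copychunks(NULL, ..., req->assoclen, 2)
                     :  * calls above copy nothing: with out == 2 they merely advance both
                     :  * walks past the associated data, so the walk that follows covers
                     :  * only the ciphertext/plaintext region of the AEAD request.
                     :  */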
     546             : 
     547           0 : int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
     548             :                                struct aead_request *req, bool atomic)
     549             : {
     550           0 :         walk->total = req->cryptlen;
     551             : 
     552           0 :         return skcipher_walk_aead_common(walk, req, atomic);
     553             : }
     554             : EXPORT_SYMBOL_GPL(skcipher_walk_aead_encrypt);
     555             : 
     556           0 : int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
     557             :                                struct aead_request *req, bool atomic)
     558             : {
     559           0 :         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
     560             : 
     561           0 :         walk->total = req->cryptlen - crypto_aead_authsize(tfm);
     562             : 
     563           0 :         return skcipher_walk_aead_common(walk, req, atomic);
     564             : }
     565             : EXPORT_SYMBOL_GPL(skcipher_walk_aead_decrypt);
     566             : 
     567           0 : static void skcipher_set_needkey(struct crypto_skcipher *tfm)
     568             : {
     569           0 :         if (crypto_skcipher_max_keysize(tfm) != 0)
     570           0 :                 crypto_skcipher_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
     571             : }
     572             : 
     573           0 : static int skcipher_setkey_unaligned(struct crypto_skcipher *tfm,
     574             :                                      const u8 *key, unsigned int keylen)
     575             : {
     576           0 :         unsigned long alignmask = crypto_skcipher_alignmask(tfm);
     577           0 :         struct skcipher_alg *cipher = crypto_skcipher_alg(tfm);
     578           0 :         u8 *buffer, *alignbuffer;
     579           0 :         unsigned long absize;
     580           0 :         int ret;
     581             : 
     582           0 :         absize = keylen + alignmask;
     583           0 :         buffer = kmalloc(absize, GFP_ATOMIC);
     584           0 :         if (!buffer)
     585             :                 return -ENOMEM;
     586             : 
     587           0 :         alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
     588           0 :         memcpy(alignbuffer, key, keylen);
     589           0 :         ret = cipher->setkey(tfm, alignbuffer, keylen);
     590           0 :         kfree_sensitive(buffer);
     591           0 :         return ret;
     592             : }
     593             : 
     594           0 : int crypto_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
     595             :                            unsigned int keylen)
     596             : {
     597           0 :         struct skcipher_alg *cipher = crypto_skcipher_alg(tfm);
     598           0 :         unsigned long alignmask = crypto_skcipher_alignmask(tfm);
     599           0 :         int err;
     600             : 
     601           0 :         if (keylen < cipher->min_keysize || keylen > cipher->max_keysize)
     602             :                 return -EINVAL;
     603             : 
     604           0 :         if ((unsigned long)key & alignmask)
     605           0 :                 err = skcipher_setkey_unaligned(tfm, key, keylen);
     606             :         else
     607           0 :                 err = cipher->setkey(tfm, key, keylen);
     608             : 
     609           0 :         if (unlikely(err)) {
     610           0 :                 skcipher_set_needkey(tfm);
     611           0 :                 return err;
     612             :         }
     613             : 
     614           0 :         crypto_skcipher_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
     615           0 :         return 0;
     616             : }
     617             : EXPORT_SYMBOL_GPL(crypto_skcipher_setkey);
     618             : 
     619           0 : int crypto_skcipher_encrypt(struct skcipher_request *req)
     620             : {
     621           0 :         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
     622           0 :         struct crypto_alg *alg = tfm->base.__crt_alg;
     623           0 :         unsigned int cryptlen = req->cryptlen;
     624           0 :         int ret;
     625             : 
     626           0 :         crypto_stats_get(alg);
     627           0 :         if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
     628             :                 ret = -ENOKEY;
     629             :         else
     630           0 :                 ret = crypto_skcipher_alg(tfm)->encrypt(req);
     631           0 :         crypto_stats_skcipher_encrypt(cryptlen, ret, alg);
     632           0 :         return ret;
     633             : }
     634             : EXPORT_SYMBOL_GPL(crypto_skcipher_encrypt);
     635             : 
     636           0 : int crypto_skcipher_decrypt(struct skcipher_request *req)
     637             : {
     638           0 :         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
     639           0 :         struct crypto_alg *alg = tfm->base.__crt_alg;
     640           0 :         unsigned int cryptlen = req->cryptlen;
     641           0 :         int ret;
     642             : 
     643           0 :         crypto_stats_get(alg);
     644           0 :         if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
     645             :                 ret = -ENOKEY;
     646             :         else
     647           0 :                 ret = crypto_skcipher_alg(tfm)->decrypt(req);
     648           0 :         crypto_stats_skcipher_decrypt(cryptlen, ret, alg);
     649           0 :         return ret;
     650             : }
     651             : EXPORT_SYMBOL_GPL(crypto_skcipher_decrypt);
     652             : 
     653           0 : static void crypto_skcipher_exit_tfm(struct crypto_tfm *tfm)
     654             : {
     655           0 :         struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
     656           0 :         struct skcipher_alg *alg = crypto_skcipher_alg(skcipher);
     657             : 
     658           0 :         alg->exit(skcipher);
     659           0 : }
     660             : 
     661           0 : static int crypto_skcipher_init_tfm(struct crypto_tfm *tfm)
     662             : {
     663           0 :         struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
     664           0 :         struct skcipher_alg *alg = crypto_skcipher_alg(skcipher);
     665             : 
     666           0 :         skcipher_set_needkey(skcipher);
     667             : 
     668           0 :         if (alg->exit)
     669           0 :                 skcipher->base.exit = crypto_skcipher_exit_tfm;
     670             : 
     671           0 :         if (alg->init)
     672           0 :                 return alg->init(skcipher);
     673             : 
     674             :         return 0;
     675             : }
     676             : 
     677           0 : static void crypto_skcipher_free_instance(struct crypto_instance *inst)
     678             : {
     679           0 :         struct skcipher_instance *skcipher =
     680           0 :                 container_of(inst, struct skcipher_instance, s.base);
     681             : 
     682           0 :         skcipher->free(skcipher);
     683           0 : }
     684             : 
     685             : static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg)
     686             :         __maybe_unused;
     687           0 : static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg)
     688             : {
     689           0 :         struct skcipher_alg *skcipher = container_of(alg, struct skcipher_alg,
     690             :                                                      base);
     691             : 
     692           0 :         seq_printf(m, "type         : skcipher\n");
     693           0 :         seq_printf(m, "async        : %s\n",
     694           0 :                    alg->cra_flags & CRYPTO_ALG_ASYNC ?  "yes" : "no");
     695           0 :         seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
     696           0 :         seq_printf(m, "min keysize  : %u\n", skcipher->min_keysize);
     697           0 :         seq_printf(m, "max keysize  : %u\n", skcipher->max_keysize);
     698           0 :         seq_printf(m, "ivsize       : %u\n", skcipher->ivsize);
     699           0 :         seq_printf(m, "chunksize    : %u\n", skcipher->chunksize);
     700           0 :         seq_printf(m, "walksize     : %u\n", skcipher->walksize);
     701           0 : }
     702             : 
     703             : #ifdef CONFIG_NET
     704           0 : static int crypto_skcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
     705             : {
     706           0 :         struct crypto_report_blkcipher rblkcipher;
     707           0 :         struct skcipher_alg *skcipher = container_of(alg, struct skcipher_alg,
     708             :                                                      base);
     709             : 
     710           0 :         memset(&rblkcipher, 0, sizeof(rblkcipher));
     711             : 
     712           0 :         strscpy(rblkcipher.type, "skcipher", sizeof(rblkcipher.type));
     713           0 :         strscpy(rblkcipher.geniv, "<none>", sizeof(rblkcipher.geniv));
     714             : 
     715           0 :         rblkcipher.blocksize = alg->cra_blocksize;
     716           0 :         rblkcipher.min_keysize = skcipher->min_keysize;
     717           0 :         rblkcipher.max_keysize = skcipher->max_keysize;
     718           0 :         rblkcipher.ivsize = skcipher->ivsize;
     719             : 
     720           0 :         return nla_put(skb, CRYPTOCFGA_REPORT_BLKCIPHER,
     721             :                        sizeof(rblkcipher), &rblkcipher);
     722             : }
     723             : #else
     724             : static int crypto_skcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
     725             : {
     726             :         return -ENOSYS;
     727             : }
     728             : #endif
     729             : 
     730             : static const struct crypto_type crypto_skcipher_type = {
     731             :         .extsize = crypto_alg_extsize,
     732             :         .init_tfm = crypto_skcipher_init_tfm,
     733             :         .free = crypto_skcipher_free_instance,
     734             : #ifdef CONFIG_PROC_FS
     735             :         .show = crypto_skcipher_show,
     736             : #endif
     737             :         .report = crypto_skcipher_report,
     738             :         .maskclear = ~CRYPTO_ALG_TYPE_MASK,
     739             :         .maskset = CRYPTO_ALG_TYPE_MASK,
     740             :         .type = CRYPTO_ALG_TYPE_SKCIPHER,
     741             :         .tfmsize = offsetof(struct crypto_skcipher, base),
     742             : };
     743             : 
     744           0 : int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
     745             :                          struct crypto_instance *inst,
     746             :                          const char *name, u32 type, u32 mask)
     747             : {
     748           0 :         spawn->base.frontend = &crypto_skcipher_type;
     749           0 :         return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
     750             : }
     751             : EXPORT_SYMBOL_GPL(crypto_grab_skcipher);
     752             : 
     753           0 : struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
     754             :                                               u32 type, u32 mask)
     755             : {
     756           0 :         return crypto_alloc_tfm(alg_name, &crypto_skcipher_type, type, mask);
     757             : }
     758             : EXPORT_SYMBOL_GPL(crypto_alloc_skcipher);
     759             : 
     760           0 : struct crypto_sync_skcipher *crypto_alloc_sync_skcipher(
     761             :                                 const char *alg_name, u32 type, u32 mask)
     762             : {
     763           0 :         struct crypto_skcipher *tfm;
     764             : 
     765             :         /* Only sync algorithms allowed. */
     766           0 :         mask |= CRYPTO_ALG_ASYNC;
     767             : 
     768           0 :         tfm = crypto_alloc_tfm(alg_name, &crypto_skcipher_type, type, mask);
     769             : 
     770             :         /*
     771             :          * Make sure we do not allocate something that might get used with
     772             :          * an on-stack request: check the request size.
     773             :          */
     774           0 :         if (!IS_ERR(tfm) && WARN_ON(crypto_skcipher_reqsize(tfm) >
     775             :                                     MAX_SYNC_SKCIPHER_REQSIZE)) {
     776           0 :                 crypto_free_skcipher(tfm);
     777           0 :                 return ERR_PTR(-EINVAL);
     778             :         }
     779             : 
     780             :         return (struct crypto_sync_skcipher *)tfm;
     781             : }
     782             : EXPORT_SYMBOL_GPL(crypto_alloc_sync_skcipher);
     783             : 
     784           0 : int crypto_has_skcipher(const char *alg_name, u32 type, u32 mask)
     785             : {
     786           0 :         return crypto_type_has_alg(alg_name, &crypto_skcipher_type, type, mask);
     787             : }
     788             : EXPORT_SYMBOL_GPL(crypto_has_skcipher);
     789             : 
     790           1 : static int skcipher_prepare_alg(struct skcipher_alg *alg)
     791             : {
     792           1 :         struct crypto_alg *base = &alg->base;
     793             : 
     794           1 :         if (alg->ivsize > PAGE_SIZE / 8 || alg->chunksize > PAGE_SIZE / 8 ||
     795           1 :             alg->walksize > PAGE_SIZE / 8)
     796             :                 return -EINVAL;
     797             : 
     798           1 :         if (!alg->chunksize)
     799           1 :                 alg->chunksize = base->cra_blocksize;
     800           1 :         if (!alg->walksize)
     801           1 :                 alg->walksize = alg->chunksize;
     802             : 
     803           1 :         base->cra_type = &crypto_skcipher_type;
     804           1 :         base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
     805           1 :         base->cra_flags |= CRYPTO_ALG_TYPE_SKCIPHER;
     806             : 
     807           1 :         return 0;
     808             : }
     809             : 
     810           1 : int crypto_register_skcipher(struct skcipher_alg *alg)
     811             : {
     812           1 :         struct crypto_alg *base = &alg->base;
     813           1 :         int err;
     814             : 
     815           1 :         err = skcipher_prepare_alg(alg);
     816           1 :         if (err)
     817             :                 return err;
     818             : 
     819           1 :         return crypto_register_alg(base);
     820             : }
     821             : EXPORT_SYMBOL_GPL(crypto_register_skcipher);
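                     :
                     : /*
                     :  * Registration normally happens from module init; a minimal sketch
                     :  * with invented values (my_setkey/my_encrypt/my_decrypt and the
                     :  * sizes are placeholders, not from this file):
                     :  *
                     :  *      static struct skcipher_alg my_alg = {
                     :  *              .base.cra_name          = "xyz(generic)",
                     :  *              .base.cra_driver_name   = "xyz-generic",
                     :  *              .base.cra_priority      = 100,
                     :  *              .base.cra_blocksize     = 16,
                     :  *              .base.cra_ctxsize       = sizeof(struct my_ctx),
                     :  *              .base.cra_module        = THIS_MODULE,
                     :  *              .min_keysize            = 16,
                     :  *              .max_keysize            = 32,
                     :  *              .ivsize                 = 16,
                     :  *              .setkey                 = my_setkey,
                     :  *              .encrypt                = my_encrypt,
                     :  *              .decrypt                = my_decrypt,
                     :  *      };
                     :  *
                     :  * paired with crypto_unregister_skcipher(&my_alg) on module exit.
                     :  */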
     822             : 
     823           0 : void crypto_unregister_skcipher(struct skcipher_alg *alg)
     824             : {
     825           0 :         crypto_unregister_alg(&alg->base);
     826           0 : }
     827             : EXPORT_SYMBOL_GPL(crypto_unregister_skcipher);
     828             : 
     829           0 : int crypto_register_skciphers(struct skcipher_alg *algs, int count)
     830             : {
     831           0 :         int i, ret;
     832             : 
     833           0 :         for (i = 0; i < count; i++) {
     834           0 :                 ret = crypto_register_skcipher(&algs[i]);
     835           0 :                 if (ret)
     836           0 :                         goto err;
     837             :         }
     838             : 
     839             :         return 0;
     840             : 
     841           0 : err:
     842           0 :         for (--i; i >= 0; --i)
     843           0 :                 crypto_unregister_skcipher(&algs[i]);
     844             : 
     845             :         return ret;
     846             : }
     847             : EXPORT_SYMBOL_GPL(crypto_register_skciphers);
     848             : 
     849           0 : void crypto_unregister_skciphers(struct skcipher_alg *algs, int count)
     850             : {
     851           0 :         int i;
     852             : 
     853           0 :         for (i = count - 1; i >= 0; --i)
     854           0 :                 crypto_unregister_skcipher(&algs[i]);
     855           0 : }
     856             : EXPORT_SYMBOL_GPL(crypto_unregister_skciphers);
     857             : 
     858           0 : int skcipher_register_instance(struct crypto_template *tmpl,
      859             :                                struct skcipher_instance *inst)
     860             : {
     861           0 :         int err;
     862             : 
     863           0 :         if (WARN_ON(!inst->free))
     864             :                 return -EINVAL;
     865             : 
     866           0 :         err = skcipher_prepare_alg(&inst->alg);
     867           0 :         if (err)
     868             :                 return err;
     869             : 
     870           0 :         return crypto_register_instance(tmpl, skcipher_crypto_instance(inst));
     871             : }
     872             : EXPORT_SYMBOL_GPL(skcipher_register_instance);
     873             : 
     874           0 : static int skcipher_setkey_simple(struct crypto_skcipher *tfm, const u8 *key,
     875             :                                   unsigned int keylen)
     876             : {
     877           0 :         struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
     878             : 
     879           0 :         crypto_cipher_clear_flags(cipher, CRYPTO_TFM_REQ_MASK);
     880           0 :         crypto_cipher_set_flags(cipher, crypto_skcipher_get_flags(tfm) &
     881             :                                 CRYPTO_TFM_REQ_MASK);
     882           0 :         return crypto_cipher_setkey(cipher, key, keylen);
     883             : }
     884             : 
     885           0 : static int skcipher_init_tfm_simple(struct crypto_skcipher *tfm)
     886             : {
     887           0 :         struct skcipher_instance *inst = skcipher_alg_instance(tfm);
     888           0 :         struct crypto_cipher_spawn *spawn = skcipher_instance_ctx(inst);
     889           0 :         struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);
     890           0 :         struct crypto_cipher *cipher;
     891             : 
     892           0 :         cipher = crypto_spawn_cipher(spawn);
     893           0 :         if (IS_ERR(cipher))
     894           0 :                 return PTR_ERR(cipher);
     895             : 
     896           0 :         ctx->cipher = cipher;
     897           0 :         return 0;
     898             : }
     899             : 
     900           0 : static void skcipher_exit_tfm_simple(struct crypto_skcipher *tfm)
     901             : {
     902           0 :         struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);
     903             : 
     904           0 :         crypto_free_cipher(ctx->cipher);
     905           0 : }
     906             : 
     907           0 : static void skcipher_free_instance_simple(struct skcipher_instance *inst)
     908             : {
     909           0 :         crypto_drop_cipher(skcipher_instance_ctx(inst));
     910           0 :         kfree(inst);
     911           0 : }
     912             : 
     913             : /**
     914             :  * skcipher_alloc_instance_simple - allocate instance of simple block cipher mode
     915             :  *
     916             :  * Allocate an skcipher_instance for a simple block cipher mode of operation,
     917             :  * e.g. cbc or ecb.  The instance context will have just a single crypto_spawn,
     918             :  * that for the underlying cipher.  The {min,max}_keysize, ivsize, blocksize,
     919             :  * alignmask, and priority are set from the underlying cipher but can be
     920             :  * overridden if needed.  The tfm context defaults to skcipher_ctx_simple, and
     921             :  * default ->setkey(), ->init(), and ->exit() methods are installed.
     922             :  *
     923             :  * @tmpl: the template being instantiated
     924             :  * @tb: the template parameters
     925             :  *
     926             :  * Return: a pointer to the new instance, or an ERR_PTR().  The caller still
     927             :  *         needs to register the instance.
     928             :  */
     929           0 : struct skcipher_instance *skcipher_alloc_instance_simple(
     930             :         struct crypto_template *tmpl, struct rtattr **tb)
     931             : {
     932           0 :         u32 mask;
     933           0 :         struct skcipher_instance *inst;
     934           0 :         struct crypto_cipher_spawn *spawn;
     935           0 :         struct crypto_alg *cipher_alg;
     936           0 :         int err;
     937             : 
     938           0 :         err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
     939           0 :         if (err)
     940           0 :                 return ERR_PTR(err);
     941             : 
     942           0 :         inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
     943           0 :         if (!inst)
     944           0 :                 return ERR_PTR(-ENOMEM);
     945           0 :         spawn = skcipher_instance_ctx(inst);
     946             : 
     947           0 :         err = crypto_grab_cipher(spawn, skcipher_crypto_instance(inst),
     948             :                                  crypto_attr_alg_name(tb[1]), 0, mask);
     949           0 :         if (err)
     950           0 :                 goto err_free_inst;
     951           0 :         cipher_alg = crypto_spawn_cipher_alg(spawn);
     952             : 
     953           0 :         err = crypto_inst_setname(skcipher_crypto_instance(inst), tmpl->name,
     954             :                                   cipher_alg);
     955           0 :         if (err)
     956           0 :                 goto err_free_inst;
     957             : 
     958           0 :         inst->free = skcipher_free_instance_simple;
     959             : 
     960             :         /* Default algorithm properties, can be overridden */
     961           0 :         inst->alg.base.cra_blocksize = cipher_alg->cra_blocksize;
     962           0 :         inst->alg.base.cra_alignmask = cipher_alg->cra_alignmask;
     963           0 :         inst->alg.base.cra_priority = cipher_alg->cra_priority;
     964           0 :         inst->alg.min_keysize = cipher_alg->cra_cipher.cia_min_keysize;
     965           0 :         inst->alg.max_keysize = cipher_alg->cra_cipher.cia_max_keysize;
     966           0 :         inst->alg.ivsize = cipher_alg->cra_blocksize;
     967             : 
     968             :         /* Use skcipher_ctx_simple by default, can be overridden */
     969           0 :         inst->alg.base.cra_ctxsize = sizeof(struct skcipher_ctx_simple);
     970           0 :         inst->alg.setkey = skcipher_setkey_simple;
     971           0 :         inst->alg.init = skcipher_init_tfm_simple;
     972           0 :         inst->alg.exit = skcipher_exit_tfm_simple;
     973             : 
     974           0 :         return inst;
     975             : 
     976           0 : err_free_inst:
     977           0 :         skcipher_free_instance_simple(inst);
     978           0 :         return ERR_PTR(err);
     979             : }
     980             : EXPORT_SYMBOL_GPL(skcipher_alloc_instance_simple);
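                     :
                     : /*
                     :  * A template's create() hook would use this roughly as the in-tree
                     :  * ecb template does (my_encrypt/my_decrypt are placeholders):
                     :  *
                     :  *      static int my_create(struct crypto_template *tmpl,
                     :  *                           struct rtattr **tb)
                     :  *      {
                     :  *              struct skcipher_instance *inst;
                     :  *              int err;
                     :  *
                     :  *              inst = skcipher_alloc_instance_simple(tmpl, tb);
                     :  *              if (IS_ERR(inst))
                     :  *                      return PTR_ERR(inst);
                     :  *
                     :  *              inst->alg.encrypt = my_encrypt;
                     :  *              inst->alg.decrypt = my_decrypt;
                     :  *
                     :  *              err = skcipher_register_instance(tmpl, inst);
                     :  *              if (err)
                     :  *                      inst->free(inst);
                     :  *              return err;
                     :  *      }
                     :  */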
     981             : 
     982             : MODULE_LICENSE("GPL");
     983             : MODULE_DESCRIPTION("Symmetric key cipher type");
     984             : MODULE_IMPORT_NS(CRYPTO_INTERNAL);

Generated by: LCOV version 1.14