LCOV - code coverage report
Current view: top level - mm/kasan - init.c (source / functions)
Test: landlock.info
Date: 2021-04-22 12:43:58

                  Hit    Total   Coverage
  Lines:           88      233     37.8 %
  Functions:        6       19     31.6 %

          Line data    Source code
       1             : // SPDX-License-Identifier: GPL-2.0
       2             : /*
       3             :  * This file contains KASAN shadow initialization code.
       4             :  *
       5             :  * Copyright (c) 2015 Samsung Electronics Co., Ltd.
       6             :  * Author: Andrey Ryabinin <ryabinin.a.a@gmail.com>
       7             :  */
       8             : 
       9             : #include <linux/memblock.h>
      10             : #include <linux/init.h>
      11             : #include <linux/kasan.h>
      12             : #include <linux/kernel.h>
      13             : #include <linux/mm.h>
      14             : #include <linux/pfn.h>
      15             : #include <linux/slab.h>
      16             : 
      17             : #include <asm/page.h>
      18             : #include <asm/pgalloc.h>
      19             : 
      20             : #include "kasan.h"
      21             : 
      22             : /*
      23             :  * This page serves two purposes:
       24             :  *   - It is used as early shadow memory: the entire shadow region is
       25             :  *     populated with this page before normal shadow memory can be set up.
       26             :  *   - Later it is reused as the zero shadow to cover large ranges of memory
       27             :  *     that may be accessed but are not tracked by KASAN (vmalloc/vmemmap ...).
      28             :  */
      29             : unsigned char kasan_early_shadow_page[PAGE_SIZE] __page_aligned_bss;
      30             : 
      31             : #if CONFIG_PGTABLE_LEVELS > 4
      32             : p4d_t kasan_early_shadow_p4d[MAX_PTRS_PER_P4D] __page_aligned_bss;
      33             : static inline bool kasan_p4d_table(pgd_t pgd)
      34             : {
      35             :         return pgd_page(pgd) == virt_to_page(lm_alias(kasan_early_shadow_p4d));
      36             : }
      37             : #else
      38           0 : static inline bool kasan_p4d_table(pgd_t pgd)
      39             : {
      40           0 :         return false;
      41             : }
      42             : #endif
      43             : #if CONFIG_PGTABLE_LEVELS > 3
      44             : pud_t kasan_early_shadow_pud[PTRS_PER_PUD] __page_aligned_bss;
      45           0 : static inline bool kasan_pud_table(p4d_t p4d)
      46             : {
      47           0 :         return p4d_page(p4d) == virt_to_page(lm_alias(kasan_early_shadow_pud));
      48             : }
      49             : #else
      50             : static inline bool kasan_pud_table(p4d_t p4d)
      51             : {
      52             :         return false;
      53             : }
      54             : #endif
      55             : #if CONFIG_PGTABLE_LEVELS > 2
      56             : pmd_t kasan_early_shadow_pmd[PTRS_PER_PMD] __page_aligned_bss;
      57           0 : static inline bool kasan_pmd_table(pud_t pud)
      58             : {
      59           0 :         return pud_page(pud) == virt_to_page(lm_alias(kasan_early_shadow_pmd));
      60             : }
      61             : #else
      62             : static inline bool kasan_pmd_table(pud_t pud)
      63             : {
      64             :         return false;
      65             : }
      66             : #endif
      67             : pte_t kasan_early_shadow_pte[PTRS_PER_PTE + PTE_HWTABLE_PTRS]
      68             :         __page_aligned_bss;
      69             : 
      70           0 : static inline bool kasan_pte_table(pmd_t pmd)
      71             : {
      72           0 :         return pmd_page(pmd) == virt_to_page(lm_alias(kasan_early_shadow_pte));
      73             : }
      74             : 
      75           0 : static inline bool kasan_early_shadow_page_entry(pte_t pte)
      76             : {
      77           0 :         return pte_page(pte) == virt_to_page(lm_alias(kasan_early_shadow_page));
      78             : }
      79             : 
      80           4 : static __init void *early_alloc(size_t size, int node)
      81             : {
      82           4 :         void *ptr = memblock_alloc_try_nid(size, size, __pa(MAX_DMA_ADDRESS),
      83             :                                            MEMBLOCK_ALLOC_ACCESSIBLE, node);
      84             : 
      85           4 :         if (!ptr)
      86           0 :                 panic("%s: Failed to allocate %zu bytes align=%zx nid=%d from=%llx\n",
      87           0 :                       __func__, size, size, node, (u64)__pa(MAX_DMA_ADDRESS));
      88             : 
      89           4 :         return ptr;
      90             : }
      91             : 
      92           1 : static void __ref zero_pte_populate(pmd_t *pmd, unsigned long addr,
      93             :                                 unsigned long end)
      94             : {
      95           1 :         pte_t *pte = pte_offset_kernel(pmd, addr);
      96           1 :         pte_t zero_pte;
      97             : 
      98           2 :         zero_pte = pfn_pte(PFN_DOWN(__pa_symbol(kasan_early_shadow_page)),
      99           1 :                                 PAGE_KERNEL);
     100           1 :         zero_pte = pte_wrprotect(zero_pte);
     101             : 
     102         394 :         while (addr + PAGE_SIZE <= end) {
     103         393 :                 set_pte_at(&init_mm, addr, pte, zero_pte);
     104         393 :                 addr += PAGE_SIZE;
     105         786 :                 pte = pte_offset_kernel(pmd, addr);
     106             :         }
     107           1 : }
     108             : 
     109           3 : static int __ref zero_pmd_populate(pud_t *pud, unsigned long addr,
     110             :                                 unsigned long end)
     111             : {
     112           6 :         pmd_t *pmd = pmd_offset(pud, addr);
     113         897 :         unsigned long next;
     114             : 
     115         897 :         do {
     116         897 :                 next = pmd_addr_end(addr, end);
     117             : 
     118         897 :                 if (IS_ALIGNED(addr, PMD_SIZE) && end - addr >= PMD_SIZE) {
     119        1792 :                         pmd_populate_kernel(&init_mm, pmd,
     120         896 :                                         lm_alias(kasan_early_shadow_pte));
     121         896 :                         continue;
     122             :                 }
     123             : 
     124           1 :                 if (pmd_none(*pmd)) {
     125           0 :                         pte_t *p;
     126             : 
     127           0 :                         if (slab_is_available())
     128           0 :                                 p = pte_alloc_one_kernel(&init_mm);
     129             :                         else
     130           0 :                                 p = early_alloc(PAGE_SIZE, NUMA_NO_NODE);
     131           0 :                         if (!p)
     132             :                                 return -ENOMEM;
     133             : 
     134           0 :                         pmd_populate_kernel(&init_mm, pmd, p);
     135             :                 }
     136           1 :                 zero_pte_populate(pmd, addr, next);
     137         897 :         } while (pmd++, addr = next, addr != end);
     138             : 
     139             :         return 0;
     140             : }
     141             : 
     142           6 : static int __ref zero_pud_populate(p4d_t *p4d, unsigned long addr,
     143             :                                 unsigned long end)
     144             : {
     145           6 :         pud_t *pud = pud_offset(p4d, addr);
     146        1025 :         unsigned long next;
     147             : 
     148        1025 :         do {
     149        1025 :                 next = pud_addr_end(addr, end);
     150        1025 :                 if (IS_ALIGNED(addr, PUD_SIZE) && end - addr >= PUD_SIZE) {
     151        1022 :                         pmd_t *pmd;
     152             : 
     153        2044 :                         pud_populate(&init_mm, pud,
     154        1022 :                                         lm_alias(kasan_early_shadow_pmd));
     155        1022 :                         pmd = pmd_offset(pud, addr);
     156        2044 :                         pmd_populate_kernel(&init_mm, pmd,
     157        1022 :                                         lm_alias(kasan_early_shadow_pte));
     158        1022 :                         continue;
     159             :                 }
     160             : 
     161           3 :                 if (pud_none(*pud)) {
     162           1 :                         pmd_t *p;
     163             : 
     164           1 :                         if (slab_is_available()) {
     165           0 :                                 p = pmd_alloc(&init_mm, pud, addr);
     166           0 :                                 if (!p)
     167             :                                         return -ENOMEM;
     168             :                         } else {
     169           4 :                                 pud_populate(&init_mm, pud,
     170           1 :                                         early_alloc(PAGE_SIZE, NUMA_NO_NODE));
     171             :                         }
     172             :                 }
     173           3 :                 zero_pmd_populate(pud, addr, next);
     174        1025 :         } while (pud++, addr = next, addr != end);
     175             : 
     176             :         return 0;
     177             : }
     178             : 
     179           6 : static int __ref zero_p4d_populate(pgd_t *pgd, unsigned long addr,
     180             :                                 unsigned long end)
     181             : {
     182           6 :         p4d_t *p4d = p4d_offset(pgd, addr);
     183           6 :         unsigned long next;
     184             : 
     185           6 :         do {
     186           6 :                 next = p4d_addr_end(addr, end);
     187           6 :                 if (IS_ALIGNED(addr, P4D_SIZE) && end - addr >= P4D_SIZE) {
     188           0 :                         pud_t *pud;
     189           0 :                         pmd_t *pmd;
     190             : 
     191           0 :                         p4d_populate(&init_mm, p4d,
     192           0 :                                         lm_alias(kasan_early_shadow_pud));
     193           0 :                         pud = pud_offset(p4d, addr);
     194           0 :                         pud_populate(&init_mm, pud,
     195           0 :                                         lm_alias(kasan_early_shadow_pmd));
     196           0 :                         pmd = pmd_offset(pud, addr);
     197           0 :                         pmd_populate_kernel(&init_mm, pmd,
     198           0 :                                         lm_alias(kasan_early_shadow_pte));
     199           0 :                         continue;
     200             :                 }
     201             : 
     202           6 :                 if (p4d_none(*p4d)) {
     203           3 :                         pud_t *p;
     204             : 
     205           3 :                         if (slab_is_available()) {
     206           0 :                                 p = pud_alloc(&init_mm, p4d, addr);
     207           0 :                                 if (!p)
     208             :                                         return -ENOMEM;
     209             :                         } else {
     210           9 :                                 p4d_populate(&init_mm, p4d,
     211           3 :                                         early_alloc(PAGE_SIZE, NUMA_NO_NODE));
     212             :                         }
     213             :                 }
     214           6 :                 zero_pud_populate(p4d, addr, next);
     215           6 :         } while (p4d++, addr = next, addr != end);
     216             : 
     217           6 :         return 0;
     218             : }
     219             : 
     220             : /**
     221             :  * kasan_populate_early_shadow - populate shadow memory region with
     222             :  *                               kasan_early_shadow_page
      223             :  * @shadow_start: start of the memory range to populate
      224             :  * @shadow_end:   end of the memory range to populate
     225             :  */
     226           5 : int __ref kasan_populate_early_shadow(const void *shadow_start,
     227             :                                         const void *shadow_end)
     228             : {
     229           5 :         unsigned long addr = (unsigned long)shadow_start;
     230           5 :         unsigned long end = (unsigned long)shadow_end;
     231           5 :         pgd_t *pgd = pgd_offset_k(addr);
     232          12 :         unsigned long next;
     233             : 
     234          12 :         do {
     235          12 :                 next = pgd_addr_end(addr, end);
     236             : 
     237          12 :                 if (IS_ALIGNED(addr, PGDIR_SIZE) && end - addr >= PGDIR_SIZE) {
     238           6 :                         p4d_t *p4d;
     239           6 :                         pud_t *pud;
     240           6 :                         pmd_t *pmd;
     241             : 
     242             :                         /*
      243             :                          * kasan_early_shadow_pud should already be populated
      244             :                          * with pmds at this point.
      245             :                          * The [pud,pmd]_populate*() calls below are needed
      246             :                          * only for 3- and 2-level page tables, where there
      247             :                          * are no real puds/pmds, so pgd_populate() and
      248             :                          * pud_populate() are no-ops.
     249             :                          */
     250           6 :                         pgd_populate(&init_mm, pgd,
     251             :                                         lm_alias(kasan_early_shadow_p4d));
     252           6 :                         p4d = p4d_offset(pgd, addr);
     253          12 :                         p4d_populate(&init_mm, p4d,
     254           6 :                                         lm_alias(kasan_early_shadow_pud));
     255           6 :                         pud = pud_offset(p4d, addr);
     256          12 :                         pud_populate(&init_mm, pud,
     257           6 :                                         lm_alias(kasan_early_shadow_pmd));
     258           6 :                         pmd = pmd_offset(pud, addr);
     259          12 :                         pmd_populate_kernel(&init_mm, pmd,
     260           6 :                                         lm_alias(kasan_early_shadow_pte));
     261           6 :                         continue;
     262             :                 }
     263             : 
     264           6 :                 if (pgd_none(*pgd)) {
     265             :                         p4d_t *p;
     266             : 
     267             :                         if (slab_is_available()) {
     268             :                                 p = p4d_alloc(&init_mm, pgd, addr);
     269             :                                 if (!p)
     270             :                                         return -ENOMEM;
     271             :                         } else {
     272           6 :                                 pgd_populate(&init_mm, pgd,
     273             :                                         early_alloc(PAGE_SIZE, NUMA_NO_NODE));
     274             :                         }
     275             :                 }
     276           6 :                 zero_p4d_populate(pgd, addr, next);
     277          12 :         } while (pgd++, addr = next, addr != end);
     278             : 
     279           5 :         return 0;
     280             : }
     281             : 
     282           0 : static void kasan_free_pte(pte_t *pte_start, pmd_t *pmd)
     283             : {
     284           0 :         pte_t *pte;
     285           0 :         int i;
     286             : 
     287           0 :         for (i = 0; i < PTRS_PER_PTE; i++) {
     288           0 :                 pte = pte_start + i;
     289           0 :                 if (!pte_none(*pte))
     290             :                         return;
     291             :         }
     292             : 
     293           0 :         pte_free_kernel(&init_mm, (pte_t *)page_to_virt(pmd_page(*pmd)));
     294           0 :         pmd_clear(pmd);
     295             : }
     296             : 
     297           0 : static void kasan_free_pmd(pmd_t *pmd_start, pud_t *pud)
     298             : {
     299           0 :         pmd_t *pmd;
     300           0 :         int i;
     301             : 
     302           0 :         for (i = 0; i < PTRS_PER_PMD; i++) {
     303           0 :                 pmd = pmd_start + i;
     304           0 :                 if (!pmd_none(*pmd))
     305             :                         return;
     306             :         }
     307             : 
     308           0 :         pmd_free(&init_mm, (pmd_t *)page_to_virt(pud_page(*pud)));
     309           0 :         pud_clear(pud);
     310             : }
     311             : 
     312           0 : static void kasan_free_pud(pud_t *pud_start, p4d_t *p4d)
     313             : {
     314           0 :         pud_t *pud;
     315           0 :         int i;
     316             : 
     317           0 :         for (i = 0; i < PTRS_PER_PUD; i++) {
     318           0 :                 pud = pud_start + i;
     319           0 :                 if (!pud_none(*pud))
     320             :                         return;
     321             :         }
     322             : 
     323           0 :         pud_free(&init_mm, (pud_t *)page_to_virt(p4d_page(*p4d)));
     324           0 :         p4d_clear(p4d);
     325             : }
     326             : 
     327           0 : static void kasan_free_p4d(p4d_t *p4d_start, pgd_t *pgd)
     328             : {
     329             :         p4d_t *p4d;
     330             :         int i;
     331             : 
     332             :         for (i = 0; i < PTRS_PER_P4D; i++) {
     333             :                 p4d = p4d_start + i;
     334             :                 if (!p4d_none(*p4d))
     335             :                         return;
     336             :         }
     337             : 
     338             :         p4d_free(&init_mm, (p4d_t *)page_to_virt(pgd_page(*pgd)));
     339           0 :         pgd_clear(pgd);
     340             : }
     341             : 
     342           0 : static void kasan_remove_pte_table(pte_t *pte, unsigned long addr,
     343             :                                 unsigned long end)
     344             : {
     345           0 :         unsigned long next;
     346             : 
     347           0 :         for (; addr < end; addr = next, pte++) {
     348           0 :                 next = (addr + PAGE_SIZE) & PAGE_MASK;
     349           0 :                 if (next > end)
     350             :                         next = end;
     351             : 
     352           0 :                 if (!pte_present(*pte))
     353           0 :                         continue;
     354             : 
     355           0 :                 if (WARN_ON(!kasan_early_shadow_page_entry(*pte)))
     356           0 :                         continue;
     357           0 :                 pte_clear(&init_mm, addr, pte);
     358             :         }
     359           0 : }
     360             : 
     361           0 : static void kasan_remove_pmd_table(pmd_t *pmd, unsigned long addr,
     362             :                                 unsigned long end)
     363             : {
     364           0 :         unsigned long next;
     365             : 
     366           0 :         for (; addr < end; addr = next, pmd++) {
     367           0 :                 pte_t *pte;
     368             : 
     369           0 :                 next = pmd_addr_end(addr, end);
     370             : 
     371           0 :                 if (!pmd_present(*pmd))
     372           0 :                         continue;
     373             : 
     374           0 :                 if (kasan_pte_table(*pmd)) {
     375           0 :                         if (IS_ALIGNED(addr, PMD_SIZE) &&
     376           0 :                             IS_ALIGNED(next, PMD_SIZE)) {
     377           0 :                                 pmd_clear(pmd);
     378           0 :                                 continue;
     379             :                         }
     380             :                 }
     381           0 :                 pte = pte_offset_kernel(pmd, addr);
     382           0 :                 kasan_remove_pte_table(pte, addr, next);
     383           0 :                 kasan_free_pte(pte_offset_kernel(pmd, 0), pmd);
     384             :         }
     385           0 : }
     386             : 
     387           0 : static void kasan_remove_pud_table(pud_t *pud, unsigned long addr,
     388             :                                 unsigned long end)
     389             : {
     390           0 :         unsigned long next;
     391             : 
     392           0 :         for (; addr < end; addr = next, pud++) {
     393           0 :                 pmd_t *pmd, *pmd_base;
     394             : 
     395           0 :                 next = pud_addr_end(addr, end);
     396             : 
     397           0 :                 if (!pud_present(*pud))
     398           0 :                         continue;
     399             : 
     400           0 :                 if (kasan_pmd_table(*pud)) {
     401           0 :                         if (IS_ALIGNED(addr, PUD_SIZE) &&
     402           0 :                             IS_ALIGNED(next, PUD_SIZE)) {
     403           0 :                                 pud_clear(pud);
     404           0 :                                 continue;
     405             :                         }
     406             :                 }
     407           0 :                 pmd = pmd_offset(pud, addr);
     408           0 :                 pmd_base = pmd_offset(pud, 0);
     409           0 :                 kasan_remove_pmd_table(pmd, addr, next);
     410           0 :                 kasan_free_pmd(pmd_base, pud);
     411             :         }
     412           0 : }
     413             : 
     414           0 : static void kasan_remove_p4d_table(p4d_t *p4d, unsigned long addr,
     415             :                                 unsigned long end)
     416             : {
     417           0 :         unsigned long next;
     418             : 
     419           0 :         for (; addr < end; addr = next, p4d++) {
     420           0 :                 pud_t *pud;
     421             : 
     422           0 :                 next = p4d_addr_end(addr, end);
     423             : 
     424           0 :                 if (!p4d_present(*p4d))
     425           0 :                         continue;
     426             : 
     427           0 :                 if (kasan_pud_table(*p4d)) {
     428           0 :                         if (IS_ALIGNED(addr, P4D_SIZE) &&
     429           0 :                             IS_ALIGNED(next, P4D_SIZE)) {
     430           0 :                                 p4d_clear(p4d);
     431           0 :                                 continue;
     432             :                         }
     433             :                 }
     434           0 :                 pud = pud_offset(p4d, addr);
     435           0 :                 kasan_remove_pud_table(pud, addr, next);
     436           0 :                 kasan_free_pud(pud_offset(p4d, 0), p4d);
     437             :         }
     438           0 : }
     439             : 
     440           0 : void kasan_remove_zero_shadow(void *start, unsigned long size)
     441             : {
     442           0 :         unsigned long addr, end, next;
     443           0 :         pgd_t *pgd;
     444             : 
     445           0 :         addr = (unsigned long)kasan_mem_to_shadow(start);
     446           0 :         end = addr + (size >> KASAN_SHADOW_SCALE_SHIFT);
     447             : 
     448           0 :         if (WARN_ON((unsigned long)start % KASAN_MEMORY_PER_SHADOW_PAGE) ||
     449           0 :             WARN_ON(size % KASAN_MEMORY_PER_SHADOW_PAGE))
     450             :                 return;
     451             : 
     452           0 :         for (; addr < end; addr = next) {
     453           0 :                 p4d_t *p4d;
     454             : 
     455           0 :                 next = pgd_addr_end(addr, end);
     456             : 
     457           0 :                 pgd = pgd_offset_k(addr);
     458           0 :                 if (!pgd_present(*pgd))
     459             :                         continue;
     460             : 
     461           0 :                 if (kasan_p4d_table(*pgd)) {
     462             :                         if (IS_ALIGNED(addr, PGDIR_SIZE) &&
     463             :                             IS_ALIGNED(next, PGDIR_SIZE)) {
     464             :                                 pgd_clear(pgd);
     465             :                                 continue;
     466             :                         }
     467             :                 }
     468             : 
     469           0 :                 p4d = p4d_offset(pgd, addr);
     470           0 :                 kasan_remove_p4d_table(p4d, addr, next);
     471           0 :                 kasan_free_p4d(p4d_offset(pgd, 0), pgd);
     472             :         }
     473             : }
     474             : 
     475           0 : int kasan_add_zero_shadow(void *start, unsigned long size)
     476             : {
     477           0 :         int ret;
     478           0 :         void *shadow_start, *shadow_end;
     479             : 
     480           0 :         shadow_start = kasan_mem_to_shadow(start);
     481           0 :         shadow_end = shadow_start + (size >> KASAN_SHADOW_SCALE_SHIFT);
     482             : 
     483           0 :         if (WARN_ON((unsigned long)start % KASAN_MEMORY_PER_SHADOW_PAGE) ||
     484           0 :             WARN_ON(size % KASAN_MEMORY_PER_SHADOW_PAGE))
     485             :                 return -EINVAL;
     486             : 
     487           0 :         ret = kasan_populate_early_shadow(shadow_start, shadow_end);
     488           0 :         if (ret)
     489           0 :                 kasan_remove_zero_shadow(start, size);
     490             :         return ret;
     491             : }

Generated by: LCOV version 1.14
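
The routines listed above operate on shadow addresses produced by kasan_mem_to_shadow(), and kasan_add_zero_shadow()/kasan_remove_zero_shadow() require the covered range to be a multiple of KASAN_MEMORY_PER_SHADOW_PAGE. The standalone sketch below only illustrates that arithmetic for generic KASAN; the page size, shadow offset, and example addresses are illustrative x86_64-style assumptions, not values taken from this report, and the code does not call any kernel API.

    #include <stdio.h>

    #define PAGE_SHIFT                12                    /* assumption: 4 KiB pages */
    #define KASAN_SHADOW_SCALE_SHIFT  3                     /* 1 shadow byte per 8 bytes */
    #define KASAN_GRANULE_SIZE        (1ULL << KASAN_SHADOW_SCALE_SHIFT)
    /* Memory described by one shadow page; the WARN_ONs above check this granularity. */
    #define KASAN_MEMORY_PER_SHADOW_PAGE (KASAN_GRANULE_SIZE << PAGE_SHIFT)
    #define KASAN_SHADOW_OFFSET       0xdffffc0000000000ULL /* illustrative x86_64 value */

    /* Same linear mapping the kernel's kasan_mem_to_shadow() applies. */
    static unsigned long long mem_to_shadow(unsigned long long addr)
    {
            return (addr >> KASAN_SHADOW_SCALE_SHIFT) + KASAN_SHADOW_OFFSET;
    }

    int main(void)
    {
            unsigned long long start = 0xffff888000000000ULL; /* illustrative linear-map address */
            unsigned long long size  = 1ULL << 30;            /* pretend 1 GiB goes online */

            unsigned long long shadow_start = mem_to_shadow(start);
            unsigned long long shadow_end   = shadow_start + (size >> KASAN_SHADOW_SCALE_SHIFT);

            /* The alignment kasan_add_zero_shadow()/kasan_remove_zero_shadow() enforce. */
            printf("granularity ok: %d\n",
                   start % KASAN_MEMORY_PER_SHADOW_PAGE == 0 &&
                   size  % KASAN_MEMORY_PER_SHADOW_PAGE == 0);
            printf("shadow range:   %#llx - %#llx (%llu MiB of shadow)\n",
                   shadow_start, shadow_end,
                   (size >> KASAN_SHADOW_SCALE_SHIFT) >> 20);
            return 0;
    }

With a scale shift of 3, one shadow byte describes 8 bytes of memory, so one 4 KiB shadow page covers 32 KiB; that is why both the start address and the size are checked against KASAN_MEMORY_PER_SHADOW_PAGE before any page tables are touched, and why a 1 GiB range needs 128 MiB of shadow populated (here, with the shared zero shadow page).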