LCOV - code coverage report
Current view: top level - arch/x86/kernel - static_call.c (source / functions) Hit Total Coverage
Test: landlock.info Lines: 38 42 90.5 %
Date: 2021-04-22 12:43:58 Functions: 3 3 100.0 %

          Line data    Source code
       1             : // SPDX-License-Identifier: GPL-2.0
       2             : #include <linux/static_call.h>
       3             : #include <linux/memory.h>
       4             : #include <linux/bug.h>
       5             : #include <asm/text-patching.h>
       6             : 
       7             : enum insn_type {
       8             :         CALL = 0, /* site call */
       9             :         NOP = 1,  /* site cond-call */
      10             :         JMP = 2,  /* tramp / site tail-call */
      11             :         RET = 3,  /* tramp / site cond-tail-call */
      12             : };
      13             : 
      14             : /*
      15             :  * data16 data16 xorq %rax, %rax - a single 5 byte instruction that clears %rax
      16             :  * The REX.W cancels the effect of any data16.
      17             :  */
      18             : static const u8 xor5rax[] = { 0x66, 0x66, 0x48, 0x31, 0xc0 };
      19             : 
      20         569 : static void __ref __static_call_transform(void *insn, enum insn_type type, void *func)
      21             : {
      22         569 :         const void *emulate = NULL;
      23         569 :         int size = CALL_INSN_SIZE;
      24         569 :         const void *code;
      25             : 
      26         569 :         switch (type) {
      27             :         case CALL:
      28         532 :                 code = text_gen_insn(CALL_INSN_OPCODE, insn, func);
      29         532 :                 if (func == &__static_call_return0) {
      30           1 :                         emulate = code;
      31           1 :                         code = &xor5rax;
      32             :                 }
      33             : 
      34             :                 break;
      35             : 
      36          19 :         case NOP:
      37          19 :                 code = ideal_nops[NOP_ATOMIC5];
      38          19 :                 break;
      39             : 
      40             :         case JMP:
      41          18 :                 code = text_gen_insn(JMP32_INSN_OPCODE, insn, func);
      42          18 :                 break;
      43             : 
      44             :         case RET:
      45           0 :                 code = text_gen_insn(RET_INSN_OPCODE, insn, func);
      46           0 :                 size = RET_INSN_SIZE;
      47           0 :                 break;
      48             :         }
      49             : 
      50         569 :         if (memcmp(insn, code, size) == 0)
      51             :                 return;
      52             : 
      53         569 :         if (unlikely(system_state == SYSTEM_BOOTING))
      54         531 :                 return text_poke_early(insn, code, size);
      55             : 
      56          38 :         text_poke_bp(insn, code, size, emulate);
      57             : }
      58             : 
      59         569 : static void __static_call_validate(void *insn, bool tail)
      60             : {
      61         569 :         u8 opcode = *(u8 *)insn;
      62             : 
      63         569 :         if (tail) {
      64          18 :                 if (opcode == JMP32_INSN_OPCODE ||
      65          18 :                     opcode == RET_INSN_OPCODE)
      66             :                         return;
      67             :         } else {
      68         551 :                 if (opcode == CALL_INSN_OPCODE ||
      69          20 :                     !memcmp(insn, ideal_nops[NOP_ATOMIC5], 5) ||
      70           1 :                     !memcmp(insn, xor5rax, 5))
      71             :                         return;
      72             :         }
      73             : 
      74             :         /*
      75             :          * If we ever trigger this, our text is corrupt, we'll probably not live long.
      76             :          */
      77           0 :         WARN_ONCE(1, "unexpected static_call insn opcode 0x%x at %pS\n", opcode, insn);
      78             : }
      79             : 
      80         569 : static inline enum insn_type __sc_insn(bool null, bool tail)
      81             : {
      82             :         /*
      83             :          * Encode the following table without branches:
      84             :          *
      85             :          *      tail    null    insn
      86             :          *      -----+-------+------
      87             :          *        0  |   0   |  CALL
      88             :          *        0  |   1   |  NOP
      89             :          *        1  |   0   |  JMP
      90             :          *        1  |   1   |  RET
      91             :          */
      92         569 :         return 2*tail + null;
      93             : }
      94             : 
      95         569 : void arch_static_call_transform(void *site, void *tramp, void *func, bool tail)
      96             : {
      97         569 :         mutex_lock(&text_mutex);
      98             : 
      99         569 :         if (tramp) {
     100          18 :                 __static_call_validate(tramp, true);
     101          18 :                 __static_call_transform(tramp, __sc_insn(!func, true), func);
     102             :         }
     103             : 
     104         569 :         if (IS_ENABLED(CONFIG_HAVE_STATIC_CALL_INLINE) && site) {
     105         551 :                 __static_call_validate(site, tail);
     106         551 :                 __static_call_transform(site, __sc_insn(!func, tail), func);
     107             :         }
     108             : 
     109         569 :         mutex_unlock(&text_mutex);
     110         569 : }
     111             : EXPORT_SYMBOL_GPL(arch_static_call_transform);

Generated by: LCOV version 1.14