LCOV - code coverage report
Current view: top level - arch/x86/include/asm - futex.h (source / functions) Hit Total Coverage
Test: landlock.info Lines: 7 20 35.0 %
Date: 2021-04-22 12:43:58 Functions: 1 1 100.0 %

          Line data    Source code
       1             : /* SPDX-License-Identifier: GPL-2.0 */
       2             : #ifndef _ASM_X86_FUTEX_H
       3             : #define _ASM_X86_FUTEX_H
       4             : 
       5             : #ifdef __KERNEL__
       6             : 
       7             : #include <linux/futex.h>
       8             : #include <linux/uaccess.h>
       9             : 
      10             : #include <asm/asm.h>
      11             : #include <asm/errno.h>
      12             : #include <asm/processor.h>
      13             : #include <asm/smap.h>
      14             : 
                      : /*
                      :  * unsafe_atomic_op1 - single-instruction atomic futex op on a user word.
                      :  *
                      :  * @insn must be one instruction that uses %0 (register) and %2 (*uaddr)
                      :  * as both source and destination (e.g. xchgl, lock xaddl): the "0" (oparg)
                      :  * constraint preloads the %0 register with @oparg, and after the insn that
                      :  * same register holds the previous value of *@uaddr, which is stored
                      :  * through @oval on success.  A fault on the user access is redirected by
                      :  * _ASM_EXTABLE_UA to the .fixup stub at 3:, which loads -EFAULT into
                      :  * ret (%1, preloaded with 0 via "1" (0)) and resumes at 2:; the macro
                      :  * then jumps to @label.  Caller must be inside a
                      :  * user_access_begin()/user_access_end() section.
                      :  */
       15             : #define unsafe_atomic_op1(insn, oval, uaddr, oparg, label)      \
       16             : do {                                                            \
       17             :         int oldval = 0, ret;                                    \
       18             :         asm volatile("1:\t" insn "\n"                               \
       19             :                      "2:\n"                                   \
       20             :                      "\t.section .fixup,\"ax\"\n"           \
       21             :                      "3:\tmov\t%3, %1\n"                      \
       22             :                      "\tjmp\t2b\n"                            \
       23             :                      "\t.previous\n"                          \
       24             :                      _ASM_EXTABLE_UA(1b, 3b)                    \
       25             :                      : "=r" (oldval), "=r" (ret), "+m" (*uaddr)   \
       26             :                      : "i" (-EFAULT), "0" (oparg), "1" (0));      \
       27             :         if (ret)                                                \
       28             :                 goto label;                                     \
       29             :         *oval = oldval;                                         \
       30             : } while(0)
      31             : 
      32             : 
                      : /*
                      :  * unsafe_atomic_op2 - cmpxchg-loop atomic futex op on a user word.
                      :  *
                      :  * For ops with no single read-modify-write instruction (or/and/xor):
                      :  * load *@uaddr into %0 (eax, via "=&a"), copy it to the scratch register
                      :  * tem (%3), apply @insn (which combines @oparg, %4, into tem), then
                      :  * lock-cmpxchg tem back into *@uaddr; if another task changed the word
                      :  * in between (jnz) retry from 2:.  Both user accesses (the load at 1:
                      :  * and the cmpxchg at 3:) have extable fixups that set ret = -EFAULT
                      :  * (%1, preloaded with 0) and resume at 4:, after which the macro jumps
                      :  * to @label.  On success the pre-op value is stored through @oval.
                      :  * Caller must be inside user_access_begin()/user_access_end().
                      :  */
       33             : #define unsafe_atomic_op2(insn, oval, uaddr, oparg, label)      \
       34             : do {                                                            \
       35             :         int oldval = 0, ret, tem;                               \
       36             :         asm volatile("1:\tmovl     %2, %0\n"                  \
       37             :                      "2:\tmovl\t%0, %3\n"                     \
       38             :                      "\t" insn "\n"                         \
       39             :                      "3:\t" LOCK_PREFIX "cmpxchgl %3, %2\n" \
       40             :                      "\tjnz\t2b\n"                            \
       41             :                      "4:\n"                                   \
       42             :                      "\t.section .fixup,\"ax\"\n"           \
       43             :                      "5:\tmov\t%5, %1\n"                      \
       44             :                      "\tjmp\t4b\n"                            \
       45             :                      "\t.previous\n"                          \
       46             :                      _ASM_EXTABLE_UA(1b, 5b)                    \
       47             :                      _ASM_EXTABLE_UA(3b, 5b)                    \
       48             :                      : "=&a" (oldval), "=&r" (ret),         \
       49             :                        "+m" (*uaddr), "=&r" (tem)               \
       50             :                      : "r" (oparg), "i" (-EFAULT), "1" (0));      \
       51             :         if (ret)                                                \
       52             :                 goto label;                                     \
       53             :         *oval = oldval;                                         \
       54             : } while(0)
      55             : 
                      : /*
                      :  * arch_futex_atomic_op_inuser - dispatch a FUTEX_OP_* atomically on a
                      :  * user-space u32.
                      :  *
                      :  * Maps each futex op to its x86 implementation: SET/ADD use the
                      :  * one-instruction form (xchgl / lock xaddl), OR/ANDN/XOR use the
                      :  * cmpxchg-loop form (ANDN passes ~oparg so the "andl" clears the
                      :  * requested bits).  On success the word's previous value is written
                      :  * through @oval and 0 is returned.
                      :  *
                      :  * Returns -EFAULT if @uaddr fails user_access_begin() or a user access
                      :  * faults (the macros expand "goto Efault"), -ENOSYS for an unknown op.
                      :  * Every exit path, including Efault, closes the user-access window
                      :  * with user_access_end().
                      :  */
       56           0 : static __always_inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
       57             :                 u32 __user *uaddr)
       58             : {
       59           0 :         if (!user_access_begin(uaddr, sizeof(u32)))
       60             :                 return -EFAULT;
       61             : 
       62           0 :         switch (op) {
       63           0 :         case FUTEX_OP_SET:
       64           0 :                 unsafe_atomic_op1("xchgl %0, %2", oval, uaddr, oparg, Efault);
       65             :                 break;
       66           0 :         case FUTEX_OP_ADD:
       67           0 :                 unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval,
       68             :                                    uaddr, oparg, Efault);
       69             :                 break;
       70           0 :         case FUTEX_OP_OR:
       71           0 :                 unsafe_atomic_op2("orl %4, %3", oval, uaddr, oparg, Efault);
       72             :                 break;
       73           0 :         case FUTEX_OP_ANDN:
       74           0 :                 unsafe_atomic_op2("andl %4, %3", oval, uaddr, ~oparg, Efault);
       75             :                 break;
       76           0 :         case FUTEX_OP_XOR:
       77           0 :                 unsafe_atomic_op2("xorl %4, %3", oval, uaddr, oparg, Efault);
       78             :                 break;
       79             :         default:
       80             :                 user_access_end();
       81             :                 return -ENOSYS;
       82             :         }
       83             :         user_access_end();
       84             :         return 0;
       85             : Efault:
       86             :         user_access_end();
       87             :         return -EFAULT;
       88             : }
      89             : 
                      : /*
                      :  * futex_atomic_cmpxchg_inatomic - compare-and-exchange on a user u32.
                      :  *
                      :  * Locked cmpxchgl of @newval into *@uaddr, expected value @oldval
                      :  * preloaded into eax via the "1" (oldval) constraint; afterwards eax
                      :  * holds the value actually found at *@uaddr (whether or not the
                  
                      :  * exchange happened), and that value is stored through @uval.  Note
                      :  * that *@uval is written unconditionally, even when the access faulted
                      :  * (the fixup at 3: sets ret = -EFAULT and resumes at 2:).
                      :  *
                      :  * Returns 0 on success (caller compares *@uval against the expected
                      :  * value to detect a lost race), -EFAULT if @uaddr is inaccessible.
                      :  * The "memory" clobber orders the access against surrounding code.
                      :  */
       90           1 : static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
       91             :                                                 u32 oldval, u32 newval)
       92             : {
       93           1 :         int ret = 0;
       94             : 
       95           1 :         if (!user_access_begin(uaddr, sizeof(u32)))
       96             :                 return -EFAULT;
       97           1 :         asm volatile("\n"
       98             :                 "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"
       99             :                 "2:\n"
      100             :                 "\t.section .fixup, \"ax\"\n"
      101             :                 "3:\tmov     %3, %0\n"
      102             :                 "\tjmp     2b\n"
      103             :                 "\t.previous\n"
      104             :                 _ASM_EXTABLE_UA(1b, 3b)
      105             :                 : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
      106             :                 : "i" (-EFAULT), "r" (newval), "1" (oldval)
      107             :                 : "memory"
      108             :         );
      109           1 :         user_access_end();
      110           1 :         *uval = oldval;
      111           1 :         return ret;
      112             : }
     113             : 
     114             : #endif
     115             : #endif /* _ASM_X86_FUTEX_H */

Generated by: LCOV version 1.14