LCOV - code coverage report
Current view: top level - arch/x86/include/asm - string_64.h (source / functions)
Test: landlock.info          Date: 2021-04-22 12:43:58

             Hit   Total   Coverage
Lines:         4      22     18.2 %
Functions:     0       0          -

Only memset16() was executed under this test (8 hits on each of its four
instrumented lines); memset32() and memcpy_flushcache() were instrumented
but never reached.

Source code
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_64_H
#define _ASM_X86_STRING_64_H

#ifdef __KERNEL__
#include <linux/jump_label.h>

/* Written 2002 by Andi Kleen */

/* Even with __builtin_, the compiler may decide to use the out-of-line
   function. */

#define __HAVE_ARCH_MEMCPY 1
extern void *memcpy(void *to, const void *from, size_t len);
extern void *__memcpy(void *to, const void *from, size_t len);

#define __HAVE_ARCH_MEMSET
void *memset(void *s, int c, size_t n);
void *__memset(void *s, int c, size_t n);
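
/*
 * The double-underscore variants (__memcpy, __memset, __memmove below)
 * are the raw architecture implementations; the plain names may be
 * redirected to instrumented wrappers (see the KASAN block below). Each
 * __HAVE_ARCH_* define tells the generic string code in lib/string.c
 * not to emit its own fallback.
 */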

#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
        long d0, d1;
        asm volatile("rep\n\t"
                     "stosw"
                     : "=&c" (d0), "=&D" (d1)
                     : "a" (v), "1" (s), "0" (n)
                     : "memory");
        return s;
}
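
/*
 * In memset16() above, "rep; stosw" stores the 16-bit value loaded into
 * %ax (constraint "a"(v)) to (%rdi), %rcx times; "0"(n) and "1"(s) tie
 * the count and destination to the early-clobber outputs "=&c"(d0) and
 * "=&D"(d1), which only absorb the clobbered registers. A minimal usage
 * sketch (illustrative only, not part of this header), assuming a
 * hypothetical 80x25 VGA text buffer:
 *
 *      memset16(vga_buf, 0x0720, 80 * 25);     (grey-on-black blank cells)
 */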

#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
        long d0, d1;
        asm volatile("rep\n\t"
                     "stosl"
                     : "=&c" (d0), "=&D" (d1)
                     : "a" (v), "1" (s), "0" (n)
                     : "memory");
        return s;
}

#define __HAVE_ARCH_MEMSET64
static inline void *memset64(uint64_t *s, uint64_t v, size_t n)
{
        long d0, d1;
        asm volatile("rep\n\t"
                     "stosq"
                     : "=&c" (d0), "=&D" (d1)
                     : "a" (v), "1" (s), "0" (n)
                     : "memory");
        return s;
}
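
/*
 * memset32() and memset64() above repeat the same pattern with "stosl"
 * (4-byte) and "stosq" (8-byte) stores. Illustrative sketch only,
 * assuming a hypothetical 32bpp framebuffer scanline:
 *
 *      memset32(scanline, 0xffff0000, width);  (fill with opaque red)
 */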

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t count);
void *__memmove(void *dest, const void *src, size_t count);

int memcmp(const void *cs, const void *ct, size_t count);
size_t strlen(const char *s);
char *strcpy(char *dest, const char *src);
char *strcat(char *dest, const char *src);
int strcmp(const char *cs, const char *ct);

#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)

/*
 * Files that are not instrumented (e.g. mm/slub.c) must use the
 * non-instrumented versions of the mem* functions.
 */

#undef memcpy
#define memcpy(dst, src, len) __memcpy(dst, src, len)
#define memmove(dst, src, len) __memmove(dst, src, len)
#define memset(s, c, n) __memset(s, c, n)

#ifndef __NO_FORTIFY
#define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */
#endif

#endif
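
/*
 * __SANITIZE_ADDRESS__ is only defined by the compiler for translation
 * units that are actually instrumented, so the redirection above takes
 * effect precisely in files that opt out of KASAN, e.g. via a Makefile
 * line such as (illustrative):
 *
 *      KASAN_SANITIZE_slub.o := n
 */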

#ifdef CONFIG_ARCH_HAS_UACCESS_FLUSHCACHE
#define __HAVE_ARCH_MEMCPY_FLUSHCACHE 1
void __memcpy_flushcache(void *dst, const void *src, size_t cnt);
static __always_inline void memcpy_flushcache(void *dst, const void *src, size_t cnt)
{
        if (__builtin_constant_p(cnt)) {
                switch (cnt) {
                case 4:
                        asm ("movntil %1, %0" : "=m"(*(u32 *)dst) : "r"(*(u32 *)src));
                        return;
                case 8:
                        asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src));
                        return;
                case 16:
                        asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src));
                        asm ("movntiq %1, %0" : "=m"(*(u64 *)(dst + 8)) : "r"(*(u64 *)(src + 8)));
                        return;
                }
        }
        __memcpy_flushcache(dst, src, cnt);
}
#endif
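
/*
 * memcpy_flushcache() avoids cache pollution when writing to persistent
 * memory: constant 4/8/16-byte copies inline MOVNTI non-temporal stores,
 * which bypass the cache hierarchy, while every other size falls back to
 * the out-of-line __memcpy_flushcache(). A minimal sketch (illustrative
 * only), assuming a hypothetical 8-byte pmem sequence-number slot:
 *
 *      memcpy_flushcache(pmem_slot, &seqno, sizeof(u64));      (one movntiq)
 */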

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_64_H */

Generated by: LCOV version 1.14