 include/asm-x86/processor_64.h |  6 ------
 include/linux/prefetch.h       |  9 ++-------
 2 files changed, 2 insertions(+), 13 deletions(-)
diff --git a/include/asm-x86/processor_64.h b/include/asm-x86/processor_64.h
index f422becbddd..398c39160fc 100644
--- a/include/asm-x86/processor_64.h
+++ b/include/asm-x86/processor_64.h
@@ -390,12 +390,6 @@ static inline void sync_core(void)
 	asm volatile("cpuid" : "=a" (tmp) : "0" (1) : "ebx","ecx","edx","memory");
 }
 
-#define ARCH_HAS_PREFETCH
-static inline void prefetch(void *x)
-{
-	asm volatile("prefetcht0 (%0)" :: "r" (x));
-}
-
 #define ARCH_HAS_PREFETCHW 1
 static inline void prefetchw(void *x)
 {
diff --git a/include/linux/prefetch.h b/include/linux/prefetch.h
index 1adfe668d03..af7c36a5a52 100644
--- a/include/linux/prefetch.h
+++ b/include/linux/prefetch.h
@@ -34,17 +34,12 @@
 
 */
 
-/*
- *	These cannot be do{}while(0) macros. See the mental gymnastics in
- *	the loop macro.
- */
-
 #ifndef ARCH_HAS_PREFETCH
-static inline void prefetch(const void *x) {;}
+#define prefetch(x) __builtin_prefetch(x)
 #endif
 
 #ifndef ARCH_HAS_PREFETCHW
-static inline void prefetchw(const void *x) {;}
+#define prefetchw(x) __builtin_prefetch(x,1)
 #endif
 
 #ifndef ARCH_HAS_SPINLOCK_PREFETCH
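
For context on the replacement: GCC's __builtin_prefetch(addr, rw, locality) takes the address to prefetch, an optional read/write flag (0 = read, the default; 1 = write), and an optional temporal-locality hint from 0 to 3 (default 3, meaning keep the line in all cache levels). The generic prefetch(x) above is therefore a read prefetch and prefetchw(x) a write prefetch, both with maximal locality; on x86-64 a locality-3 read prefetch compiles to the same prefetcht0 instruction as the inline asm being removed, which is presumably why the arch-specific definition becomes redundant. The sketch below (ordinary userspace C, not kernel code; sum_with_prefetch and PREFETCH_AHEAD are illustrative names, not from this patch) shows the typical pattern these macros exist for: prefetching a few iterations ahead while streaming through memory.

#include <stddef.h>

/* How far ahead to prefetch; a tuning knob, picked arbitrarily here. */
#define PREFETCH_AHEAD 8

long sum_with_prefetch(const long *data, size_t n)
{
	long sum = 0;
	size_t i;

	for (i = 0; i < n; i++) {
		/* Hint the cache line we will read PREFETCH_AHEAD
		 * iterations from now: rw = 0 (read), locality = 3,
		 * i.e. the same expansion the new prefetch(x) uses. */
		if (i + PREFETCH_AHEAD < n)
			__builtin_prefetch(&data[i + PREFETCH_AHEAD], 0, 3);
		sum += data[i];
	}
	return sum;
}

Note also why the fallbacks can become plain single-statement macros: a __builtin_prefetch call is an ordinary expression, so no do{}while(0) wrapper is needed, and the old warning comment goes away together with the empty inline stubs.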