From ab483570a13be2a34c0502b166df8f8b26802103 Mon Sep 17 00:00:00 2001
From: Andi Kleen
Date: Fri, 19 Oct 2007 20:35:04 +0200
Subject: x86 & generic: change to __builtin_prefetch()

gcc 3.2+ supports __builtin_prefetch, so it's possible to use it on
all architectures. Change the generic fallback in linux/prefetch.h to
use it instead of no-oping it out. gcc should do the right thing when
the architecture doesn't support prefetching.

Undefine the x86-64 inline assembler version and use the fallback.

Signed-off-by: Andi Kleen
Signed-off-by: Ingo Molnar
Signed-off-by: Thomas Gleixner
---
 include/asm-x86/processor_64.h |    6 ------
 include/linux/prefetch.h       |    9 ++-------
 2 files changed, 2 insertions(+), 13 deletions(-)

diff --git a/include/asm-x86/processor_64.h b/include/asm-x86/processor_64.h
index f422becbddd9..398c39160fce 100644
--- a/include/asm-x86/processor_64.h
+++ b/include/asm-x86/processor_64.h
@@ -390,12 +390,6 @@ static inline void sync_core(void)
 	asm volatile("cpuid" : "=a" (tmp) : "0" (1) : "ebx","ecx","edx","memory");
 }
 
-#define ARCH_HAS_PREFETCH
-static inline void prefetch(void *x)
-{
-	asm volatile("prefetcht0 (%0)" :: "r" (x));
-}
-
 #define ARCH_HAS_PREFETCHW 1
 static inline void prefetchw(void *x)
 {
diff --git a/include/linux/prefetch.h b/include/linux/prefetch.h
index 1adfe668d031..af7c36a5a521 100644
--- a/include/linux/prefetch.h
+++ b/include/linux/prefetch.h
@@ -34,17 +34,12 @@
 
  */
 
-/*
- * These cannot be do{}while(0) macros. See the mental gymnastics in
- * the loop macro.
- */
-
 #ifndef ARCH_HAS_PREFETCH
-static inline void prefetch(const void *x) {;}
+#define prefetch(x) __builtin_prefetch(x)
 #endif
 
 #ifndef ARCH_HAS_PREFETCHW
-static inline void prefetchw(const void *x) {;}
+#define prefetchw(x) __builtin_prefetch(x,1)
 #endif
 
 #ifndef ARCH_HAS_SPINLOCK_PREFETCH
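
For reference (not part of the patch): __builtin_prefetch() takes the
address to prefetch plus two optional arguments, a read/write flag
(0 = read, the default; 1 = write) and a temporal-locality hint (0-3,
default 3). The patched fallbacks thus map prefetch(x) to a read
prefetch and prefetchw(x) to a write prefetch, and on targets with no
prefetch instruction gcc emits nothing, which is why the old empty
inline fallbacks can simply be dropped. A minimal standalone sketch of
the usual pattern, prefetching the next node while walking a list
(struct and function names here are illustrative, not from the kernel):

	struct node {
		struct node *next;
		long payload;
	};

	static long sum_list(const struct node *head)
	{
		const struct node *n;
		long sum = 0;

		for (n = head; n; n = n->next) {
			/*
			 * Hint the next node into cache while we work on
			 * this one.  With the patched linux/prefetch.h,
			 * prefetch(n->next) expands to exactly this call
			 * on architectures without ARCH_HAS_PREFETCH.
			 * Prefetching a NULL pointer at the end of the
			 * list is harmless; prefetches do not fault.
			 */
			__builtin_prefetch(n->next);
			sum += n->payload;
		}
		return sum;
	}

In kernel code one would include <linux/prefetch.h> and call
prefetch()/prefetchw() rather than the builtin directly, so that
architectures defining ARCH_HAS_PREFETCH / ARCH_HAS_PREFETCHW still
get their hand-tuned versions.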