/* Memory barrier macros (x86/x86-64 only; other architectures must fill in). */
/*
 * Memory barrier primitives.
 *
 * Only x86 and x86-64 are supported; any other architecture trips the
 * #error below and must supply its own definitions.
 */
#if defined(__i386__) || defined(__x86_64__)
/*
 * Compiler-only barrier: forbids the compiler from reordering memory
 * accesses across it. Emits no machine instruction.
 * (__asm__/__volatile__ spellings compile even under strict -std=c11,
 * where the plain `asm` keyword is disabled.)
 */
#define barrier() __asm__ __volatile__("" ::: "memory")
/* Full hardware memory barrier (compiler builtin emits a fence). */
#define mb() __sync_synchronize()
#define smp_mb() mb()
/*
 * On x86 the hardware already keeps load/load and store/store ordering,
 * so the SMP read/write barriers only need to stop compiler reordering.
 */
#define smp_rmb() barrier()
#define smp_wmb() barrier()
/*
 * Weak (smp_*) barriers should be used. If not - it's a bug:
 * the mandatory rmb()/wmb() deliberately abort at runtime.
 * NOTE(review): abort() needs <stdlib.h> at the point of use.
 */
#define rmb() abort()
#define wmb() abort()
#else
#error Please fill in barrier macros
#endif