
#ifndef _ASM_X86_BARRIER_H
#define _ASM_X86_BARRIER_H

#include <asm/alternative.h>
#include <asm/nops.h>

/*
 * Force strict CPU ordering.
 * And yes, this might be required on UP too when we're talking
 * to devices.
 */
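
/*
 * On 32-bit, the SSE2 fence instructions may be unavailable, so
 * ALTERNATIVE() is used: at boot the kernel patches in mfence/lfence/
 * sfence when the CPU advertises X86_FEATURE_XMM2 and otherwise keeps
 * the "lock; addl $0,0(%%esp)" fallback, a locked RMW of the top of
 * the stack that acts as a full barrier on every x86 (and clobbers the
 * flags, hence "cc").  64-bit CPUs always have SSE2, so the fence
 * instructions are used directly.
 */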
#ifdef CONFIG_X86_32
#define mb() asm volatile(ALTERNATIVE("lock; addl $0,0(%%esp)", "mfence", \
				      X86_FEATURE_XMM2) ::: "memory", "cc")
#define rmb() asm volatile(ALTERNATIVE("lock; addl $0,0(%%esp)", "lfence", \
				       X86_FEATURE_XMM2) ::: "memory", "cc")
#define wmb() asm volatile(ALTERNATIVE("lock; addl $0,0(%%esp)", "sfence", \
				       X86_FEATURE_XMM2) ::: "memory", "cc")
#else
#define mb() asm volatile("mfence":::"memory")
#define rmb() asm volatile("lfence":::"memory")
#define wmb() asm volatile("sfence" ::: "memory")
#endif
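
/*
 * dma_rmb()/dma_wmb() order CPU accesses to coherent DMA memory.
 * x86's TSO model already keeps loads ordered against loads and stores
 * against stores, so a compiler barrier() is sufficient -- except for
 * dma_rmb() under CONFIG_X86_PPRO_FENCE, which guards against the
 * out-of-spec load reordering of old Pentium Pro class CPUs.
 */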
#ifdef CONFIG_X86_PPRO_FENCE
#define dma_rmb() rmb()
#else
#define dma_rmb() barrier()
#endif
#define dma_wmb() barrier()
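
/*
 * SMP ordering: under TSO the only hardware reordering is a store
 * followed by a later load, so only __smp_mb() needs a real mb();
 * write-write ordering needs just the compiler barrier, and
 * __smp_rmb() reuses dma_rmb() so the PPRO_FENCE case is covered.
 * __smp_store_mb() relies on xchg(), whose implicit lock prefix makes
 * the store a full memory barrier in a single instruction.
 */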
#define __smp_mb() mb()
#define __smp_rmb() dma_rmb()
#define __smp_wmb() barrier()
#define __smp_store_mb(var, value) do { (void)xchg(&var, value); } while (0)

#if defined(CONFIG_X86_PPRO_FENCE)

/*
 * For this option x86 doesn't have a strong TSO memory
 * model and we should fall back to full barriers.
 */
#define __smp_store_release(p, v)			\
do {							\
	compiletime_assert_atomic_type(*p);		\
	__smp_mb();					\
	WRITE_ONCE(*p, v);				\
} while (0)
#define __smp_load_acquire(p)				\
({							\
	typeof(*p) ___p1 = READ_ONCE(*p);		\
	compiletime_assert_atomic_type(*p);		\
	__smp_mb();					\
	___p1;						\
})

#else /* regular x86 TSO memory ordering */
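
/*
 * TSO only ever reorders a store with a *later* load, which neither
 * acquire nor release semantics forbid, so barrier() is enough here:
 * only the compiler has to be kept from moving the accesses.
 */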
#define __smp_store_release(p, v)			\
do {							\
	compiletime_assert_atomic_type(*p);		\
	barrier();					\
	WRITE_ONCE(*p, v);				\
} while (0)
#define __smp_load_acquire(p)				\
({							\
	typeof(*p) ___p1 = READ_ONCE(*p);		\
	compiletime_assert_atomic_type(*p);		\
	barrier();					\
	___p1;						\
})
#endif

/* Atomic operations are already serializing on x86 */
#define __smp_mb__before_atomic() barrier()
#define __smp_mb__after_atomic() barrier()
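
/*
 * asm-generic/barrier.h maps the __smp_*() primitives above onto the
 * smp_*() interfaces (they collapse to compiler barriers on non-SMP
 * builds) and supplies generic defaults for anything not overridden
 * in this file.
 */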
#include <asm-generic/barrier.h>

#endif /* _ASM_X86_BARRIER_H */