/* atomic.h — H8/300 atomic operations */
#ifndef __ARCH_H8300_ATOMIC__
#define __ARCH_H8300_ATOMIC__

#include <linux/types.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc..
 */

/* Static initializer for an atomic_t: ATOMIC_INIT(n) => { n } */
#define ATOMIC_INIT(i) { (i) }

/*
 * Plain load/store of v->counter, wrapped in READ_ONCE/WRITE_ONCE to
 * prevent compiler tearing and re-fetching.  The heavier helpers below
 * get their atomicity from disabling interrupts, so a single load or
 * store needs no irq protection here.
 */
#define atomic_read(v) READ_ONCE((v)->counter)
#define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))

/* NOTE(review): included after the macros above — presumably for likely()
 * etc. used further down; verify nothing above needs it. */
#include <linux/kernel.h>
/*
 * ATOMIC_OP_RETURN(op, c_op) generates atomic_<op>_return(i, v):
 * apply "v->counter c_op i" and return the resulting NEW value.
 * Atomicity is provided by disabling interrupts around the
 * read-modify-write (arch_local_irq_save/restore).
 */
#define ATOMIC_OP_RETURN(op, c_op) \
static inline int atomic_##op##_return(int i, atomic_t *v) \
{ \
	h8300flags flags; \
	int ret; \
\
	flags = arch_local_irq_save(); \
	ret = v->counter c_op i; \
	arch_local_irq_restore(flags); \
	return ret; \
}
/*
 * ATOMIC_FETCH_OP(op, c_op) generates atomic_fetch_<op>(i, v):
 * apply "v->counter c_op i" and return the OLD value of the counter
 * (ret is captured before the operation).  Interrupts are disabled
 * for the duration of the read-modify-write.
 */
#define ATOMIC_FETCH_OP(op, c_op) \
static inline int atomic_fetch_##op(int i, atomic_t *v) \
{ \
	h8300flags flags; \
	int ret; \
\
	flags = arch_local_irq_save(); \
	ret = v->counter; \
	v->counter c_op i; \
	arch_local_irq_restore(flags); \
	return ret; \
}
/*
 * ATOMIC_OP(op, c_op) generates the void variant atomic_<op>(i, v):
 * apply "v->counter c_op i" under irq protection, returning nothing.
 */
#define ATOMIC_OP(op, c_op) \
static inline void atomic_##op(int i, atomic_t *v) \
{ \
	h8300flags flags; \
\
	flags = arch_local_irq_save(); \
	v->counter c_op i; \
	arch_local_irq_restore(flags); \
}
/* add/sub get a value-returning variant. */
ATOMIC_OP_RETURN(add, +=)
ATOMIC_OP_RETURN(sub, -=)

/*
 * ATOMIC_OPS(op, c_op): generate both the void atomic_<op>() and the
 * old-value-returning atomic_fetch_<op>() for one operation.
 */
#define ATOMIC_OPS(op, c_op) \
ATOMIC_OP(op, c_op) \
ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(and, &=)
ATOMIC_OPS(or, |=)
ATOMIC_OPS(xor, ^=)
ATOMIC_OPS(add, +=)
ATOMIC_OPS(sub, -=)

/* Generator macros are local to this header — drop them. */
#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
/* True if the result of adding a to *v is negative. */
#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
/* True if subtracting i from *v leaves it at zero. */
#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)

/* inc/dec expressed in terms of the add/sub return helpers above. */
#define atomic_inc_return(v) atomic_add_return(1, v)
#define atomic_dec_return(v) atomic_sub_return(1, v)
#define atomic_inc(v) (void)atomic_inc_return(v)
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
#define atomic_dec(v) (void)atomic_dec_return(v)
#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
  66. static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
  67. {
  68. int ret;
  69. h8300flags flags;
  70. flags = arch_local_irq_save();
  71. ret = v->counter;
  72. if (likely(ret == old))
  73. v->counter = new;
  74. arch_local_irq_restore(flags);
  75. return ret;
  76. }
  77. static inline int __atomic_add_unless(atomic_t *v, int a, int u)
  78. {
  79. int ret;
  80. h8300flags flags;
  81. flags = arch_local_irq_save();
  82. ret = v->counter;
  83. if (ret != u)
  84. v->counter += a;
  85. arch_local_irq_restore(flags);
  86. return ret;
  87. }
#endif /* __ARCH_H8300_ATOMIC__ */