futex.h

#ifndef _ASM_POWERPC_FUTEX_H
#define _ASM_POWERPC_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>
/*
 * Apply one futex operation to the user word at 'uaddr' inside a
 * lwarx/stwcx. (load-reserve/store-conditional) retry loop: 'insn'
 * computes the new value in %1 from the old value loaded into %0 and the
 * operand in %4, and the store is retried until the reservation holds.
 * A fault on the user access is caught via the __ex_table entry and
 * turned into -EFAULT by the .fixup stub.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
  __asm__ __volatile ( \
        PPC_ATOMIC_ENTRY_BARRIER \
"1:     lwarx   %0,0,%2\n" \
        insn \
        PPC405_ERR77(0, %2) \
"2:     stwcx.  %1,0,%2\n" \
        "bne-   1b\n" \
        PPC_ATOMIC_EXIT_BARRIER \
        "li     %1,0\n" \
"3:     .section .fixup,\"ax\"\n" \
"4:     li      %1,%3\n" \
        "b      3b\n" \
        ".previous\n" \
        ".section __ex_table,\"a\"\n" \
        ".align 3\n" \
        PPC_LONG "1b,4b,2b,4b\n" \
        ".previous" \
        : "=&r" (oldval), "=&r" (ret) \
        : "b" (uaddr), "i" (-EFAULT), "r" (oparg) \
        : "cr0", "memory")
static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
        int op = (encoded_op >> 28) & 7;
        int cmp = (encoded_op >> 24) & 15;
        int oparg = (encoded_op << 8) >> 20;    /* bits 12..23, sign-extended */
        int cmparg = (encoded_op << 20) >> 20;  /* bits 0..11, sign-extended */
        int oldval = 0, ret;

        if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
                oparg = 1 << oparg;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        pagefault_disable();

        switch (op) {
        case FUTEX_OP_SET:
                __futex_atomic_op("mr %1,%4\n", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ADD:
                __futex_atomic_op("add %1,%0,%4\n", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_OR:
                __futex_atomic_op("or %1,%0,%4\n", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ANDN:
                __futex_atomic_op("andc %1,%0,%4\n", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_XOR:
                __futex_atomic_op("xor %1,%0,%4\n", ret, oldval, uaddr, oparg);
                break;
        default:
                ret = -ENOSYS;
        }

        pagefault_enable();

        /* On success, report the comparison of the old value against cmparg. */
        if (!ret) {
                switch (cmp) {
                case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
                case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
                case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
                case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
                case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
                case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
                default: ret = -ENOSYS;
                }
        }
        return ret;
}
/*
 * Atomically compare-and-exchange the user word at 'uaddr': if it equals
 * 'oldval', replace it with 'newval'.  The value actually observed is
 * returned through '*uval'; the return value is 0, or -EFAULT if the
 * user access faulted.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
        int ret = 0;
        u32 prev;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        __asm__ __volatile__ (
        PPC_ATOMIC_ENTRY_BARRIER
"1:     lwarx   %1,0,%3         # futex_atomic_cmpxchg_inatomic\n\
        cmpw    0,%1,%4\n\
        bne-    3f\n"
        PPC405_ERR77(0,%3)
"2:     stwcx.  %5,0,%3\n\
        bne-    1b\n"
        PPC_ATOMIC_EXIT_BARRIER
"3:     .section .fixup,\"ax\"\n\
4:      li      %0,%6\n\
        b       3b\n\
        .previous\n\
        .section __ex_table,\"a\"\n\
        .align 3\n\
        " PPC_LONG "1b,4b,2b,4b\n\
        .previous"
        : "+r" (ret), "=&r" (prev), "+m" (*uaddr)
        : "r" (uaddr), "r" (oldval), "r" (newval), "i" (-EFAULT)
        : "cc", "memory");

        *uval = prev;
        return ret;
}
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_FUTEX_H */
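
For reference, here is a minimal user-space sketch of the encoded_op word that futex_atomic_op_inuser() decodes above. It packs the fields with the FUTEX_OP() macro from the uapi <linux/futex.h> (operation and comparison in the top two nibbles, two 12-bit arguments below) and then repeats the same shift pairs the helper uses, which sign-extend the 12-bit arguments. This is an illustration only, not part of the header.

/* Illustration only: decode an encoded_op the way the helper above does. */
#include <stdio.h>
#include <linux/futex.h>        /* FUTEX_OP(), FUTEX_OP_ADD, FUTEX_OP_CMP_GT, ... */

int main(void)
{
        /* "Add 1 to the futex word, then test old value > -1" (FUTEX_WAKE_OP style). */
        int encoded_op = FUTEX_OP(FUTEX_OP_ADD, 1, FUTEX_OP_CMP_GT, -1);

        /* Same (implementation-defined) shift tricks the kernel decode uses. */
        int op     = (encoded_op >> 28) & 7;
        int cmp    = (encoded_op >> 24) & 15;
        int oparg  = (encoded_op << 8) >> 20;   /* bits 12..23, sign-extended */
        int cmparg = (encoded_op << 20) >> 20;  /* bits 0..11, sign-extended */

        /* oparg comes back as 1 and cmparg as -1, despite the 12-bit packing. */
        printf("op=%d cmp=%d oparg=%d cmparg=%d\n", op, cmp, oparg, cmparg);
        return 0;
}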
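
The lwarx/stwcx. pairs in both helpers are PowerPC's load-reserve/store-conditional primitives: the conditional store succeeds only if the reservation taken by lwarx is still intact, and "bne- 1b" retries otherwise. Ignoring the user-access fixup and the entry/exit barriers that the kernel version must supply, futex_atomic_cmpxchg_inatomic() behaves like the following sketch built on the GCC __atomic builtins (which GCC itself lowers to a lwarx/stwcx. loop on PowerPC); the helper name here is invented for illustration.

/* Sketch only: a rough user-space analogue of the compare-and-exchange helper. */
#include <stdint.h>

static inline int cmpxchg_u32_sketch(uint32_t *uval, uint32_t *uaddr,
                                     uint32_t oldval, uint32_t newval)
{
        uint32_t expected = oldval;

        /* Strong CAS: on failure, 'expected' is rewritten with the observed value. */
        __atomic_compare_exchange_n(uaddr, &expected, newval, 0,
                                    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);

        *uval = expected;       /* like '*uval = prev' above */
        return 0;               /* the real helper returns -EFAULT if uaddr faults */
}

In the same spirit, the FUTEX_OP_ADD case of __futex_atomic_op() is the load-reserve/store-conditional equivalent of an atomic fetch-and-add on the user word, with the old value handed back for the comparison step.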