/* futex.h */
  1. /*
  2. * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
  3. *
  4. * This program is free software; you can redistribute it and/or modify
  5. * it under the terms of the GNU General Public License version 2 as
  6. * published by the Free Software Foundation.
  7. *
  8. * Vineetg: August 2010: From Android kernel work
  9. */
  10. #ifndef _ASM_FUTEX_H
  11. #define _ASM_FUTEX_H
  12. #include <linux/futex.h>
  13. #include <linux/preempt.h>
  14. #include <linux/uaccess.h>
  15. #include <asm/errno.h>
#ifdef CONFIG_ARC_HAS_LLSC

/*
 * __futex_atomic_op() - atomic read-modify-write of the user futex word.
 *
 * LLSC variant: uses LLOCK (load-locked) / SCOND (store-conditional) in a
 * retry loop ("bnz 1b"), so the ld-op-st sequence is atomic against other
 * CPUs without disabling preemption.
 *
 * @insn:   asm instruction computing the new value into %0
 *          (inputs: %1 = old value, %3 = oparg)
 * @ret:    output; 0 on success, -EFAULT if a fault was taken
 * @oldval: output; value previously held at @uaddr
 * @uaddr:  user-space address of the futex word
 * @oparg:  operand for @insn
 *
 * Labels 1b/2b are registered in __ex_table; a fault on either the load or
 * the store lands in the .fixup stub at 4:, which sets %0 = -EFAULT (%4 is
 * the "ir"(-EFAULT) input) and resumes at 3:.  smp_mb() before and after
 * provides full-barrier semantics around the atomic op.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)\
							\
	smp_mb();					\
	__asm__ __volatile__(				\
	"1: llock %1, [%2] \n"				\
	insn "\n"					\
	"2: scond %0, [%2] \n"				\
	" bnz 1b \n"					\
	" mov %0, 0 \n"					\
	"3: \n"						\
	" .section .fixup,\"ax\" \n"			\
	" .align 4 \n"					\
	"4: mov %0, %4 \n"				\
	" j 3b \n"					\
	" .previous \n"					\
	" .section __ex_table,\"a\" \n"			\
	" .align 4 \n"					\
	" .word 1b, 4b \n"				\
	" .word 2b, 4b \n"				\
	" .previous \n"					\
							\
	: "=&r" (ret), "=&r" (oldval)			\
	: "r" (uaddr), "r" (oparg), "ir" (-EFAULT)	\
	: "cc", "memory");				\
	smp_mb()

#else /* !CONFIG_ARC_HAS_LLSC */

/*
 * Non-LLSC variant: plain ld/st, which is NOT atomic by itself — the
 * caller guarantees atomicity by disabling preemption around the op
 * (see futex_atomic_op_inuser() below; UP-only safety).  Fault handling
 * via .fixup/__ex_table is identical to the LLSC variant: ret becomes
 * -EFAULT if the load or store faults.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)\
							\
	smp_mb();					\
	__asm__ __volatile__(				\
	"1: ld %1, [%2] \n"				\
	insn "\n"					\
	"2: st %0, [%2] \n"				\
	" mov %0, 0 \n"					\
	"3: \n"						\
	" .section .fixup,\"ax\" \n"			\
	" .align 4 \n"					\
	"4: mov %0, %4 \n"				\
	" j 3b \n"					\
	" .previous \n"					\
	" .section __ex_table,\"a\" \n"			\
	" .align 4 \n"					\
	" .word 1b, 4b \n"				\
	" .word 2b, 4b \n"				\
	" .previous \n"					\
							\
	: "=&r" (ret), "=&r" (oldval)			\
	: "r" (uaddr), "r" (oparg), "ir" (-EFAULT)	\
	: "cc", "memory");				\
	smp_mb()

#endif
/*
 * futex_atomic_op_inuser() - decode @encoded_op and apply it atomically
 * to the user futex word at @uaddr.
 *
 * Returns -EFAULT if @uaddr is not writable user memory or the access
 * faults, -ENOSYS for an unknown op/cmp code; otherwise the boolean
 * result of comparing the futex word's OLD value against cmparg with
 * the encoded comparison operator.
 */
static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;	/* bits 28..30: operation */
	int cmp = (encoded_op >> 24) & 15;	/* bits 24..27: comparison */
	/*
	 * NOTE(review): the shift-left-then-right pairs sign-extend the
	 * 12-bit oparg/cmparg fields; this relies on arithmetic right
	 * shift of signed ints (implementation-defined in ISO C, but the
	 * behavior gcc guarantees) — standard kernel idiom of this era.
	 */
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret;

	/* Optional shift encoding: operand becomes 1 << oparg */
	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

#ifndef CONFIG_ARC_HAS_LLSC
	preempt_disable(); /* to guarantee atomic r-m-w of futex op */
#endif
	pagefault_disable();

	/* Each arm's insn computes %0 (new value) from %1 (old) and %3 (oparg) */
	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov %0, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		/* oldval = *uaddr; *uaddr += oparg ; ret = *uaddr */
		__futex_atomic_op("add %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		/* "and-not": bic computes old & ~oparg */
		__futex_atomic_op("bic %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();
#ifndef CONFIG_ARC_HAS_LLSC
	preempt_enable();
#endif

	/* On success, fold the old value into the requested comparison */
	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ:
			ret = (oldval == cmparg);
			break;
		case FUTEX_OP_CMP_NE:
			ret = (oldval != cmparg);
			break;
		case FUTEX_OP_CMP_LT:
			ret = (oldval < cmparg);
			break;
		case FUTEX_OP_CMP_GE:
			ret = (oldval >= cmparg);
			break;
		case FUTEX_OP_CMP_LE:
			ret = (oldval <= cmparg);
			break;
		case FUTEX_OP_CMP_GT:
			ret = (oldval > cmparg);
			break;
		default:
			ret = -ENOSYS;
		}
	}
	return ret;
}
/*
 * cmpxchg of futex (pagefaults disabled by caller)
 * Return 0 for success, -EFAULT otherwise
 *
 * Compare-and-swap on the user futex word: if *uaddr == @expval, store
 * @newval; either way the value found is returned through *uval.  Note a
 * compare mismatch is NOT an error — ret stays 0 and the caller detects
 * it by comparing *uval against @expval.
 *
 * LLSC build: llock/scond retry loop ("bnz 1b") gives true atomicity.
 * Non-LLSC build: plain ld/st guarded by preempt_disable() below.
 * A fault on the load or store (labels 1b/2b in __ex_table) lands in the
 * .fixup stub at 4:, setting ret = -EFAULT (%5 is the "ir"(-EFAULT) input).
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 expval,
			      u32 newval)
{
	int ret = 0;
	u32 existval;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

#ifndef CONFIG_ARC_HAS_LLSC
	preempt_disable(); /* to guarantee atomic r-m-w of futex op */
#endif
	smp_mb();

	__asm__ __volatile__(
#ifdef CONFIG_ARC_HAS_LLSC
	"1: llock %1, [%4] \n"
	" brne %1, %2, 3f \n"	/* old != expected: skip the store */
	"2: scond %3, [%4] \n"
	" bnz 1b \n"		/* scond failed: retry from llock */
#else
	"1: ld %1, [%4] \n"
	" brne %1, %2, 3f \n"
	"2: st %3, [%4] \n"
#endif
	"3: \n"
	" .section .fixup,\"ax\" \n"
	"4: mov %0, %5 \n"
	" j 3b \n"
	" .previous \n"
	" .section __ex_table,\"a\" \n"
	" .align 4 \n"
	" .word 1b, 4b \n"
	" .word 2b, 4b \n"
	" .previous\n"
	: "+&r"(ret), "=&r"(existval)
	: "r"(expval), "r"(newval), "r"(uaddr), "ir"(-EFAULT)
	: "cc", "memory");

	smp_mb();

#ifndef CONFIG_ARC_HAS_LLSC
	preempt_enable();
#endif
	*uval = existval;	/* report the value actually observed */
	return ret;
}
  180. #endif