/* arch/x86/include/asm/futex.h */
  1. #ifndef _ASM_X86_FUTEX_H
  2. #define _ASM_X86_FUTEX_H
  3. #ifdef __KERNEL__
  4. #include <linux/futex.h>
  5. #include <linux/uaccess.h>
  6. #include <asm/asm.h>
  7. #include <asm/errno.h>
  8. #include <asm/processor.h>
  9. #include <asm/smap.h>
/*
 * __futex_atomic_op1 - run a single-instruction atomic op on the user
 * futex word at *uaddr, returning its previous value.
 *
 * @insn:   one instruction that atomically combines %0 (oparg in, oldval
 *          out) with %2 (*uaddr), e.g. "xchgl" or LOCK_PREFIX "xaddl".
 * @ret:    lvalue; set to 0 on success, -EFAULT if the access faults.
 * @oldval: lvalue; receives the previous value of *uaddr.
 *
 * ASM_STAC/ASM_CLAC bracket the user-memory access for SMAP.  A fault on
 * the access at label 1 is redirected by the exception table to the
 * .fixup stub at label 3, which stores -EFAULT into ret and resumes at
 * label 2.
 */
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)	\
	asm volatile("\t" ASM_STAC "\n"				\
		     "1:\t" insn "\n"				\
		     "2:\t" ASM_CLAC "\n"			\
		     "\t.section .fixup,\"ax\"\n"		\
		     "3:\tmov\t%3, %1\n"			\
		     "\tjmp\t2b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE(1b, 3b)			\
		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
		     : "i" (-EFAULT), "0" (oparg), "1" (0))
/*
 * __futex_atomic_op2 - read-modify-write the user futex word at *uaddr
 * with an op that has no single-instruction atomic form, using a
 * load / compute / cmpxchg retry loop.
 *
 * @insn:   instruction combining %4 (oparg) into %3 (tem), the proposed
 *          new value, e.g. "orl %4, %3".
 * @ret:    lvalue; set to 0 on success, -EFAULT if an access faults.
 * @oldval: lvalue; receives the value of *uaddr that the successful
 *          cmpxchg observed.
 *
 * NOTE: the "=&r" (tem) operand captures a variable literally named
 * `tem` from the caller's scope — callers must declare it.
 *
 * Loop: load *uaddr into eax (%0), copy to tem, apply insn to tem, then
 * LOCK cmpxchgl attempts to swap tem in; ZF clear means another CPU
 * changed the word, so retry from label 1.  Faults at labels 1 or 2 are
 * routed via the exception table to the .fixup stub at label 4, which
 * stores -EFAULT into ret.  ASM_STAC/ASM_CLAC bracket the user access
 * for SMAP.
 */
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)	\
	asm volatile("\t" ASM_STAC "\n"				\
		     "1:\tmovl %2, %0\n"			\
		     "\tmovl\t%0, %3\n"				\
		     "\t" insn "\n"				\
		     "2:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"	\
		     "\tjnz\t1b\n"				\
		     "3:\t" ASM_CLAC "\n"			\
		     "\t.section .fixup,\"ax\"\n"		\
		     "4:\tmov\t%5, %1\n"			\
		     "\tjmp\t3b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE(1b, 4b)			\
		     _ASM_EXTABLE(2b, 4b)			\
		     : "=&a" (oldval), "=&r" (ret),		\
		       "+m" (*uaddr), "=&r" (tem)		\
		     : "r" (oparg), "i" (-EFAULT), "1" (0))
  38. static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
  39. {
  40. int op = (encoded_op >> 28) & 7;
  41. int cmp = (encoded_op >> 24) & 15;
  42. int oparg = (encoded_op << 8) >> 20;
  43. int cmparg = (encoded_op << 20) >> 20;
  44. int oldval = 0, ret, tem;
  45. if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
  46. oparg = 1 << oparg;
  47. if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
  48. return -EFAULT;
  49. pagefault_disable();
  50. switch (op) {
  51. case FUTEX_OP_SET:
  52. __futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
  53. break;
  54. case FUTEX_OP_ADD:
  55. __futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
  56. uaddr, oparg);
  57. break;
  58. case FUTEX_OP_OR:
  59. __futex_atomic_op2("orl %4, %3", ret, oldval, uaddr, oparg);
  60. break;
  61. case FUTEX_OP_ANDN:
  62. __futex_atomic_op2("andl %4, %3", ret, oldval, uaddr, ~oparg);
  63. break;
  64. case FUTEX_OP_XOR:
  65. __futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr, oparg);
  66. break;
  67. default:
  68. ret = -ENOSYS;
  69. }
  70. pagefault_enable();
  71. if (!ret) {
  72. switch (cmp) {
  73. case FUTEX_OP_CMP_EQ:
  74. ret = (oldval == cmparg);
  75. break;
  76. case FUTEX_OP_CMP_NE:
  77. ret = (oldval != cmparg);
  78. break;
  79. case FUTEX_OP_CMP_LT:
  80. ret = (oldval < cmparg);
  81. break;
  82. case FUTEX_OP_CMP_GE:
  83. ret = (oldval >= cmparg);
  84. break;
  85. case FUTEX_OP_CMP_LE:
  86. ret = (oldval <= cmparg);
  87. break;
  88. case FUTEX_OP_CMP_GT:
  89. ret = (oldval > cmparg);
  90. break;
  91. default:
  92. ret = -ENOSYS;
  93. }
  94. }
  95. return ret;
  96. }
  97. static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
  98. u32 oldval, u32 newval)
  99. {
  100. return user_atomic_cmpxchg_inatomic(uval, uaddr, oldval, newval);
  101. }
  102. #endif
  103. #endif /* _ASM_X86_FUTEX_H */