[PATCH] lightweight robust futexes: i386
Source: include/asm-i386/futex.h (linux-2.6.git)
1 #ifndef _ASM_FUTEX_H
2 #define _ASM_FUTEX_H
3
4 #ifdef __KERNEL__
5
6 #include <linux/futex.h>
7 #include <asm/errno.h>
8 #include <asm/system.h>
9 #include <asm/processor.h>
10 #include <asm/uaccess.h>
11
/*
 * Run a single-instruction atomic op INSN (of the form "op %0, %2")
 * on the user word *uaddr.
 *
 *  - %0 (oldval) is preloaded with oparg via the "0" (oparg) tie; for
 *    xchgl/xaddl the instruction leaves the previous value of *uaddr
 *    in %0 on completion.
 *  - %1 (ret) is preloaded with 0 via "1" (0); it stays 0 on success.
 *  - If the access at label 1 faults, the __ex_table entry (1b,3b)
 *    sends the fault to the fixup at label 3, which stores %3
 *    (the "i" (-EFAULT) immediate) into ret and resumes at label 2.
 */
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg) \
  __asm__ __volatile (                                          \
"1:     " insn "\n"                                             \
"2:     .section .fixup,\"ax\"\n\
3:      mov     %3, %1\n\
        jmp     2b\n\
        .previous\n\
        .section __ex_table,\"a\"\n\
        .align  8\n\
        .long   1b,3b\n\
        .previous"                                              \
        : "=r" (oldval), "=r" (ret), "=m" (*uaddr)              \
        : "i" (-EFAULT), "m" (*uaddr), "0" (oparg), "1" (0))
25
/*
 * Read-modify-write op on the user word *uaddr as a cmpxchg retry loop:
 *
 *  1: load *uaddr into %0 (forced into %eax by "=&a" — cmpxchgl
 *     implicitly compares against %eax), copy it into the scratch
 *     register %3, then apply INSN (e.g. "orl %4, %3") with %4 = oparg;
 *  2: LOCK cmpxchgl tries to install %3; on contention (ZF clear)
 *     jnz loops back to 1 and re-reads the current value.
 *
 * On success: oldval = value seen before the update, ret = 0 (preloaded
 * via "1" (0)).  Faults at either label 1 or 2 are routed by the
 * __ex_table entries (1b,4b and 2b,4b) to fixup label 4, which stores
 * %5 (-EFAULT) into ret and jumps to the exit at label 3.
 *
 * NOTE: the "tem" operand names a local variable that must exist in
 * the expansion site's scope (see futex_atomic_op_inuser below).
 */
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg) \
  __asm__ __volatile (                                          \
"1:     movl    %2, %0\n\
        movl    %0, %3\n"                                       \
        insn "\n"                                               \
"2:     " LOCK_PREFIX "cmpxchgl %3, %2\n\
        jnz     1b\n\
3:      .section .fixup,\"ax\"\n\
4:      mov     %5, %1\n\
        jmp     3b\n\
        .previous\n\
        .section __ex_table,\"a\"\n\
        .align  8\n\
        .long   1b,4b,2b,4b\n\
        .previous"                                              \
        : "=&a" (oldval), "=&r" (ret), "=m" (*uaddr),           \
          "=&r" (tem)                                           \
        : "r" (oparg), "i" (-EFAULT), "m" (*uaddr), "1" (0))
44
/*
 * Decode and execute a FUTEX_WAKE_OP-style encoded operation on the
 * user-space word *uaddr, then evaluate the encoded comparison against
 * the word's previous value.
 *
 * encoded_op layout (as decoded below):
 *   bits 28-30: op      bits 24-27: cmp
 *   bits 12-23: oparg   bits  0-11: cmparg
 * oparg/cmparg are sign-extended via the shift pairs ((x<<8)>>20,
 * (x<<20)>>20) — relies on arithmetic right shift of signed int, as
 * kernel code conventionally does.
 *
 * Returns the (boolean) comparison result on success, -EFAULT if the
 * user address is invalid or faults, -ENOSYS for unknown op/cmp codes
 * or unsupported hardware.
 */
static inline int
futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
{
        int op = (encoded_op >> 28) & 7;
        int cmp = (encoded_op >> 24) & 15;
        int oparg = (encoded_op << 8) >> 20;
        int cmparg = (encoded_op << 20) >> 20;
        int oldval = 0, ret, tem;       /* tem is used inside __futex_atomic_op2 */
        /* OPARG_SHIFT flag: operand is a shift count, not a value. */
        if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
                oparg = 1 << oparg;

        if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
                return -EFAULT;

        /* Bump the preempt count so the op+compare sequence is not preempted. */
        inc_preempt_count();

        if (op == FUTEX_OP_SET)
                /* xchgl is implicitly locked; no LOCK_PREFIX needed. */
                __futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
        else {
#ifndef CONFIG_X86_BSWAP
                /* Family-3 (80386) CPUs lack cmpxchg/xadd used below —
                 * presumably why CONFIG_X86_BSWAP gates this; the ops
                 * other than SET cannot be done atomically there. */
                if (boot_cpu_data.x86 == 3)
                        ret = -ENOSYS;
                else
#endif
                switch (op) {
                case FUTEX_OP_ADD:
                        __futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret,
                                           oldval, uaddr, oparg);
                        break;
                case FUTEX_OP_OR:
                        __futex_atomic_op2("orl %4, %3", ret, oldval, uaddr,
                                           oparg);
                        break;
                case FUTEX_OP_ANDN:
                        /* ANDN: and with the complement of the operand. */
                        __futex_atomic_op2("andl %4, %3", ret, oldval, uaddr,
                                           ~oparg);
                        break;
                case FUTEX_OP_XOR:
                        __futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr,
                                           oparg);
                        break;
                default:
                        ret = -ENOSYS;
                }
        }

        dec_preempt_count();

        /* Only evaluate the comparison if the atomic op itself succeeded. */
        if (!ret) {
                switch (cmp) {
                case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
                case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
                case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
                case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
                case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
                case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
                default: ret = -ENOSYS;
                }
        }
        return ret;
}
106
/*
 * Atomically compare-and-exchange the user word: if *uaddr == oldval,
 * store newval.  Returns the value of *uaddr observed by cmpxchgl
 * (== oldval on success), or -EFAULT if the user access faults
 * (the fixup at label 3 overwrites %eax/oldval with -EFAULT and the
 * __ex_table entry 1b,3b routes the fault there).
 *
 * NOTE(review): -EFAULT is returned in-band — a user word that
 * legitimately contains the value -EFAULT is indistinguishable from a
 * fault to the caller; that is the interface this era of the futex
 * code expects.
 */
static inline int
futex_atomic_cmpxchg_inuser(int __user *uaddr, int oldval, int newval)
{
        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
                return -EFAULT;

        /* "0" (oldval) preloads %eax, the implicit compare operand of
         * cmpxchgl; "=a" reads back whatever the instruction left there. */
        __asm__ __volatile__(
                "1:     " LOCK_PREFIX "cmpxchgl %3, %1          \n"

                "2:     .section .fixup, \"ax\"                 \n"
                "3:     mov     %2, %0                          \n"
                "       jmp     2b                              \n"
                "       .previous                               \n"

                "       .section __ex_table, \"a\"              \n"
                "       .align  8                               \n"
                "       .long   1b,3b                           \n"
                "       .previous                               \n"

                : "=a" (oldval), "=m" (*uaddr)
                : "i" (-EFAULT), "r" (newval), "0" (oldval)
                : "memory"
        );

        return oldval;
}
133
134 #endif
135 #endif