Commit | Line | Data |
---|---|---|
77b4cf5c IK |
1 | #ifndef _ASM_ALPHA_FUTEX_H |
2 | #define _ASM_ALPHA_FUTEX_H | |
4732efbe | 3 | |
77b4cf5c | 4 | #ifdef __KERNEL__ |
4732efbe | 5 | |
77b4cf5c IK |
6 | #include <linux/futex.h> |
7 | #include <linux/uaccess.h> | |
8 | #include <asm/errno.h> | |
9 | #include <asm/barrier.h> | |
10 | ||
/*
 * __futex_atomic_op - atomic read-modify-write of a 32-bit user futex word.
 *
 * Expands to an Alpha load-locked/store-conditional loop:
 *   1: ldl_l loads the word at uaddr into oldval (%0); 'insn' then
 *      computes the new value into ret (%1) from %0 and oparg (%3);
 *   2: stl_c conditionally stores %1 back.  stl_c leaves 0 in %1 when
 *      the lock was lost, so "beq %1,4f" branches to the out-of-line
 *      retry stub (4: br 1b, placed in .subsection 2) and starts over.
 *   On success "mov $31,%1" overwrites %1 with zero ($31 is the Alpha
 *   zero register), so 'ret' ends up 0.  Label 3 marks the exit point.
 *
 * __ASM_SMP_MB emits a memory barrier ahead of the sequence on SMP.
 *
 * The __ex_table entries cover the user-space load (1b) and store (2b).
 * NOTE(review): the "lda $31,3b-Nb(%1)" forms presumably encode the
 * fixup displacement (to label 3) and the register (%1 = ret) that
 * receives the fault error, per the Alpha exception-table convention —
 * confirm against arch/alpha/mm/fault.c before relying on this.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
	__asm__ __volatile__( \
		__ASM_SMP_MB \
	"1: ldl_l %0,0(%2)\n" \
		insn \
	"2: stl_c %1,0(%2)\n" \
	" beq %1,4f\n" \
	" mov $31,%1\n" \
	"3: .subsection 2\n" \
	"4: br 1b\n" \
	" .previous\n" \
	" .section __ex_table,\"a\"\n" \
	" .long 1b-.\n" \
	" lda $31,3b-1b(%1)\n" \
	" .long 2b-.\n" \
	" lda $31,3b-2b(%1)\n" \
	" .previous\n" \
	: "=&r" (oldval), "=&r"(ret) \
	: "r" (uaddr), "r"(oparg) \
	: "memory")
31 | ||
8d7718aa | 32 | static inline int futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr) |
77b4cf5c IK |
33 | { |
34 | int op = (encoded_op >> 28) & 7; | |
35 | int cmp = (encoded_op >> 24) & 15; | |
36 | int oparg = (encoded_op << 8) >> 20; | |
37 | int cmparg = (encoded_op << 20) >> 20; | |
38 | int oldval = 0, ret; | |
39 | if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28)) | |
40 | oparg = 1 << oparg; | |
41 | ||
8d7718aa | 42 | if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32))) |
77b4cf5c IK |
43 | return -EFAULT; |
44 | ||
45 | pagefault_disable(); | |
46 | ||
47 | switch (op) { | |
48 | case FUTEX_OP_SET: | |
49 | __futex_atomic_op("mov %3,%1\n", ret, oldval, uaddr, oparg); | |
50 | break; | |
51 | case FUTEX_OP_ADD: | |
52 | __futex_atomic_op("addl %0,%3,%1\n", ret, oldval, uaddr, oparg); | |
53 | break; | |
54 | case FUTEX_OP_OR: | |
55 | __futex_atomic_op("or %0,%3,%1\n", ret, oldval, uaddr, oparg); | |
56 | break; | |
57 | case FUTEX_OP_ANDN: | |
58 | __futex_atomic_op("andnot %0,%3,%1\n", ret, oldval, uaddr, oparg); | |
59 | break; | |
60 | case FUTEX_OP_XOR: | |
61 | __futex_atomic_op("xor %0,%3,%1\n", ret, oldval, uaddr, oparg); | |
62 | break; | |
63 | default: | |
64 | ret = -ENOSYS; | |
65 | } | |
66 | ||
67 | pagefault_enable(); | |
68 | ||
69 | if (!ret) { | |
70 | switch (cmp) { | |
71 | case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break; | |
72 | case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break; | |
73 | case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break; | |
74 | case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break; | |
75 | case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break; | |
76 | case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break; | |
77 | default: ret = -ENOSYS; | |
78 | } | |
79 | } | |
80 | return ret; | |
81 | } | |
82 | ||
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange the user futex
 * word at @uaddr: if it currently holds @oldval, replace it with
 * @newval.  The value actually found is stored through @uval either way.
 *
 * Returns 0 on success (whether or not the exchange happened), or
 * -EFAULT if @uaddr is not writable user memory or the access faults.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0, cmp;
	u32 prev;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	/*
	 * ldl_l/stl_c loop: load-locked the word (1:), compare it with
	 * oldval; on mismatch skip the store entirely (branch to 3).
	 * Otherwise store-conditionally newval (2:); stl_c leaves 0 in
	 * %2 when the lock was lost, in which case branch to the
	 * out-of-line stub (4: br 1b, in .subsection 2) and retry.
	 * __ASM_SMP_MB emits the SMP memory barrier ahead of the loop.
	 *
	 * The __ex_table entries cover the user load (1b) and store (2b).
	 * NOTE(review): the "lda $31,3b-Nb(%0)" forms presumably encode
	 * the fixup target (label 3) and the register (%0 = ret) that
	 * receives the fault error, per the Alpha exception-table
	 * convention — confirm against arch/alpha/mm/fault.c.
	 */
	__asm__ __volatile__ (
		__ASM_SMP_MB
	"1: ldl_l %1,0(%3)\n"
	" cmpeq %1,%4,%2\n"
	" beq %2,3f\n"
	" mov %5,%2\n"
	"2: stl_c %2,0(%3)\n"
	" beq %2,4f\n"
	"3: .subsection 2\n"
	"4: br 1b\n"
	" .previous\n"
	" .section __ex_table,\"a\"\n"
	" .long 1b-.\n"
	" lda $31,3b-1b(%0)\n"
	" .long 2b-.\n"
	" lda $31,3b-2b(%0)\n"
	" .previous\n"
	: "+r"(ret), "=&r"(prev), "=&r"(cmp)
	: "r"(uaddr), "r"((long)oldval), "r"(newval)
	: "memory");

	*uval = prev;
	return ret;
}
117 | ||
118 | #endif /* __KERNEL__ */ | |
119 | #endif /* _ASM_ALPHA_FUTEX_H */ |