XXX In future we might even want to avoid it on UP machines. */
+/* Nonzero if locking is needed. */
+extern int __libc_locking_needed attribute_hidden;
+
# define lll_trylock(futex) \
({ unsigned char ret; \
- __asm __volatile ("cmpl $0, __libc_locking_needed\n\t" \
+ __asm __volatile ("cmpl $0, %5\n\t" \
"je,pt 0f\n\t" \
"lock\n" \
"0:\tcmpxchgl %2, %1; setne %0" \
: "=a" (ret), "=m" (futex) \
- : "r" (0), "1" (futex), "0" (1) \
+ : "r" (0), "1" (futex), "0" (1), \
+ "m" (__libc_locking_needed) \
: "memory"); \
ret; })
# define lll_lock(futex) \
(void) ({ int ignore1, ignore2; \
- __asm __volatile ("cmpl $0, __libc_locking_needed\n\t" \
+ __asm __volatile ("cmpl $0, %5\n\t" \
"je,pt 0f\n\t" \
"lock\n" \
"0:\txaddl %0, %2\n\t" \
".previous\n" \
"2:" \
: "=a" (ignore1), "=&c" (ignore2), "=m" (futex) \
- : "0" (-1), "2" (futex) \
+ : "0" (-1), "2" (futex), \
+ "m" (__libc_locking_needed) \
: "memory"); })
# define lll_unlock(futex) \
(void) ({ int ignore; \
- __asm __volatile ("cmpl $0, __libc_locking_needed\n\t" \
+ __asm __volatile ("cmpl $0, %3\n\t" \
"je,pt 0f\n\t" \
"lock\n" \
"0:\tincl %0\n\t" \
".previous\n" \
"2:" \
: "=m" (futex), "=&a" (ignore) \
- : "0" (futex) \
+ : "0" (futex), "m" (__libc_locking_needed) \
: "memory"); })
#endif
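
For review purposes, the change above only swaps how the SMP check reaches
__libc_locking_needed: instead of hard-coding the symbol name in the asm
template ("cmpl $0, __libc_locking_needed"), the variable is passed in as an
extra "m" input operand, so the compiler picks a valid addressing mode for
the hidden object (presumably to keep the reference PIC-friendly).  The
operand numbers work out: with two outputs and three earlier inputs the new
operand is %5 in lll_trylock and lll_lock, and with two outputs and one
earlier input it is %3 in lll_unlock.  The semantics are unchanged: when
__libc_locking_needed is zero, the je jumps over the lock prefix.

Purely as an illustration of those semantics (not a replacement -- a compiler
builtin cannot skip the lock prefix at run time the way the asm does), a
rough C sketch of lll_trylock could look like the following.  The value
convention (1 = unlocked, 0 = locked) is inferred from the neighbouring
xaddl -1 / incl in lll_lock and lll_unlock, sketch_lll_trylock is a made-up
name, and __sync_val_compare_and_swap is the ordinary GCC builtin.

/* Illustrative sketch only; the real macro stays in inline asm so the
   "lock" prefix can be skipped while the process is single-threaded.  */
extern int __libc_locking_needed;

static inline int
sketch_lll_trylock (int *futex)
{
  if (__libc_locking_needed == 0)
    {
      /* Single-threaded path, mirroring the branch that skips "lock":
         a plain compare-and-store is sufficient.  */
      if (*futex == 1)
        {
          *futex = 0;
          return 0;             /* acquired */
        }
      return 1;                 /* already locked */
    }

  /* Multi-threaded path: an atomic compare-and-swap, i.e. what
     "lock cmpxchgl" does in the macro (EAX preloaded with 1,
     exchanged for 0 on success, setne producing the result).  */
  return __sync_val_compare_and_swap (futex, 1, 0) != 1;
}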