movl 8(%esp), %ebx
/* Get the mutex. */
- orl $-1, %eax
+ movl $1, %edx
+ xorl %eax, %eax
LOCK
- xaddl %eax, MUTEX(%ebx)
+ cmpxchgl %edx, MUTEX(%ebx)
+ testl %eax, %eax
jne 1f
/* One less waiter. If this was the last one needed wake
/* Release the mutex. */
LOCK
- addl $1, MUTEX(%ebx)
- jng 6f
+ subl $1, MUTEX(%ebx)
+ jne 6f
/* Wait for the remaining threads. The call will return immediately
if the CURR_EVENT memory has meanwhile been changed. */
waking the waiting threads since otherwise a new thread might
arrive and get woken up, too. */
LOCK
- addl $1, MUTEX(%ebx)
- jng 4f
+ subl $1, MUTEX(%ebx)
+ jne 4f
5: orl $-1, %eax /* == PTHREAD_BARRIER_SERIAL_THREAD */
ret
1: leal MUTEX(%ebx), %ecx
- call __lll_lock_wait
+ call __lll_mutex_lock_wait
jmp 2b
4: leal MUTEX(%ebx), %eax
- call __lll_unlock_wake
+ call __lll_mutex_unlock_wake
jmp 5b
6: leal MUTEX(%ebx), %eax
- call __lll_unlock_wake
+ call __lll_mutex_unlock_wake
jmp 7b
.size pthread_barrier_wait,.-pthread_barrier_wait
movl 16(%esp), %ebx
/* Get internal lock. */
- movl $1, %eax
+ movl $1, %edx
+ xorl %eax, %eax
LOCK
#if cond_lock == 0
- xaddl %eax, (%ebx)
+ cmpxchgl %edx, (%ebx)
#else
- xaddl %eax, cond_lock(%ebx)
+ cmpxchgl %edx, cond_lock(%ebx)
#endif
testl %eax, %eax
jne 1f
movl 28(%esp), %ebp
/* Get internal lock. */
- movl $1, %eax
+ movl $1, %edx
+ xorl %eax, %eax
LOCK
#if cond_lock == 0
- xaddl %eax, (%ebx)
+ cmpxchgl %edx, (%ebx)
#else
- xaddl %eax, cond_lock(%ebx)
+ cmpxchgl %edx, cond_lock(%ebx)
#endif
testl %eax, %eax
jne 1f
.LcleanupEND:
/* Lock. */
- movl $1, %eax
+ movl $1, %edx
+ xorl %eax, %eax
LOCK
#if cond_lock == 0
- xaddl %eax, (%ebx)
+ cmpxchgl %edx, (%ebx)
#else
- xaddl %eax, cond_lock(%ebx)
+ cmpxchgl %edx, cond_lock(%ebx)
#endif
testl %eax, %eax
jne 5f
movl %eax, %esi
/* Get internal lock. */
- movl $1, %eax
+ movl $1, %edx
+ xorl %eax, %eax
LOCK
#if cond_lock == 0
- xaddl %eax, (%ebx)
+ cmpxchgl %edx, (%ebx)
#else
- xaddl %eax, cond_lock(%ebx)
+ cmpxchgl %edx, cond_lock(%ebx)
#endif
testl %eax, %eax
je 1f
movl 16(%esp), %ebx
/* Get internal lock. */
- movl $1, %eax
+ movl $1, %edx
+ xorl %eax, %eax
LOCK
#if cond_lock == 0
- xaddl %eax, (%ebx)
+ cmpxchgl %edx, (%ebx)
#else
- xaddl %eax, cond_lock(%ebx)
+ cmpxchgl %edx, cond_lock(%ebx)
#endif
testl %eax, %eax
jne 1f
.LcleanupEND:
/* Lock. */
- movl $1, %eax
+ movl $1, %edx
+ xorl %eax, %eax
LOCK
#if cond_lock == 0
- xaddl %eax, (%ebx)
+ cmpxchgl %edx, (%ebx)
#else
- xaddl %eax, cond_lock(%ebx)
+ cmpxchgl %edx, cond_lock(%ebx)
#endif
testl %eax, %eax
jne 5f
movl %eax, %esi
/* Get internal lock. */
- movl $1, %eax
+ movl $1, %edx
+ xorl %eax, %eax
LOCK
#if cond_lock == 0
- xaddl %eax, (%ebx)
+ cmpxchgl %edx, (%ebx)
#else
- xaddl %eax, cond_lock(%ebx)
+ cmpxchgl %edx, cond_lock(%ebx)
#endif
testl %eax, %eax
je 1f
adcl $0, woken_seq+4(%ebx)
LOCK
+#if cond_lock == 0
+ subl $1, (%ebx)
+#else
subl $1, cond_lock(%ebx)
+#endif
je 2f
#if cond_lock == 0
movl 12(%esp), %ebx
/* Get the lock. */
- movl $1, %eax
+ movl $1, %edx
+ xorl %eax, %eax
LOCK
#if MUTEX == 0
- xaddl %eax, (%ebx)
+ cmpxchgl %edx, (%ebx)
#else
- xaddl %eax, MUTEX(%ebx)
+ cmpxchgl %edx, MUTEX(%ebx)
#endif
testl %eax, %eax
jne 1f
subl $READERS_WAKEUP, %ebx
/* Reget the lock. */
- movl $1, %eax
+ movl $1, %edx
+ xorl %eax, %eax
LOCK
#if MUTEX == 0
- xaddl %eax, (%ebx)
+ cmpxchgl %edx, (%ebx)
#else
- xaddl %eax, MUTEX(%ebx)
+ cmpxchgl %edx, MUTEX(%ebx)
#endif
testl %eax, %eax
jne 12f
movl 32(%esp), %edi
/* Get the lock. */
- movl $1, %eax
+ movl $1, %edx
+ xorl %eax, %eax
LOCK
#if MUTEX == 0
- xaddl %eax, (%ebp)
+ cmpxchgl %edx, (%ebp)
#else
- xaddl %eax, MUTEX(%ebp)
+ cmpxchgl %edx, MUTEX(%ebp)
#endif
testl %eax, %eax
jne 1f
leal READERS_WAKEUP(%ebp), %ebx
movl $SYS_futex, %eax
ENTER_KERNEL
- movl %eax, %edx
+ movl %eax, %ecx
17:
/* Reget the lock. */
- movl $1, %eax
+ movl $1, %edx
+ xorl %eax, %eax
LOCK
#if MUTEX == 0
- xaddl %eax, (%ebp)
+ cmpxchgl %edx, (%ebp)
#else
- xaddl %eax, MUTEX(%ebp)
+ cmpxchgl %edx, MUTEX(%ebp)
#endif
testl %eax, %eax
jne 12f
13: subl $1, READERS_QUEUED(%ebp)
- cmpl $-ETIMEDOUT, %edx
+ cmpl $-ETIMEDOUT, %ecx
jne 2b
18: movl $ETIMEDOUT, %ecx
call __lll_mutex_lock_wait
jmp 13b
-16: movl $-ETIMEDOUT, %edx
+16: movl $-ETIMEDOUT, %ecx
jmp 17b
19: movl $EINVAL, %ecx
movl 32(%esp), %edi
/* Get the lock. */
- movl $1, %eax
+ movl $1, %edx
+ xorl %eax, %eax
LOCK
#if MUTEX == 0
- xaddl %eax, (%ebp)
+ cmpxchgl %edx, (%ebp)
#else
- xaddl %eax, MUTEX(%ebp)
+ cmpxchgl %edx, MUTEX(%ebp)
#endif
testl %eax, %eax
jne 1f
leal WRITERS_WAKEUP(%ebp), %ebx
movl $SYS_futex, %eax
ENTER_KERNEL
- movl %eax, %edx
+ movl %eax, %ecx
17:
/* Reget the lock. */
- movl $1, %eax
+ movl $1, %edx
+ xorl %eax, %eax
LOCK
#if MUTEX == 0
- xaddl %eax, (%ebp)
+ cmpxchgl %edx, (%ebp)
#else
- xaddl %eax, MUTEX(%ebp)
+ cmpxchgl %edx, MUTEX(%ebp)
#endif
testl %eax, %eax
jne 12f
13: subl $1, WRITERS_QUEUED(%ebp)
- cmpl $-ETIMEDOUT, %edx
+ cmpl $-ETIMEDOUT, %ecx
jne 2b
18: movl $ETIMEDOUT, %ecx
call __lll_mutex_lock_wait
jmp 13b
-16: movl $-ETIMEDOUT, %edx
+16: movl $-ETIMEDOUT, %ecx
jmp 17b
19: movl $EINVAL, %ecx
movl 12(%esp), %edi
/* Get the lock. */
- movl $1, %eax
+ movl $1, %edx
+ xorl %eax, %eax
LOCK
#if MUTEX == 0
- xaddl %eax, (%edi)
+ cmpxchgl %edx, (%edi)
#else
- xaddl %eax, MUTEX(%edi)
+ cmpxchgl %edx, MUTEX(%edi)
#endif
testl %eax, %eax
jne 1f
movl 12(%esp), %ebx
/* Get the lock. */
- movl $1, %eax
+ movl $1, %edx
+ xorl %eax, %eax
LOCK
#if MUTEX == 0
- xaddl %eax, (%ebx)
+ cmpxchgl %edx, (%ebx)
#else
- xaddl %eax, MUTEX(%ebx)
+ cmpxchgl %edx, MUTEX(%ebx)
#endif
testl %eax, %eax
jne 1f
subl $WRITERS_WAKEUP, %ebx
/* Reget the lock. */
- movl $1, %eax
+ movl $1, %edx
+ xorl %eax, %eax
LOCK
#if MUTEX == 0
- xaddl %eax, (%ebx)
+ cmpxchgl %edx, (%ebx)
#else
- xaddl %eax, MUTEX(%ebx)
+ cmpxchgl %edx, MUTEX(%ebx)
#endif
testl %eax, %eax
jne 12f