pushl %ebx
movl 12(%esp), %ebx
-#if cond_lock != 0
- addl $cond_lock, %ebx
-#endif
/* Get internal lock. */
movl $1, %eax
testl %eax, %eax
jne 1f
-2: movl total_seq+4(%ebx), %eax
- movl total_seq(%ebx), %ecx
- cmpl wakeup_seq+4(%ebx), %eax
+2: addl $wakeup_seq, %ebx
+ movl total_seq+4-wakeup_seq(%ebx), %eax
+ movl total_seq-wakeup_seq(%ebx), %ecx
+ cmpl 4(%ebx), %eax
ja 3f
jb 4f
- cmpl wakeup_seq(%ebx), %ecx
+ cmpl (%ebx), %ecx
jna 4f
- /* Case all currently waiting threads to wake up. */
-3: movl %ecx, wakeup_seq(%ebx)
- movl %eax, wakeup_seq+4(%ebx)
+ /* Cause all currently waiting threads to recognize they are
+ woken up. */
+3: movl %ecx, (%ebx)
+ movl %eax, 4(%ebx)
+
+ /* Unlock. */
+ LOCK
+ decl cond_lock-wakeup_seq(%ebx)
+ jne 7f
/* Wake up all threads. */
- addl $wakeup_seq-cond_lock, %ebx
- movl $FUTEX_WAKE, %ecx
+8: movl $FUTEX_WAKE, %ecx
xorl %esi, %esi
movl $SYS_futex, %eax
movl $0x7fffffff, %edx
ENTER_KERNEL
- subl $wakeup_seq-cond_lock, %ebx
+ xorl %eax, %eax
+ popl %ebx
+ popl %esi
+ ret
+ .align 16
/* Unlock. */
4: LOCK
-#if cond_lock == 0
- decl (%ebx)
-#else
- decl cond_lock(%ebx)
-#endif
+ decl cond_lock-wakeup_seq(%ebx)
jne 5f
6: xorl %eax, %eax
jmp 2b
/* Unlock in loop requires waekup. */
-5:
-#if cond_lock == 0
- movl %ebx, %eax
-#else
- leal cond_lock(%ebx), %eax
-#endif
+5: leal cond_lock-wakeup_seq(%ebx), %eax
call __lll_mutex_unlock_wake
jmp 6b
+
+ /* Unlock in loop requires wakeup. */
+7: leal cond_lock-wakeup_seq(%ebx), %eax
+ call __lll_mutex_unlock_wake
+ jmp 8b
.size __pthread_cond_broadcast, .-__pthread_cond_broadcast
versioned_symbol (libpthread, __pthread_cond_broadcast, pthread_cond_broadcast,
GLIBC_2_3_2)