 {
 	bool first;
 	bool retiring = false;
-	bool locked = false;
 	int num_entries = 0;
 	unsigned int qc_idx = 0;
 	unsigned long iflags;
 	if (qc_idx >= sdebug_max_queue)
 		return 0;
+	spin_lock_irqsave(&sqp->qc_lock, iflags);
+
 	for (first = true; first || qc_idx + 1 < sdebug_max_queue; ) {
-		if (!locked) {
-			spin_lock_irqsave(&sqp->qc_lock, iflags);
-			locked = true;
-		}
 		if (first) {
 			first = false;
 			if (!test_bit(qc_idx, sqp->in_use_bm))
 		}
 		WRITE_ONCE(sd_dp->defer_t, SDEB_DEFER_NONE);
 		spin_unlock_irqrestore(&sqp->qc_lock, iflags);
-		locked = false;
 		scsi_done(scp); /* callback to mid level */
 		num_entries++;
+		spin_lock_irqsave(&sqp->qc_lock, iflags);
 		if (find_first_bit(sqp->in_use_bm, sdebug_max_queue) >= sdebug_max_queue)
-			break;	/* if no more then exit without retaking spinlock */
+			break;
 	}
-	if (locked)
-		spin_unlock_irqrestore(&sqp->qc_lock, iflags);
+
+	spin_unlock_irqrestore(&sqp->qc_lock, iflags);
+
 	if (num_entries > 0)
 		atomic_add(num_entries, &sdeb_mq_poll_count);
 	return num_entries;
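
For reference, the control flow this change produces is: take the queue lock once before scanning the in-use bitmap, drop it only around the scsi_done() callback so the driver lock is not held across the mid-layer completion, re-acquire it, and release it a single time after the loop. Below is a minimal userspace sketch of that pattern, not the driver code itself: poll_queue, complete_entry, in_use and MAX_QUEUE are hypothetical stand-ins for the driver's loop, scsi_done(), sqp->in_use_bm and sdebug_max_queue, and a pthread spinlock stands in for spin_lock_irqsave()/spin_unlock_irqrestore().

/*
 * Sketch of the "lock once, drop only around the callback" pattern.
 * Hypothetical names; build with: cc -pthread sketch.c
 */
#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

#define MAX_QUEUE 8

static pthread_spinlock_t q_lock;       /* stand-in for sqp->qc_lock */
static bool in_use[MAX_QUEUE];          /* stand-in for sqp->in_use_bm */

static void complete_entry(int idx)     /* stand-in for scsi_done() */
{
	printf("completed entry %d\n", idx);
}

static int poll_queue(void)
{
	int idx, num_entries = 0;

	pthread_spin_lock(&q_lock);         /* taken once, before the scan */
	for (idx = 0; idx < MAX_QUEUE; idx++) {
		if (!in_use[idx])
			continue;
		in_use[idx] = false;
		/* drop the lock around the completion callback, as above */
		pthread_spin_unlock(&q_lock);
		complete_entry(idx);
		num_entries++;
		pthread_spin_lock(&q_lock);
	}
	pthread_spin_unlock(&q_lock);       /* single unlock after the loop */
	return num_entries;
}

int main(void)
{
	pthread_spin_init(&q_lock, PTHREAD_PROCESS_PRIVATE);
	in_use[2] = in_use[5] = true;
	printf("polled %d entries\n", poll_queue());
	pthread_spin_destroy(&q_lock);
	return 0;
}

With this structure the lock state is implied by position in the code rather than tracked in a "locked" flag, so every path out of the loop reaches exactly one final unlock.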