Lines matching refs:sqp

Cross-reference listing: every line in the kernel's scsi_debug driver (drivers/scsi/scsi_debug.c) that references the per-submit-queue pointer sqp, shown as source line number, matched text, and enclosing function; "local" marks the line that declares sqp within that function.

4928 struct sdebug_queue *sqp; in sdebug_q_cmd_complete() local
4936 sqp = sdebug_q_arr + sd_dp->sqa_idx; in sdebug_q_cmd_complete()
4946 spin_lock_irqsave(&sqp->qc_lock, iflags); in sdebug_q_cmd_complete()
4948 sqcp = &sqp->qc_arr[qc_idx]; in sdebug_q_cmd_complete()
4951 spin_unlock_irqrestore(&sqp->qc_lock, iflags); in sdebug_q_cmd_complete()
4965 if (unlikely(!test_and_clear_bit(qc_idx, sqp->in_use_bm))) { in sdebug_q_cmd_complete()
4966 spin_unlock_irqrestore(&sqp->qc_lock, iflags); in sdebug_q_cmd_complete()
4976 spin_unlock_irqrestore(&sqp->qc_lock, iflags); in sdebug_q_cmd_complete()
4980 k = find_last_bit(sqp->in_use_bm, retval); in sdebug_q_cmd_complete()
4986 spin_unlock_irqrestore(&sqp->qc_lock, iflags); in sdebug_q_cmd_complete()
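The sdebug_q_cmd_complete() fragments above (4928-4986) show the completion side of the pattern: look the queue up from a stored index, then revalidate the slot under qc_lock with test_and_clear_bit() before releasing it. Below is a minimal sketch of that shape, not the driver's code: the struct layout is simplified (a single cmnd field stands in for the real per-slot state), SKETCH_CANQUEUE is an assumed depth, and only sdebug_q_arr, qc_lock, qc_arr, and in_use_bm are taken from the listing.

#include <linux/spinlock.h>
#include <linux/bitops.h>
#include <linux/atomic.h>
#include <linux/printk.h>

#define SKETCH_CANQUEUE 192	/* assumed slot count, for the sketch only */

struct sdebug_queued_cmd {
	void *cmnd;		/* stands in for the slot's real state */
};

struct sdebug_queue {
	struct sdebug_queued_cmd qc_arr[SKETCH_CANQUEUE];
	unsigned long in_use_bm[BITS_TO_LONGS(SKETCH_CANQUEUE)];
	spinlock_t qc_lock;	/* spin_lock_init() at setup, elided */
	atomic_t blocked;
};

static struct sdebug_queue *sdebug_q_arr;	/* one entry per submit queue */

/* Completion side: revalidate the slot under qc_lock before freeing it,
 * since an abort may have reclaimed it while the timer was pending. */
static void sketch_q_cmd_complete(int sqa_idx, int qc_idx)
{
	struct sdebug_queue *sqp = sdebug_q_arr + sqa_idx;
	struct sdebug_queued_cmd *sqcp = &sqp->qc_arr[qc_idx];
	unsigned long iflags;

	spin_lock_irqsave(&sqp->qc_lock, iflags);
	if (!test_and_clear_bit(qc_idx, sqp->in_use_bm)) {
		spin_unlock_irqrestore(&sqp->qc_lock, iflags);
		pr_err("%s: slot %d was not in use\n", __func__, qc_idx);
		return;
	}
	sqcp->cmnd = NULL;
	spin_unlock_irqrestore(&sqp->qc_lock, iflags);
	/* ...hand the command back to the SCSI midlayer here... */
}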
5266 struct sdebug_queue *sqp; in stop_queued_cmnd() local
5271 for (j = 0, sqp = sdebug_q_arr; j < submit_queues; ++j, ++sqp) { in stop_queued_cmnd()
5272 spin_lock_irqsave(&sqp->qc_lock, iflags); in stop_queued_cmnd()
5278 if (test_bit(k, sqp->in_use_bm)) { in stop_queued_cmnd()
5279 sqcp = &sqp->qc_arr[k]; in stop_queued_cmnd()
5294 spin_unlock_irqrestore(&sqp->qc_lock, iflags); in stop_queued_cmnd()
5296 clear_bit(k, sqp->in_use_bm); in stop_queued_cmnd()
5300 spin_unlock_irqrestore(&sqp->qc_lock, iflags); in stop_queued_cmnd()
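stop_queued_cmnd() (5266-5300) is the abort path: walk every queue and find the slot owning the target command. Note the ordering visible above, where qc_lock is released (5294) before the bit is cleared (5296), presumably so the cancellation work in between may sleep. A sketch reusing the types and globals from the first sketch; submit_queues and sdebug_max_queue are module state in the real driver, and the matching rule here is simplified to pointer equality.

static int submit_queues;	/* number of sdebug_queue entries */
static int sdebug_max_queue;	/* currently usable slots per queue */

static bool sketch_stop_queued_cmnd(void *cmnd)
{
	struct sdebug_queue *sqp;
	unsigned long iflags;
	int j, k;

	for (j = 0, sqp = sdebug_q_arr; j < submit_queues; ++j, ++sqp) {
		spin_lock_irqsave(&sqp->qc_lock, iflags);
		for (k = 0; k < sdebug_max_queue; ++k) {
			if (test_bit(k, sqp->in_use_bm) &&
			    sqp->qc_arr[k].cmnd == cmnd) {
				sqp->qc_arr[k].cmnd = NULL;
				spin_unlock_irqrestore(&sqp->qc_lock, iflags);
				/* cancel the slot's timer/work here; it may
				 * sleep, hence outside qc_lock */
				clear_bit(k, sqp->in_use_bm);
				return true;
			}
		}
		spin_unlock_irqrestore(&sqp->qc_lock, iflags);
	}
	return false;
}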
5311 struct sdebug_queue *sqp; in stop_all_queued() local
5316 for (j = 0, sqp = sdebug_q_arr; j < submit_queues; ++j, ++sqp) { in stop_all_queued()
5317 spin_lock_irqsave(&sqp->qc_lock, iflags); in stop_all_queued()
5319 if (test_bit(k, sqp->in_use_bm)) { in stop_all_queued()
5320 sqcp = &sqp->qc_arr[k]; in stop_all_queued()
5334 spin_unlock_irqrestore(&sqp->qc_lock, iflags); in stop_all_queued()
5336 clear_bit(k, sqp->in_use_bm); in stop_all_queued()
5337 spin_lock_irqsave(&sqp->qc_lock, iflags); in stop_all_queued()
5340 spin_unlock_irqrestore(&sqp->qc_lock, iflags); in stop_all_queued()
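stop_all_queued() (5311-5340) applies the same teardown to every busy slot; the fragment ordering (unlock 5334, clear_bit 5336, relock 5337) shows qc_lock being dropped and retaken per slot so the scan can continue after each cancellation. A sketch under the same assumptions as above:

static void sketch_stop_all_queued(void)
{
	struct sdebug_queue *sqp;
	unsigned long iflags;
	int j, k;

	for (j = 0, sqp = sdebug_q_arr; j < submit_queues; ++j, ++sqp) {
		spin_lock_irqsave(&sqp->qc_lock, iflags);
		for (k = 0; k < sdebug_max_queue; ++k) {
			if (test_bit(k, sqp->in_use_bm)) {
				sqp->qc_arr[k].cmnd = NULL;
				spin_unlock_irqrestore(&sqp->qc_lock, iflags);
				/* cancel the slot's timer/work here,
				 * outside the lock */
				clear_bit(k, sqp->in_use_bm);
				spin_lock_irqsave(&sqp->qc_lock, iflags);
			}
		}
		spin_unlock_irqrestore(&sqp->qc_lock, iflags);
	}
}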
5348 struct sdebug_queue *sqp; in free_all_queued() local
5351 for (j = 0, sqp = sdebug_q_arr; j < submit_queues; ++j, ++sqp) { in free_all_queued()
5353 sqcp = &sqp->qc_arr[k]; in free_all_queued()
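free_all_queued() (5348-5353) is the final sweep at teardown: the fragments show the same all-queue/all-slot iteration but no locking, presumably because it runs only after stop_all_queued() has quiesced the driver. A sketch; freeing a per-slot deferred descriptor is an assumption about the elided lines.

static void sketch_free_all_queued(void)
{
	struct sdebug_queue *sqp;
	int j, k;

	for (j = 0, sqp = sdebug_q_arr; j < submit_queues; ++j, ++sqp) {
		for (k = 0; k < SKETCH_CANQUEUE; ++k) {
			/* the real code likely kfree()s the slot's deferred
			 * descriptor here; our simplified slot only holds
			 * a command pointer */
			sqp->qc_arr[k].cmnd = NULL;
		}
	}
}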
5541 struct sdebug_queue *sqp; in block_unblock_all_queues() local
5543 for (j = 0, sqp = sdebug_q_arr; j < submit_queues; ++j, ++sqp) in block_unblock_all_queues()
5544 atomic_set(&sqp->blocked, (int)block); in block_unblock_all_queues()
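block_unblock_all_queues() (5541-5544) is the gate the sysfs store handlers below rely on: one atomic flag per queue, set across all queues here and tested under qc_lock on the submission path (5614). The fragment is short enough to reconstruct almost verbatim:

static void sketch_block_unblock_all_queues(bool block)
{
	struct sdebug_queue *sqp;
	int j;

	for (j = 0, sqp = sdebug_q_arr; j < submit_queues; ++j, ++sqp)
		atomic_set(&sqp->blocked, (int)block);
}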
5597 struct sdebug_queue *sqp; in schedule_resp() local
5612 sqp = get_queue(cmnd); in schedule_resp()
5613 spin_lock_irqsave(&sqp->qc_lock, iflags); in schedule_resp()
5614 if (unlikely(atomic_read(&sqp->blocked))) { in schedule_resp()
5615 spin_unlock_irqrestore(&sqp->qc_lock, iflags); in schedule_resp()
5622 spin_unlock_irqrestore(&sqp->qc_lock, iflags); in schedule_resp()
5638 k = find_first_zero_bit(sqp->in_use_bm, sdebug_max_queue); in schedule_resp()
5640 spin_unlock_irqrestore(&sqp->qc_lock, iflags); in schedule_resp()
5649 set_bit(k, sqp->in_use_bm); in schedule_resp()
5651 sqcp = &sqp->qc_arr[k]; in schedule_resp()
5655 spin_unlock_irqrestore(&sqp->qc_lock, iflags); in schedule_resp()
5661 clear_bit(k, sqp->in_use_bm); in schedule_resp()
5718 spin_lock_irqsave(&sqp->qc_lock, iflags); in schedule_resp()
5721 clear_bit(k, sqp->in_use_bm); in schedule_resp()
5722 spin_unlock_irqrestore(&sqp->qc_lock, iflags); in schedule_resp()
5735 spin_lock_irqsave(&sqp->qc_lock, iflags); in schedule_resp()
5739 sd_dp->sqa_idx = sqp - sdebug_q_arr; in schedule_resp()
5743 spin_unlock_irqrestore(&sqp->qc_lock, iflags); in schedule_resp()
5751 sd_dp->sqa_idx = sqp - sdebug_q_arr; in schedule_resp()
5766 spin_lock_irqsave(&sqp->qc_lock, iflags); in schedule_resp()
5770 sd_dp->sqa_idx = sqp - sdebug_q_arr; in schedule_resp()
5774 spin_unlock_irqrestore(&sqp->qc_lock, iflags); in schedule_resp()
5779 sd_dp->sqa_idx = sqp - sdebug_q_arr; in schedule_resp()
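schedule_resp() (5597-5779) is the submission side: claim a free slot under qc_lock, record the owning queue by pointer arithmetic so the completion path can find it again (the repeated sd_dp->sqa_idx = sqp - sdebug_q_arr above), and roll the bit back on any later failure (the relock/clear_bit pairs at 5718-5722). A heavily condensed sketch reusing the earlier types plus <linux/errno.h>; get_queue(), the deferred-work arming, and the real function's several branch points are collapsed into one path, and the error code is illustrative.

struct sketch_defer {
	int sqa_idx;	/* which sdebug_queue owns the slot */
	int qc_idx;	/* which slot within it */
};

static int sketch_schedule_resp(struct sdebug_queue *sqp, void *cmnd,
				struct sketch_defer *sd_dp)
{
	unsigned long iflags;
	int k;

	spin_lock_irqsave(&sqp->qc_lock, iflags);
	if (atomic_read(&sqp->blocked)) {
		spin_unlock_irqrestore(&sqp->qc_lock, iflags);
		return -EBUSY;	/* host blocked, e.g. during a store */
	}
	k = find_first_zero_bit(sqp->in_use_bm, sdebug_max_queue);
	if (k >= sdebug_max_queue) {
		spin_unlock_irqrestore(&sqp->qc_lock, iflags);
		return -EBUSY;	/* every slot busy */
	}
	set_bit(k, sqp->in_use_bm);
	sqp->qc_arr[k].cmnd = cmnd;
	sd_dp->sqa_idx = sqp - sdebug_q_arr;	/* completion finds us by index */
	sd_dp->qc_idx = k;
	spin_unlock_irqrestore(&sqp->qc_lock, iflags);

	/* if arming the timer/work fails after this point, the driver
	 * re-takes qc_lock and clear_bit()s the slot (5718-5722 above) */
	return 0;
}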
6005 struct sdebug_queue *sqp; in scsi_debug_show_info() local
6034 for (j = 0, sqp = sdebug_q_arr; j < submit_queues; ++j, ++sqp) { in scsi_debug_show_info()
6036 f = find_first_bit(sqp->in_use_bm, sdebug_max_queue); in scsi_debug_show_info()
6038 l = find_last_bit(sqp->in_use_bm, sdebug_max_queue); in scsi_debug_show_info()
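scsi_debug_show_info() (6005-6038) only reads the bitmaps: find_first_bit()/find_last_bit() bound each queue's busy slots without touching qc_arr, and the fragments show no lock taken, which is acceptable for a diagnostic snapshot. Sketch:

static void sketch_show_queue_occupancy(void)
{
	struct sdebug_queue *sqp;
	unsigned long f, l;
	int j;

	for (j = 0, sqp = sdebug_q_arr; j < submit_queues; ++j, ++sqp) {
		f = find_first_bit(sqp->in_use_bm, sdebug_max_queue);
		if (f == sdebug_max_queue)
			continue;		/* queue idle */
		l = find_last_bit(sqp->in_use_bm, sdebug_max_queue);
		pr_info("queue %d: busy slots within [%lu, %lu]\n", j, f, l);
	}
}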
6090 struct sdebug_queue *sqp; in delay_store() local
6093 for (j = 0, sqp = sdebug_q_arr; j < submit_queues; in delay_store()
6094 ++j, ++sqp) { in delay_store()
6095 k = find_first_bit(sqp->in_use_bm, in delay_store()
6130 struct sdebug_queue *sqp; in ndelay_store() local
6133 for (j = 0, sqp = sdebug_q_arr; j < submit_queues; in ndelay_store()
6134 ++j, ++sqp) { in ndelay_store()
6135 k = find_first_bit(sqp->in_use_bm, in ndelay_store()
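delay_store() (6090-6095) and ndelay_store() (6130-6135) share the same guard: before changing the timing mode, scan every queue with find_first_bit() and refuse if any command is still in flight. The store handlers appear to bracket this scan with block_unblock_all_queues() so no new command sneaks in meanwhile. Sketch of the guard:

static bool sketch_any_cmd_in_flight(void)
{
	struct sdebug_queue *sqp;
	int j;

	for (j = 0, sqp = sdebug_q_arr; j < submit_queues; ++j, ++sqp) {
		/* any set bit below sdebug_max_queue means a busy slot */
		if (find_first_bit(sqp->in_use_bm, sdebug_max_queue) <
		    sdebug_max_queue)
			return true;
	}
	return false;
}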
6456 struct sdebug_queue *sqp; in max_queue_store() local
6463 for (j = 0, sqp = sdebug_q_arr; j < submit_queues; in max_queue_store()
6464 ++j, ++sqp) { in max_queue_store()
6465 a = find_last_bit(sqp->in_use_bm, SDEBUG_CANQUEUE); in max_queue_store()
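max_queue_store() (6456-6465) scans with find_last_bit() over the full SDEBUG_CANQUEUE range rather than the current sdebug_max_queue, so shrinking the queue can take slots above the new limit into account while they are still busy. Sketch of that scan; what the driver then does with the result (e.g. deferring the shrink) is not visible in the fragments.

static int sketch_highest_busy_slot(void)
{
	struct sdebug_queue *sqp;
	unsigned long a;
	int j, hi = -1;

	for (j = 0, sqp = sdebug_q_arr; j < submit_queues; ++j, ++sqp) {
		a = find_last_bit(sqp->in_use_bm, SKETCH_CANQUEUE);
		if (a < SKETCH_CANQUEUE && (int)a > hi)
			hi = a;		/* highest slot still claimed */
	}
	return hi;	/* -1 when every queue is idle */
}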
7526 struct sdebug_queue *sqp; in sdebug_blk_mq_poll() local
7532 sqp = sdebug_q_arr + queue_num; in sdebug_blk_mq_poll()
7534 spin_lock_irqsave(&sqp->qc_lock, iflags); in sdebug_blk_mq_poll()
7536 qc_idx = find_first_bit(sqp->in_use_bm, sdebug_max_queue); in sdebug_blk_mq_poll()
7543 if (!test_bit(qc_idx, sqp->in_use_bm)) in sdebug_blk_mq_poll()
7546 qc_idx = find_next_bit(sqp->in_use_bm, sdebug_max_queue, qc_idx + 1); in sdebug_blk_mq_poll()
7551 sqcp = &sqp->qc_arr[qc_idx]; in sdebug_blk_mq_poll()
7576 if (unlikely(!test_and_clear_bit(qc_idx, sqp->in_use_bm))) { in sdebug_blk_mq_poll()
7578 sqp, queue_num, qc_idx, __func__); in sdebug_blk_mq_poll()
7589 k = find_last_bit(sqp->in_use_bm, retval); in sdebug_blk_mq_poll()
7596 spin_unlock_irqrestore(&sqp->qc_lock, iflags); in sdebug_blk_mq_poll()
7599 spin_lock_irqsave(&sqp->qc_lock, iflags); in sdebug_blk_mq_poll()
7600 if (find_first_bit(sqp->in_use_bm, sdebug_max_queue) >= sdebug_max_queue) in sdebug_blk_mq_poll()
7605 spin_unlock_irqrestore(&sqp->qc_lock, iflags); in sdebug_blk_mq_poll()
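sdebug_blk_mq_poll() (7526-7605) walks one queue's bitmap under qc_lock with find_first_bit()/find_next_bit(), completing each expired command and reporting the count to blk-mq; the unlock/relock pair at 7596/7599 shows the real function dropping the lock around the completion call and then rechecking whether the bitmap emptied. A condensed sketch, with the readiness test and the lock drop around the completion elided:

static int sketch_blk_mq_poll(unsigned int queue_num)
{
	struct sdebug_queue *sqp = sdebug_q_arr + queue_num;
	unsigned long iflags;
	int qc_idx, num = 0;

	spin_lock_irqsave(&sqp->qc_lock, iflags);
	qc_idx = find_first_bit(sqp->in_use_bm, sdebug_max_queue);
	while (qc_idx < sdebug_max_queue) {
		/* the real poll inspects the slot's deferred state here
		 * and skips commands that have not expired yet */
		if (test_and_clear_bit(qc_idx, sqp->in_use_bm)) {
			sqp->qc_arr[qc_idx].cmnd = NULL;
			++num;		/* one more completion to report */
		}
		qc_idx = find_next_bit(sqp->in_use_bm, sdebug_max_queue,
				       qc_idx + 1);
	}
	spin_unlock_irqrestore(&sqp->qc_lock, iflags);
	return num;	/* blk-mq expects the number completed */
}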