Searched refs:qc (Results 1 – 25 of 97) sorted by relevance

/drivers/ata/
libata-sff.c
607 qc->curbytes = qc->nbytes; in ata_pio_sector()
610 if (qc->curbytes == qc->nbytes - qc->sect_size) in ata_pio_sector()
621 count = min(qc->cursg->length - qc->cursg_ofs, qc->sect_size); in ata_pio_sector()
643 qc->cursg = sg_next(qc->cursg); in ata_pio_sector()
668 nsect = min((qc->nbytes - qc->curbytes) / qc->sect_size, in ata_pio_sectors()
695 ap->ops->sff_data_xfer(qc, qc->cdb, qc->dev->cdb_len, 1); in atapi_send_cdb()
746 qc->nbytes, qc->curbytes, bytes); in __atapi_pio_bytes()
775 qc->cursg = sg_next(qc->cursg); in __atapi_pio_bytes()
889 qc = ata_qc_from_tag(ap, qc->tag); in ata_hsm_qc_complete()
1302 ata_tf_to_host(ap, &qc->tf, qc->tag); in ata_sff_qc_issue()
[all …]
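
Every hit above dereferences struct ata_queued_cmd, the per-command descriptor that libata hands to each driver's qc_prep/qc_issue hooks (declared in include/linux/libata.h). As a reading aid, here is a heavily abridged, non-authoritative sketch listing only the fields these results touch; the real definition has more members and a different layout.

/*
 * Abridged sketch of struct ata_queued_cmd; see include/linux/libata.h
 * for the authoritative definition.  Only fields visible in the search
 * hits are shown, and kernel versions differ in layout and content.
 */
struct ata_queued_cmd {
	struct ata_port		*ap;		/* port the command runs on (qc->ap) */
	struct ata_device	*dev;		/* target device (qc->dev) */
	struct ata_taskfile	tf;		/* command taskfile: lbal/lbam/lbah, nsect, flags, protocol */
	u8			cdb[ATAPI_CDB_LEN];	/* ATAPI packet, if any (qc->cdb) */
	unsigned long		flags;		/* ATA_QCFLAG_* (DMAMAP, SENSE_VALID, ...) */
	unsigned int		tag;		/* libata tag */
	unsigned int		hw_tag;		/* hardware/NCQ tag */
	unsigned int		n_elem;		/* number of scatterlist entries */
	enum dma_data_direction	dma_dir;	/* DMA_TO_DEVICE, DMA_FROM_DEVICE, ... */
	unsigned int		sect_size;	/* PIO sector size */
	unsigned int		nbytes;		/* total transfer length */
	unsigned int		curbytes;	/* bytes transferred so far (PIO state machine) */
	struct scatterlist	*sg;		/* scatter/gather table */
	struct scatterlist	*cursg;		/* current S/G entry (PIO state machine) */
	unsigned int		cursg_ofs;	/* offset into cursg */
	unsigned int		err_mask;	/* AC_ERR_* bits on failure */
	struct ata_taskfile	result_tf;	/* taskfile read back at completion */
	struct scsi_cmnd	*scsicmd;	/* originating SCSI command, if any */
};
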
pdc_adma.c
266 for_each_sg(qc->sg, sg, qc->n_elem, si) { in adma_fill_sg()
335 buf[i++] = qc->tf.lbal; in adma_qc_prep()
337 buf[i++] = qc->tf.lbam; in adma_qc_prep()
339 buf[i++] = qc->tf.lbah; in adma_qc_prep()
351 i = adma_fill_sg(qc); in adma_qc_prep()
372 adma_packet_start(qc); in adma_qc_issue()
406 if (qc && (!(qc->tf.flags & ATA_TFLAG_POLLING))) { in adma_intr_pkt()
417 if (!qc->err_mask) in adma_intr_pkt()
418 ata_qc_complete(qc); in adma_intr_pkt()
449 if (qc && (!(qc->tf.flags & ATA_TFLAG_POLLING))) { in adma_intr_mmio()
[all …]
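
pdc_adma's adma_fill_sg() above, like qs_fill_sg(), sil_fill_sg(), ahci_fill_sg() and the other *_fill_sg() helpers further down this page, walks the DMA-mapped scatterlist in qc->sg with for_each_sg() and emits one hardware descriptor per segment. A generic, hedged sketch of that pattern follows; struct my_prd and my_fill_sg() are hypothetical stand-ins, not any real controller's format, and real drivers split addresses into low/high words as their hardware requires.

#include <linux/libata.h>
#include <linux/scatterlist.h>

/* Hypothetical S/G descriptor; real controllers define their own layout. */
struct my_prd {
	__le32 addr;	/* DMA address of the segment (low 32 bits here) */
	__le32 len;	/* length of the segment in bytes */
};

static unsigned int my_fill_sg(struct ata_queued_cmd *qc, struct my_prd *prd)
{
	struct scatterlist *sg;
	unsigned int si, n = 0;

	/* One descriptor per DMA-mapped segment. */
	for_each_sg(qc->sg, sg, qc->n_elem, si) {
		prd[n].addr = cpu_to_le32(sg_dma_address(sg));
		prd[n].len  = cpu_to_le32(sg_dma_len(sg));
		n++;
	}
	return n;	/* number of descriptors written */
}
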
libata-scsi.c
229 information = ata_tf_read_block(&qc->result_tf, qc->dev); in ata_scsi_set_sense_information()
765 qc->tag = qc->hw_tag = tag; in ata_scsi_qc_new()
766 qc->ap = ap; in ata_scsi_qc_new()
767 qc->dev = dev; in ata_scsi_qc_new()
780 return qc; in ata_scsi_qc_new()
794 qc->nbytes = scsi_bufflen(scmd) + qc->extrabytes; in ata_qc_set_pc_nbytes()
1269 if (!ata_dev_power_init_tf(qc->dev, &qc->tf, cdb[4] & 0x1)) { in ata_scsi_start_stop_xlat()
1649 ata_qc_free(qc); in ata_qc_done()
1683 ata_qc_done(qc); in ata_scsi_qc_complete()
1720 if (!qc) in ata_scsi_translate()
[all …]
sata_dwc_460ex.c
347 desc = dmaengine_prep_slave_sg(hsdevp->chan, qc->sg, qc->n_elem, in dma_dwc_xfer_setup()
358 qc->sg, qc->n_elem, &hsdev->dmadr); in dma_dwc_xfer_setup()
447 if (qc) in sata_dwc_error_intr()
534 if (unlikely(!qc || (qc->tf.flags & ATA_TFLAG_POLLING))) { in sata_dwc_isr()
537 __func__, qc); in sata_dwc_isr()
719 if (!qc) { in sata_dwc_dma_xfer_complete()
744 u8 tag = qc->hw_tag; in sata_dwc_qc_complete()
755 qc->tf.command, status, ap->print_id, qc->tf.protocol); in sata_dwc_qc_complete()
761 ata_qc_complete(qc); in sata_dwc_qc_complete()
942 sata_dwc_exec_command_by_tag(qc->ap, &qc->tf, tag, in sata_dwc_bmdma_setup_by_tag()
[all …]
sata_sx4.c
434 for_each_sg(qc->sg, sg, qc->n_elem, si) { in pdc20621_dma_prep()
449 i = pdc20621_ata_pkt(&qc->tf, qc->dev->devno, &pp->dimm_buf[0], portno); in pdc20621_dma_prep()
486 i = pdc20621_ata_pkt(&qc->tf, qc->dev->devno, &pp->dimm_buf[0], portno); in pdc20621_nodata_prep()
511 pdc20621_dma_prep(qc); in pdc20621_qc_prep()
555 pp->hdma[idx].qc = qc; in pdc20621_push_hdma()
613 pdc20621_dump_hdma(qc); in pdc20621_packet_start()
675 ata_qc_complete(qc); in pdc20621_host_intr()
676 pdc20621_pop_hdma(qc); in pdc20621_host_intr()
714 ata_qc_complete(qc); in pdc20621_host_intr()
725 ata_qc_complete(qc); in pdc20621_host_intr()
[all …]
libata-eh.c
852 if (qc) in ata_eh_nr_in_flight()
881 if (qc) in ata_eh_fastdrain_timerfn()
1027 if (qc && (!link || qc->dev->link == link)) { in ata_do_link_abort()
1988 if (!qc) in ata_eh_get_non_ncq_success_sense()
2595 if (qc->flags & ATA_QCFLAG_SENSE_VALID && !qc->err_mask) in ata_eh_link_report()
2652 struct ata_taskfile *cmd = &qc->tf, *res = &qc->result_tf; in ata_eh_link_report()
2657 ata_dev_phys_link(qc->dev) != link || !qc->err_mask) in ata_eh_link_report()
2698 prot_str, qc->nbytes, dma_str[qc->dma_dir]); in ata_eh_link_report()
2729 res->device, qc->err_mask, ata_err_string(qc->err_mask), in ata_eh_link_report()
3578 qc->scsicmd->allowed = max(qc->scsicmd->allowed, 1); in ata_eh_maybe_retry_flush()
[all …]
sata_qstor.c
244 for_each_sg(qc->sg, sg, qc->n_elem, si) { in qs_fill_sg()
268 qs_enter_reg_mode(qc->ap); in qs_qc_prep()
272 nelem = qs_fill_sg(qc); in qs_qc_prep()
312 switch (qc->tf.protocol) { in qs_qc_issue()
315 qs_packet_start(qc); in qs_qc_issue()
334 if (!qc->err_mask) { in qs_do_or_die()
335 ata_qc_complete(qc); in qs_do_or_die()
376 if (qc && (!(qc->tf.flags & ATA_TFLAG_POLLING))) { in qs_intr_pkt()
381 qs_do_or_die(qc, sDST); in qs_intr_pkt()
399 struct ata_queued_cmd *qc; in qs_intr_mmio() local
[all …]
acard-ahci.c
191 for_each_sg(qc->sg, sg, qc->n_elem, si) { in acard_ahci_fill_sg()
213 struct ata_port *ap = qc->ap; in acard_ahci_qc_prep()
215 int is_atapi = ata_is_atapi(qc->tf.protocol); in acard_ahci_qc_prep()
226 ata_tf_to_fis(&qc->tf, qc->dev->link->pmp, 1, cmd_tbl); in acard_ahci_qc_prep()
229 memcpy(cmd_tbl + AHCI_CMD_TBL_CDB, qc->cdb, qc->dev->cdb_len); in acard_ahci_qc_prep()
232 if (qc->flags & ATA_QCFLAG_DMAMAP) in acard_ahci_qc_prep()
233 acard_ahci_fill_sg(qc, cmd_tbl); in acard_ahci_qc_prep()
241 if (qc->tf.flags & ATA_TFLAG_WRITE) in acard_ahci_qc_prep()
246 ahci_fill_cmd_slot(pp, qc->hw_tag, opts); in acard_ahci_qc_prep()
265 if (qc->tf.protocol == ATA_PROT_PIO && qc->dma_dir == DMA_FROM_DEVICE && in acard_ahci_qc_fill_rtf()
[all …]
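
acard_ahci_qc_prep() above shows the usual shape of a qc_prep hook: serialize qc->tf into a command FIS with ata_tf_to_fis(), append the ATAPI CDB when the command uses a packet protocol, and emit scatter/gather entries only when libata has DMA-mapped the command (ATA_QCFLAG_DMAMAP). A condensed, hedged sketch follows, reusing the hypothetical my_fill_sg() from the earlier sketch and a plain buffer in place of the controller's real command table; the 0x40/0x80 offsets are illustrative only.

static enum ata_completion_errors my_qc_prep(struct ata_queued_cmd *qc)
{
	u8 cmd_tbl[256] = { };	/* placeholder for the per-command table */

	/* 1. Build the Host-to-Device Register FIS from the taskfile. */
	ata_tf_to_fis(&qc->tf, qc->dev->link->pmp, 1, cmd_tbl);

	/* 2. Packet commands carry the CDB behind the FIS. */
	if (ata_is_atapi(qc->tf.protocol))
		memcpy(cmd_tbl + 0x40, qc->cdb, qc->dev->cdb_len);

	/* 3. Emit S/G entries only if the command was DMA-mapped. */
	if (qc->flags & ATA_QCFLAG_DMAMAP)
		my_fill_sg(qc, (struct my_prd *)(cmd_tbl + 0x80));

	return AC_ERR_OK;
}
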
sata_inic162x.c
397 if (unlikely(!qc)) in inic_host_intr()
410 ata_qc_complete(qc); in inic_host_intr()
416 qc ? qc->tf.command : 0xff, irq_stat, idma_stat); in inic_host_intr()
470 for_each_sg(qc->sg, sg, qc->n_elem, si) { in inic_fill_sg()
492 cdb_len = qc->dev->cdb_len; in inic_qc_prep()
506 cpb->nsect = qc->tf.nsect; in inic_qc_prep()
507 cpb->lbal = qc->tf.lbal; in inic_qc_prep()
508 cpb->lbam = qc->tf.lbam; in inic_qc_prep()
509 cpb->lbah = qc->tf.lbah; in inic_qc_prep()
536 inic_fill_sg(prd, qc); in inic_qc_prep()
[all …]
sata_promise.c
484 u8 *cdb = qc->cdb; in pdc_atapi_pkt()
524 buf[19] = qc->tf.lbal; in pdc_atapi_pkt()
535 buf[23] = qc->tf.lbam; in pdc_atapi_pkt()
580 for_each_sg(qc->sg, sg, qc->n_elem, si) { in pdc_fill_sg()
637 pdc_fill_sg(qc); in pdc_qc_prep()
640 i = pdc_pkt_header(&qc->tf, qc->ap->bmdma_prd_dma, in pdc_qc_prep()
649 pdc_fill_sg(qc); in pdc_qc_prep()
652 pdc_fill_sg(qc); in pdc_qc_prep()
655 pdc_atapi_pkt(qc); in pdc_qc_prep()
893 ata_qc_complete(qc); in pdc_host_intr()
[all …]
pata_octeon_cf.c
524 qc->cursg = qc->sg; in octeon_cf_dma_setup()
542 sg = qc->cursg; in octeon_cf_dma_start()
599 trace_ata_bmdma_stop(ap, &qc->tf, qc->tag); in octeon_cf_dma_finished()
628 if (unlikely(qc->err_mask) && (qc->tf.protocol == ATA_PROT_DMA)) in octeon_cf_dma_finished()
663 if (!qc || (qc->tf.flags & ATA_TFLAG_POLLING)) in octeon_cf_interrupt()
668 qc->cursg = sg_next(qc->cursg); in octeon_cf_interrupt()
670 trace_ata_bmdma_start(ap, &qc->tf, qc->tag); in octeon_cf_interrupt()
712 struct ata_queued_cmd *qc; in octeon_cf_delayed_finish() local
736 if (qc && (!(qc->tf.flags & ATA_TFLAG_POLLING))) in octeon_cf_delayed_finish()
771 trace_ata_bmdma_setup(ap, &qc->tf, qc->tag); in octeon_cf_qc_issue()
[all …]
sata_nv.c
870 if (unlikely(!qc || (qc->tf.flags & ATA_TFLAG_POLLING))) { in nv_host_intr()
1320 for_each_sg(qc->sg, sg, qc->n_elem, si) { in nv_adma_fill_sg()
1448 if (qc && (!(qc->tf.flags & ATA_TFLAG_POLLING))) { in nv_generic_interrupt()
1746 qc.ap = ap; in __ata_bmdma_stop()
1747 ata_bmdma_stop(&qc); in __ata_bmdma_stop()
1960 nv_swncq_fill_sg(qc); in nv_swncq_qc_prep()
1976 for_each_sg(qc->sg, sg, qc->n_elem, si) { in nv_swncq_fill_sg()
2006 if (qc == NULL) in nv_swncq_issue_atacmd()
2017 trace_ata_exec_command(ap, &qc->tf, qc->hw_tag); in nv_swncq_issue_atacmd()
2131 WARN_ON(qc == NULL); in nv_swncq_sdbfis()
[all …]
sata_sil24.c
775 for_each_sg(qc->sg, sg, qc->n_elem, si) { in sil24_fill_sg()
791 u8 prot = qc->tf.protocol; in sil24_qc_defer()
829 return ata_std_qc_defer(qc); in sil24_qc_defer()
834 struct ata_port *ap = qc->ap; in sil24_qc_prep()
861 memcpy(cb->atapi.cdb, qc->cdb, qc->dev->cdb_len); in sil24_qc_prep()
872 ata_tf_to_fis(&qc->tf, qc->dev->link->pmp, 1, prb->fis); in sil24_qc_prep()
875 sil24_fill_sg(qc, sge); in sil24_qc_prep()
905 sil24_read_tf(qc->ap, qc->hw_tag, &qc->result_tf); in sil24_qc_fill_rtf()
1072 if (qc) in sil24_error_intr()
1073 qc->err_mask |= err_mask; in sil24_error_intr()
[all …]
sata_sil.c
253 struct ata_port *ap = qc->ap; in sil_bmdma_stop()
266 struct ata_port *ap = qc->ap; in sil_bmdma_setup()
279 struct ata_port *ap = qc->ap; in sil_bmdma_start()
296 struct ata_port *ap = qc->ap; in sil_fill_sg()
301 for_each_sg(qc->sg, sg, qc->n_elem, si) { in sil_fill_sg()
324 sil_fill_sg(qc); in sil_qc_prep()
447 if (unlikely(!qc || (qc->tf.flags & ATA_TFLAG_POLLING))) { in sil_host_intr()
468 if (ata_is_dma(qc->tf.protocol)) { in sil_host_intr()
470 ap->ops->bmdma_stop(qc); in sil_host_intr()
495 if (unlikely(qc->err_mask) && ata_is_dma(qc->tf.protocol)) in sil_host_intr()
[all …]
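
sil_host_intr() above illustrates the common interrupt path for a single in-flight, non-NCQ command: fetch it with ata_qc_from_tag(), bail out if libata is polling, stop the BMDMA engine for DMA protocols, then complete the qc. A hedged sketch of that shape, assuming an SFF/BMDMA-style controller so that sff_check_status() and bmdma_stop() exist in the port ops:

static unsigned int my_host_intr(struct ata_port *ap)
{
	struct ata_queued_cmd *qc;

	/* The single active non-NCQ command is tracked in link->active_tag. */
	qc = ata_qc_from_tag(ap, ap->link.active_tag);
	if (unlikely(!qc || (qc->tf.flags & ATA_TFLAG_POLLING)))
		return 0;	/* nothing in flight, or the PIO state machine owns it */

	/* For DMA protocols, halt the engine before completing the command. */
	if (ata_is_dma(qc->tf.protocol))
		ap->ops->bmdma_stop(qc);

	/* Latch device status into err_mask, then hand the qc back to libata. */
	qc->err_mask |= ac_err_mask(ap->ops->sff_check_status(ap));
	ata_qc_complete(qc);
	return 1;	/* one interrupt handled */
}
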
pata_arasan_cf.c
218 struct ata_queued_cmd *qc; member
366 struct ata_queued_cmd *qc = acdev->qc; in dma_complete() local
369 acdev->qc = NULL; in dma_complete()
373 if (unlikely(qc->err_mask) && ata_is_dma(qc->tf.protocol)) in dma_complete()
522 struct ata_queued_cmd *qc = acdev->qc; in data_xfer() local
538 for_each_sg(qc->sg, sg, qc->n_elem, temp) { in data_xfer()
580 struct ata_queued_cmd *qc = acdev->qc; in delayed_finish() local
636 struct ata_queued_cmd *qc = acdev->qc; in arasan_cf_interrupt() local
676 struct ata_queued_cmd *qc = acdev->qc; in arasan_cf_dma_start() local
711 acdev->qc = qc; in arasan_cf_qc_issue()
[all …]
pata_pxa.c
49 struct pata_pxa_data *pd = qc->ap->private_data; in pxa_qc_prep()
53 if (!(qc->flags & ATA_QCFLAG_DMAMAP)) in pxa_qc_prep()
57 tx = dmaengine_prep_slave_sg(pd->dma_chan, qc->sg, qc->n_elem, dir, in pxa_qc_prep()
60 ata_dev_err(qc->dev, "prep_slave_sg() failed\n"); in pxa_qc_prep()
74 static void pxa_bmdma_setup(struct ata_queued_cmd *qc) in pxa_bmdma_setup() argument
76 qc->ap->ops->sff_exec_command(qc->ap, &qc->tf); in pxa_bmdma_setup()
82 static void pxa_bmdma_start(struct ata_queued_cmd *qc) in pxa_bmdma_start() argument
84 struct pata_pxa_data *pd = qc->ap->private_data; in pxa_bmdma_start()
92 static void pxa_bmdma_stop(struct ata_queued_cmd *qc) in pxa_bmdma_stop() argument
94 struct pata_pxa_data *pd = qc->ap->private_data; in pxa_bmdma_stop()
[all …]
libata-sata.c
784 struct ata_queued_cmd *qc; in ata_qc_complete_multiple() local
787 qc = ata_qc_from_tag(ap, tag); in ata_qc_complete_multiple()
788 if (qc) { in ata_qc_complete_multiple()
789 ata_qc_complete(qc); in ata_qc_complete_multiple()
1543 struct ata_queued_cmd *qc; in ata_eh_get_ncq_success_sense() local
1574 qc->err_mask || in ata_eh_get_ncq_success_sense()
1641 struct ata_queued_cmd *qc; in ata_eh_analyze_ncq_error() local
1658 if (qc->err_mask) in ata_eh_analyze_ncq_error()
1687 if (qc->result_tf.auxiliary) { in ata_eh_analyze_ncq_error()
1707 if (qc->err_mask) in ata_eh_analyze_ncq_error()
[all …]
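
ata_qc_complete_multiple() above is the NCQ counterpart: the driver reads the hardware's view of which tags are still active (for AHCI, the SActive register), and every command that has dropped out of that mask gets completed. A simplified, hedged sketch of the completion loop over a done_mask of finished tags; the real helper also cross-checks the mask against ap->qc_active and flags protocol violations.

static void my_complete_ncq(struct ata_port *ap, u64 done_mask)
{
	while (done_mask) {
		unsigned int tag = __ffs64(done_mask);	/* lowest finished tag */
		struct ata_queued_cmd *qc = ata_qc_from_tag(ap, tag);

		if (qc)
			ata_qc_complete(qc);	/* fills result_tf, runs the completion fn */

		done_mask &= ~(1ULL << tag);
	}
}
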
pata_ns87415.c
120 static void ns87415_bmdma_setup(struct ata_queued_cmd *qc) in ns87415_bmdma_setup() argument
122 struct ata_port *ap = qc->ap; in ns87415_bmdma_setup()
123 unsigned int rw = (qc->tf.flags & ATA_TFLAG_WRITE); in ns87415_bmdma_setup()
140 ap->ops->sff_exec_command(ap, &qc->tf); in ns87415_bmdma_setup()
154 static void ns87415_bmdma_start(struct ata_queued_cmd *qc) in ns87415_bmdma_start() argument
156 ns87415_set_mode(qc->ap, qc->dev, qc->dev->dma_mode); in ns87415_bmdma_start()
157 ata_bmdma_start(qc); in ns87415_bmdma_start()
167 static void ns87415_bmdma_stop(struct ata_queued_cmd *qc) in ns87415_bmdma_stop() argument
169 ata_bmdma_stop(qc); in ns87415_bmdma_stop()
170 ns87415_set_mode(qc->ap, qc->dev, qc->dev->pio_mode); in ns87415_bmdma_stop()
[all …]
libata-core.c
1559 qc->ap = ap; in ata_exec_internal()
1573 qc->tf = *tf; in ata_exec_internal()
4702 qc->sg = sg; in ata_sg_init()
4704 qc->cursg = qc->sg; in ata_sg_init()
4751 n_elem = dma_map_sg(ap->dev, qc->sg, qc->n_elem, qc->dma_dir); in ata_sg_setup()
4755 qc->orig_n_elem = qc->n_elem; in ata_sg_setup()
4846 qc->complete_fn(qc); in __ata_qc_complete()
4860 qc->result_tf.flags = qc->tf.flags; in fill_result_tf()
5058 if (ata_is_data(prot) && (!qc->sg || !qc->n_elem || !qc->nbytes)) in ata_qc_issue()
5076 qc->err_mask |= ap->ops->qc_prep(qc); in ata_qc_issue()
[all …]
pata_triflex.c
142 static void triflex_bmdma_start(struct ata_queued_cmd *qc) in triflex_bmdma_start() argument
144 triflex_load_timing(qc->ap, qc->dev, qc->dev->dma_mode); in triflex_bmdma_start()
145 ata_bmdma_start(qc); in triflex_bmdma_start()
157 static void triflex_bmdma_stop(struct ata_queued_cmd *qc) in triflex_bmdma_stop() argument
159 ata_bmdma_stop(qc); in triflex_bmdma_stop()
160 triflex_load_timing(qc->ap, qc->dev, qc->dev->pio_mode); in triflex_bmdma_stop()
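pata_triflex above, like pata_ns87415 earlier and pata_sl82c105/pata_pdc202xx_old below, wraps ata_bmdma_start()/ata_bmdma_stop() so it can reprogram the controller's shared timing registers around each DMA transfer and fall back to PIO timings afterwards. The pattern, sketched with a hypothetical my_load_timing() standing in for the driver-specific helper (triflex_load_timing(), ns87415_set_mode(), ...):

static void my_bmdma_start(struct ata_queued_cmd *qc)
{
	/* Switch the shared timing registers to the device's DMA mode... */
	my_load_timing(qc->ap, qc->dev, qc->dev->dma_mode);
	/* ...then let the generic SFF helper kick the BMDMA engine. */
	ata_bmdma_start(qc);
}

static void my_bmdma_stop(struct ata_queued_cmd *qc)
{
	/* Halt the engine first, then restore PIO timings for taskfile access. */
	ata_bmdma_stop(qc);
	my_load_timing(qc->ap, qc->dev, qc->dev->pio_mode);
}
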
pata_sl82c105.c
163 struct ata_port *ap = qc->ap; in sl82c105_bmdma_start()
170 sl82c105_configure_dmamode(ap, qc->dev); in sl82c105_bmdma_start()
172 ata_bmdma_start(qc); in sl82c105_bmdma_start()
189 static void sl82c105_bmdma_stop(struct ata_queued_cmd *qc) in sl82c105_bmdma_stop() argument
191 struct ata_port *ap = qc->ap; in sl82c105_bmdma_stop()
193 ata_bmdma_stop(qc); in sl82c105_bmdma_stop()
199 sl82c105_set_piomode(ap, qc->dev); in sl82c105_bmdma_stop()
212 static int sl82c105_qc_defer(struct ata_queued_cmd *qc) in sl82c105_qc_defer() argument
214 struct ata_host *host = qc->ap->host; in sl82c105_qc_defer()
215 struct ata_port *alt = host->ports[1 ^ qc->ap->port_no]; in sl82c105_qc_defer()
[all …]
sata_fsl.c
455 for_each_sg(qc->sg, sg, qc->n_elem, si) { in sata_fsl_fill_sg()
514 ata_tf_to_fis(&qc->tf, qc->dev->link->pmp, 1, (u8 *) &cd->cfis); in sata_fsl_qc_prep()
520 memcpy(&cd->cdb, qc->cdb, qc->dev->cdb_len); in sata_fsl_qc_prep()
574 unsigned int tag = sata_fsl_tag(qc->ap, qc->hw_tag, hcr_base); in sata_fsl_qc_fill_rtf()
1047 if (qc->err_mask) { in sata_fsl_post_internal_cmd()
1161 if (qc) in sata_fsl_error_intr()
1162 qc->err_mask |= err_mask; in sata_fsl_error_intr()
1172 if (qc) in sata_fsl_error_intr()
1199 if (qc && ata_is_atapi(qc->tf.protocol)) { in sata_fsl_host_intr()
1266 if (qc) { in sata_fsl_host_intr()
[all …]
pata_pdc202xx_old.c
177 struct ata_port *ap = qc->ap; in pdc2026x_bmdma_start()
178 struct ata_device *adev = qc->dev; in pdc2026x_bmdma_start()
179 struct ata_taskfile *tf = &qc->tf; in pdc2026x_bmdma_start()
196 pdc202xx_set_dmamode(ap, qc->dev); in pdc2026x_bmdma_start()
200 len = qc->nbytes / 2; in pdc2026x_bmdma_start()
211 ata_bmdma_start(qc); in pdc2026x_bmdma_start()
225 static void pdc2026x_bmdma_stop(struct ata_queued_cmd *qc) in pdc2026x_bmdma_stop() argument
227 struct ata_port *ap = qc->ap; in pdc2026x_bmdma_stop()
228 struct ata_device *adev = qc->dev; in pdc2026x_bmdma_stop()
229 struct ata_taskfile *tf = &qc->tf; in pdc2026x_bmdma_stop()
[all …]
libahci.c
1665 for_each_sg(qc->sg, sg, qc->n_elem, si) { in ahci_fill_sg()
1679 struct ata_port *ap = qc->ap; in ahci_pmp_qc_defer()
1704 ata_tf_to_fis(&qc->tf, qc->dev->link->pmp, 1, cmd_tbl); in ahci_qc_prep()
1707 memcpy(cmd_tbl + AHCI_CMD_TBL_CDB, qc->cdb, qc->dev->cdb_len); in ahci_qc_prep()
2093 if (qc->tf.protocol == ATA_PROT_PIO && qc->dma_dir == DMA_FROM_DEVICE && in ahci_qc_fill_rtf()
2150 struct ata_queued_cmd *qc; in ahci_qc_ncq_fill_rtf() local
2154 if (qc && ata_is_ncq(qc->tf.protocol)) { in ahci_qc_ncq_fill_rtf()
2157 qc->result_tf.flags = qc->tf.flags; in ahci_qc_ncq_fill_rtf()
2172 struct ata_queued_cmd *qc; in ahci_qc_ncq_fill_rtf() local
2176 if (qc && ata_is_ncq(qc->tf.protocol)) { in ahci_qc_ncq_fill_rtf()
[all …]
/drivers/scsi/libsas/
sas_ata.c
88 else if (qc && qc->scsicmd) in sas_ata_task_done()
96 if (!qc) in sas_ata_task_done()
99 ap = qc->ap; in sas_ata_task_done()
106 if (qc->scsicmd) in sas_ata_task_done()
148 ata_qc_complete(qc); in sas_ata_task_done()
184 qc->tf.nsect = 0; in sas_ata_qc_issue()
189 memcpy(task->ata_task.atapi_packet, qc->cdb, qc->dev->cdb_len); in sas_ata_qc_issue()
196 for_each_sg(qc->sg, sg, qc->n_elem, si) in sas_ata_qc_issue()
212 if (qc->scsicmd) in sas_ata_qc_issue()
219 if (qc->scsicmd) in sas_ata_qc_issue()
[all …]
