/linux-6.3-rc2/drivers/ata/

libata-sff.c
    608  qc->curbytes = qc->nbytes;   in ata_pio_sector()
    611  if (qc->curbytes == qc->nbytes - qc->sect_size)   in ata_pio_sector()
    638  qc->curbytes += qc->sect_size;   in ata_pio_sector()
    639  qc->cursg_ofs += qc->sect_size;   in ata_pio_sector()
    642  qc->cursg = sg_next(qc->cursg);   in ata_pio_sector()
    667  nsect = min((qc->nbytes - qc->curbytes) / qc->sect_size,   in ata_pio_sectors()
    694  ap->ops->sff_data_xfer(qc, qc->cdb, qc->dev->cdb_len, 1);   in atapi_send_cdb()
    745  qc->nbytes, qc->curbytes, bytes);   in __atapi_pio_bytes()
    774  qc->cursg = sg_next(qc->cursg);   in __atapi_pio_bytes()
    891  qc = ata_qc_from_tag(ap, qc->tag);   in ata_hsm_qc_complete()
    [all …]

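The ata_pio_sector() hits above show libata's PIO bookkeeping: a byte cursor (curbytes/cursg_ofs) advances one sector per transfer and hops to the next scatterlist entry once the current one is used up. A minimal stand-alone sketch of that pattern, assuming nothing beyond <linux/scatterlist.h>; the struct and function names are illustrative, not libata API:

#include <linux/scatterlist.h>

struct pio_cursor {
	struct scatterlist *cursg;	/* current S/G entry */
	unsigned int cursg_ofs;		/* byte offset inside cursg */
	unsigned int curbytes;		/* bytes transferred so far */
	unsigned int nbytes;		/* total transfer length */
	unsigned int sect_size;		/* bytes moved per step */
};

static bool pio_cursor_advance(struct pio_cursor *c)
{
	c->curbytes  += c->sect_size;
	c->cursg_ofs += c->sect_size;

	/* current segment exhausted: move to the next one */
	if (c->cursg_ofs == c->cursg->length) {
		c->cursg = sg_next(c->cursg);
		c->cursg_ofs = 0;
	}

	return c->curbytes < c->nbytes;	/* true while data remains */
}
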
pdc_adma.c
    266  for_each_sg(qc->sg, sg, qc->n_elem, si) {   in adma_fill_sg()
    335  buf[i++] = qc->tf.lbal;   in adma_qc_prep()
    337  buf[i++] = qc->tf.lbam;   in adma_qc_prep()
    339  buf[i++] = qc->tf.lbah;   in adma_qc_prep()
    351  i = adma_fill_sg(qc);   in adma_qc_prep()
    372  adma_packet_start(qc);   in adma_qc_issue()
    406  if (qc && (!(qc->tf.flags & ATA_TFLAG_POLLING))) {   in adma_intr_pkt()
    417  if (!qc->err_mask)   in adma_intr_pkt()
    418  ata_qc_complete(qc);   in adma_intr_pkt()
    449  if (qc && (!(qc->tf.flags & ATA_TFLAG_POLLING))) {   in adma_intr_mmio()
    [all …]

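adma_fill_sg() above uses the loop that nearly every driver in this directory repeats: walk the DMA-mapped scatterlist with for_each_sg() and copy each segment's bus address and length into the controller's descriptor table. A generic sketch of that loop; struct my_prd and the 32-bit descriptor layout are hypothetical, not the pdc_adma format:

#include <linux/kernel.h>
#include <linux/scatterlist.h>

struct my_prd {
	__le32 addr;	/* bus address of the data segment */
	__le32 len;	/* segment length in bytes */
};

static unsigned int fill_sg(struct scatterlist *sgl, unsigned int n_elem,
			    struct my_prd *tbl)
{
	struct scatterlist *sg;
	unsigned int si, idx = 0;

	for_each_sg(sgl, sg, n_elem, si) {
		tbl[idx].addr = cpu_to_le32(lower_32_bits(sg_dma_address(sg)));
		tbl[idx].len  = cpu_to_le32(sg_dma_len(sg));
		idx++;
	}

	return idx;	/* number of descriptors written */
}
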
libata-scsi.c
    682  qc->tag = qc->hw_tag = tag;   in ata_scsi_qc_new()
    683  qc->ap = ap;   in ata_scsi_qc_new()
    684  qc->dev = dev;   in ata_scsi_qc_new()
    697  return qc;   in ata_scsi_qc_new()
    711  qc->nbytes = scsi_bufflen(scmd) + qc->extrabytes;   in ata_qc_set_pc_nbytes()
   1653  ata_qc_free(qc);   in ata_qc_done()
   1723  if (!qc)   in ata_scsi_translate()
   2462  if (qc->err_mask && ((qc->err_mask & AC_ERR_DEV) == 0)) {   in atapi_sense_complete()
   2500  ata_sg_init(qc, &qc->sgent, 1);   in atapi_request_sense()
   2503  memset(&qc->cdb, 0, qc->dev->cdb_len);   in atapi_request_sense()
    [all …]

sata_dwc_460ex.c
    348  desc = dmaengine_prep_slave_sg(hsdevp->chan, qc->sg, qc->n_elem,   in dma_dwc_xfer_setup()
    359  qc->sg, qc->n_elem, &hsdev->dmadr);   in dma_dwc_xfer_setup()
    448  if (qc)   in sata_dwc_error_intr()
    535  if (unlikely(!qc || (qc->tf.flags & ATA_TFLAG_POLLING))) {   in sata_dwc_isr()
    538  __func__, qc);   in sata_dwc_isr()
    720  if (!qc) {   in sata_dwc_dma_xfer_complete()
    745  u8 tag = qc->hw_tag;   in sata_dwc_qc_complete()
    756  qc->tf.command, status, ap->print_id, qc->tf.protocol);   in sata_dwc_qc_complete()
    762  ata_qc_complete(qc);   in sata_dwc_qc_complete()
    943  sata_dwc_exec_command_by_tag(qc->ap, &qc->tf, tag,   in sata_dwc_bmdma_setup_by_tag()
    [all …]

sata_qstor.c
    244  for_each_sg(qc->sg, sg, qc->n_elem, si) {   in qs_fill_sg()
    268  qs_enter_reg_mode(qc->ap);   in qs_qc_prep()
    272  nelem = qs_fill_sg(qc);   in qs_qc_prep()
    312  switch (qc->tf.protocol) {   in qs_qc_issue()
    315  qs_packet_start(qc);   in qs_qc_issue()
    334  if (!qc->err_mask) {   in qs_do_or_die()
    335  ata_qc_complete(qc);   in qs_do_or_die()
    376  if (qc && (!(qc->tf.flags & ATA_TFLAG_POLLING))) {   in qs_intr_pkt()
    381  qs_do_or_die(qc, sDST);   in qs_intr_pkt()
    399  struct ata_queued_cmd *qc;   in qs_intr_mmio() local
    [all …]

sata_sx4.c
    435  for_each_sg(qc->sg, sg, qc->n_elem, si) {   in pdc20621_dma_prep()
    450  i = pdc20621_ata_pkt(&qc->tf, qc->dev->devno, &pp->dimm_buf[0], portno);   in pdc20621_dma_prep()
    487  i = pdc20621_ata_pkt(&qc->tf, qc->dev->devno, &pp->dimm_buf[0], portno);   in pdc20621_nodata_prep()
    512  pdc20621_dma_prep(qc);   in pdc20621_qc_prep()
    556  pp->hdma[idx].qc = qc;   in pdc20621_push_hdma()
    614  pdc20621_dump_hdma(qc);   in pdc20621_packet_start()
    676  ata_qc_complete(qc);   in pdc20621_host_intr()
    677  pdc20621_pop_hdma(qc);   in pdc20621_host_intr()
    715  ata_qc_complete(qc);   in pdc20621_host_intr()
    726  ata_qc_complete(qc);   in pdc20621_host_intr()
    [all …]

acard-ahci.c
    191  for_each_sg(qc->sg, sg, qc->n_elem, si) {   in acard_ahci_fill_sg()
    213  struct ata_port *ap = qc->ap;   in acard_ahci_qc_prep()
    215  int is_atapi = ata_is_atapi(qc->tf.protocol);   in acard_ahci_qc_prep()
    226  ata_tf_to_fis(&qc->tf, qc->dev->link->pmp, 1, cmd_tbl);   in acard_ahci_qc_prep()
    229  memcpy(cmd_tbl + AHCI_CMD_TBL_CDB, qc->cdb, qc->dev->cdb_len);   in acard_ahci_qc_prep()
    232  if (qc->flags & ATA_QCFLAG_DMAMAP)   in acard_ahci_qc_prep()
    233  acard_ahci_fill_sg(qc, cmd_tbl);   in acard_ahci_qc_prep()
    241  if (qc->tf.flags & ATA_TFLAG_WRITE)   in acard_ahci_qc_prep()
    246  ahci_fill_cmd_slot(pp, qc->hw_tag, opts);   in acard_ahci_qc_prep()
    265  if (qc->tf.protocol == ATA_PROT_PIO && qc->dma_dir == DMA_FROM_DEVICE &&   in acard_ahci_qc_fill_rtf()
    [all …]

libata-eh.c
    818  if (qc)   in ata_eh_nr_in_flight()
    847  if (qc)   in ata_eh_fastdrain_timerfn()
    999  if (qc && (!link || qc->dev->link == link)) {   in ata_do_link_abort()
   2243  if (qc->flags & ATA_QCFLAG_SENSE_VALID && !qc->err_mask)   in ata_eh_link_report()
   2300  struct ata_taskfile *cmd = &qc->tf, *res = &qc->result_tf;   in ata_eh_link_report()
   2305  ata_dev_phys_link(qc->dev) != link || !qc->err_mask)   in ata_eh_link_report()
   2346  prot_str, qc->nbytes, dma_str[qc->dma_dir]);   in ata_eh_link_report()
   2377  res->device, qc->err_mask, ata_err_string(qc->err_mask),   in ata_eh_link_report()
   3201  if (qc->dev != dev || (qc->tf.command != ATA_CMD_FLUSH_EXT &&   in ata_eh_maybe_retry_flush()
   3229  qc->scsicmd->allowed = max(qc->scsicmd->allowed, 1);   in ata_eh_maybe_retry_flush()
    [all …]

sata_inic162x.c
    397  if (unlikely(!qc))   in inic_host_intr()
    410  ata_qc_complete(qc);   in inic_host_intr()
    416  qc ? qc->tf.command : 0xff, irq_stat, idma_stat);   in inic_host_intr()
    470  for_each_sg(qc->sg, sg, qc->n_elem, si) {   in inic_fill_sg()
    492  cdb_len = qc->dev->cdb_len;   in inic_qc_prep()
    506  cpb->nsect = qc->tf.nsect;   in inic_qc_prep()
    507  cpb->lbal = qc->tf.lbal;   in inic_qc_prep()
    508  cpb->lbam = qc->tf.lbam;   in inic_qc_prep()
    509  cpb->lbah = qc->tf.lbah;   in inic_qc_prep()
    536  inic_fill_sg(prd, qc);   in inic_qc_prep()
    [all …]

sata_promise.c
    484  u8 *cdb = qc->cdb;   in pdc_atapi_pkt()
    524  buf[19] = qc->tf.lbal;   in pdc_atapi_pkt()
    535  buf[23] = qc->tf.lbam;   in pdc_atapi_pkt()
    580  for_each_sg(qc->sg, sg, qc->n_elem, si) {   in pdc_fill_sg()
    637  pdc_fill_sg(qc);   in pdc_qc_prep()
    640  i = pdc_pkt_header(&qc->tf, qc->ap->bmdma_prd_dma,   in pdc_qc_prep()
    649  pdc_fill_sg(qc);   in pdc_qc_prep()
    652  pdc_fill_sg(qc);   in pdc_qc_prep()
    655  pdc_atapi_pkt(qc);   in pdc_qc_prep()
    893  ata_qc_complete(qc);   in pdc_host_intr()
    [all …]

pata_octeon_cf.c
    523  qc->cursg = qc->sg;   in octeon_cf_dma_setup()
    541  sg = qc->cursg;   in octeon_cf_dma_start()
    598  trace_ata_bmdma_stop(ap, &qc->tf, qc->tag);   in octeon_cf_dma_finished()
    627  if (unlikely(qc->err_mask) && (qc->tf.protocol == ATA_PROT_DMA))   in octeon_cf_dma_finished()
    662  if (!qc || (qc->tf.flags & ATA_TFLAG_POLLING))   in octeon_cf_interrupt()
    667  qc->cursg = sg_next(qc->cursg);   in octeon_cf_interrupt()
    669  trace_ata_bmdma_start(ap, &qc->tf, qc->tag);   in octeon_cf_interrupt()
    711  struct ata_queued_cmd *qc;   in octeon_cf_delayed_finish() local
    735  if (qc && (!(qc->tf.flags & ATA_TFLAG_POLLING)))   in octeon_cf_delayed_finish()
    770  trace_ata_bmdma_setup(ap, &qc->tf, qc->tag);   in octeon_cf_qc_issue()
    [all …]

sata_nv.c
    867  if (unlikely(!qc || (qc->tf.flags & ATA_TFLAG_POLLING))) {   in nv_host_intr()
   1317  for_each_sg(qc->sg, sg, qc->n_elem, si) {   in nv_adma_fill_sg()
   1445  if (qc && (!(qc->tf.flags & ATA_TFLAG_POLLING))) {   in nv_generic_interrupt()
   1743  qc.ap = ap;   in __ata_bmdma_stop()
   1744  ata_bmdma_stop(&qc);   in __ata_bmdma_stop()
   1956  nv_swncq_fill_sg(qc);   in nv_swncq_qc_prep()
   1972  for_each_sg(qc->sg, sg, qc->n_elem, si) {   in nv_swncq_fill_sg()
   2002  if (qc == NULL)   in nv_swncq_issue_atacmd()
   2013  trace_ata_exec_command(ap, &qc->tf, qc->hw_tag);   in nv_swncq_issue_atacmd()
   2127  WARN_ON(qc == NULL);   in nv_swncq_sdbfis()
    [all …]

pata_pxa.c
     49  struct pata_pxa_data *pd = qc->ap->private_data;   in pxa_qc_prep()
     53  if (!(qc->flags & ATA_QCFLAG_DMAMAP))   in pxa_qc_prep()
     57  tx = dmaengine_prep_slave_sg(pd->dma_chan, qc->sg, qc->n_elem, dir,   in pxa_qc_prep()
     60  ata_dev_err(qc->dev, "prep_slave_sg() failed\n");   in pxa_qc_prep()
     74  static void pxa_bmdma_setup(struct ata_queued_cmd *qc)   in pxa_bmdma_setup() argument
     76  qc->ap->ops->sff_exec_command(qc->ap, &qc->tf);   in pxa_bmdma_setup()
     82  static void pxa_bmdma_start(struct ata_queued_cmd *qc)   in pxa_bmdma_start() argument
     84  struct pata_pxa_data *pd = qc->ap->private_data;   in pxa_bmdma_start()
     92  static void pxa_bmdma_stop(struct ata_queued_cmd *qc)   in pxa_bmdma_stop() argument
     94  struct pata_pxa_data *pd = qc->ap->private_data;   in pxa_bmdma_stop()
    [all …]

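The pxa_qc_prep() hit above hands the queued command's scatterlist to the generic DMA engine rather than to a driver-specific descriptor table. A hedged sketch of that call flow, with a hypothetical completion callback and none of the driver's own bookkeeping; dmaengine_prep_slave_sg(), dmaengine_submit() and dma_async_issue_pending() are the standard dmaengine API:

#include <linux/dmaengine.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

static int start_slave_dma(struct dma_chan *chan, struct scatterlist *sgl,
			   unsigned int n_elem,
			   void (*done)(void *arg), void *arg)
{
	struct dma_async_tx_descriptor *tx;

	/* build one slave transaction covering the whole scatterlist */
	tx = dmaengine_prep_slave_sg(chan, sgl, n_elem, DMA_MEM_TO_DEV,
				     DMA_PREP_INTERRUPT);
	if (!tx)
		return -EBUSY;		/* channel could not prepare it */

	tx->callback = done;		/* runs when the transfer completes */
	tx->callback_param = arg;

	dmaengine_submit(tx);		/* queue the descriptor ... */
	dma_async_issue_pending(chan);	/* ... and start the channel */

	return 0;
}
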
sata_sil.c
    253  struct ata_port *ap = qc->ap;   in sil_bmdma_stop()
    266  struct ata_port *ap = qc->ap;   in sil_bmdma_setup()
    279  struct ata_port *ap = qc->ap;   in sil_bmdma_start()
    296  struct ata_port *ap = qc->ap;   in sil_fill_sg()
    301  for_each_sg(qc->sg, sg, qc->n_elem, si) {   in sil_fill_sg()
    324  sil_fill_sg(qc);   in sil_qc_prep()
    447  if (unlikely(!qc || (qc->tf.flags & ATA_TFLAG_POLLING))) {   in sil_host_intr()
    468  if (ata_is_dma(qc->tf.protocol)) {   in sil_host_intr()
    470  ap->ops->bmdma_stop(qc);   in sil_host_intr()
    495  if (unlikely(qc->err_mask) && ata_is_dma(qc->tf.protocol))   in sil_host_intr()
    [all …]

sata_sil24.c
    776  for_each_sg(qc->sg, sg, qc->n_elem, si) {   in sil24_fill_sg()
    792  u8 prot = qc->tf.protocol;   in sil24_qc_defer()
    830  return ata_std_qc_defer(qc);   in sil24_qc_defer()
    835  struct ata_port *ap = qc->ap;   in sil24_qc_prep()
    862  memcpy(cb->atapi.cdb, qc->cdb, qc->dev->cdb_len);   in sil24_qc_prep()
    873  ata_tf_to_fis(&qc->tf, qc->dev->link->pmp, 1, prb->fis);   in sil24_qc_prep()
    876  sil24_fill_sg(qc, sge);   in sil24_qc_prep()
    906  sil24_read_tf(qc->ap, qc->hw_tag, &qc->result_tf);   in sil24_qc_fill_rtf()
   1073  if (qc)   in sil24_error_intr()
   1074  qc->err_mask |= err_mask;   in sil24_error_intr()
    [all …]

pata_triflex.c
    142  static void triflex_bmdma_start(struct ata_queued_cmd *qc)   in triflex_bmdma_start() argument
    144  triflex_load_timing(qc->ap, qc->dev, qc->dev->dma_mode);   in triflex_bmdma_start()
    145  ata_bmdma_start(qc);   in triflex_bmdma_start()
    157  static void triflex_bmdma_stop(struct ata_queued_cmd *qc)   in triflex_bmdma_stop() argument
    159  ata_bmdma_stop(qc);   in triflex_bmdma_stop()
    160  triflex_load_timing(qc->ap, qc->dev, qc->dev->pio_mode);   in triflex_bmdma_stop()

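The triflex entry above is the simplest form of a pattern several PATA drivers here share: the chip has one set of timing registers for both PIO and DMA, so the driver reloads DMA timings just before starting BMDMA and restores PIO timings when it stops. A sketch of that wrapper pair, with a hypothetical foo_load_timing() helper standing in for the chip-specific register writes:

#include <linux/libata.h>

static void foo_load_timing(struct ata_port *ap, struct ata_device *adev,
			    u8 mode)
{
	/* program the controller's shared timing registers for 'mode' */
}

static void foo_bmdma_start(struct ata_queued_cmd *qc)
{
	foo_load_timing(qc->ap, qc->dev, qc->dev->dma_mode);
	ata_bmdma_start(qc);
}

static void foo_bmdma_stop(struct ata_queued_cmd *qc)
{
	ata_bmdma_stop(qc);
	foo_load_timing(qc->ap, qc->dev, qc->dev->pio_mode);
}
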
pata_ns87415.c
    120  static void ns87415_bmdma_setup(struct ata_queued_cmd *qc)   in ns87415_bmdma_setup() argument
    122  struct ata_port *ap = qc->ap;   in ns87415_bmdma_setup()
    123  unsigned int rw = (qc->tf.flags & ATA_TFLAG_WRITE);   in ns87415_bmdma_setup()
    140  ap->ops->sff_exec_command(ap, &qc->tf);   in ns87415_bmdma_setup()
    154  static void ns87415_bmdma_start(struct ata_queued_cmd *qc)   in ns87415_bmdma_start() argument
    156  ns87415_set_mode(qc->ap, qc->dev, qc->dev->dma_mode);   in ns87415_bmdma_start()
    157  ata_bmdma_start(qc);   in ns87415_bmdma_start()
    167  static void ns87415_bmdma_stop(struct ata_queued_cmd *qc)   in ns87415_bmdma_stop() argument
    169  ata_bmdma_stop(qc);   in ns87415_bmdma_stop()
    170  ns87415_set_mode(qc->ap, qc->dev, qc->dev->pio_mode);   in ns87415_bmdma_stop()
    [all …]

pata_arasan_cf.c
    218  struct ata_queued_cmd *qc;   member
    366  struct ata_queued_cmd *qc = acdev->qc;   in dma_complete() local
    369  acdev->qc = NULL;   in dma_complete()
    373  if (unlikely(qc->err_mask) && ata_is_dma(qc->tf.protocol))   in dma_complete()
    522  struct ata_queued_cmd *qc = acdev->qc;   in data_xfer() local
    537  for_each_sg(qc->sg, sg, qc->n_elem, temp) {   in data_xfer()
    579  struct ata_queued_cmd *qc = acdev->qc;   in delayed_finish() local
    635  struct ata_queued_cmd *qc = acdev->qc;   in arasan_cf_interrupt() local
    675  struct ata_queued_cmd *qc = acdev->qc;   in arasan_cf_dma_start() local
    710  acdev->qc = qc;   in arasan_cf_qc_issue()
    [all …]

libata-core.c
   1507  qc->ap = ap;   in ata_exec_internal_sg()
   1521  qc->tf = *tf;   in ata_exec_internal_sg()
   4534  qc->cursg = qc->sg;   in ata_sg_init()
   4581  n_elem = dma_map_sg(ap->dev, qc->sg, qc->n_elem, qc->dma_dir);   in ata_sg_setup()
   4585  qc->orig_n_elem = qc->n_elem;   in ata_sg_setup()
   4674  qc->complete_fn(qc);   in __ata_qc_complete()
   4681  qc->result_tf.flags = qc->tf.flags;   in fill_result_tf()
   4801  if (qc->err_mask || qc->flags & ATA_QCFLAG_RESULT_TF)   in ata_qc_complete()
   4877  if (ata_is_data(prot) && (!qc->sg || !qc->n_elem || !qc->nbytes))   in ata_qc_issue()
   4894  qc->err_mask |= ap->ops->qc_prep(qc);   in ata_qc_issue()
    [all …]

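The ata_sg_setup() hit above is where libata DMA-maps the command's scatterlist; the value returned by dma_map_sg() replaces the original element count because the IOMMU may coalesce segments. A minimal sketch of that call pattern (the wrapper name is illustrative):

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

static int map_cmd_sg(struct device *dev, struct scatterlist *sgl,
		      unsigned int n_elem, enum dma_data_direction dir,
		      unsigned int *n_mapped)
{
	int n;

	n = dma_map_sg(dev, sgl, n_elem, dir);
	if (!n)
		return -ENOMEM;		/* nothing could be mapped */

	*n_mapped = n;			/* what the hardware will actually see */
	return 0;
}
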
pata_sl82c105.c
    163  struct ata_port *ap = qc->ap;   in sl82c105_bmdma_start()
    170  sl82c105_configure_dmamode(ap, qc->dev);   in sl82c105_bmdma_start()
    172  ata_bmdma_start(qc);   in sl82c105_bmdma_start()
    190  static void sl82c105_bmdma_stop(struct ata_queued_cmd *qc)   in sl82c105_bmdma_stop() argument
    192  struct ata_port *ap = qc->ap;   in sl82c105_bmdma_stop()
    194  ata_bmdma_stop(qc);   in sl82c105_bmdma_stop()
    200  sl82c105_set_piomode(ap, qc->dev);   in sl82c105_bmdma_stop()
    213  static int sl82c105_qc_defer(struct ata_queued_cmd *qc)   in sl82c105_qc_defer() argument
    215  struct ata_host *host = qc->ap->host;   in sl82c105_qc_defer()
    216  struct ata_port *alt = host->ports[1 ^ qc->ap->port_no];   in sl82c105_qc_defer()
    [all …]

/linux-6.3-rc2/include/trace/events/

libata.h
    186  TP_ARGS(qc),
    211  __entry->ata_dev = qc->dev->link->pmp + qc->dev->devno;
    243  TP_ARGS(qc));
    247  TP_ARGS(qc));
    253  TP_ARGS(qc),
    277  __entry->ata_dev = qc->dev->link->pmp + qc->dev->devno;
    308  TP_ARGS(qc));
    470  TP_ARGS(qc),
    482  __entry->ata_dev = qc->dev->link->pmp + qc->dev->devno;
    652  __entry->ata_dev = qc->dev->link->pmp + qc->dev->devno;
    [all …]

/linux-6.3-rc2/drivers/scsi/libsas/

sas_ata.c
     88  else if (qc && qc->scsicmd)   in sas_ata_task_done()
     96  if (!qc)   in sas_ata_task_done()
     99  ap = qc->ap;   in sas_ata_task_done()
    106  if (qc->scsicmd)   in sas_ata_task_done()
    148  ata_qc_complete(qc);   in sas_ata_task_done()
    184  qc->tf.nsect = 0;   in sas_ata_qc_issue()
    189  memcpy(task->ata_task.atapi_packet, qc->cdb, qc->dev->cdb_len);   in sas_ata_qc_issue()
    196  for_each_sg(qc->sg, sg, qc->n_elem, si)   in sas_ata_qc_issue()
    210  if (qc->scsicmd)   in sas_ata_qc_issue()
    217  if (qc->scsicmd)   in sas_ata_qc_issue()
    [all …]

/linux-6.3-rc2/include/linux/

libata.h
   1773  return qc;   in ata_qc_from_tag()
   1777  return qc;   in ata_qc_from_tag()
   1784  return qc->nbytes - min(qc->extrabytes, qc->nbytes);   in ata_qc_raw_nbytes()
   1805  qc->sg = NULL;   in ata_qc_reinit()
   1806  qc->flags = 0;   in ata_qc_reinit()
   1807  qc->cursg = NULL;   in ata_qc_reinit()
   1808  qc->cursg_ofs = 0;   in ata_qc_reinit()
   1809  qc->nbytes = qc->extrabytes = qc->curbytes = 0;   in ata_qc_reinit()
   1810  qc->n_elem = 0;   in ata_qc_reinit()
   1811  qc->err_mask = 0;   in ata_qc_reinit()
    [all …]

/linux-6.3-rc2/arch/arm64/kernel/

io.c
     73  u64 qc = (u8)c;   in __memset_io() local
     75  qc |= qc << 8;   in __memset_io()
     76  qc |= qc << 16;   in __memset_io()
     77  qc |= qc << 32;   in __memset_io()
     86  __raw_writeq(qc, dst);   in __memset_io()

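Both __memset_io() hits (here and in the loongarch copy below) build the 64-bit fill pattern by replicating the fill byte across a word so the loop can store eight bytes per iteration with __raw_writeq(). The same arithmetic shown in isolation, under an illustrative name:

#include <linux/types.h>

static u64 replicate_byte(u8 c)
{
	u64 qc = c;

	qc |= qc << 8;		/* 0x00000000000000ab -> 0x000000000000abab */
	qc |= qc << 16;		/*                    -> 0x00000000abababab */
	qc |= qc << 32;		/*                    -> 0xabababababababab */
	return qc;
}
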
/linux-6.3-rc2/arch/loongarch/kernel/

io.c
     70  u64 qc = (u8)c;   in __memset_io() local
     72  qc |= qc << 8;   in __memset_io()
     73  qc |= qc << 16;   in __memset_io()
     74  qc |= qc << 32;   in __memset_io()
     83  __raw_writeq(qc, dst);   in __memset_io()