| /drivers/net/ethernet/fungible/funcore/ |
| fun_hci.h | 151 __u8 subop; member 185 __u8 subop; member 215 __u8 subop; member 498 __u8 subop; member 517 __u8 subop; member 524 __u8 subop; member 541 __u8 subop; member 600 u8 subop; member 688 u8 subop; member 716 __u8 subop; member [all …]
|
| /drivers/gpu/drm/amd/amdgpu/ |
| vpe_6_1_fw_if.h | 59 #define VPE_CMD_HEADER(op, subop) \ argument 60 (((subop << VPE_HEADER_SUB_OPCODE__SHIFT) & VPE_HEADER_SUB_OPCODE_MASK) | \ 142 #define VPE_PLANE_CFG_CMD_HEADER(subop, nps0, npd0, nps1, npd1) \ argument 143 (VPE_CMD_HEADER(VPE_CMD_OPCODE_PLANE_CFG, subop) | \ 163 #define VPE_DIR_CFG_CMD_HEADER(subop, arr_sz) \ argument 164 (VPE_CMD_HEADER(VPE_CMD_OPCODE_VPEP_CFG, subop) | \ 179 #define VPE_IND_CFG_CMD_HEADER(subop, num_dst) \ argument 180 (VPE_CMD_HEADER(VPE_CMD_OPCODE_VPEP_CFG, subop) | \
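The vpe_6_1_fw_if.h entries above define command-header macros that fold an opcode and a sub-opcode into a single 32-bit descriptor word. Below is a minimal, self-contained sketch of that packing; the field positions, masks, and values are assumptions chosen for illustration, not the ones from the real header.

```c
/*
 * Illustrative only: the HDR_* shifts/masks are assumed values, not the
 * ones from vpe_6_1_fw_if.h. The point is the (op, subop) -> header packing.
 */
#include <stdint.h>
#include <stdio.h>

#define HDR_OPCODE__SHIFT      0              /* assumed field position */
#define HDR_OPCODE_MASK        0x000000ffu    /* assumed field width */
#define HDR_SUB_OPCODE__SHIFT  8              /* assumed field position */
#define HDR_SUB_OPCODE_MASK    0x0000ff00u    /* assumed field width */

#define CMD_HEADER(op, subop)                                                 \
	((((uint32_t)(subop) << HDR_SUB_OPCODE__SHIFT) & HDR_SUB_OPCODE_MASK) | \
	 (((uint32_t)(op) << HDR_OPCODE__SHIFT) & HDR_OPCODE_MASK))

int main(void)
{
	/* Hypothetical opcode 0x06 with sub-opcode 0x01. */
	printf("cmd header = 0x%08x\n", CMD_HEADER(0x06, 0x01));
	return 0;
}
```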
|
| /drivers/mtd/nand/raw/ |
| arasan-nand-controller.c | 596 const struct nand_subop *subop, in anfc_parse_instructions() argument 615 instr = &subop->instrs[op_id]; in anfc_parse_instructions() 724 const struct nand_subop *subop, in anfc_misc_data_type_exec() argument 748 const struct nand_subop *subop) in anfc_param_read_type_exec() argument 754 const struct nand_subop *subop) in anfc_data_read_type_exec() argument 776 const struct nand_subop *subop) in anfc_param_write_type_exec() argument 782 const struct nand_subop *subop) in anfc_data_write_type_exec() argument 813 const struct nand_subop *subop) in anfc_status_type_exec() argument 834 const struct nand_subop *subop) in anfc_reset_type_exec() argument 840 const struct nand_subop *subop) in anfc_erase_type_exec() argument [all …]
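The raw NAND drivers in this directory all implement ->exec_op() through pattern callbacks that receive a struct nand_subop and walk its instruction array with the nand_subop_get_*() accessors. A minimal sketch of that loop follows; the my_nfc_*() helpers are hypothetical placeholders for controller-specific register accesses, while the accessors and instruction types are the real ones from <linux/mtd/rawnand.h>.

```c
/*
 * Sketch of a nand_subop pattern callback, the shape the drivers above
 * follow. my_nfc_*() are hypothetical; the nand_subop API is real.
 */
#include <linux/mtd/rawnand.h>

/* Hypothetical low-level helpers assumed to exist elsewhere in the driver. */
static void my_nfc_send_cmd(struct nand_chip *chip, u8 opcode);
static void my_nfc_send_addr(struct nand_chip *chip, u8 addr);
static void my_nfc_read_buf(struct nand_chip *chip, void *buf, unsigned int len);
static void my_nfc_write_buf(struct nand_chip *chip, const void *buf,
			     unsigned int len);
static int my_nfc_wait_ready(struct nand_chip *chip, unsigned int timeout_ms);

static int my_nfc_exec_subop(struct nand_chip *chip,
			     const struct nand_subop *subop)
{
	unsigned int op_id, i, offset, naddrs, len;
	int ret;

	for (op_id = 0; op_id < subop->ninstrs; op_id++) {
		const struct nand_op_instr *instr = &subop->instrs[op_id];

		switch (instr->type) {
		case NAND_OP_CMD_INSTR:
			my_nfc_send_cmd(chip, instr->ctx.cmd.opcode);
			break;

		case NAND_OP_ADDR_INSTR:
			/* Only the address cycles owned by this subop. */
			offset = nand_subop_get_addr_start_off(subop, op_id);
			naddrs = nand_subop_get_num_addr_cyc(subop, op_id);
			for (i = 0; i < naddrs; i++)
				my_nfc_send_addr(chip,
						 instr->ctx.addr.addrs[offset + i]);
			break;

		case NAND_OP_DATA_IN_INSTR:
			offset = nand_subop_get_data_start_off(subop, op_id);
			len = nand_subop_get_data_len(subop, op_id);
			my_nfc_read_buf(chip, instr->ctx.data.buf.in + offset,
					len);
			break;

		case NAND_OP_DATA_OUT_INSTR:
			offset = nand_subop_get_data_start_off(subop, op_id);
			len = nand_subop_get_data_len(subop, op_id);
			my_nfc_write_buf(chip, instr->ctx.data.buf.out + offset,
					 len);
			break;

		case NAND_OP_WAITRDY_INSTR:
			ret = my_nfc_wait_ready(chip,
						instr->ctx.waitrdy.timeout_ms);
			if (ret)
				return ret;
			break;
		}
	}

	return 0;
}
```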
|
| loongson1-nand-controller.c | 167 for (op_id = 0; op_id < subop->ninstrs; op_id++) { in ls1x_nand_parse_instructions() 168 const struct nand_op_instr *instr = &subop->instrs[op_id]; in ls1x_nand_parse_instructions() 180 naddrs = nand_subop_get_num_addr_cyc(subop, op_id); in ls1x_nand_parse_instructions() 184 offset = nand_subop_get_addr_start_off(subop, op_id); in ls1x_nand_parse_instructions() 190 offset = nand_subop_get_data_start_off(subop, op_id); in ls1x_nand_parse_instructions() 191 op->orig_len = nand_subop_get_data_len(subop, op_id); in ls1x_nand_parse_instructions() 414 ret = ls1x_nand_parse_instructions(chip, subop, &op); in ls1x_nand_data_type_exec() 433 ret = ls1x_nand_parse_instructions(chip, subop, op); in ls1x_nand_misc_type_exec() 446 return ls1x_nand_misc_type_exec(chip, subop, &op); in ls1x_nand_zerolen_type_exec() 462 ret = ls1x_nand_misc_type_exec(chip, subop, &op); in ls1x_nand_read_id_type_exec() [all …]
|
| vf610_nfc.c | 350 if (*op_id + 1 >= subop->ninstrs) in vf610_get_next_instr() 355 return &subop->instrs[*op_id]; in vf610_get_next_instr() 359 const struct nand_subop *subop) in vf610_nfc_cmd() argument 373 instr = vf610_get_next_instr(subop, &op_id); in vf610_nfc_cmd() 381 instr = vf610_get_next_instr(subop, &op_id); in vf610_nfc_cmd() 398 instr = vf610_get_next_instr(subop, &op_id); in vf610_nfc_cmd() 402 trfr_sz = nand_subop_get_data_len(subop, op_id); in vf610_nfc_cmd() 415 instr = vf610_get_next_instr(subop, &op_id); in vf610_nfc_cmd() 422 instr = vf610_get_next_instr(subop, &op_id); in vf610_nfc_cmd() 428 instr = vf610_get_next_instr(subop, &op_id); in vf610_nfc_cmd() [all …]
|
| marvell_nand.c | 1710 const struct nand_subop *subop, in marvell_nfc_parse_instructions() argument 1727 instr = &subop->instrs[op_id]; in marvell_nfc_parse_instructions() 1800 const struct nand_subop *subop, in marvell_nfc_xfer_data_pio() argument 1831 const struct nand_subop *subop) in marvell_nfc_monolithic_access_exec() argument 1895 const struct nand_subop *subop) in marvell_nfc_naked_access_exec() argument 1909 switch (subop->instrs[0].type) { in marvell_nfc_naked_access_exec() 1967 const struct nand_subop *subop) in marvell_nfc_naked_waitrdy_exec() argument 1981 const struct nand_subop *subop) in marvell_nfc_read_id_type_exec() argument 2021 const struct nand_subop *subop) in marvell_nfc_read_status_exec() argument 2061 const struct nand_subop *subop) in marvell_nfc_reset_cmd_type_exec() argument [all …]
|
| nand_base.c | 2306 struct nand_subop subop; member 2466 if (instr == &ctx->subop.instrs[ctx->subop.ninstrs - 1]) in nand_op_parser_trace() 2480 if (a->subop.ninstrs < b->subop.ninstrs) in nand_op_parser_cmp_ctx() 2482 else if (a->subop.ninstrs > b->subop.ninstrs) in nand_op_parser_cmp_ctx() 2485 if (a->subop.last_instr_end_off < b->subop.last_instr_end_off) in nand_op_parser_cmp_ctx() 2487 else if (a->subop.last_instr_end_off > b->subop.last_instr_end_off) in nand_op_parser_cmp_ctx() 2520 .subop.cs = op->cs, in nand_op_parser_exec_op() 2566 ctx.subop.instrs = ctx.subop.instrs + ctx.subop.ninstrs; in nand_op_parser_exec_op() 2568 ctx.subop.instrs -= 1; in nand_op_parser_exec_op() 2570 ctx.subop.first_instr_start_off = ctx.subop.last_instr_end_off; in nand_op_parser_exec_op() [all …]
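nand_base.c is where the generic parser lives: nand_op_parser_exec_op() splits an incoming nand_operation into sub-operations that match one of the driver-declared patterns and invokes the pattern's callback for each chunk. Below is a sketch of the driver side of that contract, reusing the hypothetical my_nfc_exec_subop() callback from the sketch above and a my_nfc_exec_waitrdy() shown further down after the sunxi_nand.c entry; the address and data limits are made-up values, while the NAND_OP_PARSER_* macros and nand_op_parser_exec_op() are the real interfaces from <linux/mtd/rawnand.h>.

```c
/* Sketch of a driver-side pattern table fed to nand_op_parser_exec_op(). */
#include <linux/mtd/rawnand.h>

static int my_nfc_exec_subop(struct nand_chip *chip,
			     const struct nand_subop *subop);
static int my_nfc_exec_waitrdy(struct nand_chip *chip,
			       const struct nand_subop *subop);

static const struct nand_op_parser my_nfc_op_parser = NAND_OP_PARSER(
	/* Write-direction sub-operations: CMD [+ ADDR] [+ DATA_OUT]. */
	NAND_OP_PARSER_PATTERN(my_nfc_exec_subop,
			       NAND_OP_PARSER_PAT_CMD_ELEM(false),
			       NAND_OP_PARSER_PAT_ADDR_ELEM(true, 5),
			       NAND_OP_PARSER_PAT_DATA_OUT_ELEM(true, 2048)),
	/* Read-direction sub-operations: CMD [+ ADDR] [+ DATA_IN]. */
	NAND_OP_PARSER_PATTERN(my_nfc_exec_subop,
			       NAND_OP_PARSER_PAT_CMD_ELEM(false),
			       NAND_OP_PARSER_PAT_ADDR_ELEM(true, 5),
			       NAND_OP_PARSER_PAT_DATA_IN_ELEM(true, 2048)),
	/* Standalone wait-for-ready. */
	NAND_OP_PARSER_PATTERN(my_nfc_exec_waitrdy,
			       NAND_OP_PARSER_PAT_WAITRDY_ELEM(false)));

static int my_nfc_exec_op(struct nand_chip *chip,
			  const struct nand_operation *op, bool check_only)
{
	/* Let nand_base.c chop @op into subops and dispatch them. */
	return nand_op_parser_exec_op(chip, &my_nfc_op_parser, op, check_only);
}
```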
|
| cadence-nand-controller.c | 2041 const struct nand_subop *subop) in cadence_nand_cmd_opcode() argument 2050 instr = &subop->instrs[op_id]; in cadence_nand_cmd_opcode() 2071 const struct nand_subop *subop) in cadence_nand_cmd_address() argument 2084 instr = &subop->instrs[op_id]; in cadence_nand_cmd_address() 2114 const struct nand_subop *subop) in cadence_nand_cmd_erase() argument 2125 instr = &subop->instrs[1]; in cadence_nand_cmd_erase() 2144 .instrs = &subop->instrs[op_id], in cadence_nand_cmd_erase() 2155 const struct nand_subop *subop) in cadence_nand_cmd_data() argument 2165 instr = &subop->instrs[op_id]; in cadence_nand_cmd_data() 2177 len = nand_subop_get_data_len(subop, op_id); in cadence_nand_cmd_data() [all …]
|
| qcom_nandc.c | 1601 const struct nand_subop *subop, in qcom_parse_instructions() argument 1608 for (op_id = 0; op_id < subop->ninstrs; op_id++) { in qcom_parse_instructions() 1612 instr = &subop->instrs[op_id]; in qcom_parse_instructions() 1689 const struct nand_subop *subop) in qcom_read_status_exec() argument 1703 ret = qcom_parse_instructions(chip, subop, &q_op); in qcom_read_status_exec() 1746 len = nand_subop_get_data_len(subop, op_id); in qcom_read_status_exec() 1763 ret = qcom_parse_instructions(chip, subop, &q_op); in qcom_read_id_type_exec() 1793 len = nand_subop_get_data_len(subop, op_id); in qcom_read_id_type_exec() 1810 ret = qcom_parse_instructions(chip, subop, &q_op); in qcom_misc_cmd_type_exec() 1873 ret = qcom_parse_instructions(chip, subop, &q_op); in qcom_param_page_type_exec() [all …]
|
| tegra_nand.c | 351 const struct nand_subop *subop) in tegra_nand_cmd() argument 361 for (op_id = 0; op_id < subop->ninstrs; op_id++) { in tegra_nand_cmd() 366 instr = &subop->instrs[op_id]; in tegra_nand_cmd() 383 offset = nand_subop_get_addr_start_off(subop, op_id); in tegra_nand_cmd() 384 naddrs = nand_subop_get_num_addr_cyc(subop, op_id); in tegra_nand_cmd() 399 size = nand_subop_get_data_len(subop, op_id); in tegra_nand_cmd() 400 offset = nand_subop_get_data_start_off(subop, op_id); in tegra_nand_cmd() 409 size = nand_subop_get_data_len(subop, op_id); in tegra_nand_cmd() 410 offset = nand_subop_get_data_start_off(subop, op_id); in tegra_nand_cmd()
|
| pl35x-nand-controller.c | 663 const struct nand_subop *subop) in pl35x_nand_exec_op() argument 675 for (op_id = 0; op_id < subop->ninstrs; op_id++) { in pl35x_nand_exec_op() 676 instr = &subop->instrs[op_id]; in pl35x_nand_exec_op() 691 offset = nand_subop_get_addr_start_off(subop, op_id); in pl35x_nand_exec_op() 692 naddrs = nand_subop_get_num_addr_cyc(subop, op_id); in pl35x_nand_exec_op() 707 len = nand_subop_get_data_len(subop, op_id); in pl35x_nand_exec_op()
|
| rockchip-nand-controller.c | 332 const struct nand_subop *subop) in rk_nfc_cmd() argument 342 for (i = 0; i < subop->ninstrs; i++) { in rk_nfc_cmd() 343 const struct nand_op_instr *instr = &subop->instrs[i]; in rk_nfc_cmd() 352 remaining = nand_subop_get_num_addr_cyc(subop, i); in rk_nfc_cmd() 353 start = nand_subop_get_addr_start_off(subop, i); in rk_nfc_cmd() 362 start = nand_subop_get_data_start_off(subop, i); in rk_nfc_cmd() 363 cnt = nand_subop_get_data_len(subop, i); in rk_nfc_cmd()
|
| sunxi_nand.c | 1767 const struct nand_subop *subop) in sunxi_nfc_exec_subop() argument 1775 for (i = 0; i < subop->ninstrs; i++) { in sunxi_nfc_exec_subop() 1776 const struct nand_op_instr *instr = &subop->instrs[i]; in sunxi_nfc_exec_subop() 1793 remaining = nand_subop_get_num_addr_cyc(subop, i); in sunxi_nfc_exec_subop() 1794 start = nand_subop_get_addr_start_off(subop, i); in sunxi_nfc_exec_subop() 1808 start = nand_subop_get_data_start_off(subop, i); in sunxi_nfc_exec_subop() 1809 remaining = nand_subop_get_data_len(subop, i); in sunxi_nfc_exec_subop() 1863 const struct nand_subop *subop) in sunxi_nfc_soft_waitrdy() argument 1866 subop->instrs[0].ctx.waitrdy.timeout_ms); in sunxi_nfc_soft_waitrdy()
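sunxi_nfc_soft_waitrdy() above shows the other common shape of a pattern callback: a sub-operation holding a single WAITRDY instruction whose timeout is simply handed to a polling helper. A minimal sketch, assuming only the callback name; nand_soft_waitrdy() is the real fallback from nand_base.c that polls the chip with READ STATUS when the controller has no usable ready/busy line.

```c
#include <linux/mtd/rawnand.h>

/* Hypothetical callback bound to a WAITRDY-only parser pattern. */
static int my_nfc_exec_waitrdy(struct nand_chip *chip,
			       const struct nand_subop *subop)
{
	/* A WAITRDY-only sub-operation carries exactly one instruction. */
	return nand_soft_waitrdy(chip,
				 subop->instrs[0].ctx.waitrdy.timeout_ms);
}
```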
|
| /drivers/acpi/acpica/ |
| psargs.c | 745 u32 subop; in acpi_ps_get_next_arg() local 843 subop = acpi_ps_peek_opcode(parser_state); in acpi_ps_get_next_arg() 844 if (subop == 0 || in acpi_ps_get_next_arg() 845 acpi_ps_is_leading_char(subop) || in acpi_ps_get_next_arg() 846 ACPI_IS_ROOT_PREFIX(subop) || in acpi_ps_get_next_arg() 847 ACPI_IS_PARENT_PREFIX(subop)) { in acpi_ps_get_next_arg() 881 subop = acpi_ps_peek_opcode(parser_state); in acpi_ps_get_next_arg() 882 if (subop == 0 || in acpi_ps_get_next_arg() 883 acpi_ps_is_leading_char(subop) || in acpi_ps_get_next_arg() 884 ACPI_IS_ROOT_PREFIX(subop) || in acpi_ps_get_next_arg() [all …]
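In psargs.c the peeked AML opcode decides how the next argument is parsed: a zero byte (null name), a leading name character, or a root/parent prefix means a namestring follows rather than a nested term. The helper below only restates that predicate from acpi_ps_get_next_arg(); the function name is made up, and it assumes the ACPICA-internal headers that psargs.c already includes.

```c
/* Sketch: does the next AML byte start a namestring-style argument? */
static bool next_arg_is_namestring(struct acpi_parse_state *parser_state)
{
	u32 subop = acpi_ps_peek_opcode(parser_state);

	return subop == 0 ||
	       acpi_ps_is_leading_char(subop) ||
	       ACPI_IS_ROOT_PREFIX(subop) ||
	       ACPI_IS_PARENT_PREFIX(subop);
}
```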
|
| /drivers/gpu/host1x/hw/ |
| debug_hw.c | 44 unsigned int mask, subop, num, opcode; in show_channel_command() local 142 subop = val >> 24 & 0xf; in show_channel_command() 143 if (subop == HOST1X_OPCODE_EXTEND_ACQUIRE_MLOCK) in show_channel_command() 146 else if (subop == HOST1X_OPCODE_EXTEND_RELEASE_MLOCK) in show_channel_command()
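show_channel_command() decodes host1x command words for the debug dump; for the EXTEND opcode the sub-opcode sits in bits 27:24 (the `val >> 24 & 0xf` above) and selects between MLOCK acquire and release. A standalone sketch of that decode follows; the enum values and the example word are assumptions for illustration.

```c
#include <stdint.h>
#include <stdio.h>

/* Assumed sub-opcode values for the two EXTEND cases named above. */
enum {
	EXTEND_ACQUIRE_MLOCK = 0x0,
	EXTEND_RELEASE_MLOCK = 0x1,
};

static const char *extend_subop_name(uint32_t val)
{
	unsigned int subop = (val >> 24) & 0xf;	/* bits 27:24 of the word */

	if (subop == EXTEND_ACQUIRE_MLOCK)
		return "acquire_mlock";
	if (subop == EXTEND_RELEASE_MLOCK)
		return "release_mlock";
	return "unknown";
}

int main(void)
{
	printf("%s\n", extend_subop_name(0x01000000));	/* hypothetical word */
	return 0;
}
```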
|
| /drivers/net/ethernet/fungible/funeth/ |
| funeth_ktls.c | 11 .subop = FUN_ADMIN_SUBOP_CREATE, in fun_admin_ktls_create() 27 .subop = FUN_ADMIN_SUBOP_MODIFY, in fun_ktls_add() 86 req.subop = FUN_ADMIN_SUBOP_MODIFY; in fun_ktls_del() 110 req.subop = FUN_ADMIN_SUBOP_MODIFY; in fun_ktls_resync()
|
| funeth_main.c | 159 .u.write.subop = FUN_ADMIN_SUBOP_WRITE, in fun_adi_write() 1920 if (rsp->subop == FUN_ADMIN_SUBOP_NOTIFY) { in fun_event_cb() 1922 } else if (rsp->subop == FUN_ADMIN_SUBOP_RES_COUNT) { in fun_event_cb() 1932 op, rsp->subop); in fun_event_cb()
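The funeth users above tag every admin request and asynchronous event with an (op, subop) pair defined in fun_hci.h, and fun_event_cb() branches on the sub-opcode. The sketch below mirrors that dispatch with a stand-in response struct, since only the FUN_ADMIN_SUBOP_* names, not the real message layout, appear in this listing.

```c
/* Hypothetical stand-in for the admin event/response header. */
struct my_admin_rsp {
	u8 op;
	u8 subop;
};

static void my_event_cb(const struct my_admin_rsp *rsp)
{
	if (rsp->subop == FUN_ADMIN_SUBOP_NOTIFY) {
		/* asynchronous notification (e.g. link state) */
	} else if (rsp->subop == FUN_ADMIN_SUBOP_RES_COUNT) {
		/* resource-count style event */
	} else {
		pr_info("unexpected event op %u subop %u\n",
			rsp->op, rsp->subop);
	}
}
```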
|
| /drivers/scsi/csiostor/ |
| csio_isr.c | 151 uint8_t subop; in csio_process_scsi_cmpl() local 158 subop = FW_SCSI_ABRT_CLS_WR_SUB_OPCODE_GET( in csio_process_scsi_cmpl() 163 subop ? "Close" : "Abort", in csio_process_scsi_cmpl() 167 if (subop) in csio_process_scsi_cmpl()
|
| /drivers/mtd/nand/raw/atmel/ |
| nand-controller.c | 633 const struct nand_subop *subop) in atmel_hsmc_exec_cmd_addr() argument 642 for (i = 0; i < subop->ninstrs; i++) { in atmel_hsmc_exec_cmd_addr() 643 const struct nand_op_instr *instr = &subop->instrs[i]; in atmel_hsmc_exec_cmd_addr() 650 for (j = nand_subop_get_addr_start_off(subop, i); in atmel_hsmc_exec_cmd_addr() 651 j < nand_subop_get_num_addr_cyc(subop, i); j++) { in atmel_hsmc_exec_cmd_addr() 661 const struct nand_subop *subop) in atmel_hsmc_exec_rw() argument 663 const struct nand_op_instr *instr = subop->instrs; in atmel_hsmc_exec_rw() 679 const struct nand_subop *subop) in atmel_hsmc_exec_waitrdy() argument 681 const struct nand_op_instr *instr = subop->instrs; in atmel_hsmc_exec_waitrdy()
|
| /drivers/gpu/drm/xe/ |
| xe_vm_types.h | 429 enum xe_vma_subop subop; member
|
| xe_pt.c | 1478 if (op->subop == XE_VMA_SUBOP_UNMAP_RANGE) in xe_pt_svm_pre_commit() 1495 xe_assert(vm->xe, op->subop == XE_VMA_SUBOP_MAP_RANGE); in xe_pt_svm_pre_commit() 2135 if (op->subop == XE_VMA_SUBOP_MAP_RANGE) { in op_prepare() 2141 } else if (op->subop == XE_VMA_SUBOP_UNMAP_RANGE) { in op_prepare() 2357 if (op->subop == XE_VMA_SUBOP_MAP_RANGE) in op_commit() 2359 else if (op->subop == XE_VMA_SUBOP_UNMAP_RANGE) in op_commit()
|
| xe_vm.c | 989 op->subop = XE_VMA_SUBOP_MAP_RANGE; in xe_vm_populate_range_rebind() 1077 op->subop = XE_VMA_SUBOP_UNMAP_RANGE; in xe_vm_populate_range_unbind()
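In the xe driver, a VMA operation carries an enum xe_vma_subop member (xe_vm_types.h line 429); xe_vm.c sets it when populating range rebind/unbind ops and xe_pt.c keys its prepare/commit paths off it. A minimal sketch of that dispatch follows; the enum names come from the listing, while the containing struct and the helper are stand-ins.

```c
#include <linux/errno.h>

/* Sub-operation values as named in the listing above. */
enum xe_vma_subop {
	XE_VMA_SUBOP_MAP_RANGE,
	XE_VMA_SUBOP_UNMAP_RANGE,
};

/* Hypothetical container for the fields a prepare step needs. */
struct my_vma_op {
	enum xe_vma_subop subop;
};

static int my_op_prepare(struct my_vma_op *op)
{
	switch (op->subop) {
	case XE_VMA_SUBOP_MAP_RANGE:
		/* bind the SVM range's page tables */
		return 0;
	case XE_VMA_SUBOP_UNMAP_RANGE:
		/* invalidate and tear the range back down */
		return 0;
	}
	return -EINVAL;
}
```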
|