| /linux/drivers/macintosh/ |
| rack-meter.c |
    162  out_le32(&rm->dma_regs->cmdptr, rm->dma_buf_p);  in rackmeter_do_pause()
    216  struct rackmeter *rm = rcpu->rm;  in rackmeter_do_timer() [local]
    246  pause = (rm->cpu[0].zero && rm->cpu[1].zero);  in rackmeter_do_timer()
    249  pause = (rm->cpu[0].zero && rm->cpu[1].zero);  in rackmeter_do_timer()
    267  rm->cpu[0].rm = rm;  in rackmeter_init_cpu_sniffer()
    269  rm->cpu[1].rm = rm;  in rackmeter_init_cpu_sniffer()
    399  rm = kzalloc(sizeof(*rm), GFP_KERNEL);  in rackmeter_probe()
    510  rm->dma_buf_v, rm->dma_buf_p);  in rackmeter_probe()
    518  kfree(rm);  in rackmeter_probe()
    540  free_irq(rm->irq, rm);  in rackmeter_remove()
    [all …]
|
| /linux/arch/arm/net/ |
| bpf_jit_32.h |
    171  #define ARM_ADD_R(rd, rn, rm)  _AL3_R(ARM_INST_ADD, rd, rn, rm)  [argument]
    186  #define ARM_BX(rm)  (ARM_INST_BX | (rm))  [argument]
    187  #define ARM_BLX_R(rm)  (ARM_INST_BLX_R | (rm))  [argument]
    189  #define ARM_CMP_R(rn, rm)  _AL3_R(ARM_INST_CMP, 0, rn, rm)  [argument]
    197  | (rm))
    204  | (rm))
    207  | (rm))
    220  #define ARM_MOV_R(rd, rm)  _AL3_R(ARM_INST_MOV, rd, 0, rm)  [argument]
    221  #define ARM_MOVS_R(rd, rm)  _AL3_R(ARM_INST_MOVS, rd, 0, rm)  [argument]
    251  #define ARM_REV(rd, rm)  (ARM_INST_REV | (rd) << 12 | (rm))  [argument]
    [all …]
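These JIT macros assemble 32-bit ARM instruction words by OR-ing register numbers into opcode templates. Below is a minimal standalone sketch of the same encoding idea; the AL3_R helper and INST_* values are illustrative stand-ins rather than the kernel's actual definitions, using the architectural data-processing layout (Rn in bits 19:16, Rd in bits 15:12, Rm in bits 3:0).

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative opcode templates (cond = AL); not the kernel's values. */
    #define INST_ADD_R 0xe0800000u   /* ADD rd, rn, rm */
    #define INST_MOV_R 0xe1a00000u   /* MOV rd, rm     */

    /* OR the register numbers into the data-processing register fields. */
    #define AL3_R(op, rd, rn, rm) \
            ((op) | ((uint32_t)(rn) << 16) | ((uint32_t)(rd) << 12) | (uint32_t)(rm))

    int main(void)
    {
            /* ADD r0, r1, r2  ->  0xe0810002 */
            printf("%08x\n", AL3_R(INST_ADD_R, 0, 1, 2));
            /* MOV r3, r7      ->  0xe1a03007 */
            printf("%08x\n", AL3_R(INST_MOV_R, 3, 0, 7));
            return 0;
    }

Keeping the templates as compile-time constants means a JIT emit step is a single OR and store, which is why these tables are written as macros rather than functions.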
|
| /linux/net/rds/ |
| message.c |
     53  rdsdebug("addref rm %p ref %d\n", rm, refcount_read(&rm->m_refcount));  in rds_message_addref()
    139  if (rm->m_rs) {  in rds_message_purge()
    149  rm->m_rs = NULL;  in rds_message_purge()
    182  kfree(rm);  in rds_message_put()
    292  if (!rm)  in rds_message_alloc()
    305  return rm;  in rds_message_alloc()
    321  if (rm->m_used_sgs + nents > rm->m_total_sgs) {  in rds_message_alloc_sgs()
    323  rm->m_total_sgs, rm->m_used_sgs, nents);  in rds_message_alloc_sgs()
    342  if (!rm)  in rds_message_map_pages()
    348  rm->data.op_sg = rds_message_alloc_sgs(rm, num_sgs);  in rds_message_map_pages()
    [all …]
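These hits trace the message lifecycle: rds_message_alloc() creates the object, rds_message_addref() takes extra references, and rds_message_put() frees it once the last reference drops. A minimal sketch of that refcount pattern, with a made-up demo_msg type standing in for the much larger struct rds_message:

    #include <linux/refcount.h>
    #include <linux/slab.h>

    struct demo_msg {
            refcount_t m_refcount;
            /* payload, scatterlists, ... */
    };

    static struct demo_msg *demo_msg_alloc(gfp_t gfp)
    {
            struct demo_msg *rm = kzalloc(sizeof(*rm), gfp);

            if (!rm)
                    return NULL;
            refcount_set(&rm->m_refcount, 1);       /* caller owns one ref */
            return rm;
    }

    static void demo_msg_addref(struct demo_msg *rm)
    {
            refcount_inc(&rm->m_refcount);
    }

    static void demo_msg_put(struct demo_msg *rm)
    {
            if (refcount_dec_and_test(&rm->m_refcount))
                    kfree(rm);                      /* last reference gone */
    }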
|
| send.c |
    203  if (!rm) {  in rds_send_xmit()
    238  if (!rm) {  in rds_send_xmit()
    269  if (!rm)  in rds_send_xmit()
    310  rm->m_final_op = &rm->rdma;  in rds_send_xmit()
    326  rm->m_final_op = &rm->atomic;  in rds_send_xmit()
    353  if (rm->atomic.op_active && !rm->atomic.op_silent)  in rds_send_xmit()
    355  if (rm->rdma.op_active && !rm->rdma.op_silent)  in rds_send_xmit()
    364  rm->m_final_op = &rm->data;  in rds_send_xmit()
    1268 if (!rm) {  in rds_sendmsg()
    1407 if (rm)  in rds_sendmsg()
    [all …]
|
| tcp_send.c |
     86  rm->m_ack_seq = tc->t_last_sent_nxt +  in rds_tcp_xmit()
     88  be32_to_cpu(rm->m_inc.i_hdr.h_len) - 1;  in rds_tcp_xmit()
     97  rm, rds_tcp_write_seq(tc),  in rds_tcp_xmit()
     98  (unsigned long long)rm->m_ack_seq);  in rds_tcp_xmit()
    115  while (sg < rm->data.op_nents) {  in rds_tcp_xmit()
    117  if (sg + 1 < rm->data.op_nents)  in rds_tcp_xmit()
    121  rm->data.op_sg[sg].length - off,  in rds_tcp_xmit()
    122  rm->data.op_sg[sg].offset + off);  in rds_tcp_xmit()
    124  rm->data.op_sg[sg].length - off);  in rds_tcp_xmit()
    127  rm->data.op_sg[sg].offset + off, rm->data.op_sg[sg].length - off,  in rds_tcp_xmit()
    [all …]
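rds_tcp_xmit() walks the message payload with a running (sg, off) cursor, so a transmit that stops partway through a scatterlist entry can resume at the same entry and byte offset. A simplified userspace sketch of that cursor logic over plain buffers; demo_seg and the 4-byte-per-call stub transport are inventions for illustration:

    #include <stdio.h>

    struct demo_seg {
            const char *buf;
            size_t len;
    };

    /* Stub transport: accepts at most 4 bytes per call, like a nearly
     * full socket buffer forcing short writes. */
    static size_t send_some(const char *p, size_t len)
    {
            size_t n = len < 4 ? len : 4;

            fwrite(p, 1, n, stdout);
            return n;
    }

    /* Resume transmission at segment *sgp, byte offset *offp. */
    static void xmit_segs(const struct demo_seg *segs, size_t nents,
                          size_t *sgp, size_t *offp)
    {
            while (*sgp < nents) {
                    const struct demo_seg *s = &segs[*sgp];
                    size_t sent = send_some(s->buf + *offp, s->len - *offp);

                    if (!sent)
                            return;         /* transport backed off; resume later */
                    *offp += sent;
                    if (*offp == s->len) {
                            *offp = 0;      /* segment done: advance cursor */
                            (*sgp)++;
                    }
            }
    }

    int main(void)
    {
            const struct demo_seg segs[] = { { "hello ", 6 }, { "world\n", 6 } };
            size_t sg = 0, off = 0;

            xmit_segs(segs, 2, &sg, &off);
            return 0;
    }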
|
| rdma.c |
    626  || rm->rdma.op_active)  in rds_cmsg_rdma_args()
    820  rm->m_rdma_cookie != 0)  in rds_cmsg_rdma_dest()
    823  memcpy(&rm->m_rdma_cookie, CMSG_DATA(cmsg), sizeof(rm->m_rdma_cookie));  in rds_cmsg_rdma_dest()
    843  rm->rdma.op_rdma_mr = mr;  in rds_cmsg_rdma_dest()
    862  &rm->rdma.op_rdma_mr, rm->m_conn_path);  in rds_cmsg_rdma_map()
    876  || rm->atomic.op_active)  in rds_cmsg_atomic()
    913  rm->atomic.op_active = 1;  in rds_cmsg_atomic()
    915  rm->atomic.op_sg = rds_message_alloc_sgs(rm, 1);  in rds_cmsg_atomic()
    934  if (rm->atomic.op_notify || rm->atomic.op_recverr) {  in rds_cmsg_atomic()
    940  rm->atomic.op_notifier = kmalloc(sizeof(*rm->atomic.op_notifier), GFP_KERNEL);  in rds_cmsg_atomic()
    [all …]
|
| ib_send.c |
    187  return rm;  in rds_ib_send_unmap_op()
    288  rds_message_put(rm);  in rds_ib_send_cqe_handler()
    552  rm->data.op_sg,  in rds_ib_xmit()
    555  rdsdebug("ic %p mapping rm %p: %d\n", ic, rm, rm->data.op_count);  in rds_ib_xmit()
    563  rm->data.op_count = 0;  in rds_ib_xmit()
    567  rm->data.op_dmasg = 0;  in rds_ib_xmit()
    615  if (rm->rdma.op_active && rm->rdma.op_fence)  in rds_ib_xmit()
    650  && scat != &rm->data.op_sg[rm->data.op_count]) {  in rds_ib_xmit()
    664  rm->data.op_dmasg++;  in rds_ib_xmit()
    708  && scat != &rm->data.op_sg[rm->data.op_count]);  in rds_ib_xmit()
    [all …]
|
| /linux/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ |
| r535.c |
     36  &vmm->rm.client, &vmm->rm.device);  in r535_mmu_promote_vmm()
     56  &vmm->rm.rsvd);  in r535_mmu_promote_vmm()
     69  ctrl->virtAddrHi = vmm->rm.rsvd->addr + vmm->rm.rsvd->size - 1;  in r535_mmu_promote_vmm()
    103  struct nvkm_mmu_func *rm;  in r535_mmu_new() [local]
    106  if (!(rm = kzalloc(sizeof(*rm), GFP_KERNEL)))  in r535_mmu_new()
    109  rm->dtor = r535_mmu_dtor;  in r535_mmu_new()
    111  rm->mmu = hw->mmu;  in r535_mmu_new()
    112  rm->mem = hw->mem;  in r535_mmu_new()
    113  rm->vmm = hw->vmm;  in r535_mmu_new()
    114  rm->kind = hw->kind;  in r535_mmu_new()
    [all …]
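The nouveau r535.c files in this listing all follow one GSP-RM enablement pattern: kzalloc() a fresh function table, point the ops that must talk to the GSP firmware at r535_* implementations, copy the ops that still work unchanged from the hardware table hw, then hand the table to the ordinary constructor and kfree() it on failure (as the devinit/vfn/fb hits further down show). A condensed sketch of that shape, with invented demo_* names rather than the real nvkm function tables:

    #include <linux/errno.h>
    #include <linux/slab.h>

    struct demo_func {
            void (*dtor)(void *priv);
            void (*init)(void *priv);
            void (*flush)(void *priv);
    };

    static void r535_demo_dtor(void *priv)  { /* free RM objects */ }
    static void r535_demo_flush(void *priv) { /* issue the op via GSP-RM */ }

    int demo_ctor(const struct demo_func *func);    /* hypothetical constructor */

    static int demo_rm_new(const struct demo_func *hw)
    {
            struct demo_func *rm;
            int ret;

            if (!(rm = kzalloc(sizeof(*rm), GFP_KERNEL)))
                    return -ENOMEM;

            rm->dtor  = r535_demo_dtor;     /* RM-specific teardown */
            rm->init  = hw->init;           /* hardware op reused as-is */
            rm->flush = r535_demo_flush;    /* routed through the firmware */

            ret = demo_ctor(rm);
            if (ret)
                    kfree(rm);              /* constructor failed: drop the table */
            return ret;
    }

Cloning the table per-instance, instead of patching the shared hw table, keeps the firmware-driven and direct-hardware paths selectable at probe time.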
|
| /linux/drivers/gpu/drm/msm/disp/dpu1/ |
| dpu_rm.c |
     38  struct dpu_rm *rm,  in dpu_rm_init() [argument]
     45  if (!rm || !cat || !mmio) {  in dpu_rm_init()
     51  memset(rm, 0, sizeof(*rm));  in dpu_rm_init()
    188  rm->cdm_blk = &hw->base;  in dpu_rm_init()
    304  if (!rm->mixer_blks[i])  in _dpu_rm_reserve_lms()
    360  struct dpu_rm *rm,  in _dpu_rm_reserve_ctls() [argument]
    379  if (!rm->ctl_blks[j])  in _dpu_rm_reserve_ctls()
    568  if (!rm->cdm_blk) {  in _dpu_rm_reserve_cdm()
    584  struct dpu_rm *rm,  in _dpu_rm_make_reservation() [argument]
    661  struct dpu_rm *rm,  in dpu_rm_reserve() [argument]
    [all …]
|
| dpu_rm.h |
     51  struct dpu_rm *rm,
     69  int dpu_rm_reserve(struct dpu_rm *rm,
     88  int dpu_rm_get_assigned_resources(struct dpu_rm *rm,
    105  static inline struct dpu_hw_intf *dpu_rm_get_intf(struct dpu_rm *rm, enum dpu_intf intf_idx)  in dpu_rm_get_intf() [argument]
    107  return rm->hw_intf[intf_idx - INTF_0];  in dpu_rm_get_intf()
    115  static inline struct dpu_hw_wb *dpu_rm_get_wb(struct dpu_rm *rm, enum dpu_wb wb_idx)  in dpu_rm_get_wb() [argument]
    117  return rm->hw_wb[wb_idx - WB_0];  in dpu_rm_get_wb()
    125  static inline struct dpu_hw_sspp *dpu_rm_get_sspp(struct dpu_rm *rm, enum dpu_sspp sspp_idx)  in dpu_rm_get_sspp() [argument]
    127  return rm->hw_sspp[sspp_idx - SSPP_NONE];  in dpu_rm_get_sspp()
|
| /linux/drivers/gpu/drm/nouveau/nvkm/engine/nvjpg/ |
| r535.c |
     33  struct nvkm_gsp_object rm;  [member]
     41  nvkm_gsp_rm_free(&obj->rm);  in r535_nvjpg_obj_dtor()
     65  sizeof(*args), &obj->rm);  in r535_nvjpg_obj_ctor()
     72  return nvkm_gsp_rm_alloc_wr(&obj->rm, args);  in r535_nvjpg_obj_ctor()
     86  struct nvkm_engine_func *rm;  in r535_nvjpg_new() [local]
     91  if (!(rm = kzalloc(sizeof(*rm) + (nclass + 1) * sizeof(rm->sclass[0]), GFP_KERNEL)))  in r535_nvjpg_new()
     94  rm->dtor = r535_nvjpg_dtor;  in r535_nvjpg_new()
     96  rm->sclass[i].minver = hw->sclass[i].minver;  in r535_nvjpg_new()
     97  rm->sclass[i].maxver = hw->sclass[i].maxver;  in r535_nvjpg_new()
     99  rm->sclass[i].ctor = r535_nvjpg_obj_ctor;  in r535_nvjpg_new()
    [all …]
|
| /linux/drivers/gpu/drm/nouveau/nvkm/engine/ofa/ |
| r535.c |
     34  struct nvkm_gsp_object rm;  [member]
     42  nvkm_gsp_rm_free(&obj->rm);  in r535_ofa_obj_dtor()
     66  sizeof(*args), &obj->rm);  in r535_ofa_obj_ctor()
     72  return nvkm_gsp_rm_alloc_wr(&obj->rm, args);  in r535_ofa_obj_ctor()
     86  struct nvkm_engine_func *rm;  in r535_ofa_new() [local]
     91  if (!(rm = kzalloc(sizeof(*rm) + (nclass + 1) * sizeof(rm->sclass[0]), GFP_KERNEL)))  in r535_ofa_new()
     94  rm->dtor = r535_ofa_dtor;  in r535_ofa_new()
     96  rm->sclass[i].minver = hw->sclass[i].minver;  in r535_ofa_new()
     97  rm->sclass[i].maxver = hw->sclass[i].maxver;  in r535_ofa_new()
     99  rm->sclass[i].ctor = r535_ofa_obj_ctor;  in r535_ofa_new()
    [all …]
|
| /linux/drivers/gpu/drm/nouveau/nvkm/engine/ce/ |
| r535.c |
     34  struct nvkm_gsp_object rm;  [member]
     42  nvkm_gsp_rm_free(&obj->rm);  in r535_ce_obj_dtor()
     66  sizeof(*args), &obj->rm);  in r535_ce_obj_ctor()
     73  return nvkm_gsp_rm_alloc_wr(&obj->rm, args);  in r535_ce_obj_ctor()
     87  struct nvkm_engine_func *rm;  in r535_ce_new() [local]
     92  if (!(rm = kzalloc(sizeof(*rm) + (nclass + 1) * sizeof(rm->sclass[0]), GFP_KERNEL)))  in r535_ce_new()
     95  rm->dtor = r535_ce_dtor;  in r535_ce_new()
     97  rm->sclass[i].minver = hw->sclass[i].minver;  in r535_ce_new()
     98  rm->sclass[i].maxver = hw->sclass[i].maxver;  in r535_ce_new()
    100  rm->sclass[i].ctor = r535_ce_obj_ctor;  in r535_ce_new()
    [all …]
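The kzalloc(sizeof(*rm) + (nclass + 1) * sizeof(rm->sclass[0]), ...) calls in these engine constructors allocate the function table and its trailing class list in one block, with one extra zeroed entry left as a terminator. A hedged sketch of the same allocation using the kernel's struct_size() helper in place of the open-coded arithmetic; the demo_* types are invented:

    #include <linux/overflow.h>
    #include <linux/slab.h>

    struct demo_class { int minver, maxver; };

    struct demo_engine_func {
            void (*dtor)(void *priv);
            struct demo_class sclass[];     /* flexible trailing array */
    };

    /* One allocation covers the struct plus nclass + 1 trailing entries;
     * kzalloc leaves the extra slot zeroed as a terminator. */
    static struct demo_engine_func *demo_engine_new(const struct demo_class *hw,
                                                    int nclass)
    {
            struct demo_engine_func *rm;
            int i;

            rm = kzalloc(struct_size(rm, sclass, nclass + 1), GFP_KERNEL);
            if (!rm)
                    return NULL;

            for (i = 0; i < nclass; i++) {
                    rm->sclass[i].minver = hw[i].minver;
                    rm->sclass[i].maxver = hw[i].maxver;
            }
            return rm;
    }

struct_size() also guards the multiplication against overflow, which the open-coded form does not.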
|
| /linux/tools/perf/arch/x86/util/ |
| perf_regs.c |
    227  rm[4].rm_eo - rm[4].rm_so > SDT_REG_NAME_SIZE) {  in arch_sdt_arg_parse_op()
    244  if (rm[3].rm_so != rm[3].rm_eo) {  in arch_sdt_arg_parse_op()
    245  if (rm[1].rm_so != rm[1].rm_eo)  in arch_sdt_arg_parse_op()
    247  else if (rm[2].rm_so != rm[2].rm_eo)  in arch_sdt_arg_parse_op()
    254  sdt_rename_register(old_op + rm[4].rm_so, rm[4].rm_eo - rm[4].rm_so,  in arch_sdt_arg_parse_op()
    259  (rm[2].rm_eo - rm[2].rm_so) +  in arch_sdt_arg_parse_op()
    260  (rm[3].rm_eo - rm[3].rm_so) +  in arch_sdt_arg_parse_op()
    262  (rm[5].rm_eo - rm[5].rm_so) +  in arch_sdt_arg_parse_op()
    271  (int)(rm[2].rm_eo - rm[2].rm_so), old_op + rm[2].rm_so,  in arch_sdt_arg_parse_op()
    272  (int)(rm[3].rm_eo - rm[3].rm_so), old_op + rm[3].rm_so,  in arch_sdt_arg_parse_op()
    [all …]
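In the perf_regs.c files here (x86 above, powerpc and arm64 below), rm[] holds regmatch_t results from regexec(3): rm[n].rm_so and rm[n].rm_eo bracket capture group n, rm_eo - rm_so is its length, and rm_so == -1 (or an empty rm_so == rm_eo span) means the group captured nothing, which is exactly what these tests check. A small runnable illustration of that bookkeeping with a toy pattern, not perf's actual SDT regex:

    #include <regex.h>
    #include <stdio.h>

    int main(void)
    {
            regex_t re;
            regmatch_t rm[3];
            const char *op = "8@-4(%rbp)";

            /* Toy pattern: optional sign, then digits. */
            if (regcomp(&re, "([+-]?)([0-9]+)", REG_EXTENDED))
                    return 1;

            if (!regexec(&re, op, 3, rm, 0)) {
                    /* Print each group via its rm_so/rm_eo offsets into op. */
                    printf("sign:   '%.*s'\n",
                           (int)(rm[1].rm_eo - rm[1].rm_so), op + rm[1].rm_so);
                    printf("number: '%.*s'\n",
                           (int)(rm[2].rm_eo - rm[2].rm_so), op + rm[2].rm_so);
                    if (rm[1].rm_so == rm[1].rm_eo)
                            printf("sign group matched empty\n");
            }
            regfree(&re);
            return 0;
    }

The (int)(rm_eo - rm_so) casts mirror perf's usage: printf's %.*s precision argument must be an int, while rm_so/rm_eo are regoff_t.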
|
| /linux/drivers/gpu/drm/nouveau/nvkm/engine/disp/ |
| r535.c |
   1475  ret = nvkm_gsp_client_device_ctor(gsp, &disp->rm.client, &disp->rm.device);  in r535_disp_oneinit()
   1480  &disp->rm.objcom);  in r535_disp_oneinit()
   1689  struct nvkm_disp_func *rm;  in r535_disp_new() [local]
   1692  if (!(rm = kzalloc(sizeof(*rm) + 6 * sizeof(rm->user[0]), GFP_KERNEL)))  in r535_disp_new()
   1699  rm->uevent = hw->uevent;  in r535_disp_new()
   1704  rm->root = hw->root;  in r535_disp_new()
   1709  case 0x7d: rm->user[i] = hw->user[i]; rm->user[i].chan = &r535_core; break;  in r535_disp_new()
   1710  case 0x7e: rm->user[i] = hw->user[i]; rm->user[i].chan = &r535_wndw; break;  in r535_disp_new()
   1711  case 0x7b: rm->user[i] = hw->user[i]; rm->user[i].chan = &r535_wimm; break;  in r535_disp_new()
   1712  case 0x7a: rm->user[i] = hw->user[i]; rm->user[i].chan = &r535_curs; break;  in r535_disp_new()
    [all …]
|
| /linux/drivers/gpu/drm/nouveau/nvkm/engine/nvdec/ |
| r535.c |
     33  struct nvkm_gsp_object rm;  [member]
     41  nvkm_gsp_rm_free(&obj->rm);  in r535_nvdec_obj_dtor()
     65  sizeof(*args), &obj->rm);  in r535_nvdec_obj_ctor()
     72  return nvkm_gsp_rm_alloc_wr(&obj->rm, args);  in r535_nvdec_obj_ctor()
     88  struct nvkm_engine_func *rm;  in r535_nvdec_new() [local]
     93  if (!(rm = kzalloc(sizeof(*rm) + (nclass + 1) * sizeof(rm->sclass[0]), GFP_KERNEL)))  in r535_nvdec_new()
     96  rm->dtor = r535_nvdec_dtor;  in r535_nvdec_new()
     98  rm->sclass[i].minver = hw->sclass[i].minver;  in r535_nvdec_new()
     99  rm->sclass[i].maxver = hw->sclass[i].maxver;  in r535_nvdec_new()
    101  rm->sclass[i].ctor = r535_nvdec_obj_ctor;  in r535_nvdec_new()
    [all …]
|
| /linux/drivers/gpu/drm/nouveau/nvkm/engine/nvenc/ |
| r535.c |
     33  struct nvkm_gsp_object rm;  [member]
     41  nvkm_gsp_rm_free(&obj->rm);  in r535_nvenc_obj_dtor()
     65  sizeof(*args), &obj->rm);  in r535_nvenc_obj_ctor()
     72  return nvkm_gsp_rm_alloc_wr(&obj->rm, args);  in r535_nvenc_obj_ctor()
     88  struct nvkm_engine_func *rm;  in r535_nvenc_new() [local]
     93  if (!(rm = kzalloc(sizeof(*rm) + (nclass + 1) * sizeof(rm->sclass[0]), GFP_KERNEL)))  in r535_nvenc_new()
     96  rm->dtor = r535_nvenc_dtor;  in r535_nvenc_new()
     98  rm->sclass[i].minver = hw->sclass[i].minver;  in r535_nvenc_new()
     99  rm->sclass[i].maxver = hw->sclass[i].maxver;  in r535_nvenc_new()
    101  rm->sclass[i].ctor = r535_nvenc_obj_ctor;  in r535_nvenc_new()
    [all …]
|
| /linux/drivers/gpu/drm/nouveau/nvkm/subdev/bar/ |
| r535.c |
    151  struct nvkm_bar_func *rm;  in r535_bar_new_() [local]
    155  if (!(rm = kzalloc(sizeof(*rm), GFP_KERNEL)))  in r535_bar_new_()
    158  rm->dtor = r535_bar_dtor;  in r535_bar_new_()
    159  rm->oneinit = hw->oneinit;  in r535_bar_new_()
    160  rm->bar1.init = r535_bar_bar1_init;  in r535_bar_new_()
    161  rm->bar1.fini = r535_bar_bar1_fini;  in r535_bar_new_()
    162  rm->bar1.wait = r535_bar_bar1_wait;  in r535_bar_new_()
    163  rm->bar1.vmm = hw->bar1.vmm;  in r535_bar_new_()
    167  rm->bar2.vmm = hw->bar2.vmm;  in r535_bar_new_()
    168  rm->flush = r535_bar_flush;  in r535_bar_new_()
    [all …]
|
| /linux/tools/perf/arch/powerpc/util/ |
| perf_regs.c |
    138  regmatch_t rm[5];  in arch_sdt_arg_parse_op() [local]
    151  if (!regexec(&sdt_op_regex1, old_op, 3, rm, 0)) {  in arch_sdt_arg_parse_op()
    155  new_len += (int)(rm[2].rm_eo - rm[2].rm_so);  in arch_sdt_arg_parse_op()
    162  (int)(rm[2].rm_eo - rm[2].rm_so), old_op + rm[2].rm_so);  in arch_sdt_arg_parse_op()
    163  } else if (!regexec(&sdt_op_regex2, old_op, 5, rm, 0)) {  in arch_sdt_arg_parse_op()
    168  prefix = (rm[1].rm_so == -1) ? '+' : '-';  in arch_sdt_arg_parse_op()
    171  new_len += (int)(rm[2].rm_eo - rm[2].rm_so);  in arch_sdt_arg_parse_op()
    172  new_len += (int)(rm[4].rm_eo - rm[4].rm_so);  in arch_sdt_arg_parse_op()
    179  (int)(rm[2].rm_eo - rm[2].rm_so), old_op + rm[2].rm_so,  in arch_sdt_arg_parse_op()
    180  (int)(rm[4].rm_eo - rm[4].rm_so), old_op + rm[4].rm_so);  in arch_sdt_arg_parse_op()
|
| /linux/drivers/gpu/drm/nouveau/nvkm/engine/fifo/ |
| r535.c |
     72  chan->rm.mthdbuf.ptr, chan->rm.mthdbuf.addr);  in r535_chan_ramfc_clear()
    108  if (!chan->rm.mthdbuf.ptr)  in r535_chan_ramfc_write()
    113  &chan->rm.object);  in r535_chan_ramfc_write()
    374  chan->rm.grctx = vctx;  in r535_gr_ctor()
    475  switch (rm) {  in r535_fifo_engn_type()
    523  engn->rm.size =  in r535_fifo_ectx_size()
    649  struct nvkm_fifo_func *rm;  in r535_fifo_new() [local]
    651  if (!(rm = kzalloc(sizeof(*rm), GFP_KERNEL)))  in r535_fifo_new()
    656  rm->runl = &r535_runl;  in r535_fifo_new()
    657  rm->cgrp = hw->cgrp;  in r535_fifo_new()
    [all …]
|
| /linux/tools/perf/arch/arm64/util/ |
| perf_regs.c |
     98  regmatch_t rm[5];  in arch_sdt_arg_parse_op() [local]
    104  if (!regexec(&sdt_op_regex1, old_op, 3, rm, 0)) {  in arch_sdt_arg_parse_op()
    107  new_len += (int)(rm[1].rm_eo - rm[1].rm_so);  in arch_sdt_arg_parse_op()
    114  (int)(rm[1].rm_eo - rm[1].rm_so), old_op + rm[1].rm_so);  in arch_sdt_arg_parse_op()
    115  } else if (!regexec(&sdt_op_regex2, old_op, 5, rm, 0)) {  in arch_sdt_arg_parse_op()
    120  if (rm[2].rm_so == -1)  in arch_sdt_arg_parse_op()
    123  new_len += (int)(rm[2].rm_eo - rm[2].rm_so);  in arch_sdt_arg_parse_op()
    129  if (rm[2].rm_so == -1)  in arch_sdt_arg_parse_op()
    133  (int)(rm[2].rm_eo - rm[2].rm_so),  in arch_sdt_arg_parse_op()
    134  old_op + rm[2].rm_so);  in arch_sdt_arg_parse_op()
|
| /linux/arch/x86/math-emu/ |
| reg_divide.c |
     37  deststnr = rm;  in FPU_div()
     46  a = (FPU_REG *) rm;  in FPU_div()
     49  a = &st(rm);  in FPU_div()
     51  taga = FPU_gettagi(rm);  in FPU_div()
     58  b = (FPU_REG *) rm;  in FPU_div()
     61  b = &st(rm);  in FPU_div()
     63  tagb = FPU_gettagi(rm);  in FPU_div()
    141  return real_2op_NaN(st0_ptr, tag, rm,  in FPU_div()
    145  tag = FPU_gettagi(rm);  in FPU_div()
    147  tag = FPU_Special(&st(rm));  in FPU_div()
    [all …]
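In the x87 emulator, the rm parameter is overloaded: for a register operand it indexes the FPU register stack (st(rm)), while for a memory operand the caller smuggles a pointer through it, hence the (FPU_REG *) rm casts alongside &st(rm). A hedged sketch of that dual use; all names and the flag are simplified inventions, and uintptr_t stands in for math-emu's int-sized cast:

    #include <stdint.h>

    struct demo_reg { uint64_t sig; int32_t exp; }; /* stand-in for FPU_REG */

    #define DEMO_NREGS 8
    static struct demo_reg demo_regs[DEMO_NREGS];
    static int demo_top;

    /* st(i): the i-th register below the current top of stack. */
    #define st(i)   demo_regs[(demo_top + (i)) & (DEMO_NREGS - 1)]

    #define DEST_IS_REG 0x01    /* illustrative flag, not math-emu's encoding */

    /* rm is either a small stack index or a pointer in disguise. */
    static struct demo_reg *resolve_operand(uintptr_t rm, int flags)
    {
            if (flags & DEST_IS_REG)
                    return &st((int)rm);            /* register operand: index */
            return (struct demo_reg *)rm;           /* memory operand: pointer */
    }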
|
| /linux/drivers/gpu/drm/nouveau/nvkm/subdev/devinit/ |
| r535.c |
     36  struct nvkm_devinit_func *rm;  in r535_devinit_new() [local]
     39  if (!(rm = kzalloc(sizeof(*rm), GFP_KERNEL)))  in r535_devinit_new()
     42  rm->dtor = r535_devinit_dtor;  in r535_devinit_new()
     43  rm->post = hw->post;  in r535_devinit_new()
     44  rm->disable = hw->disable;  in r535_devinit_new()
     46  ret = nv50_devinit_new_(rm, device, type, inst, pdevinit);  in r535_devinit_new()
     48  kfree(rm);  in r535_devinit_new()
|
| /linux/drivers/gpu/drm/nouveau/nvkm/subdev/vfn/ |
| r535.c |
     35  struct nvkm_vfn_func *rm;  in r535_vfn_new() [local]
     38  if (!(rm = kzalloc(sizeof(*rm), GFP_KERNEL)))  in r535_vfn_new()
     41  rm->dtor = r535_vfn_dtor;  in r535_vfn_new()
     42  rm->intr = hw->intr;  in r535_vfn_new()
     43  rm->user = hw->user;  in r535_vfn_new()
     45  ret = nvkm_vfn_new_(rm, device, type, inst, addr, pvfn);  in r535_vfn_new()
     47  kfree(rm);  in r535_vfn_new()
|
| /linux/drivers/gpu/drm/nouveau/nvkm/subdev/fb/ |
| r535.c |
     71  struct nvkm_fb_func *rm;  in r535_fb_new() [local]
     74  if (!(rm = kzalloc(sizeof(*rm), GFP_KERNEL)))  in r535_fb_new()
     77  rm->dtor = r535_fb_dtor;  in r535_fb_new()
     78  rm->sysmem.flush_page_init = hw->sysmem.flush_page_init;  in r535_fb_new()
     79  rm->vidmem.size = hw->vidmem.size;  in r535_fb_new()
     80  rm->ram_new = r535_fb_ram_new;  in r535_fb_new()
     82  ret = nvkm_fb_new_(rm, device, type, inst, pfb);  in r535_fb_new()
     84  kfree(rm);  in r535_fb_new()
|