/drivers/accel/qaic/
  sahara.c
    272  ret = mhi_queue_buf(context->mhi_dev, DMA_TO_DEVICE, context->tx[0],   in sahara_send_reset()
    314  context->tx[0]->hello_resp.mode = context->rx->hello_resp.mode;   in sahara_hello()
    316  ret = mhi_queue_buf(context->mhi_dev, DMA_TO_DEVICE, context->tx[0],   in sahara_hello()
    490  context->rx_size_requested = context->dump_table_length;   in sahara_memory_debug64()
    528  ret = mhi_queue_buf(context->mhi_dev, DMA_FROM_DEVICE, context->rx,   in sahara_processing()
    643  memcpy(context->mem_dump_freespace, context->rx, context->rx_size);   in sahara_parse_dump_image()
    644  context->mem_dump_freespace += context->rx_size;   in sahara_parse_dump_image()
    646  if (context->dump_image_offset >= context->dump_image->length) {   in sahara_parse_dump_image()
    696  if (context->rx_size != context->rx_size_requested &&   in sahara_dump_processing()
    752  if (!context)   in sahara_mhi_probe()
    [all …]
/drivers/misc/vmw_vmci/
  vmci_context.c
    107  context = kzalloc(sizeof(*context), GFP_KERNEL);   in vmci_ctx_create()
    108  if (!context) {   in vmci_ctx_create()
    173  return context;   in vmci_ctx_create()
    180  kfree(context);   in vmci_ctx_create()
    291  if (!context) {   in vmci_ctx_enqueue_datagram()
    389  context = c;   in vmci_ctx_get()
    396  return context;   in vmci_ctx_get()
    415  ctx_fire_notification(context->cid, context->priv_flags);   in ctx_free_ctx()
    586  if (!context)   in vmci_ctx_add_notification()
    652  if (!context)   in vmci_ctx_remove_notification()
    [all …]
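The vmci_ctx_create() hits above show the usual allocate-check-free lifecycle for a per-client context object. As a rough illustration of that shape only (struct demo_ctx and demo_ctx_create() are invented names, not VMCI code), the pattern looks roughly like this:

```c
/* Illustrative only: demo_ctx / demo_ctx_create() are invented names. */
#include <linux/err.h>
#include <linux/slab.h>
#include <linux/types.h>

struct demo_ctx {
	u32 cid;
	u32 priv_flags;
};

/* Allocate a zeroed per-client context; undo the allocation on failure. */
static struct demo_ctx *demo_ctx_create(u32 cid, u32 priv_flags)
{
	struct demo_ctx *context;

	context = kzalloc(sizeof(*context), GFP_KERNEL);
	if (!context)
		return ERR_PTR(-ENOMEM);

	context->cid = cid;
	context->priv_flags = priv_flags;

	/*
	 * Any further setup goes here; each failure path ends with
	 * kfree(context) before returning an ERR_PTR().
	 */
	return context;
}
```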
  vmci_route.c
    42   if (VMCI_INVALID_ID == dst->context)   in vmci_route()
    75   if (VMCI_INVALID_ID == src->context &&   in vmci_route()
    77   src->context = vmci_get_context_id();   in vmci_route()
    113  if (VMCI_INVALID_ID == src->context)   in vmci_route()
    130  if (VMCI_INVALID_ID == src->context) {   in vmci_route()
    139  src->context = VMCI_HOST_CONTEXT_ID;   in vmci_route()
    153  if (vmci_ctx_exists(dst->context)) {   in vmci_route()
    165  src->context = VMCI_HOST_CONTEXT_ID;   in vmci_route()
    167  src->context != dst->context) {   in vmci_route()
    209  if (VMCI_INVALID_ID == src->context)   in vmci_route()
    [all …]
/drivers/gpu/drm/etnaviv/
  etnaviv_mmu.c
    23   unmapped_page = context->global->ops->unmap(context, iova,   in etnaviv_context_unmap()
    43   ret = context->global->ops->map(context, iova, paddr, pgsize,   in etnaviv_context_map()
    69   if (!context || !sgt)   in etnaviv_iommu_map()
    126  struct etnaviv_iommu_context *context = mapping->context;   in etnaviv_iommu_reap_mapping() (local)
    284  mapping->context = etnaviv_iommu_context_get(context);   in etnaviv_iommu_map_gem()
    309  mapping->context = etnaviv_iommu_context_get(context);   in etnaviv_iommu_map_gem()
    344  etnaviv_cmdbuf_suballoc_unmap(context, &context->cmdbuf_mapping);   in etnaviv_iommu_context_free()
    346  context->global->ops->free(context);   in etnaviv_iommu_context_free()
    392  context->global->ops->restore(gpu, context);   in etnaviv_iommu_restore()
    466  return context->global->ops->dump_size(context);   in etnaviv_iommu_dump_size()
    [all …]
  etnaviv_iommu.c
    37   drm_mm_takedown(&context->mm);   in etnaviv_iommuv1_free()
    140  context = global->v1.shared_context;   in etnaviv_iommuv1_context_alloc()
    141  etnaviv_iommu_context_get(context);   in etnaviv_iommuv1_context_alloc()
    143  return context;   in etnaviv_iommuv1_context_alloc()
    160  context = &v1_context->base;   in etnaviv_iommuv1_context_alloc()
    161  context->global = global;   in etnaviv_iommuv1_context_alloc()
    162  kref_init(&context->refcount);   in etnaviv_iommuv1_context_alloc()
    163  mutex_init(&context->lock);   in etnaviv_iommuv1_context_alloc()
    164  INIT_LIST_HEAD(&context->mappings);   in etnaviv_iommuv1_context_alloc()
    166  context->global->v1.shared_context = context;   in etnaviv_iommuv1_context_alloc()
    [all …]
  etnaviv_iommu_v2.c
    52   drm_mm_takedown(&context->mm);   in etnaviv_iommuv2_free()
    166  struct etnaviv_iommu_context *context)   in etnaviv_iommuv2_restore_nonsec() (argument)
    181  (u32)context->global->bad_page_dma);   in etnaviv_iommuv2_restore_nonsec()
    190  struct etnaviv_iommu_context *context)   in etnaviv_iommuv2_restore_sec() (argument)
    273  struct etnaviv_iommu_context *context;   in etnaviv_iommuv2_context_alloc() (local)
    300  context = &v2_context->base;   in etnaviv_iommuv2_context_alloc()
    301  context->global = global;   in etnaviv_iommuv2_context_alloc()
    302  kref_init(&context->refcount);   in etnaviv_iommuv2_context_alloc()
    303  mutex_init(&context->lock);   in etnaviv_iommuv2_context_alloc()
    304  INIT_LIST_HEAD(&context->mappings);   in etnaviv_iommuv2_context_alloc()
    [all …]
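Both etnaviv allocation paths above initialise the same trio of fields (kref_init(), mutex_init(), INIT_LIST_HEAD()) on a freshly allocated context and hand out references via a _get() helper. A minimal sketch of that reference-counted context pattern, using invented demo_* names rather than the driver's real types:

```c
/* Illustrative only: the demo_* names do not exist in the etnaviv driver. */
#include <linux/kernel.h>
#include <linux/kref.h>
#include <linux/list.h>
#include <linux/mutex.h>
#include <linux/slab.h>

struct demo_iommu_context {
	struct kref refcount;
	struct mutex lock;
	struct list_head mappings;
};

static void demo_context_release(struct kref *kref)
{
	struct demo_iommu_context *context =
		container_of(kref, struct demo_iommu_context, refcount);

	kfree(context);
}

static struct demo_iommu_context *demo_context_alloc(void)
{
	struct demo_iommu_context *context;

	context = kzalloc(sizeof(*context), GFP_KERNEL);
	if (!context)
		return NULL;

	kref_init(&context->refcount);		/* starts at one reference */
	mutex_init(&context->lock);
	INIT_LIST_HEAD(&context->mappings);

	return context;
}

/* Callers take and drop references instead of freeing directly. */
static struct demo_iommu_context *
demo_context_get(struct demo_iommu_context *context)
{
	kref_get(&context->refcount);
	return context;
}

static void demo_context_put(struct demo_iommu_context *context)
{
	kref_put(&context->refcount, demo_context_release);
}
```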
/drivers/gpu/drm/tegra/
  uapi.c
    46   kfree(context);   in tegra_drm_channel_context_close()
    89   context = kzalloc(sizeof(*context), GFP_KERNEL);   in tegra_drm_ioctl_channel_open()
    90   if (!context)   in tegra_drm_ioctl_channel_open()
    135  err = xa_alloc(&fpriv->contexts, &args->context, context, XA_LIMIT(1, U32_MAX),   in tegra_drm_ioctl_channel_open()
    157  kfree(context);   in tegra_drm_ioctl_channel_open()
    170  context = xa_load(&fpriv->contexts, args->context);   in tegra_drm_ioctl_channel_close()
    171  if (!context) {   in tegra_drm_ioctl_channel_close()
    200  context = xa_load(&fpriv->contexts, args->context);   in tegra_drm_ioctl_channel_map()
    201  if (!context) {   in tegra_drm_ioctl_channel_map()
    281  context = xa_load(&fpriv->contexts, args->context);   in tegra_drm_ioctl_channel_unmap()
    [all …]
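The uapi.c matches show the channel-open ioctl storing the new context in an XArray with xa_alloc() and returning the chosen ID to userspace, while the later ioctls recover the pointer with xa_load(). A small, hypothetical sketch of that handle-table pattern (demo_* names are illustrative, not Tegra's):

```c
/* Illustrative only: demo_* names are not part of the Tegra driver. */
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/xarray.h>

struct demo_context {
	int placeholder;
};

struct demo_file_priv {
	struct xarray contexts;	/* xa_init_flags(&fpriv->contexts, XA_FLAGS_ALLOC1) at open time */
};

/* "open": allocate a context and return a small integer handle to userspace. */
static int demo_context_open(struct demo_file_priv *fpriv, u32 *handle)
{
	struct demo_context *context;
	int err;

	context = kzalloc(sizeof(*context), GFP_KERNEL);
	if (!context)
		return -ENOMEM;

	/* Pick a free ID in [1, U32_MAX] and store the pointer under it. */
	err = xa_alloc(&fpriv->contexts, handle, context,
		       XA_LIMIT(1, U32_MAX), GFP_KERNEL);
	if (err < 0) {
		kfree(context);
		return err;
	}

	return 0;
}

/* Other ioctls translate the handle back into the pointer... */
static struct demo_context *demo_context_find(struct demo_file_priv *fpriv,
					      u32 handle)
{
	return xa_load(&fpriv->contexts, handle);
}

/* ...and "close" removes the entry and frees the object. */
static void demo_context_close(struct demo_file_priv *fpriv, u32 handle)
{
	struct demo_context *context = xa_erase(&fpriv->contexts, handle);

	kfree(context);
}
```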
  submit.c
    26   #define SUBMIT_ERR(context, fmt, ...) \   (argument)
    150  xa_lock(&context->mappings);   in tegra_drm_mapping_get()
    156  xa_unlock(&context->mappings);   in tegra_drm_mapping_get()
    243  SUBMIT_ERR(context,   in submit_write_reloc()
    403  class = context->client->base.class;   in submit_create_job()
    516  struct tegra_drm_context *context;   in tegra_drm_ioctl_channel_submit() (local)
    524  context = xa_load(&fpriv->contexts, args->context);   in tegra_drm_ioctl_channel_submit()
    525  if (!context) {   in tegra_drm_ioctl_channel_submit()
    528  current->comm, args->context);   in tegra_drm_ioctl_channel_submit()
    598  if (context->memory_context && context->client->ops->can_use_memory_ctx) {   in tegra_drm_ioctl_channel_submit()
    [all …]
/drivers/gpu/drm/amd/display/dc/dml/dcn32/
  dcn32_fpu.c
    1488  …!dcn32_mpo_in_use(context) && !dcn32_any_surfaces_rotated(dc, context) && !is_test_pattern_enabled…   in dcn32_full_validate_bw_helper()
    1659  context->bw_ctx.bw.dcn.clk.dispclk_khz = context->bw_ctx.dml.vba.DISPCLK * 1000;   in dcn32_calculate_dlg_params()
    1660  context->bw_ctx.bw.dcn.clk.dcfclk_khz = context->bw_ctx.dml.vba.DCFCLK * 1000;   in dcn32_calculate_dlg_params()
    1661  context->bw_ctx.bw.dcn.clk.socclk_khz = context->bw_ctx.dml.vba.SOCCLK * 1000;   in dcn32_calculate_dlg_params()
    1675  context->bw_ctx.bw.dcn.clk.dtbclk_en = is_dtbclk_required(dc, context);   in dcn32_calculate_dlg_params()
    1799  context->bw_ctx.dml.funcs.rq_dlg_get_dlg_reg_v2(&context->bw_ctx.dml,   in dcn32_calculate_dlg_params()
    2359  stream_status = dc_state_get_stream_status(context, context->streams[i]);   in dcn32_calculate_wm_and_dlg_fpu()
    2411  stream_status = dc_state_get_stream_status(context, context->streams[i]);   in dcn32_calculate_wm_and_dlg_fpu()
    2561  context->bw_ctx.bw.dcn.watermarks.a = context->bw_ctx.bw.dcn.watermarks.c;   in dcn32_calculate_wm_and_dlg_fpu()
    2593  context->bw_ctx.bw.dcn.watermarks.d = context->bw_ctx.bw.dcn.watermarks.a;   in dcn32_calculate_wm_and_dlg_fpu()
    [all …]
/drivers/platform/x86/intel/int1092/
  intel_sar.c
    41   &context->config_data[context->reg_value];   in update_sar_data()
    135  out = acpi_evaluate_dsm_typed(context->handle, &context->guid, rev,   in sar_get_device_mode()
    143  update_sar_data(context);   in sar_get_device_mode()
    189  context->reg_value = value;   in intc_reg_store()
    190  update_sar_data(context);   in intc_reg_store()
    223  out = acpi_evaluate_dsm_typed(context->handle, &context->guid, rev,   in sar_get_data()
    251  context = kzalloc(sizeof(*context), GFP_KERNEL);   in sar_probe()
    252  if (!context)   in sar_probe()
    266  sar_get_data(reg, context);   in sar_probe()
    291  kfree(context);   in sar_probe()
    [all …]
/drivers/gpu/drm/amd/display/dc/dml/dcn30/
  dcn30_fpu.c
    321  if (context->streams[i])   in dcn30_fpu_calculate_wm_and_dlg()
    322  stream_status = dc_state_get_stream_status(context, context->streams[i]);   in dcn30_fpu_calculate_wm_and_dlg()
    334  context, pipes, pipe_cnt, vlevel);   in dcn30_fpu_calculate_wm_and_dlg()
    345  dcfclk = context->bw_ctx.dml.vba.DCFCLKState[vlevel][context->bw_ctx.dml.vba.maxMpcComb];   in dcn30_fpu_calculate_wm_and_dlg()
    370  …context->bw_ctx.bw.dcn.watermarks.b.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cn…   in dcn30_fpu_calculate_wm_and_dlg()
    440  …context->bw_ctx.bw.dcn.watermarks.c.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cn…   in dcn30_fpu_calculate_wm_and_dlg()
    453  context->bw_ctx.bw.dcn.watermarks.a = context->bw_ctx.bw.dcn.watermarks.c;   in dcn30_fpu_calculate_wm_and_dlg()
    463  …context->bw_ctx.bw.dcn.watermarks.a.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cn…   in dcn30_fpu_calculate_wm_and_dlg()
    473  context->perf_params.stutter_period_us = context->bw_ctx.dml.vba.StutterPeriod;   in dcn30_fpu_calculate_wm_and_dlg()
    476  context->bw_ctx.bw.dcn.watermarks.d = context->bw_ctx.bw.dcn.watermarks.a;   in dcn30_fpu_calculate_wm_and_dlg()
    [all …]
/drivers/net/ethernet/mellanox/mlx4/
  en_resources.c
    47   memset(context, 0, sizeof(*context));   in mlx4_en_fill_qp_context()
    49   context->pd = cpu_to_be32(mdev->priv_pdn);   in mlx4_en_fill_qp_context()
    50   context->mtu_msgmax = 0xff;   in mlx4_en_fill_qp_context()
    59   context->sq_size_stride = ilog2(TXBB_SIZE) - 4;   in mlx4_en_fill_qp_context()
    63   context->local_qpn = cpu_to_be32(qpn);   in mlx4_en_fill_qp_context()
    64   context->pri_path.ackto = 1 & 0x07;   in mlx4_en_fill_qp_context()
    68   context->pri_path.sched_queue |= user_prio << 3;   in mlx4_en_fill_qp_context()
    72   context->cqn_send = cpu_to_be32(cqn);   in mlx4_en_fill_qp_context()
    73   context->cqn_recv = cpu_to_be32(cqn);   in mlx4_en_fill_qp_context()
    76   context->pri_path.counter_index !=   in mlx4_en_fill_qp_context()
    [all …]
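mlx4_en_fill_qp_context() first zeroes the hardware QP context and then fills the multi-byte fields through cpu_to_be32(), since the device consumes big-endian values. A stripped-down sketch of that style of context fill, with an invented layout that only mirrors a few of the fields listed above:

```c
/* Illustrative only: this struct layout is invented, not the mlx4 QP context. */
#include <linux/string.h>
#include <linux/types.h>
#include <asm/byteorder.h>

struct demo_qp_context {
	__be32 pd;
	u8     mtu_msgmax;
	u8     sq_size_stride;
	__be32 local_qpn;
	__be32 cqn_send;
	__be32 cqn_recv;
};

static void demo_fill_qp_context(struct demo_qp_context *context,
				 u32 pdn, u32 qpn, u32 cqn)
{
	/* Start from all-zeroes so reserved fields stay zero. */
	memset(context, 0, sizeof(*context));

	/* Multi-byte fields are stored in the device's (big-endian) order. */
	context->pd        = cpu_to_be32(pdn);
	context->local_qpn = cpu_to_be32(qpn);
	context->cqn_send  = cpu_to_be32(cqn);
	context->cqn_recv  = cpu_to_be32(cqn);

	/* Single-byte fields need no conversion. */
	context->mtu_msgmax = 0xff;
}
```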
/drivers/net/ethernet/qlogic/qed/
  qed_nvmetcp_fw_funcs.c
    145  struct e5_nvmetcp_task_context *context = task_params->context;   in init_default_nvmetcp_task() (local)
    149  memset(context, 0, sizeof(*context));   in init_default_nvmetcp_task()
    150  init_nvmetcp_task_params(context, task_params,   in init_default_nvmetcp_task()
    210  SET_FIELD(context->ustorm_st_context.flags,   in set_local_completion_context()
    221  struct e5_nvmetcp_task_context *context = task_params->context;   in init_rw_nvmetcp_task() (local)
    247  &context->mstorm_st_context.data_desc,   in init_rw_nvmetcp_task()
    260  &context->ustorm_ag_context,   in init_rw_nvmetcp_task()
    279  context->ustorm_ag_context.exp_cont_len = 0;   in init_rw_nvmetcp_task()
    327  struct e5_nvmetcp_task_context *context = task_params->context;   in init_common_login_request_task() (local)
    334  &context->ustorm_ag_context,   in init_common_login_request_task()
    [all …]
/drivers/gpu/drm/amd/display/dc/dml2/dml21/
  dml21_wrapper.c
    98   context->bw_ctx.bw.dcn.clk.dppclk_khz = 0;   in dml21_calculate_rq_and_dlg_params()
    106  context->bw_ctx.bw.dcn.mall_ss_size_bytes = 0;   in dml21_calculate_rq_and_dlg_params()
    151  context->bw_ctx.bw.dcn.clk.bw_dppclk_khz = context->bw_ctx.bw.dcn.clk.dppclk_khz;   in dml21_calculate_rq_and_dlg_params()
    152  context->bw_ctx.bw.dcn.clk.bw_dispclk_khz = context->bw_ctx.bw.dcn.clk.dispclk_khz;   in dml21_calculate_rq_and_dlg_params()
    171  context->bw_ctx.bw.dcn.clk.num_ways = 0;   in dml21_calculate_rq_and_dlg_params()
    183  …_display_cfg(dml_ctx, context->streams[stream_idx]->stream_id, context->stream_status[stream_idx].…   in dml21_prepare_mcache_params()
    214  if (!context)   in dml21_mode_check_and_programming()
    217  if (context->stream_count == 0) {   in dml21_mode_check_and_programming()
    255  …dml21_calculate_rq_and_dlg_params(in_dc, context, &context->res_ctx, dml_ctx, in_dc->res_pool->pip…   in dml21_mode_check_and_programming()
    274  if (!context || context->stream_count == 0)   in dml21_check_mode_support()
    [all …]
  dml21_utils.c
    85   struct dc_state *context,   in dml21_find_dc_pipes_for_plane() (argument)
    146  struct dc_state *context,   in dml21_pipe_populate_global_sync() (argument)
    243  struct dc_state *context,   in dml21_add_phantom_stream() (argument)
    280  struct dc_state *context,   in dml21_add_phantom_plane() (argument)
    345  context,   in dml21_handle_phantom_streams_planes()
    362  context,   in dml21_handle_phantom_streams_planes()
    378  struct dc_state *context,   in dml21_build_fams2_programming() (argument)
    391  for (i = 0; i < context->stream_count; i++) {   in dml21_build_fams2_programming()
    441  if (context->res_ctx.pipe_ctx[k].stream &&   in dml21_build_fams2_programming()
    443  context->res_ctx.pipe_ctx[k].plane_state == context->stream_status[i].plane_states[j]) {   in dml21_build_fams2_programming()
    [all …]
/drivers/gpu/drm/amd/display/dc/resource/dcn32/
  dcn32_resource_helpers.c
    92   struct dc_state *context)   in dcn32_helper_calculate_num_ways_for_subvp() (argument)
    108  struct dc_state *context)   in dcn32_merge_pipes_for_subvp() (argument)
    154  struct dc_state *context)   in dcn32_all_pipes_have_stream_and_plane() (argument)
    171  struct dc_state *context)   in dcn32_subvp_in_use() (argument)
    255  if (dc_state_get_stream_subvp_type(context, context->streams[i]) != SUBVP_PHANTOM)   in override_det_for_subvp()
    312  struct dc_state *context,   in dcn32_determine_det_override() (argument)
    324  if (dc_state_get_stream_subvp_type(context, context->streams[i]) != SUBVP_PHANTOM)   in dcn32_determine_det_override()
    340  if (context->res_ctx.pipe_ctx[j].stream == context->streams[i] &&   in dcn32_determine_det_override()
    350  if (k != j && context->res_ctx.pipe_ctx[k].stream == context->streams[i] &&   in dcn32_determine_det_override()
    359  if (k != j && context->res_ctx.pipe_ctx[k].stream == context->streams[i] &&   in dcn32_determine_det_override()
    [all …]
/drivers/usb/image/
  microtek.c
    384  context   in mts_int_submit_urb()
    402  context->final_callback(context->srb);   in mts_transfer_cleanup()
    410  context->srb->result |= (unsigned)(*context->scsi_status)<<1;   in mts_transfer_done()
    436  scsi_set_resid(context->srb, context->data_length -   in mts_data_done()
    477  context->data,   in mts_command_done()
    500  context->curr_sg = sg_next(context->curr_sg);   in mts_do_sg()
    502  context->data_pipe,   in mts_do_sg()
    529  desc->context.srb = srb;   in mts_build_transfer_context()
    537  desc->context.data = sg_virt(desc->context.curr_sg);   in mts_build_transfer_context()
    538  desc->context.data_length = desc->context.curr_sg->length;   in mts_build_transfer_context()
    [all …]
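The microtek matches show a per-transfer context that walks the SCSI scatter-gather list one segment at a time: curr_sg is advanced with sg_next(), and the data pointer and length are refreshed from sg_virt() and the segment length. A generic sketch of that bookkeeping, independent of the URB plumbing (demo_* names are invented):

```c
/* Illustrative only: demo_* names are invented, URB handling is omitted. */
#include <linux/scatterlist.h>
#include <linux/types.h>

struct demo_transfer_ctx {
	struct scatterlist *curr_sg;	/* segment currently being transferred */
	void *data;			/* kernel virtual address of that segment */
	unsigned int data_length;	/* its length in bytes */
};

/* Point the context at the first segment of the request's S/G list. */
static void demo_transfer_start(struct demo_transfer_ctx *ctx,
				struct scatterlist *sgl)
{
	ctx->curr_sg = sgl;
	ctx->data = sg_virt(ctx->curr_sg);
	ctx->data_length = ctx->curr_sg->length;
}

/*
 * Advance to the next segment once the current chunk has completed;
 * returns false when the list is exhausted.
 */
static bool demo_transfer_next(struct demo_transfer_ctx *ctx)
{
	ctx->curr_sg = sg_next(ctx->curr_sg);
	if (!ctx->curr_sg)
		return false;

	ctx->data = sg_virt(ctx->curr_sg);
	ctx->data_length = ctx->curr_sg->length;
	return true;
}
```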
/drivers/infiniband/hw/hns/
  hns_roce_cmd.c
    99   token, context->token);   in hns_roce_cmd_event()
    104  context->out_param = out_param;   in hns_roce_cmd_event()
    105  complete(&context->done);   in hns_roce_cmd_event()
    120  context = &cmd->context[cmd->free_head];   in __hns_roce_cmd_mbox_wait()
    122  } while (context->busy);   in __hns_roce_cmd_mbox_wait()
    124  context->busy = 1;   in __hns_roce_cmd_mbox_wait()
    148  ret = context->result;   in __hns_roce_cmd_mbox_wait()
    154  context->busy = 0;   in __hns_roce_cmd_mbox_wait()
    221  hr_cmd->context =   in hns_roce_cmd_use_events()
    223  if (!hr_cmd->context) {   in hns_roce_cmd_use_events()
    [all …]
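In the hns_roce_cmd.c matches, hns_roce_cmd_event() checks a token, stores the result and calls complete() on a per-command context, while __hns_roce_cmd_mbox_wait() claims a free context slot and sleeps until that completion fires. A simplified sketch of the completion-based command context (slot selection and locking are elided; demo_* names are not the driver's):

```c
/* Illustrative only: slot selection/locking elided; demo_* names are invented. */
#include <linux/completion.h>
#include <linux/errno.h>
#include <linux/jiffies.h>
#include <linux/kernel.h>
#include <linux/types.h>

struct demo_cmd_context {
	struct completion done;
	int result;
	u64 out_param;
	u16 token;
};

struct demo_cmd {
	struct demo_cmd_context context[16];	/* one slot per in-flight command */
};

static void demo_cmd_init(struct demo_cmd *cmd)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(cmd->context); i++)
		init_completion(&cmd->context[i].done);
}

/* Event/IRQ side: match the token, record the result, wake the waiter. */
static void demo_cmd_event(struct demo_cmd *cmd, int slot, u16 token,
			   int status, u64 out_param)
{
	struct demo_cmd_context *context = &cmd->context[slot];

	if (token != context->token)
		return;			/* stale or mismatched event */

	context->result = status;
	context->out_param = out_param;
	complete(&context->done);
}

/* Submitter side: post the command, then sleep until the event arrives. */
static int demo_cmd_wait(struct demo_cmd *cmd, int slot, u64 *out_param)
{
	struct demo_cmd_context *context = &cmd->context[slot];

	reinit_completion(&context->done);

	/* ...write the mailbox command to hardware here... */

	if (!wait_for_completion_timeout(&context->done,
					 msecs_to_jiffies(1000)))
		return -ETIMEDOUT;

	if (context->result)
		return context->result;

	*out_param = context->out_param;
	return 0;
}
```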
/drivers/gpu/drm/amd/display/dc/dml/dcn31/
  dcn31_fpu.c
    471  …if (context->bw_ctx.dml.vba.DRAMClockChangeSupport[context->bw_ctx.dml.vba.VoltageLevel][context->…   in dcn315_update_soc_for_wm_a()
    472  …context->bw_ctx.dml.soc.dram_clock_change_latency_us = context->bw_ctx.dml.soc.dummy_pstate_latenc…   in dcn315_update_soc_for_wm_a()
    477  context->bw_ctx.dml.soc.sr_exit_time_us =   in dcn315_update_soc_for_wm_a()
    483  struct dc *dc, struct dc_state *context,   in dcn31_calculate_wm_and_dlg_fp() (argument)
    489  double dcfclk = context->bw_ctx.dml.vba.DCFCLKState[vlevel][context->bw_ctx.dml.vba.maxMpcComb];   in dcn31_calculate_wm_and_dlg_fp()
    523  …context->bw_ctx.bw.dcn.watermarks.a.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cn…   in dcn31_calculate_wm_and_dlg_fp()
    533  context->bw_ctx.bw.dcn.watermarks.b = context->bw_ctx.bw.dcn.watermarks.a;   in dcn31_calculate_wm_and_dlg_fp()
    534  context->bw_ctx.bw.dcn.watermarks.c = context->bw_ctx.bw.dcn.watermarks.a;   in dcn31_calculate_wm_and_dlg_fp()
    535  context->bw_ctx.bw.dcn.watermarks.d = context->bw_ctx.bw.dcn.watermarks.a;   in dcn31_calculate_wm_and_dlg_fp()
    570  context->bw_ctx.bw.dcn.clk.fclk_khz = 0;   in dcn31_calculate_wm_and_dlg_fp()
    [all …]
/drivers/pci/hotplug/
  acpiphp_glue.c
    63   context = kzalloc(sizeof(*context), GFP_KERNEL);   in acpiphp_init_context()
    64   if (!context)   in acpiphp_init_context()
    71   return context;   in acpiphp_init_context()
    89   return context;   in acpiphp_get_context()
    107  kfree(context);   in acpiphp_put_context()
    127  if (!context)   in acpiphp_grab_context()
    141  return context;   in acpiphp_grab_context()
    167  context = bridge->context;   in free_bridge()
    169  if (context) {   in free_bridge()
    195  if (!context)   in acpiphp_post_dock_fixup()
    [all …]
/drivers/gpu/drm/amd/display/dc/dml/dcn20/
  dcn20_fpu.c
    1154  context->bw_ctx.bw.dcn.clk.dispclk_khz = context->bw_ctx.dml.vba.DISPCLK * 1000;   in dcn20_calculate_dlg_params()
    1155  context->bw_ctx.bw.dcn.clk.dcfclk_khz = context->bw_ctx.dml.vba.DCFCLK * 1000;   in dcn20_calculate_dlg_params()
    1156  context->bw_ctx.bw.dcn.clk.socclk_khz = context->bw_ctx.dml.vba.SOCCLK * 1000;   in dcn20_calculate_dlg_params()
    1157  context->bw_ctx.bw.dcn.clk.dramclk_khz = context->bw_ctx.dml.vba.DRAMSpeed * 1000 / 16;   in dcn20_calculate_dlg_params()
    1163  context->bw_ctx.bw.dcn.clk.fclk_khz = context->bw_ctx.dml.vba.FabricClock * 1000;   in dcn20_calculate_dlg_params()
    1175  context->bw_ctx.bw.dcn.clk.dtbclk_en = is_dtbclk_required(dc, context);   in dcn20_calculate_dlg_params()
    1217  context->bw_ctx.bw.dcn.clk.bw_dppclk_khz = context->bw_ctx.bw.dcn.clk.dppclk_khz;   in dcn20_calculate_dlg_params()
    1218  context->bw_ctx.bw.dcn.clk.bw_dispclk_khz = context->bw_ctx.bw.dcn.clk.dispclk_khz;   in dcn20_calculate_dlg_params()
    1235  context->bw_ctx.dml.funcs.rq_dlg_get_dlg_reg(&context->bw_ctx.dml,   in dcn20_calculate_dlg_params()
    1245  context->bw_ctx.dml.funcs.rq_dlg_get_rq_reg(&context->bw_ctx.dml,   in dcn20_calculate_dlg_params()
    [all …]
/drivers/gpu/drm/i915/selftests/
  i915_syncmap.c
    179  context);   in check_one()
    195  context, seqno);   in check_one()
    225  err = check_one(&sync, context,   in igt_syncmap_one()
    264  context, seqno);   in check_leaf()
    297  u64 context = BIT_ULL(order);   in igt_syncmap_join_above() (local)
    300  err = check_leaf(&sync, context, 0);   in igt_syncmap_join_above()
    346  u64 context = step * BIT_ULL(order);   in igt_syncmap_join_below() (local)
    367  context, order, step);   in igt_syncmap_join_below()
    389  context, order, step);   in igt_syncmap_join_below()
    434  context, idx,   in igt_syncmap_neighbours()
    [all …]
/drivers/gpu/drm/amd/amdgpu/
  amdgpu_psp_ta.c
    204  if (ret || context->resp_status) {   in ta_if_load_debugfs_write()
    207  ret, context->resp_status);   in ta_if_load_debugfs_write()
    226  if (ret || context->resp_status) {   in ta_if_load_debugfs_write()
    228  ret, context->resp_status);   in ta_if_load_debugfs_write()
    272  context->session_id = ta_id;   in ta_if_unload_debugfs_write()
    280  if (ret || context->resp_status) {   in ta_if_unload_debugfs_write()
    282  ret, context->resp_status);   in ta_if_unload_debugfs_write()
    340  if (!context || !context->initialized) {   in ta_if_invoke_debugfs_write()
    352  context->session_id = ta_id;   in ta_if_invoke_debugfs_write()
    360  if (ret || context->resp_status) {   in ta_if_invoke_debugfs_write()
    [all …]
/drivers/gpu/drm/amd/display/dc/dml2/
  dml2_utils.c
    174  if (!context->res_ctx.pipe_ctx[i].stream)   in is_dtbclk_required()
    305  if (get_plane_id(in_ctx, context, context->res_ctx.pipe_ctx[dc_pipe_ctx_index].plane_state,   in dml2_calculate_rq_and_dlg_params()
    323  …pipe_mall_type = dc_state_get_pipe_subvp_type(context, &context->res_ctx.pipe_ctx[dc_pipe_ctx_inde…   in dml2_calculate_rq_and_dlg_params()
    334  …context->bw_ctx.bw.dcn.compbuf_size_kb -= context->res_ctx.pipe_ctx[dc_pipe_ctx_index].det_buffer_…   in dml2_calculate_rq_and_dlg_params()
    336  …if (context->bw_ctx.bw.dcn.clk.dppclk_khz < context->res_ctx.pipe_ctx[dc_pipe_ctx_index].plane_res…   in dml2_calculate_rq_and_dlg_params()
    337  …context->bw_ctx.bw.dcn.clk.dppclk_khz = context->res_ctx.pipe_ctx[dc_pipe_ctx_index].plane_res.bw.…   in dml2_calculate_rq_and_dlg_params()
    361  context->bw_ctx.bw.dcn.clk.bw_dppclk_khz = context->bw_ctx.bw.dcn.clk.dppclk_khz;   in dml2_calculate_rq_and_dlg_params()
    362  context->bw_ctx.bw.dcn.clk.bw_dispclk_khz = context->bw_ctx.bw.dcn.clk.dispclk_khz;   in dml2_calculate_rq_and_dlg_params()
    370  context->bw_ctx.bw.dcn.clk.bw_dispclk_khz = context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz;   in dml2_calculate_rq_and_dlg_params()
    371  context->bw_ctx.bw.dcn.clk.bw_dppclk_khz = context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz ;   in dml2_calculate_rq_and_dlg_params()
    [all …]
/drivers/comedi/drivers/
  comedi_8255.c
    36   unsigned long context;   (member)
    38   unsigned long context);
    71   unsigned long context = spriv->context;   in subdev_8255_insn() (local)
    79   s->state & 0xff, context);   in subdev_8255_insn()
    82   (s->state >> 8) & 0xff, context);   in subdev_8255_insn()
    101  unsigned long context = spriv->context;   in subdev_8255_do_config() (local)
    149  unsigned long context),   in __subdev_8255_init() (argument)
    150  unsigned long context)   in __subdev_8255_init()
    161  spriv->context = context;   in __subdev_8255_init()
    240  unsigned long context)   in subdev_8255_cb_init()
    [all …]
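The comedi_8255.c matches show an opaque unsigned long context kept in the subdevice private data and handed back to the caller-supplied I/O callback on every register access. A compact sketch of that cookie-plus-callback arrangement, with invented demo_* names rather than the comedi API:

```c
/* Illustrative only: demo_* names are invented, not the comedi_8255 API. */
#include <linux/types.h>

struct demo_8255_priv {
	unsigned long context;	/* caller's cookie, e.g. a register base address */
	int (*io)(int dir, int port, int data, unsigned long context);
};

static void demo_8255_setup(struct demo_8255_priv *spriv,
			    int (*io)(int dir, int port, int data,
				      unsigned long context),
			    unsigned long context)
{
	spriv->io = io;
	spriv->context = context;	/* stored once at init time */
}

static int demo_8255_write_byte(struct demo_8255_priv *spriv, int port, int val)
{
	unsigned long context = spriv->context;

	/* The opaque cookie travels with every callback invocation. */
	return spriv->io(1 /* write */, port, val & 0xff, context);
}
```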
|