/drivers/net/ethernet/qlogic/
qla3xxx.c
     644  netif_warn(qdev, link, qdev->ndev, TIMED_OUT_MSG);  in ql_mii_write_reg_ex()
     655  netif_warn(qdev, link, qdev->ndev, TIMED_OUT_MSG);  in ql_mii_write_reg_ex()
    1018  qdev->phyType = getPhyType(qdev, reg1, reg2);  in PHY_Setup()
    1314  netif_printk(qdev, link, KERN_DEBUG, qdev->ndev,  in ql_this_adapter_controls_port()
    1492  netif_info(qdev, link, qdev->ndev,  in ql_finish_auto_neg()
    1529  netif_info(qdev, link, qdev->ndev,  in ql_link_state_machine_work()
    2165  qdev->rsp_current = qdev->rsp_q_virt_addr;  in ql_tx_rx_clean()
    2469  tx_cb = &qdev->tx_buf[qdev->req_producer_index];  in ql3xxx_send()
    2905  qdev->rsp_current = qdev->rsp_q_virt_addr;  in ql_alloc_mem_resources()
    3046  qdev->rsp_current = qdev->rsp_q_virt_addr;  in ql_adapter_initialize()
    [all …]
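Most of the qla3xxx hits go through the netif_warn()/netif_info()/netif_printk() helpers, which take the driver's private structure as their first argument so the message can be gated on its msg_enable mask. A minimal sketch of that pattern, using a hypothetical struct my_adapter in place of the driver's own private structure:

#include <linux/netdevice.h>

/*
 * Sketch of the msg_enable-gated logging seen throughout qla3xxx.c.
 * "struct my_adapter" is illustrative; the macros only require that the
 * private struct carry a msg_enable bitmask and that a net_device be passed.
 */
struct my_adapter {
        struct net_device *ndev;
        u32 msg_enable;         /* NETIF_MSG_* bits checked by netif_warn() et al. */
};

static void report_link_timeout(struct my_adapter *qdev)
{
        /* Printed only when NETIF_MSG_LINK is set in qdev->msg_enable. */
        netif_warn(qdev, link, qdev->ndev, "MII access timed out\n");
        netif_info(qdev, link, qdev->ndev, "link state updated\n");
}

Whether these calls print anything depends on which message classes are enabled in msg_enable (typically a module debug parameter or ethtool's message-level setting).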
/drivers/gpu/drm/qxl/
qxl_kms.c
      62  qxl_io_memslot_add(qdev, qdev->rom->slots_start + slot->index);  in setup_hw_slot()
      96  setup_hw_slot(qdev, &qdev->main_slot);  in qxl_reinit_memslots()
      97  setup_hw_slot(qdev, &qdev->surfaces_slot);  in qxl_reinit_memslots()
     118  qxl_gem_init(qdev);  in qxl_device_init()
     166  qdev->rom = ioremap_wc(qdev->rom_base, qdev->rom_size);  in qxl_device_init()
     167  if (!qdev->rom) {  in qxl_device_init()
     184  qdev->ram_header = ioremap_wc(qdev->vram_base +  in qxl_device_init()
     253  setup_slot(qdev, &qdev->main_slot, 0, "main",  in qxl_device_init()
     256  setup_slot(qdev, &qdev->surfaces_slot, 1, "surfaces",  in qxl_device_init()
     273  qxl_bo_fini(qdev);  in qxl_device_init()
    [all …]
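The qxl_device_init() hits show the device ROM and RAM header being mapped write-combined and checked for failure. A minimal sketch of that ioremap_wc() step, with illustrative base/size parameters (the real code takes them from the PCI BARs, records the mapping in qdev, and unmaps it again on teardown):

#include <linux/io.h>
#include <linux/types.h>

/* Map a ROM region write-combined; returns NULL on failure, and the
 * caller is expected to iounmap() the result when done. */
static void __iomem *map_device_rom(resource_size_t rom_base,
                                    unsigned long rom_size)
{
        void __iomem *rom = ioremap_wc(rom_base, rom_size);

        if (!rom)
                return NULL;    /* qxl treats this as a fatal init error */

        return rom;
}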
qxl_cmd.c
     204  schedule_work(&qdev->gc_work);  in qxl_queue_garbage_collect()
     206  flush_work(&qdev->gc_work);  in qxl_queue_garbage_collect()
     244  qxl_release_free(qdev, release);  in qxl_garbage_collect()
     249  wake_up_all(&qdev->release_event);  in qxl_garbage_collect()
     283  long addr = qdev->io_base + port;  in wait_for_io_cmd_user()
     374  qdev->primary_bo = NULL;  in qxl_io_destroy_primary()
     381  if (WARN_ON(qdev->primary_bo))  in qxl_io_create_primary()
     384  DRM_DEBUG_DRIVER("qdev %p, ram_header %p\n", qdev, qdev->ram_header);  in qxl_io_create_primary()
     398  qdev->primary_bo = bo;  in qxl_io_create_primary()
     438  qxl_reap_surface_id(qdev, 2);  in qxl_surface_id_alloc()
    [all …]
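Lines 204/206 suggest a queue-or-flush helper: garbage collection is deferred to a work item, and callers that are allowed to block can wait for it to finish. A minimal sketch of that shape with illustrative names (the real qxl_queue_garbage_collect() takes the qxl device and a flush flag):

#include <linux/workqueue.h>

struct my_dev {
        struct work_struct gc_work;     /* handler does the actual reaping */
};

static void queue_garbage_collect(struct my_dev *dev, bool can_block)
{
        schedule_work(&dev->gc_work);           /* run asynchronously on the system workqueue */
        if (can_block)
                flush_work(&dev->gc_work);      /* wait until the handler has finished */
}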
qxl_display.c
     213  if (size > qdev->vram_size)  in qxl_check_mode()
     259  if (!qdev->monitors_config)  in qxl_add_monitors_config_modes()
     327  if (!qdev->primary_bo) {  in qxl_crtc_update_monitors_config()
     348  if (qdev->primary_bo == qdev->dumb_shadow_bo)  in qxl_crtc_update_monitors_config()
     658  if (qdev->primary_bo)  in qxl_primary_atomic_update()
    1213  qdev->monitors_config = qdev->monitors_config_bo->kptr;  in qxl_create_monitors_object()
    1215  qxl_bo_physical_address(qdev, qdev->monitors_config_bo, 0);  in qxl_create_monitors_object()
    1218  qdev->dumb_heads = kcalloc(qxl_num_crtc, sizeof(qdev->dumb_heads[0]),  in qxl_create_monitors_object()
    1220  if (!qdev->dumb_heads) {  in qxl_create_monitors_object()
    1234  kfree(qdev->dumb_heads);  in qxl_destroy_monitors_object()
    [all …]
qxl_irq.c
      35  struct qxl_device *qdev = to_qxl(dev);  in qxl_irq_handler() local
      43  atomic_inc(&qdev->irq_received);  in qxl_irq_handler()
      47  wake_up_all(&qdev->display_event);  in qxl_irq_handler()
      48  qxl_queue_garbage_collect(qdev, false);  in qxl_irq_handler()
      51  atomic_inc(&qdev->irq_received_cursor);  in qxl_irq_handler()
      52  wake_up_all(&qdev->cursor_event);  in qxl_irq_handler()
      56  wake_up_all(&qdev->io_cmd_event);  in qxl_irq_handler()
      63  qdev->irq_received_error++;  in qxl_irq_handler()
      84  struct drm_device *ddev = &qdev->ddev;  in qxl_irq_init()
      94  atomic_set(&qdev->irq_received, 0);  in qxl_irq_init()
    [all …]
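The handler hits follow a common shape: bump a per-source statistics counter, wake whoever sleeps on the matching wait queue, and defer heavier work. A sketch of that shape with illustrative fields; the real qxl_irq_handler() first reads the pending bits published by the device before deciding which counters and queues to touch:

#include <linux/interrupt.h>
#include <linux/wait.h>
#include <linux/atomic.h>

struct my_dev {
        atomic_t irq_received;                  /* also exported via debugfs */
        wait_queue_head_t display_event;
};

static irqreturn_t my_irq_handler(int irq, void *arg)
{
        struct my_dev *dev = arg;

        atomic_inc(&dev->irq_received);         /* statistics only */
        wake_up_all(&dev->display_event);       /* unblock process-context waiters */

        return IRQ_HANDLED;
}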
qxl_drv.h
     264  int qxl_bo_init(struct qxl_device *qdev);
     265  void qxl_bo_fini(struct qxl_device *qdev);
     268  int qxl_surf_evict(struct qxl_device *qdev);
     285  ? &qdev->main_slot : &qdev->surfaces_slot;  in qxl_bo_physical_address()
     298  void qxl_gem_init(struct qxl_device *qdev);
     299  void qxl_gem_fini(struct qxl_device *qdev);
     324  int qxl_ttm_init(struct qxl_device *qdev);
     325  void qxl_ttm_fini(struct qxl_device *qdev);
     331  int qxl_image_init(struct qxl_device *qdev,
     355  void qxl_io_reset(struct qxl_device *qdev);
    [all …]
qxl_release.c
      60  struct qxl_device *qdev;  in qxl_fence_wait() local
      67  (qxl_io_notify_oom(qdev), 0)),  in qxl_fence_wait()
     103  spin_lock(&qdev->release_idr_lock);  in qxl_release_alloc()
     143  spin_lock(&qdev->release_idr_lock);  in qxl_release_free()
     157  atomic_dec(&qdev->release_count);  in qxl_release_free()
     318  atomic_inc(&qdev->release_count);  in qxl_alloc_release_reserved()
     320  mutex_lock(&qdev->release_mutex);  in qxl_alloc_release_reserved()
     327  ret = qxl_release_bo_alloc(qdev, &qdev->current_release_bo[cur_idx], priority);  in qxl_alloc_release_reserved()
     334  qxl_release_free(qdev, *release);  in qxl_alloc_release_reserved()
     357  qxl_release_free(qdev, *release);  in qxl_alloc_release_reserved()
    [all …]
qxl_ttm.c
      43  struct qxl_device *qdev;  in qxl_get_qdev() local
      47  return qdev;  in qxl_get_qdev()
      88  qdev->surfaceram_base;  in qxl_ttm_io_mem_reserve()
     125  struct qxl_device *qdev;  in qxl_bo_move_notify() local
     130  qdev = to_qxl(qbo->tbo.base.dev);  in qxl_bo_move_notify()
     190  int qxl_ttm_init(struct qxl_device *qdev)  in qxl_ttm_init() argument
     197  qdev->ddev.anon_inode->i_mapping,  in qxl_ttm_init()
     198  qdev->ddev.vma_offset_manager,  in qxl_ttm_init()
     212  qdev->surfaceram_size / PAGE_SIZE);  in qxl_ttm_init()
     226  void qxl_ttm_fini(struct qxl_device *qdev)  in qxl_ttm_fini() argument
    [all …]
qxl_drv.c
      75  struct qxl_device *qdev;  in qxl_pci_probe() local
      86  if (IS_ERR(qdev)) {  in qxl_pci_probe()
     126  qxl_modeset_fini(qdev);  in qxl_pci_probe()
     128  qxl_device_fini(qdev);  in qxl_pci_probe()
     147  qxl_modeset_fini(qdev);  in qxl_drm_release()
     148  qxl_device_fini(qdev);  in qxl_drm_release()
     181  qxl_surf_evict(qdev);  in qxl_drm_freeze()
     182  qxl_vram_evict(qdev);  in qxl_drm_freeze()
     199  qxl_reinit_memslots(qdev);  in qxl_drm_resume()
     233  qxl_io_reset(qdev);  in qxl_pm_resume()
    [all …]
qxl_debugfs.c
      42  struct qxl_device *qdev = to_qxl(node->minor->dev);  in qxl_debugfs_irq_received() local
      48  seq_printf(m, "%d\n", qdev->irq_received_error);  in qxl_debugfs_irq_received()
      56  struct qxl_device *qdev = to_qxl(node->minor->dev);  in qxl_debugfs_buffers_info() local
     105  for (i = 0; i < qdev->debugfs_count; i++) {  in qxl_debugfs_add_files()
     106  if (qdev->debugfs[i].files == files) {  in qxl_debugfs_add_files()
     112  i = qdev->debugfs_count + 1;  in qxl_debugfs_add_files()
     118  qdev->debugfs[qdev->debugfs_count].files = files;  in qxl_debugfs_add_files()
     119  qdev->debugfs[qdev->debugfs_count].num_files = nfiles;  in qxl_debugfs_add_files()
     120  qdev->debugfs_count = i;  in qxl_debugfs_add_files()
     123  qdev->ddev.primary->debugfs_root,  in qxl_debugfs_add_files()
    [all …]
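qxl registers these files through the DRM debugfs helpers (hence node->minor->dev above); the same kind of counter could be exposed with plain debugfs as sketched below, using illustrative names and the stock DEFINE_SHOW_ATTRIBUTE() helper:

#include <linux/debugfs.h>
#include <linux/seq_file.h>

struct my_dev {
        unsigned int irq_received_error;
};

static int irq_received_show(struct seq_file *m, void *unused)
{
        struct my_dev *dev = m->private;        /* data pointer passed at creation */

        seq_printf(m, "%u\n", dev->irq_received_error);
        return 0;
}
DEFINE_SHOW_ATTRIBUTE(irq_received);

static void my_debugfs_init(struct my_dev *dev, struct dentry *root)
{
        /* 0444: read-only counter visible to everyone */
        debugfs_create_file("irq_received_error", 0444, root, dev,
                            &irq_received_fops);
}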
qxl_object.c
      35  struct qxl_device *qdev;  in qxl_ttm_bo_destroy() local
      38  qdev = to_qxl(bo->tbo.base.dev);  in qxl_ttm_bo_destroy()
      40  qxl_surface_evict(qdev, bo, false);  in qxl_ttm_bo_destroy()
      42  mutex_lock(&qdev->gem.mutex);  in qxl_ttm_bo_destroy()
      44  mutex_unlock(&qdev->gem.mutex);  in qxl_ttm_bo_destroy()
     144  dev_err(qdev->ddev.dev,  in qxl_bo_create()
     216  map = qdev->vram_mapping;  in qxl_bo_kmap_atomic_page()
     218  map = qdev->surface_mapping;  in qxl_bo_kmap_atomic_page()
     369  mutex_lock(&qdev->gem.mutex);  in qxl_bo_force_delete()
     379  return qxl_ttm_init(qdev);  in qxl_bo_init()
    [all …]
qxl_draw.c
      31  static int alloc_clips(struct qxl_device *qdev,  in alloc_clips() argument
      38  return qxl_alloc_bo_reserved(qdev, release, size, clips_bo);  in alloc_clips()
      74  qxl_release_free(qdev, release);  in free_drawable()
     114  drawable->mm_time = qdev->rom->mm_clock;  in make_drawable()
     126  void qxl_draw_dirty_fb(struct qxl_device *qdev,  in qxl_draw_dirty_fb() argument
     157  ret = alloc_drawable(qdev, &release);  in qxl_draw_dirty_fb()
     181  ret = alloc_clips(qdev, release, num_clips, &clips_bo);  in qxl_draw_dirty_fb()
     185  ret = qxl_image_alloc_objects(qdev, release,  in qxl_draw_dirty_fb()
     226  drawable->clip.data = qxl_bo_physical_address(qdev,  in qxl_draw_dirty_fb()
     260  qxl_image_free_objects(qdev, dimage);  in qxl_draw_dirty_fb()
    [all …]
qxl_ioctl.c
      38  struct qxl_device *qdev = to_qxl(dev);  in qxl_alloc_ioctl() local
      64  struct qxl_device *qdev = to_qxl(dev);  in qxl_map_ioctl() local
     174  ret = qxl_alloc_release_reserved(qdev,  in qxl_process_single_command()
     192  draw->mm_time = qdev->rom->mm_clock;  in qxl_process_single_command()
     253  apply_reloc(qdev, &reloc_info[i]);  in qxl_process_single_command()
     264  qxl_release_free(qdev, release);  in qxl_process_single_command()
     272  struct qxl_device *qdev = to_qxl(dev);  in qxl_execbuffer_ioctl() local
     296  struct qxl_device *qdev = to_qxl(dev);  in qxl_update_area_ioctl() local
     328  ret = qxl_bo_check_id(qdev, qobj);  in qxl_update_area_ioctl()
     345  struct qxl_device *qdev = to_qxl(dev);  in qxl_getparam_ioctl() local
    [all …]
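Every ioctl here starts by upcasting the DRM device to the driver device with to_qxl(). The helper itself is not among the hits, but since other hits dereference an embedded qdev->ddev it is very likely the usual container_of() idiom, sketched here with a hypothetical my_device:

#include <linux/container_of.h>
#include <drm/drm_device.h>

struct my_device {
        struct drm_device ddev;         /* embedded, so container_of() can recover the outer struct */
        /* ... driver-private state ... */
};

static inline struct my_device *to_my_device(struct drm_device *dev)
{
        return container_of(dev, struct my_device, ddev);
}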
qxl_gem.c
      34  struct qxl_device *qdev;  in qxl_gem_object_free() local
      37  qdev = to_qxl(gobj->dev);  in qxl_gem_object_free()
      39  qxl_surface_evict(qdev, qobj, false);  in qxl_gem_object_free()
      68  mutex_lock(&qdev->gem.mutex);  in qxl_gem_object_create()
      69  list_add_tail(&qbo->list, &qdev->gem.objects);  in qxl_gem_object_create()
      70  mutex_unlock(&qdev->gem.mutex);  in qxl_gem_object_create()
      94  r = qxl_gem_object_create(qdev, size, 0,  in qxl_gem_object_create_with_handle()
     123  void qxl_gem_init(struct qxl_device *qdev)  in qxl_gem_init() argument
     125  INIT_LIST_HEAD(&qdev->gem.objects);  in qxl_gem_init()
     128  void qxl_gem_fini(struct qxl_device *qdev)  in qxl_gem_fini() argument
    [all …]
qxl_image.c
      33  qxl_allocate_chunk(struct qxl_device *qdev,  in qxl_allocate_chunk() argument
      56  qxl_image_alloc_objects(struct qxl_device *qdev,  in qxl_image_alloc_objects() argument
     100  qxl_image_init_helper(struct qxl_device *qdev,  in qxl_image_init_helper() argument
     127  ptr = qxl_bo_kmap_atomic_page(qdev, chunk_bo, 0);  in qxl_image_init_helper()
     132  qxl_bo_kunmap_atomic_page(qdev, chunk_bo, ptr);  in qxl_image_init_helper()
     160  qxl_bo_kunmap_atomic_page(qdev, chunk_bo, ptr);  in qxl_image_init_helper()
     181  qxl_bo_kunmap_atomic_page(qdev, chunk_bo, ptr);  in qxl_image_init_helper()
     192  ptr = qxl_bo_kmap_atomic_page(qdev, image_bo, 0);  in qxl_image_init_helper()
     215  qxl_bo_kunmap_atomic_page(qdev, image_bo, ptr);  in qxl_image_init_helper()
     225  qxl_bo_kunmap_atomic_page(qdev, image_bo, ptr);  in qxl_image_init_helper()
    [all …]
qxl_object.h
      56  extern int qxl_bo_create(struct qxl_device *qdev,
      66  void *qxl_bo_kmap_atomic_page(struct qxl_device *qdev, struct qxl_bo *bo, int page_offset);
      67  void qxl_bo_kunmap_atomic_page(struct qxl_device *qdev, struct qxl_bo *bo, void *map);
/drivers/dma/amd/qdma/
qdma.c
      44  qdev->qintr_ring_idx %= qdev->qintr_ring_num;  in qdma_get_intr_ring_idx()
     298  struct qdma_device *qdev = queue->qdev;  in qdma_clear_queue_context() local
     403  qdev->chan_num, qdev->fid);  in qdma_get_hw_info()
     410  struct qdma_device *qdev = queue->qdev;  in qdma_update_pidx() local
     419  struct qdma_device *qdev = queue->qdev;  in qdma_update_cidx() local
     464  q->qdev = qdev;  in qdma_alloc_queues()
     545  struct qdma_device *qdev = queue->qdev;  in qdma_free_queue_resources() local
     562  struct qdma_device *qdev = queue->qdev;  in qdma_alloc_queue_resources() local
     610  struct qdma_device *qdev = queue->qdev;  in qdma_xfer_start() local
     842  qdev = intr->qdev;  in qdma_queue_isr()
    [all …]
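Line 44 hints at round-robin interrupt-ring selection: an index is advanced and wrapped with a modulo so rings are handed out in rotation. A sketch of that logic with illustrative fields (the real function presumably serializes the update or tolerates the race):

#include <linux/types.h>

struct my_qdma_device {
        u32 qintr_ring_idx;             /* next ring to hand out */
        u32 qintr_ring_num;             /* number of interrupt rings available */
};

static u32 get_intr_ring_idx(struct my_qdma_device *qdev)
{
        u32 idx = qdev->qintr_ring_idx;

        qdev->qintr_ring_idx++;
        qdev->qintr_ring_idx %= qdev->qintr_ring_num;   /* wrap back to ring 0 */

        return idx;
}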
qdma.h
      45  #define qdma_err(qdev, fmt, args...) \  argument
      46  dev_err(&(qdev)->pdev->dev, fmt, ##args)
      48  #define qdma_dbg(qdev, fmt, args...) \  argument
      49  dev_dbg(&(qdev)->pdev->dev, fmt, ##args)
      51  #define qdma_info(qdev, fmt, args...) \  argument
      52  dev_info(&(qdev)->pdev->dev, fmt, ##args)
     205  struct qdma_device *qdev;  member
     223  struct qdma_device *qdev;  member
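These are thin wrappers that route every message through the PCI device, so each log line is prefixed with the device's name. The same idiom, sketched for a hypothetical my_device together with a usage example:

#include <linux/device.h>
#include <linux/pci.h>

struct my_device {
        struct pci_dev *pdev;
};

#define my_err(mdev, fmt, args...) \
        dev_err(&(mdev)->pdev->dev, fmt, ##args)
#define my_dbg(mdev, fmt, args...) \
        dev_dbg(&(mdev)->pdev->dev, fmt, ##args)

static void report_queue_error(struct my_device *mdev, int qid, int status)
{
        /* Expands to dev_err(&mdev->pdev->dev, ...), so the message carries
         * the PCI address of the failing device. */
        my_err(mdev, "queue %d failed with status %d\n", qid, status);
}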
/drivers/accel/qaic/
qaic_drv.c
     143  struct qaic_device *qdev = qddev->qdev;  in qaic_open() local
     204  qdev = qddev->qdev;  in qaic_postclose()
     398  qdev = devm_kzalloc(dev, sizeof(*qdev), GFP_KERNEL);  in create_qdev()
     399  if (!qdev)  in create_qdev()
     404  qdev->dbc = devm_kcalloc(dev, qdev->num_dbc, sizeof(*qdev->dbc), GFP_KERNEL);  in create_qdev()
     441  qddev->qdev = qdev;  in create_qdev()
     449  qdev->dbc[i].qdev = qdev;  in create_qdev()
     459  return qdev;  in create_qdev()
     561  if (!qdev)  in qaic_pci_probe()
     569  qdev->dbc[i].dbc_base = qdev->bar_dbc + QAIC_DBC_OFF(i);  in qaic_pci_probe()
    [all …]
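create_qdev() allocates both the device structure and its DBC array with devm_* allocators, tying their lifetime to the underlying struct device. A sketch of that managed-allocation pattern with illustrative types:

#include <linux/device.h>
#include <linux/slab.h>
#include <linux/types.h>

struct my_dbc {
        void __iomem *dbc_base;
};

struct my_qaic_device {
        u32 num_dbc;
        struct my_dbc *dbc;
};

static struct my_qaic_device *create_device(struct device *dev, u32 num_dbc)
{
        struct my_qaic_device *qdev;

        qdev = devm_kzalloc(dev, sizeof(*qdev), GFP_KERNEL);
        if (!qdev)
                return NULL;

        qdev->num_dbc = num_dbc;
        qdev->dbc = devm_kcalloc(dev, qdev->num_dbc, sizeof(*qdev->dbc),
                                 GFP_KERNEL);
        if (!qdev->dbc)
                return NULL;

        /* No explicit kfree(): devres frees both when "dev" is released. */
        return qdev;
}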
qaic_debugfs.c
      31  struct qaic_device *qdev;  member
      48  struct qaic_device *qdev;  in bootlog_show() local
      52  qdev = s->private;  in bootlog_show()
     100  struct qaic_device *qdev = qddev->qdev;  in qaic_debugfs_init() local
     186  struct qaic_device *qdev = msg->qdev;  in bootlog_log() local
     193  bootlog_commit(qdev, len);  in bootlog_log()
     208  if (!qdev->bootlog_wq) {  in qaic_bootlog_mhi_probe()
     213  ret = reset_bootlog(qdev);  in qaic_bootlog_mhi_probe()
     228  msg->qdev = qdev;  in qaic_bootlog_mhi_probe()
     250  struct qaic_device *qdev;  in qaic_bootlog_mhi_remove() local
    [all …]
qaic_control.c
     236  struct qaic_device *qdev;  member
     303  wait_event_interruptible(qdev->dbc[dbc_id].dbc_release, !qdev->dbc[dbc_id].in_use);  in save_dbc_buf()
     623  cleanup_xfer(qdev, xfer);  in encode_dma()
    1223  *rsp = msg_xfer(qdev, wrappers, qdev->next_seq_num - 1, false);  in qaic_manage_msg_xfer()
    1288  struct qaic_device *qdev;  in qaic_manage_ioctl() local
    1307  qdev = usr->qddev->qdev;  in qaic_manage_ioctl()
    1400  struct qaic_device *qdev = resp->qdev;  in resp_worker() local
    1464  resp->qdev = qdev;  in qaic_mhi_dl_xfer_cb()
    1471  if (!qdev->cntl_ch)  in qaic_control_open()
    1487  qdev->gen_crc = gen_crc;  in qaic_control_open()
    [all …]
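Line 303 shows one side sleeping until a DBC is released while another path clears in_use and wakes the queue. A minimal sketch of that handshake with illustrative names:

#include <linux/wait.h>

struct my_dbc {
        wait_queue_head_t dbc_release;
        bool in_use;
};

static int wait_for_dbc_release(struct my_dbc *dbc)
{
        /* Sleeps until in_use is cleared; returns -ERESTARTSYS on a signal. */
        return wait_event_interruptible(dbc->dbc_release, !dbc->in_use);
}

static void release_dbc_slot(struct my_dbc *dbc)
{
        dbc->in_use = false;
        wake_up(&dbc->dbc_release);     /* re-evaluate the wait condition */
}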
qaic_data.c
     685  struct qaic_device *qdev;  in qaic_create_bo_ioctl() local
     705  qdev = usr->qddev->qdev;  in qaic_create_bo_ioctl()
     754  struct qaic_device *qdev;  in qaic_mmap_bo_ioctl() local
     765  qdev = usr->qddev->qdev;  in qaic_mmap_bo_ioctl()
     973  qdev = usr->qddev->qdev;  in qaic_attach_slice_bo_ioctl()
    1339  qdev = usr->qddev->qdev;  in __qaic_execute_bo_ioctl()
    1552  qdev = dbc->qdev;  in dbc_irq_threaded_fn()
    1684  qdev = usr->qddev->qdev;  in qaic_wait_bo_ioctl()
    1753  qdev = usr->qddev->qdev;  in qaic_perf_stats_bo_ioctl()
    1844  qdev = usr->qddev->qdev;  in qaic_detach_slice_bo_ioctl()
    [all …]
qaic_ras.c
     492  mhi_soc_reset(qdev->mhi_cntrl);  in decode_ras_msg()
     496  if (qdev->ce_count != UINT_MAX)  in decode_ras_msg()
     497  qdev->ce_count++;  in decode_ras_msg()
     500  if (qdev->ce_count != UINT_MAX)  in decode_ras_msg()
     501  qdev->ue_count++;  in decode_ras_msg()
     504  if (qdev->ce_count != UINT_MAX)  in decode_ras_msg()
     505  qdev->ue_nf_count++;  in decode_ras_msg()
     580  qdev->ras_ch = mhi_dev;  in qaic_ras_mhi_probe()
     587  struct qaic_device *qdev;  in qaic_ras_mhi_remove() local
     590  qdev->ras_ch = NULL;  in qaic_ras_mhi_remove()
    [all …]
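The RAS hits guard each counter increment with a comparison against UINT_MAX, so the error counts saturate rather than wrap back to zero. The same policy as a small helper (the driver open-codes the check per counter):

#include <linux/limits.h>

static void saturating_inc(unsigned int *counter)
{
        if (*counter != UINT_MAX)
                (*counter)++;   /* stick at UINT_MAX rather than overflow */
}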
qaic_timesync.c
      78  struct qaic_device *qdev;  member
      94  struct qaic_device *qdev;  member
     189  mqtsdev->qdev = qdev;  in qaic_timesync_probe()
     190  mqtsdev->dev = &qdev->pdev->dev;  in qaic_timesync_probe()
     253  struct qaic_device *qdev = resp->qdev;  in qaic_boot_timesync_worker() local
     258  mhi_dev = qdev->qts_ch;  in qaic_boot_timesync_worker()
     302  resp->qdev = qdev;  in qaic_boot_timesync_queue_resp()
     317  struct qaic_device *qdev;  in qaic_boot_timesync_remove() local
     321  qdev->qts_ch = NULL;  in qaic_boot_timesync_remove()
     333  qdev->qts_ch = mhi_dev;  in qaic_boot_timesync_probe()
    [all …]
qaic.h
      33  #define to_qaic_device(dev) (to_qaic_drm_device((dev))->qdev)
      67  struct qaic_device *qdev;  member
     184  struct qaic_device *qdev;  member
     293  int qaic_control_open(struct qaic_device *qdev);
     294  void qaic_control_close(struct qaic_device *qdev);
     295  void qaic_release_usr(struct qaic_device *qdev, struct qaic_user *usr);
     299  int disable_dbc(struct qaic_device *qdev, u32 dbc_id, struct qaic_user *usr);
     301  void wakeup_dbc(struct qaic_device *qdev, u32 dbc_id);
     302  void release_dbc(struct qaic_device *qdev, u32 dbc_id);
     305  void wake_all_cntl(struct qaic_device *qdev);
    [all …]