
Searched refs:tile (Results 1 – 25 of 162) sorted by relevance


/drivers/gpu/drm/xe/
xe_tile_sriov_vf.c
20 struct xe_ggtt *ggtt = tile->mem.ggtt; in vf_init_ggtt_balloons()
22 xe_tile_assert(tile, IS_SRIOV_VF(tile_to_xe(tile))); in vf_init_ggtt_balloons()
53 xe_tile_assert(tile, ggtt_size); in xe_tile_sriov_vf_balloon_ggtt_locked()
119 xe_tile_assert(tile, IS_SRIOV_VF(tile_to_xe(tile))); in xe_tile_sriov_vf_deballoon_ggtt_locked()
127 mutex_lock(&tile->mem.ggtt->lock); in vf_deballoon_ggtt()
134 xe_tile_assert(tile, IS_SRIOV_VF(tile_to_xe(tile))); in vf_fini_ggtt_balloons()
142 struct xe_tile *tile = arg; in cleanup_ggtt() local
144 vf_deballoon_ggtt(tile); in cleanup_ggtt()
145 vf_fini_ggtt_balloons(tile); in cleanup_ggtt()
165 err = vf_balloon_ggtt(tile); in xe_tile_sriov_vf_prepare_ggtt()
[all …]
xe_tile.c
91 tile->mem.ggtt = xe_ggtt_alloc(tile); in xe_tile_alloc()
92 if (!tile->mem.ggtt) in xe_tile_alloc()
113 tile->xe = xe; in xe_tile_init_early()
114 tile->id = id; in xe_tile_init_early()
116 err = xe_tile_alloc(tile); in xe_tile_init_early()
120 tile->primary_gt = xe_gt_alloc(tile); in xe_tile_init_early()
121 if (IS_ERR(tile->primary_gt)) in xe_tile_init_early()
124 xe_pcode_init(tile); in xe_tile_init_early()
136 err = xe_ttm_vram_mgr_init(tile, &tile->mem.vram.ttm); in tile_ttm_mgr_init()
171 xe_devm_add(tile, &tile->mem.vram); in xe_tile_init_noalloc()
[all …]
xe_pcode.c
105 mutex_lock(&tile->pcode.lock); in xe_pcode_write_timeout()
107 mutex_unlock(&tile->pcode.lock); in xe_pcode_write_timeout()
116 mutex_lock(&tile->pcode.lock); in xe_pcode_write64_timeout()
118 mutex_unlock(&tile->pcode.lock); in xe_pcode_write64_timeout()
127 mutex_lock(&tile->pcode.lock); in xe_pcode_read()
129 mutex_unlock(&tile->pcode.lock); in xe_pcode_read()
189 mutex_lock(&tile->pcode.lock); in xe_pcode_request()
206 drm_err(&tile_to_xe(tile)->drm, in xe_pcode_request()
254 mutex_lock(&tile->pcode.lock); in xe_pcode_init_min_freq_table()
296 mutex_lock(&tile->pcode.lock); in xe_pcode_ready()
[all …]
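
The xe_pcode.c hits all share one shape: every pcode mailbox access is bracketed by tile->pcode.lock. A minimal userspace sketch of that per-tile serialization follows; the struct layout and the do_mailbox_rw() helper are simplified stand-ins invented for illustration, not the driver's real definitions.

#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

/* Simplified stand-in for per-tile pcode state; not the real xe layout. */
struct tile_pcode {
	pthread_mutex_t lock;
	uint32_t last_mbox;
};

/* Hypothetical mailbox transaction; the real driver talks to hardware here. */
static int do_mailbox_rw(struct tile_pcode *pc, uint32_t mbox, uint32_t *data)
{
	pc->last_mbox = mbox;
	*data ^= 1u;	/* placeholder for a register round-trip */
	return 0;
}

/* Mirrors the shape of xe_pcode_read()/xe_pcode_write_timeout(): every
 * transaction is serialized on the tile's pcode lock. */
static int pcode_read(struct tile_pcode *pc, uint32_t mbox, uint32_t *val)
{
	int err;

	pthread_mutex_lock(&pc->lock);
	err = do_mailbox_rw(pc, mbox, val);
	pthread_mutex_unlock(&pc->lock);
	return err;
}

int main(void)
{
	struct tile_pcode pc = { .lock = PTHREAD_MUTEX_INITIALIZER };
	uint32_t val = 0;

	pcode_read(&pc, 0x5, &val);
	printf("mbox 0x%x -> 0x%x\n", (unsigned)pc.last_mbox, (unsigned)val);
	return 0;
}
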
xe_irq.c
432 struct xe_tile *tile; in dg1_irq_handler() local
546 gt_irq_reset(tile); in xelp_irq_reset()
559 gt_irq_reset(tile); in dg1_irq_reset()
576 struct xe_tile *tile; in vf_irq_reset() local
590 gt_irq_reset(tile); in vf_irq_reset()
596 struct xe_tile *tile; in xe_irq_reset() local
631 struct xe_tile *tile; in vf_irq_postinstall() local
676 struct xe_tile *tile; in vf_mem_irq_handler() local
849 struct xe_tile *tile; in guc2host_irq_handler() local
869 struct xe_tile *tile; in xe_irq_msix_default_hwe_handler() local
[all …]
xe_vram.c
236 for_each_if(t->id < tile->id) in tile_vram_size()
278 struct xe_tile *tile; in vram_fini() local
286 for_each_tile(tile, xe, id) in vram_fini()
287 tile->mem.vram.mapping = NULL; in vram_fini()
300 struct xe_tile *tile; in xe_vram_probe() local
329 for_each_tile(tile, xe, id) { in xe_vram_probe()
338 if (!tile->mem.vram.io_size) { in xe_vram_probe()
347 if (tile->mem.vram.io_size < tile->mem.vram.usable_size) in xe_vram_probe()
350 tile->id, &tile->mem.vram.actual_physical_size, &tile->mem.vram.usable_size, &tile->mem.vram.io_si… in xe_vram_probe()
352 &tile->mem.vram.dpa_base, tile->mem.vram.dpa_base + (u64)tile->mem.vram.actual_physical_size, in xe_vram_probe()
[all …]
xe_ggtt.c
108 struct xe_tile *tile = ggtt->tile; in ggtt_update_access_counter() local
110 tile->primary_gt : tile->media_gt; in ggtt_update_access_counter()
146 xe_tile_assert(ggtt->tile, start < end); in xe_ggtt_clear()
173 ggtt->tile = tile; in xe_ggtt_alloc()
286 ggtt->pt_ops = (ggtt->tile->media_gt && in xe_ggtt_init_early()
383 xe = tile_to_xe(ggtt->tile); in xe_ggtt_node_remove()
533 struct xe_tile *tile = ggtt->tile; in xe_ggtt_assert_fit() local
537 xe_tile_assert(tile, start >= wopcm); in xe_ggtt_assert_fit()
558 struct xe_tile *tile __maybe_unused = ggtt->tile; in xe_ggtt_shift_nodes_locked()
738 u8 tile_id = ggtt->tile->id; in __xe_ggtt_insert_bo_at()
[all …]
xe_pt.c
65 u8 id = tile->id; in __xe_pt_empty_pte()
286 struct xe_tile *tile; member
708 .tile = tile, in xe_pt_stage_bind()
840 struct xe_tile *tile; member
905 .tile = tile, in xe_pt_zap_ptes()
948 .tile = tile, in xe_pt_zap_ptes_range()
1587 struct xe_tile *tile; member
1724 .tile = tile, in xe_pt_stage_unbind()
2362 ~BIT(tile->id)); in op_commit()
2424 .tile_id = tile->id, in xe_pt_update_ops_run()
[all …]
xe_tile_sysfs.c
27 struct xe_tile *tile = arg; in tile_sysfs_fini() local
29 kobject_put(tile->sysfs); in tile_sysfs_fini()
32 int xe_tile_sysfs_init(struct xe_tile *tile) in xe_tile_sysfs_init() argument
34 struct xe_device *xe = tile_to_xe(tile); in xe_tile_sysfs_init()
44 kt->tile = tile; in xe_tile_sysfs_init()
46 err = kobject_add(&kt->base, &dev->kobj, "tile%d", tile->id); in xe_tile_sysfs_init()
52 tile->sysfs = &kt->base; in xe_tile_sysfs_init()
54 err = xe_vram_freq_sysfs_init(tile); in xe_tile_sysfs_init()
58 return devm_add_action_or_reset(xe->drm.dev, tile_sysfs_fini, tile); in xe_tile_sysfs_init()
xe_tile.h
13 int xe_tile_init_early(struct xe_tile *tile, struct xe_device *xe, u8 id);
14 int xe_tile_init_noalloc(struct xe_tile *tile);
15 int xe_tile_init(struct xe_tile *tile);
17 void xe_tile_migrate_wait(struct xe_tile *tile);
20 static inline struct drm_pagemap *xe_tile_local_pagemap(struct xe_tile *tile) in xe_tile_local_pagemap() argument
22 return &tile->mem.vram.dpagemap; in xe_tile_local_pagemap()
25 static inline struct drm_pagemap *xe_tile_local_pagemap(struct xe_tile *tile) in xe_tile_local_pagemap() argument
31 static inline bool xe_tile_is_root(struct xe_tile *tile) in xe_tile_is_root() argument
33 return tile->id == 0; in xe_tile_is_root()
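
Taken together with the xe_tile.c hits above, these declarations suggest a two-stage tile bring-up: xe_tile_init_early() does the per-tile allocations (GGTT, primary GT) and pcode setup, xe_tile_init_noalloc() later registers VRAM, and tile 0 is the root tile. The standalone sketch below models that ordering with invented stand-in types and helpers; it is an inference from this results page, not the driver's actual code.

#include <stdbool.h>
#include <stdio.h>

/* Invented stand-in for struct xe_tile; only tracks bring-up state. */
struct fake_tile {
	int id;
	bool ggtt_ready, gt_ready, pcode_ready, vram_registered;
};

static int tile_alloc(struct fake_tile *t)     { t->ggtt_ready = true; return 0; }
static int gt_alloc(struct fake_tile *t)       { t->gt_ready = true; return 0; }
static void pcode_init(struct fake_tile *t)    { t->pcode_ready = true; }
static void devm_add_vram(struct fake_tile *t) { t->vram_registered = true; }

/* Models xe_tile_init_early(): per-tile allocations plus pcode setup. */
static int tile_init_early(struct fake_tile *t, int id)
{
	int err;

	t->id = id;
	err = tile_alloc(t);
	if (err)
		return err;
	err = gt_alloc(t);
	if (err)
		return err;
	pcode_init(t);
	return 0;
}

/* Models xe_tile_init_noalloc(): the later stage that registers VRAM. */
static int tile_init_noalloc(struct fake_tile *t)
{
	devm_add_vram(t);
	return 0;
}

int main(void)
{
	struct fake_tile t = { 0 };

	if (!tile_init_early(&t, 0) && !tile_init_noalloc(&t))
		printf("tile%d ready (root tile: %s)\n",
		       t.id, t.id == 0 ? "yes" : "no");
	return 0;
}
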
xe_bo_evict.c
153 struct xe_tile *tile; in xe_bo_evict_all() local
172 for_each_tile(tile, xe, id) in xe_bo_evict_all()
173 xe_tile_migrate_wait(tile); in xe_bo_evict_all()
193 struct xe_tile *tile; in xe_bo_restore_and_map_ggtt() local
197 if (tile != bo->tile && !(bo->flags & XE_BO_FLAG_GGTTx(tile))) in xe_bo_restore_and_map_ggtt()
244 struct xe_tile *tile; in xe_bo_restore_late() local
251 for_each_tile(tile, xe, id) in xe_bo_restore_late()
252 xe_tile_migrate_wait(tile); in xe_bo_restore_late()
266 for_each_tile(tile, xe, id) in xe_bo_restore_late()
274 struct xe_tile *tile; in xe_bo_pci_dev_remove_pinned() local
[all …]
xe_svm.c
122 struct xe_tile *tile; in xe_svm_range_notifier_event_begin() local
145 for_each_tile(tile, xe, id) in xe_svm_range_notifier_event_begin()
340 struct xe_tile *tile; in xe_svm_copy() local
373 tile = vr_to_tile(vr); in xe_svm_copy()
405 vm_dbg(&tile->xe->drm, in xe_svm_copy()
413 vm_dbg(&tile->xe->drm, in xe_svm_copy()
441 vm_dbg(&tile->xe->drm, in xe_svm_copy()
448 vm_dbg(&tile->xe->drm, in xe_svm_copy()
614 struct xe_tile *tile, in xe_svm_range_is_valid() argument
688 return &tile->mem.vram; in tile_to_vr()
[all …]
/drivers/gpu/drm/nouveau/nvkm/subdev/fb/
nv20.c
35 tile->pitch = pitch; in nv20_fb_tile_init()
37 fb->func->tile.comp(fb, i, size, flags, tile); in nv20_fb_tile_init()
38 tile->addr |= 2; in nv20_fb_tile_init()
44 struct nvkm_fb_tile *tile) in nv20_fb_tile_comp() argument
51 tile->zcomp |= tile->tag->offset; in nv20_fb_tile_comp()
54 tile->zcomp |= 0x08000000; in nv20_fb_tile_comp()
62 tile->addr = 0; in nv20_fb_tile_fini()
63 tile->limit = 0; in nv20_fb_tile_fini()
64 tile->pitch = 0; in nv20_fb_tile_fini()
65 tile->zcomp = 0; in nv20_fb_tile_fini()
[all …]
nv30.c
35 tile->addr = (0 << 4); in nv30_fb_tile_init()
38 fb->func->tile.comp(fb, i, size, flags, tile); in nv30_fb_tile_init()
39 tile->addr = (1 << 4); in nv30_fb_tile_init()
43 tile->addr |= addr; in nv30_fb_tile_init()
45 tile->pitch = pitch; in nv30_fb_tile_init()
50 struct nvkm_fb_tile *tile) in nv30_fb_tile_comp() argument
57 tile->zcomp |= ((tile->tag->offset ) >> 6); in nv30_fb_tile_comp()
58 tile->zcomp |= ((tile->tag->offset + tags - 1) >> 6) << 12; in nv30_fb_tile_comp()
60 tile->zcomp |= 0x10000000; in nv30_fb_tile_comp()
121 .tile.regions = 8,
[all …]
nv10.c
33 tile->addr = 0x80000000 | addr; in nv10_fb_tile_init()
34 tile->limit = max(1u, addr + size) - 1; in nv10_fb_tile_init()
35 tile->pitch = pitch; in nv10_fb_tile_init()
41 tile->addr = 0; in nv10_fb_tile_fini()
42 tile->limit = 0; in nv10_fb_tile_fini()
43 tile->pitch = 0; in nv10_fb_tile_fini()
44 tile->zcomp = 0; in nv10_fb_tile_fini()
59 .tile.regions = 8,
60 .tile.init = nv10_fb_tile_init,
61 .tile.fini = nv10_fb_tile_fini,
[all …]
nv35.c
31 struct nvkm_fb_tile *tile) in nv35_fb_tile_comp() argument
36 if (flags & 2) tile->zcomp |= 0x04000000; /* Z16 */ in nv35_fb_tile_comp()
37 else tile->zcomp |= 0x08000000; /* Z24S8 */ in nv35_fb_tile_comp()
38 tile->zcomp |= ((tile->tag->offset ) >> 6); in nv35_fb_tile_comp()
39 tile->zcomp |= ((tile->tag->offset + tags - 1) >> 6) << 13; in nv35_fb_tile_comp()
41 tile->zcomp |= 0x40000000; in nv35_fb_tile_comp()
50 .tile.regions = 8,
51 .tile.init = nv30_fb_tile_init,
52 .tile.comp = nv35_fb_tile_comp,
53 .tile.fini = nv20_fb_tile_fini,
[all …]
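
The nv20 through nv40 variants differ mainly in how the compression-tag range is packed into tile->zcomp. Using the nv35 hits above as the example (format flag, range start >> 6 in the low bits, range end shifted up to bit 13), here is a small standalone illustration; the input values are made up, and the 0x40000000 bit from the last hit is left out.

#include <stdint.h>
#include <stdio.h>

/* Packs a compression-tag range the way the nv35 hits above do:
 * format flag, range start (>> 6) in the low bits, range end shifted
 * up to bit 13. */
static uint32_t nv35_style_zcomp(uint32_t tag_offset, uint32_t tags, int z16)
{
	uint32_t zcomp = z16 ? 0x04000000u : 0x08000000u;	/* Z16 vs Z24S8 */

	zcomp |= tag_offset >> 6;				/* range start */
	zcomp |= ((tag_offset + tags - 1) >> 6) << 13;		/* range end   */
	return zcomp;
}

int main(void)
{
	/* e.g. a hypothetical tag range of 0x1000 tags starting at 0x4000 */
	printf("zcomp = 0x%08x\n", (unsigned)nv35_style_zcomp(0x4000, 0x1000, 0));
	return 0;
}
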
nv36.c
31 struct nvkm_fb_tile *tile) in nv36_fb_tile_comp() argument
36 if (flags & 2) tile->zcomp |= 0x10000000; /* Z16 */ in nv36_fb_tile_comp()
37 else tile->zcomp |= 0x20000000; /* Z24S8 */ in nv36_fb_tile_comp()
38 tile->zcomp |= ((tile->tag->offset ) >> 6); in nv36_fb_tile_comp()
39 tile->zcomp |= ((tile->tag->offset + tags - 1) >> 6) << 14; in nv36_fb_tile_comp()
41 tile->zcomp |= 0x80000000; in nv36_fb_tile_comp()
50 .tile.regions = 8,
51 .tile.init = nv30_fb_tile_init,
52 .tile.comp = nv36_fb_tile_comp,
53 .tile.fini = nv20_fb_tile_fini,
[all …]
nv40.c
31 struct nvkm_fb_tile *tile) in nv40_fb_tile_comp() argument
36 !nvkm_mm_head(&fb->tags.mm, 0, 1, tags, tags, 1, &tile->tag)) { in nv40_fb_tile_comp()
37 tile->zcomp = 0x28000000; /* Z24S8_SPLIT_GRAD */ in nv40_fb_tile_comp()
38 tile->zcomp |= ((tile->tag->offset ) >> 8); in nv40_fb_tile_comp()
39 tile->zcomp |= ((tile->tag->offset + tags - 1) >> 8) << 13; in nv40_fb_tile_comp()
41 tile->zcomp |= 0x40000000; in nv40_fb_tile_comp()
56 .tile.regions = 8,
57 .tile.init = nv30_fb_tile_init,
58 .tile.comp = nv40_fb_tile_comp,
59 .tile.fini = nv20_fb_tile_fini,
[all …]
nv44.c
31 u32 flags, struct nvkm_fb_tile *tile) in nv44_fb_tile_init() argument
33 tile->addr = 0x00000001; /* mode = vram */ in nv44_fb_tile_init()
34 tile->addr |= addr; in nv44_fb_tile_init()
35 tile->limit = max(1u, addr + size) - 1; in nv44_fb_tile_init()
36 tile->pitch = pitch; in nv44_fb_tile_init()
43 nvkm_wr32(device, 0x100604 + (i * 0x10), tile->limit); in nv44_fb_tile_prog()
45 nvkm_wr32(device, 0x100600 + (i * 0x10), tile->addr); in nv44_fb_tile_prog()
60 .tile.regions = 12,
61 .tile.init = nv44_fb_tile_init,
62 .tile.fini = nv20_fb_tile_fini,
[all …]
nv25.c
31 struct nvkm_fb_tile *tile) in nv25_fb_tile_comp() argument
35 if (!nvkm_mm_head(&fb->tags.mm, 0, 1, tags, tags, 1, &tile->tag)) { in nv25_fb_tile_comp()
36 if (!(flags & 2)) tile->zcomp = 0x00100000; /* Z16 */ in nv25_fb_tile_comp()
37 else tile->zcomp = 0x00200000; /* Z24S8 */ in nv25_fb_tile_comp()
38 tile->zcomp |= tile->tag->offset; in nv25_fb_tile_comp()
40 tile->zcomp |= 0x01000000; in nv25_fb_tile_comp()
48 .tile.regions = 8,
49 .tile.init = nv20_fb_tile_init,
50 .tile.comp = nv25_fb_tile_comp,
51 .tile.fini = nv20_fb_tile_fini,
[all …]
nv46.c
31 u32 flags, struct nvkm_fb_tile *tile) in nv46_fb_tile_init() argument
34 if (!(flags & 4)) tile->addr = (0 << 3); in nv46_fb_tile_init()
35 else tile->addr = (1 << 3); in nv46_fb_tile_init()
37 tile->addr |= 0x00000001; /* mode = vram */ in nv46_fb_tile_init()
38 tile->addr |= addr; in nv46_fb_tile_init()
39 tile->limit = max(1u, addr + size) - 1; in nv46_fb_tile_init()
40 tile->pitch = pitch; in nv46_fb_tile_init()
46 .tile.regions = 15,
47 .tile.init = nv46_fb_tile_init,
48 .tile.fini = nv20_fb_tile_fini,
[all …]
nv41.c
30 nv41_fb_tile_prog(struct nvkm_fb *fb, int i, struct nvkm_fb_tile *tile) in nv41_fb_tile_prog() argument
33 nvkm_wr32(device, 0x100604 + (i * 0x10), tile->limit); in nv41_fb_tile_prog()
34 nvkm_wr32(device, 0x100608 + (i * 0x10), tile->pitch); in nv41_fb_tile_prog()
35 nvkm_wr32(device, 0x100600 + (i * 0x10), tile->addr); in nv41_fb_tile_prog()
37 nvkm_wr32(device, 0x100700 + (i * 0x04), tile->zcomp); in nv41_fb_tile_prog()
50 .tile.regions = 12,
51 .tile.init = nv30_fb_tile_init,
52 .tile.comp = nv40_fb_tile_comp,
53 .tile.fini = nv20_fb_tile_fini,
54 .tile.prog = nv41_fb_tile_prog,
base.c
35 nvkm_fb_tile_fini(struct nvkm_fb *fb, int region, struct nvkm_fb_tile *tile) in nvkm_fb_tile_fini() argument
37 fb->func->tile.fini(fb, region, tile); in nvkm_fb_tile_fini()
42 u32 pitch, u32 flags, struct nvkm_fb_tile *tile) in nvkm_fb_tile_init() argument
44 fb->func->tile.init(fb, region, addr, size, pitch, flags, tile); in nvkm_fb_tile_init()
51 if (fb->func->tile.prog) { in nvkm_fb_tile_prog()
52 fb->func->tile.prog(fb, region, tile); in nvkm_fb_tile_prog()
201 for (i = 0; i < fb->tile.regions; i++) in nvkm_fb_init()
202 fb->func->tile.prog(fb, i, &fb->tile.region[i]); in nvkm_fb_init()
240 for (i = 0; i < fb->tile.regions; i++) in nvkm_fb_dtor()
241 fb->func->tile.fini(fb, i, &fb->tile.region[i]); in nvkm_fb_dtor()
[all …]
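
base.c is the dispatch side of the per-chipset files above: common code calls through a fb->func->tile table (a regions count plus init/comp/fini/prog hooks), looping over fb->tile.regions at init and teardown. The sketch below models that ops-table pattern with simplified stand-in types, loosely based on the nv10 hits; it illustrates the shape only and is not the nvkm definitions.

#include <stdint.h>
#include <stdio.h>

/* Simplified stand-ins for nvkm_fb / nvkm_fb_tile; real fields differ. */
struct fb_tile { uint32_t addr, limit, pitch, zcomp; };
struct fb;

struct fb_func {
	struct {
		int regions;
		void (*init)(struct fb *, int i, uint32_t addr, uint32_t size,
			     uint32_t pitch, struct fb_tile *);
		void (*fini)(struct fb *, int i, struct fb_tile *);
		void (*prog)(struct fb *, int i, struct fb_tile *);
	} tile;
};

struct fb {
	const struct fb_func *func;
	struct fb_tile region[8];
};

/* Example hooks, loosely modeled on the nv10 hits above. */
static void chip_tile_init(struct fb *fb, int i, uint32_t addr, uint32_t size,
			   uint32_t pitch, struct fb_tile *tile)
{
	tile->addr  = 0x80000000u | addr;
	tile->limit = (addr + size ? addr + size : 1u) - 1;
	tile->pitch = pitch;
}

static void chip_tile_fini(struct fb *fb, int i, struct fb_tile *tile)
{
	tile->addr = tile->limit = tile->pitch = tile->zcomp = 0;
}

static void chip_tile_prog(struct fb *fb, int i, struct fb_tile *tile)
{
	printf("region %d: addr=%08x limit=%08x pitch=%u\n", i,
	       (unsigned)tile->addr, (unsigned)tile->limit, (unsigned)tile->pitch);
}

static const struct fb_func chip_func = {
	.tile.regions = 8,
	.tile.init = chip_tile_init,
	.tile.fini = chip_tile_fini,
	.tile.prog = chip_tile_prog,
};

/* Mirrors the nvkm_fb_init() loop: program every region via the table. */
static void fb_init(struct fb *fb)
{
	for (int i = 0; i < fb->func->tile.regions; i++)
		fb->func->tile.prog(fb, i, &fb->region[i]);
}

int main(void)
{
	struct fb fb = { .func = &chip_func };

	fb.func->tile.init(&fb, 0, 0x100000, 0x80000, 256, &fb.region[0]);
	fb_init(&fb);
	fb.func->tile.fini(&fb, 0, &fb.region[0]);
	return 0;
}
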
/drivers/gpu/drm/nouveau/nvkm/engine/gr/
nv44.c
44 nvkm_wr32(device, NV20_PGRAPH_TSIZE(i), tile->pitch); in nv44_gr_tile()
45 nvkm_wr32(device, NV20_PGRAPH_TLIMIT(i), tile->limit); in nv44_gr_tile()
46 nvkm_wr32(device, NV20_PGRAPH_TILE(i), tile->addr); in nv44_gr_tile()
53 nvkm_wr32(device, NV47_PGRAPH_TSIZE(i), tile->pitch); in nv44_gr_tile()
54 nvkm_wr32(device, NV47_PGRAPH_TLIMIT(i), tile->limit); in nv44_gr_tile()
55 nvkm_wr32(device, NV47_PGRAPH_TILE(i), tile->addr); in nv44_gr_tile()
58 nvkm_wr32(device, NV40_PGRAPH_TILE1(i), tile->addr); in nv44_gr_tile()
61 nvkm_wr32(device, NV20_PGRAPH_TSIZE(i), tile->pitch); in nv44_gr_tile()
63 nvkm_wr32(device, NV20_PGRAPH_TILE(i), tile->addr); in nv44_gr_tile()
66 nvkm_wr32(device, NV40_PGRAPH_TILE1(i), tile->addr); in nv44_gr_tile()
[all …]
/drivers/gpu/drm/i915/gem/selftests/
i915_gem_mman.c
52 v = div64_u64_rem(y, tile->height, &y) * tile->stride * tile->height; in tiled_offset()
93 const struct tile *tile, in check_partial_mapping() argument
111 tile->tiling, tile->stride, err); in check_partial_mapping()
186 const struct tile *tile, in check_partial_mappings() argument
199 tile->tiling, tile->stride, err); in check_partial_mappings()
352 struct tile tile; in igt_partial_tiling() local
370 struct tile tile; in igt_partial_tiling() local
398 tile.stride = tile.width * pitch; in igt_partial_tiling()
426 tile.stride = tile.width * pitch; in igt_partial_tiling()
489 struct tile tile; in igt_smoke_tiling() local
[all …]
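
The tiled_offset() hit condenses the coarse part of the tiling math into one line: divide y by the tile height to get the tile row (the remainder becomes the row inside that tile), then scale the tile row by stride * height bytes. A tiny worked illustration with invented geometry follows; it covers only that single visible line, not the rest of the i915 swizzle math.

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* Invented geometry; the real selftest derives these per tiling mode. */
	uint64_t stride = 512, height = 32;
	uint64_t y = 100;

	uint64_t tile_row    = y / height;   /* div64_u64_rem() quotient      */
	uint64_t row_in_tile = y % height;   /* remainder written back into y */
	uint64_t v = tile_row * stride * height;

	printf("tile_row=%llu row_in_tile=%llu tile-row byte offset=%llu\n",
	       (unsigned long long)tile_row,
	       (unsigned long long)row_in_tile,
	       (unsigned long long)v);
	return 0;
}
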
/drivers/gpu/ipu-v3/
ipu-image-convert.c
863 tile = &image->tile[i]; in calc_tile_dimensions()
869 tile->stride = tile->width; in calc_tile_dimensions()
870 tile->rot_stride = tile->height; in calc_tile_dimensions()
883 tile->width, tile->height, tile->left, tile->top); in calc_tile_dimensions()
994 top = image->tile[tile].top; in calc_tile_offsets_planar()
1012 image->tile[tile].offset = y_off; in calc_tile_offsets_planar()
1013 image->tile[tile].u_off = u_off; in calc_tile_offsets_planar()
1055 image->tile[tile].u_off = 0; in calc_tile_offsets_packed()
1056 image->tile[tile++].v_off = 0; in calc_tile_offsets_packed()
1407 s_image->tile[tile].width, in convert_start()
[all …]

Completed in 792 milliseconds
