/drivers/gpu/drm/i915/selftests/
i915_syncmap.c
    104  if (*sync) {  in check_syncmap_free()
    173  context, (*sync)->height, (*sync)->prefix);  in check_one()
    185  (*sync)->bitmap, hweight32((*sync)->bitmap));  in check_one()
    189  err = check_seqno((*sync), ilog2((*sync)->bitmap), seqno);  in check_one()
    248  context, (*sync)->height, (*sync)->prefix);  in check_leaf()
    258  err = check_seqno((*sync), ilog2((*sync)->bitmap), seqno);  in check_leaf()
    427  context, sync->height, sync->prefix);  in igt_syncmap_neighbours()
    435  sync->bitmap, hweight32(sync->bitmap),  in igt_syncmap_neighbours()
    478  sync->height, sync->prefix);  in igt_syncmap_compact()
    484  sync = sync->parent;  in igt_syncmap_compact()
    [all …]
|
/drivers/gpu/drm/xe/
xe_sync.c
    146  sync->fence = drm_syncobj_fence_get(sync->syncobj);  in xe_sync_entry_parse()
    171  sync->fence = drm_syncobj_fence_get(sync->syncobj);  in xe_sync_entry_parse()
    217  if (sync->fence)  in xe_sync_entry_add_deps()
    229  if (sync->chain_fence) {  in xe_sync_entry_signal()
    230  drm_syncobj_add_point(sync->syncobj, sync->chain_fence,  in xe_sync_entry_signal()
    258  if (sync->syncobj)  in xe_sync_entry_cleanup()
    262  if (sync->ufence)  in xe_sync_entry_cleanup()
    293  if (sync[i].fence) {  in xe_sync_in_fence_get()
    295  fence = sync[i].fence;  in xe_sync_in_fence_get()
    310  if (sync[i].fence) {  in xe_sync_in_fence_get()
    [all …]

xe_sync.h
    22   struct xe_sync_entry *sync,
    25   int xe_sync_entry_add_deps(struct xe_sync_entry *sync,
    27   void xe_sync_entry_signal(struct xe_sync_entry *sync,
    29   void xe_sync_entry_cleanup(struct xe_sync_entry *sync);
    31   xe_sync_in_fence_get(struct xe_sync_entry *sync, int num_sync,
    34   static inline bool xe_sync_is_ufence(struct xe_sync_entry *sync)  in xe_sync_is_ufence() argument
    36   return !!sync->ufence;  in xe_sync_is_ufence()
    40   struct xe_user_fence *xe_sync_ufence_get(struct xe_sync_entry *sync);
|
/drivers/clk/tegra/
clk-audio-sync.c
    15   struct tegra_clk_sync_source *sync = to_clk_sync_source(hw);  in clk_sync_source_recalc_rate() local
    17   return sync->rate;  in clk_sync_source_recalc_rate()
    25   if (rate > sync->max_rate)  in clk_sync_source_round_rate()
    36   sync->rate = rate;  in clk_sync_source_set_rate()
    49   struct tegra_clk_sync_source *sync;  in tegra_clk_register_sync_source() local
    53   sync = kzalloc(sizeof(*sync), GFP_KERNEL);  in tegra_clk_register_sync_source()
    54   if (!sync) {  in tegra_clk_register_sync_source()
    59   sync->max_rate = max_rate;  in tegra_clk_register_sync_source()
    68   sync->hw.init = &init;  in tegra_clk_register_sync_source()
    70   clk = clk_register(NULL, &sync->hw);  in tegra_clk_register_sync_source()
    [all …]
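
The hits above outline the whole Tegra audio sync-source clock: recalc_rate reports a software-cached rate, round_rate checks the request against max_rate, set_rate only stores the new value, and tegra_clk_register_sync_source() allocates the wrapper and hands its clk_hw to clk_register(). Below is a minimal sketch of that clk_ops pattern; the struct layout, helper names, and the rejection of out-of-range rates are simplifying assumptions, not copies of the driver.

    #include <linux/clk-provider.h>
    #include <linux/errno.h>

    /* Simplified stand-in for struct tegra_clk_sync_source (layout assumed). */
    struct sync_source_clk {
        struct clk_hw hw;
        unsigned long rate;
        unsigned long max_rate;
    };

    #define to_sync_source(_hw) container_of(_hw, struct sync_source_clk, hw)

    static unsigned long sync_source_recalc_rate(struct clk_hw *hw,
                                                 unsigned long parent_rate)
    {
        /* No divider to read back; report the software-cached rate. */
        return to_sync_source(hw)->rate;
    }

    static long sync_source_round_rate(struct clk_hw *hw, unsigned long rate,
                                       unsigned long *parent_rate)
    {
        struct sync_source_clk *sync = to_sync_source(hw);

        /* Refuse anything above the ceiling given at registration time. */
        if (rate > sync->max_rate)
            return -EINVAL;
        return rate;
    }

    static int sync_source_set_rate(struct clk_hw *hw, unsigned long rate,
                                    unsigned long parent_rate)
    {
        /* Nothing to program: the source is driven externally. */
        to_sync_source(hw)->rate = rate;
        return 0;
    }

    static const struct clk_ops sync_source_ops = {
        .recalc_rate = sync_source_recalc_rate,
        .round_rate  = sync_source_round_rate,
        .set_rate    = sync_source_set_rate,
    };

The registration path in the listing then fills a struct clk_init_data, points sync->hw.init at it, and registers with clk_register(NULL, &sync->hw).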
|
/drivers/gpu/drm/radeon/
radeon_sync.c
    46   sync->semaphores[i] = NULL;  in radeon_sync_create()
    49   sync->sync_to[i] = NULL;  in radeon_sync_create()
    51   sync->last_vm_update = NULL;  in radeon_sync_create()
    62   void radeon_sync_fence(struct radeon_sync *sync,  in radeon_sync_fence() argument
    70   other = sync->sync_to[fence->ring];  in radeon_sync_fence()
    74   other = sync->last_vm_update;  in radeon_sync_fence()
    90   struct radeon_sync *sync,  in radeon_sync_resv() argument
    102  radeon_sync_fence(sync, fence);  in radeon_sync_resv()
    122  struct radeon_sync *sync,  in radeon_sync_rings() argument
    153  sync->semaphores[count++] = semaphore;  in radeon_sync_rings()
    [all …]
|
rv770_dma.c
    48   struct radeon_sync sync;  in rv770_copy_dma() local
    55   radeon_sync_create(&sync);  in rv770_copy_dma()
    62   radeon_sync_free(rdev, &sync, NULL);  in rv770_copy_dma()
    66   radeon_sync_resv(rdev, &sync, resv, false);  in rv770_copy_dma()
    67   radeon_sync_rings(rdev, &sync, ring->idx);  in rv770_copy_dma()
    86   radeon_sync_free(rdev, &sync, NULL);  in rv770_copy_dma()
    91   radeon_sync_free(rdev, &sync, fence);  in rv770_copy_dma()
|
evergreen_dma.c
    113  struct radeon_sync sync;  in evergreen_copy_dma() local
    120  radeon_sync_create(&sync);  in evergreen_copy_dma()
    127  radeon_sync_free(rdev, &sync, NULL);  in evergreen_copy_dma()
    131  radeon_sync_resv(rdev, &sync, resv, false);  in evergreen_copy_dma()
    132  radeon_sync_rings(rdev, &sync, ring->idx);  in evergreen_copy_dma()
    151  radeon_sync_free(rdev, &sync, NULL);  in evergreen_copy_dma()
    156  radeon_sync_free(rdev, &sync, fence);  in evergreen_copy_dma()
|
si_dma.c
    236  struct radeon_sync sync;  in si_copy_dma() local
    243  radeon_sync_create(&sync);  in si_copy_dma()
    250  radeon_sync_free(rdev, &sync, NULL);  in si_copy_dma()
    254  radeon_sync_resv(rdev, &sync, resv, false);  in si_copy_dma()
    255  radeon_sync_rings(rdev, &sync, ring->idx);  in si_copy_dma()
    274  radeon_sync_free(rdev, &sync, NULL);  in si_copy_dma()
    279  radeon_sync_free(rdev, &sync, fence);  in si_copy_dma()
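
rv770_dma.c, evergreen_dma.c and si_dma.c all wrap their DMA copy in the same radeon_sync lifecycle: create the sync object, collect fences from the buffer's reservation object, make the target ring wait on them, and free the object with either NULL (error paths) or the freshly emitted fence. Below is a hedged condensation of that flow; the function name, the ring-sizing argument and the ASIC-specific copy packets are illustrative, not taken from the drivers.

    #include "radeon.h"  /* driver-internal header, shown for completeness */

    static struct radeon_fence *example_copy_dma(struct radeon_device *rdev,
                                                 struct radeon_ring *ring,
                                                 struct dma_resv *resv,
                                                 unsigned int num_dw)
    {
        struct radeon_fence *fence;
        struct radeon_sync sync;
        int r;

        radeon_sync_create(&sync);

        r = radeon_ring_lock(rdev, ring, num_dw);
        if (r) {
            radeon_sync_free(rdev, &sync, NULL);      /* error path: no fence */
            return ERR_PTR(r);
        }

        /* Pull every relevant fence out of the reservation object ... */
        radeon_sync_resv(rdev, &sync, resv, false);
        /* ... and make this ring wait for them before the copy starts. */
        radeon_sync_rings(rdev, &sync, ring->idx);

        /* ASIC-specific copy packets would be emitted here. */

        r = radeon_fence_emit(rdev, &fence, ring->idx);
        if (r) {
            radeon_ring_unlock_undo(rdev, ring);
            radeon_sync_free(rdev, &sync, NULL);
            return ERR_PTR(r);
        }

        radeon_ring_unlock_commit(rdev, ring, false);
        radeon_sync_free(rdev, &sync, fence);         /* success: keep the new fence */
        return fence;
    }

radeon_ib.c below shows the same object embedded in an indirect buffer, where the ring synchronization happens from radeon_ib_schedule() instead.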
|
radeon_ib.c
    72   radeon_sync_create(&ib->sync);  in radeon_ib_get()
    101  radeon_sync_free(rdev, &ib->sync, ib->fence);  in radeon_ib_free()
    150  radeon_sync_fence(&ib->sync, vm_id_fence);  in radeon_ib_schedule()
    154  r = radeon_sync_rings(rdev, &ib->sync, ib->ring);  in radeon_ib_schedule()
    163  ib->sync.last_vm_update);  in radeon_ib_schedule()
    167  radeon_sync_free(rdev, &const_ib->sync, NULL);  in radeon_ib_schedule()
|
/drivers/gpu/drm/amd/amdgpu/
amdgpu_sync.c
    52   void amdgpu_sync_create(struct amdgpu_sync *sync)  in amdgpu_sync_create() argument
    54   hash_init(sync->fences);  in amdgpu_sync_create()
    169  if (amdgpu_sync_add_later(sync, f))  in amdgpu_sync_fence()
    176  hash_add(sync->fences, &e->node, f->context);  in amdgpu_sync_fence()
    258  r = amdgpu_sync_fence(sync, f, GFP_KERNEL);  in amdgpu_sync_resv()
    290  r = amdgpu_sync_fence(sync, f, GFP_KERNEL);  in amdgpu_sync_kfd()
    323  hash_for_each_safe(sync->fences, i, tmp, e, node) {  in amdgpu_sync_peek_fence()
    363  hash_for_each_safe(sync->fences, i, tmp, e, node) {  in amdgpu_sync_get_fence()
    441  hash_for_each_safe(sync->fences, i, tmp, e, node) {  in amdgpu_sync_push_to_job()
    482  void amdgpu_sync_free(struct amdgpu_sync *sync)  in amdgpu_sync_free() argument
    [all …]
|
amdgpu_sync.h
    49   void amdgpu_sync_create(struct amdgpu_sync *sync);
    50   int amdgpu_sync_fence(struct amdgpu_sync *sync, struct dma_fence *f,
    52   int amdgpu_sync_resv(struct amdgpu_device *adev, struct amdgpu_sync *sync,
    55   int amdgpu_sync_kfd(struct amdgpu_sync *sync, struct dma_resv *resv);
    56   struct dma_fence *amdgpu_sync_peek_fence(struct amdgpu_sync *sync,
    58   struct dma_fence *amdgpu_sync_get_fence(struct amdgpu_sync *sync);
    61   int amdgpu_sync_push_to_job(struct amdgpu_sync *sync, struct amdgpu_job *job);
    62   int amdgpu_sync_wait(struct amdgpu_sync *sync, bool intr);
    63   void amdgpu_sync_free(struct amdgpu_sync *sync);
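
amdgpu_sync.h shows the complete container API: amdgpu_sync_create() initialises a hash of fences, amdgpu_sync_fence()/_resv()/_kfd() fill it, and the set is then waited on, drained with get_fence()/peek_fence(), pushed into a job, or freed. A minimal hedged sketch of the create/fill/wait/free path, using only calls visible above (the helper name and error handling are illustrative):

    #include "amdgpu.h"  /* driver-internal headers, shown for completeness */
    #include "amdgpu_sync.h"

    static int example_sync_wait_on(struct dma_fence *fence)
    {
        struct amdgpu_sync sync;
        int r;

        amdgpu_sync_create(&sync);

        /* GFP_KERNEL matches the call sites in amdgpu_sync_resv()/_kfd(). */
        r = amdgpu_sync_fence(&sync, fence, GFP_KERNEL);
        if (!r)
            r = amdgpu_sync_wait(&sync, true);  /* intr = true */

        amdgpu_sync_free(&sync);
        return r;
    }

Submission paths avoid the blocking wait and instead hand the collected fences to the scheduler through amdgpu_sync_push_to_job(), as the amdgpu_sync.c hits above suggest.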
|
amdgpu_amdkfd_gpuvm.c
    1116 struct amdgpu_sync *sync;  member
    1141 ctx->sync = &mem->sync;  in reserve_bo_and_vm()
    1180 ctx->sync = &mem->sync;  in reserve_bo_and_cond_vms()
    1231 ctx->sync = NULL;  in unreserve_bo_and_vms()
    2160 struct amdgpu_sync sync;  in amdgpu_amdkfd_gpuvm_sync_memory() local
    2163 amdgpu_sync_create(&sync);  in amdgpu_amdkfd_gpuvm_sync_memory()
    2166 amdgpu_sync_clone(&mem->sync, &sync);  in amdgpu_amdkfd_gpuvm_sync_memory()
    2170 amdgpu_sync_free(&sync);  in amdgpu_amdkfd_gpuvm_sync_memory()
    2605 struct amdgpu_sync sync;  in validate_invalid_user_pages() local
    2613 amdgpu_sync_create(&sync);  in validate_invalid_user_pages()
    [all …]
|
/drivers/gpu/drm/sti/
sti_vtg.c
    212  sync->hsync = (stop << 16) | start;  in vtg_set_hsync_vsync_pos()
    230  sync->vsync_line_top = (fallsync_top << 16) | risesync_top;  in vtg_set_hsync_vsync_pos()
    234  sync->vsync_line_bot = sync->vsync_line_top;  in vtg_set_hsync_vsync_pos()
    235  sync->vsync_off_bot = sync->vsync_off_top;  in vtg_set_hsync_vsync_pos()
    240  struct sti_vtg_sync_params *sync,  in vtg_set_mode() argument
    258  vtg_set_hsync_vsync_pos(&sync[VTG_SYNC_ID_HDDCS - 1], 0, mode);  in vtg_set_mode()
    268  writel(sync[i].hsync,  in vtg_set_mode()
    270  writel(sync[i].vsync_line_top,  in vtg_set_mode()
    272  writel(sync[i].vsync_line_bot,  in vtg_set_mode()
    274  writel(sync[i].vsync_off_top,  in vtg_set_mode()
    [all …]
|
/drivers/gpu/drm/nouveau/
nouveau_sched.c
    39   job->sync = args->sync;  in nouveau_job_init()
    46   if (job->sync)  in nouveau_job_init()
    59   if (job->sync) {  in nouveau_job_init()
    141  struct drm_nouveau_sync *sync,  in sync_find_fence() argument
    153  point = sync->timeline_value;  in sync_find_fence()
    156  sync->handle, point,  in sync_find_fence()
    173  ret = sync_find_fence(job, sync, &in_fence);  in nouveau_job_add_deps()
    177  sync->handle);  in nouveau_job_add_deps()
    227  sync->handle);  in nouveau_job_fence_attach_prepare()
    262  sync->timeline_value);  in nouveau_job_fence_attach()
    [all …]
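
In nouveau_sched.c the user-supplied struct drm_nouveau_sync entries are resolved into dma_fences before submission: sync_find_fence() looks up the syncobj handle (plus the timeline_value point for timeline objects) and the result is added as a job dependency. A hedged sketch of that lookup using the generic DRM helper; the wrapper name and the flags value are assumptions:

    #include <drm/drm_file.h>
    #include <drm/drm_syncobj.h>
    #include <linux/dma-fence.h>

    /* Resolve a syncobj handle (and optional timeline point) to a fence. */
    static int example_find_in_fence(struct drm_file *file_priv,
                                     u32 handle, u64 point,
                                     struct dma_fence **fence)
    {
        /* flags = 0: fail rather than wait if the point has not been submitted. */
        return drm_syncobj_find_fence(file_priv, handle, point, 0, fence);
    }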
|
/drivers/iio/imu/
adis16475.c
    911  .sync = adis16475_sync_mode,
    930  .sync = adis16475_sync_mode,
    949  .sync = adis16475_sync_mode,
    968  .sync = adis16475_sync_mode,
    987  .sync = adis16475_sync_mode,
    1007 .sync = adis16475_sync_mode,
    1027 .sync = adis16475_sync_mode,
    1047 .sync = adis16475_sync_mode,
    1066 .sync = adis16475_sync_mode,
    1801 sync = &st->info->sync[sync_mode];  in adis16475_config_sync_mode()
    [all …]
|
/drivers/media/dvb-frontends/
ves1820.c
    233  int sync;  in ves1820_read_status() local
    236  sync = ves1820_readreg(state, 0x11);  in ves1820_read_status()
    238  if (sync & 1)  in ves1820_read_status()
    241  if (sync & 2)  in ves1820_read_status()
    244  if (sync & 2) /* XXX FIXME! */  in ves1820_read_status()
    247  if (sync & 4)  in ves1820_read_status()
    250  if (sync & 8)  in ves1820_read_status()
    307  int sync;  in ves1820_get_frontend() local
    310  sync = ves1820_readreg(state, 0x11);  in ves1820_get_frontend()
    314  printk(sync & 2 ? "ves1820: AFC (%d) %dHz\n" :  in ves1820_get_frontend()
    [all …]
|
tda10021.c
    312  int sync;  in tda10021_read_status() local
    320  sync = tda10021_readreg (state, 0x11);  in tda10021_read_status()
    322  if (sync & 2)  in tda10021_read_status()
    325  if (sync & 4)  in tda10021_read_status()
    328  if (sync & 8)  in tda10021_read_status()
    391  int sync;  in tda10021_get_frontend() local
    394  sync = tda10021_readreg(state, 0x11);  in tda10021_get_frontend()
    398  printk(sync & 2 ? "DVB: TDA10021(%d): AFC (%d) %dHz\n" :  in tda10021_get_frontend()
    410  if (sync & 2)  in tda10021_get_frontend()
|
ves1x93.c
    294  u8 sync = ves1x93_readreg (state, 0x0e);  in ves1x93_read_status() local
    306  while ((sync & 0x03) != 0x03 && (sync & 0x0c) && maxtry--) {  in ves1x93_read_status()
    308  sync = ves1x93_readreg (state, 0x0e);  in ves1x93_read_status()
    313  if (sync & 1)  in ves1x93_read_status()
    316  if (sync & 2)  in ves1x93_read_status()
    319  if (sync & 4)  in ves1x93_read_status()
    322  if (sync & 8)  in ves1x93_read_status()
    325  if ((sync & 0x1f) == 0x1f)  in ves1x93_read_status()
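
All three demodulators read one status register (0x11 or 0x0e) and fan its bits out into frontend status flags; ves1x93 additionally re-reads the register while the carrier and FEC lock bits disagree. The sketch below uses the standard enum fe_status values; the exact bit-to-flag mapping is an assumption rather than a quote from the drivers:

    #include <linux/types.h>
    #include <linux/dvb/frontend.h>

    /* Hypothetical decoder for a ves1x93-style sync/status byte. */
    static void decode_sync_byte(u8 sync, enum fe_status *status)
    {
        *status = 0;

        if (sync & 1)
            *status |= FE_HAS_SIGNAL;
        if (sync & 2)
            *status |= FE_HAS_CARRIER;
        if (sync & 4)
            *status |= FE_HAS_VITERBI;
        if (sync & 8)
            *status |= FE_HAS_SYNC;
        if ((sync & 0x1f) == 0x1f)
            *status |= FE_HAS_LOCK;
    }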
|
/drivers/gpu/drm/nouveau/dispnv50/
dac507d.c
    33   u32 sync = 0;  in dac507d_ctrl() local
    37   sync |= NVVAL(NV507D, DAC_SET_POLARITY, HSYNC, asyh->or.nhsync);  in dac507d_ctrl()
    38   sync |= NVVAL(NV507D, DAC_SET_POLARITY, VSYNC, asyh->or.nvsync);  in dac507d_ctrl()
    45   DAC_SET_POLARITY(or), sync);  in dac507d_ctrl()
|
/drivers/md/
dm-log.c
    237  enum sync {  enum
    241  } sync;  member
    372  enum sync sync = DEFAULTSYNC;  in create_log_context() local
    388  sync = FORCESYNC;  in create_log_context()
    390  sync = NOSYNC;  in create_log_context()
    417  lc->sync = sync;  in create_log_context()
    498  lc->sync_count = (sync == NOSYNC) ? region_count : 0;  in create_log_context()
    613  if (lc->sync == NOSYNC)  in disk_resume()
    789  if (lc->sync != DEFAULTSYNC) \
    790  DMEMIT("%ssync ", lc->sync == NOSYNC ? "no" : ""); \
    [all …]
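
The dm-log hits describe a small three-state policy enum: the log defaults to DEFAULTSYNC unless the constructor is passed an explicit "sync" or "nosync" word, and create_log_context() then seeds sync_count with region_count for NOSYNC (line 498 above), so no resynchronization is considered outstanding. A hedged stand-in for that argument scan; the helper is illustrative, not the dm-log code:

    #include <linux/string.h>

    enum sync { FORCESYNC, NOSYNC, DEFAULTSYNC };

    static enum sync parse_sync_arg(unsigned int argc, char **argv)
    {
        enum sync sync = DEFAULTSYNC;
        unsigned int i;

        for (i = 0; i < argc; i++) {
            if (!strcmp(argv[i], "sync"))
                sync = FORCESYNC;
            else if (!strcmp(argv[i], "nosync"))
                sync = NOSYNC;
        }
        return sync;
    }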
|
/drivers/net/
ifb.c
    44   struct u64_stats_sync sync;  member
    83   u64_stats_update_begin(&stats->sync);  in ifb_update_q_stats()
    86   u64_stats_update_end(&stats->sync);  in ifb_update_q_stats()
    165  start = u64_stats_fetch_begin(&txp->rx_stats.sync);  in ifb_stats64()
    168  } while (u64_stats_fetch_retry(&txp->rx_stats.sync, start));  in ifb_stats64()
    173  start = u64_stats_fetch_begin(&txp->tx_stats.sync);  in ifb_stats64()
    176  } while (u64_stats_fetch_retry(&txp->tx_stats.sync, start));  in ifb_stats64()
    199  u64_stats_init(&txp->rx_stats.sync);  in ifb_dev_init()
    200  u64_stats_init(&txp->tx_stats.sync);  in ifb_dev_init()
    248  start = u64_stats_fetch_begin(&q_stats->sync);  in ifb_fill_stats_data()
    [all …]
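
ifb.c is the textbook u64_stats_sync pattern: per-queue counters are initialised once with u64_stats_init(), bumped inside an update_begin/update_end pair on the datapath, and read back through the fetch_begin/fetch_retry loop so 64-bit values stay consistent on 32-bit SMP kernels. A generic hedged sketch (the struct and field names are illustrative, not the ifb ones):

    #include <linux/types.h>
    #include <linux/u64_stats_sync.h>

    struct example_q_stats {
        u64 packets;
        u64 bytes;
        struct u64_stats_sync sync;  /* initialise once with u64_stats_init() */
    };

    /* Writer side: called from the queue's datapath. */
    static void example_update(struct example_q_stats *s, unsigned int len)
    {
        u64_stats_update_begin(&s->sync);
        s->packets++;
        s->bytes += len;
        u64_stats_update_end(&s->sync);
    }

    /* Reader side: retry until a consistent snapshot is observed. */
    static void example_read(const struct example_q_stats *s,
                             u64 *packets, u64 *bytes)
    {
        unsigned int start;

        do {
            start = u64_stats_fetch_begin(&s->sync);
            *packets = s->packets;
            *bytes = s->bytes;
        } while (u64_stats_fetch_retry(&s->sync, start));
    }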
|
/drivers/gpu/ipu-v3/
ipu-dc.c
    120  int map, int wave, int glue, int sync, int stop)  in dc_write_tmpl() argument
    129  reg1 = sync | glue << 4 | ++wave << 11 | ((operand << 15) & 0xffff8000);  in dc_write_tmpl()
    132  reg1 = sync | glue << 4 | ++wave << 11 | ++map << 15 | ((operand << 20) & 0xfff00000);  in dc_write_tmpl()
    164  int addr, sync;  in ipu_dc_init_sync() local
    183  sync = interlaced ? 6 : 5;  in ipu_dc_init_sync()
    197  dc_write_tmpl(dc, addr, WROD(0), 0, map, SYNC_WAVE, 0, sync, 1);  in ipu_dc_init_sync()
    204  dc_write_tmpl(dc, addr + 2, WROD(0), 0, map, SYNC_WAVE, 8, sync, 1);  in ipu_dc_init_sync()
    205  dc_write_tmpl(dc, addr + 3, WROD(0), 0, map, SYNC_WAVE, 4, sync, 0);  in ipu_dc_init_sync()
    207  dc_write_tmpl(dc, addr + 1, WROD(0), 0, map, SYNC_WAVE, 0, sync, 1);  in ipu_dc_init_sync()
|
/drivers/media/test-drivers/vidtv/
vidtv_s302m.c
    184  if (e->sync && e->sync->is_video_encoder) {  in vidtv_s302m_alloc_au()
    185  sync_au = e->sync->access_units;  in vidtv_s302m_alloc_au()
    204  struct vidtv_access_unit *sync_au = e->sync->access_units;  in vidtv_s302m_compute_sample_count_from_video()
    210  vau_duration_usecs = USEC_PER_SEC / e->sync->sampling_rate_hz;  in vidtv_s302m_compute_sample_count_from_video()
    224  struct vidtv_access_unit *sync_au = e->sync->access_units;  in vidtv_s302m_compute_pts_from_video()
    410  if (e->sync && e->sync->is_video_encoder) {  in vidtv_s302m_encode()
    485  e->sync = args.sync;  in vidtv_s302m_encoder_init()
|
/drivers/net/ethernet/sfc/falcon/
mdio_10g.h
    43   bool sync;  in ef4_mdio_phyxgxs_lane_sync() local
    49   sync = !!(lane_status & MDIO_PHYXS_LNSTAT_ALIGN);  in ef4_mdio_phyxgxs_lane_sync()
    50   if (!sync)  in ef4_mdio_phyxgxs_lane_sync()
    53   return sync;  in ef4_mdio_phyxgxs_lane_sync()
|
/drivers/video/fbdev/
macmodes.c
    115  sync, FB_VMODE_INTERLACED
    119  sync, FB_VMODE_INTERLACED
    123  sync, FB_VMODE_INTERLACED
    127  sync, FB_VMODE_INTERLACED
    285  var->sync = mode->sync;  in mac_vmode_to_var()
|