| /drivers/gpu/drm/i915/selftests/ |
| intel_memory_region.c |
    165  I915_RND_STATE(prng);  in igt_mock_reserve()
    243  I915_RND_STATE(prng);  in igt_mock_contiguous()
    454  I915_RND_STATE(prng);  in igt_mock_max_segment()
    557  I915_RND_STATE(prng);  in igt_mock_io_size()
    712  I915_RND_STATE(prng);  in igt_gpu_write()
    868  I915_RND_STATE(prng);  in igt_lmem_create_cleared_cpu()
    907  &prng);  in igt_lmem_create_cleared_cpu()
    924  val = prandom_u32_state(&prng);  in igt_lmem_create_cleared_cpu()
    953  I915_RND_STATE(prng);  in igt_lmem_write_gpu()
    994  struct rnd_state *prng)  in random_engine_class()  argument
    [all …]
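Common to nearly all of the i915 selftest hits above: `I915_RND_STATE(prng)` declares a `struct rnd_state` on the stack seeded from `i915_selftest.random_seed` (the seed the framework logs; the scatterlist.c hits below show the seeding spelled out), and values are then drawn with `prandom_u32_state()` and `i915_prandom_u64_state()`. A minimal sketch of that pattern, not in-tree code; the function name is made up and the include mirrors how the selftests pull in the helpers:

```c
#include <linux/prandom.h>
#include "i915_random.h"	/* I915_RND_STATE(), i915_prandom_*() helpers */

static int igt_mock_prng_usage(void *arg)
{
	I915_RND_STATE(prng);	/* struct rnd_state seeded from i915_selftest.random_seed */
	int i;

	for (i = 0; i < 32; i++) {
		u32 val = prandom_u32_state(&prng);		/* 32-bit draw */
		u64 ctx = i915_prandom_u64_state(&prng);	/* 64-bit draw, e.g. a context id */

		pr_debug("iter %d: val=%08x ctx=%llx\n", i, val, ctx);
	}

	return 0;
}
```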
|
| scatterlist.c |
    282  struct rnd_state prng;  in igt_sg_alloc()  local
    297  prandom_seed_state(&prng,  in igt_sg_alloc()
    299  err = alloc_table(&pt, sz, sz, *npages, &prng,  in igt_sg_alloc()
    306  prandom_seed_state(&prng,  in igt_sg_alloc()
    308  err = expect_pfn_sgtable(&pt, *npages, &prng,  in igt_sg_alloc()
    338  struct rnd_state prng;  in igt_sg_trim()  local
    340  prandom_seed_state(&prng, i915_selftest.random_seed);  in igt_sg_trim()
    341  err = alloc_table(&pt, prime, max, *npages, &prng,  in igt_sg_trim()
    355  prandom_seed_state(&prng,  in igt_sg_trim()
    358  *npages, &prng,  in igt_sg_trim()
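igt_sg_alloc() and igt_sg_trim() show the other half of the idiom: seed, generate the test data, then reseed with the same `i915_selftest.random_seed` so the verification pass regenerates the identical sequence instead of keeping a reference copy. A standalone sketch of that fill-then-verify shape using only the core `<linux/prandom.h>` API; the buffer and names are illustrative:

```c
#include <linux/errno.h>
#include <linux/prandom.h>

static int fill_then_verify(u32 *buf, unsigned long count, u64 seed)
{
	struct rnd_state prng;
	unsigned long i;

	prandom_seed_state(&prng, seed);
	for (i = 0; i < count; i++)
		buf[i] = prandom_u32_state(&prng);	/* fill */

	prandom_seed_state(&prng, seed);		/* replay the same stream */
	for (i = 0; i < count; i++)
		if (buf[i] != prandom_u32_state(&prng))
			return -EINVAL;			/* mismatch */

	return 0;
}
```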
|
| i915_syncmap.c |
    204  I915_RND_STATE(prng);  in igt_syncmap_one()
    217  u64 context = i915_prandom_u64_state(&prng);  in igt_syncmap_one()
    226  prandom_u32_state(&prng));  in igt_syncmap_one()
    402  I915_RND_STATE(prng);  in igt_syncmap_neighbours()
    414  u64 context = i915_prandom_u64_state(&prng) & ~MASK;  in igt_syncmap_neighbours()
    544  I915_RND_STATE(prng);  in igt_syncmap_random()
    562  u64 context = i915_prandom_u64_state(&prng);  in igt_syncmap_random()
    578  seqno = prandom_u32_state(&prng);  in igt_syncmap_random()
|
| i915_gem.c |
    48  struct rnd_state prng;  in trash_stolen()  local
    55  prandom_seed_state(&prng, 0x12345678);  in trash_stolen()
    69  iowrite32(prandom_u32_state(&prng), &s[x]);  in trash_stolen()
|
| i915_gem_gtt.c |
    260  I915_RND_SUBSTATE(prng, seed_prng);  in lowlevel_hole()
    277  order = i915_random_order(count, &prng);  in lowlevel_hole()
    369  i915_random_reorder(order, count, &prng);  in lowlevel_hole()
    797  I915_RND_STATE(prng);  in drunk_hole()
    828  order = i915_random_order(count, &prng);  in drunk_hole()
    1347  I915_RND_STATE(prng);  in igt_ggtt_page()
    1391  order = i915_random_order(count, &prng);  in igt_ggtt_page()
    1407  i915_random_reorder(order, count, &prng);  in igt_ggtt_page()
    1538  I915_RND_STATE(prng);  in igt_gtt_reserve()
    1657  offset = igt_random_offset(&prng,  in igt_gtt_reserve()
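The GTT hole tests walk their address ranges in a random order: `i915_random_order()` hands back a shuffled array of indices and `i915_random_reorder()` reshuffles it between passes. A sketch of that idiom, assuming `i915_random_order()` returns a kmalloc'ed permutation of [0, count) (NULL on allocation failure); the visiting loops here only log the order:

```c
#include <linux/errno.h>
#include <linux/slab.h>
#include "i915_random.h"

static int visit_in_random_order(unsigned int count, struct rnd_state *prng)
{
	unsigned int *order, i;

	order = i915_random_order(count, prng);		/* shuffled indices [0, count) */
	if (!order)
		return -ENOMEM;

	for (i = 0; i < count; i++)
		pr_debug("first pass: slot %u\n", order[i]);

	i915_random_reorder(order, count, prng);	/* fresh permutation for the second pass */
	for (i = 0; i < count; i++)
		pr_debug("second pass: slot %u\n", order[i]);

	kfree(order);
	return 0;
}
```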
|
| i915_request.c |
    318  I915_RND_STATE(prng);  in __igt_breadcrumbs_smoketest()
    337  order = i915_random_order(total, &prng);  in __igt_breadcrumbs_smoketest()
    361  i915_random_reorder(order, total, &prng);  in __igt_breadcrumbs_smoketest()
    362  count = 1 + i915_prandom_u32_max_state(max_batch, &prng);  in __igt_breadcrumbs_smoketest()
|
| /drivers/gpu/drm/i915/gt/ |
| selftest_migrate.c |
    42  u32 sz, struct rnd_state *prng)  in copy()  argument
    261  u32 sz, struct rnd_state *prng)  in clear()  argument
    335  i915_prandom_u32_max_state(1024, prng);  in clear()
    377  ccs_bytes_left), prng);  in clear()
    447  return copy(migrate, __global_copy, sz, prng);  in global_copy()
    493  I915_RND_STATE(prng);  in live_migrate_copy()
    499  err = migrate_copy(migrate, sizes[i], &prng);  in live_migrate_copy()
    515  I915_RND_STATE(prng);  in live_migrate_clear()
    679  struct rnd_state prng;  member
    688  I915_RND_STATE(prng);  in threaded_migrate()
    [all …]
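selftest_migrate.c illustrates how the state is shared: the live tests own the `struct rnd_state` (or keep one per thread as a struct member) and helpers such as copy()/clear() only take a `struct rnd_state *`, so the whole run is reproducible from the single logged seed. A condensed sketch of that shape with made-up function names:

```c
#include <linux/minmax.h>
#include <linux/sizes.h>
#include "i915_random.h"

/* helpers never seed anything themselves; they only draw from the pointer */
static u32 pick_chunk(u32 sz, struct rnd_state *prng)
{
	/* a bounded draw, like the i915_prandom_u32_max_state(1024, prng) in clear() */
	return min_t(u32, sz, SZ_4K * (1 + i915_prandom_u32_max_state(1024, prng)));
}

static int run_one(u32 sz, struct rnd_state *prng)
{
	pr_debug("testing %u bytes in chunks of %u\n", sz, pick_chunk(sz, prng));
	return 0;	/* the real tests queue copy/clear work here */
}

static int live_example(void *arg)
{
	I915_RND_STATE(prng);	/* the only seed; every draw below derives from it */

	return run_one(SZ_4M, &prng);
}
```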
|
| selftest_tlb.c |
    38  struct rnd_state *prng)  in pte_tlbinv()  argument
    67  addr = igt_random_offset(prng, addr, min(ce->vm->total, BIT_ULL(48)),  in pte_tlbinv()
    91  addr = igt_random_offset(prng, addr, addr + align, 8, 8);  in pte_tlbinv()
    238  I915_RND_STATE(prng);  in mem_tlbinv()
    327  &prng);  in mem_tlbinv()
    336  &prng);  in mem_tlbinv()
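pte_tlbinv() picks its target addresses with `igt_random_offset()`; from the two calls above the arguments appear to be (state, start, end, length, alignment). A sketch under that assumption; the helper name, range, and sizes are illustrative:

```c
#include <linux/bits.h>
#include <linux/minmax.h>
#include <linux/sizes.h>
#include "i915_random.h"	/* igt_random_offset() */

/* pick a 4KiB-sized, 4KiB-aligned test address in the low 48 bits of the VM */
static u64 pick_test_address(struct rnd_state *prng, u64 vm_total)
{
	return igt_random_offset(prng, 0, min_t(u64, vm_total, BIT_ULL(48)),
				 SZ_4K, SZ_4K);
}
```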
|
| selftest_timeline.c |
    70  struct rnd_state prng;  member
    130  &state->prng);  in __mock_hwsp_timeline()
    133  &state->prng);  in __mock_hwsp_timeline()
    289  struct rnd_state prng;  in bench_sync()  local
    303  prandom_seed_state(&prng, i915_selftest.random_seed);  in bench_sync()
    311  WRITE_ONCE(x, prandom_u32_state(&prng));  in bench_sync()
    321  prandom_seed_state(&prng, i915_selftest.random_seed);  in bench_sync()
    326  u64 id = i915_prandom_u64_state(&prng);  in bench_sync()
    341  u64 id = i915_prandom_u64_state(&prng);  in bench_sync()
    395  u32 id = random_engine(&prng);  in bench_sync()
    [all …]
|
| selftest_rc6.c |
    191  struct rnd_state *prng,  in randomised_engines()  argument
    212  i915_prandom_shuffle(engines, sizeof(*engines), n, prng);  in randomised_engines()
    223  I915_RND_STATE(prng);  in live_rc6_ctx_wa()
    230  engines = randomised_engines(gt, &prng, &count);  in live_rc6_ctx_wa()
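randomised_engines() shuffles an array of engine pointers in place with `i915_prandom_shuffle(array, element_size, count, state)` (argument order as in the call above). The same idiom on a plain index array; `shuffled_indices()` is a made-up name:

```c
#include <linux/slab.h>
#include "i915_random.h"	/* i915_prandom_shuffle() */

/* build 0..n-1 and shuffle it in place, as randomised_engines() does
 * with its array of engine pointers */
static u32 *shuffled_indices(unsigned int n, struct rnd_state *prng)
{
	u32 *values;
	unsigned int i;

	values = kmalloc_array(n, sizeof(*values), GFP_KERNEL);
	if (!values)
		return NULL;

	for (i = 0; i < n; i++)
		values[i] = i;

	i915_prandom_shuffle(values, sizeof(*values), n, prng);
	return values;
}
```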
|
| selftest_lrc.c |
    1770  struct rnd_state *prng)  in garbage()  argument
    1779  prandom_bytes_state(prng,  in garbage()
    1799  static int __lrc_garbage(struct intel_engine_cs *engine, struct rnd_state *prng)  in __lrc_garbage()  argument
    1809  hang = garbage(ce, prng);  in __lrc_garbage()
    1862  I915_RND_STATE(prng);  in live_lrc_garbage()
    1870  err = __lrc_garbage(engine, &prng);  in live_lrc_garbage()
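garbage() corrupts a copy of the context image with `prandom_bytes_state()`, which overwrites an arbitrary buffer with bytes drawn from the caller's state. A tiny sketch of that fill; the helper name is illustrative:

```c
#include <linux/prandom.h>
#include <linux/slab.h>

/* allocate a buffer and fill every byte with pseudo-random garbage,
 * as garbage() does to a copy of the context image */
static void *alloc_garbage(size_t len, struct rnd_state *prng)
{
	void *vaddr = kmalloc(len, GFP_KERNEL);

	if (vaddr)
		prandom_bytes_state(prng, vaddr, len);
	return vaddr;
}
```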
|
| /drivers/gpu/drm/i915/gem/selftests/ |
| i915_gem_client_blt.c |
    297  struct rnd_state *prng)  in tiled_blits_create_buffers()  argument
    417  struct rnd_state *prng)  in verify_buffer()  argument
    423  x = i915_prandom_u32_max_state(t->width, prng);  in verify_buffer()
    598  struct rnd_state *prng)  in tiled_blits_prepare()  argument
    611  fill_scratch(t, map, prandom_u32_state(prng));  in tiled_blits_prepare()
    652  err = verify_buffer(t, &t->buffers[2], prng);  in tiled_blits_bounce()
    660  struct rnd_state *prng)  in __igt_client_tiled_blits()  argument
    665  t = tiled_blits_create(engine, prng);  in __igt_client_tiled_blits()
    669  err = tiled_blits_prepare(t, prng);  in __igt_client_tiled_blits()
    673  err = tiled_blits_bounce(t, prng);  in __igt_client_tiled_blits()
    [all …]
|
| i915_gem_coherency.c |
    289  random_engine(struct drm_i915_private *i915, struct rnd_state *prng)  in random_engine()  argument
    298  count = i915_prandom_u32_max_state(count, prng);  in random_engine()
    313  I915_RND_STATE(prng);  in igt_gem_coherency()
    332  ctx.engine = random_engine(i915, &prng);  in igt_gem_coherency()
    368  i915_random_reorder(offsets, ncachelines, &prng);  in igt_gem_coherency()
    370  values[n] = prandom_u32_state(&prng);  in igt_gem_coherency()
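random_engine() counts the available engines and then selects one with a bounded draw, `i915_prandom_u32_max_state(count, prng)`, assumed here to return a value in [0, count). The same pick-one-of-N idiom on a plain string table; the names are made up:

```c
#include "i915_random.h"	/* i915_prandom_u32_max_state() */

/* pick one entry of a table at random, as random_engine() does with engines */
static const char *random_entry(const char * const *names, unsigned int count,
				struct rnd_state *prng)
{
	if (!count)
		return NULL;

	return names[i915_prandom_u32_max_state(count, prng)];
}
```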
|
| huge_pages.c |
    1198  I915_RND_STATE(prng);  in igt_write_huge()
    1248  order = i915_random_order(count * count, &prng);  in igt_write_huge()
    1351  static u32 igt_random_size(struct rnd_state *prng,  in igt_random_size()  argument
    1364  size = prandom_u32_state(prng) & mask;  in igt_random_size()
    1375  I915_RND_STATE(prng);  in igt_ppgtt_smoke_huge()
    1399  size = igt_random_size(&prng, min, rounddown_pow_of_two(size));  in igt_ppgtt_smoke_huge()
    1622  I915_RND_STATE(prng);  in igt_ppgtt_mixed()
    1657  sz = i915_prandom_u32_max_state(SZ_4M, &prng);  in igt_ppgtt_mixed()
    1706  order = i915_random_order(count * count, &prng);  in igt_ppgtt_mixed()
    1716  u32 rnd = i915_prandom_u32_max_state(UINT_MAX, &prng);  in igt_ppgtt_mixed()
|
| i915_gem_mman.c |
    94  struct rnd_state *prng)  in check_partial_mapping()  argument
    126  page = i915_prandom_u32_max_state(npages, prng);  in check_partial_mapping()
    452  I915_RND_STATE(prng);  in igt_smoke_tiling()
    492  i915_prandom_u32_max_state(I915_TILING_Y + 1, &prng);  in igt_smoke_tiling()
    518  i915_prandom_u32_max_state(max_pitch, &prng);  in igt_smoke_tiling()
    524  err = check_partial_mapping(obj, &tile, &prng);  in igt_smoke_tiling()
|
| i915_gem_context.c |
    1377  I915_RND_STATE(prng);  in igt_ctx_readonly()
    1431  if (prandom_u32_state(&prng) & 1)  in igt_ctx_readonly()
    1778  I915_RND_STATE(prng);  in igt_vm_isolation()
    1858  offset = igt_random_offset(&prng,  in igt_vm_isolation()
|
| /drivers/gpu/drm/tests/ |
| drm_buddy_test.c |
    27  DRM_RND_STATE(prng, random_seed);  in drm_test_buddy_alloc_range_bias()
    35  ps = roundup_pow_of_two(prandom_u32_state(&prng) % bias_size);  in drm_test_buddy_alloc_range_bias()
    45  order = drm_random_order(count, &prng);  in drm_test_buddy_alloc_range_bias()
    121  size = max(round_up(prandom_u32_state(&prng) % bias_rem, ps), ps);  in drm_test_buddy_alloc_range_bias()
    143  size = max(round_up(prandom_u32_state(&prng) % bias_rem, ps), ps);  in drm_test_buddy_alloc_range_bias()
    180  bias_start = round_up(prandom_u32_state(&prng) % (mm_size - ps), ps);  in drm_test_buddy_alloc_range_bias()
    186  u32 size = max(round_up(prandom_u32_state(&prng) % bias_rem, ps), ps);  in drm_test_buddy_alloc_range_bias()
    206  bias_start -= round_up(prandom_u32_state(&prng) % bias_start, ps);  in drm_test_buddy_alloc_range_bias()
    208  bias_end += round_up(prandom_u32_state(&prng) % (mm_size - bias_end), ps);  in drm_test_buddy_alloc_range_bias()
    238  bias_start = round_up(prandom_u32_state(&prng) % (mm_size - ps), ps);  in drm_test_buddy_alloc_range_bias()
    [all …]
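The DRM KUnit buddy test uses the drm_random.h counterparts: `DRM_RND_STATE(name, seed)` declares a `struct rnd_state` seeded from the test module's `random_seed` parameter, and `drm_random_order()` is assumed to mirror the i915 helper (a kmalloc'ed permutation of [0, count)). A sketch under those assumptions; the include path and function name are illustrative:

```c
#include <kunit/test.h>
#include <linux/prandom.h>
#include <linux/slab.h>

#include "../lib/drm_random.h"	/* DRM_RND_STATE(), drm_random_order() (path assumed) */

static void example_random_order(struct kunit *test, unsigned int count, u64 seed)
{
	DRM_RND_STATE(prng, seed);	/* the in-tree test passes its random_seed parameter */
	unsigned int *order, i;

	order = drm_random_order(count, &prng);	/* shuffled [0, count) */
	KUNIT_ASSERT_NOT_NULL(test, order);

	for (i = 0; i < count; i++)
		kunit_info(test, "visit block %u", order[i]);

	kfree(order);
}
```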
|
| /drivers/gpu/drm/xe/tests/ |
| xe_bo.c |
    452  struct rnd_state prng;  in shrink_test_run_device()  local
    457  prandom_seed_state(&prng, rand_seed);  in shrink_test_run_device()
    525  int ret = shrink_test_fill_random(bo, &prng, link);  in shrink_test_run_device()
    557  prandom_seed_state(&prng, rand_seed);  in shrink_test_run_device()
    579  failed = shrink_test_verify(test, bo, count, &prng, link);  in shrink_test_run_device()
|
| /drivers/crypto/allwinner/sun8i-ce/ |
| sun8i-ce-core.c |
    52  .prng = CE_ALG_PRNG,
    69  .prng = CE_ALG_PRNG,
    91  .prng = CE_ALG_PRNG_V2,
    115  .prng = CE_ALG_PRNG_V2,
    132  .prng = CE_ALG_PRNG,
    151  .prng = CE_ALG_PRNG,
    168  .prng = CE_ALG_PRNG,
    940  if (ce->variant->prng == CE_ID_NOTSUPP) {  in sun8i_ce_register_algs()
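In the sun8i-ce driver the identifier means something different: `prng` is a per-SoC algorithm ID in the variant description, set to `CE_ALG_PRNG` or `CE_ALG_PRNG_V2` and left at `CE_ID_NOTSUPP` where the engine has no PRNG; registration checks the field and sun8i_ce_prng_generate() ORs it into the task's common word. A condensed sketch of how the field is consumed, assuming the variant struct is `struct ce_variant` from sun8i-ce.h; the helper names are made up:

```c
#include "sun8i-ce.h"	/* struct ce_variant, CE_ALG_*, CE_ID_NOTSUPP, CE_COMM_INT */

/* mirrors the check in sun8i_ce_register_algs() */
static bool ce_has_prng(const struct ce_variant *variant)
{
	return variant->prng != CE_ID_NOTSUPP;
}

/* algorithm ID for this silicon revision plus the interrupt-enable bit,
 * as in sun8i_ce_prng_generate() */
static u32 ce_prng_common(const struct ce_variant *variant)
{
	return variant->prng | CE_COMM_INT;
}
```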
|
| Makefile |
    4  sun8i-ce-$(CONFIG_CRYPTO_DEV_SUN8I_CE_PRNG) += sun8i-ce-prng.o
|
| sun8i-ce-prng.c |
    122  common = ce->variant->prng | CE_COMM_INT;  in sun8i_ce_prng_generate()
|
| sun8i-ce.h |
    155  unsigned char prng;  member
|
| /drivers/crypto/allwinner/sun4i-ss/ |
| Makefile |
    4  sun4i-ss-$(CONFIG_CRYPTO_DEV_SUN4I_SS_PRNG) += sun4i-ss-prng.o
|
| /drivers/crypto/allwinner/sun8i-ss/ |
| Makefile |
    3  sun8i-ss-$(CONFIG_CRYPTO_DEV_SUN8I_SS_PRNG) += sun8i-ss-prng.o
|
| /drivers/crypto/caam/ |
| Kconfig |
    159  Selecting this will register the SEC hardware prng to
|