
Searched refs:chunk_size (Results 1 – 25 of 114) sorted by relevance


/drivers/md/
dm-exception-store.c
146 unsigned int chunk_size; in set_chunk_size() local
148 if (kstrtouint(chunk_size_arg, 10, &chunk_size)) { in set_chunk_size()
153 if (!chunk_size) { in set_chunk_size()
162 unsigned int chunk_size, in dm_exception_store_set_chunk_size() argument
166 if (!is_power_of_2(chunk_size)) { in dm_exception_store_set_chunk_size()
172 if (chunk_size % in dm_exception_store_set_chunk_size()
174 chunk_size % in dm_exception_store_set_chunk_size()
180 if (chunk_size > INT_MAX >> SECTOR_SHIFT) { in dm_exception_store_set_chunk_size()
185 store->chunk_size = chunk_size; in dm_exception_store_set_chunk_size()
186 store->chunk_mask = chunk_size - 1; in dm_exception_store_set_chunk_size()
[all …]
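
Note: the fragments above show the chunk-size validation pattern in dm-exception-store: parse the argument, reject zero and non-power-of-two values, reject anything that overflows once scaled to bytes, then cache the size together with a mask for cheap modulo arithmetic. A minimal userspace sketch of that pattern follows; SECTOR_SHIFT is assumed to be 9 as in the kernel, and is_power_of_2() is written out instead of coming from <linux/log2.h>.

#include <errno.h>
#include <limits.h>
#include <stdbool.h>
#include <stdlib.h>

#define SECTOR_SHIFT 9   /* 512-byte sectors, as in the kernel */

static bool is_power_of_2(unsigned int n)
{
        return n != 0 && (n & (n - 1)) == 0;
}

/* Sketch of the checks visible in dm_exception_store_set_chunk_size(). */
static int set_chunk_size(const char *arg, unsigned int *out_size,
                          unsigned int *out_mask)
{
        unsigned int chunk_size = strtoul(arg, NULL, 10);

        if (!chunk_size)                          /* zero is rejected */
                return -EINVAL;
        if (!is_power_of_2(chunk_size))           /* must be a power of two */
                return -EINVAL;
        if (chunk_size > INT_MAX >> SECTOR_SHIFT) /* must fit once scaled to bytes */
                return -EINVAL;

        *out_size = chunk_size;
        *out_mask = chunk_size - 1;  /* mask works because size is a power of two */
        return 0;
}
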
dm-stripe.c
38 uint32_t chunk_size; member
93 uint32_t chunk_size; in stripe_ctr() local
107 if (kstrtouint(argv[1], 10, &chunk_size) || !chunk_size) { in stripe_ctr()
119 if (sector_div(tmp_len, chunk_size)) { in stripe_ctr()
162 sc->chunk_size = chunk_size; in stripe_ctr()
163 if (chunk_size & (chunk_size - 1)) in stripe_ctr()
223 chunk *= sc->chunk_size; in stripe_map_sector()
381 (unsigned long long)sc->chunk_size); in stripe_status()
459 unsigned int chunk_size = sc->chunk_size << SECTOR_SHIFT; in stripe_io_hints() local
461 limits->chunk_sectors = sc->chunk_size; in stripe_io_hints()
[all …]
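
Note: the dm-stripe lines show chunk_size being parsed, the device length being checked for divisibility with sector_div(), and the mapping step multiplying a chunk index by chunk_size. A simplified, power-of-two-agnostic sketch of that round-robin striping arithmetic (the struct layout and names are illustrative, not the driver's):

#include <stdint.h>

typedef uint64_t sector_t;

/*
 * Map a logical sector to a stripe index and the sector offset on that
 * stripe's backing device; chunk_size is in sectors, as in the listing.
 */
static void stripe_map_sketch(sector_t sector, uint32_t chunk_size,
                              uint32_t stripes, uint32_t *stripe_out,
                              sector_t *dev_sector_out)
{
        sector_t chunk = sector / chunk_size;      /* which chunk overall */
        sector_t offset = sector % chunk_size;     /* offset inside the chunk */
        uint32_t stripe = chunk % stripes;         /* round-robin target device */

        *stripe_out = stripe;
        /* chunk index on that device, scaled back to sectors */
        *dev_sector_out = (chunk / stripes) * chunk_size + offset;
}
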
dm-unstripe.c
22 uint32_t chunk_size; member
62 if (kstrtouint(argv[1], 10, &uc->chunk_size) || !uc->chunk_size) { in unstripe_ctr()
88 uc->unstripe_offset = (sector_t)uc->unstripe * uc->chunk_size; in unstripe_ctr()
89 uc->unstripe_width = (sector_t)(uc->stripes - 1) * uc->chunk_size; in unstripe_ctr()
90 uc->chunk_shift = is_power_of_2(uc->chunk_size) ? fls(uc->chunk_size) - 1 : 0; in unstripe_ctr()
93 if (sector_div(tmp_len, uc->chunk_size)) { in unstripe_ctr()
98 if (dm_set_target_max_io_len(ti, uc->chunk_size)) { in unstripe_ctr()
127 sector_div(tmp_sector, uc->chunk_size); in map_to_core()
157 uc->stripes, (unsigned long long)uc->chunk_size, uc->unstripe, in unstripe_status()
180 limits->chunk_sectors = uc->chunk_size; in unstripe_io_hints()
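
Note: dm-unstripe precomputes an offset, a width, and a shift from chunk_size; fls(x) - 1 is simply log2 for a power of two, so later divisions can become shifts. A small sketch of that precomputation, using a hypothetical struct and the compiler builtin __builtin_ctz() in place of fls():

#include <stdbool.h>
#include <stdint.h>

typedef uint64_t sector_t;

/* Field names mirror the listing; the real struct unstripe_c differs. */
struct unstripe_sketch {
        uint32_t chunk_size;     /* in sectors */
        uint32_t stripes;
        uint32_t unstripe;       /* which stripe we extract */
        sector_t unstripe_offset;
        sector_t unstripe_width;
        uint32_t chunk_shift;    /* 0 when chunk_size is not a power of two */
};

static void unstripe_precompute(struct unstripe_sketch *uc)
{
        bool pow2 = uc->chunk_size && !(uc->chunk_size & (uc->chunk_size - 1));

        uc->unstripe_offset = (sector_t)uc->unstripe * uc->chunk_size;
        uc->unstripe_width  = (sector_t)(uc->stripes - 1) * uc->chunk_size;
        uc->chunk_shift     = pow2 ? __builtin_ctz(uc->chunk_size) : 0;
}
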
dm-snap-persistent.c
84 __le32 chunk_size; member
237 .sector = ps->store->chunk_size * chunk, in chunk_io()
238 .count = ps->store->chunk_size, in chunk_io()
310 unsigned int chunk_size; in read_header() local
318 if (!ps->store->chunk_size) { in read_header()
355 chunk_size = le32_to_cpu(dh->chunk_size); in read_header()
357 if (ps->store->chunk_size == chunk_size) in read_header()
362 chunk_size, ps->store->chunk_size); in read_header()
371 chunk_size, chunk_err); in read_header()
393 dh->chunk_size = cpu_to_le32(ps->store->chunk_size); in write_header()
[all …]
dm-snap-transient.c
48 if (size < (tc->next_free + store->chunk_size)) in transient_prepare_exception()
52 tc->next_free += store->chunk_size; in transient_prepare_exception()
100 DMEMIT(" N %llu", (unsigned long long)store->chunk_size); in transient_status()
dm-snap.c
538 if (l->store->chunk_size < s->store->chunk_size) in __insert_snapshot()
862 chunk_size = min_not_zero(chunk_size, in __minimum_chunk_size()
863 snap->store->chunk_size); in __minimum_chunk_size()
865 return (uint32_t) chunk_size; in __minimum_chunk_size()
1017 sector_t sector, unsigned int chunk_size);
1082 io_size = linear_chunks * s->store->chunk_size; in snapshot_merge_next_chunks()
1378 s->store->chunk_size = 0; in snapshot_ctr()
1392 if (!s->store->chunk_size) { in snapshot_ctr()
1398 r = dm_set_target_max_io_len(ti, s->store->chunk_size); in snapshot_ctr()
1934 dest.count = s->store->chunk_size; in zero_exception()
[all …]
/drivers/md/dm-vdo/indexer/
io-factory.c
220 size_t chunk_size; in uds_read_from_buffered_reader() local
229 length -= chunk_size; in uds_read_from_buffered_reader()
230 data += chunk_size; in uds_read_from_buffered_reader()
231 reader->end += chunk_size; in uds_read_from_buffered_reader()
245 size_t chunk_size; in uds_verify_buffered_data() local
262 length -= chunk_size; in uds_verify_buffered_data()
263 value += chunk_size; in uds_verify_buffered_data()
264 reader->end += chunk_size; in uds_verify_buffered_data()
383 size_t chunk_size; in uds_write_to_buffered_writer() local
396 data += chunk_size; in uds_write_to_buffered_writer()
[all …]
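
Note: the io-factory fragments all follow the same drain loop: take a chunk from the current buffered block, then advance the data pointer, the remaining length, and the reader's end offset by chunk_size. A minimal sketch of that loop under assumed types; refill_block() is a hypothetical stand-in for the dm-bufio-backed refill in the real reader.

#include <stddef.h>
#include <string.h>

struct buffered_reader_sketch {
        unsigned char *block;   /* current buffered block */
        size_t end;             /* bytes of the block already consumed */
        size_t block_size;      /* bytes available in the block */
        int (*refill_block)(struct buffered_reader_sketch *reader);
};

static int read_from_buffered_reader_sketch(struct buffered_reader_sketch *reader,
                                            unsigned char *data, size_t length)
{
        while (length > 0) {
                size_t avail = reader->block_size - reader->end;
                size_t chunk_size = (length < avail) ? length : avail;

                if (chunk_size == 0) {
                        int err = reader->refill_block(reader);
                        if (err)
                                return err;
                        continue;
                }

                memcpy(data, reader->block + reader->end, chunk_size);
                length -= chunk_size;
                data += chunk_size;
                reader->end += chunk_size;
        }
        return 0;
}
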
/drivers/gpu/drm/
drm_buddy.c
242 if (size < chunk_size) in drm_buddy_init()
245 if (chunk_size < SZ_4K) in drm_buddy_init()
248 if (!is_power_of_2(chunk_size)) in drm_buddy_init()
251 size = round_down(size, chunk_size); in drm_buddy_init()
256 mm->chunk_size = chunk_size; in drm_buddy_init()
291 root_size = chunk_size << order; in drm_buddy_init()
346 root_size = mm->chunk_size << order; in drm_buddy_fini()
429 root_size = mm->chunk_size << order; in drm_buddy_reset_clear()
531 u64 req_size = mm->chunk_size << order; in __alloc_range_bias()
1039 if (size < mm->chunk_size) in drm_buddy_alloc_blocks()
[all …]
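
Note: drm_buddy_init() rejects a chunk_size below 4 KiB or not a power of two, trims the managed size to a chunk_size multiple, and carves the space into root blocks of chunk_size << order bytes. A standalone sketch of those steps (illustrative only, not the DRM buddy API; __builtin_clzll stands in for ilog2):

#include <errno.h>
#include <stdbool.h>
#include <stdint.h>

#define SZ_4K 0x1000u

static bool is_power_of_2_u64(uint64_t n)
{
        return n != 0 && (n & (n - 1)) == 0;
}

static int buddy_init_sketch(uint64_t size, uint64_t chunk_size)
{
        if (size < chunk_size)
                return -EINVAL;
        if (chunk_size < SZ_4K)
                return -EINVAL;
        if (!is_power_of_2_u64(chunk_size))
                return -EINVAL;

        size -= size % chunk_size;   /* round_down(size, chunk_size) */

        /* Carve the space into power-of-two root blocks, largest first,
         * matching the "root_size = chunk_size << order" lines above. */
        while (size) {
                unsigned int order = 63 - __builtin_clzll(size / chunk_size);
                uint64_t root_size = chunk_size << order;

                /* ... record a root block covering root_size bytes ... */
                size -= root_size;
        }
        return 0;
}
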
/drivers/gpu/drm/panthor/
panthor_heap.c
60 u32 chunk_size; member
133 atomic_sub(heap->chunk_size, &pool->size); in panthor_free_heap_chunk()
151 chunk->bo = panthor_kernel_bo_create(pool->ptdev, pool->vm, heap->chunk_size, in panthor_alloc_heap_chunk()
178 (heap->chunk_size >> 12); in panthor_alloc_heap_chunk()
188 atomic_add(heap->chunk_size, &pool->size); in panthor_alloc_heap_chunk()
275 u32 chunk_size, in panthor_heap_create() argument
293 if (!IS_ALIGNED(chunk_size, PAGE_SIZE) || in panthor_heap_create()
294 chunk_size < SZ_128K || chunk_size > SZ_8M) in panthor_heap_create()
313 heap->chunk_size = chunk_size; in panthor_heap_create()
397 atomic_sub(heap->chunk_size, &pool->size); in panthor_heap_return_chunk()
[all …]
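
Note: panthor_heap_create() accepts a chunk_size only if it is page aligned and between 128 KiB and 8 MiB; the pool's total size is then tracked by adding or subtracting chunk_size as chunks come and go. A sketch of the acceptance test, assuming 4 KiB pages:

#include <errno.h>
#include <stdint.h>

#define PAGE_SIZE_SKETCH 0x1000u   /* assumed 4 KiB pages */
#define SZ_128K          0x20000u
#define SZ_8M            0x800000u

static int heap_chunk_size_ok(uint32_t chunk_size)
{
        if (chunk_size % PAGE_SIZE_SKETCH)   /* IS_ALIGNED(chunk_size, PAGE_SIZE) */
                return -EINVAL;
        if (chunk_size < SZ_128K || chunk_size > SZ_8M)
                return -EINVAL;
        return 0;
}
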
/drivers/platform/x86/intel/ifs/
load.c
123 int i, num_chunks, chunk_size; in copy_hashes_authenticate_chunks() local
136 chunk_size = hashes_status.chunk_size * 1024; in copy_hashes_authenticate_chunks()
150 linear_addr = base + i * chunk_size; in copy_hashes_authenticate_chunks()
187 int i, num_chunks, chunk_size; in copy_hashes_authenticate_chunks_gen2() local
203 chunk_size = hashes_status.chunk_size * SZ_1K; in copy_hashes_authenticate_chunks_gen2()
213 ifsd->chunk_size = chunk_size; in copy_hashes_authenticate_chunks_gen2()
216 chunk_size = ifsd->chunk_size; in copy_hashes_authenticate_chunks_gen2()
236 linear_addr = base + i * chunk_size; in copy_hashes_authenticate_chunks_gen2()
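
Note: in the IFS loader the hardware reports chunk_size in KiB, so it is scaled to bytes before each chunk i is authenticated at base + i * chunk_size. A small sketch of that address stepping; authenticate(), num_chunks, and base are stand-ins for the MSR-driven values in the driver.

#include <stdint.h>

static void walk_chunks(uint64_t base, unsigned int num_chunks,
                        unsigned int chunk_size_kib,
                        void (*authenticate)(uint64_t linear_addr))
{
        unsigned int chunk_size = chunk_size_kib * 1024;   /* KiB -> bytes */

        for (unsigned int i = 0; i < num_chunks; i++)
                authenticate(base + (uint64_t)i * chunk_size);
}
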
ifs.h
188 u32 chunk_size :16; member
201 u16 chunk_size; member
370 u32 chunk_size; member
/drivers/gpu/drm/tests/
drm_buddy_test.c
21 return (1 << order) * chunk_size; in get_size()
547 size = get_size(0, mm.chunk_size); in drm_test_buddy_alloc_pathological()
557 size = get_size(top, mm.chunk_size); in drm_test_buddy_alloc_pathological()
568 size = get_size(order, mm.chunk_size); in drm_test_buddy_alloc_pathological()
604 size = get_size(order, mm.chunk_size); in drm_test_buddy_alloc_pessimistic()
617 size = get_size(0, mm.chunk_size); in drm_test_buddy_alloc_pessimistic()
629 size = get_size(order, mm.chunk_size); in drm_test_buddy_alloc_pessimistic()
645 size = get_size(order, mm.chunk_size); in drm_test_buddy_alloc_pessimistic()
699 size = get_size(order, mm.chunk_size); in drm_test_buddy_alloc_optimistic()
712 size = get_size(0, mm.chunk_size); in drm_test_buddy_alloc_optimistic()
[all …]
/drivers/net/ethernet/mellanox/mlx5/core/steering/sws/
dr_icm_pool.c
323 enum mlx5dr_icm_chunk_size chunk_size, in dr_icm_chunk_init() argument
330 chunk->size = chunk_size; in dr_icm_chunk_init()
387 enum mlx5dr_icm_chunk_size chunk_size, in dr_icm_handle_buddies_get_mem() argument
399 chunk_size, seg); in dr_icm_handle_buddies_get_mem()
407 chunk_size); in dr_icm_handle_buddies_get_mem()
417 chunk_size); in dr_icm_handle_buddies_get_mem()
436 enum mlx5dr_icm_chunk_size chunk_size) in mlx5dr_icm_alloc_chunk() argument
443 if (chunk_size > pool->max_log_chunk_sz) in mlx5dr_icm_alloc_chunk()
448 ret = dr_icm_handle_buddies_get_mem(pool, chunk_size, &buddy, &seg); in mlx5dr_icm_alloc_chunk()
456 dr_icm_chunk_init(chunk, pool, chunk_size, buddy, seg); in mlx5dr_icm_alloc_chunk()
[all …]
dr_ptrn.c
82 u32 chunk_size; in dr_ptrn_alloc_pattern() local
85 chunk_size = ilog2(roundup_pow_of_two(num_of_actions)); in dr_ptrn_alloc_pattern()
87 chunk_size = max_t(u32, chunk_size, DR_CHUNK_SIZE_8); in dr_ptrn_alloc_pattern()
89 chunk = mlx5dr_icm_alloc_chunk(mgr->ptrn_icm_pool, chunk_size); in dr_ptrn_alloc_pattern()
/drivers/net/ethernet/mellanox/mlxsw/
i2c.c
337 int off = mlxsw_i2c->cmd.mb_off_in, chunk_size, i, j; in mlxsw_i2c_write() local
353 write_tran.len = MLXSW_I2C_ADDR_WIDTH + chunk_size; in mlxsw_i2c_write()
356 mlxsw_i2c->block_size * i, chunk_size); in mlxsw_i2c_write()
376 off += chunk_size; in mlxsw_i2c_write()
377 in_mbox_size -= chunk_size; in mlxsw_i2c_write()
416 int num, chunk_size, reg_size, i, j; in mlxsw_i2c_cmd() local
462 chunk_size = (reg_size > mlxsw_i2c->block_size) ? in mlxsw_i2c_cmd()
464 read_tran[1].len = chunk_size; in mlxsw_i2c_cmd()
486 off += chunk_size; in mlxsw_i2c_cmd()
487 reg_size -= chunk_size; in mlxsw_i2c_cmd()
[all …]
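
Note: the mlxsw I2C fragments show a mailbox larger than the adapter's block_size being pushed in block_size pieces, advancing both the device offset and the source pointer by chunk_size each round. A sketch of that loop; transfer() is a hypothetical stand-in for building and issuing the i2c_msg.

#include <stddef.h>
#include <stdint.h>

static int write_mbox_chunked(const uint8_t *mbox, size_t mbox_size,
                              uint32_t dev_off, size_t block_size,
                              int (*transfer)(uint32_t off, const uint8_t *buf,
                                              size_t len))
{
        size_t i = 0;

        while (mbox_size) {
                size_t chunk_size = (mbox_size > block_size) ? block_size
                                                             : mbox_size;
                int err = transfer(dev_off, mbox + block_size * i, chunk_size);
                if (err)
                        return err;

                dev_off += chunk_size;
                mbox_size -= chunk_size;
                i++;
        }
        return 0;
}
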
/drivers/rtc/
rtc-isl12026.c
327 size_t chunk_size, num_written; in isl12026_nvm_write() local
347 chunk_size = round_down(offset, ISL12026_PAGESIZE) + in isl12026_nvm_write()
349 chunk_size = min(bytes, chunk_size); in isl12026_nvm_write()
354 memcpy(payload + 2, v + num_written, chunk_size); in isl12026_nvm_write()
357 msgs[0].len = chunk_size + 2; in isl12026_nvm_write()
367 bytes -= chunk_size; in isl12026_nvm_write()
368 offset += chunk_size; in isl12026_nvm_write()
369 num_written += chunk_size; in isl12026_nvm_write()
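
Note: the isl12026 write path clamps each chunk to the distance from the current offset to the end of its EEPROM page, so no I2C write crosses a page boundary. A sketch of that chunking; the page size constant here is an assumption for illustration and write_page() stands in for the real i2c_transfer() call.

#include <stddef.h>
#include <stdint.h>

#define EEPROM_PAGESIZE 16u   /* assumed page size for illustration */

static int nvm_write_sketch(uint32_t offset, const uint8_t *v, size_t bytes,
                            int (*write_page)(uint32_t off, const uint8_t *buf,
                                              size_t len))
{
        size_t num_written = 0;

        while (bytes) {
                /* bytes left in the current page:
                 * round_down(offset, PAGESIZE) + PAGESIZE - offset */
                size_t chunk_size = (offset / EEPROM_PAGESIZE) * EEPROM_PAGESIZE
                                    + EEPROM_PAGESIZE - offset;
                if (bytes < chunk_size)
                        chunk_size = bytes;

                int err = write_page(offset, v + num_written, chunk_size);
                if (err)
                        return err;

                bytes -= chunk_size;
                offset += chunk_size;
                num_written += chunk_size;
        }
        return 0;
}
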
/drivers/rpmsg/
qcom_glink_native.c
904 __le32 chunk_size; in qcom_glink_rx_data() member
907 unsigned int chunk_size; in qcom_glink_rx_data() local
920 chunk_size = le32_to_cpu(hdr.chunk_size); in qcom_glink_rx_data()
990 intent->offset += chunk_size; in qcom_glink_rx_data()
1472 __le32 chunk_size; in __qcom_glink_send() member
1477 int chunk_size = len; in __qcom_glink_send() local
1513 chunk_size = len - offset; in __qcom_glink_send()
1515 chunk_size = SZ_8K; in __qcom_glink_send()
1520 req.chunk_size = cpu_to_le32(chunk_size); in __qcom_glink_send()
1525 iid, chunk_size, in __qcom_glink_send()
[all …]
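
Note: on the GLINK TX path a message longer than 8 KiB is split into chunks, each header carrying the chunk length and how much payload is still outstanding; the RX side advances intent->offset by the received chunk_size. A sketch of the TX splitting; send_chunk() is a hypothetical stand-in for writing the command header plus payload to the FIFO.

#include <stddef.h>
#include <stdint.h>

#define SZ_8K 0x2000u

static int glink_send_sketch(const uint8_t *data, size_t len,
                             int (*send_chunk)(const uint8_t *buf,
                                               uint32_t chunk_size,
                                               uint32_t left_size))
{
        size_t offset = 0;

        while (offset < len) {
                uint32_t chunk_size = len - offset;

                if (chunk_size > SZ_8K)
                        chunk_size = SZ_8K;

                int err = send_chunk(data + offset, chunk_size,
                                     (uint32_t)(len - offset - chunk_size));
                if (err)
                        return err;
                offset += chunk_size;
        }
        return 0;
}
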
qcom_glink_trace.h
281 …TP_PROTO(const char *remote, const char *channel, u16 lcid, u16 rcid, u32 iid, u32 chunk_size, u32…
282 TP_ARGS(remote, channel, lcid, rcid, iid, chunk_size, left_size, cont, tx),
289 __field(u32, chunk_size)
300 __entry->chunk_size = chunk_size;
312 __entry->chunk_size,
/drivers/net/wireless/marvell/libertas/
if_sdio.c
446 u32 chunk_size; in if_sdio_prog_helper() local
475 chunk_size = min_t(size_t, size, 60); in if_sdio_prog_helper()
487 firmware += chunk_size; in if_sdio_prog_helper()
488 size -= chunk_size; in if_sdio_prog_helper()
542 u32 chunk_size; in if_sdio_prog_real() local
613 chunk_size = min_t(size_t, req_size, 512); in if_sdio_prog_real()
615 memcpy(chunk_buffer, firmware, chunk_size); in if_sdio_prog_real()
621 chunk_buffer, roundup(chunk_size, 32)); in if_sdio_prog_real()
625 firmware += chunk_size; in if_sdio_prog_real()
626 size -= chunk_size; in if_sdio_prog_real()
[all …]
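
Note: the libertas loader copies firmware out in bounded chunks (60 bytes for the helper image, 512 bytes for the real image) and pads each SDIO transfer up to a 32-byte multiple, as the roundup(chunk_size, 32) line indicates. A simplified sketch of the main download loop; sdio_write() is a hypothetical stand-in for the SDIO block write.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

static int prog_firmware_sketch(const uint8_t *firmware, size_t size,
                                int (*sdio_write)(const uint8_t *buf, size_t len))
{
        uint8_t chunk_buffer[512] = { 0 };

        while (size) {
                size_t chunk_size = (size < 512) ? size : 512;

                memcpy(chunk_buffer, firmware, chunk_size);
                /* pad the transfer length to the next multiple of 32 */
                size_t xfer = (chunk_size + 31) & ~(size_t)31;
                int err = sdio_write(chunk_buffer, xfer);
                if (err)
                        return err;

                firmware += chunk_size;
                size -= chunk_size;
        }
        return 0;
}
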
/drivers/gpu/drm/i915/
i915_ttm_buddy_manager.c
75 GEM_BUG_ON(min_page_size < mm->chunk_size); in i915_ttm_buddy_man_alloc()
83 n_pages = size >> ilog2(mm->chunk_size); in i915_ttm_buddy_man_alloc()
286 u64 chunk_size) in i915_ttm_buddy_man_init() argument
296 err = drm_buddy_init(&bman->mm, size, chunk_size); in i915_ttm_buddy_man_init()
302 GEM_BUG_ON(default_page_size < chunk_size); in i915_ttm_buddy_man_init()
384 size, mm->chunk_size, in i915_ttm_buddy_man_reserve()
/drivers/gpu/drm/amd/display/dc/hubp/dcn21/
dcn21_hubp.c
153 CHUNK_SIZE, rq_regs->rq_regs_l.chunk_size, in hubp21_program_requestor()
162 CHUNK_SIZE_C, rq_regs->rq_regs_c.chunk_size, in hubp21_program_requestor()
275 CHUNK_SIZE, &rq_regs.rq_regs_l.chunk_size, in hubp21_validate_dml_output()
284 CHUNK_SIZE_C, &rq_regs.rq_regs_c.chunk_size, in hubp21_validate_dml_output()
308 if (rq_regs.rq_regs_l.chunk_size != dml_rq_regs->rq_regs_l.chunk_size) in hubp21_validate_dml_output()
310 dml_rq_regs->rq_regs_l.chunk_size, rq_regs.rq_regs_l.chunk_size); in hubp21_validate_dml_output()
333 if (rq_regs.rq_regs_c.chunk_size != dml_rq_regs->rq_regs_c.chunk_size) in hubp21_validate_dml_output()
335 dml_rq_regs->rq_regs_c.chunk_size, rq_regs.rq_regs_c.chunk_size); in hubp21_validate_dml_output()
/drivers/net/wireless/ath/wcn36xx/
dxe.c
239 int i, chunk_size = pool->chunk_size; in wcn36xx_dxe_init_tx_bd() local
250 bd_phy_addr += chunk_size; in wcn36xx_dxe_init_tx_bd()
251 bd_cpu_addr += chunk_size; in wcn36xx_dxe_init_tx_bd()
694 wcn->mgmt_mem_pool.chunk_size = WCN36XX_BD_CHUNK_SIZE + in wcn36xx_dxe_allocate_mem_pools()
697 s = wcn->mgmt_mem_pool.chunk_size * WCN36XX_DXE_CH_DESC_NUMB_TX_H; in wcn36xx_dxe_allocate_mem_pools()
709 wcn->data_mem_pool.chunk_size = WCN36XX_BD_CHUNK_SIZE + in wcn36xx_dxe_allocate_mem_pools()
712 s = wcn->data_mem_pool.chunk_size * WCN36XX_DXE_CH_DESC_NUMB_TX_L; in wcn36xx_dxe_allocate_mem_pools()
732 dma_free_coherent(wcn->dev, wcn->mgmt_mem_pool.chunk_size * in wcn36xx_dxe_free_mem_pools()
738 dma_free_coherent(wcn->dev, wcn->data_mem_pool.chunk_size * in wcn36xx_dxe_free_mem_pools()
/drivers/gpu/drm/vmwgfx/
vmwgfx_cmd.c
391 uint32_t chunk_size = max - next_cmd; in vmw_fifo_res_copy() local
396 if (bytes < chunk_size) in vmw_fifo_res_copy()
397 chunk_size = bytes; in vmw_fifo_res_copy()
401 memcpy(fifo_mem + (next_cmd >> 2), buffer, chunk_size); in vmw_fifo_res_copy()
402 rest = bytes - chunk_size; in vmw_fifo_res_copy()
404 memcpy(fifo_mem + (min >> 2), buffer + (chunk_size >> 2), rest); in vmw_fifo_res_copy()
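
Note: vmw_fifo_res_copy() stages commands into a ring of 32-bit words; whatever does not fit before the ring's end (max) is continued at its start (min), hence the two memcpy() calls. A sketch of that wrap-around copy, assuming all offsets and sizes are byte counts that are multiples of 4, matching the u32-indexed FIFO.

#include <stdint.h>
#include <string.h>

static void fifo_copy_sketch(uint32_t *fifo_mem, const uint32_t *buffer,
                             uint32_t bytes, uint32_t next_cmd,
                             uint32_t min, uint32_t max)
{
        uint32_t chunk_size = max - next_cmd;   /* room before the wrap point */

        if (bytes < chunk_size)
                chunk_size = bytes;

        memcpy(fifo_mem + (next_cmd >> 2), buffer, chunk_size);

        uint32_t rest = bytes - chunk_size;
        if (rest)
                memcpy(fifo_mem + (min >> 2), buffer + (chunk_size >> 2), rest);
}
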
/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/
pool.c
74 xsk->chunk_size = xsk_pool_get_chunk_size(pool); in mlx5e_build_xsk_param()
104 const char *recommendation = is_power_of_2(xsk.chunk_size) ? in mlx5e_xsk_enable_locked()
108 xsk.chunk_size, recommendation); in mlx5e_xsk_enable_locked()
setup.c
34 if ((size_t)xsk->chunk_size > PAGE_SIZE || xsk->chunk_size < MLX5E_MIN_XSK_CHUNK_SIZE) { in mlx5e_validate_xsk_param()
35 mlx5_core_err(mdev, "XSK chunk size %u out of bounds [%u, %lu]\n", xsk->chunk_size, in mlx5e_validate_xsk_param()

