/drivers/media/test-drivers/vivid/

vivid-vid-common.c
    41  .buffers = 1,
    50  .buffers = 1,
    58  .buffers = 1,
    66  .buffers = 1,
    74  .buffers = 1,
    82  .buffers = 1,
    90  .buffers = 1,
    98  .buffers = 1,
   106  .buffers = 1,
   114  .buffers = 1,
   [all …]
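
Every vivid hit above is a field in a static table of format descriptors: each entry uses a designated initializer to declare how many separate memory buffers that pixel format needs. A minimal userspace C sketch of that table-of-formats pattern follows; the struct, its field set and the FOURCC macro are illustrative stand-ins, not vivid's actual definitions.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical format descriptor; the real vivid struct has many more fields. */
struct fmt_desc {
    uint32_t fourcc;      /* pixel format code */
    unsigned int planes;  /* number of colour planes */
    unsigned int buffers; /* number of distinct memory buffers per frame */
};

#define FOURCC(a, b, c, d) \
    ((uint32_t)(a) | ((uint32_t)(b) << 8) | ((uint32_t)(c) << 16) | ((uint32_t)(d) << 24))

static const struct fmt_desc formats[] = {
    { .fourcc = FOURCC('Y', 'U', 'Y', 'V'), .planes = 1, .buffers = 1 },
    { .fourcc = FOURCC('N', 'V', '1', '2'), .planes = 2, .buffers = 1 },
    { .fourcc = FOURCC('Y', 'M', '1', '2'), .planes = 3, .buffers = 3 },
};

int main(void)
{
    for (size_t i = 0; i < sizeof(formats) / sizeof(formats[0]); i++)
        printf("fmt %zu: %u plane(s), %u buffer(s)\n",
               i, formats[i].planes, formats[i].buffers);
    return 0;
}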
|
/drivers/crypto/intel/qat/qat_common/

qat_bl.c
    30  bl->buffers[i].len, bl_dma_dir);  in qat_bl_free_bufl()
    40  blout->buffers[i].len,  in qat_bl_free_bufl()
   110  bufl->buffers[y].len = sg->length;  in __qat_bl_sgl_to_bufl()
   115  bufl->buffers[y].len -= left;  in __qat_bl_sgl_to_bufl()
   128  struct qat_alg_buf *buffers;  in __qat_bl_sgl_to_bufl()  local
   149  buffers = buflout->buffers;  in __qat_bl_sgl_to_bufl()
   151  buffers[i].addr = DMA_MAPPING_ERROR;  in __qat_bl_sgl_to_bufl()
   168  buffers[y].len = sg->length;  in __qat_bl_sgl_to_bufl()
   171  buffers[y].len -= left;  in __qat_bl_sgl_to_bufl()
   206  buflout->buffers[i].len,  in __qat_bl_sgl_to_bufl()
   [all …]
|
qat_bl.h
    24  struct qat_alg_buf buffers[];  member
    26  static_assert(offsetof(struct qat_alg_buf_list, buffers) == sizeof(struct qat_alg_buf_list_hdr),
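
The qat_bl.h hits show a flexible array member placed directly behind a header struct, with a static_assert guaranteeing there is no padding between the two, presumably so the C layout matches the descriptor format the hardware expects. A self-contained userspace sketch of that layout check; the structs below are invented stand-ins for the qat types, not their real definitions.

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

/* Illustrative header + entry types (not the real qat definitions). */
struct buf_entry {
    uint64_t addr;
    uint32_t len;
};

struct buf_list_hdr {
    uint64_t num_bufs;
};

struct buf_list {
    struct buf_list_hdr hdr;
    struct buf_entry buffers[];   /* flexible array member, as in qat_bl.h */
};

/*
 * Assert that the flexible array starts exactly where the header ends,
 * i.e. the compiler inserted no padding between the two.
 */
static_assert(offsetof(struct buf_list, buffers) == sizeof(struct buf_list_hdr),
              "buffers[] must start immediately after the header");

int main(void) { return 0; }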
|
/drivers/media/platform/qcom/iris/

iris_buffer.c
   240  struct iris_buffers *buffers = &inst->buffers[buffer_type];  in iris_fill_internal_buf_info()  local
   268  struct iris_buffers *buffers = &inst->buffers[buffer_type];  in iris_create_internal_buffer()  local
   272  if (!buffers->size)  in iris_create_internal_buffer()
   298  struct iris_buffers *buffers;  in iris_create_internal_buffers()  local
   311  buffers = &inst->buffers[internal_buf_type[i]];  in iris_create_internal_buffers()
   341  struct iris_buffers *buffers;  in iris_queue_internal_buffers()  local
   355  buffers = &inst->buffers[internal_buf_type[i]];  in iris_queue_internal_buffers()
   386  struct iris_buffers *buffers;  in iris_destroy_internal_buffers()  local
   400  buffers = &inst->buffers[internal_buf_type[i]];  in iris_destroy_internal_buffers()
   433  struct iris_buffers *buffers = &inst->buffers[buffer_type];  in iris_release_internal_buffers()  local
   [all …]
|
iris_vidc.c
   156  INIT_LIST_HEAD(&inst->buffers[BUF_BIN].list);  in iris_open()
   157  INIT_LIST_HEAD(&inst->buffers[BUF_ARP].list);  in iris_open()
   158  INIT_LIST_HEAD(&inst->buffers[BUF_COMV].list);  in iris_open()
   159  INIT_LIST_HEAD(&inst->buffers[BUF_NON_COMV].list);  in iris_open()
   160  INIT_LIST_HEAD(&inst->buffers[BUF_LINE].list);  in iris_open()
   161  INIT_LIST_HEAD(&inst->buffers[BUF_DPB].list);  in iris_open()
   162  INIT_LIST_HEAD(&inst->buffers[BUF_PERSIST].list);  in iris_open()
   163  INIT_LIST_HEAD(&inst->buffers[BUF_SCRATCH_1].list);  in iris_open()
   228  struct iris_buffers *buffers;  in iris_check_num_queued_internal_buffers()  local
   242  buffers = &inst->buffers[internal_buf_type[i]];  in iris_check_num_queued_internal_buffers()
   [all …]
|
iris_hfi_gen1_response.c
   159  inst->buffers[BUF_OUTPUT].min_count = iris_vpu_buf_count(inst, BUF_OUTPUT);  in iris_hfi_gen1_read_changed_params()
   160  inst->buffers[BUF_OUTPUT].size = pixmp_op->plane_fmt[0].sizeimage;  in iris_hfi_gen1_read_changed_params()
   163  v4l2_ctrl_s_ctrl(ctrl, inst->buffers[BUF_OUTPUT].min_count);  in iris_hfi_gen1_read_changed_params()
   166  dst_q->min_reqbufs_allocation = inst->buffers[BUF_OUTPUT].min_count;  in iris_hfi_gen1_read_changed_params()
   401  struct iris_buffers *buffers;  in iris_hfi_gen1_session_ftb_done()  local
   424  buffers = &inst->buffers[BUF_DPB];  in iris_hfi_gen1_session_ftb_done()
   425  if (!buffers)  in iris_hfi_gen1_session_ftb_done()
   429  list_for_each_entry(iter, &buffers->list, list) {  in iris_hfi_gen1_session_ftb_done()
|
iris_vdec.c
    40  inst->buffers[BUF_INPUT].min_count = iris_vpu_buf_count(inst, BUF_INPUT);  in iris_vdec_inst_init()
    41  inst->buffers[BUF_INPUT].size = f->fmt.pix_mp.plane_fmt[0].sizeimage;  in iris_vdec_inst_init()
    56  inst->buffers[BUF_OUTPUT].min_count = iris_vpu_buf_count(inst, BUF_OUTPUT);  in iris_vdec_inst_init()
    57  inst->buffers[BUF_OUTPUT].size = f->fmt.pix_mp.plane_fmt[0].sizeimage;  in iris_vdec_inst_init()
   217  inst->buffers[BUF_INPUT].min_count = iris_vpu_buf_count(inst, BUF_INPUT);  in iris_vdec_s_fmt()
   218  inst->buffers[BUF_INPUT].size = fmt->fmt.pix_mp.plane_fmt[0].sizeimage;  in iris_vdec_s_fmt()
   234  inst->buffers[BUF_OUTPUT].size = iris_get_buffer_size(inst, BUF_OUTPUT);  in iris_vdec_s_fmt()
   252  inst->buffers[BUF_OUTPUT].min_count = iris_vpu_buf_count(inst, BUF_OUTPUT);  in iris_vdec_s_fmt()
   253  inst->buffers[BUF_OUTPUT].size = fmt->fmt.pix_mp.plane_fmt[0].sizeimage;  in iris_vdec_s_fmt()
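
Across the four iris files, inst->buffers is an array indexed by a buffer-type enum: iris_open() gives every slot its own list head, and the decoder and firmware-response paths fill in per-type min_count and size values. A minimal userspace C sketch of that enum-indexed bookkeeping table; the enum values and fields below are simplified stand-ins, not the driver's full set.

#include <stddef.h>
#include <stdio.h>

/* Buffer categories; the real driver has more (BUF_BIN, BUF_ARP, ...). */
enum buf_type { BUF_INPUT, BUF_OUTPUT, BUF_DPB, BUF_MAX };

/* Per-type bookkeeping, mirroring the min_count/size assignments above. */
struct buffers_info {
    unsigned int min_count; /* how many buffers this type requires */
    size_t size;            /* size of each buffer in bytes */
};

struct instance {
    struct buffers_info buffers[BUF_MAX]; /* indexed by enum buf_type */
};

static void set_buf_info(struct instance *inst, enum buf_type type,
                         unsigned int min_count, size_t size)
{
    inst->buffers[type].min_count = min_count;
    inst->buffers[type].size = size;
}

int main(void)
{
    struct instance inst = { 0 };

    set_buf_info(&inst, BUF_INPUT, 4, 1 << 20);
    set_buf_info(&inst, BUF_OUTPUT, 8, 3 << 20);
    printf("output: %u buffers of %zu bytes\n",
           inst.buffers[BUF_OUTPUT].min_count,
           inst.buffers[BUF_OUTPUT].size);
    return 0;
}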
|
/drivers/iio/buffer/

industrialio-hw-consumer.c
    23  struct list_head buffers;  member
    58  list_for_each_entry(buf, &hwc->buffers, head) {  in iio_hw_consumer_get_buffer()
    72  list_add_tail(&buf->head, &hwc->buffers);  in iio_hw_consumer_get_buffer()
    94  INIT_LIST_HEAD(&hwc->buffers);  in iio_hw_consumer_alloc()
   116  list_for_each_entry(buf, &hwc->buffers, head)  in iio_hw_consumer_alloc()
   134  list_for_each_entry_safe(buf, n, &hwc->buffers, head)  in iio_hw_consumer_free()
   183  list_for_each_entry(buf, &hwc->buffers, head) {  in iio_hw_consumer_enable()
   192  list_for_each_entry_continue_reverse(buf, &hwc->buffers, head)  in iio_hw_consumer_enable()
   206  list_for_each_entry(buf, &hwc->buffers, head)  in iio_hw_consumer_disable()
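
The hw-consumer keeps its buffers on a plain struct list_head and walks them with the list_for_each_entry family, switching to the _safe variant when entries are freed during the walk. A kernel-style sketch of that ownership pattern follows (it only builds in a kernel tree; everything except the <linux/list.h> and slab helpers is an invented name).

#include <linux/errno.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/types.h>

/* Illustrative consumer that owns a list of buffers, as in the hits above. */
struct demo_buffer {
    struct list_head head;    /* links the buffer into the consumer's list */
    size_t length;
};

struct demo_consumer {
    struct list_head buffers; /* list of demo_buffer.head */
};

static void demo_consumer_init(struct demo_consumer *hwc)
{
    INIT_LIST_HEAD(&hwc->buffers);
}

static int demo_consumer_add(struct demo_consumer *hwc, size_t length)
{
    struct demo_buffer *buf = kzalloc(sizeof(*buf), GFP_KERNEL);

    if (!buf)
        return -ENOMEM;
    buf->length = length;
    list_add_tail(&buf->head, &hwc->buffers);
    return 0;
}

static void demo_consumer_free(struct demo_consumer *hwc)
{
    struct demo_buffer *buf, *n;

    /* _safe variant: the current entry is freed while iterating. */
    list_for_each_entry_safe(buf, n, &hwc->buffers, head) {
        list_del(&buf->head);
        kfree(buf);
    }
}

The list_for_each_entry_continue_reverse at line 192 is presumably the unwind path: when enabling a later buffer fails, it walks back over only the buffers that were already enabled.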
|
/drivers/staging/media/starfive/camss/

stf-capture.c
    79  struct stf_v_buf *output = &cap->buffers;  in stf_init_addrs()
   244  cap->buffers.state = STF_OUTPUT_OFF;  in stf_capture_init()
   245  cap->buffers.buf[0] = NULL;  in stf_capture_init()
   246  cap->buffers.buf[1] = NULL;  in stf_capture_init()
   247  cap->buffers.active_buf = 0;  in stf_capture_init()
   251  spin_lock_init(&cap->buffers.lock);  in stf_capture_init()
   398  buffers);  in stf_change_buffer()
   460  stf_change_buffer(&cap->buffers);  in stf_wr_irq_handler()
   461  stf_buf_done(&cap->buffers);  in stf_wr_irq_handler()
   479  stf_buf_done(&cap->buffers);  in stf_isp_irq_handler()
   [all …]
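
stf-capture keeps exactly two hardware buffer slots (buf[0]/buf[1]) plus an active_buf index, and the write-done interrupt calls stf_change_buffer()/stf_buf_done() to hand off the finished frame. Below is a minimal userspace sketch of a two-slot ping-pong handoff of this sort, without the driver's spinlock and state machine; the toggle-on-interrupt behaviour is an assumption drawn from the field names, not code taken from the driver.

#include <stdio.h>

/* Two hardware slots; active_buf says which one the device fills next. */
struct v_buf {
    void *buf[2];
    unsigned int active_buf;
};

/* Called from the frame-done path: hand back the finished slot, install the
 * next buffer in its place, and flip active_buf to the other slot. */
static void *change_buffer(struct v_buf *out, void *next)
{
    void *done = out->buf[out->active_buf];

    out->buf[out->active_buf] = next;
    out->active_buf ^= 1;
    return done;
}

int main(void)
{
    int a = 1, b = 2, c = 3;
    struct v_buf out = { .buf = { &a, &b }, .active_buf = 0 };

    printf("completed: %d\n", *(int *)change_buffer(&out, &c)); /* 1 */
    printf("completed: %d\n", *(int *)change_buffer(&out, &a)); /* 2 */
    return 0;
}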
|
/drivers/gpu/drm/imagination/

pvr_fw_trace.c
    33  for (u32 thread_nr = 0; thread_nr < ARRAY_SIZE(fw_trace->buffers); thread_nr++) {  in tracebuf_ctrl_init()
    36  struct pvr_fw_trace_buffer *trace_buffer = &fw_trace->buffers[thread_nr];  in tracebuf_ctrl_init()
    52  for (u32 thread_nr = 0; thread_nr < ARRAY_SIZE(fw_trace->buffers); thread_nr++) {  in pvr_fw_trace_init()
    53  struct pvr_fw_trace_buffer *trace_buffer = &fw_trace->buffers[thread_nr];  in pvr_fw_trace_init()
    87  ARRAY_SIZE(fw_trace->buffers));  in pvr_fw_trace_init()
    89  for (u32 thread_nr = 0; thread_nr < ARRAY_SIZE(fw_trace->buffers); thread_nr++) {  in pvr_fw_trace_init()
    92  struct pvr_fw_trace_buffer *trace_buffer = &fw_trace->buffers[thread_nr];  in pvr_fw_trace_init()
   101  struct pvr_fw_trace_buffer *trace_buffer = &fw_trace->buffers[thread_nr];  in pvr_fw_trace_init()
   115  struct pvr_fw_trace_buffer *trace_buffer = &fw_trace->buffers[thread_nr];  in pvr_fw_trace_fini()
   454  static_assert(ARRAY_SIZE(fw_trace->buffers) <= 10,  in pvr_fw_trace_debugfs_init()
   [all …]
|
/drivers/media/pci/ivtv/

ivtv-queue.c
    35  q->buffers = 0;  in ivtv_queue_init()
    53  q->buffers++;  in ivtv_enqueue()
    68  q->buffers--;  in ivtv_dequeue()
    82  from->buffers--;  in ivtv_queue_move_buf()
    88  to->buffers++;  in ivtv_queue_move_buf()
   143  steal->buffers--;  in ivtv_queue_move()
   147  from->buffers++;  in ivtv_queue_move()
   187  if (s->buffers == 0)  in ivtv_stream_alloc()
   192  s->name, s->buffers, s->buf_size, s->buffers * s->buf_size / 1024);  in ivtv_stream_alloc()
   228  for (i = 0; i < s->buffers; i++) {  in ivtv_stream_alloc()
   [all …]
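
ivtv tracks how many buffers each queue holds with a plain counter: enqueue and dequeue bump it, and moving a buffer between queues decrements the source and increments the destination. A tiny userspace sketch of that accounting; the structs and helper are invented for illustration, and the real queues of course carry the buffer list itself, not just the counts.

#include <stdio.h>

/* Minimal queue bookkeeping: only the counters matter for this sketch. */
struct demo_queue {
    unsigned int buffers;   /* number of buffers currently on this queue */
    unsigned int bytesused;
};

static void queue_init(struct demo_queue *q)
{
    q->buffers = 0;
    q->bytesused = 0;
}

/* Mirror of the move accounting in the hits above: every transfer
 * decrements the source count and increments the target count. */
static void queue_move_buf(struct demo_queue *from, struct demo_queue *to,
                           unsigned int bytes)
{
    from->buffers--;
    from->bytesused -= bytes;
    to->buffers++;
    to->bytesused += bytes;
}

int main(void)
{
    struct demo_queue free_q, full_q;

    queue_init(&free_q);
    queue_init(&full_q);
    free_q.buffers = 4; /* pretend four empty buffers were allocated */

    queue_move_buf(&free_q, &full_q, 0);
    printf("free=%u full=%u\n", free_q.buffers, full_q.buffers); /* 3 1 */
    return 0;
}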
|
/drivers/scsi/isci/

unsolicited_frame_control.c
   110  uf = &uf_control->buffers.array[i];  in sci_unsolicited_frame_control_construct()
   136  *frame_header = &uf_control->buffers.array[frame_index].header->data;  in sci_unsolicited_frame_control_get_header()
   149  *frame_buffer = uf_control->buffers.array[frame_index].buffer;  in sci_unsolicited_frame_control_get_buffer()
   184  uf_control->buffers.array[frame_index].state = UNSOLICITED_FRAME_RELEASED;  in sci_unsolicited_frame_control_release_frame()
   198  while (uf_control->buffers.array[frame_get].state == UNSOLICITED_FRAME_RELEASED) {  in sci_unsolicited_frame_control_release_frame()
   199  uf_control->buffers.array[frame_get].state = UNSOLICITED_FRAME_EMPTY;  in sci_unsolicited_frame_control_release_frame()
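
The isci hits show frames being released out of order: a frame is first only marked RELEASED, and the get pointer advances, reclaiming frames as EMPTY, only across a contiguous run of released entries (the while loop at line 198). A self-contained userspace sketch of that reclaim loop; the ring size, enum values and names are invented stand-ins.

#include <stdio.h>

#define NUM_FRAMES 4

enum frame_state { FRAME_EMPTY, FRAME_IN_USE, FRAME_RELEASED };

/* Frames may be released in any order, but "get" only advances across a
 * contiguous run of released frames, turning them back into EMPTY. */
struct frame_ring {
    enum frame_state state[NUM_FRAMES];
    unsigned int get; /* next frame slot that may be reused */
};

static void release_frame(struct frame_ring *ring, unsigned int index)
{
    ring->state[index] = FRAME_RELEASED;

    while (ring->state[ring->get] == FRAME_RELEASED) {
        ring->state[ring->get] = FRAME_EMPTY;
        ring->get = (ring->get + 1) % NUM_FRAMES;
    }
}

int main(void)
{
    struct frame_ring ring = { .get = 0 };

    for (int i = 0; i < NUM_FRAMES; i++)
        ring.state[i] = FRAME_IN_USE;

    release_frame(&ring, 1);          /* out of order: get stays at 0 */
    printf("get=%u\n", ring.get);     /* 0 */
    release_frame(&ring, 0);          /* now slots 0 and 1 are reclaimed */
    printf("get=%u\n", ring.get);     /* 2 */
    return 0;
}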
|
/drivers/android/tests/

binder_alloc_kunit.c
   166  struct binder_buffer *buffers[],  in binder_alloc_test_alloc_buf()  argument
   173  buffers[i] = binder_alloc_new_buf(alloc, sizes[i], 0, 0, 0);  in binder_alloc_test_alloc_buf()
   174  if (IS_ERR(buffers[i]) ||  in binder_alloc_test_alloc_buf()
   175  !check_buffer_pages_allocated(test, alloc, buffers[i], sizes[i]))  in binder_alloc_test_alloc_buf()
   184  struct binder_buffer *buffers[],  in binder_alloc_test_free_buf()  argument
   191  binder_alloc_free_buf(alloc, buffers[seq[i]]);  in binder_alloc_test_free_buf()
   235  struct binder_buffer *buffers[BUFFER_NUM];  in binder_alloc_test_alloc_free()  local
   239  failures = binder_alloc_test_alloc_buf(test, alloc, buffers,  in binder_alloc_test_alloc_free()
   247  failures = binder_alloc_test_free_buf(test, alloc, buffers,  in binder_alloc_test_alloc_free()
   256  failures = binder_alloc_test_alloc_buf(test, alloc, buffers,  in binder_alloc_test_alloc_free()
   [all …]
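
The kunit test allocates BUFFER_NUM buffers of given sizes, frees them in a caller-supplied order (seq[]), then allocates again, presumably to exercise different free orderings against the binder allocator. A much-reduced userspace analogue of that alloc / free-in-permuted-order / re-alloc sequence, using the C library allocator in place of binder_alloc and dropping all the KUnit plumbing:

#include <assert.h>
#include <stdio.h>
#include <stdlib.h>

#define BUFFER_NUM 4

/* Allocate one buffer per entry in sizes[]; returns 0 on success. */
static int test_alloc(void *buffers[], const size_t sizes[])
{
    for (int i = 0; i < BUFFER_NUM; i++) {
        buffers[i] = malloc(sizes[i]);
        if (!buffers[i])
            return -1;
    }
    return 0;
}

/* Free the buffers in the order given by seq[], mirroring how the kunit
 * test exercises different free orderings. */
static void test_free(void *buffers[], const int seq[])
{
    for (int i = 0; i < BUFFER_NUM; i++)
        free(buffers[seq[i]]);
}

int main(void)
{
    void *buffers[BUFFER_NUM];
    const size_t sizes[BUFFER_NUM] = { 32, 64, 128, 256 };
    const int seq[BUFFER_NUM] = { 2, 0, 3, 1 }; /* one permuted free order */

    assert(test_alloc(buffers, sizes) == 0);
    test_free(buffers, seq);

    /* Allocate again to confirm the allocator survived the odd free order. */
    assert(test_alloc(buffers, sizes) == 0);
    test_free(buffers, seq);
    puts("ok");
    return 0;
}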
|
/drivers/gpu/drm/i915/gem/selftests/

i915_gem_client_blt.c
   102  struct blit_buffer buffers[3];  member
   262  for (i = 0; i < ARRAY_SIZE(t->buffers); i++)  in tiled_blits_destroy_buffers()
   263  i915_vma_put(t->buffers[i].vma);  in tiled_blits_destroy_buffers()
   315  for (i = 0; i < ARRAY_SIZE(t->buffers); i++) {  in tiled_blits_create_buffers()
   324  t->buffers[i].vma = vma;  in tiled_blits_create_buffers()
   325  t->buffers[i].tiling =  in tiled_blits_create_buffers()
   615  &t->buffers[i], t->hole + offset,  in tiled_blits_prepare()
   637  &t->buffers[1], t->hole + offset / 2,  in tiled_blits_bounce()
   638  &t->buffers[0], t->hole + 2 * offset);  in tiled_blits_bounce()
   643  t->buffers[2].tiling = t->buffers[0].tiling;  in tiled_blits_bounce()
   [all …]
|
/drivers/media/usb/pvrusb2/

pvrusb2-io.c
    49  struct pvr2_buffer **buffers;  member
   305  memcpy(nb, sp->buffers,  in pvr2_stream_buffer_count()
   307  kfree(sp->buffers);  in pvr2_stream_buffer_count()
   309  sp->buffers = nb;  in pvr2_stream_buffer_count()
   321  sp->buffers[sp->buffer_total_count] = bp;  in pvr2_stream_buffer_count()
   328  bp = sp->buffers[sp->buffer_total_count - 1];  in pvr2_stream_buffer_count()
   330  sp->buffers[sp->buffer_total_count - 1] = NULL;  in pvr2_stream_buffer_count()
   338  nb = kmemdup_array(sp->buffers, scnt, sizeof(*nb),  in pvr2_stream_buffer_count()
   342  kfree(sp->buffers);  in pvr2_stream_buffer_count()
   343  sp->buffers = nb;  in pvr2_stream_buffer_count()
   [all …]
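
pvr2_stream_buffer_count() resizes a bare array of buffer pointers: it allocates a new array, copies the surviving slots (memcpy() on one path, kmemdup_array() on the other), swaps the pointer in and frees the old array. A userspace sketch of the same resize-by-copy-and-swap, using calloc/memcpy/free in place of the kernel allocators; the struct and function names are invented.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct stream {
    void **buffers;           /* array of buffer pointers */
    unsigned int total_count; /* how many slots are populated */
    unsigned int alloc_count; /* how many slots the array holds */
};

/* Grow or shrink the pointer array: allocate a new array, copy the slots
 * that survive, then swap the pointer and free the old array. */
static int stream_set_capacity(struct stream *sp, unsigned int count)
{
    void **nb = NULL;

    if (count) {
        nb = calloc(count, sizeof(*nb));
        if (!nb)
            return -1;
        if (sp->total_count)
            memcpy(nb, sp->buffers,
                   (count < sp->total_count ? count : sp->total_count) *
                   sizeof(*nb));
    }
    free(sp->buffers);
    sp->buffers = nb;
    sp->alloc_count = count;
    if (sp->total_count > count)
        sp->total_count = count;
    return 0;
}

int main(void)
{
    struct stream sp = { 0 };

    if (stream_set_capacity(&sp, 8) == 0)
        printf("capacity now %u\n", sp.alloc_count);
    stream_set_capacity(&sp, 0); /* release everything */
    return 0;
}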
|
/drivers/infiniband/hw/hfi1/

init.c
   414  rcd->egrbufs.buffers =  in hfi1_create_ctxtdata()
   418  if (!rcd->egrbufs.buffers)  in hfi1_create_ctxtdata()
  1108  if (rcd->egrbufs.buffers[e].addr)  in hfi1_free_ctxtdata()
  1110  rcd->egrbufs.buffers[e].len,  in hfi1_free_ctxtdata()
  1111  rcd->egrbufs.buffers[e].addr,  in hfi1_free_ctxtdata()
  1114  kfree(rcd->egrbufs.buffers);  in hfi1_free_ctxtdata()
  1116  rcd->egrbufs.buffers = NULL;  in hfi1_free_ctxtdata()
  1846  rcd->egrbufs.buffers[idx].addr =  in hfi1_setup_eagerbufs()
  1852  rcd->egrbufs.buffers[idx].len =  in hfi1_setup_eagerbufs()
  1855  rcd->egrbufs.buffers[idx].addr;  in hfi1_setup_eagerbufs()
   [all …]
|
/drivers/media/pci/cx18/

cx18-queue.c
   337  if (s->buffers == 0)  in cx18_stream_alloc()
   341  s->name, s->buffers, s->buf_size,  in cx18_stream_alloc()
   342  s->buffers * s->buf_size / 1024,  in cx18_stream_alloc()
   343  (s->buffers * s->buf_size * 100 / 1024) % 100);  in cx18_stream_alloc()
   345  if (((char __iomem *)&cx->scb->cpu_mdl[cx->free_mdl_idx + s->buffers] -  in cx18_stream_alloc()
   359  for (i = 0; i < s->buffers; i++) {  in cx18_stream_alloc()
   394  if (i == s->buffers) {  in cx18_stream_alloc()
   395  cx->free_mdl_idx += s->buffers;  in cx18_stream_alloc()
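
Lines 342-343 print the total buffer memory in kB with two decimal places using integer arithmetic only: divide by 1024 for the whole part, then scale by 100 before dividing and take the remainder for the fraction. A small worked example of the same trick:

#include <stdio.h>

/* Print a byte total as "X.YY kB" without floating point, exactly the
 * pattern used at lines 342-343 above. */
static void print_kb(unsigned long buffers, unsigned long buf_size)
{
    unsigned long bytes = buffers * buf_size;

    printf("%lu buffers of %lu bytes = %lu.%02lu kB\n",
           buffers, buf_size, bytes / 1024, (bytes * 100 / 1024) % 100);
}

int main(void)
{
    print_kb(63, 32768); /* 2016.00 kB */
    print_kb(5, 11520);  /* 56.25 kB  */
    return 0;
}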
|
/drivers/atm/

nicstarmac.copyright
    25  * IDT always receives data into a small buffer, then large buffers
    28  * Fix is simple: make large buffers large enough to hold entire
    32  * buffers. This is done by 2 things:
    35  * recycle large data buffers
    36  * 2) skb_clone of received buffers
|
/drivers/crypto/ccree/

cc_hash.h
    38  u8 buffers[2][CC_MAX_HASH_BLCK_SIZE] ____cacheline_aligned;  member
    66  return state->buffers[state->buff_index];  in cc_hash_buf()
    76  return state->buffers[state->buff_index ^ 1];  in cc_next_buf()
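
cc_hash keeps two staging blocks and a one-bit index: cc_hash_buf() returns the slot currently in use and cc_next_buf() the other one, simply by XORing the index with 1. A minimal userspace sketch of that two-slot toggle; the block size and names are placeholders, not the ccree definitions.

#include <stdint.h>
#include <stdio.h>

#define BLOCK_SIZE 64 /* stand-in for CC_MAX_HASH_BLCK_SIZE */

/* Two staging buffers; buff_index (0 or 1) selects the one being filled,
 * and XOR with 1 yields the other, as in cc_hash_buf()/cc_next_buf(). */
struct hash_state {
    uint8_t buffers[2][BLOCK_SIZE];
    unsigned int buff_index;
};

static uint8_t *cur_buf(struct hash_state *st)  { return st->buffers[st->buff_index]; }
static uint8_t *next_buf(struct hash_state *st) { return st->buffers[st->buff_index ^ 1]; }

int main(void)
{
    struct hash_state st = { .buff_index = 0 };
    uint8_t *cur = cur_buf(&st);
    uint8_t *next = next_buf(&st);

    printf("distinct slots: %s\n", cur != next ? "yes" : "no");

    st.buff_index ^= 1; /* swap roles for the next update */
    printf("swapped: %s\n", cur_buf(&st) == next ? "yes" : "no");
    return 0;
}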
|
/drivers/platform/goldfish/

goldfish_pipe.c
   201  struct goldfish_pipe_dev_buffers *buffers;  member
   640  dev->buffers->signalled_pipe_buffers[i].id,  in goldfish_pipe_interrupt()
   641  dev->buffers->signalled_pipe_buffers[i].flags);  in goldfish_pipe_interrupt()
   736  dev->buffers->open_command_params.rw_params_max_count =  in goldfish_pipe_open()
   738  dev->buffers->open_command_params.command_buffer_ptr =  in goldfish_pipe_open()
   843  dev->buffers = (struct goldfish_pipe_dev_buffers *)  in goldfish_pipe_device_init()
   845  if (!dev->buffers) {  in goldfish_pipe_device_init()
   852  write_pa_addr(&dev->buffers->signalled_pipe_buffers,  in goldfish_pipe_device_init()
   859  write_pa_addr(&dev->buffers->open_command_params,  in goldfish_pipe_device_init()
   872  free_page((unsigned long)dev->buffers);  in goldfish_pipe_device_deinit()
|
/drivers/android/

binder_alloc.c
    64  if (list_is_last(&buffer->entry, &alloc->buffers))  in binder_alloc_buffer_size()
   720  BUG_ON(alloc->buffers.next == &buffer->entry);  in binder_delete_free_buffer()
   780  if (alloc->buffers.next != &buffer->entry) {  in binder_free_buf_locked()
   936  list_add(&buffer->entry, &alloc->buffers);  in binder_alloc_mmap_handler()
   967  int buffers, page_count;  in binder_alloc_deferred_release()  local
   970  buffers = 0;  in binder_alloc_deferred_release()
   985  buffers++;  in binder_alloc_deferred_release()
   988  while (!list_empty(&alloc->buffers)) {  in binder_alloc_deferred_release()
   989  buffer = list_first_entry(&alloc->buffers,  in binder_alloc_deferred_release()
   994  WARN_ON_ONCE(!list_empty(&alloc->buffers));  in binder_alloc_deferred_release()
   [all …]
|
/drivers/media/platform/st/stm32/stm32-dcmipp/

dcmipp-bytecap.c
   121  struct list_head buffers;  member
   153  u32 buffers;  member
   377  list_for_each_entry_safe(buf, node, &vcap->buffers, list) {  in dcmipp_bytecap_all_buffers_done()
   435  vcap->next = list_first_entry(&vcap->buffers, typeof(*buf), list);  in dcmipp_bytecap_start_streaming()
   519  INIT_LIST_HEAD(&vcap->buffers);  in dcmipp_bytecap_stop_streaming()
   534  vcap->count.underrun, vcap->count.buffers);  in dcmipp_bytecap_stop_streaming()
   580  list_add_tail(&buf->list, &vcap->buffers);  in dcmipp_bytecap_buf_queue()
   677  vcap->count.buffers++;  in dcmipp_buffer_done()
   684  if (!vcap->next && list_is_singular(&vcap->buffers)) {  in dcmipp_bytecap_set_next_frame_or_stop()
   918  INIT_LIST_HEAD(&vcap->buffers);  in dcmipp_bytecap_ent_init()
|
/drivers/char/xillybus/

xillyusb.c
    94  struct list_head buffers;  member
   476  list_for_each_safe(this, next, &ep->buffers) {  in endpoint_dealloc()
   503  INIT_LIST_HEAD(&ep->buffers);  in endpoint_alloc()
   542  list_add_tail(&xb->entry, &ep->buffers);  in endpoint_alloc()
   659  list_add_tail(&xb->entry, &ep->buffers);  in bulk_in_completer()
   689  list_add_tail(&xb->entry, &ep->buffers);  in bulk_out_completer()
   715  if (list_empty(&ep->buffers)) {  in try_queue_bulk_in()
   755  list_add_tail(&xb->entry, &ep->buffers);  in try_queue_bulk_in()
   810  list_empty(&ep->buffers)) {  in try_queue_bulk_out()
   862  list_add_tail(&xb->entry, &ep->buffers);  in try_queue_bulk_out()
   [all …]
|
/drivers/staging/greybus/

camera.c
    61  struct gb_camera_debugfs_buffer *buffers;  member
   870  &gcam->debugfs.buffers[GB_CAMERA_DEBUGFS_BUFFER_CAPABILITIES];  in gb_camera_debugfs_capabilities()
   906  &gcam->debugfs.buffers[GB_CAMERA_DEBUGFS_BUFFER_STREAMS];  in gb_camera_debugfs_configure_streams()
  1041  &gcam->debugfs.buffers[GB_CAMERA_DEBUGFS_BUFFER_FLUSH];  in gb_camera_debugfs_flush()
  1100  buffer = &gcam->debugfs.buffers[op->buffer];  in gb_camera_debugfs_read()
  1155  gcam->debugfs.buffers =  in gb_camera_debugfs_init()
  1157  sizeof(*gcam->debugfs.buffers)));  in gb_camera_debugfs_init()
  1158  if (!gcam->debugfs.buffers)  in gb_camera_debugfs_init()
  1165  gcam->debugfs.buffers[i].length = 0;  in gb_camera_debugfs_init()
  1179  vfree(gcam->debugfs.buffers);  in gb_camera_debugfs_cleanup()
|
/drivers/net/ethernet/ibm/emac/

Kconfig
    13  int "Number of receive buffers"
    18  int "Number of transmit buffers"
|