/drivers/net/ethernet/sfc/

ef100_tx.c  [all matches in __ef100_enqueue_skb()]
    383  unsigned int segments;    [local]
    392  segments = skb_is_gso(skb) ? skb_shinfo(skb)->gso_segs : 0;
    393  if (segments == 1)
    394  segments = 0; /* Don't use TSO/GSO for a single segment. */
    395  if (segments && !ef100_tx_can_tso(tx_queue, skb)) {
    448  rc = efx_tx_map_data(tx_queue, skb, segments);
    451  ef100_tx_make_descriptors(tx_queue, skb, segments, efv);
    489  if (segments) {
    491  tx_queue->tso_packets += segments;
    492  tx_queue->tx_packets += segments;

tx.c  [all matches in __efx_enqueue_skb()]
    319  unsigned int segments;    [local]
    324  segments = skb_is_gso(skb) ? skb_shinfo(skb)->gso_segs : 0;
    325  if (segments == 1)
    326  segments = 0; /* Don't use TSO for a single segment. */
    332  if (segments) {
    371  if (!data_mapped && (efx_tx_map_data(tx_queue, skb, segments)))
    382  if (segments) {
    384  tx_queue->tso_packets += segments;
    385  tx_queue->tx_packets += segments;

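The sfc entries above repeat one idiom: derive the wire-packet count from gso_segs, treat a single segment as a plain packet, and bump the TX counters by the segment count. A minimal sketch of that idiom follows; demo_tx_queue and demo_count_tx() are hypothetical stand-ins for the driver's real efx_tx_queue and enqueue path, not sfc code.

#include <linux/skbuff.h>

struct demo_tx_queue {			/* hypothetical stand-in */
	unsigned long tso_packets;
	unsigned long tx_packets;
};

static void demo_count_tx(struct demo_tx_queue *txq, struct sk_buff *skb)
{
	unsigned int segments;

	segments = skb_is_gso(skb) ? skb_shinfo(skb)->gso_segs : 0;
	if (segments == 1)
		segments = 0;	/* a single segment is not worth TSO */

	if (segments) {
		txq->tso_packets += segments;
		txq->tx_packets += segments;
	} else {
		txq->tx_packets++;
	}
}
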
tx_common.c  [all matches in efx_tx_tso_fallback()]
    474  struct sk_buff *segments, *next;    [local]
    476  segments = skb_gso_segment(skb, 0);
    477  if (IS_ERR(segments))
    478  return PTR_ERR(segments);
    482  skb_list_walk_safe(segments, skb, next) {

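efx_tx_tso_fallback() (like its siena counterpart below) uses the generic software-GSO fallback: segment the skb in software and feed the resulting packets back into the normal transmit path. A sketch of that pattern, assuming a hypothetical demo_xmit_one() in place of the driver's per-packet enqueue routine:

#include <linux/err.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>

static void demo_xmit_one(struct sk_buff *skb);	/* hypothetical enqueue hook */

static int demo_tso_fallback(struct sk_buff *skb)
{
	struct sk_buff *segments, *next;

	/* Ask the stack to segment the GSO skb in software. */
	segments = skb_gso_segment(skb, 0);
	if (IS_ERR(segments))
		return PTR_ERR(segments);

	/* The original super-packet is no longer needed. */
	dev_consume_skb_any(skb);

	/* Hand each resulting packet back to the normal transmit path. */
	skb_list_walk_safe(segments, skb, next) {
		skb_mark_not_on_list(skb);
		demo_xmit_one(skb);
	}
	return 0;
}
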
/drivers/net/ethernet/sfc/siena/

tx.c  [all matches in __efx_siena_enqueue_skb()]
    145  unsigned int segments;    [local]
    150  segments = skb_is_gso(skb) ? skb_shinfo(skb)->gso_segs : 0;
    151  if (segments == 1)
    152  segments = 0; /* Don't use TSO for a single segment. */
    158  if (segments) {
    173  if (!data_mapped && (efx_siena_tx_map_data(tx_queue, skb, segments)))

tx_common.c  [all matches in efx_siena_tx_tso_fallback()]
    434  struct sk_buff *segments, *next;    [local]
    436  segments = skb_gso_segment(skb, 0);
    437  if (IS_ERR(segments))
    438  return PTR_ERR(segments);
    442  skb_list_walk_safe(segments, skb, next) {

/drivers/gpu/drm/

drm_panic_qr.rs
    211  fn from_segments(segments: &[&Segment<'_>]) -> Option<Version> {    [in from_segments()]
    214  .find(|&v| v.max_data() * 8 >= segments.iter().map(|s| s.total_size_bits(v)).sum())    [in from_segments()]
    466  fn new<'a>(segments: &[&Segment<'_>], data: &'a mut [u8]) -> Option<EncodedMsg<'a>> {    [in new()]
    467  let version = Version::from_segments(segments)?;    [in new()]
    488  em.encode(segments);    [in new()]
    523  fn add_segments(&mut self, segments: &[&Segment<'_>]) {    [in add_segments()]
    526  for s in segments.iter() {    [in add_segments()]
    575  fn encode(&mut self, segments: &[&Segment<'_>]) {    [in encode()]
    576  self.add_segments(segments);    [in encode()]
    950  let segments = &[    [in drm_panic_qr_generate()]
    [all …]

/drivers/dma/xilinx/

xilinx_dma.c
     374  struct list_head segments;    [member]
     650  seg = list_first_entry(&desc->segments,    [in xilinx_dma_get_metadata_ptr()]
     842  INIT_LIST_HEAD(&desc->segments);    [in xilinx_dma_alloc_tx_descriptor()]
     871  &desc->segments, node) {    [in xilinx_dma_free_tx_descriptor()]
     877  &desc->segments, node) {    [in xilinx_dma_free_tx_descriptor()]
     883  &desc->segments, node) {    [in xilinx_dma_free_tx_descriptor()]
     993  list_for_each(entry, &desc->segments) {    [in xilinx_dma_get_residue()]
    1727  seg = list_last_entry(&desc->segments,    [in xilinx_dma_complete_descriptor()]
    1966  list_last_entry(&tail_desc->segments,    [in append_desc_queue()]
    2108  segment = list_first_entry(&desc->segments,    [in xilinx_vdma_dma_prep_interleaved()]
    [all …]

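The Xilinx DMA entries show the descriptor-owns-a-list-of-segments pattern: each transfer descriptor keeps a struct list_head of hardware segments and the driver walks it with the standard list helpers. A minimal sketch of that bookkeeping, with simplified stand-in structs rather than the real Xilinx definitions:

#include <linux/list.h>
#include <linux/slab.h>
#include <linux/types.h>

struct demo_segment {			/* hypothetical hardware segment */
	struct list_head node;
	dma_addr_t phys;		/* bus address of the descriptor */
};

struct demo_tx_descriptor {		/* hypothetical transfer descriptor */
	struct list_head segments;
};

static struct demo_tx_descriptor *demo_alloc_desc(void)
{
	struct demo_tx_descriptor *desc = kzalloc(sizeof(*desc), GFP_KERNEL);

	if (desc)
		INIT_LIST_HEAD(&desc->segments);
	return desc;
}

static void demo_free_desc(struct demo_tx_descriptor *desc)
{
	struct demo_segment *seg, *next;

	/* Release every segment still linked into the descriptor. */
	list_for_each_entry_safe(seg, next, &desc->segments, node) {
		list_del(&seg->node);
		kfree(seg);
	}
	kfree(desc);
}
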
/drivers/net/ovpn/

io.c  [all matches in ovpn_net_xmit()]
    356  struct sk_buff *segments, *curr, *next;    [local]
    371  segments = skb_gso_segment(skb, 0);
    372  if (IS_ERR(segments)) {
    373  ret = PTR_ERR(segments);
    380  skb = segments;

/drivers/media/dvb-frontends/

mb86a20s.c  [all matches in interpolate_value()]
    1351  if (value >= segments[0].x)
    1352  return segments[0].y;
    1353  if (value < segments[len-1].x)
    1354  return segments[len-1].y;
    1358  if (value == segments[i].x)
    1359  return segments[i].y;
    1360  if (value > segments[i].x)
    1365  dy = segments[i].y - segments[i - 1].y;
    1366  dx = segments[i - 1].x - segments[i].x;
    1367  tmp64 = value - segments[i].x;
    [all …]

dib7000p.c  [all matches in interpolate_value()]
    1765  if (value >= segments[0].x)
    1766  return segments[0].y;
    1767  if (value < segments[len-1].x)
    1768  return segments[len-1].y;
    1772  if (value == segments[i].x)
    1773  return segments[i].y;
    1774  if (value > segments[i].x)
    1779  dy = segments[i - 1].y - segments[i].y;
    1780  dx = segments[i - 1].x - segments[i].x;
    1782  tmp64 = value - segments[i].x;
    [all …]

dib8000.c  [all matches in interpolate_value()]
    3986  if (value >= segments[0].x)
    3987  return segments[0].y;
    3988  if (value < segments[len-1].x)
    3989  return segments[len-1].y;
    3993  if (value == segments[i].x)
    3994  return segments[i].y;
    3995  if (value > segments[i].x)
    4000  dy = segments[i - 1].y - segments[i].y;
    4001  dx = segments[i - 1].x - segments[i].x;
    4003  tmp64 = value - segments[i].x;
    [all …]

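mb86a20s, dib7000p and dib8000 each carry a near-identical interpolate_value() helper: a piecewise-linear lookup over a segments[] table sorted by descending x, clamped at both ends. A self-contained sketch of that technique; the sign conventions differ slightly between the three drivers, and the struct and helper names here are illustrative stand-ins:

#include <linux/math64.h>
#include <linux/types.h>

struct linear_segment {			/* x sorted in descending order */
	s64 x;
	s64 y;
};

static s64 demo_interpolate(s64 value, const struct linear_segment *segments,
			    unsigned int len)
{
	s64 dx, dy;
	unsigned int i;

	/* Clamp to the end points of the table. */
	if (value >= segments[0].x)
		return segments[0].y;
	if (value < segments[len - 1].x)
		return segments[len - 1].y;

	/* Find the first entry whose x is at or below the value. */
	for (i = 1; i < len - 1; i++) {
		if (value == segments[i].x)
			return segments[i].y;
		if (value > segments[i].x)
			break;
	}

	/* Linear interpolation between segments[i - 1] and segments[i]. */
	dy = segments[i].y - segments[i - 1].y;
	dx = segments[i].x - segments[i - 1].x;
	return segments[i - 1].y +
	       div64_s64(dy * (value - segments[i - 1].x), dx);
}
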
/drivers/block/xen-blkback/

blkback.c
     707  struct grant_page **pages = req->segments;    [in xen_blkbk_unmap_and_respond()]
     921  struct blkif_request_segment *segments = NULL;    [local, in xen_blkbk_parse_indirect()]
     939  if (segments)    [in xen_blkbk_parse_indirect()]
     940  kunmap_atomic(segments);    [in xen_blkbk_parse_indirect()]
     945  pending_req->segments[n]->gref = segments[i].gref;    [in xen_blkbk_parse_indirect()]
     947  first_sect = READ_ONCE(segments[i].first_sect);    [in xen_blkbk_parse_indirect()]
     948  last_sect = READ_ONCE(segments[i].last_sect);    [in xen_blkbk_parse_indirect()]
     960  if (segments)    [in xen_blkbk_parse_indirect()]
     961  kunmap_atomic(segments);    [in xen_blkbk_parse_indirect()]
    1304  struct grant_page **pages = pending_req->segments;    [in dispatch_rw_block_io()]
    [all …]

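In xen_blkbk_parse_indirect() the segment descriptors live in a page shared with the guest, so the backend snapshots each field with READ_ONCE() and then validates the snapshot, guarding against the frontend changing the values between check and use. A small sketch of that double-fetch defence, using simplified stand-in structures and limits rather than the real blkif definitions:

#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/types.h>

struct demo_seg {			/* lives in a page shared with the frontend */
	u8 first_sect;
	u8 last_sect;
};

#define DEMO_SECTS_PER_PAGE 8		/* 512-byte sectors per 4 KiB page */

static int demo_parse_segment(const struct demo_seg *shared,
			      unsigned int *nr_sects)
{
	/* Snapshot each field exactly once ... */
	u8 first_sect = READ_ONCE(shared->first_sect);
	u8 last_sect = READ_ONCE(shared->last_sect);

	/* ... and validate the snapshot, never the live shared memory. */
	if (last_sect >= DEMO_SECTS_PER_PAGE || last_sect < first_sect)
		return -EINVAL;

	*nr_sects = last_sect - first_sect + 1;
	return 0;
}
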
xenbus.c
     313  kfree(req->segments[j]);    [in xen_blkif_disconnect()]
    1018  req->segments[j] = kzalloc(sizeof(*req->segments[0]), GFP_KERNEL);    [in read_per_ring_refs()]
    1019  if (!req->segments[j])    [in read_per_ring_refs()]
    1043  if (!req->segments[j])    [in read_per_ring_refs()]
    1045  kfree(req->segments[j]);    [in read_per_ring_refs()]

common.h
    352  struct grant_page *segments[MAX_INDIRECT_SEGMENTS];    [member]

/drivers/scsi/mpi3mr/

mpi3mr_fw.c
     519  struct segments *segments = op_reply_q->q_segments;    [local, in mpi3mr_get_reply_desc()]
    1829  struct segments *segments;    [local, in mpi3mr_free_op_req_q_segments()]
    1832  if (!segments)    [in mpi3mr_free_op_req_q_segments()]
    1852  size, segments[j].segment, segments[j].segment_dma);    [in mpi3mr_free_op_req_q_segments()]
    1873  struct segments *segments;    [local, in mpi3mr_free_op_reply_q_segments()]
    1896  size, segments[j].segment, segments[j].segment_dma);    [in mpi3mr_free_op_reply_q_segments()]
    2001  struct segments *segments;    [local, in mpi3mr_alloc_op_reply_q_segments()]
    2058  struct segments *segments;    [local, in mpi3mr_alloc_op_req_q_segments()]
    2449  struct segments *segments = op_req_q->q_segments;    [local, in mpi3mr_op_request_post()]
    4610  struct segments *segments;    [local, in mpi3mr_memset_op_reply_q_buffers()]
    [all …]

mpi3mr.h
     412  struct segments {    [struct definition]
     441  struct segments *q_segments;
     471  struct segments *q_segments;
    1384  struct segments *trace_buf;

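mpi3mr keeps its operational queues in arrays of struct segments, each pairing a DMA-coherent buffer with its bus address, and the free paths in mpi3mr_fw.c hand both back to dma_free_coherent(). A sketch of that teardown pattern, using a simplified stand-in for the driver's struct segments and a hypothetical helper name:

#include <linux/device.h>
#include <linux/dma-mapping.h>

struct demo_segments {			/* stand-in for the driver's struct segments */
	void *segment;			/* kernel virtual address of the segment */
	dma_addr_t segment_dma;		/* matching bus address */
};

static void demo_free_segments(struct device *dev,
			       struct demo_segments *segments,
			       unsigned int num_segments, size_t size)
{
	unsigned int j;

	for (j = 0; j < num_segments; j++) {
		if (!segments[j].segment)
			continue;
		dma_free_coherent(dev, size, segments[j].segment,
				  segments[j].segment_dma);
		segments[j].segment = NULL;
	}
}
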
/drivers/gpu/drm/arm/

malidp_crtc.c
     99  } segments[MALIDP_COEFFTAB_NUM_COEFFS] = {    [variable]
    136  delta_in = segments[i].end - segments[i].start;    [in malidp_generate_gamma_table()]
    138  out_start = drm_color_lut_extract(lut[segments[i].start].green,    [in malidp_generate_gamma_table()]
    140  out_end = drm_color_lut_extract(lut[segments[i].end].green, 12);    [in malidp_generate_gamma_table()]

/drivers/bus/mhi/host/

boot.c  [all matches in mhi_alloc_bhie_table()]
    373  int segments = DIV_ROUND_UP(alloc_size, seg_size) + 1;    [local]
    383  img_info->mhi_buf = kcalloc(segments, sizeof(*img_info->mhi_buf),
    390  for (i = 0; i < segments; i++, mhi_buf++) {
    394  if (i == segments - 1)
    405  img_info->bhi_vec = img_info->mhi_buf[segments - 1].buf;
    406  img_info->entries = segments;

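mhi_alloc_bhie_table() sizes its buffer table as one segment per seg_size chunk of the image plus one extra segment for the vector table describing the others (hence the "+ 1" and the special-casing of the last index). A minimal sketch of that allocation layout; demo_buf, demo_alloc_table() and the uniform lengths are illustrative stand-ins, not the real struct mhi_buf handling:

#include <linux/kernel.h>
#include <linux/slab.h>

struct demo_buf {			/* simplified stand-in for struct mhi_buf */
	void *buf;
	size_t len;
};

static struct demo_buf *demo_alloc_table(size_t alloc_size, size_t seg_size,
					 int *nr_segments)
{
	/* One segment per seg_size chunk of the image, plus one extra
	 * segment for the vector table that describes the others. */
	int segments = DIV_ROUND_UP(alloc_size, seg_size) + 1;
	struct demo_buf *bufs;
	int i;

	bufs = kcalloc(segments, sizeof(*bufs), GFP_KERNEL);
	if (!bufs)
		return NULL;

	for (i = 0; i < segments; i++)
		bufs[i].len = seg_size;	/* the real code sizes the last
					 * (vector table) entry separately */

	*nr_segments = segments;
	return bufs;
}
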
/drivers/gpu/drm/amd/display/dc/optc/dcn32/

dcn32_optc.c  [all matches in optc32_get_odm_combine_segments()]
    103  int segments;    [local]
    105  REG_GET(OPTC_DATA_SOURCE_SELECT, OPTC_NUM_OF_INPUT_SEGMENT, &segments);
    107  switch (segments) {

/drivers/md/dm-vdo/

block-map.c
      69  struct block_map_tree_segment *segments;    [member]
      74  size_t segments;    [member]
    2344  forest->segments = index + 1;    [in make_segment()]
    2382  int result = vdo_allocate(forest->segments,    [in make_segment()]
    2384  "tree root segments", &tree->segments);    [in make_segment()]
    2389  memcpy(tree->segments, old_forest->trees[root].segments,    [in make_segment()]
    2393  segment = &(tree->segments[index]);    [in make_segment()]
    2429  vdo_free(forest->trees[root].segments);    [in deforest()]
    2468  deforest(forest, forest->segments - 1);    [in make_forest()]
    2484  deforest(map->forest, map->forest->segments);    [in replace_forest()]
    [all …]

/drivers/media/i2c/

adv7511-v4l2.c
      69  u32 segments;    [member]
     590  edid->segments ? "found" : "no",    [in adv7511_log_status()]
    1574  …atus & MASK_ADV7511_HPD_DETECT) && ((status & MASK_ADV7511_MSEN_DETECT) || state->edid.segments)) {    [in adv7511_check_monitor_present_status()]
    1685  state->edid.segments = segment + 1;    [in adv7511_check_edid_status()]
    1687  if (state->edid.blocks > state->edid.segments * 2) {    [in adv7511_check_edid_status()]
    1689  v4l2_dbg(1, debug, sd, "%s: request segment %d\n", __func__, state->edid.segments);    [in adv7511_check_edid_status()]
    1691  adv7511_wr(sd, 0xc4, state->edid.segments);    [in adv7511_check_edid_status()]
    1697  v4l2_dbg(1, debug, sd, "%s: edid complete with %d segment(s)\n", __func__, state->edid.segments);    [in adv7511_check_edid_status()]
    1700  state->edid.segments * 256,    [in adv7511_check_edid_status()]

/drivers/block/

xen-blkfront.c
    580  struct blkif_request_segment *segments;    [member]
    625  if (setup->segments)    [in blkif_setup_rw_req_grant()]
    626  kunmap_atomic(setup->segments);    [in blkif_setup_rw_req_grant()]
    631  setup->segments = kmap_atomic(gnt_list_entry->page);    [in blkif_setup_rw_req_grant()]
    673  setup->segments[grant_idx % GRANTS_PER_INDIRECT_FRAME] =    [in blkif_setup_rw_req_grant()]
    712  .segments = NULL,    [in blkif_queue_rw_req()]
    848  if (setup.segments)    [in blkif_queue_rw_req()]
    849  kunmap_atomic(setup.segments);    [in blkif_queue_rw_req()]
    950  unsigned int segments = info->max_indirect_segments ? :    [local, in blkif_set_queue_limits()]
    971  lim->max_hw_sectors = (segments * XEN_PAGE_SIZE) / 512;    [in blkif_set_queue_limits()]
    [all …]

/drivers/staging/octeon/

ethernet-rx.c  [all matches in copy_segments_to_skb()]
    144  int segments = work->word2.s.bufs;    [local]
    149  while (segments--) {

/drivers/remoteproc/

remoteproc_coredump.c  [all matches in rproc_coredump_find_segment()]
    135  struct list_head *segments,    [argument]
    140  list_for_each_entry(segment, segments, node) {

/drivers/scsi/sym53c8xx_2/

sym_glue.c  [all matches in sym_setup_data_and_start()]
    341  cp->segments = sym_scatter(np, cp, cmd);
    342  if (cp->segments < 0) {
    350  if (!cp->segments)
    354  cp->segments = 0;
    367  lastp = goalp - 8 - (cp->segments * (2*4));
    372  lastp = goalp - 8 - (cp->segments * (2*4));