/drivers/infiniband/hw/usnic/
  usnic_uiom_interval_tree.c
      43  #define LAST(node) ((node)->last)
      79  interval->last = last;    in usnic_uiom_interval_node_alloc()
     148  if (pivot > interval->last) {    in usnic_uiom_get_intervals_diff()
     150  } else if (pivot <= interval->last &&    in usnic_uiom_get_intervals_diff()
     153  pivot = interval->last + 1;    in usnic_uiom_get_intervals_diff()
     157  if (pivot <= last)    in usnic_uiom_get_intervals_diff()
     180  unsigned long last, int flags)    in usnic_uiom_insert_interval() argument
     199  ilast = interval->last;    in usnic_uiom_insert_interval()
     214  if (ilast > last) {    in usnic_uiom_insert_interval()
     229  if (lpivot <= last)    in usnic_uiom_insert_interval()
     [all …]
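The pivot walk captured above is the heart of usnic_uiom_get_intervals_diff(): advance a cursor through sorted, inclusive [start, last] intervals and report the gaps that none of them cover. Below is a minimal userspace sketch of that logic, with hypothetical names, not the kernel API:

```c
#include <stdio.h>

struct ival { unsigned long start, last; };  /* inclusive [start, last] */

/* Walk sorted, non-overlapping intervals and report the subranges of
 * [start, last] that they do not cover, using the same pivot technique
 * as usnic_uiom_get_intervals_diff(). */
static void intervals_diff(const struct ival *iv, int n,
                           unsigned long start, unsigned long last)
{
    unsigned long pivot = start;

    for (int i = 0; i < n && pivot <= last; i++) {
        if (pivot > iv[i].last)
            continue;                      /* interval lies before pivot */
        if (pivot < iv[i].start) {         /* gap before this interval */
            unsigned long end = iv[i].start - 1;

            printf("uncovered: [%lu, %lu]\n",
                   pivot, end < last ? end : last);
        }
        pivot = iv[i].last + 1;            /* jump past the covered part */
    }
    if (pivot <= last)                     /* tail gap, if any */
        printf("uncovered: [%lu, %lu]\n", pivot, last);
}

int main(void)
{
    const struct ival iv[] = { { 10, 19 }, { 30, 39 } };

    intervals_diff(iv, 2, 0, 50);  /* prints [0,9], [20,29], [40,50] */
    return 0;
}
```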
|
  usnic_uiom_interval_tree.h
      43  unsigned long last;    member
      58  unsigned long last);
      61  unsigned long start, unsigned long last);
      67  unsigned long start, unsigned long last,
      75  unsigned long start, unsigned long last,
      82  unsigned long last, int flags,
|
/drivers/vhost/
  iotlb.c
      17  #define LAST(map) ((map)->last)
      51  u64 start, u64 last,    in vhost_iotlb_add_range_ctx() argument
      57  if (last < start)    in vhost_iotlb_add_range_ctx()
      63  if (start == 0 && last == ULONG_MAX) {    in vhost_iotlb_add_range_ctx()
      64  u64 mid = last / 2;    in vhost_iotlb_add_range_ctx()
      87  map->size = last - start + 1;    in vhost_iotlb_add_range_ctx()
      88  map->last = last;    in vhost_iotlb_add_range_ctx()
     104  u64 start, u64 last,    in vhost_iotlb_add_range() argument
     107  return vhost_iotlb_add_range_ctx(iotlb, start, last,    in vhost_iotlb_add_range()
     123  start, last)))    in vhost_iotlb_del_range()
     [all …]
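The special case at lines 63-64 exists because the size of an inclusive range is `last - start + 1`, which wraps to 0 for the full 64-bit space; that is why vhost_iotlb_add_range_ctx() splits [0, ULONG_MAX] into two halves around `mid = last / 2`. A quick standalone demonstration of the overflow:

```c
#include <stdio.h>
#include <limits.h>

int main(void)
{
    unsigned long start = 0, last = ULONG_MAX;
    unsigned long mid = last / 2;

    /* Inclusive length wraps to 0 for the whole address space... */
    printf("size = %lu\n", last - start + 1);          /* prints 0 */

    /* ...so insert [0, mid] and [mid + 1, ULONG_MAX] instead. */
    printf("halves: %lu + %lu\n",
           mid - start + 1, last - (mid + 1) + 1);
    return 0;
}
```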
|
/drivers/video/fbdev/core/
  fb_copyarea.h
      62  last = last ? (last & first) : first;    in fb_copy_aligned_fwd()
      65  if (last == ~0UL)    in fb_copy_aligned_fwd()
      90  if (last)    in fb_copy_aligned_fwd()
     107  if (last)    in fb_copy_aligned_rev()
     108  first &= last;    in fb_copy_aligned_rev()
     120  if (last)    in fb_copy_aligned_rev()
     178  last = last ? (last & first) : first;    in fb_copy_fwd()
     242  if (last) {    in fb_copy_fwd()
     272  if (last)    in fb_copy_rev()
     273  first &= last;    in fb_copy_rev()
     [all …]
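fb_copyarea touches the partial leading and trailing words of a span through "first" and "last" edge masks; `last = last ? (last & first) : first` collapses the two into one combined mask when the span fits in a single word (a `last` of 0 means the span ends exactly on a word boundary). A simplified LSB-first sketch of the same masking idea, my own layout rather than the fbdev helpers:

```c
#include <stdio.h>
#include <limits.h>

#define BITS (sizeof(unsigned long) * CHAR_BIT)

/* Set bits [off, off + len) in a bitmap, touching partial words only
 * through edge masks, in the style of fb_copy_aligned_fwd(). */
static void bitmap_set_range(unsigned long *map, unsigned int off,
                             unsigned int len)
{
    unsigned long *p = map + off / BITS;
    unsigned int start = off % BITS;
    unsigned long first = ~0UL << start;                  /* leading edge  */
    unsigned long last = ~(~0UL << ((off + len) % BITS)); /* trailing edge */

    if (start + len <= BITS) {          /* span fits in one word */
        *p |= last ? (first & last) : first;
        return;
    }

    *p++ |= first;                      /* partial leading word */
    len -= BITS - start;
    while (len >= BITS) {               /* full middle words */
        *p++ = ~0UL;
        len -= BITS;
    }
    if (last)                           /* partial trailing word */
        *p |= last;
}

int main(void)
{
    unsigned long map[2] = { 0, 0 };

    bitmap_set_range(map, 60, 8);       /* spans the word boundary */
    printf("%#lx %#lx\n", map[0], map[1]);
    return 0;
}
```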
|
  fb_fillrect.h
     112  unsigned long first, last;    in bitfill() local
     119  last = last ? (last & first) : first;    in bitfill()
     121  if (last == ~0UL)    in bitfill()
     123  else if (last)    in bitfill()
     124  fb_modify_offset(first, last, 0, dst);    in bitfill()
     148  if (last)    in bitfill()
     160  unsigned long first, last;    in bitinvert() local
     169  last = last ? (last & first) : first;    in bitinvert()
     197  last &= get(pattern);    in bitinvert()
     198  if (last)    in bitinvert()
     [all …]
|
  fbsysfs.c
     146  char ** last = NULL;    in store_bpp() local
     169  char **last = NULL;    in store_rotate() local
     196  char *last = NULL;    in store_virtual() local
     201  last++;    in store_virtual()
     202  if (last - buf >= count)    in store_virtual()
     204  var.yres_virtual = simple_strtoul(last, &last, 0);    in store_virtual()
     231  char *last = NULL;    in store_blank() local
     288  char *last = NULL;    in store_pan() local
     293  last++;    in store_pan()
     296  var.yoffset = simple_strtoul(last, &last, 0);    in store_pan()
     [all …]
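store_virtual() and store_pan() parse an "X,Y" pair by converting the first number, stepping `last` one past the separator, and bounds-checking against the buffer before converting the second. A userspace equivalent built on strtoul (hypothetical helper, not the sysfs code):

```c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Parse "X,Y" the way store_virtual() does: convert, step past the
 * separator, verify we are still inside the buffer, convert again.
 * Returns 0 on success, -1 if the second value is missing. */
static int parse_pair(const char *buf, size_t count,
                      unsigned long *x, unsigned long *y)
{
    char *last = NULL;

    *x = strtoul(buf, &last, 0);
    last++;                              /* skip the separator */
    if ((size_t)(last - buf) >= count)
        return -1;
    *y = strtoul(last, &last, 0);
    return 0;
}

int main(void)
{
    const char *s = "1024,768";
    unsigned long x, y;

    if (!parse_pair(s, strlen(s), &x, &y))
        printf("xres_virtual=%lu yres_virtual=%lu\n", x, y);
    return 0;
}
```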
|
/drivers/gpu/drm/amd/amdkfd/
  kfd_svm.c
      135  prange->it_node.last = prange->last;    in svm_range_add_to_svms()
      346  prange->last = last;    in svm_range_new()
     1056  new->last > old->last) {    in svm_range_split_adjust()
     1073  old->last = last;    in svm_range_split_adjust()
     1984  l = min(last, pchild->last);    in svm_range_evict()
     1990  l = min(last, prange->last);    in svm_range_evict()
     2171  if (node->last > last) {    in svm_range_add()
     2437  if (last < tail->last)    in svm_range_unmap_split()
     2520  l = min(last, pchild->last);    in svm_range_unmap_from_cpu()
     2527  l = min(last, prange->last);    in svm_range_unmap_from_cpu()
     [all …]
|
  kfd_migrate.c
     292  prange->last, ttm_res_offset);    in svm_migrate_copy_to_vram()
     428  __func__, r, prange->start, prange->last);    in svm_migrate_vma_to_vram()
     435  prange->start, prange->last);    in svm_migrate_vma_to_vram()
     594  prange->last);    in svm_migrate_copy_to_ram()
     731  __func__, r, prange->start, prange->last);    in svm_migrate_vma_to_ram()
     738  prange->start, prange->last);    in svm_migrate_vma_to_ram()
     806  prange->start, prange->last);    in svm_migrate_vram_to_ram()
     884  unsigned long start, unsigned long last,    in svm_migrate_vram_to_vram() argument
     936  unsigned long start, last, size;    in svm_migrate_to_ram() local
     990  last = min(ALIGN(addr + 1, size) - 1, prange->last);    in svm_migrate_to_ram()
     [all …]
|
/drivers/crypto/marvell/cesa/
  tdma.c
      44  engine->chain_sw.last = NULL;    in mv_cesa_dma_step()
      47  engine->chain_hw.last = dreq->chain.last;    in mv_cesa_dma_step()
      85  dreq->chain.last = NULL;    in mv_cesa_dma_cleanup()
     108  struct mv_cesa_tdma_desc *last = engine->chain_sw.last;    in mv_cesa_tdma_chain() local
     120  last = dreq->chain.last;    in mv_cesa_tdma_chain()
     121  engine->chain_sw.last = last;    in mv_cesa_tdma_chain()
     128  engine->chain_sw.last = NULL;    in mv_cesa_tdma_chain()
     214  if (chain->last) {    in mv_cesa_dma_add_desc()
     216  chain->last->next = new_tdma;    in mv_cesa_dma_add_desc()
     221  chain->last = new_tdma;    in mv_cesa_dma_add_desc()
     [all …]
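The TDMA chain keeps both a head and a tail pointer, so mv_cesa_dma_add_desc() appends in O(1): link the new descriptor after chain->last when one exists, otherwise it becomes the head. The same shape with generic stand-in types:

```c
#include <stdio.h>

/* Head + tail ("last") pointers give O(1) append, as in
 * mv_cesa_dma_add_desc(). Types are stand-ins, not the CESA structs. */
struct desc {
    int id;
    struct desc *next;
};

struct chain {
    struct desc *first;
    struct desc *last;
};

static void chain_add_desc(struct chain *c, struct desc *d)
{
    d->next = NULL;
    if (c->last)
        c->last->next = d;   /* link after the current tail */
    else
        c->first = d;        /* first descriptor in the chain */
    c->last = d;
}

int main(void)
{
    struct chain c = { NULL, NULL };
    struct desc a = { 1, NULL }, b = { 2, NULL };

    chain_add_desc(&c, &a);
    chain_add_desc(&c, &b);
    for (struct desc *d = c.first; d; d = d->next)
        printf("desc %d\n", d->id);
    return 0;
}
```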
|
/drivers/base/regmap/
  regcache-maple.c
      44  unsigned long index, last;    in regcache_maple_write() local
      60  last = reg;    in regcache_maple_write()
      70  last = mas.last;    in regcache_maple_write()
      93  mas_set_range(&mas, index, last);    in regcache_maple_write()
     144  if (mas.last > max) {    in regcache_maple_drop()
     146  upper_last = mas.last;    in regcache_maple_drop()
     149  mas.last - max, sizeof(*upper),    in regcache_maple_drop()
     315  int last)    in regcache_maple_insert_block() argument
     318  MA_STATE(mas, mt, first, last);    in regcache_maple_insert_block()
     326  for (i = 0; i < last - first + 1; i++)    in regcache_maple_insert_block()
     [all …]
|
/drivers/iommu/iommufd/
  io_pagetable.h
      79  return area->pages_node.last;    in iopt_area_last_index()
      89  return area->node.last;    in iopt_area_last_iova()
      94  return (area->node.last - area->node.start) + 1;    in iopt_area_length()
     121  unsigned long last) \
     127  last); \
     134  unsigned long last) \
     234  unsigned long last, struct page **out_pages);
     236  unsigned long last, struct page **out_pages);
     238  unsigned long last);
     241  unsigned long last, struct page **out_pages,
     [all …]
|
/drivers/memory/tegra/
  tegra210-emc-cc-r21021.c
     198  if (last->periodic_training &&    in periodic_compensation_handler()
     207  __COPY_EMA(next, last, idx);    in periodic_compensation_handler()
     258  struct tegra210_emc_timing *last = emc->last;    in tegra210_emc_r21021_periodic_compensation() local
     261  if (last->periodic_training) {    in tegra210_emc_r21021_periodic_compensation()
     304  last, last)) {    in tegra210_emc_r21021_periodic_compensation()
     359  struct tegra210_emc_timing *fake, *last = emc->last, *next = emc->next;    in tegra210_emc_r21021_set_clock() local
     521  emc_writel(emc, last->burst_regs    in tegra210_emc_r21021_set_clock()
     527  emc_writel(emc, last->burst_regs    in tegra210_emc_r21021_set_clock()
     707  W2P_war = last->burst_regs[    in tegra210_emc_r21021_set_clock()
     963  if (next->rate < last->rate) {    in tegra210_emc_r21021_set_clock()
     [all …]
|
/drivers/gpu/drm/i915/gt/uc/
  selftest_guc.c
      95  last[i] = rq;    in intel_guc_scrub_ctbs()
     104  i915_request_put(last[i]);    in intel_guc_scrub_ctbs()
     105  last[i] = NULL;    in intel_guc_scrub_ctbs()
     124  if (last[i])    in intel_guc_scrub_ctbs()
     125  i915_request_put(last[i]);    in intel_guc_scrub_ctbs()
     207  if ((ret != -EAGAIN) || !last) {    in intel_guc_steal_guc_ids()
     213  if (last)    in intel_guc_steal_guc_ids()
     214  i915_request_put(last);    in intel_guc_steal_guc_ids()
     215  last = rq;    in intel_guc_steal_guc_ids()
     224  i915_request_put(last);    in intel_guc_steal_guc_ids()
     [all …]
|
/drivers/infiniband/hw/cxgb4/
  id_table.c
      51  obj = find_next_zero_bit(alloc->table, alloc->max, alloc->last);    in c4iw_id_alloc()
      57  alloc->last += get_random_u32_below(RANDOM_SKIP);    in c4iw_id_alloc()
      59  alloc->last = obj + 1;    in c4iw_id_alloc()
      60  if (alloc->last >= alloc->max)    in c4iw_id_alloc()
      61  alloc->last = 0;    in c4iw_id_alloc()
      88  alloc->last = get_random_u32_below(RANDOM_SKIP);    in c4iw_id_table_alloc()
      90  alloc->last = 0;    in c4iw_id_table_alloc()
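c4iw_id_alloc() is a rotor allocator: it scans the bitmap starting at alloc->last, wraps around, and on success advances the rotor, optionally by a random amount (RANDOM_SKIP) so handed-out IDs are harder to predict. A standalone approximation with a byte array in place of find_next_zero_bit(), details simplified:

```c
#include <stdio.h>
#include <stdlib.h>

#define MAX_IDS     64
#define RANDOM_SKIP 16

struct id_table {
    unsigned char used[MAX_IDS];  /* stand-in for the kernel bitmap    */
    unsigned int last;            /* rotor: where the next scan starts */
};

/* Scan from the rotor, wrapping once, roughly as c4iw_id_alloc() does
 * with find_next_zero_bit(). Returns an id, or -1 when full. */
static int id_alloc(struct id_table *t, int randomize)
{
    for (unsigned int i = 0; i < MAX_IDS; i++) {
        unsigned int obj = (t->last + i) % MAX_IDS;

        if (!t->used[obj]) {
            t->used[obj] = 1;
            t->last = obj + 1 + (randomize ? rand() % RANDOM_SKIP : 0);
            if (t->last >= MAX_IDS)
                t->last %= MAX_IDS;
            return (int)obj;
        }
    }
    return -1;
}

int main(void)
{
    struct id_table t = { { 0 }, 0 };

    for (int i = 0; i < 5; i++)
        printf("id %d\n", id_alloc(&t, 0));  /* 0,1,2,3,4 round-robin */
    return 0;
}
```

The rotor keeps freshly freed IDs from being reused immediately, which helps catch stale-handle bugs; the random skip additionally makes the ID sequence unpredictable to userspace.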
|
/drivers/gpu/drm/xe/
  xe_range_fence.c
      14  #define XE_RANGE_TREE_LAST(_node) ((_node)->last)
      57  u64 start, u64 last, struct dma_fence *fence)    in xe_range_fence_insert() argument
      68  rfence->last = last;    in xe_range_fence_insert()
     135  u64 last)    in xe_range_fence_tree_first() argument
     137  return xe_range_fence_tree_iter_first(&tree->root, start, last);    in xe_range_fence_tree_first()
     149  xe_range_fence_tree_next(struct xe_range_fence *rfence, u64 start, u64 last)    in xe_range_fence_tree_next() argument
     151  return xe_range_fence_tree_iter_next(rfence, start, last);    in xe_range_fence_tree_next()
|
/drivers/staging/media/ipu3/
  ipu3-css-pool.c
      49  pool->last = IPU3_CSS_POOL_SIZE;    in imgu_css_pool_init()
      64  u32 n = (pool->last + 1) % IPU3_CSS_POOL_SIZE;    in imgu_css_pool_get()
      67  pool->last = n;    in imgu_css_pool_get()
      75  pool->entry[pool->last].valid = false;    in imgu_css_pool_put()
      76  pool->last = (pool->last + IPU3_CSS_POOL_SIZE - 1) % IPU3_CSS_POOL_SIZE;    in imgu_css_pool_put()
      92  int i = (pool->last + IPU3_CSS_POOL_SIZE - n) % IPU3_CSS_POOL_SIZE;    in imgu_css_pool_last()
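The IPU3 pool is a fixed ring in which `last` names the most recently taken slot: get advances it modulo the size, put steps it back with `(last + SIZE - 1) % SIZE` (adding SIZE first keeps the unsigned arithmetic from underflowing), and the n-th most recent slot is `(last + SIZE - n) % SIZE`. A runnable sketch mirroring those three operations:

```c
#include <stdio.h>

#define POOL_SIZE 4  /* stands in for IPU3_CSS_POOL_SIZE */

struct pool {
    int valid[POOL_SIZE];
    unsigned int last;   /* most recently allocated slot */
};

static void pool_init(struct pool *p)
{
    for (int i = 0; i < POOL_SIZE; i++)
        p->valid[i] = 0;
    p->last = POOL_SIZE;             /* "nothing allocated" sentinel */
}

static int pool_get(struct pool *p)
{
    unsigned int n = (p->last + 1) % POOL_SIZE;

    if (p->valid[n])
        return -1;                   /* oldest slot still in use */
    p->valid[n] = 1;
    p->last = n;
    return (int)n;
}

static void pool_put(struct pool *p)
{
    p->valid[p->last] = 0;
    /* step back one slot; "+ POOL_SIZE" avoids unsigned underflow */
    p->last = (p->last + POOL_SIZE - 1) % POOL_SIZE;
}

/* index of the n-th most recently allocated entry (n == 0 -> newest) */
static unsigned int pool_last(const struct pool *p, unsigned int n)
{
    return (p->last + POOL_SIZE - n) % POOL_SIZE;
}

int main(void)
{
    struct pool p;

    pool_init(&p);
    int a = pool_get(&p);
    int b = pool_get(&p);
    printf("got slots %d and %d\n", a, b);  /* 1 and 2 */
    printf("one before newest: %u\n", pool_last(&p, 1));
    pool_put(&p);                           /* frees slot 2 */
    return 0;
}
```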
|
/drivers/net/team/
  team_mode_broadcast.c
      17  struct team_port *last = NULL;    in bc_transmit() local
      24  if (last) {    in bc_transmit()
      27  ret = !team_dev_queue_xmit(team, last,    in bc_transmit()
      33  last = cur;    in bc_transmit()
      36  if (last) {    in bc_transmit()
      37  ret = !team_dev_queue_xmit(team, last, skb);    in bc_transmit()
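bc_transmit() saves one copy per broadcast: while walking the port list it transmits a clone to the previously seen port, remembers the current port in `last`, and after the loop sends the original skb to the final port. The same pattern in plain C, with stand-in types for the port and the transmit call:

```c
#include <stdio.h>

struct port { const char *name; };

static void xmit(const struct port *p, const char *buf, int is_clone)
{
    printf("%s <- %s (%s)\n", p->name, buf,
           is_clone ? "clone" : "original");
}

/* Clone for every destination except the final one, which gets the
 * original buffer: the shape of bc_transmit(). */
static void broadcast(const struct port *ports, int n, const char *buf)
{
    const struct port *last = NULL;

    for (int i = 0; i < n; i++) {
        if (last)
            xmit(last, buf, 1);  /* earlier ports get a clone */
        last = &ports[i];
    }
    if (last)
        xmit(last, buf, 0);      /* final port gets the original */
}

int main(void)
{
    const struct port ports[] = { { "eth0" }, { "eth1" }, { "eth2" } };

    broadcast(ports, 3, "hello");
    return 0;
}
```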
|
/drivers/gpu/drm/nouveau/nvkm/subdev/bios/
  image.c
      54  image->last = pcir.last;    in nvbios_imagen()
      60  image->last = npde.last;    in nvbios_imagen()
      62  image->last = true;    in nvbios_imagen()
      76  if (image->last || !nvbios_imagen(bios, image)) {    in nvbios_image()
|
/drivers/vdpa/vdpa_user/
  iova_domain.c
      21  u64 start, u64 last,    in vduse_iotlb_add_range() argument
      46  u64 start, u64 last)    in vduse_iotlb_del_range() argument
      64  u64 start = 0ULL, last = ULLONG_MAX;    in vduse_domain_set_map() local
      68  vduse_iotlb_del_range(domain, start, last);    in vduse_domain_set_map()
      84  vduse_iotlb_del_range(domain, start, last);    in vduse_domain_set_map()
      93  u64 start = 0ULL, last = ULLONG_MAX;    in vduse_domain_clear_map() local
     107  u64 last = iova + size - 1;    in vduse_domain_map_bounce_page() local
     109  while (iova <= last) {    in vduse_domain_map_bounce_page()
     127  u64 last = iova + size - 1;    in vduse_domain_unmap_bounce_page() local
     129  while (iova <= last) {    in vduse_domain_unmap_bounce_page()
     [all …]
|
/drivers/video/fbdev/omap2/omapfb/dss/
  overlay-sysfs.c
     139  char *last;    in overlay_position_store() local
     144  info.pos_x = simple_strtoul(buf, &last, 10);    in overlay_position_store()
     145  ++last;    in overlay_position_store()
     146  if (last - buf >= size)    in overlay_position_store()
     149  info.pos_y = simple_strtoul(last, &last, 10);    in overlay_position_store()
     178  char *last;    in overlay_output_size_store() local
     183  info.out_width = simple_strtoul(buf, &last, 10);    in overlay_output_size_store()
     184  ++last;    in overlay_output_size_store()
     185  if (last - buf >= size)    in overlay_output_size_store()
     188  info.out_height = simple_strtoul(last, &last, 10);    in overlay_output_size_store()
|
/drivers/md/bcache/
  util.c
     165  uint64_t now, duration, last;    in bch_time_stats_update() local
     172  last = time_after64(now, stats->last)    in bch_time_stats_update()
     173  ? now - stats->last : 0;    in bch_time_stats_update()
     177  if (stats->last) {    in bch_time_stats_update()
     181  ewma_add(stats->average_frequency, last, 8, 8);    in bch_time_stats_update()
     183  stats->average_frequency = last << 8;    in bch_time_stats_update()
     188  stats->last = now ?: 1;    in bch_time_stats_update()
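bch_time_stats_update() feeds the delta since stats->last into a fixed-point moving average, seeds the average from the first sample (line 183: `average_frequency = last << 8`), and stores `now ?: 1` so that 0 can keep meaning "never updated". A sketch of the fixed-point EWMA, my reconstruction of what bcache's ewma_add() computes rather than the macro itself:

```c
#include <stdio.h>
#include <stdint.h>

/* Exponentially weighted moving average in fixed point: "factor"
 * fraction bits, "weight" controls how fast old samples decay. */
static uint64_t ewma_add(uint64_t *ewma, uint64_t val,
                         unsigned int weight, unsigned int factor)
{
    *ewma *= weight - 1;
    *ewma += val << factor;
    *ewma /= weight;
    return *ewma >> factor;
}

int main(void)
{
    uint64_t deltas[] = { 100, 100, 100, 500, 100 };
    uint64_t avg = deltas[0] << 8;   /* first sample seeds the average */

    for (int i = 1; i < 5; i++)
        printf("avg = %llu\n",
               (unsigned long long)ewma_add(&avg, deltas[i], 8, 8));
    return 0;
}
```

A single spike of 500 only pulls the printed average to 150 before it decays back toward 100, which is the smoothing behaviour the frequency and duration statistics rely on.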
|
/drivers/scsi/sym53c8xx_2/
  sym_misc.h
      80  struct sym_quehead *last = list->blink;    in sym_que_splice() local
      86  last->flink = at;    in sym_que_splice()
      87  at->blink = last;    in sym_que_splice()
      94  struct sym_quehead *first, *last;    in sym_que_move() local
     100  last = orig->blink;    in sym_que_move()
     101  last->flink = dest;    in sym_que_move()
     102  dest->blink = last;    in sym_que_move()
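sym_que_splice() inserts an entire circular doubly linked list after a head in O(1) by stitching the source's first and last nodes into the destination. A runnable sketch of the flink/blink list; re-initializing the source list afterwards is my addition, not necessarily what the driver does:

```c
#include <stdio.h>

/* Circular doubly linked list with flink/blink, as in sym_misc.h. */
struct quehead {
    struct quehead *flink, *blink;
};

static void que_init(struct quehead *q)
{
    q->flink = q->blink = q;
}

static void que_add_tail(struct quehead *q, struct quehead *head)
{
    q->flink = head;
    q->blink = head->blink;
    head->blink->flink = q;
    head->blink = q;
}

/* Splice all of "list" in after "head", O(1) regardless of length. */
static void que_splice(struct quehead *list, struct quehead *head)
{
    struct quehead *first = list->flink;

    if (first != list) {             /* nothing to do if list is empty */
        struct quehead *last = list->blink;
        struct quehead *at = head->flink;

        first->blink = head;         /* stitch list's ends into head */
        head->flink = first;
        last->flink = at;
        at->blink = last;
        que_init(list);              /* source list is now empty */
    }
}

struct item {
    struct quehead link;             /* first member, so the cast works */
    int id;
};

int main(void)
{
    struct quehead a, b;
    struct item x = { .id = 1 }, y = { .id = 2 };

    que_init(&a);
    que_init(&b);
    que_add_tail(&x.link, &a);
    que_add_tail(&y.link, &b);
    que_splice(&b, &a);              /* b's items now follow a's head */

    for (struct quehead *q = a.flink; q != &a; q = q->flink)
        printf("id %d\n", ((struct item *)q)->id);  /* 2 then 1 */
    return 0;
}
```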
|
/drivers/video/fbdev/
  c2p_iplan2.c
      93  u32 dst_idx, first, last, w;    in c2p_iplan2() local
     101  last = 0xffffU ^ (0xffffU >> ((dst_idx+width) % 16));    in c2p_iplan2()
     102  last |= last << 16;    in c2p_iplan2()
     109  first &= last;    in c2p_iplan2()
     145  store_iplan2_masked(p, bpp, d.words, last);    in c2p_iplan2()
|
/drivers/net/ethernet/netronome/nfp/
  ccm_mbox.c
     190  if (skb == last)    in nfp_ccm_mbox_copy_in()
     208  if (skb == last)    in nfp_ccm_mbox_find_req()
     262  skb = nfp_ccm_mbox_find_req(nn, hdr.tag, last);    in nfp_ccm_mbox_copy_out()
     348  } while (skb != last);    in nfp_ccm_mbox_copy_out()
     368  } while (skb != last);    in nfp_ccm_mbox_mark_all_err()
     378  struct sk_buff *skb, *last;    in nfp_ccm_mbox_run_queue_unlock() local
     385  last = skb = __skb_peek(&nn->mbox_cmsg.queue);    in nfp_ccm_mbox_run_queue_unlock()
     393  last = skb;    in nfp_ccm_mbox_run_queue_unlock()
     407  nfp_ccm_mbox_copy_in(nn, last);    in nfp_ccm_mbox_run_queue_unlock()
     411  nfp_ccm_mbox_copy_out(nn, last);    in nfp_ccm_mbox_run_queue_unlock()
     [all …]
|
/drivers/net/ethernet/engleder/
  tsnep_tc.c
     288  u64 last;    in tsnep_enable_gcl() local
     291  last = tsnep_gcl_start_before(curr, gcl->start_time);    in tsnep_enable_gcl()
     292  if ((last + curr->cycle_time) == gcl->start_time)    in tsnep_enable_gcl()
     293  change = tsnep_cut_gcl(curr, last,    in tsnep_enable_gcl()
     294  gcl->start_time - last);    in tsnep_enable_gcl()
     295  else if (((gcl->start_time - last) <=    in tsnep_enable_gcl()
     297  ((gcl->start_time - last) <= TSNEP_GCL_MIN_INTERVAL))    in tsnep_enable_gcl()
     298  change = tsnep_extend_gcl(curr, last,    in tsnep_enable_gcl()
     299  gcl->start_time - last);    in tsnep_enable_gcl()
     301  change = tsnep_cut_gcl(curr, last,    in tsnep_enable_gcl()
     [all …]
|