
Searched refs:order (Results 1 – 25 of 1627) sorted by relevance


/linux/scripts/atomic/
gen-atomic-fallback.sh
16 local order="$1"; shift
35 local order="$1"; shift
37 local tmpl_order=${order#_}
49 local order="$1"; shift
62 local order="$1"; shift
192 local order="$1"; shift
196 printf "#define raw_${xchg}${order} arch_${xchg}${order}\n"
204 if [ ! -z "${order}" ]; then
211 printf "#define raw_${xchg}${order}(...) raw_${xchg}${order}_not_implemented()\n"
245 local order="$1"; shift
[all …]
gen-atomic-instrumented.sh
37 local order="$1"; shift
39 if [ "${order}" = "_release" ]; then
41 elif [ -z "${order}" ] && ! meta_in "$meta" "slv"; then
59 local order="$1"; shift
63 local atomicname="${atomic}_${pfx}${name}${sfx}${order}"
88 local order="$1"; shift
92 case "$order" in
110 raw_${xchg}${order}(__ai_ptr, __ai_oldp, __VA_ARGS__); \\
124 raw_${xchg}${order}(__ai_ptr, __VA_ARGS__); \\
166 for order in "" "_acquire" "_release" "_relaxed"; do
[all …]
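
The two scripts above stamp out a variant of each atomic operation for every ordering suffix: "" (fully ordered), "_acquire", "_release" and "_relaxed". As a rough illustration of the fallback idea (not the generated kernel code; my_xchg_* and the GCC __atomic builtins here are stand-ins for the arch_ primitives), an acquire variant can be derived from the relaxed op plus a fence:

	/* Sketch only: acquire fallback = relaxed op + acquire fence. */
	static inline int my_xchg_relaxed(int *p, int v)
	{
		return __atomic_exchange_n(p, v, __ATOMIC_RELAXED);
	}

	static inline int my_xchg_acquire(int *p, int v)
	{
		int ret = my_xchg_relaxed(p, v);

		__atomic_thread_fence(__ATOMIC_ACQUIRE);	/* order later accesses after the op */
		return ret;
	}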
/linux/arch/arm64/kvm/hyp/nvhe/
page_alloc.c
58 if (!buddy || buddy->order != order || buddy->refcount) in __find_buddy_avail()
97 unsigned short order = p->order; in __hyp_attach_page() local
112 p->order = HYP_NO_ORDER; in __hyp_attach_page()
113 for (; (order + 1) <= pool->max_order; order++) { in __hyp_attach_page()
126 p->order = order; in __hyp_attach_page()
137 while (p->order > order) { in __hyp_extract_page()
144 p->order--; in __hyp_extract_page()
146 buddy->order = p->order; in __hyp_extract_page()
186 unsigned short order = p->order; in hyp_split_page() local
189 p->order = 0; in hyp_split_page()
[all …]
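
The nVHE hypervisor page allocator above is a buddy system: a block's order is the log2 of its size in pages, and __hyp_attach_page() keeps merging a freed block with its buddy of equal order until no free buddy remains. A standalone sketch of that walk (illustrative only; the PFN arithmetic and is_block_free() are simplified stand-ins for the struct hyp_page checks):

	static int is_block_free(unsigned long pfn, unsigned int order)
	{
		(void)pfn; (void)order;
		return 0;	/* stub; the real code checks the buddy's order and refcount */
	}

	/* Sketch: coalesce a freed block upward in a buddy allocator. */
	static unsigned int coalesce(unsigned long *pfn, unsigned int order, unsigned int max_order)
	{
		while (order + 1 <= max_order && is_block_free(*pfn ^ (1UL << order), order)) {
			*pfn &= ~(1UL << order);	/* merged block starts at the lower buddy */
			order++;
		}
		return order;
	}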
/linux/include/trace/events/
compaction.h
168 int order,
181 __entry->order = order;
187 __entry->order,
195 int order,
210 __entry->order = order;
217 __entry->order,
224 int order,
233 int order,
257 __entry->order = order;
266 __entry->order,
[all …]
vmscan.h
81 __entry->order = order;
86 __entry->order)
105 __entry->order = order;
111 __entry->order,
127 __entry->order = order;
132 __entry->order,
283 int order,
304 __entry->order = order;
318 __entry->order,
450 __entry->order = order;
[all …]
/linux/mm/
page_alloc.c
598 if (!capc || order != capc->cc->order) in compaction_capture()
839 order++; in __free_one_page()
2092 int order; in unreserve_highatomic_pageblock() local
2106 for (order = 0; order < NR_PAGE_ORDERS; order++) { in unreserve_highatomic_pageblock()
2619 if (order && order <= PAGE_ALLOC_COSTLY_ORDER) { in free_unref_page_commit()
3581 .order = order, in __alloc_pages_may_oom()
3810 if (!order || order > PAGE_ALLOC_COSTLY_ORDER) in should_compact_retry()
4842 free_unref_page(page + (1 << order), order); in __free_pages()
6489 for (order = 0; order < NR_PAGE_ORDERS; order++) { in split_free_pages()
6877 for (order = 0; order < NR_PAGE_ORDERS; order++) { in is_free_buddy_page()
[all …]
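
Throughout mm/, "order" is the log2 size of an allocation in pages, so the loops above walk one free list per order from 0 up to NR_PAGE_ORDERS - 1. For scale, a hypothetical helper assuming 4 KiB pages:

	/* order 0 -> 4 KiB, order 3 -> 32 KiB, order 9 -> 2 MiB */
	static unsigned long order_to_bytes(unsigned int order)
	{
		return 4096UL << order;		/* PAGE_SIZE << order */
	}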
compaction.c
92 int order; in release_free_list() local
95 for (order = 0; order < NR_PAGE_ORDERS; order++) { in release_free_list()
732 for (order = 0; order < NR_PAGE_ORDERS; order++) in isolate_freepages_range()
1418 int order = cc->order > 0 ? cc->order : pageblock_order; in suitable_migration_target() local
1523 order--; in next_search_order()
1525 order = cc->order - 1; in next_search_order()
1581 order = next_search_order(cc, order)) { in fast_isolate_freepages()
2013 for (order = cc->order - 1; in fast_find_migrateblock()
2330 for (order = cc->order; order < NR_PAGE_ORDERS; order++) { in __compact_finished()
2526 for (order = 0; order < NR_PAGE_ORDERS; order++) in compact_zone()
[all …]
/linux/drivers/iommu/
iommu-pages.h
30 const long pgcnt = 1l << order; in __iommu_alloc_account()
43 const long pgcnt = 1l << order; in __iommu_free_account()
60 page = alloc_pages(gfp | __GFP_ZERO, order); in __iommu_alloc_pages()
64 __iommu_alloc_account(page, order); in __iommu_alloc_pages()
79 __iommu_free_account(page, order); in __iommu_free_pages()
80 __free_pages(page, order); in __iommu_free_pages()
99 __iommu_alloc_account(page, order); in iommu_alloc_pages_node()
111 static inline void *iommu_alloc_pages(gfp_t gfp, int order) in iommu_alloc_pages() argument
113 struct page *page = __iommu_alloc_pages(gfp, order); in iommu_alloc_pages()
149 static inline void iommu_free_pages(void *virt, int order) in iommu_free_pages() argument
[all …]
/linux/drivers/gpu/drm/ttm/
ttm_pool.c
81 unsigned int order) in ttm_pool_alloc_page() argument
92 if (order) in ttm_pool_alloc_page()
99 p->private = order; in ttm_pool_alloc_page()
107 if (order) in ttm_pool_alloc_page()
153 if (order) in ttm_pool_free_page()
264 pt->order = order; in ttm_pool_type_init()
397 unsigned int order; in ttm_pool_free_range() local
438 unsigned int order; in ttm_pool_alloc() local
458 order = min_t(unsigned int, order, __fls(num_pages))) { in ttm_pool_alloc()
507 if (order) { in ttm_pool_alloc()
[all …]
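
The TTM pool allocation loop above clamps order to __fls(num_pages), i.e. it hands out the largest power-of-two chunk that still fits the remaining request; 13 pages become order 3 (8) + order 2 (4) + order 0 (1). A userspace sketch of the same splitting (split_into_orders() is hypothetical; __builtin_clzl stands in for __fls):

	static void split_into_orders(unsigned long num_pages)
	{
		while (num_pages) {
			/* highest set bit = largest order that still fits */
			unsigned int order = 8 * sizeof(num_pages) - 1 - __builtin_clzl(num_pages);

			/* allocate one chunk of (1UL << order) pages here */
			num_pages -= 1UL << order;
		}
	}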
/linux/lib/
test_xarray.c
207 for (order = 2; order < max_order; order++) { in check_xa_mark_1()
353 for (order = 0; order < max_order; order++) { in check_xa_shrink()
1227 for (order = 5; order < order_limit; order++) { in check_multi_find_3()
1391 for (order = 0; order < 20; order++) { in check_find_entry()
1425 for (order = 0; order < order_limit; order++) { in check_pause()
1723 for (order = 0; order < max_order; order++) { in check_create_range()
1831 for (order = 1; order < 2 * XA_CHUNK_SHIFT; order++) { in check_split()
1960 for (order = 1; order < 12; order++) { in check_account()
1993 for (order = 0; order < max_order; order++) { in check_get_order()
2012 for (order = 0; order < max_order; order++) { in check_xas_get_order()
[all …]
/linux/tools/testing/radix-tree/
multiorder.c
16 unsigned order) in item_insert_order() argument
18 XA_STATE_ORDER(xas, xa, index, order); in item_insert_order()
66 assert(item->order == order[i]); in multiorder_iteration()
112 mask = (1UL << order[k]) - 1; in multiorder_tagged_iteration()
117 assert(item->order == order[k]); in multiorder_tagged_iteration()
139 mask = (1 << order[k]) - 1; in multiorder_tagged_iteration()
144 assert(item->order == order[k]); in multiorder_tagged_iteration()
172 item_insert_order(tree, 0, order); in creator_func()
218 unsigned int order; in load_creator() local
226 for (order = 1; order < RADIX_TREE_MAP_SHIFT; order++) { in load_creator()
[all …]
/linux/include/linux/
gfp.h
269 return __alloc_pages_noprof(gfp_mask, order, nid, NULL); in __alloc_pages_node_noprof()
280 return __folio_alloc_noprof(gfp, order, nid, NULL); in __folio_alloc_node_noprof()
291 unsigned int order) in alloc_pages_node_noprof() argument
296 return __alloc_pages_node_noprof(nid, gfp_mask, order); in alloc_pages_node_noprof()
318 return alloc_pages_noprof(gfp, order); in alloc_pages_mpol_noprof()
322 return __folio_alloc_node_noprof(gfp, order, numa_node_id()); in folio_alloc_noprof()
327 return folio_alloc_noprof(gfp, order); in folio_alloc_mpol_noprof()
330 folio_alloc_noprof(gfp, order)
368 #define __get_dma_pages(gfp_mask, order) \ argument
369 __get_free_pages((gfp_mask) | GFP_DMA, (order))
[all …]
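
gfp.h is where the order parameter enters the page allocator API: a request is for 2^order physically contiguous pages. A minimal, hypothetical kernel-side caller, assuming the usual alloc_pages()/__free_pages() entry points (grab_buffer() and release_buffer() are illustrative names):

	#include <linux/gfp.h>
	#include <linux/mm.h>

	/* Sketch: grab and release 2^order contiguous, zeroed pages. */
	static void *grab_buffer(unsigned int order)
	{
		struct page *page = alloc_pages(GFP_KERNEL | __GFP_ZERO, order);

		return page ? page_address(page) : NULL;	/* PAGE_SIZE << order bytes */
	}

	static void release_buffer(void *buf, unsigned int order)
	{
		__free_pages(virt_to_page(buf), order);
	}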
compaction.h
65 static inline unsigned long compact_gap(unsigned int order) in compact_gap() argument
80 return 2UL << order; in compact_gap()
85 extern unsigned int extfrag_for_order(struct zone *zone, unsigned int order);
86 extern int fragmentation_index(struct zone *zone, unsigned int order);
88 unsigned int order, unsigned int alloc_flags,
92 extern bool compaction_suitable(struct zone *zone, int order,
95 extern void compaction_defer_reset(struct zone *zone, int order,
98 bool compaction_zonelist_suitable(struct alloc_context *ac, int order,
103 extern void wakeup_kcompactd(pg_data_t *pgdat, int order, int highest_zoneidx);
110 static inline bool compaction_suitable(struct zone *zone, int order, in compaction_suitable() argument
[all …]
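
For scale, compact_gap() above returns 2UL << order, i.e. twice the requested block: an order-9 request (2 MiB with 4 KiB pages) corresponds to 1024 pages of headroom for compaction to work with.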
/linux/drivers/gpu/drm/lib/
drm_random.c
16 void drm_random_reorder(unsigned int *order, unsigned int count, in drm_random_reorder() argument
24 swap(order[i], order[j]); in drm_random_reorder()
31 unsigned int *order, i; in drm_random_order() local
33 order = kmalloc_array(count, sizeof(*order), GFP_KERNEL); in drm_random_order()
34 if (!order) in drm_random_order()
35 return order; in drm_random_order()
38 order[i] = i; in drm_random_order()
40 drm_random_reorder(order, count, state); in drm_random_order()
41 return order; in drm_random_order()
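
drm_random_order() above builds the identity permutation 0..count-1 and then shuffles it in place. A plain userspace sketch of the same idea using a Fisher-Yates shuffle (random_order() and rand() here are stand-ins, not the DRM helpers):

	#include <stdlib.h>

	static unsigned int *random_order(unsigned int count)
	{
		unsigned int *order = malloc(count * sizeof(*order));
		unsigned int i;

		if (!order)
			return NULL;
		for (i = 0; i < count; i++)
			order[i] = i;
		for (i = count; i > 1; i--) {
			unsigned int j = rand() % i;	/* pick from the unshuffled prefix */
			unsigned int tmp = order[i - 1];

			order[i - 1] = order[j];
			order[j] = tmp;
		}
		return order;
	}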
/linux/drivers/media/pci/cx18/
cx18-mailbox.c
240 mb = &order->mb; in epu_dma_done()
349 order->mb.cmd); in epu_cmd()
356 order->mb.cmd); in epu_cmd()
374 epu_cmd(cx, order); in cx18_in_work_handler()
399 order->rpu, order->mb.cmd); in mb_ack_irq()
408 rpu_str[order->rpu], rpu_str[order->rpu], req); in mb_ack_irq()
423 mb = &order->mb; in epu_dma_done_irq()
489 order->mb.cmd); in epu_cmd_irq()
518 return order; in alloc_in_work_order_irq()
546 order->flags = 0; in cx18_api_epu_cmd_irq()
[all …]
/linux/mm/kmsan/
init.c
121 if (!held_back[order].shadow) { in kmsan_memblock_free_pages()
122 held_back[order].shadow = page; in kmsan_memblock_free_pages()
125 if (!held_back[order].origin) { in kmsan_memblock_free_pages()
126 held_back[order].origin = page; in kmsan_memblock_free_pages()
133 held_back[order].shadow = NULL; in kmsan_memblock_free_pages()
134 held_back[order].origin = NULL; in kmsan_memblock_free_pages()
142 int order; member
147 .order = MAX_PAGE_ORDER,
185 .order = collect.order - 1, in collect_split()
190 if (!collect.order) in collect_split()
[all …]
/linux/arch/riscv/kvm/
tlb.c
22 unsigned long order) in kvm_riscv_local_hfence_gvma_vmid_gpa() argument
50 unsigned long order) in kvm_riscv_local_hfence_gvma_gpa() argument
81 unsigned long order) in kvm_riscv_local_hfence_vvma_asid_gva() argument
121 unsigned long order) in kvm_riscv_local_hfence_vvma_gva() argument
264 d.addr, d.size, d.order); in kvm_riscv_hfence_process()
270 d.addr, d.size, d.order); in kvm_riscv_hfence_process()
335 unsigned long order) in kvm_riscv_hfence_gvma_vmid_gpa() argument
343 data.order = order; in kvm_riscv_hfence_gvma_vmid_gpa()
366 data.order = order; in kvm_riscv_hfence_vvma_asid_gva()
387 unsigned long order) in kvm_riscv_hfence_vvma_gva() argument
[all …]
/linux/Documentation/trace/postprocess/
trace-vmscan-postprocess.pl
315 my $order = $1;
339 my $order = $2;
372 my $order = $2;
551 for (my $order = 0; $order < 20; $order++) {
554 print "direct-$order=$count ";
560 for (my $order = 0; $order < 20; $order++) {
563 print "wakeup-$order=$count ";
608 for (my $order = 0; $order < 20; $order++) {
611 print "wake-$order=$count ";
617 for (my $order = 0; $order < 20; $order++) {
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/therm/
gk104.c
34 const struct gk104_clkgate_engine_info *order = therm->clkgate_order; in gk104_clkgate_enable() local
38 for (i = 0; order[i].type != NVKM_SUBDEV_NR; i++) { in gk104_clkgate_enable()
39 if (!nvkm_device_subdev(dev, order[i].type, order[i].inst)) in gk104_clkgate_enable()
42 nvkm_mask(dev, 0x20200 + order[i].offset, 0xff00, 0x4500); in gk104_clkgate_enable()
50 for (i = 0; order[i].type != NVKM_SUBDEV_NR; i++) { in gk104_clkgate_enable()
51 if (!nvkm_device_subdev(dev, order[i].type, order[i].inst)) in gk104_clkgate_enable()
54 nvkm_mask(dev, 0x20200 + order[i].offset, 0x00ff, 0x0045); in gk104_clkgate_enable()
63 const struct gk104_clkgate_engine_info *order = therm->clkgate_order; in gk104_clkgate_fini() local
67 for (i = 0; order[i].type != NVKM_SUBDEV_NR; i++) { in gk104_clkgate_fini()
68 if (!nvkm_device_subdev(dev, order[i].type, order[i].inst)) in gk104_clkgate_fini()
[all …]
/linux/drivers/net/ethernet/mellanox/mlx5/core/steering/
dr_buddy.c
75 unsigned int *order) in dr_buddy_find_free_seg() argument
99 *order = order_iter; in dr_buddy_find_free_seg()
120 unsigned int order, in mlx5dr_buddy_alloc_mem() argument
137 while (order_iter > order) { in mlx5dr_buddy_alloc_mem()
144 seg <<= order; in mlx5dr_buddy_alloc_mem()
151 unsigned int seg, unsigned int order) in mlx5dr_buddy_free_mem() argument
153 seg >>= order; in mlx5dr_buddy_free_mem()
160 --buddy->num_free[order]; in mlx5dr_buddy_free_mem()
162 ++order; in mlx5dr_buddy_free_mem()
164 bitmap_set(buddy->bitmap[order], seg, 1); in mlx5dr_buddy_free_mem()
[all …]
/linux/drivers/net/ethernet/mellanox/mlx5/core/steering/hws/
mlx5hws_buddy.c
85 u32 *order) in hws_buddy_find_free_seg() argument
109 *order = order_iter; in hws_buddy_find_free_seg()
124 while (order_iter > order) { in mlx5hws_buddy_alloc_mem()
131 seg <<= order; in mlx5hws_buddy_alloc_mem()
138 seg >>= order; in mlx5hws_buddy_free_mem()
140 while (test_bit(seg ^ 1, buddy->bitmap[order])) { in mlx5hws_buddy_free_mem()
141 bitmap_clear(buddy->bitmap[order], seg ^ 1, 1); in mlx5hws_buddy_free_mem()
142 --buddy->num_free[order]; in mlx5hws_buddy_free_mem()
144 ++order; in mlx5hws_buddy_free_mem()
147 bitmap_set(buddy->bitmap[order], seg, 1); in mlx5hws_buddy_free_mem()
[all …]
/linux/arch/riscv/mm/
hugetlbpage.c
35 unsigned long order; in huge_pte_alloc() local
68 for_each_napot_order(order) { in huge_pte_alloc()
88 unsigned long order; in huge_pte_offset() local
119 for_each_napot_order(order) { in huge_pte_offset()
186 unsigned long order; in arch_make_huge_pte() local
188 for_each_napot_order(order) { in arch_make_huge_pte()
194 if (order == NAPOT_ORDER_MAX) in arch_make_huge_pte()
270 unsigned long order; in huge_ptep_set_access_flags() local
314 unsigned long order; in huge_ptep_set_wrprotect() local
369 unsigned long order; in is_napot_size() local
[all …]
/linux/kernel/bpf/
cgroup_iter.c
54 int order; member
77 if (p->order == BPF_CGROUP_ITER_DESCENDANTS_PRE) in cgroup_iter_seq_start()
110 if (p->order == BPF_CGROUP_ITER_DESCENDANTS_PRE) in cgroup_iter_seq_next()
176 p->order = aux->cgroup.order; in BTF_ID_LIST_GLOBAL_SINGLE()
200 int order = linfo->cgroup.order; in bpf_iter_attach_cgroup() local
203 if (order != BPF_CGROUP_ITER_DESCENDANTS_PRE && in bpf_iter_attach_cgroup()
204 order != BPF_CGROUP_ITER_DESCENDANTS_POST && in bpf_iter_attach_cgroup()
205 order != BPF_CGROUP_ITER_ANCESTORS_UP && in bpf_iter_attach_cgroup()
206 order != BPF_CGROUP_ITER_SELF_ONLY) in bpf_iter_attach_cgroup()
223 aux->cgroup.order = order; in bpf_iter_attach_cgroup()
[all …]
/linux/drivers/gpu/drm/i915/selftests/
i915_random.c
70 void i915_random_reorder(unsigned int *order, unsigned int count, in i915_random_reorder() argument
73 i915_prandom_shuffle(order, sizeof(*order), count, state); in i915_random_reorder()
78 unsigned int *order, i; in i915_random_order() local
80 order = kmalloc_array(count, sizeof(*order), in i915_random_order()
82 if (!order) in i915_random_order()
83 return order; in i915_random_order()
86 order[i] = i; in i915_random_order()
88 i915_random_reorder(order, count, state); in i915_random_order()
89 return order; in i915_random_order()
i915_syncmap.c
274 unsigned int pass, order; in igt_syncmap_join_above() local
296 for (order = 0; order < 64; order += SHIFT) { in igt_syncmap_join_above()
335 unsigned int step, order, idx; in igt_syncmap_join_below() local
345 for (order = 64 - SHIFT; order > 0; order -= SHIFT) { in igt_syncmap_join_below()
362 for (order = SHIFT; order < 64; order += SHIFT) { in igt_syncmap_join_below()
383 for (order = SHIFT; order < 64; order += SHIFT) { in igt_syncmap_join_below()
449 unsigned int idx, order; in igt_syncmap_compact() local
462 for (order = SHIFT; order < 64; order += SHIFT) { in igt_syncmap_compact()
477 context, order, idx, in igt_syncmap_compact()
491 if (sync->height != order) { in igt_syncmap_compact()
[all …]

Completed in 60 milliseconds
