Searched refs:order (Results 1 – 25 of 2084) sorted by relevance

/linux-6.15/scripts/atomic/
gen-atomic-fallback.sh
16 local order="$1"; shift
35 local order="$1"; shift
37 local tmpl_order=${order#_}
49 local order="$1"; shift
62 local order="$1"; shift
192 local order="$1"; shift
196 printf "#define raw_${xchg}${order} arch_${xchg}${order}\n"
204 if [ ! -z "${order}" ]; then
211 printf "#define raw_${xchg}${order}(...) raw_${xchg}${order}_not_implemented()\n"
245 local order="$1"; shift
[all …]
/linux-6.15/arch/arm64/kvm/hyp/nvhe/
page_alloc.c
54 u8 order) in __find_buddy_avail() argument
58 if (!buddy || buddy->order != order || buddy->refcount) in __find_buddy_avail()
97 u8 order = p->order; in __hyp_attach_page() local
113 for (; (order + 1) <= pool->max_order; order++) { in __hyp_attach_page()
126 p->order = order; in __hyp_attach_page()
137 while (p->order > order) { in __hyp_extract_page()
144 p->order--; in __hyp_extract_page()
146 buddy->order = p->order; in __hyp_extract_page()
186 u8 order = p->order; in hyp_split_page() local
189 p->order = 0; in hyp_split_page()
[all …]
/linux-6.15/include/trace/events/
compaction.h
168 int order,
181 __entry->order = order;
187 __entry->order,
195 int order,
210 __entry->order = order;
217 __entry->order,
224 int order,
233 int order,
257 __entry->order = order;
266 __entry->order,
[all …]
vmscan.h
81 __entry->order = order;
86 __entry->order)
105 __entry->order = order;
111 __entry->order,
127 __entry->order = order;
132 __entry->order,
283 int order,
304 __entry->order = order;
318 __entry->order,
495 __entry->order = order;
[all …]
/linux-6.15/mm/
page_alloc.c
603 if (!capc || order != capc->cc->order) in compaction_capture()
865 order++; in __free_one_page()
1387 page->order = order; in add_page_to_zone_llist()
2674 if (order && order <= PAGE_ALLOC_COSTLY_ORDER) { in free_frozen_page_commit()
3255 for (order = 0; order < NR_PAGE_ORDERS; order++) { in unreserve_highatomic_pageblock()
3790 .order = order, in __alloc_pages_may_oom()
4019 if (!order || order > PAGE_ALLOC_COSTLY_ORDER) in should_compact_retry()
5080 __free_frozen_pages(page + (1 << order), order, in ___free_pages()
6612 for (order = 0; order < NR_PAGE_ORDERS; order++) { in split_free_pages()
7053 for (order = 0; order < NR_PAGE_ORDERS; order++) { in is_free_buddy_page()
[all …]
compaction.c
93 int order; in release_free_list() local
96 for (order = 0; order < NR_PAGE_ORDERS; order++) { in release_free_list()
734 for (order = 0; order < NR_PAGE_ORDERS; order++) in isolate_freepages_range()
1420 int order = cc->order > 0 ? cc->order : pageblock_order; in suitable_migration_target() local
1525 order--; in next_search_order()
1527 order = cc->order - 1; in next_search_order()
1583 order = next_search_order(cc, order)) { in fast_isolate_freepages()
2016 for (order = cc->order - 1; in fast_find_migrateblock()
2349 for (order = cc->order; order < NR_PAGE_ORDERS; order++) { in __compact_finished()
2572 for (order = 0; order < NR_PAGE_ORDERS; order++) in compact_zone()
[all …]
/linux-6.15/drivers/iommu/
iommu-pages.h
30 const long pgcnt = 1l << order; in __iommu_alloc_account()
43 const long pgcnt = 1l << order; in __iommu_free_account()
60 page = alloc_pages(gfp | __GFP_ZERO, order); in __iommu_alloc_pages()
64 __iommu_alloc_account(page, order); in __iommu_alloc_pages()
79 __iommu_free_account(page, order); in __iommu_free_pages()
80 __free_pages(page, order); in __iommu_free_pages()
99 __iommu_alloc_account(page, order); in iommu_alloc_pages_node()
111 static inline void *iommu_alloc_pages(gfp_t gfp, int order) in iommu_alloc_pages() argument
113 struct page *page = __iommu_alloc_pages(gfp, order); in iommu_alloc_pages()
149 static inline void iommu_free_pages(void *virt, int order) in iommu_free_pages() argument
[all …]
/linux-6.15/lib/
test_xarray.c
207 for (order = 2; order < max_order; order++) { in check_xa_mark_1()
353 for (order = 0; order < max_order; order++) { in check_xa_shrink()
1227 for (order = 5; order < order_limit; order++) { in check_multi_find_3()
1391 for (order = 0; order < 20; order++) { in check_find_entry()
1425 for (order = 0; order < order_limit; order++) { in check_pause()
1453 for (order = order_limit - 1; order >= 0; order--) { in check_pause()
1759 for (order = 0; order < max_order; order++) { in check_create_range()
2048 for (order = 1; order < 12; order++) { in check_account()
2081 for (order = 0; order < max_order; order++) { in check_get_order()
2100 for (order = 0; order < max_order; order++) { in check_xas_get_order()
[all …]
/linux-6.15/include/linux/
gfp.h
284 return __alloc_pages_noprof(gfp_mask, order, nid, NULL); in __alloc_pages_node_noprof()
295 return __folio_alloc_noprof(gfp, order, nid, NULL); in __folio_alloc_node_noprof()
306 unsigned int order) in alloc_pages_node_noprof() argument
311 return __alloc_pages_node_noprof(nid, gfp_mask, order); in alloc_pages_node_noprof()
330 return __folio_alloc_node_noprof(gfp, order, numa_node_id()); in folio_alloc_noprof()
335 return folio_alloc_noprof(gfp, order); in folio_alloc_mpol_noprof()
337 #define vma_alloc_folio_noprof(gfp, order, vma, addr) \ argument
338 folio_alloc_noprof(gfp, order)
378 #define __get_dma_pages(gfp_mask, order) \ argument
379 __get_free_pages((gfp_mask) | GFP_DMA, (order))
[all …]
compaction.h
65 static inline unsigned long compact_gap(unsigned int order) in compact_gap() argument
80 return 2UL << order; in compact_gap()
90 extern unsigned int extfrag_for_order(struct zone *zone, unsigned int order);
91 extern int fragmentation_index(struct zone *zone, unsigned int order);
93 unsigned int order, unsigned int alloc_flags,
97 extern bool compaction_suitable(struct zone *zone, int order,
100 extern void compaction_defer_reset(struct zone *zone, int order,
103 bool compaction_zonelist_suitable(struct alloc_context *ac, int order,
108 extern void wakeup_kcompactd(pg_data_t *pgdat, int order, int highest_zoneidx);
115 static inline bool compaction_suitable(struct zone *zone, int order, in compaction_suitable() argument
[all …]
/linux-6.15/tools/testing/radix-tree/
multiorder.c
16 unsigned order) in item_insert_order() argument
18 XA_STATE_ORDER(xas, xa, index, order); in item_insert_order()
66 assert(item->order == order[i]); in multiorder_iteration()
112 mask = (1UL << order[k]) - 1; in multiorder_tagged_iteration()
117 assert(item->order == order[k]); in multiorder_tagged_iteration()
139 mask = (1 << order[k]) - 1; in multiorder_tagged_iteration()
144 assert(item->order == order[k]); in multiorder_tagged_iteration()
172 item_insert_order(tree, 0, order); in creator_func()
218 unsigned int order; in load_creator() local
226 for (order = 1; order < RADIX_TREE_MAP_SHIFT; order++) { in load_creator()
[all …]
/linux-6.15/drivers/gpu/drm/lib/
drm_random.c
16 void drm_random_reorder(unsigned int *order, unsigned int count, in drm_random_reorder() argument
24 swap(order[i], order[j]); in drm_random_reorder()
31 unsigned int *order, i; in drm_random_order() local
33 order = kmalloc_array(count, sizeof(*order), GFP_KERNEL); in drm_random_order()
34 if (!order) in drm_random_order()
35 return order; in drm_random_order()
38 order[i] = i; in drm_random_order()
40 drm_random_reorder(order, count, state); in drm_random_order()
41 return order; in drm_random_order()
/linux-6.15/drivers/media/pci/cx18/
cx18-mailbox.c
240 mb = &order->mb; in epu_dma_done()
349 order->mb.cmd); in epu_cmd()
356 order->mb.cmd); in epu_cmd()
374 epu_cmd(cx, order); in cx18_in_work_handler()
399 order->rpu, order->mb.cmd); in mb_ack_irq()
408 rpu_str[order->rpu], rpu_str[order->rpu], req); in mb_ack_irq()
423 mb = &order->mb; in epu_dma_done_irq()
489 order->mb.cmd); in epu_cmd_irq()
518 return order; in alloc_in_work_order_irq()
546 order->flags = 0; in cx18_api_epu_cmd_irq()
[all …]
/linux-6.15/arch/riscv/kvm/
tlb.c
23 unsigned long order) in kvm_riscv_local_hfence_gvma_vmid_gpa() argument
51 unsigned long order) in kvm_riscv_local_hfence_gvma_gpa() argument
82 unsigned long order) in kvm_riscv_local_hfence_vvma_asid_gva() argument
122 unsigned long order) in kvm_riscv_local_hfence_vvma_gva() argument
276 d.size, d.order); in kvm_riscv_hfence_process()
286 d.size, d.order); in kvm_riscv_hfence_process()
304 d.size, d.order); in kvm_riscv_hfence_process()
358 unsigned long order) in kvm_riscv_hfence_gvma_vmid_gpa() argument
366 data.order = order; in kvm_riscv_hfence_gvma_vmid_gpa()
389 data.order = order; in kvm_riscv_hfence_vvma_asid_gva()
[all …]
/linux-6.15/mm/kmsan/
init.c
121 if (!held_back[order].shadow) { in kmsan_memblock_free_pages()
122 held_back[order].shadow = page; in kmsan_memblock_free_pages()
125 if (!held_back[order].origin) { in kmsan_memblock_free_pages()
126 held_back[order].origin = page; in kmsan_memblock_free_pages()
133 held_back[order].shadow = NULL; in kmsan_memblock_free_pages()
134 held_back[order].origin = NULL; in kmsan_memblock_free_pages()
142 int order; member
147 .order = MAX_PAGE_ORDER,
185 .order = collect.order - 1, in collect_split()
190 if (!collect.order) in collect_split()
[all …]
/linux-6.15/drivers/gpu/drm/ttm/
ttm_pool.c
112 unsigned int order; member
146 if (order) in ttm_pool_alloc_page()
161 if (order) in ttm_pool_alloc_page()
207 if (order) in ttm_pool_free_page()
314 pt->order = order; in ttm_pool_type_init()
417 if (!order) in ttm_pool_split_for_swap()
421 nr = 1UL << order; in ttm_pool_split_for_swap()
589 restore->order = order; in ttm_pool_page_allocated_restore()
730 order = ttm_pool_alloc_find_order(order, alloc)) { in __ttm_pool_alloc()
751 if (order) { in __ttm_pool_alloc()
[all …]
/linux-6.15/Documentation/trace/postprocess/
trace-vmscan-postprocess.pl
315 my $order = $1;
339 my $order = $2;
372 my $order = $2;
551 for (my $order = 0; $order < 20; $order++) {
554 print "direct-$order=$count ";
560 for (my $order = 0; $order < 20; $order++) {
563 print "wakeup-$order=$count ";
608 for (my $order = 0; $order < 20; $order++) {
611 print "wake-$order=$count ";
617 for (my $order = 0; $order < 20; $order++) {
[all …]
/linux-6.15/drivers/gpu/drm/nouveau/nvkm/subdev/therm/
gk104.c
34 const struct gk104_clkgate_engine_info *order = therm->clkgate_order; in gk104_clkgate_enable() local
38 for (i = 0; order[i].type != NVKM_SUBDEV_NR; i++) { in gk104_clkgate_enable()
39 if (!nvkm_device_subdev(dev, order[i].type, order[i].inst)) in gk104_clkgate_enable()
42 nvkm_mask(dev, 0x20200 + order[i].offset, 0xff00, 0x4500); in gk104_clkgate_enable()
50 for (i = 0; order[i].type != NVKM_SUBDEV_NR; i++) { in gk104_clkgate_enable()
51 if (!nvkm_device_subdev(dev, order[i].type, order[i].inst)) in gk104_clkgate_enable()
54 nvkm_mask(dev, 0x20200 + order[i].offset, 0x00ff, 0x0045); in gk104_clkgate_enable()
63 const struct gk104_clkgate_engine_info *order = therm->clkgate_order; in gk104_clkgate_fini() local
67 for (i = 0; order[i].type != NVKM_SUBDEV_NR; i++) { in gk104_clkgate_fini()
68 if (!nvkm_device_subdev(dev, order[i].type, order[i].inst)) in gk104_clkgate_fini()
[all …]
/linux-6.15/drivers/net/ethernet/mellanox/mlx5/core/steering/sws/
dr_buddy.c
75 unsigned int *order) in dr_buddy_find_free_seg() argument
99 *order = order_iter; in dr_buddy_find_free_seg()
120 unsigned int order, in mlx5dr_buddy_alloc_mem() argument
137 while (order_iter > order) { in mlx5dr_buddy_alloc_mem()
144 seg <<= order; in mlx5dr_buddy_alloc_mem()
151 unsigned int seg, unsigned int order) in mlx5dr_buddy_free_mem() argument
153 seg >>= order; in mlx5dr_buddy_free_mem()
160 --buddy->num_free[order]; in mlx5dr_buddy_free_mem()
162 ++order; in mlx5dr_buddy_free_mem()
164 bitmap_set(buddy->bitmap[order], seg, 1); in mlx5dr_buddy_free_mem()
[all …]
/linux-6.15/Documentation/netlink/specs/
conntrack.yaml
23 byte-order: big-endian
111 byte-order: big-endian
115 byte-order: big-endian
135 byte-order: big-endian
140 byte-order: big-endian
145 byte-order: big-endian
156 byte-order: big-endian
170 byte-order: big-endian
176 byte-order: big-endian
184 byte-order: big-endian
[all …]
/linux-6.15/drivers/net/ethernet/mellanox/mlx5/core/steering/hws/
buddy.c
85 u32 *order) in hws_buddy_find_free_seg() argument
109 *order = order_iter; in hws_buddy_find_free_seg()
124 while (order_iter > order) { in mlx5hws_buddy_alloc_mem()
131 seg <<= order; in mlx5hws_buddy_alloc_mem()
138 seg >>= order; in mlx5hws_buddy_free_mem()
140 while (test_bit(seg ^ 1, buddy->bitmap[order])) { in mlx5hws_buddy_free_mem()
141 bitmap_clear(buddy->bitmap[order], seg ^ 1, 1); in mlx5hws_buddy_free_mem()
142 --buddy->num_free[order]; in mlx5hws_buddy_free_mem()
144 ++order; in mlx5hws_buddy_free_mem()
147 bitmap_set(buddy->bitmap[order], seg, 1); in mlx5hws_buddy_free_mem()
[all …]
/linux-6.15/arch/riscv/mm/
hugetlbpage.c
35 unsigned long order; in huge_pte_alloc() local
68 for_each_napot_order(order) { in huge_pte_alloc()
88 unsigned long order; in huge_pte_offset() local
119 for_each_napot_order(order) { in huge_pte_offset()
189 unsigned long order; in arch_make_huge_pte() local
191 for_each_napot_order(order) { in arch_make_huge_pte()
197 if (order == NAPOT_ORDER_MAX) in arch_make_huge_pte()
281 unsigned long order; in huge_ptep_set_access_flags() local
326 unsigned long order; in huge_ptep_set_wrprotect() local
383 unsigned long order; in is_napot_size() local
[all …]
/linux-6.15/kernel/bpf/
cgroup_iter.c
54 int order; member
77 if (p->order == BPF_CGROUP_ITER_DESCENDANTS_PRE) in cgroup_iter_seq_start()
110 if (p->order == BPF_CGROUP_ITER_DESCENDANTS_PRE) in cgroup_iter_seq_next()
176 p->order = aux->cgroup.order; in BTF_ID_LIST_GLOBAL_SINGLE()
200 int order = linfo->cgroup.order; in bpf_iter_attach_cgroup() local
203 if (order != BPF_CGROUP_ITER_DESCENDANTS_PRE && in bpf_iter_attach_cgroup()
204 order != BPF_CGROUP_ITER_DESCENDANTS_POST && in bpf_iter_attach_cgroup()
205 order != BPF_CGROUP_ITER_ANCESTORS_UP && in bpf_iter_attach_cgroup()
206 order != BPF_CGROUP_ITER_SELF_ONLY) in bpf_iter_attach_cgroup()
223 aux->cgroup.order = order; in bpf_iter_attach_cgroup()
[all …]
/linux-6.15/drivers/gpu/drm/i915/selftests/
i915_random.c
70 void i915_random_reorder(unsigned int *order, unsigned int count, in i915_random_reorder() argument
73 i915_prandom_shuffle(order, sizeof(*order), count, state); in i915_random_reorder()
78 unsigned int *order, i; in i915_random_order() local
80 order = kmalloc_array(count, sizeof(*order), in i915_random_order()
82 if (!order) in i915_random_order()
83 return order; in i915_random_order()
86 order[i] = i; in i915_random_order()
88 i915_random_reorder(order, count, state); in i915_random_order()
89 return order; in i915_random_order()
i915_syncmap.c
274 unsigned int pass, order; in igt_syncmap_join_above() local
296 for (order = 0; order < 64; order += SHIFT) { in igt_syncmap_join_above()
335 unsigned int step, order, idx; in igt_syncmap_join_below() local
345 for (order = 64 - SHIFT; order > 0; order -= SHIFT) { in igt_syncmap_join_below()
362 for (order = SHIFT; order < 64; order += SHIFT) { in igt_syncmap_join_below()
383 for (order = SHIFT; order < 64; order += SHIFT) { in igt_syncmap_join_below()
449 unsigned int idx, order; in igt_syncmap_compact() local
462 for (order = SHIFT; order < 64; order += SHIFT) { in igt_syncmap_compact()
477 context, order, idx, in igt_syncmap_compact()
491 if (sync->height != order) { in igt_syncmap_compact()
[all …]