
Searched refs:mm (Results 1 – 25 of 1359) sorted by relevance


/linux/include/linux/
mmap_lock.h
65 rwsem_assert_held(&mm->mmap_lock); in mmap_assert_locked()
84 mmap_assert_write_locked(mm); in vma_end_write_all()
92 smp_store_release(&mm->mm_lock_seq, mm->mm_lock_seq + 1); in vma_end_write_all()
100 init_rwsem(&mm->mmap_lock); in mmap_init_lock()
106 down_write(&mm->mmap_lock); in mmap_write_lock()
130 vma_end_write_all(mm); in mmap_write_unlock()
131 up_write(&mm->mmap_lock); in mmap_write_unlock()
137 vma_end_write_all(mm); in mmap_write_downgrade()
138 downgrade_write(&mm->mmap_lock); in mmap_write_downgrade()
144 down_read(&mm->mmap_lock); in mmap_read_lock()
[all …]
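
The hits above are the mmap_lock wrappers that serialize changes to a process address space. A minimal sketch of the usual read-side pattern follows; probe_vma_flags() is a hypothetical helper invented here, and the caller is assumed to already hold an mm_users reference (e.g. from mmget()).

/* Sketch: look up a VMA under the read side of mm->mmap_lock. */
#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/mmap_lock.h>

static int probe_vma_flags(struct mm_struct *mm, unsigned long addr,
                           vm_flags_t *flags)
{
        struct vm_area_struct *vma;
        int ret = -EFAULT;

        mmap_read_lock(mm);                 /* down_read(&mm->mmap_lock) */
        vma = find_vma(mm, addr);           /* first VMA with vm_end > addr */
        if (vma && vma->vm_start <= addr) {
                *flags = vma->vm_flags;
                ret = 0;
        }
        mmap_read_unlock(mm);
        return ret;
}

Writers follow the same shape with mmap_write_lock()/mmap_write_unlock(), which is where vma_end_write_all() is run, as the hits above show.
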
mmu_notifier.h
89 struct mm_struct *mm);
101 struct mm_struct *mm,
111 struct mm_struct *mm,
122 struct mm_struct *mm,
199 struct mm_struct *mm,
231 struct mm_struct *mm; member
251 struct mm_struct *mm; member
263 struct mm_struct *mm; member
283 mmap_write_lock(mm); in mmu_notifier_get()
285 mmap_write_unlock(mm); in mmu_notifier_get()
[all …]
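
These prototypes belong to the MMU notifier API, which lets secondary MMUs (GPUs, IOMMUs, KVM) learn about invalidations in a registered address space. A hedged sketch of a subscription; the demo_* names are invented and the signatures are paraphrased from memory.

#include <linux/mmu_notifier.h>

static int demo_invalidate_range_start(struct mmu_notifier *mn,
                                       const struct mmu_notifier_range *range)
{
        /* e.g. shoot down device TLB entries for [range->start, range->end) */
        return 0;
}

static const struct mmu_notifier_ops demo_mn_ops = {
        .invalidate_range_start = demo_invalidate_range_start,
};

static struct mmu_notifier demo_mn = { .ops = &demo_mn_ops };

static int demo_attach(struct mm_struct *mm)
{
        /* mmu_notifier_register() takes mmap_write_lock(mm) internally and
         * grabs a reference on the mm, matching the hits shown above */
        return mmu_notifier_register(&demo_mn, mm);
}
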
page_table_check.h
17 void __page_table_check_pte_clear(struct mm_struct *mm, pte_t pte);
18 void __page_table_check_pmd_clear(struct mm_struct *mm, pmd_t pmd);
19 void __page_table_check_pud_clear(struct mm_struct *mm, pud_t pud);
24 void __page_table_check_pte_clear_range(struct mm_struct *mm,
49 __page_table_check_pte_clear(mm, pte); in page_table_check_pte_clear()
57 __page_table_check_pmd_clear(mm, pmd); in page_table_check_pmd_clear()
65 __page_table_check_pud_clear(mm, pud); in page_table_check_pud_clear()
74 __page_table_check_ptes_set(mm, ptep, pte, nr); in page_table_check_ptes_set()
83 __page_table_check_pmd_set(mm, pmdp, pmd); in page_table_check_pmd_set()
92 __page_table_check_pud_set(mm, pudp, pud); in page_table_check_pud_set()
[all …]
ksm.h
23 int ksm_enable_merge_any(struct mm_struct *mm);
25 int ksm_disable(struct mm_struct *mm);
27 int __ksm_enter(struct mm_struct *mm);
28 void __ksm_exit(struct mm_struct *mm);
41 atomic_long_inc(&mm->ksm_zero_pages); in ksm_map_zero_page()
48 atomic_long_dec(&mm->ksm_zero_pages); in ksm_might_unmap_zero_page()
61 __ksm_enter(mm); in ksm_fork()
66 if (test_bit(MMF_VM_MERGE_ANY, &mm->flags)) in ksm_execve()
67 return __ksm_enter(mm); in ksm_execve()
74 if (test_bit(MMF_VM_MERGEABLE, &mm->flags)) in ksm_exit()
[all …]
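
ksm_enable_merge_any() above is the kernel half of process-wide KSM opt-in; userspace reaches it through prctl(). A small, hedged userspace example, assuming Linux 6.4+; the fallback constant values are copied from the uapi prctl header in case the libc headers are older.

#include <stdio.h>
#include <sys/prctl.h>

#ifndef PR_SET_MEMORY_MERGE             /* values from include/uapi/linux/prctl.h */
#define PR_SET_MEMORY_MERGE 67
#define PR_GET_MEMORY_MERGE 68
#endif

int main(void)
{
        if (prctl(PR_SET_MEMORY_MERGE, 1, 0, 0, 0)) {
                perror("PR_SET_MEMORY_MERGE");  /* old kernel or CONFIG_KSM=n */
                return 1;
        }
        printf("merge-any flag: %d\n", (int)prctl(PR_GET_MEMORY_MERGE, 0, 0, 0, 0));
        return 0;
}

Success sets MMF_VM_MERGE_ANY on the mm, which is the flag tested in the ksm_execve() hit above.
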
/linux/arch/x86/include/asm/
mmu_context.h
57 mm->context.ldt = NULL; in init_new_context_ldt()
66 struct mm_struct *mm) in ldt_dup_context() argument
148 struct mm_struct *mm) in init_new_context() argument
163 mm_reset_untag_mask(mm); in init_new_context()
164 init_new_context_ldt(mm); in init_new_context()
171 destroy_context_ldt(mm); in destroy_context()
202 struct mm_struct *mm) in arch_dup_pkeys() argument
216 arch_dup_pkeys(oldmm, mm); in arch_dup_mmap()
217 paravirt_enter_mmap(mm); in arch_dup_mmap()
218 dup_lam(oldmm, mm); in arch_dup_mmap()
[all …]
/linux/drivers/gpu/drm/
drm_buddy.c
258 mm->free_list = kmalloc_array(mm->max_order + 1, in drm_buddy_init()
269 mm->roots = kmalloc_array(mm->n_roots, in drm_buddy_init()
310 drm_block_free(mm, mm->roots[i]); in drm_buddy_init()
338 drm_block_free(mm, mm->roots[i]); in drm_buddy_fini()
344 WARN_ON(mm->avail != mm->size); in drm_buddy_fini()
412 mm->avail += drm_buddy_block_size(mm, block); in drm_buddy_free_block()
727 mm->avail -= drm_buddy_block_size(mm, block); in __alloc_range()
919 mm->avail += drm_buddy_block_size(mm, block); in drm_buddy_block_trim()
931 mm->avail -= drm_buddy_block_size(mm, block); in drm_buddy_block_trim()
1075 mm->avail -= drm_buddy_block_size(mm, block); in drm_buddy_alloc_blocks()
[all …]
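
drm_buddy.c is the generic buddy allocator GPU drivers use to carve up VRAM ranges. A rough usage sketch under stated assumptions: demo_carve_out() is invented, the prototypes are paraphrased from memory, and the third (flags) argument to drm_buddy_free_list() exists only in newer kernels, so check the in-tree header before reusing this.

#include <drm/drm_buddy.h>
#include <linux/list.h>
#include <linux/sizes.h>

static int demo_carve_out(struct drm_buddy *mm)
{
        LIST_HEAD(blocks);
        int err;

        err = drm_buddy_init(mm, SZ_1G, SZ_4K);         /* total size, chunk size */
        if (err)
                return err;

        /* 8 MiB from anywhere in [0, 1 GiB), at >= 4 KiB granularity */
        err = drm_buddy_alloc_blocks(mm, 0, SZ_1G, SZ_8M, SZ_4K,
                                     &blocks, DRM_BUDDY_RANGE_ALLOCATION);
        if (!err)
                drm_buddy_free_list(mm, &blocks, 0);    /* flags arg is recent */

        drm_buddy_fini(mm);     /* warns if avail != size, as in the hit above */
        return err;
}
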
/linux/arch/powerpc/include/asm/
mmu_context.h
122 atomic_inc(&mm->context.active_cpus); in inc_mm_active_cpus()
128 atomic_dec(&mm->context.active_cpus); in dec_mm_active_cpus()
139 inc_mm_active_cpus(mm); in mm_context_add_copro()
165 radix__flush_all_mm(mm); in mm_context_remove_copro()
172 dec_mm_active_cpus(mm); in mm_context_remove_copro()
190 mm_context_add_copro(mm); in mm_context_add_vas_window()
197 mm_context_remove_copro(mm); in mm_context_remove_vas_window()
272 #define pkey_mm_init(mm) argument
273 #define arch_dup_pkeys(oldmm, mm) argument
283 struct mm_struct *mm) in arch_dup_mmap() argument
[all …]
/linux/arch/s390/include/asm/
pgalloc.h
45 rc = crst_table_upgrade(mm, addr + len); in check_asce_limit()
63 if (!mm_p4d_folded(mm)) in p4d_free()
77 if (!mm_pud_folded(mm)) in pud_free()
89 crst_table_free(mm, table); in pmd_alloc_one()
97 if (mm_pmd_folded(mm)) in pmd_free()
120 return (pgd_t *) crst_table_alloc(mm); in pgd_alloc()
134 #define pmd_populate_kernel(mm, pmd, pte) pmd_populate(mm, pmd, pte) argument
139 #define pte_alloc_one_kernel(mm) ((pte_t *)page_table_alloc(mm)) argument
140 #define pte_alloc_one(mm) ((pte_t *)page_table_alloc(mm)) argument
142 #define pte_free_kernel(mm, pte) page_table_free(mm, (unsigned long *) pte) argument
[all …]
mmu_context.h
20 struct mm_struct *mm) in init_new_context() argument
29 mm->context.gmap_asce = 0; in init_new_context()
30 mm->context.flush_mm = 0; in init_new_context()
34 (current->mm && current->mm->context.alloc_pgste); in init_new_context()
35 mm->context.has_pgste = 0; in init_new_context()
36 mm->context.uses_skeys = 0; in init_new_context()
37 mm->context.uses_cmm = 0; in init_new_context()
67 mm->context.asce = __pa(mm->pgd) | _ASCE_TABLE_LENGTH | in init_new_context()
104 struct mm_struct *mm = tsk->mm; in finish_arch_post_lock_switch() local
106 if (mm) { in finish_arch_post_lock_switch()
[all …]
/linux/drivers/gpu/drm/tests/
drm_buddy_test.c
31 struct drm_buddy mm; in drm_test_buddy_alloc_range_bias() local
166 drm_buddy_fini(&mm); in drm_test_buddy_alloc_range_bias()
226 drm_buddy_fini(&mm); in drm_test_buddy_alloc_range_bias()
258 drm_buddy_fini(&mm); in drm_test_buddy_alloc_range_bias()
269 struct drm_buddy mm; in drm_test_buddy_alloc_clear() local
334 drm_buddy_fini(&mm); in drm_test_buddy_alloc_clear()
385 drm_buddy_fini(&mm); in drm_test_buddy_alloc_clear()
400 drm_buddy_fini(&mm); in drm_test_buddy_alloc_clear()
725 size = mm.chunk_size << mm.max_order; in drm_test_buddy_alloc_limit()
737 BIT_ULL(mm.max_order) * mm.chunk_size, in drm_test_buddy_alloc_limit()
[all …]
/linux/arch/m68k/include/asm/
mmu_context.h
45 mm->context = ctx; in get_mmu_context()
46 context_mm[ctx] = mm; in get_mmu_context()
52 #define init_new_context(tsk, mm) (((mm)->context = NO_CONTEXT), 0) argument
85 struct mm_struct *mm) in activate_mm() argument
87 get_mmu_context(mm); in activate_mm()
88 set_context(mm->context, mm->pgd); in activate_mm()
115 mm = &init_mm; in load_ksp_mmu()
118 mm = task->mm; in load_ksp_mmu()
121 if (!mm) in load_ksp_mmu()
190 mm->context = get_free_context(mm); in get_mmu_context()
[all …]
/linux/mm/
mmu_notifier.c
269 .mm = mm, in mn_itree_release()
659 mmgrab(mm); in __mmu_notifier_register()
660 subscription->mm = mm; in __mmu_notifier_register()
835 mmdrop(mm); in mmu_notifier_unregister()
843 struct mm_struct *mm = subscription->mm; in mmu_notifier_free_rcu() local
847 mmdrop(mm); in mmu_notifier_free_rcu()
874 struct mm_struct *mm = subscription->mm; in mmu_notifier_put() local
895 interval_sub->mm = mm; in __mmu_interval_notifier_insert()
913 mmgrab(mm); in __mmu_interval_notifier_insert()
1040 struct mm_struct *mm = interval_sub->mm; in mmu_interval_notifier_remove() local
[all …]
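
The interval-notifier hits above come from the variant that tracks a single [start, start + length) window per subscription. A hedged sketch with invented demo_* names; the read-side mmu_interval_read_begin()/retry handshake and the mmu_notifier_range_blockable() check that a real user needs are reduced to comments.

#include <linux/mmu_notifier.h>

static bool demo_interval_invalidate(struct mmu_interval_notifier *mni,
                                     const struct mmu_notifier_range *range,
                                     unsigned long cur_seq)
{
        /* a real driver must honour mmu_notifier_range_blockable(range) */
        mmu_interval_set_seq(mni, cur_seq);     /* make readers retry */
        /* ... unmap the device's copy of the affected pages ... */
        return true;
}

static const struct mmu_interval_notifier_ops demo_interval_ops = {
        .invalidate = demo_interval_invalidate,
};

static int demo_track(struct mmu_interval_notifier *mni, struct mm_struct *mm,
                      unsigned long start, unsigned long length)
{
        return mmu_interval_notifier_insert(mni, mm, start, length,
                                            &demo_interval_ops);
}
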
debug.c
217 mm, mm->task_size, in dump_mm()
218 mm->mmap_base, mm->mmap_legacy_base, in dump_mm()
219 mm->pgd, atomic_read(&mm->mm_users), in dump_mm()
223 mm->hiwater_rss, mm->hiwater_vm, mm->total_vm, mm->locked_vm, in dump_mm()
225 mm->data_vm, mm->exec_vm, mm->stack_vm, in dump_mm()
226 mm->start_code, mm->end_code, mm->start_data, mm->end_data, in dump_mm()
227 mm->start_brk, mm->brk, mm->start_stack, in dump_mm()
228 mm->arg_start, mm->arg_end, mm->env_start, mm->env_end, in dump_mm()
229 mm->binfmt, mm->flags, in dump_mm()
241 mm->numa_next_scan, mm->numa_scan_offset, mm->numa_scan_seq, in dump_mm()
[all …]
/linux/arch/powerpc/mm/book3s64/
mmu_context.c
101 if (!mm->context.hash_context) in hash__init_new_context()
118 if (mm->context.id == 0) { in hash__init_new_context()
146 pkey_mm_init(mm); in hash__init_new_context()
207 mm->context.id = index; in init_new_context()
209 mm->context.pte_frag = NULL; in init_new_context()
210 mm->context.pmd_frag = NULL; in init_new_context()
212 mm_iommu_init(mm); in init_new_context()
265 frag = mm->context.pte_frag; in destroy_pagetable_cache()
269 frag = mm->context.pmd_frag; in destroy_pagetable_cache()
296 subpage_prot_free(mm); in destroy_context()
[all …]
slice.c
93 vma = find_vma(mm, addr); in slice_area_is_free()
127 if (!slice_low_has_vma(mm, i)) in slice_mask_for_free()
134 if (!slice_high_has_vma(mm, i)) in slice_mask_for_free()
173 struct mm_struct *mm = parm; in slice_flush_segments() local
176 if (mm != current->active_mm) in slice_flush_segments()
251 copro_flush_all_slbs(mm); in slice_convert()
383 return slice_find_area_topdown(mm, mm->mmap_base, len, mask, psize, high_limit); in slice_find_area()
385 return slice_find_area_bottomup(mm, mm->mmap_base, len, mask, psize, high_limit); in slice_find_area()
434 struct mm_struct *mm = current->mm; in slice_get_unmapped_area() local
465 BUG_ON(mm->task_size == 0); in slice_get_unmapped_area()
[all …]
/linux/tools/testing/vma/
vma.c
80 if (vma_link(mm, vma)) { in alloc_and_link_vma()
205 mm->map_count = 0; in cleanup_mm()
235 .mm = &mm, in test_simple_merge()
375 .mm = &mm, in test_merge_new()
571 .mm = &mm, in test_vma_merge_special_flags()
643 .mm = &mm, in test_vma_merge_with_close()
852 .mm = &mm, in test_vma_merge_new_with_close()
908 .mm = &mm, in test_merge_existing()
1138 .mm = &mm, in test_anon_vma_non_mergeable()
1245 .mm = &mm, in test_dup_anon_vma()
[all …]
/linux/arch/sparc/include/asm/
mmu_context_64.h
29 void destroy_context(struct mm_struct *mm);
40 __tsb_context_switch(__pa(mm->pgd), in tsb_context_switch_ctx()
55 void tsb_grow(struct mm_struct *mm,
59 void smp_tsb_sync(struct mm_struct *mm);
86 if (unlikely(mm == &init_mm)) in switch_mm()
90 ctx_valid = CTX_VALID(mm->context); in switch_mm()
92 get_new_mmu_context(mm); in switch_mm()
124 tsb_context_switch_ctx(mm, CTX_HWBITS(mm->context)); in switch_mm()
131 cpumask_set_cpu(cpu, mm_cpumask(mm)); in switch_mm()
138 #define activate_mm(active_mm, mm) switch_mm(active_mm, mm, NULL) argument
[all …]
/linux/Documentation/core-api/
mm-api.rst
14 .. kernel-doc:: mm/gup.c
40 .. kernel-doc:: mm/slub.c
46 .. kernel-doc:: mm/util.c
52 .. kernel-doc:: mm/vmalloc.c
103 .. kernel-doc:: mm/memory.c
116 .. kernel-doc:: mm/util.c
119 .. kernel-doc:: mm/rmap.c
121 .. kernel-doc:: mm/mmap.c
126 .. kernel-doc:: mm/swap.c
127 .. kernel-doc:: mm/zpool.c
[all …]
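
mm-api.rst carries almost no prose of its own; each ".. kernel-doc::" directive pulls documentation out of comments in the listed C files. The comment shape those directives extract looks roughly like this (illustrative function, not one taken from mm/):

#include <linux/minmax.h>

/**
 * demo_clamp_len - clamp a requested length (illustrative only)
 * @len: length requested by the caller
 * @max: upper bound to enforce
 *
 * Blocks formatted like this one are what ".. kernel-doc:: mm/util.c"
 * and the other directives above turn into Documentation/core-api/mm-api.rst.
 *
 * Return: @len limited to @max.
 */
static unsigned long demo_clamp_len(unsigned long len, unsigned long max)
{
        return min(len, max);
}
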
/linux/arch/arm/include/asm/
mmu_context.h
32 __check_vmalloc_seq(mm); in check_vmalloc_seq()
44 atomic64_set(&mm->context.id, 0); in init_new_context()
65 check_vmalloc_seq(mm); in check_and_switch_context()
75 mm->context.switch_pending = 1; in check_and_switch_context()
77 cpu_switch_mm(mm->pgd, mm); in check_and_switch_context()
85 struct mm_struct *mm = current->mm; in finish_arch_post_lock_switch() local
87 if (mm && mm->context.switch_pending) { in finish_arch_post_lock_switch()
96 mm->context.switch_pending = 0; in finish_arch_post_lock_switch()
97 cpu_switch_mm(mm->pgd, mm); in finish_arch_post_lock_switch()
144 if (mm != &init_mm) in enter_lazy_tlb()
[all …]
/linux/arch/s390/mm/
pgtable.c
119 mm->context.flush_mm = 1; in ptep_flush_lazy()
186 struct mm_struct *mm) in pgste_set_key() argument
251 if (mm_has_pgste(mm)) { in ptep_xchg_start()
262 if (mm_has_pgste(mm)) { in ptep_xchg_commit()
350 if (mm_has_pgste(mm)) { in ptep_modify_prot_start()
365 if (mm_has_pgste(mm)) { in ptep_modify_prot_commit()
384 if (mm_has_pgste(mm) && mm->context.allow_gmap_hpage_1m) in pmdp_idte_local()
394 if (mm_has_pgste(mm) && mm->context.allow_gmap_hpage_1m) in pmdp_idte_global()
398 if (mm_has_pgste(mm) && mm->context.allow_gmap_hpage_1m) in pmdp_idte_global()
402 if (mm_has_pgste(mm) && mm->context.allow_gmap_hpage_1m) in pmdp_idte_global()
[all …]
/linux/include/linux/sched/
mm.h
37 atomic_inc(&mm->mm_count); in mmgrab()
55 __mmdrop(mm); in mmdrop()
67 __mmdrop(mm); in __mmdrop_delayed()
77 if (atomic_dec_and_test(&mm->mm_count)) in mmdrop_sched()
83 mmdrop(mm); in mmdrop_sched()
91 mmgrab(mm); in mmgrab_lazy_tlb()
97 mmdrop(mm); in mmdrop_lazy_tlb()
110 mmdrop_sched(mm); in mmdrop_lazy_tlb_sched()
133 atomic_inc(&mm->mm_users); in mmget()
238 rcu_dereference(tsk->real_parent)->mm == tsk->mm; in in_vfork()
[all …]
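
The hits above cover the two distinct reference counts on an mm: mm_count (mmgrab()/mmdrop()) pins struct mm_struct itself, while mm_users (mmget()/mmput()) also keeps the page tables and VMAs alive. A sketch of the common pattern for inspecting another task's address space; demo_inspect_task() is a made-up helper.

#include <linux/mm.h>
#include <linux/printk.h>
#include <linux/sched/mm.h>

static void demo_inspect_task(struct task_struct *task)
{
        struct mm_struct *mm = get_task_mm(task);       /* elevates mm_users */

        if (!mm)
                return;         /* kernel thread, or the mm is already gone */

        mmap_read_lock(mm);
        pr_info("total_vm=%lu pages\n", mm->total_vm);
        mmap_read_unlock(mm);

        mmput(mm);      /* pairs with get_task_mm(); may tear down page tables */
}

mmgrab()/mmdrop() would instead be used by code such as the lazy-TLB paths above, which only needs the structure to stay allocated, not the mappings.
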
/linux/arch/x86/kernel/
ldt.c
143 load_mm_ldt(mm); in flush_ldt()
193 if (mm->context.ldt) { in do_sanity_check()
429 on_each_cpu_mask(mm_cpumask(mm), flush_ldt, mm, true); in install_ldt()
475 free_ldt_pgtables(mm); in ldt_dup_context()
494 mm->context.ldt = NULL; in destroy_context_ldt()
499 free_ldt_pgtables(mm); in ldt_arch_exit_mmap()
504 struct mm_struct *mm = current->mm; in read_ldt() local
510 if (!mm->context.ldt) { in read_ldt()
580 struct mm_struct *mm = current->mm; in write_ldt() local
651 free_ldt_pgtables(mm); in write_ldt()
[all …]
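
ldt.c maintains the per-process LDT that userspace manipulates through the x86-only modify_ldt() syscall, which has no glibc wrapper. A hedged userspace sketch: function code 0x11 means "write descriptor" in the modern format, and the descriptor values here are only an example.

#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/syscall.h>
#include <asm/ldt.h>            /* struct user_desc */

int main(void)
{
        struct user_desc desc;

        memset(&desc, 0, sizeof(desc));
        desc.entry_number   = 0;
        desc.base_addr      = 0;
        desc.limit          = 0xfffff;
        desc.seg_32bit      = 1;
        desc.limit_in_pages = 1;
        desc.useable        = 1;

        if (syscall(SYS_modify_ldt, 0x11, &desc, sizeof(desc)) != 0) {
                perror("modify_ldt");
                return 1;
        }
        return 0;
}
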
/linux/arch/sparc/mm/
tlb.c
26 struct mm_struct *mm = tb->mm; in flush_tlb_pending() local
33 if (CTX_VALID(mm->context)) { in flush_tlb_pending()
81 if (unlikely(nr != 0 && mm != tb->mm)) { in tlb_batch_add_one()
88 global_flush_tlb_page(mm, vaddr); in tlb_batch_add_one()
93 tb->mm = mm; in tlb_batch_add_one()
173 if (mm == &init_mm) in __set_pmd_acct()
189 mm->context.thp_pte_count++; in __set_pmd_acct()
194 mm->context.thp_pte_count--; in __set_pmd_acct()
277 if (!pmd_huge_pte(mm, pmdp)) in pgtable_trans_huge_deposit()
281 pmd_huge_pte(mm, pmdp) = pgtable; in pgtable_trans_huge_deposit()
[all …]
tsb.c
121 struct mm_struct *mm = tb->mm; in flush_tsb_user() local
524 tsb_context_switch(mm); in tsb_grow()
528 smp_tsb_sync(mm); in tsb_grow()
545 spin_lock_init(&mm->context.lock); in init_new_context()
549 mm->context.tag_store = NULL; in init_new_context()
560 mm->context.thp_pte_count = 0; in init_new_context()
579 tsb_grow(mm, MM_TSB_HUGE, in init_new_context()
611 if (CTX_VALID(mm->context)) { in destroy_context()
619 if (mm->context.tag_store) { in destroy_context()
632 kfree(mm->context.tag_store); in destroy_context()
[all …]
/linux/arch/arm64/include/asm/
mmu_context.h
60 cpu_do_switch_mm(virt_to_phys(pgd),mm); in cpu_switch_mm()
108 cpu_switch_mm(mm->pgd, mm); in cpu_uninstall_idmap()
172 #define init_new_context(tsk, mm) init_new_context(tsk, mm) argument
176 atomic64_set(&mm->context.id, 0); in init_new_context()
177 refcount_set(&mm->context.pinned, 0); in init_new_context()
186 struct mm_struct *mm) in arch_dup_pkeys() argument
194 arch_dup_pkeys(oldmm, mm); in arch_dup_mmap()
210 struct mm_struct *mm) in update_saved_ttbr0() argument
217 if (mm == &init_mm) in update_saved_ttbr0()
220 ttbr = phys_to_ttbr(virt_to_phys(mm->pgd)) | ASID(mm) << 48; in update_saved_ttbr0()
[all …]

