Searched for refs:mm (results 1–25 of 651), sorted by relevance

/arch/x86/include/asm/
mmu_context.h
57 mm->context.ldt = NULL; in init_new_context_ldt()
66 struct mm_struct *mm) in ldt_dup_context() argument
170 mm_init_global_asid(mm); in init_new_context()
171 mm_reset_untag_mask(mm); in init_new_context()
172 init_new_context_ldt(mm); in init_new_context()
179 destroy_context_ldt(mm); in destroy_context()
180 mm_free_global_asid(mm); in destroy_context()
211 struct mm_struct *mm) in arch_dup_pkeys() argument
226 paravirt_enter_mmap(mm); in arch_dup_mmap()
227 dup_lam(oldmm, mm); in arch_dup_mmap()
[all …]
pkeys.h
30 return __execute_only_pkey(mm); in execute_only_pkey()
46 #define mm_pkey_allocation_map(mm) (mm->context.pkey_allocation_map) argument
47 #define mm_set_pkey_allocated(mm, pkey) do { \ argument
48 mm_pkey_allocation_map(mm) |= (1U << pkey); \
50 #define mm_set_pkey_free(mm, pkey) do { \ argument
71 if (pkey == mm->context.execute_only_pkey) in mm_pkey_is_allocated()
81 int mm_pkey_alloc(struct mm_struct *mm) in mm_pkey_alloc() argument
100 ret = ffz(mm_pkey_allocation_map(mm)); in mm_pkey_alloc()
102 mm_set_pkey_allocated(mm, ret); in mm_pkey_alloc()
110 if (!mm_pkey_is_allocated(mm, pkey)) in mm_pkey_free()
[all …]
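
Aside: the pkeys.h hits above all orbit one small algorithm. Each mm carries a bitmap of allocated protection keys; mm_pkey_alloc() picks the first zero bit with ffz(), and mm_pkey_free() clears it. Below is a minimal userspace C sketch of that pattern; toy_mm, NR_PKEYS, and the function names are illustrative assumptions, not the kernel's definitions.

#include <stdint.h>
#include <stdio.h>

#define NR_PKEYS 16                        /* assumed key count; x86 has 16 */

struct toy_mm {                            /* hypothetical stand-in for mm_struct */
	uint32_t pkey_allocation_map;      /* bit n set => pkey n is in use */
};

static int ffz32(uint32_t x)               /* first zero bit, like kernel ffz() */
{
	return __builtin_ctz(~x);
}

static int pkey_alloc_toy(struct toy_mm *mm)
{
	if (mm->pkey_allocation_map == (1u << NR_PKEYS) - 1)
		return -1;                 /* every key already taken */
	int key = ffz32(mm->pkey_allocation_map);
	mm->pkey_allocation_map |= 1u << key;
	return key;
}

static void pkey_free_toy(struct toy_mm *mm, int key)
{
	mm->pkey_allocation_map &= ~(1u << key);
}

int main(void)
{
	struct toy_mm mm = { .pkey_allocation_map = 1 };  /* key 0 reserved */
	int k = pkey_alloc_toy(&mm);
	printf("allocated pkey %d\n", k);                 /* prints 1 */
	pkey_free_toy(&mm, k);
	return 0;
}
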
pgalloc.h
20 #define paravirt_pgd_alloc(mm) __paravirt_pgd_alloc(mm) argument
50 extern void pgd_free(struct mm_struct *mm, pgd_t *pgd);
65 paravirt_alloc_pte(mm, __pa(pte) >> PAGE_SHIFT); in pmd_populate_kernel()
72 paravirt_alloc_pte(mm, __pa(pte) >> PAGE_SHIFT); in pmd_populate_kernel_safe()
81 paravirt_alloc_pte(mm, pfn); in pmd_populate()
99 paravirt_alloc_pmd(mm, __pa(pmd) >> PAGE_SHIFT); in pud_populate()
105 paravirt_alloc_pmd(mm, __pa(pmd) >> PAGE_SHIFT); in pud_populate_safe()
113 paravirt_alloc_pud(mm, __pa(pud) >> PAGE_SHIFT); in p4d_populate()
119 paravirt_alloc_pud(mm, __pa(pud) >> PAGE_SHIFT); in p4d_populate_safe()
136 paravirt_alloc_p4d(mm, __pa(p4d) >> PAGE_SHIFT); in pgd_populate()
[all …]
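
The paravirt_alloc_pte/pmd/pud calls in the pgalloc.h hits follow one pattern: after a page table is linked in, its page frame number is reported to a hook that is a no-op on bare metal and does real work under a hypervisor. A hedged sketch of that hook-table idea, with invented names:

#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12

struct pv_ops_toy {                        /* toy analogue of a paravirt op table */
	void (*alloc_pte)(unsigned long pfn);
};

static void native_alloc_pte(unsigned long pfn)
{
	(void)pfn;                         /* bare metal: nothing to do */
}

static void hypervisor_alloc_pte(unsigned long pfn)
{
	printf("tell hypervisor: pfn %lu now holds a PTE page\n", pfn);
}

static struct pv_ops_toy pv_ops = { .alloc_pte = native_alloc_pte };

static void pmd_populate_toy(uint64_t *pmd_entry, uint64_t pte_phys)
{
	*pmd_entry = pte_phys | 0x1;       /* toy "present" bit */
	pv_ops.alloc_pte(pte_phys >> PAGE_SHIFT);
}

int main(void)
{
	uint64_t pmd = 0;
	pv_ops.alloc_pte = hypervisor_alloc_pte;  /* pretend we boot paravirtualized */
	pmd_populate_toy(&pmd, 0x5000);
	return 0;
}
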
/arch/s390/include/asm/
pgalloc.h
42 if (addr + len > mm->context.asce_limit && in check_asce_limit()
44 rc = crst_table_upgrade(mm, addr + len); in check_asce_limit()
65 if (mm_p4d_folded(mm)) in p4d_free()
86 if (mm_pud_folded(mm)) in pud_free()
101 crst_table_free(mm, table); in pmd_alloc_one()
109 if (mm_pmd_folded(mm)) in pmd_free()
153 #define pmd_populate_kernel(mm, pmd, pte) pmd_populate(mm, pmd, pte) argument
158 #define pte_alloc_one_kernel(mm) ((pte_t *)page_table_alloc(mm)) argument
159 #define pte_alloc_one(mm) ((pte_t *)page_table_alloc(mm)) argument
161 #define pte_free_kernel(mm, pte) page_table_free(mm, (unsigned long *) pte) argument
[all …]
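
The mm_p4d_folded()/mm_pud_folded() checks in the s390 pgalloc.h hits exist because shallow address spaces fold the upper levels into the pgd, so freeing a folded level must be a no-op. A toy sketch of that guard, all names assumed:

#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

struct toy_mm {
	int levels;                        /* how many real page-table levels exist */
};

static bool p4d_folded(const struct toy_mm *mm) { return mm->levels < 5; }

static void p4d_free_toy(struct toy_mm *mm, void *p4d)
{
	if (p4d_folded(mm)) {
		puts("p4d folded into pgd: nothing to free");
		return;
	}
	free(p4d);
}

int main(void)
{
	struct toy_mm mm = { .levels = 4 };
	void *pgd = malloc(4096);
	p4d_free_toy(&mm, pgd);            /* no-op: the "p4d" is really the pgd */
	free(pgd);
	return 0;
}
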
mmu_context.h
21 struct mm_struct *mm) in init_new_context() argument
25 spin_lock_init(&mm->context.lock); in init_new_context()
30 mm->context.gmap_asce = 0; in init_new_context()
31 mm->context.flush_mm = 0; in init_new_context()
33 mm->context.has_pgste = 0; in init_new_context()
34 mm->context.uses_skeys = 0; in init_new_context()
35 mm->context.uses_cmm = 0; in init_new_context()
65 mm->context.asce = __pa(mm->pgd) | _ASCE_TABLE_LENGTH | in init_new_context()
103 struct mm_struct *mm = tsk->mm; in finish_arch_post_lock_switch() local
106 if (mm) { in finish_arch_post_lock_switch()
[all …]
tlbflush.h
52 atomic_inc(&mm->context.flush_count); in __tlb_flush_mm()
54 cpumask_copy(mm_cpumask(mm), &mm->context.cpu_attach_mask); in __tlb_flush_mm()
56 gmap_asce = READ_ONCE(mm->context.gmap_asce); in __tlb_flush_mm()
60 __tlb_flush_idte(mm->context.asce); in __tlb_flush_mm()
65 atomic_dec(&mm->context.flush_count); in __tlb_flush_mm()
79 spin_lock(&mm->context.lock); in __tlb_flush_mm_lazy()
80 if (mm->context.flush_mm) { in __tlb_flush_mm_lazy()
81 mm->context.flush_mm = 0; in __tlb_flush_mm_lazy()
82 __tlb_flush_mm(mm); in __tlb_flush_mm_lazy()
84 spin_unlock(&mm->context.lock); in __tlb_flush_mm_lazy()
[all …]
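
The tlbflush.h hits sketch s390's lazy flush: writers set mm->context.flush_mm under context.lock, and __tlb_flush_mm_lazy() performs the expensive flush at most once, clearing the flag before flushing. A userspace analogue using a pthread mutex (toy names, not the kernel API):

#include <pthread.h>
#include <stdio.h>

struct toy_context {
	pthread_mutex_t lock;
	int flush_pending;                 /* analogue of mm->context.flush_mm */
};

static void tlb_flush_now(void)
{
	puts("full TLB flush");            /* stand-in for the real IDTE/ASCE flush */
}

static void tlb_flush_mark(struct toy_context *ctx)
{
	pthread_mutex_lock(&ctx->lock);
	ctx->flush_pending = 1;            /* defer: just record that a flush is owed */
	pthread_mutex_unlock(&ctx->lock);
}

static void tlb_flush_lazy(struct toy_context *ctx)
{
	pthread_mutex_lock(&ctx->lock);
	if (ctx->flush_pending) {
		ctx->flush_pending = 0;
		tlb_flush_now();
	}
	pthread_mutex_unlock(&ctx->lock);
}

int main(void)
{
	struct toy_context ctx = { PTHREAD_MUTEX_INITIALIZER, 0 };
	tlb_flush_mark(&ctx);
	tlb_flush_lazy(&ctx);              /* flushes once */
	tlb_flush_lazy(&ctx);              /* no-op: flag already cleared */
	return 0;
}
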
/arch/powerpc/include/asm/
mmu_context.h
122 atomic_inc(&mm->context.active_cpus); in inc_mm_active_cpus()
128 atomic_dec(&mm->context.active_cpus); in dec_mm_active_cpus()
139 inc_mm_active_cpus(mm); in mm_context_add_copro()
165 radix__flush_all_mm(mm); in mm_context_remove_copro()
172 dec_mm_active_cpus(mm); in mm_context_remove_copro()
190 mm_context_add_copro(mm); in mm_context_add_vas_window()
197 mm_context_remove_copro(mm); in mm_context_remove_vas_window()
272 #define pkey_mm_init(mm) argument
273 #define arch_dup_pkeys(oldmm, mm) argument
283 struct mm_struct *mm) in arch_dup_mmap() argument
[all …]
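
The inc/dec_mm_active_cpus() hits show powerpc counting attached coprocessors per mm so invalidations keep being broadcast while any copro may hold translations; per the hits, mm_context_remove_copro() flushes (radix__flush_all_mm) before dropping the reference. A rough C11-atomics sketch of that ordering, with illustrative types:

#include <stdatomic.h>
#include <stdio.h>

struct toy_mm {
	atomic_int active_refs;            /* analogue of context.active_cpus */
};

static void flush_all_mm_toy(struct toy_mm *mm)
{
	(void)mm;
	puts("full flush: copro may hold stale translations");
}

static void copro_attach(struct toy_mm *mm)
{
	atomic_fetch_add(&mm->active_refs, 1);
}

static void copro_detach(struct toy_mm *mm)
{
	/* flush first, then drop the reference, mirroring the ordering above */
	flush_all_mm_toy(mm);
	atomic_fetch_sub(&mm->active_refs, 1);
}

int main(void)
{
	struct toy_mm mm = { 1 };          /* one ref held by the CPU itself */
	copro_attach(&mm);
	copro_detach(&mm);
	printf("refs now %d\n", atomic_load(&mm.active_refs));
	return 0;
}
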
pkeys.h
52 #define mm_pkey_allocation_map(mm) (mm->context.pkey_allocation_map) argument
54 #define __mm_pkey_allocated(mm, pkey) { \ argument
58 #define __mm_pkey_free(mm, pkey) { \ argument
62 #define __mm_pkey_is_allocated(mm, pkey) \ argument
77 return __mm_pkey_is_allocated(mm, pkey); in mm_pkey_is_allocated()
101 if (mm_pkey_allocation_map(mm) == all_pkeys_mask) in mm_pkey_alloc()
104 ret = ffz((u32)mm_pkey_allocation_map(mm)); in mm_pkey_alloc()
105 __mm_pkey_allocated(mm, ret); in mm_pkey_alloc()
115 if (!mm_pkey_is_allocated(mm, pkey)) in mm_pkey_free()
118 __mm_pkey_free(mm, pkey); in mm_pkey_free()
[all …]
/arch/m68k/include/asm/
mmu_context.h
45 mm->context = ctx; in get_mmu_context()
46 context_mm[ctx] = mm; in get_mmu_context()
52 #define init_new_context(tsk, mm) (((mm)->context = NO_CONTEXT), 0) argument
85 struct mm_struct *mm) in activate_mm() argument
87 get_mmu_context(mm); in activate_mm()
88 set_context(mm->context, mm->pgd); in activate_mm()
115 mm = &init_mm; in load_ksp_mmu()
118 mm = task->mm; in load_ksp_mmu()
121 if (!mm) in load_ksp_mmu()
190 mm->context = get_free_context(mm); in get_mmu_context()
[all …]
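
The m68k hits hint at a small fixed table of hardware contexts: context_mm[ctx] remembers which mm owns context ctx, and get_mmu_context() hands a free slot to an mm on first use. A guessy sketch of that slot allocator, with assumed sizes and names:

#include <stdio.h>

#define NO_CONTEXT  0xFF
#define NR_CONTEXTS 8                      /* assumed number of hardware slots */

struct toy_mm { unsigned char context; };

static struct toy_mm *context_mm[NR_CONTEXTS];

static int get_mmu_context_toy(struct toy_mm *mm)
{
	if (mm->context != NO_CONTEXT)
		return mm->context;        /* already have one */
	for (int ctx = 0; ctx < NR_CONTEXTS; ctx++) {
		if (!context_mm[ctx]) {
			mm->context = ctx;
			context_mm[ctx] = mm;
			return ctx;
		}
	}
	return -1;                         /* the real code would steal a slot here */
}

int main(void)
{
	struct toy_mm a = { NO_CONTEXT }, b = { NO_CONTEXT };
	printf("a -> ctx %d\n", get_mmu_context_toy(&a));
	printf("b -> ctx %d\n", get_mmu_context_toy(&b));
	return 0;
}
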
motorola_pgalloc.h
18 extern void *get_pointer_table(struct mm_struct *mm, int type);
27 static inline pte_t *pte_alloc_one_kernel(struct mm_struct *mm) in pte_alloc_one_kernel() argument
29 return get_pointer_table(mm, TABLE_PTE); in pte_alloc_one_kernel()
32 static inline void pte_free_kernel(struct mm_struct *mm, pte_t *pte) in pte_free_kernel() argument
37 static inline pgtable_t pte_alloc_one(struct mm_struct *mm) in pte_alloc_one() argument
39 return get_pointer_table(mm, TABLE_PTE); in pte_alloc_one()
56 return get_pointer_table(mm, TABLE_PMD); in pmd_alloc_one()
59 static inline int pmd_free(struct mm_struct *mm, pmd_t *pmd) in pmd_free() argument
71 static inline void pgd_free(struct mm_struct *mm, pgd_t *pgd) in pgd_free() argument
76 static inline pgd_t *pgd_alloc(struct mm_struct *mm) in pgd_alloc() argument
[all …]
/arch/arm/include/asm/
mmu_context.h
32 __check_vmalloc_seq(mm); in check_vmalloc_seq()
44 atomic64_set(&mm->context.id, 0); in init_new_context()
65 check_vmalloc_seq(mm); in check_and_switch_context()
75 mm->context.switch_pending = 1; in check_and_switch_context()
77 cpu_switch_mm(mm->pgd, mm); in check_and_switch_context()
85 struct mm_struct *mm = current->mm; in finish_arch_post_lock_switch() local
87 if (mm && mm->context.switch_pending) { in finish_arch_post_lock_switch()
96 mm->context.switch_pending = 0; in finish_arch_post_lock_switch()
97 cpu_switch_mm(mm->pgd, mm); in finish_arch_post_lock_switch()
144 if (mm != &init_mm) in enter_lazy_tlb()
[all …]
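
The arm mmu_context.h hits show the switch_pending dance: if the page-table switch cannot happen right away, check_and_switch_context() sets mm->context.switch_pending and finish_arch_post_lock_switch() replays cpu_switch_mm() later. A minimal sketch of that deferral, names assumed:

#include <stdio.h>

struct toy_mm {
	int switch_pending;
	const char *name;
};

static void cpu_switch_mm_toy(struct toy_mm *mm)
{
	printf("installing page tables of %s\n", mm->name);
}

static void check_and_switch(struct toy_mm *mm, int irqs_disabled)
{
	if (irqs_disabled)
		mm->switch_pending = 1;    /* defer: unsafe to switch right now */
	else
		cpu_switch_mm_toy(mm);
}

static void post_lock_switch(struct toy_mm *mm)
{
	if (mm && mm->switch_pending) {
		mm->switch_pending = 0;
		cpu_switch_mm_toy(mm);     /* replay the deferred switch */
	}
}

int main(void)
{
	struct toy_mm mm = { 0, "task A" };
	check_and_switch(&mm, 1);          /* deferred */
	post_lock_switch(&mm);             /* performed here instead */
	return 0;
}
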
/arch/sparc/include/asm/
mmu_context_64.h
29 void destroy_context(struct mm_struct *mm);
40 __tsb_context_switch(__pa(mm->pgd), in tsb_context_switch_ctx()
55 void tsb_grow(struct mm_struct *mm,
59 void smp_tsb_sync(struct mm_struct *mm);
86 if (unlikely(mm == &init_mm)) in switch_mm()
90 ctx_valid = CTX_VALID(mm->context); in switch_mm()
92 get_new_mmu_context(mm); in switch_mm()
124 tsb_context_switch_ctx(mm, CTX_HWBITS(mm->context)); in switch_mm()
131 cpumask_set_cpu(cpu, mm_cpumask(mm)); in switch_mm()
138 #define activate_mm(active_mm, mm) switch_mm(active_mm, mm, NULL) argument
[all …]
/arch/powerpc/mm/book3s64/
mmu_context.c
101 if (!mm->context.hash_context) in hash__init_new_context()
118 if (mm->context.id == 0) { in hash__init_new_context()
146 pkey_mm_init(mm); in hash__init_new_context()
207 mm->context.id = index; in init_new_context()
209 mm->context.pte_frag = NULL; in init_new_context()
210 mm->context.pmd_frag = NULL; in init_new_context()
212 mm_iommu_init(mm); in init_new_context()
265 frag = mm->context.pte_frag; in destroy_pagetable_cache()
269 frag = mm->context.pmd_frag; in destroy_pagetable_cache()
296 subpage_prot_free(mm); in destroy_context()
[all …]
slice.c
93 vma = find_vma(mm, addr); in slice_area_is_free()
127 if (!slice_low_has_vma(mm, i)) in slice_mask_for_free()
134 if (!slice_high_has_vma(mm, i)) in slice_mask_for_free()
173 struct mm_struct *mm = parm; in slice_flush_segments() local
176 if (mm != current->active_mm) in slice_flush_segments()
252 spu_flush_all_slbs(mm); in slice_convert()
385 return slice_find_area_topdown(mm, mm->mmap_base, len, mask, psize, high_limit); in slice_find_area()
387 return slice_find_area_bottomup(mm, mm->mmap_base, len, mask, psize, high_limit); in slice_find_area()
436 struct mm_struct *mm = current->mm; in slice_get_unmapped_area() local
467 BUG_ON(mm->task_size == 0); in slice_get_unmapped_area()
[all …]
/arch/arm/mm/
pgd.c
23 #define _pgd_alloc(mm) __pgd_alloc(mm, 2) argument
24 #define _pgd_free(mm, pgd) __pgd_free(mm, pgd) argument
129 mm_dec_nr_pmds(mm); in pgd_alloc()
169 pte_free(mm, pte); in pgd_free()
170 mm_dec_nr_ptes(mm); in pgd_free()
173 pmd_free(mm, pmd); in pgd_free()
174 mm_dec_nr_pmds(mm); in pgd_free()
177 pud_free(mm, pud); in pgd_free()
180 p4d_free(mm, p4d); in pgd_free()
199 pmd_free(mm, pmd); in pgd_free()
[all …]
/arch/arm64/include/asm/
mmu_context.h
61 cpu_do_switch_mm(virt_to_phys(pgd),mm); in cpu_switch_mm()
109 cpu_switch_mm(mm->pgd, mm); in cpu_uninstall_idmap()
173 #define init_new_context(tsk, mm) init_new_context(tsk, mm) argument
177 atomic64_set(&mm->context.id, 0); in init_new_context()
187 struct mm_struct *mm) in arch_dup_pkeys() argument
195 arch_dup_pkeys(oldmm, mm); in arch_dup_mmap()
211 struct mm_struct *mm) in update_saved_ttbr0() argument
218 if (mm == &init_mm) in update_saved_ttbr0()
221 ttbr = phys_to_ttbr(virt_to_phys(mm->pgd)) | ASID(mm) << 48; in update_saved_ttbr0()
227 struct mm_struct *mm) in update_saved_ttbr0() argument
[all …]
pkeys.h
43 #define mm_pkey_allocation_map(mm) (mm)->context.pkey_allocation_map argument
44 #define mm_set_pkey_allocated(mm, pkey) do { \ argument
45 mm_pkey_allocation_map(mm) |= (1U << pkey); \
47 #define mm_set_pkey_free(mm, pkey) do { \ argument
48 mm_pkey_allocation_map(mm) &= ~(1U << pkey); \
61 return mm_pkey_allocation_map(mm) & (1U << pkey); in mm_pkey_is_allocated()
86 if (mm_pkey_allocation_map(mm) == all_pkeys_mask) in mm_pkey_alloc()
89 ret = ffz(mm_pkey_allocation_map(mm)); in mm_pkey_alloc()
91 mm_set_pkey_allocated(mm, ret); in mm_pkey_alloc()
98 if (!mm_pkey_is_allocated(mm, pkey)) in mm_pkey_free()
[all …]
/arch/s390/mm/
pgtable.c
111 mm->context.flush_mm = 1; in ptep_flush_lazy()
178 struct mm_struct *mm) in pgste_set_key() argument
243 if (mm_has_pgste(mm)) { in ptep_xchg_start()
254 if (mm_has_pgste(mm)) { in ptep_xchg_commit()
342 if (mm_has_pgste(mm)) { in ptep_modify_prot_start()
355 if (mm_has_pgste(mm)) { in ptep_modify_prot_commit()
374 if (mm_has_pgste(mm) && mm->context.allow_gmap_hpage_1m) in pmdp_idte_local()
384 if (mm_has_pgste(mm) && mm->context.allow_gmap_hpage_1m) in pmdp_idte_global()
388 if (mm_has_pgste(mm) && mm->context.allow_gmap_hpage_1m) in pmdp_idte_global()
392 if (mm_has_pgste(mm) && mm->context.allow_gmap_hpage_1m) in pmdp_idte_global()
[all …]
gmap_helpers.c
31 dec_mm_counter(mm, MM_SWAPENTS); in ptep_zap_swap_entry()
52 mmap_assert_locked(mm); in gmap_helper_zap_one_page()
55 vma = vma_lookup(mm, vmaddr); in gmap_helper_zap_one_page()
83 mmap_assert_locked(mm); in gmap_helper_discard()
142 VMA_ITERATOR(vmi, mm, 0); in __gmap_helper_unshare_zeropages()
200 struct mm_struct *mm = current->mm; in gmap_helper_disable_cow_sharing() local
203 mmap_assert_write_locked(mm); in gmap_helper_disable_cow_sharing()
205 if (!mm->context.allow_cow_sharing) in gmap_helper_disable_cow_sharing()
208 mm->context.allow_cow_sharing = 0; in gmap_helper_disable_cow_sharing()
218 rc = ksm_disable(mm); in gmap_helper_disable_cow_sharing()
[all …]
/arch/mips/include/asm/
mmu_context.h
111 return mm->context.asid[cpu]; in cpu_context()
115 struct mm_struct *mm, u64 ctx) in set_cpu_context() argument
120 mm->context.asid[cpu] = ctx; in set_cpu_context()
124 #define cpu_asid(cpu, mm) \ argument
142 set_cpu_context(0, mm, 0); in init_new_context()
145 set_cpu_context(i, mm, 0); in init_new_context()
183 dsemul_mm_cleanup(mm); in destroy_context()
187 drop_mmu_context(struct mm_struct *mm) in drop_mmu_context() argument
197 ctx = cpu_context(cpu, mm); in drop_mmu_context()
224 get_new_mmu_context(mm); in drop_mmu_context()
[all …]
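
The mips hits revolve around per-CPU ASIDs: each mm stores one context.asid[] entry per CPU, zeroed by init_new_context() and refilled from a per-CPU counter when stale. A toy version of that bookkeeping, with assumed field names and no rollover handling:

#include <stdint.h>
#include <stdio.h>

#define NR_CPUS   4
#define ASID_MASK 0xFFu                    /* assumed hardware ASID width */

struct toy_mm { uint64_t asid[NR_CPUS]; };

static uint64_t asid_cache[NR_CPUS];       /* last ASID handed out per CPU */

static void init_new_context_toy(struct toy_mm *mm)
{
	for (int cpu = 0; cpu < NR_CPUS; cpu++)
		mm->asid[cpu] = 0;         /* 0 == "no ASID yet on this CPU" */
}

static void get_new_mmu_context_toy(struct toy_mm *mm, int cpu)
{
	mm->asid[cpu] = ++asid_cache[cpu];
	/* a real kernel flushes the TLB when the ASID space wraps */
}

int main(void)
{
	struct toy_mm mm;
	init_new_context_toy(&mm);
	get_new_mmu_context_toy(&mm, 0);
	printf("cpu0 asid = %llu\n",
	       (unsigned long long)(mm.asid[0] & ASID_MASK));
	return 0;
}
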
/arch/x86/kernel/
ldt.c
143 load_mm_ldt(mm); in flush_ldt()
193 if (mm->context.ldt) { in do_sanity_check()
429 on_each_cpu_mask(mm_cpumask(mm), flush_ldt, mm, true); in install_ldt()
475 free_ldt_pgtables(mm); in ldt_dup_context()
494 mm->context.ldt = NULL; in destroy_context_ldt()
499 free_ldt_pgtables(mm); in ldt_arch_exit_mmap()
504 struct mm_struct *mm = current->mm; in read_ldt() local
510 if (!mm->context.ldt) { in read_ldt()
580 struct mm_struct *mm = current->mm; in write_ldt() local
651 free_ldt_pgtables(mm); in write_ldt()
[all …]
/arch/sparc/mm/
tlb.c
26 struct mm_struct *mm = tb->mm; in flush_tlb_pending() local
33 if (CTX_VALID(mm->context)) { in flush_tlb_pending()
84 if (unlikely(nr != 0 && mm != tb->mm)) { in tlb_batch_add_one()
91 global_flush_tlb_page(mm, vaddr); in tlb_batch_add_one()
96 tb->mm = mm; in tlb_batch_add_one()
176 if (mm == &init_mm) in __set_pmd_acct()
192 mm->context.thp_pte_count++; in __set_pmd_acct()
197 mm->context.thp_pte_count--; in __set_pmd_acct()
280 if (!pmd_huge_pte(mm, pmdp)) in pgtable_trans_huge_deposit()
284 pmd_huge_pte(mm, pmdp) = pgtable; in pgtable_trans_huge_deposit()
[all …]
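
The sparc tlb.c hits show flush batching: tlb_batch_add_one() queues virtual addresses per mm and flushes early when the pending batch belongs to a different mm or is full. A small sketch of that queue, sizes and types invented:

#include <stdio.h>

#define TLB_BATCH_NR 8                     /* assumed batch size */

struct toy_mm { const char *name; };

struct tlb_batch_toy {
	struct toy_mm *mm;
	unsigned long vaddrs[TLB_BATCH_NR];
	int nr;
};

static void flush_pending(struct tlb_batch_toy *tb)
{
	if (tb->nr)
		printf("flushing %d page(s) of %s\n", tb->nr, tb->mm->name);
	tb->nr = 0;
}

static void batch_add(struct tlb_batch_toy *tb, struct toy_mm *mm,
		      unsigned long vaddr)
{
	if (tb->nr && tb->mm != mm)
		flush_pending(tb);         /* batch belongs to another mm */
	tb->mm = mm;
	tb->vaddrs[tb->nr++] = vaddr;
	if (tb->nr == TLB_BATCH_NR)
		flush_pending(tb);         /* batch full */
}

int main(void)
{
	struct toy_mm a = { "mm A" }, b = { "mm B" };
	struct tlb_batch_toy tb = { 0 };
	batch_add(&tb, &a, 0x1000);
	batch_add(&tb, &b, 0x2000);        /* forces a flush of mm A's entry */
	flush_pending(&tb);
	return 0;
}
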
tsb.c
121 struct mm_struct *mm = tb->mm; in flush_tsb_user() local
524 tsb_context_switch(mm); in tsb_grow()
528 smp_tsb_sync(mm); in tsb_grow()
545 spin_lock_init(&mm->context.lock); in init_new_context()
549 mm->context.tag_store = NULL; in init_new_context()
560 mm->context.thp_pte_count = 0; in init_new_context()
579 tsb_grow(mm, MM_TSB_HUGE, in init_new_context()
611 if (CTX_VALID(mm->context)) { in destroy_context()
619 if (mm->context.tag_store) { in destroy_context()
632 kfree(mm->context.tag_store); in destroy_context()
[all …]
/arch/loongarch/include/asm/
mmu_context.h
34 #define cpu_context(cpu, mm) ((mm)->context.asid[cpu]) argument
36 #define cpu_asid(cpu, mm) (cpu_context((cpu), (mm)) & cpu_asid_mask(&cpu_data[cpu])) argument
59 cpu_context(cpu, mm) = asid_cache(cpu) = asid; in get_new_mmu_context()
72 cpu_context(i, mm) = 0; in init_new_context()
134 #define deactivate_mm(task, mm) do { } while (0) argument
150 if (asid == cpu_asid(cpu, mm)) { in drop_mmu_context()
153 if (!current->mm || (current->mm == mm)) { in drop_mmu_context()
154 get_new_mmu_context(mm, cpu, &need_flush); in drop_mmu_context()
156 write_csr_asid(cpu_asid(cpu, mm)); in drop_mmu_context()
165 cpu_context(cpu, mm) = 0; in drop_mmu_context()
[all …]
/arch/alpha/include/asm/
tlbflush.h
18 ev5_flush_tlb_current(struct mm_struct *mm) in ev5_flush_tlb_current() argument
20 __load_new_mm_context(mm); in ev5_flush_tlb_current()
33 __load_new_mm_context(mm); in ev5_flush_tlb_current_page()
56 flush_tlb_other(struct mm_struct *mm) in flush_tlb_other() argument
74 flush_tlb_mm(struct mm_struct *mm) in flush_tlb_mm() argument
76 if (mm == current->active_mm) in flush_tlb_mm()
77 flush_tlb_current(mm); in flush_tlb_mm()
79 flush_tlb_other(mm); in flush_tlb_mm()
86 struct mm_struct *mm = vma->vm_mm; in flush_tlb_page() local
88 if (mm == current->active_mm) in flush_tlb_page()
[all …]
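
The alpha tlbflush.h hits end on a classic dispatch: flush_tlb_mm() acts immediately when mm is this CPU's active_mm and otherwise defers to flush_tlb_other(). The deferred half below is a toy assumption (mark the mm so it takes a fresh context at its next activation), not a claim about what alpha's flush_tlb_other() actually does:

#include <stdio.h>

struct toy_mm { int needs_new_context; const char *name; };

static struct toy_mm *active_mm;           /* mm currently on this CPU */

static void flush_tlb_current_toy(struct toy_mm *mm)
{
	printf("reload MMU context of %s now\n", mm->name);
}

static void flush_tlb_other_toy(struct toy_mm *mm)
{
	mm->needs_new_context = 1;         /* handled lazily at next switch */
}

static void flush_tlb_mm_toy(struct toy_mm *mm)
{
	if (mm == active_mm)
		flush_tlb_current_toy(mm);
	else
		flush_tlb_other_toy(mm);
}

int main(void)
{
	struct toy_mm a = { 0, "A" }, b = { 0, "B" };
	active_mm = &a;
	flush_tlb_mm_toy(&a);              /* immediate */
	flush_tlb_mm_toy(&b);              /* deferred */
	printf("B deferred: %d\n", b.needs_new_context);
	return 0;
}
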
