
Searched refs:mm (Results 1 – 25 of 40) sorted by relevance


/kernel/
fork.c
533 mm->pgd = pgd_alloc(mm); in mm_alloc_pgd()
541 pgd_free(mm, mm->pgd); in mm_free_pgd()
677 WARN_ON_ONCE(mm == current->mm); in __mmdrop()
1117 memset(mm, 0, sizeof(*mm)); in mm_alloc()
1303 mm = task->mm; in get_task_exe_file()
1328 mm = task->mm; in get_task_mm()
1338 if (mm == current->mm) in may_access_mm()
1479 memcpy(mm, oldmm, sizeof(*mm)); in dup_mm()
1491 mm->hiwater_vm = mm->total_vm; in dup_mm()
1537 mm = dup_mm(tsk, current->mm); in copy_mm()
[all …]
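The fork.c hits above touch both reference counters on struct mm_struct: dup_mm() and get_task_mm() deal in mm_users (released with mmput()), while __mmdrop() runs once the mm_count reference taken by mmgrab() goes away. A minimal sketch of the two pairs; the helper name and surrounding context are invented for illustration:

#include <linux/mm_types.h>
#include <linux/sched/mm.h>

/* Sketch: the two reference counts on a struct mm_struct.
 * mm_users keeps the address space (page tables, VMAs) usable;
 * mm_count only keeps the struct mm_struct allocation alive. */
static void mm_refcount_sketch(struct mm_struct *mm)
{
	mmget(mm);	/* mm_users++: address space stays intact */
	/* ... fault pages, walk VMAs ... */
	mmput(mm);	/* last mmput() tears the address space down */

	mmgrab(mm);	/* mm_count++: the struct itself won't be freed */
	/* ... stash the pointer across a context switch ... */
	mmdrop(mm);	/* last mmdrop() ends up in __mmdrop() seen above */
}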
sys.c
1834 struct mm_struct *mm; in getrusage() local
1904 mm = get_task_mm(p); in getrusage()
1905 if (mm) { in getrusage()
1907 mmput(mm); in getrusage()
2046 struct mm_struct *mm = current->mm; in prctl_set_mm_map() local
2106 mmap_read_lock(mm); in prctl_set_mm_map()
2145 mmap_read_unlock(mm); in prctl_set_mm_map()
2183 struct mm_struct *mm = current->mm; in prctl_set_mm() local
2221 mmap_read_lock(mm); in prctl_set_mm()
2313 mmap_read_unlock(mm); in prctl_set_mm()
[all …]
tsacct.c
93 struct mm_struct *mm; in xacct_add_tsk() local
100 mm = get_task_mm(p); in xacct_add_tsk()
101 if (mm) { in xacct_add_tsk()
103 stats->hiwater_rss = get_mm_hiwater_rss(mm) * PAGE_SIZE / KB; in xacct_add_tsk()
104 stats->hiwater_vm = get_mm_hiwater_vm(mm) * PAGE_SIZE / KB; in xacct_add_tsk()
105 mmput(mm); in xacct_add_tsk()
129 if (!likely(tsk->mm)) in __acct_update_integrals()
144 tsk->acct_rss_mem1 += delta * get_mm_rss(tsk->mm) >> 10; in __acct_update_integrals()
145 tsk->acct_vm_mem1 += delta * READ_ONCE(tsk->mm->total_vm) >> 10; in __acct_update_integrals()
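getrusage() and xacct_add_tsk() above follow the same pin-then-release pattern for looking at another task's address space: get_task_mm(), use the mm, mmput(). A hedged sketch; the helper name is hypothetical:

#include <linux/mm.h>
#include <linux/sched/mm.h>
#include <linux/sched/task.h>

/* Hypothetical helper: a task's RSS high-water mark in KB.
 * get_task_mm() returns NULL for kernel threads and takes an
 * mm_users reference that must be dropped with mmput(). */
static unsigned long task_hiwater_rss_kb(struct task_struct *p)
{
	struct mm_struct *mm;
	unsigned long kb = 0;

	mm = get_task_mm(p);
	if (mm) {
		kb = get_mm_hiwater_rss(mm) * PAGE_SIZE / 1024;
		mmput(mm);
	}
	return kb;
}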
exit.c
458 if (likely(tsk->mm == mm)) { in __try_to_set_owner()
475 if (t_mm == mm) { in try_to_set_owner()
496 if (mm->owner != p) in mm_update_next_owner()
553 struct mm_struct *mm = current->mm; in exit_mm() local
556 if (!mm) in exit_mm()
558 mmap_read_lock(mm); in exit_mm()
559 mmgrab_lazy_tlb(mm); in exit_mm()
575 current->mm = NULL; in exit_mm()
580 mmap_read_unlock(mm); in exit_mm()
582 mmput(mm); in exit_mm()
[all …]
ptrace.c
47 struct mm_struct *mm; in ptrace_access_vm() local
50 mm = get_task_mm(tsk); in ptrace_access_vm()
51 if (!mm) in ptrace_access_vm()
57 !ptracer_capable(tsk, mm->user_ns))) { in ptrace_access_vm()
58 mmput(mm); in ptrace_access_vm()
63 mmput(mm); in ptrace_access_vm()
279 struct mm_struct *mm; in __ptrace_may_access() local
340 mm = task->mm; in __ptrace_may_access()
341 if (mm && in __ptrace_may_access()
1294 if (!mm) in ptrace_request()
[all …]
kthread.c
1606 WARN_ON_ONCE(tsk->mm); in kthread_use_mm()
1613 mmgrab(mm); in kthread_use_mm()
1619 tsk->active_mm = mm; in kthread_use_mm()
1620 tsk->mm = mm; in kthread_use_mm()
1621 membarrier_update_current_mm(mm); in kthread_use_mm()
1622 switch_mm_irqs_off(active_mm, mm, tsk); in kthread_use_mm()
1651 WARN_ON_ONCE(!tsk->mm); in kthread_unuse_mm()
1663 tsk->mm = NULL; in kthread_unuse_mm()
1665 mmgrab_lazy_tlb(mm); in kthread_unuse_mm()
1667 enter_lazy_tlb(mm, tsk); in kthread_unuse_mm()
[all …]
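kthread_use_mm()/kthread_unuse_mm() above are the sanctioned way for a kernel thread (the WARN_ON_ONCE() checks insist it has no mm of its own) to temporarily adopt a user address space. A rough sketch of the calling convention, assuming the caller has already pinned the mm with mmget() or get_task_mm(); the helper name is invented:

#include <linux/kthread.h>
#include <linux/sched/mm.h>
#include <linux/types.h>
#include <linux/uaccess.h>

/* Sketch: borrow a user mm so copy_to_user() resolves against that
 * process's address space; @mm must be pinned by the caller. */
static int kthread_poke_user_mm(struct mm_struct *mm,
				void __user *uaddr, u64 value)
{
	int ret;

	kthread_use_mm(mm);		/* current->mm = mm, switch_mm() */
	ret = copy_to_user(uaddr, &value, sizeof(value)) ? -EFAULT : 0;
	kthread_unuse_mm(mm);		/* back to lazy-TLB kernel context */

	return ret;
}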
acct.c
592 if (group_dead && current->mm) { in acct_collect()
593 struct mm_struct *mm = current->mm; in acct_collect() local
594 VMA_ITERATOR(vmi, mm, 0); in acct_collect()
597 mmap_read_lock(mm); in acct_collect()
600 mmap_read_unlock(mm); in acct_collect()
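acct_collect() above shows the current idiom for walking a process's VMAs: declare a VMA_ITERATOR, take mmap_read_lock(), and loop with for_each_vma(). A minimal sketch (hypothetical helper) that just sums the mapped bytes:

#include <linux/mm.h>

/* Sketch: total the size of every VMA in @mm under the mmap read lock. */
static unsigned long sum_vma_bytes(struct mm_struct *mm)
{
	VMA_ITERATOR(vmi, mm, 0);
	struct vm_area_struct *vma;
	unsigned long bytes = 0;

	mmap_read_lock(mm);
	for_each_vma(vmi, vma)
		bytes += vma->vm_end - vma->vm_start;
	mmap_read_unlock(mm);

	return bytes;
}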
kcmp.c
183 ret = kcmp_ptr(task1->mm, task2->mm, KCMP_VM); in SYSCALL_DEFINE5()
audit_watch.c
538 if (!current->mm) in audit_exe_compare()
540 exe_file = get_mm_exe_file(current->mm); in audit_exe_compare()
cred.c
411 if (task->mm) in commit_creds()
412 set_dumpable(task->mm, suid_dumpable); in commit_creds()
cpu.c
907 struct mm_struct *mm = idle->active_mm; in finish_cpu() local
913 WARN_ON(mm != &init_mm); in finish_cpu()
915 mmdrop_lazy_tlb(mm); in finish_cpu()
1226 #define arch_clear_mm_cpumask_cpu(cpu, mm) cpumask_clear_cpu(cpu, mm_cpumask(mm)) argument
1264 arch_clear_mm_cpumask_cpu(cpu, t->mm); in clear_tasks_mm_cpumask()
latencytop.c
118 if (!tsk->mm) in account_global_scheduler_latency()
/kernel/sched/
membarrier.c
209 if (current->mm != mm) in ipi_sync_rq_state()
293 if (!p->mm) in membarrier_global_expedited()
319 struct mm_struct *mm = current->mm; in membarrier_private_expedited() local
329 prepare_sync_core_cmd(mm); in membarrier_private_expedited()
371 if (!p || p->mm != mm) { in membarrier_private_expedited()
384 if (p && p->mm == mm) in membarrier_private_expedited()
483 if (p && p->mm == mm) in sync_runqueues_membarrier_state()
499 struct mm_struct *mm = p->mm; in membarrier_register_global_expedited() local
510 &mm->membarrier_state); in membarrier_register_global_expedited()
518 struct mm_struct *mm = p->mm; in membarrier_register_private_expedited() local
[all …]
sched.h
3540 struct mm_struct *mm = t->mm; in mm_cid_put_lazy() local
3578 cid = mm_cid_pcpu_unset(mm); in mm_cid_put()
3618 while (cid < READ_ONCE(mm->nr_cpus_allowed) && cid < atomic_read(&mm->mm_users)) { in __mm_cid_try_get()
3656 struct mm_struct *mm) in __mm_cid_get() argument
3666 cid = __mm_cid_try_get(t, mm); in __mm_cid_get()
3672 cid = __mm_cid_try_get(t, mm); in __mm_cid_get()
3704 mm_cid_snapshot_time(rq, mm); in __mm_cid_get()
3710 struct mm_struct *mm) in mm_cid_get() argument
3717 cpumask = mm_cidmask(mm); in mm_cid_get()
3720 mm_cid_snapshot_time(rq, mm); in mm_cid_get()
[all …]
core.c
10528 struct mm_struct *mm = t->mm; in __sched_mm_cid_migrate_from_fetch_cid() local
10532 if (!mm) in __sched_mm_cid_migrate_from_fetch_cid()
10569 struct mm_struct *mm = t->mm; in __sched_mm_cid_migrate_from_try_steal_cid() local
10630 struct mm_struct *mm = t->mm; in sched_mm_cid_migrate_to() local
10637 if (!mm) in sched_mm_cid_migrate_to()
10723 if (READ_ONCE(t->mm_cid_active) && t->mm == mm) in sched_mm_cid_remote_clear()
10796 mm = t->mm; in task_mm_cid_work()
10829 struct mm_struct *mm = t->mm; in init_sched_mm_cid() local
10858 struct mm_struct *mm = t->mm; in sched_mm_cid_exit_signals() local
10880 struct mm_struct *mm = t->mm; in sched_mm_cid_before_execve() local
[all …]
fair.c
1499 rss = get_mm_rss(p->mm); in task_nr_scan_windows()
2260 !cur->mm)) in task_numa_compare()
3063 if (tsk->mm == current->mm) in task_numa_group()
3163 if (!p->mm) in task_numa_fault()
3241 WRITE_ONCE(p->mm->numa_scan_seq, READ_ONCE(p->mm->numa_scan_seq) + 1); in reset_ptenuma_scan()
3292 struct mm_struct *mm = p->mm; in task_numa_work() local
3531 mmap_read_unlock(mm); in task_numa_work()
3548 struct mm_struct *mm = p->mm; in init_numa_balancing() local
3550 if (mm) { in init_numa_balancing()
3558 p->numa_scan_seq = mm ? mm->numa_scan_seq : 0; in init_numa_balancing()
[all …]
/kernel/futex/
core.c
276 struct mm_struct *mm = current->mm; in futex_private_hash() local
552 struct mm_struct *mm = current->mm; in get_futex_key() local
625 key->private.mm = mm; in get_futex_key()
727 key->private.mm = mm; in get_futex_key()
1546 struct mm_struct *mm = fph->mm; in __futex_ref_atomic_begin() local
1568 struct mm_struct *mm = fph->mm; in __futex_ref_atomic_end() local
1635 struct mm_struct *mm = fph->mm; in futex_ref_drop() local
1681 struct mm_struct *mm = fph->mm; in futex_ref_get() local
1695 struct mm_struct *mm = fph->mm; in futex_ref_put() local
1709 struct mm_struct *mm = fph->mm; in futex_ref_is_dead() local
[all …]
/kernel/events/
uprobes.c
228 if (du->uprobe == uprobe && du->mm == mm) in delayed_uprobe_check()
245 du->mm = mm; in delayed_uprobe_add()
271 if (mm && du->mm != mm) in delayed_uprobe_remove()
1277 struct mm_struct *mm = info->mm; in register_for_each_vma() local
1739 struct mm_struct *mm = current->mm; in __create_xol_area() local
1784 struct mm_struct *mm = current->mm; in get_xol_area() local
2166 struct mm_struct *mm = current->mm; in uprobe_copy_process() local
2174 if (mm == t->mm && !(flags & CLONE_VFORK)) in uprobe_copy_process()
2185 if (mm == t->mm) in uprobe_copy_process()
2405 struct mm_struct *mm = current->mm; in find_active_uprobe_speculative() local
[all …]
/kernel/trace/
trace_events_user.c
423 mmap_read_lock(mm->mm); in user_event_mm_fault_in()
434 mmap_read_unlock(mm->mm); in user_event_mm_fault_in()
481 mmap_read_lock(mm->mm); in user_event_enabler_fault_fixup()
483 mmap_read_unlock(mm->mm); in user_event_enabler_fault_fixup()
537 mmap_assert_locked(mm->mm); in user_event_enabler_write()
616 mmap_read_lock(mm->mm); in user_event_enabler_update()
625 mmap_read_unlock(mm->mm); in user_event_enabler_update()
714 user_mm->mm = t->mm; in user_event_mm_alloc()
769 mmdrop(mm->mm); in user_event_mm_destroy()
817 mmap_write_lock(mm->mm); in user_event_mm_remove()
[all …]
trace_output.c
396 static int seq_print_user_ip(struct trace_seq *s, struct mm_struct *mm, in seq_print_user_ip() argument
406 if (mm) { in seq_print_user_ip()
409 mmap_read_lock(mm); in seq_print_user_ip()
410 vma = find_vma(mm, ip); in seq_print_user_ip()
421 mmap_read_unlock(mm); in seq_print_user_ip()
1413 struct mm_struct *mm = NULL; in trace_user_stack_print() local
1429 mm = get_task_mm(task); in trace_user_stack_print()
1440 seq_print_user_ip(s, mm, ip, flags); in trace_user_stack_print()
1444 if (mm) in trace_user_stack_print()
1445 mmput(mm); in trace_user_stack_print()
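seq_print_user_ip() above is a compact example of resolving a user-space address: take mmap_read_lock(), look the ip up with find_vma(), then inspect vma->vm_file. A hedged sketch of just that lookup (helper name invented):

#include <linux/mm.h>

/* Sketch: is @ip backed by a file mapping in @mm?  find_vma() returns the
 * first VMA with vm_end > ip, so the start must still be checked. */
static bool user_ip_is_file_backed(struct mm_struct *mm, unsigned long ip)
{
	struct vm_area_struct *vma;
	bool file_backed = false;

	mmap_read_lock(mm);
	vma = find_vma(mm, ip);
	if (vma && vma->vm_start <= ip && vma->vm_file)
		file_backed = true;
	mmap_read_unlock(mm);

	return file_backed;
}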
trace_uprobe.c
1118 typedef bool (*filter_func_t)(struct uprobe_consumer *self, struct mm_struct *mm);
1263 __uprobe_perf_filter(struct trace_uprobe_filter *filter, struct mm_struct *mm) in __uprobe_perf_filter() argument
1268 if (event->hw.target->mm == mm) in __uprobe_perf_filter()
1279 return __uprobe_perf_filter(filter, event->hw.target->mm); in trace_uprobe_filter_event()
1381 static bool uprobe_perf_filter(struct uprobe_consumer *uc, struct mm_struct *mm) in uprobe_perf_filter() argument
1398 ret = __uprobe_perf_filter(filter, mm); in uprobe_perf_filter()
1471 if (!uprobe_perf_filter(&tu->consumer, current->mm)) in uprobe_perf_func()
/kernel/bpf/
mmap_unlock_work.h
12 struct mm_struct *mm; member
49 static inline void bpf_mmap_unlock_mm(struct mmap_unlock_irq_work *work, struct mm_struct *mm) in bpf_mmap_unlock_mm() argument
52 mmap_read_unlock(mm); in bpf_mmap_unlock_mm()
54 work->mm = mm; in bpf_mmap_unlock_mm()
60 rwsem_release(&mm->mmap_lock.dep_map, _RET_IP_); in bpf_mmap_unlock_mm()
task_iter.c
412 struct mm_struct *mm; member
443 curr_mm = info->mm; in task_vma_seq_get_next()
555 info->mm = curr_mm; in task_vma_seq_get_next()
564 info->mm = NULL; in task_vma_seq_get_next()
573 info->mm = NULL; in task_vma_seq_get_next()
646 mmput(info->mm); in task_vma_seq_stop()
647 info->mm = NULL; in task_vma_seq_stop()
757 struct mm_struct *mm; in BPF_CALL_5() local
766 mm = task->mm; in BPF_CALL_5()
767 if (!mm) in BPF_CALL_5()
[all …]
stackmap.c
156 if (!user || !current || !current->mm || irq_work_busy || in stack_map_get_build_id_offset()
157 !mmap_read_trylock(current->mm)) { in stack_map_get_build_id_offset()
174 vma = find_vma(current->mm, ip); in stack_map_get_build_id_offset()
187 bpf_mmap_unlock_mm(work, current->mm); in stack_map_get_build_id_offset()
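stack_map_get_build_id_offset() above cannot sleep on the mmap lock (it may run from a perf callchain context), so it uses mmap_read_trylock() and, via bpf_mmap_unlock_mm(), can defer the unlock to irq_work. A simplified sketch of the trylock-or-give-up half of that pattern (helper name invented, irq_work deferral omitted):

#include <linux/mm.h>

/* Sketch: opportunistic VMA lookup from a context that must not block.
 * On contention, report failure instead of sleeping on the lock. */
static bool vma_lookup_nonblocking(struct mm_struct *mm, unsigned long addr)
{
	struct vm_area_struct *vma;
	bool found = false;

	if (!mmap_read_trylock(mm))
		return false;		/* contended: caller falls back */

	vma = find_vma(mm, addr);
	if (vma && vma->vm_start <= addr)
		found = true;

	mmap_read_unlock(mm);
	return found;
}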
/kernel/cgroup/
cpuset.c
2547 struct mm_struct *mm; member
2559 mmput(mwork->mm); in cpuset_migrate_mm_workfn()
2569 mmput(mm); in cpuset_migrate_mm()
2575 mwork->mm = mm; in cpuset_migrate_mm()
2581 mmput(mm); in cpuset_migrate_mm()
2650 struct mm_struct *mm; in cpuset_update_tasks_nodemask() local
2655 mm = get_task_mm(task); in cpuset_update_tasks_nodemask()
2656 if (!mm) in cpuset_update_tasks_nodemask()
2665 mmput(mm); in cpuset_update_tasks_nodemask()
3183 if (mm) { in cpuset_attach()
[all …]

