/arch/powerpc/include/asm/
pkeys.h:
    50: #define pkey_alloc_mask(pkey) (0x1 << pkey)  [argument]
    58: #define __mm_pkey_free(mm, pkey) { \  [argument]
    66: pkey_alloc_mask(pkey))
    70: if (pkey < 0 || pkey >= arch_max_pkey())  [in mm_pkey_is_allocated()]
    74: if (__mm_pkey_is_reserved(pkey))  [in mm_pkey_is_allocated()]
    118: __mm_pkey_free(mm, pkey);  [in mm_pkey_free()]
    129: int prot, int pkey);
    131: int prot, int pkey)  [in arch_override_mprotect_pkey(), argument]
    140: if (pkey != -1)  [in arch_override_mprotect_pkey()]
    141: return pkey;  [in arch_override_mprotect_pkey()]
    [all …]
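
A minimal userspace sketch, in C, of the allocation-bitmap idea behind mm_pkey_is_allocated() and __mm_pkey_free() above. NR_PKEYS, RESERVED_MASK, struct mm_sketch and pkey_alloc_sketch() are illustrative names, not the kernel's, and the reserved-key policy shown is an assumption.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define NR_PKEYS       16
    #define PKEY_MASK(k)   (1u << (k))          /* cf. pkey_alloc_mask() */
    #define RESERVED_MASK  PKEY_MASK(0)         /* assume key 0 is reserved as the default key */

    struct mm_sketch { uint32_t pkey_allocation_map; };

    static bool pkey_is_allocated(struct mm_sketch *mm, int pkey)
    {
            if (pkey < 0 || pkey >= NR_PKEYS)
                    return false;
            if (RESERVED_MASK & PKEY_MASK(pkey))    /* cf. __mm_pkey_is_reserved() */
                    return false;
            return mm->pkey_allocation_map & PKEY_MASK(pkey);
    }

    /* Find a free, non-reserved key and mark it allocated in the bitmap. */
    static int pkey_alloc_sketch(struct mm_sketch *mm)
    {
            for (int k = 0; k < NR_PKEYS; k++) {
                    uint32_t mask = PKEY_MASK(k);

                    if (!(mm->pkey_allocation_map & mask) && !(RESERVED_MASK & mask)) {
                            mm->pkey_allocation_map |= mask;
                            return k;
                    }
            }
            return -1;                              /* no free key */
    }

    int main(void)
    {
            struct mm_sketch mm = { .pkey_allocation_map = RESERVED_MASK };
            int k = pkey_alloc_sketch(&mm);

            printf("allocated key %d, is_allocated=%d\n", k, pkey_is_allocated(&mm, k));
            return 0;
    }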
|
mman.h:
    18: unsigned long pkey)  [in arch_calc_vm_prot_bits(), argument]
    21: return (((prot & PROT_SAO) ? VM_SAO : 0) | pkey_to_vmflag_bits(pkey));  [in arch_calc_vm_prot_bits()]
    26: #define arch_calc_vm_prot_bits(prot, pkey) arch_calc_vm_prot_bits(prot, pkey)  [argument]
|
/arch/arm64/include/asm/
pkeys.h:
    29: int prot, int pkey)  [in arch_override_mprotect_pkey(), argument]
    31: if (pkey != -1)  [in arch_override_mprotect_pkey()]
    32: return pkey;  [in arch_override_mprotect_pkey()]
    44: #define mm_set_pkey_allocated(mm, pkey) do { \  [argument]
    45: mm_pkey_allocation_map(mm) |= (1U << pkey); \
    47: #define mm_set_pkey_free(mm, pkey) do { \  [argument]
    48: mm_pkey_allocation_map(mm) &= ~(1U << pkey); \
    58: if (pkey < 0 || pkey >= arch_max_pkey())  [in mm_pkey_is_allocated()]
    61: return mm_pkey_allocation_map(mm) & (1U << pkey);  [in mm_pkey_is_allocated()]
    98: if (!mm_pkey_is_allocated(mm, pkey))  [in mm_pkey_free()]
    [all …]
|
por.h:
    13: static inline bool por_elx_allows_read(u64 por, u8 pkey)  [in por_elx_allows_read(), argument]
    15: u8 perm = POR_ELx_PERM_GET(pkey, por);  [in por_elx_allows_read()]
    20: static inline bool por_elx_allows_write(u64 por, u8 pkey)  [in por_elx_allows_write(), argument]
    22: u8 perm = POR_ELx_PERM_GET(pkey, por);  [in por_elx_allows_write()]
    27: static inline bool por_elx_allows_exec(u64 por, u8 pkey)  [in por_elx_allows_exec(), argument]
    29: u8 perm = POR_ELx_PERM_GET(pkey, por);  [in por_elx_allows_exec()]
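
A userspace sketch, in C, of the per-key lookup done by por_elx_allows_read()/write()/exec() above: the POR value is treated as 16 four-bit fields, one per key. The field layout and the R/X/W bit positions are assumptions standing in for the POE_* encoding in por.h.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define POE_BITS_PER_KEY 4
    #define POE_FIELD_MASK   0xfULL
    #define POE_R            0x1ULL   /* assumed read bit  */
    #define POE_X            0x2ULL   /* assumed exec bit  */
    #define POE_W            0x4ULL   /* assumed write bit */

    /* cf. POR_ELx_PERM_GET(): extract the 4-bit permission field for one key. */
    static uint8_t por_perm_get(uint64_t por, unsigned int pkey)
    {
            return (por >> (pkey * POE_BITS_PER_KEY)) & POE_FIELD_MASK;
    }

    static bool allows_read(uint64_t por, unsigned int pkey)  { return por_perm_get(por, pkey) & POE_R; }
    static bool allows_write(uint64_t por, unsigned int pkey) { return por_perm_get(por, pkey) & POE_W; }
    static bool allows_exec(uint64_t por, unsigned int pkey)  { return por_perm_get(por, pkey) & POE_X; }

    int main(void)
    {
            uint64_t por = (POE_R | POE_W) << (5 * POE_BITS_PER_KEY); /* key 5: read+write, no exec */

            printf("key5 r=%d w=%d x=%d\n",
                   allows_read(por, 5), allows_write(por, 5), allows_exec(por, 5));
            return 0;
    }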
|
mman.h:
    15: unsigned long pkey)  [in arch_calc_vm_prot_bits(), argument]
    27: ret |= pkey & BIT(0) ? VM_PKEY_BIT0 : 0;  [in arch_calc_vm_prot_bits()]
    28: ret |= pkey & BIT(1) ? VM_PKEY_BIT1 : 0;  [in arch_calc_vm_prot_bits()]
    29: ret |= pkey & BIT(2) ? VM_PKEY_BIT2 : 0;  [in arch_calc_vm_prot_bits()]
    35: #define arch_calc_vm_prot_bits(prot, pkey) arch_calc_vm_prot_bits(prot, pkey)  [argument]
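
The arch_calc_vm_prot_bits() lines above scatter the key number across VM_PKEY_BIT0..2 in the VMA flags. A small C sketch of that packing and of the inverse, vma_pkey()-style unpacking; the flag bit positions used here are illustrative, not the kernel's values.

    #include <stdint.h>
    #include <stdio.h>

    #define VM_PKEY_SHIFT 32                        /* assumed position, for the sketch only */
    #define VM_PKEY_BIT0  (1ULL << (VM_PKEY_SHIFT + 0))
    #define VM_PKEY_BIT1  (1ULL << (VM_PKEY_SHIFT + 1))
    #define VM_PKEY_BIT2  (1ULL << (VM_PKEY_SHIFT + 2))

    static uint64_t pkey_to_vmflag_bits(unsigned long pkey)
    {
            uint64_t ret = 0;

            ret |= (pkey & 1) ? VM_PKEY_BIT0 : 0;
            ret |= (pkey & 2) ? VM_PKEY_BIT1 : 0;
            ret |= (pkey & 4) ? VM_PKEY_BIT2 : 0;
            return ret;
    }

    /* Inverse mapping, as a vma_pkey()-style helper. */
    static unsigned long vmflag_bits_to_pkey(uint64_t vm_flags)
    {
            return (vm_flags >> VM_PKEY_SHIFT) & 0x7;
    }

    int main(void)
    {
            uint64_t flags = pkey_to_vmflag_bits(5);

            printf("pkey 5 -> flags %#llx -> pkey %lu\n",
                   (unsigned long long)flags, vmflag_bits_to_pkey(flags));
            return 0;
    }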
|
traps.h:
    28: void arm64_force_sig_fault_pkey(unsigned long far, const char *str, int pkey);
|
pgtable.h:
    234: static inline bool por_el0_allows_pkey(u8 pkey, bool write, bool execute)  [in por_el0_allows_pkey(), argument]
    244: return por_elx_allows_write(por, pkey);  [in por_el0_allows_pkey()]
    247: return por_elx_allows_exec(por, pkey);  [in por_el0_allows_pkey()]
    249: return por_elx_allows_read(por, pkey);  [in por_el0_allows_pkey()]
|
/arch/x86/include/asm/
pkeys.h:
    34: int prot, int pkey);
    36: int prot, int pkey)  [in arch_override_mprotect_pkey(), argument]
    47: #define mm_set_pkey_allocated(mm, pkey) do { \  [argument]
    48: mm_pkey_allocation_map(mm) |= (1U << pkey); \
    50: #define mm_set_pkey_free(mm, pkey) do { \  [argument]
    51: mm_pkey_allocation_map(mm) &= ~(1U << pkey); \
    62: if (pkey < 0)  [in mm_pkey_is_allocated()]
    64: if (pkey >= arch_max_pkey())  [in mm_pkey_is_allocated()]
    71: if (pkey == mm->context.execute_only_pkey)  [in mm_pkey_is_allocated()]
    110: if (!mm_pkey_is_allocated(mm, pkey))  [in mm_pkey_free()]
    [all …]
|
pkru.h:
    19: static inline bool __pkru_allows_read(u32 pkru, u16 pkey)  [in __pkru_allows_read(), argument]
    21: int pkru_pkey_bits = pkey * PKRU_BITS_PER_PKEY;  [in __pkru_allows_read()]
    25: static inline bool __pkru_allows_write(u32 pkru, u16 pkey)  [in __pkru_allows_write(), argument]
    27: int pkru_pkey_bits = pkey * PKRU_BITS_PER_PKEY;  [in __pkru_allows_write()]
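
__pkru_allows_read() and __pkru_allows_write() above rely on PKRU holding two bits per key: AD (access disable) and WD (write disable). A self-contained C sketch of those checks; pkru_allows_read()/pkru_allows_write() are illustrative stand-ins for the kernel helpers.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define PKRU_BITS_PER_PKEY 2
    #define PKRU_AD_BIT        0x1u   /* access disable */
    #define PKRU_WD_BIT        0x2u   /* write disable  */

    static bool pkru_allows_read(uint32_t pkru, unsigned int pkey)
    {
            int shift = pkey * PKRU_BITS_PER_PKEY;

            return !(pkru & (PKRU_AD_BIT << shift));
    }

    static bool pkru_allows_write(uint32_t pkru, unsigned int pkey)
    {
            int shift = pkey * PKRU_BITS_PER_PKEY;

            /* Access-disable forbids writes too, so both bits must be clear. */
            return !(pkru & ((PKRU_AD_BIT | PKRU_WD_BIT) << shift));
    }

    int main(void)
    {
            uint32_t pkru = PKRU_WD_BIT << (3 * PKRU_BITS_PER_PKEY); /* key 3: read-only */

            printf("key3 read=%d write=%d\n",
                   pkru_allows_read(pkru, 3), pkru_allows_write(pkru, 3));
            return 0;
    }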
|
pgtable.h:
    1603: static inline bool __pkru_allows_pkey(u16 pkey, bool write)  [in __pkru_allows_pkey(), argument]
    1607: if (!__pkru_allows_read(pkru, pkey))  [in __pkru_allows_pkey()]
    1609: if (write && !__pkru_allows_write(pkru, pkey))  [in __pkru_allows_pkey()]
|
/arch/powerpc/mm/book3s64/
pkeys.c:
    49: #define pkeyshift(pkey) (PKEY_REG_BITS - ((pkey+1) * AMR_BITS_PER_PKEY))  [argument]
    315: static inline void init_amr(int pkey, u8 init_bits)  [in init_amr(), argument]
    323: static inline void init_iamr(int pkey, u8 init_bits)  [in init_iamr(), argument]
    348: pkey_bits = 0x3ul << pkeyshift(pkey);  [in __arch_set_user_pkey_access()]
    362: init_iamr(pkey, new_iamr_bits);  [in __arch_set_user_pkey_access()]
    370: init_amr(pkey, new_amr_bits);  [in __arch_set_user_pkey_access()]
    392: int pkey)  [in __arch_override_mprotect_pkey(), argument]
    406: pkey = execute_only_pkey(vma->vm_mm);  [in __arch_override_mprotect_pkey()]
    407: if (pkey > 0)  [in __arch_override_mprotect_pkey()]
    408: return pkey;  [in __arch_override_mprotect_pkey()]
    [all …]
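
pkeyshift() above places key 0 in the most-significant two bits of the 64-bit AMR (PKEY_REG_BITS = 64, AMR_BITS_PER_PKEY = 2) and later keys below it. A C sketch of that arithmetic and of the clear-then-set update done around init_amr(); the 2-bit field encodings for access/write disable are assumptions, only the shift math mirrors the macro.

    #include <stdint.h>
    #include <stdio.h>

    #define PKEY_REG_BITS       64
    #define AMR_BITS_PER_PKEY   2
    #define PKEY_DISABLE_ACCESS 0x1u
    #define PKEY_DISABLE_WRITE  0x2u

    static unsigned int pkeyshift(int pkey)
    {
            return PKEY_REG_BITS - ((pkey + 1) * AMR_BITS_PER_PKEY);
    }

    /* Build the 2-bit field for one key and merge it into an AMR image. */
    static uint64_t amr_set_key(uint64_t amr, int pkey, unsigned int init_val)
    {
            uint64_t bits = 0;

            if (init_val & PKEY_DISABLE_ACCESS)
                    bits = 0x3;             /* assume access disable implies write disable */
            else if (init_val & PKEY_DISABLE_WRITE)
                    bits = 0x2;             /* assumed write-disable encoding */

            amr &= ~(0x3ULL << pkeyshift(pkey));    /* cf. pkey_bits = 0x3ul << pkeyshift(pkey) */
            return amr | (bits << pkeyshift(pkey));
    }

    int main(void)
    {
            printf("pkeyshift(0)=%u pkeyshift(1)=%u\n", pkeyshift(0), pkeyshift(1)); /* 62 60 */
            printf("amr=%#llx\n", (unsigned long long)amr_set_key(0, 1, PKEY_DISABLE_WRITE));
            return 0;
    }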
|
/arch/x86/mm/
pkeys.c:
    76: int __arch_override_mprotect_pkey(struct vm_area_struct *vma, int prot, int pkey)  [in __arch_override_mprotect_pkey(), argument]
    82: if (pkey != -1)  [in __arch_override_mprotect_pkey()]
    83: return pkey;  [in __arch_override_mprotect_pkey()]
    92: pkey = execute_only_pkey(vma->vm_mm);  [in __arch_override_mprotect_pkey()]
    93: if (pkey > 0)  [in __arch_override_mprotect_pkey()]
    94: return pkey;  [in __arch_override_mprotect_pkey()]
    113: #define PKRU_AD_MASK(pkey) (PKRU_AD_BIT << ((pkey) * PKRU_BITS_PER_PKEY))  [argument]
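
PKRU_AD_MASK() above is the building block for a default PKRU value in which keys other than key 0 start out access-disabled, so a key denies access until arch_set_user_pkey_access() grants rights. A C sketch of that composition; the "all but key 0" policy is stated here as an assumption of the sketch.

    #include <stdint.h>
    #include <stdio.h>

    #define PKRU_BITS_PER_PKEY 2
    #define PKRU_AD_BIT        0x1u
    #define PKRU_AD_MASK(pkey) (PKRU_AD_BIT << ((pkey) * PKRU_BITS_PER_PKEY))

    int main(void)
    {
            uint32_t init_pkru = 0;

            for (int pkey = 1; pkey < 16; pkey++)   /* leave key 0 fully accessible */
                    init_pkru |= PKRU_AD_MASK(pkey);

            printf("default PKRU = %#x\n", init_pkru);   /* 0x55555554 */
            return 0;
    }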
|
fault.c:
    721: u32 pkey)  [in kernelmode_fixup_or_oops(), argument]
    778: unsigned long address, u32 pkey, int si_code)  [in __bad_area_nosemaphore(), argument]
    784: SIGSEGV, si_code, pkey);  [in __bad_area_nosemaphore()]
    821: force_sig_pkuerr((void __user *)address, pkey);  [in __bad_area_nosemaphore()]
    838: struct vm_area_struct *vma, u32 pkey, int si_code)  [in __bad_area(), argument]
    849: __bad_area_nosemaphore(regs, error_code, address, pkey, si_code);  [in __bad_area()]
    900: u32 pkey = vma_pkey(vma);  [in bad_area_access_error(), local]
    902: __bad_area(regs, error_code, address, mm, vma, pkey, SEGV_PKUERR);  [in bad_area_access_error()]
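
From userspace, the force_sig_pkuerr() call in the fault path above is observed as SIGSEGV with si_code == SEGV_PKUERR. A hedged demo in C, assuming an x86 CPU with protection keys and glibc 2.27+ for the pkey_alloc()/pkey_mprotect() wrappers; it exits quietly where keys are unsupported.

    #define _GNU_SOURCE
    #include <signal.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/mman.h>
    #include <unistd.h>

    static void handler(int sig, siginfo_t *si, void *uc)
    {
            (void)sig; (void)uc;
            if (si->si_code == SEGV_PKUERR)
                    write(STDERR_FILENO, "SEGV_PKUERR (pkey fault)\n", 25);
            else
                    write(STDERR_FILENO, "SIGSEGV, other si_code\n", 23);
            _exit(0);
    }

    int main(void)
    {
            struct sigaction sa;

            memset(&sa, 0, sizeof(sa));
            sa.sa_flags = SA_SIGINFO;
            sa.sa_sigaction = handler;
            sigaction(SIGSEGV, &sa, NULL);

            int pkey = pkey_alloc(0, PKEY_DISABLE_ACCESS);
            if (pkey < 0) {
                    perror("pkey_alloc (no pkey support?)");
                    return 0;
            }

            long pagesz = sysconf(_SC_PAGESIZE);
            char *p = mmap(NULL, pagesz, PROT_READ | PROT_WRITE,
                           MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
            if (p == MAP_FAILED) {
                    perror("mmap");
                    return 1;
            }
            pkey_mprotect(p, pagesz, PROT_READ | PROT_WRITE, pkey);

            *p = 1;         /* faults: the key's access rights are disabled */
            return 0;
    }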
|
/arch/sparc/include/asm/
mman.h:
    30: #define arch_calc_vm_prot_bits(prot, pkey) sparc_calc_vm_prot_bits(prot)  [argument]
|
/arch/powerpc/mm/
fault.c:
    95: int pkey;  [in bad_access_pkey(), local]
    114: pkey = vma_pkey(vma);  [in bad_access_pkey()]
    129: _exception_pkey(regs, address, pkey);  [in bad_access_pkey()]
|
/arch/arm64/mm/
fault.c:
    563: int pkey = -1;  [in do_page_fault(), local]
    645: pkey = vma_pkey(vma);  [in do_page_fault()]
    689: pkey = vma_pkey(vma);  [in do_page_fault()]
    770: arm64_force_sig_fault_pkey(far, inf->name, pkey);  [in do_page_fault()]
|
mmu.c:
    1598: int arch_set_user_pkey_access(struct task_struct *tsk, int pkey, unsigned long init_val)  [in arch_set_user_pkey_access(), argument]
    1611: if (WARN_ON_ONCE(pkey >= arch_max_pkey()))  [in arch_set_user_pkey_access()]
    1626: new_por = POR_ELx_PERM_PREP(pkey, new_por);  [in arch_set_user_pkey_access()]
    1630: old_por &= ~(POE_MASK << POR_ELx_PERM_SHIFT(pkey));  [in arch_set_user_pkey_access()]
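
arch_set_user_pkey_access() above does a clear-then-insert update of the key's 4-bit field in POR_EL0 (mask with POE_MASK << POR_ELx_PERM_SHIFT(pkey), then POR_ELx_PERM_PREP()). A C sketch of that read-modify-write; the permission encodings and the mapping from PKEY_DISABLE_* flags are simplifications, not the kernel's exact policy.

    #include <stdint.h>
    #include <stdio.h>

    #define POE_BITS_PER_KEY 4
    #define POE_FIELD_MASK   0xfULL
    #define POE_NONE         0x0ULL
    #define POE_RX           0x3ULL   /* assumed read|exec encoding       */
    #define POE_RWX          0x7ULL   /* assumed read|exec|write encoding */

    #define PKEY_DISABLE_ACCESS 0x1u
    #define PKEY_DISABLE_WRITE  0x2u

    static uint64_t por_set_pkey_perm(uint64_t por, int pkey, unsigned int init_val)
    {
            unsigned int shift = pkey * POE_BITS_PER_KEY;
            uint64_t perm = POE_RWX;

            if (init_val & PKEY_DISABLE_ACCESS)
                    perm = POE_NONE;                /* simplification: no access at all */
            else if (init_val & PKEY_DISABLE_WRITE)
                    perm = POE_RX;

            por &= ~(POE_FIELD_MASK << shift);      /* cf. old_por &= ~(POE_MASK << ...) */
            return por | (perm << shift);           /* cf. POR_ELx_PERM_PREP()           */
    }

    int main(void)
    {
            uint64_t por = por_set_pkey_perm(0, 2, PKEY_DISABLE_WRITE);

            printf("POR_EL0 image = %#llx\n", (unsigned long long)por);
            return 0;
    }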
|
/arch/s390/include/uapi/asm/
pkey.h (s390 crypto "protected key" UAPI; this pkey is unrelated to memory protection keys):
    468: __u8 __user *pkey; /* in: pkey blob buffer space ptr */  [member]
|
/arch/arm64/kernel/
signal.c:
    93: for (int pkey = 0; pkey < arch_max_pkey(); pkey++)  [in save_reset_user_access_state(), local]
    94: por_enable_all |= POR_ELx_PERM_PREP(pkey, POE_RWX);  [in save_reset_user_access_state()]
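
The loop above builds a POR value granting RWX through every key before running a signal handler, so a restrictive overlay left by the interrupted code cannot block the handler itself. A C sketch of that accumulation, reusing the assumed 4-bit-per-key encoding from the earlier POR sketches; the key count is illustrative (the kernel iterates up to arch_max_pkey()).

    #include <stdint.h>
    #include <stdio.h>

    #define POE_BITS_PER_KEY 4
    #define POE_RWX          0x7ULL   /* assumed read|exec|write encoding */
    #define NR_KEYS_SKETCH   16       /* illustrative: 16 fields fit in 64 bits */

    int main(void)
    {
            uint64_t por_enable_all = 0;

            for (int pkey = 0; pkey < NR_KEYS_SKETCH; pkey++)
                    por_enable_all |= POE_RWX << (pkey * POE_BITS_PER_KEY);

            printf("por_enable_all = %#llx\n", (unsigned long long)por_enable_all);
            return 0;
    }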
|
traps.c:
    268: void arm64_force_sig_fault_pkey(unsigned long far, const char *str, int pkey)  [in arm64_force_sig_fault_pkey(), argument]
    271: force_sig_pkuerr((void __user *)far, pkey);  [in arm64_force_sig_fault_pkey()]
|
/arch/x86/kernel/fpu/
xstate.c:
    1082: int arch_set_user_pkey_access(struct task_struct *tsk, int pkey,  [in arch_set_user_pkey_access(), argument]
    1100: if (WARN_ON_ONCE(pkey >= arch_max_pkey()))  [in arch_set_user_pkey_access()]
    1110: pkey_shift = pkey * PKRU_BITS_PER_PKEY;  [in arch_set_user_pkey_access()]
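
The x86 arch_set_user_pkey_access() above turns the PKEY_DISABLE_* request into AD/WD bits at pkey_shift = pkey * PKRU_BITS_PER_PKEY and merges them into the task's PKRU image. A userspace C sketch of that translation; pkru_set_key() is an illustrative stand-in, and updating the real register (WRPKRU/xstate) is left out.

    #include <stdint.h>
    #include <stdio.h>

    #define PKRU_BITS_PER_PKEY  2
    #define PKRU_AD_BIT         0x1u
    #define PKRU_WD_BIT         0x2u
    #define PKEY_DISABLE_ACCESS 0x1u
    #define PKEY_DISABLE_WRITE  0x2u

    static uint32_t pkru_set_key(uint32_t pkru, int pkey, unsigned long init_val)
    {
            int pkey_shift = pkey * PKRU_BITS_PER_PKEY;
            uint32_t new_bits = 0;

            if (init_val & PKEY_DISABLE_ACCESS)
                    new_bits |= PKRU_AD_BIT;
            if (init_val & PKEY_DISABLE_WRITE)
                    new_bits |= PKRU_WD_BIT;

            /* Clear the key's two bits, then insert the requested ones. */
            pkru &= ~((PKRU_AD_BIT | PKRU_WD_BIT) << pkey_shift);
            return pkru | (new_bits << pkey_shift);
    }

    int main(void)
    {
            uint32_t pkru = pkru_set_key(0, 4, PKEY_DISABLE_WRITE);

            printf("PKRU = %#x\n", pkru);   /* WD for key 4 -> 0x200 */
            return 0;
    }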
|