Searched refs:ea (Results 1 – 25 of 112) sorted by relevance

/arch/powerpc/mm/
copro_fault.c
21 int copro_handle_mm_fault(struct mm_struct *mm, unsigned long ea, in copro_handle_mm_fault() argument
34 vma = lock_mm_and_find_vma(mm, ea, NULL); in copro_handle_mm_fault()
86 switch (get_region_id(ea)) { in copro_calculate_slb()
88 pr_devel("%s: 0x%llx -- USER_REGION_ID\n", __func__, ea); in copro_calculate_slb()
91 psize = get_slice_psize(mm, ea); in copro_calculate_slb()
92 ssize = user_segment_size(ea); in copro_calculate_slb()
93 vsid = get_user_vsid(&mm->context, ea, ssize); in copro_calculate_slb()
100 vsid = get_kernel_vsid(ea, mmu_kernel_ssize); in copro_calculate_slb()
104 pr_devel("%s: 0x%llx -- IO_REGION_ID\n", __func__, ea); in copro_calculate_slb()
107 vsid = get_kernel_vsid(ea, mmu_kernel_ssize); in copro_calculate_slb()
[all …]
/arch/powerpc/kernel/
hw_breakpoint_constraints.c
17 return ((ea < info->address + info->len) && in ea_user_range_overlaps()
18 (ea + size > info->address)); in ea_user_range_overlaps()
47 return ((ea < hw_end_addr) && (ea + size > hw_start_addr)); in ea_hw_range_overlaps()
84 unsigned long ea, int type, int size, in wp_check_constraints() argument
118 if (ea_user_range_overlaps(ea, size, info)) in wp_check_constraints()
121 if (ea_hw_range_overlaps(ea, size, info)) { in wp_check_constraints()
131 int *type, int *size, unsigned long *ea) in wp_get_instr_detail() argument
145 *ea = op.ea; in wp_get_instr_detail()
148 *ea &= 0xffffffffUL; in wp_get_instr_detail()
154 *ea &= ~(*size - 1); in wp_get_instr_detail()
[all …]
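For context on the ea_user_range_overlaps()/ea_hw_range_overlaps() hits above: both apply the usual half-open interval overlap test between the accessed range [ea, ea + size) and the watchpoint range. A minimal user-space sketch of the same predicate (the names and example values here are illustrative, not kernel API):

/*
 * Standalone sketch, not kernel code: two half-open ranges
 * [ea, ea + size) and [start, start + len) overlap exactly when
 * each one begins before the other one ends.
 */
#include <stdbool.h>
#include <stdio.h>

static bool ranges_overlap(unsigned long ea, unsigned long size,
			   unsigned long start, unsigned long len)
{
	return (ea < start + len) && (ea + size > start);
}

int main(void)
{
	/* 8-byte access at 0x1000 reaches a 4-byte watchpoint at 0x1004 */
	printf("%d\n", ranges_overlap(0x1000, 8, 0x1004, 4)); /* prints 1 */
	/* 4-byte access at 0x0ffc stops short of the watchpoint */
	printf("%d\n", ranges_overlap(0x0ffc, 4, 0x1004, 4)); /* prints 0 */
	return 0;
}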
/arch/nios2/kernel/
traps.c
116 fp->ea -= 4; in breakpoint_c()
127 fp->ea -= 4; in handle_unaligned_c()
149 fp->ea -= 4; in handle_illegal_c()
150 _exception(SIGILL, fp, ILL_ILLOPC, fp->ea); in handle_illegal_c()
156 fp->ea -= 4; in handle_supervisor_instr()
157 _exception(SIGILL, fp, ILL_PRVOPC, fp->ea); in handle_supervisor_instr()
163 fp->ea -= 4; in handle_diverror_c()
177 regs->ea -= 4; in unhandled_exception()
185 _send_sig(SIGUSR1, 0, fp->ea); in handle_trap_1_c()
190 _send_sig(SIGUSR2, 0, fp->ea); in handle_trap_2_c()
[all …]
misaligned.c
75 fp->ea -= 4; in handle_unaligned_c()
83 isn = *(unsigned long *)(fp->ea); in handle_unaligned_c()
156 fp->ea, (unsigned int)addr, in handle_unaligned_c()
160 fp->ea, in handle_unaligned_c()
164 _exception(SIGSEGV, fp, SEGV_MAPERR, fp->ea); in handle_unaligned_c()
174 fp->ea += 4; in handle_unaligned_c()
178 fp->ea, in handle_unaligned_c()
195 fp->ea, fp->ra, fp->sp); in handle_unaligned_c()
199 _exception(SIGBUS, fp, BUS_ADRALN, fp->ea); in handle_unaligned_c()
201 fp->ea += 4; /* else advance */ in handle_unaligned_c()
kgdb.c
52 { "pc", GDB_SIZEOF_REG, offsetof(struct pt_regs, ea) },
101 gdb_regs[GDB_PC] = p->thread.kregs->ea; in sleeping_thread_to_gdb_regs()
106 regs->ea = pc; in kgdb_arch_set_pc()
122 regs->ea = addr; in kgdb_arch_handle_exception()
137 regs->ea -= 4; in kgdb_breakpoint_c()
/arch/powerpc/mm/nohash/
book3e_pgtable.c
84 pgdp = pgd_offset_k(ea); in map_kernel_page()
85 p4dp = p4d_offset(pgdp, ea); in map_kernel_page()
86 pudp = pud_alloc(&init_mm, p4dp, ea); in map_kernel_page()
89 pmdp = pmd_alloc(&init_mm, pudp, ea); in map_kernel_page()
92 ptep = pte_alloc_kernel(pmdp, ea); in map_kernel_page()
96 pgdp = pgd_offset_k(ea); in map_kernel_page()
97 p4dp = p4d_offset(pgdp, ea); in map_kernel_page()
102 pudp = pud_offset(p4dp, ea); in map_kernel_page()
107 pmdp = pmd_offset(pudp, ea); in map_kernel_page()
112 ptep = pte_offset_kernel(pmdp, ea); in map_kernel_page()
[all …]
e500_hugetlbpage.c
104 static inline int book3e_tlb_exists(unsigned long ea, unsigned long pid) in book3e_tlb_exists() argument
113 : "=&r"(found) : "r"(ea)); in book3e_tlb_exists()
119 book3e_hugetlb_preload(struct vm_area_struct *vma, unsigned long ea, pte_t pte) in book3e_hugetlb_preload() argument
128 if (unlikely(is_kernel_addr(ea))) in book3e_hugetlb_preload()
144 if (unlikely(book3e_tlb_exists(ea, mm->context.id))) { in book3e_hugetlb_preload()
155 mas2 = ea & ~((1UL << shift) - 1); in book3e_hugetlb_preload()
/arch/powerpc/lib/
sstep.c
134 return ea; in dform_ea()
152 return ea; in dsform_ea()
169 return ea; in dqform_ea()
188 return ea; in xform_ea()
215 ea = (ea << 2) | (d & 0x3); in mlsd_8lsd_ea()
229 return ea; in mlsd_8lsd_ea()
377 ea += c; in __copy_mem_in()
513 ea += c; in __copy_mem_out()
1071 ea = truncate_if_32bit(regs->msr, ea); in emulate_dcbz()
3333 ea = truncate_if_32bit(regs->msr, op->ea); in emulate_loadstore()
[all …]
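The sstep.c helpers above (dform_ea(), dsform_ea(), dqform_ea(), xform_ea(), mlsd_8lsd_ea()) compute an instruction's effective address from its register and displacement (or index) fields, with truncate_if_32bit() masking the result to 32 bits for 32-bit mode. A standalone sketch of the D-form case under the standard Power ISA encoding; this is an illustration, not the kernel's code:

/*
 * Sketch only: D-form EA = (RA ? GPR[RA] : 0) + sign-extended 16-bit
 * displacement, optionally truncated to 32 bits as truncate_if_32bit()
 * does. gprs[] stands in for the real pt_regs.
 */
#include <stdint.h>
#include <stdio.h>

static uint64_t dform_ea_sketch(uint32_t instr, const uint64_t *gprs, int is_32bit)
{
	int ra = (instr >> 16) & 0x1f;          /* RA field */
	int64_t d = (int16_t)(instr & 0xffff);  /* sign-extended displacement */
	uint64_t ea = (ra ? gprs[ra] : 0) + (uint64_t)d;

	return is_32bit ? (ea & 0xffffffffULL) : ea;
}

int main(void)
{
	uint64_t gprs[32] = { 0 };

	gprs[3] = 0x10000000;
	/* lwz r4, -8(r3): primary opcode 32, RT=4, RA=3, D=-8 */
	uint32_t instr = (32u << 26) | (4u << 21) | (3u << 16) | (uint16_t)-8;

	printf("ea = 0x%llx\n",
	       (unsigned long long)dform_ea_sketch(instr, gprs, 0)); /* 0xffffff8 */
	return 0;
}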
/arch/powerpc/mm/book3s64/
slb.c
59 ea &= ~((1UL << SID_SHIFT) - 1); in assert_slb_presence()
98 assert_slb_presence(false, ea); in create_shadowed_slbe()
304 if (ea & ESID_MASK_1T) in preload_add()
305 ea &= ESID_MASK_1T; in preload_add()
308 esid = ea >> SID_SHIFT; in preload_add()
521 unsigned long ea; in switch_slb() local
526 slb_allocate_user(mm, ea); in switch_slb()
754 if (ea >= H_VMEMMAP_END) in slb_allocate_kernel()
761 if (ea >= H_VMALLOC_END) in slb_allocate_kernel()
768 if (ea >= H_KERN_IO_END) in slb_allocate_kernel()
[all …]
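The slb.c hits above round an EA down to its segment with "ea &= ~((1UL << SID_SHIFT) - 1)" and derive the ESID with "ea >> SID_SHIFT". A tiny sketch of that arithmetic, assuming the common 256MB segment size (SID_SHIFT = 28); the example address is arbitrary:

/*
 * Standalone sketch, not kernel code: ESID extraction and rounding an EA
 * to its segment base, mirroring the slb.c expressions quoted above.
 */
#include <stdio.h>

#define SID_SHIFT 28 /* 256MB segments; an assumption for this sketch */

int main(void)
{
	unsigned long long ea = 0xc000000012345678ULL; /* arbitrary example EA */
	unsigned long long esid = ea >> SID_SHIFT;
	unsigned long long segment_base = ea & ~((1ULL << SID_SHIFT) - 1);

	printf("esid = 0x%llx, segment base = 0x%llx\n", esid, segment_base);
	return 0;
}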
hash_utils.c
1631 if (ea >= spt->maxaddr) in subpage_protection()
1633 if (ea < 0x100000000UL) { in subpage_protection()
1673 ea, access, current->comm); in hash_failure_debug()
1714 ea, access, trap); in hash_page_mm()
1718 switch (get_region_id(ea)) { in hash_page_mm()
1852 demote_segment_4k(mm, ea); in hash_page_mm()
1862 demote_segment_4k(mm, ea); in hash_page_mm()
1939 unsigned long ea = regs->dar; in DEFINE_INTERRUPT_HANDLER() local
1952 region_id = get_region_id(ea); in DEFINE_INTERRUPT_HANDLER()
2036 ssize = user_segment_size(ea); in hash_preload()
[all …]
hash_64k.c
38 int __hash_page_4K(unsigned long ea, unsigned long access, unsigned long vsid, in __hash_page_4K() argument
89 subpg_index = (ea & (PAGE_SIZE - 1)) >> shift; in __hash_page_4K()
90 vpn = hpt_vpn(ea, vsid, ssize); in __hash_page_4K()
213 hash_failure_debug(ea, access, vsid, trap, ssize, in __hash_page_4K()
222 hpt_do_stress(ea, hpte_group); in __hash_page_4K()
228 int __hash_page_64K(unsigned long ea, unsigned long access, in __hash_page_64K() argument
275 vpn = hpt_vpn(ea, vsid, ssize); in __hash_page_64K()
328 hash_failure_debug(ea, access, vsid, trap, ssize, in __hash_page_64K()
337 hpt_do_stress(ea, hpte_group); in __hash_page_64K()
hash_hugepage.c
21 int __hash_page_thp(unsigned long ea, unsigned long access, unsigned long vsid, in __hash_page_thp() argument
73 index = (ea & ~HPAGE_PMD_MASK) >> shift; in __hash_page_thp()
76 vpn = hpt_vpn(ea, vsid, ssize); in __hash_page_thp()
85 flush_hash_hugepage(vsid, ea, pmdp, MMU_PAGE_64K, in __hash_page_thp()
163 hash_failure_debug(ea, access, vsid, trap, ssize, in __hash_page_thp()
hash_pgtable.c
147 int hash__map_kernel_page(unsigned long ea, unsigned long pa, pgprot_t prot) in hash__map_kernel_page() argument
157 pgdp = pgd_offset_k(ea); in hash__map_kernel_page()
158 p4dp = p4d_offset(pgdp, ea); in hash__map_kernel_page()
159 pudp = pud_alloc(&init_mm, p4dp, ea); in hash__map_kernel_page()
162 pmdp = pmd_alloc(&init_mm, pudp, ea); in hash__map_kernel_page()
165 ptep = pte_alloc_kernel(pmdp, ea); in hash__map_kernel_page()
168 set_pte_at(&init_mm, ea, ptep, pfn_pte(pa >> PAGE_SHIFT, prot)); in hash__map_kernel_page()
176 if (htab_bolt_mapping(ea, ea + PAGE_SIZE, pa, pgprot_val(prot), in hash__map_kernel_page()
/arch/powerpc/include/asm/book3s/64/
mmu-hash.h
146 unsigned long ea,
156 int (*hpte_removebolted)(unsigned long ea,
420 static inline unsigned long hpt_vpn(unsigned long ea, in hpt_vpn() argument
788 if ((ea & EA_MASK) >= H_PGTABLE_RANGE) in get_vsid()
797 ((ea >> SID_SHIFT) & ESID_BITS_MASK); in get_vsid()
803 ((ea >> SID_SHIFT_1T) & ESID_BITS_1T_MASK); in get_vsid()
825 unsigned long region_id = get_region_id(ea); in get_kernel_context()
835 ctx = 1 + ((ea & EA_MASK) >> MAX_EA_BITS_PER_CONTEXT); in get_kernel_context()
848 if (!is_kernel_addr(ea)) in get_kernel_vsid()
851 context = get_kernel_context(ea); in get_kernel_vsid()
[all …]
hash.h
91 #define NON_LINEAR_REGION_ID(ea) ((((unsigned long)ea - H_KERN_VIRT_START) >> REGION_SHIFT) + 2) argument
116 static inline int get_region_id(unsigned long ea) in get_region_id() argument
119 int id = (ea >> 60UL); in get_region_id()
127 if (ea < H_KERN_VIRT_START) in get_region_id()
132 region_id = NON_LINEAR_REGION_ID(ea); in get_region_id()
287 int hash__map_kernel_page(unsigned long ea, unsigned long pa, pgprot_t prot);
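As the hash.h excerpt shows, get_region_id() starts from the top nibble of the effective address (ea >> 60) and, for kernel addresses at or above H_KERN_VIRT_START, refines the result with NON_LINEAR_REGION_ID(). A minimal sketch of just the top-nibble step; the comments' region labels are illustrative, not the kernel's constants:

/*
 * Sketch only: extract the top nibble that get_region_id() switches on.
 */
#include <stdio.h>

static int ea_top_nibble(unsigned long long ea)
{
	return (int)(ea >> 60);
}

int main(void)
{
	/* a typical user-space EA decodes to nibble 0x0 ... */
	printf("0x%x\n", ea_top_nibble(0x0000000010000000ULL));
	/* ... while kernel EAs sit in the 0xc region, which get_region_id()
	 * further splits by comparing against H_KERN_VIRT_START */
	printf("0x%x\n", ea_top_nibble(0xc000000000000000ULL));
	return 0;
}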
/arch/powerpc/platforms/cell/spufs/
fault.c
24 unsigned long ea, int type) in spufs_handle_event() argument
38 force_sig_fault(SIGSEGV, SEGV_ACCERR, (void __user *)ea); in spufs_handle_event()
88 u64 ea, dsisr, access; in spufs_handle_class1() local
102 ea = ctx->csa.class_1_dar; in spufs_handle_class1()
110 pr_debug("ctx %p: ea %016llx, dsisr %016llx state %d\n", ctx, ea, in spufs_handle_class1()
123 ret = hash_page(ea, access, 0x300, dsisr); in spufs_handle_class1()
128 ret = copro_handle_mm_fault(current->mm, ea, dsisr, &flt); in spufs_handle_class1()
163 spufs_handle_event(ctx, ea, SPE_EVENT_SPE_DATA_STORAGE); in spufs_handle_class1()
/arch/powerpc/include/asm/
pte-walk.h
7 extern pte_t *__find_linux_pte(pgd_t *pgdir, unsigned long ea,
10 static inline pte_t *find_linux_pte(pgd_t *pgdir, unsigned long ea, in find_linux_pte() argument
16 pte = __find_linux_pte(pgdir, ea, is_thp, hshift); in find_linux_pte()
29 static inline pte_t *find_init_mm_pte(unsigned long ea, unsigned *hshift) in find_init_mm_pte() argument
32 return __find_linux_pte(pgdir, ea, NULL, hshift); in find_init_mm_pte()
isa-bridge.h
13 unsigned long ea = (unsigned long)address; in isa_vaddr_is_ioport() local
14 return ea >= ISA_IO_BASE && ea < ISA_IO_END; in isa_vaddr_is_ioport()
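The isa-bridge.h hit is a plain half-open range check: an address is an ISA ioport virtual address when it falls in [ISA_IO_BASE, ISA_IO_END). Sketch with placeholder bounds; the real constants come from the powerpc headers:

/*
 * Sketch only: ISA_IO_BASE_SKETCH/ISA_IO_END_SKETCH are placeholders,
 * not the kernel's values.
 */
#include <stdbool.h>
#include <stdio.h>

#define ISA_IO_BASE_SKETCH 0xd000080000000000ULL /* placeholder */
#define ISA_IO_END_SKETCH  (ISA_IO_BASE_SKETCH + 0x10000ULL)

static bool isa_vaddr_is_ioport_sketch(unsigned long long ea)
{
	return ea >= ISA_IO_BASE_SKETCH && ea < ISA_IO_END_SKETCH;
}

int main(void)
{
	printf("%d\n", isa_vaddr_is_ioport_sketch(ISA_IO_BASE_SKETCH + 0x80)); /* 1 */
	printf("%d\n", isa_vaddr_is_ioport_sketch(0x1000ULL));                 /* 0 */
	return 0;
}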
/arch/powerpc/math-emu/
stfiwx.c
7 stfiwx(u32 *frS, void *ea) in stfiwx() argument
10 printk("%s: %p %p\n", __func__, frS, ea); in stfiwx()
13 if (copy_to_user(ea, &frS[1], sizeof(frS[1]))) in stfiwx()
lfd.c
10 lfd(void *frD, void *ea) in lfd() argument
12 if (copy_from_user(frD, ea, sizeof(double))) in lfd()
15 printk("%s: D %p, ea %p: ", __func__, frD, ea); in lfd()
stfd.c
7 stfd(void *frS, void *ea) in stfd() argument
11 printk("%s: S %p, ea %p: ", __func__, frS, ea); in stfd()
17 if (copy_to_user(ea, frS, sizeof(double))) in stfd()
stfs.c
12 stfs(void *frS, void *ea) in stfs() argument
20 printk("%s: S %p, ea %p\n", __func__, frS, ea); in stfs()
38 if (copy_to_user(ea, &f, sizeof(float))) in stfs()
lfs.c
12 lfs(void *frD, void *ea) in lfs() argument
20 printk("%s: D %p, ea %p\n", __func__, frD, ea); in lfs()
23 if (copy_from_user(&f, ea, sizeof(float))) in lfs()
/arch/powerpc/mm/ptdump/
hashpagetable.c
188 while (ea >= st->marker[1].start_address) { in dump_hpte_info()
192 seq_printf(st->seq, "0x%lx:\t", ea); in dump_hpte_info()
215 vsid = get_kernel_vsid(ea, ssize); in native_find()
216 vpn = hpt_vpn(ea, vsid, ssize); in native_find()
251 vsid = get_kernel_vsid(ea, ssize); in pseries_find()
252 vpn = hpt_vpn(ea, vsid, ssize); in pseries_find()
320 return pseries_find(ea, psize, primary, v, r); in base_hpte_find()
322 return native_find(ea, psize, primary, v, r); in base_hpte_find()
332 if (ea < PAGE_OFFSET) in hpte_find()
336 slot = base_hpte_find(ea, psize, true, &v, &r); in hpte_find()
[all …]
/arch/powerpc/mm/kasan/
init_book3e_64.c
32 static int __init kasan_map_kernel_page(unsigned long ea, unsigned long pa, pgprot_t prot) in kasan_map_kernel_page() argument
40 pgdp = pgd_offset_k(ea); in kasan_map_kernel_page()
41 p4dp = p4d_offset(pgdp, ea); in kasan_map_kernel_page()
47 pudp = pud_offset(p4dp, ea); in kasan_map_kernel_page()
53 pmdp = pmd_offset(pudp, ea); in kasan_map_kernel_page()
59 ptep = pte_offset_kernel(pmdp, ea); in kasan_map_kernel_page()
61 __set_pte_at(&init_mm, ea, ptep, pfn_pte(pa >> PAGE_SHIFT, prot), 0); in kasan_map_kernel_page()
