| /arch/s390/mm/ |
| gmap.c |
    1055  if (temp->raddr == rmap->raddr) {  in gmap_insert_rmap()
    1097  rmap->raddr = raddr;  in gmap_protect_rmap()
    1167  gmap_call_notifier(sg, raddr, raddr + PAGE_SIZE - 1);  in gmap_unshadow_page()
    1206  gmap_call_notifier(sg, raddr, raddr + _SEGMENT_SIZE - 1);  in gmap_unshadow_pgt()
    1262  gmap_call_notifier(sg, raddr, raddr + _REGION3_SIZE - 1);  in gmap_unshadow_sgt()
    1318  gmap_call_notifier(sg, raddr, raddr + _REGION2_SIZE - 1);  in gmap_unshadow_r3t()
    1374  gmap_call_notifier(sg, raddr, raddr + _REGION1_SIZE - 1);  in gmap_unshadow_r2t()
    1409  gmap_idte_one(asce, raddr);  in __gmap_unshadow_r1t()
    1525  gmap_unshadow_r2t(sg, raddr);  in gmap_shadow_r2t()
    1729  unsigned long raddr, origin;  in gmap_shadow_pgt() local
    [all …]
|
| /arch/mips/alchemy/devboards/ |
| bcsr.c |
    22  void __iomem *raddr;  member
    40  bcsr_regs[i].raddr = (void __iomem *)bcsr2_phys +  in bcsr_init()
    43  bcsr_regs[i].raddr = (void __iomem *)bcsr1_phys +  in bcsr_init()
    56  r = __raw_readw(bcsr_regs[reg].raddr);  in bcsr_read()
    67  __raw_writew(val, bcsr_regs[reg].raddr);  in bcsr_write()
    79  r = __raw_readw(bcsr_regs[reg].raddr);  in bcsr_mod()
    82  __raw_writew(r, bcsr_regs[reg].raddr);  in bcsr_mod()
|
| /arch/parisc/kernel/ |
| perf.c |
    189  static int perf_stop_counters(uint32_t *raddr);
    218  uint32_t raddr[4];  in perf_config() local
    221  error = perf_stop_counters(raddr);  in perf_config()
    430  uint32_t raddr[4];  in perf_ioctl() local
    441  error_start = perf_stop_counters(raddr);  in perf_ioctl()
    449  if (copy_to_user((void __user *)arg, raddr,  in perf_ioctl()
    450  sizeof (raddr)) != 0) {  in perf_ioctl()
    568  raddr[0] = (uint32_t)tmp64;  in perf_stop_counters()
    574  raddr[1] = (uint32_t)tmp64;  in perf_stop_counters()
    581  raddr[2] = (uint32_t)tmp64;  in perf_stop_counters()
    [all …]
|
| /arch/powerpc/kvm/ |
| trace_pr.h |
    72   __field( ulong, raddr )
    81   __entry->raddr = pte->pte.raddr;
    89   __entry->vpage, __entry->raddr, __entry->flags)
    101  __field( ulong, raddr )
    110  __entry->raddr = pte->pte.raddr;
    118  __entry->vpage, __entry->raddr, __entry->flags)
|
| book3s_32_mmu.c |
    162  pte->raddr = bat->brpn | (eaddr & ~bat->bepi_mask);  in kvmppc_mmu_book3s_32_xlate_bat()
    221  pte->raddr = (pte1 & ~(0xFFFULL)) | (eaddr & 0xFFF);  in kvmppc_mmu_book3s_32_xlate_pte()
    305  pte->raddr = vcpu->arch.magic_page_pa | (pte->raddr & 0xfff);  in kvmppc_mmu_book3s_32_xlate()
    306  pte->raddr &= KVM_PAM;  in kvmppc_mmu_book3s_32_xlate()
|
| book3s_hv_rm_xics.c |
    807  unsigned int *raddr;  in this_cpu_inc_rm() local
    810  raddr = per_cpu_ptr(addr, cpu);  in this_cpu_inc_rm()
    811  l = (unsigned long)raddr;  in this_cpu_inc_rm()
    814  l = vmalloc_to_phys(raddr);  in this_cpu_inc_rm()
    815  raddr = (unsigned int *)l;  in this_cpu_inc_rm()
    817  ++*raddr;  in this_cpu_inc_rm()
|
| book3s_64_mmu_host.c |
    89   unsigned long gfn = orig_pte->raddr >> PAGE_SHIFT;  in kvmppc_mmu_map_page()
    98   pfn = kvmppc_gpa_to_pfn(vcpu, orig_pte->raddr, iswrite, &writable, &page);  in kvmppc_mmu_map_page()
    101  orig_pte->raddr);  in kvmppc_mmu_map_page()
    145  hpaddr |= orig_pte->raddr & (~0xfffULL & ~PAGE_MASK);  in kvmppc_mmu_map_page()
|
| book3s_64_mmu.c |
    215  gpte->raddr = vcpu->arch.magic_page_pa | (gpte->raddr & 0xfff);  in kvmppc_mmu_book3s_64_xlate()
    216  gpte->raddr &= KVM_PAM;  in kvmppc_mmu_book3s_64_xlate()
    297  gpte->raddr = (r & HPTE_R_RPN & ~eaddr_mask) | (eaddr & eaddr_mask);  in kvmppc_mmu_book3s_64_xlate()
    324  eaddr, avpn, gpte->vpage, gpte->raddr);  in kvmppc_mmu_book3s_64_xlate()
|
| book3s_32_mmu_host.c |
    147  hpaddr = kvmppc_gpa_to_pfn(vcpu, orig_pte->raddr, iswrite, &writable, &page);  in kvmppc_mmu_map_page()
    150  orig_pte->raddr);  in kvmppc_mmu_map_page()
    199  mark_page_dirty(vcpu->kvm, orig_pte->raddr >> PAGE_SHIFT);  in kvmppc_mmu_map_page()
|
| book3s_mmu_hpte.c |
    306  if ((pte->pte.raddr >= pa_start) &&  in kvmppc_mmu_pte_pflush()
    307  (pte->pte.raddr < pa_end))  in kvmppc_mmu_pte_pflush()
|
| book3s_pr.c |
    647  r = kvm_vcpu_map(vcpu, pte->raddr >> PAGE_SHIFT, &map);  in kvmppc_patch_dcbz()
    651  hpage_offset = pte->raddr & ~PAGE_MASK;  in kvmppc_patch_dcbz()
    704  pte.raddr = eaddr & KVM_PAM;  in kvmppc_handle_pagefault()
    718  ((pte.raddr & SPLIT_HACK_MASK) == SPLIT_HACK_OFFS))  in kvmppc_handle_pagefault()
    719  pte.raddr &= ~SPLIT_HACK_MASK;  in kvmppc_handle_pagefault()
    763  } else if (kvmppc_visible_gpa(vcpu, pte.raddr)) {  in kvmppc_handle_pagefault()
    786  vcpu->arch.paddr_accessed = pte.raddr;  in kvmppc_handle_pagefault()
|
| powerpc.c |
    369  *eaddr = pte.raddr;  in kvmppc_st()
    376  ((pte.raddr & KVM_PAM & PAGE_MASK) == mp_pa) &&  in kvmppc_st()
    384  if (kvm_write_guest(vcpu->kvm, pte.raddr, ptr, size))  in kvmppc_st()
    412  *eaddr = pte.raddr;  in kvmppc_ld()
    422  ((pte.raddr & KVM_PAM & PAGE_MASK) == mp_pa) &&  in kvmppc_ld()
    431  rc = kvm_read_guest(vcpu->kvm, pte.raddr, ptr, size);  in kvmppc_ld()
|
| book3s.c |
    463  pte->raddr = eaddr & KVM_PAM;  in kvmppc_xlate()
    474  pte->raddr &= ~SPLIT_HACK_MASK;  in kvmppc_xlate()
|
| e500_mmu.c |
    436  u64 raddr = get_tlb_raddr(gtlbe);  in kvmppc_e500_emul_tlbwe() local
    444  kvmppc_mmu_map(vcpu, eaddr, raddr, index_of(tlbsel, esel));  in kvmppc_e500_emul_tlbwe()
|
| book3s_hv_nested.c |
    1479  gpte.raddr, kvm->arch.lpid);  in kvmhv_handle_nested_set_rc()
    1591  gpa = gpte.raddr;  in __kvmhv_nested_page_fault()
|
| /arch/mips/include/asm/octeon/ |
| cvmx-ipd.h |
    200  ipd_pwp_ptr_fifo_ctl.s.raddr =  in cvmx_ipd_free_ptr()
    247  ipd_prc_port_ptr_fifo_ctl.s.raddr =  in cvmx_ipd_free_ptr()
    276  ipd_prc_hold_ptr_fifo_ctl.s.raddr =  in cvmx_ipd_free_ptr()
    304  ipd_pwp_ptr_fifo_ctl.s.raddr =  in cvmx_ipd_free_ptr()
|
| cvmx-ipd-defs.h |
    594   uint64_t raddr:8;  member
    596   uint64_t raddr:8;
    628   uint64_t raddr:3;  member
    630   uint64_t raddr:3;
    1098  uint64_t raddr:7;  member
    1100  uint64_t raddr:7;
    1164  uint64_t raddr:3;  member
    1166  uint64_t raddr:3;
    1184  uint64_t raddr:7;  member
    1186  uint64_t raddr:7;
    [all …]
|
| /arch/mips/mm/ |
| uasm-micromips.c |
    217  long raddr = (long)rel->addr;  in __resolve_relocs() local
    222  *rel->addr |= (build_bimm(laddr - (raddr + 4)) << 16);  in __resolve_relocs()
    224  *rel->addr |= build_bimm(laddr - (raddr + 4));  in __resolve_relocs()
|
| uasm-mips.c |
    281  long raddr = (long)rel->addr;  in __resolve_relocs() local
    285  *rel->addr |= build_bimm(laddr - (raddr + 4));  in __resolve_relocs()
|
| /arch/s390/kvm/ |
| gaccess.c |
    466  union raddress raddr = {.addr = gva};  in guest_translate() local
    562  raddr.rfaa = rtte.fc1.rfaa;  in guest_translate()
    590  raddr.sfaa = ste.fc1.sfaa;  in guest_translate()
    607  raddr.pfra = pte.pfra;  in guest_translate()
    609  raddr.addr = kvm_s390_real_to_abs(vcpu, raddr.addr);  in guest_translate()
    619  if (!kvm_is_gpa_in_memslot(vcpu->kvm, raddr.addr))  in guest_translate()
    621  *gpa = raddr.addr;  in guest_translate()
|
| /arch/s390/include/asm/ |
| gmap.h |
    76  unsigned long raddr;  member
|
| /arch/sparc/kernel/ |
| sys_sparc_64.c |
    409  ulong raddr;  in SYSCALL_DEFINE6() local
    410  err = do_shmat(first, ptr, (int)second, &raddr, SHMLBA);  in SYSCALL_DEFINE6()
    412  if (put_user(raddr,  in SYSCALL_DEFINE6()
|
| /arch/powerpc/kernel/ |
| fadump.c |
    967  unsigned long raddr, rstart, rend, rlast, hole_size;  in fadump_relocate() local
    972  raddr = paddr;  in fadump_relocate()
    979  raddr += fw_dump.boot_mem_dest_addr - hole_size;  in fadump_relocate()
    986  pr_debug("vmcoreinfo: paddr = 0x%lx, raddr = 0x%lx\n", paddr, raddr);  in fadump_relocate()
    987  return raddr;  in fadump_relocate()
|
| /arch/x86/kernel/kprobes/ |
| core.c |
    114  s32 raddr;  in __synthesize_relative_insn() member
    118  insn->raddr = (s32)((long)(to) - ((long)(from) + 5));  in __synthesize_relative_insn()
|
| /arch/powerpc/include/asm/ |
| kvm_host.h |
    379  ulong raddr;  member
|