| /arch/mips/include/asm/ |
| asm-eva.h |
      38   #define kernel_sd(reg, addr)   user_sw(reg, addr)    argument
      106  #define kernel_ll(reg, addr)   ll reg, addr           argument
      107  #define kernel_sc(reg, addr)   sc reg, addr           argument
      108  #define kernel_lw(reg, addr)   lw reg, addr           argument
      111  #define kernel_lh(reg, addr)   lh reg, addr           argument
      112  #define kernel_lb(reg, addr)   lb reg, addr           argument
      114  #define kernel_sw(reg, addr)   sw reg, addr           argument
      117  #define kernel_sh(reg, addr)   sh reg, addr           argument
      118  #define kernel_sb(reg, addr)   sb reg, addr           argument
      128  #define kernel_sd(reg, addr)   sd reg, addr           argument
      [all …]
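
The kernel_* accessors above are thin compile-time aliases: depending on the active configuration, the same name expands either to the plain instruction or to its EVA (user-segment) variant. A minimal user-space sketch of that aliasing pattern; the names plain_store_word, eva_store_word and CONFIG_EVA_LIKE are hypothetical, not the kernel's:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical backends standing in for the two store forms. */
    static void plain_store_word(uint32_t v, uint32_t *p) { *p = v; }
    static void eva_store_word(uint32_t v, uint32_t *p)   { *p = v; /* would use the EVA form */ }

    /* One name, two expansions, chosen at preprocessing time - the same
     * shape as kernel_sw expanding to either sw or user_sw. */
    #ifdef CONFIG_EVA_LIKE
    #define kernel_store_word(v, p)  eva_store_word((v), (p))
    #else
    #define kernel_store_word(v, p)  plain_store_word((v), (p))
    #endif

    int main(void)
    {
        uint32_t x = 0;
        kernel_store_word(42, &x);
        printf("%u\n", x);
        return 0;
    }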
|
| unaligned-emul.h |
      764  #define LoadHWUE(addr, value, res)  _LoadHWU(addr, value, res, user)    argument
      765  #define LoadWU(addr, value, res)    _LoadWU(addr, value, res, kernel)   argument
      766  #define LoadWUE(addr, value, res)   _LoadWU(addr, value, res, user)     argument
      767  #define LoadHW(addr, value, res)    _LoadHW(addr, value, res, kernel)   argument
      768  #define LoadHWE(addr, value, res)   _LoadHW(addr, value, res, user)     argument
      769  #define LoadW(addr, value, res)     _LoadW(addr, value, res, kernel)    argument
      770  #define LoadWE(addr, value, res)    _LoadW(addr, value, res, user)      argument
      771  #define LoadDW(addr, value, res)    _LoadDW(addr, value, res)           argument
      775  #define StoreW(addr, value, res)    _StoreW(addr, value, res, kernel)   argument
      776  #define StoreWE(addr, value, res)   _StoreW(addr, value, res, user)     argument
      [all …]
|
| prefetch.h |
      47  .macro __pref hint addr
      49  pref \hint, \addr
      53  .macro pref_load addr
      54  __pref Pref_Load, \addr
      57  .macro pref_store addr
      58  __pref Pref_Store, \addr
      61  .macro pref_load_streamed addr
      62  __pref Pref_LoadStreamed, \addr
      65  .macro pref_store_streamed addr
      69  .macro pref_load_retained addr
      [all …]
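
These assembler macros wrap the MIPS pref instruction with named hint values (load, store, streamed, retained). As a rough user-space analogue, GCC and Clang expose the same idea through __builtin_prefetch(addr, rw, locality); the sketch below is not the MIPS .macro interface, just the equivalent concept in C:

    #include <stddef.h>

    /* rw: 0 = prefetch for read, 1 = for write.
     * locality: 0 = streaming (no reuse expected) .. 3 = keep in all cache levels. */
    static inline void pref_load(const void *addr)          { __builtin_prefetch(addr, 0, 3); }
    static inline void pref_store(void *addr)               { __builtin_prefetch(addr, 1, 3); }
    static inline void pref_load_streamed(const void *addr) { __builtin_prefetch(addr, 0, 0); }
    static inline void pref_store_streamed(void *addr)      { __builtin_prefetch(addr, 1, 0); }

    /* Example: prefetch a few elements ahead while summing an array. */
    long sum(const long *a, size_t n)
    {
        long s = 0;
        for (size_t i = 0; i < n; i++) {
            if (i + 64 < n)
                pref_load(&a[i + 64]);
            s += a[i];
        }
        return s;
    }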
|
| /arch/parisc/lib/ |
| iomap.c |
      315  return iomap_ops[ADDR_TO_REGION(addr)]->read8(addr);      in ioread8()
      322  return iomap_ops[ADDR_TO_REGION(addr)]->read16(addr);     in ioread16()
      329  return iomap_ops[ADDR_TO_REGION(addr)]->read16be(addr);   in ioread16be()
      336  return iomap_ops[ADDR_TO_REGION(addr)]->read32(addr);     in ioread32()
      343  return iomap_ops[ADDR_TO_REGION(addr)]->read32be(addr);   in ioread32be()
      351  return iomap_ops[ADDR_TO_REGION(addr)]->read64(addr);     in ioread64()
      358  return iomap_ops[ADDR_TO_REGION(addr)]->read64be(addr);   in ioread64be()
      366  iomap_ops[ADDR_TO_REGION(addr)]->write8(datum, addr);     in iowrite8()
      375  iomap_ops[ADDR_TO_REGION(addr)]->write16(datum, addr);    in iowrite16()
      393  iomap_ops[ADDR_TO_REGION(addr)]->write32(datum, addr);    in iowrite32()
      [all …]
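
Every ioread/iowrite variant here dispatches through an ops table indexed by the region encoded in the cookie's address bits. A minimal standalone sketch of that dispatch-by-region pattern; the region shift, table size, and handler names are assumptions for illustration, not parisc's actual layout:

    #include <stdint.h>

    /* Hypothetical region decoding: the top 4 bits of the token pick the handler. */
    #define ADDR_TO_REGION(a)  (((uintptr_t)(a) >> 28) & 0xf)

    struct iomap_ops {
        unsigned int (*read8)(const void *addr);
        void         (*write8)(uint8_t datum, void *addr);
    };

    static unsigned int mmio_read8(const void *addr)   { return *(const volatile uint8_t *)addr; }
    static void mmio_write8(uint8_t datum, void *addr) { *(volatile uint8_t *)addr = datum; }

    static const struct iomap_ops mmio_ops = { mmio_read8, mmio_write8 };

    /* Every region uses the MMIO handler in this sketch; a real table would mix
     * memory-space and port-space handlers per region. */
    static const struct iomap_ops *iomap_ops[16] = {
        &mmio_ops, &mmio_ops, &mmio_ops, &mmio_ops,
        &mmio_ops, &mmio_ops, &mmio_ops, &mmio_ops,
        &mmio_ops, &mmio_ops, &mmio_ops, &mmio_ops,
        &mmio_ops, &mmio_ops, &mmio_ops, &mmio_ops,
    };

    unsigned int my_ioread8(const void *addr)
    {
        return iomap_ops[ADDR_TO_REGION(addr)]->read8(addr);
    }

    void my_iowrite8(uint8_t datum, void *addr)
    {
        iomap_ops[ADDR_TO_REGION(addr)]->write8(datum, addr);
    }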
|
| /arch/m68k/include/asm/ |
| io_no.h |
      16  #define __raw_readb(addr) \    argument
      18  #define __raw_readw(addr) \    argument
      20  #define __raw_readl(addr) \    argument
      23  #define __raw_writeb(b, addr) (void)((*(__force volatile u8 *) (addr)) = (b))    argument
      24  #define __raw_writew(b, addr) (void)((*(__force volatile u16 *) (addr)) = (b))   argument
      25  #define __raw_writel(b, addr) (void)((*(__force volatile u32 *) (addr)) = (b))   argument
      50  return (addr >= IOMEMBASE) && (addr <= IOMEMBASE + IOMEMSIZE - 1);   in __cf_internalio()
      68  if (cf_internalio(addr))   in readw()
      76  if (cf_internalio(addr))   in readl()
      84  if (cf_internalio(addr))   in writew()
      [all …]
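
The __raw_* accessors are nothing more than volatile loads and stores through a cast pointer: no barriers, no byte swapping. A standalone sketch of the same pattern, using an ordinary buffer in place of a memory-mapped register block:

    #include <stdint.h>
    #include <stdio.h>

    /* Volatile-cast accessors in the style of __raw_readb()/__raw_writeb(). */
    #define raw_readb(addr)      (*(volatile uint8_t  *)(addr))
    #define raw_readw(addr)      (*(volatile uint16_t *)(addr))
    #define raw_readl(addr)      (*(volatile uint32_t *)(addr))
    #define raw_writeb(b, addr)  ((void)(*(volatile uint8_t  *)(addr) = (b)))
    #define raw_writew(w, addr)  ((void)(*(volatile uint16_t *)(addr) = (w)))
    #define raw_writel(l, addr)  ((void)(*(volatile uint32_t *)(addr) = (l)))

    int main(void)
    {
        /* An ordinary array stands in for device registers here. */
        static uint32_t fake_regs[4];

        raw_writel(0xdeadbeef, &fake_regs[0]);
        printf("0x%08x\n", (unsigned)raw_readl(&fake_regs[0]));
        return 0;
    }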
|
| /arch/sparc/include/asm/ |
| swift.h |
      35  : "r" (addr), "i" (ASI_M_TXTC_TAG)   in swift_inv_insn_tag()
      50  unsigned long addr;   in swift_flush_dcache() local
      52  for (addr = 0; addr < 0x2000; addr += 0x10)   in swift_flush_dcache()
      53  swift_inv_data_tag(addr);   in swift_flush_dcache()
      58  unsigned long addr;   in swift_flush_icache() local
      60  for (addr = 0; addr < 0x4000; addr += 0x20)   in swift_flush_icache()
      61  swift_inv_insn_tag(addr);   in swift_flush_icache()
      66  unsigned long addr;   in swift_idflash_clear() local
      68  for (addr = 0; addr < 0x2000; addr += 0x10) {   in swift_idflash_clear()
      69  swift_inv_insn_tag(addr<<1);   in swift_idflash_clear()
      [all …]
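
The flush helpers walk the cache tag space in line-sized steps and invalidate each tag (an 8 KiB data-tag range in 16-byte steps and a 16 KiB instruction-tag range in 32-byte steps, going by the loop bounds above). A standalone sketch of that loop shape; inv_data_tag() is a stub standing in for the ASI tag store the real helper issues:

    /* Stub for the tag-invalidate store done via a special ASI on sparc. */
    static void inv_data_tag(unsigned long addr)
    {
        (void)addr;
    }

    /* Walk an 8 KiB tag space in 16-byte steps, as in swift_flush_dcache(). */
    void flush_dcache_tags(void)
    {
        unsigned long addr;

        for (addr = 0; addr < 0x2000; addr += 0x10)
            inv_data_tag(addr);
    }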
|
| turbosparc.h |
      67  : "r" (addr), "i" (ASI_M_TXTC_TAG)   in turbosparc_inv_insn_tag()
      76  : "r" (addr), "i" (ASI_M_DATAC_TAG)   in turbosparc_inv_data_tag()
      82  unsigned long addr;   in turbosparc_flush_icache() local
      84  for (addr = 0; addr < 0x4000; addr += 0x20)   in turbosparc_flush_icache()
      85  turbosparc_inv_insn_tag(addr);   in turbosparc_flush_icache()
      90  unsigned long addr;   in turbosparc_flush_dcache() local
      92  for (addr = 0; addr < 0x4000; addr += 0x20)   in turbosparc_flush_dcache()
      93  turbosparc_inv_data_tag(addr);   in turbosparc_flush_dcache()
      98  unsigned long addr;   in turbosparc_idflash_clear() local
      100  for (addr = 0; addr < 0x4000; addr += 0x20) {   in turbosparc_idflash_clear()
      [all …]
|
| uaccess_64.h |
      33  return addr > limit - size;   in __chk_range_not_ok()
      35  addr += size;   in __chk_range_not_ok()
      36  if (addr < size)   in __chk_range_not_ok()
      39  return addr > limit;   in __chk_range_not_ok()
      79  type *addr = (type __force *)(dst); \
      93  #define __put_kernel_asm(x, size, addr, ret) \   argument
      113  #define __put_user_nocheck(data, addr, size) ({ \   argument
      125  #define __put_user_asm(x, size, addr, ret) \   argument
      149  type *addr = (type __force *)(src); \
      166  #define __get_kernel_asm(x, size, addr, ret) \   argument
      [all …]
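
The __chk_range_not_ok() lines show the classic overflow-aware range check: compute addr + size, treat unsigned wrap-around as failure, then compare the end against the limit. A standalone sketch of the same check (range_not_ok is a hypothetical helper, not the kernel function):

    #include <stdbool.h>
    #include <stdio.h>

    /* Returns true if [addr, addr + size) is NOT contained in [0, limit],
     * counting unsigned wrap-around of addr + size as a failure. */
    static bool range_not_ok(unsigned long addr, unsigned long size, unsigned long limit)
    {
        addr += size;
        if (addr < size)        /* addr + size wrapped past ULONG_MAX */
            return true;
        return addr > limit;
    }

    int main(void)
    {
        printf("%d\n", range_not_ok(0x1000, 0x100, 0x10000));   /* 0: in range */
        printf("%d\n", range_not_ok(~0UL - 4, 64, 0x10000));    /* 1: wraps    */
        return 0;
    }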
|
| /arch/parisc/include/asm/ |
| bitops.h |
      26  addr += BIT_WORD(nr);   in set_bit()
      28  *addr |= mask;   in set_bit()
      37  addr += BIT_WORD(nr);   in clear_bit()
      39  *addr &= ~mask;   in clear_bit()
      48  addr += BIT_WORD(nr);   in change_bit()
      50  *addr ^= mask;   in change_bit()
      61  addr += BIT_WORD(nr);   in test_and_set_bit()
      63  old = *addr;   in test_and_set_bit()
      66  *addr = old | mask;   in test_and_set_bit()
      81  old = *addr;   in test_and_clear_bit()
      [all …]
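
Each operation first advances addr to the word that holds bit nr, then applies a single-bit mask with |=, &= ~, or ^=. A non-atomic standalone sketch of that word-plus-mask arithmetic (the in-kernel versions additionally guarantee atomicity, which this sketch does not):

    #include <stdio.h>

    #define BITS_PER_LONG  (8 * sizeof(unsigned long))
    #define BIT_WORD(nr)   ((nr) / BITS_PER_LONG)
    #define BIT_MASK(nr)   (1UL << ((nr) % BITS_PER_LONG))

    static void set_bit(unsigned int nr, unsigned long *addr)
    {
        addr += BIT_WORD(nr);
        *addr |= BIT_MASK(nr);
    }

    static void clear_bit(unsigned int nr, unsigned long *addr)
    {
        addr += BIT_WORD(nr);
        *addr &= ~BIT_MASK(nr);
    }

    static int test_and_set_bit(unsigned int nr, unsigned long *addr)
    {
        unsigned long mask = BIT_MASK(nr), old;

        addr += BIT_WORD(nr);
        old = *addr;
        *addr = old | mask;
        return (old & mask) != 0;
    }

    int main(void)
    {
        unsigned long map[4] = { 0 };

        set_bit(65, map);
        printf("%d\n", test_and_set_bit(65, map));   /* 1: bit was already set  */
        clear_bit(65, map);
        printf("%d\n", test_and_set_bit(65, map));   /* 0: bit had been cleared */
        return 0;
    }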
|
| /arch/powerpc/boot/ |
| io.h |
      12  static inline int in_8(const volatile unsigned char *addr)   in in_8() argument
      17  : "=r" (ret) : "m" (*addr));   in in_8()
      24  : "=m" (*addr) : "r" (val));   in out_8()
      32  : "=r" (ret) : "r" (addr), "m" (*addr));   in in_le16()
      42  : "=r" (ret) : "m" (*addr));   in in_be16()
      49  : "r" (val), "r" (addr));   in out_le16()
      55  : "=m" (*addr) : "r" (val));   in out_be16()
      63  : "=r" (ret) : "r" (addr), "m" (*addr));   in in_le32()
      72  : "=r" (ret) : "m" (*addr));   in in_be32()
      79  : "r" (val), "r" (addr));   in out_le32()
      [all …]
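
The in_le*/in_be* and out_le*/out_be* helpers read or write a device register and fix up byte order for the big-endian CPU, using byte-reversing load/store instructions in inline asm. A plain C sketch of the same idea using compiler byteswap builtins, assuming a big-endian CPU and omitting the barriers the real accessors include:

    #include <stdint.h>

    static inline uint8_t my_in_8(const volatile uint8_t *addr)
    {
        return *addr;
    }

    static inline uint16_t my_in_be16(const volatile uint16_t *addr)
    {
        return *addr;                      /* already CPU order on big-endian  */
    }

    static inline uint16_t my_in_le16(const volatile uint16_t *addr)
    {
        return __builtin_bswap16(*addr);   /* swap little-endian register data */
    }

    static inline uint32_t my_in_le32(const volatile uint32_t *addr)
    {
        return __builtin_bswap32(*addr);
    }

    static inline void my_out_le32(volatile uint32_t *addr, uint32_t val)
    {
        *addr = __builtin_bswap32(val);
    }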
|
| /arch/powerpc/include/asm/ |
| io.h |
      438  #define __do_readb(addr) eeh_readb(addr)   argument
      439  #define __do_readw(addr) eeh_readw(addr)   argument
      440  #define __do_readl(addr) eeh_readl(addr)   argument
      441  #define __do_readq(addr) eeh_readq(addr)   argument
      446  #define __do_readb(addr) in_8(addr)
      447  #define __do_readw(addr) in_le16(addr)
      448  #define __do_readl(addr) in_le32(addr)
      449  #define __do_readq(addr) in_le64(addr)
      694  #define readb_relaxed(addr) readb(addr)   argument
      695  #define readw_relaxed(addr) readw(addr)   argument
      [all …]
|
| /arch/riscv/kernel/ |
| patch.c |
      20  void *addr;   member
      52  return addr;   in patch_map()
      57  offset_in_page(addr));   in patch_map()
      69  void *waddr = addr;   in __patch_insn_set()
      114  void *waddr = addr;   in __patch_insn_write()
      167  memset(addr, c, len);   in __patch_insn_set()
      195  addr += size;   in patch_insn_set()
      209  flush_icache_range((uintptr_t)addr, (uintptr_t)addr + len);   in patch_text_set_nosync()
      230  addr += size;   in patch_insn_write()
      245  flush_icache_range((uintptr_t)addr, (uintptr_t)addr + len);   in patch_text_nosync()
      [all …]
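
The pattern in these hits is: write the new bytes (through a temporary writable mapping set up by patch_map()), then call flush_icache_range() over [addr, addr + len) so the instruction cache sees the new code. A user-space sketch of just that final write-then-flush step, using the portable __builtin___clear_cache builtin in place of flush_icache_range() and skipping the remapping entirely (patch_bytes/patch_set are hypothetical names):

    #include <string.h>

    /* Copy new instruction bytes, then make the icache coherent over the range. */
    static void patch_bytes(void *addr, const void *insns, size_t len)
    {
        memcpy(addr, insns, len);
        __builtin___clear_cache((char *)addr, (char *)addr + len);
    }

    /* Fill a range with a single byte value, in the spirit of patch_insn_set(). */
    static void patch_set(void *addr, int c, size_t len)
    {
        memset(addr, c, len);
        __builtin___clear_cache((char *)addr, (char *)addr + len);
    }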
|
| /arch/riscv/include/asm/ |
| io.h |
      97  #define readsb(addr, buffer, count) __readsb(addr, buffer, count)   argument
      98  #define readsw(addr, buffer, count) __readsw(addr, buffer, count)   argument
      99  #define readsl(addr, buffer, count) __readsl(addr, buffer, count)   argument
      104  #define insb(addr, buffer, count) __insb(PCI_IOBASE + (addr), buffer, count)   argument
      105  #define insw(addr, buffer, count) __insw(PCI_IOBASE + (addr), buffer, count)   argument
      106  #define insl(addr, buffer, count) __insl(PCI_IOBASE + (addr), buffer, count)   argument
      111  #define writesb(addr, buffer, count) __writesb(addr, buffer, count)   argument
      112  #define writesw(addr, buffer, count) __writesw(addr, buffer, count)   argument
      113  #define writesl(addr, buffer, count) __writesl(addr, buffer, count)   argument
      124  #define readsq(addr, buffer, count) __readsq(addr, buffer, count)   argument
      [all …]
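
The reads*/writes* ("string I/O") helpers move count items between a single device register and a memory buffer: the register address stays fixed while the buffer pointer advances. The ins*/insw/insl variants are the same thing offset into the PCI I/O window. A standalone sketch of the shape, without barriers:

    #include <stdint.h>
    #include <stddef.h>

    /* Read `count` bytes from one fixed register into buffer (readsb-like). */
    static void my_readsb(const volatile uint8_t *addr, uint8_t *buffer, size_t count)
    {
        while (count--)
            *buffer++ = *addr;
    }

    /* Write `count` 32-bit words from buffer to one fixed register (writesl-like). */
    static void my_writesl(volatile uint32_t *addr, const uint32_t *buffer, size_t count)
    {
        while (count--)
            *addr = *buffer++;
    }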
|
| /arch/hexagon/include/asm/ |
| bitops.h |
      43  : "r" (addr), "r" (nr)   in test_and_clear_bit()
      67  : "r" (addr), "r" (nr)   in test_and_set_bit()
      93  : "r" (addr), "r" (nr)   in test_and_change_bit()
      108  test_and_clear_bit(nr, addr);   in clear_bit()
      113  test_and_set_bit(nr, addr);   in set_bit()
      118  test_and_change_bit(nr, addr);   in change_bit()
      133  test_and_clear_bit(nr, addr);   in arch___clear_bit()
      139  test_and_set_bit(nr, addr);   in arch___set_bit()
      145  test_and_change_bit(nr, addr);   in arch___change_bit()
      152  return test_and_clear_bit(nr, addr);   in arch___test_and_clear_bit()
      [all …]
|
| /arch/arm/mm/ |
| idmap.c |
      44  pmd += pmd_index(addr);   in idmap_add_pmd()
      46  pmd = pmd_offset(pud, addr);   in idmap_add_pmd()
      52  } while (pmd++, addr = next, addr != end);   in idmap_add_pmd()
      60  addr = (addr & PMD_MASK) | prot;   in idmap_add_pmd()
      61  pmd[0] = __pmd(addr);   in idmap_add_pmd()
      62  addr += SECTION_SIZE;   in idmap_add_pmd()
      63  pmd[1] = __pmd(addr);   in idmap_add_pmd()
      78  } while (pud++, addr = next, addr != end);   in idmap_add_pud()
      84  unsigned long addr, end;   in identity_mapping_add() local
      103  pgd += pgd_index(addr);   in identity_mapping_add()
      [all …]
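
Both walkers use the same idiom: chop [addr, end) into spans covered by one table entry, handle a span, then advance with "while (entry++, addr = next, addr != end)". A standalone sketch of that chunked range walk, using a hypothetical 2 MiB block size and a printf where the real code fills a pmd entry:

    #include <stdio.h>

    #define BLOCK_SIZE  (2UL << 20)             /* hypothetical 2 MiB span */
    #define BLOCK_MASK  (~(BLOCK_SIZE - 1))

    /* Next block boundary, clamped to end: the role pmd_addr_end() plays. */
    static unsigned long block_addr_end(unsigned long addr, unsigned long end)
    {
        unsigned long next = (addr + BLOCK_SIZE) & BLOCK_MASK;

        return (next - 1 < end - 1) ? next : end;
    }

    static void walk_range(unsigned long addr, unsigned long end)
    {
        unsigned long next;

        do {
            next = block_addr_end(addr, end);
            printf("map [%#lx, %#lx)\n", addr, next);
        } while (addr = next, addr != end);
    }

    int main(void)
    {
        walk_range(0x00100000UL, 0x00900000UL);
        return 0;
    }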
|
| mmap.c |
      54  return addr;   in arch_get_unmapped_area()
      60  if (addr) {   in arch_get_unmapped_area()
      62  addr = COLOUR_ALIGN(addr, pgoff);   in arch_get_unmapped_area()
      64  addr = PAGE_ALIGN(addr);   in arch_get_unmapped_area()
      69  return addr;   in arch_get_unmapped_area()
      107  return addr;   in arch_get_unmapped_area_topdown()
      111  if (addr) {   in arch_get_unmapped_area_topdown()
      113  addr = COLOUR_ALIGN(addr, pgoff);   in arch_get_unmapped_area_topdown()
      115  addr = PAGE_ALIGN(addr);   in arch_get_unmapped_area_topdown()
      119  return addr;   in arch_get_unmapped_area_topdown()
      [all …]
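
When a mapping needs cache-colour alignment (shared mappings on an aliasing VIPT cache), the address hint is rounded with COLOUR_ALIGN rather than plain PAGE_ALIGN, so the chosen address lands on the same colour as the file offset. A standalone sketch of such a colour-align helper under assumed PAGE_SIZE/SHMLBA values; the kernel's actual macro may differ in detail:

    #include <stdio.h>

    #define PAGE_SHIFT  12
    #define PAGE_SIZE   (1UL << PAGE_SHIFT)
    #define SHMLBA      (4 * PAGE_SIZE)     /* assumed colour period: 16 KiB */

    /* Round addr up to an SHMLBA boundary, then add the colour implied by
     * the file offset, so address and offset share a cache colour. */
    static unsigned long colour_align(unsigned long addr, unsigned long pgoff)
    {
        unsigned long base   = (addr + SHMLBA - 1) & ~(SHMLBA - 1);
        unsigned long colour = (pgoff << PAGE_SHIFT) & (SHMLBA - 1);

        return base + colour;
    }

    int main(void)
    {
        printf("%#lx\n", colour_align(0x12345000UL, 3));   /* -> 0x1234b000 */
        return 0;
    }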
|
| /arch/powerpc/mm/book3s64/ |
| subpage_prot.c |
      25  unsigned long i, j, addr;   in subpage_prot_free() local
      37  addr = 0;   in subpage_prot_free()
      44  ++j, addr += PAGE_SIZE)   in subpage_prot_free()
      63  pgd = pgd_offset(mm, addr);   in hpte_flush_range()
      64  p4d = p4d_offset(pgd, addr);   in hpte_flush_range()
      67  pud = pud_offset(p4d, addr);   in hpte_flush_range()
      70  pmd = pmd_offset(pud, addr);   in hpte_flush_range()
      79  addr += PAGE_SIZE;   in hpte_flush_range()
      105  limit = addr + len;   in subpage_prot_clear()
      108  for (; addr < limit; addr = next) {   in subpage_prot_clear()
      [all …]
|
| /arch/powerpc/lib/ |
| code-patching.c |
      47  return __patch_mem(addr, ppc_inst_as_ulong(instr), addr, true);   in raw_patch_instruction()
      49  return __patch_mem(addr, ppc_inst_val(instr), addr, false);   in raw_patch_instruction()
      57  unsigned long addr;   member
      119  addr = (unsigned long)area->addr;   in text_area_cpu_up()
      127  this_cpu_write(cpu_patching_context.addr, addr);   in text_area_cpu_up()
      183  this_cpu_write(cpu_patching_context.addr, addr);   in text_area_cpu_up_mm()
      278  flush_tlb_kernel_range(addr, addr + PAGE_SIZE);   in unmap_patch_area()
      364  return __patch_mem(addr, val, addr, is_dword);   in patch_mem()
      417  for (u64 *end = addr + count; addr < end; addr++)   in NOKPROBE_SYMBOL()
      428  for (u32 *end = addr + count; addr < end; addr++)   in patch_memset32()
      [all …]
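
The last two hits show the fill loops used when a block of text is set to a repeated 32- or 64-bit value: compute end = addr + count once, then store until addr reaches it. A standalone sketch of those loops (the real routines write through a dedicated patching mapping and synchronize caches, which is omitted here; the _sketch names are mine):

    #include <stdint.h>
    #include <stddef.h>

    /* Fill `count` 32-bit slots with val, in the shape of patch_memset32(). */
    static void memset32_sketch(uint32_t *addr, uint32_t val, size_t count)
    {
        for (uint32_t *end = addr + count; addr < end; addr++)
            *addr = val;
    }

    /* Fill `count` 64-bit slots with val, the 64-bit counterpart of the loop above. */
    static void memset64_sketch(uint64_t *addr, uint64_t val, size_t count)
    {
        for (uint64_t *end = addr + count; addr < end; addr++)
            *addr = val;
    }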
|
| /arch/alpha/kernel/ |
| core_irongate.c |
      97  *pci_addr = addr;   in mk_conf_addr()
      151  *(vuip)addr;   in irongate_write_config()
      305  #define GET_PAGE_DIR_OFF(addr) (addr >> 22)   argument
      306  #define GET_PAGE_DIR_IDX(addr) (GET_PAGE_DIR_OFF(addr))   argument
      308  #define GET_GATT_OFF(addr) ((addr & 0x003ff000) >> 12)   argument
      309  #define GET_GATT(addr) (gatt_pages[GET_PAGE_DIR_IDX(addr)])   argument
      333  if (addr >= gart_bus_addr && addr + size - 1 <   in irongate_ioremap()
      353  addr);   in irongate_ioremap()
      381  for(baddr = addr, vaddr = (unsigned long)area->addr;   in irongate_ioremap()
      398  vaddr = (unsigned long)area->addr + (addr & ~PAGE_MASK);   in irongate_ioremap()
      [all …]
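
The GET_PAGE_DIR_OFF/GET_GATT_OFF macros carve a GART bus address into a page-directory index (bits 22 and up) and an index into the selected GATT page (bits 12 to 21), with the low 12 bits left as the offset within the 4 KiB page. A standalone sketch of that two-level index extraction, with the field boundaries taken from the excerpt; PAGE_OFF is added here purely for illustration:

    #include <stdio.h>

    #define GET_PAGE_DIR_OFF(addr)  ((addr) >> 22)                  /* which GATT page     */
    #define GET_GATT_OFF(addr)      (((addr) & 0x003ff000UL) >> 12) /* entry in that page  */
    #define PAGE_OFF(addr)          ((addr) & 0xfffUL)              /* offset within 4 KiB */

    int main(void)
    {
        unsigned long addr = 0x12345678UL;

        printf("dir=%lu gatt=%lu off=%#lx\n",
               GET_PAGE_DIR_OFF(addr), GET_GATT_OFF(addr), PAGE_OFF(addr));
        return 0;
    }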
|
| /arch/alpha/include/asm/ |
| core_cia.h |
      357  addr &= CIA_MEM_R1_MASK;   in cia_ioread8()
      372  addr &= CIA_MEM_R1_MASK;   in cia_iowrite8()
      387  addr &= CIA_MEM_R1_MASK;   in cia_ioread16()
      411  addr = ((addr - CIA_IO) << 5) + CIA_IO + 0x18;   in cia_ioread32()
      412  return *(vuip)addr;   in cia_ioread32()
      419  addr = ((addr - CIA_IO) << 5) + CIA_IO + 0x18;   in cia_iowrite32()
      420  *(vuip)addr = b;   in cia_iowrite32()
      427  addr = ((addr - CIA_IO) << 5) + CIA_IO + 0x18;   in cia_ioread64()
      428  return *(vulp)addr;   in cia_ioread64()
      435  addr = ((addr - CIA_IO) << 5) + CIA_IO + 0x18;   in cia_iowrite64()
      [all …]
|
| /arch/parisc/mm/ |
| hugetlbpage.c |
      40  addr &= HPAGE_MASK;   in huge_pte_alloc()
      42  pgd = pgd_offset(mm, addr);   in huge_pte_alloc()
      43  p4d = p4d_offset(pgd, addr);   in huge_pte_alloc()
      62  addr &= HPAGE_MASK;   in huge_pte_offset()
      64  pgd = pgd_offset(mm, addr);   in huge_pte_offset()
      66  p4d = p4d_offset(pgd, addr);   in huge_pte_offset()
      91  addr &= HPAGE_MASK;   in purge_tlb_entries_huge()
      95  purge_tlb_entries(mm, addr);   in purge_tlb_entries_huge()
      107  addr &= HPAGE_MASK;   in __set_huge_pte_at()
      108  addr_start = addr;   in __set_huge_pte_at()
      [all …]
|
| /arch/x86/include/asm/ |
| bitops.h |
      42  #define ADDR RLONG_ADDR(addr)
      48  #define CONST_MASK_ADDR(nr, addr) WBYTE_ADDR((void *)(addr) + ((nr)>>3))   argument
      56  : CONST_MASK_ADDR(nr, addr)   in arch_set_bit()
      76  : CONST_MASK_ADDR(nr, addr)   in arch_clear_bit()
      88  arch_clear_bit(nr, addr);   in arch_clear_bit_unlock()
      98  volatile unsigned long *addr)   in arch_xor_unlock_is_negative_byte() argument
      112  arch___clear_bit(nr, addr);   in arch___clear_bit_unlock()
      126  : CONST_MASK_ADDR(nr, addr)   in arch_change_bit()
      143  return arch_test_and_set_bit(nr, addr);   in arch_test_and_set_bit_lock()
      239  variable_test_bit(nr, addr);   in arch_test_bit()
      [all …]
|
| /arch/openrisc/mm/ |
| tlb.c |
      35  #define DTLB_OFFSET(addr) (((addr) >> PAGE_SHIFT) & (NUM_DTLB_SETS-1))   argument
      36  #define ITLB_OFFSET(addr) (((addr) >> PAGE_SHIFT) & (NUM_ITLB_SETS-1))   argument
      75  #define flush_dtlb_page_eir(addr) mtspr(SPR_DTLBEIR, addr)   argument
      79  #define flush_itlb_page_eir(addr) mtspr(SPR_ITLBEIR, addr)   argument
      86  flush_dtlb_page_eir(addr);   in local_flush_tlb_page()
      88  flush_dtlb_page_no_eir(addr);   in local_flush_tlb_page()
      91  flush_itlb_page_eir(addr);   in local_flush_tlb_page()
      99  int addr;   in local_flush_tlb_range() local
      106  for (addr = start; addr < end; addr += PAGE_SIZE) {   in local_flush_tlb_range()
      108  flush_dtlb_page_eir(addr);   in local_flush_tlb_range()
      [all …]
|
| /arch/riscv/mm/ |
| fault.c |
      85  addr);   in die_kernel_fault()
      88  show_pte(addr);   in die_kernel_fault()
      105  if (addr < PAGE_SIZE)   in no_context()
      120  no_context(regs, addr);   in mm_fault_error()
      156  no_context(regs, addr);   in bad_area_nosemaphore()
      161  unsigned long addr)   in bad_area() argument
      190  index = pgd_index(addr);   in vmalloc_fault()
      196  no_context(regs, addr);   in vmalloc_fault()
      203  no_context(regs, addr);   in vmalloc_fault()
      289  addr = regs->badaddr;   in handle_page_fault()
      [all …]
|
| /arch/sh/mm/ |
| mmap.c |
      70  return addr;   in arch_get_unmapped_area()
      80  if (addr) {   in arch_get_unmapped_area()
      82  addr = COLOUR_ALIGN(addr, pgoff);   in arch_get_unmapped_area()
      84  addr = PAGE_ALIGN(addr);   in arch_get_unmapped_area()
      89  return addr;   in arch_get_unmapped_area()
      118  return addr;   in arch_get_unmapped_area_topdown()
      129  if (addr) {   in arch_get_unmapped_area_topdown()
      131  addr = COLOUR_ALIGN(addr, pgoff);   in arch_get_unmapped_area_topdown()
      133  addr = PAGE_ALIGN(addr);   in arch_get_unmapped_area_topdown()
      138  return addr;   in arch_get_unmapped_area_topdown()
      [all …]
|