/arch/arm64/include/asm/
arm_dsu_pmu.h
     41  isb();  in __dsu_pmu_write_pmcr()
     49  isb();  in __dsu_pmu_get_reset_overflow()
     56  isb();  in __dsu_pmu_select_counter()
     69  isb();  in __dsu_pmu_write_counter()
     76  isb();  in __dsu_pmu_set_event()
     87  isb();  in __dsu_pmu_write_pmccntr()
     93  isb();  in __dsu_pmu_disable_counter()
     99  isb();  in __dsu_pmu_enable_counter()
    105  isb();  in __dsu_pmu_counter_interrupt_enable()
    111  isb();  in __dsu_pmu_counter_interrupt_disable()
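Every accessor in this header follows the same write-then-synchronize pattern: a write to a PMU system register only becomes visible to subsequent instructions after an ISB. A minimal sketch of the pattern, using the architected PMCR_EL0 as a stand-in for the DSU cluster registers (the helper name is hypothetical; AArch64 only):

    /* Write a PMU control register, then force the write to take
     * effect before any later instruction executes. */
    static inline void pmu_write_ctrl_sketch(unsigned long val)
    {
        asm volatile("msr pmcr_el0, %0" : : "r" (val));
        asm volatile("isb" : : : "memory");
    }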
|
dcc.h
     26  isb();  in __dcc_getchar()
     38  isb();  in __dcc_putchar()
|
arch_timer.h
     35  (__wa && __wa->h) ? ({ isb(); __wa->h;}) : arch_timer_##h; \
    106  isb();  in arch_timer_reg_write_cp15()
    118  isb();  in arch_timer_reg_write_cp15()
    167  isb();  in arch_timer_set_cntkctl()
|
arch_gicv3.h
     32  isb();  in gic_write_dir()
     93  isb();  in gic_write_ctlr()
    104  isb();  in gic_write_grpen1()
    120  isb();  in gic_write_sre()
|
mmu_context.h
     37  isb();  in contextidr_thread_switch()
     53  isb();  in cpu_set_reserved_ttbr0()
     82  isb();  in __cpu_set_tcr_t0sz()
    142  isb();  in cpu_install_ttbr0()
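The cpu_set_reserved_ttbr0() hit is the classic base-switch idiom: TTBR0_EL1 is pointed at an empty table so stale user mappings can no longer be walked, and the ISB makes the new base visible before any later access. A sketch under the assumption that reserved_pg_dir_pa holds the physical address of an all-zero page table (both names are illustrative):

    /* Install a translation-table base and synchronize the context. */
    static inline void set_reserved_ttbr0_sketch(unsigned long reserved_pg_dir_pa)
    {
        asm volatile("msr ttbr0_el1, %0" : : "r" (reserved_pg_dir_pa));
        asm volatile("isb" : : : "memory"); /* base change visible from here on */
    }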
|
assembler.h
     63  isb  // Take effect before a subsequent clear of DAIF.D
    107  isb
    444  isb
    458  isb
    471  isb
    649  isb
    722  isb
    730  isb
    865  isb
|
/arch/arm/include/asm/
uaccess-asm.h
     44  .macro uaccess_disable, tmp, isb=1
     51  .if \isb
     56  .macro uaccess_enable, tmp, isb=1
     63  .if \isb
     70  .macro uaccess_disable, tmp, isb=1
     80  .if \isb
     85  .macro uaccess_enable, tmp, isb=1
     94  .if \isb
    101  .macro uaccess_disable, tmp, isb=1
    104  .macro uaccess_enable, tmp, isb=1
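Here isb is a macro argument rather than the instruction: callers that toggle uaccess state several times in a row can pass isb=0 and issue a single barrier at the end. The same idea in C, assuming the CONFIG_CPU_SW_DOMAIN_PAN flavour where the state lives in the 32-bit DACR (the function name is made up):

    /* Write the domain access control register; synchronize only if
     * the caller is not batching further uaccess state changes. */
    static inline void uaccess_write_dacr_sketch(unsigned long dacr, int do_isb)
    {
        asm volatile("mcr p15, 0, %0, c3, c0, 0" : : "r" (dacr));
        if (do_isb)
            asm volatile("isb" : : : "memory");
    }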
|
arch_timer.h
     34  isb();  in arch_timer_reg_write_cp15()
     46  isb();  in arch_timer_reg_write_cp15()
     98  isb();  in __arch_counter_get_cntpct()
    112  isb();  in __arch_counter_get_cntvct()
    132  isb();  in arch_timer_set_cntkctl()
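Note that the barrier changes sides for the counter reads: in __arch_counter_get_cntpct() and __arch_counter_get_cntvct() the ISB comes before the read, so the counter cannot be sampled early, out of program order. A sketch of that idiom for 32-bit ARM (the function name is illustrative):

    /* Read the virtual counter, forbidding a speculated early read. */
    static inline unsigned long long read_cntvct_sketch(void)
    {
        unsigned long long cval;

        asm volatile("isb" : : : "memory");
        asm volatile("mrrc p15, 1, %Q0, %R0, c14" : "=r" (cval));
        return cval;
    }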
|
barrier.h
     19  #define isb(option) __asm__ __volatile__ ("isb " #option : : : "memory")  macro
     29  #define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \  macro
     36  #define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \  macro
     42  #define isb(x) __asm__ __volatile__ ("" : : : "memory")  macro
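These four definitions are the fallback ladder for the isb() macro: ARMv7 has a true ISB instruction, ARMv6-class cores reach the same operation through the CP15 c7/c5/4 write, and anything older degrades to a plain compiler barrier. A condensed sketch of the selection (the real header distinguishes a couple more CPU variants than shown here):

    #if __LINUX_ARM_ARCH__ >= 7
    #define isb(option) __asm__ __volatile__ ("isb " #option : : : "memory")
    #elif __LINUX_ARM_ARCH__ == 6
    #define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
                                          : : "r" (0) : "memory")
    #else
    #define isb(x) __asm__ __volatile__ ("" : : : "memory")
    #endif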
|
cp15.h
     68  isb();  in set_cr()
     82  isb();  in set_auxcr()
    101  isb();  in set_copro_access()
|
dcc.h
     22  isb();  in __dcc_getchar()
     32  isb();  in __dcc_putchar()
|
arch_gicv3.h
     70  isb();  in CPUIF_MAP()
     85  isb();  in gic_write_ctlr()
     96  isb();  in gic_write_grpen1()
    112  isb();  in gic_write_sre()
|
/arch/arm64/kvm/hyp/nvhe/
tlb.c
     89  isb();  in enter_vmid_context()
     96  isb();  in enter_vmid_context()
    136  isb();  in exit_vmid_context()
    141  isb();  in exit_vmid_context()
    173  isb();  in __kvm_tlb_flush_vmid_ipa()
    203  isb();  in __kvm_tlb_flush_vmid_ipa_nsh()
    230  isb();  in __kvm_tlb_flush_vmid_range()
    244  isb();  in __kvm_tlb_flush_vmid()
    259  isb();  in __kvm_flush_cpu_context()
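All of these hypervisor flush paths end with the same three-step epilogue: a TLBI only completes once a DSB has drained it, and the trailing ISB throws away any translation the pipeline has already fetched. A generic sketch of that sequence, using a full stage-1 invalidate rather than the IPA-ranged forms these functions actually issue:

    /* Invalidate the stage-1 TLB and resynchronize the CPU. */
    static inline void flush_tlb_sketch(void)
    {
        asm volatile("dsb ishst" : : : "memory"); /* publish page-table updates */
        asm volatile("tlbi vmalle1is");           /* invalidate, inner shareable */
        asm volatile("dsb ish" : : : "memory");   /* wait for completion */
        asm volatile("isb" : : : "memory");       /* drop stale fetched translations */
    }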
|
hyp-init.S
    111  isb
    140  isb
    161  isb
    261  isb
    292  isb
    308  isb
|
debug-sr.c
     36  isb();  in __debug_save_spe()
     48  isb();  in __debug_restore_spe()
     84  isb();  in __trace_switch_to_guest()
|
/arch/arm64/kvm/hyp/vhe/
tlb.c
     67  isb();  in enter_vmid_context()
     77  isb();  in exit_vmid_context()
    119  isb();  in __kvm_tlb_flush_vmid_ipa()
    151  isb();  in __kvm_tlb_flush_vmid_ipa_nsh()
    180  isb();  in __kvm_tlb_flush_vmid_range()
    196  isb();  in __kvm_tlb_flush_vmid()
    211  isb();  in __kvm_flush_cpu_context()
    362  isb();  in __kvm_tlbi_s1e2()
|
/arch/arm/common/
krait-l2-accessors.c
     22  isb();  in krait_set_l2_indirect_reg()
     24  isb();  in krait_set_l2_indirect_reg()
     41  isb();  in krait_get_l2_indirect_reg()
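The paired barriers in krait_set_l2_indirect_reg() guard an indirect register interface: the selector write must take effect before the data write that follows it, so an ISB sits between the two CP15 accesses and after the second. A sketch of that shape; the l2cpselr/l2cpdr encodings are my reading of the driver and should be treated as an assumption, and the interrupt masking the real function performs is omitted:

    /* Select an indirect L2 register, then write its value. */
    static void l2_indirect_write_sketch(unsigned int addr, unsigned int val)
    {
        asm volatile("mcr p15, 3, %0, c15, c0, 6" : : "r" (addr)); /* l2cpselr */
        asm volatile("isb" : : : "memory");  /* selection visible first */
        asm volatile("mcr p15, 3, %0, c15, c0, 7" : : "r" (val));  /* l2cpdr */
        asm volatile("isb" : : : "memory");
    }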
|
secure_cntvoff.S
     24  isb
     27  isb
     29  isb
|
/arch/arm64/kvm/hyp/
entry.S
     43  isb
     51  isb
    185  isb  // Prevent an early read of side-effect free ISR
    208  isb
|
/arch/arm/mach-omap2/
sleep44xx.S
     91  isb
    131  isb
    193  isb
    205  isb
    223  isb
    274  isb
    342  isb
|
/arch/arm/kernel/
head-nommu.S
    192  isb
    290  isb
    298  1: isb
    302  isb
    311  2: isb
    315  isb
    330  3: isb
    339  isb
    436  isb
    489  isb
    [all …]
|
/arch/arm/mm/
pmsa-v7.c
    346  isb();  in __mpu_min_region_order()
    360  isb();  /* Ensure that MPU region operations have completed */  in __mpu_min_region_order()
    392  isb();  in mpu_setup_region()
    395  isb();  /* Propagate properties before enabling region */  in mpu_setup_region()
    402  isb();  in mpu_setup_region()
    405  isb();  in mpu_setup_region()
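The two commented hits spell out the ordering rule for MPU programming: a region's properties must be synchronized before the region is enabled, and the enable itself must be synchronized before the region is relied on. A sketch of mpu_setup_region()'s shape, using what I take to be the ARMv7-R RGNR/DRBAR/DRSR encodings (the access-control setup of the real function is omitted):

    /* Program and enable one PMSAv7 data region. */
    static void mpu_region_sketch(unsigned int num, unsigned int base,
                                  unsigned int drsr)
    {
        asm volatile("mcr p15, 0, %0, c6, c2, 0" : : "r" (num));        /* RGNR: select */
        asm volatile("isb" : : : "memory");
        asm volatile("mcr p15, 0, %0, c6, c1, 0" : : "r" (base));       /* DRBAR */
        asm volatile("mcr p15, 0, %0, c6, c1, 2" : : "r" (drsr & ~1u)); /* DRSR, disabled */
        asm volatile("isb" : : : "memory"); /* propagate properties before enabling */
        asm volatile("mcr p15, 0, %0, c6, c1, 2" : : "r" (drsr | 1u));  /* set enable bit */
        asm volatile("isb" : : : "memory");
    }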
|
pv-fixup-asm.S
     26  isb
     78  isb
     82  isb
|
/arch/arm64/mm/
proc.S
    155  isb
    167  isb
    170  isb
    184  isb
    233  isb
    297  isb
    337  isb
    434  isb
|
/arch/arm64/kernel/
head.S
    162  3: isb
    226  isb
    279  isb
    299  isb
    308  isb
    392  isb
|