| /arch/alpha/kernel/ |
| core_tsunami.c |
|  181  volatile unsigned long *csr;  in tsunami_pci_tbi() local |
|  186  csr = &pchip->tlbia.csr;  in tsunami_pci_tbi() |
|  188  csr = &pchip->tlbiv.csr;  in tsunami_pci_tbi() |
|  194  *csr = value;  in tsunami_pci_tbi() |
|  196  *csr;  in tsunami_pci_tbi() |
|  345  pchip->tba[2].csr = 0;  in tsunami_init_one_pchip() |
|  347  pchip->wsba[3].csr = 0;  in tsunami_init_one_pchip() |
|  350  pchip->pctl.csr |= pctl_m_mwin;  in tsunami_init_one_pchip() |
|  455  pchip->perror.csr;  in tsunami_pci_clr_err_1() |
|  456  pchip->perror.csr = 0x040;  in tsunami_pci_clr_err_1() |
|  [all …] |
|
| core_wildfire.c |
|  127  pci->pci_window[1].tbase.csr = 0;  in wildfire_init_hose() |
|  193  temp = fast->qsd_whami.csr;  in wildfire_hardware_probe() |
|  220  temp = qsa->qsa_qbb_id.csr;  in wildfire_hardware_probe() |
|  254  temp = qsd->qsd_whami.csr;  in wildfire_hardware_probe() |
|  263  temp = qsa->qsa_qbb_pop[0].csr;  in wildfire_hardware_probe() |
|  270  temp = qsa->qsa_qbb_pop[1].csr;  in wildfire_hardware_probe() |
|  277  temp = qsa->qsa_qbb_id.csr;  in wildfire_hardware_probe() |
|  538  qsd->qsd_fault_ena.csr);  in wildfire_dump_qsd_regs() |
|  542  qsd->qsd_mem_config.csr);  in wildfire_dump_qsd_regs() |
|  544  qsd->qsd_err_sum.csr);  in wildfire_dump_qsd_regs() |
|  [all …] |
|
| core_titan.c |
|  207  volatile unsigned long *csr;  in titan_pci_tbi() local |
|  220  csr = &port->port_specific.g.gtlbia.csr;  in titan_pci_tbi() |
|  222  csr = &port->port_specific.g.gtlbiv.csr;  in titan_pci_tbi() |
|  229  *csr = value;  in titan_pci_tbi() |
|  231  *csr;  in titan_pci_tbi() |
|  240  pctl.pctl_q_whole = port->pctl.csr;  in titan_query_agp() |
|  332  port->tba[1].csr = 0;  in titan_init_one_pachip_port() |
|  338  port->wsba[3].csr = 0;  in titan_init_one_pachip_port() |
|  341  port->pctl.csr |= pctl_m_mwin;  in titan_init_one_pachip_port() |
|  645  pctl.pctl_q_whole = port->pctl.csr;  in titan_agp_configure() |
|  [all …] |
|
| sys_marvel.c |
|  174  volatile unsigned long *csr,  in io7_redirect_irq() argument |
|  179  val = *csr;  in io7_redirect_irq() |
|  183  *csr = val;  in io7_redirect_irq() |
|  185  *csr;  in io7_redirect_irq() |
|  196  val = io7->csrs->PO7_LSI_CTL[which].csr;  in io7_redirect_one_lsi() |
|  200  io7->csrs->PO7_LSI_CTL[which].csr = val;  in io7_redirect_one_lsi() |
|  202  io7->csrs->PO7_LSI_CTL[which].csr;  in io7_redirect_one_lsi() |
|  213  val = io7->csrs->PO7_MSI_CTL[which].csr;  in io7_redirect_one_msi() |
|  219  io7->csrs->PO7_MSI_CTL[which].csr;  in io7_redirect_one_msi() |
|  230  io7->csrs->PO7_LSI_CTL[which].csr;  in init_one_io7_lsi() |
|  [all …] |
|
| core_marvel.c |
|   62  q = ev7csr->csr;  in read_ev7_csr() |
|   74  ev7csr->csr = q;  in write_ev7_csr() |
|  181  csrs->POx_ERR_SUM.csr = -1UL;  in io7_clear_errors() |
|  182  csrs->POx_TLB_ERR.csr = -1UL;  in io7_clear_errors() |
|  184  csrs->POx_TRANS_SUM.csr = -1UL;  in io7_clear_errors() |
|  289  csrs->POx_WBASE[0].csr =  in io7_init_hose() |
|  299  csrs->POx_TBASE[1].csr = 0;  in io7_init_hose() |
|  306  csrs->POx_WBASE[2].csr =  in io7_init_hose() |
|  314  csrs->POx_WBASE[3].csr = 0;  in io7_init_hose() |
|  609  csrs->POx_SG_TBIA.csr = 0;  in marvel_pci_tbi() |
|  [all …] |
|
| err_marvel.c |
|  818  err_sum |= io7->csrs->PO7_ERROR_SUM.csr;  in marvel_find_io7_with_error() |
|  822  err_sum |= io7->ports[i].csrs->POx_ERR_SUM.csr;  in marvel_find_io7_with_error() |
|  843  io->io_asic_rev = io7->csrs->IO_ASIC_REV.csr;  in marvel_find_io7_with_error() |
|  844  io->io_sys_rev = io7->csrs->IO_SYS_REV.csr;  in marvel_find_io7_with_error() |
|  845  io->io7_uph = io7->csrs->IO7_UPH.csr;  in marvel_find_io7_with_error() |
|  846  io->hpi_ctl = io7->csrs->HPI_CTL.csr;  in marvel_find_io7_with_error() |
|  847  io->crd_ctl = io7->csrs->CRD_CTL.csr;  in marvel_find_io7_with_error() |
|  848  io->hei_ctl = io7->csrs->HEI_CTL.csr;  in marvel_find_io7_with_error() |
|  849  io->po7_error_sum = io7->csrs->PO7_ERROR_SUM.csr;  in marvel_find_io7_with_error() |
|  884  csrs->POx_ERR_SUM.csr;  in marvel_find_io7_with_error() |
|  [all …] |
|
| sys_titan.c |
|   83  dim0 = &cchip->dim0.csr;  in titan_update_irq_hw() |
|   84  dim1 = &cchip->dim1.csr;  in titan_update_irq_hw() |
|   85  dim2 = &cchip->dim2.csr;  in titan_update_irq_hw() |
|   86  dim3 = &cchip->dim3.csr;  in titan_update_irq_hw() |
|  103  dimB = &cchip->dim0.csr;  in titan_update_irq_hw() |
|  104  if (bcpu == 1) dimB = &cchip->dim1.csr;  in titan_update_irq_hw() |
|  105  else if (bcpu == 2) dimB = &cchip->dim2.csr;  in titan_update_irq_hw() |
|  106  else if (bcpu == 3) dimB = &cchip->dim3.csr;  in titan_update_irq_hw() |
|
| /arch/sparc/kernel/ |
| ebus.c |
|   74  u32 csr = 0;  in ebus_dma_irq() local |
|   99  u32 csr;  in ebus_dma_register() local |
|  116  csr |= EBDMA_CSR_TCI_DIS;  in ebus_dma_register() |
|  127  u32 csr;  in ebus_dma_irq_enable() local |
|  137  csr |= EBDMA_CSR_INT_EN;  in ebus_dma_irq_enable() |
|  159  u32 csr;  in ebus_dma_unregister() local |
|  179  u32 csr;  in ebus_dma_request() local |
|  208  u32 csr;  in ebus_dma_prepare() local |
|  213  csr = (EBDMA_CSR_INT_EN |  in ebus_dma_prepare() |
|  219  csr |= EBDMA_CSR_WRITE;  in ebus_dma_prepare() |
|  [all …] |
|
| /arch/loongarch/kvm/ |
| vcpu.c |
|    74  struct loongarch_csrs *csr = vcpu->arch.csr;  in kvm_save_guest_pmu() local |
|    88  struct loongarch_csrs *csr = vcpu->arch.csr;  in kvm_restore_guest_pmu() local |
|   122  struct loongarch_csrs *csr = vcpu->arch.csr;  in kvm_lose_pmu() local |
|   491  struct loongarch_csrs *csr = vcpu->arch.csr;  in kvm_set_cpuid() local |
|   542  struct loongarch_csrs *csr = vcpu->arch.csr;  in kvm_drop_cpuid() local |
|   576  struct loongarch_csrs *csr = vcpu->arch.csr;  in _kvm_getcsr() local |
|   611  struct loongarch_csrs *csr = vcpu->arch.csr;  in _kvm_setcsr() local |
|  1521  if (!vcpu->arch.csr)  in kvm_arch_vcpu_create() |
|  1543  csr = vcpu->arch.csr;  in kvm_arch_vcpu_create() |
|  1585  struct loongarch_csrs *csr = vcpu->arch.csr;  in _kvm_vcpu_load() local |
|  [all …] |
|
| timer.c |
|   57  struct loongarch_csrs *csr = vcpu->arch.csr;  in kvm_restore_timer() local |
|   64  cfg = kvm_read_sw_gcsr(csr, LOONGARCH_CSR_TCFG);  in kvm_restore_timer() |
|   67  kvm_restore_hw_gcsr(csr, LOONGARCH_CSR_ESTAT);  in kvm_restore_timer() |
|   68  kvm_restore_hw_gcsr(csr, LOONGARCH_CSR_TCFG);  in kvm_restore_timer() |
|   71  kvm_restore_hw_gcsr(csr, LOONGARCH_CSR_TVAL);  in kvm_restore_timer() |
|  142  struct loongarch_csrs *csr = vcpu->arch.csr;  in _kvm_save_timer() local |
|  144  cfg = kvm_read_sw_gcsr(csr, LOONGARCH_CSR_TCFG);  in _kvm_save_timer() |
|  178  struct loongarch_csrs *csr = vcpu->arch.csr;  in kvm_save_timer() local |
|  183  kvm_save_hw_gcsr(csr, LOONGARCH_CSR_TCFG);  in kvm_save_timer() |
|  184  kvm_save_hw_gcsr(csr, LOONGARCH_CSR_TVAL);  in kvm_save_timer() |
|  [all …] |
|
| /arch/sh/kernel/cpu/ |
| adc.c |
|  16  unsigned char csr;  in adc_single() local |
|  22  csr = __raw_readb(ADCSR);  in adc_single() |
|  23  csr = channel | ADCSR_ADST | ADCSR_CKS;  in adc_single() |
|  24  __raw_writeb(csr, ADCSR);  in adc_single() |
|  27  csr = __raw_readb(ADCSR);  in adc_single() |
|  28  } while ((csr & ADCSR_ADF) == 0);  in adc_single() |
|  30  csr &= ~(ADCSR_ADF | ADCSR_ADST);  in adc_single() |
|  31  __raw_writeb(csr, ADCSR);  in adc_single() |
|
| /arch/riscv/kvm/ |
| aia.c |
|   60  csr->hviph &= ~mask;  in kvm_riscv_vcpu_aia_flush_interrupts() |
|   61  csr->hviph |= val;  in kvm_riscv_vcpu_aia_flush_interrupts() |
|   70  csr->vsieh = ncsr_read(CSR_VSIEH);  in kvm_riscv_vcpu_aia_sync_interrupts() |
|  131  csr_write(CSR_HVIPRIO1, csr->hviprio1);  in kvm_riscv_vcpu_aia_load() |
|  132  csr_write(CSR_HVIPRIO2, csr->hviprio2);  in kvm_riscv_vcpu_aia_load() |
|  134  csr_write(CSR_VSIEH, csr->vsieh);  in kvm_riscv_vcpu_aia_load() |
|  135  csr_write(CSR_HVIPH, csr->hviph);  in kvm_riscv_vcpu_aia_load() |
|  169  csr->hviprio1 = csr_read(CSR_HVIPRIO1);  in kvm_riscv_vcpu_aia_put() |
|  172  csr->vsieh = csr_read(CSR_VSIEH);  in kvm_riscv_vcpu_aia_put() |
|  173  csr->hviph = csr_read(CSR_HVIPH);  in kvm_riscv_vcpu_aia_put() |
|  [all …] |
|
| vcpu.c |
|   63  memset(csr, 0, sizeof(*csr));  in kvm_riscv_vcpu_context_reset() |
|  355  csr->hvip &= ~mask;  in kvm_riscv_vcpu_flush_interrupts() |
|  356  csr->hvip |= val;  in kvm_riscv_vcpu_flush_interrupts() |
|  370  csr->vsie = ncsr_read(CSR_VSIE);  in kvm_riscv_vcpu_sync_interrupts() |
|  606  csr_write(CSR_VSIE, csr->vsie);  in kvm_arch_vcpu_load() |
|  609  csr_write(CSR_VSEPC, csr->vsepc);  in kvm_arch_vcpu_load() |
|  613  csr_write(CSR_HVIP, csr->hvip);  in kvm_arch_vcpu_load() |
|  614  csr_write(CSR_VSATP, csr->vsatp);  in kvm_arch_vcpu_load() |
|  674  csr->vsie = csr_read(CSR_VSIE);  in kvm_arch_vcpu_put() |
|  680  csr->hvip = csr_read(CSR_HVIP);  in kvm_arch_vcpu_put() |
|  [all …] |
|
| /arch/loongarch/include/asm/ |
| kvm_csr.h |
|   14  #define gcsr_read(csr) \  argument |
|   20  : [reg] "i" (csr) \ |
|   25  #define gcsr_write(v, csr) \  argument |
|   31  : [reg] "i" (csr) \ |
|  182  #define kvm_save_hw_gcsr(csr, gid) (csr->csrs[gid] = gcsr_read(gid))  argument |
|  183  #define kvm_restore_hw_gcsr(csr, gid) (gcsr_write(csr->csrs[gid], gid))  argument |
|  185  #define kvm_read_clear_hw_gcsr(csr, gid) (csr->csrs[gid] = gcsr_write(0, gid))  argument |
|  191  return csr->csrs[gid];  in kvm_read_sw_gcsr() |
|  196  csr->csrs[gid] = val;  in kvm_write_sw_gcsr() |
|  202  csr->csrs[gid] |= val;  in kvm_set_sw_gcsr() |
|  [all …] |
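The kvm_csr.h matches above show the two tiers LoongArch KVM uses for guest CSRs: gcsr_read()/gcsr_write() reach the hardware guest CSRs through inline assembly, the *_sw_gcsr() helpers only touch the software copy in csr->csrs[], and kvm_save_hw_gcsr()/kvm_restore_hw_gcsr() move values between the two. The following is a minimal user-space sketch of that shadowing idea only; the demo_* names and hw_gcsr_read()/hw_gcsr_write() stubs are hypothetical stand-ins, not LoongArch instructions or kernel APIs.

```c
/*
 * Illustration only: the shadowed guest-CSR save/restore pattern, with
 * hypothetical demo_* names. hw_gcsr_read()/hw_gcsr_write() stand in for
 * the gcsr_read()/gcsr_write() assembly macros excerpted above.
 */
#include <stdio.h>

#define NR_GCSR 16

struct demo_csrs {
    unsigned long csrs[NR_GCSR];    /* software copy, one slot per guest CSR */
};

static unsigned long hw_regs[NR_GCSR];  /* stands in for the hardware guest CSRs */

static unsigned long hw_gcsr_read(int gid)            { return hw_regs[gid]; }
static void hw_gcsr_write(unsigned long val, int gid) { hw_regs[gid] = val; }

/* Same shape as kvm_save_hw_gcsr()/kvm_restore_hw_gcsr() above. */
#define demo_save_hw_gcsr(csr, gid)    ((csr)->csrs[gid] = hw_gcsr_read(gid))
#define demo_restore_hw_gcsr(csr, gid) (hw_gcsr_write((csr)->csrs[gid], gid))

int main(void)
{
    struct demo_csrs vcpu_csr = { { 0 } };

    hw_regs[3] = 0x1234;                 /* guest modified CSR slot 3      */
    demo_save_hw_gcsr(&vcpu_csr, 3);     /* vCPU scheduled out: save       */
    hw_regs[3] = 0;                      /* host reuses/clobbers the CSR   */
    demo_restore_hw_gcsr(&vcpu_csr, 3);  /* vCPU scheduled back in         */

    printf("restored guest CSR 3 = %#lx\n", vcpu_csr.csrs[3]);
    return 0;
}
```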
|
| kvm_host.h |
|  207  struct loongarch_csrs *csr;  member |
|  256  static inline unsigned long readl_sw_gcsr(struct loongarch_csrs *csr, int reg)  in readl_sw_gcsr() argument |
|  258  return csr->csrs[reg];  in readl_sw_gcsr() |
|  261  static inline void writel_sw_gcsr(struct loongarch_csrs *csr, int reg, unsigned long val)  in writel_sw_gcsr() argument |
|  263  csr->csrs[reg] = val;  in writel_sw_gcsr() |
|  350  int get_gcsr_flag(int csr); |
|
| /arch/mips/dec/ |
| kn02-irq.c |
|  30  volatile u32 *csr = (volatile u32 *)CKSEG1ADDR(KN02_SLOT_BASE +  in unmask_kn02_irq() local |
|  34  *csr = cached_kn02_csr;  in unmask_kn02_irq() |
|  39  volatile u32 *csr = (volatile u32 *)CKSEG1ADDR(KN02_SLOT_BASE +  in mask_kn02_irq() local |
|  43  *csr = cached_kn02_csr;  in mask_kn02_irq() |
|  62  volatile u32 *csr = (volatile u32 *)CKSEG1ADDR(KN02_SLOT_BASE +  in init_kn02_irqs() local |
|  68  *csr = cached_kn02_csr;  in init_kn02_irqs() |
|
| kn01-berr.c |
|   49  volatile u16 *csr = (void *)CKSEG1ADDR(KN01_SLOT_BASE + KN01_CSR);  in dec_kn01_be_ack() local |
|   54  *csr = cached_kn01_csr | KN01_CSR_MEMERR; /* Clear bus IRQ. */  in dec_kn01_be_ack() |
|  150  volatile u16 *csr = (void *)CKSEG1ADDR(KN01_SLOT_BASE + KN01_CSR);  in dec_kn01_be_interrupt() local |
|  154  if (!(*csr & KN01_CSR_MEMERR))  in dec_kn01_be_interrupt() |
|  177  volatile u16 *csr = (void *)CKSEG1ADDR(KN01_SLOT_BASE + KN01_CSR);  in dec_kn01_be_init() local |
|  183  cached_kn01_csr = *csr;  in dec_kn01_be_init() |
|  189  *csr = cached_kn01_csr;  in dec_kn01_be_init() |
|
| /arch/mips/cavium-octeon/executive/ |
| cvmx-interrupt-rsl.c |
|  53  union cvmx_asxx_int_en csr;  in __cvmx_interrupt_asxx_enable() local |
|  65  csr.u64 = cvmx_read_csr(CVMX_ASXX_INT_EN(block));  in __cvmx_interrupt_asxx_enable() |
|  66  csr.s.txpsh = mask;  in __cvmx_interrupt_asxx_enable() |
|  67  csr.s.txpop = mask;  in __cvmx_interrupt_asxx_enable() |
|  68  csr.s.ovrflw = mask;  in __cvmx_interrupt_asxx_enable() |
|  69  cvmx_write_csr(CVMX_ASXX_INT_EN(block), csr.u64);  in __cvmx_interrupt_asxx_enable() |
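The Octeon snippet above is a read-modify-write through a union that overlays named bitfields on the raw 64-bit register value. Below is a self-contained sketch of that pattern only; the demo_int_en layout and the reg_read()/reg_write() helpers are hypothetical stand-ins, not the real cvmx_asxx_int_en definition or the cvmx_read_csr()/cvmx_write_csr() API.

```c
/*
 * Illustration only: read-modify-write of a 64-bit register via a union
 * of the raw value and named bitfields. The field layout here is made up.
 */
#include <stdint.h>
#include <stdio.h>

union demo_int_en {
    uint64_t u64;                  /* whole-register view            */
    struct {
        uint64_t ovrflw   : 4;     /* hypothetical field layout      */
        uint64_t txpop    : 4;
        uint64_t txpsh    : 4;
        uint64_t reserved : 52;
    } s;
};

static uint64_t fake_hw_reg;                      /* stands in for the device register */
static uint64_t reg_read(void)        { return fake_hw_reg; }
static void reg_write(uint64_t value) { fake_hw_reg = value; }

int main(void)
{
    union demo_int_en csr;
    unsigned int mask = 0xf;       /* e.g. enable interrupts on 4 ports */

    csr.u64 = reg_read();          /* read the current register value   */
    csr.s.txpsh  = mask;           /* modify the named fields           */
    csr.s.txpop  = mask;
    csr.s.ovrflw = mask;
    reg_write(csr.u64);            /* write the whole value back        */

    printf("int_en = %#llx\n", (unsigned long long)csr.u64);
    return 0;
}
```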
|
| /arch/sh/boards/mach-hp6xx/ |
| pm.c |
|  40  u8 stbcr, csr;  in pm_enter() local |
|  47  csr = sh_wdt_read_csr();  in pm_enter() |
|  48  csr &= ~WTCSR_TME;  in pm_enter() |
|  49  csr |= WTCSR_CKS_4096;  in pm_enter() |
|  50  sh_wdt_write_csr(csr);  in pm_enter() |
|  51  csr = sh_wdt_read_csr();  in pm_enter() |
|
| /arch/mips/kernel/ |
| signal.c |
|  213  unsigned int csr;  in restore_msa_extcontext() local |
|  219  err = get_user(csr, &msa->csr);  in restore_msa_extcontext() |
|  235  write_msa_csr(csr);  in restore_msa_extcontext() |
|  364  __put_user(0, csr);  in protected_save_fp_context() |
|  401  err = sig = fpcsr_pending(csr);  in protected_restore_fp_context() |
|  427  __get_user(tmp, csr);  in protected_restore_fp_context() |
|  500  unsigned int csr, enabled;  in fpcsr_pending() local |
|  502  err = __get_user(csr, fpcsr);  in fpcsr_pending() |
|  508  if (csr & enabled) {  in fpcsr_pending() |
|  509  csr &= ~enabled;  in fpcsr_pending() |
|  [all …] |
|
| irq_txx9.c |
|   34  u32 csr;  member |
|  173  u32 csr = __raw_readl(&txx9_ircptr->csr);  in txx9_irq() local |
|  175  if (likely(!(csr & TXx9_IRCSR_IF)))  in txx9_irq() |
|  176  return TXX9_IRQ_BASE + (csr & (TXx9_MAX_IR - 1));  in txx9_irq() |
|
| /arch/riscv/include/asm/ |
| csr.h |
|  518  #define csr_swap(csr, val) \  argument |
|  527  #define csr_read(csr) \  argument |
|  530  __asm__ __volatile__ ("csrr %0, " __ASM_STR(csr) \ |
|  536  #define csr_write(csr, val) \  argument |
|  539  __asm__ __volatile__ ("csrw " __ASM_STR(csr) ", %0" \ |
|  544  #define csr_read_set(csr, val) \  argument |
|  553  #define csr_set(csr, val) \  argument |
|  556  __asm__ __volatile__ ("csrs " __ASM_STR(csr) ", %0" \ |
|  561  #define csr_read_clear(csr, val) \  argument |
|  570  #define csr_clear(csr, val) \  argument |
|  [all …] |
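The csr.h matches above excerpt the generic RISC-V CSR accessors: each macro stringifies a CSR name or number into a csrr/csrw/csrs instruction through inline assembly. As a hedged illustration of that pattern (not the kernel macros themselves), here is a stand-alone user-space sketch with its own demo_* names; it needs a RISC-V toolchain, and it only reads the time CSR because that is normally the one CSR readable from user mode.

```c
/*
 * Illustration only: a user-space rebuild of the csr_read()/csr_write()
 * macro shape excerpted above. The kernel stringifies with __ASM_STR() so
 * numeric CSR_* macros expand first; plain #csr is enough for a literal
 * CSR name like 'time'. Writes generally require S- or M-mode, so
 * demo_csr_write() is defined but not called.
 */
#include <stdio.h>

#define demo_csr_read(csr)                              \
({                                                      \
    unsigned long __v;                                  \
    __asm__ __volatile__ ("csrr %0, " #csr              \
                          : "=r" (__v) : : "memory");   \
    __v;                                                \
})

#define demo_csr_write(csr, val)                        \
({                                                      \
    unsigned long __v = (unsigned long)(val);           \
    __asm__ __volatile__ ("csrw " #csr ", %0"           \
                          : : "rK" (__v) : "memory");   \
})

int main(void)
{
    /* Reads the architectural timer via the csrr form of rdtime. */
    unsigned long t = demo_csr_read(time);

    printf("time CSR = %lu\n", t);
    return 0;
}
```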
|
| /arch/m68k/sun3x/ |
| time.c |
|  49  h->csr |= C_WRITE;  in sun3x_hwclk() |
|  57  h->csr &= ~C_WRITE;  in sun3x_hwclk() |
|  59  h->csr |= C_READ;  in sun3x_hwclk() |
|  67  h->csr &= ~C_READ;  in sun3x_hwclk() |
|
| /arch/arm64/boot/dts/apm/ |
| apm-storm.dtsi |
|  198  csr-mask = <0x2>; |
|  235  csr-mask = <0xa>; |
|  246  csr-mask = <0x3>; |
|  257  csr-mask = <0x3>; |
|  270  csr-mask = <0x00>; |
|  284  csr-mask = <0x3a>; |
|  298  csr-mask = <0x3a>; |
|  311  csr-mask = <0x05>; |
|  324  csr-mask = <0x05>; |
|  337  csr-mask = <0x05>; |
|  [all …] |
|
| /arch/riscv/kernel/ |
| vector.c |
|   82  u32 width, csr;  in insn_is_vector() local |
|  103  csr = RVG_EXTRACT_SYSTEM_CSR(insn_buf);  in insn_is_vector() |
|  104  if ((csr >= CSR_VSTART && csr <= CSR_VCSR) ||  in insn_is_vector() |
|  105  (csr >= CSR_VL && csr <= CSR_VLENB))  in insn_is_vector() |
|