/arch/mips/mach-octeon/

cvmx-helper-agl.c
      38  agl_prtx_ctl.u64 = csr_rd(CVMX_AGL_PRTX_CTL(0));  in __cvmx_helper_agl_enumerate()
      84  gmx_bist.u64 = csr_rd(CVMX_AGL_GMX_BIST);  in __cvmx_helper_agl_probe()
      90  gmx_prtx_cfg.u64 = csr_rd(CVMX_AGL_GMX_PRTX_CFG(port));  in __cvmx_helper_agl_probe()
      99  agl_prtx_ctl.u64 = csr_rd(CVMX_AGL_PRTX_CTL(port));  in __cvmx_helper_agl_probe()
     122  csr_rd(CVMX_AGL_PRTX_CTL(port));  in __cvmx_helper_agl_probe()
     126  agl_prtx_ctl.u64 = csr_rd(CVMX_AGL_PRTX_CTL(port));  in __cvmx_helper_agl_probe()
     130  csr_rd(CVMX_AGL_PRTX_CTL(port));  in __cvmx_helper_agl_probe()
     134  agl_prtx_ctl.u64 = csr_rd(CVMX_AGL_PRTX_CTL(port));  in __cvmx_helper_agl_probe()
     138  agl_prtx_ctl.u64 = csr_rd(CVMX_AGL_PRTX_CTL(port));  in __cvmx_helper_agl_probe()
     142  agl_prtx_ctl.u64 = csr_rd(CVMX_AGL_PRTX_CTL(port));  in __cvmx_helper_agl_probe()
     [all …]

cvmx-helper-xaui.c
      85  ciu_qlm.u64 = csr_rd(CVMX_CIU_QLM2);  in __cvmx_helper_xaui_probe()
     100  mio_rst_boot.u64 = csr_rd(CVMX_MIO_RST_BOOT);  in __cvmx_helper_xaui_probe()
     105  ciu_qlm.u64 = csr_rd(CVMX_CIU_QLM2);  in __cvmx_helper_xaui_probe()
     135  mode.u64 = csr_rd(CVMX_GMXX_INF_MODE(interface));  in __cvmx_helper_xaui_probe()
     199  tx_ctl.u64 = csr_rd(CVMX_GMXX_TX_XAUI_CTL(interface));  in __cvmx_helper_xaui_link_init()
     285  gmx_cfg.u64 = csr_rd(CVMX_GMXX_PRTX_CFG(0, interface));  in __cvmx_helper_xaui_link_init()
     318  gmx_cfg.u64 = csr_rd(CVMX_GMXX_PRTX_CFG(0, interface));  in __cvmx_helper_xaui_link_init()
     360  bpid_msk.u64 = csr_rd(CVMX_GMXX_BPID_MSK(interface));  in __cvmx_helper_xaui_enable()
     367  csr_rd(CVMX_GMXX_TXX_APPEND(0, interface));  in __cvmx_helper_xaui_enable()
     424  qlm_cfg.u64 = csr_rd(CVMX_MIO_QLMX_CFG(qlm));  in __cvmx_helper_xaui_link_get()
     [all …]

cvmx-helper-jtag.c
      59  csr_rd(CVMX_CIU_QLM_JTGC);  in cvmx_helper_qlm_jtag_init()
      82  jtgc.u64 = csr_rd(CVMX_CIU_QLM_JTGC);  in cvmx_helper_qlm_jtag_shift()
      85  csr_rd(CVMX_CIU_QLM_JTGC);  in cvmx_helper_qlm_jtag_shift()
      94  jtgd.u64 = csr_rd(CVMX_CIU_QLM_JTGD);  in cvmx_helper_qlm_jtag_shift()
     133  jtgc.u64 = csr_rd(CVMX_CIU_QLM_JTGC);  in cvmx_helper_qlm_jtag_update()
     137  csr_rd(CVMX_CIU_QLM_JTGC);  in cvmx_helper_qlm_jtag_update()
     145  jtgd.u64 = csr_rd(CVMX_CIU_QLM_JTGD);  in cvmx_helper_qlm_jtag_update()
     159  jtgc.u64 = csr_rd(CVMX_CIU_QLM_JTGC);  in cvmx_helper_qlm_jtag_capture()
     163  csr_rd(CVMX_CIU_QLM_JTGC);  in cvmx_helper_qlm_jtag_capture()
     170  jtgd.u64 = csr_rd(CVMX_CIU_QLM_JTGD);  in cvmx_helper_qlm_jtag_capture()

cvmx-helper-sgmii.c
      67  csr_rd(CVMX_PCSX_MISCX_CTL_REG(index, interface));  in __cvmx_helper_sgmii_hardware_init_one_time()
     104  csr_rd(CVMX_PCSX_ANX_ADV_REG(index, interface));  in __cvmx_helper_sgmii_hardware_init_one_time()
     116  pcsx_sgmx_an_adv_reg.u64 = csr_rd(  in __cvmx_helper_sgmii_hardware_init_one_time()
     230  csr_rd(CVMX_PCSX_MISCX_CTL_REG(index, interface));  in __cvmx_helper_sgmii_hardware_init_link()
     392  ciu_qlm.u64 = csr_rd(CVMX_CIU_QLM2);  in __cvmx_helper_sgmii_hardware_init()
     407  mio_rst_boot.u64 = csr_rd(CVMX_MIO_RST_BOOT);  in __cvmx_helper_sgmii_hardware_init()
     412  ciu_qlm.u64 = csr_rd(CVMX_CIU_QLM2);  in __cvmx_helper_sgmii_hardware_init()
     494  mode.u64 = csr_rd(CVMX_GMXX_INF_MODE(interface));  in __cvmx_helper_sgmii_probe()
     529  csr_rd(CVMX_GMXX_PRTX_CFG(index, interface));  in __cvmx_helper_sgmii_enable()
     537  csr_rd(CVMX_GMXX_BPID_MAPX(index, interface));  in __cvmx_helper_sgmii_enable()
     [all …]

cvmx-helper-rgmii.c
      48  mode.u64 = csr_rd(CVMX_GMXX_INF_MODE(xi.interface));  in __cvmx_helper_rgmii_probe()
      76  mode.u64 = csr_rd(CVMX_GMXX_INF_MODE(interface));  in __cvmx_helper_rgmii_enable()
     128  gmx_cfg.u64 = csr_rd(CVMX_GMXX_PRTX_CFG(port, interface));  in __cvmx_helper_rgmii_enable()
     152  asxx_prt_loop.u64 = csr_rd(CVMX_ASXX_PRT_LOOP(interface));  in __cvmx_helper_rgmii_link_get()
     235  pko_mem_queue_qos.u64 = csr_rd(CVMX_PKO_MEM_QUEUE_QOS);  in __cvmx_helper_rgmii_link_set()
     249  csr_rd(CVMX_GMXX_TX_OVR_BP(interface));  in __cvmx_helper_rgmii_link_set()
     267  csr_rd(CVMX_GMXX_PRTX_CFG(index, interface));  in __cvmx_helper_rgmii_link_set()
     304  csr_rd(CVMX_GMXX_PRTX_CFG(index, interface));  in __cvmx_helper_rgmii_link_set()
     372  asxx_prt_loop.u64 = csr_rd(CVMX_ASXX_PRT_LOOP(interface));  in __cvmx_helper_rgmii_configure_loopback()
     387  tmp = csr_rd(CVMX_ASXX_TX_PRT_EN(interface));  in __cvmx_helper_rgmii_configure_loopback()
     [all …]

cvmx-qlm.c
     203  g1.u64 = csr_rd(CVMX_GSERX_CFG(5));  in cvmx_qlm_lmac()
     204  g2.u64 = csr_rd(CVMX_GSERX_CFG(6));  in cvmx_qlm_lmac()
     240  g1.u64 = csr_rd(CVMX_GSERX_CFG(4));  in cvmx_qlm_lmac()
     241  g2.u64 = csr_rd(CVMX_GSERX_CFG(5));  in cvmx_qlm_lmac()
     307  gser1.u64 = csr_rd(CVMX_GSERX_CFG(qlm1));  in cvmx_qlm_mux_interface()
     308  gser2.u64 = csr_rd(CVMX_GSERX_CFG(qlm2));  in cvmx_qlm_mux_interface()
     957  pem0_cfg.u64 = csr_rd(CVMX_PEMX_CFG(0));  in __cvmx_qlm_get_mode_cn70xx()
    1741  gser1.u64 = csr_rd(CVMX_GSERX_CFG(4));  in __cvmx_qlm_get_mode_cnf75xx()
    1742  gser2.u64 = csr_rd(CVMX_GSERX_CFG(5));  in __cvmx_qlm_get_mode_cnf75xx()
    1933  count = csr_rd(CVMX_MIO_PTP_EVT_CNT);  in cvmx_qlm_measure_clock()
     [all …]

cvmx-agl.c
     103  agl_gmx_prtx.u64 = csr_rd(CVMX_AGL_GMX_PRTX_CFG(port));  in cvmx_agl_link_set()
     125  agl_gmx_prtx.u64 = csr_rd(CVMX_AGL_GMX_PRTX_CFG(port));  in cvmx_agl_link_set()
     173  agl_gmx_prtx.u64 = csr_rd(CVMX_AGL_GMX_PRTX_CFG(port));  in cvmx_agl_link_set()
     179  prt_ctl.u64 = csr_rd(CVMX_AGL_PRTX_CTL(port));  in cvmx_agl_link_set()
     180  agl_clk.u64 = csr_rd(CVMX_AGL_GMX_TXX_CLK(port));  in cvmx_agl_link_set()
     205  agl_prtx_ctl.u64 = csr_rd(CVMX_AGL_PRTX_CTL(port));  in cvmx_agl_link_set()
     208  csr_rd(CVMX_AGL_PRTX_CTL(port));  in cvmx_agl_link_set()
     212  csr_rd(CVMX_AGL_PRTX_CTL(port));  in cvmx_agl_link_set()

octeon_qlm.c
     130  soft_prst.u64 = csr_rd(CVMX_CIU_SOFT_PRST);  in __set_qlm_pcie_mode_61xx()
     218  qlm0.u64 = csr_rd(CVMX_MIO_QLMX_CFG(0));  in octeon_configure_qlm_cn61xx()
    1078  csr_rd(CVMX_GSERX_SATA_LANE_RST(qlm));  in __sata_dlm_init_cn73xx()
    1746  pemx_on.u64 = csr_rd(CVMX_PEMX_ON(0));  in __dlmx_setup_pcie_cn70xx()
    1765  pemx_on.u64 = csr_rd(CVMX_PEMX_ON(0));  in __dlmx_setup_pcie_cn70xx()
    1780  pemx_on.u64 = csr_rd(CVMX_PEMX_ON(1));  in __dlmx_setup_pcie_cn70xx()
    1809  pemx_on.u64 = csr_rd(CVMX_PEMX_ON(1));  in __dlmx_setup_pcie_cn70xx()
    4662  cfg.u64 = csr_rd(CVMX_GSERX_CFG(qlm));  in octeon_configure_qlm_cn73xx()
    5105  cfg.u64 = csr_rd(CVMX_GSERX_CFG(qlm));  in octeon_configure_qlm_cn73xx()
    5377  cfg.u64 = csr_rd(CVMX_GSERX_CFG(qlm));  in octeon_configure_qlm_cnf75xx()
     [all …]

cvmx-helper-ipd.c
     130  pkind_fcsx.u64 = csr_rd(CVMX_PIP_SUB_PKIND_FCSX(0));  in cvmx_helper_fcs_op()
     139  port_cfg.u64 = csr_rd(CVMX_PIP_PRT_CFGX(pknd));  in cvmx_helper_fcs_op()
     173  port_config.u64 = csr_rd(CVMX_PIP_PRT_CFGX(pknd));  in __cvmx_helper_ipd_port_setup()
     174  tag_config.u64 = csr_rd(CVMX_PIP_PRT_TAGX(pknd));  in __cvmx_helper_ipd_port_setup()
     179  prt_cfgbx.u64 = csr_rd(CVMX_PIP_PRT_CFGBX(pknd));  in __cvmx_helper_ipd_port_setup()
     183  port_config.u64 = csr_rd(CVMX_PIP_PRT_CFGX(ipd_port));  in __cvmx_helper_ipd_port_setup()
     184  tag_config.u64 = csr_rd(CVMX_PIP_PRT_TAGX(ipd_port));  in __cvmx_helper_ipd_port_setup()

cvmx-ipd.c
      87  ctl_status.u64 = csr_rd(CVMX_IPD_CTL_STATUS);  in cvmx_ipd_config()
     116  ipd_ctl_reg.u64 = csr_rd(CVMX_IPD_CTL_STATUS);  in cvmx_ipd_config()
     131  ipd_reg.u64 = csr_rd(CVMX_IPD_CTL_STATUS);  in cvmx_ipd_enable()
     138  ipd_reg.u64 = csr_rd(CVMX_IPD_CTL_STATUS);  in cvmx_ipd_enable()

cvmx-helper-npi.c
     105  port_cfg.u64 = csr_rd(CVMX_PIP_PRT_CFGX(ipd_port));  in __cvmx_helper_npi_enable()
     115  config.u64 = csr_rd(CVMX_PEXP_SLI_PORTX_PKIND(port));  in __cvmx_helper_npi_enable()
     128  config.u64 = csr_rd(CVMX_PEXP_SLI_TX_PIPE);  in __cvmx_helper_npi_enable()

cvmx-helper-loop.c
      95  port_cfg.u64 = csr_rd(CVMX_PIP_PRT_CFGX(offset));  in __cvmx_helper_loop_enable()
     108  ipd_sub_port_fcs.u64 = csr_rd(CVMX_IPD_SUB_PORT_FCS);  in __cvmx_helper_loop_enable()
     123  lp_pknd.u64 = csr_rd(CVMX_PKO_REG_LOOPBACK_PKIND);  in __cvmx_helper_loop_enable()
     124  lp_bpid.u64 = csr_rd(CVMX_PKO_REG_LOOPBACK_BPID);  in __cvmx_helper_loop_enable()

cvmx-ilk.c
     169  mio_qlmx_cfg.u64 = csr_rd(CVMX_MIO_QLMX_CFG(this_qlm));  in cvmx_ilk_start_interface()
     905  csr_rd(CVMX_ILK_RXX_IDX_CAL(interface));  in cvmx_ilk_reg_dump_rx()
     910  csr_rd(CVMX_ILK_RXX_MEM_CAL0(interface));  in cvmx_ilk_reg_dump_rx()
     914  csr_rd(CVMX_ILK_RXX_MEM_CAL1(interface));  in cvmx_ilk_reg_dump_rx()
     964  csr_rd(CVMX_ILK_TXX_MEM_PMAP(interface));  in cvmx_ilk_reg_dump_tx()
     979  csr_rd(CVMX_ILK_TXX_IDX_CAL(interface));  in cvmx_ilk_reg_dump_tx()
     984  csr_rd(CVMX_ILK_TXX_MEM_CAL0(interface));  in cvmx_ilk_reg_dump_tx()
     988  csr_rd(CVMX_ILK_TXX_MEM_CAL1(interface));  in cvmx_ilk_reg_dump_tx()
    1038  csr_rd(CVMX_ILK_TXX_FLOW_CTL0(interface));  in cvmx_ilk_runtime_status()
    1065  csr_rd(CVMX_ILK_RXX_FLOW_CTL0(interface));  in cvmx_ilk_runtime_status()
     [all …]

cvmx-helper-util.c
     194  ipd_ctl_status.u64 = csr_rd(CVMX_IPD_CTL_STATUS);  in __cvmx_ipd_mode_no_wptr()
     221  __cvmx_wqe_pool = csr_rd(CVMX_IPD_WQE_FPA_QUEUE) & 7;  in cvmx_packet_short_ptr_calculate()
     229  pip_gbl_cfg.u64 = csr_rd(CVMX_PIP_GBL_CFG);  in cvmx_packet_short_ptr_calculate()
     230  pip_ip_offset.u64 = csr_rd(CVMX_PIP_IP_OFFSET);  in cvmx_packet_short_ptr_calculate()
     588  gmx_tx_prts.u64 = csr_rd(CVMX_GMXX_TX_PRTS(xi.interface));  in __cvmx_helper_setup_gmx()
     608  gmx_rx_prts.u64 = csr_rd(CVMX_GMXX_RX_PRTS(xi.interface));  in __cvmx_helper_setup_gmx()
     623  pko_mode.u64 = csr_rd(CVMX_PKO_REG_GMX_PORT_MODE);  in __cvmx_helper_setup_gmx()
     656  gmx_tx_thresh.u64 = csr_rd(CVMX_GMXX_TXX_THRESH(0, xi.interface));  in __cvmx_helper_setup_gmx()
     687  config.u64 = csr_rd(CVMX_GMXX_TXX_PIPE(index,  in __cvmx_helper_setup_gmx()

cvmx-helper.c
     590  prtx_ctl.u64 = csr_rd(CVMX_AGL_PRTX_CTL(0));  in __cvmx_get_mode_cn70xx()
     888  qlm_cfg.u64 = csr_rd(CVMX_MIO_QLMX_CFG(0));  in __cvmx_get_mode_cn68xx()
     903  qlm_cfg.u64 = csr_rd(CVMX_MIO_QLMX_CFG(0));  in __cvmx_get_mode_cn68xx()
     943  qlm_cfg.u64 = csr_rd(CVMX_MIO_QLMX_CFG(3));  in __cvmx_get_mode_cn68xx()
     944  qlm_cfg1.u64 = csr_rd(CVMX_MIO_QLMX_CFG(1));  in __cvmx_get_mode_cn68xx()
    1006  mio_qlm_cfg.u64 = csr_rd(CVMX_MIO_QLMX_CFG(2));  in __cvmx_get_mode_octeon2()
    1008  mio_qlm_cfg.u64 = csr_rd(CVMX_MIO_QLMX_CFG(1));  in __cvmx_get_mode_octeon2()
    1026  qlm_cfg.u64 = csr_rd(CVMX_MIO_QLMX_CFG(2));  in __cvmx_get_mode_octeon2()
    1028  qlm_cfg.u64 = csr_rd(CVMX_MIO_QLMX_CFG(0));  in __cvmx_get_mode_octeon2()
    1046  qlm_cfg.u64 = csr_rd(CVMX_MIO_QLMX_CFG(0));  in __cvmx_get_mode_octeon2()
     [all …]

cvmx-helper-bgx.c
     182  xcv_reset.u64 = csr_rd(CVMX_XCV_RESET);  in __cvmx_helper_bgx_rgmii_speed()
     183  xcv_ctl.u64 = csr_rd(CVMX_XCV_CTL);  in __cvmx_helper_bgx_rgmii_speed()
     195  xcv_reset.u64 = csr_rd(CVMX_XCV_RESET);  in __cvmx_helper_bgx_rgmii_speed()
     200  xcv_ctl.u64 = csr_rd(CVMX_XCV_CTL);  in __cvmx_helper_bgx_rgmii_speed()
     206  dll_ctl.u64 = csr_rd(CVMX_XCV_DLL_CTL);  in __cvmx_helper_bgx_rgmii_speed()
     213  dll_ctl.u64 = csr_rd(CVMX_XCV_DLL_CTL);  in __cvmx_helper_bgx_rgmii_speed()
     216  xcv_reset.u64 = csr_rd(CVMX_XCV_RESET);  in __cvmx_helper_bgx_rgmii_speed()
     230  xcv_reset.u64 = csr_rd(CVMX_XCV_RESET);  in __cvmx_helper_bgx_rgmii_speed()
     261  xcv_reset.u64 = csr_rd(CVMX_XCV_RESET);  in __cvmx_helper_bgx_rgmii_speed()
     874  csr_rd(CVMX_XCV_RESET);  in __cvmx_helper_bgx_rgmii_link_set_down()
     [all …]

cvmx-coremask.c
     190  ciu_fuse = (csr_rd(CVMX_CIU_FUSE) &  in octeon_get_available_coremask()
     200  ciu_fuse = (csr_rd(CVMX_CIU_FUSE) & 0x0000FFFFFFFFFFFFULL);  in octeon_get_available_coremask()

cvmx-pcie.c
    1047  qlm_cfg.u64 = csr_rd(CVMX_MIO_QLMX_CFG(1));  in __cvmx_pcie_check_qlm_mode()
    1232  pemx_cfg.u64 = csr_rd(CVMX_PEMX_CFG(0));  in __cvmx_pcie_rc_initialize_gen2()
    1251  ciu_qlm.u64 = csr_rd(CVMX_CIU_QLM1);  in __cvmx_pcie_rc_initialize_gen2()
    1259  ciu_qlm.u64 = csr_rd(CVMX_CIU_QLM0);  in __cvmx_pcie_rc_initialize_gen2()
    1850  pemx_cfg.u64 = csr_rd(CVMX_PEMX_CFG(pcie_port));  in __cvmx_pcie_rc_initialize_gen2_v3()
    2334  ciu_qlm.u64 = csr_rd(CVMX_CIU_QLM1);  in cvmx_pcie_ep_initialize()
    2342  ciu_qlm.u64 = csr_rd(CVMX_CIU_QLM0);  in cvmx_pcie_ep_initialize()
    2430  sli_data_out_cnt.u64 = csr_rd(CVMX_PEXP_SLI_DATA_OUT_CNT);  in cvmx_pcie_wait_for_pending()
    2444  sli_data_out_cnt.u64 = csr_rd(CVMX_PEXP_SLI_DATA_OUT_CNT);  in cvmx_pcie_wait_for_pending()
    2478  rst_ctl.u64 = csr_rd(CVMX_RST_CTLX(pcie_port));  in cvmx_pcie_is_host_mode()
     [all …]

cvmx-helper-ilk.c
     213  tx_cal0.u64 = csr_rd(CVMX_ILK_TXX_MEM_CAL0(interface));  in __cvmx_ilk_write_tx_cal_entry_cn68xx()
     214  tx_cal1.u64 = csr_rd(CVMX_ILK_TXX_MEM_CAL1(interface));  in __cvmx_ilk_write_tx_cal_entry_cn68xx()
     363  rx_cal0.u64 = csr_rd(CVMX_ILK_RXX_MEM_CAL0(interface));  in __cvmx_ilk_write_rx_cal_entry_cn68xx()
     364  rx_cal1.u64 = csr_rd(CVMX_ILK_RXX_MEM_CAL1(interface));  in __cvmx_ilk_write_rx_cal_entry_cn68xx()
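Almost every hit in the listing above follows the same access pattern: the 64-bit CSR value is loaded with csr_rd() into the .u64 member of the register's union type, individual bit fields are inspected or changed through the structured view, and the whole value is written back. The sketch below shows that read-modify-write shape in isolation. It is only a sketch: csr_rd() and the CVMX_AGL_PRTX_CTL() address macro appear in the references above, csr_wr() is assumed to be the write counterpart living next to csr_rd() in cvmx-regs.h, and the union layout with its "enable" field is an illustrative placeholder rather than a generated CSR definition.

```c
/*
 * Sketch of the csr_rd()/csr_wr() read-modify-write idiom seen above.
 * The union below is a stand-in; real layouts come from the generated
 * CSR headers under include/mach/.
 */
#include <mach/cvmx-regs.h>

union demo_prtx_ctl {
	u64 u64;			/* raw 64-bit register value */
	struct {
		u64 reserved_1_63 : 63;
		u64 enable : 1;		/* placeholder field name */
	} s;
};

static void demo_port_enable(int port)
{
	union demo_prtx_ctl ctl;

	/* Read the whole register into the union ... */
	ctl.u64 = csr_rd(CVMX_AGL_PRTX_CTL(port));
	/* ... change one field through the structured view ... */
	ctl.s.enable = 1;
	/* ... and write the full 64-bit value back. */
	csr_wr(CVMX_AGL_PRTX_CTL(port), ctl.u64);
	/* Read back so the write is posted before we continue. */
	csr_rd(CVMX_AGL_PRTX_CTL(port));
}
```

The bare csr_rd() calls whose result is discarded (for example cvmx-helper-agl.c lines 122 and 130, or cvmx-agl.c lines 208 and 212) are that final read-back step: re-reading the register right after a write is how this code makes sure the write has reached the device before it continues.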
/arch/mips/mach-octeon/include/mach/

cvmx-pip.h
     643  stat0.u64 = csr_rd(CVMX_PIP_STAT0_X(pknd));  in cvmx_pip_get_port_stats()
     644  stat1.u64 = csr_rd(CVMX_PIP_STAT1_X(pknd));  in cvmx_pip_get_port_stats()
     645  stat2.u64 = csr_rd(CVMX_PIP_STAT2_X(pknd));  in cvmx_pip_get_port_stats()
     646  stat3.u64 = csr_rd(CVMX_PIP_STAT3_X(pknd));  in cvmx_pip_get_port_stats()
     647  stat4.u64 = csr_rd(CVMX_PIP_STAT4_X(pknd));  in cvmx_pip_get_port_stats()
     648  stat5.u64 = csr_rd(CVMX_PIP_STAT5_X(pknd));  in cvmx_pip_get_port_stats()
     649  stat6.u64 = csr_rd(CVMX_PIP_STAT6_X(pknd));  in cvmx_pip_get_port_stats()
     650  stat7.u64 = csr_rd(CVMX_PIP_STAT7_X(pknd));  in cvmx_pip_get_port_stats()
     651  stat8.u64 = csr_rd(CVMX_PIP_STAT8_X(pknd));  in cvmx_pip_get_port_stats()
     652  stat9.u64 = csr_rd(CVMX_PIP_STAT9_X(pknd));  in cvmx_pip_get_port_stats()
     [all …]

cvmx-fpa1.h
      89  status.u64 = csr_rd(CVMX_FPA_CTL_STATUS);  in cvmx_fpa1_enable()
     115  status.u64 = csr_rd(CVMX_FPA_CTL_STATUS);  in cvmx_fpa1_disable()
     125  address = csr_rd(CVMX_ADDR_DID(CVMX_FULL_DID(CVMX_OCT_DID_FPA, pool)));  in cvmx_fpa1_alloc()
     129  if (csr_rd(CVMX_FPA_QUEX_AVAILABLE(pool)) > 0)  in cvmx_fpa1_alloc()
     193  return csr_rd(CVMX_FPA_QUEX_AVAILABLE(pool));  in cvmx_fpa1_get_available()

octeon-feature.h
     133  val = csr_rd(CVMX_MIO_FUS_DAT2);  in octeon_has_feature_OCTEON_FEATURE_CRYPTO()
     139  val = csr_rd(CVMX_RNM_CTL_STATUS);  in octeon_has_feature_OCTEON_FEATURE_CRYPTO()
     148  val = csr_rd(CVMX_MIO_FUS_DAT2);  in octeon_has_feature_OCTEON_FEATURE_DORM_CRYPTO()
     333  val = csr_rd(CVMX_MIO_FUS_DAT2);  in octeon_has_feature_OCTEON_FEATURE_AUTHENTIK()

cvmx-regs.h
     132  c.u64 = csr_rd(address); \
     153  c.u64 = csr_rd(address); \
     186  static inline u64 csr_rd(u64 addr)  in csr_rd()  (function definition)
     478  #define cvmx_read_csr_node(node, addr) csr_rd(addr)

cvmx-pow.h
     943  load_resp.u64 = csr_rd(load_addr.u64);  in cvmx_pow_get_current_tag()
     957  load_resp.u64 = csr_rd(load_addr.u64);  in cvmx_pow_get_current_tag()
     994  load_resp.u64 = csr_rd(load_addr.u64);  in cvmx_pow_get_current_wqp()
    1007  load_resp.u64 = csr_rd(load_addr.u64);  in cvmx_pow_get_current_wqp()
    1087  result.u64 = csr_rd(ptr.u64);  in cvmx_pow_work_request_sync_nocheck()
    1138  result.u64 = csr_rd(ptr.u64);  in cvmx_pow_work_request_null_rd()
    1727  grp_msk.u64 = csr_rd(CVMX_POW_PP_GRP_MSKX(core_num));  in cvmx_pow_set_group_mask()
    1782  grp_msk.u64 = csr_rd(CVMX_SSO_PPX_GRP_MSK(core_num));  in cvmx_pow_get_group_mask()
    2389  result.u64 = csr_rd(ptr.u64);  in cvmx_sso_work_request_grp_sync_nocheck()
    2889  sso_iq_com_cnt.u64 = csr_rd(CVMX_SSO_IQ_COM_CNT);  in cvmx_sso_get_total_wqe_count()
     [all …]

cvmx-helper-bgx.h
     250  cmr_config.u64 = csr_rd(CVMX_BGXX_CMRX_CONFIG(index, xiface));  in cvmx_helper_bgx_is_rgmii()
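The header hits show where csr_rd() itself lives and the other common shape it is used in: busy-wait polling. cvmx-regs.h line 186 is the definition, line 478 maps the node-qualified cvmx_read_csr_node() onto a plain csr_rd(), and the hits at lines 132 and 153 appear (note the line continuations) to sit inside macro bodies that re-read a register until a field reaches an expected value. A minimal poll of that style is sketched below. Only csr_rd() and CVMX_FPA_QUEX_AVAILABLE() (from the cvmx-fpa1.h hits) come from the listing; the helper name, the timeout handling, the include set, and the use of udelay() for pacing are assumptions made for illustration.

```c
/*
 * Minimal busy-wait sketch: poll a CSR with csr_rd() until a condition
 * holds or a timeout expires.  Helper name and timeout handling are
 * made up for this example.
 */
#include <linux/delay.h>
#include <mach/cvmx-regs.h>
#include <mach/cvmx-fpa1.h>

static int demo_wait_for_free_buffer(int pool, int timeout_us)
{
	while (timeout_us--) {
		/* Free-buffer count for this pool, read straight from the CSR */
		if (csr_rd(CVMX_FPA_QUEX_AVAILABLE(pool)) > 0)
			return 0;
		udelay(1);
	}

	return -1;	/* nothing became available in time */
}
```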