
Searched refs:csr_rd_node (Results 1 – 19 of 19) sorted by relevance

/u-boot/arch/mips/mach-octeon/
cvmx-ilk.c
194 csr_rd_node(node, CVMX_GSERX_PHY_CTL(qlm)); in cvmx_ilk_start_interface()
641 csr_rd_node(node, CVMX_ILK_RXX_CFG0(interface)); in cvmx_ilk_rx_cal_ena()
784 csr_rd_node(node, CVMX_ILK_TXX_CFG0(interface)); in cvmx_ilk_tx_cal_ena()
856 csr_rd_node(node, CVMX_ILK_RX_LNEX_CFG(i)); in cvmx_ilk_reg_dump_rx()
863 csr_rd_node(node, CVMX_ILK_RX_LNEX_INT(i)); in cvmx_ilk_reg_dump_rx()
891 rxx_chax.u64 = csr_rd_node( in cvmx_ilk_reg_dump_rx()
923 rxx_cal_entryx.u64 = csr_rd_node( in cvmx_ilk_reg_dump_rx()
997 txx_cal_entryx.u64 = csr_rd_node( in cvmx_ilk_reg_dump_tx()
1046 txx_cha_xonx.u64 = csr_rd_node( in cvmx_ilk_runtime_status()
1077 rxx_cha_xonx.u64 = csr_rd_node( in cvmx_ilk_runtime_status()
[all …]
cvmx-helper-bgx.c
125 cmr_config.u64 = csr_rd_node( in cvmx_helper_bgx_get_mode()
138 pmd_control.u64 = csr_rd_node( in cvmx_helper_bgx_get_mode()
430 gmp_timer.u64 = csr_rd_node( in __cvmx_helper_bgx_sgmii_hardware_init_one_time()
1101 csr_rd_node(node, in __cvmx_helper_bgx_xaui_init()
1104 csr_rd_node(node, in __cvmx_helper_bgx_xaui_init()
1657 spu_int.u64 = csr_rd_node( in __cvmx_helper_bgx_xaui_link_init()
1668 spu_int.u64 = csr_rd_node( in __cvmx_helper_bgx_xaui_link_init()
1983 csr_rd_node(node, in __cvmx_helper_bgx_xaui_link_init()
2556 adr_ctl.u64 = csr_rd_node( in cvmx_helper_bgx_set_mac()
2664 frmctl.u64 = csr_rd_node( in cvmx_bgx_set_flowctl_mode()
[all …]
cvmx-pki.c
288 pstyle.u64 = csr_rd_node( in cvmx_pki_write_pkind_config()
297 pcfg.u64 = csr_rd_node( in cvmx_pki_write_pkind_config()
311 skip.u64 = csr_rd_node( in cvmx_pki_write_pkind_config()
319 l2cust.u64 = csr_rd_node( in cvmx_pki_write_pkind_config()
328 lgcust.u64 = csr_rd_node( in cvmx_pki_write_pkind_config()
442 scfg2.u64 = csr_rd_node( in cvmx_pki_write_tag_config()
634 scfg.u64 = csr_rd_node( in cvmx_pki_write_style_config()
657 scfg2.u64 = csr_rd_node( in cvmx_pki_write_style_config()
780 term.u64 = csr_rd_node( in cvmx_pki_pcam_write_entry()
798 act.u64 = csr_rd_node( in cvmx_pki_pcam_write_entry()
[all …]
cvmx-qlm.c
148 csr_rd_node(xi.node, in cvmx_qlm_lmac()
636 cfg.u64 = csr_rd_node(node, CVMX_GSERX_CFG(qlm)); in cvmx_qlm_get_gbaud_mhz_node()
648 pemx_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(0)); in cvmx_qlm_get_gbaud_mhz_node()
659 pemx_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(2)); in cvmx_qlm_get_gbaud_mhz_node()
663 pemx_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(3)); in cvmx_qlm_get_gbaud_mhz_node()
677 pemx_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(pem)); in cvmx_qlm_get_gbaud_mhz_node()
1323 pemx_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(0)); in cvmx_qlm_get_mode_cn78xx()
1337 pemx_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(2)); in cvmx_qlm_get_mode_cn78xx()
1351 pemx_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(2)); in cvmx_qlm_get_mode_cn78xx()
1358 pemx_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(3)); in cvmx_qlm_get_mode_cn78xx()
[all …]
cvmx-pko3.c
55 debug("%s=%#llx\n", #reg, (long long)csr_rd_node(node, reg))
126 pko_enable.u64 = csr_rd_node(node, CVMX_PKO_ENABLE); in cvmx_pko3_hw_init_global()
133 shaper_cfg.u64 = csr_rd_node(node, CVMX_PKO_SHAPER_CFG); in cvmx_pko3_hw_init_global()
159 pko_status.u64 = csr_rd_node(node, CVMX_PKO_STATUS); in cvmx_pko3_hw_init_global()
165 dpfi_status.u64 = csr_rd_node(node, CVMX_PKO_DPFI_STATUS); in cvmx_pko3_hw_init_global()
173 ptf_iobp_cfg.u64 = csr_rd_node(node, CVMX_PKO_PTF_IOBP_CFG); in cvmx_pko3_hw_init_global()
197 pko_status.u64 = csr_rd_node(node, CVMX_PKO_STATUS); in cvmx_pko3_hw_init_global()
680 csr_rd_node(node, CVMX_PKO_MCI0_MAX_CREDX(mac_num)); in cvmx_pko_setup_macs()
682 csr_rd_node(node, CVMX_PKO_MCI1_MAX_CREDX(mac_num)); in cvmx_pko_setup_macs()
684 csr_rd_node(node, CVMX_PKO_MACX_CFG(mac_num)); in cvmx_pko_setup_macs()
[all …]
octeon_qlm.c
3755 pemx_on.u64 = csr_rd_node(node, CVMX_PEMX_ON(pem)); in __cvmx_qlm_pcie_errata_ep_cn78xx()
3763 pemx_on.u64 = csr_rd_node(node, CVMX_PEMX_ON(pem)); in __cvmx_qlm_pcie_errata_ep_cn78xx()
3856 pem_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(0)); in __cvmx_qlm_pcie_errata_cn78xx()
3862 pem_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(0)); in __cvmx_qlm_pcie_errata_cn78xx()
3868 pem_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(2)); in __cvmx_qlm_pcie_errata_cn78xx()
3874 pem_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(2)); in __cvmx_qlm_pcie_errata_cn78xx()
3875 pem3_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(3)); in __cvmx_qlm_pcie_errata_cn78xx()
4137 cfg.u64 = csr_rd_node(node, CVMX_GSERX_CFG(qlm)); in octeon_configure_qlm_cn78xx()
4442 cfg.u64 = csr_rd_node(node, CVMX_GSERX_CFG(qlm)); in octeon_configure_qlm_cn78xx()
4472 csr_rd_node(node, CVMX_GSERX_PHY_CTL(qlm)); in octeon_configure_qlm_cn78xx()
[all …]
cvmx-pko3-queue.c
361 csr_rd_node(node, CVMX_PKO_L3_L2_SQX_CHANNEL(l2_l3_q_num)); in cvmx_pko3_map_channel()
445 csr_rd_node(node, CVMX_PKO_L1_SQX_TOPOLOGY(parent_queue)); in cvmx_pko_configure_l2_queue()
492 csr_rd_node(node, CVMX_PKO_L2_SQX_TOPOLOGY(parent_queue)); in cvmx_pko_configure_l3_queue()
539 csr_rd_node(node, CVMX_PKO_L3_SQX_TOPOLOGY(parent_queue)); in cvmx_pko_configure_l4_queue()
586 csr_rd_node(node, CVMX_PKO_L4_SQX_TOPOLOGY(parent_queue)); in cvmx_pko_configure_l5_queue()
653 pko_parent_topology.u64 = csr_rd_node(node, parent_topology_reg); in cvmx_pko_configure_dq()
681 pko_dq_sched.u64 = csr_rd_node(node, CVMX_PKO_DQX_SCHEDULE(dq)); in cvmx_pko_configure_dq()
683 csr_rd_node(node, CVMX_PKO_DQX_TOPOLOGY(dq)); in cvmx_pko_configure_dq()
cvmx-fpa.c
117 pool_cfg.u64 = csr_rd_node(pool.node, CVMX_FPA_POOLX_CFG(pool.lpool)); in cvmx_fpa3_get_pool_buf_size()
219 pool_cfg.u64 = csr_rd_node(pool.node, CVMX_FPA_POOLX_CFG(pool.lpool)); in cvmx_fpa3_aura_cfg()
269 csr_rd_node(aura.node, CVMX_FPA_AURAX_INT(aura.laura)); in cvmx_fpa3_aura_cfg()
317 pool_cfg.u64 = csr_rd_node(pool.node, CVMX_FPA_POOLX_CFG(pool.lpool)); in cvmx_fpa3_pool_populate()
435 avail_reg.u64 = csr_rd_node( in cvmx_fpa3_pool_populate()
713 CAST_ULL(csr_rd_node(pool.node, CVMX_FPA_POOLX_STACK_BASE( in cvmx_fpa3_pool_stack_init()
715 CAST_ULL(csr_rd_node(pool.node, CVMX_FPA_POOLX_STACK_END( in cvmx_fpa3_pool_stack_init()
730 csr_rd_node(pool.node, CVMX_FPA_POOLX_FPF_MARKS(pool.lpool)); in cvmx_fpa3_pool_stack_init()
876 csr_rd_node(pool.node, CVMX_FPA_POOLX_AVAILABLE(pool.lpool)); in cvmx_fpa3_set_aura_for_pool()
cvmx-helper-pki.c
183 csr_rd_node(node, CVMX_PKI_CLX_ECC_CTL(i)); in __cvmx_helper_pki_install_dflt_vlan()
404 buf_ctl.u64 = csr_rd_node(node, CVMX_PKI_BUF_CTL); in __cvmx_helper_pki_global_setup()
476 pki_cl_msk.u64 = csr_rd_node( in cvmx_pki_get_port_config()
505 pki_cl_msk.u64 = csr_rd_node( in cvmx_pki_set_port_config()
535 pkind_cfg.u64 = csr_rd_node( in cvmx_helper_pki_set_fcs_op()
cvmx-helper-ilk.c
731 ilk_rxx_cfg1.u64 = csr_rd_node(node, CVMX_ILK_RXX_CFG1(interface)); in __cvmx_helper_ilk_link_get()
732 ilk_rxx_int.u64 = csr_rd_node(node, CVMX_ILK_RXX_INT(interface)); in __cvmx_helper_ilk_link_get()
801 csr_rd_node(node, CVMX_ILK_TXX_CFG1(interface)); in __cvmx_helper_ilk_link_get()
803 csr_rd_node(node, CVMX_ILK_RXX_CFG1(interface)); in __cvmx_helper_ilk_link_get()
cvmx-helper.c
732 if (csr_rd_node(xi.node, CVMX_GSERX_CFG(qlm)) & 0x1) { in __cvmx_get_mode_cn78xx()
1470 l2c_ctl.u64 = csr_rd_node(node, CVMX_L2C_CTL); in cvmx_helper_initialize_packet_io_node()
1494 smix_en.u64 = csr_rd_node(node, CVMX_SMIX_EN(i)); in cvmx_helper_initialize_packet_io_node()
1521 cfg.u64 = csr_rd_node(node, CVMX_SSO_WS_CFG); in cvmx_helper_initialize_packet_io_node()
cvmx-helper-cfg.c
750 csr_rd_node(node, CVMX_BGXX_CMRX_CONFIG(j, i)); in __cvmx_helper_init_port_valid()
cvmx-pcie.c
58 #define CVMX_READ_CSR(addr) csr_rd_node(node, addr)
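
The cvmx-pcie.c match above is not a direct read but a local convenience macro: it fixes the node argument from the enclosing function's scope so the PCIe code can read CSRs with a shorter call. A minimal sketch of that pattern is below; only the macro line is taken from the match itself, while the caller, the register argument, and the header path are illustrative assumptions.

    #include <linux/types.h>        /* u64 */
    #include <mach/cvmx-regs.h>     /* csr_rd_node() accessor (header path assumed) */

    /* Shorthand as in cvmx-pcie.c line 58: 'node' must be a variable in the
     * calling function's scope, so the macro expands to a node-aware CSR read
     * without repeating the node argument at every call site. */
    #define CVMX_READ_CSR(addr)     csr_rd_node(node, addr)

    /* Hypothetical caller illustrating the expansion; 'csr_addr' stands in
     * for a real CVMX_* register address macro. */
    static u64 read_one_csr(int node, u64 csr_addr)
    {
            return CVMX_READ_CSR(csr_addr); /* -> csr_rd_node(node, csr_addr) */
    }
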
/u-boot/arch/mips/mach-octeon/include/mach/
cvmx-fuse.h
24 val = csr_rd_node(node, CVMX_MIO_FUS_RCMD); in cvmx_fuse_read_byte_node()
cvmx-mdio.h
267 smi_clk.u64 = csr_rd_node(node, CVMX_SMIX_CLK(bus)); in __cvmx_mdio_set_clause45_mode()
281 smi_clk.u64 = csr_rd_node(node, CVMX_SMIX_CLK(bus)); in __cvmx_mdio_set_clause22_mode()
307 smi_rd.u64 = csr_rd_node(node, CVMX_SMIX_RD_DAT(bus)); in __cvmx_mdio_read_rd_dat()
cvmx-pow.h
924 sl_ppx_tag.u64 = csr_rd_node(node, CVMX_SSO_SL_PPX_TAG(core)); in cvmx_pow_get_current_tag()
982 sso_wqp.u64 = csr_rd_node(node, CVMX_SSO_SL_PPX_WQP(core)); in cvmx_pow_get_current_wqp()
1876 grp_pri.u64 = csr_rd_node(node, CVMX_SSO_GRPX_PRI(group)); in cvmx_pow_set_priority()
1936 grp_pri.u64 = csr_rd_node(node, CVMX_SSO_GRPX_PRI(group)); in cvmx_pow_get_priority()
1994 grp_pri.u64 = csr_rd_node(node, CVMX_SSO_GRPX_PRI(xgrp.xgrp)); in cvmx_sso_get_group_priority()
2251 grp_msk.u64 = csr_rd_node(node, reg_addr); in cvmx_sso_set_group_core_affinity()
2261 grp_msk.u64 = csr_rd_node(node, reg_addr); in cvmx_sso_set_group_core_affinity()
2306 grp_pri.u64 = csr_rd_node(node, CVMX_SSO_GRPX_PRI(xgrp.xgrp)); in cvmx_sso_set_group_priority()
2509 grp_msk.u64 = csr_rd_node(node, reg_addr); in cvmx_pow_get_xgrp_mask()
2514 grp_msk.u64 = csr_rd_node(node, reg_addr); in cvmx_pow_get_xgrp_mask()
[all …]
cvmx-regs.h  (usage sketch follows this group of results)
170 static inline u64 csr_rd_node(int node, u64 addr) in csr_rd_node() function
188 return csr_rd_node(0, addr); in csr_rd()
cvmx-pko3.h
650 wm_cnt.u64 = csr_rd_node(node, CVMX_PKO_DQX_WM_CNT(dq)); in __cvmx_pko3_lmtdma()
761 wm_cnt.u64 = csr_rd_node(node, CVMX_PKO_DQX_WM_CNT(dq)); in __cvmx_pko3_do_dma()
909 pko_aura.u64 = csr_rd_node(node, CVMX_PKO_DPFI_FPA_AURA); in __cvmx_pko3_aura_get()
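
The cvmx-regs.h entry above locates the accessor itself: csr_rd_node() is a static inline function taking a node number and a CSR address (line 170), and csr_rd() simply forwards to it with node 0 (line 188). Most matches in this list follow the same shape, reading a per-node CSR into a 64-bit value (often a register union's .u64 field) and then inspecting fields. A minimal sketch of that pattern, modeled on the __cvmx_get_mode_cn78xx() match in cvmx-helper.c, follows; the header providing CVMX_GSERX_CFG() and the meaning of bit 0 are assumptions taken from the snippets, not verified against the tree.

    #include <linux/types.h>        /* u64 */
    #include <mach/cvmx-regs.h>     /* csr_rd_node()/csr_rd() (cvmx-regs.h above) */
    /* CVMX_GSERX_CFG() comes from the GSER register definitions header;
     * the exact header name is assumed and not shown in these results. */

    /* Read the per-node GSER configuration CSR for a QLM and test bit 0,
     * mirroring the cvmx-helper.c match above. Whether bit 0 alone means
     * "QLM configured" is an assumption taken from that snippet. */
    static int qlm_cfg_bit0_set(int node, int qlm)
    {
            u64 cfg = csr_rd_node(node, CVMX_GSERX_CFG(qlm));

            return (cfg & 0x1) != 0;
    }

    /* On node 0, the csr_rd() wrapper (cvmx-regs.h line 188) performs the
     * same read with the node argument fixed to 0. */
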
/u-boot/drivers/net/octeon/
octeon_eth.c
941 cmr_config.u64 = csr_rd_node(xi.node, in octeon_eth_halt_bgx()
948 cmr_cam.u64 = csr_rd_node(xi.node, in octeon_eth_halt_bgx()

Completed in 95 milliseconds