Lines matching references to pdata (identifier cross-reference over the DPDK AXGBE PMD, drivers/net/axgbe/axgbe_dev.c). Each entry shows the source line number, the matching fragment, and the enclosing function; fragments are cut off where the original statement continues onto the next source line.

51 static inline unsigned int axgbe_get_max_frame(struct axgbe_port *pdata)  in axgbe_get_max_frame()  argument
53 return pdata->eth_dev->data->mtu + RTE_ETHER_HDR_LEN + in axgbe_get_max_frame()
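For orientation, the return expression of axgbe_get_max_frame() is truncated after RTE_ETHER_HDR_LEN in the listing; the standalone sketch below assumes the remaining terms are the CRC and VLAN overheads, so treat those two constants as assumptions rather than a copy of the driver line.

    #include <stdio.h>

    /* Header/CRC sizes mirror DPDK's rte_ether.h; VLAN_HLEN is an assumed 4-byte tag. */
    #define RTE_ETHER_HDR_LEN 14
    #define RTE_ETHER_CRC_LEN 4
    #define VLAN_HLEN         4

    /* Hypothetical stand-in for axgbe_get_max_frame(): MTU plus L2 overhead. */
    static unsigned int max_frame_from_mtu(unsigned int mtu)
    {
        return mtu + RTE_ETHER_HDR_LEN + RTE_ETHER_CRC_LEN + VLAN_HLEN;
    }

    int main(void)
    {
        printf("mtu 1500 -> max frame %u bytes\n", max_frame_from_mtu(1500)); /* 1522 */
        return 0;
    }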
58 static int mdio_complete(struct axgbe_port *pdata) in mdio_complete() argument
60 if (!AXGMAC_IOREAD_BITS(pdata, MAC_MDIOSCCDR, BUSY)) in mdio_complete()
66 static int axgbe_write_ext_mii_regs(struct axgbe_port *pdata, int addr, in axgbe_write_ext_mii_regs() argument
75 AXGMAC_IOWRITE(pdata, MAC_MDIOSCAR, mdio_sca); in axgbe_write_ext_mii_regs()
81 AXGMAC_IOWRITE(pdata, MAC_MDIOSCCDR, mdio_sccd); in axgbe_write_ext_mii_regs()
86 if (mdio_complete(pdata)) in axgbe_write_ext_mii_regs()
94 static int axgbe_read_ext_mii_regs(struct axgbe_port *pdata, int addr, in axgbe_read_ext_mii_regs() argument
103 AXGMAC_IOWRITE(pdata, MAC_MDIOSCAR, mdio_sca); in axgbe_read_ext_mii_regs()
108 AXGMAC_IOWRITE(pdata, MAC_MDIOSCCDR, mdio_sccd); in axgbe_read_ext_mii_regs()
114 if (mdio_complete(pdata)) in axgbe_read_ext_mii_regs()
122 return AXGMAC_IOREAD_BITS(pdata, MAC_MDIOSCCDR, DATA); in axgbe_read_ext_mii_regs()
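The MDIO helpers above share one busy-poll pattern: program MAC_MDIOSCAR with the target address, program MAC_MDIOSCCDR (which starts the transaction), then spin on mdio_complete() until the BUSY bit clears. The retry loop itself is truncated out of the listing, so the sketch below is a minimal self-contained model of that pattern with the register accesses stubbed and the retry budget assumed.

    #include <stdbool.h>
    #include <stdio.h>

    /* Stub for AXGMAC_IOREAD_BITS(pdata, MAC_MDIOSCCDR, BUSY); this fake
     * "hardware" reports not-busy after a couple of polls. */
    static bool mdio_busy(void)
    {
        static int polls;
        return ++polls < 3;
    }

    /* Mirrors mdio_complete(): the transaction is done once BUSY has cleared. */
    static bool mdio_complete(void)
    {
        return !mdio_busy();
    }

    int main(void)
    {
        int timeout = 100; /* assumed retry budget; the driver's value is not shown */

        /* 1. write MAC_MDIOSCAR with the port/device/register address (stubbed)
         * 2. write MAC_MDIOSCCDR with the data and start the transaction (stubbed)
         * 3. poll for completion */
        while (timeout--) {
            if (mdio_complete()) {
                printf("MDIO transaction finished\n");
                return 0;
            }
            /* the real driver delays between polls before retrying */
        }
        printf("MDIO transaction timed out\n");
        return 1;
    }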
125 static int axgbe_set_ext_mii_mode(struct axgbe_port *pdata, unsigned int port, in axgbe_set_ext_mii_mode() argument
141 AXGMAC_IOWRITE(pdata, MAC_MDIOCL22R, reg_val); in axgbe_set_ext_mii_mode()
146 static int axgbe_read_mmd_regs_v2(struct axgbe_port *pdata, in axgbe_read_mmd_regs_v2() argument
155 mmd_address = (pdata->mdio_mmd << 16) | (mmd_reg & 0xffff); in axgbe_read_mmd_regs_v2()
167 index = mmd_address & ~pdata->xpcs_window_mask; in axgbe_read_mmd_regs_v2()
168 offset = pdata->xpcs_window + (mmd_address & pdata->xpcs_window_mask); in axgbe_read_mmd_regs_v2()
170 pthread_mutex_lock(&pdata->xpcs_mutex); in axgbe_read_mmd_regs_v2()
172 XPCS32_IOWRITE(pdata, pdata->xpcs_window_sel_reg, index); in axgbe_read_mmd_regs_v2()
173 mmd_data = XPCS16_IOREAD(pdata, offset); in axgbe_read_mmd_regs_v2()
175 pthread_mutex_unlock(&pdata->xpcs_mutex); in axgbe_read_mmd_regs_v2()
180 static void axgbe_write_mmd_regs_v2(struct axgbe_port *pdata, in axgbe_write_mmd_regs_v2() argument
189 mmd_address = (pdata->mdio_mmd << 16) | (mmd_reg & 0xffff); in axgbe_write_mmd_regs_v2()
201 index = mmd_address & ~pdata->xpcs_window_mask; in axgbe_write_mmd_regs_v2()
202 offset = pdata->xpcs_window + (mmd_address & pdata->xpcs_window_mask); in axgbe_write_mmd_regs_v2()
204 pthread_mutex_lock(&pdata->xpcs_mutex); in axgbe_write_mmd_regs_v2()
206 XPCS32_IOWRITE(pdata, pdata->xpcs_window_sel_reg, index); in axgbe_write_mmd_regs_v2()
207 XPCS16_IOWRITE(pdata, offset, mmd_data); in axgbe_write_mmd_regs_v2()
209 pthread_mutex_unlock(&pdata->xpcs_mutex); in axgbe_write_mmd_regs_v2()
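axgbe_read_mmd_regs_v2() and axgbe_write_mmd_regs_v2() reach the XPCS registers through a small window: the upper bits of the MMD address select the window (written to pdata->xpcs_window_sel_reg) and the lower bits form an offset inside it. The sketch below reproduces that address split in isolation, using an assumed window mask and base purely for illustration.

    #include <stdio.h>

    int main(void)
    {
        /* Assumed values; the real ones come from pdata->xpcs_window_mask
         * and pdata->xpcs_window. */
        unsigned int window_mask = 0x3ff;   /* 1 KiB window */
        unsigned int window_base = 0x1000;

        unsigned int mdio_mmd = 1;          /* e.g. PMA/PMD device */
        unsigned int mmd_reg  = 0x0007;

        /* Same arithmetic as the fragments at lines 155-168 and 189-202. */
        unsigned int mmd_address = (mdio_mmd << 16) | (mmd_reg & 0xffff);
        unsigned int index  = mmd_address & ~window_mask;                /* window select */
        unsigned int offset = window_base + (mmd_address & window_mask); /* MMIO offset */

        printf("mmd_address=0x%x index=0x%x offset=0x%x\n",
               mmd_address, index, offset);
        return 0;
    }

The driver serializes the window-select write and the data access with pdata->xpcs_mutex, since the window register is shared state between concurrent readers and writers.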
212 static int axgbe_read_mmd_regs(struct axgbe_port *pdata, int prtad, in axgbe_read_mmd_regs() argument
215 switch (pdata->vdata->xpcs_access) { in axgbe_read_mmd_regs()
221 return axgbe_read_mmd_regs_v2(pdata, prtad, mmd_reg); in axgbe_read_mmd_regs()
225 static void axgbe_write_mmd_regs(struct axgbe_port *pdata, int prtad, in axgbe_write_mmd_regs() argument
228 switch (pdata->vdata->xpcs_access) { in axgbe_write_mmd_regs()
234 return axgbe_write_mmd_regs_v2(pdata, prtad, mmd_reg, mmd_data); in axgbe_write_mmd_regs()
238 static int axgbe_set_speed(struct axgbe_port *pdata, int speed) in axgbe_set_speed() argument
256 if (AXGMAC_IOREAD_BITS(pdata, MAC_TCR, SS) != ss) in axgbe_set_speed()
257 AXGMAC_IOWRITE_BITS(pdata, MAC_TCR, SS, ss); in axgbe_set_speed()
262 static int axgbe_disable_tx_flow_control(struct axgbe_port *pdata) in axgbe_disable_tx_flow_control() argument
269 for (i = 0; i < pdata->rx_q_count; i++) in axgbe_disable_tx_flow_control()
270 AXGMAC_MTL_IOWRITE_BITS(pdata, i, MTL_Q_RQOMR, EHFC, 0); in axgbe_disable_tx_flow_control()
274 q_count = RTE_MIN(pdata->tx_q_count, in axgbe_disable_tx_flow_control()
278 reg_val = AXGMAC_IOREAD(pdata, reg); in axgbe_disable_tx_flow_control()
280 AXGMAC_IOWRITE(pdata, reg, reg_val); in axgbe_disable_tx_flow_control()
288 static int axgbe_enable_tx_flow_control(struct axgbe_port *pdata) in axgbe_enable_tx_flow_control() argument
295 for (i = 0; i < pdata->rx_q_count; i++) { in axgbe_enable_tx_flow_control()
299 if (pdata->rx_rfd[i]) in axgbe_enable_tx_flow_control()
302 AXGMAC_MTL_IOWRITE_BITS(pdata, i, MTL_Q_RQOMR, EHFC, ehfc); in axgbe_enable_tx_flow_control()
310 q_count = RTE_MIN(pdata->tx_q_count, in axgbe_enable_tx_flow_control()
314 reg_val = AXGMAC_IOREAD(pdata, reg); in axgbe_enable_tx_flow_control()
321 AXGMAC_IOWRITE(pdata, reg, reg_val); in axgbe_enable_tx_flow_control()
329 static int axgbe_disable_rx_flow_control(struct axgbe_port *pdata) in axgbe_disable_rx_flow_control() argument
331 AXGMAC_IOWRITE_BITS(pdata, MAC_RFCR, RFE, 0); in axgbe_disable_rx_flow_control()
336 static int axgbe_enable_rx_flow_control(struct axgbe_port *pdata) in axgbe_enable_rx_flow_control() argument
338 AXGMAC_IOWRITE_BITS(pdata, MAC_RFCR, RFE, 1); in axgbe_enable_rx_flow_control()
343 static int axgbe_config_tx_flow_control(struct axgbe_port *pdata) in axgbe_config_tx_flow_control() argument
345 if (pdata->tx_pause) in axgbe_config_tx_flow_control()
346 axgbe_enable_tx_flow_control(pdata); in axgbe_config_tx_flow_control()
348 axgbe_disable_tx_flow_control(pdata); in axgbe_config_tx_flow_control()
353 static int axgbe_config_rx_flow_control(struct axgbe_port *pdata) in axgbe_config_rx_flow_control() argument
355 if (pdata->rx_pause) in axgbe_config_rx_flow_control()
356 axgbe_enable_rx_flow_control(pdata); in axgbe_config_rx_flow_control()
358 axgbe_disable_rx_flow_control(pdata); in axgbe_config_rx_flow_control()
363 static void axgbe_config_flow_control(struct axgbe_port *pdata) in axgbe_config_flow_control() argument
365 axgbe_config_tx_flow_control(pdata); in axgbe_config_flow_control()
366 axgbe_config_rx_flow_control(pdata); in axgbe_config_flow_control()
368 AXGMAC_IOWRITE_BITS(pdata, MAC_RFCR, PFCE, 0); in axgbe_config_flow_control()
371 static void axgbe_queue_flow_control_threshold(struct axgbe_port *pdata, in axgbe_queue_flow_control_threshold() argument
378 frame_fifo_size = AXGMAC_FLOW_CONTROL_ALIGN(axgbe_get_max_frame(pdata)); in axgbe_queue_flow_control_threshold()
387 pdata->rx_rfa[queue] = 0; in axgbe_queue_flow_control_threshold()
388 pdata->rx_rfd[queue] = 0; in axgbe_queue_flow_control_threshold()
394 pdata->rx_rfa[queue] = 0; /* Full - 1024 bytes */ in axgbe_queue_flow_control_threshold()
395 pdata->rx_rfd[queue] = 1; /* Full - 1536 bytes */ in axgbe_queue_flow_control_threshold()
401 pdata->rx_rfa[queue] = 2; /* Full - 2048 bytes */ in axgbe_queue_flow_control_threshold()
402 pdata->rx_rfd[queue] = 5; /* Full - 3584 bytes */ in axgbe_queue_flow_control_threshold()
422 pdata->rx_rfa[queue] = AXGMAC_FLOW_CONTROL_VALUE(rfa); in axgbe_queue_flow_control_threshold()
423 pdata->rx_rfd[queue] = AXGMAC_FLOW_CONTROL_VALUE(rfd); in axgbe_queue_flow_control_threshold()
426 static void axgbe_calculate_flow_control_threshold(struct axgbe_port *pdata) in axgbe_calculate_flow_control_threshold() argument
431 for (i = 0; i < pdata->rx_q_count; i++) { in axgbe_calculate_flow_control_threshold()
432 q_fifo_size = (pdata->fifo + 1) * AXGMAC_FIFO_UNIT; in axgbe_calculate_flow_control_threshold()
434 axgbe_queue_flow_control_threshold(pdata, i, q_fifo_size); in axgbe_calculate_flow_control_threshold()
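axgbe_calculate_flow_control_threshold() converts the encoded per-queue FIFO value stored in pdata->fifo (see axgbe_config_rx_fifo_size() further down) back into a byte count with (fifo + 1) * AXGMAC_FIFO_UNIT. A quick check of that decode, with the FIFO granularity assumed to be the usual 256 bytes:

    #include <stdio.h>

    #define AXGMAC_FIFO_UNIT 256u   /* assumed FIFO granularity in bytes */

    int main(void)
    {
        /* Mirrors q_fifo_size = (pdata->fifo + 1) * AXGMAC_FIFO_UNIT. */
        for (unsigned int fifo = 0; fifo <= 3; fifo++)
            printf("encoded %u -> %u bytes per queue\n",
                   fifo, (fifo + 1) * AXGMAC_FIFO_UNIT);
        return 0;
    }

The resulting q_fifo_size feeds axgbe_queue_flow_control_threshold() above, which disables hardware flow control for very small FIFOs, uses fixed activation/deactivation gaps (Full - 1024/1536 bytes, then Full - 2048/3584 bytes) for mid-size FIFOs, and otherwise programs computed RFA/RFD values.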
438 static void axgbe_config_flow_control_threshold(struct axgbe_port *pdata) in axgbe_config_flow_control_threshold() argument
442 for (i = 0; i < pdata->rx_q_count; i++) { in axgbe_config_flow_control_threshold()
443 AXGMAC_MTL_IOWRITE_BITS(pdata, i, MTL_Q_RQFCR, RFA, in axgbe_config_flow_control_threshold()
444 pdata->rx_rfa[i]); in axgbe_config_flow_control_threshold()
445 AXGMAC_MTL_IOWRITE_BITS(pdata, i, MTL_Q_RQFCR, RFD, in axgbe_config_flow_control_threshold()
446 pdata->rx_rfd[i]); in axgbe_config_flow_control_threshold()
450 static int axgbe_enable_rx_vlan_stripping(struct axgbe_port *pdata) in axgbe_enable_rx_vlan_stripping() argument
453 AXGMAC_IOWRITE_BITS(pdata, MAC_VLANTR, EVLRXS, 1); in axgbe_enable_rx_vlan_stripping()
456 AXGMAC_IOWRITE_BITS(pdata, MAC_VLANTR, DOVLTC, 1); in axgbe_enable_rx_vlan_stripping()
459 AXGMAC_IOWRITE_BITS(pdata, MAC_VLANTR, ERSVLM, 0); in axgbe_enable_rx_vlan_stripping()
462 AXGMAC_IOWRITE_BITS(pdata, MAC_VLANTR, ESVL, 0); in axgbe_enable_rx_vlan_stripping()
465 AXGMAC_IOWRITE_BITS(pdata, MAC_VLANTR, EVLS, 0x3); in axgbe_enable_rx_vlan_stripping()
469 static int axgbe_disable_rx_vlan_stripping(struct axgbe_port *pdata) in axgbe_disable_rx_vlan_stripping() argument
471 AXGMAC_IOWRITE_BITS(pdata, MAC_VLANTR, EVLS, 0); in axgbe_disable_rx_vlan_stripping()
475 static int axgbe_enable_rx_vlan_filtering(struct axgbe_port *pdata) in axgbe_enable_rx_vlan_filtering() argument
478 AXGMAC_IOWRITE_BITS(pdata, MAC_PFR, VTFE, 1); in axgbe_enable_rx_vlan_filtering()
481 AXGMAC_IOWRITE_BITS(pdata, MAC_VLANTR, VTHM, 1); in axgbe_enable_rx_vlan_filtering()
484 AXGMAC_IOWRITE_BITS(pdata, MAC_VLANTR, VTIM, 0); in axgbe_enable_rx_vlan_filtering()
487 AXGMAC_IOWRITE_BITS(pdata, MAC_VLANTR, ETV, 1); in axgbe_enable_rx_vlan_filtering()
495 AXGMAC_IOWRITE_BITS(pdata, MAC_VLANTR, VL, 1); in axgbe_enable_rx_vlan_filtering()
499 static int axgbe_disable_rx_vlan_filtering(struct axgbe_port *pdata) in axgbe_disable_rx_vlan_filtering() argument
502 AXGMAC_IOWRITE_BITS(pdata, MAC_PFR, VTFE, 0); in axgbe_disable_rx_vlan_filtering()
530 static int axgbe_update_vlan_hash_table(struct axgbe_port *pdata) in axgbe_update_vlan_hash_table() argument
542 vid_valid = pdata->active_vlans[vid_idx]; in axgbe_update_vlan_hash_table()
547 vid, vid_idx, pdata->active_vlans[vid_idx]); in axgbe_update_vlan_hash_table()
558 AXGMAC_IOWRITE_BITS(pdata, MAC_VLANHTR, VLHT, vlan_hash_table); in axgbe_update_vlan_hash_table()
559 reg = AXGMAC_IOREAD(pdata, MAC_VLANHTR); in axgbe_update_vlan_hash_table()
564 static int __axgbe_exit(struct axgbe_port *pdata) in __axgbe_exit() argument
569 AXGMAC_IOWRITE_BITS(pdata, DMA_MR, SWR, 1); in __axgbe_exit()
573 while (--count && AXGMAC_IOREAD_BITS(pdata, DMA_MR, SWR)) in __axgbe_exit()
582 static int axgbe_exit(struct axgbe_port *pdata) in axgbe_exit() argument
589 ret = __axgbe_exit(pdata); in axgbe_exit()
593 return __axgbe_exit(pdata); in axgbe_exit()
596 static int axgbe_flush_tx_queues(struct axgbe_port *pdata) in axgbe_flush_tx_queues() argument
600 if (AXGMAC_GET_BITS(pdata->hw_feat.version, MAC_VR, SNPSVER) < 0x21) in axgbe_flush_tx_queues()
603 for (i = 0; i < pdata->tx_q_count; i++) in axgbe_flush_tx_queues()
604 AXGMAC_MTL_IOWRITE_BITS(pdata, i, MTL_Q_TQOMR, FTQ, 1); in axgbe_flush_tx_queues()
607 for (i = 0; i < pdata->tx_q_count; i++) { in axgbe_flush_tx_queues()
609 while (--count && AXGMAC_MTL_IOREAD_BITS(pdata, i, in axgbe_flush_tx_queues()
620 static void axgbe_config_dma_bus(struct axgbe_port *pdata) in axgbe_config_dma_bus() argument
623 AXGMAC_IOWRITE_BITS(pdata, DMA_SBMR, EAME, 1); in axgbe_config_dma_bus()
626 AXGMAC_IOWRITE_BITS(pdata, DMA_SBMR, RD_OSR, 0x3f); in axgbe_config_dma_bus()
627 AXGMAC_IOWRITE_BITS(pdata, DMA_SBMR, WR_OSR, 0x3f); in axgbe_config_dma_bus()
630 AXGMAC_IOWRITE_BITS(pdata, DMA_SBMR, UNDEF, 1); in axgbe_config_dma_bus()
631 AXGMAC_IOWRITE_BITS(pdata, DMA_SBMR, BLEN_32, 1); in axgbe_config_dma_bus()
632 AXGMAC_IOWRITE_BITS(pdata, DMA_SBMR, AAL, 1); in axgbe_config_dma_bus()
635 static void axgbe_config_dma_cache(struct axgbe_port *pdata) in axgbe_config_dma_cache() argument
641 AXGMAC_IOWRITE(pdata, DMA_AXIARCR, arcache); in axgbe_config_dma_cache()
651 AXGMAC_IOWRITE(pdata, DMA_AXIAWCR, awcache); in axgbe_config_dma_cache()
657 AXGMAC_IOWRITE(pdata, DMA_AXIAWRCR, arwcache); in axgbe_config_dma_cache()
660 static void axgbe_config_edma_control(struct axgbe_port *pdata) in axgbe_config_edma_control() argument
662 AXGMAC_IOWRITE(pdata, EDMA_TX_CONTROL, 0x5); in axgbe_config_edma_control()
663 AXGMAC_IOWRITE(pdata, EDMA_RX_CONTROL, 0x5); in axgbe_config_edma_control()
666 static int axgbe_config_osp_mode(struct axgbe_port *pdata) in axgbe_config_osp_mode() argument
674 for (i = 0; i < pdata->eth_dev->data->nb_tx_queues; i++) { in axgbe_config_osp_mode()
675 txq = pdata->eth_dev->data->tx_queues[i]; in axgbe_config_osp_mode()
677 pdata->tx_osp_mode); in axgbe_config_osp_mode()
683 static int axgbe_config_pblx8(struct axgbe_port *pdata) in axgbe_config_pblx8() argument
688 for (i = 0; i < pdata->eth_dev->data->nb_tx_queues; i++) { in axgbe_config_pblx8()
689 txq = pdata->eth_dev->data->tx_queues[i]; in axgbe_config_pblx8()
691 pdata->pblx8); in axgbe_config_pblx8()
696 static int axgbe_config_tx_pbl_val(struct axgbe_port *pdata) in axgbe_config_tx_pbl_val() argument
701 for (i = 0; i < pdata->eth_dev->data->nb_tx_queues; i++) { in axgbe_config_tx_pbl_val()
702 txq = pdata->eth_dev->data->tx_queues[i]; in axgbe_config_tx_pbl_val()
704 pdata->tx_pbl); in axgbe_config_tx_pbl_val()
710 static int axgbe_config_rx_pbl_val(struct axgbe_port *pdata) in axgbe_config_rx_pbl_val() argument
715 for (i = 0; i < pdata->eth_dev->data->nb_rx_queues; i++) { in axgbe_config_rx_pbl_val()
716 rxq = pdata->eth_dev->data->rx_queues[i]; in axgbe_config_rx_pbl_val()
718 pdata->rx_pbl); in axgbe_config_rx_pbl_val()
724 static void axgbe_config_rx_buffer_size(struct axgbe_port *pdata) in axgbe_config_rx_buffer_size() argument
729 for (i = 0; i < pdata->eth_dev->data->nb_rx_queues; i++) { in axgbe_config_rx_buffer_size()
730 rxq = pdata->eth_dev->data->rx_queues[i]; in axgbe_config_rx_buffer_size()
737 if (rxq->buf_size > pdata->rx_buf_size) in axgbe_config_rx_buffer_size()
738 pdata->rx_buf_size = rxq->buf_size; in axgbe_config_rx_buffer_size()
745 static int axgbe_write_rss_reg(struct axgbe_port *pdata, unsigned int type, in axgbe_write_rss_reg() argument
750 if (AXGMAC_IOREAD_BITS(pdata, MAC_RSSAR, OB)) in axgbe_write_rss_reg()
753 AXGMAC_IOWRITE(pdata, MAC_RSSDR, val); in axgbe_write_rss_reg()
755 AXGMAC_IOWRITE_BITS(pdata, MAC_RSSAR, RSSIA, index); in axgbe_write_rss_reg()
756 AXGMAC_IOWRITE_BITS(pdata, MAC_RSSAR, ADDRT, type); in axgbe_write_rss_reg()
757 AXGMAC_IOWRITE_BITS(pdata, MAC_RSSAR, CT, 0); in axgbe_write_rss_reg()
758 AXGMAC_IOWRITE_BITS(pdata, MAC_RSSAR, OB, 1); in axgbe_write_rss_reg()
762 if (!AXGMAC_IOREAD_BITS(pdata, MAC_RSSAR, OB)) in axgbe_write_rss_reg()
771 int axgbe_write_rss_hash_key(struct axgbe_port *pdata) in axgbe_write_rss_hash_key() argument
774 unsigned int key_regs = sizeof(pdata->rss_key) / sizeof(u32); in axgbe_write_rss_hash_key()
778 rss_conf = &pdata->eth_dev->data->dev_conf.rx_adv_conf.rss_conf; in axgbe_write_rss_hash_key()
781 key = (unsigned int *)&pdata->rss_key; in axgbe_write_rss_hash_key()
786 ret = axgbe_write_rss_reg(pdata, AXGBE_RSS_HASH_KEY_TYPE, in axgbe_write_rss_hash_key()
795 int axgbe_write_rss_lookup_table(struct axgbe_port *pdata) in axgbe_write_rss_lookup_table() argument
800 for (i = 0; i < ARRAY_SIZE(pdata->rss_table); i++) { in axgbe_write_rss_lookup_table()
801 ret = axgbe_write_rss_reg(pdata, in axgbe_write_rss_lookup_table()
803 pdata->rss_table[i]); in axgbe_write_rss_lookup_table()
811 static int axgbe_enable_rss(struct axgbe_port *pdata) in axgbe_enable_rss() argument
816 ret = axgbe_write_rss_hash_key(pdata); in axgbe_enable_rss()
821 ret = axgbe_write_rss_lookup_table(pdata); in axgbe_enable_rss()
826 AXGMAC_IOWRITE(pdata, MAC_RSSCR, pdata->rss_options); in axgbe_enable_rss()
829 AXGMAC_IOWRITE_BITS(pdata, MAC_RSSCR, RSSE, 1); in axgbe_enable_rss()
834 static void axgbe_rss_options(struct axgbe_port *pdata) in axgbe_rss_options() argument
839 rss_conf = &pdata->eth_dev->data->dev_conf.rx_adv_conf.rss_conf; in axgbe_rss_options()
840 pdata->rss_hf = rss_conf->rss_hf; in axgbe_rss_options()
844 AXGMAC_SET_BITS(pdata->rss_options, MAC_RSSCR, IP2TE, 1); in axgbe_rss_options()
846 AXGMAC_SET_BITS(pdata->rss_options, MAC_RSSCR, TCP4TE, 1); in axgbe_rss_options()
848 AXGMAC_SET_BITS(pdata->rss_options, MAC_RSSCR, UDP4TE, 1); in axgbe_rss_options()
851 static int axgbe_config_rss(struct axgbe_port *pdata) in axgbe_config_rss() argument
855 if (pdata->rss_enable) { in axgbe_config_rss()
857 uint32_t *key = (uint32_t *)pdata->rss_key; in axgbe_config_rss()
859 for (i = 0; i < sizeof(pdata->rss_key) / 4; i++) in axgbe_config_rss()
862 AXGMAC_SET_BITS(pdata->rss_table[i], MAC_RSSDR, DMCH, in axgbe_config_rss()
863 i % pdata->eth_dev->data->nb_rx_queues); in axgbe_config_rss()
864 axgbe_rss_options(pdata); in axgbe_config_rss()
865 if (axgbe_enable_rss(pdata)) { in axgbe_config_rss()
870 AXGMAC_IOWRITE_BITS(pdata, MAC_RSSCR, RSSE, 0); in axgbe_config_rss()
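axgbe_config_rss() spreads receive queues over the RSS indirection table by storing the DMA-channel number i % nb_rx_queues in each entry before programming the table through axgbe_write_rss_lookup_table(). A standalone sketch of that round-robin fill, with the table size assumed (the real size is ARRAY_SIZE(pdata->rss_table)):

    #include <stdio.h>

    #define RSS_TABLE_ENTRIES 256   /* assumed indirection table size */

    int main(void)
    {
        unsigned int nb_rx_queues = 4;
        unsigned int rss_table[RSS_TABLE_ENTRIES];

        /* Mirrors: DMCH field of rss_table[i] = i % nb_rx_queues. */
        for (unsigned int i = 0; i < RSS_TABLE_ENTRIES; i++)
            rss_table[i] = i % nb_rx_queues;

        /* The first few entries cycle 0,1,2,3,0,1,... */
        for (unsigned int i = 0; i < 8; i++)
            printf("entry %u -> queue %u\n", i, rss_table[i]);
        return 0;
    }

axgbe_rss_options() then sets IP2TE/TCP4TE/UDP4TE in pdata->rss_options from the configured rss_hf flags, and axgbe_enable_rss() writes the hash key, the lookup table, and MAC_RSSCR before finally setting RSSE=1.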
876 static void axgbe_enable_dma_interrupts(struct axgbe_port *pdata) in axgbe_enable_dma_interrupts() argument
882 for (i = 0; i < pdata->eth_dev->data->nb_tx_queues; i++) { in axgbe_enable_dma_interrupts()
883 txq = pdata->eth_dev->data->tx_queues[i]; in axgbe_enable_dma_interrupts()
913 static void wrapper_tx_desc_init(struct axgbe_port *pdata) in wrapper_tx_desc_init() argument
918 for (i = 0; i < pdata->eth_dev->data->nb_tx_queues; i++) { in wrapper_tx_desc_init()
919 txq = pdata->eth_dev->data->tx_queues[i]; in wrapper_tx_desc_init()
932 static int wrapper_rx_desc_init(struct axgbe_port *pdata) in wrapper_rx_desc_init() argument
939 for (i = 0; i < pdata->eth_dev->data->nb_rx_queues; i++) { in wrapper_rx_desc_init()
940 rxq = pdata->eth_dev->data->rx_queues[i]; in wrapper_rx_desc_init()
953 axgbe_dev_rx_queue_release(pdata->eth_dev, i); in wrapper_rx_desc_init()
989 static void axgbe_config_mtl_mode(struct axgbe_port *pdata) in axgbe_config_mtl_mode() argument
994 AXGMAC_IOWRITE_BITS(pdata, MTL_OMR, ETSALG, MTL_ETSALG_WRR); in axgbe_config_mtl_mode()
997 for (i = 0; i < pdata->hw_feat.tc_cnt; i++) { in axgbe_config_mtl_mode()
998 AXGMAC_MTL_IOWRITE_BITS(pdata, i, MTL_TC_ETSCR, TSA, in axgbe_config_mtl_mode()
1000 AXGMAC_MTL_IOWRITE_BITS(pdata, i, MTL_TC_QWR, QW, 1); in axgbe_config_mtl_mode()
1004 AXGMAC_IOWRITE_BITS(pdata, MTL_OMR, RAA, MTL_RAA_SP); in axgbe_config_mtl_mode()
1007 static int axgbe_config_tsf_mode(struct axgbe_port *pdata, unsigned int val) in axgbe_config_tsf_mode() argument
1011 for (i = 0; i < pdata->tx_q_count; i++) in axgbe_config_tsf_mode()
1012 AXGMAC_MTL_IOWRITE_BITS(pdata, i, MTL_Q_TQOMR, TSF, val); in axgbe_config_tsf_mode()
1017 static int axgbe_config_rsf_mode(struct axgbe_port *pdata, unsigned int val) in axgbe_config_rsf_mode() argument
1021 for (i = 0; i < pdata->rx_q_count; i++) in axgbe_config_rsf_mode()
1022 AXGMAC_MTL_IOWRITE_BITS(pdata, i, MTL_Q_RQOMR, RSF, val); in axgbe_config_rsf_mode()
1027 static int axgbe_config_tx_threshold(struct axgbe_port *pdata, in axgbe_config_tx_threshold() argument
1032 for (i = 0; i < pdata->tx_q_count; i++) in axgbe_config_tx_threshold()
1033 AXGMAC_MTL_IOWRITE_BITS(pdata, i, MTL_Q_TQOMR, TTC, val); in axgbe_config_tx_threshold()
1038 static int axgbe_config_rx_threshold(struct axgbe_port *pdata, in axgbe_config_rx_threshold() argument
1043 for (i = 0; i < pdata->rx_q_count; i++) in axgbe_config_rx_threshold()
1044 AXGMAC_MTL_IOWRITE_BITS(pdata, i, MTL_Q_RQOMR, RTC, val); in axgbe_config_rx_threshold()
1050 static void axgbe_config_rx_fifo_size(struct axgbe_port *pdata) in axgbe_config_rx_fifo_size() argument
1056 fifo_size = RTE_MIN(pdata->rx_max_fifo_size, in axgbe_config_rx_fifo_size()
1057 pdata->hw_feat.rx_fifo_size); in axgbe_config_rx_fifo_size()
1058 q_fifo_size = fifo_size / pdata->rx_q_count; in axgbe_config_rx_fifo_size()
1069 for (i = 0; i < pdata->rx_q_count; i++) in axgbe_config_rx_fifo_size()
1070 AXGMAC_MTL_IOWRITE_BITS(pdata, i, MTL_Q_RQOMR, RQS, p_fifo); in axgbe_config_rx_fifo_size()
1071 pdata->fifo = p_fifo; in axgbe_config_rx_fifo_size()
1074 axgbe_calculate_flow_control_threshold(pdata); in axgbe_config_rx_fifo_size()
1075 axgbe_config_flow_control_threshold(pdata); in axgbe_config_rx_fifo_size()
1078 pdata->rx_q_count, q_fifo_size); in axgbe_config_rx_fifo_size()
1081 static void axgbe_config_tx_fifo_size(struct axgbe_port *pdata) in axgbe_config_tx_fifo_size() argument
1087 fifo_size = RTE_MIN(pdata->tx_max_fifo_size, in axgbe_config_tx_fifo_size()
1088 pdata->hw_feat.tx_fifo_size); in axgbe_config_tx_fifo_size()
1089 q_fifo_size = fifo_size / pdata->tx_q_count; in axgbe_config_tx_fifo_size()
1100 for (i = 0; i < pdata->tx_q_count; i++) in axgbe_config_tx_fifo_size()
1101 AXGMAC_MTL_IOWRITE_BITS(pdata, i, MTL_Q_TQOMR, TQS, p_fifo); in axgbe_config_tx_fifo_size()
1104 pdata->tx_q_count, q_fifo_size); in axgbe_config_tx_fifo_size()
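Both axgbe_config_rx_fifo_size() and axgbe_config_tx_fifo_size() first clamp the usable FIFO to the hardware-reported limit, then split it evenly across the active queues before encoding the per-queue size into RQS/TQS. The arithmetic in isolation (the RQS/TQS encoding itself is not visible in the listing and is omitted here; the numbers below are illustrative only):

    #include <stdio.h>

    #define MIN(a, b) ((a) < (b) ? (a) : (b))

    int main(void)
    {
        /* Illustrative values; the real ones come from pdata->rx_max_fifo_size,
         * pdata->hw_feat.rx_fifo_size and pdata->rx_q_count. */
        unsigned int max_fifo_size = 81920;   /* configured cap, bytes */
        unsigned int hw_fifo_size  = 65536;   /* reported by hardware, bytes */
        unsigned int q_count       = 8;

        unsigned int fifo_size   = MIN(max_fifo_size, hw_fifo_size);
        unsigned int q_fifo_size = fifo_size / q_count;

        printf("%u bytes of FIFO -> %u bytes per queue\n", fifo_size, q_fifo_size);
        return 0;
    }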
1107 static void axgbe_config_queue_mapping(struct axgbe_port *pdata) in axgbe_config_queue_mapping() argument
1115 qptc = pdata->tx_q_count / pdata->hw_feat.tc_cnt; in axgbe_config_queue_mapping()
1116 qptc_extra = pdata->tx_q_count % pdata->hw_feat.tc_cnt; in axgbe_config_queue_mapping()
1118 for (i = 0, queue = 0; i < pdata->hw_feat.tc_cnt; i++) { in axgbe_config_queue_mapping()
1121 AXGMAC_MTL_IOWRITE_BITS(pdata, queue, MTL_Q_TQOMR, in axgbe_config_queue_mapping()
1126 AXGMAC_MTL_IOWRITE_BITS(pdata, queue, MTL_Q_TQOMR, in axgbe_config_queue_mapping()
1131 if (pdata->rss_enable) { in axgbe_config_queue_mapping()
1135 for (i = 0; i < pdata->rx_q_count;) { in axgbe_config_queue_mapping()
1139 (i != pdata->rx_q_count)) in axgbe_config_queue_mapping()
1142 AXGMAC_IOWRITE(pdata, reg, reg_val); in axgbe_config_queue_mapping()
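axgbe_config_queue_mapping() divides the Tx queues across the hardware traffic classes: qptc queues per class, plus one extra queue for each of the first qptc_extra classes. The loop body is truncated in the listing, so the distribution sketched below is the conventional pattern implied by the qptc/qptc_extra arithmetic and should be treated as an assumption.

    #include <stdio.h>

    int main(void)
    {
        unsigned int tx_q_count = 10;  /* illustrative */
        unsigned int tc_cnt     = 4;

        unsigned int qptc       = tx_q_count / tc_cnt;   /* 2 */
        unsigned int qptc_extra = tx_q_count % tc_cnt;   /* 2 */
        unsigned int queue      = 0;

        for (unsigned int tc = 0; tc < tc_cnt; tc++) {
            unsigned int count = qptc + (tc < qptc_extra ? 1 : 0);
            for (unsigned int j = 0; j < count; j++, queue++)
                printf("Tx queue %u -> traffic class %u\n", queue, tc);
        }
        return 0;
    }

When RSS is enabled, the tail of the function walks the Rx queues and programs their DMA-channel mapping with grouped register writes (the register name is truncated out of the listing).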
1150 static void axgbe_enable_mtl_interrupts(struct axgbe_port *pdata) in axgbe_enable_mtl_interrupts() argument
1155 q_count = RTE_MAX(pdata->hw_feat.tx_q_cnt, pdata->hw_feat.rx_q_cnt); in axgbe_enable_mtl_interrupts()
1158 mtl_q_isr = AXGMAC_MTL_IOREAD(pdata, i, MTL_Q_ISR); in axgbe_enable_mtl_interrupts()
1159 AXGMAC_MTL_IOWRITE(pdata, i, MTL_Q_ISR, mtl_q_isr); in axgbe_enable_mtl_interrupts()
1162 AXGMAC_MTL_IOWRITE(pdata, i, MTL_Q_IER, 0); in axgbe_enable_mtl_interrupts()
1177 void axgbe_set_mac_hash_table(struct axgbe_port *pdata, u8 *addr, bool add) in axgbe_set_mac_hash_table() argument
1182 crc >>= pdata->hash_table_shift; in axgbe_set_mac_hash_table()
1187 pdata->uc_hash_table[htable_index] |= htable_bitmask; in axgbe_set_mac_hash_table()
1188 pdata->uc_hash_mac_addr++; in axgbe_set_mac_hash_table()
1190 pdata->uc_hash_table[htable_index] &= ~htable_bitmask; in axgbe_set_mac_hash_table()
1191 pdata->uc_hash_mac_addr--; in axgbe_set_mac_hash_table()
1196 AXGMAC_IOWRITE(pdata, MAC_HTR(htable_index), in axgbe_set_mac_hash_table()
1197 pdata->uc_hash_table[htable_index]); in axgbe_set_mac_hash_table()
1200 void axgbe_set_mac_addn_addr(struct axgbe_port *pdata, u8 *addr, uint32_t index) in axgbe_set_mac_addn_addr() argument
1225 AXGMAC_IOWRITE(pdata, MAC_MACAHR(index), mac_addr_hi); in axgbe_set_mac_addn_addr()
1226 AXGMAC_IOWRITE(pdata, MAC_MACALR(index), mac_addr_lo); in axgbe_set_mac_addn_addr()
1229 static int axgbe_set_mac_address(struct axgbe_port *pdata, u8 *addr) in axgbe_set_mac_address() argument
1237 AXGMAC_IOWRITE(pdata, MAC_MACA0HR, mac_addr_hi); in axgbe_set_mac_address()
1238 AXGMAC_IOWRITE(pdata, MAC_MACA0LR, mac_addr_lo); in axgbe_set_mac_address()
1243 static void axgbe_config_mac_hash_table(struct axgbe_port *pdata) in axgbe_config_mac_hash_table() argument
1245 struct axgbe_hw_features *hw_feat = &pdata->hw_feat; in axgbe_config_mac_hash_table()
1247 pdata->hash_table_shift = 0; in axgbe_config_mac_hash_table()
1248 pdata->hash_table_count = 0; in axgbe_config_mac_hash_table()
1249 pdata->uc_hash_mac_addr = 0; in axgbe_config_mac_hash_table()
1250 memset(pdata->uc_hash_table, 0, sizeof(pdata->uc_hash_table)); in axgbe_config_mac_hash_table()
1253 pdata->hash_table_shift = 26 - (hw_feat->hash_table_size >> 7); in axgbe_config_mac_hash_table()
1254 pdata->hash_table_count = hw_feat->hash_table_size / 32; in axgbe_config_mac_hash_table()
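axgbe_config_mac_hash_table() derives two values from the reported hash table size: the CRC shift, 26 - (size >> 7), and the number of 32-bit MAC_HTR registers, size / 32. Evaluating the two formulas for a few plausible sizes:

    #include <stdio.h>

    int main(void)
    {
        /* Example values for hw_feat->hash_table_size. */
        unsigned int sizes[] = { 64, 128, 256 };

        for (unsigned int i = 0; i < 3; i++) {
            unsigned int size  = sizes[i];
            unsigned int shift = 26 - (size >> 7);   /* pdata->hash_table_shift */
            unsigned int count = size / 32;          /* pdata->hash_table_count */
            printf("size %3u -> shift %u, %u MAC_HTR registers\n",
                   size, shift, count);
        }
        return 0;
    }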
1258 static void axgbe_config_mac_address(struct axgbe_port *pdata) in axgbe_config_mac_address() argument
1260 axgbe_set_mac_address(pdata, pdata->mac_addr.addr_bytes); in axgbe_config_mac_address()
1263 static void axgbe_config_jumbo_enable(struct axgbe_port *pdata) in axgbe_config_jumbo_enable() argument
1267 val = (pdata->rx_buf_size > AXGMAC_STD_PACKET_MTU) ? 1 : 0; in axgbe_config_jumbo_enable()
1269 AXGMAC_IOWRITE_BITS(pdata, MAC_RCR, JE, val); in axgbe_config_jumbo_enable()
1272 static void axgbe_config_mac_speed(struct axgbe_port *pdata) in axgbe_config_mac_speed() argument
1274 axgbe_set_speed(pdata, pdata->phy_speed); in axgbe_config_mac_speed()
1277 static void axgbe_config_checksum_offload(struct axgbe_port *pdata) in axgbe_config_checksum_offload() argument
1279 if (pdata->rx_csum_enable) in axgbe_config_checksum_offload()
1280 AXGMAC_IOWRITE_BITS(pdata, MAC_RCR, IPC, 1); in axgbe_config_checksum_offload()
1282 AXGMAC_IOWRITE_BITS(pdata, MAC_RCR, IPC, 0); in axgbe_config_checksum_offload()
1285 static void axgbe_config_mmc(struct axgbe_port *pdata) in axgbe_config_mmc() argument
1287 struct axgbe_mmc_stats *stats = &pdata->mmc_stats; in axgbe_config_mmc()
1293 AXGMAC_IOWRITE_BITS(pdata, MMC_CR, ROR, 1); in axgbe_config_mmc()
1296 AXGMAC_IOWRITE_BITS(pdata, MMC_CR, CR, 1); in axgbe_config_mmc()
1299 static int axgbe_init(struct axgbe_port *pdata) in axgbe_init() argument
1304 ret = axgbe_flush_tx_queues(pdata); in axgbe_init()
1308 axgbe_config_dma_bus(pdata); in axgbe_init()
1309 axgbe_config_dma_cache(pdata); in axgbe_init()
1310 axgbe_config_edma_control(pdata); in axgbe_init()
1311 axgbe_config_osp_mode(pdata); in axgbe_init()
1312 axgbe_config_pblx8(pdata); in axgbe_init()
1313 axgbe_config_tx_pbl_val(pdata); in axgbe_init()
1314 axgbe_config_rx_pbl_val(pdata); in axgbe_init()
1315 axgbe_config_rx_buffer_size(pdata); in axgbe_init()
1316 axgbe_config_rss(pdata); in axgbe_init()
1317 wrapper_tx_desc_init(pdata); in axgbe_init()
1318 ret = wrapper_rx_desc_init(pdata); in axgbe_init()
1321 axgbe_enable_dma_interrupts(pdata); in axgbe_init()
1324 axgbe_config_mtl_mode(pdata); in axgbe_init()
1325 axgbe_config_queue_mapping(pdata); in axgbe_init()
1326 axgbe_config_tsf_mode(pdata, pdata->tx_sf_mode); in axgbe_init()
1327 axgbe_config_rsf_mode(pdata, pdata->rx_sf_mode); in axgbe_init()
1328 axgbe_config_tx_threshold(pdata, pdata->tx_threshold); in axgbe_init()
1329 axgbe_config_rx_threshold(pdata, pdata->rx_threshold); in axgbe_init()
1330 axgbe_config_tx_fifo_size(pdata); in axgbe_init()
1331 axgbe_config_rx_fifo_size(pdata); in axgbe_init()
1333 axgbe_enable_mtl_interrupts(pdata); in axgbe_init()
1336 axgbe_config_mac_hash_table(pdata); in axgbe_init()
1337 axgbe_config_mac_address(pdata); in axgbe_init()
1338 axgbe_config_jumbo_enable(pdata); in axgbe_init()
1339 axgbe_config_flow_control(pdata); in axgbe_init()
1340 axgbe_config_mac_speed(pdata); in axgbe_init()
1341 axgbe_config_checksum_offload(pdata); in axgbe_init()
1342 axgbe_config_mmc(pdata); in axgbe_init()