Lines matching refs:dev — each entry below is one reference to dev in the mt7915 DMA code of the mt76 MediaTek driver: the source line number, the matching line, and the enclosing function; local / argument mark the lines where dev is declared.

11 struct mt7915_dev *dev = phy->dev; in mt7915_init_tx_queues() local
13 if (mtk_wed_device_active(&phy->dev->mt76.mmio.wed)) { in mt7915_init_tx_queues()
14 if (is_mt798x(&dev->mt76)) in mt7915_init_tx_queues()
28 struct mt7915_dev *dev; in mt7915_poll_tx() local
30 dev = container_of(napi, struct mt7915_dev, mt76.tx_napi); in mt7915_poll_tx()
32 mt76_connac_tx_cleanup(&dev->mt76); in mt7915_poll_tx()
34 mt7915_irq_enable(dev, MT_INT_TX_DONE_MCU); in mt7915_poll_tx()
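
The matches in mt7915_poll_tx() (lines 28-34) show the standard NAPI pattern: the poll callback receives only the embedded napi_struct, and container_of() recovers the enclosing device before TX cleanup and re-enabling the MCU TX-done interrupt. Below is a minimal stand-alone model of that recovery step; the struct layouts are placeholders, not the real mt76 ones.

```c
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct napi_struct { int weight; };                 /* stand-in for the kernel type */
struct mt76_dev    { struct napi_struct tx_napi; };
struct mt7915_dev  { int irq_mask; struct mt76_dev mt76; };

static int poll_tx(struct napi_struct *napi)
{
	/* same recovery step as mt7915_poll_tx(): napi -> enclosing device */
	struct mt7915_dev *dev =
		container_of(napi, struct mt7915_dev, mt76.tx_napi);

	printf("recovered dev, irq_mask=%d\n", dev->irq_mask);
	return 0;
}

int main(void)
{
	struct mt7915_dev dev = { .irq_mask = 42 };

	return poll_tx(&dev.mt76.tx_napi);
}
```

The subtraction of offsetof() is what lets one napi_struct pointer serve both the NAPI core and the driver without any extra back-pointer field.
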
39 static void mt7915_dma_config(struct mt7915_dev *dev) in mt7915_dma_config() argument
43 dev->wfdma_mask |= (1 << (q)); \ in mt7915_dma_config()
44 dev->q_int_mask[(q)] = int; \ in mt7915_dma_config()
45 dev->q_id[(q)] = id; \ in mt7915_dma_config()
52 if (is_mt7915(&dev->mt76)) { in mt7915_dma_config()
85 if (is_mt7916(&dev->mt76) && mtk_wed_device_active(&dev->mt76.mmio.wed)) { in mt7915_dma_config()
90 if (dev->hif2) in mt7915_dma_config()
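
Lines 43-45 are the body of the per-queue config helper that mt7915_dma_config() uses to record, for each software queue q, a bit in dev->wfdma_mask when its wfdma flag is set, the interrupt bit(s) serving it, and its hardware ring id. The sketch below reconstructs that helper from the fragments: the kernel macro apparently names the third parameter int (line 44) and closes over a function-local dev, whereas this standalone version renames it int_mask and passes dev explicitly; the demo values are made up.

```c
#include <stdio.h>

#define NUM_Q 8

struct mt7915_dev {
	unsigned int wfdma_mask;        /* bit q set when queue q has wfdma set */
	unsigned int q_int_mask[NUM_Q]; /* interrupt bit(s) serving queue q     */
	unsigned int q_id[NUM_Q];       /* hardware ring id for queue q         */
};

/* reconstruction of the helper whose body appears at lines 43-45 */
#define Q_CONFIG(dev, q, wfdma, int_mask, id) do {		\
		if (wfdma)					\
			(dev)->wfdma_mask |= (1 << (q));	\
		(dev)->q_int_mask[(q)] = (int_mask);		\
		(dev)->q_id[(q)] = (id);			\
	} while (0)

int main(void)
{
	struct mt7915_dev dev = { 0 };

	/* hypothetical values: queue 0 behind a WFDMA, irq bit 4, hw ring 16 */
	Q_CONFIG(&dev, 0, 1, 1u << 4, 16);
	printf("wfdma_mask=%#x int_mask=%#x id=%u\n",
	       dev.wfdma_mask, dev.q_int_mask[0], dev.q_id[0]);
	return 0;
}
```

Centralizing the mapping in one table-filling macro is what lets the chip-specific branches at lines 52, 85, and 90 differ only in the values they pass.
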
121 static void __mt7915_dma_prefetch(struct mt7915_dev *dev, u32 ofs) in __mt7915_dma_prefetch() argument
127 mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_FWDL) + ofs, PREFETCH(0x0, 0x4)); in __mt7915_dma_prefetch()
128 mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WM) + ofs, PREFETCH(0x40, 0x4)); in __mt7915_dma_prefetch()
129 mt76_wr(dev, MT_TXQ_EXT_CTRL(0) + ofs, PREFETCH(0x80, 0x4)); in __mt7915_dma_prefetch()
130 mt76_wr(dev, MT_TXQ_EXT_CTRL(1) + ofs, PREFETCH(0xc0, 0x4)); in __mt7915_dma_prefetch()
131 mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WA) + ofs, PREFETCH(0x100, 0x4)); in __mt7915_dma_prefetch()
133 mt76_wr(dev, MT_RXQ_BAND1_CTRL(MT_RXQ_MCU) + ofs, in __mt7915_dma_prefetch()
135 mt76_wr(dev, MT_RXQ_BAND1_CTRL(MT_RXQ_MCU_WA) + ofs, in __mt7915_dma_prefetch()
137 if (!is_mt7915(&dev->mt76)) { in __mt7915_dma_prefetch()
138 mt76_wr(dev, MT_RXQ_BAND1_CTRL(MT_RXQ_MAIN_WA) + ofs, in __mt7915_dma_prefetch()
142 mt76_wr(dev, MT_RXQ_BAND1_CTRL(MT_RXQ_BAND1_WA) + ofs, in __mt7915_dma_prefetch()
144 mt76_wr(dev, MT_RXQ_BAND1_CTRL(MT_RXQ_MAIN) + ofs, in __mt7915_dma_prefetch()
146 mt76_wr(dev, MT_RXQ_BAND1_CTRL(MT_RXQ_BAND1) + ofs, in __mt7915_dma_prefetch()
152 if (is_mt7915(&dev->mt76)) { in __mt7915_dma_prefetch()
154 mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WA) + ofs, in __mt7915_dma_prefetch()
156 mt76_wr(dev, MT_RXQ_BAND1_CTRL(MT_RXQ_BAND1_WA) + ofs, in __mt7915_dma_prefetch()
158 mt76_wr(dev, MT_RXQ_BAND1_CTRL(MT_RXQ_BAND1) + ofs, in __mt7915_dma_prefetch()
163 void mt7915_dma_prefetch(struct mt7915_dev *dev) in mt7915_dma_prefetch() argument
165 __mt7915_dma_prefetch(dev, 0); in mt7915_dma_prefetch()
166 if (dev->hif2) in mt7915_dma_prefetch()
167 __mt7915_dma_prefetch(dev, MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0)); in mt7915_dma_prefetch()
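
__mt7915_dma_prefetch() programs each ring's prefetch control register with a base offset and a FIFO depth, and mt7915_dma_prefetch() repeats the whole sequence at the PCIe1 register offset when a second HIF is present (line 167). The PREFETCH() packing below, base in the high half-word and depth in the low, is an assumption inferred from the call-site pattern (bases stepping by 0x40 with depth 0x4), so verify it against the driver's definition before relying on it.

```c
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* assumed packing: ring base offset in the high half, FIFO depth in the low */
#define PREFETCH(base, depth)	((uint32_t)(base) << 16 | (depth))

int main(void)
{
	/* the same values programmed at lines 127-130 */
	printf("FWDL=%#010" PRIx32 "\n", PREFETCH(0x0,  0x4));
	printf("WM  =%#010" PRIx32 "\n", PREFETCH(0x40, 0x4));
	printf("TXQ0=%#010" PRIx32 "\n", PREFETCH(0x80, 0x4));
	printf("TXQ1=%#010" PRIx32 "\n", PREFETCH(0xc0, 0x4));
	return 0;
}
```

The 0x40 stride between consecutive bases is consistent with each ring reserving depth 0x4 units of prefetch buffer, which would keep the regions non-overlapping.
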
170 static void mt7915_dma_disable(struct mt7915_dev *dev, bool rst) in mt7915_dma_disable() argument
172 struct mt76_dev *mdev = &dev->mt76; in mt7915_dma_disable()
175 if (dev->hif2) in mt7915_dma_disable()
180 mt76_clear(dev, MT_WFDMA0_RST, in mt7915_dma_disable()
184 mt76_set(dev, MT_WFDMA0_RST, in mt7915_dma_disable()
189 mt76_clear(dev, MT_WFDMA1_RST, in mt7915_dma_disable()
193 mt76_set(dev, MT_WFDMA1_RST, in mt7915_dma_disable()
198 if (dev->hif2) { in mt7915_dma_disable()
199 mt76_clear(dev, MT_WFDMA0_RST + hif1_ofs, in mt7915_dma_disable()
203 mt76_set(dev, MT_WFDMA0_RST + hif1_ofs, in mt7915_dma_disable()
208 mt76_clear(dev, MT_WFDMA1_RST + hif1_ofs, in mt7915_dma_disable()
212 mt76_set(dev, MT_WFDMA1_RST + hif1_ofs, in mt7915_dma_disable()
220 mt76_clear(dev, MT_WFDMA0_GLO_CFG, in mt7915_dma_disable()
228 mt76_clear(dev, MT_WFDMA1_GLO_CFG, in mt7915_dma_disable()
235 if (dev->hif2) { in mt7915_dma_disable()
236 mt76_clear(dev, MT_WFDMA0_GLO_CFG + hif1_ofs, in mt7915_dma_disable()
244 mt76_clear(dev, MT_WFDMA1_GLO_CFG + hif1_ofs, in mt7915_dma_disable()
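
mt7915_dma_disable() computes hif1_ofs once and, whenever dev->hif2 exists, repeats every WFDMA reset and GLO_CFG access at reg + hif1_ofs (lines 198-212 and 235-244 mirror lines 180-193 and 220-228). A compact model of that mirroring follows; the register bases and the GLO_CFG offset are assumptions modeled on the driver's MT_WFDMA0* macros, and a print stub stands in for the MMIO read-modify-write helper.

```c
#include <stdint.h>
#include <stdio.h>

/* assumed register bases, modeled on the driver's MT_WFDMA0* macros */
#define MT_WFDMA0(ofs)		(0xd4000u + (ofs))
#define MT_WFDMA0_PCIE1(ofs)	(0xd8000u + (ofs))
#define MT_WFDMA0_GLO_CFG	MT_WFDMA0(0x208)   /* placeholder offset */

struct mt7915_dev { int hif2; };

/* print stub standing in for the MMIO read-modify-write helper */
static void mt76_clear(struct mt7915_dev *dev, uint32_t reg, uint32_t mask)
{
	(void)dev;
	printf("clear %#x bits %#x\n", (unsigned)reg, (unsigned)mask);
}

static void dma_disable(struct mt7915_dev *dev)
{
	uint32_t hif1_ofs = 0;

	if (dev->hif2)
		hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);

	mt76_clear(dev, MT_WFDMA0_GLO_CFG, 0x3);           /* primary HIF  */
	if (dev->hif2)                                     /* mirror: HIF1 */
		mt76_clear(dev, MT_WFDMA0_GLO_CFG + hif1_ofs, 0x3);
}

int main(void)
{
	struct mt7915_dev dev = { .hif2 = 1 };

	dma_disable(&dev);
	return 0;
}
```

Folding the second interface into a single offset variable keeps the disable, enable, and start paths identical no matter how many host interfaces are populated.
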
253 int mt7915_dma_start(struct mt7915_dev *dev, bool reset, bool wed_reset) in mt7915_dma_start() argument
255 struct mt76_dev *mdev = &dev->mt76; in mt7915_dma_start()
259 if (dev->hif2) in mt7915_dma_start()
264 mt76_set(dev, MT_WFDMA0_GLO_CFG, in mt7915_dma_start()
271 mt76_set(dev, MT_WFDMA1_GLO_CFG, in mt7915_dma_start()
277 if (dev->hif2) { in mt7915_dma_start()
278 mt76_set(dev, MT_WFDMA0_GLO_CFG + hif1_ofs, in mt7915_dma_start()
285 mt76_set(dev, MT_WFDMA1_GLO_CFG + hif1_ofs, in mt7915_dma_start()
291 mt76_set(dev, MT_WFDMA_HOST_CONFIG, in mt7915_dma_start()
301 if (!dev->phy.mt76->band_idx) in mt7915_dma_start()
304 if (dev->dbdc_support || dev->phy.mt76->band_idx) in mt7915_dma_start()
307 if (mtk_wed_device_active(&dev->mt76.mmio.wed) && wed_reset) { in mt7915_dma_start()
312 if (!is_mt798x(&dev->mt76)) in mt7915_dma_start()
313 mt76_wr(dev, MT_INT_WED_MASK_CSR, wed_irq_mask); in mt7915_dma_start()
315 mt76_wr(dev, MT_INT_MASK_CSR, wed_irq_mask); in mt7915_dma_start()
317 ret = mt7915_mcu_wed_enable_rx_stats(dev); in mt7915_dma_start()
321 mtk_wed_device_start(&dev->mt76.mmio.wed, wed_irq_mask); in mt7915_dma_start()
326 mt7915_irq_enable(dev, irq_mask); in mt7915_dma_start()
327 mt7915_irq_disable(dev, 0); in mt7915_dma_start()
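
The tail of mt7915_dma_start() (lines 301-327) assembles the interrupt mask conditionally: band-0 RX bits only for the primary phy, band-1 bits when DBDC or a second band index is active, and, when WED is in use, the mask is additionally routed through the WED interrupt CSR and mtk_wed_device_start(). Here is a sketch of just the mask assembly; the bit values are placeholders, not the real MT_INT_* definitions.

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define INT_MCU		(1u << 0)   /* placeholder for the MCU irq bits    */
#define INT_RX_BAND0	(1u << 1)   /* placeholder for band-0 RX-done bits */
#define INT_RX_BAND1	(1u << 2)   /* placeholder for band-1 RX-done bits */

static uint32_t build_irq_mask(bool dbdc_support, int band_idx)
{
	uint32_t mask = INT_MCU;

	if (!band_idx)                     /* mirrors the check at line 301 */
		mask |= INT_RX_BAND0;
	if (dbdc_support || band_idx)      /* mirrors the check at line 304 */
		mask |= INT_RX_BAND1;
	return mask;
}

int main(void)
{
	printf("single band0: %#x\n", build_irq_mask(false, 0));
	printf("dbdc:         %#x\n", build_irq_mask(true, 0));
	return 0;
}
```
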
332 static int mt7915_dma_enable(struct mt7915_dev *dev, bool reset) in mt7915_dma_enable() argument
334 struct mt76_dev *mdev = &dev->mt76; in mt7915_dma_enable()
337 if (dev->hif2) in mt7915_dma_enable()
341 mt76_wr(dev, MT_WFDMA0_RST_DTX_PTR, ~0); in mt7915_dma_enable()
343 mt76_wr(dev, MT_WFDMA1_RST_DTX_PTR, ~0); in mt7915_dma_enable()
344 if (dev->hif2) { in mt7915_dma_enable()
345 mt76_wr(dev, MT_WFDMA0_RST_DTX_PTR + hif1_ofs, ~0); in mt7915_dma_enable()
347 mt76_wr(dev, MT_WFDMA1_RST_DTX_PTR + hif1_ofs, ~0); in mt7915_dma_enable()
351 mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG0, 0); in mt7915_dma_enable()
353 mt76_wr(dev, MT_WFDMA1_PRI_DLY_INT_CFG0, 0); in mt7915_dma_enable()
355 mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG1, 0); in mt7915_dma_enable()
356 mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG2, 0); in mt7915_dma_enable()
359 if (dev->hif2) { in mt7915_dma_enable()
360 mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG0 + hif1_ofs, 0); in mt7915_dma_enable()
362 mt76_wr(dev, MT_WFDMA1_PRI_DLY_INT_CFG0 + in mt7915_dma_enable()
365 mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG1 + in mt7915_dma_enable()
367 mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG2 + in mt7915_dma_enable()
373 mt7915_dma_prefetch(dev); in mt7915_dma_enable()
376 mt76_set(dev, MT_WFDMA0_BUSY_ENA, in mt7915_dma_enable()
382 mt76_set(dev, MT_WFDMA1_BUSY_ENA, in mt7915_dma_enable()
387 if (dev->hif2) { in mt7915_dma_enable()
388 mt76_set(dev, MT_WFDMA0_BUSY_ENA + hif1_ofs, in mt7915_dma_enable()
394 mt76_set(dev, MT_WFDMA1_BUSY_ENA + hif1_ofs, in mt7915_dma_enable()
400 mt76_poll(dev, MT_WFDMA_EXT_CSR_HIF_MISC, in mt7915_dma_enable()
403 return mt7915_dma_start(dev, reset, true); in mt7915_dma_enable()
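
mt7915_dma_enable() finishes by setting the WFDMA busy-enable bits and polling MT_WFDMA_EXT_CSR_HIF_MISC until the HIF reports idle, only then handing off to mt7915_dma_start() (lines 400-403). A rough user-space model of that bounded poll loop is shown below; the register address, busy bit, retry count, and the read stub are all illustrative.

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static uint32_t read_calls;

static uint32_t mmio_read(uint32_t reg)
{
	(void)reg;
	/* pretend the busy bit deasserts after a few reads */
	return (++read_calls < 3) ? 0x1 : 0x0;
}

/* poll until (reg & mask) == val, giving up after `tries` iterations */
static bool poll_reg(uint32_t reg, uint32_t mask, uint32_t val, int tries)
{
	while (tries--) {
		if ((mmio_read(reg) & mask) == val)
			return true;
		/* the real mt76_poll() delays between reads */
	}
	return false;
}

int main(void)
{
	/* placeholder address and busy bit for MT_WFDMA_EXT_CSR_HIF_MISC */
	printf("hif idle: %s\n",
	       poll_reg(0xd7044, 0x1, 0x0, 100) ? "yes" : "no");
	return 0;
}
```
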
406 int mt7915_dma_init(struct mt7915_dev *dev, struct mt7915_phy *phy2) in mt7915_dma_init() argument
408 struct mt76_dev *mdev = &dev->mt76; in mt7915_dma_init()
413 mt7915_dma_config(dev); in mt7915_dma_init()
415 mt76_dma_attach(&dev->mt76); in mt7915_dma_init()
417 if (dev->hif2) in mt7915_dma_init()
420 mt7915_dma_disable(dev, true); in mt7915_dma_init()
426 mt76_set(dev, MT_WFDMA_HOST_CONFIG, in mt7915_dma_init()
428 mt76_wr(dev, MT_WFDMA_WED_RING_CONTROL, in mt7915_dma_init()
434 mt76_rmw(dev, MT_WFDMA0_EXT0_CFG, MT_WFDMA0_EXT0_RXWB_KEEP, in mt7915_dma_init()
438 mt76_clear(dev, MT_WFDMA_HOST_CONFIG, MT_WFDMA_HOST_CONFIG_WED); in mt7915_dma_init()
442 ret = mt7915_init_tx_queues(&dev->phy, in mt7915_dma_init()
443 MT_TXQ_ID(dev->phy.mt76->band_idx), in mt7915_dma_init()
459 ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WM, in mt7915_dma_init()
467 ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WA, in mt7915_dma_init()
475 ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_FWDL, in mt7915_dma_init()
483 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU], in mt7915_dma_init()
495 dev->mt76.q_rx[MT_RXQ_MCU_WA].flags = MT_WED_Q_TXFREE; in mt7915_dma_init()
500 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU_WA], in mt7915_dma_init()
507 if (!dev->phy.mt76->band_idx) { in mt7915_dma_init()
510 dev->mt76.q_rx[MT_RXQ_MAIN].flags = in mt7915_dma_init()
512 dev->mt76.rx_token_size += MT7915_RX_RING_SIZE; in mt7915_dma_init()
515 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN], in mt7915_dma_init()
537 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN_WA], in mt7915_dma_init()
544 if (dev->dbdc_support || dev->phy.mt76->band_idx) { in mt7915_dma_init()
547 dev->mt76.q_rx[MT_RXQ_BAND1].flags = in mt7915_dma_init()
549 dev->mt76.rx_token_size += MT7915_RX_RING_SIZE; in mt7915_dma_init()
553 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND1], in mt7915_dma_init()
562 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND1_WA], in mt7915_dma_init()
571 ret = mt76_init_queues(dev, mt76_dma_rx_poll); in mt7915_dma_init()
575 netif_napi_add_tx(&dev->mt76.tx_napi_dev, &dev->mt76.tx_napi, in mt7915_dma_init()
577 napi_enable(&dev->mt76.tx_napi); in mt7915_dma_init()
579 mt7915_dma_enable(dev, false); in mt7915_dma_init()
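
Taken together, the mt7915_dma_init() matches (lines 406-579) trace a fixed bring-up order: queue config and generic DMA attach, a full disable, TX data rings per band, the three MCU rings (WM, WA, FWDL), the RX rings (MCU, MCU_WA, MAIN plus the band-1 rings when DBDC or a second band is present), NAPI registration, and finally mt7915_dma_enable(). The stub outline below compresses that shape; every helper here is a stand-in, not a real mt76 symbol.

```c
#include <stdio.h>

struct mt7915_dev { int dummy; };

static int setup_tx_queues(struct mt7915_dev *dev)  { (void)dev; return 0; }
static int setup_mcu_queues(struct mt7915_dev *dev) { (void)dev; return 0; }
static int setup_rx_queues(struct mt7915_dev *dev)  { (void)dev; return 0; }

static int dma_init_outline(struct mt7915_dev *dev)
{
	int ret;

	/* 1. fill the q_id/q_int_mask tables (mt7915_dma_config)   */
	/* 2. attach the generic mt76 DMA ops (mt76_dma_attach)     */
	/* 3. full disable/reset before touching any ring           */

	ret = setup_tx_queues(dev);    /* per-band data TX rings      */
	if (ret)
		return ret;
	ret = setup_mcu_queues(dev);   /* WM, WA, FWDL command rings  */
	if (ret)
		return ret;
	ret = setup_rx_queues(dev);    /* MCU/MAIN/BAND1 (+_WA) rings */
	if (ret)
		return ret;

	/* 4. register TX NAPI, then enable DMA and interrupts */
	return 0;
}

int main(void)
{
	struct mt7915_dev dev = { 0 };

	printf("init: %d\n", dma_init_outline(&dev));
	return 0;
}
```

Note that every allocation step propagates its error immediately, so a failure leaves the later rings untouched.
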
584 static void mt7915_dma_wed_reset(struct mt7915_dev *dev) in mt7915_dma_wed_reset() argument
586 struct mt76_dev *mdev = &dev->mt76; in mt7915_dma_wed_reset()
588 if (!test_bit(MT76_STATE_WED_RESET, &dev->mphy.state)) in mt7915_dma_wed_reset()
593 if (!wait_for_completion_timeout(&dev->mt76.mmio.wed_reset_complete, in mt7915_dma_wed_reset()
595 dev_err(dev->mt76.dev, "wed reset complete timeout\n"); in mt7915_dma_wed_reset()
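
mt7915_dma_wed_reset() (lines 584-595) only acts when the WED-reset state bit is set, then blocks on wed_reset_complete with a timeout and logs "wed reset complete timeout" if it expires. Below is a rough user-space analogue of wait_for_completion_timeout() using a timed condition wait; kernel completions differ in detail. Build with -pthread.

```c
#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>
#include <time.h>

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t  cond = PTHREAD_COND_INITIALIZER;
static bool done;

static void *reset_worker(void *arg)
{
	(void)arg;
	/* pretend the hardware reset completes promptly */
	pthread_mutex_lock(&lock);
	done = true;
	pthread_cond_signal(&cond);
	pthread_mutex_unlock(&lock);
	return NULL;
}

/* wait for `done`, giving up after timeout_sec seconds */
static bool wait_reset(int timeout_sec)
{
	struct timespec ts;
	int err = 0;

	clock_gettime(CLOCK_REALTIME, &ts);
	ts.tv_sec += timeout_sec;

	pthread_mutex_lock(&lock);
	while (!done && !err)
		err = pthread_cond_timedwait(&cond, &lock, &ts);
	pthread_mutex_unlock(&lock);
	return done;
}

int main(void)
{
	pthread_t t;

	pthread_create(&t, NULL, reset_worker, NULL);
	if (!wait_reset(3))
		fprintf(stderr, "wed reset complete timeout\n");
	else
		printf("wed reset completed\n");
	pthread_join(t, NULL);
	return 0;
}
```
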
599 mt7915_dma_reset_tx_queue(struct mt7915_dev *dev, struct mt76_queue *q) in mt7915_dma_reset_tx_queue() argument
601 mt76_queue_reset(dev, q); in mt7915_dma_reset_tx_queue()
602 if (mtk_wed_device_active(&dev->mt76.mmio.wed)) in mt7915_dma_reset_tx_queue()
603 mt76_dma_wed_setup(&dev->mt76, q, true); in mt7915_dma_reset_tx_queue()
606 int mt7915_dma_reset(struct mt7915_dev *dev, bool force) in mt7915_dma_reset() argument
608 struct mt76_phy *mphy_ext = dev->mt76.phys[MT_BAND1]; in mt7915_dma_reset()
609 struct mtk_wed_device *wed = &dev->mt76.mmio.wed; in mt7915_dma_reset()
613 for (i = 0; i < ARRAY_SIZE(dev->mt76.phy.q_tx); i++) { in mt7915_dma_reset()
614 mt76_queue_tx_cleanup(dev, dev->mphy.q_tx[i], true); in mt7915_dma_reset()
616 mt76_queue_tx_cleanup(dev, mphy_ext->q_tx[i], true); in mt7915_dma_reset()
619 for (i = 0; i < ARRAY_SIZE(dev->mt76.q_mcu); i++) in mt7915_dma_reset()
620 mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[i], true); in mt7915_dma_reset()
622 mt76_for_each_q_rx(&dev->mt76, i) in mt7915_dma_reset()
623 mt76_queue_rx_cleanup(dev, &dev->mt76.q_rx[i]); in mt7915_dma_reset()
627 mt7915_wfsys_reset(dev); in mt7915_dma_reset()
632 mt7915_dma_disable(dev, force); in mt7915_dma_reset()
633 mt7915_dma_wed_reset(dev); in mt7915_dma_reset()
637 mt7915_dma_reset_tx_queue(dev, dev->mphy.q_tx[i]); in mt7915_dma_reset()
639 mt7915_dma_reset_tx_queue(dev, mphy_ext->q_tx[i]); in mt7915_dma_reset()
643 mt76_queue_reset(dev, dev->mt76.q_mcu[i]); in mt7915_dma_reset()
645 mt76_for_each_q_rx(&dev->mt76, i) { in mt7915_dma_reset()
646 if (dev->mt76.q_rx[i].flags == MT_WED_Q_TXFREE) in mt7915_dma_reset()
649 mt76_queue_reset(dev, &dev->mt76.q_rx[i]); in mt7915_dma_reset()
652 mt76_tx_status_check(&dev->mt76, true); in mt7915_dma_reset()
654 mt76_for_each_q_rx(&dev->mt76, i) in mt7915_dma_reset()
655 mt76_queue_rx_reset(dev, i); in mt7915_dma_reset()
657 if (mtk_wed_device_active(wed) && is_mt7915(&dev->mt76)) in mt7915_dma_reset()
658 mt76_rmw(dev, MT_WFDMA0_EXT0_CFG, MT_WFDMA0_EXT0_RXWB_KEEP, in mt7915_dma_reset()
661 mt7915_dma_enable(dev, !force); in mt7915_dma_reset()
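
Within mt7915_dma_reset() (lines 606-661), the RX loop at line 646 skips any ring whose flags mark it MT_WED_Q_TXFREE, since that ring's descriptors belong to the WED hardware rather than the host. A minimal model of that filter; the flag value and queue names are placeholders.

```c
#include <stdio.h>

#define MT_WED_Q_TXFREE 0x4   /* placeholder for the real flag value */

struct queue { int flags; const char *name; };

int main(void)
{
	struct queue q_rx[] = {
		{ 0,               "mcu"    },
		{ MT_WED_Q_TXFREE, "mcu_wa" },   /* owned by WED: skipped */
		{ 0,               "main"   },
	};

	for (unsigned i = 0; i < sizeof(q_rx) / sizeof(q_rx[0]); i++) {
		if (q_rx[i].flags == MT_WED_Q_TXFREE)
			continue;            /* mirrors the check at line 646 */
		printf("reset rx queue %s\n", q_rx[i].name);
	}
	return 0;
}
```

The surrounding sequence is the interesting part: drain TX and RX, optionally reset the WiFi subsystem, disable DMA, reset WED, re-initialize each ring, and only then re-enable DMA with the inverse of the force flag.
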
666 void mt7915_dma_cleanup(struct mt7915_dev *dev) in mt7915_dma_cleanup() argument
668 mt7915_dma_disable(dev, true); in mt7915_dma_cleanup()
670 mt76_dma_cleanup(&dev->mt76); in mt7915_dma_cleanup()