// SPDX-License-Identifier: ISC
/* Copyright (C) 2023 MediaTek Inc. */

#include <linux/module.h>
#include <linux/firmware.h>
#if defined(__FreeBSD__)
#include <linux/delay.h>
#endif

#include "mt792x.h"
#include "dma.h"
#include "trace.h"

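/*
 * Top half of the PCIe interrupt: mask all host interrupts and defer
 * the real work to the irq_tasklet. Interrupts that fire before the
 * device is initialized are not ours to handle.
 */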
irqreturn_t mt792x_irq_handler(int irq, void *dev_instance)
{
	struct mt792x_dev *dev = dev_instance;

	mt76_wr(dev, dev->irq_map->host_irq_enable, 0);

	if (!test_bit(MT76_STATE_INITIALIZED, &dev->mphy.state))
		return IRQ_NONE;

	tasklet_schedule(&dev->mt76.irq_tasklet);

	return IRQ_HANDLED;
}
EXPORT_SYMBOL_GPL(mt792x_irq_handler);

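/*
 * Bottom half: read and ack the pending interrupt status, remember
 * which RX/MCU completion bits to unmask again, handle MCU-to-host
 * software interrupts (e.g. the RX wakeup over PCIe) and schedule the
 * NAPI context matching each pending source.
 */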
void mt792x_irq_tasklet(unsigned long data)
{
	struct mt792x_dev *dev = (struct mt792x_dev *)data;
	const struct mt792x_irq_map *irq_map = dev->irq_map;
	u32 intr, mask = 0;

	mt76_wr(dev, irq_map->host_irq_enable, 0);

	intr = mt76_rr(dev, MT_WFDMA0_HOST_INT_STA);
	intr &= dev->mt76.mmio.irqmask;
	mt76_wr(dev, MT_WFDMA0_HOST_INT_STA, intr);

	trace_dev_irq(&dev->mt76, intr, dev->mt76.mmio.irqmask);

	mask |= intr & (irq_map->rx.data_complete_mask |
			irq_map->rx.wm_complete_mask |
			irq_map->rx.wm2_complete_mask);
	if (intr & dev->irq_map->tx.mcu_complete_mask)
		mask |= dev->irq_map->tx.mcu_complete_mask;

	if (intr & MT_INT_MCU_CMD) {
		u32 intr_sw;

		intr_sw = mt76_rr(dev, MT_MCU_CMD);
		/* ack MCU2HOST_SW_INT_STA */
		mt76_wr(dev, MT_MCU_CMD, intr_sw);
		if (intr_sw & MT_MCU_CMD_WAKE_RX_PCIE) {
			mask |= irq_map->rx.data_complete_mask;
			intr |= irq_map->rx.data_complete_mask;
		}
	}

	mt76_set_irq_mask(&dev->mt76, irq_map->host_irq_enable, mask, 0);

	if (intr & dev->irq_map->tx.all_complete_mask)
		napi_schedule(&dev->mt76.tx_napi);

	if (intr & irq_map->rx.wm_complete_mask)
		napi_schedule(&dev->mt76.napi[MT_RXQ_MCU]);

	if (intr & irq_map->rx.wm2_complete_mask)
		napi_schedule(&dev->mt76.napi[MT_RXQ_MCU_WA]);

	if (intr & irq_map->rx.data_complete_mask)
		napi_schedule(&dev->mt76.napi[MT_RXQ_MAIN]);
}
EXPORT_SYMBOL_GPL(mt792x_irq_tasklet);

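/* Re-arm the completion interrupt of the RX queue NAPI just drained. */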
void mt792x_rx_poll_complete(struct mt76_dev *mdev, enum mt76_rxq_id q)
{
	struct mt792x_dev *dev = container_of(mdev, struct mt792x_dev, mt76);
	const struct mt792x_irq_map *irq_map = dev->irq_map;

	if (q == MT_RXQ_MAIN)
		mt76_connac_irq_enable(mdev, irq_map->rx.data_complete_mask);
	else if (q == MT_RXQ_MCU_WA)
		mt76_connac_irq_enable(mdev, irq_map->rx.wm2_complete_mask);
	else
		mt76_connac_irq_enable(mdev, irq_map->rx.wm_complete_mask);
}
EXPORT_SYMBOL_GPL(mt792x_rx_poll_complete);

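/*
 * Per-ring WFDMA prefetch setup. PREFETCH() packs a base offset
 * (upper 16 bits) and a depth (low bits) into each ring's EXT_CTRL
 * register; the base offsets below place the rings back to back in a
 * shared prefetch region.
 */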
#define PREFETCH(base, depth)	((base) << 16 | (depth))
static void mt792x_dma_prefetch(struct mt792x_dev *dev)
{
	mt76_wr(dev, MT_WFDMA0_RX_RING0_EXT_CTRL, PREFETCH(0x0, 0x4));
	mt76_wr(dev, MT_WFDMA0_RX_RING2_EXT_CTRL, PREFETCH(0x40, 0x4));
	mt76_wr(dev, MT_WFDMA0_RX_RING3_EXT_CTRL, PREFETCH(0x80, 0x4));
	mt76_wr(dev, MT_WFDMA0_RX_RING4_EXT_CTRL, PREFETCH(0xc0, 0x4));
	mt76_wr(dev, MT_WFDMA0_RX_RING5_EXT_CTRL, PREFETCH(0x100, 0x4));

	mt76_wr(dev, MT_WFDMA0_TX_RING0_EXT_CTRL, PREFETCH(0x140, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING1_EXT_CTRL, PREFETCH(0x180, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING2_EXT_CTRL, PREFETCH(0x1c0, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING3_EXT_CTRL, PREFETCH(0x200, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING4_EXT_CTRL, PREFETCH(0x240, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING5_EXT_CTRL, PREFETCH(0x280, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING6_EXT_CTRL, PREFETCH(0x2c0, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING16_EXT_CTRL, PREFETCH(0x340, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING17_EXT_CTRL, PREFETCH(0x380, 0x4));
}

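/*
 * Bring the WFDMA engine up: program prefetch, reset the descriptor
 * indices, zero the delay-interrupt config, enable TX/RX DMA and
 * unmask the per-ring and MCU command interrupts.
 */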
int mt792x_dma_enable(struct mt792x_dev *dev)
{
	/* configure prefetch settings */
	mt792x_dma_prefetch(dev);

	/* reset dma idx */
	mt76_wr(dev, MT_WFDMA0_RST_DTX_PTR, ~0);

	/* configure delay interrupt */
	mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG0, 0);

	mt76_set(dev, MT_WFDMA0_GLO_CFG,
		 MT_WFDMA0_GLO_CFG_TX_WB_DDONE |
		 MT_WFDMA0_GLO_CFG_FIFO_LITTLE_ENDIAN |
		 MT_WFDMA0_GLO_CFG_CLK_GAT_DIS |
		 MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
		 MT_WFDMA0_GLO_CFG_CSR_DISP_BASE_PTR_CHAIN_EN |
		 MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

	mt76_set(dev, MT_WFDMA0_GLO_CFG,
		 MT_WFDMA0_GLO_CFG_TX_DMA_EN | MT_WFDMA0_GLO_CFG_RX_DMA_EN);

	mt76_set(dev, MT_WFDMA_DUMMY_CR, MT_WFDMA_NEED_REINIT);

	/* enable interrupts for TX/RX rings */
	mt76_connac_irq_enable(&dev->mt76,
			       dev->irq_map->tx.all_complete_mask |
			       dev->irq_map->rx.data_complete_mask |
			       dev->irq_map->rx.wm2_complete_mask |
			       dev->irq_map->rx.wm_complete_mask |
			       MT_INT_MCU_CMD);
	mt76_set(dev, MT_MCU2HOST_SW_INT_ENA, MT_MCU_CMD_WAKE_RX_PCIE);

	return 0;
}
EXPORT_SYMBOL_GPL(mt792x_dma_enable);

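/*
 * Full restart of the DMA engine: stop it, reset every TX/MCU/RX
 * hardware queue, flush pending TX status entries, then re-enable.
 */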
static int
mt792x_dma_reset(struct mt792x_dev *dev, bool force)
{
	int i, err;

	err = mt792x_dma_disable(dev, force);
	if (err)
		return err;

	/* reset hw queues */
	for (i = 0; i < __MT_TXQ_MAX; i++)
		mt76_queue_reset(dev, dev->mphy.q_tx[i]);

	for (i = 0; i < __MT_MCUQ_MAX; i++)
		mt76_queue_reset(dev, dev->mt76.q_mcu[i]);

	mt76_for_each_q_rx(&dev->mt76, i)
		mt76_queue_reset(dev, &dev->mt76.q_rx[i]);

	mt76_tx_status_check(&dev->mt76, true);

	return mt792x_dma_enable(dev);
}

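/*
 * Clean up all queues and reset WPDMA; a forced reset also pulls the
 * whole WiFi subsystem through mt792x_wfsys_reset() first.
 */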
int mt792x_wpdma_reset(struct mt792x_dev *dev, bool force)
{
	int i, err;

	/* clean up hw queues */
	for (i = 0; i < ARRAY_SIZE(dev->mt76.phy.q_tx); i++)
		mt76_queue_tx_cleanup(dev, dev->mphy.q_tx[i], true);

	for (i = 0; i < ARRAY_SIZE(dev->mt76.q_mcu); i++)
		mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[i], true);

	mt76_for_each_q_rx(&dev->mt76, i)
		mt76_queue_rx_cleanup(dev, &dev->mt76.q_rx[i]);

	if (force) {
		err = mt792x_wfsys_reset(dev);
		if (err)
			return err;
	}
	err = mt792x_dma_reset(dev, force);
	if (err)
		return err;

	mt76_for_each_q_rx(&dev->mt76, i)
		mt76_queue_rx_reset(dev, i);

	return 0;
}
EXPORT_SYMBOL_GPL(mt792x_wpdma_reset);

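/*
 * Conditionally re-initialize WPDMA when the hardware reports it is
 * required, typically after a low-power wakeup (counted in
 * pm->stats.lp_wake). Interrupts stay masked across the reset.
 */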
int mt792x_wpdma_reinit_cond(struct mt792x_dev *dev)
{
	struct mt76_connac_pm *pm = &dev->pm;
	int err;

	/* check if the wpdma must be reinitialized */
	if (mt792x_dma_need_reinit(dev)) {
		/* disable interrupts */
		mt76_wr(dev, dev->irq_map->host_irq_enable, 0);
		mt76_wr(dev, MT_PCIE_MAC_INT_ENABLE, 0x0);

		err = mt792x_wpdma_reset(dev, false);
		if (err) {
			dev_err(dev->mt76.dev, "wpdma reset failed\n");
			return err;
		}

		/* enable interrupts */
		mt76_wr(dev, MT_PCIE_MAC_INT_ENABLE, 0xff);
		pm->stats.lp_wake++;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(mt792x_wpdma_reinit_cond);

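/*
 * Stop WFDMA0 and wait for in-flight DMA to drain; on timeout the
 * caller gets -ETIMEDOUT. The DMA scheduler (dmashdl) is bypassed,
 * and a forced disable additionally pulses the WFDMA logic reset.
 */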
int mt792x_dma_disable(struct mt792x_dev *dev, bool force)
{
	/* disable WFDMA0 */
	mt76_clear(dev, MT_WFDMA0_GLO_CFG,
		   MT_WFDMA0_GLO_CFG_TX_DMA_EN | MT_WFDMA0_GLO_CFG_RX_DMA_EN |
		   MT_WFDMA0_GLO_CFG_CSR_DISP_BASE_PTR_CHAIN_EN |
		   MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

	if (!mt76_poll_msec_tick(dev, MT_WFDMA0_GLO_CFG,
				 MT_WFDMA0_GLO_CFG_TX_DMA_BUSY |
				 MT_WFDMA0_GLO_CFG_RX_DMA_BUSY, 0, 100, 1))
		return -ETIMEDOUT;

	/* disable dmashdl */
	mt76_clear(dev, MT_WFDMA0_GLO_CFG_EXT0,
		   MT_WFDMA0_CSR_TX_DMASHDL_ENABLE);
	mt76_set(dev, MT_DMASHDL_SW_CONTROL, MT_DMASHDL_DMASHDL_BYPASS);

	if (force) {
		/* reset */
		mt76_clear(dev, MT_WFDMA0_RST,
			   MT_WFDMA0_RST_DMASHDL_ALL_RST |
			   MT_WFDMA0_RST_LOGIC_RST);

		mt76_set(dev, MT_WFDMA0_RST,
			 MT_WFDMA0_RST_DMASHDL_ALL_RST |
			 MT_WFDMA0_RST_LOGIC_RST);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(mt792x_dma_disable);

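/*
 * Teardown counterpart of mt792x_dma_enable(): disable WFDMA0, wait
 * for it to idle, pulse the reset bits and free all DMA queues.
 */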
void mt792x_dma_cleanup(struct mt792x_dev *dev)
{
	/* disable */
	mt76_clear(dev, MT_WFDMA0_GLO_CFG,
		   MT_WFDMA0_GLO_CFG_TX_DMA_EN |
		   MT_WFDMA0_GLO_CFG_RX_DMA_EN |
		   MT_WFDMA0_GLO_CFG_CSR_DISP_BASE_PTR_CHAIN_EN |
		   MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

	mt76_poll_msec_tick(dev, MT_WFDMA0_GLO_CFG,
			    MT_WFDMA0_GLO_CFG_TX_DMA_BUSY |
			    MT_WFDMA0_GLO_CFG_RX_DMA_BUSY, 0, 100, 1);

	/* reset */
	mt76_clear(dev, MT_WFDMA0_RST,
		   MT_WFDMA0_RST_DMASHDL_ALL_RST |
		   MT_WFDMA0_RST_LOGIC_RST);

	mt76_set(dev, MT_WFDMA0_RST,
		 MT_WFDMA0_RST_DMASHDL_ALL_RST |
		 MT_WFDMA0_RST_LOGIC_RST);

	mt76_dma_cleanup(&dev->mt76);
}
EXPORT_SYMBOL_GPL(mt792x_dma_cleanup);

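/*
 * NAPI TX poll. If the device is asleep for runtime PM, complete NAPI
 * and punt to the wake worker; otherwise reap finished TX frames and
 * re-arm the TX completion interrupts.
 */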
int mt792x_poll_tx(struct napi_struct *napi, int budget)
{
	struct mt792x_dev *dev;

	dev = container_of(napi, struct mt792x_dev, mt76.tx_napi);

	if (!mt76_connac_pm_ref(&dev->mphy, &dev->pm)) {
		napi_complete(napi);
		queue_work(dev->mt76.wq, &dev->pm.wake_work);
		return 0;
	}

	mt76_connac_tx_cleanup(&dev->mt76);
	if (napi_complete(napi))
		mt76_connac_irq_enable(&dev->mt76,
				       dev->irq_map->tx.all_complete_mask);
	mt76_connac_pm_unref(&dev->mphy, &dev->pm);

	return 0;
}
EXPORT_SYMBOL_GPL(mt792x_poll_tx);

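/*
 * NAPI RX poll: same runtime-PM handling as the TX path, with the
 * actual receive processing done by the generic mt76 DMA poller.
 */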
int mt792x_poll_rx(struct napi_struct *napi, int budget)
{
	struct mt792x_dev *dev;
	int done;

	dev = container_of(napi->dev, struct mt792x_dev, mt76.napi_dev);

	if (!mt76_connac_pm_ref(&dev->mphy, &dev->pm)) {
		napi_complete(napi);
		queue_work(dev->mt76.wq, &dev->pm.wake_work);
		return 0;
	}
	done = mt76_dma_rx_poll(napi, budget);
	mt76_connac_pm_unref(&dev->mphy, &dev->pm);

	return done;
}
EXPORT_SYMBOL_GPL(mt792x_poll_rx);

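/*
 * Toggle the WiFi subsystem software reset and wait up to 500ms for
 * the init-done bit; the reset register sits at a different address
 * on MT7921 than on the other mt792x chips.
 */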
int mt792x_wfsys_reset(struct mt792x_dev *dev)
{
	u32 addr = is_mt7921(&dev->mt76) ? 0x18000140 : 0x7c000140;

	mt76_clear(dev, addr, WFSYS_SW_RST_B);
	msleep(50);
	mt76_set(dev, addr, WFSYS_SW_RST_B);

	if (!__mt76_poll_msec(&dev->mt76, addr, WFSYS_SW_INIT_DONE,
			      WFSYS_SW_INIT_DONE, 500))
		return -ETIMEDOUT;

	return 0;
}
EXPORT_SYMBOL_GPL(mt792x_wfsys_reset);