1 /* $NetBSD: arn5008.c,v 1.19 2022/03/18 23:32:24 riastradh Exp $ */
2 /* $OpenBSD: ar5008.c,v 1.21 2012/08/25 12:14:31 kettenis Exp $ */
3
4 /*-
5 * Copyright (c) 2009 Damien Bergamini <damien.bergamini@free.fr>
6 * Copyright (c) 2008-2009 Atheros Communications Inc.
7 *
8 * Permission to use, copy, modify, and/or distribute this software for any
9 * purpose with or without fee is hereby granted, provided that the above
10 * copyright notice and this permission notice appear in all copies.
11 *
12 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
13 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
14 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
15 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
16 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
17 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
18 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
19 */
20
21 /*
22 * Driver for Atheros 802.11a/g/n chipsets.
23 * Routines common to AR5008, AR9001 and AR9002 families.
24 */
25
26 #include <sys/cdefs.h>
27 __KERNEL_RCSID(0, "$NetBSD: arn5008.c,v 1.19 2022/03/18 23:32:24 riastradh Exp $");
28
29 #include <sys/param.h>
30 #include <sys/sockio.h>
31 #include <sys/mbuf.h>
32 #include <sys/kernel.h>
33 #include <sys/socket.h>
34 #include <sys/systm.h>
35 #include <sys/malloc.h>
36 #include <sys/queue.h>
37 #include <sys/conf.h>
38 #include <sys/device.h>
39
40 #include <sys/bus.h>
41 #include <sys/endian.h>
42 #include <sys/intr.h>
43
44 #include <net/bpf.h>
45 #include <net/if.h>
46 #include <net/if_arp.h>
47 #include <net/if_dl.h>
48 #include <net/if_ether.h>
49 #include <net/if_media.h>
50 #include <net/if_types.h>
51
52 #include <netinet/in.h>
53 #include <netinet/in_systm.h>
54 #include <netinet/in_var.h>
55 #include <netinet/ip.h>
56
57 #include <net80211/ieee80211_var.h>
58 #include <net80211/ieee80211_amrr.h>
59 #include <net80211/ieee80211_radiotap.h>
60
61 #include <dev/ic/athnreg.h>
62 #include <dev/ic/athnvar.h>
63
64 #include <dev/ic/arn5008reg.h>
65 #include <dev/ic/arn5008.h>
66 #include <dev/ic/arn5416.h>
67 #include <dev/ic/arn9280.h>
68
69 #define Static static
70
71 Static void ar5008_calib_adc_dc_off(struct athn_softc *);
72 Static void ar5008_calib_adc_gain(struct athn_softc *);
73 Static void ar5008_calib_iq(struct athn_softc *);
74 Static void ar5008_disable_ofdm_weak_signal(struct athn_softc *);
75 Static void ar5008_disable_phy(struct athn_softc *);
76 Static int ar5008_dma_alloc(struct athn_softc *);
77 Static void ar5008_dma_free(struct athn_softc *);
78 Static void ar5008_do_calib(struct athn_softc *);
79 Static void ar5008_do_noisefloor_calib(struct athn_softc *);
80 Static void ar5008_enable_antenna_diversity(struct athn_softc *);
81 Static void ar5008_enable_ofdm_weak_signal(struct athn_softc *);
82 Static uint8_t ar5008_get_vpd(uint8_t, const uint8_t *, const uint8_t *, int);
83 Static void ar5008_gpio_config_input(struct athn_softc *, int);
84 Static void ar5008_gpio_config_output(struct athn_softc *, int, int);
85 Static int ar5008_gpio_read(struct athn_softc *, int);
86 Static void ar5008_gpio_write(struct athn_softc *, int, int);
87 Static void ar5008_hw_init(struct athn_softc *, struct ieee80211_channel *,
88 struct ieee80211_channel *);
89 Static void ar5008_init_baseband(struct athn_softc *);
90 Static void ar5008_init_chains(struct athn_softc *);
91 Static int ar5008_intr_status(struct athn_softc *);
92 Static int ar5008_intr(struct athn_softc *);
93 Static void ar5008_next_calib(struct athn_softc *);
94 Static int ar5008_read_eep_word(struct athn_softc *, uint32_t,
95 uint16_t *);
96 Static int ar5008_read_rom(struct athn_softc *);
97 Static void ar5008_rf_bus_release(struct athn_softc *);
98 Static int ar5008_rf_bus_request(struct athn_softc *);
99 Static void ar5008_rfsilent_init(struct athn_softc *);
100 Static int ar5008_rx_alloc(struct athn_softc *);
101 Static void ar5008_rx_enable(struct athn_softc *);
102 Static void ar5008_rx_free(struct athn_softc *);
103 Static void ar5008_rx_intr(struct athn_softc *);
104 Static void ar5008_rx_radiotap(struct athn_softc *, struct mbuf *,
105 struct ar_rx_desc *);
106 Static void ar5008_set_cck_weak_signal(struct athn_softc *, int);
107 Static void ar5008_set_delta_slope(struct athn_softc *,
108 struct ieee80211_channel *, struct ieee80211_channel *);
109 Static void ar5008_set_firstep_level(struct athn_softc *, int);
110 Static void ar5008_set_noise_immunity_level(struct athn_softc *, int);
111 Static void ar5008_set_phy(struct athn_softc *, struct ieee80211_channel *,
112 struct ieee80211_channel *);
113 Static void ar5008_set_rf_mode(struct athn_softc *,
114 struct ieee80211_channel *);
115 Static void ar5008_set_rxchains(struct athn_softc *);
116 Static void ar5008_set_spur_immunity_level(struct athn_softc *, int);
117 Static void ar5008_swap_rom(struct athn_softc *);
118 Static int ar5008_swba_intr(struct athn_softc *);
119 Static int ar5008_tx(struct athn_softc *, struct mbuf *,
120 struct ieee80211_node *, int);
121 Static int ar5008_tx_alloc(struct athn_softc *);
122 Static void ar5008_tx_free(struct athn_softc *);
123 Static void ar5008_tx_intr(struct athn_softc *);
124 Static int ar5008_tx_process(struct athn_softc *, int);
125
126 #ifdef notused
127 Static void ar5008_bb_load_noisefloor(struct athn_softc *);
128 Static void ar5008_get_noisefloor(struct athn_softc *,
129 struct ieee80211_channel *);
130 Static void ar5008_noisefloor_calib(struct athn_softc *);
131 Static void ar5008_read_noisefloor(struct athn_softc *, int16_t *,
132 int16_t *);
133 Static void ar5008_write_noisefloor(struct athn_softc *, int16_t *,
134 int16_t *);
135 #endif /* notused */
136
137 // bf->bf_m = MCLGETI(NULL, M_DONTWAIT, NULL, ATHN_RXBUFSZ);
138
139 /*
140 * XXX: see if_iwn.c:MCLGETIalt() for a better solution.
141 */
142 static struct mbuf *
MCLGETI(struct athn_softc * sc __unused,int how,struct ifnet * ifp __unused,u_int size)143 MCLGETI(struct athn_softc *sc __unused, int how,
144 struct ifnet *ifp __unused, u_int size)
145 {
146 struct mbuf *m;
147
148 MGETHDR(m, how, MT_DATA);
149 if (m == NULL)
150 return NULL;
151
152 MEXTMALLOC(m, size, how);
153 if ((m->m_flags & M_EXT) == 0) {
154 m_freem(m);
155 return NULL;
156 }
157 return m;
158 }
159
/*
 * Attach code common to the AR5008, AR9001 and AR9002 families:
 * install the family callbacks, read and validate the ROM, and derive
 * device capabilities (supported bands, MAC address, RF switch
 * configuration, key cache size, Tx/Rx chain masks) from it.
 * Returns 0 on success or an errno on failure.
 */
PUBLIC int
ar5008_attach(struct athn_softc *sc)
{
	struct athn_ops *ops = &sc->sc_ops;
	struct ieee80211com *ic = &sc->sc_ic;
	struct ar_base_eep_header *base;
	uint8_t eep_ver, kc_entries_log;
	int error;

	/* Set callbacks for AR5008, AR9001 and AR9002 families. */
	ops->gpio_read = ar5008_gpio_read;
	ops->gpio_write = ar5008_gpio_write;
	ops->gpio_config_input = ar5008_gpio_config_input;
	ops->gpio_config_output = ar5008_gpio_config_output;
	ops->rfsilent_init = ar5008_rfsilent_init;

	ops->dma_alloc = ar5008_dma_alloc;
	ops->dma_free = ar5008_dma_free;
	ops->rx_enable = ar5008_rx_enable;
	ops->intr_status = ar5008_intr_status;
	ops->intr = ar5008_intr;
	ops->tx = ar5008_tx;

	ops->set_rf_mode = ar5008_set_rf_mode;
	ops->rf_bus_request = ar5008_rf_bus_request;
	ops->rf_bus_release = ar5008_rf_bus_release;
	ops->set_phy = ar5008_set_phy;
	ops->set_delta_slope = ar5008_set_delta_slope;
	ops->enable_antenna_diversity = ar5008_enable_antenna_diversity;
	ops->init_baseband = ar5008_init_baseband;
	ops->disable_phy = ar5008_disable_phy;
	ops->set_rxchains = ar5008_set_rxchains;
	ops->noisefloor_calib = ar5008_do_noisefloor_calib;
	ops->do_calib = ar5008_do_calib;
	ops->next_calib = ar5008_next_calib;
	ops->hw_init = ar5008_hw_init;

	ops->set_noise_immunity_level = ar5008_set_noise_immunity_level;
	ops->enable_ofdm_weak_signal = ar5008_enable_ofdm_weak_signal;
	ops->disable_ofdm_weak_signal = ar5008_disable_ofdm_weak_signal;
	ops->set_cck_weak_signal = ar5008_set_cck_weak_signal;
	ops->set_firstep_level = ar5008_set_firstep_level;
	ops->set_spur_immunity_level = ar5008_set_spur_immunity_level;

	/* Set MAC registers offsets. */
	sc->sc_obs_off = AR_OBS;
	sc->sc_gpio_input_en_off = AR_GPIO_INPUT_EN_VAL;

	/* Bus-specific power-save/workaround configuration. */
	if (!(sc->sc_flags & ATHN_FLAG_PCIE))
		athn_config_nonpcie(sc);
	else
		athn_config_pcie(sc);

	/* Read entire ROM content in memory. */
	if ((error = ar5008_read_rom(sc)) != 0) {
		aprint_error_dev(sc->sc_dev, "could not read ROM\n");
		return error;
	}

	/* Get RF revision. */
	sc->sc_rf_rev = ar5416_get_rf_rev(sc);

	base = sc->sc_eep;
	/* version field: high nibble is the version, low 12 bits the rev. */
	eep_ver = (base->version >> 12) & 0xf;
	sc->sc_eep_rev = (base->version & 0xfff);
	if (eep_ver != AR_EEP_VER || sc->sc_eep_rev == 0) {
		aprint_error_dev(sc->sc_dev, "unsupported ROM version %d.%d\n",
		    eep_ver, sc->sc_eep_rev);
		return EINVAL;
	}

	/* Band support flags come straight from the ROM capability word. */
	if (base->opCapFlags & AR_OPFLAGS_11A)
		sc->sc_flags |= ATHN_FLAG_11A;
	if (base->opCapFlags & AR_OPFLAGS_11G)
		sc->sc_flags |= ATHN_FLAG_11G;
	if (base->opCapFlags & AR_OPFLAGS_11N)
		sc->sc_flags |= ATHN_FLAG_11N;

	IEEE80211_ADDR_COPY(ic->ic_myaddr, base->macAddr);

	/* Check if we have a hardware radio switch. */
	if (base->rfSilent & AR_EEP_RFSILENT_ENABLED) {
		sc->sc_flags |= ATHN_FLAG_RFSILENT;
		/* Get GPIO pin used by hardware radio switch. */
		sc->sc_rfsilent_pin = MS(base->rfSilent,
		    AR_EEP_RFSILENT_GPIO_SEL);
		/* Get polarity of hardware radio switch. */
		if (base->rfSilent & AR_EEP_RFSILENT_POLARITY)
			sc->sc_flags |= ATHN_FLAG_RFSILENT_REVERSED;
	}

	/* Get the number of HW key cache entries (ROM stores log2). */
	kc_entries_log = MS(base->deviceCap, AR_EEP_DEVCAP_KC_ENTRIES);
	sc->sc_kc_entries = kc_entries_log != 0 ?
	    1 << kc_entries_log : AR_KEYTABLE_SIZE;

	sc->sc_txchainmask = base->txMask;
	if (sc->sc_mac_ver == AR_SREV_VERSION_5416_PCI &&
	    !(base->opCapFlags & AR_OPFLAGS_11A)) {
		/*
		 * For single-band AR5416 PCI, use GPIO pin 0.
		 * NOTE(review): the GPIO level selects between Rx chain
		 * masks 0x5 and 0x7 — presumably a board-strap; confirm
		 * against the vendor HAL.
		 */
		sc->sc_rxchainmask = ar5008_gpio_read(sc, 0) ? 0x5 : 0x7;
	}
	else
		sc->sc_rxchainmask = base->rxMask;

	/* Family-specific setup hook (installed by the chip backend). */
	ops->setup(sc);
	return 0;
}
268
269 /*
270 * Read 16-bit word from ROM.
271 */
272 Static int
ar5008_read_eep_word(struct athn_softc * sc,uint32_t addr,uint16_t * val)273 ar5008_read_eep_word(struct athn_softc *sc, uint32_t addr, uint16_t *val)
274 {
275 uint32_t reg;
276 int ntries;
277
278 reg = AR_READ(sc, AR_EEPROM_OFFSET(addr));
279 for (ntries = 0; ntries < 1000; ntries++) {
280 reg = AR_READ(sc, AR_EEPROM_STATUS_DATA);
281 if (!(reg & (AR_EEPROM_STATUS_DATA_BUSY |
282 AR_EEPROM_STATUS_DATA_PROT_ACCESS))) {
283 *val = MS(reg, AR_EEPROM_STATUS_DATA_VAL);
284 return 0;
285 }
286 DELAY(10);
287 }
288 *val = 0xffff;
289 return ETIMEDOUT;
290 }
291
292 Static int
ar5008_read_rom(struct athn_softc * sc)293 ar5008_read_rom(struct athn_softc *sc)
294 {
295 uint32_t addr, end;
296 uint16_t magic, sum, *eep;
297 int need_swap = 0;
298 int error;
299
300 /* Determine ROM endianness. */
301 error = ar5008_read_eep_word(sc, AR_EEPROM_MAGIC_OFFSET, &magic);
302 if (error != 0)
303 return error;
304 if (magic != AR_EEPROM_MAGIC) {
305 if (magic != bswap16(AR_EEPROM_MAGIC)) {
306 DPRINTFN(DBG_INIT, sc,
307 "invalid ROM magic 0x%x != 0x%x\n",
308 magic, AR_EEPROM_MAGIC);
309 return EIO;
310 }
311 DPRINTFN(DBG_INIT, sc, "non-native ROM endianness\n");
312 need_swap = 1;
313 }
314
315 /* Allocate space to store ROM in host memory. */
316 sc->sc_eep = malloc(sc->sc_eep_size, M_DEVBUF, M_WAITOK);
317
318 /* Read entire ROM and compute checksum. */
319 sum = 0;
320 eep = sc->sc_eep;
321 end = sc->sc_eep_base + sc->sc_eep_size / sizeof(uint16_t);
322 for (addr = sc->sc_eep_base; addr < end; addr++, eep++) {
323 if ((error = ar5008_read_eep_word(sc, addr, eep)) != 0) {
324 DPRINTFN(DBG_INIT, sc,
325 "could not read ROM at 0x%x\n", addr);
326 return error;
327 }
328 if (need_swap)
329 *eep = bswap16(*eep);
330 sum ^= *eep;
331 }
332 if (sum != 0xffff) {
333 aprint_error_dev(sc->sc_dev, "bad ROM checksum 0x%04x\n", sum);
334 return EIO;
335 }
336 if (need_swap)
337 ar5008_swap_rom(sc);
338
339 return 0;
340 }
341
/*
 * Byte-swap the in-memory ROM image in place.  Only called when the ROM
 * endianness does not match the host (see ar5008_read_rom()).  The
 * common header fields are swapped here; the rest of the image is
 * handled by the family-specific swap_rom callback.
 */
Static void
ar5008_swap_rom(struct athn_softc *sc)
{
	struct ar_base_eep_header *base = sc->sc_eep;

	/* Swap common fields first. */
	base->length = bswap16(base->length);
	base->version = bswap16(base->version);
	base->regDmn[0] = bswap16(base->regDmn[0]);
	base->regDmn[1] = bswap16(base->regDmn[1]);
	base->rfSilent = bswap16(base->rfSilent);
	base->blueToothOptions = bswap16(base->blueToothOptions);
	base->deviceCap = bswap16(base->deviceCap);

	/* Swap device-dependent fields. */
	sc->sc_ops.swap_rom(sc);
}
359
360 /*
361 * Access to General Purpose Input/Output ports.
362 */
363 Static int
ar5008_gpio_read(struct athn_softc * sc,int pin)364 ar5008_gpio_read(struct athn_softc *sc, int pin)
365 {
366
367 KASSERT(pin < sc->sc_ngpiopins);
368 if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc))
369 return !((AR_READ(sc, AR7010_GPIO_IN) >> pin) & 1);
370 return (AR_READ(sc, AR_GPIO_IN_OUT) >> (sc->sc_ngpiopins + pin)) & 1;
371 }
372
373 Static void
ar5008_gpio_write(struct athn_softc * sc,int pin,int set)374 ar5008_gpio_write(struct athn_softc *sc, int pin, int set)
375 {
376 uint32_t reg;
377
378 KASSERT(pin < sc->sc_ngpiopins);
379
380 if (sc->sc_flags & ATHN_FLAG_USB)
381 set = !set; /* AR9271/AR7010 is reversed. */
382
383 if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
384 /* Special case for AR7010. */
385 reg = AR_READ(sc, AR7010_GPIO_OUT);
386 if (set)
387 reg |= 1 << pin;
388 else
389 reg &= ~(1 << pin);
390 AR_WRITE(sc, AR7010_GPIO_OUT, reg);
391 }
392 else {
393 reg = AR_READ(sc, AR_GPIO_IN_OUT);
394 if (set)
395 reg |= 1 << pin;
396 else
397 reg &= ~(1 << pin);
398 AR_WRITE(sc, AR_GPIO_IN_OUT, reg);
399 }
400 AR_WRITE_BARRIER(sc);
401 }
402
403 Static void
ar5008_gpio_config_input(struct athn_softc * sc,int pin)404 ar5008_gpio_config_input(struct athn_softc *sc, int pin)
405 {
406 uint32_t reg;
407
408 if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
409 /* Special case for AR7010. */
410 AR_SETBITS(sc, AR7010_GPIO_OE, 1 << pin);
411 }
412 else {
413 reg = AR_READ(sc, AR_GPIO_OE_OUT);
414 reg &= ~(AR_GPIO_OE_OUT_DRV_M << (pin * 2));
415 reg |= AR_GPIO_OE_OUT_DRV_NO << (pin * 2);
416 AR_WRITE(sc, AR_GPIO_OE_OUT, reg);
417 }
418 AR_WRITE_BARRIER(sc);
419 }
420
/*
 * Configure GPIO pin `pin' as an output and route internal signal
 * `type' to it through the GPIO output multiplexer.
 */
Static void
ar5008_gpio_config_output(struct athn_softc *sc, int pin, int type)
{
	uint32_t reg;
	int mux, off;

	if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
		/* Special case for AR7010. */
		AR_CLRBITS(sc, AR7010_GPIO_OE, 1 << pin);
		AR_WRITE_BARRIER(sc);
		return;
	}
	/* Each mux register holds six 5-bit signal-select fields. */
	mux = pin / 6;
	off = pin % 6;

	reg = AR_READ(sc, AR_GPIO_OUTPUT_MUX(mux));
	/*
	 * NOTE(review): on pre-AR9280-2.0 parts this rewrites bits 4-8
	 * of the first mux register one position higher — presumably a
	 * hardware quirk where fields 1+ are shifted by one bit; confirm
	 * against the vendor HAL before touching.
	 */
	if (!AR_SREV_9280_20_OR_LATER(sc) && mux == 0)
		reg = (reg & ~0x1f0) | (reg & 0x1f0) << 1;
	reg &= ~(0x1f << (off * 5));
	reg |= (type & 0x1f) << (off * 5);
	AR_WRITE(sc, AR_GPIO_OUTPUT_MUX(mux), reg);

	/* Set the pin's 2-bit drive field to "drive always" (output). */
	reg = AR_READ(sc, AR_GPIO_OE_OUT);
	reg &= ~(AR_GPIO_OE_OUT_DRV_M << (pin * 2));
	reg |= AR_GPIO_OE_OUT_DRV_ALL << (pin * 2);
	AR_WRITE(sc, AR_GPIO_OE_OUT, reg);
	AR_WRITE_BARRIER(sc);
}
449
/*
 * Set up the hardware radio (rfkill) switch: route the configured GPIO
 * pin to the baseband's RF-silent input and program its polarity.
 * Only meaningful when the ROM advertised AR_EEP_RFSILENT_ENABLED
 * (sc_rfsilent_pin was set in ar5008_attach()).
 */
Static void
ar5008_rfsilent_init(struct athn_softc *sc)
{
	uint32_t reg;

	/* Configure hardware radio switch. */
	AR_SETBITS(sc, AR_GPIO_INPUT_EN_VAL, AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
	reg = AR_READ(sc, AR_GPIO_INPUT_MUX2);
	reg = RW(reg, AR_GPIO_INPUT_MUX2_RFSILENT, 0);
	AR_WRITE(sc, AR_GPIO_INPUT_MUX2, reg);
	/* The switch pin must be an input feeding the baseband. */
	ar5008_gpio_config_input(sc, sc->sc_rfsilent_pin);
	AR_SETBITS(sc, AR_PHY_TEST, AR_PHY_TEST_RFSILENT_BB);
	/* Non-reversed switches interrupt on the default polarity. */
	if (!(sc->sc_flags & ATHN_FLAG_RFSILENT_REVERSED)) {
		AR_SETBITS(sc, AR_GPIO_INTR_POL,
		    AR_GPIO_INTR_POL_PIN(sc->sc_rfsilent_pin));
	}
	AR_WRITE_BARRIER(sc);
}
468
469 Static int
ar5008_dma_alloc(struct athn_softc * sc)470 ar5008_dma_alloc(struct athn_softc *sc)
471 {
472 int error;
473
474 error = ar5008_tx_alloc(sc);
475 if (error != 0)
476 return error;
477
478 error = ar5008_rx_alloc(sc);
479 if (error != 0)
480 return error;
481
482 return 0;
483 }
484
/*
 * Release the Tx and Rx DMA resources allocated by ar5008_dma_alloc().
 */
Static void
ar5008_dma_free(struct athn_softc *sc)
{

	ar5008_tx_free(sc);
	ar5008_rx_free(sc);
}
492
/*
 * Allocate the Tx descriptor pool (shared by all Tx queues) plus one
 * DMA map per Tx buffer, and put every buffer on the global free list.
 * Returns 0 on success; on any failure everything allocated so far is
 * released via ar5008_tx_free().
 */
Static int
ar5008_tx_alloc(struct athn_softc *sc)
{
	struct athn_tx_buf *bf;
	bus_size_t size;
	int error, nsegs, i;

	/*
	 * Allocate a pool of Tx descriptors shared between all Tx queues.
	 */
	size = ATHN_NTXBUFS * AR5008_MAX_SCATTER * sizeof(struct ar_tx_desc);

	error = bus_dmamap_create(sc->sc_dmat, size, 1, size, 0,
	    BUS_DMA_NOWAIT, &sc->sc_map);
	if (error != 0)
		goto fail;

	/* Descriptor memory is 4-byte aligned, in a single segment. */
	error = bus_dmamem_alloc(sc->sc_dmat, size, 4, 0, &sc->sc_seg, 1,
// XXX	    &nsegs, BUS_DMA_NOWAIT | BUS_DMA_ZERO);
	    &nsegs, BUS_DMA_NOWAIT);
	if (error != 0)
		goto fail;

	error = bus_dmamem_map(sc->sc_dmat, &sc->sc_seg, 1, size,
	    (void **)&sc->sc_descs, BUS_DMA_NOWAIT | BUS_DMA_COHERENT);
	if (error != 0)
		goto fail;

	error = bus_dmamap_load(sc->sc_dmat, sc->sc_map, sc->sc_descs,
	    size, NULL, BUS_DMA_NOWAIT);
	if (error != 0)
		goto fail;

	/*
	 * Carve the pool into ATHN_NTXBUFS buffers of AR5008_MAX_SCATTER
	 * descriptors each; record both CPU and device addresses.
	 */
	SIMPLEQ_INIT(&sc->sc_txbufs);
	for (i = 0; i < ATHN_NTXBUFS; i++) {
		bf = &sc->sc_txpool[i];

		error = bus_dmamap_create(sc->sc_dmat, ATHN_TXBUFSZ,
		    AR5008_MAX_SCATTER, ATHN_TXBUFSZ, 0, BUS_DMA_NOWAIT,
		    &bf->bf_map);
		if (error != 0) {
			aprint_error_dev(sc->sc_dev,
			    "could not create Tx buf DMA map\n");
			goto fail;
		}

		bf->bf_descs =
		    &((struct ar_tx_desc *)sc->sc_descs)[i * AR5008_MAX_SCATTER];
		bf->bf_daddr = sc->sc_map->dm_segs[0].ds_addr +
		    i * AR5008_MAX_SCATTER * sizeof(struct ar_tx_desc);

		SIMPLEQ_INSERT_TAIL(&sc->sc_txbufs, bf, bf_list);
	}
	return 0;
fail:
	ar5008_tx_free(sc);
	return error;
}
551
552 Static void
ar5008_tx_free(struct athn_softc * sc)553 ar5008_tx_free(struct athn_softc *sc)
554 {
555 struct athn_tx_buf *bf;
556 int i;
557
558 for (i = 0; i < ATHN_NTXBUFS; i++) {
559 bf = &sc->sc_txpool[i];
560
561 if (bf->bf_map != NULL)
562 bus_dmamap_destroy(sc->sc_dmat, bf->bf_map);
563 }
564 /* Free Tx descriptors. */
565 if (sc->sc_map != NULL) {
566 if (sc->sc_descs != NULL) {
567 bus_dmamap_unload(sc->sc_dmat, sc->sc_map);
568 bus_dmamem_unmap(sc->sc_dmat, (void *)sc->sc_descs,
569 ATHN_NTXBUFS * AR5008_MAX_SCATTER *
570 sizeof(struct ar_tx_desc));
571 bus_dmamem_free(sc->sc_dmat, &sc->sc_seg, 1);
572 }
573 bus_dmamap_destroy(sc->sc_dmat, sc->sc_map);
574 }
575 }
576
/*
 * Allocate the Rx descriptor ring, one DMA map per Rx buffer, and an
 * mbuf cluster for each buffer, pre-synced for device reads.
 * Returns 0 on success; on any failure everything allocated so far is
 * released via ar5008_rx_free().
 */
Static int
ar5008_rx_alloc(struct athn_softc *sc)
{
	struct athn_rxq *rxq = &sc->sc_rxq[0];
	struct athn_rx_buf *bf;
	struct ar_rx_desc *ds;
	bus_size_t size;
	int error, nsegs, i;

	/* Software state for the Rx buffers (zeroed so frees are safe). */
	rxq->bf = malloc(ATHN_NRXBUFS * sizeof(*bf), M_DEVBUF,
	    M_WAITOK | M_ZERO);

	size = ATHN_NRXBUFS * sizeof(struct ar_rx_desc);

	error = bus_dmamap_create(sc->sc_dmat, size, 1, size, 0,
	    BUS_DMA_NOWAIT, &rxq->map);
	if (error != 0)
		goto fail;

	error = bus_dmamem_alloc(sc->sc_dmat, size, 0, 0, &rxq->seg, 1,
// &nsegs, BUS_DMA_NOWAIT | BUS_DMA_ZERO);
	    &nsegs, BUS_DMA_NOWAIT);
	if (error != 0)
		goto fail;

	error = bus_dmamem_map(sc->sc_dmat, &rxq->seg, 1, size,
	    (void **)&rxq->descs, BUS_DMA_NOWAIT | BUS_DMA_COHERENT);
	if (error != 0)
		goto fail;

	error = bus_dmamap_load(sc->sc_dmat, rxq->map, rxq->descs,
	    size, NULL, BUS_DMA_NOWAIT);
	if (error != 0)
		goto fail;

	/* Attach a DMA map, a cluster and a descriptor to each buffer. */
	for (i = 0; i < ATHN_NRXBUFS; i++) {
		bf = &rxq->bf[i];
		ds = &((struct ar_rx_desc *)rxq->descs)[i];

		error = bus_dmamap_create(sc->sc_dmat, ATHN_RXBUFSZ, 1,
		    ATHN_RXBUFSZ, 0, BUS_DMA_NOWAIT | BUS_DMA_ALLOCNOW,
		    &bf->bf_map);
		if (error != 0) {
			aprint_error_dev(sc->sc_dev,
			    " could not create Rx buf DMA map\n");
			goto fail;
		}
		/*
		 * Assumes MCLGETI returns cache-line-size aligned buffers.
		 * XXX: does ours?
		 */
		bf->bf_m = MCLGETI(NULL, M_DONTWAIT, NULL, ATHN_RXBUFSZ);
		if (bf->bf_m == NULL) {
			aprint_error_dev(sc->sc_dev,
			    "could not allocate Rx mbuf\n");
			error = ENOBUFS;
			goto fail;
		}

		error = bus_dmamap_load(sc->sc_dmat, bf->bf_map,
		    mtod(bf->bf_m, void *), ATHN_RXBUFSZ, NULL,
		    BUS_DMA_NOWAIT | BUS_DMA_READ);
		if (error != 0) {
			aprint_error_dev(sc->sc_dev,
			    "could not DMA map Rx buffer\n");
			goto fail;
		}

		/* Hand the buffer to the device (it will write into it). */
		bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
		    BUS_DMASYNC_PREREAD);

		bf->bf_desc = ds;
		bf->bf_daddr = rxq->map->dm_segs[0].ds_addr +
		    i * sizeof(struct ar_rx_desc);
	}
	return 0;
fail:
	ar5008_rx_free(sc);
	return error;
}
657
658 Static void
ar5008_rx_free(struct athn_softc * sc)659 ar5008_rx_free(struct athn_softc *sc)
660 {
661 struct athn_rxq *rxq = &sc->sc_rxq[0];
662 struct athn_rx_buf *bf;
663 int i;
664
665 if (rxq->bf == NULL)
666 return;
667 for (i = 0; i < ATHN_NRXBUFS; i++) {
668 bf = &rxq->bf[i];
669
670 if (bf->bf_map != NULL)
671 bus_dmamap_destroy(sc->sc_dmat, bf->bf_map);
672 if (bf->bf_m != NULL)
673 m_freem(bf->bf_m);
674 }
675 free(rxq->bf, M_DEVBUF);
676
677 /* Free Rx descriptors. */
678 if (rxq->map != NULL) {
679 if (rxq->descs != NULL) {
680 bus_dmamap_unload(sc->sc_dmat, rxq->map);
681 bus_dmamem_unmap(sc->sc_dmat, (void *)rxq->descs,
682 ATHN_NRXBUFS * sizeof(struct ar_rx_desc));
683 bus_dmamem_free(sc->sc_dmat, &rxq->seg, 1);
684 }
685 bus_dmamap_destroy(sc->sc_dmat, rxq->map);
686 }
687 }
688
/*
 * (Re)initialize the Rx descriptor ring and start reception: link all
 * descriptors into a chain, point RXDP at the head and set the Rx
 * enable bit.
 */
Static void
ar5008_rx_enable(struct athn_softc *sc)
{
	struct athn_rxq *rxq = &sc->sc_rxq[0];
	struct athn_rx_buf *bf;
	struct ar_rx_desc *ds;
	int i;

	/* Setup and link Rx descriptors. */
	SIMPLEQ_INIT(&rxq->head);
	rxq->lastds = NULL;
	for (i = 0; i < ATHN_NRXBUFS; i++) {
		bf = &rxq->bf[i];
		ds = bf->bf_desc;

		/* Point the descriptor at its buffer's DMA address. */
		memset(ds, 0, sizeof(*ds));
		ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
		ds->ds_ctl1 = SM(AR_RXC1_BUF_LEN, ATHN_RXBUFSZ);

		/* Chain the previous descriptor to this one. */
		if (rxq->lastds != NULL) {
			((struct ar_rx_desc *)rxq->lastds)->ds_link =
			    bf->bf_daddr;
		}
		SIMPLEQ_INSERT_TAIL(&rxq->head, bf, bf_list);
		rxq->lastds = ds;
	}
	/* Make the descriptor chain visible to the device. */
	bus_dmamap_sync(sc->sc_dmat, rxq->map, 0, rxq->map->dm_mapsize,
	    BUS_DMASYNC_PREREAD);

	/* Enable Rx. */
	AR_WRITE(sc, AR_RXDP, SIMPLEQ_FIRST(&rxq->head)->bf_daddr);
	AR_WRITE(sc, AR_CR, AR_CR_RXE);
	AR_WRITE_BARRIER(sc);
}
723
/*
 * Fill in the radiotap header for a received frame and hand it to BPF.
 * Only called when a BPF listener is attached (sc_drvbpf != NULL).
 */
Static void
ar5008_rx_radiotap(struct athn_softc *sc, struct mbuf *m,
    struct ar_rx_desc *ds)
{
	struct athn_rx_radiotap_header *tap = &sc->sc_rxtap;
	struct ieee80211com *ic = &sc->sc_ic;
	uint64_t tsf;
	uint32_t tstamp;
	uint8_t rate;

	/* Extend the 15-bit timestamp from Rx descriptor to 64-bit TSF. */
	tstamp = ds->ds_status2;
	tsf = AR_READ(sc, AR_TSF_U32);
	tsf = tsf << 32 | AR_READ(sc, AR_TSF_L32);
	/* If the TSF low bits already wrapped past the stamp, step back. */
	if ((tsf & 0x7fff) < tstamp)
		tsf -= 0x8000;
	tsf = (tsf & ~0x7fff) | tstamp;

	tap->wr_flags = IEEE80211_RADIOTAP_F_FCS;
	tap->wr_tsft = htole64(tsf);
	tap->wr_chan_freq = htole16(ic->ic_curchan->ic_freq);
	tap->wr_chan_flags = htole16(ic->ic_curchan->ic_flags);
	tap->wr_dbm_antsignal = MS(ds->ds_status4, AR_RXS4_RSSI_COMBINED);
	/* XXX noise. */
	tap->wr_antenna = MS(ds->ds_status3, AR_RXS3_ANTENNA);
	tap->wr_rate = 0;	/* In case it can't be found below. */
	/* The rate field moved between chip revisions. */
	if (AR_SREV_5416_20_OR_LATER(sc))
		rate = MS(ds->ds_status0, AR_RXS0_RATE);
	else
		rate = MS(ds->ds_status3, AR_RXS3_RATE);
	if (rate & 0x80) {	/* HT. */
		/* Bit 7 set means HT MCS instead of rate. */
		tap->wr_rate = rate;
		if (!(ds->ds_status3 & AR_RXS3_GI))
			tap->wr_flags |= IEEE80211_RADIOTAP_F_SHORTGI;

	}
	else if (rate & 0x10) {	/* CCK. */
		/* Bit 2 flags a short-preamble CCK rate. */
		if (rate & 0x04)
			tap->wr_flags |= IEEE80211_RADIOTAP_F_SHORTPRE;
		/* Map hardware rate codes to 500 kb/s units. */
		switch (rate & ~0x14) {
		case 0xb: tap->wr_rate =  2; break;	/*  1 Mb/s */
		case 0xa: tap->wr_rate =  4; break;	/*  2 Mb/s */
		case 0x9: tap->wr_rate = 11; break;	/* 5.5 Mb/s */
		case 0x8: tap->wr_rate = 22; break;	/* 11 Mb/s */
		}
	}
	else {			/* OFDM. */
		/* Map hardware rate codes to 500 kb/s units. */
		switch (rate) {
		case 0xb: tap->wr_rate =  12; break;	/*  6 Mb/s */
		case 0xf: tap->wr_rate =  18; break;	/*  9 Mb/s */
		case 0xa: tap->wr_rate =  24; break;	/* 12 Mb/s */
		case 0xe: tap->wr_rate =  36; break;	/* 18 Mb/s */
		case 0x9: tap->wr_rate =  48; break;	/* 24 Mb/s */
		case 0xd: tap->wr_rate =  72; break;	/* 36 Mb/s */
		case 0x8: tap->wr_rate =  96; break;	/* 48 Mb/s */
		case 0xc: tap->wr_rate = 108; break;	/* 54 Mb/s */
		}
	}
	bpf_mtap2(sc->sc_drvbpf, tap, sc->sc_rxtap_len, m, BPF_D_IN);
}
785
786 static __inline int
ar5008_rx_process(struct athn_softc * sc)787 ar5008_rx_process(struct athn_softc *sc)
788 {
789 struct ieee80211com *ic = &sc->sc_ic;
790 struct ifnet *ifp = &sc->sc_if;
791 struct athn_rxq *rxq = &sc->sc_rxq[0];
792 struct athn_rx_buf *bf, *nbf;
793 struct ar_rx_desc *ds;
794 struct ieee80211_frame *wh;
795 struct ieee80211_node *ni;
796 struct mbuf *m, *m1;
797 u_int32_t rstamp;
798 int error, len, rssi, s;
799
800 bf = SIMPLEQ_FIRST(&rxq->head);
801 if (__predict_false(bf == NULL)) { /* Should not happen. */
802 aprint_error_dev(sc->sc_dev, "Rx queue is empty!\n");
803 return ENOENT;
804 }
805 ds = bf->bf_desc;
806
807 if (!(ds->ds_status8 & AR_RXS8_DONE)) {
808 /*
809 * On some parts, the status words can get corrupted
810 * (including the "done" bit), so we check the next
811 * descriptor "done" bit. If it is set, it is a good
812 * indication that the status words are corrupted, so
813 * we skip this descriptor and drop the frame.
814 */
815 nbf = SIMPLEQ_NEXT(bf, bf_list);
816 if (nbf != NULL &&
817 (((struct ar_rx_desc *)nbf->bf_desc)->ds_status8 &
818 AR_RXS8_DONE)) {
819 DPRINTFN(DBG_RX, sc,
820 "corrupted descriptor status=0x%x\n",
821 ds->ds_status8);
822 /* HW will not "move" RXDP in this case, so do it. */
823 AR_WRITE(sc, AR_RXDP, nbf->bf_daddr);
824 AR_WRITE_BARRIER(sc);
825 if_statinc(ifp, if_ierrors);
826 goto skip;
827 }
828 return EBUSY;
829 }
830
831 if (__predict_false(ds->ds_status1 & AR_RXS1_MORE)) {
832 /* Drop frames that span multiple Rx descriptors. */
833 DPRINTFN(DBG_RX, sc, "dropping split frame\n");
834 if_statinc(ifp, if_ierrors);
835 goto skip;
836 }
837 if (!(ds->ds_status8 & AR_RXS8_FRAME_OK)) {
838 if (ds->ds_status8 & AR_RXS8_CRC_ERR)
839 DPRINTFN(DBG_RX, sc, "CRC error\n");
840 else if (ds->ds_status8 & AR_RXS8_PHY_ERR)
841 DPRINTFN(DBG_RX, sc, "PHY error=0x%x\n",
842 MS(ds->ds_status8, AR_RXS8_PHY_ERR_CODE));
843 else if (ds->ds_status8 & AR_RXS8_DECRYPT_CRC_ERR)
844 DPRINTFN(DBG_RX, sc, "Decryption CRC error\n");
845 else if (ds->ds_status8 & AR_RXS8_MICHAEL_ERR) {
846 DPRINTFN(DBG_RX, sc, "Michael MIC failure\n");
847
848 len = MS(ds->ds_status1, AR_RXS1_DATA_LEN);
849 m = bf->bf_m;
850 m_set_rcvif(m, ifp);
851 m->m_pkthdr.len = m->m_len = len;
852 wh = mtod(m, struct ieee80211_frame *);
853
854 /* Report Michael MIC failures to net80211. */
855 ieee80211_notify_michael_failure(ic, wh, 0 /* XXX: keyix */);
856 }
857 if_statinc(ifp, if_ierrors);
858 goto skip;
859 }
860
861 len = MS(ds->ds_status1, AR_RXS1_DATA_LEN);
862 if (__predict_false(len < (int)IEEE80211_MIN_LEN || len > ATHN_RXBUFSZ)) {
863 DPRINTFN(DBG_RX, sc, "corrupted descriptor length=%d\n", len);
864 if_statinc(ifp, if_ierrors);
865 goto skip;
866 }
867
868 /* Allocate a new Rx buffer. */
869 m1 = MCLGETI(NULL, M_DONTWAIT, NULL, ATHN_RXBUFSZ);
870 if (__predict_false(m1 == NULL)) {
871 ic->ic_stats.is_rx_nobuf++;
872 if_statinc(ifp, if_ierrors);
873 goto skip;
874 }
875
876 /* Sync and unmap the old Rx buffer. */
877 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
878 BUS_DMASYNC_POSTREAD);
879 bus_dmamap_unload(sc->sc_dmat, bf->bf_map);
880
881 /* Map the new Rx buffer. */
882 error = bus_dmamap_load(sc->sc_dmat, bf->bf_map, mtod(m1, void *),
883 ATHN_RXBUFSZ, NULL, BUS_DMA_NOWAIT | BUS_DMA_READ);
884 if (__predict_false(error != 0)) {
885 m_freem(m1);
886
887 /* Remap the old Rx buffer or panic. */
888 error = bus_dmamap_load(sc->sc_dmat, bf->bf_map,
889 mtod(bf->bf_m, void *), ATHN_RXBUFSZ, NULL,
890 BUS_DMA_NOWAIT | BUS_DMA_READ);
891 KASSERT(error != 0);
892 if_statinc(ifp, if_ierrors);
893 goto skip;
894 }
895
896 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
897 BUS_DMASYNC_PREREAD);
898
899 /* Write physical address of new Rx buffer. */
900 ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
901
902 m = bf->bf_m;
903 bf->bf_m = m1;
904
905 /* Finalize mbuf. */
906 m_set_rcvif(m, ifp);
907 m->m_pkthdr.len = m->m_len = len;
908
909 s = splnet();
910
911 /* Grab a reference to the source node. */
912 wh = mtod(m, struct ieee80211_frame *);
913 ni = ieee80211_find_rxnode(ic, (struct ieee80211_frame_min *)wh);
914
915 /* Remove any HW padding after the 802.11 header. */
916 if (!(wh->i_fc[0] & IEEE80211_FC0_TYPE_CTL)) {
917 u_int hdrlen = ieee80211_anyhdrsize(wh);
918 if (hdrlen & 3) {
919 memmove((uint8_t *)wh + 2, wh, hdrlen);
920 m_adj(m, 2);
921 }
922 }
923 if (__predict_false(sc->sc_drvbpf != NULL))
924 ar5008_rx_radiotap(sc, m, ds);
925
926 /* Trim 802.11 FCS after radiotap. */
927 m_adj(m, -IEEE80211_CRC_LEN);
928
929 /* Send the frame to the 802.11 layer. */
930 rssi = MS(ds->ds_status4, AR_RXS4_RSSI_COMBINED);
931 rstamp = ds->ds_status2;
932 ieee80211_input(ic, m, ni, rssi, rstamp);
933
934 /* Node is no longer needed. */
935 ieee80211_free_node(ni);
936
937 splx(s);
938
939 skip:
940 /* Unlink this descriptor from head. */
941 SIMPLEQ_REMOVE_HEAD(&rxq->head, bf_list);
942 memset(&ds->ds_status0, 0, 36); /* XXX Really needed? */
943 ds->ds_status8 &= ~AR_RXS8_DONE;
944 ds->ds_link = 0;
945
946 /* Re-use this descriptor and link it to tail. */
947 if (__predict_true(!SIMPLEQ_EMPTY(&rxq->head)))
948 ((struct ar_rx_desc *)rxq->lastds)->ds_link = bf->bf_daddr;
949 else
950 AR_WRITE(sc, AR_RXDP, bf->bf_daddr);
951 SIMPLEQ_INSERT_TAIL(&rxq->head, bf, bf_list);
952 rxq->lastds = ds;
953
954 /* Re-enable Rx. */
955 AR_WRITE(sc, AR_CR, AR_CR_RXE);
956 AR_WRITE_BARRIER(sc);
957 return 0;
958 }
959
960 Static void
ar5008_rx_intr(struct athn_softc * sc)961 ar5008_rx_intr(struct athn_softc *sc)
962 {
963
964 while (ar5008_rx_process(sc) == 0)
965 continue;
966 }
967
/*
 * Reap one completed Tx buffer from queue `qid': account statistics,
 * update rate control, release the DMA map/mbuf/node and return the
 * buffer to the global free list.  Returns 0 when a buffer was
 * reclaimed, EBUSY when the head buffer is still owned by hardware,
 * or ENOENT when the queue is empty.
 */
Static int
ar5008_tx_process(struct athn_softc *sc, int qid)
{
	struct ifnet *ifp = &sc->sc_if;
	struct athn_txq *txq = &sc->sc_txq[qid];
	struct athn_node *an;
	struct athn_tx_buf *bf;
	struct ar_tx_desc *ds;
	uint8_t failcnt;

	bf = SIMPLEQ_FIRST(&txq->head);
	if (bf == NULL)
		return ENOENT;
	/* Get descriptor of last DMA segment. */
	ds = &((struct ar_tx_desc *)bf->bf_descs)[bf->bf_map->dm_nsegs - 1];

	/* Completion status is only valid once the done bit is set. */
	if (!(ds->ds_status9 & AR_TXS9_DONE))
		return EBUSY;

	SIMPLEQ_REMOVE_HEAD(&txq->head, bf_list);
	if_statinc(ifp, if_opackets);

	/* Progress was made; reset the watchdog. */
	sc->sc_tx_timer = 0;

	if (ds->ds_status1 & AR_TXS1_EXCESSIVE_RETRIES)
		if_statinc(ifp, if_oerrors);

	/* A FIFO underrun means the Tx trigger level is too low. */
	if (ds->ds_status1 & AR_TXS1_UNDERRUN)
		athn_inc_tx_trigger_level(sc);

	an = (struct athn_node *)bf->bf_ni;
	/*
	 * NB: the data fail count contains the number of un-acked tries
	 * for the final series used.  We must add the number of tries for
	 * each series that was fully processed.
	 */
	failcnt = MS(ds->ds_status1, AR_TXS1_DATA_FAIL_CNT);
	/* NB: Assume two tries per series. */
	failcnt += MS(ds->ds_status9, AR_TXS9_FINAL_IDX) * 2;

	/* Update rate control statistics. */
	an->amn.amn_txcnt++;
	if (failcnt > 0)
		an->amn.amn_retrycnt++;

	DPRINTFN(DBG_TX, sc, "Tx done qid=%d status1=%d fail count=%d\n",
	    qid, ds->ds_status1, failcnt);

	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
	    BUS_DMASYNC_POSTWRITE);
	bus_dmamap_unload(sc->sc_dmat, bf->bf_map);

	/* Release the mbuf and the node reference taken at Tx time. */
	m_freem(bf->bf_m);
	bf->bf_m = NULL;
	ieee80211_free_node(bf->bf_ni);
	bf->bf_ni = NULL;

	/* Link Tx buffer back to global free list. */
	SIMPLEQ_INSERT_TAIL(&sc->sc_txbufs, bf, bf_list);
	return 0;
}
1029
/*
 * Tx interrupt handler: build a bitmask of hardware queues with pending
 * completions from the shadow ISR registers, reclaim completed frames
 * on each, then restart output if Tx buffers became available.
 */
Static void
ar5008_tx_intr(struct athn_softc *sc)
{
	struct ifnet *ifp = &sc->sc_if;
	uint16_t mask = 0;
	uint32_t reg;
	int qid, s;

	s = splnet();

	/* Collect per-QCU TXOK/TXDESC bits. */
	reg = AR_READ(sc, AR_ISR_S0_S);
	mask |= MS(reg, AR_ISR_S0_QCU_TXOK);
	mask |= MS(reg, AR_ISR_S0_QCU_TXDESC);

	/* Collect per-QCU TXERR/TXEOL bits. */
	reg = AR_READ(sc, AR_ISR_S1_S);
	mask |= MS(reg, AR_ISR_S1_QCU_TXERR);
	mask |= MS(reg, AR_ISR_S1_QCU_TXEOL);

	DPRINTFN(DBG_TX, sc, "Tx interrupt mask=0x%x\n", mask);
	/* Service every queue whose bit is set in the mask. */
	for (qid = 0; mask != 0; mask >>= 1, qid++) {
		if (mask & 1)
			while (ar5008_tx_process(sc, qid) == 0);
	}
	/* If any Tx buffer is free again, unstall the interface queue. */
	if (!SIMPLEQ_EMPTY(&sc->sc_txbufs)) {
		ifp->if_flags &= ~IFF_OACTIVE;
		ifp->if_start(ifp); /* in softint */
	}

	splx(s);
}
1060
1061 #ifndef IEEE80211_STA_ONLY
1062 /*
1063 * Process Software Beacon Alert interrupts.
1064 */
/*
 * Software Beacon Alert handler (hostap/ibss only): build the next
 * beacon frame, load it on the beacon queue, queue buffered multicast
 * (CAB) traffic behind it, and kick Tx.
 * Returns EBUSY if the previous beacon is still pending, ENOBUFS if a
 * new beacon cannot be allocated, or a bus_dma error code.
 */
Static int
ar5008_swba_intr(struct athn_softc *sc)
{
	struct ieee80211com *ic = &sc->sc_ic;
	struct ifnet *ifp = &sc->sc_if;
	struct ieee80211_node *ni = ic->ic_bss;
	struct athn_tx_buf *bf = sc->sc_bcnbuf;
	struct ieee80211_frame *wh;
	struct ieee80211_beacon_offsets bo;
	struct ar_tx_desc *ds;
	struct mbuf *m;
	uint8_t ridx, hwrate;
	int error, totlen;

#if notyet
	if (ic->ic_tim_mcast_pending &&
	    IF_IS_EMPTY(&ni->ni_savedq) &&
	    SIMPLEQ_EMPTY(&sc->sc_txq[ATHN_QID_CAB].head))
		ic->ic_tim_mcast_pending = 0;
#endif
	/* Advance the DTIM countdown, wrapping at the DTIM period. */
	if (ic->ic_dtim_count == 0)
		ic->ic_dtim_count = ic->ic_dtim_period - 1;
	else
		ic->ic_dtim_count--;

	/* Make sure previous beacon has been sent. */
	if (athn_tx_pending(sc, ATHN_QID_BEACON)) {
		DPRINTFN(DBG_INTR, sc, "beacon stuck\n");
		return EBUSY;
	}
	/* Get new beacon. */
	m = ieee80211_beacon_alloc(ic, ic->ic_bss, &bo);
	if (__predict_false(m == NULL))
		return ENOBUFS;
	/* Assign sequence number. */
	/* XXX: use non-QoS tid? */
	wh = mtod(m, struct ieee80211_frame *);
	*(uint16_t *)&wh->i_seq[0] =
	    htole16(ic->ic_bss->ni_txseqs[0] << IEEE80211_SEQ_SEQ_SHIFT);
	ic->ic_bss->ni_txseqs[0]++;

	/* Unmap and free old beacon if any. */
	if (__predict_true(bf->bf_m != NULL)) {
		bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0,
		    bf->bf_map->dm_mapsize, BUS_DMASYNC_POSTWRITE);
		bus_dmamap_unload(sc->sc_dmat, bf->bf_map);
		m_freem(bf->bf_m);
		bf->bf_m = NULL;
	}
	/* DMA map new beacon. */
	error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
	    BUS_DMA_NOWAIT | BUS_DMA_WRITE);
	if (__predict_false(error != 0)) {
		m_freem(m);
		return error;
	}
	bf->bf_m = m;

	/* Setup Tx descriptor (simplified ar5008_tx()). */
	ds = bf->bf_descs;
	memset(ds, 0, sizeof(*ds));

	totlen = m->m_pkthdr.len + IEEE80211_CRC_LEN;
	ds->ds_ctl0 = SM(AR_TXC0_FRAME_LEN, totlen);
	ds->ds_ctl0 |= SM(AR_TXC0_XMIT_POWER, AR_MAX_RATE_POWER);
	ds->ds_ctl1 = SM(AR_TXC1_FRAME_TYPE, AR_FRAME_TYPE_BEACON);
	/* Beacons are never acknowledged. */
	ds->ds_ctl1 |= AR_TXC1_NO_ACK;
	ds->ds_ctl6 = SM(AR_TXC6_ENCR_TYPE, AR_ENCR_TYPE_CLEAR);

	/* Write number of tries. */
	ds->ds_ctl2 = SM(AR_TXC2_XMIT_DATA_TRIES0, 1);

	/* Write Tx rate: lowest basic rate for the current band. */
	ridx = (ic->ic_curmode == IEEE80211_MODE_11A) ?
	    ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK1;
	hwrate = athn_rates[ridx].hwrate;
	ds->ds_ctl3 = SM(AR_TXC3_XMIT_RATE0, hwrate);

	/* Write Tx chains. */
	ds->ds_ctl7 = SM(AR_TXC7_CHAIN_SEL0, sc->sc_txchainmask);

	ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
	/* Segment length must be a multiple of 4. */
	ds->ds_ctl1 |= SM(AR_TXC1_BUF_LEN,
	    (bf->bf_map->dm_segs[0].ds_len + 3) & ~3);

	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
	    BUS_DMASYNC_PREWRITE);

	/* Stop Tx DMA before putting the new beacon on the queue. */
	athn_stop_tx_dma(sc, ATHN_QID_BEACON);

	AR_WRITE(sc, AR_QTXDP(ATHN_QID_BEACON), bf->bf_daddr);

	/* Queue power-save buffered frames behind the beacon (CAB). */
	for(;;) {
		if (SIMPLEQ_EMPTY(&sc->sc_txbufs))
			break;

		IF_DEQUEUE(&ni->ni_savedq, m);
		if (m == NULL)
			break;
		if (!IF_IS_EMPTY(&ni->ni_savedq)) {
			/* more queued frames, set the more data bit */
			wh = mtod(m, struct ieee80211_frame *);
			wh->i_fc[1] |= IEEE80211_FC1_MORE_DATA;
		}

		/*
		 * NOTE(review): on tx() failure the mbuf is presumably
		 * freed by the tx op itself — confirm ownership contract.
		 */
		if (sc->sc_ops.tx(sc, m, ni, ATHN_TXFLAG_CAB) != 0) {
			ieee80211_free_node(ni);
			if_statinc(ifp, if_oerrors);
			break;
		}
	}

	/* Kick Tx. */
	AR_WRITE(sc, AR_Q_TXE, 1 << ATHN_QID_BEACON);
	AR_WRITE_BARRIER(sc);
	return 0;
}
1184 #endif
1185
/*
 * Read pending interrupt causes.  On success, stores the async (MAC)
 * cause bits in *intrp and the masked sync cause bits in *syncp and
 * returns 1; returns 0 if the interrupt is not for us.
 */
static int
ar5008_get_intr_status(struct athn_softc *sc, uint32_t *intrp, uint32_t *syncp)
{
	uint32_t intr, sync;

	/* Get pending interrupts. */
	intr = AR_READ(sc, AR_INTR_ASYNC_CAUSE);
	if (!(intr & AR_INTR_MAC_IRQ) || intr == AR_INTR_SPURIOUS) {
		/* No MAC interrupt; check the sync causes we care about. */
		intr = AR_READ(sc, AR_INTR_SYNC_CAUSE);
		if (intr == AR_INTR_SPURIOUS || (intr & sc->sc_isync) == 0)
			return 0;	/* Not for us. */
	}

	/* Only read AR_ISR while the RTC is awake. */
	if ((AR_READ(sc, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) &&
	    (AR_READ(sc, AR_RTC_STATUS) & AR_RTC_STATUS_M) == AR_RTC_STATUS_ON)
		intr = AR_READ(sc, AR_ISR);
	else
		intr = 0;
	sync = AR_READ(sc, AR_INTR_SYNC_CAUSE) & sc->sc_isync;
	if (intr == 0 && sync == 0)
		return 0;	/* Not for us. */

	*intrp = intr;
	*syncp = sync;
	return 1;
}
1212
1213
1214 Static int
ar5008_intr_status(struct athn_softc * sc)1215 ar5008_intr_status(struct athn_softc *sc)
1216 {
1217 uint32_t intr, sync;
1218
1219 return ar5008_get_intr_status(sc, &intr, &sync);
1220 }
1221
/*
 * Main interrupt handler: dispatch async (MAC) causes to the beacon,
 * Rx and Tx sub-handlers and acknowledge sync causes.  Returns 1 if
 * the interrupt was ours, 0 otherwise.
 */
Static int
ar5008_intr(struct athn_softc *sc)
{
	uint32_t intr, intr5, sync;
#ifndef IEEE80211_STA_ONLY
	int s;
#endif

	if (!ar5008_get_intr_status(sc, &intr, &sync))
		return 0;

	if (intr != 0) {
		if (intr & AR_ISR_BCNMISC) {
			/* Read secondary ISR 2 (TIM/TSFOOR details). */
			uint32_t intr2 = AR_READ(sc, AR_ISR_S2);
#if notyet
			if (intr2 & AR_ISR_S2_TIM)
				/* TBD */;
			if (intr2 & AR_ISR_S2_TSFOOR)
				/* TBD */;
#else
			__USE(intr2);
#endif
		}
		/* Read-and-clear the primary ISR. */
		intr = AR_READ(sc, AR_ISR_RAC);
		if (intr == AR_INTR_SPURIOUS)
			return 1;

#ifndef IEEE80211_STA_ONLY
		/* Software Beacon Alert: time to send the next beacon. */
		if (intr & AR_ISR_SWBA) {
			s = splnet();
			ar5008_swba_intr(sc);
			splx(s);
		}
#endif
		if (intr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
			ar5008_rx_intr(sc);
		if (intr & (AR_ISR_RXOK | AR_ISR_RXERR | AR_ISR_RXORN))
			ar5008_rx_intr(sc);

		if (intr & (AR_ISR_TXOK | AR_ISR_TXDESC |
		    AR_ISR_TXERR | AR_ISR_TXEOL))
			ar5008_tx_intr(sc);

		/* Generic timer status (shadow register). */
		intr5 = AR_READ(sc, AR_ISR_S5_S);
		if (intr & AR_ISR_GENTMR) {
			if (intr5 & AR_ISR_GENTMR) {
				DPRINTFN(DBG_INTR, sc,
				    "GENTMR trigger=%d thresh=%d\n",
				    MS(intr5, AR_ISR_S5_GENTIMER_TRIG),
				    MS(intr5, AR_ISR_S5_GENTIMER_THRESH));
			}
		}
#if notyet
		if (intr5 & AR_ISR_S5_TIM_TIMER) {
			/* TBD */;
		}
#endif
	}
	if (sync != 0) {
#if notyet
		if (sync &
		    (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR)) {
			/* TBD */;
		}
#endif
		/* Work around PCIe completion timeout by resetting HOSTIF. */
		if (sync & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
			AR_WRITE(sc, AR_RC, AR_RC_HOSTIF);
			AR_WRITE(sc, AR_RC, 0);
		}

		/* RF-kill switch toggled: power the radio off. */
		if ((sc->sc_flags & ATHN_FLAG_RFSILENT) &&
		    (sync & AR_INTR_SYNC_GPIO_PIN(sc->sc_rfsilent_pin))) {
			AR_WRITE(sc, AR_INTR_SYNC_ENABLE, 0);
			(void)AR_READ(sc, AR_INTR_SYNC_ENABLE);
			pmf_event_inject(sc->sc_dev, PMFE_RADIO_OFF);
		}

		/* Acknowledge sync causes (write-1-to-clear). */
		AR_WRITE(sc, AR_INTR_SYNC_CAUSE, sync);
		(void)AR_READ(sc, AR_INTR_SYNC_CAUSE);
	}
	return 1;
}
1304
/*
 * Transmit an 802.11 frame.  Encrypts if needed, selects the hardware
 * queue and rate series, maps the mbuf for DMA (linearizing it if it
 * has too many segments), builds the chain of Tx descriptors and
 * appends the buffer to the selected queue.  The caller must ensure a
 * Tx buffer is available on sc_txbufs (asserted below).
 * Returns 0 on success or an errno; the mbuf is consumed either way.
 */
Static int
ar5008_tx(struct athn_softc *sc, struct mbuf *m, struct ieee80211_node *ni,
    int txflags)
{
	struct ieee80211com *ic = &sc->sc_ic;
	struct ieee80211_key *k = NULL;
	struct ieee80211_frame *wh;
	struct athn_series series[4];
	struct ar_tx_desc *ds, *lastds;
	struct athn_txq *txq;
	struct athn_tx_buf *bf;
	struct athn_node *an = (void *)ni;
	struct mbuf *m1;
	uint16_t qos;
	uint8_t txpower, type, encrtype, ridx[4];
	int i, error, totlen, hasqos, qid;

	/* Grab a Tx buffer from our global free list. */
	bf = SIMPLEQ_FIRST(&sc->sc_txbufs);
	KASSERT(bf != NULL);

	/* Map 802.11 frame type to hardware frame type. */
	wh = mtod(m, struct ieee80211_frame *);
	if ((wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) ==
	    IEEE80211_FC0_TYPE_MGT) {
		/* NB: Beacons do not use ar5008_tx(). */
		if ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) ==
		    IEEE80211_FC0_SUBTYPE_PROBE_RESP)
			type = AR_FRAME_TYPE_PROBE_RESP;
		else if ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) ==
		    IEEE80211_FC0_SUBTYPE_ATIM)
			type = AR_FRAME_TYPE_ATIM;
		else
			type = AR_FRAME_TYPE_NORMAL;
	}
	else if ((wh->i_fc[0] &
	    (IEEE80211_FC0_TYPE_MASK | IEEE80211_FC0_SUBTYPE_MASK)) ==
	    (IEEE80211_FC0_TYPE_CTL | IEEE80211_FC0_SUBTYPE_PS_POLL)) {
		type = AR_FRAME_TYPE_PSPOLL;
	}
	else
		type = AR_FRAME_TYPE_NORMAL;

	/* Encrypt the frame if protection is required. */
	if (wh->i_fc[1] & IEEE80211_FC1_PROTECTED) {
		k = ieee80211_crypto_encap(ic, ni, m);
		if (k == NULL)
			return ENOBUFS;

		/* packet header may have moved, reset our local pointer */
		wh = mtod(m, struct ieee80211_frame *);
	}

	/* XXX 2-byte padding for QoS and 4-addr headers. */

	/* Select the HW Tx queue to use for this frame. */
	if ((hasqos = ieee80211_has_qos(wh))) {
#ifdef notyet_edca
		uint8_t tid;

		qos = ieee80211_get_qos(wh);
		tid = qos & IEEE80211_QOS_TID;
		qid = athn_ac2qid[ieee80211_up_to_ac(ic, tid)];
#else
		qos = ieee80211_get_qos(wh);
		qid = ATHN_QID_AC_BE;
#endif /* notyet_edca */
	}
	else if (type == AR_FRAME_TYPE_PSPOLL) {
		qos = 0;
		qid = ATHN_QID_PSPOLL;
	}
	else if (txflags & ATHN_TXFLAG_CAB) {
		qos = 0;
		qid = ATHN_QID_CAB;
	}
	else {
		qos = 0;
		qid = ATHN_QID_AC_BE;
	}
	txq = &sc->sc_txq[qid];

	/* Select the transmit rates to use for this frame. */
	if (IEEE80211_IS_MULTICAST(wh->i_addr1) ||
	    (wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) !=
	    IEEE80211_FC0_TYPE_DATA) {
		/* Use lowest rate for all tries. */
		ridx[0] = ridx[1] = ridx[2] = ridx[3] =
		    (ic->ic_curmode == IEEE80211_MODE_11A) ?
			ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK1;
	}
	else if (ic->ic_fixed_rate != -1) {
		/* Use same fixed rate for all tries. */
		ridx[0] = ridx[1] = ridx[2] = ridx[3] =
		    sc->sc_fixed_ridx;
	}
	else {
		int txrate = ni->ni_txrate;
		/* Use fallback table of the node. */
		for (i = 0; i < 4; i++) {
			ridx[i] = an->ridx[txrate];
			txrate = an->fallback[txrate];
		}
	}

	/* Feed the radiotap tap if a sniffer is attached. */
	if (__predict_false(sc->sc_drvbpf != NULL)) {
		struct athn_tx_radiotap_header *tap = &sc->sc_txtap;

		tap->wt_flags = 0;
		/* Use initial transmit rate. */
		tap->wt_rate = athn_rates[ridx[0]].rate;
		tap->wt_chan_freq = htole16(ic->ic_curchan->ic_freq);
		tap->wt_chan_flags = htole16(ic->ic_curchan->ic_flags);
		// XXX tap->wt_hwqueue = qid;
		if (ridx[0] != ATHN_RIDX_CCK1 &&
		    (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
			tap->wt_flags |= IEEE80211_RADIOTAP_F_SHORTPRE;

		bpf_mtap2(sc->sc_drvbpf, tap, sc->sc_txtap_len, m, BPF_D_OUT);
	}

	/* DMA map mbuf. */
	error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
	    BUS_DMA_NOWAIT | BUS_DMA_WRITE);
	if (__predict_false(error != 0)) {
		if (error != EFBIG) {
			aprint_error_dev(sc->sc_dev,
			    "can't map mbuf (error %d)\n", error);
			m_freem(m);
			return error;
		}
		/*
		 * DMA mapping requires too many DMA segments; linearize
		 * mbuf in kernel virtual address space and retry.
		 */
		MGETHDR(m1, M_DONTWAIT, MT_DATA);
		if (m1 == NULL) {
			m_freem(m);
			return ENOBUFS;
		}
		if (m->m_pkthdr.len > (int)MHLEN) {
			MCLGET(m1, M_DONTWAIT);
			if (!(m1->m_flags & M_EXT)) {
				m_freem(m);
				m_freem(m1);
				return ENOBUFS;
			}
		}
		/* Copy the whole chain into the single new mbuf. */
		m_copydata(m, 0, m->m_pkthdr.len, mtod(m1, void *));
		m1->m_pkthdr.len = m1->m_len = m->m_pkthdr.len;
		m_freem(m);
		m = m1;

		error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
		    BUS_DMA_NOWAIT | BUS_DMA_WRITE);
		if (error != 0) {
			aprint_error_dev(sc->sc_dev,
			    "can't map mbuf (error %d)\n", error);
			m_freem(m);
			return error;
		}
	}
	bf->bf_m = m;
	bf->bf_ni = ni;
	bf->bf_txflags = txflags;

	wh = mtod(m, struct ieee80211_frame *);

	totlen = m->m_pkthdr.len + IEEE80211_CRC_LEN;

	/* Clear all Tx descriptors that we will use. */
	memset(bf->bf_descs, 0, bf->bf_map->dm_nsegs * sizeof(*ds));

	/* Setup first Tx descriptor. */
	ds = bf->bf_descs;

	ds->ds_ctl0 = AR_TXC0_INTR_REQ | AR_TXC0_CLR_DEST_MASK;
	txpower = AR_MAX_RATE_POWER;	/* Get from per-rate registers. */
	ds->ds_ctl0 |= SM(AR_TXC0_XMIT_POWER, txpower);

	ds->ds_ctl1 = SM(AR_TXC1_FRAME_TYPE, type);

	/* No ACK for group frames or QoS no-ack policy. */
	if (IEEE80211_IS_MULTICAST(wh->i_addr1) ||
	    (hasqos && (qos & IEEE80211_QOS_ACKPOLICY_MASK) ==
	     IEEE80211_QOS_ACKPOLICY_NOACK))
		ds->ds_ctl1 |= AR_TXC1_NO_ACK;
#if notyet
	if (0 && k != NULL) {
		uintptr_t entry;

		/*
		 * Map 802.11 cipher to hardware encryption type and
		 * compute MIC+ICV overhead.
		 */
		totlen += k->wk_keylen;
		switch (k->wk_cipher->ic_cipher) {
		case IEEE80211_CIPHER_WEP:
			encrtype = AR_ENCR_TYPE_WEP;
			break;
		case IEEE80211_CIPHER_TKIP:
			encrtype = AR_ENCR_TYPE_TKIP;
			break;
		case IEEE80211_CIPHER_AES_OCB:
		case IEEE80211_CIPHER_AES_CCM:
			encrtype = AR_ENCR_TYPE_AES;
			break;
		default:
			panic("unsupported cipher");
		}
		/*
		 * NB: The key cache entry index is stored in the key
		 * private field when the key is installed.
		 */
		entry = (uintptr_t)k->k_priv;
		ds->ds_ctl1 |= SM(AR_TXC1_DEST_IDX, entry);
		ds->ds_ctl0 |= AR_TXC0_DEST_IDX_VALID;
	}
	else
#endif
	encrtype = AR_ENCR_TYPE_CLEAR;
	ds->ds_ctl6 = SM(AR_TXC6_ENCR_TYPE, encrtype);

	/* Check if frame must be protected using RTS/CTS or CTS-to-self. */
	if (!IEEE80211_IS_MULTICAST(wh->i_addr1)) {
		/* NB: Group frames are sent using CCK in 802.11b/g. */
		if (totlen > ic->ic_rtsthreshold) {
			ds->ds_ctl0 |= AR_TXC0_RTS_ENABLE;
		}
		else if ((ic->ic_flags & IEEE80211_F_USEPROT) &&
		    athn_rates[ridx[0]].phy == IEEE80211_T_OFDM) {
			if (ic->ic_protmode == IEEE80211_PROT_RTSCTS)
				ds->ds_ctl0 |= AR_TXC0_RTS_ENABLE;
			else if (ic->ic_protmode == IEEE80211_PROT_CTSONLY)
				ds->ds_ctl0 |= AR_TXC0_CTS_ENABLE;
		}
	}
	if (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE | AR_TXC0_CTS_ENABLE)) {
		/* Disable multi-rate retries when protection is used. */
		ridx[1] = ridx[2] = ridx[3] = ridx[0];
	}
	/* Setup multi-rate retries. */
	for (i = 0; i < 4; i++) {
		series[i].hwrate = athn_rates[ridx[i]].hwrate;
		/* Short preamble flag for CCK rates (except 1 Mb/s). */
		if (athn_rates[ridx[i]].phy == IEEE80211_T_DS &&
		    ridx[i] != ATHN_RIDX_CCK1 &&
		    (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
			series[i].hwrate |= 0x04;
		series[i].dur = 0;
	}
	if (!(ds->ds_ctl1 & AR_TXC1_NO_ACK)) {
		/* Compute duration for each series. */
		for (i = 0; i < 4; i++) {
			series[i].dur = athn_txtime(sc, IEEE80211_ACK_LEN,
			    athn_rates[ridx[i]].rspridx, ic->ic_flags);
		}
	}

	/* Write number of tries for each series. */
	ds->ds_ctl2 =
	    SM(AR_TXC2_XMIT_DATA_TRIES0, 2) |
	    SM(AR_TXC2_XMIT_DATA_TRIES1, 2) |
	    SM(AR_TXC2_XMIT_DATA_TRIES2, 2) |
	    SM(AR_TXC2_XMIT_DATA_TRIES3, 4);

	/* Tell HW to update duration field in 802.11 header. */
	if (type != AR_FRAME_TYPE_PSPOLL)
		ds->ds_ctl2 |= AR_TXC2_DUR_UPDATE_ENA;

	/* Write Tx rate for each series. */
	ds->ds_ctl3 =
	    SM(AR_TXC3_XMIT_RATE0, series[0].hwrate) |
	    SM(AR_TXC3_XMIT_RATE1, series[1].hwrate) |
	    SM(AR_TXC3_XMIT_RATE2, series[2].hwrate) |
	    SM(AR_TXC3_XMIT_RATE3, series[3].hwrate);

	/* Write duration for each series. */
	ds->ds_ctl4 =
	    SM(AR_TXC4_PACKET_DUR0, series[0].dur) |
	    SM(AR_TXC4_PACKET_DUR1, series[1].dur);
	ds->ds_ctl5 =
	    SM(AR_TXC5_PACKET_DUR2, series[2].dur) |
	    SM(AR_TXC5_PACKET_DUR3, series[3].dur);

	/* Use the same Tx chains for all tries. */
	ds->ds_ctl7 =
	    SM(AR_TXC7_CHAIN_SEL0, sc->sc_txchainmask) |
	    SM(AR_TXC7_CHAIN_SEL1, sc->sc_txchainmask) |
	    SM(AR_TXC7_CHAIN_SEL2, sc->sc_txchainmask) |
	    SM(AR_TXC7_CHAIN_SEL3, sc->sc_txchainmask);
#ifdef notyet
#ifndef IEEE80211_NO_HT
	/* Use the same short GI setting for all tries. */
	if (ic->ic_flags & IEEE80211_F_SHGI)
		ds->ds_ctl7 |= AR_TXC7_GI0123;
	/* Use the same channel width for all tries. */
	if (ic->ic_flags & IEEE80211_F_CBW40)
		ds->ds_ctl7 |= AR_TXC7_2040_0123;
#endif
#endif

	if (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE | AR_TXC0_CTS_ENABLE)) {
		uint8_t protridx, hwrate;
		uint16_t dur = 0;

		/* Use the same protection mode for all tries. */
		if (ds->ds_ctl0 & AR_TXC0_RTS_ENABLE) {
			ds->ds_ctl4 |= AR_TXC4_RTSCTS_QUAL01;
			ds->ds_ctl5 |= AR_TXC5_RTSCTS_QUAL23;
		}
		/* Select protection rate (suboptimal but ok). */
		protridx = (ic->ic_curmode == IEEE80211_MODE_11A) ?
		    ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK2;
		if (ds->ds_ctl0 & AR_TXC0_RTS_ENABLE) {
			/* Account for CTS duration. */
			dur += athn_txtime(sc, IEEE80211_ACK_LEN,
			    athn_rates[protridx].rspridx, ic->ic_flags);
		}
		dur += athn_txtime(sc, totlen, ridx[0], ic->ic_flags);
		if (!(ds->ds_ctl1 & AR_TXC1_NO_ACK)) {
			/* Account for ACK duration. */
			dur += athn_txtime(sc, IEEE80211_ACK_LEN,
			    athn_rates[ridx[0]].rspridx, ic->ic_flags);
		}
		/* Write protection frame duration and rate. */
		ds->ds_ctl2 |= SM(AR_TXC2_BURST_DUR, dur);
		hwrate = athn_rates[protridx].hwrate;
		if (protridx == ATHN_RIDX_CCK2 &&
		    (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
			hwrate |= 0x04;
		ds->ds_ctl7 |= SM(AR_TXC7_RTSCTS_RATE, hwrate);
	}

	/* Finalize first Tx descriptor and fill others (if any). */
	ds->ds_ctl0 |= SM(AR_TXC0_FRAME_LEN, totlen);

	lastds = NULL;	/* XXX: gcc */
	/* One descriptor per DMA segment, chained via ds_link. */
	for (i = 0; i < bf->bf_map->dm_nsegs; i++, ds++) {
		ds->ds_data = bf->bf_map->dm_segs[i].ds_addr;
		ds->ds_ctl1 |= SM(AR_TXC1_BUF_LEN,
		    bf->bf_map->dm_segs[i].ds_len);

		if (i != bf->bf_map->dm_nsegs - 1)
			ds->ds_ctl1 |= AR_TXC1_MORE;
		ds->ds_link = 0;

		/* Chain Tx descriptor. */
		if (i != 0)
			lastds->ds_link = bf->bf_daddr + i * sizeof(*ds);
		lastds = ds;
	}
	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
	    BUS_DMASYNC_PREWRITE);

	/* Append to the queue: link after the tail, or start DMA fresh. */
	if (!SIMPLEQ_EMPTY(&txq->head))
		((struct ar_tx_desc *)txq->lastds)->ds_link = bf->bf_daddr;
	else
		AR_WRITE(sc, AR_QTXDP(qid), bf->bf_daddr);
	txq->lastds = lastds;
	SIMPLEQ_REMOVE_HEAD(&sc->sc_txbufs, bf_list);
	SIMPLEQ_INSERT_TAIL(&txq->head, bf, bf_list);

	ds = bf->bf_descs;
	DPRINTFN(DBG_TX, sc,
	    "Tx qid=%d nsegs=%d ctl0=0x%x ctl1=0x%x ctl3=0x%x\n",
	    qid, bf->bf_map->dm_nsegs, ds->ds_ctl0, ds->ds_ctl1, ds->ds_ctl3);

	/* Kick Tx. */
	AR_WRITE(sc, AR_Q_TXE, 1 << qid);
	AR_WRITE_BARRIER(sc);
	return 0;
}
1675
1676 Static void
ar5008_set_rf_mode(struct athn_softc * sc,struct ieee80211_channel * c)1677 ar5008_set_rf_mode(struct athn_softc *sc, struct ieee80211_channel *c)
1678 {
1679 uint32_t reg;
1680
1681 reg = IEEE80211_IS_CHAN_2GHZ(c) ?
1682 AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
1683 if (!AR_SREV_9280_10_OR_LATER(sc)) {
1684 reg |= IEEE80211_IS_CHAN_2GHZ(c) ?
1685 AR_PHY_MODE_RF2GHZ : AR_PHY_MODE_RF5GHZ;
1686 }
1687 else if (IEEE80211_IS_CHAN_5GHZ(c) &&
1688 (sc->sc_flags & ATHN_FLAG_FAST_PLL_CLOCK)) {
1689 reg |= AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE;
1690 }
1691 AR_WRITE(sc, AR_PHY_MODE, reg);
1692 AR_WRITE_BARRIER(sc);
1693 }
1694
1695 static __inline uint32_t
ar5008_synth_delay(struct athn_softc * sc)1696 ar5008_synth_delay(struct athn_softc *sc)
1697 {
1698 uint32_t synth_delay;
1699
1700 synth_delay = MS(AR_READ(sc, AR_PHY_RX_DELAY), AR_PHY_RX_DELAY_DELAY);
1701 if (sc->sc_ic.ic_curmode == IEEE80211_MODE_11B)
1702 synth_delay = (synth_delay * 4) / 22;
1703 else
1704 synth_delay = synth_delay / 10; /* in 100ns steps */
1705 return synth_delay;
1706 }
1707
1708 Static int
ar5008_rf_bus_request(struct athn_softc * sc)1709 ar5008_rf_bus_request(struct athn_softc *sc)
1710 {
1711 int ntries;
1712
1713 /* Request RF Bus grant. */
1714 AR_WRITE(sc, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
1715 for (ntries = 0; ntries < 10000; ntries++) {
1716 if (AR_READ(sc, AR_PHY_RFBUS_GRANT) & AR_PHY_RFBUS_GRANT_EN)
1717 return 0;
1718 DELAY(10);
1719 }
1720 DPRINTFN(DBG_RF, sc, "could not kill baseband Rx");
1721 return ETIMEDOUT;
1722 }
1723
1724 Static void
ar5008_rf_bus_release(struct athn_softc * sc)1725 ar5008_rf_bus_release(struct athn_softc *sc)
1726 {
1727
1728 /* Wait for the synthesizer to settle. */
1729 DELAY(AR_BASE_PHY_ACTIVE_DELAY + ar5008_synth_delay(sc));
1730
1731 /* Release the RF Bus grant. */
1732 AR_WRITE(sc, AR_PHY_RFBUS_REQ, 0);
1733 AR_WRITE_BARRIER(sc);
1734 }
1735
/*
 * Configure the PHY for channel `c' (with optional HT40 extension
 * channel `extc'): frame control, 20/40 mode and Tx timeouts.
 */
Static void
ar5008_set_phy(struct athn_softc *sc, struct ieee80211_channel *c,
    struct ieee80211_channel *extc)
{
	uint32_t phy;

	/* AR9285+ must preserve the DAC FIFO enable bit. */
	if (AR_SREV_9285_10_OR_LATER(sc))
		phy = AR_READ(sc, AR_PHY_TURBO) & AR_PHY_FC_ENABLE_DAC_FIFO;
	else
		phy = 0;
	phy |= AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40 |
	    AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH;
#ifndef IEEE80211_NO_HT
	if (extc != NULL) {
		phy |= AR_PHY_FC_DYN2040_EN;
		if (extc > c)	/* XXX */
			phy |= AR_PHY_FC_DYN2040_PRI_CH;
	}
#endif
	AR_WRITE(sc, AR_PHY_TURBO, phy);

	/* Joined Rx clear only matters in 20/40 operation. */
	AR_WRITE(sc, AR_2040_MODE,
	    (extc != NULL) ? AR_2040_JOINED_RX_CLEAR : 0);

	/* Set global transmit timeout. */
	AR_WRITE(sc, AR_GTXTO, SM(AR_GTXTO_TIMEOUT_LIMIT, 25));
	/* Set carrier sense timeout. */
	AR_WRITE(sc, AR_CST, SM(AR_CST_TIMEOUT_LIMIT, 15));
	AR_WRITE_BARRIER(sc);
}
1766
1767 Static void
ar5008_set_delta_slope(struct athn_softc * sc,struct ieee80211_channel * c,struct ieee80211_channel * extc)1768 ar5008_set_delta_slope(struct athn_softc *sc, struct ieee80211_channel *c,
1769 struct ieee80211_channel *extc)
1770 {
1771 uint32_t coeff, exp, man, reg;
1772
1773 /* Set Delta Slope (exponent and mantissa). */
1774 coeff = (100 << 24) / c->ic_freq;
1775 athn_get_delta_slope(coeff, &exp, &man);
1776 DPRINTFN(DBG_RX, sc, "delta slope coeff exp=%u man=%u\n", exp, man);
1777
1778 reg = AR_READ(sc, AR_PHY_TIMING3);
1779 reg = RW(reg, AR_PHY_TIMING3_DSC_EXP, exp);
1780 reg = RW(reg, AR_PHY_TIMING3_DSC_MAN, man);
1781 AR_WRITE(sc, AR_PHY_TIMING3, reg);
1782
1783 /* For Short GI, coeff is 9/10 that of normal coeff. */
1784 coeff = (9 * coeff) / 10;
1785 athn_get_delta_slope(coeff, &exp, &man);
1786 DPRINTFN(DBG_RX, sc, "delta slope coeff exp=%u man=%u\n", exp, man);
1787
1788 reg = AR_READ(sc, AR_PHY_HALFGI);
1789 reg = RW(reg, AR_PHY_HALFGI_DSC_EXP, exp);
1790 reg = RW(reg, AR_PHY_HALFGI_DSC_MAN, man);
1791 AR_WRITE(sc, AR_PHY_HALFGI, reg);
1792 AR_WRITE_BARRIER(sc);
1793 }
1794
/*
 * Enable fast baseband antenna diversity.
 */
Static void
ar5008_enable_antenna_diversity(struct athn_softc *sc)
{

	AR_SETBITS(sc, AR_PHY_CCK_DETECT,
	    AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV);
	AR_WRITE_BARRIER(sc);
}
1803
/*
 * Activate the PHY and wait for the baseband and synthesizer to come
 * up.  The delay is computed before activation on purpose.
 */
Static void
ar5008_init_baseband(struct athn_softc *sc)
{
	uint32_t synth_delay;

	synth_delay = ar5008_synth_delay(sc);
	/* Activate the PHY (includes baseband activate and synthesizer on). */
	AR_WRITE(sc, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
	AR_WRITE_BARRIER(sc);
	DELAY(AR_BASE_PHY_ACTIVE_DELAY + synth_delay);
}
1815
/*
 * Deactivate the PHY.
 */
Static void
ar5008_disable_phy(struct athn_softc *sc)
{

	AR_WRITE(sc, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
	AR_WRITE_BARRIER(sc);
}
1823
/*
 * Initialize Rx/Tx chain masks, applying chip-revision workarounds.
 */
Static void
ar5008_init_chains(struct athn_softc *sc)
{

	/* Chain mask 0x5 (chains 0+2) needs the alternate-chain swap. */
	if (sc->sc_rxchainmask == 0x5 || sc->sc_txchainmask == 0x5)
		AR_SETBITS(sc, AR_PHY_ANALOG_SWAP, AR_PHY_SWAP_ALT_CHAIN);

	/* Setup chain masks. */
	/* AR9160 and earlier: force all three chains for 2-chain masks. */
	if (sc->sc_mac_ver <= AR_SREV_VERSION_9160 &&
	    (sc->sc_rxchainmask == 0x3 || sc->sc_rxchainmask == 0x5)) {
		AR_WRITE(sc, AR_PHY_RX_CHAINMASK,  0x7);
		AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, 0x7);
	}
	else {
		AR_WRITE(sc, AR_PHY_RX_CHAINMASK,  sc->sc_rxchainmask);
		AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, sc->sc_rxchainmask);
	}
	AR_WRITE(sc, AR_SELFGEN_MASK, sc->sc_txchainmask);
	AR_WRITE_BARRIER(sc);
}
1844
/*
 * Re-program the Rx and calibration chain masks (2-chain masks only).
 */
Static void
ar5008_set_rxchains(struct athn_softc *sc)
{

	if (sc->sc_rxchainmask == 0x3 || sc->sc_rxchainmask == 0x5) {
		AR_WRITE(sc, AR_PHY_RX_CHAINMASK,  sc->sc_rxchainmask);
		AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, sc->sc_rxchainmask);
		AR_WRITE_BARRIER(sc);
	}
}
1855
1856 #ifdef notused
/*
 * Read per-chain noisefloor values (control and extension channel)
 * from the CCA registers into nf[] and nf_ext[].
 */
Static void
ar5008_read_noisefloor(struct athn_softc *sc, int16_t *nf, int16_t *nf_ext)
{
/* Sign-extends 9-bit value (assumes upper bits are zeroes). */
#define SIGN_EXT(v)	(((v) ^ 0x100) - 0x100)
	uint32_t reg;
	int i;

	for (i = 0; i < sc->sc_nrxchains; i++) {
		reg = AR_READ(sc, AR_PHY_CCA(i));
		/* Field layout differs on AR9280 and later. */
		if (AR_SREV_9280_10_OR_LATER(sc))
			nf[i] = MS(reg, AR9280_PHY_MINCCA_PWR);
		else
			nf[i] = MS(reg, AR_PHY_MINCCA_PWR);
		nf[i] = SIGN_EXT(nf[i]);

		reg = AR_READ(sc, AR_PHY_EXT_CCA(i));
		if (AR_SREV_9280_10_OR_LATER(sc))
			nf_ext[i] = MS(reg, AR9280_PHY_EXT_MINCCA_PWR);
		else
			nf_ext[i] = MS(reg, AR_PHY_EXT_MINCCA_PWR);
		nf_ext[i] = SIGN_EXT(nf_ext[i]);
	}
#undef SIGN_EXT
}
1882 #endif /* notused */
1883
1884 #ifdef notused
/*
 * Write per-chain noisefloor values (control and extension channel)
 * into the CCA registers.
 */
Static void
ar5008_write_noisefloor(struct athn_softc *sc, int16_t *nf, int16_t *nf_ext)
{
	uint32_t reg;
	int i;

	for (i = 0; i < sc->sc_nrxchains; i++) {
		reg = AR_READ(sc, AR_PHY_CCA(i));
		reg = RW(reg, AR_PHY_MAXCCA_PWR, nf[i]);
		AR_WRITE(sc, AR_PHY_CCA(i), reg);

		reg = AR_READ(sc, AR_PHY_EXT_CCA(i));
		reg = RW(reg, AR_PHY_EXT_MAXCCA_PWR, nf_ext[i]);
		AR_WRITE(sc, AR_PHY_EXT_CCA(i), reg);
	}
	AR_WRITE_BARRIER(sc);
}
1902 #endif /* notused */
1903
1904 #ifdef notused
/*
 * Harvest the results of a completed noisefloor calibration into the
 * circular history buffer.  No-op if the calibration is still running.
 */
Static void
ar5008_get_noisefloor(struct athn_softc *sc, struct ieee80211_channel *c)
{
	int16_t nf[AR_MAX_CHAINS], nf_ext[AR_MAX_CHAINS];
	int i;

	/* The NF bit self-clears when calibration completes. */
	if (AR_READ(sc, AR_PHY_AGC_CONTROL) & AR_PHY_AGC_CONTROL_NF) {
		/* Noisefloor calibration not finished. */
		return;
	}
	/* Noisefloor calibration is finished. */
	ar5008_read_noisefloor(sc, nf, nf_ext);

	/* Update noisefloor history. */
	for (i = 0; i < sc->sc_nrxchains; i++) {
		sc->sc_nf_hist[sc->sc_nf_hist_cur].nf[i] = nf[i];
		sc->sc_nf_hist[sc->sc_nf_hist_cur].nf_ext[i] = nf_ext[i];
	}
	if (++sc->sc_nf_hist_cur >= ATHN_NF_CAL_HIST_MAX)
		sc->sc_nf_hist_cur = 0;
}
1926 #endif /* notused */
1927
1928 #ifdef notused
/*
 * Load the filtered noisefloor values into the baseband, then restore
 * the registers to their initial (max) values for the next cycle.
 */
Static void
ar5008_bb_load_noisefloor(struct athn_softc *sc)
{
	int16_t nf[AR_MAX_CHAINS], nf_ext[AR_MAX_CHAINS];
	int i, ntries;

	/* Write filtered noisefloor values. */
	for (i = 0; i < sc->sc_nrxchains; i++) {
		nf[i] = sc->sc_nf_priv[i] * 2;
		nf_ext[i] = sc->sc_nf_ext_priv[i] * 2;
	}
	ar5008_write_noisefloor(sc, nf, nf_ext);

	/* Load filtered noisefloor values into baseband. */
	AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_ENABLE_NF);
	AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NO_UPDATE_NF);
	AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
	/* Wait for load to complete. */
	for (ntries = 0; ntries < 1000; ntries++) {
		if (!(AR_READ(sc, AR_PHY_AGC_CONTROL) & AR_PHY_AGC_CONTROL_NF))
			break;
		DELAY(50);
	}
	if (ntries == 1000) {
		DPRINTFN(DBG_RF, sc, "failed to load noisefloor values\n");
		return;
	}

	/* Restore noisefloor values to initial (max) values. */
	for (i = 0; i < AR_MAX_CHAINS; i++)
		nf[i] = nf_ext[i] = -50 * 2;
	ar5008_write_noisefloor(sc, nf, nf_ext);
}
1962 #endif /* notused */
1963
1964 #ifdef notused
/*
 * Start a noisefloor calibration without updating the baseband's
 * stored noisefloor (NO_UPDATE_NF set).
 */
Static void
ar5008_noisefloor_calib(struct athn_softc *sc)
{

	AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_ENABLE_NF);
	AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NO_UPDATE_NF);
	AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
	AR_WRITE_BARRIER(sc);
}
1974 #endif /* notused */
1975
/*
 * Kick off a noisefloor calibration cycle.
 */
Static void
ar5008_do_noisefloor_calib(struct athn_softc *sc)
{

	AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
	AR_WRITE_BARRIER(sc);
}
1983
/*
 * Start one periodic calibration sample for the highest-priority
 * pending calibration type (ADC gain, ADC DC offset, or IQ).
 */
Static void
ar5008_do_calib(struct athn_softc *sc)
{
	uint32_t mode, reg;
	int log;

	/* Set the IQ calibration sample count (log2). */
	reg = AR_READ(sc, AR_PHY_TIMING_CTRL4_0);
	log = AR_SREV_9280_10_OR_LATER(sc) ? 10 : 2;
	reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCAL_LOG_COUNT_MAX, log);
	AR_WRITE(sc, AR_PHY_TIMING_CTRL4_0, reg);

	if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_GAIN)
		mode = AR_PHY_CALMODE_ADC_GAIN;
	else if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_DC)
		mode = AR_PHY_CALMODE_ADC_DC_PER;
	else	/* ATHN_CAL_IQ */
		mode = AR_PHY_CALMODE_IQ;
	AR_WRITE(sc, AR_PHY_CALMODE, mode);

	DPRINTFN(DBG_RF, sc, "starting calibration mode=0x%x\n", mode);
	/* DO_CAL self-clears when the sample completes. */
	AR_SETBITS(sc, AR_PHY_TIMING_CTRL4_0, AR_PHY_TIMING_CTRL4_DO_CAL);
	AR_WRITE_BARRIER(sc);
}
2007
2008 Static void
ar5008_next_calib(struct athn_softc * sc)2009 ar5008_next_calib(struct athn_softc *sc)
2010 {
2011
2012 /* Check if we have any calibration in progress. */
2013 if (sc->sc_cur_calib_mask != 0) {
2014 if (!(AR_READ(sc, AR_PHY_TIMING_CTRL4_0) &
2015 AR_PHY_TIMING_CTRL4_DO_CAL)) {
2016 /* Calibration completed for current sample. */
2017 if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_GAIN)
2018 ar5008_calib_adc_gain(sc);
2019 else if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_DC)
2020 ar5008_calib_adc_dc_off(sc);
2021 else /* ATHN_CAL_IQ */
2022 ar5008_calib_iq(sc);
2023 }
2024 }
2025 }
2026
/*
 * Post-process one IQ mismatch calibration run: accumulate the
 * hardware measurements for every chain and, once enough samples have
 * been collected, derive and program the per-chain I/Q correction
 * coefficients.
 */
Static void
ar5008_calib_iq(struct athn_softc *sc)
{
	struct athn_iq_cal *cal;
	uint32_t reg, i_coff_denom, q_coff_denom;
	int32_t i_coff, q_coff;
	int i, iq_corr_neg;

	for (i = 0; i < AR_MAX_CHAINS; i++) {
		cal = &sc->sc_calib.iq[i];

		/* Accumulate IQ calibration measures (clear on read). */
		cal->pwr_meas_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
		cal->pwr_meas_q += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
		cal->iq_corr_meas +=
		    (int32_t)AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
	}
	/*
	 * Pre-AR9280 parts average over AR_CAL_SAMPLES runs; start
	 * another measurement until enough samples were accumulated.
	 */
	if (!AR_SREV_9280_10_OR_LATER(sc) &&
	    ++sc->sc_calib.nsamples < AR_CAL_SAMPLES) {
		/* Not enough samples accumulated, continue. */
		ar5008_do_calib(sc);
		return;
	}

	for (i = 0; i < sc->sc_nrxchains; i++) {
		cal = &sc->sc_calib.iq[i];

		if (cal->pwr_meas_q == 0)
			continue;

		/* Remember the sign of the correlation measure. */
		if ((iq_corr_neg = cal->iq_corr_meas) < 0)
			cal->iq_corr_meas = -cal->iq_corr_meas;

		i_coff_denom =
		    (cal->pwr_meas_i / 2 + cal->pwr_meas_q / 2) / 128;
		q_coff_denom = cal->pwr_meas_q / 64;

		if (i_coff_denom == 0 || q_coff_denom == 0)
			continue; /* Prevents division by zero. */

		i_coff = cal->iq_corr_meas / i_coff_denom;
		q_coff = (cal->pwr_meas_i / q_coff_denom) - 64;

		/* Negate i_coff if iq_corr_meas is positive. */
		if (!iq_corr_neg)
			i_coff = 0x40 - (i_coff & 0x3f);
		/* Clamp q_coff to the signed 5-bit range [-16, 15]. */
		if (q_coff > 15)
			q_coff = 15;
		else if (q_coff <= -16)
			q_coff = -16; /* XXX Linux has a bug here? */

		DPRINTFN(DBG_RF, sc, "IQ calibration for chain %d\n", i);
		reg = AR_READ(sc, AR_PHY_TIMING_CTRL4(i));
		reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCORR_Q_I_COFF, i_coff);
		reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCORR_Q_Q_COFF, q_coff);
		AR_WRITE(sc, AR_PHY_TIMING_CTRL4(i), reg);
	}

	/* Apply new settings. */
	AR_SETBITS(sc, AR_PHY_TIMING_CTRL4_0,
	    AR_PHY_TIMING_CTRL4_IQCORR_ENABLE);
	AR_WRITE_BARRIER(sc);

	/* IQ calibration done. */
	sc->sc_cur_calib_mask &= ~ATHN_CAL_IQ;
	memset(&sc->sc_calib, 0, sizeof(sc->sc_calib));
}
2094
2095 Static void
ar5008_calib_adc_gain(struct athn_softc * sc)2096 ar5008_calib_adc_gain(struct athn_softc *sc)
2097 {
2098 struct athn_adc_cal *cal;
2099 uint32_t reg, gain_mismatch_i, gain_mismatch_q;
2100 int i;
2101
2102 for (i = 0; i < AR_MAX_CHAINS; i++) {
2103 cal = &sc->sc_calib.adc_gain[i];
2104
2105 /* Accumulate ADC gain measures (clear on read). */
2106 cal->pwr_meas_odd_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
2107 cal->pwr_meas_even_i += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
2108 cal->pwr_meas_odd_q += AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
2109 cal->pwr_meas_even_q += AR_READ(sc, AR_PHY_CAL_MEAS_3(i));
2110 }
2111 if (!AR_SREV_9280_10_OR_LATER(sc) &&
2112 ++sc->sc_calib.nsamples < AR_CAL_SAMPLES) {
2113 /* Not enough samples accumulated, continue. */
2114 ar5008_do_calib(sc);
2115 return;
2116 }
2117
2118 for (i = 0; i < sc->sc_nrxchains; i++) {
2119 cal = &sc->sc_calib.adc_gain[i];
2120
2121 if (cal->pwr_meas_odd_i == 0 || cal->pwr_meas_even_q == 0)
2122 continue; /* Prevents division by zero. */
2123
2124 gain_mismatch_i =
2125 (cal->pwr_meas_even_i * 32) / cal->pwr_meas_odd_i;
2126 gain_mismatch_q =
2127 (cal->pwr_meas_odd_q * 32) / cal->pwr_meas_even_q;
2128
2129 DPRINTFN(DBG_RF, sc, "ADC gain calibration for chain %d\n", i);
2130 reg = AR_READ(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i));
2131 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_IGAIN, gain_mismatch_i);
2132 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_QGAIN, gain_mismatch_q);
2133 AR_WRITE(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i), reg);
2134 }
2135
2136 /* Apply new settings. */
2137 AR_SETBITS(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(0),
2138 AR_PHY_NEW_ADC_GAIN_CORR_ENABLE);
2139 AR_WRITE_BARRIER(sc);
2140
2141 /* ADC gain calibration done. */
2142 sc->sc_cur_calib_mask &= ~ATHN_CAL_ADC_GAIN;
2143 memset(&sc->sc_calib, 0, sizeof(sc->sc_calib));
2144 }
2145
/*
 * Post-process one ADC DC offset calibration run: accumulate the
 * odd/even I and Q power measurements for every chain and, once
 * enough samples have been collected, program the per-chain DC offset
 * corrections.
 */
Static void
ar5008_calib_adc_dc_off(struct athn_softc *sc)
{
	struct athn_adc_cal *cal;
	int32_t dc_offset_mismatch_i, dc_offset_mismatch_q;
	uint32_t reg;
	int count, i;

	for (i = 0; i < AR_MAX_CHAINS; i++) {
		cal = &sc->sc_calib.adc_dc_offset[i];

		/* Accumulate ADC DC offset measures (clear on read). */
		cal->pwr_meas_odd_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
		cal->pwr_meas_even_i += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
		cal->pwr_meas_odd_q += AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
		cal->pwr_meas_even_q += AR_READ(sc, AR_PHY_CAL_MEAS_3(i));
	}
	/*
	 * Pre-AR9280 parts average over AR_CAL_SAMPLES runs; start
	 * another measurement until enough samples were accumulated.
	 */
	if (!AR_SREV_9280_10_OR_LATER(sc) &&
	    ++sc->sc_calib.nsamples < AR_CAL_SAMPLES) {
		/* Not enough samples accumulated, continue. */
		ar5008_do_calib(sc);
		return;
	}

	/*
	 * Total sample count is 2^(log_count + 5) where log_count is
	 * the IQCAL_LOG_COUNT_MAX programmed in ar5008_do_calib()
	 * (10 on >= AR9280, else 2 multiplied by the number of
	 * accumulated runs).
	 */
	if (AR_SREV_9280_10_OR_LATER(sc))
		count = (1 << (10 + 5));
	else
		count = (1 << ( 2 + 5)) * AR_CAL_SAMPLES;
	for (i = 0; i < sc->sc_nrxchains; i++) {
		cal = &sc->sc_calib.adc_dc_offset[i];

		dc_offset_mismatch_i =
		    (cal->pwr_meas_even_i - cal->pwr_meas_odd_i * 2) / count;
		dc_offset_mismatch_q =
		    (cal->pwr_meas_odd_q - cal->pwr_meas_even_q * 2) / count;

		DPRINTFN(DBG_RF, sc, "ADC DC offset calibration for chain %d\n", i);
		reg = AR_READ(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i));
		reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_QDC,
		    dc_offset_mismatch_q);
		reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_IDC,
		    dc_offset_mismatch_i);
		AR_WRITE(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i), reg);
	}

	/* Apply new settings. */
	AR_SETBITS(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(0),
	    AR_PHY_NEW_ADC_DC_OFFSET_CORR_ENABLE);
	AR_WRITE_BARRIER(sc);

	/* ADC DC offset calibration done. */
	sc->sc_cur_calib_mask &= ~ATHN_CAL_ADC_DC;
	memset(&sc->sc_calib, 0, sizeof(sc->sc_calib));
}
2200
/*
 * Program the per-rate Tx power table.  Each 32-bit register packs
 * four 6-bit power values; power[] is indexed by the ATHN_POWER_*
 * constants.  NOTE(review): values appear to be in half-dB steps as
 * used elsewhere in this driver -- confirm against the EEPROM
 * target-power format.
 */
PUBLIC void
ar5008_write_txpower(struct athn_softc *sc, int16_t power[ATHN_POWER_COUNT])
{

	AR_WRITE(sc, AR_PHY_POWER_TX_RATE1,
	    (power[ATHN_POWER_OFDM18  ] & 0x3f) << 24 |
	    (power[ATHN_POWER_OFDM12  ] & 0x3f) << 16 |
	    (power[ATHN_POWER_OFDM9   ] & 0x3f) <<  8 |
	    (power[ATHN_POWER_OFDM6   ] & 0x3f));
	AR_WRITE(sc, AR_PHY_POWER_TX_RATE2,
	    (power[ATHN_POWER_OFDM54  ] & 0x3f) << 24 |
	    (power[ATHN_POWER_OFDM48  ] & 0x3f) << 16 |
	    (power[ATHN_POWER_OFDM36  ] & 0x3f) <<  8 |
	    (power[ATHN_POWER_OFDM24  ] & 0x3f));
	AR_WRITE(sc, AR_PHY_POWER_TX_RATE3,
	    (power[ATHN_POWER_CCK2_SP ] & 0x3f) << 24 |
	    (power[ATHN_POWER_CCK2_LP ] & 0x3f) << 16 |
	    (power[ATHN_POWER_XR      ] & 0x3f) <<  8 |
	    (power[ATHN_POWER_CCK1_LP ] & 0x3f));
	AR_WRITE(sc, AR_PHY_POWER_TX_RATE4,
	    (power[ATHN_POWER_CCK11_SP] & 0x3f) << 24 |
	    (power[ATHN_POWER_CCK11_LP] & 0x3f) << 16 |
	    (power[ATHN_POWER_CCK55_SP] & 0x3f) <<  8 |
	    (power[ATHN_POWER_CCK55_LP] & 0x3f));
#ifndef IEEE80211_NO_HT
	/* HT MCS 0-7 target powers, 20MHz then 40MHz. */
	AR_WRITE(sc, AR_PHY_POWER_TX_RATE5,
	    (power[ATHN_POWER_HT20(3) ] & 0x3f) << 24 |
	    (power[ATHN_POWER_HT20(2) ] & 0x3f) << 16 |
	    (power[ATHN_POWER_HT20(1) ] & 0x3f) <<  8 |
	    (power[ATHN_POWER_HT20(0) ] & 0x3f));
	AR_WRITE(sc, AR_PHY_POWER_TX_RATE6,
	    (power[ATHN_POWER_HT20(7) ] & 0x3f) << 24 |
	    (power[ATHN_POWER_HT20(6) ] & 0x3f) << 16 |
	    (power[ATHN_POWER_HT20(5) ] & 0x3f) <<  8 |
	    (power[ATHN_POWER_HT20(4) ] & 0x3f));
	AR_WRITE(sc, AR_PHY_POWER_TX_RATE7,
	    (power[ATHN_POWER_HT40(3) ] & 0x3f) << 24 |
	    (power[ATHN_POWER_HT40(2) ] & 0x3f) << 16 |
	    (power[ATHN_POWER_HT40(1) ] & 0x3f) <<  8 |
	    (power[ATHN_POWER_HT40(0) ] & 0x3f));
	AR_WRITE(sc, AR_PHY_POWER_TX_RATE8,
	    (power[ATHN_POWER_HT40(7) ] & 0x3f) << 24 |
	    (power[ATHN_POWER_HT40(6) ] & 0x3f) << 16 |
	    (power[ATHN_POWER_HT40(5) ] & 0x3f) <<  8 |
	    (power[ATHN_POWER_HT40(4) ] & 0x3f));
	AR_WRITE(sc, AR_PHY_POWER_TX_RATE9,
	    (power[ATHN_POWER_OFDM_EXT] & 0x3f) << 24 |
	    (power[ATHN_POWER_CCK_EXT ] & 0x3f) << 16 |
	    (power[ATHN_POWER_OFDM_DUP] & 0x3f) <<  8 |
	    (power[ATHN_POWER_CCK_DUP ] & 0x3f));
#endif
	AR_WRITE_BARRIER(sc);
}
2254
/*
 * Program the pilot and viterbi (channel) spur masks around offset
 * `bin'.  Mask entries cover offsets -6000..+6000 in steps of 100.
 * NOTE(review): the unit of `bin' is not visible here -- confirm the
 * scale against the spur-mitigation caller.
 */
PUBLIC void
ar5008_set_viterbi_mask(struct athn_softc *sc, int bin)
{
	uint32_t mask[4], reg;
	uint8_t m[62], p[62];	/* XXX use bit arrays? */
	int i, bit, cur;

	/* Compute pilot mask. */
	cur = -6000;
	for (i = 0; i < 4; i++) {
		mask[i] = 0;
		/* Set the bit for every offset within 100 of `bin'. */
		for (bit = 0; bit < 30; bit++) {
			if (abs(cur - bin) < 100)
				mask[i] |= 1 << bit;
			cur += 100;
		}
		if (cur == 0)	/* Skip entry "0". */
			cur = 100;
	}
	/* Write entries from -6000 to -3100. */
	AR_WRITE(sc, AR_PHY_TIMING7, mask[0]);
	AR_WRITE(sc, AR_PHY_TIMING9, mask[0]);
	/* Write entries from -3000 to -100. */
	AR_WRITE(sc, AR_PHY_TIMING8, mask[1]);
	AR_WRITE(sc, AR_PHY_TIMING10, mask[1]);
	/* Write entries from 100 to 3000. */
	AR_WRITE(sc, AR_PHY_PILOT_MASK_01_30, mask[2]);
	AR_WRITE(sc, AR_PHY_CHANNEL_MASK_01_30, mask[2]);
	/* Write entries from 3100 to 6000. */
	AR_WRITE(sc, AR_PHY_PILOT_MASK_31_60, mask[3]);
	AR_WRITE(sc, AR_PHY_CHANNEL_MASK_31_60, mask[3]);

	/*
	 * Compute viterbi mask: p[] covers positive offsets (index =
	 * offset/100), m[] negative offsets; an entry is 1 when the
	 * offset lies within 75 of `bin'.
	 */
	for (cur = 6100; cur >= 0; cur -= 100)
		p[+cur / 100] = abs(cur - bin) < 75;
	for (cur = 0; cur >= -6100; cur -= 100)
		m[-cur / 100] = abs(cur - bin) < 75;

	/* Write viterbi mask (XXX needs to be reworked). */
	reg =
	    m[46] << 30 | m[47] << 28 | m[48] << 26 | m[49] << 24 |
	    m[50] << 22 | m[51] << 20 | m[52] << 18 | m[53] << 16 |
	    m[54] << 14 | m[55] << 12 | m[56] << 10 | m[57] <<  8 |
	    m[58] <<  6 | m[59] <<  4 | m[60] <<  2 | m[61] <<  0;
	AR_WRITE(sc, AR_PHY_BIN_MASK_1, reg);
	AR_WRITE(sc, AR_PHY_VIT_MASK2_M_46_61, reg);

	/* XXX m[48] should be m[38] ? */
	reg =                 m[31] << 28 | m[32] << 26 | m[33] << 24 |
	    m[34] << 22 | m[35] << 20 | m[36] << 18 | m[37] << 16 |
	    m[48] << 14 | m[39] << 12 | m[40] << 10 | m[41] <<  8 |
	    m[42] <<  6 | m[43] <<  4 | m[44] <<  2 | m[45] <<  0;
	AR_WRITE(sc, AR_PHY_BIN_MASK_2, reg);
	AR_WRITE(sc, AR_PHY_VIT_MASK2_M_31_45, reg);

	/* XXX This one is weird too. */
	reg =
	    m[16] << 30 | m[16] << 28 | m[18] << 26 | m[18] << 24 |
	    m[20] << 22 | m[20] << 20 | m[22] << 18 | m[22] << 16 |
	    m[24] << 14 | m[24] << 12 | m[25] << 10 | m[26] <<  8 |
	    m[27] <<  6 | m[28] <<  4 | m[29] <<  2 | m[30] <<  0;
	AR_WRITE(sc, AR_PHY_BIN_MASK_3, reg);
	AR_WRITE(sc, AR_PHY_VIT_MASK2_M_16_30, reg);

	reg =
	    m[ 0] << 30 | m[ 1] << 28 | m[ 2] << 26 | m[ 3] << 24 |
	    m[ 4] << 22 | m[ 5] << 20 | m[ 6] << 18 | m[ 7] << 16 |
	    m[ 8] << 14 | m[ 9] << 12 | m[10] << 10 | m[11] <<  8 |
	    m[12] <<  6 | m[13] <<  4 | m[14] <<  2 | m[15] <<  0;
	AR_WRITE(sc, AR_PHY_MASK_CTL, reg);
	AR_WRITE(sc, AR_PHY_VIT_MASK2_M_00_15, reg);

	reg =                 p[15] << 28 | p[14] << 26 | p[13] << 24 |
	    p[12] << 22 | p[11] << 20 | p[10] << 18 | p[ 9] << 16 |
	    p[ 8] << 14 | p[ 7] << 12 | p[ 6] << 10 | p[ 5] <<  8 |
	    p[ 4] <<  6 | p[ 3] <<  4 | p[ 2] <<  2 | p[ 1] <<  0;
	AR_WRITE(sc, AR_PHY_BIN_MASK2_1, reg);
	AR_WRITE(sc, AR_PHY_VIT_MASK2_P_15_01, reg);

	reg =                 p[30] << 28 | p[29] << 26 | p[28] << 24 |
	    p[27] << 22 | p[26] << 20 | p[25] << 18 | p[24] << 16 |
	    p[23] << 14 | p[22] << 12 | p[21] << 10 | p[20] <<  8 |
	    p[19] <<  6 | p[18] <<  4 | p[17] <<  2 | p[16] <<  0;
	AR_WRITE(sc, AR_PHY_BIN_MASK2_2, reg);
	AR_WRITE(sc, AR_PHY_VIT_MASK2_P_30_16, reg);

	reg =                 p[45] << 28 | p[44] << 26 | p[43] << 24 |
	    p[42] << 22 | p[41] << 20 | p[40] << 18 | p[39] << 16 |
	    p[38] << 14 | p[37] << 12 | p[36] << 10 | p[35] <<  8 |
	    p[34] <<  6 | p[33] <<  4 | p[32] <<  2 | p[31] <<  0;
	AR_WRITE(sc, AR_PHY_BIN_MASK2_3, reg);
	AR_WRITE(sc, AR_PHY_VIT_MASK2_P_45_31, reg);

	reg =
	    p[61] << 30 | p[60] << 28 | p[59] << 26 | p[58] << 24 |
	    p[57] << 22 | p[56] << 20 | p[55] << 18 | p[54] << 16 |
	    p[53] << 14 | p[52] << 12 | p[51] << 10 | p[50] <<  8 |
	    p[49] <<  6 | p[48] <<  4 | p[47] <<  2 | p[46] <<  0;
	AR_WRITE(sc, AR_PHY_BIN_MASK2_4, reg);
	AR_WRITE(sc, AR_PHY_VIT_MASK2_P_61_46, reg);
	AR_WRITE_BARRIER(sc);
}
2357
/*
 * Program the PHY and analog initialization values for channel `c'
 * (and HT40 extension channel `extc' if non-NULL): modal values that
 * depend on band/bandwidth, common values, optional fast-PLL values
 * for 5GHz, plus chip-revision specific workarounds.  Finishes by
 * configuring the PHY, chains, OLPC and Tx power for the channel.
 */
Static void
ar5008_hw_init(struct athn_softc *sc, struct ieee80211_channel *c,
    struct ieee80211_channel *extc)
{
	struct athn_ops *ops = &sc->sc_ops;
	const struct athn_ini *ini = sc->sc_ini;
	const uint32_t *pvals;
	uint32_t reg;
	int i;

	AR_WRITE(sc, AR_PHY(0), 0x00000007);
	AR_WRITE(sc, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);

	/* Parts with an external RF front-end need the ADDAC reset. */
	if (!AR_SINGLE_CHIP(sc))
		ar5416_reset_addac(sc, c);

	AR_WRITE(sc, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);

	/* First initialization step (depends on channel band/bandwidth). */
#ifndef IEEE80211_NO_HT
	if (extc != NULL) {
		if (IEEE80211_IS_CHAN_2GHZ(c))
			pvals = ini->vals_2g40;
		else
			pvals = ini->vals_5g40;
	}
	else
#endif
	{
		if (IEEE80211_IS_CHAN_2GHZ(c))
			pvals = ini->vals_2g20;
		else
			pvals = ini->vals_5g20;
	}
	DPRINTFN(DBG_INIT, sc, "writing modal init vals\n");
	for (i = 0; i < ini->nregs; i++) {
		uint32_t val = pvals[i];

		/* Fix AR_AN_TOP2 initialization value if required. */
		if (ini->regs[i] == AR_AN_TOP2 &&
		    (sc->sc_flags & ATHN_FLAG_AN_TOP2_FIXUP))
			val &= ~AR_AN_TOP2_PWDCLKIND;
		AR_WRITE(sc, ini->regs[i], val);
		/* Analog registers need time to settle after a write. */
		if (AR_IS_ANALOG_REG(ini->regs[i])) {
			AR_WRITE_BARRIER(sc);
			DELAY(100);
		}
		/* Brief pause every 32 writes. */
		if ((i & 0x1f) == 0)
			DELAY(1);
	}
	AR_WRITE_BARRIER(sc);

	/* Optional Rx/Tx gain tables (AR9280 family and later). */
	if (sc->sc_rx_gain != NULL)
		ar9280_reset_rx_gain(sc, c);
	if (sc->sc_tx_gain != NULL)
		ar9280_reset_tx_gain(sc, c);

	if (AR_SREV_9271_10(sc)) {
		AR_WRITE(sc, AR_PHY(68), 0x30002311);
		AR_WRITE(sc, AR_PHY_RF_CTL3, 0x0a020001);
	}
	AR_WRITE_BARRIER(sc);

	/* Second initialization step (common to all channels). */
	DPRINTFN(DBG_INIT, sc, "writing common init vals\n");
	for (i = 0; i < ini->ncmregs; i++) {
		AR_WRITE(sc, ini->cmregs[i], ini->cmvals[i]);
		if (AR_IS_ANALOG_REG(ini->cmregs[i])) {
			AR_WRITE_BARRIER(sc);
			DELAY(100);
		}
		if ((i & 0x1f) == 0)
			DELAY(1);
	}
	AR_WRITE_BARRIER(sc);

	if (!AR_SINGLE_CHIP(sc))
		ar5416_reset_bb_gain(sc, c);

	if (IEEE80211_IS_CHAN_5GHZ(c) &&
	    (sc->sc_flags & ATHN_FLAG_FAST_PLL_CLOCK)) {
		/* Update modal values for fast PLL clock. */
#ifndef IEEE80211_NO_HT
		if (extc != NULL)
			pvals = ini->fastvals_5g40;
		else
#endif
			pvals = ini->fastvals_5g20;
		DPRINTFN(DBG_INIT, sc, "writing fast pll clock init vals\n");
		for (i = 0; i < ini->nfastregs; i++) {
			AR_WRITE(sc, ini->fastregs[i], pvals[i]);
			if (AR_IS_ANALOG_REG(ini->fastregs[i])) {
				AR_WRITE_BARRIER(sc);
				DELAY(100);
			}
			if ((i & 0x1f) == 0)
				DELAY(1);
		}
	}

	/*
	 * Set the RX_ABORT and RX_DIS bits to prevent frames with corrupted
	 * descriptor status.
	 */
	AR_SETBITS(sc, AR_DIAG_SW, AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT);

	/* Hardware workarounds for occasional Rx data corruption. */
	if (AR_SREV_9280_10_OR_LATER(sc)) {
		reg = AR_READ(sc, AR_PCU_MISC_MODE2);
		if (!AR_SREV_9271(sc))
			reg &= ~AR_PCU_MISC_MODE2_HWWAR1;
		if (AR_SREV_9287_10_OR_LATER(sc))
			reg &= ~AR_PCU_MISC_MODE2_HWWAR2;
		AR_WRITE(sc, AR_PCU_MISC_MODE2, reg);

	}
	else if (AR_SREV_5416_20_OR_LATER(sc)) {
		/* Disable baseband clock gating. */
		AR_WRITE(sc, AR_PHY(651), 0x11);

		if (AR_SREV_9160(sc)) {
			/* Disable RIFS search to fix baseband hang. */
			AR_CLRBITS(sc, AR_PHY_HEAVY_CLIP_FACTOR_RIFS,
			    AR_PHY_RIFS_INIT_DELAY_M);
		}
	}
	AR_WRITE_BARRIER(sc);

	ar5008_set_phy(sc, c, extc);
	ar5008_init_chains(sc);

	/* Open-loop power control, where supported. */
	if (sc->sc_flags & ATHN_FLAG_OLPC) {
		sc->sc_olpc_ticks = ticks;
		ops->olpc_init(sc);
	}

	ops->set_txpower(sc, c, extc);

	if (!AR_SINGLE_CHIP(sc))
		ar5416_rf_reset(sc, c);
}
2499
2500 Static uint8_t
ar5008_get_vpd(uint8_t pwr,const uint8_t * pwrPdg,const uint8_t * vpdPdg,int nicepts)2501 ar5008_get_vpd(uint8_t pwr, const uint8_t *pwrPdg, const uint8_t *vpdPdg,
2502 int nicepts)
2503 {
2504 uint8_t vpd;
2505 int i, lo, hi;
2506
2507 for (i = 0; i < nicepts; i++)
2508 if (pwrPdg[i] > pwr)
2509 break;
2510 hi = i;
2511 lo = hi - 1;
2512 if (lo == -1)
2513 lo = hi;
2514 else if (hi == nicepts)
2515 hi = lo;
2516
2517 vpd = athn_interpolate(pwr, pwrPdg[lo], vpdPdg[lo],
2518 pwrPdg[hi], vpdPdg[hi]);
2519 return vpd;
2520 }
2521
/*
 * Build the PDADC table and power-detector gain boundaries for one
 * channel by interpolating between the two calibration piers
 * `lopier' and `hipier' that bracket frequency bin `fbin'.
 * Fills boundaries[AR_PD_GAINS_IN_MASK] and
 * pdadcs[AR_NUM_PDADC_VALUES]; `overlap' is the boundary overlap
 * between consecutive pdGains, `nxpdgains'/`nicepts' give the table
 * dimensions.  Power values are in half-dB steps (see DB()).
 */
PUBLIC void
ar5008_get_pdadcs(struct athn_softc *sc, uint8_t fbin,
    struct athn_pier *lopier, struct athn_pier *hipier, int nxpdgains,
    int nicepts, uint8_t overlap, uint8_t *boundaries, uint8_t *pdadcs)
{
#define DB(x)	((x) / 2)	/* Convert half dB to dB. */
	uint8_t minpwr[AR_PD_GAINS_IN_MASK], maxpwr[AR_PD_GAINS_IN_MASK];
	uint8_t vpd[AR_MAX_PWR_RANGE_IN_HALF_DB], pwr;
	uint8_t lovpd, hivpd, boundary;
	int16_t ss, delta, vpdstep, val;
	int i, j, npdadcs, nvpds, maxidx, tgtidx;

	/* Compute min and max power in half dB for each pdGain. */
	for (i = 0; i < nxpdgains; i++) {
		minpwr[i] = MAX(lopier->pwr[i][0], hipier->pwr[i][0]);
		maxpwr[i] = MIN(lopier->pwr[i][nicepts - 1],
		    hipier->pwr[i][nicepts - 1]);
	}

	/* Fill phase domain analog-to-digital converter (PDADC) table. */
	npdadcs = 0;
	for (i = 0; i < nxpdgains; i++) {
		/*
		 * Boundary between this pdGain and the next: midpoint
		 * of this max and the next min (in dB), capped at
		 * AR_MAX_RATE_POWER.
		 */
		if (i != nxpdgains - 1)
			boundaries[i] = DB(maxpwr[i] + minpwr[i + 1]) / 2;
		else
			boundaries[i] = DB(maxpwr[i]);
		if (boundaries[i] > AR_MAX_RATE_POWER)
			boundaries[i] = AR_MAX_RATE_POWER;

		if (i == 0 && !AR_SREV_5416_20_OR_LATER(sc)) {
			/* Fix the gain delta (AR5416 1.0 only). */
			delta = boundaries[0] - 23;
			boundaries[0] = 23;
		}
		else
			delta = 0;

		/* Find starting index for this pdGain. */
		if (i != 0) {
			ss = boundaries[i - 1] - DB(minpwr[i]) -
			    overlap + 1 + delta;
		}
		else if (AR_SREV_9280_10_OR_LATER(sc))
			ss = -DB(minpwr[i]);
		else
			ss = 0;

		/* Compute Vpd table for this pdGain. */
		nvpds = DB(maxpwr[i] - minpwr[i]) + 1;
		memset(vpd, 0, sizeof(vpd));
		pwr = minpwr[i];
		for (j = 0; j < nvpds; j++) {
			/* Get lower and higher Vpd. */
			lovpd = ar5008_get_vpd(pwr, lopier->pwr[i],
			    lopier->vpd[i], nicepts);
			hivpd = ar5008_get_vpd(pwr, hipier->pwr[i],
			    hipier->vpd[i], nicepts);

			/* Interpolate the final Vpd. */
			vpd[j] = athn_interpolate(fbin,
			    lopier->fbin, lovpd, hipier->fbin, hivpd);

			pwr += 2;	/* In half dB. */
		}

		/* Extrapolate data for ss < 0. */
		if (vpd[1] > vpd[0])
			vpdstep = vpd[1] - vpd[0];
		else
			vpdstep = 1;	/* Guard against a flat curve. */
		while (ss < 0 && npdadcs < AR_NUM_PDADC_VALUES - 1) {
			val = vpd[0] + ss * vpdstep;
			pdadcs[npdadcs++] = MAX(val, 0);
			ss++;
		}

		/* Copy the interpolated Vpd values proper. */
		tgtidx = boundaries[i] + overlap - DB(minpwr[i]);
		maxidx = MIN(tgtidx, nvpds);
		while (ss < maxidx && npdadcs < AR_NUM_PDADC_VALUES - 1)
			pdadcs[npdadcs++] = vpd[ss++];

		if (tgtidx < maxidx)
			continue;

		/* Extrapolate data for maxidx <= ss <= tgtidx. */
		if (vpd[nvpds - 1] > vpd[nvpds - 2])
			vpdstep = vpd[nvpds - 1] - vpd[nvpds - 2];
		else
			vpdstep = 1;
		while (ss <= tgtidx && npdadcs < AR_NUM_PDADC_VALUES - 1) {
			val = vpd[nvpds - 1] + (ss - maxidx + 1) * vpdstep;
			pdadcs[npdadcs++] = MIN(val, 255);
			ss++;
		}
	}

	/* Fill remaining PDADC and boundaries entries. */
	if (AR_SREV_9285(sc))
		boundary = AR9285_PD_GAIN_BOUNDARY_DEFAULT;
	else	/* Fill with latest. */
		boundary = boundaries[nxpdgains - 1];

	for (; nxpdgains < AR_PD_GAINS_IN_MASK; nxpdgains++)
		boundaries[nxpdgains] = boundary;

	for (; npdadcs < AR_NUM_PDADC_VALUES; npdadcs++)
		pdadcs[npdadcs] = pdadcs[npdadcs - 1];
#undef DB
}
2631
2632 PUBLIC void
ar5008_get_lg_tpow(struct athn_softc * sc,struct ieee80211_channel * c,uint8_t ctl,const struct ar_cal_target_power_leg * tgt,int nchans,uint8_t tpow[4])2633 ar5008_get_lg_tpow(struct athn_softc *sc, struct ieee80211_channel *c,
2634 uint8_t ctl, const struct ar_cal_target_power_leg *tgt, int nchans,
2635 uint8_t tpow[4])
2636 {
2637 uint8_t fbin;
2638 int i, lo, hi;
2639
2640 /* Find interval (lower and upper indices). */
2641 fbin = athn_chan2fbin(c);
2642 for (i = 0; i < nchans; i++) {
2643 if (tgt[i].bChannel == AR_BCHAN_UNUSED ||
2644 tgt[i].bChannel > fbin)
2645 break;
2646 }
2647 hi = i;
2648 lo = hi - 1;
2649 if (lo == -1)
2650 lo = hi;
2651 else if (hi == nchans || tgt[hi].bChannel == AR_BCHAN_UNUSED)
2652 hi = lo;
2653
2654 /* Interpolate values. */
2655 for (i = 0; i < 4; i++) {
2656 tpow[i] = athn_interpolate(fbin,
2657 tgt[lo].bChannel, tgt[lo].tPow2x[i],
2658 tgt[hi].bChannel, tgt[hi].tPow2x[i]);
2659 }
2660 /* XXX Apply conformance testing limit. */
2661 }
2662
2663 #ifndef IEEE80211_NO_HT
2664 PUBLIC void
ar5008_get_ht_tpow(struct athn_softc * sc,struct ieee80211_channel * c,uint8_t ctl,const struct ar_cal_target_power_ht * tgt,int nchans,uint8_t tpow[8])2665 ar5008_get_ht_tpow(struct athn_softc *sc, struct ieee80211_channel *c,
2666 uint8_t ctl, const struct ar_cal_target_power_ht *tgt, int nchans,
2667 uint8_t tpow[8])
2668 {
2669 uint8_t fbin;
2670 int i, lo, hi;
2671
2672 /* Find interval (lower and upper indices). */
2673 fbin = athn_chan2fbin(c);
2674 for (i = 0; i < nchans; i++) {
2675 if (tgt[i].bChannel == AR_BCHAN_UNUSED ||
2676 tgt[i].bChannel > fbin)
2677 break;
2678 }
2679 hi = i;
2680 lo = hi - 1;
2681 if (lo == -1)
2682 lo = hi;
2683 else if (hi == nchans || tgt[hi].bChannel == AR_BCHAN_UNUSED)
2684 hi = lo;
2685
2686 /* Interpolate values. */
2687 for (i = 0; i < 8; i++) {
2688 tpow[i] = athn_interpolate(fbin,
2689 tgt[lo].bChannel, tgt[lo].tPow2x[i],
2690 tgt[hi].bChannel, tgt[hi].tPow2x[i]);
2691 }
2692 /* XXX Apply conformance testing limit. */
2693 }
2694 #endif
2695
2696 /*
2697 * Adaptive noise immunity.
2698 */
2699 Static void
ar5008_set_noise_immunity_level(struct athn_softc * sc,int level)2700 ar5008_set_noise_immunity_level(struct athn_softc *sc, int level)
2701 {
2702 int high = level == 4;
2703 uint32_t reg;
2704
2705 reg = AR_READ(sc, AR_PHY_DESIRED_SZ);
2706 reg = RW(reg, AR_PHY_DESIRED_SZ_TOT_DES, high ? -62 : -55);
2707 AR_WRITE(sc, AR_PHY_DESIRED_SZ, reg);
2708
2709 reg = AR_READ(sc, AR_PHY_AGC_CTL1);
2710 reg = RW(reg, AR_PHY_AGC_CTL1_COARSE_LOW, high ? -70 : -64);
2711 reg = RW(reg, AR_PHY_AGC_CTL1_COARSE_HIGH, high ? -12 : -14);
2712 AR_WRITE(sc, AR_PHY_AGC_CTL1, reg);
2713
2714 reg = AR_READ(sc, AR_PHY_FIND_SIG);
2715 reg = RW(reg, AR_PHY_FIND_SIG_FIRPWR, high ? -80 : -78);
2716 AR_WRITE(sc, AR_PHY_FIND_SIG, reg);
2717
2718 AR_WRITE_BARRIER(sc);
2719 }
2720
2721 Static void
ar5008_enable_ofdm_weak_signal(struct athn_softc * sc)2722 ar5008_enable_ofdm_weak_signal(struct athn_softc *sc)
2723 {
2724 uint32_t reg;
2725
2726 reg = AR_READ(sc, AR_PHY_SFCORR_LOW);
2727 reg = RW(reg, AR_PHY_SFCORR_LOW_M1_THRESH_LOW, 50);
2728 reg = RW(reg, AR_PHY_SFCORR_LOW_M2_THRESH_LOW, 40);
2729 reg = RW(reg, AR_PHY_SFCORR_LOW_M2COUNT_THR_LOW, 48);
2730 AR_WRITE(sc, AR_PHY_SFCORR_LOW, reg);
2731
2732 reg = AR_READ(sc, AR_PHY_SFCORR);
2733 reg = RW(reg, AR_PHY_SFCORR_M1_THRESH, 77);
2734 reg = RW(reg, AR_PHY_SFCORR_M2_THRESH, 64);
2735 reg = RW(reg, AR_PHY_SFCORR_M2COUNT_THR, 16);
2736 AR_WRITE(sc, AR_PHY_SFCORR, reg);
2737
2738 reg = AR_READ(sc, AR_PHY_SFCORR_EXT);
2739 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH_LOW, 50);
2740 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH_LOW, 40);
2741 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH, 77);
2742 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH, 64);
2743 AR_WRITE(sc, AR_PHY_SFCORR_EXT, reg);
2744
2745 AR_SETBITS(sc, AR_PHY_SFCORR_LOW,
2746 AR_PHY_SFCORR_LOW_USE_SELF_CORR_LOW);
2747 AR_WRITE_BARRIER(sc);
2748 }
2749
2750 Static void
ar5008_disable_ofdm_weak_signal(struct athn_softc * sc)2751 ar5008_disable_ofdm_weak_signal(struct athn_softc *sc)
2752 {
2753 uint32_t reg;
2754
2755 reg = AR_READ(sc, AR_PHY_SFCORR_LOW);
2756 reg = RW(reg, AR_PHY_SFCORR_LOW_M1_THRESH_LOW, 127);
2757 reg = RW(reg, AR_PHY_SFCORR_LOW_M2_THRESH_LOW, 127);
2758 reg = RW(reg, AR_PHY_SFCORR_LOW_M2COUNT_THR_LOW, 63);
2759 AR_WRITE(sc, AR_PHY_SFCORR_LOW, reg);
2760
2761 reg = AR_READ(sc, AR_PHY_SFCORR);
2762 reg = RW(reg, AR_PHY_SFCORR_M1_THRESH, 127);
2763 reg = RW(reg, AR_PHY_SFCORR_M2_THRESH, 127);
2764 reg = RW(reg, AR_PHY_SFCORR_M2COUNT_THR, 31);
2765 AR_WRITE(sc, AR_PHY_SFCORR, reg);
2766
2767 reg = AR_READ(sc, AR_PHY_SFCORR_EXT);
2768 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH_LOW, 127);
2769 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH_LOW, 127);
2770 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH, 127);
2771 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH, 127);
2772 AR_WRITE(sc, AR_PHY_SFCORR_EXT, reg);
2773
2774 AR_CLRBITS(sc, AR_PHY_SFCORR_LOW,
2775 AR_PHY_SFCORR_LOW_USE_SELF_CORR_LOW);
2776 AR_WRITE_BARRIER(sc);
2777 }
2778
2779 Static void
ar5008_set_cck_weak_signal(struct athn_softc * sc,int high)2780 ar5008_set_cck_weak_signal(struct athn_softc *sc, int high)
2781 {
2782 uint32_t reg;
2783
2784 reg = AR_READ(sc, AR_PHY_CCK_DETECT);
2785 reg = RW(reg, AR_PHY_CCK_DETECT_WEAK_SIG_THR_CCK, high ? 6 : 8);
2786 AR_WRITE(sc, AR_PHY_CCK_DETECT, reg);
2787 AR_WRITE_BARRIER(sc);
2788 }
2789
2790 Static void
ar5008_set_firstep_level(struct athn_softc * sc,int level)2791 ar5008_set_firstep_level(struct athn_softc *sc, int level)
2792 {
2793 uint32_t reg;
2794
2795 reg = AR_READ(sc, AR_PHY_FIND_SIG);
2796 reg = RW(reg, AR_PHY_FIND_SIG_FIRSTEP, level * 4);
2797 AR_WRITE(sc, AR_PHY_FIND_SIG, reg);
2798 AR_WRITE_BARRIER(sc);
2799 }
2800
2801 Static void
ar5008_set_spur_immunity_level(struct athn_softc * sc,int level)2802 ar5008_set_spur_immunity_level(struct athn_softc *sc, int level)
2803 {
2804 uint32_t reg;
2805
2806 reg = AR_READ(sc, AR_PHY_TIMING5);
2807 reg = RW(reg, AR_PHY_TIMING5_CYCPWR_THR1, (level + 1) * 2);
2808 AR_WRITE(sc, AR_PHY_TIMING5, reg);
2809 AR_WRITE_BARRIER(sc);
2810 }
2811