1 /*	$OpenBSD: ar5008.c,v 1.69 2021/10/11 09:01:05 stsp Exp $	*/
2 
3 /*-
4  * Copyright (c) 2009 Damien Bergamini <damien.bergamini@free.fr>
5  * Copyright (c) 2008-2009 Atheros Communications Inc.
6  *
7  * Permission to use, copy, modify, and/or distribute this software for any
8  * purpose with or without fee is hereby granted, provided that the above
9  * copyright notice and this permission notice appear in all copies.
10  *
11  * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
12  * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
13  * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
14  * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
15  * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
16  * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
17  * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
18  */
19 
20 /*
21  * Driver for Atheros 802.11a/g/n chipsets.
22  * Routines common to AR5008, AR9001 and AR9002 families.
23  */
24 
25 #include "bpfilter.h"
26 
27 #include <sys/param.h>
28 #include <sys/sockio.h>
29 #include <sys/mbuf.h>
30 #include <sys/kernel.h>
31 #include <sys/socket.h>
32 #include <sys/systm.h>
33 #include <sys/malloc.h>
34 #include <sys/queue.h>
35 #include <sys/timeout.h>
36 #include <sys/conf.h>
37 #include <sys/device.h>
38 #include <sys/stdint.h>	/* uintptr_t */
39 #include <sys/endian.h>
40 
41 #include <machine/bus.h>
42 
43 #if NBPFILTER > 0
44 #include <net/bpf.h>
45 #endif
46 #include <net/if.h>
47 #include <net/if_media.h>
48 
49 #include <netinet/in.h>
50 #include <netinet/if_ether.h>
51 
52 #include <net80211/ieee80211_var.h>
53 #include <net80211/ieee80211_amrr.h>
54 #include <net80211/ieee80211_ra.h>
55 #include <net80211/ieee80211_radiotap.h>
56 
57 #include <dev/ic/athnreg.h>
58 #include <dev/ic/athnvar.h>
59 
60 #include <dev/ic/ar5008reg.h>
61 
62 int	ar5008_attach(struct athn_softc *);
63 int	ar5008_read_eep_word(struct athn_softc *, uint32_t, uint16_t *);
64 int	ar5008_read_rom(struct athn_softc *);
65 void	ar5008_swap_rom(struct athn_softc *);
66 int	ar5008_gpio_read(struct athn_softc *, int);
67 void	ar5008_gpio_write(struct athn_softc *, int, int);
68 void	ar5008_gpio_config_input(struct athn_softc *, int);
69 void	ar5008_gpio_config_output(struct athn_softc *, int, int);
70 void	ar5008_rfsilent_init(struct athn_softc *);
71 int	ar5008_dma_alloc(struct athn_softc *);
72 void	ar5008_dma_free(struct athn_softc *);
73 int	ar5008_tx_alloc(struct athn_softc *);
74 void	ar5008_tx_free(struct athn_softc *);
75 int	ar5008_rx_alloc(struct athn_softc *);
76 void	ar5008_rx_free(struct athn_softc *);
77 void	ar5008_rx_enable(struct athn_softc *);
78 void	ar5008_rx_radiotap(struct athn_softc *, struct mbuf *,
79 	    struct ar_rx_desc *);
80 int	ar5008_ccmp_decap(struct athn_softc *, struct mbuf *,
81 	    struct ieee80211_node *);
82 void	ar5008_rx_intr(struct athn_softc *);
83 int	ar5008_tx_process(struct athn_softc *, int);
84 void	ar5008_tx_intr(struct athn_softc *);
85 int	ar5008_swba_intr(struct athn_softc *);
86 int	ar5008_intr(struct athn_softc *);
87 int	ar5008_ccmp_encap(struct mbuf *, u_int, struct ieee80211_key *);
88 int	ar5008_tx(struct athn_softc *, struct mbuf *, struct ieee80211_node *,
89 	    int);
90 void	ar5008_set_rf_mode(struct athn_softc *, struct ieee80211_channel *);
91 int	ar5008_rf_bus_request(struct athn_softc *);
92 void	ar5008_rf_bus_release(struct athn_softc *);
93 void	ar5008_set_phy(struct athn_softc *, struct ieee80211_channel *,
94 	    struct ieee80211_channel *);
95 void	ar5008_set_delta_slope(struct athn_softc *, struct ieee80211_channel *,
96 	    struct ieee80211_channel *);
97 void	ar5008_enable_antenna_diversity(struct athn_softc *);
98 void	ar5008_init_baseband(struct athn_softc *);
99 void	ar5008_disable_phy(struct athn_softc *);
100 void	ar5008_init_chains(struct athn_softc *);
101 void	ar5008_set_rxchains(struct athn_softc *);
102 void	ar5008_read_noisefloor(struct athn_softc *, int16_t *, int16_t *);
103 void	ar5008_write_noisefloor(struct athn_softc *, int16_t *, int16_t *);
104 int	ar5008_get_noisefloor(struct athn_softc *);
105 void	ar5008_apply_noisefloor(struct athn_softc *);
106 void	ar5008_bb_load_noisefloor(struct athn_softc *);
107 void	ar5008_do_noisefloor_calib(struct athn_softc *);
108 void	ar5008_init_noisefloor_calib(struct athn_softc *);
109 void	ar5008_do_calib(struct athn_softc *);
110 void	ar5008_next_calib(struct athn_softc *);
111 void	ar5008_calib_iq(struct athn_softc *);
112 void	ar5008_calib_adc_gain(struct athn_softc *);
113 void	ar5008_calib_adc_dc_off(struct athn_softc *);
114 void	ar5008_write_txpower(struct athn_softc *, int16_t power[]);
115 void	ar5008_set_viterbi_mask(struct athn_softc *, int);
116 void	ar5008_hw_init(struct athn_softc *, struct ieee80211_channel *,
117 	    struct ieee80211_channel *);
118 uint8_t	ar5008_get_vpd(uint8_t, const uint8_t *, const uint8_t *, int);
119 void	ar5008_get_pdadcs(struct athn_softc *, uint8_t, struct athn_pier *,
120 	    struct athn_pier *, int, int, uint8_t, uint8_t *, uint8_t *);
121 void	ar5008_get_lg_tpow(struct athn_softc *, struct ieee80211_channel *,
122 	    uint8_t, const struct ar_cal_target_power_leg *, int, uint8_t[]);
123 void	ar5008_get_ht_tpow(struct athn_softc *, struct ieee80211_channel *,
124 	    uint8_t, const struct ar_cal_target_power_ht *, int, uint8_t[]);
125 void	ar5008_set_noise_immunity_level(struct athn_softc *, int);
126 void	ar5008_enable_ofdm_weak_signal(struct athn_softc *);
127 void	ar5008_disable_ofdm_weak_signal(struct athn_softc *);
128 void	ar5008_set_cck_weak_signal(struct athn_softc *, int);
129 void	ar5008_set_firstep_level(struct athn_softc *, int);
130 void	ar5008_set_spur_immunity_level(struct athn_softc *, int);
131 
132 /* Extern functions. */
133 void	athn_stop(struct ifnet *, int);
134 int	athn_interpolate(int, int, int, int, int);
135 int	athn_txtime(struct athn_softc *, int, int, u_int);
136 void	athn_inc_tx_trigger_level(struct athn_softc *);
137 int	athn_tx_pending(struct athn_softc *, int);
138 void	athn_stop_tx_dma(struct athn_softc *, int);
139 void	athn_get_delta_slope(uint32_t, uint32_t *, uint32_t *);
140 void	athn_config_pcie(struct athn_softc *);
141 void	athn_config_nonpcie(struct athn_softc *);
142 uint8_t	athn_chan2fbin(struct ieee80211_channel *);
143 uint8_t	ar5416_get_rf_rev(struct athn_softc *);
144 void	ar5416_reset_addac(struct athn_softc *, struct ieee80211_channel *);
145 void	ar5416_rf_reset(struct athn_softc *, struct ieee80211_channel *);
146 void	ar5416_reset_bb_gain(struct athn_softc *, struct ieee80211_channel *);
147 void	ar9280_reset_rx_gain(struct athn_softc *, struct ieee80211_channel *);
148 void	ar9280_reset_tx_gain(struct athn_softc *, struct ieee80211_channel *);
149 
150 
151 int
152 ar5008_attach(struct athn_softc *sc)
153 {
154 	struct athn_ops *ops = &sc->ops;
155 	struct ieee80211com *ic = &sc->sc_ic;
156 	struct ar_base_eep_header *base;
157 	uint8_t eep_ver, kc_entries_log;
158 	int error;
159 
160 	/* Set callbacks for AR5008, AR9001 and AR9002 families. */
161 	ops->gpio_read = ar5008_gpio_read;
162 	ops->gpio_write = ar5008_gpio_write;
163 	ops->gpio_config_input = ar5008_gpio_config_input;
164 	ops->gpio_config_output = ar5008_gpio_config_output;
165 	ops->rfsilent_init = ar5008_rfsilent_init;
166 
167 	ops->dma_alloc = ar5008_dma_alloc;
168 	ops->dma_free = ar5008_dma_free;
169 	ops->rx_enable = ar5008_rx_enable;
170 	ops->intr = ar5008_intr;
171 	ops->tx = ar5008_tx;
172 
173 	ops->set_rf_mode = ar5008_set_rf_mode;
174 	ops->rf_bus_request = ar5008_rf_bus_request;
175 	ops->rf_bus_release = ar5008_rf_bus_release;
176 	ops->set_phy = ar5008_set_phy;
177 	ops->set_delta_slope = ar5008_set_delta_slope;
178 	ops->enable_antenna_diversity = ar5008_enable_antenna_diversity;
179 	ops->init_baseband = ar5008_init_baseband;
180 	ops->disable_phy = ar5008_disable_phy;
181 	ops->set_rxchains = ar5008_set_rxchains;
182 	ops->noisefloor_calib = ar5008_do_noisefloor_calib;
183 	ops->init_noisefloor_calib = ar5008_init_noisefloor_calib;
184 	ops->get_noisefloor = ar5008_get_noisefloor;
185 	ops->apply_noisefloor = ar5008_apply_noisefloor;
186 	ops->do_calib = ar5008_do_calib;
187 	ops->next_calib = ar5008_next_calib;
188 	ops->hw_init = ar5008_hw_init;
189 
190 	ops->set_noise_immunity_level = ar5008_set_noise_immunity_level;
191 	ops->enable_ofdm_weak_signal = ar5008_enable_ofdm_weak_signal;
192 	ops->disable_ofdm_weak_signal = ar5008_disable_ofdm_weak_signal;
193 	ops->set_cck_weak_signal = ar5008_set_cck_weak_signal;
194 	ops->set_firstep_level = ar5008_set_firstep_level;
195 	ops->set_spur_immunity_level = ar5008_set_spur_immunity_level;
196 
197 	/* Set MAC registers offsets. */
198 	sc->obs_off = AR_OBS;
199 	sc->gpio_input_en_off = AR_GPIO_INPUT_EN_VAL;
200 
201 	if (!(sc->flags & ATHN_FLAG_PCIE))
202 		athn_config_nonpcie(sc);
203 	else
204 		athn_config_pcie(sc);
205 
206 	/* Read entire ROM content into memory. */
207 	if ((error = ar5008_read_rom(sc)) != 0) {
208 		printf("%s: could not read ROM\n", sc->sc_dev.dv_xname);
209 		return (error);
210 	}
211 
212 	/* Get RF revision. */
213 	sc->rf_rev = ar5416_get_rf_rev(sc);
214 
215 	base = sc->eep;
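	/*
	 * The ROM version word packs the major version into the upper
	 * 4 bits and the revision into the lower 12 bits; e.g. a value
	 * of 0x4002 decodes to version 4, revision 2.
	 */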
216 	eep_ver = (base->version >> 12) & 0xf;
217 	sc->eep_rev = (base->version & 0xfff);
218 	if (eep_ver != AR_EEP_VER || sc->eep_rev == 0) {
219 		printf("%s: unsupported ROM version %d.%d\n",
220 		    sc->sc_dev.dv_xname, eep_ver, sc->eep_rev);
221 		return (EINVAL);
222 	}
223 
224 	if (base->opCapFlags & AR_OPFLAGS_11A) {
225 		sc->flags |= ATHN_FLAG_11A;
226 		if ((base->opCapFlags & AR_OPFLAGS_11N_5G20) == 0)
227 			sc->flags |= ATHN_FLAG_11N;
228 #ifdef notyet
229 		if ((base->opCapFlags & AR_OPFLAGS_11N_5G40) == 0)
230 			sc->flags |= ATHN_FLAG_11N;
231 #endif
232 	}
233 	if (base->opCapFlags & AR_OPFLAGS_11G) {
234 		sc->flags |= ATHN_FLAG_11G;
235 		if ((base->opCapFlags & AR_OPFLAGS_11N_2G20) == 0)
236 			sc->flags |= ATHN_FLAG_11N;
237 #ifdef notyet
238 		if ((base->opCapFlags & AR_OPFLAGS_11N_2G40) == 0)
239 			sc->flags |= ATHN_FLAG_11N;
240 #endif
241 	}
242 
243 	IEEE80211_ADDR_COPY(ic->ic_myaddr, base->macAddr);
244 
245 	/* Check if we have a hardware radio switch. */
246 	if (base->rfSilent & AR_EEP_RFSILENT_ENABLED) {
247 		sc->flags |= ATHN_FLAG_RFSILENT;
248 		/* Get GPIO pin used by hardware radio switch. */
249 		sc->rfsilent_pin = MS(base->rfSilent,
250 		    AR_EEP_RFSILENT_GPIO_SEL);
251 		/* Get polarity of hardware radio switch. */
252 		if (base->rfSilent & AR_EEP_RFSILENT_POLARITY)
253 			sc->flags |= ATHN_FLAG_RFSILENT_REVERSED;
254 	}
255 
256 	/* Get the number of HW key cache entries. */
257 	kc_entries_log = MS(base->deviceCap, AR_EEP_DEVCAP_KC_ENTRIES);
258 	sc->kc_entries = (kc_entries_log != 0) ?
259 	    1 << kc_entries_log : AR_KEYTABLE_SIZE;
260 	if (sc->kc_entries > AR_KEYTABLE_SIZE)
261 		sc->kc_entries = AR_KEYTABLE_SIZE;
262 
263 	sc->txchainmask = base->txMask;
264 	if (sc->mac_ver == AR_SREV_VERSION_5416_PCI &&
265 	    !(base->opCapFlags & AR_OPFLAGS_11A)) {
266 		/* For single-band AR5416 PCI, use GPIO pin 0. */
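		/*
		 * Chain mask 0x5 selects chains 0 and 2 only, 0x7 selects
		 * all three; GPIO 0 presumably reports which board variant
		 * is present.
		 */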
267 		sc->rxchainmask = ar5008_gpio_read(sc, 0) ? 0x5 : 0x7;
268 	} else
269 		sc->rxchainmask = base->rxMask;
270 
271 	ops->setup(sc);
272 	return (0);
273 }
274 
275 /*
276  * Read 16-bit word from ROM.
277  */
278 int
279 ar5008_read_eep_word(struct athn_softc *sc, uint32_t addr, uint16_t *val)
280 {
281 	uint32_t reg;
282 	int ntries;
283 
284 	reg = AR_READ(sc, AR_EEPROM_OFFSET(addr));
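	/* Poll for completion: 1000 iterations of DELAY(10) is ~10ms. */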
285 	for (ntries = 0; ntries < 1000; ntries++) {
286 		reg = AR_READ(sc, AR_EEPROM_STATUS_DATA);
287 		if (!(reg & (AR_EEPROM_STATUS_DATA_BUSY |
288 		    AR_EEPROM_STATUS_DATA_PROT_ACCESS))) {
289 			*val = MS(reg, AR_EEPROM_STATUS_DATA_VAL);
290 			return (0);
291 		}
292 		DELAY(10);
293 	}
294 	*val = 0xffff;
295 	return (ETIMEDOUT);
296 }
297 
298 int
299 ar5008_read_rom(struct athn_softc *sc)
300 {
301 	uint32_t addr, end;
302 	uint16_t magic, sum, *eep;
303 	int need_swap = 0;
304 	int error;
305 
306 	/* Determine ROM endianness. */
307 	error = ar5008_read_eep_word(sc, AR_EEPROM_MAGIC_OFFSET, &magic);
308 	if (error != 0)
309 		return (error);
310 	if (magic != AR_EEPROM_MAGIC) {
311 		if (magic != swap16(AR_EEPROM_MAGIC)) {
312 			DPRINTF(("invalid ROM magic 0x%x != 0x%x\n",
313 			    magic, AR_EEPROM_MAGIC));
314 			return (EIO);
315 		}
316 		DPRINTF(("non-native ROM endianness\n"));
317 		need_swap = 1;
318 	}
319 
320 	/* Allocate space to store ROM in host memory. */
321 	sc->eep = malloc(sc->eep_size, M_DEVBUF, M_NOWAIT);
322 	if (sc->eep == NULL)
323 		return (ENOMEM);
324 
325 	/* Read entire ROM and compute checksum. */
326 	sum = 0;
327 	eep = sc->eep;
328 	end = sc->eep_base + sc->eep_size / sizeof(uint16_t);
329 	for (addr = sc->eep_base; addr < end; addr++, eep++) {
330 		if ((error = ar5008_read_eep_word(sc, addr, eep)) != 0) {
331 			DPRINTF(("could not read ROM at 0x%x\n", addr));
332 			return (error);
333 		}
334 		if (need_swap)
335 			*eep = swap16(*eep);
336 		sum ^= *eep;
337 	}
338 	if (sum != 0xffff) {
339 		printf("%s: bad ROM checksum 0x%04x\n",
340 		    sc->sc_dev.dv_xname, sum);
341 		return (EIO);
342 	}
343 	if (need_swap)
344 		ar5008_swap_rom(sc);
345 
346 	return (0);
347 }
348 
349 void
350 ar5008_swap_rom(struct athn_softc *sc)
351 {
352 	struct ar_base_eep_header *base = sc->eep;
353 
354 	/* Swap common fields first. */
355 	base->length = swap16(base->length);
356 	base->version = swap16(base->version);
357 	base->regDmn[0] = swap16(base->regDmn[0]);
358 	base->regDmn[1] = swap16(base->regDmn[1]);
359 	base->rfSilent = swap16(base->rfSilent);
360 	base->blueToothOptions = swap16(base->blueToothOptions);
361 	base->deviceCap = swap16(base->deviceCap);
362 
363 	/* Swap device-dependent fields. */
364 	sc->ops.swap_rom(sc);
365 }
366 
367 /*
368  * Access to General Purpose Input/Output ports.
369  */
370 int
371 ar5008_gpio_read(struct athn_softc *sc, int pin)
372 {
373 	KASSERT(pin < sc->ngpiopins);
374 	if ((sc->flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc))
375 		return (!((AR_READ(sc, AR7010_GPIO_IN) >> pin) & 1));
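	/*
	 * Input state for pin N is reported at bit (ngpiopins + N) of
	 * AR_GPIO_IN_OUT; the low bits presumably mirror the output state.
	 */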
376 	return ((AR_READ(sc, AR_GPIO_IN_OUT) >> (sc->ngpiopins + pin)) & 1);
377 }
378 
379 void
380 ar5008_gpio_write(struct athn_softc *sc, int pin, int set)
381 {
382 	uint32_t reg;
383 
384 	KASSERT(pin < sc->ngpiopins);
385 
386 	if (sc->flags & ATHN_FLAG_USB)
387 		set = !set;	/* AR9271/AR7010 is reversed. */
388 
389 	if ((sc->flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
390 		/* Special case for AR7010. */
391 		reg = AR_READ(sc, AR7010_GPIO_OUT);
392 		if (set)
393 			reg |= 1 << pin;
394 		else
395 			reg &= ~(1 << pin);
396 		AR_WRITE(sc, AR7010_GPIO_OUT, reg);
397 	} else {
398 		reg = AR_READ(sc, AR_GPIO_IN_OUT);
399 		if (set)
400 			reg |= 1 << pin;
401 		else
402 			reg &= ~(1 << pin);
403 		AR_WRITE(sc, AR_GPIO_IN_OUT, reg);
404 	}
405 	AR_WRITE_BARRIER(sc);
406 }
407 
408 void
409 ar5008_gpio_config_input(struct athn_softc *sc, int pin)
410 {
411 	uint32_t reg;
412 
413 	if ((sc->flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
414 		/* Special case for AR7010. */
415 		AR_SETBITS(sc, AR7010_GPIO_OE, 1 << pin);
416 	} else {
417 		reg = AR_READ(sc, AR_GPIO_OE_OUT);
418 		reg &= ~(AR_GPIO_OE_OUT_DRV_M << (pin * 2));
419 		reg |= AR_GPIO_OE_OUT_DRV_NO << (pin * 2);
420 		AR_WRITE(sc, AR_GPIO_OE_OUT, reg);
421 	}
422 	AR_WRITE_BARRIER(sc);
423 }
424 
425 void
426 ar5008_gpio_config_output(struct athn_softc *sc, int pin, int type)
427 {
428 	uint32_t reg;
429 	int mux, off;
430 
431 	if ((sc->flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
432 		/* Special case for AR7010. */
433 		AR_CLRBITS(sc, AR7010_GPIO_OE, 1 << pin);
434 		AR_WRITE_BARRIER(sc);
435 		return;
436 	}
437 	mux = pin / 6;
438 	off = pin % 6;
439 
440 	reg = AR_READ(sc, AR_GPIO_OUTPUT_MUX(mux));
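	/*
	 * On pre-AR9280 parts the first output mux register reportedly
	 * has a hardware defect: the fields above bit 4 sit one position
	 * higher than documented, hence the shuffle below.
	 */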
441 	if (!AR_SREV_9280_20_OR_LATER(sc) && mux == 0)
442 		reg = (reg & ~0x1f0) | (reg & 0x1f0) << 1;
443 	reg &= ~(0x1f << (off * 5));
444 	reg |= (type & 0x1f) << (off * 5);
445 	AR_WRITE(sc, AR_GPIO_OUTPUT_MUX(mux), reg);
446 
447 	reg = AR_READ(sc, AR_GPIO_OE_OUT);
448 	reg &= ~(AR_GPIO_OE_OUT_DRV_M << (pin * 2));
449 	reg |= AR_GPIO_OE_OUT_DRV_ALL << (pin * 2);
450 	AR_WRITE(sc, AR_GPIO_OE_OUT, reg);
451 	AR_WRITE_BARRIER(sc);
452 }
453 
454 void
455 ar5008_rfsilent_init(struct athn_softc *sc)
456 {
457 	uint32_t reg;
458 
459 	/* Configure hardware radio switch. */
460 	AR_SETBITS(sc, AR_GPIO_INPUT_EN_VAL, AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
461 	reg = AR_READ(sc, AR_GPIO_INPUT_MUX2);
462 	reg = RW(reg, AR_GPIO_INPUT_MUX2_RFSILENT, 0);
463 	AR_WRITE(sc, AR_GPIO_INPUT_MUX2, reg);
464 	ar5008_gpio_config_input(sc, sc->rfsilent_pin);
465 	AR_SETBITS(sc, AR_PHY_TEST, AR_PHY_TEST_RFSILENT_BB);
466 	if (!(sc->flags & ATHN_FLAG_RFSILENT_REVERSED)) {
467 		AR_SETBITS(sc, AR_GPIO_INTR_POL,
468 		    AR_GPIO_INTR_POL_PIN(sc->rfsilent_pin));
469 	}
470 	AR_WRITE_BARRIER(sc);
471 }
472 
473 int
474 ar5008_dma_alloc(struct athn_softc *sc)
475 {
476 	int error;
477 
478 	error = ar5008_tx_alloc(sc);
479 	if (error != 0)
480 		return (error);
481 
482 	error = ar5008_rx_alloc(sc);
483 	if (error != 0)
484 		return (error);
485 
486 	return (0);
487 }
488 
489 void
490 ar5008_dma_free(struct athn_softc *sc)
491 {
492 	ar5008_tx_free(sc);
493 	ar5008_rx_free(sc);
494 }
495 
496 int
497 ar5008_tx_alloc(struct athn_softc *sc)
498 {
499 	struct athn_tx_buf *bf;
500 	bus_size_t size;
501 	int error, nsegs, i;
502 
503 	/*
504 	 * Allocate a pool of Tx descriptors shared between all Tx queues.
505 	 */
506 	size = ATHN_NTXBUFS * AR5008_MAX_SCATTER * sizeof(struct ar_tx_desc);
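	/*
	 * Each Tx buffer owns AR5008_MAX_SCATTER consecutive descriptors,
	 * one per possible DMA segment (see bf_descs/bf_daddr below).
	 */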
507 
508 	error = bus_dmamap_create(sc->sc_dmat, size, 1, size, 0,
509 	    BUS_DMA_NOWAIT, &sc->map);
510 	if (error != 0)
511 		goto fail;
512 
513 	error = bus_dmamem_alloc(sc->sc_dmat, size, 4, 0, &sc->seg, 1,
514 	    &nsegs, BUS_DMA_NOWAIT | BUS_DMA_ZERO);
515 	if (error != 0)
516 		goto fail;
517 
518 	error = bus_dmamem_map(sc->sc_dmat, &sc->seg, 1, size,
519 	    (caddr_t *)&sc->descs, BUS_DMA_NOWAIT | BUS_DMA_COHERENT);
520 	if (error != 0)
521 		goto fail;
522 
523 	error = bus_dmamap_load_raw(sc->sc_dmat, sc->map, &sc->seg, 1, size,
524 	    BUS_DMA_NOWAIT);
525 	if (error != 0)
526 		goto fail;
527 
528 	SIMPLEQ_INIT(&sc->txbufs);
529 	for (i = 0; i < ATHN_NTXBUFS; i++) {
530 		bf = &sc->txpool[i];
531 
532 		error = bus_dmamap_create(sc->sc_dmat, ATHN_TXBUFSZ,
533 		    AR5008_MAX_SCATTER, ATHN_TXBUFSZ, 0, BUS_DMA_NOWAIT,
534 		    &bf->bf_map);
535 		if (error != 0) {
536 			printf("%s: could not create Tx buf DMA map\n",
537 			    sc->sc_dev.dv_xname);
538 			goto fail;
539 		}
540 
541 		bf->bf_descs =
542 		    &((struct ar_tx_desc *)sc->descs)[i * AR5008_MAX_SCATTER];
543 		bf->bf_daddr = sc->map->dm_segs[0].ds_addr +
544 		    i * AR5008_MAX_SCATTER * sizeof(struct ar_tx_desc);
545 
546 		SIMPLEQ_INSERT_TAIL(&sc->txbufs, bf, bf_list);
547 	}
548 	return (0);
549  fail:
550 	ar5008_tx_free(sc);
551 	return (error);
552 }
553 
554 void
555 ar5008_tx_free(struct athn_softc *sc)
556 {
557 	struct athn_tx_buf *bf;
558 	int i;
559 
560 	for (i = 0; i < ATHN_NTXBUFS; i++) {
561 		bf = &sc->txpool[i];
562 
563 		if (bf->bf_map != NULL)
564 			bus_dmamap_destroy(sc->sc_dmat, bf->bf_map);
565 	}
566 	/* Free Tx descriptors. */
567 	if (sc->map != NULL) {
568 		if (sc->descs != NULL) {
569 			bus_dmamap_unload(sc->sc_dmat, sc->map);
570 			bus_dmamem_unmap(sc->sc_dmat, (caddr_t)sc->descs,
571 			    ATHN_NTXBUFS * AR5008_MAX_SCATTER *
572 			    sizeof(struct ar_tx_desc));
573 			bus_dmamem_free(sc->sc_dmat, &sc->seg, 1);
574 		}
575 		bus_dmamap_destroy(sc->sc_dmat, sc->map);
576 	}
577 }
578 
579 int
580 ar5008_rx_alloc(struct athn_softc *sc)
581 {
582 	struct athn_rxq *rxq = &sc->rxq[0];
583 	struct athn_rx_buf *bf;
584 	struct ar_rx_desc *ds;
585 	bus_size_t size;
586 	int error, nsegs, i;
587 
588 	rxq->bf = mallocarray(ATHN_NRXBUFS, sizeof(*bf), M_DEVBUF,
589 	    M_NOWAIT | M_ZERO);
590 	if (rxq->bf == NULL)
591 		return (ENOMEM);
592 
593 	size = ATHN_NRXBUFS * sizeof(struct ar_rx_desc);
594 
595 	error = bus_dmamap_create(sc->sc_dmat, size, 1, size, 0,
596 	    BUS_DMA_NOWAIT, &rxq->map);
597 	if (error != 0)
598 		goto fail;
599 
600 	error = bus_dmamem_alloc(sc->sc_dmat, size, 0, 0, &rxq->seg, 1,
601 	    &nsegs, BUS_DMA_NOWAIT | BUS_DMA_ZERO);
602 	if (error != 0)
603 		goto fail;
604 
605 	error = bus_dmamem_map(sc->sc_dmat, &rxq->seg, 1, size,
606 	    (caddr_t *)&rxq->descs, BUS_DMA_NOWAIT | BUS_DMA_COHERENT);
607 	if (error != 0)
608 		goto fail;
609 
610 	error = bus_dmamap_load_raw(sc->sc_dmat, rxq->map, &rxq->seg, 1,
611 	    size, BUS_DMA_NOWAIT);
612 	if (error != 0)
613 		goto fail;
614 
615 	for (i = 0; i < ATHN_NRXBUFS; i++) {
616 		bf = &rxq->bf[i];
617 		ds = &((struct ar_rx_desc *)rxq->descs)[i];
618 
619 		error = bus_dmamap_create(sc->sc_dmat, ATHN_RXBUFSZ, 1,
620 		    ATHN_RXBUFSZ, 0, BUS_DMA_NOWAIT | BUS_DMA_ALLOCNOW,
621 		    &bf->bf_map);
622 		if (error != 0) {
623 			printf("%s: could not create Rx buf DMA map\n",
624 			    sc->sc_dev.dv_xname);
625 			goto fail;
626 		}
627 		/*
628 		 * Assumes MCLGETL returns cache-line-size aligned buffers.
629 		 */
630 		bf->bf_m = MCLGETL(NULL, M_DONTWAIT, ATHN_RXBUFSZ);
631 		if (bf->bf_m == NULL) {
632 			printf("%s: could not allocate Rx mbuf\n",
633 			    sc->sc_dev.dv_xname);
634 			error = ENOBUFS;
635 			goto fail;
636 		}
637 
638 		error = bus_dmamap_load(sc->sc_dmat, bf->bf_map,
639 		    mtod(bf->bf_m, void *), ATHN_RXBUFSZ, NULL,
640 		    BUS_DMA_NOWAIT | BUS_DMA_READ);
641 		if (error != 0) {
642 			printf("%s: could not DMA map Rx buffer\n",
643 			    sc->sc_dev.dv_xname);
644 			goto fail;
645 		}
646 
647 		bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
648 		    BUS_DMASYNC_PREREAD);
649 
650 		bf->bf_desc = ds;
651 		bf->bf_daddr = rxq->map->dm_segs[0].ds_addr +
652 		    i * sizeof(struct ar_rx_desc);
653 	}
654 	return (0);
655  fail:
656 	ar5008_rx_free(sc);
657 	return (error);
658 }
659 
660 void
661 ar5008_rx_free(struct athn_softc *sc)
662 {
663 	struct athn_rxq *rxq = &sc->rxq[0];
664 	struct athn_rx_buf *bf;
665 	int i;
666 
667 	if (rxq->bf == NULL)
668 		return;
669 	for (i = 0; i < ATHN_NRXBUFS; i++) {
670 		bf = &rxq->bf[i];
671 
672 		if (bf->bf_map != NULL)
673 			bus_dmamap_destroy(sc->sc_dmat, bf->bf_map);
674 		m_freem(bf->bf_m);
675 	}
676 	free(rxq->bf, M_DEVBUF, 0);
677 
678 	/* Free Rx descriptors. */
679 	if (rxq->map != NULL) {
680 		if (rxq->descs != NULL) {
681 			bus_dmamap_unload(sc->sc_dmat, rxq->map);
682 			bus_dmamem_unmap(sc->sc_dmat, (caddr_t)rxq->descs,
683 			    ATHN_NRXBUFS * sizeof(struct ar_rx_desc));
684 			bus_dmamem_free(sc->sc_dmat, &rxq->seg, 1);
685 		}
686 		bus_dmamap_destroy(sc->sc_dmat, rxq->map);
687 	}
688 }
689 
690 void
691 ar5008_rx_enable(struct athn_softc *sc)
692 {
693 	struct athn_rxq *rxq = &sc->rxq[0];
694 	struct athn_rx_buf *bf;
695 	struct ar_rx_desc *ds;
696 	int i;
697 
698 	/* Setup and link Rx descriptors. */
699 	SIMPLEQ_INIT(&rxq->head);
700 	rxq->lastds = NULL;
701 	for (i = 0; i < ATHN_NRXBUFS; i++) {
702 		bf = &rxq->bf[i];
703 		ds = bf->bf_desc;
704 
705 		memset(ds, 0, sizeof(*ds));
706 		ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
707 		ds->ds_ctl1 = SM(AR_RXC1_BUF_LEN, ATHN_RXBUFSZ);
708 
709 		if (rxq->lastds != NULL) {
710 			((struct ar_rx_desc *)rxq->lastds)->ds_link =
711 			    bf->bf_daddr;
712 		}
713 		SIMPLEQ_INSERT_TAIL(&rxq->head, bf, bf_list);
714 		rxq->lastds = ds;
715 	}
716 	bus_dmamap_sync(sc->sc_dmat, rxq->map, 0, rxq->map->dm_mapsize,
717 	    BUS_DMASYNC_PREREAD);
718 
719 	/* Enable Rx. */
720 	AR_WRITE(sc, AR_RXDP, SIMPLEQ_FIRST(&rxq->head)->bf_daddr);
721 	AR_WRITE(sc, AR_CR, AR_CR_RXE);
722 	AR_WRITE_BARRIER(sc);
723 }
724 
725 #if NBPFILTER > 0
726 void
727 ar5008_rx_radiotap(struct athn_softc *sc, struct mbuf *m,
728     struct ar_rx_desc *ds)
729 {
730 #define IEEE80211_RADIOTAP_F_SHORTGI	0x80	/* XXX from FBSD */
731 
732 	struct athn_rx_radiotap_header *tap = &sc->sc_rxtap;
733 	struct ieee80211com *ic = &sc->sc_ic;
734 	uint64_t tsf;
735 	uint32_t tstamp;
736 	uint8_t rate;
737 
738 	/* Extend the 15-bit timestamp from Rx descriptor to 64-bit TSF. */
739 	tstamp = ds->ds_status2;
740 	tsf = AR_READ(sc, AR_TSF_U32);
741 	tsf = tsf << 32 | AR_READ(sc, AR_TSF_L32);
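	/*
	 * If the low 15 bits of the TSF have already wrapped past the Rx
	 * timestamp, the frame was received before the wrap; step back one
	 * 0x8000 period before merging in the timestamp.
	 */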
742 	if ((tsf & 0x7fff) < tstamp)
743 		tsf -= 0x8000;
744 	tsf = (tsf & ~0x7fff) | tstamp;
745 
746 	tap->wr_flags = IEEE80211_RADIOTAP_F_FCS;
747 	tap->wr_tsft = htole64(tsf);
748 	tap->wr_chan_freq = htole16(ic->ic_bss->ni_chan->ic_freq);
749 	tap->wr_chan_flags = htole16(ic->ic_bss->ni_chan->ic_flags);
750 	tap->wr_dbm_antsignal = MS(ds->ds_status4, AR_RXS4_RSSI_COMBINED);
751 	/* XXX noise. */
752 	tap->wr_antenna = MS(ds->ds_status3, AR_RXS3_ANTENNA);
753 	tap->wr_rate = 0;	/* In case it can't be found below. */
754 	if (AR_SREV_5416_20_OR_LATER(sc))
755 		rate = MS(ds->ds_status0, AR_RXS0_RATE);
756 	else
757 		rate = MS(ds->ds_status3, AR_RXS3_RATE);
758 	if (rate & 0x80) {		/* HT. */
759 		/* Bit 7 set means HT MCS instead of rate. */
760 		tap->wr_rate = rate;
761 		if (!(ds->ds_status3 & AR_RXS3_GI))
762 			tap->wr_flags |= IEEE80211_RADIOTAP_F_SHORTGI;
763 
764 	} else if (rate & 0x10) {	/* CCK. */
765 		if (rate & 0x04)
766 			tap->wr_flags |= IEEE80211_RADIOTAP_F_SHORTPRE;
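		/*
		 * Radiotap rates are in 500kb/s units; mask off the CCK
		 * marker (0x10) and short-preamble (0x04) bits first.
		 */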
767 		switch (rate & ~0x14) {
768 		case 0xb: tap->wr_rate =   2; break;
769 		case 0xa: tap->wr_rate =   4; break;
770 		case 0x9: tap->wr_rate =  11; break;
771 		case 0x8: tap->wr_rate =  22; break;
772 		}
773 	} else {			/* OFDM. */
774 		switch (rate) {
775 		case 0xb: tap->wr_rate =  12; break;
776 		case 0xf: tap->wr_rate =  18; break;
777 		case 0xa: tap->wr_rate =  24; break;
778 		case 0xe: tap->wr_rate =  36; break;
779 		case 0x9: tap->wr_rate =  48; break;
780 		case 0xd: tap->wr_rate =  72; break;
781 		case 0x8: tap->wr_rate =  96; break;
782 		case 0xc: tap->wr_rate = 108; break;
783 		}
784 	}
785 	bpf_mtap_hdr(sc->sc_drvbpf, tap, sc->sc_rxtap_len, m, BPF_DIRECTION_IN);
786 }
787 #endif
788 
789 int
790 ar5008_ccmp_decap(struct athn_softc *sc, struct mbuf *m, struct ieee80211_node *ni)
791 {
792 	struct ieee80211com *ic = &sc->sc_ic;
793 	struct ieee80211_key *k;
794 	struct ieee80211_frame *wh;
795 	struct ieee80211_rx_ba *ba;
796 	uint64_t pn, *prsc;
797 	u_int8_t *ivp;
798 	uint8_t tid;
799 	int hdrlen, hasqos;
800 	uintptr_t entry;
801 
802 	wh = mtod(m, struct ieee80211_frame *);
803 	hdrlen = ieee80211_get_hdrlen(wh);
804 	ivp = mtod(m, u_int8_t *) + hdrlen;
805 
806 	/* find key for decryption */
807 	k = ieee80211_get_rxkey(ic, m, ni);
808 	if (k == NULL || k->k_cipher != IEEE80211_CIPHER_CCMP)
809 		return 1;
810 
811 	/* Sanity checks to ensure this is really a key we installed. */
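	/*
	 * Expected key cache layout (as set up at key-install time):
	 * group keys occupy entries 0..IEEE80211_WEP_NKID-1 indexed by
	 * key id; pairwise keys start at IEEE80211_WEP_NKID, offset by
	 * the peer's association ID in hostap mode.
	 */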
812 	entry = (uintptr_t)k->k_priv;
813 	if (k->k_flags & IEEE80211_KEY_GROUP) {
814 		if (k->k_id >= IEEE80211_WEP_NKID ||
815 		    entry != k->k_id)
816 			return 1;
817 	} else {
818 #ifndef IEEE80211_STA_ONLY
819 		if (ic->ic_opmode == IEEE80211_M_HOSTAP) {
820 			if (entry != IEEE80211_WEP_NKID +
821 			    IEEE80211_AID(ni->ni_associd))
822 				return 1;
823 		} else
824 #endif
825 			if (entry != IEEE80211_WEP_NKID)
826 				return 1;
827 	}
828 
829 	/* Check that ExtIV bit is set. */
830 	if (!(ivp[3] & IEEE80211_WEP_EXTIV))
831 		return 1;
832 
833 	hasqos = ieee80211_has_qos(wh);
834 	tid = hasqos ? ieee80211_get_qos(wh) & IEEE80211_QOS_TID : 0;
835 	ba = hasqos ? &ni->ni_rx_ba[tid] : NULL;
836 	prsc = &k->k_rsc[tid];
837 
838 	/* Extract the 48-bit PN from the CCMP header. */
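	/* ivp[2] is reserved and ivp[3] holds the key ID/ExtIV byte. */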
839 	pn = (uint64_t)ivp[0]       |
840 	     (uint64_t)ivp[1] <<  8 |
841 	     (uint64_t)ivp[4] << 16 |
842 	     (uint64_t)ivp[5] << 24 |
843 	     (uint64_t)ivp[6] << 32 |
844 	     (uint64_t)ivp[7] << 40;
845 	if (pn <= *prsc) {
846 		ic->ic_stats.is_ccmp_replays++;
847 		return 1;
848 	}
849 	/* Last seen packet number is updated in ieee80211_inputm(). */
850 
851 	/* Strip MIC. IV will be stripped by ieee80211_inputm(). */
852 	m_adj(m, -IEEE80211_CCMP_MICLEN);
853 	return 0;
854 }
855 
856 static __inline int
857 ar5008_rx_process(struct athn_softc *sc, struct mbuf_list *ml)
858 {
859 	struct ieee80211com *ic = &sc->sc_ic;
860 	struct ifnet *ifp = &ic->ic_if;
861 	struct athn_rxq *rxq = &sc->rxq[0];
862 	struct athn_rx_buf *bf, *nbf;
863 	struct ar_rx_desc *ds;
864 	struct ieee80211_frame *wh;
865 	struct ieee80211_rxinfo rxi;
866 	struct ieee80211_node *ni;
867 	struct mbuf *m, *m1;
868 	int error, len, michael_mic_failure = 0;
869 
870 	bf = SIMPLEQ_FIRST(&rxq->head);
871 	if (__predict_false(bf == NULL)) {	/* Should not happen. */
872 		printf("%s: Rx queue is empty!\n", sc->sc_dev.dv_xname);
873 		return (ENOENT);
874 	}
875 	ds = bf->bf_desc;
876 
877 	if (!(ds->ds_status8 & AR_RXS8_DONE)) {
878 		/*
879 		 * On some parts, the status words can get corrupted
880 		 * (including the "done" bit), so we check the next
881 		 * descriptor "done" bit.  If it is set, it is a good
882 		 * indication that the status words are corrupted, so
883 		 * we skip this descriptor and drop the frame.
884 		 */
885 		nbf = SIMPLEQ_NEXT(bf, bf_list);
886 		if (nbf != NULL &&
887 		    (((struct ar_rx_desc *)nbf->bf_desc)->ds_status8 &
888 		     AR_RXS8_DONE)) {
889 			DPRINTF(("corrupted descriptor status=0x%x\n",
890 			    ds->ds_status8));
891 			/* HW will not "move" RXDP in this case, so do it. */
892 			AR_WRITE(sc, AR_RXDP, nbf->bf_daddr);
893 			AR_WRITE_BARRIER(sc);
894 			ifp->if_ierrors++;
895 			goto skip;
896 		}
897 		return (EBUSY);
898 	}
899 
900 	if (__predict_false(ds->ds_status1 & AR_RXS1_MORE)) {
901 		/* Drop frames that span multiple Rx descriptors. */
902 		DPRINTF(("dropping split frame\n"));
903 		ifp->if_ierrors++;
904 		goto skip;
905 	}
906 	if (!(ds->ds_status8 & AR_RXS8_FRAME_OK)) {
907 		if (ds->ds_status8 & AR_RXS8_CRC_ERR)
908 			DPRINTFN(6, ("CRC error\n"));
909 		else if (ds->ds_status8 & AR_RXS8_PHY_ERR)
910 			DPRINTFN(6, ("PHY error=0x%x\n",
911 			    MS(ds->ds_status8, AR_RXS8_PHY_ERR_CODE)));
912 		else if (ds->ds_status8 & (AR_RXS8_DECRYPT_CRC_ERR |
913 		    AR_RXS8_KEY_MISS | AR_RXS8_DECRYPT_BUSY_ERR)) {
914 			DPRINTFN(6, ("Decryption CRC error\n"));
915 			ic->ic_stats.is_ccmp_dec_errs++;
916 		} else if (ds->ds_status8 & AR_RXS8_MICHAEL_ERR) {
917 			DPRINTFN(2, ("Michael MIC failure\n"));
918 			michael_mic_failure = 1;
919 		}
920 		if (!michael_mic_failure) {
921 			ifp->if_ierrors++;
922 			goto skip;
923 		}
924 	} else {
925 		if (ds->ds_status8 & (AR_RXS8_CRC_ERR | AR_RXS8_PHY_ERR |
926 		    AR_RXS8_DECRYPT_CRC_ERR | AR_RXS8_MICHAEL_ERR)) {
927 			ifp->if_ierrors++;
928 			goto skip;
929 		}
930 	}
931 
932 	len = MS(ds->ds_status1, AR_RXS1_DATA_LEN);
933 	if (__predict_false(len < IEEE80211_MIN_LEN || len > ATHN_RXBUFSZ)) {
934 		DPRINTF(("corrupted descriptor length=%d\n", len));
935 		ifp->if_ierrors++;
936 		goto skip;
937 	}
938 
939 	/* Allocate a new Rx buffer. */
940 	m1 = MCLGETL(NULL, M_DONTWAIT, ATHN_RXBUFSZ);
941 	if (__predict_false(m1 == NULL)) {
942 		ic->ic_stats.is_rx_nombuf++;
943 		ifp->if_ierrors++;
944 		goto skip;
945 	}
946 
947 	/* Sync and unmap the old Rx buffer. */
948 	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
949 	    BUS_DMASYNC_POSTREAD);
950 	bus_dmamap_unload(sc->sc_dmat, bf->bf_map);
951 
952 	/* Map the new Rx buffer. */
953 	error = bus_dmamap_load(sc->sc_dmat, bf->bf_map, mtod(m1, void *),
954 	    ATHN_RXBUFSZ, NULL, BUS_DMA_NOWAIT | BUS_DMA_READ);
955 	if (__predict_false(error != 0)) {
956 		m_freem(m1);
957 
958 		/* Remap the old Rx buffer or panic. */
959 		error = bus_dmamap_load(sc->sc_dmat, bf->bf_map,
960 		    mtod(bf->bf_m, void *), ATHN_RXBUFSZ, NULL,
961 		    BUS_DMA_NOWAIT | BUS_DMA_READ);
962 		KASSERT(error == 0);
963 		ifp->if_ierrors++;
964 		goto skip;
965 	}
966 
967 	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
968 	    BUS_DMASYNC_PREREAD);
969 
970 	/* Write physical address of new Rx buffer. */
971 	ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
972 
973 	m = bf->bf_m;
974 	bf->bf_m = m1;
975 
976 	/* Finalize mbuf. */
977 	m->m_pkthdr.len = m->m_len = len;
978 
979 	wh = mtod(m, struct ieee80211_frame *);
980 
981 	if (michael_mic_failure) {
982 		/*
983 		 * Check that it is not a control frame
984 		 * (invalid MIC failures on valid ctl frames).
985 		 * Validate the transmitter's address to avoid passing
986 		 * corrupt frames with bogus addresses to net80211.
987 		 */
988 		if ((wh->i_fc[0] & IEEE80211_FC0_TYPE_CTL)) {
989 			switch (ic->ic_opmode) {
990 #ifndef IEEE80211_STA_ONLY
991 			case IEEE80211_M_HOSTAP:
992 				if (ieee80211_find_node(ic, wh->i_addr2))
993 					michael_mic_failure = 0;
994 				break;
995 #endif
996 			case IEEE80211_M_STA:
997 				if (IEEE80211_ADDR_EQ(wh->i_addr2,
998 				    ic->ic_bss->ni_macaddr))
999 					michael_mic_failure = 0;
1000 				break;
1001 			case IEEE80211_M_MONITOR:
1002 				michael_mic_failure = 0;
1003 				break;
1004 			default:
1005 				break;
1006 			}
1007 		}
1008 
1009 		if (michael_mic_failure) {
1010  			/* Report Michael MIC failures to net80211. */
1011 			if ((ic->ic_rsnciphers & IEEE80211_CIPHER_TKIP) ||
1012 			    ic->ic_rsngroupcipher == IEEE80211_CIPHER_TKIP) {
1013 				ic->ic_stats.is_rx_locmicfail++;
1014 				ieee80211_michael_mic_failure(ic, 0);
1015 			}
1016 			ifp->if_ierrors++;
1017 			m_freem(m);
1018 			goto skip;
1019 		}
1020 	}
1021 
1022 	/* Grab a reference to the source node. */
1023 	ni = ieee80211_find_rxnode(ic, wh);
1024 
1025 	/* Remove any HW padding after the 802.11 header. */
1026 	if (!(wh->i_fc[0] & IEEE80211_FC0_TYPE_CTL)) {
1027 		u_int hdrlen = ieee80211_get_hdrlen(wh);
1028 		if (hdrlen & 3) {
1029 			memmove((caddr_t)wh + 2, wh, hdrlen);
1030 			m_adj(m, 2);
1031 		}
1032 		wh = mtod(m, struct ieee80211_frame *);
1033 	}
1034 #if NBPFILTER > 0
1035 	if (__predict_false(sc->sc_drvbpf != NULL))
1036 		ar5008_rx_radiotap(sc, m, ds);
1037 #endif
1038 	/* Trim 802.11 FCS after radiotap. */
1039 	m_adj(m, -IEEE80211_CRC_LEN);
1040 
1041 	/* Send the frame to the 802.11 layer. */
1042 	rxi.rxi_flags = 0;	/* XXX */
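	/*
	 * RSSI is reported relative to the noise floor; adding the default
	 * noise floor gives an approximate absolute (dBm) value.
	 */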
1043 	rxi.rxi_rssi = MS(ds->ds_status4, AR_RXS4_RSSI_COMBINED);
1044 	rxi.rxi_rssi += AR_DEFAULT_NOISE_FLOOR;
1045 	rxi.rxi_tstamp = ds->ds_status2;
1046 	if (!(wh->i_fc[0] & IEEE80211_FC0_TYPE_CTL) &&
1047 	    (wh->i_fc[1] & IEEE80211_FC1_PROTECTED) &&
1048 	    (ic->ic_flags & IEEE80211_F_RSNON) &&
1049 	    (ni->ni_flags & IEEE80211_NODE_RXPROT) &&
1050 	    ((!IEEE80211_IS_MULTICAST(wh->i_addr1) &&
1051 	    ni->ni_rsncipher == IEEE80211_CIPHER_CCMP) ||
1052 	    (IEEE80211_IS_MULTICAST(wh->i_addr1) &&
1053 	    ni->ni_rsngroupcipher == IEEE80211_CIPHER_CCMP))) {
1054 		if (ar5008_ccmp_decap(sc, m, ni) != 0) {
1055 			ifp->if_ierrors++;
1056 			ieee80211_release_node(ic, ni);
1057 			m_freem(m);
1058 			goto skip;
1059 		}
1060 		rxi.rxi_flags |= IEEE80211_RXI_HWDEC;
1061 	}
1062 	ieee80211_inputm(ifp, m, ni, &rxi, ml);
1063 
1064 	/* Node is no longer needed. */
1065 	ieee80211_release_node(ic, ni);
1066 
1067  skip:
1068 	/* Unlink this descriptor from head. */
1069 	SIMPLEQ_REMOVE_HEAD(&rxq->head, bf_list);
1070 	memset(&ds->ds_status0, 0, 36);	/* XXX Really needed? */
1071 	ds->ds_status8 &= ~AR_RXS8_DONE;
1072 	ds->ds_link = 0;
1073 
1074 	/* Re-use this descriptor and link it to tail. */
1075 	if (__predict_true(!SIMPLEQ_EMPTY(&rxq->head)))
1076 		((struct ar_rx_desc *)rxq->lastds)->ds_link = bf->bf_daddr;
1077 	else
1078 		AR_WRITE(sc, AR_RXDP, bf->bf_daddr);
1079 	SIMPLEQ_INSERT_TAIL(&rxq->head, bf, bf_list);
1080 	rxq->lastds = ds;
1081 
1082 	/* Re-enable Rx. */
1083 	AR_WRITE(sc, AR_CR, AR_CR_RXE);
1084 	AR_WRITE_BARRIER(sc);
1085 	return (0);
1086 }
1087 
1088 void
1089 ar5008_rx_intr(struct athn_softc *sc)
1090 {
1091 	struct mbuf_list ml = MBUF_LIST_INITIALIZER();
1092 	struct ieee80211com *ic = &sc->sc_ic;
1093 	struct ifnet *ifp = &ic->ic_if;
1094 
1095 	while (ar5008_rx_process(sc, &ml) == 0);
1096 
1097 	if_input(ifp, &ml);
1098 }
1099 
1100 int
1101 ar5008_tx_process(struct athn_softc *sc, int qid)
1102 {
1103 	struct ieee80211com *ic = &sc->sc_ic;
1104 	struct ifnet *ifp = &ic->ic_if;
1105 	struct athn_txq *txq = &sc->txq[qid];
1106 	struct athn_node *an;
1107 	struct ieee80211_node *ni;
1108 	struct athn_tx_buf *bf;
1109 	struct ar_tx_desc *ds;
1110 	uint8_t failcnt;
1111 	int txfail = 0, rtscts;
1112 
1113 	bf = SIMPLEQ_FIRST(&txq->head);
1114 	if (bf == NULL)
1115 		return (ENOENT);
1116 	/* Get descriptor of last DMA segment. */
1117 	ds = &((struct ar_tx_desc *)bf->bf_descs)[bf->bf_map->dm_nsegs - 1];
1118 
1119 	if (!(ds->ds_status9 & AR_TXS9_DONE))
1120 		return (EBUSY);
1121 
1122 	SIMPLEQ_REMOVE_HEAD(&txq->head, bf_list);
1123 
1124 	sc->sc_tx_timer = 0;
1125 
1126 	/* These status bits are only valid when AR_TXS1_FRM_XMIT_OK is clear. */
1127 	if ((ds->ds_status1 & AR_TXS1_FRM_XMIT_OK) == 0) {
1128 		txfail = (ds->ds_status1 & AR_TXS1_EXCESSIVE_RETRIES);
1129 		if (txfail)
1130 			ifp->if_oerrors++;
1131 		if (ds->ds_status1 & AR_TXS1_UNDERRUN)
1132 			athn_inc_tx_trigger_level(sc);
1133 	}
1134 
1135 	an = (struct athn_node *)bf->bf_ni;
1136 	ni = (struct ieee80211_node *)bf->bf_ni;
1137 
1138 	/*
1139 	 * NB: the data fail count contains the number of un-acked tries
1140 	 * for the final series used.  We must add the number of tries for
1141 	 * each series that was fully processed to punish transmit rates in
1142 	 * the earlier series which did not perform well.
1143 	 */
1144 	failcnt  = MS(ds->ds_status1, AR_TXS1_DATA_FAIL_CNT);
1145 	/* Assume two tries per series, as per AR_TXC2_XMIT_DATA_TRIESx. */
1146 	failcnt += MS(ds->ds_status9, AR_TXS9_FINAL_IDX) * 2;
1147 
1148 	rtscts = (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE | AR_TXC0_CTS_ENABLE));
1149 
1150 	/* Update rate control statistics. */
1151 	if ((ni->ni_flags & IEEE80211_NODE_HT) && ic->ic_fixed_mcs == -1) {
1152 		const struct ieee80211_ht_rateset *rs =
1153 		    ieee80211_ra_get_ht_rateset(bf->bf_txmcs, 0 /* chan40 */,
1154 		    ieee80211_node_supports_ht_sgi20(ni));
1155 		unsigned int retries = 0, i;
1156 		int mcs = bf->bf_txmcs;
1157 
1158 		/* With RTS/CTS each Tx series used the same MCS. */
1159 		if (rtscts) {
1160 			retries = failcnt;
1161 		} else {
1162 			for (i = 0; i < failcnt; i++) {
1163 				if (mcs > rs->min_mcs) {
1164 					ieee80211_ra_add_stats_ht(&an->rn,
1165 					    ic, ni, mcs, 1, 1);
1166 					if (i % 2) /* two tries per series */
1167 						mcs--;
1168 				} else
1169 					retries++;
1170 			}
1171 		}
1172 
1173 		if (txfail && retries == 0) {
1174 			ieee80211_ra_add_stats_ht(&an->rn, ic, ni,
1175 			    mcs, 1, 1);
1176 		} else {
1177 			ieee80211_ra_add_stats_ht(&an->rn, ic, ni,
1178 			    mcs, retries + 1, retries);
1179 		}
1180 		if (ic->ic_state == IEEE80211_S_RUN) {
1181 #ifndef IEEE80211_STA_ONLY
1182 			if (ic->ic_opmode != IEEE80211_M_HOSTAP ||
1183 			    ni->ni_state == IEEE80211_STA_ASSOC)
1184 #endif
1185 				ieee80211_ra_choose(&an->rn, ic, ni);
1186 		}
1187 	} else if (ic->ic_fixed_rate == -1) {
1188 		an->amn.amn_txcnt++;
1189 		if (failcnt > 0)
1190 			an->amn.amn_retrycnt++;
1191 	}
1192 	DPRINTFN(5, ("Tx done qid=%d status1=%d fail count=%d\n",
1193 	    qid, ds->ds_status1, failcnt));
1194 
1195 	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
1196 	    BUS_DMASYNC_POSTWRITE);
1197 	bus_dmamap_unload(sc->sc_dmat, bf->bf_map);
1198 
1199 	m_freem(bf->bf_m);
1200 	bf->bf_m = NULL;
1201 	ieee80211_release_node(ic, bf->bf_ni);
1202 	bf->bf_ni = NULL;
1203 
1204 	/* Link Tx buffer back to global free list. */
1205 	SIMPLEQ_INSERT_TAIL(&sc->txbufs, bf, bf_list);
1206 	return (0);
1207 }
1208 
1209 void
1210 ar5008_tx_intr(struct athn_softc *sc)
1211 {
1212 	struct ieee80211com *ic = &sc->sc_ic;
1213 	struct ifnet *ifp = &ic->ic_if;
1214 	uint16_t mask = 0;
1215 	uint32_t reg;
1216 	int qid;
1217 
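	/*
	 * Each of these status fields is a per-QCU bitmap; merge them so
	 * that every queue with pending Tx completions is serviced below.
	 */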
1218 	reg = AR_READ(sc, AR_ISR_S0_S);
1219 	mask |= MS(reg, AR_ISR_S0_QCU_TXOK);
1220 	mask |= MS(reg, AR_ISR_S0_QCU_TXDESC);
1221 
1222 	reg = AR_READ(sc, AR_ISR_S1_S);
1223 	mask |= MS(reg, AR_ISR_S1_QCU_TXERR);
1224 	mask |= MS(reg, AR_ISR_S1_QCU_TXEOL);
1225 
1226 	DPRINTFN(4, ("Tx interrupt mask=0x%x\n", mask));
1227 	for (qid = 0; mask != 0; mask >>= 1, qid++) {
1228 		if (mask & 1)
1229 			while (ar5008_tx_process(sc, qid) == 0);
1230 	}
1231 	if (!SIMPLEQ_EMPTY(&sc->txbufs)) {
1232 		ifq_clr_oactive(&ifp->if_snd);
1233 		ifp->if_start(ifp);
1234 	}
1235 }
1236 
1237 #ifndef IEEE80211_STA_ONLY
1238 /*
1239  * Process Software Beacon Alert interrupts.
1240  */
1241 int
1242 ar5008_swba_intr(struct athn_softc *sc)
1243 {
1244 	struct ieee80211com *ic = &sc->sc_ic;
1245 	struct ifnet *ifp = &ic->ic_if;
1246 	struct ieee80211_node *ni = ic->ic_bss;
1247 	struct athn_tx_buf *bf = sc->bcnbuf;
1248 	struct ieee80211_frame *wh;
1249 	struct ar_tx_desc *ds;
1250 	struct mbuf *m;
1251 	uint8_t ridx, hwrate;
1252 	int error, totlen;
1253 
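	/*
	 * Clear the buffered-multicast (TIM) indication once both the
	 * power-save queue and the CAB queue have drained.
	 */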
1254 	if (ic->ic_tim_mcast_pending &&
1255 	    mq_empty(&ni->ni_savedq) &&
1256 	    SIMPLEQ_EMPTY(&sc->txq[ATHN_QID_CAB].head))
1257 		ic->ic_tim_mcast_pending = 0;
1258 
1259 	if (ic->ic_dtim_count == 0)
1260 		ic->ic_dtim_count = ic->ic_dtim_period - 1;
1261 	else
1262 		ic->ic_dtim_count--;
1263 
1264 	/* Make sure previous beacon has been sent. */
1265 	if (athn_tx_pending(sc, ATHN_QID_BEACON)) {
1266 		DPRINTF(("beacon stuck\n"));
1267 		return (EBUSY);
1268 	}
1269 	/* Get new beacon. */
1270 	m = ieee80211_beacon_alloc(ic, ic->ic_bss);
1271 	if (__predict_false(m == NULL))
1272 		return (ENOBUFS);
1273 	/* Assign sequence number. */
1274 	wh = mtod(m, struct ieee80211_frame *);
1275 	*(uint16_t *)&wh->i_seq[0] =
1276 	    htole16(ic->ic_bss->ni_txseq << IEEE80211_SEQ_SEQ_SHIFT);
1277 	ic->ic_bss->ni_txseq++;
1278 
1279 	/* Unmap and free old beacon if any. */
1280 	if (__predict_true(bf->bf_m != NULL)) {
1281 		bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0,
1282 		    bf->bf_map->dm_mapsize, BUS_DMASYNC_POSTWRITE);
1283 		bus_dmamap_unload(sc->sc_dmat, bf->bf_map);
1284 		m_freem(bf->bf_m);
1285 		bf->bf_m = NULL;
1286 	}
1287 	/* DMA map new beacon. */
1288 	error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
1289 	    BUS_DMA_NOWAIT | BUS_DMA_WRITE);
1290 	if (__predict_false(error != 0)) {
1291 		m_freem(m);
1292 		return (error);
1293 	}
1294 	bf->bf_m = m;
1295 
1296 	/* Setup Tx descriptor (simplified ar5008_tx()). */
1297 	ds = bf->bf_descs;
1298 	memset(ds, 0, sizeof(*ds));
1299 
1300 	totlen = m->m_pkthdr.len + IEEE80211_CRC_LEN;
1301 	ds->ds_ctl0 = SM(AR_TXC0_FRAME_LEN, totlen);
1302 	ds->ds_ctl0 |= SM(AR_TXC0_XMIT_POWER, AR_MAX_RATE_POWER);
1303 	ds->ds_ctl1 = SM(AR_TXC1_FRAME_TYPE, AR_FRAME_TYPE_BEACON);
1304 	ds->ds_ctl1 |= AR_TXC1_NO_ACK;
1305 	ds->ds_ctl6 = SM(AR_TXC6_ENCR_TYPE, AR_ENCR_TYPE_CLEAR);
1306 
1307 	/* Write number of tries. */
1308 	ds->ds_ctl2 = SM(AR_TXC2_XMIT_DATA_TRIES0, 1);
1309 
1310 	/* Write Tx rate. */
1311 	ridx = IEEE80211_IS_CHAN_5GHZ(ni->ni_chan) ?
1312 	    ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK1;
1313 	hwrate = athn_rates[ridx].hwrate;
1314 	ds->ds_ctl3 = SM(AR_TXC3_XMIT_RATE0, hwrate);
1315 
1316 	/* Write Tx chains. */
1317 	ds->ds_ctl7 = SM(AR_TXC7_CHAIN_SEL0, sc->txchainmask);
1318 
1319 	ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
1320 	/* Segment length must be a multiple of 4. */
1321 	ds->ds_ctl1 |= SM(AR_TXC1_BUF_LEN,
1322 	    (bf->bf_map->dm_segs[0].ds_len + 3) & ~3);
1323 
1324 	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
1325 	    BUS_DMASYNC_PREWRITE);
1326 
1327 	/* Stop Tx DMA before putting the new beacon on the queue. */
1328 	athn_stop_tx_dma(sc, ATHN_QID_BEACON);
1329 
1330 	AR_WRITE(sc, AR_QTXDP(ATHN_QID_BEACON), bf->bf_daddr);
1331 
1332 	for (;;) {
1333 		if (SIMPLEQ_EMPTY(&sc->txbufs))
1334 			break;
1335 
1336 		m = mq_dequeue(&ni->ni_savedq);
1337 		if (m == NULL)
1338 			break;
1339 		if (!mq_empty(&ni->ni_savedq)) {
1340 			/* more queued frames, set the more data bit */
1341 			wh = mtod(m, struct ieee80211_frame *);
1342 			wh->i_fc[1] |= IEEE80211_FC1_MORE_DATA;
1343 		}
1344 
1345 		if (sc->ops.tx(sc, m, ni, ATHN_TXFLAG_CAB) != 0) {
1346 			ieee80211_release_node(ic, ni);
1347 			ifp->if_oerrors++;
1348 			break;
1349 		}
1350 	}
1351 
1352 	/* Kick Tx. */
1353 	AR_WRITE(sc, AR_Q_TXE, 1 << ATHN_QID_BEACON);
1354 	AR_WRITE_BARRIER(sc);
1355 	return (0);
1356 }
1357 #endif
1358 
1359 int
1360 ar5008_intr(struct athn_softc *sc)
1361 {
1362 	uint32_t intr, intr2, intr5, sync;
1363 
1364 	/* Get pending interrupts. */
1365 	intr = AR_READ(sc, AR_INTR_ASYNC_CAUSE);
1366 	if (!(intr & AR_INTR_MAC_IRQ) || intr == AR_INTR_SPURIOUS) {
1367 		intr = AR_READ(sc, AR_INTR_SYNC_CAUSE);
1368 		if (intr == AR_INTR_SPURIOUS || (intr & sc->isync) == 0)
1369 			return (0);	/* Not for us. */
1370 	}
1371 
1372 	if ((AR_READ(sc, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) &&
1373 	    (AR_READ(sc, AR_RTC_STATUS) & AR_RTC_STATUS_M) == AR_RTC_STATUS_ON)
1374 		intr = AR_READ(sc, AR_ISR);
1375 	else
1376 		intr = 0;
1377 	sync = AR_READ(sc, AR_INTR_SYNC_CAUSE) & sc->isync;
1378 	if (intr == 0 && sync == 0)
1379 		return (0);	/* Not for us. */
1380 
1381 	if (intr != 0) {
1382 		if (intr & AR_ISR_BCNMISC) {
1383 			intr2 = AR_READ(sc, AR_ISR_S2);
1384 			if (intr2 & AR_ISR_S2_TIM)
1385 				/* TBD */;
1386 			if (intr2 & AR_ISR_S2_TSFOOR)
1387 				/* TBD */;
1388 		}
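		/*
		 * AR_ISR_RAC is a read-and-clear shadow of AR_ISR; reading
		 * it acknowledges the interrupts handled below.
		 */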
1389 		intr = AR_READ(sc, AR_ISR_RAC);
1390 		if (intr == AR_INTR_SPURIOUS)
1391 			return (1);
1392 
1393 #ifndef IEEE80211_STA_ONLY
1394 		if (intr & AR_ISR_SWBA)
1395 			ar5008_swba_intr(sc);
1396 #endif
1397 		if (intr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
1398 			ar5008_rx_intr(sc);
1399 		if (intr & (AR_ISR_RXOK | AR_ISR_RXERR | AR_ISR_RXORN))
1400 			ar5008_rx_intr(sc);
1401 
1402 		if (intr & (AR_ISR_TXOK | AR_ISR_TXDESC |
1403 		    AR_ISR_TXERR | AR_ISR_TXEOL))
1404 			ar5008_tx_intr(sc);
1405 
1406 		intr5 = AR_READ(sc, AR_ISR_S5_S);
1407 		if (intr & AR_ISR_GENTMR) {
1408 			if (intr5 & AR_ISR_GENTMR) {
1409 				DPRINTF(("GENTMR trigger=%d thresh=%d\n",
1410 				    MS(intr5, AR_ISR_S5_GENTIMER_TRIG),
1411 				    MS(intr5, AR_ISR_S5_GENTIMER_THRESH)));
1412 			}
1413 		}
1414 
1415 		if (intr5 & AR_ISR_S5_TIM_TIMER)
1416 			/* TBD */;
1417 	}
1418 	if (sync != 0) {
1419 		if (sync & (AR_INTR_SYNC_HOST1_FATAL |
1420 		    AR_INTR_SYNC_HOST1_PERR))
1421 			/* TBD */;
1422 
1423 		if (sync & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
1424 			AR_WRITE(sc, AR_RC, AR_RC_HOSTIF);
1425 			AR_WRITE(sc, AR_RC, 0);
1426 		}
1427 
1428 		if ((sc->flags & ATHN_FLAG_RFSILENT) &&
1429 		    (sync & AR_INTR_SYNC_GPIO_PIN(sc->rfsilent_pin))) {
1430 			struct ifnet *ifp = &sc->sc_ic.ic_if;
1431 
1432 			printf("%s: radio switch turned off\n",
1433 			    sc->sc_dev.dv_xname);
1434 			/* Turn the interface down. */
1435 			athn_stop(ifp, 1);
1436 			return (1);
1437 		}
1438 
1439 		AR_WRITE(sc, AR_INTR_SYNC_CAUSE, sync);
1440 		(void)AR_READ(sc, AR_INTR_SYNC_CAUSE);
1441 	}
1442 	return (1);
1443 }
1444 
1445 int
1446 ar5008_ccmp_encap(struct mbuf *m, u_int hdrlen, struct ieee80211_key *k)
1447 {
1448 	struct mbuf *n;
1449 	uint8_t *ivp;
1450 	int off;
1451 
1452 	/* Insert IV for CCMP hardware encryption. */
1453 	n = m_makespace(m, hdrlen, IEEE80211_CCMP_HDRLEN, &off);
1454 	if (n == NULL) {
1455 		m_freem(m);
1456 		return (ENOBUFS);
1457 	}
1458 	ivp = mtod(n, uint8_t *) + off;
1459 	k->k_tsc++;
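	/* CCMP header layout: PN0, PN1, reserved, key ID | ExtIV, PN2..PN5. */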
1460 	ivp[0] = k->k_tsc;
1461 	ivp[1] = k->k_tsc >> 8;
1462 	ivp[2] = 0;
1463 	ivp[3] = k->k_id << 6 | IEEE80211_WEP_EXTIV;
1464 	ivp[4] = k->k_tsc >> 16;
1465 	ivp[5] = k->k_tsc >> 24;
1466 	ivp[6] = k->k_tsc >> 32;
1467 	ivp[7] = k->k_tsc >> 40;
1468 
1469 	return 0;
1470 }
1471 
1472 int
1473 ar5008_tx(struct athn_softc *sc, struct mbuf *m, struct ieee80211_node *ni,
1474     int txflags)
1475 {
1476 	struct ieee80211com *ic = &sc->sc_ic;
1477 	struct ieee80211_key *k = NULL;
1478 	struct ieee80211_frame *wh;
1479 	struct athn_series series[4];
1480 	struct ar_tx_desc *ds, *lastds;
1481 	struct athn_txq *txq;
1482 	struct athn_tx_buf *bf;
1483 	struct athn_node *an = (void *)ni;
1484 	uintptr_t entry;
1485 	uint16_t qos;
1486 	uint8_t txpower, type, encrtype, tid, ridx[4];
1487 	int i, error, totlen, hasqos, qid;
1488 
1489 	/* Grab a Tx buffer from our global free list. */
1490 	bf = SIMPLEQ_FIRST(&sc->txbufs);
1491 	KASSERT(bf != NULL);
1492 
1493 	/* Map 802.11 frame type to hardware frame type. */
1494 	wh = mtod(m, struct ieee80211_frame *);
1495 	if ((wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) ==
1496 	    IEEE80211_FC0_TYPE_MGT) {
1497 		/* NB: Beacons do not use ar5008_tx(). */
1498 		if ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) ==
1499 		    IEEE80211_FC0_SUBTYPE_PROBE_RESP)
1500 			type = AR_FRAME_TYPE_PROBE_RESP;
1501 		else if ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) ==
1502 		    IEEE80211_FC0_SUBTYPE_ATIM)
1503 			type = AR_FRAME_TYPE_ATIM;
1504 		else
1505 			type = AR_FRAME_TYPE_NORMAL;
1506 	} else if ((wh->i_fc[0] &
1507 	    (IEEE80211_FC0_TYPE_MASK | IEEE80211_FC0_SUBTYPE_MASK)) ==
1508 	    (IEEE80211_FC0_TYPE_CTL  | IEEE80211_FC0_SUBTYPE_PS_POLL)) {
1509 		type = AR_FRAME_TYPE_PSPOLL;
1510 	} else
1511 		type = AR_FRAME_TYPE_NORMAL;
1512 
1513 	if (wh->i_fc[1] & IEEE80211_FC1_PROTECTED) {
1514 		k = ieee80211_get_txkey(ic, wh, ni);
1515 		if (k->k_cipher == IEEE80211_CIPHER_CCMP) {
1516 			u_int hdrlen = ieee80211_get_hdrlen(wh);
1517 			if (ar5008_ccmp_encap(m, hdrlen, k) != 0)
1518 				return (ENOBUFS);
1519 		} else {
1520 			if ((m = ieee80211_encrypt(ic, m, k)) == NULL)
1521 				return (ENOBUFS);
1522 			k = NULL; /* skip hardware crypto further below */
1523 		}
1524 		wh = mtod(m, struct ieee80211_frame *);
1525 	}
1526 
1527 	/* XXX 2-byte padding for QoS and 4-addr headers. */
1528 
1529 	/* Select the HW Tx queue to use for this frame. */
1530 	if ((hasqos = ieee80211_has_qos(wh))) {
1531 		qos = ieee80211_get_qos(wh);
1532 		tid = qos & IEEE80211_QOS_TID;
1533 		qid = athn_ac2qid[ieee80211_up_to_ac(ic, tid)];
1534 	} else if (type == AR_FRAME_TYPE_PSPOLL) {
1535 		qid = ATHN_QID_PSPOLL;
1536 	} else if (txflags & ATHN_TXFLAG_CAB) {
1537 		qid = ATHN_QID_CAB;
1538 	} else
1539 		qid = ATHN_QID_AC_BE;
1540 	txq = &sc->txq[qid];
1541 
1542 	/* Select the transmit rates to use for this frame. */
1543 	if (IEEE80211_IS_MULTICAST(wh->i_addr1) ||
1544 	    (wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) !=
1545 	    IEEE80211_FC0_TYPE_DATA) {
1546 		/* Use lowest rate for all tries. */
1547 		ridx[0] = ridx[1] = ridx[2] = ridx[3] =
1548 		    (IEEE80211_IS_CHAN_5GHZ(ni->ni_chan) ?
1549 			ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK1);
1550 	} else if ((ni->ni_flags & IEEE80211_NODE_HT) &&
1551 	    ic->ic_fixed_mcs != -1) {
1552 		/* Use same fixed rate for all tries. */
1553 		ridx[0] = ridx[1] = ridx[2] = ridx[3] =
1554 		    ATHN_RIDX_MCS0 + ic->ic_fixed_mcs;
1555 	} else if (ic->ic_fixed_rate != -1) {
1556 		/* Use same fixed rate for all tries. */
1557 		ridx[0] = ridx[1] = ridx[2] = ridx[3] =
1558 		    sc->fixed_ridx;
1559 	} else {
1560 		/* Use fallback table of the node. */
1561 		int txrate;
1562 
1563 		if (ni->ni_flags & IEEE80211_NODE_HT)
1564 			txrate = ATHN_NUM_LEGACY_RATES + ni->ni_txmcs;
1565 		else
1566 			txrate = ni->ni_txrate;
1567 		for (i = 0; i < 4; i++) {
1568 			ridx[i] = an->ridx[txrate];
1569 			txrate = an->fallback[txrate];
1570 		}
1571 	}
1572 
1573 #if NBPFILTER > 0
1574 	if (__predict_false(sc->sc_drvbpf != NULL)) {
1575 		struct athn_tx_radiotap_header *tap = &sc->sc_txtap;
1576 
1577 		tap->wt_flags = 0;
1578 		/* Use initial transmit rate. */
1579 		if (athn_rates[ridx[0]].hwrate & 0x80) /* MCS */
1580 			tap->wt_rate = athn_rates[ridx[0]].hwrate;
1581 		else
1582 			tap->wt_rate = athn_rates[ridx[0]].rate;
1583 		tap->wt_chan_freq = htole16(ic->ic_bss->ni_chan->ic_freq);
1584 		tap->wt_chan_flags = htole16(ic->ic_bss->ni_chan->ic_flags);
1585 		if (athn_rates[ridx[0]].phy == IEEE80211_T_DS &&
1586 		    ridx[0] != ATHN_RIDX_CCK1 &&
1587 		    (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
1588 			tap->wt_flags |= IEEE80211_RADIOTAP_F_SHORTPRE;
1589 		bpf_mtap_hdr(sc->sc_drvbpf, tap, sc->sc_txtap_len, m,
1590 		    BPF_DIRECTION_OUT);
1591 	}
1592 #endif
1593 
1594 	/* DMA map mbuf. */
1595 	error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
1596 	    BUS_DMA_NOWAIT | BUS_DMA_WRITE);
1597 	if (__predict_false(error != 0)) {
1598 		if (error != EFBIG) {
1599 			printf("%s: can't map mbuf (error %d)\n",
1600 			    sc->sc_dev.dv_xname, error);
1601 			m_freem(m);
1602 			return (error);
1603 		}
1604 		/*
1605 		 * DMA mapping requires too many DMA segments; linearize
1606 		 * mbuf in kernel virtual address space and retry.
1607 		 */
1608 		if (m_defrag(m, M_DONTWAIT) != 0) {
1609 			m_freem(m);
1610 			return (ENOBUFS);
1611 		}
1612 
1613 		error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
1614 		    BUS_DMA_NOWAIT | BUS_DMA_WRITE);
1615 		if (error != 0) {
1616 			printf("%s: can't map mbuf (error %d)\n",
1617 			    sc->sc_dev.dv_xname, error);
1618 			m_freem(m);
1619 			return (error);
1620 		}
1621 	}
1622 	bf->bf_m = m;
1623 	bf->bf_ni = ni;
1624 	bf->bf_txmcs = ni->ni_txmcs;
1625 	bf->bf_txflags = txflags;
1626 
1627 	wh = mtod(m, struct ieee80211_frame *);
1628 
1629 	totlen = m->m_pkthdr.len + IEEE80211_CRC_LEN;
1630 
1631 	/* Clear all Tx descriptors that we will use. */
1632 	memset(bf->bf_descs, 0, bf->bf_map->dm_nsegs * sizeof(*ds));
1633 
1634 	/* Setup first Tx descriptor. */
1635 	ds = bf->bf_descs;
1636 
1637 	ds->ds_ctl0 = AR_TXC0_INTR_REQ | AR_TXC0_CLR_DEST_MASK;
1638 	txpower = AR_MAX_RATE_POWER;	/* Get from per-rate registers. */
1639 	ds->ds_ctl0 |= SM(AR_TXC0_XMIT_POWER, txpower);
1640 
1641 	ds->ds_ctl1 = SM(AR_TXC1_FRAME_TYPE, type);
1642 
1643 	if (IEEE80211_IS_MULTICAST(wh->i_addr1) ||
1644 	    (hasqos && (qos & IEEE80211_QOS_ACK_POLICY_MASK) ==
1645 	     IEEE80211_QOS_ACK_POLICY_NOACK))
1646 		ds->ds_ctl1 |= AR_TXC1_NO_ACK;
1647 
1648 	if (k != NULL) {
1649 		/* Map 802.11 cipher to hardware encryption type. */
1650 		if (k->k_cipher == IEEE80211_CIPHER_CCMP) {
1651 			encrtype = AR_ENCR_TYPE_AES;
1652 			totlen += IEEE80211_CCMP_MICLEN;
1653 		} else
1654 			panic("unsupported cipher");
1655 		/*
1656 		 * NB: The key cache entry index is stored in the key
1657 		 * private field when the key is installed.
1658 		 */
1659 		entry = (uintptr_t)k->k_priv;
1660 		ds->ds_ctl1 |= SM(AR_TXC1_DEST_IDX, entry);
1661 		ds->ds_ctl0 |= AR_TXC0_DEST_IDX_VALID;
1662 	} else
1663 		encrtype = AR_ENCR_TYPE_CLEAR;
1664 	ds->ds_ctl6 = SM(AR_TXC6_ENCR_TYPE, encrtype);
1665 
1666 	/* Check if frame must be protected using RTS/CTS or CTS-to-self. */
1667 	if (!IEEE80211_IS_MULTICAST(wh->i_addr1) &&
1668 	    (wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) ==
1669 	    IEEE80211_FC0_TYPE_DATA) {
1670 		enum ieee80211_htprot htprot;
1671 
1672 		htprot = (ic->ic_bss->ni_htop1 & IEEE80211_HTOP1_PROT_MASK);
1673 
1674 		/* NB: Group frames are sent using CCK in 802.11b/g. */
1675 		if (totlen > ic->ic_rtsthreshold) {
1676 			ds->ds_ctl0 |= AR_TXC0_RTS_ENABLE;
1677 		} else if (((ic->ic_flags & IEEE80211_F_USEPROT) &&
1678 		    athn_rates[ridx[0]].phy == IEEE80211_T_OFDM) ||
1679 		    ((ni->ni_flags & IEEE80211_NODE_HT) &&
1680 		    htprot != IEEE80211_HTPROT_NONE)) {
1681 			if (ic->ic_protmode == IEEE80211_PROT_RTSCTS)
1682 				ds->ds_ctl0 |= AR_TXC0_RTS_ENABLE;
1683 			else if (ic->ic_protmode == IEEE80211_PROT_CTSONLY)
1684 				ds->ds_ctl0 |= AR_TXC0_CTS_ENABLE;
1685 		}
1686 	}
1687 	/*
1688 	 * Disable multi-rate retries when protection is used.
1689 	 * The RTS/CTS frame's duration field is fixed and won't be
1690 	 * updated by hardware when the data rate changes.
1691 	 */
1692 	if (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE | AR_TXC0_CTS_ENABLE)) {
1693 		ridx[1] = ridx[2] = ridx[3] = ridx[0];
1694 	}
1695 	/* Setup multi-rate retries. */
1696 	for (i = 0; i < 4; i++) {
1697 		series[i].hwrate = athn_rates[ridx[i]].hwrate;
1698 		if (athn_rates[ridx[i]].phy == IEEE80211_T_DS &&
1699 		    ridx[i] != ATHN_RIDX_CCK1 &&
1700 		    (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
1701 			series[i].hwrate |= 0x04;
1702 		/* Compute duration for each series. */
1703 		series[i].dur = athn_txtime(sc, totlen, ridx[i], ic->ic_flags);
1704 		if (!(ds->ds_ctl1 & AR_TXC1_NO_ACK)) {
1705 			/* Account for ACK duration. */
1706 			series[i].dur += athn_txtime(sc, IEEE80211_ACK_LEN,
1707 			    athn_rates[ridx[i]].rspridx, ic->ic_flags);
1708 		}
1709 	}
1710 
1711 	/* Write number of tries for each series. */
1712 	ds->ds_ctl2 =
1713 	    SM(AR_TXC2_XMIT_DATA_TRIES0, 2) |
1714 	    SM(AR_TXC2_XMIT_DATA_TRIES1, 2) |
1715 	    SM(AR_TXC2_XMIT_DATA_TRIES2, 2) |
1716 	    SM(AR_TXC2_XMIT_DATA_TRIES3, 4);
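	/* Ten transmit attempts in total (2+2+2+4) across the four series. */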
1717 
1718 	/* Tell HW to update duration field in 802.11 header. */
1719 	if (type != AR_FRAME_TYPE_PSPOLL)
1720 		ds->ds_ctl2 |= AR_TXC2_DUR_UPDATE_ENA;
1721 
1722 	/* Write Tx rate for each series. */
1723 	ds->ds_ctl3 =
1724 	    SM(AR_TXC3_XMIT_RATE0, series[0].hwrate) |
1725 	    SM(AR_TXC3_XMIT_RATE1, series[1].hwrate) |
1726 	    SM(AR_TXC3_XMIT_RATE2, series[2].hwrate) |
1727 	    SM(AR_TXC3_XMIT_RATE3, series[3].hwrate);
1728 
1729 	/* Write duration for each series. */
1730 	ds->ds_ctl4 =
1731 	    SM(AR_TXC4_PACKET_DUR0, series[0].dur) |
1732 	    SM(AR_TXC4_PACKET_DUR1, series[1].dur);
1733 	ds->ds_ctl5 =
1734 	    SM(AR_TXC5_PACKET_DUR2, series[2].dur) |
1735 	    SM(AR_TXC5_PACKET_DUR3, series[3].dur);
1736 
1737 	/* Use the same Tx chains for all tries. */
1738 	ds->ds_ctl7 =
1739 	    SM(AR_TXC7_CHAIN_SEL0, sc->txchainmask) |
1740 	    SM(AR_TXC7_CHAIN_SEL1, sc->txchainmask) |
1741 	    SM(AR_TXC7_CHAIN_SEL2, sc->txchainmask) |
1742 	    SM(AR_TXC7_CHAIN_SEL3, sc->txchainmask);
1743 #ifdef notyet
1744 	/* Use the same short GI setting for all tries. */
1745 	if (ni->ni_htcaps & IEEE80211_HTCAP_SGI20)
1746 		ds->ds_ctl7 |= AR_TXC7_GI0123;
1747 	/* Use the same channel width for all tries. */
1748 	if (ic->ic_flags & IEEE80211_F_CBW40)
1749 		ds->ds_ctl7 |= AR_TXC7_2040_0123;
1750 #endif
1751 
1752 	/* Set Tx power for series 1 - 3. */
1753 	ds->ds_ctl9 = SM(AR_TXC9_XMIT_POWER1, txpower);
1754 	ds->ds_ctl10 = SM(AR_TXC10_XMIT_POWER2, txpower);
1755 	ds->ds_ctl11 = SM(AR_TXC11_XMIT_POWER3, txpower);
1756 
1757 	if (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE | AR_TXC0_CTS_ENABLE)) {
1758 		uint8_t protridx, hwrate;
1759 		uint16_t dur = 0;
1760 
1761 		/* Use the same protection mode for all tries. */
1762 		if (ds->ds_ctl0 & AR_TXC0_RTS_ENABLE) {
1763 			ds->ds_ctl4 |= AR_TXC4_RTSCTS_QUAL01;
1764 			ds->ds_ctl5 |= AR_TXC5_RTSCTS_QUAL23;
1765 		}
1766 		/* Select protection rate (suboptimal but ok). */
1767 		protridx = IEEE80211_IS_CHAN_5GHZ(ni->ni_chan) ?
1768 		    ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK2;
1769 		if (ds->ds_ctl0 & AR_TXC0_RTS_ENABLE) {
1770 			/* Account for CTS duration. */
1771 			dur += athn_txtime(sc, IEEE80211_ACK_LEN,
1772 			    athn_rates[protridx].rspridx, ic->ic_flags);
1773 		}
1774 		dur += athn_txtime(sc, totlen, ridx[0], ic->ic_flags);
1775 		if (!(ds->ds_ctl1 & AR_TXC1_NO_ACK)) {
1776 			/* Account for ACK duration. */
1777 			dur += athn_txtime(sc, IEEE80211_ACK_LEN,
1778 			    athn_rates[ridx[0]].rspridx, ic->ic_flags);
1779 		}
1780 		/* Write protection frame duration and rate. */
1781 		ds->ds_ctl2 |= SM(AR_TXC2_BURST_DUR, dur);
1782 		hwrate = athn_rates[protridx].hwrate;
1783 		if (protridx == ATHN_RIDX_CCK2 &&
1784 		    (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
1785 			hwrate |= 0x04;
1786 		ds->ds_ctl7 |= SM(AR_TXC7_RTSCTS_RATE, hwrate);
1787 	}
1788 
1789 	/* Finalize first Tx descriptor and fill others (if any). */
1790 	ds->ds_ctl0 |= SM(AR_TXC0_FRAME_LEN, totlen);
1791 
1792 	for (i = 0; i < bf->bf_map->dm_nsegs; i++, ds++) {
1793 		ds->ds_data = bf->bf_map->dm_segs[i].ds_addr;
1794 		ds->ds_ctl1 |= SM(AR_TXC1_BUF_LEN,
1795 		    bf->bf_map->dm_segs[i].ds_len);
1796 
1797 		if (i != bf->bf_map->dm_nsegs - 1)
1798 			ds->ds_ctl1 |= AR_TXC1_MORE;
1799 		ds->ds_link = 0;
1800 
1801 		/* Chain Tx descriptor. */
1802 		if (i != 0)
1803 			lastds->ds_link = bf->bf_daddr + i * sizeof(*ds);
1804 		lastds = ds;
1805 	}
1806 	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
1807 	    BUS_DMASYNC_PREWRITE);
1808 
1809 	if (!SIMPLEQ_EMPTY(&txq->head))
1810 		((struct ar_tx_desc *)txq->lastds)->ds_link = bf->bf_daddr;
1811 	else
1812 		AR_WRITE(sc, AR_QTXDP(qid), bf->bf_daddr);
1813 	txq->lastds = lastds;
1814 	SIMPLEQ_REMOVE_HEAD(&sc->txbufs, bf_list);
1815 	SIMPLEQ_INSERT_TAIL(&txq->head, bf, bf_list);
1816 
1817 	ds = bf->bf_descs;
1818 	DPRINTFN(6, ("Tx qid=%d nsegs=%d ctl0=0x%x ctl1=0x%x ctl3=0x%x\n",
1819 	    qid, bf->bf_map->dm_nsegs, ds->ds_ctl0, ds->ds_ctl1, ds->ds_ctl3));
1820 
1821 	/* Kick Tx. */
1822 	AR_WRITE(sc, AR_Q_TXE, 1 << qid);
1823 	AR_WRITE_BARRIER(sc);
1824 	return (0);
1825 }
1826 
1827 void
1828 ar5008_set_rf_mode(struct athn_softc *sc, struct ieee80211_channel *c)
1829 {
1830 	uint32_t reg;
1831 
1832 	reg = IEEE80211_IS_CHAN_2GHZ(c) ?
1833 	    AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
1834 	if (!AR_SREV_9280_10_OR_LATER(sc)) {
1835 		reg |= IEEE80211_IS_CHAN_2GHZ(c) ?
1836 		    AR_PHY_MODE_RF2GHZ : AR_PHY_MODE_RF5GHZ;
1837 	} else if (IEEE80211_IS_CHAN_5GHZ(c) &&
1838 	    (sc->flags & ATHN_FLAG_FAST_PLL_CLOCK)) {
1839 		reg |= AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE;
1840 	}
1841 	AR_WRITE(sc, AR_PHY_MODE, reg);
1842 	AR_WRITE_BARRIER(sc);
1843 }
1844 
1845 static __inline uint32_t
1846 ar5008_synth_delay(struct athn_softc *sc)
1847 {
1848 	uint32_t delay;
1849 
1850 	delay = MS(AR_READ(sc, AR_PHY_RX_DELAY), AR_PHY_RX_DELAY_DELAY);
1851 	if (sc->sc_ic.ic_curmode == IEEE80211_MODE_11B)
1852 		delay = (delay * 4) / 22;
1853 	else
1854 		delay = delay / 10;	/* in 100ns steps */
1855 	return (delay);
1856 }
1857 
1858 int
1859 ar5008_rf_bus_request(struct athn_softc *sc)
1860 {
1861 	int ntries;
1862 
1863 	/* Request RF Bus grant. */
1864 	AR_WRITE(sc, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
1865 	for (ntries = 0; ntries < 10000; ntries++) {
1866 		if (AR_READ(sc, AR_PHY_RFBUS_GRANT) & AR_PHY_RFBUS_GRANT_EN)
1867 			return (0);
1868 		DELAY(10);
1869 	}
1870 	DPRINTF(("could not kill baseband Rx\n"));
1871 	return (ETIMEDOUT);
1872 }
1873 
1874 void
1875 ar5008_rf_bus_release(struct athn_softc *sc)
1876 {
1877 	/* Wait for the synthesizer to settle. */
1878 	DELAY(AR_BASE_PHY_ACTIVE_DELAY + ar5008_synth_delay(sc));
1879 
1880 	/* Release the RF Bus grant. */
1881 	AR_WRITE(sc, AR_PHY_RFBUS_REQ, 0);
1882 	AR_WRITE_BARRIER(sc);
1883 }
1884 
1885 void
1886 ar5008_set_phy(struct athn_softc *sc, struct ieee80211_channel *c,
1887     struct ieee80211_channel *extc)
1888 {
1889 	uint32_t phy;
1890 
1891 	if (AR_SREV_9285_10_OR_LATER(sc))
1892 		phy = AR_READ(sc, AR_PHY_TURBO) & AR_PHY_FC_ENABLE_DAC_FIFO;
1893 	else
1894 		phy = 0;
1895 	phy |= AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40 |
1896 	    AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH;
1897 	if (extc != NULL) {
1898 		phy |= AR_PHY_FC_DYN2040_EN;
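		/*
		 * ic_channels[] is indexed by channel number, so comparing
		 * the two channel pointers tells whether the extension
		 * channel lies above or below the primary channel.
		 */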
1899 		if (extc > c)	/* XXX */
1900 			phy |= AR_PHY_FC_DYN2040_PRI_CH;
1901 	}
1902 	AR_WRITE(sc, AR_PHY_TURBO, phy);
1903 
1904 	AR_WRITE(sc, AR_2040_MODE,
1905 	    (extc != NULL) ? AR_2040_JOINED_RX_CLEAR : 0);
1906 
1907 	/* Set global transmit timeout. */
1908 	AR_WRITE(sc, AR_GTXTO, SM(AR_GTXTO_TIMEOUT_LIMIT, 25));
1909 	/* Set carrier sense timeout. */
1910 	AR_WRITE(sc, AR_CST, SM(AR_CST_TIMEOUT_LIMIT, 15));
1911 	AR_WRITE_BARRIER(sc);
1912 }
1913 
1914 void
1915 ar5008_set_delta_slope(struct athn_softc *sc, struct ieee80211_channel *c,
1916     struct ieee80211_channel *extc)
1917 {
1918 	uint32_t coeff, exp, man, reg;
1919 
1920 	/* Set Delta Slope (exponent and mantissa). */
1921 	coeff = (100 << 24) / c->ic_freq;
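	/*
	 * coeff is 100 / freq(MHz) in Q24 fixed-point format;
	 * athn_get_delta_slope() splits it into the exponent/mantissa
	 * form expected by the PHY timing registers.
	 */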
1922 	athn_get_delta_slope(coeff, &exp, &man);
1923 	DPRINTFN(5, ("delta slope coeff exp=%u man=%u\n", exp, man));
1924 
1925 	reg = AR_READ(sc, AR_PHY_TIMING3);
1926 	reg = RW(reg, AR_PHY_TIMING3_DSC_EXP, exp);
1927 	reg = RW(reg, AR_PHY_TIMING3_DSC_MAN, man);
1928 	AR_WRITE(sc, AR_PHY_TIMING3, reg);
1929 
1930 	/* For Short GI, coeff is 9/10 that of normal coeff. */
1931 	coeff = (9 * coeff) / 10;
1932 	athn_get_delta_slope(coeff, &exp, &man);
1933 	DPRINTFN(5, ("delta slope coeff exp=%u man=%u\n", exp, man));
1934 
1935 	reg = AR_READ(sc, AR_PHY_HALFGI);
1936 	reg = RW(reg, AR_PHY_HALFGI_DSC_EXP, exp);
1937 	reg = RW(reg, AR_PHY_HALFGI_DSC_MAN, man);
1938 	AR_WRITE(sc, AR_PHY_HALFGI, reg);
1939 	AR_WRITE_BARRIER(sc);
1940 }
1941 
1942 void
1943 ar5008_enable_antenna_diversity(struct athn_softc *sc)
1944 {
1945 	AR_SETBITS(sc, AR_PHY_CCK_DETECT,
1946 	    AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV);
1947 	AR_WRITE_BARRIER(sc);
1948 }
1949 
1950 void
1951 ar5008_init_baseband(struct athn_softc *sc)
1952 {
1953 	uint32_t synth_delay;
1954 
1955 	synth_delay = ar5008_synth_delay(sc);
1956 	/* Activate the PHY (includes baseband activate and synthesizer on). */
1957 	AR_WRITE(sc, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
1958 	AR_WRITE_BARRIER(sc);
1959 	DELAY(AR_BASE_PHY_ACTIVE_DELAY + synth_delay);
1960 }
1961 
1962 void
1963 ar5008_disable_phy(struct athn_softc *sc)
1964 {
1965 	AR_WRITE(sc, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
1966 	AR_WRITE_BARRIER(sc);
1967 }
1968 
1969 void
1970 ar5008_init_chains(struct athn_softc *sc)
1971 {
1972 	if (sc->rxchainmask == 0x5 || sc->txchainmask == 0x5)
1973 		AR_SETBITS(sc, AR_PHY_ANALOG_SWAP, AR_PHY_SWAP_ALT_CHAIN);
1974 
1975 	/* Setup chain masks. */
1976 	if (sc->mac_ver <= AR_SREV_VERSION_9160 &&
1977 	    (sc->rxchainmask == 0x3 || sc->rxchainmask == 0x5)) {
1978 		AR_WRITE(sc, AR_PHY_RX_CHAINMASK,  0x7);
1979 		AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, 0x7);
1980 	} else {
1981 		AR_WRITE(sc, AR_PHY_RX_CHAINMASK,  sc->rxchainmask);
1982 		AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, sc->rxchainmask);
1983 	}
1984 	AR_WRITE(sc, AR_SELFGEN_MASK, sc->txchainmask);
1985 	AR_WRITE_BARRIER(sc);
1986 }
1987 
1988 void
1989 ar5008_set_rxchains(struct athn_softc *sc)
1990 {
1991 	if (sc->rxchainmask == 0x3 || sc->rxchainmask == 0x5) {
1992 		AR_WRITE(sc, AR_PHY_RX_CHAINMASK,  sc->rxchainmask);
1993 		AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, sc->rxchainmask);
1994 		AR_WRITE_BARRIER(sc);
1995 	}
1996 }
1997 
1998 void
1999 ar5008_read_noisefloor(struct athn_softc *sc, int16_t *nf, int16_t *nf_ext)
2000 {
2001 /* Sign-extends 9-bit value (assumes upper bits are zeroes). */
2002 #define SIGN_EXT(v)	(((v) ^ 0x100) - 0x100)
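/*
 * Example: 0x1ff becomes (0x1ff ^ 0x100) - 0x100 = 0xff - 0x100 = -1,
 * while a positive value such as 0x0a0 is returned unchanged.
 */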
2003 	uint32_t reg;
2004 	int i;
2005 
2006 	for (i = 0; i < sc->nrxchains; i++) {
2007 		reg = AR_READ(sc, AR_PHY_CCA(i));
2008 		if (AR_SREV_9280_10_OR_LATER(sc))
2009 			nf[i] = MS(reg, AR9280_PHY_MINCCA_PWR);
2010 		else
2011 			nf[i] = MS(reg, AR_PHY_MINCCA_PWR);
2012 		nf[i] = SIGN_EXT(nf[i]);
2013 
2014 		reg = AR_READ(sc, AR_PHY_EXT_CCA(i));
2015 		if (AR_SREV_9280_10_OR_LATER(sc))
2016 			nf_ext[i] = MS(reg, AR9280_PHY_EXT_MINCCA_PWR);
2017 		else
2018 			nf_ext[i] = MS(reg, AR_PHY_EXT_MINCCA_PWR);
2019 		nf_ext[i] = SIGN_EXT(nf_ext[i]);
2020 	}
2021 #undef SIGN_EXT
2022 }
2023 
2024 void
2025 ar5008_write_noisefloor(struct athn_softc *sc, int16_t *nf, int16_t *nf_ext)
2026 {
2027 	uint32_t reg;
2028 	int i;
2029 
2030 	for (i = 0; i < sc->nrxchains; i++) {
2031 		reg = AR_READ(sc, AR_PHY_CCA(i));
2032 		reg = RW(reg, AR_PHY_MAXCCA_PWR, nf[i]);
2033 		AR_WRITE(sc, AR_PHY_CCA(i), reg);
2034 
2035 		reg = AR_READ(sc, AR_PHY_EXT_CCA(i));
2036 		reg = RW(reg, AR_PHY_EXT_MAXCCA_PWR, nf_ext[i]);
2037 		AR_WRITE(sc, AR_PHY_EXT_CCA(i), reg);
2038 	}
2039 	AR_WRITE_BARRIER(sc);
2040 }
2041 
2042 int
2043 ar5008_get_noisefloor(struct athn_softc *sc)
2044 {
2045 	int16_t nf[AR_MAX_CHAINS], nf_ext[AR_MAX_CHAINS];
2046 	int i;
2047 
2048 	if (AR_READ(sc, AR_PHY_AGC_CONTROL) & AR_PHY_AGC_CONTROL_NF) {
2049 		/* Noisefloor calibration not finished. */
2050 		return 0;
2051 	}
2052 	/* Noisefloor calibration is finished. */
2053 	ar5008_read_noisefloor(sc, nf, nf_ext);
2054 
2055 	/* Update noisefloor history. */
2056 	for (i = 0; i < sc->nrxchains; i++) {
2057 		sc->nf_hist[sc->nf_hist_cur].nf[i] = nf[i];
2058 		sc->nf_hist[sc->nf_hist_cur].nf_ext[i] = nf_ext[i];
2059 	}
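	/*
	 * nf_hist[] is a ring buffer of the last ATHN_NF_CAL_HIST_MAX
	 * measurements; the filtered per-chain results end up in
	 * nf_priv[]/nf_ext_priv[], which ar5008_bb_load_noisefloor()
	 * writes back to the baseband.
	 */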
2060 	if (++sc->nf_hist_cur >= ATHN_NF_CAL_HIST_MAX)
2061 		sc->nf_hist_cur = 0;
2062 	return 1;
2063 }
2064 
2065 void
2066 ar5008_bb_load_noisefloor(struct athn_softc *sc)
2067 {
2068 	int16_t nf[AR_MAX_CHAINS], nf_ext[AR_MAX_CHAINS];
2069 	int i, ntries;
2070 
2071 	/* Write filtered noisefloor values. */
2072 	for (i = 0; i < sc->nrxchains; i++) {
2073 		nf[i] = sc->nf_priv[i] * 2;
2074 		nf_ext[i] = sc->nf_ext_priv[i] * 2;
2075 	}
2076 	ar5008_write_noisefloor(sc, nf, nf_ext);
2077 
2078 	/* Load filtered noisefloor values into baseband. */
2079 	AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_ENABLE_NF);
2080 	AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NO_UPDATE_NF);
2081 	AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
2082 	/* Wait for load to complete. */
2083 	for (ntries = 0; ntries < 1000; ntries++) {
2084 		if (!(AR_READ(sc, AR_PHY_AGC_CONTROL) & AR_PHY_AGC_CONTROL_NF))
2085 			break;
2086 		DELAY(50);
2087 	}
2088 	if (ntries == 1000) {
2089 		DPRINTF(("failed to load noisefloor values\n"));
2090 		return;
2091 	}
2092 
2093 	/*
2094 	 * Restore noisefloor values to initial (max) values. These will
2095 	 * be used as initial values during the next NF calibration.
2096 	 */
2097 	for (i = 0; i < AR_MAX_CHAINS; i++)
2098 		nf[i] = nf_ext[i] = AR_DEFAULT_NOISE_FLOOR;
2099 	ar5008_write_noisefloor(sc, nf, nf_ext);
2100 }
2101 
2102 void
2103 ar5008_apply_noisefloor(struct athn_softc *sc)
2104 {
2105 	uint32_t agc_nfcal;
2106 
2107 	agc_nfcal = AR_READ(sc, AR_PHY_AGC_CONTROL) &
2108 	    (AR_PHY_AGC_CONTROL_NF | AR_PHY_AGC_CONTROL_ENABLE_NF |
2109 	    AR_PHY_AGC_CONTROL_NO_UPDATE_NF);
2110 
2111 	if (agc_nfcal & AR_PHY_AGC_CONTROL_NF) {
2112 		/* Pause running NF calibration while values are updated. */
2113 		AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
2114 		AR_WRITE_BARRIER(sc);
2115 	}
2116 
2117 	ar5008_bb_load_noisefloor(sc);
2118 
2119 	if (agc_nfcal & AR_PHY_AGC_CONTROL_NF) {
2120 		/* Restart interrupted NF calibration. */
2121 		AR_SETBITS(sc, AR_PHY_AGC_CONTROL, agc_nfcal);
2122 		AR_WRITE_BARRIER(sc);
2123 	}
2124 }
2125 
2126 void
2127 ar5008_do_noisefloor_calib(struct athn_softc *sc)
2128 {
2129 	AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_ENABLE_NF);
2130 	AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NO_UPDATE_NF);
2131 	AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
2132 	AR_WRITE_BARRIER(sc);
2133 }
2134 
2135 void
2136 ar5008_init_noisefloor_calib(struct athn_softc *sc)
2137 {
2138 	AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
2139 	AR_WRITE_BARRIER(sc);
2140 }
2141 
2142 void
2143 ar5008_do_calib(struct athn_softc *sc)
2144 {
2145 	uint32_t mode, reg;
2146 	int log;
2147 
2148 	reg = AR_READ(sc, AR_PHY_TIMING_CTRL4_0);
2149 	log = AR_SREV_9280_10_OR_LATER(sc) ? 10 : 2;
2150 	reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCAL_LOG_COUNT_MAX, log);
2151 	AR_WRITE(sc, AR_PHY_TIMING_CTRL4_0, reg);
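	/*
	 * LOG_COUNT_MAX controls how many samples the calibration engine
	 * accumulates per run; ar5008_calib_adc_dc_off() assumes
	 * 2^(log + 5) samples per run when averaging its measurements.
	 */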
2152 
2153 	if (sc->cur_calib_mask & ATHN_CAL_ADC_GAIN)
2154 		mode = AR_PHY_CALMODE_ADC_GAIN;
2155 	else if (sc->cur_calib_mask & ATHN_CAL_ADC_DC)
2156 		mode = AR_PHY_CALMODE_ADC_DC_PER;
2157 	else	/* ATHN_CAL_IQ */
2158 		mode = AR_PHY_CALMODE_IQ;
2159 	AR_WRITE(sc, AR_PHY_CALMODE, mode);
2160 
2161 	DPRINTF(("starting calibration mode=0x%x\n", mode));
2162 	AR_SETBITS(sc, AR_PHY_TIMING_CTRL4_0, AR_PHY_TIMING_CTRL4_DO_CAL);
2163 	AR_WRITE_BARRIER(sc);
2164 }
2165 
2166 void
2167 ar5008_next_calib(struct athn_softc *sc)
2168 {
2169 	/* Check if we have any calibration in progress. */
2170 	if (sc->cur_calib_mask != 0) {
2171 		if (!(AR_READ(sc, AR_PHY_TIMING_CTRL4_0) &
2172 		    AR_PHY_TIMING_CTRL4_DO_CAL)) {
2173 			/* Calibration completed for current sample. */
2174 			if (sc->cur_calib_mask & ATHN_CAL_ADC_GAIN)
2175 				ar5008_calib_adc_gain(sc);
2176 			else if (sc->cur_calib_mask & ATHN_CAL_ADC_DC)
2177 				ar5008_calib_adc_dc_off(sc);
2178 			else	/* ATHN_CAL_IQ */
2179 				ar5008_calib_iq(sc);
2180 		}
2181 	}
2182 }
2183 
2184 void
2185 ar5008_calib_iq(struct athn_softc *sc)
2186 {
2187 	struct athn_iq_cal *cal;
2188 	uint32_t reg, i_coff_denom, q_coff_denom;
2189 	int32_t i_coff, q_coff;
2190 	int i, iq_corr_neg;
2191 
2192 	for (i = 0; i < AR_MAX_CHAINS; i++) {
2193 		cal = &sc->calib.iq[i];
2194 
2195 		/* Accumulate IQ calibration measures (clear on read). */
2196 		cal->pwr_meas_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
2197 		cal->pwr_meas_q += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
2198 		cal->iq_corr_meas +=
2199 		    (int32_t)AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
2200 	}
2201 	if (!AR_SREV_9280_10_OR_LATER(sc) &&
2202 	    ++sc->calib.nsamples < AR_CAL_SAMPLES) {
2203 		/* Not enough samples accumulated, continue. */
2204 		ar5008_do_calib(sc);
2205 		return;
2206 	}
2207 
2208 	for (i = 0; i < sc->nrxchains; i++) {
2209 		cal = &sc->calib.iq[i];
2210 
2211 		if (cal->pwr_meas_q == 0)
2212 			continue;
2213 
2214 		if ((iq_corr_neg = cal->iq_corr_meas < 0))
2215 			cal->iq_corr_meas = -cal->iq_corr_meas;
2216 
2217 		i_coff_denom =
2218 		    (cal->pwr_meas_i / 2 + cal->pwr_meas_q / 2) / 128;
2219 		q_coff_denom = cal->pwr_meas_q / 64;
2220 
2221 		if (i_coff_denom == 0 || q_coff_denom == 0)
2222 			continue;	/* Prevents division by zero. */
2223 
2224 		i_coff = cal->iq_corr_meas / i_coff_denom;
2225 		q_coff = (cal->pwr_meas_i / q_coff_denom) - 64;
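		/*
		 * i_coff is roughly 256 * iq_corr_meas /
		 * (pwr_meas_i + pwr_meas_q) and q_coff roughly
		 * 64 * (pwr_meas_i - pwr_meas_q) / pwr_meas_q, i.e. the
		 * phase and gain mismatch between the I and Q paths.
		 */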
2226 
2227 		/* Negate i_coff if iq_corr_meas is positive. */
2228 		if (!iq_corr_neg)
2229 			i_coff = 0x40 - (i_coff & 0x3f);
2230 		if (q_coff > 15)
2231 			q_coff = 15;
2232 		else if (q_coff <= -16)
2233 			q_coff = -16;	/* XXX Linux has a bug here? */
2234 
2235 		DPRINTFN(2, ("IQ calibration for chain %d\n", i));
2236 		reg = AR_READ(sc, AR_PHY_TIMING_CTRL4(i));
2237 		reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCORR_Q_I_COFF, i_coff);
2238 		reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCORR_Q_Q_COFF, q_coff);
2239 		AR_WRITE(sc, AR_PHY_TIMING_CTRL4(i), reg);
2240 	}
2241 
2242 	/* Apply new settings. */
2243 	AR_SETBITS(sc, AR_PHY_TIMING_CTRL4_0,
2244 	    AR_PHY_TIMING_CTRL4_IQCORR_ENABLE);
2245 	AR_WRITE_BARRIER(sc);
2246 
2247 	/* IQ calibration done. */
2248 	sc->cur_calib_mask &= ~ATHN_CAL_IQ;
2249 	memset(&sc->calib, 0, sizeof(sc->calib));
2250 }
2251 
2252 void
2253 ar5008_calib_adc_gain(struct athn_softc *sc)
2254 {
2255 	struct athn_adc_cal *cal;
2256 	uint32_t reg, gain_mismatch_i, gain_mismatch_q;
2257 	int i;
2258 
2259 	for (i = 0; i < AR_MAX_CHAINS; i++) {
2260 		cal = &sc->calib.adc_gain[i];
2261 
2262 		/* Accumulate ADC gain measures (clear on read). */
2263 		cal->pwr_meas_odd_i  += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
2264 		cal->pwr_meas_even_i += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
2265 		cal->pwr_meas_odd_q  += AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
2266 		cal->pwr_meas_even_q += AR_READ(sc, AR_PHY_CAL_MEAS_3(i));
2267 	}
2268 	if (!AR_SREV_9280_10_OR_LATER(sc) &&
2269 	    ++sc->calib.nsamples < AR_CAL_SAMPLES) {
2270 		/* Not enough samples accumulated, continue. */
2271 		ar5008_do_calib(sc);
2272 		return;
2273 	}
2274 
2275 	for (i = 0; i < sc->nrxchains; i++) {
2276 		cal = &sc->calib.adc_gain[i];
2277 
2278 		if (cal->pwr_meas_odd_i == 0 || cal->pwr_meas_even_q == 0)
2279 			continue;	/* Prevents division by zero. */
2280 
2281 		gain_mismatch_i =
2282 		    (cal->pwr_meas_even_i * 32) / cal->pwr_meas_odd_i;
2283 		gain_mismatch_q =
2284 		    (cal->pwr_meas_odd_q * 32) / cal->pwr_meas_even_q;
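		/*
		 * The mismatch is expressed in 1/32 units: a value of 32
		 * means the even and odd power measurements of that path
		 * are equal (no ADC gain error).
		 */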
2285 
2286 		DPRINTFN(2, ("ADC gain calibration for chain %d\n", i));
2287 		reg = AR_READ(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i));
2288 		reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_IGAIN, gain_mismatch_i);
2289 		reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_QGAIN, gain_mismatch_q);
2290 		AR_WRITE(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i), reg);
2291 	}
2292 
2293 	/* Apply new settings. */
2294 	AR_SETBITS(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(0),
2295 	    AR_PHY_NEW_ADC_GAIN_CORR_ENABLE);
2296 	AR_WRITE_BARRIER(sc);
2297 
2298 	/* ADC gain calibration done. */
2299 	sc->cur_calib_mask &= ~ATHN_CAL_ADC_GAIN;
2300 	memset(&sc->calib, 0, sizeof(sc->calib));
2301 }
2302 
2303 void
2304 ar5008_calib_adc_dc_off(struct athn_softc *sc)
2305 {
2306 	struct athn_adc_cal *cal;
2307 	int32_t dc_offset_mismatch_i, dc_offset_mismatch_q;
2308 	uint32_t reg;
2309 	int count, i;
2310 
2311 	for (i = 0; i < AR_MAX_CHAINS; i++) {
2312 		cal = &sc->calib.adc_dc_offset[i];
2313 
2314 		/* Accumulate ADC DC offset measures (clear on read). */
2315 		cal->pwr_meas_odd_i  += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
2316 		cal->pwr_meas_even_i += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
2317 		cal->pwr_meas_odd_q  += AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
2318 		cal->pwr_meas_even_q += AR_READ(sc, AR_PHY_CAL_MEAS_3(i));
2319 	}
2320 	if (!AR_SREV_9280_10_OR_LATER(sc) &&
2321 	    ++sc->calib.nsamples < AR_CAL_SAMPLES) {
2322 		/* Not enough samples accumulated, continue. */
2323 		ar5008_do_calib(sc);
2324 		return;
2325 	}
2326 
2327 	if (AR_SREV_9280_10_OR_LATER(sc))
2328 		count = (1 << (10 + 5));
2329 	else
2330 		count = (1 << ( 2 + 5)) * AR_CAL_SAMPLES;
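	/*
	 * count is the number of samples behind each accumulated
	 * measurement: 2^(LOG_COUNT_MAX + 5) per run (see
	 * ar5008_do_calib()), times AR_CAL_SAMPLES runs on pre-AR9280
	 * parts.
	 */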
2331 	for (i = 0; i < sc->nrxchains; i++) {
2332 		cal = &sc->calib.adc_dc_offset[i];
2333 
2334 		dc_offset_mismatch_i =
2335 		    (cal->pwr_meas_even_i - cal->pwr_meas_odd_i * 2) / count;
2336 		dc_offset_mismatch_q =
2337 		    (cal->pwr_meas_odd_q - cal->pwr_meas_even_q * 2) / count;
2338 
2339 		DPRINTFN(2, ("ADC DC offset calibration for chain %d\n", i));
2340 		reg = AR_READ(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i));
2341 		reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_QDC,
2342 		    dc_offset_mismatch_q);
2343 		reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_IDC,
2344 		    dc_offset_mismatch_i);
2345 		AR_WRITE(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i), reg);
2346 	}
2347 
2348 	/* Apply new settings. */
2349 	AR_SETBITS(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(0),
2350 	    AR_PHY_NEW_ADC_DC_OFFSET_CORR_ENABLE);
2351 	AR_WRITE_BARRIER(sc);
2352 
2353 	/* ADC DC offset calibration done. */
2354 	sc->cur_calib_mask &= ~ATHN_CAL_ADC_DC;
2355 	memset(&sc->calib, 0, sizeof(sc->calib));
2356 }
2357 
2358 void
2359 ar5008_write_txpower(struct athn_softc *sc, int16_t power[ATHN_POWER_COUNT])
2360 {
2361 	AR_WRITE(sc, AR_PHY_POWER_TX_RATE1,
2362 	    (power[ATHN_POWER_OFDM18  ] & 0x3f) << 24 |
2363 	    (power[ATHN_POWER_OFDM12  ] & 0x3f) << 16 |
2364 	    (power[ATHN_POWER_OFDM9   ] & 0x3f) <<  8 |
2365 	    (power[ATHN_POWER_OFDM6   ] & 0x3f));
2366 	AR_WRITE(sc, AR_PHY_POWER_TX_RATE2,
2367 	    (power[ATHN_POWER_OFDM54  ] & 0x3f) << 24 |
2368 	    (power[ATHN_POWER_OFDM48  ] & 0x3f) << 16 |
2369 	    (power[ATHN_POWER_OFDM36  ] & 0x3f) <<  8 |
2370 	    (power[ATHN_POWER_OFDM24  ] & 0x3f));
2371 	AR_WRITE(sc, AR_PHY_POWER_TX_RATE3,
2372 	    (power[ATHN_POWER_CCK2_SP ] & 0x3f) << 24 |
2373 	    (power[ATHN_POWER_CCK2_LP ] & 0x3f) << 16 |
2374 	    (power[ATHN_POWER_XR      ] & 0x3f) <<  8 |
2375 	    (power[ATHN_POWER_CCK1_LP ] & 0x3f));
2376 	AR_WRITE(sc, AR_PHY_POWER_TX_RATE4,
2377 	    (power[ATHN_POWER_CCK11_SP] & 0x3f) << 24 |
2378 	    (power[ATHN_POWER_CCK11_LP] & 0x3f) << 16 |
2379 	    (power[ATHN_POWER_CCK55_SP] & 0x3f) <<  8 |
2380 	    (power[ATHN_POWER_CCK55_LP] & 0x3f));
2381 	AR_WRITE(sc, AR_PHY_POWER_TX_RATE5,
2382 	    (power[ATHN_POWER_HT20(3) ] & 0x3f) << 24 |
2383 	    (power[ATHN_POWER_HT20(2) ] & 0x3f) << 16 |
2384 	    (power[ATHN_POWER_HT20(1) ] & 0x3f) <<  8 |
2385 	    (power[ATHN_POWER_HT20(0) ] & 0x3f));
2386 	AR_WRITE(sc, AR_PHY_POWER_TX_RATE6,
2387 	    (power[ATHN_POWER_HT20(7) ] & 0x3f) << 24 |
2388 	    (power[ATHN_POWER_HT20(6) ] & 0x3f) << 16 |
2389 	    (power[ATHN_POWER_HT20(5) ] & 0x3f) <<  8 |
2390 	    (power[ATHN_POWER_HT20(4) ] & 0x3f));
2391 	AR_WRITE(sc, AR_PHY_POWER_TX_RATE7,
2392 	    (power[ATHN_POWER_HT40(3) ] & 0x3f) << 24 |
2393 	    (power[ATHN_POWER_HT40(2) ] & 0x3f) << 16 |
2394 	    (power[ATHN_POWER_HT40(1) ] & 0x3f) <<  8 |
2395 	    (power[ATHN_POWER_HT40(0) ] & 0x3f));
2396 	AR_WRITE(sc, AR_PHY_POWER_TX_RATE8,
2397 	    (power[ATHN_POWER_HT40(7) ] & 0x3f) << 24 |
2398 	    (power[ATHN_POWER_HT40(6) ] & 0x3f) << 16 |
2399 	    (power[ATHN_POWER_HT40(5) ] & 0x3f) <<  8 |
2400 	    (power[ATHN_POWER_HT40(4) ] & 0x3f));
2401 	AR_WRITE(sc, AR_PHY_POWER_TX_RATE9,
2402 	    (power[ATHN_POWER_OFDM_EXT] & 0x3f) << 24 |
2403 	    (power[ATHN_POWER_CCK_EXT ] & 0x3f) << 16 |
2404 	    (power[ATHN_POWER_OFDM_DUP] & 0x3f) <<  8 |
2405 	    (power[ATHN_POWER_CCK_DUP ] & 0x3f));
2406 	AR_WRITE_BARRIER(sc);
2407 }
2408 
2409 void
2410 ar5008_set_viterbi_mask(struct athn_softc *sc, int bin)
2411 {
2412 	uint32_t mask[4], reg;
2413 	uint8_t m[62], p[62];	/* XXX use bit arrays? */
2414 	int i, bit, cur;
2415 
2416 	/* Compute pilot mask. */
2417 	cur = -6000;
2418 	for (i = 0; i < 4; i++) {
2419 		mask[i] = 0;
2420 		for (bit = 0; bit < 30; bit++) {
2421 			if (abs(cur - bin) < 100)
2422 				mask[i] |= 1 << bit;
2423 			cur += 100;
2424 		}
2425 		if (cur == 0)	/* Skip entry "0". */
2426 			cur = 100;
2427 	}
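	/*
	 * The four mask words cover 120 bins spaced 100 units apart, from
	 * -6000 to +6000 around the channel center (entry 0 is skipped);
	 * bins within 100 units of the spur offset are masked out.
	 */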
2428 	/* Write entries from -6000 to -3100. */
2429 	AR_WRITE(sc, AR_PHY_TIMING7, mask[0]);
2430 	AR_WRITE(sc, AR_PHY_TIMING9, mask[0]);
2431 	/* Write entries from -3000 to -100. */
2432 	AR_WRITE(sc, AR_PHY_TIMING8, mask[1]);
2433 	AR_WRITE(sc, AR_PHY_TIMING10, mask[1]);
2434 	/* Write entries from 100 to 3000. */
2435 	AR_WRITE(sc, AR_PHY_PILOT_MASK_01_30, mask[2]);
2436 	AR_WRITE(sc, AR_PHY_CHANNEL_MASK_01_30, mask[2]);
2437 	/* Write entries from 3100 to 6000. */
2438 	AR_WRITE(sc, AR_PHY_PILOT_MASK_31_60, mask[3]);
2439 	AR_WRITE(sc, AR_PHY_CHANNEL_MASK_31_60, mask[3]);
2440 
2441 	/* Compute viterbi mask. */
2442 	for (cur = 6100; cur >= 0; cur -= 100)
2443 		p[+cur / 100] = abs(cur - bin) < 75;
2444 	for (cur = 0; cur >= -6100; cur -= 100)
2445 		m[-cur / 100] = abs(cur - bin) < 75;
2446 
2447 	/* Write viterbi mask (XXX needs to be reworked). */
2448 	reg =
2449 	    m[46] << 30 | m[47] << 28 | m[48] << 26 | m[49] << 24 |
2450 	    m[50] << 22 | m[51] << 20 | m[52] << 18 | m[53] << 16 |
2451 	    m[54] << 14 | m[55] << 12 | m[56] << 10 | m[57] <<  8 |
2452 	    m[58] <<  6 | m[59] <<  4 | m[60] <<  2 | m[61] <<  0;
2453 	AR_WRITE(sc, AR_PHY_BIN_MASK_1, reg);
2454 	AR_WRITE(sc, AR_PHY_VIT_MASK2_M_46_61, reg);
2455 
2456 	/* XXX m[48] should be m[38] ? */
2457 	reg =             m[31] << 28 | m[32] << 26 | m[33] << 24 |
2458 	    m[34] << 22 | m[35] << 20 | m[36] << 18 | m[37] << 16 |
2459 	    m[48] << 14 | m[39] << 12 | m[40] << 10 | m[41] <<  8 |
2460 	    m[42] <<  6 | m[43] <<  4 | m[44] <<  2 | m[45] <<  0;
2461 	AR_WRITE(sc, AR_PHY_BIN_MASK_2, reg);
2462 	AR_WRITE(sc, AR_PHY_VIT_MASK2_M_31_45, reg);
2463 
2464 	/* XXX This one is weird too. */
2465 	reg =
2466 	    m[16] << 30 | m[16] << 28 | m[18] << 26 | m[18] << 24 |
2467 	    m[20] << 22 | m[20] << 20 | m[22] << 18 | m[22] << 16 |
2468 	    m[24] << 14 | m[24] << 12 | m[25] << 10 | m[26] <<  8 |
2469 	    m[27] <<  6 | m[28] <<  4 | m[29] <<  2 | m[30] <<  0;
2470 	AR_WRITE(sc, AR_PHY_BIN_MASK_3, reg);
2471 	AR_WRITE(sc, AR_PHY_VIT_MASK2_M_16_30, reg);
2472 
2473 	reg =
2474 	    m[ 0] << 30 | m[ 1] << 28 | m[ 2] << 26 | m[ 3] << 24 |
2475 	    m[ 4] << 22 | m[ 5] << 20 | m[ 6] << 18 | m[ 7] << 16 |
2476 	    m[ 8] << 14 | m[ 9] << 12 | m[10] << 10 | m[11] <<  8 |
2477 	    m[12] <<  6 | m[13] <<  4 | m[14] <<  2 | m[15] <<  0;
2478 	AR_WRITE(sc, AR_PHY_MASK_CTL, reg);
2479 	AR_WRITE(sc, AR_PHY_VIT_MASK2_M_00_15, reg);
2480 
2481 	reg =             p[15] << 28 | p[14] << 26 | p[13] << 24 |
2482 	    p[12] << 22 | p[11] << 20 | p[10] << 18 | p[ 9] << 16 |
2483 	    p[ 8] << 14 | p[ 7] << 12 | p[ 6] << 10 | p[ 5] <<  8 |
2484 	    p[ 4] <<  6 | p[ 3] <<  4 | p[ 2] <<  2 | p[ 1] <<  0;
2485 	AR_WRITE(sc, AR_PHY_BIN_MASK2_1, reg);
2486 	AR_WRITE(sc, AR_PHY_VIT_MASK2_P_15_01, reg);
2487 
2488 	reg =             p[30] << 28 | p[29] << 26 | p[28] << 24 |
2489 	    p[27] << 22 | p[26] << 20 | p[25] << 18 | p[24] << 16 |
2490 	    p[23] << 14 | p[22] << 12 | p[21] << 10 | p[20] <<  8 |
2491 	    p[19] <<  6 | p[18] <<  4 | p[17] <<  2 | p[16] <<  0;
2492 	AR_WRITE(sc, AR_PHY_BIN_MASK2_2, reg);
2493 	AR_WRITE(sc, AR_PHY_VIT_MASK2_P_30_16, reg);
2494 
2495 	reg =             p[45] << 28 | p[44] << 26 | p[43] << 24 |
2496 	    p[42] << 22 | p[41] << 20 | p[40] << 18 | p[39] << 16 |
2497 	    p[38] << 14 | p[37] << 12 | p[36] << 10 | p[35] <<  8 |
2498 	    p[34] <<  6 | p[33] <<  4 | p[32] <<  2 | p[31] <<  0;
2499 	AR_WRITE(sc, AR_PHY_BIN_MASK2_3, reg);
2500 	AR_WRITE(sc, AR_PHY_VIT_MASK2_P_45_31, reg);
2501 
2502 	reg =
2503 	    p[61] << 30 | p[60] << 28 | p[59] << 26 | p[58] << 24 |
2504 	    p[57] << 22 | p[56] << 20 | p[55] << 18 | p[54] << 16 |
2505 	    p[53] << 14 | p[52] << 12 | p[51] << 10 | p[50] <<  8 |
2506 	    p[49] <<  6 | p[48] <<  4 | p[47] <<  2 | p[46] <<  0;
2507 	AR_WRITE(sc, AR_PHY_BIN_MASK2_4, reg);
2508 	AR_WRITE(sc, AR_PHY_VIT_MASK2_P_61_46, reg);
2509 	AR_WRITE_BARRIER(sc);
2510 }
2511 
2512 void
2513 ar5008_hw_init(struct athn_softc *sc, struct ieee80211_channel *c,
2514     struct ieee80211_channel *extc)
2515 {
2516 	struct athn_ops *ops = &sc->ops;
2517 	const struct athn_ini *ini = sc->ini;
2518 	const uint32_t *pvals;
2519 	uint32_t reg;
2520 	int i;
2521 
2522 	AR_WRITE(sc, AR_PHY(0), 0x00000007);
2523 	AR_WRITE(sc, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
2524 
2525 	if (!AR_SINGLE_CHIP(sc))
2526 		ar5416_reset_addac(sc, c);
2527 
2528 	AR_WRITE(sc, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
2529 
2530 	/* First initialization step (depends on channel band/bandwidth). */
2531 	if (extc != NULL) {
2532 		if (IEEE80211_IS_CHAN_2GHZ(c))
2533 			pvals = ini->vals_2g40;
2534 		else
2535 			pvals = ini->vals_5g40;
2536 	} else {
2537 		if (IEEE80211_IS_CHAN_2GHZ(c))
2538 			pvals = ini->vals_2g20;
2539 		else
2540 			pvals = ini->vals_5g20;
2541 	}
2542 	DPRINTFN(4, ("writing modal init vals\n"));
2543 	for (i = 0; i < ini->nregs; i++) {
2544 		uint32_t val = pvals[i];
2545 
2546 		/* Fix AR_AN_TOP2 initialization value if required. */
2547 		if (ini->regs[i] == AR_AN_TOP2 &&
2548 		    (sc->flags & ATHN_FLAG_AN_TOP2_FIXUP))
2549 			val &= ~AR_AN_TOP2_PWDCLKIND;
2550 		AR_WRITE(sc, ini->regs[i], val);
2551 		if (AR_IS_ANALOG_REG(ini->regs[i])) {
2552 			AR_WRITE_BARRIER(sc);
2553 			DELAY(100);
2554 		}
2555 		if ((i & 0x1f) == 0)
2556 			DELAY(1);
2557 	}
2558 	AR_WRITE_BARRIER(sc);
2559 
2560 	if (sc->rx_gain != NULL)
2561 		ar9280_reset_rx_gain(sc, c);
2562 	if (sc->tx_gain != NULL)
2563 		ar9280_reset_tx_gain(sc, c);
2564 
2565 	if (AR_SREV_9271_10(sc)) {
2566 		AR_WRITE(sc, AR_PHY(68), 0x30002311);
2567 		AR_WRITE(sc, AR_PHY_RF_CTL3, 0x0a020001);
2568 	}
2569 	AR_WRITE_BARRIER(sc);
2570 
2571 	/* Second initialization step (common to all channels). */
2572 	DPRINTFN(4, ("writing common init vals\n"));
2573 	for (i = 0; i < ini->ncmregs; i++) {
2574 		AR_WRITE(sc, ini->cmregs[i], ini->cmvals[i]);
2575 		if (AR_IS_ANALOG_REG(ini->cmregs[i])) {
2576 			AR_WRITE_BARRIER(sc);
2577 			DELAY(100);
2578 		}
2579 		if ((i & 0x1f) == 0)
2580 			DELAY(1);
2581 	}
2582 	AR_WRITE_BARRIER(sc);
2583 
2584 	if (!AR_SINGLE_CHIP(sc))
2585 		ar5416_reset_bb_gain(sc, c);
2586 
2587 	if (IEEE80211_IS_CHAN_5GHZ(c) &&
2588 	    (sc->flags & ATHN_FLAG_FAST_PLL_CLOCK)) {
2589 		/* Update modal values for fast PLL clock. */
2590 		if (extc != NULL)
2591 			pvals = ini->fastvals_5g40;
2592 		else
2593 			pvals = ini->fastvals_5g20;
2594 		DPRINTFN(4, ("writing fast pll clock init vals\n"));
2595 		for (i = 0; i < ini->nfastregs; i++) {
2596 			AR_WRITE(sc, ini->fastregs[i], pvals[i]);
2597 			if (AR_IS_ANALOG_REG(ini->fastregs[i])) {
2598 				AR_WRITE_BARRIER(sc);
2599 				DELAY(100);
2600 			}
2601 			if ((i & 0x1f) == 0)
2602 				DELAY(1);
2603 		}
2604 	}
2605 
2606 	/*
2607 	 * Set the RX_ABORT and RX_DIS bits to prevent the reception of
2608 	 * frames with a corrupted descriptor status.
2609 	 */
2610 	AR_SETBITS(sc, AR_DIAG_SW, AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT);
2611 
2612 	/* Hardware workarounds for occasional Rx data corruption. */
2613 	if (AR_SREV_9280_10_OR_LATER(sc)) {
2614 		reg = AR_READ(sc, AR_PCU_MISC_MODE2);
2615 		if (!AR_SREV_9271(sc))
2616 			reg &= ~AR_PCU_MISC_MODE2_HWWAR1;
2617 		if (AR_SREV_9287_10_OR_LATER(sc))
2618 			reg &= ~AR_PCU_MISC_MODE2_HWWAR2;
2619 		AR_WRITE(sc, AR_PCU_MISC_MODE2, reg);
2620 
2621 	} else if (AR_SREV_5416_20_OR_LATER(sc)) {
2622 		/* Disable baseband clock gating. */
2623 		AR_WRITE(sc, AR_PHY(651), 0x11);
2624 
2625 		if (AR_SREV_9160(sc)) {
2626 			/* Disable RIFS search to fix baseband hang. */
2627 			AR_CLRBITS(sc, AR_PHY_HEAVY_CLIP_FACTOR_RIFS,
2628 			    AR_PHY_RIFS_INIT_DELAY_M);
2629 		}
2630 	}
2631 	AR_WRITE_BARRIER(sc);
2632 
2633 	ar5008_set_phy(sc, c, extc);
2634 	ar5008_init_chains(sc);
2635 
2636 	if (sc->flags & ATHN_FLAG_OLPC) {
2637 		extern int ticks;
2638 		sc->olpc_ticks = ticks;
2639 		ops->olpc_init(sc);
2640 	}
2641 
2642 	ops->set_txpower(sc, c, extc);
2643 
2644 	if (!AR_SINGLE_CHIP(sc))
2645 		ar5416_rf_reset(sc, c);
2646 }
2647 
2648 uint8_t
2649 ar5008_get_vpd(uint8_t pwr, const uint8_t *pwrPdg, const uint8_t *vpdPdg,
2650     int nicepts)
2651 {
2652 	uint8_t vpd;
2653 	int i, lo, hi;
2654 
2655 	for (i = 0; i < nicepts; i++)
2656 		if (pwrPdg[i] > pwr)
2657 			break;
2658 	hi = i;
2659 	lo = hi - 1;
2660 	if (lo == -1)
2661 		lo = hi;
2662 	else if (hi == nicepts)
2663 		hi = lo;
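	/*
	 * If pwr falls outside the calibrated range, lo == hi and the
	 * interpolation below degenerates to the nearest endpoint.
	 */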
2664 
2665 	vpd = athn_interpolate(pwr, pwrPdg[lo], vpdPdg[lo],
2666 	    pwrPdg[hi], vpdPdg[hi]);
2667 	return (vpd);
2668 }
2669 
2670 void
2671 ar5008_get_pdadcs(struct athn_softc *sc, uint8_t fbin,
2672     struct athn_pier *lopier, struct athn_pier *hipier, int nxpdgains,
2673     int nicepts, uint8_t overlap, uint8_t *boundaries, uint8_t *pdadcs)
2674 {
2675 #define DB(x)	((x) / 2)	/* Convert half dB to dB. */
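/*
 * On return, boundaries[] holds the pdGain switch points (in dB) and
 * pdadcs[] holds AR_NUM_PDADC_VALUES power detector (Vpd) entries,
 * extrapolated below and above the calibrated power range.
 */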
2676 	uint8_t minpwr[AR_PD_GAINS_IN_MASK], maxpwr[AR_PD_GAINS_IN_MASK];
2677 	uint8_t vpd[AR_MAX_PWR_RANGE_IN_HALF_DB], pwr;
2678 	uint8_t lovpd, hivpd, boundary;
2679 	int16_t ss, delta, vpdstep, val;
2680 	int i, j, npdadcs, nvpds, maxidx, tgtidx;
2681 
2682 	/* Compute min and max power in half dB for each pdGain. */
2683 	for (i = 0; i < nxpdgains; i++) {
2684 		minpwr[i] = MAX(lopier->pwr[i][0], hipier->pwr[i][0]);
2685 		maxpwr[i] = MIN(lopier->pwr[i][nicepts - 1],
2686 		    hipier->pwr[i][nicepts - 1]);
2687 	}
2688 
2689 	/* Fill power detector analog-to-digital converter (PDADC) table. */
2690 	npdadcs = 0;
2691 	for (i = 0; i < nxpdgains; i++) {
2692 		if (i != nxpdgains - 1)
2693 			boundaries[i] = DB(maxpwr[i] + minpwr[i + 1]) / 2;
2694 		else
2695 			boundaries[i] = DB(maxpwr[i]);
2696 		if (boundaries[i] > AR_MAX_RATE_POWER)
2697 			boundaries[i] = AR_MAX_RATE_POWER;
2698 
2699 		if (i == 0 && !AR_SREV_5416_20_OR_LATER(sc)) {
2700 			/* Fix the gain delta (AR5416 1.0 only). */
2701 			delta = boundaries[0] - 23;
2702 			boundaries[0] = 23;
2703 		} else
2704 			delta = 0;
2705 
2706 		/* Find starting index for this pdGain. */
2707 		if (i != 0) {
2708 			ss = boundaries[i - 1] - DB(minpwr[i]) -
2709 			    overlap + 1 + delta;
2710 		} else if (AR_SREV_9280_10_OR_LATER(sc)) {
2711 			ss = -DB(minpwr[i]);
2712 		} else
2713 			ss = 0;
2714 
2715 		/* Compute Vpd table for this pdGain. */
2716 		nvpds = DB(maxpwr[i] - minpwr[i]) + 1;
2717 		memset(vpd, 0, sizeof(vpd));
2718 		pwr = minpwr[i];
2719 		for (j = 0; j < nvpds; j++) {
2720 			/* Get lower and higher Vpd. */
2721 			lovpd = ar5008_get_vpd(pwr, lopier->pwr[i],
2722 			    lopier->vpd[i], nicepts);
2723 			hivpd = ar5008_get_vpd(pwr, hipier->pwr[i],
2724 			    hipier->vpd[i], nicepts);
2725 
2726 			/* Interpolate the final Vpd. */
2727 			vpd[j] = athn_interpolate(fbin,
2728 			    lopier->fbin, lovpd, hipier->fbin, hivpd);
2729 
2730 			pwr += 2;	/* In half dB. */
2731 		}
2732 
2733 		/* Extrapolate data for ss < 0. */
2734 		if (vpd[1] > vpd[0])
2735 			vpdstep = vpd[1] - vpd[0];
2736 		else
2737 			vpdstep = 1;
2738 		while (ss < 0 && npdadcs < AR_NUM_PDADC_VALUES - 1) {
2739 			val = vpd[0] + ss * vpdstep;
2740 			pdadcs[npdadcs++] = MAX(val, 0);
2741 			ss++;
2742 		}
2743 
2744 		tgtidx = boundaries[i] + overlap - DB(minpwr[i]);
2745 		maxidx = MIN(tgtidx, nvpds);
2746 		while (ss < maxidx && npdadcs < AR_NUM_PDADC_VALUES - 1)
2747 			pdadcs[npdadcs++] = vpd[ss++];
2748 
2749 		if (tgtidx < maxidx)
2750 			continue;
2751 
2752 		/* Extrapolate data for maxidx <= ss <= tgtidx. */
2753 		if (vpd[nvpds - 1] > vpd[nvpds - 2])
2754 			vpdstep = vpd[nvpds - 1] - vpd[nvpds - 2];
2755 		else
2756 			vpdstep = 1;
2757 		while (ss <= tgtidx && npdadcs < AR_NUM_PDADC_VALUES - 1) {
2758 			val = vpd[nvpds - 1] + (ss - maxidx + 1) * vpdstep;
2759 			pdadcs[npdadcs++] = MIN(val, 255);
2760 			ss++;
2761 		}
2762 	}
2763 
2764 	/* Fill remaining PDADC and boundaries entries. */
2765 	if (AR_SREV_9285(sc))
2766 		boundary = AR9285_PD_GAIN_BOUNDARY_DEFAULT;
2767 	else	/* Fill with latest. */
2768 		boundary = boundaries[nxpdgains - 1];
2769 
2770 	for (; nxpdgains < AR_PD_GAINS_IN_MASK; nxpdgains++)
2771 		boundaries[nxpdgains] = boundary;
2772 
2773 	for (; npdadcs < AR_NUM_PDADC_VALUES; npdadcs++)
2774 		pdadcs[npdadcs] = pdadcs[npdadcs - 1];
2775 #undef DB
2776 }
2777 
2778 void
2779 ar5008_get_lg_tpow(struct athn_softc *sc, struct ieee80211_channel *c,
2780     uint8_t ctl, const struct ar_cal_target_power_leg *tgt, int nchans,
2781     uint8_t tpow[4])
2782 {
2783 	uint8_t fbin;
2784 	int i, lo, hi;
2785 
2786 	/* Find interval (lower and upper indices). */
2787 	fbin = athn_chan2fbin(c);
2788 	for (i = 0; i < nchans; i++) {
2789 		if (tgt[i].bChannel == AR_BCHAN_UNUSED ||
2790 		    tgt[i].bChannel > fbin)
2791 			break;
2792 	}
2793 	hi = i;
2794 	lo = hi - 1;
2795 	if (lo == -1)
2796 		lo = hi;
2797 	else if (hi == nchans || tgt[hi].bChannel == AR_BCHAN_UNUSED)
2798 		hi = lo;
2799 
2800 	/* Interpolate values. */
2801 	for (i = 0; i < 4; i++) {
2802 		tpow[i] = athn_interpolate(fbin,
2803 		    tgt[lo].bChannel, tgt[lo].tPow2x[i],
2804 		    tgt[hi].bChannel, tgt[hi].tPow2x[i]);
2805 	}
2806 	/* XXX Apply conformance testing limit. */
2807 }
2808 
2809 void
2810 ar5008_get_ht_tpow(struct athn_softc *sc, struct ieee80211_channel *c,
2811     uint8_t ctl, const struct ar_cal_target_power_ht *tgt, int nchans,
2812     uint8_t tpow[8])
2813 {
2814 	uint8_t fbin;
2815 	int i, lo, hi;
2816 
2817 	/* Find interval (lower and upper indices). */
2818 	fbin = athn_chan2fbin(c);
2819 	for (i = 0; i < nchans; i++) {
2820 		if (tgt[i].bChannel == AR_BCHAN_UNUSED ||
2821 		    tgt[i].bChannel > fbin)
2822 			break;
2823 	}
2824 	hi = i;
2825 	lo = hi - 1;
2826 	if (lo == -1)
2827 		lo = hi;
2828 	else if (hi == nchans || tgt[hi].bChannel == AR_BCHAN_UNUSED)
2829 		hi = lo;
2830 
2831 	/* Interpolate values. */
2832 	for (i = 0; i < 8; i++) {
2833 		tpow[i] = athn_interpolate(fbin,
2834 		    tgt[lo].bChannel, tgt[lo].tPow2x[i],
2835 		    tgt[hi].bChannel, tgt[hi].tPow2x[i]);
2836 	}
2837 	/* XXX Apply conformance testing limit. */
2838 }
2839 
2840 /*
2841  * Adaptive noise immunity.
2842  */
2843 void
2844 ar5008_set_noise_immunity_level(struct athn_softc *sc, int level)
2845 {
2846 	int high = level == 4;
2847 	uint32_t reg;
2848 
2849 	reg = AR_READ(sc, AR_PHY_DESIRED_SZ);
2850 	reg = RW(reg, AR_PHY_DESIRED_SZ_TOT_DES, high ? -62 : -55);
2851 	AR_WRITE(sc, AR_PHY_DESIRED_SZ, reg);
2852 
2853 	reg = AR_READ(sc, AR_PHY_AGC_CTL1);
2854 	reg = RW(reg, AR_PHY_AGC_CTL1_COARSE_LOW, high ? -70 : -64);
2855 	reg = RW(reg, AR_PHY_AGC_CTL1_COARSE_HIGH, high ? -12 : -14);
2856 	AR_WRITE(sc, AR_PHY_AGC_CTL1, reg);
2857 
2858 	reg = AR_READ(sc, AR_PHY_FIND_SIG);
2859 	reg = RW(reg, AR_PHY_FIND_SIG_FIRPWR, high ? -80 : -78);
2860 	AR_WRITE(sc, AR_PHY_FIND_SIG, reg);
2861 
2862 	AR_WRITE_BARRIER(sc);
2863 }
2864 
2865 void
2866 ar5008_enable_ofdm_weak_signal(struct athn_softc *sc)
2867 {
2868 	uint32_t reg;
2869 
2870 	reg = AR_READ(sc, AR_PHY_SFCORR_LOW);
2871 	reg = RW(reg, AR_PHY_SFCORR_LOW_M1_THRESH_LOW, 50);
2872 	reg = RW(reg, AR_PHY_SFCORR_LOW_M2_THRESH_LOW, 40);
2873 	reg = RW(reg, AR_PHY_SFCORR_LOW_M2COUNT_THR_LOW, 48);
2874 	AR_WRITE(sc, AR_PHY_SFCORR_LOW, reg);
2875 
2876 	reg = AR_READ(sc, AR_PHY_SFCORR);
2877 	reg = RW(reg, AR_PHY_SFCORR_M1_THRESH, 77);
2878 	reg = RW(reg, AR_PHY_SFCORR_M2_THRESH, 64);
2879 	reg = RW(reg, AR_PHY_SFCORR_M2COUNT_THR, 16);
2880 	AR_WRITE(sc, AR_PHY_SFCORR, reg);
2881 
2882 	reg = AR_READ(sc, AR_PHY_SFCORR_EXT);
2883 	reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH_LOW, 50);
2884 	reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH_LOW, 40);
2885 	reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH, 77);
2886 	reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH, 64);
2887 	AR_WRITE(sc, AR_PHY_SFCORR_EXT, reg);
2888 
2889 	AR_SETBITS(sc, AR_PHY_SFCORR_LOW,
2890 	    AR_PHY_SFCORR_LOW_USE_SELF_CORR_LOW);
2891 	AR_WRITE_BARRIER(sc);
2892 }
2893 
2894 void
2895 ar5008_disable_ofdm_weak_signal(struct athn_softc *sc)
2896 {
2897 	uint32_t reg;
2898 
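	/*
	 * Raising the self-correlator thresholds to their maximum values
	 * effectively disables OFDM weak-signal detection.
	 */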
2899 	reg = AR_READ(sc, AR_PHY_SFCORR_LOW);
2900 	reg = RW(reg, AR_PHY_SFCORR_LOW_M1_THRESH_LOW, 127);
2901 	reg = RW(reg, AR_PHY_SFCORR_LOW_M2_THRESH_LOW, 127);
2902 	reg = RW(reg, AR_PHY_SFCORR_LOW_M2COUNT_THR_LOW, 63);
2903 	AR_WRITE(sc, AR_PHY_SFCORR_LOW, reg);
2904 
2905 	reg = AR_READ(sc, AR_PHY_SFCORR);
2906 	reg = RW(reg, AR_PHY_SFCORR_M1_THRESH, 127);
2907 	reg = RW(reg, AR_PHY_SFCORR_M2_THRESH, 127);
2908 	reg = RW(reg, AR_PHY_SFCORR_M2COUNT_THR, 31);
2909 	AR_WRITE(sc, AR_PHY_SFCORR, reg);
2910 
2911 	reg = AR_READ(sc, AR_PHY_SFCORR_EXT);
2912 	reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH_LOW, 127);
2913 	reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH_LOW, 127);
2914 	reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH, 127);
2915 	reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH, 127);
2916 	AR_WRITE(sc, AR_PHY_SFCORR_EXT, reg);
2917 
2918 	AR_CLRBITS(sc, AR_PHY_SFCORR_LOW,
2919 	    AR_PHY_SFCORR_LOW_USE_SELF_CORR_LOW);
2920 	AR_WRITE_BARRIER(sc);
2921 }
2922 
2923 void
2924 ar5008_set_cck_weak_signal(struct athn_softc *sc, int high)
2925 {
2926 	uint32_t reg;
2927 
2928 	reg = AR_READ(sc, AR_PHY_CCK_DETECT);
2929 	reg = RW(reg, AR_PHY_CCK_DETECT_WEAK_SIG_THR_CCK, high ? 6 : 8);
2930 	AR_WRITE(sc, AR_PHY_CCK_DETECT, reg);
2931 	AR_WRITE_BARRIER(sc);
2932 }
2933 
2934 void
2935 ar5008_set_firstep_level(struct athn_softc *sc, int level)
2936 {
2937 	uint32_t reg;
2938 
2939 	reg = AR_READ(sc, AR_PHY_FIND_SIG);
2940 	reg = RW(reg, AR_PHY_FIND_SIG_FIRSTEP, level * 4);
2941 	AR_WRITE(sc, AR_PHY_FIND_SIG, reg);
2942 	AR_WRITE_BARRIER(sc);
2943 }
2944 
2945 void
2946 ar5008_set_spur_immunity_level(struct athn_softc *sc, int level)
2947 {
2948 	uint32_t reg;
2949 
2950 	reg = AR_READ(sc, AR_PHY_TIMING5);
2951 	reg = RW(reg, AR_PHY_TIMING5_CYCPWR_THR1, (level + 1) * 2);
2952 	AR_WRITE(sc, AR_PHY_TIMING5, reg);
2953 	AR_WRITE_BARRIER(sc);
2954 }
2955