2 * Copyright (c) 2008-2009 Atheros Communications Inc.
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
18 #include <asm/unaligned.h>
19 #include <linux/pci.h>
24 static bool btcoex_enable;
25 module_param(btcoex_enable, bool, 0);
26 MODULE_PARM_DESC(btcoex_enable, "Enable Bluetooth coexistence support");
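/*
 * Usage example (illustrative): Bluetooth coexistence support can be
 * enabled at module load time, e.g. "modprobe ath9k btcoex_enable=1".
 */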
28 #define ATH9K_CLOCK_RATE_CCK 22
29 #define ATH9K_CLOCK_RATE_5GHZ_OFDM 40
30 #define ATH9K_CLOCK_RATE_2GHZ_OFDM 44
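/*
 * The clock-rate constants above are in MHz, i.e. baseband clock ticks
 * per microsecond, and are used by the helpers below to convert between
 * clocks and microseconds (44 MHz for 2 GHz OFDM, 40 MHz for 5 GHz OFDM,
 * 22 MHz for CCK).
 */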
32 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
33 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan,
34 enum ath9k_ht_macmode macmode);
35 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
36 struct ar5416_eeprom_def *pEepData,
38 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
39 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
41 /********************/
42 /* Helper Functions */
43 /********************/
45 static u32 ath9k_hw_mac_usec(struct ath_hw *ah, u32 clks)
47 struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
49 if (!ah->curchan) /* should really check for CCK instead */
50 return clks / ATH9K_CLOCK_RATE_CCK;
51 if (conf->channel->band == IEEE80211_BAND_2GHZ)
52 return clks / ATH9K_CLOCK_RATE_2GHZ_OFDM;
54 return clks / ATH9K_CLOCK_RATE_5GHZ_OFDM;
57 static u32 ath9k_hw_mac_to_usec(struct ath_hw *ah, u32 clks)
59 struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
61 if (conf_is_ht40(conf))
62 return ath9k_hw_mac_usec(ah, clks) / 2;
64 return ath9k_hw_mac_usec(ah, clks);
67 static u32 ath9k_hw_mac_clks(struct ath_hw *ah, u32 usecs)
69 struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
71 if (!ah->curchan) /* should really check for CCK instead */
72 return usecs * ATH9K_CLOCK_RATE_CCK;
73 if (conf->channel->band == IEEE80211_BAND_2GHZ)
74 return usecs * ATH9K_CLOCK_RATE_2GHZ_OFDM;
75 return usecs * ATH9K_CLOCK_RATE_5GHZ_OFDM;
78 static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
80 struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
82 if (conf_is_ht40(conf))
83 return ath9k_hw_mac_clks(ah, usecs) * 2;
85 return ath9k_hw_mac_clks(ah, usecs);
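/*
 * Worked example (illustrative): on a 2 GHz OFDM channel the MAC runs at
 * 44 clocks per microsecond, so 100 usec converts to 4400 clocks; in HT40
 * the clock is doubled, giving 8800 clocks for the same 100 usec.
 */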
89 * Reads and writes share the same lock. We do this to serialize
90 * reads and writes on Atheros 802.11n PCI devices only. This is required
91 * as the FIFO on these devices can sanely accept only 2 requests. After
92 * that the device goes bananas. Serializing the reads/writes prevents this
96 void ath9k_iowrite32(struct ath_hw *ah, u32 reg_offset, u32 val)
98 if (ah->config.serialize_regmode == SER_REG_MODE_ON) {
100 spin_lock_irqsave(&ah->ah_sc->sc_serial_rw, flags);
101 iowrite32(val, ah->ah_sc->mem + reg_offset);
102 spin_unlock_irqrestore(&ah->ah_sc->sc_serial_rw, flags);
104 iowrite32(val, ah->ah_sc->mem + reg_offset);
107 unsigned int ath9k_ioread32(struct ath_hw *ah, u32 reg_offset)
110 if (ah->config.serialize_regmode == SER_REG_MODE_ON) {
112 spin_lock_irqsave(&ah->ah_sc->sc_serial_rw, flags);
113 val = ioread32(ah->ah_sc->mem + reg_offset);
114 spin_unlock_irqrestore(&ah->ah_sc->sc_serial_rw, flags);
116 val = ioread32(ah->ah_sc->mem + reg_offset);
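/*
 * ath9k_hw_wait() - poll a register until (value & mask) == val.
 * The timeout is in microseconds and is consumed in AH_TIME_QUANTUM
 * steps; returns true if the condition was met, false on timeout.
 */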
120 bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
124 BUG_ON(timeout < AH_TIME_QUANTUM);
126 for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
127 if ((REG_READ(ah, reg) & mask) == val)
130 udelay(AH_TIME_QUANTUM);
133 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
134 "timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
135 timeout, reg, REG_READ(ah, reg), mask, val);
140 u32 ath9k_hw_reverse_bits(u32 val, u32 n)
145 for (i = 0, retval = 0; i < n; i++) {
146 retval = (retval << 1) | (val & 1);
152 bool ath9k_get_channel_edges(struct ath_hw *ah,
156 struct ath9k_hw_capabilities *pCap = &ah->caps;
158 if (flags & CHANNEL_5GHZ) {
159 *low = pCap->low_5ghz_chan;
160 *high = pCap->high_5ghz_chan;
163 if ((flags & CHANNEL_2GHZ)) {
164 *low = pCap->low_2ghz_chan;
165 *high = pCap->high_2ghz_chan;
171 u16 ath9k_hw_computetxtime(struct ath_hw *ah,
172 const struct ath_rate_table *rates,
173 u32 frameLen, u16 rateix,
176 u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;
179 kbps = rates->info[rateix].ratekbps;
184 switch (rates->info[rateix].phy) {
185 case WLAN_RC_PHY_CCK:
186 phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
187 if (shortPreamble && rates->info[rateix].short_preamble)
189 numBits = frameLen << 3;
190 txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
192 case WLAN_RC_PHY_OFDM:
193 if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
194 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
195 numBits = OFDM_PLCP_BITS + (frameLen << 3);
196 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
197 txTime = OFDM_SIFS_TIME_QUARTER
198 + OFDM_PREAMBLE_TIME_QUARTER
199 + (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
200 } else if (ah->curchan &&
201 IS_CHAN_HALF_RATE(ah->curchan)) {
202 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
203 numBits = OFDM_PLCP_BITS + (frameLen << 3);
204 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
205 txTime = OFDM_SIFS_TIME_HALF +
206 OFDM_PREAMBLE_TIME_HALF
207 + (numSymbols * OFDM_SYMBOL_TIME_HALF);
209 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
210 numBits = OFDM_PLCP_BITS + (frameLen << 3);
211 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
212 txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
213 + (numSymbols * OFDM_SYMBOL_TIME);
217 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
218 "Unknown phy %u (rate ix %u)\n",
219 rates->info[rateix].phy, rateix);
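/*
 * Worked example (illustrative, using the usual 20 MHz OFDM constants:
 * 22 PLCP bits, 4 usec symbols, 20 usec preamble, 16 usec SIFS): a
 * 1500-byte frame at 54000 kbps gives bitsPerSymbol = 216, numBits =
 * 22 + 12000 = 12022, numSymbols = 56, so txTime = 16 + 20 + 56 * 4 =
 * 260 usec.
 */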
227 void ath9k_hw_get_channel_centers(struct ath_hw *ah,
228 struct ath9k_channel *chan,
229 struct chan_centers *centers)
233 if (!IS_CHAN_HT40(chan)) {
234 centers->ctl_center = centers->ext_center =
235 centers->synth_center = chan->channel;
239 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
240 (chan->chanmode == CHANNEL_G_HT40PLUS)) {
241 centers->synth_center =
242 chan->channel + HT40_CHANNEL_CENTER_SHIFT;
245 centers->synth_center =
246 chan->channel - HT40_CHANNEL_CENTER_SHIFT;
250 centers->ctl_center =
251 centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
252 centers->ext_center =
253 centers->synth_center + (extoff *
254 ((ah->extprotspacing == ATH9K_HT_EXTPROTSPACING_20) ?
255 HT40_CHANNEL_CENTER_SHIFT : 15));
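/*
 * Example (illustrative, assuming HT40_CHANNEL_CENTER_SHIFT is 10 MHz):
 * for an HT40+ channel with a control channel at 2437 MHz, the synth
 * center becomes 2447 MHz, ctl_center stays at 2437 MHz and ext_center
 * is 2457 MHz (or 2462 MHz with 25 MHz extension protection spacing).
 */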
262 static void ath9k_hw_read_revisions(struct ath_hw *ah)
266 val = REG_READ(ah, AR_SREV) & AR_SREV_ID;
269 val = REG_READ(ah, AR_SREV);
270 ah->hw_version.macVersion =
271 (val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
272 ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
273 ah->is_pciexpress = (val & AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
275 if (!AR_SREV_9100(ah))
276 ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);
278 ah->hw_version.macRev = val & AR_SREV_REVISION;
280 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
281 ah->is_pciexpress = true;
285 static int ath9k_hw_get_radiorev(struct ath_hw *ah)
290 REG_WRITE(ah, AR_PHY(0x36), 0x00007058);
292 for (i = 0; i < 8; i++)
293 REG_WRITE(ah, AR_PHY(0x20), 0x00010000);
294 val = (REG_READ(ah, AR_PHY(256)) >> 24) & 0xff;
295 val = ((val & 0xf0) >> 4) | ((val & 0x0f) << 4);
297 return ath9k_hw_reverse_bits(val, 8);
300 /************************************/
301 /* HW Attach, Detach, Init Routines */
302 /************************************/
304 static void ath9k_hw_disablepcie(struct ath_hw *ah)
306 if (AR_SREV_9100(ah))
309 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
310 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
311 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
312 REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
313 REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
314 REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
315 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
316 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
317 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);
319 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
322 static bool ath9k_hw_chip_test(struct ath_hw *ah)
324 u32 regAddr[2] = { AR_STA_ID0, AR_PHY_BASE + (8 << 2) };
326 u32 patternData[4] = { 0x55555555,
332 for (i = 0; i < 2; i++) {
333 u32 addr = regAddr[i];
336 regHold[i] = REG_READ(ah, addr);
337 for (j = 0; j < 0x100; j++) {
338 wrData = (j << 16) | j;
339 REG_WRITE(ah, addr, wrData);
340 rdData = REG_READ(ah, addr);
341 if (rdData != wrData) {
342 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
343 "address test failed "
344 "addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
345 addr, wrData, rdData);
349 for (j = 0; j < 4; j++) {
350 wrData = patternData[j];
351 REG_WRITE(ah, addr, wrData);
352 rdData = REG_READ(ah, addr);
353 if (wrData != rdData) {
354 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
355 "address test failed "
356 "addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
357 addr, wrData, rdData);
361 REG_WRITE(ah, regAddr[i], regHold[i]);
368 static const char *ath9k_hw_devname(u16 devid)
371 case AR5416_DEVID_PCI:
372 return "Atheros 5416";
373 case AR5416_DEVID_PCIE:
374 return "Atheros 5418";
375 case AR9160_DEVID_PCI:
376 return "Atheros 9160";
377 case AR5416_AR9100_DEVID:
378 return "Atheros 9100";
379 case AR9280_DEVID_PCI:
380 case AR9280_DEVID_PCIE:
381 return "Atheros 9280";
382 case AR9285_DEVID_PCIE:
383 return "Atheros 9285";
384 case AR5416_DEVID_AR9287_PCI:
385 case AR5416_DEVID_AR9287_PCIE:
386 return "Atheros 9287";
392 static void ath9k_hw_init_config(struct ath_hw *ah)
396 ah->config.dma_beacon_response_time = 2;
397 ah->config.sw_beacon_response_time = 10;
398 ah->config.additional_swba_backoff = 0;
399 ah->config.ack_6mb = 0x0;
400 ah->config.cwm_ignore_extcca = 0;
401 ah->config.pcie_powersave_enable = 0;
402 ah->config.pcie_clock_req = 0;
403 ah->config.pcie_waen = 0;
404 ah->config.analog_shiftreg = 1;
405 ah->config.ht_enable = 1;
406 ah->config.ofdm_trig_low = 200;
407 ah->config.ofdm_trig_high = 500;
408 ah->config.cck_trig_high = 200;
409 ah->config.cck_trig_low = 100;
410 ah->config.enable_ani = 1;
411 ah->config.diversity_control = ATH9K_ANT_VARIABLE;
412 ah->config.antenna_switch_swap = 0;
414 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
415 ah->config.spurchans[i][0] = AR_NO_SPUR;
416 ah->config.spurchans[i][1] = AR_NO_SPUR;
419 ah->config.intr_mitigation = true;
422 * We need this for PCI devices only (Cardbus, PCI, miniPCI)
423 * _and_ only on multiprocessor systems (SMP/HT).
424 * This means we use it for all AR5416 devices, and the few
425 * minor PCI AR9280 devices out there.
427 * Serialization is required because these devices do not handle
428 * well the case of two concurrent reads/writes due to the latency
429 * involved. During one read/write another read/write can be issued
430 * on another CPU while the previous read/write may still be working
431 * on our hardware; if we hit this case the hardware hangs in a loop.
432 * We prevent this by serializing reads and writes.
434 * This issue is not present on PCI-Express devices or pre-AR5416
435 * devices (legacy, 802.11abg).
437 if (num_possible_cpus() > 1)
438 ah->config.serialize_regmode = SER_REG_MODE_AUTO;
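/*
 * SER_REG_MODE_AUTO is resolved later in ath9k_hw_init(): it is turned
 * into SER_REG_MODE_ON for AR5416 PCI parts and for non-PCI-Express
 * AR9280, and left off otherwise (see the serialize_regmode handling
 * there).
 */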
441 static void ath9k_hw_init_defaults(struct ath_hw *ah)
443 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
445 regulatory->country_code = CTRY_DEFAULT;
446 regulatory->power_limit = MAX_RATE_POWER;
447 regulatory->tp_scale = ATH9K_TP_SCALE_MAX;
449 ah->hw_version.magic = AR5416_MAGIC;
450 ah->hw_version.subvendorid = 0;
453 if (ah->hw_version.devid == AR5416_AR9100_DEVID)
454 ah->hw_version.macVersion = AR_SREV_VERSION_9100;
455 if (!AR_SREV_9100(ah))
456 ah->ah_flags = AH_USE_EEPROM;
459 ah->sta_id1_defaults = AR_STA_ID1_CRPT_MIC_ENABLE;
460 ah->beacon_interval = 100;
461 ah->enable_32kHz_clock = DONT_USE_32KHZ;
462 ah->slottime = (u32) -1;
463 ah->acktimeout = (u32) -1;
464 ah->ctstimeout = (u32) -1;
465 ah->globaltxtimeout = (u32) -1;
467 ah->gbeacon_rate = 0;
469 ah->power_mode = ATH9K_PM_UNDEFINED;
472 static int ath9k_hw_rfattach(struct ath_hw *ah)
474 bool rfStatus = false;
477 rfStatus = ath9k_hw_init_rf(ah, &ecode);
479 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
480 "RF setup failed, status: %u\n", ecode);
487 static int ath9k_hw_rf_claim(struct ath_hw *ah)
491 REG_WRITE(ah, AR_PHY(0), 0x00000007);
493 val = ath9k_hw_get_radiorev(ah);
494 switch (val & AR_RADIO_SREV_MAJOR) {
496 val = AR_RAD5133_SREV_MAJOR;
498 case AR_RAD5133_SREV_MAJOR:
499 case AR_RAD5122_SREV_MAJOR:
500 case AR_RAD2133_SREV_MAJOR:
501 case AR_RAD2122_SREV_MAJOR:
504 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
505 "Radio Chip Rev 0x%02X not supported\n",
506 val & AR_RADIO_SREV_MAJOR);
510 ah->hw_version.analog5GhzRev = val;
515 static int ath9k_hw_init_macaddr(struct ath_hw *ah)
522 for (i = 0; i < 3; i++) {
523 eeval = ah->eep_ops->get_eeprom(ah, AR_EEPROM_MAC(i));
525 ah->macaddr[2 * i] = eeval >> 8;
526 ah->macaddr[2 * i + 1] = eeval & 0xff;
528 if (sum == 0 || sum == 0xffff * 3)
529 return -EADDRNOTAVAIL;
534 static void ath9k_hw_init_rxgain_ini(struct ath_hw *ah)
538 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_17) {
539 rxgain_type = ah->eep_ops->get_eeprom(ah, EEP_RXGAIN_TYPE);
541 if (rxgain_type == AR5416_EEP_RXGAIN_13DB_BACKOFF)
542 INIT_INI_ARRAY(&ah->iniModesRxGain,
543 ar9280Modes_backoff_13db_rxgain_9280_2,
544 ARRAY_SIZE(ar9280Modes_backoff_13db_rxgain_9280_2), 6);
545 else if (rxgain_type == AR5416_EEP_RXGAIN_23DB_BACKOFF)
546 INIT_INI_ARRAY(&ah->iniModesRxGain,
547 ar9280Modes_backoff_23db_rxgain_9280_2,
548 ARRAY_SIZE(ar9280Modes_backoff_23db_rxgain_9280_2), 6);
550 INIT_INI_ARRAY(&ah->iniModesRxGain,
551 ar9280Modes_original_rxgain_9280_2,
552 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
554 INIT_INI_ARRAY(&ah->iniModesRxGain,
555 ar9280Modes_original_rxgain_9280_2,
556 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
560 static void ath9k_hw_init_txgain_ini(struct ath_hw *ah)
564 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_19) {
565 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
567 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER)
568 INIT_INI_ARRAY(&ah->iniModesTxGain,
569 ar9280Modes_high_power_tx_gain_9280_2,
570 ARRAY_SIZE(ar9280Modes_high_power_tx_gain_9280_2), 6);
572 INIT_INI_ARRAY(&ah->iniModesTxGain,
573 ar9280Modes_original_tx_gain_9280_2,
574 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
576 INIT_INI_ARRAY(&ah->iniModesTxGain,
577 ar9280Modes_original_tx_gain_9280_2,
578 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
582 static int ath9k_hw_post_init(struct ath_hw *ah)
586 if (!ath9k_hw_chip_test(ah))
589 ecode = ath9k_hw_rf_claim(ah);
593 ecode = ath9k_hw_eeprom_init(ah);
597 DPRINTF(ah->ah_sc, ATH_DBG_CONFIG, "Eeprom VER: %d, REV: %d\n",
598 ah->eep_ops->get_eeprom_ver(ah), ah->eep_ops->get_eeprom_rev(ah));
600 ecode = ath9k_hw_rfattach(ah);
604 if (!AR_SREV_9100(ah)) {
605 ath9k_hw_ani_setup(ah);
606 ath9k_hw_ani_init(ah);
612 static bool ath9k_hw_devid_supported(u16 devid)
615 case AR5416_DEVID_PCI:
616 case AR5416_DEVID_PCIE:
617 case AR5416_AR9100_DEVID:
618 case AR9160_DEVID_PCI:
619 case AR9280_DEVID_PCI:
620 case AR9280_DEVID_PCIE:
621 case AR9285_DEVID_PCIE:
622 case AR5416_DEVID_AR9287_PCI:
623 case AR5416_DEVID_AR9287_PCIE:
631 static bool ath9k_hw_macversion_supported(u32 macversion)
633 switch (macversion) {
634 case AR_SREV_VERSION_5416_PCI:
635 case AR_SREV_VERSION_5416_PCIE:
636 case AR_SREV_VERSION_9160:
637 case AR_SREV_VERSION_9100:
638 case AR_SREV_VERSION_9280:
639 case AR_SREV_VERSION_9285:
640 case AR_SREV_VERSION_9287:
643 case AR_SREV_VERSION_9271:
650 static void ath9k_hw_init_cal_settings(struct ath_hw *ah)
652 if (AR_SREV_9160_10_OR_LATER(ah)) {
653 if (AR_SREV_9280_10_OR_LATER(ah)) {
654 ah->iq_caldata.calData = &iq_cal_single_sample;
655 ah->adcgain_caldata.calData =
656 &adc_gain_cal_single_sample;
657 ah->adcdc_caldata.calData =
658 &adc_dc_cal_single_sample;
659 ah->adcdc_calinitdata.calData =
662 ah->iq_caldata.calData = &iq_cal_multi_sample;
663 ah->adcgain_caldata.calData =
664 &adc_gain_cal_multi_sample;
665 ah->adcdc_caldata.calData =
666 &adc_dc_cal_multi_sample;
667 ah->adcdc_calinitdata.calData =
670 ah->supp_cals = ADC_GAIN_CAL | ADC_DC_CAL | IQ_MISMATCH_CAL;
674 static void ath9k_hw_init_mode_regs(struct ath_hw *ah)
676 if (AR_SREV_9271(ah)) {
677 INIT_INI_ARRAY(&ah->iniModes, ar9271Modes_9271_1_0,
678 ARRAY_SIZE(ar9271Modes_9271_1_0), 6);
679 INIT_INI_ARRAY(&ah->iniCommon, ar9271Common_9271_1_0,
680 ARRAY_SIZE(ar9271Common_9271_1_0), 2);
684 if (AR_SREV_9287_11_OR_LATER(ah)) {
685 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_1,
686 ARRAY_SIZE(ar9287Modes_9287_1_1), 6);
687 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_1,
688 ARRAY_SIZE(ar9287Common_9287_1_1), 2);
689 if (ah->config.pcie_clock_req)
690 INIT_INI_ARRAY(&ah->iniPcieSerdes,
691 ar9287PciePhy_clkreq_off_L1_9287_1_1,
692 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_1), 2);
694 INIT_INI_ARRAY(&ah->iniPcieSerdes,
695 ar9287PciePhy_clkreq_always_on_L1_9287_1_1,
696 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_1),
698 } else if (AR_SREV_9287_10_OR_LATER(ah)) {
699 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_0,
700 ARRAY_SIZE(ar9287Modes_9287_1_0), 6);
701 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_0,
702 ARRAY_SIZE(ar9287Common_9287_1_0), 2);
704 if (ah->config.pcie_clock_req)
705 INIT_INI_ARRAY(&ah->iniPcieSerdes,
706 ar9287PciePhy_clkreq_off_L1_9287_1_0,
707 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_0), 2);
709 INIT_INI_ARRAY(&ah->iniPcieSerdes,
710 ar9287PciePhy_clkreq_always_on_L1_9287_1_0,
711 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_0),
713 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
716 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285_1_2,
717 ARRAY_SIZE(ar9285Modes_9285_1_2), 6);
718 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285_1_2,
719 ARRAY_SIZE(ar9285Common_9285_1_2), 2);
721 if (ah->config.pcie_clock_req) {
722 INIT_INI_ARRAY(&ah->iniPcieSerdes,
723 ar9285PciePhy_clkreq_off_L1_9285_1_2,
724 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285_1_2), 2);
726 INIT_INI_ARRAY(&ah->iniPcieSerdes,
727 ar9285PciePhy_clkreq_always_on_L1_9285_1_2,
728 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285_1_2),
731 } else if (AR_SREV_9285_10_OR_LATER(ah)) {
732 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285,
733 ARRAY_SIZE(ar9285Modes_9285), 6);
734 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285,
735 ARRAY_SIZE(ar9285Common_9285), 2);
737 if (ah->config.pcie_clock_req) {
738 INIT_INI_ARRAY(&ah->iniPcieSerdes,
739 ar9285PciePhy_clkreq_off_L1_9285,
740 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285), 2);
742 INIT_INI_ARRAY(&ah->iniPcieSerdes,
743 ar9285PciePhy_clkreq_always_on_L1_9285,
744 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285), 2);
746 } else if (AR_SREV_9280_20_OR_LATER(ah)) {
747 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280_2,
748 ARRAY_SIZE(ar9280Modes_9280_2), 6);
749 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280_2,
750 ARRAY_SIZE(ar9280Common_9280_2), 2);
752 if (ah->config.pcie_clock_req) {
753 INIT_INI_ARRAY(&ah->iniPcieSerdes,
754 ar9280PciePhy_clkreq_off_L1_9280,
755 ARRAY_SIZE(ar9280PciePhy_clkreq_off_L1_9280), 2);
757 INIT_INI_ARRAY(&ah->iniPcieSerdes,
758 ar9280PciePhy_clkreq_always_on_L1_9280,
759 ARRAY_SIZE(ar9280PciePhy_clkreq_always_on_L1_9280), 2);
761 INIT_INI_ARRAY(&ah->iniModesAdditional,
762 ar9280Modes_fast_clock_9280_2,
763 ARRAY_SIZE(ar9280Modes_fast_clock_9280_2), 3);
764 } else if (AR_SREV_9280_10_OR_LATER(ah)) {
765 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280,
766 ARRAY_SIZE(ar9280Modes_9280), 6);
767 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280,
768 ARRAY_SIZE(ar9280Common_9280), 2);
769 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
770 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9160,
771 ARRAY_SIZE(ar5416Modes_9160), 6);
772 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9160,
773 ARRAY_SIZE(ar5416Common_9160), 2);
774 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9160,
775 ARRAY_SIZE(ar5416Bank0_9160), 2);
776 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9160,
777 ARRAY_SIZE(ar5416BB_RfGain_9160), 3);
778 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9160,
779 ARRAY_SIZE(ar5416Bank1_9160), 2);
780 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9160,
781 ARRAY_SIZE(ar5416Bank2_9160), 2);
782 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9160,
783 ARRAY_SIZE(ar5416Bank3_9160), 3);
784 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9160,
785 ARRAY_SIZE(ar5416Bank6_9160), 3);
786 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9160,
787 ARRAY_SIZE(ar5416Bank6TPC_9160), 3);
788 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9160,
789 ARRAY_SIZE(ar5416Bank7_9160), 2);
790 if (AR_SREV_9160_11(ah)) {
791 INIT_INI_ARRAY(&ah->iniAddac,
793 ARRAY_SIZE(ar5416Addac_91601_1), 2);
795 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9160,
796 ARRAY_SIZE(ar5416Addac_9160), 2);
798 } else if (AR_SREV_9100_OR_LATER(ah)) {
799 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9100,
800 ARRAY_SIZE(ar5416Modes_9100), 6);
801 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9100,
802 ARRAY_SIZE(ar5416Common_9100), 2);
803 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9100,
804 ARRAY_SIZE(ar5416Bank0_9100), 2);
805 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9100,
806 ARRAY_SIZE(ar5416BB_RfGain_9100), 3);
807 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9100,
808 ARRAY_SIZE(ar5416Bank1_9100), 2);
809 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9100,
810 ARRAY_SIZE(ar5416Bank2_9100), 2);
811 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9100,
812 ARRAY_SIZE(ar5416Bank3_9100), 3);
813 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9100,
814 ARRAY_SIZE(ar5416Bank6_9100), 3);
815 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9100,
816 ARRAY_SIZE(ar5416Bank6TPC_9100), 3);
817 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9100,
818 ARRAY_SIZE(ar5416Bank7_9100), 2);
819 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9100,
820 ARRAY_SIZE(ar5416Addac_9100), 2);
822 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes,
823 ARRAY_SIZE(ar5416Modes), 6);
824 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common,
825 ARRAY_SIZE(ar5416Common), 2);
826 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0,
827 ARRAY_SIZE(ar5416Bank0), 2);
828 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain,
829 ARRAY_SIZE(ar5416BB_RfGain), 3);
830 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1,
831 ARRAY_SIZE(ar5416Bank1), 2);
832 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2,
833 ARRAY_SIZE(ar5416Bank2), 2);
834 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3,
835 ARRAY_SIZE(ar5416Bank3), 3);
836 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6,
837 ARRAY_SIZE(ar5416Bank6), 3);
838 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC,
839 ARRAY_SIZE(ar5416Bank6TPC), 3);
840 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7,
841 ARRAY_SIZE(ar5416Bank7), 2);
842 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac,
843 ARRAY_SIZE(ar5416Addac), 2);
847 static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah)
849 if (AR_SREV_9287_11(ah))
850 INIT_INI_ARRAY(&ah->iniModesRxGain,
851 ar9287Modes_rx_gain_9287_1_1,
852 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_1), 6);
853 else if (AR_SREV_9287_10(ah))
854 INIT_INI_ARRAY(&ah->iniModesRxGain,
855 ar9287Modes_rx_gain_9287_1_0,
856 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_0), 6);
857 else if (AR_SREV_9280_20(ah))
858 ath9k_hw_init_rxgain_ini(ah);
860 if (AR_SREV_9287_11(ah)) {
861 INIT_INI_ARRAY(&ah->iniModesTxGain,
862 ar9287Modes_tx_gain_9287_1_1,
863 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_1), 6);
864 } else if (AR_SREV_9287_10(ah)) {
865 INIT_INI_ARRAY(&ah->iniModesTxGain,
866 ar9287Modes_tx_gain_9287_1_0,
867 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_0), 6);
868 } else if (AR_SREV_9280_20(ah)) {
869 ath9k_hw_init_txgain_ini(ah);
870 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
871 u32 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
874 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER) {
875 INIT_INI_ARRAY(&ah->iniModesTxGain,
876 ar9285Modes_high_power_tx_gain_9285_1_2,
877 ARRAY_SIZE(ar9285Modes_high_power_tx_gain_9285_1_2), 6);
879 INIT_INI_ARRAY(&ah->iniModesTxGain,
880 ar9285Modes_original_tx_gain_9285_1_2,
881 ARRAY_SIZE(ar9285Modes_original_tx_gain_9285_1_2), 6);
887 static void ath9k_hw_init_11a_eeprom_fix(struct ath_hw *ah)
891 if ((ah->hw_version.devid == AR9280_DEVID_PCI) &&
892 test_bit(ATH9K_MODE_11A, ah->caps.wireless_modes)) {
895 for (i = 0; i < ah->iniModes.ia_rows; i++) {
896 u32 reg = INI_RA(&ah->iniModes, i, 0);
898 for (j = 1; j < ah->iniModes.ia_columns; j++) {
899 u32 val = INI_RA(&ah->iniModes, i, j);
901 INI_RA(&ah->iniModes, i, j) =
902 ath9k_hw_ini_fixup(ah,
910 int ath9k_hw_init(struct ath_hw *ah)
914 if (!ath9k_hw_devid_supported(ah->hw_version.devid))
917 ath9k_hw_init_defaults(ah);
918 ath9k_hw_init_config(ah);
920 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
921 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, "Couldn't reset chip\n");
925 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
926 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, "Couldn't wakeup chip\n");
930 if (ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
931 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
932 (AR_SREV_9280(ah) && !ah->is_pciexpress)) {
933 ah->config.serialize_regmode =
936 ah->config.serialize_regmode =
941 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "serialize_regmode is %d\n",
942 ah->config.serialize_regmode);
944 if (!ath9k_hw_macversion_supported(ah->hw_version.macVersion)) {
945 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
946 "Mac Chip Rev 0x%02x.%x is not supported by "
947 "this driver\n", ah->hw_version.macVersion,
948 ah->hw_version.macRev);
952 if (AR_SREV_9100(ah)) {
953 ah->iq_caldata.calData = &iq_cal_multi_sample;
954 ah->supp_cals = IQ_MISMATCH_CAL;
955 ah->is_pciexpress = false;
958 if (AR_SREV_9271(ah))
959 ah->is_pciexpress = false;
961 ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);
963 ath9k_hw_init_cal_settings(ah);
965 ah->ani_function = ATH9K_ANI_ALL;
966 if (AR_SREV_9280_10_OR_LATER(ah))
967 ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL;
969 ath9k_hw_init_mode_regs(ah);
971 if (ah->is_pciexpress)
972 ath9k_hw_configpcipowersave(ah, 0);
974 ath9k_hw_disablepcie(ah);
976 r = ath9k_hw_post_init(ah);
980 ath9k_hw_init_mode_gain_regs(ah);
981 ath9k_hw_fill_cap_info(ah);
982 ath9k_hw_init_11a_eeprom_fix(ah);
984 r = ath9k_hw_init_macaddr(ah);
986 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
987 "Failed to initialize MAC address\n");
991 if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
992 ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
994 ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);
996 ath9k_init_nfcal_hist_buffer(ah);
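/*
 * ath9k_hw_init_bb() - enable the baseband and wait for the synthesizer
 * to settle. The delay read from AR_PHY_RX_DELAY is rescaled for CCK
 * (IS_CHAN_B) channels before being added to BASE_ACTIVATE_DELAY.
 */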
1001 static void ath9k_hw_init_bb(struct ath_hw *ah,
1002 struct ath9k_channel *chan)
1006 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1007 if (IS_CHAN_B(chan))
1008 synthDelay = (4 * synthDelay) / 22;
1012 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
1014 udelay(synthDelay + BASE_ACTIVATE_DELAY);
1017 static void ath9k_hw_init_qos(struct ath_hw *ah)
1019 REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
1020 REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);
1022 REG_WRITE(ah, AR_QOS_NO_ACK,
1023 SM(2, AR_QOS_NO_ACK_TWO_BIT) |
1024 SM(5, AR_QOS_NO_ACK_BIT_OFF) |
1025 SM(0, AR_QOS_NO_ACK_BYTE_OFF));
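/*
 * The AR_QOS_NO_ACK fields above describe where the ack-policy bits live
 * in the QoS Control field: a 2-bit wide field at bit offset 5 of byte 0,
 * which matches the 802.11 QoS ack-policy subfield.
 */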
1027 REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
1028 REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
1029 REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
1030 REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
1031 REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
1034 static void ath9k_hw_init_pll(struct ath_hw *ah,
1035 struct ath9k_channel *chan)
1039 if (AR_SREV_9100(ah)) {
1040 if (chan && IS_CHAN_5GHZ(chan))
1045 if (AR_SREV_9280_10_OR_LATER(ah)) {
1046 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1048 if (chan && IS_CHAN_HALF_RATE(chan))
1049 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1050 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1051 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1053 if (chan && IS_CHAN_5GHZ(chan)) {
1054 pll |= SM(0x28, AR_RTC_9160_PLL_DIV);
1057 if (AR_SREV_9280_20(ah)) {
1058 if (((chan->channel % 20) == 0)
1059 || ((chan->channel % 10) == 0))
1065 pll |= SM(0x2c, AR_RTC_9160_PLL_DIV);
1068 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
1070 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1072 if (chan && IS_CHAN_HALF_RATE(chan))
1073 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1074 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1075 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1077 if (chan && IS_CHAN_5GHZ(chan))
1078 pll |= SM(0x50, AR_RTC_9160_PLL_DIV);
1080 pll |= SM(0x58, AR_RTC_9160_PLL_DIV);
1082 pll = AR_RTC_PLL_REFDIV_5 | AR_RTC_PLL_DIV2;
1084 if (chan && IS_CHAN_HALF_RATE(chan))
1085 pll |= SM(0x1, AR_RTC_PLL_CLKSEL);
1086 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1087 pll |= SM(0x2, AR_RTC_PLL_CLKSEL);
1089 if (chan && IS_CHAN_5GHZ(chan))
1090 pll |= SM(0xa, AR_RTC_PLL_DIV);
1092 pll |= SM(0xb, AR_RTC_PLL_DIV);
1095 REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);
1097 udelay(RTC_PLL_SETTLE_DELAY);
1099 REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
1102 static void ath9k_hw_init_chain_masks(struct ath_hw *ah)
1104 int rx_chainmask, tx_chainmask;
1106 rx_chainmask = ah->rxchainmask;
1107 tx_chainmask = ah->txchainmask;
1109 switch (rx_chainmask) {
1111 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1112 AR_PHY_SWAP_ALT_CHAIN);
1114 if (((ah)->hw_version.macVersion <= AR_SREV_VERSION_9160)) {
1115 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, 0x7);
1116 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, 0x7);
1122 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
1123 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
1129 REG_WRITE(ah, AR_SELFGEN_MASK, tx_chainmask);
1130 if (tx_chainmask == 0x5) {
1131 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1132 AR_PHY_SWAP_ALT_CHAIN);
1134 if (AR_SREV_9100(ah))
1135 REG_WRITE(ah, AR_PHY_ANALOG_SWAP,
1136 REG_READ(ah, AR_PHY_ANALOG_SWAP) | 0x00000001);
1139 static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
1140 enum nl80211_iftype opmode)
1142 ah->mask_reg = AR_IMR_TXERR |
1148 if (ah->config.intr_mitigation)
1149 ah->mask_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
1151 ah->mask_reg |= AR_IMR_RXOK;
1153 ah->mask_reg |= AR_IMR_TXOK;
1155 if (opmode == NL80211_IFTYPE_AP)
1156 ah->mask_reg |= AR_IMR_MIB;
1158 REG_WRITE(ah, AR_IMR, ah->mask_reg);
1159 REG_WRITE(ah, AR_IMR_S2, REG_READ(ah, AR_IMR_S2) | AR_IMR_S2_GTT);
1161 if (!AR_SREV_9100(ah)) {
1162 REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
1163 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, AR_INTR_SYNC_DEFAULT);
1164 REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
1168 static bool ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
1170 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_ACK))) {
1171 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "bad ack timeout %u\n", us);
1172 ah->acktimeout = (u32) -1;
1175 REG_RMW_FIELD(ah, AR_TIME_OUT,
1176 AR_TIME_OUT_ACK, ath9k_hw_mac_to_clks(ah, us));
1177 ah->acktimeout = us;
1182 static bool ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
1184 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_CTS))) {
1185 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "bad cts timeout %u\n", us);
1186 ah->ctstimeout = (u32) -1;
1189 REG_RMW_FIELD(ah, AR_TIME_OUT,
1190 AR_TIME_OUT_CTS, ath9k_hw_mac_to_clks(ah, us));
1191 ah->ctstimeout = us;
1196 static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
1199 DPRINTF(ah->ah_sc, ATH_DBG_XMIT,
1200 "bad global tx timeout %u\n", tu);
1201 ah->globaltxtimeout = (u32) -1;
1204 REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
1205 ah->globaltxtimeout = tu;
1210 static void ath9k_hw_init_user_settings(struct ath_hw *ah)
1212 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "ah->misc_mode 0x%x\n",
1215 if (ah->misc_mode != 0)
1216 REG_WRITE(ah, AR_PCU_MISC,
1217 REG_READ(ah, AR_PCU_MISC) | ah->misc_mode);
1218 if (ah->slottime != (u32) -1)
1219 ath9k_hw_setslottime(ah, ah->slottime);
1220 if (ah->acktimeout != (u32) -1)
1221 ath9k_hw_set_ack_timeout(ah, ah->acktimeout);
1222 if (ah->ctstimeout != (u32) -1)
1223 ath9k_hw_set_cts_timeout(ah, ah->ctstimeout);
1224 if (ah->globaltxtimeout != (u32) -1)
1225 ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
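/*
 * ath9k_hw_probe() - map a PCI vendor/device ID pair to a printable
 * device name. Returns NULL if the vendor is not Atheros; unknown
 * Atheros device IDs fall through to ath9k_hw_devname()'s default.
 */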
1228 const char *ath9k_hw_probe(u16 vendorid, u16 devid)
1230 return vendorid == ATHEROS_VENDOR_ID ?
1231 ath9k_hw_devname(devid) : NULL;
1234 void ath9k_hw_detach(struct ath_hw *ah)
1236 if (!AR_SREV_9100(ah))
1237 ath9k_hw_ani_disable(ah);
1239 ath9k_hw_rf_free(ah);
1240 ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
1249 static void ath9k_hw_override_ini(struct ath_hw *ah,
1250 struct ath9k_channel *chan)
1254 if (AR_SREV_9271(ah)) {
1256 * Enable spectral scan as a workaround for issues with stuck
1257 * beacons on AR9271 1.0. The stuck-beacon issue is not seen on
1260 if (AR_SREV_9271_10(ah)) {
1261 val = REG_READ(ah, AR_PHY_SPECTRAL_SCAN) | AR_PHY_SPECTRAL_SCAN_ENABLE;
1262 REG_WRITE(ah, AR_PHY_SPECTRAL_SCAN, val);
1264 else if (AR_SREV_9271_11(ah))
1266 * Change the AR_PHY_RF_CTL3 setting to fix a MAC issue
1267 * present on AR9271 1.1.
1269 REG_WRITE(ah, AR_PHY_RF_CTL3, 0x3a020001);
1274 * Set RX_ABORT and RX_DIS, and clear them only after
1275 * RXE is set for the MAC. This prevents frames with corrupted
1276 * descriptor status.
1278 REG_SET_BIT(ah, AR_DIAG_SW, (AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT));
1281 if (!AR_SREV_5416_20_OR_LATER(ah) ||
1282 AR_SREV_9280_10_OR_LATER(ah))
1285 * Disable BB clock gating
1286 * Necessary to avoid issues on AR5416 2.0
1288 REG_WRITE(ah, 0x9800 + (651 << 2), 0x11);
1291 static u32 ath9k_hw_def_ini_fixup(struct ath_hw *ah,
1292 struct ar5416_eeprom_def *pEepData,
1295 struct base_eep_header *pBase = &(pEepData->baseEepHeader);
1297 switch (ah->hw_version.devid) {
1298 case AR9280_DEVID_PCI:
1299 if (reg == 0x7894) {
1300 DPRINTF(ah->ah_sc, ATH_DBG_EEPROM,
1301 "ini VAL: %x EEPROM: %x\n", value,
1302 (pBase->version & 0xff));
1304 if ((pBase->version & 0xff) > 0x0a) {
1305 DPRINTF(ah->ah_sc, ATH_DBG_EEPROM,
1308 value &= ~AR_AN_TOP2_PWDCLKIND;
1309 value |= AR_AN_TOP2_PWDCLKIND &
1310 (pBase->pwdclkind << AR_AN_TOP2_PWDCLKIND_S);
1312 DPRINTF(ah->ah_sc, ATH_DBG_EEPROM,
1313 "PWDCLKIND Earlier Rev\n");
1316 DPRINTF(ah->ah_sc, ATH_DBG_EEPROM,
1317 "final ini VAL: %x\n", value);
1325 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
1326 struct ar5416_eeprom_def *pEepData,
1329 if (ah->eep_map == EEP_MAP_4KBITS)
1332 return ath9k_hw_def_ini_fixup(ah, pEepData, reg, value);
1335 static void ath9k_olc_init(struct ath_hw *ah)
1339 if (OLC_FOR_AR9287_10_LATER) {
1340 REG_SET_BIT(ah, AR_PHY_TX_PWRCTRL9,
1341 AR_PHY_TX_PWRCTRL9_RES_DC_REMOVAL);
1342 ath9k_hw_analog_shift_rmw(ah, AR9287_AN_TXPC0,
1343 AR9287_AN_TXPC0_TXPCMODE,
1344 AR9287_AN_TXPC0_TXPCMODE_S,
1345 AR9287_AN_TXPC0_TXPCMODE_TEMPSENSE);
1348 for (i = 0; i < AR9280_TX_GAIN_TABLE_SIZE; i++)
1349 ah->originalGain[i] =
1350 MS(REG_READ(ah, AR_PHY_TX_GAIN_TBL1 + i * 4),
1356 static u32 ath9k_regd_get_ctl(struct ath_regulatory *reg,
1357 struct ath9k_channel *chan)
1359 u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);
1361 if (IS_CHAN_B(chan))
1363 else if (IS_CHAN_G(chan))
1371 static int ath9k_hw_process_ini(struct ath_hw *ah,
1372 struct ath9k_channel *chan,
1373 enum ath9k_ht_macmode macmode)
1375 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1376 int i, regWrites = 0;
1377 struct ieee80211_channel *channel = chan->chan;
1378 u32 modesIndex, freqIndex;
1380 switch (chan->chanmode) {
1382 case CHANNEL_A_HT20:
1386 case CHANNEL_A_HT40PLUS:
1387 case CHANNEL_A_HT40MINUS:
1392 case CHANNEL_G_HT20:
1397 case CHANNEL_G_HT40PLUS:
1398 case CHANNEL_G_HT40MINUS:
1407 REG_WRITE(ah, AR_PHY(0), 0x00000007);
1408 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
1409 ah->eep_ops->set_addac(ah, chan);
1411 if (AR_SREV_5416_22_OR_LATER(ah)) {
1412 REG_WRITE_ARRAY(&ah->iniAddac, 1, regWrites);
1414 struct ar5416IniArray temp;
1416 sizeof(u32) * ah->iniAddac.ia_rows *
1417 ah->iniAddac.ia_columns;
1419 memcpy(ah->addac5416_21,
1420 ah->iniAddac.ia_array, addacSize);
1422 (ah->addac5416_21)[31 * ah->iniAddac.ia_columns + 1] = 0;
1424 temp.ia_array = ah->addac5416_21;
1425 temp.ia_columns = ah->iniAddac.ia_columns;
1426 temp.ia_rows = ah->iniAddac.ia_rows;
1427 REG_WRITE_ARRAY(&temp, 1, regWrites);
1430 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
1432 for (i = 0; i < ah->iniModes.ia_rows; i++) {
1433 u32 reg = INI_RA(&ah->iniModes, i, 0);
1434 u32 val = INI_RA(&ah->iniModes, i, modesIndex);
1436 REG_WRITE(ah, reg, val);
1438 if (reg >= 0x7800 && reg < 0x78a0
1439 && ah->config.analog_shiftreg) {
1443 DO_DELAY(regWrites);
1446 if (AR_SREV_9280(ah) || AR_SREV_9287_10_OR_LATER(ah))
1447 REG_WRITE_ARRAY(&ah->iniModesRxGain, modesIndex, regWrites);
1449 if (AR_SREV_9280(ah) || AR_SREV_9285_12_OR_LATER(ah) ||
1450 AR_SREV_9287_10_OR_LATER(ah))
1451 REG_WRITE_ARRAY(&ah->iniModesTxGain, modesIndex, regWrites);
1453 for (i = 0; i < ah->iniCommon.ia_rows; i++) {
1454 u32 reg = INI_RA(&ah->iniCommon, i, 0);
1455 u32 val = INI_RA(&ah->iniCommon, i, 1);
1457 REG_WRITE(ah, reg, val);
1459 if (reg >= 0x7800 && reg < 0x78a0
1460 && ah->config.analog_shiftreg) {
1464 DO_DELAY(regWrites);
1467 ath9k_hw_write_regs(ah, modesIndex, freqIndex, regWrites);
1469 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan)) {
1470 REG_WRITE_ARRAY(&ah->iniModesAdditional, modesIndex,
1474 ath9k_hw_override_ini(ah, chan);
1475 ath9k_hw_set_regs(ah, chan, macmode);
1476 ath9k_hw_init_chain_masks(ah);
1478 if (OLC_FOR_AR9280_20_LATER)
1481 ah->eep_ops->set_txpower(ah, chan,
1482 ath9k_regd_get_ctl(regulatory, chan),
1483 channel->max_antenna_gain * 2,
1484 channel->max_power * 2,
1485 min((u32) MAX_RATE_POWER,
1486 (u32) regulatory->power_limit));
1488 if (!ath9k_hw_set_rf_regs(ah, chan, freqIndex)) {
1489 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
1490 "ar5416SetRfRegs failed\n");
1497 /****************************************/
1498 /* Reset and Channel Switching Routines */
1499 /****************************************/
1501 static void ath9k_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan)
1508 rfMode |= (IS_CHAN_B(chan) || IS_CHAN_G(chan))
1509 ? AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
1511 if (!AR_SREV_9280_10_OR_LATER(ah))
1512 rfMode |= (IS_CHAN_5GHZ(chan)) ?
1513 AR_PHY_MODE_RF5GHZ : AR_PHY_MODE_RF2GHZ;
1515 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan))
1516 rfMode |= (AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE);
1518 REG_WRITE(ah, AR_PHY_MODE, rfMode);
1521 static void ath9k_hw_mark_phy_inactive(struct ath_hw *ah)
1523 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
1526 static inline void ath9k_hw_set_dma(struct ath_hw *ah)
1531 * set AHB_MODE not to do cacheline prefetches
1533 regval = REG_READ(ah, AR_AHB_MODE);
1534 REG_WRITE(ah, AR_AHB_MODE, regval | AR_AHB_PREFETCH_RD_EN);
1537 * let mac dma reads be in 128 byte chunks
1539 regval = REG_READ(ah, AR_TXCFG) & ~AR_TXCFG_DMASZ_MASK;
1540 REG_WRITE(ah, AR_TXCFG, regval | AR_TXCFG_DMASZ_128B);
1543 * Restore TX Trigger Level to its pre-reset value.
1544 * The initial value depends on whether aggregation is enabled, and is
1545 * adjusted whenever underruns are detected.
1547 REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);
1550 * let mac dma writes be in 128 byte chunks
1552 regval = REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_DMASZ_MASK;
1553 REG_WRITE(ah, AR_RXCFG, regval | AR_RXCFG_DMASZ_128B);
1556 * Set up the receive FIFO threshold to hold off TX activities
1558 REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);
1561 * reduce the number of usable entries in PCU TXBUF to avoid
1562 * wrap around issues.
1564 if (AR_SREV_9285(ah)) {
1565 /* For AR9285 the number of FIFOs is reduced by half,
1566 * so halve the usable TX buffer size as well to
1567 * avoid data/delimiter underruns.
1569 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1570 AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE);
1571 } else if (!AR_SREV_9271(ah)) {
1572 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1573 AR_PCU_TXBUF_CTRL_USABLE_SIZE);
1577 static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
1581 val = REG_READ(ah, AR_STA_ID1);
1582 val &= ~(AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC);
1584 case NL80211_IFTYPE_AP:
1585 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_STA_AP
1586 | AR_STA_ID1_KSRCH_MODE);
1587 REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1589 case NL80211_IFTYPE_ADHOC:
1590 case NL80211_IFTYPE_MESH_POINT:
1591 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_ADHOC
1592 | AR_STA_ID1_KSRCH_MODE);
1593 REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1595 case NL80211_IFTYPE_STATION:
1596 case NL80211_IFTYPE_MONITOR:
1597 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_KSRCH_MODE);
1602 static inline void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah,
1607 u32 coef_exp, coef_man;
1609 for (coef_exp = 31; coef_exp > 0; coef_exp--)
1610 if ((coef_scaled >> coef_exp) & 0x1)
1613 coef_exp = 14 - (coef_exp - COEF_SCALE_S);
1615 coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));
1617 *coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
1618 *coef_exponent = coef_exp - 16;
1621 static void ath9k_hw_set_delta_slope(struct ath_hw *ah,
1622 struct ath9k_channel *chan)
1624 u32 coef_scaled, ds_coef_exp, ds_coef_man;
1625 u32 clockMhzScaled = 0x64000000;
1626 struct chan_centers centers;
1628 if (IS_CHAN_HALF_RATE(chan))
1629 clockMhzScaled = clockMhzScaled >> 1;
1630 else if (IS_CHAN_QUARTER_RATE(chan))
1631 clockMhzScaled = clockMhzScaled >> 2;
1633 ath9k_hw_get_channel_centers(ah, chan, &centers);
1634 coef_scaled = clockMhzScaled / centers.synth_center;
1636 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1639 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1640 AR_PHY_TIMING3_DSC_MAN, ds_coef_man);
1641 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1642 AR_PHY_TIMING3_DSC_EXP, ds_coef_exp);
1644 coef_scaled = (9 * coef_scaled) / 10;
1646 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1649 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1650 AR_PHY_HALFGI_DSC_MAN, ds_coef_man);
1651 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1652 AR_PHY_HALFGI_DSC_EXP, ds_coef_exp);
1655 static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
1660 if (AR_SREV_9100(ah)) {
1661 u32 val = REG_READ(ah, AR_RTC_DERIVED_CLK);
1662 val &= ~AR_RTC_DERIVED_CLK_PERIOD;
1663 val |= SM(1, AR_RTC_DERIVED_CLK_PERIOD);
1664 REG_WRITE(ah, AR_RTC_DERIVED_CLK, val);
1665 (void)REG_READ(ah, AR_RTC_DERIVED_CLK);
1668 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1669 AR_RTC_FORCE_WAKE_ON_INT);
1671 if (AR_SREV_9100(ah)) {
1672 rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
1673 AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
1675 tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
1677 (AR_INTR_SYNC_LOCAL_TIMEOUT |
1678 AR_INTR_SYNC_RADM_CPL_TIMEOUT)) {
1679 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
1680 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
1682 REG_WRITE(ah, AR_RC, AR_RC_AHB);
1685 rst_flags = AR_RTC_RC_MAC_WARM;
1686 if (type == ATH9K_RESET_COLD)
1687 rst_flags |= AR_RTC_RC_MAC_COLD;
1690 REG_WRITE(ah, AR_RTC_RC, rst_flags);
1693 REG_WRITE(ah, AR_RTC_RC, 0);
1694 if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
1695 DPRINTF(ah->ah_sc, ATH_DBG_RESET,
1696 "RTC stuck in MAC reset\n");
1700 if (!AR_SREV_9100(ah))
1701 REG_WRITE(ah, AR_RC, 0);
1703 ath9k_hw_init_pll(ah, NULL);
1705 if (AR_SREV_9100(ah))
1711 static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
1713 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1714 AR_RTC_FORCE_WAKE_ON_INT);
1716 if (!AR_SREV_9100(ah))
1717 REG_WRITE(ah, AR_RC, AR_RC_AHB);
1719 REG_WRITE(ah, AR_RTC_RESET, 0);
1722 if (!AR_SREV_9100(ah))
1723 REG_WRITE(ah, AR_RC, 0);
1725 REG_WRITE(ah, AR_RTC_RESET, 1);
1727 if (!ath9k_hw_wait(ah,
1732 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "RTC not waking up\n");
1736 ath9k_hw_read_revisions(ah);
1738 return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
1741 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
1743 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1744 AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);
1747 case ATH9K_RESET_POWER_ON:
1748 return ath9k_hw_set_reset_power_on(ah);
1749 case ATH9K_RESET_WARM:
1750 case ATH9K_RESET_COLD:
1751 return ath9k_hw_set_reset(ah, type);
1757 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan,
1758 enum ath9k_ht_macmode macmode)
1761 u32 enableDacFifo = 0;
1763 if (AR_SREV_9285_10_OR_LATER(ah))
1764 enableDacFifo = (REG_READ(ah, AR_PHY_TURBO) &
1765 AR_PHY_FC_ENABLE_DAC_FIFO);
1767 phymode = AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40
1768 | AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH | enableDacFifo;
1770 if (IS_CHAN_HT40(chan)) {
1771 phymode |= AR_PHY_FC_DYN2040_EN;
1773 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
1774 (chan->chanmode == CHANNEL_G_HT40PLUS))
1775 phymode |= AR_PHY_FC_DYN2040_PRI_CH;
1777 if (ah->extprotspacing == ATH9K_HT_EXTPROTSPACING_25)
1778 phymode |= AR_PHY_FC_DYN2040_EXT_CH;
1780 REG_WRITE(ah, AR_PHY_TURBO, phymode);
1782 ath9k_hw_set11nmac2040(ah, macmode);
1784 REG_WRITE(ah, AR_GTXTO, 25 << AR_GTXTO_TIMEOUT_LIMIT_S);
1785 REG_WRITE(ah, AR_CST, 0xF << AR_CST_TIMEOUT_LIMIT_S);
1788 static bool ath9k_hw_chip_reset(struct ath_hw *ah,
1789 struct ath9k_channel *chan)
1791 if (OLC_FOR_AR9280_20_LATER) {
1792 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON))
1794 } else if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
1797 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1800 ah->chip_fullsleep = false;
1801 ath9k_hw_init_pll(ah, chan);
1802 ath9k_hw_set_rfmode(ah, chan);
1807 static bool ath9k_hw_channel_change(struct ath_hw *ah,
1808 struct ath9k_channel *chan,
1809 enum ath9k_ht_macmode macmode)
1811 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1812 struct ieee80211_channel *channel = chan->chan;
1813 u32 synthDelay, qnum;
1815 for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
1816 if (ath9k_hw_numtxpending(ah, qnum)) {
1817 DPRINTF(ah->ah_sc, ATH_DBG_QUEUE,
1818 "Transmit frames pending on queue %d\n", qnum);
1823 REG_WRITE(ah, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
1824 if (!ath9k_hw_wait(ah, AR_PHY_RFBUS_GRANT, AR_PHY_RFBUS_GRANT_EN,
1825 AR_PHY_RFBUS_GRANT_EN, AH_WAIT_TIMEOUT)) {
1826 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
1827 "Could not kill baseband RX\n");
1831 ath9k_hw_set_regs(ah, chan, macmode);
1833 if (AR_SREV_9280_10_OR_LATER(ah)) {
1834 ath9k_hw_ar9280_set_channel(ah, chan);
1836 if (!(ath9k_hw_set_channel(ah, chan))) {
1837 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
1838 "Failed to set channel\n");
1843 ah->eep_ops->set_txpower(ah, chan,
1844 ath9k_regd_get_ctl(regulatory, chan),
1845 channel->max_antenna_gain * 2,
1846 channel->max_power * 2,
1847 min((u32) MAX_RATE_POWER,
1848 (u32) regulatory->power_limit));
1850 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1851 if (IS_CHAN_B(chan))
1852 synthDelay = (4 * synthDelay) / 22;
1856 udelay(synthDelay + BASE_ACTIVATE_DELAY);
1858 REG_WRITE(ah, AR_PHY_RFBUS_REQ, 0);
1860 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
1861 ath9k_hw_set_delta_slope(ah, chan);
1863 if (AR_SREV_9280_10_OR_LATER(ah))
1864 ath9k_hw_9280_spur_mitigate(ah, chan);
1866 ath9k_hw_spur_mitigate(ah, chan);
1868 if (!chan->oneTimeCalsDone)
1869 chan->oneTimeCalsDone = true;
1874 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
1876 int bb_spur = AR_NO_SPUR;
1879 int bb_spur_off, spur_subchannel_sd;
1881 int spur_delta_phase;
1883 int upper, lower, cur_vit_mask;
1886 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
1887 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
1889 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
1890 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
1892 int inc[4] = { 0, 100, 0, 0 };
1893 struct chan_centers centers;
1900 bool is2GHz = IS_CHAN_2GHZ(chan);
1902 memset(&mask_m, 0, sizeof(int8_t) * 123);
1903 memset(&mask_p, 0, sizeof(int8_t) * 123);
1905 ath9k_hw_get_channel_centers(ah, chan, &centers);
1906 freq = centers.synth_center;
1908 ah->config.spurmode = SPUR_ENABLE_EEPROM;
1909 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
1910 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
1913 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_2GHZ;
1915 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_5GHZ;
1917 if (AR_NO_SPUR == cur_bb_spur)
1919 cur_bb_spur = cur_bb_spur - freq;
1921 if (IS_CHAN_HT40(chan)) {
1922 if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT40) &&
1923 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT40)) {
1924 bb_spur = cur_bb_spur;
1927 } else if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT20) &&
1928 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT20)) {
1929 bb_spur = cur_bb_spur;
1934 if (AR_NO_SPUR == bb_spur) {
1935 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1936 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1939 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1940 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1943 bin = bb_spur * 320;
1945 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
1947 newVal = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
1948 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
1949 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
1950 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
1951 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), newVal);
1953 newVal = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
1954 AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
1955 AR_PHY_SPUR_REG_MASK_RATE_SELECT |
1956 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
1957 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
1958 REG_WRITE(ah, AR_PHY_SPUR_REG, newVal);
1960 if (IS_CHAN_HT40(chan)) {
1962 spur_subchannel_sd = 1;
1963 bb_spur_off = bb_spur + 10;
1965 spur_subchannel_sd = 0;
1966 bb_spur_off = bb_spur - 10;
1969 spur_subchannel_sd = 0;
1970 bb_spur_off = bb_spur;
1973 if (IS_CHAN_HT40(chan))
1975 ((bb_spur * 262144) /
1976 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1979 ((bb_spur * 524288) /
1980 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1982 denominator = IS_CHAN_2GHZ(chan) ? 44 : 40;
1983 spur_freq_sd = ((bb_spur_off * 2048) / denominator) & 0x3ff;
1985 newVal = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
1986 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
1987 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
1988 REG_WRITE(ah, AR_PHY_TIMING11, newVal);
1990 newVal = spur_subchannel_sd << AR_PHY_SFCORR_SPUR_SUBCHNL_SD_S;
1991 REG_WRITE(ah, AR_PHY_SFCORR_EXT, newVal);
1997 for (i = 0; i < 4; i++) {
2001 for (bp = 0; bp < 30; bp++) {
2002 if ((cur_bin > lower) && (cur_bin < upper)) {
2003 pilot_mask = pilot_mask | 0x1 << bp;
2004 chan_mask = chan_mask | 0x1 << bp;
2009 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2010 REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2013 cur_vit_mask = 6100;
2017 for (i = 0; i < 123; i++) {
2018 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2020 /* workaround for gcc bug #37014 */
2021 volatile int tmp_v = abs(cur_vit_mask - bin);
2027 if (cur_vit_mask < 0)
2028 mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2030 mask_p[cur_vit_mask / 100] = mask_amt;
2032 cur_vit_mask -= 100;
2035 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2036 | (mask_m[48] << 26) | (mask_m[49] << 24)
2037 | (mask_m[50] << 22) | (mask_m[51] << 20)
2038 | (mask_m[52] << 18) | (mask_m[53] << 16)
2039 | (mask_m[54] << 14) | (mask_m[55] << 12)
2040 | (mask_m[56] << 10) | (mask_m[57] << 8)
2041 | (mask_m[58] << 6) | (mask_m[59] << 4)
2042 | (mask_m[60] << 2) | (mask_m[61] << 0);
2043 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2044 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2046 tmp_mask = (mask_m[31] << 28)
2047 | (mask_m[32] << 26) | (mask_m[33] << 24)
2048 | (mask_m[34] << 22) | (mask_m[35] << 20)
2049 | (mask_m[36] << 18) | (mask_m[37] << 16)
2050 | (mask_m[48] << 14) | (mask_m[39] << 12)
2051 | (mask_m[40] << 10) | (mask_m[41] << 8)
2052 | (mask_m[42] << 6) | (mask_m[43] << 4)
2053 | (mask_m[44] << 2) | (mask_m[45] << 0);
2054 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2055 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2057 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2058 | (mask_m[18] << 26) | (mask_m[18] << 24)
2059 | (mask_m[20] << 22) | (mask_m[20] << 20)
2060 | (mask_m[22] << 18) | (mask_m[22] << 16)
2061 | (mask_m[24] << 14) | (mask_m[24] << 12)
2062 | (mask_m[25] << 10) | (mask_m[26] << 8)
2063 | (mask_m[27] << 6) | (mask_m[28] << 4)
2064 | (mask_m[29] << 2) | (mask_m[30] << 0);
2065 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2066 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2068 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2069 | (mask_m[2] << 26) | (mask_m[3] << 24)
2070 | (mask_m[4] << 22) | (mask_m[5] << 20)
2071 | (mask_m[6] << 18) | (mask_m[7] << 16)
2072 | (mask_m[8] << 14) | (mask_m[9] << 12)
2073 | (mask_m[10] << 10) | (mask_m[11] << 8)
2074 | (mask_m[12] << 6) | (mask_m[13] << 4)
2075 | (mask_m[14] << 2) | (mask_m[15] << 0);
2076 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2077 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2079 tmp_mask = (mask_p[15] << 28)
2080 | (mask_p[14] << 26) | (mask_p[13] << 24)
2081 | (mask_p[12] << 22) | (mask_p[11] << 20)
2082 | (mask_p[10] << 18) | (mask_p[9] << 16)
2083 | (mask_p[8] << 14) | (mask_p[7] << 12)
2084 | (mask_p[6] << 10) | (mask_p[5] << 8)
2085 | (mask_p[4] << 6) | (mask_p[3] << 4)
2086 | (mask_p[2] << 2) | (mask_p[1] << 0);
2087 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2088 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2090 tmp_mask = (mask_p[30] << 28)
2091 | (mask_p[29] << 26) | (mask_p[28] << 24)
2092 | (mask_p[27] << 22) | (mask_p[26] << 20)
2093 | (mask_p[25] << 18) | (mask_p[24] << 16)
2094 | (mask_p[23] << 14) | (mask_p[22] << 12)
2095 | (mask_p[21] << 10) | (mask_p[20] << 8)
2096 | (mask_p[19] << 6) | (mask_p[18] << 4)
2097 | (mask_p[17] << 2) | (mask_p[16] << 0);
2098 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2099 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2101 tmp_mask = (mask_p[45] << 28)
2102 | (mask_p[44] << 26) | (mask_p[43] << 24)
2103 | (mask_p[42] << 22) | (mask_p[41] << 20)
2104 | (mask_p[40] << 18) | (mask_p[39] << 16)
2105 | (mask_p[38] << 14) | (mask_p[37] << 12)
2106 | (mask_p[36] << 10) | (mask_p[35] << 8)
2107 | (mask_p[34] << 6) | (mask_p[33] << 4)
2108 | (mask_p[32] << 2) | (mask_p[31] << 0);
2109 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2110 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2112 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2113 | (mask_p[59] << 26) | (mask_p[58] << 24)
2114 | (mask_p[57] << 22) | (mask_p[56] << 20)
2115 | (mask_p[55] << 18) | (mask_p[54] << 16)
2116 | (mask_p[53] << 14) | (mask_p[52] << 12)
2117 | (mask_p[51] << 10) | (mask_p[50] << 8)
2118 | (mask_p[49] << 6) | (mask_p[48] << 4)
2119 | (mask_p[47] << 2) | (mask_p[46] << 0);
2120 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2121 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
2124 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
2126 int bb_spur = AR_NO_SPUR;
2129 int spur_delta_phase;
2131 int upper, lower, cur_vit_mask;
2134 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
2135 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
2137 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
2138 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
2140 int inc[4] = { 0, 100, 0, 0 };
2147 bool is2GHz = IS_CHAN_2GHZ(chan);
2149 memset(&mask_m, 0, sizeof(int8_t) * 123);
2150 memset(&mask_p, 0, sizeof(int8_t) * 123);
2152 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
2153 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
2154 if (AR_NO_SPUR == cur_bb_spur)
2156 cur_bb_spur = cur_bb_spur - (chan->channel * 10);
2157 if ((cur_bb_spur > -95) && (cur_bb_spur < 95)) {
2158 bb_spur = cur_bb_spur;
2163 if (AR_NO_SPUR == bb_spur)
2168 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
2169 new = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
2170 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
2171 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
2172 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
2174 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), new);
2176 new = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
2177 AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
2178 AR_PHY_SPUR_REG_MASK_RATE_SELECT |
2179 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
2180 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
2181 REG_WRITE(ah, AR_PHY_SPUR_REG, new);
2183 spur_delta_phase = ((bb_spur * 524288) / 100) &
2184 AR_PHY_TIMING11_SPUR_DELTA_PHASE;
2186 denominator = IS_CHAN_2GHZ(chan) ? 440 : 400;
2187 spur_freq_sd = ((bb_spur * 2048) / denominator) & 0x3ff;
2189 new = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
2190 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
2191 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
2192 REG_WRITE(ah, AR_PHY_TIMING11, new);
2198 for (i = 0; i < 4; i++) {
2202 for (bp = 0; bp < 30; bp++) {
2203 if ((cur_bin > lower) && (cur_bin < upper)) {
2204 pilot_mask = pilot_mask | 0x1 << bp;
2205 chan_mask = chan_mask | 0x1 << bp;
2210 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2211 REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2214 cur_vit_mask = 6100;
2218 for (i = 0; i < 123; i++) {
2219 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2221 /* workaround for gcc bug #37014 */
2222 volatile int tmp_v = abs(cur_vit_mask - bin);
2228 if (cur_vit_mask < 0)
2229 mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2231 mask_p[cur_vit_mask / 100] = mask_amt;
2233 cur_vit_mask -= 100;
2236 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2237 | (mask_m[48] << 26) | (mask_m[49] << 24)
2238 | (mask_m[50] << 22) | (mask_m[51] << 20)
2239 | (mask_m[52] << 18) | (mask_m[53] << 16)
2240 | (mask_m[54] << 14) | (mask_m[55] << 12)
2241 | (mask_m[56] << 10) | (mask_m[57] << 8)
2242 | (mask_m[58] << 6) | (mask_m[59] << 4)
2243 | (mask_m[60] << 2) | (mask_m[61] << 0);
2244 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2245 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2247 tmp_mask = (mask_m[31] << 28)
2248 | (mask_m[32] << 26) | (mask_m[33] << 24)
2249 | (mask_m[34] << 22) | (mask_m[35] << 20)
2250 | (mask_m[36] << 18) | (mask_m[37] << 16)
2251 | (mask_m[48] << 14) /* XXX: index 48 looks like a typo for 38 */ | (mask_m[39] << 12)
2252 | (mask_m[40] << 10) | (mask_m[41] << 8)
2253 | (mask_m[42] << 6) | (mask_m[43] << 4)
2254 | (mask_m[44] << 2) | (mask_m[45] << 0);
2255 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2256 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2258 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28) /* XXX: note the repeated even indices (16, 18, 20, 22, 24) in this block */
2259 | (mask_m[18] << 26) | (mask_m[18] << 24)
2260 | (mask_m[20] << 22) | (mask_m[20] << 20)
2261 | (mask_m[22] << 18) | (mask_m[22] << 16)
2262 | (mask_m[24] << 14) | (mask_m[24] << 12)
2263 | (mask_m[25] << 10) | (mask_m[26] << 8)
2264 | (mask_m[27] << 6) | (mask_m[28] << 4)
2265 | (mask_m[29] << 2) | (mask_m[30] << 0);
2266 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2267 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2269 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2270 | (mask_m[2] << 26) | (mask_m[3] << 24)
2271 | (mask_m[4] << 22) | (mask_m[5] << 20)
2272 | (mask_m[6] << 18) | (mask_m[7] << 16)
2273 | (mask_m[8] << 14) | (mask_m[9] << 12)
2274 | (mask_m[10] << 10) | (mask_m[11] << 8)
2275 | (mask_m[12] << 6) | (mask_m[13] << 4)
2276 | (mask_m[14] << 2) | (mask_m[15] << 0);
2277 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2278 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2280 tmp_mask = (mask_p[15] << 28)
2281 | (mask_p[14] << 26) | (mask_p[13] << 24)
2282 | (mask_p[12] << 22) | (mask_p[11] << 20)
2283 | (mask_p[10] << 18) | (mask_p[9] << 16)
2284 | (mask_p[8] << 14) | (mask_p[7] << 12)
2285 | (mask_p[6] << 10) | (mask_p[5] << 8)
2286 | (mask_p[4] << 6) | (mask_p[3] << 4)
2287 | (mask_p[2] << 2) | (mask_p[1] << 0);
2288 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2289 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2291 tmp_mask = (mask_p[30] << 28)
2292 | (mask_p[29] << 26) | (mask_p[28] << 24)
2293 | (mask_p[27] << 22) | (mask_p[26] << 20)
2294 | (mask_p[25] << 18) | (mask_p[24] << 16)
2295 | (mask_p[23] << 14) | (mask_p[22] << 12)
2296 | (mask_p[21] << 10) | (mask_p[20] << 8)
2297 | (mask_p[19] << 6) | (mask_p[18] << 4)
2298 | (mask_p[17] << 2) | (mask_p[16] << 0);
2299 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2300 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2302 tmp_mask = (mask_p[45] << 28)
2303 | (mask_p[44] << 26) | (mask_p[43] << 24)
2304 | (mask_p[42] << 22) | (mask_p[41] << 20)
2305 | (mask_p[40] << 18) | (mask_p[39] << 16)
2306 | (mask_p[38] << 14) | (mask_p[37] << 12)
2307 | (mask_p[36] << 10) | (mask_p[35] << 8)
2308 | (mask_p[34] << 6) | (mask_p[33] << 4)
2309 | (mask_p[32] << 2) | (mask_p[31] << 0);
2310 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2311 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2313 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2314 | (mask_p[59] << 26) | (mask_p[58] << 24)
2315 | (mask_p[57] << 22) | (mask_p[56] << 20)
2316 | (mask_p[55] << 18) | (mask_p[54] << 16)
2317 | (mask_p[53] << 14) | (mask_p[52] << 12)
2318 | (mask_p[51] << 10) | (mask_p[50] << 8)
2319 | (mask_p[49] << 6) | (mask_p[48] << 4)
2320 | (mask_p[47] << 2) | (mask_p[46] << 0);
2321 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2322 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
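/*
 * Illustrative sketch (not part of the driver): the straight-line packing
 * above can also be written as a loop over 2-bit fields.  This helper
 * follows the mask_m blocks, where the lowest index of each group lands in
 * the most significant bit pair; the mask_p registers are packed in the
 * opposite index order.  The two mask_m blocks that deviate from a purely
 * sequential index pattern are ignored here.
 */
#if 0
static u32 pack_vit_mask(const int8_t *mask, int first, int last)
{
	u32 word = 0;
	int i;

	/* mask[first] ends up in the top bit pair, mask[last] in bits 1:0 */
	for (i = first; i <= last; i++)
		word = (word << 2) | (mask[i] & 0x3);

	return word;
}

/* e.g. the AR_PHY_VIT_MASK2_M_46_61 value above equals pack_vit_mask(mask_m, 46, 61) */
#endif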
2325 static void ath9k_enable_rfkill(struct ath_hw *ah)
2327 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL,
2328 AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
2330 REG_CLR_BIT(ah, AR_GPIO_INPUT_MUX2,
2331 AR_GPIO_INPUT_MUX2_RFSILENT);
2333 ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio);
2334 REG_SET_BIT(ah, AR_PHY_TEST, RFSILENT_BB);
2337 int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
2338 bool bChannelChange)
2341 struct ath_softc *sc = ah->ah_sc;
2342 struct ath9k_channel *curchan = ah->curchan;
2345 int i, rx_chainmask, r;
2347 ah->extprotspacing = sc->ht_extprotspacing;
2348 ah->txchainmask = sc->tx_chainmask;
2349 ah->rxchainmask = sc->rx_chainmask;
2351 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
2355 ath9k_hw_getnf(ah, curchan);
2357 if (bChannelChange &&
2358 (ah->chip_fullsleep != true) &&
2359 (ah->curchan != NULL) &&
2360 (chan->channel != ah->curchan->channel) &&
2361 ((chan->channelFlags & CHANNEL_ALL) ==
2362 (ah->curchan->channelFlags & CHANNEL_ALL)) &&
2363 (!AR_SREV_9280(ah) || (!IS_CHAN_A_5MHZ_SPACED(chan) &&
2364 !IS_CHAN_A_5MHZ_SPACED(ah->curchan)))) {
2366 if (ath9k_hw_channel_change(ah, chan, sc->tx_chan_width)) {
2367 ath9k_hw_loadnf(ah, ah->curchan);
2368 ath9k_hw_start_nfcal(ah);
2373 saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
2374 if (saveDefAntenna == 0)
2377 macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;
2379 saveLedState = REG_READ(ah, AR_CFG_LED) &
2380 (AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
2381 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);
2383 ath9k_hw_mark_phy_inactive(ah);
2385 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2387 AR9271_RESET_POWER_DOWN_CONTROL,
2388 AR9271_RADIO_RF_RST);
2392 if (!ath9k_hw_chip_reset(ah, chan)) {
2393 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, "Chip reset failed\n");
2397 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2398 ah->htc_reset_init = false;
2400 AR9271_RESET_POWER_DOWN_CONTROL,
2401 AR9271_GATE_MAC_CTL);
2405 if (AR_SREV_9280_10_OR_LATER(ah))
2406 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);
2408 if (AR_SREV_9287_12_OR_LATER(ah)) {
2409 /* Enable ASYNC FIFO */
2410 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2411 AR_MAC_PCU_ASYNC_FIFO_REG3_DATAPATH_SEL);
2412 REG_SET_BIT(ah, AR_PHY_MODE, AR_PHY_MODE_ASYNCFIFO);
2413 REG_CLR_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2414 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2415 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2416 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2418 r = ath9k_hw_process_ini(ah, chan, sc->tx_chan_width);
2422 /* Setup MFP options for CCMP */
2423 if (AR_SREV_9280_20_OR_LATER(ah)) {
2424 /* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
2425 * frames when constructing CCMP AAD. */
2426 REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT,
2428 ah->sw_mgmt_crypto = false;
2429 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
2430 /* Disable hardware crypto for management frames */
2431 REG_CLR_BIT(ah, AR_PCU_MISC_MODE2,
2432 AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE);
2433 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2434 AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT);
2435 ah->sw_mgmt_crypto = true;
2437 ah->sw_mgmt_crypto = true;
2439 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
2440 ath9k_hw_set_delta_slope(ah, chan);
2442 if (AR_SREV_9280_10_OR_LATER(ah))
2443 ath9k_hw_9280_spur_mitigate(ah, chan);
2445 ath9k_hw_spur_mitigate(ah, chan);
2447 ah->eep_ops->set_board_values(ah, chan);
2449 ath9k_hw_decrease_chain_power(ah, chan);
2451 REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(ah->macaddr));
2452 REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(ah->macaddr + 4)
2454 | AR_STA_ID1_RTS_USE_DEF
2456 ack_6mb ? AR_STA_ID1_ACKCTS_6MB : 0)
2457 | ah->sta_id1_defaults);
2458 ath9k_hw_set_operating_mode(ah, ah->opmode);
2460 REG_WRITE(ah, AR_BSSMSKL, get_unaligned_le32(sc->bssidmask));
2461 REG_WRITE(ah, AR_BSSMSKU, get_unaligned_le16(sc->bssidmask + 4));
2463 REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna);
2465 REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(sc->curbssid));
2466 REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(sc->curbssid + 4) |
2467 ((sc->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
2469 REG_WRITE(ah, AR_ISR, ~0);
2471 REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR);
2473 if (AR_SREV_9280_10_OR_LATER(ah))
2474 ath9k_hw_ar9280_set_channel(ah, chan);
2476 if (!(ath9k_hw_set_channel(ah, chan)))
2479 for (i = 0; i < AR_NUM_DCU; i++)
2480 REG_WRITE(ah, AR_DQCUMASK(i), 1 << i);
2483 for (i = 0; i < ah->caps.total_queues; i++)
2484 ath9k_hw_resettxqueue(ah, i);
2486 ath9k_hw_init_interrupt_masks(ah, ah->opmode);
2487 ath9k_hw_init_qos(ah);
2489 if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT)
2490 ath9k_enable_rfkill(ah);
2492 ath9k_hw_init_user_settings(ah);
2494 if (AR_SREV_9287_12_OR_LATER(ah)) {
2495 REG_WRITE(ah, AR_D_GBL_IFS_SIFS,
2496 AR_D_GBL_IFS_SIFS_ASYNC_FIFO_DUR);
2497 REG_WRITE(ah, AR_D_GBL_IFS_SLOT,
2498 AR_D_GBL_IFS_SLOT_ASYNC_FIFO_DUR);
2499 REG_WRITE(ah, AR_D_GBL_IFS_EIFS,
2500 AR_D_GBL_IFS_EIFS_ASYNC_FIFO_DUR);
2502 REG_WRITE(ah, AR_TIME_OUT, AR_TIME_OUT_ACK_CTS_ASYNC_FIFO_DUR);
2503 REG_WRITE(ah, AR_USEC, AR_USEC_ASYNC_FIFO_DUR);
2505 REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER,
2506 AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768);
2507 REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN,
2508 AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL);
2510 if (AR_SREV_9287_12_OR_LATER(ah)) {
2511 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2512 AR_PCU_MISC_MODE2_ENABLE_AGGWEP);
2515 REG_WRITE(ah, AR_STA_ID1,
2516 REG_READ(ah, AR_STA_ID1) | AR_STA_ID1_PRESERVE_SEQNUM);
2518 ath9k_hw_set_dma(ah);
2520 REG_WRITE(ah, AR_OBS, 8);
2522 if (ah->config.intr_mitigation) {
2523 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500);
2524 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000);
2527 ath9k_hw_init_bb(ah, chan);
2529 if (!ath9k_hw_init_cal(ah, chan))
2532 rx_chainmask = ah->rxchainmask;
2533 if ((rx_chainmask == 0x5) || (rx_chainmask == 0x3)) {
2534 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
2535 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
2538 REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ);
2541 * For big endian systems turn on swapping for descriptors
2543 if (AR_SREV_9100(ah)) {
2545 mask = REG_READ(ah, AR_CFG);
2546 if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) {
2547 DPRINTF(ah->ah_sc, ATH_DBG_RESET,
2548 "CFG Byte Swap Set 0x%x\n", mask);
2551 INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB;
2552 REG_WRITE(ah, AR_CFG, mask);
2553 DPRINTF(ah->ah_sc, ATH_DBG_RESET,
2554 "Setting CFG 0x%x\n", REG_READ(ah, AR_CFG));
2557 /* Configure AR9271 target WLAN */
2558 if (AR_SREV_9271(ah))
2559 REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB);
2562 REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
2566 if (ah->ah_sc->sc_flags & SC_OP_BTCOEX_ENABLED)
2567 ath9k_hw_btcoex_enable(ah);
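/*
 * Illustrative sketch (not part of the driver): a typical caller of
 * ath9k_hw_reset() passes bChannelChange = true when only the channel
 * changed, which lets the fast channel-change path above run, and checks
 * the return code.  The locking and ath9k_ps_wakeup()/ath9k_ps_restore()
 * bracketing done by the real driver are omitted.
 */
#if 0
static int example_set_channel(struct ath_softc *sc,
			       struct ath9k_channel *chan)
{
	struct ath_hw *ah = sc->sc_ah;
	bool fast = (ah->curchan != NULL);
	int r;

	r = ath9k_hw_reset(ah, chan, fast);
	if (r) {
		DPRINTF(sc, ATH_DBG_FATAL,
			"Unable to reset hardware, status %d\n", r);
		return r;
	}

	return 0;
}
#endif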
2572 /************************/
2573 /* Key Cache Management */
2574 /************************/
2576 bool ath9k_hw_keyreset(struct ath_hw *ah, u16 entry)
2580 if (entry >= ah->caps.keycache_size) {
2581 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2582 "keychache entry %u out of range\n", entry);
2586 keyType = REG_READ(ah, AR_KEYTABLE_TYPE(entry));
2588 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), 0);
2589 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), 0);
2590 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), 0);
2591 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), 0);
2592 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), 0);
2593 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), AR_KEYTABLE_TYPE_CLR);
2594 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), 0);
2595 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), 0);
2597 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2598 u16 micentry = entry + 64;
2600 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), 0);
2601 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2602 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), 0);
2603 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2610 bool ath9k_hw_keysetmac(struct ath_hw *ah, u16 entry, const u8 *mac)
2614 if (entry >= ah->caps.keycache_size) {
2615 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2616 "keychache entry %u out of range\n", entry);
2621 macHi = (mac[5] << 8) | mac[4];
2622 macLo = (mac[3] << 24) |
2623 (mac[2] << 16) |
2624 (mac[1] << 8) |
2625 mac[0];
2626 macLo >>= 1;
2627 macLo |= (macHi & 1) << 31;
2628 macHi >>= 1;
2632 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), macLo);
2633 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), macHi | AR_KEYTABLE_VALID);
2638 bool ath9k_hw_set_keycache_entry(struct ath_hw *ah, u16 entry,
2639 const struct ath9k_keyval *k,
2642 const struct ath9k_hw_capabilities *pCap = &ah->caps;
2643 u32 key0, key1, key2, key3, key4;
2646 if (entry >= pCap->keycache_size) {
2647 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2648 "keycache entry %u out of range\n", entry);
2652 switch (k->kv_type) {
2653 case ATH9K_CIPHER_AES_OCB:
2654 keyType = AR_KEYTABLE_TYPE_AES;
2656 case ATH9K_CIPHER_AES_CCM:
2657 if (!(pCap->hw_caps & ATH9K_HW_CAP_CIPHER_AESCCM)) {
2658 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
2659 "AES-CCM not supported by mac rev 0x%x\n",
2660 ah->hw_version.macRev);
2663 keyType = AR_KEYTABLE_TYPE_CCM;
2665 case ATH9K_CIPHER_TKIP:
2666 keyType = AR_KEYTABLE_TYPE_TKIP;
2667 if (ATH9K_IS_MIC_ENABLED(ah)
2668 && entry + 64 >= pCap->keycache_size) {
2669 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
2670 "entry %u inappropriate for TKIP\n", entry);
2674 case ATH9K_CIPHER_WEP:
2675 if (k->kv_len < WLAN_KEY_LEN_WEP40) {
2676 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
2677 "WEP key length %u too small\n", k->kv_len);
2680 if (k->kv_len <= WLAN_KEY_LEN_WEP40)
2681 keyType = AR_KEYTABLE_TYPE_40;
2682 else if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2683 keyType = AR_KEYTABLE_TYPE_104;
2685 keyType = AR_KEYTABLE_TYPE_128;
2687 case ATH9K_CIPHER_CLR:
2688 keyType = AR_KEYTABLE_TYPE_CLR;
2691 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2692 "cipher %u not supported\n", k->kv_type);
2696 key0 = get_unaligned_le32(k->kv_val + 0);
2697 key1 = get_unaligned_le16(k->kv_val + 4);
2698 key2 = get_unaligned_le32(k->kv_val + 6);
2699 key3 = get_unaligned_le16(k->kv_val + 10);
2700 key4 = get_unaligned_le32(k->kv_val + 12);
2701 if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2702 key4 &= 0xff;
2705 * Note: Key cache registers access special memory area that requires
2706 * two 32-bit writes to actually update the values in the internal
2707 * memory. Consequently, the exact order and pairs used here must be
2708 * maintained.
2711 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2712 u16 micentry = entry + 64;
2715 * Write inverted key[47:0] first to avoid Michael MIC errors
2716 * on frames that could be sent or received at the same time.
2717 * The correct key will be written in the end once everything
2718 * else is ready.
2720 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), ~key0);
2721 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), ~key1);
2723 /* Write key[95:48] */
2724 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2725 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2727 /* Write key[127:96] and key type */
2728 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2729 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2731 /* Write MAC address for the entry */
2732 (void) ath9k_hw_keysetmac(ah, entry, mac);
2734 if (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) {
2736 * TKIP uses two key cache entries:
2737 * Michael MIC TX/RX keys in the same key cache entry
2738 * (idx = main index + 64):
2739 * key0 [31:0] = RX key [31:0]
2740 * key1 [15:0] = TX key [31:16]
2741 * key1 [31:16] = reserved
2742 * key2 [31:0] = RX key [63:32]
2743 * key3 [15:0] = TX key [15:0]
2744 * key3 [31:16] = reserved
2745 * key4 [31:0] = TX key [63:32]
2747 u32 mic0, mic1, mic2, mic3, mic4;
2749 mic0 = get_unaligned_le32(k->kv_mic + 0);
2750 mic2 = get_unaligned_le32(k->kv_mic + 4);
2751 mic1 = get_unaligned_le16(k->kv_txmic + 2) & 0xffff;
2752 mic3 = get_unaligned_le16(k->kv_txmic + 0) & 0xffff;
2753 mic4 = get_unaligned_le32(k->kv_txmic + 4);
2755 /* Write RX[31:0] and TX[31:16] */
2756 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2757 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), mic1);
2759 /* Write RX[63:32] and TX[15:0] */
2760 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2761 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), mic3);
2763 /* Write TX[63:32] and keyType(reserved) */
2764 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), mic4);
2765 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2766 AR_KEYTABLE_TYPE_CLR);
2770 * TKIP uses four key cache entries (two for group
2771 * keys):
2772 * Michael MIC TX/RX keys are in different key cache
2773 * entries (idx = main index + 64 for TX and
2774 * main index + 32 + 96 for RX):
2775 * key0 [31:0] = TX/RX MIC key [31:0]
2776 * key1 [31:0] = reserved
2777 * key2 [31:0] = TX/RX MIC key [63:32]
2778 * key3 [31:0] = reserved
2779 * key4 [31:0] = reserved
2781 * Upper layer code will call this function separately
2782 * for TX and RX keys when these register offsets are
2783 * used.
2787 mic0 = get_unaligned_le32(k->kv_mic + 0);
2788 mic2 = get_unaligned_le32(k->kv_mic + 4);
2790 /* Write MIC key[31:0] */
2791 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2792 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2794 /* Write MIC key[63:32] */
2795 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2796 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2798 /* Write TX[63:32] and keyType(reserved) */
2799 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), 0);
2800 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2801 AR_KEYTABLE_TYPE_CLR);
2804 /* MAC address registers are reserved for the MIC entry */
2805 REG_WRITE(ah, AR_KEYTABLE_MAC0(micentry), 0);
2806 REG_WRITE(ah, AR_KEYTABLE_MAC1(micentry), 0);
2809 * Write the correct (un-inverted) key[47:0] last to enable
2810 * TKIP now that all other registers are set with correct
2813 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2814 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2816 /* Write key[47:0] */
2817 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2818 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2820 /* Write key[95:48] */
2821 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2822 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2824 /* Write key[127:96] and key type */
2825 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2826 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2828 /* Write MAC address for the entry */
2829 (void) ath9k_hw_keysetmac(ah, entry, mac);
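/*
 * Illustrative sketch (not part of the driver): installing a 128-bit CCMP
 * pairwise key with the function above.  The entry index, key material and
 * peer MAC address are assumed to be supplied by the caller; the real
 * driver allocates the index from its key cache bookkeeping.
 */
#if 0
static bool example_install_ccmp_key(struct ath_hw *ah, u16 entry,
				     const u8 *key, const u8 *peer_mac)
{
	struct ath9k_keyval k;

	memset(&k, 0, sizeof(k));
	k.kv_type = ATH9K_CIPHER_AES_CCM;
	k.kv_len = WLAN_KEY_LEN_CCMP;			/* 16 bytes */
	memcpy(k.kv_val, key, WLAN_KEY_LEN_CCMP);

	/* Key words are written in the paired order required by the cache */
	return ath9k_hw_set_keycache_entry(ah, entry, &k, peer_mac);
}
#endif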
2835 bool ath9k_hw_keyisvalid(struct ath_hw *ah, u16 entry)
2837 if (entry < ah->caps.keycache_size) {
2838 u32 val = REG_READ(ah, AR_KEYTABLE_MAC1(entry));
2839 if (val & AR_KEYTABLE_VALID)
2845 /******************************/
2846 /* Power Management (Chipset) */
2847 /******************************/
2849 static void ath9k_set_power_sleep(struct ath_hw *ah, int setChip)
2851 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2853 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2854 AR_RTC_FORCE_WAKE_EN);
2855 if (!AR_SREV_9100(ah))
2856 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
2858 REG_CLR_BIT(ah, (AR_RTC_RESET),
2863 static void ath9k_set_power_network_sleep(struct ath_hw *ah, int setChip)
2865 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2867 struct ath9k_hw_capabilities *pCap = &ah->caps;
2869 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
2870 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
2871 AR_RTC_FORCE_WAKE_ON_INT);
2873 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2874 AR_RTC_FORCE_WAKE_EN);
2879 static bool ath9k_hw_set_power_awake(struct ath_hw *ah, int setChip)
2885 if ((REG_READ(ah, AR_RTC_STATUS) &
2886 AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) {
2887 if (ath9k_hw_set_reset_reg(ah,
2888 ATH9K_RESET_POWER_ON) != true) {
2892 if (AR_SREV_9100(ah))
2893 REG_SET_BIT(ah, AR_RTC_RESET,
2896 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2897 AR_RTC_FORCE_WAKE_EN);
2900 for (i = POWER_UP_TIME / 50; i > 0; i--) {
2901 val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M;
2902 if (val == AR_RTC_STATUS_ON)
2905 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2906 AR_RTC_FORCE_WAKE_EN);
2909 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2910 "Failed to wakeup in %uus\n", POWER_UP_TIME / 20);
2915 REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2920 static bool ath9k_hw_setpower_nolock(struct ath_hw *ah,
2921 enum ath9k_power_mode mode)
2923 int status = true, setChip = true;
2924 static const char *modes[] = {
2931 if (ah->power_mode == mode)
2934 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "%s -> %s\n",
2935 modes[ah->power_mode], modes[mode]);
2938 case ATH9K_PM_AWAKE:
2939 status = ath9k_hw_set_power_awake(ah, setChip);
2941 case ATH9K_PM_FULL_SLEEP:
2942 ath9k_set_power_sleep(ah, setChip);
2943 ah->chip_fullsleep = true;
2945 case ATH9K_PM_NETWORK_SLEEP:
2946 ath9k_set_power_network_sleep(ah, setChip);
2949 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2950 "Unknown power mode %u\n", mode);
2953 ah->power_mode = mode;
2958 bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode)
2960 unsigned long flags;
2963 spin_lock_irqsave(&ah->ah_sc->sc_pm_lock, flags);
2964 ret = ath9k_hw_setpower_nolock(ah, mode);
2965 spin_unlock_irqrestore(&ah->ah_sc->sc_pm_lock, flags);
2970 void ath9k_ps_wakeup(struct ath_softc *sc)
2972 unsigned long flags;
2974 spin_lock_irqsave(&sc->sc_pm_lock, flags);
2975 if (++sc->ps_usecount != 1)
2978 ath9k_hw_setpower_nolock(sc->sc_ah, ATH9K_PM_AWAKE);
2981 spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
2984 void ath9k_ps_restore(struct ath_softc *sc)
2986 unsigned long flags;
2988 spin_lock_irqsave(&sc->sc_pm_lock, flags);
2989 if (--sc->ps_usecount != 0)
2992 if (sc->ps_enabled &&
2993 !(sc->sc_flags & (SC_OP_WAIT_FOR_BEACON |
2994 SC_OP_WAIT_FOR_CAB |
2995 SC_OP_WAIT_FOR_PSPOLL_DATA |
2996 SC_OP_WAIT_FOR_TX_ACK)))
2997 ath9k_hw_setpower_nolock(sc->sc_ah, ATH9K_PM_NETWORK_SLEEP);
3000 spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
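/*
 * Illustrative sketch (not part of the driver): ath9k_ps_wakeup() and
 * ath9k_ps_restore() are reference counted, so any register access that
 * may happen while the chip is in network sleep is bracketed by them.
 * ath9k_hw_gettsf64() is only used here as an example of such an access.
 */
#if 0
static u64 example_read_tsf(struct ath_softc *sc)
{
	u64 tsf;

	ath9k_ps_wakeup(sc);		/* force ATH9K_PM_AWAKE */
	tsf = ath9k_hw_gettsf64(sc->sc_ah);
	ath9k_ps_restore(sc);		/* may drop back to network sleep */

	return tsf;
}
#endif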
3004 * Helper for ASPM support.
3006 * Disable PLL when in L0s as well as receiver clock when in L1.
3007 * This power saving option must be enabled through the SerDes.
3009 * Programming the SerDes must go through the same 288 bit serial shift
3010 * register as the other analog registers. Hence the 9 writes.
3012 void ath9k_hw_configpcipowersave(struct ath_hw *ah, int restore)
3016 if (ah->is_pciexpress != true)
3019 /* Do not touch SerDes registers */
3020 if (ah->config.pcie_powersave_enable == 2)
3023 /* Nothing to do on restore for 11N */
3027 if (AR_SREV_9280_20_OR_LATER(ah)) {
3029 * AR9280 2.0 or later chips use SerDes values from initvals.h,
3030 * initialized for the specific chipset during
3031 * ath9k_hw_init().
3033 for (i = 0; i < ah->iniPcieSerdes.ia_rows; i++) {
3034 REG_WRITE(ah, INI_RA(&ah->iniPcieSerdes, i, 0),
3035 INI_RA(&ah->iniPcieSerdes, i, 1));
3037 } else if (AR_SREV_9280(ah) &&
3038 (ah->hw_version.macRev == AR_SREV_REVISION_9280_10)) {
3039 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fd00);
3040 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3042 /* RX shut off when elecidle is asserted */
3043 REG_WRITE(ah, AR_PCIE_SERDES, 0xa8000019);
3044 REG_WRITE(ah, AR_PCIE_SERDES, 0x13160820);
3045 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980560);
3047 /* Shut off CLKREQ active in L1 */
3048 if (ah->config.pcie_clock_req)
3049 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffc);
3051 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffd);
3053 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3054 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3055 REG_WRITE(ah, AR_PCIE_SERDES, 0x00043007);
3057 /* Load the new settings */
3058 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3061 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
3062 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3064 /* RX shut off when elecidle is asserted */
3065 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000039);
3066 REG_WRITE(ah, AR_PCIE_SERDES, 0x53160824);
3067 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980579);
3070 * Ignore ah->config.pcie_clock_req setting for
3071 * pre-AR9280 11n chips.
3073 REG_WRITE(ah, AR_PCIE_SERDES, 0x001defff);
3075 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3076 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3077 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e3007);
3079 /* Load the new settings */
3080 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3085 /* set bit 19 to allow forcing of pcie core into L1 state */
3086 REG_SET_BIT(ah, AR_PCIE_PM_CTRL, AR_PCIE_PM_CTRL_ENA);
3088 /* Several PCIe messages to ensure proper behaviour */
3089 if (ah->config.pcie_waen) {
3090 REG_WRITE(ah, AR_WA, ah->config.pcie_waen);
3092 if (AR_SREV_9285(ah) || AR_SREV_9271(ah) || AR_SREV_9287(ah))
3093 REG_WRITE(ah, AR_WA, AR9285_WA_DEFAULT);
3095 * On AR9280 chips bit 22 of 0x4004 needs to be set,
3096 * otherwise the card may disappear.
3098 else if (AR_SREV_9280(ah))
3099 REG_WRITE(ah, AR_WA, AR9280_WA_DEFAULT);
3101 REG_WRITE(ah, AR_WA, AR_WA_DEFAULT);
3105 /**********************/
3106 /* Interrupt Handling */
3107 /**********************/
3109 bool ath9k_hw_intrpend(struct ath_hw *ah)
3113 if (AR_SREV_9100(ah))
3116 host_isr = REG_READ(ah, AR_INTR_ASYNC_CAUSE);
3117 if ((host_isr & AR_INTR_MAC_IRQ) && (host_isr != AR_INTR_SPURIOUS))
3120 host_isr = REG_READ(ah, AR_INTR_SYNC_CAUSE);
3121 if ((host_isr & AR_INTR_SYNC_DEFAULT)
3122 && (host_isr != AR_INTR_SPURIOUS))
3128 bool ath9k_hw_getisr(struct ath_hw *ah, enum ath9k_int *masked)
3132 struct ath9k_hw_capabilities *pCap = &ah->caps;
3134 bool fatal_int = false;
3136 if (!AR_SREV_9100(ah)) {
3137 if (REG_READ(ah, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) {
3138 if ((REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M)
3139 == AR_RTC_STATUS_ON) {
3140 isr = REG_READ(ah, AR_ISR);
3144 sync_cause = REG_READ(ah, AR_INTR_SYNC_CAUSE) &
3145 AR_INTR_SYNC_DEFAULT;
3149 if (!isr && !sync_cause)
3153 isr = REG_READ(ah, AR_ISR);
3157 if (isr & AR_ISR_BCNMISC) {
3159 isr2 = REG_READ(ah, AR_ISR_S2);
3160 if (isr2 & AR_ISR_S2_TIM)
3161 mask2 |= ATH9K_INT_TIM;
3162 if (isr2 & AR_ISR_S2_DTIM)
3163 mask2 |= ATH9K_INT_DTIM;
3164 if (isr2 & AR_ISR_S2_DTIMSYNC)
3165 mask2 |= ATH9K_INT_DTIMSYNC;
3166 if (isr2 & (AR_ISR_S2_CABEND))
3167 mask2 |= ATH9K_INT_CABEND;
3168 if (isr2 & AR_ISR_S2_GTT)
3169 mask2 |= ATH9K_INT_GTT;
3170 if (isr2 & AR_ISR_S2_CST)
3171 mask2 |= ATH9K_INT_CST;
3172 if (isr2 & AR_ISR_S2_TSFOOR)
3173 mask2 |= ATH9K_INT_TSFOOR;
3176 isr = REG_READ(ah, AR_ISR_RAC);
3177 if (isr == 0xffffffff) {
3182 *masked = isr & ATH9K_INT_COMMON;
3184 if (ah->config.intr_mitigation) {
3185 if (isr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
3186 *masked |= ATH9K_INT_RX;
3189 if (isr & (AR_ISR_RXOK | AR_ISR_RXERR))
3190 *masked |= ATH9K_INT_RX;
3192 (AR_ISR_TXOK | AR_ISR_TXDESC | AR_ISR_TXERR |
3196 *masked |= ATH9K_INT_TX;
3198 s0_s = REG_READ(ah, AR_ISR_S0_S);
3199 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXOK);
3200 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXDESC);
3202 s1_s = REG_READ(ah, AR_ISR_S1_S);
3203 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXERR);
3204 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXEOL);
3207 if (isr & AR_ISR_RXORN) {
3208 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT,
3209 "receive FIFO overrun interrupt\n");
3212 if (!AR_SREV_9100(ah)) {
3213 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3214 u32 isr5 = REG_READ(ah, AR_ISR_S5_S);
3215 if (isr5 & AR_ISR_S5_TIM_TIMER)
3216 *masked |= ATH9K_INT_TIM_TIMER;
3223 if (AR_SREV_9100(ah))
3226 if (isr & AR_ISR_GENTMR) {
3229 s5_s = REG_READ(ah, AR_ISR_S5_S);
3230 if (isr & AR_ISR_GENTMR) {
3231 ah->intr_gen_timer_trigger =
3232 MS(s5_s, AR_ISR_S5_GENTIMER_TRIG);
3234 ah->intr_gen_timer_thresh =
3235 MS(s5_s, AR_ISR_S5_GENTIMER_THRESH);
3237 if (ah->intr_gen_timer_trigger)
3238 *masked |= ATH9K_INT_GENTIMER;
3246 (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR))
3250 if (sync_cause & AR_INTR_SYNC_HOST1_FATAL) {
3251 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
3252 "received PCI FATAL interrupt\n");
3254 if (sync_cause & AR_INTR_SYNC_HOST1_PERR) {
3255 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
3256 "received PCI PERR interrupt\n");
3258 *masked |= ATH9K_INT_FATAL;
3260 if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
3261 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT,
3262 "AR_INTR_SYNC_RADM_CPL_TIMEOUT\n");
3263 REG_WRITE(ah, AR_RC, AR_RC_HOSTIF);
3264 REG_WRITE(ah, AR_RC, 0);
3265 *masked |= ATH9K_INT_FATAL;
3267 if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT) {
3268 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT,
3269 "AR_INTR_SYNC_LOCAL_TIMEOUT\n");
3272 REG_WRITE(ah, AR_INTR_SYNC_CAUSE_CLR, sync_cause);
3273 (void) REG_READ(ah, AR_INTR_SYNC_CAUSE_CLR);
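/*
 * Illustrative sketch (not part of the driver): the basic shape of an
 * interrupt handler built on ath9k_hw_intrpend() and ath9k_hw_getisr().
 * The real handler (ath_isr() in main.c) additionally masks interrupts
 * and defers the work to a tasklet, which is omitted here.
 */
#if 0
static irqreturn_t example_isr(int irq, void *dev)
{
	struct ath_softc *sc = dev;
	struct ath_hw *ah = sc->sc_ah;
	enum ath9k_int status;

	if (!ath9k_hw_intrpend(ah))	/* shared IRQ, not for us */
		return IRQ_NONE;

	ath9k_hw_getisr(ah, &status);	/* reads and clears the ISR */

	if (status & ATH9K_INT_FATAL) {
		/* a real driver would schedule a chip reset here */
	}

	return IRQ_HANDLED;
}
#endif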
3279 enum ath9k_int ath9k_hw_set_interrupts(struct ath_hw *ah, enum ath9k_int ints)
3281 u32 omask = ah->mask_reg;
3283 struct ath9k_hw_capabilities *pCap = &ah->caps;
3285 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "0x%x => 0x%x\n", omask, ints);
3287 if (omask & ATH9K_INT_GLOBAL) {
3288 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "disable IER\n");
3289 REG_WRITE(ah, AR_IER, AR_IER_DISABLE);
3290 (void) REG_READ(ah, AR_IER);
3291 if (!AR_SREV_9100(ah)) {
3292 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, 0);
3293 (void) REG_READ(ah, AR_INTR_ASYNC_ENABLE);
3295 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
3296 (void) REG_READ(ah, AR_INTR_SYNC_ENABLE);
3300 mask = ints & ATH9K_INT_COMMON;
3303 if (ints & ATH9K_INT_TX) {
3304 if (ah->txok_interrupt_mask)
3305 mask |= AR_IMR_TXOK;
3306 if (ah->txdesc_interrupt_mask)
3307 mask |= AR_IMR_TXDESC;
3308 if (ah->txerr_interrupt_mask)
3309 mask |= AR_IMR_TXERR;
3310 if (ah->txeol_interrupt_mask)
3311 mask |= AR_IMR_TXEOL;
3313 if (ints & ATH9K_INT_RX) {
3314 mask |= AR_IMR_RXERR;
3315 if (ah->config.intr_mitigation)
3316 mask |= AR_IMR_RXMINTR | AR_IMR_RXINTM;
3318 mask |= AR_IMR_RXOK | AR_IMR_RXDESC;
3319 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP))
3320 mask |= AR_IMR_GENTMR;
3323 if (ints & (ATH9K_INT_BMISC)) {
3324 mask |= AR_IMR_BCNMISC;
3325 if (ints & ATH9K_INT_TIM)
3326 mask2 |= AR_IMR_S2_TIM;
3327 if (ints & ATH9K_INT_DTIM)
3328 mask2 |= AR_IMR_S2_DTIM;
3329 if (ints & ATH9K_INT_DTIMSYNC)
3330 mask2 |= AR_IMR_S2_DTIMSYNC;
3331 if (ints & ATH9K_INT_CABEND)
3332 mask2 |= AR_IMR_S2_CABEND;
3333 if (ints & ATH9K_INT_TSFOOR)
3334 mask2 |= AR_IMR_S2_TSFOOR;
3337 if (ints & (ATH9K_INT_GTT | ATH9K_INT_CST)) {
3338 mask |= AR_IMR_BCNMISC;
3339 if (ints & ATH9K_INT_GTT)
3340 mask2 |= AR_IMR_S2_GTT;
3341 if (ints & ATH9K_INT_CST)
3342 mask2 |= AR_IMR_S2_CST;
3345 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "new IMR 0x%x\n", mask);
3346 REG_WRITE(ah, AR_IMR, mask);
3347 mask = REG_READ(ah, AR_IMR_S2) & ~(AR_IMR_S2_TIM |
3349 AR_IMR_S2_DTIMSYNC |
3353 AR_IMR_S2_GTT | AR_IMR_S2_CST);
3354 REG_WRITE(ah, AR_IMR_S2, mask | mask2);
3355 ah->mask_reg = ints;
3357 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3358 if (ints & ATH9K_INT_TIM_TIMER)
3359 REG_SET_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
3361 REG_CLR_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
3364 if (ints & ATH9K_INT_GLOBAL) {
3365 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "enable IER\n");
3366 REG_WRITE(ah, AR_IER, AR_IER_ENABLE);
3367 if (!AR_SREV_9100(ah)) {
3368 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE,
3370 REG_WRITE(ah, AR_INTR_ASYNC_MASK, AR_INTR_MAC_IRQ);
3373 REG_WRITE(ah, AR_INTR_SYNC_ENABLE,
3374 AR_INTR_SYNC_DEFAULT);
3375 REG_WRITE(ah, AR_INTR_SYNC_MASK,
3376 AR_INTR_SYNC_DEFAULT);
3378 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "AR_IMR 0x%x IER 0x%x\n",
3379 REG_READ(ah, AR_IMR), REG_READ(ah, AR_IER));
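/*
 * Illustrative sketch (not part of the driver): enabling a typical set of
 * interrupts after a reset.  The exact mask used by the real driver
 * depends on the operating mode and chip capabilities.
 */
#if 0
static void example_enable_interrupts(struct ath_hw *ah)
{
	enum ath9k_int mask = ATH9K_INT_RX | ATH9K_INT_TX |
			      ATH9K_INT_FATAL | ATH9K_INT_GLOBAL;

	ath9k_hw_set_interrupts(ah, mask);
}
#endif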
3385 /*******************/
3386 /* Beacon Handling */
3387 /*******************/
3389 void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period)
3393 ah->beacon_interval = beacon_period;
3395 switch (ah->opmode) {
3396 case NL80211_IFTYPE_STATION:
3397 case NL80211_IFTYPE_MONITOR:
3398 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3399 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, 0xffff);
3400 REG_WRITE(ah, AR_NEXT_SWBA, 0x7ffff);
3401 flags |= AR_TBTT_TIMER_EN;
3403 case NL80211_IFTYPE_ADHOC:
3404 case NL80211_IFTYPE_MESH_POINT:
3405 REG_SET_BIT(ah, AR_TXCFG,
3406 AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY);
3407 REG_WRITE(ah, AR_NEXT_NDP_TIMER,
3408 TU_TO_USEC(next_beacon +
3409 (ah->atim_window ? ah->
3411 flags |= AR_NDP_TIMER_EN; /* fall through: adhoc/mesh also arm the AP timers below */
3412 case NL80211_IFTYPE_AP:
3413 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3414 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT,
3415 TU_TO_USEC(next_beacon -
3417 dma_beacon_response_time));
3418 REG_WRITE(ah, AR_NEXT_SWBA,
3419 TU_TO_USEC(next_beacon -
3421 sw_beacon_response_time));
3423 AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN;
3426 DPRINTF(ah->ah_sc, ATH_DBG_BEACON,
3427 "%s: unsupported opmode: %d\n",
3428 __func__, ah->opmode);
3433 REG_WRITE(ah, AR_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3434 REG_WRITE(ah, AR_DMA_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3435 REG_WRITE(ah, AR_SWBA_PERIOD, TU_TO_USEC(beacon_period));
3436 REG_WRITE(ah, AR_NDP_PERIOD, TU_TO_USEC(beacon_period));
3438 beacon_period &= ~ATH9K_BEACON_ENA;
3439 if (beacon_period & ATH9K_BEACON_RESET_TSF) {
3440 beacon_period &= ~ATH9K_BEACON_RESET_TSF;
3441 ath9k_hw_reset_tsf(ah);
3444 REG_SET_BIT(ah, AR_TIMER_MODE, flags);
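/*
 * Illustrative sketch (not part of the driver): arming the beacon timers
 * for a station with a 100 TU beacon interval.  TU_TO_USEC() above
 * converts time units to microseconds (1 TU = 1024 us); next_tbtt_tu is a
 * hypothetical value derived from the last received beacon.
 */
#if 0
	ath9k_hw_beaconinit(ah, next_tbtt_tu, 100);
#endif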
3447 void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah,
3448 const struct ath9k_beacon_state *bs)
3450 u32 nextTbtt, beaconintval, dtimperiod, beacontimeout;
3451 struct ath9k_hw_capabilities *pCap = &ah->caps;
3453 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(bs->bs_nexttbtt));
3455 REG_WRITE(ah, AR_BEACON_PERIOD,
3456 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3457 REG_WRITE(ah, AR_DMA_BEACON_PERIOD,
3458 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3460 REG_RMW_FIELD(ah, AR_RSSI_THR,
3461 AR_RSSI_THR_BM_THR, bs->bs_bmissthreshold);
3463 beaconintval = bs->bs_intval & ATH9K_BEACON_PERIOD;
3465 if (bs->bs_sleepduration > beaconintval)
3466 beaconintval = bs->bs_sleepduration;
3468 dtimperiod = bs->bs_dtimperiod;
3469 if (bs->bs_sleepduration > dtimperiod)
3470 dtimperiod = bs->bs_sleepduration;
3472 if (beaconintval == dtimperiod)
3473 nextTbtt = bs->bs_nextdtim;
3475 nextTbtt = bs->bs_nexttbtt;
3477 DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "next DTIM %d\n", bs->bs_nextdtim);
3478 DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "next beacon %d\n", nextTbtt);
3479 DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "beacon period %d\n", beaconintval);
3480 DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "DTIM period %d\n", dtimperiod);
3482 REG_WRITE(ah, AR_NEXT_DTIM,
3483 TU_TO_USEC(bs->bs_nextdtim - SLEEP_SLOP));
3484 REG_WRITE(ah, AR_NEXT_TIM, TU_TO_USEC(nextTbtt - SLEEP_SLOP));
3486 REG_WRITE(ah, AR_SLEEP1,
3487 SM((CAB_TIMEOUT_VAL << 3), AR_SLEEP1_CAB_TIMEOUT)
3488 | AR_SLEEP1_ASSUME_DTIM);
3490 if (pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)
3491 beacontimeout = (BEACON_TIMEOUT_VAL << 3);
3493 beacontimeout = MIN_BEACON_TIMEOUT_VAL;
3495 REG_WRITE(ah, AR_SLEEP2,
3496 SM(beacontimeout, AR_SLEEP2_BEACON_TIMEOUT));
3498 REG_WRITE(ah, AR_TIM_PERIOD, TU_TO_USEC(beaconintval));
3499 REG_WRITE(ah, AR_DTIM_PERIOD, TU_TO_USEC(dtimperiod));
3501 REG_SET_BIT(ah, AR_TIMER_MODE,
3502 AR_TBTT_TIMER_EN | AR_TIM_TIMER_EN |
3505 /* TSF Out of Range Threshold */
3506 REG_WRITE(ah, AR_TSFOOR_THRESHOLD, bs->bs_tsfoor_threshold);
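/*
 * Worked example (illustrative): with bs_intval = 100 TU, bs_dtimperiod =
 * 300 TU and bs_sleepduration = 300 TU, the code above raises beaconintval
 * to 300 so that it equals dtimperiod; nextTbtt is then taken from
 * bs_nextdtim and AR_NEXT_TIM is programmed so the station only wakes for
 * DTIM beacons.
 */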
3509 /*******************/
3510 /* HW Capabilities */
3511 /*******************/
3513 void ath9k_hw_fill_cap_info(struct ath_hw *ah)
3515 struct ath9k_hw_capabilities *pCap = &ah->caps;
3516 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3517 struct ath_btcoex_info *btcoex_info = &ah->ah_sc->btcoex_info;
3519 u16 capField = 0, eeval;
3521 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_0);
3522 regulatory->current_rd = eeval;
3524 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_1);
3525 if (AR_SREV_9285_10_OR_LATER(ah))
3526 eeval |= AR9285_RDEXT_DEFAULT;